/* Compatibility shims: fallback definitions for Perl API symbols that are
 * missing on older perls. */

#ifndef PERL_MAGIC_tied
# define PERL_MAGIC_tied 'P'
#endif

#ifndef MGf_LOCAL
# define MGf_LOCAL 0
#endif

/* True while perl is still compiling the main program. */
#ifndef IN_PERL_COMPILETIME
# define IN_PERL_COMPILETIME (PL_curcop == &PL_compiling)
#endif

#ifndef OP_NAME
# define OP_NAME(O) (PL_op_name[(O)->op_type])
#endif

#ifndef OP_CLASS
# define OP_CLASS(O) (PL_opargs[(O)->op_type] & OA_CLASS_MASK)
#endif

/* Perls 5.25.3+ need extra care when magic destroys its own holder
 * (NOTE(review): inferred from the macro name — confirm at use sites). */
#define VMG_CAREFUL_SELF_DESTRUCTION XSH_HAS_PERL(5, 25, 3)
+
+/* ... Bug-free mg_magical ................................................. */
+
+/* See the discussion at http://www.xray.mpe.mpg.de/mailing-lists/perl5-porters/2008-01/msg00036.html */
+
#if XSH_HAS_PERL(5, 11, 3)

/* Core mg_magical() is reliable from perl 5.11.3 onwards. */
#define vmg_mg_magical(S) mg_magical(S)

#else

/* Recompute sv's GMAGICAL/SMAGICAL/RMAGICAL flags from its magic chain.
 * Local reimplementation for older perls whose mg_magical() could leave
 * the flags stale (see the p5p thread referenced above). */
static void vmg_mg_magical(SV *sv) {
 const MAGIC *mg;

 SvMAGICAL_off(sv);
 if ((mg = SvMAGIC(sv))) {
  do {
   const MGVTBL* const vtbl = mg->mg_virtual;
   if (vtbl) {
    /* MGf_GSKIP suppresses get magic until the next mg_get(). */
    if (vtbl->svt_get && !(mg->mg_flags & MGf_GSKIP))
     SvGMAGICAL_on(sv);
    if (vtbl->svt_set)
     SvSMAGICAL_on(sv);
    if (vtbl->svt_clear)
     SvRMAGICAL_on(sv);
   }
  } while ((mg = mg->mg_moremagic));
  /* A magical SV with neither get nor set magic must still be flagged
   * RMAGICAL so the magic chain keeps being honoured. */
  if (!(SvFLAGS(sv) & (SVs_GMG|SVs_SMG)))
   SvRMAGICAL_on(sv);
 }
}

#endif
+
+/* ... Cleaner version of sv_magicext() .................................... */
+
/* Attach PERL_MAGIC_ext magic governed by vtbl to sv, with obj and
 * (ptr, len) as payload, then set the COPY/DUP/LOCAL flag bits that
 * sv_magicext() itself does not derive from the vtable. Returns the new
 * MAGIC, or NULL if sv_magicext() failed. */
static MAGIC *vmg_sv_magicext(pTHX_ SV *sv, SV *obj, const MGVTBL *vtbl, const void *ptr, I32 len) {
#define vmg_sv_magicext(S, O, V, P, L) vmg_sv_magicext(aTHX_ (S), (O), (V), (P), (L))
 MAGIC *mg;

 mg = sv_magicext(sv, obj, PERL_MAGIC_ext, vtbl, ptr, len);
 if (!mg)
  return NULL;

 mg->mg_private = 0;

 /* Enable the optional hooks only when the vtable actually provides them. */
 if (vtbl->svt_copy)
  mg->mg_flags |= MGf_COPY;
#if MGf_DUP
 if (vtbl->svt_dup)
  mg->mg_flags |= MGf_DUP;
#endif /* MGf_DUP */
#if MGf_LOCAL
 if (vtbl->svt_local)
  mg->mg_flags |= MGf_LOCAL;
#endif /* MGf_LOCAL */

 /* sv_magicext() took its own reference on obj; drop the caller's so the
  * object is not leaked when the magic is refcounted. */
 if (mg->mg_flags & MGf_REFCOUNTED)
  SvREFCNT_dec(obj);

 return mg;
}
+
+/* ... Safe version of call_sv() ........................................... */
+
/* Call sv via call_sv() under G_EVAL while preserving any $@ that was
 * already set. If the callback dies at compile time, the error is queued
 * (or warned) and the parser's error count is bumped so compilation
 * aborts cleanly; at run time the exception is rethrown unless the
 * optional cleanup hook, called with ud, returns 0. Returns whatever
 * call_sv() returned. */
static I32 vmg_call_sv(pTHX_ SV *sv, I32 flags, int (*cleanup)(pTHX_ void *), void *ud) {
#define vmg_call_sv(S, F, C, U) vmg_call_sv(aTHX_ (S), (F), (C), (U))
 I32 ret;
 SV *old_err = NULL;

 /* Save and clear a pre-existing $@ so we can tell whether the call
  * itself raised a new exception. */
 if (SvTRUE(ERRSV)) {
  old_err = newSVsv(ERRSV);
  sv_setsv(ERRSV, &PL_sv_undef);
 }

 ret = call_sv(sv, flags | G_EVAL);

 if (SvTRUE(ERRSV)) {
  /* The callback died; the saved $@ is superseded. */
  SvREFCNT_dec(old_err);

  if (IN_PERL_COMPILETIME) {
   if (!PL_in_eval) {
    if (PL_errors)
     sv_catsv(PL_errors, ERRSV);
    else
     Perl_warn(aTHX_ "%s", SvPV_nolen(ERRSV));
    SvCUR_set(ERRSV, 0);
   }
   /* Bump the compile-error counter wherever this perl keeps it. */
#if XSH_HAS_PERL(5, 10, 0) || defined(PL_parser)
   if (PL_parser)
    ++PL_parser->error_count;
#elif defined(PL_error_count)
   ++PL_error_count;
#else
   ++PL_Ierror_count;
#endif
  } else {
   /* croak(NULL) rethrows the exception currently in $@. */
   if (!cleanup || cleanup(aTHX_ ud))
    croak(NULL);
  }
 } else {
  if (old_err) {
   /* No new error: restore the $@ we saved on entry. */
   sv_setsv(ERRSV, old_err);
   SvREFCNT_dec(old_err);
  }
 }

 return ret;
}
+
+/* --- Stolen chunk of B --------------------------------------------------- */
+
/* Concrete struct class of an OP, mirroring B's class hierarchy.
 * Order must stay in sync with vmg_opclassnames[] below. */
typedef enum {
 OPc_NULL,
 OPc_BASEOP,
 OPc_UNOP,
 OPc_BINOP,
 OPc_LOGOP,
 OPc_LISTOP,
 OPc_PMOP,
 OPc_SVOP,
 OPc_PADOP,
 OPc_PVOP,
 OPc_LOOP,
 OPc_COP,
#if XSH_HAS_PERL(5, 21, 5)
 OPc_METHOP,
#endif
#if XSH_HAS_PERL(5, 21, 7)
 OPc_UNOP_AUX,
#endif
 OPc_MAX
} opclass;
+
/* Perl-side package name for each opclass, indexed by the enum above and
 * NULL-terminated; must stay in sync with it. */
static const char *const vmg_opclassnames[] = {
 "B::NULL",
 "B::OP",
 "B::UNOP",
 "B::BINOP",
 "B::LOGOP",
 "B::LISTOP",
 "B::PMOP",
 "B::SVOP",
 "B::PADOP",
 "B::PVOP",
 "B::LOOP",
 "B::COP",
#if XSH_HAS_PERL(5, 21, 5)
 "B::METHOP",
#endif
#if XSH_HAS_PERL(5, 21, 7)
 "B::UNOP_AUX",
#endif
 NULL
};
+
/* Determine the opclass of o. Adapted from B.xs (hence the "stolen chunk
 * of B" banner); the special cases below follow the same reasoning as
 * B::class(). */
static opclass vmg_opclass(pTHX_ const OP *o) {
#define vmg_opclass(O) vmg_opclass(aTHX_ (O))
#if 0
 if (!o)
  return OPc_NULL;
#endif

 /* A nulled op (type 0) keeps its original type in op_targ. */
 if (o->op_type == 0) {
#if XSH_HAS_PERL(5, 21, 7)
  if (o->op_targ == OP_NEXTSTATE || o->op_targ == OP_DBSTATE)
   return OPc_COP;
#endif
  return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
 }

 /* sassign: OPpASSIGN_BACKWARDS marks the optimized one-child form. */
 if (o->op_type == OP_SASSIGN)
  return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);

 /* aelemfast's representation depends on perl version and threading. */
 if (o->op_type == OP_AELEMFAST) {
#if PERL_VERSION <= 14
  if (o->op_flags & OPf_SPECIAL)
   return OPc_BASEOP;
  else
#endif
#ifdef USE_ITHREADS
   return OPc_PADOP;
#else
   return OPc_SVOP;
#endif
 }

#ifdef USE_ITHREADS
 /* Under ithreads, GV-carrying ops store a pad offset instead of an SV. */
 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_RCATLINE)
  return OPc_PADOP;
#endif

 switch (OP_CLASS(o)) {
 case OA_BASEOP:
  return OPc_BASEOP;
 case OA_UNOP:
  return OPc_UNOP;
 case OA_BINOP:
  return OPc_BINOP;
 case OA_LOGOP:
  return OPc_LOGOP;
 case OA_LISTOP:
  return OPc_LISTOP;
 case OA_PMOP:
  return OPc_PMOP;
 case OA_SVOP:
  return OPc_SVOP;
 case OA_PADOP:
  return OPc_PADOP;
 case OA_PVOP_OR_SVOP:
  /* Ops with the TO/FROM_UTF private bits carry an SV (a pad offset
   * under threaded perls >= 5.8.9) rather than a plain PV table. */
  return (
#if XSH_HAS_PERL(5, 13, 7)
          (o->op_type != OP_CUSTOM) &&
#endif
          (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF)))
#if defined(USE_ITHREADS) && XSH_HAS_PERL(5, 8, 9)
          ? OPc_PADOP : OPc_PVOP;
#else
          ? OPc_SVOP : OPc_PVOP;
#endif
 case OA_LOOP:
  return OPc_LOOP;
 case OA_COP:
  return OPc_COP;
 case OA_BASEOP_OR_UNOP:
  /* UNOP when the child op is present, BASEOP otherwise. */
  return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
 case OA_FILESTATOP:
  /* stat-like ops: child op, bare, or GV operand (pad offset under
   * threads). */
  return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
#ifdef USE_ITHREADS
          (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
#else
          (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
#endif
 case OA_LOOPEXOP:
  /* next/last/redo: STACKED means a computed label expression, SPECIAL
   * means no label at all, otherwise a literal label in the PVOP. */
  if (o->op_flags & OPf_STACKED)
   return OPc_UNOP;
  else if (o->op_flags & OPf_SPECIAL)
   return OPc_BASEOP;
  else
   return OPc_PVOP;
#if XSH_HAS_PERL(5, 21, 5)
 case OA_METHOP:
  return OPc_METHOP;
#endif
#if XSH_HAS_PERL(5, 21, 7)
 case OA_UNOP_AUX:
  return OPc_UNOP_AUX;
#endif
 }

 return OPc_BASEOP;
}
+
+/* --- Error messages ------------------------------------------------------ */
+
/* Shared croak() messages. */
static const char vmg_invalid_wiz[] = "Invalid wizard object";
static const char vmg_wrongargnum[] = "Wrong number of arguments";
+
+/* --- Thread-local storage ------------------------------------------------ */
+
/* Per-interpreter (thread-local) state for this module. */
typedef struct {
 HV *b__op_stashes[OPc_MAX]; /* Cached B::* stashes, indexed by opclass;
                              * NULL until first needed (see
                              * vmg_op_info_init). */
 I32 depth;                  /* Nesting depth of running magic callbacks. */
 MAGIC *freed_tokens;        /* MAGICs freed while callbacks were running;
                              * disposed of once depth drops to 0. */
#if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
 vmg_trampoline propagate_errsv;
#endif
#if VMG_RESET_RMG_NEEDS_TRAMPOLINE
 vmg_trampoline reset_rmg;
#endif
} xsh_user_cxt_t;
+
+#if XSH_THREADSAFE
+
+static void xsh_user_clone(pTHX_ const xsh_user_cxt_t *old_cxt, xsh_user_cxt_t *new_cxt) {
+ int c;
+
+ for (c = OPc_NULL; c < OPc_MAX; ++c) {
+ new_cxt->b__op_stashes[c] = old_cxt->b__op_stashes[c]
+ ? gv_stashpv(vmg_opclassnames[c], 1)
+ : NULL;
+ }
+
+ new_cxt->depth = old_cxt->depth;
+ new_cxt->freed_tokens = NULL;
+
+ return;
+}
+
+
+#endif /* XSH_THREADSAFE */
+
+#define XSH_THREADS_NEED_TEARDOWN_LATE 1
+
+#include "xsh/threads.h"
+
+/* --- <vmg_vtable> structure ---------------------------------------------- */
+
+#if XSH_THREADSAFE
+
/* Under threads, wizard vtables live in shared memory and are refcounted
 * so the same MGVTBL can be used by every interpreter. */
typedef struct {
 MGVTBL *vtbl;  /* The actual vtable, in shared memory. */
 U32 refcount;  /* Owners across threads; guarded by the mutex below. */
} vmg_vtable;
+
/* Allocate a vtable wrapper with refcount 1 in shared memory.
 * NOTE(review): PerlMemShared_malloc() failure is not checked here. */
static vmg_vtable *vmg_vtable_alloc(pTHX) {
#define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
 vmg_vtable *t;

 t = VOID2(vmg_vtable *, PerlMemShared_malloc(sizeof *t));

 t->vtbl = VOID2(MGVTBL *, PerlMemShared_malloc(sizeof *t->vtbl));
 t->refcount = 1;

 return t;
}

/* Extract the raw MGVTBL from a wrapper. */
#define vmg_vtable_vtbl(T) (T)->vtbl

/* Serializes refcount updates, which may come from any thread. */
static perl_mutex vmg_vtable_refcount_mutex;
+
/* Take an extra reference on t for a freshly cloned thread. */
static vmg_vtable *vmg_vtable_dup(pTHX_ vmg_vtable *t) {
#define vmg_vtable_dup(T) vmg_vtable_dup(aTHX_ (T))
 XSH_LOCK(&vmg_vtable_refcount_mutex);
 ++t->refcount;
 XSH_UNLOCK(&vmg_vtable_refcount_mutex);

 return t;
}
+
/* Drop a reference on t; the last owner frees both the MGVTBL and the
 * wrapper. The decrement is read under the lock so only one thread can
 * see the count hit zero. */
static void vmg_vtable_free(pTHX_ vmg_vtable *t) {
#define vmg_vtable_free(T) vmg_vtable_free(aTHX_ (T))
 U32 refcount;

 XSH_LOCK(&vmg_vtable_refcount_mutex);
 refcount = --t->refcount;
 XSH_UNLOCK(&vmg_vtable_refcount_mutex);

 if (!refcount) {
  PerlMemShared_free(t->vtbl);
  PerlMemShared_free(t);
 }
}
+
+#else /* XSH_THREADSAFE */
+
/* Without threads there is no sharing: a vmg_vtable is just an MGVTBL
 * allocated with Newx() and freed with Safefree(). */
typedef MGVTBL vmg_vtable;

static vmg_vtable *vmg_vtable_alloc(pTHX) {
#define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
 vmg_vtable *t;

 Newx(t, 1, vmg_vtable);

 return t;
}

#define vmg_vtable_vtbl(T) ((MGVTBL *) (T))

#define vmg_vtable_free(T) Safefree(T)
+
+#endif /* !XSH_THREADSAFE */
+
+/* --- <vmg_wizard> structure ---------------------------------------------- */
+
/* A wizard: the dispatch vtable plus the user-supplied Perl callbacks
 * for each kind of magic. Callback slots hold code SV references, or
 * NULL when the callback is not set. */
typedef struct {
 vmg_vtable *vtable;

 U8 opinfo; /* Requested op-info mode, capped at 255 (0 = none). */
 U8 uvar;   /* NOTE(review): presumably set when any uvar callback below
             * is present — confirm at the construction site. */

 SV *cb_data;
 SV *cb_get, *cb_set, *cb_len, *cb_clear, *cb_free;
 SV *cb_copy;
 SV *cb_dup;
#if MGf_LOCAL
 SV *cb_local;
#endif /* MGf_LOCAL */
#if VMG_UVAR
 SV *cb_fetch, *cb_store, *cb_exists, *cb_delete;
#endif /* VMG_UVAR */
} vmg_wizard;
+
+static void vmg_op_info_init(pTHX_ unsigned int opinfo);
+
/* Allocate a new wizard, recording the (clamped) op-info mode and
 * allocating its vtable. Callback slots are left for the caller to fill. */
static vmg_wizard *vmg_wizard_alloc(pTHX_ UV opinfo) {
#define vmg_wizard_alloc(O) vmg_wizard_alloc(aTHX_ (O))
 vmg_wizard *w;

 Newx(w, 1, vmg_wizard);

 w->uvar = 0;
 /* opinfo is stored in a U8, so clamp to 255. */
 w->opinfo = (U8) ((opinfo < 255) ? opinfo : 255);
 if (w->opinfo)
  vmg_op_info_init(aTHX_ w->opinfo);

 w->vtable = vmg_vtable_alloc();

 return w;
}
+
/* Release a wizard: drop the references on its callbacks (unless we are
 * in global destruction) and free the vtable and the structure. Safe to
 * call with NULL. */
static void vmg_wizard_free(pTHX_ vmg_wizard *w) {
#define vmg_wizard_free(W) vmg_wizard_free(aTHX_ (W))
 if (!w)
  return;

 /* During global destruction, any of the callbacks may already have been
  * freed, so we can't rely on still being able to access them. */
 if (!PL_dirty) {
  SvREFCNT_dec(w->cb_data);
  SvREFCNT_dec(w->cb_get);
  SvREFCNT_dec(w->cb_set);
  SvREFCNT_dec(w->cb_len);
  SvREFCNT_dec(w->cb_clear);
  SvREFCNT_dec(w->cb_free);
  SvREFCNT_dec(w->cb_copy);
#if 0
  SvREFCNT_dec(w->cb_dup);
#endif
#if MGf_LOCAL
  SvREFCNT_dec(w->cb_local);
#endif /* MGf_LOCAL */
#if VMG_UVAR
  SvREFCNT_dec(w->cb_fetch);
  SvREFCNT_dec(w->cb_store);
  SvREFCNT_dec(w->cb_exists);
  SvREFCNT_dec(w->cb_delete);
#endif /* VMG_UVAR */
 }

 /* PerlMemShared_free() and Safefree() are still fine during global
  * destruction though. */
 vmg_vtable_free(w->vtable);
 Safefree(w);

 return;
}
+
+#if XSH_THREADSAFE
+
/* Duplicate callback slot N of wizard w into z for a new interpreter. */
#define VMG_CLONE_CB(N) \
 z->cb_ ## N = (w->cb_ ## N) ? SvREFCNT_inc(sv_dup(w->cb_ ## N, params)) \
                             : NULL;

/* Clone a wizard into a new thread: the shared vtable gains a reference
 * and every callback SV is duplicated via sv_dup(). Returns NULL for a
 * NULL input. */
static const vmg_wizard *vmg_wizard_dup(pTHX_ const vmg_wizard *w, CLONE_PARAMS *params) {
#define vmg_wizard_dup(W, P) vmg_wizard_dup(aTHX_ (W), (P))
 vmg_wizard *z;

 if (!w)
  return NULL;

 Newx(z, 1, vmg_wizard);

 z->vtable = vmg_vtable_dup(w->vtable);
 z->uvar = w->uvar;
 z->opinfo = w->opinfo;

 VMG_CLONE_CB(data);
 VMG_CLONE_CB(get);
 VMG_CLONE_CB(set);
 VMG_CLONE_CB(len);
 VMG_CLONE_CB(clear);
 VMG_CLONE_CB(free);
 VMG_CLONE_CB(copy);
 VMG_CLONE_CB(dup);
#if MGf_LOCAL
 VMG_CLONE_CB(local);
#endif /* MGf_LOCAL */
#if VMG_UVAR
 VMG_CLONE_CB(fetch);
 VMG_CLONE_CB(store);
 VMG_CLONE_CB(exists);
 VMG_CLONE_CB(delete);
#endif /* VMG_UVAR */

 return z;
}
+
+#endif /* XSH_THREADSAFE */
+
/* A wizard's identity: the address of its (possibly shared) vtable. */
#define vmg_wizard_id(W) PTR2IV(vmg_vtable_vtbl((W)->vtable))
+
+/* --- Wizard SV objects --------------------------------------------------- */
+
/* Free hook for wizard SVs: destroy the wizard stored in mg_ptr. */
static int vmg_wizard_sv_free(pTHX_ SV *sv, MAGIC *mg) {
 vmg_wizard_free((vmg_wizard *) mg->mg_ptr);

 return 0;
}
+
+#if XSH_THREADSAFE
+
/* Dup hook for wizard SVs: replace mg_ptr with a clone of the wizard
 * suitable for the new interpreter. */
static int vmg_wizard_sv_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
 mg->mg_ptr = (char *) vmg_wizard_dup((const vmg_wizard *) mg->mg_ptr, params);

 return 0;
}
+
+#endif /* XSH_THREADSAFE */
+
/* Magic vtable backing wizard SVs themselves: free the wizard together
 * with its SV, and clone it on thread spawn when built threadsafe. */
static MGVTBL vmg_wizard_sv_vtbl = {
 NULL, /* get */
 NULL, /* set */
 NULL, /* len */
 NULL, /* clear */
 vmg_wizard_sv_free, /* free */
 NULL, /* copy */
#if XSH_THREADSAFE
 vmg_wizard_sv_dup, /* dup */
#else
 NULL, /* dup */
#endif
#if MGf_LOCAL
 NULL, /* local */
#endif /* MGf_LOCAL */
};
+
/* Wrap wizard w into a new read-only SV. Under threads the pointer is
 * carried as ext magic so vmg_wizard_sv_dup() can clone it; otherwise a
 * plain IV holding the address suffices. */
static SV *vmg_wizard_sv_new(pTHX_ const vmg_wizard *w) {
#define vmg_wizard_sv_new(W) vmg_wizard_sv_new(aTHX_ (W))
 SV *wiz;

#if XSH_THREADSAFE
 wiz = newSV(0);
#else
 wiz = newSViv(PTR2IV(w));
#endif

 vmg_sv_magicext(wiz, NULL, &vmg_wizard_sv_vtbl, w, 0);

 SvREADONLY_on(wiz);

 return wiz;
}
+
+#if XSH_THREADSAFE
+
/* Under threads a wizard SV carries ext magic, so it is at least PVMG. */
#define vmg_sv_has_wizard_type(S) (SvTYPE(S) >= SVt_PVMG)
+
+static const vmg_wizard *vmg_wizard_from_sv_nocheck(const SV *wiz) {
+ MAGIC *mg;
+
+ for (mg = SvMAGIC(wiz); mg; mg = mg->mg_moremagic) {
+ if (mg->mg_type == PERL_MAGIC_ext && mg->mg_virtual == &vmg_wizard_sv_vtbl)
+ return (const vmg_wizard *) mg->mg_ptr;
+ }
+
+ return NULL;
+}
+
+#else /* XSH_THREADSAFE */
+
/* Without threads a wizard SV is simply an IV holding the pointer. */
#define vmg_sv_has_wizard_type(S) SvIOK(S)

#define vmg_wizard_from_sv_nocheck(W) INT2PTR(const vmg_wizard *, SvIVX(W))
+
+#endif /* !XSH_THREADSAFE */
+
/* Checked accessor: validate the SV's shape before extracting the wizard. */
#define vmg_wizard_from_sv(W) (vmg_sv_has_wizard_type(W) ? vmg_wizard_from_sv_nocheck(W) : NULL)
+
+static const vmg_wizard *vmg_wizard_from_mg(const MAGIC *mg) {
+ if (mg->mg_type == PERL_MAGIC_ext && mg->mg_len == HEf_SVKEY) {
+ SV *sv = (SV *) mg->mg_ptr;
+
+ if (vmg_sv_has_wizard_type(sv))
+ return vmg_wizard_from_sv_nocheck(sv);
+ }
+
+ return NULL;
+}
+
/* Like vmg_wizard_from_mg() but without the validity checks. */
#define vmg_wizard_from_mg_nocheck(M) vmg_wizard_from_sv_nocheck((const SV *) (M)->mg_ptr)
+
+/* --- User-level functions implementation --------------------------------- */
+
+static const MAGIC *vmg_find(const SV *sv, const vmg_wizard *w) {
+ const MAGIC *mg;
+ IV wid;
+
+ if (SvTYPE(sv) < SVt_PVMG)
+ return NULL;
+
+ wid = vmg_wizard_id(w);
+
+ for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
+ const vmg_wizard *z = vmg_wizard_from_mg(mg);
+
+ if (z && vmg_wizard_id(z) == wid)
+ return mg;
+ }
+
+ return NULL;
+}
+
+/* ... Construct private data .............................................. */
+
/* Build the private data attached to sv by calling the data constructor
 * ctor with a reference to sv followed by args[0 .. items-1]. Runs on a
 * fresh PERLSI_MAGIC stack so the caller's stack is left untouched.
 * Returns a new SV whose reference is owned by the caller. */
static SV *vmg_data_new(pTHX_ SV *ctor, SV *sv, SV **args, I32 items) {
#define vmg_data_new(C, S, A, I) vmg_data_new(aTHX_ (C), (S), (A), (I))
 I32 i;
 SV *nsv;

 dSP;

 ENTER;
 SAVETMPS;

 PUSHSTACKi(PERLSI_MAGIC);

 PUSHMARK(SP);
 EXTEND(SP, items + 1);
 PUSHs(sv_2mortal(newRV_inc(sv)));
 for (i = 0; i < items; ++i)
  PUSHs(args[i]);
 PUTBACK;

 vmg_call_sv(ctor, G_SCALAR, 0, NULL);

 SPAGAIN;
 nsv = POPs;
#if XSH_HAS_PERL(5, 8, 3)
 SvREFCNT_inc_simple_void(nsv); /* Or it will be destroyed in FREETMPS */
#else
 nsv = sv_newref(nsv); /* Workaround some bug in SvREFCNT_inc() */
#endif
 PUTBACK;

 POPSTACK;

 FREETMPS;
 LEAVE;

 return nsv;
}
+
+static SV *vmg_data_get(pTHX_ SV *sv, const vmg_wizard *w) {
+#define vmg_data_get(S, W) vmg_data_get(aTHX_ (S), (W))
+ const MAGIC *mg = vmg_find(sv, w);
+
+ return mg ? mg->mg_obj : NULL;
+}
+
+/* ... Magic cast/dispell .................................................. */
+
+#if VMG_UVAR
+
static I32 vmg_svt_val(pTHX_ IV, SV *);

/* Payload stored in uvar magic: our hooks plus whatever ufuncs were
 * already installed, so the old ones can be chained and restored. */
typedef struct {
 struct ufuncs new_uf;
 struct ufuncs old_uf;
} vmg_uvar_ud;
+
+#endif /* VMG_UVAR */
+
/* Unlink mg (between prevmagic and moremagic) from sv's magic chain and
 * dispose of it. While magic callbacks are running (depth != 0) the
 * token is only queued on freed_tokens for later disposal, since callers
 * up the stack may still hold it. */
static void vmg_mg_del(pTHX_ SV *sv, MAGIC *prevmagic, MAGIC *mg, MAGIC *moremagic) {
#define vmg_mg_del(S, P, M, N) vmg_mg_del(aTHX_ (S), (P), (M), (N))
 dXSH_CXT;

 if (prevmagic)
  prevmagic->mg_moremagic = moremagic;
 else
  SvMAGIC_set(sv, moremagic);

 /* Destroy private data */
#if VMG_UVAR
 if (mg->mg_type == PERL_MAGIC_uvar) {
  /* uvar magic owns a plain vmg_uvar_ud buffer, not SVs. */
  Safefree(mg->mg_ptr);
 } else {
#endif /* VMG_UVAR */
  /* mg_obj may alias sv itself; only drop a genuinely separate object. */
  if (mg->mg_obj != sv) {
   SvREFCNT_dec(mg->mg_obj);
   mg->mg_obj = NULL;
  }
  /* Unreference the wizard */
  SvREFCNT_dec((SV *) mg->mg_ptr);
  mg->mg_ptr = NULL;
#if VMG_UVAR
 }
#endif /* VMG_UVAR */

 if (XSH_CXT.depth) {
  /* Reuse mg_moremagic to link the token into the deferred-free list. */
  mg->mg_moremagic = XSH_CXT.freed_tokens;
  XSH_CXT.freed_tokens = mg;
 } else {
  mg->mg_moremagic = NULL;
  Safefree(mg);
 }
}
+
+static int vmg_magic_chain_free(pTHX_ MAGIC *mg, MAGIC *skip) {
+#define vmg_magic_chain_free(M, S) vmg_magic_chain_free(aTHX_ (M), (S))
+ int skipped = 0;
+
+ while (mg) {
+ MAGIC *moremagic = mg->mg_moremagic;
+
+ if (mg == skip)
+ ++skipped;
+ else
+ Safefree(mg);
+
+ mg = moremagic;
+ }
+
+ return skipped;
+}
+
+static UV vmg_cast(pTHX_ SV *sv, const vmg_wizard *w, const SV *wiz, SV **args, I32 items) {
+#define vmg_cast(S, W, WIZ, A, I) vmg_cast(aTHX_ (S), (W), (WIZ), (A), (I))
+ MAGIC *mg;
+ MGVTBL *t;
+ SV *data;
+ U32 oldgmg;
+
+ if (vmg_find(sv, w))
+ return 1;
+
+ oldgmg = SvGMAGICAL(sv);