/* --- <vmg_vtable> structure ---------------------------------------------- */

#if VMG_THREADSAFE

/* Reference-counted wrapper around an MGVTBL, allocated via the shared
 * allocator so the same vtable can be seen from several interpreters. */
typedef struct {
 MGVTBL *vtbl;
 U32 refcount;
} vmg_vtable;

/* Allocate a fresh shared vtable wrapper with refcount 1.
 * NOTE(review): PerlMemShared_malloc() results are not checked for NULL, so
 * an OOM here would crash on the next dereference — confirm this is the
 * intended policy. */
STATIC vmg_vtable *vmg_vtable_alloc(pTHX) {
#define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
 vmg_vtable *t;

 t = VOID2(vmg_vtable *, PerlMemShared_malloc(sizeof *t));

 t->vtbl = VOID2(MGVTBL *, PerlMemShared_malloc(sizeof *t->vtbl));
 t->refcount = 1;

 return t;
}

/* Access the underlying MGVTBL of a wrapper. */
#define vmg_vtable_vtbl(T) (T)->vtbl

/* Serializes refcount updates on shared vtables across threads. */
STATIC perl_mutex vmg_vtable_refcount_mutex;

/* Take a new reference on t (used at thread-clone time); returns t. */
STATIC vmg_vtable *vmg_vtable_dup(pTHX_ vmg_vtable *t) {
#define vmg_vtable_dup(T) vmg_vtable_dup(aTHX_ (T))
 VMG_LOCK(&vmg_vtable_refcount_mutex);
 ++t->refcount;
 VMG_UNLOCK(&vmg_vtable_refcount_mutex);

 return t;
}

/* Drop one reference on t; frees both the MGVTBL and the wrapper when the
 * last reference goes away. */
STATIC void vmg_vtable_free(pTHX_ vmg_vtable *t) {
#define vmg_vtable_free(T) vmg_vtable_free(aTHX_ (T))
 U32 refcount;

 VMG_LOCK(&vmg_vtable_refcount_mutex);
 refcount = --t->refcount;
 VMG_UNLOCK(&vmg_vtable_refcount_mutex);

 if (!refcount) {
  PerlMemShared_free(t->vtbl);
  PerlMemShared_free(t);
 }
}

#else /* VMG_THREADSAFE */

/* Without threads there is no sharing: a vmg_vtable is just a plain MGVTBL
 * allocated with the regular per-interpreter allocator. */
typedef MGVTBL vmg_vtable;

/* Allocate a bare MGVTBL. */
STATIC vmg_vtable *vmg_vtable_alloc(pTHX) {
#define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
 vmg_vtable *t;

 Newx(t, 1, vmg_vtable);

 return t;
}

#define vmg_vtable_vtbl(T) ((MGVTBL *) (T))

#define vmg_vtable_free(T) Safefree(T)

#endif /* !VMG_THREADSAFE */
/* --- <vmg_wizard> structure ---------------------------------------------- */

/* A wizard bundles the magic vtable with the user-supplied Perl callbacks
 * for each magic hook, plus a couple of behaviour flags. */
typedef struct {
 vmg_vtable *vtable;

 U8 opinfo; /* VMG_OP_INFO_* mode: what op information callbacks receive */
 U8 uvar;   /* true when any uvar (fetch/store/exists/delete) callback is set */

 SV *cb_data; /* constructor for per-cast private data (may be NULL) */
 SV *cb_get, *cb_set, *cb_len, *cb_clear, *cb_free;
 SV *cb_copy;
 SV *cb_dup;
#if MGf_LOCAL
 SV *cb_local;
#endif /* MGf_LOCAL */
#if VMG_UVAR
 SV *cb_fetch, *cb_store, *cb_exists, *cb_delete;
#endif /* VMG_UVAR */
} vmg_wizard;

STATIC void vmg_op_info_init(pTHX_ unsigned int opinfo);

/* Allocate a wizard, clamp and record its op-info mode (triggering the
 * corresponding one-time initialization), and give it a fresh vtable.
 * The callback slots are left for the caller to fill in. */
STATIC vmg_wizard *vmg_wizard_alloc(pTHX_ UV opinfo) {
#define vmg_wizard_alloc(O) vmg_wizard_alloc(aTHX_ (O))
 vmg_wizard *w;

 Newx(w, 1, vmg_wizard);

 w->uvar = 0;
 w->opinfo = (U8) ((opinfo < 255) ? opinfo : 255); /* clamp to fit a U8 */
 if (w->opinfo)
  vmg_op_info_init(aTHX_ w->opinfo);

 w->vtable = vmg_vtable_alloc();

 return w;
}
/* Release a wizard: drop the references on its callback SVs (unless Perl is
 * in global destruction, where they may already be gone) and free its
 * vtable and the structure itself. Accepts NULL as a no-op. */
STATIC void vmg_wizard_free(pTHX_ vmg_wizard *w) {
#define vmg_wizard_free(W) vmg_wizard_free(aTHX_ (W))
 if (!w)
  return;

 /* During global destruction, any of the callbacks may already have been
  * freed, so we can't rely on still being able to access them. */
 if (!PL_dirty) {
  SvREFCNT_dec(w->cb_data);
  SvREFCNT_dec(w->cb_get);
  SvREFCNT_dec(w->cb_set);
  SvREFCNT_dec(w->cb_len);
  SvREFCNT_dec(w->cb_clear);
  SvREFCNT_dec(w->cb_free);
  SvREFCNT_dec(w->cb_copy);
#if 0
  /* NOTE(review): intentionally compiled out — confirm who owns cb_dup
   * before re-enabling this decrement. */
  SvREFCNT_dec(w->cb_dup);
#endif
#if MGf_LOCAL
  SvREFCNT_dec(w->cb_local);
#endif /* MGf_LOCAL */
#if VMG_UVAR
  SvREFCNT_dec(w->cb_fetch);
  SvREFCNT_dec(w->cb_store);
  SvREFCNT_dec(w->cb_exists);
  SvREFCNT_dec(w->cb_delete);
#endif /* VMG_UVAR */
 }

 /* PerlMemShared_free() and Safefree() are still fine during global
  * destruction though. */
 vmg_vtable_free(w->vtable);
 Safefree(w);

 return;
}
#if VMG_THREADSAFE

/* Duplicate one callback slot from wizard w into its clone z, mapping the SV
 * into the new interpreter with sv_dup() and taking our own reference. */
#define VMG_CLONE_CB(N) \
 z->cb_ ## N = (w->cb_ ## N) ? SvREFCNT_inc(sv_dup(w->cb_ ## N, params)) \
 : NULL;

/* Clone a wizard into a freshly spawned thread: the vtable is shared (its
 * refcount is bumped) while every callback SV is duplicated. Returns NULL
 * for a NULL input. */
STATIC const vmg_wizard *vmg_wizard_dup(pTHX_ const vmg_wizard *w, CLONE_PARAMS *params) {
#define vmg_wizard_dup(W, P) vmg_wizard_dup(aTHX_ (W), (P))
 vmg_wizard *z;

 if (!w)
  return NULL;

 Newx(z, 1, vmg_wizard);

 z->vtable = vmg_vtable_dup(w->vtable);
 z->uvar = w->uvar;
 z->opinfo = w->opinfo;

 VMG_CLONE_CB(data);
 VMG_CLONE_CB(get);
 VMG_CLONE_CB(set);
 VMG_CLONE_CB(len);
 VMG_CLONE_CB(clear);
 VMG_CLONE_CB(free);
 VMG_CLONE_CB(copy);
 VMG_CLONE_CB(dup);
#if MGf_LOCAL
 VMG_CLONE_CB(local);
#endif /* MGf_LOCAL */
#if VMG_UVAR
 VMG_CLONE_CB(fetch);
 VMG_CLONE_CB(store);
 VMG_CLONE_CB(exists);
 VMG_CLONE_CB(delete);
#endif /* VMG_UVAR */

 return z;
}

#endif /* VMG_THREADSAFE */
/* Stable identity of a wizard: the address of its MGVTBL. Under threads the
 * MGVTBL is shared between duplicated wizards, so the id survives cloning. */
#define vmg_wizard_id(W) PTR2IV(vmg_vtable_vtbl((W)->vtable))

/* --- Wizard SV objects --------------------------------------------------- */

/* 'free' hook of the magic attached to a wizard SV: destroys the underlying
 * vmg_wizard stored in mg_ptr. */
STATIC int vmg_wizard_sv_free(pTHX_ SV *sv, MAGIC *mg) {
 vmg_wizard_free((vmg_wizard *) mg->mg_ptr);

 return 0;
}
#if VMG_THREADSAFE

/* 'dup' hook of the magic attached to a wizard SV: clones the wizard into
 * the new interpreter when a thread is spawned. */
STATIC int vmg_wizard_sv_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
 mg->mg_ptr = (char *) vmg_wizard_dup((const vmg_wizard *) mg->mg_ptr, params);

 return 0;
}

#endif /* VMG_THREADSAFE */

/* Vtable of the magic that ties a vmg_wizard to the SV handed back to Perl
 * space: only 'free' (and 'dup' under threads) are needed. */
STATIC MGVTBL vmg_wizard_sv_vtbl = {
 NULL, /* get */
 NULL, /* set */
 NULL, /* len */
 NULL, /* clear */
 vmg_wizard_sv_free, /* free */
 NULL, /* copy */
#if VMG_THREADSAFE
 vmg_wizard_sv_dup, /* dup */
#else
 NULL, /* dup */
#endif
#if MGf_LOCAL
 NULL, /* local */
#endif /* MGf_LOCAL */
};
/* Wrap wizard w into a read-only SV suitable for returning to Perl space.
 * Under threads the wizard is carried by ext magic on an empty SV (so it can
 * be duplicated across threads); otherwise the SV simply holds the wizard's
 * address as an IV. In both cases the ext magic owns the wizard and will
 * free it through vmg_wizard_sv_vtbl. */
STATIC SV *vmg_wizard_sv_new(pTHX_ const vmg_wizard *w) {
#define vmg_wizard_sv_new(W) vmg_wizard_sv_new(aTHX_ (W))
 SV *wiz;

#if VMG_THREADSAFE
 wiz = newSV(0);
#else
 wiz = newSViv(PTR2IV(w));
#endif

 if (w) {
  MAGIC *mg = sv_magicext(wiz, NULL, PERL_MAGIC_ext, &vmg_wizard_sv_vtbl,
                          (const char *) w, 0);
  mg->mg_private = 0;
#if VMG_THREADSAFE
  mg->mg_flags |= MGf_DUP; /* make the wizard follow thread clones */
#endif
 }
 SvREADONLY_on(wiz);

 return wiz;
}
#if VMG_THREADSAFE

/* Under threads the wizard is carried by ext magic, so a wizard SV must at
 * least have been upgraded to a PVMG. */
#define vmg_sv_has_wizard_type(S) (SvTYPE(S) >= SVt_PVMG)

/* Extract the vmg_wizard from a wizard SV by scanning its magic chain for an
 * entry using our vtable; assumes vmg_sv_has_wizard_type() already held. */
STATIC const vmg_wizard *vmg_wizard_from_sv_nocheck(const SV *wiz) {
 MAGIC *mg;

 for (mg = SvMAGIC(wiz); mg; mg = mg->mg_moremagic) {
  if (mg->mg_type == PERL_MAGIC_ext && mg->mg_virtual == &vmg_wizard_sv_vtbl)
   return (const vmg_wizard *) mg->mg_ptr;
 }

 return NULL;
}

#else /* VMG_THREADSAFE */

/* Without threads a wizard SV is a plain IV holding the wizard's address. */
#define vmg_sv_has_wizard_type(S) SvIOK(S)

#define vmg_wizard_from_sv_nocheck(W) INT2PTR(const vmg_wizard *, SvIVX(W))

#endif /* !VMG_THREADSAFE */

/* Safe accessor: NULL when the SV is not of the expected wizard shape. */
#define vmg_wizard_from_sv(W) (vmg_sv_has_wizard_type(W) ? vmg_wizard_from_sv_nocheck(W) : NULL)

/* Given a MAGIC entry, recover the wizard that cast it. Magic cast by this
 * module stores the wizard SV in mg_ptr with mg_len == HEf_SVKEY; anything
 * else is foreign magic and yields NULL. */
STATIC const vmg_wizard *vmg_wizard_from_mg(const MAGIC *mg) {
 if (mg->mg_type == PERL_MAGIC_ext && mg->mg_len == HEf_SVKEY) {
  SV *sv = (SV *) mg->mg_ptr;

  if (vmg_sv_has_wizard_type(sv))
   return vmg_wizard_from_sv_nocheck(sv);
 }

 return NULL;
}

#define vmg_wizard_from_mg_nocheck(M) vmg_wizard_from_sv_nocheck((const SV *) (M)->mg_ptr)
+/* --- User-level functions implementation --------------------------------- */
+
+STATIC const MAGIC *vmg_find(const SV *sv, const vmg_wizard *w) {
+ const MAGIC *mg;
+ IV wid;
+
+ if (SvTYPE(sv) < SVt_PVMG)
+ return NULL;
+
+ wid = vmg_wizard_id(w);
+
+ for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
+ const vmg_wizard *z = vmg_wizard_from_mg(mg);
+
+ if (z && vmg_wizard_id(z) == wid)
+ return mg;
+ }
+
+ return NULL;
+}
+
/* ... Construct private data .............................................. */

/* Build the private data attached by a cast: call the wizard's data
 * constructor ctor in scalar context with a reference to sv followed by the
 * items extra arguments in args, and return the result with an extra
 * reference owned by the caller. */
STATIC SV *vmg_data_new(pTHX_ SV *ctor, SV *sv, SV **args, I32 items) {
#define vmg_data_new(C, S, A, I) vmg_data_new(aTHX_ (C), (S), (A), (I))
 I32 i;
 SV *nsv;

 dSP;

 ENTER;
 SAVETMPS;

 PUSHMARK(SP);
 EXTEND(SP, items + 1);
 PUSHs(sv_2mortal(newRV_inc(sv)));
 for (i = 0; i < items; ++i)
  PUSHs(args[i]);
 PUTBACK;

 vmg_call_sv(ctor, G_SCALAR, 0, NULL);

 SPAGAIN;
 nsv = POPs;
#if VMG_HAS_PERL(5, 8, 3)
 SvREFCNT_inc_simple_void(nsv); /* Or it will be destroyed in FREETMPS */
#else
 nsv = sv_newref(nsv); /* Workaround some bug in SvREFCNT_inc() */
#endif
 PUTBACK;

 FREETMPS;
 LEAVE;

 return nsv;
}
+STATIC SV *vmg_data_get(pTHX_ SV *sv, const vmg_wizard *w) {
+#define vmg_data_get(S, W) vmg_data_get(aTHX_ (S), (W))
+ const MAGIC *mg = vmg_find(sv, w);
+
+ return mg ? mg->mg_obj : NULL;
+}
+
/* ... Magic cast/dispell .................................................. */

#if VMG_UVAR

STATIC I32 vmg_svt_val(pTHX_ IV, SV *);

/* Payload of our PERL_MAGIC_uvar magic: our own ufuncs (new_uf), plus a
 * backup of whatever uvar magic we may have replaced (old_uf) so it can be
 * reinstated when our magic is dispelled. */
typedef struct {
 struct ufuncs new_uf;
 struct ufuncs old_uf;
} vmg_uvar_ud;

#endif /* VMG_UVAR */
/* Unlink the MAGIC entry mg from sv's chain (prevmagic/moremagic are its
 * neighbours) and release what it owns. The token itself is freed right
 * away, unless magic callbacks are currently running (MY_CXT.depth != 0),
 * in which case it is queued on MY_CXT.freed_tokens to be freed once the
 * outermost callback returns. */
STATIC void vmg_mg_del(pTHX_ SV *sv, MAGIC *prevmagic, MAGIC *mg, MAGIC *moremagic) {
#define vmg_mg_del(S, P, M, N) vmg_mg_del(aTHX_ (S), (P), (M), (N))
 dMY_CXT;

 if (prevmagic)
  prevmagic->mg_moremagic = moremagic;
 else
  SvMAGIC_set(sv, moremagic);

 /* Destroy private data */
#if VMG_UVAR
 if (mg->mg_type == PERL_MAGIC_uvar) {
  /* uvar magic: mg_ptr is a plain copied buffer, not an SV. */
  Safefree(mg->mg_ptr);
 } else {
#endif /* VMG_UVAR */
  if (mg->mg_obj != sv) { /* mg_obj == sv marks "no private data" */
   SvREFCNT_dec(mg->mg_obj);
   mg->mg_obj = NULL;
  }
  /* Unreference the wizard */
  SvREFCNT_dec((SV *) mg->mg_ptr);
  mg->mg_ptr = NULL;
#if VMG_UVAR
 }
#endif /* VMG_UVAR */

 if (MY_CXT.depth) {
  /* Callbacks are on the stack: defer the actual free. */
  mg->mg_moremagic = MY_CXT.freed_tokens;
  MY_CXT.freed_tokens = mg;
 } else {
  mg->mg_moremagic = NULL;
  Safefree(mg);
 }
}
+STATIC int vmg_magic_chain_free(pTHX_ MAGIC *mg, MAGIC *skip) {
+#define vmg_magic_chain_free(M, S) vmg_magic_chain_free(aTHX_ (M), (S))
+ int skipped = 0;
+
+ while (mg) {
+ MAGIC *moremagic = mg->mg_moremagic;
+
+ if (mg == skip)
+ ++skipped;
+ else
+ Safefree(mg);
+
+ mg = moremagic;
+ }
+
+ return skipped;
+}
+
/* Attach wizard w's magic to sv, optionally constructing private data from
 * args/items. wiz is the Perl-level wizard SV; it is stored as the magic's
 * key (mg_ptr with HEf_SVKEY) so the wizard can be recovered later. Always
 * returns 1; casting an already-cast sv is a no-op. */
STATIC UV vmg_cast(pTHX_ SV *sv, const vmg_wizard *w, const SV *wiz, SV **args, I32 items) {
#define vmg_cast(S, W, WIZ, A, I) vmg_cast(aTHX_ (S), (W), (WIZ), (A), (I))
 MAGIC *mg;
 MGVTBL *t;
 SV *data;
 U32 oldgmg;

 if (vmg_find(sv, w))
  return 1;

 oldgmg = SvGMAGICAL(sv); /* snapshot before sv_magicext() may change it */

 data = (w->cb_data) ? vmg_data_new(w->cb_data, sv, args, items) : NULL;

 t = vmg_vtable_vtbl(w->vtable);
 mg = sv_magicext(sv, data, PERL_MAGIC_ext, t, (const char *) wiz, HEf_SVKEY);
 mg->mg_private = 0;

 /* sv_magicext() calls mg_magical and increments data's refcount */
 SvREFCNT_dec(data);

 if (t->svt_copy)
  mg->mg_flags |= MGf_COPY;
#if 0
 /* NOTE(review): MGf_DUP intentionally not set here — confirm wizard/magic
  * duplication is handled elsewhere before re-enabling. */
 if (t->svt_dup)
  mg->mg_flags |= MGf_DUP;
#endif
#if MGf_LOCAL
 if (t->svt_local)
  mg->mg_flags |= MGf_LOCAL;
#endif /* MGf_LOCAL */

 if (SvTYPE(sv) < SVt_PVHV)
  goto done;

 /* The GMAGICAL flag only says that a hash is tied or has uvar magic - get
  * magic is actually never called for them. If the GMAGICAL flag was off before
  * calling sv_magicext(), the hash isn't tied and has no uvar magic. If it's
  * now on, then this wizard has get magic. Hence we can work around the
  * get/clear shortcoming by turning the GMAGICAL flag off. If the current magic
  * has uvar callbacks, it will be turned back on later. */
 if (!oldgmg && SvGMAGICAL(sv))
  SvGMAGICAL_off(sv);

#if VMG_UVAR
 if (w->uvar) {
  MAGIC *prevmagic, *moremagic = NULL;
  vmg_uvar_ud ud;

  ud.new_uf.uf_val = vmg_svt_val;
  ud.new_uf.uf_set = NULL;
  ud.new_uf.uf_index = 0;
  ud.old_uf.uf_val = NULL;
  ud.old_uf.uf_set = NULL;
  ud.old_uf.uf_index = 0;

  /* One uvar magic in the chain is enough. */
  for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
   moremagic = mg->mg_moremagic;
   if (mg->mg_type == PERL_MAGIC_uvar)
    break;
  }

  if (mg) { /* Found another uvar magic. */
   struct ufuncs *uf = (struct ufuncs *) mg->mg_ptr;
   if (uf->uf_val == vmg_svt_val) {
    /* It's our uvar magic, nothing to do. oldgmg was true. */
    goto done;
   } else {
    /* It's another uvar magic, backup it and replace it by ours. */
    ud.old_uf = *uf;
    vmg_mg_del(sv, prevmagic, mg, moremagic);
   }
  }

  /* sv_magic() copies the ud buffer (namlen > 0), so stack storage is ok. */
  sv_magic(sv, NULL, PERL_MAGIC_uvar, (const char *) &ud, sizeof(ud));
  vmg_mg_magical(sv);
  /* Our hash now carries uvar magic. The uvar/clear shortcoming has to be
   * handled by our uvar callback. */
 }
#endif /* VMG_UVAR */

done:
 return 1;
}
/* Remove wizard w's magic from sv. Returns 1 if the magic was found and
 * removed, 0 otherwise. With uvar support, also restores or removes the
 * shared PERL_MAGIC_uvar entry when ours was the last magic needing it. */
STATIC UV vmg_dispell(pTHX_ SV *sv, const vmg_wizard *w) {
#define vmg_dispell(S, W) vmg_dispell(aTHX_ (S), (W))
#if VMG_UVAR
 U32 uvars = 0; /* counts uvar-bearing wizards seen up to (and incl.) ours */
#endif /* VMG_UVAR */
 MAGIC *mg, *prevmagic, *moremagic = NULL;
 IV wid = vmg_wizard_id(w);

 if (SvTYPE(sv) < SVt_PVMG)
  return 0;

 for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
  const vmg_wizard *z;

  moremagic = mg->mg_moremagic;

  z = vmg_wizard_from_mg(mg);
  if (z) {
   IV zid = vmg_wizard_id(z);

#if VMG_UVAR
   if (zid == wid) {
    /* If the current has no uvar, short-circuit uvar deletion. */
    uvars = z->uvar ? (uvars + 1) : 0;
    break;
   } else if (z->uvar) {
    ++uvars;
    /* We can't break here since we need to find the ext magic to delete. */
   }
#else /* VMG_UVAR */
   if (zid == wid)
    break;
#endif /* !VMG_UVAR */
  }
 }
 if (!mg)
  return 0; /* this wizard's magic was not on sv */

 vmg_mg_del(sv, prevmagic, mg, moremagic);

#if VMG_UVAR
 if (uvars == 1 && SvTYPE(sv) >= SVt_PVHV) {
  /* mg was the first ext magic in the chain that had uvar */

  /* Check whether any remaining magic still needs the uvar hook. */
  for (mg = moremagic; mg; mg = mg->mg_moremagic) {
   const vmg_wizard *z = vmg_wizard_from_mg(mg);

   if (z && z->uvar) {
    ++uvars;
    break;
   }
  }

  if (uvars == 1) {
   /* Ours was the last uvar user: locate the PERL_MAGIC_uvar entry. */
   vmg_uvar_ud *ud;

   for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic){
    moremagic = mg->mg_moremagic;
    if (mg->mg_type == PERL_MAGIC_uvar)
     break;
   }

   ud = (vmg_uvar_ud *) mg->mg_ptr;
   if (ud->old_uf.uf_val || ud->old_uf.uf_set) {
    /* Revert the original uvar magic. */
    struct ufuncs *uf;
    Newx(uf, 1, struct ufuncs);
    *uf = ud->old_uf;
    Safefree(ud);
    mg->mg_ptr = (char *) uf;
    mg->mg_len = sizeof(*uf);
   } else {
    /* Remove the uvar magic. */
    vmg_mg_del(sv, prevmagic, mg, moremagic);
   }
  }
 }
#endif /* VMG_UVAR */

 vmg_mg_magical(sv);

 return 1;
}
/* ... OP info ............................................................. */

/* Op-info modes a wizard can request for its callbacks. */
#define VMG_OP_INFO_NAME 1
#define VMG_OP_INFO_OBJECT 2

#if VMG_THREADSAFE
/* Guards the one-time fill of vmg_op_name_len below. */
STATIC perl_mutex vmg_op_name_init_mutex;
#endif

STATIC U32 vmg_op_name_init = 0;                     /* one-shot init flag */
STATIC unsigned char vmg_op_name_len[MAXO] = { 0 };  /* strlen of each op name */
/* One-time initialization for the requested op-info mode: precompute the op
 * name lengths (NAME mode), or load B.pm and cache the B::OP class stashes
 * (OBJECT mode). Unknown modes are ignored.
 * NOTE(review): VMG_LOCK is used here even when VMG_THREADSAFE is off, so it
 * is presumably a no-op macro in that configuration — confirm. */
STATIC void vmg_op_info_init(pTHX_ unsigned int opinfo) {
#define vmg_op_info_init(W) vmg_op_info_init(aTHX_ (W))
 switch (opinfo) {
  case VMG_OP_INFO_NAME:
   VMG_LOCK(&vmg_op_name_init_mutex);
   if (!vmg_op_name_init) {
    OPCODE t;
    for (t = 0; t < OP_max; ++t)
     vmg_op_name_len[t] = strlen(PL_op_name[t]);
    vmg_op_name_init = 1;
   }
   VMG_UNLOCK(&vmg_op_name_init_mutex);
   break;
  case VMG_OP_INFO_OBJECT: {
   dMY_CXT;
   if (!MY_CXT.b__op_stashes[0]) {
    int c;
    require_pv("B.pm");
    for (c = OPc_NULL; c < OPc_MAX; ++c)
     MY_CXT.b__op_stashes[c] = gv_stashpv(vmg_opclassnames[c], 1);
   }
   break;
  }
  default:
   break;
 }
}
/* Return the requested information about the op currently being executed:
 * its name as a mortal string (NAME mode), or a blessed object wrapping
 * PL_op's address (OBJECT mode, using the stashes cached by
 * vmg_op_info_init). &PL_sv_undef when there is no current op or the mode
 * is unknown. */
STATIC SV *vmg_op_info(pTHX_ unsigned int opinfo) {
#define vmg_op_info(W) vmg_op_info(aTHX_ (W))
 if (!PL_op)
  return &PL_sv_undef;

 switch (opinfo) {
  case VMG_OP_INFO_NAME: {
   /* vmg_op_info_init(VMG_OP_INFO_NAME) has filled vmg_op_name_len. */
   OPCODE t = PL_op->op_type;
   return sv_2mortal(newSVpvn(PL_op_name[t], vmg_op_name_len[t]));
  }
  case VMG_OP_INFO_OBJECT: {
   dMY_CXT;
   return sv_bless(sv_2mortal(newRV_noinc(newSViv(PTR2IV(PL_op)))),
                   MY_CXT.b__op_stashes[vmg_opclass(PL_op)]);
  }
  default:
   break;
 }

 return &PL_sv_undef;
}
/* --- svt callbacks ------------------------------------------------------- */

/* Flags word passed to the callback dispatcher: low 4 bits carry the extra
 * argument count, the remaining bits carry the op-info mode and guard flag.
 * NOTE(review): deduced from the masks/shift below — confirm against the
 * vmg_cb_call implementation (not visible in this chunk). */
#define VMG_CB_CALL_ARGS_MASK 15
#define VMG_CB_CALL_ARGS_SHIFT 4
#define VMG_CB_CALL_OPINFO (VMG_OP_INFO_NAME|VMG_OP_INFO_OBJECT) /* 1|2 */
#define VMG_CB_CALL_GUARD 4

/* Cleanup handler run when a magic callback croaks: unwinds one nesting
 * level and, at the outermost level, frees the magic tokens whose
 * destruction was deferred (by vmg_mg_del) while callbacks were running. */
STATIC int vmg_dispell_guard_oncroak(pTHX_ void *ud) {
 dMY_CXT;

 MY_CXT.depth--;

 /* If we're at the upmost magic call and we're about to die, we can just free
  * the tokens right now, since we will jump past the problematic part of our
  * caller. */
 if (MY_CXT.depth == 0 && MY_CXT.freed_tokens) {
  vmg_magic_chain_free(MY_CXT.freed_tokens, NULL);
  MY_CXT.freed_tokens = NULL;
 }

 return 1;
}