+ SV *cb_data;
+ SV *cb_get, *cb_set, *cb_len, *cb_clear, *cb_free;
+ SV *cb_copy;
+ SV *cb_dup;
+#if MGf_LOCAL
+ SV *cb_local;
+#endif /* MGf_LOCAL */
+#if VMG_UVAR
+ SV *cb_fetch, *cb_store, *cb_exists, *cb_delete;
+#endif /* VMG_UVAR */
+} vmg_wizard;
+
+static void vmg_op_info_init(pTHX_ unsigned int opinfo);
+
+/* Allocate a fresh wizard. The callback SV* members are NOT initialized
+ * here; the caller is expected to fill them in. */
+static vmg_wizard *vmg_wizard_alloc(pTHX_ UV opinfo) {
+#define vmg_wizard_alloc(O) vmg_wizard_alloc(aTHX_ (O))
+ vmg_wizard *w;
+
+ Newx(w, 1, vmg_wizard);
+
+ w->uvar = 0;
+ /* Clamp opinfo so it fits in the U8 member. */
+ w->opinfo = (U8) ((opinfo < 255) ? opinfo : 255);
+ if (w->opinfo)
+  vmg_op_info_init(aTHX_ w->opinfo);
+
+ w->vtable = vmg_vtable_alloc();
+
+ return w;
+}
+
+/* Release a wizard: drop one refcount on each callback (unless interpreter
+ * teardown is in progress), then free the vtable and the struct itself. */
+static void vmg_wizard_free(pTHX_ vmg_wizard *w) {
+#define vmg_wizard_free(W) vmg_wizard_free(aTHX_ (W))
+ if (!w)
+  return;
+
+ /* During global destruction, any of the callbacks may already have been
+  * freed, so we can't rely on still being able to access them. */
+ if (!PL_dirty) {
+  SvREFCNT_dec(w->cb_data);
+  SvREFCNT_dec(w->cb_get);
+  SvREFCNT_dec(w->cb_set);
+  SvREFCNT_dec(w->cb_len);
+  SvREFCNT_dec(w->cb_clear);
+  SvREFCNT_dec(w->cb_free);
+  SvREFCNT_dec(w->cb_copy);
+  /* cb_dup is deliberately not released here (disabled upstream);
+   * NOTE(review): presumably its ownership is handled elsewhere — confirm. */
+#if 0
+  SvREFCNT_dec(w->cb_dup);
+#endif
+#if MGf_LOCAL
+  SvREFCNT_dec(w->cb_local);
+#endif /* MGf_LOCAL */
+#if VMG_UVAR
+  SvREFCNT_dec(w->cb_fetch);
+  SvREFCNT_dec(w->cb_store);
+  SvREFCNT_dec(w->cb_exists);
+  SvREFCNT_dec(w->cb_delete);
+#endif /* VMG_UVAR */
+ }
+
+ /* PerlMemShared_free() and Safefree() are still fine during global
+  * destruction though. */
+ vmg_vtable_free(w->vtable);
+ Safefree(w);
+
+ return;
+}
+
+#if VMG_THREADSAFE
+
+/* Clone one callback slot into the duplicate wizard, duplicating the SV into
+ * the new interpreter and taking a fresh reference; NULL stays NULL. */
+#define VMG_CLONE_CB(N) \
+ z->cb_ ## N = (w->cb_ ## N) ? SvREFCNT_inc(sv_dup(w->cb_ ## N, params)) \
+                             : NULL;
+
+/* Duplicate a wizard into a newly cloned interpreter (thread spawn):
+ * copies the scalar members and clones every callback with VMG_CLONE_CB. */
+static const vmg_wizard *vmg_wizard_dup(pTHX_ const vmg_wizard *w, CLONE_PARAMS *params) {
+#define vmg_wizard_dup(W, P) vmg_wizard_dup(aTHX_ (W), (P))
+ vmg_wizard *z;
+
+ if (!w)
+  return NULL;
+
+ Newx(z, 1, vmg_wizard);
+
+ z->vtable = vmg_vtable_dup(w->vtable);
+ z->uvar   = w->uvar;
+ z->opinfo = w->opinfo;
+
+ VMG_CLONE_CB(data);
+ VMG_CLONE_CB(get);
+ VMG_CLONE_CB(set);
+ VMG_CLONE_CB(len);
+ VMG_CLONE_CB(clear);
+ VMG_CLONE_CB(free);
+ VMG_CLONE_CB(copy);
+ VMG_CLONE_CB(dup);
+#if MGf_LOCAL
+ VMG_CLONE_CB(local);
+#endif /* MGf_LOCAL */
+#if VMG_UVAR
+ VMG_CLONE_CB(fetch);
+ VMG_CLONE_CB(store);
+ VMG_CLONE_CB(exists);
+ VMG_CLONE_CB(delete);
+#endif /* VMG_UVAR */
+
+ return z;
+}
+
+#endif /* VMG_THREADSAFE */
+
+#define vmg_wizard_id(W) PTR2IV(vmg_vtable_vtbl((W)->vtable))
+
+/* --- Wizard SV objects --------------------------------------------------- */
+
+/* "free" hook of the wizard-SV magic: the wizard struct is stashed in
+ * mg_ptr, so tear it down when the carrier SV is destroyed. */
+static int vmg_wizard_sv_free(pTHX_ SV *sv, MAGIC *mg) {
+ vmg_wizard_free((vmg_wizard *) mg->mg_ptr);
+
+ return 0;
+}
+
+#if VMG_THREADSAFE
+
+/* "dup" hook of the wizard-SV magic: when the carrier SV is cloned into a
+ * new thread, replace mg_ptr with a duplicate of the wizard it points to. */
+static int vmg_wizard_sv_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
+ mg->mg_ptr = (char *) vmg_wizard_dup((const vmg_wizard *) mg->mg_ptr, params);
+
+ return 0;
+}
+
+#endif /* VMG_THREADSAFE */
+
+/* Magic vtable attached to the SV that carries a wizard: only free (and,
+ * under threads, dup) are needed to manage the embedded vmg_wizard. */
+static MGVTBL vmg_wizard_sv_vtbl = {
+ NULL,               /* get */
+ NULL,               /* set */
+ NULL,               /* len */
+ NULL,               /* clear */
+ vmg_wizard_sv_free, /* free */
+ NULL,               /* copy */
+#if VMG_THREADSAFE
+ vmg_wizard_sv_dup,  /* dup */
+#else
+ NULL,               /* dup */
+#endif
+#if MGf_LOCAL
+ NULL,               /* local */
+#endif /* MGf_LOCAL */
+};
+
+/* Wrap a wizard into a read-only SV. Under threads the wizard lives only in
+ * the magic's mg_ptr (so the dup hook can clone it); otherwise the SV's IV
+ * slot also caches the pointer for cheap retrieval via SvIVX. */
+static SV *vmg_wizard_sv_new(pTHX_ const vmg_wizard *w) {
+#define vmg_wizard_sv_new(W) vmg_wizard_sv_new(aTHX_ (W))
+ SV *wiz;
+
+#if VMG_THREADSAFE
+ wiz = newSV(0);
+#else
+ wiz = newSViv(PTR2IV(w));
+#endif
+
+ vmg_sv_magicext(wiz, NULL, &vmg_wizard_sv_vtbl, w, 0);
+
+ SvREADONLY_on(wiz);
+
+ return wiz;
+}
+
+#if VMG_THREADSAFE
+
+#define vmg_sv_has_wizard_type(S) (SvTYPE(S) >= SVt_PVMG)
+
+/* Threaded variant: walk the magic chain of the wizard SV and return the
+ * wizard stored in our ext magic's mg_ptr, or NULL if none is attached. */
+static const vmg_wizard *vmg_wizard_from_sv_nocheck(const SV *wiz) {
+ MAGIC *mg;
+
+ for (mg = SvMAGIC(wiz); mg; mg = mg->mg_moremagic) {
+  if (mg->mg_type == PERL_MAGIC_ext && mg->mg_virtual == &vmg_wizard_sv_vtbl)
+   return (const vmg_wizard *) mg->mg_ptr;
+ }
+
+ return NULL;
+}
+
+#else /* VMG_THREADSAFE */
+
+#define vmg_sv_has_wizard_type(S) SvIOK(S)
+
+#define vmg_wizard_from_sv_nocheck(W) INT2PTR(const vmg_wizard *, SvIVX(W))
+
+#endif /* !VMG_THREADSAFE */
+
+#define vmg_wizard_from_sv(W) (vmg_sv_has_wizard_type(W) ? vmg_wizard_from_sv_nocheck(W) : NULL)
+
+/* Given a magic token attached to a user SV, recover the wizard behind it.
+ * Our cast stores the wizard SV in mg_ptr with mg_len == HEf_SVKEY (see
+ * vmg_cast); anything else is not ours and yields NULL. */
+static const vmg_wizard *vmg_wizard_from_mg(const MAGIC *mg) {
+ if (mg->mg_type == PERL_MAGIC_ext && mg->mg_len == HEf_SVKEY) {
+  SV *sv = (SV *) mg->mg_ptr;
+
+  if (vmg_sv_has_wizard_type(sv))
+   return vmg_wizard_from_sv_nocheck(sv);
+ }
+
+ return NULL;
+}
+
+#define vmg_wizard_from_mg_nocheck(M) vmg_wizard_from_sv_nocheck((const SV *) (M)->mg_ptr)
+
+/* --- User-level functions implementation --------------------------------- */
+
+/* Find the magic token on sv that belongs to wizard w, matching by wizard
+ * id (the vtable pointer), or NULL if sv is not cast with w. */
+static const MAGIC *vmg_find(const SV *sv, const vmg_wizard *w) {
+ const MAGIC *mg;
+ IV wid;
+
+ /* SVs below PVMG cannot carry magic at all. */
+ if (SvTYPE(sv) < SVt_PVMG)
+  return NULL;
+
+ wid = vmg_wizard_id(w);
+
+ for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
+  const vmg_wizard *z = vmg_wizard_from_mg(mg);
+
+  if (z && vmg_wizard_id(z) == wid)
+   return mg;
+ }
+
+ return NULL;
+}
+
+/* ... Construct private data .............................................. */
+
+/* Call the wizard's data constructor with (\sv, args...) in scalar context
+ * on a fresh magic stack, and return the constructed private-data SV with
+ * an extra reference owned by the caller. */
+static SV *vmg_data_new(pTHX_ SV *ctor, SV *sv, SV **args, I32 items) {
+#define vmg_data_new(C, S, A, I) vmg_data_new(aTHX_ (C), (S), (A), (I))
+ I32 i;
+ SV *nsv;
+
+ dSP;
+
+ ENTER;
+ SAVETMPS;
+
+ /* Run the callback on its own stack so it can't disturb the caller's. */
+ PUSHSTACKi(PERLSI_MAGIC);
+
+ PUSHMARK(SP);
+ EXTEND(SP, items + 1);
+ PUSHs(sv_2mortal(newRV_inc(sv)));
+ for (i = 0; i < items; ++i)
+  PUSHs(args[i]);
+ PUTBACK;
+
+ vmg_call_sv(ctor, G_SCALAR, 0, NULL);
+
+ SPAGAIN;
+ nsv = POPs;
+#if VMG_HAS_PERL(5, 8, 3)
+ SvREFCNT_inc_simple_void(nsv); /* Or it will be destroyed in FREETMPS */
+#else
+ nsv = sv_newref(nsv);          /* Workaround some bug in SvREFCNT_inc() */
+#endif
+ PUTBACK;
+
+ POPSTACK;
+
+ FREETMPS;
+ LEAVE;
+
+ return nsv;
+}
+
+/* Return the private data attached to sv by wizard w (stored in the magic
+ * token's mg_obj), or NULL when sv is not cast with w. */
+static SV *vmg_data_get(pTHX_ SV *sv, const vmg_wizard *w) {
+#define vmg_data_get(S, W) vmg_data_get(aTHX_ (S), (W))
+ const MAGIC *mg = vmg_find(sv, w);
+
+ return mg ? mg->mg_obj : NULL;
+}
+
+/* ... Magic cast/dispell .................................................. */
+
+#if VMG_UVAR
+
+static I32 vmg_svt_val(pTHX_ IV, SV *);
+
+/* Payload of our uvar magic: our own ufuncs plus a backup of any uvar
+ * ufuncs we displaced, so they can be chained and later restored. */
+typedef struct {
+ struct ufuncs new_uf;
+ struct ufuncs old_uf;
+} vmg_uvar_ud;
+
+#endif /* VMG_UVAR */
+
+/* Unlink the magic token mg from sv's chain (prevmagic/moremagic are its
+ * neighbours), release what it owns, and free it — unless a magic callback
+ * is currently running (MY_CXT.depth != 0), in which case the token is
+ * queued on freed_tokens to be freed once the outermost call returns. */
+static void vmg_mg_del(pTHX_ SV *sv, MAGIC *prevmagic, MAGIC *mg, MAGIC *moremagic) {
+#define vmg_mg_del(S, P, M, N) vmg_mg_del(aTHX_ (S), (P), (M), (N))
+ dMY_CXT;
+
+ if (prevmagic)
+  prevmagic->mg_moremagic = moremagic;
+ else
+  SvMAGIC_set(sv, moremagic);
+
+ /* Destroy private data */
+#if VMG_UVAR
+ if (mg->mg_type == PERL_MAGIC_uvar) {
+  /* uvar tokens own a heap-allocated vmg_uvar_ud in mg_ptr. */
+  Safefree(mg->mg_ptr);
+ } else {
+#endif /* VMG_UVAR */
+  /* mg_obj == sv would be a self-reference; don't dec it in that case. */
+  if (mg->mg_obj != sv) {
+   SvREFCNT_dec(mg->mg_obj);
+   mg->mg_obj = NULL;
+  }
+  /* Unreference the wizard */
+  SvREFCNT_dec((SV *) mg->mg_ptr);
+  mg->mg_ptr = NULL;
+#if VMG_UVAR
+ }
+#endif /* VMG_UVAR */
+
+ if (MY_CXT.depth) {
+  /* A callback is active above us: defer the free, reusing mg_moremagic
+   * as the link of the freed-tokens list. */
+  mg->mg_moremagic = MY_CXT.freed_tokens;
+  MY_CXT.freed_tokens = mg;
+ } else {
+  mg->mg_moremagic = NULL;
+  Safefree(mg);
+ }
+}
+
+/* Free every token in a (detached) magic chain except `skip`; returns how
+ * many times `skip` was encountered. */
+static int vmg_magic_chain_free(pTHX_ MAGIC *mg, MAGIC *skip) {
+#define vmg_magic_chain_free(M, S) vmg_magic_chain_free(aTHX_ (M), (S))
+ int skipped = 0;
+
+ while (mg) {
+  MAGIC *moremagic = mg->mg_moremagic;
+
+  if (mg == skip)
+   ++skipped;
+  else
+   Safefree(mg);
+
+  mg = moremagic;
+ }
+
+ return skipped;
+}
+
+/* Attach wizard w's magic to sv. `wiz` is the wizard's carrier SV, stored
+ * in the new token's mg_ptr (with HEf_SVKEY so vmg_wizard_from_mg can find
+ * it); args/items feed the optional data constructor. Idempotent: casting
+ * an SV that already carries w succeeds without adding a second token.
+ * Always returns 1. */
+static UV vmg_cast(pTHX_ SV *sv, const vmg_wizard *w, const SV *wiz, SV **args, I32 items) {
+#define vmg_cast(S, W, WIZ, A, I) vmg_cast(aTHX_ (S), (W), (WIZ), (A), (I))
+ MAGIC *mg;
+ MGVTBL *t;
+ SV *data;
+ U32 oldgmg;
+
+ if (vmg_find(sv, w))
+  return 1;
+
+ /* Remember the GMAGICAL state before sv_magicext may change it. */
+ oldgmg = SvGMAGICAL(sv);
+
+ data = (w->cb_data) ? vmg_data_new(w->cb_data, sv, args, items) : NULL;
+
+ t  = vmg_vtable_vtbl(w->vtable);
+ mg = vmg_sv_magicext(sv, data, t, wiz, HEf_SVKEY);
+
+ /* Everything below only concerns hashes (and beyond). */
+ if (SvTYPE(sv) < SVt_PVHV)
+  goto done;
+
+ /* The GMAGICAL flag only says that a hash is tied or has uvar magic - get
+  * magic is actually never called for them. If the GMAGICAL flag was off before
+  * calling sv_magicext(), the hash isn't tied and has no uvar magic. If it's
+  * now on, then this wizard has get magic. Hence we can work around the
+  * get/clear shortcoming by turning the GMAGICAL flag off. If the current magic
+  * has uvar callbacks, it will be turned back on later. */
+ if (!oldgmg && SvGMAGICAL(sv))
+  SvGMAGICAL_off(sv);
+
+#if VMG_UVAR
+ if (w->uvar) {
+  MAGIC *prevmagic, *moremagic = NULL;
+  vmg_uvar_ud ud;
+
+  ud.new_uf.uf_val   = vmg_svt_val;
+  ud.new_uf.uf_set   = NULL;
+  ud.new_uf.uf_index = 0;
+  ud.old_uf.uf_val   = NULL;
+  ud.old_uf.uf_set   = NULL;
+  ud.old_uf.uf_index = 0;
+
+  /* One uvar magic in the chain is enough. */
+  for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
+   moremagic = mg->mg_moremagic;
+   if (mg->mg_type == PERL_MAGIC_uvar)
+    break;
+  }
+
+  if (mg) { /* Found another uvar magic. */
+   struct ufuncs *uf = (struct ufuncs *) mg->mg_ptr;
+   if (uf->uf_val == vmg_svt_val) {
+    /* It's our uvar magic, nothing to do. oldgmg was true. */
+    goto done;
+   } else {
+    /* It's another uvar magic, backup it and replace it by ours. */
+    ud.old_uf = *uf;
+    vmg_mg_del(sv, prevmagic, mg, moremagic);
+   }
+  }
+
+  /* sv_magic copies ud, so passing a stack object is safe here. */
+  sv_magic(sv, NULL, PERL_MAGIC_uvar, (const char *) &ud, sizeof(ud));
+  vmg_mg_magical(sv);
+  /* Our hash now carries uvar magic. The uvar/clear shortcoming has to be
+   * handled by our uvar callback. */
+ }
+#endif /* VMG_UVAR */
+
+done:
+ return 1;
+}
+
+/* Remove wizard w's magic from sv. Returns 1 on success, 0 when sv was not
+ * cast with w. Under VMG_UVAR it also removes (or restores) the shared uvar
+ * magic when the dispelled wizard was the last uvar-carrying one on sv. */
+static UV vmg_dispell(pTHX_ SV *sv, const vmg_wizard *w) {
+#define vmg_dispell(S, W) vmg_dispell(aTHX_ (S), (W))
+#if VMG_UVAR
+ U32 uvars = 0;
+#endif /* VMG_UVAR */
+ MAGIC *mg, *prevmagic, *moremagic = NULL;
+ IV wid = vmg_wizard_id(w);
+
+ if (SvTYPE(sv) < SVt_PVMG)
+  return 0;
+
+ /* Locate our ext token, counting uvar-carrying wizards seen on the way. */
+ for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
+  const vmg_wizard *z;
+
+  moremagic = mg->mg_moremagic;
+
+  z = vmg_wizard_from_mg(mg);
+  if (z) {
+   IV zid = vmg_wizard_id(z);
+
+#if VMG_UVAR
+   if (zid == wid) {
+    /* If the current has no uvar, short-circuit uvar deletion. */
+    uvars = z->uvar ? (uvars + 1) : 0;
+    break;
+   } else if (z->uvar) {
+    ++uvars;
+    /* We can't break here since we need to find the ext magic to delete. */
+   }
+#else /* VMG_UVAR */
+   if (zid == wid)
+    break;
+#endif /* !VMG_UVAR */
+  }
+ }
+ if (!mg)
+  return 0;
+
+ vmg_mg_del(sv, prevmagic, mg, moremagic);
+
+#if VMG_UVAR
+ if (uvars == 1 && SvTYPE(sv) >= SVt_PVHV) {
+  /* mg was the first ext magic in the chain that had uvar */
+
+  /* Is there another uvar-carrying wizard left after the deleted token? */
+  for (mg = moremagic; mg; mg = mg->mg_moremagic) {
+   const vmg_wizard *z = vmg_wizard_from_mg(mg);
+
+   if (z && z->uvar) {
+    ++uvars;
+    break;
+   }
+  }
+
+  if (uvars == 1) {
+   /* We just removed the last uvar wizard: deal with the PERL_MAGIC_uvar
+    * token itself. */
+   vmg_uvar_ud *ud;
+
+   for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic){
+    moremagic = mg->mg_moremagic;
+    if (mg->mg_type == PERL_MAGIC_uvar)
+     break;
+   }
+
+   ud = (vmg_uvar_ud *) mg->mg_ptr;
+   if (ud->old_uf.uf_val || ud->old_uf.uf_set) {
+    /* Revert the original uvar magic. */
+    struct ufuncs *uf;
+    Newx(uf, 1, struct ufuncs);
+    *uf = ud->old_uf;
+    Safefree(ud);
+    mg->mg_ptr = (char *) uf;
+    mg->mg_len = sizeof(*uf);
+   } else {
+    /* Remove the uvar magic. */
+    vmg_mg_del(sv, prevmagic, mg, moremagic);
+   }
+  }
+ }
+#endif /* VMG_UVAR */
+
+ vmg_mg_magical(sv);
+
+ return 1;
+}
+
+/* ... OP info ............................................................. */
+
+#define VMG_OP_INFO_NAME 1
+#define VMG_OP_INFO_OBJECT 2
+
+#if VMG_THREADSAFE
+static perl_mutex vmg_op_name_init_mutex;
+#endif
+
+static U32 vmg_op_name_init = 0;
+static unsigned char vmg_op_name_len[MAXO] = { 0 };
+
+/* Prepare the data needed by the requested op-info mode: the op-name length
+ * cache (NAME mode, mutex-protected, initialized once per process) or the
+ * B:: stash cache (OBJECT mode, per-interpreter via MY_CXT). */
+static void vmg_op_info_init(pTHX_ unsigned int opinfo) {
+#define vmg_op_info_init(W) vmg_op_info_init(aTHX_ (W))
+ switch (opinfo) {
+  case VMG_OP_INFO_NAME:
+   VMG_LOCK(&vmg_op_name_init_mutex);
+   if (!vmg_op_name_init) {
+    OPCODE t;
+    /* Pre-compute strlen() of every op name so vmg_op_info is O(1). */
+    for (t = 0; t < OP_max; ++t)
+     vmg_op_name_len[t] = strlen(PL_op_name[t]);
+    vmg_op_name_init = 1;
+   }
+   VMG_UNLOCK(&vmg_op_name_init_mutex);
+   break;
+  case VMG_OP_INFO_OBJECT: {
+   dMY_CXT;
+   if (!MY_CXT.b__op_stashes[0]) {
+    int c;
+    require_pv("B.pm");
+    for (c = OPc_NULL; c < OPc_MAX; ++c)
+     MY_CXT.b__op_stashes[c] = gv_stashpv(vmg_opclassnames[c], 1);
+   }
+   break;
+  }
+  default:
+   break;
+ }
+}
+
+/* Build the extra argument passed to callbacks describing the current op:
+ * its name as a mortal PV (NAME mode) or a blessed B::OP-ish object wrapping
+ * the op pointer (OBJECT mode); undef when there is no current op. */
+static SV *vmg_op_info(pTHX_ unsigned int opinfo) {
+#define vmg_op_info(W) vmg_op_info(aTHX_ (W))
+ if (!PL_op)
+  return &PL_sv_undef;
+
+ switch (opinfo) {
+  case VMG_OP_INFO_NAME: {
+   const char *name;
+   STRLEN name_len;
+   OPCODE t = PL_op->op_type;
+   name     = OP_NAME(PL_op);
+   /* Custom ops aren't in the pre-computed length cache. */
+   name_len = (t == OP_CUSTOM) ? strlen(name) : vmg_op_name_len[t];
+   return sv_2mortal(newSVpvn(name, name_len));
+  }
+  case VMG_OP_INFO_OBJECT: {
+   dMY_CXT;
+   return sv_bless(sv_2mortal(newRV_noinc(newSViv(PTR2IV(PL_op)))),
+                   MY_CXT.b__op_stashes[vmg_opclass(PL_op)]);
+  }
+  default:
+   break;
+ }
+
+ return &PL_sv_undef;
+}
+
+/* --- svt callbacks ------------------------------------------------------- */
+
+#define VMG_CB_CALL_ARGS_MASK 15
+#define VMG_CB_CALL_ARGS_SHIFT 4
+#define VMG_CB_CALL_OPINFO (VMG_OP_INFO_NAME|VMG_OP_INFO_OBJECT) /* 1|2 */
+#define VMG_CB_CALL_GUARD 4
+
+/* Invoked when a magic callback croaks: rebalance the nesting depth and, at
+ * the outermost level, flush the deferred-free token list since the stack
+ * unwind will skip the caller's normal cleanup path. */
+static int vmg_dispell_guard_oncroak(pTHX_ void *ud) {
+ dMY_CXT;
+
+ MY_CXT.depth--;
+
+ /* If we're at the upmost magic call and we're about to die, we can just free
+  * the tokens right now, since we will jump past the problematic part of our
+  * caller. */
+ if (MY_CXT.depth == 0 && MY_CXT.freed_tokens) {
+  vmg_magic_chain_free(MY_CXT.freed_tokens, NULL);
+  MY_CXT.freed_tokens = NULL;
+ }
+
+ return 1;
+}
+
+/* "free" hook of the guard SV: release the chain of deferred magic tokens
+ * stashed in mg_ptr when the guard goes out of scope. */
+static int vmg_dispell_guard_free(pTHX_ SV *sv, MAGIC *mg) {
+ vmg_magic_chain_free((MAGIC *) mg->mg_ptr, NULL);
+
+ return 0;
+}
+
+#if VMG_THREADSAFE
+
+/* "dup" hook of the guard SV: drop the token list in the clone. */
+static int vmg_dispell_guard_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
+ /* The freed magic tokens aren't cloned by perl because it cannot reach them
+  * (they have been detached from their parent SV when they were enqueued).
+  * Hence there's nothing to purge in the new thread. */
+ mg->mg_ptr = NULL;
+
+ return 0;
+}
+
+#endif /* VMG_THREADSAFE */
+
+/* Magic vtable for the mortal guard SV that carries deferred-free tokens:
+ * only the free (and, under threads, dup) hooks matter. */
+static MGVTBL vmg_dispell_guard_vtbl = {
+ NULL,                   /* get */
+ NULL,                   /* set */
+ NULL,                   /* len */
+ NULL,                   /* clear */
+ vmg_dispell_guard_free, /* free */
+ NULL,                   /* copy */
+#if VMG_THREADSAFE
+ vmg_dispell_guard_dup,  /* dup */
+#else
+ NULL,                   /* dup */
+#endif
+#if MGf_LOCAL
+ NULL,                   /* local */
+#endif /* MGf_LOCAL */
+};
+
+/* Create a mortal SV whose destruction (at the end of the current scope)
+ * frees the deferred magic-token chain rooted at `root`. */
+static SV *vmg_dispell_guard_new(pTHX_ MAGIC *root) {
+#define vmg_dispell_guard_new(R) vmg_dispell_guard_new(aTHX_ (R))
+ SV *guard;
+
+ guard = sv_newmortal();
+ vmg_sv_magicext(guard, NULL, &vmg_dispell_guard_vtbl, root, 0);
+
+ return guard;
+}
+
+/* Invoke a user callback with (\sv, varargs..., [op info]) in scalar
+ * context on a fresh magic stack, and return its result as an int.
+ * `flags` packs the vararg count (low VMG_CB_CALL_ARGS_SHIFT bits), the
+ * op-info mode, and VMG_CB_CALL_GUARD, which tracks call depth so tokens
+ * dispelled from inside the callback are freed safely afterwards. */
+static int vmg_cb_call(pTHX_ SV *cb, unsigned int flags, SV *sv, ...) {
+ va_list ap;
+ int ret = 0;
+ unsigned int i, args, opinfo;
+ MAGIC **chain = NULL;
+ SV *svr;
+
+ dSP;
+
+ args    = flags & VMG_CB_CALL_ARGS_MASK;
+ flags >>= VMG_CB_CALL_ARGS_SHIFT;
+ opinfo  = flags & VMG_CB_CALL_OPINFO;
+
+ ENTER;
+ SAVETMPS;
+
+ PUSHSTACKi(PERLSI_MAGIC);
+
+ PUSHMARK(SP);
+ EXTEND(SP, args + 1);
+ PUSHs(sv_2mortal(newRV_inc(sv)));
+ va_start(ap, sv);
+ for (i = 0; i < args; ++i) {
+  SV *sva = va_arg(ap, SV *);
+  PUSHs(sva ? sva : &PL_sv_undef);
+ }
+ va_end(ap);
+ if (opinfo)
+  XPUSHs(vmg_op_info(opinfo));
+ PUTBACK;
+
+ if (flags & VMG_CB_CALL_GUARD) {
+  dMY_CXT;
+  MY_CXT.depth++;
+  /* On croak, vmg_dispell_guard_oncroak rebalances depth for us. */
+  vmg_call_sv(cb, G_SCALAR, vmg_dispell_guard_oncroak, NULL);
+  MY_CXT.depth--;
+  if (MY_CXT.depth == 0 && MY_CXT.freed_tokens)
+   chain = &MY_CXT.freed_tokens;
+ } else {
+  vmg_call_sv(cb, G_SCALAR, 0, NULL);
+ }
+
+ SPAGAIN;
+ svr = POPs;
+ if (SvOK(svr))
+  ret = (int) SvIV(svr);
+ /* Keep a reference to returned references so they survive POPSTACK. */
+ if (SvROK(svr))
+  SvREFCNT_inc(svr);
+ else
+  svr = NULL;
+ PUTBACK;
+
+ POPSTACK;
+
+ FREETMPS;
+ LEAVE;
+
+ /* Hand the kept reference over to the caller's mortal pool. */
+ if (svr && !SvTEMP(svr))
+  sv_2mortal(svr);
+
+ if (chain) {
+  /* Defer freeing the dispelled tokens to the end of the caller's scope. */
+  vmg_dispell_guard_new(*chain);
+  *chain = NULL;
+ }
+
+ return ret;
+}
+
+#define VMG_CB_FLAGS(OI, A) \
+ ((((unsigned int) (OI)) << VMG_CB_CALL_ARGS_SHIFT) | (A))
+
+#define vmg_cb_call1(I, OI, S, A1) \
+ vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 1), (S), (A1))
+#define vmg_cb_call2(I, OI, S, A1, A2) \
+ vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 2), (S), (A1), (A2))
+#define vmg_cb_call3(I, OI, S, A1, A2, A3) \
+ vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 3), (S), (A1), (A2), (A3))
+
+/* ... Default no-op magic callback ........................................ */
+
+/* Shared no-op svt callback used when a wizard omits a handler. */
+static int vmg_svt_default_noop(pTHX_ SV *sv, MAGIC *mg) {
+ return 0;
+}
+
+/* ... get magic ........................................................... */
+
+/* Get-magic entry point: dispatch to the wizard's `get` callback with the
+ * private data (mg_obj) as extra argument. */
+static int vmg_svt_get(pTHX_ SV *sv, MAGIC *mg) {
+ const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
+
+ return vmg_cb_call1(w->cb_get, w->opinfo, sv, mg->mg_obj);
+}
+
+#define vmg_svt_get_noop vmg_svt_default_noop
+
+/* ... set magic ........................................................... */
+
+/* Set-magic entry point: dispatch to the wizard's `set` callback with the
+ * private data (mg_obj) as extra argument. */
+static int vmg_svt_set(pTHX_ SV *sv, MAGIC *mg) {
+ const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
+
+ return vmg_cb_call1(w->cb_set, w->opinfo, sv, mg->mg_obj);
+}
+
+#define vmg_svt_set_noop vmg_svt_default_noop
+
+/* ... len magic ........................................................... */
+
+/* String length of sv in characters: byte length from SvPV, converted to a
+ * character count when the SV is flagged as UTF-8. */
+static U32 vmg_sv_len(pTHX_ SV *sv) {
+#define vmg_sv_len(S) vmg_sv_len(aTHX_ (S))
+ STRLEN len;
+#if VMG_HAS_PERL(5, 9, 3)
+ const U8 *s = VOID2(const U8 *, VOID2(const void *, SvPV_const(sv, len)));
+#else
+ /* Pre-5.9.3 perls lack SvPV_const. */
+ U8 *s = SvPV(sv, len);
+#endif
+
+ return DO_UTF8(sv) ? utf8_length(s, s + len) : len;
+}
+
+static U32 vmg_svt_len(pTHX_ SV *sv, MAGIC *mg) {
+ const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
+ unsigned int opinfo = w->opinfo;
+ U32 len, ret;
+ SV *svr;
+ svtype t = SvTYPE(sv);
+
+ dSP;
+
+ ENTER;
+ SAVETMPS;
+
+ PUSHSTACKi(PERLSI_MAGIC);
+
+ PUSHMARK(SP);
+ EXTEND(SP, 3);
+ PUSHs(sv_2mortal(newRV_inc(sv)));