+#endif /* MGf_LOCAL */
+
+/* ... uvar magic .......................................................... */
+
+#if VMG_UVAR
+
+static OP *vmg_pp_reset_rmg(pTHX) {
+ SVOP *svop = cSVOPx(PL_op);
+
+ /* Restore the RMAGICAL flag that vmg_svt_val() temporarily hid on this
+  * hash, then clear our slot so the fake op holds no stale SV pointer. */
+ SvRMAGICAL_on(svop->op_sv);
+ svop->op_sv = NULL;
+
+ return NORMAL;
+}
+
+/* uvar 'val' hook installed on hashes carrying uvar wizard magic.
+ * hv_common() invokes it on each access to the hash; we chain to any
+ * pre-existing uvar callbacks saved in ud->old_uf, then dispatch to the
+ * fetch / store / exists / delete callback of every wizard attached to the
+ * hash, selected by the hv_common() action flags. Returns 0 always. */
+static I32 vmg_svt_val(pTHX_ IV action, SV *sv) {
+ vmg_uvar_ud *ud;
+ MAGIC *mg, *umg, *moremagic;
+ SV *key = NULL, *newkey = NULL;
+ int tied = 0;
+
+ umg = mg_find(sv, PERL_MAGIC_uvar);
+ /* umg can't be NULL or we wouldn't be there. */
+ key = umg->mg_obj;
+ ud = (vmg_uvar_ud *) umg->mg_ptr;
+
+ /* Chain to the uvar functions that were in place before ours were
+  * installed. NOTE(review): both uf_val and uf_set are invoked regardless
+  * of the action -- presumably the old callbacks filter on it themselves;
+  * confirm against the code that fills ud->old_uf. */
+ if (ud->old_uf.uf_val)
+ ud->old_uf.uf_val(aTHX_ action, sv);
+ if (ud->old_uf.uf_set)
+ ud->old_uf.uf_set(aTHX_ action, sv);
+
+ for (mg = SvMAGIC(sv); mg; mg = moremagic) {
+ const vmg_wizard *w;
+
+ /* mg may be freed later by the uvar call, so we need to fetch the next
+ * token before reaching that fateful point. */
+ moremagic = mg->mg_moremagic;
+
+ /* Only wizard magic (attached as PERL_MAGIC_ext) is dispatched; tied
+  * magic is merely counted so the RMAGICAL fixup below can be skipped. */
+ switch (mg->mg_type) {
+ case PERL_MAGIC_ext:
+ break;
+ case PERL_MAGIC_tied:
+ ++tied;
+ continue;
+ default:
+ continue;
+ }
+
+ w = vmg_wizard_from_mg(mg);
+ if (!w)
+ continue;
+
+ switch (w->uvar) {
+ case 0:
+ /* This wizard has no uvar callbacks at all. */
+ continue;
+ case 2:
+ /* The wizard may write to the key, so hand it a mortal copy, made
+  * lazily once and then shared by all remaining wizards. */
+ if (!newkey)
+ newkey = key = umg->mg_obj = sv_mortalcopy(umg->mg_obj);
+ }
+
+ /* Map the hv_common() action onto the matching wizard callback; lvalue
+  * fetches are routed to the store callback. */
+ switch (action
+ & (HV_FETCH_ISSTORE|HV_FETCH_ISEXISTS|HV_FETCH_LVALUE|HV_DELETE)) {
+ case 0:
+ if (w->cb_fetch)
+ vmg_cb_call2(w->cb_fetch, w->opinfo | VMG_CB_CALL_GUARD, sv,
+ mg->mg_obj, key);
+ break;
+ case HV_FETCH_ISSTORE:
+ case HV_FETCH_LVALUE:
+ case (HV_FETCH_ISSTORE|HV_FETCH_LVALUE):
+ if (w->cb_store)
+ vmg_cb_call2(w->cb_store, w->opinfo | VMG_CB_CALL_GUARD, sv,
+ mg->mg_obj, key);
+ break;
+ case HV_FETCH_ISEXISTS:
+ if (w->cb_exists)
+ vmg_cb_call2(w->cb_exists, w->opinfo | VMG_CB_CALL_GUARD, sv,
+ mg->mg_obj, key);
+ break;
+ case HV_DELETE:
+ if (w->cb_delete)
+ vmg_cb_call2(w->cb_delete, w->opinfo | VMG_CB_CALL_GUARD, sv,
+ mg->mg_obj, key);
+ break;
+ }
+ }
+
+ if (SvRMAGICAL(sv) && !tied && !(action & (HV_FETCH_ISSTORE|HV_DELETE))) {
+ /* Temporarily hide the RMAGICAL flag of the hash so it isn't wrongly
+ * mistaken for a tied hash by the rest of hv_common. It will be reset by
+ * the op_ppaddr of a new fake op injected between the current and the next
+ * one. */
+
+#if VMG_RESET_RMG_NEEDS_TRAMPOLINE
+
+ /* Threaded perls needing a trampoline: bump the per-thread trampoline
+  * op instead of allocating a shared fake op. */
+ dXSH_CXT;
+
+ PL_op = vmg_trampoline_bump(&XSH_CXT.reset_rmg, sv, PL_op);
+
+#else /* !VMG_RESET_RMG_NEEDS_TRAMPOLINE */
+
+ OP *nop = PL_op->op_next;
+ SVOP *svop = NULL;
+
+ /* Reuse the fake op if a previous call already spliced one in. */
+ if (nop && nop->op_ppaddr == vmg_pp_reset_rmg) {
+ svop = (SVOP *) nop;
+ } else {
+ NewOp(1101, svop, 1, SVOP);
+ svop->op_type = OP_STUB;
+ svop->op_ppaddr = vmg_pp_reset_rmg;
+ svop->op_next = nop;
+ svop->op_flags = 0;
+ svop->op_private = 0;
+
+ PL_op->op_next = (OP *) svop;
+ }
+
+ /* NOTE(review): sv is stored without a refcount increment; the hook in
+  * vmg_pp_reset_rmg only flips a flag and clears the slot, so this looks
+  * intentional -- confirm sv outlives the next op dispatch. */
+ svop->op_sv = sv;
+
+#endif /* VMG_RESET_RMG_NEEDS_TRAMPOLINE */
+
+ SvRMAGICAL_off(sv);
+ }
+
+ return 0;
+}
+
+#endif /* VMG_UVAR */
+
+/* --- Module setup/teardown ----------------------------------------------- */
+
+#if XSH_THREADSAFE
+
+/* Final piece of global teardown: destroy the process-wide mutexes created
+ * in xsh_user_global_setup(). Run either immediately (destruct level 0) or
+ * deferred via xsh_teardown_late_register(); see xsh_user_global_teardown().
+ * ud is unused. */
+static void vmg_global_teardown_late_locked(pTHX_ void *ud) {
+/* Self-shadowing macro: lets the rest of the file call this without spelling
+ * out the aTHX_ thread-context argument. */
+#define vmg_global_teardown_late_locked(UD) vmg_global_teardown_late_locked(aTHX_ (UD))
+ MUTEX_DESTROY(&vmg_op_name_init_mutex);
+ MUTEX_DESTROY(&vmg_vtable_refcount_mutex);
+
+ return;
+}
+
+/* Compute the destruct level this perl will use: PL_perl_destruct_level,
+ * possibly raised on DEBUGGING builds by the PERL_DESTRUCT_LEVEL environment
+ * variable. The parsing strategy varies with the perl version, hence the
+ * nested version checks below. */
+static signed char vmg_destruct_level(pTHX) {
+/* Self-shadowing macro hiding the aTHX_ argument from callers. */
+#define vmg_destruct_level() vmg_destruct_level(aTHX)
+ signed char lvl;
+
+ lvl = PL_perl_destruct_level;
+
+#ifdef DEBUGGING
+ {
+ const char *s = PerlEnv_getenv("PERL_DESTRUCT_LEVEL");
+ if (s) {
+ int i;
+ /* Use the best integer parser this perl provides; "-1" is special-cased
+  * on newer perls because the parsers below are unsigned. */
+#if XSH_HAS_PERL(5, 21, 3)
+ if (strEQ(s, "-1")) {
+ i = -1;
+ } else {
+# if XSH_HAS_PERL(5, 21, 10)
+ UV uv;
+ if (Perl_grok_atoUV(s, &uv, NULL) && uv <= INT_MAX)
+ i = (int) uv;
+ else
+ i = 0;
+# else /* XSH_HAS_PERL(5, 21, 3) && !XSH_HAS_PERL(5, 21, 10) */
+ i = Perl_grok_atou(s, NULL);
+# endif
+ }
+#else /* !XSH_HAS_PERL(5, 21, 3) */
+ i = atoi(s);
+#endif
+ /* The environment can only raise the level, never lower it. */
+ if (lvl < i)
+ lvl = i;
+ }
+ }
+#endif
+
+ return lvl;
+}
+
+#endif /* XSH_THREADSAFE */
+
+static void xsh_user_global_setup(pTHX) {
+/* Process-wide initialization: create the mutexes guarding the lazy op-name
+ * table and the vtable refcounts. Only threaded perls need any of this. */
+#if XSH_THREADSAFE
+ MUTEX_INIT(&vmg_op_name_init_mutex);
+ MUTEX_INIT(&vmg_vtable_refcount_mutex);
+#endif
+
+ return;
+}
+
+/* Per-interpreter context initialization: reset the B::OP stash cache and
+ * the deferred-token bookkeeping, set up the trampolines on perls that need
+ * them, and export the module's constants into its package stash. */
+static void xsh_user_local_setup(pTHX_ xsh_user_cxt_t *cxt) {
+ HV *stash;
+ int c;
+
+ /* B::OP subclass stashes are resolved lazily; start them all out empty. */
+ for (c = OPc_NULL; c < OPc_MAX; ++c)
+ cxt->b__op_stashes[c] = NULL;
+
+ cxt->depth = 0;
+ cxt->freed_tokens = NULL;
+
+#if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
+ vmg_trampoline_init(&cxt->propagate_errsv, vmg_pp_propagate_errsv);
+#endif
+#if VMG_RESET_RMG_NEEDS_TRAMPOLINE
+ vmg_trampoline_init(&cxt->reset_rmg, vmg_pp_reset_rmg);
+#endif
+
+ /* Expose the compile-time capability and compatibility flags as constant
+  * subs in the package (gv_stashpv with create == 1). */
+ stash = gv_stashpv(XSH_PACKAGE, 1);
+ newCONSTSUB(stash, "MGf_COPY", newSVuv(MGf_COPY));
+ newCONSTSUB(stash, "MGf_DUP", newSVuv(MGf_DUP));
+ newCONSTSUB(stash, "MGf_LOCAL", newSVuv(MGf_LOCAL));
+ newCONSTSUB(stash, "VMG_UVAR", newSVuv(VMG_UVAR));
+ newCONSTSUB(stash, "VMG_COMPAT_SCALAR_LENGTH_NOLEN",
+ newSVuv(VMG_COMPAT_SCALAR_LENGTH_NOLEN));
+ newCONSTSUB(stash, "VMG_COMPAT_SCALAR_NOLEN",
+ newSVuv(VMG_COMPAT_SCALAR_NOLEN));
+ newCONSTSUB(stash, "VMG_COMPAT_ARRAY_PUSH_NOLEN",
+ newSVuv(VMG_COMPAT_ARRAY_PUSH_NOLEN));
+ newCONSTSUB(stash, "VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID",
+ newSVuv(VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID));
+ newCONSTSUB(stash, "VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID",
+ newSVuv(VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID));
+ newCONSTSUB(stash, "VMG_COMPAT_ARRAY_UNDEF_CLEAR",
+ newSVuv(VMG_COMPAT_ARRAY_UNDEF_CLEAR));
+ newCONSTSUB(stash, "VMG_COMPAT_HASH_DELETE_NOUVAR_VOID",
+ newSVuv(VMG_COMPAT_HASH_DELETE_NOUVAR_VOID));
+ newCONSTSUB(stash, "VMG_COMPAT_CODE_COPY_CLONE",
+ newSVuv(VMG_COMPAT_CODE_COPY_CLONE));
+ newCONSTSUB(stash, "VMG_COMPAT_GLOB_GET", newSVuv(VMG_COMPAT_GLOB_GET));
+ newCONSTSUB(stash, "VMG_PERL_PATCHLEVEL", newSVuv(XSH_PERL_PATCHLEVEL));
+ newCONSTSUB(stash, "VMG_THREADSAFE", newSVuv(XSH_THREADSAFE));
+ newCONSTSUB(stash, "VMG_FORKSAFE", newSVuv(XSH_FORKSAFE));
+ newCONSTSUB(stash, "VMG_OP_INFO_NAME", newSVuv(VMG_OP_INFO_NAME));
+ newCONSTSUB(stash, "VMG_OP_INFO_OBJECT", newSVuv(VMG_OP_INFO_OBJECT));
+
+ return;
+}
+
+static void xsh_user_local_teardown(pTHX_ xsh_user_cxt_t *cxt) {
+ /* Release the chain of deferred magic tokens, but only when we are back at
+  * the outermost nesting level and there is actually something to free. */
+ if (cxt->depth != 0 || !cxt->freed_tokens)
+  return;
+
+ vmg_magic_chain_free(cxt->freed_tokens, NULL);
+ cxt->freed_tokens = NULL;
+
+ return;
+}
+
+static void xsh_user_global_teardown(pTHX) {
+/* Destroy the global mutexes: immediately when perl will not run a full
+ * global destruction (destruct level 0), otherwise deferred until the very
+ * last interpreter has been torn down. */
+#if XSH_THREADSAFE
+ if (vmg_destruct_level() != 0)
+  xsh_teardown_late_register(vmg_global_teardown_late_locked, NULL);
+ else
+  vmg_global_teardown_late_locked(NULL);
+#endif
+
+ return;
+}
+
+/* --- Macros for the XS section ------------------------------------------- */
+
+/* VMG_CVOK(C): true when the CV C has a body -- either an XSUB pointer or a
+ * compiled op tree. Perls without CvISXSUB must probe both slots directly. */
+#ifdef CvISXSUB
+# define VMG_CVOK(C) \
+ ((CvISXSUB(C) ? (void *) CvXSUB(C) : (void *) CvROOT(C)) ? 1 : 0)
+#else
+# define VMG_CVOK(C) (CvROOT(C) || CvXSUB(C))
+#endif
+
+/* VMG_CBOK(S): a callback SV is usable when it is a CV with a body, or any
+ * other kind of SV that is defined. */
+#define VMG_CBOK(S) ((SvTYPE(S) == SVt_PVCV) ? VMG_CVOK(S) : SvOK(S))
+
+#define VMG_SET_CB(S, N) { \
+ SV *cb = (S); \
+ if (SvOK(cb) && SvROK(cb)) { \
+ cb = SvRV(cb); \
+ if (VMG_CBOK(cb)) \
+ SvREFCNT_inc_simple_void(cb); \
+ else \
+ cb = NULL; \
+ } else { \
+ cb = NULL; \
+ } \
+ w->cb_ ## N = cb; \
+}
+
+#define VMG_SET_SVT_CB(S, N) { \
+ SV *cb = (S); \
+ if (SvOK(cb) && SvROK(cb)) { \
+ cb = SvRV(cb); \
+ if (VMG_CBOK(cb)) { \
+ t->svt_ ## N = vmg_svt_ ## N; \
+ SvREFCNT_inc_simple_void(cb); \
+ } else { \
+ t->svt_ ## N = vmg_svt_ ## N ## _noop; \
+ cb = NULL; \
+ } \
+ } else { \
+ t->svt_ ## N = NULL; \
+ cb = NULL; \
+ } \
+ w->cb_ ## N = cb; \
+}