+#define PERL_NO_GET_CONTEXT
+#include "EXTERN.h"
+#include "perl.h"
+#include "XSUB.h"
+
+/* --- XS helpers ---------------------------------------------------------- */
+
+#define XSH_PACKAGE "Variable::Magic"
+
+#include "xsh/caps.h"
+#include "xsh/util.h"
+
+/* ... Features ............................................................ */
+
+/* uvar magic and Hash::Util::FieldHash were committed with 28419, but we only
+ * enable them on 5.10 */
+#if XSH_HAS_PERL(5, 10, 0)
+# define VMG_UVAR 1
+#else
+# define VMG_UVAR 0
+#endif
+
+#if XSH_HAS_PERL_MAINT(5, 11, 0, 32969) || XSH_HAS_PERL(5, 12, 0)
+# define VMG_COMPAT_SCALAR_LENGTH_NOLEN 1
+#else
+# define VMG_COMPAT_SCALAR_LENGTH_NOLEN 0
+#endif
+
+#if XSH_HAS_PERL(5, 17, 4)
+# define VMG_COMPAT_SCALAR_NOLEN 1
+#else
+# define VMG_COMPAT_SCALAR_NOLEN 0
+#endif
+
+/* Applied to dev-5.9 as 25854, integrated to maint-5.8 as 28160, partially
+ * reverted to dev-5.11 as 9cdcb38b */
+#if XSH_HAS_PERL_MAINT(5, 8, 9, 28160) || XSH_HAS_PERL_MAINT(5, 9, 3, 25854) || XSH_HAS_PERL(5, 10, 0)
+# ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN
+# if XSH_HAS_PERL(5, 11, 0)
+# define VMG_COMPAT_ARRAY_PUSH_NOLEN 0
+# else
+# define VMG_COMPAT_ARRAY_PUSH_NOLEN 1
+# endif
+# endif
+# ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID
+# define VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID 1
+# endif
+#else
+# ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN
+# define VMG_COMPAT_ARRAY_PUSH_NOLEN 0
+# endif
+# ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID
+# define VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID 0
+# endif
+#endif
+
+/* Applied to dev-5.11 as 34908 */
+#if XSH_HAS_PERL_MAINT(5, 11, 0, 34908) || XSH_HAS_PERL(5, 12, 0)
+# define VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID 1
+#else
+# define VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID 0
+#endif
+
+/* Applied to dev-5.9 as 31473 (see #43357), integrated to maint-5.8 as 32542 */
+#if XSH_HAS_PERL_MAINT(5, 8, 9, 32542) || XSH_HAS_PERL_MAINT(5, 9, 5, 31473) || XSH_HAS_PERL(5, 10, 0)
+# define VMG_COMPAT_ARRAY_UNDEF_CLEAR 1
+#else
+# define VMG_COMPAT_ARRAY_UNDEF_CLEAR 0
+#endif
+
+#if XSH_HAS_PERL(5, 11, 0)
+# define VMG_COMPAT_HASH_DELETE_NOUVAR_VOID 1
+#else
+# define VMG_COMPAT_HASH_DELETE_NOUVAR_VOID 0
+#endif
+
+#if XSH_HAS_PERL(5, 17, 0)
+# define VMG_COMPAT_CODE_COPY_CLONE 1
+#else
+# define VMG_COMPAT_CODE_COPY_CLONE 0
+#endif
+
+#if XSH_HAS_PERL(5, 13, 2)
+# define VMG_COMPAT_GLOB_GET 1
+#else
+# define VMG_COMPAT_GLOB_GET 0
+#endif
+
+/* ... Trampoline ops ...................................................... */
+
+#define VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE (XSH_HAS_PERL(5, 10, 0) && !XSH_HAS_PERL(5, 10, 1))
+
+/* NewOp() isn't public in perl 5.8.0. */
+#define VMG_RESET_RMG_NEEDS_TRAMPOLINE (VMG_UVAR && (XSH_THREADSAFE || !XSH_HAS_PERL(5, 8, 1)))
+
+/* Parenthesized so the macro expands safely inside larger preprocessor
+ * expressions (e.g. "#if !VMG_NEEDS_TRAMPOLINE" would otherwise apply the
+ * negation to the first operand only). */
+#define VMG_NEEDS_TRAMPOLINE (VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE || VMG_RESET_RMG_NEEDS_TRAMPOLINE)
+
+#if VMG_NEEDS_TRAMPOLINE
+
+/* A pair of ops used to detour execution out of a pp function: <temp> holds a
+ * copy of the op being detoured, and <target> is the SVOP that runs the detour
+ * callback with an SV payload.  Armed by vmg_trampoline_bump(). */
+typedef struct {
+ OP temp;
+ SVOP target;
+} vmg_trampoline;
+
+/* One-time setup of a trampoline: wire <temp> to fall through to <target>,
+ * whose pp function is the callback <cb>.  The remaining fields are filled in
+ * per use by vmg_trampoline_bump(). */
+static void vmg_trampoline_init(vmg_trampoline *t, OP *(*cb)(pTHX)) {
+ t->temp.op_type = OP_STUB;
+ t->temp.op_ppaddr = 0;
+ t->temp.op_next = (OP *) &t->target;
+ t->temp.op_flags = 0;
+ t->temp.op_private = 0;
+
+ t->target.op_type = OP_STUB;
+ t->target.op_ppaddr = cb;
+ t->target.op_next = NULL;
+ t->target.op_flags = 0;
+ t->target.op_private = 0;
+ t->target.op_sv = NULL;
+}
+
+/* Arm the trampoline for the current op <o>: copy <o> into the scratch slot
+ * (redirected to <target>), stash the payload <sv> on the target SVOP, and
+ * splice the target back into the normal op chain via o->op_next.  Returns
+ * the op to execute in place of <o>. */
+static OP *vmg_trampoline_bump(pTHX_ vmg_trampoline *t, SV *sv, OP *o) {
+#define vmg_trampoline_bump(T, S, O) vmg_trampoline_bump(aTHX_ (T), (S), (O))
+ t->temp = *o;
+ t->temp.op_next = (OP *) &t->target;
+
+ t->target.op_sv = sv;
+ t->target.op_next = o->op_next;
+
+ return &t->temp;
+}
+
+#endif /* VMG_NEEDS_TRAMPOLINE */
+
+/* --- Compatibility ------------------------------------------------------- */
+
+#ifndef Newx
+# define Newx(v, n, c) New(0, v, n, c)
+#endif
+
+#ifndef SvMAGIC_set
+# define SvMAGIC_set(sv, val) (SvMAGIC(sv) = (val))
+#endif
+
+#ifndef SvRV_const
+# define SvRV_const(sv) SvRV((SV *) sv)
+#endif
+
+#ifndef SvREFCNT_inc_simple_void
+# define SvREFCNT_inc_simple_void(sv) ((void) SvREFCNT_inc(sv))
+#endif
+
+#ifndef SvREFCNT_dec_NN
+# define SvREFCNT_dec_NN(sv) ((void) SvREFCNT_dec(sv))
+#endif
+
+#ifndef mPUSHu
+# define mPUSHu(U) PUSHs(sv_2mortal(newSVuv(U)))
+#endif
+
+#ifndef PERL_MAGIC_ext
+# define PERL_MAGIC_ext '~'
+#endif
+
+#ifndef PERL_MAGIC_tied
+# define PERL_MAGIC_tied 'P'
+#endif
+
+#ifndef MGf_LOCAL
+# define MGf_LOCAL 0
+#endif
+
+#ifndef IN_PERL_COMPILETIME
+# define IN_PERL_COMPILETIME (PL_curcop == &PL_compiling)
+#endif
+
+#ifndef OP_NAME
+# define OP_NAME(O) (PL_op_name[(O)->op_type])
+#endif
+
+#ifndef OP_CLASS
+# define OP_CLASS(O) (PL_opargs[(O)->op_type] & OA_CLASS_MASK)
+#endif
+
+#define VMG_CAREFUL_SELF_DESTRUCTION XSH_HAS_PERL(5, 25, 3)
+
+/* ... Bug-free mg_magical ................................................. */
+
+/* See the discussion at http://www.xray.mpe.mpg.de/mailing-lists/perl5-porters/2008-01/msg00036.html */
+
+#if XSH_HAS_PERL(5, 11, 3)
+
+#define vmg_mg_magical(S) mg_magical(S)
+
+#else
+
+/* Recompute the GMG/SMG/RMG flags of <sv> from its magic chain.  Fallback for
+ * perls before 5.11.3 whose core mg_magical() has a bug (see the URL above);
+ * the "container magic" catch-all at the end mirrors the fixed core logic. */
+static void vmg_mg_magical(SV *sv) {
+ const MAGIC *mg;
+
+ SvMAGICAL_off(sv);
+ if ((mg = SvMAGIC(sv))) {
+ do {
+ const MGVTBL* const vtbl = mg->mg_virtual;
+ if (vtbl) {
+ if (vtbl->svt_get && !(mg->mg_flags & MGf_GSKIP))
+ SvGMAGICAL_on(sv);
+ if (vtbl->svt_set)
+ SvSMAGICAL_on(sv);
+ if (vtbl->svt_clear)
+ SvRMAGICAL_on(sv);
+ }
+ } while ((mg = mg->mg_moremagic));
+ /* Magic with no get/set still needs RMG so it is not skipped entirely. */
+ if (!(SvFLAGS(sv) & (SVs_GMG|SVs_SMG)))
+ SvRMAGICAL_on(sv);
+ }
+}
+
+#endif
+
+/* ... Cleaner version of sv_magicext() .................................... */
+
+/* Wrapper around sv_magicext() that attaches PERL_MAGIC_ext magic and sets the
+ * COPY/DUP/LOCAL flags from the vtable's populated slots.  When the magic is
+ * refcounted, the extra reference sv_magicext() took on <obj> is dropped so
+ * the caller's refcount expectations stay unchanged. */
+static MAGIC *vmg_sv_magicext(pTHX_ SV *sv, SV *obj, const MGVTBL *vtbl, const void *ptr, I32 len) {
+#define vmg_sv_magicext(S, O, V, P, L) vmg_sv_magicext(aTHX_ (S), (O), (V), (P), (L))
+ MAGIC *mg;
+
+ mg = sv_magicext(sv, obj, PERL_MAGIC_ext, vtbl, ptr, len);
+ if (!mg)
+ return NULL;
+
+ mg->mg_private = 0;
+
+ if (vtbl->svt_copy)
+ mg->mg_flags |= MGf_COPY;
+#if MGf_DUP
+ if (vtbl->svt_dup)
+ mg->mg_flags |= MGf_DUP;
+#endif /* MGf_DUP */
+#if MGf_LOCAL
+ if (vtbl->svt_local)
+ mg->mg_flags |= MGf_LOCAL;
+#endif /* MGf_LOCAL */
+
+ if (mg->mg_flags & MGf_REFCOUNTED)
+ SvREFCNT_dec(obj);
+
+ return mg;
+}
+
+/* ... Safe version of call_sv() ........................................... */
+
+/* call_sv() wrapper that preserves any pre-existing $@ across the call and
+ * deals with exceptions thrown by the callback.  At compile time the error is
+ * queued/warned and the parser error count bumped so compilation aborts
+ * cleanly; at run time <cleanup> (if any) is given a chance to veto the
+ * re-throw before croak(NULL) propagates ERRSV. */
+static I32 vmg_call_sv(pTHX_ SV *sv, I32 flags, int (*cleanup)(pTHX_ void *), void *ud) {
+#define vmg_call_sv(S, F, C, U) vmg_call_sv(aTHX_ (S), (F), (C), (U))
+ I32 ret;
+ SV *old_err = NULL;
+
+ if (SvTRUE(ERRSV)) {
+ old_err = newSVsv(ERRSV);
+ sv_setsv(ERRSV, &PL_sv_undef);
+ }
+
+ /* G_EVAL so a dying callback lands in ERRSV instead of unwinding us. */
+ ret = call_sv(sv, flags | G_EVAL);
+
+ if (SvTRUE(ERRSV)) {
+ SvREFCNT_dec(old_err);
+
+ if (IN_PERL_COMPILETIME) {
+ if (!PL_in_eval) {
+ if (PL_errors)
+ sv_catsv(PL_errors, ERRSV);
+ else
+ Perl_warn(aTHX_ "%s", SvPV_nolen(ERRSV));
+ SvCUR_set(ERRSV, 0);
+ }
+#if XSH_HAS_PERL(5, 10, 0) || defined(PL_parser)
+ if (PL_parser)
+ ++PL_parser->error_count;
+#elif defined(PL_error_count)
+ ++PL_error_count;
+#else
+ ++PL_Ierror_count;
+#endif
+ } else {
+ if (!cleanup || cleanup(aTHX_ ud))
+ croak(NULL);
+ }
+ } else {
+ if (old_err) {
+ /* Callback succeeded: restore the caller's original $@. */
+ sv_setsv(ERRSV, old_err);
+ SvREFCNT_dec(old_err);
+ }
+ }
+
+ return ret;
+}
+
+/* --- Stolen chunk of B --------------------------------------------------- */
+
+/* OP structural classes, mirroring B.xs; values index vmg_opclassnames and
+ * the per-thread b__op_stashes cache.  Keep in sync with both. */
+typedef enum {
+ OPc_NULL,
+ OPc_BASEOP,
+ OPc_UNOP,
+ OPc_BINOP,
+ OPc_LOGOP,
+ OPc_LISTOP,
+ OPc_PMOP,
+ OPc_SVOP,
+ OPc_PADOP,
+ OPc_PVOP,
+ OPc_LOOP,
+ OPc_COP,
+#if XSH_HAS_PERL(5, 21, 5)
+ OPc_METHOP,
+#endif
+#if XSH_HAS_PERL(5, 21, 7)
+ OPc_UNOP_AUX,
+#endif
+ OPc_MAX
+} opclass;
+
+/* B:: package name for each opclass value; indexed by the opclass enum and
+ * NULL-terminated.  Order must match the enum exactly. */
+static const char *const vmg_opclassnames[] = {
+ "B::NULL",
+ "B::OP",
+ "B::UNOP",
+ "B::BINOP",
+ "B::LOGOP",
+ "B::LISTOP",
+ "B::PMOP",
+ "B::SVOP",
+ "B::PADOP",
+ "B::PVOP",
+ "B::LOOP",
+ "B::COP",
+#if XSH_HAS_PERL(5, 21, 5)
+ "B::METHOP",
+#endif
+#if XSH_HAS_PERL(5, 21, 7)
+ "B::UNOP_AUX",
+#endif
+ NULL
+};
+
+/* Classify <o> into its B:: structural class.  Port of B.xs's cc_opclass(),
+ * including its special cases (ex-ops, sassign, aelemfast, threaded GV ops).
+ * The caller guarantees <o> is non-NULL (we are only called on PL_op after a
+ * NULL check), hence the disabled guard below. */
+static opclass vmg_opclass(pTHX_ const OP *o) {
+#define vmg_opclass(O) vmg_opclass(aTHX_ (O))
+#if 0
+ if (!o)
+ return OPc_NULL;
+#endif
+
+ /* op_type 0 is OP_NULL: an optimized-away op whose original type was saved
+ * in op_targ. */
+ if (o->op_type == 0) {
+#if XSH_HAS_PERL(5, 21, 7)
+ if (o->op_targ == OP_NEXTSTATE || o->op_targ == OP_DBSTATE)
+ return OPc_COP;
+#endif
+ return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
+ }
+
+ if (o->op_type == OP_SASSIGN)
+ return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
+
+ if (o->op_type == OP_AELEMFAST) {
+#if PERL_VERSION <= 14
+ if (o->op_flags & OPf_SPECIAL)
+ return OPc_BASEOP;
+ else
+#endif
+#ifdef USE_ITHREADS
+ return OPc_PADOP;
+#else
+ return OPc_SVOP;
+#endif
+ }
+
+#ifdef USE_ITHREADS
+ /* Under ithreads these hold a pad offset rather than an SV pointer. */
+ if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_RCATLINE)
+ return OPc_PADOP;
+#endif
+
+ switch (OP_CLASS(o)) {
+ case OA_BASEOP:
+ return OPc_BASEOP;
+ case OA_UNOP:
+ return OPc_UNOP;
+ case OA_BINOP:
+ return OPc_BINOP;
+ case OA_LOGOP:
+ return OPc_LOGOP;
+ case OA_LISTOP:
+ return OPc_LISTOP;
+ case OA_PMOP:
+ return OPc_PMOP;
+ case OA_SVOP:
+ return OPc_SVOP;
+ case OA_PADOP:
+ return OPc_PADOP;
+ case OA_PVOP_OR_SVOP:
+ /* tr/// with UTF-8 tables stores an SV (or pad entry) instead of a PV. */
+ return (
+#if XSH_HAS_PERL(5, 13, 7)
+ (o->op_type != OP_CUSTOM) &&
+#endif
+ (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF)))
+#if defined(USE_ITHREADS) && XSH_HAS_PERL(5, 8, 9)
+ ? OPc_PADOP : OPc_PVOP;
+#else
+ ? OPc_SVOP : OPc_PVOP;
+#endif
+ case OA_LOOP:
+ return OPc_LOOP;
+ case OA_COP:
+ return OPc_COP;
+ case OA_BASEOP_OR_UNOP:
+ return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
+ case OA_FILESTATOP:
+ return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
+#ifdef USE_ITHREADS
+ (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
+#else
+ (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
+#endif
+ case OA_LOOPEXOP:
+ if (o->op_flags & OPf_STACKED)
+ return OPc_UNOP;
+ else if (o->op_flags & OPf_SPECIAL)
+ return OPc_BASEOP;
+ else
+ return OPc_PVOP;
+#if XSH_HAS_PERL(5, 21, 5)
+ case OA_METHOP:
+ return OPc_METHOP;
+#endif
+#if XSH_HAS_PERL(5, 21, 7)
+ case OA_UNOP_AUX:
+ return OPc_UNOP_AUX;
+#endif
+ }
+
+ return OPc_BASEOP;
+}
+
+/* --- Error messages ------------------------------------------------------ */
+
+static const char vmg_invalid_wiz[] = "Invalid wizard object";
+static const char vmg_wrongargnum[] = "Wrong number of arguments";
+
+/* --- Thread-local storage ------------------------------------------------ */
+
+/* Per-interpreter state: cached B:: stashes for op objects, the nesting depth
+ * of magic callbacks, the list of magic tokens whose freeing was deferred
+ * while callbacks are running, and the optional trampolines. */
+typedef struct {
+ HV *b__op_stashes[OPc_MAX];
+ I32 depth;
+ MAGIC *freed_tokens;
+#if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
+ vmg_trampoline propagate_errsv;
+#endif
+#if VMG_RESET_RMG_NEEDS_TRAMPOLINE
+ vmg_trampoline reset_rmg;
+#endif
+} xsh_user_cxt_t;
+
+#if XSH_THREADSAFE
+
+/* Thread-clone hook: rebuild the B:: stash cache in the new interpreter (only
+ * for classes the parent had cached) and start the new thread with no pending
+ * freed tokens of its own. */
+static void xsh_user_clone(pTHX_ const xsh_user_cxt_t *old_cxt, xsh_user_cxt_t *new_cxt) {
+ int c;
+
+ for (c = OPc_NULL; c < OPc_MAX; ++c) {
+ new_cxt->b__op_stashes[c] = old_cxt->b__op_stashes[c]
+ ? gv_stashpv(vmg_opclassnames[c], 1)
+ : NULL;
+ }
+
+ new_cxt->depth = old_cxt->depth;
+ new_cxt->freed_tokens = NULL;
+
+ return;
+}
+
+
+#endif /* XSH_THREADSAFE */
+
+#define XSH_THREADS_NEED_TEARDOWN_LATE 1
+
+#include "xsh/threads.h"
+
+/* --- <vmg_vtable> structure ---------------------------------------------- */
+
+#if XSH_THREADSAFE
+
+/* Threaded build: the MGVTBL lives in shared memory and is reference-counted
+ * so it can outlive the interpreter that created it. */
+typedef struct {
+ MGVTBL *vtbl;
+ U32 refcount;
+} vmg_vtable;
+
+/* Allocate a shared vtable wrapper with an initial refcount of 1.  Both the
+ * wrapper and the MGVTBL itself come from PerlMemShared so any thread may
+ * free them.  NOTE(review): allocation failure is not checked here —
+ * presumably PerlMemShared_malloc croaks or the caller tolerates it. */
+static vmg_vtable *vmg_vtable_alloc(pTHX) {
+#define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
+ vmg_vtable *t;
+
+ t = VOID2(vmg_vtable *, PerlMemShared_malloc(sizeof *t));
+
+ t->vtbl = VOID2(MGVTBL *, PerlMemShared_malloc(sizeof *t->vtbl));
+ t->refcount = 1;
+
+ return t;
+}
+
+#define vmg_vtable_vtbl(T) (T)->vtbl
+
+static perl_mutex vmg_vtable_refcount_mutex;
+
+/* Share the vtable with another interpreter: bump the refcount under the
+ * global mutex and return the same pointer. */
+static vmg_vtable *vmg_vtable_dup(pTHX_ vmg_vtable *t) {
+#define vmg_vtable_dup(T) vmg_vtable_dup(aTHX_ (T))
+ XSH_LOCK(&vmg_vtable_refcount_mutex);
+ ++t->refcount;
+ XSH_UNLOCK(&vmg_vtable_refcount_mutex);
+
+ return t;
+}
+
+/* Drop one reference; the last owner frees both shared allocations.  The
+ * decrement is read back under the mutex so only one thread sees zero. */
+static void vmg_vtable_free(pTHX_ vmg_vtable *t) {
+#define vmg_vtable_free(T) vmg_vtable_free(aTHX_ (T))
+ U32 refcount;
+
+ XSH_LOCK(&vmg_vtable_refcount_mutex);
+ refcount = --t->refcount;
+ XSH_UNLOCK(&vmg_vtable_refcount_mutex);
+
+ if (!refcount) {
+ PerlMemShared_free(t->vtbl);
+ PerlMemShared_free(t);
+ }
+}
+
+#else /* XSH_THREADSAFE */
+
+typedef MGVTBL vmg_vtable;
+
+/* Unthreaded build: a vmg_vtable is just a plain MGVTBL, no refcounting. */
+static vmg_vtable *vmg_vtable_alloc(pTHX) {
+#define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
+ vmg_vtable *t;
+
+ Newx(t, 1, vmg_vtable);
+
+ return t;
+}
+
+#define vmg_vtable_vtbl(T) ((MGVTBL *) (T))
+
+#define vmg_vtable_free(T) Safefree(T)
+
+#endif /* !XSH_THREADSAFE */
+
+/* --- <vmg_wizard> structure ---------------------------------------------- */
+
+/* A wizard: one magic vtable plus the user-supplied Perl callbacks for each
+ * magic hook.  <opinfo> selects the op-info flavour passed to callbacks
+ * (VMG_OP_INFO_*), and <uvar> records whether any uvar callback is set. */
+typedef struct {
+ vmg_vtable *vtable;
+
+ U8 opinfo;
+ U8 uvar;
+
+ SV *cb_data;
+ SV *cb_get, *cb_set, *cb_len, *cb_clear, *cb_free;
+ SV *cb_copy;
+ SV *cb_dup;
+#if MGf_LOCAL
+ SV *cb_local;
+#endif /* MGf_LOCAL */
+#if VMG_UVAR
+ SV *cb_fetch, *cb_store, *cb_exists, *cb_delete;
+#endif /* VMG_UVAR */
+} vmg_wizard;
+
+static void vmg_op_info_init(pTHX_ unsigned int opinfo);
+
+/* Allocate a wizard, clamp <opinfo> to a U8 and eagerly prepare the op-info
+ * machinery it requires (name-length table or B:: stashes).  The callback
+ * slots are filled in by the caller. */
+static vmg_wizard *vmg_wizard_alloc(pTHX_ UV opinfo) {
+#define vmg_wizard_alloc(O) vmg_wizard_alloc(aTHX_ (O))
+ vmg_wizard *w;
+
+ Newx(w, 1, vmg_wizard);
+
+ w->uvar = 0;
+ w->opinfo = (U8) ((opinfo < 255) ? opinfo : 255);
+ if (w->opinfo)
+ vmg_op_info_init(aTHX_ w->opinfo);
+
+ w->vtable = vmg_vtable_alloc();
+
+ return w;
+}
+
+/* Release a wizard: drop every callback reference (skipped during global
+ * destruction, when the SVs may already be gone), then free the vtable and
+ * the struct itself.  NULL is tolerated. */
+static void vmg_wizard_free(pTHX_ vmg_wizard *w) {
+#define vmg_wizard_free(W) vmg_wizard_free(aTHX_ (W))
+ if (!w)
+ return;
+
+ /* During global destruction, any of the callbacks may already have been
+ * freed, so we can't rely on still being able to access them. */
+ if (!PL_dirty) {
+ SvREFCNT_dec(w->cb_data);
+ SvREFCNT_dec(w->cb_get);
+ SvREFCNT_dec(w->cb_set);
+ SvREFCNT_dec(w->cb_len);
+ SvREFCNT_dec(w->cb_clear);
+ SvREFCNT_dec(w->cb_free);
+ SvREFCNT_dec(w->cb_copy);
+#if 0
+ /* cb_dup is deliberately not released here — TODO confirm its reference
+ * is owned elsewhere. */
+ SvREFCNT_dec(w->cb_dup);
+#endif
+#if MGf_LOCAL
+ SvREFCNT_dec(w->cb_local);
+#endif /* MGf_LOCAL */
+#if VMG_UVAR
+ SvREFCNT_dec(w->cb_fetch);
+ SvREFCNT_dec(w->cb_store);
+ SvREFCNT_dec(w->cb_exists);
+ SvREFCNT_dec(w->cb_delete);
+#endif /* VMG_UVAR */
+ }
+
+ /* PerlMemShared_free() and Safefree() are still fine during global
+ * destruction though. */
+ vmg_vtable_free(w->vtable);
+ Safefree(w);
+
+ return;
+}
+
+#if XSH_THREADSAFE
+
+/* Clone one callback slot into the new interpreter: sv_dup() maps the SV
+ * through the clone table, and we take our own reference on the copy. */
+#define VMG_CLONE_CB(N) \
+ z->cb_ ## N = (w->cb_ ## N) ? SvREFCNT_inc(sv_dup(w->cb_ ## N, params)) \
+ : NULL;
+
+/* Duplicate a wizard for a newly cloned thread: share the (refcounted)
+ * vtable and deep-copy every callback SV.  Returns NULL for NULL. */
+static const vmg_wizard *vmg_wizard_dup(pTHX_ const vmg_wizard *w, CLONE_PARAMS *params) {
+#define vmg_wizard_dup(W, P) vmg_wizard_dup(aTHX_ (W), (P))
+ vmg_wizard *z;
+
+ if (!w)
+ return NULL;
+
+ Newx(z, 1, vmg_wizard);
+
+ z->vtable = vmg_vtable_dup(w->vtable);
+ z->uvar = w->uvar;
+ z->opinfo = w->opinfo;
+
+ VMG_CLONE_CB(data);
+ VMG_CLONE_CB(get);
+ VMG_CLONE_CB(set);
+ VMG_CLONE_CB(len);
+ VMG_CLONE_CB(clear);
+ VMG_CLONE_CB(free);
+ VMG_CLONE_CB(copy);
+ VMG_CLONE_CB(dup);
+#if MGf_LOCAL
+ VMG_CLONE_CB(local);
+#endif /* MGf_LOCAL */
+#if VMG_UVAR
+ VMG_CLONE_CB(fetch);
+ VMG_CLONE_CB(store);
+ VMG_CLONE_CB(exists);
+ VMG_CLONE_CB(delete);
+#endif /* VMG_UVAR */
+
+ return z;
+}
+
+#endif /* XSH_THREADSAFE */
+
+#define vmg_wizard_id(W) PTR2IV(vmg_vtable_vtbl((W)->vtable))
+
+/* --- Wizard SV objects --------------------------------------------------- */
+
+/* svt_free hook of the wizard SV: the wizard struct rides in mg_ptr and dies
+ * with its carrier SV. */
+static int vmg_wizard_sv_free(pTHX_ SV *sv, MAGIC *mg) {
+ vmg_wizard_free((vmg_wizard *) mg->mg_ptr);
+
+ return 0;
+}
+
+#if XSH_THREADSAFE
+
+/* svt_dup hook of the wizard SV: replace mg_ptr with a per-thread copy of the
+ * wizard when the carrier SV is cloned. */
+static int vmg_wizard_sv_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
+ mg->mg_ptr = (char *) vmg_wizard_dup((const vmg_wizard *) mg->mg_ptr, params);
+
+ return 0;
+}
+
+#endif /* XSH_THREADSAFE */
+
+/* Magic vtable attached to wizard carrier SVs: only free (and, on threaded
+ * perls, dup) are needed to manage the embedded wizard struct. */
+static MGVTBL vmg_wizard_sv_vtbl = {
+ NULL, /* get */
+ NULL, /* set */
+ NULL, /* len */
+ NULL, /* clear */
+ vmg_wizard_sv_free, /* free */
+ NULL, /* copy */
+#if XSH_THREADSAFE
+ vmg_wizard_sv_dup, /* dup */
+#else
+ NULL, /* dup */
+#endif
+#if MGf_LOCAL
+ NULL, /* local */
+#endif /* MGf_LOCAL */
+};
+
+/* Wrap a wizard in a read-only carrier SV.  Threaded: the wizard lives only
+ * in the magic's mg_ptr (so it can be dup'ed per thread); unthreaded: the IV
+ * slot doubles as a fast pointer for vmg_wizard_from_sv_nocheck(). */
+static SV *vmg_wizard_sv_new(pTHX_ const vmg_wizard *w) {
+#define vmg_wizard_sv_new(W) vmg_wizard_sv_new(aTHX_ (W))
+ SV *wiz;
+
+#if XSH_THREADSAFE
+ wiz = newSV(0);
+#else
+ wiz = newSViv(PTR2IV(w));
+#endif
+
+ vmg_sv_magicext(wiz, NULL, &vmg_wizard_sv_vtbl, w, 0);
+
+ SvREADONLY_on(wiz);
+
+ return wiz;
+}
+
+#if XSH_THREADSAFE
+
+#define vmg_sv_has_wizard_type(S) (SvTYPE(S) >= SVt_PVMG)
+
+/* Walk the magic chain of a (pre-checked) wizard SV and return the wizard
+ * stored in our own PERL_MAGIC_ext entry, or NULL if none is found. */
+static const vmg_wizard *vmg_wizard_from_sv_nocheck(const SV *wiz) {
+ MAGIC *mg;
+
+ for (mg = SvMAGIC(wiz); mg; mg = mg->mg_moremagic) {
+ if (mg->mg_type == PERL_MAGIC_ext && mg->mg_virtual == &vmg_wizard_sv_vtbl)
+ return (const vmg_wizard *) mg->mg_ptr;
+ }
+
+ return NULL;
+}
+
+#else /* XSH_THREADSAFE */
+
+#define vmg_sv_has_wizard_type(S) SvIOK(S)
+
+#define vmg_wizard_from_sv_nocheck(W) INT2PTR(const vmg_wizard *, SvIVX(W))
+
+#endif /* !XSH_THREADSAFE */
+
+#define vmg_wizard_from_sv(W) (vmg_sv_has_wizard_type(W) ? vmg_wizard_from_sv_nocheck(W) : NULL)
+
+/* Given magic attached by this module (ext magic whose mg_ptr is the wizard
+ * carrier SV, flagged by mg_len == HEf_SVKEY), recover the wizard; NULL when
+ * the magic entry is not ours. */
+static const vmg_wizard *vmg_wizard_from_mg(const MAGIC *mg) {
+ if (mg->mg_type == PERL_MAGIC_ext && mg->mg_len == HEf_SVKEY) {
+ SV *sv = (SV *) mg->mg_ptr;
+
+ if (vmg_sv_has_wizard_type(sv))
+ return vmg_wizard_from_sv_nocheck(sv);
+ }
+
+ return NULL;
+}
+
+#define vmg_wizard_from_mg_nocheck(M) vmg_wizard_from_sv_nocheck((const SV *) (M)->mg_ptr)
+
+/* --- User-level functions implementation --------------------------------- */
+
+/* Find the magic entry on <sv> that was cast by wizard <w>, matching by
+ * wizard id (the shared vtable pointer).  NULL if <sv> cannot carry magic or
+ * no matching entry exists. */
+static const MAGIC *vmg_find(const SV *sv, const vmg_wizard *w) {
+ const MAGIC *mg;
+ IV wid;
+
+ if (SvTYPE(sv) < SVt_PVMG)
+ return NULL;
+
+ wid = vmg_wizard_id(w);
+
+ for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
+ const vmg_wizard *z = vmg_wizard_from_mg(mg);
+
+ if (z && vmg_wizard_id(z) == wid)
+ return mg;
+ }
+
+ return NULL;
+}
+
+/* ... Construct private data .............................................. */
+
+/* Build the private data SV for a cast: call <ctor> with a reference to the
+ * target <sv> followed by the user's extra <args>, on a fresh PERLSI_MAGIC
+ * stack, and return the (refcount-protected) scalar result.  Ownership of the
+ * returned SV passes to the caller. */
+static SV *vmg_data_new(pTHX_ SV *ctor, SV *sv, SV **args, I32 items) {
+#define vmg_data_new(C, S, A, I) vmg_data_new(aTHX_ (C), (S), (A), (I))
+ I32 i;
+ SV *nsv;
+
+ dSP;
+
+ ENTER;
+ SAVETMPS;
+
+ PUSHSTACKi(PERLSI_MAGIC);
+
+ PUSHMARK(SP);
+ EXTEND(SP, items + 1);
+ PUSHs(sv_2mortal(newRV_inc(sv)));
+ for (i = 0; i < items; ++i)
+ PUSHs(args[i]);
+ PUTBACK;
+
+ vmg_call_sv(ctor, G_SCALAR, 0, NULL);
+
+ SPAGAIN;
+ nsv = POPs;
+#if XSH_HAS_PERL(5, 8, 3)
+ SvREFCNT_inc_simple_void(nsv); /* Or it will be destroyed in FREETMPS */
+#else
+ nsv = sv_newref(nsv); /* Workaround some bug in SvREFCNT_inc() */
+#endif
+ PUTBACK;
+
+ POPSTACK;
+
+ FREETMPS;
+ LEAVE;
+
+ return nsv;
+}
+
+/* Return the private data SV (mg_obj) attached to <sv> by wizard <w>, or
+ * NULL when <sv> does not carry that wizard's magic. */
+static SV *vmg_data_get(pTHX_ SV *sv, const vmg_wizard *w) {
+#define vmg_data_get(S, W) vmg_data_get(aTHX_ (S), (W))
+ const MAGIC *mg = vmg_find(sv, w);
+
+ return mg ? mg->mg_obj : NULL;
+}
+
+/* ... Magic cast/dispell .................................................. */
+
+#if VMG_UVAR
+
+static I32 vmg_svt_val(pTHX_ IV, SV *);
+
+/* Payload of our PERL_MAGIC_uvar entry: our own ufuncs plus a backup of any
+ * pre-existing uvar ufuncs we displaced, so they can be restored on dispell. */
+typedef struct {
+ struct ufuncs new_uf;
+ struct ufuncs old_uf;
+} vmg_uvar_ud;
+
+#endif /* VMG_UVAR */
+
+/* Unlink <mg> from <sv>'s magic chain (between <prevmagic> and <moremagic>)
+ * and release what it owns.  If magic callbacks are currently running
+ * (depth != 0) the token itself is parked on freed_tokens instead of being
+ * freed, because the core may still hold a pointer to it. */
+static void vmg_mg_del(pTHX_ SV *sv, MAGIC *prevmagic, MAGIC *mg, MAGIC *moremagic) {
+#define vmg_mg_del(S, P, M, N) vmg_mg_del(aTHX_ (S), (P), (M), (N))
+ dXSH_CXT;
+
+ if (prevmagic)
+ prevmagic->mg_moremagic = moremagic;
+ else
+ SvMAGIC_set(sv, moremagic);
+
+ /* Destroy private data */
+#if VMG_UVAR
+ if (mg->mg_type == PERL_MAGIC_uvar) {
+ /* uvar entries own a heap-allocated ufuncs/vmg_uvar_ud blob. */
+ Safefree(mg->mg_ptr);
+ } else {
+#endif /* VMG_UVAR */
+ if (mg->mg_obj != sv) {
+ SvREFCNT_dec(mg->mg_obj);
+ mg->mg_obj = NULL;
+ }
+ /* Unreference the wizard */
+ SvREFCNT_dec((SV *) mg->mg_ptr);
+ mg->mg_ptr = NULL;
+#if VMG_UVAR
+ }
+#endif /* VMG_UVAR */
+
+ if (XSH_CXT.depth) {
+ /* Defer: reuse mg_moremagic to chain the parked tokens. */
+ mg->mg_moremagic = XSH_CXT.freed_tokens;
+ XSH_CXT.freed_tokens = mg;
+ } else {
+ mg->mg_moremagic = NULL;
+ Safefree(mg);
+ }
+}
+
+/* Free every parked magic token in the chain <mg>, except <skip> which is
+ * left alive; returns how many tokens were skipped (0 or 1 in practice). */
+static int vmg_magic_chain_free(pTHX_ MAGIC *mg, MAGIC *skip) {
+#define vmg_magic_chain_free(M, S) vmg_magic_chain_free(aTHX_ (M), (S))
+ int skipped = 0;
+
+ while (mg) {
+ MAGIC *moremagic = mg->mg_moremagic;
+
+ if (mg == skip)
+ ++skipped;
+ else
+ Safefree(mg);
+
+ mg = moremagic;
+ }
+
+ return skipped;
+}
+
+/* Attach wizard <w>'s magic to <sv>.  Idempotent: casting an already-cast
+ * wizard is a no-op returning 1.  Constructs the private data (if the wizard
+ * has a data constructor), attaches the ext magic, and for hashes works
+ * around the get/clear GMAGICAL quirk and installs/merges uvar magic. */
+static UV vmg_cast(pTHX_ SV *sv, const vmg_wizard *w, const SV *wiz, SV **args, I32 items) {
+#define vmg_cast(S, W, WIZ, A, I) vmg_cast(aTHX_ (S), (W), (WIZ), (A), (I))
+ MAGIC *mg;
+ MGVTBL *t;
+ SV *data;
+ U32 oldgmg;
+
+ if (vmg_find(sv, w))
+ return 1;
+
+ oldgmg = SvGMAGICAL(sv);
+
+ data = (w->cb_data) ? vmg_data_new(w->cb_data, sv, args, items) : NULL;
+
+ t = vmg_vtable_vtbl(w->vtable);
+ mg = vmg_sv_magicext(sv, data, t, wiz, HEf_SVKEY);
+
+ if (SvTYPE(sv) < SVt_PVHV)
+ goto done;
+
+ /* The GMAGICAL flag only says that a hash is tied or has uvar magic - get
+ * magic is actually never called for them. If the GMAGICAL flag was off before
+ * calling sv_magicext(), the hash isn't tied and has no uvar magic. If it's
+ * now on, then this wizard has get magic. Hence we can work around the
+ * get/clear shortcoming by turning the GMAGICAL flag off. If the current magic
+ * has uvar callbacks, it will be turned back on later. */
+ if (!oldgmg && SvGMAGICAL(sv))
+ SvGMAGICAL_off(sv);
+
+#if VMG_UVAR
+ if (w->uvar) {
+ MAGIC *prevmagic, *moremagic = NULL;
+ vmg_uvar_ud ud;
+
+ ud.new_uf.uf_val = vmg_svt_val;
+ ud.new_uf.uf_set = NULL;
+ ud.new_uf.uf_index = 0;
+ ud.old_uf.uf_val = NULL;
+ ud.old_uf.uf_set = NULL;
+ ud.old_uf.uf_index = 0;
+
+ /* One uvar magic in the chain is enough. */
+ for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
+ moremagic = mg->mg_moremagic;
+ if (mg->mg_type == PERL_MAGIC_uvar)
+ break;
+ }
+
+ if (mg) { /* Found another uvar magic. */
+ struct ufuncs *uf = (struct ufuncs *) mg->mg_ptr;
+ if (uf->uf_val == vmg_svt_val) {
+ /* It's our uvar magic, nothing to do. oldgmg was true. */
+ goto done;
+ } else {
+ /* It's another uvar magic, backup it and replace it by ours. */
+ ud.old_uf = *uf;
+ vmg_mg_del(sv, prevmagic, mg, moremagic);
+ }
+ }
+
+ /* sv_magic() copies <ud> into fresh storage owned by the magic entry. */
+ sv_magic(sv, NULL, PERL_MAGIC_uvar, (const char *) &ud, sizeof(ud));
+ vmg_mg_magical(sv);
+ /* Our hash now carries uvar magic. The uvar/clear shortcoming has to be
+ * handled by our uvar callback. */
+ }
+#endif /* VMG_UVAR */
+
+done:
+ return 1;
+}
+
+/* Remove wizard <w>'s magic from <sv>.  Returns 0 if the wizard was not cast
+ * on <sv>, 1 on success.  On hashes it also bookkeeps uvar usage: when the
+ * dispelled wizard was the only uvar user left, the shared PERL_MAGIC_uvar
+ * entry is either reverted to the ufuncs it displaced or removed outright. */
+static UV vmg_dispell(pTHX_ SV *sv, const vmg_wizard *w) {
+#define vmg_dispell(S, W) vmg_dispell(aTHX_ (S), (W))
+#if VMG_UVAR
+ U32 uvars = 0;
+#endif /* VMG_UVAR */
+ MAGIC *mg, *prevmagic, *moremagic = NULL;
+ IV wid = vmg_wizard_id(w);
+
+ if (SvTYPE(sv) < SVt_PVMG)
+ return 0;
+
+ /* Locate our ext magic, counting uvar-capable wizards seen on the way. */
+ for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
+ const vmg_wizard *z;
+
+ moremagic = mg->mg_moremagic;
+
+ z = vmg_wizard_from_mg(mg);
+ if (z) {
+ IV zid = vmg_wizard_id(z);
+
+#if VMG_UVAR
+ if (zid == wid) {
+ /* If the current has no uvar, short-circuit uvar deletion. */
+ uvars = z->uvar ? (uvars + 1) : 0;
+ break;
+ } else if (z->uvar) {
+ ++uvars;
+ /* We can't break here since we need to find the ext magic to delete. */
+ }
+#else /* VMG_UVAR */
+ if (zid == wid)
+ break;
+#endif /* !VMG_UVAR */
+ }
+ }
+ if (!mg)
+ return 0;
+
+ vmg_mg_del(sv, prevmagic, mg, moremagic);
+
+#if VMG_UVAR
+ if (uvars == 1 && SvTYPE(sv) >= SVt_PVHV) {
+ /* mg was the first ext magic in the chain that had uvar */
+
+ /* Check whether any later wizard still needs the uvar hook. */
+ for (mg = moremagic; mg; mg = mg->mg_moremagic) {
+ const vmg_wizard *z = vmg_wizard_from_mg(mg);
+
+ if (z && z->uvar) {
+ ++uvars;
+ break;
+ }
+ }
+
+ if (uvars == 1) {
+ vmg_uvar_ud *ud;
+
+ for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic){
+ moremagic = mg->mg_moremagic;
+ if (mg->mg_type == PERL_MAGIC_uvar)
+ break;
+ }
+
+ ud = (vmg_uvar_ud *) mg->mg_ptr;
+ if (ud->old_uf.uf_val || ud->old_uf.uf_set) {
+ /* Revert the original uvar magic. */
+ struct ufuncs *uf;
+ Newx(uf, 1, struct ufuncs);
+ *uf = ud->old_uf;
+ Safefree(ud);
+ mg->mg_ptr = (char *) uf;
+ mg->mg_len = sizeof(*uf);
+ } else {
+ /* Remove the uvar magic. */
+ vmg_mg_del(sv, prevmagic, mg, moremagic);
+ }
+ }
+ }
+#endif /* VMG_UVAR */
+
+ /* Recompute magical flags now that the chain has changed. */
+ vmg_mg_magical(sv);
+
+ return 1;
+}
+
+/* ... OP info ............................................................. */
+
+#define VMG_OP_INFO_NAME 1
+#define VMG_OP_INFO_OBJECT 2
+
+#if XSH_THREADSAFE
+static perl_mutex vmg_op_name_init_mutex;
+#endif
+
+static U32 vmg_op_name_init = 0;
+static unsigned char vmg_op_name_len[MAXO] = { 0 };
+
+/* Prepare the data needed for the requested op-info flavour: the global op
+ * name-length cache (built once, under a mutex) for VMG_OP_INFO_NAME, or the
+ * per-interpreter B:: stash cache (loading B.pm on demand) for
+ * VMG_OP_INFO_OBJECT. */
+static void vmg_op_info_init(pTHX_ unsigned int opinfo) {
+#define vmg_op_info_init(W) vmg_op_info_init(aTHX_ (W))
+ switch (opinfo) {
+ case VMG_OP_INFO_NAME:
+ XSH_LOCK(&vmg_op_name_init_mutex);
+ if (!vmg_op_name_init) {
+ OPCODE t;
+ for (t = 0; t < OP_max; ++t)
+ vmg_op_name_len[t] = strlen(PL_op_name[t]);
+ vmg_op_name_init = 1;
+ }
+ XSH_UNLOCK(&vmg_op_name_init_mutex);
+ break;
+ case VMG_OP_INFO_OBJECT: {
+ dXSH_CXT;
+ if (!XSH_CXT.b__op_stashes[0]) {
+ int c;
+ require_pv("B.pm");
+ for (c = OPc_NULL; c < OPc_MAX; ++c)
+ XSH_CXT.b__op_stashes[c] = gv_stashpv(vmg_opclassnames[c], 1);
+ }
+ break;
+ }
+ default:
+ break;
+ }
+}
+
+/* Build the op-info argument passed to user callbacks for the current PL_op:
+ * a mortal op-name string, or a mortal blessed B::*-object wrapping the op
+ * pointer.  Returns undef when there is no current op or the flavour is
+ * unknown. */
+static SV *vmg_op_info(pTHX_ unsigned int opinfo) {
+#define vmg_op_info(W) vmg_op_info(aTHX_ (W))
+ if (!PL_op)
+ return &PL_sv_undef;
+
+ switch (opinfo) {
+ case VMG_OP_INFO_NAME: {
+ const char *name;
+ STRLEN name_len;
+ OPCODE t = PL_op->op_type;
+ name = OP_NAME(PL_op);
+ /* Custom ops are not in the precomputed length cache. */
+ name_len = (t == OP_CUSTOM) ? strlen(name) : vmg_op_name_len[t];
+ return sv_2mortal(newSVpvn(name, name_len));
+ }
+ case VMG_OP_INFO_OBJECT: {
+ dXSH_CXT;
+ return sv_bless(sv_2mortal(newRV_noinc(newSViv(PTR2IV(PL_op)))),
+ XSH_CXT.b__op_stashes[vmg_opclass(PL_op)]);
+ }
+ default:
+ break;
+ }
+
+ return &PL_sv_undef;
+}
+
+/* --- svt callbacks ------------------------------------------------------- */
+
+#define VMG_CB_CALL_ARGS_MASK 15
+#define VMG_CB_CALL_ARGS_SHIFT 4
+#define VMG_CB_CALL_OPINFO (VMG_OP_INFO_NAME|VMG_OP_INFO_OBJECT) /* 1|2 */
+#define VMG_CB_CALL_GUARD 4
+
+static int vmg_dispell_guard_oncroak(pTHX_ void *ud) {
+ dXSH_CXT;
+
+ XSH_CXT.depth--;
+
+ /* If we're at the upmost magic call and we're about to die, we can just free
+ * the tokens right now, since we will jump past the problematic part of our
+ * caller. */
+ if (XSH_CXT.depth == 0 && XSH_CXT.freed_tokens) {
+ vmg_magic_chain_free(XSH_CXT.freed_tokens, NULL);
+ XSH_CXT.freed_tokens = NULL;
+ }