/* uvar magic and Hash::Util::FieldHash were committed with 28419, but we only
 * enable uvar support on perl 5.10 and later */
#if VMG_HAS_PERL(5, 10, 0)
# define VMG_UVAR 1
#else
# define VMG_UVAR 0
#endif
+
/* Applied to dev-5.9 as 25854, integrated to maint-5.8 as 28160, partially
 * reverted to dev-5.11 as 9cdcb38b.
 * NOTE(review): presumably tracks whether push-like array magic stopped
 * receiving a length argument (and, for the _VOID variant, stopped returning
 * a value) — confirm against the referenced perl changes. Both macros can be
 * overridden from the command line, hence the #ifndef guards. */
#if VMG_HAS_PERL_MAINT(5, 8, 9, 28160) || VMG_HAS_PERL_MAINT(5, 9, 3, 25854) || VMG_HAS_PERL(5, 10, 0)
# ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN
# if VMG_HAS_PERL(5, 11, 0)
# define VMG_COMPAT_ARRAY_PUSH_NOLEN 0
# else
# define VMG_COMPAT_ARRAY_PUSH_NOLEN 1
# endif
# endif
# ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID
# define VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID 1
# endif
#else
# ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN
# define VMG_COMPAT_ARRAY_PUSH_NOLEN 0
# endif
# ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID
# define VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID 0
# endif
#endif
+
/* Applied to dev-5.11 as 34908 */
#if VMG_HAS_PERL_MAINT(5, 11, 0, 34908) || VMG_HAS_PERL(5, 12, 0)
# define VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID 1
#else
# define VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID 0
#endif

/* Applied to dev-5.9 as 31473 (see #43357), integrated to maint-5.8 as 32542 */
#if VMG_HAS_PERL_MAINT(5, 8, 9, 32542) || VMG_HAS_PERL_MAINT(5, 9, 5, 31473) || VMG_HAS_PERL(5, 10, 0)
# define VMG_COMPAT_ARRAY_UNDEF_CLEAR 1
#else
# define VMG_COMPAT_ARRAY_UNDEF_CLEAR 0
#endif
+
/* Applied to dev-5.11 as 32969.
 * NOTE(review): presumably tracks the scalar "len" magic losing its length
 * parameter — confirm against that perl change. */
#if VMG_HAS_PERL_MAINT(5, 11, 0, 32969) || VMG_HAS_PERL(5, 12, 0)
# define VMG_COMPAT_SCALAR_LENGTH_NOLEN 1
#else
# define VMG_COMPAT_SCALAR_LENGTH_NOLEN 0
#endif

/* NOTE(review): presumably get magic started being called on globs in
 * perl 5.13.2 — confirm against the perl change log. */
#if VMG_HAS_PERL(5, 13, 2)
# define VMG_COMPAT_GLOB_GET 1
#else
# define VMG_COMPAT_GLOB_GET 0
#endif
+
+/* ... Bug-free mg_magical ................................................. */
+
+/* See the discussion at http://www.xray.mpe.mpg.de/mailing-lists/perl5-porters/2008-01/msg00036.html. This version is specialized to our needs. */
+
+#if VMG_UVAR
+
+STATIC void vmg_sv_magicuvar(pTHX_ SV *sv, const char *uf, I32 len) {
+#define vmg_sv_magicuvar(S, U, L) vmg_sv_magicuvar(aTHX_ (S), (U), (L))
+ const MAGIC* mg;
+ sv_magic(sv, NULL, PERL_MAGIC_uvar, uf, len);
+ /* uvar magic has set and get magic, hence this has set SVs_GMG and SVs_SMG. */
+ if ((mg = SvMAGIC(sv))) {
+ SvRMAGICAL_off(sv);
+ do {
+ const MGVTBL* const vtbl = mg->mg_virtual;
+ if (vtbl) {
+ if (vtbl->svt_clear) {
+ SvRMAGICAL_on(sv);
+ break;
+ }
+ }
+ } while ((mg = mg->mg_moremagic));
+ }
+}
+
+#endif /* VMG_UVAR */
+
/* ... Safe version of call_sv() ........................................... */

/* Whether the context slot that call_sv() reuses must be backed up and
 * restored around the call (see vmg_call_sv below): needed before 5.8.4 and
 * again from 5.9.5 onwards. */
#define VMG_SAVE_LAST_CX (!VMG_HAS_PERL(5, 8, 4) || VMG_HAS_PERL(5, 9, 5))
+
/* Calls the code reference sv with the given call_sv() flags (G_EVAL is always
 * added), shielding the caller from exceptions and from call_sv() clobbering
 * the last popped context. Returns the count returned by call_sv().
 * When destructor is true and the enclosing context is an eval, a dying
 * callback leaves $@ set for that eval instead of croak()ing here. */
STATIC I32 vmg_call_sv(pTHX_ SV *sv, I32 flags, I32 destructor) {
#define vmg_call_sv(S, F, D) vmg_call_sv(aTHX_ (S), (F), (D))
 I32 ret, cxix = 0, in_eval = 0;
#if VMG_SAVE_LAST_CX
 PERL_CONTEXT saved_cx;
#endif
 SV *old_err = NULL;

 /* Preserve a pending $@ so the callback runs with a fresh error SV. */
 if (SvTRUE(ERRSV)) {
  old_err = ERRSV;
  ERRSV = newSV(0);
 }

 /* cxix is the slot call_sv() will use for its new context. */
 if (cxstack_ix < cxstack_max) {
  cxix = cxstack_ix + 1;
  if (destructor && CxTYPE(cxstack + cxix) == CXt_EVAL)
   in_eval = 1;
 }

#if VMG_SAVE_LAST_CX
 /* The last popped context will be reused by call_sv(), but our callers may
  * still need its previous value. Back it up so that it isn't clobbered. */
 saved_cx = cxstack[cxix];
#endif

 ret = call_sv(sv, flags | G_EVAL);

#if VMG_SAVE_LAST_CX
 cxstack[cxix] = saved_cx;
#endif

 if (SvTRUE(ERRSV)) {
  /* The callback died: fold the new error back into the preserved $@. */
  if (old_err) {
   sv_setsv(old_err, ERRSV);
   SvREFCNT_dec(ERRSV);
   ERRSV = old_err;
  }
  if (IN_PERL_COMPILETIME) {
   /* At compile time, queue or warn the error the way the parser does and
    * bump the parser error count instead of croaking. */
   if (!PL_in_eval) {
    if (PL_errors)
     sv_catsv(PL_errors, ERRSV);
    else
     Perl_warn(aTHX_ "%s", SvPV_nolen(ERRSV));
    SvCUR_set(ERRSV, 0);
   }
#if VMG_HAS_PERL(5, 10, 0) || defined(PL_parser)
   if (PL_parser)
    ++PL_parser->error_count;
#elif defined(PL_error_count)
   ++PL_error_count;
#else
   ++PL_Ierror_count;
#endif
  } else if (!in_eval)
   croak(NULL);
 } else {
  /* Success: drop the temporary error SV and restore the preserved $@. */
  if (old_err) {
   SvREFCNT_dec(ERRSV);
   ERRSV = old_err;
  }
 }

 return ret;
}
+
+/* --- Stolen chunk of B --------------------------------------------------- */
+
/* OP classes, mirroring B.xs. Declaration order is significant: each constant
 * is also the index of its class name in vmg_opclassnames, so the two must be
 * kept in sync. Values start at 0 and increment, as in the original. */
typedef enum {
 OPc_NULL,   /*  0 */
 OPc_BASEOP, /*  1 */
 OPc_UNOP,   /*  2 */
 OPc_BINOP,  /*  3 */
 OPc_LOGOP,  /*  4 */
 OPc_LISTOP, /*  5 */
 OPc_PMOP,   /*  6 */
 OPc_SVOP,   /*  7 */
 OPc_PADOP,  /*  8 */
 OPc_PVOP,   /*  9 */
 OPc_LOOP,   /* 10 */
 OPc_COP,    /* 11 */
 OPc_MAX     /* 12 */
} opclass;
+
/* B:: package names for each OP class. The order mirrors the opclass enum
 * above (OPc_NULL .. OPc_COP) — keep the two in sync. */
STATIC const char *const vmg_opclassnames[] = {
 "B::NULL",
 "B::OP",
 "B::UNOP",
 "B::BINOP",
 "B::LOGOP",
 "B::LISTOP",
 "B::PMOP",
 "B::SVOP",
 "B::PADOP",
 "B::PVOP",
 "B::LOOP",
 "B::COP"
};
+
/* Classifies the OP o into one of the opclass constants, following the logic
 * of B::class() in B.xs. o is assumed non-NULL (the NULL check is compiled
 * out below). */
STATIC opclass vmg_opclass(const OP *o) {
#if 0
 /* Disabled: callers never pass a NULL op. */
 if (!o)
  return OPc_NULL;
#endif

 /* op_type 0 is OP_NULL: the original class is lost, guess from OPf_KIDS. */
 if (o->op_type == 0)
  return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;

 /* sassign is a BINOP, except when optimized to a reversed UNOP form. */
 if (o->op_type == OP_SASSIGN)
  return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);

 /* aelemfast carries either a pad offset (threads) or a GV (no threads),
  * unless OPf_SPECIAL marks the lexical-array form. */
 if (o->op_type == OP_AELEMFAST) {
  if (o->op_flags & OPf_SPECIAL)
   return OPc_BASEOP;
  else
#ifdef USE_ITHREADS
   return OPc_PADOP;
#else
   return OPc_SVOP;
#endif
 }

#ifdef USE_ITHREADS
 /* Under ithreads these ops store a pad offset instead of an SV. */
 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_RCATLINE)
  return OPc_PADOP;
#endif

 /* General case: dispatch on the class bits of the opcode argument table. */
 switch (PL_opargs[o->op_type] & OA_CLASS_MASK) {
  case OA_BASEOP:
   return OPc_BASEOP;
  case OA_UNOP:
   return OPc_UNOP;
  case OA_BINOP:
   return OPc_BINOP;
  case OA_LOGOP:
   return OPc_LOGOP;
  case OA_LISTOP:
   return OPc_LISTOP;
  case OA_PMOP:
   return OPc_PMOP;
  case OA_SVOP:
   return OPc_SVOP;
  case OA_PADOP:
   return OPc_PADOP;
  case OA_PVOP_OR_SVOP:
   /* tr/// ops hold an SV table when converting to/from UTF-8, a PV
    * otherwise. */
   return (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF)) ? OPc_SVOP : OPc_PVOP;
  case OA_LOOP:
   return OPc_LOOP;
  case OA_COP:
   return OPc_COP;
  case OA_BASEOP_OR_UNOP:
   /* UNOP if it has a kid, plain BASEOP otherwise. */
   return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
  case OA_FILESTATOP:
   /* stat-like ops: UNOP with a kid, PADOP/SVOP when holding a GV ref,
    * BASEOP otherwise. */
   return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
#ifdef USE_ITHREADS
           (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
#else
           (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
#endif
  case OA_LOOPEXOP:
   /* next/last/redo: stacked form takes a kid, special form is bare,
    * otherwise the label is stored as a PV. */
   if (o->op_flags & OPf_STACKED)
    return OPc_UNOP;
   else if (o->op_flags & OPf_SPECIAL)
    return OPc_BASEOP;
   else
    return OPc_PVOP;
 }

 /* Unknown class bits: fall back to the most conservative answer. */
 return OPc_BASEOP;
}
+
+/* --- Error messages ------------------------------------------------------ */
+
/* Shared croak()/warn() message strings. */
STATIC const char vmg_invalid_wiz[] = "Invalid wizard object";
STATIC const char vmg_wrongargnum[] = "Wrong number of arguments";
STATIC const char vmg_argstorefailed[] = "Error while storing arguments";
+
+/* --- Signatures ---------------------------------------------------------- */
+
/* 16-bit signatures tagging the two kinds of magic this module attaches.
 * NOTE(review): presumably stored in mg_private so our magic entries can be
 * recognized on a chain — confirm at the attach/lookup sites. */
#define SIG_WZO ((U16) (0x3891))
#define SIG_WIZ ((U16) (0x3892))
+
+/* --- MGWIZ structure ----------------------------------------------------- */
+
/* Per-wizard bookkeeping: the magic vtable plus the Perl callbacks backing
 * each hook. Freed by vmg_mgwiz_free(), cloned across threads by
 * vmg_mgwiz_clone(). */
typedef struct {
 MGVTBL *vtbl; /* wizard-private vtable copy, owned (Safefree'd on free) */

 U8 opinfo; /* NOTE(review): presumably an op-info reporting mode — confirm */
 U8 uvar;   /* NOTE(review): presumably true when uvar callbacks are set */

 SV *cb_data; /* callback building the private data attached to the magic */
 /* Standard magic hooks; any may be NULL. */
 SV *cb_get, *cb_set, *cb_len, *cb_clear, *cb_free;
#if MGf_COPY
 SV *cb_copy;
#endif /* MGf_COPY */
#if MGf_DUP
 SV *cb_dup;
#endif /* MGf_DUP */
#if MGf_LOCAL
 SV *cb_local;
#endif /* MGf_LOCAL */
#if VMG_UVAR
 /* Hash element hooks, only available on VMG_UVAR builds. */
 SV *cb_fetch, *cb_store, *cb_exists, *cb_delete;
#endif /* VMG_UVAR */

#if VMG_MULTIPLICITY
 tTHX owner; /* interpreter that owns the callback SVs above */
#endif /* VMG_MULTIPLICITY */
} MGWIZ;
+
/* Releases everything the wizard w owns: its callback SVs, its private vtable
 * and the MGWIZ structure itself. A NULL w is a no-op. */
STATIC void vmg_mgwiz_free(pTHX_ MGWIZ *w) {
#define vmg_mgwiz_free(W) vmg_mgwiz_free(aTHX_ (W))
 if (!w)
  return;

 /* We reach this point in dirty state when ptable_free() is called from the
  * atexit cleanup callback, and that the global table still holds a live
  * wizard. This happens before all the SV bodies are freed, so all the wizard
  * callbacks are still alive (as they are referenced by the undead wizard).
  * Hence it is safe to decrement their refcount. Later on, the wizard free
  * callback itself will trigger when the wizard body is reaped, but it will
  * be skipped as it guards against dirty state - which is good since nothing
  * has to be done anymore at that point. */

 SvREFCNT_dec(w->cb_data);
 SvREFCNT_dec(w->cb_get);
 SvREFCNT_dec(w->cb_set);
 SvREFCNT_dec(w->cb_len);
 SvREFCNT_dec(w->cb_clear);
 SvREFCNT_dec(w->cb_free);
#if MGf_COPY
 SvREFCNT_dec(w->cb_copy);
#endif /* MGf_COPY */
#if 0 /* MGf_DUP */
 /* NOTE(review): cb_dup is deliberately compiled out here although
  * vmg_mgwiz_clone() does clone it under MGf_DUP. If cb_dup can ever be
  * non-NULL this is a refcount leak — confirm against the wizard constructor
  * whether the dup callback is actually stored/owned. */
 SvREFCNT_dec(w->cb_dup);
#endif /* MGf_DUP */
#if MGf_LOCAL
 SvREFCNT_dec(w->cb_local);
#endif /* MGf_LOCAL */
#if VMG_UVAR
 SvREFCNT_dec(w->cb_fetch);
 SvREFCNT_dec(w->cb_store);
 SvREFCNT_dec(w->cb_exists);
 SvREFCNT_dec(w->cb_delete);
#endif /* VMG_UVAR */

 Safefree(w->vtbl);
 Safefree(w);

 return;
}
+
#if VMG_THREADSAFE

/* Clones callback N from the source wizard w (owned by interpreter w->owner)
 * into the new wizard z for the current interpreter. Relies on the local
 * names `w` and `z` in vmg_mgwiz_clone() below. */
#define VMG_CLONE_CB(N) \
 z->cb_ ## N = (w->cb_ ## N) ? vmg_clone(w->cb_ ## N, w->owner) \
                             : NULL;

/* Duplicates the wizard w into the current interpreter for thread cloning:
 * deep-copies the vtable and clones every callback SV. Returns the new
 * MGWIZ, or NULL when w is NULL. The caller owns the result (to be released
 * with vmg_mgwiz_free()). */
STATIC MGWIZ *vmg_mgwiz_clone(pTHX_ const MGWIZ *w) {
#define vmg_mgwiz_clone(W) vmg_mgwiz_clone(aTHX_ (W))
 MGVTBL *t;
 MGWIZ *z;

 if (!w)
  return NULL;

 /* Each wizard owns its vtable, so give the clone its own copy. */
 Newx(t, 1, MGVTBL);
 Copy(w->vtbl, t, 1, MGVTBL);

 Newx(z, 1, MGWIZ);

 z->vtbl = t;
 z->uvar = w->uvar;
 z->opinfo = w->opinfo;

 VMG_CLONE_CB(data);
 VMG_CLONE_CB(get);
 VMG_CLONE_CB(set);
 VMG_CLONE_CB(len);
 VMG_CLONE_CB(clear);
 VMG_CLONE_CB(free);
#if MGf_COPY
 VMG_CLONE_CB(copy);
#endif /* MGf_COPY */
#if MGf_DUP
 VMG_CLONE_CB(dup);
#endif /* MGf_DUP */
#if MGf_LOCAL
 VMG_CLONE_CB(local);
#endif /* MGf_LOCAL */
#if VMG_UVAR
 VMG_CLONE_CB(fetch);
 VMG_CLONE_CB(store);
 VMG_CLONE_CB(exists);
 VMG_CLONE_CB(delete);
#endif /* VMG_UVAR */

 /* The clone now belongs to the current interpreter.
  * NOTE(review): assumes VMG_THREADSAFE implies VMG_MULTIPLICITY, since the
  * owner field only exists in that case — confirm the macro definitions. */
 z->owner = aTHX;

 return z;
}

#endif /* VMG_THREADSAFE */
+