# define VMG_UVAR 0
#endif
+#if VMG_HAS_PERL_MAINT(5, 11, 0, 32969) || VMG_HAS_PERL(5, 12, 0)
+# define VMG_COMPAT_SCALAR_LENGTH_NOLEN 1
+#else
+# define VMG_COMPAT_SCALAR_LENGTH_NOLEN 0
+#endif
+
/* Applied to dev-5.9 as 25854, integrated to maint-5.8 as 28160, partially
* reverted to dev-5.11 as 9cdcb38b */
#if VMG_HAS_PERL_MAINT(5, 8, 9, 28160) || VMG_HAS_PERL_MAINT(5, 9, 3, 25854) || VMG_HAS_PERL(5, 10, 0)
# define VMG_COMPAT_ARRAY_UNDEF_CLEAR 0
#endif
-#if VMG_HAS_PERL_MAINT(5, 11, 0, 32969) || VMG_HAS_PERL(5, 12, 0)
-# define VMG_COMPAT_SCALAR_LENGTH_NOLEN 1
+#if VMG_HAS_PERL(5, 11, 0)
+# define VMG_COMPAT_HASH_DELETE_NOUVAR_VOID 1
#else
-# define VMG_COMPAT_SCALAR_LENGTH_NOLEN 0
+# define VMG_COMPAT_HASH_DELETE_NOUVAR_VOID 0
#endif
#if VMG_HAS_PERL(5, 13, 2)
/* ... Safe version of call_sv() ........................................... */
-#define VMG_SAVE_LAST_CX (!VMG_HAS_PERL(5, 8, 4) || VMG_HAS_PERL(5, 9, 5))
-
-STATIC I32 vmg_call_sv(pTHX_ SV *sv, I32 flags, I32 destructor) {
-#define vmg_call_sv(S, F, D) vmg_call_sv(aTHX_ (S), (F), (D))
- I32 ret, cxix = 0, in_eval = 0;
-#if VMG_SAVE_LAST_CX
+STATIC I32 vmg_call_sv(pTHX_ SV *sv, I32 flags, int (*cleanup)(pTHX_ void *), void *ud) {
+#define vmg_call_sv(S, F, C, U) vmg_call_sv(aTHX_ (S), (F), (C), (U))
+ I32 ret, cxix, in_eval = 0;
PERL_CONTEXT saved_cx;
-#endif
SV *old_err = NULL;
if (SvTRUE(ERRSV)) {
}
if (cxstack_ix < cxstack_max) {
- cxix = cxstack_ix + 1;
- if (destructor && CxTYPE(cxstack + cxix) == CXt_EVAL)
- in_eval = 1;
+ cxix = cxstack_ix + 1;
+ in_eval = CxTYPE(cxstack + cxix) == CXt_EVAL;
+ } else {
+ cxix = Perl_cxinc(aTHX);
}
-
-#if VMG_SAVE_LAST_CX
/* The last popped context will be reused by call_sv(), but our callers may
* still need its previous value. Back it up so that it isn't clobbered. */
saved_cx = cxstack[cxix];
-#endif
ret = call_sv(sv, flags | G_EVAL);
-#if VMG_SAVE_LAST_CX
cxstack[cxix] = saved_cx;
-#endif
if (SvTRUE(ERRSV)) {
if (old_err) {
#else
++PL_Ierror_count;
#endif
- } else if (!in_eval)
+ } else if (!in_eval) {
+ if (!cleanup || cleanup(aTHX_ ud))
croak(NULL);
+ }
} else {
if (old_err) {
SvREFCNT_dec(ERRSV);
#define vmg_vtable_vtbl(T) (T)->vtbl
-#if VMG_THREADSAFE
STATIC perl_mutex vmg_vtable_refcount_mutex;
-#endif
STATIC vmg_vtable *vmg_vtable_dup(pTHX_ vmg_vtable *t) {
#define vmg_vtable_dup(T) vmg_vtable_dup(aTHX_ (T))
if (!w)
return;
- SvREFCNT_dec(w->cb_data);
- SvREFCNT_dec(w->cb_get);
- SvREFCNT_dec(w->cb_set);
- SvREFCNT_dec(w->cb_len);
- SvREFCNT_dec(w->cb_clear);
- SvREFCNT_dec(w->cb_free);
- SvREFCNT_dec(w->cb_copy);
+ /* During global destruction, any of the callbacks may already have been
+ * freed, so we can't rely on still being able to access them. */
+ if (!PL_dirty) {
+ SvREFCNT_dec(w->cb_data);
+ SvREFCNT_dec(w->cb_get);
+ SvREFCNT_dec(w->cb_set);
+ SvREFCNT_dec(w->cb_len);
+ SvREFCNT_dec(w->cb_clear);
+ SvREFCNT_dec(w->cb_free);
+ SvREFCNT_dec(w->cb_copy);
#if 0
- SvREFCNT_dec(w->cb_dup);
+ SvREFCNT_dec(w->cb_dup);
#endif
#if MGf_LOCAL
- SvREFCNT_dec(w->cb_local);
+ SvREFCNT_dec(w->cb_local);
#endif /* MGf_LOCAL */
#if VMG_UVAR
- SvREFCNT_dec(w->cb_fetch);
- SvREFCNT_dec(w->cb_store);
- SvREFCNT_dec(w->cb_exists);
- SvREFCNT_dec(w->cb_delete);
+ SvREFCNT_dec(w->cb_fetch);
+ SvREFCNT_dec(w->cb_store);
+ SvREFCNT_dec(w->cb_exists);
+ SvREFCNT_dec(w->cb_delete);
#endif /* VMG_UVAR */
+ }
+ /* PerlMemShared_free() and Safefree() are still fine during global
+ * destruction though. */
vmg_vtable_free(w->vtable);
Safefree(w);
/* --- Wizard SV objects --------------------------------------------------- */
STATIC int vmg_wizard_sv_free(pTHX_ SV *sv, MAGIC *mg) {
- if (PL_dirty) /* During global destruction, the context is already freed */
- return 0;
-
vmg_wizard_free((vmg_wizard *) mg->mg_ptr);
return 0;
PUSHs(args[i]);
PUTBACK;
- vmg_call_sv(ctor, G_SCALAR, 0);
+ vmg_call_sv(ctor, G_SCALAR, 0, NULL);
SPAGAIN;
nsv = POPs;
/* ... Magic cast/dispell .................................................. */
#if VMG_UVAR
+
STATIC I32 vmg_svt_val(pTHX_ IV, SV *);
-STATIC void vmg_uvar_del(SV *sv, MAGIC *prevmagic, MAGIC *mg, MAGIC *moremagic) {
- if (prevmagic) {
+/* Private payload stored in our PERL_MAGIC_uvar entry: our own ufuncs plus
+ * a backup of any pre-existing uvar magic's ufuncs, so the old hooks can
+ * still be chained to (see vmg_svt_val) and later restored at dispell. */
+typedef struct {
+ struct ufuncs new_uf; /* our hooks; new_uf.uf_val is vmg_svt_val */
+ struct ufuncs old_uf; /* previous uvar magic's hooks, or all NULL/0 */
+} vmg_uvar_ud;
+
+#endif /* VMG_UVAR */
+
+STATIC void vmg_mg_del(pTHX_ SV *sv, MAGIC *prevmagic, MAGIC *mg, MAGIC *moremagic) {
+#define vmg_mg_del(S, P, M, N) vmg_mg_del(aTHX_ (S), (P), (M), (N))
+ /* Unlink mg from sv's magic chain (prevmagic is its predecessor, or NULL
+  * when mg is the head; moremagic is its successor), release the private
+  * data mg owns, then free the MAGIC structure itself. */
+ if (prevmagic)
   prevmagic->mg_moremagic = moremagic;
- } else {
+ else
   SvMAGIC_set(sv, moremagic);
- }
 mg->mg_moremagic = NULL;
- Safefree(mg->mg_ptr);
+
+ /* Destroy private data */
+#if VMG_UVAR
+ if (mg->mg_type == PERL_MAGIC_uvar) {
+  /* uvar magic: mg_ptr holds a heap-allocated vmg_uvar_ud. */
+  Safefree(mg->mg_ptr);
+ } else {
+#endif /* VMG_UVAR */
+  /* Drop the reference on the private object, unless it aliases sv itself
+   * (presumably no refcount was taken in that case — TODO confirm). */
+  if (mg->mg_obj != sv)
+   SvREFCNT_dec(mg->mg_obj);
+  /* Unreference the wizard */
+  SvREFCNT_dec((SV *) mg->mg_ptr);
+#if VMG_UVAR
+ }
+#endif /* VMG_UVAR */
+
 Safefree(mg);
}
-#endif /* VMG_UVAR */
STATIC UV vmg_cast(pTHX_ SV *sv, const vmg_wizard *w, const SV *wiz, SV **args, I32 items) {
#define vmg_cast(S, W, WIZ, A, I) vmg_cast(aTHX_ (S), (W), (WIZ), (A), (I))
#if VMG_UVAR
if (w->uvar) {
MAGIC *prevmagic, *moremagic = NULL;
- struct ufuncs uf[2];
+ vmg_uvar_ud ud;
- uf[0].uf_val = vmg_svt_val;
- uf[0].uf_set = NULL;
- uf[0].uf_index = 0;
- uf[1].uf_val = NULL;
- uf[1].uf_set = NULL;
- uf[1].uf_index = 0;
+ ud.new_uf.uf_val = vmg_svt_val;
+ ud.new_uf.uf_set = NULL;
+ ud.new_uf.uf_index = 0;
+ ud.old_uf.uf_val = NULL;
+ ud.old_uf.uf_set = NULL;
+ ud.old_uf.uf_index = 0;
/* One uvar magic in the chain is enough. */
for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
}
if (mg) { /* Found another uvar magic. */
- struct ufuncs *olduf = (struct ufuncs *) mg->mg_ptr;
- if (olduf->uf_val == vmg_svt_val) {
+ struct ufuncs *uf = (struct ufuncs *) mg->mg_ptr;
+ if (uf->uf_val == vmg_svt_val) {
/* It's our uvar magic, nothing to do. oldgmg was true. */
goto done;
} else {
/* It's another uvar magic, backup it and replace it by ours. */
- uf[1] = *olduf;
- vmg_uvar_del(sv, prevmagic, mg, moremagic);
+ ud.old_uf = *uf;
+ vmg_mg_del(sv, prevmagic, mg, moremagic);
}
}
- sv_magic(sv, NULL, PERL_MAGIC_uvar, (const char *) &uf, sizeof(uf));
+ sv_magic(sv, NULL, PERL_MAGIC_uvar, (const char *) &ud, sizeof(ud));
vmg_mg_magical(sv);
/* Our hash now carries uvar magic. The uvar/clear shortcoming has to be
* handled by our uvar callback. */
if (!mg)
return 0;
- if (prevmagic) {
- prevmagic->mg_moremagic = moremagic;
- } else {
- SvMAGIC_set(sv, moremagic);
- }
- mg->mg_moremagic = NULL;
-
- /* Destroy private data */
- if (mg->mg_obj != sv)
- SvREFCNT_dec(mg->mg_obj);
- /* Unreference the wizard */
- SvREFCNT_dec((SV *) mg->mg_ptr);
- Safefree(mg);
+ vmg_mg_del(sv, prevmagic, mg, moremagic);
#if VMG_UVAR
if (uvars == 1 && SvTYPE(sv) >= SVt_PVHV) {
}
if (uvars == 1) {
- struct ufuncs *uf;
+ vmg_uvar_ud *ud;
+
for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic){
moremagic = mg->mg_moremagic;
if (mg->mg_type == PERL_MAGIC_uvar)
break;
}
- /* assert(mg); */
- uf = (struct ufuncs *) mg->mg_ptr;
- /* assert(uf->uf_val == vmg_svt_val); */
- if (uf[1].uf_val || uf[1].uf_set) {
+
+ ud = (vmg_uvar_ud *) mg->mg_ptr;
+ if (ud->old_uf.uf_val || ud->old_uf.uf_set) {
/* Revert the original uvar magic. */
- uf[0] = uf[1];
- Renew(uf, 1, struct ufuncs);
+ struct ufuncs *uf;
+ Newx(uf, 1, struct ufuncs);
+ *uf = ud->old_uf;
+ Safefree(ud);
mg->mg_ptr = (char *) uf;
- mg->mg_len = sizeof(struct ufuncs);
+ mg->mg_len = sizeof(*uf);
} else {
/* Remove the uvar magic. */
- vmg_uvar_del(sv, prevmagic, mg, moremagic);
+ vmg_mg_del(sv, prevmagic, mg, moremagic);
}
}
}
return &PL_sv_undef;
}
-/* ... svt callbacks ....................................................... */
+/* --- svt callbacks ------------------------------------------------------- */
#define VMG_CB_CALL_ARGS_MASK 15
#define VMG_CB_CALL_ARGS_SHIFT 4
XPUSHs(vmg_op_info(opinfo));
PUTBACK;
- vmg_call_sv(cb, G_SCALAR, 0);
+ vmg_call_sv(cb, G_SCALAR, 0, NULL);
SPAGAIN;
svr = POPs;
#define vmg_cb_call3(I, OI, S, A1, A2, A3) \
vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 3), (S), (A1), (A2), (A3))
+/* Shared no-op fallback installed in svt vtable slots whose wizard callback
+ * is absent or invalid: reports success without touching sv or mg. */
+STATIC int vmg_svt_default_noop(pTHX_ SV *sv, MAGIC *mg) {
+ return 0;
+}
+
+/* ... get magic ........................................................... */
+
STATIC int vmg_svt_get(pTHX_ SV *sv, MAGIC *mg) {
+ /* Dispatch the wizard's "get" callback with sv and the private data. */
 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
 return vmg_cb_call1(w->cb_get, w->opinfo, sv, mg->mg_obj);
}
+#define vmg_svt_get_noop vmg_svt_default_noop
+
+/* ... set magic ........................................................... */
+
STATIC int vmg_svt_set(pTHX_ SV *sv, MAGIC *mg) {
+ /* Dispatch the wizard's "set" callback with sv and the private data. */
 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
 return vmg_cb_call1(w->cb_set, w->opinfo, sv, mg->mg_obj);
}
+#define vmg_svt_set_noop vmg_svt_default_noop
+
+/* ... len magic ........................................................... */
+
+/* Length of the string in sv: counted in characters when sv is internally
+ * UTF-8 encoded (DO_UTF8), in bytes otherwise.  Factored out so that both
+ * vmg_svt_len() and vmg_svt_len_noop() share the same computation. */
+STATIC U32 vmg_sv_len(pTHX_ SV *sv) {
+#define vmg_sv_len(S) vmg_sv_len(aTHX_ (S))
+ STRLEN len;
+#if VMG_HAS_PERL(5, 9, 3)
+ /* SvPV_const is only available from perl 5.9.3 onwards. */
+ const U8 *s = VOID2(const U8 *, VOID2(const void *, SvPV_const(sv, len)));
+#else
+ U8 *s = SvPV(sv, len);
+#endif
+
+ return DO_UTF8(sv) ? utf8_length(s, s + len) : len;
+}
+
STATIC U32 vmg_svt_len(pTHX_ SV *sv, MAGIC *mg) {
const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
unsigned int opinfo = w->opinfo;
PUSHs(sv_2mortal(newRV_inc(sv)));
PUSHs(mg->mg_obj ? mg->mg_obj : &PL_sv_undef);
if (t < SVt_PVAV) {
- STRLEN l;
-#if VMG_HAS_PERL(5, 9, 3)
- const U8 *s = VOID2(const U8 *, VOID2(const void *, SvPV_const(sv, l)));
-#else
- U8 *s = SvPV(sv, l);
-#endif
- if (DO_UTF8(sv))
- len = utf8_length(s, s + l);
- else
- len = l;
+ len = vmg_sv_len(sv);
mPUSHu(len);
} else if (t == SVt_PVAV) {
len = av_len((AV *) sv) + 1;
XPUSHs(vmg_op_info(opinfo));
PUTBACK;
- vmg_call_sv(w->cb_len, G_SCALAR, 0);
+ vmg_call_sv(w->cb_len, G_SCALAR, 0, NULL);
SPAGAIN;
svr = POPs;
return ret;
}
+/* "len" magic handler used when the wizard has no len callback: returns the
+ * unmagical answer — character/byte length for scalars (via vmg_sv_len()),
+ * av_len() (the highest index, not the element count) for arrays, 0 for
+ * anything else.  NOTE(review): vmg_svt_len() pushes av_len()+1 to the user
+ * callback; the asymmetry here looks deliberate (svt_len return-value
+ * convention) but is worth confirming against mg_length()'s callers. */
+STATIC U32 vmg_svt_len_noop(pTHX_ SV *sv, MAGIC *mg) {
+ U32 len = 0;
+ svtype t = SvTYPE(sv);
+
+ if (t < SVt_PVAV) {
+  len = vmg_sv_len(sv);
+ } else if (t == SVt_PVAV) {
+  len = (U32) av_len((AV *) sv);
+ }
+
+ return len;
+}
+
+/* ... clear magic ......................................................... */
+
STATIC int vmg_svt_clear(pTHX_ SV *sv, MAGIC *mg) {
+ /* Dispatch the wizard's "clear" callback with sv and the private data. */
 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
 return vmg_cb_call1(w->cb_clear, w->opinfo, sv, mg->mg_obj);
}
+#define vmg_svt_clear_noop vmg_svt_default_noop
+
+/* ... free magic .......................................................... */
+
+/* Cleanup hook handed to vmg_call_sv() by vmg_svt_free(); ud is the SV whose
+ * free callback just croaked.  Detaches the SV's head magic entry, refreshes
+ * its magic flags, and drops a reference on it (presumably the one taken by
+ * the caller around the callback — TODO confirm), then returns non-zero so
+ * that vmg_call_sv() re-throws the error via croak(). */
+STATIC int vmg_svt_free_cleanup(pTHX_ void *ud) {
+ SV *sv = VOID2(SV *, ud);
+ MAGIC *mg;
+
+ /* We are about to croak() while sv is being destroyed. Try to clean up
+  * things a bit. */
+ mg = SvMAGIC(sv);
+ if (mg) {
+  vmg_mg_del(sv, NULL, mg, mg->mg_moremagic);
+  mg_magical(sv);
+ }
+ SvREFCNT_dec(sv);
+
+ /* After that, propagate the error upwards. */
+ return 1;
+}
+
STATIC int vmg_svt_free(pTHX_ SV *sv, MAGIC *mg) {
const vmg_wizard *w;
int ret = 0;
dSP;
- /* Don't even bother if we are in global destruction - the wizard is prisoner
- * of circular references and we are way beyond user realm */
+ /* During global destruction, we cannot be sure that the wizard and its free
+ * callback are still alive. */
if (PL_dirty)
return 0;
XPUSHs(vmg_op_info(w->opinfo));
PUTBACK;
- vmg_call_sv(w->cb_free, G_SCALAR, 1);
+ vmg_call_sv(w->cb_free, G_SCALAR, vmg_svt_free_cleanup, sv);
SPAGAIN;
svr = POPs;
return ret;
}
-STATIC int vmg_svt_copy(pTHX_ SV *sv, MAGIC *mg, SV *nsv, const char *key,
-# if VMG_HAS_PERL_MAINT(5, 11, 0, 33256) || VMG_HAS_PERL(5, 12, 0)
- I32 keylen
-# else
- int keylen
-# endif
- ) {
+#define vmg_svt_free_noop vmg_svt_default_noop
+
+#if VMG_HAS_PERL_MAINT(5, 11, 0, 33256) || VMG_HAS_PERL(5, 12, 0)
+# define VMG_SVT_COPY_KEYLEN_TYPE I32
+#else
+# define VMG_SVT_COPY_KEYLEN_TYPE int
+#endif
+
+/* ... copy magic .......................................................... */
+
+STATIC int vmg_svt_copy(pTHX_ SV *sv, MAGIC *mg, SV *nsv, const char *key, VMG_SVT_COPY_KEYLEN_TYPE keylen) {
const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
SV *keysv;
int ret;
return ret;
}
+/* No-op "copy" magic handler; a dedicated function (rather than
+ * vmg_svt_default_noop) is needed because svt_copy has extra key/keylen
+ * parameters. */
+STATIC int vmg_svt_copy_noop(pTHX_ SV *sv, MAGIC *mg, SV *nsv, const char *key, VMG_SVT_COPY_KEYLEN_TYPE keylen) {
+ return 0;
+}
+
+/* ... dup magic ........................................................... */
+
#if 0
STATIC int vmg_svt_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *param) {
return 0;
}
+#define vmg_svt_dup_noop vmg_svt_dup
#endif
+/* ... local magic ......................................................... */
+
#if MGf_LOCAL
+
STATIC int vmg_svt_local(pTHX_ SV *nsv, MAGIC *mg) {
+ /* Dispatch the wizard's "local" callback on the localized SV nsv. */
 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
 return vmg_cb_call1(w->cb_local, w->opinfo, nsv, mg->mg_obj);
}
+
+#define vmg_svt_local_noop vmg_svt_default_noop
+
#endif /* MGf_LOCAL */
+/* ... uvar magic .......................................................... */
+
#if VMG_UVAR
STATIC OP *vmg_pp_resetuvar(pTHX) {
SvRMAGICAL_on(cSVOP_sv);
}
STATIC I32 vmg_svt_val(pTHX_ IV action, SV *sv) {
- struct ufuncs *uf;
+ vmg_uvar_ud *ud;
MAGIC *mg, *umg;
SV *key = NULL, *newkey = NULL;
int tied = 0;
umg = mg_find(sv, PERL_MAGIC_uvar);
/* umg can't be NULL or we wouldn't be there. */
key = umg->mg_obj;
- uf = (struct ufuncs *) umg->mg_ptr;
+ ud = (vmg_uvar_ud *) umg->mg_ptr;
- if (uf[1].uf_val)
- uf[1].uf_val(aTHX_ action, sv);
- if (uf[1].uf_set)
- uf[1].uf_set(aTHX_ action, sv);
+ if (ud->old_uf.uf_val)
+ ud->old_uf.uf_val(aTHX_ action, sv);
+ if (ud->old_uf.uf_set)
+ ud->old_uf.uf_set(aTHX_ action, sv);
- action &= HV_FETCH_ISSTORE | HV_FETCH_ISEXISTS | HV_FETCH_LVALUE | HV_DELETE;
for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
const vmg_wizard *w;
+
switch (mg->mg_type) {
case PERL_MAGIC_ext:
break;
default:
continue;
}
+
w = vmg_wizard_from_mg(mg);
- if (!w) continue;
+ if (!w)
+ continue;
+
switch (w->uvar) {
case 0:
continue;
if (!newkey)
newkey = key = umg->mg_obj = sv_mortalcopy(umg->mg_obj);
}
- switch (action) {
+
+ switch (action
+ & (HV_FETCH_ISSTORE|HV_FETCH_ISEXISTS|HV_FETCH_LVALUE|HV_DELETE)) {
case 0:
if (w->cb_fetch)
vmg_cb_call2(w->cb_fetch, w->opinfo, sv, mg->mg_obj, key);
}
}
- if (SvRMAGICAL(sv) && !tied) {
+ if (SvRMAGICAL(sv) && !tied && !(action & (HV_FETCH_ISSTORE|HV_DELETE))) {
/* Temporarily hide the RMAGICAL flag of the hash so it isn't wrongly
* mistaken for a tied hash by the rest of hv_common. It will be reset by
* the op_ppaddr of a new fake op injected between the current and the next
* one. */
- OP *o = PL_op;
- if (!o->op_next || o->op_next->op_ppaddr != vmg_pp_resetuvar) {
+ OP *nop = PL_op->op_next;
+ if (!nop || nop->op_ppaddr != vmg_pp_resetuvar) {
SVOP *svop;
NewOp(1101, svop, 1, SVOP);
svop->op_type = OP_STUB;
svop->op_ppaddr = vmg_pp_resetuvar;
- svop->op_next = o->op_next;
+ svop->op_next = nop;
svop->op_flags = 0;
svop->op_sv = sv;
- o->op_next = (OP *) svop;
+ PL_op->op_next = (OP *) svop;
}
SvRMAGICAL_off(sv);
}
/* --- Macros for the XS section ------------------------------------------- */
-#define VMG_SET_CB(S, N) \
- cb = (S); \
- w->cb_ ## N = (SvOK(cb) && SvROK(cb)) ? SvREFCNT_inc(SvRV(cb)) : NULL;
-
-#define VMG_SET_SVT_CB(S, N) \
- cb = (S); \
- if (SvOK(cb) && SvROK(cb)) { \
- t->svt_ ## N = vmg_svt_ ## N; \
- w->cb_ ## N = SvREFCNT_inc(SvRV(cb)); \
- } else { \
- t->svt_ ## N = NULL; \
- w->cb_ ## N = NULL; \
- }
+/* VMG_CVOK(C): true iff the CV C actually has a body — either an XSUB
+ * pointer or a compiled op tree.  CvISXSUB is used when available so the
+ * right union member is inspected. */
+#ifdef CvISXSUB
+# define VMG_CVOK(C) \
+  ((CvISXSUB(C) ? (void *) CvXSUB(C) : (void *) CvROOT(C)) ? 1 : 0)
+#else
+# define VMG_CVOK(C) (CvROOT(C) || CvXSUB(C))
+#endif
+
+/* VMG_CBOK(S): S is a usable callback target — a code SV with a body, or
+ * any other SvOK value (e.g. a method name). */
+#define VMG_CBOK(S) ((SvTYPE(S) == SVt_PVCV) ? VMG_CVOK(S) : SvOK(S))
+
+/* Store the callback S into w->cb_N: when S is a reference to something
+ * callable per VMG_CBOK(), keep the referenced SV (with an extra refcount);
+ * otherwise store NULL so the callback is treated as absent. */
+#define VMG_SET_CB(S, N) { \
+ SV *cb = (S); \
+ if (SvOK(cb) && SvROK(cb)) { \
+  cb = SvRV(cb); \
+  if (VMG_CBOK(cb)) \
+   SvREFCNT_inc_simple_void(cb); \
+  else \
+   cb = NULL; \
+ } else { \
+  cb = NULL; \
+ } \
+ w->cb_ ## N = cb; \
+}
+
+/* Like VMG_SET_CB(), but also wires the matching slot of the magic vtable t:
+ *  - valid callback ref  -> real handler vmg_svt_N, callback stored;
+ *  - ref to an unusable callback -> vmg_svt_N_noop stands in (slot stays
+ *    active so default magic behaviour is preserved), callback NULL;
+ *  - not a reference     -> slot disabled entirely (NULL), callback NULL. */
+#define VMG_SET_SVT_CB(S, N) { \
+ SV *cb = (S); \
+ if (SvOK(cb) && SvROK(cb)) { \
+  cb = SvRV(cb); \
+  if (VMG_CBOK(cb)) { \
+   t->svt_ ## N = vmg_svt_ ## N; \
+   SvREFCNT_inc_simple_void(cb); \
+  } else { \
+   t->svt_ ## N = vmg_svt_ ## N ## _noop; \
+   cb = NULL; \
+  } \
+ } else { \
+  t->svt_ ## N = NULL; \
+  cb = NULL; \
+ } \
+ w->cb_ ## N = cb; \
+}
/* --- XS ------------------------------------------------------------------ */
BOOT:
{
HV *stash;
+ int c;
MY_CXT_INIT;
- MY_CXT.b__op_stashes[0] = NULL;
+ for (c = OPc_NULL; c < OPc_MAX; ++c)
+ MY_CXT.b__op_stashes[c] = NULL;
#if VMG_THREADSAFE
MUTEX_INIT(&vmg_vtable_refcount_mutex);
MUTEX_INIT(&vmg_op_name_init_mutex);
newCONSTSUB(stash, "MGf_DUP", newSVuv(MGf_DUP));
newCONSTSUB(stash, "MGf_LOCAL", newSVuv(MGf_LOCAL));
newCONSTSUB(stash, "VMG_UVAR", newSVuv(VMG_UVAR));
+ newCONSTSUB(stash, "VMG_COMPAT_SCALAR_LENGTH_NOLEN",
+ newSVuv(VMG_COMPAT_SCALAR_LENGTH_NOLEN));
newCONSTSUB(stash, "VMG_COMPAT_ARRAY_PUSH_NOLEN",
newSVuv(VMG_COMPAT_ARRAY_PUSH_NOLEN));
newCONSTSUB(stash, "VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID",
newSVuv(VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID));
newCONSTSUB(stash, "VMG_COMPAT_ARRAY_UNDEF_CLEAR",
newSVuv(VMG_COMPAT_ARRAY_UNDEF_CLEAR));
- newCONSTSUB(stash, "VMG_COMPAT_SCALAR_LENGTH_NOLEN",
- newSVuv(VMG_COMPAT_SCALAR_LENGTH_NOLEN));
+ newCONSTSUB(stash, "VMG_COMPAT_HASH_DELETE_NOUVAR_VOID",
+ newSVuv(VMG_COMPAT_HASH_DELETE_NOUVAR_VOID));
newCONSTSUB(stash, "VMG_COMPAT_GLOB_GET", newSVuv(VMG_COMPAT_GLOB_GET));
newCONSTSUB(stash, "VMG_PERL_PATCHLEVEL", newSVuv(VMG_PERL_PATCHLEVEL));
newCONSTSUB(stash, "VMG_THREADSAFE", newSVuv(VMG_THREADSAFE));
PREINIT:
vmg_wizard *w;
MGVTBL *t;
- SV *cb, *op_info, *copy_key;
+ SV *op_info, *copy_key;
I32 i = 0;
CODE:
if (items != 9