#endif
#ifndef VMG_MULTIPLICITY
-# if defined(MULTIPLICITY) || defined(PERL_IMPLICIT_CONTEXT)
+# if defined(MULTIPLICITY)
# define VMG_MULTIPLICITY 1
# else
# define VMG_MULTIPLICITY 0
# endif
#endif
+#if VMG_MULTIPLICITY
+# ifndef PERL_IMPLICIT_CONTEXT
+# error MULTIPLICITY builds must set PERL_IMPLICIT_CONTEXT
+# endif
+#endif
#if VMG_MULTIPLICITY && defined(USE_ITHREADS) && defined(dMY_CXT) && defined(MY_CXT) && defined(START_MY_CXT) && defined(MY_CXT_INIT) && (defined(MY_CXT_CLONE) || defined(dMY_CXT_SV))
-# define VMG_THREADSAFE 1
+# ifndef VMG_THREADSAFE
+# define VMG_THREADSAFE 1
+# endif
# ifndef MY_CXT_CLONE
# define MY_CXT_CLONE \
dMY_CXT_SV; \
sv_setuv(my_cxt_sv, PTR2UV(my_cxtp))
# endif
#else
+# undef VMG_THREADSAFE
# define VMG_THREADSAFE 0
# undef dMY_CXT
# define dMY_CXT dNOOP
# define VMG_COMPAT_SCALAR_LENGTH_NOLEN 0
#endif
+#if VMG_HAS_PERL(5, 17, 4)
+# define VMG_COMPAT_SCALAR_NOLEN 1
+#else
+# define VMG_COMPAT_SCALAR_NOLEN 0
+#endif
+
/* Applied to dev-5.9 as 25854, integrated to maint-5.8 as 28160, partially
* reverted to dev-5.11 as 9cdcb38b */
#if VMG_HAS_PERL_MAINT(5, 8, 9, 28160) || VMG_HAS_PERL_MAINT(5, 9, 3, 25854) || VMG_HAS_PERL(5, 10, 0)
# define VMG_COMPAT_HASH_DELETE_NOUVAR_VOID 0
#endif
+#if VMG_HAS_PERL(5, 17, 0)
+# define VMG_COMPAT_CODE_COPY_CLONE 1
+#else
+# define VMG_COMPAT_CODE_COPY_CLONE 0
+#endif
+
#if VMG_HAS_PERL(5, 13, 2)
# define VMG_COMPAT_GLOB_GET 1
#else
# define VMG_COMPAT_GLOB_GET 0
#endif
+#define VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE (VMG_HAS_PERL(5, 10, 0) && !VMG_HAS_PERL(5, 10, 1))
+
+/* NewOp() isn't public in perl 5.8.0. */
+#define VMG_RESET_RMG_NEEDS_TRAMPOLINE (VMG_UVAR && (VMG_THREADSAFE || !VMG_HAS_PERL(5, 8, 1)))
+
/* ... Bug-free mg_magical ................................................. */
/* See the discussion at http://www.xray.mpe.mpg.de/mailing-lists/perl5-porters/2008-01/msg00036.html */
#else
-STATIC void vmg_mg_magical(SV *sv) {
+static void vmg_mg_magical(SV *sv) {
const MAGIC *mg;
SvMAGICAL_off(sv);
#endif
-/* ... Safe version of call_sv() ........................................... */
+/* --- Trampoline ops ------------------------------------------------------ */
+
+#define VMG_NEEDS_TRAMPOLINE VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE || VMG_RESET_RMG_NEEDS_TRAMPOLINE
+
+#if VMG_NEEDS_TRAMPOLINE
+
+/* A trampoline is a pair of fake ops spliced into the runloop: "temp" is a
+ * copy of the op being intercepted whose op_next is redirected to "target",
+ * an SVOP whose op_ppaddr runs our callback before resuming normal flow. */
+typedef struct {
+ OP temp;
+ SVOP target;
+} vmg_trampoline;
+
+/* One-time setup of a trampoline: wire temp's op_next to target and install
+ * the callback as target's op_ppaddr. The per-use fields (temp's contents,
+ * target's op_sv/op_next) are filled later by vmg_trampoline_bump(). */
+static void vmg_trampoline_init(vmg_trampoline *t, OP *(*cb)(pTHX)) {
+ t->temp.op_type = OP_STUB;
+ t->temp.op_ppaddr = 0;
+ t->temp.op_next = (OP *) &t->target;
+ t->temp.op_flags = 0;
+ t->temp.op_private = 0;
+
+ t->target.op_type = OP_STUB;
+ t->target.op_ppaddr = cb;
+ t->target.op_next = NULL;
+ t->target.op_flags = 0;
+ t->target.op_private = 0;
+ t->target.op_sv = NULL;
+}
+
+/* Arm the trampoline for the current op o: clone o into temp (so its own
+ * op_ppaddr still runs), divert temp's op_next through target carrying sv as
+ * payload, and splice target back into o's original successor. The caller
+ * assigns the returned op to PL_op. */
+static OP *vmg_trampoline_bump(pTHX_ vmg_trampoline *t, SV *sv, OP *o) {
+#define vmg_trampoline_bump(T, S, O) vmg_trampoline_bump(aTHX_ (T), (S), (O))
+ t->temp = *o;
+ t->temp.op_next = (OP *) &t->target;
+
+ t->target.op_sv = sv;
+ t->target.op_next = o->op_next;
+
+ return &t->temp;
+}
+
+#endif /* VMG_NEEDS_TRAMPOLINE */
-#define VMG_SAVE_LAST_CX (!VMG_HAS_PERL(5, 8, 4) || VMG_HAS_PERL(5, 9, 5))
+/* --- Cleaner version of sv_magicext() ------------------------------------ */
-STATIC I32 vmg_call_sv(pTHX_ SV *sv, I32 flags, I32 destructor) {
-#define vmg_call_sv(S, F, D) vmg_call_sv(aTHX_ (S), (F), (D))
- I32 ret, cxix = 0, in_eval = 0;
-#if VMG_SAVE_LAST_CX
+/* Wrapper around sv_magicext() for PERL_MAGIC_ext magic: zeroes mg_private
+ * and sets the MGf_COPY/MGf_DUP/MGf_LOCAL flags from the vtable's populated
+ * slots. Returns the new MAGIC, or NULL if sv_magicext() failed. */
+static MAGIC *vmg_sv_magicext(pTHX_ SV *sv, SV *obj, const MGVTBL *vtbl, const void *ptr, I32 len) {
+#define vmg_sv_magicext(S, O, V, P, L) vmg_sv_magicext(aTHX_ (S), (O), (V), (P), (L))
+ MAGIC *mg;
+
+ mg = sv_magicext(sv, obj, PERL_MAGIC_ext, vtbl, ptr, len);
+ if (!mg)
+ return NULL;
+
+ mg->mg_private = 0;
+
+ if (vtbl->svt_copy)
+ mg->mg_flags |= MGf_COPY;
+#if MGf_DUP
+ if (vtbl->svt_dup)
+ mg->mg_flags |= MGf_DUP;
+#endif /* MGf_DUP */
+#if MGf_LOCAL
+ if (vtbl->svt_local)
+ mg->mg_flags |= MGf_LOCAL;
+#endif /* MGf_LOCAL */
+
+ /* sv_magicext() incremented obj's refcount; drop the caller's reference
+ * when the magic owns it, so the object dies with the magic. */
+ if (mg->mg_flags & MGf_REFCOUNTED)
+ SvREFCNT_dec(obj);
+
+ return mg;
+}
+
+/* --- Safe version of call_sv() ------------------------------------------- */
+
+static I32 vmg_call_sv(pTHX_ SV *sv, I32 flags, int (*cleanup)(pTHX_ void *), void *ud) {
+#define vmg_call_sv(S, F, C, U) vmg_call_sv(aTHX_ (S), (F), (C), (U))
+ I32 ret, cxix;
PERL_CONTEXT saved_cx;
-#endif
SV *old_err = NULL;
if (SvTRUE(ERRSV)) {
- old_err = ERRSV;
- ERRSV = newSV(0);
+ old_err = newSVsv(ERRSV);
+ sv_setsv(ERRSV, &PL_sv_undef);
}
- if (cxstack_ix < cxstack_max) {
- cxix = cxstack_ix + 1;
- if (destructor && CxTYPE(cxstack + cxix) == CXt_EVAL)
- in_eval = 1;
- }
-
-#if VMG_SAVE_LAST_CX
+ cxix = (cxstack_ix < cxstack_max) ? (cxstack_ix + 1) : Perl_cxinc(aTHX);
/* The last popped context will be reused by call_sv(), but our callers may
* still need its previous value. Back it up so that it isn't clobbered. */
saved_cx = cxstack[cxix];
-#endif
ret = call_sv(sv, flags | G_EVAL);
-#if VMG_SAVE_LAST_CX
cxstack[cxix] = saved_cx;
-#endif
if (SvTRUE(ERRSV)) {
- if (old_err) {
- sv_setsv(old_err, ERRSV);
- SvREFCNT_dec(ERRSV);
- ERRSV = old_err;
- }
+ SvREFCNT_dec(old_err);
+
if (IN_PERL_COMPILETIME) {
if (!PL_in_eval) {
if (PL_errors)
#else
++PL_Ierror_count;
#endif
- } else if (!in_eval)
+ } else {
+ if (!cleanup || cleanup(aTHX_ ud))
croak(NULL);
+ }
} else {
if (old_err) {
- SvREFCNT_dec(ERRSV);
- ERRSV = old_err;
+ sv_setsv(ERRSV, old_err);
+ SvREFCNT_dec(old_err);
}
}
/* --- Stolen chunk of B --------------------------------------------------- */
+/* B-style op classes. Explicit initializers are dropped: the enum is dense,
+ * values track declaration order, and OPc_MAX must stay last so it counts
+ * the entries (METHOP slots in before it on perl >= 5.21.5). */
typedef enum {
- OPc_NULL = 0,
- OPc_BASEOP = 1,
- OPc_UNOP = 2,
- OPc_BINOP = 3,
- OPc_LOGOP = 4,
- OPc_LISTOP = 5,
- OPc_PMOP = 6,
- OPc_SVOP = 7,
- OPc_PADOP = 8,
- OPc_PVOP = 9,
- OPc_LOOP = 10,
- OPc_COP = 11,
- OPc_MAX = 12
+ OPc_NULL,
+ OPc_BASEOP,
+ OPc_UNOP,
+ OPc_BINOP,
+ OPc_LOGOP,
+ OPc_LISTOP,
+ OPc_PMOP,
+ OPc_SVOP,
+ OPc_PADOP,
+ OPc_PVOP,
+ OPc_LOOP,
+ OPc_COP,
+#if VMG_HAS_PERL(5, 21, 5)
+ OPc_METHOP,
+#endif
+ OPc_MAX
} opclass;
-STATIC const char *const vmg_opclassnames[] = {
+static const char *const vmg_opclassnames[] = {
"B::NULL",
"B::OP",
"B::UNOP",
"B::PADOP",
"B::PVOP",
"B::LOOP",
- "B::COP"
+ "B::COP",
+#if VMG_HAS_PERL(5, 21, 5)
+ "B::METHOP",
+#endif
+ NULL
};
-STATIC opclass vmg_opclass(const OP *o) {
+static opclass vmg_opclass(const OP *o) {
#if 0
if (!o)
return OPc_NULL;
return OPc_BASEOP;
else
return OPc_PVOP;
+#if VMG_HAS_PERL(5, 21, 5)
+ case OA_METHOP:
+ return OPc_METHOP;
+#endif
}
return OPc_BASEOP;
/* --- Error messages ------------------------------------------------------ */
-STATIC const char vmg_invalid_wiz[] = "Invalid wizard object";
-STATIC const char vmg_wrongargnum[] = "Wrong number of arguments";
-STATIC const char vmg_argstorefailed[] = "Error while storing arguments";
+static const char vmg_invalid_wiz[] = "Invalid wizard object";
+static const char vmg_wrongargnum[] = "Wrong number of arguments";
/* --- Context-safe global data -------------------------------------------- */
#define MY_CXT_KEY __PACKAGE__ "::_guts" XS_VERSION
typedef struct {
- HV *b__op_stashes[OPc_MAX];
+ HV *b__op_stashes[OPc_MAX];
+ I32 depth;
+ MAGIC *freed_tokens;
+#if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
+ vmg_trampoline propagate_errsv;
+#endif
+#if VMG_RESET_RMG_NEEDS_TRAMPOLINE
+ vmg_trampoline reset_rmg;
+#endif
} my_cxt_t;
START_MY_CXT
U32 refcount;
} vmg_vtable;
-STATIC vmg_vtable *vmg_vtable_alloc(pTHX) {
+static vmg_vtable *vmg_vtable_alloc(pTHX) {
#define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
vmg_vtable *t;
#define vmg_vtable_vtbl(T) (T)->vtbl
-#if VMG_THREADSAFE
-STATIC perl_mutex vmg_vtable_refcount_mutex;
-#endif
+static perl_mutex vmg_vtable_refcount_mutex;
-STATIC vmg_vtable *vmg_vtable_dup(pTHX_ vmg_vtable *t) {
+static vmg_vtable *vmg_vtable_dup(pTHX_ vmg_vtable *t) {
#define vmg_vtable_dup(T) vmg_vtable_dup(aTHX_ (T))
VMG_LOCK(&vmg_vtable_refcount_mutex);
++t->refcount;
return t;
}
-STATIC void vmg_vtable_free(pTHX_ vmg_vtable *t) {
+static void vmg_vtable_free(pTHX_ vmg_vtable *t) {
#define vmg_vtable_free(T) vmg_vtable_free(aTHX_ (T))
U32 refcount;
typedef MGVTBL vmg_vtable;
-STATIC vmg_vtable *vmg_vtable_alloc(pTHX) {
+static vmg_vtable *vmg_vtable_alloc(pTHX) {
#define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
vmg_vtable *t;
#endif /* VMG_UVAR */
} vmg_wizard;
-STATIC void vmg_op_info_init(pTHX_ unsigned int opinfo);
+static void vmg_op_info_init(pTHX_ unsigned int opinfo);
-STATIC vmg_wizard *vmg_wizard_alloc(pTHX_ UV opinfo) {
+static vmg_wizard *vmg_wizard_alloc(pTHX_ UV opinfo) {
#define vmg_wizard_alloc(O) vmg_wizard_alloc(aTHX_ (O))
vmg_wizard *w;
return w;
}
-STATIC void vmg_wizard_free(pTHX_ vmg_wizard *w) {
+static void vmg_wizard_free(pTHX_ vmg_wizard *w) {
#define vmg_wizard_free(W) vmg_wizard_free(aTHX_ (W))
if (!w)
return;
- SvREFCNT_dec(w->cb_data);
- SvREFCNT_dec(w->cb_get);
- SvREFCNT_dec(w->cb_set);
- SvREFCNT_dec(w->cb_len);
- SvREFCNT_dec(w->cb_clear);
- SvREFCNT_dec(w->cb_free);
- SvREFCNT_dec(w->cb_copy);
+ /* During global destruction, any of the callbacks may already have been
+ * freed, so we can't rely on still being able to access them. */
+ if (!PL_dirty) {
+ SvREFCNT_dec(w->cb_data);
+ SvREFCNT_dec(w->cb_get);
+ SvREFCNT_dec(w->cb_set);
+ SvREFCNT_dec(w->cb_len);
+ SvREFCNT_dec(w->cb_clear);
+ SvREFCNT_dec(w->cb_free);
+ SvREFCNT_dec(w->cb_copy);
#if 0
- SvREFCNT_dec(w->cb_dup);
+ SvREFCNT_dec(w->cb_dup);
#endif
#if MGf_LOCAL
- SvREFCNT_dec(w->cb_local);
+ SvREFCNT_dec(w->cb_local);
#endif /* MGf_LOCAL */
#if VMG_UVAR
- SvREFCNT_dec(w->cb_fetch);
- SvREFCNT_dec(w->cb_store);
- SvREFCNT_dec(w->cb_exists);
- SvREFCNT_dec(w->cb_delete);
+ SvREFCNT_dec(w->cb_fetch);
+ SvREFCNT_dec(w->cb_store);
+ SvREFCNT_dec(w->cb_exists);
+ SvREFCNT_dec(w->cb_delete);
#endif /* VMG_UVAR */
+ }
+ /* PerlMemShared_free() and Safefree() are still fine during global
+ * destruction though. */
vmg_vtable_free(w->vtable);
Safefree(w);
z->cb_ ## N = (w->cb_ ## N) ? SvREFCNT_inc(sv_dup(w->cb_ ## N, params)) \
: NULL;
-STATIC const vmg_wizard *vmg_wizard_dup(pTHX_ const vmg_wizard *w, CLONE_PARAMS *params) {
+static const vmg_wizard *vmg_wizard_dup(pTHX_ const vmg_wizard *w, CLONE_PARAMS *params) {
#define vmg_wizard_dup(W, P) vmg_wizard_dup(aTHX_ (W), (P))
vmg_wizard *z;
/* --- Wizard SV objects --------------------------------------------------- */
-STATIC int vmg_wizard_sv_free(pTHX_ SV *sv, MAGIC *mg) {
- if (PL_dirty) /* During global destruction, the context is already freed */
- return 0;
-
+static int vmg_wizard_sv_free(pTHX_ SV *sv, MAGIC *mg) {
vmg_wizard_free((vmg_wizard *) mg->mg_ptr);
return 0;
#if VMG_THREADSAFE
-STATIC int vmg_wizard_sv_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
+static int vmg_wizard_sv_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
mg->mg_ptr = (char *) vmg_wizard_dup((const vmg_wizard *) mg->mg_ptr, params);
return 0;
#endif /* VMG_THREADSAFE */
-STATIC MGVTBL vmg_wizard_sv_vtbl = {
+static MGVTBL vmg_wizard_sv_vtbl = {
NULL, /* get */
NULL, /* set */
NULL, /* len */
#endif /* MGf_LOCAL */
};
-STATIC SV *vmg_wizard_sv_new(pTHX_ const vmg_wizard *w) {
+static SV *vmg_wizard_sv_new(pTHX_ const vmg_wizard *w) {
#define vmg_wizard_sv_new(W) vmg_wizard_sv_new(aTHX_ (W))
SV *wiz;
wiz = newSViv(PTR2IV(w));
#endif
- if (w) {
- MAGIC *mg = sv_magicext(wiz, NULL, PERL_MAGIC_ext, &vmg_wizard_sv_vtbl,
- (const char *) w, 0);
- mg->mg_private = 0;
-#if VMG_THREADSAFE
- mg->mg_flags |= MGf_DUP;
-#endif
- }
+ vmg_sv_magicext(wiz, NULL, &vmg_wizard_sv_vtbl, w, 0);
+
SvREADONLY_on(wiz);
return wiz;
#define vmg_sv_has_wizard_type(S) (SvTYPE(S) >= SVt_PVMG)
-STATIC const vmg_wizard *vmg_wizard_from_sv_nocheck(const SV *wiz) {
+static const vmg_wizard *vmg_wizard_from_sv_nocheck(const SV *wiz) {
MAGIC *mg;
for (mg = SvMAGIC(wiz); mg; mg = mg->mg_moremagic) {
#define vmg_wizard_from_sv(W) (vmg_sv_has_wizard_type(W) ? vmg_wizard_from_sv_nocheck(W) : NULL)
-STATIC const vmg_wizard *vmg_wizard_from_mg(const MAGIC *mg) {
+static const vmg_wizard *vmg_wizard_from_mg(const MAGIC *mg) {
if (mg->mg_type == PERL_MAGIC_ext && mg->mg_len == HEf_SVKEY) {
SV *sv = (SV *) mg->mg_ptr;
/* --- User-level functions implementation --------------------------------- */
-STATIC const MAGIC *vmg_find(const SV *sv, const vmg_wizard *w) {
+static const MAGIC *vmg_find(const SV *sv, const vmg_wizard *w) {
const MAGIC *mg;
IV wid;
/* ... Construct private data .............................................. */
-STATIC SV *vmg_data_new(pTHX_ SV *ctor, SV *sv, SV **args, I32 items) {
+static SV *vmg_data_new(pTHX_ SV *ctor, SV *sv, SV **args, I32 items) {
#define vmg_data_new(C, S, A, I) vmg_data_new(aTHX_ (C), (S), (A), (I))
I32 i;
SV *nsv;
PUSHs(args[i]);
PUTBACK;
- vmg_call_sv(ctor, G_SCALAR, 0);
+ vmg_call_sv(ctor, G_SCALAR, 0, NULL);
SPAGAIN;
nsv = POPs;
return nsv;
}
-STATIC SV *vmg_data_get(pTHX_ SV *sv, const vmg_wizard *w) {
+static SV *vmg_data_get(pTHX_ SV *sv, const vmg_wizard *w) {
#define vmg_data_get(S, W) vmg_data_get(aTHX_ (S), (W))
const MAGIC *mg = vmg_find(sv, w);
/* ... Magic cast/dispell .................................................. */
#if VMG_UVAR
-STATIC I32 vmg_svt_val(pTHX_ IV, SV *);
-STATIC void vmg_uvar_del(SV *sv, MAGIC *prevmagic, MAGIC *mg, MAGIC *moremagic) {
- if (prevmagic) {
+static I32 vmg_svt_val(pTHX_ IV, SV *);
+
+typedef struct {
+ struct ufuncs new_uf;
+ struct ufuncs old_uf;
+} vmg_uvar_ud;
+
+#endif /* VMG_UVAR */
+
+static void vmg_mg_del(pTHX_ SV *sv, MAGIC *prevmagic, MAGIC *mg, MAGIC *moremagic) {
+#define vmg_mg_del(S, P, M, N) vmg_mg_del(aTHX_ (S), (P), (M), (N))
+ dMY_CXT;
+
+ if (prevmagic)
prevmagic->mg_moremagic = moremagic;
- } else {
+ else
SvMAGIC_set(sv, moremagic);
+
+ /* Destroy private data */
+#if VMG_UVAR
+ if (mg->mg_type == PERL_MAGIC_uvar) {
+ Safefree(mg->mg_ptr);
+ } else {
+#endif /* VMG_UVAR */
+ if (mg->mg_obj != sv) {
+ SvREFCNT_dec(mg->mg_obj);
+ mg->mg_obj = NULL;
+ }
+ /* Unreference the wizard */
+ SvREFCNT_dec((SV *) mg->mg_ptr);
+ mg->mg_ptr = NULL;
+#if VMG_UVAR
}
- mg->mg_moremagic = NULL;
- Safefree(mg->mg_ptr);
- Safefree(mg);
-}
#endif /* VMG_UVAR */
-STATIC UV vmg_cast(pTHX_ SV *sv, const vmg_wizard *w, const SV *wiz, SV **args, I32 items) {
+ if (MY_CXT.depth) {
+ mg->mg_moremagic = MY_CXT.freed_tokens;
+ MY_CXT.freed_tokens = mg;
+ } else {
+ mg->mg_moremagic = NULL;
+ Safefree(mg);
+ }
+}
+
+/* Free every MAGIC token in the mg_moremagic-linked chain, except the one
+ * equal to skip (if any). Returns how many tokens were skipped, i.e. how
+ * many times skip was encountered. */
+static int vmg_magic_chain_free(pTHX_ MAGIC *mg, MAGIC *skip) {
+#define vmg_magic_chain_free(M, S) vmg_magic_chain_free(aTHX_ (M), (S))
+ int skipped = 0;
+
+ while (mg) {
+ MAGIC *moremagic = mg->mg_moremagic;
+
+ if (mg == skip)
+ ++skipped;
+ else
+ Safefree(mg);
+
+ mg = moremagic;
+ }
+
+ return skipped;
+}
+
+static UV vmg_cast(pTHX_ SV *sv, const vmg_wizard *w, const SV *wiz, SV **args, I32 items) {
#define vmg_cast(S, W, WIZ, A, I) vmg_cast(aTHX_ (S), (W), (WIZ), (A), (I))
MAGIC *mg;
MGVTBL *t;
data = (w->cb_data) ? vmg_data_new(w->cb_data, sv, args, items) : NULL;
t = vmg_vtable_vtbl(w->vtable);
- mg = sv_magicext(sv, data, PERL_MAGIC_ext, t, (const char *) wiz, HEf_SVKEY);
- mg->mg_private = 0;
-
- /* sv_magicext() calls mg_magical and increments data's refcount */
- SvREFCNT_dec(data);
-
- if (t->svt_copy)
- mg->mg_flags |= MGf_COPY;
-#if 0
- if (t->svt_dup)
- mg->mg_flags |= MGf_DUP;
-#endif
-#if MGf_LOCAL
- if (t->svt_local)
- mg->mg_flags |= MGf_LOCAL;
-#endif /* MGf_LOCAL */
+ mg = vmg_sv_magicext(sv, data, t, wiz, HEf_SVKEY);
if (SvTYPE(sv) < SVt_PVHV)
goto done;
#if VMG_UVAR
if (w->uvar) {
MAGIC *prevmagic, *moremagic = NULL;
- struct ufuncs uf[2];
+ vmg_uvar_ud ud;
- uf[0].uf_val = vmg_svt_val;
- uf[0].uf_set = NULL;
- uf[0].uf_index = 0;
- uf[1].uf_val = NULL;
- uf[1].uf_set = NULL;
- uf[1].uf_index = 0;
+ ud.new_uf.uf_val = vmg_svt_val;
+ ud.new_uf.uf_set = NULL;
+ ud.new_uf.uf_index = 0;
+ ud.old_uf.uf_val = NULL;
+ ud.old_uf.uf_set = NULL;
+ ud.old_uf.uf_index = 0;
/* One uvar magic in the chain is enough. */
for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
}
if (mg) { /* Found another uvar magic. */
- struct ufuncs *olduf = (struct ufuncs *) mg->mg_ptr;
- if (olduf->uf_val == vmg_svt_val) {
+ struct ufuncs *uf = (struct ufuncs *) mg->mg_ptr;
+ if (uf->uf_val == vmg_svt_val) {
/* It's our uvar magic, nothing to do. oldgmg was true. */
goto done;
} else {
/* It's another uvar magic, backup it and replace it by ours. */
- uf[1] = *olduf;
- vmg_uvar_del(sv, prevmagic, mg, moremagic);
+ ud.old_uf = *uf;
+ vmg_mg_del(sv, prevmagic, mg, moremagic);
}
}
- sv_magic(sv, NULL, PERL_MAGIC_uvar, (const char *) &uf, sizeof(uf));
+ sv_magic(sv, NULL, PERL_MAGIC_uvar, (const char *) &ud, sizeof(ud));
vmg_mg_magical(sv);
/* Our hash now carries uvar magic. The uvar/clear shortcoming has to be
* handled by our uvar callback. */
return 1;
}
-STATIC UV vmg_dispell(pTHX_ SV *sv, const vmg_wizard *w) {
+static UV vmg_dispell(pTHX_ SV *sv, const vmg_wizard *w) {
#define vmg_dispell(S, W) vmg_dispell(aTHX_ (S), (W))
#if VMG_UVAR
U32 uvars = 0;
if (!mg)
return 0;
- if (prevmagic) {
- prevmagic->mg_moremagic = moremagic;
- } else {
- SvMAGIC_set(sv, moremagic);
- }
- mg->mg_moremagic = NULL;
-
- /* Destroy private data */
- if (mg->mg_obj != sv)
- SvREFCNT_dec(mg->mg_obj);
- /* Unreference the wizard */
- SvREFCNT_dec((SV *) mg->mg_ptr);
- Safefree(mg);
+ vmg_mg_del(sv, prevmagic, mg, moremagic);
#if VMG_UVAR
if (uvars == 1 && SvTYPE(sv) >= SVt_PVHV) {
}
if (uvars == 1) {
- struct ufuncs *uf;
+ vmg_uvar_ud *ud;
+
for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic){
moremagic = mg->mg_moremagic;
if (mg->mg_type == PERL_MAGIC_uvar)
break;
}
- /* assert(mg); */
- uf = (struct ufuncs *) mg->mg_ptr;
- /* assert(uf->uf_val == vmg_svt_val); */
- if (uf[1].uf_val || uf[1].uf_set) {
+
+ ud = (vmg_uvar_ud *) mg->mg_ptr;
+ if (ud->old_uf.uf_val || ud->old_uf.uf_set) {
/* Revert the original uvar magic. */
- uf[0] = uf[1];
- Renew(uf, 1, struct ufuncs);
+ struct ufuncs *uf;
+ Newx(uf, 1, struct ufuncs);
+ *uf = ud->old_uf;
+ Safefree(ud);
mg->mg_ptr = (char *) uf;
- mg->mg_len = sizeof(struct ufuncs);
+ mg->mg_len = sizeof(*uf);
} else {
/* Remove the uvar magic. */
- vmg_uvar_del(sv, prevmagic, mg, moremagic);
+ vmg_mg_del(sv, prevmagic, mg, moremagic);
}
}
}
#define VMG_OP_INFO_OBJECT 2
#if VMG_THREADSAFE
-STATIC perl_mutex vmg_op_name_init_mutex;
+static perl_mutex vmg_op_name_init_mutex;
#endif
-STATIC U32 vmg_op_name_init = 0;
-STATIC unsigned char vmg_op_name_len[MAXO] = { 0 };
+static U32 vmg_op_name_init = 0;
+static unsigned char vmg_op_name_len[MAXO] = { 0 };
-STATIC void vmg_op_info_init(pTHX_ unsigned int opinfo) {
+static void vmg_op_info_init(pTHX_ unsigned int opinfo) {
#define vmg_op_info_init(W) vmg_op_info_init(aTHX_ (W))
switch (opinfo) {
case VMG_OP_INFO_NAME:
}
}
-STATIC SV *vmg_op_info(pTHX_ unsigned int opinfo) {
+static SV *vmg_op_info(pTHX_ unsigned int opinfo) {
#define vmg_op_info(W) vmg_op_info(aTHX_ (W))
if (!PL_op)
return &PL_sv_undef;
return &PL_sv_undef;
}
-/* ... svt callbacks ....................................................... */
+/* --- svt callbacks ------------------------------------------------------- */
#define VMG_CB_CALL_ARGS_MASK 15
#define VMG_CB_CALL_ARGS_SHIFT 4
-#define VMG_CB_CALL_OPINFO (VMG_OP_INFO_NAME|VMG_OP_INFO_OBJECT)
+#define VMG_CB_CALL_OPINFO (VMG_OP_INFO_NAME|VMG_OP_INFO_OBJECT) /* 1|2 */
+#define VMG_CB_CALL_GUARD 4
+
+/* Cleanup callback invoked by vmg_call_sv() when a guarded magic callback is
+ * about to die. Rebalances MY_CXT.depth (the normal decrement after the call
+ * is skipped by the croak) and, at the outermost level, flushes the queue of
+ * deferred freed magic tokens. Always returns 1 so the error propagates. */
+static int vmg_dispell_guard_oncroak(pTHX_ void *ud) {
+ dMY_CXT;
+
+ MY_CXT.depth--;
+
+ /* If we're at the upmost magic call and we're about to die, we can just free
+ * the tokens right now, since we will jump past the problematic part of our
+ * caller. */
+ if (MY_CXT.depth == 0 && MY_CXT.freed_tokens) {
+ vmg_magic_chain_free(MY_CXT.freed_tokens, NULL);
+ MY_CXT.freed_tokens = NULL;
+ }
+
+ return 1;
+}
+
+/* Free magic hook for a dispell guard SV: releases the whole chain of
+ * deferred magic tokens stashed in mg_ptr when the guard mortal dies. */
+static int vmg_dispell_guard_free(pTHX_ SV *sv, MAGIC *mg) {
+ vmg_magic_chain_free((MAGIC *) mg->mg_ptr, NULL);
+
+ return 0;
+}
+
+#if VMG_THREADSAFE
+
+static int vmg_dispell_guard_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
+ /* The freed magic tokens aren't cloned by perl because it cannot reach them
+ * (they have been detached from their parent SV when they were enqueued).
+ * Hence there's nothing to purge in the new thread. */
+ mg->mg_ptr = NULL;
-STATIC int vmg_cb_call(pTHX_ SV *cb, unsigned int flags, SV *sv, ...) {
+ return 0;
+}
+
+#endif /* VMG_THREADSAFE */
+
+/* Vtable for dispell guard SVs: only the free hook is active, so the queued
+ * tokens are released exactly when the guard goes out of scope. */
+static MGVTBL vmg_dispell_guard_vtbl = {
+ NULL, /* get */
+ NULL, /* set */
+ NULL, /* len */
+ NULL, /* clear */
+ vmg_dispell_guard_free, /* free */
+ NULL, /* copy */
+#if VMG_THREADSAFE
+ vmg_dispell_guard_dup, /* dup */
+#else
+ NULL, /* dup */
+#endif
+#if MGf_LOCAL
+ NULL, /* local */
+#endif /* MGf_LOCAL */
+};
+
+/* Create a mortal guard SV carrying the chain of freed magic tokens rooted
+ * at root; vmg_dispell_guard_vtbl's free hook disposes of them when the
+ * current scope is left. */
+static SV *vmg_dispell_guard_new(pTHX_ MAGIC *root) {
+#define vmg_dispell_guard_new(R) vmg_dispell_guard_new(aTHX_ (R))
+ SV *guard;
+
+ guard = sv_newmortal();
+ vmg_sv_magicext(guard, NULL, &vmg_dispell_guard_vtbl, root, 0);
+
+ return guard;
+}
+
+static int vmg_cb_call(pTHX_ SV *cb, unsigned int flags, SV *sv, ...) {
va_list ap;
int ret = 0;
unsigned int i, args, opinfo;
+ MAGIC **chain = NULL;
SV *svr;
dSP;
XPUSHs(vmg_op_info(opinfo));
PUTBACK;
- vmg_call_sv(cb, G_SCALAR, 0);
+ if (flags & VMG_CB_CALL_GUARD) {
+ dMY_CXT;
+ MY_CXT.depth++;
+ vmg_call_sv(cb, G_SCALAR, vmg_dispell_guard_oncroak, NULL);
+ MY_CXT.depth--;
+ if (MY_CXT.depth == 0 && MY_CXT.freed_tokens)
+ chain = &MY_CXT.freed_tokens;
+ } else {
+ vmg_call_sv(cb, G_SCALAR, 0, NULL);
+ }
SPAGAIN;
svr = POPs;
if (SvOK(svr))
ret = (int) SvIV(svr);
+ if (SvROK(svr))
+ SvREFCNT_inc(svr);
+ else
+ svr = NULL;
PUTBACK;
FREETMPS;
LEAVE;
+ if (svr && !SvTEMP(svr))
+ sv_2mortal(svr);
+
+ if (chain) {
+ vmg_dispell_guard_new(*chain);
+ *chain = NULL;
+ }
+
return ret;
}
#define vmg_cb_call3(I, OI, S, A1, A2, A3) \
vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 3), (S), (A1), (A2), (A3))
-STATIC int vmg_svt_get(pTHX_ SV *sv, MAGIC *mg) {
+/* ... Default no-op magic callback ........................................ */
+
+/* Shared placeholder for magic hooks whose wizard has no user callback:
+ * does nothing and reports success. */
+static int vmg_svt_default_noop(pTHX_ SV *sv, MAGIC *mg) {
+ return 0;
+}
+
+/* ... get magic ........................................................... */
+
+static int vmg_svt_get(pTHX_ SV *sv, MAGIC *mg) {
const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
return vmg_cb_call1(w->cb_get, w->opinfo, sv, mg->mg_obj);
}
-STATIC int vmg_svt_set(pTHX_ SV *sv, MAGIC *mg) {
+#define vmg_svt_get_noop vmg_svt_default_noop
+
+/* ... set magic ........................................................... */
+
+static int vmg_svt_set(pTHX_ SV *sv, MAGIC *mg) {
const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
return vmg_cb_call1(w->cb_set, w->opinfo, sv, mg->mg_obj);
}
-STATIC U32 vmg_sv_len(pTHX_ SV *sv) {
+#define vmg_svt_set_noop vmg_svt_default_noop
+
+/* ... len magic ........................................................... */
+
+static U32 vmg_sv_len(pTHX_ SV *sv) {
#define vmg_sv_len(S) vmg_sv_len(aTHX_ (S))
STRLEN len;
#if VMG_HAS_PERL(5, 9, 3)
return DO_UTF8(sv) ? utf8_length(s, s + len) : len;
}
-STATIC U32 vmg_svt_len(pTHX_ SV *sv, MAGIC *mg) {
+static U32 vmg_svt_len(pTHX_ SV *sv, MAGIC *mg) {
const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
unsigned int opinfo = w->opinfo;
U32 len, ret;
XPUSHs(vmg_op_info(opinfo));
PUTBACK;
- vmg_call_sv(w->cb_len, G_SCALAR, 0);
+ vmg_call_sv(w->cb_len, G_SCALAR, 0, NULL);
SPAGAIN;
svr = POPs;
return ret;
}
-STATIC int vmg_svt_clear(pTHX_ SV *sv, MAGIC *mg) {
+/* len magic placeholder when the wizard has no len callback: mimic perl's
+ * default — string length for scalars, top index for arrays, 0 otherwise. */
+static U32 vmg_svt_len_noop(pTHX_ SV *sv, MAGIC *mg) {
+ U32 len = 0;
+ svtype t = SvTYPE(sv);
+
+ if (t < SVt_PVAV) {
+ len = vmg_sv_len(sv);
+ } else if (t == SVt_PVAV) {
+ len = (U32) av_len((AV *) sv);
+ }
+
+ return len;
+}
+
+/* ... clear magic ......................................................... */
+
+static int vmg_svt_clear(pTHX_ SV *sv, MAGIC *mg) {
 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
+ unsigned int flags = w->opinfo;
- return vmg_cb_call1(w->cb_clear, w->opinfo, sv, mg->mg_obj);
+ /* NOTE(review): on perls before 5.12, run the clear callback under the
+ * dispell guard so magic tokens freed from inside it are only disposed of
+ * once the call unwinds — confirm against the 5.12 mg.c changes. */
+#if !VMG_HAS_PERL(5, 12, 0)
+ flags |= VMG_CB_CALL_GUARD;
+#endif
+
+ return vmg_cb_call1(w->cb_clear, flags, sv, mg->mg_obj);
+}
+
+#define vmg_svt_clear_noop vmg_svt_default_noop
+
+/* ... free magic .......................................................... */
+
+#if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
+
+/* Trampoline target op: restore the saved error into ERRSV, then release
+ * the one reference op_sv held and disarm the slot before resuming the
+ * normal op stream. */
+static OP *vmg_pp_propagate_errsv(pTHX) {
+ SVOP *o = cSVOPx(PL_op);
+
+ if (o->op_sv) {
+ sv_setsv(ERRSV, o->op_sv);
+ SvREFCNT_dec(o->op_sv);
+ o->op_sv = NULL;
+ }
+
+ return NORMAL;
}
-STATIC int vmg_svt_free(pTHX_ SV *sv, MAGIC *mg) {
+#endif /* VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE */
+
+/* Free magic hook of the ERRSV-propagation guard: copy the preserved error
+ * (stored in mg_obj) back into ERRSV when the guard is reaped. */
+static int vmg_propagate_errsv_free(pTHX_ SV *sv, MAGIC *mg) {
+ if (mg->mg_obj)
+ sv_setsv(ERRSV, mg->mg_obj);
+
+ return 0;
+}
+
+/* perl is already kind enough to handle the cloning of the mg_obj member,
+ hence we don't need to define a dup magic callback. */
+
+/* Vtable for the ERRSV-propagation guard: only the free hook is active. */
+static MGVTBL vmg_propagate_errsv_vtbl = {
+ 0, /* get */
+ 0, /* set */
+ 0, /* len */
+ 0, /* clear */
+ vmg_propagate_errsv_free, /* free */
+ 0, /* copy */
+ 0, /* dup */
+#if MGf_LOCAL
+ 0, /* local */
+#endif /* MGf_LOCAL */
+};
+
+/* State handed from vmg_svt_free() to its croak-cleanup callback. */
+typedef struct {
+ SV *sv; /* the SV whose free magic is being run */
+ int in_eval; /* true if the next context frame is a CXt_EVAL */
+ I32 base; /* scope stack mark passed to LEAVE_SCOPE() when unwinding */
+} vmg_svt_free_cleanup_ud;
+
+/* Cleanup callback for exceptions thrown from a free-magic user callback.
+ * Inside an eval, unwind the temporary scope ourselves, save ERRSV, and
+ * schedule its restoration (via trampoline or guard magic, depending on the
+ * perl version) — then swallow the error (return 0). Outside an eval, detach
+ * the magic from the dying SV and let the error propagate (return 1). */
+static int vmg_svt_free_cleanup(pTHX_ void *ud_) {
+ vmg_svt_free_cleanup_ud *ud = VOID2(vmg_svt_free_cleanup_ud *, ud_);
+
+ if (ud->in_eval) {
+ U32 optype = PL_op ? PL_op->op_type : OP_NULL;
+
+ if (optype == OP_LEAVETRY || optype == OP_LEAVEEVAL) {
+ SV *errsv = newSVsv(ERRSV);
+
+ FREETMPS;
+ LEAVE_SCOPE(ud->base);
+
+#if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
+ if (optype == OP_LEAVETRY) {
+ dMY_CXT;
+ PL_op = vmg_trampoline_bump(&MY_CXT.propagate_errsv, errsv, PL_op);
+ } else if (optype == OP_LEAVEEVAL) {
+ SV *guard = sv_newmortal();
+ vmg_sv_magicext(guard, errsv, &vmg_propagate_errsv_vtbl, NULL, 0);
+ }
+#else /* !VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE */
+# if !VMG_HAS_PERL(5, 8, 9)
+ {
+ SV *guard = sv_newmortal();
+ vmg_sv_magicext(guard, errsv, &vmg_propagate_errsv_vtbl, NULL, 0);
+ }
+# else
+ vmg_sv_magicext(ERRSV, errsv, &vmg_propagate_errsv_vtbl, NULL, 0);
+# endif
+#endif /* VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE */
+
+ SAVETMPS;
+ }
+
+ /* Don't propagate */
+ return 0;
+ } else {
+ SV *sv = ud->sv;
+ MAGIC *mg;
+
+ /* We are about to croak() while sv is being destroyed. Try to clean up
+ * things a bit. */
+ mg = SvMAGIC(sv);
+ if (mg) {
+ vmg_mg_del(sv, NULL, mg, mg->mg_moremagic);
+ mg_magical(sv);
+ }
+ SvREFCNT_dec(sv);
+
+ vmg_dispell_guard_oncroak(aTHX_ NULL);
+
+ /* After that, propagate the error upwards. */
+ return 1;
+ }
+}
+
+static int vmg_svt_free(pTHX_ SV *sv, MAGIC *mg) {
+ vmg_svt_free_cleanup_ud ud;
const vmg_wizard *w;
int ret = 0;
SV *svr;
dSP;
- /* Don't even bother if we are in global destruction - the wizard is prisoner
- * of circular references and we are way beyond user realm */
+ /* During global destruction, we cannot be sure that the wizard and its free
+ * callback are still alive. */
if (PL_dirty)
return 0;
SvMAGIC_set(sv, mg);
#endif
+ ud.sv = sv;
+ if (cxstack_ix < cxstack_max) {
+ ud.in_eval = (CxTYPE(cxstack + cxstack_ix + 1) == CXt_EVAL);
+ ud.base = ud.in_eval ? PL_scopestack[PL_scopestack_ix] : 0;
+ } else {
+ ud.in_eval = 0;
+ ud.base = 0;
+ }
+
ENTER;
SAVETMPS;
XPUSHs(vmg_op_info(w->opinfo));
PUTBACK;
- vmg_call_sv(w->cb_free, G_SCALAR, 1);
+ {
+ dMY_CXT;
+ MY_CXT.depth++;
+ vmg_call_sv(w->cb_free, G_SCALAR, vmg_svt_free_cleanup, &ud);
+ MY_CXT.depth--;
+ if (MY_CXT.depth == 0 && MY_CXT.freed_tokens) {
+ /* Free all the tokens in the chain but the current one (if it's present).
+ * It will be taken care of by our caller, Perl_mg_free(). */
+ vmg_magic_chain_free(MY_CXT.freed_tokens, mg);
+ MY_CXT.freed_tokens = NULL;
+ }
+ }
SPAGAIN;
svr = POPs;
return ret;
}
+#define vmg_svt_free_noop vmg_svt_default_noop
+
#if VMG_HAS_PERL_MAINT(5, 11, 0, 33256) || VMG_HAS_PERL(5, 12, 0)
# define VMG_SVT_COPY_KEYLEN_TYPE I32
#else
# define VMG_SVT_COPY_KEYLEN_TYPE int
#endif
-STATIC int vmg_svt_copy(pTHX_ SV *sv, MAGIC *mg, SV *nsv, const char *key, VMG_SVT_COPY_KEYLEN_TYPE keylen) {
+/* ... copy magic .......................................................... */
+
+static int vmg_svt_copy(pTHX_ SV *sv, MAGIC *mg, SV *nsv, const char *key, VMG_SVT_COPY_KEYLEN_TYPE keylen) {
const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
SV *keysv;
int ret;
keysv = newSVpvn(key, keylen);
}
+ if (SvTYPE(sv) >= SVt_PVCV)
+ nsv = sv_2mortal(newRV_inc(nsv));
+
ret = vmg_cb_call3(w->cb_copy, w->opinfo, sv, mg->mg_obj, keysv, nsv);
if (keylen != HEf_SVKEY) {
return ret;
}
+/* copy magic placeholder when the wizard has no copy callback; needs its own
+ * stub because the copy hook's signature differs from the generic no-op. */
+static int vmg_svt_copy_noop(pTHX_ SV *sv, MAGIC *mg, SV *nsv, const char *key, VMG_SVT_COPY_KEYLEN_TYPE keylen) {
+ return 0;
+}
+
+/* ... dup magic ........................................................... */
+
#if 0
-STATIC int vmg_svt_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *param) {
+static int vmg_svt_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *param) {
return 0;
}
+#define vmg_svt_dup_noop vmg_svt_dup
#endif
+/* ... local magic ......................................................... */
+
#if MGf_LOCAL
-STATIC int vmg_svt_local(pTHX_ SV *nsv, MAGIC *mg) {
+
+static int vmg_svt_local(pTHX_ SV *nsv, MAGIC *mg) {
const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
return vmg_cb_call1(w->cb_local, w->opinfo, nsv, mg->mg_obj);
}
+
+#define vmg_svt_local_noop vmg_svt_default_noop
+
#endif /* MGf_LOCAL */
+/* ... uvar magic .......................................................... */
+
#if VMG_UVAR
-STATIC OP *vmg_pp_resetuvar(pTHX) {
- SvRMAGICAL_on(cSVOP_sv);
+
+/* Injected/trampolined op that turns SvRMAGICAL back on for the SV stored in
+ * op_sv (it was switched off around a uvar callback), then clears the slot
+ * so a stale SV pointer is never reused. */
+static OP *vmg_pp_reset_rmg(pTHX) {
+ SVOP *o = cSVOPx(PL_op);
+
+ SvRMAGICAL_on(o->op_sv);
+ o->op_sv = NULL;
+
+ return NORMAL;
+}
-STATIC I32 vmg_svt_val(pTHX_ IV action, SV *sv) {
- struct ufuncs *uf;
- MAGIC *mg, *umg;
+static I32 vmg_svt_val(pTHX_ IV action, SV *sv) {
+ vmg_uvar_ud *ud;
+ MAGIC *mg, *umg, *moremagic;
SV *key = NULL, *newkey = NULL;
int tied = 0;
umg = mg_find(sv, PERL_MAGIC_uvar);
/* umg can't be NULL or we wouldn't be there. */
key = umg->mg_obj;
- uf = (struct ufuncs *) umg->mg_ptr;
+ ud = (vmg_uvar_ud *) umg->mg_ptr;
- if (uf[1].uf_val)
- uf[1].uf_val(aTHX_ action, sv);
- if (uf[1].uf_set)
- uf[1].uf_set(aTHX_ action, sv);
+ if (ud->old_uf.uf_val)
+ ud->old_uf.uf_val(aTHX_ action, sv);
+ if (ud->old_uf.uf_set)
+ ud->old_uf.uf_set(aTHX_ action, sv);
- for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
+ for (mg = SvMAGIC(sv); mg; mg = moremagic) {
const vmg_wizard *w;
+ /* mg may be freed later by the uvar call, so we need to fetch the next
+ * token before reaching that fateful point. */
+ moremagic = mg->mg_moremagic;
+
switch (mg->mg_type) {
case PERL_MAGIC_ext:
break;
& (HV_FETCH_ISSTORE|HV_FETCH_ISEXISTS|HV_FETCH_LVALUE|HV_DELETE)) {
case 0:
if (w->cb_fetch)
- vmg_cb_call2(w->cb_fetch, w->opinfo, sv, mg->mg_obj, key);
+ vmg_cb_call2(w->cb_fetch, w->opinfo | VMG_CB_CALL_GUARD, sv,
+ mg->mg_obj, key);
break;
case HV_FETCH_ISSTORE:
case HV_FETCH_LVALUE:
case (HV_FETCH_ISSTORE|HV_FETCH_LVALUE):
if (w->cb_store)
- vmg_cb_call2(w->cb_store, w->opinfo, sv, mg->mg_obj, key);
+ vmg_cb_call2(w->cb_store, w->opinfo | VMG_CB_CALL_GUARD, sv,
+ mg->mg_obj, key);
break;
case HV_FETCH_ISEXISTS:
if (w->cb_exists)
- vmg_cb_call2(w->cb_exists, w->opinfo, sv, mg->mg_obj, key);
+ vmg_cb_call2(w->cb_exists, w->opinfo | VMG_CB_CALL_GUARD, sv,
+ mg->mg_obj, key);
break;
case HV_DELETE:
if (w->cb_delete)
- vmg_cb_call2(w->cb_delete, w->opinfo, sv, mg->mg_obj, key);
+ vmg_cb_call2(w->cb_delete, w->opinfo | VMG_CB_CALL_GUARD, sv,
+ mg->mg_obj, key);
break;
}
}
* mistaken for a tied hash by the rest of hv_common. It will be reset by
* the op_ppaddr of a new fake op injected between the current and the next
* one. */
- OP *nop = PL_op->op_next;
- if (!nop || nop->op_ppaddr != vmg_pp_resetuvar) {
- SVOP *svop;
+
+#if VMG_RESET_RMG_NEEDS_TRAMPOLINE
+
+ dMY_CXT;
+
+ PL_op = vmg_trampoline_bump(&MY_CXT.reset_rmg, sv, PL_op);
+
+#else /* !VMG_RESET_RMG_NEEDS_TRAMPOLINE */
+
+ OP *nop = PL_op->op_next;
+ SVOP *svop = NULL;
+
+ if (nop && nop->op_ppaddr == vmg_pp_reset_rmg) {
+ svop = (SVOP *) nop;
+ } else {
NewOp(1101, svop, 1, SVOP);
- svop->op_type = OP_STUB;
- svop->op_ppaddr = vmg_pp_resetuvar;
- svop->op_next = nop;
- svop->op_flags = 0;
- svop->op_sv = sv;
- PL_op->op_next = (OP *) svop;
+ svop->op_type = OP_STUB;
+ svop->op_ppaddr = vmg_pp_reset_rmg;
+ svop->op_next = nop;
+ svop->op_flags = 0;
+ svop->op_private = 0;
+
+ PL_op->op_next = (OP *) svop;
}
+
+ svop->op_sv = sv;
+
+#endif /* VMG_RESET_RMG_NEEDS_TRAMPOLINE */
+
SvRMAGICAL_off(sv);
}
return 0;
}
+
#endif /* VMG_UVAR */
-/* --- Macros for the XS section ------------------------------------------- */
+/* --- Module setup/teardown ----------------------------------------------- */
-#define VMG_SET_CB(S, N) \
- cb = (S); \
- w->cb_ ## N = (SvOK(cb) && SvROK(cb)) ? SvREFCNT_inc(SvRV(cb)) : NULL;
-
-#define VMG_SET_SVT_CB(S, N) \
- cb = (S); \
- if (SvOK(cb) && SvROK(cb)) { \
- t->svt_ ## N = vmg_svt_ ## N; \
- w->cb_ ## N = SvREFCNT_inc(SvRV(cb)); \
- } else { \
- t->svt_ ## N = NULL; \
- w->cb_ ## N = NULL; \
+#if VMG_THREADSAFE
+
+/* Number of interpreters that currently have this module loaded.  Guarded by
+ * VMG_LOADED_LOCK; the global mutexes below are created when it goes 0 -> 1
+ * (vmg_setup()) and destroyed when it drops back to 0 (vmg_teardown()). */
+static I32 vmg_loaded = 0;
+
+/* We must use pre-existing global mutexes from the perl core, or we would
+ * never be able to destroy them. */
+# if VMG_HAS_PERL(5, 9, 3)
+# define VMG_LOADED_LOCK MUTEX_LOCK(&PL_my_ctx_mutex)
+# define VMG_LOADED_UNLOCK MUTEX_UNLOCK(&PL_my_ctx_mutex)
+# else
+# define VMG_LOADED_LOCK OP_REFCNT_LOCK
+# define VMG_LOADED_UNLOCK OP_REFCNT_UNLOCK
+# endif
+
+/* Destroys the module's global mutexes.  Caller must hold VMG_LOADED_LOCK
+ * and must have checked that no interpreter still uses the module
+ * (vmg_loaded == 0). */
+static void vmg_global_teardown_late_locked(pTHX) {
+#define vmg_global_teardown_late_locked() vmg_global_teardown_late_locked(aTHX)
+ MUTEX_DESTROY(&vmg_op_name_init_mutex);
+ MUTEX_DESTROY(&vmg_vtable_refcount_mutex);
+
+ return;
+}
+
+/* 'free' magic callback: fires when its host SV (PL_strtab, attached in
+ * vmg_teardown()) is released, i.e. presumably very late in interpreter
+ * destruction.  Performs the deferred global teardown, unless another
+ * interpreter (re)loaded the module in the meantime. */
+static int vmg_global_teardown_free(pTHX_ SV *sv, MAGIC *mg) {
+ VMG_LOADED_LOCK;
+
+ if (vmg_loaded == 0)
+ vmg_global_teardown_late_locked();
+
+ VMG_LOADED_UNLOCK;
+
+ return 0;
+}
+
+/* Magic vtable whose only active slot is svt_free: attaching it to an SV
+ * schedules vmg_global_teardown_free() for when that SV is destroyed. */
+static MGVTBL vmg_global_teardown_vtbl = {
+ 0, /* get */
+ 0, /* set */
+ 0, /* len */
+ 0, /* clear */
+ vmg_global_teardown_free
+#if MGf_COPY
+ , 0 /* copy */
+#endif
+#if MGf_DUP
+ , 0 /* dup */
+#endif
+#if MGf_LOCAL
+ , 0 /* local */
+#endif
+};
+
+/* Returns the destruct level the current interpreter will be destroyed
+ * with, mirroring the logic of perl_destruct(): PL_perl_destruct_level,
+ * possibly raised by the PERL_DESTRUCT_LEVEL environment variable on
+ * DEBUGGING builds.  Used by vmg_teardown() to decide whether the global
+ * state can be freed immediately or must wait for late free magic. */
+static signed char vmg_destruct_level(pTHX) {
+#define vmg_destruct_level() vmg_destruct_level(aTHX)
+ signed char lvl;
+
+ lvl = PL_perl_destruct_level;
+
+#ifdef DEBUGGING
+ {
+ const char *s = PerlEnv_getenv("PERL_DESTRUCT_LEVEL");
+ if (s) {
+ int i;
+#if VMG_HAS_PERL(5, 21, 3)
+ /* Newer perls accept "-1" and parse with the grok_* helpers, exactly
+ * as perl_destruct() itself does. */
+ if (strEQ(s, "-1")) {
+ i = -1;
+ } else {
+# if VMG_HAS_PERL(5, 21, 10)
+ UV uv;
+ if (Perl_grok_atoUV(s, &uv, NULL) && uv <= INT_MAX)
+ i = (int) uv;
+ else
+ i = 0;
+# else /* VMG_HAS_PERL(5, 21, 3) && !VMG_HAS_PERL(5, 21, 10) */
+ i = Perl_grok_atou(s, NULL);
+# endif
+ }
+#else /* !VMG_HAS_PERL(5, 21, 3) */
+ i = atoi(s);
+#endif
+ if (lvl < i)
+ lvl = i;
+ }
}
+#endif
-/* --- XS ------------------------------------------------------------------ */
+ return lvl;
+}
-MODULE = Variable::Magic PACKAGE = Variable::Magic
+#endif /* VMG_THREADSAFE */
-PROTOTYPES: ENABLE
+/* Interpreter-destruction hook, registered with call_atexit() by
+ * vmg_setup().  On threaded builds it drops the global load count; the last
+ * interpreter out either destroys the global mutexes at once (destruct
+ * level 0, so nothing will run afterwards) or defers that by hanging free
+ * magic on PL_strtab (see vmg_global_teardown_free()).  It also releases
+ * any magic tokens whose freeing was postponed (MY_CXT.freed_tokens), but
+ * only when no callback appears to be in flight (depth == 0). */
+static void vmg_teardown(pTHX_ void *param) {
+ dMY_CXT;
-BOOT:
-{
- HV *stash;
+#if VMG_THREADSAFE
+ VMG_LOADED_LOCK;
+
+ if (vmg_loaded == 1) {
+ vmg_loaded = 0;
+ if (vmg_destruct_level() == 0) {
+ vmg_global_teardown_late_locked();
+ } else {
+ /* NOTE(review): PL_strtab can apparently be NULL by this point, so
+ * recreate it just to host the teardown magic - confirm against
+ * perl_destruct() ordering. */
+ if (!PL_strtab)
+ PL_strtab = newHV();
+ vmg_sv_magicext((SV *) PL_strtab, NULL, &vmg_global_teardown_vtbl, NULL, 0);
+ }
+ } else {
+ assert(vmg_loaded > 1);
+ --vmg_loaded;
+ }
+
+ VMG_LOADED_UNLOCK;
+#endif
+
+ if (MY_CXT.depth == 0 && MY_CXT.freed_tokens) {
+ vmg_magic_chain_free(MY_CXT.freed_tokens, NULL);
+ MY_CXT.freed_tokens = NULL;
+ }
+
+ return;
+}
+/* Per-interpreter initialisation, called once from BOOT.  Creates the
+ * MY_CXT context, registers this interpreter in the global load count
+ * (creating the shared mutexes on first load), resets the op-class stash
+ * cache and the ops trampolines, publishes the VMG_* constants into the
+ * package stash, and schedules vmg_teardown() via call_atexit(). */
+static void vmg_setup(pTHX) {
+#define vmg_setup() vmg_setup(aTHX)
+ HV *stash;
+ int c;
MY_CXT_INIT;
- MY_CXT.b__op_stashes[0] = NULL;
+
#if VMG_THREADSAFE
- MUTEX_INIT(&vmg_vtable_refcount_mutex);
- MUTEX_INIT(&vmg_op_name_init_mutex);
+ VMG_LOADED_LOCK;
+
+ /* First load in the process creates the global mutexes; later loads just
+ * bump the refcount. */
+ if (vmg_loaded == 0) {
+ MUTEX_INIT(&vmg_vtable_refcount_mutex);
+ MUTEX_INIT(&vmg_op_name_init_mutex);
+ vmg_loaded = 1;
+ } else {
+ assert(vmg_loaded > 0);
+ ++vmg_loaded;
+ }
+
+ VMG_LOADED_UNLOCK;
+#endif
+
+ for (c = OPc_NULL; c < OPc_MAX; ++c)
+ MY_CXT.b__op_stashes[c] = NULL;
+
+ MY_CXT.depth = 0;
+ MY_CXT.freed_tokens = NULL;
+
+#if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
+ vmg_trampoline_init(&MY_CXT.propagate_errsv, vmg_pp_propagate_errsv);
+#endif
+#if VMG_RESET_RMG_NEEDS_TRAMPOLINE
+ vmg_trampoline_init(&MY_CXT.reset_rmg, vmg_pp_reset_rmg);
#endif
stash = gv_stashpv(__PACKAGE__, 1);
newCONSTSUB(stash, "VMG_UVAR", newSVuv(VMG_UVAR));
newCONSTSUB(stash, "VMG_COMPAT_SCALAR_LENGTH_NOLEN",
newSVuv(VMG_COMPAT_SCALAR_LENGTH_NOLEN));
+ newCONSTSUB(stash, "VMG_COMPAT_SCALAR_NOLEN",
+ newSVuv(VMG_COMPAT_SCALAR_NOLEN));
newCONSTSUB(stash, "VMG_COMPAT_ARRAY_PUSH_NOLEN",
newSVuv(VMG_COMPAT_ARRAY_PUSH_NOLEN));
newCONSTSUB(stash, "VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID",
newSVuv(VMG_COMPAT_ARRAY_UNDEF_CLEAR));
newCONSTSUB(stash, "VMG_COMPAT_HASH_DELETE_NOUVAR_VOID",
newSVuv(VMG_COMPAT_HASH_DELETE_NOUVAR_VOID));
+ newCONSTSUB(stash, "VMG_COMPAT_CODE_COPY_CLONE",
+ newSVuv(VMG_COMPAT_CODE_COPY_CLONE));
newCONSTSUB(stash, "VMG_COMPAT_GLOB_GET", newSVuv(VMG_COMPAT_GLOB_GET));
newCONSTSUB(stash, "VMG_PERL_PATCHLEVEL", newSVuv(VMG_PERL_PATCHLEVEL));
newCONSTSUB(stash, "VMG_THREADSAFE", newSVuv(VMG_THREADSAFE));
newCONSTSUB(stash, "VMG_FORKSAFE", newSVuv(VMG_FORKSAFE));
newCONSTSUB(stash, "VMG_OP_INFO_NAME", newSVuv(VMG_OP_INFO_NAME));
newCONSTSUB(stash, "VMG_OP_INFO_OBJECT", newSVuv(VMG_OP_INFO_OBJECT));
+
+ /* Make sure we run last-gasp cleanup when this interpreter dies. */
+ call_atexit(vmg_teardown, NULL);
+
+ return;
+}
+
+/* --- Macros for the XS section ------------------------------------------- */
+
+/* VMG_CVOK(C): true iff the CV has a body - an XSUB pointer for XS subs or
+ * an optree root for pure-perl subs. */
+#ifdef CvISXSUB
+# define VMG_CVOK(C) \
+ ((CvISXSUB(C) ? (void *) CvXSUB(C) : (void *) CvROOT(C)) ? 1 : 0)
+#else
+# define VMG_CVOK(C) (CvROOT(C) || CvXSUB(C))
+#endif
+
+/* VMG_CBOK(S): S is an acceptable callback target - a CV with a body, or
+ * any other defined value. */
+#define VMG_CBOK(S) ((SvTYPE(S) == SVt_PVCV) ? VMG_CVOK(S) : SvOK(S))
+
+/* VMG_SET_CB(S, N): if S is a reference to a usable callback, store its
+ * referent (with the refcount bumped) in w->cb_N; otherwise store NULL. */
+#define VMG_SET_CB(S, N) { \
+ SV *cb = (S); \
+ if (SvOK(cb) && SvROK(cb)) { \
+ cb = SvRV(cb); \
+ if (VMG_CBOK(cb)) \
+ SvREFCNT_inc_simple_void(cb); \
+ else \
+ cb = NULL; \
+ } else { \
+ cb = NULL; \
+ } \
+ w->cb_ ## N = cb; \
+}
+
+/* VMG_SET_SVT_CB(S, N): like VMG_SET_CB, but also wires the matching slot
+ * of the magic vtable t - the real vmg_svt_N handler when a usable callback
+ * was given, the _noop variant for a defined-but-unusable one, and NULL
+ * when no callback was passed at all. */
+#define VMG_SET_SVT_CB(S, N) { \
+ SV *cb = (S); \
+ if (SvOK(cb) && SvROK(cb)) { \
+ cb = SvRV(cb); \
+ if (VMG_CBOK(cb)) { \
+ t->svt_ ## N = vmg_svt_ ## N; \
+ SvREFCNT_inc_simple_void(cb); \
+ } else { \
+ t->svt_ ## N = vmg_svt_ ## N ## _noop; \
+ cb = NULL; \
+ } \
+ } else { \
+ t->svt_ ## N = NULL; \
+ cb = NULL; \
+ } \
+ w->cb_ ## N = cb; \
+}
+
+/* --- XS ------------------------------------------------------------------ */
+
+MODULE = Variable::Magic PACKAGE = Variable::Magic
+
+PROTOTYPES: ENABLE
+
+BOOT:
+{
+ vmg_setup();
}
#if VMG_THREADSAFE
PROTOTYPE: DISABLE
PREINIT:
U32 had_b__op_stash = 0;
+ I32 old_depth;
int c;
PPCODE:
{
if (MY_CXT.b__op_stashes[c])
had_b__op_stash |= (((U32) 1) << c);
}
+ old_depth = MY_CXT.depth;
}
{
MY_CXT_CLONE;
MY_CXT.b__op_stashes[c] = (had_b__op_stash & (((U32) 1) << c))
? gv_stashpv(vmg_opclassnames[c], 1) : NULL;
}
+ MY_CXT.depth = old_depth;
+ MY_CXT.freed_tokens = NULL;
+ VMG_LOADED_LOCK;
+ assert(vmg_loaded > 0);
+ ++vmg_loaded;
+ VMG_LOADED_UNLOCK;
}
XSRETURN(0);
PREINIT:
vmg_wizard *w;
MGVTBL *t;
- SV *cb, *op_info, *copy_key;
+ SV *op_info, *copy_key;
I32 i = 0;
CODE:
if (items != 9
PREINIT:
const vmg_wizard *w = NULL;
SV **args = NULL;
- UV ret;
I32 i = 0;
CODE:
if (items > 2) {