STATIC SV *vmg_clone(pTHX_ SV *sv, tTHX owner) {
#define vmg_clone(P, O) vmg_clone(aTHX_ (P), (O))
+ SV *dupsv;
+
+#if VMG_HAS_PERL(5, 13, 2)
+ CLONE_PARAMS *param = Perl_clone_params_new(owner, aTHX);
+
+ dupsv = sv_dup(sv, param);
+
+ Perl_clone_params_del(param);
+#else
CLONE_PARAMS param;
param.stashes = NULL; /* don't need it unless sv is a PVHV */
param.flags = 0;
param.proto_perl = owner;
- return SvREFCNT_inc(sv_dup(sv, &param));
+ dupsv = sv_dup(sv, &param);
+#endif
+
+ return SvREFCNT_inc(dupsv);
}
#endif /* VMG_THREADSAFE */
# define IN_PERL_COMPILETIME (PL_curcop == &PL_compiling)
#endif
-#if VMG_HAS_PERL(5, 10, 0) || defined(PL_parser)
-# ifndef PL_error_count
-# define PL_error_count PL_parser->error_count
-# endif
-#else
-# ifndef PL_error_count
-# define PL_error_count PL_Ierror_count
-# endif
-#endif
-
-/* uvar magic and Hash::Util::FieldHash were commited with 28419, but only
- * enable it on 5.10 */
+/* uvar magic and Hash::Util::FieldHash were committed with 28419, but we only
+ * enable them on 5.10 */
#if VMG_HAS_PERL(5, 10, 0)
# define VMG_UVAR 1
#else
* reverted to dev-5.11 as 9cdcb38b */
#if VMG_HAS_PERL_MAINT(5, 8, 9, 28160) || VMG_HAS_PERL_MAINT(5, 9, 3, 25854) || VMG_HAS_PERL(5, 10, 0)
# ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN
-/* This branch should only apply for perls before the official 5.11.0 release.
- * Makefile.PL takes care of the higher ones. */
-# define VMG_COMPAT_ARRAY_PUSH_NOLEN 1
+# if VMG_HAS_PERL(5, 11, 0)
+# define VMG_COMPAT_ARRAY_PUSH_NOLEN 0
+# else
+# define VMG_COMPAT_ARRAY_PUSH_NOLEN 1
+# endif
# endif
# ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID
# define VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID 1
#endif
/* Applied to dev-5.11 as 34908 */
-#if VMG_HAS_PERL_MAINT(5, 11, 0, 34908)
+#if VMG_HAS_PERL_MAINT(5, 11, 0, 34908) || VMG_HAS_PERL(5, 12, 0)
# define VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID 1
#else
# define VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID 0
# define VMG_COMPAT_ARRAY_UNDEF_CLEAR 0
#endif
-#if VMG_HAS_PERL_MAINT(5, 11, 0, 32969)
+#if VMG_HAS_PERL_MAINT(5, 11, 0, 32969) || VMG_HAS_PERL(5, 12, 0)
# define VMG_COMPAT_SCALAR_LENGTH_NOLEN 1
#else
# define VMG_COMPAT_SCALAR_LENGTH_NOLEN 0
#endif
+#if VMG_HAS_PERL(5, 13, 2)
+# define VMG_COMPAT_GLOB_GET 1
+#else
+# define VMG_COMPAT_GLOB_GET 0
+#endif
+
+/* ... Bug-free mg_magical ................................................. */
+
+/* See the discussion at http://www.xray.mpe.mpg.de/mailing-lists/perl5-porters/2008-01/msg00036.html. This version is specialized to our needs. */
+
#if VMG_UVAR
-/* Bug-free mg_magical - see http://www.xray.mpe.mpg.de/mailing-lists/perl5-porters/2008-01/msg00036.html - but specialized to our needs. */
STATIC void vmg_sv_magicuvar(pTHX_ SV *sv, const char *uf, I32 len) {
#define vmg_sv_magicuvar(S, U, L) vmg_sv_magicuvar(aTHX_ (S), (U), (L))
const MAGIC* mg;
#endif /* VMG_UVAR */
+/* ... Safe version of call_sv() ........................................... */
+
+#define VMG_SAVE_LAST_CX (!VMG_HAS_PERL(5, 8, 4) || VMG_HAS_PERL(5, 9, 5))
+
+STATIC I32 vmg_call_sv(pTHX_ SV *sv, I32 flags, I32 destructor) {
+#define vmg_call_sv(S, F, D) vmg_call_sv(aTHX_ (S), (F), (D))
+ I32 ret, cxix = 0, in_eval = 0;
+#if VMG_SAVE_LAST_CX
+ PERL_CONTEXT saved_cx;
+#endif
+ SV *old_err = NULL;
+
+ if (SvTRUE(ERRSV)) {
+ old_err = ERRSV;
+ ERRSV = newSV(0);
+ }
+
+ if (cxstack_ix < cxstack_max) {
+ cxix = cxstack_ix + 1;
+ if (destructor && CxTYPE(cxstack + cxix) == CXt_EVAL)
+ in_eval = 1;
+ }
+
+#if VMG_SAVE_LAST_CX
+ /* The last popped context will be reused by call_sv(), but our callers may
+ * still need its previous value. Back it up so that it isn't clobbered. */
+ saved_cx = cxstack[cxix];
+#endif
+
+ ret = call_sv(sv, flags | G_EVAL);
+
+#if VMG_SAVE_LAST_CX
+ cxstack[cxix] = saved_cx;
+#endif
+
+ if (SvTRUE(ERRSV)) {
+ if (old_err) {
+ sv_setsv(old_err, ERRSV);
+ SvREFCNT_dec(ERRSV);
+ ERRSV = old_err;
+ }
+ if (IN_PERL_COMPILETIME) {
+ if (!PL_in_eval) {
+ if (PL_errors)
+ sv_catsv(PL_errors, ERRSV);
+ else
+ Perl_warn(aTHX_ "%s", SvPV_nolen(ERRSV));
+ SvCUR_set(ERRSV, 0);
+ }
+#if VMG_HAS_PERL(5, 10, 0) || defined(PL_parser)
+ if (PL_parser)
+ ++PL_parser->error_count;
+#elif defined(PL_error_count)
+ ++PL_error_count;
+#else
+ ++PL_Ierror_count;
+#endif
+ } else if (!in_eval)
+ croak(NULL);
+ } else {
+ if (old_err) {
+ SvREFCNT_dec(ERRSV);
+ ERRSV = old_err;
+ }
+ }
+
+ return ret;
+}
+
/* --- Stolen chunk of B --------------------------------------------------- */
typedef enum {
if (!w)
return;
- if (w->cb_data) SvREFCNT_dec(w->cb_data);
- if (w->cb_get) SvREFCNT_dec(w->cb_get);
- if (w->cb_set) SvREFCNT_dec(w->cb_set);
- if (w->cb_len) SvREFCNT_dec(w->cb_len);
- if (w->cb_clear) SvREFCNT_dec(w->cb_clear);
- if (w->cb_free) SvREFCNT_dec(w->cb_free);
+ /* We reach this point in dirty state when ptable_free() is called from the
+ * atexit cleanup callback while the global table still holds a live
+ * wizard. This happens before all the SV bodies are freed, so all the wizard
+ * callbacks are still alive (as they are referenced by the undead wizard).
+ * Hence it is safe to decrement their refcount. Later on, the wizard free
+ * callback itself will trigger when the wizard body is reaped, but it will
+ * be skipped as it guards against dirty state - which is good since nothing
+ * has to be done anymore at that point. */
+
+ SvREFCNT_dec(w->cb_data);
+ SvREFCNT_dec(w->cb_get);
+ SvREFCNT_dec(w->cb_set);
+ SvREFCNT_dec(w->cb_len);
+ SvREFCNT_dec(w->cb_clear);
+ SvREFCNT_dec(w->cb_free);
#if MGf_COPY
- if (w->cb_copy) SvREFCNT_dec(w->cb_copy);
+ SvREFCNT_dec(w->cb_copy);
#endif /* MGf_COPY */
#if 0 /* MGf_DUP */
- if (w->cb_dup) SvREFCNT_dec(w->cb_dup);
+ SvREFCNT_dec(w->cb_dup);
#endif /* MGf_DUP */
#if MGf_LOCAL
- if (w->cb_local) SvREFCNT_dec(w->cb_local);
+ SvREFCNT_dec(w->cb_local);
#endif /* MGf_LOCAL */
#if VMG_UVAR
- if (w->cb_fetch) SvREFCNT_dec(w->cb_fetch);
- if (w->cb_store) SvREFCNT_dec(w->cb_store);
- if (w->cb_exists) SvREFCNT_dec(w->cb_exists);
- if (w->cb_delete) SvREFCNT_dec(w->cb_delete);
+ SvREFCNT_dec(w->cb_fetch);
+ SvREFCNT_dec(w->cb_store);
+ SvREFCNT_dec(w->cb_exists);
+ SvREFCNT_dec(w->cb_delete);
#endif /* VMG_UVAR */
Safefree(w->vtbl);
PUSHs(args[i]);
PUTBACK;
- call_sv(ctor, G_SCALAR);
+ vmg_call_sv(ctor, G_SCALAR, 0);
SPAGAIN;
nsv = POPs;
XPUSHs(vmg_op_info(opinfo));
PUTBACK;
- call_sv(cb, G_SCALAR);
+ vmg_call_sv(cb, G_SCALAR, 0);
SPAGAIN;
svr = POPs;
XPUSHs(vmg_op_info(opinfo));
PUTBACK;
- call_sv(w->cb_len, G_SCALAR);
+ vmg_call_sv(w->cb_len, G_SCALAR, 0);
SPAGAIN;
svr = POPs;
ret = SvOK(svr) ? (U32) SvUV(svr) : len;
if (t == SVt_PVAV)
- --ret;
+ --ret;
PUTBACK;
FREETMPS;
STATIC int vmg_svt_free(pTHX_ SV *sv, MAGIC *mg) {
const MGWIZ *w;
-#if VMG_HAS_PERL(5, 9, 5)
- PERL_CONTEXT saved_cx;
- I32 cxix;
-#endif
- I32 had_err, has_err, flags = G_SCALAR | G_EVAL;
int ret = 0;
SV *svr;
/* So that it survives the temp cleanup below */
SvREFCNT_inc_simple_void(sv);
-#if !VMG_HAS_PERL_MAINT(5, 11, 0, 32686)
+#if !(VMG_HAS_PERL_MAINT(5, 11, 0, 32686) || VMG_HAS_PERL(5, 12, 0))
/* The previous magic tokens were freed but the magic chain wasn't updated, so
* if you access the sv from the callback the old deleted magics will trigger
* and cause memory misreads. Change 32686 solved it that way : */
XPUSHs(vmg_op_info(w->opinfo));
PUTBACK;
- had_err = SvTRUE(ERRSV);
- if (had_err)
- flags |= G_KEEPERR;
-
-#if VMG_HAS_PERL(5, 9, 5)
- /* This context should not be used anymore, but since we croak in places the
- * core doesn't even dare to, some pointers to it may remain in the upper call
- * stack. Make sure call_sv() doesn't clobber it. */
- if (cxstack_ix < cxstack_max)
- cxix = cxstack_ix + 1;
- else
- cxix = Perl_cxinc(aTHX);
- saved_cx = cxstack[cxix];
-#endif
-
- call_sv(w->cb_free, flags);
-
-#if VMG_HAS_PERL(5, 9, 5)
- cxstack[cxix] = saved_cx;
-#endif
-
- has_err = SvTRUE(ERRSV);
- if (IN_PERL_COMPILETIME && !had_err && has_err)
- ++PL_error_count;
+ vmg_call_sv(w->cb_free, G_SCALAR, 1);
SPAGAIN;
svr = POPs;
#if MGf_COPY
STATIC int vmg_svt_copy(pTHX_ SV *sv, MAGIC *mg, SV *nsv, const char *key,
-# if VMG_HAS_PERL_MAINT(5, 11, 0, 33256)
+# if VMG_HAS_PERL_MAINT(5, 11, 0, 33256) || VMG_HAS_PERL(5, 12, 0)
I32 keylen
# else
int keylen
newSVuv(VMG_COMPAT_ARRAY_UNDEF_CLEAR));
newCONSTSUB(stash, "VMG_COMPAT_SCALAR_LENGTH_NOLEN",
newSVuv(VMG_COMPAT_SCALAR_LENGTH_NOLEN));
+ newCONSTSUB(stash, "VMG_COMPAT_GLOB_GET", newSVuv(VMG_COMPAT_GLOB_GET));
newCONSTSUB(stash, "VMG_PERL_PATCHLEVEL", newSVuv(VMG_PERL_PATCHLEVEL));
newCONSTSUB(stash, "VMG_THREADSAFE", newSVuv(VMG_THREADSAFE));
newCONSTSUB(stash, "VMG_FORKSAFE", newSVuv(VMG_FORKSAFE));