#define PERL_NO_GET_CONTEXT
#include "EXTERN.h"
-#include "perl.h"
+#include "perl.h"
#include "XSUB.h"
#define __PACKAGE__ "Scope::Upper"
# define dNOOP
#endif
+#ifndef dVAR
+# define dVAR dNOOP
+#endif
+
+#ifndef MUTABLE_SV
+# define MUTABLE_SV(S) ((SV *) (S))
+#endif
+
+#ifndef MUTABLE_AV
+# define MUTABLE_AV(A) ((AV *) (A))
+#endif
+
+#ifndef MUTABLE_CV
+# define MUTABLE_CV(C) ((CV *) (C))
+#endif
+
#ifndef PERL_UNUSED_VAR
# define PERL_UNUSED_VAR(V)
#endif
# define SU_POISON(D, N, T) NOOP
#endif
+#ifndef newSV_type
+/* Compatibility shim for perls that lack newSV_type(): allocate a fresh SV
+ * and upgrade it to the requested type in two steps. */
+STATIC SV *su_newSV_type(pTHX_ svtype t) {
+ SV *sv = newSV(0);
+ SvUPGRADE(sv, t);
+ return sv;
+}
+# define newSV_type(T) su_newSV_type(aTHX_ (T))
+#endif
+
#ifndef SvPV_const
# define SvPV_const(S, L) SvPV(S, L)
#endif
+#ifndef SvPVX_const
+# define SvPVX_const(S) SvPVX(S)
+#endif
+
#ifndef SvPV_nolen_const
# define SvPV_nolen_const(S) SvPV_nolen(S)
#endif
#ifndef SvREFCNT_inc_simple_void
-# define SvREFCNT_inc_simple_void(sv) SvREFCNT_inc(sv)
+# define SvREFCNT_inc_simple_void(sv) ((void) SvREFCNT_inc(sv))
+#endif
+
+#ifndef mPUSHi
+# define mPUSHi(I) PUSHs(sv_2mortal(newSViv(I)))
#endif
#ifndef GvCV_set
# define CvGV_set(C, G) (CvGV(C) = (G))
#endif
+#ifndef CvSTASH_set
+# define CvSTASH_set(C, S) (CvSTASH(C) = (S))
+#endif
+
+#ifndef CvISXSUB
+# define CvISXSUB(C) CvXSUB(C)
+#endif
+
+#ifndef PADLIST_ARRAY
+# define PADLIST_ARRAY(P) AvARRAY(P)
+#endif
+
#ifndef CxHASARGS
# define CxHASARGS(C) ((C)->blk_sub.hasargs)
#endif
# define gv_fetchpvn_flags(A, B, C, D) gv_fetchpv((A), (C), (D))
#endif
-#ifndef cv_clone
-# define cv_clone(P) Perl_cv_clone(aTHX_ (P))
-#endif
-
#ifndef PERL_MAGIC_tied
# define PERL_MAGIC_tied 'P'
#endif
# define MY_CXT_CLONE NOOP
#endif
-/* --- uplevel() data tokens ----------------------------------------------- */
+/* --- Unique context ID global storage ------------------------------------ */
+
+/* ... Sequence ID counter ................................................. */
typedef struct {
- void *next;
+ UV *seqs;
+ STRLEN size;
+} su_uv_array;
- I32 cxix;
- CV *target;
- bool died;
+STATIC su_uv_array su_uid_seq_counter;
- PERL_SI *si;
- PERL_SI *old_curstackinfo;
- AV *old_mainstack;
+#ifdef USE_ITHREADS
+
+STATIC perl_mutex su_uid_seq_counter_mutex;
+
+#define SU_LOCK(M) MUTEX_LOCK(M)
+#define SU_UNLOCK(M) MUTEX_UNLOCK(M)
+
+#else /* USE_ITHREADS */
+
+#define SU_LOCK(M)
+#define SU_UNLOCK(M)
+
+#endif /* !USE_ITHREADS */
+
+/* Return the next UID sequence number for the given context depth.
+ * The per-depth counters live in the process-global su_uid_seq_counter,
+ * which is grown lazily (new slots zeroed) and shared across threads;
+ * SU_LOCK/SU_UNLOCK guard it under ithreads and are no-ops otherwise.
+ * Note: counters start at 1, since the slot is pre-incremented. */
+STATIC UV su_uid_seq_next(pTHX_ UV depth) {
+#define su_uid_seq_next(D) su_uid_seq_next(aTHX_ (D))
+ UV seq;
+ UV *seqs;
+
+ SU_LOCK(&su_uid_seq_counter_mutex);
+
+ seqs = su_uid_seq_counter.seqs;
+
+ if (depth >= su_uid_seq_counter.size) {
+  UV i;
+
+  /* PerlMemShared_* so the array survives across ithreads. */
+  seqs = PerlMemShared_realloc(seqs, (depth + 1) * sizeof(UV));
+  for (i = su_uid_seq_counter.size; i <= depth; ++i)
+   seqs[i] = 0;
+
+  su_uid_seq_counter.seqs = seqs;
+  su_uid_seq_counter.size = depth + 1;
+ }
+
+ seq = ++seqs[depth];
+
+ SU_UNLOCK(&su_uid_seq_counter_mutex);
+
+ return seq;
+}
+
+/* ... UID storage ......................................................... */
+
+typedef struct {
+ UV seq;
+ U32 flags;
+} su_uid;
+
+#define SU_UID_ACTIVE 1
+
+/* Compute the absolute depth of context cxix across the whole stackinfo
+ * chain: the index in the current stack plus the number of contexts in every
+ * lower (si_prev) stackinfo. This makes the depth unique interpreter-wide,
+ * not just within the current stack. */
+STATIC UV su_uid_depth(pTHX_ I32 cxix) {
+#define su_uid_depth(I) su_uid_depth(aTHX_ (I))
+ const PERL_SI *si;
+ UV depth;
+
+ depth = cxix;
+ for (si = PL_curstackinfo->si_prev; si; si = si->si_prev)
+  depth += si->si_cxix + 1;
+
+ return depth;
+}
+
+typedef struct {
+ su_uid **map;
+ STRLEN used;
+ STRLEN alloc;
+} su_uid_storage;
+
+/* Duplicate the per-depth UID map from old_cxt into new_cxt, considering at
+ * most max_depth entries. Active UIDs (SU_UID_ACTIVE set) are copied by
+ * value into new_cxt (allocating slots as needed); every other slot already
+ * present in new_cxt is deactivated. new_cxt's map may be recycled from a
+ * previous use, hence the careful realloc/clear logic. */
+STATIC void su_uid_storage_dup(pTHX_ su_uid_storage *new_cxt, const su_uid_storage *old_cxt, UV max_depth) {
+#define su_uid_storage_dup(N, O, D) su_uid_storage_dup(aTHX_ (N), (O), (D))
+ su_uid **old_map = old_cxt->map;
+
+ if (old_map) {
+  su_uid **new_map = new_cxt->map;
+  STRLEN old_used = old_cxt->used;
+  STRLEN old_alloc = old_cxt->alloc;
+  STRLEN new_used, new_alloc;
+  STRLEN i;
+
+  /* Only depths up to max_depth (the uplevel target) are relevant. */
+  new_used = max_depth < old_used ? max_depth : old_used;
+  new_cxt->used = new_used;
+
+  if (new_used <= new_cxt->alloc)
+   new_alloc = new_cxt->alloc;
+  else {
+   /* Grow the destination map; zero the fresh slots. */
+   new_alloc = new_used;
+   Renew(new_map, new_alloc, su_uid *);
+   for (i = new_cxt->alloc; i < new_alloc; ++i)
+    new_map[i] = NULL;
+   new_cxt->map = new_map;
+   new_cxt->alloc = new_alloc;
+  }
+
+  for (i = 0; i < new_alloc; ++i) {
+   su_uid *new_uid = new_map[i];
+
+   if (i < new_used) { /* => i < max_depth && i < old_used */
+    su_uid *old_uid = old_map[i];
+
+    if (old_uid && (old_uid->flags & SU_UID_ACTIVE)) {
+     /* Copy the active UID, allocating a slot if this depth had none. */
+     if (!new_uid) {
+      Newx(new_uid, 1, su_uid);
+      new_map[i] = new_uid;
+     }
+     *new_uid = *old_uid;
+     continue;
+    }
+   }
+
+   /* Not copied from old_cxt: make sure any leftover UID is inactive. */
+   if (new_uid)
+    new_uid->flags &= ~SU_UID_ACTIVE;
+  }
+ }
+
+ return;
+}
+
+/* --- unwind() global storage --------------------------------------------- */
+
+typedef struct {
+ I32 cxix;
+ I32 items;
+ SV **savesp;
+ LISTOP return_op;
+ OP proxy_op;
+} su_unwind_storage;
+
+/* --- uplevel() data tokens and global storage ---------------------------- */
+
+#define SU_UPLEVEL_HIJACKS_RUNOPS SU_HAS_PERL(5, 8, 0)
+
+typedef struct {
+ void *next;
+
+ su_uid_storage tmp_uid_storage;
+ su_uid_storage old_uid_storage;
+
+ I32 cxix;
+
+ I32 target_depth;
+ CV *target;
- I32 old_depth;
- COP *old_curcop;
+ CV *callback;
+ CV *renamed;
- bool old_catch;
- OP *old_op;
- CV *cloned_cv;
+ PERL_SI *si;
+ PERL_SI *old_curstackinfo;
+ AV *old_mainstack;
+
+ COP *old_curcop;
+
+ OP *old_op;
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+ runops_proc_t old_runops;
+#endif
+ bool old_catch;
+
+ bool died;
} su_uplevel_ud;
STATIC su_uplevel_ud *su_uplevel_ud_new(pTHX) {
Newx(sud, 1, su_uplevel_ud);
sud->next = NULL;
+ sud->tmp_uid_storage.map = NULL;
+ sud->tmp_uid_storage.used = 0;
+ sud->tmp_uid_storage.alloc = 0;
+
Newx(si, 1, PERL_SI);
si->si_stack = newAV();
AvREAL_off(si->si_stack);
si->si_cxstack = NULL;
+ si->si_cxmax = 0;
+
sud->si = si;
return sud;
Safefree(si->si_cxstack);
SvREFCNT_dec(si->si_stack);
Safefree(si);
+
+ if (sud->tmp_uid_storage.map) {
+ su_uid **map = sud->tmp_uid_storage.map;
+ STRLEN alloc = sud->tmp_uid_storage.alloc;
+ STRLEN i;
+
+ for (i = 0; i < alloc; ++i)
+ Safefree(map[i]);
+
+ Safefree(map);
+ }
+
Safefree(sud);
return;
}
typedef struct {
+ su_uplevel_ud *top;
su_uplevel_ud *root;
I32 count;
} su_uplevel_storage;
#define MY_CXT_KEY __PACKAGE__ "::_guts" XS_VERSION
typedef struct {
- char *stack_placeholder;
-
- I32 cxix;
- I32 items;
- SV **savesp;
- LISTOP return_op;
- OP proxy_op;
-
- su_uplevel_storage uplevel_storage;
+ char *stack_placeholder;
+ su_unwind_storage unwind_storage;
+ su_uplevel_storage uplevel_storage;
+ su_uid_storage uid_storage;
} my_cxt_t;
START_MY_CXT
/* ... Reap ................................................................ */
+#define SU_SAVE_LAST_CX (!SU_HAS_PERL(5, 8, 4) || (SU_HAS_PERL(5, 9, 5) && !SU_HAS_PERL(5, 14, 0)) || SU_HAS_PERL(5, 15, 0))
+
typedef struct {
su_ud_common ci;
SV *cb;
STATIC void su_call(pTHX_ void *ud_) {
su_ud_reap *ud = (su_ud_reap *) ud_;
-#if SU_HAS_PERL(5, 9, 5)
- PERL_CONTEXT saved_cx;
+#if SU_SAVE_LAST_CX
I32 cxix;
-#endif
+ PERL_CONTEXT saved_cx;
+#endif /* SU_SAVE_LAST_CX */
dSP;
PUSHMARK(SP);
PUTBACK;
+#if SU_SAVE_LAST_CX
/* If the recently popped context isn't saved there, it will be overwritten by
* the sub scope from call_sv, although it's still needed in our caller. */
-
-#if SU_HAS_PERL(5, 9, 5)
- if (cxstack_ix < cxstack_max)
- cxix = cxstack_ix + 1;
- else
- cxix = Perl_cxinc(aTHX);
+ cxix = (cxstack_ix < cxstack_max) ? (cxstack_ix + 1) : Perl_cxinc(aTHX);
saved_cx = cxstack[cxix];
-#endif
+#endif /* SU_SAVE_LAST_CX */
call_sv(ud->cb, G_VOID);
-#if SU_HAS_PERL(5, 9, 5)
+#if SU_SAVE_LAST_CX
cxstack[cxix] = saved_cx;
-#endif
+#endif /* SU_SAVE_LAST_CX */
PUTBACK;
STATIC void su_unwind(pTHX_ void *ud_) {
dMY_CXT;
- I32 cxix = MY_CXT.cxix;
- I32 items = MY_CXT.items - 1;
- SV **savesp = MY_CXT.savesp;
+ I32 cxix = MY_CXT.unwind_storage.cxix;
+ I32 items = MY_CXT.unwind_storage.items - 1;
+ SV **savesp = MY_CXT.unwind_storage.savesp;
I32 mark;
PERL_UNUSED_VAR(ud_);
items, PL_stack_sp - PL_stack_base, *PL_markstack_ptr, mark);
});
- PL_op = (OP *) &(MY_CXT.return_op);
+ PL_op = (OP *) &(MY_CXT.unwind_storage.return_op);
PL_op = PL_op->op_ppaddr(aTHX);
*PL_markstack_ptr = mark;
- MY_CXT.proxy_op.op_next = PL_op;
- PL_op = &(MY_CXT.proxy_op);
+ MY_CXT.unwind_storage.proxy_op.op_next = PL_op;
+ PL_op = &(MY_CXT.unwind_storage.proxy_op);
}
/* --- Uplevel ------------------------------------------------------------- */
#define SU_UPLEVEL_SAVE(f, t) STMT_START { sud->old_##f = PL_##f; PL_##f = (t); } STMT_END
#define SU_UPLEVEL_RESTORE(f) STMT_START { PL_##f = sud->old_##f; } STMT_END
-STATIC int su_uplevel_restore_free(pTHX_ SV *sv, MAGIC *mg) {
- su_uplevel_ud_delete((su_uplevel_ud *) mg->mg_ptr);
+/* Get a su_uplevel_ud for an uplevel call to context cxix: reuse one from
+ * the free list (uplevel_storage.root) if available, otherwise allocate.
+ * The returned ud is pushed onto the in-use stack (uplevel_storage.top).
+ * The current UID storage is saved in old_uid_storage and replaced by a
+ * duplicate truncated to the target depth, so UIDs above the target do not
+ * leak into the uplevel'd frame. Paired with su_uplevel_storage_delete(). */
+STATIC su_uplevel_ud *su_uplevel_storage_new(pTHX_ I32 cxix) {
+#define su_uplevel_storage_new(I) su_uplevel_storage_new(aTHX_ (I))
+ su_uplevel_ud *sud;
+ UV depth;
+ dMY_CXT;
+
+ sud = MY_CXT.uplevel_storage.root;
+ if (sud) {
+  MY_CXT.uplevel_storage.root = sud->next;
+  MY_CXT.uplevel_storage.count--;
+ } else {
+  sud = su_uplevel_ud_new();
+ }
+
+ sud->next = MY_CXT.uplevel_storage.top;
+ MY_CXT.uplevel_storage.top = sud;
+
+ depth = su_uid_depth(cxix);
+ su_uid_storage_dup(&sud->tmp_uid_storage, &MY_CXT.uid_storage, depth);
+ sud->old_uid_storage = MY_CXT.uid_storage;
+ MY_CXT.uid_storage = sud->tmp_uid_storage;
+
+ return sud;
+}
+
+/* Tear down the uplevel storage set up by su_uplevel_storage_new(): restore
+ * the interpreter's previous UID storage, deactivate every UID that was
+ * created while the temporary storage was current, pop the ud off the
+ * in-use stack, and either free it or park it on the free list (bounded by
+ * SU_UPLEVEL_STORAGE_SIZE) for reuse. */
+STATIC void su_uplevel_storage_delete(pTHX_ su_uplevel_ud *sud) {
+#define su_uplevel_storage_delete(S) su_uplevel_storage_delete(aTHX_ (S))
+ dMY_CXT;
+
+ sud->tmp_uid_storage = MY_CXT.uid_storage;
+ MY_CXT.uid_storage = sud->old_uid_storage;
+ {
+  su_uid **map;
+  UV i, alloc;
+  map = sud->tmp_uid_storage.map;
+  alloc = sud->tmp_uid_storage.alloc;
+  for (i = 0; i < alloc; ++i) {
+   if (map[i])
+    /* Clear the ACTIVE bit (was "&= SU_UID_ACTIVE", which instead kept
+     * only that bit and left active UIDs active — cf. su_uid_drop()). */
+    map[i]->flags &= ~SU_UID_ACTIVE;
+  }
+ }
+ MY_CXT.uplevel_storage.top = sud->next;
+
+ if (MY_CXT.uplevel_storage.count >= SU_UPLEVEL_STORAGE_SIZE) {
+  su_uplevel_ud_delete(sud);
+ } else {
+  sud->next = MY_CXT.uplevel_storage.root;
+  MY_CXT.uplevel_storage.root = sud;
+  MY_CXT.uplevel_storage.count++;
+ }
+}
+
+/* Statically scan an optree (following op_sibling at each level) for an
+ * OP_GOTO, returning 1 as soon as one is found. Subtrees rooted at
+ * eval/try leave-ops are not descended into, since goto out of an eval is
+ * forbidden at run time anyway. Used to decide whether the uplevel'd code
+ * needs the goto-aware runloop. */
+STATIC int su_uplevel_goto_static(const OP *o) {
+ for (; o; o = o->op_sibling) {
+  /* goto ops are unops with kids. */
+  if (!(o->op_flags & OPf_KIDS))
+   continue;
+
+  switch (o->op_type) {
+   case OP_LEAVEEVAL:
+   case OP_LEAVETRY:
+    /* Don't care about gotos inside eval, as they are forbidden at run time. */
+    break;
+   case OP_GOTO:
+    return 1;
+   default:
+    /* Recurse into this op's children. */
+    if (su_uplevel_goto_static(cUNOPo->op_first))
+     return 1;
+    break;
+  }
+ }
+
+ return 0;
+}
+
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+
+/* Custom runloop installed while executing uplevel'd code that contains a
+ * goto. Before each OP_GOTO is dispatched, locate the argarray of the
+ * nearest enclosing sub context (stopping at eval/format boundaries); if
+ * that context is the uplevel'd frame itself, refresh its argarray from the
+ * current @_ (GvAV(PL_defgv)) so the goto'd sub sees the right arguments.
+ * Otherwise behaves like the standard runloop. */
+STATIC int su_uplevel_goto_runops(pTHX) {
+#define su_uplevel_goto_runops() su_uplevel_goto_runops(aTHX)
+ register OP *op;
+ dVAR;
+
+ op = PL_op;
+ do {
+  if (op->op_type == OP_GOTO) {
+   AV *argarray = NULL;
+   I32 cxix;
+
+   /* Walk the context stack down to the closest sub frame with args. */
+   for (cxix = cxstack_ix; cxix >= 0; --cxix) {
+    const PERL_CONTEXT *cx = cxstack + cxix;
+
+    switch (CxTYPE(cx)) {
+     case CXt_SUB:
+      if (CxHASARGS(cx)) {
+       argarray = cx->blk_sub.argarray;
+       goto done;
+      }
+      break;
+     case CXt_EVAL:
+     case CXt_FORMAT:
+      goto done;
+     default:
+      break;
+    }
+   }
+
+done:
+   if (argarray) {
+    dMY_CXT;
+
+    /* Only fix up @_ when the goto happens at the uplevel'd frame. */
+    if (MY_CXT.uplevel_storage.top->cxix == cxix) {
+     AV *args = GvAV(PL_defgv);
+     I32 items = AvFILLp(args);
+
+     av_extend(argarray, items);
+     Copy(AvARRAY(args), AvARRAY(argarray), items + 1, SV *);
+     AvFILLp(argarray) = items;
+    }
+   }
+  }
+
+  PL_op = op = op->op_ppaddr(aTHX);
+
+#if !SU_HAS_PERL(5, 13, 0)
+  PERL_ASYNC_CHECK();
+#endif
+ } while (op);
+
+ TAINT_NOT;
return 0;
}
-STATIC MGVTBL su_uplevel_restore_vtbl = {
- 0,
- 0,
- 0,
- 0,
- su_uplevel_restore_free
-};
+#endif /* SU_UPLEVEL_HIJACKS_RUNOPS */
+
+#define su_at_underscore(C) AvARRAY(PADLIST_ARRAY(CvPADLIST(C))[CvDEPTH(C)])[0]
STATIC void su_uplevel_restore(pTHX_ void *sus_) {
su_uplevel_ud *sud = sus_;
PERL_SI *cur = sud->old_curstackinfo;
PERL_SI *si = sud->si;
- dMY_CXT;
- /* When we reach this place, POPSUB has already been called (with our fake
- * argarray). GvAV(PL_defgv) points to the savearray (that is, what @_ was
- * before uplevel). argarray is either the fake AV we created in su_uplevel()
- * or some empty replacement POPSUB creates when @_ is reified. In both cases
- * we have to destroy it before the context stack is swapped back to its
- * original state. */
- SvREFCNT_dec(cxstack[sud->cxix].blk_sub.argarray);
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+ if (PL_runops == su_uplevel_goto_runops)
+ PL_runops = sud->old_runops;
+#endif
- CATCH_SET(sud->old_catch);
+ if (sud->callback) {
+ PERL_CONTEXT *cx = cxstack + sud->cxix;
+ AV *argarray = MUTABLE_AV(su_at_underscore(sud->callback));
+
+ /* We have to fix the pad entry for @_ in the original callback because it
+ * may have been reified. */
+ if (AvREAL(argarray)) {
+ const I32 fill = AvFILLp(argarray);
+ SvREFCNT_dec(argarray);
+ argarray = newAV();
+ AvREAL_off(argarray);
+ AvREIFY_on(argarray);
+ av_extend(argarray, fill);
+ su_at_underscore(sud->callback) = MUTABLE_SV(argarray);
+ } else {
+ CLEAR_ARGARRAY(argarray);
+ }
- SvREFCNT_dec(sud->cloned_cv);
+ /* If the old cv member is our renamed CV, it means that this place has been
+ * reached without a goto() happening, and the old argarray member is
+ * actually our fake argarray. Destroy it properly in that case. */
+ if (cx->blk_sub.cv == sud->renamed) {
+ SvREFCNT_dec(cx->blk_sub.argarray);
+ cx->blk_sub.argarray = argarray;
+ }
+
+ CvDEPTH(sud->callback)--;
+ SvREFCNT_dec(sud->callback);
+ }
+
+ /* Free the renamed CV. We must do it ourselves so that we can force the
+ * depth to be 0, or perl would complain about it being "still in use".
+ * But we *know* that it cannot be so. */
+ if (sud->renamed) {
+ CvDEPTH(sud->renamed) = 0;
+ CvPADLIST(sud->renamed) = NULL;
+ SvREFCNT_dec(sud->renamed);
+ }
+
+ CATCH_SET(sud->old_catch);
SU_UPLEVEL_RESTORE(op);
SU_UPLEVEL_RESTORE(curstackinfo);
if (sud->died) {
- CV *target_cv = sud->target;
+ CV *target = sud->target;
I32 levels = 0, i;
/* When we die, the depth of the target CV is not updated because of the
register const PERL_CONTEXT *cx = cxstack + i;
if (CxTYPE(cx) == CXt_SUB) {
- if (cx->blk_sub.cv == target_cv)
+ if (cx->blk_sub.cv == target)
++levels;
}
}
switch (CxTYPE(cx)) {
case CXt_SUB:
- if (cx->blk_sub.cv == target_cv)
+ if (cx->blk_sub.cv == target)
++levels;
break;
case CXt_EVAL:
}
found_it:
- CvDEPTH(target_cv) = sud->old_depth - levels;
+ CvDEPTH(target) = sud->target_depth - levels;
PL_curstackinfo->si_cxix = i - 1;
#if !SU_HAS_PERL(5, 13, 1)
* reset $@ to its proper value. Note that the the call to
* su_uplevel_restore() must happen before the "reset $@" item of the save
* stack is processed, as uplevel was called after the localization.
- * Andrew's change to how $@ was treated, which were mainly integrated
+ * Andrew's changes to how $@ was handled, which were mainly integrated
* between perl 5.13.0 and 5.13.1, fixed this. */
if (ERRSV && SvTRUE(ERRSV)) {
register const PERL_CONTEXT *cx = cxstack + i; /* This is the eval scope */
PL_stack_sp = PL_stack_base + AvFILLp(cur->si_stack);
PL_stack_max = PL_stack_base + AvMAX(cur->si_stack);
-#if SU_HAS_PERL(5, 8, 0)
- if (MY_CXT.uplevel_storage.count >= SU_UPLEVEL_STORAGE_SIZE) {
- /* When an exception is thrown from the uplevel'd subroutine,
- * su_uplevel_restore() may be called by the LEAVE in die_unwind() (called
- * die_where() in more recent perls), which has the sad habit of keeping a
- * pointer to the current context frame across this call. This means that
- * we can't free the temporary context stack we used for the uplevel call
- * right now, or that pointer upwards would point to garbage. We work around
- * this by attaching the state data to a scalar that will be freed "soon".
- * This issue has been fixed in perl with commit 8f89e5a9. */
- SV *sv = sv_newmortal();
- sv_magicext(sv, NULL, PERL_MAGIC_ext, &su_uplevel_restore_vtbl,
- (const char *) sud, 0);
- } else {
-#endif
+ /* When an exception is thrown from the uplevel'd subroutine,
+ * su_uplevel_restore() may be called by the LEAVE in die_unwind() (renamed
+ * die_where() in more recent perls), which has the sad habit of keeping a
+ * pointer to the current context frame across this call. This means that we
+ * can't free the temporary context stack we used for the uplevel call right
+ * now, or that pointer upwards would point to garbage. */
+#if SU_HAS_PERL(5, 13, 7)
+ /* This issue has been fixed in perl with commit 8f89e5a9, which was made
+ * public in perl 5.13.7. */
+ su_uplevel_storage_delete(sud);
+#else
+ /* Otherwise, we just enqueue it back in the global storage list. */
+ {
+ dMY_CXT;
+
+ sud->tmp_uid_storage = MY_CXT.uid_storage;
+ MY_CXT.uid_storage = sud->old_uid_storage;
+
+ MY_CXT.uplevel_storage.top = sud->next;
sud->next = MY_CXT.uplevel_storage.root;
MY_CXT.uplevel_storage.root = sud;
MY_CXT.uplevel_storage.count++;
-#if SU_HAS_PERL(5, 8, 0)
}
#endif
return;
}
-STATIC CV *su_cv_clone(pTHX_ CV *old_cv) {
-#define su_cv_clone(C) su_cv_clone(aTHX_ (C))
- CV *new_cv;
-
- /* Starting from commit b5c19bd7, cv_clone() has an assert that checks whether
- * CvDEPTH(CvOUTSIDE(proto)) > 0, so we have to fool cv_clone() with a little
- * dance. */
-#if defined(DEBUGGING) && SU_HAS_PERL(5, 9, 0)
- I32 old_depth;
- CV *outside = CvOUTSIDE(old_cv);
-
- if (outside && CvCLONE(outside) && !CvCLONED(outside))
- outside = find_runcv(NULL);
- old_depth = CvDEPTH(outside);
- if (!old_depth)
- CvDEPTH(outside) = 1;
-#endif
+STATIC CV *su_cv_clone(pTHX_ CV *proto, GV *gv) {
+#define su_cv_clone(P, G) su_cv_clone(aTHX_ (P), (G))
+ dVAR;
+ CV *cv;
- new_cv = cv_clone(old_cv);
+ cv = MUTABLE_CV(newSV_type(SvTYPE(proto)));
-#if defined(DEBUGGING) && SU_HAS_PERL(5, 9, 0)
- CvDEPTH(outside) = old_depth;
+ CvFLAGS(cv) = CvFLAGS(proto);
+#ifdef CVf_CVGV_RC
+ CvFLAGS(cv) &= ~CVf_CVGV_RC;
+#endif
+ CvDEPTH(cv) = CvDEPTH(proto);
+#ifdef USE_ITHREADS
+ CvFILE(cv) = CvISXSUB(proto) ? CvFILE(proto) : savepv(CvFILE(proto));
+#else
+ CvFILE(cv) = CvFILE(proto);
#endif
- /* Starting from perl 5.9 (more exactly commit b5c19bd7), cv_clone() is no
- * longer able to clone named subs propery. With this commit, pad_findlex()
- * stores the parent index of a fake pad entry in the NV slot of the
- * corresponding pad name SV, but only for anonymous subs (since named subs
- * aren't supposed to be cloned in pure Perl land). To fix this, we just
- * manually relink the new fake pad entries to the new ones.
- * For some reason perl 5.8 crashes too without this, supposedly because of
- * other closure bugs. Hence we enable it everywhere. */
- if (!CvCLONE(old_cv)) {
- const AV *old_padname = (const AV *) AvARRAY(CvPADLIST(old_cv))[0];
- AV *old_pad = (AV *) AvARRAY(CvPADLIST(old_cv))[1];
- AV *new_pad = (AV *) AvARRAY(CvPADLIST(new_cv))[1];
- const SV **old_aryname = (const SV **) AvARRAY(old_padname);
- SV **old_ary = AvARRAY(old_pad);
- SV **new_ary = AvARRAY(new_pad);
- I32 fname = AvFILLp(old_padname);
- I32 fpad = AvFILLp(old_pad);
- I32 ix;
-
- for (ix = fpad; ix > 0; ix--) {
- const SV *namesv = (ix <= fname) ? old_aryname[ix] : NULL;
-
- if (namesv && namesv != &PL_sv_undef && SvFAKE(namesv)) {
- SvREFCNT_dec(new_ary[ix]);
- new_ary[ix] = SvREFCNT_inc(old_ary[ix]);
- }
- }
+ CvGV_set(cv, gv);
+ CvSTASH_set(cv, CvSTASH(proto));
+ /* Commit 4c74a7df, publicized with perl 5.13.3, began to add backrefs to
+ * stashes. CvSTASH_set() started to do it as well with commit c68d95645
+ * (which was part of perl 5.13.7). */
+#if SU_HAS_PERL(5, 13, 3) && !SU_HAS_PERL(5, 13, 7)
+ if (CvSTASH(proto))
+ Perl_sv_add_backref(aTHX_ CvSTASH(proto), MUTABLE_SV(cv));
+#endif
+
+ if (CvISXSUB(proto)) {
+ CvXSUB(cv) = CvXSUB(proto);
+ CvXSUBANY(cv) = CvXSUBANY(proto);
+ } else {
+ OP_REFCNT_LOCK;
+ CvROOT(cv) = OpREFCNT_inc(CvROOT(proto));
+ OP_REFCNT_UNLOCK;
+ CvSTART(cv) = CvSTART(proto);
}
+ CvOUTSIDE(cv) = CvOUTSIDE(proto);
+#ifdef CVf_WEAKOUTSIDE
+ if (!(CvFLAGS(proto) & CVf_WEAKOUTSIDE))
+#endif
+ SvREFCNT_inc_simple_void(CvOUTSIDE(cv));
+ CvPADLIST(cv) = CvPADLIST(proto);
+#ifdef CvOUTSIDE_SEQ
+ CvOUTSIDE_SEQ(cv) = CvOUTSIDE_SEQ(proto);
+#endif
- return new_cv;
+ if (SvPOK(proto))
+ sv_setpvn(MUTABLE_SV(cv), SvPVX_const(proto), SvCUR(proto));
+
+#ifdef CvCONST
+ if (CvCONST(cv))
+ CvCONST_off(cv);
+#endif
+
+ return cv;
}
-STATIC I32 su_uplevel(pTHX_ CV *cv, I32 cxix, I32 args) {
+STATIC I32 su_uplevel(pTHX_ CV *callback, I32 cxix, I32 args) {
#define su_uplevel(C, I, A) su_uplevel(aTHX_ (C), (I), (A))
su_uplevel_ud *sud;
const PERL_CONTEXT *cx = cxstack + cxix;
PERL_SI *si;
PERL_SI *cur = PL_curstackinfo;
SV **old_stack_sp;
- CV *target_cv;
+ CV *target;
+ CV *renamed;
UNOP sub_op;
- I32 marksize;
I32 gimme;
I32 old_mark, new_mark;
I32 ret;
dSP;
- dMY_CXT;
ENTER;
old_mark = AvFILLp(PL_curstack) = PL_stack_sp - PL_stack_base;
SPAGAIN;
- sud = MY_CXT.uplevel_storage.root;
- if (sud) {
- MY_CXT.uplevel_storage.root = sud->next;
- MY_CXT.uplevel_storage.count--;
- } else {
- sud = su_uplevel_ud_new();
- }
- si = sud->si;
+ sud = su_uplevel_storage_new(cxix);
- sud->cxix = cxix;
- sud->died = 1;
+ sud->cxix = cxix;
+ sud->died = 1;
+ sud->callback = NULL;
+ sud->renamed = NULL;
SAVEDESTRUCTOR_X(su_uplevel_restore, sud);
- si->si_type = cur->si_type;
- si->si_next = NULL;
- si->si_prev = cur->si_prev;
+ si = sud->si;
+
+ si->si_type = cur->si_type;
+ si->si_next = NULL;
+ si->si_prev = cur->si_prev;
+#ifdef DEBUGGING
+ si->si_markoff = cx->blk_oldmarksp;
+#endif
/* Allocate enough space for all the elements of the original stack up to the
* target context, plus the forthcoming arguments. */
PL_stack_max = PL_stack_base + AvMAX(si->si_stack);
SPAGAIN;
-#ifdef DEBUGGING
- si->si_markoff = cx->blk_oldmarksp;
-#endif
-
/* Copy the context stack up to the context just below the target. */
- si->si_cxix = (cxix < 0) ? -1 : (cxix - 1);
- /* The max size must be at least two so that GROW(max) = (max * 3) / 2 > max */
- si->si_cxmax = (cxix < 4) ? 4 : cxix;
- Renew(si->si_cxstack, si->si_cxmax + 1, PERL_CONTEXT);
+ si->si_cxix = (cxix < 0) ? -1 : (cxix - 1);
+ if (si->si_cxmax < cxix) {
+ /* The max size must be at least two so that GROW(max) = (max*3)/2 > max */
+ si->si_cxmax = (cxix < 4) ? 4 : cxix;
+ Renew(si->si_cxstack, si->si_cxmax + 1, PERL_CONTEXT);
+ }
Copy(cur->si_cxstack, si->si_cxstack, cxix, PERL_CONTEXT);
SU_POISON(si->si_cxstack + cxix, si->si_cxmax + 1 - cxix, PERL_CONTEXT);
- target_cv = cx->blk_sub.cv;
- sud->target = (CV *) SvREFCNT_inc(target_cv);
- sud->old_depth = CvDEPTH(target_cv);
+ target = cx->blk_sub.cv;
+ sud->target = (CV *) SvREFCNT_inc(target);
+ sud->target_depth = CvDEPTH(target);
/* blk_oldcop is essentially needed for caller() and stack traces. It has no
* run-time implication, since PL_curcop will be overwritten as soon as we
* reports the right file name, line number and lexical hints. */
SU_UPLEVEL_SAVE(curcop, cx->blk_oldcop);
/* Don't reset PL_markstack_ptr, or we would overwrite the mark stack below
- * this point. */
- /* Don't reset PL_curpm, we want the most recent matches. */
+ * this point. Don't reset PL_curpm either, we want the most recent matches. */
SU_UPLEVEL_SAVE(curstackinfo, si);
/* If those two are equal, we need to fool POPSTACK_TO() */
sud->old_mainstack = NULL;
PL_curstack = si->si_stack;
- cv = su_cv_clone(cv);
- sud->cloned_cv = cv;
- CvGV_set(cv, CvGV(target_cv));
+ renamed = su_cv_clone(callback, CvGV(target));
+ sud->renamed = renamed;
PUSHMARK(SP);
- /* Both SP and old_stack_sp points just before the CV. */
+ /* Both SP and old_stack_sp point just before the CV. */
Copy(old_stack_sp + 2, SP + 1, args, SV *);
SP += args;
- PUSHs((SV *) cv);
+ PUSHs((SV *) renamed);
PUTBACK;
Zero(&sub_op, 1, UNOP);
SU_UPLEVEL_SAVE(op, (OP *) &sub_op);
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+ sud->old_runops = PL_runops;
+#endif
+
sud->old_catch = CATCH_GET;
CATCH_SET(TRUE);
- if (PL_op = PL_ppaddr[OP_ENTERSUB](aTHX)) {
+ if ((PL_op = PL_ppaddr[OP_ENTERSUB](aTHX))) {
+ PERL_CONTEXT *sub_cx = cxstack + cxstack_ix;
+
+ /* If pp_entersub() returns a non-null OP, it means that the callback is not
+ * an XSUB. */
+
+ sud->callback = MUTABLE_CV(SvREFCNT_inc(callback));
+ CvDEPTH(callback)++;
+
if (CxHASARGS(cx) && cx->blk_sub.argarray) {
/* The call to pp_entersub() has saved the current @_ (in XS terms,
* GvAV(PL_defgv)) in the savearray member, and has created a new argarray
* argarray with an unreal copy of the original @_. */
AV *av = newAV();
AvREAL_off(av);
+ AvREIFY_on(av);
av_extend(av, AvMAX(cx->blk_sub.argarray));
AvFILLp(av) = AvFILLp(cx->blk_sub.argarray);
Copy(AvARRAY(cx->blk_sub.argarray), AvARRAY(av), AvFILLp(av) + 1, SV *);
- cxstack[cxix].blk_sub.argarray = av;
- } else if (PL_DBsub) {
- SvREFCNT_inc(cxstack[cxix].blk_sub.argarray);
+ sub_cx->blk_sub.argarray = av;
+ } else {
+ SvREFCNT_inc_simple_void(sub_cx->blk_sub.argarray);
}
- CALLRUNOPS(aTHX);
+ if (su_uplevel_goto_static(CvROOT(renamed))) {
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+ if (PL_runops != PL_runops_std) {
+ if (PL_runops == PL_runops_dbg) {
+ if (PL_debug)
+ croak("uplevel() can't execute code that calls goto when debugging flags are set");
+ } else if (PL_runops != su_uplevel_goto_runops)
+ croak("uplevel() can't execute code that calls goto with a custom runloop");
+ }
- ret = PL_stack_sp - (PL_stack_base + new_mark);
+ PL_runops = su_uplevel_goto_runops;
+#else /* SU_UPLEVEL_HIJACKS_RUNOPS */
+ croak("uplevel() can't execute code that calls goto before perl 5.8");
+#endif /* !SU_UPLEVEL_HIJACKS_RUNOPS */
+ }
+
+ CALLRUNOPS(aTHX);
}
sud->died = 0;
- SPAGAIN;
-
+ ret = PL_stack_sp - (PL_stack_base + new_mark);
if (ret > 0) {
AV *old_stack = sud->old_curstackinfo->si_stack;
AvFILLp(old_stack) += ret;
}
- PUTBACK;
-
LEAVE;
return ret;
}
+/* --- Unique context ID --------------------------------------------------- */
+
+/* Fetch (creating if needed) the su_uid slot for the given absolute context
+ * depth in the interpreter-local UID storage. Grows the map lazily, zeroing
+ * new slots, and keeps the 'used' high-water mark up to date. The returned
+ * UID may be inactive; the caller decides whether to (re)activate it. */
+STATIC su_uid *su_uid_storage_fetch(pTHX_ UV depth) {
+#define su_uid_storage_fetch(D) su_uid_storage_fetch(aTHX_ (D))
+ su_uid **map, *uid;
+ STRLEN alloc;
+ dMY_CXT;
+
+ map = MY_CXT.uid_storage.map;
+ alloc = MY_CXT.uid_storage.alloc;
+
+ if (depth >= alloc) {
+  STRLEN i;
+
+  Renew(map, depth + 1, su_uid *);
+  for (i = alloc; i <= depth; ++i)
+   map[i] = NULL;
+
+  MY_CXT.uid_storage.map = map;
+  MY_CXT.uid_storage.alloc = depth + 1;
+ }
+
+ uid = map[depth];
+
+ if (!uid) {
+  Newx(uid, 1, su_uid);
+  uid->seq = 0;
+  uid->flags = 0;
+  map[depth] = uid;
+ }
+
+ if (depth >= MY_CXT.uid_storage.used)
+  MY_CXT.uid_storage.used = depth + 1;
+
+ return uid;
+}
+
+/* Return true iff a UID with the given depth and sequence number exists in
+ * the interpreter-local storage and is still active, i.e. the context it
+ * identifies has not been left yet. */
+STATIC int su_uid_storage_check(pTHX_ UV depth, UV seq) {
+#define su_uid_storage_check(D, S) su_uid_storage_check(aTHX_ (D), (S))
+ su_uid *uid;
+ dMY_CXT;
+
+ if (depth >= MY_CXT.uid_storage.used)
+  return 0;
+
+ uid = MY_CXT.uid_storage.map[depth];
+
+ return uid && (uid->seq == seq) && (uid->flags & SU_UID_ACTIVE);
+}
+
+/* Destructor callback: mark the UID as no longer active. ud_ is really a
+ * su_uid *, smuggled through the void * destructor argument. */
+STATIC void su_uid_drop(pTHX_ void *ud_) {
+ su_uid *uid = ud_;
+
+ uid->flags &= ~SU_UID_ACTIVE;
+}
+
+/* Handler run when the watched scope is entered at destruction time: queue
+ * su_uid_drop on the save stack so the UID is deactivated when that scope
+ * ends. Note: ud->cb actually holds a su_uid *, not an SV (see su_uid_get). */
+STATIC void su_uid_bump(pTHX_ void *ud_) {
+ su_ud_reap *ud = ud_;
+
+ SAVEDESTRUCTOR_X(su_uid_drop, ud->cb);
+}
+
+/* Return a mortal SV holding the unique ID string "depth-seq" for context
+ * cxix. If the context has no active UID yet, assign it a fresh sequence
+ * number, flag it active, and register (via su_init) a destructor that will
+ * deactivate it when the context is left. */
+STATIC SV *su_uid_get(pTHX_ I32 cxix) {
+#define su_uid_get(I) su_uid_get(aTHX_ (I))
+ su_uid *uid;
+ SV *uid_sv;
+ UV depth;
+
+ depth = su_uid_depth(cxix);
+ uid = su_uid_storage_fetch(depth);
+
+ if (!(uid->flags & SU_UID_ACTIVE)) {
+  su_ud_reap *ud;
+
+  uid->seq = su_uid_seq_next(depth);
+  uid->flags |= SU_UID_ACTIVE;
+
+  /* Reuse the reap machinery: cb carries the su_uid *, not a coderef. */
+  Newx(ud, 1, su_ud_reap);
+  SU_UD_ORIGIN(ud) = NULL;
+  SU_UD_HANDLER(ud) = su_uid_bump;
+  ud->cb = (SV *) uid;
+  su_init(ud, cxix, SU_SAVE_DESTRUCTOR_SIZE);
+ }
+
+ uid_sv = sv_newmortal();
+ sv_setpvf(uid_sv, "%"UVuf"-%"UVuf, depth, uid->seq);
+ return uid_sv;
+}
+
+#ifdef grok_number
+
+#define su_grok_number(S, L, VP) grok_number((S), (L), (VP))
+
+#else /* grok_number */
+
+#define IS_NUMBER_IN_UV 0x1
+
+/* Fallback for perls without grok_number(): accept only strings made
+ * entirely of decimal digits, convert through a mortal SV with sv_2uv(),
+ * and report success with IS_NUMBER_IN_UV (0 otherwise). */
+STATIC int su_grok_number(pTHX_ const char *s, STRLEN len, UV *valuep) {
+#define su_grok_number(S, L, VP) su_grok_number(aTHX_ (S), (L), (VP))
+ STRLEN i;
+ SV *tmpsv;
+
+ /* This crude check should be good enough for a fallback implementation.
+  * Better be too strict than too lax. */
+ for (i = 0; i < len; ++i) {
+  if (!isDIGIT(s[i]))
+   return 0;
+ }
+
+ tmpsv = sv_newmortal();
+ sv_setpvn(tmpsv, s, len);
+ *valuep = sv_2uv(tmpsv);
+
+ return IS_NUMBER_IN_UV;
+}
+
+#endif /* !grok_number */
+
+/* Validate a UID string of the form "depth-seq": croak if it is malformed
+ * (missing '-' separator or non-numeric parts), otherwise return whether a
+ * matching active UID exists in the current interpreter's storage. */
+STATIC int su_uid_validate(pTHX_ SV *uid) {
+#define su_uid_validate(U) su_uid_validate(aTHX_ (U))
+ const char *s;
+ STRLEN len, p = 0;
+ UV depth, seq;
+ int type;
+
+ s = SvPV_const(uid, len);
+
+ /* Find the '-' separating the two numeric parts. */
+ while (p < len && s[p] != '-')
+  ++p;
+ if (p >= len)
+  croak("UID contains only one part");
+
+ type = su_grok_number(s, p, &depth);
+ if (type != IS_NUMBER_IN_UV)
+  croak("First UID part is not an unsigned integer");
+
+ ++p; /* Skip '-'. As we used to have p < len, len - (p + 1) >= 0. */
+
+ type = su_grok_number(s + p, len - p, &seq);
+ if (type != IS_NUMBER_IN_UV)
+  croak("Second UID part is not an unsigned integer");
+
+ return su_uid_storage_check(depth, seq);
+}
+
/* --- Interpreter setup/teardown ------------------------------------------ */
STATIC void su_teardown(pTHX_ void *param) {
- su_uplevel_ud *cur, *prev;
+ su_uplevel_ud *cur;
+ su_uid **map;
dMY_CXT;
+ map = MY_CXT.uid_storage.map;
+ if (map) {
+ STRLEN i;
+ for (i = 0; i < MY_CXT.uid_storage.used; ++i)
+ Safefree(map[i]);
+ Safefree(map);
+ }
+
cur = MY_CXT.uplevel_storage.root;
if (cur) {
su_uplevel_ud *prev;
MY_CXT.stack_placeholder = NULL;
/* NewOp() calls calloc() which just zeroes the memory with memset(). */
- Zero(&(MY_CXT.return_op), 1, sizeof(MY_CXT.return_op));
- MY_CXT.return_op.op_type = OP_RETURN;
- MY_CXT.return_op.op_ppaddr = PL_ppaddr[OP_RETURN];
+ Zero(&(MY_CXT.unwind_storage.return_op), 1, LISTOP);
+ MY_CXT.unwind_storage.return_op.op_type = OP_RETURN;
+ MY_CXT.unwind_storage.return_op.op_ppaddr = PL_ppaddr[OP_RETURN];
- Zero(&(MY_CXT.proxy_op), 1, sizeof(MY_CXT.proxy_op));
- MY_CXT.proxy_op.op_type = OP_STUB;
- MY_CXT.proxy_op.op_ppaddr = NULL;
+ Zero(&(MY_CXT.unwind_storage.proxy_op), 1, OP);
+ MY_CXT.unwind_storage.proxy_op.op_type = OP_STUB;
+ MY_CXT.unwind_storage.proxy_op.op_ppaddr = NULL;
+ MY_CXT.uplevel_storage.top = NULL;
MY_CXT.uplevel_storage.root = NULL;
MY_CXT.uplevel_storage.count = 0;
+ MY_CXT.uid_storage.map = NULL;
+ MY_CXT.uid_storage.used = 0;
+ MY_CXT.uid_storage.alloc = 0;
+
call_atexit(su_teardown, NULL);
return;
continue;
case CXt_EVAL:
case CXt_FORMAT:
- MY_CXT.cxix = cxix;
- MY_CXT.items = items;
+ MY_CXT.unwind_storage.cxix = cxix;
+ MY_CXT.unwind_storage.items = items;
/* pp_entersub will want to sanitize the stack after returning from there
* Screw that, we're insane */
if (GIMME_V == G_SCALAR) {
- MY_CXT.savesp = PL_stack_sp;
+ MY_CXT.unwind_storage.savesp = PL_stack_sp;
/* dXSARGS calls POPMARK, so we need to match PL_markstack_ptr[1] */
PL_stack_sp = PL_stack_base + PL_markstack_ptr[1] + 1;
} else {
- MY_CXT.savesp = NULL;
+ MY_CXT.unwind_storage.savesp = NULL;
}
SAVEDESTRUCTOR_X(su_unwind, NULL);
return;
{
HV *stash;
+ MUTEX_INIT(&su_uid_seq_counter_mutex);
+
+ su_uid_seq_counter.seqs = NULL;
+ su_uid_seq_counter.size = 0;
+
stash = gv_stashpv(__PACKAGE__, 1);
newCONSTSUB(stash, "TOP", newSViv(0));
newCONSTSUB(stash, "SU_THREADSAFE", newSVuv(SU_THREADSAFE));
void
CLONE(...)
PROTOTYPE: DISABLE
+PREINIT:
+ su_uid_storage new_cxt;
PPCODE:
+ {
+ dMY_CXT;
+ new_cxt.map = NULL;
+ new_cxt.used = 0;
+ new_cxt.alloc = 0;
+ su_uid_storage_dup(&new_cxt, &MY_CXT.uid_storage, MY_CXT.uid_storage.used);
+ }
{
MY_CXT_CLONE;
+ MY_CXT.uplevel_storage.top = NULL;
MY_CXT.uplevel_storage.root = NULL;
MY_CXT.uplevel_storage.count = 0;
+ MY_CXT.uid_storage = new_cxt;
}
XSRETURN(0);
#endif /* SU_THREADSAFE */
-SV *
+void
HERE()
PROTOTYPE:
PREINIT:
I32 cxix = cxstack_ix;
-CODE:
+PPCODE:
if (PL_DBsub)
SU_SKIP_DB(cxix);
- RETVAL = newSViv(cxix);
-OUTPUT:
- RETVAL
+ EXTEND(SP, 1);
+ mPUSHi(cxix);
+ XSRETURN(1);
-SV *
+void
UP(...)
PROTOTYPE: ;$
PREINIT:
I32 cxix;
-CODE:
+PPCODE:
SU_GET_CONTEXT(0, 0);
if (--cxix < 0)
cxix = 0;
if (PL_DBsub)
SU_SKIP_DB(cxix);
- RETVAL = newSViv(cxix);
-OUTPUT:
- RETVAL
+ EXTEND(SP, 1);
+ mPUSHi(cxix);
+ XSRETURN(1);
void
SUB(...)
I32 cxix;
PPCODE:
SU_GET_CONTEXT(0, 0);
+ EXTEND(SP, 1);
for (; cxix >= 0; --cxix) {
PERL_CONTEXT *cx = cxstack + cxix;
switch (CxTYPE(cx)) {
case CXt_SUB:
if (PL_DBsub && cx->blk_sub.cv == GvCV(PL_DBsub))
continue;
- ST(0) = sv_2mortal(newSViv(cxix));
+ mPUSHi(cxix);
XSRETURN(1);
}
}
I32 cxix;
PPCODE:
SU_GET_CONTEXT(0, 0);
+ EXTEND(SP, 1);
for (; cxix >= 0; --cxix) {
PERL_CONTEXT *cx = cxstack + cxix;
switch (CxTYPE(cx)) {
default:
continue;
case CXt_EVAL:
- ST(0) = sv_2mortal(newSViv(cxix));
+ mPUSHi(cxix);
XSRETURN(1);
}
}
if (cxix < 0)
cxix = 0;
}
- ST(0) = sv_2mortal(newSViv(cxix));
+ EXTEND(SP, 1);
+ mPUSHi(cxix);
XSRETURN(1);
void
}
}
done:
- ST(0) = sv_2mortal(newSViv(cxix));
+ EXTEND(SP, 1);
+ mPUSHi(cxix);
XSRETURN(1);
void
I32 cxix;
PPCODE:
SU_GET_CONTEXT(0, 0);
+ EXTEND(SP, 1);
while (cxix > 0) {
PERL_CONTEXT *cx = cxstack + cxix--;
switch (CxTYPE(cx)) {
PL_stack_sp--;
args = items - 2;
}
+ /* su_uplevel() takes care of extending the stack if needed. */
ret = su_uplevel((CV *) code, cxix, args);
XSRETURN(ret);
default:
}
} while (--cxix >= 0);
croak("Can't uplevel outside a subroutine");
+
+void
+uid(...)
+PROTOTYPE: ;$
+PREINIT:
+ I32 cxix;
+ SV *uid;
+PPCODE:
+ SU_GET_CONTEXT(0, 0);
+ uid = su_uid_get(cxix);
+ EXTEND(SP, 1);
+ PUSHs(uid);
+ XSRETURN(1);
+
+void
+validate_uid(SV *uid)
+PROTOTYPE: $
+PREINIT:
+ SV *ret;
+PPCODE:
+ ret = su_uid_validate(uid) ? &PL_sv_yes : &PL_sv_no;
+ EXTEND(SP, 1);
+ PUSHs(ret);
+ XSRETURN(1);