+ XSH_D({
+ SV *z = newSV(0);
+ SvUPGRADE(z, t);
+ xsh_debug_log("%p: === localize a %s\n", ud, sv_reftype(z, 0));
+ xsh_debug_log("%p: depth=%2d scope_ix=%2d save_ix=%2d\n",
+ ud, SU_UD_DEPTH(ud), PL_scopestack_ix, PL_savestack_ix);
+ SvREFCNT_dec(z);
+ });
+
+ /* Inspired by Alias.pm */
+ switch (t) {
+ case SVt_PVAV:
+ if (elem) {
+ su_save_aelem(GvAV(gv), elem, val);
+ return;
+ } else {
+ save_ary(gv);
+ }
+ break;
+ case SVt_PVHV:
+ if (elem) {
+ su_save_helem(GvHV(gv), elem, val);
+ return;
+ } else {
+ save_hash(gv);
+ }
+ break;
+ case SVt_PVGV:
+ save_gp(gv, 1); /* hide previous entry in symtab */
+ break;
+ case SVt_PVCV:
+ su_save_gvcv(gv);
+ break;
+ default:
+ save_scalar(gv);
+ break;
+ }
+
+ if (val)
+ SvSetMagicSV((SV *) gv, val);
+
+ return;
+}
+
+/* ... Unique context ID ................................................... */
+
+/* We must pass the index because XSH_CXT.uid_storage might be reallocated
+ * between the UID fetch and the invalidation at the end of scope. */
+
+typedef struct {
+ su_ud_common ci;
+ I32 idx;
+} su_ud_uid;
+
+static void su_uid_drop(pTHX_ void *ud_) {
+ su_ud_uid *ud = ud_;
+ dXSH_CXT;
+
+ XSH_CXT.uid_storage.map[ud->idx].flags &= ~SU_UID_ACTIVE;
+
+ SU_UD_FREE(ud);
+
+ return;
+}
+
+/* --- Pop a context back -------------------------------------------------- */
+
+#ifdef DEBUGGING
+# define SU_CX_TYPENAME(T) PL_block_type[(T)]
+#else
+# if XSH_HAS_PERL(5, 23, 8)
+static const char *su_block_type[] = {
+ "NULL",
+ "WHEN",
+ "BLOCK",
+ "GIVEN",
+ "LOOP_ARY",
+ "LOOP_LAZYSV",
+ "LOOP_LAZYIV",
+ "LOOP_LIST",
+ "LOOP_PLAIN",
+ "SUB",
+ "FORMAT",
+ "EVAL",
+ "SUBST"
+};
+# elif XSH_HAS_PERL(5, 11, 0)
+static const char *su_block_type[] = {
+ "NULL",
+ "WHEN",
+ "BLOCK",
+ "GIVEN",
+ "LOOP_FOR",
+ "LOOP_PLAIN",
+ "LOOP_LAZYSV",
+ "LOOP_LAZYIV",
+ "SUB",
+ "FORMAT",
+ "EVAL",
+ "SUBST"
+};
+# elif XSH_HAS_PERL(5, 10, 0)
+static const char *su_block_type[] = {
+ "NULL",
+ "SUB",
+ "EVAL",
+ "LOOP",
+ "SUBST",
+ "BLOCK",
+ "FORMAT"
+ "WHEN",
+ "GIVEN"
+};
+# else
+static const char *su_block_type[] = {
+ "NULL",
+ "SUB",
+ "EVAL",
+ "LOOP",
+ "SUBST",
+ "BLOCK",
+ "FORMAT"
+};
+# endif
+# define SU_CX_TYPENAME(T) su_block_type[(T)]
+#endif
+
+#define SU_CXNAME(C) SU_CX_TYPENAME(CxTYPE(C))
+
+#if XSH_DEBUG
+
+/* For debugging: these indicate how many ENTERs each context type
+ * does before the PUSHBLOCK */
+
+static const int su_cxt_enter_count[] = {
+# if XSH_HAS_PERL(5, 23, 8)
+ 0 /* context pushes no longer do ENTERs */
+# elif XSH_HAS_PERL(5, 11, 0)
+ /* NULL WHEN BLOCK GIVEN LOOP_FOR LOOP_PLAIN LOOP_LAZYSV
+ * LOOP_LAZYIV SUB FORMAT EVAL SUBST */
+ 0, 1, 1, 1, 2, 2, 2, 2, 1, 1, 1, 0
+# elif XSH_HAS_PERL(5, 10, 0)
+ /* NULL SUB EVAL LOOP SUBST BLOCK FORMAT WHEN GIVEN */
+ 0, 1, 1, 2, 0, 1, 1, 1, 1
+# else
+ /* NULL SUB EVAL LOOP SUBST BLOCK FORMAT */
+ 0, 1, 1, 2, 0, 1, 1
+# endif
+};
+
+#endif /* XSH_DEBUG */
+
+/* push at least 'size' slots worth of padding onto the savestack */
+
+static void su_ss_push_padding(pTHX_ void *ud, I32 size) {
+#define su_ss_push_padding(U, S) su_ss_push_padding(aTHX_ (U), (S))
+ if (size <= 0)
+ return;
+
+ if (size < SU_SAVE_ALLOC_SIZE + 1) /* minimum possible SAVEt_ALLOC */
+ size = SU_SAVE_ALLOC_SIZE + 1;
+
+ XSH_D(xsh_debug_log(
+ "%p: push %2d padding at save_ix=%d\n",
+ ud, size, PL_savestack_ix));
+
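+ /* save_alloc() pushes SU_SAVE_ALLOC_SIZE bookkeeping entries of its own on
+ * top of the requested space, hence the subtraction below. */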
+ save_alloc((size - SU_SAVE_ALLOC_SIZE) * sizeof(*PL_savestack), 0);
+
+ return;
+}
+
+static void su_pop(pTHX_ void *ud);
+
+/* push an su_pop destructor onto the savestack with suitable padding.
+ * 'first' indicates that this is the first push of a destructor */
+
+static void su_ss_push_destructor(pTHX_ void *ud, I32 depth, bool first) {
+#define su_ss_push_destructor(U, D, F) su_ss_push_destructor(aTHX_ (U), (D), (F))
+ su_ud_origin_elem *origin = SU_UD_ORIGIN(ud);
+
+ assert(first || origin[depth+1].orig_ix == PL_savestack_ix);
+
+ su_ss_push_padding(ud,
+ (origin[depth].orig_ix + origin[depth].offset) - PL_savestack_ix);
+
+ XSH_D(xsh_debug_log(
+ "%p: push destructor at save_ix=%d depth=%d scope_ix=%d\n",
+ ud, PL_savestack_ix, depth, PL_scopestack_ix));
+
+ SAVEDESTRUCTOR_X(su_pop, ud);
+
+ assert(first ||
+ PL_savestack_ix <= origin[depth+1].orig_ix + origin[depth+1].offset);
+
+ return;
+}
+
+/* this is called during each leave_scope() via SAVEDESTRUCTOR_X */
+
+static void su_pop(pTHX_ void *ud) {
+#define su_pop(U) su_pop(aTHX_ (U))
+ I32 depth, base, mark;
+ su_ud_origin_elem *origin;
+
+ depth = SU_UD_DEPTH(ud);
+ origin = SU_UD_ORIGIN(ud);
+
+ XSH_D(xsh_debug_log("%p: ### su_pop: depth=%d\n", ud, depth));
+
+ depth--;
+ mark = PL_savestack_ix;
+ base = origin[depth].orig_ix;
+
+ XSH_D(xsh_debug_log("%p: residual savestack frame is %d(+%d)..%d\n",
+ ud, base, origin[depth].offset, mark));
+
+ if (base < mark) {
+ XSH_D(xsh_debug_log("%p: clear leftovers at %d..%d\n", ud, base, mark));
+ leave_scope(base);
+ }
+ assert(PL_savestack_ix == base);
+
+ SU_UD_DEPTH(ud) = depth;
+
+ if (depth > 0) {
+ su_ss_push_destructor(ud, depth-1, 0);
+ } else {
+ I32 offset = origin[0].offset; /* grab value before origin is freed */
+ switch (SU_UD_TYPE(ud)) {
+ case SU_UD_TYPE_REAP: {
+ XSH_D(
+ xsh_debug_log("%p: === reap\n%p: depth=%d scope_ix=%d save_ix=%d\n",
+ ud, ud, SU_UD_DEPTH(ud), PL_scopestack_ix, PL_savestack_ix)
+ );
+ SAVEDESTRUCTOR_X(su_call, SU_UD_REAP_CB(ud));
+ SU_UD_FREE(ud);
+ break;
+ }
+ case SU_UD_TYPE_LOCALIZE:
+ su_localize(ud);
+ SU_UD_LOCALIZE_FREE(ud);
+ break;
+ case SU_UD_TYPE_UID:
+ SAVEDESTRUCTOR_X(su_uid_drop, ud);
+ break;
+ }
+ /* perl 5.23.8 onwards is very fussy about the return from leave_scope()
+ * leaving PL_savestack_ix where it expects it to be */
+ if (PL_savestack_ix < base + offset) {
+ I32 gap = (base + offset) - PL_savestack_ix;
+ assert(gap >= SU_SAVE_ALLOC_SIZE + 1);
+ su_ss_push_padding(ud, gap);
+ }
+ assert(PL_savestack_ix == base + offset);
+ }
+
+ XSH_D(xsh_debug_log("%p: end pop: ss_ix=%d\n", ud, PL_savestack_ix));
+}
+
+/* --- Initialize the stack and the action userdata ------------------------ */
+
+static void su_init(pTHX_ void *ud, I32 cxix, I32 size) {
+#define su_init(U, C, S) su_init(aTHX_ (U), (C), (S))
+ su_ud_origin_elem *origin;
+ I32 i, depth;
+ I32 cur_cx_ix, cur_scope_ix;
+
+ XSH_D(xsh_debug_log("%p: ### su_init(cxix=%d, size=%d)\n", ud, cxix, size));
+
+ depth = PL_scopestack_ix - cxstack[cxix].blk_oldscopesp;
+#if SU_HAS_NEW_CXT
+ depth += (cxstack_ix - cxix); /* each context frame holds 1 scope */
+#endif
+ XSH_D(xsh_debug_log(
+ "%p: going down by depth=%d with scope_ix=%d save_ix=%d\n",
+ ud, depth, PL_scopestack_ix, PL_savestack_ix));
+
+ /* Artificially increase the position of each savestack frame boundary
+ * to make space to squeeze in a 'size' sized entry (first one) or a
+ * SU_SAVE_DESTRUCTOR_SIZE sized entry (higher ones). In addition, make
+ * sure that each boundary is higher than the previous, so that *every*
+ * scope exit triggers a call to leave_scope(). Each scope exit will call
+ * the su_pop() destructor, which is responsible for: freeing any
+ * savestack entries below the artificially raised floor; then pushing a
+ * new destructor in that space. On the final pop, the "real" savestack
+ * action is pushed rather than another destructor.
+ *
+ * On older perls, savestack frame boundaries are specified by a range of
+ * scopestack entries (one per ENTER). Each scope entry typically does
+ * one or two ENTERs followed by a PUSHBLOCK. Thus the
+ * cx->blku_oldscopesp field set by the PUSHBLOCK points to the next free
+ * slot, which is one above the last of the ENTERs. In the debugging
+ * output we indicate that by bracketing the ENTERs directly preceding
+ * that context push with dashes, e.g.:
+ *
+ * 13b98d8: ------------------
+ * 13b98d8: ENTER origin[0] scope[3] savestack=3+3
+ * 13b98d8: ENTER origin[1] scope[4] savestack=9+3
+ * 13b98d8: cx=1 LOOP_LAZYIV
+ * 13b98d8: ------------------
+ *
+ * In addition to context stack pushes, other activities can push ENTERs
+ * too, such as grep expr and XS sub calls.
+ *
+ * For newer perls (SU_HAS_NEW_CXT), a context push no longer does any
+ * ENTERs; instead the old savestack position is stored in the new
+ * cx->blk_oldsaveix field; thus this field specifies an additional
+ * savestack frame boundary point in addition to the scopestack entries,
+ * and will also need adjusting.
+ *
+ * We record the original and modified position of each boundary in the
+ * origin array.
+ *
+ * The passed cxix argument represents the scope we wish to inject into;
+ * we have to adjust all the savestack frame boundaries above (but not
+ * including) that context.
+ */
+
+ Newx(origin, depth, su_ud_origin_elem);
+
+ cur_cx_ix = cxix;
+ cur_scope_ix = cxstack[cxix].blk_oldscopesp;
+
+#if SU_HAS_NEW_CXT
+ XSH_D(xsh_debug_log("%p: cx=%-2d %-11s\n",
+ ud, cur_cx_ix, SU_CXNAME(cxstack+cur_cx_ix)));
+ cur_cx_ix++;
+#endif
+
+ for (i = 0; cur_scope_ix < PL_scopestack_ix; i++) {
+ I32 *ixp;
+ I32 offset;
+
+#if SU_HAS_NEW_CXT
+
+ if (cur_cx_ix <= cxstack_ix
+ && cur_scope_ix == cxstack[cur_cx_ix].blk_oldscopesp)
+ ixp = &(cxstack[cur_cx_ix++].blk_oldsaveix);
+ else
+ ixp = &PL_scopestack[cur_scope_ix++]; /* an ENTER pushed after cur context */
+
+#else
+
+ XSH_D({
+ if (cur_cx_ix <= cxstack_ix) {
+ if (cur_scope_ix == cxstack[cur_cx_ix].blk_oldscopesp) {
+ xsh_debug_log("%p: cx=%-2d %s\n%p: ------------------\n",
+ ud, cur_cx_ix, SU_CXNAME(cxstack+cur_cx_ix), ud);
+ cur_cx_ix++;
+ }
+ else if (cur_scope_ix + su_cxt_enter_count[CxTYPE(cxstack+cur_cx_ix)]
+ == cxstack[cur_cx_ix].blk_oldscopesp)
+ xsh_debug_log("%p: ------------------\n", ud);
+ }
+ });
+
+ ixp = &PL_scopestack[cur_scope_ix++];
+
+#endif
+
+ if (i == 0) {
+ offset = size;
+ } else {
+ /* we have three constraints to satisfy:
+ * 1) Each adjusted offset must be at least SU_SAVE_DESTRUCTOR_SIZE
+ * above its unadjusted boundary, so that there is space to inject a
+ * destructor into the outer scope.
+ * 2) Each adjusted boundary must be at least SU_SAVE_DESTRUCTOR_SIZE
+ * higher than the previous adjusted boundary, so that a new
+ * destructor can be added below the Nth adjusted frame boundary,
+ * but be within the (N-1)th adjusted frame and so be triggered on
+ * the next scope exit;
+ * 3) If the adjustment needs to be greater than SU_SAVE_DESTRUCTOR_SIZE,
+ * then it should be greater by an amount of at least the minimum
+ * pad size, so that a destructor plus padding can be pushed.
+ */
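+ /* A worked example with illustrative sizes (the real values depend on
+ * the perl version; assume SU_SAVE_DESTRUCTOR_SIZE == 3 and
+ * SU_SAVE_ALLOC_SIZE == 2): if the previous boundary sat at 10 with
+ * offset 5 (adjusted position 15) and this one sits at *ixp == 13, then
+ * offset starts at 3 (rule 1) and pad == (10+5+3) - (13+3) == 2 > 0;
+ * rule 3 bumps it to SU_SAVE_ALLOC_SIZE + 1 == 3, giving offset == 6
+ * and an adjusted boundary of 19, at least SU_SAVE_DESTRUCTOR_SIZE
+ * above the previous adjusted boundary (rule 2). */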
+ I32 pad;
+ offset = SU_SAVE_DESTRUCTOR_SIZE; /* rule 1 */
+ pad = (origin[i-1].orig_ix + origin[i-1].offset) + offset
+ - (*ixp + offset);
+ if (pad > 0) { /* rule 2 */
+ if (pad < SU_SAVE_ALLOC_SIZE + 1) /* rule 3 */
+ pad = SU_SAVE_ALLOC_SIZE + 1;
+ offset += pad;
+ }
+ }
+
+ origin[i].offset = offset;
+ origin[i].orig_ix = *ixp;
+ *ixp += offset;
+
+#if SU_HAS_NEW_CXT
+ XSH_D({
+ if (ixp == &PL_scopestack[cur_scope_ix-1])
+ xsh_debug_log(
+ "%p: ENTER origin[%d] scope[%d] savestack=%d+%d\n",
+ ud, i, cur_scope_ix, origin[i].orig_ix, origin[i].offset);
+ else
+ xsh_debug_log(
+ "%p: cx=%-2d %-11s origin[%d] scope[%d] savestack=%d+%d\n",
+ ud, cur_cx_ix-1, SU_CXNAME(cxstack+cur_cx_ix-1),
+ i, cur_scope_ix, origin[i].orig_ix, origin[i].offset);
+ });
+#else
+ XSH_D(xsh_debug_log(
+ "%p: ENTER origin[%d] scope[%d] savestack=%d+%d\n",
+ ud, i, cur_scope_ix, origin[i].orig_ix, origin[i].offset));
+#endif
+
+ }
+
+ assert(i == depth);
+
+ SU_UD_DEPTH(ud) = depth;
+ SU_UD_ORIGIN(ud) = origin;
+
+ su_ss_push_destructor(ud, depth-1, 1);
+}
+
+/* --- Unwind stack -------------------------------------------------------- */
+
+static void su_unwind(pTHX_ void *ud_) {
+ dXSH_CXT;
+ I32 cxix = XSH_CXT.unwind_storage.cxix;
+ I32 items = XSH_CXT.unwind_storage.items;
+ I32 mark;
+
+ PERL_UNUSED_VAR(ud_);
+
+ PL_stack_sp = XSH_CXT.unwind_storage.savesp;
+#if XSH_HAS_PERL(5, 19, 4)
+ {
+ I32 i;
+ SV **sp = PL_stack_sp;
+ for (i = -items + 1; i <= 0; ++i)
+ if (!SvTEMP(sp[i]))
+ sv_2mortal(SvREFCNT_inc(sp[i]));
+ }
+#endif
+
+ if (cxstack_ix > cxix)
+ dounwind(cxix);
+
+ mark = PL_markstack[cxstack[cxix].blk_oldmarksp];
+ PUSHMARK(PL_stack_sp - items);
+
+ XSH_D({
+ I32 gimme = GIMME_V;
+ xsh_debug_log("%p: cx=%d gimme=%s items=%d sp=%d oldmark=%d mark=%d\n",
+ &XSH_CXT, cxix,
+ gimme == G_VOID ? "void" : gimme == G_ARRAY ? "list" : "scalar",
+ items, PL_stack_sp - PL_stack_base, *PL_markstack_ptr, mark);
+ });
+
+ PL_op = (OP *) &(XSH_CXT.unwind_storage.return_op);
+ PL_op = PL_op->op_ppaddr(aTHX);
+
+ *PL_markstack_ptr = mark;
+
+ XSH_CXT.unwind_storage.proxy_op.op_next = PL_op;
+ PL_op = &(XSH_CXT.unwind_storage.proxy_op);
+}
+
+/* --- Yield --------------------------------------------------------------- */
+
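+/* The SU_RETOP_*() macros below evaluate to the op at which execution
+ * should resume once the corresponding context has been left. */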
+#if XSH_HAS_PERL(5, 10, 0)
+# define SU_RETOP_SUB(C) ((C)->blk_sub.retop)
+# define SU_RETOP_EVAL(C) ((C)->blk_eval.retop)
+# define SU_RETOP_LOOP(C) ((C)->blk_loop.my_op->op_lastop->op_next)
+# define SU_RETOP_GIVEN(C) ((C)->blk_givwhen.leave_op->op_next)
+#else
+# define SU_RETOP_SUB(C) ((C)->blk_oldretsp > 0 ? PL_retstack[(C)->blk_oldretsp - 1] : NULL)
+# define SU_RETOP_EVAL(C) SU_RETOP_SUB(C)
+# define SU_RETOP_LOOP(C) ((C)->blk_loop.last_op->op_next)
+#endif
+
+static void su_yield(pTHX_ void *ud_) {
+ dXSH_CXT;
+ PERL_CONTEXT *cx;
+ const char *which = ud_;
+ I32 cxix = XSH_CXT.yield_storage.cxix;
+ I32 items = XSH_CXT.yield_storage.items;
+ opcode type = OP_NULL;
+ U8 flags = 0;
+ OP *next;
+
+ cx = cxstack + cxix;
+ switch (CxTYPE(cx)) {
+ case CXt_BLOCK: {
+ I32 i, cur = cxstack_ix, n = 1;
+ OP *o = NULL;
+ /* Is this actually a given/when block? This may occur only when yield was
+ * called with HERE (or nothing) as the context. */
+#if XSH_HAS_PERL(5, 10, 0)
+ if (cxix > 0) {
+ PERL_CONTEXT *prev = cx - 1;
+ U8 prev_type = CxTYPE(prev);
+ if ((prev_type == CXt_GIVEN || prev_type == CXt_WHEN)
+ && (prev->blk_oldcop == cx->blk_oldcop)) {
+ cxix--;
+ cx = prev;
+ if (prev_type == CXt_GIVEN)
+ goto cxt_given;
+ else
+ goto cxt_when;
+ }
+ }
+#endif
+ type = OP_LEAVE;
+ next = NULL;
+ /* Bare blocks (that appear as do { ... } blocks, map { ... } blocks or
+ * constant folded blocks) don't need to save the op to return to anywhere
+ * since 'last' isn't supposed to work inside them. So we climb higher in
+ * the context stack until we reach a context that has a return op (i.e. a
+ * sub, an eval, a format or a real loop), recording how many blocks we
+ * crossed. Then we follow the op_next chain until we get to the leave op
+ * that closes the original block, which we are assured to reach since
+ * everything is static (the blocks we have crossed cannot be evals or
+ * subroutine calls). */
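+ /* Purely illustrative example: yielding from a bare block nested directly
+ * inside the target block makes n == 2 with o falling back to PL_op; the
+ * walk below then skips the inner OP_LEAVE before settling on the one
+ * that closes the target block. */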
+ for (i = cxix + 1; i <= cur; ++i) {
+ PERL_CONTEXT *cx2 = cxstack + i;
+ switch (CxTYPE(cx2)) {
+ case CXt_BLOCK:
+ ++n;
+ break;
+ case CXt_SUB:
+ case CXt_FORMAT:
+ o = SU_RETOP_SUB(cx2);
+ break;
+ case CXt_EVAL:
+ o = SU_RETOP_EVAL(cx2);
+ break;
+#if XSH_HAS_PERL(5, 11, 0)
+# if XSH_HAS_PERL(5, 23, 8)
+ case CXt_LOOP_ARY:
+ case CXt_LOOP_LIST:
+# else
+ case CXt_LOOP_FOR:
+# endif
+ case CXt_LOOP_PLAIN:
+ case CXt_LOOP_LAZYSV:
+ case CXt_LOOP_LAZYIV:
+#else
+ case CXt_LOOP:
+#endif
+ o = SU_RETOP_LOOP(cx2);
+ break;
+ }
+ if (o)
+ break;
+ }
+ if (!o)
+ o = PL_op;
+ while (n && o) {
+ /* We may find other enter/leave blocks on our way to the matching leave.
+ * Make sure the depth is incremented/decremented appropriately. */
+ if (o->op_type == OP_ENTER) {
+ ++n;
+ } else if (o->op_type == OP_LEAVE) {
+ --n;
+ if (!n) {
+ next = o->op_next;
+ break;
+ }
+ }
+ o = o->op_next;
+ }
+ break;
+ }
+ case CXt_SUB:
+ case CXt_FORMAT:
+ type = OP_LEAVESUB;
+ next = SU_RETOP_SUB(cx);
+ break;
+ case CXt_EVAL:
+ type = CxTRYBLOCK(cx) ? OP_LEAVETRY : OP_LEAVEEVAL;
+ next = SU_RETOP_EVAL(cx);
+ break;
+#if XSH_HAS_PERL(5, 11, 0)
+# if XSH_HAS_PERL(5, 23, 8)
+ case CXt_LOOP_ARY:
+ case CXt_LOOP_LIST:
+# else
+ case CXt_LOOP_FOR:
+# endif
+ case CXt_LOOP_PLAIN:
+ case CXt_LOOP_LAZYSV:
+ case CXt_LOOP_LAZYIV:
+#else
+ case CXt_LOOP:
+#endif
+ type = OP_LEAVELOOP;
+ next = SU_RETOP_LOOP(cx);
+ break;
+#if XSH_HAS_PERL(5, 10, 0)
+ case CXt_GIVEN:
+cxt_given:
+ type = OP_LEAVEGIVEN;
+ next = SU_RETOP_GIVEN(cx);
+ break;
+ case CXt_WHEN:
+cxt_when:
+#if XSH_HAS_PERL(5, 15, 1)
+ type = OP_LEAVEWHEN;
+#else
+ type = OP_BREAK;
+ flags |= OPf_SPECIAL;
+#endif
+ next = NULL;
+ break;
+#endif
+ case CXt_SUBST:
+ croak("%s() can't target a substitution context", which);
+ break;
+ default:
+ croak("%s() doesn't know how to leave a %s context",
+ which, SU_CXNAME(cxstack + cxix));
+ break;
+ }
+
+ PL_stack_sp = XSH_CXT.yield_storage.savesp;
+#if XSH_HAS_PERL(5, 19, 4)
+ {
+ I32 i;
+ SV **sp = PL_stack_sp;
+ for (i = -items + 1; i <= 0; ++i)
+ if (!SvTEMP(sp[i]))
+ sv_2mortal(SvREFCNT_inc(sp[i]));
+ }
+#endif
+
+ if (cxstack_ix > cxix)
+ dounwind(cxix);
+
+ /* Copy the arguments passed to yield() where the leave op expects to find
+ * them. */
+ if (items)
+ Move(PL_stack_sp - items + 1, PL_stack_base + cx->blk_oldsp + 1, items, SV *);
+ PL_stack_sp = PL_stack_base + cx->blk_oldsp + items;
+
+ flags |= OP_GIMME_REVERSE(cx->blk_gimme);
+
+ XSH_CXT.yield_storage.leave_op.op_type = type;
+ XSH_CXT.yield_storage.leave_op.op_ppaddr = PL_ppaddr[type];
+ XSH_CXT.yield_storage.leave_op.op_flags = flags;
+ XSH_CXT.yield_storage.leave_op.op_next = next;
+
+ PL_op = (OP *) &(XSH_CXT.yield_storage.leave_op);
+ PL_op = PL_op->op_ppaddr(aTHX);
+
+ XSH_CXT.yield_storage.proxy_op.op_next = PL_op;
+ PL_op = &(XSH_CXT.yield_storage.proxy_op);
+}
+
+/* --- Uplevel ------------------------------------------------------------- */
+
+#define SU_UPLEVEL_SAVE(f, t) STMT_START { sud->old_##f = PL_##f; PL_##f = (t); } STMT_END
+#define SU_UPLEVEL_RESTORE(f) STMT_START { PL_##f = sud->old_##f; } STMT_END
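+/* For instance, SU_UPLEVEL_SAVE(curcop, cop) expands (inside a
+ * STMT_START/STMT_END block) to:
+ *   sud->old_curcop = PL_curcop; PL_curcop = (cop);
+ * and SU_UPLEVEL_RESTORE(curcop) to:
+ *   PL_curcop = sud->old_curcop;
+ * where sud is the su_uplevel_ud in scope at the point of use. */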
+
+static su_uplevel_ud *su_uplevel_storage_new(pTHX_ I32 cxix) {
+#define su_uplevel_storage_new(I) su_uplevel_storage_new(aTHX_ (I))
+ su_uplevel_ud *sud;
+ UV depth;
+ dXSH_CXT;
+
+ sud = XSH_CXT.uplevel_storage.root;
+ if (sud) {
+ XSH_CXT.uplevel_storage.root = sud->next;
+ XSH_CXT.uplevel_storage.count--;
+ } else {
+ sud = su_uplevel_ud_new();
+ }
+
+ sud->next = XSH_CXT.uplevel_storage.top;
+ XSH_CXT.uplevel_storage.top = sud;
+
+ depth = su_uid_depth(cxix);
+ su_uid_storage_dup(&sud->tmp_uid_storage, &XSH_CXT.uid_storage, depth);
+ sud->old_uid_storage = XSH_CXT.uid_storage;
+ XSH_CXT.uid_storage = sud->tmp_uid_storage;
+
+ return sud;
+}
+
+#if XSH_HAS_PERL(5, 13, 7)
+
+static void su_uplevel_storage_delete(pTHX_ su_uplevel_ud *sud) {
+#define su_uplevel_storage_delete(S) su_uplevel_storage_delete(aTHX_ (S))
+ dXSH_CXT;
+
+ sud->tmp_uid_storage = XSH_CXT.uid_storage;
+ XSH_CXT.uid_storage = sud->old_uid_storage;
+ {
+ su_uid *map;
+ STRLEN i, alloc;
+ map = sud->tmp_uid_storage.map;
+ alloc = sud->tmp_uid_storage.alloc;
+ for (i = 0; i < alloc; ++i)
+ map[i].flags &= ~SU_UID_ACTIVE;
+ }
+ XSH_CXT.uplevel_storage.top = sud->next;
+
+ if (XSH_CXT.uplevel_storage.count >= SU_UPLEVEL_STORAGE_SIZE) {
+ su_uplevel_ud_delete(sud);
+ } else {
+ sud->next = XSH_CXT.uplevel_storage.root;
+ XSH_CXT.uplevel_storage.root = sud;
+ XSH_CXT.uplevel_storage.count++;
+ }
+}
+
+#endif
+
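+/* Statically check whether an op tree contains a goto op. Gotos inside
+ * eval or try blocks are ignored since they are forbidden at run time
+ * anyway; returns 1 as soon as one is found elsewhere. */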
+static int su_uplevel_goto_static(const OP *o) {
+ for (; o; o = OpSIBLING(o)) {
+ /* goto ops are unops with kids. */
+ if (!(o->op_flags & OPf_KIDS))
+ continue;
+
+ switch (o->op_type) {
+ case OP_LEAVEEVAL:
+ case OP_LEAVETRY:
+ /* Don't care about gotos inside eval, as they are forbidden at run time. */
+ break;
+ case OP_GOTO:
+ return 1;
+ default:
+ if (su_uplevel_goto_static(((const UNOP *) o)->op_first))
+ return 1;
+ break;
+ }
+ }
+
+ return 0;
+}
+
+#if !SU_HAS_NEW_CXT && SU_UPLEVEL_HIJACKS_RUNOPS
+
+static int su_uplevel_goto_runops(pTHX) {
+#define su_uplevel_goto_runops() su_uplevel_goto_runops(aTHX)
+ register OP *op;
+ dVAR;
+
+ op = PL_op;
+ do {
+ if (op->op_type == OP_GOTO) {
+ AV *argarray = NULL;
+ I32 cxix;
+
+ for (cxix = cxstack_ix; cxix >= 0; --cxix) {
+ const PERL_CONTEXT *cx = cxstack + cxix;
+
+ switch (CxTYPE(cx)) {
+ case CXt_SUB:
+ if (CxHASARGS(cx)) {
+ argarray = cx->blk_sub.argarray;
+ goto done;
+ }
+ break;
+ case CXt_EVAL:
+ case CXt_FORMAT:
+ goto done;
+ default:
+ break;
+ }
+ }
+
+done:
+ if (argarray) {
+ dXSH_CXT;
+
+ if (XSH_CXT.uplevel_storage.top->cxix == cxix) {
+ AV *args = GvAV(PL_defgv);
+ I32 items = AvFILLp(args);
+
+ av_extend(argarray, items);
+ Copy(AvARRAY(args), AvARRAY(argarray), items + 1, SV *);
+ AvFILLp(argarray) = items;
+ }
+ }
+ }
+
+ PL_op = op = op->op_ppaddr(aTHX);
+
+#if !XSH_HAS_PERL(5, 13, 0)
+ PERL_ASYNC_CHECK();
+#endif
+ } while (op);
+
+ TAINT_NOT;
+
+ return 0;
+}
+
+#endif /* SU_UPLEVEL_HIJACKS_RUNOPS */
+
+#define su_at_underscore(C) PadARRAY(PadlistARRAY(CvPADLIST(C))[CvDEPTH(C)])[0]
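+/* i.e. the first pad entry (the slot that holds @_) of C's pad frame at its
+ * current depth. */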
+
+#if SU_HAS_NEW_CXT
+
+static void su_uplevel_restore_new(pTHX_ void *sus_) {
+ su_uplevel_ud *sud = sus_;
+ I32 i;
+ U8 *saved_cxtypes = sud->cxtypes;
+
+ for (i = 0; i < sud->gap; i++) {
+ PERL_CONTEXT *cx = cxstack + sud->cxix + i;
+ XSH_D(xsh_debug_log("su_uplevel_restore: i=%d cxix=%d type %s => %s\n",
+ i, cx-cxstack, SU_CX_TYPENAME(CxTYPE(cx)),
+ SU_CX_TYPENAME(saved_cxtypes[i] & CXTYPEMASK)));
+ cx->cx_type = saved_cxtypes[i];
+ }
+ Safefree(saved_cxtypes);
+
+ /* renamed is a copy of callback, but they share the same CvPADLIST.
+ * At this point any calls to renamed should have exited, so its depth
+ * is back to that of callback, and it is now safe to free renamed and
+ * then drop the extra reference count that was keeping callback alive.
+ */
+ assert(sud->renamed);
+ assert(sud->callback);
+
+ CvDEPTH(sud->callback)--;
+ assert(CvDEPTH(sud->callback) == CvDEPTH(sud->renamed));
+ if (!CvISXSUB(sud->renamed)) {
+ CvDEPTH(sud->renamed) = 0;
+ CvPADLIST(sud->renamed) = NULL;
+ }
+ SvREFCNT_dec(sud->renamed);
+ SvREFCNT_dec(sud->callback);
+
+ SU_UPLEVEL_RESTORE(curcop);
+
+ su_uplevel_storage_delete(sud);
+
+ return;
+}
+
+#else
+
+/* 5.23.7 and earlier */
+
+static void su_uplevel_restore_old(pTHX_ void *sus_) {
+ su_uplevel_ud *sud = sus_;
+ PERL_SI *cur = sud->old_curstackinfo;
+ PERL_SI *si = sud->si;
+
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+ if (PL_runops == su_uplevel_goto_runops)
+ PL_runops = sud->old_runops;
+#endif
+
+ if (sud->callback) {
+ PERL_CONTEXT *cx = cxstack + sud->cxix;
+ AV *argarray = MUTABLE_AV(su_at_underscore(sud->callback));
+
+ /* We have to fix the pad entry for @_ in the original callback because it
+ * may have been reified. */
+ if (AvREAL(argarray)) {
+ const I32 fill = AvFILLp(argarray);
+ SvREFCNT_dec(argarray);
+ argarray = newAV();
+ AvREAL_off(argarray);
+ AvREIFY_on(argarray);
+ av_extend(argarray, fill);
+ su_at_underscore(sud->callback) = MUTABLE_SV(argarray);
+ } else {
+ CLEAR_ARGARRAY(argarray);
+ }
+
+ /* If the old cv member is our renamed CV, it means that this place has been
+ * reached without a goto() happening, and the old argarray member is
+ * actually our fake argarray. Destroy it properly in that case. */
+ if (cx->blk_sub.cv == sud->renamed) {
+ SvREFCNT_dec(cx->blk_sub.argarray);
+ cx->blk_sub.argarray = argarray;
+ }
+
+ CvDEPTH(sud->callback)--;
+ SvREFCNT_dec(sud->callback);
+ }
+
+ /* Free the renamed CV. We must do it ourselves so that we can force the
+ * depth to be 0, or perl would complain about it being "still in use".
+ * But we *know* that it cannot be so. */
+ if (sud->renamed) {
+ if (!CvISXSUB(sud->renamed)) {
+ CvDEPTH(sud->renamed) = 0;
+ CvPADLIST(sud->renamed) = NULL;
+ }
+ SvREFCNT_dec(sud->renamed);
+ }
+
+ CATCH_SET(sud->old_catch);
+
+ SU_UPLEVEL_RESTORE(op);
+
+ /* stack_grow() wants PL_curstack so restore the old stack first */
+ if (PL_curstackinfo == si) {
+ PL_curstack = cur->si_stack;
+ if (sud->old_mainstack)
+ SU_UPLEVEL_RESTORE(mainstack);
+ SU_UPLEVEL_RESTORE(curstackinfo);
+
+ if (sud->died) {
+ CV *target = sud->target;
+ I32 levels = 0, i;
+
+ /* When we die, the depth of the target CV is not updated because of the
+ * stack switcheroo. So we have to look at all the frames between the
+ * uplevel call and the catch block to count how many call frames to the
+ * target CV were skipped. */
+ for (i = cur->si_cxix; i > sud->cxix; i--) {
+ register const PERL_CONTEXT *cx = cxstack + i;
+
+ if (CxTYPE(cx) == CXt_SUB) {
+ if (cx->blk_sub.cv == target)
+ ++levels;
+ }
+ }
+
+ /* If we died, the replacement stack was already unwound to the first
+ * eval frame, and all the contexts down there were popped. We don't have
+ * to manually pop any context of the original stack, because they must
+ * have been in the replacement stack as well (since the second was copied
+ * from the first). Thus we only have to make sure the original stack index
+ * points to the context just below the first eval scope under the target
+ * frame. */
+ for (; i >= 0; i--) {
+ register const PERL_CONTEXT *cx = cxstack + i;
+
+ switch (CxTYPE(cx)) {
+ case CXt_SUB:
+ if (cx->blk_sub.cv == target)
+ ++levels;
+ break;
+ case CXt_EVAL:
+ goto found_it;
+ break;
+ default:
+ break;
+ }
+ }
+
+found_it:
+ CvDEPTH(target) = sud->target_depth - levels;
+ PL_curstackinfo->si_cxix = i - 1;
+
+#if !XSH_HAS_PERL(5, 13, 1)
+ /* Since $@ was maybe localized between the target frame and the uplevel
+ * call, we forcefully flush the save stack to get rid of it and then
+ * reset $@ to its proper value. Note that the call to
+ * su_uplevel_restore() must happen before the "reset $@" item of the save
+ * stack is processed, as uplevel was called after the localization.
+ * Andrew's changes to how $@ was handled, which were mainly integrated
+ * between perl 5.13.0 and 5.13.1, fixed this. */
+ if (ERRSV && SvTRUE(ERRSV)) {
+ register const PERL_CONTEXT *cx = cxstack + i; /* This is the eval scope */
+ SV *errsv = SvREFCNT_inc(ERRSV);
+ PL_scopestack_ix = cx->blk_oldscopesp;
+ leave_scope(PL_scopestack[PL_scopestack_ix]);
+ sv_setsv(ERRSV, errsv);
+ SvREFCNT_dec(errsv);
+ }
+#endif
+ }
+ }
+
+ SU_UPLEVEL_RESTORE(curcop);
+
+ SvREFCNT_dec(sud->target);
+
+ PL_stack_base = AvARRAY(cur->si_stack);
+ PL_stack_sp = PL_stack_base + AvFILLp(cur->si_stack);
+ PL_stack_max = PL_stack_base + AvMAX(cur->si_stack);
+
+ /* When an exception is thrown from the uplevel'd subroutine,
+ * su_uplevel_restore() may be called by the LEAVE in die_unwind() (renamed
+ * die_where() in more recent perls), which has the sad habit of keeping a
+ * pointer to the current context frame across this call. This means that we
+ * can't free the temporary context stack we used for the uplevel call right
+ * now, or that saved pointer would end up pointing to garbage. */
+#if XSH_HAS_PERL(5, 13, 7)
+ /* This issue has been fixed in perl with commit 8f89e5a9, which was made
+ * public in perl 5.13.7. */
+ su_uplevel_storage_delete(sud);
+#else
+ /* Otherwise, we just enqueue it back in the global storage list. */
+ {
+ dXSH_CXT;
+
+ sud->tmp_uid_storage = XSH_CXT.uid_storage;
+ XSH_CXT.uid_storage = sud->old_uid_storage;
+
+ XSH_CXT.uplevel_storage.top = sud->next;
+ sud->next = XSH_CXT.uplevel_storage.root;
+ XSH_CXT.uplevel_storage.root = sud;
+ XSH_CXT.uplevel_storage.count++;
+ }
+#endif
+
+ return;
+}
+
+#endif
+
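+/* Clone the CV 'proto' into a new CV that shares its op tree and pad list,
+ * but whose GV is set to 'gv', so that the clone shows up under a different
+ * name in caller() and stack traces. */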
+static CV *su_cv_clone(pTHX_ CV *proto, GV *gv) {
+#define su_cv_clone(P, G) su_cv_clone(aTHX_ (P), (G))
+ dVAR;
+ CV *cv;
+
+ cv = MUTABLE_CV(newSV_type(SvTYPE(proto)));
+
+ CvFLAGS(cv) = CvFLAGS(proto);
+#ifdef CVf_CVGV_RC
+ CvFLAGS(cv) &= ~CVf_CVGV_RC;
+#endif
+ CvDEPTH(cv) = CvDEPTH(proto);
+#ifdef USE_ITHREADS
+ CvFILE(cv) = CvISXSUB(proto) ? CvFILE(proto) : savepv(CvFILE(proto));
+#else
+ CvFILE(cv) = CvFILE(proto);
+#endif
+
+ CvGV_set(cv, gv);
+#if SU_RELEASE && XSH_HAS_PERL_EXACT(5, 21, 4)
+ CvNAMED_off(cv);
+#endif
+ CvSTASH_set(cv, CvSTASH(proto));
+ /* Commit 4c74a7df, made public with perl 5.13.3, began to add backrefs to
+ * stashes. CvSTASH_set() started to do it as well with commit c68d95645
+ * (which was part of perl 5.13.7). */
+#if XSH_HAS_PERL(5, 13, 3) && !XSH_HAS_PERL(5, 13, 7)
+ if (CvSTASH(proto))
+ Perl_sv_add_backref(aTHX_ CvSTASH(proto), MUTABLE_SV(cv));
+#endif
+
+ if (CvISXSUB(proto)) {
+ CvXSUB(cv) = CvXSUB(proto);
+ CvXSUBANY(cv) = CvXSUBANY(proto);
+ } else {
+ OP_REFCNT_LOCK;
+ CvROOT(cv) = OpREFCNT_inc(CvROOT(proto));
+ OP_REFCNT_UNLOCK;
+ CvSTART(cv) = CvSTART(proto);
+ CvPADLIST(cv) = CvPADLIST(proto);
+ }
+ CvOUTSIDE(cv) = CvOUTSIDE(proto);
+#ifdef CVf_WEAKOUTSIDE
+ if (!(CvFLAGS(proto) & CVf_WEAKOUTSIDE))
+#endif
+ SvREFCNT_inc_simple_void(CvOUTSIDE(cv));
+#ifdef CvOUTSIDE_SEQ
+ CvOUTSIDE_SEQ(cv) = CvOUTSIDE_SEQ(proto);
+#endif
+
+ if (SvPOK(proto))
+ sv_setpvn(MUTABLE_SV(cv), SvPVX_const(proto), SvCUR(proto));
+
+#ifdef CvCONST
+ if (CvCONST(cv))
+ CvCONST_off(cv);
+#endif
+
+ return cv;
+}
+
+#if SU_HAS_NEW_CXT
+
+/* this one-shot runops "loop" is designed to be called just before
+ * execution of the first op following an uplevel()'s entersub. It gets a
+ * chance to fix up the args as seen by caller(), before immediately
+ * falling through to the previous runops loop. Note that pp_entersub is
+ * called directly by call_sv() rather than being called from a runops
+ * loop.
+ */
+
+static int su_uplevel_runops_hook_entersub(pTHX) {
+ OP *op = PL_op;
+ dXSH_CXT;
+ su_uplevel_ud *sud = XSH_CXT.uplevel_storage.top;
+
+ /* Create a new array containing a copy of the original sub's call args,
+ * then stick it in PL_curpad[0] of the current running sub so that
+ * they will be seen by caller().
+ */
+ assert(sud);
+ if (sud->argarray) {
+ I32 fill;
+ AV *av = newAV();
+ AvREAL_off(av);
+ AvREIFY_on(av);
+
+ fill = AvFILLp(sud->argarray);
+ if (fill >= 0) {
+ av_extend(av, fill);
+ Copy(AvARRAY(sud->argarray), AvARRAY(av), fill + 1, SV *);
+ AvFILLp(av) = fill;
+ }
+
+#if !XSH_HAS_PERL(5, 37, 10)
+ /* should be referenced by PL_curpad[0] and *_ */
+ assert(SvREFCNT(PL_curpad[0]) > 1);
+ SvREFCNT_dec(PL_curpad[0]);
+#endif
+
+ PL_curpad[0] = (SV *) av;
+ }
+
+ /* undo the temporary runops hook and fall through to a real runops loop. */
+ assert(sud->old_runops != su_uplevel_runops_hook_entersub);
+ PL_runops = sud->old_runops;
+
+ CALLRUNOPS(aTHX);
+
+ return 0;
+}
+
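+/* Call 'callback' with 'args' arguments, pretending the call happens in the
+ * scope of the context at 'cxix': the context frames in between are
+ * temporarily nulled out so that caller() does not see them. */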
+static I32 su_uplevel_new(pTHX_ CV *callback, I32 cxix, I32 args) {
+#define su_uplevel_new(CB, CX, A) su_uplevel_new(aTHX_ (CB), (CX), (A))
+ su_uplevel_ud *sud;
+ U8 *saved_cxtypes;
+ I32 i, ret;
+ I32 gimme;
+ CV *base_cv = cxstack[cxix].blk_sub.cv;
+ dSP;
+
+ assert(CxTYPE(&cxstack[cxix]) == CXt_SUB);
+
+ ENTER;
+
+ gimme = GIMME_V;
+
+ /* At this point SP points to the top arg.
+ * Shuffle the args down by one, eliminating the CV slot */
+ Move(SP - args + 1, SP - args, args, SV *);
+ SP--;
+ PUSHMARK(SP - args);
+ PUTBACK;
+
+ sud = su_uplevel_storage_new(cxix);
+
+ sud->cxix = cxix;
+ sud->callback = (CV *) SvREFCNT_inc_simple(callback);
+ sud->renamed = NULL;
+ sud->gap = cxstack_ix - cxix + 1;
+ sud->argarray = NULL;
+
+ Newx(saved_cxtypes, sud->gap, U8);
+ sud->cxtypes = saved_cxtypes;
+
+ SAVEDESTRUCTOR_X(su_uplevel_restore_new, sud);
+ SU_UPLEVEL_SAVE(curcop, cxstack[cxix].blk_oldcop);
+
+ /* Temporarily change the type of any contexts to NULL, so they're
+ * invisible to caller() etc. */
+ for (i = 0; i < sud->gap; i++) {
+ PERL_CONTEXT *cx = cxstack + cxix + i;
+ saved_cxtypes[i] = cx->cx_type; /* save type and flags */
+ XSH_D(xsh_debug_log("su_uplevel: i=%d cxix=%d type %-11s => %s\n",
+ i, cx-cxstack, SU_CX_TYPENAME(CxTYPE(cx)), SU_CX_TYPENAME(CXt_NULL)));
+ cx->cx_type = (CXt_NULL | CXp_SU_UPLEVEL_NULLED);
+ }
+
+ /* create a copy of the callback with a doctored name (as seen by
+ * caller). It shares the padlist with callback */
+ sud->renamed = su_cv_clone(callback, CvGV(base_cv));
+ sud->old_runops = PL_runops;
+
+ if (!CvISXSUB(sud->renamed) && CxHASARGS(&cxstack[cxix])) {
+ sud->argarray = (AV *) su_at_underscore(base_cv);
+ assert(PL_runops != su_uplevel_runops_hook_entersub);
+ /* set up a one-shot runops hook so that we can fake up the
+ * args as seen by caller() on return from pp_entersub */
+ PL_runops = su_uplevel_runops_hook_entersub;
+ }
+
+ CvDEPTH(callback)++; /* match what CvDEPTH(sud->renamed) is about to become */
+
+ ret = call_sv((SV *) sud->renamed, gimme);
+
+ LEAVE;
+
+ return ret;
+}
+
+#else
+
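+/* perl 5.23.7 and earlier: run 'callback' in the scope of the context at
+ * 'cxix' by executing it on a temporary side stack that replicates the
+ * stacks as they were at that context. */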
+static I32 su_uplevel_old(pTHX_ CV *callback, I32 cxix, I32 args) {
+#define su_uplevel_old(CB, CX, A) su_uplevel_old(aTHX_ (CB), (CX), (A))
+ su_uplevel_ud *sud;
+ const PERL_CONTEXT *cx = cxstack + cxix;
+ PERL_SI *si;
+ PERL_SI *cur = PL_curstackinfo;
+ SV **old_stack_sp;
+ CV *target;
+ CV *renamed;
+ UNOP sub_op;
+ I32 gimme;
+ I32 old_mark, new_mark;
+ I32 ret;
+ dSP;
+
+ ENTER;
+
+ gimme = GIMME_V;
+ /* Make PL_stack_sp point just before the CV. */
+ PL_stack_sp -= args + 1;
+ old_mark = AvFILLp(PL_curstack) = PL_stack_sp - PL_stack_base;
+ SPAGAIN;
+
+ sud = su_uplevel_storage_new(cxix);
+
+ sud->cxix = cxix;
+ sud->died = 1;
+ sud->callback = NULL;
+ sud->renamed = NULL;
+ SAVEDESTRUCTOR_X(su_uplevel_restore_old, sud);
+
+ si = sud->si;
+
+ si->si_type = cur->si_type;
+ si->si_next = NULL;
+ si->si_prev = cur->si_prev;
+#ifdef DEBUGGING
+ si->si_markoff = cx->blk_oldmarksp;
+#endif
+
+ /* Allocate enough space for all the elements of the original stack up to the
+ * target context, plus the forthcoming arguments. */
+ new_mark = cx->blk_oldsp;
+ av_extend(si->si_stack, new_mark + 1 + args + 1);
+ Copy(AvARRAY(PL_curstack), AvARRAY(si->si_stack), new_mark + 1, SV *);
+ AvFILLp(si->si_stack) = new_mark;
+ SU_POISON(AvARRAY(si->si_stack) + new_mark + 1, args + 1, SV *);
+
+ /* Specialized SWITCHSTACK() */
+ PL_stack_base = AvARRAY(si->si_stack);
+ old_stack_sp = PL_stack_sp;
+ PL_stack_sp = PL_stack_base + AvFILLp(si->si_stack);
+ PL_stack_max = PL_stack_base + AvMAX(si->si_stack);
+ SPAGAIN;
+
+ /* Copy the context stack up to the context just below the target. */
+ si->si_cxix = (cxix < 0) ? -1 : (cxix - 1);
+ if (si->si_cxmax < cxix) {
+ /* The max size must be at least two so that GROW(max) = (max*3)/2 > max */
+ si->si_cxmax = (cxix < 4) ? 4 : cxix;
+ Renew(si->si_cxstack, si->si_cxmax + 1, PERL_CONTEXT);
+ }
+ Copy(cur->si_cxstack, si->si_cxstack, cxix, PERL_CONTEXT);
+ SU_POISON(si->si_cxstack + cxix, si->si_cxmax + 1 - cxix, PERL_CONTEXT);
+
+ target = cx->blk_sub.cv;
+ sud->target = (CV *) SvREFCNT_inc(target);
+ sud->target_depth = CvDEPTH(target);
+
+ /* blk_oldcop is essentially needed for caller() and stack traces. It has no
+ * run-time implication, since PL_curcop will be overwritten as soon as we
+ * enter a sub (a sub starts by a nextstate/dbstate). Hence it's safe to just
+ * make it point to the blk_oldcop for the target frame, so that caller()
+ * reports the right file name, line number and lexical hints. */
+ SU_UPLEVEL_SAVE(curcop, cx->blk_oldcop);
+ /* Don't reset PL_markstack_ptr, or we would overwrite the mark stack below
+ * this point. Don't reset PL_curpm either, we want the most recent matches. */
+
+ SU_UPLEVEL_SAVE(curstackinfo, si);
+ /* If those two are equal, we need to fool POPSTACK_TO() */
+ if (PL_mainstack == PL_curstack)
+ SU_UPLEVEL_SAVE(mainstack, si->si_stack);
+ else
+ sud->old_mainstack = NULL;
+ PL_curstack = si->si_stack;
+
+ renamed = su_cv_clone(callback, CvGV(target));
+ sud->renamed = renamed;
+
+ PUSHMARK(SP);
+ /* Both SP and old_stack_sp point just before the CV. */
+ Copy(old_stack_sp + 2, SP + 1, args, SV *);
+ SP += args;
+ PUSHs((SV *) renamed);
+ PUTBACK;
+
+ Zero(&sub_op, 1, UNOP);
+ sub_op.op_type = OP_ENTERSUB;
+ sub_op.op_next = NULL;
+ sub_op.op_flags = OP_GIMME_REVERSE(gimme) | OPf_STACKED;
+ if (PL_DBsub)
+ sub_op.op_flags |= OPpENTERSUB_DB;
+
+ SU_UPLEVEL_SAVE(op, (OP *) &sub_op);
+
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+ sud->old_runops = PL_runops;
+#endif
+
+ sud->old_catch = CATCH_GET;
+ CATCH_SET(TRUE);
+
+ if ((PL_op = PL_ppaddr[OP_ENTERSUB](aTHX))) {
+ PERL_CONTEXT *sub_cx = cxstack + cxstack_ix;
+ AV *argarray = cx->blk_sub.argarray;
+
+ /* If pp_entersub() returns a non-null OP, it means that the callback is not
+ * an XSUB. */
+
+ sud->callback = MUTABLE_CV(SvREFCNT_inc(callback));
+ CvDEPTH(callback)++;
+
+ if (CxHASARGS(cx) && argarray) {
+ /* The call to pp_entersub() has saved the current @_ (in XS terms,
+ * GvAV(PL_defgv)) in the savearray member, and has created a new argarray
+ * with what we put on the stack. But we want to fake up the same arguments
+ * as the ones in use at the context we uplevel to, so we replace the
+ * argarray with an unreal copy of the original @_. */
+ AV *av = newAV();
+ AvREAL_off(av);
+ AvREIFY_on(av);
+ av_extend(av, AvMAX(argarray));
+ AvFILLp(av) = AvFILLp(argarray);
+ Copy(AvARRAY(argarray), AvARRAY(av), AvFILLp(av) + 1, SV *);
+ sub_cx->blk_sub.argarray = av;
+ } else {
+ SvREFCNT_inc_simple_void(sub_cx->blk_sub.argarray);
+ }
+
+ if (su_uplevel_goto_static(CvROOT(renamed))) {
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+ if (PL_runops != PL_runops_std) {
+ if (PL_runops == PL_runops_dbg) {
+ if (PL_debug)
+ croak("uplevel() can't execute code that calls goto when debugging flags are set");
+ } else if (PL_runops != su_uplevel_goto_runops)
+ croak("uplevel() can't execute code that calls goto with a custom runloop");
+ }
+
+ PL_runops = su_uplevel_goto_runops;
+#else /* SU_UPLEVEL_HIJACKS_RUNOPS */
+ croak("uplevel() can't execute code that calls goto before perl 5.8");
+#endif /* !SU_UPLEVEL_HIJACKS_RUNOPS */
+ }
+
+ CALLRUNOPS(aTHX);
+ }
+
+ sud->died = 0;
+
+ ret = PL_stack_sp - (PL_stack_base + new_mark);
+ if (ret > 0) {
+ AV *old_stack = sud->old_curstackinfo->si_stack;
+
+ if (old_mark + ret > AvMAX(old_stack)) {
+ /* Specialized EXTEND(old_sp, ret) */
+ av_extend(old_stack, old_mark + ret + 1);
+ old_stack_sp = AvARRAY(old_stack) + old_mark;
+ }
+
+ Copy(PL_stack_sp - ret + 1, old_stack_sp + 1, ret, SV *);
+ PL_stack_sp += ret;
+ AvFILLp(old_stack) += ret;
+ }
+
+ LEAVE;
+
+ return ret;
+}
+
+#endif
+
+/* --- Unique context ID --------------------------------------------------- */
+
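+/* Return the UID slot for the given context depth, growing (and
+ * zero-initializing) the uid map as needed. */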
+static su_uid *su_uid_storage_fetch(pTHX_ UV depth) {
+#define su_uid_storage_fetch(D) su_uid_storage_fetch(aTHX_ (D))
+ su_uid *map;
+ STRLEN alloc;
+ dXSH_CXT;
+
+ map = XSH_CXT.uid_storage.map;
+ alloc = XSH_CXT.uid_storage.alloc;
+
+ if (depth >= alloc) {
+ STRLEN i;
+
+ Renew(map, depth + 1, su_uid);
+ for (i = alloc; i <= depth; ++i) {
+ map[i].seq = 0;
+ map[i].flags = 0;
+ }
+
+ XSH_CXT.uid_storage.map = map;
+ XSH_CXT.uid_storage.alloc = depth + 1;
+ }
+
+ if (depth >= XSH_CXT.uid_storage.used)
+ XSH_CXT.uid_storage.used = depth + 1;
+
+ return map + depth;
+}
+
+static int su_uid_storage_check(pTHX_ UV depth, UV seq) {
+#define su_uid_storage_check(D, S) su_uid_storage_check(aTHX_ (D), (S))
+ su_uid *uid;
+ dXSH_CXT;
+
+ if (depth >= XSH_CXT.uid_storage.used)
+ return 0;
+
+ uid = XSH_CXT.uid_storage.map + depth;
+
+ return (uid->seq == seq) && (uid->flags & SU_UID_ACTIVE);
+}
+
+static SV *su_uid_get(pTHX_ I32 cxix) {
+#define su_uid_get(I) su_uid_get(aTHX_ (I))
+ su_uid *uid;
+ SV *uid_sv;
+ UV depth;
+
+ depth = su_uid_depth(cxix);
+ uid = su_uid_storage_fetch(depth);
+
+ if (!(uid->flags & SU_UID_ACTIVE)) {
+ su_ud_uid *ud;
+
+ uid->seq = su_uid_seq_next(depth);
+ uid->flags |= SU_UID_ACTIVE;
+
+ Newx(ud, 1, su_ud_uid);
+ SU_UD_TYPE(ud) = SU_UD_TYPE_UID;
+ ud->idx = depth;
+ su_init(ud, cxix, SU_SAVE_DESTRUCTOR_SIZE);
+ }
+
+ uid_sv = sv_newmortal();
+ sv_setpvf(uid_sv, "%"UVuf"-%"UVuf, depth, uid->seq);
+
+ return uid_sv;
+}
+
+#ifdef grok_number