+};
+
+#endif /* XSH_DEBUG */
+
+/* push at least 'size' slots worth of padding onto the savestack */
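+/* (illustrative arithmetic only: if SU_SAVE_ALLOC_SIZE were 3 slots, a
+ * request for size == 5 would push one SAVEt_ALLOC entry whose bookkeeping
+ * occupies 3 slots and whose payload occupies the remaining 2, i.e. exactly
+ * 5 slots; requests below SU_SAVE_ALLOC_SIZE + 1 are rounded up to that
+ * minimum since the payload must be at least one slot) */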
+
+static void su_ss_push_padding(pTHX_ void *ud, I32 size) {
+ if (size <= 0)
+ return;
+ if (size < SU_SAVE_ALLOC_SIZE + 1) /* minimum possible SAVEt_ALLOC */
+ size = SU_SAVE_ALLOC_SIZE + 1;
+ XSH_D(su_debug_log(
+ "%p: push %2d padding at save_ix=%d\n",
+ ud, size, PL_savestack_ix));
+ save_alloc((size - SU_SAVE_ALLOC_SIZE)*sizeof(*PL_savestack), 0);
+}
+
+static void su_pop(pTHX_ void *ud);
+
+/* push an su_pop destructor onto the savestack with suitable padding.
+ * first indicates that this is the first push of a destructor */
+
+static void su_ss_push_destructor(pTHX_ void *ud, I32 depth, bool first) {
+ su_ud_origin_elem *origin = SU_UD_ORIGIN(ud);
+
+ assert(first || origin[depth+1].orig_ix == PL_savestack_ix);
+ su_ss_push_padding(aTHX_ ud,
+ (origin[depth].orig_ix + origin[depth].offset) - PL_savestack_ix);
+ XSH_D(su_debug_log(
+ "%p: push destructor at save_ix=%d depth=%d scope_ix=%d\n",
+ ud, PL_savestack_ix, depth, PL_scopestack_ix));
+ SAVEDESTRUCTOR_X(su_pop, ud);
+ assert(first ||
+ PL_savestack_ix <= origin[depth+1].orig_ix + origin[depth+1].offset);
+}
+
+/* this is called during each leave_scope() via SAVEDESTRUCTOR_X */
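+/* each invocation handles one depth level: it clears whatever is left on the
+ * savestack above that level's original boundary, then either re-arms itself
+ * one level down (depth > 0) or, on the last level, pushes the real action
+ * (reap callback, localization or uid drop) in the space it reserved */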
+
+static void su_pop(pTHX_ void *ud) {
+#define su_pop(U) su_pop(aTHX_ (U))
+ I32 depth, base, mark;
+ su_ud_origin_elem *origin;
+
+ depth = SU_UD_DEPTH(ud);
+ origin = SU_UD_ORIGIN(ud);
+
+ XSH_D(su_debug_log( "%p: ### su_pop: depth=%d\n", ud, depth));
+
+ depth--;
+ mark = PL_savestack_ix;
+ base = origin[depth].orig_ix;
+
+ XSH_D(su_debug_log("%p: residual savestack frame is %d(+%d)..%d\n",
+ ud, base, origin[depth].offset, mark));
+
+ if (base < mark) {
+ XSH_D(su_debug_log("%p: clear leftovers at %d..%d\n", ud, base, mark));
+ leave_scope(base);
+ }
+ assert(PL_savestack_ix == base);
+
+ SU_UD_DEPTH(ud) = depth;
+
+ if (depth > 0) {
+ su_ss_push_destructor(aTHX_ ud, depth-1, 0);
+ } else {
+ I32 offset = origin[0].offset; /* grab value before origin is freed */
+ switch (SU_UD_TYPE(ud)) {
+ case SU_UD_TYPE_REAP: {
+ XSH_D(su_debug_log("%p: === reap\n%p: depth=%d scope_ix=%d save_ix=%d\n",
+ ud, ud, SU_UD_DEPTH(ud), PL_scopestack_ix, PL_savestack_ix));
+ SAVEDESTRUCTOR_X(su_call, SU_UD_REAP_CB(ud));
+ SU_UD_FREE(ud);
+ break;
+ }
+ case SU_UD_TYPE_LOCALIZE:
+ su_localize(ud);
+ SU_UD_LOCALIZE_FREE(ud);
+ break;
+ case SU_UD_TYPE_UID:
+ SAVEDESTRUCTOR_X(su_uid_drop, ud);
+ break;
+ }
+ /* perl 5.23.8 onwards is very fussy about leave_scope() returning with
+ * PL_savestack_ix exactly where it expects it to be */
+ if (PL_savestack_ix < base + offset) {
+ I32 gap = (base + offset) - PL_savestack_ix;
+ assert(gap >= SU_SAVE_ALLOC_SIZE + 1);
+ su_ss_push_padding(aTHX_ ud, gap);
+ }
+ assert(PL_savestack_ix == base + offset);
+ }
+
+ XSH_D(su_debug_log("%p: end pop: ss_ix=%d\n", ud, PL_savestack_ix));
+}
+
+/* --- Initialize the stack and the action userdata ------------------------ */
+
+static void su_init(pTHX_ void *ud, I32 cxix, I32 size) {
+#define su_init(U, C, S) su_init(aTHX_ (U), (C), (S))
+ I32 i, depth;
+ su_ud_origin_elem *origin;
+ I32 cur_cx_ix;
+ I32 cur_scope_ix;
+
+ XSH_D(su_debug_log("%p: ### su_init(cxix=%d, size=%d)\n", ud, cxix, size));
+
+ depth = PL_scopestack_ix - cxstack[cxix].blk_oldscopesp;
+#if SU_HAS_NEW_CXT
+ depth += (cxstack_ix - cxix); /* each context frame holds 1 scope */
+#endif
+ XSH_D(su_debug_log(
+ "%p: going down by depth=%d with scope_ix=%d save_ix=%d\n",
+ ud, depth, PL_scopestack_ix, PL_savestack_ix));
+
+ /* Artificially increase the position of each savestack frame boundary
+ * to make space to squeeze in a 'size' sized entry (first one) or a
+ * SU_SAVE_DESTRUCTOR_SIZE sized entry (higher ones). In addition, make
+ * sure that each boundary is higher than the previous, so that *every*
+ * scope exit triggers a call to leave_scope(). Each scope exit will call
+ * the su_pop() destructor, which is responsible for: freeing any
+ * savestack entries below the artificially raised floor; then pushing a
+ * new destructor in that space. On the final pop, the "real" savestack
+ * action is pushed rather than another destructor.
+ *
+ * On older perls, savestack frame boundaries are specified by a range of
+ * scopestack entries (one per ENTER). Each scope entry typically does
+ * one or two ENTERs followed by a PUSHBLOCK. Thus the
+ * cx->blku_oldscopesp field set by the PUSHBLOCK points to the next free
+ * slot, which is one above the last of the ENTERs. In the debugging
+ * output we indicate that by bracketing the ENTERs directly preceding
+ * that context push with dashes, e.g.:
+ *
+ * 13b98d8: ------------------
+ * 13b98d8: ENTER origin[0] scope[3] savestack=3+3
+ * 13b98d8: ENTER origin[1] scope[4] savestack=9+3
+ * 13b98d8: cx=1 LOOP_LAZYIV
+ * 13b98d8: ------------------
+ *
+ * In addition to context stack pushes, other activities can push ENTERs
+ * too, such as grep expr and XS sub calls.
+ *
+ * For newer perls (SU_HAS_NEW_CXT), a context push no longer does any
+ * ENTERs; instead the old savestack position is stored in the new
+ * cx->blk_oldsaveix field; this field therefore specifies a savestack
+ * frame boundary point in addition to the scopestack entries, and will
+ * also need adjusting.
+ *
+ * We record the original and modified position of each boundary in the
+ * origin array.
+ *
+ * The passed cxix argument represents the scope we wish to inject into;
+ * we have to adjust all the savestack frame boundaries above (but not
+ * including) that context.
+ */
+
+ Newx(origin, depth, su_ud_origin_elem);
+
+ cur_cx_ix = cxix;
+ cur_scope_ix = cxstack[cxix].blk_oldscopesp;
+#if SU_HAS_NEW_CXT
+ XSH_D(su_debug_log("%p: cx=%-2d %-11s\n",
+ ud, cur_cx_ix, SU_CXNAME(cxstack+cur_cx_ix)));
+ cur_cx_ix++;
+#endif
+
+ for (i = 0; cur_scope_ix < PL_scopestack_ix; i++) {
+ I32 *ixp;
+ I32 offset;
+
+#if SU_HAS_NEW_CXT
+
+ if ( cur_cx_ix <= cxstack_ix
+ && cur_scope_ix == cxstack[cur_cx_ix].blk_oldscopesp
+ )
+ ixp = &(cxstack[cur_cx_ix++].blk_oldsaveix);
+ else
+ ixp = &PL_scopestack[cur_scope_ix++]; /* an ENTER pushed after cur context */
+
+#else
+
+ XSH_D({
+ if (cur_cx_ix <= cxstack_ix) {
+ if (cur_scope_ix == cxstack[cur_cx_ix].blk_oldscopesp) {
+ su_debug_log(
+ "%p: cx=%-2d %s\n%p: ------------------\n",
+ ud, cur_cx_ix, SU_CXNAME(cxstack+cur_cx_ix), ud);
+ cur_cx_ix++;
+ }
+ else if (cur_scope_ix + su_cxt_enter_count[CxTYPE(cxstack+cur_cx_ix)]
+ == cxstack[cur_cx_ix].blk_oldscopesp)
+ su_debug_log("%p: ------------------\n", ud);
+ }
+ });
+ ixp = &PL_scopestack[cur_scope_ix++];
+
+#endif
+
+ if (i == 0)
+ offset = size;
+ else {
+ /* we have three constraints to satisfy:
+ * 1) Each adjusted boundary must be at least SU_SAVE_DESTRUCTOR_SIZE
+ * above its unadjusted value, so that there is space to inject a
+ * destructor into the outer scope.
+ * 2) Each adjusted boundary must be at least SU_SAVE_DESTRUCTOR_SIZE
+ * higher than the previous adjusted boundary, so that a new
+ * destructor can be added below the Nth adjusted frame boundary,
+ * but be within the (N-1)th adjusted frame and so be triggered on
+ * the next scope exit;
+ * 3) If the adjustment needs to be greater than SU_SAVE_DESTRUCTOR_SIZE,
+ * then it must exceed it by at least the minimum pad size, so that both
+ * a destructor and padding can be pushed.
+ */
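+ /* worked example with hypothetical sizes (SU_SAVE_DESTRUCTOR_SIZE == 3,
+ * SU_SAVE_ALLOC_SIZE == 3): if the previous boundary was raised to 12 and
+ * *ixp == 10, then pad = 12 - 10 = 2; rule 3 rounds it up to the minimum
+ * pad size of 4, giving offset = 3 + 4 = 7 and a new boundary of
+ * 10 + 7 = 17, at least SU_SAVE_DESTRUCTOR_SIZE above the previous one. */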
+ I32 pad;
+ offset = SU_SAVE_DESTRUCTOR_SIZE; /* rule 1 */
+ pad = (origin[i-1].orig_ix + origin[i-1].offset) + offset - (*ixp + offset);
+ if (pad > 0) { /* rule 2 */
+ if (pad < SU_SAVE_ALLOC_SIZE + 1) /* rule 3 */
+ pad = SU_SAVE_ALLOC_SIZE + 1;
+ offset += pad;
+ }
+ }
+
+ origin[i].offset = offset;
+ origin[i].orig_ix = *ixp;
+ *ixp += offset;
+
+#if SU_HAS_NEW_CXT
+ XSH_D({
+ if (ixp == &PL_scopestack[cur_scope_ix-1])
+ su_debug_log(
+ "%p: ENTER origin[%d] scope[%d] savestack=%d+%d\n",
+ ud, i, cur_scope_ix, origin[i].orig_ix, origin[i].offset);
+ else
+ su_debug_log(
+ "%p: cx=%-2d %-11s origin[%d] scope[%d] savestack=%d+%d\n",
+ ud, cur_cx_ix-1, SU_CXNAME(cxstack+cur_cx_ix-1),
+ i, cur_scope_ix, origin[i].orig_ix, origin[i].offset);
+ });
+#else
+ XSH_D(su_debug_log(
+ "%p: ENTER origin[%d] scope[%d] savestack=%d+%d\n",
+ ud, i, cur_scope_ix, origin[i].orig_ix, origin[i].offset));
+#endif
+
+ }
+
+ assert(i == depth);
+
+ SU_UD_DEPTH(ud) = depth;
+ SU_UD_ORIGIN(ud) = origin;
+
+ su_ss_push_destructor(aTHX_ ud, depth-1, 1);
+}
+
+/* --- Unwind stack -------------------------------------------------------- */
+
+static void su_unwind(pTHX_ void *ud_) {
+ dXSH_CXT;
+ I32 cxix = XSH_CXT.unwind_storage.cxix;
+ I32 items = XSH_CXT.unwind_storage.items;
+ I32 mark;
+
+ PERL_UNUSED_VAR(ud_);
+
+ PL_stack_sp = XSH_CXT.unwind_storage.savesp;
+#if XSH_HAS_PERL(5, 19, 4)
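+ /* bump and mortalize any non-temporary values about to be returned, so
+ * that they cannot be freed prematurely while the contexts are popped
+ * below */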
+ {
+ I32 i;
+ SV **sp = PL_stack_sp;
+ for (i = -items + 1; i <= 0; ++i)
+ if (!SvTEMP(sp[i]))
+ sv_2mortal(SvREFCNT_inc(sp[i]));
+ }
+#endif
+
+ if (cxstack_ix > cxix)
+ dounwind(cxix);
+
+ mark = PL_markstack[cxstack[cxix].blk_oldmarksp];
+ PUSHMARK(PL_stack_sp - items);
+
+ XSH_D({
+ I32 gimme = GIMME_V;
+ su_debug_log("%p: cx=%d gimme=%s items=%d sp=%d oldmark=%d mark=%d\n",
+ &XSH_CXT, cxix,
+ gimme == G_VOID ? "void" : gimme == G_ARRAY ? "list" : "scalar",
+ items, PL_stack_sp - PL_stack_base, *PL_markstack_ptr, mark);
+ });
+
+ PL_op = (OP *) &(XSH_CXT.unwind_storage.return_op);
+ PL_op = PL_op->op_ppaddr(aTHX);
+
+ *PL_markstack_ptr = mark;
+
+ XSH_CXT.unwind_storage.proxy_op.op_next = PL_op;
+ PL_op = &(XSH_CXT.unwind_storage.proxy_op);
+}
+
+/* --- Yield --------------------------------------------------------------- */
+
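+/* for each context type, the SU_RETOP_* macros below recover the op at which
+ * execution should resume once that context has been left */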
+#if XSH_HAS_PERL(5, 10, 0)
+# define SU_RETOP_SUB(C) ((C)->blk_sub.retop)
+# define SU_RETOP_EVAL(C) ((C)->blk_eval.retop)
+# define SU_RETOP_LOOP(C) ((C)->blk_loop.my_op->op_lastop->op_next)
+# define SU_RETOP_GIVEN(C) ((C)->blk_givwhen.leave_op->op_next)
+#else
+# define SU_RETOP_SUB(C) ((C)->blk_oldretsp > 0 ? PL_retstack[(C)->blk_oldretsp - 1] : NULL)
+# define SU_RETOP_EVAL(C) SU_RETOP_SUB(C)
+# define SU_RETOP_LOOP(C) ((C)->blk_loop.last_op->op_next)
+#endif
+
+static void su_yield(pTHX_ void *ud_) {
+ dXSH_CXT;
+ PERL_CONTEXT *cx;
+ const char *which = ud_;
+ I32 cxix = XSH_CXT.yield_storage.cxix;
+ I32 items = XSH_CXT.yield_storage.items;
+ opcode type = OP_NULL;
+ U8 flags = 0;
+ OP *next;
+
+ cx = cxstack + cxix;
+ switch (CxTYPE(cx)) {
+ case CXt_BLOCK: {
+ I32 i, cur = cxstack_ix, n = 1;
+ OP *o = NULL;
+ /* Is this actually a given/when block? This may occur only when yield was
+ * called with HERE (or nothing) as the context. */
+#if XSH_HAS_PERL(5, 10, 0)
+ if (cxix > 0) {
+ PERL_CONTEXT *prev = cx - 1;
+ U8 prev_type = CxTYPE(prev);
+ if ((prev_type == CXt_GIVEN || prev_type == CXt_WHEN)
+ && (prev->blk_oldcop == cx->blk_oldcop)) {
+ cxix--;
+ cx = prev;
+ if (prev_type == CXt_GIVEN)
+ goto cxt_given;
+ else
+ goto cxt_when;
+ }
+ }
+#endif
+ type = OP_LEAVE;
+ next = NULL;
+ /* Bare blocks (which appear as do { ... } blocks, map { ... } blocks or
+ * constant-folded blocks) don't need to save the op to return to anywhere,
+ * since 'last' isn't supposed to work inside them. So we climb higher in
+ * the context stack until we reach a context that has a return op (i.e. a
+ * sub, an eval, a format or a real loop), recording how many blocks we
+ * crossed. Then we follow the op_next chain until we get to the leave op
+ * that closes the original block, which we are assured to reach since
+ * everything is static (the blocks we have crossed cannot be evals or
+ * subroutine calls). */
+ for (i = cxix + 1; i <= cur; ++i) {
+ PERL_CONTEXT *cx2 = cxstack + i;
+ switch (CxTYPE(cx2)) {
+ case CXt_BLOCK:
+ ++n;
+ break;
+ case CXt_SUB:
+ case CXt_FORMAT:
+ o = SU_RETOP_SUB(cx2);
+ break;
+ case CXt_EVAL:
+ o = SU_RETOP_EVAL(cx2);
+ break;
+#if XSH_HAS_PERL(5, 11, 0)
+# if XSH_HAS_PERL(5, 23, 8)
+ case CXt_LOOP_ARY:
+ case CXt_LOOP_LIST:
+# else
+ case CXt_LOOP_FOR:
+# endif
+ case CXt_LOOP_PLAIN:
+ case CXt_LOOP_LAZYSV:
+ case CXt_LOOP_LAZYIV:
+#else
+ case CXt_LOOP:
+#endif
+ o = SU_RETOP_LOOP(cx2);
+ break;
+ }
+ if (o)
+ break;
+ }
+ if (!o)
+ o = PL_op;
+ while (n && o) {
+ /* We may find other enter/leave blocks on our way to the matching leave.
+ * Make sure the depth is incremented/decremented appropriately. */
+ if (o->op_type == OP_ENTER) {
+ ++n;
+ } else if (o->op_type == OP_LEAVE) {
+ --n;
+ if (!n) {
+ next = o->op_next;
+ break;
+ }
+ }
+ o = o->op_next;
+ }
+ break;
+ }
+ case CXt_SUB:
+ case CXt_FORMAT:
+ type = OP_LEAVESUB;
+ next = SU_RETOP_SUB(cx);
+ break;
+ case CXt_EVAL:
+ type = CxTRYBLOCK(cx) ? OP_LEAVETRY : OP_LEAVEEVAL;
+ next = SU_RETOP_EVAL(cx);
+ break;
+#if XSH_HAS_PERL(5, 11, 0)
+# if XSH_HAS_PERL(5, 23, 8)
+ case CXt_LOOP_ARY:
+ case CXt_LOOP_LIST:
+# else
+ case CXt_LOOP_FOR:
+# endif
+ case CXt_LOOP_PLAIN:
+ case CXt_LOOP_LAZYSV:
+ case CXt_LOOP_LAZYIV:
+#else
+ case CXt_LOOP:
+#endif
+ type = OP_LEAVELOOP;
+ next = SU_RETOP_LOOP(cx);
+ break;
+#if XSH_HAS_PERL(5, 10, 0)
+ case CXt_GIVEN:
+cxt_given:
+ type = OP_LEAVEGIVEN;
+ next = SU_RETOP_GIVEN(cx);
+ break;
+ case CXt_WHEN:
+cxt_when:
+#if XSH_HAS_PERL(5, 15, 1)
+ type = OP_LEAVEWHEN;
+#else
+ type = OP_BREAK;
+ flags |= OPf_SPECIAL;
+#endif
+ next = NULL;
+ break;
+#endif
+ case CXt_SUBST:
+ croak("%s() can't target a substitution context", which);
+ break;
+ default:
+ croak("%s() doesn't know how to leave a %s context",
+ which, SU_CXNAME(cxstack + cxix));
+ break;
+ }
+
+ PL_stack_sp = XSH_CXT.yield_storage.savesp;
+#if XSH_HAS_PERL(5, 19, 4)
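+ /* as in su_unwind(), keep non-temporary return values alive across the
+ * unwinding below */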
+ {
+ I32 i;
+ SV **sp = PL_stack_sp;
+ for (i = -items + 1; i <= 0; ++i)
+ if (!SvTEMP(sp[i]))
+ sv_2mortal(SvREFCNT_inc(sp[i]));
+ }
+#endif
+
+ if (cxstack_ix > cxix)
+ dounwind(cxix);
+
+ /* Copy the arguments passed to yield() where the leave op expects to find
+ * them. */
+ if (items)
+ Move(PL_stack_sp - items + 1, PL_stack_base + cx->blk_oldsp + 1, items, SV *);
+ PL_stack_sp = PL_stack_base + cx->blk_oldsp + items;
+
+ flags |= OP_GIMME_REVERSE(cx->blk_gimme);
+
+ XSH_CXT.yield_storage.leave_op.op_type = type;
+ XSH_CXT.yield_storage.leave_op.op_ppaddr = PL_ppaddr[type];
+ XSH_CXT.yield_storage.leave_op.op_flags = flags;
+ XSH_CXT.yield_storage.leave_op.op_next = next;
+
+ PL_op = (OP *) &(XSH_CXT.yield_storage.leave_op);
+ PL_op = PL_op->op_ppaddr(aTHX);
+
+ XSH_CXT.yield_storage.proxy_op.op_next = PL_op;
+ PL_op = &(XSH_CXT.yield_storage.proxy_op);
+}
+
+/* --- Uplevel ------------------------------------------------------------- */
+
+#define SU_UPLEVEL_SAVE(f, t) STMT_START { sud->old_##f = PL_##f; PL_##f = (t); } STMT_END
+#define SU_UPLEVEL_RESTORE(f) STMT_START { PL_##f = sud->old_##f; } STMT_END
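+/* e.g. SU_UPLEVEL_SAVE(curcop, cop) stashes PL_curcop in sud->old_curcop and
+ * installs cop in its place; SU_UPLEVEL_RESTORE(curcop) puts it back (both
+ * expect a su_uplevel_ud * named 'sud' to be in scope) */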
+
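+/* grab a recycled su_uplevel_ud from the per-interpreter free list (or
+ * allocate a fresh one), thread it onto the list of active uplevels, and
+ * swap in a copy of the current uid map cut down to the depth of cxix */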
+static su_uplevel_ud *su_uplevel_storage_new(pTHX_ I32 cxix) {
+#define su_uplevel_storage_new(I) su_uplevel_storage_new(aTHX_ (I))
+ su_uplevel_ud *sud;
+ UV depth;
+ dXSH_CXT;
+
+ sud = XSH_CXT.uplevel_storage.root;
+ if (sud) {
+ XSH_CXT.uplevel_storage.root = sud->next;
+ XSH_CXT.uplevel_storage.count--;
+ } else {
+ sud = su_uplevel_ud_new();
+ }
+
+ sud->next = XSH_CXT.uplevel_storage.top;
+ XSH_CXT.uplevel_storage.top = sud;
+
+ depth = su_uid_depth(cxix);
+ su_uid_storage_dup(&sud->tmp_uid_storage, &XSH_CXT.uid_storage, depth);
+ sud->old_uid_storage = XSH_CXT.uid_storage;
+ XSH_CXT.uid_storage = sud->tmp_uid_storage;
+
+ return sud;
+}
+
+#if XSH_HAS_PERL(5, 13, 7)
+
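+/* undo su_uplevel_storage_new(): restore the previous uid map, deactivate
+ * any uids created while the uplevel was in effect, and either cache the
+ * su_uplevel_ud on the free list or delete it if the cache is full */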
+static void su_uplevel_storage_delete(pTHX_ su_uplevel_ud *sud) {
+#define su_uplevel_storage_delete(S) su_uplevel_storage_delete(aTHX_ (S))
+ dXSH_CXT;
+
+ sud->tmp_uid_storage = XSH_CXT.uid_storage;
+ XSH_CXT.uid_storage = sud->old_uid_storage;
+ {
+ su_uid *map;
+ STRLEN i, alloc;
+ map = sud->tmp_uid_storage.map;
+ alloc = sud->tmp_uid_storage.alloc;
+ for (i = 0; i < alloc; ++i)
+ map[i].flags &= ~SU_UID_ACTIVE;
+ }
+ XSH_CXT.uplevel_storage.top = sud->next;
+
+ if (XSH_CXT.uplevel_storage.count >= SU_UPLEVEL_STORAGE_SIZE) {
+ su_uplevel_ud_delete(sud);
+ } else {
+ sud->next = XSH_CXT.uplevel_storage.root;
+ XSH_CXT.uplevel_storage.root = sud;
+ XSH_CXT.uplevel_storage.count++;
+ }
+}
+
+#endif
+
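+/* walk an optree (siblings first, then kids) and return true if it contains
+ * a goto op; eval subtrees are skipped since gotos inside them are forbidden
+ * at run time anyway */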
+static int su_uplevel_goto_static(const OP *o) {
+ for (; o; o = OpSIBLING(o)) {
+ /* goto ops are unops with kids. */
+ if (!(o->op_flags & OPf_KIDS))
+ continue;
+
+ switch (o->op_type) {
+ case OP_LEAVEEVAL:
+ case OP_LEAVETRY:
+ /* Don't care about gotos inside eval, as they are forbidden at run time. */
+ break;
+ case OP_GOTO:
+ return 1;
+ default:
+ if (su_uplevel_goto_static(((const UNOP *) o)->op_first))
+ return 1;
+ break;
+ }
+ }
+
+ return 0;
+}
+
+#if !SU_HAS_NEW_CXT && SU_UPLEVEL_HIJACKS_RUNOPS
+
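+/* a replacement runops loop: whenever a goto op is about to execute, find
+ * the nearest enclosing sub context with arguments and, if it is the frame
+ * currently being uplevel'd, copy GvAV(PL_defgv) back into that frame's @_
+ * pad slot, presumably so the goto'ed sub sees the caller's arguments */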
+static int su_uplevel_goto_runops(pTHX) {
+#define su_uplevel_goto_runops() su_uplevel_goto_runops(aTHX)
+ register OP *op;
+ dVAR;
+
+ op = PL_op;
+ do {
+ if (op->op_type == OP_GOTO) {
+ AV *argarray = NULL;
+ I32 cxix;
+
+ for (cxix = cxstack_ix; cxix >= 0; --cxix) {
+ const PERL_CONTEXT *cx = cxstack + cxix;
+
+ switch (CxTYPE(cx)) {
+ case CXt_SUB:
+ if (CxHASARGS(cx)) {
+ argarray = cx->blk_sub.argarray;
+ goto done;
+ }
+ break;
+ case CXt_EVAL:
+ case CXt_FORMAT:
+ goto done;
+ default:
+ break;
+ }
+ }
+
+done:
+ if (argarray) {
+ dXSH_CXT;
+
+ if (XSH_CXT.uplevel_storage.top->cxix == cxix) {
+ AV *args = GvAV(PL_defgv);
+ I32 items = AvFILLp(args);
+
+ av_extend(argarray, items);
+ Copy(AvARRAY(args), AvARRAY(argarray), items + 1, SV *);
+ AvFILLp(argarray) = items;
+ }
+ }
+ }
+
+ PL_op = op = op->op_ppaddr(aTHX);
+
+#if !XSH_HAS_PERL(5, 13, 0)
+ PERL_ASYNC_CHECK();
+#endif
+ } while (op);
+
+ TAINT_NOT;
+
+ return 0;
+}
+
+#endif /* SU_UPLEVEL_HIJACKS_RUNOPS */
+
+#define su_at_underscore(C) PadARRAY(PadlistARRAY(CvPADLIST(C))[CvDEPTH(C)])[0]
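+/* reads as: take C's padlist, pick the pad for C's current depth, and return
+ * slot 0, which is where perl keeps @_ for that invocation */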
+
+#if SU_HAS_NEW_CXT
+
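+/* tear down an uplevel: restore the context types that were overwritten in
+ * the gap, release the renamed clone of the callback together with the extra
+ * reference that kept the callback alive, and put PL_curcop back; presumably
+ * registered with SAVEDESTRUCTOR_X so it runs when the uplevel'd call ends */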
+static void su_uplevel_restore_new(pTHX_ void *sus_) {
+ su_uplevel_ud *sud = sus_;
+ I32 i;
+ U8 *saved_cxtypes = sud->cxtypes;
+
+ for (i = 0; i < sud->gap; i++) {
+ PERL_CONTEXT *cx = cxstack + sud->cxix + i;
+ XSH_D(su_debug_log("su_uplevel_restore: i=%d cxix=%d type %s => %s\n",
+ i, cx-cxstack, SU_CX_TYPENAME(CxTYPE(cx)),
+ SU_CX_TYPENAME(saved_cxtypes[i] & CXTYPEMASK)));
+ cx->cx_type = saved_cxtypes[i];
+ }
+ Safefree(saved_cxtypes);
+
+ /* renamed is a copy of callback, but they share the same CvPADLIST.
+ * At this point any calls to renamed should have exited, so its depth is
+ * back to that of callback. It is now safe to free renamed, then undo the
+ * extra reference count that was keeping callback alive.
+ */
+ assert(sud->renamed);
+ assert(sud->callback);
+
+ CvDEPTH(sud->callback)--;
+ assert(CvDEPTH(sud->callback) == CvDEPTH(sud->renamed));
+ if (!CvISXSUB(sud->renamed)) {
+ CvDEPTH(sud->renamed) = 0;
+ CvPADLIST(sud->renamed) = NULL;
+ }
+ SvREFCNT_dec(sud->renamed);
+ SvREFCNT_dec(sud->callback);
+
+ SU_UPLEVEL_RESTORE(curcop);
+
+ su_uplevel_storage_delete(sud);
+
+ return;
+}
+
+#else
+
+/* 5.23.7 and earlier */
+
+static void su_uplevel_restore_old(pTHX_ void *sus_) {
+ su_uplevel_ud *sud = sus_;
+ PERL_SI *cur = sud->old_curstackinfo;
+ PERL_SI *si = sud->si;
+
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+ if (PL_runops == su_uplevel_goto_runops)
+ PL_runops = sud->old_runops;