#define PERL_NO_GET_CONTEXT
#include "EXTERN.h"
-#include "perl.h"
+#include "perl.h"
#include "XSUB.h"
#define __PACKAGE__ "Scope::Upper"
# define CvISXSUB(C) CvXSUB(C)
#endif
+#ifndef PadlistARRAY
+# define PadlistARRAY(P) AvARRAY(P)
+# define PadARRAY(P) AvARRAY(P)
+#endif
+
#ifndef CxHASARGS
# define CxHASARGS(C) ((C)->blk_sub.hasargs)
#endif
# define MY_CXT_CLONE NOOP
#endif
+/* --- Unique context ID global storage ------------------------------------ */
+
+/* ... Sequence ID counter ................................................. */
+
+typedef struct {
+ UV *seqs;
+ STRLEN size;
+} su_uv_array;
+
+STATIC su_uv_array su_uid_seq_counter;
+
+#ifdef USE_ITHREADS
+
+STATIC perl_mutex su_uid_seq_counter_mutex;
+
+#define SU_LOCK(M) MUTEX_LOCK(M)
+#define SU_UNLOCK(M) MUTEX_UNLOCK(M)
+
+#else /* USE_ITHREADS */
+
+#define SU_LOCK(M)
+#define SU_UNLOCK(M)
+
+#endif /* !USE_ITHREADS */
+
+STATIC UV su_uid_seq_next(pTHX_ UV depth) {
+#define su_uid_seq_next(D) su_uid_seq_next(aTHX_ (D))
+ UV seq;
+ UV *seqs;
+
+ SU_LOCK(&su_uid_seq_counter_mutex);
+
+ seqs = su_uid_seq_counter.seqs;
+
+ if (depth >= su_uid_seq_counter.size) {
+ UV i;
+
+ seqs = PerlMemShared_realloc(seqs, (depth + 1) * sizeof(UV));
+ for (i = su_uid_seq_counter.size; i <= depth; ++i)
+ seqs[i] = 0;
+
+ su_uid_seq_counter.seqs = seqs;
+ su_uid_seq_counter.size = depth + 1;
+ }
+
+ seq = ++seqs[depth];
+
+ SU_UNLOCK(&su_uid_seq_counter_mutex);
+
+ return seq;
+}
+
+/* ... UID storage ......................................................... */
+
+typedef struct {
+ UV seq;
+ U32 flags;
+} su_uid;
+
+#define SU_UID_ACTIVE 1
+
+STATIC UV su_uid_depth(pTHX_ I32 cxix) {
+#define su_uid_depth(I) su_uid_depth(aTHX_ (I))
+ const PERL_SI *si;
+ UV depth;
+
+ depth = cxix;
+ for (si = PL_curstackinfo->si_prev; si; si = si->si_prev)
+ depth += si->si_cxix + 1;
+
+ return depth;
+}
+
+typedef struct {
+ su_uid **map;
+ STRLEN used;
+ STRLEN alloc;
+} su_uid_storage;
+
+STATIC void su_uid_storage_dup(pTHX_ su_uid_storage *new_cxt, const su_uid_storage *old_cxt, UV max_depth) {
+#define su_uid_storage_dup(N, O, D) su_uid_storage_dup(aTHX_ (N), (O), (D))
+ su_uid **old_map = old_cxt->map;
+
+ if (old_map) {
+ su_uid **new_map = new_cxt->map;
+ STRLEN old_used = old_cxt->used;
+ STRLEN old_alloc = old_cxt->alloc;
+ STRLEN new_used, new_alloc;
+ STRLEN i;
+
+ new_used = max_depth < old_used ? max_depth : old_used;
+ new_cxt->used = new_used;
+
+ if (new_used <= new_cxt->alloc)
+ new_alloc = new_cxt->alloc;
+ else {
+ new_alloc = new_used;
+ Renew(new_map, new_alloc, su_uid *);
+ for (i = new_cxt->alloc; i < new_alloc; ++i)
+ new_map[i] = NULL;
+ new_cxt->map = new_map;
+ new_cxt->alloc = new_alloc;
+ }
+
+ for (i = 0; i < new_alloc; ++i) {
+ su_uid *new_uid = new_map[i];
+
+ if (i < new_used) { /* => i < max_depth && i < old_used */
+ su_uid *old_uid = old_map[i];
+
+ if (old_uid && (old_uid->flags & SU_UID_ACTIVE)) {
+ if (!new_uid) {
+ Newx(new_uid, 1, su_uid);
+ new_map[i] = new_uid;
+ }
+ *new_uid = *old_uid;
+ continue;
+ }
+ }
+
+ if (new_uid)
+ new_uid->flags &= ~SU_UID_ACTIVE;
+ }
+ }
+
+ return;
+}
+
/* --- unwind() global storage --------------------------------------------- */
typedef struct {
/* --- uplevel() data tokens and global storage ---------------------------- */
+#define SU_UPLEVEL_HIJACKS_RUNOPS SU_HAS_PERL(5, 8, 0)
+
typedef struct {
- void *next;
+ void *next;
- I32 cxix;
- CV *target;
- CV *callback;
- bool died;
+ su_uid_storage tmp_uid_storage;
+ su_uid_storage old_uid_storage;
- PERL_SI *si;
- PERL_SI *old_curstackinfo;
- AV *old_mainstack;
+ I32 cxix;
+
+ I32 target_depth;
+ CV *target;
+
+ CV *callback;
+ CV *renamed;
+
+ PERL_SI *si;
+ PERL_SI *old_curstackinfo;
+ AV *old_mainstack;
- I32 old_depth;
- COP *old_curcop;
+ COP *old_curcop;
- bool old_catch;
- OP *old_op;
+ OP *old_op;
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+ runops_proc_t old_runops;
+#endif
+ bool old_catch;
+
+ bool died;
} su_uplevel_ud;
STATIC su_uplevel_ud *su_uplevel_ud_new(pTHX) {
Newx(sud, 1, su_uplevel_ud);
sud->next = NULL;
+ sud->tmp_uid_storage.map = NULL;
+ sud->tmp_uid_storage.used = 0;
+ sud->tmp_uid_storage.alloc = 0;
+
Newx(si, 1, PERL_SI);
si->si_stack = newAV();
AvREAL_off(si->si_stack);
Safefree(si->si_cxstack);
SvREFCNT_dec(si->si_stack);
Safefree(si);
+
+ if (sud->tmp_uid_storage.map) {
+ su_uid **map = sud->tmp_uid_storage.map;
+ STRLEN alloc = sud->tmp_uid_storage.alloc;
+ STRLEN i;
+
+ for (i = 0; i < alloc; ++i)
+ Safefree(map[i]);
+
+ Safefree(map);
+ }
+
Safefree(sud);
return;
}
typedef struct {
+ su_uplevel_ud *top;
su_uplevel_ud *root;
I32 count;
} su_uplevel_storage;
char *stack_placeholder;
su_unwind_storage unwind_storage;
su_uplevel_storage uplevel_storage;
+ su_uid_storage uid_storage;
} my_cxt_t;
START_MY_CXT
/* ... Reap ................................................................ */
+#define SU_SAVE_LAST_CX (!SU_HAS_PERL(5, 8, 4) || (SU_HAS_PERL(5, 9, 5) && !SU_HAS_PERL(5, 14, 0)) || SU_HAS_PERL(5, 15, 0))
+
typedef struct {
su_ud_common ci;
SV *cb;
STATIC void su_call(pTHX_ void *ud_) {
su_ud_reap *ud = (su_ud_reap *) ud_;
-#if SU_HAS_PERL(5, 9, 5)
- PERL_CONTEXT saved_cx;
+#if SU_SAVE_LAST_CX
I32 cxix;
-#endif
+ PERL_CONTEXT saved_cx;
+#endif /* SU_SAVE_LAST_CX */
dSP;
PUSHMARK(SP);
PUTBACK;
+#if SU_SAVE_LAST_CX
/* If the recently popped context isn't saved there, it will be overwritten by
* the sub scope from call_sv, although it's still needed in our caller. */
-
-#if SU_HAS_PERL(5, 9, 5)
- if (cxstack_ix < cxstack_max)
- cxix = cxstack_ix + 1;
- else
- cxix = Perl_cxinc(aTHX);
+ cxix = (cxstack_ix < cxstack_max) ? (cxstack_ix + 1) : Perl_cxinc(aTHX);
saved_cx = cxstack[cxix];
-#endif
+#endif /* SU_SAVE_LAST_CX */
call_sv(ud->cb, G_VOID);
-#if SU_HAS_PERL(5, 9, 5)
+#if SU_SAVE_LAST_CX
cxstack[cxix] = saved_cx;
-#endif
+#endif /* SU_SAVE_LAST_CX */
PUTBACK;
#if SU_DEBUG
# ifdef DEBUGGING
-# define SU_CXNAME PL_block_type[CxTYPE(&cxstack[cxstack_ix])]
+# define SU_CXNAME(C) PL_block_type[CxTYPE(C)]
# else
-# define SU_CXNAME "XXX"
+# define SU_CXNAME(C) "XXX"
# endif
#endif
PerlIO_printf(Perl_debug_log,
"%p: --- pop a %s\n"
"%p: leave scope at depth=%2d scope_ix=%2d cur_top=%2d cur_base=%2d\n",
- ud, SU_CXNAME,
+ ud, SU_CXNAME(cxstack + cxstack_ix),
ud, depth, PL_scopestack_ix,PL_savestack_ix,PL_scopestack[PL_scopestack_ix])
);
#define SU_UPLEVEL_SAVE(f, t) STMT_START { sud->old_##f = PL_##f; PL_##f = (t); } STMT_END
#define SU_UPLEVEL_RESTORE(f) STMT_START { PL_##f = sud->old_##f; } STMT_END
-STATIC su_uplevel_ud *su_uplevel_storage_new(pTHX) {
-#define su_uplevel_storage_new() su_uplevel_storage_new(aTHX)
+STATIC su_uplevel_ud *su_uplevel_storage_new(pTHX_ I32 cxix) {
+#define su_uplevel_storage_new(I) su_uplevel_storage_new(aTHX_ (I))
su_uplevel_ud *sud;
+ UV depth;
dMY_CXT;
sud = MY_CXT.uplevel_storage.root;
sud = su_uplevel_ud_new();
}
+ sud->next = MY_CXT.uplevel_storage.top;
+ MY_CXT.uplevel_storage.top = sud;
+
+ depth = su_uid_depth(cxix);
+ su_uid_storage_dup(&sud->tmp_uid_storage, &MY_CXT.uid_storage, depth);
+ sud->old_uid_storage = MY_CXT.uid_storage;
+ MY_CXT.uid_storage = sud->tmp_uid_storage;
+
return sud;
}
#define su_uplevel_storage_delete(S) su_uplevel_storage_delete(aTHX_ (S))
dMY_CXT;
+ sud->tmp_uid_storage = MY_CXT.uid_storage;
+ MY_CXT.uid_storage = sud->old_uid_storage;
+ {
+ su_uid **map;
+ UV i, alloc;
+ map = sud->tmp_uid_storage.map;
+ alloc = sud->tmp_uid_storage.alloc;
+ for (i = 0; i < alloc; ++i) {
+ if (map[i])
+      map[i]->flags &= ~SU_UID_ACTIVE;
+ }
+ }
+ MY_CXT.uplevel_storage.top = sud->next;
+
if (MY_CXT.uplevel_storage.count >= SU_UPLEVEL_STORAGE_SIZE) {
su_uplevel_ud_delete(sud);
} else {
}
}
-#define SU_HAS_EXT_MAGIC SU_HAS_PERL(5, 8, 0)
+STATIC int su_uplevel_goto_static(const OP *o) {
+ for (; o; o = o->op_sibling) {
+ /* goto ops are unops with kids. */
+ if (!(o->op_flags & OPf_KIDS))
+ continue;
+
+ switch (o->op_type) {
+ case OP_LEAVEEVAL:
+ case OP_LEAVETRY:
+ /* Don't care about gotos inside eval, as they are forbidden at run time. */
+ break;
+ case OP_GOTO:
+ return 1;
+ default:
+ if (su_uplevel_goto_static(cUNOPo->op_first))
+ return 1;
+ break;
+ }
+ }
+
+ return 0;
+}
+
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+
+STATIC int su_uplevel_goto_runops(pTHX) {
+#define su_uplevel_goto_runops() su_uplevel_goto_runops(aTHX)
+ register OP *op;
+ dVAR;
+
+ op = PL_op;
+ do {
+ if (op->op_type == OP_GOTO) {
+ AV *argarray = NULL;
+ I32 cxix;
+
+ for (cxix = cxstack_ix; cxix >= 0; --cxix) {
+ const PERL_CONTEXT *cx = cxstack + cxix;
+
+ switch (CxTYPE(cx)) {
+ case CXt_SUB:
+ if (CxHASARGS(cx)) {
+ argarray = cx->blk_sub.argarray;
+ goto done;
+ }
+ break;
+ case CXt_EVAL:
+ case CXt_FORMAT:
+ goto done;
+ default:
+ break;
+ }
+ }
+
+done:
+ if (argarray) {
+ dMY_CXT;
+
+ if (MY_CXT.uplevel_storage.top->cxix == cxix) {
+ AV *args = GvAV(PL_defgv);
+ I32 items = AvFILLp(args);
-#if SU_HAS_EXT_MAGIC && !SU_HAS_PERL(5, 13, 7)
+ av_extend(argarray, items);
+ Copy(AvARRAY(args), AvARRAY(argarray), items + 1, SV *);
+ AvFILLp(argarray) = items;
+ }
+ }
+ }
-STATIC int su_uplevel_restore_free(pTHX_ SV *sv, MAGIC *mg) {
- su_uplevel_storage_delete((su_uplevel_ud *) mg->mg_ptr);
+ PL_op = op = op->op_ppaddr(aTHX);
+
+#if !SU_HAS_PERL(5, 13, 0)
+ PERL_ASYNC_CHECK();
+#endif
+ } while (op);
+
+ TAINT_NOT;
return 0;
}
-STATIC MGVTBL su_uplevel_restore_vtbl = {
- 0,
- 0,
- 0,
- 0,
- su_uplevel_restore_free
-};
+#endif /* SU_UPLEVEL_HIJACKS_RUNOPS */
-#endif /* SU_HAS_EXT_MAGIC && !SU_HAS_PERL(5, 13, 7) */
+#define su_at_underscore(C) PadARRAY(PadlistARRAY(CvPADLIST(C))[CvDEPTH(C)])[0]
STATIC void su_uplevel_restore(pTHX_ void *sus_) {
su_uplevel_ud *sud = sus_;
- const PERL_CONTEXT *sub_cx;
PERL_SI *cur = sud->old_curstackinfo;
PERL_SI *si = sud->si;
- sub_cx = cxstack + sud->cxix;
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+ if (PL_runops == su_uplevel_goto_runops)
+ PL_runops = sud->old_runops;
+#endif
- /* When we reach this place, POPSUB has already been called (with our fake
- * argarray). GvAV(PL_defgv) points to the savearray (that is, what @_ was
- * before uplevel). argarray is either the fake AV we created in su_uplevel()
- * or some empty replacement POPSUB creates when @_ is reified. In both cases
- * we have to destroy it before the context stack is swapped back to its
- * original state. */
- SvREFCNT_dec(sub_cx->blk_sub.argarray);
+ if (sud->callback) {
+ PERL_CONTEXT *cx = cxstack + sud->cxix;
+ AV *argarray = MUTABLE_AV(su_at_underscore(sud->callback));
+
+ /* We have to fix the pad entry for @_ in the original callback because it
+ * may have been reified. */
+ if (AvREAL(argarray)) {
+ const I32 fill = AvFILLp(argarray);
+ SvREFCNT_dec(argarray);
+ argarray = newAV();
+ AvREAL_off(argarray);
+ AvREIFY_on(argarray);
+ av_extend(argarray, fill);
+ su_at_underscore(sud->callback) = MUTABLE_SV(argarray);
+ } else {
+ CLEAR_ARGARRAY(argarray);
+ }
- /* PUSHSUB was exerted with the original callback, but after calling
- * pp_entersub() we hijacked the blk_sub.cv member of the fresh sub context
- * with the renamed CV. Thus POPSUB and LEAVESUB applied to this CV, not the
- * original. Repair this imbalance right now. */
- if (!(CvDEPTH(sud->callback) = sub_cx->blk_sub.olddepth))
- LEAVESUB(sud->callback);
+ /* If the old cv member is our renamed CV, it means that this place has been
+ * reached without a goto() happening, and the old argarray member is
+ * actually our fake argarray. Destroy it properly in that case. */
+ if (cx->blk_sub.cv == sud->renamed) {
+ SvREFCNT_dec(cx->blk_sub.argarray);
+ cx->blk_sub.argarray = argarray;
+ }
- /* Free the renamed cv. */
- {
- CV *renamed_cv = sub_cx->blk_sub.cv;
- CvDEPTH(renamed_cv) = 0;
- SvREFCNT_dec(renamed_cv);
+ CvDEPTH(sud->callback)--;
+ SvREFCNT_dec(sud->callback);
+ }
+
+ /* Free the renamed CV. We must do it ourselves so that we can force the
+ * depth to be 0, or perl would complain about it being "still in use".
+ * But we *know* that it cannot be so. */
+ if (sud->renamed) {
+ CvDEPTH(sud->renamed) = 0;
+ CvPADLIST(sud->renamed) = NULL;
+ SvREFCNT_dec(sud->renamed);
}
CATCH_SET(sud->old_catch);
SU_UPLEVEL_RESTORE(curstackinfo);
if (sud->died) {
- CV *target_cv = sud->target;
+ CV *target = sud->target;
I32 levels = 0, i;
/* When we die, the depth of the target CV is not updated because of the
register const PERL_CONTEXT *cx = cxstack + i;
if (CxTYPE(cx) == CXt_SUB) {
- if (cx->blk_sub.cv == target_cv)
+ if (cx->blk_sub.cv == target)
++levels;
}
}
switch (CxTYPE(cx)) {
case CXt_SUB:
- if (cx->blk_sub.cv == target_cv)
+ if (cx->blk_sub.cv == target)
++levels;
break;
case CXt_EVAL:
}
found_it:
- CvDEPTH(target_cv) = sud->old_depth - levels;
+ CvDEPTH(target) = sud->target_depth - levels;
PL_curstackinfo->si_cxix = i - 1;
#if !SU_HAS_PERL(5, 13, 1)
/* This issue has been fixed in perl with commit 8f89e5a9, which was made
* public in perl 5.13.7. */
su_uplevel_storage_delete(sud);
-#elif SU_HAS_EXT_MAGIC
- /* If 'ext' magic is available, we work around this by attaching the state
- * data to a scalar that will be freed "soon". */
- {
- SV *sv = sv_newmortal();
-
- sv_magicext(sv, NULL, PERL_MAGIC_ext, &su_uplevel_restore_vtbl,
- (const char *) sud, 0);
- }
#else
/* Otherwise, we just enqueue it back in the global storage list. */
{
dMY_CXT;
+ sud->tmp_uid_storage = MY_CXT.uid_storage;
+ MY_CXT.uid_storage = sud->old_uid_storage;
+
+ MY_CXT.uplevel_storage.top = sud->next;
sud->next = MY_CXT.uplevel_storage.root;
MY_CXT.uplevel_storage.root = sud;
MY_CXT.uplevel_storage.count++;
STATIC CV *su_cv_clone(pTHX_ CV *proto, GV *gv) {
#define su_cv_clone(P, G) su_cv_clone(aTHX_ (P), (G))
dVAR;
- AV *protopadlist = CvPADLIST(proto);
- const AV *protopadname = (const AV *) *av_fetch(protopadlist, 0, FALSE);
- SV **pname = AvARRAY(protopadname);
- const I32 fpadlist = AvFILLp(protopadlist);
- const I32 fpadname = AvFILLp(protopadname);
- AV *padlist, *padname;
CV *cv;
cv = MUTABLE_CV(newSV_type(SvTYPE(proto)));
CvGV_set(cv, gv);
CvSTASH_set(cv, CvSTASH(proto));
+ /* Commit 4c74a7df, publicized with perl 5.13.3, began to add backrefs to
+ * stashes. CvSTASH_set() started to do it as well with commit c68d95645
+ * (which was part of perl 5.13.7). */
+#if SU_HAS_PERL(5, 13, 3) && !SU_HAS_PERL(5, 13, 7)
+ if (CvSTASH(proto))
+ Perl_sv_add_backref(aTHX_ CvSTASH(proto), MUTABLE_SV(cv));
+#endif
- OP_REFCNT_LOCK;
- CvROOT(cv) = OpREFCNT_inc(CvROOT(proto));
- OP_REFCNT_UNLOCK;
- CvSTART(cv) = CvSTART(proto);
+ if (CvISXSUB(proto)) {
+ CvXSUB(cv) = CvXSUB(proto);
+ CvXSUBANY(cv) = CvXSUBANY(proto);
+ } else {
+ OP_REFCNT_LOCK;
+ CvROOT(cv) = OpREFCNT_inc(CvROOT(proto));
+ OP_REFCNT_UNLOCK;
+ CvSTART(cv) = CvSTART(proto);
+ }
CvOUTSIDE(cv) = CvOUTSIDE(proto);
#ifdef CVf_WEAKOUTSIDE
if (!(CvFLAGS(proto) & CVf_WEAKOUTSIDE))
#endif
SvREFCNT_inc_simple_void(CvOUTSIDE(cv));
+ CvPADLIST(cv) = CvPADLIST(proto);
#ifdef CvOUTSIDE_SEQ
CvOUTSIDE_SEQ(cv) = CvOUTSIDE_SEQ(proto);
#endif
if (SvPOK(proto))
sv_setpvn(MUTABLE_SV(cv), SvPVX_const(proto), SvCUR(proto));
- padlist = newAV();
- AvREAL_off(padlist);
- av_fill(padlist, fpadlist);
- CvPADLIST(cv) = padlist;
-
- padname = newAV();
- av_fill(padname, fpadname);
- if (fpadname >= 0) {
- I32 j;
- SV **psvp = AvARRAY(protopadname);
- SV **svp = AvARRAY(padname);
-
- svp[0] = &PL_sv_undef;
- for (j = 1; j <= fpadname; ++j)
- svp[j] = SvREFCNT_inc(psvp[j]);
- }
- AvARRAY(padlist)[0] = MUTABLE_SV(padname);
-
- if (fpadlist >= 1) {
- I32 i;
-
- for (i = 1; i <= fpadlist; ++i) {
- AV *protoframe = MUTABLE_AV(AvARRAY(protopadlist)[i]);
- AV *frame = newAV();
- SV **psvp = AvARRAY(protoframe);
- SV **svp;
- I32 j, fframe = AvFILLp(protoframe);
-
- av_fill(frame, fframe);
- svp = AvARRAY(frame);
- if (i == 1) {
- AV *a0 = newAV(); /* will be @_ */
- AvREAL_off(a0);
- AvREIFY_on(a0);
- svp[0] = MUTABLE_SV(a0);
- } else {
- svp[0] = SvREFCNT_inc(psvp[0]);
- }
- for (j = 1; j <= fframe; ++j)
- svp[j] = SvREFCNT_inc(psvp[j]);
-
- AvARRAY(padlist)[i] = MUTABLE_SV(frame);
- }
- }
-
#ifdef CvCONST
if (CvCONST(cv))
CvCONST_off(cv);
return cv;
}
-STATIC I32 su_uplevel(pTHX_ CV *cv, I32 cxix, I32 args) {
+STATIC I32 su_uplevel(pTHX_ CV *callback, I32 cxix, I32 args) {
#define su_uplevel(C, I, A) su_uplevel(aTHX_ (C), (I), (A))
su_uplevel_ud *sud;
const PERL_CONTEXT *cx = cxstack + cxix;
PERL_SI *si;
PERL_SI *cur = PL_curstackinfo;
SV **old_stack_sp;
- CV *target_cv;
+ CV *target;
+ CV *renamed;
UNOP sub_op;
I32 gimme;
I32 old_mark, new_mark;
old_mark = AvFILLp(PL_curstack) = PL_stack_sp - PL_stack_base;
SPAGAIN;
- sud = su_uplevel_storage_new();
+ sud = su_uplevel_storage_new(cxix);
sud->cxix = cxix;
sud->died = 1;
- sud->callback = cv;
+ sud->callback = NULL;
+ sud->renamed = NULL;
SAVEDESTRUCTOR_X(su_uplevel_restore, sud);
si = sud->si;
Copy(cur->si_cxstack, si->si_cxstack, cxix, PERL_CONTEXT);
SU_POISON(si->si_cxstack + cxix, si->si_cxmax + 1 - cxix, PERL_CONTEXT);
- target_cv = cx->blk_sub.cv;
- sud->target = (CV *) SvREFCNT_inc(target_cv);
- sud->old_depth = CvDEPTH(target_cv);
+ target = cx->blk_sub.cv;
+ sud->target = (CV *) SvREFCNT_inc(target);
+ sud->target_depth = CvDEPTH(target);
/* blk_oldcop is essentially needed for caller() and stack traces. It has no
* run-time implication, since PL_curcop will be overwritten as soon as we
sud->old_mainstack = NULL;
PL_curstack = si->si_stack;
+ renamed = su_cv_clone(callback, CvGV(target));
+ sud->renamed = renamed;
+
PUSHMARK(SP);
/* Both SP and old_stack_sp point just before the CV. */
Copy(old_stack_sp + 2, SP + 1, args, SV *);
SP += args;
- PUSHs((SV *) cv);
+ PUSHs((SV *) renamed);
PUTBACK;
Zero(&sub_op, 1, UNOP);
SU_UPLEVEL_SAVE(op, (OP *) &sub_op);
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+ sud->old_runops = PL_runops;
+#endif
+
sud->old_catch = CATCH_GET;
CATCH_SET(TRUE);
if ((PL_op = PL_ppaddr[OP_ENTERSUB](aTHX))) {
- PERL_CONTEXT *sub_cx;
- CV *renamed_cv;
+ PERL_CONTEXT *sub_cx = cxstack + cxstack_ix;
- renamed_cv = su_cv_clone(cv, CvGV(target_cv));
+ /* If pp_entersub() returns a non-null OP, it means that the callback is not
+ * an XSUB. */
- sub_cx = cxstack + cxstack_ix;
- sub_cx->blk_sub.cv = renamed_cv;
- if (!sub_cx->blk_sub.olddepth) {
- SvREFCNT_inc_simple_void(renamed_cv);
- SvREFCNT_inc_simple_void(renamed_cv);
- SAVEFREESV(renamed_cv);
- }
+ sud->callback = MUTABLE_CV(SvREFCNT_inc(callback));
+ CvDEPTH(callback)++;
if (CxHASARGS(cx) && cx->blk_sub.argarray) {
/* The call to pp_entersub() has saved the current @_ (in XS terms,
* argarray with an unreal copy of the original @_. */
AV *av = newAV();
AvREAL_off(av);
+ AvREIFY_on(av);
av_extend(av, AvMAX(cx->blk_sub.argarray));
AvFILLp(av) = AvFILLp(cx->blk_sub.argarray);
Copy(AvARRAY(cx->blk_sub.argarray), AvARRAY(av), AvFILLp(av) + 1, SV *);
- cxstack[cxix].blk_sub.argarray = av;
+ sub_cx->blk_sub.argarray = av;
} else {
- SvREFCNT_inc_simple_void(cxstack[cxix].blk_sub.argarray);
+ SvREFCNT_inc_simple_void(sub_cx->blk_sub.argarray);
}
- CALLRUNOPS(aTHX);
+ if (su_uplevel_goto_static(CvROOT(renamed))) {
+#if SU_UPLEVEL_HIJACKS_RUNOPS
+ if (PL_runops != PL_runops_std) {
+ if (PL_runops == PL_runops_dbg) {
+ if (PL_debug)
+ croak("uplevel() can't execute code that calls goto when debugging flags are set");
+ } else if (PL_runops != su_uplevel_goto_runops)
+ croak("uplevel() can't execute code that calls goto with a custom runloop");
+ }
+
+ PL_runops = su_uplevel_goto_runops;
+#else /* SU_UPLEVEL_HIJACKS_RUNOPS */
+ croak("uplevel() can't execute code that calls goto before perl 5.8");
+#endif /* !SU_UPLEVEL_HIJACKS_RUNOPS */
+ }
- ret = PL_stack_sp - (PL_stack_base + new_mark);
+ CALLRUNOPS(aTHX);
}
sud->died = 0;
- SPAGAIN;
-
+ ret = PL_stack_sp - (PL_stack_base + new_mark);
if (ret > 0) {
AV *old_stack = sud->old_curstackinfo->si_stack;
AvFILLp(old_stack) += ret;
}
- PUTBACK;
-
LEAVE;
return ret;
}
+/* --- Unique context ID --------------------------------------------------- */
+
+STATIC su_uid *su_uid_storage_fetch(pTHX_ UV depth) {
+#define su_uid_storage_fetch(D) su_uid_storage_fetch(aTHX_ (D))
+ su_uid **map, *uid;
+ STRLEN alloc;
+ dMY_CXT;
+
+ map = MY_CXT.uid_storage.map;
+ alloc = MY_CXT.uid_storage.alloc;
+
+ if (depth >= alloc) {
+ STRLEN i;
+
+ Renew(map, depth + 1, su_uid *);
+ for (i = alloc; i <= depth; ++i)
+ map[i] = NULL;
+
+ MY_CXT.uid_storage.map = map;
+ MY_CXT.uid_storage.alloc = depth + 1;
+ }
+
+ uid = map[depth];
+
+ if (!uid) {
+ Newx(uid, 1, su_uid);
+ uid->seq = 0;
+ uid->flags = 0;
+ map[depth] = uid;
+ }
+
+ if (depth >= MY_CXT.uid_storage.used)
+ MY_CXT.uid_storage.used = depth + 1;
+
+ return uid;
+}
+
+STATIC int su_uid_storage_check(pTHX_ UV depth, UV seq) {
+#define su_uid_storage_check(D, S) su_uid_storage_check(aTHX_ (D), (S))
+ su_uid *uid;
+ dMY_CXT;
+
+ if (depth >= MY_CXT.uid_storage.used)
+ return 0;
+
+ uid = MY_CXT.uid_storage.map[depth];
+
+ return uid && (uid->seq == seq) && (uid->flags & SU_UID_ACTIVE);
+}
+
+STATIC void su_uid_drop(pTHX_ void *ud_) {
+ su_uid *uid = ud_;
+
+ uid->flags &= ~SU_UID_ACTIVE;
+}
+
+STATIC void su_uid_bump(pTHX_ void *ud_) {
+ su_ud_reap *ud = ud_;
+
+ SAVEDESTRUCTOR_X(su_uid_drop, ud->cb);
+}
+
+STATIC SV *su_uid_get(pTHX_ I32 cxix) {
+#define su_uid_get(I) su_uid_get(aTHX_ (I))
+ su_uid *uid;
+ SV *uid_sv;
+ UV depth;
+
+ depth = su_uid_depth(cxix);
+ uid = su_uid_storage_fetch(depth);
+
+ if (!(uid->flags & SU_UID_ACTIVE)) {
+ su_ud_reap *ud;
+
+ uid->seq = su_uid_seq_next(depth);
+ uid->flags |= SU_UID_ACTIVE;
+
+ Newx(ud, 1, su_ud_reap);
+ SU_UD_ORIGIN(ud) = NULL;
+ SU_UD_HANDLER(ud) = su_uid_bump;
+ ud->cb = (SV *) uid;
+ su_init(ud, cxix, SU_SAVE_DESTRUCTOR_SIZE);
+ }
+
+ uid_sv = sv_newmortal();
+ sv_setpvf(uid_sv, "%"UVuf"-%"UVuf, depth, uid->seq);
+ return uid_sv;
+}
+
+#ifdef grok_number
+
+#define su_grok_number(S, L, VP) grok_number((S), (L), (VP))
+
+#else /* grok_number */
+
+#define IS_NUMBER_IN_UV 0x1
+
+STATIC int su_grok_number(pTHX_ const char *s, STRLEN len, UV *valuep) {
+#define su_grok_number(S, L, VP) su_grok_number(aTHX_ (S), (L), (VP))
+ STRLEN i;
+ SV *tmpsv;
+
+ /* This crude check should be good enough for a fallback implementation.
+ * Better be too strict than too lax. */
+ for (i = 0; i < len; ++i) {
+ if (!isDIGIT(s[i]))
+ return 0;
+ }
+
+ tmpsv = sv_newmortal();
+ sv_setpvn(tmpsv, s, len);
+ *valuep = sv_2uv(tmpsv);
+
+ return IS_NUMBER_IN_UV;
+}
+
+#endif /* !grok_number */
+
+STATIC int su_uid_validate(pTHX_ SV *uid) {
+#define su_uid_validate(U) su_uid_validate(aTHX_ (U))
+ const char *s;
+ STRLEN len, p = 0;
+ UV depth, seq;
+ int type;
+
+ s = SvPV_const(uid, len);
+
+ while (p < len && s[p] != '-')
+ ++p;
+ if (p >= len)
+ croak("UID contains only one part");
+
+ type = su_grok_number(s, p, &depth);
+ if (type != IS_NUMBER_IN_UV)
+ croak("First UID part is not an unsigned integer");
+
+ ++p; /* Skip '-'. Since p < len held above, len - p >= 1 after this increment. */
+
+ type = su_grok_number(s + p, len - p, &seq);
+ if (type != IS_NUMBER_IN_UV)
+ croak("Second UID part is not an unsigned integer");
+
+ return su_uid_storage_check(depth, seq);
+}
+
/* --- Interpreter setup/teardown ------------------------------------------ */
STATIC void su_teardown(pTHX_ void *param) {
su_uplevel_ud *cur;
+ su_uid **map;
dMY_CXT;
+ map = MY_CXT.uid_storage.map;
+ if (map) {
+ STRLEN i;
+ for (i = 0; i < MY_CXT.uid_storage.used; ++i)
+ Safefree(map[i]);
+ Safefree(map);
+ }
+
cur = MY_CXT.uplevel_storage.root;
if (cur) {
su_uplevel_ud *prev;
MY_CXT.unwind_storage.proxy_op.op_type = OP_STUB;
MY_CXT.unwind_storage.proxy_op.op_ppaddr = NULL;
+ MY_CXT.uplevel_storage.top = NULL;
MY_CXT.uplevel_storage.root = NULL;
MY_CXT.uplevel_storage.count = 0;
+ MY_CXT.uid_storage.map = NULL;
+ MY_CXT.uid_storage.used = 0;
+ MY_CXT.uid_storage.alloc = 0;
+
call_atexit(su_teardown, NULL);
return;
{
HV *stash;
+ MUTEX_INIT(&su_uid_seq_counter_mutex);
+
+ su_uid_seq_counter.seqs = NULL;
+ su_uid_seq_counter.size = 0;
+
stash = gv_stashpv(__PACKAGE__, 1);
newCONSTSUB(stash, "TOP", newSViv(0));
newCONSTSUB(stash, "SU_THREADSAFE", newSVuv(SU_THREADSAFE));
void
CLONE(...)
PROTOTYPE: DISABLE
+PREINIT:
+ su_uid_storage new_cxt;
PPCODE:
+ {
+ dMY_CXT;
+ new_cxt.map = NULL;
+ new_cxt.used = 0;
+ new_cxt.alloc = 0;
+ su_uid_storage_dup(&new_cxt, &MY_CXT.uid_storage, MY_CXT.uid_storage.used);
+ }
{
MY_CXT_CLONE;
+ MY_CXT.uplevel_storage.top = NULL;
MY_CXT.uplevel_storage.root = NULL;
MY_CXT.uplevel_storage.count = 0;
+ MY_CXT.uid_storage = new_cxt;
}
XSRETURN(0);
}
} while (--cxix >= 0);
croak("Can't uplevel outside a subroutine");
+
+void
+uid(...)
+PROTOTYPE: ;$
+PREINIT:
+ I32 cxix;
+ SV *uid;
+PPCODE:
+ SU_GET_CONTEXT(0, 0);
+ uid = su_uid_get(cxix);
+ EXTEND(SP, 1);
+ PUSHs(uid);
+ XSRETURN(1);
+
+void
+validate_uid(SV *uid)
+PROTOTYPE: $
+PREINIT:
+ SV *ret;
+PPCODE:
+ ret = su_uid_validate(uid) ? &PL_sv_yes : &PL_sv_no;
+ EXTEND(SP, 1);
+ PUSHs(ret);
+ XSRETURN(1);