1 /* This file is part of the Scope::Upper Perl module.
2 * See http://search.cpan.org/dist/Scope-Upper/ */
4 #define PERL_NO_GET_CONTEXT
9 #define __PACKAGE__ "Scope::Upper"
15 /* --- Compatibility ------------------------------------------------------- */
30 # define MUTABLE_SV(S) ((SV *) (S))
34 # define MUTABLE_AV(A) ((AV *) (A))
38 # define MUTABLE_CV(C) ((CV *) (C))
41 #ifndef PERL_UNUSED_VAR
42 # define PERL_UNUSED_VAR(V)
46 # define STMT_START do
50 # define STMT_END while (0)
54 # define SU_D(X) STMT_START X STMT_END
60 # define Newx(v, n, c) New(0, v, n, c)
65 # define SU_POISON(D, N, T) PoisonNew((D), (N), T)
66 # elif defined(Poison)
67 # define SU_POISON(D, N, T) Poison((D), (N), T)
71 # define SU_POISON(D, N, T) NOOP
75 STATIC SV *su_newSV_type(pTHX_ svtype t) {
80 # define newSV_type(T) su_newSV_type(aTHX_ (T))
84 # define SvPV_const(S, L) SvPV(S, L)
88 # define SvPVX_const(S) SvPVX(S)
91 #ifndef SvPV_nolen_const
92 # define SvPV_nolen_const(S) SvPV_nolen(S)
95 #ifndef SvREFCNT_inc_simple_void
96 # define SvREFCNT_inc_simple_void(sv) ((void) SvREFCNT_inc(sv))
100 # define mPUSHi(I) PUSHs(sv_2mortal(newSViv(I)))
104 # define GvCV_set(G, C) (GvCV(G) = (C))
108 # define CvGV_set(C, G) (CvGV(C) = (G))
112 # define CvSTASH_set(C, S) (CvSTASH(C) = (S))
116 # define CvISXSUB(C) CvXSUB(C)
120 # define PadlistARRAY(P) AvARRAY(P)
121 # define PadARRAY(P) AvARRAY(P)
125 # define CxHASARGS(C) ((C)->blk_sub.hasargs)
129 # define HvNAME_get(H) HvNAME(H)
132 #ifndef gv_fetchpvn_flags
133 # define gv_fetchpvn_flags(A, B, C, D) gv_fetchpv((A), (C), (D))
136 #ifndef OP_GIMME_REVERSE
137 STATIC U8 su_op_gimme_reverse(U8 gimme) {
140 return OPf_WANT_VOID;
142 return OPf_WANT_LIST;
147 return OPf_WANT_SCALAR;
149 #define OP_GIMME_REVERSE(G) su_op_gimme_reverse(G)
152 #ifndef PERL_MAGIC_tied
153 # define PERL_MAGIC_tied 'P'
156 #ifndef PERL_MAGIC_env
157 # define PERL_MAGIC_env 'E'
160 #ifndef NEGATIVE_INDICES_VAR
161 # define NEGATIVE_INDICES_VAR "NEGATIVE_INDICES"
164 #define SU_HAS_PERL(R, V, S) (PERL_REVISION > (R) || (PERL_REVISION == (R) && (PERL_VERSION > (V) || (PERL_VERSION == (V) && (PERL_SUBVERSION >= (S))))))
165 #define SU_HAS_PERL_EXACT(R, V, S) ((PERL_REVISION == (R)) && (PERL_VERSION == (V)) && (PERL_SUBVERSION == (S)))
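/* For example, SU_HAS_PERL(5, 13, 7) is true on perl 5.13.7 and on any later
 * release, while SU_HAS_PERL_EXACT(5, 13, 0) only matches perl 5.13.0 itself. */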
167 /* --- Threads and multiplicity -------------------------------------------- */
169 #ifndef SU_MULTIPLICITY
170 # if defined(MULTIPLICITY) || defined(PERL_IMPLICIT_CONTEXT)
171 # define SU_MULTIPLICITY 1
173 # define SU_MULTIPLICITY 0
176 #if SU_MULTIPLICITY && !defined(tTHX)
177 # define tTHX PerlInterpreter*
180 #if SU_MULTIPLICITY && defined(USE_ITHREADS) && defined(dMY_CXT) && defined(MY_CXT) && defined(START_MY_CXT) && defined(MY_CXT_INIT) && (defined(MY_CXT_CLONE) || defined(dMY_CXT_SV))
181 # define SU_THREADSAFE 1
182 # ifndef MY_CXT_CLONE
183 # define MY_CXT_CLONE \
185 my_cxt_t *my_cxtp = (my_cxt_t*)SvPVX(newSV(sizeof(my_cxt_t)-1)); \
186 Copy(INT2PTR(my_cxt_t*, SvUV(my_cxt_sv)), my_cxtp, 1, my_cxt_t); \
187 sv_setuv(my_cxt_sv, PTR2UV(my_cxtp))
190 # define SU_THREADSAFE 0
192 # define dMY_CXT dNOOP
194 # define MY_CXT su_globaldata
196 # define START_MY_CXT STATIC my_cxt_t MY_CXT;
198 # define MY_CXT_INIT NOOP
200 # define MY_CXT_CLONE NOOP
203 /* --- Unique context ID global storage ------------------------------------ */
205 /* ... Sequence ID counter ................................................. */
212 STATIC su_uv_array su_uid_seq_counter;
216 STATIC perl_mutex su_uid_seq_counter_mutex;
218 #define SU_LOCK(M) MUTEX_LOCK(M)
219 #define SU_UNLOCK(M) MUTEX_UNLOCK(M)
221 #else /* USE_ITHREADS */
226 #endif /* !USE_ITHREADS */
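/* Return the next sequence number for the given context depth, growing the
 * shared per-depth counter array when needed (under the mutex when threads
 * are enabled). */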
228 STATIC UV su_uid_seq_next(pTHX_ UV depth) {
229 #define su_uid_seq_next(D) su_uid_seq_next(aTHX_ (D))
233 SU_LOCK(&su_uid_seq_counter_mutex);
235 seqs = su_uid_seq_counter.seqs;
237 if (depth >= su_uid_seq_counter.size) {
240 seqs = PerlMemShared_realloc(seqs, (depth + 1) * sizeof(UV));
241 for (i = su_uid_seq_counter.size; i <= depth; ++i)
244 su_uid_seq_counter.seqs = seqs;
245 su_uid_seq_counter.size = depth + 1;
250 SU_UNLOCK(&su_uid_seq_counter_mutex);
255 /* ... UID storage ......................................................... */
262 #define SU_UID_ACTIVE 1
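/* Compute the absolute depth of the context at index cxix, adding the frames
 * of every enclosing stack info below the current one. */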
264 STATIC UV su_uid_depth(pTHX_ I32 cxix) {
265 #define su_uid_depth(I) su_uid_depth(aTHX_ (I))
270 for (si = PL_curstackinfo->si_prev; si; si = si->si_prev)
271 depth += si->si_cxix + 1;
282 STATIC void su_uid_storage_dup(pTHX_ su_uid_storage *new_cxt, const su_uid_storage *old_cxt, UV max_depth) {
283 #define su_uid_storage_dup(N, O, D) su_uid_storage_dup(aTHX_ (N), (O), (D))
284 su_uid **old_map = old_cxt->map;
287 su_uid **new_map = new_cxt->map;
288 STRLEN old_used = old_cxt->used;
289 STRLEN old_alloc = old_cxt->alloc;
290 STRLEN new_used, new_alloc;
293 new_used = max_depth < old_used ? max_depth : old_used;
294 new_cxt->used = new_used;
296 if (new_used <= new_cxt->alloc)
297 new_alloc = new_cxt->alloc;
299 new_alloc = new_used;
300 Renew(new_map, new_alloc, su_uid *);
301 for (i = new_cxt->alloc; i < new_alloc; ++i)
303 new_cxt->map = new_map;
304 new_cxt->alloc = new_alloc;
307 for (i = 0; i < new_alloc; ++i) {
308 su_uid *new_uid = new_map[i];
310 if (i < new_used) { /* => i < max_depth && i < old_used */
311 su_uid *old_uid = old_map[i];
313 if (old_uid && (old_uid->flags & SU_UID_ACTIVE)) {
315 Newx(new_uid, 1, su_uid);
316 new_map[i] = new_uid;
324 new_uid->flags &= ~SU_UID_ACTIVE;
331 /* --- unwind() global storage --------------------------------------------- */
341 /* --- uplevel() data tokens and global storage ---------------------------- */
343 #define SU_UPLEVEL_HIJACKS_RUNOPS SU_HAS_PERL(5, 8, 0)
348 su_uid_storage tmp_uid_storage;
349 su_uid_storage old_uid_storage;
360 PERL_SI *old_curstackinfo;
366 #if SU_UPLEVEL_HIJACKS_RUNOPS
367 runops_proc_t old_runops;
374 STATIC su_uplevel_ud *su_uplevel_ud_new(pTHX) {
375 #define su_uplevel_ud_new() su_uplevel_ud_new(aTHX)
379 Newx(sud, 1, su_uplevel_ud);
382 sud->tmp_uid_storage.map = NULL;
383 sud->tmp_uid_storage.used = 0;
384 sud->tmp_uid_storage.alloc = 0;
386 Newx(si, 1, PERL_SI);
387 si->si_stack = newAV();
388 AvREAL_off(si->si_stack);
389 si->si_cxstack = NULL;
397 STATIC void su_uplevel_ud_delete(pTHX_ su_uplevel_ud *sud) {
398 #define su_uplevel_ud_delete(S) su_uplevel_ud_delete(aTHX_ (S))
399 PERL_SI *si = sud->si;
401 Safefree(si->si_cxstack);
402 SvREFCNT_dec(si->si_stack);
405 if (sud->tmp_uid_storage.map) {
406 su_uid **map = sud->tmp_uid_storage.map;
407 STRLEN alloc = sud->tmp_uid_storage.alloc;
410 for (i = 0; i < alloc; ++i)
425 } su_uplevel_storage;
427 #ifndef SU_UPLEVEL_STORAGE_SIZE
428 # define SU_UPLEVEL_STORAGE_SIZE 4
431 /* --- Global data --------------------------------------------------------- */
433 #define MY_CXT_KEY __PACKAGE__ "::_guts" XS_VERSION
436 char *stack_placeholder;
437 su_unwind_storage unwind_storage;
438 su_uplevel_storage uplevel_storage;
439 su_uid_storage uid_storage;
444 /* --- Stack manipulations ------------------------------------------------- */
446 #define SU_SAVE_PLACEHOLDER() save_pptr(&MY_CXT.stack_placeholder)
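/* Number of savestack slots consumed by each kind of save used below. su_init()
 * relies on these sizes to work out how many placeholders it has to push. */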
448 #define SU_SAVE_DESTRUCTOR_SIZE 3
449 #define SU_SAVE_PLACEHOLDER_SIZE 3
451 #define SU_SAVE_SCALAR_SIZE 3
453 #define SU_SAVE_ARY_SIZE 3
454 #define SU_SAVE_AELEM_SIZE 4
456 # define SU_SAVE_ADELETE_SIZE 3
458 # define SU_SAVE_ADELETE_SIZE SU_SAVE_DESTRUCTOR_SIZE
460 #if SU_SAVE_AELEM_SIZE < SU_SAVE_ADELETE_SIZE
461 # define SU_SAVE_AELEM_OR_ADELETE_SIZE SU_SAVE_ADELETE_SIZE
463 # define SU_SAVE_AELEM_OR_ADELETE_SIZE SU_SAVE_AELEM_SIZE
466 #define SU_SAVE_HASH_SIZE 3
467 #define SU_SAVE_HELEM_SIZE 4
468 #define SU_SAVE_HDELETE_SIZE 4
469 #if SU_SAVE_HELEM_SIZE < SU_SAVE_HDELETE_SIZE
470 # define SU_SAVE_HELEM_OR_HDELETE_SIZE SU_SAVE_HDELETE_SIZE
472 # define SU_SAVE_HELEM_OR_HDELETE_SIZE SU_SAVE_HELEM_SIZE
475 #define SU_SAVE_GVCV_SIZE SU_SAVE_DESTRUCTOR_SIZE
477 #if !SU_HAS_PERL(5, 8, 9)
478 # define SU_SAVE_GP_SIZE 6
479 #elif !SU_HAS_PERL(5, 13, 0) || (SU_RELEASE && SU_HAS_PERL_EXACT(5, 13, 0))
480 # define SU_SAVE_GP_SIZE 3
481 #elif !SU_HAS_PERL(5, 13, 8)
482 # define SU_SAVE_GP_SIZE 4
484 # define SU_SAVE_GP_SIZE 3
487 #ifndef SvCANEXISTDELETE
488 # define SvCANEXISTDELETE(sv) \
490 || ((mg = mg_find((SV *) sv, PERL_MAGIC_tied)) \
491 && (stash = SvSTASH(SvRV(SvTIED_obj((SV *) sv, mg)))) \
492 && gv_fetchmethod_autoload(stash, "EXISTS", TRUE) \
493 && gv_fetchmethod_autoload(stash, "DELETE", TRUE) \
498 /* ... Saving array elements ............................................... */
500 STATIC I32 su_av_key2idx(pTHX_ AV *av, I32 key) {
501 #define su_av_key2idx(A, K) su_av_key2idx(aTHX_ (A), (K))
507 /* Added by MJD in perl-5.8.1 with 6f12eb6d2a1dfaf441504d869b27d2e40ef4966a */
508 #if SU_HAS_PERL(5, 8, 1)
509 if (SvRMAGICAL(av)) {
510 const MAGIC * const tied_magic = mg_find((SV *) av, PERL_MAGIC_tied);
512 SV * const * const negative_indices_glob =
513 hv_fetch(SvSTASH(SvRV(SvTIED_obj((SV *) (av), tied_magic))),
514 NEGATIVE_INDICES_VAR, 16, 0);
515 if (negative_indices_glob && SvTRUE(GvSV(*negative_indices_glob)))
521 idx = key + av_len(av) + 1;
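 /* e.g. key == -1 addresses the last element, as -1 + av_len(av) + 1 == av_len(av) */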
535 STATIC void su_adelete(pTHX_ void *ud_) {
536 su_ud_adelete *ud = (su_ud_adelete *) ud_;
538 av_delete(ud->av, ud->idx, G_DISCARD);
539 SvREFCNT_dec(ud->av);
544 STATIC void su_save_adelete(pTHX_ AV *av, I32 idx) {
545 #define su_save_adelete(A, K) su_save_adelete(aTHX_ (A), (K))
548 Newx(ud, 1, su_ud_adelete);
551 SvREFCNT_inc_simple_void(av);
553 SAVEDESTRUCTOR_X(su_adelete, ud);
556 #define SAVEADELETE(A, K) su_save_adelete((A), (K))
558 #endif /* SAVEADELETE */
560 STATIC void su_save_aelem(pTHX_ AV *av, SV *key, SV *val) {
561 #define su_save_aelem(A, K, V) su_save_aelem(aTHX_ (A), (K), (V))
568 idx = su_av_key2idx(av, SvIV(key));
570 if (SvCANEXISTDELETE(av))
571 preeminent = av_exists(av, idx);
573 svp = av_fetch(av, idx, 1);
574 if (!svp || *svp == &PL_sv_undef) croak(PL_no_aelem, idx);
577 save_aelem(av, idx, svp);
579 SAVEADELETE(av, idx);
581 if (val) { /* local $x[$idx] = $val; */
582 SvSetMagicSV(*svp, val);
583 } else { /* local $x[$idx]; delete $x[$idx]; */
584 av_delete(av, idx, G_DISCARD);
588 /* ... Saving hash elements ................................................ */
590 STATIC void su_save_helem(pTHX_ HV *hv, SV *keysv, SV *val) {
591 #define su_save_helem(H, K, V) su_save_helem(aTHX_ (H), (K), (V))
598 if (SvCANEXISTDELETE(hv) || mg_find((SV *) hv, PERL_MAGIC_env))
599 preeminent = hv_exists_ent(hv, keysv, 0);
601 he = hv_fetch_ent(hv, keysv, 1, 0);
602 svp = he ? &HeVAL(he) : NULL;
603  if (!svp || *svp == &PL_sv_undef) croak("Modification of non-creatable hash value attempted, subscript \"%s\"", SvPV_nolen_const(keysv));
605 if (HvNAME_get(hv) && isGV(*svp)) {
606 save_gp((GV *) *svp, 0);
611 save_helem(hv, keysv, svp);
614 const char * const key = SvPV_const(keysv, keylen);
615 SAVEDELETE(hv, savepvn(key, keylen),
616 SvUTF8(keysv) ? -(I32)keylen : (I32)keylen);
619 if (val) { /* local $x{$keysv} = $val; */
620 SvSetMagicSV(*svp, val);
621 } else { /* local $x{$keysv}; delete $x{$keysv}; */
622 (void)hv_delete_ent(hv, keysv, G_DISCARD, HeHASH(he));
626 /* ... Saving code slots from a glob ....................................... */
628 #if !SU_HAS_PERL(5, 10, 0) && !defined(mro_method_changed_in)
629 # define mro_method_changed_in(G) PL_sub_generation++
637 STATIC void su_restore_gvcv(pTHX_ void *ud_) {
638 su_save_gvcv_ud *ud = ud_;
641 GvCV_set(gv, ud->old_cv);
643 mro_method_changed_in(GvSTASH(gv));
648 STATIC void su_save_gvcv(pTHX_ GV *gv) {
649 #define su_save_gvcv(G) su_save_gvcv(aTHX_ (G))
652 Newx(ud, 1, su_save_gvcv_ud);
654 ud->old_cv = GvCV(gv);
658 mro_method_changed_in(GvSTASH(gv));
660 SAVEDESTRUCTOR_X(su_restore_gvcv, ud);
663 /* --- Actions ------------------------------------------------------------- */
669 void (*handler)(pTHX_ void *);
672 #define SU_UD_DEPTH(U) (((su_ud_common *) (U))->depth)
673 #define SU_UD_PAD(U) (((su_ud_common *) (U))->pad)
674 #define SU_UD_ORIGIN(U) (((su_ud_common *) (U))->origin)
675 #define SU_UD_HANDLER(U) (((su_ud_common *) (U))->handler)
677 #define SU_UD_FREE(U) STMT_START { \
678 if (SU_UD_ORIGIN(U)) Safefree(SU_UD_ORIGIN(U)); \
682 /* ... Reap ................................................................ */
684 #define SU_SAVE_LAST_CX (!SU_HAS_PERL(5, 8, 4) || (SU_HAS_PERL(5, 9, 5) && !SU_HAS_PERL(5, 14, 0)) || SU_HAS_PERL(5, 15, 0))
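/* On the perl versions selected above, the sub scope entered by call_sv() can
 * overwrite the context stack slot that has just been popped, while our caller
 * still needs it; su_call() therefore saves that frame in a local variable and
 * restores it after the callback returns. */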
691 STATIC void su_call(pTHX_ void *ud_) {
692 su_ud_reap *ud = (su_ud_reap *) ud_;
695 PERL_CONTEXT saved_cx;
696 #endif /* SU_SAVE_LAST_CX */
701 PerlIO_printf(Perl_debug_log,
702 "%p: @@@ call\n%p: depth=%2d scope_ix=%2d save_ix=%2d\n",
703 ud, ud, SU_UD_DEPTH(ud), PL_scopestack_ix, PL_savestack_ix);
713 /* If the recently popped context isn't saved there, it will be overwritten by
714 * the sub scope from call_sv, although it's still needed in our caller. */
715 cxix = (cxstack_ix < cxstack_max) ? (cxstack_ix + 1) : Perl_cxinc(aTHX);
716 saved_cx = cxstack[cxix];
717 #endif /* SU_SAVE_LAST_CX */
719 call_sv(ud->cb, G_VOID);
722 cxstack[cxix] = saved_cx;
723 #endif /* SU_SAVE_LAST_CX */
730 SvREFCNT_dec(ud->cb);
734 STATIC void su_reap(pTHX_ void *ud) {
735 #define su_reap(U) su_reap(aTHX_ (U))
737 PerlIO_printf(Perl_debug_log,
738 "%p: === reap\n%p: depth=%2d scope_ix=%2d save_ix=%2d\n",
739 ud, ud, SU_UD_DEPTH(ud), PL_scopestack_ix, PL_savestack_ix);
742 SAVEDESTRUCTOR_X(su_call, ud);
745 /* ... Localize & localize array/hash element .............................. */
755 #define SU_UD_LOCALIZE_FREE(U) STMT_START { \
756 SvREFCNT_dec((U)->elem); \
757 SvREFCNT_dec((U)->val); \
758 SvREFCNT_dec((U)->sv); \
762 STATIC I32 su_ud_localize_init(pTHX_ su_ud_localize *ud, SV *sv, SV *val, SV *elem) {
763 #define su_ud_localize_init(UD, S, V, E) su_ud_localize_init(aTHX_ (UD), (S), (V), (E))
768 SvREFCNT_inc_simple_void(sv);
770 if (SvTYPE(sv) >= SVt_PVGV) {
771 if (!val || !SvROK(val)) { /* local *x; or local *x = $val; */
773 } else { /* local *x = \$val; */
774 t = SvTYPE(SvRV(val));
777 } else if (SvROK(sv)) {
778 croak("Invalid %s reference as the localization target",
779 sv_reftype(SvRV(sv), 0));
782 const char *p = SvPV_const(sv, len), *s;
783 for (s = p, l = len; l > 0 && isSPACE(*s); ++s, --l) { }
789 case '$': t = SVt_PV; break;
790 case '@': t = SVt_PVAV; break;
791 case '%': t = SVt_PVHV; break;
792 case '&': t = SVt_PVCV; break;
793 case '*': t = SVt_PVGV; break;
798 } else if (val) { /* t == SVt_NULL, type can't be inferred from the sigil */
799 if (SvROK(val) && !sv_isobject(val)) {
800 t = SvTYPE(SvRV(val));
812 size = elem ? SU_SAVE_AELEM_OR_ADELETE_SIZE
817 size = elem ? SU_SAVE_HELEM_OR_HDELETE_SIZE
822 size = SU_SAVE_GP_SIZE;
826 size = SU_SAVE_GVCV_SIZE;
830 size = SU_SAVE_SCALAR_SIZE;
833 /* When deref is set, val isn't NULL */
836 ud->val = val ? newSVsv(deref ? SvRV(val) : val) : NULL;
837 ud->elem = SvREFCNT_inc(elem);
843 STATIC void su_localize(pTHX_ void *ud_) {
844 #define su_localize(U) su_localize(aTHX_ (U))
845 su_ud_localize *ud = (su_ud_localize *) ud_;
852 if (SvTYPE(sv) >= SVt_PVGV) {
856 gv = gv_fetchsv(sv, GV_ADDMULTI, t);
859 const char *name = SvPV_const(sv, len);
860 gv = gv_fetchpvn_flags(name, len, GV_ADDMULTI, t);
867   PerlIO_printf(Perl_debug_log, "%p: === localize a %s\n", ud, sv_reftype(z, 0));
868 PerlIO_printf(Perl_debug_log,
869 "%p: depth=%2d scope_ix=%2d save_ix=%2d\n",
870 ud, SU_UD_DEPTH(ud), PL_scopestack_ix, PL_savestack_ix);
874  /* Inspired by Alias.pm */
878 su_save_aelem(GvAV(gv), elem, val);
885 su_save_helem(GvHV(gv), elem, val);
891 save_gp(gv, 1); /* hide previous entry in symtab */
897 gv = (GV *) save_scalar(gv);
902 SvSetMagicSV((SV *) gv, val);
905 SU_UD_LOCALIZE_FREE(ud);
908 /* --- Pop a context back -------------------------------------------------- */
912 # define SU_CXNAME(C) PL_block_type[CxTYPE(C)]
914 # define SU_CXNAME(C) "XXX"
918 STATIC void su_pop(pTHX_ void *ud) {
919 #define su_pop(U) su_pop(aTHX_ (U))
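 /* su_pop() is the destructor re-armed at each enclosing scope: it trims the
  * savestack back to the floor recorded in origin[], decrements the remaining
  * depth, then either re-arms itself (plus a placeholder when padding is
  * needed) in the enclosing scope or, once the recorded depth reaches zero,
  * fires the user handler stored in the userdata. */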
920 I32 depth, base, mark, *origin;
921 depth = SU_UD_DEPTH(ud);
924 PerlIO_printf(Perl_debug_log,
926   "%p: --- pop a %s\n%p: leave scope at depth=%2d scope_ix=%2d cur_top=%2d cur_base=%2d\n",
927    ud, SU_CXNAME(cxstack + cxstack_ix),
928    ud, depth, PL_scopestack_ix, PL_savestack_ix, PL_scopestack[PL_scopestack_ix])
931 origin = SU_UD_ORIGIN(ud);
932 mark = origin[depth];
933 base = origin[depth - 1];
935 SU_D(PerlIO_printf(Perl_debug_log,
936 "%p: original scope was %*c top=%2d base=%2d\n",
937 ud, 24, ' ', mark, base));
940 SU_D(PerlIO_printf(Perl_debug_log, "%p: clear leftovers\n", ud));
941 PL_savestack_ix = mark;
944 PL_savestack_ix = base;
946 SU_UD_DEPTH(ud) = --depth;
951 if ((pad = SU_UD_PAD(ud))) {
954 SU_D(PerlIO_printf(Perl_debug_log,
955 "%p: push a pad slot at depth=%2d scope_ix=%2d save_ix=%2d\n",
956 ud, depth, PL_scopestack_ix, PL_savestack_ix));
957 SU_SAVE_PLACEHOLDER();
961 SU_D(PerlIO_printf(Perl_debug_log,
962 "%p: push destructor at depth=%2d scope_ix=%2d save_ix=%2d\n",
963 ud, depth, PL_scopestack_ix, PL_savestack_ix));
964 SAVEDESTRUCTOR_X(su_pop, ud);
966 SU_UD_HANDLER(ud)(aTHX_ ud);
969 SU_D(PerlIO_printf(Perl_debug_log,
970 "%p: --- end pop: cur_top=%2d == cur_base=%2d\n",
971 ud, PL_savestack_ix, PL_scopestack[PL_scopestack_ix]));
974 /* --- Initialize the stack and the action userdata ------------------------ */
976 STATIC I32 su_init(pTHX_ void *ud, I32 cxix, I32 size) {
977 #define su_init(U, C, S) su_init(aTHX_ (U), (C), (S))
978 I32 i, depth = 1, pad, offset, *origin;
980 SU_D(PerlIO_printf(Perl_debug_log, "%p: ### init for cx %d\n", ud, cxix));
982 if (size <= SU_SAVE_DESTRUCTOR_SIZE)
985 I32 extra = size - SU_SAVE_DESTRUCTOR_SIZE;
986 pad = extra / SU_SAVE_PLACEHOLDER_SIZE;
987 if (extra % SU_SAVE_PLACEHOLDER_SIZE)
990 offset = SU_SAVE_DESTRUCTOR_SIZE + SU_SAVE_PLACEHOLDER_SIZE * pad;
992 SU_D(PerlIO_printf(Perl_debug_log, "%p: size=%d pad=%d offset=%d\n",
993 ud, size, pad, offset));
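 /* For instance, with the default SU_SAVE_DESTRUCTOR_SIZE == 3 and
  * SU_SAVE_PLACEHOLDER_SIZE == 3, a request of size == 10 gives extra == 7,
  * hence pad == 3 placeholders and offset == 3 + 3 * 3 == 12. */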
995 for (i = cxstack_ix; i > cxix; --i) {
996 PERL_CONTEXT *cx = cxstack + i;
997 switch (CxTYPE(cx)) {
998 #if SU_HAS_PERL(5, 11, 0)
1000 case CXt_LOOP_PLAIN:
1001 case CXt_LOOP_LAZYSV:
1002 case CXt_LOOP_LAZYIV:
1006 SU_D(PerlIO_printf(Perl_debug_log, "%p: cx %d is loop\n", ud, i));
1010 SU_D(PerlIO_printf(Perl_debug_log, "%p: cx %d is other\n", ud, i));
1015 SU_D(PerlIO_printf(Perl_debug_log, "%p: going down to depth %d\n", ud, depth));
1017 Newx(origin, depth + 1, I32);
1018 origin[0] = PL_scopestack[PL_scopestack_ix - depth];
1019 PL_scopestack[PL_scopestack_ix - depth] += size;
1020 for (i = depth - 1; i >= 1; --i) {
1021 I32 j = PL_scopestack_ix - i;
1022 origin[depth - i] = PL_scopestack[j];
1023 PL_scopestack[j] += offset;
1025 origin[depth] = PL_savestack_ix;
1027 SU_UD_ORIGIN(ud) = origin;
1028 SU_UD_DEPTH(ud) = depth;
1029 SU_UD_PAD(ud) = pad;
1031 /* Make sure the first destructor fires by pushing enough fake slots on the stack. */
1033 if (PL_savestack_ix + SU_SAVE_DESTRUCTOR_SIZE
1034 <= PL_scopestack[PL_scopestack_ix - 1]) {
1037 SU_D(PerlIO_printf(Perl_debug_log,
1038 "%p: push a fake slot at scope_ix=%2d save_ix=%2d\n",
1039 ud, PL_scopestack_ix, PL_savestack_ix));
1040 SU_SAVE_PLACEHOLDER();
1041 } while (PL_savestack_ix + SU_SAVE_DESTRUCTOR_SIZE
1042 <= PL_scopestack[PL_scopestack_ix - 1]);
1044 SU_D(PerlIO_printf(Perl_debug_log,
1045 "%p: push first destructor at scope_ix=%2d save_ix=%2d\n",
1046 ud, PL_scopestack_ix, PL_savestack_ix));
1047 SAVEDESTRUCTOR_X(su_pop, ud);
1050 for (i = 0; i <= depth; ++i) {
1051 I32 j = PL_scopestack_ix - i;
1052 PerlIO_printf(Perl_debug_log,
1053 "%p: depth=%2d scope_ix=%2d saved_floor=%2d new_floor=%2d\n",
1054 ud, i, j, origin[depth - i],
1055 i == 0 ? PL_savestack_ix : PL_scopestack[j]);
1062 /* --- Unwind stack -------------------------------------------------------- */
1064 STATIC void su_unwind(pTHX_ void *ud_) {
1066 I32 cxix = MY_CXT.unwind_storage.cxix;
1067 I32 items = MY_CXT.unwind_storage.items - 1;
1068 SV **savesp = MY_CXT.unwind_storage.savesp;
1071 PERL_UNUSED_VAR(ud_);
1074 PL_stack_sp = savesp;
1076 if (cxstack_ix > cxix)
1079 /* Hide the level */
1083 mark = PL_markstack[cxstack[cxix].blk_oldmarksp];
1084 *PL_markstack_ptr = PL_stack_sp - PL_stack_base - items;
1087 I32 gimme = GIMME_V;
1088 PerlIO_printf(Perl_debug_log,
1089 "%p: cx=%d gimme=%s items=%d sp=%d oldmark=%d mark=%d\n",
1091 gimme == G_VOID ? "void" : gimme == G_ARRAY ? "list" : "scalar",
1092 items, PL_stack_sp - PL_stack_base, *PL_markstack_ptr, mark);
1095 PL_op = (OP *) &(MY_CXT.unwind_storage.return_op);
1096 PL_op = PL_op->op_ppaddr(aTHX);
1098 *PL_markstack_ptr = mark;
1100 MY_CXT.unwind_storage.proxy_op.op_next = PL_op;
1101 PL_op = &(MY_CXT.unwind_storage.proxy_op);
1104 /* --- Uplevel ------------------------------------------------------------- */
1106 #define SU_UPLEVEL_SAVE(f, t) STMT_START { sud->old_##f = PL_##f; PL_##f = (t); } STMT_END
1107 #define SU_UPLEVEL_RESTORE(f) STMT_START { PL_##f = sud->old_##f; } STMT_END
1109 STATIC su_uplevel_ud *su_uplevel_storage_new(pTHX_ I32 cxix) {
1110 #define su_uplevel_storage_new(I) su_uplevel_storage_new(aTHX_ (I))
1115 sud = MY_CXT.uplevel_storage.root;
1117 MY_CXT.uplevel_storage.root = sud->next;
1118 MY_CXT.uplevel_storage.count--;
1120 sud = su_uplevel_ud_new();
1123 sud->next = MY_CXT.uplevel_storage.top;
1124 MY_CXT.uplevel_storage.top = sud;
1126 depth = su_uid_depth(cxix);
1127 su_uid_storage_dup(&sud->tmp_uid_storage, &MY_CXT.uid_storage, depth);
1128 sud->old_uid_storage = MY_CXT.uid_storage;
1129 MY_CXT.uid_storage = sud->tmp_uid_storage;
1134 STATIC void su_uplevel_storage_delete(pTHX_ su_uplevel_ud *sud) {
1135 #define su_uplevel_storage_delete(S) su_uplevel_storage_delete(aTHX_ (S))
1138 sud->tmp_uid_storage = MY_CXT.uid_storage;
1139 MY_CXT.uid_storage = sud->old_uid_storage;
1143 map = sud->tmp_uid_storage.map;
1144 alloc = sud->tmp_uid_storage.alloc;
1145 for (i = 0; i < alloc; ++i) {
1147    map[i]->flags &= ~SU_UID_ACTIVE;
1150 MY_CXT.uplevel_storage.top = sud->next;
1152 if (MY_CXT.uplevel_storage.count >= SU_UPLEVEL_STORAGE_SIZE) {
1153 su_uplevel_ud_delete(sud);
1155 sud->next = MY_CXT.uplevel_storage.root;
1156 MY_CXT.uplevel_storage.root = sud;
1157 MY_CXT.uplevel_storage.count++;
1161 STATIC int su_uplevel_goto_static(const OP *o) {
1162 for (; o; o = o->op_sibling) {
1163 /* goto ops are unops with kids. */
1164 if (!(o->op_flags & OPf_KIDS))
1167 switch (o->op_type) {
1170 /* Don't care about gotos inside eval, as they are forbidden at run time. */
1175 if (su_uplevel_goto_static(cUNOPo->op_first))
1184 #if SU_UPLEVEL_HIJACKS_RUNOPS
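/* Replacement run loop installed while an uplevel'd body that contains a goto
 * is executing: just before a goto op runs, refresh the fake @_ of the
 * uplevel'd frame from the current GvAV(PL_defgv), so that goto &sub forwards
 * the arguments the callback set up. */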
1186 STATIC int su_uplevel_goto_runops(pTHX) {
1187 #define su_uplevel_goto_runops() su_uplevel_goto_runops(aTHX)
1193 if (op->op_type == OP_GOTO) {
1194 AV *argarray = NULL;
1197 for (cxix = cxstack_ix; cxix >= 0; --cxix) {
1198 const PERL_CONTEXT *cx = cxstack + cxix;
1200 switch (CxTYPE(cx)) {
1202 if (CxHASARGS(cx)) {
1203 argarray = cx->blk_sub.argarray;
1219 if (MY_CXT.uplevel_storage.top->cxix == cxix) {
1220 AV *args = GvAV(PL_defgv);
1221 I32 items = AvFILLp(args);
1223 av_extend(argarray, items);
1224 Copy(AvARRAY(args), AvARRAY(argarray), items + 1, SV *);
1225 AvFILLp(argarray) = items;
1230 PL_op = op = op->op_ppaddr(aTHX);
1232 #if !SU_HAS_PERL(5, 13, 0)
1242 #endif /* SU_UPLEVEL_HIJACKS_RUNOPS */
1244 #define su_at_underscore(C) PadARRAY(PadlistARRAY(CvPADLIST(C))[CvDEPTH(C)])[0]
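/* su_at_underscore(C) resolves to the pad slot holding @_ for the CV C at its
 * current depth. */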
1246 STATIC void su_uplevel_restore(pTHX_ void *sus_) {
1247 su_uplevel_ud *sud = sus_;
1248 PERL_SI *cur = sud->old_curstackinfo;
1249 PERL_SI *si = sud->si;
1251 #if SU_UPLEVEL_HIJACKS_RUNOPS
1252 if (PL_runops == su_uplevel_goto_runops)
1253 PL_runops = sud->old_runops;
1256 if (sud->callback) {
1257 PERL_CONTEXT *cx = cxstack + sud->cxix;
1258 AV *argarray = MUTABLE_AV(su_at_underscore(sud->callback));
1260 /* We have to fix the pad entry for @_ in the original callback because it
1261 * may have been reified. */
1262 if (AvREAL(argarray)) {
1263 const I32 fill = AvFILLp(argarray);
1264 SvREFCNT_dec(argarray);
1266 AvREAL_off(argarray);
1267 AvREIFY_on(argarray);
1268 av_extend(argarray, fill);
1269 su_at_underscore(sud->callback) = MUTABLE_SV(argarray);
1271 CLEAR_ARGARRAY(argarray);
1274 /* If the old cv member is our renamed CV, it means that this place has been
1275 * reached without a goto() happening, and the old argarray member is
1276 * actually our fake argarray. Destroy it properly in that case. */
1277 if (cx->blk_sub.cv == sud->renamed) {
1278 SvREFCNT_dec(cx->blk_sub.argarray);
1279 cx->blk_sub.argarray = argarray;
1282 CvDEPTH(sud->callback)--;
1283 SvREFCNT_dec(sud->callback);
1286 /* Free the renamed CV. We must do it ourselves so that we can force the
1287 * depth to be 0, or perl would complain about it being "still in use".
1288 * But we *know* that it cannot be so. */
1290 CvDEPTH(sud->renamed) = 0;
1291 CvPADLIST(sud->renamed) = NULL;
1292 SvREFCNT_dec(sud->renamed);
1295 CATCH_SET(sud->old_catch);
1297 SU_UPLEVEL_RESTORE(op);
1299 /* stack_grow() wants PL_curstack so restore the old stack first */
1300 if (PL_curstackinfo == si) {
1301 PL_curstack = cur->si_stack;
1302 if (sud->old_mainstack)
1303 SU_UPLEVEL_RESTORE(mainstack);
1304 SU_UPLEVEL_RESTORE(curstackinfo);
1307 CV *target = sud->target;
1310 /* When we die, the depth of the target CV is not updated because of the
1311 * stack switcheroo. So we have to look at all the frames between the
1312 * uplevel call and the catch block to count how many call frames to the
1313 * target CV were skipped. */
1314 for (i = cur->si_cxix; i > sud->cxix; i--) {
1315 register const PERL_CONTEXT *cx = cxstack + i;
1317 if (CxTYPE(cx) == CXt_SUB) {
1318 if (cx->blk_sub.cv == target)
1323  /* If we died, the replacement stack was already unwound to the first
1324 * eval frame, and all the contexts down there were popped. We don't have
1325 * to pop manually any context of the original stack, because they must
1326 * have been in the replacement stack as well (since the second was copied
1327 * from the first). Thus we only have to make sure the original stack index
1328 * points to the context just below the first eval scope under the target
1330 for (; i >= 0; i--) {
1331 register const PERL_CONTEXT *cx = cxstack + i;
1333 switch (CxTYPE(cx)) {
1335 if (cx->blk_sub.cv == target)
1347 CvDEPTH(target) = sud->target_depth - levels;
1348 PL_curstackinfo->si_cxix = i - 1;
1350 #if !SU_HAS_PERL(5, 13, 1)
1351  /* Since $@ may have been localized between the target frame and the uplevel
1352 * call, we forcefully flush the save stack to get rid of it and then
1353   * reset $@ to its proper value. Note that the call to
1354 * su_uplevel_restore() must happen before the "reset $@" item of the save
1355 * stack is processed, as uplevel was called after the localization.
1356 * Andrew's changes to how $@ was handled, which were mainly integrated
1357 * between perl 5.13.0 and 5.13.1, fixed this. */
1358 if (ERRSV && SvTRUE(ERRSV)) {
1359 register const PERL_CONTEXT *cx = cxstack + i; /* This is the eval scope */
1360 SV *errsv = SvREFCNT_inc(ERRSV);
1361 PL_scopestack_ix = cx->blk_oldscopesp;
1362 leave_scope(PL_scopestack[PL_scopestack_ix]);
1363 sv_setsv(ERRSV, errsv);
1364 SvREFCNT_dec(errsv);
1370 SU_UPLEVEL_RESTORE(curcop);
1372 SvREFCNT_dec(sud->target);
1374 PL_stack_base = AvARRAY(cur->si_stack);
1375 PL_stack_sp = PL_stack_base + AvFILLp(cur->si_stack);
1376 PL_stack_max = PL_stack_base + AvMAX(cur->si_stack);
1378 /* When an exception is thrown from the uplevel'd subroutine,
1379 * su_uplevel_restore() may be called by the LEAVE in die_unwind() (renamed
1380 * die_where() in more recent perls), which has the sad habit of keeping a
1381 * pointer to the current context frame across this call. This means that we
1382 * can't free the temporary context stack we used for the uplevel call right
1383 * now, or that pointer upwards would point to garbage. */
1384 #if SU_HAS_PERL(5, 13, 7)
1385 /* This issue has been fixed in perl with commit 8f89e5a9, which was made
1386 * public in perl 5.13.7. */
1387 su_uplevel_storage_delete(sud);
1389 /* Otherwise, we just enqueue it back in the global storage list. */
1393 sud->tmp_uid_storage = MY_CXT.uid_storage;
1394 MY_CXT.uid_storage = sud->old_uid_storage;
1396 MY_CXT.uplevel_storage.top = sud->next;
1397 sud->next = MY_CXT.uplevel_storage.root;
1398 MY_CXT.uplevel_storage.root = sud;
1399 MY_CXT.uplevel_storage.count++;
1406 STATIC CV *su_cv_clone(pTHX_ CV *proto, GV *gv) {
1407 #define su_cv_clone(P, G) su_cv_clone(aTHX_ (P), (G))
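 /* Make a copy of the callback that borrows its op tree, pad and outside
  * pointer but is attached to the given GV, so that the uplevel'd code appears
  * to run under the name of the sub it replaces. */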
1411 cv = MUTABLE_CV(newSV_type(SvTYPE(proto)));
1413 CvFLAGS(cv) = CvFLAGS(proto);
1415 CvFLAGS(cv) &= ~CVf_CVGV_RC;
1417 CvDEPTH(cv) = CvDEPTH(proto);
1419 CvFILE(cv) = CvISXSUB(proto) ? CvFILE(proto) : savepv(CvFILE(proto));
1421 CvFILE(cv) = CvFILE(proto);
1425 CvSTASH_set(cv, CvSTASH(proto));
1426 /* Commit 4c74a7df, first shipped with perl 5.13.3, began to add backrefs to
1427 * stashes. CvSTASH_set() started to do it as well with commit c68d95645
1428 * (which was part of perl 5.13.7). */
1429 #if SU_HAS_PERL(5, 13, 3) && !SU_HAS_PERL(5, 13, 7)
1431 Perl_sv_add_backref(aTHX_ CvSTASH(proto), MUTABLE_SV(cv));
1434 if (CvISXSUB(proto)) {
1435 CvXSUB(cv) = CvXSUB(proto);
1436 CvXSUBANY(cv) = CvXSUBANY(proto);
1439 CvROOT(cv) = OpREFCNT_inc(CvROOT(proto));
1441 CvSTART(cv) = CvSTART(proto);
1443 CvOUTSIDE(cv) = CvOUTSIDE(proto);
1444 #ifdef CVf_WEAKOUTSIDE
1445 if (!(CvFLAGS(proto) & CVf_WEAKOUTSIDE))
1447 SvREFCNT_inc_simple_void(CvOUTSIDE(cv));
1448 CvPADLIST(cv) = CvPADLIST(proto);
1449 #ifdef CvOUTSIDE_SEQ
1450 CvOUTSIDE_SEQ(cv) = CvOUTSIDE_SEQ(proto);
1454 sv_setpvn(MUTABLE_SV(cv), SvPVX_const(proto), SvCUR(proto));
1464 STATIC I32 su_uplevel(pTHX_ CV *callback, I32 cxix, I32 args) {
1465 #define su_uplevel(C, I, A) su_uplevel(aTHX_ (C), (I), (A))
1467 const PERL_CONTEXT *cx = cxstack + cxix;
1469 PERL_SI *cur = PL_curstackinfo;
1475 I32 old_mark, new_mark;
1482 /* Make PL_stack_sp point just before the CV. */
1483 PL_stack_sp -= args + 1;
1484 old_mark = AvFILLp(PL_curstack) = PL_stack_sp - PL_stack_base;
1487 sud = su_uplevel_storage_new(cxix);
1491 sud->callback = NULL;
1492 sud->renamed = NULL;
1493 SAVEDESTRUCTOR_X(su_uplevel_restore, sud);
1497 si->si_type = cur->si_type;
1499 si->si_prev = cur->si_prev;
1501 si->si_markoff = cx->blk_oldmarksp;
1504 /* Allocate enough space for all the elements of the original stack up to the
1505 * target context, plus the forthcoming arguments. */
1506 new_mark = cx->blk_oldsp;
1507 av_extend(si->si_stack, new_mark + 1 + args + 1);
1508 Copy(PL_curstack, AvARRAY(si->si_stack), new_mark + 1, SV *);
1509 AvFILLp(si->si_stack) = new_mark;
1510 SU_POISON(AvARRAY(si->si_stack) + new_mark + 1, args + 1, SV *);
1512 /* Specialized SWITCHSTACK() */
1513 PL_stack_base = AvARRAY(si->si_stack);
1514 old_stack_sp = PL_stack_sp;
1515 PL_stack_sp = PL_stack_base + AvFILLp(si->si_stack);
1516 PL_stack_max = PL_stack_base + AvMAX(si->si_stack);
1519 /* Copy the context stack up to the context just below the target. */
1520 si->si_cxix = (cxix < 0) ? -1 : (cxix - 1);
1521 if (si->si_cxmax < cxix) {
1522 /* The max size must be at least two so that GROW(max) = (max*3)/2 > max */
1523 si->si_cxmax = (cxix < 4) ? 4 : cxix;
1524 Renew(si->si_cxstack, si->si_cxmax + 1, PERL_CONTEXT);
1526 Copy(cur->si_cxstack, si->si_cxstack, cxix, PERL_CONTEXT);
1527 SU_POISON(si->si_cxstack + cxix, si->si_cxmax + 1 - cxix, PERL_CONTEXT);
1529 target = cx->blk_sub.cv;
1530 sud->target = (CV *) SvREFCNT_inc(target);
1531 sud->target_depth = CvDEPTH(target);
1533 /* blk_oldcop is essentially needed for caller() and stack traces. It has no
1534 * run-time implication, since PL_curcop will be overwritten as soon as we
1535  * enter a sub (a sub starts with a nextstate/dbstate). Hence it's safe to just
1536 * make it point to the blk_oldcop for the target frame, so that caller()
1537 * reports the right file name, line number and lexical hints. */
1538 SU_UPLEVEL_SAVE(curcop, cx->blk_oldcop);
1539 /* Don't reset PL_markstack_ptr, or we would overwrite the mark stack below
1540  * this point. Don't reset PL_curpm either; we want the most recent matches. */
1542 SU_UPLEVEL_SAVE(curstackinfo, si);
1543 /* If those two are equal, we need to fool POPSTACK_TO() */
1544 if (PL_mainstack == PL_curstack)
1545 SU_UPLEVEL_SAVE(mainstack, si->si_stack);
1547 sud->old_mainstack = NULL;
1548 PL_curstack = si->si_stack;
1550 renamed = su_cv_clone(callback, CvGV(target));
1551 sud->renamed = renamed;
1554 /* Both SP and old_stack_sp point just before the CV. */
1555 Copy(old_stack_sp + 2, SP + 1, args, SV *);
1557 PUSHs((SV *) renamed);
1560 Zero(&sub_op, 1, UNOP);
1561 sub_op.op_type = OP_ENTERSUB;
1562 sub_op.op_next = NULL;
1563 sub_op.op_flags = OP_GIMME_REVERSE(gimme) | OPf_STACKED;
1565   sub_op.op_private |= OPpENTERSUB_DB;
1567 SU_UPLEVEL_SAVE(op, (OP *) &sub_op);
1569 #if SU_UPLEVEL_HIJACKS_RUNOPS
1570 sud->old_runops = PL_runops;
1573 sud->old_catch = CATCH_GET;
1576 if ((PL_op = PL_ppaddr[OP_ENTERSUB](aTHX))) {
1577 PERL_CONTEXT *sub_cx = cxstack + cxstack_ix;
1579   /* If pp_entersub() returns a non-null OP, it means that the callback is not an XSUB. */
1582 sud->callback = MUTABLE_CV(SvREFCNT_inc(callback));
1583 CvDEPTH(callback)++;
1585 if (CxHASARGS(cx) && cx->blk_sub.argarray) {
1586 /* The call to pp_entersub() has saved the current @_ (in XS terms,
1587 * GvAV(PL_defgv)) in the savearray member, and has created a new argarray
1588 * with what we put on the stack. But we want to fake up the same arguments
1589 * as the ones in use at the context we uplevel to, so we replace the
1590 * argarray with an unreal copy of the original @_. */
1594 av_extend(av, AvMAX(cx->blk_sub.argarray));
1595 AvFILLp(av) = AvFILLp(cx->blk_sub.argarray);
1596 Copy(AvARRAY(cx->blk_sub.argarray), AvARRAY(av), AvFILLp(av) + 1, SV *);
1597 sub_cx->blk_sub.argarray = av;
1599 SvREFCNT_inc_simple_void(sub_cx->blk_sub.argarray);
1602 if (su_uplevel_goto_static(CvROOT(renamed))) {
1603 #if SU_UPLEVEL_HIJACKS_RUNOPS
1604 if (PL_runops != PL_runops_std) {
1605 if (PL_runops == PL_runops_dbg) {
1607 croak("uplevel() can't execute code that calls goto when debugging flags are set");
1608 } else if (PL_runops != su_uplevel_goto_runops)
1609 croak("uplevel() can't execute code that calls goto with a custom runloop");
1612 PL_runops = su_uplevel_goto_runops;
1613 #else /* SU_UPLEVEL_HIJACKS_RUNOPS */
1614 croak("uplevel() can't execute code that calls goto before perl 5.8");
1615 #endif /* !SU_UPLEVEL_HIJACKS_RUNOPS */
1623 ret = PL_stack_sp - (PL_stack_base + new_mark);
1625 AV *old_stack = sud->old_curstackinfo->si_stack;
1627 if (old_mark + ret > AvMAX(old_stack)) {
1628 /* Specialized EXTEND(old_sp, ret) */
1629 av_extend(old_stack, old_mark + ret + 1);
1630 old_stack_sp = AvARRAY(old_stack) + old_mark;
1633 Copy(PL_stack_sp - ret + 1, old_stack_sp + 1, ret, SV *);
1635 AvFILLp(old_stack) += ret;
1643 /* --- Unique context ID --------------------------------------------------- */
1645 STATIC su_uid *su_uid_storage_fetch(pTHX_ UV depth) {
1646 #define su_uid_storage_fetch(D) su_uid_storage_fetch(aTHX_ (D))
1651 map = MY_CXT.uid_storage.map;
1652 alloc = MY_CXT.uid_storage.alloc;
1654 if (depth >= alloc) {
1657 Renew(map, depth + 1, su_uid *);
1658 for (i = alloc; i <= depth; ++i)
1661 MY_CXT.uid_storage.map = map;
1662 MY_CXT.uid_storage.alloc = depth + 1;
1668 Newx(uid, 1, su_uid);
1674 if (depth >= MY_CXT.uid_storage.used)
1675 MY_CXT.uid_storage.used = depth + 1;
1680 STATIC int su_uid_storage_check(pTHX_ UV depth, UV seq) {
1681 #define su_uid_storage_check(D, S) su_uid_storage_check(aTHX_ (D), (S))
1685 if (depth >= MY_CXT.uid_storage.used)
1688 uid = MY_CXT.uid_storage.map[depth];
1690 return uid && (uid->seq == seq) && (uid->flags & SU_UID_ACTIVE);
1693 STATIC void su_uid_drop(pTHX_ void *ud_) {
1696 uid->flags &= ~SU_UID_ACTIVE;
1699 STATIC void su_uid_bump(pTHX_ void *ud_) {
1700 su_ud_reap *ud = ud_;
1702 SAVEDESTRUCTOR_X(su_uid_drop, ud->cb);
1705 STATIC SV *su_uid_get(pTHX_ I32 cxix) {
1706 #define su_uid_get(I) su_uid_get(aTHX_ (I))
1711 depth = su_uid_depth(cxix);
1712 uid = su_uid_storage_fetch(depth);
1714 if (!(uid->flags & SU_UID_ACTIVE)) {
1717 uid->seq = su_uid_seq_next(depth);
1718 uid->flags |= SU_UID_ACTIVE;
1720 Newx(ud, 1, su_ud_reap);
1721 SU_UD_ORIGIN(ud) = NULL;
1722 SU_UD_HANDLER(ud) = su_uid_bump;
1723 ud->cb = (SV *) uid;
1724 su_init(ud, cxix, SU_SAVE_DESTRUCTOR_SIZE);
1727 uid_sv = sv_newmortal();
1728 sv_setpvf(uid_sv, "%"UVuf"-%"UVuf, depth, uid->seq);
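 /* A unique ID therefore looks like "3-42": the context depth, a dash, then
  * the sequence number allocated for that depth. */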
1734 #define su_grok_number(S, L, VP) grok_number((S), (L), (VP))
1736 #else /* grok_number */
1738 #define IS_NUMBER_IN_UV 0x1
1740 STATIC int su_grok_number(pTHX_ const char *s, STRLEN len, UV *valuep) {
1741 #define su_grok_number(S, L, VP) su_grok_number(aTHX_ (S), (L), (VP))
1745 /* This crude check should be good enough for a fallback implementation.
1746 * Better be too strict than too lax. */
1747 for (i = 0; i < len; ++i) {
1752 tmpsv = sv_newmortal();
1753 sv_setpvn(tmpsv, s, len);
1754 *valuep = sv_2uv(tmpsv);
1756 return IS_NUMBER_IN_UV;
1759 #endif /* !grok_number */
1761 STATIC int su_uid_validate(pTHX_ SV *uid) {
1762 #define su_uid_validate(U) su_uid_validate(aTHX_ (U))
1768 s = SvPV_const(uid, len);
1770 while (p < len && s[p] != '-')
1773 croak("UID contains only one part");
1775 type = su_grok_number(s, p, &depth);
1776 if (type != IS_NUMBER_IN_UV)
1777 croak("First UID part is not an unsigned integer");
1779  ++p; /* Skip '-'. Since p < len held before the increment, len - p >= 0 afterwards. */
1781 type = su_grok_number(s + p, len - p, &seq);
1782 if (type != IS_NUMBER_IN_UV)
1783 croak("Second UID part is not an unsigned integer");
1785 return su_uid_storage_check(depth, seq);
1788 /* --- Context operations -------------------------------------------------- */
1790 #if SU_HAS_PERL(5, 8, 9)
1791 # define SU_SKIP_DB_MAX 2
1793 # define SU_SKIP_DB_MAX 3
1796 /* Skip context sequences of 1 to SU_SKIP_DB_MAX (inclusive) block contexts
1797 * followed by a DB sub */
1799 #define SU_SKIP_DB(C) \
1802 PERL_CONTEXT *base = cxstack; \
1803 PERL_CONTEXT *cx = base + (C); \
1804 while (cx >= base && (C) > skipped && CxTYPE(cx) == CXt_BLOCK) \
1806 if (cx >= base && (C) > skipped) { \
1807 switch (CxTYPE(cx)) { \
1809 if (skipped <= SU_SKIP_DB_MAX && cx->blk_sub.cv == GvCV(PL_DBsub)) \
1810 (C) -= skipped + 1; \
1818 STATIC I32 su_context_up(pTHX_ I32 cxix) {
1819 #define su_context_up(C) su_context_up(aTHX_ (C))
1825 cx = cxstack + cxix;
1826 if (CxTYPE(cx) == CXt_BLOCK) {
1827 PERL_CONTEXT *prev = cx - 1;
1829 switch (CxTYPE(prev)) {
1830 #if SU_HAS_PERL(5, 10, 0)
1834 #if SU_HAS_PERL(5, 11, 0)
1835 /* That's the only subcategory that can cause an extra BLOCK context */
1836 case CXt_LOOP_PLAIN:
1840 if (cx->blk_oldcop == prev->blk_oldcop)
1846 if (cx->blk_oldcop && cx->blk_oldcop->op_sibling
1847 && cx->blk_oldcop->op_sibling->op_type == OP_SUBST)
1866 /* --- Interpreter setup/teardown ------------------------------------------ */
1868 STATIC void su_teardown(pTHX_ void *param) {
1873 map = MY_CXT.uid_storage.map;
1876 for (i = 0; i < MY_CXT.uid_storage.used; ++i)
1881 cur = MY_CXT.uplevel_storage.root;
1883 su_uplevel_ud *prev;
1887 su_uplevel_ud_delete(prev);
1894 STATIC void su_setup(pTHX) {
1895 #define su_setup() su_setup(aTHX)
1898 MY_CXT.stack_placeholder = NULL;
1900 /* NewOp() calls calloc() which just zeroes the memory with memset(). */
1901 Zero(&(MY_CXT.unwind_storage.return_op), 1, LISTOP);
1902 MY_CXT.unwind_storage.return_op.op_type = OP_RETURN;
1903 MY_CXT.unwind_storage.return_op.op_ppaddr = PL_ppaddr[OP_RETURN];
1905 Zero(&(MY_CXT.unwind_storage.proxy_op), 1, OP);
1906 MY_CXT.unwind_storage.proxy_op.op_type = OP_STUB;
1907 MY_CXT.unwind_storage.proxy_op.op_ppaddr = NULL;
1909 MY_CXT.uplevel_storage.top = NULL;
1910 MY_CXT.uplevel_storage.root = NULL;
1911 MY_CXT.uplevel_storage.count = 0;
1913 MY_CXT.uid_storage.map = NULL;
1914 MY_CXT.uid_storage.used = 0;
1915 MY_CXT.uid_storage.alloc = 0;
1917 call_atexit(su_teardown, NULL);
1922 /* --- XS ------------------------------------------------------------------ */
1924 #define SU_GET_CONTEXT(A, B) \
1933 else if (cxix > cxstack_ix) \
1934 cxix = cxstack_ix; \
1937 cxix = cxstack_ix; \
1943 #define SU_GET_LEVEL(A, B) \
1949 level = SvIV(lsv); \
1956 XS(XS_Scope__Upper_unwind); /* prototype to pass -Wmissing-prototypes */
1958 XS(XS_Scope__Upper_unwind) {
1967 PERL_UNUSED_VAR(cv); /* -W */
1968 PERL_UNUSED_VAR(ax); /* -Wall */
1970 SU_GET_CONTEXT(0, items - 1);
1972 PERL_CONTEXT *cx = cxstack + cxix;
1973 switch (CxTYPE(cx)) {
1975 if (PL_DBsub && cx->blk_sub.cv == GvCV(PL_DBsub))
1979 MY_CXT.unwind_storage.cxix = cxix;
1980 MY_CXT.unwind_storage.items = items;
1981    /* pp_entersub will want to sanitize the stack after returning from there.
1982     * Screw that, we're insane. */
1983 if (GIMME_V == G_SCALAR) {
1984 MY_CXT.unwind_storage.savesp = PL_stack_sp;
1985 /* dXSARGS calls POPMARK, so we need to match PL_markstack_ptr[1] */
1986 PL_stack_sp = PL_stack_base + PL_markstack_ptr[1] + 1;
1988 MY_CXT.unwind_storage.savesp = NULL;
1990 SAVEDESTRUCTOR_X(su_unwind, NULL);
1995 } while (--cxix >= 0);
1996 croak("Can't return outside a subroutine");
1999 MODULE = Scope::Upper PACKAGE = Scope::Upper
2007 MUTEX_INIT(&su_uid_seq_counter_mutex);
2009 su_uid_seq_counter.seqs = NULL;
2010 su_uid_seq_counter.size = 0;
2012 stash = gv_stashpv(__PACKAGE__, 1);
2013 newCONSTSUB(stash, "TOP", newSViv(0));
2014 newCONSTSUB(stash, "SU_THREADSAFE", newSVuv(SU_THREADSAFE));
2016 newXSproto("Scope::Upper::unwind", XS_Scope__Upper_unwind, file, NULL);
2027 su_uid_storage new_cxt;
2034 su_uid_storage_dup(&new_cxt, &MY_CXT.uid_storage, MY_CXT.uid_storage.used);
2038 MY_CXT.uplevel_storage.top = NULL;
2039 MY_CXT.uplevel_storage.root = NULL;
2040 MY_CXT.uplevel_storage.count = 0;
2041 MY_CXT.uid_storage = new_cxt;
2045 #endif /* SU_THREADSAFE */
2051 I32 cxix = cxstack_ix;
2065 SU_GET_CONTEXT(0, 0);
2066 cxix = su_context_up(cxix);
2077 SU_GET_CONTEXT(0, 0);
2079 for (; cxix >= 0; --cxix) {
2080 PERL_CONTEXT *cx = cxstack + cxix;
2081 switch (CxTYPE(cx)) {
2085 if (PL_DBsub && cx->blk_sub.cv == GvCV(PL_DBsub))
2099 SU_GET_CONTEXT(0, 0);
2101 for (; cxix >= 0; --cxix) {
2102 PERL_CONTEXT *cx = cxstack + cxix;
2103 switch (CxTYPE(cx)) {
2123 while (--level >= 0)
2124 cxix = su_context_up(cxix);
2136 for (cxix = cxstack_ix; cxix > 0; --cxix) {
2137 PERL_CONTEXT *cx = cxstack + cxix;
2138 switch (CxTYPE(cx)) {
2140 if (PL_DBsub && cx->blk_sub.cv == GvCV(PL_DBsub))
2160 SU_GET_CONTEXT(0, 0);
2163 PERL_CONTEXT *cx = cxstack + cxix--;
2164 switch (CxTYPE(cx)) {
2168 I32 gimme = cx->blk_gimme;
2170 case G_VOID: XSRETURN_UNDEF; break;
2171 case G_SCALAR: XSRETURN_NO; break;
2172 case G_ARRAY: XSRETURN_YES; break;
2187 SU_GET_CONTEXT(1, 1);
2188 Newx(ud, 1, su_ud_reap);
2189 SU_UD_ORIGIN(ud) = NULL;
2190 SU_UD_HANDLER(ud) = su_reap;
2191 ud->cb = newSVsv(hook);
2192 su_init(ud, cxix, SU_SAVE_DESTRUCTOR_SIZE);
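 /* Illustrative Perl-level call (see the module's documentation for the
  * authoritative interface):  reap \&cleanup => $context;  */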
2195 localize(SV *sv, SV *val, ...)
2202 SU_GET_CONTEXT(2, 2);
2203 Newx(ud, 1, su_ud_localize);
2204 SU_UD_ORIGIN(ud) = NULL;
2205 SU_UD_HANDLER(ud) = su_localize;
2206 size = su_ud_localize_init(ud, sv, val, NULL);
2207 su_init(ud, cxix, size);
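 /* Illustrative Perl-level call:  localize '$Some::var' => $value => $context;  */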
2210 localize_elem(SV *sv, SV *elem, SV *val, ...)
2217 if (SvTYPE(sv) >= SVt_PVGV)
2218 croak("Can't infer the element localization type from a glob and the value");
2219 SU_GET_CONTEXT(3, 3);
2220 Newx(ud, 1, su_ud_localize);
2221 SU_UD_ORIGIN(ud) = NULL;
2222 SU_UD_HANDLER(ud) = su_localize;
2223 size = su_ud_localize_init(ud, sv, val, elem);
2224 if (ud->type != SVt_PVAV && ud->type != SVt_PVHV) {
2225 SU_UD_LOCALIZE_FREE(ud);
2226 croak("Can't localize an element of something that isn't an array or a hash");
2228 su_init(ud, cxix, size);
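 /* Illustrative Perl-level call:
  *   localize_elem '%Some::hash', 'key' => $value => $context;  */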
2231 localize_delete(SV *sv, SV *elem, ...)
2238 SU_GET_CONTEXT(2, 2);
2239 Newx(ud, 1, su_ud_localize);
2240 SU_UD_ORIGIN(ud) = NULL;
2241 SU_UD_HANDLER(ud) = su_localize;
2242 size = su_ud_localize_init(ud, sv, NULL, elem);
2243 su_init(ud, cxix, size);
2246 uplevel(SV *code, ...)
2249 I32 cxix, ret, args = 0;
2253 if (SvTYPE(code) < SVt_PVCV)
2254 croak("First argument to uplevel must be a code reference");
2255 SU_GET_CONTEXT(1, items - 1);
2257 PERL_CONTEXT *cx = cxstack + cxix;
2258 switch (CxTYPE(cx)) {
2260 croak("Can't uplevel to an eval frame");
2262 croak("Can't uplevel to a format frame");
2264 if (PL_DBsub && cx->blk_sub.cv == GvCV(PL_DBsub))
2270 /* su_uplevel() takes care of extending the stack if needed. */
2271 ret = su_uplevel((CV *) code, cxix, args);
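  /* Illustrative Perl-level call:  my @res = uplevel { ... } @args, $context;  */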
2276 } while (--cxix >= 0);
2277 croak("Can't uplevel outside a subroutine");
2286 SU_GET_CONTEXT(0, 0);
2287 uid = su_uid_get(cxix);
2293 validate_uid(SV *uid)
2298 ret = su_uid_validate(uid) ? &PL_sv_yes : &PL_sv_no;