1 /* This file is part of the Variable::Magic Perl module.
2 * See http://search.cpan.org/dist/Variable-Magic/ */
4 #include <stdarg.h> /* <va_list>, va_{start,arg,end}, ... */
6 #include <stdio.h> /* sprintf() */
8 #define PERL_NO_GET_CONTEXT
13 /* --- XS helpers ---------------------------------------------------------- */
15 #define XSH_PACKAGE "Variable::Magic"
20 /* ... Features ............................................................ */
22 /* uvar magic and Hash::Util::FieldHash were commited with 28419, but we only
23 * enable them on 5.10 */
24 #if XSH_HAS_PERL(5, 10, 0)
30 #if XSH_HAS_PERL_MAINT(5, 11, 0, 32969) || XSH_HAS_PERL(5, 12, 0)
31 # define VMG_COMPAT_SCALAR_LENGTH_NOLEN 1
33 # define VMG_COMPAT_SCALAR_LENGTH_NOLEN 0
36 #if XSH_HAS_PERL(5, 17, 4)
37 # define VMG_COMPAT_SCALAR_NOLEN 1
39 # define VMG_COMPAT_SCALAR_NOLEN 0
42 /* Applied to dev-5.9 as 25854, integrated to maint-5.8 as 28160, partially
43 * reverted to dev-5.11 as 9cdcb38b */
44 #if XSH_HAS_PERL_MAINT(5, 8, 9, 28160) || XSH_HAS_PERL_MAINT(5, 9, 3, 25854) || XSH_HAS_PERL(5, 10, 0)
45 # ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN
46 # if XSH_HAS_PERL(5, 11, 0)
47 # define VMG_COMPAT_ARRAY_PUSH_NOLEN 0
49 # define VMG_COMPAT_ARRAY_PUSH_NOLEN 1
52 # ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID
53 # define VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID 1
56 # ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN
57 # define VMG_COMPAT_ARRAY_PUSH_NOLEN 0
59 # ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID
60 # define VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID 0
64 /* Applied to dev-5.11 as 34908 */
65 #if XSH_HAS_PERL_MAINT(5, 11, 0, 34908) || XSH_HAS_PERL(5, 12, 0)
66 # define VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID 1
68 # define VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID 0
71 /* Applied to dev-5.9 as 31473 (see #43357), integrated to maint-5.8 as 32542 */
72 #if XSH_HAS_PERL_MAINT(5, 8, 9, 32542) || XSH_HAS_PERL_MAINT(5, 9, 5, 31473) || XSH_HAS_PERL(5, 10, 0)
73 # define VMG_COMPAT_ARRAY_UNDEF_CLEAR 1
75 # define VMG_COMPAT_ARRAY_UNDEF_CLEAR 0
78 #if XSH_HAS_PERL(5, 11, 0)
79 # define VMG_COMPAT_HASH_DELETE_NOUVAR_VOID 1
81 # define VMG_COMPAT_HASH_DELETE_NOUVAR_VOID 0
84 #if XSH_HAS_PERL(5, 17, 0)
85 # define VMG_COMPAT_CODE_COPY_CLONE 1
87 # define VMG_COMPAT_CODE_COPY_CLONE 0
90 #if XSH_HAS_PERL(5, 13, 2)
91 # define VMG_COMPAT_GLOB_GET 1
93 # define VMG_COMPAT_GLOB_GET 0
96 /* ... Trampoline ops ...................................................... */
98 #define VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE (XSH_HAS_PERL(5, 10, 0) && !XSH_HAS_PERL(5, 10, 1))
100 /* NewOp() isn't public in perl 5.8.0. */
101 #define VMG_RESET_RMG_NEEDS_TRAMPOLINE (VMG_UVAR && (XSH_THREADSAFE || !XSH_HAS_PERL(5, 8, 1)))
103 #define VMG_NEEDS_TRAMPOLINE VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE || VMG_RESET_RMG_NEEDS_TRAMPOLINE
105 #if VMG_NEEDS_TRAMPOLINE
/* Initialize a two-op trampoline: a stub "temp" op whose op_next points at a
 * "target" stub op that carries the real pp callback (cb) and an SV payload.
 * NOTE(review): extract is missing interior lines (closing brace etc.); code
 * left byte-identical. */
112 static void vmg_trampoline_init(vmg_trampoline *t, OP *(*cb)(pTHX)) {
113 t->temp.op_type = OP_STUB;
114 t->temp.op_ppaddr = 0;
115 t->temp.op_next = (OP *) &t->target;
116 t->temp.op_flags = 0;
117 t->temp.op_private = 0;
119 t->target.op_type = OP_STUB;
120 t->target.op_ppaddr = cb;
121 t->target.op_next = NULL;
122 t->target.op_flags = 0;
123 t->target.op_private = 0;
124 t->target.op_sv = NULL;
/* Arm the trampoline: store sv as the target op's payload and splice the
 * target into the op chain after o (target->op_next = o->op_next).
 * NOTE(review): return statement missing from this extract — presumably
 * returns &t->temp; confirm against the full source. */
127 static OP *vmg_trampoline_bump(pTHX_ vmg_trampoline *t, SV *sv, OP *o) {
128 #define vmg_trampoline_bump(T, S, O) vmg_trampoline_bump(aTHX_ (T), (S), (O))
130 t->temp.op_next = (OP *) &t->target;
132 t->target.op_sv = sv;
133 t->target.op_next = o->op_next;
138 #endif /* VMG_NEEDS_TRAMPOLINE */
140 /* --- Compatibility ------------------------------------------------------- */
143 # define Newx(v, n, c) New(0, v, n, c)
147 # define SvMAGIC_set(sv, val) (SvMAGIC(sv) = (val))
151 # define SvRV_const(sv) SvRV((SV *) sv)
154 #ifndef SvREFCNT_inc_simple_void
155 # define SvREFCNT_inc_simple_void(sv) ((void) SvREFCNT_inc(sv))
158 #ifndef SvREFCNT_dec_NN
159 # define SvREFCNT_dec_NN(sv) ((void) SvREFCNT_dec(sv))
163 # define mPUSHu(U) PUSHs(sv_2mortal(newSVuv(U)))
166 #ifndef PERL_MAGIC_ext
167 # define PERL_MAGIC_ext '~'
170 #ifndef PERL_MAGIC_tied
171 # define PERL_MAGIC_tied 'P'
178 #ifndef IN_PERL_COMPILETIME
179 # define IN_PERL_COMPILETIME (PL_curcop == &PL_compiling)
183 # define OP_NAME(O) (PL_op_name[(O)->op_type])
187 # define OP_CLASS(O) (PL_opargs[(O)->op_type] & OA_CLASS_MASK)
190 /* ... Bug-free mg_magical ................................................. */
192 /* See the discussion at http://www.xray.mpe.mpg.de/mailing-lists/perl5-porters/2008-01/msg00036.html */
194 #if XSH_HAS_PERL(5, 11, 3)
196 #define vmg_mg_magical(S) mg_magical(S)
/* Bug-free replacement for mg_magical() on perls older than 5.11.3: walks the
 * magic chain and recomputes the SVs_GMG/SMG flags from each vtable's svt_get.
 * NOTE(review): loop head, flag-setting lines and braces are missing from this
 * extract; code left byte-identical. */
200 static void vmg_mg_magical(SV *sv) {
204 if ((mg = SvMAGIC(sv))) {
206 const MGVTBL* const vtbl = mg->mg_virtual;
208 if (vtbl->svt_get && !(mg->mg_flags & MGf_GSKIP))
215 } while ((mg = mg->mg_moremagic));
216 if (!(SvFLAGS(sv) & (SVs_GMG|SVs_SMG)))
223 /* ... Cleaner version of sv_magicext() .................................... */
/* Thin wrapper over sv_magicext() that attaches PERL_MAGIC_ext magic and then
 * raises MGf_COPY / MGf_DUP / MGf_LOCAL on the new MAGIC, apparently gated on
 * the vtable providing the corresponding callbacks (conditions missing from
 * this extract — confirm against the full source). */
225 static MAGIC *vmg_sv_magicext(pTHX_ SV *sv, SV *obj, const MGVTBL *vtbl, const void *ptr, I32 len) {
226 #define vmg_sv_magicext(S, O, V, P, L) vmg_sv_magicext(aTHX_ (S), (O), (V), (P), (L))
229 mg = sv_magicext(sv, obj, PERL_MAGIC_ext, vtbl, ptr, len);
236 mg->mg_flags |= MGf_COPY;
239 mg->mg_flags |= MGf_DUP;
243 mg->mg_flags |= MGf_LOCAL;
244 #endif /* MGf_LOCAL */
246 if (mg->mg_flags & MGf_REFCOUNTED)
252 /* ... Safe version of call_sv() ........................................... */
/* Safe call_sv(): saves ERRSV, calls sv under G_EVAL, then if an error was
 * trapped either appends it to PL_errors / warns it (at compile time, bumping
 * the parser error count) or lets the optional cleanup hook decide whether to
 * re-croak. Finally restores the saved ERRSV.
 * NOTE(review): the croak/re-throw lines and several braces are absent from
 * this extract; code left byte-identical. */
254 static I32 vmg_call_sv(pTHX_ SV *sv, I32 flags, int (*cleanup)(pTHX_ void *), void *ud) {
255 #define vmg_call_sv(S, F, C, U) vmg_call_sv(aTHX_ (S), (F), (C), (U))
260 old_err = newSVsv(ERRSV);
261 sv_setsv(ERRSV, &PL_sv_undef);
264 ret = call_sv(sv, flags | G_EVAL);
267 SvREFCNT_dec(old_err);
269 if (IN_PERL_COMPILETIME) {
272 sv_catsv(PL_errors, ERRSV);
274 Perl_warn(aTHX_ "%s", SvPV_nolen(ERRSV));
277 #if XSH_HAS_PERL(5, 10, 0) || defined(PL_parser)
279 ++PL_parser->error_count;
280 #elif defined(PL_error_count)
286 if (!cleanup || cleanup(aTHX_ ud))
291 sv_setsv(ERRSV, old_err);
292 SvREFCNT_dec(old_err);
299 /* --- Stolen chunk of B --------------------------------------------------- */
314 #if XSH_HAS_PERL(5, 21, 5)
317 #if XSH_HAS_PERL(5, 21, 7)
323 static const char *const vmg_opclassnames[] = {
336 #if XSH_HAS_PERL(5, 21, 5)
339 #if XSH_HAS_PERL(5, 21, 7)
/* Classify an OP into its B::* opclass (OPc_BASEOP/UNOP/BINOP/SVOP/PADOP/
 * PVOP/...), chunk stolen from the B module. Special-cases null ops, sassign,
 * aelemfast, gv/gvsv/rcatline, then dispatches on OP_CLASS(o).
 * NOTE(review): large portions (case labels, return values, #else branches)
 * are missing from this extract; code left byte-identical — do not attempt to
 * modify without the full B.xs-derived original. */
345 static opclass vmg_opclass(pTHX_ const OP *o) {
346 #define vmg_opclass(O) vmg_opclass(aTHX_ (O))
352 if (o->op_type == 0) {
353 #if XSH_HAS_PERL(5, 21, 7)
354 if (o->op_targ == OP_NEXTSTATE || o->op_targ == OP_DBSTATE)
357 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
360 if (o->op_type == OP_SASSIGN)
361 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
363 if (o->op_type == OP_AELEMFAST) {
364 #if PERL_VERSION <= 14
365 if (o->op_flags & OPf_SPECIAL)
377 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_RCATLINE)
381 switch (OP_CLASS(o)) {
398 case OA_PVOP_OR_SVOP:
400 #if XSH_HAS_PERL(5, 13, 7)
401 (o->op_type != OP_CUSTOM) &&
403 (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF)))
404 #if defined(USE_ITHREADS) && XSH_HAS_PERL(5, 8, 9)
405 ? OPc_PADOP : OPc_PVOP;
407 ? OPc_SVOP : OPc_PVOP;
413 case OA_BASEOP_OR_UNOP:
414 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
416 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
418 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
420 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
423 if (o->op_flags & OPf_STACKED)
425 else if (o->op_flags & OPf_SPECIAL)
429 #if XSH_HAS_PERL(5, 21, 5)
433 #if XSH_HAS_PERL(5, 21, 7)
442 /* --- Error messages ------------------------------------------------------ */
444 static const char vmg_invalid_wiz[] = "Invalid wizard object";
445 static const char vmg_wrongargnum[] = "Wrong number of arguments";
447 /* --- Thread-local storage ------------------------------------------------ */
450 HV *b__op_stashes[OPc_MAX];
453 #if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
454 vmg_trampoline propagate_errsv;
456 #if VMG_RESET_RMG_NEEDS_TRAMPOLINE
457 vmg_trampoline reset_rmg;
/* Per-thread context clone hook: re-fetch the B::* op-class stashes in the new
 * interpreter (gv_stashpv with create=1) for every class the old thread had
 * populated, copy the recursion depth, and start the new thread with an empty
 * freed-tokens chain (freed magic is never shared across threads). */
463 static void xsh_user_clone(pTHX_ const xsh_user_cxt_t *old_cxt, xsh_user_cxt_t *new_cxt) {
466 for (c = OPc_NULL; c < OPc_MAX; ++c) {
467 new_cxt->b__op_stashes[c] = old_cxt->b__op_stashes[c]
468 ? gv_stashpv(vmg_opclassnames[c], 1)
472 new_cxt->depth = old_cxt->depth;
473 new_cxt->freed_tokens = NULL;
479 #endif /* XSH_THREADSAFE */
481 #define XSH_THREADS_NEED_TEARDOWN_LATE 1
483 #include "xsh/threads.h"
485 /* --- <vmg_vtable> structure ---------------------------------------------- */
/* Threaded build: allocate a refcounted vtable wrapper plus its MGVTBL from
 * shared memory so it can outlive any single interpreter.
 * NOTE(review): refcount initialization / return missing from this extract. */
494 static vmg_vtable *vmg_vtable_alloc(pTHX) {
495 #define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
498 t = VOID2(vmg_vtable *, PerlMemShared_malloc(sizeof *t));
500 t->vtbl = VOID2(MGVTBL *, PerlMemShared_malloc(sizeof *t->vtbl));
506 #define vmg_vtable_vtbl(T) (T)->vtbl
508 static perl_mutex vmg_vtable_refcount_mutex;
/* Threaded build: "dup" is just a refcount bump under the global vtable
 * mutex; the shared vtable itself is reused across threads. */
510 static vmg_vtable *vmg_vtable_dup(pTHX_ vmg_vtable *t) {
511 #define vmg_vtable_dup(T) vmg_vtable_dup(aTHX_ (T))
512 XSH_LOCK(&vmg_vtable_refcount_mutex);
514 XSH_UNLOCK(&vmg_vtable_refcount_mutex);
/* Threaded build: drop one reference under the mutex and, once the count
 * reaches zero (check missing from this extract), release both shared-memory
 * allocations. */
519 static void vmg_vtable_free(pTHX_ vmg_vtable *t) {
520 #define vmg_vtable_free(T) vmg_vtable_free(aTHX_ (T))
523 XSH_LOCK(&vmg_vtable_refcount_mutex);
524 refcount = --t->refcount;
525 XSH_UNLOCK(&vmg_vtable_refcount_mutex);
528 PerlMemShared_free(t->vtbl);
529 PerlMemShared_free(t);
533 #else /* XSH_THREADSAFE */
535 typedef MGVTBL vmg_vtable;
537 static vmg_vtable *vmg_vtable_alloc(pTHX) {
538 #define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
541 Newx(t, 1, vmg_vtable);
546 #define vmg_vtable_vtbl(T) ((MGVTBL *) (T))
548 #define vmg_vtable_free(T) Safefree(T)
550 #endif /* !XSH_THREADSAFE */
552 /* --- <vmg_wizard> structure ---------------------------------------------- */
561 SV *cb_get, *cb_set, *cb_len, *cb_clear, *cb_free;
566 #endif /* MGf_LOCAL */
568 SV *cb_fetch, *cb_store, *cb_exists, *cb_delete;
569 #endif /* VMG_UVAR */
572 static void vmg_op_info_init(pTHX_ unsigned int opinfo);
/* Allocate a wizard: clamp the requested opinfo level into a U8 (255 cap),
 * eagerly initialize the op-info machinery for that level, and allocate the
 * wizard's (possibly shared) magic vtable. */
574 static vmg_wizard *vmg_wizard_alloc(pTHX_ UV opinfo) {
575 #define vmg_wizard_alloc(O) vmg_wizard_alloc(aTHX_ (O))
578 Newx(w, 1, vmg_wizard);
581 w->opinfo = (U8) ((opinfo < 255) ? opinfo : 255);
583 vmg_op_info_init(aTHX_ w->opinfo);
585 w->vtable = vmg_vtable_alloc();
/* Release a wizard: drop every callback SV reference (guarded against global
 * destruction, where the callbacks may already be gone — guard condition is
 * missing from this extract), then free the vtable and the wizard struct,
 * both of which remain safe to free during global destruction. */
590 static void vmg_wizard_free(pTHX_ vmg_wizard *w) {
591 #define vmg_wizard_free(W) vmg_wizard_free(aTHX_ (W))
595 /* During global destruction, any of the callbacks may already have been
596 * freed, so we can't rely on still being able to access them. */
598 SvREFCNT_dec(w->cb_data);
599 SvREFCNT_dec(w->cb_get);
600 SvREFCNT_dec(w->cb_set);
601 SvREFCNT_dec(w->cb_len);
602 SvREFCNT_dec(w->cb_clear);
603 SvREFCNT_dec(w->cb_free);
604 SvREFCNT_dec(w->cb_copy);
606 SvREFCNT_dec(w->cb_dup);
609 SvREFCNT_dec(w->cb_local);
610 #endif /* MGf_LOCAL */
612 SvREFCNT_dec(w->cb_fetch);
613 SvREFCNT_dec(w->cb_store);
614 SvREFCNT_dec(w->cb_exists);
615 SvREFCNT_dec(w->cb_delete);
616 #endif /* VMG_UVAR */
619 /* PerlMemShared_free() and Safefree() are still fine during global
620 * destruction though. */
621 vmg_vtable_free(w->vtable);
629 #define VMG_CLONE_CB(N) \
630 z->cb_ ## N = (w->cb_ ## N) ? SvREFCNT_inc(sv_dup(w->cb_ ## N, params)) \
/* Thread-clone a wizard: share (refcount-bump) the vtable, copy the opinfo
 * level, and sv_dup() each callback via VMG_CLONE_CB (most CLONE_CB lines are
 * missing from this extract; code left byte-identical). */
633 static const vmg_wizard *vmg_wizard_dup(pTHX_ const vmg_wizard *w, CLONE_PARAMS *params) {
634 #define vmg_wizard_dup(W, P) vmg_wizard_dup(aTHX_ (W), (P))
640 Newx(z, 1, vmg_wizard);
642 z->vtable = vmg_vtable_dup(w->vtable);
644 z->opinfo = w->opinfo;
656 #endif /* MGf_LOCAL */
660 VMG_CLONE_CB(exists);
661 VMG_CLONE_CB(delete);
662 #endif /* VMG_UVAR */
667 #endif /* XSH_THREADSAFE */
669 #define vmg_wizard_id(W) PTR2IV(vmg_vtable_vtbl((W)->vtable))
671 /* --- Wizard SV objects --------------------------------------------------- */
673 static int vmg_wizard_sv_free(pTHX_ SV *sv, MAGIC *mg) {
674 vmg_wizard_free((vmg_wizard *) mg->mg_ptr);
681 static int vmg_wizard_sv_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
682 mg->mg_ptr = (char *) vmg_wizard_dup((const vmg_wizard *) mg->mg_ptr, params);
687 #endif /* XSH_THREADSAFE */
689 static MGVTBL vmg_wizard_sv_vtbl = {
694 vmg_wizard_sv_free, /* free */
697 vmg_wizard_sv_dup, /* dup */
703 #endif /* MGf_LOCAL */
/* Wrap a wizard pointer in an IV SV and attach the wizard-ownership magic
 * (vmg_wizard_sv_vtbl) so freeing/duping the SV frees/dups the wizard. */
706 static SV *vmg_wizard_sv_new(pTHX_ const vmg_wizard *w) {
707 #define vmg_wizard_sv_new(W) vmg_wizard_sv_new(aTHX_ (W))
713 wiz = newSViv(PTR2IV(w));
716 vmg_sv_magicext(wiz, NULL, &vmg_wizard_sv_vtbl, w, 0);
725 #define vmg_sv_has_wizard_type(S) (SvTYPE(S) >= SVt_PVMG)
/* Threaded build: recover the wizard pointer by scanning the SV's magic chain
 * for our PERL_MAGIC_ext entry carrying vmg_wizard_sv_vtbl. */
727 static const vmg_wizard *vmg_wizard_from_sv_nocheck(const SV *wiz) {
730 for (mg = SvMAGIC(wiz); mg; mg = mg->mg_moremagic) {
731 if (mg->mg_type == PERL_MAGIC_ext && mg->mg_virtual == &vmg_wizard_sv_vtbl)
732 return (const vmg_wizard *) mg->mg_ptr;
738 #else /* XSH_THREADSAFE */
740 #define vmg_sv_has_wizard_type(S) SvIOK(S)
742 #define vmg_wizard_from_sv_nocheck(W) INT2PTR(const vmg_wizard *, SvIVX(W))
744 #endif /* !XSH_THREADSAFE */
746 #define vmg_wizard_from_sv(W) (vmg_sv_has_wizard_type(W) ? vmg_wizard_from_sv_nocheck(W) : NULL)
/* Given a MAGIC attached by vmg_cast (ext magic with an SV key, mg_len ==
 * HEf_SVKEY so mg_ptr is really an SV*), recover the owning wizard, or NULL
 * if the magic isn't ours. */
748 static const vmg_wizard *vmg_wizard_from_mg(const MAGIC *mg) {
749 if (mg->mg_type == PERL_MAGIC_ext && mg->mg_len == HEf_SVKEY) {
750 SV *sv = (SV *) mg->mg_ptr;
752 if (vmg_sv_has_wizard_type(sv))
753 return vmg_wizard_from_sv_nocheck(sv);
759 #define vmg_wizard_from_mg_nocheck(M) vmg_wizard_from_sv_nocheck((const SV *) (M)->mg_ptr)
761 /* --- User-level functions implementation --------------------------------- */
/* Find the MAGIC cast by wizard w on sv, matching by wizard id (the shared
 * vtable pointer), or NULL if sv can't carry magic / no match. */
763 static const MAGIC *vmg_find(const SV *sv, const vmg_wizard *w) {
767 if (SvTYPE(sv) < SVt_PVMG)
770 wid = vmg_wizard_id(w);
772 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
773 const vmg_wizard *z = vmg_wizard_from_mg(mg);
775 if (z && vmg_wizard_id(z) == wid)
782 /* ... Construct private data .............................................. */
/* Build the wizard's private data: call ctor in scalar context on the magic
 * stack with a ref to sv plus the user-supplied args, and keep the returned
 * SV alive past FREETMPS (sv_newref workaround for pre-5.8.3 perls). */
784 static SV *vmg_data_new(pTHX_ SV *ctor, SV *sv, SV **args, I32 items) {
785 #define vmg_data_new(C, S, A, I) vmg_data_new(aTHX_ (C), (S), (A), (I))
794 PUSHSTACKi(PERLSI_MAGIC);
797 EXTEND(SP, items + 1);
798 PUSHs(sv_2mortal(newRV_inc(sv)));
799 for (i = 0; i < items; ++i)
803 vmg_call_sv(ctor, G_SCALAR, 0, NULL);
807 #if XSH_HAS_PERL(5, 8, 3)
808 SvREFCNT_inc_simple_void(nsv); /* Or it will be destroyed in FREETMPS */
810 nsv = sv_newref(nsv); /* Workaround some bug in SvREFCNT_inc() */
/* Return the private data SV (mg_obj) attached to sv by wizard w, or NULL if
 * the wizard was never cast on sv. */
822 static SV *vmg_data_get(pTHX_ SV *sv, const vmg_wizard *w) {
823 #define vmg_data_get(S, W) vmg_data_get(aTHX_ (S), (W))
824 const MAGIC *mg = vmg_find(sv, w);
826 return mg ? mg->mg_obj : NULL;
829 /* ... Magic cast/dispell .................................................. */
833 static I32 vmg_svt_val(pTHX_ IV, SV *);
836 struct ufuncs new_uf;
837 struct ufuncs old_uf;
840 #endif /* VMG_UVAR */
/* Unlink mg from sv's magic chain (fixing up either prevmagic->mg_moremagic
 * or SvMAGIC head), release its private data / wizard references, and either
 * enqueue the token on the per-thread freed_tokens chain (so in-flight
 * callbacks don't touch freed memory) or detach it.
 * NOTE(review): several branch/brace lines are absent from this extract. */
842 static void vmg_mg_del(pTHX_ SV *sv, MAGIC *prevmagic, MAGIC *mg, MAGIC *moremagic) {
843 #define vmg_mg_del(S, P, M, N) vmg_mg_del(aTHX_ (S), (P), (M), (N))
847 prevmagic->mg_moremagic = moremagic;
849 SvMAGIC_set(sv, moremagic);
851 /* Destroy private data */
853 if (mg->mg_type == PERL_MAGIC_uvar) {
854 Safefree(mg->mg_ptr);
856 #endif /* VMG_UVAR */
857 if (mg->mg_obj != sv) {
858 SvREFCNT_dec(mg->mg_obj);
861 /* Unreference the wizard */
862 SvREFCNT_dec((SV *) mg->mg_ptr);
866 #endif /* VMG_UVAR */
869 mg->mg_moremagic = XSH_CXT.freed_tokens;
870 XSH_CXT.freed_tokens = mg;
872 mg->mg_moremagic = NULL;
/* Free every token on a detached magic chain except `skip` (which the caller
 * — e.g. Perl_mg_free — will handle itself).
 * NOTE(review): loop body largely missing from this extract. */
877 static int vmg_magic_chain_free(pTHX_ MAGIC *mg, MAGIC *skip) {
878 #define vmg_magic_chain_free(M, S) vmg_magic_chain_free(aTHX_ (M), (S))
882 MAGIC *moremagic = mg->mg_moremagic;
/* Cast wizard w onto sv: build private data (if a data ctor exists), attach
 * the wizard's vtable as ext magic keyed by the wizard SV, work around the
 * hash GMAGICAL get/clear shortcoming, and — under VMG_UVAR — install (or
 * take over) a single PERL_MAGIC_uvar entry whose ufuncs dispatch to
 * vmg_svt_val, backing up any pre-existing foreign uvar magic.
 * NOTE(review): numerous lines (declarations, braces, returns) are missing
 * from this extract; code left byte-identical. */
895 static UV vmg_cast(pTHX_ SV *sv, const vmg_wizard *w, const SV *wiz, SV **args, I32 items) {
896 #define vmg_cast(S, W, WIZ, A, I) vmg_cast(aTHX_ (S), (W), (WIZ), (A), (I))
905 oldgmg = SvGMAGICAL(sv);
907 data = (w->cb_data) ? vmg_data_new(w->cb_data, sv, args, items) : NULL;
909 t = vmg_vtable_vtbl(w->vtable);
910 mg = vmg_sv_magicext(sv, data, t, wiz, HEf_SVKEY);
912 if (SvTYPE(sv) < SVt_PVHV)
915 /* The GMAGICAL flag only says that a hash is tied or has uvar magic - get
916 * magic is actually never called for them. If the GMAGICAL flag was off before
917 * calling sv_magicext(), the hash isn't tied and has no uvar magic. If it's
918 * now on, then this wizard has get magic. Hence we can work around the
919 * get/clear shortcoming by turning the GMAGICAL flag off. If the current magic
920 * has uvar callbacks, it will be turned back on later. */
921 if (!oldgmg && SvGMAGICAL(sv))
926 MAGIC *prevmagic, *moremagic = NULL;
929 ud.new_uf.uf_val = vmg_svt_val;
930 ud.new_uf.uf_set = NULL;
931 ud.new_uf.uf_index = 0;
932 ud.old_uf.uf_val = NULL;
933 ud.old_uf.uf_set = NULL;
934 ud.old_uf.uf_index = 0;
936 /* One uvar magic in the chain is enough. */
937 for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
938 moremagic = mg->mg_moremagic;
939 if (mg->mg_type == PERL_MAGIC_uvar)
943 if (mg) { /* Found another uvar magic. */
944 struct ufuncs *uf = (struct ufuncs *) mg->mg_ptr;
945 if (uf->uf_val == vmg_svt_val) {
946 /* It's our uvar magic, nothing to do. oldgmg was true. */
949 /* It's another uvar magic, backup it and replace it by ours. */
951 vmg_mg_del(sv, prevmagic, mg, moremagic);
955 sv_magic(sv, NULL, PERL_MAGIC_uvar, (const char *) &ud, sizeof(ud));
957 /* Our hash now carries uvar magic. The uvar/clear shortcoming has to be
958 * handled by our uvar callback. */
960 #endif /* VMG_UVAR */
/* Remove wizard w's magic from sv. Walks the chain counting remaining uvar-
 * capable wizards; after deleting our ext magic, if no wizard with uvar
 * callbacks remains, restores or removes the shared PERL_MAGIC_uvar entry
 * (reinstating a backed-up foreign ufuncs struct when one was saved).
 * NOTE(review): many surrounding lines (returns, braces, conditions) are
 * missing from this extract; code left byte-identical. */
966 static UV vmg_dispell(pTHX_ SV *sv, const vmg_wizard *w) {
967 #define vmg_dispell(S, W) vmg_dispell(aTHX_ (S), (W))
970 #endif /* VMG_UVAR */
971 MAGIC *mg, *prevmagic, *moremagic = NULL;
972 IV wid = vmg_wizard_id(w);
974 if (SvTYPE(sv) < SVt_PVMG)
977 for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
980 moremagic = mg->mg_moremagic;
982 z = vmg_wizard_from_mg(mg);
984 IV zid = vmg_wizard_id(z);
988 /* If the current has no uvar, short-circuit uvar deletion. */
989 uvars = z->uvar ? (uvars + 1) : 0;
991 } else if (z->uvar) {
993 /* We can't break here since we need to find the ext magic to delete. */
998 #endif /* !VMG_UVAR */
1004 vmg_mg_del(sv, prevmagic, mg, moremagic);
1007 if (uvars == 1 && SvTYPE(sv) >= SVt_PVHV) {
1008 /* mg was the first ext magic in the chain that had uvar */
1010 for (mg = moremagic; mg; mg = mg->mg_moremagic) {
1011 const vmg_wizard *z = vmg_wizard_from_mg(mg);
1022 for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic){
1023 moremagic = mg->mg_moremagic;
1024 if (mg->mg_type == PERL_MAGIC_uvar)
1028 ud = (vmg_uvar_ud *) mg->mg_ptr;
1029 if (ud->old_uf.uf_val || ud->old_uf.uf_set) {
1030 /* Revert the original uvar magic. */
1032 Newx(uf, 1, struct ufuncs);
1035 mg->mg_ptr = (char *) uf;
1036 mg->mg_len = sizeof(*uf);
1038 /* Remove the uvar magic. */
1039 vmg_mg_del(sv, prevmagic, mg, moremagic);
1043 #endif /* VMG_UVAR */
1050 /* ... OP info ............................................................. */
1052 #define VMG_OP_INFO_NAME 1
1053 #define VMG_OP_INFO_OBJECT 2
1056 static perl_mutex vmg_op_name_init_mutex;
1059 static U32 vmg_op_name_init = 0;
1060 static unsigned char vmg_op_name_len[MAXO] = { 0 };
/* Lazily initialize the op-info machinery for the requested level: for NAME,
 * precompute every builtin op-name length (once, mutex-guarded); for OBJECT,
 * populate the per-thread B::* class stash cache. */
1062 static void vmg_op_info_init(pTHX_ unsigned int opinfo) {
1063 #define vmg_op_info_init(W) vmg_op_info_init(aTHX_ (W))
1065 case VMG_OP_INFO_NAME:
1066 XSH_LOCK(&vmg_op_name_init_mutex);
1067 if (!vmg_op_name_init) {
1069 for (t = 0; t < OP_max; ++t)
1070 vmg_op_name_len[t] = strlen(PL_op_name[t]);
1071 vmg_op_name_init = 1;
1073 XSH_UNLOCK(&vmg_op_name_init_mutex);
1075 case VMG_OP_INFO_OBJECT: {
1077 if (!XSH_CXT.b__op_stashes[0]) {
1080 for (c = OPc_NULL; c < OPc_MAX; ++c)
1081 XSH_CXT.b__op_stashes[c] = gv_stashpv(vmg_opclassnames[c], 1);
/* Produce the extra "op info" argument passed to callbacks: either a mortal
 * PV with the current op's name (custom ops fall back to strlen) or a mortal
 * blessed B::*OP object wrapping PL_op; undef when PL_op is unavailable. */
1090 static SV *vmg_op_info(pTHX_ unsigned int opinfo) {
1091 #define vmg_op_info(W) vmg_op_info(aTHX_ (W))
1093 return &PL_sv_undef;
1096 case VMG_OP_INFO_NAME: {
1099 OPCODE t = PL_op->op_type;
1100 name = OP_NAME(PL_op);
1101 name_len = (t == OP_CUSTOM) ? strlen(name) : vmg_op_name_len[t];
1102 return sv_2mortal(newSVpvn(name, name_len));
1104 case VMG_OP_INFO_OBJECT: {
1106 return sv_bless(sv_2mortal(newRV_noinc(newSViv(PTR2IV(PL_op)))),
1107 XSH_CXT.b__op_stashes[vmg_opclass(PL_op)]);
1113 return &PL_sv_undef;
1116 /* --- svt callbacks ------------------------------------------------------- */
1118 #define VMG_CB_CALL_ARGS_MASK 15
1119 #define VMG_CB_CALL_ARGS_SHIFT 4
1120 #define VMG_CB_CALL_OPINFO (VMG_OP_INFO_NAME|VMG_OP_INFO_OBJECT) /* 1|2 */
1121 #define VMG_CB_CALL_GUARD 4
/* croak-time cleanup hook for vmg_call_sv(): at magic-call depth 0 we are
 * about to longjmp past the code that would have purged freed magic tokens,
 * so free the whole chain immediately. */
1123 static int vmg_dispell_guard_oncroak(pTHX_ void *ud) {
1128 /* If we're at the upmost magic call and we're about to die, we can just free
1129 * the tokens right now, since we will jump past the problematic part of our
1131 if (XSH_CXT.depth == 0 && XSH_CXT.freed_tokens) {
1132 vmg_magic_chain_free(XSH_CXT.freed_tokens, NULL);
1133 XSH_CXT.freed_tokens = NULL;
1139 static int vmg_dispell_guard_free(pTHX_ SV *sv, MAGIC *mg) {
1140 vmg_magic_chain_free((MAGIC *) mg->mg_ptr, NULL);
1147 static int vmg_dispell_guard_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
1148 /* The freed magic tokens aren't cloned by perl because it cannot reach them
1149 * (they have been detached from their parent SV when they were enqueued).
1150 * Hence there's nothing to purge in the new thread. */
1156 #endif /* XSH_THREADSAFE */
1158 static MGVTBL vmg_dispell_guard_vtbl = {
1163 vmg_dispell_guard_free, /* free */
1166 vmg_dispell_guard_dup, /* dup */
1172 #endif /* MGf_LOCAL */
/* Wrap a detached magic chain in a mortal guard SV whose free-magic
 * (vmg_dispell_guard_vtbl) frees the chain when the current scope ends. */
1175 static SV *vmg_dispell_guard_new(pTHX_ MAGIC *root) {
1176 #define vmg_dispell_guard_new(R) vmg_dispell_guard_new(aTHX_ (R))
1179 guard = sv_newmortal();
1180 vmg_sv_magicext(guard, NULL, &vmg_dispell_guard_vtbl, root, 0);
1185 static int vmg_cb_call(pTHX_ SV *cb, unsigned int flags, SV *sv, ...) {
1188 unsigned int i, args, opinfo;
1189 MAGIC **chain = NULL;
1194 args = flags & VMG_CB_CALL_ARGS_MASK;
1195 flags >>= VMG_CB_CALL_ARGS_SHIFT;
1196 opinfo = flags & VMG_CB_CALL_OPINFO;
1201 PUSHSTACKi(PERLSI_MAGIC);
1204 EXTEND(SP, args + 1);
1205 PUSHs(sv_2mortal(newRV_inc(sv)));
1207 for (i = 0; i < args; ++i) {
1208 SV *sva = va_arg(ap, SV *);
1209 PUSHs(sva ? sva : &PL_sv_undef);
1213 XPUSHs(vmg_op_info(opinfo));
1216 if (flags & VMG_CB_CALL_GUARD) {
1219 vmg_call_sv(cb, G_SCALAR, vmg_dispell_guard_oncroak, NULL);
1221 if (XSH_CXT.depth == 0 && XSH_CXT.freed_tokens)
1222 chain = &XSH_CXT.freed_tokens;
1224 vmg_call_sv(cb, G_SCALAR, 0, NULL);
1230 ret = (int) SvIV(svr);
1242 if (svr && !SvTEMP(svr))
1246 vmg_dispell_guard_new(*chain);
1253 #define VMG_CB_FLAGS(OI, A) \
1254 ((((unsigned int) (OI)) << VMG_CB_CALL_ARGS_SHIFT) | (A))
1256 #define vmg_cb_call1(I, OI, S, A1) \
1257 vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 1), (S), (A1))
1258 #define vmg_cb_call2(I, OI, S, A1, A2) \
1259 vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 2), (S), (A1), (A2))
1260 #define vmg_cb_call3(I, OI, S, A1, A2, A3) \
1261 vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 3), (S), (A1), (A2), (A3))
1263 /* ... Default no-op magic callback ........................................ */
1265 static int vmg_svt_default_noop(pTHX_ SV *sv, MAGIC *mg) {
1269 /* ... get magic ........................................................... */
/* svt_get entry point: forward to the wizard's `get` callback with the
 * private data SV as the single extra argument. */
1271 static int vmg_svt_get(pTHX_ SV *sv, MAGIC *mg) {
1272 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1274 return vmg_cb_call1(w->cb_get, w->opinfo, sv, mg->mg_obj);
1277 #define vmg_svt_get_noop vmg_svt_default_noop
1279 /* ... set magic ........................................................... */
/* svt_set entry point: forward to the wizard's `set` callback, mirroring
 * vmg_svt_get. */
1281 static int vmg_svt_set(pTHX_ SV *sv, MAGIC *mg) {
1282 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1284 return vmg_cb_call1(w->cb_set, w->opinfo, sv, mg->mg_obj);
1287 #define vmg_svt_set_noop vmg_svt_default_noop
1289 /* ... len magic ........................................................... */
/* Scalar length helper: byte length from SvPV, converted to character count
 * via utf8_length() when the SV is flagged UTF-8. */
1291 static U32 vmg_sv_len(pTHX_ SV *sv) {
1292 #define vmg_sv_len(S) vmg_sv_len(aTHX_ (S))
1294 #if XSH_HAS_PERL(5, 9, 3)
1295 const U8 *s = VOID2(const U8 *, VOID2(const void *, SvPV_const(sv, len)));
1297 U8 *s = SvPV(sv, len);
1300 return DO_UTF8(sv) ? utf8_length(s, s + len) : len;
/* svt_len entry point: call the wizard's `len` callback with the natural
 * length (string char count for scalars, av_len+1 for arrays, undef
 * otherwise) and let its return value override the result; an undef return
 * keeps the natural length.
 * NOTE(review): declarations, stack teardown and return are missing from this
 * extract; code left byte-identical. */
1303 static U32 vmg_svt_len(pTHX_ SV *sv, MAGIC *mg) {
1304 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1305 unsigned int opinfo = w->opinfo;
1308 svtype t = SvTYPE(sv);
1315 PUSHSTACKi(PERLSI_MAGIC);
1319 PUSHs(sv_2mortal(newRV_inc(sv)));
1320 PUSHs(mg->mg_obj ? mg->mg_obj : &PL_sv_undef);
1322 len = vmg_sv_len(sv);
1324 } else if (t == SVt_PVAV) {
1325 len = av_len((AV *) sv) + 1;
1329 PUSHs(&PL_sv_undef);
1332 XPUSHs(vmg_op_info(opinfo));
1335 vmg_call_sv(w->cb_len, G_SCALAR, 0, NULL);
1339 ret = SvOK(svr) ? (U32) SvUV(svr) : len;
/* No-op svt_len: just report the natural length without calling back into
 * Perl. NOTE(review): for arrays this returns av_len() without the +1 used in
 * vmg_svt_len above — presumably intentional (core len-magic convention), but
 * worth confirming against the full source. */
1352 static U32 vmg_svt_len_noop(pTHX_ SV *sv, MAGIC *mg) {
1354 svtype t = SvTYPE(sv);
1357 len = vmg_sv_len(sv);
1358 } else if (t == SVt_PVAV) {
1359 len = (U32) av_len((AV *) sv);
1365 /* ... clear magic ......................................................... */
/* svt_clear entry point: forward to the wizard's `clear` callback; on perls
 * before 5.12 the call is wrapped with the dispell guard because clear magic
 * could fire while the magic chain is being torn down. */
1367 static int vmg_svt_clear(pTHX_ SV *sv, MAGIC *mg) {
1368 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1369 unsigned int flags = w->opinfo;
1371 #if !XSH_HAS_PERL(5, 12, 0)
1372 flags |= VMG_CB_CALL_GUARD;
1375 return vmg_cb_call1(w->cb_clear, flags, sv, mg->mg_obj);
1378 #define vmg_svt_clear_noop vmg_svt_default_noop
1380 /* ... free magic .......................................................... */
1382 #if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
1384 static OP *vmg_pp_propagate_errsv(pTHX) {
1385 SVOP *o = cSVOPx(PL_op);
1388 sv_setsv(ERRSV, o->op_sv);
1389 SvREFCNT_dec(o->op_sv);
1396 #endif /* VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE */
1398 static int vmg_propagate_errsv_free(pTHX_ SV *sv, MAGIC *mg) {
1400 sv_setsv(ERRSV, mg->mg_obj);
1405 /* perl is already kind enough to handle the cloning of the mg_obj member,
1406 hence we don't need to define a dup magic callback. */
1408 static MGVTBL vmg_propagate_errsv_vtbl = {
1413 vmg_propagate_errsv_free, /* free */
1418 #endif /* MGf_LOCAL */
1423 SV *rsv; /* The ref to the sv currently being freed, pushed on the stack */
1426 } vmg_svt_free_cleanup_ud;
/* croak-time cleanup for the `free` callback. If the die happened directly
 * under an eval (LEAVETRY/LEAVEEVAL), unwind to the saved scope base and
 * arrange for ERRSV to be propagated after the scope is left — via the
 * trampoline on the 5.10.0 window that needs it, or via ERRSV free-magic
 * otherwise. In all cases it undoes the temporary ref taken on the dying sv
 * (clearing SvRV so the referent isn't destructed twice), purges this magic
 * from the chain, re-triggers destruction, and flushes freed tokens before
 * the error propagates upwards.
 * NOTE(review): several branches and the returns are missing from this
 * extract; code left byte-identical. */
1428 static int vmg_svt_free_cleanup(pTHX_ void *ud_) {
1429 vmg_svt_free_cleanup_ud *ud = VOID2(vmg_svt_free_cleanup_ud *, ud_);
1432 U32 optype = PL_op ? PL_op->op_type : OP_NULL;
1434 if (optype == OP_LEAVETRY || optype == OP_LEAVEEVAL) {
1435 SV *errsv = newSVsv(ERRSV);
1438 LEAVE_SCOPE(ud->base);
1440 #if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
1441 if (optype == OP_LEAVETRY) {
1443 PL_op = vmg_trampoline_bump(&XSH_CXT.propagate_errsv, errsv, PL_op);
1444 } else if (optype == OP_LEAVEEVAL) {
1445 SV *guard = sv_newmortal();
1446 vmg_sv_magicext(guard, errsv, &vmg_propagate_errsv_vtbl, NULL, 0);
1448 #else /* !VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE */
1449 # if !XSH_HAS_PERL(5, 8, 9)
1451 SV *guard = sv_newmortal();
1452 vmg_sv_magicext(guard, errsv, &vmg_propagate_errsv_vtbl, NULL, 0);
1455 vmg_sv_magicext(ERRSV, errsv, &vmg_propagate_errsv_vtbl, NULL, 0);
1457 #endif /* VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE */
1462 /* Don't propagate */
1468 /* Silently undo the ref - don't trigger destruction in the referent
1469 * for a second time */
1470 if (SvROK(ud->rsv) && SvRV(ud->rsv) == sv) {
1472 SvRV_set(ud->rsv, NULL);
1475 SvREFCNT_dec_NN(ud->rsv);
1477 /* We are about to croak() while sv is being destroyed. Try to clean up
1481 vmg_mg_del(sv, NULL, mg, mg->mg_moremagic);
1484 SvREFCNT_dec(sv); /* Re-trigger destruction */
1486 vmg_dispell_guard_oncroak(aTHX_ NULL);
1488 /* After that, propagate the error upwards. */
1488 /* After that, propagate the error upwards. */
/* svt_free entry point: runs the wizard's `free` callback while sv is being
 * destroyed. Bumps sv's refcount (0 -> 1 via newRV_inc) so the callback can
 * see it, pins the magic chain head on old perls (change 32686 workaround),
 * records whether we are directly inside an eval for the croak cleanup,
 * calls the callback under vmg_svt_free_cleanup, purges freed tokens (except
 * mg itself, which Perl_mg_free will release), then silently drops the
 * temporary ref by writing SvREFCNT directly to avoid re-triggering
 * destructors.
 * NOTE(review): guard conditions, declarations and return are missing from
 * this extract; code left byte-identical. */
1493 static int vmg_svt_free(pTHX_ SV *sv, MAGIC *mg) {
1494 vmg_svt_free_cleanup_ud ud;
1495 const vmg_wizard *w;
1501 /* During global destruction, we cannot be sure that the wizard and its free
1502 * callback are still alive. */
1506 w = vmg_wizard_from_mg_nocheck(mg);
1508 /* So that it survives the temp cleanup below */
1509 SvREFCNT_inc_simple_void(sv);
1511 #if !(XSH_HAS_PERL_MAINT(5, 11, 0, 32686) || XSH_HAS_PERL(5, 12, 0))
1512 /* The previous magic tokens were freed but the magic chain wasn't updated, so
1513 * if you access the sv from the callback the old deleted magics will trigger
1514 * and cause memory misreads. Change 32686 solved it that way : */
1515 SvMAGIC_set(sv, mg);
1519 if (cxstack_ix < cxstack_max) {
1520 ud.in_eval = (CxTYPE(cxstack + cxstack_ix + 1) == CXt_EVAL);
1521 ud.base = ud.in_eval ? PL_scopestack[PL_scopestack_ix] : 0;
1530 PUSHSTACKi(PERLSI_MAGIC);
1534 /* This will bump the refcount of sv from 0 to 1 */
1535 ud.rsv = newRV_inc(sv);
1537 PUSHs(mg->mg_obj ? mg->mg_obj : &PL_sv_undef);
1539 XPUSHs(vmg_op_info(w->opinfo));
1545 vmg_call_sv(w->cb_free, G_SCALAR, vmg_svt_free_cleanup, &ud);
1547 if (XSH_CXT.depth == 0 && XSH_CXT.freed_tokens) {
1548 /* Free all the tokens in the chain but the current one (if it's present).
1549 * It will be taken care of by our caller, Perl_mg_free(). */
1550 vmg_magic_chain_free(XSH_CXT.freed_tokens, mg);
1551 XSH_CXT.freed_tokens = NULL;
1558 ret = (int) SvIV(svr);
1563 /* Silently undo the ref - don't trigger destruction in the referent
1564 * for a second time */
1565 if (SvROK(ud.rsv) && SvRV(ud.rsv) == sv) {
1566 SvRV_set(ud.rsv, NULL);
1568 --SvREFCNT(sv); /* silent */
1570 SvREFCNT_dec_NN(ud.rsv);
1575 /* Calling SvREFCNT_dec() will trigger destructors in an infinite loop, so
1576 * we have to rely on SvREFCNT() being a lvalue. Heck, even the core does it */
1579 /* Perl_mg_free will get rid of the magic and decrement mg->mg_obj and
1580 * mg->mg_ptr reference count */
1584 #define vmg_svt_free_noop vmg_svt_default_noop
1586 #if XSH_HAS_PERL_MAINT(5, 11, 0, 33256) || XSH_HAS_PERL(5, 12, 0)
1587 # define VMG_SVT_COPY_KEYLEN_TYPE I32
1589 # define VMG_SVT_COPY_KEYLEN_TYPE int
1592 /* ... copy magic .......................................................... */
/* svt_copy entry point (hash element / tie copy): materialize the key as an
 * SV (it's already one when keylen == HEf_SVKEY, otherwise build a temporary
 * from the char buffer), wrap nsv in a mortal ref for CV-ish containers, call
 * the `copy` callback, then release the temporary key SV. */
1594 static int vmg_svt_copy(pTHX_ SV *sv, MAGIC *mg, SV *nsv, const char *key, VMG_SVT_COPY_KEYLEN_TYPE keylen) {
1595 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1599 if (keylen == HEf_SVKEY) {
1602 keysv = newSVpvn(key, keylen);
1605 if (SvTYPE(sv) >= SVt_PVCV)
1606 nsv = sv_2mortal(newRV_inc(nsv));
1608 ret = vmg_cb_call3(w->cb_copy, w->opinfo, sv, mg->mg_obj, keysv, nsv);
1610 if (keylen != HEf_SVKEY) {
1611 SvREFCNT_dec(keysv);
1617 static int vmg_svt_copy_noop(pTHX_ SV *sv, MAGIC *mg, SV *nsv, const char *key, VMG_SVT_COPY_KEYLEN_TYPE keylen) {
1621 /* ... dup magic ........................................................... */
1624 static int vmg_svt_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *param) {
1627 #define vmg_svt_dup_noop vmg_svt_dup
1630 /* ... local magic ......................................................... */
/* svt_local entry point (MGf_LOCAL builds): forward to the wizard's `local`
 * callback on the new localized SV. */
1634 static int vmg_svt_local(pTHX_ SV *nsv, MAGIC *mg) {
1635 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1637 return vmg_cb_call1(w->cb_local, w->opinfo, nsv, mg->mg_obj);
1640 #define vmg_svt_local_noop vmg_svt_default_noop
1642 #endif /* MGf_LOCAL */
1644 /* ... uvar magic .......................................................... */
/* pp for the injected fake op that restores the RMAGICAL flag hidden by
 * vmg_svt_val's hv_common workaround; the target SV rides in op_sv. */
1648 static OP *vmg_pp_reset_rmg(pTHX) {
1649 SVOP *o = cSVOPx(PL_op);
1651 SvRMAGICAL_on(o->op_sv);
/* The shared uvar ufuncs hook (hash fetch/store/exists/delete). Chains to any
 * backed-up foreign ufuncs first, then walks the magic chain and, for each of
 * our wizards, dispatches on the HV_FETCH_*/HV_DELETE bits in `action` to the
 * matching fetch/store/exists/delete callback (guarded, with the key SV —
 * copied to a mortal before callbacks can clobber it). Afterwards, if the
 * hash is RMAGICAL but not tied, temporarily hides the flag so hv_common
 * doesn't mistake it for a tie, restoring it via a trampoline or an injected
 * vmg_pp_reset_rmg fake op spliced after the current op.
 * NOTE(review): many lines (case labels, conditions, braces, declarations)
 * are missing from this extract; code left byte-identical. */
1657 static I32 vmg_svt_val(pTHX_ IV action, SV *sv) {
1659 MAGIC *mg, *umg, *moremagic;
1660 SV *key = NULL, *newkey = NULL;
1663 umg = mg_find(sv, PERL_MAGIC_uvar);
1664 /* umg can't be NULL or we wouldn't be there. */
1666 ud = (vmg_uvar_ud *) umg->mg_ptr;
1668 if (ud->old_uf.uf_val)
1669 ud->old_uf.uf_val(aTHX_ action, sv);
1670 if (ud->old_uf.uf_set)
1671 ud->old_uf.uf_set(aTHX_ action, sv);
1673 for (mg = SvMAGIC(sv); mg; mg = moremagic) {
1674 const vmg_wizard *w;
1676 /* mg may be freed later by the uvar call, so we need to fetch the next
1677 * token before reaching that fateful point. */
1678 moremagic = mg->mg_moremagic;
1680 switch (mg->mg_type) {
1681 case PERL_MAGIC_ext:
1683 case PERL_MAGIC_tied:
1690 w = vmg_wizard_from_mg(mg);
1699 newkey = key = umg->mg_obj = sv_mortalcopy(umg->mg_obj);
1703 & (HV_FETCH_ISSTORE|HV_FETCH_ISEXISTS|HV_FETCH_LVALUE|HV_DELETE)) {
1706 vmg_cb_call2(w->cb_fetch, w->opinfo | VMG_CB_CALL_GUARD, sv,
1709 case HV_FETCH_ISSTORE:
1710 case HV_FETCH_LVALUE:
1711 case (HV_FETCH_ISSTORE|HV_FETCH_LVALUE):
1713 vmg_cb_call2(w->cb_store, w->opinfo | VMG_CB_CALL_GUARD, sv,
1716 case HV_FETCH_ISEXISTS:
1718 vmg_cb_call2(w->cb_exists, w->opinfo | VMG_CB_CALL_GUARD, sv,
1723 vmg_cb_call2(w->cb_delete, w->opinfo | VMG_CB_CALL_GUARD, sv,
1729 if (SvRMAGICAL(sv) && !tied && !(action & (HV_FETCH_ISSTORE|HV_DELETE))) {
1730 /* Temporarily hide the RMAGICAL flag of the hash so it isn't wrongly
1731 * mistaken for a tied hash by the rest of hv_common. It will be reset by
1732 * the op_ppaddr of a new fake op injected between the current and the next
1735 #if VMG_RESET_RMG_NEEDS_TRAMPOLINE
1739 PL_op = vmg_trampoline_bump(&XSH_CXT.reset_rmg, sv, PL_op);
1741 #else /* !VMG_RESET_RMG_NEEDS_TRAMPOLINE */
1743 OP *nop = PL_op->op_next;
1746 if (nop && nop->op_ppaddr == vmg_pp_reset_rmg) {
1747 svop = (SVOP *) nop;
1749 NewOp(1101, svop, 1, SVOP);
1750 svop->op_type = OP_STUB;
1751 svop->op_ppaddr = vmg_pp_reset_rmg;
1752 svop->op_next = nop;
1754 svop->op_private = 0;
1756 PL_op->op_next = (OP *) svop;
1761 #endif /* VMG_RESET_RMG_NEEDS_TRAMPOLINE */
1769 #endif /* VMG_UVAR */
1771 /* --- Module setup/teardown ----------------------------------------------- */
/* Late global teardown: destroys the two process-wide mutexes created in
 * xsh_user_global_setup().  The ud argument is unused by the visible code;
 * the wrapper macro threads aTHX through for callers. */
1775 static void vmg_global_teardown_late_locked(pTHX_ void *ud) {
1776 #define vmg_global_teardown_late_locked(UD) vmg_global_teardown_late_locked(aTHX_ (UD))
1777 MUTEX_DESTROY(&vmg_op_name_init_mutex);
1778 MUTEX_DESTROY(&vmg_vtable_refcount_mutex);
/* Returns the effective destruct level: PL_perl_destruct_level, possibly
 * overridden by the PERL_DESTRUCT_LEVEL environment variable.  The numeric
 * parsing routine depends on the perl version (grok_atoUV on 5.21.10+,
 * grok_atou on 5.21.3+, an older fallback otherwise); several branch
 * bodies are elided in this view. */
1783 static signed char vmg_destruct_level(pTHX) {
1784 #define vmg_destruct_level() vmg_destruct_level(aTHX)
1787 lvl = PL_perl_destruct_level;
1791 const char *s = PerlEnv_getenv("PERL_DESTRUCT_LEVEL");
1794 #if XSH_HAS_PERL(5, 21, 3)
/* "-1" is accepted as a special value on 5.21.3 and later. */
1795 if (strEQ(s, "-1")) {
1798 # if XSH_HAS_PERL(5, 21, 10)
1800 if (Perl_grok_atoUV(s, &uv, NULL) && uv <= INT_MAX)
1804 # else /* XSH_HAS_PERL(5, 21, 3) && !XSH_HAS_PERL(5, 21, 10) */
1805 i = Perl_grok_atou(s, NULL);
1808 #else /* !XSH_HAS_PERL(5, 21, 3) */
1820 #endif /* XSH_THREADSAFE */
/* Process-global setup: initializes the mutexes shared by all interpreters
 * (destroyed again in vmg_global_teardown_late_locked). */
1822 static void xsh_user_global_setup(pTHX) {
1824 MUTEX_INIT(&vmg_vtable_refcount_mutex);
1825 MUTEX_INIT(&vmg_op_name_init_mutex);
/* Per-interpreter context setup: clears the cached B::OP-class stashes and
 * the deferred-free magic list, primes the trampoline ops where the build
 * requires them, and publishes the compile-time feature flags as constants
 * in the Variable::Magic package.  (Some lines elided in this view.) */
1831 static void xsh_user_local_setup(pTHX_ xsh_user_cxt_t *cxt) {
1835 for (c = OPc_NULL; c < OPc_MAX; ++c)
1836 cxt->b__op_stashes[c] = NULL;
1839 cxt->freed_tokens = NULL;
1841 #if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
1842 vmg_trampoline_init(&cxt->propagate_errsv, vmg_pp_propagate_errsv);
1844 #if VMG_RESET_RMG_NEEDS_TRAMPOLINE
1845 vmg_trampoline_init(&cxt->reset_rmg, vmg_pp_reset_rmg);
/* Create the package stash if needed, then export the constants. */
1848 stash = gv_stashpv(XSH_PACKAGE, 1);
1849 newCONSTSUB(stash, "MGf_COPY", newSVuv(MGf_COPY));
1850 newCONSTSUB(stash, "MGf_DUP", newSVuv(MGf_DUP));
1851 newCONSTSUB(stash, "MGf_LOCAL", newSVuv(MGf_LOCAL));
1852 newCONSTSUB(stash, "VMG_UVAR", newSVuv(VMG_UVAR));
1853 newCONSTSUB(stash, "VMG_COMPAT_SCALAR_LENGTH_NOLEN",
1854 newSVuv(VMG_COMPAT_SCALAR_LENGTH_NOLEN));
1855 newCONSTSUB(stash, "VMG_COMPAT_SCALAR_NOLEN",
1856 newSVuv(VMG_COMPAT_SCALAR_NOLEN));
1857 newCONSTSUB(stash, "VMG_COMPAT_ARRAY_PUSH_NOLEN",
1858 newSVuv(VMG_COMPAT_ARRAY_PUSH_NOLEN));
1859 newCONSTSUB(stash, "VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID",
1860 newSVuv(VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID));
1861 newCONSTSUB(stash, "VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID",
1862 newSVuv(VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID));
1863 newCONSTSUB(stash, "VMG_COMPAT_ARRAY_UNDEF_CLEAR",
1864 newSVuv(VMG_COMPAT_ARRAY_UNDEF_CLEAR));
1865 newCONSTSUB(stash, "VMG_COMPAT_HASH_DELETE_NOUVAR_VOID",
1866 newSVuv(VMG_COMPAT_HASH_DELETE_NOUVAR_VOID));
1867 newCONSTSUB(stash, "VMG_COMPAT_CODE_COPY_CLONE",
1868 newSVuv(VMG_COMPAT_CODE_COPY_CLONE));
1869 newCONSTSUB(stash, "VMG_COMPAT_GLOB_GET", newSVuv(VMG_COMPAT_GLOB_GET));
1870 newCONSTSUB(stash, "VMG_PERL_PATCHLEVEL", newSVuv(XSH_PERL_PATCHLEVEL));
1871 newCONSTSUB(stash, "VMG_THREADSAFE", newSVuv(XSH_THREADSAFE));
1872 newCONSTSUB(stash, "VMG_FORKSAFE", newSVuv(XSH_FORKSAFE));
1873 newCONSTSUB(stash, "VMG_OP_INFO_NAME", newSVuv(VMG_OP_INFO_NAME));
1874 newCONSTSUB(stash, "VMG_OP_INFO_OBJECT", newSVuv(VMG_OP_INFO_OBJECT));
/* Per-interpreter teardown: once the outermost level is reached (depth 0),
 * free any magic tokens whose release was deferred while callbacks were
 * still running, and clear the list. */
1879 static void xsh_user_local_teardown(pTHX_ xsh_user_cxt_t *cxt) {
1880 if (cxt->depth == 0 && cxt->freed_tokens) {
1881 vmg_magic_chain_free(cxt->freed_tokens, NULL);
1882 cxt->freed_tokens = NULL;
/* Process-global teardown: when the destruct level is 0, destroy the global
 * mutexes immediately; the registration below presumably runs in the other
 * branch, deferring destruction until late teardown (the intervening lines
 * are elided in this view — confirm against the full source). */
1888 static void xsh_user_global_teardown(pTHX) {
1890 if (vmg_destruct_level() == 0)
1891 vmg_global_teardown_late_locked(NULL);
1893 xsh_teardown_late_register(vmg_global_teardown_late_locked, NULL);
1899 /* --- Macros for the XS section ------------------------------------------- */
/* VMG_CVOK(C): true iff the CV has a body — an XSUB pointer or an op tree
 * (two variants; the surrounding #if lines are elided in this view). */
1902 # define VMG_CVOK(C) \
1903 ((CvISXSUB(C) ? (void *) CvXSUB(C) : (void *) CvROOT(C)) ? 1 : 0)
1905 # define VMG_CVOK(C) (CvROOT(C) || CvXSUB(C))
/* VMG_CBOK(S): a callback SV is usable when it is a non-empty CV, or any
 * defined SV otherwise. */
1908 #define VMG_CBOK(S) ((SvTYPE(S) == SVt_PVCV) ? VMG_CVOK(S) : SvOK(S))
/* VMG_SET_CB(S, N): store stack arg S into the wizard's cb_<N> slot,
 * bumping its refcount when it is a defined reference.
 * VMG_SET_SVT_CB(S, N): same, but also wires the vtable entry svt_<N> to
 * the real handler when the callback is usable, to the noop when the slot
 * is a non-callable reference, or to NULL otherwise.  (Both macros have
 * interior lines elided in this view.) */
1910 #define VMG_SET_CB(S, N) { \
1912 if (SvOK(cb) && SvROK(cb)) { \
1915 SvREFCNT_inc_simple_void(cb); \
1924 #define VMG_SET_SVT_CB(S, N) { \
1926 if (SvOK(cb) && SvROK(cb)) { \
1928 if (VMG_CBOK(cb)) { \
1929 t->svt_ ## N = vmg_svt_ ## N; \
1930 SvREFCNT_inc_simple_void(cb); \
1932 t->svt_ ## N = vmg_svt_ ## N ## _noop; \
1936 t->svt_ ## N = NULL; \
1942 /* --- XS ------------------------------------------------------------------ */
1944 MODULE = Variable::Magic PACKAGE = Variable::Magic
1962 #endif /* XSH_THREADSAFE */
# _wizard(...): XSUB that builds a new wizard from the stacked callbacks and
# returns a reference to it.  The declaration and argument-count check are
# partially elided in this sampled view.
1969 SV *op_info, *copy_key;
1975 #endif /* MGf_LOCAL */
1978 #endif /* VMG_UVAR */
1979 ) { croak(vmg_wrongargnum); }
/* Allocate the wizard (with the requested op-info mode) and grab its
 * private vtable so the SVT slots can be filled below. */
1982 w = vmg_wizard_alloc(SvOK(op_info) ? SvUV(op_info) : 0);
1983 t = vmg_vtable_vtbl(w->vtable);
/* Consume the stacked callbacks in declaration order. */
1985 VMG_SET_CB(ST(i++), data);
1987 VMG_SET_SVT_CB(ST(i++), get);
1988 VMG_SET_SVT_CB(ST(i++), set);
1989 VMG_SET_SVT_CB(ST(i++), len);
1990 VMG_SET_SVT_CB(ST(i++), clear);
1991 VMG_SET_SVT_CB(ST(i++), free);
1992 VMG_SET_SVT_CB(ST(i++), copy);
1993 /* VMG_SET_SVT_CB(ST(i++), dup); */
1998 VMG_SET_SVT_CB(ST(i++), local);
1999 #endif /* MGf_LOCAL */
2001 VMG_SET_CB(ST(i++), fetch);
2002 VMG_SET_CB(ST(i++), store);
2003 VMG_SET_CB(ST(i++), exists);
2004 VMG_SET_CB(ST(i++), delete);
/* Any hash-key callback implies uvar magic; the value 2 additionally
 * requests that the key be copied before callbacks see it. */
2007 if (w->cb_fetch || w->cb_store || w->cb_exists || w->cb_delete)
2008 w->uvar = SvTRUE(copy_key) ? 2 : 1;
2009 #endif /* VMG_UVAR */
2011 RETVAL = newRV_noinc(vmg_wizard_sv_new(w));
# cast(sv, wiz, ...): attach the wizard's magic to the variable referenced
# by sv, passing any extra arguments to the data constructor; croaks when
# wiz is not a valid wizard.  (Argument marshalling lines elided here.)
2015 SV *cast(SV *sv, SV *wiz, ...)
2016 PROTOTYPE: \[$@%&*]$@
2018 const vmg_wizard *w = NULL;
2027 wiz = SvRV_const(wiz);
2028 w = vmg_wizard_from_sv(wiz);
2031 croak(vmg_invalid_wiz);
2032 RETVAL = newSVuv(vmg_cast(SvRV(sv), w, wiz, args, i));
# getdata(sv, wiz): fetch the private data attached to the variable behind
# sv by this wizard's cast; croaks when wiz is not a valid wizard.
2037 getdata(SV *sv, SV *wiz)
2038 PROTOTYPE: \[$@%&*]$
2040 const vmg_wizard *w = NULL;
2044 w = vmg_wizard_from_sv(SvRV_const(wiz));
2046 croak(vmg_invalid_wiz);
2047 data = vmg_data_get(SvRV(sv), w);
2053 SV *dispell(SV *sv, SV *wiz)
2054 PROTOTYPE: \[$@%&*]$
2056 const vmg_wizard *w = NULL;
2059 w = vmg_wizard_from_sv(SvRV_const(wiz));
2061 croak(vmg_invalid_wiz);
2062 RETVAL = newSVuv(vmg_dispell(SvRV(sv), w));