1 /* This file is part of the Variable::Magic Perl module.
2 * See http://search.cpan.org/dist/Variable-Magic/ */
4 #include <stdarg.h> /* <va_list>, va_{start,arg,end}, ... */
6 #include <stdio.h> /* sprintf() */
8 #define PERL_NO_GET_CONTEXT
13 #define __PACKAGE__ "Variable::Magic"
17 # define VOID2(T, P) static_cast<T>(P)
19 # define VOID2(T, P) (P)
22 #ifndef VMG_PERL_PATCHLEVEL
24 # define VMG_PERL_PATCHLEVEL PERL_PATCHNUM
26 # define VMG_PERL_PATCHLEVEL 0
30 #define VMG_HAS_PERL(R, V, S) (PERL_REVISION > (R) || (PERL_REVISION == (R) && (PERL_VERSION > (V) || (PERL_VERSION == (V) && (PERL_SUBVERSION >= (S))))))
32 #define VMG_HAS_PERL_BRANCH(R, V, S) (PERL_REVISION == (R) && PERL_VERSION == (V) && PERL_SUBVERSION >= (S))
34 #define VMG_HAS_PERL_MAINT(R, V, S, P) (PERL_REVISION == (R) && PERL_VERSION == (V) && (VMG_PERL_PATCHLEVEL >= (P) || (!VMG_PERL_PATCHLEVEL && PERL_SUBVERSION >= (S))))
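/* For example, VMG_HAS_PERL(5, 10, 0) is true on perl 5.10.0 and later, while
 * VMG_HAS_PERL_MAINT(5, 8, 9, 28160) is also true on a 5.8 maint snapshot that
 * carries patch 28160, even before 5.8.9 was released. */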
36 /* --- Threads and multiplicity -------------------------------------------- */
46 /* Safe unless stated otherwise in Makefile.PL */
48 # define VMG_FORKSAFE 1
51 #ifndef VMG_MULTIPLICITY
52 # if defined(MULTIPLICITY)
53 # define VMG_MULTIPLICITY 1
55 # define VMG_MULTIPLICITY 0
59 # ifndef PERL_IMPLICIT_CONTEXT
60 # error MULTIPLICITY builds must set PERL_IMPLICIT_CONTEXT
64 #if VMG_MULTIPLICITY && defined(USE_ITHREADS) && defined(dMY_CXT) && defined(MY_CXT) && defined(START_MY_CXT) && defined(MY_CXT_INIT) && (defined(MY_CXT_CLONE) || defined(dMY_CXT_SV))
65 # ifndef VMG_THREADSAFE
66 # define VMG_THREADSAFE 1
69 # define MY_CXT_CLONE \
71 my_cxt_t *my_cxtp = (my_cxt_t*)SvPVX(newSV(sizeof(my_cxt_t)-1)); \
72 Copy(INT2PTR(my_cxt_t*, SvUV(my_cxt_sv)), my_cxtp, 1, my_cxt_t); \
73 sv_setuv(my_cxt_sv, PTR2UV(my_cxtp))
76 # undef VMG_THREADSAFE
77 # define VMG_THREADSAFE 0
79 # define dMY_CXT dNOOP
81 # define MY_CXT vmg_globaldata
83 # define START_MY_CXT STATIC my_cxt_t MY_CXT;
85 # define MY_CXT_INIT NOOP
87 # define MY_CXT_CLONE NOOP
91 # define VMG_LOCK(M) MUTEX_LOCK(M)
92 # define VMG_UNLOCK(M) MUTEX_UNLOCK(M)
95 # define VMG_UNLOCK(M)
98 /* --- Compatibility ------------------------------------------------------- */
101 # define Newx(v, n, c) New(0, v, n, c)
105 # define SvMAGIC_set(sv, val) (SvMAGIC(sv) = (val))
109 # define SvRV_const(sv) SvRV((SV *) sv)
112 #ifndef SvREFCNT_inc_simple_void
113 # define SvREFCNT_inc_simple_void(sv) ((void) SvREFCNT_inc(sv))
117 # define mPUSHu(U) PUSHs(sv_2mortal(newSVuv(U)))
120 #ifndef PERL_MAGIC_ext
121 # define PERL_MAGIC_ext '~'
124 #ifndef PERL_MAGIC_tied
125 # define PERL_MAGIC_tied 'P'
132 #ifndef IN_PERL_COMPILETIME
133 # define IN_PERL_COMPILETIME (PL_curcop == &PL_compiling)
137 # define VMG_ASSERT(C) assert(C)
139 # define VMG_ASSERT(C)
142 /* uvar magic and Hash::Util::FieldHash were committed with 28419, but we only
143 * enable them on 5.10 */
144 #if VMG_HAS_PERL(5, 10, 0)
150 #if VMG_HAS_PERL_MAINT(5, 11, 0, 32969) || VMG_HAS_PERL(5, 12, 0)
151 # define VMG_COMPAT_SCALAR_LENGTH_NOLEN 1
153 # define VMG_COMPAT_SCALAR_LENGTH_NOLEN 0
156 #if VMG_HAS_PERL(5, 17, 4)
157 # define VMG_COMPAT_SCALAR_NOLEN 1
159 # define VMG_COMPAT_SCALAR_NOLEN 0
162 /* Applied to dev-5.9 as 25854, integrated into maint-5.8 as 28160, partially
163 * reverted in dev-5.11 as 9cdcb38b */
164 #if VMG_HAS_PERL_MAINT(5, 8, 9, 28160) || VMG_HAS_PERL_MAINT(5, 9, 3, 25854) || VMG_HAS_PERL(5, 10, 0)
165 # ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN
166 # if VMG_HAS_PERL(5, 11, 0)
167 # define VMG_COMPAT_ARRAY_PUSH_NOLEN 0
169 # define VMG_COMPAT_ARRAY_PUSH_NOLEN 1
172 # ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID
173 # define VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID 1
176 # ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN
177 # define VMG_COMPAT_ARRAY_PUSH_NOLEN 0
179 # ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID
180 # define VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID 0
184 /* Applied to dev-5.11 as 34908 */
185 #if VMG_HAS_PERL_MAINT(5, 11, 0, 34908) || VMG_HAS_PERL(5, 12, 0)
186 # define VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID 1
188 # define VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID 0
191 /* Applied to dev-5.9 as 31473 (see #43357), integrated into maint-5.8 as 32542 */
192 #if VMG_HAS_PERL_MAINT(5, 8, 9, 32542) || VMG_HAS_PERL_MAINT(5, 9, 5, 31473) || VMG_HAS_PERL(5, 10, 0)
193 # define VMG_COMPAT_ARRAY_UNDEF_CLEAR 1
195 # define VMG_COMPAT_ARRAY_UNDEF_CLEAR 0
198 #if VMG_HAS_PERL(5, 11, 0)
199 # define VMG_COMPAT_HASH_DELETE_NOUVAR_VOID 1
201 # define VMG_COMPAT_HASH_DELETE_NOUVAR_VOID 0
204 #if VMG_HAS_PERL(5, 17, 0)
205 # define VMG_COMPAT_CODE_COPY_CLONE 1
207 # define VMG_COMPAT_CODE_COPY_CLONE 0
210 #if VMG_HAS_PERL(5, 13, 2)
211 # define VMG_COMPAT_GLOB_GET 1
213 # define VMG_COMPAT_GLOB_GET 0
216 #define VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE (VMG_HAS_PERL(5, 10, 0) && !VMG_HAS_PERL(5, 10, 1))
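/* That is, the ERRSV propagation trampoline is only needed on perl 5.10.0. */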
218 /* NewOp() isn't public in perl 5.8.0. */
219 #define VMG_RESET_RMG_NEEDS_TRAMPOLINE (VMG_UVAR && (VMG_THREADSAFE || !VMG_HAS_PERL(5, 8, 1)))
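/* On threaded builds the trampoline is also preferred over allocating and
 * splicing in a fresh op at runtime, presumably because that would not be safe
 * while the op tree is shared between threads. */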
221 /* ... Bug-free mg_magical ................................................. */
223 /* See the discussion at http://www.xray.mpe.mpg.de/mailing-lists/perl5-porters/2008-01/msg00036.html */
225 #if VMG_HAS_PERL(5, 11, 3)
227 #define vmg_mg_magical(S) mg_magical(S)
231 static void vmg_mg_magical(SV *sv) {
235 if ((mg = SvMAGIC(sv))) {
237 const MGVTBL* const vtbl = mg->mg_virtual;
239 if (vtbl->svt_get && !(mg->mg_flags & MGf_GSKIP))
246 } while ((mg = mg->mg_moremagic));
247 if (!(SvFLAGS(sv) & (SVs_GMG|SVs_SMG)))
254 /* --- Trampoline ops ------------------------------------------------------ */
256 #define VMG_NEEDS_TRAMPOLINE (VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE || VMG_RESET_RMG_NEEDS_TRAMPOLINE)
258 #if VMG_NEEDS_TRAMPOLINE
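/* A trampoline is a pair of fake ops: a stub "temp" op chained to a "target"
 * SVOP whose op_ppaddr is the callback to run. Bumping the trampoline copies
 * the current op into the temp slot, points its op_next at the target, and
 * swaps it into PL_op, so the callback gets executed between the current op
 * and its real op_next without allocating any new op. */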
265 static void vmg_trampoline_init(vmg_trampoline *t, OP *(*cb)(pTHX)) {
266 t->temp.op_type = OP_STUB;
267 t->temp.op_ppaddr = 0;
268 t->temp.op_next = (OP *) &t->target;
269 t->temp.op_flags = 0;
270 t->temp.op_private = 0;
272 t->target.op_type = OP_STUB;
273 t->target.op_ppaddr = cb;
274 t->target.op_next = NULL;
275 t->target.op_flags = 0;
276 t->target.op_private = 0;
277 t->target.op_sv = NULL;
280 static OP *vmg_trampoline_bump(pTHX_ vmg_trampoline *t, SV *sv, OP *o) {
281 #define vmg_trampoline_bump(T, S, O) vmg_trampoline_bump(aTHX_ (T), (S), (O))
283 t->temp.op_next = (OP *) &t->target;
285 t->target.op_sv = sv;
286 t->target.op_next = o->op_next;
291 #endif /* VMG_NEEDS_TRAMPOLINE */
293 /* --- Cleaner version of sv_magicext() ------------------------------------ */
295 static MAGIC *vmg_sv_magicext(pTHX_ SV *sv, SV *obj, const MGVTBL *vtbl, const void *ptr, I32 len) {
296 #define vmg_sv_magicext(S, O, V, P, L) vmg_sv_magicext(aTHX_ (S), (O), (V), (P), (L))
299 mg = sv_magicext(sv, obj, PERL_MAGIC_ext, vtbl, ptr, len);
306 mg->mg_flags |= MGf_COPY;
309 mg->mg_flags |= MGf_DUP;
313 mg->mg_flags |= MGf_LOCAL;
314 #endif /* MGf_LOCAL */
316 if (mg->mg_flags & MGf_REFCOUNTED)
322 /* --- Safe version of call_sv() ------------------------------------------- */
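/* Wraps call_sv() with G_EVAL : ERRSV and the context frame that call_sv() is
 * about to reuse are saved and restored around the call, and errors raised by
 * the callback are dealt with afterwards, once the optional cleanup hook has
 * had a chance to run. */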
324 static I32 vmg_call_sv(pTHX_ SV *sv, I32 flags, int (*cleanup)(pTHX_ void *), void *ud) {
325 #define vmg_call_sv(S, F, C, U) vmg_call_sv(aTHX_ (S), (F), (C), (U))
327 PERL_CONTEXT saved_cx;
331 old_err = newSVsv(ERRSV);
332 sv_setsv(ERRSV, &PL_sv_undef);
335 cxix = (cxstack_ix < cxstack_max) ? (cxstack_ix + 1) : Perl_cxinc(aTHX);
336 /* The last popped context will be reused by call_sv(), but our callers may
337 * still need its previous value. Back it up so that it isn't clobbered. */
338 saved_cx = cxstack[cxix];
340 ret = call_sv(sv, flags | G_EVAL);
342 cxstack[cxix] = saved_cx;
345 SvREFCNT_dec(old_err);
347 if (IN_PERL_COMPILETIME) {
350 sv_catsv(PL_errors, ERRSV);
352 Perl_warn(aTHX_ "%s", SvPV_nolen(ERRSV));
355 #if VMG_HAS_PERL(5, 10, 0) || defined(PL_parser)
357 ++PL_parser->error_count;
358 #elif defined(PL_error_count)
364 if (!cleanup || cleanup(aTHX_ ud))
369 sv_setsv(ERRSV, old_err);
370 SvREFCNT_dec(old_err);
377 /* --- Stolen chunk of B --------------------------------------------------- */
392 #if VMG_HAS_PERL(5, 21, 5)
398 static const char *const vmg_opclassnames[] = {
411 #if VMG_HAS_PERL(5, 21, 5)
417 static opclass vmg_opclass(const OP *o) {
424 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
426 if (o->op_type == OP_SASSIGN)
427 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
429 if (o->op_type == OP_AELEMFAST) {
430 #if PERL_VERSION <= 14
431 if (o->op_flags & OPf_SPECIAL)
443 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_RCATLINE)
447 switch (PL_opargs[o->op_type] & OA_CLASS_MASK) {
464 case OA_PVOP_OR_SVOP:
465 return (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF)) ? OPc_SVOP : OPc_PVOP;
470 case OA_BASEOP_OR_UNOP:
471 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
473 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
475 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
477 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
480 if (o->op_flags & OPf_STACKED)
482 else if (o->op_flags & OPf_SPECIAL)
486 #if VMG_HAS_PERL(5, 21, 5)
495 /* --- Error messages ------------------------------------------------------ */
497 static const char vmg_invalid_wiz[] = "Invalid wizard object";
498 static const char vmg_wrongargnum[] = "Wrong number of arguments";
500 /* --- Context-safe global data -------------------------------------------- */
502 #define MY_CXT_KEY __PACKAGE__ "::_guts" XS_VERSION
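/* Key identifying our per-interpreter data : the package name, "::_guts" and
 * the XS version. */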
505 HV *b__op_stashes[OPc_MAX];
508 #if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
509 vmg_trampoline propagate_errsv;
511 #if VMG_RESET_RMG_NEEDS_TRAMPOLINE
512 vmg_trampoline reset_rmg;
518 /* --- <vmg_vtable> structure ---------------------------------------------- */
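/* On threadsafe builds the vtable is reference-counted and allocated with
 * PerlMemShared_malloc() so that all interpreter clones share the very same
 * MGVTBL pointer; otherwise a plain per-interpreter MGVTBL is enough. */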
527 static vmg_vtable *vmg_vtable_alloc(pTHX) {
528 #define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
531 t = VOID2(vmg_vtable *, PerlMemShared_malloc(sizeof *t));
533 t->vtbl = VOID2(MGVTBL *, PerlMemShared_malloc(sizeof *t->vtbl));
539 #define vmg_vtable_vtbl(T) (T)->vtbl
541 static perl_mutex vmg_vtable_refcount_mutex;
543 static vmg_vtable *vmg_vtable_dup(pTHX_ vmg_vtable *t) {
544 #define vmg_vtable_dup(T) vmg_vtable_dup(aTHX_ (T))
545 VMG_LOCK(&vmg_vtable_refcount_mutex);
547 VMG_UNLOCK(&vmg_vtable_refcount_mutex);
552 static void vmg_vtable_free(pTHX_ vmg_vtable *t) {
553 #define vmg_vtable_free(T) vmg_vtable_free(aTHX_ (T))
556 VMG_LOCK(&vmg_vtable_refcount_mutex);
557 refcount = --t->refcount;
558 VMG_UNLOCK(&vmg_vtable_refcount_mutex);
561 PerlMemShared_free(t->vtbl);
562 PerlMemShared_free(t);
566 #else /* VMG_THREADSAFE */
568 typedef MGVTBL vmg_vtable;
570 static vmg_vtable *vmg_vtable_alloc(pTHX) {
571 #define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
574 Newx(t, 1, vmg_vtable);
579 #define vmg_vtable_vtbl(T) ((MGVTBL *) (T))
581 #define vmg_vtable_free(T) Safefree(T)
583 #endif /* !VMG_THREADSAFE */
585 /* --- <vmg_wizard> structure ---------------------------------------------- */
594 SV *cb_get, *cb_set, *cb_len, *cb_clear, *cb_free;
599 #endif /* MGf_LOCAL */
601 SV *cb_fetch, *cb_store, *cb_exists, *cb_delete;
602 #endif /* VMG_UVAR */
605 static void vmg_op_info_init(pTHX_ unsigned int opinfo);
607 static vmg_wizard *vmg_wizard_alloc(pTHX_ UV opinfo) {
608 #define vmg_wizard_alloc(O) vmg_wizard_alloc(aTHX_ (O))
611 Newx(w, 1, vmg_wizard);
614 w->opinfo = (U8) ((opinfo < 255) ? opinfo : 255);
616 vmg_op_info_init(aTHX_ w->opinfo);
618 w->vtable = vmg_vtable_alloc();
623 static void vmg_wizard_free(pTHX_ vmg_wizard *w) {
624 #define vmg_wizard_free(W) vmg_wizard_free(aTHX_ (W))
628 /* During global destruction, any of the callbacks may already have been
629 * freed, so we can't rely on still being able to access them. */
631 SvREFCNT_dec(w->cb_data);
632 SvREFCNT_dec(w->cb_get);
633 SvREFCNT_dec(w->cb_set);
634 SvREFCNT_dec(w->cb_len);
635 SvREFCNT_dec(w->cb_clear);
636 SvREFCNT_dec(w->cb_free);
637 SvREFCNT_dec(w->cb_copy);
639 SvREFCNT_dec(w->cb_dup);
642 SvREFCNT_dec(w->cb_local);
643 #endif /* MGf_LOCAL */
645 SvREFCNT_dec(w->cb_fetch);
646 SvREFCNT_dec(w->cb_store);
647 SvREFCNT_dec(w->cb_exists);
648 SvREFCNT_dec(w->cb_delete);
649 #endif /* VMG_UVAR */
652 /* PerlMemShared_free() and Safefree() are still fine during global
653 * destruction though. */
654 vmg_vtable_free(w->vtable);
662 #define VMG_CLONE_CB(N) \
663 z->cb_ ## N = (w->cb_ ## N) ? SvREFCNT_inc(sv_dup(w->cb_ ## N, params)) \
666 static const vmg_wizard *vmg_wizard_dup(pTHX_ const vmg_wizard *w, CLONE_PARAMS *params) {
667 #define vmg_wizard_dup(W, P) vmg_wizard_dup(aTHX_ (W), (P))
673 Newx(z, 1, vmg_wizard);
675 z->vtable = vmg_vtable_dup(w->vtable);
677 z->opinfo = w->opinfo;
689 #endif /* MGf_LOCAL */
693 VMG_CLONE_CB(exists);
694 VMG_CLONE_CB(delete);
695 #endif /* VMG_UVAR */
700 #endif /* VMG_THREADSAFE */
702 #define vmg_wizard_id(W) PTR2IV(vmg_vtable_vtbl((W)->vtable))
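/* A wizard is identified by the address of its magic vtable; on threadsafe
 * builds that vtable is shared between clones, so the id stays stable across
 * threads. */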
704 /* --- Wizard SV objects --------------------------------------------------- */
706 static int vmg_wizard_sv_free(pTHX_ SV *sv, MAGIC *mg) {
707 vmg_wizard_free((vmg_wizard *) mg->mg_ptr);
714 static int vmg_wizard_sv_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
715 mg->mg_ptr = (char *) vmg_wizard_dup((const vmg_wizard *) mg->mg_ptr, params);
720 #endif /* VMG_THREADSAFE */
722 static MGVTBL vmg_wizard_sv_vtbl = {
727 vmg_wizard_sv_free, /* free */
730 vmg_wizard_sv_dup, /* dup */
736 #endif /* MGf_LOCAL */
739 static SV *vmg_wizard_sv_new(pTHX_ const vmg_wizard *w) {
740 #define vmg_wizard_sv_new(W) vmg_wizard_sv_new(aTHX_ (W))
746 wiz = newSViv(PTR2IV(w));
749 vmg_sv_magicext(wiz, NULL, &vmg_wizard_sv_vtbl, w, 0);
758 #define vmg_sv_has_wizard_type(S) (SvTYPE(S) >= SVt_PVMG)
760 static const vmg_wizard *vmg_wizard_from_sv_nocheck(const SV *wiz) {
763 for (mg = SvMAGIC(wiz); mg; mg = mg->mg_moremagic) {
764 if (mg->mg_type == PERL_MAGIC_ext && mg->mg_virtual == &vmg_wizard_sv_vtbl)
765 return (const vmg_wizard *) mg->mg_ptr;
771 #else /* VMG_THREADSAFE */
773 #define vmg_sv_has_wizard_type(S) SvIOK(S)
775 #define vmg_wizard_from_sv_nocheck(W) INT2PTR(const vmg_wizard *, SvIVX(W))
777 #endif /* !VMG_THREADSAFE */
779 #define vmg_wizard_from_sv(W) (vmg_sv_has_wizard_type(W) ? vmg_wizard_from_sv_nocheck(W) : NULL)
781 static const vmg_wizard *vmg_wizard_from_mg(const MAGIC *mg) {
782 if (mg->mg_type == PERL_MAGIC_ext && mg->mg_len == HEf_SVKEY) {
783 SV *sv = (SV *) mg->mg_ptr;
785 if (vmg_sv_has_wizard_type(sv))
786 return vmg_wizard_from_sv_nocheck(sv);
792 #define vmg_wizard_from_mg_nocheck(M) vmg_wizard_from_sv_nocheck((const SV *) (M)->mg_ptr)
794 /* --- User-level functions implementation --------------------------------- */
796 static const MAGIC *vmg_find(const SV *sv, const vmg_wizard *w) {
800 if (SvTYPE(sv) < SVt_PVMG)
803 wid = vmg_wizard_id(w);
805 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
806 const vmg_wizard *z = vmg_wizard_from_mg(mg);
808 if (z && vmg_wizard_id(z) == wid)
815 /* ... Construct private data .............................................. */
817 static SV *vmg_data_new(pTHX_ SV *ctor, SV *sv, SV **args, I32 items) {
818 #define vmg_data_new(C, S, A, I) vmg_data_new(aTHX_ (C), (S), (A), (I))
828 EXTEND(SP, items + 1);
829 PUSHs(sv_2mortal(newRV_inc(sv)));
830 for (i = 0; i < items; ++i)
834 vmg_call_sv(ctor, G_SCALAR, 0, NULL);
838 #if VMG_HAS_PERL(5, 8, 3)
839 SvREFCNT_inc_simple_void(nsv); /* Or it will be destroyed in FREETMPS */
841 nsv = sv_newref(nsv); /* Work around a bug in SvREFCNT_inc() */
851 static SV *vmg_data_get(pTHX_ SV *sv, const vmg_wizard *w) {
852 #define vmg_data_get(S, W) vmg_data_get(aTHX_ (S), (W))
853 const MAGIC *mg = vmg_find(sv, w);
855 return mg ? mg->mg_obj : NULL;
858 /* ... Magic cast/dispell .................................................. */
862 static I32 vmg_svt_val(pTHX_ IV, SV *);
865 struct ufuncs new_uf;
866 struct ufuncs old_uf;
869 #endif /* VMG_UVAR */
871 static void vmg_mg_del(pTHX_ SV *sv, MAGIC *prevmagic, MAGIC *mg, MAGIC *moremagic) {
872 #define vmg_mg_del(S, P, M, N) vmg_mg_del(aTHX_ (S), (P), (M), (N))
876 prevmagic->mg_moremagic = moremagic;
878 SvMAGIC_set(sv, moremagic);
880 /* Destroy private data */
882 if (mg->mg_type == PERL_MAGIC_uvar) {
883 Safefree(mg->mg_ptr);
885 #endif /* VMG_UVAR */
886 if (mg->mg_obj != sv) {
887 SvREFCNT_dec(mg->mg_obj);
890 /* Unreference the wizard */
891 SvREFCNT_dec((SV *) mg->mg_ptr);
895 #endif /* VMG_UVAR */
898 mg->mg_moremagic = MY_CXT.freed_tokens;
899 MY_CXT.freed_tokens = mg;
901 mg->mg_moremagic = NULL;
906 static int vmg_magic_chain_free(pTHX_ MAGIC *mg, MAGIC *skip) {
907 #define vmg_magic_chain_free(M, S) vmg_magic_chain_free(aTHX_ (M), (S))
911 MAGIC *moremagic = mg->mg_moremagic;
924 static UV vmg_cast(pTHX_ SV *sv, const vmg_wizard *w, const SV *wiz, SV **args, I32 items) {
925 #define vmg_cast(S, W, WIZ, A, I) vmg_cast(aTHX_ (S), (W), (WIZ), (A), (I))
934 oldgmg = SvGMAGICAL(sv);
936 data = (w->cb_data) ? vmg_data_new(w->cb_data, sv, args, items) : NULL;
938 t = vmg_vtable_vtbl(w->vtable);
939 mg = vmg_sv_magicext(sv, data, t, wiz, HEf_SVKEY);
941 if (SvTYPE(sv) < SVt_PVHV)
944 /* The GMAGICAL flag only says that a hash is tied or has uvar magic - get
945 * magic is actually never called for them. If the GMAGICAL flag was off before
946 * calling sv_magicext(), the hash isn't tied and has no uvar magic. If it's
947 * now on, then this wizard has get magic. Hence we can work around the
948 * get/clear shortcoming by turning the GMAGICAL flag off. If the current magic
949 * has uvar callbacks, it will be turned back on later. */
950 if (!oldgmg && SvGMAGICAL(sv))
955 MAGIC *prevmagic, *moremagic = NULL;
958 ud.new_uf.uf_val = vmg_svt_val;
959 ud.new_uf.uf_set = NULL;
960 ud.new_uf.uf_index = 0;
961 ud.old_uf.uf_val = NULL;
962 ud.old_uf.uf_set = NULL;
963 ud.old_uf.uf_index = 0;
965 /* One uvar magic in the chain is enough. */
966 for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
967 moremagic = mg->mg_moremagic;
968 if (mg->mg_type == PERL_MAGIC_uvar)
972 if (mg) { /* Found another uvar magic. */
973 struct ufuncs *uf = (struct ufuncs *) mg->mg_ptr;
974 if (uf->uf_val == vmg_svt_val) {
975 /* It's our uvar magic, nothing to do. oldgmg was true. */
978 /* It's another uvar magic, back it up and replace it with ours. */
980 vmg_mg_del(sv, prevmagic, mg, moremagic);
984 sv_magic(sv, NULL, PERL_MAGIC_uvar, (const char *) &ud, sizeof(ud));
986 /* Our hash now carries uvar magic. The uvar/clear shortcoming has to be
987 * handled by our uvar callback. */
989 #endif /* VMG_UVAR */
995 static UV vmg_dispell(pTHX_ SV *sv, const vmg_wizard *w) {
996 #define vmg_dispell(S, W) vmg_dispell(aTHX_ (S), (W))
999 #endif /* VMG_UVAR */
1000 MAGIC *mg, *prevmagic, *moremagic = NULL;
1001 IV wid = vmg_wizard_id(w);
1003 if (SvTYPE(sv) < SVt_PVMG)
1006 for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
1007 const vmg_wizard *z;
1009 moremagic = mg->mg_moremagic;
1011 z = vmg_wizard_from_mg(mg);
1013 IV zid = vmg_wizard_id(z);
1017 /* If the current wizard has no uvar, short-circuit uvar deletion. */
1018 uvars = z->uvar ? (uvars + 1) : 0;
1020 } else if (z->uvar) {
1022 /* We can't break here since we need to find the ext magic to delete. */
1024 #else /* VMG_UVAR */
1027 #endif /* !VMG_UVAR */
1033 vmg_mg_del(sv, prevmagic, mg, moremagic);
1036 if (uvars == 1 && SvTYPE(sv) >= SVt_PVHV) {
1037 /* mg was the first ext magic in the chain that had uvar */
1039 for (mg = moremagic; mg; mg = mg->mg_moremagic) {
1040 const vmg_wizard *z = vmg_wizard_from_mg(mg);
1051 for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic){
1052 moremagic = mg->mg_moremagic;
1053 if (mg->mg_type == PERL_MAGIC_uvar)
1057 ud = (vmg_uvar_ud *) mg->mg_ptr;
1058 if (ud->old_uf.uf_val || ud->old_uf.uf_set) {
1059 /* Restore the original uvar magic. */
1061 Newx(uf, 1, struct ufuncs);
1064 mg->mg_ptr = (char *) uf;
1065 mg->mg_len = sizeof(*uf);
1067 /* Remove the uvar magic. */
1068 vmg_mg_del(sv, prevmagic, mg, moremagic);
1072 #endif /* VMG_UVAR */
1079 /* ... OP info ............................................................. */
1081 #define VMG_OP_INFO_NAME 1
1082 #define VMG_OP_INFO_OBJECT 2
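/* VMG_OP_INFO_NAME passes the current op name to the callbacks as a plain
 * string, while VMG_OP_INFO_OBJECT passes a reference blessed into the
 * corresponding B::*OP class. */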
1085 static perl_mutex vmg_op_name_init_mutex;
1088 static U32 vmg_op_name_init = 0;
1089 static unsigned char vmg_op_name_len[MAXO] = { 0 };
1091 static void vmg_op_info_init(pTHX_ unsigned int opinfo) {
1092 #define vmg_op_info_init(W) vmg_op_info_init(aTHX_ (W))
1094 case VMG_OP_INFO_NAME:
1095 VMG_LOCK(&vmg_op_name_init_mutex);
1096 if (!vmg_op_name_init) {
1098 for (t = 0; t < OP_max; ++t)
1099 vmg_op_name_len[t] = strlen(PL_op_name[t]);
1100 vmg_op_name_init = 1;
1102 VMG_UNLOCK(&vmg_op_name_init_mutex);
1104 case VMG_OP_INFO_OBJECT: {
1106 if (!MY_CXT.b__op_stashes[0]) {
1109 for (c = OPc_NULL; c < OPc_MAX; ++c)
1110 MY_CXT.b__op_stashes[c] = gv_stashpv(vmg_opclassnames[c], 1);
1119 static SV *vmg_op_info(pTHX_ unsigned int opinfo) {
1120 #define vmg_op_info(W) vmg_op_info(aTHX_ (W))
1122 return &PL_sv_undef;
1125 case VMG_OP_INFO_NAME: {
1126 OPCODE t = PL_op->op_type;
1127 return sv_2mortal(newSVpvn(PL_op_name[t], vmg_op_name_len[t]));
1129 case VMG_OP_INFO_OBJECT: {
1131 return sv_bless(sv_2mortal(newRV_noinc(newSViv(PTR2IV(PL_op)))),
1132 MY_CXT.b__op_stashes[vmg_opclass(PL_op)]);
1138 return &PL_sv_undef;
1141 /* --- svt callbacks ------------------------------------------------------- */
1143 #define VMG_CB_CALL_ARGS_MASK 15
1144 #define VMG_CB_CALL_ARGS_SHIFT 4
1145 #define VMG_CB_CALL_OPINFO (VMG_OP_INFO_NAME|VMG_OP_INFO_OBJECT) /* 1|2 */
1146 #define VMG_CB_CALL_GUARD 4
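/* Layout of the packed flags passed to vmg_cb_call(): bits 0-3 hold the number
 * of extra SV arguments, bits 4-5 the op_info mode, and bit 6 asks for the
 * dispell guard, which defers the release of magic tokens freed during the
 * callback. */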
1148 static int vmg_dispell_guard_oncroak(pTHX_ void *ud) {
1153 /* If we're at the outermost magic call and we're about to die, we can just free
1154 * the tokens right now, since we will jump past the problematic part of our
1156 if (MY_CXT.depth == 0 && MY_CXT.freed_tokens) {
1157 vmg_magic_chain_free(MY_CXT.freed_tokens, NULL);
1158 MY_CXT.freed_tokens = NULL;
1164 static int vmg_dispell_guard_free(pTHX_ SV *sv, MAGIC *mg) {
1165 vmg_magic_chain_free((MAGIC *) mg->mg_ptr, NULL);
1172 static int vmg_dispell_guard_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
1173 /* The freed magic tokens aren't cloned by perl because it cannot reach them
1174 * (they have been detached from their parent SV when they were enqueued).
1175 * Hence there's nothing to purge in the new thread. */
1181 #endif /* VMG_THREADSAFE */
1183 static MGVTBL vmg_dispell_guard_vtbl = {
1188 vmg_dispell_guard_free, /* free */
1191 vmg_dispell_guard_dup, /* dup */
1197 #endif /* MGf_LOCAL */
1200 static SV *vmg_dispell_guard_new(pTHX_ MAGIC *root) {
1201 #define vmg_dispell_guard_new(R) vmg_dispell_guard_new(aTHX_ (R))
1204 guard = sv_newmortal();
1205 vmg_sv_magicext(guard, NULL, &vmg_dispell_guard_vtbl, root, 0);
1210 static int vmg_cb_call(pTHX_ SV *cb, unsigned int flags, SV *sv, ...) {
1213 unsigned int i, args, opinfo;
1214 MAGIC **chain = NULL;
1219 args = flags & VMG_CB_CALL_ARGS_MASK;
1220 flags >>= VMG_CB_CALL_ARGS_SHIFT;
1221 opinfo = flags & VMG_CB_CALL_OPINFO;
1227 EXTEND(SP, args + 1);
1228 PUSHs(sv_2mortal(newRV_inc(sv)));
1230 for (i = 0; i < args; ++i) {
1231 SV *sva = va_arg(ap, SV *);
1232 PUSHs(sva ? sva : &PL_sv_undef);
1236 XPUSHs(vmg_op_info(opinfo));
1239 if (flags & VMG_CB_CALL_GUARD) {
1242 vmg_call_sv(cb, G_SCALAR, vmg_dispell_guard_oncroak, NULL);
1244 if (MY_CXT.depth == 0 && MY_CXT.freed_tokens)
1245 chain = &MY_CXT.freed_tokens;
1247 vmg_call_sv(cb, G_SCALAR, 0, NULL);
1253 ret = (int) SvIV(svr);
1263 if (svr && !SvTEMP(svr))
1267 vmg_dispell_guard_new(*chain);
1274 #define VMG_CB_FLAGS(OI, A) \
1275 ((((unsigned int) (OI)) << VMG_CB_CALL_ARGS_SHIFT) | (A))
1277 #define vmg_cb_call1(I, OI, S, A1) \
1278 vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 1), (S), (A1))
1279 #define vmg_cb_call2(I, OI, S, A1, A2) \
1280 vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 2), (S), (A1), (A2))
1281 #define vmg_cb_call3(I, OI, S, A1, A2, A3) \
1282 vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 3), (S), (A1), (A2), (A3))
1284 /* ... Default no-op magic callback ........................................ */
1286 static int vmg_svt_default_noop(pTHX_ SV *sv, MAGIC *mg) {
1290 /* ... get magic ........................................................... */
1292 static int vmg_svt_get(pTHX_ SV *sv, MAGIC *mg) {
1293 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1295 return vmg_cb_call1(w->cb_get, w->opinfo, sv, mg->mg_obj);
1298 #define vmg_svt_get_noop vmg_svt_default_noop
1300 /* ... set magic ........................................................... */
1302 static int vmg_svt_set(pTHX_ SV *sv, MAGIC *mg) {
1303 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1305 return vmg_cb_call1(w->cb_set, w->opinfo, sv, mg->mg_obj);
1308 #define vmg_svt_set_noop vmg_svt_default_noop
1310 /* ... len magic ........................................................... */
1312 static U32 vmg_sv_len(pTHX_ SV *sv) {
1313 #define vmg_sv_len(S) vmg_sv_len(aTHX_ (S))
1315 #if VMG_HAS_PERL(5, 9, 3)
1316 const U8 *s = VOID2(const U8 *, VOID2(const void *, SvPV_const(sv, len)));
1318 U8 *s = SvPV(sv, len);
1321 return DO_UTF8(sv) ? utf8_length(s, s + len) : len;
1324 static U32 vmg_svt_len(pTHX_ SV *sv, MAGIC *mg) {
1325 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1326 unsigned int opinfo = w->opinfo;
1329 svtype t = SvTYPE(sv);
1338 PUSHs(sv_2mortal(newRV_inc(sv)));
1339 PUSHs(mg->mg_obj ? mg->mg_obj : &PL_sv_undef);
1341 len = vmg_sv_len(sv);
1343 } else if (t == SVt_PVAV) {
1344 len = av_len((AV *) sv) + 1;
1348 PUSHs(&PL_sv_undef);
1351 XPUSHs(vmg_op_info(opinfo));
1354 vmg_call_sv(w->cb_len, G_SCALAR, 0, NULL);
1358 ret = SvOK(svr) ? (U32) SvUV(svr) : len;
1369 static U32 vmg_svt_len_noop(pTHX_ SV *sv, MAGIC *mg) {
1371 svtype t = SvTYPE(sv);
1374 len = vmg_sv_len(sv);
1375 } else if (t == SVt_PVAV) {
1376 len = (U32) av_len((AV *) sv);
1382 /* ... clear magic ......................................................... */
1384 static int vmg_svt_clear(pTHX_ SV *sv, MAGIC *mg) {
1385 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1386 unsigned int flags = w->opinfo;
1388 #if !VMG_HAS_PERL(5, 12, 0)
1389 flags |= VMG_CB_CALL_GUARD;
1392 return vmg_cb_call1(w->cb_clear, flags, sv, mg->mg_obj);
1395 #define vmg_svt_clear_noop vmg_svt_default_noop
1397 /* ... free magic .......................................................... */
1399 #if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
1401 static OP *vmg_pp_propagate_errsv(pTHX) {
1402 SVOP *o = cSVOPx(PL_op);
1405 sv_setsv(ERRSV, o->op_sv);
1406 SvREFCNT_dec(o->op_sv);
1413 #endif /* VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE */
1415 static int vmg_propagate_errsv_free(pTHX_ SV *sv, MAGIC *mg) {
1417 sv_setsv(ERRSV, mg->mg_obj);
1422 /* perl is already kind enough to handle the cloning of the mg_obj member,
1423 hence we don't need to define a dup magic callback. */
1425 static MGVTBL vmg_propagate_errsv_vtbl = {
1430 vmg_propagate_errsv_free, /* free */
1435 #endif /* MGf_LOCAL */
1442 } vmg_svt_free_cleanup_ud;
1444 static int vmg_svt_free_cleanup(pTHX_ void *ud_) {
1445 vmg_svt_free_cleanup_ud *ud = VOID2(vmg_svt_free_cleanup_ud *, ud_);
1448 U32 optype = PL_op ? PL_op->op_type : OP_NULL;
1450 if (optype == OP_LEAVETRY || optype == OP_LEAVEEVAL) {
1451 SV *errsv = newSVsv(ERRSV);
1454 LEAVE_SCOPE(ud->base);
1456 #if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
1457 if (optype == OP_LEAVETRY) {
1459 PL_op = vmg_trampoline_bump(&MY_CXT.propagate_errsv, errsv, PL_op);
1460 } else if (optype == OP_LEAVEEVAL) {
1461 SV *guard = sv_newmortal();
1462 vmg_sv_magicext(guard, errsv, &vmg_propagate_errsv_vtbl, NULL, 0);
1464 #else /* !VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE */
1465 # if !VMG_HAS_PERL(5, 8, 9)
1467 SV *guard = sv_newmortal();
1468 vmg_sv_magicext(guard, errsv, &vmg_propagate_errsv_vtbl, NULL, 0);
1471 vmg_sv_magicext(ERRSV, errsv, &vmg_propagate_errsv_vtbl, NULL, 0);
1473 #endif /* VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE */
1478 /* Don't propagate */
1484 /* We are about to croak() while sv is being destroyed. Try to clean up
1488 vmg_mg_del(sv, NULL, mg, mg->mg_moremagic);
1493 vmg_dispell_guard_oncroak(aTHX_ NULL);
1495 /* After that, propagate the error upwards. */
1500 static int vmg_svt_free(pTHX_ SV *sv, MAGIC *mg) {
1501 vmg_svt_free_cleanup_ud ud;
1502 const vmg_wizard *w;
1508 /* During global destruction, we cannot be sure that the wizard and its free
1509 * callback are still alive. */
1513 w = vmg_wizard_from_mg_nocheck(mg);
1515 /* So that it survives the temp cleanup below */
1516 SvREFCNT_inc_simple_void(sv);
1518 #if !(VMG_HAS_PERL_MAINT(5, 11, 0, 32686) || VMG_HAS_PERL(5, 12, 0))
1519 /* The previous magic tokens were freed but the magic chain wasn't updated, so
1520 * if the sv is accessed from the callback, the old, deleted magics will trigger
1521 * and cause memory misreads. Change 32686 solved it this way: */
1522 SvMAGIC_set(sv, mg);
1526 if (cxstack_ix < cxstack_max) {
1527 ud.in_eval = (CxTYPE(cxstack + cxstack_ix + 1) == CXt_EVAL);
1528 ud.base = ud.in_eval ? PL_scopestack[PL_scopestack_ix] : 0;
1539 PUSHs(sv_2mortal(newRV_inc(sv)));
1540 PUSHs(mg->mg_obj ? mg->mg_obj : &PL_sv_undef);
1542 XPUSHs(vmg_op_info(w->opinfo));
1548 vmg_call_sv(w->cb_free, G_SCALAR, vmg_svt_free_cleanup, &ud);
1550 if (MY_CXT.depth == 0 && MY_CXT.freed_tokens) {
1551 /* Free all the tokens in the chain but the current one (if it's present).
1552 * It will be taken care of by our caller, Perl_mg_free(). */
1553 vmg_magic_chain_free(MY_CXT.freed_tokens, mg);
1554 MY_CXT.freed_tokens = NULL;
1561 ret = (int) SvIV(svr);
1567 /* Calling SvREFCNT_dec() would trigger destructors in an infinite loop, so
1568 * we have to rely on SvREFCNT() being an lvalue. Heck, even the core does it. */
1571 /* Perl_mg_free() will get rid of the magic and decrement the reference counts
1572 * of mg->mg_obj and mg->mg_ptr. */
1576 #define vmg_svt_free_noop vmg_svt_default_noop
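/* The type of the key length parameter of svt_copy() was changed from int to
 * I32 by change 33256, first released in 5.12. */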
1578 #if VMG_HAS_PERL_MAINT(5, 11, 0, 33256) || VMG_HAS_PERL(5, 12, 0)
1579 # define VMG_SVT_COPY_KEYLEN_TYPE I32
1581 # define VMG_SVT_COPY_KEYLEN_TYPE int
1584 /* ... copy magic .......................................................... */
1586 static int vmg_svt_copy(pTHX_ SV *sv, MAGIC *mg, SV *nsv, const char *key, VMG_SVT_COPY_KEYLEN_TYPE keylen) {
1587 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1591 if (keylen == HEf_SVKEY) {
1594 keysv = newSVpvn(key, keylen);
1597 if (SvTYPE(sv) >= SVt_PVCV)
1598 nsv = sv_2mortal(newRV_inc(nsv));
1600 ret = vmg_cb_call3(w->cb_copy, w->opinfo, sv, mg->mg_obj, keysv, nsv);
1602 if (keylen != HEf_SVKEY) {
1603 SvREFCNT_dec(keysv);
1609 static int vmg_svt_copy_noop(pTHX_ SV *sv, MAGIC *mg, SV *nsv, const char *key, VMG_SVT_COPY_KEYLEN_TYPE keylen) {
1613 /* ... dup magic ........................................................... */
1616 static int vmg_svt_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *param) {
1619 #define vmg_svt_dup_noop vmg_svt_dup
1622 /* ... local magic ......................................................... */
1626 static int vmg_svt_local(pTHX_ SV *nsv, MAGIC *mg) {
1627 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1629 return vmg_cb_call1(w->cb_local, w->opinfo, nsv, mg->mg_obj);
1632 #define vmg_svt_local_noop vmg_svt_default_noop
1634 #endif /* MGf_LOCAL */
1636 /* ... uvar magic .......................................................... */
1640 static OP *vmg_pp_reset_rmg(pTHX) {
1641 SVOP *o = cSVOPx(PL_op);
1643 SvRMAGICAL_on(o->op_sv);
1649 static I32 vmg_svt_val(pTHX_ IV action, SV *sv) {
1651 MAGIC *mg, *umg, *moremagic;
1652 SV *key = NULL, *newkey = NULL;
1655 umg = mg_find(sv, PERL_MAGIC_uvar);
1656 /* umg can't be NULL or we wouldn't be here. */
1658 ud = (vmg_uvar_ud *) umg->mg_ptr;
1660 if (ud->old_uf.uf_val)
1661 ud->old_uf.uf_val(aTHX_ action, sv);
1662 if (ud->old_uf.uf_set)
1663 ud->old_uf.uf_set(aTHX_ action, sv);
1665 for (mg = SvMAGIC(sv); mg; mg = moremagic) {
1666 const vmg_wizard *w;
1668 /* mg may be freed later by the uvar call, so we need to fetch the next
1669 * token before reaching that fateful point. */
1670 moremagic = mg->mg_moremagic;
1672 switch (mg->mg_type) {
1673 case PERL_MAGIC_ext:
1675 case PERL_MAGIC_tied:
1682 w = vmg_wizard_from_mg(mg);
1691 newkey = key = umg->mg_obj = sv_mortalcopy(umg->mg_obj);
1695 & (HV_FETCH_ISSTORE|HV_FETCH_ISEXISTS|HV_FETCH_LVALUE|HV_DELETE)) {
1698 vmg_cb_call2(w->cb_fetch, w->opinfo | VMG_CB_CALL_GUARD, sv,
1701 case HV_FETCH_ISSTORE:
1702 case HV_FETCH_LVALUE:
1703 case (HV_FETCH_ISSTORE|HV_FETCH_LVALUE):
1705 vmg_cb_call2(w->cb_store, w->opinfo | VMG_CB_CALL_GUARD, sv,
1708 case HV_FETCH_ISEXISTS:
1710 vmg_cb_call2(w->cb_exists, w->opinfo | VMG_CB_CALL_GUARD, sv,
1715 vmg_cb_call2(w->cb_delete, w->opinfo | VMG_CB_CALL_GUARD, sv,
1721 if (SvRMAGICAL(sv) && !tied && !(action & (HV_FETCH_ISSTORE|HV_DELETE))) {
1722 /* Temporarily hide the RMAGICAL flag of the hash so that it isn't mistaken
1723 * for a tied hash by the rest of hv_common. It will be reset by
1724 * the op_ppaddr of a new fake op injected between the current and the next
1727 #if VMG_RESET_RMG_NEEDS_TRAMPOLINE
1731 PL_op = vmg_trampoline_bump(&MY_CXT.reset_rmg, sv, PL_op);
1733 #else /* !VMG_RESET_RMG_NEEDS_TRAMPOLINE */
1735 OP *nop = PL_op->op_next;
1738 if (nop && nop->op_ppaddr == vmg_pp_reset_rmg) {
1739 svop = (SVOP *) nop;
1741 NewOp(1101, svop, 1, SVOP);
1742 svop->op_type = OP_STUB;
1743 svop->op_ppaddr = vmg_pp_reset_rmg;
1744 svop->op_next = nop;
1746 svop->op_private = 0;
1748 PL_op->op_next = (OP *) svop;
1753 #endif /* VMG_RESET_RMG_NEEDS_TRAMPOLINE */
1761 #endif /* VMG_UVAR */
1763 /* --- Module setup/teardown ----------------------------------------------- */
1767 static I32 vmg_loaded = 0;
1769 /* We must use preexisting global mutexes or we will never be able to destroy
1771 # if VMG_HAS_PERL(5, 9, 3)
1772 # define VMG_LOADED_LOCK MUTEX_LOCK(&PL_my_ctx_mutex)
1773 # define VMG_LOADED_UNLOCK MUTEX_UNLOCK(&PL_my_ctx_mutex)
1775 # define VMG_LOADED_LOCK OP_REFCNT_LOCK
1776 # define VMG_LOADED_UNLOCK OP_REFCNT_UNLOCK
1779 static void vmg_global_teardown_late_locked(pTHX) {
1780 #define vmg_global_teardown_late_locked() vmg_global_teardown_late_locked(aTHX)
1781 MUTEX_DESTROY(&vmg_op_name_init_mutex);
1782 MUTEX_DESTROY(&vmg_vtable_refcount_mutex);
1787 static int vmg_global_teardown_free(pTHX_ SV *sv, MAGIC *mg) {
1790 if (vmg_loaded == 0)
1791 vmg_global_teardown_late_locked();
1798 static MGVTBL vmg_global_teardown_vtbl = {
1803 vmg_global_teardown_free
1815 static signed char vmg_destruct_level(pTHX) {
1816 #define vmg_destruct_level() vmg_destruct_level(aTHX)
1819 lvl = PL_perl_destruct_level;
1823 const char *s = PerlEnv_getenv("PERL_DESTRUCT_LEVEL");
1826 #if VMG_HAS_PERL(5, 21, 3)
1827 if (strEQ(s, "-1")) {
1830 # if VMG_HAS_PERL(5, 21, 10)
1832 if (Perl_grok_atoUV(s, &uv, NULL) && uv <= INT_MAX)
1836 # else /* VMG_HAS_PERL(5, 21, 3) && !VMG_HAS_PERL(5, 21, 10) */
1837 i = Perl_grok_atou(s, NULL);
1840 #else /* !VMG_HAS_PERL(5, 21, 3) */
1852 #endif /* VMG_THREADSAFE */
1854 static void vmg_teardown(pTHX_ void *param) {
1860 if (vmg_loaded == 1) {
1862 if (vmg_destruct_level() == 0) {
1863 vmg_global_teardown_late_locked();
1866 PL_strtab = newHV();
1867 vmg_sv_magicext((SV *) PL_strtab, NULL, &vmg_global_teardown_vtbl, NULL, 0);
1870 VMG_ASSERT(vmg_loaded > 1);
1877 if (MY_CXT.depth == 0 && MY_CXT.freed_tokens) {
1878 vmg_magic_chain_free(MY_CXT.freed_tokens, NULL);
1879 MY_CXT.freed_tokens = NULL;
1885 static void vmg_setup(pTHX) {
1886 #define vmg_setup() vmg_setup(aTHX)
1894 if (vmg_loaded == 0) {
1895 MUTEX_INIT(&vmg_vtable_refcount_mutex);
1896 MUTEX_INIT(&vmg_op_name_init_mutex);
1899 VMG_ASSERT(vmg_loaded > 0);
1906 for (c = OPc_NULL; c < OPc_MAX; ++c)
1907 MY_CXT.b__op_stashes[c] = NULL;
1910 MY_CXT.freed_tokens = NULL;
1912 #if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
1913 vmg_trampoline_init(&MY_CXT.propagate_errsv, vmg_pp_propagate_errsv);
1915 #if VMG_RESET_RMG_NEEDS_TRAMPOLINE
1916 vmg_trampoline_init(&MY_CXT.reset_rmg, vmg_pp_reset_rmg);
1919 stash = gv_stashpv(__PACKAGE__, 1);
1920 newCONSTSUB(stash, "MGf_COPY", newSVuv(MGf_COPY));
1921 newCONSTSUB(stash, "MGf_DUP", newSVuv(MGf_DUP));
1922 newCONSTSUB(stash, "MGf_LOCAL", newSVuv(MGf_LOCAL));
1923 newCONSTSUB(stash, "VMG_UVAR", newSVuv(VMG_UVAR));
1924 newCONSTSUB(stash, "VMG_COMPAT_SCALAR_LENGTH_NOLEN",
1925 newSVuv(VMG_COMPAT_SCALAR_LENGTH_NOLEN));
1926 newCONSTSUB(stash, "VMG_COMPAT_SCALAR_NOLEN",
1927 newSVuv(VMG_COMPAT_SCALAR_NOLEN));
1928 newCONSTSUB(stash, "VMG_COMPAT_ARRAY_PUSH_NOLEN",
1929 newSVuv(VMG_COMPAT_ARRAY_PUSH_NOLEN));
1930 newCONSTSUB(stash, "VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID",
1931 newSVuv(VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID));
1932 newCONSTSUB(stash, "VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID",
1933 newSVuv(VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID));
1934 newCONSTSUB(stash, "VMG_COMPAT_ARRAY_UNDEF_CLEAR",
1935 newSVuv(VMG_COMPAT_ARRAY_UNDEF_CLEAR));
1936 newCONSTSUB(stash, "VMG_COMPAT_HASH_DELETE_NOUVAR_VOID",
1937 newSVuv(VMG_COMPAT_HASH_DELETE_NOUVAR_VOID));
1938 newCONSTSUB(stash, "VMG_COMPAT_CODE_COPY_CLONE",
1939 newSVuv(VMG_COMPAT_CODE_COPY_CLONE));
1940 newCONSTSUB(stash, "VMG_COMPAT_GLOB_GET", newSVuv(VMG_COMPAT_GLOB_GET));
1941 newCONSTSUB(stash, "VMG_PERL_PATCHLEVEL", newSVuv(VMG_PERL_PATCHLEVEL));
1942 newCONSTSUB(stash, "VMG_THREADSAFE", newSVuv(VMG_THREADSAFE));
1943 newCONSTSUB(stash, "VMG_FORKSAFE", newSVuv(VMG_FORKSAFE));
1944 newCONSTSUB(stash, "VMG_OP_INFO_NAME", newSVuv(VMG_OP_INFO_NAME));
1945 newCONSTSUB(stash, "VMG_OP_INFO_OBJECT", newSVuv(VMG_OP_INFO_OBJECT));
1947 call_atexit(vmg_teardown, NULL);
1952 /* --- Macros for the XS section ------------------------------------------- */
1955 # define VMG_CVOK(C) \
1956 ((CvISXSUB(C) ? (void *) CvXSUB(C) : (void *) CvROOT(C)) ? 1 : 0)
1958 # define VMG_CVOK(C) (CvROOT(C) || CvXSUB(C))
1961 #define VMG_CBOK(S) ((SvTYPE(S) == SVt_PVCV) ? VMG_CVOK(S) : SvOK(S))
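/* A candidate callback is accepted if it is a CV with a body (an op tree or an
 * XSUB), or any other defined value. */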
1963 #define VMG_SET_CB(S, N) { \
1965 if (SvOK(cb) && SvROK(cb)) { \
1968 SvREFCNT_inc_simple_void(cb); \
1977 #define VMG_SET_SVT_CB(S, N) { \
1979 if (SvOK(cb) && SvROK(cb)) { \
1981 if (VMG_CBOK(cb)) { \
1982 t->svt_ ## N = vmg_svt_ ## N; \
1983 SvREFCNT_inc_simple_void(cb); \
1985 t->svt_ ## N = vmg_svt_ ## N ## _noop; \
1989 t->svt_ ## N = NULL; \
1995 /* --- XS ------------------------------------------------------------------ */
1997 MODULE = Variable::Magic PACKAGE = Variable::Magic
2012 U32 had_b__op_stash = 0;
2018 for (c = OPc_NULL; c < OPc_MAX; ++c) {
2019 if (MY_CXT.b__op_stashes[c])
2020 had_b__op_stash |= (((U32) 1) << c);
2022 old_depth = MY_CXT.depth;
2026 for (c = OPc_NULL; c < OPc_MAX; ++c) {
2027 MY_CXT.b__op_stashes[c] = (had_b__op_stash & (((U32) 1) << c))
2028 ? gv_stashpv(vmg_opclassnames[c], 1) : NULL;
2030 MY_CXT.depth = old_depth;
2031 MY_CXT.freed_tokens = NULL;
2033 VMG_ASSERT(vmg_loaded > 0);
2039 #endif /* VMG_THREADSAFE */
2046 SV *op_info, *copy_key;
2052 #endif /* MGf_LOCAL */
2055 #endif /* VMG_UVAR */
2056 ) { croak(vmg_wrongargnum); }
2059 w = vmg_wizard_alloc(SvOK(op_info) ? SvUV(op_info) : 0);
2060 t = vmg_vtable_vtbl(w->vtable);
2062 VMG_SET_CB(ST(i++), data);
2064 VMG_SET_SVT_CB(ST(i++), get);
2065 VMG_SET_SVT_CB(ST(i++), set);
2066 VMG_SET_SVT_CB(ST(i++), len);
2067 VMG_SET_SVT_CB(ST(i++), clear);
2068 VMG_SET_SVT_CB(ST(i++), free);
2069 VMG_SET_SVT_CB(ST(i++), copy);
2070 /* VMG_SET_SVT_CB(ST(i++), dup); */
2075 VMG_SET_SVT_CB(ST(i++), local);
2076 #endif /* MGf_LOCAL */
2078 VMG_SET_CB(ST(i++), fetch);
2079 VMG_SET_CB(ST(i++), store);
2080 VMG_SET_CB(ST(i++), exists);
2081 VMG_SET_CB(ST(i++), delete);
2084 if (w->cb_fetch || w->cb_store || w->cb_exists || w->cb_delete)
2085 w->uvar = SvTRUE(copy_key) ? 2 : 1;
2086 #endif /* VMG_UVAR */
2088 RETVAL = newRV_noinc(vmg_wizard_sv_new(w));
2092 SV *cast(SV *sv, SV *wiz, ...)
2093 PROTOTYPE: \[$@%&*]$@
2095 const vmg_wizard *w = NULL;
2104 wiz = SvRV_const(wiz);
2105 w = vmg_wizard_from_sv(wiz);
2108 croak(vmg_invalid_wiz);
2109 RETVAL = newSVuv(vmg_cast(SvRV(sv), w, wiz, args, i));
2114 getdata(SV *sv, SV *wiz)
2115 PROTOTYPE: \[$@%&*]$
2117 const vmg_wizard *w = NULL;
2121 w = vmg_wizard_from_sv(SvRV_const(wiz));
2123 croak(vmg_invalid_wiz);
2124 data = vmg_data_get(SvRV(sv), w);
2130 SV *dispell(SV *sv, SV *wiz)
2131 PROTOTYPE: \[$@%&*]$
2133 const vmg_wizard *w = NULL;
2136 w = vmg_wizard_from_sv(SvRV_const(wiz));
2138 croak(vmg_invalid_wiz);
2139 RETVAL = newSVuv(vmg_dispell(SvRV(sv), w));