1 /* This file is part of the Variable::Magic Perl module.
2 * See http://search.cpan.org/dist/Variable-Magic/ */
4 #include <stdarg.h> /* <va_list>, va_{start,arg,end}, ... */
6 #include <stdio.h> /* sprintf() */
8 #define PERL_NO_GET_CONTEXT
13 #define __PACKAGE__ "Variable::Magic"
17 # define VOID2(T, P) static_cast<T>(P)
19 # define VOID2(T, P) (P)
22 #ifndef VMG_PERL_PATCHLEVEL
24 # define VMG_PERL_PATCHLEVEL PERL_PATCHNUM
26 # define VMG_PERL_PATCHLEVEL 0
30 #define VMG_HAS_PERL(R, V, S) (PERL_REVISION > (R) || (PERL_REVISION == (R) && (PERL_VERSION > (V) || (PERL_VERSION == (V) && (PERL_SUBVERSION >= (S))))))
32 #define VMG_HAS_PERL_BRANCH(R, V, S) (PERL_REVISION == (R) && PERL_VERSION == (V) && PERL_SUBVERSION >= (S))
34 #define VMG_HAS_PERL_MAINT(R, V, S, P) (PERL_REVISION == (R) && PERL_VERSION == (V) && (VMG_PERL_PATCHLEVEL >= (P) || (!VMG_PERL_PATCHLEVEL && PERL_SUBVERSION >= (S))))
36 /* --- Threads and multiplicity -------------------------------------------- */
46 /* Safe unless stated otherwise in Makefile.PL */
48 # define VMG_FORKSAFE 1
51 #ifndef VMG_MULTIPLICITY
52 # if defined(MULTIPLICITY)
53 # define VMG_MULTIPLICITY 1
55 # define VMG_MULTIPLICITY 0
59 # ifndef PERL_IMPLICIT_CONTEXT
60 # error MULTIPLICITY builds must set PERL_IMPLICIT_CONTEXT
64 #if VMG_MULTIPLICITY && defined(USE_ITHREADS) && defined(dMY_CXT) && defined(MY_CXT) && defined(START_MY_CXT) && defined(MY_CXT_INIT) && (defined(MY_CXT_CLONE) || defined(dMY_CXT_SV))
65 # ifndef VMG_THREADSAFE
66 # define VMG_THREADSAFE 1
69 # define MY_CXT_CLONE \
71 my_cxt_t *my_cxtp = (my_cxt_t*)SvPVX(newSV(sizeof(my_cxt_t)-1)); \
72 Copy(INT2PTR(my_cxt_t*, SvUV(my_cxt_sv)), my_cxtp, 1, my_cxt_t); \
73 sv_setuv(my_cxt_sv, PTR2UV(my_cxtp))
76 # undef VMG_THREADSAFE
77 # define VMG_THREADSAFE 0
79 # define dMY_CXT dNOOP
81 # define MY_CXT vmg_globaldata
83 # define START_MY_CXT STATIC my_cxt_t MY_CXT;
85 # define MY_CXT_INIT NOOP
87 # define MY_CXT_CLONE NOOP
91 # define VMG_LOCK(M) MUTEX_LOCK(M)
92 # define VMG_UNLOCK(M) MUTEX_UNLOCK(M)
95 # define VMG_UNLOCK(M)
98 /* --- Compatibility ------------------------------------------------------- */
101 # define Newx(v, n, c) New(0, v, n, c)
105 # define SvMAGIC_set(sv, val) (SvMAGIC(sv) = (val))
109 # define SvRV_const(sv) SvRV((SV *) sv)
112 #ifndef SvREFCNT_inc_simple_void
113 # define SvREFCNT_inc_simple_void(sv) ((void) SvREFCNT_inc(sv))
117 # define mPUSHu(U) PUSHs(sv_2mortal(newSVuv(U)))
120 #ifndef PERL_MAGIC_ext
121 # define PERL_MAGIC_ext '~'
124 #ifndef PERL_MAGIC_tied
125 # define PERL_MAGIC_tied 'P'
132 #ifndef IN_PERL_COMPILETIME
133 # define IN_PERL_COMPILETIME (PL_curcop == &PL_compiling)
137 # define OP_NAME(O) (PL_op_name[(O)->op_type])
141 # define OP_CLASS(O) (PL_opargs[(O)->op_type] & OA_CLASS_MASK)
145 # define VMG_ASSERT(C) assert(C)
147 # define VMG_ASSERT(C)
150 /* uvar magic and Hash::Util::FieldHash were committed with 28419, but we only
151 * enable them on 5.10 */
152 #if VMG_HAS_PERL(5, 10, 0)
158 #if VMG_HAS_PERL_MAINT(5, 11, 0, 32969) || VMG_HAS_PERL(5, 12, 0)
159 # define VMG_COMPAT_SCALAR_LENGTH_NOLEN 1
161 # define VMG_COMPAT_SCALAR_LENGTH_NOLEN 0
164 #if VMG_HAS_PERL(5, 17, 4)
165 # define VMG_COMPAT_SCALAR_NOLEN 1
167 # define VMG_COMPAT_SCALAR_NOLEN 0
170 /* Applied to dev-5.9 as 25854, integrated to maint-5.8 as 28160, partially
171 * reverted to dev-5.11 as 9cdcb38b */
172 #if VMG_HAS_PERL_MAINT(5, 8, 9, 28160) || VMG_HAS_PERL_MAINT(5, 9, 3, 25854) || VMG_HAS_PERL(5, 10, 0)
173 # ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN
174 # if VMG_HAS_PERL(5, 11, 0)
175 # define VMG_COMPAT_ARRAY_PUSH_NOLEN 0
177 # define VMG_COMPAT_ARRAY_PUSH_NOLEN 1
180 # ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID
181 # define VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID 1
184 # ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN
185 # define VMG_COMPAT_ARRAY_PUSH_NOLEN 0
187 # ifndef VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID
188 # define VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID 0
192 /* Applied to dev-5.11 as 34908 */
193 #if VMG_HAS_PERL_MAINT(5, 11, 0, 34908) || VMG_HAS_PERL(5, 12, 0)
194 # define VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID 1
196 # define VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID 0
199 /* Applied to dev-5.9 as 31473 (see #43357), integrated to maint-5.8 as 32542 */
200 #if VMG_HAS_PERL_MAINT(5, 8, 9, 32542) || VMG_HAS_PERL_MAINT(5, 9, 5, 31473) || VMG_HAS_PERL(5, 10, 0)
201 # define VMG_COMPAT_ARRAY_UNDEF_CLEAR 1
203 # define VMG_COMPAT_ARRAY_UNDEF_CLEAR 0
206 #if VMG_HAS_PERL(5, 11, 0)
207 # define VMG_COMPAT_HASH_DELETE_NOUVAR_VOID 1
209 # define VMG_COMPAT_HASH_DELETE_NOUVAR_VOID 0
212 #if VMG_HAS_PERL(5, 17, 0)
213 # define VMG_COMPAT_CODE_COPY_CLONE 1
215 # define VMG_COMPAT_CODE_COPY_CLONE 0
218 #if VMG_HAS_PERL(5, 13, 2)
219 # define VMG_COMPAT_GLOB_GET 1
221 # define VMG_COMPAT_GLOB_GET 0
224 #define VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE (VMG_HAS_PERL(5, 10, 0) && !VMG_HAS_PERL(5, 10, 1))
226 /* NewOp() isn't public in perl 5.8.0. */
227 #define VMG_RESET_RMG_NEEDS_TRAMPOLINE (VMG_UVAR && (VMG_THREADSAFE || !VMG_HAS_PERL(5, 8, 1)))
229 /* ... Bug-free mg_magical ................................................. */
231 /* See the discussion at http://www.xray.mpe.mpg.de/mailing-lists/perl5-porters/2008-01/msg00036.html */
233 #if VMG_HAS_PERL(5, 11, 3)
235 #define vmg_mg_magical(S) mg_magical(S)
/* Recompute sv's GMG/SMG/RMG magic flags by walking its magic chain.
 * Compiled only on perls older than 5.11.3, where core mg_magical() was
 * buggy (see the p5p thread referenced above).
 * NOTE(review): interior lines are elided in this view — the do/while body
 * that sets the flags is not fully visible; bytes below are unchanged. */
239 static void vmg_mg_magical(SV *sv) {
243 if ((mg = SvMAGIC(sv))) {
/* Inspect each magic entry's vtable to decide which flags to raise. */
245 const MGVTBL* const vtbl = mg->mg_virtual;
247 if (vtbl->svt_get && !(mg->mg_flags & MGf_GSKIP))
254 } while ((mg = mg->mg_moremagic));
/* If no get/set magic was found, presumably RMG is also cleared here. */
255 if (!(SvFLAGS(sv) & (SVs_GMG|SVs_SMG)))
262 /* --- Trampoline ops ------------------------------------------------------ */
264 #define VMG_NEEDS_TRAMPOLINE VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE || VMG_RESET_RMG_NEEDS_TRAMPOLINE
266 #if VMG_NEEDS_TRAMPOLINE
/* A trampoline is a pair of fake OPs (temp -> target) spliced into the op
 * tree so that a callback (cb) runs right after the current op, carrying an
 * SV payload in target.op_sv.  Used to defer work (ERRSV propagation,
 * RMAGICAL restore) past ops we cannot interrupt. */
273 static void vmg_trampoline_init(vmg_trampoline *t, OP *(*cb)(pTHX)) {
274 t->temp.op_type = OP_STUB;
275 t->temp.op_ppaddr = 0;
276 t->temp.op_next = (OP *) &t->target;
277 t->temp.op_flags = 0;
278 t->temp.op_private = 0;
280 t->target.op_type = OP_STUB;
281 t->target.op_ppaddr = cb;
282 t->target.op_next = NULL;
283 t->target.op_flags = 0;
284 t->target.op_private = 0;
285 t->target.op_sv = NULL;
/* Arm the trampoline: store the payload sv and make the target resume at
 * o->op_next, so execution flows temp -> target(cb) -> original successor.
 * NOTE(review): the return statement is elided from this view. */
288 static OP *vmg_trampoline_bump(pTHX_ vmg_trampoline *t, SV *sv, OP *o) {
289 #define vmg_trampoline_bump(T, S, O) vmg_trampoline_bump(aTHX_ (T), (S), (O))
291 t->temp.op_next = (OP *) &t->target;
293 t->target.op_sv = sv;
294 t->target.op_next = o->op_next;
299 #endif /* VMG_NEEDS_TRAMPOLINE */
301 /* --- Cleaner version of sv_magicext() ------------------------------------ */
/* Thin wrapper around sv_magicext() that attaches PERL_MAGIC_ext magic and
 * then raises the COPY/DUP/LOCAL flags the plain call does not set for us.
 * NOTE(review): the #if guards around each flag assignment are elided here;
 * presumably each mg_flags line is conditional on the vtable slot existing. */
303 static MAGIC *vmg_sv_magicext(pTHX_ SV *sv, SV *obj, const MGVTBL *vtbl, const void *ptr, I32 len) {
304 #define vmg_sv_magicext(S, O, V, P, L) vmg_sv_magicext(aTHX_ (S), (O), (V), (P), (L))
307 mg = sv_magicext(sv, obj, PERL_MAGIC_ext, vtbl, ptr, len);
314 mg->mg_flags |= MGf_COPY;
317 mg->mg_flags |= MGf_DUP;
321 mg->mg_flags |= MGf_LOCAL;
322 #endif /* MGf_LOCAL */
/* MGf_REFCOUNTED handling follows (elided): obj refcount bookkeeping. */
324 if (mg->mg_flags & MGf_REFCOUNTED)
330 /* --- Safe version of call_sv() ------------------------------------------- */
/* call_sv() wrapper that is safe to use from magic callbacks:
 *  - runs the callback under G_EVAL and saves/restores ERRSV around it;
 *  - backs up the next perl context frame, which call_sv() would clobber;
 *  - on error, reports at compile time via PL_errors/error_count, or warns,
 *    then invokes the optional cleanup(ud) hook before rethrowing/continuing.
 * NOTE(review): several lines (croak path, return) are elided in this view. */
332 static I32 vmg_call_sv(pTHX_ SV *sv, I32 flags, int (*cleanup)(pTHX_ void *), void *ud) {
333 #define vmg_call_sv(S, F, C, U) vmg_call_sv(aTHX_ (S), (F), (C), (U))
335 PERL_CONTEXT saved_cx;
/* Preserve the current value of $@ so the callback cannot clobber it. */
339 old_err = newSVsv(ERRSV);
340 sv_setsv(ERRSV, &PL_sv_undef);
343 cxix = (cxstack_ix < cxstack_max) ? (cxstack_ix + 1) : Perl_cxinc(aTHX);
344 /* The last popped context will be reused by call_sv(), but our callers may
345 * still need its previous value. Back it up so that it isn't clobbered. */
346 saved_cx = cxstack[cxix];
348 ret = call_sv(sv, flags | G_EVAL);
350 cxstack[cxix] = saved_cx;
/* Error path: the callback died ($@ is true). */
353 SvREFCNT_dec(old_err);
355 if (IN_PERL_COMPILETIME) {
/* At compile time, accumulate into PL_errors (or warn) and bump the
 * parser error count instead of croaking outright. */
358 sv_catsv(PL_errors, ERRSV);
360 Perl_warn(aTHX_ "%s", SvPV_nolen(ERRSV));
363 #if VMG_HAS_PERL(5, 10, 0) || defined(PL_parser)
365 ++PL_parser->error_count;
366 #elif defined(PL_error_count)
/* Give the caller a chance to release resources before the error
 * propagates; cleanup() returning true presumably allows the croak. */
372 if (!cleanup || cleanup(aTHX_ ud))
/* Success path: restore the saved $@. */
377 sv_setsv(ERRSV, old_err);
378 SvREFCNT_dec(old_err);
385 /* --- Stolen chunk of B --------------------------------------------------- */
400 #if VMG_HAS_PERL(5, 21, 5)
403 #if VMG_HAS_PERL(5, 21, 7)
409 static const char *const vmg_opclassnames[] = {
422 #if VMG_HAS_PERL(5, 21, 5)
425 #if VMG_HAS_PERL(5, 21, 7)
/* Classify an OP into its B::*OP class (opclass enum), mirroring the logic
 * of B.xs cc_opclass().  Needed to bless op objects for VMG_OP_INFO_OBJECT.
 * NOTE(review): many case labels and returns are elided from this view; the
 * structure follows the stolen B chunk noted in the section header above. */
431 static opclass vmg_opclass(pTHX_ const OP *o) {
432 #define vmg_opclass(O) vmg_opclass(aTHX_ (O))
/* op_type 0 (OP_NULL): the original type is stashed in op_targ. */
438 if (o->op_type == 0) {
439 #if VMG_HAS_PERL(5, 21, 7)
440 if (o->op_targ == OP_NEXTSTATE || o->op_targ == OP_DBSTATE)
443 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
446 if (o->op_type == OP_SASSIGN)
447 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
449 if (o->op_type == OP_AELEMFAST) {
450 #if PERL_VERSION <= 14
451 if (o->op_flags & OPf_SPECIAL)
/* GV-ish ops: PADOP under ithreads, SVOP otherwise (elided branches). */
463 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_RCATLINE)
467 switch (OP_CLASS(o)) {
484 case OA_PVOP_OR_SVOP:
/* tr/// ops with UTF8 tables are SVOPs/PADOPs, custom ops excluded. */
486 #if VMG_HAS_PERL(5, 13, 7)
487 (o->op_type != OP_CUSTOM) &&
489 (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF)))
490 #if defined(USE_ITHREADS) && VMG_HAS_PERL(5, 8, 9)
491 ? OPc_PADOP : OPc_PVOP;
493 ? OPc_SVOP : OPc_PVOP;
499 case OA_BASEOP_OR_UNOP:
500 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
/* OA_FILESTATOP: UNOP with kids, PADOP/SVOP when OPf_REF, else BASEOP. */
502 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
504 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
506 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
/* OA_LOOPEXOP and friends: classification by STACKED/SPECIAL flags. */
509 if (o->op_flags & OPf_STACKED)
511 else if (o->op_flags & OPf_SPECIAL)
515 #if VMG_HAS_PERL(5, 21, 5)
519 #if VMG_HAS_PERL(5, 21, 7)
528 /* --- Error messages ------------------------------------------------------ */
530 static const char vmg_invalid_wiz[] = "Invalid wizard object";
531 static const char vmg_wrongargnum[] = "Wrong number of arguments";
533 /* --- Context-safe global data -------------------------------------------- */
535 #define MY_CXT_KEY __PACKAGE__ "::_guts" XS_VERSION
538 HV *b__op_stashes[OPc_MAX];
541 #if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
542 vmg_trampoline propagate_errsv;
544 #if VMG_RESET_RMG_NEEDS_TRAMPOLINE
545 vmg_trampoline reset_rmg;
551 /* --- <vmg_vtable> structure ---------------------------------------------- */
/* Threaded build: a vmg_vtable is a refcounted wrapper around an MGVTBL,
 * allocated from shared memory so it can outlive any single interpreter.
 * The refcount is guarded by vmg_vtable_refcount_mutex. */
560 static vmg_vtable *vmg_vtable_alloc(pTHX) {
561 #define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
564 t = VOID2(vmg_vtable *, PerlMemShared_malloc(sizeof *t));
566 t->vtbl = VOID2(MGVTBL *, PerlMemShared_malloc(sizeof *t->vtbl));
572 #define vmg_vtable_vtbl(T) (T)->vtbl
574 static perl_mutex vmg_vtable_refcount_mutex;
/* Share the vtable with a new thread: bump the refcount under the mutex. */
576 static vmg_vtable *vmg_vtable_dup(pTHX_ vmg_vtable *t) {
577 #define vmg_vtable_dup(T) vmg_vtable_dup(aTHX_ (T))
578 VMG_LOCK(&vmg_vtable_refcount_mutex);
580 VMG_UNLOCK(&vmg_vtable_refcount_mutex);
/* Drop one reference; free the shared memory when it reaches zero. */
585 static void vmg_vtable_free(pTHX_ vmg_vtable *t) {
586 #define vmg_vtable_free(T) vmg_vtable_free(aTHX_ (T))
589 VMG_LOCK(&vmg_vtable_refcount_mutex);
590 refcount = --t->refcount;
591 VMG_UNLOCK(&vmg_vtable_refcount_mutex);
594 PerlMemShared_free(t->vtbl);
595 PerlMemShared_free(t);
599 #else /* VMG_THREADSAFE */
/* Unthreaded build: a vmg_vtable is just a plain heap-allocated MGVTBL,
 * with no refcounting needed. */
601 typedef MGVTBL vmg_vtable;
603 static vmg_vtable *vmg_vtable_alloc(pTHX) {
604 #define vmg_vtable_alloc() vmg_vtable_alloc(aTHX)
607 Newx(t, 1, vmg_vtable);
612 #define vmg_vtable_vtbl(T) ((MGVTBL *) (T))
614 #define vmg_vtable_free(T) Safefree(T)
616 #endif /* !VMG_THREADSAFE */
618 /* --- <vmg_wizard> structure ---------------------------------------------- */
627 SV *cb_get, *cb_set, *cb_len, *cb_clear, *cb_free;
632 #endif /* MGf_LOCAL */
634 SV *cb_fetch, *cb_store, *cb_exists, *cb_delete;
635 #endif /* VMG_UVAR */
638 static void vmg_op_info_init(pTHX_ unsigned int opinfo);
/* Allocate a wizard: the per-wizard record holding the magic vtable plus
 * one SV* per user callback (get/set/len/clear/free/copy/dup/local and the
 * uvar fetch/store/exists/delete set when VMG_UVAR is enabled). */
640 static vmg_wizard *vmg_wizard_alloc(pTHX_ UV opinfo) {
641 #define vmg_wizard_alloc(O) vmg_wizard_alloc(aTHX_ (O))
644 Newx(w, 1, vmg_wizard);
/* opinfo is clamped into a U8; 255 is the saturation value. */
647 w->opinfo = (U8) ((opinfo < 255) ? opinfo : 255);
649 vmg_op_info_init(aTHX_ w->opinfo);
651 w->vtable = vmg_vtable_alloc();
/* Release a wizard and every callback SV it owns. */
656 static void vmg_wizard_free(pTHX_ vmg_wizard *w) {
657 #define vmg_wizard_free(W) vmg_wizard_free(aTHX_ (W))
661 /* During global destruction, any of the callbacks may already have been
662 * freed, so we can't rely on still being able to access them. */
664 SvREFCNT_dec(w->cb_data);
665 SvREFCNT_dec(w->cb_get);
666 SvREFCNT_dec(w->cb_set);
667 SvREFCNT_dec(w->cb_len);
668 SvREFCNT_dec(w->cb_clear);
669 SvREFCNT_dec(w->cb_free);
670 SvREFCNT_dec(w->cb_copy);
672 SvREFCNT_dec(w->cb_dup);
675 SvREFCNT_dec(w->cb_local);
676 #endif /* MGf_LOCAL */
678 SvREFCNT_dec(w->cb_fetch);
679 SvREFCNT_dec(w->cb_store);
680 SvREFCNT_dec(w->cb_exists);
681 SvREFCNT_dec(w->cb_delete);
682 #endif /* VMG_UVAR */
685 /* PerlMemShared_free() and Safefree() are still fine during global
686 * destruction though. */
687 vmg_vtable_free(w->vtable);
/* Duplicate each callback SV into the new thread's interpreter. */
695 #define VMG_CLONE_CB(N) \
696 z->cb_ ## N = (w->cb_ ## N) ? SvREFCNT_inc(sv_dup(w->cb_ ## N, params)) \
/* Thread-clone a wizard: copy the record, share the vtable, dup callbacks. */
699 static const vmg_wizard *vmg_wizard_dup(pTHX_ const vmg_wizard *w, CLONE_PARAMS *params) {
700 #define vmg_wizard_dup(W, P) vmg_wizard_dup(aTHX_ (W), (P))
706 Newx(z, 1, vmg_wizard);
708 z->vtable = vmg_vtable_dup(w->vtable);
710 z->opinfo = w->opinfo;
722 #endif /* MGf_LOCAL */
726 VMG_CLONE_CB(exists);
727 VMG_CLONE_CB(delete);
728 #endif /* VMG_UVAR */
733 #endif /* VMG_THREADSAFE */
735 #define vmg_wizard_id(W) PTR2IV(vmg_vtable_vtbl((W)->vtable))
737 /* --- Wizard SV objects --------------------------------------------------- */
/* Wizard SV objects: a wizard is exposed to Perl as an SV whose magic
 * (threaded builds) or IV slot (unthreaded builds) carries the vmg_wizard*.
 * This free hook destroys the underlying wizard with its SV. */
739 static int vmg_wizard_sv_free(pTHX_ SV *sv, MAGIC *mg) {
740 vmg_wizard_free((vmg_wizard *) mg->mg_ptr);
/* On thread clone, replace the magic payload with a per-thread wizard dup. */
747 static int vmg_wizard_sv_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
748 mg->mg_ptr = (char *) vmg_wizard_dup((const vmg_wizard *) mg->mg_ptr, params);
753 #endif /* VMG_THREADSAFE */
/* Private vtable identifying "this SV is a wizard object" magic. */
755 static MGVTBL vmg_wizard_sv_vtbl = {
760 vmg_wizard_sv_free, /* free */
763 vmg_wizard_sv_dup, /* dup */
769 #endif /* MGf_LOCAL */
/* Wrap a wizard into a fresh SV (IV holds the pointer; ext magic keeps
 * ownership so vmg_wizard_sv_free fires when the SV dies). */
772 static SV *vmg_wizard_sv_new(pTHX_ const vmg_wizard *w) {
773 #define vmg_wizard_sv_new(W) vmg_wizard_sv_new(aTHX_ (W))
779 wiz = newSViv(PTR2IV(w));
782 vmg_sv_magicext(wiz, NULL, &vmg_wizard_sv_vtbl, w, 0);
791 #define vmg_sv_has_wizard_type(S) (SvTYPE(S) >= SVt_PVMG)
/* Recover the wizard pointer by scanning the SV's magic chain for our
 * private vtable (threaded variant; the IV-based variant follows). */
793 static const vmg_wizard *vmg_wizard_from_sv_nocheck(const SV *wiz) {
796 for (mg = SvMAGIC(wiz); mg; mg = mg->mg_moremagic) {
797 if (mg->mg_type == PERL_MAGIC_ext && mg->mg_virtual == &vmg_wizard_sv_vtbl)
798 return (const vmg_wizard *) mg->mg_ptr;
804 #else /* VMG_THREADSAFE */
806 #define vmg_sv_has_wizard_type(S) SvIOK(S)
808 #define vmg_wizard_from_sv_nocheck(W) INT2PTR(const vmg_wizard *, SvIVX(W))
810 #endif /* !VMG_THREADSAFE */
812 #define vmg_wizard_from_sv(W) (vmg_sv_has_wizard_type(W) ? vmg_wizard_from_sv_nocheck(W) : NULL)
/* Given cast magic (mg_ptr is an SV key when mg_len == HEf_SVKEY), map it
 * back to its owning wizard, or NULL when it isn't ours. */
814 static const vmg_wizard *vmg_wizard_from_mg(const MAGIC *mg) {
815 if (mg->mg_type == PERL_MAGIC_ext && mg->mg_len == HEf_SVKEY) {
816 SV *sv = (SV *) mg->mg_ptr;
818 if (vmg_sv_has_wizard_type(sv))
819 return vmg_wizard_from_sv_nocheck(sv);
825 #define vmg_wizard_from_mg_nocheck(M) vmg_wizard_from_sv_nocheck((const SV *) (M)->mg_ptr)
827 /* --- User-level functions implementation --------------------------------- */
/* Find the magic entry on sv that was cast by wizard w (matched by the
 * wizard id, i.e. the vtable pointer), or NULL if sv has none. */
829 static const MAGIC *vmg_find(const SV *sv, const vmg_wizard *w) {
/* SVs below PVMG cannot carry magic at all — fast negative. */
833 if (SvTYPE(sv) < SVt_PVMG)
836 wid = vmg_wizard_id(w);
838 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
839 const vmg_wizard *z = vmg_wizard_from_mg(mg);
841 if (z && vmg_wizard_id(z) == wid)
848 /* ... Construct private data .............................................. */
/* Build the wizard's private data for sv by calling the user-supplied data
 * constructor ctor with (\sv, @args) in scalar context; returns the new SV.
 * NOTE(review): the stack setup/teardown lines are elided in this view. */
850 static SV *vmg_data_new(pTHX_ SV *ctor, SV *sv, SV **args, I32 items) {
851 #define vmg_data_new(C, S, A, I) vmg_data_new(aTHX_ (C), (S), (A), (I))
861 EXTEND(SP, items + 1);
862 PUSHs(sv_2mortal(newRV_inc(sv)));
863 for (i = 0; i < items; ++i)
867 vmg_call_sv(ctor, G_SCALAR, 0, NULL);
871 #if VMG_HAS_PERL(5, 8, 3)
872 SvREFCNT_inc_simple_void(nsv); /* Or it will be destroyed in FREETMPS */
874 nsv = sv_newref(nsv); /* Workaround some bug in SvREFCNT_inc() */
/* Fetch the private data attached to sv by wizard w (stored in mg_obj),
 * or NULL when sv carries no magic from this wizard. */
884 static SV *vmg_data_get(pTHX_ SV *sv, const vmg_wizard *w) {
885 #define vmg_data_get(S, W) vmg_data_get(aTHX_ (S), (W))
886 const MAGIC *mg = vmg_find(sv, w);
888 return mg ? mg->mg_obj : NULL;
891 /* ... Magic cast/dispell .................................................. */
895 static I32 vmg_svt_val(pTHX_ IV, SV *);
898 struct ufuncs new_uf;
899 struct ufuncs old_uf;
902 #endif /* VMG_UVAR */
/* Unlink mg from sv's magic chain (prevmagic/moremagic are its neighbours),
 * release what it owns, and either queue the token on MY_CXT.freed_tokens
 * for deferred destruction or detach it (elided branch decides which). */
904 static void vmg_mg_del(pTHX_ SV *sv, MAGIC *prevmagic, MAGIC *mg, MAGIC *moremagic) {
905 #define vmg_mg_del(S, P, M, N) vmg_mg_del(aTHX_ (S), (P), (M), (N))
909 prevmagic->mg_moremagic = moremagic;
/* mg was the head of the chain. */
911 SvMAGIC_set(sv, moremagic);
913 /* Destroy private data */
915 if (mg->mg_type == PERL_MAGIC_uvar) {
/* uvar magic owns a heap-allocated ufuncs/vmg_uvar_ud block. */
916 Safefree(mg->mg_ptr);
918 #endif /* VMG_UVAR */
919 if (mg->mg_obj != sv) {
920 SvREFCNT_dec(mg->mg_obj);
923 /* Unreference the wizard */
924 SvREFCNT_dec((SV *) mg->mg_ptr);
928 #endif /* VMG_UVAR */
931 mg->mg_moremagic = MY_CXT.freed_tokens;
932 MY_CXT.freed_tokens = mg;
934 mg->mg_moremagic = NULL;
/* Free a detached chain of magic tokens, skipping the one equal to `skip`
 * (which the caller, e.g. Perl_mg_free, will dispose of itself). */
939 static int vmg_magic_chain_free(pTHX_ MAGIC *mg, MAGIC *skip) {
940 #define vmg_magic_chain_free(M, S) vmg_magic_chain_free(aTHX_ (M), (S))
944 MAGIC *moremagic = mg->mg_moremagic;
/* Attach wizard w's magic to sv.  Builds the private data (if the wizard
 * has a data constructor), installs the ext magic keyed by the wizard SV,
 * and — for hashes with uvar callbacks — installs/merges a PERL_MAGIC_uvar
 * entry whose uf_val hook is vmg_svt_val.  Returns a success flag. */
957 static UV vmg_cast(pTHX_ SV *sv, const vmg_wizard *w, const SV *wiz, SV **args, I32 items) {
958 #define vmg_cast(S, W, WIZ, A, I) vmg_cast(aTHX_ (S), (W), (WIZ), (A), (I))
/* Remember the pre-cast GMAGICAL state for the hash workaround below. */
967 oldgmg = SvGMAGICAL(sv);
969 data = (w->cb_data) ? vmg_data_new(w->cb_data, sv, args, items) : NULL;
971 t = vmg_vtable_vtbl(w->vtable);
972 mg = vmg_sv_magicext(sv, data, t, wiz, HEf_SVKEY);
974 if (SvTYPE(sv) < SVt_PVHV)
/* From here on, sv is a hash (or bigger) and needs the uvar treatment. */
977 /* The GMAGICAL flag only says that a hash is tied or has uvar magic - get
978 * magic is actually never called for them. If the GMAGICAL flag was off before
979 * calling sv_magicext(), the hash isn't tied and has no uvar magic. If it's
980 * now on, then this wizard has get magic. Hence we can work around the
981 * get/clear shortcoming by turning the GMAGICAL flag off. If the current magic
982 * has uvar callbacks, it will be turned back on later. */
983 if (!oldgmg && SvGMAGICAL(sv))
988 MAGIC *prevmagic, *moremagic = NULL;
/* New uvar hooks point at our dispatcher; old_uf backs up any previous
 * uvar magic so it can be chained/restored later. */
991 ud.new_uf.uf_val = vmg_svt_val;
992 ud.new_uf.uf_set = NULL;
993 ud.new_uf.uf_index = 0;
994 ud.old_uf.uf_val = NULL;
995 ud.old_uf.uf_set = NULL;
996 ud.old_uf.uf_index = 0;
998 /* One uvar magic in the chain is enough. */
999 for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
1000 moremagic = mg->mg_moremagic;
1001 if (mg->mg_type == PERL_MAGIC_uvar)
1005 if (mg) { /* Found another uvar magic. */
1006 struct ufuncs *uf = (struct ufuncs *) mg->mg_ptr;
1007 if (uf->uf_val == vmg_svt_val) {
1008 /* It's our uvar magic, nothing to do. oldgmg was true. */
1011 /* It's another uvar magic, backup it and replace it by ours. */
1013 vmg_mg_del(sv, prevmagic, mg, moremagic);
1017 sv_magic(sv, NULL, PERL_MAGIC_uvar, (const char *) &ud, sizeof(ud));
1019 /* Our hash now carries uvar magic. The uvar/clear shortcoming has to be
1020 * handled by our uvar callback. */
1022 #endif /* VMG_UVAR */
/* Remove wizard w's magic from sv.  Also counts remaining wizards with uvar
 * callbacks; when the dispelled one was the last uvar user on a hash, the
 * shared PERL_MAGIC_uvar entry is either reverted to the backed-up original
 * ufuncs or removed entirely.  Returns a found/removed flag (elided). */
1028 static UV vmg_dispell(pTHX_ SV *sv, const vmg_wizard *w) {
1029 #define vmg_dispell(S, W) vmg_dispell(aTHX_ (S), (W))
1032 #endif /* VMG_UVAR */
1033 MAGIC *mg, *prevmagic, *moremagic = NULL;
1034 IV wid = vmg_wizard_id(w);
1036 if (SvTYPE(sv) < SVt_PVMG)
/* First pass: locate our ext magic and tally uvar-carrying wizards. */
1039 for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic) {
1040 const vmg_wizard *z;
1042 moremagic = mg->mg_moremagic;
1044 z = vmg_wizard_from_mg(mg);
1046 IV zid = vmg_wizard_id(z);
1050 /* If the current has no uvar, short-circuit uvar deletion. */
1051 uvars = z->uvar ? (uvars + 1) : 0;
1053 } else if (z->uvar) {
1055 /* We can't break here since we need to find the ext magic to delete. */
1057 #else /* VMG_UVAR */
1060 #endif /* !VMG_UVAR */
1066 vmg_mg_del(sv, prevmagic, mg, moremagic);
1069 if (uvars == 1 && SvTYPE(sv) >= SVt_PVHV) {
1070 /* mg was the first ext magic in the chain that had uvar */
/* Check whether any later wizard still needs the uvar hook. */
1072 for (mg = moremagic; mg; mg = mg->mg_moremagic) {
1073 const vmg_wizard *z = vmg_wizard_from_mg(mg);
/* Second pass: find the PERL_MAGIC_uvar entry itself. */
1084 for (prevmagic = NULL, mg = SvMAGIC(sv); mg; prevmagic = mg, mg = moremagic){
1085 moremagic = mg->mg_moremagic;
1086 if (mg->mg_type == PERL_MAGIC_uvar)
1090 ud = (vmg_uvar_ud *) mg->mg_ptr;
1091 if (ud->old_uf.uf_val || ud->old_uf.uf_set) {
1092 /* Revert the original uvar magic. */
1094 Newx(uf, 1, struct ufuncs);
1097 mg->mg_ptr = (char *) uf;
1098 mg->mg_len = sizeof(*uf);
1100 /* Remove the uvar magic. */
1101 vmg_mg_del(sv, prevmagic, mg, moremagic);
1105 #endif /* VMG_UVAR */
1112 /* ... OP info ............................................................. */
1114 #define VMG_OP_INFO_NAME 1
1115 #define VMG_OP_INFO_OBJECT 2
1118 static perl_mutex vmg_op_name_init_mutex;
1121 static U32 vmg_op_name_init = 0;
1122 static unsigned char vmg_op_name_len[MAXO] = { 0 };
/* One-time lazy initialisation for the requested op-info mode:
 *  - VMG_OP_INFO_NAME: cache strlen() of every builtin op name (guarded by
 *    vmg_op_name_init_mutex for threaded builds);
 *  - VMG_OP_INFO_OBJECT: populate MY_CXT.b__op_stashes with the B::*OP
 *    stashes used to bless op objects. */
1124 static void vmg_op_info_init(pTHX_ unsigned int opinfo) {
1125 #define vmg_op_info_init(W) vmg_op_info_init(aTHX_ (W))
1127 case VMG_OP_INFO_NAME:
1128 VMG_LOCK(&vmg_op_name_init_mutex);
1129 if (!vmg_op_name_init) {
1131 for (t = 0; t < OP_max; ++t)
1132 vmg_op_name_len[t] = strlen(PL_op_name[t]);
1133 vmg_op_name_init = 1;
1135 VMG_UNLOCK(&vmg_op_name_init_mutex);
1137 case VMG_OP_INFO_OBJECT: {
/* b__op_stashes[0] doubles as the "already initialised" sentinel. */
1139 if (!MY_CXT.b__op_stashes[0]) {
1142 for (c = OPc_NULL; c < OPc_MAX; ++c)
1143 MY_CXT.b__op_stashes[c] = gv_stashpv(vmg_opclassnames[c], 1);
/* Produce the extra "op info" argument passed to user callbacks: either a
 * mortal string with the current op's name, or a mortal B::*OP object
 * blessed into the class computed by vmg_opclass().  Returns undef when
 * there is no current op (guard elided in this view). */
1152 static SV *vmg_op_info(pTHX_ unsigned int opinfo) {
1153 #define vmg_op_info(W) vmg_op_info(aTHX_ (W))
1155 return &PL_sv_undef;
1158 case VMG_OP_INFO_NAME: {
1161 OPCODE t = PL_op->op_type;
1162 name = OP_NAME(PL_op);
/* Custom ops aren't in the precomputed length table. */
1163 name_len = (t == OP_CUSTOM) ? strlen(name) : vmg_op_name_len[t];
1164 return sv_2mortal(newSVpvn(name, name_len));
1166 case VMG_OP_INFO_OBJECT: {
1168 return sv_bless(sv_2mortal(newRV_noinc(newSViv(PTR2IV(PL_op)))),
1169 MY_CXT.b__op_stashes[vmg_opclass(PL_op)]);
1175 return &PL_sv_undef;
1178 /* --- svt callbacks ------------------------------------------------------- */
1180 #define VMG_CB_CALL_ARGS_MASK 15
1181 #define VMG_CB_CALL_ARGS_SHIFT 4
1182 #define VMG_CB_CALL_OPINFO (VMG_OP_INFO_NAME|VMG_OP_INFO_OBJECT) /* 1|2 */
1183 #define VMG_CB_CALL_GUARD 4
/* Croak hook for vmg_call_sv(): when the outermost magic call is dying,
 * flush the queue of freed magic tokens now, since the normal post-call
 * cleanup will be skipped by the longjmp. */
1185 static int vmg_dispell_guard_oncroak(pTHX_ void *ud) {
1190 /* If we're at the upmost magic call and we're about to die, we can just free
1191 * the tokens right now, since we will jump past the problematic part of our
1193 if (MY_CXT.depth == 0 && MY_CXT.freed_tokens) {
1194 vmg_magic_chain_free(MY_CXT.freed_tokens, NULL);
1195 MY_CXT.freed_tokens = NULL;
/* Guard magic free hook: destroy the token chain stashed in mg_ptr. */
1201 static int vmg_dispell_guard_free(pTHX_ SV *sv, MAGIC *mg) {
1202 vmg_magic_chain_free((MAGIC *) mg->mg_ptr, NULL);
1209 static int vmg_dispell_guard_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *params) {
1210 /* The freed magic tokens aren't cloned by perl because it cannot reach them
1211 * (they have been detached from their parent SV when they were enqueued).
1212 * Hence there's nothing to purge in the new thread. */
1218 #endif /* VMG_THREADSAFE */
/* Vtable for the mortal "guard" SV that frees queued tokens at scope end. */
1220 static MGVTBL vmg_dispell_guard_vtbl = {
1225 vmg_dispell_guard_free, /* free */
1228 vmg_dispell_guard_dup, /* dup */
1234 #endif /* MGf_LOCAL */
/* Create a mortal SV carrying the token chain; FREETMPS will trigger
 * vmg_dispell_guard_free and dispose of the chain safely. */
1237 static SV *vmg_dispell_guard_new(pTHX_ MAGIC *root) {
1238 #define vmg_dispell_guard_new(R) vmg_dispell_guard_new(aTHX_ (R))
1241 guard = sv_newmortal();
1242 vmg_sv_magicext(guard, NULL, &vmg_dispell_guard_vtbl, root, 0);
/* Generic dispatcher for user magic callbacks.  `flags` packs the variadic
 * argument count (low VMG_CB_CALL_ARGS_SHIFT bits), the opinfo mode and the
 * VMG_CB_CALL_GUARD bit.  Pushes \sv, the varargs and optional op info,
 * calls cb in scalar context, and returns the callback's IV result. */
1247 static int vmg_cb_call(pTHX_ SV *cb, unsigned int flags, SV *sv, ...) {
1250 unsigned int i, args, opinfo;
1251 MAGIC **chain = NULL;
1256 args = flags & VMG_CB_CALL_ARGS_MASK;
1257 flags >>= VMG_CB_CALL_ARGS_SHIFT;
1258 opinfo = flags & VMG_CB_CALL_OPINFO;
1264 EXTEND(SP, args + 1);
1265 PUSHs(sv_2mortal(newRV_inc(sv)));
1267 for (i = 0; i < args; ++i) {
1268 SV *sva = va_arg(ap, SV *);
/* NULL varargs are passed to Perl as undef. */
1269 PUSHs(sva ? sva : &PL_sv_undef);
1273 XPUSHs(vmg_op_info(opinfo));
/* Guarded calls track MY_CXT.depth and use the croak hook so freed magic
 * tokens get purged even when the callback dies. */
1276 if (flags & VMG_CB_CALL_GUARD) {
1279 vmg_call_sv(cb, G_SCALAR, vmg_dispell_guard_oncroak, NULL);
1281 if (MY_CXT.depth == 0 && MY_CXT.freed_tokens)
1282 chain = &MY_CXT.freed_tokens;
1284 vmg_call_sv(cb, G_SCALAR, 0, NULL);
1290 ret = (int) SvIV(svr);
1300 if (svr && !SvTEMP(svr))
/* Hand any pending token chain to a mortal guard for deferred freeing. */
1304 vmg_dispell_guard_new(*chain);
1311 #define VMG_CB_FLAGS(OI, A) \
1312 ((((unsigned int) (OI)) << VMG_CB_CALL_ARGS_SHIFT) | (A))
1314 #define vmg_cb_call1(I, OI, S, A1) \
1315 vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 1), (S), (A1))
1316 #define vmg_cb_call2(I, OI, S, A1, A2) \
1317 vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 2), (S), (A1), (A2))
1318 #define vmg_cb_call3(I, OI, S, A1, A2, A3) \
1319 vmg_cb_call(aTHX_ (I), VMG_CB_FLAGS((OI), 3), (S), (A1), (A2), (A3))
1321 /* ... Default no-op magic callback ........................................ */
/* Shared no-op used as the vtable entry when a wizard defines no callback
 * for a given magic type (body elided; presumably returns 0). */
1323 static int vmg_svt_default_noop(pTHX_ SV *sv, MAGIC *mg) {
1327 /* ... get magic ........................................................... */
/* get magic: forward to the wizard's cb_get with the private data. */
1329 static int vmg_svt_get(pTHX_ SV *sv, MAGIC *mg) {
1330 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1332 return vmg_cb_call1(w->cb_get, w->opinfo, sv, mg->mg_obj);
1335 #define vmg_svt_get_noop vmg_svt_default_noop
1337 /* ... set magic ........................................................... */
/* set magic: forward to the wizard's cb_set with the private data. */
1339 static int vmg_svt_set(pTHX_ SV *sv, MAGIC *mg) {
1340 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1342 return vmg_cb_call1(w->cb_set, w->opinfo, sv, mg->mg_obj);
1345 #define vmg_svt_set_noop vmg_svt_default_noop
1347 /* ... len magic ........................................................... */
/* Character length of a scalar: byte length, or utf8_length() when the SV
 * is flagged as UTF-8. */
1349 static U32 vmg_sv_len(pTHX_ SV *sv) {
1350 #define vmg_sv_len(S) vmg_sv_len(aTHX_ (S))
1352 #if VMG_HAS_PERL(5, 9, 3)
1353 const U8 *s = VOID2(const U8 *, VOID2(const void *, SvPV_const(sv, len)));
1355 U8 *s = SvPV(sv, len);
1358 return DO_UTF8(sv) ? utf8_length(s, s + len) : len;
/* len magic: call cb_len with (\sv, data, default_len); the callback's
 * return value (if defined) overrides the natural length.  Hand-rolls the
 * call because the default length depends on the SV type. */
1361 static U32 vmg_svt_len(pTHX_ SV *sv, MAGIC *mg) {
1362 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1363 unsigned int opinfo = w->opinfo;
1366 svtype t = SvTYPE(sv);
1375 PUSHs(sv_2mortal(newRV_inc(sv)));
1376 PUSHs(mg->mg_obj ? mg->mg_obj : &PL_sv_undef);
/* Scalars: character length; arrays: element count. */
1378 len = vmg_sv_len(sv);
1380 } else if (t == SVt_PVAV) {
1381 len = av_len((AV *) sv) + 1;
1385 PUSHs(&PL_sv_undef);
1388 XPUSHs(vmg_op_info(opinfo));
1391 vmg_call_sv(w->cb_len, G_SCALAR, 0, NULL);
1395 ret = SvOK(svr) ? (U32) SvUV(svr) : len;
/* len noop: still must report the natural length ourselves.
 * NOTE(review): returns av_len() here, i.e. last index, unlike the +1
 * above — intentional, as perl's len magic contract differs; confirm. */
1406 static U32 vmg_svt_len_noop(pTHX_ SV *sv, MAGIC *mg) {
1408 svtype t = SvTYPE(sv);
1411 len = vmg_sv_len(sv);
1412 } else if (t == SVt_PVAV) {
1413 len = (U32) av_len((AV *) sv);
1419 /* ... clear magic ......................................................... */
/* clear magic: forward to cb_clear; before 5.12 the call must be guarded
 * so magic tokens freed during the callback are purged safely. */
1421 static int vmg_svt_clear(pTHX_ SV *sv, MAGIC *mg) {
1422 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1423 unsigned int flags = w->opinfo;
1425 #if !VMG_HAS_PERL(5, 12, 0)
1426 flags |= VMG_CB_CALL_GUARD;
1429 return vmg_cb_call1(w->cb_clear, flags, sv, mg->mg_obj);
1432 #define vmg_svt_clear_noop vmg_svt_default_noop
1434 /* ... free magic .......................................................... */
1436 #if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
/* Trampoline target (5.10.0 only): restore the saved error into $@ right
 * after the op that would otherwise clobber it, then release the payload. */
1438 static OP *vmg_pp_propagate_errsv(pTHX) {
1439 SVOP *o = cSVOPx(PL_op);
1442 sv_setsv(ERRSV, o->op_sv);
1443 SvREFCNT_dec(o->op_sv);
/* Guard-magic variant: copy the saved error (mg_obj) back into $@ when the
 * mortal guard SV is freed at scope exit. */
1452 static int vmg_propagate_errsv_free(pTHX_ SV *sv, MAGIC *mg) {
1454 sv_setsv(ERRSV, mg->mg_obj);
1459 /* perl is already kind enough to handle the cloning of the mg_obj member,
1460 hence we don't need to define a dup magic callback. */
1462 static MGVTBL vmg_propagate_errsv_vtbl = {
1467 vmg_propagate_errsv_free, /* free */
1472 #endif /* MGf_LOCAL */
1479 } vmg_svt_free_cleanup_ud;
/* Cleanup hook passed to vmg_call_sv() from the free callback: runs when
 * the user's free callback dies.  If we are inside an eval (ud->in_eval),
 * unwind to ud->base and arrange for the error to land in $@ after the
 * LEAVETRY/LEAVEEVAL — via trampoline or guard magic depending on the perl
 * version.  Otherwise flush freed tokens and let the error propagate. */
1481 static int vmg_svt_free_cleanup(pTHX_ void *ud_) {
1482 vmg_svt_free_cleanup_ud *ud = VOID2(vmg_svt_free_cleanup_ud *, ud_);
1485 U32 optype = PL_op ? PL_op->op_type : OP_NULL;
1487 if (optype == OP_LEAVETRY || optype == OP_LEAVEEVAL) {
1488 SV *errsv = newSVsv(ERRSV);
/* Pop back to the scope recorded before the callback ran. */
1491 LEAVE_SCOPE(ud->base);
1493 #if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
1494 if (optype == OP_LEAVETRY) {
1496 PL_op = vmg_trampoline_bump(&MY_CXT.propagate_errsv, errsv, PL_op);
1497 } else if (optype == OP_LEAVEEVAL) {
1498 SV *guard = sv_newmortal();
1499 vmg_sv_magicext(guard, errsv, &vmg_propagate_errsv_vtbl, NULL, 0);
1501 #else /* !VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE */
1502 # if !VMG_HAS_PERL(5, 8, 9)
1504 SV *guard = sv_newmortal();
1505 vmg_sv_magicext(guard, errsv, &vmg_propagate_errsv_vtbl, NULL, 0);
/* 5.8.9+: attach the guard magic directly to $@ itself. */
1508 vmg_sv_magicext(ERRSV, errsv, &vmg_propagate_errsv_vtbl, NULL, 0);
1510 #endif /* VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE */
1515 /* Don't propagate */
1521 /* We are about to croak() while sv is being destroyed. Try to clean up
1525 vmg_mg_del(sv, NULL, mg, mg->mg_moremagic);
1530 vmg_dispell_guard_oncroak(aTHX_ NULL);
1532 /* After that, propagate the error upwards. */
1532 /* After that, propagate the error upwards. */
/* free magic: invoked while sv is being destroyed.  Temporarily revives sv,
 * calls the wizard's cb_free under the croak-cleanup hook above, flushes
 * the freed-token queue, then drops the artificial reference via a direct
 * SvREFCNT() decrement (SvREFCNT_dec would recurse into destruction). */
1537 static int vmg_svt_free(pTHX_ SV *sv, MAGIC *mg) {
1538 vmg_svt_free_cleanup_ud ud;
1539 const vmg_wizard *w;
1545 /* During global destruction, we cannot be sure that the wizard and its free
1546 * callback are still alive. */
1550 w = vmg_wizard_from_mg_nocheck(mg);
1552 /* So that it survives the temp cleanup below */
1553 SvREFCNT_inc_simple_void(sv);
1555 #if !(VMG_HAS_PERL_MAINT(5, 11, 0, 32686) || VMG_HAS_PERL(5, 12, 0))
1556 /* The previous magic tokens were freed but the magic chain wasn't updated, so
1557 * if you access the sv from the callback the old deleted magics will trigger
1558 * and cause memory misreads. Change 32686 solved it that way : */
1559 SvMAGIC_set(sv, mg);
/* Record whether we're inside an eval, and the scope to unwind to, for the
 * cleanup hook should cb_free die. */
1563 if (cxstack_ix < cxstack_max) {
1564 ud.in_eval = (CxTYPE(cxstack + cxstack_ix + 1) == CXt_EVAL);
1565 ud.base = ud.in_eval ? PL_scopestack[PL_scopestack_ix] : 0;
1576 PUSHs(sv_2mortal(newRV_inc(sv)));
1577 PUSHs(mg->mg_obj ? mg->mg_obj : &PL_sv_undef);
1579 XPUSHs(vmg_op_info(w->opinfo));
1585 vmg_call_sv(w->cb_free, G_SCALAR, vmg_svt_free_cleanup, &ud);
1587 if (MY_CXT.depth == 0 && MY_CXT.freed_tokens) {
1588 /* Free all the tokens in the chain but the current one (if it's present).
1589 * It will be taken care of by our caller, Perl_mg_free(). */
1590 vmg_magic_chain_free(MY_CXT.freed_tokens, mg);
1591 MY_CXT.freed_tokens = NULL;
1598 ret = (int) SvIV(svr);
1604 /* Calling SvREFCNT_dec() will trigger destructors in an infinite loop, so
1605 * we have to rely on SvREFCNT() being a lvalue. Heck, even the core does it */
1608 /* Perl_mg_free will get rid of the magic and decrement mg->mg_obj and
1609 * mg->mg_ptr reference count */
1613 #define vmg_svt_free_noop vmg_svt_default_noop
1615 #if VMG_HAS_PERL_MAINT(5, 11, 0, 33256) || VMG_HAS_PERL(5, 12, 0)
1616 # define VMG_SVT_COPY_KEYLEN_TYPE I32
1618 # define VMG_SVT_COPY_KEYLEN_TYPE int
1621 /* ... copy magic .......................................................... */
/* copy magic (hash/array element access on tied-like containers): builds a
 * key SV (either the HEf_SVKEY SV or a fresh PVN from key/keylen), wraps
 * nsv in a ref for code-like containers, and calls cb_copy with
 * (\sv, data, key, nsv).  The temporary key SV is released afterwards. */
1623 static int vmg_svt_copy(pTHX_ SV *sv, MAGIC *mg, SV *nsv, const char *key, VMG_SVT_COPY_KEYLEN_TYPE keylen) {
1624 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1628 if (keylen == HEf_SVKEY) {
/* NOTE(review): keylen here can't be HEf_SVKEY on this branch — the elided
 * line between 1628 and 1631 presumably assigns keysv = (SV *) key. */
1631 keysv = newSVpvn(key, keylen);
1634 if (SvTYPE(sv) >= SVt_PVCV)
1635 nsv = sv_2mortal(newRV_inc(nsv));
1637 ret = vmg_cb_call3(w->cb_copy, w->opinfo, sv, mg->mg_obj, keysv, nsv);
1639 if (keylen != HEf_SVKEY) {
1640 SvREFCNT_dec(keysv);
/* copy noop: signature must match; body elided (presumably returns 0). */
1646 static int vmg_svt_copy_noop(pTHX_ SV *sv, MAGIC *mg, SV *nsv, const char *key, VMG_SVT_COPY_KEYLEN_TYPE keylen) {
1650 /* ... dup magic ........................................................... */
1653 static int vmg_svt_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *param) {
1656 #define vmg_svt_dup_noop vmg_svt_dup
1659 /* ... local magic ......................................................... */
/* local magic: forward to cb_local with the localized SV. */
1663 static int vmg_svt_local(pTHX_ SV *nsv, MAGIC *mg) {
1664 const vmg_wizard *w = vmg_wizard_from_mg_nocheck(mg);
1666 return vmg_cb_call1(w->cb_local, w->opinfo, nsv, mg->mg_obj);
1669 #define vmg_svt_local_noop vmg_svt_default_noop
1671 #endif /* MGf_LOCAL */
1673 /* ... uvar magic .......................................................... */
/* Fake-op handler injected after a hash access: restore the RMAGICAL flag
 * that vmg_svt_val temporarily hid (see below), then continue. */
1677 static OP *vmg_pp_reset_rmg(pTHX) {
1678 SVOP *o = cSVOPx(PL_op);
1680 SvRMAGICAL_on(o->op_sv);
/* uvar dispatcher installed as uf_val on magical hashes.  Decodes the
 * hv_common `action` bits, chains any backed-up pre-existing uvar hooks,
 * then walks the magic chain calling each wizard's fetch/store/exists/
 * delete callback with the (possibly copied) key. */
1686 static I32 vmg_svt_val(pTHX_ IV action, SV *sv) {
1688 MAGIC *mg, *umg, *moremagic;
1689 SV *key = NULL, *newkey = NULL;
1692 umg = mg_find(sv, PERL_MAGIC_uvar);
1693 /* umg can't be NULL or we wouldn't be there. */
1695 ud = (vmg_uvar_ud *) umg->mg_ptr;
/* Replay the original uvar hooks we displaced in vmg_cast(). */
1697 if (ud->old_uf.uf_val)
1698 ud->old_uf.uf_val(aTHX_ action, sv);
1699 if (ud->old_uf.uf_set)
1700 ud->old_uf.uf_set(aTHX_ action, sv);
1702 for (mg = SvMAGIC(sv); mg; mg = moremagic) {
1703 const vmg_wizard *w;
1705 /* mg may be freed later by the uvar call, so we need to fetch the next
1706 * token before reaching that fateful point. */
1707 moremagic = mg->mg_moremagic;
1709 switch (mg->mg_type) {
1710 case PERL_MAGIC_ext:
1712 case PERL_MAGIC_tied:
1719 w = vmg_wizard_from_mg(mg);
/* Copy the key once so callbacks cannot corrupt the caller's SV. */
1728 newkey = key = umg->mg_obj = sv_mortalcopy(umg->mg_obj);
1732 & (HV_FETCH_ISSTORE|HV_FETCH_ISEXISTS|HV_FETCH_LVALUE|HV_DELETE)) {
/* Plain fetch (no store/exists/lvalue/delete bits). */
1735 vmg_cb_call2(w->cb_fetch, w->opinfo | VMG_CB_CALL_GUARD, sv,
1738 case HV_FETCH_ISSTORE:
1739 case HV_FETCH_LVALUE:
1740 case (HV_FETCH_ISSTORE|HV_FETCH_LVALUE):
1742 vmg_cb_call2(w->cb_store, w->opinfo | VMG_CB_CALL_GUARD, sv,
1745 case HV_FETCH_ISEXISTS:
1747 vmg_cb_call2(w->cb_exists, w->opinfo | VMG_CB_CALL_GUARD, sv,
/* HV_DELETE case. */
1752 vmg_cb_call2(w->cb_delete, w->opinfo | VMG_CB_CALL_GUARD, sv,
1758 if (SvRMAGICAL(sv) && !tied && !(action & (HV_FETCH_ISSTORE|HV_DELETE))) {
1759 /* Temporarily hide the RMAGICAL flag of the hash so it isn't wrongly
1760 * mistaken for a tied hash by the rest of hv_common. It will be reset by
1761 * the op_ppaddr of a new fake op injected between the current and the next
1764 #if VMG_RESET_RMG_NEEDS_TRAMPOLINE
1768 PL_op = vmg_trampoline_bump(&MY_CXT.reset_rmg, sv, PL_op);
1770 #else /* !VMG_RESET_RMG_NEEDS_TRAMPOLINE */
1772 OP *nop = PL_op->op_next;
/* Reuse an already-injected reset op when present, otherwise splice in
 * a fresh SVOP pointing at vmg_pp_reset_rmg. */
1775 if (nop && nop->op_ppaddr == vmg_pp_reset_rmg) {
1776 svop = (SVOP *) nop;
1778 NewOp(1101, svop, 1, SVOP);
1779 svop->op_type = OP_STUB;
1780 svop->op_ppaddr = vmg_pp_reset_rmg;
1781 svop->op_next = nop;
1783 svop->op_private = 0;
1785 PL_op->op_next = (OP *) svop;
1790 #endif /* VMG_RESET_RMG_NEEDS_TRAMPOLINE */
1798 #endif /* VMG_UVAR */
1800 /* --- Module setup/teardown ----------------------------------------------- */
/* Count of live loads of this module across all interpreters; guarded by
 * VMG_LOADED_LOCK below. */
1804 static I32 vmg_loaded = 0;
1806 /* We must use preexistent global mutexes or we will never be able to destroy
/* Borrow a core-owned mutex: PL_my_ctx_mutex where available (perl >= 5.9.3),
 * the op refcount lock otherwise. */
1808 # if VMG_HAS_PERL(5, 9, 3)
1809 # define VMG_LOADED_LOCK MUTEX_LOCK(&PL_my_ctx_mutex)
1810 # define VMG_LOADED_UNLOCK MUTEX_UNLOCK(&PL_my_ctx_mutex)
1812 # define VMG_LOADED_LOCK OP_REFCNT_LOCK
1813 # define VMG_LOADED_UNLOCK OP_REFCNT_UNLOCK
/* Destroy the module's two process-global mutexes. Must be called with
 * VMG_LOADED_LOCK held (hence "_locked") and only once no interpreter still
 * uses the module. */
1816 static void vmg_global_teardown_late_locked(pTHX) {
1817 #define vmg_global_teardown_late_locked() vmg_global_teardown_late_locked(aTHX)
1818 MUTEX_DESTROY(&vmg_op_name_init_mutex);
1819 MUTEX_DESTROY(&vmg_vtable_refcount_mutex);
/* svt_free hook of the sentinel magic attached to PL_strtab at teardown time:
 * when it finally fires (very late in global destruction) and no load of the
 * module remains, the global mutexes are destroyed. */
1824 static int vmg_global_teardown_free(pTHX_ SV *sv, MAGIC *mg) {
1827 if (vmg_loaded == 0)
1828 vmg_global_teardown_late_locked();
/* Magic vtable whose only populated slot (elided slots are presumably NULL —
 * confirm against full source) is the free hook above. */
1835 static MGVTBL vmg_global_teardown_vtbl = {
1840 vmg_global_teardown_free
/* Compute the effective perl destruct level: start from PL_perl_destruct_level
 * and let the PERL_DESTRUCT_LEVEL environment variable override it, using
 * whichever numeric-parsing API this perl provides. NOTE(review): parsing
 * fallbacks and the return path are partially elided in this excerpt. */
1852 static signed char vmg_destruct_level(pTHX) {
1853 #define vmg_destruct_level() vmg_destruct_level(aTHX)
1856 lvl = PL_perl_destruct_level;
1860 const char *s = PerlEnv_getenv("PERL_DESTRUCT_LEVEL");
1863 #if VMG_HAS_PERL(5, 21, 3)
/* "-1" is special-cased since perl 5.21.3. */
1864 if (strEQ(s, "-1")) {
1867 # if VMG_HAS_PERL(5, 21, 10)
1869 if (Perl_grok_atoUV(s, &uv, NULL) && uv <= INT_MAX)
1873 # else /* VMG_HAS_PERL(5, 21, 3) && !VMG_HAS_PERL(5, 21, 10) */
1874 i = Perl_grok_atou(s, NULL);
1877 #else /* !VMG_HAS_PERL(5, 21, 3) */
1889 #endif /* VMG_THREADSAFE */
/* atexit callback registered by vmg_setup: per-interpreter teardown. The last
 * interpreter out (vmg_loaded == 1) either tears the globals down immediately
 * (destruct level 0) or plants sentinel magic on a fresh PL_strtab so the
 * global mutexes die only after full global destruction. Elided lines include
 * locking and the vmg_loaded decrement — confirm against the full source. */
1891 static void vmg_teardown(pTHX_ void *param) {
1897 if (vmg_loaded == 1) {
1899 if (vmg_destruct_level() == 0) {
1900 vmg_global_teardown_late_locked();
/* Replace the string table and hang the teardown sentinel magic on it so
 * vmg_global_teardown_free fires at the very end of destruction. */
1903 PL_strtab = newHV();
1904 vmg_sv_magicext((SV *) PL_strtab, NULL, &vmg_global_teardown_vtbl, NULL, 0);
1907 VMG_ASSERT(vmg_loaded > 1);
/* Flush any magic tokens whose freeing was deferred while callbacks ran. */
1914 if (MY_CXT.depth == 0 && MY_CXT.freed_tokens) {
1915 vmg_magic_chain_free(MY_CXT.freed_tokens, NULL);
1916 MY_CXT.freed_tokens = NULL;
/* One-time module bootstrap: initializes the global mutexes on first load,
 * sets up the per-interpreter context (B::OP stash cache, trampolines), and
 * exports the package's constants. NOTE(review): locking, MY_CXT_INIT and the
 * vmg_loaded increment are elided in this excerpt. */
1922 static void vmg_setup(pTHX) {
1923 #define vmg_setup() vmg_setup(aTHX)
/* First load in the process: create the two global mutexes. */
1931 if (vmg_loaded == 0) {
1932 MUTEX_INIT(&vmg_vtable_refcount_mutex);
1933 MUTEX_INIT(&vmg_op_name_init_mutex);
1936 VMG_ASSERT(vmg_loaded > 0);
/* Lazily-populated cache of B::OP-class stashes, one slot per op class. */
1943 for (c = OPc_NULL; c < OPc_MAX; ++c)
1944 MY_CXT.b__op_stashes[c] = NULL;
1947 MY_CXT.freed_tokens = NULL;
1949 #if VMG_PROPAGATE_ERRSV_NEEDS_TRAMPOLINE
1950 vmg_trampoline_init(&MY_CXT.propagate_errsv, vmg_pp_propagate_errsv);
1952 #if VMG_RESET_RMG_NEEDS_TRAMPOLINE
1953 vmg_trampoline_init(&MY_CXT.reset_rmg, vmg_pp_reset_rmg);
/* Export the module's constants into the Variable::Magic stash. */
1956 stash = gv_stashpv(__PACKAGE__, 1);
1957 newCONSTSUB(stash, "MGf_COPY", newSVuv(MGf_COPY));
1958 newCONSTSUB(stash, "MGf_DUP", newSVuv(MGf_DUP));
1959 newCONSTSUB(stash, "MGf_LOCAL", newSVuv(MGf_LOCAL));
1960 newCONSTSUB(stash, "VMG_UVAR", newSVuv(VMG_UVAR));
1961 newCONSTSUB(stash, "VMG_COMPAT_SCALAR_LENGTH_NOLEN",
1962 newSVuv(VMG_COMPAT_SCALAR_LENGTH_NOLEN));
1963 newCONSTSUB(stash, "VMG_COMPAT_SCALAR_NOLEN",
1964 newSVuv(VMG_COMPAT_SCALAR_NOLEN));
1965 newCONSTSUB(stash, "VMG_COMPAT_ARRAY_PUSH_NOLEN",
1966 newSVuv(VMG_COMPAT_ARRAY_PUSH_NOLEN));
1967 newCONSTSUB(stash, "VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID",
1968 newSVuv(VMG_COMPAT_ARRAY_PUSH_NOLEN_VOID));
1969 newCONSTSUB(stash, "VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID",
1970 newSVuv(VMG_COMPAT_ARRAY_UNSHIFT_NOLEN_VOID));
1971 newCONSTSUB(stash, "VMG_COMPAT_ARRAY_UNDEF_CLEAR",
1972 newSVuv(VMG_COMPAT_ARRAY_UNDEF_CLEAR));
1973 newCONSTSUB(stash, "VMG_COMPAT_HASH_DELETE_NOUVAR_VOID",
1974 newSVuv(VMG_COMPAT_HASH_DELETE_NOUVAR_VOID));
1975 newCONSTSUB(stash, "VMG_COMPAT_CODE_COPY_CLONE",
1976 newSVuv(VMG_COMPAT_CODE_COPY_CLONE));
1977 newCONSTSUB(stash, "VMG_COMPAT_GLOB_GET", newSVuv(VMG_COMPAT_GLOB_GET));
1978 newCONSTSUB(stash, "VMG_PERL_PATCHLEVEL", newSVuv(VMG_PERL_PATCHLEVEL));
1979 newCONSTSUB(stash, "VMG_THREADSAFE", newSVuv(VMG_THREADSAFE));
1980 newCONSTSUB(stash, "VMG_FORKSAFE", newSVuv(VMG_FORKSAFE));
1981 newCONSTSUB(stash, "VMG_OP_INFO_NAME", newSVuv(VMG_OP_INFO_NAME));
1982 newCONSTSUB(stash, "VMG_OP_INFO_OBJECT", newSVuv(VMG_OP_INFO_OBJECT));
/* Register per-interpreter cleanup. */
1984 call_atexit(vmg_teardown, NULL);
1989 /* --- Macros for the XS section ------------------------------------------- */
/* VMG_CVOK(C): true when the CV has a body — an XSUB pointer for XS subs, an
 * op root for pure-perl subs. Two variants, selected by an elided #if. */
1992 # define VMG_CVOK(C) \
1993 ((CvISXSUB(C) ? (void *) CvXSUB(C) : (void *) CvROOT(C)) ? 1 : 0)
1995 # define VMG_CVOK(C) (CvROOT(C) || CvXSUB(C))
/* VMG_CBOK(S): a callback slot is usable when it is a CV with a body, or any
 * other defined value. */
1998 #define VMG_CBOK(S) ((SvTYPE(S) == SVt_PVCV) ? VMG_CVOK(S) : SvOK(S))
/* VMG_SET_CB(S, N): take stack item S as the "N" callback of the wizard being
 * built, bumping its refcount if it is a defined reference. (Interior lines
 * of this macro are elided here; do not edit it from this excerpt alone.) */
2000 #define VMG_SET_CB(S, N) { \
2002 if (SvOK(cb) && SvROK(cb)) { \
2005 SvREFCNT_inc_simple_void(cb); \
/* VMG_SET_SVT_CB(S, N): same idea for vtable slots — install the real
 * vmg_svt_N handler when the callback is callable, the _noop variant when it
 * is defined but not callable, and NULL otherwise. */
2014 #define VMG_SET_SVT_CB(S, N) { \
2016 if (SvOK(cb) && SvROK(cb)) { \
2018 if (VMG_CBOK(cb)) { \
2019 t->svt_ ## N = vmg_svt_ ## N; \
2020 SvREFCNT_inc_simple_void(cb); \
2022 t->svt_ ## N = vmg_svt_ ## N ## _noop; \
2026 t->svt_ ## N = NULL; \
2034 MODULE = Variable::Magic PACKAGE = Variable::Magic
2049 U32 had_b__op_stash = 0;
2055 for (c = OPc_NULL; c < OPc_MAX; ++c) {
2056 if (MY_CXT.b__op_stashes[c])
2057 had_b__op_stash |= (((U32) 1) << c);
2059 old_depth = MY_CXT.depth;
2063 for (c = OPc_NULL; c < OPc_MAX; ++c) {
2064 MY_CXT.b__op_stashes[c] = (had_b__op_stash & (((U32) 1) << c))
2065 ? gv_stashpv(vmg_opclassnames[c], 1) : NULL;
2067 MY_CXT.depth = old_depth;
2068 MY_CXT.freed_tokens = NULL;
2070 VMG_ASSERT(vmg_loaded > 0);
2076 #endif /* VMG_THREADSAFE */
/* NOTE(review): elided xsub header — this body belongs to the _wizard
 * constructor. It consumes the fixed-order argument list from the stack,
 * allocates the wizard, and wires each argument into the matching callback or
 * vtable slot via the VMG_SET_(SVT_)CB macros. The argument count check and
 * the reads of op_info/copy_key are partially elided. */
2083 SV *op_info, *copy_key;
2089 #endif /* MGf_LOCAL */
2092 #endif /* VMG_UVAR */
2093 ) { croak(vmg_wrongargnum); }
2096 w = vmg_wizard_alloc(SvOK(op_info) ? SvUV(op_info) : 0);
2097 t = vmg_vtable_vtbl(w->vtable);
/* Argument order is significant: data, then the svt_* slots, then (on
 * MGf_LOCAL builds) local, then the uvar callbacks. */
2099 VMG_SET_CB(ST(i++), data);
2101 VMG_SET_SVT_CB(ST(i++), get);
2102 VMG_SET_SVT_CB(ST(i++), set);
2103 VMG_SET_SVT_CB(ST(i++), len);
2104 VMG_SET_SVT_CB(ST(i++), clear);
2105 VMG_SET_SVT_CB(ST(i++), free);
2106 VMG_SET_SVT_CB(ST(i++), copy);
2107 /* VMG_SET_SVT_CB(ST(i++), dup); */
2112 VMG_SET_SVT_CB(ST(i++), local);
2113 #endif /* MGf_LOCAL */
2115 VMG_SET_CB(ST(i++), fetch);
2116 VMG_SET_CB(ST(i++), store);
2117 VMG_SET_CB(ST(i++), exists);
2118 VMG_SET_CB(ST(i++), delete);
/* uvar is enabled when any hash-element callback is set; 2 flags that the
 * key should be copied before callbacks see it, 1 that it should not. */
2121 if (w->cb_fetch || w->cb_store || w->cb_exists || w->cb_delete)
2122 w->uvar = SvTRUE(copy_key) ? 2 : 1;
2123 #endif /* VMG_UVAR */
2125 RETVAL = newRV_noinc(vmg_wizard_sv_new(w));
/* cast(sv, wiz, ...): attach the wizard's magic to the variable referenced by
 * sv, passing any extra stack arguments through to the data constructor.
 * Returns (as an UV) the status of vmg_cast. Validation and the args/i setup
 * are partially elided in this excerpt. */
2129 SV *cast(SV *sv, SV *wiz, ...)
2130 PROTOTYPE: \[$@%&*]$@
2132 const vmg_wizard *w = NULL;
2141 wiz = SvRV_const(wiz);
2142 w = vmg_wizard_from_sv(wiz);
2145 croak(vmg_invalid_wiz);
2146 RETVAL = newSVuv(vmg_cast(SvRV(sv), w, wiz, args, i));
/* getdata(sv, wiz): fetch the private data SV attached to the variable by the
 * given wizard (croaks on an invalid wizard). The return plumbing of "data"
 * is elided in this excerpt. */
2151 getdata(SV *sv, SV *wiz)
2152 PROTOTYPE: \[$@%&*]$
2154 const vmg_wizard *w = NULL;
2158 w = vmg_wizard_from_sv(SvRV_const(wiz));
2160 croak(vmg_invalid_wiz);
2161 data = vmg_data_get(SvRV(sv), w);
/* dispell(sv, wiz): remove the wizard's magic from the variable referenced by
 * sv; returns the status of vmg_dispell as an UV, croaking on an invalid
 * wizard reference. */
2167 SV *dispell(SV *sv, SV *wiz)
2168 PROTOTYPE: \[$@%&*]$
2170 const vmg_wizard *w = NULL;
2173 w = vmg_wizard_from_sv(SvRV_const(wiz));
2175 croak(vmg_invalid_wiz);
2176 RETVAL = newSVuv(vmg_dispell(SvRV(sv), w));