# define MY_CXT_CLONE NOOP
#endif
+/* --- Error messages ------------------------------------------------------ */
+
+static const char su_stack_smash[] = "Cannot target a scope outside of the current stack";
+static const char su_no_such_target[] = "No targetable %s scope in the current stack";
+
/* --- Unique context ID global storage ------------------------------------ */
/* ... Sequence ID counter ................................................. */
}
+/* Per-interpreter UID storage: after this change, `map' is a flat array of
+ * su_uid slots indexed by stack depth (previously an array of pointers). */
typedef struct {
- su_uid **map;
- STRLEN used;
- STRLEN alloc;
+ su_uid *map;
+ STRLEN used;
+ STRLEN alloc;
} su_uid_storage;
+/* Duplicate one interpreter's UID storage into another (thread clone):
+ * active UIDs are copied over, at most max_depth slots deep; every slot
+ * that is not copied is reset to the inactive state.
+ * NOTE(review): assumes new_cxt holds a consistent map/used/alloc triple
+ * on entry — confirm against the cloning caller. */
static void su_uid_storage_dup(pTHX_ su_uid_storage *new_cxt, const su_uid_storage *old_cxt, UV max_depth) {
#define su_uid_storage_dup(N, O, D) su_uid_storage_dup(aTHX_ (N), (O), (D))
- su_uid **old_map = old_cxt->map;
+ su_uid *old_map = old_cxt->map;
if (old_map) {
- su_uid **new_map = new_cxt->map;
- STRLEN old_used = old_cxt->used;
- STRLEN new_used, new_alloc;
- STRLEN i;
+ su_uid *new_map = new_cxt->map;
+ STRLEN old_used = old_cxt->used;
+ STRLEN new_used, new_alloc;
+ STRLEN i;
- new_used = max_depth < old_used ? max_depth : old_used;
+ new_used = max_depth < old_used ? max_depth : old_used;
new_cxt->used = new_used;
+ /* Grow the destination map only when the copied slice does not fit;
+ * with flat slots no per-element NULLing is needed after Renew. */
- if (new_used <= new_cxt->alloc)
- new_alloc = new_cxt->alloc;
- else {
- new_alloc = new_used;
- Renew(new_map, new_alloc, su_uid *);
- for (i = new_cxt->alloc; i < new_alloc; ++i)
- new_map[i] = NULL;
+ if (new_used <= new_cxt->alloc) {
+ new_alloc = new_cxt->alloc;
+ } else {
+ new_alloc = new_used;
+ Renew(new_map, new_alloc, su_uid);
new_cxt->map = new_map;
new_cxt->alloc = new_alloc;
}
for (i = 0; i < new_alloc; ++i) {
- su_uid *new_uid = new_map[i];
+ su_uid *new_uid = new_map + i;
if (i < new_used) { /* => i < max_depth && i < old_used */
- su_uid *old_uid = old_map[i];
+ su_uid *old_uid = old_map + i;
if (old_uid && (old_uid->flags & SU_UID_ACTIVE)) {
- if (!new_uid) {
- Newx(new_uid, 1, su_uid);
- new_map[i] = new_uid;
- }
*new_uid = *old_uid;
continue;
}
}
+ /* Slot was not copied: clear it completely, not just the ACTIVE bit. */
+ new_uid->seq = 0;
+ new_uid->flags = 0;
}
}
SvREFCNT_dec(si->si_stack);
Safefree(si);
- if (sud->tmp_uid_storage.map) {
- su_uid **map = sud->tmp_uid_storage.map;
- STRLEN alloc = sud->tmp_uid_storage.alloc;
- STRLEN i;
-
- for (i = 0; i < alloc; ++i)
- Safefree(map[i]);
-
- Safefree(map);
- }
+ Safefree(sud->tmp_uid_storage.map);
Safefree(sud);
/* --- Actions ------------------------------------------------------------- */
+/* Header common to every su_ud_* action payload.  The old per-action
+ * handler function pointer is removed: su_pop() now dispatches on the
+ * small `type' tag, and `private' carries per-type data (e.g. the svtype
+ * for localize).  Note `private' is fine in C but would clash in C++. */
typedef struct {
- I32 depth;
- I32 pad;
+ U8 type;
+ U8 private;
+ U8 pad;
+ /* spare */
+ I32 depth;
I32 *origin;
- void (*handler)(pTHX_ void *);
} su_ud_common;
-#define SU_UD_DEPTH(U) (((su_ud_common *) (U))->depth)
+#define SU_UD_TYPE(U) (((su_ud_common *) (U))->type)
+#define SU_UD_PRIVATE(U) (((su_ud_common *) (U))->private)
#define SU_UD_PAD(U) (((su_ud_common *) (U))->pad)
+#define SU_UD_DEPTH(U) (((su_ud_common *) (U))->depth)
#define SU_UD_ORIGIN(U) (((su_ud_common *) (U))->origin)
-#define SU_UD_HANDLER(U) (((su_ud_common *) (U))->handler)
+
+/* Action tags stored in SU_UD_TYPE() and switched on in su_pop(). */
+#define SU_UD_TYPE_REAP 0
+#define SU_UD_TYPE_LOCALIZE 1
+#define SU_UD_TYPE_UID 2
#define SU_UD_FREE(U) STMT_START { \
if (SU_UD_ORIGIN(U)) Safefree(SU_UD_ORIGIN(U)); \
+/* User data for reap(): the common header plus the callback to fire. */
typedef struct {
su_ud_common ci;
- SV *cb;
+ SV *cb;
} su_ud_reap;
-static void su_call(pTHX_ void *ud_) {
- su_ud_reap *ud = (su_ud_reap *) ud_;
+#define SU_UD_REAP_CB(U) (((su_ud_reap *) (U))->cb)
+
+static void su_call(pTHX_ SV *cb) {
#if SU_SAVE_LAST_CX
I32 cxix;
PERL_CONTEXT saved_cx;
SU_D({
PerlIO_printf(Perl_debug_log,
- "%p: @@@ call\n%p: depth=%2d scope_ix=%2d save_ix=%2d\n",
- ud, ud, SU_UD_DEPTH(ud), PL_scopestack_ix, PL_savestack_ix);
+ "@@@ call scope_ix=%2d save_ix=%2d\n",
+ PL_scopestack_ix, PL_savestack_ix);
});
ENTER;
saved_cx = cxstack[cxix];
#endif /* SU_SAVE_LAST_CX */
- call_sv(ud->cb, G_VOID);
+ call_sv(cb, G_VOID);
#if SU_SAVE_LAST_CX
cxstack[cxix] = saved_cx;
FREETMPS;
LEAVE;
- SvREFCNT_dec(ud->cb);
- SU_UD_FREE(ud);
-}
-
-static void su_reap(pTHX_ void *ud) {
-#define su_reap(U) su_reap(aTHX_ (U))
- SU_D({
- PerlIO_printf(Perl_debug_log,
- "%p: === reap\n%p: depth=%2d scope_ix=%2d save_ix=%2d\n",
- ud, ud, SU_UD_DEPTH(ud), PL_scopestack_ix, PL_savestack_ix);
- });
+ SvREFCNT_dec(cb);
- SAVEDESTRUCTOR_X(su_call, ud);
+ return;
}
/* ... Localize & localize array/hash element .............................. */
+/* User data for localize()/localize_elem(): target sv, replacement value,
+ * and optional element key.  The svtype formerly kept in `type' now lives
+ * in the common SU_UD_PRIVATE() field. */
typedef struct {
su_ud_common ci;
- SV *sv;
- SV *val;
- SV *elem;
- svtype type;
+ SV *sv;
+ SV *val;
+ SV *elem;
} su_ud_localize;
+#define SU_UD_LOCALIZE_SV(U) (((su_ud_localize *) (U))->sv)
+#define SU_UD_LOCALIZE_VAL(U) (((su_ud_localize *) (U))->val)
+#define SU_UD_LOCALIZE_ELEM(U) (((su_ud_localize *) (U))->elem)
+
+/* Drop every SV reference a su_ud_localize owns, then free the ud itself. */
#define SU_UD_LOCALIZE_FREE(U) STMT_START { \
- SvREFCNT_dec((U)->elem); \
- SvREFCNT_dec((U)->val); \
- SvREFCNT_dec((U)->sv); \
- SU_UD_FREE(U); \
+ SvREFCNT_dec(SU_UD_LOCALIZE_ELEM(U)); \
+ SvREFCNT_dec(SU_UD_LOCALIZE_VAL(U)); \
+ SvREFCNT_dec(SU_UD_LOCALIZE_SV(U)); \
+ SU_UD_FREE(U); \
} STMT_END
static I32 su_ud_localize_init(pTHX_ su_ud_localize *ud, SV *sv, SV *val, SV *elem) {
}
/* When deref is set, val isn't NULL */
+ SU_UD_PRIVATE(ud) = t;
+
ud->sv = sv;
ud->val = val ? newSVsv(deref ? SvRV(val) : val) : NULL;
ud->elem = SvREFCNT_inc(elem);
- ud->type = t;
return size;
}
SV *sv = ud->sv;
SV *val = ud->val;
SV *elem = ud->elem;
- svtype t = ud->type;
+ svtype t = SU_UD_PRIVATE(ud);
GV *gv;
if (SvTYPE(sv) >= SVt_PVGV) {
case SVt_PVAV:
if (elem) {
su_save_aelem(GvAV(gv), elem, val);
- goto done;
+ return;
} else
save_ary(gv);
break;
case SVt_PVHV:
if (elem) {
su_save_helem(GvHV(gv), elem, val);
- goto done;
+ return;
} else
save_hash(gv);
break;
if (val)
SvSetMagicSV((SV *) gv, val);
-done:
- SU_UD_LOCALIZE_FREE(ud);
+ return;
+}
+
+/* ... Unique context ID ................................................... */
+
+/* We must pass the index because MY_CXT.uid_storage might be reallocated
+ * between the UID fetch and the invalidation at the end of scope. */
+
+typedef struct {
+ su_ud_common ci;
+ I32 idx; /* index of the UID slot in MY_CXT.uid_storage.map */
+} su_ud_uid;
+
+/* End-of-scope destructor: deactivate the UID slot, then free the ud. */
+static void su_uid_drop(pTHX_ void *ud_) {
+ su_ud_uid *ud = ud_;
+ dMY_CXT;
+
+ MY_CXT.uid_storage.map[ud->idx].flags &= ~SU_UID_ACTIVE;
+
+ SU_UD_FREE(ud);
+
+ return;
}
/* --- Pop a context back -------------------------------------------------- */
-#if SU_DEBUG && defined(DEBUGGING)
+#ifdef DEBUGGING
# define SU_CXNAME(C) PL_block_type[CxTYPE(C)]
#else
-# define SU_CXNAME(C) "XXX"
+/* PL_block_type is only available under DEBUGGING, so mirror the CXt_*
+ * name tables per Perl version here.
+ * NOTE(review): ordering must match the CXt_* enum in the corresponding
+ * cop.h — verify against each supported perl. */
+# if SU_HAS_PERL(5, 11, 0)
+static const char *su_block_type[] = {
+ "NULL",
+ "WHEN",
+ "BLOCK",
+ "GIVEN",
+ "LOOP_FOR",
+ "LOOP_PLAIN",
+ "LOOP_LAZYSV",
+ "LOOP_LAZYIV",
+ "SUB",
+ "FORMAT",
+ "EVAL",
+ "SUBST"
+};
+# elif SU_HAS_PERL(5, 9, 3)
+static const char *su_block_type[] = {
+ "NULL",
+ "SUB",
+ "EVAL",
+ "WHEN",
+ "SUBST",
+ "BLOCK",
+ "FORMAT",
+ "GIVEN",
+ "LOOP_FOR",
+ "LOOP_PLAIN",
+ "LOOP_LAZYSV",
+ "LOOP_LAZYIV"
+};
+# else
+static const char *su_block_type[] = {
+ "NULL",
+ "SUB",
+ "EVAL",
+ "LOOP",
+ "SUBST",
+ "BLOCK"
+};
+# endif
+# define SU_CXNAME(C) su_block_type[CxTYPE(C)]
#endif
static void su_pop(pTHX_ void *ud) {
#if SU_HAS_PERL(5, 19, 4)
cx = cxstack + cxstack_ix;
- if (CxTYPE(cx) == CXt_SUB)
+ if (CxTYPE(cx) == CXt_SUB || CxTYPE(cx) == CXt_FORMAT)
save = PL_scopestack[cx->blk_oldscopesp - 1];
#endif
leave_scope(base);
#if SU_HAS_PERL(5, 19, 4)
- if (CxTYPE(cx) == CXt_SUB)
+ if (CxTYPE(cx) == CXt_SUB || CxTYPE(cx) == CXt_FORMAT)
PL_scopestack[cx->blk_oldscopesp - 1] = save;
#endif
}
SU_UD_DEPTH(ud) = --depth;
if (depth > 0) {
- I32 pad;
+ U8 pad;
- if ((pad = SU_UD_PAD(ud))) {
+ if ((pad = SU_UD_PAD(ud)) > 0) {
dMY_CXT;
do {
SU_D(PerlIO_printf(Perl_debug_log,
ud, depth, PL_scopestack_ix, PL_savestack_ix));
SAVEDESTRUCTOR_X(su_pop, ud);
} else {
- SU_UD_HANDLER(ud)(aTHX_ ud);
+ switch (SU_UD_TYPE(ud)) {
+ case SU_UD_TYPE_REAP: {
+ SU_D({
+ PerlIO_printf(Perl_debug_log,
+ "%p: === reap\n%p: depth=%2d scope_ix=%2d save_ix=%2d\n",
+ ud, ud, SU_UD_DEPTH(ud), PL_scopestack_ix, PL_savestack_ix);
+ });
+ SAVEDESTRUCTOR_X(su_call, SU_UD_REAP_CB(ud));
+ SU_UD_FREE(ud);
+ break;
+ }
+ case SU_UD_TYPE_LOCALIZE:
+ su_localize(ud);
+ SU_UD_LOCALIZE_FREE(ud);
+ break;
+ case SU_UD_TYPE_UID:
+ SAVEDESTRUCTOR_X(su_uid_drop, ud);
+ break;
+ }
}
SU_D(PerlIO_printf(Perl_debug_log,
static I32 su_init(pTHX_ void *ud, I32 cxix, I32 size) {
#define su_init(U, C, S) su_init(aTHX_ (U), (C), (S))
- I32 i, depth = 1, pad, offset, *origin;
+ I32 i, depth, offset, base, *origin;
+ U8 pad;
SU_D(PerlIO_printf(Perl_debug_log, "%p: ### init for cx %d\n", ud, cxix));
+ /* su_pop() is going to be called from leave_scope(), so before pushing the
+ * next callback, we'll want to flush the current scope stack slice first.
+ * However, if we want the next callback not to be processed immediately by
+ * the current leave_scope(), we'll need to hide it by artificially
+ * incrementing the scope stack marker before. For the intermediate bumps,
+ * we will only need a bump of SU_SAVE_DESTRUCTOR_SIZE items, but for the
+ * last one we will need a bump of size items. However, in order to preserve
+ * the natural ordering between scope stack markers, we cannot bump lower
+ * markers more than higher ones. This is why we bump the intermediate markers
+ * by the smallest multiple of SU_SAVE_PLACEHOLDER_SIZE greater than or equal
+ * max(SU_SAVE_DESTRUCTOR_SIZE, size). */
+
if (size <= SU_SAVE_DESTRUCTOR_SIZE)
pad = 0;
else {
SU_D(PerlIO_printf(Perl_debug_log, "%p: size=%d pad=%d offset=%d\n",
ud, size, pad, offset));
- for (i = cxstack_ix; i > cxix; --i) {
- PERL_CONTEXT *cx = cxstack + i;
- switch (CxTYPE(cx)) {
-#if SU_HAS_PERL(5, 11, 0)
- case CXt_LOOP_FOR:
- case CXt_LOOP_PLAIN:
- case CXt_LOOP_LAZYSV:
- case CXt_LOOP_LAZYIV:
-#else
- case CXt_LOOP:
-#endif
- SU_D(PerlIO_printf(Perl_debug_log, "%p: cx %d is loop\n", ud, i));
- depth += 2;
- break;
- default:
- SU_D(PerlIO_printf(Perl_debug_log, "%p: cx %d is other\n", ud, i));
- depth++;
- break;
- }
- }
+ depth = PL_scopestack_ix - cxstack[cxix].blk_oldscopesp;
SU_D(PerlIO_printf(Perl_debug_log, "%p: going down to depth %d\n", ud, depth));
+ /* We need to bump all the intermediary stack markers just in case an
+ * exception is thrown before the target scope is reached. Indeed, in this
+ * case there might be arbitrarily many scope frames flushed at the same time,
+ * and since we cannot know in advance whether this will happen or not, we
+ * have to make sure the final frame is protected for the actual action. But
+ * of course, in order to do that, we also need to bump all the previous stack
+ * markers. If not for this, it should have been possible to just bump the two
+ * next frames in su_pop(). */
+
Newx(origin, depth + 1, I32);
- origin[0] = PL_scopestack[PL_scopestack_ix - depth];
- PL_scopestack[PL_scopestack_ix - depth] += size;
- for (i = depth - 1; i >= 1; --i) {
- I32 j = PL_scopestack_ix - i;
- origin[depth - i] = PL_scopestack[j];
+ base = PL_scopestack_ix - depth;
+ origin[0] = PL_scopestack[base];
+ PL_scopestack[base] += size;
+ for (i = 1; i < depth; ++i) {
+ I32 j = i + base;
+ origin[i] = PL_scopestack[j];
PL_scopestack[j] += offset;
}
origin[depth] = PL_savestack_ix;
- SU_UD_ORIGIN(ud) = origin;
- SU_UD_DEPTH(ud) = depth;
SU_UD_PAD(ud) = pad;
+ SU_UD_DEPTH(ud) = depth;
+ SU_UD_ORIGIN(ud) = origin;
/* Make sure the first destructor fires by pushing enough fake slots on the
* stack. */
#if SU_HAS_PERL(5, 10, 0)
if (cxix > 0) {
PERL_CONTEXT *prev = cx - 1;
- U8 type = CxTYPE(prev);
- if ((type == CXt_GIVEN || type == CXt_WHEN)
+ U8 prev_type = CxTYPE(prev);
+ if ((prev_type == CXt_GIVEN || prev_type == CXt_WHEN)
&& (prev->blk_oldcop == cx->blk_oldcop)) {
cxix--;
cx = prev;
- if (type == CXt_GIVEN)
+ if (prev_type == CXt_GIVEN)
goto cxt_given;
else
goto cxt_when;
sud->tmp_uid_storage = MY_CXT.uid_storage;
MY_CXT.uid_storage = sud->old_uid_storage;
{
- su_uid **map;
- UV i, alloc;
+ su_uid *map;
+ STRLEN i, alloc;
map = sud->tmp_uid_storage.map;
alloc = sud->tmp_uid_storage.alloc;
- for (i = 0; i < alloc; ++i) {
- if (map[i])
- map[i]->flags &= SU_UID_ACTIVE;
- }
+ for (i = 0; i < alloc; ++i)
+ map[i].flags &= ~SU_UID_ACTIVE;
}
MY_CXT.uplevel_storage.top = sud->next;
static su_uid *su_uid_storage_fetch(pTHX_ UV depth) {
#define su_uid_storage_fetch(D) su_uid_storage_fetch(aTHX_ (D))
- su_uid **map, *uid;
- STRLEN alloc;
+ su_uid *map;
+ STRLEN alloc;
dMY_CXT;
map = MY_CXT.uid_storage.map;
if (depth >= alloc) {
STRLEN i;
- Renew(map, depth + 1, su_uid *);
- for (i = alloc; i <= depth; ++i)
- map[i] = NULL;
+ Renew(map, depth + 1, su_uid);
+ for (i = alloc; i <= depth; ++i) {
+ map[i].seq = 0;
+ map[i].flags = 0;
+ }
MY_CXT.uid_storage.map = map;
MY_CXT.uid_storage.alloc = depth + 1;
}
- uid = map[depth];
-
- if (!uid) {
- Newx(uid, 1, su_uid);
- uid->seq = 0;
- uid->flags = 0;
- map[depth] = uid;
- }
-
if (depth >= MY_CXT.uid_storage.used)
MY_CXT.uid_storage.used = depth + 1;
- return uid;
+ return map + depth;
}
static int su_uid_storage_check(pTHX_ UV depth, UV seq) {
if (depth >= MY_CXT.uid_storage.used)
return 0;
- uid = MY_CXT.uid_storage.map[depth];
-
- return uid && (uid->seq == seq) && (uid->flags & SU_UID_ACTIVE);
-}
-
-static void su_uid_drop(pTHX_ void *ud_) {
- su_uid *uid = ud_;
+ uid = MY_CXT.uid_storage.map + depth;
- uid->flags &= ~SU_UID_ACTIVE;
-}
-
-static void su_uid_bump(pTHX_ void *ud_) {
- su_ud_reap *ud = ud_;
-
- SAVEDESTRUCTOR_X(su_uid_drop, ud->cb);
-
- SU_UD_FREE(ud);
+ return (uid->seq == seq) && (uid->flags & SU_UID_ACTIVE);
}
static SV *su_uid_get(pTHX_ I32 cxix) {
#define su_uid_get(I) su_uid_get(aTHX_ (I))
su_uid *uid;
- SV *uid_sv;
- UV depth;
+ SV *uid_sv;
+ UV depth;
depth = su_uid_depth(cxix);
uid = su_uid_storage_fetch(depth);
if (!(uid->flags & SU_UID_ACTIVE)) {
- su_ud_reap *ud;
+ su_ud_uid *ud;
- uid->seq = su_uid_seq_next(depth);
+ uid->seq = su_uid_seq_next(depth);
uid->flags |= SU_UID_ACTIVE;
- Newx(ud, 1, su_ud_reap);
- SU_UD_ORIGIN(ud) = NULL;
- SU_UD_HANDLER(ud) = su_uid_bump;
- ud->cb = (SV *) uid;
+ Newx(ud, 1, su_ud_uid);
+ SU_UD_TYPE(ud) = SU_UD_TYPE_UID;
+ ud->idx = depth;
su_init(ud, cxix, SU_SAVE_DESTRUCTOR_SIZE);
}
uid_sv = sv_newmortal();
sv_setpvf(uid_sv, "%"UVuf"-%"UVuf, depth, uid->seq);
+
return uid_sv;
}
/* --- Global setup/teardown ----------------------------------------------- */
-static U32 su_initialized = 0;
+static VOL U32 su_initialized = 0;
static void su_global_teardown(pTHX_ void *root) {
if (!su_initialized)
static void su_local_teardown(pTHX_ void *param) {
su_uplevel_ud *cur;
- su_uid **map;
dMY_CXT;
- map = MY_CXT.uid_storage.map;
- if (map) {
- STRLEN i;
- for (i = 0; i < MY_CXT.uid_storage.used; ++i)
- Safefree(map[i]);
- Safefree(map);
- }
+ Safefree(MY_CXT.uid_storage.map);
cur = MY_CXT.uplevel_storage.root;
if (cur) {
--cxix;
cxix = su_context_skip_db(cxix);
cxix = su_context_normalize_up(cxix);
+ } else {
+ warn(su_stack_smash);
}
EXTEND(SP, 1);
mPUSHi(cxix);
XSRETURN(1);
}
}
+ warn(su_no_such_target, "subroutine");
XSRETURN_UNDEF;
void
XSRETURN(1);
}
}
+ warn(su_no_such_target, "eval");
XSRETURN_UNDEF;
void
SU_GET_LEVEL(0, 0);
cxix = su_context_here();
while (--level >= 0) {
- if (cxix <= 0)
+ if (cxix <= 0) {
+ warn(su_stack_smash);
break;
+ }
--cxix;
cxix = su_context_skip_db(cxix);
cxix = su_context_normalize_up(cxix);
}
}
done:
+ if (level >= 0)
+ warn(su_stack_smash);
EXTEND(SP, 1);
mPUSHi(cxix);
XSRETURN(1);
SU_GET_CONTEXT(1, 1, su_context_skip_db(cxstack_ix));
cxix = su_context_normalize_down(cxix);
Newx(ud, 1, su_ud_reap);
- SU_UD_ORIGIN(ud) = NULL;
- SU_UD_HANDLER(ud) = su_reap;
- ud->cb = newSVsv(hook);
+ SU_UD_TYPE(ud) = SU_UD_TYPE_REAP;
+ ud->cb = (SvROK(hook) && SvTYPE(SvRV(hook)) >= SVt_PVCV)
+ ? SvRV(hook) : hook;
+ SvREFCNT_inc_simple_void(ud->cb);
su_init(ud, cxix, SU_SAVE_DESTRUCTOR_SIZE);
void
SU_GET_CONTEXT(2, 2, su_context_skip_db(cxstack_ix));
cxix = su_context_normalize_down(cxix);
Newx(ud, 1, su_ud_localize);
- SU_UD_ORIGIN(ud) = NULL;
- SU_UD_HANDLER(ud) = su_localize;
+ SU_UD_TYPE(ud) = SU_UD_TYPE_LOCALIZE;
size = su_ud_localize_init(ud, sv, val, NULL);
su_init(ud, cxix, size);
SU_GET_CONTEXT(3, 3, su_context_skip_db(cxstack_ix));
cxix = su_context_normalize_down(cxix);
Newx(ud, 1, su_ud_localize);
- SU_UD_ORIGIN(ud) = NULL;
- SU_UD_HANDLER(ud) = su_localize;
+ /* Initialize SU_UD_ORIGIN(ud) in case SU_UD_LOCALIZE_FREE(ud) needs it */
+ SU_UD_ORIGIN(ud) = NULL;
+ SU_UD_TYPE(ud) = SU_UD_TYPE_LOCALIZE;
size = su_ud_localize_init(ud, sv, val, elem);
- if (ud->type != SVt_PVAV && ud->type != SVt_PVHV) {
+ if (SU_UD_PRIVATE(ud) != SVt_PVAV && SU_UD_PRIVATE(ud) != SVt_PVHV) {
SU_UD_LOCALIZE_FREE(ud);
croak("Can't localize an element of something that isn't an array or a hash");
}
SU_GET_CONTEXT(2, 2, su_context_skip_db(cxstack_ix));
cxix = su_context_normalize_down(cxix);
Newx(ud, 1, su_ud_localize);
- SU_UD_ORIGIN(ud) = NULL;
- SU_UD_HANDLER(ud) = su_localize;
+ SU_UD_TYPE(ud) = SU_UD_TYPE_LOCALIZE;
size = su_ud_localize_init(ud, sv, NULL, elem);
su_init(ud, cxix, size);