new_SV(), del_SV(),
- new_XIV(), del_XIV(),
- new_XNV(), del_XNV(),
+ new_XPVNV(), del_XPVGV(),
etc
Public API:
because the leading fields aren't accessed. Pointers to such bodies
are decremented to point at the unused 'ghost' memory, knowing that
the pointers are used with offsets to the real memory (see the
standalone sketch after this comment).
-
- HE, HEK arenas are managed separately, with separate code, but may
- be merge-able later..
*/
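
The offset trick described above is easier to see in isolation. Here is a
minimal, self-contained sketch (all names hypothetical, not the sv.c API):
only the used tail of the struct is allocated, and the returned pointer is
biased backwards over the missing leading field. As in sv.c, this relies on
the 'ghost' region never being dereferenced (the biased pointer arithmetic
is outside what strict C guarantees).

    #include <stdlib.h>
    #include <stddef.h>

    struct body {
        double ghost_nv;        /* never accessed for this body type */
        size_t cur, len;        /* the fields actually used */
    };

    #define GHOST_OFFSET offsetof(struct body, cur)

    static struct body *new_biased_body(void)
    {
        /* allocate only the used tail of the struct ... */
        char *raw = malloc(sizeof(struct body) - GHOST_OFFSET);
        /* ... and bias the pointer back over the ghost field, so that
           p->cur and p->len land inside the real allocation */
        return (struct body *)(raw - GHOST_OFFSET);
    }

    static void del_biased_body(struct body *p)
    {
        /* undo the bias to recover the real allocation */
        free((char *)p + GHOST_OFFSET);
    }
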
-/* get_arena(size): this creates custom-sized arenas
- TBD: export properly for hv.c: S_more_he().
-*/
-void*
-Perl_get_arena(pTHX_ const size_t arena_size, const svtype bodytype)
-{
- dVAR;
- struct arena_desc* adesc;
- struct arena_set *aroot = (struct arena_set*) PL_body_arenas;
- unsigned int curr;
-
- /* shouldnt need this
- if (!arena_size) arena_size = PERL_ARENA_SIZE;
- */
-
- /* may need new arena-set to hold new arena */
- if (!aroot || aroot->curr >= aroot->set_size) {
- struct arena_set *newroot;
- Newxz(newroot, 1, struct arena_set);
- newroot->set_size = ARENAS_PER_SET;
- newroot->next = aroot;
- aroot = newroot;
- PL_body_arenas = (void *) newroot;
- DEBUG_m(PerlIO_printf(Perl_debug_log, "new arenaset %p\n", (void*)aroot));
- }
-
- /* ok, now have arena-set with at least 1 empty/available arena-desc */
- curr = aroot->curr++;
- adesc = &(aroot->set[curr]);
- assert(!adesc->arena);
-
- Newx(adesc->arena, arena_size, char);
- adesc->size = arena_size;
- adesc->utype = bodytype;
- DEBUG_m(PerlIO_printf(Perl_debug_log, "arena %d added: %p size %"UVuf"\n",
- curr, (void*)adesc->arena, (UV)arena_size));
-
- return adesc->arena;
-}
-
-
/* return a thing to the free list */
#define del_body(thing, root) \
In turn, the new_body_* allocators call S_new_body(), which invokes
the new_body_inline macro, which takes a lock and takes a body off the
-linked list at PL_body_roots[sv_type], calling S_more_bodies() if
+linked list at PL_body_roots[sv_type], calling Perl_more_bodies() if
necessary to refresh an empty list. Then the lock is released, and
the body is returned.
-S_more_bodies calls get_arena(), and carves it up into an array of N
+Perl_more_bodies allocates a new arena and carves it up into an array of N
bodies, which it strings into a linked list. It looks up arena-size
and body-size from the body_details table described below, thus
supporting the multiple body-types.
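
As a rough illustration of that scheme (a hypothetical single-type,
single-thread simplification, not the Perl API), the pop-from-free-list
fast path, the arena-carving slow path and the matching free look like
this; each free body's first word is reused as the link to the next:

    #include <stdlib.h>

    static void *free_list;     /* stand-in for PL_body_roots[type] */

    /* carve one arena into bodies and string them into a list; assumes
       body_size >= sizeof(void *) and arena_size >= body_size */
    static void more_bodies_sketch(size_t body_size, size_t arena_size)
    {
        char *start = malloc(arena_size);
        char *end = start + arena_size / body_size * body_size;

        free_list = start;
        while (start + body_size < end) {
            *(void **)start = start + body_size;  /* link to next body */
            start += body_size;
        }
        *(void **)start = NULL;                   /* last body ends list */
    }

    static void *new_body_sketch(size_t body_size, size_t arena_size)
    {
        void *p;
        if (!free_list)
            more_bodies_sketch(body_size, arena_size);
        p = free_list;                  /* pop the head of the list */
        free_list = *(void **)p;
        return p;
    }

    static void del_body_sketch(void *p)
    {
        *(void **)p = free_list;        /* push back onto the list */
        free_list = p;
    }
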
SVt_NV, FALSE, HADNV, HASARENA, FIT_ARENA(0, sizeof(NV)) },
/* 8 bytes on most ILP32 with IEEE doubles */
- { sizeof(XPV),
+ { sizeof(XPV) - STRUCT_OFFSET(XPV, xpv_cur),
copy_length(XPV, xpv_len) - STRUCT_OFFSET(XPV, xpv_cur),
+ STRUCT_OFFSET(XPV, xpv_cur),
SVt_PV, FALSE, NONV, HASARENA,
FIT_ARENA(0, sizeof(XPV) - STRUCT_OFFSET(XPV, xpv_cur)) },
-#if 2 *PTRSIZE <= IVSIZE
/* 12 */
- { sizeof(XPVIV),
+ { sizeof(XPVIV) - STRUCT_OFFSET(XPV, xpv_cur),
copy_length(XPVIV, xiv_u) - STRUCT_OFFSET(XPV, xpv_cur),
+ STRUCT_OFFSET(XPV, xpv_cur),
SVt_PVIV, FALSE, NONV, HASARENA,
FIT_ARENA(0, sizeof(XPVIV) - STRUCT_OFFSET(XPV, xpv_cur)) },
- /* 12 */
-#else
- { sizeof(XPVIV),
- copy_length(XPVIV, xiv_u),
- 0,
- SVt_PVIV, FALSE, NONV, HASARENA,
- FIT_ARENA(0, sizeof(XPVIV)) },
-#endif
-#if (2 *PTRSIZE <= IVSIZE) && (2 *PTRSIZE <= NVSIZE)
/* 20 */
- { sizeof(XPVNV),
+ { sizeof(XPVNV) - STRUCT_OFFSET(XPV, xpv_cur),
copy_length(XPVNV, xnv_u) - STRUCT_OFFSET(XPV, xpv_cur),
+ STRUCT_OFFSET(XPV, xpv_cur),
SVt_PVNV, FALSE, HADNV, HASARENA,
FIT_ARENA(0, sizeof(XPVNV) - STRUCT_OFFSET(XPV, xpv_cur)) },
-#else
- /* 20 */
- { sizeof(XPVNV), copy_length(XPVNV, xnv_u), 0, SVt_PVNV, FALSE, HADNV,
- HASARENA, FIT_ARENA(0, sizeof(XPVNV)) },
-#endif
/* 28 */
{ sizeof(XPVMG), copy_length(XPVMG, xnv_u), 0, SVt_PVMG, FALSE, HADNV,
sizeof(regexp),
0,
SVt_REGEXP, FALSE, NONV, HASARENA,
- FIT_ARENA(0, sizeof(regexp) - STRUCT_OFFSET(regexp, xpv_cur))
+ FIT_ARENA(0, sizeof(regexp))
},
/* 48 */
#define del_body_allocated(p, sv_type) \
del_body(p + bodies_by_type[sv_type].offset, &PL_body_roots[sv_type])
-
-#define my_safemalloc(s) (void*)safemalloc(s)
-#define my_safecalloc(s) (void*)safecalloc(s, 1)
-#define my_safefree(p) safefree((char*)p)
-
#ifdef PURIFY
-#define new_XNV() my_safemalloc(sizeof(XPVNV))
-#define del_XNV(p) my_safefree(p)
-
-#define new_XPVNV() my_safemalloc(sizeof(XPVNV))
-#define del_XPVNV(p) my_safefree(p)
-
-#define new_XPVAV() my_safemalloc(sizeof(XPVAV))
-#define del_XPVAV(p) my_safefree(p)
+#define new_XNV() safemalloc(sizeof(XPVNV))
+#define new_XPVNV() safemalloc(sizeof(XPVNV))
+#define new_XPVMG() safemalloc(sizeof(XPVMG))
-#define new_XPVHV() my_safemalloc(sizeof(XPVHV))
-#define del_XPVHV(p) my_safefree(p)
-
-#define new_XPVMG() my_safemalloc(sizeof(XPVMG))
-#define del_XPVMG(p) my_safefree(p)
-
-#define new_XPVGV() my_safemalloc(sizeof(XPVGV))
-#define del_XPVGV(p) my_safefree(p)
+#define del_XPVGV(p) safefree(p)
#else /* !PURIFY */
#define new_XNV() new_body_allocated(SVt_NV)
-#define del_XNV(p) del_body_allocated(p, SVt_NV)
-
#define new_XPVNV() new_body_allocated(SVt_PVNV)
-#define del_XPVNV(p) del_body_allocated(p, SVt_PVNV)
-
-#define new_XPVAV() new_body_allocated(SVt_PVAV)
-#define del_XPVAV(p) del_body_allocated(p, SVt_PVAV)
-
-#define new_XPVHV() new_body_allocated(SVt_PVHV)
-#define del_XPVHV(p) del_body_allocated(p, SVt_PVHV)
-
#define new_XPVMG() new_body_allocated(SVt_PVMG)
-#define del_XPVMG(p) del_body_allocated(p, SVt_PVMG)
-#define new_XPVGV() new_body_allocated(SVt_PVGV)
#define del_XPVGV(p) del_body_allocated(p, SVt_PVGV)
#endif /* PURIFY */
/* no arena for you! */
#define new_NOARENA(details) \
- my_safemalloc((details)->body_size + (details)->offset)
+ safemalloc((details)->body_size + (details)->offset)
#define new_NOARENAZ(details) \
- my_safecalloc((details)->body_size + (details)->offset)
+ safecalloc((details)->body_size + (details)->offset, 1)
-STATIC void *
-S_more_bodies (pTHX_ const svtype sv_type)
+void *
+Perl_more_bodies (pTHX_ const svtype sv_type, const size_t body_size,
+ const size_t arena_size)
{
dVAR;
void ** const root = &PL_body_roots[sv_type];
- const struct body_details * const bdp = &bodies_by_type[sv_type];
- const size_t body_size = bdp->body_size;
+ struct arena_desc *adesc;
+ struct arena_set *aroot = (struct arena_set *) PL_body_arenas;
+ unsigned int curr;
char *start;
const char *end;
- const size_t arena_size = Perl_malloc_good_size(bdp->arena_size);
+ const size_t good_arena_size = Perl_malloc_good_size(arena_size);
#if defined(DEBUGGING) && !defined(PERL_GLOBAL_STRUCT_PRIVATE)
static bool done_sanity_check;
}
#endif
- assert(bdp->arena_size);
+ assert(arena_size);
- start = (char*) Perl_get_arena(aTHX_ arena_size, sv_type);
+ /* may need new arena-set to hold new arena */
+ if (!aroot || aroot->curr >= aroot->set_size) {
+ struct arena_set *newroot;
+ Newxz(newroot, 1, struct arena_set);
+ newroot->set_size = ARENAS_PER_SET;
+ newroot->next = aroot;
+ aroot = newroot;
+ PL_body_arenas = (void *) newroot;
+ DEBUG_m(PerlIO_printf(Perl_debug_log, "new arenaset %p\n", (void*)aroot));
+ }
- end = start + arena_size - 2 * body_size;
+ /* ok, now have arena-set with at least 1 empty/available arena-desc */
+ curr = aroot->curr++;
+ adesc = &(aroot->set[curr]);
+ assert(!adesc->arena);
+
+ Newx(adesc->arena, good_arena_size, char);
+ adesc->size = good_arena_size;
+ adesc->utype = sv_type;
+ DEBUG_m(PerlIO_printf(Perl_debug_log, "arena %d added: %p size %"UVuf"\n",
+ curr, (void*)adesc->arena, (UV)good_arena_size));
+
+ start = (char *) adesc->arena;
+
+ /* Get the address of the byte after the end of the last body we can fit.
+ Remember, this is integer division: */
+ end = start + good_arena_size / body_size * body_size;
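+    /* e.g. with a (hypothetical) 4080-byte arena and 56-byte bodies,
+       4080 / 56 = 72 whole bodies, so end = start + 72 * 56 = start + 4032,
+       and the trailing 48 bytes are simply never used */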
/* computed count doesn't reflect the 1st slot reservation */
#if defined(MYMALLOC) || defined(HAS_MALLOC_GOOD_SIZE)
DEBUG_m(PerlIO_printf(Perl_debug_log,
"arena %p end %p arena-size %d (from %d) type %d "
"size %d ct %d\n",
- (void*)start, (void*)end, (int)arena_size,
- (int)bdp->arena_size, sv_type, (int)body_size,
- (int)arena_size / (int)body_size));
+ (void*)start, (void*)end, (int)good_arena_size,
+ (int)arena_size, sv_type, (int)body_size,
+ (int)good_arena_size / (int)body_size));
#else
DEBUG_m(PerlIO_printf(Perl_debug_log,
"arena %p end %p arena-size %d type %d size %d ct %d\n",
(void*)start, (void*)end,
- (int)bdp->arena_size, sv_type, (int)body_size,
- (int)bdp->arena_size / (int)body_size));
+ (int)arena_size, sv_type, (int)body_size,
+ (int)good_arena_size / (int)body_size));
#endif
*root = (void *)start;
- while (start <= end) {
+ while (1) {
+ /* Where the next body would start: */
char * const next = start + body_size;
+
+ if (next >= end) {
+ /* This is the last body: */
+ assert(next == end);
+
+ *(void **)start = 0;
+ return *root;
+ }
+
*(void**) start = (void *)next;
start = next;
}
- *(void **)start = 0;
-
- return *root;
}
/* grab a new thing from the free list, allocating more if necessary.
STMT_START { \
void ** const r3wt = &PL_body_roots[sv_type]; \
xpv = (PTR_TBL_ENT_t*) (*((void **)(r3wt)) \
- ? *((void **)(r3wt)) : more_bodies(sv_type)); \
+ ? *((void **)(r3wt)) : Perl_more_bodies(aTHX_ sv_type, \
+ bodies_by_type[sv_type].body_size,\
+ bodies_by_type[sv_type].arena_size)); \
*(r3wt) = *(void**)(xpv); \
} STMT_END
if (old_type > SVt_IV) {
#ifdef PURIFY
- my_safefree(old_body);
+ safefree(old_body);
#else
/* Note that there is an assumption that all bodies of types that
can be upgraded came from arenas. Only the more complex non-
s = SvPVX_mutable(sv);
if (newlen > SvLEN(sv)) { /* need more room? */
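+	/* overallocate: insist on at least SvCUR plus a fixed fraction of
+	 * it (1/2**PERL_STRLEN_EXPAND_SHIFT) plus 10 bytes, so a long run
+	 * of 1-byte appends triggers O(log n) reallocs rather than one per
+	 * append. With a shift of 2, for example, a 1000-byte string would
+	 * be grown to at least 1000 + 250 + 10 = 1260 bytes. */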
+ STRLEN minlen = SvCUR(sv);
+ minlen += (minlen >> PERL_STRLEN_EXPAND_SHIFT) + 10;
+ if (newlen < minlen)
+ newlen = minlen;
#ifndef Perl_safesysmalloc_size
newlen = PERL_STRLEN_ROUNDUP(newlen);
#endif
/* Give tsv backref magic if it hasn't already got it, then push a
* back-reference to sv onto the array associated with the backref magic.
+ *
+ * As an optimisation, if there's only one backref and it's not an AV,
+ * store it directly in the HvAUX or mg_obj slot, avoiding the need to
+ * allocate an AV. (Whether the slot holds an AV tells us whether this is
+ * active.)
+ *
+ * If an HV's backref is stored in magic, it is moved back to HvAUX.
*/
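
To make the shape of that optimisation concrete, here is a stand-alone
sketch (hypothetical types, not the SV API) of a slot that holds either a
single referent or an array of them, discriminated the same way the code
below does it: by asking whether the object in the slot is an array. It
also shows why sv_add_backref below refuses to store an AV as a singleton
(the SvTYPE(sv) == SVt_PVAV test): an AV sitting in the slot would be
indistinguishable from the array case.

    #include <stdlib.h>

    struct obj { int is_array; };        /* every object carries a tag */
    struct arr { struct obj tag; size_t n, max; struct obj **elems; };

    static void add_backref_sketch(struct obj **slot, struct obj *sv)
    {
        struct arr *a;
        if (!*slot && !sv->is_array) {
            *slot = sv;                  /* store the singleton directly */
            return;
        }
        if (!*slot || !(*slot)->is_array) {
            /* slot empty (but sv is an array) or holding a singleton:
               upgrade to a real array */
            a = calloc(1, sizeof *a);
            a->tag.is_array = 1;
            a->max = 4;
            a->elems = malloc(a->max * sizeof *a->elems);
            if (*slot)
                a->elems[a->n++] = *slot;   /* move old singleton across */
            *slot = &a->tag;
        }
        a = (struct arr *)*slot;
        if (a->n == a->max)                 /* grow, then push sv */
            a->elems = realloc(a->elems, (a->max *= 2) * sizeof *a->elems);
        a->elems[a->n++] = sv;
    }
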
/* A discussion about the backreferences array and its refcount:
* picked on before its parent to have its refcount decremented by the
* random zapper, it won't actually be freed, meaning it's still there for
* when its parent gets freed.
- * When the parent SV is freed, in the case of magic, the magic is freed,
- * Perl_magic_killbackrefs is called which decrements one refcount, then
- * mg_obj is freed which kills the second count.
- * In the case of an HV being freed, one ref is removed by
- * Perl_hv_kill_backrefs, the other by Perl_sv_kill_backrefs, which it
- * calls.
+ *
+ * When the parent SV is freed, the extra ref is killed by
+ * Perl_sv_kill_backrefs. The other ref is killed, in the case of magic,
+ * by mg_free() / MGf_REFCOUNTED, or for a hash, by Perl_hv_kill_backrefs.
+ *
+ * When a single backref SV is stored directly, it is not reference
+ * counted.
*/
void
Perl_sv_add_backref(pTHX_ SV *const tsv, SV *const sv)
{
dVAR;
- AV *av;
+ SV **svp;
+ AV *av = NULL;
+ MAGIC *mg = NULL;
PERL_ARGS_ASSERT_SV_ADD_BACKREF;
- if (SvTYPE(tsv) == SVt_PVHV) {
- AV **const avp = Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv));
+ /* find slot to store array or singleton backref */
- av = *avp;
- if (!av) {
- /* There is no AV in the official place - try a fixup. */
- MAGIC *const mg = mg_find(tsv, PERL_MAGIC_backref);
+ if (SvTYPE(tsv) == SVt_PVHV) {
+ svp = (SV**)Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv));
- if (mg) {
- /* Aha. They've got it stowed in magic. Bring it back. */
- av = MUTABLE_AV(mg->mg_obj);
+ if (!*svp) {
+ if ((mg = mg_find(tsv, PERL_MAGIC_backref))) {
+ /* Aha. They've got it stowed in magic instead.
+ * Move it back to xhv_backreferences */
+ *svp = mg->mg_obj;
/* Stop mg_free decreasing the reference count. */
mg->mg_obj = NULL;
/* Stop mg_free even calling the destructor, given that
there's no AV to free up. */
mg->mg_virtual = 0;
sv_unmagic(tsv, PERL_MAGIC_backref);
- } else {
- av = newAV();
- AvREAL_off(av);
- SvREFCNT_inc_simple_void(av); /* see discussion above */
+ mg = NULL;
}
- *avp = av;
}
} else {
- const MAGIC *const mg
- = SvMAGICAL(tsv) ? mg_find(tsv, PERL_MAGIC_backref) : NULL;
- if (mg)
- av = MUTABLE_AV(mg->mg_obj);
- else {
- av = newAV();
- AvREAL_off(av);
- sv_magic(tsv, MUTABLE_SV(av), PERL_MAGIC_backref, NULL, 0);
- /* av now has a refcnt of 2; see discussion above */
+ if (! ((mg =
+ (SvMAGICAL(tsv) ? mg_find(tsv, PERL_MAGIC_backref) : NULL))))
+ {
+ sv_magic(tsv, NULL, PERL_MAGIC_backref, NULL, 0);
+ mg = mg_find(tsv, PERL_MAGIC_backref);
+ }
+ svp = &(mg->mg_obj);
+ }
+
+ /* create or retrieve the array */
+
+ if ( (!*svp && SvTYPE(sv) == SVt_PVAV)
+ || (*svp && SvTYPE(*svp) != SVt_PVAV)
+ ) {
+ /* create array */
+ av = newAV();
+ AvREAL_off(av);
+ SvREFCNT_inc_simple_void(av);
+ /* av now has a refcnt of 2; see discussion above */
+ if (*svp) {
+ /* move single existing backref to the array */
+ av_extend(av, 1);
+ AvARRAY(av)[++AvFILLp(av)] = *svp; /* av_push() */
}
+ *svp = (SV*)av;
+ if (mg)
+ mg->mg_flags |= MGf_REFCOUNTED;
+ }
+ else
+ av = MUTABLE_AV(*svp);
+
+ if (!av) {
+ /* optimisation: store single backref directly in HvAUX or mg_obj */
+ *svp = sv;
+ return;
}
+ /* push new backref */
+ assert(SvTYPE(av) == SVt_PVAV);
if (AvFILLp(av) >= AvMAX(av)) {
av_extend(av, AvFILLp(av)+1);
}
Perl_sv_del_backref(pTHX_ SV *const tsv, SV *const sv)
{
dVAR;
- AV *av = NULL;
- SV **svp;
+ SV **svp = NULL;
I32 i;
PERL_ARGS_ASSERT_SV_DEL_BACKREF;
if (SvTYPE(tsv) == SVt_PVHV && SvOOK(tsv)) {
- av = *Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv));
- /* We mustn't attempt to "fix up" the hash here by moving the
- backreference array back to the hv_aux structure, as that is stored
- in the main HvARRAY(), and hfreentries assumes that no-one
- reallocates HvARRAY() while it is running. */
+ svp = (SV**)Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv));
}
- if (!av) {
- const MAGIC *const mg
+ if (!svp || !*svp) {
+ MAGIC *const mg
= SvMAGICAL(tsv) ? mg_find(tsv, PERL_MAGIC_backref) : NULL;
- if (mg)
- av = MUTABLE_AV(mg->mg_obj);
+ svp = mg ? &(mg->mg_obj) : NULL;
}
- if (!av)
+ if (!svp || !*svp)
Perl_croak(aTHX_ "panic: del_backref");
- assert(!SvIS_FREED(av));
-
- svp = AvARRAY(av);
- /* We shouldn't be in here more than once, but for paranoia reasons lets
- not assume this. */
- for (i = AvFILLp(av); i >= 0; i--) {
- if (svp[i] == sv) {
- const SSize_t fill = AvFILLp(av);
- if (i != fill) {
- /* We weren't the last entry.
- An unordered list has this property that you can take the
- last element off the end to fill the hole, and it's still
- an unordered list :-)
- */
- svp[i] = svp[fill];
+ if (SvTYPE(*svp) == SVt_PVAV) {
+ int count = 0;
+ AV * const av = (AV*)*svp;
+ assert(!SvIS_FREED(av));
+ svp = AvARRAY(av);
+ for (i = AvFILLp(av); i >= 0; i--) {
+ if (svp[i] == sv) {
+ const SSize_t fill = AvFILLp(av);
+ if (i != fill) {
+ /* We weren't the last entry.
+ An unordered list has this property that you can take the
+ last element off the end to fill the hole, and it's still
+ an unordered list :-)
+ */
+ svp[i] = svp[fill];
+ }
+ svp[fill] = NULL;
+ AvFILLp(av) = fill - 1;
+ count++;
+#ifndef DEBUGGING
+ break; /* should only be one */
+#endif
}
- svp[fill] = NULL;
- AvFILLp(av) = fill - 1;
}
+ assert(count == 1);
+ }
+ else {
+ /* optimisation: only a single backref, stored directly */
+ if (*svp != sv)
+ Perl_croak(aTHX_ "panic: del_backref");
+ *svp = NULL;
}
+
}
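
The deletion loop above relies on the standard swap-with-last idiom for
unordered arrays; stripped of the SV bookkeeping (hypothetical helper, not
part of sv.c) it is just:

    #include <stddef.h>

    /* remove elems[i] from an unordered array of n items in O(1):
       the last element fills the hole; order is not preserved */
    static size_t unordered_remove(void **elems, size_t n, size_t i)
    {
        elems[i] = elems[n - 1];
        elems[n - 1] = NULL;
        return n - 1;               /* the new element count */
    }
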
-int
+void
Perl_sv_kill_backrefs(pTHX_ SV *const sv, AV *const av)
{
- SV **svp = AvARRAY(av);
+ SV **svp;
+ SV **last;
+ bool is_array;
PERL_ARGS_ASSERT_SV_KILL_BACKREFS;
- if (svp) {
- SV *const *const last = svp + AvFILLp(av);
+ if (!av)
+ return;
+ is_array = (SvTYPE(av) == SVt_PVAV);
+ if (is_array) {
assert(!SvIS_FREED(av));
+ svp = AvARRAY(av);
+ if (svp)
+ last = svp + AvFILLp(av);
+ }
+ else {
+ /* optimisation: only a single backref, stored directly */
+ svp = (SV**)&av;
+ last = svp;
+ }
+
+ if (svp) {
while (svp <= last) {
if (*svp) {
SV *const referrer = *svp;
(UV)SvFLAGS(referrer));
}
- *svp = NULL;
+ if (is_array)
+ *svp = NULL;
}
svp++;
}
+ }
+ if (is_array) {
AvFILLp(av) = -1;
+ SvREFCNT_dec(av); /* remove extra count added by sv_add_backref() */
}
- SvREFCNT_dec(av); /* remove extra count added by sv_add_backref() */
- return 0;
+ return;
}
/*
&PL_body_roots[type]);
}
else if (sv_type_details->body_size) {
- my_safefree(SvANY(sv));
+ safefree(SvANY(sv));
}
}
}
else
*/
- if(nmg->mg_type == PERL_MAGIC_backref) {
- /* The backref AV has its reference count deliberately bumped by
- 1. */
- nmg->mg_obj
- = SvREFCNT_inc(av_dup_inc((const AV *) nmg->mg_obj, param));
- }
- else {
- nmg->mg_obj = (nmg->mg_flags & MGf_REFCOUNTED)
- ? sv_dup_inc(nmg->mg_obj, param)
- : sv_dup(nmg->mg_obj, param);
- }
+ nmg->mg_obj = (nmg->mg_flags & MGf_REFCOUNTED)
+ ? nmg->mg_type == PERL_MAGIC_backref
+ /* The backref AV has its reference
+ * count deliberately bumped by 1 */
+ ? SvREFCNT_inc(av_dup_inc((const AV *)
+ nmg->mg_obj, param))
+ : sv_dup_inc(nmg->mg_obj, param)
+ : sv_dup(nmg->mg_obj, param);
if (nmg->mg_ptr && nmg->mg_type != PERL_MAGIC_regex_global) {
if (nmg->mg_len > 0) {
* thread */
? NULL
: saux->xhv_backreferences
- ? MUTABLE_AV(SvREFCNT_inc(
- sv_dup_inc((const SV *)saux->xhv_backreferences, param)))
+ ? (SvTYPE(saux->xhv_backreferences) == SVt_PVAV)
+ ? MUTABLE_AV(SvREFCNT_inc(
+ sv_dup_inc((const SV *)
+ saux->xhv_backreferences, param)))
+ : MUTABLE_AV(sv_dup((const SV *)
+ saux->xhv_backreferences, param))
: 0;
daux->xhv_mro_meta = saux->xhv_mro_meta
void
Perl_clone_params_del(CLONE_PARAMS *param)
{
- PerlInterpreter *const was = PERL_GET_THX;
+ /* This seemingly funky ordering keeps the build with PERL_GLOBAL_STRUCT
+ happy: */
PerlInterpreter *const to = param->new_perl;
dTHXa(to);
+ PerlInterpreter *const was = PERL_GET_THX;
PERL_ARGS_ASSERT_CLONE_PARAMS_DEL;
CLONE_PARAMS *
Perl_clone_params_new(PerlInterpreter *const from, PerlInterpreter *const to)
{
+ dVAR;
/* Need to play this game, as newAV() can call safesysmalloc(), and that
does a dTHX; to get the context from thread local storage.
FIXME - under PERL_CORE Newx(), Safefree() and friends should expand to