new_SV(), del_SV(),
- new_XIV(), del_XIV(),
- new_XNV(), del_XNV(),
+ new_XPVNV(), del_XPVGV(),
etc
Public API:
* "A time to plant, and a time to uproot what was planted..."
*/
-void
-Perl_offer_nice_chunk(pTHX_ void *const chunk, const U32 chunk_size)
-{
- dVAR;
- void *new_chunk;
- U32 new_chunk_size;
-
- PERL_ARGS_ASSERT_OFFER_NICE_CHUNK;
-
- new_chunk = (void *)(chunk);
- new_chunk_size = (chunk_size);
- if (new_chunk_size > PL_nice_chunk_size) {
- Safefree(PL_nice_chunk);
- PL_nice_chunk = (char *) new_chunk;
- PL_nice_chunk_size = new_chunk_size;
- } else {
- Safefree(chunk);
- }
-}
-
#ifdef PERL_MEM_LOG
# define MEM_LOG_NEW_SV(sv, file, line, func) \
Perl_mem_log_new_sv(sv, file, line, func)
{
dVAR;
SV* sv;
-
- if (PL_nice_chunk) {
- sv_add_arena(PL_nice_chunk, PL_nice_chunk_size, 0);
- PL_nice_chunk = NULL;
- PL_nice_chunk_size = 0;
- }
- else {
- char *chunk; /* must use New here to match call to */
- Newx(chunk,PERL_ARENA_SIZE,char); /* Safefree() in sv_free_arenas() */
- sv_add_arena(chunk, PERL_ARENA_SIZE, 0);
- }
+ char *chunk; /* must use New here to match call to */
+ Newx(chunk,PERL_ARENA_SIZE,char); /* Safefree() in sv_free_arenas() */
+ sv_add_arena(chunk, PERL_ARENA_SIZE, 0);
uproot_SV(sv);
return sv;
}
: 0
);
sv->sv_debug_inpad = 0;
- sv->sv_debug_cloned = 0;
+ sv->sv_debug_parent = NULL;
sv->sv_debug_file = PL_curcop ? savepv(CopFILE(PL_curcop)): NULL;
sv->sv_debug_serial = PL_sv_serial++;
while (i--)
PL_body_roots[i] = 0;
- Safefree(PL_nice_chunk);
- PL_nice_chunk = NULL;
- PL_nice_chunk_size = 0;
PL_sv_arenaroot = 0;
PL_sv_root = 0;
}
are decremented to point at the unused 'ghost' memory, knowing that
the pointers are used with offsets to the real memory.
- HE, HEK arenas are managed separately, with separate code, but may
- be merge-able later..
-*/
-
-/* get_arena(size): this creates custom-sized arenas
- TBD: export properly for hv.c: S_more_he().
-*/
-void*
-Perl_get_arena(pTHX_ const size_t arena_size, const svtype bodytype)
-{
- dVAR;
- struct arena_desc* adesc;
- struct arena_set *aroot = (struct arena_set*) PL_body_arenas;
- unsigned int curr;
-
- /* shouldnt need this
- if (!arena_size) arena_size = PERL_ARENA_SIZE;
- */
-
- /* may need new arena-set to hold new arena */
- if (!aroot || aroot->curr >= aroot->set_size) {
- struct arena_set *newroot;
- Newxz(newroot, 1, struct arena_set);
- newroot->set_size = ARENAS_PER_SET;
- newroot->next = aroot;
- aroot = newroot;
- PL_body_arenas = (void *) newroot;
- DEBUG_m(PerlIO_printf(Perl_debug_log, "new arenaset %p\n", (void*)aroot));
- }
-
- /* ok, now have arena-set with at least 1 empty/available arena-desc */
- curr = aroot->curr++;
- adesc = &(aroot->set[curr]);
- assert(!adesc->arena);
-
- Newx(adesc->arena, arena_size, char);
- adesc->size = arena_size;
- adesc->utype = bodytype;
- DEBUG_m(PerlIO_printf(Perl_debug_log, "arena %d added: %p size %"UVuf"\n",
- curr, (void*)adesc->arena, (UV)arena_size));
-
- return adesc->arena;
-}
-
-
-/* return a thing to the free list */
-
-#define del_body(thing, root) \
- STMT_START { \
- void ** const thing_copy = (void **)thing;\
- *thing_copy = *root; \
- *root = (void*)thing_copy; \
- } STMT_END
-
-/*
=head1 SV-Body Allocation
In turn, the new_body_* allocators call S_new_body(), which invokes
new_body_inline macro, which takes a lock, and takes a body off the
-linked list at PL_body_roots[sv_type], calling S_more_bodies() if
+linked list at PL_body_roots[sv_type], calling Perl_more_bodies() if
necessary to refresh an empty list. Then the lock is released, and
the body is returned.
-S_more_bodies calls get_arena(), and carves it up into an array of N
+Perl_more_bodies allocates a new arena, and carves it up into an array of N
bodies, which it strings into a linked list. It looks up arena-size
and body-size from the body_details table described below, thus
supporting the multiple body-types.
If PURIFY is defined, or PERL_ARENA_SIZE=0, arenas are not used, and
the (new|del)_X*V macros are mapped directly to malloc/free.
-*/
-
-/*
-
For each sv-type, struct body_details bodies_by_type[] carries
parameters which control these aspects of SV handling:
+ sizeof (((type*)SvANY((const SV *)0))->last_member)
static const struct body_details bodies_by_type[] = {
- { sizeof(HE), 0, 0, SVt_NULL,
- FALSE, NONV, NOARENA, FIT_ARENA(0, sizeof(HE)) },
+ /* HEs use this offset for their arena. */
+ { 0, 0, 0, SVt_NULL, FALSE, NONV, NOARENA, 0 },
/* The bind placeholder pretends to be an RV for now.
Also it's marked as "can't upgrade" to stop anyone using it before it's
},
/* 8 bytes on most ILP32 with IEEE doubles */
- { sizeof(NV), sizeof(NV), 0, SVt_NV, FALSE, HADNV, HASARENA,
- FIT_ARENA(0, sizeof(NV)) },
+ { sizeof(NV), sizeof(NV),
+ STRUCT_OFFSET(XPVNV, xnv_u),
+ SVt_NV, FALSE, HADNV, HASARENA, FIT_ARENA(0, sizeof(NV)) },
/* 8 bytes on most ILP32 with IEEE doubles */
{ sizeof(XPV) - STRUCT_OFFSET(XPV, xpv_cur),
/* 12 */
{ sizeof(XPVIV) - STRUCT_OFFSET(XPV, xpv_cur),
copy_length(XPVIV, xiv_u) - STRUCT_OFFSET(XPV, xpv_cur),
- + STRUCT_OFFSET(XPVIV, xpv_cur),
+ + STRUCT_OFFSET(XPV, xpv_cur),
SVt_PVIV, FALSE, NONV, HASARENA,
- FIT_ARENA(0, sizeof(XPV) - STRUCT_OFFSET(XPV, xpv_cur)) },
+ FIT_ARENA(0, sizeof(XPVIV) - STRUCT_OFFSET(XPV, xpv_cur)) },
/* 20 */
- { sizeof(XPVNV), copy_length(XPVNV, xiv_u), 0, SVt_PVNV, FALSE, HADNV,
- HASARENA, FIT_ARENA(0, sizeof(XPVNV)) },
+ { sizeof(XPVNV) - STRUCT_OFFSET(XPV, xpv_cur),
+ copy_length(XPVNV, xnv_u) - STRUCT_OFFSET(XPV, xpv_cur),
+ + STRUCT_OFFSET(XPV, xpv_cur),
+ SVt_PVNV, FALSE, HADNV, HASARENA,
+ FIT_ARENA(0, sizeof(XPVNV) - STRUCT_OFFSET(XPV, xpv_cur)) },
/* 28 */
- { sizeof(XPVMG), copy_length(XPVMG, xmg_stash), 0, SVt_PVMG, FALSE, HADNV,
+ { sizeof(XPVMG), copy_length(XPVMG, xnv_u), 0, SVt_PVMG, FALSE, HADNV,
HASARENA, FIT_ARENA(0, sizeof(XPVMG)) },
/* something big */
- { sizeof(regexp) - STRUCT_OFFSET(regexp, xpv_cur),
- sizeof(regexp) - STRUCT_OFFSET(regexp, xpv_cur),
- + STRUCT_OFFSET(regexp, xpv_cur),
+ { sizeof(regexp),
+ sizeof(regexp),
+ 0,
SVt_REGEXP, FALSE, NONV, HASARENA,
- FIT_ARENA(0, sizeof(regexp) - STRUCT_OFFSET(regexp, xpv_cur))
+ FIT_ARENA(0, sizeof(regexp))
},
/* 48 */
{ sizeof(XPVLV), sizeof(XPVLV), 0, SVt_PVLV, TRUE, HADNV,
HASARENA, FIT_ARENA(0, sizeof(XPVLV)) },
- { sizeof(XPVAV) - STRUCT_OFFSET(XPVAV, xav_fill),
- copy_length(XPVAV, xmg_stash) - STRUCT_OFFSET(XPVAV, xav_fill),
- + STRUCT_OFFSET(XPVAV, xav_fill),
+ { sizeof(XPVAV),
+ copy_length(XPVAV, xav_alloc),
+ 0,
SVt_PVAV, TRUE, NONV, HASARENA,
- FIT_ARENA(0, sizeof(XPVAV) - STRUCT_OFFSET(XPVAV, xav_fill)) },
+ FIT_ARENA(0, sizeof(XPVAV)) },
- { sizeof(XPVHV) - STRUCT_OFFSET(XPVHV, xhv_fill),
- copy_length(XPVHV, xmg_stash) - STRUCT_OFFSET(XPVHV, xhv_fill),
- + STRUCT_OFFSET(XPVHV, xhv_fill),
+ { sizeof(XPVHV),
+ copy_length(XPVHV, xhv_max),
+ 0,
SVt_PVHV, TRUE, NONV, HASARENA,
- FIT_ARENA(0, sizeof(XPVHV) - STRUCT_OFFSET(XPVHV, xhv_fill)) },
+ FIT_ARENA(0, sizeof(XPVHV)) },
/* 56 */
- { sizeof(XPVCV) - STRUCT_OFFSET(XPVCV, xpv_cur),
- sizeof(XPVCV) - STRUCT_OFFSET(XPVCV, xpv_cur),
- + STRUCT_OFFSET(XPVCV, xpv_cur),
+ { sizeof(XPVCV),
+ sizeof(XPVCV),
+ 0,
SVt_PVCV, TRUE, NONV, HASARENA,
- FIT_ARENA(0, sizeof(XPVCV) - STRUCT_OFFSET(XPVCV, xpv_cur)) },
+ FIT_ARENA(0, sizeof(XPVCV)) },
- { sizeof(XPVFM) - STRUCT_OFFSET(XPVFM, xpv_cur),
- sizeof(XPVFM) - STRUCT_OFFSET(XPVFM, xpv_cur),
- + STRUCT_OFFSET(XPVFM, xpv_cur),
+ { sizeof(XPVFM),
+ sizeof(XPVFM),
+ 0,
SVt_PVFM, TRUE, NONV, NOARENA,
- FIT_ARENA(20, sizeof(XPVFM) - STRUCT_OFFSET(XPVFM, xpv_cur)) },
+ FIT_ARENA(20, sizeof(XPVFM)) },
/* XPVIO is 84 bytes, fits 48x */
- { sizeof(XPVIO) - STRUCT_OFFSET(XPVIO, xpv_cur),
- sizeof(XPVIO) - STRUCT_OFFSET(XPVIO, xpv_cur),
- + STRUCT_OFFSET(XPVIO, xpv_cur),
+ { sizeof(XPVIO),
+ sizeof(XPVIO),
+ 0,
SVt_PVIO, TRUE, NONV, HASARENA,
- FIT_ARENA(24, sizeof(XPVIO) - STRUCT_OFFSET(XPVIO, xpv_cur)) },
+ FIT_ARENA(24, sizeof(XPVIO)) },
};
-#define new_body_type(sv_type) \
- (void *)((char *)S_new_body(aTHX_ sv_type))
-
-#define del_body_type(p, sv_type) \
- del_body(p, &PL_body_roots[sv_type])
-
-
#define new_body_allocated(sv_type) \
(void *)((char *)S_new_body(aTHX_ sv_type) \
- bodies_by_type[sv_type].offset)
-#define del_body_allocated(p, sv_type) \
- del_body(p + bodies_by_type[sv_type].offset, &PL_body_roots[sv_type])
-
+/* return a thing to the free list */
-#define my_safemalloc(s) (void*)safemalloc(s)
-#define my_safecalloc(s) (void*)safecalloc(s, 1)
-#define my_safefree(p) safefree((char*)p)
+#define del_body(thing, root) \
+ STMT_START { \
+ void ** const thing_copy = (void **)thing; \
+ *thing_copy = *root; \
+ *root = (void*)thing_copy; \
+ } STMT_END
#ifdef PURIFY
-#define new_XNV() my_safemalloc(sizeof(XPVNV))
-#define del_XNV(p) my_safefree(p)
-
-#define new_XPVNV() my_safemalloc(sizeof(XPVNV))
-#define del_XPVNV(p) my_safefree(p)
-
-#define new_XPVAV() my_safemalloc(sizeof(XPVAV))
-#define del_XPVAV(p) my_safefree(p)
+#define new_XNV() safemalloc(sizeof(XPVNV))
+#define new_XPVNV() safemalloc(sizeof(XPVNV))
+#define new_XPVMG() safemalloc(sizeof(XPVMG))
-#define new_XPVHV() my_safemalloc(sizeof(XPVHV))
-#define del_XPVHV(p) my_safefree(p)
-
-#define new_XPVMG() my_safemalloc(sizeof(XPVMG))
-#define del_XPVMG(p) my_safefree(p)
-
-#define new_XPVGV() my_safemalloc(sizeof(XPVGV))
-#define del_XPVGV(p) my_safefree(p)
+#define del_XPVGV(p) safefree(p)
#else /* !PURIFY */
-#define new_XNV() new_body_type(SVt_NV)
-#define del_XNV(p) del_body_type(p, SVt_NV)
-
-#define new_XPVNV() new_body_type(SVt_PVNV)
-#define del_XPVNV(p) del_body_type(p, SVt_PVNV)
-
-#define new_XPVAV() new_body_allocated(SVt_PVAV)
-#define del_XPVAV(p) del_body_allocated(p, SVt_PVAV)
-
-#define new_XPVHV() new_body_allocated(SVt_PVHV)
-#define del_XPVHV(p) del_body_allocated(p, SVt_PVHV)
+#define new_XNV() new_body_allocated(SVt_NV)
+#define new_XPVNV() new_body_allocated(SVt_PVNV)
+#define new_XPVMG() new_body_allocated(SVt_PVMG)
-#define new_XPVMG() new_body_type(SVt_PVMG)
-#define del_XPVMG(p) del_body_type(p, SVt_PVMG)
-
-#define new_XPVGV() new_body_type(SVt_PVGV)
-#define del_XPVGV(p) del_body_type(p, SVt_PVGV)
+#define del_XPVGV(p) del_body(p + bodies_by_type[SVt_PVGV].offset, \
+ &PL_body_roots[SVt_PVGV])
#endif /* PURIFY */
/* no arena for you! */
#define new_NOARENA(details) \
- my_safemalloc((details)->body_size + (details)->offset)
+ safemalloc((details)->body_size + (details)->offset)
#define new_NOARENAZ(details) \
- my_safecalloc((details)->body_size + (details)->offset)
+ safecalloc((details)->body_size + (details)->offset, 1)
-STATIC void *
-S_more_bodies (pTHX_ const svtype sv_type)
+void *
+Perl_more_bodies (pTHX_ const svtype sv_type, const size_t body_size,
+ const size_t arena_size)
{
dVAR;
void ** const root = &PL_body_roots[sv_type];
- const struct body_details * const bdp = &bodies_by_type[sv_type];
- const size_t body_size = bdp->body_size;
+ struct arena_desc *adesc;
+ struct arena_set *aroot = (struct arena_set *) PL_body_arenas;
+ unsigned int curr;
char *start;
const char *end;
- const size_t arena_size = Perl_malloc_good_size(bdp->arena_size);
+ const size_t good_arena_size = Perl_malloc_good_size(arena_size);
#if defined(DEBUGGING) && !defined(PERL_GLOBAL_STRUCT_PRIVATE)
static bool done_sanity_check;
}
#endif
- assert(bdp->arena_size);
+ assert(arena_size);
+
+ /* may need new arena-set to hold new arena */
+ if (!aroot || aroot->curr >= aroot->set_size) {
+ struct arena_set *newroot;
+ Newxz(newroot, 1, struct arena_set);
+ newroot->set_size = ARENAS_PER_SET;
+ newroot->next = aroot;
+ aroot = newroot;
+ PL_body_arenas = (void *) newroot;
+ DEBUG_m(PerlIO_printf(Perl_debug_log, "new arenaset %p\n", (void*)aroot));
+ }
+
+ /* ok, now have arena-set with at least 1 empty/available arena-desc */
+ curr = aroot->curr++;
+ adesc = &(aroot->set[curr]);
+ assert(!adesc->arena);
+
+ Newx(adesc->arena, good_arena_size, char);
+ adesc->size = good_arena_size;
+ adesc->utype = sv_type;
+ DEBUG_m(PerlIO_printf(Perl_debug_log, "arena %d added: %p size %"UVuf"\n",
+ curr, (void*)adesc->arena, (UV)good_arena_size));
- start = (char*) Perl_get_arena(aTHX_ arena_size, sv_type);
+ start = (char *) adesc->arena;
- end = start + arena_size - 2 * body_size;
+ /* Get the address of the byte after the end of the last body we can fit.
+ Remember, this is integer division: */
+ end = start + good_arena_size / body_size * body_size;
/* computed count doesnt reflect the 1st slot reservation */
#if defined(MYMALLOC) || defined(HAS_MALLOC_GOOD_SIZE)
DEBUG_m(PerlIO_printf(Perl_debug_log,
"arena %p end %p arena-size %d (from %d) type %d "
"size %d ct %d\n",
- (void*)start, (void*)end, (int)arena_size,
- (int)bdp->arena_size, sv_type, (int)body_size,
- (int)arena_size / (int)body_size));
+ (void*)start, (void*)end, (int)good_arena_size,
+ (int)arena_size, sv_type, (int)body_size,
+ (int)good_arena_size / (int)body_size));
#else
DEBUG_m(PerlIO_printf(Perl_debug_log,
"arena %p end %p arena-size %d type %d size %d ct %d\n",
(void*)start, (void*)end,
- (int)bdp->arena_size, sv_type, (int)body_size,
- (int)bdp->arena_size / (int)body_size));
+ (int)arena_size, sv_type, (int)body_size,
+ (int)good_arena_size / (int)body_size));
#endif
*root = (void *)start;
- while (start <= end) {
+ while (1) {
+ /* Where the next body would start: */
char * const next = start + body_size;
+
+ if (next >= end) {
+ /* This is the last body: */
+ assert(next == end);
+
+ *(void **)start = 0;
+ return *root;
+ }
+
*(void**) start = (void *)next;
start = next;
}
- *(void **)start = 0;
-
- return *root;
}
/* grab a new thing from the free list, allocating more if necessary.
STMT_START { \
void ** const r3wt = &PL_body_roots[sv_type]; \
xpv = (PTR_TBL_ENT_t*) (*((void **)(r3wt)) \
- ? *((void **)(r3wt)) : more_bodies(sv_type)); \
+ ? *((void **)(r3wt)) : Perl_more_bodies(aTHX_ sv_type, \
+ bodies_by_type[sv_type].body_size,\
+ bodies_by_type[sv_type].arena_size)); \
*(r3wt) = *(void**)(xpv); \
} STMT_END
HvSHAREKEYS_on(sv); /* key-sharing on by default */
#endif
HvMAX(sv) = 7; /* (start with 8 buckets) */
- if (old_type_details->body_size) {
- HvFILL(sv) = 0;
- } else {
- /* It will have been zeroed when the new body was allocated.
- Lets not write to it, in case it confuses a write-back
- cache. */
- }
}
/* SVt_NULL isn't the only thing upgraded to AV or HV.
if (old_type > SVt_IV) {
#ifdef PURIFY
- my_safefree(old_body);
+ safefree(old_body);
#else
/* Note that there is an assumption that all bodies of types that
can be upgraded came from arenas. Only the more complex non-
s = SvPVX_mutable(sv);
if (newlen > SvLEN(sv)) { /* need more room? */
+ STRLEN minlen = SvCUR(sv);
+ minlen += (minlen >> PERL_STRLEN_EXPAND_SHIFT) + 10;
+ if (newlen < minlen)
+ newlen = minlen;
#ifndef Perl_safesysmalloc_size
newlen = PERL_STRLEN_ROUNDUP(newlen);
#endif
if (SvROK(sv)) {
return_rok:
if (SvAMAGIC(sv)) {
- SV * const tmpstr=AMG_CALLun(sv,numer);
+ SV * tmpstr;
+ if (flags & SV_SKIP_OVERLOAD)
+ return 0;
+ tmpstr=AMG_CALLun(sv,numer);
if (tmpstr && (!SvROK(tmpstr) || (SvRV(tmpstr) != SvRV(sv)))) {
return SvIV(tmpstr);
}
if (SvROK(sv)) {
return_rok:
if (SvAMAGIC(sv)) {
- SV *const tmpstr = AMG_CALLun(sv,numer);
+ SV *tmpstr;
+ if (flags & SV_SKIP_OVERLOAD)
+ return 0;
+ tmpstr = AMG_CALLun(sv,numer);
if (tmpstr && (!SvROK(tmpstr) || (SvRV(tmpstr) != SvRV(sv)))) {
return SvUV(tmpstr);
}
}
/*
-=for apidoc sv_2nv
+=for apidoc sv_2nv_flags
Return the num value of an SV, doing any necessary string or integer
-conversion, magic etc. Normally used via the C<SvNV(sv)> and C<SvNVx(sv)>
-macros.
+conversion. If flags includes SV_GMAGIC, does an mg_get() first.
+Normally used via the C<SvNV(sv)> and C<SvNVx(sv)> macros.
=cut
*/
NV
-Perl_sv_2nv(pTHX_ register SV *const sv)
+Perl_sv_2nv_flags(pTHX_ register SV *const sv, const I32 flags)
{
dVAR;
if (!sv)
if (SvGMAGICAL(sv) || (SvTYPE(sv) == SVt_PVGV && SvVALID(sv))) {
/* FBMs use the same flag bit as SVf_IVisUV, so must let them
cache IVs just in case. */
- mg_get(sv);
+ if (flags & SV_GMAGIC)
+ mg_get(sv);
if (SvNOKp(sv))
return SvNVX(sv);
if ((SvPOKp(sv) && SvLEN(sv)) && !SvIOKp(sv)) {
if (SvROK(sv)) {
return_rok:
if (SvAMAGIC(sv)) {
- SV *const tmpstr = AMG_CALLun(sv,numer);
+ SV *tmpstr;
+ if (flags & SV_SKIP_OVERLOAD)
+ return 0;
+ tmpstr = AMG_CALLun(sv,numer);
if (tmpstr && (!SvROK(tmpstr) || (SvRV(tmpstr) != SvRV(sv)))) {
return SvNV(tmpstr);
}
return sv;
if (SvAMAGIC(sv)) {
SV * const tmpsv = AMG_CALLun(sv,numer);
+ TAINT_IF(tmpsv && SvTAINTED(tmpsv));
if (tmpsv && (!SvROK(tmpsv) || (SvRV(tmpsv) != SvRV(sv))))
return sv_2num(tmpsv);
}
if (SvROK(sv)) {
return_rok:
if (SvAMAGIC(sv)) {
- SV *const tmpstr = AMG_CALLun(sv,string);
+ SV *tmpstr;
+ if (flags & SV_SKIP_OVERLOAD)
+ return NULL;
+ tmpstr = AMG_CALLun(sv,string);
+ TAINT_IF(tmpstr && SvTAINTED(tmpstr));
if (tmpstr && (!SvROK(tmpstr) || (SvRV(tmpstr) != SvRV(sv)))) {
/* Unwrap this: */
/* char *pv = lp ? SvPV(tmpstr, *lp) : SvPV_nolen(tmpstr);
sv_force_normal_flags(sv, 0);
}
if (SvREADONLY(sv)) {
- Perl_croak(aTHX_ "%s", PL_no_modify);
+ Perl_croak_no_modify(aTHX);
}
(void) sv_utf8_upgrade(sv);
SvUTF8_off(sv);
}
}
else if (IN_PERL_RUNTIME)
- Perl_croak(aTHX_ "%s", PL_no_modify);
+ Perl_croak_no_modify(aTHX);
}
#else
if (SvREADONLY(sv)) {
unshare_hek(SvSHARED_HEK_FROM_PV(pvx));
}
else if (IN_PERL_RUNTIME)
- Perl_croak(aTHX_ "%s", PL_no_modify);
+ Perl_croak_no_modify(aTHX);
}
#endif
if (SvROK(sv))
&& how != PERL_MAGIC_backref
)
{
- Perl_croak(aTHX_ "%s", PL_no_modify);
+ Perl_croak_no_modify(aTHX);
}
}
if (SvMAGICAL(sv) || (how == PERL_MAGIC_taint && SvTYPE(sv) >= SVt_PVMG)) {
const MGVTBL* const vtbl = mg->mg_virtual;
*mgp = mg->mg_moremagic;
if (vtbl && vtbl->svt_free)
- CALL_FPTR(vtbl->svt_free)(aTHX_ sv, mg);
+ vtbl->svt_free(aTHX_ sv, mg);
if (mg->mg_ptr && mg->mg_type != PERL_MAGIC_regex_global) {
if (mg->mg_len > 0)
Safefree(mg->mg_ptr);
/* Give tsv backref magic if it hasn't already got it, then push a
* back-reference to sv onto the array associated with the backref magic.
+ *
+ * As an optimisation, if there's only one backref and it's not an AV,
+ * store it directly in the HvAUX or mg_obj slot, avoiding the need to
+ * allocate an AV. (Whether the slot holds an AV tells us whether this is
+ * active.)
+ *
+ * If an HV's backref is stored in magic, it is moved back to HvAUX.
*/
/* A discussion about the backreferences array and its refcount:
* structure, from the xhv_backreferences field. (A HV without hv_aux will
* have the standard magic instead.) The array is created with a refcount
* of 2. This means that if during global destruction the array gets
- * picked on first to have its refcount decremented by the random zapper,
- * it won't actually be freed, meaning it's still theere for when its
- * parent gets freed.
- * When the parent SV is freed, in the case of magic, the magic is freed,
- * Perl_magic_killbackrefs is called which decrements one refcount, then
- * mg_obj is freed which kills the second count.
- * In the vase of a HV being freed, one ref is removed by
- * Perl_hv_kill_backrefs, the other by Perl_sv_kill_backrefs, which it
- * calls.
+ * picked on before its parent to have its refcount decremented by the
+ * random zapper, it won't actually be freed, meaning it's still there for
+ * when its parent gets freed.
+ *
+ * When the parent SV is freed, the extra ref is killed by
+ * Perl_sv_kill_backrefs. The other ref is killed, in the case of magic,
+ * by mg_free() / MGf_REFCOUNTED, or for a hash, by Perl_hv_kill_backrefs.
+ *
+ * When a single backref SV is stored directly, it is not reference
+ * counted.
*/
void
Perl_sv_add_backref(pTHX_ SV *const tsv, SV *const sv)
{
dVAR;
- AV *av;
+ SV **svp;
+ AV *av = NULL;
+ MAGIC *mg = NULL;
PERL_ARGS_ASSERT_SV_ADD_BACKREF;
- if (SvTYPE(tsv) == SVt_PVHV) {
- AV **const avp = Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv));
-
- av = *avp;
- if (!av) {
- /* There is no AV in the offical place - try a fixup. */
- MAGIC *const mg = mg_find(tsv, PERL_MAGIC_backref);
+ /* find slot to store array or singleton backref */
- if (mg) {
- /* Aha. They've got it stowed in magic. Bring it back. */
- av = MUTABLE_AV(mg->mg_obj);
- /* Stop mg_free decreasing the refernce count. */
+ if (SvTYPE(tsv) == SVt_PVHV) {
+ svp = (SV**)Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv));
+
+ if (!*svp) {
+ if ((mg = mg_find(tsv, PERL_MAGIC_backref))) {
+ /* Aha. They've got it stowed in magic instead.
+ * Move it back to xhv_backreferences */
+ *svp = mg->mg_obj;
+ /* Stop mg_free decreasing the reference count. */
mg->mg_obj = NULL;
/* Stop mg_free even calling the destructor, given that
there's no AV to free up. */
mg->mg_virtual = 0;
sv_unmagic(tsv, PERL_MAGIC_backref);
- } else {
- av = newAV();
- AvREAL_off(av);
- SvREFCNT_inc_simple_void(av); /* see discussion above */
+ mg = NULL;
}
- *avp = av;
}
} else {
- const MAGIC *const mg
- = SvMAGICAL(tsv) ? mg_find(tsv, PERL_MAGIC_backref) : NULL;
- if (mg)
- av = MUTABLE_AV(mg->mg_obj);
- else {
- av = newAV();
- AvREAL_off(av);
- sv_magic(tsv, MUTABLE_SV(av), PERL_MAGIC_backref, NULL, 0);
- /* av now has a refcnt of 2; see discussion above */
+ if (! ((mg =
+ (SvMAGICAL(tsv) ? mg_find(tsv, PERL_MAGIC_backref) : NULL))))
+ {
+ sv_magic(tsv, NULL, PERL_MAGIC_backref, NULL, 0);
+ mg = mg_find(tsv, PERL_MAGIC_backref);
+ }
+ svp = &(mg->mg_obj);
+ }
+
+ /* create or retrieve the array */
+
+ if ( (!*svp && SvTYPE(sv) == SVt_PVAV)
+ || (*svp && SvTYPE(*svp) != SVt_PVAV)
+ ) {
+ /* create array */
+ av = newAV();
+ AvREAL_off(av);
+ SvREFCNT_inc_simple_void(av);
+ /* av now has a refcnt of 2; see discussion above */
+ if (*svp) {
+ /* move single existing backref to the array */
+ av_extend(av, 1);
+ AvARRAY(av)[++AvFILLp(av)] = *svp; /* av_push() */
}
+ *svp = (SV*)av;
+ if (mg)
+ mg->mg_flags |= MGf_REFCOUNTED;
}
+ else
+ av = MUTABLE_AV(*svp);
+
+ if (!av) {
+ /* optimisation: store single backref directly in HvAUX or mg_obj */
+ *svp = sv;
+ return;
+ }
+ /* push new backref */
+ assert(SvTYPE(av) == SVt_PVAV);
if (AvFILLp(av) >= AvMAX(av)) {
av_extend(av, AvFILLp(av)+1);
}
* with the SV we point to.
*/
-STATIC void
-S_sv_del_backref(pTHX_ SV *const tsv, SV *const sv)
+/* Remove the back-reference to sv from tsv's backref store. The store is
+ * either an AV of backrefs, or (as an optimisation) a single backref SV
+ * held directly in the HvAUX slot / mg_obj. Croaks if no backref to sv
+ * is recorded. Now part of the public (Perl_-prefixed) API. */
+void
+Perl_sv_del_backref(pTHX_ SV *const tsv, SV *const sv)
{
dVAR;
- AV *av = NULL;
- SV **svp;
+ SV **svp = NULL;
I32 i;
PERL_ARGS_ASSERT_SV_DEL_BACKREF;
+ /* locate the backref slot: HvAUX field for a hash, else backref magic */
if (SvTYPE(tsv) == SVt_PVHV && SvOOK(tsv)) {
- av = *Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv));
- /* We mustn't attempt to "fix up" the hash here by moving the
- backreference array back to the hv_aux structure, as that is stored
- in the main HvARRAY(), and hfreentries assumes that no-one
- reallocates HvARRAY() while it is running. */
+ svp = (SV**)Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv));
}
- if (!av) {
- const MAGIC *const mg
+ if (!svp || !*svp) {
+ MAGIC *const mg
= SvMAGICAL(tsv) ? mg_find(tsv, PERL_MAGIC_backref) : NULL;
- if (mg)
- av = MUTABLE_AV(mg->mg_obj);
+ svp = mg ? &(mg->mg_obj) : NULL;
}
- if (!av)
+ if (!svp || !*svp)
Perl_croak(aTHX_ "panic: del_backref");
- assert(!SvIS_FREED(av));
-
- svp = AvARRAY(av);
- /* We shouldn't be in here more than once, but for paranoia reasons lets
- not assume this. */
- for (i = AvFILLp(av); i >= 0; i--) {
- if (svp[i] == sv) {
- const SSize_t fill = AvFILLp(av);
- if (i != fill) {
- /* We weren't the last entry.
- An unordered list has this property that you can take the
- last element off the end to fill the hole, and it's still
- an unordered list :-)
- */
- svp[i] = svp[fill];
+ /* the slot holds either an AV of backrefs or a single backref SV */
+ if (SvTYPE(*svp) == SVt_PVAV) {
+ int count = 0;
+ AV * const av = (AV*)*svp;
+ assert(!SvIS_FREED(av));
+ svp = AvARRAY(av);
+ for (i = AvFILLp(av); i >= 0; i--) {
+ if (svp[i] == sv) {
+ const SSize_t fill = AvFILLp(av);
+ if (i != fill) {
+ /* We weren't the last entry.
+ An unordered list has this property that you can take the
+ last element off the end to fill the hole, and it's still
+ an unordered list :-)
+ */
+ svp[i] = svp[fill];
+ }
+ svp[fill] = NULL;
+ AvFILLp(av) = fill - 1;
+ count++;
+#ifndef DEBUGGING
+ break; /* should only be one */
+#endif
+ /* under DEBUGGING, keep scanning so the assert below can
+ * detect duplicate entries */
}
- svp[fill] = NULL;
- AvFILLp(av) = fill - 1;
}
+ assert(count == 1);
+ }
+ else {
+ /* optimisation: only a single backref, stored directly */
+ if (*svp != sv)
+ Perl_croak(aTHX_ "panic: del_backref");
+ *svp = NULL;
}
+
}
+/* Break every weak reference / back-pointer to sv listed in av. av is
+ * either a real backref AV, or — single-backref optimisation — the lone
+ * backref SV itself, passed in the av-typed parameter. Return type
+ * changes from int to void; the old int return was always 0. */
-int
+void
Perl_sv_kill_backrefs(pTHX_ SV *const sv, AV *const av)
{
- SV **svp = AvARRAY(av);
+ SV **svp;
+ SV **last;
+ bool is_array;
PERL_ARGS_ASSERT_SV_KILL_BACKREFS;
- PERL_UNUSED_ARG(sv);
- assert(!svp || !SvIS_FREED(av));
- if (svp) {
- SV *const *const last = svp + AvFILLp(av);
+ if (!av)
+ return;
+ /* distinguish a true backref AV from a directly-stored single backref */
+ is_array = (SvTYPE(av) == SVt_PVAV);
+ if (is_array) {
+ assert(!SvIS_FREED(av));
+ svp = AvARRAY(av);
+ if (svp)
+ last = svp + AvFILLp(av);
+ }
+ else {
+ /* optimisation: only a single backref, stored directly */
+ /* treat the parameter itself as a one-element list */
+ svp = (SV**)&av;
+ last = svp;
+ }
+
+ if (svp) {
while (svp <= last) {
if (*svp) {
SV *const referrer = *svp;
if (SvWEAKREF(referrer)) {
/* XXX Should we check that it hasn't changed? */
+ assert(SvROK(referrer));
SvRV_set(referrer, 0);
SvOK_off(referrer);
SvWEAKREF_off(referrer);
SvSETMAGIC(referrer);
} else if (SvTYPE(referrer) == SVt_PVGV ||
SvTYPE(referrer) == SVt_PVLV) {
+ assert(SvTYPE(sv) == SVt_PVHV); /* stash backref */
/* You lookin' at me? */
assert(GvSTASH(referrer));
assert(GvSTASH(referrer) == (const HV *)sv);
GvSTASH(referrer) = 0;
+ } else if (SvTYPE(referrer) == SVt_PVCV ||
+ SvTYPE(referrer) == SVt_PVFM) {
+ /* a CV/FM backref is either from its stash or its GV */
+ if (SvTYPE(sv) == SVt_PVHV) { /* stash backref */
+ /* You lookin' at me? */
+ assert(CvSTASH(referrer));
+ assert(CvSTASH(referrer) == (const HV *)sv);
+ CvSTASH(referrer) = 0;
+ }
+ else {
+ assert(SvTYPE(sv) == SVt_PVGV);
+ /* You lookin' at me? */
+ assert(CvGV(referrer));
+ assert(CvGV(referrer) == (const GV *)sv);
+ /* the GV is going away: detach or anonymise the CV */
+ anonymise_cv_maybe(MUTABLE_GV(sv),
+ MUTABLE_CV(referrer));
+ }
+
} else {
Perl_croak(aTHX_
"panic: magic_killbackrefs (flags=%"UVxf")",
(UV)SvFLAGS(referrer));
}
- *svp = NULL;
+ if (is_array)
+ *svp = NULL;
}
svp++;
}
}
- SvREFCNT_dec(av); /* remove extra count added by sv_add_backref() */
- return 0;
+ if (is_array) {
+ /* empty the array (entries are not refcounted; AvREAL is off) */
+ AvFILLp(av) = -1;
+ SvREFCNT_dec(av); /* remove extra count added by sv_add_backref() */
+ }
+ return;
}
/*
del_SV(nsv);
}
+/* We're about to free a GV which has a CV that refers back to us.
+ * If that CV will outlive us, make it anonymous (i.e. fix up its CvGV
+ * field) */
+
+STATIC void
+S_anonymise_cv_maybe(pTHX_ GV *gv, CV* cv)
+{
+ char *stash;
+ SV *gvname;
+ GV *anongv;
+
+ PERL_ARGS_ASSERT_ANONYMISE_CV_MAYBE;
+
+ /* be assertive! */
+ /* caller (sv_kill_backrefs) guarantees gv is a dying, fully-formed GV
+ * and cv is a named CV whose CvGV points back at gv */
+ assert(SvREFCNT(gv) == 0);
+ assert(isGV(gv) && isGV_with_GP(gv));
+ assert(GvGP(gv));
+ assert(!CvANON(cv));
+ assert(CvGV(cv) == gv);
+
+ /* will the CV shortly be freed by gp_free() ? */
+ if (GvCV(gv) == cv && GvGP(gv)->gp_refcnt < 2 && SvREFCNT(cv) < 2) {
+ /* no one else holds the CV, so just clear the back-pointer and
+ * let gp_free() dispose of the CV itself */
+ SvANY(cv)->xcv_gv = NULL;
+ return;
+ }
+
+ /* if not, anonymise: */
+ /* NOTE(review): HvNAME may be NULL for an anonymous stash, hence the
+ * "__ANON__" fallback for the package part of the name */
+ stash = GvSTASH(gv) ? HvNAME(GvSTASH(gv)) : NULL;
+ gvname = Perl_newSVpvf(aTHX_ "%s::__ANON__",
+ stash ? stash : "__ANON__");
+ anongv = gv_fetchsv(gvname, GV_ADDMULTI, SVt_PVCV);
+ SvREFCNT_dec(gvname); /* gv_fetchsv copied the name; drop the temp SV */
+
+ /* repoint the CV at the stash's __ANON__ glob; this CvGV link is
+ * reference-counted, as flagged by CvCVGV_RC_on */
+ CvANON_on(cv);
+ CvCVGV_RC_on(cv);
+ SvANY(cv)->xcv_gv = MUTABLE_GV(SvREFCNT_inc(anongv));
+}
+
+
/*
=for apidoc sv_clear
if (IoIFP(sv) &&
IoIFP(sv) != PerlIO_stdin() &&
IoIFP(sv) != PerlIO_stdout() &&
- IoIFP(sv) != PerlIO_stderr())
+ IoIFP(sv) != PerlIO_stderr() &&
+ !(IoFLAGS(sv) & IOf_FAKE_DIRP))
{
io_close(MUTABLE_IO(sv), FALSE);
}
case SVt_PVCV:
case SVt_PVFM:
cv_undef(MUTABLE_CV(sv));
+ /* If we're in a stash, we don't own a reference to it. However it does
+ have a back reference to us, which needs to be cleared. */
+ if ((stash = CvSTASH(sv)))
+ sv_del_backref(MUTABLE_SV(stash), sv);
goto freescalar;
case SVt_PVHV:
if (PL_last_swash_hv == (const HV *)sv) {
}
}
#ifdef PERL_OLD_COPY_ON_WRITE
- else if (SvPVX_const(sv)) {
+ else if (SvPVX_const(sv)
+ && !(SvTYPE(sv) == SVt_PVIO && !(IoFLAGS(sv) & IOf_FAKE_DIRP))) {
if (SvIsCOW(sv)) {
if (DEBUG_C_TEST) {
PerlIO_printf(Perl_debug_log, "Copy on write: clear\n");
}
}
#else
- else if (SvPVX_const(sv) && SvLEN(sv))
+ else if (SvPVX_const(sv) && SvLEN(sv)
+ && !(SvTYPE(sv) == SVt_PVIO && !(IoFLAGS(sv) & IOf_FAKE_DIRP)))
Safefree(SvPVX_mutable(sv));
else if (SvPVX_const(sv) && SvREADONLY(sv) && SvFAKE(sv)) {
unshare_hek(SvSHARED_HEK_FROM_PV(SvPVX_const(sv)));
&PL_body_roots[type]);
}
else if (sv_type_details->body_size) {
- my_safefree(SvANY(sv));
+ safefree(SvANY(sv));
}
}
STRLEN ulen;
MAGIC *mg = SvMAGICAL(sv) ? mg_find(sv, PERL_MAGIC_utf8) : NULL;
- if (mg && mg->mg_len != -1) {
- ulen = mg->mg_len;
+ if (mg && (mg->mg_len != -1 || mg->mg_ptr)) {
+ if (mg->mg_len != -1)
+ ulen = mg->mg_len;
+ else {
+ /* We can use the offset cache for a headstart.
+ The longer value is stored in the first pair. */
+ STRLEN *cache = (STRLEN *) mg->mg_ptr;
+
+ ulen = cache[0] + Perl_utf8_length(aTHX_ s + cache[1],
+ s + len);
+ }
+
if (PL_utf8cache < 0) {
const STRLEN real = Perl_utf8_length(aTHX_ s, s + len);
- if (real != ulen) {
- /* Need to turn the assertions off otherwise we may
- recurse infinitely while printing error messages.
- */
- SAVEI8(PL_utf8cache);
- PL_utf8cache = 0;
- Perl_croak(aTHX_ "panic: sv_len_utf8 cache %"UVuf
- " real %"UVuf" for %"SVf,
- (UV) ulen, (UV) real, SVfARG(sv));
- }
+ assert_uft8_cache_coherent("sv_len_utf8", ulen, real, sv);
}
}
else {
ulen = Perl_utf8_length(aTHX_ s, s + len);
- if (!SvREADONLY(sv)) {
- if (!mg && (SvTYPE(sv) < SVt_PVMG ||
- !(mg = mg_find(sv, PERL_MAGIC_utf8)))) {
- mg = sv_magicext(sv, 0, PERL_MAGIC_utf8,
- &PL_vtbl_utf8, 0, 0);
- }
- assert(mg);
- mg->mg_len = ulen;
- /* For now, treat "overflowed" as "still unknown".
- See RT #72924. */
- if (ulen != (STRLEN) mg->mg_len)
- mg->mg_len = -1;
- }
+ utf8_mg_len_cache_update(sv, &mg, ulen);
}
return ulen;
}
offset. */
static STRLEN
S_sv_pos_u2b_forwards(const U8 *const start, const U8 *const send,
- STRLEN uoffset)
+ STRLEN *const uoffset_p, bool *const at_end)
{
const U8 *s = start;
+ STRLEN uoffset = *uoffset_p;
PERL_ARGS_ASSERT_SV_POS_U2B_FORWARDS;
- while (s < send && uoffset--)
+ while (s < send && uoffset) {
+ --uoffset;
s += UTF8SKIP(s);
- if (s > send) {
+ }
+ if (s == send) {
+ *at_end = TRUE;
+ }
+ else if (s > send) {
+ *at_end = TRUE;
/* This is the existing behaviour. Possibly it should be a croak, as
it's actually a bounds error */
s = send;
}
+ *uoffset_p -= uoffset;
return s - start;
}
the passed in UTF-8 offset. */
static STRLEN
S_sv_pos_u2b_midway(const U8 *const start, const U8 *send,
- const STRLEN uoffset, const STRLEN uend)
+ STRLEN uoffset, const STRLEN uend)
{
STRLEN backw = uend - uoffset;
/* The assumption is that going forwards is twice the speed of going
forward (that's where the 2 * backw comes from).
(The real figure of course depends on the UTF-8 data.) */
- return sv_pos_u2b_forwards(start, send, uoffset);
+ const U8 *s = start;
+
+ while (s < send && uoffset--)
+ s += UTF8SKIP(s);
+ assert (s <= send);
+ if (s > send)
+ s = send;
+ return s - start;
}
while (backw--) {
created if necessary, and the found value offered to it for update. */
static STRLEN
S_sv_pos_u2b_cached(pTHX_ SV *const sv, MAGIC **const mgp, const U8 *const start,
- const U8 *const send, const STRLEN uoffset,
+ const U8 *const send, STRLEN uoffset,
STRLEN uoffset0, STRLEN boffset0)
{
STRLEN boffset = 0; /* Actually always set, but let's keep gcc happy. */
bool found = FALSE;
+ bool at_end = FALSE;
PERL_ARGS_ASSERT_SV_POS_U2B_CACHED;
assert (uoffset >= uoffset0);
+ if (!uoffset)
+ return 0;
+
if (!SvREADONLY(sv)
&& PL_utf8cache
&& (*mgp || (SvTYPE(sv) >= SVt_PVMG &&
uoffset - uoffset0,
(*mgp)->mg_len - uoffset0);
} else {
+ uoffset -= uoffset0;
boffset = boffset0
+ sv_pos_u2b_forwards(start + boffset0,
- send, uoffset - uoffset0);
+ send, &uoffset, &at_end);
+ uoffset += uoffset0;
}
}
else if (cache[2] < uoffset) {
}
if (!found || PL_utf8cache < 0) {
- const STRLEN real_boffset
- = boffset0 + sv_pos_u2b_forwards(start + boffset0,
- send, uoffset - uoffset0);
-
- if (found && PL_utf8cache < 0) {
- if (real_boffset != boffset) {
- /* Need to turn the assertions off otherwise we may recurse
- infinitely while printing error messages. */
- SAVEI8(PL_utf8cache);
- PL_utf8cache = 0;
- Perl_croak(aTHX_ "panic: sv_pos_u2b_cache cache %"UVuf
- " real %"UVuf" for %"SVf,
- (UV) boffset, (UV) real_boffset, SVfARG(sv));
- }
- }
+ STRLEN real_boffset;
+ uoffset -= uoffset0;
+ real_boffset = boffset0 + sv_pos_u2b_forwards(start + boffset0,
+ send, &uoffset, &at_end);
+ uoffset += uoffset0;
+
+ if (found && PL_utf8cache < 0)
+ assert_uft8_cache_coherent("sv_pos_u2b_cache", boffset,
+ real_boffset, sv);
boffset = real_boffset;
}
- if (PL_utf8cache)
- utf8_mg_pos_cache_update(sv, mgp, boffset, uoffset, send - start);
+ if (PL_utf8cache) {
+ if (at_end)
+ utf8_mg_len_cache_update(sv, mgp, uoffset);
+ else
+ utf8_mg_pos_cache_update(sv, mgp, boffset, uoffset, send - start);
+ }
return boffset;
}
MAGIC *mg = NULL;
boffset = sv_pos_u2b_cached(sv, &mg, start, send, uoffset, 0, 0);
- if (lenp) {
+ if (lenp
+ && *lenp /* don't bother doing work for 0, as its bytes equivalent
+ is 0, and *lenp is already set to that. */) {
/* Convert the relative offset to absolute. */
const STRLEN uoffset2 = uoffset + *lenp;
const STRLEN boffset2
}
}
+static void
+S_utf8_mg_len_cache_update(pTHX_ SV *const sv, MAGIC **const mgp,
+ const STRLEN ulen)
+{
+ PERL_ARGS_ASSERT_UTF8_MG_LEN_CACHE_UPDATE;
+ if (SvREADONLY(sv))
+ return;
+
+ if (!*mgp && (SvTYPE(sv) < SVt_PVMG ||
+ !(*mgp = mg_find(sv, PERL_MAGIC_utf8)))) {
+ *mgp = sv_magicext(sv, 0, PERL_MAGIC_utf8, &PL_vtbl_utf8, 0, 0);
+ }
+ assert(*mgp);
+
+ (*mgp)->mg_len = ulen;
+ /* For now, treat "overflowed" as "still unknown". See RT #72924. */
+ if (ulen != (STRLEN) (*mgp)->mg_len)
+ (*mgp)->mg_len = -1;
+}
+
/* Create and update the UTF8 magic offset cache, with the proffered utf8/
byte length pairing. The (byte) length of the total SV is passed in too,
as blen, because for some (more esoteric) SVs, the call to SvPV_const()
const U8 *start = (const U8 *) SvPVX_const(sv);
const STRLEN realutf8 = utf8_length(start, start + byte);
- if (realutf8 != utf8) {
- /* Need to turn the assertions off otherwise we may recurse
- infinitely while printing error messages. */
- SAVEI8(PL_utf8cache);
- PL_utf8cache = 0;
- Perl_croak(aTHX_ "panic: utf8_mg_pos_cache_update cache %"UVuf
- " real %"UVuf" for %"SVf, (UV) utf8, (UV) realutf8, SVfARG(sv));
- }
+ assert_uft8_cache_coherent("utf8_mg_pos_cache_update", utf8, realutf8,
+ sv);
}
/* Cache is held with the later position first, to simplify the code
if (!found || PL_utf8cache < 0) {
const STRLEN real_len = utf8_length(s, send);
- if (found && PL_utf8cache < 0) {
- if (len != real_len) {
- /* Need to turn the assertions off otherwise we may recurse
- infinitely while printing error messages. */
- SAVEI8(PL_utf8cache);
- PL_utf8cache = 0;
- Perl_croak(aTHX_ "panic: sv_pos_b2u cache %"UVuf
- " real %"UVuf" for %"SVf,
- (UV) len, (UV) real_len, SVfARG(sv));
- }
- }
+ if (found && PL_utf8cache < 0)
+ assert_uft8_cache_coherent("sv_pos_b2u", len, real_len, sv);
len = real_len;
}
*offsetp = len;
- if (PL_utf8cache)
- utf8_mg_pos_cache_update(sv, &mg, byte, len, blen);
+ if (PL_utf8cache) {
+ if (blen == byte)
+ utf8_mg_len_cache_update(sv, &mg, len);
+ else
+ utf8_mg_pos_cache_update(sv, &mg, byte, len, blen);
+ }
+}
+
+static void
+S_assert_uft8_cache_coherent(pTHX_ const char *const func, STRLEN from_cache,
+ STRLEN real, SV *const sv)
+{
+ PERL_ARGS_ASSERT_ASSERT_UFT8_CACHE_COHERENT;
+
+ /* As this is debugging only code, save space by keeping this test here,
+ rather than inlining it in all the callers. */
+ if (from_cache == real)
+ return;
+
+ /* Need to turn the assertions off otherwise we may recurse infinitely
+ while printing error messages. */
+ SAVEI8(PL_utf8cache);
+ PL_utf8cache = 0;
+ Perl_croak(aTHX_ "panic: %s cache %"UVuf" real %"UVuf" for %"SVf,
+ func, (UV) from_cache, (UV) real, SVfARG(sv));
}
/*
}
SvPOK_only(sv);
+ if (!append) {
+ SvCUR_set(sv,0);
+ }
if (PerlIO_isutf8(fp))
SvUTF8_on(sv);
=for apidoc sv_inc
Auto-increment of the value in the SV, doing string to numeric conversion
-if necessary. Handles 'get' magic.
+if necessary. Handles 'get' magic and operator overloading.
=cut
*/
void
Perl_sv_inc(pTHX_ register SV *const sv)
{
+ if (!sv)
+ return;
+ SvGETMAGIC(sv);
+ sv_inc_nomg(sv);
+}
+
+/*
+=for apidoc sv_inc_nomg
+
+Auto-increment of the value in the SV, doing string to numeric conversion
+if necessary. Handles operator overloading. Skips handling 'get' magic.
+
+=cut
+*/
+
+void
+Perl_sv_inc_nomg(pTHX_ register SV *const sv)
+{
dVAR;
register char *d;
int flags;
if (!sv)
return;
- SvGETMAGIC(sv);
if (SvTHINKFIRST(sv)) {
if (SvIsCOW(sv))
sv_force_normal_flags(sv, 0);
if (SvREADONLY(sv)) {
if (IN_PERL_RUNTIME)
- Perl_croak(aTHX_ "%s", PL_no_modify);
+ Perl_croak_no_modify(aTHX);
}
if (SvROK(sv)) {
IV i;
=for apidoc sv_dec
Auto-decrement of the value in the SV, doing string to numeric conversion
-if necessary. Handles 'get' magic.
+if necessary. Handles 'get' magic and operator overloading.
=cut
*/
Perl_sv_dec(pTHX_ register SV *const sv)
{
dVAR;
+ if (!sv)
+ return;
+ SvGETMAGIC(sv);
+ sv_dec_nomg(sv);
+}
+
+/*
+=for apidoc sv_dec_nomg
+
+Auto-decrement of the value in the SV, doing string to numeric conversion
+if necessary. Handles operator overloading. Skips handling 'get' magic.
+
+=cut
+*/
+
+void
+Perl_sv_dec_nomg(pTHX_ register SV *const sv)
+{
+ dVAR;
int flags;
if (!sv)
return;
- SvGETMAGIC(sv);
if (SvTHINKFIRST(sv)) {
if (SvIsCOW(sv))
sv_force_normal_flags(sv, 0);
if (SvREADONLY(sv)) {
if (IN_PERL_RUNTIME)
- Perl_croak(aTHX_ "%s", PL_no_modify);
+ Perl_croak_no_modify(aTHX);
}
if (SvROK(sv)) {
IV i;
case SVt_PVFM: return "FORMAT";
case SVt_PVIO: return "IO";
case SVt_BIND: return "BIND";
- case SVt_REGEXP: return "REGEXP";
+ case SVt_REGEXP: return "REGEXP";
default: return "UNKNOWN";
}
}
if (SvIsCOW(tmpRef))
sv_force_normal_flags(tmpRef, 0);
if (SvREADONLY(tmpRef))
- Perl_croak(aTHX_ "%s", PL_no_modify);
+ Perl_croak_no_modify(aTHX);
if (SvOBJECT(tmpRef)) {
if (SvTYPE(tmpRef) != SVt_PVIO)
--PL_sv_objcount;
else if (svix < svmax) {
sv_catsv(sv, *svargs);
}
+ else
+ S_vcatpvfn_missing_argument(aTHX);
return;
}
if (args && patlen == 3 && pat[0] == '%' &&
pp = pat + 2;
while (*pp >= '0' && *pp <= '9')
digits = 10 * digits + (*pp++ - '0');
- if (pp - pat == (int)patlen - 1) {
- NV nv;
-
- if (svix < svmax)
- nv = SvNV(*svargs);
- else
- return;
+ if (pp - pat == (int)patlen - 1 && svix < svmax) {
+ const NV nv = SvNV(*svargs);
if (*pp == 'g') {
/* Add check for digits != 0 because it seems that some
gconverts are buggy in this case, and we don't yet have
that currently av_dup, gv_dup and hv_dup are the same as sv_dup.
If this changes, please unmerge ss_dup.
Likewise, sv_dup_inc_multiple() relies on this fact. */
-#define sv_dup_inc(s,t) SvREFCNT_inc(sv_dup(s,t))
-#define sv_dup_inc_NN(s,t) SvREFCNT_inc_NN(sv_dup(s,t))
+#define sv_dup_inc_NN(s,t) SvREFCNT_inc_NN(sv_dup_inc(s,t))
#define av_dup(s,t) MUTABLE_AV(sv_dup((const SV *)s,t))
-#define av_dup_inc(s,t) MUTABLE_AV(SvREFCNT_inc(sv_dup((const SV *)s,t)))
+#define av_dup_inc(s,t) MUTABLE_AV(sv_dup_inc((const SV *)s,t))
#define hv_dup(s,t) MUTABLE_HV(sv_dup((const SV *)s,t))
-#define hv_dup_inc(s,t) MUTABLE_HV(SvREFCNT_inc(sv_dup((const SV *)s,t)))
+#define hv_dup_inc(s,t) MUTABLE_HV(sv_dup_inc((const SV *)s,t))
#define cv_dup(s,t) MUTABLE_CV(sv_dup((const SV *)s,t))
-#define cv_dup_inc(s,t) MUTABLE_CV(SvREFCNT_inc(sv_dup((const SV *)s,t)))
+#define cv_dup_inc(s,t) MUTABLE_CV(sv_dup_inc((const SV *)s,t))
#define io_dup(s,t) MUTABLE_IO(sv_dup((const SV *)s,t))
-#define io_dup_inc(s,t) MUTABLE_IO(SvREFCNT_inc(sv_dup((const SV *)s,t)))
+#define io_dup_inc(s,t) MUTABLE_IO(sv_dup_inc((const SV *)s,t))
#define gv_dup(s,t) MUTABLE_GV(sv_dup((const SV *)s,t))
-#define gv_dup_inc(s,t) MUTABLE_GV(SvREFCNT_inc(sv_dup((const SV *)s,t)))
+#define gv_dup_inc(s,t) MUTABLE_GV(sv_dup_inc((const SV *)s,t))
#define SAVEPV(p) ((p) ? savepv(p) : NULL)
#define SAVEPVN(p,n) ((p) ? savepvn(p,n) : NULL)
Newxz(parser, 1, yy_parser);
ptr_table_store(PL_ptr_table, proto, parser);
- parser->yyerrstatus = 0;
- parser->yychar = YYEMPTY; /* Cause a token to be read. */
-
/* XXX these not yet duped */
parser->old_parser = NULL;
parser->stack = NULL;
for (; mg; mg = mg->mg_moremagic) {
MAGIC *nmg;
+
+ if ((param->flags & CLONEf_JOIN_IN)
+ && mg->mg_type == PERL_MAGIC_backref)
+ /* when joining, we let the individual SVs add themselves to
+ * backref as needed. */
+ continue;
+
Newx(nmg, 1, MAGIC);
*mgprev_p = nmg;
mgprev_p = &(nmg->mg_moremagic);
}
else
*/
- if(nmg->mg_type == PERL_MAGIC_backref) {
- /* The backref AV has its reference count deliberately bumped by
- 1. */
- nmg->mg_obj
- = SvREFCNT_inc(av_dup_inc((const AV *) nmg->mg_obj, param));
- }
- else {
- nmg->mg_obj = (nmg->mg_flags & MGf_REFCOUNTED)
- ? sv_dup_inc(nmg->mg_obj, param)
- : sv_dup(nmg->mg_obj, param);
- }
+ nmg->mg_obj = (nmg->mg_flags & MGf_REFCOUNTED)
+ ? nmg->mg_type == PERL_MAGIC_backref
+ /* The backref AV has its reference
+ * count deliberately bumped by 1 */
+ ? SvREFCNT_inc(av_dup_inc((const AV *)
+ nmg->mg_obj, param))
+ : sv_dup_inc(nmg->mg_obj, param)
+ : sv_dup(nmg->mg_obj, param);
if (nmg->mg_ptr && nmg->mg_type != PERL_MAGIC_regex_global) {
if (nmg->mg_len > 0) {
nmg->mg_ptr = (char*)sv_dup_inc((const SV *)nmg->mg_ptr, param);
}
if ((nmg->mg_flags & MGf_DUP) && nmg->mg_virtual && nmg->mg_virtual->svt_dup) {
- CALL_FPTR(nmg->mg_virtual->svt_dup)(aTHX_ nmg, param);
+ nmg->mg_virtual->svt_dup(aTHX_ nmg, param);
}
}
return mgret;
tbl->tbl_max = --newsize;
tbl->tbl_ary = ary;
for (i=0; i < oldsize; i++, ary++) {
- PTR_TBL_ENT_t **curentp, **entp, *ent;
- if (!*ary)
+ PTR_TBL_ENT_t **entp = ary;
+ PTR_TBL_ENT_t *ent = *ary;
+ PTR_TBL_ENT_t **curentp;
+ if (!ent)
continue;
curentp = ary + oldsize;
- for (entp = ary, ent = *ary; ent; ent = *entp) {
+ do {
if ((newsize & PTR_TABLE_HASH(ent->oldval)) != i) {
*entp = ent->next;
ent->next = *curentp;
*curentp = ent;
- continue;
}
else
entp = &ent->next;
- }
+ ent = *entp;
+ } while (ent);
}
}
PERL_ARGS_ASSERT_RVPV_DUP;
if (SvROK(sstr)) {
- SvRV_set(dstr, SvWEAKREF(sstr)
- ? sv_dup(SvRV_const(sstr), param)
- : sv_dup_inc(SvRV_const(sstr), param));
-
+ if (SvWEAKREF(sstr)) {
+ SvRV_set(dstr, sv_dup(SvRV_const(sstr), param));
+ if (param->flags & CLONEf_JOIN_IN) {
+ /* if joining, we add any back references individually rather
+ * than copying the whole backref array */
+ Perl_sv_add_backref(aTHX_ SvRV(dstr), dstr);
+ }
+ }
+ else
+ SvRV_set(dstr, sv_dup_inc(SvRV_const(sstr), param));
}
else if (SvPVX_const(sstr)) {
/* Has something there */
/* duplicate an SV of any type (including AV, HV etc) */
-SV *
-Perl_sv_dup(pTHX_ const SV *const sstr, CLONE_PARAMS *const param)
+static SV *
+S_sv_dup_common(pTHX_ const SV *const sstr, CLONE_PARAMS *const param)
{
dVAR;
SV *dstr;
- PERL_ARGS_ASSERT_SV_DUP;
+ PERL_ARGS_ASSERT_SV_DUP_COMMON;
- if (!sstr)
- return NULL;
if (SvTYPE(sstr) == SVTYPEMASK) {
#ifdef DEBUG_LEAKING_SCALARS_ABORT
abort();
something that is bad **/
if (SvTYPE(sstr) == SVt_PVHV) {
const HEK * const hvname = HvNAME_HEK(sstr);
- if (hvname)
+ if (hvname) {
/** don't clone stashes if they already exist **/
- return MUTABLE_SV(gv_stashpvn(HEK_KEY(hvname), HEK_LEN(hvname), 0));
+ dstr = MUTABLE_SV(gv_stashpvn(HEK_KEY(hvname), HEK_LEN(hvname), 0));
+ ptr_table_store(PL_ptr_table, sstr, dstr);
+ return dstr;
+ }
}
}
dstr->sv_debug_optype = sstr->sv_debug_optype;
dstr->sv_debug_line = sstr->sv_debug_line;
dstr->sv_debug_inpad = sstr->sv_debug_inpad;
- dstr->sv_debug_cloned = 1;
+ dstr->sv_debug_parent = (SV*)sstr;
dstr->sv_debug_file = savepv(sstr->sv_debug_file);
#endif
#endif
if (sv_type != SVt_PVAV && sv_type != SVt_PVHV
- && !isGV_with_GP(dstr))
+ && !isGV_with_GP(dstr)
+ && !(sv_type == SVt_PVIO && !(IoFLAGS(dstr) & IOf_FAKE_DIRP)))
Perl_rvpv_dup(aTHX_ dstr, sstr, param);
/* The Copy above means that all the source (unduplicated) pointers
else
LvTARG(dstr) = sv_dup_inc(LvTARG(dstr), param);
case SVt_PVGV:
+ /* non-GP case already handled above */
if(isGV_with_GP(sstr)) {
GvNAME_HEK(dstr) = hek_dup(GvNAME_HEK(dstr), param);
/* Don't call sv_add_backref here as it's going to be
/* Danger Will Robinson - GvGP(dstr) isn't initialised
at the point of this comment. */
GvSTASH(dstr) = hv_dup(GvSTASH(dstr), param);
- if(param->flags & CLONEf_JOIN_IN) {
- const HEK * const hvname
- = HvNAME_HEK(GvSTASH(dstr));
- if( hvname
- && GvSTASH(dstr) == gv_stashpvn(
- HEK_KEY(hvname), HEK_LEN(hvname), 0
- )
- )
- Perl_sv_add_backref(
- aTHX_ MUTABLE_SV(GvSTASH(dstr)), dstr
- );
- }
+ if (param->flags & CLONEf_JOIN_IN)
+ Perl_sv_add_backref(aTHX_ MUTABLE_SV(GvSTASH(dstr)), dstr);
GvGP(dstr) = gp_dup(GvGP(sstr), param);
(void)GpREFCNT_inc(GvGP(dstr));
- } else
- Perl_rvpv_dup(aTHX_ dstr, sstr, param);
+ }
break;
case SVt_PVIO:
- IoIFP(dstr) = fp_dup(IoIFP(dstr), IoTYPE(dstr), param);
- if (IoOFP(dstr) == IoIFP(sstr))
- IoOFP(dstr) = IoIFP(dstr);
- else
- IoOFP(dstr) = fp_dup(IoOFP(dstr), IoTYPE(dstr), param);
/* PL_parser->rsfp_filters entries have fake IoDIRP() */
if(IoFLAGS(dstr) & IOf_FAKE_DIRP) {
/* I have no idea why fake dirp (rsfps)
NOOP;
/* IoDIRP(dstr) is already a copy of IoDIRP(sstr) */
}
+ IoIFP(dstr) = fp_dup(IoIFP(sstr), IoTYPE(dstr), param);
}
+ if (IoOFP(dstr) == IoIFP(sstr))
+ IoOFP(dstr) = IoIFP(dstr);
+ else
+ IoOFP(dstr) = fp_dup(IoOFP(dstr), IoTYPE(dstr), param);
IoTOP_NAME(dstr) = SAVEPV(IoTOP_NAME(dstr));
IoFMT_NAME(dstr) = SAVEPV(IoFMT_NAME(dstr));
IoBOTTOM_NAME(dstr) = SAVEPV(IoBOTTOM_NAME(dstr));
else {
while (items-- > 0)
*dst_ary++ = sv_dup(*src_ary++, param);
- if (!(param->flags & CLONEf_COPY_STACKS)
- && AvREIFY(sstr))
- {
- av_reify(MUTABLE_AV(dstr)); /* #41138 */
- }
}
items = AvMAX((const AV *)sstr) - AvFILLp((const AV *)sstr);
while (items-- > 0) {
cBOOL(HvSHAREKEYS(sstr)), param) : 0;
/* backref array needs refcnt=2; see sv_add_backref */
daux->xhv_backreferences =
- saux->xhv_backreferences
- ? MUTABLE_AV(SvREFCNT_inc(
- sv_dup_inc((const SV *)saux->xhv_backreferences, param)))
+ (param->flags & CLONEf_JOIN_IN)
+ /* when joining, we let the individual GVs and
+ * CVs add themselves to backref as
+ * needed. This avoids pulling in stuff
+ * that isn't required, and simplifies the
+ * case where stashes aren't cloned back
+ * if they already exist in the parent
+ * thread */
+ ? NULL
+ : saux->xhv_backreferences
+ ? (SvTYPE(saux->xhv_backreferences) == SVt_PVAV)
+ ? MUTABLE_AV(SvREFCNT_inc(
+ sv_dup_inc((const SV *)
+ saux->xhv_backreferences, param)))
+ : MUTABLE_AV(sv_dup((const SV *)
+ saux->xhv_backreferences, param))
: 0;
daux->xhv_mro_meta = saux->xhv_mro_meta
if (!(param->flags & CLONEf_COPY_STACKS)) {
CvDEPTH(dstr) = 0;
}
+ /*FALLTHROUGH*/
case SVt_PVFM:
/* NOTE: not refcounted */
CvSTASH(dstr) = hv_dup(CvSTASH(dstr), param);
+ if ((param->flags & CLONEf_JOIN_IN) && CvSTASH(dstr))
+ Perl_sv_add_backref(aTHX_ MUTABLE_SV(CvSTASH(dstr)), dstr);
OP_REFCNT_LOCK;
if (!CvISXSUB(dstr))
CvROOT(dstr) = OpREFCNT_inc(CvROOT(dstr));
}
/* don't dup if copying back - CvGV isn't refcounted, so the
* duped GV may never be freed. A bit of a hack! DAPM */
- CvGV(dstr) = (param->flags & CLONEf_JOIN_IN) ?
- NULL : gv_dup(CvGV(dstr), param) ;
- PAD_DUP(CvPADLIST(dstr), CvPADLIST(sstr), param);
+ SvANY(MUTABLE_CV(dstr))->xcv_gv =
+ CvCVGV_RC(dstr)
+ ? gv_dup_inc(CvGV(sstr), param)
+ : (param->flags & CLONEf_JOIN_IN)
+ ? NULL
+ : gv_dup(CvGV(sstr), param);
+
+ CvPADLIST(dstr) = padlist_dup(CvPADLIST(sstr), param);
CvOUTSIDE(dstr) =
CvWEAKOUTSIDE(sstr)
? cv_dup( CvOUTSIDE(dstr), param)
return dstr;
}
+SV *
+Perl_sv_dup_inc(pTHX_ const SV *const sstr, CLONE_PARAMS *const param)
+{
+ PERL_ARGS_ASSERT_SV_DUP_INC;
+ return sstr ? SvREFCNT_inc(sv_dup_common(sstr, param)) : NULL;
+}
+
+SV *
+Perl_sv_dup(pTHX_ const SV *const sstr, CLONE_PARAMS *const param)
+{
+ SV *dstr = sstr ? sv_dup_common(sstr, param) : NULL;
+ PERL_ARGS_ASSERT_SV_DUP;
+
+ /* Track every SV that (at least initially) had a reference count of 0.
+ We need to do this by holding an actual reference to it in this array.
+ If we attempt to cheat, turn AvREAL_off(), and store only pointers
+ (akin to the stashes hash, and the perl stack), we come unstuck if
+ a weak reference (or other SV legitimately SvREFCNT() == 0 for this
+ thread) is manipulated in a CLONE method, because CLONE runs before the
+ unreferenced array is walked to find SVs still with SvREFCNT() == 0
+ (and fix things up by giving each a reference via the temps stack).
+ Instead, during CLONE, if the 0-referenced SV has SvREFCNT_inc() and
+ then SvREFCNT_dec(), it will be cleaned up (and added to the free list)
+ before the walk of unreferenced happens and a reference to that SV is
+ added to the temps stack. At which point we have the same SV considered
+ to be in use, and free to be re-used. Not good.
+ */
+ if (dstr && !(param->flags & CLONEf_COPY_STACKS) && !SvREFCNT(dstr)) {
+ assert(param->unreferenced);
+ av_push(param->unreferenced, SvREFCNT_inc(dstr));
+ }
+
+ return dstr;
+}
+
/* duplicate a context */
PERL_CONTEXT *
case CXt_LOOP_LAZYIV:
case CXt_LOOP_PLAIN:
if (CxPADLOOP(ncx)) {
- ncx->blk_loop.oldcomppad
+ ncx->blk_loop.itervar_u.oldcomppad
= (PAD*)ptr_table_fetch(PL_ptr_table,
- ncx->blk_loop.oldcomppad);
+ ncx->blk_loop.itervar_u.oldcomppad);
} else {
- ncx->blk_loop.oldcomppad
- = (PAD*)gv_dup((const GV *)ncx->blk_loop.oldcomppad,
- param);
+ ncx->blk_loop.itervar_u.gv
+ = gv_dup((const GV *)ncx->blk_loop.itervar_u.gv,
+ param);
}
break;
case CXt_FORMAT:
TOPPTR(nss,ix) = sv_dup_inc(sv, param);
/* fall through */
case SAVEt_ITEM: /* normal string */
+ case SAVEt_GVSV: /* scalar slot in GV */
case SAVEt_SV: /* scalar reference */
sv = (const SV *)POPPTR(ss,ix);
TOPPTR(nss,ix) = sv_dup_inc(sv, param);
TOPLONG(nss,ix) = longval;
break;
case SAVEt_I32: /* I32 reference */
- case SAVEt_I16: /* I16 reference */
- case SAVEt_I8: /* I8 reference */
case SAVEt_COP_ARYBASE: /* call CopARYBASE_set */
ptr = POPPTR(ss,ix);
TOPPTR(nss,ix) = any_dup(ptr, proto_perl);
case SAVEt_VPTR: /* random* reference */
ptr = POPPTR(ss,ix);
TOPPTR(nss,ix) = any_dup(ptr, proto_perl);
+ /* Fall through */
+ case SAVEt_INT_SMALL:
+ case SAVEt_I32_SMALL:
+ case SAVEt_I16: /* I16 reference */
+ case SAVEt_I8: /* I8 reference */
+ case SAVEt_BOOL:
ptr = POPPTR(ss,ix);
TOPPTR(nss,ix) = any_dup(ptr, proto_perl);
break;
sv = (const SV *)POPPTR(ss,ix);
TOPPTR(nss,ix) = sv_dup_inc(sv, param);
break;
- case SAVEt_BOOL:
- ptr = POPPTR(ss,ix);
- TOPPTR(nss,ix) = any_dup(ptr, proto_perl);
- longval = (long)POPBOOL(ss,ix);
- TOPBOOL(nss,ix) = cBOOL(longval);
- break;
case SAVEt_SET_SVFLAGS:
i = POPINT(ss,ix);
TOPINT(nss,ix) = i;
PL_parser = NULL;
Zero(&PL_debug_pad, 1, struct perl_debug_pad);
# ifdef DEBUG_LEAKING_SCALARS
- PL_sv_serial = (((U32)my_perl >> 2) & 0xfff) * 1000000;
+ PL_sv_serial = (((UV)my_perl >> 2) & 0xfff) * 1000000;
# endif
#else /* !DEBUGGING */
Zero(my_perl, 1, PerlInterpreter);
#endif /* PERL_IMPLICIT_SYS */
param->flags = flags;
+ /* Nothing in the core code uses this, but we make it available to
+ extensions (using mg_dup). */
param->proto_perl = proto_perl;
+ /* Likely nothing will use this, but it is initialised to be consistent
+ with Perl_clone_params_new(). */
+ param->new_perl = my_perl;
+ param->unreferenced = NULL;
INIT_TRACK_MEMPOOL(my_perl->Imemory_debug_header, my_perl);
PL_body_arenas = NULL;
Zero(&PL_body_roots, 1, PL_body_roots);
- PL_nice_chunk = NULL;
- PL_nice_chunk_size = 0;
PL_sv_count = 0;
PL_sv_objcount = 0;
PL_sv_root = NULL;
PL_origargv = proto_perl->Iorigargv;
param->stashes = newAV(); /* Setup array of objects to call clone on */
+ /* This makes no difference to the implementation, as it always pushes
+ and shifts pointers to other SVs without changing their reference
+ count, with the array becoming empty before it is freed. However, it
+ makes it conceptually clear what is going on, and will avoid some
+ work inside av.c, filling slots between AvFILL() and AvMAX() with
+ &PL_sv_undef, and SvREFCNT_dec()ing those. */
+ AvREAL_off(param->stashes);
+
+ if (!(flags & CLONEf_COPY_STACKS)) {
+ param->unreferenced = newAV();
+ }
/* Set tainting stuff before PerlIO_debug can possibly get called */
PL_tainting = proto_perl->Itainting;
/* switches */
PL_minus_c = proto_perl->Iminus_c;
PL_patchlevel = sv_dup_inc(proto_perl->Ipatchlevel, param);
+ PL_apiversion = sv_dup_inc(proto_perl->Iapiversion, param);
PL_localpatches = proto_perl->Ilocalpatches;
PL_splitstr = proto_perl->Isplitstr;
PL_minus_n = proto_perl->Iminus_n;
PL_unlockhook = proto_perl->Iunlockhook;
PL_threadhook = proto_perl->Ithreadhook;
PL_destroyhook = proto_perl->Idestroyhook;
+ PL_signalhook = proto_perl->Isignalhook;
#ifdef THREADS_HAVE_PIDS
PL_ppid = proto_perl->Ippid;
else {
init_stacks();
ENTER; /* perl_destruct() wants to LEAVE; */
-
- /* although we're not duplicating the tmps stack, we should still
- * add entries for any SVs on the tmps stack that got cloned by a
- * non-refcount means (eg a temp in @_); otherwise they will be
- * orphaned
- */
- for (i = 0; i<= proto_perl->Itmps_ix; i++) {
- SV * const nsv = MUTABLE_SV(ptr_table_fetch(PL_ptr_table,
- proto_perl->Itmps_stack[i]));
- if (nsv && !SvREFCNT(nsv)) {
- PUSH_EXTEND_MORTAL__SV_C(SvREFCNT_inc_simple(nsv));
- }
- }
}
PL_start_env = proto_perl->Istart_env; /* XXXXXX */
/* Pluggable optimizer */
PL_peepp = proto_perl->Ipeepp;
+ PL_rpeepp = proto_perl->Irpeepp;
/* op_free() hook */
PL_opfreehook = proto_perl->Iopfreehook;
}
PL_registered_mros = hv_dup_inc(proto_perl->Iregistered_mros, param);
+ PL_blockhooks = av_dup_inc(proto_perl->Iblockhooks, param);
/* Call the ->CLONE method, if it exists, for each of the stashes
identified by sv_dup() above.
PL_ptr_table = NULL;
}
+ if (!(flags & CLONEf_COPY_STACKS)) {
+ unreferenced_to_tmp_stack(param->unreferenced);
+ }
SvREFCNT_dec(param->stashes);
return my_perl;
}
+static void
+S_unreferenced_to_tmp_stack(pTHX_ AV *const unreferenced)
+{
+ PERL_ARGS_ASSERT_UNREFERENCED_TO_TMP_STACK;
+
+ if (AvFILLp(unreferenced) > -1) {
+ SV **svp = AvARRAY(unreferenced);
+ SV **const last = svp + AvFILLp(unreferenced);
+ SSize_t count = 0;
+
+ do {
+ if (SvREFCNT(*svp) == 1)
+ ++count;
+ } while (++svp <= last);
+
+ EXTEND_MORTAL(count);
+ svp = AvARRAY(unreferenced);
+
+ do {
+ if (SvREFCNT(*svp) == 1) {
+ /* Our reference is the only one to this SV. This means that
+ in this thread, the scalar effectively has a 0 reference.
+ That doesn't work (cleanup never happens), so donate our
+ reference to it onto the temps stack. */
+ PL_tmps_stack[++PL_tmps_ix] = *svp;
+ } else {
+ /* As an optimisation, because we are already walking the
+ entire array, instead of above doing either
+ SvREFCNT_inc(*svp) or *svp = &PL_sv_undef, we can instead
+ release our reference to the scalar, so that at the end of
+ the array owns zero references to the scalars it happens to
+ point to. We are effectively converting the array from
+ AvREAL() on to AvREAL() off. This saves the av_clear()
+ (triggered by the SvREFCNT_dec(unreferenced) below) from
+ walking the array a second time. */
+ SvREFCNT_dec(*svp);
+ }
+
+ } while (++svp <= last);
+ AvREAL_off(unreferenced);
+ }
+ SvREFCNT_dec(unreferenced);
+}
+
+void
+Perl_clone_params_del(CLONE_PARAMS *param)
+{
+ /* This seemingly funky ordering keeps the build with PERL_GLOBAL_STRUCT
+ happy: */
+ PerlInterpreter *const to = param->new_perl;
+ dTHXa(to);
+ PerlInterpreter *const was = PERL_GET_THX;
+
+ PERL_ARGS_ASSERT_CLONE_PARAMS_DEL;
+
+ if (was != to) {
+ PERL_SET_THX(to);
+ }
+
+ SvREFCNT_dec(param->stashes);
+ if (param->unreferenced)
+ unreferenced_to_tmp_stack(param->unreferenced);
+
+ Safefree(param);
+
+ if (was != to) {
+ PERL_SET_THX(was);
+ }
+}
+
+CLONE_PARAMS *
+Perl_clone_params_new(PerlInterpreter *const from, PerlInterpreter *const to)
+{
+ dVAR;
+ /* Need to play this game, as newAV() can call safesysmalloc(), and that
+ does a dTHX; to get the context from thread local storage.
+ FIXME - under PERL_CORE Newx(), Safefree() and friends should expand to
+ a version that passes in my_perl. */
+ PerlInterpreter *const was = PERL_GET_THX;
+ CLONE_PARAMS *param;
+
+ PERL_ARGS_ASSERT_CLONE_PARAMS_NEW;
+
+ if (was != to) {
+ PERL_SET_THX(to);
+ }
+
+ /* Given that we've set the context, we can do this unshared. */
+ Newx(param, 1, CLONE_PARAMS);
+
+ param->flags = 0;
+ param->proto_perl = from;
+ param->new_perl = to;
+ param->stashes = (AV *)Perl_newSV_type(to, SVt_PVAV);
+ AvREAL_off(param->stashes);
+ param->unreferenced = (AV *)Perl_newSV_type(to, SVt_PVAV);
+
+ if (was != to) {
+ PERL_SET_THX(was);
+ }
+ return param;
+}
+
#endif /* USE_ITHREADS */
/*