X-Git-Url: https://perl5.git.perl.org/perl5.git/blobdiff_plain/5bb89d25eef36595e8b2275f7b0739655e3535d5..95ddc6755c1ff41d06e5afc2212c85f918ebcd28:/sv.c diff --git a/sv.c b/sv.c index 3111c75..3a0cf89 100644 --- a/sv.c +++ b/sv.c @@ -147,8 +147,7 @@ Private API to rest of sv.c new_SV(), del_SV(), - new_XIV(), del_XIV(), - new_XNV(), del_XNV(), + new_XPVNV(), del_XPVGV(), etc Public API: @@ -294,7 +293,7 @@ S_new_SV(pTHX_ const char *file, int line, const char *func) : 0 ); sv->sv_debug_inpad = 0; - sv->sv_debug_cloned = 0; + sv->sv_debug_parent = NULL; sv->sv_debug_file = PL_curcop ? savepv(CopFILE(PL_curcop)): NULL; sv->sv_debug_serial = PL_sv_serial++; @@ -705,61 +704,6 @@ Perl_sv_free_arenas(pTHX) are decremented to point at the unused 'ghost' memory, knowing that the pointers are used with offsets to the real memory. - HE, HEK arenas are managed separately, with separate code, but may - be merge-able later.. -*/ - -/* get_arena(size): this creates custom-sized arenas - TBD: export properly for hv.c: S_more_he(). -*/ -void* -Perl_get_arena(pTHX_ const size_t arena_size, const svtype bodytype) -{ - dVAR; - struct arena_desc* adesc; - struct arena_set *aroot = (struct arena_set*) PL_body_arenas; - unsigned int curr; - - /* shouldnt need this - if (!arena_size) arena_size = PERL_ARENA_SIZE; - */ - - /* may need new arena-set to hold new arena */ - if (!aroot || aroot->curr >= aroot->set_size) { - struct arena_set *newroot; - Newxz(newroot, 1, struct arena_set); - newroot->set_size = ARENAS_PER_SET; - newroot->next = aroot; - aroot = newroot; - PL_body_arenas = (void *) newroot; - DEBUG_m(PerlIO_printf(Perl_debug_log, "new arenaset %p\n", (void*)aroot)); - } - - /* ok, now have arena-set with at least 1 empty/available arena-desc */ - curr = aroot->curr++; - adesc = &(aroot->set[curr]); - assert(!adesc->arena); - - Newx(adesc->arena, arena_size, char); - adesc->size = arena_size; - adesc->utype = bodytype; - DEBUG_m(PerlIO_printf(Perl_debug_log, "arena %d added: %p size %"UVuf"\n", - curr, (void*)adesc->arena, (UV)arena_size)); - - return adesc->arena; -} - - -/* return a thing to the free list */ - -#define del_body(thing, root) \ - STMT_START { \ - void ** const thing_copy = (void **)thing;\ - *thing_copy = *root; \ - *root = (void*)thing_copy; \ - } STMT_END - -/* =head1 SV-Body Allocation @@ -806,11 +750,11 @@ they are no longer allocated. In turn, the new_body_* allocators call S_new_body(), which invokes new_body_inline macro, which takes a lock, and takes a body off the -linked list at PL_body_roots[sv_type], calling S_more_bodies() if +linked list at PL_body_roots[sv_type], calling Perl_more_bodies() if necessary to refresh an empty list. Then the lock is released, and the body is returned. -S_more_bodies calls get_arena(), and carves it up into an array of N +Perl_more_bodies allocates a new arena, and carves it up into an array of N bodies, which it strings into a linked list. It looks up arena-size and body-size from the body_details table described below, thus supporting the multiple body-types. @@ -818,10 +762,6 @@ supporting the multiple body-types. If PURIFY is defined, or PERL_ARENA_SIZE=0, arenas are not used, and the (new|del)_X*V macros are mapped directly to malloc/free. 
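For readers unfamiliar with this allocation pattern, here is a minimal stand-alone sketch of the technique the patch consolidates into Perl_more_bodies(): carve a malloc()ed arena into fixed-size bodies and thread a free list through the first pointer-sized word of each free body, so that allocation and deallocation are O(1) pointer swaps and free bodies need no extra bookkeeping. This is an illustration, not the patch's code; the names more_bodies_demo, new_body_demo, del_body_demo and body_root_demo are invented, and the real code keeps one free-list root per body type in PL_body_roots[] and records its arenas in arena sets for later freeing.

    #include <assert.h>
    #include <stdio.h>
    #include <stdlib.h>

    static void *body_root_demo = NULL;        /* head of the free list */

    /* Carve one arena into bodies and chain them onto the free list,
       in the style of Perl_more_bodies() described above.  The arena is
       deliberately never freed here, mirroring how real arenas live until
       interpreter destruction. */
    static void more_bodies_demo(size_t body_size, size_t arena_size)
    {
        char *start, *end, *p;

        assert(body_size >= sizeof(void *) && body_size <= arena_size);
        start = malloc(arena_size);
        assert(start);
        /* Integer division: address just past the last body that fits. */
        end = start + arena_size / body_size * body_size;
        body_root_demo = start;
        for (p = start; p + body_size < end; p += body_size)
            *(void **)p = p + body_size;       /* link to the next body   */
        *(void **)p = NULL;                    /* last body ends the list */
    }

    /* Pop a body off the free list, refilling from a fresh arena if the
       list is empty; compare the new_body_inline() macro. */
    static void *new_body_demo(size_t body_size, size_t arena_size)
    {
        void *p;
        if (!body_root_demo)
            more_bodies_demo(body_size, arena_size);
        p = body_root_demo;
        body_root_demo = *(void **)p;
        return p;
    }

    /* Push a body back onto the free list; compare the del_body() macro. */
    static void del_body_demo(void *p)
    {
        *(void **)p = body_root_demo;
        body_root_demo = p;
    }

    int main(void)
    {
        void *a = new_body_demo(48, 4080);     /* illustrative sizes only */
        void *b = new_body_demo(48, 4080);
        printf("a=%p b=%p\n", a, b);
        del_body_demo(a);
        del_body_demo(b);
        return 0;
    }

Storing the link inside the body itself is the same trick the del_body() macro uses: a body sitting on the free list costs nothing beyond its own first word.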
-*/ - -/* - For each sv-type, struct body_details bodies_by_type[] carries parameters which control these aspects of SV handling: @@ -899,8 +839,8 @@ struct body_details { + sizeof (((type*)SvANY((const SV *)0))->last_member) static const struct body_details bodies_by_type[] = { - { sizeof(HE), 0, 0, SVt_NULL, - FALSE, NONV, NOARENA, FIT_ARENA(0, sizeof(HE)) }, + /* HEs use this offset for their arena. */ + { 0, 0, 0, SVt_NULL, FALSE, NONV, NOARENA, 0 }, /* The bind placeholder pretends to be an RV for now. Also it's marked as "can't upgrade" to stop anyone using it before it's @@ -920,40 +860,25 @@ static const struct body_details bodies_by_type[] = { SVt_NV, FALSE, HADNV, HASARENA, FIT_ARENA(0, sizeof(NV)) }, /* 8 bytes on most ILP32 with IEEE doubles */ - { sizeof(XPV), + { sizeof(XPV) - STRUCT_OFFSET(XPV, xpv_cur), copy_length(XPV, xpv_len) - STRUCT_OFFSET(XPV, xpv_cur), + STRUCT_OFFSET(XPV, xpv_cur), SVt_PV, FALSE, NONV, HASARENA, FIT_ARENA(0, sizeof(XPV) - STRUCT_OFFSET(XPV, xpv_cur)) }, -#if 2 *PTRSIZE <= IVSIZE /* 12 */ - { sizeof(XPVIV), + { sizeof(XPVIV) - STRUCT_OFFSET(XPV, xpv_cur), copy_length(XPVIV, xiv_u) - STRUCT_OFFSET(XPV, xpv_cur), + STRUCT_OFFSET(XPV, xpv_cur), SVt_PVIV, FALSE, NONV, HASARENA, FIT_ARENA(0, sizeof(XPVIV) - STRUCT_OFFSET(XPV, xpv_cur)) }, - /* 12 */ -#else - { sizeof(XPVIV), - copy_length(XPVIV, xiv_u), - 0, - SVt_PVIV, FALSE, NONV, HASARENA, - FIT_ARENA(0, sizeof(XPVIV)) }, -#endif -#if (2 *PTRSIZE <= IVSIZE) && (2 *PTRSIZE <= NVSIZE) /* 20 */ - { sizeof(XPVNV), + { sizeof(XPVNV) - STRUCT_OFFSET(XPV, xpv_cur), copy_length(XPVNV, xnv_u) - STRUCT_OFFSET(XPV, xpv_cur), + STRUCT_OFFSET(XPV, xpv_cur), SVt_PVNV, FALSE, HADNV, HASARENA, FIT_ARENA(0, sizeof(XPVNV) - STRUCT_OFFSET(XPV, xpv_cur)) }, -#else - /* 20 */ - { sizeof(XPVNV), copy_length(XPVNV, xnv_u), 0, SVt_PVNV, FALSE, HADNV, - HASARENA, FIT_ARENA(0, sizeof(XPVNV)) }, -#endif /* 28 */ { sizeof(XPVMG), copy_length(XPVMG, xnv_u), 0, SVt_PVMG, FALSE, HADNV, @@ -964,7 +889,7 @@ static const struct body_details bodies_by_type[] = { sizeof(regexp), 0, SVt_REGEXP, FALSE, NONV, HASARENA, - FIT_ARENA(0, sizeof(regexp) - STRUCT_OFFSET(regexp, xpv_cur)) + FIT_ARENA(0, sizeof(regexp)) }, /* 48 */ @@ -1012,73 +937,53 @@ static const struct body_details bodies_by_type[] = { (void *)((char *)S_new_body(aTHX_ sv_type) \ - bodies_by_type[sv_type].offset) -#define del_body_allocated(p, sv_type) \ - del_body(p + bodies_by_type[sv_type].offset, &PL_body_roots[sv_type]) - +/* return a thing to the free list */ -#define my_safemalloc(s) (void*)safemalloc(s) -#define my_safecalloc(s) (void*)safecalloc(s, 1) -#define my_safefree(p) safefree((char*)p) +#define del_body(thing, root) \ + STMT_START { \ + void ** const thing_copy = (void **)thing; \ + *thing_copy = *root; \ + *root = (void*)thing_copy; \ + } STMT_END #ifdef PURIFY -#define new_XNV() my_safemalloc(sizeof(XPVNV)) -#define del_XNV(p) my_safefree(p) +#define new_XNV() safemalloc(sizeof(XPVNV)) +#define new_XPVNV() safemalloc(sizeof(XPVNV)) +#define new_XPVMG() safemalloc(sizeof(XPVMG)) -#define new_XPVNV() my_safemalloc(sizeof(XPVNV)) -#define del_XPVNV(p) my_safefree(p) - -#define new_XPVAV() my_safemalloc(sizeof(XPVAV)) -#define del_XPVAV(p) my_safefree(p) - -#define new_XPVHV() my_safemalloc(sizeof(XPVHV)) -#define del_XPVHV(p) my_safefree(p) - -#define new_XPVMG() my_safemalloc(sizeof(XPVMG)) -#define del_XPVMG(p) my_safefree(p) - -#define new_XPVGV() my_safemalloc(sizeof(XPVGV)) -#define del_XPVGV(p) my_safefree(p) +#define del_XPVGV(p) safefree(p) #else /* 
!PURIFY */ #define new_XNV() new_body_allocated(SVt_NV) -#define del_XNV(p) del_body_allocated(p, SVt_NV) - #define new_XPVNV() new_body_allocated(SVt_PVNV) -#define del_XPVNV(p) del_body_allocated(p, SVt_PVNV) - -#define new_XPVAV() new_body_allocated(SVt_PVAV) -#define del_XPVAV(p) del_body_allocated(p, SVt_PVAV) - -#define new_XPVHV() new_body_allocated(SVt_PVHV) -#define del_XPVHV(p) del_body_allocated(p, SVt_PVHV) - #define new_XPVMG() new_body_allocated(SVt_PVMG) -#define del_XPVMG(p) del_body_allocated(p, SVt_PVMG) -#define new_XPVGV() new_body_allocated(SVt_PVGV) -#define del_XPVGV(p) del_body_allocated(p, SVt_PVGV) +#define del_XPVGV(p) del_body(p + bodies_by_type[SVt_PVGV].offset, \ + &PL_body_roots[SVt_PVGV]) #endif /* PURIFY */ /* no arena for you! */ #define new_NOARENA(details) \ - my_safemalloc((details)->body_size + (details)->offset) + safemalloc((details)->body_size + (details)->offset) #define new_NOARENAZ(details) \ - my_safecalloc((details)->body_size + (details)->offset) + safecalloc((details)->body_size + (details)->offset, 1) -STATIC void * -S_more_bodies (pTHX_ const svtype sv_type) +void * +Perl_more_bodies (pTHX_ const svtype sv_type, const size_t body_size, + const size_t arena_size) { dVAR; void ** const root = &PL_body_roots[sv_type]; - const struct body_details * const bdp = &bodies_by_type[sv_type]; - const size_t body_size = bdp->body_size; + struct arena_desc *adesc; + struct arena_set *aroot = (struct arena_set *) PL_body_arenas; + unsigned int curr; char *start; const char *end; - const size_t arena_size = Perl_malloc_good_size(bdp->arena_size); + const size_t good_arena_size = Perl_malloc_good_size(arena_size); #if defined(DEBUGGING) && !defined(PERL_GLOBAL_STRUCT_PRIVATE) static bool done_sanity_check; @@ -1094,37 +999,68 @@ S_more_bodies (pTHX_ const svtype sv_type) } #endif - assert(bdp->arena_size); + assert(arena_size); + + /* may need new arena-set to hold new arena */ + if (!aroot || aroot->curr >= aroot->set_size) { + struct arena_set *newroot; + Newxz(newroot, 1, struct arena_set); + newroot->set_size = ARENAS_PER_SET; + newroot->next = aroot; + aroot = newroot; + PL_body_arenas = (void *) newroot; + DEBUG_m(PerlIO_printf(Perl_debug_log, "new arenaset %p\n", (void*)aroot)); + } + + /* ok, now have arena-set with at least 1 empty/available arena-desc */ + curr = aroot->curr++; + adesc = &(aroot->set[curr]); + assert(!adesc->arena); + + Newx(adesc->arena, good_arena_size, char); + adesc->size = good_arena_size; + adesc->utype = sv_type; + DEBUG_m(PerlIO_printf(Perl_debug_log, "arena %d added: %p size %"UVuf"\n", + curr, (void*)adesc->arena, (UV)good_arena_size)); - start = (char*) Perl_get_arena(aTHX_ arena_size, sv_type); + start = (char *) adesc->arena; - end = start + arena_size - 2 * body_size; + /* Get the address of the byte after the end of the last body we can fit. 
+ Remember, this is integer division: */ + end = start + good_arena_size / body_size * body_size; /* computed count doesnt reflect the 1st slot reservation */ #if defined(MYMALLOC) || defined(HAS_MALLOC_GOOD_SIZE) DEBUG_m(PerlIO_printf(Perl_debug_log, "arena %p end %p arena-size %d (from %d) type %d " "size %d ct %d\n", - (void*)start, (void*)end, (int)arena_size, - (int)bdp->arena_size, sv_type, (int)body_size, - (int)arena_size / (int)body_size)); + (void*)start, (void*)end, (int)good_arena_size, + (int)arena_size, sv_type, (int)body_size, + (int)good_arena_size / (int)body_size)); #else DEBUG_m(PerlIO_printf(Perl_debug_log, "arena %p end %p arena-size %d type %d size %d ct %d\n", (void*)start, (void*)end, - (int)bdp->arena_size, sv_type, (int)body_size, - (int)bdp->arena_size / (int)body_size)); + (int)arena_size, sv_type, (int)body_size, + (int)good_arena_size / (int)body_size)); #endif *root = (void *)start; - while (start <= end) { + while (1) { + /* Where the next body would start: */ char * const next = start + body_size; + + if (next >= end) { + /* This is the last body: */ + assert(next == end); + + *(void **)start = 0; + return *root; + } + *(void**) start = (void *)next; start = next; } - *(void **)start = 0; - - return *root; } /* grab a new thing from the free list, allocating more if necessary. @@ -1135,7 +1071,9 @@ S_more_bodies (pTHX_ const svtype sv_type) STMT_START { \ void ** const r3wt = &PL_body_roots[sv_type]; \ xpv = (PTR_TBL_ENT_t*) (*((void **)(r3wt)) \ - ? *((void **)(r3wt)) : more_bodies(sv_type)); \ + ? *((void **)(r3wt)) : Perl_more_bodies(aTHX_ sv_type, \ + bodies_by_type[sv_type].body_size,\ + bodies_by_type[sv_type].arena_size)); \ *(r3wt) = *(void**)(xpv); \ } STMT_END @@ -1445,7 +1383,7 @@ Perl_sv_upgrade(pTHX_ register SV *const sv, svtype new_type) if (old_type > SVt_IV) { #ifdef PURIFY - my_safefree(old_body); + safefree(old_body); #else /* Note that there is an assumption that all bodies of types that can be upgraded came from arenas. Only the more complex non- @@ -1536,6 +1474,10 @@ Perl_sv_grow(pTHX_ register SV *const sv, register STRLEN newlen) s = SvPVX_mutable(sv); if (newlen > SvLEN(sv)) { /* need more room? */ + STRLEN minlen = SvCUR(sv); + minlen += (minlen >> PERL_STRLEN_EXPAND_SHIFT) + 10; + if (newlen < minlen) + newlen = minlen; #ifndef Perl_safesysmalloc_size newlen = PERL_STRLEN_ROUNDUP(newlen); #endif @@ -2683,6 +2625,7 @@ Perl_sv_2num(pTHX_ register SV *const sv) return sv; if (SvAMAGIC(sv)) { SV * const tmpsv = AMG_CALLun(sv,numer); + TAINT_IF(tmpsv && SvTAINTED(tmpsv)); if (tmpsv && (!SvROK(tmpsv) || (SvRV(tmpsv) != SvRV(sv)))) return sv_2num(tmpsv); } @@ -2804,6 +2747,7 @@ Perl_sv_2pv_flags(pTHX_ register SV *const sv, STRLEN *const lp, const I32 flags if (flags & SV_SKIP_OVERLOAD) return NULL; tmpstr = AMG_CALLun(sv,string); + TAINT_IF(tmpstr && SvTAINTED(tmpstr)); if (tmpstr && (!SvROK(tmpstr) || (SvRV(tmpstr) != SvRV(sv)))) { /* Unwrap this: */ /* char *pv = lp ? 
SvPV(tmpstr, *lp) : SvPV_nolen(tmpstr); @@ -5240,7 +5184,7 @@ Perl_sv_unmagic(pTHX_ SV *const sv, const int type) const MGVTBL* const vtbl = mg->mg_virtual; *mgp = mg->mg_moremagic; if (vtbl && vtbl->svt_free) - CALL_FPTR(vtbl->svt_free)(aTHX_ sv, mg); + vtbl->svt_free(aTHX_ sv, mg); if (mg->mg_ptr && mg->mg_type != PERL_MAGIC_regex_global) { if (mg->mg_len > 0) Safefree(mg->mg_ptr); @@ -5303,6 +5247,13 @@ Perl_sv_rvweaken(pTHX_ SV *const sv) /* Give tsv backref magic if it hasn't already got it, then push a * back-reference to sv onto the array associated with the backref magic. + * + * As an optimisation, if there's only one backref and it's not an AV, + * store it directly in the HvAUX or mg_obj slot, avoiding the need to + * allocate an AV. (Whether the slot holds an AV tells us whether this is + * active.) + * + * If an HV's backref is stored in magic, it is moved back to HvAUX. */ /* A discussion about the backreferences array and its refcount: @@ -5312,61 +5263,86 @@ Perl_sv_rvweaken(pTHX_ SV *const sv) * structure, from the xhv_backreferences field. (A HV without hv_aux will * have the standard magic instead.) The array is created with a refcount * of 2. This means that if during global destruction the array gets - * picked on first to have its refcount decremented by the random zapper, - * it won't actually be freed, meaning it's still theere for when its - * parent gets freed. - * When the parent SV is freed, in the case of magic, the magic is freed, - * Perl_magic_killbackrefs is called which decrements one refcount, then - * mg_obj is freed which kills the second count. - * In the vase of a HV being freed, one ref is removed by - * Perl_hv_kill_backrefs, the other by Perl_sv_kill_backrefs, which it - * calls. + * picked on before its parent to have its refcount decremented by the + * random zapper, it won't actually be freed, meaning it's still there for + * when its parent gets freed. + * + * When the parent SV is freed, the extra ref is killed by + * Perl_sv_kill_backrefs. The other ref is killed, in the case of magic, + * by mg_free() / MGf_REFCOUNTED, or for a hash, by Perl_hv_kill_backrefs. + * + * When a single backref SV is stored directly, it is not reference + * counted. */ void Perl_sv_add_backref(pTHX_ SV *const tsv, SV *const sv) { dVAR; - AV *av; + SV **svp; + AV *av = NULL; + MAGIC *mg = NULL; PERL_ARGS_ASSERT_SV_ADD_BACKREF; - if (SvTYPE(tsv) == SVt_PVHV) { - AV **const avp = Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv)); - - av = *avp; - if (!av) { - /* There is no AV in the offical place - try a fixup. */ - MAGIC *const mg = mg_find(tsv, PERL_MAGIC_backref); + /* find slot to store array or singleton backref */ - if (mg) { - /* Aha. They've got it stowed in magic. Bring it back. */ - av = MUTABLE_AV(mg->mg_obj); - /* Stop mg_free decreasing the refernce count. */ + if (SvTYPE(tsv) == SVt_PVHV) { + svp = (SV**)Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv)); + + if (!*svp) { + if ((mg = mg_find(tsv, PERL_MAGIC_backref))) { + /* Aha. They've got it stowed in magic instead. + * Move it back to xhv_backreferences */ + *svp = mg->mg_obj; + /* Stop mg_free decreasing the reference count. */ mg->mg_obj = NULL; /* Stop mg_free even calling the destructor, given that there's no AV to free up. */ mg->mg_virtual = 0; sv_unmagic(tsv, PERL_MAGIC_backref); - } else { - av = newAV(); - AvREAL_off(av); - SvREFCNT_inc_simple_void(av); /* see discussion above */ + mg = NULL; } - *avp = av; } } else { - const MAGIC *const mg - = SvMAGICAL(tsv) ? 
mg_find(tsv, PERL_MAGIC_backref) : NULL; - if (mg) - av = MUTABLE_AV(mg->mg_obj); - else { - av = newAV(); - AvREAL_off(av); - sv_magic(tsv, MUTABLE_SV(av), PERL_MAGIC_backref, NULL, 0); - /* av now has a refcnt of 2; see discussion above */ + if (! ((mg = + (SvMAGICAL(tsv) ? mg_find(tsv, PERL_MAGIC_backref) : NULL)))) + { + sv_magic(tsv, NULL, PERL_MAGIC_backref, NULL, 0); + mg = mg_find(tsv, PERL_MAGIC_backref); } + svp = &(mg->mg_obj); } + + /* create or retrieve the array */ + + if ( (!*svp && SvTYPE(sv) == SVt_PVAV) + || (*svp && SvTYPE(*svp) != SVt_PVAV) + ) { + /* create array */ + av = newAV(); + AvREAL_off(av); + SvREFCNT_inc_simple_void(av); + /* av now has a refcnt of 2; see discussion above */ + if (*svp) { + /* move single existing backref to the array */ + av_extend(av, 1); + AvARRAY(av)[++AvFILLp(av)] = *svp; /* av_push() */ + } + *svp = (SV*)av; + if (mg) + mg->mg_flags |= MGf_REFCOUNTED; + } + else + av = MUTABLE_AV(*svp); + + if (!av) { + /* optimisation: store single backref directly in HvAUX or mg_obj */ + *svp = sv; + return; + } + /* push new backref */ + assert(SvTYPE(av) == SVt_PVAV); if (AvFILLp(av) >= AvMAX(av)) { av_extend(av, AvFILLp(av)+1); } @@ -5377,95 +5353,139 @@ Perl_sv_add_backref(pTHX_ SV *const tsv, SV *const sv) * with the SV we point to. */ -STATIC void -S_sv_del_backref(pTHX_ SV *const tsv, SV *const sv) +void +Perl_sv_del_backref(pTHX_ SV *const tsv, SV *const sv) { dVAR; - AV *av = NULL; - SV **svp; + SV **svp = NULL; I32 i; PERL_ARGS_ASSERT_SV_DEL_BACKREF; if (SvTYPE(tsv) == SVt_PVHV && SvOOK(tsv)) { - av = *Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv)); - /* We mustn't attempt to "fix up" the hash here by moving the - backreference array back to the hv_aux structure, as that is stored - in the main HvARRAY(), and hfreentries assumes that no-one - reallocates HvARRAY() while it is running. */ + svp = (SV**)Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv)); } - if (!av) { - const MAGIC *const mg + if (!svp || !*svp) { + MAGIC *const mg = SvMAGICAL(tsv) ? mg_find(tsv, PERL_MAGIC_backref) : NULL; - if (mg) - av = MUTABLE_AV(mg->mg_obj); + svp = mg ? &(mg->mg_obj) : NULL; } - if (!av) + if (!svp || !*svp) Perl_croak(aTHX_ "panic: del_backref"); - assert(!SvIS_FREED(av)); - - svp = AvARRAY(av); - /* We shouldn't be in here more than once, but for paranoia reasons lets - not assume this. */ - for (i = AvFILLp(av); i >= 0; i--) { - if (svp[i] == sv) { - const SSize_t fill = AvFILLp(av); - if (i != fill) { - /* We weren't the last entry. - An unordered list has this property that you can take the - last element off the end to fill the hole, and it's still - an unordered list :-) - */ - svp[i] = svp[fill]; + if (SvTYPE(*svp) == SVt_PVAV) { + int count = 0; + AV * const av = (AV*)*svp; + assert(!SvIS_FREED(av)); + svp = AvARRAY(av); + for (i = AvFILLp(av); i >= 0; i--) { + if (svp[i] == sv) { + const SSize_t fill = AvFILLp(av); + if (i != fill) { + /* We weren't the last entry. 
+ An unordered list has this property that you can take the + last element off the end to fill the hole, and it's still + an unordered list :-) + */ + svp[i] = svp[fill]; + } + svp[fill] = NULL; + AvFILLp(av) = fill - 1; + count++; +#ifndef DEBUGGING + break; /* should only be one */ +#endif } - svp[fill] = NULL; - AvFILLp(av) = fill - 1; } + assert(count == 1); + } + else { + /* optimisation: only a single backref, stored directly */ + if (*svp != sv) + Perl_croak(aTHX_ "panic: del_backref"); + *svp = NULL; } + } -int +void Perl_sv_kill_backrefs(pTHX_ SV *const sv, AV *const av) { - SV **svp = AvARRAY(av); + SV **svp; + SV **last; + bool is_array; PERL_ARGS_ASSERT_SV_KILL_BACKREFS; - PERL_UNUSED_ARG(sv); - assert(!svp || !SvIS_FREED(av)); - if (svp) { - SV *const *const last = svp + AvFILLp(av); + if (!av) + return; + + is_array = (SvTYPE(av) == SVt_PVAV); + if (is_array) { + assert(!SvIS_FREED(av)); + svp = AvARRAY(av); + if (svp) + last = svp + AvFILLp(av); + } + else { + /* optimisation: only a single backref, stored directly */ + svp = (SV**)&av; + last = svp; + } + if (svp) { while (svp <= last) { if (*svp) { SV *const referrer = *svp; if (SvWEAKREF(referrer)) { /* XXX Should we check that it hasn't changed? */ + assert(SvROK(referrer)); SvRV_set(referrer, 0); SvOK_off(referrer); SvWEAKREF_off(referrer); SvSETMAGIC(referrer); } else if (SvTYPE(referrer) == SVt_PVGV || SvTYPE(referrer) == SVt_PVLV) { + assert(SvTYPE(sv) == SVt_PVHV); /* stash backref */ /* You lookin' at me? */ assert(GvSTASH(referrer)); assert(GvSTASH(referrer) == (const HV *)sv); GvSTASH(referrer) = 0; + } else if (SvTYPE(referrer) == SVt_PVCV || + SvTYPE(referrer) == SVt_PVFM) { + if (SvTYPE(sv) == SVt_PVHV) { /* stash backref */ + /* You lookin' at me? */ + assert(CvSTASH(referrer)); + assert(CvSTASH(referrer) == (const HV *)sv); + CvSTASH(referrer) = 0; + } + else { + assert(SvTYPE(sv) == SVt_PVGV); + /* You lookin' at me? */ + assert(CvGV(referrer)); + assert(CvGV(referrer) == (const GV *)sv); + anonymise_cv_maybe(MUTABLE_GV(sv), + MUTABLE_CV(referrer)); + } + } else { Perl_croak(aTHX_ "panic: magic_killbackrefs (flags=%"UVxf")", (UV)SvFLAGS(referrer)); } - *svp = NULL; + if (is_array) + *svp = NULL; } svp++; } } - SvREFCNT_dec(av); /* remove extra count added by sv_add_backref() */ - return 0; + if (is_array) { + AvFILLp(av) = -1; + SvREFCNT_dec(av); /* remove extra count added by sv_add_backref() */ + } + return; } /* @@ -5646,6 +5666,45 @@ Perl_sv_replace(pTHX_ register SV *const sv, register SV *const nsv) del_SV(nsv); } +/* We're about to free a GV which has a CV that refers back to us. + * If that CV will outlive us, make it anonymous (i.e. fix up its CvGV + * field) */ + +STATIC void +S_anonymise_cv_maybe(pTHX_ GV *gv, CV* cv) +{ + char *stash; + SV *gvname; + GV *anongv; + + PERL_ARGS_ASSERT_ANONYMISE_CV_MAYBE; + + /* be assertive! */ + assert(SvREFCNT(gv) == 0); + assert(isGV(gv) && isGV_with_GP(gv)); + assert(GvGP(gv)); + assert(!CvANON(cv)); + assert(CvGV(cv) == gv); + + /* will the CV shortly be freed by gp_free() ? */ + if (GvCV(gv) == cv && GvGP(gv)->gp_refcnt < 2 && SvREFCNT(cv) < 2) { + SvANY(cv)->xcv_gv = NULL; + return; + } + + /* if not, anonymise: */ + stash = GvSTASH(gv) ? HvNAME(GvSTASH(gv)) : NULL; + gvname = Perl_newSVpvf(aTHX_ "%s::__ANON__", + stash ? 
stash : "__ANON__"); + anongv = gv_fetchsv(gvname, GV_ADDMULTI, SVt_PVCV); + SvREFCNT_dec(gvname); + + CvANON_on(cv); + CvCVGV_RC_on(cv); + SvANY(cv)->xcv_gv = MUTABLE_GV(SvREFCNT_inc(anongv)); +} + + /* =for apidoc sv_clear @@ -5757,7 +5816,8 @@ Perl_sv_clear(pTHX_ register SV *const sv) if (IoIFP(sv) && IoIFP(sv) != PerlIO_stdin() && IoIFP(sv) != PerlIO_stdout() && - IoIFP(sv) != PerlIO_stderr()) + IoIFP(sv) != PerlIO_stderr() && + !(IoFLAGS(sv) & IOf_FAKE_DIRP)) { io_close(MUTABLE_IO(sv), FALSE); } @@ -5775,6 +5835,10 @@ Perl_sv_clear(pTHX_ register SV *const sv) case SVt_PVCV: case SVt_PVFM: cv_undef(MUTABLE_CV(sv)); + /* If we're in a stash, we don't own a reference to it. However it does + have a back reference to us, which needs to be cleared. */ + if ((stash = CvSTASH(sv))) + sv_del_backref(MUTABLE_SV(stash), sv); goto freescalar; case SVt_PVHV: if (PL_last_swash_hv == (const HV *)sv) { @@ -5839,7 +5903,8 @@ Perl_sv_clear(pTHX_ register SV *const sv) } } #ifdef PERL_OLD_COPY_ON_WRITE - else if (SvPVX_const(sv)) { + else if (SvPVX_const(sv) + && !(SvTYPE(sv) == SVt_PVIO && !(IoFLAGS(sv) & IOf_FAKE_DIRP))) { if (SvIsCOW(sv)) { if (DEBUG_C_TEST) { PerlIO_printf(Perl_debug_log, "Copy on write: clear\n"); @@ -5857,7 +5922,8 @@ Perl_sv_clear(pTHX_ register SV *const sv) } } #else - else if (SvPVX_const(sv) && SvLEN(sv)) + else if (SvPVX_const(sv) && SvLEN(sv) + && !(SvTYPE(sv) == SVt_PVIO && !(IoFLAGS(sv) & IOf_FAKE_DIRP))) Safefree(SvPVX_mutable(sv)); else if (SvPVX_const(sv) && SvREADONLY(sv) && SvFAKE(sv)) { unshare_hek(SvSHARED_HEK_FROM_PV(SvPVX_const(sv))); @@ -5877,7 +5943,7 @@ Perl_sv_clear(pTHX_ register SV *const sv) &PL_body_roots[type]); } else if (sv_type_details->body_size) { - my_safefree(SvANY(sv)); + safefree(SvANY(sv)); } } @@ -6042,37 +6108,26 @@ Perl_sv_len_utf8(pTHX_ register SV *const sv) STRLEN ulen; MAGIC *mg = SvMAGICAL(sv) ? mg_find(sv, PERL_MAGIC_utf8) : NULL; - if (mg && mg->mg_len != -1) { - ulen = mg->mg_len; + if (mg && (mg->mg_len != -1 || mg->mg_ptr)) { + if (mg->mg_len != -1) + ulen = mg->mg_len; + else { + /* We can use the offset cache for a headstart. + The longer value is stored in the first pair. */ + STRLEN *cache = (STRLEN *) mg->mg_ptr; + + ulen = cache[0] + Perl_utf8_length(aTHX_ s + cache[1], + s + len); + } + if (PL_utf8cache < 0) { const STRLEN real = Perl_utf8_length(aTHX_ s, s + len); - if (real != ulen) { - /* Need to turn the assertions off otherwise we may - recurse infinitely while printing error messages. - */ - SAVEI8(PL_utf8cache); - PL_utf8cache = 0; - Perl_croak(aTHX_ "panic: sv_len_utf8 cache %"UVuf - " real %"UVuf" for %"SVf, - (UV) ulen, (UV) real, SVfARG(sv)); - } + assert_uft8_cache_coherent("sv_len_utf8", ulen, real, sv); } } else { ulen = Perl_utf8_length(aTHX_ s, s + len); - if (!SvREADONLY(sv)) { - if (!mg && (SvTYPE(sv) < SVt_PVMG || - !(mg = mg_find(sv, PERL_MAGIC_utf8)))) { - mg = sv_magicext(sv, 0, PERL_MAGIC_utf8, - &PL_vtbl_utf8, 0, 0); - } - assert(mg); - mg->mg_len = ulen; - /* For now, treat "overflowed" as "still unknown". - See RT #72924. */ - if (ulen != (STRLEN) mg->mg_len) - mg->mg_len = -1; - } + utf8_mg_len_cache_update(sv, &mg, ulen); } return ulen; } @@ -6084,19 +6139,27 @@ Perl_sv_len_utf8(pTHX_ register SV *const sv) offset. 
*/ static STRLEN S_sv_pos_u2b_forwards(const U8 *const start, const U8 *const send, - STRLEN uoffset) + STRLEN *const uoffset_p, bool *const at_end) { const U8 *s = start; + STRLEN uoffset = *uoffset_p; PERL_ARGS_ASSERT_SV_POS_U2B_FORWARDS; - while (s < send && uoffset--) + while (s < send && uoffset) { + --uoffset; s += UTF8SKIP(s); - if (s > send) { + } + if (s == send) { + *at_end = TRUE; + } + else if (s > send) { + *at_end = TRUE; /* This is the existing behaviour. Possibly it should be a croak, as it's actually a bounds error */ s = send; } + *uoffset_p -= uoffset; return s - start; } @@ -6105,7 +6168,7 @@ S_sv_pos_u2b_forwards(const U8 *const start, const U8 *const send, the passed in UTF-8 offset. */ static STRLEN S_sv_pos_u2b_midway(const U8 *const start, const U8 *send, - const STRLEN uoffset, const STRLEN uend) + STRLEN uoffset, const STRLEN uend) { STRLEN backw = uend - uoffset; @@ -6115,7 +6178,14 @@ S_sv_pos_u2b_midway(const U8 *const start, const U8 *send, /* The assumption is that going forwards is twice the speed of going forward (that's where the 2 * backw comes from). (The real figure of course depends on the UTF-8 data.) */ - return sv_pos_u2b_forwards(start, send, uoffset); + const U8 *s = start; + + while (s < send && uoffset--) + s += UTF8SKIP(s); + assert (s <= send); + if (s > send) + s = send; + return s - start; } while (backw--) { @@ -6136,16 +6206,20 @@ S_sv_pos_u2b_midway(const U8 *const start, const U8 *send, created if necessary, and the found value offered to it for update. */ static STRLEN S_sv_pos_u2b_cached(pTHX_ SV *const sv, MAGIC **const mgp, const U8 *const start, - const U8 *const send, const STRLEN uoffset, + const U8 *const send, STRLEN uoffset, STRLEN uoffset0, STRLEN boffset0) { STRLEN boffset = 0; /* Actually always set, but let's keep gcc happy. */ bool found = FALSE; + bool at_end = FALSE; PERL_ARGS_ASSERT_SV_POS_U2B_CACHED; assert (uoffset >= uoffset0); + if (!uoffset) + return 0; + if (!SvREADONLY(sv) && PL_utf8cache && (*mgp || (SvTYPE(sv) >= SVt_PVMG && @@ -6175,9 +6249,11 @@ S_sv_pos_u2b_cached(pTHX_ SV *const sv, MAGIC **const mgp, const U8 *const start uoffset - uoffset0, (*mgp)->mg_len - uoffset0); } else { + uoffset -= uoffset0; boffset = boffset0 + sv_pos_u2b_forwards(start + boffset0, - send, uoffset - uoffset0); + send, &uoffset, &at_end); + uoffset += uoffset0; } } else if (cache[2] < uoffset) { @@ -6215,26 +6291,24 @@ S_sv_pos_u2b_cached(pTHX_ SV *const sv, MAGIC **const mgp, const U8 *const start } if (!found || PL_utf8cache < 0) { - const STRLEN real_boffset - = boffset0 + sv_pos_u2b_forwards(start + boffset0, - send, uoffset - uoffset0); - - if (found && PL_utf8cache < 0) { - if (real_boffset != boffset) { - /* Need to turn the assertions off otherwise we may recurse - infinitely while printing error messages. 
*/ - SAVEI8(PL_utf8cache); - PL_utf8cache = 0; - Perl_croak(aTHX_ "panic: sv_pos_u2b_cache cache %"UVuf - " real %"UVuf" for %"SVf, - (UV) boffset, (UV) real_boffset, SVfARG(sv)); - } - } + STRLEN real_boffset; + uoffset -= uoffset0; + real_boffset = boffset0 + sv_pos_u2b_forwards(start + boffset0, + send, &uoffset, &at_end); + uoffset += uoffset0; + + if (found && PL_utf8cache < 0) + assert_uft8_cache_coherent("sv_pos_u2b_cache", boffset, + real_boffset, sv); boffset = real_boffset; } - if (PL_utf8cache) - utf8_mg_pos_cache_update(sv, mgp, boffset, uoffset, send - start); + if (PL_utf8cache) { + if (at_end) + utf8_mg_len_cache_update(sv, mgp, uoffset); + else + utf8_mg_pos_cache_update(sv, mgp, boffset, uoffset, send - start); + } return boffset; } @@ -6275,7 +6349,9 @@ Perl_sv_pos_u2b_flags(pTHX_ SV *const sv, STRLEN uoffset, STRLEN *const lenp, MAGIC *mg = NULL; boffset = sv_pos_u2b_cached(sv, &mg, start, send, uoffset, 0, 0); - if (lenp) { + if (lenp + && *lenp /* don't bother doing work for 0, as its bytes equivalent + is 0, and *lenp is already set to that. */) { /* Convert the relative offset to absolute. */ const STRLEN uoffset2 = uoffset + *lenp; const STRLEN boffset2 @@ -6333,6 +6409,26 @@ Perl_sv_pos_u2b(pTHX_ register SV *const sv, I32 *const offsetp, I32 *const lenp } } +static void +S_utf8_mg_len_cache_update(pTHX_ SV *const sv, MAGIC **const mgp, + const STRLEN ulen) +{ + PERL_ARGS_ASSERT_UTF8_MG_LEN_CACHE_UPDATE; + if (SvREADONLY(sv)) + return; + + if (!*mgp && (SvTYPE(sv) < SVt_PVMG || + !(*mgp = mg_find(sv, PERL_MAGIC_utf8)))) { + *mgp = sv_magicext(sv, 0, PERL_MAGIC_utf8, &PL_vtbl_utf8, 0, 0); + } + assert(*mgp); + + (*mgp)->mg_len = ulen; + /* For now, treat "overflowed" as "still unknown". See RT #72924. */ + if (ulen != (STRLEN) (*mgp)->mg_len) + (*mgp)->mg_len = -1; +} + /* Create and update the UTF8 magic offset cache, with the proffered utf8/ byte length pairing. The (byte) length of the total SV is passed in too, as blen, because for some (more esoteric) SVs, the call to SvPV_const() @@ -6391,14 +6487,8 @@ S_utf8_mg_pos_cache_update(pTHX_ SV *const sv, MAGIC **const mgp, const STRLEN b const U8 *start = (const U8 *) SvPVX_const(sv); const STRLEN realutf8 = utf8_length(start, start + byte); - if (realutf8 != utf8) { - /* Need to turn the assertions off otherwise we may recurse - infinitely while printing error messages. */ - SAVEI8(PL_utf8cache); - PL_utf8cache = 0; - Perl_croak(aTHX_ "panic: utf8_mg_pos_cache_update cache %"UVuf - " real %"UVuf" for %"SVf, (UV) utf8, (UV) realutf8, SVfARG(sv)); - } + assert_uft8_cache_coherent("utf8_mg_pos_cache_update", utf8, realutf8, + sv); } /* Cache is held with the later position first, to simplify the code @@ -6619,23 +6709,37 @@ Perl_sv_pos_b2u(pTHX_ register SV *const sv, I32 *const offsetp) if (!found || PL_utf8cache < 0) { const STRLEN real_len = utf8_length(s, send); - if (found && PL_utf8cache < 0) { - if (len != real_len) { - /* Need to turn the assertions off otherwise we may recurse - infinitely while printing error messages. 
*/ - SAVEI8(PL_utf8cache); - PL_utf8cache = 0; - Perl_croak(aTHX_ "panic: sv_pos_b2u cache %"UVuf - " real %"UVuf" for %"SVf, - (UV) len, (UV) real_len, SVfARG(sv)); - } - } + if (found && PL_utf8cache < 0) + assert_uft8_cache_coherent("sv_pos_b2u", len, real_len, sv); len = real_len; } *offsetp = len; - if (PL_utf8cache) - utf8_mg_pos_cache_update(sv, &mg, byte, len, blen); + if (PL_utf8cache) { + if (blen == byte) + utf8_mg_len_cache_update(sv, &mg, len); + else + utf8_mg_pos_cache_update(sv, &mg, byte, len, blen); + } +} + +static void +S_assert_uft8_cache_coherent(pTHX_ const char *const func, STRLEN from_cache, + STRLEN real, SV *const sv) +{ + PERL_ARGS_ASSERT_ASSERT_UFT8_CACHE_COHERENT; + + /* As this is debugging only code, save space by keeping this test here, + rather than inlining it in all the callers. */ + if (from_cache == real) + return; + + /* Need to turn the assertions off otherwise we may recurse infinitely + while printing error messages. */ + SAVEI8(PL_utf8cache); + PL_utf8cache = 0; + Perl_croak(aTHX_ "panic: %s cache %"UVuf" real %"UVuf" for %"SVf, + func, (UV) from_cache, (UV) real, SVfARG(sv)); } /* @@ -6997,6 +7101,9 @@ Perl_sv_gets(pTHX_ register SV *const sv, register PerlIO *const fp, I32 append) } SvPOK_only(sv); + if (!append) { + SvCUR_set(sv,0); + } if (PerlIO_isutf8(fp)) SvUTF8_on(sv); @@ -10712,6 +10819,13 @@ Perl_mg_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *const param) for (; mg; mg = mg->mg_moremagic) { MAGIC *nmg; + + if ((param->flags & CLONEf_JOIN_IN) + && mg->mg_type == PERL_MAGIC_backref) + /* when joining, we let the individual SVs add themselves to + * backref as needed. */ + continue; + Newx(nmg, 1, MAGIC); *mgprev_p = nmg; mgprev_p = &(nmg->mg_moremagic); @@ -10729,17 +10843,14 @@ Perl_mg_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *const param) } else */ - if(nmg->mg_type == PERL_MAGIC_backref) { - /* The backref AV has its reference count deliberately bumped by - 1. */ - nmg->mg_obj - = SvREFCNT_inc(av_dup_inc((const AV *) nmg->mg_obj, param)); - } - else { - nmg->mg_obj = (nmg->mg_flags & MGf_REFCOUNTED) - ? sv_dup_inc(nmg->mg_obj, param) - : sv_dup(nmg->mg_obj, param); - } + nmg->mg_obj = (nmg->mg_flags & MGf_REFCOUNTED) + ? nmg->mg_type == PERL_MAGIC_backref + /* The backref AV has its reference + * count deliberately bumped by 1 */ + ? SvREFCNT_inc(av_dup_inc((const AV *) + nmg->mg_obj, param)) + : sv_dup_inc(nmg->mg_obj, param) + : sv_dup(nmg->mg_obj, param); if (nmg->mg_ptr && nmg->mg_type != PERL_MAGIC_regex_global) { if (nmg->mg_len > 0) { @@ -10756,7 +10867,7 @@ Perl_mg_dup(pTHX_ MAGIC *mg, CLONE_PARAMS *const param) nmg->mg_ptr = (char*)sv_dup_inc((const SV *)nmg->mg_ptr, param); } if ((nmg->mg_flags & MGf_DUP) && nmg->mg_virtual && nmg->mg_virtual->svt_dup) { - CALL_FPTR(nmg->mg_virtual->svt_dup)(aTHX_ nmg, param); + nmg->mg_virtual->svt_dup(aTHX_ nmg, param); } } return mgret; @@ -10951,10 +11062,16 @@ Perl_rvpv_dup(pTHX_ SV *const dstr, const SV *const sstr, CLONE_PARAMS *const pa PERL_ARGS_ASSERT_RVPV_DUP; if (SvROK(sstr)) { - SvRV_set(dstr, SvWEAKREF(sstr) - ? 
sv_dup(SvRV_const(sstr), param) - : sv_dup_inc(SvRV_const(sstr), param)); - + if (SvWEAKREF(sstr)) { + SvRV_set(dstr, sv_dup(SvRV_const(sstr), param)); + if (param->flags & CLONEf_JOIN_IN) { + /* if joining, we add any back references individually rather + * than copying the whole backref array */ + Perl_sv_add_backref(aTHX_ SvRV(dstr), dstr); + } + } + else + SvRV_set(dstr, sv_dup_inc(SvRV_const(sstr), param)); } else if (SvPVX_const(sstr)) { /* Has something there */ @@ -11031,9 +11148,12 @@ S_sv_dup_common(pTHX_ const SV *const sstr, CLONE_PARAMS *const param) something that is bad **/ if (SvTYPE(sstr) == SVt_PVHV) { const HEK * const hvname = HvNAME_HEK(sstr); - if (hvname) + if (hvname) { /** don't clone stashes if they already exist **/ - return MUTABLE_SV(gv_stashpvn(HEK_KEY(hvname), HEK_LEN(hvname), 0)); + dstr = MUTABLE_SV(gv_stashpvn(HEK_KEY(hvname), HEK_LEN(hvname), 0)); + ptr_table_store(PL_ptr_table, sstr, dstr); + return dstr; + } } } @@ -11044,7 +11164,7 @@ S_sv_dup_common(pTHX_ const SV *const sstr, CLONE_PARAMS *const param) dstr->sv_debug_optype = sstr->sv_debug_optype; dstr->sv_debug_line = sstr->sv_debug_line; dstr->sv_debug_inpad = sstr->sv_debug_inpad; - dstr->sv_debug_cloned = 1; + dstr->sv_debug_parent = (SV*)sstr; dstr->sv_debug_file = savepv(sstr->sv_debug_file); #endif @@ -11182,29 +11302,14 @@ S_sv_dup_common(pTHX_ const SV *const sstr, CLONE_PARAMS *const param) /* Danger Will Robinson - GvGP(dstr) isn't initialised at the point of this comment. */ GvSTASH(dstr) = hv_dup(GvSTASH(dstr), param); - if(param->flags & CLONEf_JOIN_IN) { - const HEK * const hvname - = HvNAME_HEK(GvSTASH(dstr)); - if( hvname - && GvSTASH(dstr) == gv_stashpvn( - HEK_KEY(hvname), HEK_LEN(hvname), 0 - ) - ) - Perl_sv_add_backref( - aTHX_ MUTABLE_SV(GvSTASH(dstr)), dstr - ); - } + if (param->flags & CLONEf_JOIN_IN) + Perl_sv_add_backref(aTHX_ MUTABLE_SV(GvSTASH(dstr)), dstr); GvGP(dstr) = gp_dup(GvGP(sstr), param); (void)GpREFCNT_inc(GvGP(dstr)); } else Perl_rvpv_dup(aTHX_ dstr, sstr, param); break; case SVt_PVIO: - IoIFP(dstr) = fp_dup(IoIFP(dstr), IoTYPE(dstr), param); - if (IoOFP(dstr) == IoIFP(sstr)) - IoOFP(dstr) = IoIFP(dstr); - else - IoOFP(dstr) = fp_dup(IoOFP(dstr), IoTYPE(dstr), param); /* PL_parser->rsfp_filters entries have fake IoDIRP() */ if(IoFLAGS(dstr) & IOf_FAKE_DIRP) { /* I have no idea why fake dirp (rsfps) @@ -11223,7 +11328,12 @@ S_sv_dup_common(pTHX_ const SV *const sstr, CLONE_PARAMS *const param) NOOP; /* IoDIRP(dstr) is already a copy of IoDIRP(sstr) */ } + IoIFP(dstr) = fp_dup(IoIFP(sstr), IoTYPE(dstr), param); } + if (IoOFP(dstr) == IoIFP(sstr)) + IoOFP(dstr) = IoIFP(dstr); + else + IoOFP(dstr) = fp_dup(IoOFP(dstr), IoTYPE(dstr), param); IoTOP_NAME(dstr) = SAVEPV(IoTOP_NAME(dstr)); IoFMT_NAME(dstr) = SAVEPV(IoFMT_NAME(dstr)); IoBOTTOM_NAME(dstr) = SAVEPV(IoBOTTOM_NAME(dstr)); @@ -11293,9 +11403,22 @@ S_sv_dup_common(pTHX_ const SV *const sstr, CLONE_PARAMS *const param) cBOOL(HvSHAREKEYS(sstr)), param) : 0; /* backref array needs refcnt=2; see sv_add_backref */ daux->xhv_backreferences = - saux->xhv_backreferences - ? MUTABLE_AV(SvREFCNT_inc( - sv_dup_inc((const SV *)saux->xhv_backreferences, param))) + (param->flags & CLONEf_JOIN_IN) + /* when joining, we let the individual GVs and + * CVs add themselves to backref as + * needed. This avoids pulling in stuff + * that isn't required, and simplifies the + * case where stashes aren't cloned back + * if they already exist in the parent + * thread */ + ? NULL + : saux->xhv_backreferences + ? 
(SvTYPE(saux->xhv_backreferences) == SVt_PVAV) + ? MUTABLE_AV(SvREFCNT_inc( + sv_dup_inc((const SV *) + saux->xhv_backreferences, param))) + : MUTABLE_AV(sv_dup((const SV *) + saux->xhv_backreferences, param)) : 0; daux->xhv_mro_meta = saux->xhv_mro_meta @@ -11314,9 +11437,12 @@ S_sv_dup_common(pTHX_ const SV *const sstr, CLONE_PARAMS *const param) if (!(param->flags & CLONEf_COPY_STACKS)) { CvDEPTH(dstr) = 0; } + /*FALLTHROUGH*/ case SVt_PVFM: /* NOTE: not refcounted */ CvSTASH(dstr) = hv_dup(CvSTASH(dstr), param); + if ((param->flags & CLONEf_JOIN_IN) && CvSTASH(dstr)) + Perl_sv_add_backref(aTHX_ MUTABLE_SV(CvSTASH(dstr)), dstr); OP_REFCNT_LOCK; if (!CvISXSUB(dstr)) CvROOT(dstr) = OpREFCNT_inc(CvROOT(dstr)); @@ -11327,8 +11453,13 @@ S_sv_dup_common(pTHX_ const SV *const sstr, CLONE_PARAMS *const param) } /* don't dup if copying back - CvGV isn't refcounted, so the * duped GV may never be freed. A bit of a hack! DAPM */ - CvGV(dstr) = (param->flags & CLONEf_JOIN_IN) ? - NULL : gv_dup(CvGV(dstr), param) ; + SvANY(MUTABLE_CV(dstr))->xcv_gv = + CvCVGV_RC(dstr) + ? gv_dup_inc(CvGV(sstr), param) + : (param->flags & CLONEf_JOIN_IN) + ? NULL + : gv_dup(CvGV(sstr), param); + CvPADLIST(dstr) = padlist_dup(CvPADLIST(sstr), param); CvOUTSIDE(dstr) = CvWEAKOUTSIDE(sstr) @@ -12167,6 +12298,7 @@ perl_clone_using(PerlInterpreter *proto_perl, UV flags, /* switches */ PL_minus_c = proto_perl->Iminus_c; PL_patchlevel = sv_dup_inc(proto_perl->Ipatchlevel, param); + PL_apiversion = sv_dup_inc(proto_perl->Iapiversion, param); PL_localpatches = proto_perl->Ilocalpatches; PL_splitstr = proto_perl->Isplitstr; PL_minus_n = proto_perl->Iminus_n; @@ -12608,6 +12740,7 @@ perl_clone_using(PerlInterpreter *proto_perl, UV flags, /* Pluggable optimizer */ PL_peepp = proto_perl->Ipeepp; + PL_rpeepp = proto_perl->Irpeepp; /* op_free() hook */ PL_opfreehook = proto_perl->Iopfreehook; @@ -12624,6 +12757,7 @@ perl_clone_using(PerlInterpreter *proto_perl, UV flags, } PL_registered_mros = hv_dup_inc(proto_perl->Iregistered_mros, param); + PL_blockhooks = av_dup_inc(proto_perl->Iblockhooks, param); /* Call the ->CLONE method, if it exists, for each of the stashes identified by sv_dup() above. @@ -12711,9 +12845,11 @@ S_unreferenced_to_tmp_stack(pTHX_ AV *const unreferenced) void Perl_clone_params_del(CLONE_PARAMS *param) { - PerlInterpreter *const was = PERL_GET_THX; + /* This seemingly funky ordering keeps the build with PERL_GLOBAL_STRUCT + happy: */ PerlInterpreter *const to = param->new_perl; dTHXa(to); + PerlInterpreter *const was = PERL_GET_THX; PERL_ARGS_ASSERT_CLONE_PARAMS_DEL; @@ -12735,6 +12871,7 @@ Perl_clone_params_del(CLONE_PARAMS *param) CLONE_PARAMS * Perl_clone_params_new(PerlInterpreter *const from, PerlInterpreter *const to) { + dVAR; /* Need to play this game, as newAV() can call safesysmalloc(), and that does a dTHX; to get the context from thread local storage. FIXME - under PERL_CORE Newx(), Safefree() and friends should expand to