SV* sva;
bool ok = 0;
for (sva = PL_sv_arenaroot; sva; sva = (SV *) SvANY(sva)) {
- SV *sv = sva + 1;
- SV *svend = &sva[SvREFCNT(sva)];
+ const SV * const sv = sva + 1;
+ const SV * const svend = &sva[SvREFCNT(sva)];
if (p >= sv && p < svend) {
ok = 1;
break;
I32 visited = 0;
for (sva = PL_sv_arenaroot; sva; sva = (SV*)SvANY(sva)) {
- register SV * const svend = &sva[SvREFCNT(sva)];
+ register const SV * const svend = &sva[SvREFCNT(sva)];
register SV* sv;
for (sv = sva + 1; sv < svend; ++sv) {
if (SvTYPE(sv) != SVTYPEMASK
/* called by sv_clean_objs() for each live SV */
static void
-do_clean_objs(pTHX_ SV *sv)
+do_clean_objs(pTHX_ SV *ref)
{
- SV* rv;
+ SV* target;
- if (SvROK(sv) && SvOBJECT(rv = SvRV(sv))) {
- DEBUG_D((PerlIO_printf(Perl_debug_log, "Cleaning object ref:\n "), sv_dump(sv)));
- if (SvWEAKREF(sv)) {
- sv_del_backref(sv);
- SvWEAKREF_off(sv);
- SvRV_set(sv, NULL);
+ if (SvROK(ref) && SvOBJECT(target = SvRV(ref))) {
+ DEBUG_D((PerlIO_printf(Perl_debug_log, "Cleaning object ref:\n "), sv_dump(ref)));
+ if (SvWEAKREF(ref)) {
+ sv_del_backref(target, ref);
+ SvWEAKREF_off(ref);
+ SvRV_set(ref, NULL);
} else {
- SvROK_off(sv);
- SvRV_set(sv, NULL);
- SvREFCNT_dec(rv);
+ SvROK_off(ref);
+ SvRV_set(ref, NULL);
+ SvREFCNT_dec(target);
}
}
return cleaned;
}
+static void
+S_free_arena(pTHX_ void **root) {
+ while (root) {
+ void ** const next = *(void **)root;
+ Safefree(root);
+ root = next;
+ }
+}
+
/*
=for apidoc sv_free_arenas
=cut
*/
/* Free the whole arena chain rooted at PL_<name>_arenaroot, then reset both
   the arena root and the corresponding body free-list root. */
#define free_arena(name) \
    STMT_START { \
	S_free_arena(aTHX_ (void**) PL_ ## name ## _arenaroot); \
	PL_ ## name ## _arenaroot = 0; \
	PL_ ## name ## _root = 0; \
    } STMT_END
+
void
Perl_sv_free_arenas(pTHX)
{
SV* sva;
SV* svanext;
- void *arena, *arenanext;
/* Free arenas here, but be careful about fake ones. (We assume
contiguity of the fake ones with the corresponding real ones.) */
if (!SvFAKE(sva))
Safefree(sva);
}
-
- for (arena = PL_xnv_arenaroot; arena; arena = arenanext) {
- arenanext = *(void **)arena;
- Safefree(arena);
- }
- PL_xnv_arenaroot = 0;
- PL_xnv_root = 0;
-
- for (arena = PL_xpv_arenaroot; arena; arena = arenanext) {
- arenanext = *(void **)arena;
- Safefree(arena);
- }
- PL_xpv_arenaroot = 0;
- PL_xpv_root = 0;
-
- for (arena = PL_xpviv_arenaroot; arena; arena = arenanext) {
- arenanext = *(void **)arena;
- Safefree(arena);
- }
- PL_xpviv_arenaroot = 0;
- PL_xpviv_root = 0;
-
- for (arena = PL_xpvnv_arenaroot; arena; arena = arenanext) {
- arenanext = *(void **)arena;
- Safefree(arena);
- }
- PL_xpvnv_arenaroot = 0;
- PL_xpvnv_root = 0;
-
- for (arena = PL_xpvcv_arenaroot; arena; arena = arenanext) {
- arenanext = *(void **)arena;
- Safefree(arena);
- }
- PL_xpvcv_arenaroot = 0;
- PL_xpvcv_root = 0;
-
- for (arena = PL_xpvav_arenaroot; arena; arena = arenanext) {
- arenanext = *(void **)arena;
- Safefree(arena);
- }
- PL_xpvav_arenaroot = 0;
- PL_xpvav_root = 0;
-
- for (arena = PL_xpvhv_arenaroot; arena; arena = arenanext) {
- arenanext = *(void **)arena;
- Safefree(arena);
- }
- PL_xpvhv_arenaroot = 0;
- PL_xpvhv_root = 0;
-
- for (arena = PL_xpvmg_arenaroot; arena; arena = arenanext) {
- arenanext = *(void **)arena;
- Safefree(arena);
- }
- PL_xpvmg_arenaroot = 0;
- PL_xpvmg_root = 0;
-
- for (arena = PL_xpvgv_arenaroot; arena; arena = arenanext) {
- arenanext = *(void **)arena;
- Safefree(arena);
- }
- PL_xpvgv_arenaroot = 0;
- PL_xpvgv_root = 0;
-
- for (arena = PL_xpvlv_arenaroot; arena; arena = arenanext) {
- arenanext = *(void **)arena;
- Safefree(arena);
- }
- PL_xpvlv_arenaroot = 0;
- PL_xpvlv_root = 0;
-
- for (arena = PL_xpvbm_arenaroot; arena; arena = arenanext) {
- arenanext = *(void **)arena;
- Safefree(arena);
- }
- PL_xpvbm_arenaroot = 0;
- PL_xpvbm_root = 0;
-
- {
- HE *he;
- HE *he_next;
- for (he = PL_he_arenaroot; he; he = he_next) {
- he_next = HeNEXT(he);
- Safefree(he);
- }
- }
- PL_he_arenaroot = 0;
- PL_he_root = 0;
-
+
+ free_arena(xnv);
+ free_arena(xpv);
+ free_arena(xpviv);
+ free_arena(xpvnv);
+ free_arena(xpvcv);
+ free_arena(xpvav);
+ free_arena(xpvhv);
+ free_arena(xpvmg);
+ free_arena(xpvgv);
+ free_arena(xpvlv);
+ free_arena(xpvbm);
+ free_arena(he);
#if defined(USE_ITHREADS)
- {
- struct ptr_tbl_ent *pte;
- struct ptr_tbl_ent *pte_next;
- for (pte = PL_pte_arenaroot; pte; pte = pte_next) {
- pte_next = pte->next;
- Safefree(pte);
- }
- }
- PL_pte_arenaroot = 0;
- PL_pte_root = 0;
+ free_arena(pte);
#endif
if (PL_nice_chunk)
S_varname(pTHX_ GV *gv, const char *gvtype, PADOFFSET targ,
SV* keyname, I32 aindex, int subscript_type)
{
- AV *av;
- SV *sv;
SV * const name = sv_newmortal();
if (gv) {
* directly */
const char *p;
- HV *hv = GvSTASH(gv);
+ HV * const hv = GvSTASH(gv);
sv_setpv(name, gvtype);
if (!hv)
p = "???";
sv_catpvn(name,GvNAME(gv),GvNAMELEN(gv));
}
else {
- U32 u;
- CV *cv = find_runcv(&u);
- STRLEN len;
- const char *str;
+ U32 unused;
+ CV * const cv = find_runcv(&unused);
+ SV *sv;
+ AV *av;
+
if (!cv || !CvPADLIST(cv))
- return Nullsv;;
+ return Nullsv;
av = (AV*)(*av_fetch(CvPADLIST(cv), 0, FALSE));
sv = *av_fetch(av, targ, FALSE);
/* SvLEN in a pad name is not to be trusted */
- str = SvPV(sv,len);
- sv_setpvn(name, str, len);
+ sv_setpv(name, SvPV_nolen_const(sv));
}
if (subscript_type == FUV_SUBSCRIPT_HASH) {
+ SV * const sv = NEWSV(0,0);
*SvPVX(name) = '$';
- sv = NEWSV(0,0);
Perl_sv_catpvf(aTHX_ name, "{%s}",
pv_display(sv,SvPVX_const(keyname), SvCUR(keyname), 0, 32));
SvREFCNT_dec(sv);
dVAR;
SV *sv;
AV *av;
- SV **svp;
GV *gv;
OP *o, *o2, *kid;
if (match && subscript_type == FUV_SUBSCRIPT_WITHIN)
break;
- return S_varname(aTHX_ gv, hash ? "%" : "@", obase->op_targ,
+ return varname(gv, hash ? "%" : "@", obase->op_targ,
keysv, index, subscript_type);
}
case OP_PADSV:
if (match && PAD_SVl(obase->op_targ) != uninit_sv)
break;
- return S_varname(aTHX_ Nullgv, "$", obase->op_targ,
+ return varname(Nullgv, "$", obase->op_targ,
Nullsv, 0, FUV_SUBSCRIPT_NONE);
case OP_GVSV:
gv = cGVOPx_gv(obase);
if (!gv || (match && GvSV(gv) != uninit_sv))
break;
- return S_varname(aTHX_ gv, "$", 0, Nullsv, 0, FUV_SUBSCRIPT_NONE);
+ return varname(gv, "$", 0, Nullsv, 0, FUV_SUBSCRIPT_NONE);
case OP_AELEMFAST:
if (obase->op_flags & OPf_SPECIAL) { /* lexical array */
if (match) {
+ SV **svp;
av = (AV*)PAD_SV(obase->op_targ);
if (!av || SvRMAGICAL(av))
break;
if (!svp || *svp != uninit_sv)
break;
}
- return S_varname(aTHX_ Nullgv, "$", obase->op_targ,
+ return varname(Nullgv, "$", obase->op_targ,
Nullsv, (I32)obase->op_private, FUV_SUBSCRIPT_ARRAY);
}
else {
if (!gv)
break;
if (match) {
+ SV **svp;
av = GvAV(gv);
if (!av || SvRMAGICAL(av))
break;
if (!svp || *svp != uninit_sv)
break;
}
- return S_varname(aTHX_ gv, "$", 0,
+ return varname(gv, "$", 0,
Nullsv, (I32)obase->op_private, FUV_SUBSCRIPT_ARRAY);
}
break;
break;
}
else {
- svp = av_fetch((AV*)sv, SvIV(cSVOPx_sv(kid)), FALSE);
+ SV ** const svp = av_fetch((AV*)sv, SvIV(cSVOPx_sv(kid)), FALSE);
if (!svp || *svp != uninit_sv)
break;
}
}
if (obase->op_type == OP_HELEM)
- return S_varname(aTHX_ gv, "%", o->op_targ,
+ return varname(gv, "%", o->op_targ,
cSVOPx_sv(kid), 0, FUV_SUBSCRIPT_HASH);
else
- return S_varname(aTHX_ gv, "@", o->op_targ, Nullsv,
+ return varname(gv, "@", o->op_targ, Nullsv,
SvIV(cSVOPx_sv(kid)), FUV_SUBSCRIPT_ARRAY);
;
}
/* index is an expression;
* attempt to find a match within the aggregate */
if (obase->op_type == OP_HELEM) {
- SV *keysv = S_find_hash_subscript(aTHX_ (HV*)sv, uninit_sv);
+ SV * const keysv = S_find_hash_subscript(aTHX_ (HV*)sv, uninit_sv);
if (keysv)
- return S_varname(aTHX_ gv, "%", o->op_targ,
+ return varname(gv, "%", o->op_targ,
keysv, 0, FUV_SUBSCRIPT_HASH);
}
else {
const I32 index = S_find_array_subscript(aTHX_ (AV*)sv, uninit_sv);
if (index >= 0)
- return S_varname(aTHX_ gv, "@", o->op_targ,
+ return varname(gv, "@", o->op_targ,
Nullsv, index, FUV_SUBSCRIPT_ARRAY);
}
if (match)
break;
- return S_varname(aTHX_ gv,
+ return varname(gv,
(o->op_type == OP_PADAV || o->op_type == OP_RV2AV)
? "@" : "%",
o->op_targ, Nullsv, 0, FUV_SUBSCRIPT_WITHIN);
gv = cGVOPx_gv(o);
if (match && GvSV(gv) != uninit_sv)
break;
- return S_varname(aTHX_ gv, "$", 0,
+ return varname(gv, "$", 0,
Nullsv, 0, FUV_SUBSCRIPT_NONE);
}
/* other possibilities not handled are:
sv_insert(varname, 0, 0, " ", 1);
}
Perl_warner(aTHX_ packWARN(WARN_UNINITIALIZED), PL_warn_uninit,
- varname ? SvPV_nolen(varname) : "",
+ varname ? SvPV_nolen_const(varname) : "",
" in ", OP_DESC(PL_op));
}
else
"", "", "");
}
-/* allocate another arena's worth of NV bodies */
-
-STATIC void
-S_more_xnv(pTHX)
-{
- NV* xnv;
- NV* xnvend;
- void *ptr;
- New(711, ptr, PERL_ARENA_SIZE/sizeof(NV), NV);
- *((void **) ptr) = (void *)PL_xnv_arenaroot;
- PL_xnv_arenaroot = ptr;
-
- xnv = (NV*) ptr;
- xnvend = &xnv[PERL_ARENA_SIZE / sizeof(NV) - 1];
- xnv += (sizeof(XPVIV) - 1) / sizeof(NV) + 1; /* fudge by sizeof XPVIV */
- PL_xnv_root = xnv;
- while (xnv < xnvend) {
- *(NV**)xnv = (NV*)(xnv + 1);
- xnv++;
- }
- *(NV**)xnv = 0;
-}
-
-/* allocate another arena's worth of struct xpv */
-
-STATIC void
-S_more_xpv(pTHX)
+STATIC void *
+S_more_bodies (pTHX_ void **arena_root, void **root, size_t size)
{
- xpv_allocated* xpv;
- xpv_allocated* xpvend;
- New(713, xpv, PERL_ARENA_SIZE/sizeof(xpv_allocated), xpv_allocated);
- *((xpv_allocated**)xpv) = PL_xpv_arenaroot;
- PL_xpv_arenaroot = xpv;
-
- xpvend = &xpv[PERL_ARENA_SIZE / sizeof(xpv_allocated) - 1];
- PL_xpv_root = ++xpv;
- while (xpv < xpvend) {
- *((xpv_allocated**)xpv) = xpv + 1;
- xpv++;
- }
- *((xpv_allocated**)xpv) = 0;
-}
+ char *start;
+ const char *end;
+ const size_t count = PERL_ARENA_SIZE/size;
+ New(0, start, count*size, char);
+ *((void **) start) = *arena_root;
+ *arena_root = (void *)start;
-/* allocate another arena's worth of struct xpviv */
-
-STATIC void
-S_more_xpviv(pTHX)
-{
- xpviv_allocated* xpviv;
- xpviv_allocated* xpvivend;
- New(713, xpviv, PERL_ARENA_SIZE/sizeof(xpviv_allocated), xpviv_allocated);
- *((xpviv_allocated**)xpviv) = PL_xpviv_arenaroot;
- PL_xpviv_arenaroot = xpviv;
+ end = start + (count-1) * size;
- xpvivend = &xpviv[PERL_ARENA_SIZE / sizeof(xpviv_allocated) - 1];
- PL_xpviv_root = ++xpviv;
- while (xpviv < xpvivend) {
- *((xpviv_allocated**)xpviv) = xpviv + 1;
- xpviv++;
- }
- *((xpviv_allocated**)xpviv) = 0;
-}
+ /* The initial slot is used to link the arenas together, so it isn't to be
+ linked into the list of ready-to-use bodies. */
-/* allocate another arena's worth of struct xpvnv */
+ start += size;
-STATIC void
-S_more_xpvnv(pTHX)
-{
- XPVNV* xpvnv;
- XPVNV* xpvnvend;
- New(715, xpvnv, PERL_ARENA_SIZE/sizeof(XPVNV), XPVNV);
- *((XPVNV**)xpvnv) = PL_xpvnv_arenaroot;
- PL_xpvnv_arenaroot = xpvnv;
+ *root = (void *)start;
- xpvnvend = &xpvnv[PERL_ARENA_SIZE / sizeof(XPVNV) - 1];
- PL_xpvnv_root = ++xpvnv;
- while (xpvnv < xpvnvend) {
- *((XPVNV**)xpvnv) = xpvnv + 1;
- xpvnv++;
+ while (start < end) {
+ char * const next = start + size;
+ *(void**) start = (void *)next;
+ start = next;
}
- *((XPVNV**)xpvnv) = 0;
-}
-
-/* allocate another arena's worth of struct xpvcv */
-
-STATIC void
-S_more_xpvcv(pTHX)
-{
- XPVCV* xpvcv;
- XPVCV* xpvcvend;
- New(716, xpvcv, PERL_ARENA_SIZE/sizeof(XPVCV), XPVCV);
- *((XPVCV**)xpvcv) = PL_xpvcv_arenaroot;
- PL_xpvcv_arenaroot = xpvcv;
-
- xpvcvend = &xpvcv[PERL_ARENA_SIZE / sizeof(XPVCV) - 1];
- PL_xpvcv_root = ++xpvcv;
- while (xpvcv < xpvcvend) {
- *((XPVCV**)xpvcv) = xpvcv + 1;
- xpvcv++;
- }
- *((XPVCV**)xpvcv) = 0;
-}
-
-/* allocate another arena's worth of struct xpvav */
-
-STATIC void
-S_more_xpvav(pTHX)
-{
- xpvav_allocated* xpvav;
- xpvav_allocated* xpvavend;
- New(717, xpvav, PERL_ARENA_SIZE/sizeof(xpvav_allocated),
- xpvav_allocated);
- *((xpvav_allocated**)xpvav) = PL_xpvav_arenaroot;
- PL_xpvav_arenaroot = xpvav;
-
- xpvavend = &xpvav[PERL_ARENA_SIZE / sizeof(xpvav_allocated) - 1];
- PL_xpvav_root = ++xpvav;
- while (xpvav < xpvavend) {
- *((xpvav_allocated**)xpvav) = xpvav + 1;
- xpvav++;
- }
- *((xpvav_allocated**)xpvav) = 0;
-}
-
-/* allocate another arena's worth of struct xpvhv */
-
-STATIC void
-S_more_xpvhv(pTHX)
-{
- xpvhv_allocated* xpvhv;
- xpvhv_allocated* xpvhvend;
- New(718, xpvhv, PERL_ARENA_SIZE/sizeof(xpvhv_allocated),
- xpvhv_allocated);
- *((xpvhv_allocated**)xpvhv) = PL_xpvhv_arenaroot;
- PL_xpvhv_arenaroot = xpvhv;
-
- xpvhvend = &xpvhv[PERL_ARENA_SIZE / sizeof(xpvhv_allocated) - 1];
- PL_xpvhv_root = ++xpvhv;
- while (xpvhv < xpvhvend) {
- *((xpvhv_allocated**)xpvhv) = xpvhv + 1;
- xpvhv++;
- }
- *((xpvhv_allocated**)xpvhv) = 0;
-}
-
-/* allocate another arena's worth of struct xpvmg */
-
-STATIC void
-S_more_xpvmg(pTHX)
-{
- XPVMG* xpvmg;
- XPVMG* xpvmgend;
- New(719, xpvmg, PERL_ARENA_SIZE/sizeof(XPVMG), XPVMG);
- *((XPVMG**)xpvmg) = PL_xpvmg_arenaroot;
- PL_xpvmg_arenaroot = xpvmg;
-
- xpvmgend = &xpvmg[PERL_ARENA_SIZE / sizeof(XPVMG) - 1];
- PL_xpvmg_root = ++xpvmg;
- while (xpvmg < xpvmgend) {
- *((XPVMG**)xpvmg) = xpvmg + 1;
- xpvmg++;
- }
- *((XPVMG**)xpvmg) = 0;
-}
-
-/* allocate another arena's worth of struct xpvgv */
-
-STATIC void
-S_more_xpvgv(pTHX)
-{
- XPVGV* xpvgv;
- XPVGV* xpvgvend;
- New(720, xpvgv, PERL_ARENA_SIZE/sizeof(XPVGV), XPVGV);
- *((XPVGV**)xpvgv) = PL_xpvgv_arenaroot;
- PL_xpvgv_arenaroot = xpvgv;
-
- xpvgvend = &xpvgv[PERL_ARENA_SIZE / sizeof(XPVGV) - 1];
- PL_xpvgv_root = ++xpvgv;
- while (xpvgv < xpvgvend) {
- *((XPVGV**)xpvgv) = xpvgv + 1;
- xpvgv++;
- }
- *((XPVGV**)xpvgv) = 0;
-}
-
-/* allocate another arena's worth of struct xpvlv */
-
-STATIC void
-S_more_xpvlv(pTHX)
-{
- XPVLV* xpvlv;
- XPVLV* xpvlvend;
- New(720, xpvlv, PERL_ARENA_SIZE/sizeof(XPVLV), XPVLV);
- *((XPVLV**)xpvlv) = PL_xpvlv_arenaroot;
- PL_xpvlv_arenaroot = xpvlv;
-
- xpvlvend = &xpvlv[PERL_ARENA_SIZE / sizeof(XPVLV) - 1];
- PL_xpvlv_root = ++xpvlv;
- while (xpvlv < xpvlvend) {
- *((XPVLV**)xpvlv) = xpvlv + 1;
- xpvlv++;
- }
- *((XPVLV**)xpvlv) = 0;
-}
-
-/* allocate another arena's worth of struct xpvbm */
-
-STATIC void
-S_more_xpvbm(pTHX)
-{
- XPVBM* xpvbm;
- XPVBM* xpvbmend;
- New(721, xpvbm, PERL_ARENA_SIZE/sizeof(XPVBM), XPVBM);
- *((XPVBM**)xpvbm) = PL_xpvbm_arenaroot;
- PL_xpvbm_arenaroot = xpvbm;
-
- xpvbmend = &xpvbm[PERL_ARENA_SIZE / sizeof(XPVBM) - 1];
- PL_xpvbm_root = ++xpvbm;
- while (xpvbm < xpvbmend) {
- *((XPVBM**)xpvbm) = xpvbm + 1;
- xpvbm++;
- }
- *((XPVBM**)xpvbm) = 0;
-}
-
-/* grab a new NV body from the free list, allocating more if necessary */
-
-STATIC XPVNV*
-S_new_xnv(pTHX)
-{
- NV* xnv;
- LOCK_SV_MUTEX;
- if (!PL_xnv_root)
- S_more_xnv(aTHX);
- xnv = PL_xnv_root;
- PL_xnv_root = *(NV**)xnv;
- UNLOCK_SV_MUTEX;
- return (XPVNV*)((char*)xnv - STRUCT_OFFSET(XPVNV, xnv_nv));
-}
-
-/* return an NV body to the free list */
-
-STATIC void
-S_del_xnv(pTHX_ XPVNV *p)
-{
- NV* xnv = (NV*)((char*)(p) + STRUCT_OFFSET(XPVNV, xnv_nv));
- LOCK_SV_MUTEX;
- *(NV**)xnv = PL_xnv_root;
- PL_xnv_root = xnv;
- UNLOCK_SV_MUTEX;
-}
-
-/* grab a new struct xpv from the free list, allocating more if necessary */
-
-STATIC XPV*
-S_new_xpv(pTHX)
-{
- xpv_allocated* xpv;
- LOCK_SV_MUTEX;
- if (!PL_xpv_root)
- S_more_xpv(aTHX);
- xpv = PL_xpv_root;
- PL_xpv_root = *(xpv_allocated**)xpv;
- UNLOCK_SV_MUTEX;
- /* If xpv_allocated is the same structure as XPV then the two OFFSETs
- sum to zero, and the pointer is unchanged. If the allocated structure
- is smaller (no initial IV actually allocated) then the net effect is
- to subtract the size of the IV from the pointer, to return a new pointer
- as if an initial IV were actually allocated. */
- return (XPV*)((char*)xpv - STRUCT_OFFSET(XPV, xpv_cur)
- + STRUCT_OFFSET(xpv_allocated, xpv_cur));
-}
-
-/* return a struct xpv to the free list */
-
-STATIC void
-S_del_xpv(pTHX_ XPV *p)
-{
- xpv_allocated* xpv
- = (xpv_allocated*)((char*)(p) + STRUCT_OFFSET(XPV, xpv_cur)
- - STRUCT_OFFSET(xpv_allocated, xpv_cur));
- LOCK_SV_MUTEX;
- *(xpv_allocated**)xpv = PL_xpv_root;
- PL_xpv_root = xpv;
- UNLOCK_SV_MUTEX;
-}
-
-/* grab a new struct xpviv from the free list, allocating more if necessary */
-
-STATIC XPVIV*
-S_new_xpviv(pTHX)
-{
- xpviv_allocated* xpviv;
- LOCK_SV_MUTEX;
- if (!PL_xpviv_root)
- S_more_xpviv(aTHX);
- xpviv = PL_xpviv_root;
- PL_xpviv_root = *(xpviv_allocated**)xpviv;
- UNLOCK_SV_MUTEX;
- /* If xpviv_allocated is the same structure as XPVIV then the two OFFSETs
- sum to zero, and the pointer is unchanged. If the allocated structure
- is smaller (no initial IV actually allocated) then the net effect is
- to subtract the size of the IV from the pointer, to return a new pointer
- as if an initial IV were actually allocated. */
- return (XPVIV*)((char*)xpviv - STRUCT_OFFSET(XPVIV, xpv_cur)
- + STRUCT_OFFSET(xpviv_allocated, xpv_cur));
-}
-
-/* return a struct xpviv to the free list */
-
-STATIC void
-S_del_xpviv(pTHX_ XPVIV *p)
-{
- xpviv_allocated* xpviv
- = (xpviv_allocated*)((char*)(p) + STRUCT_OFFSET(XPVIV, xpv_cur)
- - STRUCT_OFFSET(xpviv_allocated, xpv_cur));
- LOCK_SV_MUTEX;
- *(xpviv_allocated**)xpviv = PL_xpviv_root;
- PL_xpviv_root = xpviv;
- UNLOCK_SV_MUTEX;
-}
-
-/* grab a new struct xpvnv from the free list, allocating more if necessary */
-
-STATIC XPVNV*
-S_new_xpvnv(pTHX)
-{
- XPVNV* xpvnv;
- LOCK_SV_MUTEX;
- if (!PL_xpvnv_root)
- S_more_xpvnv(aTHX);
- xpvnv = PL_xpvnv_root;
- PL_xpvnv_root = *(XPVNV**)xpvnv;
- UNLOCK_SV_MUTEX;
- return xpvnv;
-}
-
-/* return a struct xpvnv to the free list */
-
-STATIC void
-S_del_xpvnv(pTHX_ XPVNV *p)
-{
- LOCK_SV_MUTEX;
- *(XPVNV**)p = PL_xpvnv_root;
- PL_xpvnv_root = p;
- UNLOCK_SV_MUTEX;
-}
-
-/* grab a new struct xpvcv from the free list, allocating more if necessary */
-
-STATIC XPVCV*
-S_new_xpvcv(pTHX)
-{
- XPVCV* xpvcv;
- LOCK_SV_MUTEX;
- if (!PL_xpvcv_root)
- S_more_xpvcv(aTHX);
- xpvcv = PL_xpvcv_root;
- PL_xpvcv_root = *(XPVCV**)xpvcv;
- UNLOCK_SV_MUTEX;
- return xpvcv;
-}
-
-/* return a struct xpvcv to the free list */
-
-STATIC void
-S_del_xpvcv(pTHX_ XPVCV *p)
-{
- LOCK_SV_MUTEX;
- *(XPVCV**)p = PL_xpvcv_root;
- PL_xpvcv_root = p;
- UNLOCK_SV_MUTEX;
-}
-
-/* grab a new struct xpvav from the free list, allocating more if necessary */
-
-STATIC XPVAV*
-S_new_xpvav(pTHX)
-{
- xpvav_allocated* xpvav;
- LOCK_SV_MUTEX;
- if (!PL_xpvav_root)
- S_more_xpvav(aTHX);
- xpvav = PL_xpvav_root;
- PL_xpvav_root = *(xpvav_allocated**)xpvav;
- UNLOCK_SV_MUTEX;
- return (XPVAV*)((char*)xpvav - STRUCT_OFFSET(XPVAV, xav_fill)
- + STRUCT_OFFSET(xpvav_allocated, xav_fill));
-}
-
-/* return a struct xpvav to the free list */
+ *(void **)start = 0;
-STATIC void
-S_del_xpvav(pTHX_ XPVAV *p)
-{
- xpvav_allocated* xpvav
- = (xpvav_allocated*)((char*)(p) + STRUCT_OFFSET(XPVAV, xav_fill)
- - STRUCT_OFFSET(xpvav_allocated, xav_fill));
- LOCK_SV_MUTEX;
- *(xpvav_allocated**)xpvav = PL_xpvav_root;
- PL_xpvav_root = xpvav;
- UNLOCK_SV_MUTEX;
+ return *root;
}
-/* grab a new struct xpvhv from the free list, allocating more if necessary */
+/* grab a new thing from the free list, allocating more if necessary */
-STATIC XPVHV*
-S_new_xpvhv(pTHX)
+STATIC void *
+S_new_body(pTHX_ void **arena_root, void **root, size_t size)
{
- xpvhv_allocated* xpvhv;
+ void *xpv;
LOCK_SV_MUTEX;
- if (!PL_xpvhv_root)
- S_more_xpvhv(aTHX);
- xpvhv = PL_xpvhv_root;
- PL_xpvhv_root = *(xpvhv_allocated**)xpvhv;
+ xpv = *root ? *root : S_more_bodies(aTHX_ arena_root, root, size);
+ *root = *(void**)xpv;
UNLOCK_SV_MUTEX;
- return (XPVHV*)((char*)xpvhv - STRUCT_OFFSET(XPVHV, xhv_fill)
- + STRUCT_OFFSET(xpvhv_allocated, xhv_fill));
+ return xpv;
}
/* return a thing to the free list */

#define del_body(thing, root)			\
    STMT_START {				\
	LOCK_SV_MUTEX;				\
	*(void **)thing = *root;		\
	*root = (void*)thing;			\
	UNLOCK_SV_MUTEX;			\
    } STMT_END

/* Conventionally we simply malloc() a big block of memory, then divide it
   up into lots of the thing that we're allocating.

   This macro will expand to a call to S_new_body. So for XPVBM (with
   ithreads), it would become

   S_new_body(my_perl, (void**)&(my_perl->Ixpvbm_arenaroot),
	      (void**)&(my_perl->Ixpvbm_root), sizeof(XPVBM))
*/

#define new_body(TYPE,lctype)						\
    S_new_body(aTHX_ (void**)&PL_ ## lctype ## _arenaroot,		\
	       (void**)&PL_ ## lctype ## _root,				\
	       sizeof(TYPE))

#define del_body_type(p,TYPE,lctype)					\
    del_body((void*)p, (void**)&PL_ ## lctype ## _root)

/* But for some types, we cheat. The type starts with some members that are
   never accessed. So we allocate the substructure, starting at the first used
   member, then adjust the pointer back in memory by the size of the bit not
   allocated, so it's as if we allocated the full structure.
   (But things will all go boom if you write to the part that is "not there",
   because you'll be overwriting the last members of the preceding structure
   in memory.)

   We calculate the correction using the STRUCT_OFFSET macro. For example, if
   xpv_allocated is the same structure as XPV then the two OFFSETs sum to zero,
   and the pointer is unchanged. If the allocated structure is smaller (no
   initial NV actually allocated) then the net effect is to subtract the size
   of the NV from the pointer, to return a new pointer as if an initial NV were
   actually allocated.

   This is the same trick as was used for NV and IV bodies. Ironically it
   doesn't need to be used for NV bodies any more, because NV is now at the
   start of the structure. IV bodies don't need it either, because they are
   no longer allocated.  */

#define new_body_allocated(TYPE,lctype,member)				\
    (void*)((char*)S_new_body(aTHX_ (void**)&PL_ ## lctype ## _arenaroot, \
			      (void**)&PL_ ## lctype ## _root,		\
			      sizeof(lctype ## _allocated)) -		\
	    STRUCT_OFFSET(TYPE, member)					\
	    + STRUCT_OFFSET(lctype ## _allocated, member))


#define del_body_allocated(p,TYPE,lctype,member)			\
    del_body((void*)((char*)p + STRUCT_OFFSET(TYPE, member)		\
		     - STRUCT_OFFSET(lctype ## _allocated, member)),	\
	     (void**)&PL_ ## lctype ## _root)
#define my_safemalloc(s) (void*)safemalloc(s)
#define my_safefree(p) safefree((char*)p)
#else /* !PURIFY */
/* Per-type allocation/deallocation wrappers, all routed through the generic
   body allocator.  The *_allocated variants use the offset trick documented
   above new_body_allocated. */
#define new_XNV()	new_body(NV, xnv)
#define del_XNV(p)	del_body_type(p, NV, xnv)

#define new_XPV()	new_body_allocated(XPV, xpv, xpv_cur)
#define del_XPV(p)	del_body_allocated(p, XPV, xpv, xpv_cur)

#define new_XPVIV()	new_body_allocated(XPVIV, xpviv, xpv_cur)
#define del_XPVIV(p)	del_body_allocated(p, XPVIV, xpviv, xpv_cur)

#define new_XPVNV()	new_body(XPVNV, xpvnv)
#define del_XPVNV(p)	del_body_type(p, XPVNV, xpvnv)

#define new_XPVCV()	new_body(XPVCV, xpvcv)
#define del_XPVCV(p)	del_body_type(p, XPVCV, xpvcv)

#define new_XPVAV()	new_body_allocated(XPVAV, xpvav, xav_fill)
#define del_XPVAV(p)	del_body_allocated(p, XPVAV, xpvav, xav_fill)

#define new_XPVHV()	new_body_allocated(XPVHV, xpvhv, xhv_fill)
#define del_XPVHV(p)	del_body_allocated(p, XPVHV, xpvhv, xhv_fill)

#define new_XPVMG()	new_body(XPVMG, xpvmg)
#define del_XPVMG(p)	del_body_type(p, XPVMG, xpvmg)

#define new_XPVGV()	new_body(XPVGV, xpvgv)
#define del_XPVGV(p)	del_body_type(p, XPVGV, xpvgv)

#define new_XPVLV()	new_body(XPVLV, xpvlv)
#define del_XPVLV(p)	del_body_type(p, XPVLV, xpvlv)

#define new_XPVBM()	new_body(XPVBM, xpvbm)
#define del_XPVBM(p)	del_body_type(p, XPVBM, xpvbm)
#endif /* PURIFY */
=cut
*/
-bool
+void
Perl_sv_upgrade(pTHX_ register SV *sv, U32 mt)
{
-
- char* pv;
- U32 cur;
- U32 len;
- IV iv;
- NV nv;
- MAGIC* magic;
- HV* stash;
+ void** old_body_arena;
+ size_t old_body_offset;
+ size_t old_body_length; /* Well, the length to copy. */
+ void* old_body;
+#ifndef NV_ZERO_IS_ALLBITS_ZERO
+ /* If NV 0.0 is store as all bits 0 then Zero() already creates a correct
+ 0.0 for us. */
+ bool zero_nv = TRUE;
+#endif
+ void* new_body;
+ size_t new_body_length;
+ size_t new_body_offset;
+ void** new_body_arena;
+ void** new_body_arenaroot;
+ const U32 old_type = SvTYPE(sv);
if (mt != SVt_PV && SvIsCOW(sv)) {
sv_force_normal_flags(sv, 0);
}
if (SvTYPE(sv) == mt)
- return TRUE;
+ return;
+
+ if (SvTYPE(sv) > mt)
+ Perl_croak(aTHX_ "sv_upgrade from type %d down to type %d",
+ (int)SvTYPE(sv), (int)mt);
+
+
+ old_body = SvANY(sv);
+ old_body_arena = 0;
+ old_body_offset = 0;
+ old_body_length = 0;
+ new_body_offset = 0;
+ new_body_length = ~0;
+
+ /* Copying structures onto other structures that have been neatly zeroed
+ has a subtle gotcha. Consider XPVMG
+
+ +------+------+------+------+------+-------+-------+
+ | NV | CUR | LEN | IV | MAGIC | STASH |
+ +------+------+------+------+------+-------+-------+
+ 0 4 8 12 16 20 24 28
+
+ where NVs are aligned to 8 bytes, so that sizeof that structure is
+ actually 32 bytes long, with 4 bytes of padding at the end:
+
+ +------+------+------+------+------+-------+-------+------+
+ | NV | CUR | LEN | IV | MAGIC | STASH | ??? |
+ +------+------+------+------+------+-------+-------+------+
+ 0 4 8 12 16 20 24 28 32
- pv = NULL;
- cur = 0;
- len = 0;
- iv = 0;
- nv = 0.0;
- magic = NULL;
- stash = Nullhv;
+ so what happens if you allocate memory for this structure:
+
+ +------+------+------+------+------+-------+-------+------+------+...
+ | NV | CUR | LEN | IV | MAGIC | STASH | GP | NAME |
+ +------+------+------+------+------+-------+-------+------+------+...
+ 0 4 8 12 16 20 24 28 32 36
+
+ zero it, then copy sizeof(XPVMG) bytes on top of it? Not quite what you
+ expect, because you copy the area marked ??? onto GP. Now, ??? may have
+ started out as zero once, but it's quite possible that it isn't. So now,
+ rather than a nicely zeroed GP, you have it pointing somewhere random.
+ Bugs ensue.
+
+ (In fact, GP ends up pointing at a previous GP structure, because the
+ principal cause of the padding in XPVMG getting garbage is a copy of
+ sizeof(XPVMG) bytes from a XPVGV structure in sv_unglob)
+
+ So we are careful and work out the size of used parts of all the
+ structures. */
switch (SvTYPE(sv)) {
case SVt_NULL:
break;
case SVt_IV:
- iv = SvIVX(sv);
if (mt == SVt_NV)
mt = SVt_PVNV;
else if (mt < SVt_PVIV)
mt = SVt_PVIV;
+ old_body_offset = STRUCT_OFFSET(XPVIV, xiv_iv);
+ old_body_length = sizeof(IV);
break;
case SVt_NV:
- nv = SvNVX(sv);
- del_XNV(SvANY(sv));
+ old_body_arena = (void **) &PL_xnv_root;
+ old_body_length = sizeof(NV);
+#ifndef NV_ZERO_IS_ALLBITS_ZERO
+ zero_nv = FALSE;
+#endif
if (mt < SVt_PVNV)
mt = SVt_PVNV;
break;
case SVt_RV:
- pv = (char*)SvRV(sv);
break;
case SVt_PV:
- pv = SvPVX_mutable(sv);
- cur = SvCUR(sv);
- len = SvLEN(sv);
- del_XPV(SvANY(sv));
+ old_body_arena = (void **) &PL_xpv_root;
+ old_body_offset = STRUCT_OFFSET(XPV, xpv_cur)
+ - STRUCT_OFFSET(xpv_allocated, xpv_cur);
+ old_body_length = STRUCT_OFFSET(XPV, xpv_len)
+ + sizeof (((XPV*)SvANY(sv))->xpv_len)
+ - old_body_offset;
if (mt <= SVt_IV)
mt = SVt_PVIV;
else if (mt == SVt_NV)
mt = SVt_PVNV;
break;
case SVt_PVIV:
- pv = SvPVX_mutable(sv);
- cur = SvCUR(sv);
- len = SvLEN(sv);
- iv = SvIVX(sv);
- del_XPVIV(SvANY(sv));
+ old_body_arena = (void **) &PL_xpviv_root;
+ old_body_offset = STRUCT_OFFSET(XPVIV, xpv_cur)
+ - STRUCT_OFFSET(xpviv_allocated, xpv_cur);
+ old_body_length = STRUCT_OFFSET(XPVIV, xiv_u)
+ + sizeof (((XPVIV*)SvANY(sv))->xiv_u)
+ - old_body_offset;
break;
case SVt_PVNV:
- pv = SvPVX_mutable(sv);
- cur = SvCUR(sv);
- len = SvLEN(sv);
- iv = SvIVX(sv);
- nv = SvNVX(sv);
- del_XPVNV(SvANY(sv));
+ old_body_arena = (void **) &PL_xpvnv_root;
+ old_body_length = STRUCT_OFFSET(XPVNV, xiv_u)
+ + sizeof (((XPVNV*)SvANY(sv))->xiv_u);
+#ifndef NV_ZERO_IS_ALLBITS_ZERO
+ zero_nv = FALSE;
+#endif
break;
case SVt_PVMG:
/* Because the XPVMG of PL_mess_sv isn't allocated from the arena,
Given that it only has meaning inside the pad, it shouldn't be set
on anything that can get upgraded. */
assert((SvFLAGS(sv) & SVpad_TYPED) == 0);
- pv = SvPVX_mutable(sv);
- cur = SvCUR(sv);
- len = SvLEN(sv);
- iv = SvIVX(sv);
- nv = SvNVX(sv);
- magic = SvMAGIC(sv);
- stash = SvSTASH(sv);
- del_XPVMG(SvANY(sv));
+ old_body_arena = (void **) &PL_xpvmg_root;
+ old_body_length = STRUCT_OFFSET(XPVMG, xmg_stash)
+ + sizeof (((XPVMG*)SvANY(sv))->xmg_stash);
+#ifndef NV_ZERO_IS_ALLBITS_ZERO
+ zero_nv = FALSE;
+#endif
break;
default:
Perl_croak(aTHX_ "Can't upgrade that kind of scalar");
case SVt_NULL:
Perl_croak(aTHX_ "Can't upgrade to undef");
case SVt_IV:
+ assert(old_type == SVt_NULL);
SvANY(sv) = (XPVIV*)((char*)&(sv->sv_u.svu_iv) - STRUCT_OFFSET(XPVIV, xiv_iv));
- SvIV_set(sv, iv);
- break;
+ SvIV_set(sv, 0);
+ return;
case SVt_NV:
+ assert(old_type == SVt_NULL);
SvANY(sv) = new_XNV();
- SvNV_set(sv, nv);
- break;
+ SvNV_set(sv, 0);
+ return;
case SVt_RV:
+ assert(old_type == SVt_NULL);
SvANY(sv) = &sv->sv_u.svu_rv;
- SvRV_set(sv, (SV*)pv);
- break;
+ SvRV_set(sv, 0);
+ return;
case SVt_PVHV:
SvANY(sv) = new_XPVHV();
HvFILL(sv) = 0;
HvMAX(sv) = 0;
HvTOTALKEYS(sv) = 0;
- /* Fall through... */
- if (0) {
- case SVt_PVAV:
- SvANY(sv) = new_XPVAV();
- AvMAX(sv) = -1;
- AvFILLp(sv) = -1;
- AvALLOC(sv) = 0;
- AvREAL_only(sv);
- }
- /* to here. */
- /* XXX? Only SVt_NULL is ever upgraded to AV or HV? */
- assert(!pv);
- /* FIXME. Should be able to remove all this if()... if the above
- assertion is genuinely always true. */
- if(SvOOK(sv)) {
- pv -= iv;
- SvFLAGS(sv) &= ~SVf_OOK;
- }
- Safefree(pv);
+ goto hv_av_common;
+
+ case SVt_PVAV:
+ SvANY(sv) = new_XPVAV();
+ AvMAX(sv) = -1;
+ AvFILLp(sv) = -1;
+ AvALLOC(sv) = 0;
+ AvREAL_only(sv);
+
+ hv_av_common:
+ /* SVt_NULL isn't the only thing upgraded to AV or HV.
+ The target created by newSVrv also is, and it can have magic.
+ However, it never has SvPVX set.
+ */
+ if (old_type >= SVt_RV) {
+ assert(SvPVX_const(sv) == 0);
+ }
+
+ /* Could put this in the else clause below, as PVMG must have SvPVX
+ 0 already (the assertion above) */
SvPV_set(sv, (char*)0);
- SvMAGIC_set(sv, magic);
- SvSTASH_set(sv, stash);
+
+ if (old_type >= SVt_PVMG) {
+ SvMAGIC_set(sv, ((XPVMG*)old_body)->xmg_magic);
+ SvSTASH_set(sv, ((XPVMG*)old_body)->xmg_stash);
+ } else {
+ SvMAGIC_set(sv, 0);
+ SvSTASH_set(sv, 0);
+ }
break;
case SVt_PVIO:
- SvANY(sv) = new_XPVIO();
- Zero(SvANY(sv), 1, XPVIO);
- IoPAGE_LEN(sv) = 60;
- goto set_magic_common;
+ new_body = new_XPVIO();
+ new_body_length = sizeof(XPVIO);
+ goto zero;
case SVt_PVFM:
- SvANY(sv) = new_XPVFM();
- Zero(SvANY(sv), 1, XPVFM);
- goto set_magic_common;
+ new_body = new_XPVFM();
+ new_body_length = sizeof(XPVFM);
+ goto zero;
+
case SVt_PVBM:
- SvANY(sv) = new_XPVBM();
- BmRARE(sv) = 0;
- BmUSEFUL(sv) = 0;
- BmPREVIOUS(sv) = 0;
- goto set_magic_common;
+ new_body_length = sizeof(XPVBM);
+ new_body_arena = (void **) &PL_xpvbm_root;
+ new_body_arenaroot = (void **) &PL_xpvbm_arenaroot;
+ goto new_body;
case SVt_PVGV:
- SvANY(sv) = new_XPVGV();
- GvGP(sv) = 0;
- GvNAME(sv) = 0;
- GvNAMELEN(sv) = 0;
- GvSTASH(sv) = 0;
- GvFLAGS(sv) = 0;
- goto set_magic_common;
+ new_body_length = sizeof(XPVGV);
+ new_body_arena = (void **) &PL_xpvgv_root;
+ new_body_arenaroot = (void **) &PL_xpvgv_arenaroot;
+ goto new_body;
case SVt_PVCV:
- SvANY(sv) = new_XPVCV();
- Zero(SvANY(sv), 1, XPVCV);
- goto set_magic_common;
+ new_body_length = sizeof(XPVCV);
+ new_body_arena = (void **) &PL_xpvcv_root;
+ new_body_arenaroot = (void **) &PL_xpvcv_arenaroot;
+ goto new_body;
case SVt_PVLV:
- SvANY(sv) = new_XPVLV();
- LvTARGOFF(sv) = 0;
- LvTARGLEN(sv) = 0;
- LvTARG(sv) = 0;
- LvTYPE(sv) = 0;
- GvGP(sv) = 0;
- GvNAME(sv) = 0;
- GvNAMELEN(sv) = 0;
- GvSTASH(sv) = 0;
- GvFLAGS(sv) = 0;
- /* Fall through. */
- if (0) {
- case SVt_PVMG:
- SvANY(sv) = new_XPVMG();
- }
- set_magic_common:
- SvMAGIC_set(sv, magic);
- SvSTASH_set(sv, stash);
- /* Fall through. */
- if (0) {
- case SVt_PVNV:
- SvANY(sv) = new_XPVNV();
- }
- SvNV_set(sv, nv);
- /* Fall through. */
- if (0) {
- case SVt_PVIV:
- SvANY(sv) = new_XPVIV();
- if (SvNIOK(sv))
- (void)SvIOK_on(sv);
- SvNOK_off(sv);
- }
- SvIV_set(sv, iv);
- /* Fall through. */
- if (0) {
- case SVt_PV:
- SvANY(sv) = new_XPV();
+ new_body_length = sizeof(XPVLV);
+ new_body_arena = (void **) &PL_xpvlv_root;
+ new_body_arenaroot = (void **) &PL_xpvlv_arenaroot;
+ goto new_body;
+ case SVt_PVMG:
+ new_body_length = sizeof(XPVMG);
+ new_body_arena = (void **) &PL_xpvmg_root;
+ new_body_arenaroot = (void **) &PL_xpvmg_arenaroot;
+ goto new_body;
+ case SVt_PVNV:
+ new_body_length = sizeof(XPVNV);
+ new_body_arena = (void **) &PL_xpvnv_root;
+ new_body_arenaroot = (void **) &PL_xpvnv_arenaroot;
+ goto new_body;
+ case SVt_PVIV:
+ new_body_offset = STRUCT_OFFSET(XPVIV, xpv_cur)
+ - STRUCT_OFFSET(xpviv_allocated, xpv_cur);
+ new_body_length = sizeof(XPVIV) - new_body_offset;
+ new_body_arena = (void **) &PL_xpviv_root;
+ new_body_arenaroot = (void **) &PL_xpviv_arenaroot;
+ /* XXX Is this still needed? Was it ever needed? Surely as there is
+ no route from NV to PVIV, NOK can never be true */
+ if (SvNIOK(sv))
+ (void)SvIOK_on(sv);
+ SvNOK_off(sv);
+ goto new_body_no_NV;
+ case SVt_PV:
+ new_body_offset = STRUCT_OFFSET(XPV, xpv_cur)
+ - STRUCT_OFFSET(xpv_allocated, xpv_cur);
+ new_body_length = sizeof(XPV) - new_body_offset;
+ new_body_arena = (void **) &PL_xpv_root;
+ new_body_arenaroot = (void **) &PL_xpv_arenaroot;
+ new_body_no_NV:
+ /* PV and PVIV don't have an NV slot. */
+#ifndef NV_ZERO_IS_ALLBITS_ZERO
+ zero_nv = FALSE;
+#endif
+
+ new_body:
+ assert(new_body_length);
+#ifndef PURIFY
+ /* This points to the start of the allocated area. */
+ new_body = S_new_body(aTHX_ new_body_arenaroot, new_body_arena,
+ new_body_length);
+#else
+ /* We always allocated the full length item with PURIFY */
+ new_body_length += new_body_offset;
+ new_body_offset = 0;
+ new_body = my_safemalloc(new_body_length);
+
+#endif
+ zero:
+ Zero(new_body, new_body_length, char);
+ new_body = ((char *)new_body) - new_body_offset;
+ SvANY(sv) = new_body;
+
+ if (old_body_length) {
+ Copy((char *)old_body + old_body_offset,
+ (char *)new_body + old_body_offset,
+ old_body_length, char);
}
- SvPV_set(sv, pv);
- SvCUR_set(sv, cur);
- SvLEN_set(sv, len);
+
+#ifndef NV_ZERO_IS_ALLBITS_ZERO
+ if (zero_nv)
+ SvNV_set(sv, 0);
+#endif
+
+ if (mt == SVt_PVIO)
+ IoPAGE_LEN(sv) = 60;
+ if (old_type < SVt_RV)
+ SvPV_set(sv, 0);
break;
+ default:
+ Perl_croak(aTHX_ "panic: sv_upgrade to unknown type %lu", mt);
+ }
+
+
+ if (old_body_arena) {
+#ifdef PURIFY
+ my_safefree(old_body);
+#else
+ del_body((void*)((char*)old_body + old_body_offset),
+ old_body_arena);
+#endif
}
- return TRUE;
}
/*
assert(SvTYPE(sv) != SVt_PVHV);
assert(SvTYPE(sv) != SVt_PVAV);
if (SvIVX(sv)) {
- const char *s = SvPVX_const(sv);
+ const char * const s = SvPVX_const(sv);
SvLEN_set(sv, SvLEN(sv) + SvIVX(sv));
SvPV_set(sv, SvPVX(sv) - SvIVX(sv));
SvIV_set(sv, 0);
sv_unref(sv);
if (SvTYPE(sv) < SVt_PV) {
sv_upgrade(sv, SVt_PV);
- s = SvPVX(sv);
+ s = SvPVX_mutable(sv);
}
else if (SvOOK(sv)) { /* pv is offset? */
sv_backoff(sv);
- s = SvPVX(sv);
+ s = SvPVX_mutable(sv);
if (newlen > SvLEN(sv))
newlen += 10 * (newlen - SvCUR(sv)); /* avoid copy each time */
#ifdef HAS_64K_LIMIT
newlen = PERL_STRLEN_ROUNDUP(newlen);
if (SvLEN(sv) && s) {
#ifdef MYMALLOC
- const STRLEN l = malloced_size((void*)SvPVX(sv));
+ const STRLEN l = malloced_size((void*)SvPVX_const(sv));
if (newlen <= l) {
SvLEN_set(sv, l);
return s;
{
SV *dsv;
char tmpbuf[64];
- char *pv;
+ const char *pv;
if (DO_UTF8(sv)) {
dsv = sv_2mortal(newSVpv("", 0));
register const char *typestr;
if (SvAMAGIC(sv) && (tmpstr=AMG_CALLun(sv,string)) &&
(!SvROK(tmpstr) || (SvRV(tmpstr) != SvRV(sv)))) {
- char *pv = lp ? SvPV(tmpstr, *lp) : SvPV_nolen(tmpstr);
+ /* Unwrap this: */
+ /* char *pv = lp ? SvPV(tmpstr, *lp) : SvPV_nolen(tmpstr); */
+
+ char *pv;
+ if ((SvFLAGS(tmpstr) & (SVf_POK)) == SVf_POK) {
+ if (flags & SV_CONST_RETURN) {
+ pv = (char *) SvPVX_const(tmpstr);
+ } else {
+ pv = (flags & SV_MUTABLE_RETURN)
+ ? SvPVX_mutable(tmpstr) : SvPVX(tmpstr);
+ }
+ if (lp)
+ *lp = SvCUR(tmpstr);
+ } else {
+ pv = sv_2pv_flags(tmpstr, lp, flags);
+ }
if (SvUTF8(tmpstr))
SvUTF8_on(sv);
else
Perl_sv_copypv(pTHX_ SV *dsv, register SV *ssv)
{
STRLEN len;
- const char *s;
- s = SvPV_const(ssv,len);
+ const char * const s = SvPV_const(ssv,len);
sv_setpvn(dsv,s,len);
if (SvUTF8(ssv))
SvUTF8_on(dsv);
Perl_sv_2pvbyte(pTHX_ register SV *sv, STRLEN *lp)
{
sv_utf8_downgrade(sv,0);
- return SvPV(sv,*lp);
+ return lp ? SvPV(sv,*lp) : SvPV_nolen(sv);
}
/*
return SvRV(sv) != 0;
}
if (SvPOKp(sv)) {
- register XPV* Xpvtmp;
- if ((Xpvtmp = (XPV*)SvANY(sv)) &&
+ register XPV* const Xpvtmp = (XPV*)SvANY(sv);
+ if (Xpvtmp &&
(*sv->sv_u.svu_pv > '0' ||
Xpvtmp->xpv_cur > 1 ||
(Xpvtmp->xpv_cur && *sv->sv_u.svu_pv != '0')))
* had a FLAG in SVs to signal if there are any hibit
* chars in the PV. Given that there isn't such a flag
* make the loop as fast as possible. */
- U8 *s = (U8 *) SvPVX(sv);
- U8 *e = (U8 *) SvEND(sv);
- U8 *t = s;
+ const U8 *s = (U8 *) SvPVX_const(sv);
+ const U8 *e = (U8 *) SvEND(sv);
+ const U8 *t = s;
int hibit = 0;
while (t < e) {
- U8 ch = *t++;
+ const U8 ch = *t++;
if ((hibit = !NATIVE_IS_INVARIANT(ch)))
break;
}
if (hibit) {
STRLEN len = SvCUR(sv) + 1; /* Plus the \0 */
- s = bytes_to_utf8((U8*)s, &len);
+ U8 * const recoded = bytes_to_utf8((U8*)s, &len);
SvPV_free(sv); /* No longer using what was there before. */
- SvPV_set(sv, (char*)s);
+ SvPV_set(sv, (char*)recoded);
SvCUR_set(sv, len - 1);
SvLEN_set(sv, len); /* No longer know the real size. */
}
Perl_sv_utf8_decode(pTHX_ register SV *sv)
{
if (SvPOKp(sv)) {
- U8 *c;
- U8 *e;
+ const U8 *c;
+ const U8 *e;
/* The octets may have got themselves encoded - get them back as
* bytes
/* it is actually just a matter of turning the utf8 flag on, but
* we want to make sure everything inside is valid utf8 first.
*/
- c = (U8 *) SvPVX(sv);
+ c = (const U8 *) SvPVX_const(sv);
if (!is_utf8_string(c, SvCUR(sv)+1))
return FALSE;
- e = (U8 *) SvEND(sv);
+ e = (const U8 *) SvEND(sv);
while (c < e) {
U8 ch = *c++;
if (!UTF8_IS_INVARIANT(ch)) {
if (dtype != SVt_PVLV)
sv_upgrade(dstr, SVt_PVGV);
sv_magic(dstr, dstr, PERL_MAGIC_glob, Nullch, 0);
- GvSTASH(dstr) = (HV*)SvREFCNT_inc(GvSTASH(sstr));
+ GvSTASH(dstr) = GvSTASH(sstr);
+ if (GvSTASH(dstr))
+ Perl_sv_add_backref(aTHX_ (SV*)GvSTASH(dstr), dstr);
GvNAME(dstr) = savepvn(name, len);
GvNAMELEN(dstr) = len;
SvFAKE_on(dstr); /* can coerce to non-glob */
}
if (!intro)
cv_ckproto(cv, (GV*)dstr,
- SvPOK(sref) ? SvPVX(sref) : Nullch);
+ SvPOK(sref)
+ ? SvPVX_const(sref) : Nullch);
}
GvCV(dstr) = (CV*)sref;
GvCVGEN(dstr) = 0; /* Switch off cacheness. */
}
#endif
/* Initial code is common. */
- if (SvPVX_const(dstr)) { /* we know that dtype >= SVt_PV */
- if (SvOOK(dstr)) {
- SvFLAGS(dstr) &= ~SVf_OOK;
- Safefree(SvPVX_const(dstr) - SvIVX(dstr));
- }
- else if (SvLEN(dstr))
- Safefree(SvPVX_const(dstr));
+ if (SvPVX_const(dstr)) { /* we know that dtype >= SVt_PV */
+ SvPV_free(dstr);
}
if (!isSwipe) {
#endif
{
/* SvIsCOW_shared_hash */
- UV hash = SvSHARED_HASH(sstr);
DEBUG_C(PerlIO_printf(Perl_debug_log,
"Copy on write: Sharing hash\n"));
- assert (SvTYPE(dstr) >= SVt_PVIV);
+ assert (SvTYPE(dstr) >= SVt_PV);
SvPV_set(dstr,
- sharepvn(SvPVX_const(sstr),
- (sflags & SVf_UTF8?-cur:cur), hash));
- SvUV_set(dstr, hash);
- }
+ HEK_KEY(share_hek_hek(SvSHARED_HEK_FROM_PV(SvPVX_const(sstr)))));
+ }
SvLEN_set(dstr, len);
SvCUR_set(dstr, cur);
SvREADONLY_on(dstr);
}
if (sflags & SVf_UTF8)
SvUTF8_on(dstr);
- /*SUPPRESS 560*/
if (sflags & SVp_NOK) {
SvNOKp_on(dstr);
if (sflags & SVf_NOK)
if (SvLEN(sstr) == 0) {
/* source is a COW shared hash key. */
- UV hash = SvSHARED_HASH(sstr);
DEBUG_C(PerlIO_printf(Perl_debug_log,
"Fast copy on write: Sharing hash\n"));
- SvUV_set(dstr, hash);
- new_pv = sharepvn(SvPVX_const(sstr), (SvUTF8(sstr)?-cur:cur), hash);
+ new_pv = HEK_KEY(share_hek_hek(SvSHARED_HEK_FROM_PV(SvPVX_const(sstr))));
goto common_exit;
}
SV_COW_NEXT_SV_SET(dstr, SV_COW_NEXT_SV(sstr));
(which it can do by means other than releasing copy-on-write Svs)
or by changing the other copy-on-write SVs in the loop. */
STATIC void
-S_sv_release_COW(pTHX_ register SV *sv, const char *pvx, STRLEN cur, STRLEN len,
- U32 hash, SV *after)
+S_sv_release_COW(pTHX_ register SV *sv, const char *pvx, STRLEN len, SV *after)
{
if (len) { /* this SV was SvIsCOW_normal(sv) */
/* we need to find the SV pointing to us. */
SV_COW_NEXT_SV_SET(current, after);
}
} else {
- unsharepvn(pvx, SvUTF8(sv) ? -(I32)cur : cur, hash);
+ unshare_hek(SvSHARED_HEK_FROM_PV(pvx));
}
}
const char *pvx = SvPVX_const(sv);
const STRLEN len = SvLEN(sv);
const STRLEN cur = SvCUR(sv);
- const U32 hash = SvSHARED_HASH(sv);
SV * const next = SV_COW_NEXT_SV(sv); /* next COW sv in the loop. */
if (DEBUG_C_TEST) {
PerlIO_printf(Perl_debug_log,
SvCUR_set(sv, cur);
*SvEND(sv) = '\0';
}
- sv_release_COW(sv, pvx, cur, len, hash, next);
+ sv_release_COW(sv, pvx, len, next);
if (DEBUG_C_TEST) {
sv_dump(sv);
}
if (SvREADONLY(sv)) {
if (SvFAKE(sv)) {
const char *pvx = SvPVX_const(sv);
- const int is_utf8 = SvUTF8(sv);
const STRLEN len = SvCUR(sv);
- const U32 hash = SvSHARED_HASH(sv);
SvFAKE_off(sv);
SvREADONLY_off(sv);
SvPV_set(sv, Nullch);
SvGROW(sv, len + 1);
Move(pvx,SvPVX_const(sv),len,char);
*SvEND(sv) = '\0';
- unsharepvn(pvx, is_utf8 ? -(I32)len : len, hash);
+ unshare_hek(SvSHARED_HEK_FROM_PV(pvx));
}
else if (IN_PERL_RUNTIME)
Perl_croak(aTHX_ PL_no_modify);
SV* csv = sv_2mortal(newSVpvn(spv, slen));
sv_utf8_upgrade(csv);
- spv = SvPV(csv, slen);
+ spv = SvPV_const(csv, slen);
}
else
sv_utf8_upgrade_nomg(dsv);
return sv;
}
tsv = SvRV(sv);
- sv_add_backref(tsv, sv);
+ Perl_sv_add_backref(aTHX_ tsv, sv);
SvWEAKREF_on(sv);
SvREFCNT_dec(tsv);
return sv;
* back-reference to sv onto the array associated with the backref magic.
*/
-STATIC void
-S_sv_add_backref(pTHX_ SV *tsv, SV *sv)
+void
+Perl_sv_add_backref(pTHX_ SV *tsv, SV *sv)
{
AV *av;
MAGIC *mg;
* by magic_killbackrefs() when tsv is being freed */
}
if (AvFILLp(av) >= AvMAX(av)) {
- I32 i;
- SV **svp = AvARRAY(av);
- for (i = AvFILLp(av); i >= 0; i--)
- if (!svp[i]) {
- svp[i] = sv; /* reuse the slot */
- return;
- }
av_extend(av, AvFILLp(av)+1);
}
AvARRAY(av)[++AvFILLp(av)] = sv; /* av_push() */
*/
STATIC void
-S_sv_del_backref(pTHX_ SV *sv)
+S_sv_del_backref(pTHX_ SV *tsv, SV *sv)
{
AV *av;
SV **svp;
I32 i;
- SV *tsv = SvRV(sv);
MAGIC *mg = NULL;
- if (!SvMAGICAL(tsv) || !(mg = mg_find(tsv, PERL_MAGIC_backref)))
- Perl_croak(aTHX_ "panic: del_backref");
+ if (!SvMAGICAL(tsv) || !(mg = mg_find(tsv, PERL_MAGIC_backref))) {
+ /* during global destruction the backref magic may already be gone */
+ if (PL_in_clean_all)
+ return;
+ Perl_croak(aTHX_ "panic: del_backref");
+ }
av = (AV *)mg->mg_obj;
svp = AvARRAY(av);
- for (i = AvFILLp(av); i >= 0; i--)
- if (svp[i] == sv) svp[i] = Nullsv;
+ /* We shouldn't be in here more than once, but for paranoia reasons lets
+ not assume this. */
+ for (i = AvFILLp(av); i >= 0; i--) {
+ if (svp[i] == sv) {
+ const SSize_t fill = AvFILLp(av);
+ if (i != fill) {
+ /* We weren't the last entry.
+ An unordered list has this property that you can take the
+ last element off the end to fill the hole, and it's still
+ an unordered list :-)
+ */
+ svp[i] = svp[fill];
+ }
+ svp[fill] = Nullsv;
+ AvFILLp(av) = fill - 1;
+ }
+ }
}
/*
(void)SvPOK_only_UTF8(bigstr);
if (offset + len > curlen) {
SvGROW(bigstr, offset+len+1);
- Zero(SvPVX_const(bigstr)+curlen, offset+len-curlen, char);
+ Zero(SvPVX(bigstr)+curlen, offset+len-curlen, char);
SvCUR_set(bigstr, offset+len);
}
*mid = '\0';
SvCUR_set(bigstr, mid - big);
}
- /*SUPPRESS 560*/
else if ((i = mid - big)) { /* faster from front */
midend -= littlelen;
mid = midend;
stash = SvSTASH(sv);
destructor = StashHANDLER(stash,DESTROY);
if (destructor) {
- SV* tmpref = newRV(sv);
+ SV* const tmpref = newRV(sv);
SvREADONLY_on(tmpref); /* DESTROY() could be naughty */
ENTER;
PUSHSTACKi(PERLSI_DESTROY);
freescalar:
/* Don't bother with SvOOK_off(sv); as we're only going to free it. */
if (SvOOK(sv)) {
- SvPV_set(sv, SvPVX(sv) - SvIVX(sv));
+ SvPV_set(sv, SvPVX_mutable(sv) - SvIVX(sv));
/* Don't even bother with turning off the OOK flag. */
}
/* FALL THROUGH */
case SVt_PV:
case SVt_RV:
if (SvROK(sv)) {
+ SV *target = SvRV(sv);
if (SvWEAKREF(sv))
- sv_del_backref(sv);
+ sv_del_backref(target, sv);
else
- SvREFCNT_dec(SvRV(sv));
+ SvREFCNT_dec(target);
}
#ifdef PERL_OLD_COPY_ON_WRITE
else if (SvPVX_const(sv)) {
PerlIO_printf(Perl_debug_log, "Copy on write: clear\n");
sv_dump(sv);
}
- sv_release_COW(sv, SvPVX_const(sv), SvCUR(sv), SvLEN(sv),
- SvUVX(sv), SV_COW_NEXT_SV(sv));
+ sv_release_COW(sv, SvPVX_const(sv), SvLEN(sv),
+ SV_COW_NEXT_SV(sv));
/* And drop it here. */
SvFAKE_off(sv);
} else if (SvLEN(sv)) {
}
#else
else if (SvPVX_const(sv) && SvLEN(sv))
- Safefree(SvPVX_const(sv));
+ Safefree(SvPVX_mutable(sv));
else if (SvPVX_const(sv) && SvREADONLY(sv) && SvFAKE(sv)) {
- unsharepvn(SvPVX_const(sv),
- SvUTF8(sv) ? -(I32)SvCUR(sv) : SvCUR(sv),
- SvUVX(sv));
+ unshare_hek(SvSHARED_HEK_FROM_PV(SvPVX_const(sv)));
SvFAKE_off(sv);
}
#endif
SvFLAGS(sv) |= SVTYPEMASK;
/* decrease refcount of the stash that owns this GV, if any */
if (stash)
- SvREFCNT_dec(stash);
+ sv_del_backref((SV*)stash, sv);
return; /* not break, SvFLAGS reset already happened */
case SVt_PVBM:
del_XPVBM(SvANY(sv));
SvREFCNT(sv) = (~(U32)0)/2;
return;
}
- if (ckWARN_d(WARN_INTERNAL))
+ if (ckWARN_d(WARN_INTERNAL)) {
Perl_warner(aTHX_ packWARN(WARN_INTERNAL),
"Attempt to free unreferenced scalar: SV 0x%"UVxf
pTHX__FORMAT, PTR2UV(sv) pTHX__VALUE);
+#ifdef DEBUG_LEAKING_SCALARS_FORK_DUMP
+ Perl_dump_sv_child(aTHX_ sv);
+#endif
+ }
return;
}
if (--(SvREFCNT(sv)) > 0)
STRLEN *cache = 0;
const U8 *s = start;
I32 uoffset = *offsetp;
- const U8 *send = s + len;
+ const U8 * const send = s + len;
MAGIC *mg = 0;
bool found = FALSE;
* is made as in S_utf8_mg_pos(), namely that
* walking backward is twice slower than
* walking forward. */
- STRLEN forw = *offsetp;
+ const STRLEN forw = *offsetp;
STRLEN backw = cache[1] - *offsetp;
if (!(forw < 2 * backw)) {
if (SvUTF8(sv1)) {
svrecode = newSVpvn(pv2, cur2);
sv_recode_to_utf8(svrecode, PL_encoding);
- pv2 = SvPV(svrecode, cur2);
+ pv2 = SvPV_const(svrecode, cur2);
}
else {
svrecode = newSVpvn(pv1, cur1);
sv_recode_to_utf8(svrecode, PL_encoding);
- pv1 = SvPV(svrecode, cur1);
+ pv1 = SvPV_const(svrecode, cur1);
}
/* Now both are in UTF-8. */
if (cur1 != cur2) {
if (SvUTF8(sv1)) {
/* sv1 is the UTF-8 one,
* if is equal it must be downgrade-able */
- char *pv = (char*)bytes_from_utf8((const U8*)pv1,
+ char * const pv = (char*)bytes_from_utf8((const U8*)pv1,
&cur1, &is_utf8);
if (pv != pv1)
pv1 = tpv = pv;
else {
/* sv2 is the UTF-8 one,
* if is equal it must be downgrade-able */
- char *pv = (char *)bytes_from_utf8((const U8*)pv2,
+ char * const pv = (char *)bytes_from_utf8((const U8*)pv2,
&cur2, &is_utf8);
if (pv != pv2)
pv2 = tpv = pv;
if (PL_encoding) {
svrecode = newSVpvn(pv2, cur2);
sv_recode_to_utf8(svrecode, PL_encoding);
- pv2 = SvPV(svrecode, cur2);
+ pv2 = SvPV_const(svrecode, cur2);
}
else {
pv2 = tpv = (char*)bytes_to_utf8((const U8*)pv2, &cur2);
if (PL_encoding) {
svrecode = newSVpvn(pv1, cur1);
sv_recode_to_utf8(svrecode, PL_encoding);
- pv1 = SvPV(svrecode, cur1);
+ pv1 = SvPV_const(svrecode, cur1);
}
else {
pv1 = tpv = (char*)bytes_to_utf8((const U8*)pv1, &cur1);
mg = SvMAGICAL(sv) ? mg_find(sv, PERL_MAGIC_collxfrm) : (MAGIC *) NULL;
if (!mg || !mg->mg_ptr || *(U32*)mg->mg_ptr != PL_collation_ix) {
- char *s, *xf;
+ const char *s;
+ char *xf;
STRLEN len, xlen;
if (mg)
Safefree(mg->mg_ptr);
- s = SvPV(sv, len);
+ s = SvPV_const(sv, len);
if ((xf = mem_collxfrm(s, len, &xlen))) {
if (SvREADONLY(sv)) {
SAVEFREEPV(xf);
sv_pos_u2b(sv,&append,0);
}
} else if (SvUTF8(sv)) {
- SV *tsv = NEWSV(0,0);
+ SV * const tsv = NEWSV(0,0);
sv_gets(tsv, fp, 0);
sv_utf8_upgrade_nomg(tsv);
SvCUR_set(sv,append);
Perl_croak(aTHX_ "Wide character in $/");
}
}
- rsptr = SvPV(PL_rs, rslen);
+ rsptr = SvPV_const(PL_rs, rslen);
}
}
if (!(flags & SVp_POK) || !*SvPVX_const(sv)) {
if ((flags & SVTYPEMASK) < SVt_PVIV)
- sv_upgrade(sv, SVt_IV);
+ sv_upgrade(sv, ((flags & SVTYPEMASK) > SVt_IV ? SVt_PVIV : SVt_IV));
(void)SvIOK_only(sv);
SvIV_set(sv, 1);
return;
return;
}
if (!(flags & SVp_POK)) {
- if ((flags & SVTYPEMASK) < SVt_PVNV)
- sv_upgrade(sv, SVt_NV);
- SvNV_set(sv, 1.0);
- (void)SvNOK_only(sv);
+ if ((flags & SVTYPEMASK) < SVt_PVIV)
+ sv_upgrade(sv, ((flags & SVTYPEMASK) > SVt_IV) ? SVt_PVIV : SVt_IV);
+ SvIV_set(sv, -1);
+ (void)SvIOK_only(sv);
return;
}
#ifdef PERL_PRESERVE_IVUV
if (!hash)
PERL_HASH(hash, src, len);
new_SV(sv);
- sv_upgrade(sv, SVt_PVIV);
+ sv_upgrade(sv, SVt_PV);
SvPV_set(sv, sharepvn(src, is_utf8?-len:len, hash));
SvCUR_set(sv, len);
- SvUV_set(sv, hash);
SvLEN_set(sv, 0);
SvREADONLY_on(sv);
SvFAKE_on(sv);
char *
Perl_sv_pv(pTHX_ SV *sv)
{
- STRLEN n_a;
-
if (SvPOK(sv))
return SvPVX(sv);
- return sv_2pv(sv, &n_a);
+ return sv_2pv(sv, 0);
}
/*
/* The fact that I don't need to downcast to char * everywhere, only in ?:
inside return suggests a const propagation bug in g++. */
if (ob && SvOBJECT(sv)) {
- char *name = HvNAME_get(SvSTASH(sv));
+ char * const name = HvNAME_get(SvSTASH(sv));
return name ? name : (char *) "__ANON__";
}
else {
SvROK_on(rv);
if (classname) {
- HV* stash = gv_stashpv(classname, TRUE);
+ HV* const stash = gv_stashpv(classname, TRUE);
(void)sv_bless(rv, stash);
}
return sv;
*/
SV*
-Perl_sv_setref_pvn(pTHX_ SV *rv, const char *classname, char *pv, STRLEN n)
+Perl_sv_setref_pvn(pTHX_ SV *rv, const char *classname, const char *pv, STRLEN n)
{
sv_setpvn(newSVrv(rv,classname), pv, n);
return rv;
if (GvGP(sv))
gp_free((GV*)sv);
if (GvSTASH(sv)) {
- SvREFCNT_dec(GvSTASH(sv));
+ sv_del_backref((SV*)GvSTASH(sv), sv);
GvSTASH(sv) = Nullhv;
}
sv_unmagic(sv, PERL_MAGIC_glob);
*/
void
-Perl_sv_unref_flags(pTHX_ SV *sv, U32 flags)
+Perl_sv_unref_flags(pTHX_ SV *ref, U32 flags)
{
- SV* rv = SvRV(sv);
+ SV* target = SvRV(ref);
- if (SvWEAKREF(sv)) {
- sv_del_backref(sv);
- SvWEAKREF_off(sv);
- SvRV_set(sv, NULL);
+ if (SvWEAKREF(ref)) {
+ sv_del_backref(target, ref);
+ SvWEAKREF_off(ref);
+ SvRV_set(ref, NULL);
return;
}
- SvRV_set(sv, NULL);
- SvROK_off(sv);
- /* You can't have a || SvREADONLY(rv) here, as $a = $$a, where $a was
+ SvRV_set(ref, NULL);
+ SvROK_off(ref);
+ /* You can't have a || SvREADONLY(target) here, as $a = $$a, where $a was
assigned to as BEGIN {$a = \"Foo"} will fail. */
- if (SvREFCNT(rv) != 1 || (flags & SV_IMMEDIATE_UNREF))
- SvREFCNT_dec(rv);
+ if (SvREFCNT(target) != 1 || (flags & SV_IMMEDIATE_UNREF))
+ SvREFCNT_dec(target);
else /* XXX Hack, but hard to make $a=$a->[1] work otherwise */
- sv_2mortal(rv); /* Schedule for freeing later */
+ sv_2mortal(target); /* Schedule for freeing later */
}
/*
{
if (SvTYPE(sv) >= SVt_PVMG && SvMAGIC(sv)) {
MAGIC * const mg = mg_find(sv, PERL_MAGIC_taint);
- if (mg && ((mg->mg_len & 1) || ((mg->mg_len & 2) && mg->mg_obj == sv)))
+ if (mg && (mg->mg_len & 1) )
return TRUE;
}
return FALSE;
/* large enough for "%#.#f" --chip */
/* what about long double NVs? --jhi */
+ PERL_UNUSED_ARG(maybe_tainted);
+
/* no matter what, this is a string now */
(void)SvPV_force(sv, origlen);
return;
if (patlen == 2 && pat[0] == '%' && pat[1] == 's') {
if (args) {
- const char *s = va_arg(*args, char*);
+ const char * const s = va_arg(*args, char*);
sv_catpv(sv, s ? s : nullstr);
}
else if (svix < svmax) {
#ifndef USE_LONG_DOUBLE
/* special-case "%.<number>[gf]" */
- if ( patlen <= 5 && pat[0] == '%' && pat[1] == '.'
+ if ( !args && patlen <= 5 && pat[0] == '%' && pat[1] == '.'
&& (pat[patlen-1] == 'g' || pat[patlen-1] == 'f') ) {
unsigned digits = 0;
const char *pp;
if (pp - pat == (int)patlen - 1) {
NV nv;
- if (args)
- nv = (NV)va_arg(*args, double);
- else if (svix < svmax)
+ if (svix < svmax)
nv = SvNV(*svargs);
else
return;
STRLEN have;
STRLEN need;
STRLEN gap;
- const char *dotstr = ".";
+ const char *dotstr = ".";
STRLEN dotstrlen = 1;
I32 efix = 0; /* explicit format parameter index */
I32 ewix = 0; /* explicit width index */
sv_utf8_upgrade(sv);
}
else {
- SV *nsv = sv_2mortal(newSVpvn(eptr, elen));
+ SV * const nsv = sv_2mortal(newSVpvn(eptr, elen));
sv_utf8_upgrade(nsv);
- eptr = SvPVX(nsv);
+ eptr = SvPVX_const(nsv);
elen = SvCUR(nsv);
}
SvGROW(sv, SvCUR(sv) + elen + 1);
SvGROW(sv, SvCUR(sv) + need + dotstrlen + 1);
p = SvEND(sv);
if (esignlen && fill == '0') {
+ int i;
for (i = 0; i < (int)esignlen; i++)
*p++ = esignbuf[i];
}
p += gap;
}
if (esignlen && fill != '0') {
+ int i;
for (i = 0; i < (int)esignlen; i++)
*p++ = esignbuf[i];
}
if (zeros) {
+ int i;
for (i = zeros; i; i--)
*p++ = '0';
}
regcomp.c. AMS 20010712 */
REGEXP *
-Perl_re_dup(pTHX_ REGEXP *r, CLONE_PARAMS *param)
+Perl_re_dup(pTHX_ const REGEXP *r, CLONE_PARAMS *param)
{
dVAR;
REGEXP *ret;
if (r->data) {
struct reg_data *d;
const int count = r->data->count;
+ int i;
Newc(0, d, sizeof(struct reg_data) + count*sizeof(void *),
char, struct reg_data);
Perl_fp_dup(pTHX_ PerlIO *fp, char type, CLONE_PARAMS *param)
{
PerlIO *ret;
- (void)type;
+
+ PERL_UNUSED_ARG(type);
if (!fp)
return (PerlIO*)NULL;
# define PTR_TABLE_HASH(ptr) (PTR2UV(ptr) >> 2)
#endif
-
-
-STATIC void
-S_more_pte(pTHX)
-{
- struct ptr_tbl_ent* pte;
- struct ptr_tbl_ent* pteend;
- New(0, pte, PERL_ARENA_SIZE/sizeof(struct ptr_tbl_ent), struct ptr_tbl_ent);
- pte->next = PL_pte_arenaroot;
- PL_pte_arenaroot = pte;
-
- pteend = &pte[PERL_ARENA_SIZE / sizeof(struct ptr_tbl_ent) - 1];
- PL_pte_root = ++pte;
- while (pte < pteend) {
- pte->next = pte + 1;
- pte++;
- }
- pte->next = 0;
-}
-
-STATIC struct ptr_tbl_ent*
-S_new_pte(pTHX)
-{
- struct ptr_tbl_ent* pte;
- if (!PL_pte_root)
- S_more_pte(aTHX);
- pte = PL_pte_root;
- PL_pte_root = pte->next;
- return pte;
-}
-
-STATIC void
-S_del_pte(pTHX_ struct ptr_tbl_ent*p)
-{
- p->next = PL_pte_root;
- PL_pte_root = p;
-}
+#define new_pte() new_body(struct ptr_tbl_ent, pte)
+#define del_pte(p) del_body_type(p, struct ptr_tbl_ent, pte)
/* map an existing pointer using a table */
void *
-Perl_ptr_table_fetch(pTHX_ PTR_TBL_t *tbl, void *sv)
+Perl_ptr_table_fetch(pTHX_ PTR_TBL_t *tbl, const void *sv)
{
PTR_TBL_ENT_t *tblent;
const UV hash = PTR_TABLE_HASH(sv);
/* add a new entry to a pointer-mapping table */
void
-Perl_ptr_table_store(pTHX_ PTR_TBL_t *tbl, void *oldv, void *newv)
+Perl_ptr_table_store(pTHX_ PTR_TBL_t *tbl, const void *oldv, void *newv)
{
PTR_TBL_ENT_t *tblent, **otblent;
/* XXX this may be pessimal on platforms where pointers aren't good
return;
}
}
- tblent = S_new_pte(aTHX);
+ tblent = new_pte();
tblent->oldval = oldv;
tblent->newval = newv;
tblent->next = *otblent;
if (entry) {
PTR_TBL_ENT_t *oentry = entry;
entry = entry->next;
- S_del_pte(aTHX_ oentry);
+ del_pte(oentry);
}
if (!entry) {
if (++riter > max) {
Safefree(tbl);
}
-/* attempt to make everything in the typeglob readonly */
-
-STATIC SV *
-S_gv_share(pTHX_ SV *sstr, CLONE_PARAMS *param)
-{
- GV *gv = (GV*)sstr;
- SV *sv = &param->proto_perl->Isv_no; /* just need SvREADONLY-ness */
-
- if (GvIO(gv) || GvFORM(gv)) {
- GvUNIQUE_off(gv); /* GvIOs cannot be shared. nor can GvFORMs */
- }
- else if (!GvCV(gv)) {
- GvCV(gv) = (CV*)sv;
- }
- else {
- /* CvPADLISTs cannot be shared */
- if (!SvREADONLY(GvCV(gv)) && !CvXSUB(GvCV(gv))) {
- GvUNIQUE_off(gv);
- }
- }
-
- if (!GvUNIQUE(gv)) {
-#if 0
- PerlIO_printf(Perl_debug_log, "gv_share: unable to share %s::%s\n",
- HvNAME_get(GvSTASH(gv)), GvNAME(gv));
-#endif
- return Nullsv;
- }
-
- /*
- * write attempts will die with
- * "Modification of a read-only value attempted"
- */
- if (!GvSV(gv)) {
- GvSV(gv) = sv;
- }
- else {
- SvREADONLY_on(GvSV(gv));
- }
-
- if (!GvAV(gv)) {
- GvAV(gv) = (AV*)sv;
- }
- else {
- SvREADONLY_on(GvAV(gv));
- }
-
- if (!GvHV(gv)) {
- GvHV(gv) = (HV*)sv;
- }
- else {
- SvREADONLY_on(GvHV(gv));
- }
-
- return sstr; /* he_dup() will SvREFCNT_inc() */
-}
-
-/* duplicate an SV of any type (including AV, HV etc) */
void
Perl_rvpv_dup(pTHX_ SV *dstr, SV *sstr, CLONE_PARAMS* param)
}
else {
/* Special case - not normally malloced for some reason */
- if (SvREADONLY(sstr) && SvFAKE(sstr)) {
- /* A "shared" PV - clone it as unshared string */
- if(SvPADTMP(sstr)) {
- /* However, some of them live in the pad
- and they should not have these flags
- turned off */
-
- SvPV_set(dstr, sharepvn(SvPVX_const(sstr), SvCUR(sstr),
- SvUVX(sstr)));
- SvUV_set(dstr, SvUVX(sstr));
- } else {
-
- SvPV_set(dstr, SAVEPVN(SvPVX_const(sstr), SvCUR(sstr)));
- SvFAKE_off(dstr);
- SvREADONLY_off(dstr);
- }
+ if ((SvREADONLY(sstr) && SvFAKE(sstr))) {
+ /* A "shared" PV - clone it as "shared" PV */
+ SvPV_set(dstr,
+ HEK_KEY(hek_dup(SvSHARED_HEK_FROM_PV(SvPVX_const(sstr)),
+ param)));
}
else {
/* Some other special case - random pointer */
}
}
+/* duplicate an SV of any type (including AV, HV etc) */
+
SV *
Perl_sv_dup(pTHX_ SV *sstr, CLONE_PARAMS* param)
{
SvANY(dstr) = &(dstr->sv_u.svu_rv);
Perl_rvpv_dup(aTHX_ dstr, sstr, param);
break;
- case SVt_PV:
- SvANY(dstr) = new_XPV();
- SvCUR_set(dstr, SvCUR(sstr));
- SvLEN_set(dstr, SvLEN(sstr));
- Perl_rvpv_dup(aTHX_ dstr, sstr, param);
- break;
- case SVt_PVIV:
- SvANY(dstr) = new_XPVIV();
- SvCUR_set(dstr, SvCUR(sstr));
- SvLEN_set(dstr, SvLEN(sstr));
- SvIV_set(dstr, SvIVX(sstr));
- Perl_rvpv_dup(aTHX_ dstr, sstr, param);
- break;
- case SVt_PVNV:
- SvANY(dstr) = new_XPVNV();
- SvCUR_set(dstr, SvCUR(sstr));
- SvLEN_set(dstr, SvLEN(sstr));
- SvIV_set(dstr, SvIVX(sstr));
- SvNV_set(dstr, SvNVX(sstr));
- Perl_rvpv_dup(aTHX_ dstr, sstr, param);
- break;
- case SVt_PVMG:
- SvANY(dstr) = new_XPVMG();
- SvCUR_set(dstr, SvCUR(sstr));
- SvLEN_set(dstr, SvLEN(sstr));
- SvIV_set(dstr, SvIVX(sstr));
- SvNV_set(dstr, SvNVX(sstr));
- SvMAGIC_set(dstr, mg_dup(SvMAGIC(sstr), param));
- SvSTASH_set(dstr, hv_dup_inc(SvSTASH(sstr), param));
- Perl_rvpv_dup(aTHX_ dstr, sstr, param);
- break;
- case SVt_PVBM:
- SvANY(dstr) = new_XPVBM();
- SvCUR_set(dstr, SvCUR(sstr));
- SvLEN_set(dstr, SvLEN(sstr));
- SvIV_set(dstr, SvIVX(sstr));
- SvNV_set(dstr, SvNVX(sstr));
- SvMAGIC_set(dstr, mg_dup(SvMAGIC(sstr), param));
- SvSTASH_set(dstr, hv_dup_inc(SvSTASH(sstr), param));
- Perl_rvpv_dup(aTHX_ dstr, sstr, param);
- BmRARE(dstr) = BmRARE(sstr);
- BmUSEFUL(dstr) = BmUSEFUL(sstr);
- BmPREVIOUS(dstr)= BmPREVIOUS(sstr);
- break;
- case SVt_PVLV:
- SvANY(dstr) = new_XPVLV();
- SvCUR_set(dstr, SvCUR(sstr));
- SvLEN_set(dstr, SvLEN(sstr));
- SvIV_set(dstr, SvIVX(sstr));
- SvNV_set(dstr, SvNVX(sstr));
- SvMAGIC_set(dstr, mg_dup(SvMAGIC(sstr), param));
- SvSTASH_set(dstr, hv_dup_inc(SvSTASH(sstr), param));
- Perl_rvpv_dup(aTHX_ dstr, sstr, param);
- LvTARGOFF(dstr) = LvTARGOFF(sstr); /* XXX sometimes holds PMOP* when DEBUGGING */
- LvTARGLEN(dstr) = LvTARGLEN(sstr);
- if (LvTYPE(sstr) == 't') /* for tie: unrefcnted fake (SV**) */
- LvTARG(dstr) = dstr;
- else if (LvTYPE(sstr) == 'T') /* for tie: fake HE */
- LvTARG(dstr) = (SV*)he_dup((HE*)LvTARG(sstr), 0, param);
- else
- LvTARG(dstr) = sv_dup_inc(LvTARG(sstr), param);
- LvTYPE(dstr) = LvTYPE(sstr);
- break;
- case SVt_PVGV:
- if (GvUNIQUE((GV*)sstr)) {
- SV *share;
- if ((share = gv_share(sstr, param))) {
- del_SV(dstr);
- dstr = share;
- ptr_table_store(PL_ptr_table, sstr, dstr);
-#if 0
- PerlIO_printf(Perl_debug_log, "sv_dup: sharing %s::%s\n",
- HvNAME_get(GvSTASH(share)), GvNAME(share));
+ default:
+ {
+ /* These are all the types that need complex bodies allocating. */
+ size_t new_body_length;
+ size_t new_body_offset = 0;
+ void **new_body_arena;
+ void **new_body_arenaroot;
+ void *new_body;
+
+ switch (SvTYPE(sstr)) {
+ default:
+ Perl_croak(aTHX_ "Bizarre SvTYPE [%" IVdf "]",
+ (IV)SvTYPE(sstr));
+ break;
+
+ case SVt_PVIO:
+ new_body = new_XPVIO();
+ new_body_length = sizeof(XPVIO);
+ break;
+ case SVt_PVFM:
+ new_body = new_XPVFM();
+ new_body_length = sizeof(XPVFM);
+ break;
+
+ case SVt_PVHV:
+ new_body_arena = (void **) &PL_xpvhv_root;
+ new_body_arenaroot = (void **) &PL_xpvhv_arenaroot;
+ new_body_offset = STRUCT_OFFSET(XPVHV, xhv_fill)
+ - STRUCT_OFFSET(xpvhv_allocated, xhv_fill);
+ new_body_length = STRUCT_OFFSET(XPVHV, xmg_stash)
+ + sizeof (((XPVHV*)SvANY(sstr))->xmg_stash)
+ - new_body_offset;
+ goto new_body;
+ case SVt_PVAV:
+ new_body_arena = (void **) &PL_xpvav_root;
+ new_body_arenaroot = (void **) &PL_xpvav_arenaroot;
+ new_body_offset = STRUCT_OFFSET(XPVAV, xav_fill)
+ - STRUCT_OFFSET(xpvav_allocated, xav_fill);
+ new_body_length = STRUCT_OFFSET(XPVHV, xmg_stash)
+ + sizeof (((XPVHV*)SvANY(sstr))->xmg_stash)
+ - new_body_offset;
+ goto new_body;
+ case SVt_PVBM:
+ new_body_length = sizeof(XPVBM);
+ new_body_arena = (void **) &PL_xpvbm_root;
+ new_body_arenaroot = (void **) &PL_xpvbm_arenaroot;
+ goto new_body;
+ case SVt_PVGV:
+ if (GvUNIQUE((GV*)sstr)) {
+ /* Do sharing here. */
+ }
+ new_body_length = sizeof(XPVGV);
+ new_body_arena = (void **) &PL_xpvgv_root;
+ new_body_arenaroot = (void **) &PL_xpvgv_arenaroot;
+ goto new_body;
+ case SVt_PVCV:
+ new_body_length = sizeof(XPVCV);
+ new_body_arena = (void **) &PL_xpvcv_root;
+ new_body_arenaroot = (void **) &PL_xpvcv_arenaroot;
+ goto new_body;
+ case SVt_PVLV:
+ new_body_length = sizeof(XPVLV);
+ new_body_arena = (void **) &PL_xpvlv_root;
+ new_body_arenaroot = (void **) &PL_xpvlv_arenaroot;
+ goto new_body;
+ case SVt_PVMG:
+ new_body_length = sizeof(XPVMG);
+ new_body_arena = (void **) &PL_xpvmg_root;
+ new_body_arenaroot = (void **) &PL_xpvmg_arenaroot;
+ goto new_body;
+ case SVt_PVNV:
+ new_body_length = sizeof(XPVNV);
+ new_body_arena = (void **) &PL_xpvnv_root;
+ new_body_arenaroot = (void **) &PL_xpvnv_arenaroot;
+ goto new_body;
+ case SVt_PVIV:
+ new_body_offset = STRUCT_OFFSET(XPVIV, xpv_cur)
+ - STRUCT_OFFSET(xpviv_allocated, xpv_cur);
+ new_body_length = sizeof(XPVIV) - new_body_offset;
+ new_body_arena = (void **) &PL_xpviv_root;
+ new_body_arenaroot = (void **) &PL_xpviv_arenaroot;
+ goto new_body;
+ case SVt_PV:
+ new_body_offset = STRUCT_OFFSET(XPV, xpv_cur)
+ - STRUCT_OFFSET(xpv_allocated, xpv_cur);
+ new_body_length = sizeof(XPV) - new_body_offset;
+ new_body_arena = (void **) &PL_xpv_root;
+ new_body_arenaroot = (void **) &PL_xpv_arenaroot;
+ new_body:
+ assert(new_body_length);
+#ifndef PURIFY
+ new_body = (void*)((char*)S_new_body(aTHX_ new_body_arenaroot,
+ new_body_arena,
+ new_body_length)
+ - new_body_offset);
+#else
+ /* We always allocate the full-length item with PURIFY */
+ new_body_length += new_body_offset;
+ new_body_offset = 0;
+ new_body = my_safemalloc(new_body_length);
#endif
- break;
- }
- }
- SvANY(dstr) = new_XPVGV();
- SvCUR_set(dstr, SvCUR(sstr));
- SvLEN_set(dstr, SvLEN(sstr));
- SvIV_set(dstr, SvIVX(sstr));
- SvNV_set(dstr, SvNVX(sstr));
- SvMAGIC_set(dstr, mg_dup(SvMAGIC(sstr), param));
- SvSTASH_set(dstr, hv_dup_inc(SvSTASH(sstr), param));
- Perl_rvpv_dup(aTHX_ dstr, sstr, param);
- GvNAMELEN(dstr) = GvNAMELEN(sstr);
- GvNAME(dstr) = SAVEPVN(GvNAME(sstr), GvNAMELEN(sstr));
- GvSTASH(dstr) = hv_dup_inc(GvSTASH(sstr), param);
- GvFLAGS(dstr) = GvFLAGS(sstr);
- GvGP(dstr) = gp_dup(GvGP(sstr), param);
- (void)GpREFCNT_inc(GvGP(dstr));
- break;
- case SVt_PVIO:
- SvANY(dstr) = new_XPVIO();
- SvCUR_set(dstr, SvCUR(sstr));
- SvLEN_set(dstr, SvLEN(sstr));
- SvIV_set(dstr, SvIVX(sstr));
- SvNV_set(dstr, SvNVX(sstr));
- SvMAGIC_set(dstr, mg_dup(SvMAGIC(sstr), param));
- SvSTASH_set(dstr, hv_dup_inc(SvSTASH(sstr), param));
- Perl_rvpv_dup(aTHX_ dstr, sstr, param);
- IoIFP(dstr) = fp_dup(IoIFP(sstr), IoTYPE(sstr), param);
- if (IoOFP(sstr) == IoIFP(sstr))
- IoOFP(dstr) = IoIFP(dstr);
- else
- IoOFP(dstr) = fp_dup(IoOFP(sstr), IoTYPE(sstr), param);
- /* PL_rsfp_filters entries have fake IoDIRP() */
- if (IoDIRP(sstr) && !(IoFLAGS(sstr) & IOf_FAKE_DIRP))
- IoDIRP(dstr) = dirp_dup(IoDIRP(sstr));
- else
- IoDIRP(dstr) = IoDIRP(sstr);
- IoLINES(dstr) = IoLINES(sstr);
- IoPAGE(dstr) = IoPAGE(sstr);
- IoPAGE_LEN(dstr) = IoPAGE_LEN(sstr);
- IoLINES_LEFT(dstr) = IoLINES_LEFT(sstr);
- if(IoFLAGS(sstr) & IOf_FAKE_DIRP) {
- /* I have no idea why fake dirp (rsfps)
- should be treaded differently but otherwise
- we end up with leaks -- sky*/
- IoTOP_GV(dstr) = gv_dup_inc(IoTOP_GV(sstr), param);
- IoFMT_GV(dstr) = gv_dup_inc(IoFMT_GV(sstr), param);
- IoBOTTOM_GV(dstr) = gv_dup_inc(IoBOTTOM_GV(sstr), param);
- } else {
- IoTOP_GV(dstr) = gv_dup(IoTOP_GV(sstr), param);
- IoFMT_GV(dstr) = gv_dup(IoFMT_GV(sstr), param);
- IoBOTTOM_GV(dstr) = gv_dup(IoBOTTOM_GV(sstr), param);
- }
- IoTOP_NAME(dstr) = SAVEPV(IoTOP_NAME(sstr));
- IoFMT_NAME(dstr) = SAVEPV(IoFMT_NAME(sstr));
- IoBOTTOM_NAME(dstr) = SAVEPV(IoBOTTOM_NAME(sstr));
- IoSUBPROCESS(dstr) = IoSUBPROCESS(sstr);
- IoTYPE(dstr) = IoTYPE(sstr);
- IoFLAGS(dstr) = IoFLAGS(sstr);
- break;
- case SVt_PVAV:
- SvANY(dstr) = new_XPVAV();
- SvCUR_set(dstr, SvCUR(sstr));
- SvLEN_set(dstr, SvLEN(sstr));
- SvMAGIC_set(dstr, mg_dup(SvMAGIC(sstr), param));
- SvSTASH_set(dstr, hv_dup_inc(SvSTASH(sstr), param));
- if (AvARRAY((AV*)sstr)) {
- SV **dst_ary, **src_ary;
- SSize_t items = AvFILLp((AV*)sstr) + 1;
-
- src_ary = AvARRAY((AV*)sstr);
- Newz(0, dst_ary, AvMAX((AV*)sstr)+1, SV*);
- ptr_table_store(PL_ptr_table, src_ary, dst_ary);
- SvPV_set(dstr, (char*)dst_ary);
- AvALLOC((AV*)dstr) = dst_ary;
- if (AvREAL((AV*)sstr)) {
- while (items-- > 0)
- *dst_ary++ = sv_dup_inc(*src_ary++, param);
- }
- else {
- while (items-- > 0)
- *dst_ary++ = sv_dup(*src_ary++, param);
}
- items = AvMAX((AV*)sstr) - AvFILLp((AV*)sstr);
- while (items-- > 0) {
- *dst_ary++ = &PL_sv_undef;
+ assert(new_body);
+ SvANY(dstr) = new_body;
+
+ Copy(((char*)SvANY(sstr)) + new_body_offset,
+ ((char*)SvANY(dstr)) + new_body_offset,
+ new_body_length, char);
+
+ if (SvTYPE(sstr) != SVt_PVAV && SvTYPE(sstr) != SVt_PVHV)
+ Perl_rvpv_dup(aTHX_ dstr, sstr, param);
+
+ /* The Copy above means that all the source (unduplicated) pointers
+ are now in the destination. We can check the flags and the
+ pointers in either, but it's possible that there are fewer cache
+ misses by always going for the destination.
+ FIXME - instrument and check that assumption */
+ if (SvTYPE(sstr) >= SVt_PVMG) {
+ if (SvMAGIC(dstr))
+ SvMAGIC_set(dstr, mg_dup(SvMAGIC(dstr), param));
+ if (SvSTASH(dstr))
+ SvSTASH_set(dstr, hv_dup_inc(SvSTASH(dstr), param));
}
- }
- else {
- SvPV_set(dstr, Nullch);
- AvALLOC((AV*)dstr) = (SV**)NULL;
- }
- break;
- case SVt_PVHV:
- SvANY(dstr) = new_XPVHV();
- SvCUR_set(dstr, SvCUR(sstr));
- SvLEN_set(dstr, SvLEN(sstr));
- HvTOTALKEYS(dstr) = HvTOTALKEYS(sstr);
- SvMAGIC_set(dstr, mg_dup(SvMAGIC(sstr), param));
- SvSTASH_set(dstr, hv_dup_inc(SvSTASH(sstr), param));
- {
- HEK *hvname = 0;
-
- if (HvARRAY((HV*)sstr)) {
- STRLEN i = 0;
- const bool sharekeys = !!HvSHAREKEYS(sstr);
- XPVHV * const dxhv = (XPVHV*)SvANY(dstr);
- XPVHV * const sxhv = (XPVHV*)SvANY(sstr);
- char *darray;
- New(0, darray,
- PERL_HV_ARRAY_ALLOC_BYTES(dxhv->xhv_max+1)
- + (SvOOK(sstr) ? sizeof(struct xpvhv_aux) : 0), char);
- HvARRAY(dstr) = (HE**)darray;
- while (i <= sxhv->xhv_max) {
- HE *source = HvARRAY(sstr)[i];
- HvARRAY(dstr)[i]
- = source ? he_dup(source, sharekeys, param) : 0;
- ++i;
+
+ switch (SvTYPE(sstr)) {
+ case SVt_PV:
+ break;
+ case SVt_PVIV:
+ break;
+ case SVt_PVNV:
+ break;
+ case SVt_PVMG:
+ break;
+ case SVt_PVBM:
+ break;
+ case SVt_PVLV:
+ /* XXX LvTARGOFF sometimes holds PMOP* when DEBUGGING */
+ if (LvTYPE(dstr) == 't') /* for tie: unrefcnted fake (SV**) */
+ LvTARG(dstr) = dstr;
+ else if (LvTYPE(dstr) == 'T') /* for tie: fake HE */
+ LvTARG(dstr) = (SV*)he_dup((HE*)LvTARG(dstr), 0, param);
+ else
+ LvTARG(dstr) = sv_dup_inc(LvTARG(dstr), param);
+ break;
+ case SVt_PVGV:
+ GvNAME(dstr) = SAVEPVN(GvNAME(dstr), GvNAMELEN(dstr));
+ GvSTASH(dstr) = hv_dup(GvSTASH(dstr), param);
+ /* Don't call sv_add_backref here as it's going to be created
+ as part of the magic cloning of the symbol table. */
+ GvGP(dstr) = gp_dup(GvGP(dstr), param);
+ (void)GpREFCNT_inc(GvGP(dstr));
+ break;
+ case SVt_PVIO:
+ IoIFP(dstr) = fp_dup(IoIFP(dstr), IoTYPE(dstr), param);
+ if (IoOFP(dstr) == IoIFP(sstr))
+ IoOFP(dstr) = IoIFP(dstr);
+ else
+ IoOFP(dstr) = fp_dup(IoOFP(dstr), IoTYPE(dstr), param);
+ /* PL_rsfp_filters entries have fake IoDIRP() */
+ if (IoDIRP(dstr) && !(IoFLAGS(dstr) & IOf_FAKE_DIRP))
+ IoDIRP(dstr) = dirp_dup(IoDIRP(dstr));
+ if(IoFLAGS(dstr) & IOf_FAKE_DIRP) {
+ /* I have no idea why fake dirp (rsfps)
+ should be treated differently but otherwise
+ we end up with leaks -- sky*/
+ IoTOP_GV(dstr) = gv_dup_inc(IoTOP_GV(dstr), param);
+ IoFMT_GV(dstr) = gv_dup_inc(IoFMT_GV(dstr), param);
+ IoBOTTOM_GV(dstr) = gv_dup_inc(IoBOTTOM_GV(dstr), param);
+ } else {
+ IoTOP_GV(dstr) = gv_dup(IoTOP_GV(dstr), param);
+ IoFMT_GV(dstr) = gv_dup(IoFMT_GV(dstr), param);
+ IoBOTTOM_GV(dstr) = gv_dup(IoBOTTOM_GV(dstr), param);
}
- if (SvOOK(sstr)) {
- struct xpvhv_aux *saux = HvAUX(sstr);
- struct xpvhv_aux *daux = HvAUX(dstr);
- /* This flag isn't copied. */
- /* SvOOK_on(hv) attacks the IV flags. */
- SvFLAGS(dstr) |= SVf_OOK;
-
- hvname = saux->xhv_name;
- daux->xhv_name = hvname ? hek_dup(hvname, param) : hvname;
-
- daux->xhv_riter = saux->xhv_riter;
- daux->xhv_eiter = saux->xhv_eiter
- ? he_dup(saux->xhv_eiter, (bool)!!HvSHAREKEYS(sstr),
- param) : 0;
+ IoTOP_NAME(dstr) = SAVEPV(IoTOP_NAME(dstr));
+ IoFMT_NAME(dstr) = SAVEPV(IoFMT_NAME(dstr));
+ IoBOTTOM_NAME(dstr) = SAVEPV(IoBOTTOM_NAME(dstr));
+ break;
+ case SVt_PVAV:
+ if (AvARRAY((AV*)sstr)) {
+ SV **dst_ary, **src_ary;
+ SSize_t items = AvFILLp((AV*)sstr) + 1;
+
+ src_ary = AvARRAY((AV*)sstr);
+ Newz(0, dst_ary, AvMAX((AV*)sstr)+1, SV*);
+ ptr_table_store(PL_ptr_table, src_ary, dst_ary);
+ SvPV_set(dstr, (char*)dst_ary);
+ AvALLOC((AV*)dstr) = dst_ary;
+ if (AvREAL((AV*)sstr)) {
+ while (items-- > 0)
+ *dst_ary++ = sv_dup_inc(*src_ary++, param);
+ }
+ else {
+ while (items-- > 0)
+ *dst_ary++ = sv_dup(*src_ary++, param);
+ }
+ items = AvMAX((AV*)sstr) - AvFILLp((AV*)sstr);
+ while (items-- > 0) {
+ *dst_ary++ = &PL_sv_undef;
+ }
}
+ else {
+ SvPV_set(dstr, Nullch);
+ AvALLOC((AV*)dstr) = (SV**)NULL;
+ }
+ break;
+ case SVt_PVHV:
+ {
+ HEK *hvname = 0;
+
+ if (HvARRAY((HV*)sstr)) {
+ STRLEN i = 0;
+ const bool sharekeys = !!HvSHAREKEYS(sstr);
+ XPVHV * const dxhv = (XPVHV*)SvANY(dstr);
+ XPVHV * const sxhv = (XPVHV*)SvANY(sstr);
+ char *darray;
+ New(0, darray,
+ PERL_HV_ARRAY_ALLOC_BYTES(dxhv->xhv_max+1)
+ + (SvOOK(sstr) ? sizeof(struct xpvhv_aux) : 0),
+ char);
+ HvARRAY(dstr) = (HE**)darray;
+ while (i <= sxhv->xhv_max) {
+ HE *source = HvARRAY(sstr)[i];
+ HvARRAY(dstr)[i] = source
+ ? he_dup(source, sharekeys, param) : 0;
+ ++i;
+ }
+ if (SvOOK(sstr)) {
+ struct xpvhv_aux *saux = HvAUX(sstr);
+ struct xpvhv_aux *daux = HvAUX(dstr);
+ /* This flag isn't copied. */
+ /* SvOOK_on(hv) attacks the IV flags. */
+ SvFLAGS(dstr) |= SVf_OOK;
+
+ hvname = saux->xhv_name;
+ daux->xhv_name
+ = hvname ? hek_dup(hvname, param) : hvname;
+
+ daux->xhv_riter = saux->xhv_riter;
+ daux->xhv_eiter = saux->xhv_eiter
+ ? he_dup(saux->xhv_eiter,
+ (bool)!!HvSHAREKEYS(sstr), param) : 0;
+ }
+ }
+ else {
+ SvPV_set(dstr, Nullch);
+ }
+ /* Record stashes for possible cloning in Perl_clone(). */
+ if(hvname)
+ av_push(param->stashes, dstr);
+ }
+ break;
+ case SVt_PVFM:
+ case SVt_PVCV:
+ /* NOTE: not refcounted */
+ CvSTASH(dstr) = hv_dup(CvSTASH(dstr), param);
+ OP_REFCNT_LOCK;
+ CvROOT(dstr) = OpREFCNT_inc(CvROOT(dstr));
+ OP_REFCNT_UNLOCK;
+ if (CvCONST(dstr)) {
+ CvXSUBANY(dstr).any_ptr = GvUNIQUE(CvGV(dstr)) ?
+ SvREFCNT_inc(CvXSUBANY(dstr).any_ptr) :
+ sv_dup_inc((SV *)CvXSUBANY(dstr).any_ptr, param);
+ }
+ /* don't dup if copying back - CvGV isn't refcounted, so the
+ * duped GV may never be freed. A bit of a hack! DAPM */
+ CvGV(dstr) = (param->flags & CLONEf_JOIN_IN) ?
+ Nullgv : gv_dup(CvGV(dstr), param) ;
+ if (!(param->flags & CLONEf_COPY_STACKS)) {
+ CvDEPTH(dstr) = 0;
+ }
+ PAD_DUP(CvPADLIST(dstr), CvPADLIST(sstr), param);
+ CvOUTSIDE(dstr) =
+ CvWEAKOUTSIDE(sstr)
+ ? cv_dup( CvOUTSIDE(dstr), param)
+ : cv_dup_inc(CvOUTSIDE(dstr), param);
+ if (!CvXSUB(dstr))
+ CvFILE(dstr) = SAVEPV(CvFILE(dstr));
+ break;
}
- else {
- SvPV_set(dstr, Nullch);
- }
- /* Record stashes for possible cloning in Perl_clone(). */
- if(hvname)
- av_push(param->stashes, dstr);
}
- break;
- case SVt_PVFM:
- SvANY(dstr) = new_XPVFM();
- FmLINES(dstr) = FmLINES(sstr);
- goto dup_pvcv;
- /* NOTREACHED */
- case SVt_PVCV:
- SvANY(dstr) = new_XPVCV();
- dup_pvcv:
- SvCUR_set(dstr, SvCUR(sstr));
- SvLEN_set(dstr, SvLEN(sstr));
- SvIV_set(dstr, SvIVX(sstr));
- SvNV_set(dstr, SvNVX(sstr));
- SvMAGIC_set(dstr, mg_dup(SvMAGIC(sstr), param));
- SvSTASH_set(dstr, hv_dup_inc(SvSTASH(sstr), param));
- Perl_rvpv_dup(aTHX_ dstr, sstr, param);
- CvSTASH(dstr) = hv_dup(CvSTASH(sstr), param); /* NOTE: not refcounted */
- CvSTART(dstr) = CvSTART(sstr);
- OP_REFCNT_LOCK;
- CvROOT(dstr) = OpREFCNT_inc(CvROOT(sstr));
- OP_REFCNT_UNLOCK;
- CvXSUB(dstr) = CvXSUB(sstr);
- CvXSUBANY(dstr) = CvXSUBANY(sstr);
- if (CvCONST(sstr)) {
- CvXSUBANY(dstr).any_ptr = GvUNIQUE(CvGV(sstr)) ?
- SvREFCNT_inc(CvXSUBANY(sstr).any_ptr) :
- sv_dup_inc((SV *)CvXSUBANY(sstr).any_ptr, param);
- }
- /* don't dup if copying back - CvGV isn't refcounted, so the
- * duped GV may never be freed. A bit of a hack! DAPM */
- CvGV(dstr) = (param->flags & CLONEf_JOIN_IN) ?
- Nullgv : gv_dup(CvGV(sstr), param) ;
- if (param->flags & CLONEf_COPY_STACKS) {
- CvDEPTH(dstr) = CvDEPTH(sstr);
- } else {
- CvDEPTH(dstr) = 0;
- }
- PAD_DUP(CvPADLIST(dstr), CvPADLIST(sstr), param);
- CvOUTSIDE_SEQ(dstr) = CvOUTSIDE_SEQ(sstr);
- CvOUTSIDE(dstr) =
- CvWEAKOUTSIDE(sstr)
- ? cv_dup( CvOUTSIDE(sstr), param)
- : cv_dup_inc(CvOUTSIDE(sstr), param);
- CvFLAGS(dstr) = CvFLAGS(sstr);
- CvFILE(dstr) = CvXSUB(sstr) ? CvFILE(sstr) : SAVEPV(CvFILE(sstr));
- break;
- default:
- Perl_croak(aTHX_ "Bizarre SvTYPE [%" IVdf "]", (IV)SvTYPE(sstr));
- break;
}
if (SvOBJECT(dstr) && SvTYPE(dstr) != SVt_PVIO)
*/
void *
-Perl_any_dup(pTHX_ void *v, PerlInterpreter *proto_perl)
+Perl_any_dup(pTHX_ void *v, const PerlInterpreter *proto_perl)
{
void *ret;
ANY *
Perl_ss_dup(pTHX_ PerlInterpreter *proto_perl, CLONE_PARAMS* param)
{
- ANY *ss = proto_perl->Tsavestack;
- I32 ix = proto_perl->Tsavestack_ix;
- I32 max = proto_perl->Tsavestack_max;
+ ANY * const ss = proto_perl->Tsavestack;
+ const I32 max = proto_perl->Tsavestack_max;
+ I32 ix = proto_perl->Tsavestack_ix;
ANY *nss;
SV *sv;
GV *gv;
char *c = NULL;
void (*dptr) (void*);
void (*dxptr) (pTHX_ void*);
- OP *o;
Newz(54, nss, max, ANY);
ptr = POPPTR(ss,ix);
if (ptr && (((OP*)ptr)->op_private & OPpREFCOUNTED)) {
/* these are assumed to be refcounted properly */
+ OP *o;
switch (((OP*)ptr)->op_type) {
case OP_LEAVESUB:
case OP_LEAVESUBLV:
static void
do_mark_cloneable_stash(pTHX_ SV *sv)
{
- const HEK *hvname = HvNAME_HEK((HV*)sv);
+ const HEK * const hvname = HvNAME_HEK((HV*)sv);
if (hvname) {
- GV* cloner = gv_fetchmethod_autoload((HV*)sv, "CLONE_SKIP", 0);
+ GV* const cloner = gv_fetchmethod_autoload((HV*)sv, "CLONE_SKIP", 0);
SvFLAGS(sv) |= SVphv_CLONEABLE; /* clone objects by default */
if (cloner && GvCV(cloner)) {
dSP;
/* create SV map for pointer relocation */
PL_ptr_table = ptr_table_new();
- /* and one for finding shared hash keys quickly */
- PL_shared_hek_table = ptr_table_new();
/* initialize these special pointers as early as possible */
SvANY(&PL_sv_undef) = NULL;
PL_regex_padav = newAV();
{
const I32 len = av_len((AV*)proto_perl->Iregex_padav);
- SV** regexen = AvARRAY((AV*)proto_perl->Iregex_padav);
+ SV** const regexen = AvARRAY((AV*)proto_perl->Iregex_padav);
IV i;
av_push(PL_regex_padav,
sv_dup_inc(regexen[0],param));
PL_mess_sv = Nullsv;
PL_ors_sv = sv_dup_inc(proto_perl->Iors_sv, param);
- PL_ofmt = SAVEPV(proto_perl->Iofmt);
/* interpreter atexit processing */
PL_exitlistlen = proto_perl->Iexitlistlen;
if (!(flags & CLONEf_KEEP_PTR_TABLE)) {
ptr_table_free(PL_ptr_table);
PL_ptr_table = NULL;
- ptr_table_free(PL_shared_hek_table);
- PL_shared_hek_table = NULL;
}
/* Call the ->CLONE method, if it exists, for each of the stashes
identified by sv_dup() above.
*/
while(av_len(param->stashes) != -1) {
- HV* stash = (HV*) av_shift(param->stashes);
- GV* cloner = gv_fetchmethod_autoload(stash, "CLONE", 0);
+ HV* const stash = (HV*) av_shift(param->stashes);
+ GV* const cloner = gv_fetchmethod_autoload(stash, "CLONE", 0);
if (cloner && GvCV(cloner)) {
dSP;
ENTER;
if (SvPOK(sv) && !SvUTF8(sv) && !IN_BYTES && SvROK(encoding)) {
SV *uni;
STRLEN len;
- char *s;
+ const char *s;
dSP;
ENTER;
SAVETMPS;
SPAGAIN;
uni = POPs;
PUTBACK;
- s = SvPV(uni, len);
+ s = SvPV_const(uni, len);
if (s != SvPVX_const(sv)) {
SvGROW(sv, len + 1);
- Move(s, SvPVX_const(sv), len, char);
+ Move(s, SvPVX(sv), len + 1, char);
SvCUR_set(sv, len);
- SvPVX(sv)[len] = 0;
}
FREETMPS;
LEAVE;