#include "perl.h"
#include "regcomp.h"
+#ifndef HAS_C99
+# if __STDC_VERSION__ >= 199901L && !defined(VMS)
+# define HAS_C99 1
+# endif
+#endif
+#if HAS_C99
+# include <stdint.h>
+#endif
+
#define FCALL *f
#ifdef __Lynx__
contains fields specific to each type. Some types store all they need
in the head, so don't have a body.
-In all but the most memory-paranoid configuations (ex: PURIFY), heads
+In all but the most memory-paranoid configurations (ex: PURIFY), heads
and bodies are allocated out of arenas, which by default are
approximately 4K chunks of memory parcelled up into N heads or bodies.
Sv-bodies are allocated by their sv-type, guaranteeing size
sv_report_used() / do_report_used()
dump all remaining SVs (debugging aid)
- sv_clean_objs() / do_clean_objs(),do_clean_named_objs()
+ sv_clean_objs() / do_clean_objs(),do_clean_named_objs(),
+ do_clean_named_io_objs()
Attempt to free all objects pointed to by RVs,
- and, unless DISABLE_DESTRUCTOR_KLUDGE is defined,
- try to do the same for all objects indirectly
+ and try to do the same for all objects indirectly
referenced by typeglobs too. Called once from
perl_destruct(), prior to calling sv_clean_all()
below.
new_SV(), del_SV(),
- new_XIV(), del_XIV(),
- new_XNV(), del_XNV(),
+ new_XPVNV(), del_XPVGV(),
etc
Public API:
* "A time to plant, and a time to uproot what was planted..."
*/
-void
-Perl_offer_nice_chunk(pTHX_ void *const chunk, const U32 chunk_size)
-{
- dVAR;
- void *new_chunk;
- U32 new_chunk_size;
-
- PERL_ARGS_ASSERT_OFFER_NICE_CHUNK;
-
- new_chunk = (void *)(chunk);
- new_chunk_size = (chunk_size);
- if (new_chunk_size > PL_nice_chunk_size) {
- Safefree(PL_nice_chunk);
- PL_nice_chunk = (char *) new_chunk;
- PL_nice_chunk_size = new_chunk_size;
- } else {
- Safefree(chunk);
- }
-}
-
#ifdef PERL_MEM_LOG
# define MEM_LOG_NEW_SV(sv, file, line, func) \
Perl_mem_log_new_sv(sv, file, line, func)
{
dVAR;
SV* sv;
-
- if (PL_nice_chunk) {
- sv_add_arena(PL_nice_chunk, PL_nice_chunk_size, 0);
- PL_nice_chunk = NULL;
- PL_nice_chunk_size = 0;
- }
- else {
- char *chunk; /* must use New here to match call to */
- Newx(chunk,PERL_ARENA_SIZE,char); /* Safefree() in sv_free_arenas() */
- sv_add_arena(chunk, PERL_ARENA_SIZE, 0);
- }
+ char *chunk; /* must use New here to match call to */
+ Newx(chunk,PERL_ARENA_SIZE,char); /* Safefree() in sv_free_arenas() */
+ sv_add_arena(chunk, PERL_ARENA_SIZE, 0);
uproot_SV(sv);
return sv;
}
: 0
);
sv->sv_debug_inpad = 0;
- sv->sv_debug_cloned = 0;
+ sv->sv_debug_parent = NULL;
sv->sv_debug_file = PL_curcop ? savepv(CopFILE(PL_curcop)): NULL;
sv->sv_debug_serial = PL_sv_serial++;
/* XXX Might want to check arrays, etc. */
}
-/* called by sv_clean_objs() for each live SV */
-#ifndef DISABLE_DESTRUCTOR_KLUDGE
+/* clear any slots in a GV which hold objects - except IO;
+ * called by sv_clean_objs() for each live GV */
+
static void
do_clean_named_objs(pTHX_ SV *const sv)
{
dVAR;
+ SV *obj;
assert(SvTYPE(sv) == SVt_PVGV);
assert(isGV_with_GP(sv));
- if (GvGP(sv)) {
- if ((
-#ifdef PERL_DONT_CREATE_GVSV
- GvSV(sv) &&
-#endif
- SvOBJECT(GvSV(sv))) ||
- (GvAV(sv) && SvOBJECT(GvAV(sv))) ||
- (GvHV(sv) && SvOBJECT(GvHV(sv))) ||
- /* In certain rare cases GvIOp(sv) can be NULL, which would make SvOBJECT(GvIO(sv)) dereference NULL. */
- (GvIO(sv) ? (SvFLAGS(GvIOp(sv)) & SVs_OBJECT) : 0) ||
- (GvCV(sv) && SvOBJECT(GvCV(sv))) )
- {
- DEBUG_D((PerlIO_printf(Perl_debug_log, "Cleaning named glob object:\n "), sv_dump(sv)));
- SvFLAGS(sv) |= SVf_BREAK;
- SvREFCNT_dec(sv);
- }
+ if (!GvGP(sv))
+ return;
+
+ /* freeing GP entries may indirectly free the current GV;
+ * hold onto it while we mess with the GP slots */
+ SvREFCNT_inc(sv);
+
+ if ( ((obj = GvSV(sv) )) && SvOBJECT(obj)) {
+ DEBUG_D((PerlIO_printf(Perl_debug_log,
+ "Cleaning named glob SV object:\n "), sv_dump(obj)));
+ GvSV(sv) = NULL;
+ SvREFCNT_dec(obj);
+ }
+ if ( ((obj = MUTABLE_SV(GvAV(sv)) )) && SvOBJECT(obj)) {
+ DEBUG_D((PerlIO_printf(Perl_debug_log,
+ "Cleaning named glob AV object:\n "), sv_dump(obj)));
+ GvAV(sv) = NULL;
+ SvREFCNT_dec(obj);
+ }
+ if ( ((obj = MUTABLE_SV(GvHV(sv)) )) && SvOBJECT(obj)) {
+ DEBUG_D((PerlIO_printf(Perl_debug_log,
+ "Cleaning named glob HV object:\n "), sv_dump(obj)));
+ GvHV(sv) = NULL;
+ SvREFCNT_dec(obj);
}
+ if ( ((obj = MUTABLE_SV(GvCV(sv)) )) && SvOBJECT(obj)) {
+ DEBUG_D((PerlIO_printf(Perl_debug_log,
+ "Cleaning named glob CV object:\n "), sv_dump(obj)));
+ GvCV(sv) = NULL;
+ SvREFCNT_dec(obj);
+ }
+ SvREFCNT_dec(sv); /* undo the inc above */
+}
+
+/* clear any IO slots in a GV which hold objects (except stderr, defout);
+ * called by sv_clean_objs() for each live GV */
+
+static void
+do_clean_named_io_objs(pTHX_ SV *const sv)
+{
+ dVAR;
+ SV *obj;
+ assert(SvTYPE(sv) == SVt_PVGV);
+ assert(isGV_with_GP(sv));
+ if (!GvGP(sv) || sv == (SV*)PL_stderrgv || sv == (SV*)PL_defoutgv)
+ return;
+
+ SvREFCNT_inc(sv);
+ if ( ((obj = MUTABLE_SV(GvIO(sv)) )) && SvOBJECT(obj)) {
+ DEBUG_D((PerlIO_printf(Perl_debug_log,
+ "Cleaning named glob IO object:\n "), sv_dump(obj)));
+ GvIOp(sv) = NULL;
+ SvREFCNT_dec(obj);
+ }
+ SvREFCNT_dec(sv); /* undo the inc above */
+}
+
+/* Void wrapper to pass to visit() */
+static void
+do_curse(pTHX_ SV * const sv) {
+ if ((PL_stderrgv && GvGP(PL_stderrgv) && (SV*)GvIO(PL_stderrgv) == sv)
+ || (PL_defoutgv && GvGP(PL_defoutgv) && (SV*)GvIO(PL_defoutgv) == sv))
+ return;
+ (void)curse(sv, 0);
}
-#endif
/*
=for apidoc sv_clean_objs
Perl_sv_clean_objs(pTHX)
{
    dVAR;
+    GV *olddef, *olderr;
    PL_in_clean_objs = TRUE;
    visit(do_clean_objs, SVf_ROK, SVf_ROK);
-#ifndef DISABLE_DESTRUCTOR_KLUDGE
-    /* some barnacles may yet remain, clinging to typeglobs */
+    /* Some barnacles may yet remain, clinging to typeglobs.
+     * Run the non-IO destructors first: they may want to output
+     * error messages, close files etc */
    visit(do_clean_named_objs, SVt_PVGV|SVpgv_GP, SVTYPEMASK|SVp_POK|SVpgv_GP);
-#endif
+    visit(do_clean_named_io_objs, SVt_PVGV|SVpgv_GP, SVTYPEMASK|SVp_POK|SVpgv_GP);
+    /* And if there are some very tenacious barnacles clinging to arrays,
+       closures, or what have you.... */
+    visit(do_curse, SVs_OBJECT, SVs_OBJECT);
+    olddef = PL_defoutgv;
+    PL_defoutgv = NULL; /* disable skip of PL_defoutgv */
+    if (olddef && isGV_with_GP(olddef))
+        do_clean_named_io_objs(aTHX_ MUTABLE_SV(olddef));
+    olderr = PL_stderrgv;
+    PL_stderrgv = NULL; /* disable skip of PL_stderrgv */
+    if (olderr && isGV_with_GP(olderr))
+        do_clean_named_io_objs(aTHX_ MUTABLE_SV(olderr));
+    /* drop the refs formerly held via PL_defoutgv/PL_stderrgv; without
+     * the second dec the stderr GV would leak at destruct time */
+    SvREFCNT_dec(olddef);
+    SvREFCNT_dec(olderr);
    PL_in_clean_objs = FALSE;
}
I32 cleaned;
PL_in_clean_all = TRUE;
cleaned = visit(do_clean_all, 0,0);
- PL_in_clean_all = FALSE;
return cleaned;
}
while (i--)
PL_body_roots[i] = 0;
- Safefree(PL_nice_chunk);
- PL_nice_chunk = NULL;
- PL_nice_chunk_size = 0;
PL_sv_arenaroot = 0;
PL_sv_root = 0;
}
are decremented to point at the unused 'ghost' memory, knowing that
the pointers are used with offsets to the real memory.
- HE, HEK arenas are managed separately, with separate code, but may
- be merge-able later..
-*/
-
-/* get_arena(size): this creates custom-sized arenas
- TBD: export properly for hv.c: S_more_he().
-*/
-void*
-Perl_get_arena(pTHX_ const size_t arena_size, const svtype bodytype)
-{
- dVAR;
- struct arena_desc* adesc;
- struct arena_set *aroot = (struct arena_set*) PL_body_arenas;
- unsigned int curr;
-
- /* shouldnt need this
- if (!arena_size) arena_size = PERL_ARENA_SIZE;
- */
-
- /* may need new arena-set to hold new arena */
- if (!aroot || aroot->curr >= aroot->set_size) {
- struct arena_set *newroot;
- Newxz(newroot, 1, struct arena_set);
- newroot->set_size = ARENAS_PER_SET;
- newroot->next = aroot;
- aroot = newroot;
- PL_body_arenas = (void *) newroot;
- DEBUG_m(PerlIO_printf(Perl_debug_log, "new arenaset %p\n", (void*)aroot));
- }
-
- /* ok, now have arena-set with at least 1 empty/available arena-desc */
- curr = aroot->curr++;
- adesc = &(aroot->set[curr]);
- assert(!adesc->arena);
-
- Newx(adesc->arena, arena_size, char);
- adesc->size = arena_size;
- adesc->utype = bodytype;
- DEBUG_m(PerlIO_printf(Perl_debug_log, "arena %d added: %p size %"UVuf"\n",
- curr, (void*)adesc->arena, (UV)arena_size));
-
- return adesc->arena;
-}
-
-
-/* return a thing to the free list */
-
-#define del_body(thing, root) \
- STMT_START { \
- void ** const thing_copy = (void **)thing;\
- *thing_copy = *root; \
- *root = (void*)thing_copy; \
- } STMT_END
-
-/*
=head1 SV-Body Allocation
In turn, the new_body_* allocators call S_new_body(), which invokes
new_body_inline macro, which takes a lock, and takes a body off the
-linked list at PL_body_roots[sv_type], calling S_more_bodies() if
+linked list at PL_body_roots[sv_type], calling Perl_more_bodies() if
necessary to refresh an empty list. Then the lock is released, and
the body is returned.
-S_more_bodies calls get_arena(), and carves it up into an array of N
+Perl_more_bodies allocates a new arena, and carves it up into an array of N
bodies, which it strings into a linked list. It looks up arena-size
and body-size from the body_details table described below, thus
supporting the multiple body-types.
If PURIFY is defined, or PERL_ARENA_SIZE=0, arenas are not used, and
the (new|del)_X*V macros are mapped directly to malloc/free.
-*/
-
-/*
-
For each sv-type, struct body_details bodies_by_type[] carries
parameters which control these aspects of SV handling:
+ sizeof (((type*)SvANY((const SV *)0))->last_member)
static const struct body_details bodies_by_type[] = {
- { sizeof(HE), 0, 0, SVt_NULL,
- FALSE, NONV, NOARENA, FIT_ARENA(0, sizeof(HE)) },
+ /* HEs use this offset for their arena. */
+ { 0, 0, 0, SVt_NULL, FALSE, NONV, NOARENA, 0 },
/* The bind placeholder pretends to be an RV for now.
Also it's marked as "can't upgrade" to stop anyone using it before it's
SVt_NV, FALSE, HADNV, HASARENA, FIT_ARENA(0, sizeof(NV)) },
/* 8 bytes on most ILP32 with IEEE doubles */
- { sizeof(XPV),
+ { sizeof(XPV) - STRUCT_OFFSET(XPV, xpv_cur),
copy_length(XPV, xpv_len) - STRUCT_OFFSET(XPV, xpv_cur),
+ STRUCT_OFFSET(XPV, xpv_cur),
SVt_PV, FALSE, NONV, HASARENA,
FIT_ARENA(0, sizeof(XPV) - STRUCT_OFFSET(XPV, xpv_cur)) },
-#if 2 *PTRSIZE <= IVSIZE
/* 12 */
- { sizeof(XPVIV),
+ { sizeof(XPVIV) - STRUCT_OFFSET(XPV, xpv_cur),
copy_length(XPVIV, xiv_u) - STRUCT_OFFSET(XPV, xpv_cur),
+ STRUCT_OFFSET(XPV, xpv_cur),
SVt_PVIV, FALSE, NONV, HASARENA,
FIT_ARENA(0, sizeof(XPVIV) - STRUCT_OFFSET(XPV, xpv_cur)) },
- /* 12 */
-#else
- { sizeof(XPVIV),
- copy_length(XPVIV, xiv_u),
- 0,
- SVt_PVIV, FALSE, NONV, HASARENA,
- FIT_ARENA(0, sizeof(XPVIV)) },
-#endif
-#if (2 *PTRSIZE <= IVSIZE) && (2 *PTRSIZE <= NVSIZE)
/* 20 */
- { sizeof(XPVNV),
+ { sizeof(XPVNV) - STRUCT_OFFSET(XPV, xpv_cur),
copy_length(XPVNV, xnv_u) - STRUCT_OFFSET(XPV, xpv_cur),
+ STRUCT_OFFSET(XPV, xpv_cur),
SVt_PVNV, FALSE, HADNV, HASARENA,
FIT_ARENA(0, sizeof(XPVNV) - STRUCT_OFFSET(XPV, xpv_cur)) },
-#else
- /* 20 */
- { sizeof(XPVNV), copy_length(XPVNV, xnv_u), 0, SVt_PVNV, FALSE, HADNV,
- HASARENA, FIT_ARENA(0, sizeof(XPVNV)) },
-#endif
/* 28 */
{ sizeof(XPVMG), copy_length(XPVMG, xnv_u), 0, SVt_PVMG, FALSE, HADNV,
sizeof(regexp),
0,
SVt_REGEXP, FALSE, NONV, HASARENA,
- FIT_ARENA(0, sizeof(regexp) - STRUCT_OFFSET(regexp, xpv_cur))
+ FIT_ARENA(0, sizeof(regexp))
},
/* 48 */
(void *)((char *)S_new_body(aTHX_ sv_type) \
- bodies_by_type[sv_type].offset)
-#define del_body_allocated(p, sv_type) \
- del_body(p + bodies_by_type[sv_type].offset, &PL_body_roots[sv_type])
-
+/* return a thing to the free list */
-#define my_safemalloc(s) (void*)safemalloc(s)
-#define my_safecalloc(s) (void*)safecalloc(s, 1)
-#define my_safefree(p) safefree((char*)p)
+#define del_body(thing, root) \
+ STMT_START { \
+ void ** const thing_copy = (void **)thing; \
+ *thing_copy = *root; \
+ *root = (void*)thing_copy; \
+ } STMT_END
#ifdef PURIFY
-#define new_XNV() my_safemalloc(sizeof(XPVNV))
-#define del_XNV(p) my_safefree(p)
-
-#define new_XPVNV() my_safemalloc(sizeof(XPVNV))
-#define del_XPVNV(p) my_safefree(p)
+#define new_XNV() safemalloc(sizeof(XPVNV))
+#define new_XPVNV() safemalloc(sizeof(XPVNV))
+#define new_XPVMG() safemalloc(sizeof(XPVMG))
-#define new_XPVAV() my_safemalloc(sizeof(XPVAV))
-#define del_XPVAV(p) my_safefree(p)
-
-#define new_XPVHV() my_safemalloc(sizeof(XPVHV))
-#define del_XPVHV(p) my_safefree(p)
-
-#define new_XPVMG() my_safemalloc(sizeof(XPVMG))
-#define del_XPVMG(p) my_safefree(p)
-
-#define new_XPVGV() my_safemalloc(sizeof(XPVGV))
-#define del_XPVGV(p) my_safefree(p)
+#define del_XPVGV(p) safefree(p)
#else /* !PURIFY */
#define new_XNV() new_body_allocated(SVt_NV)
-#define del_XNV(p) del_body_allocated(p, SVt_NV)
-
#define new_XPVNV() new_body_allocated(SVt_PVNV)
-#define del_XPVNV(p) del_body_allocated(p, SVt_PVNV)
-
-#define new_XPVAV() new_body_allocated(SVt_PVAV)
-#define del_XPVAV(p) del_body_allocated(p, SVt_PVAV)
-
-#define new_XPVHV() new_body_allocated(SVt_PVHV)
-#define del_XPVHV(p) del_body_allocated(p, SVt_PVHV)
-
#define new_XPVMG() new_body_allocated(SVt_PVMG)
-#define del_XPVMG(p) del_body_allocated(p, SVt_PVMG)
-#define new_XPVGV() new_body_allocated(SVt_PVGV)
-#define del_XPVGV(p) del_body_allocated(p, SVt_PVGV)
+#define del_XPVGV(p) del_body(p + bodies_by_type[SVt_PVGV].offset, \
+ &PL_body_roots[SVt_PVGV])
#endif /* PURIFY */
/* no arena for you! */
#define new_NOARENA(details) \
- my_safemalloc((details)->body_size + (details)->offset)
+ safemalloc((details)->body_size + (details)->offset)
#define new_NOARENAZ(details) \
- my_safecalloc((details)->body_size + (details)->offset)
+ safecalloc((details)->body_size + (details)->offset, 1)
-STATIC void *
-S_more_bodies (pTHX_ const svtype sv_type)
+void *
+Perl_more_bodies (pTHX_ const svtype sv_type, const size_t body_size,
+ const size_t arena_size)
{
dVAR;
void ** const root = &PL_body_roots[sv_type];
- const struct body_details * const bdp = &bodies_by_type[sv_type];
- const size_t body_size = bdp->body_size;
+ struct arena_desc *adesc;
+ struct arena_set *aroot = (struct arena_set *) PL_body_arenas;
+ unsigned int curr;
char *start;
const char *end;
- const size_t arena_size = Perl_malloc_good_size(bdp->arena_size);
+ const size_t good_arena_size = Perl_malloc_good_size(arena_size);
#if defined(DEBUGGING) && !defined(PERL_GLOBAL_STRUCT_PRIVATE)
static bool done_sanity_check;
}
#endif
- assert(bdp->arena_size);
+ assert(arena_size);
- start = (char*) Perl_get_arena(aTHX_ arena_size, sv_type);
+ /* may need new arena-set to hold new arena */
+ if (!aroot || aroot->curr >= aroot->set_size) {
+ struct arena_set *newroot;
+ Newxz(newroot, 1, struct arena_set);
+ newroot->set_size = ARENAS_PER_SET;
+ newroot->next = aroot;
+ aroot = newroot;
+ PL_body_arenas = (void *) newroot;
+ DEBUG_m(PerlIO_printf(Perl_debug_log, "new arenaset %p\n", (void*)aroot));
+ }
- end = start + arena_size - 2 * body_size;
+ /* ok, now have arena-set with at least 1 empty/available arena-desc */
+ curr = aroot->curr++;
+ adesc = &(aroot->set[curr]);
+ assert(!adesc->arena);
+
+ Newx(adesc->arena, good_arena_size, char);
+ adesc->size = good_arena_size;
+ adesc->utype = sv_type;
+ DEBUG_m(PerlIO_printf(Perl_debug_log, "arena %d added: %p size %"UVuf"\n",
+ curr, (void*)adesc->arena, (UV)good_arena_size));
+
+ start = (char *) adesc->arena;
- /* computed count doesnt reflect the 1st slot reservation */
+ /* Get the address of the byte after the end of the last body we can fit.
+ Remember, this is integer division: */
+ end = start + good_arena_size / body_size * body_size;
+
+ /* computed count doesn't reflect the 1st slot reservation */
#if defined(MYMALLOC) || defined(HAS_MALLOC_GOOD_SIZE)
DEBUG_m(PerlIO_printf(Perl_debug_log,
"arena %p end %p arena-size %d (from %d) type %d "
"size %d ct %d\n",
- (void*)start, (void*)end, (int)arena_size,
- (int)bdp->arena_size, sv_type, (int)body_size,
- (int)arena_size / (int)body_size));
+ (void*)start, (void*)end, (int)good_arena_size,
+ (int)arena_size, sv_type, (int)body_size,
+ (int)good_arena_size / (int)body_size));
#else
DEBUG_m(PerlIO_printf(Perl_debug_log,
"arena %p end %p arena-size %d type %d size %d ct %d\n",
(void*)start, (void*)end,
- (int)bdp->arena_size, sv_type, (int)body_size,
- (int)bdp->arena_size / (int)body_size));
+ (int)arena_size, sv_type, (int)body_size,
+ (int)good_arena_size / (int)body_size));
#endif
*root = (void *)start;
- while (start <= end) {
+ while (1) {
+ /* Where the next body would start: */
char * const next = start + body_size;
+
+ if (next >= end) {
+ /* This is the last body: */
+ assert(next == end);
+
+ *(void **)start = 0;
+ return *root;
+ }
+
*(void**) start = (void *)next;
start = next;
}
- *(void **)start = 0;
-
- return *root;
}
/* grab a new thing from the free list, allocating more if necessary.
STMT_START { \
void ** const r3wt = &PL_body_roots[sv_type]; \
xpv = (PTR_TBL_ENT_t*) (*((void **)(r3wt)) \
- ? *((void **)(r3wt)) : more_bodies(sv_type)); \
+ ? *((void **)(r3wt)) : Perl_more_bodies(aTHX_ sv_type, \
+ bodies_by_type[sv_type].body_size,\
+ bodies_by_type[sv_type].arena_size)); \
*(r3wt) = *(void**)(xpv); \
} STMT_END
if (old_type > SVt_IV) {
#ifdef PURIFY
- my_safefree(old_body);
+ safefree(old_body);
#else
/* Note that there is an assumption that all bodies of types that
can be upgraded came from arenas. Only the more complex non-
s = SvPVX_mutable(sv);
if (newlen > SvLEN(sv)) { /* need more room? */
+ STRLEN minlen = SvCUR(sv);
+ minlen += (minlen >> PERL_STRLEN_EXPAND_SHIFT) + 10;
+ if (newlen < minlen)
+ newlen = minlen;
#ifndef Perl_safesysmalloc_size
newlen = PERL_STRLEN_ROUNDUP(newlen);
#endif
SV * tmpstr;
if (flags & SV_SKIP_OVERLOAD)
return 0;
- tmpstr=AMG_CALLun(sv,numer);
+ tmpstr = AMG_CALLunary(sv, numer_amg);
if (tmpstr && (!SvROK(tmpstr) || (SvRV(tmpstr) != SvRV(sv)))) {
return SvIV(tmpstr);
}
SV *tmpstr;
if (flags & SV_SKIP_OVERLOAD)
return 0;
- tmpstr = AMG_CALLun(sv,numer);
+ tmpstr = AMG_CALLunary(sv, numer_amg);
if (tmpstr && (!SvROK(tmpstr) || (SvRV(tmpstr) != SvRV(sv)))) {
return SvUV(tmpstr);
}
SV *tmpstr;
if (flags & SV_SKIP_OVERLOAD)
return 0;
- tmpstr = AMG_CALLun(sv,numer);
+ tmpstr = AMG_CALLunary(sv, numer_amg);
if (tmpstr && (!SvROK(tmpstr) || (SvRV(tmpstr) != SvRV(sv)))) {
return SvNV(tmpstr);
}
if (!SvROK(sv))
return sv;
if (SvAMAGIC(sv)) {
- SV * const tmpsv = AMG_CALLun(sv,numer);
+ SV * const tmpsv = AMG_CALLunary(sv, numer_amg);
TAINT_IF(tmpsv && SvTAINTED(tmpsv));
if (tmpsv && (!SvROK(tmpsv) || (SvRV(tmpsv) != SvRV(sv))))
return sv_2num(tmpsv);
len = SvIsUV(sv)
? my_snprintf(tbuf, sizeof(tbuf), "%"UVuf, (UV)SvUVX(sv))
: my_snprintf(tbuf, sizeof(tbuf), "%"IVdf, (IV)SvIVX(sv));
+ } else if(SvNVX(sv) == 0.0) {
+ tbuf[0] = '0';
+ tbuf[1] = 0;
+ len = 1;
} else {
Gconvert(SvNVX(sv), NV_DIG, 0, tbuf);
len = strlen(tbuf);
{
dVAR;
-#ifdef FIXNEGATIVEZERO
- if (len == 2 && tbuf[0] == '-' && tbuf[1] == '0') {
- tbuf[0] = '0';
- tbuf[1] = 0;
- len = 1;
- }
-#endif
SvUPGRADE(sv, SVt_PV);
if (lp)
*lp = len;
SV *tmpstr;
if (flags & SV_SKIP_OVERLOAD)
return NULL;
- tmpstr = AMG_CALLun(sv,string);
+ tmpstr = AMG_CALLunary(sv, string_amg);
TAINT_IF(tmpstr && SvTAINTED(tmpstr));
if (tmpstr && (!SvROK(tmpstr) || (SvRV(tmpstr) != SvRV(sv)))) {
/* Unwrap this: */
retval -= stashnamelen;
memcpy(retval, stashname, stashnamelen);
}
- /* retval may not neccesarily have reached the start of the
+ /* retval may not necessarily have reached the start of the
buffer here. */
assert (retval >= buffer);
*s = '\0';
}
else if (SvNOKp(sv)) {
- dSAVE_ERRNO;
if (SvTYPE(sv) < SVt_PVNV)
sv_upgrade(sv, SVt_PVNV);
- /* The +20 is pure guesswork. Configure test needed. --jhi */
- s = SvGROW_mutable(sv, NV_DIG + 20);
- /* some Xenix systems wipe out errno here */
-#ifdef apollo
- if (SvNVX(sv) == 0.0)
- my_strlcpy(s, "0", SvLEN(sv));
- else
-#endif /*apollo*/
- {
+ if (SvNVX(sv) == 0.0) {
+ s = SvGROW_mutable(sv, 2);
+ *s++ = '0';
+ *s = '\0';
+ } else {
+ dSAVE_ERRNO;
+ /* The +20 is pure guesswork. Configure test needed. --jhi */
+ s = SvGROW_mutable(sv, NV_DIG + 20);
+ /* some Xenix systems wipe out errno here */
Gconvert(SvNVX(sv), NV_DIG, 0, s);
+ RESTORE_ERRNO;
+ while (*s) s++;
}
- RESTORE_ERRNO;
-#ifdef FIXNEGATIVEZERO
- if (*s == '-' && s[1] == '0' && !s[2]) {
- s[0] = '0';
- s[1] = 0;
- }
-#endif
- while (*s) s++;
#ifdef hcx
if (s[-1] == '.')
*--s = '\0';
{
PERL_ARGS_ASSERT_SV_2PVBYTE;
+ SvGETMAGIC(sv);
sv_utf8_downgrade(sv,0);
- return lp ? SvPV(sv,*lp) : SvPV_nolen(sv);
+ return lp ? SvPV_nomg(sv,*lp) : SvPV_nomg_nolen(sv);
}
/*
/*
=for apidoc sv_2bool
-This function is only called on magical items, and is only used by
-sv_true() or its macro equivalent.
+This macro is only used by sv_true() or its macro equivalent, and only if
+the latter's argument is neither SvPOK, SvIOK nor SvNOK.
+It calls sv_2bool_flags with the SV_GMAGIC flag.
+
+=for apidoc sv_2bool_flags
+
+This function is only used by sv_true() and friends, and only if
+the latter's argument is neither SvPOK, SvIOK nor SvNOK. If the flags
+contain SV_GMAGIC, then it does an mg_get() first.
+
=cut
*/
bool
-Perl_sv_2bool(pTHX_ register SV *const sv)
+Perl_sv_2bool_flags(pTHX_ register SV *const sv, const I32 flags)
{
dVAR;
- PERL_ARGS_ASSERT_SV_2BOOL;
+ PERL_ARGS_ASSERT_SV_2BOOL_FLAGS;
- SvGETMAGIC(sv);
+ if(flags & SV_GMAGIC) SvGETMAGIC(sv);
if (!SvOK(sv))
return 0;
if (SvROK(sv)) {
if (SvAMAGIC(sv)) {
- SV * const tmpsv = AMG_CALLun(sv,bool_);
+ SV * const tmpsv = AMG_CALLunary(sv, bool__amg);
if (tmpsv && (!SvROK(tmpsv) || (SvRV(tmpsv) != SvRV(sv))))
return cBOOL(SvTRUE(tmpsv));
}
static void
S_glob_assign_glob(pTHX_ SV *const dstr, SV *const sstr, const int dtype)
{
- I32 mro_changes = 0; /* 1 = method, 2 = isa */
+ I32 mro_changes = 0; /* 1 = method, 2 = isa, 3 = recursive isa */
+ HV *old_stash = NULL;
PERL_ARGS_ASSERT_GLOB_ASSIGN_GLOB;
- if (dtype != SVt_PVGV) {
+ if (dtype != SVt_PVGV && !isGV_with_GP(dstr)) {
const char * const name = GvNAME(sstr);
const STRLEN len = GvNAMELEN(sstr);
{
}
/* If source has a real method, then a method is
going to change */
- else if(GvCV((const GV *)sstr)) {
+ else if(
+ GvCV((const GV *)sstr) && GvSTASH(dstr) && HvENAME(GvSTASH(dstr))
+ ) {
mro_changes = 1;
}
}
/* If dest already had a real method, that's a change as well */
- if(!mro_changes && GvGP(MUTABLE_GV(dstr)) && GvCVu((const GV *)dstr)) {
+ if(
+ !mro_changes && GvGP(MUTABLE_GV(dstr)) && GvCVu((const GV *)dstr)
+ && GvSTASH(dstr) && HvENAME(GvSTASH(dstr))
+ ) {
mro_changes = 1;
}
- if(strEQ(GvNAME((const GV *)dstr),"ISA"))
- mro_changes = 2;
+ /* We don’t need to check the name of the destination if it was not a
+ glob to begin with. */
+ if(dtype == SVt_PVGV) {
+ const char * const name = GvNAME((const GV *)dstr);
+ if(
+ strEQ(name,"ISA")
+ /* The stash may have been detached from the symbol table, so
+ check its name. */
+ && GvSTASH(dstr) && HvENAME(GvSTASH(dstr))
+ && GvAV((const GV *)sstr)
+ )
+ mro_changes = 2;
+ else {
+ const STRLEN len = GvNAMELEN(dstr);
+ if (len > 1 && name[len-2] == ':' && name[len-1] == ':') {
+ mro_changes = 3;
+
+ /* Set aside the old stash, so we can reset isa caches on
+ its subclasses. */
+ if((old_stash = GvHV(dstr)))
+ /* Make sure we do not lose it early. */
+ SvREFCNT_inc_simple_void_NN(
+ sv_2mortal((SV *)old_stash)
+ );
+ }
+ }
+ }
gp_free(MUTABLE_GV(dstr));
isGV_with_GP_off(dstr);
GvIMPORTED_on(dstr);
}
GvMULTI_on(dstr);
- if(mro_changes == 2) mro_isa_changed_in(GvSTASH(dstr));
+ if(mro_changes == 2) {
+ MAGIC *mg;
+ SV * const sref = (SV *)GvAV((const GV *)dstr);
+ if (SvSMAGICAL(sref) && (mg = mg_find(sref, PERL_MAGIC_isa))) {
+ if (SvTYPE(mg->mg_obj) != SVt_PVAV) {
+ AV * const ary = newAV();
+ av_push(ary, mg->mg_obj); /* takes the refcount */
+ mg->mg_obj = (SV *)ary;
+ }
+ av_push((AV *)mg->mg_obj, SvREFCNT_inc_simple_NN(dstr));
+ }
+ else sv_magic(sref, dstr, PERL_MAGIC_isa, NULL, 0);
+ mro_isa_changed_in(GvSTASH(dstr));
+ }
+ else if(mro_changes == 3) {
+ HV * const stash = GvHV(dstr);
+ if(old_stash ? (HV *)HvENAME_get(old_stash) : stash)
+ mro_package_moved(
+ stash, old_stash,
+ (GV *)dstr, 0
+ );
+ }
else if(mro_changes) mro_method_changed_in(GvSTASH(dstr));
return;
}
&& CopSTASH_ne(PL_curcop, GvSTASH(dstr))) {
GvFLAGS(dstr) |= import_flag;
}
- if (stype == SVt_PVAV && strEQ(GvNAME((GV*)dstr), "ISA")) {
- sv_magic(sref, dstr, PERL_MAGIC_isa, NULL, 0);
- mro_isa_changed_in(GvSTASH(dstr));
+ if (stype == SVt_PVHV) {
+ const char * const name = GvNAME((GV*)dstr);
+ const STRLEN len = GvNAMELEN(dstr);
+ if (
+ len > 1 && name[len-2] == ':' && name[len-1] == ':'
+ && (!dref || HvENAME_get(dref))
+ ) {
+ mro_package_moved(
+ (HV *)sref, (HV *)dref,
+ (GV *)dstr, 0
+ );
+ }
+ }
+ else if (
+ stype == SVt_PVAV && sref != dref
+ && strEQ(GvNAME((GV*)dstr), "ISA")
+ /* The stash may have been detached from the symbol table, so
+ check its name before doing anything. */
+ && GvSTASH(dstr) && HvENAME(GvSTASH(dstr))
+ ) {
+ MAGIC *mg;
+ MAGIC * const omg = dref && SvSMAGICAL(dref)
+ ? mg_find(dref, PERL_MAGIC_isa)
+ : NULL;
+ if (SvSMAGICAL(sref) && (mg = mg_find(sref, PERL_MAGIC_isa))) {
+ if (SvTYPE(mg->mg_obj) != SVt_PVAV) {
+ AV * const ary = newAV();
+ av_push(ary, mg->mg_obj); /* takes the refcount */
+ mg->mg_obj = (SV *)ary;
+ }
+ if (omg) {
+ if (SvTYPE(omg->mg_obj) == SVt_PVAV) {
+ SV **svp = AvARRAY((AV *)omg->mg_obj);
+ I32 items = AvFILLp((AV *)omg->mg_obj) + 1;
+ while (items--)
+ av_push(
+ (AV *)mg->mg_obj,
+ SvREFCNT_inc_simple_NN(*svp++)
+ );
+ }
+ else
+ av_push(
+ (AV *)mg->mg_obj,
+ SvREFCNT_inc_simple_NN(omg->mg_obj)
+ );
+ }
+ else
+ av_push((AV *)mg->mg_obj,SvREFCNT_inc_simple_NN(dstr));
+ }
+ else
+ {
+ sv_magic(
+ sref, omg ? omg->mg_obj : dstr, PERL_MAGIC_isa, NULL, 0
+ );
+ mg = mg_find(sref, PERL_MAGIC_isa);
+ }
+ /* Since the *ISA assignment could have affected more than
+ one stash, don’t call mro_isa_changed_in directly, but let
+ magic_clearisa do it for us, as it already has the logic for
+ dealing with globs vs arrays of globs. */
+ assert(mg);
+ Perl_magic_clearisa(aTHX_ NULL, mg);
}
break;
}
switch (stype) {
case SVt_NULL:
undef_sstr:
- if (dtype != SVt_PVGV) {
+ if (dtype != SVt_PVGV && dtype != SVt_PVLV) {
(void)SvOK_off(dstr);
return;
}
sv_upgrade(dstr, SVt_PVIV);
break;
case SVt_PVGV:
+ case SVt_PVLV:
goto end_of_first_switch;
}
(void)SvIOK_only(dstr);
sv_upgrade(dstr, SVt_PVNV);
break;
case SVt_PVGV:
+ case SVt_PVLV:
goto end_of_first_switch;
}
SvNV_set(dstr, SvNVX(sstr));
/* case SVt_BIND: */
case SVt_PVLV:
case SVt_PVGV:
- if (isGV_with_GP(sstr) && dtype <= SVt_PVGV) {
- glob_assign_glob(dstr, sstr, dtype);
- return;
- }
/* SvVALID means that this PVGV is playing at being an FBM. */
- /*FALLTHROUGH*/
case SVt_PVMG:
if (SvGMAGICAL(sstr) && (flags & SV_GMAGIC)) {
mg_get(sstr);
- if (SvTYPE(sstr) != stype) {
+ if (SvTYPE(sstr) != stype)
stype = SvTYPE(sstr);
- if (isGV_with_GP(sstr) && stype == SVt_PVGV && dtype <= SVt_PVGV) {
+ }
+ if (isGV_with_GP(sstr) && dtype <= SVt_PVLV) {
glob_assign_glob(dstr, sstr, dtype);
return;
- }
- }
}
if (stype == SVt_PVLV)
SvUPGRADE(dstr, SVt_PVNV);
else
Perl_croak(aTHX_ "Cannot copy to %s", type);
} else if (sflags & SVf_ROK) {
- if (isGV_with_GP(dstr) && dtype == SVt_PVGV
+ if (isGV_with_GP(dstr)
&& SvTYPE(SvRV(sstr)) == SVt_PVGV && isGV_with_GP(SvRV(sstr))) {
sstr = SvRV(sstr);
if (sstr == dstr) {
}
if (dtype >= SVt_PV) {
- if (dtype == SVt_PVGV && isGV_with_GP(dstr)) {
+ if (isGV_with_GP(dstr)) {
glob_assign_ref(dstr, sstr);
return;
}
assert(!(sflags & SVf_NOK));
assert(!(sflags & SVf_IOK));
}
- else if (dtype == SVt_PVGV && isGV_with_GP(dstr)) {
+ else if (isGV_with_GP(dstr)) {
if (!(sflags & SVf_OK)) {
Perl_ck_warner(aTHX_ packWARN(WARN_MISC),
"Undefined value assigned to typeglob");
else {
GV *gv = gv_fetchsv(sstr, GV_ADD, SVt_PVGV);
if (dstr != (const SV *)gv) {
+ const char * const name = GvNAME((const GV *)dstr);
+ const STRLEN len = GvNAMELEN(dstr);
+ HV *old_stash = NULL;
+ bool reset_isa = FALSE;
+ if (len > 1 && name[len-2] == ':' && name[len-1] == ':') {
+ /* Set aside the old stash, so we can reset isa caches
+ on its subclasses. */
+ if((old_stash = GvHV(dstr))) {
+ /* Make sure we do not lose it early. */
+ SvREFCNT_inc_simple_void_NN(
+ sv_2mortal((SV *)old_stash)
+ );
+ }
+ reset_isa = TRUE;
+ }
+
if (GvGP(dstr))
gp_free(MUTABLE_GV(dstr));
GvGP(dstr) = gp_ref(GvGP(gv));
+
+ if (reset_isa) {
+ HV * const stash = GvHV(dstr);
+ if(
+ old_stash ? (HV *)HvENAME_get(old_stash) : stash
+ )
+ mro_package_moved(
+ stash, old_stash,
+ (GV *)dstr, 0
+ );
+ }
}
}
}
#endif
if (flags & SV_HAS_TRAILING_NUL) {
/* It's long enough - do nothing.
- Specfically Perl_newCONSTSUB is relying on this. */
+ Specifically Perl_newCONSTSUB is relying on this. */
} else {
#ifdef DEBUGGING
/* Force a move to shake out bugs in callers. */
#endif
if (SvROK(sv))
sv_unref_flags(sv, flags);
- else if (SvFAKE(sv) && SvTYPE(sv) == SVt_PVGV)
+ else if (SvFAKE(sv) && isGV_with_GP(sv))
sv_unglob(sv);
else if (SvFAKE(sv) && SvTYPE(sv) == SVt_REGEXP) {
- /* Need to downgrade the REGEXP to a simple(r) scalar. This is analagous
+ /* Need to downgrade the REGEXP to a simple(r) scalar. This is analogous
to sv_unglob. We only need it here, so inline it. */
const svtype new_type = SvMAGIC(sv) || SvSTASH(sv) ? SVt_PVMG : SVt_PV;
SV *const temp = newSV_type(new_type);
if (ssv) {
STRLEN slen;
- const char *spv = SvPV_const(ssv, slen);
+ const char *spv = SvPV_flags_const(ssv, slen, flags);
if (spv) {
/* sutf8 and dutf8 were type bool, but under USE_ITHREADS,
gcc version 2.95.2 20000220 (Debian GNU/Linux) for
}
/*
+=for apidoc sv_catpv_flags
+
+Concatenates the string onto the end of the string which is in the SV.
+If the SV has the UTF-8 status set, then the bytes appended should
+be valid UTF-8. If C<flags> has C<SV_GMAGIC> bit set, will C<mg_get>
+on the SVs if appropriate, else not.
+
+=cut
+*/
+
+void
+Perl_sv_catpv_flags(pTHX_ SV *dstr, const char *sstr, const I32 flags)
+{
+    PERL_ARGS_ASSERT_SV_CATPV_FLAGS;
+    /* Thin wrapper: measure the NUL-terminated C string once, then
+     * delegate to sv_catpvn_flags(), passing flags through unchanged. */
+    sv_catpvn_flags(dstr, sstr, strlen(sstr), flags);
+}
+
+/*
=for apidoc sv_catpv_mg
Like C<sv_catpv>, but also handles 'set' magic.
mg->mg_ptr = savepvn(name, namlen);
else if (namlen == HEf_SVKEY) {
/* Yes, this is casting away const. This is only for the case of
- HEf_SVKEY. I think we need to document this abberation of the
+ HEf_SVKEY. I think we need to document this aberration of the
constness of the API, rather than making name non-const, as
that change propagating outwards a long way. */
mg->mg_ptr = (char*)SvREFCNT_inc_simple_NN((SV *)name);
case PERL_MAGIC_rhash:
case PERL_MAGIC_symtab:
case PERL_MAGIC_vstring:
+ case PERL_MAGIC_checkcall:
vtable = NULL;
break;
case PERL_MAGIC_utf8:
}
}
-/*
-=for apidoc sv_unmagic
-
-Removes all magic of type C<type> from an SV.
-
-=cut
-*/
-
int
-Perl_sv_unmagic(pTHX_ SV *const sv, const int type)
+S_sv_unmagicext_flags(pTHX_ SV *const sv, const int type, MGVTBL *vtbl, const U32 flags)
{
MAGIC* mg;
MAGIC** mgp;
- PERL_ARGS_ASSERT_SV_UNMAGIC;
+ assert(flags <= 1);
if (SvTYPE(sv) < SVt_PVMG || !SvMAGIC(sv))
return 0;
mgp = &(((XPVMG*) SvANY(sv))->xmg_u.xmg_magic);
for (mg = *mgp; mg; mg = *mgp) {
- if (mg->mg_type == type) {
- const MGVTBL* const vtbl = mg->mg_virtual;
+ const MGVTBL* const virt = mg->mg_virtual;
+ if (mg->mg_type == type && (!flags || virt == vtbl)) {
*mgp = mg->mg_moremagic;
- if (vtbl && vtbl->svt_free)
- CALL_FPTR(vtbl->svt_free)(aTHX_ sv, mg);
+ if (virt && virt->svt_free)
+ virt->svt_free(aTHX_ sv, mg);
if (mg->mg_ptr && mg->mg_type != PERL_MAGIC_regex_global) {
if (mg->mg_len > 0)
Safefree(mg->mg_ptr);
}
/*
+=for apidoc sv_unmagic
+
+Removes all magic of type C<type> from an SV.
+
+=cut
+*/
+
+int
+Perl_sv_unmagic(pTHX_ SV *const sv, const int type)
+{
+    PERL_ARGS_ASSERT_SV_UNMAGIC;
+    /* flags==0: match on mg_type alone, removing every MAGIC entry of
+     * that type no matter which vtable it carries (vtbl is ignored). */
+    return S_sv_unmagicext_flags(aTHX_ sv, type, NULL, 0);
+}
+
+/*
+=for apidoc sv_unmagicext
+
+Removes all magic of type C<type> with the specified C<vtbl> from an SV.
+
+=cut
+*/
+
+int
+Perl_sv_unmagicext(pTHX_ SV *const sv, const int type, MGVTBL *vtbl)
+{
+    PERL_ARGS_ASSERT_SV_UNMAGICEXT;
+    /* flags==1: additionally require mg_virtual == vtbl, so only the
+     * caller's own magic entries of this type are removed, leaving any
+     * other extension's same-typed magic untouched. */
+    return S_sv_unmagicext_flags(aTHX_ sv, type, vtbl, 1);
+}
+
+/*
=for apidoc sv_rvweaken
Weaken a reference: set the C<SvWEAKREF> flag on this RV; give the
/* Give tsv backref magic if it hasn't already got it, then push a
* back-reference to sv onto the array associated with the backref magic.
+ *
+ * As an optimisation, if there's only one backref and it's not an AV,
+ * store it directly in the HvAUX or mg_obj slot, avoiding the need to
+ * allocate an AV. (Whether the slot holds an AV tells us whether this is
+ * active.)
+ *
+ * If an HV's backref is stored in magic, it is moved back to HvAUX.
*/
/* A discussion about the backreferences array and its refcount:
* structure, from the xhv_backreferences field. (A HV without hv_aux will
* have the standard magic instead.) The array is created with a refcount
* of 2. This means that if during global destruction the array gets
- * picked on first to have its refcount decremented by the random zapper,
- * it won't actually be freed, meaning it's still theere for when its
- * parent gets freed.
- * When the parent SV is freed, in the case of magic, the magic is freed,
- * Perl_magic_killbackrefs is called which decrements one refcount, then
- * mg_obj is freed which kills the second count.
- * In the vase of a HV being freed, one ref is removed by
- * Perl_hv_kill_backrefs, the other by Perl_sv_kill_backrefs, which it
- * calls.
+ * picked on before its parent to have its refcount decremented by the
+ * random zapper, it won't actually be freed, meaning it's still there for
+ * when its parent gets freed.
+ *
+ * When the parent SV is freed, the extra ref is killed by
+ * Perl_sv_kill_backrefs. The other ref is killed, in the case of magic,
+ * by mg_free() / MGf_REFCOUNTED, or for a hash, by Perl_hv_kill_backrefs.
+ *
+ * When a single backref SV is stored directly, it is not reference
+ * counted.
*/
void
Perl_sv_add_backref(pTHX_ SV *const tsv, SV *const sv)
{
dVAR;
- AV *av;
+ SV **svp;
+ AV *av = NULL;
+ MAGIC *mg = NULL;
PERL_ARGS_ASSERT_SV_ADD_BACKREF;
- if (SvTYPE(tsv) == SVt_PVHV) {
- AV **const avp = Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv));
-
- av = *avp;
- if (!av) {
- /* There is no AV in the offical place - try a fixup. */
- MAGIC *const mg = mg_find(tsv, PERL_MAGIC_backref);
+ /* find slot to store array or singleton backref */
- if (mg) {
- /* Aha. They've got it stowed in magic. Bring it back. */
- av = MUTABLE_AV(mg->mg_obj);
- /* Stop mg_free decreasing the refernce count. */
+ if (SvTYPE(tsv) == SVt_PVHV) {
+ svp = (SV**)Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv));
+
+ if (!*svp) {
+ if ((mg = mg_find(tsv, PERL_MAGIC_backref))) {
+ /* Aha. They've got it stowed in magic instead.
+ * Move it back to xhv_backreferences */
+ *svp = mg->mg_obj;
+ /* Stop mg_free decreasing the reference count. */
mg->mg_obj = NULL;
/* Stop mg_free even calling the destructor, given that
there's no AV to free up. */
mg->mg_virtual = 0;
sv_unmagic(tsv, PERL_MAGIC_backref);
- } else {
- av = newAV();
- AvREAL_off(av);
- SvREFCNT_inc_simple_void(av); /* see discussion above */
+ mg = NULL;
}
- *avp = av;
}
} else {
- const MAGIC *const mg
- = SvMAGICAL(tsv) ? mg_find(tsv, PERL_MAGIC_backref) : NULL;
- if (mg)
- av = MUTABLE_AV(mg->mg_obj);
- else {
- av = newAV();
- AvREAL_off(av);
- sv_magic(tsv, MUTABLE_SV(av), PERL_MAGIC_backref, NULL, 0);
- /* av now has a refcnt of 2; see discussion above */
+ if (! ((mg =
+ (SvMAGICAL(tsv) ? mg_find(tsv, PERL_MAGIC_backref) : NULL))))
+ {
+ sv_magic(tsv, NULL, PERL_MAGIC_backref, NULL, 0);
+ mg = mg_find(tsv, PERL_MAGIC_backref);
}
+ svp = &(mg->mg_obj);
}
+
+ /* create or retrieve the array */
+
+ if ( (!*svp && SvTYPE(sv) == SVt_PVAV)
+ || (*svp && SvTYPE(*svp) != SVt_PVAV)
+ ) {
+ /* create array */
+ av = newAV();
+ AvREAL_off(av);
+ SvREFCNT_inc_simple_void(av);
+ /* av now has a refcnt of 2; see discussion above */
+ if (*svp) {
+ /* move single existing backref to the array */
+ av_extend(av, 1);
+ AvARRAY(av)[++AvFILLp(av)] = *svp; /* av_push() */
+ }
+ *svp = (SV*)av;
+ if (mg)
+ mg->mg_flags |= MGf_REFCOUNTED;
+ }
+ else
+ av = MUTABLE_AV(*svp);
+
+ if (!av) {
+ /* optimisation: store single backref directly in HvAUX or mg_obj */
+ *svp = sv;
+ return;
+ }
+ /* push new backref */
+ assert(SvTYPE(av) == SVt_PVAV);
if (AvFILLp(av) >= AvMAX(av)) {
av_extend(av, AvFILLp(av)+1);
}
* with the SV we point to.
*/
-STATIC void
-S_sv_del_backref(pTHX_ SV *const tsv, SV *const sv)
+void
+Perl_sv_del_backref(pTHX_ SV *const tsv, SV *const sv)
{
dVAR;
- AV *av = NULL;
- SV **svp;
- I32 i;
+ SV **svp = NULL;
PERL_ARGS_ASSERT_SV_DEL_BACKREF;
if (SvTYPE(tsv) == SVt_PVHV && SvOOK(tsv)) {
- av = *Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv));
- /* We mustn't attempt to "fix up" the hash here by moving the
- backreference array back to the hv_aux structure, as that is stored
- in the main HvARRAY(), and hfreentries assumes that no-one
- reallocates HvARRAY() while it is running. */
+ svp = (SV**)Perl_hv_backreferences_p(aTHX_ MUTABLE_HV(tsv));
}
- if (!av) {
- const MAGIC *const mg
+ if (!svp || !*svp) {
+ MAGIC *const mg
= SvMAGICAL(tsv) ? mg_find(tsv, PERL_MAGIC_backref) : NULL;
- if (mg)
- av = MUTABLE_AV(mg->mg_obj);
+ svp = mg ? &(mg->mg_obj) : NULL;
}
- if (!av)
+ if (!svp || !*svp)
Perl_croak(aTHX_ "panic: del_backref");
- assert(!SvIS_FREED(av));
-
- svp = AvARRAY(av);
- /* We shouldn't be in here more than once, but for paranoia reasons lets
- not assume this. */
- for (i = AvFILLp(av); i >= 0; i--) {
- if (svp[i] == sv) {
- const SSize_t fill = AvFILLp(av);
- if (i != fill) {
- /* We weren't the last entry.
- An unordered list has this property that you can take the
- last element off the end to fill the hole, and it's still
- an unordered list :-)
- */
- svp[i] = svp[fill];
+ if (SvTYPE(*svp) == SVt_PVAV) {
+#ifdef DEBUGGING
+ int count = 1;
+#endif
+ AV * const av = (AV*)*svp;
+ SSize_t fill;
+ assert(!SvIS_FREED(av));
+ fill = AvFILLp(av);
+ assert(fill > -1);
+ svp = AvARRAY(av);
+ /* for an SV with N weak references to it, if all those
+ * weak refs are deleted, then sv_del_backref will be called
+ * N times and O(N^2) compares will be done within the backref
+ * array. To ameliorate this potential slowness, we:
+ * 1) make sure this code is as tight as possible;
+ * 2) when looking for SV, look for it at both the head and tail of the
+ * array first before searching the rest, since some create/destroy
+ * patterns will cause the backrefs to be freed in order.
+ */
+ if (*svp == sv) {
+ AvARRAY(av)++;
+ AvMAX(av)--;
+ }
+ else {
+ SV **p = &svp[fill];
+ SV *const topsv = *p;
+ if (topsv != sv) {
+#ifdef DEBUGGING
+ count = 0;
+#endif
+ while (--p > svp) {
+ if (*p == sv) {
+ /* We weren't the last entry.
+ An unordered list has this property that you
+ can take the last element off the end to fill
+ the hole, and it's still an unordered list :-)
+ */
+ *p = topsv;
+#ifdef DEBUGGING
+ count++;
+#else
+ break; /* should only be one */
+#endif
+ }
+ }
}
- svp[fill] = NULL;
- AvFILLp(av) = fill - 1;
}
+ assert(count ==1);
+ AvFILLp(av) = fill-1;
+ }
+ else {
+ /* optimisation: only a single backref, stored directly */
+ if (*svp != sv)
+ Perl_croak(aTHX_ "panic: del_backref");
+ *svp = NULL;
}
+
}
-int
+void
Perl_sv_kill_backrefs(pTHX_ SV *const sv, AV *const av)
{
- SV **svp = AvARRAY(av);
+ SV **svp;
+ SV **last;
+ bool is_array;
PERL_ARGS_ASSERT_SV_KILL_BACKREFS;
- PERL_UNUSED_ARG(sv);
- assert(!svp || !SvIS_FREED(av));
- if (svp) {
- SV *const *const last = svp + AvFILLp(av);
+ if (!av)
+ return;
+ is_array = (SvTYPE(av) == SVt_PVAV);
+ if (is_array) {
+ assert(!SvIS_FREED(av));
+ svp = AvARRAY(av);
+ if (svp)
+ last = svp + AvFILLp(av);
+ }
+ else {
+ /* optimisation: only a single backref, stored directly */
+ svp = (SV**)&av;
+ last = svp;
+ }
+
+ if (svp) {
while (svp <= last) {
if (*svp) {
SV *const referrer = *svp;
if (SvWEAKREF(referrer)) {
/* XXX Should we check that it hasn't changed? */
+ assert(SvROK(referrer));
SvRV_set(referrer, 0);
SvOK_off(referrer);
SvWEAKREF_off(referrer);
SvSETMAGIC(referrer);
} else if (SvTYPE(referrer) == SVt_PVGV ||
SvTYPE(referrer) == SVt_PVLV) {
+ assert(SvTYPE(sv) == SVt_PVHV); /* stash backref */
/* You lookin' at me? */
assert(GvSTASH(referrer));
assert(GvSTASH(referrer) == (const HV *)sv);
GvSTASH(referrer) = 0;
+ } else if (SvTYPE(referrer) == SVt_PVCV ||
+ SvTYPE(referrer) == SVt_PVFM) {
+ if (SvTYPE(sv) == SVt_PVHV) { /* stash backref */
+ /* You lookin' at me? */
+ assert(CvSTASH(referrer));
+ assert(CvSTASH(referrer) == (const HV *)sv);
+ SvANY(MUTABLE_CV(referrer))->xcv_stash = 0;
+ }
+ else {
+ assert(SvTYPE(sv) == SVt_PVGV);
+ /* You lookin' at me? */
+ assert(CvGV(referrer));
+ assert(CvGV(referrer) == (const GV *)sv);
+ anonymise_cv_maybe(MUTABLE_GV(sv),
+ MUTABLE_CV(referrer));
+ }
+
} else {
Perl_croak(aTHX_
"panic: magic_killbackrefs (flags=%"UVxf")",
(UV)SvFLAGS(referrer));
}
- *svp = NULL;
+ if (is_array)
+ *svp = NULL;
}
svp++;
}
}
- SvREFCNT_dec(av); /* remove extra count added by sv_add_backref() */
- return 0;
+ if (is_array) {
+ AvFILLp(av) = -1;
+ SvREFCNT_dec(av); /* remove extra count added by sv_add_backref() */
+ }
+ return;
}
/*
del_SV(nsv);
}
+/* We're about to free a GV which has a CV that refers back to us.
+ * If that CV will outlive us, make it anonymous (i.e. fix up its CvGV
+ * field) */
+
+STATIC void
+S_anonymise_cv_maybe(pTHX_ GV *gv, CV* cv)
+{
+    char *stash;
+    SV *gvname;
+    GV *anongv;
+
+    PERL_ARGS_ASSERT_ANONYMISE_CV_MAYBE;
+
+    /* be assertive! */
+    assert(SvREFCNT(gv) == 0);
+    assert(isGV(gv) && isGV_with_GP(gv));
+    assert(GvGP(gv));
+    assert(!CvANON(cv));
+    assert(CvGV(cv) == gv);
+
+    /* will the CV shortly be freed by gp_free() ? */
+    if (GvCV(gv) == cv && GvGP(gv)->gp_refcnt < 2 && SvREFCNT(cv) < 2) {
+        /* yes: just detach the CV's back-pointer and let the normal
+         * freeing machinery reap it; no anonymising needed */
+        SvANY(cv)->xcv_gv = NULL;
+        return;
+    }
+
+    /* if not, anonymise: */
+    /* build the name "Stash::__ANON__" (or "__ANON__::__ANON__" when the
+     * GV has no named stash) and repoint the CV at that glob instead */
+    stash = GvSTASH(gv) ? HvNAME(GvSTASH(gv)) : NULL;
+    gvname = Perl_newSVpvf(aTHX_ "%s::__ANON__",
+                                        stash ? stash : "__ANON__");
+    anongv = gv_fetchsv(gvname, GV_ADDMULTI, SVt_PVCV);
+    SvREFCNT_dec(gvname);
+
+    CvANON_on(cv);
+    CvCVGV_RC_on(cv);   /* the replacement CvGV pointer is refcounted */
+    SvANY(cv)->xcv_gv = MUTABLE_GV(SvREFCNT_inc(anongv));
+}
+
+
/*
=for apidoc sv_clear
*/
void
-Perl_sv_clear(pTHX_ register SV *const sv)
+Perl_sv_clear(pTHX_ SV *const orig_sv)
{
dVAR;
- const U32 type = SvTYPE(sv);
- const struct body_details *const sv_type_details
- = bodies_by_type + type;
HV *stash;
+ U32 type;
+ const struct body_details *sv_type_details;
+ SV* iter_sv = NULL;
+ SV* next_sv = NULL;
+ register SV *sv = orig_sv;
PERL_ARGS_ASSERT_SV_CLEAR;
- assert(SvREFCNT(sv) == 0);
- assert(SvTYPE(sv) != SVTYPEMASK);
- if (type <= SVt_IV) {
- /* See the comment in sv.h about the collusion between this early
- return and the overloading of the NULL slots in the size table. */
- if (SvROK(sv))
- goto free_rv;
- SvFLAGS(sv) &= SVf_BREAK;
- SvFLAGS(sv) |= SVTYPEMASK;
- return;
- }
+ /* within this loop, sv is the SV currently being freed, and
+ * iter_sv is the most recent AV or whatever that's being iterated
+ * over to provide more SVs */
- if (SvOBJECT(sv)) {
- if (PL_defstash && /* Still have a symbol table? */
- SvDESTROYABLE(sv))
- {
- dSP;
- HV* stash;
- do {
- CV* destructor;
- stash = SvSTASH(sv);
- destructor = StashHANDLER(stash,DESTROY);
- if (destructor
- /* A constant subroutine can have no side effects, so
- don't bother calling it. */
- && !CvCONST(destructor)
- /* Don't bother calling an empty destructor */
- && (CvISXSUB(destructor)
- || (CvSTART(destructor)
- && (CvSTART(destructor)->op_next->op_type != OP_LEAVESUB))))
- {
- SV* const tmpref = newRV(sv);
- SvREADONLY_on(tmpref); /* DESTROY() could be naughty */
- ENTER;
- PUSHSTACKi(PERLSI_DESTROY);
- EXTEND(SP, 2);
- PUSHMARK(SP);
- PUSHs(tmpref);
- PUTBACK;
- call_sv(MUTABLE_SV(destructor), G_DISCARD|G_EVAL|G_KEEPERR|G_VOID);
-
-
- POPSTACK;
- SPAGAIN;
- LEAVE;
- if(SvREFCNT(tmpref) < 2) {
- /* tmpref is not kept alive! */
- SvREFCNT(sv)--;
- SvRV_set(tmpref, NULL);
- SvROK_off(tmpref);
- }
- SvREFCNT_dec(tmpref);
- }
- } while (SvOBJECT(sv) && SvSTASH(sv) != stash);
+ while (sv) {
+ type = SvTYPE(sv);
- if (SvREFCNT(sv)) {
- if (PL_in_clean_objs)
- Perl_croak(aTHX_ "DESTROY created new reference to dead object '%s'",
- HvNAME_get(stash));
- /* DESTROY gave object new lease on life */
- return;
- }
+ assert(SvREFCNT(sv) == 0);
+ assert(SvTYPE(sv) != SVTYPEMASK);
+
+ if (type <= SVt_IV) {
+ /* See the comment in sv.h about the collusion between this
+ * early return and the overloading of the NULL slots in the
+ * size table. */
+ if (SvROK(sv))
+ goto free_rv;
+ SvFLAGS(sv) &= SVf_BREAK;
+ SvFLAGS(sv) |= SVTYPEMASK;
+ goto free_head;
}
if (SvOBJECT(sv)) {
- SvREFCNT_dec(SvSTASH(sv)); /* possibly of changed persuasion */
- SvOBJECT_off(sv); /* Curse the object. */
- if (type != SVt_PVIO)
- --PL_sv_objcount; /* XXX Might want something more general */
- }
- }
- if (type >= SVt_PVMG) {
- if (type == SVt_PVMG && SvPAD_OUR(sv)) {
- SvREFCNT_dec(SvOURSTASH(sv));
- } else if (SvMAGIC(sv))
- mg_free(sv);
- if (type == SVt_PVMG && SvPAD_TYPED(sv))
- SvREFCNT_dec(SvSTASH(sv));
- }
- switch (type) {
- /* case SVt_BIND: */
- case SVt_PVIO:
- if (IoIFP(sv) &&
- IoIFP(sv) != PerlIO_stdin() &&
- IoIFP(sv) != PerlIO_stdout() &&
- IoIFP(sv) != PerlIO_stderr() &&
- !(IoFLAGS(sv) & IOf_FAKE_DIRP))
- {
- io_close(MUTABLE_IO(sv), FALSE);
- }
- if (IoDIRP(sv) && !(IoFLAGS(sv) & IOf_FAKE_DIRP))
- PerlDir_close(IoDIRP(sv));
- IoDIRP(sv) = (DIR*)NULL;
- Safefree(IoTOP_NAME(sv));
- Safefree(IoFMT_NAME(sv));
- Safefree(IoBOTTOM_NAME(sv));
- goto freescalar;
- case SVt_REGEXP:
- /* FIXME for plugins */
- pregfree2((REGEXP*) sv);
- goto freescalar;
- case SVt_PVCV:
- case SVt_PVFM:
- cv_undef(MUTABLE_CV(sv));
- goto freescalar;
- case SVt_PVHV:
- if (PL_last_swash_hv == (const HV *)sv) {
- PL_last_swash_hv = NULL;
- }
- Perl_hv_kill_backrefs(aTHX_ MUTABLE_HV(sv));
- hv_undef(MUTABLE_HV(sv));
- break;
- case SVt_PVAV:
- if (PL_comppad == MUTABLE_AV(sv)) {
- PL_comppad = NULL;
- PL_curpad = NULL;
- }
- av_undef(MUTABLE_AV(sv));
- break;
- case SVt_PVLV:
- if (LvTYPE(sv) == 'T') { /* for tie: return HE to pool */
- SvREFCNT_dec(HeKEY_sv((HE*)LvTARG(sv)));
- HeNEXT((HE*)LvTARG(sv)) = PL_hv_fetch_ent_mh;
- PL_hv_fetch_ent_mh = (HE*)LvTARG(sv);
+ if (!curse(sv, 1)) goto get_next_sv;
}
- else if (LvTYPE(sv) != 't') /* unless tie: unrefcnted fake SV** */
- SvREFCNT_dec(LvTARG(sv));
- case SVt_PVGV:
- if (isGV_with_GP(sv)) {
- if(GvCVu((const GV *)sv) && (stash = GvSTASH(MUTABLE_GV(sv)))
- && HvNAME_get(stash))
- mro_method_changed_in(stash);
- gp_free(MUTABLE_GV(sv));
- if (GvNAME_HEK(sv))
- unshare_hek(GvNAME_HEK(sv));
- /* If we're in a stash, we don't own a reference to it. However it does
- have a back reference to us, which needs to be cleared. */
- if (!SvVALID(sv) && (stash = GvSTASH(sv)))
- sv_del_backref(MUTABLE_SV(stash), sv);
- }
- /* FIXME. There are probably more unreferenced pointers to SVs in the
- interpreter struct that we should check and tidy in a similar
- fashion to this: */
- if ((const GV *)sv == PL_last_in_gv)
- PL_last_in_gv = NULL;
- case SVt_PVMG:
- case SVt_PVNV:
- case SVt_PVIV:
- case SVt_PV:
- freescalar:
- /* Don't bother with SvOOK_off(sv); as we're only going to free it. */
- if (SvOOK(sv)) {
- STRLEN offset;
- SvOOK_offset(sv, offset);
- SvPV_set(sv, SvPVX_mutable(sv) - offset);
- /* Don't even bother with turning off the OOK flag. */
+ if (type >= SVt_PVMG) {
+ if (type == SVt_PVMG && SvPAD_OUR(sv)) {
+ SvREFCNT_dec(SvOURSTASH(sv));
+ } else if (SvMAGIC(sv))
+ mg_free(sv);
+ if (type == SVt_PVMG && SvPAD_TYPED(sv))
+ SvREFCNT_dec(SvSTASH(sv));
}
- if (SvROK(sv)) {
- free_rv:
+ switch (type) {
+ /* case SVt_BIND: */
+ case SVt_PVIO:
+ if (IoIFP(sv) &&
+ IoIFP(sv) != PerlIO_stdin() &&
+ IoIFP(sv) != PerlIO_stdout() &&
+ IoIFP(sv) != PerlIO_stderr() &&
+ !(IoFLAGS(sv) & IOf_FAKE_DIRP))
{
- SV * const target = SvRV(sv);
- if (SvWEAKREF(sv))
- sv_del_backref(target, sv);
- else
- SvREFCNT_dec(target);
+ io_close(MUTABLE_IO(sv), FALSE);
+ }
+ if (IoDIRP(sv) && !(IoFLAGS(sv) & IOf_FAKE_DIRP))
+ PerlDir_close(IoDIRP(sv));
+ IoDIRP(sv) = (DIR*)NULL;
+ Safefree(IoTOP_NAME(sv));
+ Safefree(IoFMT_NAME(sv));
+ Safefree(IoBOTTOM_NAME(sv));
+ goto freescalar;
+ case SVt_REGEXP:
+ /* FIXME for plugins */
+ pregfree2((REGEXP*) sv);
+ goto freescalar;
+ case SVt_PVCV:
+ case SVt_PVFM:
+ cv_undef(MUTABLE_CV(sv));
+ /* If we're in a stash, we don't own a reference to it.
+ * However it does have a back reference to us, which needs to
+ * be cleared. */
+ if ((stash = CvSTASH(sv)))
+ sv_del_backref(MUTABLE_SV(stash), sv);
+ goto freescalar;
+ case SVt_PVHV:
+ if (PL_last_swash_hv == (const HV *)sv) {
+ PL_last_swash_hv = NULL;
+ }
+ Perl_hv_kill_backrefs(aTHX_ MUTABLE_HV(sv));
+ Perl_hv_undef_flags(aTHX_ MUTABLE_HV(sv), HV_NAME_SETALL);
+ break;
+ case SVt_PVAV:
+ {
+ AV* av = MUTABLE_AV(sv);
+ if (PL_comppad == av) {
+ PL_comppad = NULL;
+ PL_curpad = NULL;
+ }
+ if (AvREAL(av) && AvFILLp(av) > -1) {
+ next_sv = AvARRAY(av)[AvFILLp(av)--];
+ /* save old iter_sv in top-most slot of AV,
+ * and pray that it doesn't get wiped in the meantime */
+ AvARRAY(av)[AvMAX(av)] = iter_sv;
+ iter_sv = sv;
+ goto get_next_sv; /* process this new sv */
+ }
+ Safefree(AvALLOC(av));
+ }
+
+ break;
+ case SVt_PVLV:
+ if (LvTYPE(sv) == 'T') { /* for tie: return HE to pool */
+ SvREFCNT_dec(HeKEY_sv((HE*)LvTARG(sv)));
+ HeNEXT((HE*)LvTARG(sv)) = PL_hv_fetch_ent_mh;
+ PL_hv_fetch_ent_mh = (HE*)LvTARG(sv);
+ }
+ else if (LvTYPE(sv) != 't') /* unless tie: unrefcnted fake SV** */
+ SvREFCNT_dec(LvTARG(sv));
+ case SVt_PVGV:
+ if (isGV_with_GP(sv)) {
+ if(GvCVu((const GV *)sv) && (stash = GvSTASH(MUTABLE_GV(sv)))
+ && HvENAME_get(stash))
+ mro_method_changed_in(stash);
+ gp_free(MUTABLE_GV(sv));
+ if (GvNAME_HEK(sv))
+ unshare_hek(GvNAME_HEK(sv));
+ /* If we're in a stash, we don't own a reference to it.
+ * However it does have a back reference to us, which
+ * needs to be cleared. */
+ if (!SvVALID(sv) && (stash = GvSTASH(sv)))
+ sv_del_backref(MUTABLE_SV(stash), sv);
+ }
+ /* FIXME. There are probably more unreferenced pointers to SVs
+ * in the interpreter struct that we should check and tidy in
+ * a similar fashion to this: */
+ if ((const GV *)sv == PL_last_in_gv)
+ PL_last_in_gv = NULL;
+ case SVt_PVMG:
+ case SVt_PVNV:
+ case SVt_PVIV:
+ case SVt_PV:
+ freescalar:
+ /* Don't bother with SvOOK_off(sv); as we're only going to
+ * free it. */
+ if (SvOOK(sv)) {
+ STRLEN offset;
+ SvOOK_offset(sv, offset);
+ SvPV_set(sv, SvPVX_mutable(sv) - offset);
+ /* Don't even bother with turning off the OOK flag. */
+ }
+ if (SvROK(sv)) {
+ free_rv:
+ {
+ SV * const target = SvRV(sv);
+ if (SvWEAKREF(sv))
+ sv_del_backref(target, sv);
+ else
+ next_sv = target;
+ }
}
- }
#ifdef PERL_OLD_COPY_ON_WRITE
- else if (SvPVX_const(sv)
- && !(SvTYPE(sv) == SVt_PVIO && !(IoFLAGS(sv) & IOf_FAKE_DIRP))) {
- if (SvIsCOW(sv)) {
- if (DEBUG_C_TEST) {
- PerlIO_printf(Perl_debug_log, "Copy on write: clear\n");
- sv_dump(sv);
- }
- if (SvLEN(sv)) {
- sv_release_COW(sv, SvPVX_const(sv), SV_COW_NEXT_SV(sv));
- } else {
- unshare_hek(SvSHARED_HEK_FROM_PV(SvPVX_const(sv)));
+ else if (SvPVX_const(sv)
+ && !(SvTYPE(sv) == SVt_PVIO
+ && !(IoFLAGS(sv) & IOf_FAKE_DIRP)))
+ {
+ if (SvIsCOW(sv)) {
+ if (DEBUG_C_TEST) {
+ PerlIO_printf(Perl_debug_log, "Copy on write: clear\n");
+ sv_dump(sv);
+ }
+ if (SvLEN(sv)) {
+ sv_release_COW(sv, SvPVX_const(sv), SV_COW_NEXT_SV(sv));
+ } else {
+ unshare_hek(SvSHARED_HEK_FROM_PV(SvPVX_const(sv)));
+ }
+
+ SvFAKE_off(sv);
+ } else if (SvLEN(sv)) {
+ Safefree(SvPVX_const(sv));
}
+ }
+#else
+ else if (SvPVX_const(sv) && SvLEN(sv)
+ && !(SvTYPE(sv) == SVt_PVIO
+ && !(IoFLAGS(sv) & IOf_FAKE_DIRP)))
+ Safefree(SvPVX_mutable(sv));
+ else if (SvPVX_const(sv) && SvREADONLY(sv) && SvFAKE(sv)) {
+ unshare_hek(SvSHARED_HEK_FROM_PV(SvPVX_const(sv)));
+ SvFAKE_off(sv);
+ }
+#endif
+ break;
+ case SVt_NV:
+ break;
+ }
+
+ free_body:
+
+ SvFLAGS(sv) &= SVf_BREAK;
+ SvFLAGS(sv) |= SVTYPEMASK;
- SvFAKE_off(sv);
- } else if (SvLEN(sv)) {
- Safefree(SvPVX_const(sv));
- }
+ sv_type_details = bodies_by_type + type;
+ if (sv_type_details->arena) {
+ del_body(((char *)SvANY(sv) + sv_type_details->offset),
+ &PL_body_roots[type]);
}
-#else
- else if (SvPVX_const(sv) && SvLEN(sv)
- && !(SvTYPE(sv) == SVt_PVIO && !(IoFLAGS(sv) & IOf_FAKE_DIRP)))
- Safefree(SvPVX_mutable(sv));
- else if (SvPVX_const(sv) && SvREADONLY(sv) && SvFAKE(sv)) {
- unshare_hek(SvSHARED_HEK_FROM_PV(SvPVX_const(sv)));
- SvFAKE_off(sv);
+ else if (sv_type_details->body_size) {
+ safefree(SvANY(sv));
}
+
+ free_head:
+ /* caller is responsible for freeing the head of the original sv */
+ if (sv != orig_sv && !SvREFCNT(sv))
+ del_SV(sv);
+
+ /* grab and free next sv, if any */
+ get_next_sv:
+ while (1) {
+ sv = NULL;
+ if (next_sv) {
+ sv = next_sv;
+ next_sv = NULL;
+ }
+ else if (!iter_sv) {
+ break;
+ } else if (SvTYPE(iter_sv) == SVt_PVAV) {
+ AV *const av = (AV*)iter_sv;
+ if (AvFILLp(av) > -1) {
+ sv = AvARRAY(av)[AvFILLp(av)--];
+ }
+ else { /* no more elements of current AV to free */
+ sv = iter_sv;
+ type = SvTYPE(sv);
+ /* restore previous value, squirrelled away */
+ iter_sv = AvARRAY(av)[AvMAX(av)];
+ Safefree(AvALLOC(av));
+ goto free_body;
+ }
+ }
+
+ /* unrolled SvREFCNT_dec and sv_free2 follows: */
+
+ if (!sv)
+ continue;
+ if (!SvREFCNT(sv)) {
+ sv_free(sv);
+ continue;
+ }
+ if (--(SvREFCNT(sv)))
+ continue;
+#ifdef DEBUGGING
+ if (SvTEMP(sv)) {
+ Perl_ck_warner_d(aTHX_ packWARN(WARN_DEBUGGING),
+ "Attempt to free temp prematurely: SV 0x%"UVxf
+ pTHX__FORMAT, PTR2UV(sv) pTHX__VALUE);
+ continue;
+ }
#endif
- break;
- case SVt_NV:
- break;
- }
+ if (SvREADONLY(sv) && SvIMMORTAL(sv)) {
+ /* make sure SvREFCNT(sv)==0 happens very seldom */
+ SvREFCNT(sv) = (~(U32)0)/2;
+ continue;
+ }
+ break;
+ } /* while 1 */
+
+ } /* while sv */
+}
- SvFLAGS(sv) &= SVf_BREAK;
- SvFLAGS(sv) |= SVTYPEMASK;
+/* This routine curses the sv itself, not the object referenced by sv. So
+ sv does not have to be ROK. */
- if (sv_type_details->arena) {
- del_body(((char *)SvANY(sv) + sv_type_details->offset),
- &PL_body_roots[type]);
+static bool
+S_curse(pTHX_ SV * const sv, const bool check_refcnt) {
+ dVAR;
+
+ PERL_ARGS_ASSERT_CURSE;
+ assert(SvOBJECT(sv));
+
+ if (PL_defstash && /* Still have a symbol table? */
+ SvDESTROYABLE(sv))
+ {
+ dSP;
+ HV* stash;
+ do {
+ CV* destructor;
+ stash = SvSTASH(sv);
+ destructor = StashHANDLER(stash,DESTROY);
+ if (destructor
+ /* A constant subroutine can have no side effects, so
+ don't bother calling it. */
+ && !CvCONST(destructor)
+ /* Don't bother calling an empty destructor */
+ && (CvISXSUB(destructor)
+ || (CvSTART(destructor)
+ && (CvSTART(destructor)->op_next->op_type
+ != OP_LEAVESUB))))
+ {
+ SV* const tmpref = newRV(sv);
+ SvREADONLY_on(tmpref); /* DESTROY() could be naughty */
+ ENTER;
+ PUSHSTACKi(PERLSI_DESTROY);
+ EXTEND(SP, 2);
+ PUSHMARK(SP);
+ PUSHs(tmpref);
+ PUTBACK;
+ call_sv(MUTABLE_SV(destructor),
+ G_DISCARD|G_EVAL|G_KEEPERR|G_VOID);
+ POPSTACK;
+ SPAGAIN;
+ LEAVE;
+ if(SvREFCNT(tmpref) < 2) {
+ /* tmpref is not kept alive! */
+ SvREFCNT(sv)--;
+ SvRV_set(tmpref, NULL);
+ SvROK_off(tmpref);
+ }
+ SvREFCNT_dec(tmpref);
+ }
+ } while (SvOBJECT(sv) && SvSTASH(sv) != stash);
+
+
+ if (check_refcnt && SvREFCNT(sv)) {
+ if (PL_in_clean_objs)
+ Perl_croak(aTHX_
+ "DESTROY created new reference to dead object '%s'",
+ HvNAME_get(stash));
+ /* DESTROY gave object new lease on life */
+ return FALSE;
+ }
}
- else if (sv_type_details->body_size) {
- my_safefree(SvANY(sv));
+
+ if (SvOBJECT(sv)) {
+ SvREFCNT_dec(SvSTASH(sv)); /* possibly of changed persuasion */
+ SvOBJECT_off(sv); /* Curse the object. */
+ if (SvTYPE(sv) != SVt_PVIO)
+ --PL_sv_objcount;/* XXX Might want something more general */
}
+ return TRUE;
}
/*
STRLEN ulen;
MAGIC *mg = SvMAGICAL(sv) ? mg_find(sv, PERL_MAGIC_utf8) : NULL;
- if (mg && mg->mg_len != -1) {
- ulen = mg->mg_len;
+ if (mg && (mg->mg_len != -1 || mg->mg_ptr)) {
+ if (mg->mg_len != -1)
+ ulen = mg->mg_len;
+ else {
+ /* We can use the offset cache for a headstart.
+ The longer value is stored in the first pair. */
+ STRLEN *cache = (STRLEN *) mg->mg_ptr;
+
+ ulen = cache[0] + Perl_utf8_length(aTHX_ s + cache[1],
+ s + len);
+ }
+
if (PL_utf8cache < 0) {
const STRLEN real = Perl_utf8_length(aTHX_ s, s + len);
- if (real != ulen) {
- /* Need to turn the assertions off otherwise we may
- recurse infinitely while printing error messages.
- */
- SAVEI8(PL_utf8cache);
- PL_utf8cache = 0;
- Perl_croak(aTHX_ "panic: sv_len_utf8 cache %"UVuf
- " real %"UVuf" for %"SVf,
- (UV) ulen, (UV) real, SVfARG(sv));
- }
+ assert_uft8_cache_coherent("sv_len_utf8", ulen, real, sv);
}
}
else {
ulen = Perl_utf8_length(aTHX_ s, s + len);
- if (!SvREADONLY(sv)) {
- if (!mg && (SvTYPE(sv) < SVt_PVMG ||
- !(mg = mg_find(sv, PERL_MAGIC_utf8)))) {
- mg = sv_magicext(sv, 0, PERL_MAGIC_utf8,
- &PL_vtbl_utf8, 0, 0);
- }
- assert(mg);
- mg->mg_len = ulen;
- /* For now, treat "overflowed" as "still unknown".
- See RT #72924. */
- if (ulen != (STRLEN) mg->mg_len)
- mg->mg_len = -1;
- }
+ utf8_mg_len_cache_update(sv, &mg, ulen);
}
return ulen;
}
offset. */
static STRLEN
S_sv_pos_u2b_forwards(const U8 *const start, const U8 *const send,
- STRLEN uoffset)
+ STRLEN *const uoffset_p, bool *const at_end)
{
const U8 *s = start;
+ STRLEN uoffset = *uoffset_p;
PERL_ARGS_ASSERT_SV_POS_U2B_FORWARDS;
- while (s < send && uoffset--)
+ while (s < send && uoffset) {
+ --uoffset;
s += UTF8SKIP(s);
- if (s > send) {
+ }
+ if (s == send) {
+ *at_end = TRUE;
+ }
+ else if (s > send) {
+ *at_end = TRUE;
/* This is the existing behaviour. Possibly it should be a croak, as
it's actually a bounds error */
s = send;
}
+ *uoffset_p -= uoffset;
return s - start;
}
the passed in UTF-8 offset. */
static STRLEN
S_sv_pos_u2b_midway(const U8 *const start, const U8 *send,
- const STRLEN uoffset, const STRLEN uend)
+ STRLEN uoffset, const STRLEN uend)
{
STRLEN backw = uend - uoffset;
/* The assumption is that going forwards is twice the speed of going
forward (that's where the 2 * backw comes from).
(The real figure of course depends on the UTF-8 data.) */
- return sv_pos_u2b_forwards(start, send, uoffset);
+ const U8 *s = start;
+
+ while (s < send && uoffset--)
+ s += UTF8SKIP(s);
+ assert (s <= send);
+ if (s > send)
+ s = send;
+ return s - start;
}
while (backw--) {
created if necessary, and the found value offered to it for update. */
static STRLEN
S_sv_pos_u2b_cached(pTHX_ SV *const sv, MAGIC **const mgp, const U8 *const start,
- const U8 *const send, const STRLEN uoffset,
+ const U8 *const send, STRLEN uoffset,
STRLEN uoffset0, STRLEN boffset0)
{
STRLEN boffset = 0; /* Actually always set, but let's keep gcc happy. */
bool found = FALSE;
+ bool at_end = FALSE;
PERL_ARGS_ASSERT_SV_POS_U2B_CACHED;
assert (uoffset >= uoffset0);
+ if (!uoffset)
+ return 0;
+
if (!SvREADONLY(sv)
&& PL_utf8cache
&& (*mgp || (SvTYPE(sv) >= SVt_PVMG &&
uoffset - uoffset0,
(*mgp)->mg_len - uoffset0);
} else {
+ uoffset -= uoffset0;
boffset = boffset0
+ sv_pos_u2b_forwards(start + boffset0,
- send, uoffset - uoffset0);
+ send, &uoffset, &at_end);
+ uoffset += uoffset0;
}
}
else if (cache[2] < uoffset) {
}
if (!found || PL_utf8cache < 0) {
- const STRLEN real_boffset
- = boffset0 + sv_pos_u2b_forwards(start + boffset0,
- send, uoffset - uoffset0);
-
- if (found && PL_utf8cache < 0) {
- if (real_boffset != boffset) {
- /* Need to turn the assertions off otherwise we may recurse
- infinitely while printing error messages. */
- SAVEI8(PL_utf8cache);
- PL_utf8cache = 0;
- Perl_croak(aTHX_ "panic: sv_pos_u2b_cache cache %"UVuf
- " real %"UVuf" for %"SVf,
- (UV) boffset, (UV) real_boffset, SVfARG(sv));
- }
- }
+ STRLEN real_boffset;
+ uoffset -= uoffset0;
+ real_boffset = boffset0 + sv_pos_u2b_forwards(start + boffset0,
+ send, &uoffset, &at_end);
+ uoffset += uoffset0;
+
+ if (found && PL_utf8cache < 0)
+ assert_uft8_cache_coherent("sv_pos_u2b_cache", boffset,
+ real_boffset, sv);
boffset = real_boffset;
}
- if (PL_utf8cache)
- utf8_mg_pos_cache_update(sv, mgp, boffset, uoffset, send - start);
+ if (PL_utf8cache) {
+ if (at_end)
+ utf8_mg_len_cache_update(sv, mgp, uoffset);
+ else
+ utf8_mg_pos_cache_update(sv, mgp, boffset, uoffset, send - start);
+ }
return boffset;
}
MAGIC *mg = NULL;
boffset = sv_pos_u2b_cached(sv, &mg, start, send, uoffset, 0, 0);
- if (lenp) {
+ if (lenp
+ && *lenp /* don't bother doing work for 0, as its bytes equivalent
+ is 0, and *lenp is already set to that. */) {
/* Convert the relative offset to absolute. */
const STRLEN uoffset2 = uoffset + *lenp;
const STRLEN boffset2
}
}
+/* Record the known UTF-8 character length of sv (ulen) in the mg_len
+ * slot of its PERL_MAGIC_utf8 magic, attaching that magic first if the
+ * SV doesn't already have it.  Read-only SVs are left uncached. */
+static void
+S_utf8_mg_len_cache_update(pTHX_ SV *const sv, MAGIC **const mgp,
+                           const STRLEN ulen)
+{
+    PERL_ARGS_ASSERT_UTF8_MG_LEN_CACHE_UPDATE;
+    if (SvREADONLY(sv))
+        return;
+
+    if (!*mgp && (SvTYPE(sv) < SVt_PVMG ||
+                  !(*mgp = mg_find(sv, PERL_MAGIC_utf8)))) {
+        *mgp = sv_magicext(sv, 0, PERL_MAGIC_utf8, &PL_vtbl_utf8, 0, 0);
+    }
+    assert(*mgp);
+
+    (*mgp)->mg_len = ulen;
+    /* For now, treat "overflowed" as "still unknown". See RT #72924. */
+    if (ulen != (STRLEN) (*mgp)->mg_len)
+        (*mgp)->mg_len = -1;
+}
+
/* Create and update the UTF8 magic offset cache, with the proffered utf8/
byte length pairing. The (byte) length of the total SV is passed in too,
as blen, because for some (more esoteric) SVs, the call to SvPV_const()
const U8 *start = (const U8 *) SvPVX_const(sv);
const STRLEN realutf8 = utf8_length(start, start + byte);
- if (realutf8 != utf8) {
- /* Need to turn the assertions off otherwise we may recurse
- infinitely while printing error messages. */
- SAVEI8(PL_utf8cache);
- PL_utf8cache = 0;
- Perl_croak(aTHX_ "panic: utf8_mg_pos_cache_update cache %"UVuf
- " real %"UVuf" for %"SVf, (UV) utf8, (UV) realutf8, SVfARG(sv));
- }
+ assert_uft8_cache_coherent("utf8_mg_pos_cache_update", utf8, realutf8,
+ sv);
}
/* Cache is held with the later position first, to simplify the code
/* Cache has 2 slots in use, and we know three potential pairs.
Keep the two that give the lowest RMS distance. Do the
- calcualation in bytes simply because we always know the byte
+ calculation in bytes simply because we always know the byte
length. squareroot has the same ordering as the positive value,
so don't bother with the actual square root. */
const float existing = THREEWAY_SQUARE(0, cache[3], cache[1], blen);
if (!found || PL_utf8cache < 0) {
const STRLEN real_len = utf8_length(s, send);
- if (found && PL_utf8cache < 0) {
- if (len != real_len) {
- /* Need to turn the assertions off otherwise we may recurse
- infinitely while printing error messages. */
- SAVEI8(PL_utf8cache);
- PL_utf8cache = 0;
- Perl_croak(aTHX_ "panic: sv_pos_b2u cache %"UVuf
- " real %"UVuf" for %"SVf,
- (UV) len, (UV) real_len, SVfARG(sv));
- }
- }
+ if (found && PL_utf8cache < 0)
+ assert_uft8_cache_coherent("sv_pos_b2u", len, real_len, sv);
len = real_len;
}
*offsetp = len;
- if (PL_utf8cache)
- utf8_mg_pos_cache_update(sv, &mg, byte, len, blen);
+ if (PL_utf8cache) {
+ if (blen == byte)
+ utf8_mg_len_cache_update(sv, &mg, len);
+ else
+ utf8_mg_pos_cache_update(sv, &mg, byte, len, blen);
+ }
+}
+
+/* Croak with a "panic" if the value taken from the UTF-8 offset cache
+ * (from_cache) disagrees with the freshly recomputed value (real).  func
+ * names the calling routine for the panic message.  The visible callers
+ * invoke this only under PL_utf8cache < 0 (cache-debugging mode). */
+static void
+S_assert_uft8_cache_coherent(pTHX_ const char *const func, STRLEN from_cache,
+ STRLEN real, SV *const sv)
+{
+ PERL_ARGS_ASSERT_ASSERT_UFT8_CACHE_COHERENT;
+
+ /* As this is debugging-only code, save space by keeping this test here,
+ rather than inlining it in all the callers. */
+ if (from_cache == real)
+ return;
+
+ /* Need to turn the assertions off otherwise we may recurse infinitely
+ while printing error messages. */
+ SAVEI8(PL_utf8cache);
+ PL_utf8cache = 0;
+ Perl_croak(aTHX_ "panic: %s cache %"UVuf" real %"UVuf" for %"SVf,
+ func, (UV) from_cache, (UV) real, SVfARG(sv));
+}
/*
identical. Is UTF-8 and 'use bytes' aware, handles get magic, and will
coerce its args to strings if necessary.
+=for apidoc sv_eq_flags
+
+Returns a boolean indicating whether the strings in the two SVs are
+identical. Is UTF-8 and 'use bytes' aware and coerces its args to strings
+if necessary. If the flags include SV_GMAGIC, it handles get-magic, too.
+
=cut
*/
I32
-Perl_sv_eq(pTHX_ register SV *sv1, register SV *sv2)
+Perl_sv_eq_flags(pTHX_ register SV *sv1, register SV *sv2, const U32 flags)
{
dVAR;
const char *pv1;
}
else {
/* if pv1 and pv2 are the same, second SvPV_const call may
- * invalidate pv1, so we may need to make a copy */
- if (sv1 == sv2 && (SvTHINKFIRST(sv1) || SvGMAGICAL(sv1))) {
+ * invalidate pv1 (if we are handling magic), so we may need to
+ * make a copy */
+ if (sv1 == sv2 && flags & SV_GMAGIC
+ && (SvTHINKFIRST(sv1) || SvGMAGICAL(sv1))) {
pv1 = SvPV_const(sv1, cur1);
sv1 = newSVpvn_flags(pv1, cur1, SVs_TEMP | SvUTF8(sv2));
}
- pv1 = SvPV_const(sv1, cur1);
+ pv1 = SvPV_flags_const(sv1, cur1, flags);
}
if (!sv2){
cur2 = 0;
}
else
- pv2 = SvPV_const(sv2, cur2);
+ pv2 = SvPV_flags_const(sv2, cur2, flags);
if (cur1 && cur2 && SvUTF8(sv1) != SvUTF8(sv2) && !IN_BYTES) {
/* Differing utf8ness.
}
}
else {
- bool is_utf8 = TRUE;
-
if (SvUTF8(sv1)) {
- /* sv1 is the UTF-8 one,
- * if is equal it must be downgrade-able */
- char * const pv = (char*)bytes_from_utf8((const U8*)pv1,
- &cur1, &is_utf8);
- if (pv != pv1)
- pv1 = tpv = pv;
+ /* sv1 is the UTF-8 one */
+ return bytes_cmp_utf8((const U8*)pv2, cur2,
+ (const U8*)pv1, cur1) == 0;
}
else {
- /* sv2 is the UTF-8 one,
- * if is equal it must be downgrade-able */
- char * const pv = (char *)bytes_from_utf8((const U8*)pv2,
- &cur2, &is_utf8);
- if (pv != pv2)
- pv2 = tpv = pv;
- }
- if (is_utf8) {
- /* Downgrade not possible - cannot be eq */
- assert (tpv == 0);
- return FALSE;
+ /* sv2 is the UTF-8 one */
+ return bytes_cmp_utf8((const U8*)pv1, cur1,
+ (const U8*)pv2, cur2) == 0;
}
}
}
C<sv2>. Is UTF-8 and 'use bytes' aware, handles get magic, and will
coerce its args to strings if necessary. See also C<sv_cmp_locale>.
+=for apidoc sv_cmp_flags
+
+Compares the strings in two SVs. Returns -1, 0, or 1 indicating whether the
+string in C<sv1> is less than, equal to, or greater than the string in
+C<sv2>. Is UTF-8 and 'use bytes' aware and will coerce its args to strings
+if necessary. If the flags include SV_GMAGIC, it handles get magic. See
+also C<sv_cmp_locale_flags>.
+
=cut
*/
I32
Perl_sv_cmp(pTHX_ register SV *const sv1, register SV *const sv2)
{
+ return sv_cmp_flags(sv1, sv2, SV_GMAGIC);
+}
+
+I32
+Perl_sv_cmp_flags(pTHX_ register SV *const sv1, register SV *const sv2,
+ const U32 flags)
+{
dVAR;
STRLEN cur1, cur2;
const char *pv1, *pv2;
cur1 = 0;
}
else
- pv1 = SvPV_const(sv1, cur1);
+ pv1 = SvPV_flags_const(sv1, cur1, flags);
if (!sv2) {
pv2 = "";
cur2 = 0;
}
else
- pv2 = SvPV_const(sv2, cur2);
+ pv2 = SvPV_flags_const(sv2, cur2, flags);
if (cur1 && cur2 && SvUTF8(sv1) != SvUTF8(sv2) && !IN_BYTES) {
/* Differing utf8ness.
pv2 = SvPV_const(svrecode, cur2);
}
else {
- pv2 = tpv = (char*)bytes_to_utf8((const U8*)pv2, &cur2);
+ const int retval = -bytes_cmp_utf8((const U8*)pv2, cur2,
+ (const U8*)pv1, cur1);
+ return retval ? retval < 0 ? -1 : +1 : 0;
}
}
else {
pv1 = SvPV_const(svrecode, cur1);
}
else {
- pv1 = tpv = (char*)bytes_to_utf8((const U8*)pv1, &cur1);
+ const int retval = bytes_cmp_utf8((const U8*)pv1, cur1,
+ (const U8*)pv2, cur2);
+ return retval ? retval < 0 ? -1 : +1 : 0;
}
}
}
'use bytes' aware, handles get magic, and will coerce its args to strings
if necessary. See also C<sv_cmp>.
+=for apidoc sv_cmp_locale_flags
+
+Compares the strings in two SVs in a locale-aware manner. Is UTF-8 and
+'use bytes' aware and will coerce its args to strings if necessary. If the
+flags contain SV_GMAGIC, it handles get magic. See also C<sv_cmp_flags>.
+
=cut
*/
I32
Perl_sv_cmp_locale(pTHX_ register SV *const sv1, register SV *const sv2)
{
+ return sv_cmp_locale_flags(sv1, sv2, SV_GMAGIC);
+}
+
+I32
+Perl_sv_cmp_locale_flags(pTHX_ register SV *const sv1, register SV *const sv2,
+ const U32 flags)
+{
dVAR;
#ifdef USE_LOCALE_COLLATE
goto raw_compare;
len1 = 0;
- pv1 = sv1 ? sv_collxfrm(sv1, &len1) : (char *) NULL;
+ pv1 = sv1 ? sv_collxfrm_flags(sv1, &len1, flags) : (char *) NULL;
len2 = 0;
- pv2 = sv2 ? sv_collxfrm(sv2, &len2) : (char *) NULL;
+ pv2 = sv2 ? sv_collxfrm_flags(sv2, &len2, flags) : (char *) NULL;
if (!pv1 || !len1) {
if (pv2 && len2)
/*
=for apidoc sv_collxfrm
-Add Collate Transform magic to an SV if it doesn't already have it.
+This calls C<sv_collxfrm_flags> with the SV_GMAGIC flag. See
+C<sv_collxfrm_flags>.
+
+=for apidoc sv_collxfrm_flags
+
+Add Collate Transform magic to an SV if it doesn't already have it. If the
+flags contain SV_GMAGIC, it handles get-magic.
Any scalar variable may carry PERL_MAGIC_collxfrm magic that contains the
scalar data of the variable, but transformed to such a format that a normal
*/
char *
-Perl_sv_collxfrm(pTHX_ SV *const sv, STRLEN *const nxp)
+Perl_sv_collxfrm_flags(pTHX_ SV *const sv, STRLEN *const nxp, const I32 flags)
{
dVAR;
MAGIC *mg;
- PERL_ARGS_ASSERT_SV_COLLXFRM;
+ PERL_ARGS_ASSERT_SV_COLLXFRM_FLAGS;
mg = SvMAGICAL(sv) ? mg_find(sv, PERL_MAGIC_collxfrm) : (MAGIC *) NULL;
if (!mg || !mg->mg_ptr || *(U32*)mg->mg_ptr != PL_collation_ix) {
if (mg)
Safefree(mg->mg_ptr);
- s = SvPV_const(sv, len);
+ s = SvPV_flags_const(sv, len, flags);
if ((xf = mem_collxfrm(s, len, &xlen))) {
if (! mg) {
#ifdef PERL_OLD_COPY_ON_WRITE
#endif /* USE_LOCALE_COLLATE */
+/* sv_gets() helper: read the next line from fp into a temporary SV,
+ * upgrade it to UTF-8, and append it to sv starting at byte offset
+ * append.  Returns the string in sv, or NULL if nothing was read. */
+static char *
+S_sv_gets_append_to_utf8(pTHX_ SV *const sv, PerlIO *const fp, I32 append)
+{
+ SV * const tsv = newSV(0);
+ ENTER;
+ /* Free the temporary even if sv_gets() or the concatenation dies. */
+ SAVEFREESV(tsv);
+ sv_gets(tsv, fp, 0);
+ sv_utf8_upgrade_nomg(tsv);
+ SvCUR_set(sv,append);
+ sv_catsv(sv,tsv);
+ LEAVE;
+ return (SvCUR(sv) - append) ? SvPVX(sv) : NULL;
+}
+
+/* sv_gets() helper for record mode (RsRECORD(PL_rs), i.e. $/ holding a
+ * reference to the record size): read one fixed-size record from fp into
+ * sv at byte offset append.  Returns the string in sv, or NULL if no
+ * bytes were read. */
+static char *
+S_sv_gets_read_record(pTHX_ SV *const sv, PerlIO *const fp, I32 append)
+{
+ I32 bytesread;
+ const U32 recsize = SvUV(SvRV(PL_rs)); /* RsRECORD() guarantees > 0. */
+ /* Grab the size of the record we're getting */
+ char *const buffer = SvGROW(sv, (STRLEN)(recsize + append + 1)) + append;
+#ifdef VMS
+ int fd;
+#endif
+
+ /* Go yank in */
+#ifdef VMS
+ /* VMS wants read instead of fread, because fread doesn't respect */
+ /* RMS record boundaries. This is not necessarily a good thing to be */
+ /* doing, but we've got no other real choice - except avoid stdio
+ as implementation - perhaps write a :vms layer ?
+ */
+ fd = PerlIO_fileno(fp);
+ if (fd != -1) {
+ bytesread = PerlLIO_read(fd, buffer, recsize);
+ }
+ else /* in-memory file from PerlIO::Scalar */
+#endif
+ {
+ bytesread = PerlIO_read(fp, buffer, recsize);
+ }
+
+ /* A read error yields an empty record rather than an exception here;
+ the caller sees NULL via the final return. */
+ if (bytesread < 0)
+ bytesread = 0;
+ SvCUR_set(sv, bytesread + append);
+ buffer[bytesread] = '\0';
+ return (SvCUR(sv) - append) ? SvPVX(sv) : NULL;
+}
+
/*
=for apidoc sv_gets
sv_pos_u2b(sv,&append,0);
}
} else if (SvUTF8(sv)) {
- SV * const tsv = newSV(0);
- sv_gets(tsv, fp, 0);
- sv_utf8_upgrade_nomg(tsv);
- SvCUR_set(sv,append);
- sv_catsv(sv,tsv);
- sv_free(tsv);
- goto return_string_or_null;
+ return S_sv_gets_append_to_utf8(aTHX_ sv, fp, append);
}
}
SvPOK_only(sv);
+ if (!append) {
+ SvCUR_set(sv,0);
+ }
if (PerlIO_isutf8(fp))
SvUTF8_on(sv);
rslen = 0;
}
else if (RsRECORD(PL_rs)) {
- I32 bytesread;
- char *buffer;
- U32 recsize;
-#ifdef VMS
- int fd;
-#endif
-
- /* Grab the size of the record we're getting */
- recsize = SvUV(SvRV(PL_rs)); /* RsRECORD() guarantees > 0. */
- buffer = SvGROW(sv, (STRLEN)(recsize + append + 1)) + append;
- /* Go yank in */
-#ifdef VMS
- /* VMS wants read instead of fread, because fread doesn't respect */
- /* RMS record boundaries. This is not necessarily a good thing to be */
- /* doing, but we've got no other real choice - except avoid stdio
- as implementation - perhaps write a :vms layer ?
- */
- fd = PerlIO_fileno(fp);
- if (fd == -1) { /* in-memory file from PerlIO::Scalar */
- bytesread = PerlIO_read(fp, buffer, recsize);
- }
- else {
- bytesread = PerlLIO_read(fd, buffer, recsize);
- }
-#else
- bytesread = PerlIO_read(fp, buffer, recsize);
-#endif
- if (bytesread < 0)
- bytesread = 0;
- SvCUR_set(sv, bytesread + append);
- buffer[bytesread] = '\0';
- goto return_string_or_null;
+ return S_sv_gets_read_record(aTHX_ sv, fp, append);
}
else if (RsPARA(PL_rs)) {
rsptr = "\n\n";
bp += cnt; /* screams | dust */
ptr += cnt; /* louder | sed :-) */
cnt = 0;
+ assert (!shortbuffered);
+ goto cannot_be_shortbuffered;
}
}
continue;
}
+ cannot_be_shortbuffered:
DEBUG_P(PerlIO_printf(Perl_debug_log,
"Screamer: going to getc, ptr=%"UVuf", cnt=%ld\n",
PTR2UV(ptr),(long)cnt));
PerlIO_set_ptrcnt(fp, (STDCHAR*)ptr, cnt); /* deregisterize cnt and ptr */
-#if 0
- DEBUG_P(PerlIO_printf(Perl_debug_log,
+
+ DEBUG_Pv(PerlIO_printf(Perl_debug_log,
"Screamer: pre: FILE * thinks ptr=%"UVuf", cnt=%ld, base=%"UVuf"\n",
PTR2UV(PerlIO_get_ptr(fp)), (long)PerlIO_get_cnt(fp),
PTR2UV(PerlIO_has_base (fp) ? PerlIO_get_base(fp) : 0)));
-#endif
+
/* This used to call 'filbuf' in stdio form, but as that behaves like
getc when cnt <= 0 we use PerlIO_getc here to avoid introducing
another abstraction. */
i = PerlIO_getc(fp); /* get more characters */
-#if 0
- DEBUG_P(PerlIO_printf(Perl_debug_log,
+
+ DEBUG_Pv(PerlIO_printf(Perl_debug_log,
"Screamer: post: FILE * thinks ptr=%"UVuf", cnt=%ld, base=%"UVuf"\n",
PTR2UV(PerlIO_get_ptr(fp)), (long)PerlIO_get_cnt(fp),
PTR2UV(PerlIO_has_base (fp) ? PerlIO_get_base(fp) : 0)));
-#endif
+
cnt = PerlIO_get_cnt(fp);
ptr = (STDCHAR*)PerlIO_get_ptr(fp); /* reregisterize cnt and ptr */
DEBUG_P(PerlIO_printf(Perl_debug_log,
}
else {
cnt = PerlIO_read(fp,(char*)buf, sizeof(buf));
- /* Accomodate broken VAXC compiler, which applies U8 cast to
+ /* Accommodate broken VAXC compiler, which applies U8 cast to
* both args of ?: operator, causing EOF to change into 255
*/
if (cnt > 0)
}
}
-return_string_or_null:
return (SvCUR(sv) - append) ? SvPVX(sv) : NULL;
}
}
if (SvROK(sv)) {
IV i;
- if (SvAMAGIC(sv) && AMG_CALLun(sv,inc))
+ if (SvAMAGIC(sv) && AMG_CALLunary(sv, inc_amg))
return;
i = PTR2IV(SvRV(sv));
sv_unref(sv);
}
if (SvROK(sv)) {
IV i;
- if (SvAMAGIC(sv) && AMG_CALLun(sv,dec))
+ if (SvAMAGIC(sv) && AMG_CALLunary(sv, dec_amg))
return;
i = PTR2IV(SvRV(sv));
sv_unref(sv);
string. You are responsible for ensuring that the source string is at least
C<len> bytes long. If the C<s> argument is NULL the new SV will be undefined.
Currently the only flag bits accepted are C<SVf_UTF8> and C<SVs_TEMP>.
-If C<SVs_TEMP> is set, then C<sv2mortal()> is called on the result before
+If C<SVs_TEMP> is set, then C<sv_2mortal()> is called on the result before
returning. If C<SVf_UTF8> is set, C<s> is considered to be in UTF-8 and the
C<SVf_UTF8> flag will be set on the new SV.
C<newSVpvn_utf8()> is a convenience wrapper for this function, defined as
sv_setpvn(sv,s,len);
/* This code used to a sv_2mortal(), however we now unroll the call to sv_2mortal()
- * and do what it does outselves here.
+ * and do what it does ourselves here.
* Since we have asserted that flags can only have the SVf_UTF8 and/or SVs_TEMP flags
* set above we can use it to enable the sv flags directly (bypassing SvTEMP_on), which
* in turn means we dont need to mask out the SVf_UTF8 flag below, which means that we
- * eleminate quite a few steps than it looks - Yves (explaining patch by gfx)
+ * eliminate quite a few more steps than it looks - Yves (explaining patch by gfx)
*/
SvFLAGS(sv) |= flags;
Andreas would like keys he put in as utf8 to come back as utf8
*/
STRLEN utf8_len = HEK_LEN(hek);
- const U8 *as_utf8 = bytes_to_utf8 ((U8*)HEK_KEY(hek), &utf8_len);
- SV * const sv = newSVpvn ((const char*)as_utf8, utf8_len);
-
+ SV * const sv = newSV_type(SVt_PV);
+ char *as_utf8 = (char *)bytes_to_utf8 ((U8*)HEK_KEY(hek), &utf8_len);
+ /* bytes_to_utf8() allocates a new string, which we can repurpose: */
+ sv_usepvn_flags(sv, as_utf8, utf8_len, SV_HAS_TRAILING_NUL);
SvUTF8_on (sv);
- Safefree (as_utf8); /* bytes_to_utf8() allocates a new string */
return sv;
} else if (flags & (HVhek_REHASH|HVhek_UNSHARED)) {
/* We don't have a pointer to the hv, so we have to replicate the
return sv;
}
+/*
+=for apidoc newSVpv_share
+
+Like C<newSVpvn_share>, but takes a nul-terminated string instead of a
+string/length pair.
+
+=cut
+*/
+
+SV *
+Perl_newSVpv_share(pTHX_ const char *src, U32 hash)
+{
+ return newSVpvn_share(src, strlen(src), hash);
+}
#if defined(PERL_IMPLICIT_CONTEXT)
io = MUTABLE_IO(sv);
break;
case SVt_PVGV:
+ case SVt_PVLV:
if (isGV_with_GP(sv)) {
gv = MUTABLE_GV(sv);
io = GvIO(gv);
default:
if (SvROK(sv)) {
- SV * const *sp = &sv; /* Used in tryAMAGICunDEREF macro. */
SvGETMAGIC(sv);
- tryAMAGICunDEREF(to_cv);
+ if (SvAMAGIC(sv))
+ sv = amagic_deref_call(sv, to_cv_amg);
+ /* At this point I'd like to do SPAGAIN, but really I need to
+ force it upon my callers. Hmmm. This is a mess... */
sv = SvRV(sv);
if (SvTYPE(sv) == SVt_PVCV) {
case SVt_PVLV: return (char *) (SvROK(sv) ? "REF"
/* tied lvalues should appear to be
- * scalars for backwards compatitbility */
+ * scalars for backwards compatibility */
: (LvTYPE(sv) == 't' || LvTYPE(sv) == 'T')
? "SCALAR" : "LVALUE");
case SVt_PVAV: return "ARRAY";
case SVt_PVFM: return "FORMAT";
case SVt_PVIO: return "IO";
case SVt_BIND: return "BIND";
- case SVt_REGEXP: return "REGEXP";
+ case SVt_REGEXP: return "REGEXP";
default: return "UNKNOWN";
}
}
return sv;
}
-/* Downgrades a PVGV to a PVMG.
+/* Downgrades a PVGV to a PVMG. If it's actually a PVLV, we leave the type
+ * as it is after unglobbing it.
*/
STATIC void
PERL_ARGS_ASSERT_SV_UNGLOB;
- assert(SvTYPE(sv) == SVt_PVGV);
+ assert(SvTYPE(sv) == SVt_PVGV || SvTYPE(sv) == SVt_PVLV);
SvFAKE_off(sv);
gv_efullname3(temp, MUTABLE_GV(sv), "*");
}
isGV_with_GP_off(sv);
- /* need to keep SvANY(sv) in the right arena */
- xpvmg = new_XPVMG();
- StructCopy(SvANY(sv), xpvmg, XPVMG);
- del_XPVGV(SvANY(sv));
- SvANY(sv) = xpvmg;
+ if(SvTYPE(sv) == SVt_PVGV) {
+ /* need to keep SvANY(sv) in the right arena */
+ xpvmg = new_XPVMG();
+ StructCopy(SvANY(sv), xpvmg, XPVMG);
+ del_XPVGV(SvANY(sv));
+ SvANY(sv) = xpvmg;
- SvFLAGS(sv) &= ~SVTYPEMASK;
- SvFLAGS(sv) |= SVt_PVMG;
+ SvFLAGS(sv) &= ~SVTYPEMASK;
+ SvFLAGS(sv) |= SVt_PVMG;
+ }
/* Intentionally not calling any local SET magic, as this isn't so much a
set operation as merely an internal storage change. */
#endif
case 'l':
#if defined(HAS_QUAD) || defined(HAS_LONG_DOUBLE)
- if (*(q + 1) == 'l') { /* lld, llf */
+ if (*++q == 'l') { /* lld, llf */
intsize = 'q';
- q += 2;
- break;
- }
+ ++q;
+ }
+ else
#endif
- /*FALLTHROUGH*/
+ intsize = 'l';
+ break;
case 'h':
- /*FALLTHROUGH*/
+ if (*++q == 'h') { /* hhd, hhu */
+ intsize = 'c';
+ ++q;
+ }
+ else
+ intsize = 'h';
+ break;
case 'V':
+ case 'z':
+ case 't':
+#if HAS_C99
+ case 'j':
+#endif
intsize = *q++;
break;
}
}
else if (args) {
switch (intsize) {
+ case 'c': iv = (char)va_arg(*args, int); break;
case 'h': iv = (short)va_arg(*args, int); break;
case 'l': iv = va_arg(*args, long); break;
case 'V': iv = va_arg(*args, IV); break;
+ case 'z': iv = va_arg(*args, SSize_t); break;
+ case 't': iv = va_arg(*args, ptrdiff_t); break;
default: iv = va_arg(*args, int); break;
+#if HAS_C99
+ case 'j': iv = va_arg(*args, intmax_t); break;
+#endif
case 'q':
#ifdef HAS_QUAD
iv = va_arg(*args, Quad_t); break;
else {
IV tiv = SvIV(argsv); /* work around GCC bug #13488 */
switch (intsize) {
+ case 'c': iv = (char)tiv; break;
case 'h': iv = (short)tiv; break;
case 'l': iv = (long)tiv; break;
case 'V':
}
else if (args) {
switch (intsize) {
+ case 'c': uv = (unsigned char)va_arg(*args, unsigned); break;
case 'h': uv = (unsigned short)va_arg(*args, unsigned); break;
case 'l': uv = va_arg(*args, unsigned long); break;
case 'V': uv = va_arg(*args, UV); break;
+ case 'z': uv = va_arg(*args, Size_t); break;
+ case 't': uv = va_arg(*args, ptrdiff_t); break; /* will sign extend, but there is no uptrdiff_t, so oh well */
+#if HAS_C99
+ case 'j': uv = va_arg(*args, uintmax_t); break;
+#endif
default: uv = va_arg(*args, unsigned); break;
case 'q':
#ifdef HAS_QUAD
else {
UV tuv = SvUV(argsv); /* work around GCC bug #13488 */
switch (intsize) {
+ case 'c': uv = (unsigned char)tuv; break;
case 'h': uv = (unsigned short)tuv; break;
case 'l': uv = (unsigned long)tuv; break;
case 'V':
#else
/*FALLTHROUGH*/
#endif
+ case 'c':
case 'h':
+ case 'z':
+ case 't':
+ case 'j':
goto unknown;
}
i = SvCUR(sv) - origlen;
if (args) {
switch (intsize) {
+ case 'c': *(va_arg(*args, char*)) = i; break;
case 'h': *(va_arg(*args, short*)) = i; break;
default: *(va_arg(*args, int*)) = i; break;
case 'l': *(va_arg(*args, long*)) = i; break;
case 'V': *(va_arg(*args, IV*)) = i; break;
+ case 'z': *(va_arg(*args, SSize_t*)) = i; break;
+ case 't': *(va_arg(*args, ptrdiff_t*)) = i; break;
+#if HAS_C99
+ case 'j': *(va_arg(*args, intmax_t*)) = i; break;
+#endif
case 'q':
#ifdef HAS_QUAD
*(va_arg(*args, Quad_t*)) = i; break;
Newxz(parser, 1, yy_parser);
ptr_table_store(PL_ptr_table, proto, parser);
- parser->yyerrstatus = 0;
- parser->yychar = YYEMPTY; /* Cause a token to be read. */
-
/* XXX these not yet duped */
parser->old_parser = NULL;
parser->stack = NULL;
/* duplicate a directory handle */
DIR *
-Perl_dirp_dup(pTHX_ DIR *const dp)
+Perl_dirp_dup(pTHX_ DIR *const dp, CLONE_PARAMS *const param)
{
+ DIR *ret;
+
+#ifdef HAS_FCHDIR
+ DIR *pwd;
+ register const Direntry_t *dirent;
+ char smallbuf[256];
+ char *name = NULL;
+ STRLEN len = -1;
+ long pos;
+#endif
+
PERL_UNUSED_CONTEXT;
+ PERL_ARGS_ASSERT_DIRP_DUP;
+
if (!dp)
return (DIR*)NULL;
- /* XXX TODO */
- return dp;
+
+ /* look for it in the table first */
+ ret = (DIR*)ptr_table_fetch(PL_ptr_table, dp);
+ if (ret)
+ return ret;
+
+#ifdef HAS_FCHDIR
+
+ PERL_UNUSED_ARG(param);
+
+ /* create anew */
+
+ /* open the current directory (so we can switch back) */
+ if (!(pwd = PerlDir_open("."))) return (DIR *)NULL;
+
+ /* chdir to our dir handle and open the present working directory */
+ if (fchdir(my_dirfd(dp)) < 0 || !(ret = PerlDir_open("."))) {
+ PerlDir_close(pwd);
+ return (DIR *)NULL;
+ }
+ /* Now we should have two dir handles pointing to the same dir. */
+
+ /* Be nice to the calling code and chdir back to where we were. */
+ fchdir(my_dirfd(pwd)); /* If this fails, then what? */
+
+ /* We have no need of the pwd handle any more. */
+ PerlDir_close(pwd);
+
+#ifdef DIRNAMLEN
+# define d_namlen(d) (d)->d_namlen
+#else
+# define d_namlen(d) strlen((d)->d_name)
+#endif
+ /* Iterate once through dp, to get the file name at the current posi-
+ tion. Then step back. */
+ pos = PerlDir_tell(dp);
+ if ((dirent = PerlDir_read(dp))) {
+ len = d_namlen(dirent);
+ if (len <= sizeof smallbuf) name = smallbuf;
+ else Newx(name, len, char);
+ Move(dirent->d_name, name, len, char);
+ }
+ PerlDir_seek(dp, pos);
+
+ /* Iterate through the new dir handle, till we find a file with the
+ right name. */
+ if (!dirent) /* just before the end */
+ for(;;) {
+ pos = PerlDir_tell(ret);
+ if (PerlDir_read(ret)) continue; /* not there yet */
+ PerlDir_seek(ret, pos); /* step back */
+ break;
+ }
+ else {
+ const long pos0 = PerlDir_tell(ret);
+ for(;;) {
+ pos = PerlDir_tell(ret);
+ if ((dirent = PerlDir_read(ret))) {
+ if (len == d_namlen(dirent)
+ && memEQ(name, dirent->d_name, len)) {
+ /* found it */
+ PerlDir_seek(ret, pos); /* step back */
+ break;
+ }
+ /* else we are not there yet; keep iterating */
+ }
+ else { /* This is not meant to happen. The best we can do is
+ reset the iterator to the beginning. */
+ PerlDir_seek(ret, pos0);
+ break;
+ }
+ }
+ }
+#undef d_namlen
+
+ if (name && name != smallbuf)
+ Safefree(name);
+#endif
+
+#ifdef WIN32
+ ret = win32_dirp_dup(dp, param);
+#endif
+
+ /* pop it in the pointer table */
+ if (ret)
+ ptr_table_store(PL_ptr_table, dp, ret);
+
+ return ret;
}
/* duplicate a typeglob */
for (; mg; mg = mg->mg_moremagic) {
MAGIC *nmg;
+
+ if ((param->flags & CLONEf_JOIN_IN)
+ && mg->mg_type == PERL_MAGIC_backref)
+ /* when joining, we let the individual SVs add themselves to
+ * backref as needed. */
+ continue;
+
Newx(nmg, 1, MAGIC);
*mgprev_p = nmg;
mgprev_p = &(nmg->mg_moremagic);
}
else
*/
- if(nmg->mg_type == PERL_MAGIC_backref) {
- /* The backref AV has its reference count deliberately bumped by
- 1. */
- nmg->mg_obj
- = SvREFCNT_inc(av_dup_inc((const AV *) nmg->mg_obj, param));
- }
- else {
- nmg->mg_obj = (nmg->mg_flags & MGf_REFCOUNTED)
- ? sv_dup_inc(nmg->mg_obj, param)
- : sv_dup(nmg->mg_obj, param);
- }
+ nmg->mg_obj = (nmg->mg_flags & MGf_REFCOUNTED)
+ ? nmg->mg_type == PERL_MAGIC_backref
+ /* The backref AV has its reference
+ * count deliberately bumped by 1 */
+ ? SvREFCNT_inc(av_dup_inc((const AV *)
+ nmg->mg_obj, param))
+ : sv_dup_inc(nmg->mg_obj, param)
+ : sv_dup(nmg->mg_obj, param);
if (nmg->mg_ptr && nmg->mg_type != PERL_MAGIC_regex_global) {
if (nmg->mg_len > 0) {
nmg->mg_ptr = (char*)sv_dup_inc((const SV *)nmg->mg_ptr, param);
}
if ((nmg->mg_flags & MGf_DUP) && nmg->mg_virtual && nmg->mg_virtual->svt_dup) {
- CALL_FPTR(nmg->mg_virtual->svt_dup)(aTHX_ nmg, param);
+ nmg->mg_virtual->svt_dup(aTHX_ nmg, param);
}
}
return mgret;
PERL_ARGS_ASSERT_RVPV_DUP;
if (SvROK(sstr)) {
- SvRV_set(dstr, SvWEAKREF(sstr)
- ? sv_dup(SvRV_const(sstr), param)
- : sv_dup_inc(SvRV_const(sstr), param));
-
+ if (SvWEAKREF(sstr)) {
+ SvRV_set(dstr, sv_dup(SvRV_const(sstr), param));
+ if (param->flags & CLONEf_JOIN_IN) {
+ /* if joining, we add any back references individually rather
+ * than copying the whole backref array */
+ Perl_sv_add_backref(aTHX_ SvRV(dstr), dstr);
+ }
+ }
+ else
+ SvRV_set(dstr, sv_dup_inc(SvRV_const(sstr), param));
}
else if (SvPVX_const(sstr)) {
/* Has something there */
SvPV_set(dstr, SAVEPVN(SvPVX_const(sstr), SvLEN(sstr)-1));
if (SvREADONLY(sstr) && SvFAKE(sstr)) {
/* Not that normal - actually sstr is copy on write.
- But we are a true, independant SV, so: */
+ But we are a true, independent SV, so: */
SvREADONLY_off(dstr);
SvFAKE_off(dstr);
}
something that is bad **/
if (SvTYPE(sstr) == SVt_PVHV) {
const HEK * const hvname = HvNAME_HEK(sstr);
- if (hvname)
+ if (hvname) {
/** don't clone stashes if they already exist **/
- return MUTABLE_SV(gv_stashpvn(HEK_KEY(hvname), HEK_LEN(hvname), 0));
+ dstr = MUTABLE_SV(gv_stashpvn(HEK_KEY(hvname), HEK_LEN(hvname), 0));
+ ptr_table_store(PL_ptr_table, sstr, dstr);
+ return dstr;
+ }
}
}
dstr->sv_debug_optype = sstr->sv_debug_optype;
dstr->sv_debug_line = sstr->sv_debug_line;
dstr->sv_debug_inpad = sstr->sv_debug_inpad;
- dstr->sv_debug_cloned = 1;
+ dstr->sv_debug_parent = (SV*)sstr;
+ FREE_SV_DEBUG_FILE(dstr);
dstr->sv_debug_file = savepv(sstr->sv_debug_file);
#endif
else
LvTARG(dstr) = sv_dup_inc(LvTARG(dstr), param);
case SVt_PVGV:
+ /* non-GP case already handled above */
if(isGV_with_GP(sstr)) {
GvNAME_HEK(dstr) = hek_dup(GvNAME_HEK(dstr), param);
/* Don't call sv_add_backref here as it's going to be
/* Danger Will Robinson - GvGP(dstr) isn't initialised
at the point of this comment. */
GvSTASH(dstr) = hv_dup(GvSTASH(dstr), param);
- if(param->flags & CLONEf_JOIN_IN) {
- const HEK * const hvname
- = HvNAME_HEK(GvSTASH(dstr));
- if( hvname
- && GvSTASH(dstr) == gv_stashpvn(
- HEK_KEY(hvname), HEK_LEN(hvname), 0
- )
- )
- Perl_sv_add_backref(
- aTHX_ MUTABLE_SV(GvSTASH(dstr)), dstr
- );
- }
+ if (param->flags & CLONEf_JOIN_IN)
+ Perl_sv_add_backref(aTHX_ MUTABLE_SV(GvSTASH(dstr)), dstr);
GvGP(dstr) = gp_dup(GvGP(sstr), param);
(void)GpREFCNT_inc(GvGP(dstr));
- } else
- Perl_rvpv_dup(aTHX_ dstr, sstr, param);
+ }
break;
case SVt_PVIO:
/* PL_parser->rsfp_filters entries have fake IoDIRP() */
IoFMT_GV(dstr) = gv_dup(IoFMT_GV(dstr), param);
IoBOTTOM_GV(dstr) = gv_dup(IoBOTTOM_GV(dstr), param);
if (IoDIRP(dstr)) {
- IoDIRP(dstr) = dirp_dup(IoDIRP(dstr));
+ IoDIRP(dstr) = dirp_dup(IoDIRP(dstr), param);
} else {
NOOP;
/* IoDIRP(dstr) is already a copy of IoDIRP(sstr) */
++i;
}
if (SvOOK(sstr)) {
- HEK *hvname;
const struct xpvhv_aux * const saux = HvAUX(sstr);
struct xpvhv_aux * const daux = HvAUX(dstr);
/* This flag isn't copied. */
/* SvOOK_on(hv) attacks the IV flags. */
SvFLAGS(dstr) |= SVf_OOK;
- hvname = saux->xhv_name;
- daux->xhv_name = hek_dup(hvname, param);
+ if (saux->xhv_name_count) {
+ HEK ** const sname = saux->xhv_name_u.xhvnameu_names;
+ const I32 count
+ = saux->xhv_name_count < 0
+ ? -saux->xhv_name_count
+ : saux->xhv_name_count;
+ HEK **shekp = sname + count;
+ HEK **dhekp;
+ Newx(daux->xhv_name_u.xhvnameu_names, count, HEK *);
+ dhekp = daux->xhv_name_u.xhvnameu_names + count;
+ while (shekp-- > sname) {
+ dhekp--;
+ *dhekp = hek_dup(*shekp, param);
+ }
+ }
+ else {
+ daux->xhv_name_u.xhvnameu_name
+ = hek_dup(saux->xhv_name_u.xhvnameu_name,
+ param);
+ }
+ daux->xhv_name_count = saux->xhv_name_count;
daux->xhv_riter = saux->xhv_riter;
daux->xhv_eiter = saux->xhv_eiter
cBOOL(HvSHAREKEYS(sstr)), param) : 0;
/* backref array needs refcnt=2; see sv_add_backref */
daux->xhv_backreferences =
- saux->xhv_backreferences
- ? MUTABLE_AV(SvREFCNT_inc(
- sv_dup_inc((const SV *)saux->xhv_backreferences, param)))
+ (param->flags & CLONEf_JOIN_IN)
+ /* when joining, we let the individual GVs and
+ * CVs add themselves to backref as
+ * needed. This avoids pulling in stuff
+ * that isn't required, and simplifies the
+ * case where stashes aren't cloned back
+ * if they already exist in the parent
+ * thread */
+ ? NULL
+ : saux->xhv_backreferences
+ ? (SvTYPE(saux->xhv_backreferences) == SVt_PVAV)
+ ? MUTABLE_AV(SvREFCNT_inc(
+ sv_dup_inc((const SV *)
+ saux->xhv_backreferences, param)))
+ : MUTABLE_AV(sv_dup((const SV *)
+ saux->xhv_backreferences, param))
: 0;
daux->xhv_mro_meta = saux->xhv_mro_meta
: 0;
/* Record stashes for possible cloning in Perl_clone(). */
- if (hvname)
+ if (HvNAME(sstr))
av_push(param->stashes, dstr);
}
}
if (!(param->flags & CLONEf_COPY_STACKS)) {
CvDEPTH(dstr) = 0;
}
+ /*FALLTHROUGH*/
case SVt_PVFM:
/* NOTE: not refcounted */
- CvSTASH(dstr) = hv_dup(CvSTASH(dstr), param);
- OP_REFCNT_LOCK;
- if (!CvISXSUB(dstr))
+ SvANY(MUTABLE_CV(dstr))->xcv_stash =
+ hv_dup(CvSTASH(dstr), param);
+ if ((param->flags & CLONEf_JOIN_IN) && CvSTASH(dstr))
+ Perl_sv_add_backref(aTHX_ MUTABLE_SV(CvSTASH(dstr)), dstr);
+ if (!CvISXSUB(dstr)) {
+ OP_REFCNT_LOCK;
CvROOT(dstr) = OpREFCNT_inc(CvROOT(dstr));
- OP_REFCNT_UNLOCK;
- if (CvCONST(dstr) && CvISXSUB(dstr)) {
+ OP_REFCNT_UNLOCK;
+ CvFILE(dstr) = SAVEPV(CvFILE(dstr));
+ } else if (CvCONST(dstr)) {
CvXSUBANY(dstr).any_ptr =
sv_dup_inc((const SV *)CvXSUBANY(dstr).any_ptr, param);
}
/* don't dup if copying back - CvGV isn't refcounted, so the
* duped GV may never be freed. A bit of a hack! DAPM */
- CvGV(dstr) = (param->flags & CLONEf_JOIN_IN) ?
- NULL : gv_dup(CvGV(dstr), param) ;
+ SvANY(MUTABLE_CV(dstr))->xcv_gv =
+ CvCVGV_RC(dstr)
+ ? gv_dup_inc(CvGV(sstr), param)
+ : (param->flags & CLONEf_JOIN_IN)
+ ? NULL
+ : gv_dup(CvGV(sstr), param);
+
CvPADLIST(dstr) = padlist_dup(CvPADLIST(sstr), param);
CvOUTSIDE(dstr) =
CvWEAKOUTSIDE(sstr)
? cv_dup( CvOUTSIDE(dstr), param)
: cv_dup_inc(CvOUTSIDE(dstr), param);
- if (!CvISXSUB(dstr))
- CvFILE(dstr) = SAVEPV(CvFILE(dstr));
break;
}
}
ncx->blk_loop.state_u.lazysv.end
= sv_dup_inc(ncx->blk_loop.state_u.lazysv.end, param);
/* We are taking advantage of av_dup_inc and sv_dup_inc
- actually being the same function, and order equivalance of
+ actually being the same function, and order equivalence of
the two unions.
We can assert the later [but only at run time :-(] */
assert ((void *) &ncx->blk_loop.state_u.ary.ary ==
case CXt_LOOP_LAZYIV:
case CXt_LOOP_PLAIN:
if (CxPADLOOP(ncx)) {
- ncx->blk_loop.oldcomppad
+ ncx->blk_loop.itervar_u.oldcomppad
= (PAD*)ptr_table_fetch(PL_ptr_table,
- ncx->blk_loop.oldcomppad);
+ ncx->blk_loop.itervar_u.oldcomppad);
} else {
- ncx->blk_loop.oldcomppad
- = (PAD*)gv_dup((const GV *)ncx->blk_loop.oldcomppad,
- param);
+ ncx->blk_loop.itervar_u.gv
+ = gv_dup((const GV *)ncx->blk_loop.itervar_u.gv,
+ param);
}
break;
case CXt_FORMAT:
TOPPTR(nss,ix) = sv_dup_inc(sv, param);
/* fall through */
case SAVEt_ITEM: /* normal string */
+ case SAVEt_GVSV: /* scalar slot in GV */
case SAVEt_SV: /* scalar reference */
sv = (const SV *)POPPTR(ss,ix);
TOPPTR(nss,ix) = sv_dup_inc(sv, param);
TOPPTR(nss,ix) = pv_dup(c);
break;
case SAVEt_GP: /* scalar reference */
- gv = (const GV *)POPPTR(ss,ix);
- TOPPTR(nss,ix) = gv_dup_inc(gv, param);
gp = (GP*)POPPTR(ss,ix);
TOPPTR(nss,ix) = gp = gp_dup(gp, param);
(void)GpREFCNT_inc(gp);
- i = POPINT(ss,ix);
- TOPINT(nss,ix) = i;
+ gv = (const GV *)POPPTR(ss,ix);
+ TOPPTR(nss,ix) = gv_dup_inc(gv, param);
break;
case SAVEt_FREEOP:
ptr = POPPTR(ss,ix);
else
TOPPTR(nss,ix) = NULL;
break;
+ case SAVEt_FREECOPHH:
+ ptr = POPPTR(ss,ix);
+ TOPPTR(nss,ix) = cophh_copy((COPHH *)ptr);
+ break;
case SAVEt_DELETE:
hv = (const HV *)POPPTR(ss,ix);
TOPPTR(nss,ix) = hv_dup_inc(hv, param);
break;
case SAVEt_HINTS:
ptr = POPPTR(ss,ix);
- if (ptr) {
- HINTS_REFCNT_LOCK;
- ((struct refcounted_he *)ptr)->refcounted_he_refcnt++;
- HINTS_REFCNT_UNLOCK;
- }
+ ptr = cophh_copy((COPHH*)ptr);
TOPPTR(nss,ix) = ptr;
i = POPINT(ss,ix);
TOPINT(nss,ix) = i;
PL_parser = NULL;
Zero(&PL_debug_pad, 1, struct perl_debug_pad);
# ifdef DEBUG_LEAKING_SCALARS
- PL_sv_serial = (((U32)my_perl >> 2) & 0xfff) * 1000000;
+ PL_sv_serial = (((UV)my_perl >> 2) & 0xfff) * 1000000;
# endif
#else /* !DEBUGGING */
Zero(my_perl, 1, PerlInterpreter);
param->proto_perl = proto_perl;
/* Likely nothing will use this, but it is initialised to be consistent
with Perl_clone_params_new(). */
- param->proto_perl = my_perl;
+ param->new_perl = my_perl;
param->unreferenced = NULL;
INIT_TRACK_MEMPOOL(my_perl->Imemory_debug_header, my_perl);
PL_body_arenas = NULL;
Zero(&PL_body_roots, 1, PL_body_roots);
- PL_nice_chunk = NULL;
- PL_nice_chunk_size = 0;
PL_sv_count = 0;
PL_sv_objcount = 0;
PL_sv_root = NULL;
ptr_table_store(PL_ptr_table, &proto_perl->Icompiling, &PL_compiling);
PL_compiling.cop_warnings = DUP_WARNINGS(PL_compiling.cop_warnings);
- if (PL_compiling.cop_hints_hash) {
- HINTS_REFCNT_LOCK;
- PL_compiling.cop_hints_hash->refcounted_he_refcnt++;
- HINTS_REFCNT_UNLOCK;
- }
+ CopHINTHASH_set(&PL_compiling, cophh_copy(CopHINTHASH_get(&PL_compiling)));
PL_curcop = (COP*)any_dup(proto_perl->Icurcop, proto_perl);
#ifdef PERL_DEBUG_READONLY_OPS
PL_slabs = NULL;
/* switches */
PL_minus_c = proto_perl->Iminus_c;
PL_patchlevel = sv_dup_inc(proto_perl->Ipatchlevel, param);
+ PL_apiversion = sv_dup_inc(proto_perl->Iapiversion, param);
PL_localpatches = proto_perl->Ilocalpatches;
PL_splitstr = proto_perl->Isplitstr;
PL_minus_n = proto_perl->Iminus_n;
PL_minus_F = proto_perl->Iminus_F;
PL_doswitches = proto_perl->Idoswitches;
PL_dowarn = proto_perl->Idowarn;
- PL_doextract = proto_perl->Idoextract;
PL_sawampersand = proto_perl->Isawampersand;
PL_unsafe = proto_perl->Iunsafe;
PL_inplace = SAVEPV(proto_perl->Iinplace);
PL_regex_pad = AvARRAY(PL_regex_padav);
/* shortcuts to various I/O objects */
- PL_ofsgv = gv_dup(proto_perl->Iofsgv, param);
+ PL_ofsgv = gv_dup_inc(proto_perl->Iofsgv, param);
PL_stdingv = gv_dup(proto_perl->Istdingv, param);
PL_stderrgv = gv_dup(proto_perl->Istderrgv, param);
PL_defgv = gv_dup(proto_perl->Idefgv, param);
PL_modglobal = hv_dup_inc(proto_perl->Imodglobal, param);
PL_custom_op_names = hv_dup_inc(proto_perl->Icustom_op_names,param);
PL_custom_op_descs = hv_dup_inc(proto_perl->Icustom_op_descs,param);
+ PL_custom_ops = hv_dup_inc(proto_perl->Icustom_ops, param);
PL_profiledata = NULL;
PL_restartop = proto_perl->Irestartop;
PL_in_eval = proto_perl->Iin_eval;
PL_delaymagic = proto_perl->Idelaymagic;
- PL_dirty = proto_perl->Idirty;
+ PL_phase = proto_perl->Iphase;
PL_localizing = proto_perl->Ilocalizing;
PL_errors = sv_dup_inc(proto_perl->Ierrors, param);
/* Pluggable optimizer */
PL_peepp = proto_perl->Ipeepp;
+ PL_rpeepp = proto_perl->Irpeepp;
/* op_free() hook */
PL_opfreehook = proto_perl->Iopfreehook;
}
PL_registered_mros = hv_dup_inc(proto_perl->Iregistered_mros, param);
+ PL_blockhooks = av_dup_inc(proto_perl->Iblockhooks, param);
+ PL_utf8_foldclosures = hv_dup_inc(proto_perl->Iutf8_foldclosures, param);
/* Call the ->CLONE method, if it exists, for each of the stashes
identified by sv_dup() above.
void
Perl_clone_params_del(CLONE_PARAMS *param)
{
- PerlInterpreter *const was = PERL_GET_THX;
+ /* This seemingly funky ordering keeps the build with PERL_GLOBAL_STRUCT
+ happy: */
PerlInterpreter *const to = param->new_perl;
dTHXa(to);
+ PerlInterpreter *const was = PERL_GET_THX;
PERL_ARGS_ASSERT_CLONE_PARAMS_DEL;
CLONE_PARAMS *
Perl_clone_params_new(PerlInterpreter *const from, PerlInterpreter *const to)
{
+ dVAR;
/* Need to play this game, as newAV() can call safesysmalloc(), and that
does a dTHX; to get the context from thread local storage.
FIXME - under PERL_CORE Newx(), Safefree() and friends should expand to
case OP_GVSV:
gv = cGVOPx_gv(obase);
- if (!gv || (match && GvSV(gv) != uninit_sv))
+ if (!gv || (match && GvSV(gv) != uninit_sv) || !GvSTASH(gv))
break;
return varname(gv, '$', 0, NULL, 0, FUV_SUBSCRIPT_NONE);
if ( (type == OP_CONST && SvOK(cSVOPx_sv(kid)))
|| (type == OP_NULL && ! (kid->op_flags & OPf_KIDS))
|| (type == OP_PUSHMARK)
+ || (
+ /* @$a and %$a, but not @a or %a */
+ (type == OP_RV2AV || type == OP_RV2HV)
+ && cUNOPx(kid)->op_first
+ && cUNOPx(kid)->op_first->op_type != OP_GV
+ )
)
continue;
}