{
PL_curcop = (COP*)PL_op;
TAINT_NOT; /* Each statement is presumed innocent */
- PL_stack_sp = PL_stack_base + cxstack[cxstack_ix].blk_oldsp;
+ PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;
FREETMPS;
PERL_ASYNC_CHECK();
return NORMAL;
SV *left = POPs; SV *right = TOPs;
- if (PL_op->op_private & OPpASSIGN_BACKWARDS) {
+ if (PL_op->op_private & OPpASSIGN_BACKWARDS) { /* {or,and,dor}assign */
SV * const temp = left;
left = right; right = temp;
}
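    /* (e.g. '$x ||= $y' compiles to an orassign whose sassign child
     * gets its operands in the opposite order, hence the swap above) */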
PP(pp_unstack)
{
+ PERL_CONTEXT *cx;
PERL_ASYNC_CHECK();
TAINT_NOT; /* Each statement is presumed innocent */
- PL_stack_sp = PL_stack_base + cxstack[cxstack_ix].blk_oldsp;
+ cx = CX_CUR();
+ PL_stack_sp = PL_stack_base + cx->blk_oldsp;
FREETMPS;
if (!(PL_op->op_flags & OPf_SPECIAL)) {
- I32 oldsave = PL_scopestack[PL_scopestack_ix - 1];
- LEAVE_SCOPE(oldsave);
+ assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx));
+ CX_LEAVE_SCOPE(cx);
}
return NORMAL;
}
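
/* (pp_unstack runs at the end of each iteration of a loop body such as
 * C<f() while $cond;>: it resets the stack and frees temps while leaving
 * the enclosing loop context in place) */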
}
else { /* $l .= $r and left == TARG */
if (!SvOK(left)) {
- if (left == right && ckWARN(WARN_UNINITIALIZED)) /* $l .= $l */
- report_uninit(right);
- sv_setpvs(left, "");
+ if ((left == right /* $l .= $l */
+ || (PL_op->op_private & OPpTARGET_MY)) /* $l = $l . $r */
+ && ckWARN(WARN_UNINITIALIZED)
+ )
+ report_uninit(left);
+ SvPVCLEAR(left);
}
else {
SvPV_force_nomg_nolen(left);
dSP;
PADOFFSET base = PL_op->op_targ;
int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
- int i;
if (PL_op->op_flags & OPf_SPECIAL) {
/* fake the RHS of my ($x,$y,..) = @_ */
PUSHMARK(SP);
/* note, this is only skipped for compile-time-known void cxt */
if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
+ int i;
+
EXTEND(SP, count);
PUSHMARK(SP);
                for (i = 0; i < count; i++)
(base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
| (count << SAVE_TIGHT_SHIFT)
| SAVEt_CLEARPADRANGE);
+ int i;
+
STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
- assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT)) == base);
+ assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT))
+ == (Size_t)base);
{
dSS_ADD;
SS_ADD_UV(payload);
AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
const U32 lval = PL_op->op_flags & OPf_MOD;
- SV** const svp = av_fetch(av, (I8)PL_op->op_private, lval);
- SV *sv = (svp ? *svp : &PL_sv_undef);
+ const I8 key = (I8)PL_op->op_private;
+ SV** svp;
+ SV *sv;
- if (UNLIKELY(!svp && lval))
- DIE(aTHX_ PL_no_aelem, (int)(I8)PL_op->op_private);
+ assert(SvTYPE(av) == SVt_PVAV);
EXTEND(SP, 1);
+
+ /* inlined av_fetch() for simple cases ... */
+ if (!SvRMAGICAL(av) && key >= 0 && key <= AvFILLp(av)) {
+ sv = AvARRAY(av)[key];
+ if (sv) {
+ PUSHs(sv);
+ RETURN;
+ }
+ }
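+    /* (an out-of-range or negative index, an empty slot, or a magical
+     * array all fall through to the full av_fetch() path below) */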
+
+ /* ... else do it the hard way */
+ svp = av_fetch(av, key, lval);
+ sv = (svp ? *svp : &PL_sv_undef);
+
+ if (UNLIKELY(!svp && lval))
+ DIE(aTHX_ PL_no_aelem, (int)key);
+
if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
mg_get(sv);
PUSHs(sv);
RETURN;
}
-PP(pp_pushre)
-{
- dSP;
-#ifdef DEBUGGING
- /*
- * We ass_u_me that LvTARGOFF() comes first, and that two STRLENs
- * will be enough to hold an OP*.
- */
- SV* const sv = sv_newmortal();
- sv_upgrade(sv, SVt_PVLV);
- LvTYPE(sv) = '/';
- Copy(&PL_op, &LvTARGOFF(sv), 1, OP*);
- XPUSHs(sv);
-#else
- XPUSHs(MUTABLE_SV(PL_op));
-#endif
- RETURN;
-}
-
/* Oversized hot code. */
/* also used for: pp_say() */
PP(pp_rv2av)
{
dSP; dTOPss;
- const I32 gimme = GIMME_V;
+ const U8 gimme = GIMME_V;
static const char an_array[] = "an ARRAY";
static const char a_hash[] = "a HASH";
const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
|| ( PL_op->op_private & OPpMAYBE_TRUEBOOL
&& block_gimme() == G_VOID ))
&& (!SvRMAGICAL(sv) || !mg_find(sv, PERL_MAGIC_tied)))
- SETs(HvUSEDKEYS(sv) ? &PL_sv_yes : sv_2mortal(newSViv(0)));
+ SETs(HvUSEDKEYS(MUTABLE_HV(sv)) ? &PL_sv_yes : &PL_sv_no);
else if (gimme == G_SCALAR) {
dTARG;
TARG = Perl_hv_scalar(aTHX_ MUTABLE_HV(sv));
lcount = -1;
lelem--; /* no need to unmark this element */
}
- else if (!(do_rc1 && SvREFCNT(svl) == 1) && svl != &PL_sv_undef) {
- assert(!SvIMMORTAL(svl));
+ else if (!(do_rc1 && SvREFCNT(svl) == 1) && !SvIMMORTAL(svl)) {
SvFLAGS(svl) |= SVf_BREAK;
marked = TRUE;
}
assert(svr);
if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
+ U32 brk = (SvFLAGS(svr) & SVf_BREAK);
#ifdef DEBUGGING
if (fake) {
/* ... but restore afterwards in case it's needed again,
* e.g. ($a,$b,$c) = (1,$a,$a)
*/
- SvFLAGS(svr) |= SVf_BREAK;
+ SvFLAGS(svr) |= brk;
}
if (!lcount)
SV **relem;
SV **lelem;
-
- SV *sv;
- AV *ary;
-
- I32 gimme;
- HV *hash;
- SSize_t i;
- int magic;
- U32 lval;
+ U8 gimme;
/* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
* only need to save locally, not on the save stack */
U16 old_delaymagic = PL_delaymagic;
if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
/* skip the scan if all scalars have a ref count of 1 */
for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
- sv = *lelem;
+ SV *sv = *lelem;
if (!sv || SvREFCNT(sv) == 1)
continue;
            if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVHV)
#endif
gimme = GIMME_V;
- lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
-
relem = firstrelem;
lelem = firstlelem;
- ary = NULL;
- hash = NULL;
+ if (relem > lastrelem)
+ goto no_relems;
+
+ /* first lelem loop while there are still relems */
while (LIKELY(lelem <= lastlelem)) {
bool alias = FALSE;
- TAINT_NOT; /* Each item stands on its own, taintwise. */
- sv = *lelem++;
- if (UNLIKELY(!sv)) {
+ SV *lsv = *lelem++;
+
+ TAINT_NOT; /* Each item stands on its own, taintwise. */
+
+ assert(relem <= lastrelem);
+ if (UNLIKELY(!lsv)) {
alias = TRUE;
- sv = *lelem++;
- ASSUME(SvTYPE(sv) == SVt_PVAV);
+ lsv = *lelem++;
+ ASSUME(SvTYPE(lsv) == SVt_PVAV);
}
- switch (SvTYPE(sv)) {
- case SVt_PVAV: {
- bool already_copied = FALSE;
- ary = MUTABLE_AV(sv);
- magic = SvMAGICAL(ary) != 0;
- ENTER;
- SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
-
- /* We need to clear ary. The is a danger that if we do this,
- * elements on the RHS may be prematurely freed, e.g.
- * @a = ($a[0]);
- * In the case of possible commonality, make a copy of each
- * RHS SV *before* clearing the array, and add a reference
- * from the tmps stack, so that it doesn't leak on death.
- * Otherwise, make a copy of each RHS SV only as we're storing
- * it into the array - that way we don't have to worry about
- * it being leaked if we die, but don't incur the cost of
- * mortalising everything.
- */
- if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
- && (relem <= lastrelem)
- && (magic || AvFILL(ary) != -1))
- {
- SV **svp;
- EXTEND_MORTAL(lastrelem - relem + 1);
+ switch (SvTYPE(lsv)) {
+ case SVt_PVAV: {
+ SV **svp;
+ SSize_t i;
+ SSize_t tmps_base;
+ SSize_t nelems = lastrelem - relem + 1;
+ AV *ary = MUTABLE_AV(lsv);
+
+ /* Assigning to an aggregate is tricky. First there is the
+ * issue of commonality, e.g. @a = ($a[0]). Since the
+ * stack isn't refcounted, clearing @a prior to storing
+ * elements will free $a[0]. Similarly with
+ * sub FETCH { $status[$_[1]] } @status = @tied[0,1];
+ *
+ * The way to avoid these issues is to make the copy of each
+ * SV (and we normally store a *copy* in the array) *before*
+ * clearing the array. But this has a problem in that
+ * if the code croaks during copying, the not-yet-stored copies
+ * could leak. One way to avoid this is to make all the copies
+ * mortal, but that's quite expensive.
+ *
+ * The current solution to these issues is to use a chunk
+ * of the tmps stack as a temporary refcounted-stack. SVs
+ * will be put on there during processing to avoid leaks,
+ * but will be removed again before the end of this block,
+ * so free_tmps() is never normally called. Also, the
+ * sv_refcnt of the SVs doesn't have to be manipulated, since
+ * the ownership of 1 reference count is transferred directly
+ * from the tmps stack to the AV when the SV is stored.
+ *
+ * We disarm slots in the temps stack by storing PL_sv_undef
+ * there: it doesn't matter if that SV's refcount is
+ * repeatedly decremented during a croak. But usually this is
+ * only an interim measure. By the end of this code block
+ * we try where possible to not leave any PL_sv_undef's on the
+ * tmps stack e.g. by shuffling newer entries down.
+ *
+ * There is one case where we don't copy: non-magical
+ * SvTEMP(sv)'s with a ref count of 1. The only owner of these
+             * is on the tmps stack, so it's safe to directly steal the SV
+ * rather than copying. This is common in things like function
+ * returns, map etc, which all return a list of such SVs.
+ *
+ * Note however something like @a = (f())[0,0], where there is
+             * a danger of the same SV being shared: this is avoided because
+ * when the SV is stored as $a[0], its ref count gets bumped,
+ * so the RC==1 test fails and the second element is copied
+ * instead.
+ *
+ * We also use one slot in the tmps stack to hold an extra
+ * ref to the array, to ensure it doesn't get prematurely
+ * freed. Again, this is removed before the end of this block.
+ *
+ * Note that OPpASSIGN_COMMON_AGG is used to flag a possible
+ * @a = ($a[0]) case, but the current implementation uses the
+ * same algorithm regardless, so ignores that flag. (It *is*
+ * used in the hash branch below, however).
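+             *
+             * e.g. while executing '@a = (1,2)', the tmps frame briefly
+             * looks like [ ..., @a, copy-of-1, copy-of-2 ], with the
+             * array at tmps_base - 1 and the copies at tmps_base
+             * onwards; all of these are removed again before the end
+             * of this block.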
+ */
+
+ /* Reserve slots for ary, plus the elems we're about to copy,
+ * then protect ary and temporarily void the remaining slots
+ * with &PL_sv_undef */
+ EXTEND_MORTAL(nelems + 1);
+ PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(ary);
+ tmps_base = PL_tmps_ix + 1;
+ for (i = 0; i < nelems; i++)
+ PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
+ PL_tmps_ix += nelems;
+
+ /* Make a copy of each RHS elem and save on the tmps_stack
+ * (or pass through where we can optimise away the copy) */
+
+ if (UNLIKELY(alias)) {
+ U32 lval = (gimme == G_ARRAY)
+ ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
for (svp = relem; svp <= lastrelem; svp++) {
- /* see comment in S_aassign_copy_common about SV_NOSTEAL */
- *svp = sv_mortalcopy_flags(*svp,
- SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
- TAINT_NOT;
+ SV *rsv = *svp;
+
+ SvGETMAGIC(rsv);
+ if (!SvROK(rsv))
+ DIE(aTHX_ "Assigned value is not a reference");
+ if (SvTYPE(SvRV(rsv)) > SVt_PVLV)
+ /* diag_listed_as: Assigned value is not %s reference */
+ DIE(aTHX_
+ "Assigned value is not a SCALAR reference");
+ if (lval)
+ *svp = rsv = sv_mortalcopy(rsv);
+ /* XXX else check for weak refs? */
+ rsv = SvREFCNT_inc_NN(SvRV(rsv));
+ assert(tmps_base <= PL_tmps_max);
+ PL_tmps_stack[tmps_base++] = rsv;
}
- already_copied = TRUE;
}
+ else {
+ for (svp = relem; svp <= lastrelem; svp++) {
+ SV *rsv = *svp;
- av_clear(ary);
- if (relem <= lastrelem)
- av_extend(ary, lastrelem - relem);
-
- i = 0;
- while (relem <= lastrelem) { /* gobble up all the rest */
- SV **didstore;
- if (LIKELY(!alias)) {
- if (already_copied)
- sv = *relem;
+ if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
+ /* can skip the copy */
+ SvREFCNT_inc_simple_void_NN(rsv);
+ SvTEMP_off(rsv);
+ }
else {
- if (LIKELY(*relem))
- /* before newSV, in case it dies */
- SvGETMAGIC(*relem);
- sv = newSV(0);
+ SV *nsv;
+ /* do get before newSV, in case it dies and leaks */
+ SvGETMAGIC(rsv);
+ nsv = newSV(0);
/* see comment in S_aassign_copy_common about
* SV_NOSTEAL */
- sv_setsv_flags(sv, *relem,
- (SV_DO_COW_SVSETSV|SV_NOSTEAL));
- *relem = sv;
+ sv_setsv_flags(nsv, rsv,
+ (SV_DO_COW_SVSETSV|SV_NOSTEAL));
+ rsv = *svp = nsv;
}
- }
- else {
- if (!already_copied)
- SvGETMAGIC(*relem);
- if (!SvROK(*relem))
- DIE(aTHX_ "Assigned value is not a reference");
- if (SvTYPE(SvRV(*relem)) > SVt_PVLV)
- /* diag_listed_as: Assigned value is not %s reference */
- DIE(aTHX_
- "Assigned value is not a SCALAR reference");
- if (lval && !already_copied)
- *relem = sv_mortalcopy(*relem);
- /* XXX else check for weak refs? */
- sv = SvREFCNT_inc_NN(SvRV(*relem));
- }
- relem++;
- if (already_copied)
- SvREFCNT_inc_simple_void_NN(sv); /* undo mortal free */
- didstore = av_store(ary,i++,sv);
- if (magic) {
- if (!didstore)
- sv_2mortal(sv);
- if (SvSMAGICAL(sv))
- mg_set(sv);
- }
- TAINT_NOT;
- }
+
+ assert(tmps_base <= PL_tmps_max);
+ PL_tmps_stack[tmps_base++] = rsv;
+ }
+ }
+
+ if (SvRMAGICAL(ary) || AvFILLp(ary) >= 0) /* may be non-empty */
+ av_clear(ary);
+
+ /* store in the array, the SVs that are in the tmps stack */
+
+ tmps_base -= nelems;
+
+ if (SvMAGICAL(ary) || SvREADONLY(ary) || !AvREAL(ary)) {
+ /* for arrays we can't cheat with, use the official API */
+ av_extend(ary, nelems - 1);
+ for (i = 0; i < nelems; i++) {
+ SV **svp = &(PL_tmps_stack[tmps_base + i]);
+ SV *rsv = *svp;
+ /* A tied store won't take ownership of rsv, so keep
+ * the 1 refcnt on the tmps stack; otherwise disarm
+ * the tmps stack entry */
+ if (av_store(ary, i, rsv))
+ *svp = &PL_sv_undef;
+ /* av_store() may have added set magic to rsv */;
+ SvSETMAGIC(rsv);
+ }
+ /* disarm ary refcount: see comments below about leak */
+ PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
+ }
+ else {
+ /* directly access/set the guts of the AV */
+ SSize_t fill = nelems - 1;
+ if (fill > AvMAX(ary))
+ av_extend_guts(ary, fill, &AvMAX(ary), &AvALLOC(ary),
+ &AvARRAY(ary));
+ AvFILLp(ary) = fill;
+ Copy(&(PL_tmps_stack[tmps_base]), AvARRAY(ary), nelems, SV*);
+ /* Quietly remove all the SVs from the tmps stack slots,
+ * since ary has now taken ownership of the refcnt.
+ * Also remove ary: which will now leak if we die before
+ * the SvREFCNT_dec_NN(ary) below */
+ if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
+ Move(&PL_tmps_stack[tmps_base + nelems],
+ &PL_tmps_stack[tmps_base - 1],
+ PL_tmps_ix - (tmps_base + nelems) + 1,
+ SV*);
+ PL_tmps_ix -= (nelems + 1);
+ }
+
if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
+            /* it's assumed @ISA set magic can't die and leak ary */
SvSETMAGIC(MUTABLE_SV(ary));
- LEAVE;
- break;
+ SvREFCNT_dec_NN(ary);
+
+ relem = lastrelem + 1;
+ goto no_relems;
}
case SVt_PVHV: { /* normal hash */
- SV *tmpstr;
- int odd;
- int duplicates = 0;
- SV** topelem = relem;
- SV **firsthashrelem = relem;
- bool already_copied = FALSE;
-
- hash = MUTABLE_HV(sv);
- magic = SvMAGICAL(hash) != 0;
-
- odd = ((lastrelem - firsthashrelem)&1)? 0 : 1;
- if (UNLIKELY(odd)) {
- do_oddball(lastrelem, firsthashrelem);
- /* we have firstlelem to reuse, it's not needed anymore
- */
- *(lastrelem+1) = &PL_sv_undef;
+
+ SV **svp;
+ bool dirty_tmps;
+ SSize_t i;
+ SSize_t tmps_base;
+ SSize_t nelems = lastrelem - relem + 1;
+ HV *hash = MUTABLE_HV(lsv);
+
+ if (UNLIKELY(nelems & 1)) {
+ do_oddball(lastrelem, relem);
+ /* we have firstlelem to reuse, it's not needed any more */
+ *++lastrelem = &PL_sv_undef;
+ nelems++;
+ }
+
+ /* See the SVt_PVAV branch above for a long description of
+ * how the following all works. The main difference for hashes
+ * is that we treat keys and values separately (and have
+ * separate loops for them): as for arrays, values are always
+ * copied (except for the SvTEMP optimisation), since they
+ * need to be stored in the hash; while keys are only
+ * processed where they might get prematurely freed or
+ * whatever. */
+
+ /* tmps stack slots:
+ * * reserve a slot for the hash keepalive;
+ * * reserve slots for the hash values we're about to copy;
+ * * preallocate for the keys we'll possibly copy or refcount bump
+ * later;
+ * then protect hash and temporarily void the remaining
+ * value slots with &PL_sv_undef */
+ EXTEND_MORTAL(nelems + 1);
+
+ /* convert to number of key/value pairs */
+ nelems >>= 1;
+
+ PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(hash);
+ tmps_base = PL_tmps_ix + 1;
+ for (i = 0; i < nelems; i++)
+ PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
+ PL_tmps_ix += nelems;
+
+ /* Make a copy of each RHS hash value and save on the tmps_stack
+ * (or pass through where we can optimise away the copy) */
+
+ for (svp = relem + 1; svp <= lastrelem; svp += 2) {
+ SV *rsv = *svp;
+
+ if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
+ /* can skip the copy */
+ SvREFCNT_inc_simple_void_NN(rsv);
+ SvTEMP_off(rsv);
+ }
+ else {
+ SV *nsv;
+ /* do get before newSV, in case it dies and leaks */
+ SvGETMAGIC(rsv);
+ nsv = newSV(0);
+ /* see comment in S_aassign_copy_common about
+ * SV_NOSTEAL */
+ sv_setsv_flags(nsv, rsv,
+ (SV_DO_COW_SVSETSV|SV_NOSTEAL));
+ rsv = *svp = nsv;
}
- ENTER;
- SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
+ assert(tmps_base <= PL_tmps_max);
+ PL_tmps_stack[tmps_base++] = rsv;
+ }
+ tmps_base -= nelems;
- /* We need to clear hash. The is a danger that if we do this,
- * elements on the RHS may be prematurely freed, e.g.
- * %h = (foo => $h{bar});
- * In the case of possible commonality, make a copy of each
- * RHS SV *before* clearing the hash, and add a reference
- * from the tmps stack, so that it doesn't leak on death.
- */
- if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
- && (relem <= lastrelem)
- && (magic || HvUSEDKEYS(hash)))
- {
- SV **svp;
- EXTEND_MORTAL(lastrelem - relem + 1);
- for (svp = relem; svp <= lastrelem; svp++) {
+ /* possibly protect keys */
+
+ if (UNLIKELY(gimme == G_ARRAY)) {
+ /* handle e.g.
+ * @a = ((%h = ($$r, 1)), $r = "x");
+ * $_++ for %h = (1,2,3,4);
+ */
+ EXTEND_MORTAL(nelems);
+ for (svp = relem; svp <= lastrelem; svp += 2)
+ *svp = sv_mortalcopy_flags(*svp,
+ SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
+ }
+ else if (PL_op->op_private & OPpASSIGN_COMMON_AGG) {
+ /* for possible commonality, e.g.
+ * %h = ($h{a},1)
+ * avoid premature freeing RHS keys by mortalising
+ * them.
+ * For a magic element, make a copy so that its magic is
+ * called *before* the hash is emptied (which may affect
+ * a tied value for example).
+ * In theory we should check for magic keys in all
+ * cases, not just under OPpASSIGN_COMMON_AGG, but in
+ * practice, !OPpASSIGN_COMMON_AGG implies only
+ * constants or padtmps on the RHS.
+ */
+ EXTEND_MORTAL(nelems);
+ for (svp = relem; svp <= lastrelem; svp += 2) {
+ SV *rsv = *svp;
+ if (UNLIKELY(SvGMAGICAL(rsv))) {
+ SSize_t n;
*svp = sv_mortalcopy_flags(*svp,
SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
- TAINT_NOT;
+ /* allow other branch to continue pushing
+ * onto tmps stack without checking each time */
+ n = (lastrelem - relem) >> 1;
+ EXTEND_MORTAL(n);
}
- already_copied = TRUE;
+ else
+ PL_tmps_stack[++PL_tmps_ix] =
+ SvREFCNT_inc_simple_NN(rsv);
}
+ }
- hv_clear(hash);
-
- while (LIKELY(relem < lastrelem+odd)) { /* gobble up all the rest */
- HE *didstore;
- assert(*relem);
- /* Copy the key if aassign is called in lvalue context,
- to avoid having the next op modify our rhs. Copy
- it also if it is gmagical, lest it make the
- hv_store_ent call below croak, leaking the value. */
- sv = (lval || SvGMAGICAL(*relem)) && !already_copied
- ? sv_mortalcopy(*relem)
- : *relem;
- relem++;
- assert(*relem);
- if (already_copied)
- tmpstr = *relem++;
- else {
- SvGETMAGIC(*relem);
- tmpstr = newSV(0);
- sv_setsv_nomg(tmpstr,*relem++); /* value */
- }
+ if (SvRMAGICAL(hash) || HvUSEDKEYS(hash))
+ hv_clear(hash);
- if (gimme == G_ARRAY) {
- if (hv_exists_ent(hash, sv, 0))
- /* key overwrites an existing entry */
- duplicates += 2;
- else {
- /* copy element back: possibly to an earlier
- * stack location if we encountered dups earlier,
- * possibly to a later stack location if odd */
- *topelem++ = sv;
- *topelem++ = tmpstr;
- }
- }
- if (already_copied)
- SvREFCNT_inc_simple_void_NN(tmpstr); /* undo mortal free */
- didstore = hv_store_ent(hash,sv,tmpstr,0);
- if (magic) {
- if (!didstore) sv_2mortal(tmpstr);
- SvSETMAGIC(tmpstr);
+ /* now assign the keys and values to the hash */
+
+ dirty_tmps = FALSE;
+
+ if (UNLIKELY(gimme == G_ARRAY)) {
+ /* @a = (%h = (...)) etc */
+ SV **svp;
+ SV **topelem = relem;
+
+ for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
+ SV *key = *svp++;
+ SV *val = *svp;
+ /* remove duplicates from list we return */
+ if (!hv_exists_ent(hash, key, 0)) {
+ /* copy key back: possibly to an earlier
+ * stack location if we encountered dups earlier,
+ * The values will be updated later
+ */
+ *topelem = key;
+ topelem += 2;
}
- TAINT_NOT;
- }
- LEAVE;
- if (duplicates && gimme == G_ARRAY) {
+ /* A tied store won't take ownership of val, so keep
+ * the 1 refcnt on the tmps stack; otherwise disarm
+ * the tmps stack entry */
+ if (hv_store_ent(hash, key, val, 0))
+ PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
+ else
+ dirty_tmps = TRUE;
+ /* hv_store_ent() may have added set magic to val */;
+ SvSETMAGIC(val);
+ }
+ if (topelem < svp) {
/* at this point we have removed the duplicate key/value
* pairs from the stack, but the remaining values may be
* wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
* the (a 2), but the stack now probably contains
* (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
* obliterates the earlier key. So refresh all values. */
- lastrelem -= duplicates;
- relem = firsthashrelem;
- while (relem < lastrelem+odd) {
+ lastrelem = topelem - 1;
+ while (relem < lastrelem) {
HE *he;
he = hv_fetch_ent(hash, *relem++, 0, 0);
*relem++ = (he ? HeVAL(he) : &PL_sv_undef);
}
}
- if (odd && gimme == G_ARRAY) lastrelem++;
- }
- break;
+ }
+ else {
+ SV **svp;
+ for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
+ SV *key = *svp++;
+ SV *val = *svp;
+ if (hv_store_ent(hash, key, val, 0))
+ PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
+ else
+ dirty_tmps = TRUE;
+ /* hv_store_ent() may have added set magic to val */;
+ SvSETMAGIC(val);
+ }
+ }
+
+ if (dirty_tmps) {
+                /* there are still some 'live' refcounts on the tmps stack
+ * - usually caused by storing into a tied hash. So let
+ * free_tmps() do the proper but slow job later.
+ * Just disarm hash refcount: see comments below about leak
+ */
+ PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
+ }
+ else {
+ /* Quietly remove all the SVs from the tmps stack slots,
+ * since hash has now taken ownership of the refcnt.
+ * Also remove hash: which will now leak if we die before
+ * the SvREFCNT_dec_NN(hash) below */
+ if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
+ Move(&PL_tmps_stack[tmps_base + nelems],
+ &PL_tmps_stack[tmps_base - 1],
+ PL_tmps_ix - (tmps_base + nelems) + 1,
+ SV*);
+ PL_tmps_ix -= (nelems + 1);
+ }
+
+ SvREFCNT_dec_NN(hash);
+
+ relem = lastrelem + 1;
+ goto no_relems;
+ }
+
default:
- if (SvIMMORTAL(sv)) {
- if (relem <= lastrelem)
- relem++;
- break;
- }
- if (relem <= lastrelem) {
- if (UNLIKELY(
- SvTEMP(sv) && !SvSMAGICAL(sv) && SvREFCNT(sv) == 1 &&
- (!isGV_with_GP(sv) || SvFAKE(sv)) && ckWARN(WARN_MISC)
- ))
- Perl_warner(aTHX_
- packWARN(WARN_MISC),
- "Useless assignment to a temporary"
- );
- sv_setsv(sv, *relem);
- *(relem++) = sv;
- }
- else
- sv_setsv(sv, &PL_sv_undef);
- SvSETMAGIC(sv);
+ if (!SvIMMORTAL(lsv)) {
+ SV *ref;
+
+ if (UNLIKELY(
+ SvTEMP(lsv) && !SvSMAGICAL(lsv) && SvREFCNT(lsv) == 1 &&
+ (!isGV_with_GP(lsv) || SvFAKE(lsv)) && ckWARN(WARN_MISC)
+ ))
+ Perl_warner(aTHX_
+ packWARN(WARN_MISC),
+ "Useless assignment to a temporary"
+ );
+
+ /* avoid freeing $$lsv if it might be needed for further
+ * elements, e.g. ($ref, $foo) = (1, $$ref) */
+ if ( SvROK(lsv)
+ && ( ((ref = SvRV(lsv)), SvREFCNT(ref)) == 1)
+ && lelem <= lastlelem
+ ) {
+ SSize_t ix;
+ SvREFCNT_inc_simple_void_NN(ref);
+ /* an unrolled sv_2mortal */
+ ix = ++PL_tmps_ix;
+ if (UNLIKELY(ix >= PL_tmps_max))
+ /* speculatively grow enough to cover other
+ * possible refs */
+ ix = tmps_grow_p(ix + (lastlelem - lelem));
+ PL_tmps_stack[ix] = ref;
+ }
+
+ sv_setsv(lsv, *relem);
+ *relem = lsv;
+ SvSETMAGIC(lsv);
+ }
+ if (++relem > lastrelem)
+ goto no_relems;
break;
+ } /* switch */
+ } /* while */
+
+
+ no_relems:
+
+ /* simplified lelem loop for when there are no relems left */
+ while (LIKELY(lelem <= lastlelem)) {
+ SV *lsv = *lelem++;
+
+ TAINT_NOT; /* Each item stands on its own, taintwise. */
+
+ if (UNLIKELY(!lsv)) {
+ lsv = *lelem++;
+ ASSUME(SvTYPE(lsv) == SVt_PVAV);
}
- }
+
+ switch (SvTYPE(lsv)) {
+ case SVt_PVAV:
+ if (SvRMAGICAL(lsv) || AvFILLp((SV*)lsv) >= 0) {
+ av_clear((AV*)lsv);
+ if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
+ SvSETMAGIC(lsv);
+ }
+ break;
+
+ case SVt_PVHV:
+ if (SvRMAGICAL(lsv) || HvUSEDKEYS((HV*)lsv))
+ hv_clear((HV*)lsv);
+ break;
+
+ default:
+ if (!SvIMMORTAL(lsv)) {
+ sv_set_undef(lsv);
+ SvSETMAGIC(lsv);
+ *relem++ = lsv;
+ }
+ break;
+ } /* switch */
+ } /* while */
+
+ TAINT_NOT; /* result of list assign isn't tainted */
+
if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
/* Will be used to set PL_tainting below */
Uid_t tmp_uid = PerlProc_getuid();
else if (gimme == G_SCALAR) {
dTARGET;
SP = firstrelem;
- SETi(lastrelem - firstrelem + 1);
- }
- else {
- if (ary || hash)
- /* note that in this case *firstlelem may have been overwritten
- by sv_undef in the odd hash case */
- SP = lastrelem;
- else {
- SP = firstrelem + (lastlelem - firstlelem);
- lelem = firstlelem + (relem - firstrelem);
- while (relem <= SP)
- *relem++ = (lelem <= lastlelem) ? *lelem++ : &PL_sv_undef;
- }
+ EXTEND(SP,1);
+ SETi(firstlelem - firstrelem);
}
+ else
+ SP = relem - 1;
RETURN;
}
const char *truebase; /* Start of string */
REGEXP *rx = PM_GETRE(pm);
bool rxtainted;
- const I32 gimme = GIMME_V;
+ const U8 gimme = GIMME_V;
STRLEN len;
const I32 oldsave = PL_savestack_ix;
I32 had_zerolen = 0;
goto nope;
}
- /* empty pattern special-cased to use last successful pattern if
- possible, except for qr// */
- if (!ReANY(rx)->mother_re && !RX_PRELEN(rx)
- && PL_curpm) {
- pm = PL_curpm;
- rx = PM_GETRE(pm);
+ /* handle the empty pattern */
+ if (!RX_PRELEN(rx) && PL_curpm && !ReANY(rx)->mother_re) {
+ if (PL_curpm == PL_reg_curpm) {
+ if (PL_curpm_under) {
+ if (PL_curpm_under == PL_reg_curpm) {
+ Perl_croak(aTHX_ "Infinite recursion via empty pattern");
+ } else {
+ pm = PL_curpm_under;
+ }
+ }
+ } else {
+ pm = PL_curpm;
+ }
+ rx = PM_GETRE(pm);
}
if (RX_MINLEN(rx) >= 0 && (STRLEN)RX_MINLEN(rx) > len) {
DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
- UVuf" < %"IVdf")\n",
+ UVuf " < %" IVdf ")\n",
(UV)len, (IV)RX_MINLEN(rx)));
goto nope;
}
if (UNLIKELY(RX_OFFS(rx)[i].end < 0 || RX_OFFS(rx)[i].start < 0
|| len < 0 || len > strend - s))
DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
- "start=%ld, end=%ld, s=%p, strend=%p, len=%"UVuf,
+ "start=%ld, end=%ld, s=%p, strend=%p, len=%" UVuf,
(long) i, (long) RX_OFFS(rx)[i].start,
(long)RX_OFFS(rx)[i].end, s, strend, (UV) len);
sv_setpvn(*SP, s, len);
PerlIO *fp;
IO * const io = GvIO(PL_last_in_gv);
const I32 type = PL_op->op_type;
- const I32 gimme = GIMME_V;
+ const U8 gimme = GIMME_V;
if (io) {
const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
&& ckWARN(WARN_MISC)))
Perl_warner(aTHX_ packWARN(WARN_MISC),
- "Use of reference \"%"SVf"\" as array index",
+ "Use of reference \"%" SVf "\" as array index",
SVfARG(elemsv));
/* the only time that S_find_uninit_var() needs this
* is to determine which index value triggered the
PP(pp_iter)
{
- dSP;
PERL_CONTEXT *cx;
SV *oldsv;
SV **itersvp;
+ SV *retsv;
- EXTEND(SP, 1);
- cx = &cxstack[cxstack_ix];
+ SV *sv;
+ AV *av;
+ IV ix;
+ IV inc;
+
+ cx = CX_CUR();
itersvp = CxITERVAR(cx);
+ assert(itersvp);
switch (CxTYPE(cx)) {
It has SvPVX of "" and SvCUR of 0, which is what we want. */
STRLEN maxlen = 0;
const char *max = SvPV_const(end, maxlen);
+ if (DO_UTF8(end) && IN_UNI_8_BIT)
+ maxlen = sv_len_utf8_nomg(end);
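+        /* (with 'unicode_strings' in effect, measure the range end in
+         * characters, not bytes, so the length comparison below isn't
+         * skewed by multi-byte UTF-8) */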
if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
- RETPUSHNO;
+ goto retno;
oldsv = *itersvp;
- if (LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
+ /* NB: on the first iteration, oldsv will have a ref count of at
+ * least 2 (one extra from blk_loop.itersave), so the GV or pad
+ * slot will get localised; on subsequent iterations the RC==1
+ * optimisation may kick in and the SV will be reused. */
+ if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
/* safe to reuse old SV */
sv_setsv(oldsv, cur);
}
* completely new SV for closures/references to work as
* they used to */
*itersvp = newSVsv(cur);
- SvREFCNT_dec_NN(oldsv);
+ SvREFCNT_dec(oldsv);
}
if (strEQ(SvPVX_const(cur), max))
sv_setiv(cur, 0); /* terminate next time */
{
IV cur = cx->blk_loop.state_u.lazyiv.cur;
if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
- RETPUSHNO;
+ goto retno;
oldsv = *itersvp;
- /* don't risk potential race */
- if (LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
+ /* see NB comment above */
+ if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
/* safe to reuse old SV */
- sv_setiv(oldsv, cur);
+
+ if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV))
+ == SVt_IV)
+ {
+ /* Cheap SvIOK_only().
+ * Assert that flags which SvIOK_only() would test or
+ * clear can't be set, because we're SVt_IV */
+ assert(!(SvFLAGS(oldsv) &
+ (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK)))));
+ SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK);
+ /* SvIV_set() where sv_any points to head */
+ oldsv->sv_u.svu_iv = cur;
+
+ }
+ else
+ sv_setiv(oldsv, cur);
}
else
{
* completely new SV for closures/references to work as they
* used to */
*itersvp = newSViv(cur);
- SvREFCNT_dec_NN(oldsv);
+ SvREFCNT_dec(oldsv);
}
if (UNLIKELY(cur == IV_MAX)) {
break;
}
- case CXt_LOOP_FOR: /* iterate array */
- {
-
- AV *av = cx->blk_loop.state_u.ary.ary;
- SV *sv;
- bool av_is_stack = FALSE;
- IV ix;
-
- if (!av) {
- av_is_stack = TRUE;
- av = PL_curstack;
- }
- if (PL_op->op_private & OPpITER_REVERSED) {
- ix = --cx->blk_loop.state_u.ary.ix;
- if (UNLIKELY(ix <= (av_is_stack ? cx->blk_loop.resetsp : -1)))
- RETPUSHNO;
- }
- else {
- ix = ++cx->blk_loop.state_u.ary.ix;
- if (UNLIKELY(ix > (av_is_stack ? cx->blk_oldsp : AvFILL(av))))
- RETPUSHNO;
- }
-
- if (UNLIKELY(SvMAGICAL(av) || AvREIFY(av))) {
+ case CXt_LOOP_LIST: /* for (1,2,3) */
+
+ assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */
+ inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
+ ix = (cx->blk_loop.state_u.stack.ix += inc);
+ if (UNLIKELY(inc > 0
+ ? ix > cx->blk_oldsp
+ : ix <= cx->blk_loop.state_u.stack.basesp)
+ )
+ goto retno;
+
+ sv = PL_stack_base[ix];
+ av = NULL;
+ goto loop_ary_common;
+
+ case CXt_LOOP_ARY: /* for (@ary) */
+
+ av = cx->blk_loop.state_u.ary.ary;
+ inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
+ ix = (cx->blk_loop.state_u.ary.ix += inc);
+ if (UNLIKELY(inc > 0
+ ? ix > AvFILL(av)
+ : ix < 0)
+ )
+ goto retno;
+
+ if (UNLIKELY(SvRMAGICAL(av))) {
SV * const * const svp = av_fetch(av, ix, FALSE);
sv = svp ? *svp : NULL;
}
sv = AvARRAY(av)[ix];
}
+ loop_ary_common:
+
if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
SvSetMagicSV(*itersvp, sv);
break;
SvREFCNT_inc_simple_void_NN(sv);
}
}
- else if (!av_is_stack) {
+ else if (av) {
sv = newSVavdefelem(av, ix, 0);
}
else
*itersvp = sv;
SvREFCNT_dec(oldsv);
break;
- }
default:
DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
}
- RETPUSHYES;
+
+ retsv = &PL_sv_yes;
+ if (0) {
+ retno:
+ retsv = &PL_sv_no;
+ }
+ /* pp_enteriter should have pre-extended the stack */
+ assert(PL_stack_sp < PL_stack_max);
+    *++PL_stack_sp = retsv;
+
+ return PL_op->op_next;
}
/*
STRLEN slen;
bool doutf8 = FALSE; /* whether replacement is in utf8 */
#ifdef PERL_ANY_COW
- bool is_cow;
+ bool was_cow;
#endif
SV *nsv = NULL;
/* known replacement string? */
SvGETMAGIC(TARG); /* must come before cow check */
#ifdef PERL_ANY_COW
- /* Awooga. Awooga. "bool" types that are actually char are dangerous,
- because they make integers such as 256 "false". */
- is_cow = SvIsCOW(TARG) ? TRUE : FALSE;
-#else
- if (SvIsCOW(TARG))
- sv_force_normal_flags(TARG,0);
+ /* note that a string might get converted to COW during matching */
+ was_cow = cBOOL(SvIsCOW(TARG));
+#endif
+ if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
+#ifndef PERL_ANY_COW
+ if (SvIsCOW(TARG))
+ sv_force_normal_flags(TARG,0);
#endif
- if (!(rpm->op_pmflags & PMf_NONDESTRUCT)
- && (SvREADONLY(TARG)
- || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
- || SvTYPE(TARG) > SVt_PVLV)
- && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
- Perl_croak_no_modify();
+ if ((SvREADONLY(TARG)
+ || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
+ || SvTYPE(TARG) > SVt_PVLV)
+ && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
+ Perl_croak_no_modify();
+ }
PUTBACK;
orig = SvPV_nomg(TARG, len);
/* note we don't (yet) force the var into being a string; if we fail
- * to match, we leave as-is; on successful match howeverm, we *will*
+ * to match, we leave as-is; on successful match however, we *will*
* coerce into a string, then repeat the match */
if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
force_on_match = 1;
position, once with zero-length,
second time with non-zero. */
- if (!RX_PRELEN(rx) && PL_curpm
- && !ReANY(rx)->mother_re) {
- pm = PL_curpm;
- rx = PM_GETRE(pm);
+ /* handle the empty pattern */
+ if (!RX_PRELEN(rx) && PL_curpm && !ReANY(rx)->mother_re) {
+ if (PL_curpm == PL_reg_curpm) {
+ if (PL_curpm_under) {
+ if (PL_curpm_under == PL_reg_curpm) {
+ Perl_croak(aTHX_ "Infinite recursion via empty pattern");
+ } else {
+ pm = PL_curpm_under;
+ }
+ }
+ } else {
+ pm = PL_curpm;
+ }
+ rx = PM_GETRE(pm);
}
#ifdef PERL_SAWAMPERSAND
if (DO_UTF8(TARG) && !doutf8) {
nsv = sv_newmortal();
SvSetSV(nsv, dstr);
- if (IN_ENCODING)
- sv_recode_to_utf8(nsv, _get_encoding());
- else
- sv_utf8_upgrade(nsv);
+ sv_utf8_upgrade(nsv);
c = SvPV_const(nsv, clen);
doutf8 = TRUE;
}
/* can do inplace substitution? */
if (c
#ifdef PERL_ANY_COW
- && !is_cow
+ && !was_cow
#endif
&& (I32)clen <= RX_MINLENRET(rx)
&& ( once
{
#ifdef PERL_ANY_COW
+ /* string might have got converted to COW since we set was_cow */
if (SvIsCOW(TARG)) {
if (!force_on_match)
goto have_a_cow;
* searching for places in this sub that uses a particular var:
* iters maxiters r_flags oldsave rxtainted orig dstr targ
* s m strend rx once */
- PUSHSUBST(cx);
+ CX_PUSHSUBST(cx);
RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
}
first = TRUE;
first = FALSE;
}
else {
- if (IN_ENCODING) {
- if (!nsv) nsv = sv_newmortal();
- sv_copypv(nsv, repl);
- if (!DO_UTF8(nsv)) sv_recode_to_utf8(nsv, _get_encoding());
- sv_catsv(dstr, nsv);
- }
- else sv_catsv(dstr, repl);
+ sv_catsv(dstr, repl);
if (UNLIKELY(SvTAINTED(repl)))
rxtainted |= SUBST_TAINT_REPL;
}
/* All done yet? */
if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
I32 items;
- const I32 gimme = GIMME_V;
+ const U8 gimme = GIMME_V;
LEAVE_with_name("grep"); /* exit outer scope */
(void)POPMARK; /* pop src */
}
}
-PP(pp_leavesub)
+/* leave_adjust_stacks():
+ *
+ * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp),
+ * positioning them at to_sp+1 onwards, and do the equivalent of a
+ * FREETMPS and TAINT_NOT.
+ *
+ * Not intended to be called in void context.
+ *
+ * When leaving a sub, eval, do{} or other scope, the things that need
+ * doing to process the return args are:
+ * * in scalar context, only return the last arg (or PL_sv_undef if none);
+ * * for the types of return that return copies of their args (such
+ * as rvalue sub return), make a mortal copy of every return arg,
+ * except where we can optimise the copy away without it being
+ * semantically visible;
+ * * make sure that the arg isn't prematurely freed; in the case of an
+ * arg not copied, this may involve mortalising it. For example, in
+ * C<sub f { my $x = ...; $x }>, $x would be freed when we do
+ * CX_LEAVE_SCOPE(cx) unless it's protected or copied.
+ *
+ * What condition to use when deciding whether to pass the arg through
+ * or make a copy, is determined by the 'pass' arg; its valid values are:
+ * 0: rvalue sub/eval exit
+ * 1: other rvalue scope exit
+ * 2: :lvalue sub exit in rvalue context
+ * 3: :lvalue sub exit in lvalue context and other lvalue scope exits
+ *
+ * There is a big issue with doing a FREETMPS. We would like to free any
+ * temps created by the last statement which the sub executed, rather than
+ * leaving them for the caller. In a situation where a sub call isn't
+ * soon followed by a nextstate (e.g. nested recursive calls, a la
+ * fibonacci()), temps can accumulate, causing memory and performance
+ * issues.
+ *
+ * On the other hand, we don't want to free any TEMPs which are keeping
+ * alive any return args that we skipped copying; nor do we wish to undo
+ * any mortalising done here.
+ *
+ * The solution is to split the temps stack frame into two, with a cut
+ * point delineating the two halves. We arrange that by the end of this
+ * function, all the temps stack frame entries we wish to keep are in the
+ * range PL_tmps_floor+1.. tmps_base-1, while the ones to free now are in
+ * the range tmps_base .. PL_tmps_ix. During the course of this
+ * function, tmps_base starts off as PL_tmps_floor+1, then increases
+ * whenever we find or create a temp that we know should be kept. In
+ * general the stuff above tmps_base is undecided until we reach the end,
+ * and we may need a sort stage for that.
+ *
+ * To determine whether a TEMP is keeping a return arg alive, every
+ * arg that is kept rather than copied and which has the SvTEMP flag
+ * set, has the flag temporarily unset, to mark it. At the end we scan
+ * the temps stack frame above the cut for entries without SvTEMP and
+ * keep them, while turning SvTEMP on again. Note that if we die before
+ * the SvTEMP flags are set again, it's safe: at worst, subsequent use of
+ * those SVs may be slightly less efficient.
+ *
+ * In practice various optimisations for some common cases mean we can
+ * avoid most of the scanning and swapping about with the temps stack.
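+ *
+ * For example, pp_leavesub below processes an rvalue sub's return
+ * values in place with:
+ *
+ *     leave_adjust_stacks(oldsp, oldsp, gimme, 0);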
+ */
+
+void
+Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass)
{
+ dVAR;
dSP;
- SV **mark;
- SV **newsp;
- PMOP *newpm;
- I32 gimme;
+ SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
+ SSize_t nargs;
+
+ PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS;
+
+ TAINT_NOT;
+
+ if (gimme == G_ARRAY) {
+ nargs = SP - from_sp;
+ from_sp++;
+ }
+ else {
+ assert(gimme == G_SCALAR);
+ if (UNLIKELY(from_sp >= SP)) {
+ /* no return args */
+ assert(from_sp == SP);
+ EXTEND(SP, 1);
+ *++SP = &PL_sv_undef;
+ to_sp = SP;
+ nargs = 0;
+ }
+ else {
+ from_sp = SP;
+ nargs = 1;
+ }
+ }
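+
+    /* (so in scalar context only the topmost arg is processed; anything
+     * below it in the frame is discarded when PL_stack_sp is finally
+     * repositioned to to_sp) */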
+
+ /* common code for G_SCALAR and G_ARRAY */
+
+ tmps_base = PL_tmps_floor + 1;
+
+ assert(nargs >= 0);
+ if (nargs) {
+ /* pointer version of tmps_base. Not safe across temp stack
+ * reallocs. */
+ SV **tmps_basep;
+
+ EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
+ tmps_basep = PL_tmps_stack + tmps_base;
+
+ /* process each return arg */
+
+ do {
+ SV *sv = *from_sp++;
+
+ assert(PL_tmps_ix + nargs < PL_tmps_max);
+#ifdef DEBUGGING
+ /* PADTMPs with container set magic shouldn't appear in the
+ * wild. This assert is more important for pp_leavesublv(),
+ * but by testing for it here, we're more likely to catch
+ * bad cases (what with :lvalue subs not being widely
+ * deployed). The two issues are that for something like
+ * sub :lvalue { $tied{foo} }
+ * or
+ * sub :lvalue { substr($foo,1,2) }
+ * pp_leavesublv() will croak if the sub returns a PADTMP,
+ * and currently functions like pp_substr() return a mortal
+ * rather than using their PADTMP when returning a PVLV.
+ * This is because the PVLV will hold a ref to $foo,
+ * so $foo would get delayed in being freed while
+ * the PADTMP SV remained in the PAD.
+ * So if this assert fails it means either:
+ * 1) there is pp code similar to pp_substr that is
+ * returning a PADTMP instead of a mortal, and probably
+ * needs fixing, or
+ * 2) pp_leavesublv is making unwarranted assumptions
+ * about always croaking on a PADTMP
+ */
+ if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
+ MAGIC *mg;
+ for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
+ assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
+ }
+ }
+#endif
+
+ if (
+ pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
+ : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
+ : pass == 2 ? (!SvPADTMP(sv))
+ : 1)
+ {
+ /* pass through: skip copy for logic or optimisation
+ * reasons; instead mortalise it, except that ... */
+ *++to_sp = sv;
+
+ if (SvTEMP(sv)) {
+                /* ... since this SV is an SvTEMP, we don't need to
+ * re-mortalise it; instead we just need to ensure
+ * that its existing entry in the temps stack frame
+ * ends up below the cut and so avoids being freed
+ * this time round. We mark it as needing to be kept
+ * by temporarily unsetting SvTEMP; then at the end,
+ * we shuffle any !SvTEMP entries on the tmps stack
+ * back below the cut.
+ * However, there's a significant chance that there's
+ * a 1:1 correspondence between the first few (or all)
+ * elements in the return args stack frame and those
+                 * in the temps stack frame; e.g.:
+ * sub f { ....; map {...} .... },
+ * or if we're exiting multiple scopes and one of the
+ * inner scopes has already made mortal copies of each
+ * return arg.
+ *
+ * If so, this arg sv will correspond to the next item
+ * on the tmps stack above the cut, and so can be kept
+ * merely by moving the cut boundary up one, rather
+ * than messing with SvTEMP. If all args are 1:1 then
+ * we can avoid the sorting stage below completely.
+ *
+ * If there are no items above the cut on the tmps
+                 * stack, then the SvTEMP must come from an item
+ * below the cut, so there's nothing to do.
+ */
+ if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) {
+ if (sv == *tmps_basep)
+ tmps_basep++;
+ else
+ SvTEMP_off(sv);
+ }
+ }
+ else if (!SvPADTMP(sv)) {
+ /* mortalise arg to avoid it being freed during save
+ * stack unwinding. Pad tmps don't need mortalising as
+ * they're never freed. This is the equivalent of
+ * sv_2mortal(SvREFCNT_inc(sv)), except that:
+ * * it assumes that the temps stack has already been
+ * extended;
+ * * it puts the new item at the cut rather than at
+ * ++PL_tmps_ix, moving the previous occupant there
+ * instead.
+ */
+ if (!SvIMMORTAL(sv)) {
+ SvREFCNT_inc_simple_void_NN(sv);
+ SvTEMP_on(sv);
+ /* Note that if there's nothing above the cut,
+ * this copies the garbage one slot above
+ * PL_tmps_ix onto itself. This is harmless (the
+ * stack's already been extended), but might in
+ * theory trigger warnings from tools like ASan
+ */
+ PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
+ *tmps_basep++ = sv;
+ }
+ }
+ }
+ else {
+ /* Make a mortal copy of the SV.
+ * The following code is the equivalent of sv_mortalcopy()
+ * except that:
+ * * it assumes the temps stack has already been extended;
+ * * it optimises the copying for some simple SV types;
+ * * it puts the new item at the cut rather than at
+ * ++PL_tmps_ix, moving the previous occupant there
+ * instead.
+ */
+ SV *newsv = newSV(0);
+
+ PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
+ /* put it on the tmps stack early so it gets freed if we die */
+ *tmps_basep++ = newsv;
+ *++to_sp = newsv;
+
+ if (SvTYPE(sv) <= SVt_IV) {
+ /* arg must be one of undef, IV/UV, or RV: skip
+ * sv_setsv_flags() and do the copy directly */
+ U32 dstflags;
+ U32 srcflags = SvFLAGS(sv);
+
+ assert(!SvGMAGICAL(sv));
+ if (srcflags & (SVf_IOK|SVf_ROK)) {
+ SET_SVANY_FOR_BODYLESS_IV(newsv);
+
+ if (srcflags & SVf_ROK) {
+ newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
+ /* SV type plus flags */
+ dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
+ }
+ else {
+ /* both src and dst are <= SVt_IV, so sv_any
+ * points to the head; so access the heads
+ * directly rather than going via sv_any.
+ */
+ assert( &(sv->sv_u.svu_iv)
+ == &(((XPVIV*) SvANY(sv))->xiv_iv));
+ assert( &(newsv->sv_u.svu_iv)
+ == &(((XPVIV*) SvANY(newsv))->xiv_iv));
+ newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
+ /* SV type plus flags */
+ dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
+ |(srcflags & SVf_IVisUV));
+ }
+ }
+ else {
+ assert(!(srcflags & SVf_OK));
+ dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
+ }
+ SvFLAGS(newsv) = dstflags;
+
+ }
+ else {
+ /* do the full sv_setsv() */
+ SSize_t old_base;
+
+ SvTEMP_on(newsv);
+ old_base = tmps_basep - PL_tmps_stack;
+ SvGETMAGIC(sv);
+ sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
+ /* the mg_get or sv_setsv might have created new temps
+ * or realloced the tmps stack; regrow and reload */
+ EXTEND_MORTAL(nargs);
+ tmps_basep = PL_tmps_stack + old_base;
+ TAINT_NOT; /* Each item is independent */
+ }
+
+ }
+ } while (--nargs);
+
+ /* If there are any temps left above the cut, we need to sort
+ * them into those to keep and those to free. The only ones to
+ * keep are those for which we've temporarily unset SvTEMP.
+ * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
+ * swapping pairs as necessary. Stop when we meet in the middle.
+ */
+ {
+ SV **top = PL_tmps_stack + PL_tmps_ix;
+ while (tmps_basep <= top) {
+ SV *sv = *top;
+ if (SvTEMP(sv))
+ top--;
+ else {
+ SvTEMP_on(sv);
+ *top = *tmps_basep;
+ *tmps_basep = sv;
+ tmps_basep++;
+ }
+ }
+ }
+
+ tmps_base = tmps_basep - PL_tmps_stack;
+ }
+
+ PL_stack_sp = to_sp;
+
+ /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
+ while (PL_tmps_ix >= tmps_base) {
+ SV* const sv = PL_tmps_stack[PL_tmps_ix--];
+#ifdef PERL_POISON
+ PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
+#endif
+ if (LIKELY(sv)) {
+ SvTEMP_off(sv);
+ SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
+ }
+ }
+}
+
+
+/* also tail-called by pp_return */
+
+PP(pp_leavesub)
+{
+ U8 gimme;
PERL_CONTEXT *cx;
- SV *sv;
+ SV **oldsp;
+ OP *retop;
+
+ cx = CX_CUR();
+ assert(CxTYPE(cx) == CXt_SUB);
- if (CxMULTICALL(&cxstack[cxstack_ix])) {
+ if (CxMULTICALL(cx)) {
/* entry zero of a stack is always PL_sv_undef, which
* simplifies converting a '()' return into undef in scalar context */
assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
return 0;
}
- POPBLOCK(cx,newpm);
- cxstack_ix++; /* temporarily protect top context */
+ gimme = cx->blk_gimme;
+ oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */
- TAINT_NOT;
- if (gimme == G_SCALAR) {
- MARK = newsp + 1;
- if (LIKELY(MARK <= SP)) {
- if (cx->blk_sub.cv && CvDEPTH(cx->blk_sub.cv) > 1) {
- if (SvTEMP(TOPs) && SvREFCNT(TOPs) == 1
- && !SvMAGICAL(TOPs)) {
- *MARK = SvREFCNT_inc(TOPs);
- FREETMPS;
- sv_2mortal(*MARK);
- }
- else {
- sv = SvREFCNT_inc(TOPs); /* FREETMPS could clobber it */
- FREETMPS;
- *MARK = sv_mortalcopy(sv);
- SvREFCNT_dec_NN(sv);
- }
- }
- else if (SvTEMP(TOPs) && SvREFCNT(TOPs) == 1
- && !SvMAGICAL(TOPs)) {
- *MARK = TOPs;
- }
- else
- *MARK = sv_mortalcopy(TOPs);
- }
- else {
- MEXTEND(MARK, 0);
- *MARK = &PL_sv_undef;
- }
- SP = MARK;
- }
- else if (gimme == G_ARRAY) {
- for (MARK = newsp + 1; MARK <= SP; MARK++) {
- if (!SvTEMP(*MARK) || SvREFCNT(*MARK) != 1
- || SvMAGICAL(*MARK)) {
- *MARK = sv_mortalcopy(*MARK);
- TAINT_NOT; /* Each item is independent */
- }
- }
- }
- PUTBACK;
+ if (gimme == G_VOID)
+ PL_stack_sp = oldsp;
+ else
+ leave_adjust_stacks(oldsp, oldsp, gimme, 0);
+
+ CX_LEAVE_SCOPE(cx);
+ cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */
+ cx_popblock(cx);
+ retop = cx->blk_sub.retop;
+ CX_POP(cx);
- LEAVE;
- POPSUB(cx,sv); /* Stack values are safe: release CV and @_ ... */
- cxstack_ix--;
- PL_curpm = newpm; /* ... and pop $1 et al */
+ return retop;
+}
+
+
+/* clear (if possible) or abandon the current @_. If 'abandon' is true,
+ * forces an abandon */
+
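+/* (the cx_popsub() path uses this to leave a clean, empty AV in pad[0]
+ * for the next call; compare the assert on @_ in pp_entersub below) */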
+void
+Perl_clear_defarray(pTHX_ AV* av, bool abandon)
+{
+ const SSize_t fill = AvFILLp(av);
- LEAVESUB(sv);
- return cx->blk_sub.retop;
+ PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
+
+ if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) {
+ av_clear(av);
+ AvREIFY_only(av);
+ }
+ else {
+ AV *newav = newAV();
+ av_extend(newav, fill);
+ AvREIFY_only(newav);
+ PAD_SVl(0) = MUTABLE_SV(newav);
+ SvREFCNT_dec_NN(av);
+ }
}
+
PP(pp_entersub)
{
dSP; dPOPss;
GV *gv;
CV *cv;
PERL_CONTEXT *cx;
- I32 gimme;
- const bool hasargs = (PL_op->op_flags & OPf_STACKED) != 0;
+ I32 old_savestack_ix;
if (UNLIKELY(!sv))
- DIE(aTHX_ "Not a CODE reference");
- /* This is overwhelmingly the most common case: */
- if (!LIKELY(SvTYPE(sv) == SVt_PVGV && (cv = GvCVu((const GV *)sv)))) {
+ goto do_die;
+
+ /* Locate the CV to call:
+ * - most common case: RV->CV: f(), $ref->():
+ * note that if a sub is compiled before its caller is compiled,
+ * the stash entry will be a ref to a CV, rather than being a GV.
+ * - second most common case: CV: $ref->method()
+ */
+
+ /* a non-magic-RV -> CV ? */
+ if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
+ cv = MUTABLE_CV(SvRV(sv));
+ if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
+ goto do_ref;
+ }
+ else
+ cv = MUTABLE_CV(sv);
+
+ /* a CV ? */
+ if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
+ /* handle all the weird cases */
switch (SvTYPE(sv)) {
+ case SVt_PVLV:
+ if (!isGV_with_GP(sv))
+ goto do_default;
+ /* FALLTHROUGH */
case SVt_PVGV:
- we_have_a_glob:
- if (!(cv = GvCVu((const GV *)sv))) {
+ cv = GvCVu((const GV *)sv);
+ if (UNLIKELY(!cv)) {
HV *stash;
cv = sv_2cv(sv, &stash, &gv, 0);
- }
- if (!cv) {
- ENTER;
- goto try_autoload;
+ if (!cv) {
+ old_savestack_ix = PL_savestack_ix;
+ goto try_autoload;
+ }
}
break;
- case SVt_PVLV:
- if(isGV_with_GP(sv)) goto we_have_a_glob;
- /* FALLTHROUGH */
+
default:
- if (sv == &PL_sv_yes) { /* unfound import, ignore */
- if (hasargs)
- SP = PL_stack_base + POPMARK;
- else
- (void)POPMARK;
- if (GIMME_V == G_SCALAR)
- PUSHs(&PL_sv_undef);
- RETURN;
- }
+ do_default:
SvGETMAGIC(sv);
if (SvROK(sv)) {
- if (SvAMAGIC(sv)) {
+ do_ref:
+ if (UNLIKELY(SvAMAGIC(sv))) {
sv = amagic_deref_call(sv, to_cv_amg);
/* Don't SPAGAIN here. */
}
else {
const char *sym;
STRLEN len;
- if (!SvOK(sv))
+ if (UNLIKELY(!SvOK(sv)))
DIE(aTHX_ PL_no_usym, "a subroutine");
+
+ if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
+ if (PL_op->op_flags & OPf_STACKED) /* hasargs */
+ SP = PL_stack_base + POPMARK;
+ else
+ (void)POPMARK;
+ if (GIMME_V == G_SCALAR)
+ PUSHs(&PL_sv_undef);
+ RETURN;
+ }
+
sym = SvPV_nomg_const(sv, len);
if (PL_op->op_private & HINT_STRICT_REFS)
DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
break;
}
cv = MUTABLE_CV(SvRV(sv));
- if (SvTYPE(cv) == SVt_PVCV)
+ if (LIKELY(SvTYPE(cv) == SVt_PVCV))
break;
/* FALLTHROUGH */
case SVt_PVHV:
case SVt_PVAV:
+ do_die:
DIE(aTHX_ "Not a CODE reference");
- /* This is the second most common case: */
- case SVt_PVCV:
- cv = MUTABLE_CV(sv);
- break;
}
}
- ENTER;
-
- retry:
- if (UNLIKELY(CvCLONE(cv) && ! CvCLONED(cv)))
- DIE(aTHX_ "Closure prototype called");
- if (UNLIKELY(!CvROOT(cv) && !CvXSUB(cv))) {
+ /* At this point we want to save PL_savestack_ix, either by doing a
+ * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final
+     * CV we will be using (so we don't know whether it's XS, so we can't
+ * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on
+ * the save stack. So remember where we are currently on the save
+ * stack, and later update the CX or scopestack entry accordingly. */
+ old_savestack_ix = PL_savestack_ix;
+
+ /* these two fields are in a union. If they ever become separate,
+ * we have to test for both of them being null below */
+ assert(cv);
+ assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
+ while (UNLIKELY(!CvROOT(cv))) {
GV* autogv;
SV* sub_name;
/* anonymous or undef'd function leaves us no recourse */
if (CvLEXICAL(cv) && CvHASGV(cv))
- DIE(aTHX_ "Undefined subroutine &%"SVf" called",
+ DIE(aTHX_ "Undefined subroutine &%" SVf " called",
SVfARG(cv_name(cv, NULL, 0)));
if (CvANON(cv) || !CvHASGV(cv)) {
DIE(aTHX_ "Undefined subroutine called");
/* should call AUTOLOAD now? */
else {
try_autoload:
- if ((autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
- GvNAMEUTF8(gv) ? SVf_UTF8 : 0)))
- {
- cv = GvCV(autogv);
- }
- else {
- sorry:
- sub_name = sv_newmortal();
- gv_efullname3(sub_name, gv, NULL);
- DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
- }
+ autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
+ (GvNAMEUTF8(gv) ? SVf_UTF8 : 0)
+ |(PL_op->op_flags & OPf_REF
+ ? GV_AUTOLOAD_ISMETHOD
+ : 0));
+ cv = autogv ? GvCV(autogv) : NULL;
}
- if (!cv)
- goto sorry;
- goto retry;
+ if (!cv) {
+ sub_name = sv_newmortal();
+ gv_efullname3(sub_name, gv, NULL);
+ DIE(aTHX_ "Undefined subroutine &%" SVf " called", SVfARG(sub_name));
+ }
}
+ /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
+ if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
+ DIE(aTHX_ "Closure prototype called");
+
if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
&& !CvNODEBUG(cv)))
{
DIE(aTHX_ "No DB::sub routine defined");
}
- gimme = GIMME_V;
-
if (!(CvISXSUB(cv))) {
/* This path taken at least 75% of the time */
dMARK;
- PADLIST * const padlist = CvPADLIST(cv);
+ PADLIST *padlist;
I32 depth;
+ bool hasargs;
+ U8 gimme;
- PUSHBLOCK(cx, CXt_SUB, MARK);
- PUSHSUB(cx);
- cx->blk_sub.retop = PL_op->op_next;
- if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2)) {
- PERL_STACK_OVERFLOW_CHECK();
+ /* keep PADTMP args alive throughout the call (we need to do this
+ * because @_ isn't refcounted). Note that we create the mortals
+ * in the caller's tmps frame, so they won't be freed until after
+ * we return from the sub.
+ */
+ {
+ SV **svp = MARK;
+ while (svp < SP) {
+ SV *sv = *++svp;
+ if (!sv)
+ continue;
+ if (SvPADTMP(sv))
+ *svp = sv = sv_mortalcopy(sv);
+ SvTEMP_off(sv);
+ }
+ }
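+        /* (e.g. in 'f($x + 1)' the addition returns its PADTMP target;
+         * copying it to a mortal above stops a later use of that target
+         * from changing what $_[0] refers to) */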
+
+ gimme = GIMME_V;
+ cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix);
+ hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
+ cx_pushsub(cx, cv, PL_op->op_next, hasargs);
+
+ padlist = CvPADLIST(cv);
+ if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2))
pad_push(padlist, depth);
- }
PAD_SET_CUR_NOSAVE(padlist, depth);
if (LIKELY(hasargs)) {
AV *const av = MUTABLE_AV(PAD_SVl(0));
/* it's the responsibility of whoever leaves a sub to ensure
* that a clean, empty AV is left in pad[0]. This is normally
- * done by POPSUB() */
+ * done by cx_popsub() */
assert(!AvREAL(av) && AvFILLp(av) == -1);
items = SP - MARK;
if (UNLIKELY(items - 1 > AvMAX(av))) {
SV **ary = AvALLOC(av);
- AvMAX(av) = items - 1;
Renew(ary, items, SV*);
+ AvMAX(av) = items - 1;
AvALLOC(av) = ary;
AvARRAY(av) = ary;
}
Copy(MARK+1,AvARRAY(av),items,SV*);
AvFILLp(av) = items - 1;
-
- MARK = AvARRAY(av);
- while (items--) {
- if (*MARK)
- {
- if (SvPADTMP(*MARK)) {
- *MARK = sv_mortalcopy(*MARK);
- }
- SvTEMP_off(*MARK);
- }
- MARK++;
- }
}
- SAVETMPS;
if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
!CvLVALUE(cv)))
- DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
+ DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
SVfARG(cv_name(cv, NULL, 0)));
/* warning must come *after* we fully set up the context
* stuff so that __WARN__ handlers can safely dounwind()
}
else {
SSize_t markix = TOPMARK;
+ bool is_scalar;
+
+ ENTER;
+ /* pretend we did the ENTER earlier */
+ PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
SAVETMPS;
PUTBACK;
if (UNLIKELY(((PL_op->op_private
- & PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
+ & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
!CvLVALUE(cv)))
- DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
+ DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
SVfARG(cv_name(cv, NULL, 0)));
- if (UNLIKELY(!hasargs && GvAV(PL_defgv))) {
+ if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
/* Need to copy @_ to stack. Alternative may be to
* switch stack to @_, and copy return values
* back. This would allow popping @_ in XSUB, e.g.. XXXX */
}
/* Do we need to open block here? XXXX */
+ /* calculate gimme here as PL_op might get changed and then not
+ * restored until the LEAVE further down */
+ is_scalar = (GIMME_V == G_SCALAR);
+
/* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
assert(CvXSUB(cv));
CvXSUB(cv)(aTHX_ cv);
/* Enforce some sanity in scalar context. */
- if (gimme == G_SCALAR) {
+ if (is_scalar) {
SV **svp = PL_stack_base + markix + 1;
if (svp != PL_stack_sp) {
*svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
if (CvANON(cv))
Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
else {
- Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%"SVf"\"",
+ Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%" SVf "\"",
SVfARG(cv_name(cv,NULL,0)));
}
}
+
+
+/* like croak, but report in context of caller */
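+/* (e.g. an XS-level argument check can do
+ *      Perl_croak_caller("Usage: %s(ref)", "Foo::bar");
+ *  and the error will be reported at the perl-level call site) */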
+
+void
+Perl_croak_caller(const char *pat, ...)
+{
+ dTHX;
+ va_list args;
+ const PERL_CONTEXT *cx = caller_cx(0, NULL);
+
+ /* make error appear at call site */
+ assert(cx);
+ PL_curcop = cx->blk_oldcop;
+
+ va_start(args, pat);
+ vcroak(pat, &args);
+ NOT_REACHED; /* NOTREACHED */
+ va_end(args);
+}
+
+
PP(pp_aelem)
{
dSP;
if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
Perl_warner(aTHX_ packWARN(WARN_MISC),
- "Use of reference \"%"SVf"\" as array index",
+ "Use of reference \"%" SVf "\" as array index",
SVfARG(elemsv));
if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
RETPUSHUNDEF;
HV* stash;
SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
- ? (Perl_croak(aTHX_ "Can't call method \"%"SVf"\" without a "
+ ? (Perl_croak(aTHX_ "Can't call method \"%" SVf "\" without a "
"package or object reference", SVfARG(meth)),
(SV *)NULL)
: *(PL_stack_base + TOPMARK + 1);
if (UNLIKELY(!sv))
undefined:
- Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on an undefined value",
+ Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on an undefined value",
SVfARG(meth));
if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
else if (!SvOK(sv)) goto undefined;
else if (isGV_with_GP(sv)) {
if (!GvIO(sv))
- Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
+ Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
"without a package or object reference",
SVfARG(meth));
ob = sv;
/* this isn't the name of a filehandle either */
if (!packlen)
{
- Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
+ Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
"without a package or object reference",
SVfARG(meth));
}
&& (ob = MUTABLE_SV(GvIO((const GV *)ob)))
&& SvOBJECT(ob))))
{
- Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on unblessed reference",
- SVfARG((SvSCREAM(meth) && strEQ(SvPV_nolen_const(meth),"isa"))
+ Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on unblessed reference",
+ SVfARG((SvPOK(meth) && SvPVX(meth) == PL_isa_DOES)
? newSVpvs_flags("DOES", SVs_TEMP)
: meth));
}