PP(pp_nextstate)
{
PL_curcop = (COP*)PL_op;
- PL_sawalias = 0;
TAINT_NOT; /* Each statement is presumed innocent */
PL_stack_sp = PL_stack_base + cxstack[cxstack_ix].blk_oldsp;
FREETMPS;
PUSHs(save_scalar(cGVOP_gv));
else
PUSHs(GvSVn(cGVOP_gv));
- if (GvREFCNT(cGVOP_gv) > 1 || GvALIASED_SV(cGVOP_gv))
- PL_sawalias = TRUE;
RETURN;
}
return NORMAL;
}
-/* This is sometimes called directly by pp_coreargs and pp_grepstart. */
+/* This is sometimes called directly by pp_coreargs, pp_grepstart and
+ amagic_call. */
PP(pp_pushmark)
{
PUSHMARK(PL_stack_sp);
{
dSP;
XPUSHs(MUTABLE_SV(cGVOP_gv));
- if (isGV(cGVOP_gv)
- && (GvREFCNT(cGVOP_gv) > 1 || GvALIASED_SV(cGVOP_gv)))
- PL_sawalias = TRUE;
RETURN;
}
SV * const temp = left;
left = right; right = temp;
}
- if (TAINTING_get && UNLIKELY(TAINT_get) && !SvTAINTED(right))
+ assert(TAINTING_get || !TAINT_get);
+ if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
TAINT_NOT;
if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
/* *foo =\&bar */
assert(source);
assert(CvFLAGS(source) & CVf_CONST);
- SvREFCNT_inc_void(source);
+ SvREFCNT_inc_simple_void_NN(source);
SvREFCNT_dec_NN(upgraded);
SvRV_set(right, MUTABLE_SV(source));
}
}
if (!rcopied) {
- if (left == right)
- /* $r.$r: do magic twice: tied might return different 2nd time */
- SvGETMAGIC(right);
rpv = SvPV_nomg_const(right, rlen);
rbyte = !DO_UTF8(right);
}
if (lbyte != rbyte) {
- /* sv_utf8_upgrade_nomg() may reallocate the stack */
- PUTBACK;
if (lbyte)
sv_utf8_upgrade_nomg(TARG);
else {
sv_utf8_upgrade_nomg(right);
rpv = SvPV_nomg_const(right, rlen);
}
- SPAGAIN;
}
sv_catpvn_nomg(TARG, rpv, rlen);
}
-/* also used for: pp_i_predec() pp_i_preinc() pp_predec() */
+/* also used for: pp_i_preinc() */
+
+/* ++$x : pre-increment the SV on top of the stack in place.
+ * The SV is left on the stack, so no push/pop is needed and the op
+ * can end with "return NORMAL" rather than RETURN. */
PP(pp_preinc)
{
- dSP;
- const bool inc =
- PL_op->op_type == OP_PREINC || PL_op->op_type == OP_I_PREINC;
- if (UNLIKELY(SvTYPE(TOPs) >= SVt_PVAV || (isGV_with_GP(TOPs) && !SvFAKE(TOPs))))
- Perl_croak_no_modify();
- if (LIKELY(!SvREADONLY(TOPs) && !SvGMAGICAL(TOPs) && SvIOK_notUV(TOPs) && !SvNOK(TOPs) && !SvPOK(TOPs))
- && SvIVX(TOPs) != (inc ? IV_MAX : IV_MIN))
+ SV *sv = *PL_stack_sp;
+
+ /* Fast path: among the flags tested below, only SVf_IOK is set —
+  * i.e. a plain IV with no get-magic, not readonly/ROK, not a UV,
+  * and no cached NV/PV that would need invalidating — and the
+  * increment cannot overflow IV_MAX.  In that case bump the IV
+  * slot directly and skip sv_inc() entirely. */
+ if (LIKELY(((sv->sv_flags &
+ (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
+ SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
+ == SVf_IOK))
+ && SvIVX(sv) != IV_MAX)
{
- SvIV_set(TOPs, SvIVX(TOPs) + (inc ? 1 : -1));
- SvFLAGS(TOPs) &= ~(SVp_NOK|SVp_POK);
+ SvIV_set(sv, SvIVX(sv) + 1);
}
- else /* Do all the PERL_PRESERVE_IVUV conditionals in sv_inc */
- if (inc) sv_inc(TOPs);
- else sv_dec(TOPs);
- SvSETMAGIC(TOPs);
+ else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
+ sv_inc(sv);
+ /* trigger any set-magic on the (possibly tied/magical) variable */
+ SvSETMAGIC(sv);
+ return NORMAL;
+}
+
+
+/* also used for: pp_i_predec() */
+
+/* --$x : pre-decrement the SV on top of the stack in place.
+ * Mirror image of pp_preinc; see the comments there. */
+PP(pp_predec)
+{
+ SV *sv = *PL_stack_sp;
+
+ /* Fast path: a plain IOK-only SV (no magic, not readonly/ROK,
+  * not a UV, no cached NV/PV) whose decrement cannot underflow
+  * IV_MIN can have its IV slot decremented directly. */
+ if (LIKELY(((sv->sv_flags &
+ (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
+ SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
+ == SVf_IOK))
+ && SvIVX(sv) != IV_MIN)
+ {
+ SvIV_set(sv, SvIVX(sv) - 1);
+ }
+ else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
+ sv_dec(sv);
+ /* trigger any set-magic on the (possibly tied/magical) variable */
+ SvSETMAGIC(sv);
return NORMAL;
}
RETPUSHNO;
}
+
+
PP(pp_add)
{
dSP; dATARGET; bool useleft; SV *svl, *svr;
+
tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
svr = TOPs;
svl = TOPm1s;
- useleft = USE_LEFT(svl);
#ifdef PERL_PRESERVE_IVUV
+
+ /* special-case some simple common cases */
+ if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
+ IV il, ir;
+ U32 flags = (svl->sv_flags & svr->sv_flags);
+ if (flags & SVf_IOK) {
+ /* both args are simple IVs */
+ UV topl, topr;
+ il = SvIVX(svl);
+ ir = SvIVX(svr);
+ do_iv:
+ topl = ((UV)il) >> (UVSIZE * 8 - 2);
+ topr = ((UV)ir) >> (UVSIZE * 8 - 2);
+
+ /* if both are in a range that can't under/overflow, do a
+ * simple integer add: if the top of both numbers
+ * are 00 or 11, then it's safe */
+ if (!( ((topl+1) | (topr+1)) & 2)) {
+ SP--;
+ TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
+ SETs(TARG);
+ RETURN;
+ }
+ goto generic;
+ }
+ else if (flags & SVf_NOK) {
+ /* both args are NVs */
+ NV nl = SvNVX(svl);
+ NV nr = SvNVX(svr);
+
+ il = (IV)nl;
+ ir = (IV)nr;
+ if (nl == (NV)il && nr == (NV)ir)
+ /* nothing was lost by converting to IVs */
+ goto do_iv;
+ SP--;
+ TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
+ SETs(TARG);
+ RETURN;
+ }
+ }
+
+ generic:
+
+ useleft = USE_LEFT(svl);
/* We must see if we can perform the addition with integers if possible,
as the integer code detects overflow while the NV code doesn't.
If either argument hasn't had a numeric conversion yet attempt to get
if (aiv >= 0) {
auv = aiv;
auvok = 1; /* Now acting as a sign flag. */
- } else { /* 2s complement assumption for IV_MIN */
- auv = (UV)-aiv;
+ } else {
+ auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
}
}
a_valid = 1;
buv = biv;
buvok = 1;
} else
- buv = (UV)-biv;
+ buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
}
/* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
else "IV" now, independent of how it came in.
else {
/* Negate result */
if (result <= (UV)IV_MIN)
- SETi( -(IV)result );
+ SETi(result == (UV)IV_MIN
+ ? IV_MIN : -(IV)result);
else {
/* result valid, but out of range for IV. */
SETn( -(NV)result );
} /* Overflow, drop through to NVs. */
}
}
+
+#else
+ useleft = USE_LEFT(svl);
#endif
+
{
NV value = SvNV_nomg(svr);
(void)POPs;
}
}
+
+/* Do a mark and sweep with the SVf_BREAK flag to detect elements which
+ * are common to both the LHS and RHS of an aassign, and replace them
+ * with copies. All these copies are made before the actual list assign is
+ * done.
+ *
+ * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
+ * element ($b) to the first LH element ($a), modifies $a; when the
+ * second assignment is done, the second RH element now has the wrong
+ * value. So we initially replace the RHS with ($b, mortalcopy($a)).
+ * Note that we don't need to make a mortal copy of $b.
+ *
+ * The algorithm below works by, for every RHS element, mark the
+ * corresponding LHS target element with SVf_BREAK. Then if the RHS
+ * element is found with SVf_BREAK set, it means it would have been
+ * modified, so make a copy.
+ * Note that by scanning both LHS and RHS in lockstep, we avoid
+ * unnecessary copies (like $b above) compared with a naive
+ * "mark all LHS; copy all marked RHS; unmark all LHS".
+ *
+ * If the LHS element is a 'my' declaration and has a refcount of 1, then
+ * it can't be common and can be skipped.
+ *
+ * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
+ * that we thought we didn't need to call S_aassign_copy_common(), but we
+ * have anyway for sanity checking. If we find we need to copy, then panic.
+ */
+
+/* Arguments:
+ *   firstlelem..lastlelem - the LHS target elements of the aassign
+ *   firstrelem..lastrelem - the RHS source elements
+ *   fake (DEBUGGING only) - sanity-check mode; see header comment above.
+ * May replace RHS elements in place with mortal copies. */
+PERL_STATIC_INLINE void
+S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
+ SV **firstrelem, SV **lastrelem
+#ifdef DEBUGGING
+ , bool fake
+#endif
+)
+{
+ dVAR;
+ SV **relem;
+ SV **lelem;
+ SSize_t lcount = lastlelem - firstlelem + 1;
+ bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
+ bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
+
+ assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
+ assert(firstlelem < lastlelem); /* at least 2 LH elements */
+ assert(firstrelem < lastrelem); /* at least 2 RH elements */
+
+
+ lelem = firstlelem;
+ /* we never have to copy the first RH element; it can't be corrupted
+ * by assigning something to the corresponding first LH element.
+ * So this scan does, in a loop: mark LHS[N]; test RHS[N+1]
+ */
+ relem = firstrelem + 1;
+
+ for (; relem <= lastrelem; relem++) {
+ SV *svr;
+
+ /* mark next LH element */
+
+ if (--lcount >= 0) {
+ SV *svl = *lelem++;
+
+ if (UNLIKELY(!svl)) {/* skip AV alias marker */
+ assert (lelem <= lastlelem);
+ svl = *lelem++;
+ lcount--;
+ }
+
+ assert(svl);
+ if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
+ if (!marked)
+ return;
+ /* this LH element will consume all further args;
+ * no need to mark any further LH elements (if any).
+ * But we still need to scan any remaining RHS elements;
+ * set lcount negative to distinguish from lcount == 0,
+ * so the loop condition continues being true
+ */
+ lcount = -1;
+ lelem--; /* no need to unmark this element */
+ }
+ else if (!(do_rc1 && SvREFCNT(svl) == 1) && svl != &PL_sv_undef) {
+ /* a scalar LHS target that might be aliased elsewhere:
+ * mark it so a matching RHS element gets copied */
+ assert(!SvIMMORTAL(svl));
+ SvFLAGS(svl) |= SVf_BREAK;
+ marked = TRUE;
+ }
+ else if (!marked) {
+ /* don't check RH element if no SVf_BREAK flags set yet */
+ if (!lcount)
+ break;
+ continue;
+ }
+ }
+
+ /* see if corresponding RH element needs copying */
+
+ assert(marked);
+ svr = *relem;
+ assert(svr);
+
+ if (UNLIKELY(SvFLAGS(svr) & SVf_BREAK)) {
+
+#ifdef DEBUGGING
+ if (fake) {
+ /* op_dump(PL_op); */
+ Perl_croak(aTHX_
+ "panic: aassign skipped needed copy of common RH elem %"
+ UVuf, (UV)(relem - firstrelem));
+ }
+#endif
+
+ TAINT_NOT; /* Each item is independent */
+
+ /* Dear TODO test in t/op/sort.t, I love you.
+ (It's relying on a panic, not a "semi-panic" from newSVsv()
+ and then an assertion failure below.) */
+ if (UNLIKELY(SvIS_FREED(svr))) {
+ Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
+ (void*)svr);
+ }
+ /* avoid break flag while copying; otherwise COW etc
+ * disabled... */
+ SvFLAGS(svr) &= ~SVf_BREAK;
+ /* Not newSVsv(), as it does not allow copy-on-write,
+ resulting in wasteful copies.
+ Also, we use SV_NOSTEAL in case the SV is used more than
+ once, e.g. (...) = (f())[0,0]
+ Where the same SV appears twice on the RHS without a ref
+ count bump. (Although I suspect that the SV won't be
+ stealable here anyway - DAPM).
+ */
+ *relem = sv_mortalcopy_flags(svr,
+ SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
+ /* ... but restore afterwards in case it's needed again,
+ * e.g. ($a,$b,$c) = (1,$a,$a)
+ */
+ SvFLAGS(svr) |= SVf_BREAK;
+ }
+
+ if (!lcount)
+ break;
+ }
+
+ if (!marked)
+ return;
+
+ /* unmark LHS */
+
+ while (lelem > firstlelem) {
+ SV * const svl = *(--lelem);
+ if (svl)
+ SvFLAGS(svl) &= ~SVf_BREAK;
+ }
+}
+
+
+
PP(pp_aassign)
{
dVAR; dSP;
HV *hash;
SSize_t i;
int magic;
- U32 lval = 0;
+ U32 lval;
+ /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
+ * only need to save locally, not on the save stack */
+ U16 old_delaymagic = PL_delaymagic;
+#ifdef DEBUGGING
+ bool fake = 0;
+#endif
PL_delaymagic = DM_DELAY; /* catch simultaneous items */
- gimme = GIMME_V;
- if (gimme == G_ARRAY)
- lval = PL_op->op_flags & OPf_MOD || LVRET;
/* If there's a common identifier on both sides we have to take
* special care that assigning the identifier on the left doesn't
* clobber a value on the right that's used later in the list.
- * Don't bother if LHS is just an empty hash or array.
*/
- if ( (PL_op->op_private & OPpASSIGN_COMMON || PL_sawalias)
- && (
- firstlelem != lastlelem
- || ! ((sv = *firstlelem))
- || SvMAGICAL(sv)
- || ! (SvTYPE(sv) == SVt_PVAV || SvTYPE(sv) == SVt_PVHV)
- || (SvTYPE(sv) == SVt_PVAV && AvFILL((AV*)sv) != -1)
- || (SvTYPE(sv) == SVt_PVHV && HvUSEDKEYS((HV*)sv) != 0)
- )
+ if ( (PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1))
+ /* at least 2 LH and RH elements, or commonality isn't an issue */
+ && (firstlelem < lastlelem && firstrelem < lastrelem)
) {
- EXTEND_MORTAL(lastrelem - firstrelem + 1);
- for (relem = firstrelem; relem <= lastrelem; relem++) {
- if (LIKELY((sv = *relem))) {
- TAINT_NOT; /* Each item is independent */
-
- /* Dear TODO test in t/op/sort.t, I love you.
- (It's relying on a panic, not a "semi-panic" from newSVsv()
- and then an assertion failure below.) */
- if (UNLIKELY(SvIS_FREED(sv))) {
- Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
- (void*)sv);
- }
- /* Not newSVsv(), as it does not allow copy-on-write,
- resulting in wasteful copies. We need a second copy of
- a temp here, hence the SV_NOSTEAL. */
- *relem = sv_mortalcopy_flags(sv,SV_GMAGIC|SV_DO_COW_SVSETSV
- |SV_NOSTEAL);
- }
- }
+ if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
+ /* skip the scan if all scalars have a ref count of 1 */
+ for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
+ sv = *lelem;
+ if (!sv || SvREFCNT(sv) == 1)
+ continue;
+ /* A non-aggregate LH element with multiple references may
+ * be aliased to something on the RHS: do the full scan.
+ * (The second type test must be SVt_PVHV, not a duplicate
+ * of the SVt_PVAV test, or a shared hash target would
+ * wrongly force a scan.) */
+ if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVHV)
+ goto do_scan;
+ break;
+ }
+ }
+ else {
+ do_scan:
+ S_aassign_copy_common(aTHX_
+ firstlelem, lastlelem, firstrelem, lastrelem
+#ifdef DEBUGGING
+ , fake
+#endif
+ );
+ }
}
+#ifdef DEBUGGING
+ else {
+ /* on debugging builds, do the scan even if we've concluded we
+ * don't need to, then panic if we find commonality. Note that the
+ * scanner assumes at least 2 elements */
+ if (firstlelem < lastlelem && firstrelem < lastrelem) {
+ fake = 1;
+ goto do_scan;
+ }
+ }
+#endif
+
+ gimme = GIMME_V;
+ lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
relem = firstrelem;
lelem = firstlelem;
ASSUME(SvTYPE(sv) == SVt_PVAV);
}
switch (SvTYPE(sv)) {
- case SVt_PVAV:
+ case SVt_PVAV: {
+ bool already_copied = FALSE;
ary = MUTABLE_AV(sv);
magic = SvMAGICAL(ary) != 0;
ENTER;
SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
- av_clear(ary);
- av_extend(ary, lastrelem - relem);
+
+ /* We need to clear ary. There is a danger that if we do this,
+ * elements on the RHS may be prematurely freed, e.g.
+ * @a = ($a[0]);
+ * In the case of possible commonality, make a copy of each
+ * RHS SV *before* clearing the array, and add a reference
+ * from the tmps stack, so that it doesn't leak on death.
+ * Otherwise, make a copy of each RHS SV only as we're storing
+ * it into the array - that way we don't have to worry about
+ * it being leaked if we die, but don't incur the cost of
+ * mortalising everything.
+ */
+
+ if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
+ && (relem <= lastrelem)
+ && (magic || AvFILL(ary) != -1))
+ {
+ SV **svp;
+ EXTEND_MORTAL(lastrelem - relem + 1);
+ for (svp = relem; svp <= lastrelem; svp++) {
+ /* see comment in S_aassign_copy_common about SV_NOSTEAL */
+ *svp = sv_mortalcopy_flags(*svp,
+ SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
+ TAINT_NOT;
+ }
+ already_copied = TRUE;
+ }
+
+ av_clear(ary);
+ if (relem <= lastrelem)
+ av_extend(ary, lastrelem - relem);
+
i = 0;
while (relem <= lastrelem) { /* gobble up all the rest */
SV **didstore;
- if (LIKELY(*relem))
- SvGETMAGIC(*relem); /* before newSV, in case it dies */
if (LIKELY(!alias)) {
- sv = newSV(0);
- sv_setsv_nomg(sv, *relem);
- *relem = sv;
+ if (already_copied)
+ sv = *relem;
+ else {
+ if (LIKELY(*relem))
+ /* before newSV, in case it dies */
+ SvGETMAGIC(*relem);
+ sv = newSV(0);
+ /* see comment in S_aassign_copy_common about
+ * SV_NOSTEAL */
+ sv_setsv_flags(sv, *relem,
+ (SV_DO_COW_SVSETSV|SV_NOSTEAL));
+ *relem = sv;
+ }
}
else {
+ if (!already_copied)
+ SvGETMAGIC(*relem);
if (!SvROK(*relem))
DIE(aTHX_ "Assigned value is not a reference");
if (SvTYPE(SvRV(*relem)) > SVt_PVLV)
/* diag_listed_as: Assigned value is not %s reference */
DIE(aTHX_
"Assigned value is not a SCALAR reference");
- if (lval)
+ if (lval && !already_copied)
*relem = sv_mortalcopy(*relem);
/* XXX else check for weak refs? */
- sv = SvREFCNT_inc_simple_NN(SvRV(*relem));
+ sv = SvREFCNT_inc_NN(SvRV(*relem));
}
relem++;
+ if (already_copied)
+ SvREFCNT_inc_simple_void_NN(sv); /* undo mortal free */
didstore = av_store(ary,i++,sv);
if (magic) {
if (!didstore)
SvSETMAGIC(MUTABLE_SV(ary));
LEAVE;
break;
+ }
+
case SVt_PVHV: { /* normal hash */
SV *tmpstr;
int odd;
int duplicates = 0;
SV** topelem = relem;
SV **firsthashrelem = relem;
+ bool already_copied = FALSE;
hash = MUTABLE_HV(sv);
magic = SvMAGICAL(hash) != 0;
ENTER;
SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
+
+ /* We need to clear hash. There is a danger that if we do this,
+ * elements on the RHS may be prematurely freed, e.g.
+ * %h = (foo => $h{bar});
+ * In the case of possible commonality, make a copy of each
+ * RHS SV *before* clearing the hash, and add a reference
+ * from the tmps stack, so that it doesn't leak on death.
+ */
+
+ if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
+ && (relem <= lastrelem)
+ && (magic || HvUSEDKEYS(hash)))
+ {
+ SV **svp;
+ EXTEND_MORTAL(lastrelem - relem + 1);
+ for (svp = relem; svp <= lastrelem; svp++) {
+ *svp = sv_mortalcopy_flags(*svp,
+ SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
+ TAINT_NOT;
+ }
+ already_copied = TRUE;
+ }
+
hv_clear(hash);
+
while (LIKELY(relem < lastrelem+odd)) { /* gobble up all the rest */
HE *didstore;
assert(*relem);
to avoid having the next op modify our rhs. Copy
it also if it is gmagical, lest it make the
hv_store_ent call below croak, leaking the value. */
- sv = lval || SvGMAGICAL(*relem)
+ sv = (lval || SvGMAGICAL(*relem)) && !already_copied
? sv_mortalcopy(*relem)
: *relem;
relem++;
assert(*relem);
- SvGETMAGIC(*relem);
- tmpstr = newSV(0);
- sv_setsv_nomg(tmpstr,*relem++); /* value */
+ if (already_copied)
+ tmpstr = *relem++;
+ else {
+ SvGETMAGIC(*relem);
+ tmpstr = newSV(0);
+ sv_setsv_nomg(tmpstr,*relem++); /* value */
+ }
+
if (gimme == G_ARRAY) {
if (hv_exists_ent(hash, sv, 0))
/* key overwrites an existing entry */
*topelem++ = tmpstr;
}
}
+ if (already_copied)
+ SvREFCNT_inc_simple_void_NN(tmpstr); /* undo mortal free */
didstore = hv_store_ent(hash,sv,tmpstr,0);
if (magic) {
if (!didstore) sv_2mortal(tmpstr);
PERL_UNUSED_VAR(tmp_egid);
#endif
}
- PL_delaymagic = 0;
+ PL_delaymagic = old_delaymagic;
if (gimme == G_VOID)
SP = firstrelem - 1;
}
NOT_REACHED; /* NOTREACHED */
-nope:
+ nope:
if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
if (!mg)
mg = mg_find_mglob(TARG);
if (gimme == G_SCALAR) {
/* undef TARG, and push that undefined value */
if (type != OP_RCATLINE) {
- SV_CHECK_THINKFIRST_COW_DROP(TARG);
- SvOK_off(TARG);
+ sv_setsv(TARG,NULL);
}
PUSHTARG;
}
XPUSHs(sv);
if (type == OP_GLOB) {
const char *t1;
+ Stat_t statbuf;
if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
char * const tmps = SvEND(sv) - 1;
if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
#endif
break;
- if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &PL_statbuf) < 0) {
+ if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
(void)POPs; /* Unmatched wildcard? Chuck it... */
continue;
}
LvTYPE(lv) = 'y';
sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
- LvTARG(lv) = SvREFCNT_inc_simple(hv);
+ LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
LvTARGLEN(lv) = 1;
PUSHs(lv);
RETURN;
}
-/* handle one or more derefs and array/hash indexings, e.g.
- * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
+/* Handle one or more aggregate derefs and array/hash indexings, e.g.
+ * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
*
* op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
- * Each of these either contains an action, or an argument, such as
- * a UV to use as an array index, or a lexical var to retrieve.
- * In fact, several actions re stored per UV; we keep shifting new actions
- * of the one UV, and only reload when it becomes zero.
+ * Each of these either contains a set of actions, or an argument, such as
+ * an IV to use as an array index, or a lexical var to retrieve.
+ * Several actions are stored per UV; we keep shifting new actions off the
+ * one UV, and only reload when it becomes zero.
*/
PP(pp_multideref)
PERL_MAGIC_defelem, NULL, 0);
/* sv_magic() increments refcount */
SvREFCNT_dec_NN(key2);
- LvTARG(lv) = SvREFCNT_inc_simple(hv);
+ LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
LvTARGLEN(lv) = 1;
sv = lv;
}
char *strend;
const char *c;
STRLEN clen;
- I32 iters = 0;
- I32 maxiters;
+ SSize_t iters = 0;
+ SSize_t maxiters;
bool once;
U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
See "how taint works" above */
Move(s, d, i+1, char); /* include the NUL */
}
SPAGAIN;
- mPUSHi((I32)iters);
+ mPUSHi(iters);
}
}
else {
}
if (once)
break;
- } while (CALLREGEXEC(rx, s, strend, orig, s == m,
+ } while (CALLREGEXEC(rx, s, strend, orig,
+ s == m, /* Yields minend of 0 or 1 */
TARG, NULL,
REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
SvPV_set(dstr, NULL);
SPAGAIN;
- mPUSHi((I32)iters);
+ mPUSHi(iters);
}
}
(void)POPMARK; /* pop dst */
SP = PL_stack_base + POPMARK; /* pop original mark */
if (gimme == G_SCALAR) {
- if (PL_op->op_private & OPpGREP_LEX) {
- SV* const sv = sv_newmortal();
- sv_setiv(sv, items);
- PUSHs(sv);
- }
- else {
dTARGET;
XPUSHi(items);
- }
}
else if (gimme == G_ARRAY)
SP += items;
ENTER_with_name("grep_item"); /* enter inner scope */
SAVEVPTR(PL_curpm);
- src = PL_stack_base[*PL_markstack_ptr];
+ src = PL_stack_base[TOPMARK];
if (SvPADTMP(src)) {
- src = PL_stack_base[*PL_markstack_ptr] = sv_mortalcopy(src);
+ src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
PL_tmps_floor++;
}
SvTEMP_off(src);
- if (PL_op->op_private & OPpGREP_LEX)
- PAD_SVl(PL_op->op_targ) = src;
- else
- DEFSV_set(src);
+ DEFSV_set(src);
RETURNOP(cLOGOP->op_other);
}
PERL_CONTEXT *cx;
SV *sv;
- if (CxMULTICALL(&cxstack[cxstack_ix]))
+ if (CxMULTICALL(&cxstack[cxstack_ix])) {
+ /* entry zero of a stack is always PL_sv_undef, which
+ * simplifies converting a '()' return into undef in scalar context */
+ assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
return 0;
+ }
POPBLOCK(cx,newpm);
cxstack_ix++; /* temporarily protect top context */
}
/* should call AUTOLOAD now? */
else {
-try_autoload:
+ try_autoload:
if ((autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
GvNAMEUTF8(gv) ? SVf_UTF8 : 0)))
{
SAVETMPS;
if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
!CvLVALUE(cv)))
- DIE(aTHX_ "Can't modify non-lvalue subroutine call");
+ DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
+ SVfARG(cv_name(cv, NULL, 0)));
/* warning must come *after* we fully set up the context
* stuff so that __WARN__ handlers can safely dounwind()
* if they want to
& PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
!CvLVALUE(cv)))
- DIE(aTHX_ "Can't modify non-lvalue subroutine call");
+ DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
+ SVfARG(cv_name(cv, NULL, 0)));
if (UNLIKELY(!hasargs && GvAV(PL_defgv))) {
/* Need to copy @_ to stack. Alternative may be to
}
/*
- * Local variables:
- * c-indentation-style: bsd
- * c-basic-offset: 4
- * indent-tabs-mode: nil
- * End:
- *
* ex: set ts=8 sts=4 sw=4 et:
*/