PP(pp_nextstate)
{
PL_curcop = (COP*)PL_op;
- PL_sawalias = 0;
TAINT_NOT; /* Each statement is presumed innocent */
- PL_stack_sp = PL_stack_base + cxstack[cxstack_ix].blk_oldsp;
+ PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;
FREETMPS;
PERL_ASYNC_CHECK();
return NORMAL;
PUSHs(save_scalar(cGVOP_gv));
else
PUSHs(GvSVn(cGVOP_gv));
- if (GvREFCNT(cGVOP_gv) > 1 || GvALIASED_SV(cGVOP_gv))
- PL_sawalias = TRUE;
RETURN;
}
return NORMAL;
}
-/* This is sometimes called directly by pp_coreargs and pp_grepstart. */
+/* This is sometimes called directly by pp_coreargs, pp_grepstart and
+ amagic_call. */
PP(pp_pushmark)
{
PUSHMARK(PL_stack_sp);
{
dSP;
XPUSHs(MUTABLE_SV(cGVOP_gv));
- if (isGV(cGVOP_gv)
- && (GvREFCNT(cGVOP_gv) > 1 || GvALIASED_SV(cGVOP_gv)))
- PL_sawalias = TRUE;
RETURN;
}
SV * const temp = left;
left = right; right = temp;
}
- if (TAINTING_get && UNLIKELY(TAINT_get) && !SvTAINTED(right))
+ assert(TAINTING_get || !TAINT_get);
+ if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
TAINT_NOT;
if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
/* *foo =\&bar */
assert(source);
assert(CvFLAGS(source) & CVf_CONST);
- SvREFCNT_inc_void(source);
+ SvREFCNT_inc_simple_void_NN(source);
SvREFCNT_dec_NN(upgraded);
SvRV_set(right, MUTABLE_SV(source));
}
PP(pp_unstack)
{
+ PERL_CONTEXT *cx;
PERL_ASYNC_CHECK();
TAINT_NOT; /* Each statement is presumed innocent */
- PL_stack_sp = PL_stack_base + cxstack[cxstack_ix].blk_oldsp;
+ cx = CX_CUR();
+ PL_stack_sp = PL_stack_base + cx->blk_oldsp;
FREETMPS;
if (!(PL_op->op_flags & OPf_SPECIAL)) {
- I32 oldsave = PL_scopestack[PL_scopestack_ix - 1];
- LEAVE_SCOPE(oldsave);
+ assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx));
+ CX_LEAVE_SCOPE(cx);
}
return NORMAL;
}
}
if (!rcopied) {
- if (left == right)
- /* $r.$r: do magic twice: tied might return different 2nd time */
- SvGETMAGIC(right);
rpv = SvPV_nomg_const(right, rlen);
rbyte = !DO_UTF8(right);
}
if (lbyte != rbyte) {
- /* sv_utf8_upgrade_nomg() may reallocate the stack */
- PUTBACK;
if (lbyte)
sv_utf8_upgrade_nomg(TARG);
else {
sv_utf8_upgrade_nomg(right);
rpv = SvPV_nomg_const(right, rlen);
}
- SPAGAIN;
}
sv_catpvn_nomg(TARG, rpv, rlen);
(base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
| (count << SAVE_TIGHT_SHIFT)
| SAVEt_CLEARPADRANGE);
- assert(OPpPADRANGE_COUNTMASK + 1 == (1 <<OPpPADRANGE_COUNTSHIFT));
+ STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT)) == base);
{
dSS_ADD;
PUTBACK;
Perl_pp_rv2gv(aTHX);
PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
+ if (PL_last_in_gv == (GV *)&PL_sv_undef)
+ PL_last_in_gv = NULL;
+ else
+ assert(isGV_with_GP(PL_last_in_gv));
}
}
return do_readline();
}
-/* also used for: pp_i_predec() pp_i_preinc() pp_predec() */
+/* also used for: pp_i_preinc() */
PP(pp_preinc)
{
- dSP;
- const bool inc =
- PL_op->op_type == OP_PREINC || PL_op->op_type == OP_I_PREINC;
- if (UNLIKELY(SvTYPE(TOPs) >= SVt_PVAV || (isGV_with_GP(TOPs) && !SvFAKE(TOPs))))
- Perl_croak_no_modify();
- if (LIKELY(!SvREADONLY(TOPs) && !SvGMAGICAL(TOPs) && SvIOK_notUV(TOPs) && !SvNOK(TOPs) && !SvPOK(TOPs))
- && SvIVX(TOPs) != (inc ? IV_MAX : IV_MIN))
+ SV *sv = *PL_stack_sp;
+
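+    /* fast path: a plain IOK-only scalar (no magic, not readonly, not a UV,
+     * no cached NV/PV, not a ref) that won't hit IV_MAX; anything else is
+     * left to sv_inc() below */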
+ if (LIKELY(((sv->sv_flags &
+ (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
+ SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
+ == SVf_IOK))
+ && SvIVX(sv) != IV_MAX)
+ {
+ SvIV_set(sv, SvIVX(sv) + 1);
+ }
+ else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
+ sv_inc(sv);
+ SvSETMAGIC(sv);
+ return NORMAL;
+}
+
+
+/* also used for: pp_i_predec() */
+
+PP(pp_predec)
+{
+ SV *sv = *PL_stack_sp;
+
+ if (LIKELY(((sv->sv_flags &
+ (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
+ SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
+ == SVf_IOK))
+ && SvIVX(sv) != IV_MIN)
{
- SvIV_set(TOPs, SvIVX(TOPs) + (inc ? 1 : -1));
- SvFLAGS(TOPs) &= ~(SVp_NOK|SVp_POK);
+ SvIV_set(sv, SvIVX(sv) - 1);
}
- else /* Do all the PERL_PRESERVE_IVUV conditionals in sv_inc */
- if (inc) sv_inc(TOPs);
- else sv_dec(TOPs);
- SvSETMAGIC(TOPs);
+ else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
+ sv_dec(sv);
+ SvSETMAGIC(sv);
return NORMAL;
}
RETPUSHNO;
}
+
+
PP(pp_add)
{
dSP; dATARGET; bool useleft; SV *svl, *svr;
+
tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
svr = TOPs;
svl = TOPm1s;
- useleft = USE_LEFT(svl);
#ifdef PERL_PRESERVE_IVUV
+
+ /* special-case some simple common cases */
+ if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
+ IV il, ir;
+ U32 flags = (svl->sv_flags & svr->sv_flags);
+ if (flags & SVf_IOK) {
+ /* both args are simple IVs */
+ UV topl, topr;
+ il = SvIVX(svl);
+ ir = SvIVX(svr);
+ do_iv:
+ topl = ((UV)il) >> (UVSIZE * 8 - 2);
+ topr = ((UV)ir) >> (UVSIZE * 8 - 2);
+
+            /* if both are in a range that can't under/overflow, do a
+             * simple integer add: if the top two bits of both numbers
+             * are 00 or 11, then it's safe */
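+            /* (e.g. with 64-bit IVs, tops of 00 or 11 mean both values lie
+             * in [-2^62, 2^62-1], so their sum always fits in an IV) */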
+ if (!( ((topl+1) | (topr+1)) & 2)) {
+ SP--;
+ TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
+ SETs(TARG);
+ RETURN;
+ }
+ goto generic;
+ }
+ else if (flags & SVf_NOK) {
+ /* both args are NVs */
+ NV nl = SvNVX(svl);
+ NV nr = SvNVX(svr);
+
+ if (
+#if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
+ !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
+ && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
+#else
+ nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
+#endif
+ )
+ /* nothing was lost by converting to IVs */
+ goto do_iv;
+ SP--;
+ TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
+ SETs(TARG);
+ RETURN;
+ }
+ }
+
+ generic:
+
+ useleft = USE_LEFT(svl);
/* We must see if we can perform the addition with integers if possible,
as the integer code detects overflow while the NV code doesn't.
If either argument hasn't had a numeric conversion yet attempt to get
if (aiv >= 0) {
auv = aiv;
auvok = 1; /* Now acting as a sign flag. */
- } else { /* 2s complement assumption for IV_MIN */
- auv = (UV)-aiv;
+ } else {
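+                /* note: -IV_MIN overflows an IV; casting IV_MIN straight to
+                 * UV yields its magnitude, hence the special case */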
+ auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
}
}
a_valid = 1;
buv = biv;
buvok = 1;
} else
- buv = (UV)-biv;
+ buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
}
/* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
else "IV" now, independent of how it came in.
else {
/* Negate result */
if (result <= (UV)IV_MIN)
- SETi( -(IV)result );
+ SETi(result == (UV)IV_MIN
+ ? IV_MIN : -(IV)result);
else {
/* result valid, but out of range for IV. */
SETn( -(NV)result );
} /* Overflow, drop through to NVs. */
}
}
+
+#else
+ useleft = USE_LEFT(svl);
#endif
+
{
NV value = SvNV_nomg(svr);
(void)POPs;
}
}
+
+/* Do a mark and sweep with the SVf_BREAK flag to detect elements which
+ * are common to both the LHS and RHS of an aassign, and replace them
+ * with copies. All these copies are made before the actual list assign is
+ * done.
+ *
+ * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
+ * element ($b) to the first LH element ($a), modifies $a; when the
+ * second assignment is done, the second RH element now has the wrong
+ * value. So we initially replace the RHS with ($b, mortalcopy($a)).
+ * Note that we don't need to make a mortal copy of $b.
+ *
+ * The algorithm below works by, for every RHS element, marking the
+ * corresponding LHS target element with SVf_BREAK. Then, if an RHS
+ * element is found with SVf_BREAK already set, it is also an LHS target
+ * that an earlier assignment would have modified, so make a copy.
+ * Note that by scanning both LHS and RHS in lockstep, we avoid
+ * unnecessary copies (like $b above) compared with a naive
+ * "mark all LHS; copy all marked RHS; unmark all LHS".
+ *
+ * If the LHS element is a 'my' declaration and has a refcount of 1, then
+ * it can't be common and can be skipped.
+ *
+ * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
+ * that we thought we didn't need to call S_aassign_copy_common(), but we have
+ * called it anyway for sanity checking. If we find we need to copy, then panic.
+ */
+
+PERL_STATIC_INLINE void
+S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
+ SV **firstrelem, SV **lastrelem
+#ifdef DEBUGGING
+ , bool fake
+#endif
+)
+{
+ dVAR;
+ SV **relem;
+ SV **lelem;
+ SSize_t lcount = lastlelem - firstlelem + 1;
+ bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
+ bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
+ bool copy_all = FALSE;
+
+ assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
+ assert(firstlelem < lastlelem); /* at least 2 LH elements */
+ assert(firstrelem < lastrelem); /* at least 2 RH elements */
+
+
+ lelem = firstlelem;
+ /* we never have to copy the first RH element; it can't be corrupted
+ * by assigning something to the corresponding first LH element.
+     * So the scan below does, in a loop: mark LHS[N]; test RHS[N+1]
+ */
+ relem = firstrelem + 1;
+
+ for (; relem <= lastrelem; relem++) {
+ SV *svr;
+
+ /* mark next LH element */
+
+ if (--lcount >= 0) {
+ SV *svl = *lelem++;
+
+ if (UNLIKELY(!svl)) {/* skip AV alias marker */
+ assert (lelem <= lastlelem);
+ svl = *lelem++;
+ lcount--;
+ }
+
+ assert(svl);
+ if (SvSMAGICAL(svl)) {
+ copy_all = TRUE;
+ }
+ if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
+ if (!marked)
+ return;
+ /* this LH element will consume all further args;
+ * no need to mark any further LH elements (if any).
+ * But we still need to scan any remaining RHS elements;
+ * set lcount negative to distinguish from lcount == 0,
+ * so the loop condition continues being true
+ */
+ lcount = -1;
+ lelem--; /* no need to unmark this element */
+ }
+ else if (!(do_rc1 && SvREFCNT(svl) == 1) && svl != &PL_sv_undef) {
+ assert(!SvIMMORTAL(svl));
+ SvFLAGS(svl) |= SVf_BREAK;
+ marked = TRUE;
+ }
+ else if (!marked) {
+ /* don't check RH element if no SVf_BREAK flags set yet */
+ if (!lcount)
+ break;
+ continue;
+ }
+ }
+
+ /* see if corresponding RH element needs copying */
+
+ assert(marked);
+ svr = *relem;
+ assert(svr);
+
+ if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
+
+#ifdef DEBUGGING
+ if (fake) {
+ /* op_dump(PL_op); */
+ Perl_croak(aTHX_
+ "panic: aassign skipped needed copy of common RH elem %"
+ UVuf, (UV)(relem - firstrelem));
+ }
+#endif
+
+ TAINT_NOT; /* Each item is independent */
+
+ /* Dear TODO test in t/op/sort.t, I love you.
+ (It's relying on a panic, not a "semi-panic" from newSVsv()
+ and then an assertion failure below.) */
+ if (UNLIKELY(SvIS_FREED(svr))) {
+ Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
+ (void*)svr);
+ }
+ /* avoid break flag while copying; otherwise COW etc
+ * disabled... */
+ SvFLAGS(svr) &= ~SVf_BREAK;
+ /* Not newSVsv(), as it does not allow copy-on-write,
+ resulting in wasteful copies.
+ Also, we use SV_NOSTEAL in case the SV is used more than
+ once, e.g. (...) = (f())[0,0]
+ Where the same SV appears twice on the RHS without a ref
+ count bump. (Although I suspect that the SV won't be
+ stealable here anyway - DAPM).
+ */
+ *relem = sv_mortalcopy_flags(svr,
+ SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
+ /* ... but restore afterwards in case it's needed again,
+ * e.g. ($a,$b,$c) = (1,$a,$a)
+ */
+ SvFLAGS(svr) |= SVf_BREAK;
+ }
+
+ if (!lcount)
+ break;
+ }
+
+ if (!marked)
+ return;
+
+ /*unmark LHS */
+
+ while (lelem > firstlelem) {
+ SV * const svl = *(--lelem);
+ if (svl)
+ SvFLAGS(svl) &= ~SVf_BREAK;
+ }
+}
+
+
+
PP(pp_aassign)
{
dVAR; dSP;
HV *hash;
SSize_t i;
int magic;
- U32 lval = 0;
+ U32 lval;
+ /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
+ * only need to save locally, not on the save stack */
+ U16 old_delaymagic = PL_delaymagic;
+#ifdef DEBUGGING
+ bool fake = 0;
+#endif
PL_delaymagic = DM_DELAY; /* catch simultaneous items */
- gimme = GIMME_V;
- if (gimme == G_ARRAY)
- lval = PL_op->op_flags & OPf_MOD || LVRET;
/* If there's a common identifier on both sides we have to take
* special care that assigning the identifier on the left doesn't
* clobber a value on the right that's used later in the list.
- * Don't bother if LHS is just an empty hash or array.
*/
- if ( (PL_op->op_private & OPpASSIGN_COMMON || PL_sawalias)
- && (
- firstlelem != lastlelem
- || ! ((sv = *firstlelem))
- || SvMAGICAL(sv)
- || ! (SvTYPE(sv) == SVt_PVAV || SvTYPE(sv) == SVt_PVHV)
- || (SvTYPE(sv) == SVt_PVAV && AvFILL((AV*)sv) != -1)
- || (SvTYPE(sv) == SVt_PVHV && HvUSEDKEYS((HV*)sv) != 0)
- )
- ) {
- EXTEND_MORTAL(lastrelem - firstrelem + 1);
- for (relem = firstrelem; relem <= lastrelem; relem++) {
- if (LIKELY((sv = *relem))) {
- TAINT_NOT; /* Each item is independent */
-
- /* Dear TODO test in t/op/sort.t, I love you.
- (It's relying on a panic, not a "semi-panic" from newSVsv()
- and then an assertion failure below.) */
- if (UNLIKELY(SvIS_FREED(sv))) {
- Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
- (void*)sv);
- }
- /* Not newSVsv(), as it does not allow copy-on-write,
- resulting in wasteful copies. We need a second copy of
- a temp here, hence the SV_NOSTEAL. */
- *relem = sv_mortalcopy_flags(sv,SV_GMAGIC|SV_DO_COW_SVSETSV
- |SV_NOSTEAL);
- }
- }
+ /* at least 2 LH and RH elements, or commonality isn't an issue */
+ if (firstlelem < lastlelem && firstrelem < lastrelem) {
+ for (relem = firstrelem+1; relem <= lastrelem; relem++) {
+ if (SvGMAGICAL(*relem))
+ goto do_scan;
+ }
+ for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
+ if (*lelem && SvSMAGICAL(*lelem))
+ goto do_scan;
+ }
+ if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
+ if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
+ /* skip the scan if all scalars have a ref count of 1 */
+ for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
+ sv = *lelem;
+ if (!sv || SvREFCNT(sv) == 1)
+ continue;
+                    if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVHV)
+ goto do_scan;
+ break;
+ }
+ }
+ else {
+ do_scan:
+ S_aassign_copy_common(aTHX_
+ firstlelem, lastlelem, firstrelem, lastrelem
+#ifdef DEBUGGING
+ , fake
+#endif
+ );
+ }
+ }
}
+#ifdef DEBUGGING
+ else {
+ /* on debugging builds, do the scan even if we've concluded we
+ * don't need to, then panic if we find commonality. Note that the
+ * scanner assumes at least 2 elements */
+ if (firstlelem < lastlelem && firstrelem < lastrelem) {
+ fake = 1;
+ goto do_scan;
+ }
+ }
+#endif
+
+ gimme = GIMME_V;
+ lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
relem = firstrelem;
lelem = firstlelem;
ASSUME(SvTYPE(sv) == SVt_PVAV);
}
switch (SvTYPE(sv)) {
- case SVt_PVAV:
+ case SVt_PVAV: {
+ bool already_copied = FALSE;
ary = MUTABLE_AV(sv);
magic = SvMAGICAL(ary) != 0;
ENTER;
SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
- av_clear(ary);
- av_extend(ary, lastrelem - relem);
+
+        /* We need to clear ary. There is a danger that if we do this,
+ * elements on the RHS may be prematurely freed, e.g.
+ * @a = ($a[0]);
+ * In the case of possible commonality, make a copy of each
+ * RHS SV *before* clearing the array, and add a reference
+ * from the tmps stack, so that it doesn't leak on death.
+ * Otherwise, make a copy of each RHS SV only as we're storing
+ * it into the array - that way we don't have to worry about
+ * it being leaked if we die, but don't incur the cost of
+ * mortalising everything.
+ */
+
+ if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
+ && (relem <= lastrelem)
+ && (magic || AvFILL(ary) != -1))
+ {
+ SV **svp;
+ EXTEND_MORTAL(lastrelem - relem + 1);
+ for (svp = relem; svp <= lastrelem; svp++) {
+ /* see comment in S_aassign_copy_common about SV_NOSTEAL */
+ *svp = sv_mortalcopy_flags(*svp,
+ SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
+ TAINT_NOT;
+ }
+ already_copied = TRUE;
+ }
+
+ av_clear(ary);
+ if (relem <= lastrelem)
+ av_extend(ary, lastrelem - relem);
+
i = 0;
while (relem <= lastrelem) { /* gobble up all the rest */
SV **didstore;
- if (LIKELY(*relem))
- SvGETMAGIC(*relem); /* before newSV, in case it dies */
if (LIKELY(!alias)) {
- sv = newSV(0);
- sv_setsv_nomg(sv, *relem);
- *relem = sv;
+ if (already_copied)
+ sv = *relem;
+ else {
+ if (LIKELY(*relem))
+ /* before newSV, in case it dies */
+ SvGETMAGIC(*relem);
+ sv = newSV(0);
+ /* see comment in S_aassign_copy_common about
+ * SV_NOSTEAL */
+ sv_setsv_flags(sv, *relem,
+ (SV_DO_COW_SVSETSV|SV_NOSTEAL));
+ *relem = sv;
+ }
}
else {
+ if (!already_copied)
+ SvGETMAGIC(*relem);
if (!SvROK(*relem))
DIE(aTHX_ "Assigned value is not a reference");
if (SvTYPE(SvRV(*relem)) > SVt_PVLV)
/* diag_listed_as: Assigned value is not %s reference */
DIE(aTHX_
"Assigned value is not a SCALAR reference");
- if (lval)
+ if (lval && !already_copied)
*relem = sv_mortalcopy(*relem);
/* XXX else check for weak refs? */
- sv = SvREFCNT_inc_simple_NN(SvRV(*relem));
+ sv = SvREFCNT_inc_NN(SvRV(*relem));
}
relem++;
+ if (already_copied)
+ SvREFCNT_inc_simple_void_NN(sv); /* undo mortal free */
didstore = av_store(ary,i++,sv);
if (magic) {
if (!didstore)
SvSETMAGIC(MUTABLE_SV(ary));
LEAVE;
break;
+ }
+
case SVt_PVHV: { /* normal hash */
SV *tmpstr;
int odd;
int duplicates = 0;
SV** topelem = relem;
SV **firsthashrelem = relem;
+ bool already_copied = FALSE;
hash = MUTABLE_HV(sv);
magic = SvMAGICAL(hash) != 0;
ENTER;
SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
+
+        /* We need to clear hash. There is a danger that if we do this,
+ * elements on the RHS may be prematurely freed, e.g.
+ * %h = (foo => $h{bar});
+ * In the case of possible commonality, make a copy of each
+ * RHS SV *before* clearing the hash, and add a reference
+ * from the tmps stack, so that it doesn't leak on death.
+ */
+
+ if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
+ && (relem <= lastrelem)
+ && (magic || HvUSEDKEYS(hash)))
+ {
+ SV **svp;
+ EXTEND_MORTAL(lastrelem - relem + 1);
+ for (svp = relem; svp <= lastrelem; svp++) {
+ *svp = sv_mortalcopy_flags(*svp,
+ SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
+ TAINT_NOT;
+ }
+ already_copied = TRUE;
+ }
+
hv_clear(hash);
+
while (LIKELY(relem < lastrelem+odd)) { /* gobble up all the rest */
HE *didstore;
assert(*relem);
to avoid having the next op modify our rhs. Copy
it also if it is gmagical, lest it make the
hv_store_ent call below croak, leaking the value. */
- sv = lval || SvGMAGICAL(*relem)
+ sv = (lval || SvGMAGICAL(*relem)) && !already_copied
? sv_mortalcopy(*relem)
: *relem;
relem++;
assert(*relem);
- SvGETMAGIC(*relem);
- tmpstr = newSV(0);
- sv_setsv_nomg(tmpstr,*relem++); /* value */
+ if (already_copied)
+ tmpstr = *relem++;
+ else {
+ SvGETMAGIC(*relem);
+ tmpstr = newSV(0);
+ sv_setsv_nomg(tmpstr,*relem++); /* value */
+ }
+
if (gimme == G_ARRAY) {
if (hv_exists_ent(hash, sv, 0))
/* key overwrites an existing entry */
*topelem++ = tmpstr;
}
}
+ if (already_copied)
+ SvREFCNT_inc_simple_void_NN(tmpstr); /* undo mortal free */
didstore = hv_store_ent(hash,sv,tmpstr,0);
if (magic) {
if (!didstore) sv_2mortal(tmpstr);
PERL_UNUSED_VAR(tmp_egid);
#endif
}
- PL_delaymagic = 0;
+ PL_delaymagic = old_delaymagic;
if (gimme == G_VOID)
SP = firstrelem - 1;
const char *truebase; /* Start of string */
REGEXP *rx = PM_GETRE(pm);
bool rxtainted;
- const I32 gimme = GIMME;
+ const I32 gimme = GIMME_V;
STRLEN len;
const I32 oldsave = PL_savestack_ix;
I32 had_zerolen = 0;
LEAVE_SCOPE(oldsave);
RETURN;
}
- /* NOTREACHED */
+ NOT_REACHED; /* NOTREACHED */
-nope:
+ nope:
if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
if (!mg)
mg = mg_find_mglob(TARG);
if (gimme == G_SCALAR) {
/* undef TARG, and push that undefined value */
if (type != OP_RCATLINE) {
- SV_CHECK_THINKFIRST_COW_DROP(TARG);
- SvOK_off(TARG);
+ sv_setsv(TARG,NULL);
}
PUSHTARG;
}
XPUSHs(sv);
if (type == OP_GLOB) {
const char *t1;
+ Stat_t statbuf;
if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
char * const tmps = SvEND(sv) - 1;
if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
#endif
break;
- if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &PL_statbuf) < 0) {
+ if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
(void)POPs; /* Unmatched wildcard? Chuck it... */
continue;
}
LvTYPE(lv) = 'y';
sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
- LvTARG(lv) = SvREFCNT_inc_simple(hv);
+ LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
LvTARGLEN(lv) = 1;
PUSHs(lv);
RETURN;
RETURN;
}
+
+/* a stripped-down version of Perl_softref2xv() for use by
+ * pp_multideref(), which doesn't use PL_op->op_flags */
+
+STATIC GV *
+S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
+ const svtype type)
+{
+ if (PL_op->op_private & HINT_STRICT_REFS) {
+ if (SvOK(sv))
+ Perl_die(aTHX_ PL_no_symref_sv, sv,
+ (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
+ else
+ Perl_die(aTHX_ PL_no_usym, what);
+ }
+ if (!SvOK(sv))
+ Perl_die(aTHX_ PL_no_usym, what);
+ return gv_fetchsv_nomg(sv, GV_ADD, type);
+}
+
+
+/* Handle one or more aggregate derefs and array/hash indexings, e.g.
+ * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
+ *
+ * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
+ * Each of these either contains a set of actions, or an argument, such as
+ * an IV to use as an array index, or a lexical var to retrieve.
+ * Several actions are stored per UV; we keep shifting new actions off the
+ * one UV, and only reload when it becomes zero.
+ */
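+
+/* A rough sketch of the encoding implied by the cases below (not
+ * authoritative): for a simple $lex[3], the aux list would start with a UV
+ * holding a single MDEREF_AV_padav_aelem action with MDEREF_INDEX_const and
+ * MDEREF_FLAG_last set, followed by an item holding the pad offset of @lex
+ * and an item holding the constant index 3. */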
+
+PP(pp_multideref)
+{
+ SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
+ UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
+ UV actions = items->uv;
+
+ assert(actions);
+ /* this tells find_uninit_var() where we're up to */
+ PL_multideref_pc = items;
+
+ while (1) {
+        /* there are three main classes of action; the first retrieves
+ * the initial AV or HV from a variable or the stack; the second
+ * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
+ * the third an unrolled (/DREFHV, rv2hv, helem).
+ */
+ switch (actions & MDEREF_ACTION_MASK) {
+
+ case MDEREF_reload:
+ actions = (++items)->uv;
+ continue;
+
+ case MDEREF_AV_padav_aelem: /* $lex[...] */
+ sv = PAD_SVl((++items)->pad_offset);
+ goto do_AV_aelem;
+
+ case MDEREF_AV_gvav_aelem: /* $pkg[...] */
+ sv = UNOP_AUX_item_sv(++items);
+ assert(isGV_with_GP(sv));
+ sv = (SV*)GvAVn((GV*)sv);
+ goto do_AV_aelem;
+
+ case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
+ {
+ dSP;
+ sv = POPs;
+ PUTBACK;
+ goto do_AV_rv2av_aelem;
+ }
+
+ case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
+ sv = UNOP_AUX_item_sv(++items);
+ assert(isGV_with_GP(sv));
+ sv = GvSVn((GV*)sv);
+ goto do_AV_vivify_rv2av_aelem;
+
+ case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
+ sv = PAD_SVl((++items)->pad_offset);
+ /* FALLTHROUGH */
+
+ do_AV_vivify_rv2av_aelem:
+ case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
+ /* this is the OPpDEREF action normally found at the end of
+ * ops like aelem, helem, rv2sv */
+ sv = vivify_ref(sv, OPpDEREF_AV);
+ /* FALLTHROUGH */
+
+ do_AV_rv2av_aelem:
+ /* this is basically a copy of pp_rv2av when it just has the
+ * sKR/1 flags */
+ SvGETMAGIC(sv);
+ if (LIKELY(SvROK(sv))) {
+ if (UNLIKELY(SvAMAGIC(sv))) {
+ sv = amagic_deref_call(sv, to_av_amg);
+ }
+ sv = SvRV(sv);
+ if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
+ DIE(aTHX_ "Not an ARRAY reference");
+ }
+ else if (SvTYPE(sv) != SVt_PVAV) {
+ if (!isGV_with_GP(sv))
+ sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
+ sv = MUTABLE_SV(GvAVn((GV*)sv));
+ }
+ /* FALLTHROUGH */
+
+ do_AV_aelem:
+ {
+ /* retrieve the key; this may be either a lexical or package
+ * var (whose index/ptr is stored as an item) or a signed
+ * integer constant stored as an item.
+ */
+ SV *elemsv;
+ IV elem = 0; /* to shut up stupid compiler warnings */
+
+
+ assert(SvTYPE(sv) == SVt_PVAV);
+
+ switch (actions & MDEREF_INDEX_MASK) {
+ case MDEREF_INDEX_none:
+ goto finish;
+ case MDEREF_INDEX_const:
+ elem = (++items)->iv;
+ break;
+ case MDEREF_INDEX_padsv:
+ elemsv = PAD_SVl((++items)->pad_offset);
+ goto check_elem;
+ case MDEREF_INDEX_gvsv:
+ elemsv = UNOP_AUX_item_sv(++items);
+ assert(isGV_with_GP(elemsv));
+ elemsv = GvSVn((GV*)elemsv);
+ check_elem:
+ if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
+ && ckWARN(WARN_MISC)))
+ Perl_warner(aTHX_ packWARN(WARN_MISC),
+ "Use of reference \"%"SVf"\" as array index",
+ SVfARG(elemsv));
+ /* the only time that S_find_uninit_var() needs this
+ * is to determine which index value triggered the
+ * undef warning. So just update it here. Note that
+ * since we don't save and restore this var (e.g. for
+ * tie or overload execution), its value will be
+ * meaningless apart from just here */
+ PL_multideref_pc = items;
+ elem = SvIV(elemsv);
+ break;
+ }
+
+
+ /* this is basically a copy of pp_aelem with OPpDEREF skipped */
+
+ if (!(actions & MDEREF_FLAG_last)) {
+ SV** svp = av_fetch((AV*)sv, elem, 1);
+ if (!svp || ! (sv=*svp))
+ DIE(aTHX_ PL_no_aelem, elem);
+ break;
+ }
+
+ if (PL_op->op_private &
+ (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
+ {
+ if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
+ sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
+ }
+ else {
+ I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
+ sv = av_delete((AV*)sv, elem, discard);
+ if (discard)
+ return NORMAL;
+ if (!sv)
+ sv = &PL_sv_undef;
+ }
+ }
+ else {
+ const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
+ const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
+ const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
+ bool preeminent = TRUE;
+ AV *const av = (AV*)sv;
+ SV** svp;
+
+ if (UNLIKELY(localizing)) {
+ MAGIC *mg;
+ HV *stash;
+
+                    /* If we can determine whether the element exists,
+                     * try to preserve the existence of a tied array
+                     * element by using EXISTS and DELETE if possible.
+                     * Fall back to FETCH and STORE otherwise. */
+ if (SvCANEXISTDELETE(av))
+ preeminent = av_exists(av, elem);
+ }
+
+ svp = av_fetch(av, elem, lval && !defer);
+
+ if (lval) {
+ if (!svp || !(sv = *svp)) {
+ IV len;
+ if (!defer)
+ DIE(aTHX_ PL_no_aelem, elem);
+ len = av_tindex(av);
+ sv = sv_2mortal(newSVavdefelem(av,
+ /* Resolve a negative index now, unless it points
+ * before the beginning of the array, in which
+ * case record it for error reporting in
+ * magic_setdefelem. */
+ elem < 0 && len + elem >= 0
+ ? len + elem : elem, 1));
+ }
+ else {
+ if (UNLIKELY(localizing)) {
+ if (preeminent) {
+ save_aelem(av, elem, svp);
+ sv = *svp; /* may have changed */
+ }
+ else
+ SAVEADELETE(av, elem);
+ }
+ }
+ }
+ else {
+ sv = (svp ? *svp : &PL_sv_undef);
+ /* see note in pp_helem() */
+ if (SvRMAGICAL(av) && SvGMAGICAL(sv))
+ mg_get(sv);
+ }
+ }
+
+ }
+ finish:
+ {
+ dSP;
+ XPUSHs(sv);
+ RETURN;
+ }
+ /* NOTREACHED */
+
+
+
+
+ case MDEREF_HV_padhv_helem: /* $lex{...} */
+ sv = PAD_SVl((++items)->pad_offset);
+ goto do_HV_helem;
+
+ case MDEREF_HV_gvhv_helem: /* $pkg{...} */
+ sv = UNOP_AUX_item_sv(++items);
+ assert(isGV_with_GP(sv));
+ sv = (SV*)GvHVn((GV*)sv);
+ goto do_HV_helem;
+
+ case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
+ {
+ dSP;
+ sv = POPs;
+ PUTBACK;
+ goto do_HV_rv2hv_helem;
+ }
+
+ case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
+ sv = UNOP_AUX_item_sv(++items);
+ assert(isGV_with_GP(sv));
+ sv = GvSVn((GV*)sv);
+ goto do_HV_vivify_rv2hv_helem;
+
+ case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
+ sv = PAD_SVl((++items)->pad_offset);
+ /* FALLTHROUGH */
+
+ do_HV_vivify_rv2hv_helem:
+ case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
+ /* this is the OPpDEREF action normally found at the end of
+ * ops like aelem, helem, rv2sv */
+ sv = vivify_ref(sv, OPpDEREF_HV);
+ /* FALLTHROUGH */
+
+ do_HV_rv2hv_helem:
+ /* this is basically a copy of pp_rv2hv when it just has the
+ * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
+
+ SvGETMAGIC(sv);
+ if (LIKELY(SvROK(sv))) {
+ if (UNLIKELY(SvAMAGIC(sv))) {
+ sv = amagic_deref_call(sv, to_hv_amg);
+ }
+ sv = SvRV(sv);
+ if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
+ DIE(aTHX_ "Not a HASH reference");
+ }
+ else if (SvTYPE(sv) != SVt_PVHV) {
+ if (!isGV_with_GP(sv))
+ sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
+ sv = MUTABLE_SV(GvHVn((GV*)sv));
+ }
+ /* FALLTHROUGH */
+
+ do_HV_helem:
+ {
+ /* retrieve the key; this may be either a lexical / package
+ * var or a string constant, whose index/ptr is stored as an
+ * item
+ */
+ SV *keysv = NULL; /* to shut up stupid compiler warnings */
+
+ assert(SvTYPE(sv) == SVt_PVHV);
+
+ switch (actions & MDEREF_INDEX_MASK) {
+ case MDEREF_INDEX_none:
+ goto finish;
+
+ case MDEREF_INDEX_const:
+ keysv = UNOP_AUX_item_sv(++items);
+ break;
+
+ case MDEREF_INDEX_padsv:
+ keysv = PAD_SVl((++items)->pad_offset);
+ break;
+
+ case MDEREF_INDEX_gvsv:
+ keysv = UNOP_AUX_item_sv(++items);
+ keysv = GvSVn((GV*)keysv);
+ break;
+ }
+
+ /* see comment above about setting this var */
+ PL_multideref_pc = items;
+
+
+ /* ensure that candidate CONSTs have been HEKified */
+ assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
+ || SvTYPE(keysv) >= SVt_PVMG
+ || !SvOK(keysv)
+ || SvROK(keysv)
+ || SvIsCOW_shared_hash(keysv));
+
+ /* this is basically a copy of pp_helem with OPpDEREF skipped */
+
+ if (!(actions & MDEREF_FLAG_last)) {
+ HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
+ if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
+ DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
+ break;
+ }
+
+ if (PL_op->op_private &
+ (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
+ {
+ if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
+ sv = hv_exists_ent((HV*)sv, keysv, 0)
+ ? &PL_sv_yes : &PL_sv_no;
+ }
+ else {
+ I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
+ sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
+ if (discard)
+ return NORMAL;
+ if (!sv)
+ sv = &PL_sv_undef;
+ }
+ }
+ else {
+ const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
+ const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
+ const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
+ bool preeminent = TRUE;
+ SV **svp;
+ HV * const hv = (HV*)sv;
+ HE* he;
+
+ if (UNLIKELY(localizing)) {
+ MAGIC *mg;
+ HV *stash;
+
+                        /* If we can determine whether the element exists,
+                         * try to preserve the existence of a tied hash
+                         * element by using EXISTS and DELETE if possible.
+                         * Fall back to FETCH and STORE otherwise. */
+ if (SvCANEXISTDELETE(hv))
+ preeminent = hv_exists_ent(hv, keysv, 0);
+ }
+
+ he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
+ svp = he ? &HeVAL(he) : NULL;
+
+
+ if (lval) {
+ if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
+ SV* lv;
+ SV* key2;
+ if (!defer)
+ DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
+ lv = sv_newmortal();
+ sv_upgrade(lv, SVt_PVLV);
+ LvTYPE(lv) = 'y';
+ sv_magic(lv, key2 = newSVsv(keysv),
+ PERL_MAGIC_defelem, NULL, 0);
+ /* sv_magic() increments refcount */
+ SvREFCNT_dec_NN(key2);
+ LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
+ LvTARGLEN(lv) = 1;
+ sv = lv;
+ }
+ else {
+ if (localizing) {
+ if (HvNAME_get(hv) && isGV(sv))
+ save_gp(MUTABLE_GV(sv),
+ !(PL_op->op_flags & OPf_SPECIAL));
+ else if (preeminent) {
+ save_helem_flags(hv, keysv, svp,
+ (PL_op->op_flags & OPf_SPECIAL)
+ ? 0 : SAVEf_SETMAGIC);
+ sv = *svp; /* may have changed */
+ }
+ else
+ SAVEHDELETE(hv, keysv);
+ }
+ }
+ }
+ else {
+ sv = (svp && *svp ? *svp : &PL_sv_undef);
+ /* see note in pp_helem() */
+ if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
+ mg_get(sv);
+ }
+ }
+ goto finish;
+ }
+
+ } /* switch */
+
+ actions >>= MDEREF_SHIFT;
+ } /* while */
+ /* NOTREACHED */
+}
+
+
PP(pp_iter)
{
- dSP;
PERL_CONTEXT *cx;
SV *oldsv;
SV **itersvp;
+ SV *retsv;
- EXTEND(SP, 1);
- cx = &cxstack[cxstack_ix];
+ cx = CX_CUR();
itersvp = CxITERVAR(cx);
switch (CxTYPE(cx)) {
STRLEN maxlen = 0;
const char *max = SvPV_const(end, maxlen);
if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
- RETPUSHNO;
+ goto retno;
oldsv = *itersvp;
if (LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
{
IV cur = cx->blk_loop.state_u.lazyiv.cur;
if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
- RETPUSHNO;
+ goto retno;
oldsv = *itersvp;
/* don't risk potential race */
break;
}
- case CXt_LOOP_FOR: /* iterate array */
{
-
- AV *av = cx->blk_loop.state_u.ary.ary;
SV *sv;
- bool av_is_stack = FALSE;
+ AV *av;
IV ix;
-
- if (!av) {
- av_is_stack = TRUE;
- av = PL_curstack;
- }
- if (PL_op->op_private & OPpITER_REVERSED) {
- ix = --cx->blk_loop.state_u.ary.ix;
- if (UNLIKELY(ix <= (av_is_stack ? cx->blk_loop.resetsp : -1)))
- RETPUSHNO;
- }
- else {
- ix = ++cx->blk_loop.state_u.ary.ix;
- if (UNLIKELY(ix > (av_is_stack ? cx->blk_oldsp : AvFILL(av))))
- RETPUSHNO;
- }
-
- if (UNLIKELY(SvMAGICAL(av) || AvREIFY(av))) {
+ IV inc;
+
+ case CXt_LOOP_LIST: /* for (1,2,3) */
+
+ assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */
+ inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
+ ix = (cx->blk_loop.state_u.stack.ix += inc);
+ if (UNLIKELY(inc > 0
+ ? ix > cx->blk_oldsp
+ : ix <= cx->blk_loop.state_u.stack.basesp)
+ )
+ goto retno;
+
+ sv = PL_stack_base[ix];
+ av = NULL;
+ goto loop_ary_common;
+
+ case CXt_LOOP_ARY: /* for (@ary) */
+
+ av = cx->blk_loop.state_u.ary.ary;
+ inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
+ ix = (cx->blk_loop.state_u.ary.ix += inc);
+ if (UNLIKELY(inc > 0
+ ? ix > AvFILL(av)
+ : ix < 0)
+ )
+ goto retno;
+
+ if (UNLIKELY(SvRMAGICAL(av))) {
SV * const * const svp = av_fetch(av, ix, FALSE);
sv = svp ? *svp : NULL;
}
sv = AvARRAY(av)[ix];
}
+ loop_ary_common:
+
if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
SvSetMagicSV(*itersvp, sv);
break;
SvREFCNT_inc_simple_void_NN(sv);
}
}
- else if (!av_is_stack) {
+ else if (av) {
sv = newSVavdefelem(av, ix, 0);
}
else
default:
DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
}
- RETPUSHYES;
+
+ retsv = &PL_sv_yes;
+ if (0) {
+ retno:
+ retsv = &PL_sv_no;
+ }
+ /* pp_enteriter should have pre-extended the stack */
+ assert(PL_stack_sp < PL_stack_max);
+    *++PL_stack_sp = retsv;
+
+ return PL_op->op_next;
+
+
}
/*
char *strend;
const char *c;
STRLEN clen;
- I32 iters = 0;
- I32 maxiters;
+ SSize_t iters = 0;
+ SSize_t maxiters;
bool once;
U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
See "how taint works" above */
if (DO_UTF8(TARG) && !doutf8) {
nsv = sv_newmortal();
SvSetSV(nsv, dstr);
- if (PL_encoding)
- sv_recode_to_utf8(nsv, PL_encoding);
+ if (IN_ENCODING)
+ sv_recode_to_utf8(nsv, _get_encoding());
else
sv_utf8_upgrade(nsv);
c = SvPV_const(nsv, clen);
Move(s, d, i+1, char); /* include the NUL */
}
SPAGAIN;
- mPUSHi((I32)iters);
+ mPUSHi(iters);
}
}
else {
first = FALSE;
}
else {
- if (PL_encoding) {
+ if (IN_ENCODING) {
if (!nsv) nsv = sv_newmortal();
sv_copypv(nsv, repl);
- if (!DO_UTF8(nsv)) sv_recode_to_utf8(nsv, PL_encoding);
+ if (!DO_UTF8(nsv)) sv_recode_to_utf8(nsv, _get_encoding());
sv_catsv(dstr, nsv);
}
else sv_catsv(dstr, repl);
}
if (once)
break;
- } while (CALLREGEXEC(rx, s, strend, orig, s == m,
+ } while (CALLREGEXEC(rx, s, strend, orig,
+ s == m, /* Yields minend of 0 or 1 */
TARG, NULL,
REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
+ assert(strend >= s);
sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
if (rpm->op_pmflags & PMf_NONDESTRUCT) {
SvPV_set(dstr, NULL);
SPAGAIN;
- mPUSHi((I32)iters);
+ mPUSHi(iters);
}
}
(void)POPMARK; /* pop dst */
SP = PL_stack_base + POPMARK; /* pop original mark */
if (gimme == G_SCALAR) {
- if (PL_op->op_private & OPpGREP_LEX) {
- SV* const sv = sv_newmortal();
- sv_setiv(sv, items);
- PUSHs(sv);
- }
- else {
dTARGET;
XPUSHi(items);
- }
}
else if (gimme == G_ARRAY)
SP += items;
ENTER_with_name("grep_item"); /* enter inner scope */
SAVEVPTR(PL_curpm);
- src = PL_stack_base[*PL_markstack_ptr];
+ src = PL_stack_base[TOPMARK];
if (SvPADTMP(src)) {
- src = PL_stack_base[*PL_markstack_ptr] = sv_mortalcopy(src);
+ src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
PL_tmps_floor++;
}
SvTEMP_off(src);
- if (PL_op->op_private & OPpGREP_LEX)
- PAD_SVl(PL_op->op_targ) = src;
- else
- DEFSV_set(src);
+ DEFSV_set(src);
RETURNOP(cLOGOP->op_other);
}
}
-PP(pp_leavesub)
+/* leavesub_adjust_stacks():
+ *
+ * Process the sub's return args (in the range base_sp+1 .. PL_stack_sp),
+ * and do the equivalent of a FREETMPS (and TAINT_NOT).
+ * Not intended to be called in void context.
+ *
+ * The main things done to process the return args are:
+ * * in scalar context, only return the last arg (or PL_sv_undef if none);
+ * * make a TEMP copy of every return arg, except where we can optimise
+ * the copy away without it being semantically visible;
+ * * make sure the arg isn't prematurely freed; in the case of an arg
+ * not copied, this may involve mortalising it. For example, in
+ * C<sub f { my $x = ...; $x }>, $x would be freed when we do
+ * CX_LEAVE_SCOPE(cx) unless it's protected or copied.
+ *
+ * There is a big issue with doing a FREETMPS. We would like to free any
+ * temps created by the last statement the sub executed, rather than
+ * leaving them for the caller. In a situation where a sub call isn't
+ * soon followed by a nextstate (e.g. nested recursive calls, a la
+ * fibonacci()), temps can accumulate, causing memory and performance
+ * issues.
+ *
+ * On the other hand, we don't want to free any TEMPs which are keeping
+ * alive any return args that we skip copying; nor do we wish to undo any
+ * mortalising or mortal copying we do here.
+ *
+ * The solution is to split the temps stack frame into two, with a cut
+ * point delineating the two halves. We arrange that by the end of this
+ * function, all the temps stack frame entries we wish to keep are in the
+ * range PL_tmps_floor+1.. tmps_base-1, while the ones we free now are in
+ * the range tmps_base .. PL_tmps_ix. During the course of this
+ * function, tmps_base starts off as PL_tmps_floor+1, then increases
+ * whenever we find or create a temp that we know should be kept. In
+ * general the stuff above tmps_base is undecided until we reach the end,
+ * and we may need a sort stage for that.
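+ *
+ * Roughly:
+ *
+ *   tmps stack: ... | PL_tmps_floor+1 .. tmps_base-1 | tmps_base .. PL_tmps_ix
+ *                     (keep: still referenced)         (freed at the end)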
+ *
+ * To determine whether a TEMP is keeping a return arg alive, every
+ * arg that is kept rather than copied and which has the SvTEMP flag
+ * set, has the flag temporarily unset, to mark it. At the end we scan
+ * the temps stack frame above the cut for entries without SvTEMP and
+ * keep them, while turning SvTEMP on again. Note that if we die before
+ * the SvTEMPs are enabled again, it's safe: at worst, subsequent use of
+ * those SVs may be slightly less efficient.
+ *
+ * In practice various optimisations for some common cases mean we can
+ * avoid most of the scanning and swapping about with the temps stack.
+ */
+
+STATIC void
+S_leavesub_adjust_stacks(pTHX_ SV **base_sp, I32 gimme)
{
dSP;
- SV **mark;
- SV **newsp;
- PMOP *newpm;
+ SV **from_sp; /* where we're copying args from */
+ SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
+ SSize_t nargs;
+
+ TAINT_NOT;
+
+ if (gimme == G_ARRAY) {
+ from_sp = base_sp + 1;
+ nargs = SP - base_sp;
+ }
+ else {
+ assert(gimme == G_SCALAR);
+ if (UNLIKELY(base_sp >= SP)) {
+ /* no return args */
+ assert(base_sp == SP);
+ EXTEND(SP, 1);
+ *++SP = &PL_sv_undef;
+ base_sp = SP;
+ nargs = 0;
+ }
+ else {
+ from_sp = SP;
+ nargs = 1;
+ }
+ }
+
+ /* common code for G_SCALAR and G_ARRAY */
+
+ tmps_base = PL_tmps_floor + 1;
+
+ assert(nargs >= 0);
+ if (nargs) {
+ /* pointer version of tmps_base. Not safe across temp stack
+ * reallocs. */
+ SV **tmps_basep;
+
+ EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
+ tmps_basep = PL_tmps_stack + tmps_base;
+        /* whether any SVs have had SvTEMP temporarily turned off,
+ * indicating that they need saving below the cut */
+
+ /* process each return arg */
+
+ do {
+ SV *sv = *from_sp++;
+
+ assert(PL_tmps_ix + nargs < PL_tmps_max);
+#ifdef DEBUGGING
+ /* PADTMPs with container set magic shouldn't appear in the
+ * wild. This assert is more important for pp_leavesublv(),
+ * but by testing for it here, we're more likely to catch
+ * bad cases (what with :lvalue subs not being widely
+ * deployed). The two issues are that for something like
+ * sub :lvalue { $tied{foo} }
+ * or
+ * sub :lvalue { substr($foo,1,2) }
+ * pp_leavesublv() will croak if the sub returns a PADTMP,
+ * and currently functions like pp_substr() return a mortal
+ * rather than using their PADTMP when returning a PVLV.
+ * This is because the PVLV will hold a ref to $foo,
+ * so $foo would get delayed in being freed while
+ * the PADTMP SV remained in the PAD.
+ * So if this assert fails it means either:
+ * 1) there is pp code similar to pp_substr that is
+ * returning a PADTMP instead of a mortal, and probably
+ * needs fixing, or
+ * 2) pp_leavesub is making unwarranted assumptions
+ * about always croaking on a PADTMP
+ */
+ if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
+ MAGIC *mg;
+ for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
+ assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
+ }
+ }
+#endif
+
+ if (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1) {
+ /* can optimise away the copy */
+ *++base_sp = sv;
+
+ /* Since this SV is an SvTEMP with a ref count of 1, we
+ * don't need to re-mortalise it; instead we just need to
+ * ensure that its existing entry in the temps stack frame
+ * ends up below the cut and so avoids being freed this
+ * time round. We mark it as needing to be kept by
+ * temporarily unsetting SvTEMP; then at the end, we
+ * shuffle any !SvTEMP entries on the tmps stack back
+ * below the cut.
+ * However, there's a significant chance that there's a
+ * 1:1 correspondence between the first few (or all)
+ * elements in the return args stack frame and those in
+ * the temps stack frame;
+                 * e.g. sub f { ....; map {...} .... },
+ * or e.g. if we're exiting multiple scopes and one of the
+ * inner scopes has already made mortal copies of each
+ * return arg.
+ *
+ * If so, this arg sv will correspond to the next item
+ * above the cut, and so can be kept merely by moving the
+ * cut boundary up one, rather than messing with SvTEMP.
+                 * If all args are 1:1 then we can avoid the sorting
+ * stage below completely.
+ */
+ if (sv == *tmps_basep)
+ tmps_basep++;
+ else
+ SvTEMP_off(sv);
+ }
+ else {
+ /* Make a mortal copy of the SV.
+ * The following code is the equivalent of sv_mortalcopy()
+ * except that:
+ * * it assumes the temps stack has already been extended;
+ * * it optimises the copying for some simple SV types;
+ * * it puts the new item at the cut rather than at
+ * ++PL_tmps_ix, moving the previous occupant there
+ * instead.
+ */
+ SV *newsv = newSV(0);
+
+ PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
+ /* put it on the tmps stack early so it gets freed if we die */
+ *tmps_basep++ = newsv;
+ *++base_sp = newsv;
+
+ if (SvTYPE(sv) <= SVt_IV) {
+ /* arg must be one of undef, IV/UV, or RV: skip
+ * sv_setsv_flags() and do the copy directly */
+ U32 dstflags;
+ U32 srcflags = SvFLAGS(sv);
+
+ assert(!SvGMAGICAL(sv));
+ if (srcflags & (SVf_IOK|SVf_ROK)) {
+ SET_SVANY_FOR_BODYLESS_IV(newsv);
+
+ if (srcflags & SVf_ROK) {
+ newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
+ /* SV type plus flags */
+ dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
+ }
+ else {
+ /* both src and dst are <= SVt_IV, so sv_any
+ * points to the head; so access the heads
+ * directly rather than going via sv_any.
+ */
+ assert( &(sv->sv_u.svu_iv)
+ == &(((XPVIV*) SvANY(sv))->xiv_iv));
+ assert( &(newsv->sv_u.svu_iv)
+ == &(((XPVIV*) SvANY(newsv))->xiv_iv));
+ newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
+ /* SV type plus flags */
+ dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
+ |(srcflags & SVf_IVisUV));
+ }
+ }
+ else {
+ assert(!(srcflags & SVf_OK));
+ dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
+ }
+ SvFLAGS(newsv) = dstflags;
+
+ }
+ else {
+ /* do the full sv_setsv() */
+ SSize_t old_base;
+
+ SvTEMP_on(newsv);
+ old_base = tmps_basep - PL_tmps_stack;
+ SvGETMAGIC(sv);
+ sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
+                    /* the mg_get or sv_setsv might have created new temps
+ * or realloced the tmps stack; regrow and reload */
+ EXTEND_MORTAL(nargs);
+ tmps_basep = PL_tmps_stack + old_base;
+ TAINT_NOT; /* Each item is independent */
+ }
+
+ }
+ } while (--nargs);
+
+ /* If there are any temps left above the cut, we need to sort
+ * them into those to keep and those to free. The only ones to
+ * keep are those for which we've temporarily unset SvTEMP.
+ * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
+ * swapping pairs as necessary. Stop when we meet in the middle.
+ */
+ {
+ SV **top = PL_tmps_stack + PL_tmps_ix;
+ while (tmps_basep <= top) {
+ SV *sv = *top;
+ if (SvTEMP(sv))
+ top--;
+ else {
+ SvTEMP_on(sv);
+ *top = *tmps_basep;
+ *tmps_basep = sv;
+ tmps_basep++;
+ }
+ }
+ }
+
+ tmps_base = tmps_basep - PL_tmps_stack;
+ }
+
+ PL_stack_sp = base_sp;
+
+ /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
+ while (PL_tmps_ix >= tmps_base) {
+ SV* const sv = PL_tmps_stack[PL_tmps_ix--];
+#ifdef PERL_POISON
+ PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
+#endif
+ if (LIKELY(sv)) {
+ SvTEMP_off(sv);
+ SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
+ }
+ }
+}
+
+
+PP(pp_leavesub)
+{
I32 gimme;
PERL_CONTEXT *cx;
- SV *sv;
+ SV **oldsp;
+ OP *retop;
- if (CxMULTICALL(&cxstack[cxstack_ix]))
+ cx = CX_CUR();
+ assert(CxTYPE(cx) == CXt_SUB);
+
+ if (CxMULTICALL(cx)) {
+ /* entry zero of a stack is always PL_sv_undef, which
+ * simplifies converting a '()' return into undef in scalar context */
+ assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
return 0;
+ }
- POPBLOCK(cx,newpm);
- cxstack_ix++; /* temporarily protect top context */
+ gimme = cx->blk_gimme;
+ oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */
- TAINT_NOT;
- if (gimme == G_SCALAR) {
- MARK = newsp + 1;
- if (LIKELY(MARK <= SP)) {
- if (cx->blk_sub.cv && CvDEPTH(cx->blk_sub.cv) > 1) {
- if (SvTEMP(TOPs) && SvREFCNT(TOPs) == 1
- && !SvMAGICAL(TOPs)) {
- *MARK = SvREFCNT_inc(TOPs);
- FREETMPS;
- sv_2mortal(*MARK);
- }
- else {
- sv = SvREFCNT_inc(TOPs); /* FREETMPS could clobber it */
- FREETMPS;
- *MARK = sv_mortalcopy(sv);
- SvREFCNT_dec_NN(sv);
- }
- }
- else if (SvTEMP(TOPs) && SvREFCNT(TOPs) == 1
- && !SvMAGICAL(TOPs)) {
- *MARK = TOPs;
- }
- else
- *MARK = sv_mortalcopy(TOPs);
- }
- else {
- MEXTEND(MARK, 0);
- *MARK = &PL_sv_undef;
- }
- SP = MARK;
- }
- else if (gimme == G_ARRAY) {
- for (MARK = newsp + 1; MARK <= SP; MARK++) {
- if (!SvTEMP(*MARK) || SvREFCNT(*MARK) != 1
- || SvMAGICAL(*MARK)) {
- *MARK = sv_mortalcopy(*MARK);
- TAINT_NOT; /* Each item is independent */
- }
- }
- }
- PUTBACK;
+ if (gimme == G_VOID)
+ PL_stack_sp = oldsp;
+ else
+ S_leavesub_adjust_stacks(aTHX_ oldsp, gimme);
+
+ CX_LEAVE_SCOPE(cx);
+ POPSUB(cx); /* Stack values are safe: release CV and @_ ... */
+ POPBLOCK(cx);
+ retop = cx->blk_sub.retop;
+ CX_POP(cx);
- LEAVE;
- POPSUB(cx,sv); /* Stack values are safe: release CV and @_ ... */
- cxstack_ix--;
- PL_curpm = newpm; /* ... and pop $1 et al */
+ return retop;
+}
+
+
+/* clear (if possible) or abandon the current @_. If 'abandon' is true,
+ * forces an abandon */
+
+void
+Perl_clear_defarray(pTHX_ AV* av, bool abandon)
+{
+ const SSize_t fill = AvFILLp(av);
- LEAVESUB(sv);
- return cx->blk_sub.retop;
+ PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
+
+ if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) {
+ av_clear(av);
+ AvREIFY_only(av);
+ }
+ else {
+ AV *newav = newAV();
+ av_extend(newav, fill);
+ AvREIFY_only(newav);
+ PAD_SVl(0) = MUTABLE_SV(newav);
+ SvREFCNT_dec_NN(av);
+ }
}
+
PP(pp_entersub)
{
dSP; dPOPss;
GV *gv;
CV *cv;
PERL_CONTEXT *cx;
- I32 gimme;
- const bool hasargs = (PL_op->op_flags & OPf_STACKED) != 0;
+ I32 old_savestack_ix;
if (UNLIKELY(!sv))
- DIE(aTHX_ "Not a CODE reference");
- /* This is overwhelmingly the most common case: */
- if (!LIKELY(SvTYPE(sv) == SVt_PVGV && (cv = GvCVu((const GV *)sv)))) {
+ goto do_die;
+
+ /* Locate the CV to call:
+ * - most common case: RV->CV: f(), $ref->():
+ * note that if a sub is compiled before its caller is compiled,
+ * the stash entry will be a ref to a CV, rather than being a GV.
+ * - second most common case: CV: $ref->method()
+ */
+
+ /* a non-magic-RV -> CV ? */
+ if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
+ cv = MUTABLE_CV(SvRV(sv));
+ if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
+ goto do_ref;
+ }
+ else
+ cv = MUTABLE_CV(sv);
+
+ /* a CV ? */
+ if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
+ /* handle all the weird cases */
switch (SvTYPE(sv)) {
+ case SVt_PVLV:
+ if (!isGV_with_GP(sv))
+ goto do_default;
+ /* FALLTHROUGH */
case SVt_PVGV:
- we_have_a_glob:
- if (!(cv = GvCVu((const GV *)sv))) {
+ cv = GvCVu((const GV *)sv);
+ if (UNLIKELY(!cv)) {
HV *stash;
cv = sv_2cv(sv, &stash, &gv, 0);
- }
- if (!cv) {
- ENTER;
- SAVETMPS;
- goto try_autoload;
+ if (!cv) {
+ old_savestack_ix = PL_savestack_ix;
+ goto try_autoload;
+ }
}
break;
- case SVt_PVLV:
- if(isGV_with_GP(sv)) goto we_have_a_glob;
- /* FALLTHROUGH */
+
default:
- if (sv == &PL_sv_yes) { /* unfound import, ignore */
- if (hasargs)
- SP = PL_stack_base + POPMARK;
- else
- (void)POPMARK;
- RETURN;
- }
+ do_default:
SvGETMAGIC(sv);
if (SvROK(sv)) {
- if (SvAMAGIC(sv)) {
+ do_ref:
+ if (UNLIKELY(SvAMAGIC(sv))) {
sv = amagic_deref_call(sv, to_cv_amg);
/* Don't SPAGAIN here. */
}
else {
const char *sym;
STRLEN len;
- if (!SvOK(sv))
+ if (UNLIKELY(!SvOK(sv)))
DIE(aTHX_ PL_no_usym, "a subroutine");
+
+ if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
+ if (PL_op->op_flags & OPf_STACKED) /* hasargs */
+ SP = PL_stack_base + POPMARK;
+ else
+ (void)POPMARK;
+ if (GIMME_V == G_SCALAR)
+ PUSHs(&PL_sv_undef);
+ RETURN;
+ }
+
sym = SvPV_nomg_const(sv, len);
if (PL_op->op_private & HINT_STRICT_REFS)
DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
break;
}
cv = MUTABLE_CV(SvRV(sv));
- if (SvTYPE(cv) == SVt_PVCV)
+ if (LIKELY(SvTYPE(cv) == SVt_PVCV))
break;
/* FALLTHROUGH */
case SVt_PVHV:
case SVt_PVAV:
+ do_die:
DIE(aTHX_ "Not a CODE reference");
- /* This is the second most common case: */
- case SVt_PVCV:
- cv = MUTABLE_CV(sv);
- break;
}
}
- ENTER;
-
- retry:
- if (UNLIKELY(CvCLONE(cv) && ! CvCLONED(cv)))
- DIE(aTHX_ "Closure prototype called");
- if (UNLIKELY(!CvROOT(cv) && !CvXSUB(cv))) {
+ /* At this point we want to save PL_savestack_ix, either by doing a
+ * PUSHSUB, or for XS, doing an ENTER. But we don't yet know the final
+     * CV we will be using (so we don't know whether it's XS, so we can't
+ * PUSHSUB or ENTER yet), and determining cv may itself push stuff on
+ * the save stack. So remember where we are currently on the save
+ * stack, and later update the CX or scopestack entry accordingly. */
+ old_savestack_ix = PL_savestack_ix;
+
+ /* these two fields are in a union. If they ever become separate,
+ * we have to test for both of them being null below */
+ assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
+ while (UNLIKELY(!CvROOT(cv))) {
GV* autogv;
SV* sub_name;
}
/* should call AUTOLOAD now? */
else {
-try_autoload:
- if ((autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
- GvNAMEUTF8(gv) ? SVf_UTF8 : 0)))
- {
- cv = GvCV(autogv);
- }
- else {
- sorry:
- sub_name = sv_newmortal();
- gv_efullname3(sub_name, gv, NULL);
- DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
- }
+ try_autoload:
+ autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
+ GvNAMEUTF8(gv) ? SVf_UTF8 : 0);
+ cv = autogv ? GvCV(autogv) : NULL;
}
- if (!cv)
- goto sorry;
- goto retry;
+ if (!cv) {
+ sub_name = sv_newmortal();
+ gv_efullname3(sub_name, gv, NULL);
+ DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
+ }
}
+ /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
+ if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
+ DIE(aTHX_ "Closure prototype called");
+
if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
&& !CvNODEBUG(cv)))
{
DIE(aTHX_ "No DB::sub routine defined");
}
- gimme = GIMME_V;
-
if (!(CvISXSUB(cv))) {
/* This path taken at least 75% of the time */
dMARK;
- PADLIST * const padlist = CvPADLIST(cv);
+ PADLIST *padlist;
I32 depth;
+ bool hasargs;
+ I32 gimme;
+
+ /* keep PADTMP args alive throughout the call (we need to do this
+ * because @_ isn't refcounted). Note that we create the mortals
+ * in the caller's tmps frame, so they won't be freed until after
+ * we return from the sub.
+ */
+ {
+ SV **svp = MARK;
+ while (svp < SP) {
+ SV *sv = *++svp;
+ if (!sv)
+ continue;
+ if (SvPADTMP(sv))
+ *svp = sv = sv_mortalcopy(sv);
+ SvTEMP_off(sv);
+ }
+ }
+ gimme = GIMME_V;
PUSHBLOCK(cx, CXt_SUB, MARK);
+ hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
PUSHSUB(cx);
cx->blk_sub.retop = PL_op->op_next;
+ cx->blk_oldsaveix = old_savestack_ix;
+
+ padlist = CvPADLIST(cv);
if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2)) {
PERL_STACK_OVERFLOW_CHECK();
pad_push(padlist, depth);
}
- SAVECOMPPAD();
PAD_SET_CUR_NOSAVE(padlist, depth);
if (LIKELY(hasargs)) {
AV *const av = MUTABLE_AV(PAD_SVl(0));
SSize_t items;
AV **defavp;
- if (UNLIKELY(AvREAL(av))) {
- /* @_ is normally not REAL--this should only ever
- * happen when DB::sub() calls things that modify @_ */
- av_clear(av);
- AvREAL_off(av);
- AvREIFY_on(av);
- }
defavp = &GvAV(PL_defgv);
cx->blk_sub.savearray = *defavp;
*defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
- CX_CURPAD_SAVE(cx->blk_sub);
- cx->blk_sub.argarray = av;
- items = SP - MARK;
+ /* it's the responsibility of whoever leaves a sub to ensure
+ * that a clean, empty AV is left in pad[0]. This is normally
+ * done by POPSUB() */
+ assert(!AvREAL(av) && AvFILLp(av) == -1);
+
+ items = SP - MARK;
if (UNLIKELY(items - 1 > AvMAX(av))) {
SV **ary = AvALLOC(av);
AvMAX(av) = items - 1;
Copy(MARK+1,AvARRAY(av),items,SV*);
AvFILLp(av) = items - 1;
-
- MARK = AvARRAY(av);
- while (items--) {
- if (*MARK)
- {
- if (SvPADTMP(*MARK)) {
- *MARK = sv_mortalcopy(*MARK);
- }
- SvTEMP_off(*MARK);
- }
- MARK++;
- }
}
- SAVETMPS;
if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
!CvLVALUE(cv)))
- DIE(aTHX_ "Can't modify non-lvalue subroutine call");
+ DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
+ SVfARG(cv_name(cv, NULL, 0)));
/* warning must come *after* we fully set up the context
* stuff so that __WARN__ handlers can safely dounwind()
* if they want to
else {
SSize_t markix = TOPMARK;
+ ENTER;
+ /* pretend we did the ENTER earlier */
+ PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
+
SAVETMPS;
PUTBACK;
& PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
!CvLVALUE(cv)))
- DIE(aTHX_ "Can't modify non-lvalue subroutine call");
+ DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
+ SVfARG(cv_name(cv, NULL, 0)));
- if (UNLIKELY(!hasargs && GvAV(PL_defgv))) {
+ if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
/* Need to copy @_ to stack. Alternative may be to
* switch stack to @_, and copy return values
* back. This would allow popping @_ in XSUB, e.g.. XXXX */
CvXSUB(cv)(aTHX_ cv);
/* Enforce some sanity in scalar context. */
- if (gimme == G_SCALAR) {
+ if (GIMME_V == G_SCALAR) {
SV **svp = PL_stack_base + markix + 1;
if (svp != PL_stack_sp) {
*svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
return sv;
}
-PP(pp_method)
-{
- dSP;
- SV* const sv = TOPs;
-
- if (SvROK(sv)) {
- SV* const rsv = SvRV(sv);
- if (SvTYPE(rsv) == SVt_PVCV) {
- SETs(rsv);
- RETURN;
- }
- }
-
- SETs(method_common(sv, NULL));
- RETURN;
-}
-
-PP(pp_method_named)
-{
- dSP;
- SV* const meth = cMETHOPx_meth(PL_op);
- U32 hash = SvSHARED_HASH(meth);
-
- XPUSHs(method_common(meth, &hash));
- RETURN;
-}
-
-STATIC SV *
-S_method_common(pTHX_ SV* meth, U32* hashp)
+PERL_STATIC_INLINE HV *
+S_opmethod_stash(pTHX_ SV* meth)
{
SV* ob;
- GV* gv;
HV* stash;
- SV *packsv = NULL;
- SV * const sv = PL_stack_base + TOPMARK == PL_stack_sp
+
+ SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
? (Perl_croak(aTHX_ "Can't call method \"%"SVf"\" without a "
"package or object reference", SVfARG(meth)),
(SV *)NULL)
: *(PL_stack_base + TOPMARK + 1);
- PERL_ARGS_ASSERT_METHOD_COMMON;
+ PERL_ARGS_ASSERT_OPMETHOD_STASH;
if (UNLIKELY(!sv))
undefined:
Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on an undefined value",
SVfARG(meth));
- SvGETMAGIC(sv);
+ if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
+ else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
+ stash = gv_stashsv(sv, GV_CACHE_ONLY);
+ if (stash) return stash;
+ }
+
if (SvROK(sv))
ob = MUTABLE_SV(SvRV(sv));
else if (!SvOK(sv)) goto undefined;
const char * const packname = SvPV_nomg_const(sv, packlen);
const U32 packname_utf8 = SvUTF8(sv);
stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
- if (stash) goto fetch;
+ if (stash) return stash;
if (!(iogv = gv_fetchpvn_flags(
packname, packlen, packname_utf8, SVt_PVIO
}
/* assume it's a package name */
stash = gv_stashpvn(packname, packlen, packname_utf8);
- if (!stash) packsv = sv;
- goto fetch;
+ if (stash) return stash;
+ else return MUTABLE_HV(sv);
}
/* it _is_ a filehandle name -- replace with a reference */
*(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
: meth));
}
- stash = SvSTASH(ob);
+ return SvSTASH(ob);
+}
- fetch:
- /* NOTE: stash may be null, hope hv_fetch_ent and
- gv_fetchmethod can cope (it seems they can) */
+PP(pp_method)
+{
+ dSP;
+ GV* gv;
+ HV* stash;
+ SV* const meth = TOPs;
- /* shortcut for simple names */
- if (hashp) {
- const HE* const he = hv_fetch_ent(stash, meth, 0, *hashp);
- if (he) {
- gv = MUTABLE_GV(HeVAL(he));
- assert(stash);
- if (isGV(gv) && GvCV(gv) &&
- (!GvCVGEN(gv) || GvCVGEN(gv)
- == (PL_sub_generation + HvMROMETA(stash)->cache_gen)))
- return MUTABLE_SV(GvCV(gv));
- }
+ if (SvROK(meth)) {
+ SV* const rmeth = SvRV(meth);
+ if (SvTYPE(rmeth) == SVt_PVCV) {
+ SETs(rmeth);
+ RETURN;
+ }
+ }
+
+ stash = opmethod_stash(meth);
+
+ gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
+ assert(gv);
+
+ SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
+ RETURN;
+}
+
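+/* Look up 'meth' in the method cache hash 'cache'; if the entry is a GV with
+ * a CV whose cached-method generation is either unset (a real method entry)
+ * or still matches PL_sub_generation plus the stash's MRO cache_gen, push
+ * that CV and return from the pp function. */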
+#define METHOD_CHECK_CACHE(stash,cache,meth) \
+ const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
+ if (he) { \
+ gv = MUTABLE_GV(HeVAL(he)); \
+ if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
+ == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
+ { \
+ XPUSHs(MUTABLE_SV(GvCV(gv))); \
+ RETURN; \
+ } \
+ } \
+
+PP(pp_method_named)
+{
+ dSP;
+ GV* gv;
+ SV* const meth = cMETHOPx_meth(PL_op);
+ HV* const stash = opmethod_stash(meth);
+
+ if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
+ METHOD_CHECK_CACHE(stash, stash, meth);
+ }
+
+ gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
+ assert(gv);
+
+ XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
+ RETURN;
+}
+
+PP(pp_method_super)
+{
+ dSP;
+ GV* gv;
+ HV* cache;
+ SV* const meth = cMETHOPx_meth(PL_op);
+ HV* const stash = CopSTASH(PL_curcop);
+    /* Actually, SUPER doesn't need the real object's (or class's) stash at
+     * all, as it uses CopSTASH. However, we must ensure that the object (or
+     * class) is correct (this check is done by S_opmethod_stash) */
+ opmethod_stash(meth);
+
+ if ((cache = HvMROMETA(stash)->super)) {
+ METHOD_CHECK_CACHE(stash, cache, meth);
}
- assert(stash || packsv);
- gv = gv_fetchmethod_sv_flags(stash ? stash : MUTABLE_HV(packsv),
- meth, GV_AUTOLOAD | GV_CROAK);
+ gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
assert(gv);
- return isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv);
+ XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
+ RETURN;
+}
+
+PP(pp_method_redir)
+{
+ dSP;
+ GV* gv;
+ SV* const meth = cMETHOPx_meth(PL_op);
+ HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
+ opmethod_stash(meth); /* not used but needed for error checks */
+
+ if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
+ else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
+
+ gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
+ assert(gv);
+
+ XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
+ RETURN;
+}
+
+PP(pp_method_redir_super)
+{
+ dSP;
+ GV* gv;
+ HV* cache;
+ SV* const meth = cMETHOPx_meth(PL_op);
+ HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
+ opmethod_stash(meth); /* not used but needed for error checks */
+
+ if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
+ else if ((cache = HvMROMETA(stash)->super)) {
+ METHOD_CHECK_CACHE(stash, cache, meth);
+ }
+
+ gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
+ assert(gv);
+
+ XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
+ RETURN;
}
/*
- * Local variables:
- * c-indentation-style: bsd
- * c-basic-offset: 4
- * indent-tabs-mode: nil
- * End:
- *
* ex: set ts=8 sts=4 sw=4 et:
*/