SV * const temp = left;
left = right; right = temp;
}
- if (TAINTING_get && UNLIKELY(TAINT_get) && !SvTAINTED(right))
+ assert(TAINTING_get || !TAINT_get);
+ if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
TAINT_NOT;
if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
/* *foo =\&bar */
assert(source);
assert(CvFLAGS(source) & CVf_CONST);
- SvREFCNT_inc_void(source);
+ SvREFCNT_inc_simple_void_NN(source);
SvREFCNT_dec_NN(upgraded);
SvRV_set(right, MUTABLE_SV(source));
}
}
-/* also used for: pp_i_predec() pp_i_preinc() pp_predec() */
+/* also used for: pp_i_preinc() */
+
+/* ++$x : prefix increment. Operates in place on the SV at the top of
+ * the stack and leaves that same SV there (hence 'return NORMAL' with
+ * no stack adjustment). */
PP(pp_preinc)
{
-    dSP;
-    const bool inc =
-        PL_op->op_type == OP_PREINC || PL_op->op_type == OP_I_PREINC;
-    if (UNLIKELY(SvTYPE(TOPs) >= SVt_PVAV || (isGV_with_GP(TOPs) && !SvFAKE(TOPs))))
-        Perl_croak_no_modify();
-    if (LIKELY(!SvREADONLY(TOPs) && !SvGMAGICAL(TOPs) && SvIOK_notUV(TOPs) && !SvNOK(TOPs) && !SvPOK(TOPs))
-        && SvIVX(TOPs) != (inc ? IV_MAX : IV_MIN))
+    SV *sv = *PL_stack_sp;
+
+    /* Fast path: the SV is a pure-IV scalar -- IOK set and none of:
+     * get magic (SVs_GMG), readonly/ref/copy-on-write (SVf_THINKFIRST,
+     * SVf_ROK), unsigned flavour (SVf_IVisUV), nor any public or cached
+     * NV/PV value that would need invalidating -- and the IV itself is
+     * below IV_MAX, so a plain add can't overflow.  In that case bump
+     * the IV directly and skip sv_inc() entirely. */
+    if (LIKELY(((sv->sv_flags &
+                 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
+                  SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
+                == SVf_IOK))
+        && SvIVX(sv) != IV_MAX)
    {
-        SvIV_set(TOPs, SvIVX(TOPs) + (inc ? 1 : -1));
-        SvFLAGS(TOPs) &= ~(SVp_NOK|SVp_POK);
+        SvIV_set(sv, SvIVX(sv) + 1);
    }
-    else /* Do all the PERL_PRESERVE_IVUV conditionals in sv_inc */
-        if (inc) sv_inc(TOPs);
-        else sv_dec(TOPs);
-    SvSETMAGIC(TOPs);
+    else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
+        sv_inc(sv);
+    /* fire any set magic on the (possibly tied) variable */
+    SvSETMAGIC(sv);
+    return NORMAL;
+}
+
+
+/* also used for: pp_i_predec() */
+
+/* --$x : prefix decrement.  Mirror image of pp_preinc() above: works in
+ * place on the SV at the top of the stack and leaves it there. */
+PP(pp_predec)
+{
+    SV *sv = *PL_stack_sp;
+
+    /* Fast path: pure-IV scalar (no magic, not readonly/ref/UV, no
+     * cached NV/PV) whose value is above IV_MIN, so the subtraction
+     * can't underflow -- decrement the IV directly. */
+    if (LIKELY(((sv->sv_flags &
+                 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
+                  SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
+                == SVf_IOK))
+        && SvIVX(sv) != IV_MIN)
+    {
+        SvIV_set(sv, SvIVX(sv) - 1);
+    }
+    else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
+        sv_dec(sv);
+    /* fire any set magic on the (possibly tied) variable */
+    SvSETMAGIC(sv);
    return NORMAL;
}
RETPUSHNO;
}
+
+
PP(pp_add)
{
dSP; dATARGET; bool useleft; SV *svl, *svr;
+
tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
svr = TOPs;
svl = TOPm1s;
- useleft = USE_LEFT(svl);
#ifdef PERL_PRESERVE_IVUV
+
+ /* special-case some simple common cases */
+ if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
+ IV il, ir;
+ U32 flags = (svl->sv_flags & svr->sv_flags);
+ if (flags & SVf_IOK) {
+ /* both args are simple IVs */
+ UV topl, topr;
+ il = SvIVX(svl);
+ ir = SvIVX(svr);
+ do_iv:
+ topl = ((UV)il) >> (UVSIZE * 8 - 2);
+ topr = ((UV)ir) >> (UVSIZE * 8 - 2);
+
+ /* if both are in a range that can't under/overflow, do a
+ * simple integer add: if the top of both numbers
+ * are 00 or 11, then it's safe */
+ if (!( ((topl+1) | (topr+1)) & 2)) {
+ SP--;
+ TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
+ SETs(TARG);
+ RETURN;
+ }
+ goto generic;
+ }
+ else if (flags & SVf_NOK) {
+ /* both args are NVs */
+ NV nl = SvNVX(svl);
+ NV nr = SvNVX(svr);
+
+ if (
+#if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
+ !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
+ && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
+#else
+ nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
+#endif
+ )
+ /* nothing was lost by converting to IVs */
+ goto do_iv;
+ SP--;
+ TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
+ SETs(TARG);
+ RETURN;
+ }
+ }
+
+ generic:
+
+ useleft = USE_LEFT(svl);
/* We must see if we can perform the addition with integers if possible,
as the integer code detects overflow while the NV code doesn't.
If either argument hasn't had a numeric conversion yet attempt to get
} /* Overflow, drop through to NVs. */
}
}
+
+#else
+ useleft = USE_LEFT(svl);
#endif
+
{
NV value = SvNV_nomg(svr);
(void)POPs;
*
* If the LHS element is a 'my' declaration' and has a refcount of 1, then
* it can't be common and can be skipped.
+ *
+ * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
+ * that we thought we didn't need to call S_aassign_copy_common(), but we
+ * have anyway for sanity checking. If we find we need to copy, then panic.
*/
PERL_STATIC_INLINE void
S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
- SV **firstrelem, SV **lastrelem)
+ SV **firstrelem, SV **lastrelem
+#ifdef DEBUGGING
+ , bool fake
+#endif
+)
{
dVAR;
SV **relem;
SSize_t lcount = lastlelem - firstlelem + 1;
bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
+ bool copy_all = FALSE;
assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
assert(firstlelem < lastlelem); /* at least 2 LH elements */
assert(firstrelem < lastrelem); /* at least 2 RH elements */
+
+ lelem = firstlelem;
/* we never have to copy the first RH element; it can't be corrupted
* by assigning something to the corresponding first LH element.
* So this scan does in a loop: mark LHS[N]; test RHS[N+1]
*/
- firstrelem++;
-
- lelem = firstlelem;
- relem = firstrelem;
+ relem = firstrelem + 1;
for (; relem <= lastrelem; relem++) {
SV *svr;
}
assert(svl);
+ if (SvSMAGICAL(svl)) {
+ copy_all = TRUE;
+ }
if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
if (!marked)
return;
svr = *relem;
assert(svr);
- if (UNLIKELY(SvFLAGS(svr) & SVf_BREAK)) {
+ if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
+
+#ifdef DEBUGGING
+ if (fake) {
+ /* op_dump(PL_op); */
+ Perl_croak(aTHX_
+ "panic: aassign skipped needed copy of common RH elem %"
+ UVuf, (UV)(relem - firstrelem));
+ }
+#endif
TAINT_NOT; /* Each item is independent */
SSize_t i;
int magic;
U32 lval;
+ /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
+ * only need to save locally, not on the save stack */
+ U16 old_delaymagic = PL_delaymagic;
+#ifdef DEBUGGING
+ bool fake = 0;
+#endif
PL_delaymagic = DM_DELAY; /* catch simultaneous items */
* clobber a value on the right that's used later in the list.
*/
- if ( (PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1))
- /* at least 2 LH and RH elements, or commonality isn't an issue */
- && (firstlelem < lastlelem && firstrelem < lastrelem)
- ) {
- if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
- /* skip the scan if all scalars have a ref count of 1 */
- for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
- sv = *lelem;
- if (!sv || SvREFCNT(sv) == 1)
- continue;
- if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVAV)
- goto do_scan;
- break;
+ /* at least 2 LH and RH elements, or commonality isn't an issue */
+ if (firstlelem < lastlelem && firstrelem < lastrelem) {
+ for (relem = firstrelem+1; relem <= lastrelem; relem++) {
+ if (SvGMAGICAL(*relem))
+ goto do_scan;
+ }
+ for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
+ if (*lelem && SvSMAGICAL(*lelem))
+ goto do_scan;
+ }
+ if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
+ if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
+ /* skip the scan if all scalars have a ref count of 1 */
+ for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
+ sv = *lelem;
+ if (!sv || SvREFCNT(sv) == 1)
+ continue;
+ if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVAV)
+ goto do_scan;
+ break;
+ }
+ }
+ else {
+ do_scan:
+ S_aassign_copy_common(aTHX_
+ firstlelem, lastlelem, firstrelem, lastrelem
+#ifdef DEBUGGING
+ , fake
+#endif
+ );
}
}
- else {
- do_scan:
- S_aassign_copy_common(aTHX_
- firstlelem, lastlelem, firstrelem, lastrelem);
+ }
+#ifdef DEBUGGING
+ else {
+ /* on debugging builds, do the scan even if we've concluded we
+ * don't need to, then panic if we find commonality. Note that the
+ * scanner assumes at least 2 elements */
+ if (firstlelem < lastlelem && firstrelem < lastrelem) {
+ fake = 1;
+ goto do_scan;
}
}
+#endif
gimme = GIMME_V;
lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
}
av_clear(ary);
- av_extend(ary, lastrelem - relem);
+ if (relem <= lastrelem)
+ av_extend(ary, lastrelem - relem);
+
i = 0;
while (relem <= lastrelem) { /* gobble up all the rest */
SV **didstore;
if (lval && !already_copied)
*relem = sv_mortalcopy(*relem);
/* XXX else check for weak refs? */
- sv = SvREFCNT_inc_simple_NN(SvRV(*relem));
+ sv = SvREFCNT_inc_NN(SvRV(*relem));
}
relem++;
if (already_copied)
- SvREFCNT_inc_simple_NN(sv); /* undo mortal free */
+ SvREFCNT_inc_simple_void_NN(sv); /* undo mortal free */
didstore = av_store(ary,i++,sv);
if (magic) {
if (!didstore)
}
}
if (already_copied)
- SvREFCNT_inc_simple_NN(tmpstr); /* undo mortal free */
+ SvREFCNT_inc_simple_void_NN(tmpstr); /* undo mortal free */
didstore = hv_store_ent(hash,sv,tmpstr,0);
if (magic) {
if (!didstore) sv_2mortal(tmpstr);
PERL_UNUSED_VAR(tmp_egid);
#endif
}
- PL_delaymagic = 0;
+ PL_delaymagic = old_delaymagic;
if (gimme == G_VOID)
SP = firstrelem - 1;
XPUSHs(sv);
if (type == OP_GLOB) {
const char *t1;
+ Stat_t statbuf;
if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
char * const tmps = SvEND(sv) - 1;
if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
#endif
break;
- if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &PL_statbuf) < 0) {
+ if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
(void)POPs; /* Unmatched wildcard? Chuck it... */
continue;
}
LvTYPE(lv) = 'y';
sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
- LvTARG(lv) = SvREFCNT_inc_simple(hv);
+ LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
LvTARGLEN(lv) = 1;
PUSHs(lv);
RETURN;
/* a stripped-down version of Perl_softref2xv() for use by
* pp_multideref(), which doesn't use PL_op->op_flags */
-GV *
+STATIC GV *
S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
const svtype type)
{
PERL_MAGIC_defelem, NULL, 0);
/* sv_magic() increments refcount */
SvREFCNT_dec_NN(key2);
- LvTARG(lv) = SvREFCNT_inc_simple(hv);
+ LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
LvTARGLEN(lv) = 1;
sv = lv;
}
}
if (once)
break;
- } while (CALLREGEXEC(rx, s, strend, orig, s == m,
+ } while (CALLREGEXEC(rx, s, strend, orig,
+ s == m, /* Yields minend of 0 or 1 */
TARG, NULL,
REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
+ assert(strend >= s);
sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
if (rpm->op_pmflags & PMf_NONDESTRUCT) {
(void)POPMARK; /* pop dst */
SP = PL_stack_base + POPMARK; /* pop original mark */
if (gimme == G_SCALAR) {
- if (PL_op->op_private & OPpGREP_LEX) {
- SV* const sv = sv_newmortal();
- sv_setiv(sv, items);
- PUSHs(sv);
- }
- else {
dTARGET;
XPUSHi(items);
- }
}
else if (gimme == G_ARRAY)
SP += items;
ENTER_with_name("grep_item"); /* enter inner scope */
SAVEVPTR(PL_curpm);
- src = PL_stack_base[*PL_markstack_ptr];
+ src = PL_stack_base[TOPMARK];
if (SvPADTMP(src)) {
- src = PL_stack_base[*PL_markstack_ptr] = sv_mortalcopy(src);
+ src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
PL_tmps_floor++;
}
SvTEMP_off(src);
- if (PL_op->op_private & OPpGREP_LEX)
- PAD_SVl(PL_op->op_targ) = src;
- else
- DEFSV_set(src);
+ DEFSV_set(src);
RETURNOP(cLOGOP->op_other);
}
if (gimme == G_SCALAR) {
MARK = newsp + 1;
if (LIKELY(MARK <= SP)) {
- if (cx->blk_sub.cv && CvDEPTH(cx->blk_sub.cv) > 1) {
+ /* if we are recursing, then free the current tmps.
+ * Normally we don't bother and rely on the caller to do this,
+ * because early tmp freeing tends to free the args we're
+ * returning.
+ * Doing it for recursion ensures the things like the
+ * fibonacci benchmark don't fill up the tmps stack because
+ * it never reaches an outer nextstate */
+ if (cx->blk_sub.olddepth) {
if (SvTEMP(TOPs) && SvREFCNT(TOPs) == 1
&& !SvMAGICAL(TOPs)) {
*MARK = SvREFCNT_inc(TOPs);
}
PUTBACK;
- LEAVE;
POPSUB(cx,sv); /* Stack values are safe: release CV and @_ ... */
cxstack_ix--;
PL_curpm = newpm; /* ... and pop $1 et al */
return cx->blk_sub.retop;
}
+
+/* Reset the current @_ (pad slot 0): either empty the existing AV in
+ * place, or -- when 'abandon' is true, or some other reference to it
+ * survives, or it has magic attached -- give it up and install a fresh
+ * AV of equal capacity in its place. */
+
+void
+Perl_clear_defarray(pTHX_ AV* av, bool abandon)
+{
+    const SSize_t fill = AvFILLp(av);
+
+    PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
+
+    if (UNLIKELY(abandon || SvREFCNT(av) != 1 || SvMAGICAL(av))) {
+        /* can't safely reuse this AV: release our reference (anything
+         * else holding it keeps it alive) and start over with a new
+         * array pre-extended to the old one's fill level */
+        SvREFCNT_dec_NN(av);
+        av = newAV();
+        PAD_SVl(0) = MUTABLE_SV(av);
+        av_extend(av, fill);
+    }
+    else
+        av_clear(av);
+
+    AvREIFY_only(av);
+}
+
+
PP(pp_entersub)
{
dSP; dPOPss;
PERL_CONTEXT *cx;
I32 gimme;
const bool hasargs = (PL_op->op_flags & OPf_STACKED) != 0;
+ I32 old_savestack_ix;
if (UNLIKELY(!sv))
- DIE(aTHX_ "Not a CODE reference");
- /* This is overwhelmingly the most common case: */
- if (!LIKELY(SvTYPE(sv) == SVt_PVGV && (cv = GvCVu((const GV *)sv)))) {
+ goto do_die;
+
+ /* Locate the CV to call:
+ * - most common case: RV->CV: f(), $ref->():
+ * note that if a sub is compiled before its caller is compiled,
+ * the stash entry will be a ref to a CV, rather than being a GV.
+ * - second most common case: CV: $ref->method()
+ */
+
+ /* a non-magic-RV -> CV ? */
+ if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
+ cv = MUTABLE_CV(SvRV(sv));
+ if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
+ goto do_ref;
+ }
+ else
+ cv = MUTABLE_CV(sv);
+
+ /* a CV ? */
+ if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
+ /* handle all the weird cases */
switch (SvTYPE(sv)) {
+ case SVt_PVLV:
+ if (!isGV_with_GP(sv))
+ goto do_default;
+ /* FALLTHROUGH */
case SVt_PVGV:
- we_have_a_glob:
- if (!(cv = GvCVu((const GV *)sv))) {
+ cv = GvCVu((const GV *)sv);
+ if (UNLIKELY(!cv)) {
HV *stash;
cv = sv_2cv(sv, &stash, &gv, 0);
- }
- if (!cv) {
- ENTER;
- SAVETMPS;
- goto try_autoload;
+ if (!cv) {
+ old_savestack_ix = PL_savestack_ix;
+ goto try_autoload;
+ }
}
break;
- case SVt_PVLV:
- if(isGV_with_GP(sv)) goto we_have_a_glob;
- /* FALLTHROUGH */
+
default:
- if (sv == &PL_sv_yes) { /* unfound import, ignore */
- if (hasargs)
- SP = PL_stack_base + POPMARK;
- else
- (void)POPMARK;
- RETURN;
- }
+ do_default:
SvGETMAGIC(sv);
if (SvROK(sv)) {
- if (SvAMAGIC(sv)) {
+ do_ref:
+ if (UNLIKELY(SvAMAGIC(sv))) {
sv = amagic_deref_call(sv, to_cv_amg);
/* Don't SPAGAIN here. */
}
else {
const char *sym;
STRLEN len;
- if (!SvOK(sv))
+ if (UNLIKELY(!SvOK(sv)))
DIE(aTHX_ PL_no_usym, "a subroutine");
+
+ if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
+ if (hasargs)
+ SP = PL_stack_base + POPMARK;
+ else
+ (void)POPMARK;
+ if (GIMME_V == G_SCALAR)
+ PUSHs(&PL_sv_undef);
+ RETURN;
+ }
+
sym = SvPV_nomg_const(sv, len);
if (PL_op->op_private & HINT_STRICT_REFS)
DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
break;
}
cv = MUTABLE_CV(SvRV(sv));
- if (SvTYPE(cv) == SVt_PVCV)
+ if (LIKELY(SvTYPE(cv) == SVt_PVCV))
break;
/* FALLTHROUGH */
case SVt_PVHV:
case SVt_PVAV:
+ do_die:
DIE(aTHX_ "Not a CODE reference");
- /* This is the second most common case: */
- case SVt_PVCV:
- cv = MUTABLE_CV(sv);
- break;
}
}
- ENTER;
+ /* At this point we want to save PL_savestack_ix, either by doing a
+ * PUSHSUB, or for XS, doing an ENTER. But we don't yet know the final
+ * CV we will be using (so we don't know whether its XS, so we can't
+ * PUSHSUB or ENTER yet), and determining cv may itself push stuff on
+ * the save stack. So remember where we are currently on the save
+ * stack, and later update the CX or scopestack entry accordingly. */
+ old_savestack_ix = PL_savestack_ix;
- retry:
- if (UNLIKELY(CvCLONE(cv) && ! CvCLONED(cv)))
- DIE(aTHX_ "Closure prototype called");
- if (UNLIKELY(!CvROOT(cv) && !CvXSUB(cv))) {
+ /* these two fields are in a union. If they ever become separate,
+ * we have to test for both of them being null below */
+ assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
+ while (UNLIKELY(!CvROOT(cv))) {
GV* autogv;
SV* sub_name;
/* should call AUTOLOAD now? */
else {
try_autoload:
- if ((autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
- GvNAMEUTF8(gv) ? SVf_UTF8 : 0)))
- {
- cv = GvCV(autogv);
- }
- else {
- sorry:
- sub_name = sv_newmortal();
- gv_efullname3(sub_name, gv, NULL);
- DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
- }
- }
- if (!cv)
- goto sorry;
- goto retry;
+ autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
+ GvNAMEUTF8(gv) ? SVf_UTF8 : 0);
+ cv = autogv ? GvCV(autogv) : NULL;
+ }
+ if (!cv) {
+ sub_name = sv_newmortal();
+ gv_efullname3(sub_name, gv, NULL);
+ DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
+ }
}
+ if (UNLIKELY(CvCLONE(cv) && ! CvCLONED(cv)))
+ DIE(aTHX_ "Closure prototype called");
+
if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
&& !CvNODEBUG(cv)))
{
PADLIST * const padlist = CvPADLIST(cv);
I32 depth;
+ /* keep PADTMP args alive throughout the call (we need to do this
+ * because @_ isn't refcounted). Note that we create the mortals
+ * in the caller's tmps frame, so they won't be freed until after
+ * we return from the sub.
+ */
+ {
+ SV **svp = MARK;
+ while (svp < SP) {
+ SV *sv = *++svp;
+ if (!sv)
+ continue;
+ if (SvPADTMP(sv))
+ *svp = sv = sv_mortalcopy(sv);
+ SvTEMP_off(sv);
+ }
+ }
+
PUSHBLOCK(cx, CXt_SUB, MARK);
PUSHSUB(cx);
cx->blk_sub.retop = PL_op->op_next;
+ cx->blk_sub.old_savestack_ix = old_savestack_ix;
+
if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2)) {
PERL_STACK_OVERFLOW_CHECK();
pad_push(padlist, depth);
}
- SAVECOMPPAD();
PAD_SET_CUR_NOSAVE(padlist, depth);
if (LIKELY(hasargs)) {
AV *const av = MUTABLE_AV(PAD_SVl(0));
SSize_t items;
AV **defavp;
- if (UNLIKELY(AvREAL(av))) {
- /* @_ is normally not REAL--this should only ever
- * happen when DB::sub() calls things that modify @_ */
- av_clear(av);
- AvREAL_off(av);
- AvREIFY_on(av);
- }
defavp = &GvAV(PL_defgv);
cx->blk_sub.savearray = *defavp;
*defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
- CX_CURPAD_SAVE(cx->blk_sub);
- cx->blk_sub.argarray = av;
- items = SP - MARK;
+ /* it's the responsibility of whoever leaves a sub to ensure
+ * that a clean, empty AV is left in pad[0]. This is normally
+ * done by POPSUB() */
+ assert(!AvREAL(av) && AvFILLp(av) == -1);
+
+ items = SP - MARK;
if (UNLIKELY(items - 1 > AvMAX(av))) {
SV **ary = AvALLOC(av);
AvMAX(av) = items - 1;
Copy(MARK+1,AvARRAY(av),items,SV*);
AvFILLp(av) = items - 1;
-
- MARK = AvARRAY(av);
- while (items--) {
- if (*MARK)
- {
- if (SvPADTMP(*MARK)) {
- *MARK = sv_mortalcopy(*MARK);
- }
- SvTEMP_off(*MARK);
- }
- MARK++;
- }
}
- SAVETMPS;
if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
!CvLVALUE(cv)))
- DIE(aTHX_ "Can't modify non-lvalue subroutine call");
+ DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
+ SVfARG(cv_name(cv, NULL, 0)));
/* warning must come *after* we fully set up the context
* stuff so that __WARN__ handlers can safely dounwind()
* if they want to
else {
SSize_t markix = TOPMARK;
+ ENTER;
+ /* pretend we did the ENTER earlier */
+ PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
+
SAVETMPS;
PUTBACK;
& PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
!CvLVALUE(cv)))
- DIE(aTHX_ "Can't modify non-lvalue subroutine call");
+ DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
+ SVfARG(cv_name(cv, NULL, 0)));
if (UNLIKELY(!hasargs && GvAV(PL_defgv))) {
/* Need to copy @_ to stack. Alternative may be to