/* NOTE(review): this file is a unified-diff fragment ('+' = added, '-' =
 * removed, unprefixed = context; hunk headers are missing), not compilable C.
 *
 * pp_unstack hunk: the change caches the current context frame in a local
 * 'cx', asserts the frame is a block or loop type before popping scope, and
 * replaces the manual PL_scopestack[PL_scopestack_ix - 1] read + LEAVE_SCOPE
 * with CX_LEAVE_SCOPE(cx), which presumably takes the saved savestack index
 * from the context frame itself — TODO confirm against the CX_LEAVE_SCOPE
 * definition. */
PP(pp_unstack)
{
+ PERL_CONTEXT *cx;
PERL_ASYNC_CHECK();
TAINT_NOT; /* Each statement is presumed innocent */
- PL_stack_sp = PL_stack_base + cxstack[cxstack_ix].blk_oldsp;
+ cx = &cxstack[cxstack_ix];
+ PL_stack_sp = PL_stack_base + cx->blk_oldsp;
FREETMPS;
/* OPf_SPECIAL set means: don't pop the enclosing scope here. */
if (!(PL_op->op_flags & OPf_SPECIAL)) {
- I32 oldsave = PL_scopestack[PL_scopestack_ix - 1];
- LEAVE_SCOPE(oldsave);
+ assert(
+ CxTYPE(cx) == CXt_BLOCK
+ || CxTYPE(cx) == CXt_LOOP_FOR
+ || CxTYPE(cx) == CXt_LOOP_PLAIN
+ || CxTYPE(cx) == CXt_LOOP_LAZYSV
+ || CxTYPE(cx) == CXt_LOOP_LAZYIV
+ );
+ CX_LEAVE_SCOPE(cx);
}
return NORMAL;
}
/* NOTE(review): mid-function diff fragment — the PP(...) header and some
 * context lines are elided; POPSUB / cx->blk_sub.retop suggest this is the
 * G_SCALAR return path of pp_leavesub — TODO confirm against the full file.
 *
 * Visible changes: the context frame is read directly (cx->blk_oldsp,
 * cx->blk_gimme) instead of via POPBLOCK + the temporary cxstack_ix++
 * protection; the recursion test becomes cx->blk_sub.olddepth; POPSUB drops
 * its second argument and the trailing LEAVE/LEAVESUB(sv) pair is removed. */
PMOP *newpm;
I32 gimme;
PERL_CONTEXT *cx;
- SV *sv;
- if (CxMULTICALL(&cxstack[cxstack_ix])) {
+ cx = &cxstack[cxstack_ix];
+ assert(CxTYPE(cx) == CXt_SUB);
+
+ if (CxMULTICALL(cx)) {
/* entry zero of a stack is always PL_sv_undef, which
* simplifies converting a '()' return into undef in scalar context */
assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
return 0;
}
- POPBLOCK(cx,newpm);
- cxstack_ix++; /* temporarily protect top context */
+ newsp = PL_stack_base + cx->blk_oldsp;
+ gimme = cx->blk_gimme;
TAINT_NOT;
if (gimme == G_SCALAR) {
MARK = newsp + 1;
if (LIKELY(MARK <= SP)) {
- if (cx->blk_sub.cv && CvDEPTH(cx->blk_sub.cv) > 1) {
+ /* if we are recursing, then free the current tmps.
+ * Normally we don't bother and rely on the caller to do this,
+ * because early tmp freeing tends to free the args we're
+ * returning.
+ * Doing it for recursion ensures the things like the
+ * fibonacci benchmark don't fill up the tmps stack because
+ * it never reaches an outer nextstate */
+ if (cx->blk_sub.olddepth) {
/* TOPs is only safe to return as-is if it is a unique,
 * non-magical temporary; otherwise it must be mortal-copied. */
if (SvTEMP(TOPs) && SvREFCNT(TOPs) == 1
&& !SvMAGICAL(TOPs)) {
*MARK = SvREFCNT_inc(TOPs);
sv_2mortal(*MARK);
}
else {
- sv = SvREFCNT_inc(TOPs); /* FREETMPS could clobber it */
+ SV *sv = SvREFCNT_inc(TOPs); /* FREETMPS could clobber it */
FREETMPS;
*MARK = sv_mortalcopy(sv);
SvREFCNT_dec_NN(sv);
}
PUTBACK;
- LEAVE;
- POPSUB(cx,sv); /* Stack values are safe: release CV and @_ ... */
- cxstack_ix--;
+ POPSUB(cx); /* Stack values are safe: release CV and @_ ... */
+ POPBLOCK(cx,newpm);
PL_curpm = newpm; /* ... and pop $1 et al */
+ cxstack_ix--;
- LEAVESUB(sv);
return cx->blk_sub.retop;
}
/* NOTE(review): mid-function diff fragment of pp_entersub's CV-resolution
 * logic (surrounding context elided).
 *
 * Visible changes: the old fast path (PVGV + GvCVu) is replaced with a
 * fast path for a non-magic RV pointing at a non-object CV, with everything
 * else funneled into the switch via do_default/do_ref/do_die labels; the
 * 'unfound import' (&PL_sv_yes) special case moves inside the string-deref
 * branch; 'hasargs' local is dropped in favor of testing OPf_STACKED
 * directly. */
GV *gv;
CV *cv;
PERL_CONTEXT *cx;
- I32 gimme;
- const bool hasargs = (PL_op->op_flags & OPf_STACKED) != 0;
+ I32 old_savestack_ix;
if (UNLIKELY(!sv))
- DIE(aTHX_ "Not a CODE reference");
- /* This is overwhelmingly the most common case: */
- if (!LIKELY(SvTYPE(sv) == SVt_PVGV && (cv = GvCVu((const GV *)sv)))) {
+ goto do_die;
+
+ /* Locate the CV to call:
+ * - most common case: RV->CV: f(), $ref->():
+ * note that if a sub is compiled before its caller is compiled,
+ * the stash entry will be a ref to a CV, rather than being a GV.
+ * - second most common case: CV: $ref->method()
+ */
+
+ /* a non-magic-RV -> CV ? */
+ if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
+ cv = MUTABLE_CV(SvRV(sv));
+ if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
+ goto do_ref;
+ }
+ else
+ cv = MUTABLE_CV(sv);
+
+ /* a CV ? */
+ if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
+ /* handle all the weird cases */
switch (SvTYPE(sv)) {
+ case SVt_PVLV:
+ if (!isGV_with_GP(sv))
+ goto do_default;
+ /* FALLTHROUGH */
case SVt_PVGV:
- we_have_a_glob:
- if (!(cv = GvCVu((const GV *)sv))) {
+ cv = GvCVu((const GV *)sv);
+ if (UNLIKELY(!cv)) {
HV *stash;
cv = sv_2cv(sv, &stash, &gv, 0);
- }
- if (!cv) {
- ENTER;
- goto try_autoload;
+ if (!cv) {
+ old_savestack_ix = PL_savestack_ix;
+ goto try_autoload;
+ }
}
break;
- case SVt_PVLV:
- if(isGV_with_GP(sv)) goto we_have_a_glob;
- /* FALLTHROUGH */
+
default:
- if (sv == &PL_sv_yes) { /* unfound import, ignore */
- if (hasargs)
- SP = PL_stack_base + POPMARK;
- else
- (void)POPMARK;
- if (GIMME_V == G_SCALAR)
- PUSHs(&PL_sv_undef);
- RETURN;
- }
+ do_default:
SvGETMAGIC(sv);
if (SvROK(sv)) {
- if (SvAMAGIC(sv)) {
+ do_ref:
+ if (UNLIKELY(SvAMAGIC(sv))) {
sv = amagic_deref_call(sv, to_cv_amg);
/* Don't SPAGAIN here. */
}
else {
/* not a ref: treat sv as a symbolic sub name */
const char *sym;
STRLEN len;
- if (!SvOK(sv))
+ if (UNLIKELY(!SvOK(sv)))
DIE(aTHX_ PL_no_usym, "a subroutine");
+
+ if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
+ if (PL_op->op_flags & OPf_STACKED) /* hasargs */
+ SP = PL_stack_base + POPMARK;
+ else
+ (void)POPMARK;
+ if (GIMME_V == G_SCALAR)
+ PUSHs(&PL_sv_undef);
+ RETURN;
+ }
+
sym = SvPV_nomg_const(sv, len);
if (PL_op->op_private & HINT_STRICT_REFS)
DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
break;
}
cv = MUTABLE_CV(SvRV(sv));
- if (SvTYPE(cv) == SVt_PVCV)
+ if (LIKELY(SvTYPE(cv) == SVt_PVCV))
break;
/* FALLTHROUGH */
case SVt_PVHV:
case SVt_PVAV:
+ do_die:
DIE(aTHX_ "Not a CODE reference");
- /* This is the second most common case: */
- case SVt_PVCV:
- cv = MUTABLE_CV(sv);
- break;
}
}
/* NOTE(review): mid-function diff fragment (context elided around L209-210:
 * the 'if' branch that pairs with the 'else' below is not visible here).
 *
 * Visible changes: the bare ENTER is replaced by recording
 * PL_savestack_ix for later use; the 'retry:' goto loop becomes a
 * 'while (!CvROOT(cv))' loop with the sorry/retry labels flattened out;
 * the closure-prototype check is moved after the loop and unrolled into a
 * single CvFLAGS mask test. */
- ENTER;
+ /* At this point we want to save PL_savestack_ix, either by doing a
+ * PUSHSUB, or for XS, doing an ENTER. But we don't yet know the final
+ * CV we will be using (so we don't know whether its XS, so we can't
+ * PUSHSUB or ENTER yet), and determining cv may itself push stuff on
+ * the save stack. So remember where we are currently on the save
+ * stack, and later update the CX or scopestack entry accordingly. */
+ old_savestack_ix = PL_savestack_ix;
- retry:
- if (UNLIKELY(CvCLONE(cv) && ! CvCLONED(cv)))
- DIE(aTHX_ "Closure prototype called");
- if (UNLIKELY(!CvROOT(cv) && !CvXSUB(cv))) {
+ /* these two fields are in a union. If they ever become separate,
+ * we have to test for both of them being null below */
+ assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
+ while (UNLIKELY(!CvROOT(cv))) {
GV* autogv;
SV* sub_name;
/* should call AUTOLOAD now? */
else {
try_autoload:
- if ((autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
- GvNAMEUTF8(gv) ? SVf_UTF8 : 0)))
- {
- cv = GvCV(autogv);
- }
- else {
- sorry:
- sub_name = sv_newmortal();
- gv_efullname3(sub_name, gv, NULL);
- DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
- }
- }
- if (!cv)
- goto sorry;
- goto retry;
+ autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
+ GvNAMEUTF8(gv) ? SVf_UTF8 : 0);
+ cv = autogv ? GvCV(autogv) : NULL;
+ }
+ if (!cv) {
+ sub_name = sv_newmortal();
+ gv_efullname3(sub_name, gv, NULL);
+ DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
+ }
}
+ /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
+ if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
+ DIE(aTHX_ "Closure prototype called");
+
/* debugger hook: &DB::sub must exist when OPpENTERSUB_DB is in effect */
if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
&& !CvNODEBUG(cv)))
{
DIE(aTHX_ "No DB::sub routine defined");
}
/* NOTE(review): mid-function diff fragment covering the perl-sub vs XS
 * dispatch; several runs of unchanged context are elided (e.g. between the
 * pad_push call and the Copy into @_, and around the two
 * "Can't modify non-lvalue subroutine call" DIEs, which belong to
 * different elided branches).
 *
 * Visible changes: 'gimme' becomes path-local (the XS path re-reads
 * GIMME_V); the PADTMP-arg mortal-copy loop moves from after the @_ Copy to
 * before PUSHBLOCK, with a new rationale comment; the new context frame
 * records old_savestack_ix; the XS path now does its own ENTER and rewrites
 * the just-pushed scopestack entry to old_savestack_ix, per the
 * "pretend we did the ENTER earlier" comment. */
- gimme = GIMME_V;
-
if (!(CvISXSUB(cv))) {
/* This path taken at least 75% of the time */
dMARK;
- PADLIST * const padlist = CvPADLIST(cv);
+ PADLIST *padlist;
I32 depth;
+ bool hasargs;
+ I32 gimme;
+
+ /* keep PADTMP args alive throughout the call (we need to do this
+ * because @_ isn't refcounted). Note that we create the mortals
+ * in the caller's tmps frame, so they won't be freed until after
+ * we return from the sub.
+ */
+ {
+ SV **svp = MARK;
+ while (svp < SP) {
+ SV *sv = *++svp;
+ if (!sv)
+ continue;
+ if (SvPADTMP(sv))
+ *svp = sv = sv_mortalcopy(sv);
+ SvTEMP_off(sv);
+ }
+ }
+ gimme = GIMME_V;
PUSHBLOCK(cx, CXt_SUB, MARK);
+ hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
PUSHSUB(cx);
cx->blk_sub.retop = PL_op->op_next;
+ cx->cx_u.cx_blk.blku_old_savestack_ix = old_savestack_ix;
+
+ padlist = CvPADLIST(cv);
/* recursion: grow the pad to this depth */
if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2)) {
PERL_STACK_OVERFLOW_CHECK();
pad_push(padlist, depth);
Copy(MARK+1,AvARRAY(av),items,SV*);
AvFILLp(av) = items - 1;
-
- MARK = AvARRAY(av);
- while (items--) {
- if (*MARK)
- {
- if (SvPADTMP(*MARK)) {
- *MARK = sv_mortalcopy(*MARK);
- }
- SvTEMP_off(*MARK);
- }
- MARK++;
- }
}
- SAVETMPS;
if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
!CvLVALUE(cv)))
DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
else {
/* XS path */
SSize_t markix = TOPMARK;
+ ENTER;
+ /* pretend we did the ENTER earlier */
+ PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
+
SAVETMPS;
PUTBACK;
DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
SVfARG(cv_name(cv, NULL, 0)));
- if (UNLIKELY(!hasargs && GvAV(PL_defgv))) {
+ if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
/* Need to copy @_ to stack. Alternative may be to
* switch stack to @_, and copy return values
* back. This would allow popping @_ in XSUB, e.g.. XXXX */
CvXSUB(cv)(aTHX_ cv);
/* Enforce some sanity in scalar context. */
- if (gimme == G_SCALAR) {
+ if (GIMME_V == G_SCALAR) {
SV **svp = PL_stack_base + markix + 1;
if (svp != PL_stack_sp) {
*svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;