PERL_ARGS_ASSERT_SLAB_FREE;
if (!o->op_slabbed) {
- PerlMemShared_free(op);
+ if (!o->op_static)
+ PerlMemShared_free(op);
return;
}
)
) {
assert(slot->opslot_op.op_slabbed);
- slab->opslab_refcnt++; /* op_free may free slab */
op_free(&slot->opslot_op);
- if (!--slab->opslab_refcnt) goto free;
+ if (slab->opslab_refcnt == 1) goto free;
}
}
} while ((slab2 = slab2->opslab_next));
#ifdef DEBUGGING
assert(savestack_count == slab->opslab_refcnt-1);
#endif
+ /* Remove the CV’s reference count. */
+ slab->opslab_refcnt--;
return;
}
free:
}
break;
}
+
case OP_SUBST: {
if (cPMOPo->op_pmreplrootu.op_pmreplroot)
finalize_op(cPMOPo->op_pmreplrootu.op_pmreplroot);
case OP_SCALAR:
case OP_NULL:
- if (!(o->op_flags & OPf_KIDS))
+ if (!(o->op_flags & OPf_KIDS) || type == OP_DEFINED)
break;
doref(cBINOPo->op_first, type, set_op_ref);
break;
}
STATIC void
-S_apply_attrs(pTHX_ HV *stash, SV *target, OP *attrs, bool for_my)
+S_apply_attrs(pTHX_ HV *stash, SV *target, OP *attrs)
{
dVAR;
- SV *stashsv;
+ SV * const stashsv = stash ? newSVhek(HvNAME_HEK(stash)) : &PL_sv_no;
PERL_ARGS_ASSERT_APPLY_ATTRS;
/* fake up C<use attributes $pkg,$rv,@attrs> */
ENTER; /* need to protect against side-effects of 'use' */
- stashsv = stash ? newSVhek(HvNAME_HEK(stash)) : &PL_sv_no;
#define ATTRSMODULE "attributes"
#define ATTRSMODULE_PM "attributes.pm"
- if (for_my) {
- /* Don't force the C<use> if we don't need it. */
- SV * const * const svp = hv_fetchs(GvHVn(PL_incgv), ATTRSMODULE_PM, FALSE);
- if (svp && *svp != &PL_sv_undef)
- NOOP; /* already in %INC */
- else
- Perl_load_module(aTHX_ PERL_LOADMOD_NOIMPORT,
- newSVpvs(ATTRSMODULE), NULL);
- }
- else {
- Perl_load_module(aTHX_ PERL_LOADMOD_IMPORT_OPS,
+ Perl_load_module(aTHX_ PERL_LOADMOD_IMPORT_OPS,
newSVpvs(ATTRSMODULE),
NULL,
op_prepend_elem(OP_LIST,
newSVOP(OP_CONST, 0,
newRV(target)),
dup_attrlist(attrs))));
- }
LEAVE;
}
{
dVAR;
OP *pack, *imop, *arg;
- SV *meth, *stashsv;
+ SV *meth, *stashsv, **svp;
PERL_ARGS_ASSERT_APPLY_ATTRS_MY;
target->op_type == OP_PADAV);
/* Ensure that attributes.pm is loaded. */
- apply_attrs(stash, PAD_SV(target->op_targ), attrs, TRUE);
+ ENTER; /* need to protect against side-effects of 'use' */
+ /* Don't force the C<use> if we don't need it. */
+ svp = hv_fetchs(GvHVn(PL_incgv), ATTRSMODULE_PM, FALSE);
+ if (svp && *svp != &PL_sv_undef)
+ NOOP; /* already in %INC */
+ else
+ Perl_load_module(aTHX_ PERL_LOADMOD_NOIMPORT,
+ newSVpvs(ATTRSMODULE), NULL);
+ LEAVE;
/* Need package name for method call. */
pack = newSVOP(OP_CONST, 0, newSVpvs(ATTRSMODULE));
(type == OP_RV2SV ? GvSV(gv) :
type == OP_RV2AV ? MUTABLE_SV(GvAV(gv)) :
type == OP_RV2HV ? MUTABLE_SV(GvHV(gv)) : MUTABLE_SV(gv)),
- attrs, FALSE);
+ attrs);
}
o->op_private |= OPpOUR_INTRO;
return o;
{
dVAR;
if (o) {
- if (o->op_flags & OPf_PARENS || PERLDB_NOOPT || PL_tainting) {
+ if (o->op_flags & OPf_PARENS || PERLDB_NOOPT || TAINTING_get) {
o = op_prepend_elem(OP_LINESEQ, newOP(OP_ENTER, 0), o);
o->op_type = OP_LEAVE;
o->op_ppaddr = PL_ppaddr[OP_LEAVE];
dVAR;
const int needblockscope = PL_hints & HINT_BLOCK_SCOPE;
OP* retval = scalarseq(seq);
+ OP *o;
CALL_BLOCK_HOOKS(bhk_pre_end, &retval);
CopHINTS_set(&PL_compiling, PL_hints);
if (needblockscope)
PL_hints |= HINT_BLOCK_SCOPE; /* propagate out */
- pad_leavemy();
+ o = pad_leavemy();
+
+ if (o) {
+ /* pad_leavemy has created a sequence of introcv ops for all my
+ subs declared in the block. We have to replicate that list with
+ clonecv ops, to deal with this situation:
+
+ sub {
+ my sub s1;
+ my sub s2;
+ sub s1 { state sub foo { \&s2 } }
+ }->()
+
+ Originally, I was going to have introcv clone the CV and turn
+ off the stale flag. Since &s1 is declared before &s2, the
+ introcv op for &s1 is executed (on sub entry) before the one for
+ &s2. But the &foo sub inside &s1 (which is cloned when &s1 is
+ cloned, since it is a state sub) closes over &s2 and expects
+ to see it in its outer CV’s pad. If the introcv op clones &s1,
+ then &s2 is still marked stale. Since &s1 is not active, and
+ &foo closes over &s1’s implicit entry for &s2, we get a ‘Varia-
+ ble will not stay shared’ warning. Because it is the same stub
+ that will be used when the introcv op for &s2 is executed, clos-
+ ing over it is safe. Hence, we have to turn off the stale flag
+ on all lexical subs in the block before we clone any of them.
+ Hence, having introcv clone the sub cannot work. So we create a
+ list of ops like this:
+
+ lineseq
+ |
+ +-- introcv
+ |
+ +-- introcv
+ |
+ +-- introcv
+ |
+ .
+ .
+ .
+ |
+ +-- clonecv
+ |
+ +-- clonecv
+ |
+ +-- clonecv
+ |
+ .
+ .
+ .
+ */
+ OP *kid = o->op_flags & OPf_KIDS ? cLISTOPo->op_first : o;
+ OP * const last = o->op_flags & OPf_KIDS ? cLISTOPo->op_last : o;
+ for (;; kid = kid->op_sibling) {
+ OP *newkid = newOP(OP_CLONECV, 0);
+ newkid->op_targ = kid->op_targ;
+ o = op_append_elem(OP_LINESEQ, o, newkid);
+ if (kid == last) break;
+ }
+ retval = op_prepend_elem(OP_LINESEQ, o, retval);
+ }
CALL_BLOCK_HOOKS(bhk_post_end, &retval);
maybe other things) also take this path, because they set up
PL_main_start and PL_main_root directly, without generating an
optree.
+
+ If the parsing the main program aborts (due to parse errors,
+ or due to BEGIN or similar calling exit), then newPROG()
+ isn't even called, and hence this code path and its cleanups
+ are skipped. This shouldn't make a difference:
+ * a non-zero return from perl_parse is a failure, and
+ perl_destruct() should be called immediately.
+ * however, if exit(0) is called during the parse, then
+ perl_parse() returns 0, and perl_run() is called. As
+ PL_main_start will be NULL, perl_run() will return
+ promptly, and the exit code will remain 0.
*/
PL_comppad_name = 0;
U32 rx_flags = pm->op_pmflags & RXf_PMf_COMPILETIME;
regexp_engine const *eng = current_re_engine();
- if (o->op_flags & OPf_SPECIAL)
- rx_flags |= RXf_SPLIT;
-
if (!has_code || !eng->op_comp) {
/* compile-time simple constant pattern */
/* handle the implicit sub{} wrapped round the qr/(?{..})/ */
SvREFCNT_inc_simple_void(PL_compcv);
cv = newATTRSUB(floor, 0, NULL, NULL, qr);
- ((struct regexp *)SvANY(re))->qr_anoncv = cv;
+ ReANY(re)->qr_anoncv = cv;
/* attach the anon CV to the pad so that
* pad_fixup_inner_anons() can find it */
* preceding stacking ops;
* OP_REGCRESET is there to reset taint before executing the
* stacking ops */
- if (pm->op_pmflags & PMf_KEEP || PL_tainting)
- expr = newUNOP((PL_tainting ? OP_REGCRESET : OP_REGCMAYBE),0,expr);
+ if (pm->op_pmflags & PMf_KEEP || TAINTING_get)
+ expr = newUNOP((TAINTING_get ? OP_REGCRESET : OP_REGCMAYBE),0,expr);
if (pm->op_pmflags & PMf_HAS_CV) {
/* we have a runtime qr with literal code. This means
}
if (repl) {
- OP *curop;
+ OP *curop = repl;
+ bool konst;
if (pm->op_pmflags & PMf_EVAL) {
- curop = NULL;
if (CopLINE(PL_curcop) < (line_t)PL_parser->multi_end)
CopLINE_set(PL_curcop, (line_t)PL_parser->multi_end);
}
- else if (repl->op_type == OP_CONST)
- curop = repl;
- else {
- OP *lastop = NULL;
- for (curop = LINKLIST(repl); curop!=repl; curop = LINKLIST(curop)) {
- if (curop->op_type == OP_SCOPE
- || curop->op_type == OP_LEAVE
- || (PL_opargs[curop->op_type] & OA_DANGEROUS)) {
- if (curop->op_type == OP_GV) {
- GV * const gv = cGVOPx_gv(curop);
- repl_has_vars = 1;
- if (strchr("&`'123456789+-\016\022", *GvENAME(gv)))
- break;
- }
- else if (curop->op_type == OP_RV2CV)
- break;
- else if (curop->op_type == OP_RV2SV ||
- curop->op_type == OP_RV2AV ||
- curop->op_type == OP_RV2HV ||
- curop->op_type == OP_RV2GV) {
- if (lastop && lastop->op_type != OP_GV) /*funny deref?*/
- break;
- }
- else if (curop->op_type == OP_PADSV ||
- curop->op_type == OP_PADAV ||
- curop->op_type == OP_PADHV ||
- curop->op_type == OP_PADANY)
- {
- repl_has_vars = 1;
- }
- else if (curop->op_type == OP_PUSHRE)
- NOOP; /* Okay here, dangerous in newASSIGNOP */
- else
- break;
- }
- lastop = curop;
- }
- }
- if (curop == repl
+ /* If we are looking at s//.../e with a single statement, get past
+ the implicit do{}. */
+ if (curop->op_type == OP_NULL && curop->op_flags & OPf_KIDS
+ && cUNOPx(curop)->op_first->op_type == OP_SCOPE
+ && cUNOPx(curop)->op_first->op_flags & OPf_KIDS) {
+ OP *kid = cUNOPx(cUNOPx(curop)->op_first)->op_first;
+ if (kid->op_type == OP_NULL && kid->op_sibling
+ && !kid->op_sibling->op_sibling)
+ curop = kid->op_sibling;
+ }
+ if (curop->op_type == OP_CONST)
+ konst = TRUE;
+ else if (( (curop->op_type == OP_RV2SV ||
+ curop->op_type == OP_RV2AV ||
+ curop->op_type == OP_RV2HV ||
+ curop->op_type == OP_RV2GV)
+ && cUNOPx(curop)->op_first
+ && cUNOPx(curop)->op_first->op_type == OP_GV )
+ || curop->op_type == OP_PADSV
+ || curop->op_type == OP_PADAV
+ || curop->op_type == OP_PADHV
+ || curop->op_type == OP_PADANY) {
+ repl_has_vars = 1;
+ konst = TRUE;
+ }
+ else konst = FALSE;
+ if (konst
&& !(repl_has_vars
&& (!PM_GETRE(pm)
+ || !RX_PRELEN(PM_GETRE(pm))
|| RX_EXTFLAGS(PM_GETRE(pm)) & RXf_EVAL_SEEN)))
{
pm->op_pmflags |= PMf_CONST; /* const for long enough */
op_prepend_elem(o->op_type, scalar(repl), o);
}
else {
- if (curop == repl && !PM_GETRE(pm)) { /* Has variables. */
- pm->op_pmflags |= PMf_MAYBE_CONST;
- }
NewOp(1101, rcop, 1, LOGOP);
rcop->op_type = OP_SUBSTCONT;
rcop->op_ppaddr = PL_ppaddr[OP_SUBSTCONT];
PADNAME *name;
PADOFFSET pax = o->op_targ;
CV *outcv = CvOUTSIDE(PL_compcv);
+ CV *clonee = NULL;
+ HEK *hek = NULL;
+ bool reusable = FALSE;
PERL_ARGS_ASSERT_NEWMYSUB;
goto redo;
}
svspot =
- &PadARRAY(PadlistARRAY(CvPADLIST(outcv))[1])[pax];
+ &PadARRAY(PadlistARRAY(CvPADLIST(outcv))
+ [CvDEPTH(outcv) ? CvDEPTH(outcv) : 1])[pax];
spot = (CV **)svspot;
if (proto) {
goto done;
}
- if (SvTYPE(*spot) != SVt_PVCV) { /* Maybe prototype now, and had at
- maximum a prototype before. */
- SvREFCNT_dec(*spot);
- *spot = NULL;
+ if (CvDEPTH(outcv) && CvCLONE(compcv)) {
+ cv = *spot;
+ svspot = (SV **)(spot = &clonee);
+ }
+ else if (PadnameIsSTATE(name) || CvDEPTH(outcv))
+ cv = *spot;
+ else {
+ MAGIC *mg;
+ SvUPGRADE(name, SVt_PVMG);
+ mg = mg_find(name, PERL_MAGIC_proto);
+ assert (SvTYPE(*spot) == SVt_PVCV);
+ if (CvNAMED(*spot))
+ hek = CvNAME_HEK(*spot);
+ else {
+ CvNAME_HEK_set(*spot, hek =
+ share_hek(
+ PadnamePV(name)+1,
+ PadnameLEN(name)-1 * (PadnameUTF8(name) ? -1 : 1), 0
+ )
+ );
+ }
+ if (mg) {
+ assert(mg->mg_obj);
+ cv = (CV *)mg->mg_obj;
+ }
+ else {
+ sv_magic(name, &PL_sv_undef, PERL_MAGIC_proto, NULL, 0);
+ mg = mg_find(name, PERL_MAGIC_proto);
+ }
+ spot = (CV **)(svspot = &mg->mg_obj);
}
-
- cv = *spot;
if (!block || !ps || *ps || attrs
|| (CvFLAGS(compcv) & CVf_BUILTIN_ATTRS)
cv = NULL;
}
}
+ else if (CvDEPTH(outcv) && CvCLONE(compcv)) {
+ cv = NULL;
+ reusable = TRUE;
+ }
}
if (const_sv) {
SvREFCNT_inc_simple_void_NN(const_sv);
op_free(block);
SvREFCNT_dec(compcv);
PL_compcv = NULL;
- goto done;
+ goto clone;
}
- if (outcv == CvOUTSIDE(compcv)) {
+ /* Checking whether outcv is CvOUTSIDE(compcv) is not sufficient to
+ determine whether this sub definition is in the same scope as its
+ declaration. If this sub definition is inside an inner named pack-
+ age sub (my sub foo; sub bar { sub foo { ... } }), outcv points to
+ the package sub. So check PadnameOUTER(name) too.
+ */
+ if (outcv == CvOUTSIDE(compcv) && !PadnameOUTER(name)) {
assert(!CvWEAKOUTSIDE(compcv));
SvREFCNT_dec(CvOUTSIDE(compcv));
CvWEAKOUTSIDE_on(compcv);
&& block->op_type != OP_NULL
#endif
) {
- cv_flags_t existing_builtin_attrs = CvFLAGS(cv) & CVf_BUILTIN_ATTRS;
+ cv_flags_t preserved_flags =
+ CvFLAGS(cv) & (CVf_BUILTIN_ATTRS|CVf_NAMED);
PADLIST *const temp_padl = CvPADLIST(cv);
CV *const temp_cv = CvOUTSIDE(cv);
const cv_flags_t other_flags =
CvFLAGS(cv) & (CVf_SLABBED|CVf_WEAKOUTSIDE);
OP * const cvstart = CvSTART(cv);
- assert(CvWEAKOUTSIDE(cv));
- assert(CvNAMED(cv));
- assert(CvNAME_HEK(cv));
-
SvPOK_off(cv);
CvFLAGS(cv) =
- CvFLAGS(compcv) | existing_builtin_attrs | CVf_NAMED;
+ CvFLAGS(compcv) | preserved_flags;
CvOUTSIDE(cv) = CvOUTSIDE(compcv);
CvOUTSIDE_SEQ(cv) = CvOUTSIDE_SEQ(compcv);
CvPADLIST(cv) = CvPADLIST(compcv);
else {
cv = compcv;
*spot = cv;
- SvANY(cv)->xcv_gv_u.xcv_hek =
- share_hek(PadnamePV(name)+1,
+ }
+ if (!CvNAME_HEK(cv)) {
+ CvNAME_HEK_set(cv,
+ hek
+ ? share_hek_hek(hek)
+ : share_hek(PadnamePV(name)+1,
PadnameLEN(name)-1 * (PadnameUTF8(name) ? -1 : 1),
- 0);
- CvNAMED_on(cv);
+ 0)
+ );
}
CvFILE_set_from_cop(cv, PL_curcop);
CvSTASH_set(cv, PL_curstash);
attrs:
if (attrs) {
/* Need to do a C<use attributes $stash_of_cv,\&cv,@attrs>. */
- apply_attrs(PL_curstash, MUTABLE_SV(cv), attrs, FALSE);
+ apply_attrs(PL_curstash, MUTABLE_SV(cv), attrs);
}
if (block) {
}
}
+ clone:
+ if (clonee) {
+ assert(CvDEPTH(outcv));
+ spot = (CV **)
+ &PadARRAY(PadlistARRAY(CvPADLIST(outcv))[CvDEPTH(outcv)])[pax];
+ if (reusable) cv_clone_into(clonee, *spot);
+ else *spot = cv_clone(clonee);
+ SvREFCNT_dec(clonee);
+ cv = *spot;
+ SvPADMY_on(cv);
+ }
+ if (CvDEPTH(outcv) && !reusable && PadnameIsSTATE(name)) {
+ PADOFFSET depth = CvDEPTH(outcv);
+ while (--depth) {
+ SV *oldcv;
+ svspot = &PadARRAY(PadlistARRAY(CvPADLIST(outcv))[depth])[pax];
+ oldcv = *svspot;
+ *svspot = SvREFCNT_inc_simple_NN(cv);
+ SvREFCNT_dec(oldcv);
+ }
+ }
+
done:
if (PL_parser)
PL_parser->copline = NOLINE;
if (ec) {
op_free(block);
+ cv = PL_compcv;
if (name && block) {
const char *s = strrchr(name, ':');
s = s ? s+1 : name;
if (strEQ(s, "BEGIN")) {
const char not_safe[] =
"BEGIN not safe after errors--compilation aborted";
+ PL_compcv = 0;
+ SvREFCNT_dec(cv);
if (PL_in_eval & EVAL_KEEPERR)
Perl_croak(aTHX_ not_safe);
else {
}
}
}
- cv = PL_compcv;
goto done;
}
cv = PL_compcv;
if (name) {
GvCV_set(gv, cv);
- if (PL_madskills) {
- if (strEQ(name, "import")) {
- PL_formfeed = MUTABLE_SV(cv);
- /* diag_listed_as: SKIPME */
- Perl_warner(aTHX_ packWARN(WARN_VOID), "0x%"UVxf"\n", PTR2UV(cv));
- }
- }
GvCVGEN(gv) = 0;
if (HvENAME_HEK(GvSTASH(gv)))
/* sub Foo::bar { (shift)+1 } */
if (attrs) {
/* Need to do a C<use attributes $stash_of_cv,\&cv,@attrs>. */
HV *stash = name && GvSTASH(CvGV(cv)) ? GvSTASH(CvGV(cv)) : PL_curstash;
- apply_attrs(stash, MUTABLE_SV(cv), attrs, FALSE);
+ apply_attrs(stash, MUTABLE_SV(cv), attrs);
}
if (block && has_name) {
}
if (name && ! (PL_parser && PL_parser->error_count))
- process_special_blocks(name, gv, cv);
+ process_special_blocks(floor, name, gv, cv);
}
done:
}
STATIC void
-S_process_special_blocks(pTHX_ const char *const fullname, GV *const gv,
+S_process_special_blocks(pTHX_ I32 floor, const char *const fullname,
+ GV *const gv,
CV *const cv)
{
const char *const colon = strrchr(fullname,':');
if (*name == 'B') {
if (strEQ(name, "BEGIN")) {
const I32 oldscope = PL_scopestack_ix;
+ if (floor) LEAVE_SCOPE(floor);
ENTER;
SAVECOPFILE(&PL_compiling);
SAVECOPLINE(&PL_compiling);
PERL_ARGS_ASSERT_NEWXS_LEN_FLAGS;
{
- GV * const gv = name
- ? gv_fetchpvn(
- name,len,GV_ADDMULTI|flags,SVt_PVCV
- )
- : gv_fetchpv(
- (PL_curstash ? "__ANON__" : "__ANON__::__ANON__"),
- GV_ADDMULTI | flags, SVt_PVCV);
+ GV * const gv = gv_fetchpvn(
+ name ? name : PL_curstash ? "__ANON__" : "__ANON__::__ANON__",
+ name ? len : PL_curstash ? sizeof("__ANON__") - 1:
+ sizeof("__ANON__::__ANON__") - 1,
+ GV_ADDMULTI | flags, SVt_PVCV);
if (!subaddr)
Perl_croak(aTHX_ "panic: no address for '%s' in '%s'", name, filename);
CvXSUB(cv) = subaddr;
if (name)
- process_special_blocks(name, gv, cv);
+ process_special_blocks(0, name, gv, cv);
}
if (flags & XS_DYNAMIC_FILENAME) {
Perl_newCVREF(pTHX_ I32 flags, OP *o)
{
if (o->op_type == OP_PADANY) {
+ dVAR;
o->op_type = OP_PADCV;
o->op_ppaddr = PL_ppaddr[OP_PADCV];
+ return o;
}
return newUNOP(OP_RV2CV, flags, scalar(o));
}
LEAVE;
}
#endif /* !PERL_EXTERNAL_GLOB */
- gv = newGVgen("main");
+ gv = (GV *)newSV(0);
+ gv_init(gv, 0, "", 0, 0);
gv_IOadd(gv);
#ifndef PERL_EXTERNAL_GLOB
sv_setiv(GvSVn(gv),PL_glob_index++);
#endif
op_append_elem(OP_GLOB, o, newGVOP(OP_GV, 0, gv));
+ SvREFCNT_dec(gv); /* newGVOP increased it */
scalarkids(o);
return o;
}
if (kid)
kid = kid->op_sibling; /* get past "big" */
if (kid && kid->op_type == OP_CONST) {
- const bool save_taint = PL_tainted;
+ const bool save_taint = TAINT_get; /* accepted unused var warning if NO_TAINT_SUPPORT */
fbm_compile(((SVOP*)kid)->op_sv, 0);
- PL_tainted = save_taint;
+ TAINT_set(save_taint);
}
}
return ck_fun(o);
cLISTOPo->op_last = kid; /* There was only one element previously */
}
+ if (kid->op_type == OP_CONST && !(kid->op_private & OPpCONST_FOLDED)) {
+ SV * const sv = kSVOP->op_sv;
+ if (SvPOK(sv) && SvCUR(sv) == 1 && *SvPVX(sv) == ' ')
+ o->op_flags |= OPf_SPECIAL;
+ }
if (kid->op_type != OP_MATCH || kid->op_flags & OPf_STACKED) {
OP * const sibl = kid->op_sibling;
kid->op_sibling = 0;
- kid = pmruntime( newPMOP(OP_MATCH, OPf_SPECIAL), kid, 0, 0);
+ kid = pmruntime( newPMOP(OP_MATCH, 0), kid, 0, 0);
if (cLISTOPo->op_first == cLISTOPo->op_last)
cLISTOPo->op_last = kid;
cLISTOPo->op_first = kid;
cv = (CV*)SvRV(rv);
gv = NULL;
} break;
+ case OP_PADCV: {
+ PADNAME *name = PAD_COMPNAME(rvop->op_targ);
+ CV *compcv = PL_compcv;
+ PADOFFSET off = rvop->op_targ;
+ while (PadnameOUTER(name)) {
+ assert(PARENT_PAD_INDEX(name));
+ compcv = CvOUTSIDE(PL_compcv);
+ name = PadlistNAMESARRAY(CvPADLIST(compcv))
+ [off = PARENT_PAD_INDEX(name)];
+ }
+ assert(!PadnameIsOUR(name));
+ if (!PadnameIsSTATE(name)) {
+ MAGIC * mg = mg_find(name, PERL_MAGIC_proto);
+ assert(mg);
+ assert(mg->mg_obj);
+ cv = (CV *)mg->mg_obj;
+ }
+ else cv =
+ (CV *)AvARRAY(PadlistARRAY(CvPADLIST(compcv))[1])[off];
+ gv = NULL;
+ } break;
default: {
return NULL;
} break;
Perl_call_checker ckfun;
SV *ckobj;
cv_get_call_checker(cv, &ckfun, &ckobj);
+ if (!namegv) { /* expletive! */
+ /* XXX The call checker API is public. And it guarantees that
+ a GV will be provided with the right name. So we have
+ to create a GV. But it is still not correct, as its
+ stringification will include the package. What we
+ really need is a new call checker API that accepts a
+ GV or string (or GV or CV). */
+ HEK * const hek = CvNAME_HEK(cv);
+ assert(hek);
+ namegv = (GV *)sv_newmortal();
+ gv_init_pvn(namegv, PL_curstash, HEK_KEY(hek), HEK_LEN(hek),
+ SVf_UTF8 * !!HEK_UTF8(hek));
+ }
return ckfun(aTHX_ o, namegv, ckobj);
}
}
{
dVAR;
OP* oldop = NULL;
+ OP* oldoldop = NULL;
OP* defer_queue[MAX_DEFERRED]; /* small queue of deferred branches */
int defer_base = 0;
int defer_ix = -1;
}
break;
+ case OP_PUSHMARK:
+
+ /* Convert a series of PAD ops for my vars plus support into a
+ * single padrange op. Basically
+ *
+ * pushmark -> pad[ahs]v -> pad[ahs]?v -> ... -> (list) -> rest
+ *
+ * becomes, depending on circumstances, one of
+ *
+ * padrange ----------------------------------> (list) -> rest
+ * padrange --------------------------------------------> rest
+ *
+ * where all the pad indexes are sequential and of the same type
+ * (INTRO or not).
+ * We convert the pushmark into a padrange op, then skip
+ * any other pad ops, and possibly some trailing ops.
+ * Note that we don't null() the skipped ops, to make it
+ * easier for Deparse to undo this optimisation (and none of
+ * the skipped ops are holding any resources). It also makes
+ * it easier for find_uninit_var(), as it can just ignore
+ * padrange, and examine the original pad ops.
+ */
+ {
+ OP *p;
+ OP *followop = NULL; /* the op that will follow the padrange op */
+ U8 count = 0;
+ U8 intro = 0;
+ PADOFFSET base = 0; /* init only to stop compiler whining */
+ U8 gimme = 0; /* init only to stop compiler whining */
+ bool defav = 0; /* seen (...) = @_ */
+ bool reuse = 0; /* reuse an existing padrange op */
+
+ /* look for a pushmark -> gv[_] -> rv2av */
+
+ {
+ GV *gv;
+ OP *rv2av, *q;
+ p = o->op_next;
+ if ( p->op_type == OP_GV
+ && (gv = cGVOPx_gv(p))
+ && GvNAMELEN_get(gv) == 1
+ && *GvNAME_get(gv) == '_'
+ && GvSTASH(gv) == PL_defstash
+ && (rv2av = p->op_next)
+ && rv2av->op_type == OP_RV2AV
+ && !(rv2av->op_flags & OPf_REF)
+ && !(rv2av->op_private & (OPpLVAL_INTRO|OPpMAYBE_LVSUB))
+ && ((rv2av->op_flags & OPf_WANT) == OPf_WANT_LIST)
+ && o->op_sibling == rv2av /* these two for Deparse */
+ && cUNOPx(rv2av)->op_first == p
+ ) {
+ q = rv2av->op_next;
+ if (q->op_type == OP_NULL)
+ q = q->op_next;
+ if (q->op_type == OP_PUSHMARK) {
+ defav = 1;
+ p = q;
+ }
+ }
+ }
+ if (!defav) {
+ /* To allow Deparse to pessimise this, it needs to be able
+ * to restore the pushmark's original op_next, which it
+ * will assume to be the same as op_sibling. */
+ if (o->op_next != o->op_sibling)
+ break;
+ p = o;
+ }
+
+ /* scan for PAD ops */
+
+ for (p = p->op_next; p; p = p->op_next) {
+ if (p->op_type == OP_NULL)
+ continue;
+
+ if (( p->op_type != OP_PADSV
+ && p->op_type != OP_PADAV
+ && p->op_type != OP_PADHV
+ )
+ /* any private flag other than INTRO? e.g. STATE */
+ || (p->op_private & ~OPpLVAL_INTRO)
+ )
+ break;
+
+ /* let $a[N] potentially be optimised into AELEMFAST_LEX
+ * instead */
+ if ( p->op_type == OP_PADAV
+ && p->op_next
+ && p->op_next->op_type == OP_CONST
+ && p->op_next->op_next
+ && p->op_next->op_next->op_type == OP_AELEM
+ )
+ break;
+
+ /* for 1st padop, note what type it is and the range
+ * start; for the others, check that it's the same type
+ * and that the targs are contiguous */
+ if (count == 0) {
+ intro = (p->op_private & OPpLVAL_INTRO);
+ base = p->op_targ;
+ gimme = (p->op_flags & OPf_WANT);
+ }
+ else {
+ if ((p->op_private & OPpLVAL_INTRO) != intro)
+ break;
+ /* Note that you'd normally expect targs to be
+ * contiguous in my($a,$b,$c), but that's not the case
+ * when external modules start doing things, e.g.
+ * Function::Parameters */
+ if (p->op_targ != base + count)
+ break;
+ assert(p->op_targ == base + count);
+ /* all the padops should be in the same context */
+ if (gimme != (p->op_flags & OPf_WANT))
+ break;
+ }
+
+ /* for AV, HV, only when we're not flattening */
+ if ( p->op_type != OP_PADSV
+ && gimme != OPf_WANT_VOID
+ && !(p->op_flags & OPf_REF)
+ )
+ break;
+
+ if (count >= OPpPADRANGE_COUNTMASK)
+ break;
+
+ /* there's a biggest base we can fit into a
+ * SAVEt_CLEARPADRANGE in pp_padrange */
+ if (intro && base >
+ (UV_MAX >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT)))
+ break;
+
+ /* Success! We've got another valid pad op to optimise away */
+ count++;
+ followop = p->op_next;
+ }
+
+ if (count < 1)
+ break;
+
+ /* pp_padrange in specifically compile-time void context
+ * skips pushing a mark and lexicals; in all other contexts
+ * (including unknown till runtime) it pushes a mark and the
+ * lexicals. We must be very careful then, that the ops we
+ * optimise away would have exactly the same effect as the
+ * padrange.
+ * In particular in void context, we can only optimise to
+ * a padrange if we see the complete sequence
+ * pushmark, pad*v, ...., list, nextstate
+ * which has the net effect of leaving the stack empty
+ * (for now we leave the nextstate in the execution chain, for
+ * its other side-effects).
+ */
+ assert(followop);
+ if (gimme == OPf_WANT_VOID) {
+ if (followop->op_type == OP_LIST
+ && gimme == (followop->op_flags & OPf_WANT)
+ && ( followop->op_next->op_type == OP_NEXTSTATE
+ || followop->op_next->op_type == OP_DBSTATE))
+ {
+ followop = followop->op_next; /* skip OP_LIST */
+
+ /* consolidate two successive my(...);'s */
+
+ if ( oldoldop
+ && oldoldop->op_type == OP_PADRANGE
+ && (oldoldop->op_flags & OPf_WANT) == OPf_WANT_VOID
+ && (oldoldop->op_private & OPpLVAL_INTRO) == intro
+ && !(oldoldop->op_flags & OPf_SPECIAL)
+ ) {
+ U8 old_count;
+ assert(oldoldop->op_next == oldop);
+ assert( oldop->op_type == OP_NEXTSTATE
+ || oldop->op_type == OP_DBSTATE);
+ assert(oldop->op_next == o);
+
+ old_count
+ = (oldoldop->op_private & OPpPADRANGE_COUNTMASK);
+ assert(oldoldop->op_targ + old_count == base);
+
+ if (old_count < OPpPADRANGE_COUNTMASK - count) {
+ base = oldoldop->op_targ;
+ count += old_count;
+ reuse = 1;
+ }
+ }
+
+ /* if there's any immediately following singleton
+ * my var's; then swallow them and the associated
+ * nextstates; i.e.
+ * my ($a,$b); my $c; my $d;
+ * is treated as
+ * my ($a,$b,$c,$d);
+ */
+
+ while ( ((p = followop->op_next))
+ && ( p->op_type == OP_PADSV
+ || p->op_type == OP_PADAV
+ || p->op_type == OP_PADHV)
+ && (p->op_flags & OPf_WANT) == OPf_WANT_VOID
+ && (p->op_private & OPpLVAL_INTRO) == intro
+ && p->op_next
+ && ( p->op_next->op_type == OP_NEXTSTATE
+ || p->op_next->op_type == OP_DBSTATE)
+ && count < OPpPADRANGE_COUNTMASK
+ ) {
+ assert(base + count == p->op_targ);
+ count++;
+ followop = p->op_next;
+ }
+ }
+ else
+ break;
+ }
+
+ if (reuse) {
+ assert(oldoldop->op_type == OP_PADRANGE);
+ oldoldop->op_next = followop;
+ oldoldop->op_private = (intro | count);
+ o = oldoldop;
+ oldop = NULL;
+ oldoldop = NULL;
+ }
+ else {
+ /* Convert the pushmark into a padrange.
+ * To make Deparse easier, we guarantee that a padrange was
+ * *always* formerly a pushmark */
+ assert(o->op_type == OP_PUSHMARK);
+ o->op_next = followop;
+ o->op_type = OP_PADRANGE;
+ o->op_ppaddr = PL_ppaddr[OP_PADRANGE];
+ o->op_targ = base;
+ /* bit 7: INTRO; bit 6..0: count */
+ o->op_private = (intro | count);
+ o->op_flags = ((o->op_flags & ~(OPf_WANT|OPf_SPECIAL))
+ | gimme | (defav ? OPf_SPECIAL : 0));
+ }
+ break;
+ }
+
case OP_PADAV:
case OP_GV:
if (o->op_type == OP_PADAV || o->op_next->op_type == OP_RV2AV) {
}
}
+ oldoldop = oldop;
oldop = o;
}
LEAVE;