PP(pp_const)
{
- dVAR;
dSP;
XPUSHs(cSVOP_sv);
RETURN;
PP(pp_nextstate)
{
- dVAR;
PL_curcop = (COP*)PL_op;
TAINT_NOT; /* Each statement is presumed innocent */
PL_stack_sp = PL_stack_base + cxstack[cxstack_ix].blk_oldsp;
PP(pp_gvsv)
{
- dVAR;
dSP;
EXTEND(SP,1);
if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
RETURN;
}
+
+/* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
+
PP(pp_null)
{
- dVAR;
return NORMAL;
}
-/* This is sometimes called directly by pp_coreargs and pp_grepstart. */
+/* This is sometimes called directly by pp_coreargs, pp_grepstart and
+ amagic_call. */
PP(pp_pushmark)
{
- dVAR;
PUSHMARK(PL_stack_sp);
return NORMAL;
}
PP(pp_stringify)
{
- dVAR; dSP; dTARGET;
+ dSP; dTARGET;
SV * const sv = TOPs;
SETs(TARG);
sv_copypv(TARG, sv);
PP(pp_gv)
{
- dVAR; dSP;
+ dSP;
XPUSHs(MUTABLE_SV(cGVOP_gv));
RETURN;
}
+
+/* also used for: pp_andassign() */
+
PP(pp_and)
{
- dVAR;
PERL_ASYNC_CHECK();
{
/* SP is not used to remove a variable that is saved across the
PP(pp_sassign)
{
- dVAR; dSP;
+ dSP;
/* sassign keeps its args in the optree traditionally backwards.
So we pop them differently.
*/
PP(pp_cond_expr)
{
- dVAR; dSP;
+ dSP;
PERL_ASYNC_CHECK();
if (SvTRUEx(POPs))
RETURNOP(cLOGOP->op_other);
PP(pp_unstack)
{
- dVAR;
PERL_ASYNC_CHECK();
TAINT_NOT; /* Each statement is presumed innocent */
PL_stack_sp = PL_stack_base + cxstack[cxstack_ix].blk_oldsp;
PP(pp_concat)
{
- dVAR; dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
+ dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
{
dPOPTOPssrl;
bool lbyte;
}
if (!rcopied) {
- if (left == right)
- /* $r.$r: do magic twice: tied might return different 2nd time */
- SvGETMAGIC(right);
rpv = SvPV_nomg_const(right, rlen);
rbyte = !DO_UTF8(right);
}
if (lbyte != rbyte) {
- /* sv_utf8_upgrade_nomg() may reallocate the stack */
- PUTBACK;
if (lbyte)
sv_utf8_upgrade_nomg(TARG);
else {
sv_utf8_upgrade_nomg(right);
rpv = SvPV_nomg_const(right, rlen);
}
- SPAGAIN;
}
sv_catpvn_nomg(TARG, rpv, rlen);
PP(pp_padrange)
{
- dVAR; dSP;
+ dSP;
PADOFFSET base = PL_op->op_targ;
int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
int i;
(base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
| (count << SAVE_TIGHT_SHIFT)
| SAVEt_CLEARPADRANGE);
- assert(OPpPADRANGE_COUNTMASK + 1 == (1 <<OPpPADRANGE_COUNTSHIFT));
+ STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT)) == base);
{
dSS_ADD;
PP(pp_padsv)
{
- dVAR; dSP;
+ dSP;
EXTEND(SP, 1);
{
OP * const op = PL_op;
PP(pp_readline)
{
- dVAR;
dSP;
if (TOPs) {
SvGETMAGIC(TOPs);
PUTBACK;
Perl_pp_rv2gv(aTHX);
PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
+ if (PL_last_in_gv == (GV *)&PL_sv_undef)
+ PL_last_in_gv = NULL;
+ else
+ assert(isGV_with_GP(PL_last_in_gv));
}
}
return do_readline();
PP(pp_eq)
{
- dVAR; dSP;
+ dSP;
SV *left, *right;
tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
RETURN;
}
+
+/* also used for: pp_i_predec() pp_i_preinc() pp_predec() */
+
PP(pp_preinc)
{
- dVAR; dSP;
+ dSP;
const bool inc =
PL_op->op_type == OP_PREINC || PL_op->op_type == OP_I_PREINC;
if (UNLIKELY(SvTYPE(TOPs) >= SVt_PVAV || (isGV_with_GP(TOPs) && !SvFAKE(TOPs))))
return NORMAL;
}
+
+/* also used for: pp_orassign() */
+
PP(pp_or)
{
- dVAR; dSP;
+ dSP;
PERL_ASYNC_CHECK();
if (SvTRUE(TOPs))
RETURN;
}
}
+
+/* also used for: pp_dor() pp_dorassign() */
+
PP(pp_defined)
{
- dVAR; dSP;
+ dSP;
SV* sv;
bool defined;
const int op_type = PL_op->op_type;
PP(pp_add)
{
- dVAR; dSP; dATARGET; bool useleft; SV *svl, *svr;
+ dSP; dATARGET; bool useleft; SV *svl, *svr;
tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
svr = TOPs;
svl = TOPm1s;
if (aiv >= 0) {
auv = aiv;
auvok = 1; /* Now acting as a sign flag. */
- } else { /* 2s complement assumption for IV_MIN */
- auv = (UV)-aiv;
+ } else {
+ auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
}
}
a_valid = 1;
buv = biv;
buvok = 1;
} else
- buv = (UV)-biv;
+ buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
}
/* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
else "IV" now, independent of how it came in.
else {
/* Negate result */
if (result <= (UV)IV_MIN)
- SETi( -(IV)result );
+ SETi(result == (UV)IV_MIN
+ ? IV_MIN : -(IV)result);
else {
/* result valid, but out of range for IV. */
SETn( -(NV)result );
}
}
+
+/* also used for: pp_aelemfast_lex() */
+
PP(pp_aelemfast)
{
- dVAR; dSP;
+ dSP;
AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
const U32 lval = PL_op->op_flags & OPf_MOD;
PP(pp_join)
{
- dVAR; dSP; dMARK; dTARGET;
+ dSP; dMARK; dTARGET;
MARK++;
do_join(TARG, *MARK, MARK, SP);
SP = MARK;
PP(pp_pushre)
{
- dVAR; dSP;
+ dSP;
#ifdef DEBUGGING
/*
* We ass_u_me that LvTARGOFF() comes first, and that two STRLENs
/* Oversized hot code. */
+/* also used for: pp_say() */
+
PP(pp_print)
{
- dVAR; dSP; dMARK; dORIGMARK;
+ dSP; dMARK; dORIGMARK;
PerlIO *fp;
MAGIC *mg;
GV * const gv
RETURN;
}
+
+/* also used for: pp_rv2hv() */
+/* also called directly by pp_lvavref */
+
PP(pp_rv2av)
{
- dVAR; dSP; dTOPss;
+ dSP; dTOPss;
const I32 gimme = GIMME_V;
static const char an_array[] = "an ARRAY";
static const char a_hash[] = "a HASH";
- const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV;
+ const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
+ || PL_op->op_type == OP_LVAVREF;
const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
SvGETMAGIC(sv);
if (is_pp_rv2av) {
AV *const av = MUTABLE_AV(sv);
- /* The guts of pp_rv2av, with no intending change to preserve history
- (until such time as we get tools that can do blame annotation across
- whitespace changes. */
+ /* The guts of pp_rv2av */
if (gimme == G_ARRAY) {
SP--;
PUTBACK;
STATIC void
S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
{
- dVAR;
-
PERL_ARGS_ASSERT_DO_ODDBALL;
if (*oddkey) {
}
}
+
+/* Do a mark and sweep with the SVf_BREAK flag to detect elements which
+ * are common to both the LHS and RHS of an aassign, and replace them
+ * with copies. All these copies are made before the actual list assign is
+ * done.
+ *
+ * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
+ * element ($b) to the first LH element ($a), modifies $a; when the
+ * second assignment is done, the second RH element now has the wrong
+ * value. So we initially replace the RHS with ($b, mortalcopy($a)).
+ * Note that we don't need to make a mortal copy of $b.
+ *
+ * The algorithm below works by, for every RHS element, marking the
+ * corresponding LHS target element with SVf_BREAK. Then if the RHS
+ * element is found with SVf_BREAK set, it means it would have been
+ * modified, so make a copy.
+ * Note that by scanning both LHS and RHS in lockstep, we avoid
+ * unnecessary copies (like $b above) compared with a naive
+ * "mark all LHS; copy all marked RHS; unmark all LHS".
+ *
+ * If the LHS element is a 'my' declaration and has a refcount of 1, then
+ * it can't be common and can be skipped.
+ *
+ * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
+ * that we thought we didn't need to call S_aassign_copy_common(), but we
+ * have anyway for sanity checking. If we find we need to copy, then panic.
+ */
+
+PERL_STATIC_INLINE void
+S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
+ SV **firstrelem, SV **lastrelem
+#ifdef DEBUGGING
+ , bool fake
+#endif
+)
+{
+ dVAR;
+ SV **relem;
+ SV **lelem;
+ SSize_t lcount = lastlelem - firstlelem + 1;
+ bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
+ bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
+
+ assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
+ assert(firstlelem < lastlelem); /* at least 2 LH elements */
+ assert(firstrelem < lastrelem); /* at least 2 RH elements */
+
+
+ lelem = firstlelem;
+ /* we never have to copy the first RH element; it can't be corrupted
+ * by assigning something to the corresponding first LH element.
+ * So this scan does in a loop: mark LHS[N]; test RHS[N+1]
+ */
+ relem = firstrelem + 1;
+
+ for (; relem <= lastrelem; relem++) {
+ SV *svr;
+
+ /* mark next LH element */
+
+ if (--lcount >= 0) {
+ SV *svl = *lelem++;
+
+ if (UNLIKELY(!svl)) {/* skip AV alias marker */
+ assert (lelem <= lastlelem);
+ svl = *lelem++;
+ lcount--;
+ }
+
+ assert(svl);
+ if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
+ if (!marked)
+ return;
+ /* this LH element will consume all further args;
+ * no need to mark any further LH elements (if any).
+ * But we still need to scan any remaining RHS elements;
+ * set lcount negative to distinguish from lcount == 0,
+ * so the loop condition continues being true
+ */
+ lcount = -1;
+ lelem--; /* no need to unmark this element */
+ }
+ else if (!(do_rc1 && SvREFCNT(svl) == 1) && svl != &PL_sv_undef) {
+ assert(!SvIMMORTAL(svl));
+ SvFLAGS(svl) |= SVf_BREAK;
+ marked = TRUE;
+ }
+ else if (!marked) {
+ /* don't check RH element if no SVf_BREAK flags set yet */
+ if (!lcount)
+ break;
+ continue;
+ }
+ }
+
+ /* see if corresponding RH element needs copying */
+
+ assert(marked);
+ svr = *relem;
+ assert(svr);
+
+ if (UNLIKELY(SvFLAGS(svr) & SVf_BREAK)) {
+
+#ifdef DEBUGGING
+ if (fake) {
+ /* op_dump(PL_op); */
+ Perl_croak(aTHX_
+ "panic: aassign skipped needed copy of common RH elem %"
+ UVuf, (UV)(relem - firstrelem));
+ }
+#endif
+
+ TAINT_NOT; /* Each item is independent */
+
+ /* Dear TODO test in t/op/sort.t, I love you.
+ (It's relying on a panic, not a "semi-panic" from newSVsv()
+ and then an assertion failure below.) */
+ if (UNLIKELY(SvIS_FREED(svr))) {
+ Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
+ (void*)svr);
+ }
+ /* avoid break flag while copying; otherwise COW etc
+ * disabled... */
+ SvFLAGS(svr) &= ~SVf_BREAK;
+ /* Not newSVsv(), as it does not allow copy-on-write,
+ resulting in wasteful copies.
+ Also, we use SV_NOSTEAL in case the SV is used more than
+ once, e.g. (...) = (f())[0,0]
+ Where the same SV appears twice on the RHS without a ref
+ count bump. (Although I suspect that the SV won't be
+ stealable here anyway - DAPM).
+ */
+ *relem = sv_mortalcopy_flags(svr,
+ SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
+ /* ... but restore afterwards in case it's needed again,
+ * e.g. ($a,$b,$c) = (1,$a,$a)
+ */
+ SvFLAGS(svr) |= SVf_BREAK;
+ }
+
+ if (!lcount)
+ break;
+ }
+
+ if (!marked)
+ return;
+
+ /*unmark LHS */
+
+ while (lelem > firstlelem) {
+ SV * const svl = *(--lelem);
+ if (svl)
+ SvFLAGS(svl) &= ~SVf_BREAK;
+ }
+}
+
+
+
PP(pp_aassign)
{
dVAR; dSP;
HV *hash;
SSize_t i;
int magic;
- U32 lval = 0;
+ U32 lval;
+#ifdef DEBUGGING
+ bool fake = 0;
+#endif
PL_delaymagic = DM_DELAY; /* catch simultaneous items */
- gimme = GIMME_V;
- if (gimme == G_ARRAY)
- lval = PL_op->op_flags & OPf_MOD || LVRET;
/* If there's a common identifier on both sides we have to take
* special care that assigning the identifier on the left doesn't
* clobber a value on the right that's used later in the list.
- * Don't bother if LHS is just an empty hash or array.
*/
- if ( (PL_op->op_private & OPpASSIGN_COMMON)
- && (
- firstlelem != lastlelem
- || ! ((sv = *firstlelem))
- || SvMAGICAL(sv)
- || ! (SvTYPE(sv) == SVt_PVAV || SvTYPE(sv) == SVt_PVHV)
- || (SvTYPE(sv) == SVt_PVAV && AvFILL((AV*)sv) != -1)
- || (SvTYPE(sv) == SVt_PVHV && HvUSEDKEYS((HV*)sv) != 0)
- )
+ if ( (PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1))
+ /* at least 2 LH and RH elements, or commonality isn't an issue */
+ && (firstlelem < lastlelem && firstrelem < lastrelem)
) {
- EXTEND_MORTAL(lastrelem - firstrelem + 1);
- for (relem = firstrelem; relem <= lastrelem; relem++) {
- if (LIKELY((sv = *relem))) {
- TAINT_NOT; /* Each item is independent */
-
- /* Dear TODO test in t/op/sort.t, I love you.
- (It's relying on a panic, not a "semi-panic" from newSVsv()
- and then an assertion failure below.) */
- if (UNLIKELY(SvIS_FREED(sv))) {
- Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
- (void*)sv);
- }
- /* Not newSVsv(), as it does not allow copy-on-write,
- resulting in wasteful copies. We need a second copy of
- a temp here, hence the SV_NOSTEAL. */
- *relem = sv_mortalcopy_flags(sv,SV_GMAGIC|SV_DO_COW_SVSETSV
- |SV_NOSTEAL);
- }
- }
+ if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
+ /* skip the scan if all scalars have a ref count of 1 */
+ for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
+ sv = *lelem;
+ if (!sv || SvREFCNT(sv) == 1)
+ continue;
+            if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVHV)
+ goto do_scan;
+ break;
+ }
+ }
+ else {
+ do_scan:
+ S_aassign_copy_common(aTHX_
+ firstlelem, lastlelem, firstrelem, lastrelem
+#ifdef DEBUGGING
+ , fake
+#endif
+ );
+ }
}
+#ifdef DEBUGGING
+ else {
+ /* on debugging builds, do the scan even if we've concluded we
+ * don't need to, then panic if we find commonality. Note that the
+ * scanner assumes at least 2 elements */
+ if (firstlelem < lastlelem && firstrelem < lastrelem) {
+ fake = 1;
+ goto do_scan;
+ }
+ }
+#endif
+
+ gimme = GIMME_V;
+ lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
relem = firstrelem;
lelem = firstlelem;
hash = NULL;
while (LIKELY(lelem <= lastlelem)) {
+ bool alias = FALSE;
TAINT_NOT; /* Each item stands on its own, taintwise. */
sv = *lelem++;
+ if (UNLIKELY(!sv)) {
+ alias = TRUE;
+ sv = *lelem++;
+ ASSUME(SvTYPE(sv) == SVt_PVAV);
+ }
switch (SvTYPE(sv)) {
- case SVt_PVAV:
+ case SVt_PVAV: {
+ bool already_copied = FALSE;
ary = MUTABLE_AV(sv);
magic = SvMAGICAL(ary) != 0;
ENTER;
SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
- av_clear(ary);
+
+      /* We need to clear ary. There is a danger that if we do this,
+ * elements on the RHS may be prematurely freed, e.g.
+ * @a = ($a[0]);
+ * In the case of possible commonality, make a copy of each
+ * RHS SV *before* clearing the array, and add a reference
+ * from the tmps stack, so that it doesn't leak on death.
+ * Otherwise, make a copy of each RHS SV only as we're storing
+ * it into the array - that way we don't have to worry about
+ * it being leaked if we die, but don't incur the cost of
+ * mortalising everything.
+ */
+
+ if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
+ && (relem <= lastrelem)
+ && (magic || AvFILL(ary) != -1))
+ {
+ SV **svp;
+ EXTEND_MORTAL(lastrelem - relem + 1);
+ for (svp = relem; svp <= lastrelem; svp++) {
+ /* see comment in S_aassign_copy_common about SV_NOSTEAL */
+ *svp = sv_mortalcopy_flags(*svp,
+ SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
+ TAINT_NOT;
+ }
+ already_copied = TRUE;
+ }
+
+ av_clear(ary);
av_extend(ary, lastrelem - relem);
i = 0;
while (relem <= lastrelem) { /* gobble up all the rest */
SV **didstore;
- if (LIKELY(*relem))
- SvGETMAGIC(*relem); /* before newSV, in case it dies */
- sv = newSV(0);
- sv_setsv_nomg(sv, *relem);
- *(relem++) = sv;
+ if (LIKELY(!alias)) {
+ if (already_copied)
+ sv = *relem;
+ else {
+ if (LIKELY(*relem))
+ /* before newSV, in case it dies */
+ SvGETMAGIC(*relem);
+ sv = newSV(0);
+ /* see comment in S_aassign_copy_common about
+ * SV_NOSTEAL */
+ sv_setsv_flags(sv, *relem,
+ (SV_DO_COW_SVSETSV|SV_NOSTEAL));
+ *relem = sv;
+ }
+ }
+ else {
+ if (!already_copied)
+ SvGETMAGIC(*relem);
+ if (!SvROK(*relem))
+ DIE(aTHX_ "Assigned value is not a reference");
+ if (SvTYPE(SvRV(*relem)) > SVt_PVLV)
+ /* diag_listed_as: Assigned value is not %s reference */
+ DIE(aTHX_
+ "Assigned value is not a SCALAR reference");
+ if (lval && !already_copied)
+ *relem = sv_mortalcopy(*relem);
+ /* XXX else check for weak refs? */
+ sv = SvREFCNT_inc_simple_NN(SvRV(*relem));
+ }
+ relem++;
+ if (already_copied)
+ SvREFCNT_inc_simple_NN(sv); /* undo mortal free */
didstore = av_store(ary,i++,sv);
if (magic) {
if (!didstore)
SvSETMAGIC(MUTABLE_SV(ary));
LEAVE;
break;
+ }
+
case SVt_PVHV: { /* normal hash */
SV *tmpstr;
int odd;
int duplicates = 0;
SV** topelem = relem;
SV **firsthashrelem = relem;
+ bool already_copied = FALSE;
hash = MUTABLE_HV(sv);
magic = SvMAGICAL(hash) != 0;
ENTER;
SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
+
+      /* We need to clear hash. There is a danger that if we do this,
+ * elements on the RHS may be prematurely freed, e.g.
+ * %h = (foo => $h{bar});
+ * In the case of possible commonality, make a copy of each
+ * RHS SV *before* clearing the hash, and add a reference
+ * from the tmps stack, so that it doesn't leak on death.
+ */
+
+ if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
+ && (relem <= lastrelem)
+ && (magic || HvUSEDKEYS(hash)))
+ {
+ SV **svp;
+ EXTEND_MORTAL(lastrelem - relem + 1);
+ for (svp = relem; svp <= lastrelem; svp++) {
+ *svp = sv_mortalcopy_flags(*svp,
+ SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
+ TAINT_NOT;
+ }
+ already_copied = TRUE;
+ }
+
hv_clear(hash);
+
while (LIKELY(relem < lastrelem+odd)) { /* gobble up all the rest */
HE *didstore;
assert(*relem);
to avoid having the next op modify our rhs. Copy
it also if it is gmagical, lest it make the
hv_store_ent call below croak, leaking the value. */
- sv = lval || SvGMAGICAL(*relem)
+ sv = (lval || SvGMAGICAL(*relem)) && !already_copied
? sv_mortalcopy(*relem)
: *relem;
relem++;
assert(*relem);
- SvGETMAGIC(*relem);
- tmpstr = newSV(0);
- sv_setsv_nomg(tmpstr,*relem++); /* value */
+ if (already_copied)
+ tmpstr = *relem++;
+ else {
+ SvGETMAGIC(*relem);
+ tmpstr = newSV(0);
+ sv_setsv_nomg(tmpstr,*relem++); /* value */
+ }
+
if (gimme == G_ARRAY) {
if (hv_exists_ent(hash, sv, 0))
/* key overwrites an existing entry */
*topelem++ = tmpstr;
}
}
+ if (already_copied)
+ SvREFCNT_inc_simple_NN(tmpstr); /* undo mortal free */
didstore = hv_store_ent(hash,sv,tmpstr,0);
if (magic) {
if (!didstore) sv_2mortal(tmpstr);
PP(pp_qr)
{
- dVAR; dSP;
+ dSP;
PMOP * const pm = cPMOP;
REGEXP * rx = PM_GETRE(pm);
SV * const pkg = rx ? CALLREG_PACKAGE(rx) : NULL;
PP(pp_match)
{
- dVAR; dSP; dTARG;
+ dSP; dTARG;
PMOP *pm = cPMOP;
PMOP *dynpm = pm;
const char *s;
const char *truebase; /* Start of string */
REGEXP *rx = PM_GETRE(pm);
bool rxtainted;
- const I32 gimme = GIMME;
+ const I32 gimme = GIMME_V;
STRLEN len;
const I32 oldsave = PL_savestack_ix;
I32 had_zerolen = 0;
if (PL_op->op_flags & OPf_STACKED)
TARG = POPs;
- else if (PL_op->op_private & OPpTARGET_MY)
+ else if (ARGTARG)
GETTARGET;
else {
TARG = DEFSV;
LEAVE_SCOPE(oldsave);
RETURN;
}
- /* NOTREACHED */
+ NOT_REACHED; /* NOTREACHED */
-nope:
+ nope:
if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
if (!mg)
mg = mg_find_mglob(TARG);
OP *
Perl_do_readline(pTHX)
{
- dVAR; dSP; dTARGETSTACKED;
+ dSP; dTARGETSTACKED;
SV *sv;
STRLEN tmplen = 0;
STRLEN offset;
goto have_fp;
}
}
- fp = nextargv(PL_last_in_gv);
+ fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
if (!fp) { /* Note: fp != IoIFP(io) */
(void)do_close(PL_last_in_gv, FALSE); /* now it does*/
}
if (gimme == G_SCALAR) {
/* undef TARG, and push that undefined value */
if (type != OP_RCATLINE) {
- SV_CHECK_THINKFIRST_COW_DROP(TARG);
- SvOK_off(TARG);
+ sv_setsv(TARG,NULL);
}
PUSHTARG;
}
{
PerlIO_clearerr(fp);
if (IoFLAGS(io) & IOf_ARGV) {
- fp = nextargv(PL_last_in_gv);
+ fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
if (fp)
continue;
(void)do_close(PL_last_in_gv, FALSE);
PP(pp_helem)
{
- dVAR; dSP;
+ dSP;
HE* he;
SV **svp;
SV * const keysv = POPs;
RETURN;
}
+
+/* a stripped-down version of Perl_softref2xv() for use by
+ * pp_multideref(), which doesn't use PL_op->op_flags */
+
+GV *
+S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
+ const svtype type)
+{
+ if (PL_op->op_private & HINT_STRICT_REFS) {
+ if (SvOK(sv))
+ Perl_die(aTHX_ PL_no_symref_sv, sv,
+ (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
+ else
+ Perl_die(aTHX_ PL_no_usym, what);
+ }
+ if (!SvOK(sv))
+ Perl_die(aTHX_ PL_no_usym, what);
+ return gv_fetchsv_nomg(sv, GV_ADD, type);
+}
+
+
+/* Handle one or more aggregate derefs and array/hash indexings, e.g.
+ * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
+ *
+ * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
+ * Each of these either contains a set of actions, or an argument, such as
+ * an IV to use as an array index, or a lexical var to retrieve.
+ * Several actions are stored per UV; we keep shifting new actions off the
+ * one UV, and only reload when it becomes zero.
+ */
+
+PP(pp_multideref)
+{
+ SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
+ UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
+ UV actions = items->uv;
+
+ assert(actions);
+ /* this tells find_uninit_var() where we're up to */
+ PL_multideref_pc = items;
+
+ while (1) {
+        /* there are three main classes of action; the first retrieves
+ * the initial AV or HV from a variable or the stack; the second
+ * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
+ * the third an unrolled (/DREFHV, rv2hv, helem).
+ */
+ switch (actions & MDEREF_ACTION_MASK) {
+
+ case MDEREF_reload:
+ actions = (++items)->uv;
+ continue;
+
+ case MDEREF_AV_padav_aelem: /* $lex[...] */
+ sv = PAD_SVl((++items)->pad_offset);
+ goto do_AV_aelem;
+
+ case MDEREF_AV_gvav_aelem: /* $pkg[...] */
+ sv = UNOP_AUX_item_sv(++items);
+ assert(isGV_with_GP(sv));
+ sv = (SV*)GvAVn((GV*)sv);
+ goto do_AV_aelem;
+
+ case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
+ {
+ dSP;
+ sv = POPs;
+ PUTBACK;
+ goto do_AV_rv2av_aelem;
+ }
+
+ case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
+ sv = UNOP_AUX_item_sv(++items);
+ assert(isGV_with_GP(sv));
+ sv = GvSVn((GV*)sv);
+ goto do_AV_vivify_rv2av_aelem;
+
+ case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
+ sv = PAD_SVl((++items)->pad_offset);
+ /* FALLTHROUGH */
+
+ do_AV_vivify_rv2av_aelem:
+ case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
+ /* this is the OPpDEREF action normally found at the end of
+ * ops like aelem, helem, rv2sv */
+ sv = vivify_ref(sv, OPpDEREF_AV);
+ /* FALLTHROUGH */
+
+ do_AV_rv2av_aelem:
+ /* this is basically a copy of pp_rv2av when it just has the
+ * sKR/1 flags */
+ SvGETMAGIC(sv);
+ if (LIKELY(SvROK(sv))) {
+ if (UNLIKELY(SvAMAGIC(sv))) {
+ sv = amagic_deref_call(sv, to_av_amg);
+ }
+ sv = SvRV(sv);
+ if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
+ DIE(aTHX_ "Not an ARRAY reference");
+ }
+ else if (SvTYPE(sv) != SVt_PVAV) {
+ if (!isGV_with_GP(sv))
+ sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
+ sv = MUTABLE_SV(GvAVn((GV*)sv));
+ }
+ /* FALLTHROUGH */
+
+ do_AV_aelem:
+ {
+ /* retrieve the key; this may be either a lexical or package
+ * var (whose index/ptr is stored as an item) or a signed
+ * integer constant stored as an item.
+ */
+ SV *elemsv;
+ IV elem = 0; /* to shut up stupid compiler warnings */
+
+
+ assert(SvTYPE(sv) == SVt_PVAV);
+
+ switch (actions & MDEREF_INDEX_MASK) {
+ case MDEREF_INDEX_none:
+ goto finish;
+ case MDEREF_INDEX_const:
+ elem = (++items)->iv;
+ break;
+ case MDEREF_INDEX_padsv:
+ elemsv = PAD_SVl((++items)->pad_offset);
+ goto check_elem;
+ case MDEREF_INDEX_gvsv:
+ elemsv = UNOP_AUX_item_sv(++items);
+ assert(isGV_with_GP(elemsv));
+ elemsv = GvSVn((GV*)elemsv);
+ check_elem:
+ if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
+ && ckWARN(WARN_MISC)))
+ Perl_warner(aTHX_ packWARN(WARN_MISC),
+ "Use of reference \"%"SVf"\" as array index",
+ SVfARG(elemsv));
+ /* the only time that S_find_uninit_var() needs this
+ * is to determine which index value triggered the
+ * undef warning. So just update it here. Note that
+ * since we don't save and restore this var (e.g. for
+ * tie or overload execution), its value will be
+ * meaningless apart from just here */
+ PL_multideref_pc = items;
+ elem = SvIV(elemsv);
+ break;
+ }
+
+
+ /* this is basically a copy of pp_aelem with OPpDEREF skipped */
+
+ if (!(actions & MDEREF_FLAG_last)) {
+ SV** svp = av_fetch((AV*)sv, elem, 1);
+ if (!svp || ! (sv=*svp))
+ DIE(aTHX_ PL_no_aelem, elem);
+ break;
+ }
+
+ if (PL_op->op_private &
+ (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
+ {
+ if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
+ sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
+ }
+ else {
+ I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
+ sv = av_delete((AV*)sv, elem, discard);
+ if (discard)
+ return NORMAL;
+ if (!sv)
+ sv = &PL_sv_undef;
+ }
+ }
+ else {
+ const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
+ const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
+ const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
+ bool preeminent = TRUE;
+ AV *const av = (AV*)sv;
+ SV** svp;
+
+ if (UNLIKELY(localizing)) {
+ MAGIC *mg;
+ HV *stash;
+
+                    /* If we can determine whether the element exists,
+                     * try to preserve the existence of a tied array
+                     * element by using EXISTS and DELETE if possible.
+                     * Fall back to FETCH and STORE otherwise. */
+ if (SvCANEXISTDELETE(av))
+ preeminent = av_exists(av, elem);
+ }
+
+ svp = av_fetch(av, elem, lval && !defer);
+
+ if (lval) {
+ if (!svp || !(sv = *svp)) {
+ IV len;
+ if (!defer)
+ DIE(aTHX_ PL_no_aelem, elem);
+ len = av_tindex(av);
+ sv = sv_2mortal(newSVavdefelem(av,
+ /* Resolve a negative index now, unless it points
+ * before the beginning of the array, in which
+ * case record it for error reporting in
+ * magic_setdefelem. */
+ elem < 0 && len + elem >= 0
+ ? len + elem : elem, 1));
+ }
+ else {
+ if (UNLIKELY(localizing)) {
+ if (preeminent) {
+ save_aelem(av, elem, svp);
+ sv = *svp; /* may have changed */
+ }
+ else
+ SAVEADELETE(av, elem);
+ }
+ }
+ }
+ else {
+ sv = (svp ? *svp : &PL_sv_undef);
+ /* see note in pp_helem() */
+ if (SvRMAGICAL(av) && SvGMAGICAL(sv))
+ mg_get(sv);
+ }
+ }
+
+ }
+ finish:
+ {
+ dSP;
+ XPUSHs(sv);
+ RETURN;
+ }
+ /* NOTREACHED */
+
+
+
+
+ case MDEREF_HV_padhv_helem: /* $lex{...} */
+ sv = PAD_SVl((++items)->pad_offset);
+ goto do_HV_helem;
+
+ case MDEREF_HV_gvhv_helem: /* $pkg{...} */
+ sv = UNOP_AUX_item_sv(++items);
+ assert(isGV_with_GP(sv));
+ sv = (SV*)GvHVn((GV*)sv);
+ goto do_HV_helem;
+
+ case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
+ {
+ dSP;
+ sv = POPs;
+ PUTBACK;
+ goto do_HV_rv2hv_helem;
+ }
+
+ case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
+ sv = UNOP_AUX_item_sv(++items);
+ assert(isGV_with_GP(sv));
+ sv = GvSVn((GV*)sv);
+ goto do_HV_vivify_rv2hv_helem;
+
+ case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
+ sv = PAD_SVl((++items)->pad_offset);
+ /* FALLTHROUGH */
+
+ do_HV_vivify_rv2hv_helem:
+ case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
+ /* this is the OPpDEREF action normally found at the end of
+ * ops like aelem, helem, rv2sv */
+ sv = vivify_ref(sv, OPpDEREF_HV);
+ /* FALLTHROUGH */
+
+ do_HV_rv2hv_helem:
+ /* this is basically a copy of pp_rv2hv when it just has the
+ * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
+
+ SvGETMAGIC(sv);
+ if (LIKELY(SvROK(sv))) {
+ if (UNLIKELY(SvAMAGIC(sv))) {
+ sv = amagic_deref_call(sv, to_hv_amg);
+ }
+ sv = SvRV(sv);
+ if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
+ DIE(aTHX_ "Not a HASH reference");
+ }
+ else if (SvTYPE(sv) != SVt_PVHV) {
+ if (!isGV_with_GP(sv))
+ sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
+ sv = MUTABLE_SV(GvHVn((GV*)sv));
+ }
+ /* FALLTHROUGH */
+
+ do_HV_helem:
+ {
+ /* retrieve the key; this may be either a lexical / package
+ * var or a string constant, whose index/ptr is stored as an
+ * item
+ */
+ SV *keysv = NULL; /* to shut up stupid compiler warnings */
+
+ assert(SvTYPE(sv) == SVt_PVHV);
+
+ switch (actions & MDEREF_INDEX_MASK) {
+ case MDEREF_INDEX_none:
+ goto finish;
+
+ case MDEREF_INDEX_const:
+ keysv = UNOP_AUX_item_sv(++items);
+ break;
+
+ case MDEREF_INDEX_padsv:
+ keysv = PAD_SVl((++items)->pad_offset);
+ break;
+
+ case MDEREF_INDEX_gvsv:
+ keysv = UNOP_AUX_item_sv(++items);
+ keysv = GvSVn((GV*)keysv);
+ break;
+ }
+
+ /* see comment above about setting this var */
+ PL_multideref_pc = items;
+
+
+ /* ensure that candidate CONSTs have been HEKified */
+ assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
+ || SvTYPE(keysv) >= SVt_PVMG
+ || !SvOK(keysv)
+ || SvROK(keysv)
+ || SvIsCOW_shared_hash(keysv));
+
+ /* this is basically a copy of pp_helem with OPpDEREF skipped */
+
+ if (!(actions & MDEREF_FLAG_last)) {
+ HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
+ if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
+ DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
+ break;
+ }
+
+ if (PL_op->op_private &
+ (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
+ {
+ if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
+ sv = hv_exists_ent((HV*)sv, keysv, 0)
+ ? &PL_sv_yes : &PL_sv_no;
+ }
+ else {
+ I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
+ sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
+ if (discard)
+ return NORMAL;
+ if (!sv)
+ sv = &PL_sv_undef;
+ }
+ }
+ else {
+ const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
+ const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
+ const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
+ bool preeminent = TRUE;
+ SV **svp;
+ HV * const hv = (HV*)sv;
+ HE* he;
+
+ if (UNLIKELY(localizing)) {
+ MAGIC *mg;
+ HV *stash;
+
+                    /* If we can determine whether the element exists,
+                     * try to preserve the existence of a tied hash
+                     * element by using EXISTS and DELETE if possible.
+                     * Fall back to FETCH and STORE otherwise. */
+ if (SvCANEXISTDELETE(hv))
+ preeminent = hv_exists_ent(hv, keysv, 0);
+ }
+
+ he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
+ svp = he ? &HeVAL(he) : NULL;
+
+
+ if (lval) {
+ if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
+ SV* lv;
+ SV* key2;
+ if (!defer)
+ DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
+ lv = sv_newmortal();
+ sv_upgrade(lv, SVt_PVLV);
+ LvTYPE(lv) = 'y';
+ sv_magic(lv, key2 = newSVsv(keysv),
+ PERL_MAGIC_defelem, NULL, 0);
+ /* sv_magic() increments refcount */
+ SvREFCNT_dec_NN(key2);
+ LvTARG(lv) = SvREFCNT_inc_simple(hv);
+ LvTARGLEN(lv) = 1;
+ sv = lv;
+ }
+ else {
+ if (localizing) {
+ if (HvNAME_get(hv) && isGV(sv))
+ save_gp(MUTABLE_GV(sv),
+ !(PL_op->op_flags & OPf_SPECIAL));
+ else if (preeminent) {
+ save_helem_flags(hv, keysv, svp,
+ (PL_op->op_flags & OPf_SPECIAL)
+ ? 0 : SAVEf_SETMAGIC);
+ sv = *svp; /* may have changed */
+ }
+ else
+ SAVEHDELETE(hv, keysv);
+ }
+ }
+ }
+ else {
+ sv = (svp && *svp ? *svp : &PL_sv_undef);
+ /* see note in pp_helem() */
+ if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
+ mg_get(sv);
+ }
+ }
+ goto finish;
+ }
+
+ } /* switch */
+
+ actions >>= MDEREF_SHIFT;
+ } /* while */
+ /* NOTREACHED */
+}
+
+
PP(pp_iter)
{
- dVAR; dSP;
+ dSP;
PERL_CONTEXT *cx;
SV *oldsv;
SV **itersvp;
sv = AvARRAY(av)[ix];
}
+ if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
+ SvSetMagicSV(*itersvp, sv);
+ break;
+ }
+
if (LIKELY(sv)) {
if (UNLIKELY(SvIS_FREED(sv))) {
*itersvp = NULL;
Perl_croak(aTHX_ "Use of freed value in iteration");
}
if (SvPADTMP(sv)) {
- assert(!IS_PADGV(sv));
sv = newSVsv(sv);
}
else {
PP(pp_subst)
{
- dVAR; dSP; dTARG;
+ dSP; dTARG;
PMOP *pm = cPMOP;
PMOP *rpm = pm;
char *s;
char *strend;
const char *c;
STRLEN clen;
- I32 iters = 0;
- I32 maxiters;
+ SSize_t iters = 0;
+ SSize_t maxiters;
bool once;
U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
See "how taint works" above */
if (PL_op->op_flags & OPf_STACKED)
TARG = POPs;
- else if (PL_op->op_private & OPpTARGET_MY)
+ else if (ARGTARG)
GETTARGET;
else {
TARG = DEFSV;
if (DO_UTF8(TARG) && !doutf8) {
nsv = sv_newmortal();
SvSetSV(nsv, dstr);
- if (PL_encoding)
- sv_recode_to_utf8(nsv, PL_encoding);
+ if (IN_ENCODING)
+ sv_recode_to_utf8(nsv, _get_encoding());
else
sv_utf8_upgrade(nsv);
c = SvPV_const(nsv, clen);
Move(s, d, i+1, char); /* include the NUL */
}
SPAGAIN;
- mPUSHi((I32)iters);
+ mPUSHi(iters);
}
}
else {
first = FALSE;
}
else {
- if (PL_encoding) {
+ if (IN_ENCODING) {
if (!nsv) nsv = sv_newmortal();
sv_copypv(nsv, repl);
- if (!DO_UTF8(nsv)) sv_recode_to_utf8(nsv, PL_encoding);
+ if (!DO_UTF8(nsv)) sv_recode_to_utf8(nsv, _get_encoding());
sv_catsv(dstr, nsv);
}
else sv_catsv(dstr, repl);
SvPV_set(dstr, NULL);
SPAGAIN;
- mPUSHi((I32)iters);
+ mPUSHi(iters);
}
}
PP(pp_grepwhile)
{
- dVAR; dSP;
+ dSP;
if (SvTRUEx(POPs))
PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
src = PL_stack_base[*PL_markstack_ptr];
if (SvPADTMP(src)) {
- assert(!IS_PADGV(src));
src = PL_stack_base[*PL_markstack_ptr] = sv_mortalcopy(src);
PL_tmps_floor++;
}
PP(pp_leavesub)
{
- dVAR; dSP;
+ dSP;
SV **mark;
SV **newsp;
PMOP *newpm;
PERL_CONTEXT *cx;
SV *sv;
- if (CxMULTICALL(&cxstack[cxstack_ix]))
+ if (CxMULTICALL(&cxstack[cxstack_ix])) {
+ /* entry zero of a stack is always PL_sv_undef, which
+ * simplifies converting a '()' return into undef in scalar context */
+ assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
return 0;
+ }
POPBLOCK(cx,newpm);
cxstack_ix++; /* temporarily protect top context */
PP(pp_entersub)
{
- dVAR; dSP; dPOPss;
+ dSP; dPOPss;
GV *gv;
CV *cv;
PERL_CONTEXT *cx;
SV* sub_name;
/* anonymous or undef'd function leaves us no recourse */
- if (CvANON(cv) || !(gv = CvGV(cv))) {
- if (CvNAMED(cv))
- DIE(aTHX_ "Undefined subroutine &%"HEKf" called",
- HEKfARG(CvNAME_HEK(cv)));
+ if (CvLEXICAL(cv) && CvHASGV(cv))
+ DIE(aTHX_ "Undefined subroutine &%"SVf" called",
+ SVfARG(cv_name(cv, NULL, 0)));
+ if (CvANON(cv) || !CvHASGV(cv)) {
DIE(aTHX_ "Undefined subroutine called");
}
/* autoloaded stub? */
- if (cv != GvCV(gv)) {
+ if (cv != GvCV(gv = CvGV(cv))) {
cv = GvCV(gv);
}
/* should call AUTOLOAD now? */
else {
-try_autoload:
+ try_autoload:
if ((autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
GvNAMEUTF8(gv) ? SVf_UTF8 : 0)))
{
if (*MARK)
{
if (SvPADTMP(*MARK)) {
- assert(!IS_PADGV(*MARK));
*MARK = sv_mortalcopy(*MARK);
}
SvTEMP_off(*MARK);
while (items--) {
mark++;
if (*mark && SvPADTMP(*mark)) {
- assert(!IS_PADGV(*mark));
*mark = sv_mortalcopy(*mark);
}
}
if (CvANON(cv))
Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
else {
- HEK *const hek = CvNAME_HEK(cv);
- SV *tmpstr;
- if (hek) {
- tmpstr = sv_2mortal(newSVhek(hek));
- }
- else {
- tmpstr = sv_newmortal();
- gv_efullname3(tmpstr, CvGV(cv), NULL);
- }
Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%"SVf"\"",
- SVfARG(tmpstr));
+ SVfARG(cv_name(cv,NULL,0)));
}
}
PP(pp_aelem)
{
- dVAR; dSP;
+ dSP;
SV** svp;
SV* const elemsv = POPs;
IV elem = SvIV(elemsv);
return sv;
}
-PP(pp_method)
-{
- dVAR; dSP;
- SV* const sv = TOPs;
-
- if (SvROK(sv)) {
- SV* const rsv = SvRV(sv);
- if (SvTYPE(rsv) == SVt_PVCV) {
- SETs(rsv);
- RETURN;
- }
- }
-
- SETs(method_common(sv, NULL));
- RETURN;
-}
-
-PP(pp_method_named)
+PERL_STATIC_INLINE HV *
+S_opmethod_stash(pTHX_ SV* meth)
{
- dVAR; dSP;
- SV* const sv = cSVOP_sv;
- U32 hash = SvSHARED_HASH(sv);
-
- XPUSHs(method_common(sv, &hash));
- RETURN;
-}
-
-STATIC SV *
-S_method_common(pTHX_ SV* meth, U32* hashp)
-{
- dVAR;
SV* ob;
- GV* gv;
HV* stash;
- SV *packsv = NULL;
- SV * const sv = PL_stack_base + TOPMARK == PL_stack_sp
+
+ SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
? (Perl_croak(aTHX_ "Can't call method \"%"SVf"\" without a "
"package or object reference", SVfARG(meth)),
(SV *)NULL)
: *(PL_stack_base + TOPMARK + 1);
- PERL_ARGS_ASSERT_METHOD_COMMON;
+ PERL_ARGS_ASSERT_OPMETHOD_STASH;
if (UNLIKELY(!sv))
undefined:
Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on an undefined value",
SVfARG(meth));
- SvGETMAGIC(sv);
+ if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
+ else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
+ stash = gv_stashsv(sv, GV_CACHE_ONLY);
+ if (stash) return stash;
+ }
+
if (SvROK(sv))
ob = MUTABLE_SV(SvRV(sv));
else if (!SvOK(sv)) goto undefined;
GV* iogv;
STRLEN packlen;
const char * const packname = SvPV_nomg_const(sv, packlen);
- const bool packname_is_utf8 = !!SvUTF8(sv);
- const HE* const he =
- (const HE *)hv_common(
- PL_stashcache, NULL, packname, packlen,
- packname_is_utf8 ? HVhek_UTF8 : 0, 0, NULL, 0
- );
-
- if (he) {
- stash = INT2PTR(HV*,SvIV(HeVAL(he)));
- DEBUG_o(Perl_deb(aTHX_ "PL_stashcache hit %p for '%"SVf"'\n",
- (void*)stash, SVfARG(sv)));
- goto fetch;
- }
+ const U32 packname_utf8 = SvUTF8(sv);
+ stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
+ if (stash) return stash;
if (!(iogv = gv_fetchpvn_flags(
- packname, packlen, SVf_UTF8 * packname_is_utf8, SVt_PVIO
+ packname, packlen, packname_utf8, SVt_PVIO
)) ||
!(ob=MUTABLE_SV(GvIO(iogv))))
{
SVfARG(meth));
}
/* assume it's a package name */
- stash = gv_stashpvn(packname, packlen, packname_is_utf8 ? SVf_UTF8 : 0);
- if (!stash)
- packsv = sv;
- else {
- SV* const ref = newSViv(PTR2IV(stash));
- (void)hv_store(PL_stashcache, packname,
- packname_is_utf8 ? -(I32)packlen : (I32)packlen, ref, 0);
- DEBUG_o(Perl_deb(aTHX_ "PL_stashcache caching %p for '%"SVf"'\n",
- (void*)stash, SVfARG(sv)));
- }
- goto fetch;
+ stash = gv_stashpvn(packname, packlen, packname_utf8);
+ if (stash) return stash;
+ else return MUTABLE_HV(sv);
}
/* it _is_ a filehandle name -- replace with a reference */
*(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
: meth));
}
- stash = SvSTASH(ob);
+ return SvSTASH(ob);
+}
- fetch:
- /* NOTE: stash may be null, hope hv_fetch_ent and
- gv_fetchmethod can cope (it seems they can) */
+PP(pp_method)
+{
+ dSP;
+ GV* gv;
+ HV* stash;
+ SV* const meth = TOPs;
- /* shortcut for simple names */
- if (hashp) {
- const HE* const he = hv_fetch_ent(stash, meth, 0, *hashp);
- if (he) {
- gv = MUTABLE_GV(HeVAL(he));
- assert(stash);
- if (isGV(gv) && GvCV(gv) &&
- (!GvCVGEN(gv) || GvCVGEN(gv)
- == (PL_sub_generation + HvMROMETA(stash)->cache_gen)))
- return MUTABLE_SV(GvCV(gv));
- }
+ if (SvROK(meth)) {
+ SV* const rmeth = SvRV(meth);
+ if (SvTYPE(rmeth) == SVt_PVCV) {
+ SETs(rmeth);
+ RETURN;
+ }
}
- assert(stash || packsv);
- gv = gv_fetchmethod_sv_flags(stash ? stash : MUTABLE_HV(packsv),
- meth, GV_AUTOLOAD | GV_CROAK);
+ stash = opmethod_stash(meth);
+
+ gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
assert(gv);
- return isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv);
+ SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
+ RETURN;
+}
+
+/* METHOD_CHECK_CACHE(stash, cache, meth): shared helper for the
+ * pp_method_* ops below.  Looks 'meth' up in the hash 'cache'; if that
+ * finds a GV holding a CV whose cache generation is still valid for
+ * 'stash' (GvCVGEN of 0, or equal to PL_sub_generation plus the
+ * stash's mro cache_gen), pushes that CV and returns from the
+ * enclosing PP function via RETURN.  NOTE(review): deliberately not
+ * do{}while(0)-wrapped — it declares a local 'he' and expands RETURN,
+ * so callers must invoke it inside its own block, as all callers
+ * below do. */
+#define METHOD_CHECK_CACHE(stash,cache,meth) \
+ const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
+ if (he) { \
+ gv = MUTABLE_GV(HeVAL(he)); \
+ if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
+ == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
+ { \
+ XPUSHs(MUTABLE_SV(GvCV(gv))); \
+ RETURN; \
+ } \
+ } \
+
+/* pp_method_named: resolve a method whose name is known at compile time
+ * (stored in the METHOP).  opmethod_stash() resolves the invocant on the
+ * stack to its stash; when no package stash exists it returns the bareword
+ * package-name SV cast to HV*, hence the SVt_PVHV guard before consulting
+ * the method cache.  Falls back to a full fetch (with AUTOLOAD, croaking
+ * on failure) and pushes the CV — or the GV for non-GV-backed entries. */
+PP(pp_method_named)
+{
+ dSP;
+ GV* gv;
+ SV* const meth = cMETHOPx_meth(PL_op);
+ HV* const stash = opmethod_stash(meth);
+
+ if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
+ METHOD_CHECK_CACHE(stash, stash, meth);
+ }
+
+ gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
+ assert(gv);
+
+ XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
+ RETURN;
+}
+
+/* pp_method_super: SUPER::meth dispatch with a compile-time method name.
+ * Resolution starts from the stash of the currently compiled package
+ * (CopSTASH), not the invocant's stash; the mro 'super' cache is tried
+ * first, then a full fetch with GV_SUPER (and AUTOLOAD, croaking on
+ * failure). */
+PP(pp_method_super)
+{
+ dSP;
+ GV* gv;
+ HV* cache;
+ SV* const meth = cMETHOPx_meth(PL_op);
+ HV* const stash = CopSTASH(PL_curcop);
+ /* SUPER doesn't need the real object's (or class's) stash at all,
+ * since it uses CopSTASH; however, we must still validate the
+ * invocant — S_opmethod_stash performs that check (and croaks on
+ * a missing/undefined invocant) */
+ opmethod_stash(meth);
+
+ if ((cache = HvMROMETA(stash)->super)) {
+ METHOD_CHECK_CACHE(stash, cache, meth);
+ }
+
+ gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
+ assert(gv);
+
+ XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
+ RETURN;
+}
+
+/* pp_method_redir: method call redirected to a class fixed at compile
+ * time (the METHOP's rclass), e.g. resolved Foo::->meth forms.  The
+ * invocant is still validated via opmethod_stash().  If the redirect
+ * class has no stash yet, its name SV is passed (cast to HV*) so
+ * gv_fetchmethod_sv_flags can still report a sensible error. */
+PP(pp_method_redir)
+{
+ dSP;
+ GV* gv;
+ SV* const meth = cMETHOPx_meth(PL_op);
+ HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
+ opmethod_stash(meth); /* not used but needed for error checks */
+
+ if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
+ else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
+
+ gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
+ assert(gv);
+
+ XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
+ RETURN;
+}
+
+/* pp_method_redir_super: like pp_method_redir, but dispatches through
+ * SUPER of the compile-time redirect class: tries that stash's mro
+ * 'super' cache, then does a full fetch with GV_SUPER (plus AUTOLOAD,
+ * croaking on failure).  The invocant is validated via
+ * opmethod_stash(); a missing stash falls back to the class-name SV
+ * cast to HV*, as in pp_method_redir. */
+PP(pp_method_redir_super)
+{
+ dSP;
+ GV* gv;
+ HV* cache;
+ SV* const meth = cMETHOPx_meth(PL_op);
+ HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
+ opmethod_stash(meth); /* not used but needed for error checks */
+
+ if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
+ else if ((cache = HvMROMETA(stash)->super)) {
+ METHOD_CHECK_CACHE(stash, cache, meth);
+ }
+
+ gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
+ assert(gv);
+
+ XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
+ RETURN;
+}
/*
- * Local variables:
- * c-indentation-style: bsd
- * c-basic-offset: 4
- * indent-tabs-mode: nil
- * End:
- *
* ex: set ts=8 sts=4 sw=4 et:
*/