*/
SV *left = POPs; SV *right = TOPs;
- if (PL_op->op_private & OPpASSIGN_BACKWARDS) {
+ if (PL_op->op_private & OPpASSIGN_BACKWARDS) { /* {or,and,dor}assign */
SV * const temp = left;
left = right; right = temp;
}
PP(pp_cond_expr)
{
dSP;
+ SV *sv;
+
PERL_ASYNC_CHECK();
- if (SvTRUEx(POPs))
- RETURNOP(cLOGOP->op_other);
- else
- RETURNOP(cLOGOP->op_next);
+ /* ternary (cond ? : ): pop the condition and dispatch to the
+ * true branch (op_other) or the false branch (op_next) */
+ sv = POPs;
+ RETURNOP(SvTRUE_NN(sv) ? cLOGOP->op_other : cLOGOP->op_next);
}
PP(pp_unstack)
}
else { /* $l .= $r and left == TARG */
if (!SvOK(left)) {
- if (left == right && ckWARN(WARN_UNINITIALIZED)) /* $l .= $l */
- report_uninit(right);
- sv_setpvs(left, "");
+ if ((left == right /* $l .= $l */
+ || (PL_op->op_private & OPpTARGET_MY)) /* $l = $l . $r */
+ && ckWARN(WARN_UNINITIALIZED)
+ )
+ report_uninit(left);
+ SvPVCLEAR(left);
}
else {
SvPV_force_nomg_nolen(left);
}
/* push the elements of av onto the stack.
- * XXX Note that padav has similar code but without the mg_get().
- * I suspect that the mg_get is no longer needed, but while padav
- * differs, it can't share this function */
+ * Returns PL_op->op_next to allow tail-call optimisation of its callers */
-STATIC void
+STATIC OP*
S_pushav(pTHX_ AV* const av)
{
dSP;
PADOFFSET i;
for (i=0; i < (PADOFFSET)maxarg; i++) {
SV ** const svp = av_fetch(av, i, FALSE);
- /* See note in pp_helem, and bug id #27839 */
- SP[i+1] = svp
- ? SvGMAGICAL(*svp) ? (mg_get(*svp), *svp) : *svp
- : &PL_sv_undef;
+ SP[i+1] = svp ? *svp : &PL_sv_undef;
}
}
else {
}
SP += maxarg;
PUTBACK;
+ return NORMAL;
}
dSP;
PADOFFSET base = PL_op->op_targ;
int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
- int i;
if (PL_op->op_flags & OPf_SPECIAL) {
/* fake the RHS of my ($x,$y,..) = @_ */
PUSHMARK(SP);
- S_pushav(aTHX_ GvAVn(PL_defgv));
+ (void)S_pushav(aTHX_ GvAVn(PL_defgv));
SPAGAIN;
}
/* note, this is only skipped for compile-time-known void cxt */
if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
+ int i;
+
EXTEND(SP, count);
PUSHMARK(SP);
for (i = 0; i <count; i++)
(base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
| (count << SAVE_TIGHT_SHIFT)
| SAVEt_CLEARPADRANGE);
+ int i;
+
STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
- assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT)) == base);
+ assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT))
+ == (Size_t)base);
{
dSS_ADD;
SS_ADD_UV(payload);
PP(pp_readline)
{
dSP;
+ /* pp_coreargs pushes a NULL to indicate no args passed to
+ * CORE::readline() */
if (TOPs) {
SvGETMAGIC(TOPs);
tryAMAGICunTARGETlist(iter_amg, 0);
PUTBACK;
Perl_pp_rv2gv(aTHX);
PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
- if (PL_last_in_gv == (GV *)&PL_sv_undef)
- PL_last_in_gv = NULL;
- else
- assert(isGV_with_GP(PL_last_in_gv));
+ assert((SV*)PL_last_in_gv == &PL_sv_undef || isGV_with_GP(PL_last_in_gv));
}
}
return do_readline();
PP(pp_or)
{
dSP;
+ SV *sv;
PERL_ASYNC_CHECK();
- if (SvTRUE(TOPs))
+ sv = TOPs;
+ if (SvTRUE_NN(sv))
RETURN;
else {
if (PL_op->op_type == OP_OR)
AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
const U32 lval = PL_op->op_flags & OPf_MOD;
- SV** const svp = av_fetch(av, (I8)PL_op->op_private, lval);
- SV *sv = (svp ? *svp : &PL_sv_undef);
+ const I8 key = (I8)PL_op->op_private;
+ SV** svp;
+ SV *sv;
- if (UNLIKELY(!svp && lval))
- DIE(aTHX_ PL_no_aelem, (int)(I8)PL_op->op_private);
+ assert(SvTYPE(av) == SVt_PVAV);
EXTEND(SP, 1);
+
+ /* inlined av_fetch() for simple cases ... */
+ if (!SvRMAGICAL(av) && key >= 0 && key <= AvFILLp(av)) {
+ sv = AvARRAY(av)[key];
+ if (sv) {
+ PUSHs(sv);
+ RETURN;
+ }
+ }
+
+ /* ... else do it the hard way */
+ svp = av_fetch(av, key, lval);
+ sv = (svp ? *svp : &PL_sv_undef);
+
+ if (UNLIKELY(!svp && lval))
+ DIE(aTHX_ PL_no_aelem, (int)key);
+
if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
mg_get(sv);
PUSHs(sv);
RETURN;
}
-PP(pp_pushre)
-{
- dSP;
-#ifdef DEBUGGING
- /*
- * We ass_u_me that LvTARGOFF() comes first, and that two STRLENs
- * will be enough to hold an OP*.
- */
- SV* const sv = sv_newmortal();
- sv_upgrade(sv, SVt_PVLV);
- LvTYPE(sv) = '/';
- Copy(&PL_op, &LvTARGOFF(sv), 1, OP*);
- XPUSHs(sv);
-#else
- XPUSHs(MUTABLE_SV(PL_op));
-#endif
- RETURN;
-}
-
/* Oversized hot code. */
/* also used for: pp_say() */
}
+/* do the common parts of pp_padhv() and pp_rv2hv()
+ * It assumes the caller has done EXTEND(SP, 1) or equivalent.
+ * 'is_keys' indicates the OPpPADHV_ISKEYS/OPpRV2HV_ISKEYS flag is set.
+ * 'has_targ' indicates that the op has a target - this should
+ * be a compile-time constant so that the code can be constant-folded as
+ * appropriate
+ * */
+
+PERL_STATIC_INLINE OP*
+S_padhv_rv2hv_common(pTHX_ HV *hv, U8 gimme, bool is_keys, bool has_targ)
+{
+ bool is_tied;
+ bool is_bool;
+ MAGIC *mg;
+ dSP;
+ IV i;
+ SV *sv;
+
+ assert(PL_op->op_type == OP_PADHV || PL_op->op_type == OP_RV2HV);
+
+ /* list context: push all the hash's key/value pairs and return */
+ if (gimme == G_ARRAY) {
+ hv_pushkv(hv, 3);
+ return NORMAL;
+ }
+
+ if (is_keys)
+ /* 'keys %h' masquerading as '%h': reset iterator */
+ (void)hv_iterinit(hv);
+
+ if (gimme == G_VOID)
+ return NORMAL;
+
+ /* scalar context from here on. is_bool: only the truth of the
+ * result is needed (OPpTRUEBOOL, or OPpMAYBE_TRUEBOOL resolved at
+ * run time); is_tied: the hash has tie magic attached */
+ is_bool = ( PL_op->op_private & OPpTRUEBOOL
+ || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
+ && block_gimme() == G_VOID));
+ is_tied = SvRMAGICAL(hv) && (mg = mg_find(MUTABLE_SV(hv), PERL_MAGIC_tied));
+
+ if (UNLIKELY(is_tied)) {
+ if (is_keys && !is_bool) {
+ /* tied hash: no stored key count, so count by iterating */
+ i = 0;
+ while (hv_iternext(hv))
+ i++;
+ goto push_i;
+ }
+ else {
+ sv = magic_scalarpack(hv, mg);
+ goto push_sv;
+ }
+ }
+ else {
+ i = HvUSEDKEYS(hv);
+ if (is_bool) {
+ sv = i ? &PL_sv_yes : &PL_sv_zero;
+ push_sv:
+ PUSHs(sv);
+ }
+ else {
+ push_i:
+ if (has_targ) {
+ dTARGET;
+ PUSHi(i);
+ }
+ else
+#ifdef PERL_OP_PARENT
+ if (is_keys) {
+ /* parent op should be an unused OP_KEYS whose targ we can
+ * use */
+ dTARG;
+ OP *k;
+
+ assert(!OpHAS_SIBLING(PL_op));
+ k = PL_op->op_sibparent;
+ assert(k->op_type == OP_KEYS);
+ TARG = PAD_SV(k->op_targ);
+ PUSHi(i);
+ }
+ else
+#endif
+ mPUSHi(i);
+ }
+ }
+
+ PUTBACK;
+ return NORMAL;
+}
+
+
+/* This is also called directly by pp_lvavref. */
+
+/* padav: push a lexical array's elements (or, with OPf_REF, the AV
+ * itself) onto the stack; in scalar context push its element count. */
+PP(pp_padav)
+{
+ dSP; dTARGET;
+ U8 gimme;
+ assert(SvTYPE(TARG) == SVt_PVAV);
+ /* 'my @a' (but not 'state @a'): schedule the pad slot to be
+ * cleared again on scope exit */
+ if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO ))
+ if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) ))
+ SAVECLEARSV(PAD_SVl(PL_op->op_targ));
+ EXTEND(SP, 1);
+
+ /* OPf_REF: the AV itself is wanted, not its contents */
+ if (PL_op->op_flags & OPf_REF) {
+ PUSHs(TARG);
+ RETURN;
+ }
+ else if (PL_op->op_private & OPpMAYBE_LVSUB) {
+ const I32 flags = is_lvalue_sub();
+ if (flags && !(flags & OPpENTERSUB_INARGS)) {
+ if (GIMME_V == G_SCALAR)
+ /* diag_listed_as: Can't return %s to lvalue scalar context */
+ Perl_croak(aTHX_ "Can't return array to lvalue scalar context");
+ PUSHs(TARG);
+ RETURN;
+ }
+ }
+
+ gimme = GIMME_V;
+ /* list context: tail-call the shared element-pushing helper */
+ if (gimme == G_ARRAY)
+ return S_pushav(aTHX_ (AV*)TARG);
+
+ if (gimme == G_SCALAR) {
+ /* scalar context: element count, or just truth under TRUEBOOL */
+ const SSize_t maxarg = AvFILL(MUTABLE_AV(TARG)) + 1;
+ if (!maxarg)
+ PUSHs(&PL_sv_zero);
+ else if (PL_op->op_private & OPpTRUEBOOL)
+ PUSHs(&PL_sv_yes);
+ else
+ mPUSHi(maxarg);
+ }
+ RETURN;
+}
+
+
+/* padhv: push a lexical hash (or, with OPf_REF, the HV itself);
+ * the non-trivial contexts are handled by S_padhv_rv2hv_common(). */
+PP(pp_padhv)
+{
+ dSP; dTARGET;
+ U8 gimme;
+
+ assert(SvTYPE(TARG) == SVt_PVHV);
+ /* 'my %h' (but not 'state %h'): schedule the pad slot to be
+ * cleared again on scope exit */
+ if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO ))
+ if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) ))
+ SAVECLEARSV(PAD_SVl(PL_op->op_targ));
+
+ EXTEND(SP, 1);
+
+ /* OPf_REF: the HV itself is wanted, not its contents */
+ if (PL_op->op_flags & OPf_REF) {
+ PUSHs(TARG);
+ RETURN;
+ }
+ else if (PL_op->op_private & OPpMAYBE_LVSUB) {
+ const I32 flags = is_lvalue_sub();
+ if (flags && !(flags & OPpENTERSUB_INARGS)) {
+ if (GIMME_V == G_SCALAR)
+ /* diag_listed_as: Can't return %s to lvalue scalar context */
+ Perl_croak(aTHX_ "Can't return hash to lvalue scalar context");
+ PUSHs(TARG);
+ RETURN;
+ }
+ }
+
+ gimme = GIMME_V;
+
+ /* remaining contexts are shared with pp_rv2hv(); a pad op has no
+ * separate targ for the count, hence has_targ == 0 */
+ return S_padhv_rv2hv_common(aTHX_ (HV*)TARG, gimme,
+ cBOOL(PL_op->op_private & OPpPADHV_ISKEYS),
+ 0 /* has_targ*/);
+}
+
+
/* also used for: pp_rv2hv() */
/* also called directly by pp_lvavref */
if (is_pp_rv2av) {
AV *const av = MUTABLE_AV(sv);
- /* The guts of pp_rv2av */
+
if (gimme == G_ARRAY) {
SP--;
PUTBACK;
- S_pushav(aTHX_ av);
- SPAGAIN;
+ return S_pushav(aTHX_ av);
}
- else if (gimme == G_SCALAR) {
- dTARGET;
+
+ if (gimme == G_SCALAR) {
const SSize_t maxarg = AvFILL(av) + 1;
- SETi(maxarg);
- }
- } else {
- /* The guts of pp_rv2hv */
- if (gimme == G_ARRAY) { /* array wanted */
- *PL_stack_sp = sv;
- return Perl_do_kv(aTHX);
- }
- else if ((PL_op->op_private & OPpTRUEBOOL
- || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
- && block_gimme() == G_VOID ))
- && (!SvRMAGICAL(sv) || !mg_find(sv, PERL_MAGIC_tied)))
- SETs(HvUSEDKEYS(sv) ? &PL_sv_yes : sv_2mortal(newSViv(0)));
- else if (gimme == G_SCALAR) {
- dTARG;
- TARG = Perl_hv_scalar(aTHX_ MUTABLE_HV(sv));
- SETTARG;
+ if (PL_op->op_private & OPpTRUEBOOL)
+ SETs(maxarg ? &PL_sv_yes : &PL_sv_zero);
+ else {
+ dTARGET;
+ SETi(maxarg);
+ }
}
}
+ else {
+ SP--; PUTBACK;
+ return S_padhv_rv2hv_common(aTHX_ (HV*)sv, gimme,
+ cBOOL(PL_op->op_private & OPpRV2HV_ISKEYS),
+ 1 /* has_targ*/);
+ }
RETURN;
croak_cant_return:
lcount = -1;
lelem--; /* no need to unmark this element */
}
- else if (!(do_rc1 && SvREFCNT(svl) == 1) && svl != &PL_sv_undef) {
- assert(!SvIMMORTAL(svl));
+ else if (!(do_rc1 && SvREFCNT(svl) == 1) && !SvIMMORTAL(svl)) {
SvFLAGS(svl) |= SVf_BREAK;
marked = TRUE;
}
assert(svr);
if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
+ U32 brk = (SvFLAGS(svr) & SVf_BREAK);
#ifdef DEBUGGING
if (fake) {
/* ... but restore afterwards in case it's needed again,
* e.g. ($a,$b,$c) = (1,$a,$a)
*/
- SvFLAGS(svr) |= SVf_BREAK;
+ SvFLAGS(svr) |= brk;
}
if (!lcount)
SV **relem;
SV **lelem;
-
- SV *sv;
- AV *ary;
-
U8 gimme;
- HV *hash;
- SSize_t i;
- int magic;
- U32 lval;
/* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
* only need to save locally, not on the save stack */
U16 old_delaymagic = PL_delaymagic;
if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
/* skip the scan if all scalars have a ref count of 1 */
for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
- sv = *lelem;
+ SV *sv = *lelem;
if (!sv || SvREFCNT(sv) == 1)
continue;
if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVAV)
#endif
gimme = GIMME_V;
- lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
-
relem = firstrelem;
lelem = firstlelem;
- ary = NULL;
- hash = NULL;
+ if (relem > lastrelem)
+ goto no_relems;
+
+ /* first lelem loop while there are still relems */
while (LIKELY(lelem <= lastlelem)) {
bool alias = FALSE;
- TAINT_NOT; /* Each item stands on its own, taintwise. */
- sv = *lelem++;
- if (UNLIKELY(!sv)) {
+ SV *lsv = *lelem++;
+
+ TAINT_NOT; /* Each item stands on its own, taintwise. */
+
+ assert(relem <= lastrelem);
+ if (UNLIKELY(!lsv)) {
alias = TRUE;
- sv = *lelem++;
- ASSUME(SvTYPE(sv) == SVt_PVAV);
+ lsv = *lelem++;
+ ASSUME(SvTYPE(lsv) == SVt_PVAV);
}
- switch (SvTYPE(sv)) {
- case SVt_PVAV: {
- bool already_copied = FALSE;
- ary = MUTABLE_AV(sv);
- magic = SvMAGICAL(ary) != 0;
- ENTER;
- SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
-
- /* We need to clear ary. The is a danger that if we do this,
- * elements on the RHS may be prematurely freed, e.g.
- * @a = ($a[0]);
- * In the case of possible commonality, make a copy of each
- * RHS SV *before* clearing the array, and add a reference
- * from the tmps stack, so that it doesn't leak on death.
- * Otherwise, make a copy of each RHS SV only as we're storing
- * it into the array - that way we don't have to worry about
- * it being leaked if we die, but don't incur the cost of
- * mortalising everything.
- */
- if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
- && (relem <= lastrelem)
- && (magic || AvFILL(ary) != -1))
- {
- SV **svp;
- EXTEND_MORTAL(lastrelem - relem + 1);
+ switch (SvTYPE(lsv)) {
+ case SVt_PVAV: {
+ SV **svp;
+ SSize_t i;
+ SSize_t tmps_base;
+ SSize_t nelems = lastrelem - relem + 1;
+ AV *ary = MUTABLE_AV(lsv);
+
+ /* Assigning to an aggregate is tricky. First there is the
+ * issue of commonality, e.g. @a = ($a[0]). Since the
+ * stack isn't refcounted, clearing @a prior to storing
+ * elements will free $a[0]. Similarly with
+ * sub FETCH { $status[$_[1]] } @status = @tied[0,1];
+ *
+ * The way to avoid these issues is to make the copy of each
+ * SV (and we normally store a *copy* in the array) *before*
+ * clearing the array. But this has a problem in that
+ * if the code croaks during copying, the not-yet-stored copies
+ * could leak. One way to avoid this is to make all the copies
+ * mortal, but that's quite expensive.
+ *
+ * The current solution to these issues is to use a chunk
+ * of the tmps stack as a temporary refcounted-stack. SVs
+ * will be put on there during processing to avoid leaks,
+ * but will be removed again before the end of this block,
+ * so free_tmps() is never normally called. Also, the
+ * sv_refcnt of the SVs doesn't have to be manipulated, since
+ * the ownership of 1 reference count is transferred directly
+ * from the tmps stack to the AV when the SV is stored.
+ *
+ * We disarm slots in the temps stack by storing PL_sv_undef
+ * there: it doesn't matter if that SV's refcount is
+ * repeatedly decremented during a croak. But usually this is
+ * only an interim measure. By the end of this code block
+ * we try where possible to not leave any PL_sv_undef's on the
+ * tmps stack e.g. by shuffling newer entries down.
+ *
+ * There is one case where we don't copy: non-magical
+ * SvTEMP(sv)'s with a ref count of 1. The only owner of these
+ * is on the tmps stack, so it's safe to directly steal the SV
+ * rather than copying. This is common in things like function
+ * returns, map etc, which all return a list of such SVs.
+ *
+ * Note however something like @a = (f())[0,0], where there is
+ * a danger of the same SV being shared: this is avoided because
+ * when the SV is stored as $a[0], its ref count gets bumped,
+ * so the RC==1 test fails and the second element is copied
+ * instead.
+ *
+ * We also use one slot in the tmps stack to hold an extra
+ * ref to the array, to ensure it doesn't get prematurely
+ * freed. Again, this is removed before the end of this block.
+ *
+ * Note that OPpASSIGN_COMMON_AGG is used to flag a possible
+ * @a = ($a[0]) case, but the current implementation uses the
+ * same algorithm regardless, so ignores that flag. (It *is*
+ * used in the hash branch below, however).
+ */
+
+ /* Reserve slots for ary, plus the elems we're about to copy,
+ * then protect ary and temporarily void the remaining slots
+ * with &PL_sv_undef */
+ EXTEND_MORTAL(nelems + 1);
+ PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(ary);
+ tmps_base = PL_tmps_ix + 1;
+ for (i = 0; i < nelems; i++)
+ PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
+ PL_tmps_ix += nelems;
+
+ /* Make a copy of each RHS elem and save on the tmps_stack
+ * (or pass through where we can optimise away the copy) */
+
+ if (UNLIKELY(alias)) {
+ U32 lval = (gimme == G_ARRAY)
+ ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
for (svp = relem; svp <= lastrelem; svp++) {
- /* see comment in S_aassign_copy_common about SV_NOSTEAL */
- *svp = sv_mortalcopy_flags(*svp,
- SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
- TAINT_NOT;
+ SV *rsv = *svp;
+
+ SvGETMAGIC(rsv);
+ if (!SvROK(rsv))
+ DIE(aTHX_ "Assigned value is not a reference");
+ if (SvTYPE(SvRV(rsv)) > SVt_PVLV)
+ /* diag_listed_as: Assigned value is not %s reference */
+ DIE(aTHX_
+ "Assigned value is not a SCALAR reference");
+ if (lval)
+ *svp = rsv = sv_mortalcopy(rsv);
+ /* XXX else check for weak refs? */
+ rsv = SvREFCNT_inc_NN(SvRV(rsv));
+ assert(tmps_base <= PL_tmps_max);
+ PL_tmps_stack[tmps_base++] = rsv;
}
- already_copied = TRUE;
}
+ else {
+ for (svp = relem; svp <= lastrelem; svp++) {
+ SV *rsv = *svp;
- av_clear(ary);
- if (relem <= lastrelem)
- av_extend(ary, lastrelem - relem);
-
- i = 0;
- while (relem <= lastrelem) { /* gobble up all the rest */
- SV **didstore;
- if (LIKELY(!alias)) {
- if (already_copied)
- sv = *relem;
+ if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
+ /* can skip the copy */
+ SvREFCNT_inc_simple_void_NN(rsv);
+ SvTEMP_off(rsv);
+ }
else {
- if (LIKELY(*relem))
- /* before newSV, in case it dies */
- SvGETMAGIC(*relem);
- sv = newSV(0);
+ SV *nsv;
+ /* do get before newSV, in case it dies and leaks */
+ SvGETMAGIC(rsv);
+ nsv = newSV(0);
/* see comment in S_aassign_copy_common about
* SV_NOSTEAL */
- sv_setsv_flags(sv, *relem,
- (SV_DO_COW_SVSETSV|SV_NOSTEAL));
- *relem = sv;
+ sv_setsv_flags(nsv, rsv,
+ (SV_DO_COW_SVSETSV|SV_NOSTEAL));
+ rsv = *svp = nsv;
}
- }
- else {
- if (!already_copied)
- SvGETMAGIC(*relem);
- if (!SvROK(*relem))
- DIE(aTHX_ "Assigned value is not a reference");
- if (SvTYPE(SvRV(*relem)) > SVt_PVLV)
- /* diag_listed_as: Assigned value is not %s reference */
- DIE(aTHX_
- "Assigned value is not a SCALAR reference");
- if (lval && !already_copied)
- *relem = sv_mortalcopy(*relem);
- /* XXX else check for weak refs? */
- sv = SvREFCNT_inc_NN(SvRV(*relem));
- }
- relem++;
- if (already_copied)
- SvREFCNT_inc_simple_void_NN(sv); /* undo mortal free */
- didstore = av_store(ary,i++,sv);
- if (magic) {
- if (!didstore)
- sv_2mortal(sv);
- if (SvSMAGICAL(sv))
- mg_set(sv);
- }
- TAINT_NOT;
- }
+
+ assert(tmps_base <= PL_tmps_max);
+ PL_tmps_stack[tmps_base++] = rsv;
+ }
+ }
+
+ if (SvRMAGICAL(ary) || AvFILLp(ary) >= 0) /* may be non-empty */
+ av_clear(ary);
+
+ /* store in the array, the SVs that are in the tmps stack */
+
+ tmps_base -= nelems;
+
+ if (SvMAGICAL(ary) || SvREADONLY(ary) || !AvREAL(ary)) {
+ /* for arrays we can't cheat with, use the official API */
+ av_extend(ary, nelems - 1);
+ for (i = 0; i < nelems; i++) {
+ SV **svp = &(PL_tmps_stack[tmps_base + i]);
+ SV *rsv = *svp;
+ /* A tied store won't take ownership of rsv, so keep
+ * the 1 refcnt on the tmps stack; otherwise disarm
+ * the tmps stack entry */
+ if (av_store(ary, i, rsv))
+ *svp = &PL_sv_undef;
+ /* av_store() may have added set magic to rsv */;
+ SvSETMAGIC(rsv);
+ }
+ /* disarm ary refcount: see comments below about leak */
+ PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
+ }
+ else {
+ /* directly access/set the guts of the AV */
+ SSize_t fill = nelems - 1;
+ if (fill > AvMAX(ary))
+ av_extend_guts(ary, fill, &AvMAX(ary), &AvALLOC(ary),
+ &AvARRAY(ary));
+ AvFILLp(ary) = fill;
+ Copy(&(PL_tmps_stack[tmps_base]), AvARRAY(ary), nelems, SV*);
+ /* Quietly remove all the SVs from the tmps stack slots,
+ * since ary has now taken ownership of the refcnt.
+ * Also remove ary: which will now leak if we die before
+ * the SvREFCNT_dec_NN(ary) below */
+ if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
+ Move(&PL_tmps_stack[tmps_base + nelems],
+ &PL_tmps_stack[tmps_base - 1],
+ PL_tmps_ix - (tmps_base + nelems) + 1,
+ SV*);
+ PL_tmps_ix -= (nelems + 1);
+ }
+
if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
+ /* it's assumed @ISA set magic can't die and leak ary */
SvSETMAGIC(MUTABLE_SV(ary));
- LEAVE;
- break;
+ SvREFCNT_dec_NN(ary);
+
+ relem = lastrelem + 1;
+ goto no_relems;
}
case SVt_PVHV: { /* normal hash */
- SV *tmpstr;
- int odd;
- int duplicates = 0;
- SV** topelem = relem;
- SV **firsthashrelem = relem;
- bool already_copied = FALSE;
-
- hash = MUTABLE_HV(sv);
- magic = SvMAGICAL(hash) != 0;
-
- odd = ((lastrelem - firsthashrelem)&1)? 0 : 1;
- if (UNLIKELY(odd)) {
- do_oddball(lastrelem, firsthashrelem);
- /* we have firstlelem to reuse, it's not needed anymore
- */
- *(lastrelem+1) = &PL_sv_undef;
+
+ SV **svp;
+ bool dirty_tmps;
+ SSize_t i;
+ SSize_t tmps_base;
+ SSize_t nelems = lastrelem - relem + 1;
+ HV *hash = MUTABLE_HV(lsv);
+
+ if (UNLIKELY(nelems & 1)) {
+ do_oddball(lastrelem, relem);
+ /* we have firstlelem to reuse, it's not needed any more */
+ *++lastrelem = &PL_sv_undef;
+ nelems++;
+ }
+
+ /* See the SVt_PVAV branch above for a long description of
+ * how the following all works. The main difference for hashes
+ * is that we treat keys and values separately (and have
+ * separate loops for them): as for arrays, values are always
+ * copied (except for the SvTEMP optimisation), since they
+ * need to be stored in the hash; while keys are only
+ * processed where they might get prematurely freed or
+ * whatever. */
+
+ /* tmps stack slots:
+ * * reserve a slot for the hash keepalive;
+ * * reserve slots for the hash values we're about to copy;
+ * * preallocate for the keys we'll possibly copy or refcount bump
+ * later;
+ * then protect hash and temporarily void the remaining
+ * value slots with &PL_sv_undef */
+ EXTEND_MORTAL(nelems + 1);
+
+ /* convert to number of key/value pairs */
+ nelems >>= 1;
+
+ PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(hash);
+ tmps_base = PL_tmps_ix + 1;
+ for (i = 0; i < nelems; i++)
+ PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
+ PL_tmps_ix += nelems;
+
+ /* Make a copy of each RHS hash value and save on the tmps_stack
+ * (or pass through where we can optimise away the copy) */
+
+ for (svp = relem + 1; svp <= lastrelem; svp += 2) {
+ SV *rsv = *svp;
+
+ if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
+ /* can skip the copy */
+ SvREFCNT_inc_simple_void_NN(rsv);
+ SvTEMP_off(rsv);
+ }
+ else {
+ SV *nsv;
+ /* do get before newSV, in case it dies and leaks */
+ SvGETMAGIC(rsv);
+ nsv = newSV(0);
+ /* see comment in S_aassign_copy_common about
+ * SV_NOSTEAL */
+ sv_setsv_flags(nsv, rsv,
+ (SV_DO_COW_SVSETSV|SV_NOSTEAL));
+ rsv = *svp = nsv;
}
- ENTER;
- SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
+ assert(tmps_base <= PL_tmps_max);
+ PL_tmps_stack[tmps_base++] = rsv;
+ }
+ tmps_base -= nelems;
- /* We need to clear hash. The is a danger that if we do this,
- * elements on the RHS may be prematurely freed, e.g.
- * %h = (foo => $h{bar});
- * In the case of possible commonality, make a copy of each
- * RHS SV *before* clearing the hash, and add a reference
- * from the tmps stack, so that it doesn't leak on death.
- */
- if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
- && (relem <= lastrelem)
- && (magic || HvUSEDKEYS(hash)))
- {
- SV **svp;
- EXTEND_MORTAL(lastrelem - relem + 1);
- for (svp = relem; svp <= lastrelem; svp++) {
+ /* possibly protect keys */
+
+ if (UNLIKELY(gimme == G_ARRAY)) {
+ /* handle e.g.
+ * @a = ((%h = ($$r, 1)), $r = "x");
+ * $_++ for %h = (1,2,3,4);
+ */
+ EXTEND_MORTAL(nelems);
+ for (svp = relem; svp <= lastrelem; svp += 2)
+ *svp = sv_mortalcopy_flags(*svp,
+ SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
+ }
+ else if (PL_op->op_private & OPpASSIGN_COMMON_AGG) {
+ /* for possible commonality, e.g.
+ * %h = ($h{a},1)
+ * avoid premature freeing RHS keys by mortalising
+ * them.
+ * For a magic element, make a copy so that its magic is
+ * called *before* the hash is emptied (which may affect
+ * a tied value for example).
+ * In theory we should check for magic keys in all
+ * cases, not just under OPpASSIGN_COMMON_AGG, but in
+ * practice, !OPpASSIGN_COMMON_AGG implies only
+ * constants or padtmps on the RHS.
+ */
+ EXTEND_MORTAL(nelems);
+ for (svp = relem; svp <= lastrelem; svp += 2) {
+ SV *rsv = *svp;
+ if (UNLIKELY(SvGMAGICAL(rsv))) {
+ SSize_t n;
*svp = sv_mortalcopy_flags(*svp,
SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
- TAINT_NOT;
+ /* allow other branch to continue pushing
+ * onto tmps stack without checking each time */
+ n = (lastrelem - relem) >> 1;
+ EXTEND_MORTAL(n);
}
- already_copied = TRUE;
+ else
+ PL_tmps_stack[++PL_tmps_ix] =
+ SvREFCNT_inc_simple_NN(rsv);
}
+ }
- hv_clear(hash);
-
- while (LIKELY(relem < lastrelem+odd)) { /* gobble up all the rest */
- HE *didstore;
- assert(*relem);
- /* Copy the key if aassign is called in lvalue context,
- to avoid having the next op modify our rhs. Copy
- it also if it is gmagical, lest it make the
- hv_store_ent call below croak, leaking the value. */
- sv = (lval || SvGMAGICAL(*relem)) && !already_copied
- ? sv_mortalcopy(*relem)
- : *relem;
- relem++;
- assert(*relem);
- if (already_copied)
- tmpstr = *relem++;
- else {
- SvGETMAGIC(*relem);
- tmpstr = newSV(0);
- sv_setsv_nomg(tmpstr,*relem++); /* value */
- }
+ if (SvRMAGICAL(hash) || HvUSEDKEYS(hash))
+ hv_clear(hash);
- if (gimme == G_ARRAY) {
- if (hv_exists_ent(hash, sv, 0))
- /* key overwrites an existing entry */
- duplicates += 2;
- else {
- /* copy element back: possibly to an earlier
- * stack location if we encountered dups earlier,
- * possibly to a later stack location if odd */
- *topelem++ = sv;
- *topelem++ = tmpstr;
- }
- }
- if (already_copied)
- SvREFCNT_inc_simple_void_NN(tmpstr); /* undo mortal free */
- didstore = hv_store_ent(hash,sv,tmpstr,0);
- if (magic) {
- if (!didstore) sv_2mortal(tmpstr);
- SvSETMAGIC(tmpstr);
+ /* now assign the keys and values to the hash */
+
+ dirty_tmps = FALSE;
+
+ if (UNLIKELY(gimme == G_ARRAY)) {
+ /* @a = (%h = (...)) etc */
+ SV **svp;
+ SV **topelem = relem;
+
+ for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
+ SV *key = *svp++;
+ SV *val = *svp;
+ /* remove duplicates from list we return */
+ if (!hv_exists_ent(hash, key, 0)) {
+ /* copy key back: possibly to an earlier
+ * stack location if we encountered dups earlier,
+ * The values will be updated later
+ */
+ *topelem = key;
+ topelem += 2;
}
- TAINT_NOT;
- }
- LEAVE;
- if (duplicates && gimme == G_ARRAY) {
+ /* A tied store won't take ownership of val, so keep
+ * the 1 refcnt on the tmps stack; otherwise disarm
+ * the tmps stack entry */
+ if (hv_store_ent(hash, key, val, 0))
+ PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
+ else
+ dirty_tmps = TRUE;
+ /* hv_store_ent() may have added set magic to val */;
+ SvSETMAGIC(val);
+ }
+ if (topelem < svp) {
/* at this point we have removed the duplicate key/value
* pairs from the stack, but the remaining values may be
* wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
* the (a 2), but the stack now probably contains
* (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
* obliterates the earlier key. So refresh all values. */
- lastrelem -= duplicates;
- relem = firsthashrelem;
- while (relem < lastrelem+odd) {
+ lastrelem = topelem - 1;
+ while (relem < lastrelem) {
HE *he;
he = hv_fetch_ent(hash, *relem++, 0, 0);
*relem++ = (he ? HeVAL(he) : &PL_sv_undef);
}
}
- if (odd && gimme == G_ARRAY) lastrelem++;
- }
- break;
+ }
+ else {
+ SV **svp;
+ for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
+ SV *key = *svp++;
+ SV *val = *svp;
+ if (hv_store_ent(hash, key, val, 0))
+ PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
+ else
+ dirty_tmps = TRUE;
+ /* hv_store_ent() may have added set magic to val */;
+ SvSETMAGIC(val);
+ }
+ }
+
+ if (dirty_tmps) {
+ /* there are still some 'live' refcounts on the tmps stack
+ * - usually caused by storing into a tied hash. So let
+ * free_tmps() do the proper but slow job later.
+ * Just disarm hash refcount: see comments below about leak
+ */
+ PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
+ }
+ else {
+ /* Quietly remove all the SVs from the tmps stack slots,
+ * since hash has now taken ownership of the refcnt.
+ * Also remove hash: which will now leak if we die before
+ * the SvREFCNT_dec_NN(hash) below */
+ if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
+ Move(&PL_tmps_stack[tmps_base + nelems],
+ &PL_tmps_stack[tmps_base - 1],
+ PL_tmps_ix - (tmps_base + nelems) + 1,
+ SV*);
+ PL_tmps_ix -= (nelems + 1);
+ }
+
+ SvREFCNT_dec_NN(hash);
+
+ relem = lastrelem + 1;
+ goto no_relems;
+ }
+
default:
- if (SvIMMORTAL(sv)) {
- if (relem <= lastrelem)
- relem++;
- break;
- }
- if (relem <= lastrelem) {
- if (UNLIKELY(
- SvTEMP(sv) && !SvSMAGICAL(sv) && SvREFCNT(sv) == 1 &&
- (!isGV_with_GP(sv) || SvFAKE(sv)) && ckWARN(WARN_MISC)
- ))
- Perl_warner(aTHX_
- packWARN(WARN_MISC),
- "Useless assignment to a temporary"
- );
- sv_setsv(sv, *relem);
- *(relem++) = sv;
- }
- else
- sv_setsv(sv, &PL_sv_undef);
- SvSETMAGIC(sv);
+ if (!SvIMMORTAL(lsv)) {
+ SV *ref;
+
+ if (UNLIKELY(
+ SvTEMP(lsv) && !SvSMAGICAL(lsv) && SvREFCNT(lsv) == 1 &&
+ (!isGV_with_GP(lsv) || SvFAKE(lsv)) && ckWARN(WARN_MISC)
+ ))
+ Perl_warner(aTHX_
+ packWARN(WARN_MISC),
+ "Useless assignment to a temporary"
+ );
+
+ /* avoid freeing $$lsv if it might be needed for further
+ * elements, e.g. ($ref, $foo) = (1, $$ref) */
+ if ( SvROK(lsv)
+ && ( ((ref = SvRV(lsv)), SvREFCNT(ref)) == 1)
+ && lelem <= lastlelem
+ ) {
+ SSize_t ix;
+ SvREFCNT_inc_simple_void_NN(ref);
+ /* an unrolled sv_2mortal */
+ ix = ++PL_tmps_ix;
+ if (UNLIKELY(ix >= PL_tmps_max))
+ /* speculatively grow enough to cover other
+ * possible refs */
+ (void)tmps_grow_p(ix + (lastlelem - lelem));
+ PL_tmps_stack[ix] = ref;
+ }
+
+ sv_setsv(lsv, *relem);
+ *relem = lsv;
+ SvSETMAGIC(lsv);
+ }
+ if (++relem > lastrelem)
+ goto no_relems;
break;
+ } /* switch */
+ } /* while */
+
+
+ no_relems:
+
+ /* simplified lelem loop for when there are no relems left */
+ while (LIKELY(lelem <= lastlelem)) {
+ SV *lsv = *lelem++;
+
+ TAINT_NOT; /* Each item stands on its own, taintwise. */
+
+ if (UNLIKELY(!lsv)) {
+ lsv = *lelem++;
+ ASSUME(SvTYPE(lsv) == SVt_PVAV);
}
- }
+
+ switch (SvTYPE(lsv)) {
+ case SVt_PVAV:
+ if (SvRMAGICAL(lsv) || AvFILLp((SV*)lsv) >= 0) {
+ av_clear((AV*)lsv);
+ if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
+ SvSETMAGIC(lsv);
+ }
+ break;
+
+ case SVt_PVHV:
+ if (SvRMAGICAL(lsv) || HvUSEDKEYS((HV*)lsv))
+ hv_clear((HV*)lsv);
+ break;
+
+ default:
+ if (!SvIMMORTAL(lsv)) {
+ sv_set_undef(lsv);
+ SvSETMAGIC(lsv);
+ *relem++ = lsv;
+ }
+ break;
+ } /* switch */
+ } /* while */
+
+ TAINT_NOT; /* result of list assign isn't tainted */
+
if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
/* Will be used to set PL_tainting below */
Uid_t tmp_uid = PerlProc_getuid();
setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
(PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
(Uid_t)-1));
-#else
-# ifdef HAS_SETREUID
+#elif defined(HAS_SETREUID)
PERL_UNUSED_RESULT(
setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
(PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
-# else
+#else
# ifdef HAS_SETRUID
if ((PL_delaymagic & DM_UID) == DM_RUID) {
PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
DIE(aTHX_ "No setreuid available");
PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
}
-# endif /* HAS_SETREUID */
#endif /* HAS_SETRESUID */
tmp_uid = PerlProc_getuid();
setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
(PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
(Gid_t)-1));
-#else
-# ifdef HAS_SETREGID
+#elif defined(HAS_SETREGID)
PERL_UNUSED_RESULT(
setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
(PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
-# else
+#else
# ifdef HAS_SETRGID
if ((PL_delaymagic & DM_GID) == DM_RGID) {
PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
DIE(aTHX_ "No setregid available");
PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
}
-# endif /* HAS_SETREGID */
#endif /* HAS_SETRESGID */
tmp_gid = PerlProc_getgid();
if (gimme == G_VOID)
SP = firstrelem - 1;
else if (gimme == G_SCALAR) {
- dTARGET;
SP = firstrelem;
- SETi(lastrelem - firstrelem + 1);
- }
- else {
- if (ary || hash)
- /* note that in this case *firstlelem may have been overwritten
- by sv_undef in the odd hash case */
- SP = lastrelem;
- else {
- SP = firstrelem + (lastlelem - firstlelem);
- lelem = firstlelem + (relem - firstrelem);
- while (relem <= SP)
- *relem++ = (lelem <= lastlelem) ? *lelem++ : &PL_sv_undef;
+ EXTEND(SP,1);
+ if (PL_op->op_private & OPpASSIGN_TRUEBOOL)
+ SETs((firstlelem - firstrelem) ? &PL_sv_yes : &PL_sv_zero);
+ else {
+ dTARGET;
+ SETi(firstlelem - firstrelem);
}
}
+ else
+ SP = relem - 1;
RETURN;
}
dSP;
PMOP * const pm = cPMOP;
REGEXP * rx = PM_GETRE(pm);
- SV * const pkg = rx ? CALLREG_PACKAGE(rx) : NULL;
+ regexp *prog = ReANY(rx);
+ SV * const pkg = RXp_ENGINE(prog)->qr_package(aTHX_ (rx));
SV * const rv = sv_newmortal();
CV **cvp;
CV *cv;
(void)sv_bless(rv, stash);
}
- if (UNLIKELY(RX_ISTAINTED(rx))) {
+ if (UNLIKELY(RXp_ISTAINTED(prog))) {
SvTAINTED_on(rv);
SvTAINTED_on(SvRV(rv));
}
U8 r_flags = 0;
const char *truebase; /* Start of string */
REGEXP *rx = PM_GETRE(pm);
+ regexp *prog = ReANY(rx);
bool rxtainted;
const U8 gimme = GIMME_V;
STRLEN len;
if (PL_op->op_flags & OPf_STACKED)
TARG = POPs;
- else if (ARGTARG)
- GETTARGET;
else {
- TARG = DEFSV;
+ if (ARGTARG)
+ GETTARGET;
+ else {
+ TARG = DEFSV;
+ }
EXTEND(SP,1);
}
PUTBACK; /* EVAL blocks need stack_sp. */
/* Skip get-magic if this is a qr// clone, because regcomp has
already done it. */
- truebase = ReANY(rx)->mother_re
+ truebase = prog->mother_re
? SvPV_nomg_const(TARG, len)
: SvPV_const(TARG, len);
if (!truebase)
DIE(aTHX_ "panic: pp_match");
strend = truebase + len;
- rxtainted = (RX_ISTAINTED(rx) ||
+ rxtainted = (RXp_ISTAINTED(prog) ||
(TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
TAINT_NOT;
goto nope;
}
- /* empty pattern special-cased to use last successful pattern if
- possible, except for qr// */
- if (!ReANY(rx)->mother_re && !RX_PRELEN(rx)
- && PL_curpm) {
- pm = PL_curpm;
- rx = PM_GETRE(pm);
+ /* handle the empty pattern */
+ if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
+ if (PL_curpm == PL_reg_curpm) {
+ if (PL_curpm_under) {
+ if (PL_curpm_under == PL_reg_curpm) {
+ Perl_croak(aTHX_ "Infinite recursion via empty pattern");
+ } else {
+ pm = PL_curpm_under;
+ }
+ }
+ } else {
+ pm = PL_curpm;
+ }
+ rx = PM_GETRE(pm);
+ prog = ReANY(rx);
}
- if (RX_MINLEN(rx) >= 0 && (STRLEN)RX_MINLEN(rx) > len) {
+ if (RXp_MINLEN(prog) >= 0 && (STRLEN)RXp_MINLEN(prog) > len) {
DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
- UVuf" < %"IVdf")\n",
- (UV)len, (IV)RX_MINLEN(rx)));
+ UVuf " < %" IVdf ")\n",
+ (UV)len, (IV)RXp_MINLEN(prog)));
goto nope;
}
}
#ifdef PERL_SAWAMPERSAND
- if ( RX_NPARENS(rx)
+ if ( RXp_NPARENS(prog)
|| PL_sawampersand
- || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
+ || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
|| (dynpm->op_pmflags & PMf_KEEPCOPY)
)
#endif
#endif
if (rxtainted)
- RX_MATCH_TAINTED_on(rx);
- TAINT_IF(RX_MATCH_TAINTED(rx));
+ RXp_MATCH_TAINTED_on(prog);
+ TAINT_IF(RXp_MATCH_TAINTED(prog));
/* update pos */
if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
if (!mg)
mg = sv_magicext_mglob(TARG);
- MgBYTEPOS_set(mg, TARG, truebase, RX_OFFS(rx)[0].end);
- if (RX_ZERO_LEN(rx))
+ MgBYTEPOS_set(mg, TARG, truebase, RXp_OFFS(prog)[0].end);
+ if (RXp_ZERO_LEN(prog))
mg->mg_flags |= MGf_MINMATCH;
else
mg->mg_flags &= ~MGf_MINMATCH;
}
- if ((!RX_NPARENS(rx) && !global) || gimme != G_ARRAY) {
+ if ((!RXp_NPARENS(prog) && !global) || gimme != G_ARRAY) {
LEAVE_SCOPE(oldsave);
RETPUSHYES;
}
/* push captures on stack */
{
- const I32 nparens = RX_NPARENS(rx);
+ const I32 nparens = RXp_NPARENS(prog);
I32 i = (global && !nparens) ? 1 : 0;
SPAGAIN; /* EVAL blocks could move the stack. */
EXTEND_MORTAL(nparens + i);
for (i = !i; i <= nparens; i++) {
PUSHs(sv_newmortal());
- if (LIKELY((RX_OFFS(rx)[i].start != -1)
- && RX_OFFS(rx)[i].end != -1 ))
+ if (LIKELY((RXp_OFFS(prog)[i].start != -1)
+ && RXp_OFFS(prog)[i].end != -1 ))
{
- const I32 len = RX_OFFS(rx)[i].end - RX_OFFS(rx)[i].start;
- const char * const s = RX_OFFS(rx)[i].start + truebase;
- if (UNLIKELY(RX_OFFS(rx)[i].end < 0 || RX_OFFS(rx)[i].start < 0
- || len < 0 || len > strend - s))
+ const I32 len = RXp_OFFS(prog)[i].end - RXp_OFFS(prog)[i].start;
+ const char * const s = RXp_OFFS(prog)[i].start + truebase;
+ if (UNLIKELY( RXp_OFFS(prog)[i].end < 0
+ || RXp_OFFS(prog)[i].start < 0
+ || len < 0
+ || len > strend - s)
+ )
DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
- "start=%ld, end=%ld, s=%p, strend=%p, len=%"UVuf,
- (long) i, (long) RX_OFFS(rx)[i].start,
- (long)RX_OFFS(rx)[i].end, s, strend, (UV) len);
+ "start=%ld, end=%ld, s=%p, strend=%p, len=%" UVuf,
+ (long) i, (long) RXp_OFFS(prog)[i].start,
+ (long)RXp_OFFS(prog)[i].end, s, strend, (UV) len);
sv_setpvn(*SP, s, len);
if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
SvUTF8_on(*SP);
}
}
if (global) {
- curpos = (UV)RX_OFFS(rx)[0].end;
- had_zerolen = RX_ZERO_LEN(rx);
+ curpos = (UV)RXp_OFFS(prog)[0].end;
+ had_zerolen = RXp_ZERO_LEN(prog);
PUTBACK; /* EVAL blocks may use stack */
r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
goto play_it_again;
if (gimme == G_SCALAR) {
/* undef TARG, and push that undefined value */
if (type != OP_RCATLINE) {
- sv_setsv(TARG,NULL);
+ sv_set_undef(TARG);
}
PUSHTARG;
}
RETURN;
}
if (localizing) {
- if (HvNAME_get(hv) && isGV(*svp))
+ if (HvNAME_get(hv) && isGV_or_RVCV(*svp))
save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
else if (preeminent)
save_helem_flags(hv, keysv, svp,
if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
&& ckWARN(WARN_MISC)))
Perl_warner(aTHX_ packWARN(WARN_MISC),
- "Use of reference \"%"SVf"\" as array index",
+ "Use of reference \"%" SVf "\" as array index",
SVfARG(elemsv));
/* the only time that S_find_uninit_var() needs this
* is to determine which index value triggered the
}
else {
if (localizing) {
- if (HvNAME_get(hv) && isGV(sv))
+ if (HvNAME_get(hv) && isGV_or_RVCV(sv))
save_gp(MUTABLE_GV(sv),
!(PL_op->op_flags & OPf_SPECIAL));
else if (preeminent) {
PERL_CONTEXT *cx;
SV *oldsv;
SV **itersvp;
- SV *retsv;
SV *sv;
AV *av;
It has SvPVX of "" and SvCUR of 0, which is what we want. */
STRLEN maxlen = 0;
const char *max = SvPV_const(end, maxlen);
+ if (DO_UTF8(end) && IN_UNI_8_BIT)
+ maxlen = sv_len_utf8_nomg(end);
if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
goto retno;
DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
}
- retsv = &PL_sv_yes;
- if (0) {
- retno:
- retsv = &PL_sv_no;
- }
+ /* Bypass pushing &PL_sv_yes and calling pp_and(); instead
+ * jump straight to the AND op's op_other */
+ assert(PL_op->op_next->op_type == OP_AND);
+ assert(PL_op->op_next->op_ppaddr == Perl_pp_and);
+ return cLOGOPx(PL_op->op_next)->op_other;
+
+ retno:
+ /* Bypass pushing &PL_sv_no and calling pp_and(); instead
+ * jump straight to the AND op's op_next */
+ assert(PL_op->op_next->op_type == OP_AND);
+ assert(PL_op->op_next->op_ppaddr == Perl_pp_and);
/* pp_enteriter should have pre-extended the stack */
- assert(PL_stack_sp < PL_stack_max);
- *++PL_stack_sp =retsv;
-
- return PL_op->op_next;
-
-
+ EXTEND_SKIP(PL_stack_sp, 1);
+ /* we only need this for the rare case where the OP_AND isn't
+ * in void context, e.g. $x = do { for (..) {...} };
+ * but it's cheaper to just push it rather than testing first
+ */
+ *++PL_stack_sp = &PL_sv_no;
+ return PL_op->op_next->op_next;
}
+
/*
A description of how taint works in pattern matching and substitution.
char *orig;
U8 r_flags;
REGEXP *rx = PM_GETRE(pm);
+ regexp *prog = ReANY(rx);
STRLEN len;
int force_on_match = 0;
const I32 oldsave = PL_savestack_ix;
STRLEN slen;
bool doutf8 = FALSE; /* whether replacement is in utf8 */
#ifdef PERL_ANY_COW
- bool is_cow;
+ bool was_cow;
#endif
SV *nsv = NULL;
/* known replacement string? */
if (PL_op->op_flags & OPf_STACKED)
TARG = POPs;
- else if (ARGTARG)
- GETTARGET;
else {
- TARG = DEFSV;
+ if (ARGTARG)
+ GETTARGET;
+ else {
+ TARG = DEFSV;
+ }
EXTEND(SP,1);
}
SvGETMAGIC(TARG); /* must come before cow check */
#ifdef PERL_ANY_COW
- /* Awooga. Awooga. "bool" types that are actually char are dangerous,
- because they make integers such as 256 "false". */
- is_cow = SvIsCOW(TARG) ? TRUE : FALSE;
-#else
- if (SvIsCOW(TARG))
- sv_force_normal_flags(TARG,0);
+ /* note that a string might get converted to COW during matching */
+ was_cow = cBOOL(SvIsCOW(TARG));
+#endif
+ if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
+#ifndef PERL_ANY_COW
+ if (SvIsCOW(TARG))
+ sv_force_normal_flags(TARG,0);
#endif
- if (!(rpm->op_pmflags & PMf_NONDESTRUCT)
- && (SvREADONLY(TARG)
- || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
- || SvTYPE(TARG) > SVt_PVLV)
- && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
- Perl_croak_no_modify();
+ if ((SvREADONLY(TARG)
+ || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
+ || SvTYPE(TARG) > SVt_PVLV)
+ && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
+ Perl_croak_no_modify();
+ }
PUTBACK;
orig = SvPV_nomg(TARG, len);
/* note we don't (yet) force the var into being a string; if we fail
- * to match, we leave as-is; on successful match howeverm, we *will*
+ * to match, we leave as-is; on successful match however, we *will*
* coerce into a string, then repeat the match */
if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
force_on_match = 1;
if (TAINTING_get) {
rxtainted = (
(SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
- | (RX_ISTAINTED(rx) ? SUBST_TAINT_PAT : 0)
+ | (RXp_ISTAINTED(prog) ? SUBST_TAINT_PAT : 0)
| ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
| ((once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
? SUBST_TAINT_BOOLRET : 0));
position, once with zero-length,
second time with non-zero. */
- if (!RX_PRELEN(rx) && PL_curpm
- && !ReANY(rx)->mother_re) {
- pm = PL_curpm;
- rx = PM_GETRE(pm);
+ /* handle the empty pattern */
+ if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
+ if (PL_curpm == PL_reg_curpm) {
+ if (PL_curpm_under) {
+ if (PL_curpm_under == PL_reg_curpm) {
+ Perl_croak(aTHX_ "Infinite recursion via empty pattern");
+ } else {
+ pm = PL_curpm_under;
+ }
+ }
+ } else {
+ pm = PL_curpm;
+ }
+ rx = PM_GETRE(pm);
+ prog = ReANY(rx);
}
#ifdef PERL_SAWAMPERSAND
- r_flags = ( RX_NPARENS(rx)
+ r_flags = ( RXp_NPARENS(prog)
|| PL_sawampersand
- || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
+ || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
|| (rpm->op_pmflags & PMf_KEEPCOPY)
)
? REXEC_COPY_STR
if (DO_UTF8(TARG) && !doutf8) {
nsv = sv_newmortal();
SvSetSV(nsv, dstr);
- if (IN_ENCODING)
- sv_recode_to_utf8(nsv, _get_encoding());
- else
- sv_utf8_upgrade(nsv);
+ sv_utf8_upgrade(nsv);
c = SvPV_const(nsv, clen);
doutf8 = TRUE;
}
/* can do inplace substitution? */
if (c
#ifdef PERL_ANY_COW
- && !is_cow
+ && !was_cow
#endif
- && (I32)clen <= RX_MINLENRET(rx)
+ && (I32)clen <= RXp_MINLENRET(prog)
&& ( once
|| !(r_flags & REXEC_COPY_STR)
- || (!SvGMAGICAL(dstr) && !(RX_EXTFLAGS(rx) & RXf_EVAL_SEEN))
+ || (!SvGMAGICAL(dstr) && !(RXp_EXTFLAGS(prog) & RXf_EVAL_SEEN))
)
- && !(RX_EXTFLAGS(rx) & RXf_NO_INPLACE_SUBST)
+ && !(RXp_EXTFLAGS(prog) & RXf_NO_INPLACE_SUBST)
&& (!doutf8 || SvUTF8(TARG))
&& !(rpm->op_pmflags & PMf_NONDESTRUCT))
{
#ifdef PERL_ANY_COW
+ /* string might have got converted to COW since we set was_cow */
if (SvIsCOW(TARG)) {
if (!force_on_match)
goto have_a_cow;
if (once) {
char *d, *m;
- if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
+ if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
rxtainted |= SUBST_TAINT_PAT;
- m = orig + RX_OFFS(rx)[0].start;
- d = orig + RX_OFFS(rx)[0].end;
+ m = orig + RXp_OFFS(prog)[0].start;
+ d = orig + RXp_OFFS(prog)[0].end;
s = orig;
if (m - s > strend - d) { /* faster to shorten from end */
I32 i;
}
else {
char *d, *m;
- d = s = RX_OFFS(rx)[0].start + orig;
+ d = s = RXp_OFFS(prog)[0].start + orig;
do {
I32 i;
if (UNLIKELY(iters++ > maxiters))
DIE(aTHX_ "Substitution loop");
- if (UNLIKELY(RX_MATCH_TAINTED(rx))) /* run time pattern taint, eg locale */
+ /* run time pattern taint, eg locale */
+ if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
rxtainted |= SUBST_TAINT_PAT;
- m = RX_OFFS(rx)[0].start + orig;
+ m = RXp_OFFS(prog)[0].start + orig;
if ((i = m - s)) {
if (s != d)
Move(s, d, i, char);
Copy(c, d, clen, char);
d += clen;
}
- s = RX_OFFS(rx)[0].end + orig;
+ s = RXp_OFFS(prog)[0].end + orig;
} while (CALLREGEXEC(rx, s, strend, orig,
s == m, /* don't match same null twice */
TARG, NULL,
Move(s, d, i+1, char); /* include the NUL */
}
SPAGAIN;
- mPUSHi(iters);
+ if (PL_op->op_private & OPpTRUEBOOL)
+ PUSHs(iters ? &PL_sv_yes : &PL_sv_zero);
+ else
+ mPUSHi(iters);
}
}
else {
#ifdef PERL_ANY_COW
have_a_cow:
#endif
- if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
+ if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
rxtainted |= SUBST_TAINT_PAT;
repl = dstr;
- s = RX_OFFS(rx)[0].start + orig;
+ s = RXp_OFFS(prog)[0].start + orig;
dstr = newSVpvn_flags(orig, s-orig,
SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
if (!c) {
do {
if (UNLIKELY(iters++ > maxiters))
DIE(aTHX_ "Substitution loop");
- if (UNLIKELY(RX_MATCH_TAINTED(rx)))
+ if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
rxtainted |= SUBST_TAINT_PAT;
- if (RX_MATCH_COPIED(rx) && RX_SUBBEG(rx) != orig) {
+ if (RXp_MATCH_COPIED(prog) && RXp_SUBBEG(prog) != orig) {
char *old_s = s;
char *old_orig = orig;
- assert(RX_SUBOFFSET(rx) == 0);
+ assert(RXp_SUBOFFSET(prog) == 0);
- orig = RX_SUBBEG(rx);
+ orig = RXp_SUBBEG(prog);
s = orig + (old_s - old_orig);
strend = s + (strend - old_s);
}
- m = RX_OFFS(rx)[0].start + orig;
+ m = RXp_OFFS(prog)[0].start + orig;
sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
- s = RX_OFFS(rx)[0].end + orig;
+ s = RXp_OFFS(prog)[0].end + orig;
if (first) {
/* replacement already stringified */
if (clen)
first = FALSE;
}
else {
- if (IN_ENCODING) {
- if (!nsv) nsv = sv_newmortal();
- sv_copypv(nsv, repl);
- if (!DO_UTF8(nsv)) sv_recode_to_utf8(nsv, _get_encoding());
- sv_catsv(dstr, nsv);
- }
- else sv_catsv(dstr, repl);
+ sv_catsv(dstr, repl);
if (UNLIKELY(SvTAINTED(repl)))
rxtainted |= SUBST_TAINT_REPL;
}
((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
(SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
)
- (RX_MATCH_TAINTED_on(rx)); /* taint $1 et al */
+ (RXp_MATCH_TAINTED_on(prog)); /* taint $1 et al */
if (!(rxtainted & SUBST_TAINT_BOOLRET)
&& (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
PP(pp_grepwhile)
{
dSP;
+ dPOPss;
- if (SvTRUEx(POPs))
+ if (SvTRUE_NN(sv))
PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
++*PL_markstack_ptr;
FREETMPS;
(void)POPMARK; /* pop dst */
SP = PL_stack_base + POPMARK; /* pop original mark */
if (gimme == G_SCALAR) {
+ if (PL_op->op_private & OPpTRUEBOOL)
+ PUSHs(items ? &PL_sv_yes : &PL_sv_zero);
+ else {
dTARGET;
- XPUSHi(items);
+ PUSHi(items);
+ }
}
else if (gimme == G_ARRAY)
SP += items;
}
+/* also tail-called by pp_return */
+
PP(pp_leavesub)
{
U8 gimme;
/* these two fields are in a union. If they ever become separate,
* we have to test for both of them being null below */
+ assert(cv);
assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
while (UNLIKELY(!CvROOT(cv))) {
GV* autogv;
/* anonymous or undef'd function leaves us no recourse */
if (CvLEXICAL(cv) && CvHASGV(cv))
- DIE(aTHX_ "Undefined subroutine &%"SVf" called",
+ DIE(aTHX_ "Undefined subroutine &%" SVf " called",
SVfARG(cv_name(cv, NULL, 0)));
if (CvANON(cv) || !CvHASGV(cv)) {
DIE(aTHX_ "Undefined subroutine called");
else {
try_autoload:
autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
- GvNAMEUTF8(gv) ? SVf_UTF8 : 0);
+ (GvNAMEUTF8(gv) ? SVf_UTF8 : 0)
+ |(PL_op->op_flags & OPf_REF
+ ? GV_AUTOLOAD_ISMETHOD
+ : 0));
cv = autogv ? GvCV(autogv) : NULL;
}
if (!cv) {
sub_name = sv_newmortal();
gv_efullname3(sub_name, gv, NULL);
- DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
+ DIE(aTHX_ "Undefined subroutine &%" SVf " called", SVfARG(sub_name));
}
}
items = SP - MARK;
if (UNLIKELY(items - 1 > AvMAX(av))) {
SV **ary = AvALLOC(av);
- AvMAX(av) = items - 1;
Renew(ary, items, SV*);
+ AvMAX(av) = items - 1;
AvALLOC(av) = ary;
AvARRAY(av) = ary;
}
- Copy(MARK+1,AvARRAY(av),items,SV*);
+ if (items)
+ Copy(MARK+1,AvARRAY(av),items,SV*);
AvFILLp(av) = items - 1;
}
if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
!CvLVALUE(cv)))
- DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
+ DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
SVfARG(cv_name(cv, NULL, 0)));
/* warning must come *after* we fully set up the context
* stuff so that __WARN__ handlers can safely dounwind()
& CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
!CvLVALUE(cv)))
- DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
+ DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
SVfARG(cv_name(cv, NULL, 0)));
if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
assert(CvXSUB(cv));
CvXSUB(cv)(aTHX_ cv);
+#if defined DEBUGGING && !defined DEBUGGING_RE_ONLY
+ /* This duplicates the check done in runops_debug(), but provides more
+ * information in the common case of the fault being with an XSUB.
+ *
+ * It should also catch an XSUB pushing more than it extends
+ * in scalar context.
+ */
+ if (PL_curstackinfo->si_stack_hwm < PL_stack_sp - PL_stack_base)
+ Perl_croak_nocontext(
+ "panic: XSUB %s::%s (%s) failed to extend arg stack: "
+ "base=%p, sp=%p, hwm=%p\n",
+ HvNAME(GvSTASH(CvGV(cv))), GvNAME(CvGV(cv)), CvFILE(cv),
+ PL_stack_base, PL_stack_sp,
+ PL_stack_base + PL_curstackinfo->si_stack_hwm);
+#endif
/* Enforce some sanity in scalar context. */
if (is_scalar) {
SV **svp = PL_stack_base + markix + 1;
if (CvANON(cv))
Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
else {
- Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%"SVf"\"",
+ Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%" SVf "\"",
SVfARG(cv_name(cv,NULL,0)));
}
}
+
+
+/* like croak, but report in context of caller */
+
+void
+Perl_croak_caller(const char *pat, ...)
+{
+ dTHX;
+ va_list args;
+ const PERL_CONTEXT *cx = caller_cx(0, NULL);
+
+ /* make error appear at call site */
+ assert(cx);
+ PL_curcop = cx->blk_oldcop;
+
+ va_start(args, pat);
+ vcroak(pat, &args);
+ NOT_REACHED; /* NOTREACHED */
+ va_end(args);
+}
+
+
PP(pp_aelem)
{
dSP;
if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
Perl_warner(aTHX_ packWARN(WARN_MISC),
- "Use of reference \"%"SVf"\" as array index",
+ "Use of reference \"%" SVf "\" as array index",
SVfARG(elemsv));
if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
RETPUSHUNDEF;
HV* stash;
SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
- ? (Perl_croak(aTHX_ "Can't call method \"%"SVf"\" without a "
+ ? (Perl_croak(aTHX_ "Can't call method \"%" SVf "\" without a "
"package or object reference", SVfARG(meth)),
(SV *)NULL)
: *(PL_stack_base + TOPMARK + 1);
if (UNLIKELY(!sv))
undefined:
- Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on an undefined value",
+ Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on an undefined value",
SVfARG(meth));
if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
else if (!SvOK(sv)) goto undefined;
else if (isGV_with_GP(sv)) {
if (!GvIO(sv))
- Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
+ Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
"without a package or object reference",
SVfARG(meth));
ob = sv;
/* this isn't the name of a filehandle either */
if (!packlen)
{
- Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
+ Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
"without a package or object reference",
SVfARG(meth));
}
&& (ob = MUTABLE_SV(GvIO((const GV *)ob)))
&& SvOBJECT(ob))))
{
- Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on unblessed reference",
- SVfARG((SvSCREAM(meth) && strEQ(SvPV_nolen_const(meth),"isa"))
+ Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on unblessed reference",
+ SVfARG((SvPOK(meth) && SvPVX(meth) == PL_isa_DOES)
? newSVpvs_flags("DOES", SVs_TEMP)
: meth));
}