3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
6 * You may distribute under the terms of either the GNU General Public
7 * License or the Artistic License, as specified in the README file.
12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
15 * Awake! Awake! Fear, Fire, Foes! Awake!
18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
21 /* This file contains 'hot' pp ("push/pop") functions that
22 * execute the opcodes that make up a perl program. A typical pp function
23 * expects to find its arguments on the stack, and usually pushes its
24 * results onto the stack, hence the 'pp' terminology. Each OP structure
25 * contains a pointer to the relevant pp_foo() function.
27 * By 'hot', we mean common ops whose execution speed is critical.
28 * By gathering them together into a single file, we encourage
29 * CPU cache hits on hot code. Also it could be taken as a warning not to
30 * change any code in this file unless you're sure it won't affect
35 #define PERL_IN_PP_HOT_C
49 PL_curcop = (COP*)PL_op;
50 TAINT_NOT; /* Each statement is presumed innocent */
51 PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;
61 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
62 PUSHs(save_scalar(cGVOP_gv));
64 PUSHs(GvSVn(cGVOP_gv));
69 /* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
76 /* This is sometimes called directly by pp_coreargs, pp_grepstart and
80 PUSHMARK(PL_stack_sp);
91 /* no PUTBACK, SETs doesn't inc/dec SP */
98 XPUSHs(MUTABLE_SV(cGVOP_gv));
103 /* also used for: pp_andassign() */
109 /* SP is not used to remove a variable that is saved across the
110 sv_2bool_flags call in SvTRUE_NN, if a RISC/CISC or low/high machine
111 register or load/store vs direct mem ops macro is introduced, this
112 should be a define block between direct PL_stack_sp and dSP operations,
113 presently, using PL_stack_sp is bias towards CISC cpus */
114 SV * const sv = *PL_stack_sp;
118 if (PL_op->op_type == OP_AND)
120 return cLOGOP->op_other;
128 /* sassign keeps its args in the optree traditionally backwards.
129 So we pop them differently.
131 SV *left = POPs; SV *right = TOPs;
133 if (PL_op->op_private & OPpASSIGN_BACKWARDS) { /* {or,and,dor}assign */
134 SV * const temp = left;
135 left = right; right = temp;
137 assert(TAINTING_get || !TAINT_get);
138 if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
140 if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
142 SV * const cv = SvRV(right);
143 const U32 cv_type = SvTYPE(cv);
144 const bool is_gv = isGV_with_GP(left);
145 const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
151 /* Can do the optimisation if left (LVALUE) is not a typeglob,
152 right (RVALUE) is a reference to something, and we're in void
154 if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
155 /* Is the target symbol table currently empty? */
156 GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
157 if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
158 /* Good. Create a new proxy constant subroutine in the target.
159 The gv becomes a(nother) reference to the constant. */
160 SV *const value = SvRV(cv);
162 SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
163 SvPCS_IMPORTED_on(gv);
165 SvREFCNT_inc_simple_void(value);
171 /* Need to fix things up. */
173 /* Need to fix GV. */
174 left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
178 /* We've been returned a constant rather than a full subroutine,
179 but they expect a subroutine reference to apply. */
181 ENTER_with_name("sassign_coderef");
182 SvREFCNT_inc_void(SvRV(cv));
183 /* newCONSTSUB takes a reference count on the passed in SV
184 from us. We set the name to NULL, otherwise we get into
185 all sorts of fun as the reference to our new sub is
186 donated to the GV that we're about to assign to.
188 SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
191 LEAVE_with_name("sassign_coderef");
193 /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
195 First: ops for \&{"BONK"}; return us the constant in the
197 Second: ops for *{"BONK"} cause that symbol table entry
198 (and our reference to it) to be upgraded from RV
200 Thirdly: We get here. cv is actually PVGV now, and its
201 GvCV() is actually the subroutine we're looking for
203 So change the reference so that it points to the subroutine
204 of that typeglob, as that's what they were after all along.
206 GV *const upgraded = MUTABLE_GV(cv);
207 CV *const source = GvCV(upgraded);
210 assert(CvFLAGS(source) & CVf_CONST);
212 SvREFCNT_inc_simple_void_NN(source);
213 SvREFCNT_dec_NN(upgraded);
214 SvRV_set(right, MUTABLE_SV(source));
220 UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
221 (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
224 packWARN(WARN_MISC), "Useless assignment to a temporary"
226 SvSetMagicSV(left, right);
238 RETURNOP(SvTRUE_NN(sv) ? cLOGOP->op_other : cLOGOP->op_next);
245 TAINT_NOT; /* Each statement is presumed innocent */
247 PL_stack_sp = PL_stack_base + cx->blk_oldsp;
249 if (!(PL_op->op_flags & OPf_SPECIAL)) {
250 assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx));
258 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
263 const char *rpv = NULL;
265 bool rcopied = FALSE;
267 if (TARG == right && right != left) { /* $r = $l.$r */
268 rpv = SvPV_nomg_const(right, rlen);
269 rbyte = !DO_UTF8(right);
270 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
271 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */
275 if (TARG != left) { /* not $l .= $r */
277 const char* const lpv = SvPV_nomg_const(left, llen);
278 lbyte = !DO_UTF8(left);
279 sv_setpvn(TARG, lpv, llen);
285 else { /* $l .= $r and left == TARG */
287 if ((left == right /* $l .= $l */
288 || (PL_op->op_private & OPpTARGET_MY)) /* $l = $l . $r */
289 && ckWARN(WARN_UNINITIALIZED)
295 SvPV_force_nomg_nolen(left);
297 lbyte = !DO_UTF8(left);
303 rpv = SvPV_nomg_const(right, rlen);
304 rbyte = !DO_UTF8(right);
306 if (lbyte != rbyte) {
308 sv_utf8_upgrade_nomg(TARG);
311 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
312 sv_utf8_upgrade_nomg(right);
313 rpv = SvPV_nomg_const(right, rlen);
316 sv_catpvn_nomg(TARG, rpv, rlen);
323 /* push the elements of av onto the stack.
324 * Returns PL_op->op_next to allow tail-call optimisation of its callers */
327 S_pushav(pTHX_ AV* const av)
330 const SSize_t maxarg = AvFILL(av) + 1;
332 if (UNLIKELY(SvRMAGICAL(av))) {
334 for (i=0; i < (PADOFFSET)maxarg; i++) {
335 SV ** const svp = av_fetch(av, i, FALSE);
336 SP[i+1] = svp ? *svp : &PL_sv_undef;
341 for (i=0; i < (PADOFFSET)maxarg; i++) {
342 SV * const sv = AvARRAY(av)[i];
343 SP[i+1] = LIKELY(sv) ? sv : &PL_sv_undef;
352 /* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */
357 PADOFFSET base = PL_op->op_targ;
358 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
359 if (PL_op->op_flags & OPf_SPECIAL) {
360 /* fake the RHS of my ($x,$y,..) = @_ */
362 (void)S_pushav(aTHX_ GvAVn(PL_defgv));
366 /* note, this is only skipped for compile-time-known void cxt */
367 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
372 for (i = 0; i <count; i++)
373 *++SP = PAD_SV(base+i);
375 if (PL_op->op_private & OPpLVAL_INTRO) {
376 SV **svp = &(PAD_SVl(base));
377 const UV payload = (UV)(
378 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
379 | (count << SAVE_TIGHT_SHIFT)
380 | SAVEt_CLEARPADRANGE);
383 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
384 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT))
392 for (i = 0; i <count; i++)
393 SvPADSTALE_off(*svp++); /* mark lexical as active */
404 OP * const op = PL_op;
405 /* access PL_curpad once */
406 SV ** const padentry = &(PAD_SVl(op->op_targ));
411 PUTBACK; /* no pop/push after this, TOPs ok */
413 if (op->op_flags & OPf_MOD) {
414 if (op->op_private & OPpLVAL_INTRO)
415 if (!(op->op_private & OPpPAD_STATE))
416 save_clearsv(padentry);
417 if (op->op_private & OPpDEREF) {
418 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather
419 than TARG reduces the scope of TARG, so it does not
420 span the call to save_clearsv, resulting in smaller
422 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
432 /* pp_coreargs pushes a NULL to indicate no args passed to
433 * CORE::readline() */
436 tryAMAGICunTARGETlist(iter_amg, 0);
437 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
439 else PL_last_in_gv = PL_argvgv, PL_stack_sp--;
440 if (!isGV_with_GP(PL_last_in_gv)) {
441 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
442 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));
445 XPUSHs(MUTABLE_SV(PL_last_in_gv));
448 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
449 assert((SV*)PL_last_in_gv == &PL_sv_undef || isGV_with_GP(PL_last_in_gv));
452 return do_readline();
460 tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
464 (SvIOK_notUV(left) && SvIOK_notUV(right))
465 ? (SvIVX(left) == SvIVX(right))
466 : ( do_ncmp(left, right) == 0)
472 /* also used for: pp_i_preinc() */
476 SV *sv = *PL_stack_sp;
478 if (LIKELY(((sv->sv_flags &
479 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
480 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
482 && SvIVX(sv) != IV_MAX)
484 SvIV_set(sv, SvIVX(sv) + 1);
486 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
493 /* also used for: pp_i_predec() */
497 SV *sv = *PL_stack_sp;
499 if (LIKELY(((sv->sv_flags &
500 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
501 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
503 && SvIVX(sv) != IV_MIN)
505 SvIV_set(sv, SvIVX(sv) - 1);
507 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
514 /* also used for: pp_orassign() */
525 if (PL_op->op_type == OP_OR)
527 RETURNOP(cLOGOP->op_other);
532 /* also used for: pp_dor() pp_dorassign() */
539 const int op_type = PL_op->op_type;
540 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);
545 if (UNLIKELY(!sv || !SvANY(sv))) {
546 if (op_type == OP_DOR)
548 RETURNOP(cLOGOP->op_other);
554 if (UNLIKELY(!sv || !SvANY(sv)))
559 switch (SvTYPE(sv)) {
561 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
565 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
569 if (CvROOT(sv) || CvXSUB(sv))
582 if(op_type == OP_DOR)
584 RETURNOP(cLOGOP->op_other);
586 /* assuming OP_DEFINED */
596 dSP; dATARGET; bool useleft; SV *svl, *svr;
598 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
602 #ifdef PERL_PRESERVE_IVUV
604 /* special-case some simple common cases */
605 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
607 U32 flags = (svl->sv_flags & svr->sv_flags);
608 if (flags & SVf_IOK) {
609 /* both args are simple IVs */
614 topl = ((UV)il) >> (UVSIZE * 8 - 2);
615 topr = ((UV)ir) >> (UVSIZE * 8 - 2);
617 /* if both are in a range that can't under/overflow, do a
618 * simple integer add: if the top of both numbers
619 * are 00 or 11, then it's safe */
620 if (!( ((topl+1) | (topr+1)) & 2)) {
622 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
628 else if (flags & SVf_NOK) {
629 /* both args are NVs */
634 #if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
635 !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
636 && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
638 nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
641 /* nothing was lost by converting to IVs */
644 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
652 useleft = USE_LEFT(svl);
653 /* We must see if we can perform the addition with integers if possible,
654 as the integer code detects overflow while the NV code doesn't.
655 If either argument hasn't had a numeric conversion yet attempt to get
656 the IV. It's important to do this now, rather than just assuming that
657 it's not IOK as a PV of "9223372036854775806" may not take well to NV
658 addition, and an SV which is NOK, NV=6.0 ought to be coerced to
659 integer in case the second argument is IV=9223372036854775806
660 We can (now) rely on sv_2iv to do the right thing, only setting the
661 public IOK flag if the value in the NV (or PV) slot is truly integer.
663 A side effect is that this also aggressively prefers integer maths over
664 fp maths for integer values.
666 How to detect overflow?
668 C 99 section 6.2.6.1 says
670 The range of nonnegative values of a signed integer type is a subrange
671 of the corresponding unsigned integer type, and the representation of
672 the same value in each type is the same. A computation involving
673 unsigned operands can never overflow, because a result that cannot be
674 represented by the resulting unsigned integer type is reduced modulo
675 the number that is one greater than the largest value that can be
676 represented by the resulting type.
680 which I read as "unsigned ints wrap."
682 signed integer overflow seems to be classed as "exception condition"
684 If an exceptional condition occurs during the evaluation of an
685 expression (that is, if the result is not mathematically defined or not
686 in the range of representable values for its type), the behavior is
689 (6.5, the 5th paragraph)
691 I had assumed that on 2s complement machines signed arithmetic would
692 wrap, hence coded pp_add and pp_subtract on the assumption that
693 everything perl builds on would be happy. After much wailing and
694 gnashing of teeth it would seem that irix64 knows its ANSI spec well,
695 knows that it doesn't need to, and doesn't. Bah. Anyway, the all-
696 unsigned code below is actually shorter than the old code. :-)
699 if (SvIV_please_nomg(svr)) {
700 /* Unless the left argument is integer in range we are going to have to
701 use NV maths. Hence only attempt to coerce the right argument if
702 we know the left is integer. */
710 /* left operand is undef, treat as zero. + 0 is identity,
711 Could SETi or SETu right now, but space optimise by not adding
712 lots of code to speed up what is probably a rarish case. */
714 /* Left operand is defined, so is it IV? */
715 if (SvIV_please_nomg(svl)) {
716 if ((auvok = SvUOK(svl)))
719 const IV aiv = SvIVX(svl);
722 auvok = 1; /* Now acting as a sign flag. */
724 auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
731 bool result_good = 0;
734 bool buvok = SvUOK(svr);
739 const IV biv = SvIVX(svr);
744 buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
746 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
747 else "IV" now, independent of how it came in.
748 if a, b represents positive, A, B negative, a maps to -A etc
753 all UV maths. negate result if A negative.
754 add if signs same, subtract if signs differ. */
760 /* Must get smaller */
766 /* result really should be -(auv-buv). as its negation
767 of true value, need to swap our result flag */
784 if (result <= (UV)IV_MIN)
785 SETi(result == (UV)IV_MIN
786 ? IV_MIN : -(IV)result);
788 /* result valid, but out of range for IV. */
793 } /* Overflow, drop through to NVs. */
798 useleft = USE_LEFT(svl);
802 NV value = SvNV_nomg(svr);
805 /* left operand is undef, treat as zero. + 0.0 is identity. */
809 SETn( value + SvNV_nomg(svl) );
815 /* also used for: pp_aelemfast_lex() */
820 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
821 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
822 const U32 lval = PL_op->op_flags & OPf_MOD;
823 const I8 key = (I8)PL_op->op_private;
827 assert(SvTYPE(av) == SVt_PVAV);
831 /* inlined av_fetch() for simple cases ... */
832 if (!SvRMAGICAL(av) && key >= 0 && key <= AvFILLp(av)) {
833 sv = AvARRAY(av)[key];
840 /* ... else do it the hard way */
841 svp = av_fetch(av, key, lval);
842 sv = (svp ? *svp : &PL_sv_undef);
844 if (UNLIKELY(!svp && lval))
845 DIE(aTHX_ PL_no_aelem, (int)key);
847 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
857 do_join(TARG, *MARK, MARK, SP);
863 /* Oversized hot code. */
865 /* also used for: pp_say() */
869 dSP; dMARK; dORIGMARK;
873 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;
877 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
880 if (MARK == ORIGMARK) {
881 /* If using default handle then we need to make space to
882 * pass object as 1st arg, so move other args up ...
886 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
889 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
891 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
892 | (PL_op->op_type == OP_SAY
893 ? TIED_METHOD_SAY : 0)), sp - mark);
896 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
897 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
900 SETERRNO(EBADF,RMS_IFI);
903 else if (!(fp = IoOFP(io))) {
905 report_wrongway_fh(gv, '<');
908 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);
912 SV * const ofs = GvSV(PL_ofsgv); /* $, */
914 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
916 if (!do_print(*MARK, fp))
920 /* don't use 'ofs' here - it may be invalidated by magic callbacks */
921 if (!do_print(GvSV(PL_ofsgv), fp)) {
930 if (!do_print(*MARK, fp))
938 if (PL_op->op_type == OP_SAY) {
939 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))
942 else if (PL_ors_sv && SvOK(PL_ors_sv))
943 if (!do_print(PL_ors_sv, fp)) /* $\ */
946 if (IoFLAGS(io) & IOf_FLUSH)
947 if (PerlIO_flush(fp) == EOF)
957 XPUSHs(&PL_sv_undef);
962 /* do the common parts of pp_padhv() and pp_rv2hv()
963 * It assumes the caller has done EXTEND(SP, 1) or equivalent.
964 * 'is_keys' indicates the OPpPADHV_ISKEYS/OPpRV2HV_ISKEYS flag is set.
965 * 'has_targ' indicates that the op has a target - this should
966 * be a compile-time constant so that the code can constant-folded as
970 PERL_STATIC_INLINE OP*
971 S_padhv_rv2hv_common(pTHX_ HV *hv, U8 gimme, bool is_keys, bool has_targ)
980 assert(PL_op->op_type == OP_PADHV || PL_op->op_type == OP_RV2HV);
982 if (gimme == G_ARRAY) {
988 /* 'keys %h' masquerading as '%h': reset iterator */
989 (void)hv_iterinit(hv);
994 is_bool = ( PL_op->op_private & OPpTRUEBOOL
995 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
996 && block_gimme() == G_VOID));
997 is_tied = SvRMAGICAL(hv) && (mg = mg_find(MUTABLE_SV(hv), PERL_MAGIC_tied));
999 if (UNLIKELY(is_tied)) {
1000 if (is_keys && !is_bool) {
1002 while (hv_iternext(hv))
1007 sv = magic_scalarpack(hv, mg);
1014 sv = i ? &PL_sv_yes : &PL_sv_zero;
1025 #ifdef PERL_OP_PARENT
1027 /* parent op should be an unused OP_KEYS whose targ we can
1032 assert(!OpHAS_SIBLING(PL_op));
1033 k = PL_op->op_sibparent;
1034 assert(k->op_type == OP_KEYS);
1035 TARG = PAD_SV(k->op_targ);
1049 /* This is also called directly by pp_lvavref. */
1054 assert(SvTYPE(TARG) == SVt_PVAV);
1055 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO ))
1056 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) ))
1057 SAVECLEARSV(PAD_SVl(PL_op->op_targ));
1060 if (PL_op->op_flags & OPf_REF) {
1064 else if (PL_op->op_private & OPpMAYBE_LVSUB) {
1065 const I32 flags = is_lvalue_sub();
1066 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1067 if (GIMME_V == G_SCALAR)
1068 /* diag_listed_as: Can't return %s to lvalue scalar context */
1069 Perl_croak(aTHX_ "Can't return array to lvalue scalar context");
1076 if (gimme == G_ARRAY)
1077 return S_pushav(aTHX_ (AV*)TARG);
1079 if (gimme == G_SCALAR) {
1080 const SSize_t maxarg = AvFILL(MUTABLE_AV(TARG)) + 1;
1083 else if (PL_op->op_private & OPpTRUEBOOL)
1097 assert(SvTYPE(TARG) == SVt_PVHV);
1098 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO ))
1099 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) ))
1100 SAVECLEARSV(PAD_SVl(PL_op->op_targ));
1104 if (PL_op->op_flags & OPf_REF) {
1108 else if (PL_op->op_private & OPpMAYBE_LVSUB) {
1109 const I32 flags = is_lvalue_sub();
1110 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1111 if (GIMME_V == G_SCALAR)
1112 /* diag_listed_as: Can't return %s to lvalue scalar context */
1113 Perl_croak(aTHX_ "Can't return hash to lvalue scalar context");
1121 return S_padhv_rv2hv_common(aTHX_ (HV*)TARG, gimme,
1122 cBOOL(PL_op->op_private & OPpPADHV_ISKEYS),
1127 /* also used for: pp_rv2hv() */
1128 /* also called directly by pp_lvavref */
1133 const U8 gimme = GIMME_V;
1134 static const char an_array[] = "an ARRAY";
1135 static const char a_hash[] = "a HASH";
1136 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
1137 || PL_op->op_type == OP_LVAVREF;
1138 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
1142 if (UNLIKELY(SvAMAGIC(sv))) {
1143 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
1146 if (UNLIKELY(SvTYPE(sv) != type))
1147 /* diag_listed_as: Not an ARRAY reference */
1148 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
1149 else if (UNLIKELY(PL_op->op_flags & OPf_MOD
1150 && PL_op->op_private & OPpLVAL_INTRO))
1151 Perl_croak(aTHX_ "%s", PL_no_localize_ref);
1153 else if (UNLIKELY(SvTYPE(sv) != type)) {
1156 if (!isGV_with_GP(sv)) {
1157 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
1163 gv = MUTABLE_GV(sv);
1165 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
1166 if (PL_op->op_private & OPpLVAL_INTRO)
1167 sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
1169 if (PL_op->op_flags & OPf_REF) {
1173 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
1174 const I32 flags = is_lvalue_sub();
1175 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1176 if (gimme != G_ARRAY)
1177 goto croak_cant_return;
1184 AV *const av = MUTABLE_AV(sv);
1186 if (gimme == G_ARRAY) {
1189 return S_pushav(aTHX_ av);
1192 if (gimme == G_SCALAR) {
1193 const SSize_t maxarg = AvFILL(av) + 1;
1194 if (PL_op->op_private & OPpTRUEBOOL)
1195 SETs(maxarg ? &PL_sv_yes : &PL_sv_zero);
1204 return S_padhv_rv2hv_common(aTHX_ (HV*)sv, gimme,
1205 cBOOL(PL_op->op_private & OPpRV2HV_ISKEYS),
1211 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
1212 is_pp_rv2av ? "array" : "hash");
1217 S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
1219 PERL_ARGS_ASSERT_DO_ODDBALL;
1222 if (ckWARN(WARN_MISC)) {
1224 if (oddkey == firstkey &&
1226 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
1227 SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
1229 err = "Reference found where even-sized list expected";
1232 err = "Odd number of elements in hash assignment";
1233 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
1240 /* Do a mark and sweep with the SVf_BREAK flag to detect elements which
1241 * are common to both the LHS and RHS of an aassign, and replace them
1242 * with copies. All these copies are made before the actual list assign is
1245 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
1246 * element ($b) to the first LH element ($a), modifies $a; when the
1247 * second assignment is done, the second RH element now has the wrong
1248 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
1249 * Note that we don't need to make a mortal copy of $b.
1251 * The algorithm below works by, for every RHS element, mark the
1252 * corresponding LHS target element with SVf_BREAK. Then if the RHS
1253 * element is found with SVf_BREAK set, it means it would have been
1254 * modified, so make a copy.
1255 * Note that by scanning both LHS and RHS in lockstep, we avoid
1256 * unnecessary copies (like $b above) compared with a naive
1257 * "mark all LHS; copy all marked RHS; unmark all LHS".
1259 * If the LHS element is a 'my' declaration' and has a refcount of 1, then
1260 * it can't be common and can be skipped.
1262 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
1263 * that we thought we didn't need to call S_aassign_copy_common(), but we
1264 * have anyway for sanity checking. If we find we need to copy, then panic.
1267 PERL_STATIC_INLINE void
1268 S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
1269 SV **firstrelem, SV **lastrelem
1278 SSize_t lcount = lastlelem - firstlelem + 1;
1279 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
1280 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
1281 bool copy_all = FALSE;
1283 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
1284 assert(firstlelem < lastlelem); /* at least 2 LH elements */
1285 assert(firstrelem < lastrelem); /* at least 2 RH elements */
1289 /* we never have to copy the first RH element; it can't be corrupted
1290 * by assigning something to the corresponding first LH element.
1291 * So this scan does in a loop: mark LHS[N]; test RHS[N+1]
1293 relem = firstrelem + 1;
1295 for (; relem <= lastrelem; relem++) {
1298 /* mark next LH element */
1300 if (--lcount >= 0) {
1303 if (UNLIKELY(!svl)) {/* skip AV alias marker */
1304 assert (lelem <= lastlelem);
1310 if (SvSMAGICAL(svl)) {
1313 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
1316 /* this LH element will consume all further args;
1317 * no need to mark any further LH elements (if any).
1318 * But we still need to scan any remaining RHS elements;
1319 * set lcount negative to distinguish from lcount == 0,
1320 * so the loop condition continues being true
1323 lelem--; /* no need to unmark this element */
1325 else if (!(do_rc1 && SvREFCNT(svl) == 1) && !SvIMMORTAL(svl)) {
1326 SvFLAGS(svl) |= SVf_BREAK;
1330 /* don't check RH element if no SVf_BREAK flags set yet */
1337 /* see if corresponding RH element needs copying */
1343 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
1344 U32 brk = (SvFLAGS(svr) & SVf_BREAK);
1348 /* op_dump(PL_op); */
1350 "panic: aassign skipped needed copy of common RH elem %"
1351 UVuf, (UV)(relem - firstrelem));
1355 TAINT_NOT; /* Each item is independent */
1357 /* Dear TODO test in t/op/sort.t, I love you.
1358 (It's relying on a panic, not a "semi-panic" from newSVsv()
1359 and then an assertion failure below.) */
1360 if (UNLIKELY(SvIS_FREED(svr))) {
1361 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
1364 /* avoid break flag while copying; otherwise COW etc
1366 SvFLAGS(svr) &= ~SVf_BREAK;
1367 /* Not newSVsv(), as it does not allow copy-on-write,
1368 resulting in wasteful copies.
1369 Also, we use SV_NOSTEAL in case the SV is used more than
1370 once, e.g. (...) = (f())[0,0]
1371 Where the same SV appears twice on the RHS without a ref
1372 count bump. (Although I suspect that the SV won't be
1373 stealable here anyway - DAPM).
1375 *relem = sv_mortalcopy_flags(svr,
1376 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1377 /* ... but restore afterwards in case it's needed again,
1378 * e.g. ($a,$b,$c) = (1,$a,$a)
1380 SvFLAGS(svr) |= brk;
1392 while (lelem > firstlelem) {
1393 SV * const svl = *(--lelem);
1395 SvFLAGS(svl) &= ~SVf_BREAK;
1404 SV **lastlelem = PL_stack_sp;
1405 SV **lastrelem = PL_stack_base + POPMARK;
1406 SV **firstrelem = PL_stack_base + POPMARK + 1;
1407 SV **firstlelem = lastrelem + 1;
1412 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
1413 * only need to save locally, not on the save stack */
1414 U16 old_delaymagic = PL_delaymagic;
1419 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
1421 /* If there's a common identifier on both sides we have to take
1422 * special care that assigning the identifier on the left doesn't
1423 * clobber a value on the right that's used later in the list.
1426 /* at least 2 LH and RH elements, or commonality isn't an issue */
1427 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1428 for (relem = firstrelem+1; relem <= lastrelem; relem++) {
1429 if (SvGMAGICAL(*relem))
1432 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1433 if (*lelem && SvSMAGICAL(*lelem))
1436 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
1437 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
1438 /* skip the scan if all scalars have a ref count of 1 */
1439 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1441 if (!sv || SvREFCNT(sv) == 1)
1443 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVAV)
1450 S_aassign_copy_common(aTHX_
1451 firstlelem, lastlelem, firstrelem, lastrelem
1461 /* on debugging builds, do the scan even if we've concluded we
1462 * don't need to, then panic if we find commonality. Note that the
1463 * scanner assumes at least 2 elements */
1464 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1475 if (relem > lastrelem)
1478 /* first lelem loop while there are still relems */
1479 while (LIKELY(lelem <= lastlelem)) {
1483 TAINT_NOT; /* Each item stands on its own, taintwise. */
1485 assert(relem <= lastrelem);
1486 if (UNLIKELY(!lsv)) {
1489 ASSUME(SvTYPE(lsv) == SVt_PVAV);
1492 switch (SvTYPE(lsv)) {
1497 SSize_t nelems = lastrelem - relem + 1;
1498 AV *ary = MUTABLE_AV(lsv);
1500 /* Assigning to an aggregate is tricky. First there is the
1501 * issue of commonality, e.g. @a = ($a[0]). Since the
1502 * stack isn't refcounted, clearing @a prior to storing
1503 * elements will free $a[0]. Similarly with
1504 * sub FETCH { $status[$_[1]] } @status = @tied[0,1];
1506 * The way to avoid these issues is to make the copy of each
1507 * SV (and we normally store a *copy* in the array) *before*
1508 * clearing the array. But this has a problem in that
1509 * if the code croaks during copying, the not-yet-stored copies
1510 * could leak. One way to avoid this is to make all the copies
1511 * mortal, but that's quite expensive.
1513 * The current solution to these issues is to use a chunk
1514 * of the tmps stack as a temporary refcounted-stack. SVs
1515 * will be put on there during processing to avoid leaks,
1516 * but will be removed again before the end of this block,
1517 * so free_tmps() is never normally called. Also, the
1518 * sv_refcnt of the SVs doesn't have to be manipulated, since
1519 * the ownership of 1 reference count is transferred directly
1520 * from the tmps stack to the AV when the SV is stored.
1522 * We disarm slots in the temps stack by storing PL_sv_undef
1523 * there: it doesn't matter if that SV's refcount is
1524 * repeatedly decremented during a croak. But usually this is
1525 * only an interim measure. By the end of this code block
1526 * we try where possible to not leave any PL_sv_undef's on the
1527 * tmps stack e.g. by shuffling newer entries down.
1529 * There is one case where we don't copy: non-magical
1530 * SvTEMP(sv)'s with a ref count of 1. The only owner of these
1531 * is on the tmps stack, so its safe to directly steal the SV
1532 * rather than copying. This is common in things like function
1533 * returns, map etc, which all return a list of such SVs.
1535 * Note however something like @a = (f())[0,0], where there is
1536 * a danger of the same SV being shared: this avoided because
1537 * when the SV is stored as $a[0], its ref count gets bumped,
1538 * so the RC==1 test fails and the second element is copied
1541 * We also use one slot in the tmps stack to hold an extra
1542 * ref to the array, to ensure it doesn't get prematurely
1543 * freed. Again, this is removed before the end of this block.
1545 * Note that OPpASSIGN_COMMON_AGG is used to flag a possible
1546 * @a = ($a[0]) case, but the current implementation uses the
1547 * same algorithm regardless, so ignores that flag. (It *is*
1548 * used in the hash branch below, however).
1551 /* Reserve slots for ary, plus the elems we're about to copy,
1552 * then protect ary and temporarily void the remaining slots
1553 * with &PL_sv_undef */
1554 EXTEND_MORTAL(nelems + 1);
1555 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(ary);
1556 tmps_base = PL_tmps_ix + 1;
1557 for (i = 0; i < nelems; i++)
1558 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
1559 PL_tmps_ix += nelems;
1561 /* Make a copy of each RHS elem and save on the tmps_stack
1562 * (or pass through where we can optimise away the copy) */
1564 if (UNLIKELY(alias)) {
1565 U32 lval = (gimme == G_ARRAY)
1566 ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
1567 for (svp = relem; svp <= lastrelem; svp++) {
1572 DIE(aTHX_ "Assigned value is not a reference");
1573 if (SvTYPE(SvRV(rsv)) > SVt_PVLV)
1574 /* diag_listed_as: Assigned value is not %s reference */
1576 "Assigned value is not a SCALAR reference");
1578 *svp = rsv = sv_mortalcopy(rsv);
1579 /* XXX else check for weak refs? */
1580 rsv = SvREFCNT_inc_NN(SvRV(rsv));
1581 assert(tmps_base <= PL_tmps_max);
1582 PL_tmps_stack[tmps_base++] = rsv;
1586 for (svp = relem; svp <= lastrelem; svp++) {
1589 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
1590 /* can skip the copy */
1591 SvREFCNT_inc_simple_void_NN(rsv);
1596 /* do get before newSV, in case it dies and leaks */
1599 /* see comment in S_aassign_copy_common about
1601 sv_setsv_flags(nsv, rsv,
1602 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
1606 assert(tmps_base <= PL_tmps_max);
1607 PL_tmps_stack[tmps_base++] = rsv;
1611 if (SvRMAGICAL(ary) || AvFILLp(ary) >= 0) /* may be non-empty */
1614 /* store in the array, the SVs that are in the tmps stack */
1616 tmps_base -= nelems;
1618 if (SvMAGICAL(ary) || SvREADONLY(ary) || !AvREAL(ary)) {
1619 /* for arrays we can't cheat with, use the official API */
1620 av_extend(ary, nelems - 1);
1621 for (i = 0; i < nelems; i++) {
1622 SV **svp = &(PL_tmps_stack[tmps_base + i]);
1624 /* A tied store won't take ownership of rsv, so keep
1625 * the 1 refcnt on the tmps stack; otherwise disarm
1626 * the tmps stack entry */
1627 if (av_store(ary, i, rsv))
1628 *svp = &PL_sv_undef;
1629 /* av_store() may have added set magic to rsv */;
1632 /* disarm ary refcount: see comments below about leak */
1633 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
1636 /* directly access/set the guts of the AV */
1637 SSize_t fill = nelems - 1;
1638 if (fill > AvMAX(ary))
1639 av_extend_guts(ary, fill, &AvMAX(ary), &AvALLOC(ary),
1641 AvFILLp(ary) = fill;
1642 Copy(&(PL_tmps_stack[tmps_base]), AvARRAY(ary), nelems, SV*);
1643 /* Quietly remove all the SVs from the tmps stack slots,
1644 * since ary has now taken ownership of the refcnt.
1645 * Also remove ary: which will now leak if we die before
1646 * the SvREFCNT_dec_NN(ary) below */
1647 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
1648 Move(&PL_tmps_stack[tmps_base + nelems],
1649 &PL_tmps_stack[tmps_base - 1],
1650 PL_tmps_ix - (tmps_base + nelems) + 1,
1652 PL_tmps_ix -= (nelems + 1);
1655 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
1656 /* its assumed @ISA set magic can't die and leak ary */
1657 SvSETMAGIC(MUTABLE_SV(ary));
1658 SvREFCNT_dec_NN(ary);
1660 relem = lastrelem + 1;
1664 case SVt_PVHV: { /* normal hash */
1670 SSize_t nelems = lastrelem - relem + 1;
1671 HV *hash = MUTABLE_HV(lsv);
1673 if (UNLIKELY(nelems & 1)) {
1674 do_oddball(lastrelem, relem);
1675 /* we have firstlelem to reuse, it's not needed any more */
1676 *++lastrelem = &PL_sv_undef;
1680 /* See the SVt_PVAV branch above for a long description of
1681 * how the following all works. The main difference for hashes
1682 * is that we treat keys and values separately (and have
1683 * separate loops for them): as for arrays, values are always
1684 * copied (except for the SvTEMP optimisation), since they
1685 * need to be stored in the hash; while keys are only
1686 * processed where they might get prematurely freed or
1689 /* tmps stack slots:
1690 * * reserve a slot for the hash keepalive;
1691 * * reserve slots for the hash values we're about to copy;
1692 * * preallocate for the keys we'll possibly copy or refcount bump
1694 * then protect hash and temporarily void the remaining
1695 * value slots with &PL_sv_undef */
1696 EXTEND_MORTAL(nelems + 1);
1698 /* convert to number of key/value pairs */
1701 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(hash);
1702 tmps_base = PL_tmps_ix + 1;
1703 for (i = 0; i < nelems; i++)
1704 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
1705 PL_tmps_ix += nelems;
1707 /* Make a copy of each RHS hash value and save on the tmps_stack
1708 * (or pass through where we can optimise away the copy) */
1710 for (svp = relem + 1; svp <= lastrelem; svp += 2) {
1713 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
1714 /* can skip the copy */
1715 SvREFCNT_inc_simple_void_NN(rsv);
1720 /* do get before newSV, in case it dies and leaks */
1723 /* see comment in S_aassign_copy_common about
1725 sv_setsv_flags(nsv, rsv,
1726 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
1730 assert(tmps_base <= PL_tmps_max);
1731 PL_tmps_stack[tmps_base++] = rsv;
1733 tmps_base -= nelems;
1736 /* possibly protect keys */
1738 if (UNLIKELY(gimme == G_ARRAY)) {
1740 * @a = ((%h = ($$r, 1)), $r = "x");
1741 * $_++ for %h = (1,2,3,4);
1743 EXTEND_MORTAL(nelems);
1744 for (svp = relem; svp <= lastrelem; svp += 2)
1745 *svp = sv_mortalcopy_flags(*svp,
1746 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1748 else if (PL_op->op_private & OPpASSIGN_COMMON_AGG) {
1749 /* for possible commonality, e.g.
1751 * avoid premature freeing RHS keys by mortalising
1753 * For a magic element, make a copy so that its magic is
1754 * called *before* the hash is emptied (which may affect
1755 * a tied value for example).
1756 * In theory we should check for magic keys in all
1757 * cases, not just under OPpASSIGN_COMMON_AGG, but in
1758 * practice, !OPpASSIGN_COMMON_AGG implies only
1759 * constants or padtmps on the RHS.
1761 EXTEND_MORTAL(nelems);
1762 for (svp = relem; svp <= lastrelem; svp += 2) {
1764 if (UNLIKELY(SvGMAGICAL(rsv))) {
1766 *svp = sv_mortalcopy_flags(*svp,
1767 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1768 /* allow other branch to continue pushing
1769 * onto tmps stack without checking each time */
1770 n = (lastrelem - relem) >> 1;
1774 PL_tmps_stack[++PL_tmps_ix] =
1775 SvREFCNT_inc_simple_NN(rsv);
1779 if (SvRMAGICAL(hash) || HvUSEDKEYS(hash))
1782 /* now assign the keys and values to the hash */
1786 if (UNLIKELY(gimme == G_ARRAY)) {
1787 /* @a = (%h = (...)) etc */
1789 SV **topelem = relem;
1791 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
1794 /* remove duplicates from list we return */
1795 if (!hv_exists_ent(hash, key, 0)) {
1796 /* copy key back: possibly to an earlier
1797 * stack location if we encountered dups earlier,
1798 * The values will be updated later
1803 /* A tied store won't take ownership of val, so keep
1804 * the 1 refcnt on the tmps stack; otherwise disarm
1805 * the tmps stack entry */
1806 if (hv_store_ent(hash, key, val, 0))
1807 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
1810 /* hv_store_ent() may have added set magic to val */;
1813 if (topelem < svp) {
1814 /* at this point we have removed the duplicate key/value
1815 * pairs from the stack, but the remaining values may be
1816 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
1817 * the (a 2), but the stack now probably contains
1818 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
1819 * obliterates the earlier key. So refresh all values. */
1820 lastrelem = topelem - 1;
1821 while (relem < lastrelem) {
1823 he = hv_fetch_ent(hash, *relem++, 0, 0);
1824 *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
1830 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
1833 if (hv_store_ent(hash, key, val, 0))
1834 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
1837 /* hv_store_ent() may have added set magic to val */;
1843 /* there are still some 'live' recounts on the tmps stack
1844 * - usually caused by storing into a tied hash. So let
1845 * free_tmps() do the proper but slow job later.
1846 * Just disarm hash refcount: see comments below about leak
1848 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
1851 /* Quietly remove all the SVs from the tmps stack slots,
1852 * since hash has now taken ownership of the refcnt.
1853 * Also remove hash: which will now leak if we die before
1854 * the SvREFCNT_dec_NN(hash) below */
1855 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
1856 Move(&PL_tmps_stack[tmps_base + nelems],
1857 &PL_tmps_stack[tmps_base - 1],
1858 PL_tmps_ix - (tmps_base + nelems) + 1,
1860 PL_tmps_ix -= (nelems + 1);
1863 SvREFCNT_dec_NN(hash);
1865 relem = lastrelem + 1;
1870 if (!SvIMMORTAL(lsv)) {
1874 SvTEMP(lsv) && !SvSMAGICAL(lsv) && SvREFCNT(lsv) == 1 &&
1875 (!isGV_with_GP(lsv) || SvFAKE(lsv)) && ckWARN(WARN_MISC)
1878 packWARN(WARN_MISC),
1879 "Useless assignment to a temporary"
1882 /* avoid freeing $$lsv if it might be needed for further
1883 * elements, e.g. ($ref, $foo) = (1, $$ref) */
1885 && ( ((ref = SvRV(lsv)), SvREFCNT(ref)) == 1)
1886 && lelem <= lastlelem
1889 SvREFCNT_inc_simple_void_NN(ref);
1890 /* an unrolled sv_2mortal */
1892 if (UNLIKELY(ix >= PL_tmps_max))
1893 /* speculatively grow enough to cover other
1895 (void)tmps_grow_p(ix + (lastlelem - lelem));
1896 PL_tmps_stack[ix] = ref;
1899 sv_setsv(lsv, *relem);
1903 if (++relem > lastrelem)
1912 /* simplified lelem loop for when there are no relems left */
1913 while (LIKELY(lelem <= lastlelem)) {
1916 TAINT_NOT; /* Each item stands on its own, taintwise. */
1918 if (UNLIKELY(!lsv)) {
1920 ASSUME(SvTYPE(lsv) == SVt_PVAV);
1923 switch (SvTYPE(lsv)) {
1925 if (SvRMAGICAL(lsv) || AvFILLp((SV*)lsv) >= 0) {
1927 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
1933 if (SvRMAGICAL(lsv) || HvUSEDKEYS((HV*)lsv))
1938 if (!SvIMMORTAL(lsv)) {
1947 TAINT_NOT; /* result of list assign isn't tainted */
1949 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
1950 /* Will be used to set PL_tainting below */
1951 Uid_t tmp_uid = PerlProc_getuid();
1952 Uid_t tmp_euid = PerlProc_geteuid();
1953 Gid_t tmp_gid = PerlProc_getgid();
1954 Gid_t tmp_egid = PerlProc_getegid();
1956 /* XXX $> et al currently silently ignore failures */
1957 if (PL_delaymagic & DM_UID) {
1958 #ifdef HAS_SETRESUID
1960 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1961 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
1963 #elif defined(HAS_SETREUID)
1965 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1966 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
1969 if ((PL_delaymagic & DM_UID) == DM_RUID) {
1970 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
1971 PL_delaymagic &= ~DM_RUID;
1973 # endif /* HAS_SETRUID */
1975 if ((PL_delaymagic & DM_UID) == DM_EUID) {
1976 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
1977 PL_delaymagic &= ~DM_EUID;
1979 # endif /* HAS_SETEUID */
1980 if (PL_delaymagic & DM_UID) {
1981 if (PL_delaymagic_uid != PL_delaymagic_euid)
1982 DIE(aTHX_ "No setreuid available");
1983 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
1985 #endif /* HAS_SETRESUID */
1987 tmp_uid = PerlProc_getuid();
1988 tmp_euid = PerlProc_geteuid();
1990 /* XXX $> et al currently silently ignore failures */
1991 if (PL_delaymagic & DM_GID) {
1992 #ifdef HAS_SETRESGID
1994 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1995 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
1997 #elif defined(HAS_SETREGID)
1999 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
2000 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
2003 if ((PL_delaymagic & DM_GID) == DM_RGID) {
2004 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
2005 PL_delaymagic &= ~DM_RGID;
2007 # endif /* HAS_SETRGID */
2009 if ((PL_delaymagic & DM_GID) == DM_EGID) {
2010 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
2011 PL_delaymagic &= ~DM_EGID;
2013 # endif /* HAS_SETEGID */
2014 if (PL_delaymagic & DM_GID) {
2015 if (PL_delaymagic_gid != PL_delaymagic_egid)
2016 DIE(aTHX_ "No setregid available");
2017 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
2019 #endif /* HAS_SETRESGID */
2021 tmp_gid = PerlProc_getgid();
2022 tmp_egid = PerlProc_getegid();
2024 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
2025 #ifdef NO_TAINT_SUPPORT
2026 PERL_UNUSED_VAR(tmp_uid);
2027 PERL_UNUSED_VAR(tmp_euid);
2028 PERL_UNUSED_VAR(tmp_gid);
2029 PERL_UNUSED_VAR(tmp_egid);
2032 PL_delaymagic = old_delaymagic;
2034 if (gimme == G_VOID)
2035 SP = firstrelem - 1;
2036 else if (gimme == G_SCALAR) {
2039 if (PL_op->op_private & OPpASSIGN_TRUEBOOL)
2040 SETs((firstlelem - firstrelem) ? &PL_sv_yes : &PL_sv_zero);
2043 SETi(firstlelem - firstrelem);
2055 PMOP * const pm = cPMOP;
2056 REGEXP * rx = PM_GETRE(pm);
2057 regexp *prog = ReANY(rx);
2058 SV * const pkg = RXp_ENGINE(prog)->qr_package(aTHX_ (rx));
2059 SV * const rv = sv_newmortal();
2063 SvUPGRADE(rv, SVt_IV);
2064 /* For a subroutine describing itself as "This is a hacky workaround" I'm
2065 loathe to use it here, but it seems to be the right fix. Or close.
2066 The key part appears to be that it's essential for pp_qr to return a new
2067 object (SV), which implies that there needs to be an effective way to
2068 generate a new SV from the existing SV that is pre-compiled in the
2070 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));
2073 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
2074 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
2075 *cvp = cv_clone(cv);
2076 SvREFCNT_dec_NN(cv);
2080 HV *const stash = gv_stashsv(pkg, GV_ADD);
2081 SvREFCNT_dec_NN(pkg);
2082 (void)sv_bless(rv, stash);
2085 if (UNLIKELY(RXp_ISTAINTED(prog))) {
2087 SvTAINTED_on(SvRV(rv));
2100 SSize_t curpos = 0; /* initial pos() or current $+[0] */
2103 const char *truebase; /* Start of string */
2104 REGEXP *rx = PM_GETRE(pm);
2105 regexp *prog = ReANY(rx);
2107 const U8 gimme = GIMME_V;
2109 const I32 oldsave = PL_savestack_ix;
2110 I32 had_zerolen = 0;
2113 if (PL_op->op_flags & OPf_STACKED)
2124 PUTBACK; /* EVAL blocks need stack_sp. */
2125 /* Skip get-magic if this is a qr// clone, because regcomp has
2127 truebase = prog->mother_re
2128 ? SvPV_nomg_const(TARG, len)
2129 : SvPV_const(TARG, len);
2131 DIE(aTHX_ "panic: pp_match");
2132 strend = truebase + len;
2133 rxtainted = (RXp_ISTAINTED(prog) ||
2134 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
2137 /* We need to know this in case we fail out early - pos() must be reset */
2138 global = dynpm->op_pmflags & PMf_GLOBAL;
2140 /* PMdf_USED is set after a ?? matches once */
2143 SvREADONLY(PL_regex_pad[pm->op_pmoffset])
2145 pm->op_pmflags & PMf_USED
2148 DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once"));
2152 /* handle the empty pattern */
2153 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
2154 if (PL_curpm == PL_reg_curpm) {
2155 if (PL_curpm_under) {
2156 if (PL_curpm_under == PL_reg_curpm) {
2157 Perl_croak(aTHX_ "Infinite recursion via empty pattern");
2159 pm = PL_curpm_under;
2169 if (RXp_MINLEN(prog) >= 0 && (STRLEN)RXp_MINLEN(prog) > len) {
2170 DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
2171 UVuf " < %" IVdf ")\n",
2172 (UV)len, (IV)RXp_MINLEN(prog)));
2176 /* get pos() if //g */
2178 mg = mg_find_mglob(TARG);
2179 if (mg && mg->mg_len >= 0) {
2180 curpos = MgBYTEPOS(mg, TARG, truebase, len);
2181 /* last time pos() was set, it was zero-length match */
2182 if (mg->mg_flags & MGf_MINMATCH)
2187 #ifdef PERL_SAWAMPERSAND
2188 if ( RXp_NPARENS(prog)
2190 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
2191 || (dynpm->op_pmflags & PMf_KEEPCOPY)
2195 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
2196 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
2197 * only on the first iteration. Therefore we need to copy $' as well
2198 * as $&, to make the rest of the string available for captures in
2199 * subsequent iterations */
2200 if (! (global && gimme == G_ARRAY))
2201 r_flags |= REXEC_COPY_SKIP_POST;
2203 #ifdef PERL_SAWAMPERSAND
2204 if (dynpm->op_pmflags & PMf_KEEPCOPY)
2205 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
2206 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
2213 s = truebase + curpos;
2215 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
2216 had_zerolen, TARG, NULL, r_flags))
2220 if (dynpm->op_pmflags & PMf_ONCE)
2222 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
2224 dynpm->op_pmflags |= PMf_USED;
2228 RXp_MATCH_TAINTED_on(prog);
2229 TAINT_IF(RXp_MATCH_TAINTED(prog));
2233 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
2235 mg = sv_magicext_mglob(TARG);
2236 MgBYTEPOS_set(mg, TARG, truebase, RXp_OFFS(prog)[0].end);
2237 if (RXp_ZERO_LEN(prog))
2238 mg->mg_flags |= MGf_MINMATCH;
2240 mg->mg_flags &= ~MGf_MINMATCH;
2243 if ((!RXp_NPARENS(prog) && !global) || gimme != G_ARRAY) {
2244 LEAVE_SCOPE(oldsave);
2248 /* push captures on stack */
2251 const I32 nparens = RXp_NPARENS(prog);
2252 I32 i = (global && !nparens) ? 1 : 0;
2254 SPAGAIN; /* EVAL blocks could move the stack. */
2255 EXTEND(SP, nparens + i);
2256 EXTEND_MORTAL(nparens + i);
2257 for (i = !i; i <= nparens; i++) {
2258 PUSHs(sv_newmortal());
2259 if (LIKELY((RXp_OFFS(prog)[i].start != -1)
2260 && RXp_OFFS(prog)[i].end != -1 ))
2262 const I32 len = RXp_OFFS(prog)[i].end - RXp_OFFS(prog)[i].start;
2263 const char * const s = RXp_OFFS(prog)[i].start + truebase;
2264 if (UNLIKELY( RXp_OFFS(prog)[i].end < 0
2265 || RXp_OFFS(prog)[i].start < 0
2267 || len > strend - s)
2269 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
2270 "start=%ld, end=%ld, s=%p, strend=%p, len=%" UVuf,
2271 (long) i, (long) RXp_OFFS(prog)[i].start,
2272 (long)RXp_OFFS(prog)[i].end, s, strend, (UV) len);
2273 sv_setpvn(*SP, s, len);
2274 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
2279 curpos = (UV)RXp_OFFS(prog)[0].end;
2280 had_zerolen = RXp_ZERO_LEN(prog);
2281 PUTBACK; /* EVAL blocks may use stack */
2282 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
2285 LEAVE_SCOPE(oldsave);
2288 NOT_REACHED; /* NOTREACHED */
2291 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
2293 mg = mg_find_mglob(TARG);
2297 LEAVE_SCOPE(oldsave);
2298 if (gimme == G_ARRAY)
2304 Perl_do_readline(pTHX)
2306 dSP; dTARGETSTACKED;
2311 IO * const io = GvIO(PL_last_in_gv);
2312 const I32 type = PL_op->op_type;
2313 const U8 gimme = GIMME_V;
2316 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
2318 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
2319 if (gimme == G_SCALAR) {
2321 SvSetSV_nosteal(TARG, TOPs);
2331 if (IoFLAGS(io) & IOf_ARGV) {
2332 if (IoFLAGS(io) & IOf_START) {
2334 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) {
2335 IoFLAGS(io) &= ~IOf_START;
2336 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
2337 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
2338 sv_setpvs(GvSVn(PL_last_in_gv), "-");
2339 SvSETMAGIC(GvSV(PL_last_in_gv));
2344 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
2345 if (!fp) { /* Note: fp != IoIFP(io) */
2346 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
2349 else if (type == OP_GLOB)
2350 fp = Perl_start_glob(aTHX_ POPs, io);
2352 else if (type == OP_GLOB)
2354 else if (IoTYPE(io) == IoTYPE_WRONLY) {
2355 report_wrongway_fh(PL_last_in_gv, '>');
2359 if ((!io || !(IoFLAGS(io) & IOf_START))
2360 && ckWARN(WARN_CLOSED)
2363 report_evil_fh(PL_last_in_gv);
2365 if (gimme == G_SCALAR) {
2366 /* undef TARG, and push that undefined value */
2367 if (type != OP_RCATLINE) {
2375 if (gimme == G_SCALAR) {
2377 if (type == OP_RCATLINE && SvGMAGICAL(sv))
2380 if (type == OP_RCATLINE)
2381 SvPV_force_nomg_nolen(sv);
2385 else if (isGV_with_GP(sv)) {
2386 SvPV_force_nomg_nolen(sv);
2388 SvUPGRADE(sv, SVt_PV);
2389 tmplen = SvLEN(sv); /* remember if already alloced */
2390 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
2391 /* try short-buffering it. Please update t/op/readline.t
2392 * if you change the growth length.
2397 if (type == OP_RCATLINE && SvOK(sv)) {
2399 SvPV_force_nomg_nolen(sv);
2405 sv = sv_2mortal(newSV(80));
2409 /* This should not be marked tainted if the fp is marked clean */
2410 #define MAYBE_TAINT_LINE(io, sv) \
2411 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
2416 /* delay EOF state for a snarfed empty file */
2417 #define SNARF_EOF(gimme,rs,io,sv) \
2418 (gimme != G_SCALAR || SvCUR(sv) \
2419 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
2423 if (!sv_gets(sv, fp, offset)
2425 || SNARF_EOF(gimme, PL_rs, io, sv)
2426 || PerlIO_error(fp)))
2428 PerlIO_clearerr(fp);
2429 if (IoFLAGS(io) & IOf_ARGV) {
2430 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
2433 (void)do_close(PL_last_in_gv, FALSE);
2435 else if (type == OP_GLOB) {
2436 if (!do_close(PL_last_in_gv, FALSE)) {
2437 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
2438 "glob failed (child exited with status %d%s)",
2439 (int)(STATUS_CURRENT >> 8),
2440 (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
2443 if (gimme == G_SCALAR) {
2444 if (type != OP_RCATLINE) {
2445 SV_CHECK_THINKFIRST_COW_DROP(TARG);
2451 MAYBE_TAINT_LINE(io, sv);
2454 MAYBE_TAINT_LINE(io, sv);
2456 IoFLAGS(io) |= IOf_NOLINE;
2460 if (type == OP_GLOB) {
2464 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
2465 char * const tmps = SvEND(sv) - 1;
2466 if (*tmps == *SvPVX_const(PL_rs)) {
2468 SvCUR_set(sv, SvCUR(sv) - 1);
2471 for (t1 = SvPVX_const(sv); *t1; t1++)
2473 if (strchr("*%?", *t1))
2475 if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
2478 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
2479 (void)POPs; /* Unmatched wildcard? Chuck it... */
2482 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
2483 if (ckWARN(WARN_UTF8)) {
2484 const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
2485 const STRLEN len = SvCUR(sv) - offset;
2488 if (!is_utf8_string_loc(s, len, &f))
2489 /* Emulate :encoding(utf8) warning in the same case. */
2490 Perl_warner(aTHX_ packWARN(WARN_UTF8),
2491 "utf8 \"\\x%02X\" does not map to Unicode",
2492 f < (U8*)SvEND(sv) ? *f : 0);
2495 if (gimme == G_ARRAY) {
2496 if (SvLEN(sv) - SvCUR(sv) > 20) {
2497 SvPV_shrink_to_cur(sv);
2499 sv = sv_2mortal(newSV(80));
2502 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
2503 /* try to reclaim a bit of scalar space (only on 1st alloc) */
2504 const STRLEN new_len
2505 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */
2506 SvPV_renew(sv, new_len);
2517 SV * const keysv = POPs;
2518 HV * const hv = MUTABLE_HV(POPs);
2519 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2520 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2522 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2523 bool preeminent = TRUE;
2525 if (SvTYPE(hv) != SVt_PVHV)
2532 /* If we can determine whether the element exist,
2533 * Try to preserve the existenceness of a tied hash
2534 * element by using EXISTS and DELETE if possible.
2535 * Fallback to FETCH and STORE otherwise. */
2536 if (SvCANEXISTDELETE(hv))
2537 preeminent = hv_exists_ent(hv, keysv, 0);
2540 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2541 svp = he ? &HeVAL(he) : NULL;
2543 if (!svp || !*svp || *svp == &PL_sv_undef) {
2547 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2549 lv = sv_newmortal();
2550 sv_upgrade(lv, SVt_PVLV);
2552 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
2553 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
2554 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2560 if (HvNAME_get(hv) && isGV_or_RVCV(*svp))
2561 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
2562 else if (preeminent)
2563 save_helem_flags(hv, keysv, svp,
2564 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
2566 SAVEHDELETE(hv, keysv);
2568 else if (PL_op->op_private & OPpDEREF) {
2569 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
2573 sv = (svp && *svp ? *svp : &PL_sv_undef);
2574 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
2575 * was to make C<local $tied{foo} = $tied{foo}> possible.
2576 * However, it seems no longer to be needed for that purpose, and
2577 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g>
2578 * would loop endlessly since the pos magic is getting set on the
2579 * mortal copy and lost. However, the copy has the effect of
2580 * triggering the get magic, and losing it altogether made things like
2581 * c<$tied{foo};> in void context no longer do get magic, which some
2582 * code relied on. Also, delayed triggering of magic on @+ and friends
2583 * meant the original regex may be out of scope by now. So as a
2584 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
2585 * being called too many times). */
2586 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
2593 /* a stripped-down version of Perl_softref2xv() for use by
2594 * pp_multideref(), which doesn't use PL_op->op_flags */
2597 S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
2600 if (PL_op->op_private & HINT_STRICT_REFS) {
2602 Perl_die(aTHX_ PL_no_symref_sv, sv,
2603 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
2605 Perl_die(aTHX_ PL_no_usym, what);
2608 Perl_die(aTHX_ PL_no_usym, what);
2609 return gv_fetchsv_nomg(sv, GV_ADD, type);
2613 /* Handle one or more aggregate derefs and array/hash indexings, e.g.
2614 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
2616 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
2617 * Each of these either contains a set of actions, or an argument, such as
2618 * an IV to use as an array index, or a lexical var to retrieve.
2619 * Several actions re stored per UV; we keep shifting new actions off the
2620 * one UV, and only reload when it becomes zero.
2625 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
2626 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
2627 UV actions = items->uv;
2630 /* this tells find_uninit_var() where we're up to */
2631 PL_multideref_pc = items;
2634 /* there are three main classes of action; the first retrieve
2635 * the initial AV or HV from a variable or the stack; the second
2636 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
2637 * the third an unrolled (/DREFHV, rv2hv, helem).
2639 switch (actions & MDEREF_ACTION_MASK) {
2642 actions = (++items)->uv;
2645 case MDEREF_AV_padav_aelem: /* $lex[...] */
2646 sv = PAD_SVl((++items)->pad_offset);
2649 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
2650 sv = UNOP_AUX_item_sv(++items);
2651 assert(isGV_with_GP(sv));
2652 sv = (SV*)GvAVn((GV*)sv);
2655 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
2660 goto do_AV_rv2av_aelem;
2663 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
2664 sv = UNOP_AUX_item_sv(++items);
2665 assert(isGV_with_GP(sv));
2666 sv = GvSVn((GV*)sv);
2667 goto do_AV_vivify_rv2av_aelem;
2669 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
2670 sv = PAD_SVl((++items)->pad_offset);
2673 do_AV_vivify_rv2av_aelem:
2674 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
2675 /* this is the OPpDEREF action normally found at the end of
2676 * ops like aelem, helem, rv2sv */
2677 sv = vivify_ref(sv, OPpDEREF_AV);
2681 /* this is basically a copy of pp_rv2av when it just has the
2684 if (LIKELY(SvROK(sv))) {
2685 if (UNLIKELY(SvAMAGIC(sv))) {
2686 sv = amagic_deref_call(sv, to_av_amg);
2689 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
2690 DIE(aTHX_ "Not an ARRAY reference");
2692 else if (SvTYPE(sv) != SVt_PVAV) {
2693 if (!isGV_with_GP(sv))
2694 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
2695 sv = MUTABLE_SV(GvAVn((GV*)sv));
2701 /* retrieve the key; this may be either a lexical or package
2702 * var (whose index/ptr is stored as an item) or a signed
2703 * integer constant stored as an item.
2706 IV elem = 0; /* to shut up stupid compiler warnings */
2709 assert(SvTYPE(sv) == SVt_PVAV);
2711 switch (actions & MDEREF_INDEX_MASK) {
2712 case MDEREF_INDEX_none:
2714 case MDEREF_INDEX_const:
2715 elem = (++items)->iv;
2717 case MDEREF_INDEX_padsv:
2718 elemsv = PAD_SVl((++items)->pad_offset);
2720 case MDEREF_INDEX_gvsv:
2721 elemsv = UNOP_AUX_item_sv(++items);
2722 assert(isGV_with_GP(elemsv));
2723 elemsv = GvSVn((GV*)elemsv);
2725 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
2726 && ckWARN(WARN_MISC)))
2727 Perl_warner(aTHX_ packWARN(WARN_MISC),
2728 "Use of reference \"%" SVf "\" as array index",
2730 /* the only time that S_find_uninit_var() needs this
2731 * is to determine which index value triggered the
2732 * undef warning. So just update it here. Note that
2733 * since we don't save and restore this var (e.g. for
2734 * tie or overload execution), its value will be
2735 * meaningless apart from just here */
2736 PL_multideref_pc = items;
2737 elem = SvIV(elemsv);
2742 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
2744 if (!(actions & MDEREF_FLAG_last)) {
2745 SV** svp = av_fetch((AV*)sv, elem, 1);
2746 if (!svp || ! (sv=*svp))
2747 DIE(aTHX_ PL_no_aelem, elem);
2751 if (PL_op->op_private &
2752 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2754 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2755 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
2758 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2759 sv = av_delete((AV*)sv, elem, discard);
2767 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2768 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2769 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2770 bool preeminent = TRUE;
2771 AV *const av = (AV*)sv;
2774 if (UNLIKELY(localizing)) {
2778 /* If we can determine whether the element exist,
2779 * Try to preserve the existenceness of a tied array
2780 * element by using EXISTS and DELETE if possible.
2781 * Fallback to FETCH and STORE otherwise. */
2782 if (SvCANEXISTDELETE(av))
2783 preeminent = av_exists(av, elem);
2786 svp = av_fetch(av, elem, lval && !defer);
2789 if (!svp || !(sv = *svp)) {
2792 DIE(aTHX_ PL_no_aelem, elem);
2793 len = av_tindex(av);
2794 sv = sv_2mortal(newSVavdefelem(av,
2795 /* Resolve a negative index now, unless it points
2796 * before the beginning of the array, in which
2797 * case record it for error reporting in
2798 * magic_setdefelem. */
2799 elem < 0 && len + elem >= 0
2800 ? len + elem : elem, 1));
2803 if (UNLIKELY(localizing)) {
2805 save_aelem(av, elem, svp);
2806 sv = *svp; /* may have changed */
2809 SAVEADELETE(av, elem);
2814 sv = (svp ? *svp : &PL_sv_undef);
2815 /* see note in pp_helem() */
2816 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
2833 case MDEREF_HV_padhv_helem: /* $lex{...} */
2834 sv = PAD_SVl((++items)->pad_offset);
2837 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
2838 sv = UNOP_AUX_item_sv(++items);
2839 assert(isGV_with_GP(sv));
2840 sv = (SV*)GvHVn((GV*)sv);
2843 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
2848 goto do_HV_rv2hv_helem;
2851 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
2852 sv = UNOP_AUX_item_sv(++items);
2853 assert(isGV_with_GP(sv));
2854 sv = GvSVn((GV*)sv);
2855 goto do_HV_vivify_rv2hv_helem;
2857 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
2858 sv = PAD_SVl((++items)->pad_offset);
2861 do_HV_vivify_rv2hv_helem:
2862 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
2863 /* this is the OPpDEREF action normally found at the end of
2864 * ops like aelem, helem, rv2sv */
2865 sv = vivify_ref(sv, OPpDEREF_HV);
2869 /* this is basically a copy of pp_rv2hv when it just has the
2870 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
2873 if (LIKELY(SvROK(sv))) {
2874 if (UNLIKELY(SvAMAGIC(sv))) {
2875 sv = amagic_deref_call(sv, to_hv_amg);
2878 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
2879 DIE(aTHX_ "Not a HASH reference");
2881 else if (SvTYPE(sv) != SVt_PVHV) {
2882 if (!isGV_with_GP(sv))
2883 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
2884 sv = MUTABLE_SV(GvHVn((GV*)sv));
2890 /* retrieve the key; this may be either a lexical / package
2891 * var or a string constant, whose index/ptr is stored as an
2894 SV *keysv = NULL; /* to shut up stupid compiler warnings */
2896 assert(SvTYPE(sv) == SVt_PVHV);
2898 switch (actions & MDEREF_INDEX_MASK) {
2899 case MDEREF_INDEX_none:
2902 case MDEREF_INDEX_const:
2903 keysv = UNOP_AUX_item_sv(++items);
2906 case MDEREF_INDEX_padsv:
2907 keysv = PAD_SVl((++items)->pad_offset);
2910 case MDEREF_INDEX_gvsv:
2911 keysv = UNOP_AUX_item_sv(++items);
2912 keysv = GvSVn((GV*)keysv);
2916 /* see comment above about setting this var */
2917 PL_multideref_pc = items;
2920 /* ensure that candidate CONSTs have been HEKified */
2921 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
2922 || SvTYPE(keysv) >= SVt_PVMG
2925 || SvIsCOW_shared_hash(keysv));
2927 /* this is basically a copy of pp_helem with OPpDEREF skipped */
2929 if (!(actions & MDEREF_FLAG_last)) {
2930 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
2931 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
2932 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2936 if (PL_op->op_private &
2937 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2939 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2940 sv = hv_exists_ent((HV*)sv, keysv, 0)
2941 ? &PL_sv_yes : &PL_sv_no;
2944 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2945 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
2953 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2954 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2955 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2956 bool preeminent = TRUE;
2958 HV * const hv = (HV*)sv;
2961 if (UNLIKELY(localizing)) {
2965 /* If we can determine whether the element exist,
2966 * Try to preserve the existenceness of a tied hash
2967 * element by using EXISTS and DELETE if possible.
2968 * Fallback to FETCH and STORE otherwise. */
2969 if (SvCANEXISTDELETE(hv))
2970 preeminent = hv_exists_ent(hv, keysv, 0);
2973 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2974 svp = he ? &HeVAL(he) : NULL;
2978 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
2982 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2983 lv = sv_newmortal();
2984 sv_upgrade(lv, SVt_PVLV);
2986 sv_magic(lv, key2 = newSVsv(keysv),
2987 PERL_MAGIC_defelem, NULL, 0);
2988 /* sv_magic() increments refcount */
2989 SvREFCNT_dec_NN(key2);
2990 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2996 if (HvNAME_get(hv) && isGV_or_RVCV(sv))
2997 save_gp(MUTABLE_GV(sv),
2998 !(PL_op->op_flags & OPf_SPECIAL));
2999 else if (preeminent) {
3000 save_helem_flags(hv, keysv, svp,
3001 (PL_op->op_flags & OPf_SPECIAL)
3002 ? 0 : SAVEf_SETMAGIC);
3003 sv = *svp; /* may have changed */
3006 SAVEHDELETE(hv, keysv);
3011 sv = (svp && *svp ? *svp : &PL_sv_undef);
3012 /* see note in pp_helem() */
3013 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
3022 actions >>= MDEREF_SHIFT;
3040 itersvp = CxITERVAR(cx);
3043 switch (CxTYPE(cx)) {
3045 case CXt_LOOP_LAZYSV: /* string increment */
3047 SV* cur = cx->blk_loop.state_u.lazysv.cur;
3048 SV *end = cx->blk_loop.state_u.lazysv.end;
3049 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
3050 It has SvPVX of "" and SvCUR of 0, which is what we want. */
3052 const char *max = SvPV_const(end, maxlen);
3053 if (DO_UTF8(end) && IN_UNI_8_BIT)
3054 maxlen = sv_len_utf8_nomg(end);
3055 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
3059 /* NB: on the first iteration, oldsv will have a ref count of at
3060 * least 2 (one extra from blk_loop.itersave), so the GV or pad
3061 * slot will get localised; on subsequent iterations the RC==1
3062 * optimisation may kick in and the SV will be reused. */
3063 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
3064 /* safe to reuse old SV */
3065 sv_setsv(oldsv, cur);
3069 /* we need a fresh SV every time so that loop body sees a
3070 * completely new SV for closures/references to work as
3072 *itersvp = newSVsv(cur);
3073 SvREFCNT_dec(oldsv);
3075 if (strEQ(SvPVX_const(cur), max))
3076 sv_setiv(cur, 0); /* terminate next time */
3082 case CXt_LOOP_LAZYIV: /* integer increment */
3084 IV cur = cx->blk_loop.state_u.lazyiv.cur;
3085 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
3089 /* see NB comment above */
3090 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
3091 /* safe to reuse old SV */
3093 if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV))
3096 /* Cheap SvIOK_only().
3097 * Assert that flags which SvIOK_only() would test or
3098 * clear can't be set, because we're SVt_IV */
3099 assert(!(SvFLAGS(oldsv) &
3100 (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK)))));
3101 SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK);
3102 /* SvIV_set() where sv_any points to head */
3103 oldsv->sv_u.svu_iv = cur;
3107 sv_setiv(oldsv, cur);
3111 /* we need a fresh SV every time so that loop body sees a
3112 * completely new SV for closures/references to work as they
3114 *itersvp = newSViv(cur);
3115 SvREFCNT_dec(oldsv);
3118 if (UNLIKELY(cur == IV_MAX)) {
3119 /* Handle end of range at IV_MAX */
3120 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
3122 ++cx->blk_loop.state_u.lazyiv.cur;
3126 case CXt_LOOP_LIST: /* for (1,2,3) */
3128 assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */
3129 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
3130 ix = (cx->blk_loop.state_u.stack.ix += inc);
3131 if (UNLIKELY(inc > 0
3132 ? ix > cx->blk_oldsp
3133 : ix <= cx->blk_loop.state_u.stack.basesp)
3137 sv = PL_stack_base[ix];
3139 goto loop_ary_common;
3141 case CXt_LOOP_ARY: /* for (@ary) */
3143 av = cx->blk_loop.state_u.ary.ary;
3144 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
3145 ix = (cx->blk_loop.state_u.ary.ix += inc);
3146 if (UNLIKELY(inc > 0
3152 if (UNLIKELY(SvRMAGICAL(av))) {
3153 SV * const * const svp = av_fetch(av, ix, FALSE);
3154 sv = svp ? *svp : NULL;
3157 sv = AvARRAY(av)[ix];
3162 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
3163 SvSetMagicSV(*itersvp, sv);
3168 if (UNLIKELY(SvIS_FREED(sv))) {
3170 Perl_croak(aTHX_ "Use of freed value in iteration");
3177 SvREFCNT_inc_simple_void_NN(sv);
3181 sv = newSVavdefelem(av, ix, 0);
3188 SvREFCNT_dec(oldsv);
3192 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
3195 /* Bypass pushing &PL_sv_yes and calling pp_and(); instead
3196 * jump straight to the AND op's op_other */
3197 assert(PL_op->op_next->op_type == OP_AND);
3198 assert(PL_op->op_next->op_ppaddr == Perl_pp_and);
3199 return cLOGOPx(PL_op->op_next)->op_other;
3202 /* Bypass pushing &PL_sv_no and calling pp_and(); instead
3203 * jump straight to the AND op's op_next */
3204 assert(PL_op->op_next->op_type == OP_AND);
3205 assert(PL_op->op_next->op_ppaddr == Perl_pp_and);
3206 /* pp_enteriter should have pre-extended the stack */
3207 EXTEND_SKIP(PL_stack_sp, 1);
3208 /* we only need this for the rare case where the OP_AND isn't
3209 * in void context, e.g. $x = do { for (..) {...} };
3210 * but its cheaper to just push it rather than testing first
3212 *++PL_stack_sp = &PL_sv_no;
3213 return PL_op->op_next->op_next;
3218 A description of how taint works in pattern matching and substitution.
3220 This is all conditional on NO_TAINT_SUPPORT not being defined. Under
3221 NO_TAINT_SUPPORT, taint-related operations should become no-ops.
3223 While the pattern is being assembled/concatenated and then compiled,
3224 PL_tainted will get set (via TAINT_set) if any component of the pattern
3225 is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
3226 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
3227 TAINT_get). It will also be set if any component of the pattern matches
3228 based on locale-dependent behavior.
3230 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
3231 the pattern is marked as tainted. This means that subsequent usage, such
3232 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
3233 on the new pattern too.
3235 RXf_TAINTED_SEEN is used post-execution by the get magic code
3236 of $1 et al to indicate whether the returned value should be tainted.
3237 It is the responsibility of the caller of the pattern (i.e. pp_match,
3238 pp_subst etc) to set this flag for any other circumstances where $1 needs
3241 The taint behaviour of pp_subst (and pp_substcont) is quite complex.
3243 There are three possible sources of taint
3245 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
3246 * the replacement string (or expression under /e)
3248 There are four destinations of taint and they are affected by the sources
3249 according to the rules below:
3251 * the return value (not including /r):
3252 tainted by the source string and pattern, but only for the
3253 number-of-iterations case; boolean returns aren't tainted;
3254 * the modified string (or modified copy under /r):
3255 tainted by the source string, pattern, and replacement strings;
3257 tainted by the pattern, and under 'use re "taint"', by the source
3259 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
3260 should always be unset before executing subsequent code.
3262 The overall action of pp_subst is:
3264 * at the start, set bits in rxtainted indicating the taint status of
3265 the various sources.
3267 * After each pattern execution, update the SUBST_TAINT_PAT bit in
3268 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
3269 pattern has subsequently become tainted via locale ops.
3271 * If control is being passed to pp_substcont to execute a /e block,
3272 save rxtainted in the CXt_SUBST block, for future use by
3275 * Whenever control is being returned to perl code (either by falling
3276 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
3277 use the flag bits in rxtainted to make all the appropriate types of
3278 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
3279 et al will appear tainted.
3281 pp_match is just a simpler version of the above.
3297 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
3298 See "how taint works" above */
3301 REGEXP *rx = PM_GETRE(pm);
3302 regexp *prog = ReANY(rx);
3304 int force_on_match = 0;
3305 const I32 oldsave = PL_savestack_ix;
3307 bool doutf8 = FALSE; /* whether replacement is in utf8 */
3312 /* known replacement string? */
3313 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
3317 if (PL_op->op_flags & OPf_STACKED)
3328 SvGETMAGIC(TARG); /* must come before cow check */
3330 /* note that a string might get converted to COW during matching */
3331 was_cow = cBOOL(SvIsCOW(TARG));
3333 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3334 #ifndef PERL_ANY_COW
3336 sv_force_normal_flags(TARG,0);
3338 if ((SvREADONLY(TARG)
3339 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
3340 || SvTYPE(TARG) > SVt_PVLV)
3341 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
3342 Perl_croak_no_modify();
3346 orig = SvPV_nomg(TARG, len);
3347 /* note we don't (yet) force the var into being a string; if we fail
3348 * to match, we leave as-is; on successful match however, we *will*
3349 * coerce into a string, then repeat the match */
3350 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
3353 /* only replace once? */
3354 once = !(rpm->op_pmflags & PMf_GLOBAL);
3356 /* See "how taint works" above */
3359 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
3360 | (RXp_ISTAINTED(prog) ? SUBST_TAINT_PAT : 0)
3361 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
3362 | ((once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
3363 ? SUBST_TAINT_BOOLRET : 0));
3369 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
3371 strend = orig + len;
3372 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len;
3373 maxiters = 2 * slen + 10; /* We can match twice at each
3374 position, once with zero-length,
3375 second time with non-zero. */
3377 /* handle the empty pattern */
3378 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
3379 if (PL_curpm == PL_reg_curpm) {
3380 if (PL_curpm_under) {
3381 if (PL_curpm_under == PL_reg_curpm) {
3382 Perl_croak(aTHX_ "Infinite recursion via empty pattern");
3384 pm = PL_curpm_under;
3394 #ifdef PERL_SAWAMPERSAND
3395 r_flags = ( RXp_NPARENS(prog)
3397 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
3398 || (rpm->op_pmflags & PMf_KEEPCOPY)
3403 r_flags = REXEC_COPY_STR;
3406 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
3409 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
3410 LEAVE_SCOPE(oldsave);
3415 /* known replacement string? */
3417 /* replacement needing upgrading? */
3418 if (DO_UTF8(TARG) && !doutf8) {
3419 nsv = sv_newmortal();
3421 sv_utf8_upgrade(nsv);
3422 c = SvPV_const(nsv, clen);
3426 c = SvPV_const(dstr, clen);
3427 doutf8 = DO_UTF8(dstr);
3430 if (SvTAINTED(dstr))
3431 rxtainted |= SUBST_TAINT_REPL;
3438 /* can do inplace substitution? */
3443 && (I32)clen <= RXp_MINLENRET(prog)
3445 || !(r_flags & REXEC_COPY_STR)
3446 || (!SvGMAGICAL(dstr) && !(RXp_EXTFLAGS(prog) & RXf_EVAL_SEEN))
3448 && !(RXp_EXTFLAGS(prog) & RXf_NO_INPLACE_SUBST)
3449 && (!doutf8 || SvUTF8(TARG))
3450 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
3454 /* string might have got converted to COW since we set was_cow */
3455 if (SvIsCOW(TARG)) {
3456 if (!force_on_match)
3458 assert(SvVOK(TARG));
3461 if (force_on_match) {
3462 /* redo the first match, this time with the orig var
3463 * forced into being a string */
3465 orig = SvPV_force_nomg(TARG, len);
3471 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
3472 rxtainted |= SUBST_TAINT_PAT;
3473 m = orig + RXp_OFFS(prog)[0].start;
3474 d = orig + RXp_OFFS(prog)[0].end;
3476 if (m - s > strend - d) { /* faster to shorten from end */
3479 Copy(c, m, clen, char);
3484 Move(d, m, i, char);
3488 SvCUR_set(TARG, m - s);
3490 else { /* faster from front */
3494 Move(s, d - i, i, char);
3497 Copy(c, d, clen, char);
3504 d = s = RXp_OFFS(prog)[0].start + orig;
3507 if (UNLIKELY(iters++ > maxiters))
3508 DIE(aTHX_ "Substitution loop");
3509 /* run time pattern taint, eg locale */
3510 if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
3511 rxtainted |= SUBST_TAINT_PAT;
3512 m = RXp_OFFS(prog)[0].start + orig;
3515 Move(s, d, i, char);
3519 Copy(c, d, clen, char);
3522 s = RXp_OFFS(prog)[0].end + orig;
3523 } while (CALLREGEXEC(rx, s, strend, orig,
3524 s == m, /* don't match same null twice */
3526 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3529 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
3530 Move(s, d, i+1, char); /* include the NUL */
3533 if (PL_op->op_private & OPpTRUEBOOL)
3534 PUSHs(iters ? &PL_sv_yes : &PL_sv_zero);
3543 if (force_on_match) {
3544 /* redo the first match, this time with the orig var
3545 * forced into being a string */
3547 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3548 /* I feel that it should be possible to avoid this mortal copy
3549 given that the code below copies into a new destination.
3550 However, I suspect it isn't worth the complexity of
3551 unravelling the C<goto force_it> for the small number of
3552 cases where it would be viable to drop into the copy code. */
3553 TARG = sv_2mortal(newSVsv(TARG));
3555 orig = SvPV_force_nomg(TARG, len);
3561 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
3562 rxtainted |= SUBST_TAINT_PAT;
3564 s = RXp_OFFS(prog)[0].start + orig;
3565 dstr = newSVpvn_flags(orig, s-orig,
3566 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
3571 /* note that a whole bunch of local vars are saved here for
3572 * use by pp_substcont: here's a list of them in case you're
3573 * searching for places in this sub that uses a particular var:
3574 * iters maxiters r_flags oldsave rxtainted orig dstr targ
3575 * s m strend rx once */
3577 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
3581 if (UNLIKELY(iters++ > maxiters))
3582 DIE(aTHX_ "Substitution loop");
3583 if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
3584 rxtainted |= SUBST_TAINT_PAT;
3585 if (RXp_MATCH_COPIED(prog) && RXp_SUBBEG(prog) != orig) {
3587 char *old_orig = orig;
3588 assert(RXp_SUBOFFSET(prog) == 0);
3590 orig = RXp_SUBBEG(prog);
3591 s = orig + (old_s - old_orig);
3592 strend = s + (strend - old_s);
3594 m = RXp_OFFS(prog)[0].start + orig;
3595 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
3596 s = RXp_OFFS(prog)[0].end + orig;
3598 /* replacement already stringified */
3600 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
3604 sv_catsv(dstr, repl);
3605 if (UNLIKELY(SvTAINTED(repl)))
3606 rxtainted |= SUBST_TAINT_REPL;
3610 } while (CALLREGEXEC(rx, s, strend, orig,
3611 s == m, /* Yields minend of 0 or 1 */
3613 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3614 assert(strend >= s);
3615 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
3617 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3618 /* From here on down we're using the copy, and leaving the original
3625 /* The match may make the string COW. If so, brilliant, because
3626 that's just saved us one malloc, copy and free - the regexp has
3627 donated the old buffer, and we malloc an entirely new one, rather
3628 than the regexp malloc()ing a buffer and copying our original,
3629 only for us to throw it away here during the substitution. */
3630 if (SvIsCOW(TARG)) {
3631 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
3637 SvPV_set(TARG, SvPVX(dstr));
3638 SvCUR_set(TARG, SvCUR(dstr));
3639 SvLEN_set(TARG, SvLEN(dstr));
3640 SvFLAGS(TARG) |= SvUTF8(dstr);
3641 SvPV_set(dstr, NULL);
3648 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3649 (void)SvPOK_only_UTF8(TARG);
3652 /* See "how taint works" above */
3654 if ((rxtainted & SUBST_TAINT_PAT) ||
3655 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
3656 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
3658 (RXp_MATCH_TAINTED_on(prog)); /* taint $1 et al */
3660 if (!(rxtainted & SUBST_TAINT_BOOLRET)
3661 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
3663 SvTAINTED_on(TOPs); /* taint return value */
3665 SvTAINTED_off(TOPs); /* may have got tainted earlier */
3667 /* needed for mg_set below */
3669 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
3673 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
3675 LEAVE_SCOPE(oldsave);
3685 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
3686 ++*PL_markstack_ptr;
3688 LEAVE_with_name("grep_item"); /* exit inner scope */
3691 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
3693 const U8 gimme = GIMME_V;
3695 LEAVE_with_name("grep"); /* exit outer scope */
3696 (void)POPMARK; /* pop src */
3697 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
3698 (void)POPMARK; /* pop dst */
3699 SP = PL_stack_base + POPMARK; /* pop original mark */
3700 if (gimme == G_SCALAR) {
3701 if (PL_op->op_private & OPpTRUEBOOL)
3702 PUSHs(items ? &PL_sv_yes : &PL_sv_zero);
3708 else if (gimme == G_ARRAY)
3715 ENTER_with_name("grep_item"); /* enter inner scope */
3718 src = PL_stack_base[TOPMARK];
3719 if (SvPADTMP(src)) {
3720 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
3726 RETURNOP(cLOGOP->op_other);
3730 /* leave_adjust_stacks():
3732 * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp),
3733 * positioning them at to_sp+1 onwards, and do the equivalent of a
3734 * FREEMPS and TAINT_NOT.
3736 * Not intended to be called in void context.
3738 * When leaving a sub, eval, do{} or other scope, the things that need
3739 * doing to process the return args are:
3740 * * in scalar context, only return the last arg (or PL_sv_undef if none);
3741 * * for the types of return that return copies of their args (such
3742 * as rvalue sub return), make a mortal copy of every return arg,
3743 * except where we can optimise the copy away without it being
3744 * semantically visible;
3745 * * make sure that the arg isn't prematurely freed; in the case of an
3746 * arg not copied, this may involve mortalising it. For example, in
3747 * C<sub f { my $x = ...; $x }>, $x would be freed when we do
3748 * CX_LEAVE_SCOPE(cx) unless it's protected or copied.
3750 * What condition to use when deciding whether to pass the arg through
3751 * or make a copy, is determined by the 'pass' arg; its valid values are:
3752 * 0: rvalue sub/eval exit
3753 * 1: other rvalue scope exit
3754 * 2: :lvalue sub exit in rvalue context
3755 * 3: :lvalue sub exit in lvalue context and other lvalue scope exits
3757 * There is a big issue with doing a FREETMPS. We would like to free any
3758 * temps created by the last statement which the sub executed, rather than
3759 * leaving them for the caller. In a situation where a sub call isn't
3760 * soon followed by a nextstate (e.g. nested recursive calls, a la
3761 * fibonacci()), temps can accumulate, causing memory and performance
3764 * On the other hand, we don't want to free any TEMPs which are keeping
3765 * alive any return args that we skipped copying; nor do we wish to undo
3766 * any mortalising done here.
3768 * The solution is to split the temps stack frame into two, with a cut
3769 * point delineating the two halves. We arrange that by the end of this
3770 * function, all the temps stack frame entries we wish to keep are in the
3771 * range PL_tmps_floor+1.. tmps_base-1, while the ones to free now are in
3772 * the range tmps_base .. PL_tmps_ix. During the course of this
3773 * function, tmps_base starts off as PL_tmps_floor+1, then increases
3774 * whenever we find or create a temp that we know should be kept. In
3775 * general the stuff above tmps_base is undecided until we reach the end,
3776 * and we may need a sort stage for that.
3778 * To determine whether a TEMP is keeping a return arg alive, every
3779 * arg that is kept rather than copied and which has the SvTEMP flag
3780 * set, has the flag temporarily unset, to mark it. At the end we scan
3781 * the temps stack frame above the cut for entries without SvTEMP and
3782 * keep them, while turning SvTEMP on again. Note that if we die before
3783 * the SvTEMPs flags are set again, its safe: at worst, subsequent use of
3784 * those SVs may be slightly less efficient.
3786 * In practice various optimisations for some common cases mean we can
3787 * avoid most of the scanning and swapping about with the temps stack.
3791 Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass)
3795 SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
3798 PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS;
3802 if (gimme == G_ARRAY) {
3803 nargs = SP - from_sp;
3807 assert(gimme == G_SCALAR);
3808 if (UNLIKELY(from_sp >= SP)) {
3809 /* no return args */
3810 assert(from_sp == SP);
3812 *++SP = &PL_sv_undef;
3822 /* common code for G_SCALAR and G_ARRAY */
3824 tmps_base = PL_tmps_floor + 1;
3828 /* pointer version of tmps_base. Not safe across temp stack
3832 EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
3833 tmps_basep = PL_tmps_stack + tmps_base;
3835 /* process each return arg */
3838 SV *sv = *from_sp++;
3840 assert(PL_tmps_ix + nargs < PL_tmps_max);
3842 /* PADTMPs with container set magic shouldn't appear in the
3843 * wild. This assert is more important for pp_leavesublv(),
3844 * but by testing for it here, we're more likely to catch
3845 * bad cases (what with :lvalue subs not being widely
3846 * deployed). The two issues are that for something like
3847 * sub :lvalue { $tied{foo} }
3849 * sub :lvalue { substr($foo,1,2) }
3850 * pp_leavesublv() will croak if the sub returns a PADTMP,
3851 * and currently functions like pp_substr() return a mortal
3852 * rather than using their PADTMP when returning a PVLV.
3853 * This is because the PVLV will hold a ref to $foo,
3854 * so $foo would get delayed in being freed while
3855 * the PADTMP SV remained in the PAD.
3856 * So if this assert fails it means either:
3857 * 1) there is pp code similar to pp_substr that is
3858 * returning a PADTMP instead of a mortal, and probably
3860 * 2) pp_leavesublv is making unwarranted assumptions
3861 * about always croaking on a PADTMP
3863 if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
3865 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
3866 assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
3872 pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3873 : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3874 : pass == 2 ? (!SvPADTMP(sv))
3877 /* pass through: skip copy for logic or optimisation
3878 * reasons; instead mortalise it, except that ... */
3882 /* ... since this SV is an SvTEMP , we don't need to
3883 * re-mortalise it; instead we just need to ensure
3884 * that its existing entry in the temps stack frame
3885 * ends up below the cut and so avoids being freed
3886 * this time round. We mark it as needing to be kept
3887 * by temporarily unsetting SvTEMP; then at the end,
3888 * we shuffle any !SvTEMP entries on the tmps stack
3889 * back below the cut.
3890 * However, there's a significant chance that there's
3891 * a 1:1 correspondence between the first few (or all)
3892 * elements in the return args stack frame and those
3893 * in the temps stack frame; e,g.:
3894 * sub f { ....; map {...} .... },
3895 * or if we're exiting multiple scopes and one of the
3896 * inner scopes has already made mortal copies of each
3899 * If so, this arg sv will correspond to the next item
3900 * on the tmps stack above the cut, and so can be kept
3901 * merely by moving the cut boundary up one, rather
3902 * than messing with SvTEMP. If all args are 1:1 then
3903 * we can avoid the sorting stage below completely.
3905 * If there are no items above the cut on the tmps
3906 * stack, then the SvTEMP must comne from an item
3907 * below the cut, so there's nothing to do.
3909 if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) {
3910 if (sv == *tmps_basep)
3916 else if (!SvPADTMP(sv)) {
3917 /* mortalise arg to avoid it being freed during save
3918 * stack unwinding. Pad tmps don't need mortalising as
3919 * they're never freed. This is the equivalent of
3920 * sv_2mortal(SvREFCNT_inc(sv)), except that:
3921 * * it assumes that the temps stack has already been
3923 * * it puts the new item at the cut rather than at
3924 * ++PL_tmps_ix, moving the previous occupant there
3927 if (!SvIMMORTAL(sv)) {
3928 SvREFCNT_inc_simple_void_NN(sv);
3930 /* Note that if there's nothing above the cut,
3931 * this copies the garbage one slot above
3932 * PL_tmps_ix onto itself. This is harmless (the
3933 * stack's already been extended), but might in
3934 * theory trigger warnings from tools like ASan
3936 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3942 /* Make a mortal copy of the SV.
3943 * The following code is the equivalent of sv_mortalcopy()
3945 * * it assumes the temps stack has already been extended;
3946 * * it optimises the copying for some simple SV types;
3947 * * it puts the new item at the cut rather than at
3948 * ++PL_tmps_ix, moving the previous occupant there
3951 SV *newsv = newSV(0);
3953 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3954 /* put it on the tmps stack early so it gets freed if we die */
3955 *tmps_basep++ = newsv;
3958 if (SvTYPE(sv) <= SVt_IV) {
3959 /* arg must be one of undef, IV/UV, or RV: skip
3960 * sv_setsv_flags() and do the copy directly */
3962 U32 srcflags = SvFLAGS(sv);
3964 assert(!SvGMAGICAL(sv));
3965 if (srcflags & (SVf_IOK|SVf_ROK)) {
3966 SET_SVANY_FOR_BODYLESS_IV(newsv);
3968 if (srcflags & SVf_ROK) {
3969 newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
3970 /* SV type plus flags */
3971 dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
3974 /* both src and dst are <= SVt_IV, so sv_any
3975 * points to the head; so access the heads
3976 * directly rather than going via sv_any.
3978 assert( &(sv->sv_u.svu_iv)
3979 == &(((XPVIV*) SvANY(sv))->xiv_iv));
3980 assert( &(newsv->sv_u.svu_iv)
3981 == &(((XPVIV*) SvANY(newsv))->xiv_iv));
3982 newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
3983 /* SV type plus flags */
3984 dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
3985 |(srcflags & SVf_IVisUV));
3989 assert(!(srcflags & SVf_OK));
3990 dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
3992 SvFLAGS(newsv) = dstflags;
3996 /* do the full sv_setsv() */
4000 old_base = tmps_basep - PL_tmps_stack;
4002 sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
4003 /* the mg_get or sv_setsv might have created new temps
4004 * or realloced the tmps stack; regrow and reload */
4005 EXTEND_MORTAL(nargs);
4006 tmps_basep = PL_tmps_stack + old_base;
4007 TAINT_NOT; /* Each item is independent */
4013 /* If there are any temps left above the cut, we need to sort
4014 * them into those to keep and those to free. The only ones to
4015 * keep are those for which we've temporarily unset SvTEMP.
4016 * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
4017 * swapping pairs as necessary. Stop when we meet in the middle.
4020 SV **top = PL_tmps_stack + PL_tmps_ix;
4021 while (tmps_basep <= top) {
4034 tmps_base = tmps_basep - PL_tmps_stack;
4037 PL_stack_sp = to_sp;
4039 /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
4040 while (PL_tmps_ix >= tmps_base) {
4041 SV* const sv = PL_tmps_stack[PL_tmps_ix--];
4043 PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
4047 SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
4053 /* also tail-called by pp_return */
4063 assert(CxTYPE(cx) == CXt_SUB);
4065 if (CxMULTICALL(cx)) {
4066 /* entry zero of a stack is always PL_sv_undef, which
4067 * simplifies converting a '()' return into undef in scalar context */
4068 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
4072 gimme = cx->blk_gimme;
4073 oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */
4075 if (gimme == G_VOID)
4076 PL_stack_sp = oldsp;
4078 leave_adjust_stacks(oldsp, oldsp, gimme, 0);
4081 cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */
4083 retop = cx->blk_sub.retop;
4090 /* clear (if possible) or abandon the current @_. If 'abandon' is true,
4091 * forces an abandon */
4094 Perl_clear_defarray(pTHX_ AV* av, bool abandon)
4096 const SSize_t fill = AvFILLp(av);
4098 PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
4100 if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) {
4105 AV *newav = newAV();
4106 av_extend(newav, fill);
4107 AvREIFY_only(newav);
4108 PAD_SVl(0) = MUTABLE_SV(newav);
4109 SvREFCNT_dec_NN(av);
4120 I32 old_savestack_ix;
4125 /* Locate the CV to call:
4126 * - most common case: RV->CV: f(), $ref->():
4127 * note that if a sub is compiled before its caller is compiled,
4128 * the stash entry will be a ref to a CV, rather than being a GV.
4129 * - second most common case: CV: $ref->method()
4132 /* a non-magic-RV -> CV ? */
4133 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
4134 cv = MUTABLE_CV(SvRV(sv));
4135 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
4139 cv = MUTABLE_CV(sv);
4142 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
4143 /* handle all the weird cases */
4144 switch (SvTYPE(sv)) {
4146 if (!isGV_with_GP(sv))
4150 cv = GvCVu((const GV *)sv);
4151 if (UNLIKELY(!cv)) {
4153 cv = sv_2cv(sv, &stash, &gv, 0);
4155 old_savestack_ix = PL_savestack_ix;
4166 if (UNLIKELY(SvAMAGIC(sv))) {
4167 sv = amagic_deref_call(sv, to_cv_amg);
4168 /* Don't SPAGAIN here. */
4174 if (UNLIKELY(!SvOK(sv)))
4175 DIE(aTHX_ PL_no_usym, "a subroutine");
4177 if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
4178 if (PL_op->op_flags & OPf_STACKED) /* hasargs */
4179 SP = PL_stack_base + POPMARK;
4182 if (GIMME_V == G_SCALAR)
4183 PUSHs(&PL_sv_undef);
4187 sym = SvPV_nomg_const(sv, len);
4188 if (PL_op->op_private & HINT_STRICT_REFS)
4189 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
4190 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
4193 cv = MUTABLE_CV(SvRV(sv));
4194 if (LIKELY(SvTYPE(cv) == SVt_PVCV))
4200 DIE(aTHX_ "Not a CODE reference");
4204 /* At this point we want to save PL_savestack_ix, either by doing a
4205 * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final
4206 * CV we will be using (so we don't know whether its XS, so we can't
4207 * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on
4208 * the save stack. So remember where we are currently on the save
4209 * stack, and later update the CX or scopestack entry accordingly. */
4210 old_savestack_ix = PL_savestack_ix;
4212 /* these two fields are in a union. If they ever become separate,
4213 * we have to test for both of them being null below */
4215 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
4216 while (UNLIKELY(!CvROOT(cv))) {
4220 /* anonymous or undef'd function leaves us no recourse */
4221 if (CvLEXICAL(cv) && CvHASGV(cv))
4222 DIE(aTHX_ "Undefined subroutine &%" SVf " called",
4223 SVfARG(cv_name(cv, NULL, 0)));
4224 if (CvANON(cv) || !CvHASGV(cv)) {
4225 DIE(aTHX_ "Undefined subroutine called");
4228 /* autoloaded stub? */
4229 if (cv != GvCV(gv = CvGV(cv))) {
4232 /* should call AUTOLOAD now? */
4235 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
4236 (GvNAMEUTF8(gv) ? SVf_UTF8 : 0)
4237 |(PL_op->op_flags & OPf_REF
4238 ? GV_AUTOLOAD_ISMETHOD
4240 cv = autogv ? GvCV(autogv) : NULL;
4243 sub_name = sv_newmortal();
4244 gv_efullname3(sub_name, gv, NULL);
4245 DIE(aTHX_ "Undefined subroutine &%" SVf " called", SVfARG(sub_name));
4249 /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
4250 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
4251 DIE(aTHX_ "Closure prototype called");
4253 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
4256 Perl_get_db_sub(aTHX_ &sv, cv);
4258 PL_curcopdb = PL_curcop;
4260 /* check for lsub that handles lvalue subroutines */
4261 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
4262 /* if lsub not found then fall back to DB::sub */
4263 if (!cv) cv = GvCV(PL_DBsub);
4265 cv = GvCV(PL_DBsub);
4268 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
4269 DIE(aTHX_ "No DB::sub routine defined");
4272 if (!(CvISXSUB(cv))) {
4273 /* This path taken at least 75% of the time */
4280 /* keep PADTMP args alive throughout the call (we need to do this
4281 * because @_ isn't refcounted). Note that we create the mortals
4282 * in the caller's tmps frame, so they won't be freed until after
4283 * we return from the sub.
4292 *svp = sv = sv_mortalcopy(sv);
4298 cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix);
4299 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
4300 cx_pushsub(cx, cv, PL_op->op_next, hasargs);
4302 padlist = CvPADLIST(cv);
4303 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2))
4304 pad_push(padlist, depth);
4305 PAD_SET_CUR_NOSAVE(padlist, depth);
4306 if (LIKELY(hasargs)) {
4307 AV *const av = MUTABLE_AV(PAD_SVl(0));
4311 defavp = &GvAV(PL_defgv);
4312 cx->blk_sub.savearray = *defavp;
4313 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
4315 /* it's the responsibility of whoever leaves a sub to ensure
4316 * that a clean, empty AV is left in pad[0]. This is normally
4317 * done by cx_popsub() */
4318 assert(!AvREAL(av) && AvFILLp(av) == -1);
4321 if (UNLIKELY(items - 1 > AvMAX(av))) {
4322 SV **ary = AvALLOC(av);
4323 Renew(ary, items, SV*);
4324 AvMAX(av) = items - 1;
4330 Copy(MARK+1,AvARRAY(av),items,SV*);
4331 AvFILLp(av) = items - 1;
4333 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
4335 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
4336 SVfARG(cv_name(cv, NULL, 0)));
4337 /* warning must come *after* we fully set up the context
4338 * stuff so that __WARN__ handlers can safely dounwind()
4341 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
4342 && ckWARN(WARN_RECURSION)
4343 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
4344 sub_crush_depth(cv);
4345 RETURNOP(CvSTART(cv));
4348 SSize_t markix = TOPMARK;
4352 /* pretend we did the ENTER earlier */
4353 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
4358 if (UNLIKELY(((PL_op->op_private
4359 & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
4360 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
4362 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
4363 SVfARG(cv_name(cv, NULL, 0)));
4365 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
4366 /* Need to copy @_ to stack. Alternative may be to
4367 * switch stack to @_, and copy return values
4368 * back. This would allow popping @_ in XSUB, e.g.. XXXX */
4369 AV * const av = GvAV(PL_defgv);
4370 const SSize_t items = AvFILL(av) + 1;
4374 const bool m = cBOOL(SvRMAGICAL(av));
4375 /* Mark is at the end of the stack. */
4377 for (; i < items; ++i)
4381 SV ** const svp = av_fetch(av, i, 0);
4382 sv = svp ? *svp : NULL;
4384 else sv = AvARRAY(av)[i];
4385 if (sv) SP[i+1] = sv;
4387 SP[i+1] = newSVavdefelem(av, i, 1);
4395 SV **mark = PL_stack_base + markix;
4396 SSize_t items = SP - mark;
4399 if (*mark && SvPADTMP(*mark)) {
4400 *mark = sv_mortalcopy(*mark);
4404 /* We assume first XSUB in &DB::sub is the called one. */
4405 if (UNLIKELY(PL_curcopdb)) {
4406 SAVEVPTR(PL_curcop);
4407 PL_curcop = PL_curcopdb;
4410 /* Do we need to open block here? XXXX */
4412 /* calculate gimme here as PL_op might get changed and then not
4413 * restored until the LEAVE further down */
4414 is_scalar = (GIMME_V == G_SCALAR);
4416 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
4418 CvXSUB(cv)(aTHX_ cv);
4420 #if defined DEBUGGING && !defined DEBUGGING_RE_ONLY
4421 /* This duplicates the check done in runops_debug(), but provides more
4422 * information in the common case of the fault being with an XSUB.
4424 * It should also catch an XSUB pushing more than it extends
4425 * in scalar context.
4427 if (PL_curstackinfo->si_stack_hwm < PL_stack_sp - PL_stack_base)
4428 Perl_croak_nocontext(
4429 "panic: XSUB %s::%s (%s) failed to extend arg stack: "
4430 "base=%p, sp=%p, hwm=%p\n",
4431 HvNAME(GvSTASH(CvGV(cv))), GvNAME(CvGV(cv)), CvFILE(cv),
4432 PL_stack_base, PL_stack_sp,
4433 PL_stack_base + PL_curstackinfo->si_stack_hwm);
4435 /* Enforce some sanity in scalar context. */
4437 SV **svp = PL_stack_base + markix + 1;
4438 if (svp != PL_stack_sp) {
4439 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
4449 Perl_sub_crush_depth(pTHX_ CV *cv)
4451 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
4454 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
4456 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%" SVf "\"",
4457 SVfARG(cv_name(cv,NULL,0)));
4463 /* like croak, but report in context of caller */
4466 Perl_croak_caller(const char *pat, ...)
4470 const PERL_CONTEXT *cx = caller_cx(0, NULL);
4472 /* make error appear at call site */
4474 PL_curcop = cx->blk_oldcop;
4476 va_start(args, pat);
4478 NOT_REACHED; /* NOTREACHED */
4487 SV* const elemsv = POPs;
4488 IV elem = SvIV(elemsv);
4489 AV *const av = MUTABLE_AV(POPs);
4490 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
4491 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
4492 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
4493 bool preeminent = TRUE;
4496 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
4497 Perl_warner(aTHX_ packWARN(WARN_MISC),
4498 "Use of reference \"%" SVf "\" as array index",
4500 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
4503 if (UNLIKELY(localizing)) {
4507 /* If we can determine whether the element exist,
4508 * Try to preserve the existenceness of a tied array
4509 * element by using EXISTS and DELETE if possible.
4510 * Fallback to FETCH and STORE otherwise. */
4511 if (SvCANEXISTDELETE(av))
4512 preeminent = av_exists(av, elem);
4515 svp = av_fetch(av, elem, lval && !defer);
4517 #ifdef PERL_MALLOC_WRAP
4518 if (SvUOK(elemsv)) {
4519 const UV uv = SvUV(elemsv);
4520 elem = uv > IV_MAX ? IV_MAX : uv;
4522 else if (SvNOK(elemsv))
4523 elem = (IV)SvNV(elemsv);
4525 static const char oom_array_extend[] =
4526 "Out of memory during array extend"; /* Duplicated in av.c */
4527 MEM_WRAP_CHECK_1(elem,SV*,oom_array_extend);
4530 if (!svp || !*svp) {
4533 DIE(aTHX_ PL_no_aelem, elem);
4534 len = av_tindex(av);
4535 mPUSHs(newSVavdefelem(av,
4536 /* Resolve a negative index now, unless it points before the
4537 beginning of the array, in which case record it for error
4538 reporting in magic_setdefelem. */
4539 elem < 0 && len + elem >= 0 ? len + elem : elem,
4543 if (UNLIKELY(localizing)) {
4545 save_aelem(av, elem, svp);
4547 SAVEADELETE(av, elem);
4549 else if (PL_op->op_private & OPpDEREF) {
4550 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
4554 sv = (svp ? *svp : &PL_sv_undef);
4555 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
4562 Perl_vivify_ref(pTHX_ SV *sv, U32 to_what)
4564 PERL_ARGS_ASSERT_VIVIFY_REF;
4569 Perl_croak_no_modify();
4570 prepare_SV_for_RV(sv);
4573 SvRV_set(sv, newSV(0));
4576 SvRV_set(sv, MUTABLE_SV(newAV()));
4579 SvRV_set(sv, MUTABLE_SV(newHV()));
4586 if (SvGMAGICAL(sv)) {
4587 /* copy the sv without magic to prevent magic from being
4589 SV* msv = sv_newmortal();
4590 sv_setsv_nomg(msv, sv);
4596 PERL_STATIC_INLINE HV *
4597 S_opmethod_stash(pTHX_ SV* meth)
4602 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
4603 ? (Perl_croak(aTHX_ "Can't call method \"%" SVf "\" without a "
4604 "package or object reference", SVfARG(meth)),
4606 : *(PL_stack_base + TOPMARK + 1);
4608 PERL_ARGS_ASSERT_OPMETHOD_STASH;
4612 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on an undefined value",
4615 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
4616 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
4617 stash = gv_stashsv(sv, GV_CACHE_ONLY);
4618 if (stash) return stash;
4622 ob = MUTABLE_SV(SvRV(sv));
4623 else if (!SvOK(sv)) goto undefined;
4624 else if (isGV_with_GP(sv)) {
4626 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
4627 "without a package or object reference",
4630 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') {
4631 assert(!LvTARGLEN(ob));
4635 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob));
4638 /* this isn't a reference */
4641 const char * const packname = SvPV_nomg_const(sv, packlen);
4642 const U32 packname_utf8 = SvUTF8(sv);
4643 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
4644 if (stash) return stash;
4646 if (!(iogv = gv_fetchpvn_flags(
4647 packname, packlen, packname_utf8, SVt_PVIO
4649 !(ob=MUTABLE_SV(GvIO(iogv))))
4651 /* this isn't the name of a filehandle either */
4654 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
4655 "without a package or object reference",
4658 /* assume it's a package name */
4659 stash = gv_stashpvn(packname, packlen, packname_utf8);
4660 if (stash) return stash;
4661 else return MUTABLE_HV(sv);
4663 /* it _is_ a filehandle name -- replace with a reference */
4664 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
4667 /* if we got here, ob should be an object or a glob */
4668 if (!ob || !(SvOBJECT(ob)
4669 || (isGV_with_GP(ob)
4670 && (ob = MUTABLE_SV(GvIO((const GV *)ob)))
4673 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on unblessed reference",
4674 SVfARG((SvPOK(meth) && SvPVX(meth) == PL_isa_DOES)
4675 ? newSVpvs_flags("DOES", SVs_TEMP)
4687 SV* const meth = TOPs;
4690 SV* const rmeth = SvRV(meth);
4691 if (SvTYPE(rmeth) == SVt_PVCV) {
4697 stash = opmethod_stash(meth);
4699 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4702 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4706 #define METHOD_CHECK_CACHE(stash,cache,meth) \
4707 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
4709 gv = MUTABLE_GV(HeVAL(he)); \
4710 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
4711 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
4713 XPUSHs(MUTABLE_SV(GvCV(gv))); \
4722 SV* const meth = cMETHOPx_meth(PL_op);
4723 HV* const stash = opmethod_stash(meth);
4725 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
4726 METHOD_CHECK_CACHE(stash, stash, meth);
4729 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4732 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4741 SV* const meth = cMETHOPx_meth(PL_op);
4742 HV* const stash = CopSTASH(PL_curcop);
4743 /* Actually, SUPER doesn't need real object's (or class') stash at all,
4744 * as it uses CopSTASH. However, we must ensure that object(class) is
4745 * correct (this check is done by S_opmethod_stash) */
4746 opmethod_stash(meth);
4748 if ((cache = HvMROMETA(stash)->super)) {
4749 METHOD_CHECK_CACHE(stash, cache, meth);
4752 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4755 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4763 SV* const meth = cMETHOPx_meth(PL_op);
4764 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4765 opmethod_stash(meth); /* not used but needed for error checks */
4767 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
4768 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4770 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4773 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4777 PP(pp_method_redir_super)
4782 SV* const meth = cMETHOPx_meth(PL_op);
4783 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4784 opmethod_stash(meth); /* not used but needed for error checks */
4786 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4787 else if ((cache = HvMROMETA(stash)->super)) {
4788 METHOD_CHECK_CACHE(stash, cache, meth);
4791 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4794 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4799 * ex: set ts=8 sts=4 sw=4 et: