3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
6 * You may distribute under the terms of either the GNU General Public
7 * License or the Artistic License, as specified in the README file.
12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
15 * Awake! Awake! Fear, Fire, Foes! Awake!
18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
21 /* This file contains 'hot' pp ("push/pop") functions that
22 * execute the opcodes that make up a perl program. A typical pp function
23 * expects to find its arguments on the stack, and usually pushes its
24 * results onto the stack, hence the 'pp' terminology. Each OP structure
25 * contains a pointer to the relevant pp_foo() function.
27 * By 'hot', we mean common ops whose execution speed is critical.
28 * By gathering them together into a single file, we encourage
29 * CPU cache hits on hot code. Also it could be taken as a warning not to
30 * change any code in this file unless you're sure it won't affect
35 #define PERL_IN_PP_HOT_C
49 PL_curcop = (COP*)PL_op;
50 TAINT_NOT; /* Each statement is presumed innocent */
51 PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;
61 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
62 PUSHs(save_scalar(cGVOP_gv));
64 PUSHs(GvSVn(cGVOP_gv));
69 /* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
76 /* This is sometimes called directly by pp_coreargs, pp_grepstart and
80 PUSHMARK(PL_stack_sp);
91 /* no PUTBACK, SETs doesn't inc/dec SP */
98 XPUSHs(MUTABLE_SV(cGVOP_gv));
103 /* also used for: pp_andassign() */
109 /* SP is not used to remove a variable that is saved across the
110 sv_2bool_flags call in SvTRUE_NN, if a RISC/CISC or low/high machine
111 register or load/store vs direct mem ops macro is introduced, this
112 should be a define block between direct PL_stack_sp and dSP operations,
113 presently, using PL_stack_sp is biased towards CISC CPUs */
114 SV * const sv = *PL_stack_sp;
118 if (PL_op->op_type == OP_AND)
120 return cLOGOP->op_other;
128 /* sassign keeps its args in the optree traditionally backwards.
129 So we pop them differently.
131 SV *left = POPs; SV *right = TOPs;
133 if (PL_op->op_private & OPpASSIGN_BACKWARDS) { /* {or,and,dor}assign */
134 SV * const temp = left;
135 left = right; right = temp;
137 assert(TAINTING_get || !TAINT_get);
138 if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
140 if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
142 SV * const cv = SvRV(right);
143 const U32 cv_type = SvTYPE(cv);
144 const bool is_gv = isGV_with_GP(left);
145 const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
151 /* Can do the optimisation if left (LVALUE) is not a typeglob,
152 right (RVALUE) is a reference to something, and we're in void
154 if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
155 /* Is the target symbol table currently empty? */
156 GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
157 if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
158 /* Good. Create a new proxy constant subroutine in the target.
159 The gv becomes a(nother) reference to the constant. */
160 SV *const value = SvRV(cv);
162 SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
163 SvPCS_IMPORTED_on(gv);
165 SvREFCNT_inc_simple_void(value);
171 /* Need to fix things up. */
173 /* Need to fix GV. */
174 left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
178 /* We've been returned a constant rather than a full subroutine,
179 but they expect a subroutine reference to apply. */
181 ENTER_with_name("sassign_coderef");
182 SvREFCNT_inc_void(SvRV(cv));
183 /* newCONSTSUB takes a reference count on the passed in SV
184 from us. We set the name to NULL, otherwise we get into
185 all sorts of fun as the reference to our new sub is
186 donated to the GV that we're about to assign to.
188 SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
191 LEAVE_with_name("sassign_coderef");
193 /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
195 First: ops for \&{"BONK"}; return us the constant in the
197 Second: ops for *{"BONK"} cause that symbol table entry
198 (and our reference to it) to be upgraded from RV
200 Thirdly: We get here. cv is actually PVGV now, and its
201 GvCV() is actually the subroutine we're looking for
203 So change the reference so that it points to the subroutine
204 of that typeglob, as that's what they were after all along.
206 GV *const upgraded = MUTABLE_GV(cv);
207 CV *const source = GvCV(upgraded);
210 assert(CvFLAGS(source) & CVf_CONST);
212 SvREFCNT_inc_simple_void_NN(source);
213 SvREFCNT_dec_NN(upgraded);
214 SvRV_set(right, MUTABLE_SV(source));
220 UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
221 (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
224 packWARN(WARN_MISC), "Useless assignment to a temporary"
226 SvSetMagicSV(left, right);
238 RETURNOP(SvTRUE_NN(sv) ? cLOGOP->op_other : cLOGOP->op_next);
245 TAINT_NOT; /* Each statement is presumed innocent */
247 PL_stack_sp = PL_stack_base + cx->blk_oldsp;
249 if (!(PL_op->op_flags & OPf_SPECIAL)) {
250 assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx));
258 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
263 const char *rpv = NULL;
265 bool rcopied = FALSE;
267 if (TARG == right && right != left) { /* $r = $l.$r */
268 rpv = SvPV_nomg_const(right, rlen);
269 rbyte = !DO_UTF8(right);
270 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
271 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */
275 if (TARG != left) { /* not $l .= $r */
277 const char* const lpv = SvPV_nomg_const(left, llen);
278 lbyte = !DO_UTF8(left);
279 sv_setpvn(TARG, lpv, llen);
285 else { /* $l .= $r and left == TARG */
287 if ((left == right /* $l .= $l */
288 || (PL_op->op_private & OPpTARGET_MY)) /* $l = $l . $r */
289 && ckWARN(WARN_UNINITIALIZED)
295 SvPV_force_nomg_nolen(left);
297 lbyte = !DO_UTF8(left);
303 rpv = SvPV_nomg_const(right, rlen);
304 rbyte = !DO_UTF8(right);
306 if (lbyte != rbyte) {
308 sv_utf8_upgrade_nomg(TARG);
311 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
312 sv_utf8_upgrade_nomg(right);
313 rpv = SvPV_nomg_const(right, rlen);
316 sv_catpvn_nomg(TARG, rpv, rlen);
323 /* push the elements of av onto the stack.
324 * Returns PL_op->op_next to allow tail-call optimisation of its callers */
327 S_pushav(pTHX_ AV* const av)
330 const SSize_t maxarg = AvFILL(av) + 1;
332 if (UNLIKELY(SvRMAGICAL(av))) {
334 for (i=0; i < (PADOFFSET)maxarg; i++) {
335 SV ** const svp = av_fetch(av, i, FALSE);
336 SP[i+1] = svp ? *svp : &PL_sv_undef;
341 for (i=0; i < (PADOFFSET)maxarg; i++) {
342 SV * const sv = AvARRAY(av)[i];
343 SP[i+1] = LIKELY(sv) ? sv : &PL_sv_undef;
352 /* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */
357 PADOFFSET base = PL_op->op_targ;
358 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
359 if (PL_op->op_flags & OPf_SPECIAL) {
360 /* fake the RHS of my ($x,$y,..) = @_ */
362 (void)S_pushav(aTHX_ GvAVn(PL_defgv));
366 /* note, this is only skipped for compile-time-known void cxt */
367 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
372 for (i = 0; i <count; i++)
373 *++SP = PAD_SV(base+i);
375 if (PL_op->op_private & OPpLVAL_INTRO) {
376 SV **svp = &(PAD_SVl(base));
377 const UV payload = (UV)(
378 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
379 | (count << SAVE_TIGHT_SHIFT)
380 | SAVEt_CLEARPADRANGE);
383 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
384 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT))
392 for (i = 0; i <count; i++)
393 SvPADSTALE_off(*svp++); /* mark lexical as active */
404 OP * const op = PL_op;
405 /* access PL_curpad once */
406 SV ** const padentry = &(PAD_SVl(op->op_targ));
411 PUTBACK; /* no pop/push after this, TOPs ok */
413 if (op->op_flags & OPf_MOD) {
414 if (op->op_private & OPpLVAL_INTRO)
415 if (!(op->op_private & OPpPAD_STATE))
416 save_clearsv(padentry);
417 if (op->op_private & OPpDEREF) {
418 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather
419 than TARG reduces the scope of TARG, so it does not
420 span the call to save_clearsv, resulting in smaller
422 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
432 /* pp_coreargs pushes a NULL to indicate no args passed to
433 * CORE::readline() */
436 tryAMAGICunTARGETlist(iter_amg, 0);
437 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
439 else PL_last_in_gv = PL_argvgv, PL_stack_sp--;
440 if (!isGV_with_GP(PL_last_in_gv)) {
441 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
442 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));
445 XPUSHs(MUTABLE_SV(PL_last_in_gv));
448 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
449 if (PL_last_in_gv == (GV *)&PL_sv_undef)
450 PL_last_in_gv = NULL;
452 assert(isGV_with_GP(PL_last_in_gv));
455 return do_readline();
463 tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
467 (SvIOK_notUV(left) && SvIOK_notUV(right))
468 ? (SvIVX(left) == SvIVX(right))
469 : ( do_ncmp(left, right) == 0)
475 /* also used for: pp_i_preinc() */
479 SV *sv = *PL_stack_sp;
481 if (LIKELY(((sv->sv_flags &
482 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
483 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
485 && SvIVX(sv) != IV_MAX)
487 SvIV_set(sv, SvIVX(sv) + 1);
489 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
496 /* also used for: pp_i_predec() */
500 SV *sv = *PL_stack_sp;
502 if (LIKELY(((sv->sv_flags &
503 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
504 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
506 && SvIVX(sv) != IV_MIN)
508 SvIV_set(sv, SvIVX(sv) - 1);
510 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
517 /* also used for: pp_orassign() */
528 if (PL_op->op_type == OP_OR)
530 RETURNOP(cLOGOP->op_other);
535 /* also used for: pp_dor() pp_dorassign() */
542 const int op_type = PL_op->op_type;
543 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);
548 if (UNLIKELY(!sv || !SvANY(sv))) {
549 if (op_type == OP_DOR)
551 RETURNOP(cLOGOP->op_other);
557 if (UNLIKELY(!sv || !SvANY(sv)))
562 switch (SvTYPE(sv)) {
564 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
568 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
572 if (CvROOT(sv) || CvXSUB(sv))
585 if(op_type == OP_DOR)
587 RETURNOP(cLOGOP->op_other);
589 /* assuming OP_DEFINED */
599 dSP; dATARGET; bool useleft; SV *svl, *svr;
601 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
605 #ifdef PERL_PRESERVE_IVUV
607 /* special-case some simple common cases */
608 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
610 U32 flags = (svl->sv_flags & svr->sv_flags);
611 if (flags & SVf_IOK) {
612 /* both args are simple IVs */
617 topl = ((UV)il) >> (UVSIZE * 8 - 2);
618 topr = ((UV)ir) >> (UVSIZE * 8 - 2);
620 /* if both are in a range that can't under/overflow, do a
621 * simple integer add: if the top of both numbers
622 * are 00 or 11, then it's safe */
623 if (!( ((topl+1) | (topr+1)) & 2)) {
625 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
631 else if (flags & SVf_NOK) {
632 /* both args are NVs */
637 #if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
638 !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
639 && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
641 nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
644 /* nothing was lost by converting to IVs */
647 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
655 useleft = USE_LEFT(svl);
656 /* We must see if we can perform the addition with integers if possible,
657 as the integer code detects overflow while the NV code doesn't.
658 If either argument hasn't had a numeric conversion yet attempt to get
659 the IV. It's important to do this now, rather than just assuming that
660 it's not IOK as a PV of "9223372036854775806" may not take well to NV
661 addition, and an SV which is NOK, NV=6.0 ought to be coerced to
662 integer in case the second argument is IV=9223372036854775806
663 We can (now) rely on sv_2iv to do the right thing, only setting the
664 public IOK flag if the value in the NV (or PV) slot is truly integer.
666 A side effect is that this also aggressively prefers integer maths over
667 fp maths for integer values.
669 How to detect overflow?
671 C 99 section 6.2.6.1 says
673 The range of nonnegative values of a signed integer type is a subrange
674 of the corresponding unsigned integer type, and the representation of
675 the same value in each type is the same. A computation involving
676 unsigned operands can never overflow, because a result that cannot be
677 represented by the resulting unsigned integer type is reduced modulo
678 the number that is one greater than the largest value that can be
679 represented by the resulting type.
683 which I read as "unsigned ints wrap."
685 signed integer overflow seems to be classed as "exception condition"
687 If an exceptional condition occurs during the evaluation of an
688 expression (that is, if the result is not mathematically defined or not
689 in the range of representable values for its type), the behavior is
692 (6.5, the 5th paragraph)
694 I had assumed that on 2s complement machines signed arithmetic would
695 wrap, hence coded pp_add and pp_subtract on the assumption that
696 everything perl builds on would be happy. After much wailing and
697 gnashing of teeth it would seem that irix64 knows its ANSI spec well,
698 knows that it doesn't need to, and doesn't. Bah. Anyway, the all-
699 unsigned code below is actually shorter than the old code. :-)
702 if (SvIV_please_nomg(svr)) {
703 /* Unless the left argument is integer in range we are going to have to
704 use NV maths. Hence only attempt to coerce the right argument if
705 we know the left is integer. */
713 /* left operand is undef, treat as zero. + 0 is identity,
714 Could SETi or SETu right now, but space optimise by not adding
715 lots of code to speed up what is probably a rarish case. */
717 /* Left operand is defined, so is it IV? */
718 if (SvIV_please_nomg(svl)) {
719 if ((auvok = SvUOK(svl)))
722 const IV aiv = SvIVX(svl);
725 auvok = 1; /* Now acting as a sign flag. */
727 auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
734 bool result_good = 0;
737 bool buvok = SvUOK(svr);
742 const IV biv = SvIVX(svr);
747 buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
749 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
750 else "IV" now, independent of how it came in.
751 if a, b represents positive, A, B negative, a maps to -A etc
756 all UV maths. negate result if A negative.
757 add if signs same, subtract if signs differ. */
763 /* Must get smaller */
769 /* result really should be -(auv-buv). as its negation
770 of true value, need to swap our result flag */
787 if (result <= (UV)IV_MIN)
788 SETi(result == (UV)IV_MIN
789 ? IV_MIN : -(IV)result);
791 /* result valid, but out of range for IV. */
796 } /* Overflow, drop through to NVs. */
801 useleft = USE_LEFT(svl);
805 NV value = SvNV_nomg(svr);
808 /* left operand is undef, treat as zero. + 0.0 is identity. */
812 SETn( value + SvNV_nomg(svl) );
818 /* also used for: pp_aelemfast_lex() */
823 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
824 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
825 const U32 lval = PL_op->op_flags & OPf_MOD;
826 const I8 key = (I8)PL_op->op_private;
830 assert(SvTYPE(av) == SVt_PVAV);
834 /* inlined av_fetch() for simple cases ... */
835 if (!SvRMAGICAL(av) && key >= 0 && key <= AvFILLp(av)) {
836 sv = AvARRAY(av)[key];
843 /* ... else do it the hard way */
844 svp = av_fetch(av, key, lval);
845 sv = (svp ? *svp : &PL_sv_undef);
847 if (UNLIKELY(!svp && lval))
848 DIE(aTHX_ PL_no_aelem, (int)key);
850 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
860 do_join(TARG, *MARK, MARK, SP);
866 /* Oversized hot code. */
868 /* also used for: pp_say() */
872 dSP; dMARK; dORIGMARK;
876 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;
880 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
883 if (MARK == ORIGMARK) {
884 /* If using default handle then we need to make space to
885 * pass object as 1st arg, so move other args up ...
889 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
892 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
894 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
895 | (PL_op->op_type == OP_SAY
896 ? TIED_METHOD_SAY : 0)), sp - mark);
899 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
900 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
903 SETERRNO(EBADF,RMS_IFI);
906 else if (!(fp = IoOFP(io))) {
908 report_wrongway_fh(gv, '<');
911 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);
915 SV * const ofs = GvSV(PL_ofsgv); /* $, */
917 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
919 if (!do_print(*MARK, fp))
923 /* don't use 'ofs' here - it may be invalidated by magic callbacks */
924 if (!do_print(GvSV(PL_ofsgv), fp)) {
933 if (!do_print(*MARK, fp))
941 if (PL_op->op_type == OP_SAY) {
942 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))
945 else if (PL_ors_sv && SvOK(PL_ors_sv))
946 if (!do_print(PL_ors_sv, fp)) /* $\ */
949 if (IoFLAGS(io) & IOf_FLUSH)
950 if (PerlIO_flush(fp) == EOF)
960 XPUSHs(&PL_sv_undef);
965 /* do the common parts of pp_padhv() and pp_rv2hv()
966 * It assumes the caller has done EXTEND(SP, 1) or equivalent.
967 * 'is_keys' indicates the OPpPADHV_ISKEYS/OPpRV2HV_ISKEYS flag is set.
968 * 'has_targ' indicates that the op has a target - this should
969 * be a compile-time constant so that the code can constant-folded as
973 PERL_STATIC_INLINE OP*
974 S_padhv_rv2hv_common(pTHX_ HV *hv, U8 gimme, bool is_keys, bool has_targ)
983 assert(PL_op->op_type == OP_PADHV || PL_op->op_type == OP_RV2HV);
985 if (gimme == G_ARRAY) {
991 /* 'keys %h' masquerading as '%h': reset iterator */
992 (void)hv_iterinit(hv);
997 is_bool = ( PL_op->op_private & OPpTRUEBOOL
998 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
999 && block_gimme() == G_VOID));
1000 is_tied = SvRMAGICAL(hv) && (mg = mg_find(MUTABLE_SV(hv), PERL_MAGIC_tied));
1002 if (UNLIKELY(is_tied)) {
1003 if (is_keys && !is_bool) {
1005 while (hv_iternext(hv))
1010 sv = magic_scalarpack(hv, mg);
1017 sv = i ? &PL_sv_yes : &PL_sv_zero;
1028 #ifdef PERL_OP_PARENT
1030 /* parent op should be an unused OP_KEYS whose targ we can
1035 assert(!OpHAS_SIBLING(PL_op));
1036 k = PL_op->op_sibparent;
1037 assert(k->op_type == OP_KEYS);
1038 TARG = PAD_SV(k->op_targ);
1052 /* This is also called directly by pp_lvavref. */
1057 assert(SvTYPE(TARG) == SVt_PVAV);
1058 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO ))
1059 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) ))
1060 SAVECLEARSV(PAD_SVl(PL_op->op_targ));
1063 if (PL_op->op_flags & OPf_REF) {
1067 else if (PL_op->op_private & OPpMAYBE_LVSUB) {
1068 const I32 flags = is_lvalue_sub();
1069 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1070 if (GIMME_V == G_SCALAR)
1071 /* diag_listed_as: Can't return %s to lvalue scalar context */
1072 Perl_croak(aTHX_ "Can't return array to lvalue scalar context");
1079 if (gimme == G_ARRAY)
1080 return S_pushav(aTHX_ (AV*)TARG);
1082 if (gimme == G_SCALAR) {
1083 const SSize_t maxarg = AvFILL(MUTABLE_AV(TARG)) + 1;
1086 else if (PL_op->op_private & OPpTRUEBOOL)
1100 assert(SvTYPE(TARG) == SVt_PVHV);
1101 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO ))
1102 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) ))
1103 SAVECLEARSV(PAD_SVl(PL_op->op_targ));
1107 if (PL_op->op_flags & OPf_REF) {
1111 else if (PL_op->op_private & OPpMAYBE_LVSUB) {
1112 const I32 flags = is_lvalue_sub();
1113 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1114 if (GIMME_V == G_SCALAR)
1115 /* diag_listed_as: Can't return %s to lvalue scalar context */
1116 Perl_croak(aTHX_ "Can't return hash to lvalue scalar context");
1124 return S_padhv_rv2hv_common(aTHX_ (HV*)TARG, gimme,
1125 cBOOL(PL_op->op_private & OPpPADHV_ISKEYS),
1130 /* also used for: pp_rv2hv() */
1131 /* also called directly by pp_lvavref */
1136 const U8 gimme = GIMME_V;
1137 static const char an_array[] = "an ARRAY";
1138 static const char a_hash[] = "a HASH";
1139 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
1140 || PL_op->op_type == OP_LVAVREF;
1141 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
1145 if (UNLIKELY(SvAMAGIC(sv))) {
1146 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
1149 if (UNLIKELY(SvTYPE(sv) != type))
1150 /* diag_listed_as: Not an ARRAY reference */
1151 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
1152 else if (UNLIKELY(PL_op->op_flags & OPf_MOD
1153 && PL_op->op_private & OPpLVAL_INTRO))
1154 Perl_croak(aTHX_ "%s", PL_no_localize_ref);
1156 else if (UNLIKELY(SvTYPE(sv) != type)) {
1159 if (!isGV_with_GP(sv)) {
1160 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
1166 gv = MUTABLE_GV(sv);
1168 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
1169 if (PL_op->op_private & OPpLVAL_INTRO)
1170 sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
1172 if (PL_op->op_flags & OPf_REF) {
1176 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
1177 const I32 flags = is_lvalue_sub();
1178 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1179 if (gimme != G_ARRAY)
1180 goto croak_cant_return;
1187 AV *const av = MUTABLE_AV(sv);
1189 if (gimme == G_ARRAY) {
1192 return S_pushav(aTHX_ av);
1195 if (gimme == G_SCALAR) {
1196 const SSize_t maxarg = AvFILL(av) + 1;
1197 if (PL_op->op_private & OPpTRUEBOOL)
1198 SETs(maxarg ? &PL_sv_yes : &PL_sv_zero);
1207 return S_padhv_rv2hv_common(aTHX_ (HV*)sv, gimme,
1208 cBOOL(PL_op->op_private & OPpRV2HV_ISKEYS),
1214 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
1215 is_pp_rv2av ? "array" : "hash");
1220 S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
1222 PERL_ARGS_ASSERT_DO_ODDBALL;
1225 if (ckWARN(WARN_MISC)) {
1227 if (oddkey == firstkey &&
1229 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
1230 SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
1232 err = "Reference found where even-sized list expected";
1235 err = "Odd number of elements in hash assignment";
1236 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
1243 /* Do a mark and sweep with the SVf_BREAK flag to detect elements which
1244 * are common to both the LHS and RHS of an aassign, and replace them
1245 * with copies. All these copies are made before the actual list assign is
1248 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
1249 * element ($b) to the first LH element ($a), modifies $a; when the
1250 * second assignment is done, the second RH element now has the wrong
1251 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
1252 * Note that we don't need to make a mortal copy of $b.
1254 * The algorithm below works by, for every RHS element, mark the
1255 * corresponding LHS target element with SVf_BREAK. Then if the RHS
1256 * element is found with SVf_BREAK set, it means it would have been
1257 * modified, so make a copy.
1258 * Note that by scanning both LHS and RHS in lockstep, we avoid
1259 * unnecessary copies (like $b above) compared with a naive
1260 * "mark all LHS; copy all marked RHS; unmark all LHS".
1262 * If the LHS element is a 'my' declaration and has a refcount of 1, then
1263 * it can't be common and can be skipped.
1265 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
1266 * that we thought we didn't need to call S_aassign_copy_common(), but we
1267 * have anyway for sanity checking. If we find we need to copy, then panic.
1270 PERL_STATIC_INLINE void
1271 S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
1272 SV **firstrelem, SV **lastrelem
1281 SSize_t lcount = lastlelem - firstlelem + 1;
1282 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
1283 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
1284 bool copy_all = FALSE;
1286 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
1287 assert(firstlelem < lastlelem); /* at least 2 LH elements */
1288 assert(firstrelem < lastrelem); /* at least 2 RH elements */
1292 /* we never have to copy the first RH element; it can't be corrupted
1293 * by assigning something to the corresponding first LH element.
1294 * So this scan does in a loop: mark LHS[N]; test RHS[N+1]
1296 relem = firstrelem + 1;
1298 for (; relem <= lastrelem; relem++) {
1301 /* mark next LH element */
1303 if (--lcount >= 0) {
1306 if (UNLIKELY(!svl)) {/* skip AV alias marker */
1307 assert (lelem <= lastlelem);
1313 if (SvSMAGICAL(svl)) {
1316 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
1319 /* this LH element will consume all further args;
1320 * no need to mark any further LH elements (if any).
1321 * But we still need to scan any remaining RHS elements;
1322 * set lcount negative to distinguish from lcount == 0,
1323 * so the loop condition continues being true
1326 lelem--; /* no need to unmark this element */
1328 else if (!(do_rc1 && SvREFCNT(svl) == 1) && !SvIMMORTAL(svl)) {
1329 SvFLAGS(svl) |= SVf_BREAK;
1333 /* don't check RH element if no SVf_BREAK flags set yet */
1340 /* see if corresponding RH element needs copying */
1346 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
1347 U32 brk = (SvFLAGS(svr) & SVf_BREAK);
1351 /* op_dump(PL_op); */
1353 "panic: aassign skipped needed copy of common RH elem %"
1354 UVuf, (UV)(relem - firstrelem));
1358 TAINT_NOT; /* Each item is independent */
1360 /* Dear TODO test in t/op/sort.t, I love you.
1361 (It's relying on a panic, not a "semi-panic" from newSVsv()
1362 and then an assertion failure below.) */
1363 if (UNLIKELY(SvIS_FREED(svr))) {
1364 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
1367 /* avoid break flag while copying; otherwise COW etc
1369 SvFLAGS(svr) &= ~SVf_BREAK;
1370 /* Not newSVsv(), as it does not allow copy-on-write,
1371 resulting in wasteful copies.
1372 Also, we use SV_NOSTEAL in case the SV is used more than
1373 once, e.g. (...) = (f())[0,0]
1374 Where the same SV appears twice on the RHS without a ref
1375 count bump. (Although I suspect that the SV won't be
1376 stealable here anyway - DAPM).
1378 *relem = sv_mortalcopy_flags(svr,
1379 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1380 /* ... but restore afterwards in case it's needed again,
1381 * e.g. ($a,$b,$c) = (1,$a,$a)
1383 SvFLAGS(svr) |= brk;
1395 while (lelem > firstlelem) {
1396 SV * const svl = *(--lelem);
1398 SvFLAGS(svl) &= ~SVf_BREAK;
1407 SV **lastlelem = PL_stack_sp;
1408 SV **lastrelem = PL_stack_base + POPMARK;
1409 SV **firstrelem = PL_stack_base + POPMARK + 1;
1410 SV **firstlelem = lastrelem + 1;
1415 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
1416 * only need to save locally, not on the save stack */
1417 U16 old_delaymagic = PL_delaymagic;
1422 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
1424 /* If there's a common identifier on both sides we have to take
1425 * special care that assigning the identifier on the left doesn't
1426 * clobber a value on the right that's used later in the list.
1429 /* at least 2 LH and RH elements, or commonality isn't an issue */
1430 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1431 for (relem = firstrelem+1; relem <= lastrelem; relem++) {
1432 if (SvGMAGICAL(*relem))
1435 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1436 if (*lelem && SvSMAGICAL(*lelem))
1439 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
1440 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
1441 /* skip the scan if all scalars have a ref count of 1 */
1442 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1444 if (!sv || SvREFCNT(sv) == 1)
1446 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVAV)
1453 S_aassign_copy_common(aTHX_
1454 firstlelem, lastlelem, firstrelem, lastrelem
1464 /* on debugging builds, do the scan even if we've concluded we
1465 * don't need to, then panic if we find commonality. Note that the
1466 * scanner assumes at least 2 elements */
1467 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1478 if (relem > lastrelem)
1481 /* first lelem loop while there are still relems */
1482 while (LIKELY(lelem <= lastlelem)) {
1486 TAINT_NOT; /* Each item stands on its own, taintwise. */
1488 assert(relem <= lastrelem);
1489 if (UNLIKELY(!lsv)) {
1492 ASSUME(SvTYPE(lsv) == SVt_PVAV);
1495 switch (SvTYPE(lsv)) {
1500 SSize_t nelems = lastrelem - relem + 1;
1501 AV *ary = MUTABLE_AV(lsv);
1503 /* Assigning to an aggregate is tricky. First there is the
1504 * issue of commonality, e.g. @a = ($a[0]). Since the
1505 * stack isn't refcounted, clearing @a prior to storing
1506 * elements will free $a[0]. Similarly with
1507 * sub FETCH { $status[$_[1]] } @status = @tied[0,1];
1509 * The way to avoid these issues is to make the copy of each
1510 * SV (and we normally store a *copy* in the array) *before*
1511 * clearing the array. But this has a problem in that
1512 * if the code croaks during copying, the not-yet-stored copies
1513 * could leak. One way to avoid this is to make all the copies
1514 * mortal, but that's quite expensive.
1516 * The current solution to these issues is to use a chunk
1517 * of the tmps stack as a temporary refcounted-stack. SVs
1518 * will be put on there during processing to avoid leaks,
1519 * but will be removed again before the end of this block,
1520 * so free_tmps() is never normally called. Also, the
1521 * sv_refcnt of the SVs doesn't have to be manipulated, since
1522 * the ownership of 1 reference count is transferred directly
1523 * from the tmps stack to the AV when the SV is stored.
1525 * We disarm slots in the temps stack by storing PL_sv_undef
1526 * there: it doesn't matter if that SV's refcount is
1527 * repeatedly decremented during a croak. But usually this is
1528 * only an interim measure. By the end of this code block
1529 * we try where possible to not leave any PL_sv_undef's on the
1530 * tmps stack e.g. by shuffling newer entries down.
1532 * There is one case where we don't copy: non-magical
1533 * SvTEMP(sv)'s with a ref count of 1. The only owner of these
1534 * is on the tmps stack, so its safe to directly steal the SV
1535 * rather than copying. This is common in things like function
1536 * returns, map etc, which all return a list of such SVs.
1538 * Note however something like @a = (f())[0,0], where there is
1539 * a danger of the same SV being shared: this is avoided because
1540 * when the SV is stored as $a[0], its ref count gets bumped,
1541 * so the RC==1 test fails and the second element is copied
1544 * We also use one slot in the tmps stack to hold an extra
1545 * ref to the array, to ensure it doesn't get prematurely
1546 * freed. Again, this is removed before the end of this block.
1548 * Note that OPpASSIGN_COMMON_AGG is used to flag a possible
1549 * @a = ($a[0]) case, but the current implementation uses the
1550 * same algorithm regardless, so ignores that flag. (It *is*
1551 * used in the hash branch below, however).
1554 /* Reserve slots for ary, plus the elems we're about to copy,
1555 * then protect ary and temporarily void the remaining slots
1556 * with &PL_sv_undef */
1557 EXTEND_MORTAL(nelems + 1);
1558 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(ary);
1559 tmps_base = PL_tmps_ix + 1;
1560 for (i = 0; i < nelems; i++)
1561 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
1562 PL_tmps_ix += nelems;
1564 /* Make a copy of each RHS elem and save on the tmps_stack
1565 * (or pass through where we can optimise away the copy) */
1567 if (UNLIKELY(alias)) {
1568 U32 lval = (gimme == G_ARRAY)
1569 ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
1570 for (svp = relem; svp <= lastrelem; svp++) {
1575 DIE(aTHX_ "Assigned value is not a reference");
1576 if (SvTYPE(SvRV(rsv)) > SVt_PVLV)
1577 /* diag_listed_as: Assigned value is not %s reference */
1579 "Assigned value is not a SCALAR reference");
1581 *svp = rsv = sv_mortalcopy(rsv);
1582 /* XXX else check for weak refs? */
1583 rsv = SvREFCNT_inc_NN(SvRV(rsv));
1584 assert(tmps_base <= PL_tmps_max);
1585 PL_tmps_stack[tmps_base++] = rsv;
1589 for (svp = relem; svp <= lastrelem; svp++) {
1592 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
1593 /* can skip the copy */
1594 SvREFCNT_inc_simple_void_NN(rsv);
1599 /* do get before newSV, in case it dies and leaks */
1602 /* see comment in S_aassign_copy_common about
1604 sv_setsv_flags(nsv, rsv,
1605 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
1609 assert(tmps_base <= PL_tmps_max);
1610 PL_tmps_stack[tmps_base++] = rsv;
1614 if (SvRMAGICAL(ary) || AvFILLp(ary) >= 0) /* may be non-empty */
1617 /* store in the array, the SVs that are in the tmps stack */
1619 tmps_base -= nelems;
1621 if (SvMAGICAL(ary) || SvREADONLY(ary) || !AvREAL(ary)) {
1622 /* for arrays we can't cheat with, use the official API */
1623 av_extend(ary, nelems - 1);
1624 for (i = 0; i < nelems; i++) {
1625 SV **svp = &(PL_tmps_stack[tmps_base + i]);
1627 /* A tied store won't take ownership of rsv, so keep
1628 * the 1 refcnt on the tmps stack; otherwise disarm
1629 * the tmps stack entry */
1630 if (av_store(ary, i, rsv))
1631 *svp = &PL_sv_undef;
1632 /* av_store() may have added set magic to rsv */;
1635 /* disarm ary refcount: see comments below about leak */
1636 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
1639 /* directly access/set the guts of the AV */
1640 SSize_t fill = nelems - 1;
1641 if (fill > AvMAX(ary))
1642 av_extend_guts(ary, fill, &AvMAX(ary), &AvALLOC(ary),
1644 AvFILLp(ary) = fill;
1645 Copy(&(PL_tmps_stack[tmps_base]), AvARRAY(ary), nelems, SV*);
1646 /* Quietly remove all the SVs from the tmps stack slots,
1647 * since ary has now taken ownership of the refcnt.
1648 * Also remove ary: which will now leak if we die before
1649 * the SvREFCNT_dec_NN(ary) below */
1650 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
1651 Move(&PL_tmps_stack[tmps_base + nelems],
1652 &PL_tmps_stack[tmps_base - 1],
1653 PL_tmps_ix - (tmps_base + nelems) + 1,
1655 PL_tmps_ix -= (nelems + 1);
1658 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
/* it's assumed @ISA set magic can't die and leak ary */
1660 SvSETMAGIC(MUTABLE_SV(ary));
1661 SvREFCNT_dec_NN(ary);
1663 relem = lastrelem + 1;
1667 case SVt_PVHV: { /* normal hash */
1673 SSize_t nelems = lastrelem - relem + 1;
1674 HV *hash = MUTABLE_HV(lsv);
1676 if (UNLIKELY(nelems & 1)) {
1677 do_oddball(lastrelem, relem);
1678 /* we have firstlelem to reuse, it's not needed any more */
1679 *++lastrelem = &PL_sv_undef;
1683 /* See the SVt_PVAV branch above for a long description of
1684 * how the following all works. The main difference for hashes
1685 * is that we treat keys and values separately (and have
1686 * separate loops for them): as for arrays, values are always
1687 * copied (except for the SvTEMP optimisation), since they
1688 * need to be stored in the hash; while keys are only
1689 * processed where they might get prematurely freed or
1692 /* tmps stack slots:
1693 * * reserve a slot for the hash keepalive;
1694 * * reserve slots for the hash values we're about to copy;
1695 * * preallocate for the keys we'll possibly copy or refcount bump
1697 * then protect hash and temporarily void the remaining
1698 * value slots with &PL_sv_undef */
1699 EXTEND_MORTAL(nelems + 1);
1701 /* convert to number of key/value pairs */
1704 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(hash);
1705 tmps_base = PL_tmps_ix + 1;
1706 for (i = 0; i < nelems; i++)
1707 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
1708 PL_tmps_ix += nelems;
1710 /* Make a copy of each RHS hash value and save on the tmps_stack
1711 * (or pass through where we can optimise away the copy) */
1713 for (svp = relem + 1; svp <= lastrelem; svp += 2) {
1716 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
1717 /* can skip the copy */
1718 SvREFCNT_inc_simple_void_NN(rsv);
1723 /* do get before newSV, in case it dies and leaks */
1726 /* see comment in S_aassign_copy_common about
1728 sv_setsv_flags(nsv, rsv,
1729 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
1733 assert(tmps_base <= PL_tmps_max);
1734 PL_tmps_stack[tmps_base++] = rsv;
1736 tmps_base -= nelems;
1739 /* possibly protect keys */
1741 if (UNLIKELY(gimme == G_ARRAY)) {
1743 * @a = ((%h = ($$r, 1)), $r = "x");
1744 * $_++ for %h = (1,2,3,4);
1746 EXTEND_MORTAL(nelems);
1747 for (svp = relem; svp <= lastrelem; svp += 2)
1748 *svp = sv_mortalcopy_flags(*svp,
1749 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1751 else if (PL_op->op_private & OPpASSIGN_COMMON_AGG) {
1752 /* for possible commonality, e.g.
1754 * avoid premature freeing RHS keys by mortalising
1756 * For a magic element, make a copy so that its magic is
1757 * called *before* the hash is emptied (which may affect
1758 * a tied value for example).
1759 * In theory we should check for magic keys in all
1760 * cases, not just under OPpASSIGN_COMMON_AGG, but in
1761 * practice, !OPpASSIGN_COMMON_AGG implies only
1762 * constants or padtmps on the RHS.
1764 EXTEND_MORTAL(nelems);
1765 for (svp = relem; svp <= lastrelem; svp += 2) {
1767 if (UNLIKELY(SvGMAGICAL(rsv))) {
1769 *svp = sv_mortalcopy_flags(*svp,
1770 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1771 /* allow other branch to continue pushing
1772 * onto tmps stack without checking each time */
1773 n = (lastrelem - relem) >> 1;
1777 PL_tmps_stack[++PL_tmps_ix] =
1778 SvREFCNT_inc_simple_NN(rsv);
1782 if (SvRMAGICAL(hash) || HvUSEDKEYS(hash))
1785 /* now assign the keys and values to the hash */
1789 if (UNLIKELY(gimme == G_ARRAY)) {
1790 /* @a = (%h = (...)) etc */
1792 SV **topelem = relem;
1794 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
1797 /* remove duplicates from list we return */
1798 if (!hv_exists_ent(hash, key, 0)) {
1799 /* copy key back: possibly to an earlier
1800 * stack location if we encountered dups earlier,
1801 * The values will be updated later
1806 /* A tied store won't take ownership of val, so keep
1807 * the 1 refcnt on the tmps stack; otherwise disarm
1808 * the tmps stack entry */
1809 if (hv_store_ent(hash, key, val, 0))
1810 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
1813 /* hv_store_ent() may have added set magic to val */;
1816 if (topelem < svp) {
1817 /* at this point we have removed the duplicate key/value
1818 * pairs from the stack, but the remaining values may be
1819 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
1820 * the (a 2), but the stack now probably contains
1821 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
1822 * obliterates the earlier key. So refresh all values. */
1823 lastrelem = topelem - 1;
1824 while (relem < lastrelem) {
1826 he = hv_fetch_ent(hash, *relem++, 0, 0);
1827 *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
1833 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
1836 if (hv_store_ent(hash, key, val, 0))
1837 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
1840 /* hv_store_ent() may have added set magic to val */;
/* there are still some 'live' refcounts on the tmps stack
1847 * - usually caused by storing into a tied hash. So let
1848 * free_tmps() do the proper but slow job later.
1849 * Just disarm hash refcount: see comments below about leak
1851 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
1854 /* Quietly remove all the SVs from the tmps stack slots,
1855 * since hash has now taken ownership of the refcnt.
1856 * Also remove hash: which will now leak if we die before
1857 * the SvREFCNT_dec_NN(hash) below */
1858 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
1859 Move(&PL_tmps_stack[tmps_base + nelems],
1860 &PL_tmps_stack[tmps_base - 1],
1861 PL_tmps_ix - (tmps_base + nelems) + 1,
1863 PL_tmps_ix -= (nelems + 1);
1866 SvREFCNT_dec_NN(hash);
1868 relem = lastrelem + 1;
1873 if (!SvIMMORTAL(lsv)) {
1877 SvTEMP(lsv) && !SvSMAGICAL(lsv) && SvREFCNT(lsv) == 1 &&
1878 (!isGV_with_GP(lsv) || SvFAKE(lsv)) && ckWARN(WARN_MISC)
1881 packWARN(WARN_MISC),
1882 "Useless assignment to a temporary"
1885 /* avoid freeing $$lsv if it might be needed for further
1886 * elements, e.g. ($ref, $foo) = (1, $$ref) */
1888 && ( ((ref = SvRV(lsv)), SvREFCNT(ref)) == 1)
1889 && lelem <= lastlelem
1892 SvREFCNT_inc_simple_void_NN(ref);
1893 /* an unrolled sv_2mortal */
1895 if (UNLIKELY(ix >= PL_tmps_max))
1896 /* speculatively grow enough to cover other
1898 (void)tmps_grow_p(ix + (lastlelem - lelem));
1899 PL_tmps_stack[ix] = ref;
1902 sv_setsv(lsv, *relem);
1906 if (++relem > lastrelem)
1915 /* simplified lelem loop for when there are no relems left */
1916 while (LIKELY(lelem <= lastlelem)) {
1919 TAINT_NOT; /* Each item stands on its own, taintwise. */
1921 if (UNLIKELY(!lsv)) {
1923 ASSUME(SvTYPE(lsv) == SVt_PVAV);
1926 switch (SvTYPE(lsv)) {
1928 if (SvRMAGICAL(lsv) || AvFILLp((SV*)lsv) >= 0) {
1930 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
1936 if (SvRMAGICAL(lsv) || HvUSEDKEYS((HV*)lsv))
1941 if (!SvIMMORTAL(lsv)) {
1950 TAINT_NOT; /* result of list assign isn't tainted */
1952 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
1953 /* Will be used to set PL_tainting below */
1954 Uid_t tmp_uid = PerlProc_getuid();
1955 Uid_t tmp_euid = PerlProc_geteuid();
1956 Gid_t tmp_gid = PerlProc_getgid();
1957 Gid_t tmp_egid = PerlProc_getegid();
1959 /* XXX $> et al currently silently ignore failures */
1960 if (PL_delaymagic & DM_UID) {
1961 #ifdef HAS_SETRESUID
1963 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1964 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
1967 # ifdef HAS_SETREUID
1969 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1970 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
1973 if ((PL_delaymagic & DM_UID) == DM_RUID) {
1974 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
1975 PL_delaymagic &= ~DM_RUID;
1977 # endif /* HAS_SETRUID */
1979 if ((PL_delaymagic & DM_UID) == DM_EUID) {
1980 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
1981 PL_delaymagic &= ~DM_EUID;
1983 # endif /* HAS_SETEUID */
1984 if (PL_delaymagic & DM_UID) {
1985 if (PL_delaymagic_uid != PL_delaymagic_euid)
1986 DIE(aTHX_ "No setreuid available");
1987 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
1989 # endif /* HAS_SETREUID */
1990 #endif /* HAS_SETRESUID */
1992 tmp_uid = PerlProc_getuid();
1993 tmp_euid = PerlProc_geteuid();
1995 /* XXX $> et al currently silently ignore failures */
1996 if (PL_delaymagic & DM_GID) {
1997 #ifdef HAS_SETRESGID
1999 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
2000 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
2003 # ifdef HAS_SETREGID
2005 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
2006 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
2009 if ((PL_delaymagic & DM_GID) == DM_RGID) {
2010 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
2011 PL_delaymagic &= ~DM_RGID;
2013 # endif /* HAS_SETRGID */
2015 if ((PL_delaymagic & DM_GID) == DM_EGID) {
2016 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
2017 PL_delaymagic &= ~DM_EGID;
2019 # endif /* HAS_SETEGID */
2020 if (PL_delaymagic & DM_GID) {
2021 if (PL_delaymagic_gid != PL_delaymagic_egid)
2022 DIE(aTHX_ "No setregid available");
2023 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
2025 # endif /* HAS_SETREGID */
2026 #endif /* HAS_SETRESGID */
2028 tmp_gid = PerlProc_getgid();
2029 tmp_egid = PerlProc_getegid();
2031 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
2032 #ifdef NO_TAINT_SUPPORT
2033 PERL_UNUSED_VAR(tmp_uid);
2034 PERL_UNUSED_VAR(tmp_euid);
2035 PERL_UNUSED_VAR(tmp_gid);
2036 PERL_UNUSED_VAR(tmp_egid);
2039 PL_delaymagic = old_delaymagic;
2041 if (gimme == G_VOID)
2042 SP = firstrelem - 1;
2043 else if (gimme == G_SCALAR) {
2046 if (PL_op->op_private & OPpASSIGN_TRUEBOOL)
2047 SETs((firstlelem - firstrelem) ? &PL_sv_yes : &PL_sv_zero);
2050 SETi(firstlelem - firstrelem);
2062 PMOP * const pm = cPMOP;
2063 REGEXP * rx = PM_GETRE(pm);
2064 regexp *prog = ReANY(rx);
2065 SV * const pkg = RXp_ENGINE(prog)->qr_package(aTHX_ (rx));
2066 SV * const rv = sv_newmortal();
2070 SvUPGRADE(rv, SVt_IV);
2071 /* For a subroutine describing itself as "This is a hacky workaround" I'm
2072 loathe to use it here, but it seems to be the right fix. Or close.
2073 The key part appears to be that it's essential for pp_qr to return a new
2074 object (SV), which implies that there needs to be an effective way to
2075 generate a new SV from the existing SV that is pre-compiled in the
2077 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));
2080 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
2081 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
2082 *cvp = cv_clone(cv);
2083 SvREFCNT_dec_NN(cv);
2087 HV *const stash = gv_stashsv(pkg, GV_ADD);
2088 SvREFCNT_dec_NN(pkg);
2089 (void)sv_bless(rv, stash);
2092 if (UNLIKELY(RXp_ISTAINTED(prog))) {
2094 SvTAINTED_on(SvRV(rv));
2107 SSize_t curpos = 0; /* initial pos() or current $+[0] */
2110 const char *truebase; /* Start of string */
2111 REGEXP *rx = PM_GETRE(pm);
2112 regexp *prog = ReANY(rx);
2114 const U8 gimme = GIMME_V;
2116 const I32 oldsave = PL_savestack_ix;
2117 I32 had_zerolen = 0;
2120 if (PL_op->op_flags & OPf_STACKED)
2131 PUTBACK; /* EVAL blocks need stack_sp. */
2132 /* Skip get-magic if this is a qr// clone, because regcomp has
2134 truebase = prog->mother_re
2135 ? SvPV_nomg_const(TARG, len)
2136 : SvPV_const(TARG, len);
2138 DIE(aTHX_ "panic: pp_match");
2139 strend = truebase + len;
2140 rxtainted = (RXp_ISTAINTED(prog) ||
2141 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
2144 /* We need to know this in case we fail out early - pos() must be reset */
2145 global = dynpm->op_pmflags & PMf_GLOBAL;
2147 /* PMdf_USED is set after a ?? matches once */
2150 SvREADONLY(PL_regex_pad[pm->op_pmoffset])
2152 pm->op_pmflags & PMf_USED
2155 DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once"));
2159 /* handle the empty pattern */
2160 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
2161 if (PL_curpm == PL_reg_curpm) {
2162 if (PL_curpm_under) {
2163 if (PL_curpm_under == PL_reg_curpm) {
2164 Perl_croak(aTHX_ "Infinite recursion via empty pattern");
2166 pm = PL_curpm_under;
2176 if (RXp_MINLEN(prog) >= 0 && (STRLEN)RXp_MINLEN(prog) > len) {
2177 DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
2178 UVuf " < %" IVdf ")\n",
2179 (UV)len, (IV)RXp_MINLEN(prog)));
2183 /* get pos() if //g */
2185 mg = mg_find_mglob(TARG);
2186 if (mg && mg->mg_len >= 0) {
2187 curpos = MgBYTEPOS(mg, TARG, truebase, len);
2188 /* last time pos() was set, it was zero-length match */
2189 if (mg->mg_flags & MGf_MINMATCH)
2194 #ifdef PERL_SAWAMPERSAND
2195 if ( RXp_NPARENS(prog)
2197 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
2198 || (dynpm->op_pmflags & PMf_KEEPCOPY)
2202 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
2203 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
2204 * only on the first iteration. Therefore we need to copy $' as well
2205 * as $&, to make the rest of the string available for captures in
2206 * subsequent iterations */
2207 if (! (global && gimme == G_ARRAY))
2208 r_flags |= REXEC_COPY_SKIP_POST;
2210 #ifdef PERL_SAWAMPERSAND
2211 if (dynpm->op_pmflags & PMf_KEEPCOPY)
2212 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
2213 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
2220 s = truebase + curpos;
2222 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
2223 had_zerolen, TARG, NULL, r_flags))
2227 if (dynpm->op_pmflags & PMf_ONCE)
2229 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
2231 dynpm->op_pmflags |= PMf_USED;
2235 RXp_MATCH_TAINTED_on(prog);
2236 TAINT_IF(RXp_MATCH_TAINTED(prog));
2240 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
2242 mg = sv_magicext_mglob(TARG);
2243 MgBYTEPOS_set(mg, TARG, truebase, RXp_OFFS(prog)[0].end);
2244 if (RXp_ZERO_LEN(prog))
2245 mg->mg_flags |= MGf_MINMATCH;
2247 mg->mg_flags &= ~MGf_MINMATCH;
2250 if ((!RXp_NPARENS(prog) && !global) || gimme != G_ARRAY) {
2251 LEAVE_SCOPE(oldsave);
2255 /* push captures on stack */
2258 const I32 nparens = RXp_NPARENS(prog);
2259 I32 i = (global && !nparens) ? 1 : 0;
2261 SPAGAIN; /* EVAL blocks could move the stack. */
2262 EXTEND(SP, nparens + i);
2263 EXTEND_MORTAL(nparens + i);
2264 for (i = !i; i <= nparens; i++) {
2265 PUSHs(sv_newmortal());
2266 if (LIKELY((RXp_OFFS(prog)[i].start != -1)
2267 && RXp_OFFS(prog)[i].end != -1 ))
2269 const I32 len = RXp_OFFS(prog)[i].end - RXp_OFFS(prog)[i].start;
2270 const char * const s = RXp_OFFS(prog)[i].start + truebase;
2271 if (UNLIKELY( RXp_OFFS(prog)[i].end < 0
2272 || RXp_OFFS(prog)[i].start < 0
2274 || len > strend - s)
2276 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
2277 "start=%ld, end=%ld, s=%p, strend=%p, len=%" UVuf,
2278 (long) i, (long) RXp_OFFS(prog)[i].start,
2279 (long)RXp_OFFS(prog)[i].end, s, strend, (UV) len);
2280 sv_setpvn(*SP, s, len);
2281 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
2286 curpos = (UV)RXp_OFFS(prog)[0].end;
2287 had_zerolen = RXp_ZERO_LEN(prog);
2288 PUTBACK; /* EVAL blocks may use stack */
2289 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
2292 LEAVE_SCOPE(oldsave);
2295 NOT_REACHED; /* NOTREACHED */
2298 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
2300 mg = mg_find_mglob(TARG);
2304 LEAVE_SCOPE(oldsave);
2305 if (gimme == G_ARRAY)
2311 Perl_do_readline(pTHX)
2313 dSP; dTARGETSTACKED;
2318 IO * const io = GvIO(PL_last_in_gv);
2319 const I32 type = PL_op->op_type;
2320 const U8 gimme = GIMME_V;
2323 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
2325 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
2326 if (gimme == G_SCALAR) {
2328 SvSetSV_nosteal(TARG, TOPs);
2338 if (IoFLAGS(io) & IOf_ARGV) {
2339 if (IoFLAGS(io) & IOf_START) {
2341 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) {
2342 IoFLAGS(io) &= ~IOf_START;
2343 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
2344 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
2345 sv_setpvs(GvSVn(PL_last_in_gv), "-");
2346 SvSETMAGIC(GvSV(PL_last_in_gv));
2351 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
2352 if (!fp) { /* Note: fp != IoIFP(io) */
2353 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
2356 else if (type == OP_GLOB)
2357 fp = Perl_start_glob(aTHX_ POPs, io);
2359 else if (type == OP_GLOB)
2361 else if (IoTYPE(io) == IoTYPE_WRONLY) {
2362 report_wrongway_fh(PL_last_in_gv, '>');
2366 if ((!io || !(IoFLAGS(io) & IOf_START))
2367 && ckWARN(WARN_CLOSED)
2370 report_evil_fh(PL_last_in_gv);
2372 if (gimme == G_SCALAR) {
2373 /* undef TARG, and push that undefined value */
2374 if (type != OP_RCATLINE) {
2382 if (gimme == G_SCALAR) {
2384 if (type == OP_RCATLINE && SvGMAGICAL(sv))
2387 if (type == OP_RCATLINE)
2388 SvPV_force_nomg_nolen(sv);
2392 else if (isGV_with_GP(sv)) {
2393 SvPV_force_nomg_nolen(sv);
2395 SvUPGRADE(sv, SVt_PV);
2396 tmplen = SvLEN(sv); /* remember if already alloced */
2397 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
2398 /* try short-buffering it. Please update t/op/readline.t
2399 * if you change the growth length.
2404 if (type == OP_RCATLINE && SvOK(sv)) {
2406 SvPV_force_nomg_nolen(sv);
2412 sv = sv_2mortal(newSV(80));
2416 /* This should not be marked tainted if the fp is marked clean */
2417 #define MAYBE_TAINT_LINE(io, sv) \
2418 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
/* delay EOF state for a snarfed empty file.
 * Evaluates true (treat a failed sv_gets() as a real EOF) in every case
 * EXCEPT when slurping the whole file ($/ undef, RsSNARF) in scalar
 * context and nothing has been read yet (SvCUR(sv) == 0 and the stream's
 * IOf_NOLINE flag is clear): then it evaluates false, so an empty file
 * still yields one empty-string read before EOF is reported.
 * NOTE(review): intent inferred from the boolean structure and the
 * sv_gets() failure path that invokes this macro — confirm there. */
#define SNARF_EOF(gimme,rs,io,sv) \
    (gimme != G_SCALAR || SvCUR(sv) \
    || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
2430 if (!sv_gets(sv, fp, offset)
2432 || SNARF_EOF(gimme, PL_rs, io, sv)
2433 || PerlIO_error(fp)))
2435 PerlIO_clearerr(fp);
2436 if (IoFLAGS(io) & IOf_ARGV) {
2437 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
2440 (void)do_close(PL_last_in_gv, FALSE);
2442 else if (type == OP_GLOB) {
2443 if (!do_close(PL_last_in_gv, FALSE)) {
2444 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
2445 "glob failed (child exited with status %d%s)",
2446 (int)(STATUS_CURRENT >> 8),
2447 (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
2450 if (gimme == G_SCALAR) {
2451 if (type != OP_RCATLINE) {
2452 SV_CHECK_THINKFIRST_COW_DROP(TARG);
2458 MAYBE_TAINT_LINE(io, sv);
2461 MAYBE_TAINT_LINE(io, sv);
2463 IoFLAGS(io) |= IOf_NOLINE;
2467 if (type == OP_GLOB) {
2471 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
2472 char * const tmps = SvEND(sv) - 1;
2473 if (*tmps == *SvPVX_const(PL_rs)) {
2475 SvCUR_set(sv, SvCUR(sv) - 1);
2478 for (t1 = SvPVX_const(sv); *t1; t1++)
2480 if (strchr("*%?", *t1))
2482 if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
2485 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
2486 (void)POPs; /* Unmatched wildcard? Chuck it... */
2489 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
2490 if (ckWARN(WARN_UTF8)) {
2491 const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
2492 const STRLEN len = SvCUR(sv) - offset;
2495 if (!is_utf8_string_loc(s, len, &f))
2496 /* Emulate :encoding(utf8) warning in the same case. */
2497 Perl_warner(aTHX_ packWARN(WARN_UTF8),
2498 "utf8 \"\\x%02X\" does not map to Unicode",
2499 f < (U8*)SvEND(sv) ? *f : 0);
2502 if (gimme == G_ARRAY) {
2503 if (SvLEN(sv) - SvCUR(sv) > 20) {
2504 SvPV_shrink_to_cur(sv);
2506 sv = sv_2mortal(newSV(80));
2509 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
2510 /* try to reclaim a bit of scalar space (only on 1st alloc) */
2511 const STRLEN new_len
2512 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */
2513 SvPV_renew(sv, new_len);
2524 SV * const keysv = POPs;
2525 HV * const hv = MUTABLE_HV(POPs);
2526 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2527 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2529 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2530 bool preeminent = TRUE;
2532 if (SvTYPE(hv) != SVt_PVHV)
2539 /* If we can determine whether the element exist,
2540 * Try to preserve the existenceness of a tied hash
2541 * element by using EXISTS and DELETE if possible.
2542 * Fallback to FETCH and STORE otherwise. */
2543 if (SvCANEXISTDELETE(hv))
2544 preeminent = hv_exists_ent(hv, keysv, 0);
2547 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2548 svp = he ? &HeVAL(he) : NULL;
2550 if (!svp || !*svp || *svp == &PL_sv_undef) {
2554 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2556 lv = sv_newmortal();
2557 sv_upgrade(lv, SVt_PVLV);
2559 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
2560 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
2561 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2567 if (HvNAME_get(hv) && isGV(*svp))
2568 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
2569 else if (preeminent)
2570 save_helem_flags(hv, keysv, svp,
2571 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
2573 SAVEHDELETE(hv, keysv);
2575 else if (PL_op->op_private & OPpDEREF) {
2576 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
2580 sv = (svp && *svp ? *svp : &PL_sv_undef);
2581 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
2582 * was to make C<local $tied{foo} = $tied{foo}> possible.
2583 * However, it seems no longer to be needed for that purpose, and
2584 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g>
2585 * would loop endlessly since the pos magic is getting set on the
2586 * mortal copy and lost. However, the copy has the effect of
2587 * triggering the get magic, and losing it altogether made things like
2588 * c<$tied{foo};> in void context no longer do get magic, which some
2589 * code relied on. Also, delayed triggering of magic on @+ and friends
2590 * meant the original regex may be out of scope by now. So as a
2591 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
2592 * being called too many times). */
2593 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
2600 /* a stripped-down version of Perl_softref2xv() for use by
2601 * pp_multideref(), which doesn't use PL_op->op_flags */
2604 S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
2607 if (PL_op->op_private & HINT_STRICT_REFS) {
2609 Perl_die(aTHX_ PL_no_symref_sv, sv,
2610 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
2612 Perl_die(aTHX_ PL_no_usym, what);
2615 Perl_die(aTHX_ PL_no_usym, what);
2616 return gv_fetchsv_nomg(sv, GV_ADD, type);
2620 /* Handle one or more aggregate derefs and array/hash indexings, e.g.
2621 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
2623 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
2624 * Each of these either contains a set of actions, or an argument, such as
2625 * an IV to use as an array index, or a lexical var to retrieve.
2626 * Several actions re stored per UV; we keep shifting new actions off the
2627 * one UV, and only reload when it becomes zero.
2632 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
2633 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
2634 UV actions = items->uv;
2637 /* this tells find_uninit_var() where we're up to */
2638 PL_multideref_pc = items;
2641 /* there are three main classes of action; the first retrieve
2642 * the initial AV or HV from a variable or the stack; the second
2643 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
2644 * the third an unrolled (/DREFHV, rv2hv, helem).
2646 switch (actions & MDEREF_ACTION_MASK) {
2649 actions = (++items)->uv;
2652 case MDEREF_AV_padav_aelem: /* $lex[...] */
2653 sv = PAD_SVl((++items)->pad_offset);
2656 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
2657 sv = UNOP_AUX_item_sv(++items);
2658 assert(isGV_with_GP(sv));
2659 sv = (SV*)GvAVn((GV*)sv);
2662 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
2667 goto do_AV_rv2av_aelem;
2670 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
2671 sv = UNOP_AUX_item_sv(++items);
2672 assert(isGV_with_GP(sv));
2673 sv = GvSVn((GV*)sv);
2674 goto do_AV_vivify_rv2av_aelem;
2676 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
2677 sv = PAD_SVl((++items)->pad_offset);
2680 do_AV_vivify_rv2av_aelem:
2681 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
2682 /* this is the OPpDEREF action normally found at the end of
2683 * ops like aelem, helem, rv2sv */
2684 sv = vivify_ref(sv, OPpDEREF_AV);
2688 /* this is basically a copy of pp_rv2av when it just has the
2691 if (LIKELY(SvROK(sv))) {
2692 if (UNLIKELY(SvAMAGIC(sv))) {
2693 sv = amagic_deref_call(sv, to_av_amg);
2696 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
2697 DIE(aTHX_ "Not an ARRAY reference");
2699 else if (SvTYPE(sv) != SVt_PVAV) {
2700 if (!isGV_with_GP(sv))
2701 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
2702 sv = MUTABLE_SV(GvAVn((GV*)sv));
2708 /* retrieve the key; this may be either a lexical or package
2709 * var (whose index/ptr is stored as an item) or a signed
2710 * integer constant stored as an item.
2713 IV elem = 0; /* to shut up stupid compiler warnings */
2716 assert(SvTYPE(sv) == SVt_PVAV);
2718 switch (actions & MDEREF_INDEX_MASK) {
2719 case MDEREF_INDEX_none:
2721 case MDEREF_INDEX_const:
2722 elem = (++items)->iv;
2724 case MDEREF_INDEX_padsv:
2725 elemsv = PAD_SVl((++items)->pad_offset);
2727 case MDEREF_INDEX_gvsv:
2728 elemsv = UNOP_AUX_item_sv(++items);
2729 assert(isGV_with_GP(elemsv));
2730 elemsv = GvSVn((GV*)elemsv);
2732 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
2733 && ckWARN(WARN_MISC)))
2734 Perl_warner(aTHX_ packWARN(WARN_MISC),
2735 "Use of reference \"%" SVf "\" as array index",
2737 /* the only time that S_find_uninit_var() needs this
2738 * is to determine which index value triggered the
2739 * undef warning. So just update it here. Note that
2740 * since we don't save and restore this var (e.g. for
2741 * tie or overload execution), its value will be
2742 * meaningless apart from just here */
2743 PL_multideref_pc = items;
2744 elem = SvIV(elemsv);
2749 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
2751 if (!(actions & MDEREF_FLAG_last)) {
2752 SV** svp = av_fetch((AV*)sv, elem, 1);
2753 if (!svp || ! (sv=*svp))
2754 DIE(aTHX_ PL_no_aelem, elem);
2758 if (PL_op->op_private &
2759 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2761 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2762 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
2765 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2766 sv = av_delete((AV*)sv, elem, discard);
2774 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2775 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2776 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2777 bool preeminent = TRUE;
2778 AV *const av = (AV*)sv;
2781 if (UNLIKELY(localizing)) {
2785 /* If we can determine whether the element exist,
2786 * Try to preserve the existenceness of a tied array
2787 * element by using EXISTS and DELETE if possible.
2788 * Fallback to FETCH and STORE otherwise. */
2789 if (SvCANEXISTDELETE(av))
2790 preeminent = av_exists(av, elem);
2793 svp = av_fetch(av, elem, lval && !defer);
2796 if (!svp || !(sv = *svp)) {
2799 DIE(aTHX_ PL_no_aelem, elem);
2800 len = av_tindex(av);
2801 sv = sv_2mortal(newSVavdefelem(av,
2802 /* Resolve a negative index now, unless it points
2803 * before the beginning of the array, in which
2804 * case record it for error reporting in
2805 * magic_setdefelem. */
2806 elem < 0 && len + elem >= 0
2807 ? len + elem : elem, 1));
2810 if (UNLIKELY(localizing)) {
2812 save_aelem(av, elem, svp);
2813 sv = *svp; /* may have changed */
2816 SAVEADELETE(av, elem);
2821 sv = (svp ? *svp : &PL_sv_undef);
2822 /* see note in pp_helem() */
2823 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
2840 case MDEREF_HV_padhv_helem: /* $lex{...} */
2841 sv = PAD_SVl((++items)->pad_offset);
2844 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
2845 sv = UNOP_AUX_item_sv(++items);
2846 assert(isGV_with_GP(sv));
2847 sv = (SV*)GvHVn((GV*)sv);
2850 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
2855 goto do_HV_rv2hv_helem;
2858 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
2859 sv = UNOP_AUX_item_sv(++items);
2860 assert(isGV_with_GP(sv));
2861 sv = GvSVn((GV*)sv);
2862 goto do_HV_vivify_rv2hv_helem;
2864 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
2865 sv = PAD_SVl((++items)->pad_offset);
2868 do_HV_vivify_rv2hv_helem:
2869 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
2870 /* this is the OPpDEREF action normally found at the end of
2871 * ops like aelem, helem, rv2sv */
2872 sv = vivify_ref(sv, OPpDEREF_HV);
2876 /* this is basically a copy of pp_rv2hv when it just has the
2877 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
2880 if (LIKELY(SvROK(sv))) {
2881 if (UNLIKELY(SvAMAGIC(sv))) {
2882 sv = amagic_deref_call(sv, to_hv_amg);
2885 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
2886 DIE(aTHX_ "Not a HASH reference");
2888 else if (SvTYPE(sv) != SVt_PVHV) {
2889 if (!isGV_with_GP(sv))
2890 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
2891 sv = MUTABLE_SV(GvHVn((GV*)sv));
2897 /* retrieve the key; this may be either a lexical / package
2898 * var or a string constant, whose index/ptr is stored as an
2901 SV *keysv = NULL; /* to shut up stupid compiler warnings */
2903 assert(SvTYPE(sv) == SVt_PVHV);
2905 switch (actions & MDEREF_INDEX_MASK) {
2906 case MDEREF_INDEX_none:
2909 case MDEREF_INDEX_const:
2910 keysv = UNOP_AUX_item_sv(++items);
2913 case MDEREF_INDEX_padsv:
2914 keysv = PAD_SVl((++items)->pad_offset);
2917 case MDEREF_INDEX_gvsv:
2918 keysv = UNOP_AUX_item_sv(++items);
2919 keysv = GvSVn((GV*)keysv);
2923 /* see comment above about setting this var */
2924 PL_multideref_pc = items;
2927 /* ensure that candidate CONSTs have been HEKified */
2928 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
2929 || SvTYPE(keysv) >= SVt_PVMG
2932 || SvIsCOW_shared_hash(keysv));
2934 /* this is basically a copy of pp_helem with OPpDEREF skipped */
2936 if (!(actions & MDEREF_FLAG_last)) {
2937 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
2938 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
2939 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2943 if (PL_op->op_private &
2944 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2946 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2947 sv = hv_exists_ent((HV*)sv, keysv, 0)
2948 ? &PL_sv_yes : &PL_sv_no;
2951 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2952 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
2960 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2961 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2962 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2963 bool preeminent = TRUE;
2965 HV * const hv = (HV*)sv;
2968 if (UNLIKELY(localizing)) {
2972 /* If we can determine whether the element exists,
2973 * try to preserve the existence of a tied hash
2974 * element by using EXISTS and DELETE if possible.
2975 * Fallback to FETCH and STORE otherwise. */
2976 if (SvCANEXISTDELETE(hv))
2977 preeminent = hv_exists_ent(hv, keysv, 0);
2980 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2981 svp = he ? &HeVAL(he) : NULL;
2985 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
2989 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2990 lv = sv_newmortal();
2991 sv_upgrade(lv, SVt_PVLV);
2993 sv_magic(lv, key2 = newSVsv(keysv),
2994 PERL_MAGIC_defelem, NULL, 0);
2995 /* sv_magic() increments refcount */
2996 SvREFCNT_dec_NN(key2);
2997 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
3003 if (HvNAME_get(hv) && isGV(sv))
3004 save_gp(MUTABLE_GV(sv),
3005 !(PL_op->op_flags & OPf_SPECIAL));
3006 else if (preeminent) {
3007 save_helem_flags(hv, keysv, svp,
3008 (PL_op->op_flags & OPf_SPECIAL)
3009 ? 0 : SAVEf_SETMAGIC);
3010 sv = *svp; /* may have changed */
3013 SAVEHDELETE(hv, keysv);
3018 sv = (svp && *svp ? *svp : &PL_sv_undef);
3019 /* see note in pp_helem() */
3020 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
3029 actions >>= MDEREF_SHIFT;
3047 itersvp = CxITERVAR(cx);
3050 switch (CxTYPE(cx)) {
3052 case CXt_LOOP_LAZYSV: /* string increment */
3054 SV* cur = cx->blk_loop.state_u.lazysv.cur;
3055 SV *end = cx->blk_loop.state_u.lazysv.end;
3056 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
3057 It has SvPVX of "" and SvCUR of 0, which is what we want. */
3059 const char *max = SvPV_const(end, maxlen);
3060 if (DO_UTF8(end) && IN_UNI_8_BIT)
3061 maxlen = sv_len_utf8_nomg(end);
3062 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
3066 /* NB: on the first iteration, oldsv will have a ref count of at
3067 * least 2 (one extra from blk_loop.itersave), so the GV or pad
3068 * slot will get localised; on subsequent iterations the RC==1
3069 * optimisation may kick in and the SV will be reused. */
3070 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
3071 /* safe to reuse old SV */
3072 sv_setsv(oldsv, cur);
3076 /* we need a fresh SV every time so that loop body sees a
3077 * completely new SV for closures/references to work as
3079 *itersvp = newSVsv(cur);
3080 SvREFCNT_dec(oldsv);
3082 if (strEQ(SvPVX_const(cur), max))
3083 sv_setiv(cur, 0); /* terminate next time */
3089 case CXt_LOOP_LAZYIV: /* integer increment */
3091 IV cur = cx->blk_loop.state_u.lazyiv.cur;
3092 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
3096 /* see NB comment above */
3097 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
3098 /* safe to reuse old SV */
3100 if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV))
3103 /* Cheap SvIOK_only().
3104 * Assert that flags which SvIOK_only() would test or
3105 * clear can't be set, because we're SVt_IV */
3106 assert(!(SvFLAGS(oldsv) &
3107 (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK)))));
3108 SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK);
3109 /* SvIV_set() where sv_any points to head */
3110 oldsv->sv_u.svu_iv = cur;
3114 sv_setiv(oldsv, cur);
3118 /* we need a fresh SV every time so that loop body sees a
3119 * completely new SV for closures/references to work as they
3121 *itersvp = newSViv(cur);
3122 SvREFCNT_dec(oldsv);
3125 if (UNLIKELY(cur == IV_MAX)) {
3126 /* Handle end of range at IV_MAX */
3127 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
3129 ++cx->blk_loop.state_u.lazyiv.cur;
3133 case CXt_LOOP_LIST: /* for (1,2,3) */
3135 assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */
3136 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
3137 ix = (cx->blk_loop.state_u.stack.ix += inc);
3138 if (UNLIKELY(inc > 0
3139 ? ix > cx->blk_oldsp
3140 : ix <= cx->blk_loop.state_u.stack.basesp)
3144 sv = PL_stack_base[ix];
3146 goto loop_ary_common;
3148 case CXt_LOOP_ARY: /* for (@ary) */
3150 av = cx->blk_loop.state_u.ary.ary;
3151 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
3152 ix = (cx->blk_loop.state_u.ary.ix += inc);
3153 if (UNLIKELY(inc > 0
3159 if (UNLIKELY(SvRMAGICAL(av))) {
3160 SV * const * const svp = av_fetch(av, ix, FALSE);
3161 sv = svp ? *svp : NULL;
3164 sv = AvARRAY(av)[ix];
3169 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
3170 SvSetMagicSV(*itersvp, sv);
3175 if (UNLIKELY(SvIS_FREED(sv))) {
3177 Perl_croak(aTHX_ "Use of freed value in iteration");
3184 SvREFCNT_inc_simple_void_NN(sv);
3188 sv = newSVavdefelem(av, ix, 0);
3195 SvREFCNT_dec(oldsv);
3199 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
3202 /* Bypass pushing &PL_sv_yes and calling pp_and(); instead
3203 * jump straight to the AND op's op_other */
3204 assert(PL_op->op_next->op_type == OP_AND);
3205 assert(PL_op->op_next->op_ppaddr == Perl_pp_and);
3206 return cLOGOPx(PL_op->op_next)->op_other;
3209 /* Bypass pushing &PL_sv_no and calling pp_and(); instead
3210 * jump straight to the AND op's op_next */
3211 assert(PL_op->op_next->op_type == OP_AND);
3212 assert(PL_op->op_next->op_ppaddr == Perl_pp_and);
3213 /* pp_enteriter should have pre-extended the stack */
3214 EXTEND_SKIP(PL_stack_sp, 1);
3215 /* we only need this for the rare case where the OP_AND isn't
3216 * in void context, e.g. $x = do { for (..) {...} };
3217 * but it's cheaper to just push it rather than testing first
3219 *++PL_stack_sp = &PL_sv_no;
3220 return PL_op->op_next->op_next;
3225 A description of how taint works in pattern matching and substitution.
3227 This is all conditional on NO_TAINT_SUPPORT not being defined. Under
3228 NO_TAINT_SUPPORT, taint-related operations should become no-ops.
3230 While the pattern is being assembled/concatenated and then compiled,
3231 PL_tainted will get set (via TAINT_set) if any component of the pattern
3232 is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
3233 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
3234 TAINT_get). It will also be set if any component of the pattern matches
3235 based on locale-dependent behavior.
3237 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
3238 the pattern is marked as tainted. This means that subsequent usage, such
3239 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
3240 on the new pattern too.
3242 RXf_TAINTED_SEEN is used post-execution by the get magic code
3243 of $1 et al to indicate whether the returned value should be tainted.
3244 It is the responsibility of the caller of the pattern (i.e. pp_match,
3245 pp_subst etc) to set this flag for any other circumstances where $1 needs
3248 The taint behaviour of pp_subst (and pp_substcont) is quite complex.
3250 There are three possible sources of taint
3252 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
3253 * the replacement string (or expression under /e)
3255 There are four destinations of taint and they are affected by the sources
3256 according to the rules below:
3258 * the return value (not including /r):
3259 tainted by the source string and pattern, but only for the
3260 number-of-iterations case; boolean returns aren't tainted;
3261 * the modified string (or modified copy under /r):
3262 tainted by the source string, pattern, and replacement strings;
3264 tainted by the pattern, and under 'use re "taint"', by the source
3266 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
3267 should always be unset before executing subsequent code.
3269 The overall action of pp_subst is:
3271 * at the start, set bits in rxtainted indicating the taint status of
3272 the various sources.
3274 * After each pattern execution, update the SUBST_TAINT_PAT bit in
3275 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
3276 pattern has subsequently become tainted via locale ops.
3278 * If control is being passed to pp_substcont to execute a /e block,
3279 save rxtainted in the CXt_SUBST block, for future use by
3282 * Whenever control is being returned to perl code (either by falling
3283 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
3284 use the flag bits in rxtainted to make all the appropriate types of
3285 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
3286 et al will appear tainted.
3288 pp_match is just a simpler version of the above.
3304 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
3305 See "how taint works" above */
3308 REGEXP *rx = PM_GETRE(pm);
3309 regexp *prog = ReANY(rx);
3311 int force_on_match = 0;
3312 const I32 oldsave = PL_savestack_ix;
3314 bool doutf8 = FALSE; /* whether replacement is in utf8 */
3319 /* known replacement string? */
3320 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
3324 if (PL_op->op_flags & OPf_STACKED)
3335 SvGETMAGIC(TARG); /* must come before cow check */
3337 /* note that a string might get converted to COW during matching */
3338 was_cow = cBOOL(SvIsCOW(TARG));
3340 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3341 #ifndef PERL_ANY_COW
3343 sv_force_normal_flags(TARG,0);
3345 if ((SvREADONLY(TARG)
3346 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
3347 || SvTYPE(TARG) > SVt_PVLV)
3348 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
3349 Perl_croak_no_modify();
3353 orig = SvPV_nomg(TARG, len);
3354 /* note we don't (yet) force the var into being a string; if we fail
3355 * to match, we leave as-is; on successful match however, we *will*
3356 * coerce into a string, then repeat the match */
3357 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
3360 /* only replace once? */
3361 once = !(rpm->op_pmflags & PMf_GLOBAL);
3363 /* See "how taint works" above */
3366 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
3367 | (RXp_ISTAINTED(prog) ? SUBST_TAINT_PAT : 0)
3368 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
3369 | ((once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
3370 ? SUBST_TAINT_BOOLRET : 0));
3376 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
3378 strend = orig + len;
3379 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len;
3380 maxiters = 2 * slen + 10; /* We can match twice at each
3381 position, once with zero-length,
3382 second time with non-zero. */
3384 /* handle the empty pattern */
3385 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
3386 if (PL_curpm == PL_reg_curpm) {
3387 if (PL_curpm_under) {
3388 if (PL_curpm_under == PL_reg_curpm) {
3389 Perl_croak(aTHX_ "Infinite recursion via empty pattern");
3391 pm = PL_curpm_under;
3401 #ifdef PERL_SAWAMPERSAND
3402 r_flags = ( RXp_NPARENS(prog)
3404 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
3405 || (rpm->op_pmflags & PMf_KEEPCOPY)
3410 r_flags = REXEC_COPY_STR;
3413 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
3416 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
3417 LEAVE_SCOPE(oldsave);
3422 /* known replacement string? */
3424 /* replacement needing upgrading? */
3425 if (DO_UTF8(TARG) && !doutf8) {
3426 nsv = sv_newmortal();
3428 sv_utf8_upgrade(nsv);
3429 c = SvPV_const(nsv, clen);
3433 c = SvPV_const(dstr, clen);
3434 doutf8 = DO_UTF8(dstr);
3437 if (SvTAINTED(dstr))
3438 rxtainted |= SUBST_TAINT_REPL;
3445 /* can do inplace substitution? */
3450 && (I32)clen <= RXp_MINLENRET(prog)
3452 || !(r_flags & REXEC_COPY_STR)
3453 || (!SvGMAGICAL(dstr) && !(RXp_EXTFLAGS(prog) & RXf_EVAL_SEEN))
3455 && !(RXp_EXTFLAGS(prog) & RXf_NO_INPLACE_SUBST)
3456 && (!doutf8 || SvUTF8(TARG))
3457 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
3461 /* string might have got converted to COW since we set was_cow */
3462 if (SvIsCOW(TARG)) {
3463 if (!force_on_match)
3465 assert(SvVOK(TARG));
3468 if (force_on_match) {
3469 /* redo the first match, this time with the orig var
3470 * forced into being a string */
3472 orig = SvPV_force_nomg(TARG, len);
3478 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
3479 rxtainted |= SUBST_TAINT_PAT;
3480 m = orig + RXp_OFFS(prog)[0].start;
3481 d = orig + RXp_OFFS(prog)[0].end;
3483 if (m - s > strend - d) { /* faster to shorten from end */
3486 Copy(c, m, clen, char);
3491 Move(d, m, i, char);
3495 SvCUR_set(TARG, m - s);
3497 else { /* faster from front */
3501 Move(s, d - i, i, char);
3504 Copy(c, d, clen, char);
3511 d = s = RXp_OFFS(prog)[0].start + orig;
3514 if (UNLIKELY(iters++ > maxiters))
3515 DIE(aTHX_ "Substitution loop");
3516 /* run time pattern taint, eg locale */
3517 if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
3518 rxtainted |= SUBST_TAINT_PAT;
3519 m = RXp_OFFS(prog)[0].start + orig;
3522 Move(s, d, i, char);
3526 Copy(c, d, clen, char);
3529 s = RXp_OFFS(prog)[0].end + orig;
3530 } while (CALLREGEXEC(rx, s, strend, orig,
3531 s == m, /* don't match same null twice */
3533 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3536 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
3537 Move(s, d, i+1, char); /* include the NUL */
3540 if (PL_op->op_private & OPpTRUEBOOL)
3541 PUSHs(iters ? &PL_sv_yes : &PL_sv_zero);
3550 if (force_on_match) {
3551 /* redo the first match, this time with the orig var
3552 * forced into being a string */
3554 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3555 /* I feel that it should be possible to avoid this mortal copy
3556 given that the code below copies into a new destination.
3557 However, I suspect it isn't worth the complexity of
3558 unravelling the C<goto force_it> for the small number of
3559 cases where it would be viable to drop into the copy code. */
3560 TARG = sv_2mortal(newSVsv(TARG));
3562 orig = SvPV_force_nomg(TARG, len);
3568 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
3569 rxtainted |= SUBST_TAINT_PAT;
3571 s = RXp_OFFS(prog)[0].start + orig;
3572 dstr = newSVpvn_flags(orig, s-orig,
3573 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
3578 /* note that a whole bunch of local vars are saved here for
3579 * use by pp_substcont: here's a list of them in case you're
3580 * searching for places in this sub that uses a particular var:
3581 * iters maxiters r_flags oldsave rxtainted orig dstr targ
3582 * s m strend rx once */
3584 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
3588 if (UNLIKELY(iters++ > maxiters))
3589 DIE(aTHX_ "Substitution loop");
3590 if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
3591 rxtainted |= SUBST_TAINT_PAT;
3592 if (RXp_MATCH_COPIED(prog) && RXp_SUBBEG(prog) != orig) {
3594 char *old_orig = orig;
3595 assert(RXp_SUBOFFSET(prog) == 0);
3597 orig = RXp_SUBBEG(prog);
3598 s = orig + (old_s - old_orig);
3599 strend = s + (strend - old_s);
3601 m = RXp_OFFS(prog)[0].start + orig;
3602 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
3603 s = RXp_OFFS(prog)[0].end + orig;
3605 /* replacement already stringified */
3607 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
3611 sv_catsv(dstr, repl);
3612 if (UNLIKELY(SvTAINTED(repl)))
3613 rxtainted |= SUBST_TAINT_REPL;
3617 } while (CALLREGEXEC(rx, s, strend, orig,
3618 s == m, /* Yields minend of 0 or 1 */
3620 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3621 assert(strend >= s);
3622 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
3624 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3625 /* From here on down we're using the copy, and leaving the original
3632 /* The match may make the string COW. If so, brilliant, because
3633 that's just saved us one malloc, copy and free - the regexp has
3634 donated the old buffer, and we malloc an entirely new one, rather
3635 than the regexp malloc()ing a buffer and copying our original,
3636 only for us to throw it away here during the substitution. */
3637 if (SvIsCOW(TARG)) {
3638 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
3644 SvPV_set(TARG, SvPVX(dstr));
3645 SvCUR_set(TARG, SvCUR(dstr));
3646 SvLEN_set(TARG, SvLEN(dstr));
3647 SvFLAGS(TARG) |= SvUTF8(dstr);
3648 SvPV_set(dstr, NULL);
3655 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3656 (void)SvPOK_only_UTF8(TARG);
3659 /* See "how taint works" above */
3661 if ((rxtainted & SUBST_TAINT_PAT) ||
3662 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
3663 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
3665 (RXp_MATCH_TAINTED_on(prog)); /* taint $1 et al */
3667 if (!(rxtainted & SUBST_TAINT_BOOLRET)
3668 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
3670 SvTAINTED_on(TOPs); /* taint return value */
3672 SvTAINTED_off(TOPs); /* may have got tainted earlier */
3674 /* needed for mg_set below */
3676 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
3680 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
3682 LEAVE_SCOPE(oldsave);
3692 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
3693 ++*PL_markstack_ptr;
3695 LEAVE_with_name("grep_item"); /* exit inner scope */
3698 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
3700 const U8 gimme = GIMME_V;
3702 LEAVE_with_name("grep"); /* exit outer scope */
3703 (void)POPMARK; /* pop src */
3704 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
3705 (void)POPMARK; /* pop dst */
3706 SP = PL_stack_base + POPMARK; /* pop original mark */
3707 if (gimme == G_SCALAR) {
3708 if (PL_op->op_private & OPpTRUEBOOL)
3709 PUSHs(items ? &PL_sv_yes : &PL_sv_zero);
3715 else if (gimme == G_ARRAY)
3722 ENTER_with_name("grep_item"); /* enter inner scope */
3725 src = PL_stack_base[TOPMARK];
3726 if (SvPADTMP(src)) {
3727 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
3733 RETURNOP(cLOGOP->op_other);
3737 /* leave_adjust_stacks():
3739 * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp),
3740 * positioning them at to_sp+1 onwards, and do the equivalent of a
3741 * FREEMPS and TAINT_NOT.
3743 * Not intended to be called in void context.
3745 * When leaving a sub, eval, do{} or other scope, the things that need
3746 * doing to process the return args are:
3747 * * in scalar context, only return the last arg (or PL_sv_undef if none);
3748 * * for the types of return that return copies of their args (such
3749 * as rvalue sub return), make a mortal copy of every return arg,
3750 * except where we can optimise the copy away without it being
3751 * semantically visible;
3752 * * make sure that the arg isn't prematurely freed; in the case of an
3753 * arg not copied, this may involve mortalising it. For example, in
3754 * C<sub f { my $x = ...; $x }>, $x would be freed when we do
3755 * CX_LEAVE_SCOPE(cx) unless it's protected or copied.
3757 * What condition to use when deciding whether to pass the arg through
3758 * or make a copy, is determined by the 'pass' arg; its valid values are:
3759 * 0: rvalue sub/eval exit
3760 * 1: other rvalue scope exit
3761 * 2: :lvalue sub exit in rvalue context
3762 * 3: :lvalue sub exit in lvalue context and other lvalue scope exits
3764 * There is a big issue with doing a FREETMPS. We would like to free any
3765 * temps created by the last statement which the sub executed, rather than
3766 * leaving them for the caller. In a situation where a sub call isn't
3767 * soon followed by a nextstate (e.g. nested recursive calls, a la
3768 * fibonacci()), temps can accumulate, causing memory and performance
3771 * On the other hand, we don't want to free any TEMPs which are keeping
3772 * alive any return args that we skipped copying; nor do we wish to undo
3773 * any mortalising done here.
3775 * The solution is to split the temps stack frame into two, with a cut
3776 * point delineating the two halves. We arrange that by the end of this
3777 * function, all the temps stack frame entries we wish to keep are in the
3778 * range PL_tmps_floor+1.. tmps_base-1, while the ones to free now are in
3779 * the range tmps_base .. PL_tmps_ix. During the course of this
3780 * function, tmps_base starts off as PL_tmps_floor+1, then increases
3781 * whenever we find or create a temp that we know should be kept. In
3782 * general the stuff above tmps_base is undecided until we reach the end,
3783 * and we may need a sort stage for that.
3785 * To determine whether a TEMP is keeping a return arg alive, every
3786 * arg that is kept rather than copied and which has the SvTEMP flag
3787 * set, has the flag temporarily unset, to mark it. At the end we scan
3788 * the temps stack frame above the cut for entries without SvTEMP and
3789 * keep them, while turning SvTEMP on again. Note that if we die before
3790 * the SvTEMP flags are set again, it's safe: at worst, subsequent use of
3791 * those SVs may be slightly less efficient.
3793 * In practice various optimisations for some common cases mean we can
3794 * avoid most of the scanning and swapping about with the temps stack.
3798 Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass)
3802 SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
3805 PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS;
3809 if (gimme == G_ARRAY) {
3810 nargs = SP - from_sp;
3814 assert(gimme == G_SCALAR);
3815 if (UNLIKELY(from_sp >= SP)) {
3816 /* no return args */
3817 assert(from_sp == SP);
3819 *++SP = &PL_sv_undef;
3829 /* common code for G_SCALAR and G_ARRAY */
3831 tmps_base = PL_tmps_floor + 1;
3835 /* pointer version of tmps_base. Not safe across temp stack
3839 EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
3840 tmps_basep = PL_tmps_stack + tmps_base;
3842 /* process each return arg */
3845 SV *sv = *from_sp++;
3847 assert(PL_tmps_ix + nargs < PL_tmps_max);
3849 /* PADTMPs with container set magic shouldn't appear in the
3850 * wild. This assert is more important for pp_leavesublv(),
3851 * but by testing for it here, we're more likely to catch
3852 * bad cases (what with :lvalue subs not being widely
3853 * deployed). The two issues are that for something like
3854 * sub :lvalue { $tied{foo} }
3856 * sub :lvalue { substr($foo,1,2) }
3857 * pp_leavesublv() will croak if the sub returns a PADTMP,
3858 * and currently functions like pp_substr() return a mortal
3859 * rather than using their PADTMP when returning a PVLV.
3860 * This is because the PVLV will hold a ref to $foo,
3861 * so $foo would get delayed in being freed while
3862 * the PADTMP SV remained in the PAD.
3863 * So if this assert fails it means either:
3864 * 1) there is pp code similar to pp_substr that is
3865 * returning a PADTMP instead of a mortal, and probably
3867 * 2) pp_leavesublv is making unwarranted assumptions
3868 * about always croaking on a PADTMP
3870 if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
3872 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
3873 assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
3879 pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3880 : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3881 : pass == 2 ? (!SvPADTMP(sv))
3884 /* pass through: skip copy for logic or optimisation
3885 * reasons; instead mortalise it, except that ... */
3889 /* ... since this SV is an SvTEMP , we don't need to
3890 * re-mortalise it; instead we just need to ensure
3891 * that its existing entry in the temps stack frame
3892 * ends up below the cut and so avoids being freed
3893 * this time round. We mark it as needing to be kept
3894 * by temporarily unsetting SvTEMP; then at the end,
3895 * we shuffle any !SvTEMP entries on the tmps stack
3896 * back below the cut.
3897 * However, there's a significant chance that there's
3898 * a 1:1 correspondence between the first few (or all)
3899 * elements in the return args stack frame and those
3900 * in the temps stack frame; e.g.:
3901 * sub f { ....; map {...} .... },
3902 * or if we're exiting multiple scopes and one of the
3903 * inner scopes has already made mortal copies of each
3906 * If so, this arg sv will correspond to the next item
3907 * on the tmps stack above the cut, and so can be kept
3908 * merely by moving the cut boundary up one, rather
3909 * than messing with SvTEMP. If all args are 1:1 then
3910 * we can avoid the sorting stage below completely.
3912 * If there are no items above the cut on the tmps
3913 * stack, then the SvTEMP must come from an item
3914 * below the cut, so there's nothing to do.
3916 if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) {
3917 if (sv == *tmps_basep)
3923 else if (!SvPADTMP(sv)) {
3924 /* mortalise arg to avoid it being freed during save
3925 * stack unwinding. Pad tmps don't need mortalising as
3926 * they're never freed. This is the equivalent of
3927 * sv_2mortal(SvREFCNT_inc(sv)), except that:
3928 * * it assumes that the temps stack has already been
3930 * * it puts the new item at the cut rather than at
3931 * ++PL_tmps_ix, moving the previous occupant there
3934 if (!SvIMMORTAL(sv)) {
3935 SvREFCNT_inc_simple_void_NN(sv);
3937 /* Note that if there's nothing above the cut,
3938 * this copies the garbage one slot above
3939 * PL_tmps_ix onto itself. This is harmless (the
3940 * stack's already been extended), but might in
3941 * theory trigger warnings from tools like ASan
3943 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3949 /* Make a mortal copy of the SV.
3950 * The following code is the equivalent of sv_mortalcopy()
3952 * * it assumes the temps stack has already been extended;
3953 * * it optimises the copying for some simple SV types;
3954 * * it puts the new item at the cut rather than at
3955 * ++PL_tmps_ix, moving the previous occupant there
3958 SV *newsv = newSV(0);
3960 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3961 /* put it on the tmps stack early so it gets freed if we die */
3962 *tmps_basep++ = newsv;
3965 if (SvTYPE(sv) <= SVt_IV) {
3966 /* arg must be one of undef, IV/UV, or RV: skip
3967 * sv_setsv_flags() and do the copy directly */
3969 U32 srcflags = SvFLAGS(sv);
3971 assert(!SvGMAGICAL(sv));
3972 if (srcflags & (SVf_IOK|SVf_ROK)) {
3973 SET_SVANY_FOR_BODYLESS_IV(newsv);
3975 if (srcflags & SVf_ROK) {
3976 newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
3977 /* SV type plus flags */
3978 dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
3981 /* both src and dst are <= SVt_IV, so sv_any
3982 * points to the head; so access the heads
3983 * directly rather than going via sv_any.
3985 assert( &(sv->sv_u.svu_iv)
3986 == &(((XPVIV*) SvANY(sv))->xiv_iv));
3987 assert( &(newsv->sv_u.svu_iv)
3988 == &(((XPVIV*) SvANY(newsv))->xiv_iv));
3989 newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
3990 /* SV type plus flags */
3991 dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
3992 |(srcflags & SVf_IVisUV));
3996 assert(!(srcflags & SVf_OK));
3997 dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
3999 SvFLAGS(newsv) = dstflags;
4003 /* do the full sv_setsv() */
4007 old_base = tmps_basep - PL_tmps_stack;
4009 sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
4010 /* the mg_get or sv_setsv might have created new temps
4011 * or realloced the tmps stack; regrow and reload */
4012 EXTEND_MORTAL(nargs);
4013 tmps_basep = PL_tmps_stack + old_base;
4014 TAINT_NOT; /* Each item is independent */
4020 /* If there are any temps left above the cut, we need to sort
4021 * them into those to keep and those to free. The only ones to
4022 * keep are those for which we've temporarily unset SvTEMP.
4023 * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
4024 * swapping pairs as necessary. Stop when we meet in the middle.
4027 SV **top = PL_tmps_stack + PL_tmps_ix;
4028 while (tmps_basep <= top) {
4041 tmps_base = tmps_basep - PL_tmps_stack;
4044 PL_stack_sp = to_sp;
4046 /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
4047 while (PL_tmps_ix >= tmps_base) {
4048 SV* const sv = PL_tmps_stack[PL_tmps_ix--];
4050 PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
4054 SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
4060 /* also tail-called by pp_return */
4070 assert(CxTYPE(cx) == CXt_SUB);
4072 if (CxMULTICALL(cx)) {
4073 /* entry zero of a stack is always PL_sv_undef, which
4074 * simplifies converting a '()' return into undef in scalar context */
4075 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
4079 gimme = cx->blk_gimme;
4080 oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */
4082 if (gimme == G_VOID)
4083 PL_stack_sp = oldsp;
4085 leave_adjust_stacks(oldsp, oldsp, gimme, 0);
4088 cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */
4090 retop = cx->blk_sub.retop;
4097 /* clear (if possible) or abandon the current @_. If 'abandon' is true,
4098 * forces an abandon */
4101 Perl_clear_defarray(pTHX_ AV* av, bool abandon)
4103 const SSize_t fill = AvFILLp(av);
4105 PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
4107 if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) {
4112 AV *newav = newAV();
4113 av_extend(newav, fill);
4114 AvREIFY_only(newav);
4115 PAD_SVl(0) = MUTABLE_SV(newav);
4116 SvREFCNT_dec_NN(av);
4127 I32 old_savestack_ix;
4132 /* Locate the CV to call:
4133 * - most common case: RV->CV: f(), $ref->():
4134 * note that if a sub is compiled before its caller is compiled,
4135 * the stash entry will be a ref to a CV, rather than being a GV.
4136 * - second most common case: CV: $ref->method()
4139 /* a non-magic-RV -> CV ? */
4140 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
4141 cv = MUTABLE_CV(SvRV(sv));
4142 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
4146 cv = MUTABLE_CV(sv);
4149 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
4150 /* handle all the weird cases */
4151 switch (SvTYPE(sv)) {
4153 if (!isGV_with_GP(sv))
4157 cv = GvCVu((const GV *)sv);
4158 if (UNLIKELY(!cv)) {
/* NOTE(review): interior of pp_entersub — the op that calls a subroutine.
 * The function header and many interior lines are elided in this view;
 * comments below annotate only what is visible. */
4160 cv = sv_2cv(sv, &stash, &gv, 0);
4162 old_savestack_ix = PL_savestack_ix;
/* Overloaded invocant: let the '&{}' (to_cv_amg) overload produce the
 * CV-ish SV to call. */
4173 if (UNLIKELY(SvAMAGIC(sv))) {
4174 sv = amagic_deref_call(sv, to_cv_amg);
4175 /* Don't SPAGAIN here. */
4181 if (UNLIKELY(!SvOK(sv)))
4182 DIE(aTHX_ PL_no_usym, "a subroutine");
/* &PL_sv_yes here marks a failed-import stub: silently discard any
 * stacked args and yield undef in scalar context instead of dying. */
4184 if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
4185 if (PL_op->op_flags & OPf_STACKED) /* hasargs */
4186 SP = PL_stack_base + POPMARK;
4189 if (GIMME_V == G_SCALAR)
4190 PUSHs(&PL_sv_undef);
/* Symbolic (string) sub call: forbidden under "strict refs";
 * otherwise resolve or create the named CV. */
4194 sym = SvPV_nomg_const(sv, len);
4195 if (PL_op->op_private & HINT_STRICT_REFS)
4196 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
4197 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
4200 cv = MUTABLE_CV(SvRV(sv));
4201 if (LIKELY(SvTYPE(cv) == SVt_PVCV))
4207 DIE(aTHX_ "Not a CODE reference");
4211 /* At this point we want to save PL_savestack_ix, either by doing a
4212 * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final
4213 * CV we will be using (so we don't know whether its XS, so we can't
4214 * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on
4215 * the save stack. So remember where we are currently on the save
4216 * stack, and later update the CX or scopestack entry accordingly. */
4217 old_savestack_ix = PL_savestack_ix;
4219 /* these two fields are in a union. If they ever become separate,
4220 * we have to test for both of them being null below */
4222 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
/* Loop until we have a CV with a body: empty CvROOT/CvXSUB means the
 * sub is undefined, which may still be satisfiable via AUTOLOAD. */
4223 while (UNLIKELY(!CvROOT(cv))) {
4227 /* anonymous or undef'd function leaves us no recourse */
4228 if (CvLEXICAL(cv) && CvHASGV(cv))
4229 DIE(aTHX_ "Undefined subroutine &%" SVf " called",
4230 SVfARG(cv_name(cv, NULL, 0)));
4231 if (CvANON(cv) || !CvHASGV(cv)) {
4232 DIE(aTHX_ "Undefined subroutine called");
4235 /* autoloaded stub? */
4236 if (cv != GvCV(gv = CvGV(cv))) {
4239 /* should call AUTOLOAD now? */
4242 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
4243 (GvNAMEUTF8(gv) ? SVf_UTF8 : 0)
4244 |(PL_op->op_flags & OPf_REF
4245 ? GV_AUTOLOAD_ISMETHOD
4247 cv = autogv ? GvCV(autogv) : NULL;
/* AUTOLOAD lookup failed: report the fully-qualified sub name */
4250 sub_name = sv_newmortal();
4251 gv_efullname3(sub_name, gv, NULL);
4252 DIE(aTHX_ "Undefined subroutine &%" SVf " called", SVfARG(sub_name));
4256 /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
4257 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
4258 DIE(aTHX_ "Closure prototype called");
/* Debugger (-d) hook: route the call through DB::sub (or DB::lsub for
 * lvalue subs), remembering the original COP in PL_curcopdb. */
4260 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
4263 Perl_get_db_sub(aTHX_ &sv, cv);
4265 PL_curcopdb = PL_curcop;
4267 /* check for lsub that handles lvalue subroutines */
4268 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
4269 /* if lsub not found then fall back to DB::sub */
4270 if (!cv) cv = GvCV(PL_DBsub);
4272 cv = GvCV(PL_DBsub);
4275 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
4276 DIE(aTHX_ "No DB::sub routine defined");
/* Pure-perl sub: push a CXt_SUB context, bump CvDEPTH, switch to the
 * sub's pad, and (when OPf_STACKED) move stacked args into @_. */
4279 if (!(CvISXSUB(cv))) {
4280 /* This path taken at least 75% of the time */
4287 /* keep PADTMP args alive throughout the call (we need to do this
4288 * because @_ isn't refcounted). Note that we create the mortals
4289 * in the caller's tmps frame, so they won't be freed until after
4290 * we return from the sub.
4299 *svp = sv = sv_mortalcopy(sv);
4305 cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix);
4306 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
4307 cx_pushsub(cx, cv, PL_op->op_next, hasargs);
4309 padlist = CvPADLIST(cv);
/* recursion: each extra depth level needs its own copy of the pad */
4310 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2))
4311 pad_push(padlist, depth);
4312 PAD_SET_CUR_NOSAVE(padlist, depth);
4313 if (LIKELY(hasargs)) {
4314 AV *const av = MUTABLE_AV(PAD_SVl(0));
/* save the caller's @_ and install the pad's arg AV as @_ */
4318 defavp = &GvAV(PL_defgv);
4319 cx->blk_sub.savearray = *defavp;
4320 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
4322 /* it's the responsibility of whoever leaves a sub to ensure
4323 * that a clean, empty AV is left in pad[0]. This is normally
4324 * done by cx_popsub() */
4325 assert(!AvREAL(av) && AvFILLp(av) == -1);
/* grow @_'s backing store when the arg count exceeds its capacity */
4328 if (UNLIKELY(items - 1 > AvMAX(av))) {
4329 SV **ary = AvALLOC(av);
4330 Renew(ary, items, SV*);
4331 AvMAX(av) = items - 1;
/* bulk-copy the stacked args into @_; av is non-AvREAL, so no
 * reference counts are taken on the elements */
4336 Copy(MARK+1,AvARRAY(av),items,SV*);
4337 AvFILLp(av) = items - 1;
4339 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
4341 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
4342 SVfARG(cv_name(cv, NULL, 0)));
4343 /* warning must come *after* we fully set up the context
4344 * stuff so that __WARN__ handlers can safely dounwind()
4347 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
4348 && ckWARN(WARN_RECURSION)
4349 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
4350 sub_crush_depth(cv);
/* enter the sub's op tree; its leave op will pop the context */
4351 RETURNOP(CvSTART(cv));
/* XS path: no perl-level context frame is pushed; instead the ENTER's
 * scopestack entry is patched to the remembered savestack index. */
4354 SSize_t markix = TOPMARK;
4358 /* pretend we did the ENTER earlier */
4359 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
4364 if (UNLIKELY(((PL_op->op_private
4365 & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
4366 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
4368 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
4369 SVfARG(cv_name(cv, NULL, 0)));
/* "&foo;" (no parens, not OPf_STACKED): pass the current @_ through
 * to the XSUB by copying it onto the argument stack */
4371 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
4372 /* Need to copy @_ to stack. Alternative may be to
4373 * switch stack to @_, and copy return values
4374 * back. This would allow popping @_ in XSUB, e.g.. XXXX */
4375 AV * const av = GvAV(PL_defgv);
4376 const SSize_t items = AvFILL(av) + 1;
4380 const bool m = cBOOL(SvRMAGICAL(av));
4381 /* Mark is at the end of the stack. */
4383 for (; i < items; ++i)
/* magical @_ must go through av_fetch; plain @_ is indexed directly */
4387 SV ** const svp = av_fetch(av, i, 0);
4388 sv = svp ? *svp : NULL;
4390 else sv = AvARRAY(av)[i];
4391 if (sv) SP[i+1] = sv;
/* hole in @_: substitute a deferred-element placeholder */
4393 SP[i+1] = newSVavdefelem(av, i, 1);
4401 SV **mark = PL_stack_base + markix;
4402 SSize_t items = SP - mark;
/* as on the perl path, keep PADTMP args alive via mortal copies */
4405 if (*mark && SvPADTMP(*mark)) {
4406 *mark = sv_mortalcopy(*mark);
4410 /* We assume first XSUB in &DB::sub is the called one. */
4411 if (UNLIKELY(PL_curcopdb)) {
4412 SAVEVPTR(PL_curcop);
4413 PL_curcop = PL_curcopdb;
4416 /* Do we need to open block here? XXXX */
4418 /* calculate gimme here as PL_op might get changed and then not
4419 * restored until the LEAVE further down */
4420 is_scalar = (GIMME_V == G_SCALAR);
4422 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
4424 CvXSUB(cv)(aTHX_ cv);
4426 /* Enforce some sanity in scalar context. */
/* scalar context must yield exactly one value: pad with undef when
 * the XSUB returned nothing, else keep only the topmost value */
4428 SV **svp = PL_stack_base + markix + 1;
4429 if (svp != PL_stack_sp) {
4430 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
/* Emit the "Deep recursion" warning for cv, reached when CvDEPTH passes
 * PERL_SUB_DEPTH_WARN (see the caller in pp_entersub).  Anonymous subs
 * get a fixed message; named subs include cv_name().  NOTE(review): the
 * conditional selecting between the two messages (presumably a CvANON
 * test) is elided in this view. */
4440 Perl_sub_crush_depth(pTHX_ CV *cv)
4442 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
4445 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
4447 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%" SVf "\"",
4448 SVfARG(cv_name(cv,NULL,0)));
4454 /* like croak, but report in context of caller */
/* Pointing PL_curcop at the caller's COP makes the "at FILE line N"
 * part of the message refer to the call site rather than the current
 * op.  NOTE(review): the elided tail presumably vcroaks with
 * (pat, args) — hence NOT_REACHED — but those lines are not visible
 * here. */
4457 Perl_croak_caller(const char *pat, ...)
4461 const PERL_CONTEXT *cx = caller_cx(0, NULL);
4463 /* make error appear at call site */
4465 PL_curcop = cx->blk_oldcop;
4467 va_start(args, pat);
4469 NOT_REACHED; /* NOTREACHED */
/* NOTE(review): interior of pp_aelem — $array[$index] fetch / lvalue
 * access.  The PP(pp_aelem) header and several lines are elided in
 * this view. */
4478 SV* const elemsv = POPs;
4479 IV elem = SvIV(elemsv);
4480 AV *const av = MUTABLE_AV(POPs);
/* lval: element may be assigned through; defer: don't vivify the slot
 * yet (OPpLVAL_DEFER); localizing: "local $a[$i]" (OPpLVAL_INTRO) */
4481 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
4482 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
4483 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
4484 bool preeminent = TRUE;
/* a reference used as an array index numifies to its address — almost
 * certainly a mistake, so warn (unless get-magic could legitimize it) */
4487 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
4488 Perl_warner(aTHX_ packWARN(WARN_MISC),
4489 "Use of reference \"%" SVf "\" as array index",
4491 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
4494 if (UNLIKELY(localizing)) {
4498 /* If we can determine whether the element exist,
4499 * Try to preserve the existenceness of a tied array
4500 * element by using EXISTS and DELETE if possible.
4501 * Fallback to FETCH and STORE otherwise. */
4502 if (SvCANEXISTDELETE(av))
4503 preeminent = av_exists(av, elem);
4506 svp = av_fetch(av, elem, lval && !defer);
4508 #ifdef PERL_MALLOC_WRAP
/* clamp/normalize suspiciously large indices before they can drive an
 * enormous array extension */
4509 if (SvUOK(elemsv)) {
4510 const UV uv = SvUV(elemsv);
4511 elem = uv > IV_MAX ? IV_MAX : uv;
4513 else if (SvNOK(elemsv))
4514 elem = (IV)SvNV(elemsv);
4516 static const char oom_array_extend[] =
4517 "Out of memory during array extend"; /* Duplicated in av.c */
4518 MEM_WRAP_CHECK_1(elem,SV*,oom_array_extend);
4521 if (!svp || !*svp) {
4524 DIE(aTHX_ PL_no_aelem, elem);
4525 len = av_tindex(av);
/* deferred lvalue: push a "defelem" proxy SV that stores into the
 * array only if it is actually assigned to */
4526 mPUSHs(newSVavdefelem(av,
4527 /* Resolve a negative index now, unless it points before the
4528 beginning of the array, in which case record it for error
4529 reporting in magic_setdefelem. */
4530 elem < 0 && len + elem >= 0 ? len + elem : elem,
/* "local $a[$i]": arrange restoration on scope exit — save the old
 * value, or delete the element if it didn't pre-exist */
4534 if (UNLIKELY(localizing)) {
4536 save_aelem(av, elem, svp);
4538 SAVEADELETE(av, elem);
4540 else if (PL_op->op_private & OPpDEREF) {
/* $a[$i]->... : auto-vivify the element into a ref of the needed kind */
4541 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
4545 sv = (svp ? *svp : &PL_sv_undef);
4546 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
/* Auto-vivify sv into a reference of the kind requested by to_what,
 * used by element-access ops for constructs like $a[0]{x}.
 * NOTE(review): the dispatch skeleton (presumably a switch on to_what)
 * and the tail are elided in this view. */
4553 Perl_vivify_ref(pTHX_ SV *sv, U32 to_what)
4555 PERL_ARGS_ASSERT_VIVIFY_REF;
/* cannot vivify through a read-only value */
4560 Perl_croak_no_modify();
4561 prepare_SV_for_RV(sv);
/* create a referent matching the requested deref type: plain SV,
 * array, or hash */
4564 SvRV_set(sv, newSV(0));
4567 SvRV_set(sv, MUTABLE_SV(newAV()));
4570 SvRV_set(sv, MUTABLE_SV(newHV()));
4577 if (SvGMAGICAL(sv)) {
4578 /* copy the sv without magic to prevent magic from being
4580 SV* msv = sv_newmortal();
4581 sv_setsv_nomg(msv, sv);
/* Resolve the stash (package HV) in which to look up method "meth",
 * from the invocant sitting just above TOPMARK on the stack.  Handles
 * class-name strings (with a GV_CACHE_ONLY fast path for shared hash
 * keys), blessed references, globs/filehandles, and bareword package
 * names; croaks for missing, undef, or unblessed invocants.  May
 * return the invocant SV itself as a stand-in when no stash of that
 * name exists.  NOTE(review): several branch lines are elided in this
 * view. */
4587 PERL_STATIC_INLINE HV *
4588 S_opmethod_stash(pTHX_ SV* meth)
/* an empty arg list (mark == sp) means there is no invocant at all */
4593 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
4594 ? (Perl_croak(aTHX_ "Can't call method \"%" SVf "\" without a "
4595 "package or object reference", SVfARG(meth)),
4597 : *(PL_stack_base + TOPMARK + 1);
4599 PERL_ARGS_ASSERT_OPMETHOD_STASH;
4603 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on an undefined value",
4606 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
/* fast path: a literal "Class"->meth compiles the class name to a
 * shared-hash-key COW SV, so a cached stash lookup usually succeeds */
4607 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
4608 stash = gv_stashsv(sv, GV_CACHE_ONLY);
4609 if (stash) return stash;
4613 ob = MUTABLE_SV(SvRV(sv));
4614 else if (!SvOK(sv)) goto undefined;
4615 else if (isGV_with_GP(sv)) {
4617 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
4618 "without a package or object reference",
/* a defelem lvalue proxy ('y' magic LV): use its target instead */
4621 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') {
4622 assert(!LvTARGLEN(ob));
4626 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob));
4629 /* this isn't a reference */
4632 const char * const packname = SvPV_nomg_const(sv, packlen);
4633 const U32 packname_utf8 = SvUTF8(sv);
4634 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
4635 if (stash) return stash;
/* not a known package: maybe it names a filehandle — try an IO glob */
4637 if (!(iogv = gv_fetchpvn_flags(
4638 packname, packlen, packname_utf8, SVt_PVIO
4640 !(ob=MUTABLE_SV(GvIO(iogv))))
4642 /* this isn't the name of a filehandle either */
4645 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
4646 "without a package or object reference",
4649 /* assume it's a package name */
4650 stash = gv_stashpvn(packname, packlen, packname_utf8);
4651 if (stash) return stash;
4652 else return MUTABLE_HV(sv);
4654 /* it _is_ a filehandle name -- replace with a reference */
4655 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
4658 /* if we got here, ob should be an object or a glob */
4659 if (!ob || !(SvOBJECT(ob)
4660 || (isGV_with_GP(ob)
4661 && (ob = MUTABLE_SV(GvIO((const GV *)ob)))
/* special-case the message when the failing method is really the
 * internal isa/DOES probe (shared PL_isa_DOES string) */
4664 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on unblessed reference",
4665 SVfARG((SvPOK(meth) && SvPVX(meth) == PL_isa_DOES)
4666 ? newSVpvs_flags("DOES", SVs_TEMP)
/* NOTE(review): interior of pp_method — runtime method lookup for
 * $obj->$meth where the method is held in a variable.  Lines are
 * elided in this view. */
4678 SV* const meth = TOPs;
/* $obj->$coderef(...): a CODE reference needs no name lookup at all */
4681 SV* const rmeth = SvRV(meth);
4682 if (SvTYPE(rmeth) == SVt_PVCV) {
4688 stash = opmethod_stash(meth);
/* full lookup honouring inheritance and AUTOLOAD; croaks if absent */
4690 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4693 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* Probe method cache `cache` of `stash` for `meth`: on a hit whose GV
 * holds a CV and whose cached generation is still current (GvCVGEN
 * matches PL_sub_generation + the stash's mro cache_gen, i.e. no
 * intervening sub redefinition or @ISA change), push that CV and skip
 * the slow lookup.  NOTE(review): some continuation lines of this
 * macro are elided in this view. */
4697 #define METHOD_CHECK_CACHE(stash,cache,meth) \
4698     const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
4700 gv = MUTABLE_GV(HeVAL(he)); \
4701 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
4702 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
4704 XPUSHs(MUTABLE_SV(GvCV(gv))); \
/* NOTE(review): interior of pp_method_named — method call with a
 * compile-time-constant name ($obj->foo); the name SV lives in the
 * METHOP itself.  Lines are elided in this view. */
4713 SV* const meth = cMETHOPx_meth(PL_op);
4714 HV* const stash = opmethod_stash(meth);
/* try the stash's own method cache first; the SvTYPE check guards
 * against the non-HV stand-in opmethod_stash can return */
4716 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
4717 METHOD_CHECK_CACHE(stash, stash, meth);
4720 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4723 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* NOTE(review): interior of pp_method_super — $self->SUPER::meth().
 * SUPER resolves relative to the package the call was compiled in
 * (CopSTASH), not the object's class.  Lines are elided in this
 * view. */
4732 SV* const meth = cMETHOPx_meth(PL_op);
4733 HV* const stash = CopSTASH(PL_curcop);
4734 /* Actually, SUPER doesn't need real object's (or class') stash at all,
4735 * as it uses CopSTASH. However, we must ensure that object(class) is
4736 * correct (this check is done by S_opmethod_stash) */
4737 opmethod_stash(meth);
/* consult the stash's dedicated SUPER method cache when one exists */
4739 if ((cache = HvMROMETA(stash)->super)) {
4740 METHOD_CHECK_CACHE(stash, cache, meth);
4743 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4746 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* NOTE(review): interior of pp_method_redir — redirected method call
 * ($obj->Other::Class::meth()); the target class name is recorded in
 * the METHOP (cMETHOPx_rclass).  Lines are elided in this view. */
4754 SV* const meth = cMETHOPx_meth(PL_op);
4755 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4756 opmethod_stash(meth); /* not used but needed for error checks */
4758 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
/* no such package exists yet: fall back to the raw class-name SV so
 * the fetch below can still report a sensible error */
4759 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4761 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4764 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4768 PP(pp_method_redir_super)
/* Redirected SUPER method call ($obj->Other::Class::SUPER::meth()):
 * like pp_method_redir, but searches starting from the redirect
 * class's parents (GV_SUPER) and uses its SUPER method cache.
 * NOTE(review): interior lines are elided in this view. */
4773 SV* const meth = cMETHOPx_meth(PL_op);
4774 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4775 opmethod_stash(meth); /* not used but needed for error checks */
4777 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4778 else if ((cache = HvMROMETA(stash)->super)) {
4779 METHOD_CHECK_CACHE(stash, cache, meth);
4782 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4785 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4790 * ex: set ts=8 sts=4 sw=4 et: