 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
 *
 * You may distribute under the terms of either the GNU General Public
 * License or the Artistic License, as specified in the README file.
 *
 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
 *
 *            Awake!  Awake!  Fear, Fire, Foes!  Awake!
 *
 *     [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
/* This file contains 'hot' pp ("push/pop") functions that
 * execute the opcodes that make up a perl program. A typical pp function
 * expects to find its arguments on the stack, and usually pushes its
 * results onto the stack, hence the 'pp' terminology. Each OP structure
 * contains a pointer to the relevant pp_foo() function.
 *
 * By 'hot', we mean common ops whose execution speed is critical.
 * By gathering them together into a single file, we encourage
 * CPU cache hits on hot code. Also it could be taken as a warning not to
 * change any code in this file unless you're sure it won't affect
 * speed. */
#define PERL_IN_PP_HOT_C

    PL_curcop = (COP*)PL_op;
    TAINT_NOT;		/* Each statement is presumed innocent */
    PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;

    if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
        PUSHs(save_scalar(cGVOP_gv));
    else
        PUSHs(GvSVn(cGVOP_gv));

/* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */

/* This is sometimes called directly by pp_coreargs, pp_grepstart and
   amagic_call. */
    PUSHMARK(PL_stack_sp);

    /* no PUTBACK, SETs doesn't inc/dec SP */
    XPUSHs(MUTABLE_SV(cGVOP_gv));

/* also used for: pp_andassign() */
    /* SP is not used to remove a variable that is saved across the
       sv_2bool_flags call in SvTRUE_NN; if a RISC/CISC or low/high machine
       register or load/store vs direct mem ops macro is introduced, this
       should be a define block between direct PL_stack_sp and dSP operations;
       presently, using PL_stack_sp is biased towards CISC CPUs */
    SV * const sv = *PL_stack_sp;
    if (PL_op->op_type == OP_AND)
        --PL_stack_sp;
    return cLOGOP->op_other;
    /* sassign keeps its args in the optree traditionally backwards.
       So we pop them differently.
    */
    SV *left = POPs; SV *right = TOPs;

    if (PL_op->op_private & OPpASSIGN_BACKWARDS) { /* {or,and,dor}assign */
        SV * const temp = left;
        left = right; right = temp;
    }
    assert(TAINTING_get || !TAINT_get);
    if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
        TAINT_NOT;
    if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
        SV * const cv = SvRV(right);
        const U32 cv_type = SvTYPE(cv);
        const bool is_gv = isGV_with_GP(left);
        const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
        /* Can do the optimisation if left (LVALUE) is not a typeglob,
           right (RVALUE) is a reference to something, and we're in void
           context. */
        if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
            /* Is the target symbol table currently empty?  */
            GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
            if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
                /* Good. Create a new proxy constant subroutine in the target.
                   The gv becomes a(nother) reference to the constant. */
                SV *const value = SvRV(cv);

                SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
                SvPCS_IMPORTED_on(gv);
                SvREFCNT_inc_simple_void(value);
        /* Need to fix things up.  */

        /* Need to fix GV.  */
        left = MUTABLE_SV(gv_fetchsv_nomg(left, GV_ADD, SVt_PVGV));

        /* We've been returned a constant rather than a full subroutine,
           but they expect a subroutine reference to apply.  */
        ENTER_with_name("sassign_coderef");
        SvREFCNT_inc_void(SvRV(cv));
        /* newCONSTSUB takes a reference count on the passed in SV
           from us.  We set the name to NULL, otherwise we get into
           all sorts of fun as the reference to our new sub is
           donated to the GV that we're about to assign to. */
        SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
                                               SvRV(cv))));
        LEAVE_with_name("sassign_coderef");
        /* What can happen for the corner case *{"BONK"} = \&{"BONK"};

           First:  ops for \&{"BONK"}; return us the constant in the
           Second: ops for *{"BONK"} cause that symbol table entry
                   (and our reference to it) to be upgraded from RV
           Thirdly: We get here. cv is actually PVGV now, and its
                    GvCV() is actually the subroutine we're looking for

           So change the reference so that it points to the subroutine
           of that typeglob, as that's what they were after all along.
        */
        GV *const upgraded = MUTABLE_GV(cv);
        CV *const source = GvCV(upgraded);

        assert(CvFLAGS(source) & CVf_CONST);

        SvREFCNT_inc_simple_void_NN(source);
        SvREFCNT_dec_NN(upgraded);
        SvRV_set(right, MUTABLE_SV(source));
    if (
        UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
        (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
    )
        Perl_warner(aTHX_
            packWARN(WARN_MISC), "Useless assignment to a temporary"
        );
    SvSetMagicSV(left, right);

    RETURNOP(cLOGOP->op_other);

    RETURNOP(cLOGOP->op_next);
    TAINT_NOT;		/* Each statement is presumed innocent */
    PL_stack_sp = PL_stack_base + cx->blk_oldsp;

    if (!(PL_op->op_flags & OPf_SPECIAL)) {
        assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx));
    dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);

    STRLEN rlen;
    const char *rpv = NULL;
    bool rbyte = FALSE;
    bool rcopied = FALSE;

    if (TARG == right && right != left) { /* $r = $l.$r */
        rpv = SvPV_nomg_const(right, rlen);
        rbyte = !DO_UTF8(right);
        right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
        rpv = SvPV_const(right, rlen);	/* no point setting UTF-8 here */
        rcopied = TRUE;
    }

    if (TARG != left) { /* not $l .= $r */
        STRLEN llen;
        const char* const lpv = SvPV_nomg_const(left, llen);
        lbyte = !DO_UTF8(left);
        sv_setpvn(TARG, lpv, llen);
    else { /* $l .= $r and left == TARG */
        if ((left == right		/* $l .= $l */
             || (PL_op->op_private & OPpTARGET_MY)) /* $l = $l . $r */
            && ckWARN(WARN_UNINITIALIZED)
            )
            report_uninit(right);
        SvPV_force_nomg_nolen(left);
        lbyte = !DO_UTF8(left);
    }

    rpv = SvPV_nomg_const(right, rlen);
    rbyte = !DO_UTF8(right);

    if (lbyte != rbyte) {
        if (lbyte)
            sv_utf8_upgrade_nomg(TARG);
        else {
            if (!rcopied)
                right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
            sv_utf8_upgrade_nomg(right);
            rpv = SvPV_nomg_const(right, rlen);
        }
    }
    sv_catpvn_nomg(TARG, rpv, rlen);
/* push the elements of av onto the stack.
 * XXX Note that padav has similar code but without the mg_get().
 * I suspect that the mg_get is no longer needed, but while padav
 * differs, it can't share this function */

S_pushav(pTHX_ AV* const av)
{
    dSP;
    const SSize_t maxarg = AvFILL(av) + 1;
    EXTEND(SP, maxarg);
    if (UNLIKELY(SvRMAGICAL(av))) {
        PADOFFSET i;
        for (i = 0; i < (PADOFFSET)maxarg; i++) {
            SV ** const svp = av_fetch(av, i, FALSE);
            /* See note in pp_helem, and bug id #27839 */
            SP[i+1] = svp
                ? SvGMAGICAL(*svp) ? (mg_get(*svp), *svp) : *svp
                : &PL_sv_undef;
        }
    }
    else {
        PADOFFSET i;
        for (i = 0; i < (PADOFFSET)maxarg; i++) {
            SV * const sv = AvARRAY(av)[i];
            SP[i+1] = LIKELY(sv) ? sv : &PL_sv_undef;
/* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */

    PADOFFSET base = PL_op->op_targ;
    int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;

    if (PL_op->op_flags & OPf_SPECIAL) {
        /* fake the RHS of my ($x,$y,..) = @_ */
        S_pushav(aTHX_ GvAVn(PL_defgv));
    }

    /* note, this is only skipped for compile-time-known void cxt */
    if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
        for (i = 0; i < count; i++)
            *++SP = PAD_SV(base+i);
    }
    if (PL_op->op_private & OPpLVAL_INTRO) {
        SV **svp = &(PAD_SVl(base));
        const UV payload = (UV)(
                    (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
                    | (count << SAVE_TIGHT_SHIFT)
                    | SAVEt_CLEARPADRANGE);

        STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
        assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT))

        for (i = 0; i < count; i++)
            SvPADSTALE_off(*svp++); /* mark lexical as active */
    OP * const op = PL_op;
    /* access PL_curpad once */
    SV ** const padentry = &(PAD_SVl(op->op_targ));

    PUTBACK; /* no pop/push after this, TOPs ok */

    if (op->op_flags & OPf_MOD) {
        if (op->op_private & OPpLVAL_INTRO)
            if (!(op->op_private & OPpPAD_STATE))
                save_clearsv(padentry);
        if (op->op_private & OPpDEREF) {
            /* TOPs is equivalent to TARG here.  Using TOPs (SP) rather
               than TARG reduces the scope of TARG, so it does not
               span the call to save_clearsv, resulting in smaller
               machine code. */
            TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
    tryAMAGICunTARGETlist(iter_amg, 0);
    PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);

    else PL_last_in_gv = PL_argvgv, PL_stack_sp--;

    if (!isGV_with_GP(PL_last_in_gv)) {
        if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
            PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));

        XPUSHs(MUTABLE_SV(PL_last_in_gv));

        PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
        if (PL_last_in_gv == (GV *)&PL_sv_undef)
            PL_last_in_gv = NULL;
    }
    assert(isGV_with_GP(PL_last_in_gv));

    return do_readline();
    tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
    SETs(boolSV(
        (SvIOK_notUV(left) && SvIOK_notUV(right))
        ? (SvIVX(left) == SvIVX(right))
        : ( do_ncmp(left, right) == 0)
    ));

/* also used for: pp_i_preinc() */

    SV *sv = *PL_stack_sp;

    if (LIKELY(((sv->sv_flags &
                 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
                  SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
                == SVf_IOK))
        && SvIVX(sv) != IV_MAX)
    {
        SvIV_set(sv, SvIVX(sv) + 1);
    }
    else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
        sv_inc(sv);
/* also used for: pp_i_predec() */

    SV *sv = *PL_stack_sp;

    if (LIKELY(((sv->sv_flags &
                 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
                  SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
                == SVf_IOK))
        && SvIVX(sv) != IV_MIN)
    {
        SvIV_set(sv, SvIVX(sv) - 1);
    }
    else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
        sv_dec(sv);
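
/* Illustrative sketch (not part of the perl source): the shape of the
 * pre-increment/decrement fast path above.  One mask test proves the value
 * is a plain integer with no magic, no string/NV caches and no reference,
 * and a separate bound check rules out overflow, so it can be bumped in
 * place; everything else falls through to the slow path.  The flag bits,
 * types and names here are made up for the example. */
#if 0
#include <limits.h>

struct fake_sv { unsigned flags; long long iv; };
#define F_IOK    0x01u  /* "is a plain integer" */
#define F_MAGIC  0x02u  /* stands in for SVs_GMG, SVf_THINKFIRST, ... */
#define F_OTHER  0x04u  /* stands in for the POK/NOK/ROK/IVisUV bits */

static void slow_inc(struct fake_sv *sv) { sv->iv++; /* placeholder for the hard cases */ }

static void fast_preinc(struct fake_sv *sv)
{
    if ((sv->flags & (F_IOK|F_MAGIC|F_OTHER)) == F_IOK
        && sv->iv != LLONG_MAX)
        sv->iv++;               /* cheap in-place increment */
    else
        slow_inc(sv);           /* hypothetical sv_inc() equivalent */
}
#endif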
/* also used for: pp_orassign() */

        if (PL_op->op_type == OP_OR)
            --SP;
        RETURNOP(cLOGOP->op_other);

/* also used for: pp_dor() pp_dorassign() */

    const int op_type = PL_op->op_type;
    const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);

        if (UNLIKELY(!sv || !SvANY(sv))) {
            if (op_type == OP_DOR)
                --SP;
            RETURNOP(cLOGOP->op_other);

        if (UNLIKELY(!sv || !SvANY(sv)))

    switch (SvTYPE(sv)) {
    case SVt_PVAV:
        if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
    case SVt_PVHV:
        if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
    case SVt_PVCV:
        if (CvROOT(sv) || CvXSUB(sv))

    if (op_type == OP_DOR)
        RETURNOP(cLOGOP->op_other);

    /* assuming OP_DEFINED */
    dSP; dATARGET; bool useleft; SV *svl, *svr;

    tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);

#ifdef PERL_PRESERVE_IVUV

    /* special-case some simple common cases */
    if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
        U32 flags = (svl->sv_flags & svr->sv_flags);
        if (flags & SVf_IOK) {
            /* both args are simple IVs */

            topl = ((UV)il) >> (UVSIZE * 8 - 2);
            topr = ((UV)ir) >> (UVSIZE * 8 - 2);

            /* if both are in a range that can't under/overflow, do a
             * simple integer add: if the top two bits of both numbers
             * are 00 or 11, then it's safe */
            if (!( ((topl+1) | (topr+1)) & 2)) {
                TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
        }
        else if (flags & SVf_NOK) {
            /* both args are NVs */

            if (
#if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
                !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
                && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
#else
                nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
#endif
                )
                /* nothing was lost by converting to IVs */
                TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
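
/* Illustrative sketch (not part of the perl source): why the
 * "top two bits" test above is sufficient.  If each operand's top two
 * bits are 00 (small non-negative) or 11 (small negative), both values
 * lie in [-2^62, 2^62) for a 64-bit IV, so their sum always fits and a
 * plain add cannot overflow.  Assumes a 64-bit long long. */
#if 0
static int add_cannot_overflow(long long il, long long ir)
{
    unsigned long long topl = (unsigned long long)il >> 62;
    unsigned long long topr = (unsigned long long)ir >> 62;
    /* topl+1 and topr+1 have bit 1 clear only when the top bits were
     * 00 (0) or 11 (3), so this is true iff both operands are safe */
    return !(((topl + 1) | (topr + 1)) & 2);
}
#endif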
    useleft = USE_LEFT(svl);
    /* We must see if we can perform the addition with integers if possible,
       as the integer code detects overflow while the NV code doesn't.
       If either argument hasn't had a numeric conversion yet attempt to get
       the IV. It's important to do this now, rather than just assuming that
       it's not IOK as a PV of "9223372036854775806" may not take well to NV
       addition, and an SV which is NOK, NV=6.0 ought to be coerced to
       integer in case the second argument is IV=9223372036854775806
       We can (now) rely on sv_2iv to do the right thing, only setting the
       public IOK flag if the value in the NV (or PV) slot is truly integer.

       A side effect is that this also aggressively prefers integer maths over
       fp maths for integer values.

       How to detect overflow?

       C99 section 6.2.6.1 says

       The range of nonnegative values of a signed integer type is a subrange
       of the corresponding unsigned integer type, and the representation of
       the same value in each type is the same. A computation involving
       unsigned operands can never overflow, because a result that cannot be
       represented by the resulting unsigned integer type is reduced modulo
       the number that is one greater than the largest value that can be
       represented by the resulting type.

       which I read as "unsigned ints wrap."

       signed integer overflow seems to be classed as "exception condition"

       If an exceptional condition occurs during the evaluation of an
       expression (that is, if the result is not mathematically defined or not
       in the range of representable values for its type), the behavior is
       undefined.

       (6.5, the 5th paragraph)

       I had assumed that on 2s complement machines signed arithmetic would
       wrap, hence coded pp_add and pp_subtract on the assumption that
       everything perl builds on would be happy.  After much wailing and
       gnashing of teeth it would seem that irix64 knows its ANSI spec well,
       knows that it doesn't need to, and doesn't.  Bah.  Anyway, the all-
       unsigned code below is actually shorter than the old code. :-)
    */
    if (SvIV_please_nomg(svr)) {
        /* Unless the left argument is integer in range we are going to have to
           use NV maths. Hence only attempt to coerce the right argument if
           we know the left is integer. */

            /* left operand is undef, treat as zero. + 0 is identity,
               Could SETi or SETu right now, but space optimise by not adding
               lots of code to speed up what is probably a rarish case. */

            /* Left operand is defined, so is it IV? */
            if (SvIV_please_nomg(svl)) {
                if ((auvok = SvUOK(svl)))
                    auv = SvUVX(svl);
                else {
                    const IV aiv = SvIVX(svl);
                        auvok = 1;	/* Now acting as a sign flag. */
                        auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);

            bool result_good = 0;
            bool buvok = SvUOK(svr);

                const IV biv = SvIVX(svr);
                    buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);

            /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
               else "IV" now, independent of how it came in.
               if a, b represents positive, A, B negative, a maps to -A etc
               all UV maths. negate result if A negative.
               add if signs same, subtract if signs differ. */

                    /* Must get smaller */

                    /* result really should be -(auv-buv). as it's the negation
                       of the true value, we need to swap our result flag */

            if (result <= (UV)IV_MIN)
                SETi(result == (UV)IV_MIN
                     ? IV_MIN : -(IV)result);

                /* result valid, but out of range for IV. */

    } /* Overflow, drop through to NVs. */
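
/* Illustrative sketch (not part of the perl source): detecting signed
 * overflow using only unsigned arithmetic, in the spirit of the
 * all-unsigned code above.  C99 guarantees unsigned wraparound, while
 * signed overflow is undefined behaviour, so the addition is done on
 * unsigned values and the overflow test inspects the sign bits after
 * the fact.  Assumes a 64-bit two's-complement long long; returns 0 on
 * overflow.  (The real code tracks magnitude and a sign flag instead.) */
#if 0
static int safe_signed_add(long long a, long long b, long long *out)
{
    unsigned long long ua = (unsigned long long)a;
    unsigned long long ub = (unsigned long long)b;
    unsigned long long ur = ua + ub;    /* wraps modulo 2^64, never UB */
    /* overflow iff a and b share a sign and the result's sign differs */
    if (~(ua ^ ub) & (ua ^ ur) & (1ULL << 63))
        return 0;
    *out = (long long)ur;
    return 1;
}
#endif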
    useleft = USE_LEFT(svl);

    NV value = SvNV_nomg(svr);

        /* left operand is undef, treat as zero. + 0.0 is identity. */

    SETn( value + SvNV_nomg(svl) );
/* also used for: pp_aelemfast_lex() */

    AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
        ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
    const U32 lval = PL_op->op_flags & OPf_MOD;
    const I8 key = (I8)PL_op->op_private;

    assert(SvTYPE(av) == SVt_PVAV);

    /* inlined av_fetch() for simple cases ... */
    if (!SvRMAGICAL(av) && key >= 0 && key <= AvFILLp(av)) {
        sv = AvARRAY(av)[key];

    /* ... else do it the hard way */
    svp = av_fetch(av, key, lval);
    sv = (svp ? *svp : &PL_sv_undef);

    if (UNLIKELY(!svp && lval))
        DIE(aTHX_ PL_no_aelem, (int)key);

    if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */

    do_join(TARG, *MARK, MARK, SP);
/* Oversized hot code. */

/* also used for: pp_say() */

    dSP; dMARK; dORIGMARK;
    GV * const gv
        = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;

    if (UNLIKELY( gv && (io = GvIO(gv))
        && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)) ))
    {
        if (MARK == ORIGMARK) {
            /* If using default handle then we need to make space to
             * pass object as 1st arg, so move other args up ...
             */
            Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
        }
        return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
                                mg,
                                (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
                                 | (PL_op->op_type == OP_SAY
                                    ? TIED_METHOD_SAY : 0)), sp - mark);
    }
    if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
         && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))

        SETERRNO(EBADF,RMS_IFI);

    else if (!(fp = IoOFP(io))) {
            report_wrongway_fh(gv, '<');
        SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);

        SV * const ofs = GvSV(PL_ofsgv); /* $, */

        if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
            if (!do_print(*MARK, fp))

            /* don't use 'ofs' here - it may be invalidated by magic callbacks */
            if (!do_print(GvSV(PL_ofsgv), fp)) {

        if (!do_print(*MARK, fp))

    if (PL_op->op_type == OP_SAY) {
        if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))

    else if (PL_ors_sv && SvOK(PL_ors_sv))
        if (!do_print(PL_ors_sv, fp)) /* $\ */

    if (IoFLAGS(io) & IOf_FLUSH)
        if (PerlIO_flush(fp) == EOF)

    XPUSHs(&PL_sv_undef);
/* also used for: pp_rv2hv() */
/* also called directly by pp_lvavref */

    const U8 gimme = GIMME_V;
    static const char an_array[] = "an ARRAY";
    static const char a_hash[] = "a HASH";
    const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
        || PL_op->op_type == OP_LVAVREF;
    const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;

        if (UNLIKELY(SvAMAGIC(sv))) {
            sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
        }
        if (UNLIKELY(SvTYPE(sv) != type))
            /* diag_listed_as: Not an ARRAY reference */
            DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
        else if (UNLIKELY(PL_op->op_flags & OPf_MOD
                          && PL_op->op_private & OPpLVAL_INTRO))
            Perl_croak(aTHX_ "%s", PL_no_localize_ref);

    else if (UNLIKELY(SvTYPE(sv) != type)) {
        GV *gv;

        if (!isGV_with_GP(sv)) {
            gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
                                 type, &sv);
        }
        else {
            gv = MUTABLE_GV(sv);
        }
        sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
        if (PL_op->op_private & OPpLVAL_INTRO)
            sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
    }
    if (PL_op->op_flags & OPf_REF) {

    else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
        const I32 flags = is_lvalue_sub();
        if (flags && !(flags & OPpENTERSUB_INARGS)) {
            if (gimme != G_ARRAY)
                goto croak_cant_return;

        AV *const av = MUTABLE_AV(sv);
        /* The guts of pp_rv2av  */
        if (gimme == G_ARRAY) {

        else if (gimme == G_SCALAR) {
            const SSize_t maxarg = AvFILL(av) + 1;

        /* The guts of pp_rv2hv  */
        if (gimme == G_ARRAY) { /* array wanted */
            return Perl_do_kv(aTHX);
        }
        else if ((PL_op->op_private & OPpTRUEBOOL
                  || (  PL_op->op_private & OPpMAYBE_TRUEBOOL
                     && block_gimme() == G_VOID ))
                 && (!SvRMAGICAL(sv) || !mg_find(sv, PERL_MAGIC_tied)))
            SETs(HvUSEDKEYS(MUTABLE_HV(sv)) ? &PL_sv_yes : &PL_sv_zero);
        else if (gimme == G_SCALAR) {
            TARG = Perl_hv_scalar(aTHX_ MUTABLE_HV(sv));

    Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
               is_pp_rv2av ? "array" : "hash");
STATIC void
S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
{
    PERL_ARGS_ASSERT_DO_ODDBALL;

    if (ckWARN(WARN_MISC)) {
        const char *err;
        if (oddkey == firstkey &&
            SvROK(*oddkey) &&
            (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
             SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
            err = "Reference found where even-sized list expected";
        else
            err = "Odd number of elements in hash assignment";
        Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
    }
}
/* Do a mark and sweep with the SVf_BREAK flag to detect elements which
 * are common to both the LHS and RHS of an aassign, and replace them
 * with copies. All these copies are made before the actual list assign is
 * done.
 *
 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
 * element ($b) to the first LH element ($a), modifies $a; when the
 * second assignment is done, the second RH element now has the wrong
 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
 * Note that we don't need to make a mortal copy of $b.
 *
 * The algorithm below works by, for every RHS element, marking the
 * corresponding LHS target element with SVf_BREAK. Then if the RHS
 * element is found with SVf_BREAK set, it means it would have been
 * modified, so make a copy.
 * Note that by scanning both LHS and RHS in lockstep, we avoid
 * unnecessary copies (like $b above) compared with a naive
 * "mark all LHS; copy all marked RHS; unmark all LHS".
 *
 * If the LHS element is a 'my' declaration and has a refcount of 1, then
 * it can't be common and can be skipped.
 *
 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
 * that we thought we didn't need to call S_aassign_copy_common(), but we
 * have anyway for sanity checking. If we find we need to copy, then panic.
 */
PERL_STATIC_INLINE void
S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
                      SV **firstrelem, SV **lastrelem
)
{
    SSize_t lcount = lastlelem - firstlelem + 1;
    bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
    bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
    bool copy_all = FALSE;

    assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
    assert(firstlelem < lastlelem); /* at least 2 LH elements */
    assert(firstrelem < lastrelem); /* at least 2 RH elements */

    /* we never have to copy the first RH element; it can't be corrupted
     * by assigning something to the corresponding first LH element.
     * So this scan does in a loop: mark LHS[N]; test RHS[N+1]
     */
    relem = firstrelem + 1;

    for (; relem <= lastrelem; relem++) {

        /* mark next LH element */

        if (--lcount >= 0) {
            SV *svl = *lelem++;

            if (UNLIKELY(!svl)) { /* skip AV alias marker */
                assert (lelem <= lastlelem);
            if (SvSMAGICAL(svl)) {
                copy_all = TRUE;
            }
            if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {

                /* this LH element will consume all further args;
                 * no need to mark any further LH elements (if any).
                 * But we still need to scan any remaining RHS elements;
                 * set lcount negative to distinguish from lcount == 0,
                 * so the loop condition continues being true
                 */
                lcount = -1;
                lelem--; /* no need to unmark this element */
            }
            else if (!(do_rc1 && SvREFCNT(svl) == 1) && !SvIMMORTAL(svl)) {
                SvFLAGS(svl) |= SVf_BREAK;
                marked = TRUE;
            }
        }

        /* don't check RH element if no SVf_BREAK flags set yet */
        if (!marked)
            continue;

        /* see if corresponding RH element needs copying */

        if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
            U32 brk = (SvFLAGS(svr) & SVf_BREAK);

#ifdef DEBUGGING
            if (fake) {
                /* op_dump(PL_op); */
                Perl_croak(aTHX_
                    "panic: aassign skipped needed copy of common RH elem %"
                    UVuf, (UV)(relem - firstrelem));
            }
#endif
            TAINT_NOT;	/* Each item is independent */

            /* Dear TODO test in t/op/sort.t, I love you.
               (It's relying on a panic, not a "semi-panic" from newSVsv()
               and then an assertion failure below.)  */
            if (UNLIKELY(SvIS_FREED(svr))) {
                Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
                           (void*)svr);
            }
            /* avoid break flag while copying; otherwise COW etc */
            SvFLAGS(svr) &= ~SVf_BREAK;
            /* Not newSVsv(), as it does not allow copy-on-write,
               resulting in wasteful copies.
               Also, we use SV_NOSTEAL in case the SV is used more than
               once, e.g.  (...) = (f())[0,0]
               Where the same SV appears twice on the RHS without a ref
               count bump.  (Although I suspect that the SV won't be
               stealable here anyway - DAPM).
            */
            *relem = sv_mortalcopy_flags(svr,
                                         SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
            /* ... but restore afterwards in case it's needed again,
             * e.g. ($a,$b,$c) = (1,$a,$a)
             */
            SvFLAGS(svr) |= brk;
    while (lelem > firstlelem) {
        SV * const svl = *(--lelem);
        if (svl)
            SvFLAGS(svl) &= ~SVf_BREAK;
    }
}
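
/* Illustrative sketch (not part of the perl source): the lockstep
 * mark/copy scheme described above, reduced to plain pointers.  Each
 * LHS slot is marked just before the *next* RHS slot is tested, so in
 * (a,b) = (b,a) the aliased "a" on the RHS is copied but "b" is not.
 * The flag field stands in for SVf_BREAK; all names are made up. */
#if 0
struct val { int marked; int n; };

static void copy_common(struct val **lhs, struct val **rhs,
                        struct val *tmp, int n)
{
    int i;
    /* the first RHS element can never have been clobbered yet */
    for (i = 1; i < n; i++) {
        lhs[i - 1]->marked = 1;        /* mark LHS[i-1] ... */
        if (rhs[i]->marked) {          /* ... then test RHS[i] */
            tmp[i] = *rhs[i];          /* aliased: substitute a copy */
            rhs[i] = &tmp[i];
        }
    }
    for (i = 0; i < n - 1; i++)        /* sweep: clear the marks */
        lhs[i]->marked = 0;
}
#endif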
    SV **lastlelem = PL_stack_sp;
    SV **lastrelem = PL_stack_base + POPMARK;
    SV **firstrelem = PL_stack_base + POPMARK + 1;
    SV **firstlelem = lastrelem + 1;

    /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
     * only need to save locally, not on the save stack */
    U16 old_delaymagic = PL_delaymagic;

    PL_delaymagic = DM_DELAY;		/* catch simultaneous items */

    /* If there's a common identifier on both sides we have to take
     * special care that assigning the identifier on the left doesn't
     * clobber a value on the right that's used later in the list.
     */

    /* at least 2 LH and RH elements, or commonality isn't an issue */
    if (firstlelem < lastlelem && firstrelem < lastrelem) {
        for (relem = firstrelem + 1; relem <= lastrelem; relem++) {
            if (SvGMAGICAL(*relem))
                goto do_scan;
        }
        for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
            if (*lelem && SvSMAGICAL(*lelem))
                goto do_scan;
        }
        if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
            if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
                /* skip the scan if all scalars have a ref count of 1 */
                for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
                    SV *sv = *lelem;
                    if (!sv || SvREFCNT(sv) == 1)
                        continue;
                    if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVHV)
                        goto do_scan;
                }
            }
            else {
              do_scan:
                S_aassign_copy_common(aTHX_
                                      firstlelem, lastlelem, firstrelem, lastrelem
                );
            }
        }
    }

    /* on debugging builds, do the scan even if we've concluded we
     * don't need to, then panic if we find commonality. Note that the
     * scanner assumes at least 2 elements */
    if (firstlelem < lastlelem && firstrelem < lastrelem) {
    if (relem > lastrelem)
        goto no_relems;

    /* first lelem loop while there are still relems */
    while (LIKELY(lelem <= lastlelem)) {
        bool alias = FALSE;
        SV *lsv = *lelem++;

        TAINT_NOT; /* Each item stands on its own, taintwise. */

        assert(relem <= lastrelem);
        if (UNLIKELY(!lsv)) {
            alias = TRUE;
            lsv = *lelem++;
            ASSUME(SvTYPE(lsv) == SVt_PVAV);
        }

        switch (SvTYPE(lsv)) {
        case SVt_PVAV: {
            SV **svp;
            SSize_t i;
            SSize_t tmps_base;
            SSize_t nelems = lastrelem - relem + 1;
            AV *ary = MUTABLE_AV(lsv);
            /* Assigning to an aggregate is tricky. First there is the
             * issue of commonality, e.g. @a = ($a[0]). Since the
             * stack isn't refcounted, clearing @a prior to storing
             * elements will free $a[0]. Similarly with
             *    sub FETCH { $status[$_[1]] } @status = @tied[0,1];
             *
             * The way to avoid these issues is to make the copy of each
             * SV (and we normally store a *copy* in the array) *before*
             * clearing the array. But this has a problem in that
             * if the code croaks during copying, the not-yet-stored copies
             * could leak. One way to avoid this is to make all the copies
             * mortal, but that's quite expensive.
             *
             * The current solution to these issues is to use a chunk
             * of the tmps stack as a temporary refcounted-stack. SVs
             * will be put on there during processing to avoid leaks,
             * but will be removed again before the end of this block,
             * so free_tmps() is never normally called. Also, the
             * sv_refcnt of the SVs doesn't have to be manipulated, since
             * the ownership of 1 reference count is transferred directly
             * from the tmps stack to the AV when the SV is stored.
             *
             * We disarm slots in the temps stack by storing PL_sv_undef
             * there: it doesn't matter if that SV's refcount is
             * repeatedly decremented during a croak. But usually this is
             * only an interim measure. By the end of this code block
             * we try where possible to not leave any PL_sv_undef's on the
             * tmps stack e.g. by shuffling newer entries down.
             *
             * There is one case where we don't copy: non-magical
             * SvTEMP(sv)'s with a ref count of 1. The only owner of these
             * is on the tmps stack, so it's safe to directly steal the SV
             * rather than copying. This is common in things like function
             * returns, map etc, which all return a list of such SVs.
             *
             * Note however something like @a = (f())[0,0], where there is
             * a danger of the same SV being shared: this is avoided because
             * when the SV is stored as $a[0], its ref count gets bumped,
             * so the RC==1 test fails and the second element is copied
             * instead.
             *
             * We also use one slot in the tmps stack to hold an extra
             * ref to the array, to ensure it doesn't get prematurely
             * freed. Again, this is removed before the end of this block.
             *
             * Note that OPpASSIGN_COMMON_AGG is used to flag a possible
             * @a = ($a[0]) case, but the current implementation uses the
             * same algorithm regardless, so ignores that flag. (It *is*
             * used in the hash branch below, however).
             */
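
            /* Illustrative sketch (not part of the perl source): the
             * "temporary refcounted stack" idea above, reduced to
             * malloc'd copies.  A scratch stack owns each copy while
             * the set is being built, so a failure part-way through
             * frees everything; on success ownership is transferred to
             * the destination and the scratch slots are disarmed.  All
             * names are made up for the example. */
#if 0
#include <stdlib.h>

static int build_copies(int **dst, const int *src, int n,
                        int **scratch, int *scratch_top)
{
    int base = *scratch_top + 1;
    int i;
    for (i = 0; i < n; i++) {
        int *copy = malloc(sizeof *copy);
        if (!copy) {                       /* "croak": unwind scratch, */
            while (*scratch_top >= base)   /* which still owns it all  */
                free(scratch[(*scratch_top)--]);
            return 0;
        }
        *copy = src[i];
        scratch[++*scratch_top] = copy;    /* scratch owns the copy */
    }
    for (i = 0; i < n; i++) {
        dst[i] = scratch[base + i];        /* transfer ownership ... */
        scratch[base + i] = NULL;          /* ... and disarm the slot */
    }
    *scratch_top = base - 1;
    return 1;
}
#endif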
            /* Reserve slots for ary, plus the elems we're about to copy,
             * then protect ary and temporarily void the remaining slots
             * with &PL_sv_undef */
            EXTEND_MORTAL(nelems + 1);
            PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(ary);
            tmps_base = PL_tmps_ix + 1;
            for (i = 0; i < nelems; i++)
                PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
            PL_tmps_ix += nelems;

            /* Make a copy of each RHS elem and save on the tmps_stack
             * (or pass through where we can optimise away the copy) */

            if (UNLIKELY(alias)) {
                U32 lval = (gimme == G_ARRAY)
                           ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
                for (svp = relem; svp <= lastrelem; svp++) {
                    SV *rsv = *svp;

                    SvGETMAGIC(rsv);
                    if (!SvROK(rsv))
                        DIE(aTHX_ "Assigned value is not a reference");
                    if (SvTYPE(SvRV(rsv)) > SVt_PVLV)
                        /* diag_listed_as: Assigned value is not %s reference */
                        DIE(aTHX_
                            "Assigned value is not a SCALAR reference");
                    if (lval)
                        *svp = rsv = sv_mortalcopy(rsv);
                    /* XXX else check for weak refs?  */
                    rsv = SvREFCNT_inc_NN(SvRV(rsv));
                    assert(tmps_base <= PL_tmps_max);
                    PL_tmps_stack[tmps_base++] = rsv;
                }
            }
            else {
                for (svp = relem; svp <= lastrelem; svp++) {
                    SV *rsv = *svp;

                    if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
                        /* can skip the copy */
                        SvREFCNT_inc_simple_void_NN(rsv);
                    }
                    else {
                        SV *nsv;
                        /* do get before newSV, in case it dies and leaks */
                        SvGETMAGIC(rsv);
                        nsv = newSV(0);
                        /* see comment in S_aassign_copy_common about
                         * SV_NOSTEAL */
                        sv_setsv_flags(nsv, rsv,
                                       (SV_DO_COW_SVSETSV|SV_NOSTEAL));
                        rsv = *svp = nsv;
                    }

                    assert(tmps_base <= PL_tmps_max);
                    PL_tmps_stack[tmps_base++] = rsv;
                }
            }
            if (SvRMAGICAL(ary) || AvFILLp(ary) >= 0) /* may be non-empty */
                av_clear(ary);

            /* store in the array, the SVs that are in the tmps stack */

            tmps_base -= nelems;

            if (SvMAGICAL(ary) || SvREADONLY(ary) || !AvREAL(ary)) {
                /* for arrays we can't cheat with, use the official API */
                av_extend(ary, nelems - 1);
                for (i = 0; i < nelems; i++) {
                    SV **svp = &(PL_tmps_stack[tmps_base + i]);
                    SV *rsv = *svp;
                    /* A tied store won't take ownership of rsv, so keep
                     * the 1 refcnt on the tmps stack; otherwise disarm
                     * the tmps stack entry */
                    if (av_store(ary, i, rsv))
                        *svp = &PL_sv_undef;
                    /* av_store() may have added set magic to rsv */;
                }
                /* disarm ary refcount: see comments below about leak */
                PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
            }
            else {
                /* directly access/set the guts of the AV */
                SSize_t fill = nelems - 1;
                if (fill > AvMAX(ary))
                    av_extend_guts(ary, fill, &AvMAX(ary), &AvALLOC(ary),
                AvFILLp(ary) = fill;
                Copy(&(PL_tmps_stack[tmps_base]), AvARRAY(ary), nelems, SV*);
                /* Quietly remove all the SVs from the tmps stack slots,
                 * since ary has now taken ownership of the refcnt.
                 * Also remove ary: which will now leak if we die before
                 * the SvREFCNT_dec_NN(ary) below */
                if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
                    Move(&PL_tmps_stack[tmps_base + nelems],
                         &PL_tmps_stack[tmps_base - 1],
                         PL_tmps_ix - (tmps_base + nelems) + 1,
                         SV*);
                PL_tmps_ix -= (nelems + 1);
            }

            if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
                /* it's assumed @ISA set magic can't die and leak ary */
                SvSETMAGIC(MUTABLE_SV(ary));
            SvREFCNT_dec_NN(ary);

            relem = lastrelem + 1;
            break;
        }
        case SVt_PVHV: {			/* normal hash */

            SV **svp;
            SSize_t i;
            SSize_t tmps_base;
            SSize_t nelems = lastrelem - relem + 1;
            HV *hash = MUTABLE_HV(lsv);

            if (UNLIKELY(nelems & 1)) {
                do_oddball(lastrelem, relem);
                /* we have firstlelem to reuse, it's not needed any more */
                *++lastrelem = &PL_sv_undef;
                nelems++;
            }

            /* See the SVt_PVAV branch above for a long description of
             * how the following all works. The main difference for hashes
             * is that we treat keys and values separately (and have
             * separate loops for them): as for arrays, values are always
             * copied (except for the SvTEMP optimisation), since they
             * need to be stored in the hash; while keys are only
             * processed where they might get prematurely freed or
             * whatever. */

            /* tmps stack slots:
             * * reserve a slot for the hash keepalive;
             * * reserve slots for the hash values we're about to copy;
             * * preallocate for the keys we'll possibly copy or refcount bump
             *
             * then protect hash and temporarily void the remaining
             * value slots with &PL_sv_undef */
            EXTEND_MORTAL(nelems + 1);

            /* convert to number of key/value pairs */
            nelems >>= 1;

            PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(hash);
            tmps_base = PL_tmps_ix + 1;
            for (i = 0; i < nelems; i++)
                PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
            PL_tmps_ix += nelems;
            /* Make a copy of each RHS hash value and save on the tmps_stack
             * (or pass through where we can optimise away the copy) */

            for (svp = relem + 1; svp <= lastrelem; svp += 2) {
                SV *rsv = *svp;

                if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
                    /* can skip the copy */
                    SvREFCNT_inc_simple_void_NN(rsv);
                }
                else {
                    SV *nsv;
                    /* do get before newSV, in case it dies and leaks */
                    SvGETMAGIC(rsv);
                    nsv = newSV(0);
                    /* see comment in S_aassign_copy_common about
                     * SV_NOSTEAL */
                    sv_setsv_flags(nsv, rsv,
                                   (SV_DO_COW_SVSETSV|SV_NOSTEAL));
                    rsv = *svp = nsv;
                }

                assert(tmps_base <= PL_tmps_max);
                PL_tmps_stack[tmps_base++] = rsv;
            }

            tmps_base -= nelems;
            /* possibly protect keys */

            if (UNLIKELY(gimme == G_ARRAY)) {
                /* handle e.g.
                 *     @a = ((%h = ($$r, 1)), $r = "x");
                 *     $_++ for %h = (1,2,3,4);
                 */
                EXTEND_MORTAL(nelems);
                for (svp = relem; svp <= lastrelem; svp += 2)
                    *svp = sv_mortalcopy_flags(*svp,
                                               SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
            }
            else if (PL_op->op_private & OPpASSIGN_COMMON_AGG) {
                /* for possible commonality, e.g.
                 *     %h = ($h{a},1)
                 * avoid premature freeing of RHS keys by mortalising
                 * them.
                 * For a magic element, make a copy so that its magic is
                 * called *before* the hash is emptied (which may affect
                 * a tied value for example).
                 * In theory we should check for magic keys in all
                 * cases, not just under OPpASSIGN_COMMON_AGG, but in
                 * practice, !OPpASSIGN_COMMON_AGG implies only
                 * constants or padtmps on the RHS.
                 */
                EXTEND_MORTAL(nelems);
                for (svp = relem; svp <= lastrelem; svp += 2) {
                    SV *rsv = *svp;
                    if (UNLIKELY(SvGMAGICAL(rsv))) {
                        SSize_t n;
                        *svp = sv_mortalcopy_flags(*svp,
                                                   SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
                        /* allow other branch to continue pushing
                         * onto tmps stack without checking each time */
                        n = (lastrelem - relem) >> 1;
                    }
                    else
                        PL_tmps_stack[++PL_tmps_ix] =
                                      SvREFCNT_inc_simple_NN(rsv);
                }
            }

            if (SvRMAGICAL(hash) || HvUSEDKEYS(hash))
                hv_clear(hash);
            /* now assign the keys and values to the hash */

            if (UNLIKELY(gimme == G_ARRAY)) {
                /* @a = (%h = (...)) etc */
                SV **topelem = relem;

                for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
                    SV *key = *svp;
                    SV *val = *++svp;
                    /* remove duplicates from list we return */
                    if (!hv_exists_ent(hash, key, 0)) {
                        /* copy key back: possibly to an earlier
                         * stack location if we encountered dups earlier;
                         * the values will be updated later
                         */
                        *topelem++ = key;
                        *topelem++ = val;
                    }
                    /* A tied store won't take ownership of val, so keep
                     * the 1 refcnt on the tmps stack; otherwise disarm
                     * the tmps stack entry */
                    if (hv_store_ent(hash, key, val, 0))
                        PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
                    /* hv_store_ent() may have added set magic to val */;
                }
                if (topelem < svp) {
                    /* at this point we have removed the duplicate key/value
                     * pairs from the stack, but the remaining values may be
                     * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
                     * the (a 2), but the stack now probably contains
                     * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
                     * obliterates the earlier key. So refresh all values. */
                    lastrelem = topelem - 1;
                    while (relem < lastrelem) {
                        HE *he;
                        he = hv_fetch_ent(hash, *relem++, 0, 0);
                        *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
                    }
                }
            }
            else {
                for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
                    SV *key = *svp;
                    SV *val = *++svp;
                    if (hv_store_ent(hash, key, val, 0))
                        PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
                    /* hv_store_ent() may have added set magic to val */;
                }
            }
            /* there are still some 'live' refcounts on the tmps stack
             * - usually caused by storing into a tied hash. So let
             * free_tmps() do the proper but slow job later.
             * Just disarm hash refcount: see comments below about leak
             */
            PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;

            /* Quietly remove all the SVs from the tmps stack slots,
             * since hash has now taken ownership of the refcnt.
             * Also remove hash: which will now leak if we die before
             * the SvREFCNT_dec_NN(hash) below */
            if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
                Move(&PL_tmps_stack[tmps_base + nelems],
                     &PL_tmps_stack[tmps_base - 1],
                     PL_tmps_ix - (tmps_base + nelems) + 1,
                     SV*);
            PL_tmps_ix -= (nelems + 1);

            SvREFCNT_dec_NN(hash);

            relem = lastrelem + 1;
            break;
        }
        default:
            if (!SvIMMORTAL(lsv)) {
                SV *ref;

                if (
                    UNLIKELY(SvTEMP(lsv)) && !SvSMAGICAL(lsv) && SvREFCNT(lsv) == 1 &&
                    (!isGV_with_GP(lsv) || SvFAKE(lsv)) && ckWARN(WARN_MISC)
                )
                    Perl_warner(aTHX_
                        packWARN(WARN_MISC),
                        "Useless assignment to a temporary"
                    );

                /* avoid freeing $$lsv if it might be needed for further
                 * elements, e.g. ($ref, $foo) = (1, $$ref) */
                if (   SvROK(lsv)
                    && ( ((ref = SvRV(lsv)), SvREFCNT(ref)) == 1)
                    && lelem <= lastlelem
                ) {
                    SSize_t ix;
                    SvREFCNT_inc_simple_void_NN(ref);
                    /* an unrolled sv_2mortal */
                    ix = ++PL_tmps_ix;
                    if (UNLIKELY(ix >= PL_tmps_max))
                        /* speculatively grow enough to cover other
                         * possible refs */
                        (void)tmps_grow_p(ix + (lastlelem - lelem));
                    PL_tmps_stack[ix] = ref;
                }

                sv_setsv(lsv, *relem);
            }
            if (++relem > lastrelem)
                goto no_relems;
            break;
        }
    }
  no_relems:

    /* simplified lelem loop for when there are no relems left */
    while (LIKELY(lelem <= lastlelem)) {
        SV *lsv = *lelem++;

        TAINT_NOT; /* Each item stands on its own, taintwise. */

        if (UNLIKELY(!lsv)) {
            lsv = *lelem++;
            ASSUME(SvTYPE(lsv) == SVt_PVAV);
        }

        switch (SvTYPE(lsv)) {
        case SVt_PVAV:
            if (SvRMAGICAL(lsv) || AvFILLp((SV*)lsv) >= 0) {
                av_clear((AV*)lsv);
                if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
                    SvSETMAGIC(lsv);
            }
            break;
        case SVt_PVHV:
            if (SvRMAGICAL(lsv) || HvUSEDKEYS((HV*)lsv))
                hv_clear((HV*)lsv);
            break;
        default:
            if (!SvIMMORTAL(lsv)) {
                sv_set_undef(lsv);
            }
            break;
        }
    }

    TAINT_NOT; /* result of list assign isn't tainted */
    if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
        /* Will be used to set PL_tainting below */
        Uid_t tmp_uid  = PerlProc_getuid();
        Uid_t tmp_euid = PerlProc_geteuid();
        Gid_t tmp_gid  = PerlProc_getgid();
        Gid_t tmp_egid = PerlProc_getegid();

        /* XXX $> et al currently silently ignore failures */
        if (PL_delaymagic & DM_UID) {
#ifdef HAS_SETRESUID
            PERL_UNUSED_RESULT(
                setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid  : (Uid_t)-1,
                          (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
                          (Uid_t)-1));
#else
# ifdef HAS_SETREUID
            PERL_UNUSED_RESULT(
                setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid  : (Uid_t)-1,
                         (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
# else
#  ifdef HAS_SETRUID
            if ((PL_delaymagic & DM_UID) == DM_RUID) {
                PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
                PL_delaymagic &= ~DM_RUID;
            }
#  endif /* HAS_SETRUID */
#  ifdef HAS_SETEUID
            if ((PL_delaymagic & DM_UID) == DM_EUID) {
                PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
                PL_delaymagic &= ~DM_EUID;
            }
#  endif /* HAS_SETEUID */
            if (PL_delaymagic & DM_UID) {
                if (PL_delaymagic_uid != PL_delaymagic_euid)
                    DIE(aTHX_ "No setreuid available");
                PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
            }
# endif /* HAS_SETREUID */
#endif /* HAS_SETRESUID */

            tmp_uid  = PerlProc_getuid();
            tmp_euid = PerlProc_geteuid();
        }

        /* XXX $> et al currently silently ignore failures */
        if (PL_delaymagic & DM_GID) {
#ifdef HAS_SETRESGID
            PERL_UNUSED_RESULT(
                setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid  : (Gid_t)-1,
                          (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
                          (Gid_t)-1));
#else
# ifdef HAS_SETREGID
            PERL_UNUSED_RESULT(
                setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid  : (Gid_t)-1,
                         (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
# else
#  ifdef HAS_SETRGID
            if ((PL_delaymagic & DM_GID) == DM_RGID) {
                PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
                PL_delaymagic &= ~DM_RGID;
            }
#  endif /* HAS_SETRGID */
#  ifdef HAS_SETEGID
            if ((PL_delaymagic & DM_GID) == DM_EGID) {
                PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
                PL_delaymagic &= ~DM_EGID;
            }
#  endif /* HAS_SETEGID */
            if (PL_delaymagic & DM_GID) {
                if (PL_delaymagic_gid != PL_delaymagic_egid)
                    DIE(aTHX_ "No setregid available");
                PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
            }
# endif /* HAS_SETREGID */
#endif /* HAS_SETRESGID */

            tmp_gid  = PerlProc_getgid();
            tmp_egid = PerlProc_getegid();
        }

        TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
#ifdef NO_TAINT_SUPPORT
        PERL_UNUSED_VAR(tmp_uid);
        PERL_UNUSED_VAR(tmp_euid);
        PERL_UNUSED_VAR(tmp_gid);
        PERL_UNUSED_VAR(tmp_egid);
#endif
    }
    PL_delaymagic = old_delaymagic;
    if (gimme == G_VOID)
        SP = firstrelem - 1;
    else if (gimme == G_SCALAR) {
        dTARGET;
        SP = firstrelem;
        SETi(firstlelem - firstrelem);
    PMOP * const pm = cPMOP;
    REGEXP * rx = PM_GETRE(pm);
    regexp *prog = ReANY(rx);
    SV * const pkg = RXp_ENGINE(prog)->qr_package(aTHX_ (rx));
    SV * const rv = sv_newmortal();

    SvUPGRADE(rv, SVt_IV);
    /* For a subroutine describing itself as "This is a hacky workaround" I'm
       loath to use it here, but it seems to be the right fix. Or close.
       The key part appears to be that it's essential for pp_qr to return a new
       object (SV), which implies that there needs to be an effective way to
       generate a new SV from the existing SV that is pre-compiled in the
       optree. */
    SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));

    cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
    if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
        *cvp = cv_clone(cv);
        SvREFCNT_dec_NN(cv);
    }

    if (pkg) {
        HV *const stash = gv_stashsv(pkg, GV_ADD);
        SvREFCNT_dec_NN(pkg);
        (void)sv_bless(rv, stash);
    }

    if (UNLIKELY(RXp_ISTAINTED(prog))) {
        SvTAINTED_on(rv);
        SvTAINTED_on(SvRV(rv));
    }
    SSize_t curpos = 0;   /* initial pos() or current $+[0] */
    const char *truebase; /* Start of string  */
    REGEXP *rx = PM_GETRE(pm);
    regexp *prog = ReANY(rx);
    const U8 gimme = GIMME_V;
    const I32 oldsave = PL_savestack_ix;
    I32 had_zerolen = 0;

    if (PL_op->op_flags & OPf_STACKED)

    PUTBACK;				/* EVAL blocks need stack_sp. */
    /* Skip get-magic if this is a qr// clone, because regcomp has
       already done it. */
    truebase = prog->mother_re
         ? SvPV_nomg_const(TARG, len)
         : SvPV_const(TARG, len);
    if (!truebase)
        DIE(aTHX_ "panic: pp_match");
    strend = truebase + len;
    rxtainted = (RXp_ISTAINTED(prog) ||
                 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));

    /* We need to know this in case we fail out early - pos() must be reset */
    global = dynpm->op_pmflags & PMf_GLOBAL;

    /* PMdf_USED is set after a ?? matches once */
    if (
#ifdef USE_ITHREADS
        SvREADONLY(PL_regex_pad[pm->op_pmoffset])
#else
        pm->op_pmflags & PMf_USED
#endif
    ) {
        DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once"));
    /* handle the empty pattern */
    if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
        if (PL_curpm == PL_reg_curpm) {
            if (PL_curpm_under) {
                if (PL_curpm_under == PL_reg_curpm) {
                    Perl_croak(aTHX_ "Infinite recursion via empty pattern");
                }
                else {
                    pm = PL_curpm_under;
                }
            }
        }
    }

    if (RXp_MINLEN(prog) >= 0 && (STRLEN)RXp_MINLEN(prog) > len) {
        DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
                              UVuf " < %" IVdf ")\n",
                              (UV)len, (IV)RXp_MINLEN(prog)));

    /* get pos() if //g */
    if (global) {
        mg = mg_find_mglob(TARG);
        if (mg && mg->mg_len >= 0) {
            curpos = MgBYTEPOS(mg, TARG, truebase, len);
            /* last time pos() was set, it was zero-length match */
            if (mg->mg_flags & MGf_MINMATCH)
                had_zerolen = 1;
        }
    }
#ifdef PERL_SAWAMPERSAND
    if ( RXp_NPARENS(prog)
        || PL_sawampersand
        || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
        || (dynpm->op_pmflags & PMf_KEEPCOPY)
    )
#endif
    {
        r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
        /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
         * only on the first iteration. Therefore we need to copy $' as well
         * as $&, to make the rest of the string available for captures in
         * subsequent iterations */
        if (! (global && gimme == G_ARRAY))
            r_flags |= REXEC_COPY_SKIP_POST;
    };
#ifdef PERL_SAWAMPERSAND
    if (dynpm->op_pmflags & PMf_KEEPCOPY)
        /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
        r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
#endif
    s = truebase + curpos;

    if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
                     had_zerolen, TARG, NULL, r_flags))
        goto nope;

    PL_curpm = pm;
    if (dynpm->op_pmflags & PMf_ONCE)
#ifdef USE_ITHREADS
        SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
#else
        dynpm->op_pmflags |= PMf_USED;
#endif

    if (rxtainted)
        RXp_MATCH_TAINTED_on(prog);
    TAINT_IF(RXp_MATCH_TAINTED(prog));

    /* update pos */

    if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
        if (!mg)
            mg = sv_magicext_mglob(TARG);
        MgBYTEPOS_set(mg, TARG, truebase, RXp_OFFS(prog)[0].end);
        if (RXp_ZERO_LEN(prog))
            mg->mg_flags |= MGf_MINMATCH;
        else
            mg->mg_flags &= ~MGf_MINMATCH;
    }

    if ((!RXp_NPARENS(prog) && !global) || gimme != G_ARRAY) {
        LEAVE_SCOPE(oldsave);
        RETPUSHYES;
    }
    /* push captures on stack */

    {
        const I32 nparens = RXp_NPARENS(prog);
        I32 i = (global && !nparens) ? 1 : 0;

        SPAGAIN;			/* EVAL blocks could move the stack. */
        EXTEND(SP, nparens + i);
        EXTEND_MORTAL(nparens + i);
        for (i = !i; i <= nparens; i++) {
            PUSHs(sv_newmortal());
            if (LIKELY((RXp_OFFS(prog)[i].start != -1)
                     && RXp_OFFS(prog)[i].end   != -1 ))
            {
                const I32 len = RXp_OFFS(prog)[i].end - RXp_OFFS(prog)[i].start;
                const char * const s = RXp_OFFS(prog)[i].start + truebase;
                if (UNLIKELY(  RXp_OFFS(prog)[i].end   < 0
                            || RXp_OFFS(prog)[i].start < 0
                            || len < 0
                            || len > strend - s)
                )
                    DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
                        "start=%ld, end=%ld, s=%p, strend=%p, len=%" UVuf,
                        (long) i, (long) RXp_OFFS(prog)[i].start,
                        (long)RXp_OFFS(prog)[i].end, s, strend, (UV) len);
                sv_setpvn(*SP, s, len);
                if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
                    SvUTF8_on(*SP);
            }
        }
        if (global) {
            curpos = (UV)RXp_OFFS(prog)[0].end;
            had_zerolen = RXp_ZERO_LEN(prog);
            PUTBACK;			/* EVAL blocks may use stack */
            r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
            goto play_it_again;
        }
        LEAVE_SCOPE(oldsave);
        RETURN;
    }
    NOT_REACHED; /* NOTREACHED */
    if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
        mg = mg_find_mglob(TARG);
        if (mg)
            mg->mg_len = -1;
    }
    LEAVE_SCOPE(oldsave);
    if (gimme == G_ARRAY)
        RETURN;
Perl_do_readline(pTHX)
{
    dSP; dTARGETSTACKED;

    IO * const io = GvIO(PL_last_in_gv);
    const I32 type = PL_op->op_type;
    const U8 gimme = GIMME_V;

    const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);

    Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
    if (gimme == G_SCALAR) {
        SvSetSV_nosteal(TARG, TOPs);
    }

    if (IoFLAGS(io) & IOf_ARGV) {
        if (IoFLAGS(io) & IOf_START) {
            if (av_tindex(GvAVn(PL_last_in_gv)) < 0) {
                IoFLAGS(io) &= ~IOf_START;
                do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
                SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
                sv_setpvs(GvSVn(PL_last_in_gv), "-");
                SvSETMAGIC(GvSV(PL_last_in_gv));
            }
        }
        fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
        if (!fp) { /* Note: fp != IoIFP(io) */
            (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
        }
    }
    else if (type == OP_GLOB)
        fp = Perl_start_glob(aTHX_ POPs, io);

    else if (type == OP_GLOB)

    else if (IoTYPE(io) == IoTYPE_WRONLY) {
        report_wrongway_fh(PL_last_in_gv, '>');
    }

    if ((!io || !(IoFLAGS(io) & IOf_START))
        && ckWARN(WARN_CLOSED)
        && type != OP_GLOB)
    {
        report_evil_fh(PL_last_in_gv);
    }
    if (gimme == G_SCALAR) {
        /* undef TARG, and push that undefined value */
        if (type != OP_RCATLINE) {
            sv_setsv(TARG,NULL);
        }
        PUSHTARG;
    }
    RETURN;

    if (gimme == G_SCALAR) {
        sv = TARG;
        if (type == OP_RCATLINE && SvGMAGICAL(sv))
            mg_get(sv);
        if (SvROK(sv)) {
            if (type == OP_RCATLINE)
                SvPV_force_nomg_nolen(sv);
        }
        else if (isGV_with_GP(sv)) {
            SvPV_force_nomg_nolen(sv);
        }
        SvUPGRADE(sv, SVt_PV);
        tmplen = SvLEN(sv);	/* remember if already alloced */
        if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
            /* try short-buffering it. Please update t/op/readline.t
             * if you change the growth length.
             */
            Sv_Grow(sv, 80);
        }
        if (type == OP_RCATLINE && SvOK(sv)) {
            if (!SvPOK(sv))
                SvPV_force_nomg_nolen(sv);
        }
    }
    else
        sv = sv_2mortal(newSV(80));
/* This should not be marked tainted if the fp is marked clean */
#define MAYBE_TAINT_LINE(io, sv) \
    if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
        TAINT;				\
        SvTAINTED_on(sv);		\
    }

/* delay EOF state for a snarfed empty file */
#define SNARF_EOF(gimme,rs,io,sv) \
    (gimme != G_SCALAR || SvCUR(sv)	\
     || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
    for (;;) {
        if (!sv_gets(sv, fp, offset)
            && (type == OP_GLOB
                || SNARF_EOF(gimme, PL_rs, io, sv)
                || PerlIO_error(fp)))
        {
            PerlIO_clearerr(fp);
            if (IoFLAGS(io) & IOf_ARGV) {
                fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
                if (fp)
                    continue;
                (void)do_close(PL_last_in_gv, FALSE);
            }
            else if (type == OP_GLOB) {
                if (!do_close(PL_last_in_gv, FALSE)) {
                    Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
                                   "glob failed (child exited with status %d%s)",
                                   (int)(STATUS_CURRENT >> 8),
                                   (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
                }
            }
            if (gimme == G_SCALAR) {
                if (type != OP_RCATLINE) {
                    SV_CHECK_THINKFIRST_COW_DROP(TARG);
                }
            }
            MAYBE_TAINT_LINE(io, sv);
        }
        MAYBE_TAINT_LINE(io, sv);
        IoFLAGS(io) |= IOf_NOLINE;
        if (type == OP_GLOB) {
            const char *t1;

            if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
                char * const tmps = SvEND(sv) - 1;
                if (*tmps == *SvPVX_const(PL_rs)) {
                    *tmps = '\0';
                    SvCUR_set(sv, SvCUR(sv) - 1);
                }
            }
            for (t1 = SvPVX_const(sv); *t1; t1++)
#ifdef __VMS
                if (strchr("*%?", *t1))
#else
                if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
#endif
                    break;
            if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
                (void)POPs;		/* Unmatched wildcard?  Chuck it... */
                continue;
            }
        } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
            if (ckWARN(WARN_UTF8)) {
                const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
                const STRLEN len = SvCUR(sv) - offset;
                const U8 *f;

                if (!is_utf8_string_loc(s, len, &f))
                    /* Emulate :encoding(utf8) warning in the same case. */
                    Perl_warner(aTHX_ packWARN(WARN_UTF8),
                                "utf8 \"\\x%02X\" does not map to Unicode",
                                f < (U8*)SvEND(sv) ? *f : 0);
            }
        }
        if (gimme == G_ARRAY) {
            if (SvLEN(sv) - SvCUR(sv) > 20) {
                SvPV_shrink_to_cur(sv);
            }
            sv = sv_2mortal(newSV(80));
        }
        else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
            /* try to reclaim a bit of scalar space (only on 1st alloc) */
            const STRLEN new_len
                = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40;	/* allow some slop */
            SvPV_renew(sv, new_len);
        }
    SV * const keysv = POPs;
    HV * const hv = MUTABLE_HV(POPs);
    const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
    const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
    const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
    bool preeminent = TRUE;

    if (SvTYPE(hv) != SVt_PVHV)
        RETPUSHUNDEF;

    if (localizing) {
        /* If we can determine whether the element exists,
         * try to preserve the existence of a tied hash
         * element by using EXISTS and DELETE if possible.
         * Fall back to FETCH and STORE otherwise. */
        if (SvCANEXISTDELETE(hv))
            preeminent = hv_exists_ent(hv, keysv, 0);
    }

    he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
    svp = he ? &HeVAL(he) : NULL;
    if (lval) {
        if (!svp || !*svp || *svp == &PL_sv_undef) {
            SV* lv;
            SV* key2;
            if (!defer)
                DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
            lv = sv_newmortal();
            sv_upgrade(lv, SVt_PVLV);
            LvTYPE(lv) = 'y';
            sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
            SvREFCNT_dec_NN(key2);	/* sv_magic() increments refcount */
            LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
        if (localizing) {
            if (HvNAME_get(hv) && isGV(*svp))
                save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
            else if (preeminent)
                save_helem_flags(hv, keysv, svp,
                                 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
            else
                SAVEHDELETE(hv, keysv);
        }
        else if (PL_op->op_private & OPpDEREF) {
            PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
            RETURN;
        }

    sv = (svp && *svp ? *svp : &PL_sv_undef);
    /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
     * was to make C<local $tied{foo} = $tied{foo}> possible.
     * However, it seems no longer to be needed for that purpose, and
     * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g>
     * would loop endlessly since the pos magic is getting set on the
     * mortal copy and lost. However, the copy has the effect of
     * triggering the get magic, and losing it altogether made things like
     * C<$tied{foo};> in void context no longer do get magic, which some
     * code relied on. Also, delayed triggering of magic on @+ and friends
     * meant the original regex may be out of scope by now. So as a
     * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
     * being called too many times). */
    if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
        mg_get(sv);
/* a stripped-down version of Perl_softref2xv() for use by
 * pp_multideref(), which doesn't use PL_op->op_flags */

S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
                  const svtype type)
{
    if (PL_op->op_private & HINT_STRICT_REFS) {
        if (SvOK(sv))
            Perl_die(aTHX_ PL_no_symref_sv, sv,
                     (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
        else
            Perl_die(aTHX_ PL_no_usym, what);
    }
    if (!SvOK(sv))
        Perl_die(aTHX_ PL_no_usym, what);
    return gv_fetchsv_nomg(sv, GV_ADD, type);
}
/* Handle one or more aggregate derefs and array/hash indexings, e.g.
 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
 *
 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
 * Each of these either contains a set of actions, or an argument, such as
 * an IV to use as an array index, or a lexical var to retrieve.
 * Several actions are stored per UV; we keep shifting new actions off the
 * one UV, and only reload when it becomes zero.
 */
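
/* Illustrative sketch (not part of the perl source): packing several
 * small action codes into one word and shifting them off one at a
 * time, reloading only when the current word is exhausted, as
 * described above.  Field widths and action names are made up. */
#if 0
#define ACTION_BITS 4
#define ACTION_MASK ((1u << ACTION_BITS) - 1)
#define ACT_DONE    0u   /* no more actions in this program */
#define ACT_RELOAD  1u   /* fetch the next packed word */

static void handle(unsigned a) { (void)a; /* hypothetical dispatcher */ }

static void run_actions(const unsigned *words)
{
    unsigned actions = *words++;
    for (;;) {
        unsigned a = actions & ACTION_MASK;
        if (a == ACT_DONE)
            return;
        if (a == ACT_RELOAD) {          /* word exhausted: reload */
            actions = *words++;
            continue;
        }
        handle(a);
        actions >>= ACTION_BITS;        /* expose the next action */
    }
}
#endif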
    SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
    UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
    UV actions = items->uv;

    /* this tells find_uninit_var() where we're up to */
    PL_multideref_pc = items;

    while (1) {
        /* there are three main classes of action; the first retrieves
         * the initial AV or HV from a variable or the stack; the second
         * does the equivalent of an unrolled (/DREFAV, rv2av, aelem);
         * the third, an unrolled (/DREFHV, rv2hv, helem).
         */
        switch (actions & MDEREF_ACTION_MASK) {

        case MDEREF_reload:
            actions = (++items)->uv;
            continue;

        case MDEREF_AV_padav_aelem:                 /* $lex[...] */
            sv = PAD_SVl((++items)->pad_offset);
            goto do_AV_aelem;

        case MDEREF_AV_gvav_aelem:                  /* $pkg[...] */
            sv = UNOP_AUX_item_sv(++items);
            assert(isGV_with_GP(sv));
            sv = (SV*)GvAVn((GV*)sv);
            goto do_AV_aelem;

        case MDEREF_AV_pop_rv2av_aelem:             /* expr->[...] */
            goto do_AV_rv2av_aelem;

        case MDEREF_AV_gvsv_vivify_rv2av_aelem:     /* $pkg->[...] */
            sv = UNOP_AUX_item_sv(++items);
            assert(isGV_with_GP(sv));
            sv = GvSVn((GV*)sv);
            goto do_AV_vivify_rv2av_aelem;

        case MDEREF_AV_padsv_vivify_rv2av_aelem:    /* $lex->[...] */
            sv = PAD_SVl((++items)->pad_offset);
            /* FALLTHROUGH */

        do_AV_vivify_rv2av_aelem:
        case MDEREF_AV_vivify_rv2av_aelem:          /* vivify, ->[...] */
            /* this is the OPpDEREF action normally found at the end of
             * ops like aelem, helem, rv2sv */
            sv = vivify_ref(sv, OPpDEREF_AV);
            /* FALLTHROUGH */
        do_AV_rv2av_aelem:
            /* this is basically a copy of pp_rv2av when it just has the
             * sKR/1 flags */
            if (LIKELY(SvROK(sv))) {
                if (UNLIKELY(SvAMAGIC(sv))) {
                    sv = amagic_deref_call(sv, to_av_amg);
                }
                sv = SvRV(sv);
                if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
                    DIE(aTHX_ "Not an ARRAY reference");
            }
            else if (SvTYPE(sv) != SVt_PVAV) {
                if (!isGV_with_GP(sv))
                    sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
                sv = MUTABLE_SV(GvAVn((GV*)sv));
            }

        do_AV_aelem:
            {
                /* retrieve the key; this may be either a lexical or package
                 * var (whose index/ptr is stored as an item) or a signed
                 * integer constant stored as an item.
                 */
                SV *elemsv;
                IV elem = 0; /* to shut up stupid compiler warnings */
                assert(SvTYPE(sv) == SVt_PVAV);

                switch (actions & MDEREF_INDEX_MASK) {
                case MDEREF_INDEX_none:
                    goto finish;
                case MDEREF_INDEX_const:
                    elem = (++items)->iv;
                    break;
                case MDEREF_INDEX_padsv:
                    elemsv = PAD_SVl((++items)->pad_offset);
                    goto check_elem;
                case MDEREF_INDEX_gvsv:
                    elemsv = UNOP_AUX_item_sv(++items);
                    assert(isGV_with_GP(elemsv));
                    elemsv = GvSVn((GV*)elemsv);
                check_elem:
                    if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
                                 && ckWARN(WARN_MISC)))
                        Perl_warner(aTHX_ packWARN(WARN_MISC),
                                    "Use of reference \"%" SVf "\" as array index",
                                    SVfARG(elemsv));
                    /* the only time that S_find_uninit_var() needs this
                     * is to determine which index value triggered the
                     * undef warning. So just update it here. Note that
                     * since we don't save and restore this var (e.g. for
                     * tie or overload execution), its value will be
                     * meaningless apart from just here */
                    PL_multideref_pc = items;
                    elem = SvIV(elemsv);
                    break;
                }
2586 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
2588 if (!(actions & MDEREF_FLAG_last)) {
2589 SV** svp = av_fetch((AV*)sv, elem, 1);
2590 if (!svp || ! (sv=*svp))
2591 DIE(aTHX_ PL_no_aelem, elem);
2595 if (PL_op->op_private &
2596 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2598 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2599 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
2602 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2603 sv = av_delete((AV*)sv, elem, discard);
2611 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2612 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2613 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2614 bool preeminent = TRUE;
2615 AV *const av = (AV*)sv;
2618 if (UNLIKELY(localizing)) {
                        /* If we can determine whether the element exists,
                         * try to preserve the existence of a tied array
                         * element by using EXISTS and DELETE if possible,
                         * falling back to FETCH and STORE otherwise. */
2626 if (SvCANEXISTDELETE(av))
2627 preeminent = av_exists(av, elem);
2630 svp = av_fetch(av, elem, lval && !defer);
2633 if (!svp || !(sv = *svp)) {
2636 DIE(aTHX_ PL_no_aelem, elem);
2637 len = av_tindex(av);
2638 sv = sv_2mortal(newSVavdefelem(av,
2639 /* Resolve a negative index now, unless it points
2640 * before the beginning of the array, in which
2641 * case record it for error reporting in
2642 * magic_setdefelem. */
2643 elem < 0 && len + elem >= 0
2644 ? len + elem : elem, 1));
2647 if (UNLIKELY(localizing)) {
2649 save_aelem(av, elem, svp);
2650 sv = *svp; /* may have changed */
2653 SAVEADELETE(av, elem);
2658 sv = (svp ? *svp : &PL_sv_undef);
2659 /* see note in pp_helem() */
2660 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
2677 case MDEREF_HV_padhv_helem: /* $lex{...} */
2678 sv = PAD_SVl((++items)->pad_offset);
2681 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
2682 sv = UNOP_AUX_item_sv(++items);
2683 assert(isGV_with_GP(sv));
2684 sv = (SV*)GvHVn((GV*)sv);
2687 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
2692 goto do_HV_rv2hv_helem;
2695 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
2696 sv = UNOP_AUX_item_sv(++items);
2697 assert(isGV_with_GP(sv));
2698 sv = GvSVn((GV*)sv);
2699 goto do_HV_vivify_rv2hv_helem;
2701 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
2702 sv = PAD_SVl((++items)->pad_offset);
2705 do_HV_vivify_rv2hv_helem:
2706 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
2707 /* this is the OPpDEREF action normally found at the end of
2708 * ops like aelem, helem, rv2sv */
2709 sv = vivify_ref(sv, OPpDEREF_HV);
2713 /* this is basically a copy of pp_rv2hv when it just has the
2714 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
2717 if (LIKELY(SvROK(sv))) {
2718 if (UNLIKELY(SvAMAGIC(sv))) {
2719 sv = amagic_deref_call(sv, to_hv_amg);
2722 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
2723 DIE(aTHX_ "Not a HASH reference");
2725 else if (SvTYPE(sv) != SVt_PVHV) {
2726 if (!isGV_with_GP(sv))
2727 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
2728 sv = MUTABLE_SV(GvHVn((GV*)sv));
2734 /* retrieve the key; this may be either a lexical / package
2735 * var or a string constant, whose index/ptr is stored as an
2738 SV *keysv = NULL; /* to shut up stupid compiler warnings */
2740 assert(SvTYPE(sv) == SVt_PVHV);
2742 switch (actions & MDEREF_INDEX_MASK) {
2743 case MDEREF_INDEX_none:
2746 case MDEREF_INDEX_const:
2747 keysv = UNOP_AUX_item_sv(++items);
2750 case MDEREF_INDEX_padsv:
2751 keysv = PAD_SVl((++items)->pad_offset);
2754 case MDEREF_INDEX_gvsv:
2755 keysv = UNOP_AUX_item_sv(++items);
2756 keysv = GvSVn((GV*)keysv);
2760 /* see comment above about setting this var */
2761 PL_multideref_pc = items;
2764 /* ensure that candidate CONSTs have been HEKified */
2765 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
2766 || SvTYPE(keysv) >= SVt_PVMG
2769 || SvIsCOW_shared_hash(keysv));
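                /* (i.e. a constant key should already have been turned
                 * into a shared hash-key scalar at compile time, so the
                 * hash lookups below can reuse its precomputed hash
                 * value - an illustrative note) */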
2771 /* this is basically a copy of pp_helem with OPpDEREF skipped */
2773 if (!(actions & MDEREF_FLAG_last)) {
2774 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
2775 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
2776 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2780 if (PL_op->op_private &
2781 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2783 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2784 sv = hv_exists_ent((HV*)sv, keysv, 0)
2785 ? &PL_sv_yes : &PL_sv_no;
2788 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2789 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
2797 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2798 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2799 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2800 bool preeminent = TRUE;
2802 HV * const hv = (HV*)sv;
2805 if (UNLIKELY(localizing)) {
                        /* If we can determine whether the element exists,
                         * try to preserve the existence of a tied hash
                         * element by using EXISTS and DELETE if possible,
                         * falling back to FETCH and STORE otherwise. */
2813 if (SvCANEXISTDELETE(hv))
2814 preeminent = hv_exists_ent(hv, keysv, 0);
2817 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2818 svp = he ? &HeVAL(he) : NULL;
2822 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
2826 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2827 lv = sv_newmortal();
2828 sv_upgrade(lv, SVt_PVLV);
2830 sv_magic(lv, key2 = newSVsv(keysv),
2831 PERL_MAGIC_defelem, NULL, 0);
2832 /* sv_magic() increments refcount */
2833 SvREFCNT_dec_NN(key2);
2834 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2840 if (HvNAME_get(hv) && isGV(sv))
2841 save_gp(MUTABLE_GV(sv),
2842 !(PL_op->op_flags & OPf_SPECIAL));
2843 else if (preeminent) {
2844 save_helem_flags(hv, keysv, svp,
2845 (PL_op->op_flags & OPf_SPECIAL)
2846 ? 0 : SAVEf_SETMAGIC);
2847 sv = *svp; /* may have changed */
2850 SAVEHDELETE(hv, keysv);
2855 sv = (svp && *svp ? *svp : &PL_sv_undef);
2856 /* see note in pp_helem() */
2857 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
2866 actions >>= MDEREF_SHIFT;
2885 itersvp = CxITERVAR(cx);
2888 switch (CxTYPE(cx)) {
2890 case CXt_LOOP_LAZYSV: /* string increment */
2892 SV* cur = cx->blk_loop.state_u.lazysv.cur;
2893 SV *end = cx->blk_loop.state_u.lazysv.end;
2894 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
2895 It has SvPVX of "" and SvCUR of 0, which is what we want. */
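        /* (e.g. C<for my $s ('aa' .. 'az') { ... }> iterates via Perl's
         * magic string increment: 'cur' holds the current string and
         * 'end' the inclusive maximum - an illustrative note) */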
2897 const char *max = SvPV_const(end, maxlen);
2898 if (DO_UTF8(end) && IN_UNI_8_BIT)
2899 maxlen = sv_len_utf8_nomg(end);
2900 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
2904 /* NB: on the first iteration, oldsv will have a ref count of at
2905 * least 2 (one extra from blk_loop.itersave), so the GV or pad
2906 * slot will get localised; on subsequent iterations the RC==1
2907 * optimisation may kick in and the SV will be reused. */
2908 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2909 /* safe to reuse old SV */
2910 sv_setsv(oldsv, cur);
2914 /* we need a fresh SV every time so that loop body sees a
             * completely new SV for closures/references to work as
             * expected */
            *itersvp = newSVsv(cur);
2918 SvREFCNT_dec(oldsv);
2920 if (strEQ(SvPVX_const(cur), max))
2921 sv_setiv(cur, 0); /* terminate next time */
2927 case CXt_LOOP_LAZYIV: /* integer increment */
2929 IV cur = cx->blk_loop.state_u.lazyiv.cur;
2930 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
2934 /* see NB comment above */
2935 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2936 /* safe to reuse old SV */
2938 if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV))
2941 /* Cheap SvIOK_only().
2942 * Assert that flags which SvIOK_only() would test or
2943 * clear can't be set, because we're SVt_IV */
2944 assert(!(SvFLAGS(oldsv) &
2945 (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK)))));
2946 SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK);
2947 /* SvIV_set() where sv_any points to head */
2948 oldsv->sv_u.svu_iv = cur;
2952 sv_setiv(oldsv, cur);
2956 /* we need a fresh SV every time so that loop body sees a
             * completely new SV for closures/references to work as they
             * should */
            *itersvp = newSViv(cur);
2960 SvREFCNT_dec(oldsv);
2963 if (UNLIKELY(cur == IV_MAX)) {
2964 /* Handle end of range at IV_MAX */
2965 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
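            /* cur can't be incremented past IV_MAX without overflowing,
             * so instead make the 'cur > end' test at the top of this
             * case fire on the next iteration */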
2967 ++cx->blk_loop.state_u.lazyiv.cur;
2971 case CXt_LOOP_LIST: /* for (1,2,3) */
2973 assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */
2974 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
2975 ix = (cx->blk_loop.state_u.stack.ix += inc);
2976 if (UNLIKELY(inc > 0
2977 ? ix > cx->blk_oldsp
2978 : ix <= cx->blk_loop.state_u.stack.basesp)
2982 sv = PL_stack_base[ix];
2984 goto loop_ary_common;
2986 case CXt_LOOP_ARY: /* for (@ary) */
2988 av = cx->blk_loop.state_u.ary.ary;
2989 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
2990 ix = (cx->blk_loop.state_u.ary.ix += inc);
2991 if (UNLIKELY(inc > 0
2997 if (UNLIKELY(SvRMAGICAL(av))) {
2998 SV * const * const svp = av_fetch(av, ix, FALSE);
2999 sv = svp ? *svp : NULL;
3002 sv = AvARRAY(av)[ix];
3007 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
3008 SvSetMagicSV(*itersvp, sv);
3013 if (UNLIKELY(SvIS_FREED(sv))) {
3015 Perl_croak(aTHX_ "Use of freed value in iteration");
3022 SvREFCNT_inc_simple_void_NN(sv);
3026 sv = newSVavdefelem(av, ix, 0);
3033 SvREFCNT_dec(oldsv);
3037 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
3045 /* pp_enteriter should have pre-extended the stack */
3046 EXTEND_SKIP(PL_stack_sp, 1);
    *++PL_stack_sp = retsv;
3049 return PL_op->op_next;
3053 A description of how taint works in pattern matching and substitution.
3055 This is all conditional on NO_TAINT_SUPPORT not being defined. Under
3056 NO_TAINT_SUPPORT, taint-related operations should become no-ops.
3058 While the pattern is being assembled/concatenated and then compiled,
3059 PL_tainted will get set (via TAINT_set) if any component of the pattern
3060 is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
3061 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
3062 TAINT_get). It will also be set if any component of the pattern matches
3063 based on locale-dependent behavior.
3065 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
3066 the pattern is marked as tainted. This means that subsequent usage, such
3067 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
3068 on the new pattern too.
3070 RXf_TAINTED_SEEN is used post-execution by the get magic code
3071 of $1 et al to indicate whether the returned value should be tainted.
3072 It is the responsibility of the caller of the pattern (i.e. pp_match,
pp_subst etc) to set this flag for any other circumstances where $1 needs
to be tainted.
3076 The taint behaviour of pp_subst (and pp_substcont) is quite complex.
3078 There are three possible sources of taint
3080 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
3081 * the replacement string (or expression under /e)
3083 There are four destinations of taint and they are affected by the sources
3084 according to the rules below:
3086 * the return value (not including /r):
3087 tainted by the source string and pattern, but only for the
3088 number-of-iterations case; boolean returns aren't tainted;
3089 * the modified string (or modified copy under /r):
3090 tainted by the source string, pattern, and replacement strings;
* $1 et al:
      tainted by the pattern, and under 'use re "taint"', by the source
      string too;
3094 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
3095 should always be unset before executing subsequent code.
3097 The overall action of pp_subst is:
3099 * at the start, set bits in rxtainted indicating the taint status of
3100 the various sources.
3102 * After each pattern execution, update the SUBST_TAINT_PAT bit in
3103 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
3104 pattern has subsequently become tainted via locale ops.
3106 * If control is being passed to pp_substcont to execute a /e block,
3107 save rxtainted in the CXt_SUBST block, for future use by
3110 * Whenever control is being returned to perl code (either by falling
3111 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
3112 use the flag bits in rxtainted to make all the appropriate types of
3113 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
3114 et al will appear tainted.
3116 pp_match is just a simpler version of the above.
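
A minimal illustration of the rules above (a sketch; assumes taint mode
is enabled via -T, and that $t holds a tainted string):

    my $s = "abc";
    my $n = ($s =~ s/b/$t/);   # tainted replacement:
                               #   $s (the modified string) is tainted;
                               #   $n (the iteration count) is not, since
                               #   the return value ignores REPL taint

A tainted pattern, by contrast, would also taint $1 et al.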
3132 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
3133 See "how taint works" above */
3136 REGEXP *rx = PM_GETRE(pm);
3137 regexp *prog = ReANY(rx);
3139 int force_on_match = 0;
3140 const I32 oldsave = PL_savestack_ix;
3142 bool doutf8 = FALSE; /* whether replacement is in utf8 */
3147 /* known replacement string? */
3148 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
3152 if (PL_op->op_flags & OPf_STACKED)
3163 SvGETMAGIC(TARG); /* must come before cow check */
3165 /* note that a string might get converted to COW during matching */
3166 was_cow = cBOOL(SvIsCOW(TARG));
3168 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3169 #ifndef PERL_ANY_COW
3171 sv_force_normal_flags(TARG,0);
3173 if ((SvREADONLY(TARG)
3174 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
3175 || SvTYPE(TARG) > SVt_PVLV)
3176 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
3177 Perl_croak_no_modify();
3181 orig = SvPV_nomg(TARG, len);
3182 /* note we don't (yet) force the var into being a string; if we fail
3183 * to match, we leave as-is; on successful match however, we *will*
3184 * coerce into a string, then repeat the match */
3185 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
3188 /* only replace once? */
3189 once = !(rpm->op_pmflags & PMf_GLOBAL);
3191 /* See "how taint works" above */
3194 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
3195 | (RXp_ISTAINTED(prog) ? SUBST_TAINT_PAT : 0)
3196 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
3197 | ((once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
3198 ? SUBST_TAINT_BOOLRET : 0));
3204 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
3206 strend = orig + len;
3207 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len;
3208 maxiters = 2 * slen + 10; /* We can match twice at each
3209 position, once with zero-length,
3210 second time with non-zero. */
3212 /* handle the empty pattern */
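    /* (an empty pattern, e.g. C<$s =~ s//x/>, reuses the most recent
     * successful pattern, tracked via PL_curpm) */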
3213 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
3214 if (PL_curpm == PL_reg_curpm) {
3215 if (PL_curpm_under) {
3216 if (PL_curpm_under == PL_reg_curpm) {
3217 Perl_croak(aTHX_ "Infinite recursion via empty pattern");
3219 pm = PL_curpm_under;
3229 #ifdef PERL_SAWAMPERSAND
3230 r_flags = ( RXp_NPARENS(prog)
3232 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
3233 || (rpm->op_pmflags & PMf_KEEPCOPY)
3238 r_flags = REXEC_COPY_STR;
3241 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
3244 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
3245 LEAVE_SCOPE(oldsave);
3250 /* known replacement string? */
3252 /* replacement needing upgrading? */
3253 if (DO_UTF8(TARG) && !doutf8) {
3254 nsv = sv_newmortal();
3256 sv_utf8_upgrade(nsv);
3257 c = SvPV_const(nsv, clen);
3261 c = SvPV_const(dstr, clen);
3262 doutf8 = DO_UTF8(dstr);
3265 if (SvTAINTED(dstr))
3266 rxtainted |= SUBST_TAINT_REPL;
3273 /* can do inplace substitution? */
3278 && (I32)clen <= RXp_MINLENRET(prog)
3280 || !(r_flags & REXEC_COPY_STR)
3281 || (!SvGMAGICAL(dstr) && !(RXp_EXTFLAGS(prog) & RXf_EVAL_SEEN))
3283 && !(RXp_EXTFLAGS(prog) & RXf_NO_INPLACE_SUBST)
3284 && (!doutf8 || SvUTF8(TARG))
3285 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
3289 /* string might have got converted to COW since we set was_cow */
3290 if (SvIsCOW(TARG)) {
3291 if (!force_on_match)
3293 assert(SvVOK(TARG));
3296 if (force_on_match) {
3297 /* redo the first match, this time with the orig var
3298 * forced into being a string */
3300 orig = SvPV_force_nomg(TARG, len);
3306 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
3307 rxtainted |= SUBST_TAINT_PAT;
3308 m = orig + RXp_OFFS(prog)[0].start;
3309 d = orig + RXp_OFFS(prog)[0].end;
3311 if (m - s > strend - d) { /* faster to shorten from end */
3314 Copy(c, m, clen, char);
3319 Move(d, m, i, char);
3323 SvCUR_set(TARG, m - s);
3325 else { /* faster from front */
3329 Move(s, d - i, i, char);
3332 Copy(c, d, clen, char);
3339 d = s = RXp_OFFS(prog)[0].start + orig;
3342 if (UNLIKELY(iters++ > maxiters))
3343 DIE(aTHX_ "Substitution loop");
3344 /* run time pattern taint, eg locale */
3345 if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
3346 rxtainted |= SUBST_TAINT_PAT;
3347 m = RXp_OFFS(prog)[0].start + orig;
3350 Move(s, d, i, char);
3354 Copy(c, d, clen, char);
3357 s = RXp_OFFS(prog)[0].end + orig;
3358 } while (CALLREGEXEC(rx, s, strend, orig,
3359 s == m, /* don't match same null twice */
3361 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3364 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
3365 Move(s, d, i+1, char); /* include the NUL */
3375 if (force_on_match) {
3376 /* redo the first match, this time with the orig var
3377 * forced into being a string */
3379 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3380 /* I feel that it should be possible to avoid this mortal copy
3381 given that the code below copies into a new destination.
3382 However, I suspect it isn't worth the complexity of
3383 unravelling the C<goto force_it> for the small number of
3384 cases where it would be viable to drop into the copy code. */
3385 TARG = sv_2mortal(newSVsv(TARG));
3387 orig = SvPV_force_nomg(TARG, len);
3393 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
3394 rxtainted |= SUBST_TAINT_PAT;
3396 s = RXp_OFFS(prog)[0].start + orig;
3397 dstr = newSVpvn_flags(orig, s-orig,
3398 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
3403 /* note that a whole bunch of local vars are saved here for
3404 * use by pp_substcont: here's a list of them in case you're
     * searching for places in this sub that use a particular var:
3406 * iters maxiters r_flags oldsave rxtainted orig dstr targ
3407 * s m strend rx once */
3409 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
3413 if (UNLIKELY(iters++ > maxiters))
3414 DIE(aTHX_ "Substitution loop");
3415 if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
3416 rxtainted |= SUBST_TAINT_PAT;
3417 if (RXp_MATCH_COPIED(prog) && RXp_SUBBEG(prog) != orig) {
3419 char *old_orig = orig;
3420 assert(RXp_SUBOFFSET(prog) == 0);
3422 orig = RXp_SUBBEG(prog);
3423 s = orig + (old_s - old_orig);
3424 strend = s + (strend - old_s);
3426 m = RXp_OFFS(prog)[0].start + orig;
3427 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
3428 s = RXp_OFFS(prog)[0].end + orig;
3430 /* replacement already stringified */
3432 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
3436 sv_catsv(dstr, repl);
3437 if (UNLIKELY(SvTAINTED(repl)))
3438 rxtainted |= SUBST_TAINT_REPL;
3442 } while (CALLREGEXEC(rx, s, strend, orig,
3443 s == m, /* Yields minend of 0 or 1 */
3445 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3446 assert(strend >= s);
3447 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
3449 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3450 /* From here on down we're using the copy, and leaving the original
3457 /* The match may make the string COW. If so, brilliant, because
3458 that's just saved us one malloc, copy and free - the regexp has
3459 donated the old buffer, and we malloc an entirely new one, rather
3460 than the regexp malloc()ing a buffer and copying our original,
3461 only for us to throw it away here during the substitution. */
3462 if (SvIsCOW(TARG)) {
3463 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
3469 SvPV_set(TARG, SvPVX(dstr));
3470 SvCUR_set(TARG, SvCUR(dstr));
3471 SvLEN_set(TARG, SvLEN(dstr));
3472 SvFLAGS(TARG) |= SvUTF8(dstr);
3473 SvPV_set(dstr, NULL);
3480 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3481 (void)SvPOK_only_UTF8(TARG);
3484 /* See "how taint works" above */
3486 if ((rxtainted & SUBST_TAINT_PAT) ||
3487 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
3488 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
3490 (RXp_MATCH_TAINTED_on(prog)); /* taint $1 et al */
3492 if (!(rxtainted & SUBST_TAINT_BOOLRET)
3493 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
3495 SvTAINTED_on(TOPs); /* taint return value */
3497 SvTAINTED_off(TOPs); /* may have got tainted earlier */
3499 /* needed for mg_set below */
3501 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
3505 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
3507 LEAVE_SCOPE(oldsave);
3516 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
3517 ++*PL_markstack_ptr;
3519 LEAVE_with_name("grep_item"); /* exit inner scope */
3522 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
3524 const U8 gimme = GIMME_V;
3526 LEAVE_with_name("grep"); /* exit outer scope */
3527 (void)POPMARK; /* pop src */
3528 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
3529 (void)POPMARK; /* pop dst */
3530 SP = PL_stack_base + POPMARK; /* pop original mark */
3531 if (gimme == G_SCALAR) {
3535 else if (gimme == G_ARRAY)
3542 ENTER_with_name("grep_item"); /* enter inner scope */
3545 src = PL_stack_base[TOPMARK];
3546 if (SvPADTMP(src)) {
3547 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
3553 RETURNOP(cLOGOP->op_other);
3557 /* leave_adjust_stacks():
3559 * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp),
3560 * positioning them at to_sp+1 onwards, and do the equivalent of a
3561 * FREEMPS and TAINT_NOT.
3563 * Not intended to be called in void context.
3565 * When leaving a sub, eval, do{} or other scope, the things that need
3566 * doing to process the return args are:
3567 * * in scalar context, only return the last arg (or PL_sv_undef if none);
3568 * * for the types of return that return copies of their args (such
3569 * as rvalue sub return), make a mortal copy of every return arg,
3570 * except where we can optimise the copy away without it being
3571 * semantically visible;
3572 * * make sure that the arg isn't prematurely freed; in the case of an
3573 * arg not copied, this may involve mortalising it. For example, in
3574 * C<sub f { my $x = ...; $x }>, $x would be freed when we do
3575 * CX_LEAVE_SCOPE(cx) unless it's protected or copied.
3577 * What condition to use when deciding whether to pass the arg through
3578 * or make a copy, is determined by the 'pass' arg; its valid values are:
3579 * 0: rvalue sub/eval exit
3580 * 1: other rvalue scope exit
3581 * 2: :lvalue sub exit in rvalue context
3582 * 3: :lvalue sub exit in lvalue context and other lvalue scope exits
3584 * There is a big issue with doing a FREETMPS. We would like to free any
3585 * temps created by the last statement which the sub executed, rather than
3586 * leaving them for the caller. In a situation where a sub call isn't
3587 * soon followed by a nextstate (e.g. nested recursive calls, a la
3588 * fibonacci()), temps can accumulate, causing memory and performance
3591 * On the other hand, we don't want to free any TEMPs which are keeping
3592 * alive any return args that we skipped copying; nor do we wish to undo
3593 * any mortalising done here.
3595 * The solution is to split the temps stack frame into two, with a cut
3596 * point delineating the two halves. We arrange that by the end of this
3597 * function, all the temps stack frame entries we wish to keep are in the
3598 * range PL_tmps_floor+1.. tmps_base-1, while the ones to free now are in
3599 * the range tmps_base .. PL_tmps_ix. During the course of this
3600 * function, tmps_base starts off as PL_tmps_floor+1, then increases
3601 * whenever we find or create a temp that we know should be kept. In
3602 * general the stuff above tmps_base is undecided until we reach the end,
3603 * and we may need a sort stage for that.
3605 * To determine whether a TEMP is keeping a return arg alive, every
3606 * arg that is kept rather than copied and which has the SvTEMP flag
3607 * set, has the flag temporarily unset, to mark it. At the end we scan
3608 * the temps stack frame above the cut for entries without SvTEMP and
3609 * keep them, while turning SvTEMP on again. Note that if we die before
 * the SvTEMP flags are set again, it's safe: at worst, subsequent use of
3611 * those SVs may be slightly less efficient.
3613 * In practice various optimisations for some common cases mean we can
3614 * avoid most of the scanning and swapping about with the temps stack.
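 *
 * For example (an illustrative note), in a recursive chain like
 *     sub fib { my $n = shift; $n < 2 ? $n : fib($n-1) + fib($n-2) }
 * no nextstate op runs between the nested calls, so without the frame
 * split described above, the mortals created by each inner call would
 * accumulate until the outermost statement finished.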
3618 Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass)
3622 SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
3625 PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS;
3629 if (gimme == G_ARRAY) {
3630 nargs = SP - from_sp;
3634 assert(gimme == G_SCALAR);
3635 if (UNLIKELY(from_sp >= SP)) {
3636 /* no return args */
3637 assert(from_sp == SP);
3639 *++SP = &PL_sv_undef;
3649 /* common code for G_SCALAR and G_ARRAY */
3651 tmps_base = PL_tmps_floor + 1;
3655 /* pointer version of tmps_base. Not safe across temp stack
3659 EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
3660 tmps_basep = PL_tmps_stack + tmps_base;
3662 /* process each return arg */
3665 SV *sv = *from_sp++;
3667 assert(PL_tmps_ix + nargs < PL_tmps_max);
3669 /* PADTMPs with container set magic shouldn't appear in the
3670 * wild. This assert is more important for pp_leavesublv(),
3671 * but by testing for it here, we're more likely to catch
3672 * bad cases (what with :lvalue subs not being widely
3673 * deployed). The two issues are that for something like
3674 * sub :lvalue { $tied{foo} }
3676 * sub :lvalue { substr($foo,1,2) }
3677 * pp_leavesublv() will croak if the sub returns a PADTMP,
3678 * and currently functions like pp_substr() return a mortal
3679 * rather than using their PADTMP when returning a PVLV.
3680 * This is because the PVLV will hold a ref to $foo,
3681 * so $foo would get delayed in being freed while
3682 * the PADTMP SV remained in the PAD.
3683 * So if this assert fails it means either:
3684 * 1) there is pp code similar to pp_substr that is
3685 * returning a PADTMP instead of a mortal, and probably
3687 * 2) pp_leavesublv is making unwarranted assumptions
3688 * about always croaking on a PADTMP
3690 if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
3692 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
3693 assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
3699 pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3700 : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3701 : pass == 2 ? (!SvPADTMP(sv))
3704 /* pass through: skip copy for logic or optimisation
3705 * reasons; instead mortalise it, except that ... */
3709 /* ... since this SV is an SvTEMP , we don't need to
3710 * re-mortalise it; instead we just need to ensure
3711 * that its existing entry in the temps stack frame
3712 * ends up below the cut and so avoids being freed
3713 * this time round. We mark it as needing to be kept
3714 * by temporarily unsetting SvTEMP; then at the end,
3715 * we shuffle any !SvTEMP entries on the tmps stack
3716 * back below the cut.
3717 * However, there's a significant chance that there's
3718 * a 1:1 correspondence between the first few (or all)
3719 * elements in the return args stack frame and those
                 * in the temps stack frame; e.g.:
3721 * sub f { ....; map {...} .... },
3722 * or if we're exiting multiple scopes and one of the
3723 * inner scopes has already made mortal copies of each
3726 * If so, this arg sv will correspond to the next item
3727 * on the tmps stack above the cut, and so can be kept
3728 * merely by moving the cut boundary up one, rather
3729 * than messing with SvTEMP. If all args are 1:1 then
3730 * we can avoid the sorting stage below completely.
3732 * If there are no items above the cut on the tmps
                 * stack, then the SvTEMP must come from an item
3734 * below the cut, so there's nothing to do.
3736 if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) {
3737 if (sv == *tmps_basep)
3743 else if (!SvPADTMP(sv)) {
3744 /* mortalise arg to avoid it being freed during save
3745 * stack unwinding. Pad tmps don't need mortalising as
3746 * they're never freed. This is the equivalent of
3747 * sv_2mortal(SvREFCNT_inc(sv)), except that:
3748 * * it assumes that the temps stack has already been
3750 * * it puts the new item at the cut rather than at
3751 * ++PL_tmps_ix, moving the previous occupant there
3754 if (!SvIMMORTAL(sv)) {
3755 SvREFCNT_inc_simple_void_NN(sv);
3757 /* Note that if there's nothing above the cut,
3758 * this copies the garbage one slot above
3759 * PL_tmps_ix onto itself. This is harmless (the
3760 * stack's already been extended), but might in
3761 * theory trigger warnings from tools like ASan
3763 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3769 /* Make a mortal copy of the SV.
3770 * The following code is the equivalent of sv_mortalcopy()
3772 * * it assumes the temps stack has already been extended;
3773 * * it optimises the copying for some simple SV types;
3774 * * it puts the new item at the cut rather than at
3775 * ++PL_tmps_ix, moving the previous occupant there
3778 SV *newsv = newSV(0);
3780 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3781 /* put it on the tmps stack early so it gets freed if we die */
3782 *tmps_basep++ = newsv;
3785 if (SvTYPE(sv) <= SVt_IV) {
3786 /* arg must be one of undef, IV/UV, or RV: skip
3787 * sv_setsv_flags() and do the copy directly */
3789 U32 srcflags = SvFLAGS(sv);
3791 assert(!SvGMAGICAL(sv));
3792 if (srcflags & (SVf_IOK|SVf_ROK)) {
3793 SET_SVANY_FOR_BODYLESS_IV(newsv);
3795 if (srcflags & SVf_ROK) {
3796 newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
3797 /* SV type plus flags */
3798 dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
3801 /* both src and dst are <= SVt_IV, so sv_any
3802 * points to the head; so access the heads
3803 * directly rather than going via sv_any.
3805 assert( &(sv->sv_u.svu_iv)
3806 == &(((XPVIV*) SvANY(sv))->xiv_iv));
3807 assert( &(newsv->sv_u.svu_iv)
3808 == &(((XPVIV*) SvANY(newsv))->xiv_iv));
3809 newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
3810 /* SV type plus flags */
3811 dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
3812 |(srcflags & SVf_IVisUV));
3816 assert(!(srcflags & SVf_OK));
3817 dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
3819 SvFLAGS(newsv) = dstflags;
3823 /* do the full sv_setsv() */
3827 old_base = tmps_basep - PL_tmps_stack;
3829 sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
3830 /* the mg_get or sv_setsv might have created new temps
3831 * or realloced the tmps stack; regrow and reload */
3832 EXTEND_MORTAL(nargs);
3833 tmps_basep = PL_tmps_stack + old_base;
3834 TAINT_NOT; /* Each item is independent */
3840 /* If there are any temps left above the cut, we need to sort
3841 * them into those to keep and those to free. The only ones to
3842 * keep are those for which we've temporarily unset SvTEMP.
3843 * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
3844 * swapping pairs as necessary. Stop when we meet in the middle.
3847 SV **top = PL_tmps_stack + PL_tmps_ix;
3848 while (tmps_basep <= top) {
3861 tmps_base = tmps_basep - PL_tmps_stack;
3864 PL_stack_sp = to_sp;
3866 /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
3867 while (PL_tmps_ix >= tmps_base) {
3868 SV* const sv = PL_tmps_stack[PL_tmps_ix--];
3870 PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
3874 SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
3880 /* also tail-called by pp_return */
3890 assert(CxTYPE(cx) == CXt_SUB);
3892 if (CxMULTICALL(cx)) {
3893 /* entry zero of a stack is always PL_sv_undef, which
3894 * simplifies converting a '()' return into undef in scalar context */
3895 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
3899 gimme = cx->blk_gimme;
3900 oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */
3902 if (gimme == G_VOID)
3903 PL_stack_sp = oldsp;
3905 leave_adjust_stacks(oldsp, oldsp, gimme, 0);
3908 cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */
3910 retop = cx->blk_sub.retop;
/* clear (if possible) or abandon the current @_; if 'abandon' is true,
 * abandoning is forced */
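/* (e.g. if the sub body took a reference to @_, as in C<my $r = \@_>,
 * the refcount check below fails and the array is abandoned: a fresh
 * AV is installed in pad[0] instead of clearing the old one) */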
3921 Perl_clear_defarray(pTHX_ AV* av, bool abandon)
3923 const SSize_t fill = AvFILLp(av);
3925 PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
3927 if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) {
3932 AV *newav = newAV();
3933 av_extend(newav, fill);
3934 AvREIFY_only(newav);
3935 PAD_SVl(0) = MUTABLE_SV(newav);
3936 SvREFCNT_dec_NN(av);
3947 I32 old_savestack_ix;
3952 /* Locate the CV to call:
3953 * - most common case: RV->CV: f(), $ref->():
3954 * note that if a sub is compiled before its caller is compiled,
3955 * the stash entry will be a ref to a CV, rather than being a GV.
3956 * - second most common case: CV: $ref->method()
3959 /* a non-magic-RV -> CV ? */
3960 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
3961 cv = MUTABLE_CV(SvRV(sv));
3962 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
3966 cv = MUTABLE_CV(sv);
3969 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
3970 /* handle all the weird cases */
3971 switch (SvTYPE(sv)) {
3973 if (!isGV_with_GP(sv))
3977 cv = GvCVu((const GV *)sv);
3978 if (UNLIKELY(!cv)) {
3980 cv = sv_2cv(sv, &stash, &gv, 0);
3982 old_savestack_ix = PL_savestack_ix;
3993 if (UNLIKELY(SvAMAGIC(sv))) {
3994 sv = amagic_deref_call(sv, to_cv_amg);
3995 /* Don't SPAGAIN here. */
4001 if (UNLIKELY(!SvOK(sv)))
4002 DIE(aTHX_ PL_no_usym, "a subroutine");
4004 if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
4005 if (PL_op->op_flags & OPf_STACKED) /* hasargs */
4006 SP = PL_stack_base + POPMARK;
4009 if (GIMME_V == G_SCALAR)
4010 PUSHs(&PL_sv_undef);
4014 sym = SvPV_nomg_const(sv, len);
4015 if (PL_op->op_private & HINT_STRICT_REFS)
4016 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
4017 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
4020 cv = MUTABLE_CV(SvRV(sv));
4021 if (LIKELY(SvTYPE(cv) == SVt_PVCV))
4027 DIE(aTHX_ "Not a CODE reference");
4031 /* At this point we want to save PL_savestack_ix, either by doing a
4032 * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final
4033 * CV we will be using (so we don't know whether its XS, so we can't
4034 * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on
4035 * the save stack. So remember where we are currently on the save
4036 * stack, and later update the CX or scopestack entry accordingly. */
4037 old_savestack_ix = PL_savestack_ix;
4039 /* these two fields are in a union. If they ever become separate,
4040 * we have to test for both of them being null below */
4042 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
4043 while (UNLIKELY(!CvROOT(cv))) {
4047 /* anonymous or undef'd function leaves us no recourse */
4048 if (CvLEXICAL(cv) && CvHASGV(cv))
4049 DIE(aTHX_ "Undefined subroutine &%" SVf " called",
4050 SVfARG(cv_name(cv, NULL, 0)));
4051 if (CvANON(cv) || !CvHASGV(cv)) {
4052 DIE(aTHX_ "Undefined subroutine called");
4055 /* autoloaded stub? */
4056 if (cv != GvCV(gv = CvGV(cv))) {
4059 /* should call AUTOLOAD now? */
4062 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
4063 (GvNAMEUTF8(gv) ? SVf_UTF8 : 0)
4064 |(PL_op->op_flags & OPf_REF
4065 ? GV_AUTOLOAD_ISMETHOD
4067 cv = autogv ? GvCV(autogv) : NULL;
4070 sub_name = sv_newmortal();
4071 gv_efullname3(sub_name, gv, NULL);
4072 DIE(aTHX_ "Undefined subroutine &%" SVf " called", SVfARG(sub_name));
4076 /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
4077 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
4078 DIE(aTHX_ "Closure prototype called");
4080 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
4083 Perl_get_db_sub(aTHX_ &sv, cv);
4085 PL_curcopdb = PL_curcop;
4087 /* check for lsub that handles lvalue subroutines */
4088 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
4089 /* if lsub not found then fall back to DB::sub */
4090 if (!cv) cv = GvCV(PL_DBsub);
4092 cv = GvCV(PL_DBsub);
4095 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
4096 DIE(aTHX_ "No DB::sub routine defined");
4099 if (!(CvISXSUB(cv))) {
        /* This path is taken at least 75% of the time */
4107 /* keep PADTMP args alive throughout the call (we need to do this
4108 * because @_ isn't refcounted). Note that we create the mortals
4109 * in the caller's tmps frame, so they won't be freed until after
4110 * we return from the sub.
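         * (For instance, in a call like f($x + 1), the argument is a
         * PADTMP holding the result of the addition; @_ aliases it
         * without bumping its refcount, hence the defensive mortal
         * copy below.)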
4119 *svp = sv = sv_mortalcopy(sv);
4125 cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix);
4126 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
4127 cx_pushsub(cx, cv, PL_op->op_next, hasargs);
4129 padlist = CvPADLIST(cv);
4130 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2))
4131 pad_push(padlist, depth);
4132 PAD_SET_CUR_NOSAVE(padlist, depth);
4133 if (LIKELY(hasargs)) {
4134 AV *const av = MUTABLE_AV(PAD_SVl(0));
4138 defavp = &GvAV(PL_defgv);
4139 cx->blk_sub.savearray = *defavp;
4140 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
4142 /* it's the responsibility of whoever leaves a sub to ensure
4143 * that a clean, empty AV is left in pad[0]. This is normally
4144 * done by cx_popsub() */
4145 assert(!AvREAL(av) && AvFILLp(av) == -1);
4148 if (UNLIKELY(items - 1 > AvMAX(av))) {
4149 SV **ary = AvALLOC(av);
4150 Renew(ary, items, SV*);
4151 AvMAX(av) = items - 1;
4156 Copy(MARK+1,AvARRAY(av),items,SV*);
4157 AvFILLp(av) = items - 1;
4159 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
4161 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
4162 SVfARG(cv_name(cv, NULL, 0)));
4163 /* warning must come *after* we fully set up the context
4164 * stuff so that __WARN__ handlers can safely dounwind()
4167 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
4168 && ckWARN(WARN_RECURSION)
4169 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
4170 sub_crush_depth(cv);
4171 RETURNOP(CvSTART(cv));
4174 SSize_t markix = TOPMARK;
4178 /* pretend we did the ENTER earlier */
4179 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
4184 if (UNLIKELY(((PL_op->op_private
4185 & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
4186 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
4188 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
4189 SVfARG(cv_name(cv, NULL, 0)));
4191 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
4192 /* Need to copy @_ to stack. Alternative may be to
4193 * switch stack to @_, and copy return values
         * back. This would allow popping @_ in XSUB, e.g. XXXX */
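        /* (this is the C<&foo;> call form: with no OPf_STACKED arg
         * list, the caller's current @_ is made visible to the XSUB) */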
4195 AV * const av = GvAV(PL_defgv);
4196 const SSize_t items = AvFILL(av) + 1;
4200 const bool m = cBOOL(SvRMAGICAL(av));
4201 /* Mark is at the end of the stack. */
4203 for (; i < items; ++i)
4207 SV ** const svp = av_fetch(av, i, 0);
4208 sv = svp ? *svp : NULL;
4210 else sv = AvARRAY(av)[i];
4211 if (sv) SP[i+1] = sv;
4213 SP[i+1] = newSVavdefelem(av, i, 1);
4221 SV **mark = PL_stack_base + markix;
4222 SSize_t items = SP - mark;
4225 if (*mark && SvPADTMP(*mark)) {
4226 *mark = sv_mortalcopy(*mark);
4230 /* We assume first XSUB in &DB::sub is the called one. */
4231 if (UNLIKELY(PL_curcopdb)) {
4232 SAVEVPTR(PL_curcop);
4233 PL_curcop = PL_curcopdb;
4236 /* Do we need to open block here? XXXX */
4238 /* calculate gimme here as PL_op might get changed and then not
4239 * restored until the LEAVE further down */
4240 is_scalar = (GIMME_V == G_SCALAR);
4242 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
4244 CvXSUB(cv)(aTHX_ cv);
4246 /* Enforce some sanity in scalar context. */
4248 SV **svp = PL_stack_base + markix + 1;
4249 if (svp != PL_stack_sp) {
4250 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
4260 Perl_sub_crush_depth(pTHX_ CV *cv)
4262 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
4265 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
4267 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%" SVf "\"",
4268 SVfARG(cv_name(cv,NULL,0)));
4274 /* like croak, but report in context of caller */
4277 Perl_croak_caller(const char *pat, ...)
4281 const PERL_CONTEXT *cx = caller_cx(0, NULL);
4283 /* make error appear at call site */
4285 PL_curcop = cx->blk_oldcop;
4287 va_start(args, pat);
4289 NOT_REACHED; /* NOTREACHED */
4298 SV* const elemsv = POPs;
4299 IV elem = SvIV(elemsv);
4300 AV *const av = MUTABLE_AV(POPs);
4301 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
4302 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
4303 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
4304 bool preeminent = TRUE;
4307 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
4308 Perl_warner(aTHX_ packWARN(WARN_MISC),
4309 "Use of reference \"%" SVf "\" as array index",
4311 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
4314 if (UNLIKELY(localizing)) {
        /* If we can determine whether the element exists,
         * try to preserve the existence of a tied array
         * element by using EXISTS and DELETE if possible,
         * falling back to FETCH and STORE otherwise. */
4322 if (SvCANEXISTDELETE(av))
4323 preeminent = av_exists(av, elem);
4326 svp = av_fetch(av, elem, lval && !defer);
4328 #ifdef PERL_MALLOC_WRAP
4329 if (SvUOK(elemsv)) {
4330 const UV uv = SvUV(elemsv);
4331 elem = uv > IV_MAX ? IV_MAX : uv;
4333 else if (SvNOK(elemsv))
4334 elem = (IV)SvNV(elemsv);
4336 static const char oom_array_extend[] =
4337 "Out of memory during array extend"; /* Duplicated in av.c */
4338 MEM_WRAP_CHECK_1(elem,SV*,oom_array_extend);