3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
6 * You may distribute under the terms of either the GNU General Public
7 * License or the Artistic License, as specified in the README file.
12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
15 * Awake! Awake! Fear, Fire, Foes! Awake!
18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
21 /* This file contains 'hot' pp ("push/pop") functions that
22 * execute the opcodes that make up a perl program. A typical pp function
23 * expects to find its arguments on the stack, and usually pushes its
24 * results onto the stack, hence the 'pp' terminology. Each OP structure
25 * contains a pointer to the relevant pp_foo() function.
27 * By 'hot', we mean common ops whose execution speed is critical.
28 * By gathering them together into a single file, we encourage
29 * CPU cache hits on hot code. Also it could be taken as a warning not to
30 * change any code in this file unless you're sure it won't affect
35 #define PERL_IN_PP_HOT_C
49 PL_curcop = (COP*)PL_op;
50 TAINT_NOT; /* Each statement is presumed innocent */
51 PL_stack_sp = PL_stack_base + cxstack[cxstack_ix].blk_oldsp;
61 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
62 PUSHs(save_scalar(cGVOP_gv));
64 PUSHs(GvSVn(cGVOP_gv));
69 /* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
76 /* This is sometimes called directly by pp_coreargs, pp_grepstart and
80 PUSHMARK(PL_stack_sp);
91 /* no PUTBACK, SETs doesn't inc/dec SP */
98 XPUSHs(MUTABLE_SV(cGVOP_gv));
103 /* also used for: pp_andassign() */
/* NOTE(review): fragment of pp_and (short-circuit logical AND op).
 * Interior lines are elided in this view; the visible lines read the SV on
 * top of the stack via PL_stack_sp directly (see rationale comment below)
 * and, presumably when the value is false, branch to cLOGOP->op_other so
 * the right-hand operand is skipped -- confirm against the full body. */
109 /* SP is not used to remove a variable that is saved across the
110 sv_2bool_flags call in SvTRUE_NN, if a RISC/CISC or low/high machine
111 register or load/store vs direct mem ops macro is introduced, this
112 should be a define block between direct PL_stack_sp and dSP operations,
113 presently, using PL_stack_sp is bias towards CISC cpus */
114 SV * const sv = *PL_stack_sp;
118 if (PL_op->op_type == OP_AND)
120 return cLOGOP->op_other;
128 /* sassign keeps its args in the optree traditionally backwards.
129 So we pop them differently.
131 SV *left = POPs; SV *right = TOPs;
133 if (PL_op->op_private & OPpASSIGN_BACKWARDS) {
134 SV * const temp = left;
135 left = right; right = temp;
137 assert(TAINTING_get || !TAINT_get);
138 if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
140 if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
142 SV * const cv = SvRV(right);
143 const U32 cv_type = SvTYPE(cv);
144 const bool is_gv = isGV_with_GP(left);
145 const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
151 /* Can do the optimisation if left (LVALUE) is not a typeglob,
152 right (RVALUE) is a reference to something, and we're in void
154 if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
155 /* Is the target symbol table currently empty? */
156 GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
157 if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
158 /* Good. Create a new proxy constant subroutine in the target.
159 The gv becomes a(nother) reference to the constant. */
160 SV *const value = SvRV(cv);
162 SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
163 SvPCS_IMPORTED_on(gv);
165 SvREFCNT_inc_simple_void(value);
171 /* Need to fix things up. */
173 /* Need to fix GV. */
174 left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
178 /* We've been returned a constant rather than a full subroutine,
179 but they expect a subroutine reference to apply. */
181 ENTER_with_name("sassign_coderef");
182 SvREFCNT_inc_void(SvRV(cv));
183 /* newCONSTSUB takes a reference count on the passed in SV
184 from us. We set the name to NULL, otherwise we get into
185 all sorts of fun as the reference to our new sub is
186 donated to the GV that we're about to assign to.
188 SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
191 LEAVE_with_name("sassign_coderef");
193 /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
195 First: ops for \&{"BONK"}; return us the constant in the
197 Second: ops for *{"BONK"} cause that symbol table entry
198 (and our reference to it) to be upgraded from RV
200 Thirdly: We get here. cv is actually PVGV now, and its
201 GvCV() is actually the subroutine we're looking for
203 So change the reference so that it points to the subroutine
204 of that typeglob, as that's what they were after all along.
206 GV *const upgraded = MUTABLE_GV(cv);
207 CV *const source = GvCV(upgraded);
210 assert(CvFLAGS(source) & CVf_CONST);
212 SvREFCNT_inc_simple_void_NN(source);
213 SvREFCNT_dec_NN(upgraded);
214 SvRV_set(right, MUTABLE_SV(source));
220 UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
221 (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
224 packWARN(WARN_MISC), "Useless assignment to a temporary"
226 SvSetMagicSV(left, right);
236 RETURNOP(cLOGOP->op_other);
238 RETURNOP(cLOGOP->op_next);
244 TAINT_NOT; /* Each statement is presumed innocent */
245 PL_stack_sp = PL_stack_base + cxstack[cxstack_ix].blk_oldsp;
247 if (!(PL_op->op_flags & OPf_SPECIAL)) {
248 I32 oldsave = PL_scopestack[PL_scopestack_ix - 1];
249 LEAVE_SCOPE(oldsave);
256 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
261 const char *rpv = NULL;
263 bool rcopied = FALSE;
265 if (TARG == right && right != left) { /* $r = $l.$r */
266 rpv = SvPV_nomg_const(right, rlen);
267 rbyte = !DO_UTF8(right);
268 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
269 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */
273 if (TARG != left) { /* not $l .= $r */
275 const char* const lpv = SvPV_nomg_const(left, llen);
276 lbyte = !DO_UTF8(left);
277 sv_setpvn(TARG, lpv, llen);
283 else { /* $l .= $r and left == TARG */
285 if (left == right && ckWARN(WARN_UNINITIALIZED)) /* $l .= $l */
286 report_uninit(right);
290 SvPV_force_nomg_nolen(left);
292 lbyte = !DO_UTF8(left);
298 rpv = SvPV_nomg_const(right, rlen);
299 rbyte = !DO_UTF8(right);
301 if (lbyte != rbyte) {
303 sv_utf8_upgrade_nomg(TARG);
306 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
307 sv_utf8_upgrade_nomg(right);
308 rpv = SvPV_nomg_const(right, rlen);
311 sv_catpvn_nomg(TARG, rpv, rlen);
318 /* push the elements of av onto the stack.
319 * XXX Note that padav has similar code but without the mg_get().
320 * I suspect that the mg_get is no longer needed, but while padav
321 * differs, it can't share this function */
/* NOTE(review): fragment -- return type, braces and several interior lines
 * are elided in this view.  Two paths are visible: a slow path for magical
 * arrays (per-element av_fetch + mg_get), and a fast path that copies
 * AvARRAY() slots directly, substituting &PL_sv_undef for NULL slots. */
324 S_pushav(pTHX_ AV* const av)
327 const SSize_t maxarg = AvFILL(av) + 1;
329 if (UNLIKELY(SvRMAGICAL(av))) {
/* slow path: tied/magical array -- each element must go through av_fetch
 * so FETCH magic fires per element */
331 for (i=0; i < (PADOFFSET)maxarg; i++) {
332 SV ** const svp = av_fetch(av, i, FALSE);
333 /* See note in pp_helem, and bug id #27839 */
335 ? SvGMAGICAL(*svp) ? (mg_get(*svp), *svp) : *svp
/* fast path: plain array -- bulk-copy the SV slots straight onto the stack */
341 for (i=0; i < (PADOFFSET)maxarg; i++) {
342 SV * const sv = AvARRAY(av)[i];
343 SP[i+1] = LIKELY(sv) ? sv : &PL_sv_undef;
351 /* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */
356 PADOFFSET base = PL_op->op_targ;
357 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
359 if (PL_op->op_flags & OPf_SPECIAL) {
360 /* fake the RHS of my ($x,$y,..) = @_ */
362 S_pushav(aTHX_ GvAVn(PL_defgv));
366 /* note, this is only skipped for compile-time-known void cxt */
367 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
370 for (i = 0; i <count; i++)
371 *++SP = PAD_SV(base+i);
373 if (PL_op->op_private & OPpLVAL_INTRO) {
374 SV **svp = &(PAD_SVl(base));
375 const UV payload = (UV)(
376 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
377 | (count << SAVE_TIGHT_SHIFT)
378 | SAVEt_CLEARPADRANGE);
379 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
380 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT)) == base);
387 for (i = 0; i <count; i++)
388 SvPADSTALE_off(*svp++); /* mark lexical as active */
399 OP * const op = PL_op;
400 /* access PL_curpad once */
401 SV ** const padentry = &(PAD_SVl(op->op_targ));
406 PUTBACK; /* no pop/push after this, TOPs ok */
408 if (op->op_flags & OPf_MOD) {
409 if (op->op_private & OPpLVAL_INTRO)
410 if (!(op->op_private & OPpPAD_STATE))
411 save_clearsv(padentry);
412 if (op->op_private & OPpDEREF) {
413 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather
414 than TARG reduces the scope of TARG, so it does not
415 span the call to save_clearsv, resulting in smaller
417 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
429 tryAMAGICunTARGETlist(iter_amg, 0);
430 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
432 else PL_last_in_gv = PL_argvgv, PL_stack_sp--;
433 if (!isGV_with_GP(PL_last_in_gv)) {
434 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
435 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));
438 XPUSHs(MUTABLE_SV(PL_last_in_gv));
441 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
442 if (PL_last_in_gv == (GV *)&PL_sv_undef)
443 PL_last_in_gv = NULL;
445 assert(isGV_with_GP(PL_last_in_gv));
448 return do_readline();
456 tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
460 (SvIOK_notUV(left) && SvIOK_notUV(right))
461 ? (SvIVX(left) == SvIVX(right))
462 : ( do_ncmp(left, right) == 0)
468 /* also used for: pp_i_preinc() */
/* NOTE(review): fragment of pp_preinc (++$x).  Fast path: if the SV is a
 * plain IOK-only scalar (no magic, no string/NV/ref/UV-ness -- see the flag
 * mask) and not already at IV_MAX, bump the IV slot in place; the IV_MAX
 * guard avoids signed overflow.  All other cases fall back to sv_inc(). */
472 SV *sv = *PL_stack_sp;
474 if (LIKELY(((sv->sv_flags &
475 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
476 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
478 && SvIVX(sv) != IV_MAX)
480 SvIV_set(sv, SvIVX(sv) + 1);
482 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
489 /* also used for: pp_i_predec() */
/* NOTE(review): fragment of pp_predec (--$x), mirror image of pp_preinc
 * above: in-place IV decrement on the fast path, guarded against IV_MIN
 * underflow; everything else is delegated to sv_dec(). */
493 SV *sv = *PL_stack_sp;
495 if (LIKELY(((sv->sv_flags &
496 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
497 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
499 && SvIVX(sv) != IV_MIN)
501 SvIV_set(sv, SvIVX(sv) - 1);
503 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
510 /* also used for: pp_orassign() */
/* NOTE(review): fragment of pp_or (short-circuit logical OR); the visible
 * branch transfers control to cLOGOP->op_other (the RHS) -- surrounding
 * condition lines are elided in this view. */
519 if (PL_op->op_type == OP_OR)
521 RETURNOP(cLOGOP->op_other);
526 /* also used for: pp_dor() pp_dorassign() */
533 const int op_type = PL_op->op_type;
534 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);
539 if (UNLIKELY(!sv || !SvANY(sv))) {
540 if (op_type == OP_DOR)
542 RETURNOP(cLOGOP->op_other);
548 if (UNLIKELY(!sv || !SvANY(sv)))
553 switch (SvTYPE(sv)) {
555 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
559 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
563 if (CvROOT(sv) || CvXSUB(sv))
576 if(op_type == OP_DOR)
578 RETURNOP(cLOGOP->op_other);
580 /* assuming OP_DEFINED */
590 dSP; dATARGET; bool useleft; SV *svl, *svr;
592 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
596 #ifdef PERL_PRESERVE_IVUV
598 /* special-case some simple common cases */
599 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
601 U32 flags = (svl->sv_flags & svr->sv_flags);
602 if (flags & SVf_IOK) {
603 /* both args are simple IVs */
608 topl = ((UV)il) >> (UVSIZE * 8 - 2);
609 topr = ((UV)ir) >> (UVSIZE * 8 - 2);
611 /* if both are in a range that can't under/overflow, do a
612 * simple integer add: if the top of both numbers
613 * are 00 or 11, then it's safe */
614 if (!( ((topl+1) | (topr+1)) & 2)) {
616 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
622 else if (flags & SVf_NOK) {
623 /* both args are NVs */
629 if (nl == (NV)il && nr == (NV)ir)
630 /* nothing was lost by converting to IVs */
633 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
641 useleft = USE_LEFT(svl);
642 /* We must see if we can perform the addition with integers if possible,
643 as the integer code detects overflow while the NV code doesn't.
644 If either argument hasn't had a numeric conversion yet attempt to get
645 the IV. It's important to do this now, rather than just assuming that
646 it's not IOK as a PV of "9223372036854775806" may not take well to NV
647 addition, and an SV which is NOK, NV=6.0 ought to be coerced to
648 integer in case the second argument is IV=9223372036854775806
649 We can (now) rely on sv_2iv to do the right thing, only setting the
650 public IOK flag if the value in the NV (or PV) slot is truly integer.
652 A side effect is that this also aggressively prefers integer maths over
653 fp maths for integer values.
655 How to detect overflow?
657 C 99 section 6.2.6.1 says
659 The range of nonnegative values of a signed integer type is a subrange
660 of the corresponding unsigned integer type, and the representation of
661 the same value in each type is the same. A computation involving
662 unsigned operands can never overflow, because a result that cannot be
663 represented by the resulting unsigned integer type is reduced modulo
664 the number that is one greater than the largest value that can be
665 represented by the resulting type.
669 which I read as "unsigned ints wrap."
671 signed integer overflow seems to be classed as "exception condition"
673 If an exceptional condition occurs during the evaluation of an
674 expression (that is, if the result is not mathematically defined or not
675 in the range of representable values for its type), the behavior is
678 (6.5, the 5th paragraph)
680 I had assumed that on 2s complement machines signed arithmetic would
681 wrap, hence coded pp_add and pp_subtract on the assumption that
682 everything perl builds on would be happy. After much wailing and
683 gnashing of teeth it would seem that irix64 knows its ANSI spec well,
684 knows that it doesn't need to, and doesn't. Bah. Anyway, the all-
685 unsigned code below is actually shorter than the old code. :-)
688 if (SvIV_please_nomg(svr)) {
689 /* Unless the left argument is integer in range we are going to have to
690 use NV maths. Hence only attempt to coerce the right argument if
691 we know the left is integer. */
699 /* left operand is undef, treat as zero. + 0 is identity,
700 Could SETi or SETu right now, but space optimise by not adding
701 lots of code to speed up what is probably a rarish case. */
703 /* Left operand is defined, so is it IV? */
704 if (SvIV_please_nomg(svl)) {
705 if ((auvok = SvUOK(svl)))
708 const IV aiv = SvIVX(svl);
711 auvok = 1; /* Now acting as a sign flag. */
713 auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
720 bool result_good = 0;
723 bool buvok = SvUOK(svr);
728 const IV biv = SvIVX(svr);
733 buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
735 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
736 else "IV" now, independent of how it came in.
737 if a, b represents positive, A, B negative, a maps to -A etc
742 all UV maths. negate result if A negative.
743 add if signs same, subtract if signs differ. */
749 /* Must get smaller */
755 /* result really should be -(auv-buv). as its negation
756 of true value, need to swap our result flag */
773 if (result <= (UV)IV_MIN)
774 SETi(result == (UV)IV_MIN
775 ? IV_MIN : -(IV)result);
777 /* result valid, but out of range for IV. */
782 } /* Overflow, drop through to NVs. */
787 useleft = USE_LEFT(svl);
791 NV value = SvNV_nomg(svr);
794 /* left operand is undef, treat as zero. + 0.0 is identity. */
798 SETn( value + SvNV_nomg(svl) );
804 /* also used for: pp_aelemfast_lex() */
809 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
810 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
811 const U32 lval = PL_op->op_flags & OPf_MOD;
812 SV** const svp = av_fetch(av, (I8)PL_op->op_private, lval);
813 SV *sv = (svp ? *svp : &PL_sv_undef);
815 if (UNLIKELY(!svp && lval))
816 DIE(aTHX_ PL_no_aelem, (int)(I8)PL_op->op_private);
819 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
829 do_join(TARG, *MARK, MARK, SP);
840 * We ass_u_me that LvTARGOFF() comes first, and that two STRLENs
841 * will be enough to hold an OP*.
843 SV* const sv = sv_newmortal();
844 sv_upgrade(sv, SVt_PVLV);
846 Copy(&PL_op, &LvTARGOFF(sv), 1, OP*);
849 XPUSHs(MUTABLE_SV(PL_op));
854 /* Oversized hot code. */
856 /* also used for: pp_say() */
860 dSP; dMARK; dORIGMARK;
864 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;
868 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
871 if (MARK == ORIGMARK) {
872 /* If using default handle then we need to make space to
873 * pass object as 1st arg, so move other args up ...
877 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
880 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
882 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
883 | (PL_op->op_type == OP_SAY
884 ? TIED_METHOD_SAY : 0)), sp - mark);
887 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
888 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
891 SETERRNO(EBADF,RMS_IFI);
894 else if (!(fp = IoOFP(io))) {
896 report_wrongway_fh(gv, '<');
899 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);
903 SV * const ofs = GvSV(PL_ofsgv); /* $, */
905 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
907 if (!do_print(*MARK, fp))
911 /* don't use 'ofs' here - it may be invalidated by magic callbacks */
912 if (!do_print(GvSV(PL_ofsgv), fp)) {
921 if (!do_print(*MARK, fp))
929 if (PL_op->op_type == OP_SAY) {
930 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))
933 else if (PL_ors_sv && SvOK(PL_ors_sv))
934 if (!do_print(PL_ors_sv, fp)) /* $\ */
937 if (IoFLAGS(io) & IOf_FLUSH)
938 if (PerlIO_flush(fp) == EOF)
948 XPUSHs(&PL_sv_undef);
953 /* also used for: pp_rv2hv() */
954 /* also called directly by pp_lvavref */
959 const I32 gimme = GIMME_V;
960 static const char an_array[] = "an ARRAY";
961 static const char a_hash[] = "a HASH";
962 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
963 || PL_op->op_type == OP_LVAVREF;
964 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
968 if (UNLIKELY(SvAMAGIC(sv))) {
969 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
972 if (UNLIKELY(SvTYPE(sv) != type))
973 /* diag_listed_as: Not an ARRAY reference */
974 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
975 else if (UNLIKELY(PL_op->op_flags & OPf_MOD
976 && PL_op->op_private & OPpLVAL_INTRO))
977 Perl_croak(aTHX_ "%s", PL_no_localize_ref);
979 else if (UNLIKELY(SvTYPE(sv) != type)) {
982 if (!isGV_with_GP(sv)) {
983 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
991 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
992 if (PL_op->op_private & OPpLVAL_INTRO)
993 sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
995 if (PL_op->op_flags & OPf_REF) {
999 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
1000 const I32 flags = is_lvalue_sub();
1001 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1002 if (gimme != G_ARRAY)
1003 goto croak_cant_return;
1010 AV *const av = MUTABLE_AV(sv);
1011 /* The guts of pp_rv2av */
1012 if (gimme == G_ARRAY) {
1018 else if (gimme == G_SCALAR) {
1020 const SSize_t maxarg = AvFILL(av) + 1;
1024 /* The guts of pp_rv2hv */
1025 if (gimme == G_ARRAY) { /* array wanted */
1027 return Perl_do_kv(aTHX);
1029 else if ((PL_op->op_private & OPpTRUEBOOL
1030 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
1031 && block_gimme() == G_VOID ))
1032 && (!SvRMAGICAL(sv) || !mg_find(sv, PERL_MAGIC_tied)))
1033 SETs(HvUSEDKEYS(sv) ? &PL_sv_yes : sv_2mortal(newSViv(0)));
1034 else if (gimme == G_SCALAR) {
1036 TARG = Perl_hv_scalar(aTHX_ MUTABLE_HV(sv));
1043 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
1044 is_pp_rv2av ? "array" : "hash");
/* Emit the "odd number of elements" warning for a list-to-hash assignment.
 * NOTE(review): fragment -- return type, braces and some interior lines are
 * elided.  When the single odd key is also the first key and is a reference
 * to an array or hash, the more helpful "Reference found where even-sized
 * list expected" wording is used instead. */
1049 S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
1051 PERL_ARGS_ASSERT_DO_ODDBALL;
1054 if (ckWARN(WARN_MISC)) {
1056 if (oddkey == firstkey &&
1058 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
1059 SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
1061 err = "Reference found where even-sized list expected";
1064 err = "Odd number of elements in hash assignment";
1065 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
1072 /* Do a mark and sweep with the SVf_BREAK flag to detect elements which
1073 * are common to both the LHS and RHS of an aassign, and replace them
1074 * with copies. All these copies are made before the actual list assign is
1077 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
1078 * element ($b) to the first LH element ($a), modifies $a; when the
1079 * second assignment is done, the second RH element now has the wrong
1080 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
1081 * Note that we don't need to make a mortal copy of $b.
1083 * The algorithm below works by, for every RHS element, mark the
1084 * corresponding LHS target element with SVf_BREAK. Then if the RHS
1085 * element is found with SVf_BREAK set, it means it would have been
1086 * modified, so make a copy.
1087 * Note that by scanning both LHS and RHS in lockstep, we avoid
1088 * unnecessary copies (like $b above) compared with a naive
1089 * "mark all LHS; copy all marked RHS; unmark all LHS".
1091 * If the LHS element is a 'my' declaration' and has a refcount of 1, then
1092 * it can't be common and can be skipped.
1094 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
1095 * that we thought we didn't need to call S_aassign_copy_common(), but we
1096 * have anyway for sanity checking. If we find we need to copy, then panic.
1099 PERL_STATIC_INLINE void
1100 S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
1101 SV **firstrelem, SV **lastrelem
1110 SSize_t lcount = lastlelem - firstlelem + 1;
1111 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
1112 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
1114 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
1115 assert(firstlelem < lastlelem); /* at least 2 LH elements */
1116 assert(firstrelem < lastrelem); /* at least 2 RH elements */
1120 /* we never have to copy the first RH element; it can't be corrupted
1121 * by assigning something to the corresponding first LH element.
1122 * So this scan does in a loop: mark LHS[N]; test RHS[N+1]
1124 relem = firstrelem + 1;
1126 for (; relem <= lastrelem; relem++) {
1129 /* mark next LH element */
1131 if (--lcount >= 0) {
1134 if (UNLIKELY(!svl)) {/* skip AV alias marker */
1135 assert (lelem <= lastlelem);
1141 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
1144 /* this LH element will consume all further args;
1145 * no need to mark any further LH elements (if any).
1146 * But we still need to scan any remaining RHS elements;
1147 * set lcount negative to distinguish from lcount == 0,
1148 * so the loop condition continues being true
1151 lelem--; /* no need to unmark this element */
1153 else if (!(do_rc1 && SvREFCNT(svl) == 1) && svl != &PL_sv_undef) {
1154 assert(!SvIMMORTAL(svl));
1155 SvFLAGS(svl) |= SVf_BREAK;
1159 /* don't check RH element if no SVf_BREAK flags set yet */
1166 /* see if corresponding RH element needs copying */
1172 if (UNLIKELY(SvFLAGS(svr) & SVf_BREAK)) {
1176 /* op_dump(PL_op); */
1178 "panic: aassign skipped needed copy of common RH elem %"
1179 UVuf, (UV)(relem - firstrelem));
1183 TAINT_NOT; /* Each item is independent */
1185 /* Dear TODO test in t/op/sort.t, I love you.
1186 (It's relying on a panic, not a "semi-panic" from newSVsv()
1187 and then an assertion failure below.) */
1188 if (UNLIKELY(SvIS_FREED(svr))) {
1189 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
1192 /* avoid break flag while copying; otherwise COW etc
1194 SvFLAGS(svr) &= ~SVf_BREAK;
1195 /* Not newSVsv(), as it does not allow copy-on-write,
1196 resulting in wasteful copies.
1197 Also, we use SV_NOSTEAL in case the SV is used more than
1198 once, e.g. (...) = (f())[0,0]
1199 Where the same SV appears twice on the RHS without a ref
1200 count bump. (Although I suspect that the SV won't be
1201 stealable here anyway - DAPM).
1203 *relem = sv_mortalcopy_flags(svr,
1204 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1205 /* ... but restore afterwards in case it's needed again,
1206 * e.g. ($a,$b,$c) = (1,$a,$a)
1208 SvFLAGS(svr) |= SVf_BREAK;
1220 while (lelem > firstlelem) {
1221 SV * const svl = *(--lelem);
1223 SvFLAGS(svl) &= ~SVf_BREAK;
1232 SV **lastlelem = PL_stack_sp;
1233 SV **lastrelem = PL_stack_base + POPMARK;
1234 SV **firstrelem = PL_stack_base + POPMARK + 1;
1235 SV **firstlelem = lastrelem + 1;
1248 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
1249 * only need to save locally, not on the save stack */
1250 U16 old_delaymagic = PL_delaymagic;
1255 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
1257 /* If there's a common identifier on both sides we have to take
1258 * special care that assigning the identifier on the left doesn't
1259 * clobber a value on the right that's used later in the list.
1262 if ( (PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1))
1263 /* at least 2 LH and RH elements, or commonality isn't an issue */
1264 && (firstlelem < lastlelem && firstrelem < lastrelem)
1266 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
1267 /* skip the scan if all scalars have a ref count of 1 */
1268 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1270 if (!sv || SvREFCNT(sv) == 1)
/* NOTE(review): suspected typo on the next line -- SVt_PVAV is tested
 * twice; the second comparison was almost certainly meant to be
 * SVt_PVHV (matching the AV/HV pairing used throughout this file, e.g.
 * in S_aassign_copy_common).  As written, a hash on the LHS silently
 * passes the "all refcount 1" check.  Confirm against upstream perl5
 * history before changing. */
1272 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVAV)
1279 S_aassign_copy_common(aTHX_
1280 firstlelem, lastlelem, firstrelem, lastrelem
1289 /* on debugging builds, do the scan even if we've concluded we
1290 * don't need to, then panic if we find commonality. Note that the
1291 * scanner assumes at least 2 elements */
1292 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1300 lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
1307 while (LIKELY(lelem <= lastlelem)) {
1309 TAINT_NOT; /* Each item stands on its own, taintwise. */
1311 if (UNLIKELY(!sv)) {
1314 ASSUME(SvTYPE(sv) == SVt_PVAV);
1316 switch (SvTYPE(sv)) {
1318 bool already_copied = FALSE;
1319 ary = MUTABLE_AV(sv);
1320 magic = SvMAGICAL(ary) != 0;
1322 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1324 * We need to clear ary. There is a danger that if we do this,
1325 * elements on the RHS may be prematurely freed, e.g.
1327 * In the case of possible commonality, make a copy of each
1328 * RHS SV *before* clearing the array, and add a reference
1329 * from the tmps stack, so that it doesn't leak on death.
1330 * Otherwise, make a copy of each RHS SV only as we're storing
1331 * it into the array - that way we don't have to worry about
1332 * it being leaked if we die, but don't incur the cost of
1333 * mortalising everything.
1336 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1337 && (relem <= lastrelem)
1338 && (magic || AvFILL(ary) != -1))
1341 EXTEND_MORTAL(lastrelem - relem + 1);
1342 for (svp = relem; svp <= lastrelem; svp++) {
1343 /* see comment in S_aassign_copy_common about SV_NOSTEAL */
1344 *svp = sv_mortalcopy_flags(*svp,
1345 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1348 already_copied = TRUE;
1352 if (relem <= lastrelem)
1353 av_extend(ary, lastrelem - relem);
1356 while (relem <= lastrelem) { /* gobble up all the rest */
1358 if (LIKELY(!alias)) {
1363 /* before newSV, in case it dies */
1366 /* see comment in S_aassign_copy_common about
1368 sv_setsv_flags(sv, *relem,
1369 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
1374 if (!already_copied)
1377 DIE(aTHX_ "Assigned value is not a reference");
1378 if (SvTYPE(SvRV(*relem)) > SVt_PVLV)
1379 /* diag_listed_as: Assigned value is not %s reference */
1381 "Assigned value is not a SCALAR reference");
1382 if (lval && !already_copied)
1383 *relem = sv_mortalcopy(*relem);
1384 /* XXX else check for weak refs? */
1385 sv = SvREFCNT_inc_NN(SvRV(*relem));
1389 SvREFCNT_inc_simple_void_NN(sv); /* undo mortal free */
1390 didstore = av_store(ary,i++,sv);
1399 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
1400 SvSETMAGIC(MUTABLE_SV(ary));
1405 case SVt_PVHV: { /* normal hash */
1409 SV** topelem = relem;
1410 SV **firsthashrelem = relem;
1411 bool already_copied = FALSE;
1413 hash = MUTABLE_HV(sv);
1414 magic = SvMAGICAL(hash) != 0;
1416 odd = ((lastrelem - firsthashrelem)&1)? 0 : 1;
1417 if (UNLIKELY(odd)) {
1418 do_oddball(lastrelem, firsthashrelem);
1419 /* we have firstlelem to reuse, it's not needed anymore
1421 *(lastrelem+1) = &PL_sv_undef;
1425 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1427 * We need to clear hash. There is a danger that if we do this,
1428 * elements on the RHS may be prematurely freed, e.g.
1429 * %h = (foo => $h{bar});
1430 * In the case of possible commonality, make a copy of each
1431 * RHS SV *before* clearing the hash, and add a reference
1432 * from the tmps stack, so that it doesn't leak on death.
1435 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1436 && (relem <= lastrelem)
1437 && (magic || HvUSEDKEYS(hash)))
1440 EXTEND_MORTAL(lastrelem - relem + 1);
1441 for (svp = relem; svp <= lastrelem; svp++) {
1442 *svp = sv_mortalcopy_flags(*svp,
1443 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1446 already_copied = TRUE;
1451 while (LIKELY(relem < lastrelem+odd)) { /* gobble up all the rest */
1454 /* Copy the key if aassign is called in lvalue context,
1455 to avoid having the next op modify our rhs. Copy
1456 it also if it is gmagical, lest it make the
1457 hv_store_ent call below croak, leaking the value. */
1458 sv = (lval || SvGMAGICAL(*relem)) && !already_copied
1459 ? sv_mortalcopy(*relem)
1468 sv_setsv_nomg(tmpstr,*relem++); /* value */
1471 if (gimme == G_ARRAY) {
1472 if (hv_exists_ent(hash, sv, 0))
1473 /* key overwrites an existing entry */
1476 /* copy element back: possibly to an earlier
1477 * stack location if we encountered dups earlier,
1478 * possibly to a later stack location if odd */
1480 *topelem++ = tmpstr;
1484 SvREFCNT_inc_simple_void_NN(tmpstr); /* undo mortal free */
1485 didstore = hv_store_ent(hash,sv,tmpstr,0);
1487 if (!didstore) sv_2mortal(tmpstr);
1493 if (duplicates && gimme == G_ARRAY) {
1494 /* at this point we have removed the duplicate key/value
1495 * pairs from the stack, but the remaining values may be
1496 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
1497 * the (a 2), but the stack now probably contains
1498 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
1499 * obliterates the earlier key. So refresh all values. */
1500 lastrelem -= duplicates;
1501 relem = firsthashrelem;
1502 while (relem < lastrelem+odd) {
1504 he = hv_fetch_ent(hash, *relem++, 0, 0);
1505 *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
1508 if (odd && gimme == G_ARRAY) lastrelem++;
1512 if (SvIMMORTAL(sv)) {
1513 if (relem <= lastrelem)
1517 if (relem <= lastrelem) {
1519 SvTEMP(sv) && !SvSMAGICAL(sv) && SvREFCNT(sv) == 1 &&
1520 (!isGV_with_GP(sv) || SvFAKE(sv)) && ckWARN(WARN_MISC)
1523 packWARN(WARN_MISC),
1524 "Useless assignment to a temporary"
1526 sv_setsv(sv, *relem);
1530 sv_setsv(sv, &PL_sv_undef);
1535 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
1536 /* Will be used to set PL_tainting below */
1537 Uid_t tmp_uid = PerlProc_getuid();
1538 Uid_t tmp_euid = PerlProc_geteuid();
1539 Gid_t tmp_gid = PerlProc_getgid();
1540 Gid_t tmp_egid = PerlProc_getegid();
1542 /* XXX $> et al currently silently ignore failures */
1543 if (PL_delaymagic & DM_UID) {
1544 #ifdef HAS_SETRESUID
1546 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1547 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
1550 # ifdef HAS_SETREUID
1552 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1553 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
1556 if ((PL_delaymagic & DM_UID) == DM_RUID) {
1557 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
1558 PL_delaymagic &= ~DM_RUID;
1560 # endif /* HAS_SETRUID */
1562 if ((PL_delaymagic & DM_UID) == DM_EUID) {
1563 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
1564 PL_delaymagic &= ~DM_EUID;
1566 # endif /* HAS_SETEUID */
1567 if (PL_delaymagic & DM_UID) {
1568 if (PL_delaymagic_uid != PL_delaymagic_euid)
1569 DIE(aTHX_ "No setreuid available");
1570 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
1572 # endif /* HAS_SETREUID */
1573 #endif /* HAS_SETRESUID */
1575 tmp_uid = PerlProc_getuid();
1576 tmp_euid = PerlProc_geteuid();
1578 /* XXX $> et al currently silently ignore failures */
1579 if (PL_delaymagic & DM_GID) {
1580 #ifdef HAS_SETRESGID
1582 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1583 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
1586 # ifdef HAS_SETREGID
1588 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1589 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
1592 if ((PL_delaymagic & DM_GID) == DM_RGID) {
1593 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
1594 PL_delaymagic &= ~DM_RGID;
1596 # endif /* HAS_SETRGID */
1598 if ((PL_delaymagic & DM_GID) == DM_EGID) {
1599 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
1600 PL_delaymagic &= ~DM_EGID;
1602 # endif /* HAS_SETEGID */
1603 if (PL_delaymagic & DM_GID) {
1604 if (PL_delaymagic_gid != PL_delaymagic_egid)
1605 DIE(aTHX_ "No setregid available");
1606 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
1608 # endif /* HAS_SETREGID */
1609 #endif /* HAS_SETRESGID */
1611 tmp_gid = PerlProc_getgid();
1612 tmp_egid = PerlProc_getegid();
1614 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
1615 #ifdef NO_TAINT_SUPPORT
1616 PERL_UNUSED_VAR(tmp_uid);
1617 PERL_UNUSED_VAR(tmp_euid);
1618 PERL_UNUSED_VAR(tmp_gid);
1619 PERL_UNUSED_VAR(tmp_egid);
1622 PL_delaymagic = old_delaymagic;
1624 if (gimme == G_VOID)
1625 SP = firstrelem - 1;
1626 else if (gimme == G_SCALAR) {
1629 SETi(lastrelem - firstrelem + 1);
1633 /* note that in this case *firstlelem may have been overwritten
1634 by sv_undef in the odd hash case */
1637 SP = firstrelem + (lastlelem - firstlelem);
1638 lelem = firstlelem + (relem - firstrelem);
1640 *relem++ = (lelem <= lastlelem) ? *lelem++ : &PL_sv_undef;
/* NOTE(review): fragment of pp_qr — the PP(pp_qr) header and several
 * interior lines are absent from this excerpt (the embedded original
 * line numbers are non-contiguous). Code left byte-identical; only
 * review comments added.
 *
 * Visible behaviour: builds a mortal RV wrapping a fresh copy of the
 * compiled regexp (reg_temp_copy), clones any attached anonymous CV,
 * blesses the RV into the regexp's package if one is set, and
 * propagates the pattern's taint flag onto the referent. */
1650 PMOP * const pm = cPMOP;
1651 REGEXP * rx = PM_GETRE(pm);
1652 SV * const pkg = rx ? CALLREG_PACKAGE(rx) : NULL;
1653 SV * const rv = sv_newmortal();
1657 SvUPGRADE(rv, SVt_IV);
1658 /* For a subroutine describing itself as "This is a hacky workaround" I'm
1659 loathe to use it here, but it seems to be the right fix. Or close.
1660 The key part appears to be that it's essential for pp_qr to return a new
1661 object (SV), which implies that there needs to be an effective way to
1662 generate a new SV from the existing SV that is pre-compiled in the
1664 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));
/* NOTE(review): clone a CLONE-flagged qr//-attached anon sub so the new
 * qr object owns its own closure; drops the old refcount afterwards. */
1667 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
1668 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
1669 *cvp = cv_clone(cv);
1670 SvREFCNT_dec_NN(cv);
/* NOTE(review): pkg was returned by CALLREG_PACKAGE above; GV_ADD
 * creates the stash if needed, then the mortal RV is blessed into it. */
1674 HV *const stash = gv_stashsv(pkg, GV_ADD);
1675 SvREFCNT_dec_NN(pkg);
1676 (void)sv_bless(rv, stash);
/* NOTE(review): see the "how taint works" description later in this
 * file — a tainted pattern taints the qr// object itself. */
1679 if (UNLIKELY(RX_ISTAINTED(rx))) {
1681 SvTAINTED_on(SvRV(rv));
/* NOTE(review): fragment of pp_match — the PP(pp_match) header and many
 * interior lines are missing from this excerpt (embedded line numbers
 * are non-contiguous). Code left byte-identical; only review comments
 * added. Visible behaviour: runs a pattern match against TARG, handles
 * //g pos() magic, ?? once-only semantics, taint propagation, and in
 * list context pushes the captured substrings. */
1694 SSize_t curpos = 0; /* initial pos() or current $+[0] */
1697 const char *truebase; /* Start of string */
1698 REGEXP *rx = PM_GETRE(pm);
1700 const I32 gimme = GIMME_V;
1702 const I32 oldsave = PL_savestack_ix;
1703 I32 had_zerolen = 0;
1706 if (PL_op->op_flags & OPf_STACKED)
1715 PUTBACK; /* EVAL blocks need stack_sp. */
1716 /* Skip get-magic if this is a qr// clone, because regcomp has
/* NOTE(review): mother_re non-NULL identifies a qr// clone — get-magic
 * already ran at compile time in that case, so use the _nomg fetch. */
1718 truebase = ReANY(rx)->mother_re
1719 ? SvPV_nomg_const(TARG, len)
1720 : SvPV_const(TARG, len);
1722 DIE(aTHX_ "panic: pp_match");
1723 strend = truebase + len;
1724 rxtainted = (RX_ISTAINTED(rx) ||
1725 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
1728 /* We need to know this in case we fail out early - pos() must be reset */
1729 global = dynpm->op_pmflags & PMf_GLOBAL;
1731 /* PMdf_USED is set after a ?? matches once */
1734 SvREADONLY(PL_regex_pad[pm->op_pmoffset])
1736 pm->op_pmflags & PMf_USED
1739 DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once"));
1743 /* empty pattern special-cased to use last successful pattern if
1744 possible, except for qr// */
1745 if (!ReANY(rx)->mother_re && !RX_PRELEN(rx)
/* NOTE(review): fast-fail when the target string is provably shorter
 * than the regexp's minimum possible match length. */
1751 if (RX_MINLEN(rx) >= 0 && (STRLEN)RX_MINLEN(rx) > len) {
1752 DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
1753 UVuf" < %"IVdf")\n",
1754 (UV)len, (IV)RX_MINLEN(rx)));
1758 /* get pos() if //g */
1760 mg = mg_find_mglob(TARG);
1761 if (mg && mg->mg_len >= 0) {
1762 curpos = MgBYTEPOS(mg, TARG, truebase, len);
1763 /* last time pos() was set, it was zero-length match */
1764 if (mg->mg_flags & MGf_MINMATCH)
1769 #ifdef PERL_SAWAMPERSAND
1772 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
1773 || (dynpm->op_pmflags & PMf_KEEPCOPY)
1777 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
1778 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
1779 * only on the first iteration. Therefore we need to copy $' as well
1780 * as $&, to make the rest of the string available for captures in
1781 * subsequent iterations */
1782 if (! (global && gimme == G_ARRAY))
1783 r_flags |= REXEC_COPY_SKIP_POST;
1785 #ifdef PERL_SAWAMPERSAND
1786 if (dynpm->op_pmflags & PMf_KEEPCOPY)
1787 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
1788 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
1795 s = truebase + curpos;
1797 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
1798 had_zerolen, TARG, NULL, r_flags))
/* NOTE(review): ?? once-only bookkeeping — under ithreads the pad slot
 * is marked readonly, otherwise PMf_USED is set on the op itself. */
1802 if (dynpm->op_pmflags & PMf_ONCE)
1804 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
1806 dynpm->op_pmflags |= PMf_USED;
1810 RX_MATCH_TAINTED_on(rx);
1811 TAINT_IF(RX_MATCH_TAINTED(rx));
/* NOTE(review): //g in scalar context (or /c): record the new pos() in
 * mglob magic, flagging zero-length matches via MGf_MINMATCH so the
 * next iteration can avoid matching the same empty span twice. */
1815 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
1817 mg = sv_magicext_mglob(TARG);
1818 MgBYTEPOS_set(mg, TARG, truebase, RX_OFFS(rx)[0].end);
1819 if (RX_ZERO_LEN(rx))
1820 mg->mg_flags |= MGf_MINMATCH;
1822 mg->mg_flags &= ~MGf_MINMATCH;
1825 if ((!RX_NPARENS(rx) && !global) || gimme != G_ARRAY) {
1826 LEAVE_SCOPE(oldsave);
1830 /* push captures on stack */
1833 const I32 nparens = RX_NPARENS(rx);
1834 I32 i = (global && !nparens) ? 1 : 0;
1836 SPAGAIN; /* EVAL blocks could move the stack. */
1837 EXTEND(SP, nparens + i);
1838 EXTEND_MORTAL(nparens + i);
1839 for (i = !i; i <= nparens; i++) {
1840 PUSHs(sv_newmortal());
1841 if (LIKELY((RX_OFFS(rx)[i].start != -1)
1842 && RX_OFFS(rx)[i].end != -1 ))
1844 const I32 len = RX_OFFS(rx)[i].end - RX_OFFS(rx)[i].start;
1845 const char * const s = RX_OFFS(rx)[i].start + truebase;
/* NOTE(review): defensive sanity check on the engine-reported capture
 * offsets before copying; a violation indicates engine corruption. */
1846 if (UNLIKELY(RX_OFFS(rx)[i].end < 0 || RX_OFFS(rx)[i].start < 0
1847 || len < 0 || len > strend - s))
1848 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
1849 "start=%ld, end=%ld, s=%p, strend=%p, len=%"UVuf,
1850 (long) i, (long) RX_OFFS(rx)[i].start,
1851 (long)RX_OFFS(rx)[i].end, s, strend, (UV) len);
1852 sv_setpvn(*SP, s, len);
1853 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
1858 curpos = (UV)RX_OFFS(rx)[0].end;
1859 had_zerolen = RX_ZERO_LEN(rx);
1860 PUTBACK; /* EVAL blocks may use stack */
1861 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
1864 LEAVE_SCOPE(oldsave);
1867 NOT_REACHED; /* NOTREACHED */
/* NOTE(review): match failure path for //g without /c: discard any
 * stale pos() magic found on TARG. */
1870 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
1872 mg = mg_find_mglob(TARG);
1876 LEAVE_SCOPE(oldsave);
1877 if (gimme == G_ARRAY)
/* NOTE(review): fragment of Perl_do_readline — the return-type line and
 * many interior lines are missing from this excerpt (embedded line
 * numbers are non-contiguous). Code left byte-identical; only review
 * comments added. Visible behaviour: implements <FH>, <>/ARGV handling,
 * glob() results, rcatline, tied-handle dispatch, taint marking and
 * buffer-size housekeeping. */
1883 Perl_do_readline(pTHX)
1885 dSP; dTARGETSTACKED;
1890 IO * const io = GvIO(PL_last_in_gv);
1891 const I32 type = PL_op->op_type;
1892 const I32 gimme = GIMME_V;
/* NOTE(review): tied handle — delegate to the READLINE method and, in
 * scalar context, copy the result into TARG without stealing. */
1895 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
1897 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
1898 if (gimme == G_SCALAR) {
1900 SvSetSV_nosteal(TARG, TOPs);
/* NOTE(review): <> (ARGV) start-up: with an empty @ARGV fall back to
 * reading "-" (stdin), clearing any previous taint on $ARGV first. */
1910 if (IoFLAGS(io) & IOf_ARGV) {
1911 if (IoFLAGS(io) & IOf_START) {
1913 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) {
1914 IoFLAGS(io) &= ~IOf_START;
1915 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
1916 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
1917 sv_setpvs(GvSVn(PL_last_in_gv), "-");
1918 SvSETMAGIC(GvSV(PL_last_in_gv));
1923 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
1924 if (!fp) { /* Note: fp != IoIFP(io) */
1925 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
1928 else if (type == OP_GLOB)
1929 fp = Perl_start_glob(aTHX_ POPs, io);
1931 else if (type == OP_GLOB)
1933 else if (IoTYPE(io) == IoTYPE_WRONLY) {
1934 report_wrongway_fh(PL_last_in_gv, '>');
/* NOTE(review): no usable input fp — warn on closed/invalid handles,
 * then in scalar context undefine TARG and return undef. */
1938 if ((!io || !(IoFLAGS(io) & IOf_START))
1939 && ckWARN(WARN_CLOSED)
1942 report_evil_fh(PL_last_in_gv);
1944 if (gimme == G_SCALAR) {
1945 /* undef TARG, and push that undefined value */
1946 if (type != OP_RCATLINE) {
1947 sv_setsv(TARG,NULL);
1954 if (gimme == G_SCALAR) {
1956 if (type == OP_RCATLINE && SvGMAGICAL(sv))
1959 if (type == OP_RCATLINE)
1960 SvPV_force_nomg_nolen(sv);
1964 else if (isGV_with_GP(sv)) {
1965 SvPV_force_nomg_nolen(sv);
1967 SvUPGRADE(sv, SVt_PV);
1968 tmplen = SvLEN(sv); /* remember if already alloced */
1969 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
1970 /* try short-buffering it. Please update t/op/readline.t
1971 * if you change the growth length.
/* NOTE(review): rcatline appends to the existing value, so preserve
 * the current string contents before reading. */
1976 if (type == OP_RCATLINE && SvOK(sv)) {
1978 SvPV_force_nomg_nolen(sv);
1984 sv = sv_2mortal(newSV(80));
1988 /* This should not be marked tainted if the fp is marked clean */
1989 #define MAYBE_TAINT_LINE(io, sv) \
1990 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
1995 /* delay EOF state for a snarfed empty file */
1996 #define SNARF_EOF(gimme,rs,io,sv) \
1997 (gimme != G_SCALAR || SvCUR(sv) \
1998 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
/* NOTE(review): main read; on EOF/error clear the stream error, then
 * for <> advance to the next @ARGV file, for glob report child status. */
2002 if (!sv_gets(sv, fp, offset)
2004 || SNARF_EOF(gimme, PL_rs, io, sv)
2005 || PerlIO_error(fp)))
2007 PerlIO_clearerr(fp);
2008 if (IoFLAGS(io) & IOf_ARGV) {
2009 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
2012 (void)do_close(PL_last_in_gv, FALSE);
2014 else if (type == OP_GLOB) {
2015 if (!do_close(PL_last_in_gv, FALSE)) {
2016 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
2017 "glob failed (child exited with status %d%s)",
2018 (int)(STATUS_CURRENT >> 8),
2019 (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
2022 if (gimme == G_SCALAR) {
2023 if (type != OP_RCATLINE) {
2024 SV_CHECK_THINKFIRST_COW_DROP(TARG);
2030 MAYBE_TAINT_LINE(io, sv);
2033 MAYBE_TAINT_LINE(io, sv);
2035 IoFLAGS(io) |= IOf_NOLINE;
/* NOTE(review): glob post-processing — strip the trailing record
 * separator, then lstat() plain candidate names (no metacharacters) to
 * drop unmatched wildcards. */
2039 if (type == OP_GLOB) {
2043 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
2044 char * const tmps = SvEND(sv) - 1;
2045 if (*tmps == *SvPVX_const(PL_rs)) {
2047 SvCUR_set(sv, SvCUR(sv) - 1);
2050 for (t1 = SvPVX_const(sv); *t1; t1++)
2052 if (strchr("*%?", *t1))
2054 if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
2057 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
2058 (void)POPs; /* Unmatched wildcard? Chuck it... */
2061 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
2062 if (ckWARN(WARN_UTF8)) {
2063 const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
2064 const STRLEN len = SvCUR(sv) - offset;
2067 if (!is_utf8_string_loc(s, len, &f))
2068 /* Emulate :encoding(utf8) warning in the same case. */
2069 Perl_warner(aTHX_ packWARN(WARN_UTF8),
2070 "utf8 \"\\x%02X\" does not map to Unicode",
2071 f < (U8*)SvEND(sv) ? *f : 0);
/* NOTE(review): buffer-size housekeeping — shrink oversized line
 * buffers in list context; in scalar context reclaim slop only on the
 * first allocation (tmplen was zero). */
2074 if (gimme == G_ARRAY) {
2075 if (SvLEN(sv) - SvCUR(sv) > 20) {
2076 SvPV_shrink_to_cur(sv);
2078 sv = sv_2mortal(newSV(80));
2081 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
2082 /* try to reclaim a bit of scalar space (only on 1st alloc) */
2083 const STRLEN new_len
2084 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */
2085 SvPV_renew(sv, new_len);
/* NOTE(review): fragment of pp_helem — the PP(pp_helem) header and some
 * interior lines are missing from this excerpt (embedded line numbers
 * are non-contiguous). Code left byte-identical; only review comments
 * added. Visible behaviour: fetches $hash{key}, with lvalue/defer
 * (PVLV defelem) handling, local() save/restore, and OPpDEREF
 * autovivification. */
2096 SV * const keysv = POPs;
2097 HV * const hv = MUTABLE_HV(POPs);
2098 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2099 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2101 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2102 bool preeminent = TRUE;
2104 if (SvTYPE(hv) != SVt_PVHV)
2111 /* If we can determine whether the element exist,
2112 * Try to preserve the existenceness of a tied hash
2113 * element by using EXISTS and DELETE if possible.
2114 * Fallback to FETCH and STORE otherwise. */
2115 if (SvCANEXISTDELETE(hv))
2116 preeminent = hv_exists_ent(hv, keysv, 0);
2119 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2120 svp = he ? &HeVAL(he) : NULL;
/* NOTE(review): deferred lvalue — return a PVLV "defelem" proxy that
 * binds hv/keysv; assignment through it creates the element later. */
2122 if (!svp || !*svp || *svp == &PL_sv_undef) {
2126 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2128 lv = sv_newmortal();
2129 sv_upgrade(lv, SVt_PVLV);
2131 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
2132 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
2133 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
/* NOTE(review): local $h{k} — save a GV in a stash specially, save the
 * element if it pre-existed, else arrange for deletion on scope exit. */
2139 if (HvNAME_get(hv) && isGV(*svp))
2140 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
2141 else if (preeminent)
2142 save_helem_flags(hv, keysv, svp,
2143 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
2145 SAVEHDELETE(hv, keysv);
2147 else if (PL_op->op_private & OPpDEREF) {
2148 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
2152 sv = (svp && *svp ? *svp : &PL_sv_undef);
2153 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
2154 * was to make C<local $tied{foo} = $tied{foo}> possible.
2155 * However, it seems no longer to be needed for that purpose, and
2156 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g>
2157 * would loop endlessly since the pos magic is getting set on the
2158 * mortal copy and lost. However, the copy has the effect of
2159 * triggering the get magic, and losing it altogether made things like
2160 * c<$tied{foo};> in void context no longer do get magic, which some
2161 * code relied on. Also, delayed triggering of magic on @+ and friends
2162 * meant the original regex may be out of scope by now. So as a
2163 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
2164 * being called too many times). */
2165 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
/* NOTE(review): fragment of S_softref2xv_lite — the return-type line
 * and some interior lines are missing from this excerpt. Code left
 * byte-identical; only review comments added. Visible behaviour:
 * converts a symbolic (string) reference to a GV of the requested type,
 * dying under "use strict 'refs'" or for unusable values. */
2172 /* a stripped-down version of Perl_softref2xv() for use by
2173 * pp_multideref(), which doesn't use PL_op->op_flags */
2176 S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
2179 if (PL_op->op_private & HINT_STRICT_REFS) {
2181 Perl_die(aTHX_ PL_no_symref_sv, sv,
2182 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
2184 Perl_die(aTHX_ PL_no_usym, what);
2187 Perl_die(aTHX_ PL_no_usym, what);
/* NOTE(review): GV_ADD creates the glob if it doesn't exist yet. */
2188 return gv_fetchsv_nomg(sv, GV_ADD, type);
/* NOTE(review): fragment of pp_multideref — the PP(pp_multideref)
 * header and many interior lines are missing from this excerpt
 * (embedded line numbers are non-contiguous). Code left byte-identical;
 * only review comments added. Visible behaviour: interprets the op's
 * UNOP_AUX action stream to perform chained array/hash derefs and
 * indexing in a single op, mirroring unrolled rv2av/aelem and
 * rv2hv/helem sequences. */
/* Handle one or more aggregate derefs and array/hash indexings, e.g.
 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
 *
 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
 * Each of these either contains a set of actions, or an argument, such as
 * an IV to use as an array index, or a lexical var to retrieve.
 * Several actions re stored per UV; we keep shifting new actions off the
 * one UV, and only reload when it becomes zero.
2204 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
2205 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
2206 UV actions = items->uv;
2209 /* this tells find_uninit_var() where we're up to */
2210 PL_multideref_pc = items;
2213 /* there are three main classes of action; the first retrieve
2214 * the initial AV or HV from a variable or the stack; the second
2215 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
2216 * the third an unrolled (/DREFHV, rv2hv, helem).
2218 switch (actions & MDEREF_ACTION_MASK) {
2221 actions = (++items)->uv;
2224 case MDEREF_AV_padav_aelem: /* $lex[...] */
2225 sv = PAD_SVl((++items)->pad_offset);
2228 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
2229 sv = UNOP_AUX_item_sv(++items);
2230 assert(isGV_with_GP(sv));
2231 sv = (SV*)GvAVn((GV*)sv);
2234 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
2239 goto do_AV_rv2av_aelem;
2242 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
2243 sv = UNOP_AUX_item_sv(++items);
2244 assert(isGV_with_GP(sv));
2245 sv = GvSVn((GV*)sv);
2246 goto do_AV_vivify_rv2av_aelem;
2248 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
2249 sv = PAD_SVl((++items)->pad_offset);
2252 do_AV_vivify_rv2av_aelem:
2253 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
2254 /* this is the OPpDEREF action normally found at the end of
2255 * ops like aelem, helem, rv2sv */
2256 sv = vivify_ref(sv, OPpDEREF_AV);
2260 /* this is basically a copy of pp_rv2av when it just has the
2263 if (LIKELY(SvROK(sv))) {
2264 if (UNLIKELY(SvAMAGIC(sv))) {
2265 sv = amagic_deref_call(sv, to_av_amg);
2268 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
2269 DIE(aTHX_ "Not an ARRAY reference");
2271 else if (SvTYPE(sv) != SVt_PVAV) {
2272 if (!isGV_with_GP(sv))
2273 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
2274 sv = MUTABLE_SV(GvAVn((GV*)sv));
2280 /* retrieve the key; this may be either a lexical or package
2281 * var (whose index/ptr is stored as an item) or a signed
2282 * integer constant stored as an item.
2285 IV elem = 0; /* to shut up stupid compiler warnings */
2288 assert(SvTYPE(sv) == SVt_PVAV);
2290 switch (actions & MDEREF_INDEX_MASK) {
2291 case MDEREF_INDEX_none:
2293 case MDEREF_INDEX_const:
2294 elem = (++items)->iv;
2296 case MDEREF_INDEX_padsv:
2297 elemsv = PAD_SVl((++items)->pad_offset);
2299 case MDEREF_INDEX_gvsv:
2300 elemsv = UNOP_AUX_item_sv(++items);
2301 assert(isGV_with_GP(elemsv));
2302 elemsv = GvSVn((GV*)elemsv);
/* NOTE(review): warn when a reference is (numerically) used as an
 * array index — almost always a mistake in user code. */
2304 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
2305 && ckWARN(WARN_MISC)))
2306 Perl_warner(aTHX_ packWARN(WARN_MISC),
2307 "Use of reference \"%"SVf"\" as array index",
2309 /* the only time that S_find_uninit_var() needs this
2310 * is to determine which index value triggered the
2311 * undef warning. So just update it here. Note that
2312 * since we don't save and restore this var (e.g. for
2313 * tie or overload execution), its value will be
2314 * meaningless apart from just here */
2315 PL_multideref_pc = items;
2316 elem = SvIV(elemsv);
2321 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
2323 if (!(actions & MDEREF_FLAG_last)) {
2324 SV** svp = av_fetch((AV*)sv, elem, 1);
2325 if (!svp || ! (sv=*svp))
2326 DIE(aTHX_ PL_no_aelem, elem);
2330 if (PL_op->op_private &
2331 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2333 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2334 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
2337 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2338 sv = av_delete((AV*)sv, elem, discard);
/* NOTE(review): final array element — same lvalue/defer/localizing
 * handling as pp_aelem, including the PVLV defelem proxy for
 * out-of-range deferred lvalues. */
2346 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2347 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2348 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2349 bool preeminent = TRUE;
2350 AV *const av = (AV*)sv;
2353 if (UNLIKELY(localizing)) {
2357 /* If we can determine whether the element exist,
2358 * Try to preserve the existenceness of a tied array
2359 * element by using EXISTS and DELETE if possible.
2360 * Fallback to FETCH and STORE otherwise. */
2361 if (SvCANEXISTDELETE(av))
2362 preeminent = av_exists(av, elem);
2365 svp = av_fetch(av, elem, lval && !defer);
2368 if (!svp || !(sv = *svp)) {
2371 DIE(aTHX_ PL_no_aelem, elem);
2372 len = av_tindex(av);
2373 sv = sv_2mortal(newSVavdefelem(av,
2374 /* Resolve a negative index now, unless it points
2375 * before the beginning of the array, in which
2376 * case record it for error reporting in
2377 * magic_setdefelem. */
2378 elem < 0 && len + elem >= 0
2379 ? len + elem : elem, 1));
2382 if (UNLIKELY(localizing)) {
2384 save_aelem(av, elem, svp);
2385 sv = *svp; /* may have changed */
2388 SAVEADELETE(av, elem);
2393 sv = (svp ? *svp : &PL_sv_undef);
2394 /* see note in pp_helem() */
2395 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
2412 case MDEREF_HV_padhv_helem: /* $lex{...} */
2413 sv = PAD_SVl((++items)->pad_offset);
2416 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
2417 sv = UNOP_AUX_item_sv(++items);
2418 assert(isGV_with_GP(sv));
2419 sv = (SV*)GvHVn((GV*)sv);
2422 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
2427 goto do_HV_rv2hv_helem;
2430 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
2431 sv = UNOP_AUX_item_sv(++items);
2432 assert(isGV_with_GP(sv));
2433 sv = GvSVn((GV*)sv);
2434 goto do_HV_vivify_rv2hv_helem;
2436 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
2437 sv = PAD_SVl((++items)->pad_offset);
2440 do_HV_vivify_rv2hv_helem:
2441 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
2442 /* this is the OPpDEREF action normally found at the end of
2443 * ops like aelem, helem, rv2sv */
2444 sv = vivify_ref(sv, OPpDEREF_HV);
2448 /* this is basically a copy of pp_rv2hv when it just has the
2449 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
2452 if (LIKELY(SvROK(sv))) {
2453 if (UNLIKELY(SvAMAGIC(sv))) {
2454 sv = amagic_deref_call(sv, to_hv_amg);
2457 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
2458 DIE(aTHX_ "Not a HASH reference");
2460 else if (SvTYPE(sv) != SVt_PVHV) {
2461 if (!isGV_with_GP(sv))
2462 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
2463 sv = MUTABLE_SV(GvHVn((GV*)sv));
2469 /* retrieve the key; this may be either a lexical / package
2470 * var or a string constant, whose index/ptr is stored as an
2473 SV *keysv = NULL; /* to shut up stupid compiler warnings */
2475 assert(SvTYPE(sv) == SVt_PVHV);
2477 switch (actions & MDEREF_INDEX_MASK) {
2478 case MDEREF_INDEX_none:
2481 case MDEREF_INDEX_const:
2482 keysv = UNOP_AUX_item_sv(++items);
2485 case MDEREF_INDEX_padsv:
2486 keysv = PAD_SVl((++items)->pad_offset);
2489 case MDEREF_INDEX_gvsv:
2490 keysv = UNOP_AUX_item_sv(++items);
2491 keysv = GvSVn((GV*)keysv);
2495 /* see comment above about setting this var */
2496 PL_multideref_pc = items;
2499 /* ensure that candidate CONSTs have been HEKified */
2500 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
2501 || SvTYPE(keysv) >= SVt_PVMG
2504 || SvIsCOW_shared_hash(keysv));
2506 /* this is basically a copy of pp_helem with OPpDEREF skipped */
2508 if (!(actions & MDEREF_FLAG_last)) {
2509 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
2510 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
2511 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2515 if (PL_op->op_private &
2516 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2518 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2519 sv = hv_exists_ent((HV*)sv, keysv, 0)
2520 ? &PL_sv_yes : &PL_sv_no;
2523 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2524 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
/* NOTE(review): final hash element — same lvalue/defer/localizing
 * handling as pp_helem above, including the PVLV defelem proxy. */
2532 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2533 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2534 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2535 bool preeminent = TRUE;
2537 HV * const hv = (HV*)sv;
2540 if (UNLIKELY(localizing)) {
2544 /* If we can determine whether the element exist,
2545 * Try to preserve the existenceness of a tied hash
2546 * element by using EXISTS and DELETE if possible.
2547 * Fallback to FETCH and STORE otherwise. */
2548 if (SvCANEXISTDELETE(hv))
2549 preeminent = hv_exists_ent(hv, keysv, 0);
2552 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2553 svp = he ? &HeVAL(he) : NULL;
2557 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
2561 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2562 lv = sv_newmortal();
2563 sv_upgrade(lv, SVt_PVLV);
2565 sv_magic(lv, key2 = newSVsv(keysv),
2566 PERL_MAGIC_defelem, NULL, 0);
2567 /* sv_magic() increments refcount */
2568 SvREFCNT_dec_NN(key2);
2569 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2575 if (HvNAME_get(hv) && isGV(sv))
2576 save_gp(MUTABLE_GV(sv),
2577 !(PL_op->op_flags & OPf_SPECIAL));
2578 else if (preeminent) {
2579 save_helem_flags(hv, keysv, svp,
2580 (PL_op->op_flags & OPf_SPECIAL)
2581 ? 0 : SAVEf_SETMAGIC);
2582 sv = *svp; /* may have changed */
2585 SAVEHDELETE(hv, keysv);
2590 sv = (svp && *svp ? *svp : &PL_sv_undef);
2591 /* see note in pp_helem() */
2592 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
/* NOTE(review): consume the action just executed and loop for the next
 * one packed in the same UV. */
2601 actions >>= MDEREF_SHIFT;
/* NOTE(review): fragment of pp_iter — the PP(pp_iter) header and some
 * interior lines are missing from this excerpt (embedded line numbers
 * are non-contiguous). Code left byte-identical; only review comments
 * added. Visible behaviour: advances the current foreach loop variable
 * for string ranges, integer ranges, and array iteration. */
2615 cx = &cxstack[cxstack_ix];
2616 itersvp = CxITERVAR(cx);
2618 switch (CxTYPE(cx)) {
2620 case CXt_LOOP_LAZYSV: /* string increment */
2622 SV* cur = cx->blk_loop.state_u.lazysv.cur;
2623 SV *end = cx->blk_loop.state_u.lazysv.end;
2624 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
2625 It has SvPVX of "" and SvCUR of 0, which is what we want. */
2627 const char *max = SvPV_const(end, maxlen);
2628 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
/* NOTE(review): reuse the previous iteration's SV when nothing else
 * holds a reference; otherwise allocate fresh so closures capture a
 * distinct SV per iteration. */
2632 if (LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2633 /* safe to reuse old SV */
2634 sv_setsv(oldsv, cur);
2638 /* we need a fresh SV every time so that loop body sees a
2639 * completely new SV for closures/references to work as
2641 *itersvp = newSVsv(cur);
2642 SvREFCNT_dec_NN(oldsv);
2644 if (strEQ(SvPVX_const(cur), max))
2645 sv_setiv(cur, 0); /* terminate next time */
2651 case CXt_LOOP_LAZYIV: /* integer increment */
2653 IV cur = cx->blk_loop.state_u.lazyiv.cur;
2654 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
2658 /* don't risk potential race */
2659 if (LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2660 /* safe to reuse old SV */
2661 sv_setiv(oldsv, cur);
2665 /* we need a fresh SV every time so that loop body sees a
2666 * completely new SV for closures/references to work as they
2668 *itersvp = newSViv(cur);
2669 SvREFCNT_dec_NN(oldsv);
/* NOTE(review): IV_MAX guard — an increment past IV_MAX would be signed
 * overflow (UB), so the end marker is moved to IV_MIN instead. */
2672 if (UNLIKELY(cur == IV_MAX)) {
2673 /* Handle end of range at IV_MAX */
2674 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
2676 ++cx->blk_loop.state_u.lazyiv.cur;
2680 case CXt_LOOP_FOR: /* iterate array */
2683 AV *av = cx->blk_loop.state_u.ary.ary;
2685 bool av_is_stack = FALSE;
2692 if (PL_op->op_private & OPpITER_REVERSED) {
2693 ix = --cx->blk_loop.state_u.ary.ix;
2694 if (UNLIKELY(ix <= (av_is_stack ? cx->blk_loop.resetsp : -1)))
2698 ix = ++cx->blk_loop.state_u.ary.ix;
2699 if (UNLIKELY(ix > (av_is_stack ? cx->blk_oldsp : AvFILL(av))))
/* NOTE(review): magical/reified arrays must go through av_fetch; plain
 * arrays can read AvARRAY directly. */
2703 if (UNLIKELY(SvMAGICAL(av) || AvREIFY(av))) {
2704 SV * const * const svp = av_fetch(av, ix, FALSE);
2705 sv = svp ? *svp : NULL;
2708 sv = AvARRAY(av)[ix];
2711 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
2712 SvSetMagicSV(*itersvp, sv);
2717 if (UNLIKELY(SvIS_FREED(sv))) {
2719 Perl_croak(aTHX_ "Use of freed value in iteration");
2726 SvREFCNT_inc_simple_void_NN(sv);
2729 else if (!av_is_stack) {
2730 sv = newSVavdefelem(av, ix, 0);
2737 SvREFCNT_dec(oldsv);
2742 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
2748 A description of how taint works in pattern matching and substitution.
2750 This is all conditional on NO_TAINT_SUPPORT not being defined. Under
2751 NO_TAINT_SUPPORT, taint-related operations should become no-ops.
2753 While the pattern is being assembled/concatenated and then compiled,
2754 PL_tainted will get set (via TAINT_set) if any component of the pattern
2755 is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
2756 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
2757 TAINT_get). It will also be set if any component of the pattern matches
2758 based on locale-dependent behavior.
2760 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
2761 the pattern is marked as tainted. This means that subsequent usage, such
2762 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
2763 on the new pattern too.
2765 RXf_TAINTED_SEEN is used post-execution by the get magic code
2766 of $1 et al to indicate whether the returned value should be tainted.
2767 It is the responsibility of the caller of the pattern (i.e. pp_match,
2768 pp_subst etc) to set this flag for any other circumstances where $1 needs
2771 The taint behaviour of pp_subst (and pp_substcont) is quite complex.
2773 There are three possible sources of taint
2775 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
2776 * the replacement string (or expression under /e)
2778 There are four destinations of taint and they are affected by the sources
2779 according to the rules below:
2781 * the return value (not including /r):
2782 tainted by the source string and pattern, but only for the
2783 number-of-iterations case; boolean returns aren't tainted;
2784 * the modified string (or modified copy under /r):
2785 tainted by the source string, pattern, and replacement strings;
2787 tainted by the pattern, and under 'use re "taint"', by the source
2789 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
2790 should always be unset before executing subsequent code.
2792 The overall action of pp_subst is:
2794 * at the start, set bits in rxtainted indicating the taint status of
2795 the various sources.
2797 * After each pattern execution, update the SUBST_TAINT_PAT bit in
2798 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
2799 pattern has subsequently become tainted via locale ops.
2801 * If control is being passed to pp_substcont to execute a /e block,
2802 save rxtainted in the CXt_SUBST block, for future use by
2805 * Whenever control is being returned to perl code (either by falling
2806 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
2807 use the flag bits in rxtainted to make all the appropriate types of
2808 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
2809 et al will appear tainted.
2811 pp_match is just a simpler version of the above.
2827 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
2828 See "how taint works" above */
2831 REGEXP *rx = PM_GETRE(pm);
2833 int force_on_match = 0;
2834 const I32 oldsave = PL_savestack_ix;
2836 bool doutf8 = FALSE; /* whether replacement is in utf8 */
2841 /* known replacement string? */
2842 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
2846 if (PL_op->op_flags & OPf_STACKED)
2855 SvGETMAGIC(TARG); /* must come before cow check */
2857 /* Awooga. Awooga. "bool" types that are actually char are dangerous,
2858 because they make integers such as 256 "false". */
2859 is_cow = SvIsCOW(TARG) ? TRUE : FALSE;
2862 sv_force_normal_flags(TARG,0);
2864 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)
2865 && (SvREADONLY(TARG)
2866 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
2867 || SvTYPE(TARG) > SVt_PVLV)
2868 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
2869 Perl_croak_no_modify();
2872 orig = SvPV_nomg(TARG, len);
2873 /* note we don't (yet) force the var into being a string; if we fail
2874 * to match, we leave as-is; on successful match howeverm, we *will*
2875 * coerce into a string, then repeat the match */
2876 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
2879 /* only replace once? */
2880 once = !(rpm->op_pmflags & PMf_GLOBAL);
/* NOTE(review): this span is the tail of pp_subst (the s/// operator).
 * The function header and many interior lines are elided from this
 * excerpt (the leading numbers are original-file line numbers), so the
 * comments below describe only what is visible here. */
2882 /* See "how taint works" above */
/* Accumulate taint sources into rxtainted: tainted target string,
 * tainted pattern, /re/-taint mode, and whether a boolean return must
 * itself stay untainted. */
2885 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
2886 | (RX_ISTAINTED(rx) ? SUBST_TAINT_PAT : 0)
2887 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
2888 | ((once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
2889 ? SUBST_TAINT_BOOLRET : 0));
2895 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
2897 strend = orig + len;
/* slen is the character length (UTF-8 aware); maxiters bounds the match
 * loop below: each position can match at most twice (zero-length, then
 * non-zero). */
2898 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len;
2899 maxiters = 2 * slen + 10; /* We can match twice at each
2900 position, once with zero-length,
2901 second time with non-zero. */
2903 if (!RX_PRELEN(rx) && PL_curpm
2904 && !ReANY(rx)->mother_re) {
2909 #ifdef PERL_SAWAMPERSAND
/* Decide whether the regexp engine must copy the subject string
 * (capture groups, embedded (?{}) code, or explicit KEEPCOPY). */
2910 r_flags = ( RX_NPARENS(rx)
2912 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
2913 || (rpm->op_pmflags & PMf_KEEPCOPY)
2918 r_flags = REXEC_COPY_STR;
/* First match attempt: on failure push the no-match result and unwind. */
2921 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
2924 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
2925 LEAVE_SCOPE(oldsave);
2930 /* known replacement string? */
2932 /* replacement needing upgrading? */
/* Constant replacement whose encoding lags the target: upgrade a mortal
 * copy to UTF-8 so the splice below stays encoding-consistent. */
2933 if (DO_UTF8(TARG) && !doutf8) {
2934 nsv = sv_newmortal();
2937 sv_recode_to_utf8(nsv, _get_encoding());
2939 sv_utf8_upgrade(nsv);
2940 c = SvPV_const(nsv, clen);
2944 c = SvPV_const(dstr, clen);
2945 doutf8 = DO_UTF8(dstr);
2948 if (SvTAINTED(dstr))
2949 rxtainted |= SUBST_TAINT_REPL;
2956 /* can do inplace substitution? */
/* In-place is only legal when the replacement fits without growing the
 * buffer, the engine did not need its own copy, and no flag forbids
 * mutating TARG directly (r/// implies a copy, for instance). */
2961 && (I32)clen <= RX_MINLENRET(rx)
2963 || !(r_flags & REXEC_COPY_STR)
2964 || (!SvGMAGICAL(dstr) && !(RX_EXTFLAGS(rx) & RXf_EVAL_SEEN))
2966 && !(RX_EXTFLAGS(rx) & RXf_NO_INPLACE_SUBST)
2967 && (!doutf8 || SvUTF8(TARG))
2968 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
2972 if (SvIsCOW(TARG)) {
2973 if (!force_on_match)
2975 assert(SvVOK(TARG));
2978 if (force_on_match) {
2979 /* redo the first match, this time with the orig var
2980 * forced into being a string */
2982 orig = SvPV_force_nomg(TARG, len);
2988 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
2989 rxtainted |= SUBST_TAINT_PAT;
/* Single in-place splice: overwrite the match and shift whichever side
 * of the string is cheaper to move. */
2990 m = orig + RX_OFFS(rx)[0].start;
2991 d = orig + RX_OFFS(rx)[0].end;
2993 if (m - s > strend - d) { /* faster to shorten from end */
2996 Copy(c, m, clen, char);
3001 Move(d, m, i, char);
3005 SvCUR_set(TARG, m - s);
3007 else { /* faster from front */
3011 Move(s, d - i, i, char);
3014 Copy(c, d, clen, char);
/* Global in-place loop: repeat match/splice until the engine reports no
 * further match; maxiters guards against runaway zero-length loops. */
3021 d = s = RX_OFFS(rx)[0].start + orig;
3024 if (UNLIKELY(iters++ > maxiters))
3025 DIE(aTHX_ "Substitution loop");
3026 if (UNLIKELY(RX_MATCH_TAINTED(rx))) /* run time pattern taint, eg locale */
3027 rxtainted |= SUBST_TAINT_PAT;
3028 m = RX_OFFS(rx)[0].start + orig;
3031 Move(s, d, i, char);
3035 Copy(c, d, clen, char);
3038 s = RX_OFFS(rx)[0].end + orig;
3039 } while (CALLREGEXEC(rx, s, strend, orig,
3040 s == m, /* don't match same null twice */
3042 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3045 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
3046 Move(s, d, i+1, char); /* include the NUL */
/* Non-in-place path: build the substituted result in a fresh SV (dstr),
 * then install it into TARG afterwards. */
3056 if (force_on_match) {
3057 /* redo the first match, this time with the orig var
3058 * forced into being a string */
3060 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3061 /* I feel that it should be possible to avoid this mortal copy
3062 given that the code below copies into a new destination.
3063 However, I suspect it isn't worth the complexity of
3064 unravelling the C<goto force_it> for the small number of
3065 cases where it would be viable to drop into the copy code. */
3066 TARG = sv_2mortal(newSVsv(TARG));
3068 orig = SvPV_force_nomg(TARG, len);
3074 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3075 rxtainted |= SUBST_TAINT_PAT;
3077 s = RX_OFFS(rx)[0].start + orig;
3078 dstr = newSVpvn_flags(orig, s-orig,
3079 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
3084 /* note that a whole bunch of local vars are saved here for
3085 * use by pp_substcont: here's a list of them in case you're
3086 * searching for places in this sub that uses a particular var:
3087 * iters maxiters r_flags oldsave rxtainted orig dstr targ
3088 * s m strend rx once */
/* s///e with a run-time replacement: jump into the replacement op tree;
 * pp_substcont resumes this loop when it finishes. */
3090 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
3094 if (UNLIKELY(iters++ > maxiters))
3095 DIE(aTHX_ "Substitution loop");
3096 if (UNLIKELY(RX_MATCH_TAINTED(rx)))
3097 rxtainted |= SUBST_TAINT_PAT;
/* The engine may have copied the subject buffer; rebase our pointers
 * onto the copy so offsets stay valid. */
3098 if (RX_MATCH_COPIED(rx) && RX_SUBBEG(rx) != orig) {
3100 char *old_orig = orig;
3101 assert(RX_SUBOFFSET(rx) == 0);
3103 orig = RX_SUBBEG(rx);
3104 s = orig + (old_s - old_orig);
3105 strend = s + (strend - old_s);
3107 m = RX_OFFS(rx)[0].start + orig;
3108 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
3109 s = RX_OFFS(rx)[0].end + orig;
3111 /* replacement already stringified */
3113 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
3118 if (!nsv) nsv = sv_newmortal();
3119 sv_copypv(nsv, repl);
3120 if (!DO_UTF8(nsv)) sv_recode_to_utf8(nsv, _get_encoding());
3121 sv_catsv(dstr, nsv);
3123 else sv_catsv(dstr, repl);
3124 if (UNLIKELY(SvTAINTED(repl)))
3125 rxtainted |= SUBST_TAINT_REPL;
3129 } while (CALLREGEXEC(rx, s, strend, orig,
3130 s == m, /* Yields minend of 0 or 1 */
3132 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
/* Append the unmatched tail, then hand dstr's buffer to TARG. */
3133 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
3135 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3136 /* From here on down we're using the copy, and leaving the original
3143 /* The match may make the string COW. If so, brilliant, because
3144 that's just saved us one malloc, copy and free - the regexp has
3145 donated the old buffer, and we malloc an entirely new one, rather
3146 than the regexp malloc()ing a buffer and copying our original,
3147 only for us to throw it away here during the substitution. */
3148 if (SvIsCOW(TARG)) {
3149 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
/* Steal dstr's string buffer rather than copying it; dstr is left
 * empty so its (mortal) destruction won't free the donated buffer. */
3155 SvPV_set(TARG, SvPVX(dstr));
3156 SvCUR_set(TARG, SvCUR(dstr));
3157 SvLEN_set(TARG, SvLEN(dstr));
3158 SvFLAGS(TARG) |= SvUTF8(dstr);
3159 SvPV_set(dstr, NULL);
3166 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3167 (void)SvPOK_only_UTF8(TARG);
3170 /* See "how taint works" above */
/* Propagate accumulated taint: to $1 et al via the regexp, to the
 * op's return value, and (via set-magic below) to TARG itself. */
3172 if ((rxtainted & SUBST_TAINT_PAT) ||
3173 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
3174 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
3176 (RX_MATCH_TAINTED_on(rx)); /* taint $1 et al */
3178 if (!(rxtainted & SUBST_TAINT_BOOLRET)
3179 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
3181 SvTAINTED_on(TOPs); /* taint return value */
3183 SvTAINTED_off(TOPs); /* may have got tainted earlier */
3185 /* needed for mg_set below */
3187 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
3191 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
3193 LEAVE_SCOPE(oldsave);
/* NOTE(review): interior of pp_grepwhile (one grep BLOCK iteration); the
 * PP(pp_grepwhile) header and some lines are elided from this excerpt. */
/* Keep the current source item: copy it down into the destination
 * segment of the stack, then advance the source mark.  (The test of the
 * block's boolean result is on an elided line above.) */
3202 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
3203 ++*PL_markstack_ptr;
3205 LEAVE_with_name("grep_item"); /* exit inner scope */
/* All source items consumed: pop the three grep marks and deliver the
 * result (a count in scalar context, the kept items in list context). */
3208 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
3210 const I32 gimme = GIMME_V;
3212 LEAVE_with_name("grep"); /* exit outer scope */
3213 (void)POPMARK; /* pop src */
3214 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
3215 (void)POPMARK; /* pop dst */
3216 SP = PL_stack_base + POPMARK; /* pop original mark */
3217 if (gimme == G_SCALAR) {
3221 else if (gimme == G_ARRAY)
/* Items remain: bind $_ to the next source element and re-enter the
 * grep block via cLOGOP->op_other. */
3228 ENTER_with_name("grep_item"); /* enter inner scope */
3231 src = PL_stack_base[TOPMARK];
/* A PADTMP may be reused by later ops, so alias $_ to a mortal copy. */
3232 if (SvPADTMP(src)) {
3233 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
3239 RETURNOP(cLOGOP->op_other);
/* NOTE(review): interior of pp_leavesub (return from a non-XS sub); the
 * function header and some lines are elided from this excerpt.  Return
 * values are moved down to just above the caller's mark, copied unless
 * they are disposable temporaries that can be reused directly. */
3253 if (CxMULTICALL(&cxstack[cxstack_ix])) {
3254 /* entry zero of a stack is always PL_sv_undef, which
3255 * simplifies converting a '()' return into undef in scalar context */
3256 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
3261 cxstack_ix++; /* temporarily protect top context */
/* Scalar context: exactly one value survives.  A non-magical temp with
 * refcount 1 can be handed over without copying; anything else gets a
 * mortal copy (guarded against FREETMPS freeing it mid-flight). */
3264 if (gimme == G_SCALAR) {
3266 if (LIKELY(MARK <= SP)) {
3267 if (cx->blk_sub.cv && CvDEPTH(cx->blk_sub.cv) > 1) {
3268 if (SvTEMP(TOPs) && SvREFCNT(TOPs) == 1
3269 && !SvMAGICAL(TOPs)) {
3270 *MARK = SvREFCNT_inc(TOPs);
3275 sv = SvREFCNT_inc(TOPs); /* FREETMPS could clobber it */
3277 *MARK = sv_mortalcopy(sv);
3278 SvREFCNT_dec_NN(sv);
3281 else if (SvTEMP(TOPs) && SvREFCNT(TOPs) == 1
3282 && !SvMAGICAL(TOPs)) {
3286 *MARK = sv_mortalcopy(TOPs);
/* Empty return list in scalar context yields undef. */
3290 *MARK = &PL_sv_undef;
/* List context: mortal-copy every value that is not already a private
 * temporary, so the values outlive the sub's scope teardown. */
3294 else if (gimme == G_ARRAY) {
3295 for (MARK = newsp + 1; MARK <= SP; MARK++) {
3296 if (!SvTEMP(*MARK) || SvREFCNT(*MARK) != 1
3297 || SvMAGICAL(*MARK)) {
3298 *MARK = sv_mortalcopy(*MARK);
3299 TAINT_NOT; /* Each item is independent */
3306 POPSUB(cx,sv); /* Stack values are safe: release CV and @_ ... */
3308 PL_curpm = newpm; /* ... and pop $1 et al */
3311 return cx->blk_sub.retop;
/* NOTE(review): interior of pp_entersub (subroutine call dispatch); the
 * function header and many lines are elided from this excerpt.  Stages
 * visible here: resolve sv on the stack to a CV, handle undefined subs
 * and AUTOLOAD, honour the debugger's DB::sub hook, then either push a
 * CXt_SUB context and run a pure-perl sub, or invoke an XSUB directly. */
3321 const bool hasargs = (PL_op->op_flags & OPf_STACKED) != 0;
3324 DIE(aTHX_ "Not a CODE reference");
3325 /* This is overwhelmingly the most common case: */
3326 if (!LIKELY(SvTYPE(sv) == SVt_PVGV && (cv = GvCVu((const GV *)sv)))) {
/* Slow path: sv is not a simple glob-with-CV.  Dispatch on its type:
 * glob, magical/overloaded value, plain string (symbolic ref), or a
 * reference to a CV. */
3327 switch (SvTYPE(sv)) {
3330 if (!(cv = GvCVu((const GV *)sv))) {
3332 cv = sv_2cv(sv, &stash, &gv, 0);
3341 if(isGV_with_GP(sv)) goto we_have_a_glob;
3344 if (sv == &PL_sv_yes) { /* unfound import, ignore */
3346 SP = PL_stack_base + POPMARK;
/* Try CODE-deref overloading before treating sv as a name. */
3354 sv = amagic_deref_call(sv, to_cv_amg);
3355 /* Don't SPAGAIN here. */
3362 DIE(aTHX_ PL_no_usym, "a subroutine");
/* Symbolic call by name: forbidden under strict refs. */
3363 sym = SvPV_nomg_const(sv, len);
3364 if (PL_op->op_private & HINT_STRICT_REFS)
3365 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
3366 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
3369 cv = MUTABLE_CV(SvRV(sv));
3370 if (SvTYPE(cv) == SVt_PVCV)
3375 DIE(aTHX_ "Not a CODE reference");
3376 /* This is the second most common case: */
3378 cv = MUTABLE_CV(sv);
/* Calling a closure prototype (not yet cloned) is an error. */
3386 if (UNLIKELY(CvCLONE(cv) && ! CvCLONED(cv)))
3387 DIE(aTHX_ "Closure prototype called");
/* CV exists but has no body (neither op tree nor XSUB): undefined sub.
 * Try AUTOLOAD before giving up. */
3388 if (UNLIKELY(!CvROOT(cv) && !CvXSUB(cv))) {
3392 /* anonymous or undef'd function leaves us no recourse */
3393 if (CvLEXICAL(cv) && CvHASGV(cv))
3394 DIE(aTHX_ "Undefined subroutine &%"SVf" called",
3395 SVfARG(cv_name(cv, NULL, 0)));
3396 if (CvANON(cv) || !CvHASGV(cv)) {
3397 DIE(aTHX_ "Undefined subroutine called");
3400 /* autoloaded stub? */
3401 if (cv != GvCV(gv = CvGV(cv))) {
3404 /* should call AUTOLOAD now? */
3407 if ((autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
3408 GvNAMEUTF8(gv) ? SVf_UTF8 : 0)))
3414 sub_name = sv_newmortal();
3415 gv_efullname3(sub_name, gv, NULL);
3416 DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
/* Debugger single-step hook: route the call through DB::sub (or
 * DB::lsub for lvalue subs), remembering the real COP for later. */
3424 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
3427 Perl_get_db_sub(aTHX_ &sv, cv);
3429 PL_curcopdb = PL_curcop;
3431 /* check for lsub that handles lvalue subroutines */
3432 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
3433 /* if lsub not found then fall back to DB::sub */
3434 if (!cv) cv = GvCV(PL_DBsub);
3436 cv = GvCV(PL_DBsub);
3439 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
3440 DIE(aTHX_ "No DB::sub routine defined");
/* Pure-perl sub: push a sub context, bump the recursion depth (cloning
 * the pad for re-entrant calls), install @_, and jump to the sub's
 * first op. */
3445 if (!(CvISXSUB(cv))) {
3446 /* This path taken at least 75% of the time */
3448 PADLIST * const padlist = CvPADLIST(cv);
3451 PUSHBLOCK(cx, CXt_SUB, MARK);
3453 cx->blk_sub.retop = PL_op->op_next;
3454 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2)) {
3455 PERL_STACK_OVERFLOW_CHECK();
3456 pad_push(padlist, depth);
3459 PAD_SET_CUR_NOSAVE(padlist, depth);
3460 if (LIKELY(hasargs)) {
/* Pad slot 0 is the sub's @_ array; alias *defgv's AV to it. */
3461 AV *const av = MUTABLE_AV(PAD_SVl(0));
3465 if (UNLIKELY(AvREAL(av))) {
3466 /* @_ is normally not REAL--this should only ever
3467 * happen when DB::sub() calls things that modify @_ */
3472 defavp = &GvAV(PL_defgv);
3473 cx->blk_sub.savearray = *defavp;
3474 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
3475 CX_CURPAD_SAVE(cx->blk_sub);
3476 cx->blk_sub.argarray = av;
/* Grow @_'s backing store if needed, then bulk-copy the stacked args. */
3479 if (UNLIKELY(items - 1 > AvMAX(av))) {
3480 SV **ary = AvALLOC(av);
3481 AvMAX(av) = items - 1;
3482 Renew(ary, items, SV*);
3487 Copy(MARK+1,AvARRAY(av),items,SV*);
3488 AvFILLp(av) = items - 1;
3494 if (SvPADTMP(*MARK)) {
3495 *MARK = sv_mortalcopy(*MARK);
/* Reject lvalue-context calls of non-:lvalue subs. */
3503 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3505 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3506 SVfARG(cv_name(cv, NULL, 0)));
3507 /* warning must come *after* we fully set up the context
3508 * stuff so that __WARN__ handlers can safely dounwind()
3511 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
3512 && ckWARN(WARN_RECURSION)
3513 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
3514 sub_crush_depth(cv);
3515 RETURNOP(CvSTART(cv));
/* XSUB path: no perl-level context is pushed; arguments stay on the
 * stack (or are copied from @_ for &foo; style calls) and the C body is
 * invoked directly. */
3518 SSize_t markix = TOPMARK;
3523 if (UNLIKELY(((PL_op->op_private
3524 & PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
3525 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3527 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3528 SVfARG(cv_name(cv, NULL, 0)));
3530 if (UNLIKELY(!hasargs && GvAV(PL_defgv))) {
3531 /* Need to copy @_ to stack. Alternative may be to
3532 * switch stack to @_, and copy return values
3533 * back. This would allow popping @_ in XSUB, e.g.. XXXX */
3534 AV * const av = GvAV(PL_defgv);
3535 const SSize_t items = AvFILL(av) + 1;
/* Tied/magical @_ must go through av_fetch; plain @_ is read directly.
 * Holes in @_ become deferred-element SVs. */
3539 const bool m = cBOOL(SvRMAGICAL(av));
3540 /* Mark is at the end of the stack. */
3542 for (; i < items; ++i)
3546 SV ** const svp = av_fetch(av, i, 0);
3547 sv = svp ? *svp : NULL;
3549 else sv = AvARRAY(av)[i];
3550 if (sv) SP[i+1] = sv;
3552 SP[i+1] = newSVavdefelem(av, i, 1);
3560 SV **mark = PL_stack_base + markix;
3561 SSize_t items = SP - mark;
3564 if (*mark && SvPADTMP(*mark)) {
3565 *mark = sv_mortalcopy(*mark);
3569 /* We assume first XSUB in &DB::sub is the called one. */
3570 if (UNLIKELY(PL_curcopdb)) {
3571 SAVEVPTR(PL_curcop);
3572 PL_curcop = PL_curcopdb;
3575 /* Do we need to open block here? XXXX */
3577 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
3579 CvXSUB(cv)(aTHX_ cv);
3581 /* Enforce some sanity in scalar context. */
/* In scalar context an XSUB must leave exactly one value above the
 * mark: truncate extras or supply undef if it left none. */
3582 if (gimme == G_SCALAR) {
3583 SV **svp = PL_stack_base + markix + 1;
3584 if (svp != PL_stack_sp) {
3585 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
/* Emit the "Deep recursion" warning for cv: a fixed message for
 * anonymous subs, otherwise one naming the sub via cv_name().
 * (Return type and braces are on lines elided from this excerpt.) */
3595 Perl_sub_crush_depth(pTHX_ CV *cv)
3597 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
3600 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
3602 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%"SVf"\"",
3603 SVfARG(cv_name(cv,NULL,0)));
/* NOTE(review): interior of pp_aelem (array element fetch, $a[$i]); the
 * PP(pp_aelem) header and some lines are elided from this excerpt. */
/* Pop the index and the array; decode the op flags: lval = fetch for
 * modification, defer = delay vivification (\$a[$i]), localizing =
 * local $a[$i]. */
3611 SV* const elemsv = POPs;
3612 IV elem = SvIV(elemsv);
3613 AV *const av = MUTABLE_AV(POPs);
3614 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
3615 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
3616 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
3617 bool preeminent = TRUE;
/* Using a reference as an index almost always means a bug; warn. */
3620 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
3621 Perl_warner(aTHX_ packWARN(WARN_MISC),
3622 "Use of reference \"%"SVf"\" as array index",
3624 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
3627 if (UNLIKELY(localizing)) {
3631 /* If we can determine whether the element exist,
3632 * Try to preserve the existenceness of a tied array
3633 * element by using EXISTS and DELETE if possible.
3634 * Fallback to FETCH and STORE otherwise. */
3635 if (SvCANEXISTDELETE(av))
3636 preeminent = av_exists(av, elem);
3639 svp = av_fetch(av, elem, lval && !defer);
3641 #ifdef PERL_MALLOC_WRAP
/* Clamp the index so the malloc-wrap overflow check below sees a sane
 * value even for huge unsigned or floating-point indices. */
3642 if (SvUOK(elemsv)) {
3643 const UV uv = SvUV(elemsv);
3644 elem = uv > IV_MAX ? IV_MAX : uv;
3646 else if (SvNOK(elemsv))
3647 elem = (IV)SvNV(elemsv);
3649 static const char oom_array_extend[] =
3650 "Out of memory during array extend"; /* Duplicated in av.c */
3651 MEM_WRAP_CHECK_1(elem,SV*,oom_array_extend);
/* Deferred lvalue with no existing element: push a defelem proxy that
 * vivifies on assignment instead of now. */
3654 if (!svp || !*svp) {
3657 DIE(aTHX_ PL_no_aelem, elem);
3658 len = av_tindex(av);
3659 mPUSHs(newSVavdefelem(av,
3660 /* Resolve a negative index now, unless it points before the
3661 beginning of the array, in which case record it for error
3662 reporting in magic_setdefelem. */
3663 elem < 0 && len + elem >= 0 ? len + elem : elem,
/* local $a[$i]: save the element (or arrange deletion on scope exit if
 * it did not previously exist in a tied array). */
3667 if (UNLIKELY(localizing)) {
3669 save_aelem(av, elem, svp);
3671 SAVEADELETE(av, elem);
/* ->[...] chains: auto-vivify the element into a reference. */
3673 else if (PL_op->op_private & OPpDEREF) {
3674 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
3678 sv = (svp ? *svp : &PL_sv_undef);
3679 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
/* Auto-vivification helper: turn an undef sv into a reference to a new
 * SV/AV/HV according to to_what (the OPpDEREF_* request from the op).
 * Croaks on read-only values.  (Some lines, including the switch
 * skeleton, are elided from this excerpt.) */
3686 Perl_vivify_ref(pTHX_ SV *sv, U32 to_what)
3688 PERL_ARGS_ASSERT_VIVIFY_REF;
3693 Perl_croak_no_modify();
3694 prepare_SV_for_RV(sv);
3697 SvRV_set(sv, newSV(0));
3700 SvRV_set(sv, MUTABLE_SV(newAV()));
3703 SvRV_set(sv, MUTABLE_SV(newHV()));
/* Get-magical sv: return a magic-free mortal copy so the caller does
 * not retrigger magic on the freshly vivified value. */
3710 if (SvGMAGICAL(sv)) {
3711 /* copy the sv without magic to prevent magic from being
3713 SV* msv = sv_newmortal();
3714 sv_setsv_nomg(msv, sv);
/* Resolve the invocant of a method call (first stacked value after the
 * mark) to the stash (package HV) to search for the method.  Handles
 * blessed references, bareword class names, globs/filehandles, and the
 * various "can't call method" error cases.  NOTE(review): several lines
 * of this function are elided from this excerpt. */
3720 PERL_STATIC_INLINE HV *
3721 S_opmethod_stash(pTHX_ SV* meth)
/* An empty argument list (mark == stack top) means there is no invocant
 * at all: croak immediately. */
3726 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
3727 ? (Perl_croak(aTHX_ "Can't call method \"%"SVf"\" without a "
3728 "package or object reference", SVfARG(meth)),
3730 : *(PL_stack_base + TOPMARK + 1);
3732 PERL_ARGS_ASSERT_OPMETHOD_STASH;
3736 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on an undefined value",
3739 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
/* Fast path: a shared-hash-key string ("MyClass"->meth) hits the stash
 * cache directly. */
3740 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
3741 stash = gv_stashsv(sv, GV_CACHE_ONLY);
3742 if (stash) return stash;
3746 ob = MUTABLE_SV(SvRV(sv));
3747 else if (!SvOK(sv)) goto undefined;
3748 else if (isGV_with_GP(sv)) {
3750 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
3751 "without a package or object reference",
/* A defelem LV proxy: unwrap to its target and replace the stacked
 * invocant with a real reference. */
3754 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') {
3755 assert(!LvTARGLEN(ob));
3759 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob));
3762 /* this isn't a reference */
/* Plain string invocant: try it as a package name; failing that, as a
 * filehandle name. */
3765 const char * const packname = SvPV_nomg_const(sv, packlen);
3766 const U32 packname_utf8 = SvUTF8(sv);
3767 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
3768 if (stash) return stash;
3770 if (!(iogv = gv_fetchpvn_flags(
3771 packname, packlen, packname_utf8, SVt_PVIO
3773 !(ob=MUTABLE_SV(GvIO(iogv))))
3775 /* this isn't the name of a filehandle either */
3778 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
3779 "without a package or object reference",
3782 /* assume it's a package name */
3783 stash = gv_stashpvn(packname, packlen, packname_utf8);
3784 if (stash) return stash;
3785 else return MUTABLE_HV(sv);
3787 /* it _is_ a filehandle name -- replace with a reference */
3788 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
3791 /* if we got here, ob should be an object or a glob */
/* The "unblessed reference" error suggests DOES when the method was
 * isa, since that is the usual intent. */
3792 if (!ob || !(SvOBJECT(ob)
3793 || (isGV_with_GP(ob)
3794 && (ob = MUTABLE_SV(GvIO((const GV *)ob)))
3797 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on unblessed reference",
3798 SVfARG((SvSCREAM(meth) && strEQ(SvPV_nolen_const(meth),"isa"))
3799 ? newSVpvs_flags("DOES", SVs_TEMP)
/* NOTE(review): interior of pp_method (dynamic method call, $obj->$m);
 * the PP(pp_method) header is elided from this excerpt.  If the method
 * "name" is already a CODE ref, use it directly; otherwise resolve the
 * stash from the invocant and look the method up by name (AUTOLOAD
 * honoured, croak on failure). */
3811 SV* const meth = TOPs;
3814 SV* const rmeth = SvRV(meth);
3815 if (SvTYPE(rmeth) == SVt_PVCV) {
3821 stash = opmethod_stash(meth);
3823 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
3826 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* Shared fast path for the method ops below: look meth up in the given
 * method cache and, if the cached GV's CV is still valid for the
 * current MRO generation, push that CV and (on an elided line) return
 * early, skipping the full gv_fetchmethod lookup. */
3830 #define METHOD_CHECK_CACHE(stash,cache,meth) \
3831 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
3833 gv = MUTABLE_GV(HeVAL(he)); \
3834 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
3835 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
3837 XPUSHs(MUTABLE_SV(GvCV(gv))); \
/* NOTE(review): interior of pp_method_named (method call with a name
 * known at compile time, $obj->foo); the PP header is elided.  Resolve
 * the stash from the invocant, try the stash's own method cache, then
 * fall back to a full lookup with AUTOLOAD and croak-on-failure. */
3846 SV* const meth = cMETHOPx_meth(PL_op);
3847 HV* const stash = opmethod_stash(meth);
3849 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
3850 METHOD_CHECK_CACHE(stash, stash, meth);
3853 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
3856 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* NOTE(review): interior of pp_method_super ($obj->SUPER::foo); the PP
 * header is elided.  SUPER resolution starts from the *compiling*
 * package (CopSTASH), not the invocant's stash; the invocant is still
 * validated via opmethod_stash.  Tries the MRO 'super' cache first. */
3865 SV* const meth = cMETHOPx_meth(PL_op);
3866 HV* const stash = CopSTASH(PL_curcop);
3867 /* Actually, SUPER doesn't need real object's (or class') stash at all,
3868 * as it uses CopSTASH. However, we must ensure that object(class) is
3869 * correct (this check is done by S_opmethod_stash) */
3870 opmethod_stash(meth);
3872 if ((cache = HvMROMETA(stash)->super)) {
3873 METHOD_CHECK_CACHE(stash, cache, meth);
3876 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
3879 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* NOTE(review): interior of pp_method_redir (redirected method call,
 * $obj->Other::Class::foo); the PP header is elided.  The lookup stash
 * comes from the op's recorded class name, not the invocant; the
 * invocant is still validated.  If the named stash does not exist yet,
 * fall back to using the class-name SV itself. */
3887 SV* const meth = cMETHOPx_meth(PL_op);
3888 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
3889 opmethod_stash(meth); /* not used but needed for error checks */
3891 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
3892 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
3894 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
3897 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* Redirected SUPER method call ($obj->Other::Class::SUPER::foo): like
 * pp_method_redir, but searches from the named class's superclass MRO
 * (GV_SUPER), consulting its 'super' cache first.  NOTE(review): some
 * lines of this function are elided from this excerpt. */
3901 PP(pp_method_redir_super)
3906 SV* const meth = cMETHOPx_meth(PL_op);
3907 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
3908 opmethod_stash(meth); /* not used but needed for error checks */
3910 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
3911 else if ((cache = HvMROMETA(stash)->super)) {
3912 METHOD_CHECK_CACHE(stash, cache, meth);
3915 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
3918 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
3923 * ex: set ts=8 sts=4 sw=4 et: