3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
6 * You may distribute under the terms of either the GNU General Public
7 * License or the Artistic License, as specified in the README file.
12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
15 * Awake! Awake! Fear, Fire, Foes! Awake!
18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
21 /* This file contains 'hot' pp ("push/pop") functions that
22 * execute the opcodes that make up a perl program. A typical pp function
23 * expects to find its arguments on the stack, and usually pushes its
24 * results onto the stack, hence the 'pp' terminology. Each OP structure
25 * contains a pointer to the relevant pp_foo() function.
27 * By 'hot', we mean common ops whose execution speed is critical.
28 * By gathering them together into a single file, we encourage
29 * CPU cache hits on hot code. Also it could be taken as a warning not to
30 * change any code in this file unless you're sure it won't affect
35 #define PERL_IN_PP_HOT_C
/* NOTE(review): fragment of PP(pp_nextstate) from a gap-riddled numbered
 * listing (function header and surrounding lines elided); comments only,
 * code text untouched. Runs at each statement boundary: records the
 * current COP, resets taint state, and discards any stack litter left by
 * the previous statement. */
49 PL_curcop = (COP*)PL_op;
50 TAINT_NOT; /* Each statement is presumed innocent */
/* Reset SP to the enclosing block's saved stack depth. */
51 PL_stack_sp = PL_stack_base + cxstack[cxstack_ix].blk_oldsp;
/* NOTE(review): fragment of PP(pp_gvsv) — push the scalar slot of a glob.
 * With OPpLVAL_INTRO (i.e. "local $x"), save_scalar() localizes the GV's
 * scalar before pushing; otherwise push the (vivified) scalar directly. */
61 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
62 PUSHs(save_scalar(cGVOP_gv));
63 PUSHs(GvSVn(cGVOP_gv));
64 PUSHs(GvSVn(cGVOP_gv));
/* NOTE(review): fragments of PP(pp_null)/PP(pp_pushmark)/PP(pp_gv) from a
 * sampled listing; headers and bodies are partly elided — comments only. */
69 /* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
76 /* This is sometimes called directly by pp_coreargs, pp_grepstart and
/* pushmark: record the current stack pointer on the mark stack. */
80 PUSHMARK(PL_stack_sp);
91 /* no PUTBACK, SETs doesn't inc/dec SP */
/* pp_gv: push the glob itself (as an SV) onto the stack. */
98 XPUSHs(MUTABLE_SV(cGVOP_gv));
/* NOTE(review): fragment of PP(pp_and) ("&&"/"and"); the PP header,
 * the SvTRUE_NN test and the closing lines are elided from this listing,
 * so only comments are added here. */
103 /* also used for: pp_andassign() */
109 /* SP is not used to remove a variable that is saved across the
110 sv_2bool_flags call in SvTRUE_NN, if a RISC/CISC or low/high machine
111 register or load/store vs direct mem ops macro is introduced, this
112 should be a define block between direct PL_stack_sp and dSP operations,
113 presently, using PL_stack_sp is bias towards CISC cpus */
/* Read the LHS boolean operand straight off the stack top. */
114 SV * const sv = *PL_stack_sp;
/* Presumably the truthy path: jump to op_other to evaluate the RHS —
 * TODO confirm against the elided SvTRUE_NN() conditional. */
118 if (PL_op->op_type == OP_AND)
120 return cLOGOP->op_other;
/* NOTE(review): fragment of PP(pp_sassign) (scalar assignment) from a
 * sampled listing — many lines elided, code text untouched. Handles the
 * normal "$left = $right" case plus the special constant-sub-import
 * optimisation for "*glob = \&code" style assignments. */
128 /* sassign keeps its args in the optree traditionally backwards.
129 So we pop them differently.
131 SV *left = POPs; SV *right = TOPs;
/* OPpASSIGN_BACKWARDS: operands arrived swapped; exchange them. */
133 if (PL_op->op_private & OPpASSIGN_BACKWARDS) {
134 SV * const temp = left;
135 left = right; right = temp;
137 assert(TAINTING_get || !TAINT_get);
138 if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
/* Assigning a code ref to a glob: try the proxy-constant-sub fast path. */
140 if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
142 SV * const cv = SvRV(right);
143 const U32 cv_type = SvTYPE(cv);
144 const bool is_gv = isGV_with_GP(left);
145 const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
151 /* Can do the optimisation if left (LVALUE) is not a typeglob,
152 right (RVALUE) is a reference to something, and we're in void
154 if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
155 /* Is the target symbol table currently empty? */
156 GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
157 if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
158 /* Good. Create a new proxy constant subroutine in the target.
159 The gv becomes a(nother) reference to the constant. */
160 SV *const value = SvRV(cv);
162 SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
163 SvPCS_IMPORTED_on(gv);
165 SvREFCNT_inc_simple_void(value);
171 /* Need to fix things up. */
173 /* Need to fix GV. */
174 left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
178 /* We've been returned a constant rather than a full subroutine,
179 but they expect a subroutine reference to apply. */
181 ENTER_with_name("sassign_coderef");
182 SvREFCNT_inc_void(SvRV(cv));
183 /* newCONSTSUB takes a reference count on the passed in SV
184 from us. We set the name to NULL, otherwise we get into
185 all sorts of fun as the reference to our new sub is
186 donated to the GV that we're about to assign to.
188 SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
191 LEAVE_with_name("sassign_coderef");
193 /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
195 First: ops for \&{"BONK"}; return us the constant in the
197 Second: ops for *{"BONK"} cause that symbol table entry
198 (and our reference to it) to be upgraded from RV
200 Thirdly: We get here. cv is actually PVGV now, and its
201 GvCV() is actually the subroutine we're looking for
203 So change the reference so that it points to the subroutine
204 of that typeglob, as that's what they were after all along.
206 GV *const upgraded = MUTABLE_GV(cv);
207 CV *const source = GvCV(upgraded);
210 assert(CvFLAGS(source) & CVf_CONST);
212 SvREFCNT_inc_simple_void_NN(source);
213 SvREFCNT_dec_NN(upgraded);
214 SvRV_set(right, MUTABLE_SV(source));
/* Warn on assignments whose target is a throwaway temporary. */
220 UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
221 (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
224 packWARN(WARN_MISC), "Useless assignment to a temporary"
/* The actual assignment, with 'set' magic invoked on the target. */
226 SvSetMagicSV(left, right);
/* NOTE(review): fragment of PP(pp_cond_expr) (?:) — the surrounding
 * SvTRUE test is elided; one branch continues at op_other, the other at
 * op_next. Comments only, code text untouched. */
236 RETURNOP(cLOGOP->op_other);
238 RETURNOP(cLOGOP->op_next);
/* NOTE(review): fragment of PP(pp_unstack) — end-of-loop-iteration
 * cleanup: reset taint, pop the value stack back to the block's saved
 * depth, and (unless OPf_SPECIAL) presumably free temps/savestack for
 * block/loop context types — the enclosing condition and body are elided
 * from this listing; TODO confirm. Comments only, code untouched. */
245 TAINT_NOT; /* Each statement is presumed innocent */
246 cx = &cxstack[cxstack_ix];
247 PL_stack_sp = PL_stack_base + cx->blk_oldsp;
249 if (!(PL_op->op_flags & OPf_SPECIAL)) {
251 CxTYPE(cx) == CXt_BLOCK
252 || CxTYPE(cx) == CXt_LOOP_FOR
253 || CxTYPE(cx) == CXt_LOOP_PLAIN
254 || CxTYPE(cx) == CXt_LOOP_LAZYSV
255 || CxTYPE(cx) == CXt_LOOP_LAZYIV
/* NOTE(review): fragment of PP(pp_concat) ("." / ".=") from a sampled
 * listing. Copies the RHS when it aliases TARG, forces both sides to
 * strings, reconciles UTF-8-ness, then appends. Comments only. */
264 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
269 const char *rpv = NULL;
271 bool rcopied = FALSE;
/* RHS is TARG itself ($r = $l . $r): snapshot it into a mortal copy
 * before TARG gets overwritten with the LHS. */
273 if (TARG == right && right != left) { /* $r = $l.$r */
274 rpv = SvPV_nomg_const(right, rlen);
275 rbyte = !DO_UTF8(right);
276 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
277 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */
281 if (TARG != left) { /* not $l .= $r */
283 const char* const lpv = SvPV_nomg_const(left, llen);
284 lbyte = !DO_UTF8(left);
285 sv_setpvn(TARG, lpv, llen);
291 else { /* $l .= $r and left == TARG */
293 if (left == right && ckWARN(WARN_UNINITIALIZED)) /* $l .= $l */
294 report_uninit(right);
298 SvPV_force_nomg_nolen(left);
300 lbyte = !DO_UTF8(left);
306 rpv = SvPV_nomg_const(right, rlen);
307 rbyte = !DO_UTF8(right);
/* One side is bytes, the other UTF-8: upgrade both to UTF-8 so the
 * concatenation is encoding-consistent. */
309 if (lbyte != rbyte) {
311 sv_utf8_upgrade_nomg(TARG);
314 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
315 sv_utf8_upgrade_nomg(right);
316 rpv = SvPV_nomg_const(right, rlen);
319 sv_catpvn_nomg(TARG, rpv, rlen);
326 /* push the elements of av onto the stack.
327 * XXX Note that padav has similar code but without the mg_get().
328 * I suspect that the mg_get is no longer needed, but while padav
329 * differs, it can't share this function */
/* NOTE(review): fragment of static helper S_pushav from a sampled
 * listing (return type, dSP/EXTEND and closing lines elided). Magical
 * arrays go through av_fetch + mg_get per element; plain arrays are
 * copied straight out of AvARRAY with NULL slots mapped to undef. */
332 S_pushav(pTHX_ AV* const av)
335 const SSize_t maxarg = AvFILL(av) + 1;
337 if (UNLIKELY(SvRMAGICAL(av))) {
339 for (i=0; i < (PADOFFSET)maxarg; i++) {
340 SV ** const svp = av_fetch(av, i, FALSE);
341 /* See note in pp_helem, and bug id #27839 */
343 ? SvGMAGICAL(*svp) ? (mg_get(*svp), *svp) : *svp
/* Fast path: no magic, direct indexed copy from the AV's buffer. */
349 for (i=0; i < (PADOFFSET)maxarg; i++) {
350 SV * const sv = AvARRAY(av)[i];
351 SP[i+1] = LIKELY(sv) ? sv : &PL_sv_undef;
359 /* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */
/* NOTE(review): fragment of PP(pp_padrange) — pushes a contiguous run of
 * pad lexicals (base..base+count-1); with OPpLVAL_INTRO it also records a
 * single packed CLEARPADRANGE save-stack entry instead of one entry per
 * lexical. Many lines elided; comments only. */
364 PADOFFSET base = PL_op->op_targ;
365 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
367 if (PL_op->op_flags & OPf_SPECIAL) {
368 /* fake the RHS of my ($x,$y,..) = @_ */
370 S_pushav(aTHX_ GvAVn(PL_defgv));
374 /* note, this is only skipped for compile-time-known void cxt */
375 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
378 for (i = 0; i <count; i++)
379 *++SP = PAD_SV(base+i);
381 if (PL_op->op_private & OPpLVAL_INTRO) {
382 SV **svp = &(PAD_SVl(base));
/* Pack (base, count, save-type) into one UV for the save stack. */
383 const UV payload = (UV)(
384 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
385 | (count << SAVE_TIGHT_SHIFT)
386 | SAVEt_CLEARPADRANGE);
387 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
388 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT)) == base);
395 for (i = 0; i <count; i++)
396 SvPADSTALE_off(*svp++); /* mark lexical as active */
/* NOTE(review): fragment of PP(pp_padsv) — push one pad lexical; in
 * lvalue context optionally arrange scope-exit clearing ("my" without
 * "state") and autovivify a reference per OPpDEREF. Lines elided from
 * this listing; comments only. */
407 OP * const op = PL_op;
408 /* access PL_curpad once */
409 SV ** const padentry = &(PAD_SVl(op->op_targ));
414 PUTBACK; /* no pop/push after this, TOPs ok */
416 if (op->op_flags & OPf_MOD) {
417 if (op->op_private & OPpLVAL_INTRO)
418 if (!(op->op_private & OPpPAD_STATE))
419 save_clearsv(padentry);
420 if (op->op_private & OPpDEREF) {
421 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather
422 than TARG reduces the scope of TARG, so it does not
423 span the call to save_clearsv, resulting in smaller
425 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
/* NOTE(review): fragment of PP(pp_readline) (<FH>) — resolves the
 * filehandle into PL_last_in_gv (defaulting to ARGV, dereferencing a
 * glob ref, or presumably via rv2gv for other non-globs — the elided
 * lines would confirm), then delegates to do_readline(). Comments only. */
437 tryAMAGICunTARGETlist(iter_amg, 0);
438 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
440 else PL_last_in_gv = PL_argvgv, PL_stack_sp--;
441 if (!isGV_with_GP(PL_last_in_gv)) {
442 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
443 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));
446 XPUSHs(MUTABLE_SV(PL_last_in_gv));
449 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
450 if (PL_last_in_gv == (GV *)&PL_sv_undef)
451 PL_last_in_gv = NULL;
453 assert(isGV_with_GP(PL_last_in_gv));
456 return do_readline();
/* NOTE(review): fragment of PP(pp_eq) ("==") — fast path compares raw
 * IVs when both operands are plain non-UV integers, otherwise falls back
 * to the generic numeric comparison do_ncmp(). Comments only. */
464 tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
468 (SvIOK_notUV(left) && SvIOK_notUV(right))
469 ? (SvIVX(left) == SvIVX(right))
470 : ( do_ncmp(left, right) == 0)
476 /* also used for: pp_i_preinc() */
/* NOTE(review): fragment of PP(pp_preinc) (++$x) — in-place IV bump when
 * the SV is a plain, unshared IOK integer below IV_MAX; everything else
 * (magic, strings, NVs, IV_MAX overflow) goes through sv_inc. */
480 SV *sv = *PL_stack_sp;
482 if (LIKELY(((sv->sv_flags &
483 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
484 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
486 && SvIVX(sv) != IV_MAX)
488 SvIV_set(sv, SvIVX(sv) + 1);
490 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
497 /* also used for: pp_i_predec() */
/* NOTE(review): fragment of PP(pp_predec) (--$x) — mirror image of
 * pp_preinc: in-place IV decrement guarded against IV_MIN underflow,
 * with sv_dec handling all magic/string/NV cases. */
501 SV *sv = *PL_stack_sp;
503 if (LIKELY(((sv->sv_flags &
504 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
505 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
507 && SvIVX(sv) != IV_MIN)
509 SvIV_set(sv, SvIVX(sv) - 1);
511 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
518 /* also used for: pp_orassign() */
/* NOTE(review): fragment of PP(pp_or) ("||"/"or") — the SvTRUE test is
 * elided; presumably on a false LHS control jumps to op_other to
 * evaluate the RHS. Comments only, code untouched. */
527 if (PL_op->op_type == OP_OR)
529 RETURNOP(cLOGOP->op_other);
534 /* also used for: pp_dor() pp_dorassign() */
/* NOTE(review): fragment of PP(pp_defined) (also //, //=) — defined-ness
 * test with per-type rules: AVs/HVs count as defined when non-empty or
 * magical, CVs when they have a body or XSUB. The switch's case labels
 * and push/return lines are elided from this listing; comments only. */
541 const int op_type = PL_op->op_type;
542 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);
/* Trivially undefined SV: for "//" fall through to the RHS branch. */
547 if (UNLIKELY(!sv || !SvANY(sv))) {
548 if (op_type == OP_DOR)
550 RETURNOP(cLOGOP->op_other);
556 if (UNLIKELY(!sv || !SvANY(sv)))
561 switch (SvTYPE(sv)) {
563 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
567 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
571 if (CvROOT(sv) || CvXSUB(sv))
584 if(op_type == OP_DOR)
586 RETURNOP(cLOGOP->op_other);
588 /* assuming OP_DEFINED */
/* NOTE(review): fragment of PP(pp_add) ("+") from a sampled listing —
 * many lines (declarations, braces, the unsigned add/subtract core) are
 * elided; comments only, code untouched. Structure: (1) fast paths for
 * two plain IVs (top-two-bit overflow screen) and two plain NVs; (2) a
 * careful all-unsigned IV addition that detects signed overflow without
 * invoking C UB; (3) NV fallback. */
598 dSP; dATARGET; bool useleft; SV *svl, *svr;
600 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
604 #ifdef PERL_PRESERVE_IVUV
606 /* special-case some simple common cases */
607 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
609 U32 flags = (svl->sv_flags & svr->sv_flags);
610 if (flags & SVf_IOK) {
611 /* both args are simple IVs */
616 topl = ((UV)il) >> (UVSIZE * 8 - 2);
617 topr = ((UV)ir) >> (UVSIZE * 8 - 2);
619 /* if both are in a range that can't under/overflow, do a
620 * simple integer add: if the top of both numbers
621 * are 00 or 11, then it's safe */
622 if (!( ((topl+1) | (topr+1)) & 2)) {
624 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
630 else if (flags & SVf_NOK) {
631 /* both args are NVs */
636 #if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
637 !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
638 && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
640 nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
643 /* nothing was lost by converting to IVs */
646 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
654 useleft = USE_LEFT(svl);
655 /* We must see if we can perform the addition with integers if possible,
656 as the integer code detects overflow while the NV code doesn't.
657 If either argument hasn't had a numeric conversion yet attempt to get
658 the IV. It's important to do this now, rather than just assuming that
659 it's not IOK as a PV of "9223372036854775806" may not take well to NV
660 addition, and an SV which is NOK, NV=6.0 ought to be coerced to
661 integer in case the second argument is IV=9223372036854775806
662 We can (now) rely on sv_2iv to do the right thing, only setting the
663 public IOK flag if the value in the NV (or PV) slot is truly integer.
665 A side effect is that this also aggressively prefers integer maths over
666 fp maths for integer values.
668 How to detect overflow?
670 C 99 section 6.2.6.1 says
672 The range of nonnegative values of a signed integer type is a subrange
673 of the corresponding unsigned integer type, and the representation of
674 the same value in each type is the same. A computation involving
675 unsigned operands can never overflow, because a result that cannot be
676 represented by the resulting unsigned integer type is reduced modulo
677 the number that is one greater than the largest value that can be
678 represented by the resulting type.
682 which I read as "unsigned ints wrap."
684 signed integer overflow seems to be classed as "exception condition"
686 If an exceptional condition occurs during the evaluation of an
687 expression (that is, if the result is not mathematically defined or not
688 in the range of representable values for its type), the behavior is
691 (6.5, the 5th paragraph)
693 I had assumed that on 2s complement machines signed arithmetic would
694 wrap, hence coded pp_add and pp_subtract on the assumption that
695 everything perl builds on would be happy. After much wailing and
696 gnashing of teeth it would seem that irix64 knows its ANSI spec well,
697 knows that it doesn't need to, and doesn't. Bah. Anyway, the all-
698 unsigned code below is actually shorter than the old code. :-)
701 if (SvIV_please_nomg(svr)) {
702 /* Unless the left argument is integer in range we are going to have to
703 use NV maths. Hence only attempt to coerce the right argument if
704 we know the left is integer. */
712 /* left operand is undef, treat as zero. + 0 is identity,
713 Could SETi or SETu right now, but space optimise by not adding
714 lots of code to speed up what is probably a rarish case. */
716 /* Left operand is defined, so is it IV? */
717 if (SvIV_please_nomg(svl)) {
718 if ((auvok = SvUOK(svl)))
721 const IV aiv = SvIVX(svl);
724 auvok = 1; /* Now acting as a sign flag. */
/* Magnitude of a negative IV, avoiding UB on -IV_MIN. */
726 auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
733 bool result_good = 0;
736 bool buvok = SvUOK(svr);
741 const IV biv = SvIVX(svr);
746 buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
748 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
749 else "IV" now, independent of how it came in.
750 if a, b represents positive, A, B negative, a maps to -A etc
755 all UV maths. negate result if A negative.
756 add if signs same, subtract if signs differ. */
762 /* Must get smaller */
768 /* result really should be -(auv-buv). as its negation
769 of true value, need to swap our result flag */
786 if (result <= (UV)IV_MIN)
787 SETi(result == (UV)IV_MIN
788 ? IV_MIN : -(IV)result);
790 /* result valid, but out of range for IV. */
795 } /* Overflow, drop through to NVs. */
800 useleft = USE_LEFT(svl);
/* NV fallback: plain floating-point addition. */
804 NV value = SvNV_nomg(svr);
807 /* left operand is undef, treat as zero. + 0.0 is identity. */
811 SETn( value + SvNV_nomg(svl) );
817 /* also used for: pp_aelemfast_lex() */
/* NOTE(review): fragment of PP(pp_aelemfast) — fetch an array element
 * whose small constant index is packed into op_private (signed I8).
 * The array is a pad lexical for OP_AELEMFAST_LEX, else a package AV. */
822 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
823 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
824 const U32 lval = PL_op->op_flags & OPf_MOD;
825 SV** const svp = av_fetch(av, (I8)PL_op->op_private, lval);
826 SV *sv = (svp ? *svp : &PL_sv_undef);
828 if (UNLIKELY(!svp && lval))
829 DIE(aTHX_ PL_no_aelem, (int)(I8)PL_op->op_private);
832 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
/* NOTE(review): sole surviving line of PP(pp_join) in this listing —
 * delegates to do_join() over the marked argument range. */
842 do_join(TARG, *MARK, MARK, SP);
/* NOTE(review): fragment of PP(pp_pushre) — under the elided
 * conditional, smuggles the OP* inside a mortal PVLV's LvTARGOFF slot
 * (hence the size assumption below); otherwise pushes the OP directly. */
853 * We ass_u_me that LvTARGOFF() comes first, and that two STRLENs
854 * will be enough to hold an OP*.
856 SV* const sv = sv_newmortal();
857 sv_upgrade(sv, SVt_PVLV);
859 Copy(&PL_op, &LvTARGOFF(sv), 1, OP*);
862 XPUSHs(MUTABLE_SV(PL_op));
867 /* Oversized hot code. */
869 /* also used for: pp_say() */
/* NOTE(review): fragment of PP(pp_print)/pp_say from a sampled listing
 * — tied-handle delegation, bad-handle error paths, $, / $\ separator
 * handling, say's trailing newline, and $| flushing. Comments only. */
873 dSP; dMARK; dORIGMARK;
877 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;
/* Tied handle: forward the whole call to the tie object's PRINT method. */
881 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
884 if (MARK == ORIGMARK) {
885 /* If using default handle then we need to make space to
886 * pass object as 1st arg, so move other args up ...
890 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
893 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
895 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
896 | (PL_op->op_type == OP_SAY
897 ? TIED_METHOD_SAY : 0)), sp - mark);
900 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
901 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
904 SETERRNO(EBADF,RMS_IFI);
/* Handle exists but has no output fp — e.g. opened for reading only. */
907 else if (!(fp = IoOFP(io))) {
909 report_wrongway_fh(gv, '<');
912 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);
916 SV * const ofs = GvSV(PL_ofsgv); /* $, */
918 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
920 if (!do_print(*MARK, fp))
924 /* don't use 'ofs' here - it may be invalidated by magic callbacks */
925 if (!do_print(GvSV(PL_ofsgv), fp)) {
934 if (!do_print(*MARK, fp))
942 if (PL_op->op_type == OP_SAY) {
943 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))
946 else if (PL_ors_sv && SvOK(PL_ors_sv))
947 if (!do_print(PL_ors_sv, fp)) /* $\ */
950 if (IoFLAGS(io) & IOf_FLUSH)
951 if (PerlIO_flush(fp) == EOF)
961 XPUSHs(&PL_sv_undef);
966 /* also used for: pp_rv2hv() */
967 /* also called directly by pp_lvavref */
/* NOTE(review): fragment of PP(pp_rv2av) (shared with rv2hv/lvavref) —
 * dereference an RV/glob/symbolic ref into the AV or HV it denotes, then
 * either return the aggregate (OPf_REF / lvalue-sub cases) or flatten it
 * per context. Sampled listing, many lines elided; comments only. */
972 const I32 gimme = GIMME_V;
973 static const char an_array[] = "an ARRAY";
974 static const char a_hash[] = "a HASH";
975 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
976 || PL_op->op_type == OP_LVAVREF;
977 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
/* Overloaded dereference (@{} / %{}) via amagic. */
981 if (UNLIKELY(SvAMAGIC(sv))) {
982 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
985 if (UNLIKELY(SvTYPE(sv) != type))
986 /* diag_listed_as: Not an ARRAY reference */
987 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
988 else if (UNLIKELY(PL_op->op_flags & OPf_MOD
989 && PL_op->op_private & OPpLVAL_INTRO))
990 Perl_croak(aTHX_ "%s", PL_no_localize_ref);
/* Not a ref: treat as glob or (where allowed) a symbolic reference. */
992 else if (UNLIKELY(SvTYPE(sv) != type)) {
995 if (!isGV_with_GP(sv)) {
996 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
1002 gv = MUTABLE_GV(sv);
1004 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
1005 if (PL_op->op_private & OPpLVAL_INTRO)
1006 sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
1008 if (PL_op->op_flags & OPf_REF) {
1012 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
1013 const I32 flags = is_lvalue_sub();
1014 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1015 if (gimme != G_ARRAY)
1016 goto croak_cant_return;
1023 AV *const av = MUTABLE_AV(sv);
1024 /* The guts of pp_rv2av */
1025 if (gimme == G_ARRAY) {
1031 else if (gimme == G_SCALAR) {
1033 const SSize_t maxarg = AvFILL(av) + 1;
1037 /* The guts of pp_rv2hv */
1038 if (gimme == G_ARRAY) { /* array wanted */
1040 return Perl_do_kv(aTHX);
/* Boolean-only context on an untied hash: cheap emptiness test. */
1042 else if ((PL_op->op_private & OPpTRUEBOOL
1043 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
1044 && block_gimme() == G_VOID ))
1045 && (!SvRMAGICAL(sv) || !mg_find(sv, PERL_MAGIC_tied)))
1046 SETs(HvUSEDKEYS(sv) ? &PL_sv_yes : sv_2mortal(newSViv(0)));
1047 else if (gimme == G_SCALAR) {
1049 TARG = Perl_hv_scalar(aTHX_ MUTABLE_HV(sv));
1056 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
1057 is_pp_rv2av ? "array" : "hash");
/* NOTE(review): fragment of static helper S_do_oddball (return type,
 * braces and some lines elided) — emits the appropriate 'misc' warning
 * when a hash assignment receives an odd number of elements; the special
 * first-element-is-aggregate-ref case gets the more helpful message. */
1062 S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
1064 PERL_ARGS_ASSERT_DO_ODDBALL;
1067 if (ckWARN(WARN_MISC)) {
1069 if (oddkey == firstkey &&
1071 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
1072 SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
1074 err = "Reference found where even-sized list expected";
1077 err = "Odd number of elements in hash assignment";
1078 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
1085 /* Do a mark and sweep with the SVf_BREAK flag to detect elements which
1086 * are common to both the LHS and RHS of an aassign, and replace them
1087 * with copies. All these copies are made before the actual list assign is
1090 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
1091 * element ($b) to the first LH element ($a), modifies $a; when the
1092 * second assignment is done, the second RH element now has the wrong
1093 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
1094 * Note that we don't need to make a mortal copy of $b.
1096 * The algorithm below works by, for every RHS element, mark the
1097 * corresponding LHS target element with SVf_BREAK. Then if the RHS
1098 * element is found with SVf_BREAK set, it means it would have been
1099 * modified, so make a copy.
1100 * Note that by scanning both LHS and RHS in lockstep, we avoid
1101 * unnecessary copies (like $b above) compared with a naive
1102 * "mark all LHS; copy all marked RHS; unmark all LHS".
1104 * If the LHS element is a 'my' declaration' and has a refcount of 1, then
1105 * it can't be common and can be skipped.
1107 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
1108 * that we thought we didn't need to call S_aassign_copy_common(), but we
1109 * have anyway for sanity checking. If we find we need to copy, then panic.
/* NOTE(review): fragmentary numbered listing of this helper — braces and
 * several statements are elided; comments only, code text untouched. */
1112 PERL_STATIC_INLINE void
1113 S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
1114 SV **firstrelem, SV **lastrelem
1123 SSize_t lcount = lastlelem - firstlelem + 1;
1124 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
1125 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
1126 bool copy_all = FALSE;
1128 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
1129 assert(firstlelem < lastlelem); /* at least 2 LH elements */
1130 assert(firstrelem < lastrelem); /* at least 2 RH elements */
1134 /* we never have to copy the first RH element; it can't be corrupted
1135 * by assigning something to the corresponding first LH element.
1136 * So this scan does in a loop: mark LHS[N]; test RHS[N+1]
1138 relem = firstrelem + 1;
1140 for (; relem <= lastrelem; relem++) {
1143 /* mark next LH element */
1145 if (--lcount >= 0) {
1148 if (UNLIKELY(!svl)) {/* skip AV alias marker */
1149 assert (lelem <= lastlelem);
/* Set-magic on an LHS target may do anything; be pessimistic and
 * copy every remaining RHS element. */
1155 if (SvSMAGICAL(svl)) {
1158 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
1161 /* this LH element will consume all further args;
1162 * no need to mark any further LH elements (if any).
1163 * But we still need to scan any remaining RHS elements;
1164 * set lcount negative to distinguish from lcount == 0,
1165 * so the loop condition continues being true
1168 lelem--; /* no need to unmark this element */
1170 else if (!(do_rc1 && SvREFCNT(svl) == 1) && svl != &PL_sv_undef) {
1171 assert(!SvIMMORTAL(svl));
1172 SvFLAGS(svl) |= SVf_BREAK;
1176 /* don't check RH element if no SVf_BREAK flags set yet */
1183 /* see if corresponding RH element needs copying */
1189 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
1193 /* op_dump(PL_op); */
1195 "panic: aassign skipped needed copy of common RH elem %"
1196 UVuf, (UV)(relem - firstrelem));
1200 TAINT_NOT; /* Each item is independent */
1202 /* Dear TODO test in t/op/sort.t, I love you.
1203 (It's relying on a panic, not a "semi-panic" from newSVsv()
1204 and then an assertion failure below.) */
1205 if (UNLIKELY(SvIS_FREED(svr))) {
1206 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
1209 /* avoid break flag while copying; otherwise COW etc
1211 SvFLAGS(svr) &= ~SVf_BREAK;
1212 /* Not newSVsv(), as it does not allow copy-on-write,
1213 resulting in wasteful copies.
1214 Also, we use SV_NOSTEAL in case the SV is used more than
1215 once, e.g. (...) = (f())[0,0]
1216 Where the same SV appears twice on the RHS without a ref
1217 count bump. (Although I suspect that the SV won't be
1218 stealable here anyway - DAPM).
1220 *relem = sv_mortalcopy_flags(svr,
1221 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1222 /* ... but restore afterwards in case it's needed again,
1223 * e.g. ($a,$b,$c) = (1,$a,$a)
1225 SvFLAGS(svr) |= SVf_BREAK;
/* Sweep phase: clear every SVf_BREAK mark we set on the LHS. */
1237 while (lelem > firstlelem) {
1238 SV * const svl = *(--lelem);
1240 SvFLAGS(svl) &= ~SVf_BREAK;
1249 SV **lastlelem = PL_stack_sp;
1250 SV **lastrelem = PL_stack_base + POPMARK;
1251 SV **firstrelem = PL_stack_base + POPMARK + 1;
1252 SV **firstlelem = lastrelem + 1;
1265 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
1266 * only need to save locally, not on the save stack */
1267 U16 old_delaymagic = PL_delaymagic;
1272 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
1274 /* If there's a common identifier on both sides we have to take
1275 * special care that assigning the identifier on the left doesn't
1276 * clobber a value on the right that's used later in the list.
1279 /* at least 2 LH and RH elements, or commonality isn't an issue */
1280 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1281 for (relem = firstrelem+1; relem <= lastrelem; relem++) {
1282 if (SvGMAGICAL(*relem))
1285 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1286 if (*lelem && SvSMAGICAL(*lelem))
1289 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
1290 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
1291 /* skip the scan if all scalars have a ref count of 1 */
1292 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1294 if (!sv || SvREFCNT(sv) == 1)
1296 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVAV)
1303 S_aassign_copy_common(aTHX_
1304 firstlelem, lastlelem, firstrelem, lastrelem
1314 /* on debugging builds, do the scan even if we've concluded we
1315 * don't need to, then panic if we find commonality. Note that the
1316 * scanner assumes at least 2 elements */
1317 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1325 lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
1332 while (LIKELY(lelem <= lastlelem)) {
1334 TAINT_NOT; /* Each item stands on its own, taintwise. */
1336 if (UNLIKELY(!sv)) {
1339 ASSUME(SvTYPE(sv) == SVt_PVAV);
1341 switch (SvTYPE(sv)) {
1343 bool already_copied = FALSE;
1344 ary = MUTABLE_AV(sv);
1345 magic = SvMAGICAL(ary) != 0;
1347 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1349 /* We need to clear ary. The is a danger that if we do this,
1350 * elements on the RHS may be prematurely freed, e.g.
1352 * In the case of possible commonality, make a copy of each
1353 * RHS SV *before* clearing the array, and add a reference
1354 * from the tmps stack, so that it doesn't leak on death.
1355 * Otherwise, make a copy of each RHS SV only as we're storing
1356 * it into the array - that way we don't have to worry about
1357 * it being leaked if we die, but don't incur the cost of
1358 * mortalising everything.
1361 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1362 && (relem <= lastrelem)
1363 && (magic || AvFILL(ary) != -1))
1366 EXTEND_MORTAL(lastrelem - relem + 1);
1367 for (svp = relem; svp <= lastrelem; svp++) {
1368 /* see comment in S_aassign_copy_common about SV_NOSTEAL */
1369 *svp = sv_mortalcopy_flags(*svp,
1370 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1373 already_copied = TRUE;
1377 if (relem <= lastrelem)
1378 av_extend(ary, lastrelem - relem);
1381 while (relem <= lastrelem) { /* gobble up all the rest */
1383 if (LIKELY(!alias)) {
1388 /* before newSV, in case it dies */
1391 /* see comment in S_aassign_copy_common about
1393 sv_setsv_flags(sv, *relem,
1394 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
1399 if (!already_copied)
1402 DIE(aTHX_ "Assigned value is not a reference");
1403 if (SvTYPE(SvRV(*relem)) > SVt_PVLV)
1404 /* diag_listed_as: Assigned value is not %s reference */
1406 "Assigned value is not a SCALAR reference");
1407 if (lval && !already_copied)
1408 *relem = sv_mortalcopy(*relem);
1409 /* XXX else check for weak refs? */
1410 sv = SvREFCNT_inc_NN(SvRV(*relem));
1414 SvREFCNT_inc_simple_void_NN(sv); /* undo mortal free */
1415 didstore = av_store(ary,i++,sv);
1424 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
1425 SvSETMAGIC(MUTABLE_SV(ary));
1430 case SVt_PVHV: { /* normal hash */
1434 SV** topelem = relem;
1435 SV **firsthashrelem = relem;
1436 bool already_copied = FALSE;
1438 hash = MUTABLE_HV(sv);
1439 magic = SvMAGICAL(hash) != 0;
1441 odd = ((lastrelem - firsthashrelem)&1)? 0 : 1;
1442 if (UNLIKELY(odd)) {
1443 do_oddball(lastrelem, firsthashrelem);
1444 /* we have firstlelem to reuse, it's not needed anymore
1446 *(lastrelem+1) = &PL_sv_undef;
1450 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1452 /* We need to clear hash. The is a danger that if we do this,
1453 * elements on the RHS may be prematurely freed, e.g.
1454 * %h = (foo => $h{bar});
1455 * In the case of possible commonality, make a copy of each
1456 * RHS SV *before* clearing the hash, and add a reference
1457 * from the tmps stack, so that it doesn't leak on death.
1460 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1461 && (relem <= lastrelem)
1462 && (magic || HvUSEDKEYS(hash)))
1465 EXTEND_MORTAL(lastrelem - relem + 1);
1466 for (svp = relem; svp <= lastrelem; svp++) {
1467 *svp = sv_mortalcopy_flags(*svp,
1468 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1471 already_copied = TRUE;
1476 while (LIKELY(relem < lastrelem+odd)) { /* gobble up all the rest */
1479 /* Copy the key if aassign is called in lvalue context,
1480 to avoid having the next op modify our rhs. Copy
1481 it also if it is gmagical, lest it make the
1482 hv_store_ent call below croak, leaking the value. */
1483 sv = (lval || SvGMAGICAL(*relem)) && !already_copied
1484 ? sv_mortalcopy(*relem)
1493 sv_setsv_nomg(tmpstr,*relem++); /* value */
1496 if (gimme == G_ARRAY) {
1497 if (hv_exists_ent(hash, sv, 0))
1498 /* key overwrites an existing entry */
1501 /* copy element back: possibly to an earlier
1502 * stack location if we encountered dups earlier,
1503 * possibly to a later stack location if odd */
1505 *topelem++ = tmpstr;
1509 SvREFCNT_inc_simple_void_NN(tmpstr); /* undo mortal free */
1510 didstore = hv_store_ent(hash,sv,tmpstr,0);
1512 if (!didstore) sv_2mortal(tmpstr);
1518 if (duplicates && gimme == G_ARRAY) {
1519 /* at this point we have removed the duplicate key/value
1520 * pairs from the stack, but the remaining values may be
1521 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
1522 * the (a 2), but the stack now probably contains
1523 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
1524 * obliterates the earlier key. So refresh all values. */
1525 lastrelem -= duplicates;
1526 relem = firsthashrelem;
1527 while (relem < lastrelem+odd) {
1529 he = hv_fetch_ent(hash, *relem++, 0, 0);
1530 *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
1533 if (odd && gimme == G_ARRAY) lastrelem++;
1537 if (SvIMMORTAL(sv)) {
1538 if (relem <= lastrelem)
1542 if (relem <= lastrelem) {
1544 SvTEMP(sv) && !SvSMAGICAL(sv) && SvREFCNT(sv) == 1 &&
1545 (!isGV_with_GP(sv) || SvFAKE(sv)) && ckWARN(WARN_MISC)
1548 packWARN(WARN_MISC),
1549 "Useless assignment to a temporary"
1551 sv_setsv(sv, *relem);
1555 sv_setsv(sv, &PL_sv_undef);
1560 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
1561 /* Will be used to set PL_tainting below */
1562 Uid_t tmp_uid = PerlProc_getuid();
1563 Uid_t tmp_euid = PerlProc_geteuid();
1564 Gid_t tmp_gid = PerlProc_getgid();
1565 Gid_t tmp_egid = PerlProc_getegid();
1567 /* XXX $> et al currently silently ignore failures */
1568 if (PL_delaymagic & DM_UID) {
1569 #ifdef HAS_SETRESUID
1571 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1572 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
1575 # ifdef HAS_SETREUID
1577 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1578 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
1581 if ((PL_delaymagic & DM_UID) == DM_RUID) {
1582 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
1583 PL_delaymagic &= ~DM_RUID;
1585 # endif /* HAS_SETRUID */
1587 if ((PL_delaymagic & DM_UID) == DM_EUID) {
1588 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
1589 PL_delaymagic &= ~DM_EUID;
1591 # endif /* HAS_SETEUID */
1592 if (PL_delaymagic & DM_UID) {
1593 if (PL_delaymagic_uid != PL_delaymagic_euid)
1594 DIE(aTHX_ "No setreuid available");
1595 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
1597 # endif /* HAS_SETREUID */
1598 #endif /* HAS_SETRESUID */
1600 tmp_uid = PerlProc_getuid();
1601 tmp_euid = PerlProc_geteuid();
1603 /* XXX $> et al currently silently ignore failures */
1604 if (PL_delaymagic & DM_GID) {
1605 #ifdef HAS_SETRESGID
1607 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1608 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
1611 # ifdef HAS_SETREGID
1613 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1614 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
1617 if ((PL_delaymagic & DM_GID) == DM_RGID) {
1618 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
1619 PL_delaymagic &= ~DM_RGID;
1621 # endif /* HAS_SETRGID */
1623 if ((PL_delaymagic & DM_GID) == DM_EGID) {
1624 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
1625 PL_delaymagic &= ~DM_EGID;
1627 # endif /* HAS_SETEGID */
1628 if (PL_delaymagic & DM_GID) {
1629 if (PL_delaymagic_gid != PL_delaymagic_egid)
1630 DIE(aTHX_ "No setregid available");
1631 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
1633 # endif /* HAS_SETREGID */
1634 #endif /* HAS_SETRESGID */
1636 tmp_gid = PerlProc_getgid();
1637 tmp_egid = PerlProc_getegid();
1639 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
1640 #ifdef NO_TAINT_SUPPORT
1641 PERL_UNUSED_VAR(tmp_uid);
1642 PERL_UNUSED_VAR(tmp_euid);
1643 PERL_UNUSED_VAR(tmp_gid);
1644 PERL_UNUSED_VAR(tmp_egid);
1647 PL_delaymagic = old_delaymagic;
1649 if (gimme == G_VOID)
1650 SP = firstrelem - 1;
1651 else if (gimme == G_SCALAR) {
1654 SETi(lastrelem - firstrelem + 1);
1658 /* note that in this case *firstlelem may have been overwritten
1659 by sv_undef in the odd hash case */
1662 SP = firstrelem + (lastlelem - firstlelem);
1663 lelem = firstlelem + (relem - firstrelem);
1665 *relem++ = (lelem <= lastlelem) ? *lelem++ : &PL_sv_undef;
1675 PMOP * const pm = cPMOP;
1676 REGEXP * rx = PM_GETRE(pm);
1677 SV * const pkg = rx ? CALLREG_PACKAGE(rx) : NULL;
1678 SV * const rv = sv_newmortal();
1682 SvUPGRADE(rv, SVt_IV);
1683 /* For a subroutine describing itself as "This is a hacky workaround" I'm
1684    loath to use it here, but it seems to be the right fix. Or close.
1685 The key part appears to be that it's essential for pp_qr to return a new
1686 object (SV), which implies that there needs to be an effective way to
1687 generate a new SV from the existing SV that is pre-compiled in the
1689 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));
1692 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
1693 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
1694 *cvp = cv_clone(cv);
1695 SvREFCNT_dec_NN(cv);
1699 HV *const stash = gv_stashsv(pkg, GV_ADD);
1700 SvREFCNT_dec_NN(pkg);
1701 (void)sv_bless(rv, stash);
1704 if (UNLIKELY(RX_ISTAINTED(rx))) {
1706 SvTAINTED_on(SvRV(rv));
1719 SSize_t curpos = 0; /* initial pos() or current $+[0] */
1722 const char *truebase; /* Start of string */
1723 REGEXP *rx = PM_GETRE(pm);
1725 const I32 gimme = GIMME_V;
1727 const I32 oldsave = PL_savestack_ix;
1728 I32 had_zerolen = 0;
1731 if (PL_op->op_flags & OPf_STACKED)
1740 PUTBACK; /* EVAL blocks need stack_sp. */
1741 /* Skip get-magic if this is a qr// clone, because regcomp has
1743 truebase = ReANY(rx)->mother_re
1744 ? SvPV_nomg_const(TARG, len)
1745 : SvPV_const(TARG, len);
1747 DIE(aTHX_ "panic: pp_match");
1748 strend = truebase + len;
1749 rxtainted = (RX_ISTAINTED(rx) ||
1750 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
1753 /* We need to know this in case we fail out early - pos() must be reset */
1754 global = dynpm->op_pmflags & PMf_GLOBAL;
1756 /* PMdf_USED is set after a ?? matches once */
1759 SvREADONLY(PL_regex_pad[pm->op_pmoffset])
1761 pm->op_pmflags & PMf_USED
1764 DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once"));
1768 /* empty pattern special-cased to use last successful pattern if
1769 possible, except for qr// */
1770 if (!ReANY(rx)->mother_re && !RX_PRELEN(rx)
1776 if (RX_MINLEN(rx) >= 0 && (STRLEN)RX_MINLEN(rx) > len) {
1777 DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
1778 UVuf" < %"IVdf")\n",
1779 (UV)len, (IV)RX_MINLEN(rx)));
1783 /* get pos() if //g */
1785 mg = mg_find_mglob(TARG);
1786 if (mg && mg->mg_len >= 0) {
1787 curpos = MgBYTEPOS(mg, TARG, truebase, len);
1788 /* last time pos() was set, it was zero-length match */
1789 if (mg->mg_flags & MGf_MINMATCH)
1794 #ifdef PERL_SAWAMPERSAND
1797 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
1798 || (dynpm->op_pmflags & PMf_KEEPCOPY)
1802 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
1803 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
1804 * only on the first iteration. Therefore we need to copy $' as well
1805 * as $&, to make the rest of the string available for captures in
1806 * subsequent iterations */
1807 if (! (global && gimme == G_ARRAY))
1808 r_flags |= REXEC_COPY_SKIP_POST;
1810 #ifdef PERL_SAWAMPERSAND
1811 if (dynpm->op_pmflags & PMf_KEEPCOPY)
1812 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
1813 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
1820 s = truebase + curpos;
1822 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
1823 had_zerolen, TARG, NULL, r_flags))
1827 if (dynpm->op_pmflags & PMf_ONCE)
1829 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
1831 dynpm->op_pmflags |= PMf_USED;
1835 RX_MATCH_TAINTED_on(rx);
1836 TAINT_IF(RX_MATCH_TAINTED(rx));
1840 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
1842 mg = sv_magicext_mglob(TARG);
1843 MgBYTEPOS_set(mg, TARG, truebase, RX_OFFS(rx)[0].end);
1844 if (RX_ZERO_LEN(rx))
1845 mg->mg_flags |= MGf_MINMATCH;
1847 mg->mg_flags &= ~MGf_MINMATCH;
1850 if ((!RX_NPARENS(rx) && !global) || gimme != G_ARRAY) {
1851 LEAVE_SCOPE(oldsave);
1855 /* push captures on stack */
1858 const I32 nparens = RX_NPARENS(rx);
1859 I32 i = (global && !nparens) ? 1 : 0;
1861 SPAGAIN; /* EVAL blocks could move the stack. */
1862 EXTEND(SP, nparens + i);
1863 EXTEND_MORTAL(nparens + i);
1864 for (i = !i; i <= nparens; i++) {
1865 PUSHs(sv_newmortal());
1866 if (LIKELY((RX_OFFS(rx)[i].start != -1)
1867 && RX_OFFS(rx)[i].end != -1 ))
1869 const I32 len = RX_OFFS(rx)[i].end - RX_OFFS(rx)[i].start;
1870 const char * const s = RX_OFFS(rx)[i].start + truebase;
1871 if (UNLIKELY(RX_OFFS(rx)[i].end < 0 || RX_OFFS(rx)[i].start < 0
1872 || len < 0 || len > strend - s))
1873 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
1874 "start=%ld, end=%ld, s=%p, strend=%p, len=%"UVuf,
1875 (long) i, (long) RX_OFFS(rx)[i].start,
1876 (long)RX_OFFS(rx)[i].end, s, strend, (UV) len);
1877 sv_setpvn(*SP, s, len);
1878 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
1883 curpos = (UV)RX_OFFS(rx)[0].end;
1884 had_zerolen = RX_ZERO_LEN(rx);
1885 PUTBACK; /* EVAL blocks may use stack */
1886 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
1889 LEAVE_SCOPE(oldsave);
1892 NOT_REACHED; /* NOTREACHED */
1895 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
1897 mg = mg_find_mglob(TARG);
1901 LEAVE_SCOPE(oldsave);
1902 if (gimme == G_ARRAY)
1908 Perl_do_readline(pTHX)
1910 dSP; dTARGETSTACKED;
1915 IO * const io = GvIO(PL_last_in_gv);
1916 const I32 type = PL_op->op_type;
1917 const I32 gimme = GIMME_V;
1920 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
1922 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
1923 if (gimme == G_SCALAR) {
1925 SvSetSV_nosteal(TARG, TOPs);
1935 if (IoFLAGS(io) & IOf_ARGV) {
1936 if (IoFLAGS(io) & IOf_START) {
1938 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) {
1939 IoFLAGS(io) &= ~IOf_START;
1940 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
1941 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
1942 sv_setpvs(GvSVn(PL_last_in_gv), "-");
1943 SvSETMAGIC(GvSV(PL_last_in_gv));
1948 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
1949 if (!fp) { /* Note: fp != IoIFP(io) */
1950 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
1953 else if (type == OP_GLOB)
1954 fp = Perl_start_glob(aTHX_ POPs, io);
1956 else if (type == OP_GLOB)
1958 else if (IoTYPE(io) == IoTYPE_WRONLY) {
1959 report_wrongway_fh(PL_last_in_gv, '>');
1963 if ((!io || !(IoFLAGS(io) & IOf_START))
1964 && ckWARN(WARN_CLOSED)
1967 report_evil_fh(PL_last_in_gv);
1969 if (gimme == G_SCALAR) {
1970 /* undef TARG, and push that undefined value */
1971 if (type != OP_RCATLINE) {
1972 sv_setsv(TARG,NULL);
1979 if (gimme == G_SCALAR) {
1981 if (type == OP_RCATLINE && SvGMAGICAL(sv))
1984 if (type == OP_RCATLINE)
1985 SvPV_force_nomg_nolen(sv);
1989 else if (isGV_with_GP(sv)) {
1990 SvPV_force_nomg_nolen(sv);
1992 SvUPGRADE(sv, SVt_PV);
1993 tmplen = SvLEN(sv); /* remember if already alloced */
1994 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
1995 /* try short-buffering it. Please update t/op/readline.t
1996 * if you change the growth length.
2001 if (type == OP_RCATLINE && SvOK(sv)) {
2003 SvPV_force_nomg_nolen(sv);
2009 sv = sv_2mortal(newSV(80));
2013 /* This should not be marked tainted if the fp is marked clean */
2014 #define MAYBE_TAINT_LINE(io, sv) \
2015 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
2020 /* delay EOF state for a snarfed empty file */
2021 #define SNARF_EOF(gimme,rs,io,sv) \
2022 (gimme != G_SCALAR || SvCUR(sv) \
2023 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
2027 if (!sv_gets(sv, fp, offset)
2029 || SNARF_EOF(gimme, PL_rs, io, sv)
2030 || PerlIO_error(fp)))
2032 PerlIO_clearerr(fp);
2033 if (IoFLAGS(io) & IOf_ARGV) {
2034 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
2037 (void)do_close(PL_last_in_gv, FALSE);
2039 else if (type == OP_GLOB) {
2040 if (!do_close(PL_last_in_gv, FALSE)) {
2041 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
2042 "glob failed (child exited with status %d%s)",
2043 (int)(STATUS_CURRENT >> 8),
2044 (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
2047 if (gimme == G_SCALAR) {
2048 if (type != OP_RCATLINE) {
2049 SV_CHECK_THINKFIRST_COW_DROP(TARG);
2055 MAYBE_TAINT_LINE(io, sv);
2058 MAYBE_TAINT_LINE(io, sv);
2060 IoFLAGS(io) |= IOf_NOLINE;
2064 if (type == OP_GLOB) {
2068 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
2069 char * const tmps = SvEND(sv) - 1;
2070 if (*tmps == *SvPVX_const(PL_rs)) {
2072 SvCUR_set(sv, SvCUR(sv) - 1);
2075 for (t1 = SvPVX_const(sv); *t1; t1++)
2077 if (strchr("*%?", *t1))
2079 if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
2082 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
2083 (void)POPs; /* Unmatched wildcard? Chuck it... */
2086 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
2087 if (ckWARN(WARN_UTF8)) {
2088 const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
2089 const STRLEN len = SvCUR(sv) - offset;
2092 if (!is_utf8_string_loc(s, len, &f))
2093 /* Emulate :encoding(utf8) warning in the same case. */
2094 Perl_warner(aTHX_ packWARN(WARN_UTF8),
2095 "utf8 \"\\x%02X\" does not map to Unicode",
2096 f < (U8*)SvEND(sv) ? *f : 0);
2099 if (gimme == G_ARRAY) {
2100 if (SvLEN(sv) - SvCUR(sv) > 20) {
2101 SvPV_shrink_to_cur(sv);
2103 sv = sv_2mortal(newSV(80));
2106 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
2107 /* try to reclaim a bit of scalar space (only on 1st alloc) */
2108 const STRLEN new_len
2109 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */
2110 SvPV_renew(sv, new_len);
2121 SV * const keysv = POPs;
2122 HV * const hv = MUTABLE_HV(POPs);
2123 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2124 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2126 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2127 bool preeminent = TRUE;
2129 if (SvTYPE(hv) != SVt_PVHV)
2136         /* If we can determine whether the element exists,
2137 * Try to preserve the existenceness of a tied hash
2138 * element by using EXISTS and DELETE if possible.
2139 * Fallback to FETCH and STORE otherwise. */
2140 if (SvCANEXISTDELETE(hv))
2141 preeminent = hv_exists_ent(hv, keysv, 0);
2144 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2145 svp = he ? &HeVAL(he) : NULL;
2147 if (!svp || !*svp || *svp == &PL_sv_undef) {
2151 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2153 lv = sv_newmortal();
2154 sv_upgrade(lv, SVt_PVLV);
2156 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
2157 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
2158 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2164 if (HvNAME_get(hv) && isGV(*svp))
2165 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
2166 else if (preeminent)
2167 save_helem_flags(hv, keysv, svp,
2168 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
2170 SAVEHDELETE(hv, keysv);
2172 else if (PL_op->op_private & OPpDEREF) {
2173 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
2177 sv = (svp && *svp ? *svp : &PL_sv_undef);
2178 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
2179 * was to make C<local $tied{foo} = $tied{foo}> possible.
2180 * However, it seems no longer to be needed for that purpose, and
2181 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g>
2182 * would loop endlessly since the pos magic is getting set on the
2183 * mortal copy and lost. However, the copy has the effect of
2184 * triggering the get magic, and losing it altogether made things like
2185 * c<$tied{foo};> in void context no longer do get magic, which some
2186 * code relied on. Also, delayed triggering of magic on @+ and friends
2187 * meant the original regex may be out of scope by now. So as a
2188 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
2189 * being called too many times). */
2190 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
2197 /* a stripped-down version of Perl_softref2xv() for use by
2198 * pp_multideref(), which doesn't use PL_op->op_flags */
2201 S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
2204 if (PL_op->op_private & HINT_STRICT_REFS) {
2206 Perl_die(aTHX_ PL_no_symref_sv, sv,
2207 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
2209 Perl_die(aTHX_ PL_no_usym, what);
2212 Perl_die(aTHX_ PL_no_usym, what);
2213 return gv_fetchsv_nomg(sv, GV_ADD, type);
2217 /* Handle one or more aggregate derefs and array/hash indexings, e.g.
2218 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
2220 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
2221 * Each of these either contains a set of actions, or an argument, such as
2222 * an IV to use as an array index, or a lexical var to retrieve.
2223  * Several actions are stored per UV; we keep shifting new actions off the
2224 * one UV, and only reload when it becomes zero.
2229 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
2230 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
2231 UV actions = items->uv;
2234 /* this tells find_uninit_var() where we're up to */
2235 PL_multideref_pc = items;
2238         /* there are three main classes of action; the first retrieves
2239 * the initial AV or HV from a variable or the stack; the second
2240 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
2241 * the third an unrolled (/DREFHV, rv2hv, helem).
2243 switch (actions & MDEREF_ACTION_MASK) {
2246 actions = (++items)->uv;
2249 case MDEREF_AV_padav_aelem: /* $lex[...] */
2250 sv = PAD_SVl((++items)->pad_offset);
2253 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
2254 sv = UNOP_AUX_item_sv(++items);
2255 assert(isGV_with_GP(sv));
2256 sv = (SV*)GvAVn((GV*)sv);
2259 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
2264 goto do_AV_rv2av_aelem;
2267 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
2268 sv = UNOP_AUX_item_sv(++items);
2269 assert(isGV_with_GP(sv));
2270 sv = GvSVn((GV*)sv);
2271 goto do_AV_vivify_rv2av_aelem;
2273 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
2274 sv = PAD_SVl((++items)->pad_offset);
2277 do_AV_vivify_rv2av_aelem:
2278 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
2279 /* this is the OPpDEREF action normally found at the end of
2280 * ops like aelem, helem, rv2sv */
2281 sv = vivify_ref(sv, OPpDEREF_AV);
2285 /* this is basically a copy of pp_rv2av when it just has the
2288 if (LIKELY(SvROK(sv))) {
2289 if (UNLIKELY(SvAMAGIC(sv))) {
2290 sv = amagic_deref_call(sv, to_av_amg);
2293 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
2294 DIE(aTHX_ "Not an ARRAY reference");
2296 else if (SvTYPE(sv) != SVt_PVAV) {
2297 if (!isGV_with_GP(sv))
2298 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
2299 sv = MUTABLE_SV(GvAVn((GV*)sv));
2305 /* retrieve the key; this may be either a lexical or package
2306 * var (whose index/ptr is stored as an item) or a signed
2307 * integer constant stored as an item.
2310 IV elem = 0; /* to shut up stupid compiler warnings */
2313 assert(SvTYPE(sv) == SVt_PVAV);
2315 switch (actions & MDEREF_INDEX_MASK) {
2316 case MDEREF_INDEX_none:
2318 case MDEREF_INDEX_const:
2319 elem = (++items)->iv;
2321 case MDEREF_INDEX_padsv:
2322 elemsv = PAD_SVl((++items)->pad_offset);
2324 case MDEREF_INDEX_gvsv:
2325 elemsv = UNOP_AUX_item_sv(++items);
2326 assert(isGV_with_GP(elemsv));
2327 elemsv = GvSVn((GV*)elemsv);
2329 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
2330 && ckWARN(WARN_MISC)))
2331 Perl_warner(aTHX_ packWARN(WARN_MISC),
2332 "Use of reference \"%"SVf"\" as array index",
2334 /* the only time that S_find_uninit_var() needs this
2335 * is to determine which index value triggered the
2336 * undef warning. So just update it here. Note that
2337 * since we don't save and restore this var (e.g. for
2338 * tie or overload execution), its value will be
2339 * meaningless apart from just here */
2340 PL_multideref_pc = items;
2341 elem = SvIV(elemsv);
2346 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
2348 if (!(actions & MDEREF_FLAG_last)) {
2349 SV** svp = av_fetch((AV*)sv, elem, 1);
2350 if (!svp || ! (sv=*svp))
2351 DIE(aTHX_ PL_no_aelem, elem);
2355 if (PL_op->op_private &
2356 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2358 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2359 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
2362 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2363 sv = av_delete((AV*)sv, elem, discard);
2371 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2372 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2373 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2374 bool preeminent = TRUE;
2375 AV *const av = (AV*)sv;
2378 if (UNLIKELY(localizing)) {
2382                         /* If we can determine whether the element exists,
2383 * Try to preserve the existenceness of a tied array
2384 * element by using EXISTS and DELETE if possible.
2385 * Fallback to FETCH and STORE otherwise. */
2386 if (SvCANEXISTDELETE(av))
2387 preeminent = av_exists(av, elem);
2390 svp = av_fetch(av, elem, lval && !defer);
2393 if (!svp || !(sv = *svp)) {
2396 DIE(aTHX_ PL_no_aelem, elem);
2397 len = av_tindex(av);
2398 sv = sv_2mortal(newSVavdefelem(av,
2399 /* Resolve a negative index now, unless it points
2400 * before the beginning of the array, in which
2401 * case record it for error reporting in
2402 * magic_setdefelem. */
2403 elem < 0 && len + elem >= 0
2404 ? len + elem : elem, 1));
2407 if (UNLIKELY(localizing)) {
2409 save_aelem(av, elem, svp);
2410 sv = *svp; /* may have changed */
2413 SAVEADELETE(av, elem);
2418 sv = (svp ? *svp : &PL_sv_undef);
2419 /* see note in pp_helem() */
2420 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
2437 case MDEREF_HV_padhv_helem: /* $lex{...} */
2438 sv = PAD_SVl((++items)->pad_offset);
2441 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
2442 sv = UNOP_AUX_item_sv(++items);
2443 assert(isGV_with_GP(sv));
2444 sv = (SV*)GvHVn((GV*)sv);
2447 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
2452 goto do_HV_rv2hv_helem;
2455 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
2456 sv = UNOP_AUX_item_sv(++items);
2457 assert(isGV_with_GP(sv));
2458 sv = GvSVn((GV*)sv);
2459 goto do_HV_vivify_rv2hv_helem;
2461 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
2462 sv = PAD_SVl((++items)->pad_offset);
2465 do_HV_vivify_rv2hv_helem:
2466 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
2467 /* this is the OPpDEREF action normally found at the end of
2468 * ops like aelem, helem, rv2sv */
2469 sv = vivify_ref(sv, OPpDEREF_HV);
2473 /* this is basically a copy of pp_rv2hv when it just has the
2474 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
2477 if (LIKELY(SvROK(sv))) {
2478 if (UNLIKELY(SvAMAGIC(sv))) {
2479 sv = amagic_deref_call(sv, to_hv_amg);
2482 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
2483 DIE(aTHX_ "Not a HASH reference");
2485 else if (SvTYPE(sv) != SVt_PVHV) {
2486 if (!isGV_with_GP(sv))
2487 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
2488 sv = MUTABLE_SV(GvHVn((GV*)sv));
2494 /* retrieve the key; this may be either a lexical / package
2495 * var or a string constant, whose index/ptr is stored as an
2498 SV *keysv = NULL; /* to shut up stupid compiler warnings */
2500 assert(SvTYPE(sv) == SVt_PVHV);
2502 switch (actions & MDEREF_INDEX_MASK) {
2503 case MDEREF_INDEX_none:
2506 case MDEREF_INDEX_const:
2507 keysv = UNOP_AUX_item_sv(++items);
2510 case MDEREF_INDEX_padsv:
2511 keysv = PAD_SVl((++items)->pad_offset);
2514 case MDEREF_INDEX_gvsv:
2515 keysv = UNOP_AUX_item_sv(++items);
2516 keysv = GvSVn((GV*)keysv);
2520 /* see comment above about setting this var */
2521 PL_multideref_pc = items;
2524 /* ensure that candidate CONSTs have been HEKified */
2525 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
2526 || SvTYPE(keysv) >= SVt_PVMG
2529 || SvIsCOW_shared_hash(keysv));
2531 /* this is basically a copy of pp_helem with OPpDEREF skipped */
2533 if (!(actions & MDEREF_FLAG_last)) {
2534 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
2535 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
2536 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2540 if (PL_op->op_private &
2541 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2543 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2544 sv = hv_exists_ent((HV*)sv, keysv, 0)
2545 ? &PL_sv_yes : &PL_sv_no;
2548 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2549 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
2557 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2558 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2559 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2560 bool preeminent = TRUE;
2562 HV * const hv = (HV*)sv;
2565 if (UNLIKELY(localizing)) {
2569                         /* If we can determine whether the element exists,
2570 * Try to preserve the existenceness of a tied hash
2571 * element by using EXISTS and DELETE if possible.
2572 * Fallback to FETCH and STORE otherwise. */
2573 if (SvCANEXISTDELETE(hv))
2574 preeminent = hv_exists_ent(hv, keysv, 0);
2577 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2578 svp = he ? &HeVAL(he) : NULL;
2582 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
2586 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2587 lv = sv_newmortal();
2588 sv_upgrade(lv, SVt_PVLV);
2590 sv_magic(lv, key2 = newSVsv(keysv),
2591 PERL_MAGIC_defelem, NULL, 0);
2592 /* sv_magic() increments refcount */
2593 SvREFCNT_dec_NN(key2);
2594 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2600 if (HvNAME_get(hv) && isGV(sv))
2601 save_gp(MUTABLE_GV(sv),
2602 !(PL_op->op_flags & OPf_SPECIAL));
2603 else if (preeminent) {
2604 save_helem_flags(hv, keysv, svp,
2605 (PL_op->op_flags & OPf_SPECIAL)
2606 ? 0 : SAVEf_SETMAGIC);
2607 sv = *svp; /* may have changed */
2610 SAVEHDELETE(hv, keysv);
2615 sv = (svp && *svp ? *svp : &PL_sv_undef);
2616 /* see note in pp_helem() */
2617 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
2626 actions >>= MDEREF_SHIFT;
2640 cx = &cxstack[cxstack_ix];
2641 itersvp = CxITERVAR(cx);
2643 switch (CxTYPE(cx)) {
2645 case CXt_LOOP_LAZYSV: /* string increment */
2647 SV* cur = cx->blk_loop.state_u.lazysv.cur;
2648 SV *end = cx->blk_loop.state_u.lazysv.end;
2649 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
2650 It has SvPVX of "" and SvCUR of 0, which is what we want. */
2652 const char *max = SvPV_const(end, maxlen);
2653 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
2657 if (LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2658 /* safe to reuse old SV */
2659 sv_setsv(oldsv, cur);
2663 /* we need a fresh SV every time so that loop body sees a
2664 * completely new SV for closures/references to work as
2666 *itersvp = newSVsv(cur);
2667 SvREFCNT_dec_NN(oldsv);
2669 if (strEQ(SvPVX_const(cur), max))
2670 sv_setiv(cur, 0); /* terminate next time */
2676 case CXt_LOOP_LAZYIV: /* integer increment */
2678 IV cur = cx->blk_loop.state_u.lazyiv.cur;
2679 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
2683 /* don't risk potential race */
2684 if (LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2685 /* safe to reuse old SV */
2686 sv_setiv(oldsv, cur);
2690 /* we need a fresh SV every time so that loop body sees a
2691 * completely new SV for closures/references to work as they
2693 *itersvp = newSViv(cur);
2694 SvREFCNT_dec_NN(oldsv);
2697 if (UNLIKELY(cur == IV_MAX)) {
2698 /* Handle end of range at IV_MAX */
2699 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
2701 ++cx->blk_loop.state_u.lazyiv.cur;
2705 case CXt_LOOP_FOR: /* iterate array */
2708 AV *av = cx->blk_loop.state_u.ary.ary;
2710 bool av_is_stack = FALSE;
2717 if (PL_op->op_private & OPpITER_REVERSED) {
2718 ix = --cx->blk_loop.state_u.ary.ix;
2719 if (UNLIKELY(ix <= (av_is_stack ? cx->blk_loop.resetsp : -1)))
2723 ix = ++cx->blk_loop.state_u.ary.ix;
2724 if (UNLIKELY(ix > (av_is_stack ? cx->blk_oldsp : AvFILL(av))))
2728 if (UNLIKELY(SvMAGICAL(av) || AvREIFY(av))) {
2729 SV * const * const svp = av_fetch(av, ix, FALSE);
2730 sv = svp ? *svp : NULL;
2733 sv = AvARRAY(av)[ix];
2736 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
2737 SvSetMagicSV(*itersvp, sv);
2742 if (UNLIKELY(SvIS_FREED(sv))) {
2744 Perl_croak(aTHX_ "Use of freed value in iteration");
2751 SvREFCNT_inc_simple_void_NN(sv);
2754 else if (!av_is_stack) {
2755 sv = newSVavdefelem(av, ix, 0);
2762 SvREFCNT_dec(oldsv);
2767 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
2773 A description of how taint works in pattern matching and substitution.
2775 This is all conditional on NO_TAINT_SUPPORT not being defined. Under
2776 NO_TAINT_SUPPORT, taint-related operations should become no-ops.
2778 While the pattern is being assembled/concatenated and then compiled,
2779 PL_tainted will get set (via TAINT_set) if any component of the pattern
2780 is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
2781 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
2782 TAINT_get). It will also be set if any component of the pattern matches
2783 based on locale-dependent behavior.
2785 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
2786 the pattern is marked as tainted. This means that subsequent usage, such
2787 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
2788 on the new pattern too.
2790 RXf_TAINTED_SEEN is used post-execution by the get magic code
2791 of $1 et al to indicate whether the returned value should be tainted.
2792 It is the responsibility of the caller of the pattern (i.e. pp_match,
2793 pp_subst etc) to set this flag for any other circumstances where $1 needs
2796 The taint behaviour of pp_subst (and pp_substcont) is quite complex.
2798 There are three possible sources of taint
2800 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
2801 * the replacement string (or expression under /e)
2803 There are four destinations of taint and they are affected by the sources
2804 according to the rules below:
2806 * the return value (not including /r):
2807 tainted by the source string and pattern, but only for the
2808 number-of-iterations case; boolean returns aren't tainted;
2809 * the modified string (or modified copy under /r):
2810 tainted by the source string, pattern, and replacement strings;
2812 tainted by the pattern, and under 'use re "taint"', by the source
2814 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
2815 should always be unset before executing subsequent code.
2817 The overall action of pp_subst is:
2819 * at the start, set bits in rxtainted indicating the taint status of
2820 the various sources.
2822 * After each pattern execution, update the SUBST_TAINT_PAT bit in
2823 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
2824 pattern has subsequently become tainted via locale ops.
2826 * If control is being passed to pp_substcont to execute a /e block,
2827 save rxtainted in the CXt_SUBST block, for future use by
2830 * Whenever control is being returned to perl code (either by falling
2831 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
2832 use the flag bits in rxtainted to make all the appropriate types of
2833 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
2834 et al will appear tainted.
2836 pp_match is just a simpler version of the above.
2852 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
2853 See "how taint works" above */
2856 REGEXP *rx = PM_GETRE(pm);
2858 int force_on_match = 0;
2859 const I32 oldsave = PL_savestack_ix;
2861 bool doutf8 = FALSE; /* whether replacement is in utf8 */
2866 /* known replacement string? */
2867 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
2871 if (PL_op->op_flags & OPf_STACKED)
2880 SvGETMAGIC(TARG); /* must come before cow check */
2882 /* Awooga. Awooga. "bool" types that are actually char are dangerous,
2883 because they make integers such as 256 "false". */
2884 is_cow = SvIsCOW(TARG) ? TRUE : FALSE;
2887 sv_force_normal_flags(TARG,0);
2889 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)
2890 && (SvREADONLY(TARG)
2891 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
2892 || SvTYPE(TARG) > SVt_PVLV)
2893 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
2894 Perl_croak_no_modify();
2897 orig = SvPV_nomg(TARG, len);
2898 /* note we don't (yet) force the var into being a string; if we fail
2899     * to match, we leave as-is; on successful match, however, we *will*
2900 * coerce into a string, then repeat the match */
2901 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
2904 /* only replace once? */
2905 once = !(rpm->op_pmflags & PMf_GLOBAL);
2907 /* See "how taint works" above */
2910 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
2911 | (RX_ISTAINTED(rx) ? SUBST_TAINT_PAT : 0)
2912 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
2913 | ((once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
2914 ? SUBST_TAINT_BOOLRET : 0));
2920 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
2922 strend = orig + len;
2923 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len;
2924 maxiters = 2 * slen + 10; /* We can match twice at each
2925 position, once with zero-length,
2926 second time with non-zero. */
2928 if (!RX_PRELEN(rx) && PL_curpm
2929 && !ReANY(rx)->mother_re) {
2934 #ifdef PERL_SAWAMPERSAND
2935 r_flags = ( RX_NPARENS(rx)
2937 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
2938 || (rpm->op_pmflags & PMf_KEEPCOPY)
2943 r_flags = REXEC_COPY_STR;
2946 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
2949 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
2950 LEAVE_SCOPE(oldsave);
2955 /* known replacement string? */
2957 /* replacement needing upgrading? */
2958 if (DO_UTF8(TARG) && !doutf8) {
2959 nsv = sv_newmortal();
2962 sv_recode_to_utf8(nsv, _get_encoding());
2964 sv_utf8_upgrade(nsv);
2965 c = SvPV_const(nsv, clen);
2969 c = SvPV_const(dstr, clen);
2970 doutf8 = DO_UTF8(dstr);
2973 if (SvTAINTED(dstr))
2974 rxtainted |= SUBST_TAINT_REPL;
2981 /* can do inplace substitution? */
2986 && (I32)clen <= RX_MINLENRET(rx)
2988 || !(r_flags & REXEC_COPY_STR)
2989 || (!SvGMAGICAL(dstr) && !(RX_EXTFLAGS(rx) & RXf_EVAL_SEEN))
2991 && !(RX_EXTFLAGS(rx) & RXf_NO_INPLACE_SUBST)
2992 && (!doutf8 || SvUTF8(TARG))
2993 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
2997 if (SvIsCOW(TARG)) {
2998 if (!force_on_match)
3000 assert(SvVOK(TARG));
3003 if (force_on_match) {
3004 /* redo the first match, this time with the orig var
3005 * forced into being a string */
3007 orig = SvPV_force_nomg(TARG, len);
3013 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3014 rxtainted |= SUBST_TAINT_PAT;
3015 m = orig + RX_OFFS(rx)[0].start;
3016 d = orig + RX_OFFS(rx)[0].end;
3018 if (m - s > strend - d) { /* faster to shorten from end */
3021 Copy(c, m, clen, char);
3026 Move(d, m, i, char);
3030 SvCUR_set(TARG, m - s);
3032 else { /* faster from front */
3036 Move(s, d - i, i, char);
3039 Copy(c, d, clen, char);
3046 d = s = RX_OFFS(rx)[0].start + orig;
3049 if (UNLIKELY(iters++ > maxiters))
3050 DIE(aTHX_ "Substitution loop");
3051 if (UNLIKELY(RX_MATCH_TAINTED(rx))) /* run time pattern taint, eg locale */
3052 rxtainted |= SUBST_TAINT_PAT;
3053 m = RX_OFFS(rx)[0].start + orig;
3056 Move(s, d, i, char);
3060 Copy(c, d, clen, char);
3063 s = RX_OFFS(rx)[0].end + orig;
3064 } while (CALLREGEXEC(rx, s, strend, orig,
3065 s == m, /* don't match same null twice */
3067 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3070 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
3071 Move(s, d, i+1, char); /* include the NUL */
3081 if (force_on_match) {
3082 /* redo the first match, this time with the orig var
3083 * forced into being a string */
3085 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3086 /* I feel that it should be possible to avoid this mortal copy
3087 given that the code below copies into a new destination.
3088 However, I suspect it isn't worth the complexity of
3089 unravelling the C<goto force_it> for the small number of
3090 cases where it would be viable to drop into the copy code. */
3091 TARG = sv_2mortal(newSVsv(TARG));
3093 orig = SvPV_force_nomg(TARG, len);
3099 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3100 rxtainted |= SUBST_TAINT_PAT;
3102 s = RX_OFFS(rx)[0].start + orig;
3103 dstr = newSVpvn_flags(orig, s-orig,
3104 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
3109 /* note that a whole bunch of local vars are saved here for
3110 * use by pp_substcont: here's a list of them in case you're
3111 * searching for places in this sub that uses a particular var:
3112 * iters maxiters r_flags oldsave rxtainted orig dstr targ
3113 * s m strend rx once */
3115 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
3119 if (UNLIKELY(iters++ > maxiters))
3120 DIE(aTHX_ "Substitution loop");
3121 if (UNLIKELY(RX_MATCH_TAINTED(rx)))
3122 rxtainted |= SUBST_TAINT_PAT;
3123 if (RX_MATCH_COPIED(rx) && RX_SUBBEG(rx) != orig) {
3125 char *old_orig = orig;
3126 assert(RX_SUBOFFSET(rx) == 0);
3128 orig = RX_SUBBEG(rx);
3129 s = orig + (old_s - old_orig);
3130 strend = s + (strend - old_s);
3132 m = RX_OFFS(rx)[0].start + orig;
3133 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
3134 s = RX_OFFS(rx)[0].end + orig;
3136 /* replacement already stringified */
3138 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
3143 if (!nsv) nsv = sv_newmortal();
3144 sv_copypv(nsv, repl);
3145 if (!DO_UTF8(nsv)) sv_recode_to_utf8(nsv, _get_encoding());
3146 sv_catsv(dstr, nsv);
3148 else sv_catsv(dstr, repl);
3149 if (UNLIKELY(SvTAINTED(repl)))
3150 rxtainted |= SUBST_TAINT_REPL;
3154 } while (CALLREGEXEC(rx, s, strend, orig,
3155 s == m, /* Yields minend of 0 or 1 */
3157 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3158 assert(strend >= s);
3159 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
3161 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3162 /* From here on down we're using the copy, and leaving the original
3169 /* The match may make the string COW. If so, brilliant, because
3170 that's just saved us one malloc, copy and free - the regexp has
3171 donated the old buffer, and we malloc an entirely new one, rather
3172 than the regexp malloc()ing a buffer and copying our original,
3173 only for us to throw it away here during the substitution. */
3174 if (SvIsCOW(TARG)) {
3175 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
3181 SvPV_set(TARG, SvPVX(dstr));
3182 SvCUR_set(TARG, SvCUR(dstr));
3183 SvLEN_set(TARG, SvLEN(dstr));
3184 SvFLAGS(TARG) |= SvUTF8(dstr);
3185 SvPV_set(dstr, NULL);
3192 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3193 (void)SvPOK_only_UTF8(TARG);
3196 /* See "how taint works" above */
3198 if ((rxtainted & SUBST_TAINT_PAT) ||
3199 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
3200 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
3202 (RX_MATCH_TAINTED_on(rx)); /* taint $1 et al */
3204 if (!(rxtainted & SUBST_TAINT_BOOLRET)
3205 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
3207 SvTAINTED_on(TOPs); /* taint return value */
3209 SvTAINTED_off(TOPs); /* may have got tainted earlier */
3211 /* needed for mg_set below */
3213 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
3217 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
3219 LEAVE_SCOPE(oldsave);
3228 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
3229 ++*PL_markstack_ptr;
3231 LEAVE_with_name("grep_item"); /* exit inner scope */
3234 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
3236 const I32 gimme = GIMME_V;
3238 LEAVE_with_name("grep"); /* exit outer scope */
3239 (void)POPMARK; /* pop src */
3240 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
3241 (void)POPMARK; /* pop dst */
3242 SP = PL_stack_base + POPMARK; /* pop original mark */
3243 if (gimme == G_SCALAR) {
3247 else if (gimme == G_ARRAY)
3254 ENTER_with_name("grep_item"); /* enter inner scope */
3257 src = PL_stack_base[TOPMARK];
3258 if (SvPADTMP(src)) {
3259 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
3265 RETURNOP(cLOGOP->op_other);
3279 cx = &cxstack[cxstack_ix];
3280 assert(CxTYPE(cx) == CXt_SUB);
3282 if (CxMULTICALL(cx)) {
3283 /* entry zero of a stack is always PL_sv_undef, which
3284 * simplifies converting a '()' return into undef in scalar context */
3285 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
3289 newsp = PL_stack_base + cx->blk_oldsp;
3290 gimme = cx->blk_gimme;
3293 if (gimme == G_SCALAR) {
3295 if (LIKELY(MARK <= SP)) {
3296 /* if we are recursing, then free the current tmps.
3297 * Normally we don't bother and rely on the caller to do this,
3298 * because early tmp freeing tends to free the args we're
3300 * Doing it for recursion ensures the things like the
3301 * fibonacci benchmark don't fill up the tmps stack because
3302 * it never reaches an outer nextstate */
3303 if (cx->blk_sub.olddepth) {
3304 if (SvTEMP(TOPs) && SvREFCNT(TOPs) == 1
3305 && !SvMAGICAL(TOPs)) {
3306 *MARK = SvREFCNT_inc(TOPs);
3311 sv = SvREFCNT_inc(TOPs); /* FREETMPS could clobber it */
3313 *MARK = sv_mortalcopy(sv);
3314 SvREFCNT_dec_NN(sv);
3317 else if (SvTEMP(TOPs) && SvREFCNT(TOPs) == 1
3318 && !SvMAGICAL(TOPs)) {
3322 *MARK = sv_mortalcopy(TOPs);
3326 *MARK = &PL_sv_undef;
3330 else if (gimme == G_ARRAY) {
3331 for (MARK = newsp + 1; MARK <= SP; MARK++) {
3332 if (!SvTEMP(*MARK) || SvREFCNT(*MARK) != 1
3333 || SvMAGICAL(*MARK)) {
3334 *MARK = sv_mortalcopy(*MARK);
3335 TAINT_NOT; /* Each item is independent */
3342 cxstack_ix++; /* temporarily protect top context */
3343 POPSUB(cx,sv); /* Stack values are safe: release CV and @_ ... */
3345 PL_curpm = newpm; /* ... and pop $1 et al */
3348 return cx->blk_sub.retop;
3352 /* clear (if possible) or abandon the current @_. If 'abandon' is true,
3353 * forces an abandon */
3356 Perl_clear_defarray(pTHX_ AV* av, bool abandon)
3358 const SSize_t fill = AvFILLp(av);
3360 PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
3362 if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av)))
3365 SvREFCNT_dec_NN(av);
3367 PAD_SVl(0) = MUTABLE_SV(av);
3368 av_extend(av, fill);
3380 I32 old_savestack_ix;
3385 /* Locate the CV to call:
3386 * - most common case: RV->CV: f(), $ref->():
3387 * note that if a sub is compiled before its caller is compiled,
3388 * the stash entry will be a ref to a CV, rather than being a GV.
3389 * - second most common case: CV: $ref->method()
3392 /* a non-magic-RV -> CV ? */
3393 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
3394 cv = MUTABLE_CV(SvRV(sv));
3395 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
3399 cv = MUTABLE_CV(sv);
3402 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
3403 /* handle all the weird cases */
3404 switch (SvTYPE(sv)) {
3406 if (!isGV_with_GP(sv))
3410 cv = GvCVu((const GV *)sv);
3411 if (UNLIKELY(!cv)) {
3413 cv = sv_2cv(sv, &stash, &gv, 0);
3415 old_savestack_ix = PL_savestack_ix;
3426 if (UNLIKELY(SvAMAGIC(sv))) {
3427 sv = amagic_deref_call(sv, to_cv_amg);
3428 /* Don't SPAGAIN here. */
3434 if (UNLIKELY(!SvOK(sv)))
3435 DIE(aTHX_ PL_no_usym, "a subroutine");
3437 if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
3438 if (PL_op->op_flags & OPf_STACKED) /* hasargs */
3439 SP = PL_stack_base + POPMARK;
3442 if (GIMME_V == G_SCALAR)
3443 PUSHs(&PL_sv_undef);
3447 sym = SvPV_nomg_const(sv, len);
3448 if (PL_op->op_private & HINT_STRICT_REFS)
3449 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
3450 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
3453 cv = MUTABLE_CV(SvRV(sv));
3454 if (LIKELY(SvTYPE(cv) == SVt_PVCV))
3460 DIE(aTHX_ "Not a CODE reference");
3464 /* At this point we want to save PL_savestack_ix, either by doing a
3465 * PUSHSUB, or for XS, doing an ENTER. But we don't yet know the final
3466 * CV we will be using (so we don't know whether its XS, so we can't
3467 * PUSHSUB or ENTER yet), and determining cv may itself push stuff on
3468 * the save stack. So remember where we are currently on the save
3469 * stack, and later update the CX or scopestack entry accordingly. */
3470 old_savestack_ix = PL_savestack_ix;
3472 /* these two fields are in a union. If they ever become separate,
3473 * we have to test for both of them being null below */
3474 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
3475 while (UNLIKELY(!CvROOT(cv))) {
3479 /* anonymous or undef'd function leaves us no recourse */
3480 if (CvLEXICAL(cv) && CvHASGV(cv))
3481 DIE(aTHX_ "Undefined subroutine &%"SVf" called",
3482 SVfARG(cv_name(cv, NULL, 0)));
3483 if (CvANON(cv) || !CvHASGV(cv)) {
3484 DIE(aTHX_ "Undefined subroutine called");
3487 /* autoloaded stub? */
3488 if (cv != GvCV(gv = CvGV(cv))) {
3491 /* should call AUTOLOAD now? */
3494 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
3495 GvNAMEUTF8(gv) ? SVf_UTF8 : 0);
3496 cv = autogv ? GvCV(autogv) : NULL;
3499 sub_name = sv_newmortal();
3500 gv_efullname3(sub_name, gv, NULL);
3501 DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
3505 /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
3506 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
3507 DIE(aTHX_ "Closure prototype called");
3509 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
3512 Perl_get_db_sub(aTHX_ &sv, cv);
3514 PL_curcopdb = PL_curcop;
3516 /* check for lsub that handles lvalue subroutines */
3517 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
3518 /* if lsub not found then fall back to DB::sub */
3519 if (!cv) cv = GvCV(PL_DBsub);
3521 cv = GvCV(PL_DBsub);
3524 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
3525 DIE(aTHX_ "No DB::sub routine defined");
3528 if (!(CvISXSUB(cv))) {
3529 /* This path taken at least 75% of the time */
3536 /* keep PADTMP args alive throughout the call (we need to do this
3537 * because @_ isn't refcounted). Note that we create the mortals
3538 * in the caller's tmps frame, so they won't be freed until after
3539 * we return from the sub.
3548 *svp = sv = sv_mortalcopy(sv);
3554 PUSHBLOCK(cx, CXt_SUB, MARK);
3555 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
3557 cx->blk_sub.retop = PL_op->op_next;
3558 cx->cx_u.cx_blk.blku_old_savestack_ix = old_savestack_ix;
3560 padlist = CvPADLIST(cv);
3561 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2)) {
3562 PERL_STACK_OVERFLOW_CHECK();
3563 pad_push(padlist, depth);
3565 PAD_SET_CUR_NOSAVE(padlist, depth);
3566 if (LIKELY(hasargs)) {
3567 AV *const av = MUTABLE_AV(PAD_SVl(0));
3571 defavp = &GvAV(PL_defgv);
3572 cx->blk_sub.savearray = *defavp;
3573 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
3575 /* it's the responsibility of whoever leaves a sub to ensure
3576 * that a clean, empty AV is left in pad[0]. This is normally
3577 * done by POPSUB() */
3578 assert(!AvREAL(av) && AvFILLp(av) == -1);
3581 if (UNLIKELY(items - 1 > AvMAX(av))) {
3582 SV **ary = AvALLOC(av);
3583 AvMAX(av) = items - 1;
3584 Renew(ary, items, SV*);
3589 Copy(MARK+1,AvARRAY(av),items,SV*);
3590 AvFILLp(av) = items - 1;
3592 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3594 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3595 SVfARG(cv_name(cv, NULL, 0)));
3596 /* warning must come *after* we fully set up the context
3597 * stuff so that __WARN__ handlers can safely dounwind()
3600 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
3601 && ckWARN(WARN_RECURSION)
3602 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
3603 sub_crush_depth(cv);
3604 RETURNOP(CvSTART(cv));
3607 SSize_t markix = TOPMARK;
3610 /* pretend we did the ENTER earlier */
3611 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
3616 if (UNLIKELY(((PL_op->op_private
3617 & PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
3618 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3620 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3621 SVfARG(cv_name(cv, NULL, 0)));
3623 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
3624 /* Need to copy @_ to stack. Alternative may be to
3625 * switch stack to @_, and copy return values
3626 * back. This would allow popping @_ in XSUB, e.g.. XXXX */
3627 AV * const av = GvAV(PL_defgv);
3628 const SSize_t items = AvFILL(av) + 1;
3632 const bool m = cBOOL(SvRMAGICAL(av));
3633 /* Mark is at the end of the stack. */
3635 for (; i < items; ++i)
3639 SV ** const svp = av_fetch(av, i, 0);
3640 sv = svp ? *svp : NULL;
3642 else sv = AvARRAY(av)[i];
3643 if (sv) SP[i+1] = sv;
3645 SP[i+1] = newSVavdefelem(av, i, 1);
3653 SV **mark = PL_stack_base + markix;
3654 SSize_t items = SP - mark;
3657 if (*mark && SvPADTMP(*mark)) {
3658 *mark = sv_mortalcopy(*mark);
3662 /* We assume first XSUB in &DB::sub is the called one. */
3663 if (UNLIKELY(PL_curcopdb)) {
3664 SAVEVPTR(PL_curcop);
3665 PL_curcop = PL_curcopdb;
3668 /* Do we need to open block here? XXXX */
3670 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
3672 CvXSUB(cv)(aTHX_ cv);
3674 /* Enforce some sanity in scalar context. */
3675 if (GIMME_V == G_SCALAR) {
3676 SV **svp = PL_stack_base + markix + 1;
3677 if (svp != PL_stack_sp) {
3678 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
3688 Perl_sub_crush_depth(pTHX_ CV *cv)
3690 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
3693 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
3695 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%"SVf"\"",
3696 SVfARG(cv_name(cv,NULL,0)));
3704 SV* const elemsv = POPs;
3705 IV elem = SvIV(elemsv);
3706 AV *const av = MUTABLE_AV(POPs);
3707 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
3708 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
3709 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
3710 bool preeminent = TRUE;
3713 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
3714 Perl_warner(aTHX_ packWARN(WARN_MISC),
3715 "Use of reference \"%"SVf"\" as array index",
3717 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
3720 if (UNLIKELY(localizing)) {
3724 /* If we can determine whether the element exist,
3725 * Try to preserve the existenceness of a tied array
3726 * element by using EXISTS and DELETE if possible.
3727 * Fallback to FETCH and STORE otherwise. */
3728 if (SvCANEXISTDELETE(av))
3729 preeminent = av_exists(av, elem);
3732 svp = av_fetch(av, elem, lval && !defer);
3734 #ifdef PERL_MALLOC_WRAP
3735 if (SvUOK(elemsv)) {
3736 const UV uv = SvUV(elemsv);
3737 elem = uv > IV_MAX ? IV_MAX : uv;
3739 else if (SvNOK(elemsv))
3740 elem = (IV)SvNV(elemsv);
3742 static const char oom_array_extend[] =
3743 "Out of memory during array extend"; /* Duplicated in av.c */
3744 MEM_WRAP_CHECK_1(elem,SV*,oom_array_extend);
3747 if (!svp || !*svp) {
3750 DIE(aTHX_ PL_no_aelem, elem);
3751 len = av_tindex(av);
3752 mPUSHs(newSVavdefelem(av,
3753 /* Resolve a negative index now, unless it points before the
3754 beginning of the array, in which case record it for error
3755 reporting in magic_setdefelem. */
3756 elem < 0 && len + elem >= 0 ? len + elem : elem,
3760 if (UNLIKELY(localizing)) {
3762 save_aelem(av, elem, svp);
3764 SAVEADELETE(av, elem);
3766 else if (PL_op->op_private & OPpDEREF) {
3767 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
3771 sv = (svp ? *svp : &PL_sv_undef);
3772 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
3779 Perl_vivify_ref(pTHX_ SV *sv, U32 to_what)
3781 PERL_ARGS_ASSERT_VIVIFY_REF;
3786 Perl_croak_no_modify();
3787 prepare_SV_for_RV(sv);
3790 SvRV_set(sv, newSV(0));
3793 SvRV_set(sv, MUTABLE_SV(newAV()));
3796 SvRV_set(sv, MUTABLE_SV(newHV()));
3803 if (SvGMAGICAL(sv)) {
3804 /* copy the sv without magic to prevent magic from being
3806 SV* msv = sv_newmortal();
3807 sv_setsv_nomg(msv, sv);
3813 PERL_STATIC_INLINE HV *
3814 S_opmethod_stash(pTHX_ SV* meth)
3819 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
3820 ? (Perl_croak(aTHX_ "Can't call method \"%"SVf"\" without a "
3821 "package or object reference", SVfARG(meth)),
3823 : *(PL_stack_base + TOPMARK + 1);
3825 PERL_ARGS_ASSERT_OPMETHOD_STASH;
3829 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on an undefined value",
3832 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
3833 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
3834 stash = gv_stashsv(sv, GV_CACHE_ONLY);
3835 if (stash) return stash;
3839 ob = MUTABLE_SV(SvRV(sv));
3840 else if (!SvOK(sv)) goto undefined;
3841 else if (isGV_with_GP(sv)) {
3843 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
3844 "without a package or object reference",
3847 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') {
3848 assert(!LvTARGLEN(ob));
3852 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob));
3855 /* this isn't a reference */
3858 const char * const packname = SvPV_nomg_const(sv, packlen);
3859 const U32 packname_utf8 = SvUTF8(sv);
3860 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
3861 if (stash) return stash;
3863 if (!(iogv = gv_fetchpvn_flags(
3864 packname, packlen, packname_utf8, SVt_PVIO
3866 !(ob=MUTABLE_SV(GvIO(iogv))))
3868 /* this isn't the name of a filehandle either */
3871 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
3872 "without a package or object reference",
3875 /* assume it's a package name */
3876 stash = gv_stashpvn(packname, packlen, packname_utf8);
3877 if (stash) return stash;
3878 else return MUTABLE_HV(sv);
3880 /* it _is_ a filehandle name -- replace with a reference */
3881 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
3884 /* if we got here, ob should be an object or a glob */
3885 if (!ob || !(SvOBJECT(ob)
3886 || (isGV_with_GP(ob)
3887 && (ob = MUTABLE_SV(GvIO((const GV *)ob)))
3890 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on unblessed reference",
3891 SVfARG((SvSCREAM(meth) && strEQ(SvPV_nolen_const(meth),"isa"))
3892 ? newSVpvs_flags("DOES", SVs_TEMP)
3904 SV* const meth = TOPs;
3907 SV* const rmeth = SvRV(meth);
3908 if (SvTYPE(rmeth) == SVt_PVCV) {
3914 stash = opmethod_stash(meth);
3916 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
3919 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
3923 #define METHOD_CHECK_CACHE(stash,cache,meth) \
3924 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
3926 gv = MUTABLE_GV(HeVAL(he)); \
3927 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
3928 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
3930 XPUSHs(MUTABLE_SV(GvCV(gv))); \
3939 SV* const meth = cMETHOPx_meth(PL_op);
3940 HV* const stash = opmethod_stash(meth);
3942 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
3943 METHOD_CHECK_CACHE(stash, stash, meth);
3946 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
3949 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
3958 SV* const meth = cMETHOPx_meth(PL_op);
3959 HV* const stash = CopSTASH(PL_curcop);
3960 /* Actually, SUPER doesn't need real object's (or class') stash at all,
3961 * as it uses CopSTASH. However, we must ensure that object(class) is
3962 * correct (this check is done by S_opmethod_stash) */
3963 opmethod_stash(meth);
3965 if ((cache = HvMROMETA(stash)->super)) {
3966 METHOD_CHECK_CACHE(stash, cache, meth);
3969 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
3972 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
3980 SV* const meth = cMETHOPx_meth(PL_op);
3981 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
3982 opmethod_stash(meth); /* not used but needed for error checks */
3984 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
3985 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
3987 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
3990 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
3994 PP(pp_method_redir_super)
3999 SV* const meth = cMETHOPx_meth(PL_op);
4000 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4001 opmethod_stash(meth); /* not used but needed for error checks */
4003 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4004 else if ((cache = HvMROMETA(stash)->super)) {
4005 METHOD_CHECK_CACHE(stash, cache, meth);
4008 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4011 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4016 * ex: set ts=8 sts=4 sw=4 et: