3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
6 * You may distribute under the terms of either the GNU General Public
7 * License or the Artistic License, as specified in the README file.
12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
15 * Awake! Awake! Fear, Fire, Foes! Awake!
18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
21 /* This file contains 'hot' pp ("push/pop") functions that
22 * execute the opcodes that make up a perl program. A typical pp function
23 * expects to find its arguments on the stack, and usually pushes its
24 * results onto the stack, hence the 'pp' terminology. Each OP structure
25 * contains a pointer to the relevant pp_foo() function.
27 * By 'hot', we mean common ops whose execution speed is critical.
28 * By gathering them together into a single file, we encourage
29 * CPU cache hits on hot code. Also it could be taken as a warning not to
30 * change any code in this file unless you're sure it won't affect
35 #define PERL_IN_PP_HOT_C
/* NOTE(review): this file is a fragmentary extract of Perl 5's pp_hot.c; the
 * leading number on each line is the original file's line number and the
 * enclosing PP(pp_*) function headers have been lost in extraction.  The
 * comments below group surviving lines by the function they appear to belong
 * to — confirm against the complete upstream source. */
/* pp_nextstate fragment: start a new statement — make this OP the current
 * COP, clear the taint flag, and reset the stack pointer to the enclosing
 * block's saved base. */
49 PL_curcop = (COP*)PL_op;
50 TAINT_NOT; /* Each statement is presumed innocent */
51 PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;
/* pp_gvsv fragment: push the glob's scalar slot; with OPpLVAL_INTRO
 * ('local $x') the old value is saved on the save stack first. */
61 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
62 PUSHs(save_scalar(cGVOP_gv));
64 PUSHs(GvSVn(cGVOP_gv));
/* pp_null / pp_pushmark fragments. */
69 /* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
76 /* This is sometimes called directly by pp_coreargs, pp_grepstart and
80 PUSHMARK(PL_stack_sp);
91 /* no PUTBACK, SETs doesn't inc/dec SP */
/* pp_gv fragment: push the GV itself as an SV. */
98 XPUSHs(MUTABLE_SV(cGVOP_gv));
/* pp_and fragment (shared with pp_andassign): short-circuit '&&'.
 * Reads the LHS result off the stack; the truthiness test itself was lost
 * in extraction — presumably a true LHS transfers control to
 * cLOGOP->op_other to evaluate the RHS (confirm against full source). */
103 /* also used for: pp_andassign() */
109 /* SP is not used to remove a variable that is saved across the
110 sv_2bool_flags call in SvTRUE_NN, if a RISC/CISC or low/high machine
111 register or load/store vs direct mem ops macro is introduced, this
112 should be a define block between direct PL_stack_sp and dSP operations,
113 presently, using PL_stack_sp is bias towards CISC cpus */
114 SV * const sv = *PL_stack_sp;
118 if (PL_op->op_type == OP_AND)
120 return cLOGOP->op_other;
/* pp_sassign fragment: scalar assignment ('$left = $right').  Most of the
 * surviving lines belong to the OPpASSIGN_CV_TO_GV path, which optimises
 * assigning a code ref to a glob, possibly installing a cheap proxy
 * constant sub.  The two trailing RETURNOPs look like pp_cond_expr —
 * confirm against the full source. */
128 /* sassign keeps its args in the optree traditionally backwards.
129 So we pop them differently.
131 SV *left = POPs; SV *right = TOPs;
133 if (PL_op->op_private & OPpASSIGN_BACKWARDS) {
134 SV * const temp = left;
135 left = right; right = temp;
/* propagate pending taint onto the RHS before assigning */
137 assert(TAINTING_get || !TAINT_get);
138 if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
140 if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
142 SV * const cv = SvRV(right);
143 const U32 cv_type = SvTYPE(cv);
144 const bool is_gv = isGV_with_GP(left);
145 const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
151 /* Can do the optimisation if left (LVALUE) is not a typeglob,
152 right (RVALUE) is a reference to something, and we're in void
154 if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
155 /* Is the target symbol table currently empty? */
156 GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
157 if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
158 /* Good. Create a new proxy constant subroutine in the target.
159 The gv becomes a(nother) reference to the constant. */
160 SV *const value = SvRV(cv);
162 SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
163 SvPCS_IMPORTED_on(gv);
165 SvREFCNT_inc_simple_void(value);
171 /* Need to fix things up. */
173 /* Need to fix GV. */
174 left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
178 /* We've been returned a constant rather than a full subroutine,
179 but they expect a subroutine reference to apply. */
181 ENTER_with_name("sassign_coderef");
182 SvREFCNT_inc_void(SvRV(cv));
183 /* newCONSTSUB takes a reference count on the passed in SV
184 from us. We set the name to NULL, otherwise we get into
185 all sorts of fun as the reference to our new sub is
186 donated to the GV that we're about to assign to.
188 SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
191 LEAVE_with_name("sassign_coderef");
193 /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
195 First: ops for \&{"BONK"}; return us the constant in the
197 Second: ops for *{"BONK"} cause that symbol table entry
198 (and our reference to it) to be upgraded from RV
200 Thirdly: We get here. cv is actually PVGV now, and its
201 GvCV() is actually the subroutine we're looking for
203 So change the reference so that it points to the subroutine
204 of that typeglob, as that's what they were after all along.
206 GV *const upgraded = MUTABLE_GV(cv);
207 CV *const source = GvCV(upgraded);
210 assert(CvFLAGS(source) & CVf_CONST);
/* swap ownership: keep a ref to the CV, drop our ref to the glob */
212 SvREFCNT_inc_simple_void_NN(source);
213 SvREFCNT_dec_NN(upgraded);
214 SvRV_set(right, MUTABLE_SV(source));
/* warn when assigning to a temporary nothing else can observe */
220 UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
221 (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
224 packWARN(WARN_MISC), "Useless assignment to a temporary"
226 SvSetMagicSV(left, right);
/* pp_cond_expr fragment(?): dispatch to one of the two branch arms */
236 RETURNOP(cLOGOP->op_other);
238 RETURNOP(cLOGOP->op_next);
/* pp_unstack fragment: between loop iterations, reset taint state and
 * drop the stack back to the current context's saved base. */
245 TAINT_NOT; /* Each statement is presumed innocent */
247 PL_stack_sp = PL_stack_base + cx->blk_oldsp;
249 if (!(PL_op->op_flags & OPf_SPECIAL)) {
250 assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx));
/* pp_concat fragment ('$l . $r', also '.='): builds the result in TARG,
 * taking defensive copies when TARG aliases an operand, and upgrading
 * both sides to UTF-8 when their encodings differ. */
258 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
263 const char *rpv = NULL;
265 bool rcopied = FALSE;
267 if (TARG == right && right != left) { /* $r = $l.$r */
/* TARG aliases $r: snapshot $r into a mortal before TARG is clobbered */
268 rpv = SvPV_nomg_const(right, rlen);
269 rbyte = !DO_UTF8(right);
270 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
271 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */
275 if (TARG != left) { /* not $l .= $r */
277 const char* const lpv = SvPV_nomg_const(left, llen);
278 lbyte = !DO_UTF8(left);
279 sv_setpvn(TARG, lpv, llen);
285 else { /* $l .= $r and left == TARG */
287 if ((left == right /* $l .= $l */
288 || (PL_op->op_private & OPpTARGET_MY)) /* $l = $l . $r */
289 && ckWARN(WARN_UNINITIALIZED)
295 SvPV_force_nomg_nolen(left);
297 lbyte = !DO_UTF8(left);
303 rpv = SvPV_nomg_const(right, rlen);
304 rbyte = !DO_UTF8(right);
/* mixed bytes/UTF-8 operands: upgrade both sides before concatenating */
306 if (lbyte != rbyte) {
308 sv_utf8_upgrade_nomg(TARG);
311 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
312 sv_utf8_upgrade_nomg(right);
313 rpv = SvPV_nomg_const(right, rlen);
316 sv_catpvn_nomg(TARG, rpv, rlen);
323 /* push the elements of av onto the stack.
324 * XXX Note that padav has similar code but without the mg_get().
325 * I suspect that the mg_get is no longer needed, but while padav
326 * differs, it can't share this function */
329 S_pushav(pTHX_ AV* const av)
332 const SSize_t maxarg = AvFILL(av) + 1;
/* magical arrays: fetch each element through av_fetch and run get-magic */
334 if (UNLIKELY(SvRMAGICAL(av))) {
336 for (i=0; i < (PADOFFSET)maxarg; i++) {
337 SV ** const svp = av_fetch(av, i, FALSE);
338 /* See note in pp_helem, and bug id #27839 */
340 ? SvGMAGICAL(*svp) ? (mg_get(*svp), *svp) : *svp
/* plain arrays: copy AvARRAY directly, substituting undef for holes */
346 for (i=0; i < (PADOFFSET)maxarg; i++) {
347 SV * const sv = AvARRAY(av)[i];
348 SP[i+1] = LIKELY(sv) ? sv : &PL_sv_undef;
/* pp_padrange fragment: push a contiguous run of 'count' pad lexicals
 * starting at pad offset 'base'; with OPpLVAL_INTRO, record a single
 * packed SAVEt_CLEARPADRANGE save-stack entry covering the whole range. */
356 /* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */
361 PADOFFSET base = PL_op->op_targ;
362 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
364 if (PL_op->op_flags & OPf_SPECIAL) {
365 /* fake the RHS of my ($x,$y,..) = @_ */
367 S_pushav(aTHX_ GvAVn(PL_defgv));
371 /* note, this is only skipped for compile-time-known void cxt */
372 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
375 for (i = 0; i <count; i++)
376 *++SP = PAD_SV(base+i);
378 if (PL_op->op_private & OPpLVAL_INTRO) {
379 SV **svp = &(PAD_SVl(base));
/* pack base+count+save-type into one UV save-stack payload */
380 const UV payload = (UV)(
381 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
382 | (count << SAVE_TIGHT_SHIFT)
383 | SAVEt_CLEARPADRANGE);
384 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
385 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT)) == base);
392 for (i = 0; i <count; i++)
393 SvPADSTALE_off(*svp++); /* mark lexical as active */
/* pp_padsv fragment: push a single pad lexical; in lvalue (OPf_MOD)
 * context, arrange for 'my' intro to clear it at scope exit and
 * auto-vivify a reference when OPpDEREF is set. */
404 OP * const op = PL_op;
405 /* access PL_curpad once */
406 SV ** const padentry = &(PAD_SVl(op->op_targ));
411 PUTBACK; /* no pop/push after this, TOPs ok */
413 if (op->op_flags & OPf_MOD) {
414 if (op->op_private & OPpLVAL_INTRO)
415 if (!(op->op_private & OPpPAD_STATE))
416 save_clearsv(padentry);
417 if (op->op_private & OPpDEREF) {
418 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather
419 than TARG reduces the scope of TARG, so it does not
420 span the call to save_clearsv, resulting in smaller
422 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
/* pp_readline fragment: resolve the filehandle into PL_last_in_gv
 * (defaulting to ARGV, dereferencing a glob ref if given one), then
 * delegate the actual read to do_readline(). */
434 tryAMAGICunTARGETlist(iter_amg, 0);
435 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
437 else PL_last_in_gv = PL_argvgv, PL_stack_sp--;
438 if (!isGV_with_GP(PL_last_in_gv)) {
439 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
440 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));
443 XPUSHs(MUTABLE_SV(PL_last_in_gv));
446 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
447 if (PL_last_in_gv == (GV *)&PL_sv_undef)
448 PL_last_in_gv = NULL;
450 assert(isGV_with_GP(PL_last_in_gv));
453 return do_readline();
/* pp_eq fragment: numeric '==' — fast path when both sides are plain
 * (non-UV) IVs, otherwise fall back to do_ncmp(). */
461 tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
465 (SvIOK_notUV(left) && SvIOK_notUV(right))
466 ? (SvIVX(left) == SvIVX(right))
467 : ( do_ncmp(left, right) == 0)
/* pp_preinc fragment: ++$x — inline increment when the SV is a plain
 * IOK-only IV with no magic/overload and won't overflow IV_MAX. */
473 /* also used for: pp_i_preinc() */
477 SV *sv = *PL_stack_sp;
479 if (LIKELY(((sv->sv_flags &
480 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
481 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
483 && SvIVX(sv) != IV_MAX)
485 SvIV_set(sv, SvIVX(sv) + 1);
487 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
/* pp_predec fragment: --$x — mirror of the above, guarding IV_MIN. */
494 /* also used for: pp_i_predec() */
498 SV *sv = *PL_stack_sp;
500 if (LIKELY(((sv->sv_flags &
501 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
502 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
504 && SvIVX(sv) != IV_MIN)
506 SvIV_set(sv, SvIVX(sv) - 1);
508 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
/* pp_or fragment: short-circuit '||' — presumably when the LHS is false,
 * jump to cLOGOP->op_other for the RHS (test elided by extraction). */
515 /* also used for: pp_orassign() */
524 if (PL_op->op_type == OP_OR)
526 RETURNOP(cLOGOP->op_other);
/* pp_defined fragment (shared with pp_dor/pp_dorassign): definedness
 * test, with special cases for AV/HV/CV bodies and tied aggregates. */
531 /* also used for: pp_dor() pp_dorassign() */
538 const int op_type = PL_op->op_type;
539 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);
544 if (UNLIKELY(!sv || !SvANY(sv))) {
545 if (op_type == OP_DOR)
547 RETURNOP(cLOGOP->op_other);
553 if (UNLIKELY(!sv || !SvANY(sv)))
/* aggregates count as defined if non-empty, magical, or tied */
558 switch (SvTYPE(sv)) {
560 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
564 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
568 if (CvROOT(sv) || CvXSUB(sv))
581 if(op_type == OP_DOR)
583 RETURNOP(cLOGOP->op_other);
585 /* assuming OP_DEFINED */
/* pp_add fragment: numeric '+'.  Fast paths for plain IV+IV and NV+NV
 * pairs; otherwise careful all-unsigned IV/UV arithmetic that detects
 * overflow (falling back to NV addition when it would overflow). */
595 dSP; dATARGET; bool useleft; SV *svl, *svr;
597 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
601 #ifdef PERL_PRESERVE_IVUV
603 /* special-case some simple common cases */
604 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
606 U32 flags = (svl->sv_flags & svr->sv_flags);
607 if (flags & SVf_IOK) {
608 /* both args are simple IVs */
/* extract the top two bits of each operand to prove no overflow */
613 topl = ((UV)il) >> (UVSIZE * 8 - 2);
614 topr = ((UV)ir) >> (UVSIZE * 8 - 2);
616 /* if both are in a range that can't under/overflow, do a
617 * simple integer add: if the top of both numbers
618 * are 00 or 11, then it's safe */
619 if (!( ((topl+1) | (topr+1)) & 2)) {
621 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
627 else if (flags & SVf_NOK) {
628 /* both args are NVs */
633 #if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
634 !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
635 && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
637 nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
640 /* nothing was lost by converting to IVs */
643 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
651 useleft = USE_LEFT(svl);
652 /* We must see if we can perform the addition with integers if possible,
653 as the integer code detects overflow while the NV code doesn't.
654 If either argument hasn't had a numeric conversion yet attempt to get
655 the IV. It's important to do this now, rather than just assuming that
656 it's not IOK as a PV of "9223372036854775806" may not take well to NV
657 addition, and an SV which is NOK, NV=6.0 ought to be coerced to
658 integer in case the second argument is IV=9223372036854775806
659 We can (now) rely on sv_2iv to do the right thing, only setting the
660 public IOK flag if the value in the NV (or PV) slot is truly integer.
662 A side effect is that this also aggressively prefers integer maths over
663 fp maths for integer values.
665 How to detect overflow?
667 C 99 section 6.2.6.1 says
669 The range of nonnegative values of a signed integer type is a subrange
670 of the corresponding unsigned integer type, and the representation of
671 the same value in each type is the same. A computation involving
672 unsigned operands can never overflow, because a result that cannot be
673 represented by the resulting unsigned integer type is reduced modulo
674 the number that is one greater than the largest value that can be
675 represented by the resulting type.
679 which I read as "unsigned ints wrap."
681 signed integer overflow seems to be classed as "exception condition"
683 If an exceptional condition occurs during the evaluation of an
684 expression (that is, if the result is not mathematically defined or not
685 in the range of representable values for its type), the behavior is
688 (6.5, the 5th paragraph)
690 I had assumed that on 2s complement machines signed arithmetic would
691 wrap, hence coded pp_add and pp_subtract on the assumption that
692 everything perl builds on would be happy. After much wailing and
693 gnashing of teeth it would seem that irix64 knows its ANSI spec well,
694 knows that it doesn't need to, and doesn't. Bah. Anyway, the all-
695 unsigned code below is actually shorter than the old code. :-)
698 if (SvIV_please_nomg(svr)) {
699 /* Unless the left argument is integer in range we are going to have to
700 use NV maths. Hence only attempt to coerce the right argument if
701 we know the left is integer. */
709 /* left operand is undef, treat as zero. + 0 is identity,
710 Could SETi or SETu right now, but space optimise by not adding
711 lots of code to speed up what is probably a rarish case. */
713 /* Left operand is defined, so is it IV? */
714 if (SvIV_please_nomg(svl)) {
715 if ((auvok = SvUOK(svl)))
718 const IV aiv = SvIVX(svl);
721 auvok = 1; /* Now acting as a sign flag. */
/* magnitude of a negative IV; IV_MIN needs the cast-only form */
723 auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
730 bool result_good = 0;
733 bool buvok = SvUOK(svr);
738 const IV biv = SvIVX(svr);
743 buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
745 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
746 else "IV" now, independent of how it came in.
747 if a, b represents positive, A, B negative, a maps to -A etc
752 all UV maths. negate result if A negative.
753 add if signs same, subtract if signs differ. */
759 /* Must get smaller */
765 /* result really should be -(auv-buv). as its negation
766 of true value, need to swap our result flag */
783 if (result <= (UV)IV_MIN)
784 SETi(result == (UV)IV_MIN
785 ? IV_MIN : -(IV)result);
787 /* result valid, but out of range for IV. */
792 } /* Overflow, drop through to NVs. */
/* NV fallback: plain floating-point addition */
797 useleft = USE_LEFT(svl);
801 NV value = SvNV_nomg(svr);
804 /* left operand is undef, treat as zero. + 0.0 is identity. */
808 SETn( value + SvNV_nomg(svl) );
/* pp_aelemfast fragment: fetch $a[N] where N is a compile-time-constant
 * signed 8-bit index stored in op_private; the array is either a pad
 * lexical (OP_AELEMFAST_LEX) or the glob's AV. */
814 /* also used for: pp_aelemfast_lex() */
819 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
820 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
821 const U32 lval = PL_op->op_flags & OPf_MOD;
822 const I8 key = (I8)PL_op->op_private;
826 assert(SvTYPE(av) == SVt_PVAV);
830 /* inlined av_fetch() for simple cases ... */
831 if (!SvRMAGICAL(av) && key >= 0 && key <= AvFILLp(av)) {
832 sv = AvARRAY(av)[key];
833 if (sv && !SvIS_FREED(sv)) {
839 /* ... else do it the hard way */
840 svp = av_fetch(av, key, lval);
841 sv = (svp ? *svp : &PL_sv_undef);
843 if (UNLIKELY(!svp && lval))
844 DIE(aTHX_ PL_no_aelem, (int)key);
846 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
/* pp_join fragment. */
856 do_join(TARG, *MARK, MARK, SP);
/* pp_pushre fragment: smuggle the OP* inside a PVLV's target offset.
 * NOTE(review): relies on the layout assumption documented below. */
867 * We ass_u_me that LvTARGOFF() comes first, and that two STRLENs
868 * will be enough to hold an OP*.
870 SV* const sv = sv_newmortal();
871 sv_upgrade(sv, SVt_PVLV);
873 Copy(&PL_op, &LvTARGOFF(sv), 1, OP*);
876 XPUSHs(MUTABLE_SV(PL_op));
/* pp_print fragment (shared with pp_say): resolve the output handle
 * (stacked or $defoutgv), delegate to the tie PRINT method for tied
 * handles, otherwise print each list item with $, between and $\ (or
 * "\n" for say) after, flushing if the handle is in autoflush mode. */
881 /* Oversized hot code. */
883 /* also used for: pp_say() */
887 dSP; dMARK; dORIGMARK;
891 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;
895 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
898 if (MARK == ORIGMARK) {
899 /* If using default handle then we need to make space to
900 * pass object as 1st arg, so move other args up ...
904 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
907 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
909 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
910 | (PL_op->op_type == OP_SAY
911 ? TIED_METHOD_SAY : 0)), sp - mark);
/* retry the tie check through the effective GV */
914 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
915 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
918 SETERRNO(EBADF,RMS_IFI);
921 else if (!(fp = IoOFP(io))) {
923 report_wrongway_fh(gv, '<');
926 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);
930 SV * const ofs = GvSV(PL_ofsgv); /* $, */
932 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
934 if (!do_print(*MARK, fp))
938 /* don't use 'ofs' here - it may be invalidated by magic callbacks */
939 if (!do_print(GvSV(PL_ofsgv), fp)) {
948 if (!do_print(*MARK, fp))
956 if (PL_op->op_type == OP_SAY) {
957 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))
960 else if (PL_ors_sv && SvOK(PL_ors_sv))
961 if (!do_print(PL_ors_sv, fp)) /* $\ */
964 if (IoFLAGS(io) & IOf_FLUSH)
965 if (PerlIO_flush(fp) == EOF)
975 XPUSHs(&PL_sv_undef);
/* pp_rv2av fragment (shared with pp_rv2hv, called by pp_lvavref):
 * dereference an AV/HV ref (with overload support), or go via a glob /
 * soft reference, then produce the aggregate's list, count, boolean or
 * scalar form depending on context. */
980 /* also used for: pp_rv2hv() */
981 /* also called directly by pp_lvavref */
986 const U8 gimme = GIMME_V;
987 static const char an_array[] = "an ARRAY";
988 static const char a_hash[] = "a HASH";
989 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
990 || PL_op->op_type == OP_LVAVREF;
991 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
995 if (UNLIKELY(SvAMAGIC(sv))) {
996 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
999 if (UNLIKELY(SvTYPE(sv) != type))
1000 /* diag_listed_as: Not an ARRAY reference */
1001 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
1002 else if (UNLIKELY(PL_op->op_flags & OPf_MOD
1003 && PL_op->op_private & OPpLVAL_INTRO))
1004 Perl_croak(aTHX_ "%s", PL_no_localize_ref);
/* not a direct AV/HV: resolve via soft ref or glob */
1006 else if (UNLIKELY(SvTYPE(sv) != type)) {
1009 if (!isGV_with_GP(sv)) {
1010 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
1016 gv = MUTABLE_GV(sv);
1018 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
1019 if (PL_op->op_private & OPpLVAL_INTRO)
1020 sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
1022 if (PL_op->op_flags & OPf_REF) {
1026 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
1027 const I32 flags = is_lvalue_sub();
1028 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1029 if (gimme != G_ARRAY)
1030 goto croak_cant_return;
1037 AV *const av = MUTABLE_AV(sv);
1038 /* The guts of pp_rv2av */
1039 if (gimme == G_ARRAY) {
1045 else if (gimme == G_SCALAR) {
1047 const SSize_t maxarg = AvFILL(av) + 1;
1051 /* The guts of pp_rv2hv */
1052 if (gimme == G_ARRAY) { /* array wanted */
1054 return Perl_do_kv(aTHX);
/* boolean-only context: just test whether the hash has any keys */
1056 else if ((PL_op->op_private & OPpTRUEBOOL
1057 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
1058 && block_gimme() == G_VOID ))
1059 && (!SvRMAGICAL(sv) || !mg_find(sv, PERL_MAGIC_tied)))
1060 SETs(HvUSEDKEYS(sv) ? &PL_sv_yes : sv_2mortal(newSViv(0)));
1061 else if (gimme == G_SCALAR) {
1063 TARG = Perl_hv_scalar(aTHX_ MUTABLE_HV(sv));
1070 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
1071 is_pp_rv2av ? "array" : "hash");
/* S_do_oddball fragment: emit the appropriate warning for a hash
 * assignment with an odd number of RHS elements — a special message when
 * the single odd element is an array/hash reference (a likely mistake),
 * the generic one otherwise. */
1076 S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
1078 PERL_ARGS_ASSERT_DO_ODDBALL;
1081 if (ckWARN(WARN_MISC)) {
1083 if (oddkey == firstkey &&
1085 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
1086 SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
1088 err = "Reference found where even-sized list expected";
1091 err = "Odd number of elements in hash assignment";
1092 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
1099 /* Do a mark and sweep with the SVf_BREAK flag to detect elements which
1100 * are common to both the LHS and RHS of an aassign, and replace them
1101 * with copies. All these copies are made before the actual list assign is
1104 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
1105 * element ($b) to the first LH element ($a), modifies $a; when the
1106 * second assignment is done, the second RH element now has the wrong
1107 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
1108 * Note that we don't need to make a mortal copy of $b.
1110 * The algorithm below works by, for every RHS element, mark the
1111 * corresponding LHS target element with SVf_BREAK. Then if the RHS
1112 * element is found with SVf_BREAK set, it means it would have been
1113 * modified, so make a copy.
1114 * Note that by scanning both LHS and RHS in lockstep, we avoid
1115 * unnecessary copies (like $b above) compared with a naive
1116 * "mark all LHS; copy all marked RHS; unmark all LHS".
1118 * If the LHS element is a 'my' declaration' and has a refcount of 1, then
1119 * it can't be common and can be skipped.
1121 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
1122 * that we thought we didn't need to call S_aassign_copy_common(), but we
1123 * have anyway for sanity checking. If we find we need to copy, then panic.
1126 PERL_STATIC_INLINE void
1127 S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
1128 SV **firstrelem, SV **lastrelem
1137 SSize_t lcount = lastlelem - firstlelem + 1;
1138 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
1139 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
1140 bool copy_all = FALSE;
1142 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
1143 assert(firstlelem < lastlelem); /* at least 2 LH elements */
1144 assert(firstrelem < lastrelem); /* at least 2 RH elements */
1148 /* we never have to copy the first RH element; it can't be corrupted
1149 * by assigning something to the corresponding first LH element.
1150 * So this scan does in a loop: mark LHS[N]; test RHS[N+1]
1152 relem = firstrelem + 1;
1154 for (; relem <= lastrelem; relem++) {
1157 /* mark next LH element */
1159 if (--lcount >= 0) {
1162 if (UNLIKELY(!svl)) {/* skip AV alias marker */
1163 assert (lelem <= lastlelem);
1169 if (SvSMAGICAL(svl)) {
1172 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
1175 /* this LH element will consume all further args;
1176 * no need to mark any further LH elements (if any).
1177 * But we still need to scan any remaining RHS elements;
1178 * set lcount negative to distinguish from lcount == 0,
1179 * so the loop condition continues being true
1182 lelem--; /* no need to unmark this element */
1184 else if (!(do_rc1 && SvREFCNT(svl) == 1) && svl != &PL_sv_undef) {
1185 assert(!SvIMMORTAL(svl));
1186 SvFLAGS(svl) |= SVf_BREAK;
1190 /* don't check RH element if no SVf_BREAK flags set yet */
1197 /* see if corresponding RH element needs copying */
1203 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
1207 /* op_dump(PL_op); */
1209 "panic: aassign skipped needed copy of common RH elem %"
1210 UVuf, (UV)(relem - firstrelem));
1214 TAINT_NOT; /* Each item is independent */
1216 /* Dear TODO test in t/op/sort.t, I love you.
1217 (It's relying on a panic, not a "semi-panic" from newSVsv()
1218 and then an assertion failure below.) */
1219 if (UNLIKELY(SvIS_FREED(svr))) {
1220 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
1223 /* avoid break flag while copying; otherwise COW etc
1225 SvFLAGS(svr) &= ~SVf_BREAK;
1226 /* Not newSVsv(), as it does not allow copy-on-write,
1227 resulting in wasteful copies.
1228 Also, we use SV_NOSTEAL in case the SV is used more than
1229 once, e.g. (...) = (f())[0,0]
1230 Where the same SV appears twice on the RHS without a ref
1231 count bump. (Although I suspect that the SV won't be
1232 stealable here anyway - DAPM).
1234 *relem = sv_mortalcopy_flags(svr,
1235 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1236 /* ... but restore afterwards in case it's needed again,
1237 * e.g. ($a,$b,$c) = (1,$a,$a)
1239 SvFLAGS(svr) |= SVf_BREAK;
/* sweep phase: clear every SVf_BREAK mark we set on the LHS */
1251 while (lelem > firstlelem) {
1252 SV * const svl = *(--lelem);
1254 SvFLAGS(svl) &= ~SVf_BREAK;
/* pp_aassign fragment (entry): locate the LHS and RHS element ranges on
 * the stack (two marks delimit them), suspend set-magic via DM_DELAY so
 * e.g. ($<,$>) assignments happen atomically, and probe both sides for
 * magic that could make elements "common". */
1263 SV **lastlelem = PL_stack_sp;
1264 SV **lastrelem = PL_stack_base + POPMARK;
1265 SV **firstrelem = PL_stack_base + POPMARK + 1;
1266 SV **firstlelem = lastrelem + 1;
1279 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
1280 * only need to save locally, not on the save stack */
1281 U16 old_delaymagic = PL_delaymagic;
1286 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
1288 /* If there's a common identifier on both sides we have to take
1289 * special care that assigning the identifier on the left doesn't
1290 * clobber a value on the right that's used later in the list.
1293 /* at least 2 LH and RH elements, or commonality isn't an issue */
1294 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1295 for (relem = firstrelem+1; relem <= lastrelem; relem++) {
1296 if (SvGMAGICAL(*relem))
1299 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1300 if (*lelem && SvSMAGICAL(*lelem))
/* pp_aassign fragment: decide whether the mark-and-sweep scan for
 * elements common to both sides of the list assignment is required.
 * FIX(review): original line 1310 tested 'SvTYPE(sv) != SVt_PVAV' twice —
 * a copy-paste defect (PVS-Studio V501, fixed in upstream perl).  The
 * second comparison should be against SVt_PVHV: a refcount>1 LHS element
 * only forces the full scan when it is a plain scalar; aggregates (AV/HV)
 * are handled by the separate OPpASSIGN_COMMON_AGG logic.  Confirm
 * against upstream pp_hot.c before merging. */
1303 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
1304 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
1305 /* skip the scan if all scalars have a ref count of 1 */
1306 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1308 if (!sv || SvREFCNT(sv) == 1)
1310 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVHV)
1317 S_aassign_copy_common(aTHX_
1318 firstlelem, lastlelem, firstrelem, lastrelem
1328 /* on debugging builds, do the scan even if we've concluded we
1329 * don't need to, then panic if we find commonality. Note that the
1330 * scanner assumes at least 2 elements */
1331 if (firstlelem < lastlelem && firstrelem < lastrelem) {
/* pp_aassign fragment (array LHS): an AV on the left consumes all
 * remaining RHS elements.  With possible commonality (COMMON_AGG), every
 * RHS value is mortal-copied *before* the array is cleared so nothing is
 * freed prematurely; otherwise values are copied as they are stored. */
1339 lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
1346 while (LIKELY(lelem <= lastlelem)) {
1348 TAINT_NOT; /* Each item stands on its own, taintwise. */
1350 if (UNLIKELY(!sv)) {
1353 ASSUME(SvTYPE(sv) == SVt_PVAV);
1355 switch (SvTYPE(sv)) {
1357 bool already_copied = FALSE;
1358 ary = MUTABLE_AV(sv);
1359 magic = SvMAGICAL(ary) != 0;
/* keep the array alive across the clear/store even if we die */
1361 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1363 /* We need to clear ary. The is a danger that if we do this,
1364 * elements on the RHS may be prematurely freed, e.g.
1366 * In the case of possible commonality, make a copy of each
1367 * RHS SV *before* clearing the array, and add a reference
1368 * from the tmps stack, so that it doesn't leak on death.
1369 * Otherwise, make a copy of each RHS SV only as we're storing
1370 * it into the array - that way we don't have to worry about
1371 * it being leaked if we die, but don't incur the cost of
1372 * mortalising everything.
1375 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1376 && (relem <= lastrelem)
1377 && (magic || AvFILL(ary) != -1))
1380 EXTEND_MORTAL(lastrelem - relem + 1);
1381 for (svp = relem; svp <= lastrelem; svp++) {
1382 /* see comment in S_aassign_copy_common about SV_NOSTEAL */
1383 *svp = sv_mortalcopy_flags(*svp,
1384 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1387 already_copied = TRUE;
1391 if (relem <= lastrelem)
1392 av_extend(ary, lastrelem - relem);
1395 while (relem <= lastrelem) { /* gobble up all the rest */
1397 if (LIKELY(!alias)) {
1402 /* before newSV, in case it dies */
1405 /* see comment in S_aassign_copy_common about
1407 sv_setsv_flags(sv, *relem,
1408 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
/* alias path (\(@a) = ...): RHS elements must be scalar refs */
1413 if (!already_copied)
1416 DIE(aTHX_ "Assigned value is not a reference");
1417 if (SvTYPE(SvRV(*relem)) > SVt_PVLV)
1418 /* diag_listed_as: Assigned value is not %s reference */
1420 "Assigned value is not a SCALAR reference");
1421 if (lval && !already_copied)
1422 *relem = sv_mortalcopy(*relem);
1423 /* XXX else check for weak refs? */
1424 sv = SvREFCNT_inc_NN(SvRV(*relem));
1428 SvREFCNT_inc_simple_void_NN(sv); /* undo mortal free */
1429 didstore = av_store(ary,i++,sv);
/* fire @ISA magic once, after all stores, if it was delayed */
1438 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
1439 SvSETMAGIC(MUTABLE_SV(ary));
/* pp_aassign fragment (hash LHS): an HV on the left consumes the rest of
 * the RHS as key/value pairs; warns and pads with undef on an odd count,
 * and in list context compacts duplicate keys and refreshes the stack
 * with the hash's final values. */
1444 case SVt_PVHV: { /* normal hash */
1448 SV** topelem = relem;
1449 SV **firsthashrelem = relem;
1450 bool already_copied = FALSE;
1452 hash = MUTABLE_HV(sv);
1453 magic = SvMAGICAL(hash) != 0;
1455 odd = ((lastrelem - firsthashrelem)&1)? 0 : 1;
1456 if (UNLIKELY(odd)) {
1457 do_oddball(lastrelem, firsthashrelem);
1458 /* we have firstlelem to reuse, it's not needed anymore
1460 *(lastrelem+1) = &PL_sv_undef;
/* keep the hash alive across the clear/store even if we die */
1464 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1466 /* We need to clear hash. The is a danger that if we do this,
1467 * elements on the RHS may be prematurely freed, e.g.
1468 * %h = (foo => $h{bar});
1469 * In the case of possible commonality, make a copy of each
1470 * RHS SV *before* clearing the hash, and add a reference
1471 * from the tmps stack, so that it doesn't leak on death.
1474 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1475 && (relem <= lastrelem)
1476 && (magic || HvUSEDKEYS(hash)))
1479 EXTEND_MORTAL(lastrelem - relem + 1);
1480 for (svp = relem; svp <= lastrelem; svp++) {
1481 *svp = sv_mortalcopy_flags(*svp,
1482 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1485 already_copied = TRUE;
1490 while (LIKELY(relem < lastrelem+odd)) { /* gobble up all the rest */
1493 /* Copy the key if aassign is called in lvalue context,
1494 to avoid having the next op modify our rhs. Copy
1495 it also if it is gmagical, lest it make the
1496 hv_store_ent call below croak, leaking the value. */
1497 sv = (lval || SvGMAGICAL(*relem)) && !already_copied
1498 ? sv_mortalcopy(*relem)
1507 sv_setsv_nomg(tmpstr,*relem++); /* value */
1510 if (gimme == G_ARRAY) {
1511 if (hv_exists_ent(hash, sv, 0))
1512 /* key overwrites an existing entry */
1515 /* copy element back: possibly to an earlier
1516 * stack location if we encountered dups earlier,
1517 * possibly to a later stack location if odd */
1519 *topelem++ = tmpstr;
1523 SvREFCNT_inc_simple_void_NN(tmpstr); /* undo mortal free */
1524 didstore = hv_store_ent(hash,sv,tmpstr,0);
1526 if (!didstore) sv_2mortal(tmpstr);
1532 if (duplicates && gimme == G_ARRAY) {
1533 /* at this point we have removed the duplicate key/value
1534 * pairs from the stack, but the remaining values may be
1535 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
1536 * the (a 2), but the stack now probably contains
1537 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
1538 * obliterates the earlier key. So refresh all values. */
1539 lastrelem -= duplicates;
1540 relem = firsthashrelem;
1541 while (relem < lastrelem+odd) {
1543 he = hv_fetch_ent(hash, *relem++, 0, 0);
1544 *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
1547 if (odd && gimme == G_ARRAY) lastrelem++;
/* default (scalar LHS element) case follows */
1551 if (SvIMMORTAL(sv)) {
1552 if (relem <= lastrelem)
1556 if (relem <= lastrelem) {
1558 SvTEMP(sv) && !SvSMAGICAL(sv) && SvREFCNT(sv) == 1 &&
1559 (!isGV_with_GP(sv) || SvFAKE(sv)) && ckWARN(WARN_MISC)
1562 packWARN(WARN_MISC),
1563 "Useless assignment to a temporary"
1565 sv_setsv(sv, *relem);
/* more LHS scalars than RHS values: assign undef to the leftovers */
1569 sv_setsv(sv, &PL_sv_undef);
/* pp_aassign fragment (tail): apply delayed $<,$>,$(,$) magic atomically
 * using the best available set*uid/set*gid primitive, then recompute the
 * taint flag from the resulting real/effective ids and restore the saved
 * PL_delaymagic.  In void context the stack is truncated below the RHS. */
1574 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
1575 /* Will be used to set PL_tainting below */
1576 Uid_t tmp_uid = PerlProc_getuid();
1577 Uid_t tmp_euid = PerlProc_geteuid();
1578 Gid_t tmp_gid = PerlProc_getgid();
1579 Gid_t tmp_egid = PerlProc_getegid();
1581 /* XXX $> et al currently silently ignore failures */
1582 if (PL_delaymagic & DM_UID) {
1583 #ifdef HAS_SETRESUID
1585 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1586 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
1589 # ifdef HAS_SETREUID
1591 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1592 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
1595 if ((PL_delaymagic & DM_UID) == DM_RUID) {
1596 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
1597 PL_delaymagic &= ~DM_RUID;
1599 # endif /* HAS_SETRUID */
1601 if ((PL_delaymagic & DM_UID) == DM_EUID) {
1602 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
1603 PL_delaymagic &= ~DM_EUID;
1605 # endif /* HAS_SETEUID */
1606 if (PL_delaymagic & DM_UID) {
1607 if (PL_delaymagic_uid != PL_delaymagic_euid)
1608 DIE(aTHX_ "No setreuid available");
1609 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
1611 # endif /* HAS_SETREUID */
1612 #endif /* HAS_SETRESUID */
1614 tmp_uid = PerlProc_getuid();
1615 tmp_euid = PerlProc_geteuid();
1617 /* XXX $> et al currently silently ignore failures */
1618 if (PL_delaymagic & DM_GID) {
1619 #ifdef HAS_SETRESGID
1621 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1622 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
1625 # ifdef HAS_SETREGID
1627 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1628 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
1631 if ((PL_delaymagic & DM_GID) == DM_RGID) {
1632 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
1633 PL_delaymagic &= ~DM_RGID;
1635 # endif /* HAS_SETRGID */
1637 if ((PL_delaymagic & DM_GID) == DM_EGID) {
1638 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
1639 PL_delaymagic &= ~DM_EGID;
1641 # endif /* HAS_SETEGID */
1642 if (PL_delaymagic & DM_GID) {
1643 if (PL_delaymagic_gid != PL_delaymagic_egid)
1644 DIE(aTHX_ "No setregid available");
1645 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
1647 # endif /* HAS_SETREGID */
1648 #endif /* HAS_SETRESGID */
1650 tmp_gid = PerlProc_getgid();
1651 tmp_egid = PerlProc_getegid();
/* taint mode turns on if real/effective ids now differ */
1653 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
1654 #ifdef NO_TAINT_SUPPORT
1655 PERL_UNUSED_VAR(tmp_uid);
1656 PERL_UNUSED_VAR(tmp_euid);
1657 PERL_UNUSED_VAR(tmp_gid);
1658 PERL_UNUSED_VAR(tmp_egid);
1661 PL_delaymagic = old_delaymagic;
1663 if (gimme == G_VOID)
1664 SP = firstrelem - 1;
1665 else if (gimme == G_SCALAR) {
1668 SETi(lastrelem - firstrelem + 1);
1672 /* note that in this case *firstlelem may have been overwritten
1673 by sv_undef in the odd hash case */
1676 SP = firstrelem + (lastlelem - firstlelem);
1677 lelem = firstlelem + (relem - firstrelem);
1679 *relem++ = (lelem <= lastlelem) ? *lelem++ : &PL_sv_undef;
/* Fragment of pp_qr (the qr// operator).  NOTE(review): this extract is an
 * elided listing — the function header/footer and some statements between
 * the numbered lines below are missing from view. */
1689 PMOP * const pm = cPMOP;
1690 REGEXP * rx = PM_GETRE(pm);
1691 SV * const pkg = rx ? CALLREG_PACKAGE(rx) : NULL;
1692 SV * const rv = sv_newmortal();
1696 SvUPGRADE(rv, SVt_IV);
1697 /* For a subroutine describing itself as "This is a hacky workaround" I'm
1698 loathe to use it here, but it seems to be the right fix. Or close.
1699 The key part appears to be that it's essential for pp_qr to return a new
1700 object (SV), which implies that there needs to be an effective way to
1701 generate a new SV from the existing SV that is pre-compiled in the
1703 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));
/* if the qr// carries a cloneable anonymous CV (closure), clone it and
 * drop the reference to the original */
1706 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
1707 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
1708 *cvp = cv_clone(cv);
1709 SvREFCNT_dec_NN(cv);
/* bless the new reference into the package named by pkg (obtained from
 * CALLREG_PACKAGE above), creating the stash if needed */
1713 HV *const stash = gv_stashsv(pkg, GV_ADD);
1714 SvREFCNT_dec_NN(pkg);
1715 (void)sv_bless(rv, stash);
/* propagate taint from the compiled pattern to the returned object */
1718 if (UNLIKELY(RX_ISTAINTED(rx))) {
1720 SvTAINTED_on(SvRV(rv));
/* Fragment of pp_match (m// execution): sets up the target string and taint
 * state, handles ??-once patterns, //g pos() magic and $&/$'/$` copy flags,
 * runs CALLREGEXEC, and pushes captured groups in list context.
 * NOTE(review): this extract is an elided listing — statements between the
 * numbered lines below (including braces/labels) are missing from view. */
1733 SSize_t curpos = 0; /* initial pos() or current $+[0] */
1736 const char *truebase; /* Start of string */
1737 REGEXP *rx = PM_GETRE(pm);
1739 const U8 gimme = GIMME_V;
1741 const I32 oldsave = PL_savestack_ix;
1742 I32 had_zerolen = 0;
1745 if (PL_op->op_flags & OPf_STACKED)
1754 PUTBACK; /* EVAL blocks need stack_sp. */
1755 /* Skip get-magic if this is a qr// clone, because regcomp has
1757 truebase = ReANY(rx)->mother_re
1758 ? SvPV_nomg_const(TARG, len)
1759 : SvPV_const(TARG, len);
1761 DIE(aTHX_ "panic: pp_match");
1762 strend = truebase + len;
1763 rxtainted = (RX_ISTAINTED(rx) ||
1764 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
1767 /* We need to know this in case we fail out early - pos() must be reset */
1768 global = dynpm->op_pmflags & PMf_GLOBAL;
1770 /* PMdf_USED is set after a ?? matches once */
1773 SvREADONLY(PL_regex_pad[pm->op_pmoffset])
1775 pm->op_pmflags & PMf_USED
1778 DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once"));
1782 /* empty pattern special-cased to use last successful pattern if
1783 possible, except for qr// */
1784 if (!ReANY(rx)->mother_re && !RX_PRELEN(rx)
1790 if (RX_MINLEN(rx) >= 0 && (STRLEN)RX_MINLEN(rx) > len) {
1791 DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
1792 UVuf" < %"IVdf")\n",
1793 (UV)len, (IV)RX_MINLEN(rx)));
1797 /* get pos() if //g */
1799 mg = mg_find_mglob(TARG);
1800 if (mg && mg->mg_len >= 0) {
1801 curpos = MgBYTEPOS(mg, TARG, truebase, len);
1802 /* last time pos() was set, it was zero-length match */
1803 if (mg->mg_flags & MGf_MINMATCH)
1808 #ifdef PERL_SAWAMPERSAND
1811 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
1812 || (dynpm->op_pmflags & PMf_KEEPCOPY)
1816 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
1817 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
1818 * only on the first iteration. Therefore we need to copy $' as well
1819 * as $&, to make the rest of the string available for captures in
1820 * subsequent iterations */
1821 if (! (global && gimme == G_ARRAY))
1822 r_flags |= REXEC_COPY_SKIP_POST;
1824 #ifdef PERL_SAWAMPERSAND
1825 if (dynpm->op_pmflags & PMf_KEEPCOPY)
1826 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
1827 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
1834 s = truebase + curpos;
1836 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
1837 had_zerolen, TARG, NULL, r_flags))
1841 if (dynpm->op_pmflags & PMf_ONCE)
1843 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
1845 dynpm->op_pmflags |= PMf_USED;
1849 RX_MATCH_TAINTED_on(rx);
1850 TAINT_IF(RX_MATCH_TAINTED(rx));
1854 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
1856 mg = sv_magicext_mglob(TARG);
1857 MgBYTEPOS_set(mg, TARG, truebase, RX_OFFS(rx)[0].end);
1858 if (RX_ZERO_LEN(rx))
1859 mg->mg_flags |= MGf_MINMATCH;
1861 mg->mg_flags &= ~MGf_MINMATCH;
1864 if ((!RX_NPARENS(rx) && !global) || gimme != G_ARRAY) {
1865 LEAVE_SCOPE(oldsave);
1869 /* push captures on stack */
1872 const I32 nparens = RX_NPARENS(rx);
1873 I32 i = (global && !nparens) ? 1 : 0;
1875 SPAGAIN; /* EVAL blocks could move the stack. */
1876 EXTEND(SP, nparens + i);
1877 EXTEND_MORTAL(nparens + i);
1878 for (i = !i; i <= nparens; i++) {
1879 PUSHs(sv_newmortal());
1880 if (LIKELY((RX_OFFS(rx)[i].start != -1)
1881 && RX_OFFS(rx)[i].end != -1 ))
1883 const I32 len = RX_OFFS(rx)[i].end - RX_OFFS(rx)[i].start;
1884 const char * const s = RX_OFFS(rx)[i].start + truebase;
1885 if (UNLIKELY(RX_OFFS(rx)[i].end < 0 || RX_OFFS(rx)[i].start < 0
1886 || len < 0 || len > strend - s))
1887 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
1888 "start=%ld, end=%ld, s=%p, strend=%p, len=%"UVuf,
1889 (long) i, (long) RX_OFFS(rx)[i].start,
1890 (long)RX_OFFS(rx)[i].end, s, strend, (UV) len);
1891 sv_setpvn(*SP, s, len);
1892 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
1897 curpos = (UV)RX_OFFS(rx)[0].end;
1898 had_zerolen = RX_ZERO_LEN(rx);
1899 PUTBACK; /* EVAL blocks may use stack */
1900 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
1903 LEAVE_SCOPE(oldsave);
1906 NOT_REACHED; /* NOTREACHED */
1909 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
1911 mg = mg_find_mglob(TARG);
1915 LEAVE_SCOPE(oldsave);
1916 if (gimme == G_ARRAY)
/* Fragment of Perl_do_readline: implements <FH>, <> (ARGV magic), glob and
 * .= <FH> (OP_RCATLINE).  Dispatches to a tied READLINE method if the
 * handle is tied, walks @ARGV via nextargv() for <>, reads lines with
 * sv_gets(), and handles chomp-like $/ trimming, glob result filtering,
 * UTF-8 validity warnings and buffer-size housekeeping.
 * NOTE(review): this extract is an elided listing — statements between the
 * numbered lines below (including braces and control headers) are missing
 * from view. */
1922 Perl_do_readline(pTHX)
1924 dSP; dTARGETSTACKED;
1929 IO * const io = GvIO(PL_last_in_gv);
1930 const I32 type = PL_op->op_type;
1931 const U8 gimme = GIMME_V;
1934 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
1936 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
1937 if (gimme == G_SCALAR) {
1939 SvSetSV_nosteal(TARG, TOPs);
1949 if (IoFLAGS(io) & IOf_ARGV) {
1950 if (IoFLAGS(io) & IOf_START) {
1952 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) {
1953 IoFLAGS(io) &= ~IOf_START;
1954 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
1955 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
1956 sv_setpvs(GvSVn(PL_last_in_gv), "-");
1957 SvSETMAGIC(GvSV(PL_last_in_gv));
1962 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
1963 if (!fp) { /* Note: fp != IoIFP(io) */
1964 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
1967 else if (type == OP_GLOB)
1968 fp = Perl_start_glob(aTHX_ POPs, io);
1970 else if (type == OP_GLOB)
1972 else if (IoTYPE(io) == IoTYPE_WRONLY) {
1973 report_wrongway_fh(PL_last_in_gv, '>');
1977 if ((!io || !(IoFLAGS(io) & IOf_START))
1978 && ckWARN(WARN_CLOSED)
1981 report_evil_fh(PL_last_in_gv);
1983 if (gimme == G_SCALAR) {
1984 /* undef TARG, and push that undefined value */
1985 if (type != OP_RCATLINE) {
1986 sv_setsv(TARG,NULL);
1993 if (gimme == G_SCALAR) {
1995 if (type == OP_RCATLINE && SvGMAGICAL(sv))
1998 if (type == OP_RCATLINE)
1999 SvPV_force_nomg_nolen(sv);
2003 else if (isGV_with_GP(sv)) {
2004 SvPV_force_nomg_nolen(sv);
2006 SvUPGRADE(sv, SVt_PV);
2007 tmplen = SvLEN(sv); /* remember if already alloced */
2008 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
2009 /* try short-buffering it. Please update t/op/readline.t
2010 * if you change the growth length.
2015 if (type == OP_RCATLINE && SvOK(sv)) {
2017 SvPV_force_nomg_nolen(sv);
2023 sv = sv_2mortal(newSV(80));
2027 /* This should not be marked tainted if the fp is marked clean */
2028 #define MAYBE_TAINT_LINE(io, sv) \
2029 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
2034 /* delay EOF state for a snarfed empty file */
2035 #define SNARF_EOF(gimme,rs,io,sv) \
2036 (gimme != G_SCALAR || SvCUR(sv) \
2037 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
2041 if (!sv_gets(sv, fp, offset)
2043 || SNARF_EOF(gimme, PL_rs, io, sv)
2044 || PerlIO_error(fp)))
2046 PerlIO_clearerr(fp);
2047 if (IoFLAGS(io) & IOf_ARGV) {
2048 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
2051 (void)do_close(PL_last_in_gv, FALSE);
2053 else if (type == OP_GLOB) {
2054 if (!do_close(PL_last_in_gv, FALSE)) {
2055 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
2056 "glob failed (child exited with status %d%s)",
2057 (int)(STATUS_CURRENT >> 8),
2058 (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
2061 if (gimme == G_SCALAR) {
2062 if (type != OP_RCATLINE) {
2063 SV_CHECK_THINKFIRST_COW_DROP(TARG);
2069 MAYBE_TAINT_LINE(io, sv);
2072 MAYBE_TAINT_LINE(io, sv);
2074 IoFLAGS(io) |= IOf_NOLINE;
2078 if (type == OP_GLOB) {
2082 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
2083 char * const tmps = SvEND(sv) - 1;
2084 if (*tmps == *SvPVX_const(PL_rs)) {
2086 SvCUR_set(sv, SvCUR(sv) - 1);
2089 for (t1 = SvPVX_const(sv); *t1; t1++)
2091 if (strchr("*%?", *t1))
2093 if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
2096 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
2097 (void)POPs; /* Unmatched wildcard? Chuck it... */
2100 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
2101 if (ckWARN(WARN_UTF8)) {
2102 const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
2103 const STRLEN len = SvCUR(sv) - offset;
2106 if (!is_utf8_string_loc(s, len, &f))
2107 /* Emulate :encoding(utf8) warning in the same case. */
2108 Perl_warner(aTHX_ packWARN(WARN_UTF8),
2109 "utf8 \"\\x%02X\" does not map to Unicode",
2110 f < (U8*)SvEND(sv) ? *f : 0);
2113 if (gimme == G_ARRAY) {
2114 if (SvLEN(sv) - SvCUR(sv) > 20) {
2115 SvPV_shrink_to_cur(sv);
2117 sv = sv_2mortal(newSV(80));
2120 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
2121 /* try to reclaim a bit of scalar space (only on 1st alloc) */
2122 const STRLEN new_len
2123 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */
2124 SvPV_renew(sv, new_len);
/* Fragment of pp_helem ($hash{key} access): fetches/creates a hash element,
 * handling lvalue context, deferred elements (PVLV defelem magic for
 * non-existent keys), local() restore bookkeeping, OPpDEREF vivification,
 * and get-magic on tied hashes.  NOTE(review): this extract is an elided
 * listing — statements between the numbered lines below are missing. */
2135 SV * const keysv = POPs;
2136 HV * const hv = MUTABLE_HV(POPs);
2137 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2138 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2140 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2141 bool preeminent = TRUE;
2143 if (SvTYPE(hv) != SVt_PVHV)
2150 /* If we can determine whether the element exist,
2151 * Try to preserve the existenceness of a tied hash
2152 * element by using EXISTS and DELETE if possible.
2153 * Fallback to FETCH and STORE otherwise. */
2154 if (SvCANEXISTDELETE(hv))
2155 preeminent = hv_exists_ent(hv, keysv, 0);
2158 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2159 svp = he ? &HeVAL(he) : NULL;
/* element missing: either die (non-deferred lvalue of a tie-less miss is
 * handled elsewhere) or build a deferred-element PVLV proxy */
2161 if (!svp || !*svp || *svp == &PL_sv_undef) {
2165 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2167 lv = sv_newmortal();
2168 sv_upgrade(lv, SVt_PVLV);
2170 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
2171 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
2172 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
/* local($hash{key}): save a GP for stash entries, otherwise save the
 * element (or schedule a delete if it did not pre-exist) */
2178 if (HvNAME_get(hv) && isGV(*svp))
2179 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
2180 else if (preeminent)
2181 save_helem_flags(hv, keysv, svp,
2182 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
2184 SAVEHDELETE(hv, keysv);
2186 else if (PL_op->op_private & OPpDEREF) {
2187 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
2191 sv = (svp && *svp ? *svp : &PL_sv_undef);
2192 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
2193 * was to make C<local $tied{foo} = $tied{foo}> possible.
2194 * However, it seems no longer to be needed for that purpose, and
2195 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g>
2196 * would loop endlessly since the pos magic is getting set on the
2197 * mortal copy and lost. However, the copy has the effect of
2198 * triggering the get magic, and losing it altogether made things like
2199 * c<$tied{foo};> in void context no longer do get magic, which some
2200 * code relied on. Also, delayed triggering of magic on @+ and friends
2201 * meant the original regex may be out of scope by now. So as a
2202 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
2203 * being called too many times). */
2204 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
2211 /* a stripped-down version of Perl_softref2xv() for use by
2212 * pp_multideref(), which doesn't use PL_op->op_flags */
/* Converts a symbolic (soft) reference SV to a GV of the requested type,
 * dying under "use strict 'refs'" or for an empty/undefined name.
 * 'what' is the English noun used in the error message ("an ARRAY" etc.).
 * NOTE(review): elided listing — the return-type line and some statements
 * between the numbered lines below are missing from view. */
2215 S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
2218 if (PL_op->op_private & HINT_STRICT_REFS) {
2220 Perl_die(aTHX_ PL_no_symref_sv, sv,
2221 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
2223 Perl_die(aTHX_ PL_no_usym, what);
2226 Perl_die(aTHX_ PL_no_usym, what);
2227 return gv_fetchsv_nomg(sv, GV_ADD, type);
/* Fragment of pp_multideref: a single op that executes a whole chain of
 * aggregate dereferences and indexings ($a[0]{$k}[$i] etc.) by walking an
 * op_aux action list, with unrolled AV (aelem) and HV (helem) halves that
 * mirror pp_aelem/pp_helem including lvalue/local/exists/delete handling.
 * NOTE(review): this extract is an elided listing — braces, loop headers
 * and statements between the numbered lines below are missing from view. */
2231 /* Handle one or more aggregate derefs and array/hash indexings, e.g.
2232 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
2234 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
2235 * Each of these either contains a set of actions, or an argument, such as
2236 * an IV to use as an array index, or a lexical var to retrieve.
2237 * Several actions re stored per UV; we keep shifting new actions off the
2238 * one UV, and only reload when it becomes zero.
2243 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
2244 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
2245 UV actions = items->uv;
2248 /* this tells find_uninit_var() where we're up to */
2249 PL_multideref_pc = items;
2252 /* there are three main classes of action; the first retrieve
2253 * the initial AV or HV from a variable or the stack; the second
2254 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
2255 * the third an unrolled (/DREFHV, rv2hv, helem).
2257 switch (actions & MDEREF_ACTION_MASK) {
2260 actions = (++items)->uv;
2263 case MDEREF_AV_padav_aelem: /* $lex[...] */
2264 sv = PAD_SVl((++items)->pad_offset);
2267 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
2268 sv = UNOP_AUX_item_sv(++items);
2269 assert(isGV_with_GP(sv));
2270 sv = (SV*)GvAVn((GV*)sv);
2273 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
2278 goto do_AV_rv2av_aelem;
2281 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
2282 sv = UNOP_AUX_item_sv(++items);
2283 assert(isGV_with_GP(sv));
2284 sv = GvSVn((GV*)sv);
2285 goto do_AV_vivify_rv2av_aelem;
2287 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
2288 sv = PAD_SVl((++items)->pad_offset);
2291 do_AV_vivify_rv2av_aelem:
2292 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
2293 /* this is the OPpDEREF action normally found at the end of
2294 * ops like aelem, helem, rv2sv */
2295 sv = vivify_ref(sv, OPpDEREF_AV);
2299 /* this is basically a copy of pp_rv2av when it just has the
2302 if (LIKELY(SvROK(sv))) {
2303 if (UNLIKELY(SvAMAGIC(sv))) {
2304 sv = amagic_deref_call(sv, to_av_amg);
2307 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
2308 DIE(aTHX_ "Not an ARRAY reference");
2310 else if (SvTYPE(sv) != SVt_PVAV) {
2311 if (!isGV_with_GP(sv))
2312 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
2313 sv = MUTABLE_SV(GvAVn((GV*)sv));
2319 /* retrieve the key; this may be either a lexical or package
2320 * var (whose index/ptr is stored as an item) or a signed
2321 * integer constant stored as an item.
2324 IV elem = 0; /* to shut up stupid compiler warnings */
2327 assert(SvTYPE(sv) == SVt_PVAV);
2329 switch (actions & MDEREF_INDEX_MASK) {
2330 case MDEREF_INDEX_none:
2332 case MDEREF_INDEX_const:
2333 elem = (++items)->iv;
2335 case MDEREF_INDEX_padsv:
2336 elemsv = PAD_SVl((++items)->pad_offset);
2338 case MDEREF_INDEX_gvsv:
2339 elemsv = UNOP_AUX_item_sv(++items);
2340 assert(isGV_with_GP(elemsv));
2341 elemsv = GvSVn((GV*)elemsv);
2343 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
2344 && ckWARN(WARN_MISC)))
2345 Perl_warner(aTHX_ packWARN(WARN_MISC),
2346 "Use of reference \"%"SVf"\" as array index",
2348 /* the only time that S_find_uninit_var() needs this
2349 * is to determine which index value triggered the
2350 * undef warning. So just update it here. Note that
2351 * since we don't save and restore this var (e.g. for
2352 * tie or overload execution), its value will be
2353 * meaningless apart from just here */
2354 PL_multideref_pc = items;
2355 elem = SvIV(elemsv);
2360 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
2362 if (!(actions & MDEREF_FLAG_last)) {
2363 SV** svp = av_fetch((AV*)sv, elem, 1);
2364 if (!svp || ! (sv=*svp))
2365 DIE(aTHX_ PL_no_aelem, elem);
2369 if (PL_op->op_private &
2370 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2372 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2373 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
2376 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2377 sv = av_delete((AV*)sv, elem, discard);
2385 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2386 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2387 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2388 bool preeminent = TRUE;
2389 AV *const av = (AV*)sv;
2392 if (UNLIKELY(localizing)) {
2396 /* If we can determine whether the element exist,
2397 * Try to preserve the existenceness of a tied array
2398 * element by using EXISTS and DELETE if possible.
2399 * Fallback to FETCH and STORE otherwise. */
2400 if (SvCANEXISTDELETE(av))
2401 preeminent = av_exists(av, elem);
2404 svp = av_fetch(av, elem, lval && !defer);
2407 if (!svp || !(sv = *svp)) {
2410 DIE(aTHX_ PL_no_aelem, elem);
2411 len = av_tindex(av);
2412 sv = sv_2mortal(newSVavdefelem(av,
2413 /* Resolve a negative index now, unless it points
2414 * before the beginning of the array, in which
2415 * case record it for error reporting in
2416 * magic_setdefelem. */
2417 elem < 0 && len + elem >= 0
2418 ? len + elem : elem, 1));
2421 if (UNLIKELY(localizing)) {
2423 save_aelem(av, elem, svp);
2424 sv = *svp; /* may have changed */
2427 SAVEADELETE(av, elem);
2432 sv = (svp ? *svp : &PL_sv_undef);
2433 /* see note in pp_helem() */
2434 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
2451 case MDEREF_HV_padhv_helem: /* $lex{...} */
2452 sv = PAD_SVl((++items)->pad_offset);
2455 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
2456 sv = UNOP_AUX_item_sv(++items);
2457 assert(isGV_with_GP(sv));
2458 sv = (SV*)GvHVn((GV*)sv);
2461 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
2466 goto do_HV_rv2hv_helem;
2469 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
2470 sv = UNOP_AUX_item_sv(++items);
2471 assert(isGV_with_GP(sv));
2472 sv = GvSVn((GV*)sv);
2473 goto do_HV_vivify_rv2hv_helem;
2475 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
2476 sv = PAD_SVl((++items)->pad_offset);
2479 do_HV_vivify_rv2hv_helem:
2480 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
2481 /* this is the OPpDEREF action normally found at the end of
2482 * ops like aelem, helem, rv2sv */
2483 sv = vivify_ref(sv, OPpDEREF_HV);
2487 /* this is basically a copy of pp_rv2hv when it just has the
2488 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
2491 if (LIKELY(SvROK(sv))) {
2492 if (UNLIKELY(SvAMAGIC(sv))) {
2493 sv = amagic_deref_call(sv, to_hv_amg);
2496 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
2497 DIE(aTHX_ "Not a HASH reference");
2499 else if (SvTYPE(sv) != SVt_PVHV) {
2500 if (!isGV_with_GP(sv))
2501 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
2502 sv = MUTABLE_SV(GvHVn((GV*)sv));
2508 /* retrieve the key; this may be either a lexical / package
2509 * var or a string constant, whose index/ptr is stored as an
2512 SV *keysv = NULL; /* to shut up stupid compiler warnings */
2514 assert(SvTYPE(sv) == SVt_PVHV);
2516 switch (actions & MDEREF_INDEX_MASK) {
2517 case MDEREF_INDEX_none:
2520 case MDEREF_INDEX_const:
2521 keysv = UNOP_AUX_item_sv(++items);
2524 case MDEREF_INDEX_padsv:
2525 keysv = PAD_SVl((++items)->pad_offset);
2528 case MDEREF_INDEX_gvsv:
2529 keysv = UNOP_AUX_item_sv(++items);
2530 keysv = GvSVn((GV*)keysv);
2534 /* see comment above about setting this var */
2535 PL_multideref_pc = items;
2538 /* ensure that candidate CONSTs have been HEKified */
2539 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
2540 || SvTYPE(keysv) >= SVt_PVMG
2543 || SvIsCOW_shared_hash(keysv));
2545 /* this is basically a copy of pp_helem with OPpDEREF skipped */
2547 if (!(actions & MDEREF_FLAG_last)) {
2548 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
2549 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
2550 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2554 if (PL_op->op_private &
2555 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2557 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2558 sv = hv_exists_ent((HV*)sv, keysv, 0)
2559 ? &PL_sv_yes : &PL_sv_no;
2562 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2563 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
2571 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2572 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2573 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2574 bool preeminent = TRUE;
2576 HV * const hv = (HV*)sv;
2579 if (UNLIKELY(localizing)) {
2583 /* If we can determine whether the element exist,
2584 * Try to preserve the existenceness of a tied hash
2585 * element by using EXISTS and DELETE if possible.
2586 * Fallback to FETCH and STORE otherwise. */
2587 if (SvCANEXISTDELETE(hv))
2588 preeminent = hv_exists_ent(hv, keysv, 0);
2591 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2592 svp = he ? &HeVAL(he) : NULL;
2596 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
2600 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2601 lv = sv_newmortal();
2602 sv_upgrade(lv, SVt_PVLV);
2604 sv_magic(lv, key2 = newSVsv(keysv),
2605 PERL_MAGIC_defelem, NULL, 0);
2606 /* sv_magic() increments refcount */
2607 SvREFCNT_dec_NN(key2);
2608 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2614 if (HvNAME_get(hv) && isGV(sv))
2615 save_gp(MUTABLE_GV(sv),
2616 !(PL_op->op_flags & OPf_SPECIAL));
2617 else if (preeminent) {
2618 save_helem_flags(hv, keysv, svp,
2619 (PL_op->op_flags & OPf_SPECIAL)
2620 ? 0 : SAVEf_SETMAGIC);
2621 sv = *svp; /* may have changed */
2624 SAVEHDELETE(hv, keysv);
2629 sv = (svp && *svp ? *svp : &PL_sv_undef);
2630 /* see note in pp_helem() */
2631 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
2640 actions >>= MDEREF_SHIFT;
/* Fragment of pp_iter (foreach loop iterator): advances the loop variable
 * for the four loop kinds — string range (LAZYSV), integer range (LAZYIV),
 * stack list (LOOP_LIST) and array (LOOP_ARY) — reusing the old iterator SV
 * when its refcount is 1, and pushes the boolean result.
 * NOTE(review): this extract is an elided listing — braces and statements
 * between the numbered lines below are missing from view. */
2659 itersvp = CxITERVAR(cx);
2662 switch (CxTYPE(cx)) {
2664 case CXt_LOOP_LAZYSV: /* string increment */
2666 SV* cur = cx->blk_loop.state_u.lazysv.cur;
2667 SV *end = cx->blk_loop.state_u.lazysv.end;
2668 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
2669 It has SvPVX of "" and SvCUR of 0, which is what we want. */
2671 const char *max = SvPV_const(end, maxlen);
2672 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
2676 /* NB: on the first iteration, oldsv will have a ref count of at
2677 * least 2 (one extra from blk_loop.itersave), so the GV or pad
2678 * slot will get localised; on subsequent iterations the RC==1
2679 * optimisation may kick in and the SV will be reused. */
2680 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2681 /* safe to reuse old SV */
2682 sv_setsv(oldsv, cur);
2686 /* we need a fresh SV every time so that loop body sees a
2687 * completely new SV for closures/references to work as
2689 *itersvp = newSVsv(cur);
2690 SvREFCNT_dec(oldsv);
2692 if (strEQ(SvPVX_const(cur), max))
2693 sv_setiv(cur, 0); /* terminate next time */
2699 case CXt_LOOP_LAZYIV: /* integer increment */
2701 IV cur = cx->blk_loop.state_u.lazyiv.cur;
2702 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
2706 /* see NB comment above */
2707 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2708 /* safe to reuse old SV */
2710 if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV))
2713 /* Cheap SvIOK_only().
2714 * Assert that flags which SvIOK_only() would test or
2715 * clear can't be set, because we're SVt_IV */
2716 assert(!(SvFLAGS(oldsv) &
2717 (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK)))));
2718 SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK);
2719 /* SvIV_set() where sv_any points to head */
2720 oldsv->sv_u.svu_iv = cur;
2724 sv_setiv(oldsv, cur);
2728 /* we need a fresh SV every time so that loop body sees a
2729 * completely new SV for closures/references to work as they
2731 *itersvp = newSViv(cur);
2732 SvREFCNT_dec(oldsv);
2735 if (UNLIKELY(cur == IV_MAX)) {
2736 /* Handle end of range at IV_MAX */
2737 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
2739 ++cx->blk_loop.state_u.lazyiv.cur;
2743 case CXt_LOOP_LIST: /* for (1,2,3) */
2745 assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */
2746 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
2747 ix = (cx->blk_loop.state_u.stack.ix += inc);
2748 if (UNLIKELY(inc > 0
2749 ? ix > cx->blk_oldsp
2750 : ix <= cx->blk_loop.state_u.stack.basesp)
2754 sv = PL_stack_base[ix];
2756 goto loop_ary_common;
2758 case CXt_LOOP_ARY: /* for (@ary) */
2760 av = cx->blk_loop.state_u.ary.ary;
2761 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
2762 ix = (cx->blk_loop.state_u.ary.ix += inc);
2763 if (UNLIKELY(inc > 0
2769 if (UNLIKELY(SvRMAGICAL(av))) {
2770 SV * const * const svp = av_fetch(av, ix, FALSE);
2771 sv = svp ? *svp : NULL;
2774 sv = AvARRAY(av)[ix];
2779 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
2780 SvSetMagicSV(*itersvp, sv);
2785 if (UNLIKELY(SvIS_FREED(sv))) {
2787 Perl_croak(aTHX_ "Use of freed value in iteration");
2794 SvREFCNT_inc_simple_void_NN(sv);
2798 sv = newSVavdefelem(av, ix, 0);
2805 SvREFCNT_dec(oldsv);
2809 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
2817 /* pp_enteriter should have pre-extended the stack */
2818 assert(PL_stack_sp < PL_stack_max);
2819 *++PL_stack_sp =retsv;
2821 return PL_op->op_next;
2825 A description of how taint works in pattern matching and substitution.
2827 This is all conditional on NO_TAINT_SUPPORT not being defined. Under
2828 NO_TAINT_SUPPORT, taint-related operations should become no-ops.
2830 While the pattern is being assembled/concatenated and then compiled,
2831 PL_tainted will get set (via TAINT_set) if any component of the pattern
2832 is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
2833 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
2834 TAINT_get). It will also be set if any component of the pattern matches
2835 based on locale-dependent behavior.
2837 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
2838 the pattern is marked as tainted. This means that subsequent usage, such
2839 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
2840 on the new pattern too.
2842 RXf_TAINTED_SEEN is used post-execution by the get magic code
2843 of $1 et al to indicate whether the returned value should be tainted.
2844 It is the responsibility of the caller of the pattern (i.e. pp_match,
2845 pp_subst etc) to set this flag for any other circumstances where $1 needs
2848 The taint behaviour of pp_subst (and pp_substcont) is quite complex.
2850 There are three possible sources of taint
2852 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
2853 * the replacement string (or expression under /e)
2855 There are four destinations of taint and they are affected by the sources
2856 according to the rules below:
2858 * the return value (not including /r):
2859 tainted by the source string and pattern, but only for the
2860 number-of-iterations case; boolean returns aren't tainted;
2861 * the modified string (or modified copy under /r):
2862 tainted by the source string, pattern, and replacement strings;
2864 tainted by the pattern, and under 'use re "taint"', by the source
2866 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
2867 should always be unset before executing subsequent code.
2869 The overall action of pp_subst is:
2871 * at the start, set bits in rxtainted indicating the taint status of
2872 the various sources.
2874 * After each pattern execution, update the SUBST_TAINT_PAT bit in
2875 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
2876 pattern has subsequently become tainted via locale ops.
2878 * If control is being passed to pp_substcont to execute a /e block,
2879 save rxtainted in the CXt_SUBST block, for future use by
2882 * Whenever control is being returned to perl code (either by falling
2883 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
2884 use the flag bits in rxtainted to make all the appropriate types of
2885 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
2886 et al will appear tainted.
2888 pp_match is just a simpler version of the above.
/* pp_subst fragment: implements the s/// substitution operator.
 * NOTE(review): this is a numbered listing with many original lines elided;
 * the visible code is kept byte-identical and only comments are added.
 * Broad structure visible below: set-up and taint bookkeeping, the first
 * match attempt, an in-place fast path for constant replacements, and a
 * copy-into-dstr path (also used for /e and s///r). */
2904 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
2905 See "how taint works" above */
2908 REGEXP *rx = PM_GETRE(pm);
2910 int force_on_match = 0;
2911 const I32 oldsave = PL_savestack_ix;
2913 bool doutf8 = FALSE; /* whether replacement is in utf8 */
2918 /* known replacement string? */
2919 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
2923 if (PL_op->op_flags & OPf_STACKED)
2932 SvGETMAGIC(TARG); /* must come before cow check */
2934 /* note that a string might get converted to COW during matching */
2935 was_cow = cBOOL(SvIsCOW(TARG));
2937 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
2938 #ifndef PERL_ANY_COW
2940 sv_force_normal_flags(TARG,0);
/* refuse to modify read-only targets and most non-string glob/complex types
 * (a FAKE glob is still allowed) */
2942 if ((SvREADONLY(TARG)
2943 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
2944 || SvTYPE(TARG) > SVt_PVLV)
2945 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
2946 Perl_croak_no_modify();
2950 orig = SvPV_nomg(TARG, len);
2951 /* note we don't (yet) force the var into being a string; if we fail
2952 * to match, we leave as-is; on successful match however, we *will*
2953 * coerce into a string, then repeat the match */
2954 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
2957 /* only replace once? */
2958 once = !(rpm->op_pmflags & PMf_GLOBAL);
2960 /* See "how taint works" above */
2963 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
2964 | (RX_ISTAINTED(rx) ? SUBST_TAINT_PAT : 0)
2965 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
2966 | ((once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
2967 ? SUBST_TAINT_BOOLRET : 0));
2973 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
2975 strend = orig + len;
/* maxiters bounds the substitution loop; see "Substitution loop" DIEs below */
2976 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len;
2977 maxiters = 2 * slen + 10; /* We can match twice at each
2978 position, once with zero-length,
2979 second time with non-zero. */
2981 if (!RX_PRELEN(rx) && PL_curpm
2982 && !ReANY(rx)->mother_re) {
2987 #ifdef PERL_SAWAMPERSAND
2988 r_flags = ( RX_NPARENS(rx)
2990 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
2991 || (rpm->op_pmflags & PMf_KEEPCOPY)
2996 r_flags = REXEC_COPY_STR;
/* first match attempt: on failure, push a false result (or TARG for s///r)
 * and unwind the save stack */
2999 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
3002 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
3003 LEAVE_SCOPE(oldsave);
3008 /* known replacement string? */
3010 /* replacement needing upgrading? */
3011 if (DO_UTF8(TARG) && !doutf8) {
3012 nsv = sv_newmortal();
3014 sv_utf8_upgrade(nsv);
3015 c = SvPV_const(nsv, clen);
3019 c = SvPV_const(dstr, clen);
3020 doutf8 = DO_UTF8(dstr);
3023 if (SvTAINTED(dstr))
3024 rxtainted |= SUBST_TAINT_REPL;
3031 /* can do inplace substitution? */
3036 && (I32)clen <= RX_MINLENRET(rx)
3038 || !(r_flags & REXEC_COPY_STR)
3039 || (!SvGMAGICAL(dstr) && !(RX_EXTFLAGS(rx) & RXf_EVAL_SEEN))
3041 && !(RX_EXTFLAGS(rx) & RXf_NO_INPLACE_SUBST)
3042 && (!doutf8 || SvUTF8(TARG))
3043 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
3047 /* string might have got converted to COW since we set was_cow */
3048 if (SvIsCOW(TARG)) {
3049 if (!force_on_match)
3051 assert(SvVOK(TARG));
3054 if (force_on_match) {
3055 /* redo the first match, this time with the orig var
3056 * forced into being a string */
3058 orig = SvPV_force_nomg(TARG, len);
3064 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3065 rxtainted |= SUBST_TAINT_PAT;
3066 m = orig + RX_OFFS(rx)[0].start;
3067 d = orig + RX_OFFS(rx)[0].end;
/* single in-place replacement: splice the constant replacement over the
 * matched span, shifting the shorter remaining side */
3069 if (m - s > strend - d) { /* faster to shorten from end */
3072 Copy(c, m, clen, char);
3077 Move(d, m, i, char);
3081 SvCUR_set(TARG, m - s);
3083 else { /* faster from front */
3087 Move(s, d - i, i, char);
3090 Copy(c, d, clen, char);
/* global in-place substitution loop (constant replacement) */
3097 d = s = RX_OFFS(rx)[0].start + orig;
3100 if (UNLIKELY(iters++ > maxiters))
3101 DIE(aTHX_ "Substitution loop");
3102 if (UNLIKELY(RX_MATCH_TAINTED(rx))) /* run time pattern taint, eg locale */
3103 rxtainted |= SUBST_TAINT_PAT;
3104 m = RX_OFFS(rx)[0].start + orig;
3107 Move(s, d, i, char);
3111 Copy(c, d, clen, char);
3114 s = RX_OFFS(rx)[0].end + orig;
3115 } while (CALLREGEXEC(rx, s, strend, orig,
3116 s == m, /* don't match same null twice */
3118 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3121 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
3122 Move(s, d, i+1, char); /* include the NUL */
/* copy-into-dstr path: used when in-place isn't possible (magic, /e,
 * UTF-8 mismatch, s///r, ...) */
3132 if (force_on_match) {
3133 /* redo the first match, this time with the orig var
3134 * forced into being a string */
3136 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3137 /* I feel that it should be possible to avoid this mortal copy
3138 given that the code below copies into a new destination.
3139 However, I suspect it isn't worth the complexity of
3140 unravelling the C<goto force_it> for the small number of
3141 cases where it would be viable to drop into the copy code. */
3142 TARG = sv_2mortal(newSVsv(TARG));
3144 orig = SvPV_force_nomg(TARG, len);
3150 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3151 rxtainted |= SUBST_TAINT_PAT;
/* build the result in dstr: leading unmatched prefix first */
3153 s = RX_OFFS(rx)[0].start + orig;
3154 dstr = newSVpvn_flags(orig, s-orig,
3155 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
3160 /* note that a whole bunch of local vars are saved here for
3161 * use by pp_substcont: here's a list of them in case you're
3162 * searching for places in this sub that uses a particular var:
3163 * iters maxiters r_flags oldsave rxtainted orig dstr targ
3164 * s m strend rx once */
/* /e replacement: hand control to the replacement expression ops
 * (pp_substcont resumes from there) */
3166 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
3170 if (UNLIKELY(iters++ > maxiters))
3171 DIE(aTHX_ "Substitution loop");
3172 if (UNLIKELY(RX_MATCH_TAINTED(rx)))
3173 rxtainted |= SUBST_TAINT_PAT;
3174 if (RX_MATCH_COPIED(rx) && RX_SUBBEG(rx) != orig) {
/* the regex engine copied the subject; rebase our pointers onto its copy */
3176 char *old_orig = orig;
3177 assert(RX_SUBOFFSET(rx) == 0);
3179 orig = RX_SUBBEG(rx);
3180 s = orig + (old_s - old_orig);
3181 strend = s + (strend - old_s);
3183 m = RX_OFFS(rx)[0].start + orig;
3184 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
3185 s = RX_OFFS(rx)[0].end + orig;
3187 /* replacement already stringified */
3189 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
3193 sv_catsv(dstr, repl);
3194 if (UNLIKELY(SvTAINTED(repl)))
3195 rxtainted |= SUBST_TAINT_REPL;
3199 } while (CALLREGEXEC(rx, s, strend, orig,
3200 s == m, /* Yields minend of 0 or 1 */
3202 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3203 assert(strend >= s);
3204 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
3206 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3207 /* From here on down we're using the copy, and leaving the original
3214 /* The match may make the string COW. If so, brilliant, because
3215 that's just saved us one malloc, copy and free - the regexp has
3216 donated the old buffer, and we malloc an entirely new one, rather
3217 than the regexp malloc()ing a buffer and copying our original,
3218 only for us to throw it away here during the substitution. */
3219 if (SvIsCOW(TARG)) {
3220 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
/* steal dstr's buffer into TARG rather than copying it */
3226 SvPV_set(TARG, SvPVX(dstr));
3227 SvCUR_set(TARG, SvCUR(dstr));
3228 SvLEN_set(TARG, SvLEN(dstr));
3229 SvFLAGS(TARG) |= SvUTF8(dstr);
3230 SvPV_set(dstr, NULL);
3237 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3238 (void)SvPOK_only_UTF8(TARG);
3241 /* See "how taint works" above */
3243 if ((rxtainted & SUBST_TAINT_PAT) ||
3244 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
3245 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
3247 (RX_MATCH_TAINTED_on(rx)); /* taint $1 et al */
3249 if (!(rxtainted & SUBST_TAINT_BOOLRET)
3250 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
3252 SvTAINTED_on(TOPs); /* taint return value */
3254 SvTAINTED_off(TOPs); /* may have got tainted earlier */
3256 /* needed for mg_set below */
3258 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
3262 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
3264 LEAVE_SCOPE(oldsave);
/* pp_grepwhile fragment: runs after each evaluation of the grep BLOCK.
 * NOTE(review): elided lines include the truth test on the block's result;
 * the visible copy at 3273 keeps the current candidate in the "dst" part
 * of the stack delimited by the two marks. Code kept byte-identical. */
3273 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
3274 ++*PL_markstack_ptr;
3276 LEAVE_with_name("grep_item"); /* exit inner scope */
/* all source items consumed? then finish up */
3279 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
3281 const U8 gimme = GIMME_V;
3283 LEAVE_with_name("grep"); /* exit outer scope */
3284 (void)POPMARK; /* pop src */
3285 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
3286 (void)POPMARK; /* pop dst */
3287 SP = PL_stack_base + POPMARK; /* pop original mark */
3288 if (gimme == G_SCALAR) {
3292 else if (gimme == G_ARRAY)
/* otherwise set up the next candidate in $_ and re-enter the BLOCK */
3299 ENTER_with_name("grep_item"); /* enter inner scope */
3302 src = PL_stack_base[TOPMARK];
3303 if (SvPADTMP(src)) {
3304 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
3310 RETURNOP(cLOGOP->op_other);
3314 /* leave_adjust_stacks():
3316 * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp),
3317 * positioning them at to_sp+1 onwards, and do the equivalent of a
3318 * FREEMPS and TAINT_NOT.
3320 * Not intended to be called in void context.
3322 * When leaving a sub, eval, do{} or other scope, the things that need
3323 * doing to process the return args are:
3324 * * in scalar context, only return the last arg (or PL_sv_undef if none);
3325 * * for the types of return that return copies of their args (such
3326 * as rvalue sub return), make a mortal copy of every return arg,
3327 * except where we can optimise the copy away without it being
3328 * semantically visible;
3329 * * make sure that the arg isn't prematurely freed; in the case of an
3330 * arg not copied, this may involve mortalising it. For example, in
3331 * C<sub f { my $x = ...; $x }>, $x would be freed when we do
3332 * CX_LEAVE_SCOPE(cx) unless it's protected or copied.
3334 * What condition to use when deciding whether to pass the arg through
3335 * or make a copy, is determined by the 'pass' arg; its valid values are:
3336 * 0: rvalue sub/eval exit
3337 * 1: other rvalue scope exit
3338 * 2: :lvalue sub exit in rvalue context
3339 * 3: :lvalue sub exit in lvalue context and other lvalue scope exits
3341 * There is a big issue with doing a FREETMPS. We would like to free any
3342 * temps created by the last statement which the sub executed, rather than
3343 * leaving them for the caller. In a situation where a sub call isn't
3344 * soon followed by a nextstate (e.g. nested recursive calls, a la
3345 * fibonacci()), temps can accumulate, causing memory and performance
3348 * On the other hand, we don't want to free any TEMPs which are keeping
3349 * alive any return args that we skipped copying; nor do we wish to undo
3350 * any mortalising done here.
3352 * The solution is to split the temps stack frame into two, with a cut
3353 * point delineating the two halves. We arrange that by the end of this
3354 * function, all the temps stack frame entries we wish to keep are in the
3355 * range PL_tmps_floor+1.. tmps_base-1, while the ones to free now are in
3356 * the range tmps_base .. PL_tmps_ix. During the course of this
3357 * function, tmps_base starts off as PL_tmps_floor+1, then increases
3358 * whenever we find or create a temp that we know should be kept. In
3359 * general the stuff above tmps_base is undecided until we reach the end,
3360 * and we may need a sort stage for that.
3362 * To determine whether a TEMP is keeping a return arg alive, every
3363 * arg that is kept rather than copied and which has the SvTEMP flag
3364 * set, has the flag temporarily unset, to mark it. At the end we scan
3365 * the temps stack frame above the cut for entries without SvTEMP and
3366 * keep them, while turning SvTEMP on again. Note that if we die before
3367 * the SvTEMPs flags are set again, it's safe: at worst, subsequent use of
3368 * those SVs may be slightly less efficient.
3370 * In practice various optimisations for some common cases mean we can
3371 * avoid most of the scanning and swapping about with the temps stack.
/* Perl_leave_adjust_stacks: reposition and (where needed) copy/mortalise a
 * scope's return args, then free the right slice of the temps stack.
 * See the large explanatory comment above for the full contract and the
 * meaning of 'pass' (0..3).
 * NOTE(review): numbered listing with elided lines; code kept byte-identical,
 * only comments added/corrected. */
3375 Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass)
3379 SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
3382 PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS;
3386 if (gimme == G_ARRAY) {
3387 nargs = SP - from_sp;
3391 assert(gimme == G_SCALAR);
3392 if (UNLIKELY(from_sp >= SP)) {
3393 /* no return args */
3394 assert(from_sp == SP);
3396 *++SP = &PL_sv_undef;
3406 /* common code for G_SCALAR and G_ARRAY */
3408 tmps_base = PL_tmps_floor + 1;
3412 /* pointer version of tmps_base. Not safe across temp stack
3416 EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
3417 tmps_basep = PL_tmps_stack + tmps_base;
3419 /* process each return arg */
3422 SV *sv = *from_sp++;
3424 assert(PL_tmps_ix + nargs < PL_tmps_max);
3426 /* PADTMPs with container set magic shouldn't appear in the
3427 * wild. This assert is more important for pp_leavesublv(),
3428 * but by testing for it here, we're more likely to catch
3429 * bad cases (what with :lvalue subs not being widely
3430 * deployed). The two issues are that for something like
3431 * sub :lvalue { $tied{foo} }
3433 * sub :lvalue { substr($foo,1,2) }
3434 * pp_leavesublv() will croak if the sub returns a PADTMP,
3435 * and currently functions like pp_substr() return a mortal
3436 * rather than using their PADTMP when returning a PVLV.
3437 * This is because the PVLV will hold a ref to $foo,
3438 * so $foo would get delayed in being freed while
3439 * the PADTMP SV remained in the PAD.
3440 * So if this assert fails it means either:
3441 * 1) there is pp code similar to pp_substr that is
3442 * returning a PADTMP instead of a mortal, and probably
3444 * 2) pp_leavesublv is making unwarranted assumptions
3445 * about always croaking on a PADTMP
3447 if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
3449 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
3450 assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
/* decide pass-through vs copy, per the 'pass' semantics documented above */
3456 pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3457 : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3458 : pass == 2 ? (!SvPADTMP(sv))
3461 /* pass through: skip copy for logic or optimisation
3462 * reasons; instead mortalise it, except that ... */
3466 /* ... since this SV is an SvTEMP, we don't need to
3467 * re-mortalise it; instead we just need to ensure
3468 * that its existing entry in the temps stack frame
3469 * ends up below the cut and so avoids being freed
3470 * this time round. We mark it as needing to be kept
3471 * by temporarily unsetting SvTEMP; then at the end,
3472 * we shuffle any !SvTEMP entries on the tmps stack
3473 * back below the cut.
3474 * However, there's a significant chance that there's
3475 * a 1:1 correspondence between the first few (or all)
3476 * elements in the return args stack frame and those
3477 * in the temps stack frame; e.g.:
3478 * sub f { ....; map {...} .... },
3479 * or if we're exiting multiple scopes and one of the
3480 * inner scopes has already made mortal copies of each
3483 * If so, this arg sv will correspond to the next item
3484 * on the tmps stack above the cut, and so can be kept
3485 * merely by moving the cut boundary up one, rather
3486 * than messing with SvTEMP. If all args are 1:1 then
3487 * we can avoid the sorting stage below completely.
3489 * If there are no items above the cut on the tmps
3490 * stack, then the SvTEMP must come from an item
3491 * below the cut, so there's nothing to do.
3493 if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) {
3494 if (sv == *tmps_basep)
3500 else if (!SvPADTMP(sv)) {
3501 /* mortalise arg to avoid it being freed during save
3502 * stack unwinding. Pad tmps don't need mortalising as
3503 * they're never freed. This is the equivalent of
3504 * sv_2mortal(SvREFCNT_inc(sv)), except that:
3505 * * it assumes that the temps stack has already been
3507 * * it puts the new item at the cut rather than at
3508 * ++PL_tmps_ix, moving the previous occupant there
3511 if (!SvIMMORTAL(sv)) {
3512 SvREFCNT_inc_simple_void_NN(sv);
3514 /* Note that if there's nothing above the cut,
3515 * this copies the garbage one slot above
3516 * PL_tmps_ix onto itself. This is harmless (the
3517 * stack's already been extended), but might in
3518 * theory trigger warnings from tools like ASan
3520 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3526 /* Make a mortal copy of the SV.
3527 * The following code is the equivalent of sv_mortalcopy()
3529 * * it assumes the temps stack has already been extended;
3530 * * it optimises the copying for some simple SV types;
3531 * * it puts the new item at the cut rather than at
3532 * ++PL_tmps_ix, moving the previous occupant there
3535 SV *newsv = newSV(0);
3537 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3538 /* put it on the tmps stack early so it gets freed if we die */
3539 *tmps_basep++ = newsv;
3542 if (SvTYPE(sv) <= SVt_IV) {
3543 /* arg must be one of undef, IV/UV, or RV: skip
3544 * sv_setsv_flags() and do the copy directly */
3546 U32 srcflags = SvFLAGS(sv);
3548 assert(!SvGMAGICAL(sv));
3549 if (srcflags & (SVf_IOK|SVf_ROK)) {
3550 SET_SVANY_FOR_BODYLESS_IV(newsv);
3552 if (srcflags & SVf_ROK) {
3553 newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
3554 /* SV type plus flags */
3555 dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
3558 /* both src and dst are <= SVt_IV, so sv_any
3559 * points to the head; so access the heads
3560 * directly rather than going via sv_any.
3562 assert( &(sv->sv_u.svu_iv)
3563 == &(((XPVIV*) SvANY(sv))->xiv_iv));
3564 assert( &(newsv->sv_u.svu_iv)
3565 == &(((XPVIV*) SvANY(newsv))->xiv_iv));
3566 newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
3567 /* SV type plus flags */
3568 dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
3569 |(srcflags & SVf_IVisUV));
3573 assert(!(srcflags & SVf_OK));
3574 dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
3576 SvFLAGS(newsv) = dstflags;
3580 /* do the full sv_setsv() */
3584 old_base = tmps_basep - PL_tmps_stack;
3586 sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
3587 /* the mg_get or sv_setsv might have created new temps
3588 * or realloced the tmps stack; regrow and reload */
3589 EXTEND_MORTAL(nargs);
3590 tmps_basep = PL_tmps_stack + old_base;
3591 TAINT_NOT; /* Each item is independent */
3597 /* If there are any temps left above the cut, we need to sort
3598 * them into those to keep and those to free. The only ones to
3599 * keep are those for which we've temporarily unset SvTEMP.
3600 * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
3601 * swapping pairs as necessary. Stop when we meet in the middle.
3604 SV **top = PL_tmps_stack + PL_tmps_ix;
3605 while (tmps_basep <= top) {
3618 tmps_base = tmps_basep - PL_tmps_stack;
3621 PL_stack_sp = to_sp;
3623 /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
3624 while (PL_tmps_ix >= tmps_base) {
3625 SV* const sv = PL_tmps_stack[PL_tmps_ix--];
3627 PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
3631 SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
/* pp_leavesub fragment: normal (rvalue) sub exit — adjust return args via
 * leave_adjust_stacks(), pop the CXt_SUB context, and resume at retop.
 * NOTE(review): numbered listing with elided lines; code unchanged. */
3637 /* also tail-called by pp_return */
3647 assert(CxTYPE(cx) == CXt_SUB);
3649 if (CxMULTICALL(cx)) {
3650 /* entry zero of a stack is always PL_sv_undef, which
3651 * simplifies converting a '()' return into undef in scalar context */
3652 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
3656 gimme = cx->blk_gimme;
3657 oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */
3659 if (gimme == G_VOID)
3660 PL_stack_sp = oldsp;
/* pass == 0: rvalue sub/eval exit semantics (see leave_adjust_stacks docs) */
3662 leave_adjust_stacks(oldsp, oldsp, gimme, 0);
3665 cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */
3667 retop = cx->blk_sub.retop;
/* Perl_clear_defarray: clear the current @_ in place when it is unshared and
 * unmagical; otherwise (or when 'abandon' is true) detach it and install a
 * fresh AV of the same capacity in pad slot 0.
 * NOTE(review): numbered listing with elided lines (the in-place clear branch
 * body is not visible here); code unchanged. */
3674 /* clear (if possible) or abandon the current @_. If 'abandon' is true,
3675 * forces an abandon */
3678 Perl_clear_defarray(pTHX_ AV* av, bool abandon)
3680 const SSize_t fill = AvFILLp(av);
3682 PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
3684 if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) {
/* replacement AV: same capacity, REIFY-only like a fresh @_ */
3689 AV *newav = newAV();
3690 av_extend(newav, fill);
3691 AvREIFY_only(newav);
3692 PAD_SVl(0) = MUTABLE_SV(newav);
3693 SvREFCNT_dec_NN(av);
/* pp_entersub fragment: resolve the callee SV to a CV (handling globs,
 * symbolic refs, overloading, AUTOLOAD and the debugger), then dispatch to
 * either the pure-perl path (push a CXt_SUB context, set up pad and @_) or
 * the XS path (call CvXSUB directly).
 * NOTE(review): numbered listing with many elided lines; code unchanged. */
3704 I32 old_savestack_ix;
3709 /* Locate the CV to call:
3710 * - most common case: RV->CV: f(), $ref->():
3711 * note that if a sub is compiled before its caller is compiled,
3712 * the stash entry will be a ref to a CV, rather than being a GV.
3713 * - second most common case: CV: $ref->method()
3716 /* a non-magic-RV -> CV ? */
3717 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
3718 cv = MUTABLE_CV(SvRV(sv));
3719 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
3723 cv = MUTABLE_CV(sv);
3726 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
3727 /* handle all the weird cases */
3728 switch (SvTYPE(sv)) {
3730 if (!isGV_with_GP(sv))
3734 cv = GvCVu((const GV *)sv);
3735 if (UNLIKELY(!cv)) {
3737 cv = sv_2cv(sv, &stash, &gv, 0);
3739 old_savestack_ix = PL_savestack_ix;
3750 if (UNLIKELY(SvAMAGIC(sv))) {
3751 sv = amagic_deref_call(sv, to_cv_amg);
3752 /* Don't SPAGAIN here. */
3758 if (UNLIKELY(!SvOK(sv)))
3759 DIE(aTHX_ PL_no_usym, "a subroutine");
3761 if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
3762 if (PL_op->op_flags & OPf_STACKED) /* hasargs */
3763 SP = PL_stack_base + POPMARK;
3766 if (GIMME_V == G_SCALAR)
3767 PUSHs(&PL_sv_undef);
/* symbolic reference: only allowed without 'strict refs' */
3771 sym = SvPV_nomg_const(sv, len);
3772 if (PL_op->op_private & HINT_STRICT_REFS)
3773 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
3774 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
3777 cv = MUTABLE_CV(SvRV(sv));
3778 if (LIKELY(SvTYPE(cv) == SVt_PVCV))
3784 DIE(aTHX_ "Not a CODE reference");
3788 /* At this point we want to save PL_savestack_ix, either by doing a
3789 * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final
3790 * CV we will be using (so we don't know whether it's XS, so we can't
3791 * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on
3792 * the save stack. So remember where we are currently on the save
3793 * stack, and later update the CX or scopestack entry accordingly. */
3794 old_savestack_ix = PL_savestack_ix;
3796 /* these two fields are in a union. If they ever become separate,
3797 * we have to test for both of them being null below */
3799 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
3800 while (UNLIKELY(!CvROOT(cv))) {
3804 /* anonymous or undef'd function leaves us no recourse */
3805 if (CvLEXICAL(cv) && CvHASGV(cv))
3806 DIE(aTHX_ "Undefined subroutine &%"SVf" called",
3807 SVfARG(cv_name(cv, NULL, 0)));
3808 if (CvANON(cv) || !CvHASGV(cv)) {
3809 DIE(aTHX_ "Undefined subroutine called");
3812 /* autoloaded stub? */
3813 if (cv != GvCV(gv = CvGV(cv))) {
3816 /* should call AUTOLOAD now? */
3819 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
3820 GvNAMEUTF8(gv) ? SVf_UTF8 : 0);
3821 cv = autogv ? GvCV(autogv) : NULL;
3824 sub_name = sv_newmortal();
3825 gv_efullname3(sub_name, gv, NULL);
3826 DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
3830 /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
3831 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
3832 DIE(aTHX_ "Closure prototype called");
/* debugger hook: redirect the call through DB::sub / DB::lsub */
3834 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
3837 Perl_get_db_sub(aTHX_ &sv, cv);
3839 PL_curcopdb = PL_curcop;
3841 /* check for lsub that handles lvalue subroutines */
3842 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
3843 /* if lsub not found then fall back to DB::sub */
3844 if (!cv) cv = GvCV(PL_DBsub);
3846 cv = GvCV(PL_DBsub);
3849 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
3850 DIE(aTHX_ "No DB::sub routine defined");
3853 if (!(CvISXSUB(cv))) {
3854 /* This path taken at least 75% of the time */
3861 /* keep PADTMP args alive throughout the call (we need to do this
3862 * because @_ isn't refcounted). Note that we create the mortals
3863 * in the caller's tmps frame, so they won't be freed until after
3864 * we return from the sub.
3873 *svp = sv = sv_mortalcopy(sv);
/* push the sub context and switch to the callee's pad */
3879 cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix);
3880 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
3881 cx_pushsub(cx, cv, PL_op->op_next, hasargs);
3883 padlist = CvPADLIST(cv);
3884 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2))
3885 pad_push(padlist, depth);
3886 PAD_SET_CUR_NOSAVE(padlist, depth);
3887 if (LIKELY(hasargs)) {
3888 AV *const av = MUTABLE_AV(PAD_SVl(0));
3892 defavp = &GvAV(PL_defgv);
3893 cx->blk_sub.savearray = *defavp;
3894 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
3896 /* it's the responsibility of whoever leaves a sub to ensure
3897 * that a clean, empty AV is left in pad[0]. This is normally
3898 * done by cx_popsub() */
3899 assert(!AvREAL(av) && AvFILLp(av) == -1);
3902 if (UNLIKELY(items - 1 > AvMAX(av))) {
3903 SV **ary = AvALLOC(av);
3904 AvMAX(av) = items - 1;
3905 Renew(ary, items, SV*);
/* move the stacked args into @_ */
3910 Copy(MARK+1,AvARRAY(av),items,SV*);
3911 AvFILLp(av) = items - 1;
3913 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3915 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3916 SVfARG(cv_name(cv, NULL, 0)));
3917 /* warning must come *after* we fully set up the context
3918 * stuff so that __WARN__ handlers can safely dounwind()
3921 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
3922 && ckWARN(WARN_RECURSION)
3923 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
3924 sub_crush_depth(cv);
3925 RETURNOP(CvSTART(cv));
/* XS path from here down */
3928 SSize_t markix = TOPMARK;
3932 /* pretend we did the ENTER earlier */
3933 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
3938 if (UNLIKELY(((PL_op->op_private
3939 & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
3940 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3942 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3943 SVfARG(cv_name(cv, NULL, 0)));
3945 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
3946 /* Need to copy @_ to stack. Alternative may be to
3947 * switch stack to @_, and copy return values
3948 * back. This would allow popping @_ in XSUB, e.g.. XXXX */
3949 AV * const av = GvAV(PL_defgv);
3950 const SSize_t items = AvFILL(av) + 1;
3954 const bool m = cBOOL(SvRMAGICAL(av));
3955 /* Mark is at the end of the stack. */
3957 for (; i < items; ++i)
3961 SV ** const svp = av_fetch(av, i, 0);
3962 sv = svp ? *svp : NULL;
3964 else sv = AvARRAY(av)[i];
3965 if (sv) SP[i+1] = sv;
3967 SP[i+1] = newSVavdefelem(av, i, 1);
3975 SV **mark = PL_stack_base + markix;
3976 SSize_t items = SP - mark;
3979 if (*mark && SvPADTMP(*mark)) {
3980 *mark = sv_mortalcopy(*mark);
3984 /* We assume first XSUB in &DB::sub is the called one. */
3985 if (UNLIKELY(PL_curcopdb)) {
3986 SAVEVPTR(PL_curcop);
3987 PL_curcop = PL_curcopdb;
3990 /* Do we need to open block here? XXXX */
3992 /* calculate gimme here as PL_op might get changed and then not
3993 * restored until the LEAVE further down */
3994 is_scalar = (GIMME_V == G_SCALAR);
3996 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
3998 CvXSUB(cv)(aTHX_ cv);
4000 /* Enforce some sanity in scalar context. */
4002 SV **svp = PL_stack_base + markix + 1;
4003 if (svp != PL_stack_sp) {
4004 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
/* Perl_sub_crush_depth: emit the "Deep recursion" warning for cv, naming the
 * sub when it has a usable name (the branch condition is on an elided line). */
4014 Perl_sub_crush_depth(pTHX_ CV *cv)
4016 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
4019 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
4021 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%"SVf"\"",
4022 SVfARG(cv_name(cv,NULL,0)));
/* Perl_croak_caller: croak, but with PL_curcop temporarily pointing at the
 * caller's COP so the error message carries the call-site location.
 * NOTE(review): the actual vcroak/va_end lines are elided in this listing. */
4028 /* like croak, but report in context of caller */
4031 Perl_croak_caller(const char *pat, ...)
4035 const PERL_CONTEXT *cx = caller_cx(0, NULL);
4037 /* make error appear at call site */
4039 PL_curcop = cx->blk_oldcop;
4041 va_start(args, pat);
4043 NOT_REACHED; /* NOTREACHED */
/* pp_aelem fragment: array element access ($a[$i]) — fetch (possibly
 * vivifying for lvalue), with local()/delete bookkeeping and deferred
 * lvalue elements via newSVavdefelem().
 * NOTE(review): numbered listing with elided lines; code unchanged. */
4052 SV* const elemsv = POPs;
4053 IV elem = SvIV(elemsv);
4054 AV *const av = MUTABLE_AV(POPs);
4055 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
4056 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
4057 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
4058 bool preeminent = TRUE;
4061 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
4062 Perl_warner(aTHX_ packWARN(WARN_MISC),
4063 "Use of reference \"%"SVf"\" as array index",
4065 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
4068 if (UNLIKELY(localizing)) {
4072 /* If we can determine whether the element exists,
4073 * try to preserve the existence of a tied array
4074 * element by using EXISTS and DELETE if possible.
4075 * Fallback to FETCH and STORE otherwise. */
4076 if (SvCANEXISTDELETE(av))
4077 preeminent = av_exists(av, elem);
4080 svp = av_fetch(av, elem, lval && !defer);
4082 #ifdef PERL_MALLOC_WRAP
/* clamp/convert odd index types before the malloc-wrap sanity check */
4083 if (SvUOK(elemsv)) {
4084 const UV uv = SvUV(elemsv);
4085 elem = uv > IV_MAX ? IV_MAX : uv;
4087 else if (SvNOK(elemsv))
4088 elem = (IV)SvNV(elemsv);
4090 static const char oom_array_extend[] =
4091 "Out of memory during array extend"; /* Duplicated in av.c */
4092 MEM_WRAP_CHECK_1(elem,SV*,oom_array_extend);
4095 if (!svp || !*svp) {
4098 DIE(aTHX_ PL_no_aelem, elem);
4099 len = av_tindex(av);
4100 mPUSHs(newSVavdefelem(av,
4101 /* Resolve a negative index now, unless it points before the
4102 beginning of the array, in which case record it for error
4103 reporting in magic_setdefelem. */
4104 elem < 0 && len + elem >= 0 ? len + elem : elem,
4108 if (UNLIKELY(localizing)) {
4110 save_aelem(av, elem, svp);
4112 SAVEADELETE(av, elem);
4114 else if (PL_op->op_private & OPpDEREF) {
4115 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
4119 sv = (svp ? *svp : &PL_sv_undef);
4120 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
/* Perl_vivify_ref fragment: autovivify sv into a reference of the kind
 * requested by to_what (scalar/array/hash), croaking on read-only targets;
 * for get-magical SVs, return a magic-free mortal copy instead.
 * NOTE(review): numbered listing with elided lines (switch/return structure
 * partially hidden); code unchanged. */
4127 Perl_vivify_ref(pTHX_ SV *sv, U32 to_what)
4129 PERL_ARGS_ASSERT_VIVIFY_REF;
4134 Perl_croak_no_modify();
4135 prepare_SV_for_RV(sv);
4138 SvRV_set(sv, newSV(0));
4141 SvRV_set(sv, MUTABLE_SV(newAV()));
4144 SvRV_set(sv, MUTABLE_SV(newHV()));
4151 if (SvGMAGICAL(sv)) {
4152 /* copy the sv without magic to prevent magic from being
4154 SV* msv = sv_newmortal();
4155 sv_setsv_nomg(msv, sv);
/* S_opmethod_stash: given the method-name SV, locate the stash for the
 * invocant (first arg after TOPMARK): handles class names, blessed refs,
 * globs/filehandles, and defelem PVLVs, croaking on undef/unblessed cases.
 * NOTE(review): numbered listing with elided lines; code unchanged. */
4161 PERL_STATIC_INLINE HV *
4162 S_opmethod_stash(pTHX_ SV* meth)
4167 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
4168 ? (Perl_croak(aTHX_ "Can't call method \"%"SVf"\" without a "
4169 "package or object reference", SVfARG(meth)),
4171 : *(PL_stack_base + TOPMARK + 1);
4173 PERL_ARGS_ASSERT_OPMETHOD_STASH;
4177 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on an undefined value",
4180 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
4181 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
4182 stash = gv_stashsv(sv, GV_CACHE_ONLY);
4183 if (stash) return stash;
4187 ob = MUTABLE_SV(SvRV(sv));
4188 else if (!SvOK(sv)) goto undefined;
4189 else if (isGV_with_GP(sv)) {
4191 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
4192 "without a package or object reference",
4195 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') {
4196 assert(!LvTARGLEN(ob));
4200 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob));
4203 /* this isn't a reference */
4206 const char * const packname = SvPV_nomg_const(sv, packlen);
4207 const U32 packname_utf8 = SvUTF8(sv);
4208 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
4209 if (stash) return stash;
4211 if (!(iogv = gv_fetchpvn_flags(
4212 packname, packlen, packname_utf8, SVt_PVIO
4214 !(ob=MUTABLE_SV(GvIO(iogv))))
4216 /* this isn't the name of a filehandle either */
4219 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
4220 "without a package or object reference",
4223 /* assume it's a package name */
4224 stash = gv_stashpvn(packname, packlen, packname_utf8);
4225 if (stash) return stash;
4226 else return MUTABLE_HV(sv);
4228 /* it _is_ a filehandle name -- replace with a reference */
4229 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
4232 /* if we got here, ob should be an object or a glob */
4233 if (!ob || !(SvOBJECT(ob)
4234 || (isGV_with_GP(ob)
4235 && (ob = MUTABLE_SV(GvIO((const GV *)ob)))
/* suggest ->DOES when an "isa" call hits an unblessed ref */
4238 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on unblessed reference",
4239 SVfARG((SvSCREAM(meth) && strEQ(SvPV_nolen_const(meth),"isa"))
4240 ? newSVpvs_flags("DOES", SVs_TEMP)
/* pp_method fragment: dynamic method call $obj->$meth — a CODE ref short-
 * circuits; otherwise resolve via the invocant's stash with AUTOLOAD. */
4252 SV* const meth = TOPs;
4255 SV* const rmeth = SvRV(meth);
4256 if (SvTYPE(rmeth) == SVt_PVCV) {
4262 stash = opmethod_stash(meth);
4264 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4267 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* METHOD_CHECK_CACHE fragment: probe the method cache 'cache' for 'meth';
 * on a hit whose GvCVGEN is still valid against PL_sub_generation plus the
 * stash's MRO cache_gen, push the cached CV (elided lines complete the
 * macro). */
4271 #define METHOD_CHECK_CACHE(stash,cache,meth) \
4272 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
4274 gv = MUTABLE_GV(HeVAL(he)); \
4275 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
4276 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
4278 XPUSHs(MUTABLE_SV(GvCV(gv))); \
/* pp_method_named fragment: method call with a compile-time-constant name —
 * try the stash's own cache first, then full lookup with AUTOLOAD. */
4287 SV* const meth = cMETHOPx_meth(PL_op);
4288 HV* const stash = opmethod_stash(meth);
4290 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
4291 METHOD_CHECK_CACHE(stash, stash, meth);
4294 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4297 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* pp_method_super fragment: SUPER::meth — resolve in the current package's
 * (CopSTASH) superclass chain, consulting the MRO 'super' cache first. */
4306 SV* const meth = cMETHOPx_meth(PL_op);
4307 HV* const stash = CopSTASH(PL_curcop);
4308 /* Actually, SUPER doesn't need real object's (or class') stash at all,
4309 * as it uses CopSTASH. However, we must ensure that object(class) is
4310 * correct (this check is done by S_opmethod_stash) */
4311 opmethod_stash(meth);
4313 if ((cache = HvMROMETA(stash)->super)) {
4314 METHOD_CHECK_CACHE(stash, cache, meth);
4317 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4320 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* pp_method_redir fragment: redirected method call (Foo::->meth form with a
 * known target class) — look up in the redirect class's stash. */
4328 SV* const meth = cMETHOPx_meth(PL_op);
4329 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4330 opmethod_stash(meth); /* not used but needed for error checks */
4332 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
4333 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4335 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4338 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* pp_method_redir_super fragment: redirected SUPER:: method call — resolve
 * in the redirect class's superclass chain, consulting its 'super' cache. */
4342 PP(pp_method_redir_super)
4347 SV* const meth = cMETHOPx_meth(PL_op);
4348 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4349 opmethod_stash(meth); /* not used but needed for error checks */
4351 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4352 else if ((cache = HvMROMETA(stash)->super)) {
4353 METHOD_CHECK_CACHE(stash, cache, meth);
4356 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4359 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4364 * ex: set ts=8 sts=4 sw=4 et: