3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
6 * You may distribute under the terms of either the GNU General Public
7 * License or the Artistic License, as specified in the README file.
12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
15 * Awake! Awake! Fear, Fire, Foes! Awake!
18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
21 /* This file contains 'hot' pp ("push/pop") functions that
22 * execute the opcodes that make up a perl program. A typical pp function
23 * expects to find its arguments on the stack, and usually pushes its
24 * results onto the stack, hence the 'pp' terminology. Each OP structure
25 * contains a pointer to the relevant pp_foo() function.
27 * By 'hot', we mean common ops whose execution speed is critical.
28 * By gathering them together into a single file, we encourage
29 * CPU cache hits on hot code. Also it could be taken as a warning not to
30 * change any code in this file unless you're sure it won't affect
35 #define PERL_IN_PP_HOT_C
49 PL_curcop = (COP*)PL_op;
50 TAINT_NOT; /* Each statement is presumed innocent */
51 PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;
61 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
62 PUSHs(save_scalar(cGVOP_gv));
64 PUSHs(GvSVn(cGVOP_gv));
69 /* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
76 /* This is sometimes called directly by pp_coreargs, pp_grepstart and
80 PUSHMARK(PL_stack_sp);
91 /* no PUTBACK, SETs doesn't inc/dec SP */
98 XPUSHs(MUTABLE_SV(cGVOP_gv));
103 /* also used for: pp_andassign() */
109 /* SP is not used to remove a variable that is saved across the
110 sv_2bool_flags call in SvTRUE_NN, if a RISC/CISC or low/high machine
111 register or load/store vs direct mem ops macro is introduced, this
112 should be a define block between direct PL_stack_sp and dSP operations,
113 presently, using PL_stack_sp is biased towards CISC cpus */
114 SV * const sv = *PL_stack_sp;
118 if (PL_op->op_type == OP_AND)
120 return cLOGOP->op_other;
128 /* sassign keeps its args in the optree traditionally backwards.
129 So we pop them differently.
131 SV *left = POPs; SV *right = TOPs;
133 if (PL_op->op_private & OPpASSIGN_BACKWARDS) { /* {or,and,dor}assign */
134 SV * const temp = left;
135 left = right; right = temp;
137 assert(TAINTING_get || !TAINT_get);
138 if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
140 if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
142 SV * const cv = SvRV(right);
143 const U32 cv_type = SvTYPE(cv);
144 const bool is_gv = isGV_with_GP(left);
145 const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
151 /* Can do the optimisation if left (LVALUE) is not a typeglob,
152 right (RVALUE) is a reference to something, and we're in void
154 if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
155 /* Is the target symbol table currently empty? */
156 GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
157 if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
158 /* Good. Create a new proxy constant subroutine in the target.
159 The gv becomes a(nother) reference to the constant. */
160 SV *const value = SvRV(cv);
162 SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
163 SvPCS_IMPORTED_on(gv);
165 SvREFCNT_inc_simple_void(value);
171 /* Need to fix things up. */
173 /* Need to fix GV. */
174 left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
178 /* We've been returned a constant rather than a full subroutine,
179 but they expect a subroutine reference to apply. */
181 ENTER_with_name("sassign_coderef");
182 SvREFCNT_inc_void(SvRV(cv));
183 /* newCONSTSUB takes a reference count on the passed in SV
184 from us. We set the name to NULL, otherwise we get into
185 all sorts of fun as the reference to our new sub is
186 donated to the GV that we're about to assign to.
188 SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
191 LEAVE_with_name("sassign_coderef");
193 /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
195 First: ops for \&{"BONK"}; return us the constant in the
197 Second: ops for *{"BONK"} cause that symbol table entry
198 (and our reference to it) to be upgraded from RV
200 Thirdly: We get here. cv is actually PVGV now, and its
201 GvCV() is actually the subroutine we're looking for
203 So change the reference so that it points to the subroutine
204 of that typeglob, as that's what they were after all along.
206 GV *const upgraded = MUTABLE_GV(cv);
207 CV *const source = GvCV(upgraded);
210 assert(CvFLAGS(source) & CVf_CONST);
212 SvREFCNT_inc_simple_void_NN(source);
213 SvREFCNT_dec_NN(upgraded);
214 SvRV_set(right, MUTABLE_SV(source));
220 UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
221 (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
224 packWARN(WARN_MISC), "Useless assignment to a temporary"
226 SvSetMagicSV(left, right);
236 RETURNOP(cLOGOP->op_other);
238 RETURNOP(cLOGOP->op_next);
245 TAINT_NOT; /* Each statement is presumed innocent */
247 PL_stack_sp = PL_stack_base + cx->blk_oldsp;
249 if (!(PL_op->op_flags & OPf_SPECIAL)) {
250 assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx));
258 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
263 const char *rpv = NULL;
265 bool rcopied = FALSE;
267 if (TARG == right && right != left) { /* $r = $l.$r */
268 rpv = SvPV_nomg_const(right, rlen);
269 rbyte = !DO_UTF8(right);
270 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
271 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */
275 if (TARG != left) { /* not $l .= $r */
277 const char* const lpv = SvPV_nomg_const(left, llen);
278 lbyte = !DO_UTF8(left);
279 sv_setpvn(TARG, lpv, llen);
285 else { /* $l .= $r and left == TARG */
287 if ((left == right /* $l .= $l */
288 || (PL_op->op_private & OPpTARGET_MY)) /* $l = $l . $r */
289 && ckWARN(WARN_UNINITIALIZED)
295 SvPV_force_nomg_nolen(left);
297 lbyte = !DO_UTF8(left);
303 rpv = SvPV_nomg_const(right, rlen);
304 rbyte = !DO_UTF8(right);
306 if (lbyte != rbyte) {
308 sv_utf8_upgrade_nomg(TARG);
311 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
312 sv_utf8_upgrade_nomg(right);
313 rpv = SvPV_nomg_const(right, rlen);
316 sv_catpvn_nomg(TARG, rpv, rlen);
323 /* push the elements of av onto the stack.
324 * XXX Note that padav has similar code but without the mg_get().
325 * I suspect that the mg_get is no longer needed, but while padav
326 * differs, it can't share this function */
329 S_pushav(pTHX_ AV* const av)
332 const SSize_t maxarg = AvFILL(av) + 1;
334 if (UNLIKELY(SvRMAGICAL(av))) {
336 for (i=0; i < (PADOFFSET)maxarg; i++) {
337 SV ** const svp = av_fetch(av, i, FALSE);
338 /* See note in pp_helem, and bug id #27839 */
340 ? SvGMAGICAL(*svp) ? (mg_get(*svp), *svp) : *svp
346 for (i=0; i < (PADOFFSET)maxarg; i++) {
347 SV * const sv = AvARRAY(av)[i];
348 SP[i+1] = LIKELY(sv) ? sv : &PL_sv_undef;
356 /* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */
361 PADOFFSET base = PL_op->op_targ;
362 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
364 if (PL_op->op_flags & OPf_SPECIAL) {
365 /* fake the RHS of my ($x,$y,..) = @_ */
367 S_pushav(aTHX_ GvAVn(PL_defgv));
371 /* note, this is only skipped for compile-time-known void cxt */
372 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
375 for (i = 0; i <count; i++)
376 *++SP = PAD_SV(base+i);
378 if (PL_op->op_private & OPpLVAL_INTRO) {
379 SV **svp = &(PAD_SVl(base));
380 const UV payload = (UV)(
381 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
382 | (count << SAVE_TIGHT_SHIFT)
383 | SAVEt_CLEARPADRANGE);
384 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
385 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT))
393 for (i = 0; i <count; i++)
394 SvPADSTALE_off(*svp++); /* mark lexical as active */
405 OP * const op = PL_op;
406 /* access PL_curpad once */
407 SV ** const padentry = &(PAD_SVl(op->op_targ));
412 PUTBACK; /* no pop/push after this, TOPs ok */
414 if (op->op_flags & OPf_MOD) {
415 if (op->op_private & OPpLVAL_INTRO)
416 if (!(op->op_private & OPpPAD_STATE))
417 save_clearsv(padentry);
418 if (op->op_private & OPpDEREF) {
419 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather
420 than TARG reduces the scope of TARG, so it does not
421 span the call to save_clearsv, resulting in smaller
423 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
435 tryAMAGICunTARGETlist(iter_amg, 0);
436 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
438 else PL_last_in_gv = PL_argvgv, PL_stack_sp--;
439 if (!isGV_with_GP(PL_last_in_gv)) {
440 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
441 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));
444 XPUSHs(MUTABLE_SV(PL_last_in_gv));
447 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
448 if (PL_last_in_gv == (GV *)&PL_sv_undef)
449 PL_last_in_gv = NULL;
451 assert(isGV_with_GP(PL_last_in_gv));
454 return do_readline();
462 tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
466 (SvIOK_notUV(left) && SvIOK_notUV(right))
467 ? (SvIVX(left) == SvIVX(right))
468 : ( do_ncmp(left, right) == 0)
474 /* also used for: pp_i_preinc() */
478 SV *sv = *PL_stack_sp;
480 if (LIKELY(((sv->sv_flags &
481 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
482 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
484 && SvIVX(sv) != IV_MAX)
486 SvIV_set(sv, SvIVX(sv) + 1);
488 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
495 /* also used for: pp_i_predec() */
499 SV *sv = *PL_stack_sp;
501 if (LIKELY(((sv->sv_flags &
502 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
503 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
505 && SvIVX(sv) != IV_MIN)
507 SvIV_set(sv, SvIVX(sv) - 1);
509 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
516 /* also used for: pp_orassign() */
525 if (PL_op->op_type == OP_OR)
527 RETURNOP(cLOGOP->op_other);
532 /* also used for: pp_dor() pp_dorassign() */
539 const int op_type = PL_op->op_type;
540 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);
545 if (UNLIKELY(!sv || !SvANY(sv))) {
546 if (op_type == OP_DOR)
548 RETURNOP(cLOGOP->op_other);
554 if (UNLIKELY(!sv || !SvANY(sv)))
559 switch (SvTYPE(sv)) {
561 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
565 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
569 if (CvROOT(sv) || CvXSUB(sv))
582 if(op_type == OP_DOR)
584 RETURNOP(cLOGOP->op_other);
586 /* assuming OP_DEFINED */
596 dSP; dATARGET; bool useleft; SV *svl, *svr;
598 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
602 #ifdef PERL_PRESERVE_IVUV
604 /* special-case some simple common cases */
605 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
607 U32 flags = (svl->sv_flags & svr->sv_flags);
608 if (flags & SVf_IOK) {
609 /* both args are simple IVs */
614 topl = ((UV)il) >> (UVSIZE * 8 - 2);
615 topr = ((UV)ir) >> (UVSIZE * 8 - 2);
617 /* if both are in a range that can't under/overflow, do a
618 * simple integer add: if the top of both numbers
619 * are 00 or 11, then it's safe */
620 if (!( ((topl+1) | (topr+1)) & 2)) {
622 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
628 else if (flags & SVf_NOK) {
629 /* both args are NVs */
634 #if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
635 !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
636 && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
638 nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
641 /* nothing was lost by converting to IVs */
644 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
652 useleft = USE_LEFT(svl);
653 /* We must see if we can perform the addition with integers if possible,
654 as the integer code detects overflow while the NV code doesn't.
655 If either argument hasn't had a numeric conversion yet attempt to get
656 the IV. It's important to do this now, rather than just assuming that
657 it's not IOK as a PV of "9223372036854775806" may not take well to NV
658 addition, and an SV which is NOK, NV=6.0 ought to be coerced to
659 integer in case the second argument is IV=9223372036854775806
660 We can (now) rely on sv_2iv to do the right thing, only setting the
661 public IOK flag if the value in the NV (or PV) slot is truly integer.
663 A side effect is that this also aggressively prefers integer maths over
664 fp maths for integer values.
666 How to detect overflow?
668 C 99 section 6.2.6.1 says
670 The range of nonnegative values of a signed integer type is a subrange
671 of the corresponding unsigned integer type, and the representation of
672 the same value in each type is the same. A computation involving
673 unsigned operands can never overflow, because a result that cannot be
674 represented by the resulting unsigned integer type is reduced modulo
675 the number that is one greater than the largest value that can be
676 represented by the resulting type.
680 which I read as "unsigned ints wrap."
682 signed integer overflow seems to be classed as "exception condition"
684 If an exceptional condition occurs during the evaluation of an
685 expression (that is, if the result is not mathematically defined or not
686 in the range of representable values for its type), the behavior is
689 (6.5, the 5th paragraph)
691 I had assumed that on 2s complement machines signed arithmetic would
692 wrap, hence coded pp_add and pp_subtract on the assumption that
693 everything perl builds on would be happy. After much wailing and
694 gnashing of teeth it would seem that irix64 knows its ANSI spec well,
695 knows that it doesn't need to, and doesn't. Bah. Anyway, the all-
696 unsigned code below is actually shorter than the old code. :-)
699 if (SvIV_please_nomg(svr)) {
700 /* Unless the left argument is integer in range we are going to have to
701 use NV maths. Hence only attempt to coerce the right argument if
702 we know the left is integer. */
710 /* left operand is undef, treat as zero. + 0 is identity,
711 Could SETi or SETu right now, but space optimise by not adding
712 lots of code to speed up what is probably a rarish case. */
714 /* Left operand is defined, so is it IV? */
715 if (SvIV_please_nomg(svl)) {
716 if ((auvok = SvUOK(svl)))
719 const IV aiv = SvIVX(svl);
722 auvok = 1; /* Now acting as a sign flag. */
724 auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
731 bool result_good = 0;
734 bool buvok = SvUOK(svr);
739 const IV biv = SvIVX(svr);
744 buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
746 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
747 else "IV" now, independent of how it came in.
748 if a, b represents positive, A, B negative, a maps to -A etc
753 all UV maths. negate result if A negative.
754 add if signs same, subtract if signs differ. */
760 /* Must get smaller */
766 /* result really should be -(auv-buv). as its negation
767 of true value, need to swap our result flag */
784 if (result <= (UV)IV_MIN)
785 SETi(result == (UV)IV_MIN
786 ? IV_MIN : -(IV)result);
788 /* result valid, but out of range for IV. */
793 } /* Overflow, drop through to NVs. */
798 useleft = USE_LEFT(svl);
802 NV value = SvNV_nomg(svr);
805 /* left operand is undef, treat as zero. + 0.0 is identity. */
809 SETn( value + SvNV_nomg(svl) );
815 /* also used for: pp_aelemfast_lex() */
820 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
821 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
822 const U32 lval = PL_op->op_flags & OPf_MOD;
823 const I8 key = (I8)PL_op->op_private;
827 assert(SvTYPE(av) == SVt_PVAV);
831 /* inlined av_fetch() for simple cases ... */
832 if (!SvRMAGICAL(av) && key >= 0 && key <= AvFILLp(av)) {
833 sv = AvARRAY(av)[key];
840 /* ... else do it the hard way */
841 svp = av_fetch(av, key, lval);
842 sv = (svp ? *svp : &PL_sv_undef);
844 if (UNLIKELY(!svp && lval))
845 DIE(aTHX_ PL_no_aelem, (int)key);
847 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
857 do_join(TARG, *MARK, MARK, SP);
863 /* Oversized hot code. */
865 /* also used for: pp_say() */
869 dSP; dMARK; dORIGMARK;
873 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;
877 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
880 if (MARK == ORIGMARK) {
881 /* If using default handle then we need to make space to
882 * pass object as 1st arg, so move other args up ...
886 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
889 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
891 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
892 | (PL_op->op_type == OP_SAY
893 ? TIED_METHOD_SAY : 0)), sp - mark);
896 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
897 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
900 SETERRNO(EBADF,RMS_IFI);
903 else if (!(fp = IoOFP(io))) {
905 report_wrongway_fh(gv, '<');
908 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);
912 SV * const ofs = GvSV(PL_ofsgv); /* $, */
914 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
916 if (!do_print(*MARK, fp))
920 /* don't use 'ofs' here - it may be invalidated by magic callbacks */
921 if (!do_print(GvSV(PL_ofsgv), fp)) {
930 if (!do_print(*MARK, fp))
938 if (PL_op->op_type == OP_SAY) {
939 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))
942 else if (PL_ors_sv && SvOK(PL_ors_sv))
943 if (!do_print(PL_ors_sv, fp)) /* $\ */
946 if (IoFLAGS(io) & IOf_FLUSH)
947 if (PerlIO_flush(fp) == EOF)
957 XPUSHs(&PL_sv_undef);
962 /* also used for: pp_rv2hv() */
963 /* also called directly by pp_lvavref */
968 const U8 gimme = GIMME_V;
969 static const char an_array[] = "an ARRAY";
970 static const char a_hash[] = "a HASH";
971 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
972 || PL_op->op_type == OP_LVAVREF;
973 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
977 if (UNLIKELY(SvAMAGIC(sv))) {
978 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
981 if (UNLIKELY(SvTYPE(sv) != type))
982 /* diag_listed_as: Not an ARRAY reference */
983 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
984 else if (UNLIKELY(PL_op->op_flags & OPf_MOD
985 && PL_op->op_private & OPpLVAL_INTRO))
986 Perl_croak(aTHX_ "%s", PL_no_localize_ref);
988 else if (UNLIKELY(SvTYPE(sv) != type)) {
991 if (!isGV_with_GP(sv)) {
992 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
1000 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
1001 if (PL_op->op_private & OPpLVAL_INTRO)
1002 sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
1004 if (PL_op->op_flags & OPf_REF) {
1008 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
1009 const I32 flags = is_lvalue_sub();
1010 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1011 if (gimme != G_ARRAY)
1012 goto croak_cant_return;
1019 AV *const av = MUTABLE_AV(sv);
1020 /* The guts of pp_rv2av */
1021 if (gimme == G_ARRAY) {
1027 else if (gimme == G_SCALAR) {
1029 const SSize_t maxarg = AvFILL(av) + 1;
1033 /* The guts of pp_rv2hv */
1034 if (gimme == G_ARRAY) { /* array wanted */
1036 return Perl_do_kv(aTHX);
1038 else if ((PL_op->op_private & OPpTRUEBOOL
1039 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
1040 && block_gimme() == G_VOID ))
1041 && (!SvRMAGICAL(sv) || !mg_find(sv, PERL_MAGIC_tied)))
1042 SETs(HvUSEDKEYS(sv) ? &PL_sv_yes : sv_2mortal(newSViv(0)));
1043 else if (gimme == G_SCALAR) {
1045 TARG = Perl_hv_scalar(aTHX_ MUTABLE_HV(sv));
1052 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
1053 is_pp_rv2av ? "array" : "hash");
1058 S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
1060 PERL_ARGS_ASSERT_DO_ODDBALL;
1063 if (ckWARN(WARN_MISC)) {
1065 if (oddkey == firstkey &&
1067 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
1068 SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
1070 err = "Reference found where even-sized list expected";
1073 err = "Odd number of elements in hash assignment";
1074 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
1081 /* Do a mark and sweep with the SVf_BREAK flag to detect elements which
1082 * are common to both the LHS and RHS of an aassign, and replace them
1083 * with copies. All these copies are made before the actual list assign is
1086 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
1087 * element ($b) to the first LH element ($a), modifies $a; when the
1088 * second assignment is done, the second RH element now has the wrong
1089 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
1090 * Note that we don't need to make a mortal copy of $b.
1092 * The algorithm below works by, for every RHS element, mark the
1093 * corresponding LHS target element with SVf_BREAK. Then if the RHS
1094 * element is found with SVf_BREAK set, it means it would have been
1095 * modified, so make a copy.
1096 * Note that by scanning both LHS and RHS in lockstep, we avoid
1097 * unnecessary copies (like $b above) compared with a naive
1098 * "mark all LHS; copy all marked RHS; unmark all LHS".
1100 * If the LHS element is a 'my' declaration and has a refcount of 1, then
1101 * it can't be common and can be skipped.
1103 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
1104 * that we thought we didn't need to call S_aassign_copy_common(), but we
1105 * have anyway for sanity checking. If we find we need to copy, then panic.
1108 PERL_STATIC_INLINE void
1109 S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
1110 SV **firstrelem, SV **lastrelem
1119 SSize_t lcount = lastlelem - firstlelem + 1;
1120 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
1121 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
1122 bool copy_all = FALSE;
1124 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
1125 assert(firstlelem < lastlelem); /* at least 2 LH elements */
1126 assert(firstrelem < lastrelem); /* at least 2 RH elements */
1130 /* we never have to copy the first RH element; it can't be corrupted
1131 * by assigning something to the corresponding first LH element.
1132 * So this scan does in a loop: mark LHS[N]; test RHS[N+1]
1134 relem = firstrelem + 1;
1136 for (; relem <= lastrelem; relem++) {
1139 /* mark next LH element */
1141 if (--lcount >= 0) {
1144 if (UNLIKELY(!svl)) {/* skip AV alias marker */
1145 assert (lelem <= lastlelem);
1151 if (SvSMAGICAL(svl)) {
1154 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
1157 /* this LH element will consume all further args;
1158 * no need to mark any further LH elements (if any).
1159 * But we still need to scan any remaining RHS elements;
1160 * set lcount negative to distinguish from lcount == 0,
1161 * so the loop condition continues being true
1164 lelem--; /* no need to unmark this element */
1166 else if (!(do_rc1 && SvREFCNT(svl) == 1) && svl != &PL_sv_undef) {
1167 assert(!SvIMMORTAL(svl));
1168 SvFLAGS(svl) |= SVf_BREAK;
1172 /* don't check RH element if no SVf_BREAK flags set yet */
1179 /* see if corresponding RH element needs copying */
1185 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
1189 /* op_dump(PL_op); */
1191 "panic: aassign skipped needed copy of common RH elem %"
1192 UVuf, (UV)(relem - firstrelem));
1196 TAINT_NOT; /* Each item is independent */
1198 /* Dear TODO test in t/op/sort.t, I love you.
1199 (It's relying on a panic, not a "semi-panic" from newSVsv()
1200 and then an assertion failure below.) */
1201 if (UNLIKELY(SvIS_FREED(svr))) {
1202 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
1205 /* avoid break flag while copying; otherwise COW etc
1207 SvFLAGS(svr) &= ~SVf_BREAK;
1208 /* Not newSVsv(), as it does not allow copy-on-write,
1209 resulting in wasteful copies.
1210 Also, we use SV_NOSTEAL in case the SV is used more than
1211 once, e.g. (...) = (f())[0,0]
1212 Where the same SV appears twice on the RHS without a ref
1213 count bump. (Although I suspect that the SV won't be
1214 stealable here anyway - DAPM).
1216 *relem = sv_mortalcopy_flags(svr,
1217 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1218 /* ... but restore afterwards in case it's needed again,
1219 * e.g. ($a,$b,$c) = (1,$a,$a)
1221 SvFLAGS(svr) |= SVf_BREAK;
1233 while (lelem > firstlelem) {
1234 SV * const svl = *(--lelem);
1236 SvFLAGS(svl) &= ~SVf_BREAK;
1245 SV **lastlelem = PL_stack_sp;
1246 SV **lastrelem = PL_stack_base + POPMARK;
1247 SV **firstrelem = PL_stack_base + POPMARK + 1;
1248 SV **firstlelem = lastrelem + 1;
1261 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
1262 * only need to save locally, not on the save stack */
1263 U16 old_delaymagic = PL_delaymagic;
1268 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
1270 /* If there's a common identifier on both sides we have to take
1271 * special care that assigning the identifier on the left doesn't
1272 * clobber a value on the right that's used later in the list.
1275 /* at least 2 LH and RH elements, or commonality isn't an issue */
1276 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1277 for (relem = firstrelem+1; relem <= lastrelem; relem++) {
1278 if (SvGMAGICAL(*relem))
1281 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1282 if (*lelem && SvSMAGICAL(*lelem))
1285 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
1286 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
1287 /* skip the scan if all scalars have a ref count of 1 */
1288 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1290 if (!sv || SvREFCNT(sv) == 1)
/* NOTE(review): the condition below tests SVt_PVAV twice; the second
 * comparison looks like a typo for SVt_PVHV. The apparent intent is that
 * only plain scalars may be skipped, while arrays AND hashes must force
 * the common-value scan — confirm against upstream pp_hot.c, where this
 * duplicated test was later corrected. */
1292 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVAV)
1299 S_aassign_copy_common(aTHX_
1300 firstlelem, lastlelem, firstrelem, lastrelem
1310 /* on debugging builds, do the scan even if we've concluded we
1311 * don't need to, then panic if we find commonality. Note that the
1312 * scanner assumes at least 2 elements */
1313 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1321 lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
1328 while (LIKELY(lelem <= lastlelem)) {
1330 TAINT_NOT; /* Each item stands on its own, taintwise. */
1332 if (UNLIKELY(!sv)) {
1335 ASSUME(SvTYPE(sv) == SVt_PVAV);
1337 switch (SvTYPE(sv)) {
1339 bool already_copied = FALSE;
1340 ary = MUTABLE_AV(sv);
1341 magic = SvMAGICAL(ary) != 0;
1343 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1345 /* We need to clear ary. There is a danger that if we do this,
1346 * elements on the RHS may be prematurely freed, e.g.
1348 * In the case of possible commonality, make a copy of each
1349 * RHS SV *before* clearing the array, and add a reference
1350 * from the tmps stack, so that it doesn't leak on death.
1351 * Otherwise, make a copy of each RHS SV only as we're storing
1352 * it into the array - that way we don't have to worry about
1353 * it being leaked if we die, but don't incur the cost of
1354 * mortalising everything.
1357 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1358 && (relem <= lastrelem)
1359 && (magic || AvFILL(ary) != -1))
1362 EXTEND_MORTAL(lastrelem - relem + 1);
1363 for (svp = relem; svp <= lastrelem; svp++) {
1364 /* see comment in S_aassign_copy_common about SV_NOSTEAL */
1365 *svp = sv_mortalcopy_flags(*svp,
1366 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1369 already_copied = TRUE;
1373 if (relem <= lastrelem)
1374 av_extend(ary, lastrelem - relem);
1377 while (relem <= lastrelem) { /* gobble up all the rest */
1379 if (LIKELY(!alias)) {
1384 /* before newSV, in case it dies */
1387 /* see comment in S_aassign_copy_common about
1389 sv_setsv_flags(sv, *relem,
1390 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
1395 if (!already_copied)
1398 DIE(aTHX_ "Assigned value is not a reference");
1399 if (SvTYPE(SvRV(*relem)) > SVt_PVLV)
1400 /* diag_listed_as: Assigned value is not %s reference */
1402 "Assigned value is not a SCALAR reference");
1403 if (lval && !already_copied)
1404 *relem = sv_mortalcopy(*relem);
1405 /* XXX else check for weak refs? */
1406 sv = SvREFCNT_inc_NN(SvRV(*relem));
1410 SvREFCNT_inc_simple_void_NN(sv); /* undo mortal free */
1411 didstore = av_store(ary,i++,sv);
1420 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
1421 SvSETMAGIC(MUTABLE_SV(ary));
1426 case SVt_PVHV: { /* normal hash */
1430 SV** topelem = relem;
1431 SV **firsthashrelem = relem;
1432 bool already_copied = FALSE;
1434 hash = MUTABLE_HV(sv);
1435 magic = SvMAGICAL(hash) != 0;
1437 odd = ((lastrelem - firsthashrelem)&1)? 0 : 1;
1438 if (UNLIKELY(odd)) {
1439 do_oddball(lastrelem, firsthashrelem);
1440 /* we have firstlelem to reuse, it's not needed anymore
1442 *(lastrelem+1) = &PL_sv_undef;
1446 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1448 /* We need to clear hash. There is a danger that if we do this,
1449 * elements on the RHS may be prematurely freed, e.g.
1450 * %h = (foo => $h{bar});
1451 * In the case of possible commonality, make a copy of each
1452 * RHS SV *before* clearing the hash, and add a reference
1453 * from the tmps stack, so that it doesn't leak on death.
1456 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1457 && (relem <= lastrelem)
1458 && (magic || HvUSEDKEYS(hash)))
1461 EXTEND_MORTAL(lastrelem - relem + 1);
1462 for (svp = relem; svp <= lastrelem; svp++) {
1463 *svp = sv_mortalcopy_flags(*svp,
1464 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1467 already_copied = TRUE;
1472 while (LIKELY(relem < lastrelem+odd)) { /* gobble up all the rest */
1475 /* Copy the key if aassign is called in lvalue context,
1476 to avoid having the next op modify our rhs. Copy
1477 it also if it is gmagical, lest it make the
1478 hv_store_ent call below croak, leaking the value. */
1479 sv = (lval || SvGMAGICAL(*relem)) && !already_copied
1480 ? sv_mortalcopy(*relem)
1489 sv_setsv_nomg(tmpstr,*relem++); /* value */
1492 if (gimme == G_ARRAY) {
1493 if (hv_exists_ent(hash, sv, 0))
1494 /* key overwrites an existing entry */
1497 /* copy element back: possibly to an earlier
1498 * stack location if we encountered dups earlier,
1499 * possibly to a later stack location if odd */
1501 *topelem++ = tmpstr;
1505 SvREFCNT_inc_simple_void_NN(tmpstr); /* undo mortal free */
1506 didstore = hv_store_ent(hash,sv,tmpstr,0);
1508 if (!didstore) sv_2mortal(tmpstr);
1514 if (duplicates && gimme == G_ARRAY) {
1515 /* at this point we have removed the duplicate key/value
1516 * pairs from the stack, but the remaining values may be
1517 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
1518 * the (a 2), but the stack now probably contains
1519 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
1520 * obliterates the earlier key. So refresh all values. */
1521 lastrelem -= duplicates;
1522 relem = firsthashrelem;
1523 while (relem < lastrelem+odd) {
1525 he = hv_fetch_ent(hash, *relem++, 0, 0);
1526 *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
1529 if (odd && gimme == G_ARRAY) lastrelem++;
1533 if (SvIMMORTAL(sv)) {
1534 if (relem <= lastrelem)
1538 if (relem <= lastrelem) {
1540 SvTEMP(sv) && !SvSMAGICAL(sv) && SvREFCNT(sv) == 1 &&
1541 (!isGV_with_GP(sv) || SvFAKE(sv)) && ckWARN(WARN_MISC)
1544 packWARN(WARN_MISC),
1545 "Useless assignment to a temporary"
1547 sv_setsv(sv, *relem);
1551 sv_setsv(sv, &PL_sv_undef);
1556 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
1557 /* Will be used to set PL_tainting below */
1558 Uid_t tmp_uid = PerlProc_getuid();
1559 Uid_t tmp_euid = PerlProc_geteuid();
1560 Gid_t tmp_gid = PerlProc_getgid();
1561 Gid_t tmp_egid = PerlProc_getegid();
1563 /* XXX $> et al currently silently ignore failures */
1564 if (PL_delaymagic & DM_UID) {
1565 #ifdef HAS_SETRESUID
1567 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1568 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
1571 # ifdef HAS_SETREUID
1573 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1574 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
1577 if ((PL_delaymagic & DM_UID) == DM_RUID) {
1578 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
1579 PL_delaymagic &= ~DM_RUID;
1581 # endif /* HAS_SETRUID */
1583 if ((PL_delaymagic & DM_UID) == DM_EUID) {
1584 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
1585 PL_delaymagic &= ~DM_EUID;
1587 # endif /* HAS_SETEUID */
1588 if (PL_delaymagic & DM_UID) {
1589 if (PL_delaymagic_uid != PL_delaymagic_euid)
1590 DIE(aTHX_ "No setreuid available");
1591 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
1593 # endif /* HAS_SETREUID */
1594 #endif /* HAS_SETRESUID */
1596 tmp_uid = PerlProc_getuid();
1597 tmp_euid = PerlProc_geteuid();
1599 /* XXX $> et al currently silently ignore failures */
1600 if (PL_delaymagic & DM_GID) {
1601 #ifdef HAS_SETRESGID
1603 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1604 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
1607 # ifdef HAS_SETREGID
1609 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1610 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
1613 if ((PL_delaymagic & DM_GID) == DM_RGID) {
1614 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
1615 PL_delaymagic &= ~DM_RGID;
1617 # endif /* HAS_SETRGID */
1619 if ((PL_delaymagic & DM_GID) == DM_EGID) {
1620 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
1621 PL_delaymagic &= ~DM_EGID;
1623 # endif /* HAS_SETEGID */
1624 if (PL_delaymagic & DM_GID) {
1625 if (PL_delaymagic_gid != PL_delaymagic_egid)
1626 DIE(aTHX_ "No setregid available");
1627 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
1629 # endif /* HAS_SETREGID */
1630 #endif /* HAS_SETRESGID */
1632 tmp_gid = PerlProc_getgid();
1633 tmp_egid = PerlProc_getegid();
1635 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
1636 #ifdef NO_TAINT_SUPPORT
1637 PERL_UNUSED_VAR(tmp_uid);
1638 PERL_UNUSED_VAR(tmp_euid);
1639 PERL_UNUSED_VAR(tmp_gid);
1640 PERL_UNUSED_VAR(tmp_egid);
1643 PL_delaymagic = old_delaymagic;
1645 if (gimme == G_VOID)
1646 SP = firstrelem - 1;
1647 else if (gimme == G_SCALAR) {
1650 SETi(lastrelem - firstrelem + 1);
1654 /* note that in this case *firstlelem may have been overwritten
1655 by sv_undef in the odd hash case */
1658 SP = firstrelem + (lastlelem - firstlelem);
1659 lelem = firstlelem + (relem - firstrelem);
1661 *relem++ = (lelem <= lastlelem) ? *lelem++ : &PL_sv_undef;
1671 PMOP * const pm = cPMOP;
1672 REGEXP * rx = PM_GETRE(pm);
1673 SV * const pkg = rx ? CALLREG_PACKAGE(rx) : NULL;
1674 SV * const rv = sv_newmortal();
1678 SvUPGRADE(rv, SVt_IV);
1679 /* For a subroutine describing itself as "This is a hacky workaround" I'm
1680 loath to use it here, but it seems to be the right fix. Or close.
1681 The key part appears to be that it's essential for pp_qr to return a new
1682 object (SV), which implies that there needs to be an effective way to
1683 generate a new SV from the existing SV that is pre-compiled in the
1685 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));
1688 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
1689 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
1690 *cvp = cv_clone(cv);
1691 SvREFCNT_dec_NN(cv);
1695 HV *const stash = gv_stashsv(pkg, GV_ADD);
1696 SvREFCNT_dec_NN(pkg);
1697 (void)sv_bless(rv, stash);
1700 if (UNLIKELY(RX_ISTAINTED(rx))) {
1702 SvTAINTED_on(SvRV(rv));
1715 SSize_t curpos = 0; /* initial pos() or current $+[0] */
1718 const char *truebase; /* Start of string */
1719 REGEXP *rx = PM_GETRE(pm);
1721 const U8 gimme = GIMME_V;
1723 const I32 oldsave = PL_savestack_ix;
1724 I32 had_zerolen = 0;
1727 if (PL_op->op_flags & OPf_STACKED)
1736 PUTBACK; /* EVAL blocks need stack_sp. */
1737 /* Skip get-magic if this is a qr// clone, because regcomp has
1739 truebase = ReANY(rx)->mother_re
1740 ? SvPV_nomg_const(TARG, len)
1741 : SvPV_const(TARG, len);
1743 DIE(aTHX_ "panic: pp_match");
1744 strend = truebase + len;
1745 rxtainted = (RX_ISTAINTED(rx) ||
1746 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
1749 /* We need to know this in case we fail out early - pos() must be reset */
1750 global = dynpm->op_pmflags & PMf_GLOBAL;
1752 /* PMdf_USED is set after a ?? matches once */
1755 SvREADONLY(PL_regex_pad[pm->op_pmoffset])
1757 pm->op_pmflags & PMf_USED
1760 DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once"));
1764 /* empty pattern special-cased to use last successful pattern if
1765 possible, except for qr// */
1766 if (!ReANY(rx)->mother_re && !RX_PRELEN(rx) && PL_curpm) {
1767 if (PL_curpm == PL_reg_curpm)
1768 Perl_croak(aTHX_ "Use of the empty pattern inside of "
1769 "a regex code block is forbidden");
1774 if (RX_MINLEN(rx) >= 0 && (STRLEN)RX_MINLEN(rx) > len) {
1775 DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
1776 UVuf" < %"IVdf")\n",
1777 (UV)len, (IV)RX_MINLEN(rx)));
1781 /* get pos() if //g */
1783 mg = mg_find_mglob(TARG);
1784 if (mg && mg->mg_len >= 0) {
1785 curpos = MgBYTEPOS(mg, TARG, truebase, len);
1786 /* last time pos() was set, it was zero-length match */
1787 if (mg->mg_flags & MGf_MINMATCH)
1792 #ifdef PERL_SAWAMPERSAND
1795 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
1796 || (dynpm->op_pmflags & PMf_KEEPCOPY)
1800 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
1801 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
1802 * only on the first iteration. Therefore we need to copy $' as well
1803 * as $&, to make the rest of the string available for captures in
1804 * subsequent iterations */
1805 if (! (global && gimme == G_ARRAY))
1806 r_flags |= REXEC_COPY_SKIP_POST;
1808 #ifdef PERL_SAWAMPERSAND
1809 if (dynpm->op_pmflags & PMf_KEEPCOPY)
1810 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
1811 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
1818 s = truebase + curpos;
1820 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
1821 had_zerolen, TARG, NULL, r_flags))
1825 if (dynpm->op_pmflags & PMf_ONCE)
1827 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
1829 dynpm->op_pmflags |= PMf_USED;
1833 RX_MATCH_TAINTED_on(rx);
1834 TAINT_IF(RX_MATCH_TAINTED(rx));
1838 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
1840 mg = sv_magicext_mglob(TARG);
1841 MgBYTEPOS_set(mg, TARG, truebase, RX_OFFS(rx)[0].end);
1842 if (RX_ZERO_LEN(rx))
1843 mg->mg_flags |= MGf_MINMATCH;
1845 mg->mg_flags &= ~MGf_MINMATCH;
1848 if ((!RX_NPARENS(rx) && !global) || gimme != G_ARRAY) {
1849 LEAVE_SCOPE(oldsave);
1853 /* push captures on stack */
1856 const I32 nparens = RX_NPARENS(rx);
1857 I32 i = (global && !nparens) ? 1 : 0;
1859 SPAGAIN; /* EVAL blocks could move the stack. */
1860 EXTEND(SP, nparens + i);
1861 EXTEND_MORTAL(nparens + i);
1862 for (i = !i; i <= nparens; i++) {
1863 PUSHs(sv_newmortal());
1864 if (LIKELY((RX_OFFS(rx)[i].start != -1)
1865 && RX_OFFS(rx)[i].end != -1 ))
1867 const I32 len = RX_OFFS(rx)[i].end - RX_OFFS(rx)[i].start;
1868 const char * const s = RX_OFFS(rx)[i].start + truebase;
1869 if (UNLIKELY(RX_OFFS(rx)[i].end < 0 || RX_OFFS(rx)[i].start < 0
1870 || len < 0 || len > strend - s))
1871 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
1872 "start=%ld, end=%ld, s=%p, strend=%p, len=%"UVuf,
1873 (long) i, (long) RX_OFFS(rx)[i].start,
1874 (long)RX_OFFS(rx)[i].end, s, strend, (UV) len);
1875 sv_setpvn(*SP, s, len);
1876 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
1881 curpos = (UV)RX_OFFS(rx)[0].end;
1882 had_zerolen = RX_ZERO_LEN(rx);
1883 PUTBACK; /* EVAL blocks may use stack */
1884 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
1887 LEAVE_SCOPE(oldsave);
1890 NOT_REACHED; /* NOTREACHED */
1893 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
1895 mg = mg_find_mglob(TARG);
1899 LEAVE_SCOPE(oldsave);
1900 if (gimme == G_ARRAY)
1906 Perl_do_readline(pTHX)
1908 dSP; dTARGETSTACKED;
1913 IO * const io = GvIO(PL_last_in_gv);
1914 const I32 type = PL_op->op_type;
1915 const U8 gimme = GIMME_V;
1918 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
1920 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
1921 if (gimme == G_SCALAR) {
1923 SvSetSV_nosteal(TARG, TOPs);
1933 if (IoFLAGS(io) & IOf_ARGV) {
1934 if (IoFLAGS(io) & IOf_START) {
1936 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) {
1937 IoFLAGS(io) &= ~IOf_START;
1938 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
1939 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
1940 sv_setpvs(GvSVn(PL_last_in_gv), "-");
1941 SvSETMAGIC(GvSV(PL_last_in_gv));
1946 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
1947 if (!fp) { /* Note: fp != IoIFP(io) */
1948 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
1951 else if (type == OP_GLOB)
1952 fp = Perl_start_glob(aTHX_ POPs, io);
1954 else if (type == OP_GLOB)
1956 else if (IoTYPE(io) == IoTYPE_WRONLY) {
1957 report_wrongway_fh(PL_last_in_gv, '>');
1961 if ((!io || !(IoFLAGS(io) & IOf_START))
1962 && ckWARN(WARN_CLOSED)
1965 report_evil_fh(PL_last_in_gv);
1967 if (gimme == G_SCALAR) {
1968 /* undef TARG, and push that undefined value */
1969 if (type != OP_RCATLINE) {
1970 sv_setsv(TARG,NULL);
1977 if (gimme == G_SCALAR) {
1979 if (type == OP_RCATLINE && SvGMAGICAL(sv))
1982 if (type == OP_RCATLINE)
1983 SvPV_force_nomg_nolen(sv);
1987 else if (isGV_with_GP(sv)) {
1988 SvPV_force_nomg_nolen(sv);
1990 SvUPGRADE(sv, SVt_PV);
1991 tmplen = SvLEN(sv); /* remember if already alloced */
1992 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
1993 /* try short-buffering it. Please update t/op/readline.t
1994 * if you change the growth length.
1999 if (type == OP_RCATLINE && SvOK(sv)) {
2001 SvPV_force_nomg_nolen(sv);
2007 sv = sv_2mortal(newSV(80));
2011 /* This should not be marked tainted if the fp is marked clean */
2012 #define MAYBE_TAINT_LINE(io, sv) \
2013 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
2018 /* delay EOF state for a snarfed empty file */
2019 #define SNARF_EOF(gimme,rs,io,sv) \
2020 (gimme != G_SCALAR || SvCUR(sv) \
2021 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
2025 if (!sv_gets(sv, fp, offset)
2027 || SNARF_EOF(gimme, PL_rs, io, sv)
2028 || PerlIO_error(fp)))
2030 PerlIO_clearerr(fp);
2031 if (IoFLAGS(io) & IOf_ARGV) {
2032 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
2035 (void)do_close(PL_last_in_gv, FALSE);
2037 else if (type == OP_GLOB) {
2038 if (!do_close(PL_last_in_gv, FALSE)) {
2039 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
2040 "glob failed (child exited with status %d%s)",
2041 (int)(STATUS_CURRENT >> 8),
2042 (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
2045 if (gimme == G_SCALAR) {
2046 if (type != OP_RCATLINE) {
2047 SV_CHECK_THINKFIRST_COW_DROP(TARG);
2053 MAYBE_TAINT_LINE(io, sv);
2056 MAYBE_TAINT_LINE(io, sv);
2058 IoFLAGS(io) |= IOf_NOLINE;
2062 if (type == OP_GLOB) {
2066 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
2067 char * const tmps = SvEND(sv) - 1;
2068 if (*tmps == *SvPVX_const(PL_rs)) {
2070 SvCUR_set(sv, SvCUR(sv) - 1);
2073 for (t1 = SvPVX_const(sv); *t1; t1++)
2075 if (strchr("*%?", *t1))
2077 if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
2080 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
2081 (void)POPs; /* Unmatched wildcard? Chuck it... */
2084 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
2085 if (ckWARN(WARN_UTF8)) {
2086 const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
2087 const STRLEN len = SvCUR(sv) - offset;
2090 if (!is_utf8_string_loc(s, len, &f))
2091 /* Emulate :encoding(utf8) warning in the same case. */
2092 Perl_warner(aTHX_ packWARN(WARN_UTF8),
2093 "utf8 \"\\x%02X\" does not map to Unicode",
2094 f < (U8*)SvEND(sv) ? *f : 0);
2097 if (gimme == G_ARRAY) {
2098 if (SvLEN(sv) - SvCUR(sv) > 20) {
2099 SvPV_shrink_to_cur(sv);
2101 sv = sv_2mortal(newSV(80));
2104 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
2105 /* try to reclaim a bit of scalar space (only on 1st alloc) */
2106 const STRLEN new_len
2107 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */
2108 SvPV_renew(sv, new_len);
2119 SV * const keysv = POPs;
2120 HV * const hv = MUTABLE_HV(POPs);
2121 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2122 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2124 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2125 bool preeminent = TRUE;
2127 if (SvTYPE(hv) != SVt_PVHV)
2134 /* If we can determine whether the element exists,
2135 * try to preserve the existence of a tied hash
2136 * element by using EXISTS and DELETE if possible.
2137 * Fall back to FETCH and STORE otherwise. */
2138 if (SvCANEXISTDELETE(hv))
2139 preeminent = hv_exists_ent(hv, keysv, 0);
2142 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2143 svp = he ? &HeVAL(he) : NULL;
2145 if (!svp || !*svp || *svp == &PL_sv_undef) {
2149 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2151 lv = sv_newmortal();
2152 sv_upgrade(lv, SVt_PVLV);
2154 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
2155 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
2156 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2162 if (HvNAME_get(hv) && isGV(*svp))
2163 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
2164 else if (preeminent)
2165 save_helem_flags(hv, keysv, svp,
2166 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
2168 SAVEHDELETE(hv, keysv);
2170 else if (PL_op->op_private & OPpDEREF) {
2171 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
2175 sv = (svp && *svp ? *svp : &PL_sv_undef);
2176 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
2177 * was to make C<local $tied{foo} = $tied{foo}> possible.
2178 * However, it seems no longer to be needed for that purpose, and
2179 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g>
2180 * would loop endlessly since the pos magic is getting set on the
2181 * mortal copy and lost. However, the copy has the effect of
2182 * triggering the get magic, and losing it altogether made things like
2183 * C<$tied{foo};> in void context no longer do get magic, which some
2184 * code relied on. Also, delayed triggering of magic on @+ and friends
2185 * meant the original regex may be out of scope by now. So as a
2186 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
2187 * being called too many times). */
2188 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
2195 /* a stripped-down version of Perl_softref2xv() for use by
2196 * pp_multideref(), which doesn't use PL_op->op_flags */
2199 S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
2202 if (PL_op->op_private & HINT_STRICT_REFS) {
2204 Perl_die(aTHX_ PL_no_symref_sv, sv,
2205 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
2207 Perl_die(aTHX_ PL_no_usym, what);
2210 Perl_die(aTHX_ PL_no_usym, what);
2211 return gv_fetchsv_nomg(sv, GV_ADD, type);
2215 /* Handle one or more aggregate derefs and array/hash indexings, e.g.
2216 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
2218 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
2219 * Each of these either contains a set of actions, or an argument, such as
2220 * an IV to use as an array index, or a lexical var to retrieve.
2221 * Several actions are stored per UV; we keep shifting new actions off the
2222 * one UV, and only reload when it becomes zero.
2227 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
2228 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
2229 UV actions = items->uv;
2232 /* this tells find_uninit_var() where we're up to */
2233 PL_multideref_pc = items;
2236 /* there are three main classes of action; the first retrieves
2237 * the initial AV or HV from a variable or the stack; the second
2238 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
2239 * the third an unrolled (/DREFHV, rv2hv, helem).
2241 switch (actions & MDEREF_ACTION_MASK) {
2244 actions = (++items)->uv;
2247 case MDEREF_AV_padav_aelem: /* $lex[...] */
2248 sv = PAD_SVl((++items)->pad_offset);
2251 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
2252 sv = UNOP_AUX_item_sv(++items);
2253 assert(isGV_with_GP(sv));
2254 sv = (SV*)GvAVn((GV*)sv);
2257 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
2262 goto do_AV_rv2av_aelem;
2265 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
2266 sv = UNOP_AUX_item_sv(++items);
2267 assert(isGV_with_GP(sv));
2268 sv = GvSVn((GV*)sv);
2269 goto do_AV_vivify_rv2av_aelem;
2271 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
2272 sv = PAD_SVl((++items)->pad_offset);
2275 do_AV_vivify_rv2av_aelem:
2276 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
2277 /* this is the OPpDEREF action normally found at the end of
2278 * ops like aelem, helem, rv2sv */
2279 sv = vivify_ref(sv, OPpDEREF_AV);
2283 /* this is basically a copy of pp_rv2av when it just has the
2286 if (LIKELY(SvROK(sv))) {
2287 if (UNLIKELY(SvAMAGIC(sv))) {
2288 sv = amagic_deref_call(sv, to_av_amg);
2291 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
2292 DIE(aTHX_ "Not an ARRAY reference");
2294 else if (SvTYPE(sv) != SVt_PVAV) {
2295 if (!isGV_with_GP(sv))
2296 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
2297 sv = MUTABLE_SV(GvAVn((GV*)sv));
2303 /* retrieve the key; this may be either a lexical or package
2304 * var (whose index/ptr is stored as an item) or a signed
2305 * integer constant stored as an item.
2308 IV elem = 0; /* to shut up stupid compiler warnings */
2311 assert(SvTYPE(sv) == SVt_PVAV);
2313 switch (actions & MDEREF_INDEX_MASK) {
2314 case MDEREF_INDEX_none:
2316 case MDEREF_INDEX_const:
2317 elem = (++items)->iv;
2319 case MDEREF_INDEX_padsv:
2320 elemsv = PAD_SVl((++items)->pad_offset);
2322 case MDEREF_INDEX_gvsv:
2323 elemsv = UNOP_AUX_item_sv(++items);
2324 assert(isGV_with_GP(elemsv));
2325 elemsv = GvSVn((GV*)elemsv);
2327 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
2328 && ckWARN(WARN_MISC)))
2329 Perl_warner(aTHX_ packWARN(WARN_MISC),
2330 "Use of reference \"%"SVf"\" as array index",
2332 /* the only time that S_find_uninit_var() needs this
2333 * is to determine which index value triggered the
2334 * undef warning. So just update it here. Note that
2335 * since we don't save and restore this var (e.g. for
2336 * tie or overload execution), its value will be
2337 * meaningless apart from just here */
2338 PL_multideref_pc = items;
2339 elem = SvIV(elemsv);
2344 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
2346 if (!(actions & MDEREF_FLAG_last)) {
2347 SV** svp = av_fetch((AV*)sv, elem, 1);
2348 if (!svp || ! (sv=*svp))
2349 DIE(aTHX_ PL_no_aelem, elem);
2353 if (PL_op->op_private &
2354 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2356 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2357 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
2360 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2361 sv = av_delete((AV*)sv, elem, discard);
2369 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2370 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2371 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2372 bool preeminent = TRUE;
2373 AV *const av = (AV*)sv;
2376 if (UNLIKELY(localizing)) {
2380 /* If we can determine whether the element exists,
2381 * try to preserve the existence of a tied array
2382 * element by using EXISTS and DELETE if possible.
2383 * Fall back to FETCH and STORE otherwise. */
2384 if (SvCANEXISTDELETE(av))
2385 preeminent = av_exists(av, elem);
2388 svp = av_fetch(av, elem, lval && !defer);
2391 if (!svp || !(sv = *svp)) {
2394 DIE(aTHX_ PL_no_aelem, elem);
2395 len = av_tindex(av);
2396 sv = sv_2mortal(newSVavdefelem(av,
2397 /* Resolve a negative index now, unless it points
2398 * before the beginning of the array, in which
2399 * case record it for error reporting in
2400 * magic_setdefelem. */
2401 elem < 0 && len + elem >= 0
2402 ? len + elem : elem, 1));
2405 if (UNLIKELY(localizing)) {
2407 save_aelem(av, elem, svp);
2408 sv = *svp; /* may have changed */
2411 SAVEADELETE(av, elem);
2416 sv = (svp ? *svp : &PL_sv_undef);
2417 /* see note in pp_helem() */
2418 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
2435 case MDEREF_HV_padhv_helem: /* $lex{...} */
2436 sv = PAD_SVl((++items)->pad_offset);
2439 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
2440 sv = UNOP_AUX_item_sv(++items);
2441 assert(isGV_with_GP(sv));
2442 sv = (SV*)GvHVn((GV*)sv);
2445 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
2450 goto do_HV_rv2hv_helem;
2453 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
2454 sv = UNOP_AUX_item_sv(++items);
2455 assert(isGV_with_GP(sv));
2456 sv = GvSVn((GV*)sv);
2457 goto do_HV_vivify_rv2hv_helem;
2459 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
2460 sv = PAD_SVl((++items)->pad_offset);
2463 do_HV_vivify_rv2hv_helem:
2464 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
2465 /* this is the OPpDEREF action normally found at the end of
2466 * ops like aelem, helem, rv2sv */
2467 sv = vivify_ref(sv, OPpDEREF_HV);
2471 /* this is basically a copy of pp_rv2hv when it just has the
2472 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
2475 if (LIKELY(SvROK(sv))) {
2476 if (UNLIKELY(SvAMAGIC(sv))) {
2477 sv = amagic_deref_call(sv, to_hv_amg);
2480 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
2481 DIE(aTHX_ "Not a HASH reference");
2483 else if (SvTYPE(sv) != SVt_PVHV) {
2484 if (!isGV_with_GP(sv))
2485 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
2486 sv = MUTABLE_SV(GvHVn((GV*)sv));
2492 /* retrieve the key; this may be either a lexical / package
2493 * var or a string constant, whose index/ptr is stored as an
2496 SV *keysv = NULL; /* to shut up stupid compiler warnings */
2498 assert(SvTYPE(sv) == SVt_PVHV);
2500 switch (actions & MDEREF_INDEX_MASK) {
2501 case MDEREF_INDEX_none:
2504 case MDEREF_INDEX_const:
2505 keysv = UNOP_AUX_item_sv(++items);
2508 case MDEREF_INDEX_padsv:
2509 keysv = PAD_SVl((++items)->pad_offset);
2512 case MDEREF_INDEX_gvsv:
2513 keysv = UNOP_AUX_item_sv(++items);
2514 keysv = GvSVn((GV*)keysv);
2518 /* see comment above about setting this var */
2519 PL_multideref_pc = items;
2522 /* ensure that candidate CONSTs have been HEKified */
2523 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
2524 || SvTYPE(keysv) >= SVt_PVMG
2527 || SvIsCOW_shared_hash(keysv));
2529 /* this is basically a copy of pp_helem with OPpDEREF skipped */
2531 if (!(actions & MDEREF_FLAG_last)) {
2532 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
2533 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
2534 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2538 if (PL_op->op_private &
2539 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2541 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2542 sv = hv_exists_ent((HV*)sv, keysv, 0)
2543 ? &PL_sv_yes : &PL_sv_no;
2546 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2547 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
2555 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2556 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2557 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2558 bool preeminent = TRUE;
2560 HV * const hv = (HV*)sv;
2563 if (UNLIKELY(localizing)) {
2567 /* If we can determine whether the element exists,
2568 * try to preserve the existence of a tied hash
2569 * element by using EXISTS and DELETE if possible.
2570 * Fall back to FETCH and STORE otherwise. */
2571 if (SvCANEXISTDELETE(hv))
2572 preeminent = hv_exists_ent(hv, keysv, 0);
2575 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2576 svp = he ? &HeVAL(he) : NULL;
2580 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
2584 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2585 lv = sv_newmortal();
2586 sv_upgrade(lv, SVt_PVLV);
2588 sv_magic(lv, key2 = newSVsv(keysv),
2589 PERL_MAGIC_defelem, NULL, 0);
2590 /* sv_magic() increments refcount */
2591 SvREFCNT_dec_NN(key2);
2592 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2598 if (HvNAME_get(hv) && isGV(sv))
2599 save_gp(MUTABLE_GV(sv),
2600 !(PL_op->op_flags & OPf_SPECIAL));
2601 else if (preeminent) {
2602 save_helem_flags(hv, keysv, svp,
2603 (PL_op->op_flags & OPf_SPECIAL)
2604 ? 0 : SAVEf_SETMAGIC);
2605 sv = *svp; /* may have changed */
2608 SAVEHDELETE(hv, keysv);
2613 sv = (svp && *svp ? *svp : &PL_sv_undef);
2614 /* see note in pp_helem() */
2615 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
2624 actions >>= MDEREF_SHIFT;
2643 itersvp = CxITERVAR(cx);
2646 switch (CxTYPE(cx)) {
2648 case CXt_LOOP_LAZYSV: /* string increment */
2650 SV* cur = cx->blk_loop.state_u.lazysv.cur;
2651 SV *end = cx->blk_loop.state_u.lazysv.end;
2652 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
2653 It has SvPVX of "" and SvCUR of 0, which is what we want. */
2655 const char *max = SvPV_const(end, maxlen);
2656 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
2660 /* NB: on the first iteration, oldsv will have a ref count of at
2661 * least 2 (one extra from blk_loop.itersave), so the GV or pad
2662 * slot will get localised; on subsequent iterations the RC==1
2663 * optimisation may kick in and the SV will be reused. */
2664 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2665 /* safe to reuse old SV */
2666 sv_setsv(oldsv, cur);
2670 /* we need a fresh SV every time so that loop body sees a
2671 * completely new SV for closures/references to work as
2673 *itersvp = newSVsv(cur);
2674 SvREFCNT_dec(oldsv);
2676 if (strEQ(SvPVX_const(cur), max))
2677 sv_setiv(cur, 0); /* terminate next time */
2683 case CXt_LOOP_LAZYIV: /* integer increment */
2685 IV cur = cx->blk_loop.state_u.lazyiv.cur;
2686 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
2690 /* see NB comment above */
2691 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2692 /* safe to reuse old SV */
2694 if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV))
2697 /* Cheap SvIOK_only().
2698 * Assert that flags which SvIOK_only() would test or
2699 * clear can't be set, because we're SVt_IV */
2700 assert(!(SvFLAGS(oldsv) &
2701 (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK)))));
2702 SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK);
2703 /* SvIV_set() where sv_any points to head */
2704 oldsv->sv_u.svu_iv = cur;
2708 sv_setiv(oldsv, cur);
2712 /* we need a fresh SV every time so that loop body sees a
2713 * completely new SV for closures/references to work as they
2715 *itersvp = newSViv(cur);
2716 SvREFCNT_dec(oldsv);
2719 if (UNLIKELY(cur == IV_MAX)) {
2720 /* Handle end of range at IV_MAX */
2721 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
2723 ++cx->blk_loop.state_u.lazyiv.cur;
2727 case CXt_LOOP_LIST: /* for (1,2,3) */
2729 assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */
2730 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
2731 ix = (cx->blk_loop.state_u.stack.ix += inc);
2732 if (UNLIKELY(inc > 0
2733 ? ix > cx->blk_oldsp
2734 : ix <= cx->blk_loop.state_u.stack.basesp)
2738 sv = PL_stack_base[ix];
2740 goto loop_ary_common;
2742 case CXt_LOOP_ARY: /* for (@ary) */
2744 av = cx->blk_loop.state_u.ary.ary;
2745 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
2746 ix = (cx->blk_loop.state_u.ary.ix += inc);
2747 if (UNLIKELY(inc > 0
2753 if (UNLIKELY(SvRMAGICAL(av))) {
2754 SV * const * const svp = av_fetch(av, ix, FALSE);
2755 sv = svp ? *svp : NULL;
2758 sv = AvARRAY(av)[ix];
2763 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
2764 SvSetMagicSV(*itersvp, sv);
2769 if (UNLIKELY(SvIS_FREED(sv))) {
2771 Perl_croak(aTHX_ "Use of freed value in iteration");
2778 SvREFCNT_inc_simple_void_NN(sv);
2782 sv = newSVavdefelem(av, ix, 0);
2789 SvREFCNT_dec(oldsv);
2793 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
2801 /* pp_enteriter should have pre-extended the stack */
2802 assert(PL_stack_sp < PL_stack_max);
2803 *++PL_stack_sp =retsv;
2805 return PL_op->op_next;
2809 A description of how taint works in pattern matching and substitution.
2811 This is all conditional on NO_TAINT_SUPPORT not being defined. Under
2812 NO_TAINT_SUPPORT, taint-related operations should become no-ops.
2814 While the pattern is being assembled/concatenated and then compiled,
2815 PL_tainted will get set (via TAINT_set) if any component of the pattern
2816 is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
2817 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
2818 TAINT_get). It will also be set if any component of the pattern matches
2819 based on locale-dependent behavior.
2821 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
2822 the pattern is marked as tainted. This means that subsequent usage, such
2823 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
2824 on the new pattern too.
2826 RXf_TAINTED_SEEN is used post-execution by the get magic code
2827 of $1 et al to indicate whether the returned value should be tainted.
2828 It is the responsibility of the caller of the pattern (i.e. pp_match,
2829 pp_subst etc) to set this flag for any other circumstances where $1 needs
2832 The taint behaviour of pp_subst (and pp_substcont) is quite complex.
2834 There are three possible sources of taint
2836 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
2837 * the replacement string (or expression under /e)
2839 There are four destinations of taint and they are affected by the sources
2840 according to the rules below:
2842 * the return value (not including /r):
2843 tainted by the source string and pattern, but only for the
2844 number-of-iterations case; boolean returns aren't tainted;
2845 * the modified string (or modified copy under /r):
2846 tainted by the source string, pattern, and replacement strings;
2848 tainted by the pattern, and under 'use re "taint"', by the source
2850 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
2851 should always be unset before executing subsequent code.
2853 The overall action of pp_subst is:
2855 * at the start, set bits in rxtainted indicating the taint status of
2856 the various sources.
2858 * After each pattern execution, update the SUBST_TAINT_PAT bit in
2859 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
2860 pattern has subsequently become tainted via locale ops.
2862 * If control is being passed to pp_substcont to execute a /e block,
2863 save rxtainted in the CXt_SUBST block, for future use by
2866 * Whenever control is being returned to perl code (either by falling
2867 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
2868 use the flag bits in rxtainted to make all the appropriate types of
2869 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
2870 et al will appear tainted.
2872 pp_match is just a simpler version of the above.
/* pp_subst: runtime op for the substitution operator s///.
 * NOTE(review): this listing is elided (the embedded original line
 * numbers jump), so only the visible code is documented. Visible
 * parts: taint bookkeeping via SUBST_TAINT_* bits, COW/readonly
 * checks on the target, the in-place substitution fast path, and the
 * general path that accumulates the result into a new SV (dstr). */
2888 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
2889 See "how taint works" above */
2892 REGEXP *rx = PM_GETRE(pm);
2894 int force_on_match = 0;
2895 const I32 oldsave = PL_savestack_ix;
2897 bool doutf8 = FALSE; /* whether replacement is in utf8 */
2902 /* known replacement string? */
2903 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
2907 if (PL_op->op_flags & OPf_STACKED)
2916 SvGETMAGIC(TARG); /* must come before cow check */
2918 /* note that a string might get converted to COW during matching */
2919 was_cow = cBOOL(SvIsCOW(TARG));
2921 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
2922 #ifndef PERL_ANY_COW
2924 sv_force_normal_flags(TARG,0);
2926 if ((SvREADONLY(TARG)
2927 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
2928 || SvTYPE(TARG) > SVt_PVLV)
2929 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
2930 Perl_croak_no_modify();
2934 orig = SvPV_nomg(TARG, len);
2935 /* note we don't (yet) force the var into being a string; if we fail
2936 * to match, we leave as-is; on successful match however, we *will*
2937 * coerce into a string, then repeat the match */
2938 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
2941 /* only replace once? */
2942 once = !(rpm->op_pmflags & PMf_GLOBAL);
2944 /* See "how taint works" above */
2947 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
2948 | (RX_ISTAINTED(rx) ? SUBST_TAINT_PAT : 0)
2949 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
2950 | ((once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
2951 ? SUBST_TAINT_BOOLRET : 0));
2957 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
2959 strend = orig + len;
2960 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len;
2961 maxiters = 2 * slen + 10; /* We can match twice at each
2962 position, once with zero-length,
2963 second time with non-zero. */
2965 /* handle the empty pattern */
2966 if (!RX_PRELEN(rx) && PL_curpm && !ReANY(rx)->mother_re) {
2967 if (PL_curpm == PL_reg_curpm)
2968 Perl_croak(aTHX_ "Use of the empty pattern inside of "
2969 "a regex code block is forbidden");
2974 #ifdef PERL_SAWAMPERSAND
2975 r_flags = ( RX_NPARENS(rx)
2977 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
2978 || (rpm->op_pmflags & PMf_KEEPCOPY)
2983 r_flags = REXEC_COPY_STR;
/* first match attempt: if it fails, push a false (or TARG) result and
 * unwind the save stack to where we started */
2986 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
2989 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
2990 LEAVE_SCOPE(oldsave);
2995 /* known replacement string? */
2997 /* replacement needing upgrading? */
2998 if (DO_UTF8(TARG) && !doutf8) {
2999 nsv = sv_newmortal();
3001 sv_utf8_upgrade(nsv);
3002 c = SvPV_const(nsv, clen);
3006 c = SvPV_const(dstr, clen);
3007 doutf8 = DO_UTF8(dstr);
3010 if (SvTAINTED(dstr))
3011 rxtainted |= SUBST_TAINT_REPL;
3018 /* can do inplace substitution? */
3023 && (I32)clen <= RX_MINLENRET(rx)
3025 || !(r_flags & REXEC_COPY_STR)
3026 || (!SvGMAGICAL(dstr) && !(RX_EXTFLAGS(rx) & RXf_EVAL_SEEN))
3028 && !(RX_EXTFLAGS(rx) & RXf_NO_INPLACE_SUBST)
3029 && (!doutf8 || SvUTF8(TARG))
3030 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
3034 /* string might have got converted to COW since we set was_cow */
3035 if (SvIsCOW(TARG)) {
3036 if (!force_on_match)
3038 assert(SvVOK(TARG));
3041 if (force_on_match) {
3042 /* redo the first match, this time with the orig var
3043 * forced into being a string */
3045 orig = SvPV_force_nomg(TARG, len);
3051 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3052 rxtainted |= SUBST_TAINT_PAT;
3053 m = orig + RX_OFFS(rx)[0].start;
3054 d = orig + RX_OFFS(rx)[0].end;
/* in-place single substitution: shuffle bytes within TARG's buffer */
3056 if (m - s > strend - d) { /* faster to shorten from end */
3059 Copy(c, m, clen, char);
3064 Move(d, m, i, char);
3068 SvCUR_set(TARG, m - s);
3070 else { /* faster from front */
3074 Move(s, d - i, i, char);
3077 Copy(c, d, clen, char);
3084 d = s = RX_OFFS(rx)[0].start + orig;
/* in-place global substitution loop */
3087 if (UNLIKELY(iters++ > maxiters))
3088 DIE(aTHX_ "Substitution loop");
3089 if (UNLIKELY(RX_MATCH_TAINTED(rx))) /* run time pattern taint, eg locale */
3090 rxtainted |= SUBST_TAINT_PAT;
3091 m = RX_OFFS(rx)[0].start + orig;
3094 Move(s, d, i, char);
3098 Copy(c, d, clen, char);
3101 s = RX_OFFS(rx)[0].end + orig;
3102 } while (CALLREGEXEC(rx, s, strend, orig,
3103 s == m, /* don't match same null twice */
3105 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3108 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
3109 Move(s, d, i+1, char); /* include the NUL */
/* general (non-inplace) path: build the result in a new SV */
3119 if (force_on_match) {
3120 /* redo the first match, this time with the orig var
3121 * forced into being a string */
3123 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3124 /* I feel that it should be possible to avoid this mortal copy
3125 given that the code below copies into a new destination.
3126 However, I suspect it isn't worth the complexity of
3127 unravelling the C<goto force_it> for the small number of
3128 cases where it would be viable to drop into the copy code. */
3129 TARG = sv_2mortal(newSVsv(TARG));
3131 orig = SvPV_force_nomg(TARG, len);
3137 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3138 rxtainted |= SUBST_TAINT_PAT;
3140 s = RX_OFFS(rx)[0].start + orig;
3141 dstr = newSVpvn_flags(orig, s-orig,
3142 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
3147 /* note that a whole bunch of local vars are saved here for
3148 * use by pp_substcont: here's a list of them in case you're
3149 * searching for places in this sub that uses a particular var:
3150 * iters maxiters r_flags oldsave rxtainted orig dstr targ
3151 * s m strend rx once */
3153 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
3157 if (UNLIKELY(iters++ > maxiters))
3158 DIE(aTHX_ "Substitution loop");
3159 if (UNLIKELY(RX_MATCH_TAINTED(rx)))
3160 rxtainted |= SUBST_TAINT_PAT;
3161 if (RX_MATCH_COPIED(rx) && RX_SUBBEG(rx) != orig) {
/* regex engine copied the subject string; rebase our pointers */
3163 char *old_orig = orig;
3164 assert(RX_SUBOFFSET(rx) == 0);
3166 orig = RX_SUBBEG(rx);
3167 s = orig + (old_s - old_orig);
3168 strend = s + (strend - old_s);
3170 m = RX_OFFS(rx)[0].start + orig;
3171 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
3172 s = RX_OFFS(rx)[0].end + orig;
3174 /* replacement already stringified */
3176 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
3180 sv_catsv(dstr, repl);
3181 if (UNLIKELY(SvTAINTED(repl)))
3182 rxtainted |= SUBST_TAINT_REPL;
3186 } while (CALLREGEXEC(rx, s, strend, orig,
3187 s == m, /* Yields minend of 0 or 1 */
3189 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3190 assert(strend >= s);
3191 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
3193 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3194 /* From here on down we're using the copy, and leaving the original
3201 /* The match may make the string COW. If so, brilliant, because
3202 that's just saved us one malloc, copy and free - the regexp has
3203 donated the old buffer, and we malloc an entirely new one, rather
3204 than the regexp malloc()ing a buffer and copying our original,
3205 only for us to throw it away here during the substitution. */
3206 if (SvIsCOW(TARG)) {
3207 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
/* steal dstr's buffer into TARG instead of copying it */
3213 SvPV_set(TARG, SvPVX(dstr));
3214 SvCUR_set(TARG, SvCUR(dstr));
3215 SvLEN_set(TARG, SvLEN(dstr));
3216 SvFLAGS(TARG) |= SvUTF8(dstr);
3217 SvPV_set(dstr, NULL);
3224 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3225 (void)SvPOK_only_UTF8(TARG);
3228 /* See "how taint works" above */
3230 if ((rxtainted & SUBST_TAINT_PAT) ||
3231 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
3232 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
3234 (RX_MATCH_TAINTED_on(rx)); /* taint $1 et al */
3236 if (!(rxtainted & SUBST_TAINT_BOOLRET)
3237 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
3239 SvTAINTED_on(TOPs); /* taint return value */
3241 SvTAINTED_off(TOPs); /* may have got tainted earlier */
3243 /* needed for mg_set below */
3245 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
3249 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
3251 LEAVE_SCOPE(oldsave);
/* pp_grepwhile: per-item continuation op for grep (elided listing;
 * the guarding truth test of the previous block's result is not
 * visible here). Visible logic: copy the current item down into the
 * "dst" region of the stack, then either finish the grep (pop the
 * three marks, return a count or the kept list per context) or set
 * up the next item and loop back via cLOGOP->op_other. */
3260 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
3261 ++*PL_markstack_ptr;
3263 LEAVE_with_name("grep_item"); /* exit inner scope */
/* all source items consumed? then wrap up */
3266 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
3268 const U8 gimme = GIMME_V;
3270 LEAVE_with_name("grep"); /* exit outer scope */
3271 (void)POPMARK; /* pop src */
3272 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
3273 (void)POPMARK; /* pop dst */
3274 SP = PL_stack_base + POPMARK; /* pop original mark */
3275 if (gimme == G_SCALAR) {
3279 else if (gimme == G_ARRAY)
3286 ENTER_with_name("grep_item"); /* enter inner scope */
/* set up $_ (via PL_curpm / TOPMARK slot) for the next iteration */
3289 src = PL_stack_base[TOPMARK];
3290 if (SvPADTMP(src)) {
3291 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
3297 RETURNOP(cLOGOP->op_other);
3301 /* leave_adjust_stacks():
3303 * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp),
3304 * positioning them at to_sp+1 onwards, and do the equivalent of a
3305 * FREETMPS and TAINT_NOT.
3307 * Not intended to be called in void context.
3309 * When leaving a sub, eval, do{} or other scope, the things that need
3310 * doing to process the return args are:
3311 * * in scalar context, only return the last arg (or PL_sv_undef if none);
3312 * * for the types of return that return copies of their args (such
3313 * as rvalue sub return), make a mortal copy of every return arg,
3314 * except where we can optimise the copy away without it being
3315 * semantically visible;
3316 * * make sure that the arg isn't prematurely freed; in the case of an
3317 * arg not copied, this may involve mortalising it. For example, in
3318 * C<sub f { my $x = ...; $x }>, $x would be freed when we do
3319 * CX_LEAVE_SCOPE(cx) unless it's protected or copied.
3321 * What condition to use when deciding whether to pass the arg through
3322 * or make a copy, is determined by the 'pass' arg; its valid values are:
3323 * 0: rvalue sub/eval exit
3324 * 1: other rvalue scope exit
3325 * 2: :lvalue sub exit in rvalue context
3326 * 3: :lvalue sub exit in lvalue context and other lvalue scope exits
3328 * There is a big issue with doing a FREETMPS. We would like to free any
3329 * temps created by the last statement which the sub executed, rather than
3330 * leaving them for the caller. In a situation where a sub call isn't
3331 * soon followed by a nextstate (e.g. nested recursive calls, a la
3332 * fibonacci()), temps can accumulate, causing memory and performance
3335 * On the other hand, we don't want to free any TEMPs which are keeping
3336 * alive any return args that we skipped copying; nor do we wish to undo
3337 * any mortalising done here.
3339 * The solution is to split the temps stack frame into two, with a cut
3340 * point delineating the two halves. We arrange that by the end of this
3341 * function, all the temps stack frame entries we wish to keep are in the
3342 * range PL_tmps_floor+1.. tmps_base-1, while the ones to free now are in
3343 * the range tmps_base .. PL_tmps_ix. During the course of this
3344 * function, tmps_base starts off as PL_tmps_floor+1, then increases
3345 * whenever we find or create a temp that we know should be kept. In
3346 * general the stuff above tmps_base is undecided until we reach the end,
3347 * and we may need a sort stage for that.
3349 * To determine whether a TEMP is keeping a return arg alive, every
3350 * arg that is kept rather than copied and which has the SvTEMP flag
3351 * set, has the flag temporarily unset, to mark it. At the end we scan
3352 * the temps stack frame above the cut for entries without SvTEMP and
3353 * keep them, while turning SvTEMP on again. Note that if we die before
3354 * the SvTEMP flags are set again, it's safe: at worst, subsequent use of
3355 * those SVs may be slightly less efficient.
3357 * In practice various optimisations for some common cases mean we can
3358 * avoid most of the scanning and swapping about with the temps stack.
3362 Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass)
3366 SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
3369 PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS;
3373 if (gimme == G_ARRAY) {
3374 nargs = SP - from_sp;
3378 assert(gimme == G_SCALAR);
3379 if (UNLIKELY(from_sp >= SP)) {
3380 /* no return args */
3381 assert(from_sp == SP);
3383 *++SP = &PL_sv_undef;
3393 /* common code for G_SCALAR and G_ARRAY */
3395 tmps_base = PL_tmps_floor + 1;
3399 /* pointer version of tmps_base. Not safe across temp stack
3403 EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
3404 tmps_basep = PL_tmps_stack + tmps_base;
3406 /* process each return arg */
3409 SV *sv = *from_sp++;
3411 assert(PL_tmps_ix + nargs < PL_tmps_max);
3413 /* PADTMPs with container set magic shouldn't appear in the
3414 * wild. This assert is more important for pp_leavesublv(),
3415 * but by testing for it here, we're more likely to catch
3416 * bad cases (what with :lvalue subs not being widely
3417 * deployed). The two issues are that for something like
3418 * sub :lvalue { $tied{foo} }
3420 * sub :lvalue { substr($foo,1,2) }
3421 * pp_leavesublv() will croak if the sub returns a PADTMP,
3422 * and currently functions like pp_substr() return a mortal
3423 * rather than using their PADTMP when returning a PVLV.
3424 * This is because the PVLV will hold a ref to $foo,
3425 * so $foo would get delayed in being freed while
3426 * the PADTMP SV remained in the PAD.
3427 * So if this assert fails it means either:
3428 * 1) there is pp code similar to pp_substr that is
3429 * returning a PADTMP instead of a mortal, and probably
3431 * 2) pp_leavesublv is making unwarranted assumptions
3432 * about always croaking on a PADTMP
3434 if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
3436 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
3437 assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
/* per-'pass' condition deciding pass-through vs mortal copy;
 * see the table in the function's header comment */
3443 pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3444 : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3445 : pass == 2 ? (!SvPADTMP(sv))
3448 /* pass through: skip copy for logic or optimisation
3449 * reasons; instead mortalise it, except that ... */
3453 /* ... since this SV is an SvTEMP, we don't need to
3454 * re-mortalise it; instead we just need to ensure
3455 * that its existing entry in the temps stack frame
3456 * ends up below the cut and so avoids being freed
3457 * this time round. We mark it as needing to be kept
3458 * by temporarily unsetting SvTEMP; then at the end,
3459 * we shuffle any !SvTEMP entries on the tmps stack
3460 * back below the cut.
3461 * However, there's a significant chance that there's
3462 * a 1:1 correspondence between the first few (or all)
3463 * elements in the return args stack frame and those
3464 * in the temps stack frame; e.g.:
3465 * sub f { ....; map {...} .... },
3466 * or if we're exiting multiple scopes and one of the
3467 * inner scopes has already made mortal copies of each
3470 * If so, this arg sv will correspond to the next item
3471 * on the tmps stack above the cut, and so can be kept
3472 * merely by moving the cut boundary up one, rather
3473 * than messing with SvTEMP. If all args are 1:1 then
3474 * we can avoid the sorting stage below completely.
3476 * If there are no items above the cut on the tmps
3477 * stack, then the SvTEMP must come from an item
3478 * below the cut, so there's nothing to do.
3480 if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) {
3481 if (sv == *tmps_basep)
3487 else if (!SvPADTMP(sv)) {
3488 /* mortalise arg to avoid it being freed during save
3489 * stack unwinding. Pad tmps don't need mortalising as
3490 * they're never freed. This is the equivalent of
3491 * sv_2mortal(SvREFCNT_inc(sv)), except that:
3492 * * it assumes that the temps stack has already been
3494 * * it puts the new item at the cut rather than at
3495 * ++PL_tmps_ix, moving the previous occupant there
3498 if (!SvIMMORTAL(sv)) {
3499 SvREFCNT_inc_simple_void_NN(sv);
3501 /* Note that if there's nothing above the cut,
3502 * this copies the garbage one slot above
3503 * PL_tmps_ix onto itself. This is harmless (the
3504 * stack's already been extended), but might in
3505 * theory trigger warnings from tools like ASan
3507 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3513 /* Make a mortal copy of the SV.
3514 * The following code is the equivalent of sv_mortalcopy()
3516 * * it assumes the temps stack has already been extended;
3517 * * it optimises the copying for some simple SV types;
3518 * * it puts the new item at the cut rather than at
3519 * ++PL_tmps_ix, moving the previous occupant there
3522 SV *newsv = newSV(0);
3524 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3525 /* put it on the tmps stack early so it gets freed if we die */
3526 *tmps_basep++ = newsv;
3529 if (SvTYPE(sv) <= SVt_IV) {
3530 /* arg must be one of undef, IV/UV, or RV: skip
3531 * sv_setsv_flags() and do the copy directly */
3533 U32 srcflags = SvFLAGS(sv);
3535 assert(!SvGMAGICAL(sv));
3536 if (srcflags & (SVf_IOK|SVf_ROK)) {
3537 SET_SVANY_FOR_BODYLESS_IV(newsv);
3539 if (srcflags & SVf_ROK) {
3540 newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
3541 /* SV type plus flags */
3542 dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
3545 /* both src and dst are <= SVt_IV, so sv_any
3546 * points to the head; so access the heads
3547 * directly rather than going via sv_any.
3549 assert( &(sv->sv_u.svu_iv)
3550 == &(((XPVIV*) SvANY(sv))->xiv_iv));
3551 assert( &(newsv->sv_u.svu_iv)
3552 == &(((XPVIV*) SvANY(newsv))->xiv_iv));
3553 newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
3554 /* SV type plus flags */
3555 dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
3556 |(srcflags & SVf_IVisUV));
3560 assert(!(srcflags & SVf_OK));
3561 dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
3563 SvFLAGS(newsv) = dstflags;
3567 /* do the full sv_setsv() */
3571 old_base = tmps_basep - PL_tmps_stack;
3573 sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
3574 /* the mg_get or sv_setsv might have created new temps
3575 * or realloced the tmps stack; regrow and reload */
3576 EXTEND_MORTAL(nargs);
3577 tmps_basep = PL_tmps_stack + old_base;
3578 TAINT_NOT; /* Each item is independent */
3584 /* If there are any temps left above the cut, we need to sort
3585 * them into those to keep and those to free. The only ones to
3586 * keep are those for which we've temporarily unset SvTEMP.
3587 * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
3588 * swapping pairs as necessary. Stop when we meet in the middle.
3591 SV **top = PL_tmps_stack + PL_tmps_ix;
3592 while (tmps_basep <= top) {
3605 tmps_base = tmps_basep - PL_tmps_stack;
3608 PL_stack_sp = to_sp;
3610 /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
3611 while (PL_tmps_ix >= tmps_base) {
3612 SV* const sv = PL_tmps_stack[PL_tmps_ix--];
3614 PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
3618 SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
3624 /* also tail-called by pp_return */
/* pp_leavesub (elided fragment): return from a plain (rvalue) sub.
 * Visible logic: adjust the return args on the stack for the caller's
 * context via leave_adjust_stacks() (pass 0 = rvalue sub exit), then
 * pop the CXt_SUB context and pick up the return op. */
3634 assert(CxTYPE(cx) == CXt_SUB);
3636 if (CxMULTICALL(cx)) {
3637 /* entry zero of a stack is always PL_sv_undef, which
3638 * simplifies converting a '()' return into undef in scalar context */
3639 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
3643 gimme = cx->blk_gimme;
3644 oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */
3646 if (gimme == G_VOID)
3647 PL_stack_sp = oldsp;
3649 leave_adjust_stacks(oldsp, oldsp, gimme, 0);
3652 cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */
3654 retop = cx->blk_sub.retop;
3661 /* clear (if possible) or abandon the current @_. If 'abandon' is true,
3662 * forces an abandon */
/* Visible logic: when @_ is unshared (refcount 1) and unmagical it can
 * be cleared in place (that branch's body is elided here); otherwise
 * replace pad[0] with a fresh non-real AV pre-extended to the old fill
 * and drop our reference to the old one. */
3665 Perl_clear_defarray(pTHX_ AV* av, bool abandon)
3667 const SSize_t fill = AvFILLp(av);
3669 PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
3671 if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) {
3676 AV *newav = newAV();
3677 av_extend(newav, fill);
3678 AvREIFY_only(newav);
3679 PAD_SVl(0) = MUTABLE_SV(newav);
3680 SvREFCNT_dec_NN(av);
/* pp_entersub (elided fragment): call a subroutine — resolve the CV
 * from the stacked callee SV, handle undefined/AUTOLOAD stubs and the
 * debugger (DB::sub) hook, then dispatch either the pure-perl path
 * (push CXt_SUB, set up pad and @_, RETURNOP(CvSTART)) or the XS path
 * (marshal args, invoke CvXSUB directly). */
3691 I32 old_savestack_ix;
3696 /* Locate the CV to call:
3697 * - most common case: RV->CV: f(), $ref->():
3698 * note that if a sub is compiled before its caller is compiled,
3699 * the stash entry will be a ref to a CV, rather than being a GV.
3700 * - second most common case: CV: $ref->method()
3703 /* a non-magic-RV -> CV ? */
3704 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
3705 cv = MUTABLE_CV(SvRV(sv));
3706 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
3710 cv = MUTABLE_CV(sv);
3713 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
3714 /* handle all the weird cases */
3715 switch (SvTYPE(sv)) {
3717 if (!isGV_with_GP(sv))
3721 cv = GvCVu((const GV *)sv);
3722 if (UNLIKELY(!cv)) {
3724 cv = sv_2cv(sv, &stash, &gv, 0);
3726 old_savestack_ix = PL_savestack_ix;
3737 if (UNLIKELY(SvAMAGIC(sv))) {
3738 sv = amagic_deref_call(sv, to_cv_amg);
3739 /* Don't SPAGAIN here. */
3745 if (UNLIKELY(!SvOK(sv)))
3746 DIE(aTHX_ PL_no_usym, "a subroutine");
3748 if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
3749 if (PL_op->op_flags & OPf_STACKED) /* hasargs */
3750 SP = PL_stack_base + POPMARK;
3753 if (GIMME_V == G_SCALAR)
3754 PUSHs(&PL_sv_undef);
/* symbolic call by name: forbidden under strict refs */
3758 sym = SvPV_nomg_const(sv, len);
3759 if (PL_op->op_private & HINT_STRICT_REFS)
3760 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
3761 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
3764 cv = MUTABLE_CV(SvRV(sv));
3765 if (LIKELY(SvTYPE(cv) == SVt_PVCV))
3771 DIE(aTHX_ "Not a CODE reference");
3775 /* At this point we want to save PL_savestack_ix, either by doing a
3776 * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final
3777 * CV we will be using (so we don't know whether its XS, so we can't
3778 * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on
3779 * the save stack. So remember where we are currently on the save
3780 * stack, and later update the CX or scopestack entry accordingly. */
3781 old_savestack_ix = PL_savestack_ix;
3783 /* these two fields are in a union. If they ever become separate,
3784 * we have to test for both of them being null below */
3786 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
3787 while (UNLIKELY(!CvROOT(cv))) {
3791 /* anonymous or undef'd function leaves us no recourse */
3792 if (CvLEXICAL(cv) && CvHASGV(cv))
3793 DIE(aTHX_ "Undefined subroutine &%"SVf" called",
3794 SVfARG(cv_name(cv, NULL, 0)));
3795 if (CvANON(cv) || !CvHASGV(cv)) {
3796 DIE(aTHX_ "Undefined subroutine called");
3799 /* autoloaded stub? */
3800 if (cv != GvCV(gv = CvGV(cv))) {
3803 /* should call AUTOLOAD now? */
3806 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
3807 (GvNAMEUTF8(gv) ? SVf_UTF8 : 0)
3808 |(PL_op->op_flags & OPf_REF
3809 ? GV_AUTOLOAD_ISMETHOD
3811 cv = autogv ? GvCV(autogv) : NULL;
3814 sub_name = sv_newmortal();
3815 gv_efullname3(sub_name, gv, NULL);
3816 DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
3820 /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
3821 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
3822 DIE(aTHX_ "Closure prototype called");
/* debugger hook: possibly reroute the call through DB::sub / DB::lsub */
3824 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
3827 Perl_get_db_sub(aTHX_ &sv, cv);
3829 PL_curcopdb = PL_curcop;
3831 /* check for lsub that handles lvalue subroutines */
3832 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
3833 /* if lsub not found then fall back to DB::sub */
3834 if (!cv) cv = GvCV(PL_DBsub);
3836 cv = GvCV(PL_DBsub);
3839 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
3840 DIE(aTHX_ "No DB::sub routine defined");
3843 if (!(CvISXSUB(cv))) {
3844 /* This path taken at least 75% of the time */
3851 /* keep PADTMP args alive throughout the call (we need to do this
3852 * because @_ isn't refcounted). Note that we create the mortals
3853 * in the caller's tmps frame, so they won't be freed until after
3854 * we return from the sub.
3863 *svp = sv = sv_mortalcopy(sv);
3869 cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix);
3870 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
3871 cx_pushsub(cx, cv, PL_op->op_next, hasargs);
3873 padlist = CvPADLIST(cv);
3874 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2))
3875 pad_push(padlist, depth);
3876 PAD_SET_CUR_NOSAVE(padlist, depth);
3877 if (LIKELY(hasargs)) {
3878 AV *const av = MUTABLE_AV(PAD_SVl(0));
3882 defavp = &GvAV(PL_defgv);
3883 cx->blk_sub.savearray = *defavp;
3884 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
3886 /* it's the responsibility of whoever leaves a sub to ensure
3887 * that a clean, empty AV is left in pad[0]. This is normally
3888 * done by cx_popsub() */
3889 assert(!AvREAL(av) && AvFILLp(av) == -1);
3892 if (UNLIKELY(items - 1 > AvMAX(av))) {
3893 SV **ary = AvALLOC(av);
3894 AvMAX(av) = items - 1;
3895 Renew(ary, items, SV*);
3900 Copy(MARK+1,AvARRAY(av),items,SV*);
3901 AvFILLp(av) = items - 1;
3903 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3905 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3906 SVfARG(cv_name(cv, NULL, 0)));
3907 /* warning must come *after* we fully set up the context
3908 * stuff so that __WARN__ handlers can safely dounwind()
3911 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
3912 && ckWARN(WARN_RECURSION)
3913 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
3914 sub_crush_depth(cv);
3915 RETURNOP(CvSTART(cv));
/* XS path: no perl-level context frame is pushed */
3918 SSize_t markix = TOPMARK;
3922 /* pretend we did the ENTER earlier */
3923 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
3928 if (UNLIKELY(((PL_op->op_private
3929 & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
3930 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3932 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3933 SVfARG(cv_name(cv, NULL, 0)));
3935 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
3936 /* Need to copy @_ to stack. Alternative may be to
3937 * switch stack to @_, and copy return values
3938 * back. This would allow popping @_ in XSUB, e.g.. XXXX */
3939 AV * const av = GvAV(PL_defgv);
3940 const SSize_t items = AvFILL(av) + 1;
3944 const bool m = cBOOL(SvRMAGICAL(av));
3945 /* Mark is at the end of the stack. */
3947 for (; i < items; ++i)
3951 SV ** const svp = av_fetch(av, i, 0);
3952 sv = svp ? *svp : NULL;
3954 else sv = AvARRAY(av)[i];
3955 if (sv) SP[i+1] = sv;
3957 SP[i+1] = newSVavdefelem(av, i, 1);
3965 SV **mark = PL_stack_base + markix;
3966 SSize_t items = SP - mark;
3969 if (*mark && SvPADTMP(*mark)) {
3970 *mark = sv_mortalcopy(*mark);
3974 /* We assume first XSUB in &DB::sub is the called one. */
3975 if (UNLIKELY(PL_curcopdb)) {
3976 SAVEVPTR(PL_curcop);
3977 PL_curcop = PL_curcopdb;
3980 /* Do we need to open block here? XXXX */
3982 /* calculate gimme here as PL_op might get changed and then not
3983 * restored until the LEAVE further down */
3984 is_scalar = (GIMME_V == G_SCALAR);
3986 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
3988 CvXSUB(cv)(aTHX_ cv);
3990 /* Enforce some sanity in scalar context. */
3992 SV **svp = PL_stack_base + markix + 1;
3993 if (svp != PL_stack_sp) {
3994 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
/* Perl_sub_crush_depth: issue the "Deep recursion" warning for cv.
 * Two message forms are visible (anonymous vs named sub); the
 * conditional selecting between them is elided in this listing. */
4004 Perl_sub_crush_depth(pTHX_ CV *cv)
4006 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
4009 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
4011 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%"SVf"\"",
4012 SVfARG(cv_name(cv,NULL,0)));
4018 /* like croak, but report in context of caller */
4021 Perl_croak_caller(const char *pat, ...)
/* Visible logic: locate the caller's context frame and point
 * PL_curcop at its COP so the error message carries the caller's
 * file/line, then croak with the varargs (the vcroak call itself is
 * elided in this listing; control never returns). */
4025 const PERL_CONTEXT *cx = caller_cx(0, NULL);
4027 /* make error appear at call site */
4029 PL_curcop = cx->blk_oldcop;
4031 va_start(args, pat);
4033 NOT_REACHED; /* NOTREACHED */
/* pp_aelem (elided fragment): fetch an array element ($a[$i]),
 * optionally as an lvalue; handles local()isation of the element,
 * deferred vivification (defelem proxies), and OPpDEREF autoviv. */
4042 SV* const elemsv = POPs;
4043 IV elem = SvIV(elemsv);
4044 AV *const av = MUTABLE_AV(POPs);
4045 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
4046 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
4047 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
4048 bool preeminent = TRUE;
4051 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
4052 Perl_warner(aTHX_ packWARN(WARN_MISC),
4053 "Use of reference \"%"SVf"\" as array index",
4055 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
4058 if (UNLIKELY(localizing)) {
4062 /* If we can determine whether the element exists,
4063 * try to preserve the existence of a tied array
4064 * element by using EXISTS and DELETE if possible.
4065 * Fallback to FETCH and STORE otherwise. */
4066 if (SvCANEXISTDELETE(av))
4067 preeminent = av_exists(av, elem);
4070 svp = av_fetch(av, elem, lval && !defer);
4072 #ifdef PERL_MALLOC_WRAP
/* clamp/convert the index before the malloc-wrap sanity check */
4073 if (SvUOK(elemsv)) {
4074 const UV uv = SvUV(elemsv);
4075 elem = uv > IV_MAX ? IV_MAX : uv;
4077 else if (SvNOK(elemsv))
4078 elem = (IV)SvNV(elemsv);
4080 static const char oom_array_extend[] =
4081 "Out of memory during array extend"; /* Duplicated in av.c */
4082 MEM_WRAP_CHECK_1(elem,SV*,oom_array_extend);
4085 if (!svp || !*svp) {
4088 DIE(aTHX_ PL_no_aelem, elem);
4089 len = av_tindex(av);
4090 mPUSHs(newSVavdefelem(av,
4091 /* Resolve a negative index now, unless it points before the
4092 beginning of the array, in which case record it for error
4093 reporting in magic_setdefelem. */
4094 elem < 0 && len + elem >= 0 ? len + elem : elem,
4098 if (UNLIKELY(localizing)) {
4100 save_aelem(av, elem, svp);
4102 SAVEADELETE(av, elem);
4104 else if (PL_op->op_private & OPpDEREF) {
4105 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
4109 sv = (svp ? *svp : &PL_sv_undef);
4110 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
/* Perl_vivify_ref (elided fragment): autovivify sv into a reference
 * of the kind requested by to_what (scalar/array/hash ref; the switch
 * dispatch lines are elided). Croaks on read-only targets. */
4117 Perl_vivify_ref(pTHX_ SV *sv, U32 to_what)
4119 PERL_ARGS_ASSERT_VIVIFY_REF;
4124 Perl_croak_no_modify();
4125 prepare_SV_for_RV(sv);
4128 SvRV_set(sv, newSV(0));
4131 SvRV_set(sv, MUTABLE_SV(newAV()));
4134 SvRV_set(sv, MUTABLE_SV(newHV()));
4141 if (SvGMAGICAL(sv)) {
4142 /* copy the sv without magic to prevent magic from being
4144 SV* msv = sv_newmortal();
4145 sv_setsv_nomg(msv, sv);
/* S_opmethod_stash: resolve the method invocant (first arg after the
 * current mark) to a stash. Handles class-name strings (shared-hash
 * COW fast path), blessed references, globs/filehandles, and defelem
 * PVLVs; croaks on undef or unblessed invocants. */
4151 PERL_STATIC_INLINE HV *
4152 S_opmethod_stash(pTHX_ SV* meth)
4157 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
4158 ? (Perl_croak(aTHX_ "Can't call method \"%"SVf"\" without a "
4159 "package or object reference", SVfARG(meth)),
4161 : *(PL_stack_base + TOPMARK + 1);
4163 PERL_ARGS_ASSERT_OPMETHOD_STASH;
4167 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on an undefined value",
4170 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
4171 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
4172 stash = gv_stashsv(sv, GV_CACHE_ONLY);
4173 if (stash) return stash;
4177 ob = MUTABLE_SV(SvRV(sv));
4178 else if (!SvOK(sv)) goto undefined;
4179 else if (isGV_with_GP(sv)) {
4181 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
4182 "without a package or object reference",
4185 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') {
4186 assert(!LvTARGLEN(ob));
4190 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob));
4193 /* this isn't a reference */
4196 const char * const packname = SvPV_nomg_const(sv, packlen);
4197 const U32 packname_utf8 = SvUTF8(sv);
4198 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
4199 if (stash) return stash;
4201 if (!(iogv = gv_fetchpvn_flags(
4202 packname, packlen, packname_utf8, SVt_PVIO
4204 !(ob=MUTABLE_SV(GvIO(iogv))))
4206 /* this isn't the name of a filehandle either */
4209 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
4210 "without a package or object reference",
4213 /* assume it's a package name */
4214 stash = gv_stashpvn(packname, packlen, packname_utf8);
4215 if (stash) return stash;
4216 else return MUTABLE_HV(sv);
4218 /* it _is_ a filehandle name -- replace with a reference */
4219 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
4222 /* if we got here, ob should be an object or a glob */
4223 if (!ob || !(SvOBJECT(ob)
4224 || (isGV_with_GP(ob)
4225 && (ob = MUTABLE_SV(GvIO((const GV *)ob)))
4228 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on unblessed reference",
4229 SVfARG((SvSCREAM(meth) && strEQ(SvPV_nolen_const(meth),"isa"))
4230 ? newSVpvs_flags("DOES", SVs_TEMP)
/* pp_method (elided fragment): dynamic method call $obj->$meth.
 * A code-ref meth is used directly; otherwise resolve the invocant's
 * stash and look up the method GV (AUTOLOAD allowed, croak on miss). */
4242 SV* const meth = TOPs;
4245 SV* const rmeth = SvRV(meth);
4246 if (SvTYPE(rmeth) == SVt_PVCV) {
4252 stash = opmethod_stash(meth);
4254 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4257 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* METHOD_CHECK_CACHE: shared fast path for the pp_method_* ops — if
 * 'cache' holds a still-valid GV for 'meth' (method-cache generation
 * matches), push its CV and (in the elided tail) return early. */
4261 #define METHOD_CHECK_CACHE(stash,cache,meth) \
4262 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
4264 gv = MUTABLE_GV(HeVAL(he)); \
4265 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
4266 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
4268 XPUSHs(MUTABLE_SV(GvCV(gv))); \
/* pp_method_named (elided fragment): method call with a compile-time
 * constant name; tries the stash's own method cache before a full
 * gv_fetchmethod lookup. */
4277 SV* const meth = cMETHOPx_meth(PL_op);
4278 HV* const stash = opmethod_stash(meth);
4280 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
4281 METHOD_CHECK_CACHE(stash, stash, meth);
4284 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4287 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* pp_method_super (elided fragment): SUPER::meth lookup — resolved
 * relative to the compiling package (CopSTASH), not the invocant. */
4296 SV* const meth = cMETHOPx_meth(PL_op);
4297 HV* const stash = CopSTASH(PL_curcop);
4298 /* Actually, SUPER doesn't need real object's (or class') stash at all,
4299 * as it uses CopSTASH. However, we must ensure that object(class) is
4300 * correct (this check is done by S_opmethod_stash) */
4301 opmethod_stash(meth);
4303 if ((cache = HvMROMETA(stash)->super)) {
4304 METHOD_CHECK_CACHE(stash, cache, meth);
4307 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4310 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* pp_method_redir (elided fragment): method call redirected to a
 * compile-time known class (cMETHOPx_rclass); invocant is still
 * validated via opmethod_stash. */
4318 SV* const meth = cMETHOPx_meth(PL_op);
4319 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4320 opmethod_stash(meth); /* not used but needed for error checks */
4322 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
4323 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4325 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4328 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* pp_method_redir_super (elided fragment): SUPER lookup relative to a
 * compile-time known class rather than CopSTASH; uses that class's
 * ->super method cache when available. */
4332 PP(pp_method_redir_super)
4337 SV* const meth = cMETHOPx_meth(PL_op);
4338 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4339 opmethod_stash(meth); /* not used but needed for error checks */
4341 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4342 else if ((cache = HvMROMETA(stash)->super)) {
4343 METHOD_CHECK_CACHE(stash, cache, meth);
4346 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4349 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4354 * ex: set ts=8 sts=4 sw=4 et: