3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
6 * You may distribute under the terms of either the GNU General Public
7 * License or the Artistic License, as specified in the README file.
12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
15 * Awake! Awake! Fear, Fire, Foes! Awake!
18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
21 /* This file contains 'hot' pp ("push/pop") functions that
22 * execute the opcodes that make up a perl program. A typical pp function
23 * expects to find its arguments on the stack, and usually pushes its
24 * results onto the stack, hence the 'pp' terminology. Each OP structure
25 * contains a pointer to the relevant pp_foo() function.
27 * By 'hot', we mean common ops whose execution speed is critical.
28 * By gathering them together into a single file, we encourage
29 * CPU cache hits on hot code. Also it could be taken as a warning not to
30 * change any code in this file unless you're sure it won't affect
35 #define PERL_IN_PP_HOT_C
49 PL_curcop = (COP*)PL_op;
50 TAINT_NOT; /* Each statement is presumed innocent */
51 PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;
61 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
62 PUSHs(save_scalar(cGVOP_gv));
64 PUSHs(GvSVn(cGVOP_gv));
69 /* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
76 /* This is sometimes called directly by pp_coreargs, pp_grepstart and
80 PUSHMARK(PL_stack_sp);
91 /* no PUTBACK, SETs doesn't inc/dec SP */
98 XPUSHs(MUTABLE_SV(cGVOP_gv));
103 /* also used for: pp_andassign() */
109 /* SP is not used to remove a variable that is saved across the
110 sv_2bool_flags call in SvTRUE_NN, if a RISC/CISC or low/high machine
111 register or load/store vs direct mem ops macro is introduced, this
112 should be a define block between direct PL_stack_sp and dSP operations,
113 presently, using PL_stack_sp is biased towards CISC cpus */
114 SV * const sv = *PL_stack_sp;
118 if (PL_op->op_type == OP_AND)
120 return cLOGOP->op_other;
128 /* sassign keeps its args in the optree traditionally backwards.
129 So we pop them differently.
131 SV *left = POPs; SV *right = TOPs;
133 if (PL_op->op_private & OPpASSIGN_BACKWARDS) { /* {or,and,dor}assign */
134 SV * const temp = left;
135 left = right; right = temp;
137 assert(TAINTING_get || !TAINT_get);
138 if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
140 if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
142 SV * const cv = SvRV(right);
143 const U32 cv_type = SvTYPE(cv);
144 const bool is_gv = isGV_with_GP(left);
145 const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
151 /* Can do the optimisation if left (LVALUE) is not a typeglob,
152 right (RVALUE) is a reference to something, and we're in void
154 if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
155 /* Is the target symbol table currently empty? */
156 GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
157 if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
158 /* Good. Create a new proxy constant subroutine in the target.
159 The gv becomes a(nother) reference to the constant. */
160 SV *const value = SvRV(cv);
162 SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
163 SvPCS_IMPORTED_on(gv);
165 SvREFCNT_inc_simple_void(value);
171 /* Need to fix things up. */
173 /* Need to fix GV. */
174 left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
178 /* We've been returned a constant rather than a full subroutine,
179 but they expect a subroutine reference to apply. */
181 ENTER_with_name("sassign_coderef");
182 SvREFCNT_inc_void(SvRV(cv));
183 /* newCONSTSUB takes a reference count on the passed in SV
184 from us. We set the name to NULL, otherwise we get into
185 all sorts of fun as the reference to our new sub is
186 donated to the GV that we're about to assign to.
188 SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
191 LEAVE_with_name("sassign_coderef");
193 /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
195 First: ops for \&{"BONK"}; return us the constant in the
197 Second: ops for *{"BONK"} cause that symbol table entry
198 (and our reference to it) to be upgraded from RV
200 Thirdly: We get here. cv is actually PVGV now, and its
201 GvCV() is actually the subroutine we're looking for
203 So change the reference so that it points to the subroutine
204 of that typeglob, as that's what they were after all along.
206 GV *const upgraded = MUTABLE_GV(cv);
207 CV *const source = GvCV(upgraded);
210 assert(CvFLAGS(source) & CVf_CONST);
212 SvREFCNT_inc_simple_void_NN(source);
213 SvREFCNT_dec_NN(upgraded);
214 SvRV_set(right, MUTABLE_SV(source));
220 UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
221 (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
224 packWARN(WARN_MISC), "Useless assignment to a temporary"
226 SvSetMagicSV(left, right);
236 RETURNOP(cLOGOP->op_other);
238 RETURNOP(cLOGOP->op_next);
245 TAINT_NOT; /* Each statement is presumed innocent */
247 PL_stack_sp = PL_stack_base + cx->blk_oldsp;
249 if (!(PL_op->op_flags & OPf_SPECIAL)) {
250 assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx));
258 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
263 const char *rpv = NULL;
265 bool rcopied = FALSE;
267 if (TARG == right && right != left) { /* $r = $l.$r */
268 rpv = SvPV_nomg_const(right, rlen);
269 rbyte = !DO_UTF8(right);
270 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
271 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */
275 if (TARG != left) { /* not $l .= $r */
277 const char* const lpv = SvPV_nomg_const(left, llen);
278 lbyte = !DO_UTF8(left);
279 sv_setpvn(TARG, lpv, llen);
285 else { /* $l .= $r and left == TARG */
287 if ((left == right /* $l .= $l */
288 || (PL_op->op_private & OPpTARGET_MY)) /* $l = $l . $r */
289 && ckWARN(WARN_UNINITIALIZED)
295 SvPV_force_nomg_nolen(left);
297 lbyte = !DO_UTF8(left);
303 rpv = SvPV_nomg_const(right, rlen);
304 rbyte = !DO_UTF8(right);
306 if (lbyte != rbyte) {
308 sv_utf8_upgrade_nomg(TARG);
311 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
312 sv_utf8_upgrade_nomg(right);
313 rpv = SvPV_nomg_const(right, rlen);
316 sv_catpvn_nomg(TARG, rpv, rlen);
323 /* push the elements of av onto the stack.
324 * XXX Note that padav has similar code but without the mg_get().
325 * I suspect that the mg_get is no longer needed, but while padav
326 * differs, it can't share this function */
329 S_pushav(pTHX_ AV* const av)
332 const SSize_t maxarg = AvFILL(av) + 1;
334 if (UNLIKELY(SvRMAGICAL(av))) {
336 for (i=0; i < (PADOFFSET)maxarg; i++) {
337 SV ** const svp = av_fetch(av, i, FALSE);
338 /* See note in pp_helem, and bug id #27839 */
340 ? SvGMAGICAL(*svp) ? (mg_get(*svp), *svp) : *svp
346 for (i=0; i < (PADOFFSET)maxarg; i++) {
347 SV * const sv = AvARRAY(av)[i];
348 SP[i+1] = LIKELY(sv) ? sv : &PL_sv_undef;
356 /* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */
361 PADOFFSET base = PL_op->op_targ;
362 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
364 if (PL_op->op_flags & OPf_SPECIAL) {
365 /* fake the RHS of my ($x,$y,..) = @_ */
367 S_pushav(aTHX_ GvAVn(PL_defgv));
371 /* note, this is only skipped for compile-time-known void cxt */
372 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
375 for (i = 0; i <count; i++)
376 *++SP = PAD_SV(base+i);
378 if (PL_op->op_private & OPpLVAL_INTRO) {
379 SV **svp = &(PAD_SVl(base));
380 const UV payload = (UV)(
381 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
382 | (count << SAVE_TIGHT_SHIFT)
383 | SAVEt_CLEARPADRANGE);
384 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
385 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT))
393 for (i = 0; i <count; i++)
394 SvPADSTALE_off(*svp++); /* mark lexical as active */
405 OP * const op = PL_op;
406 /* access PL_curpad once */
407 SV ** const padentry = &(PAD_SVl(op->op_targ));
412 PUTBACK; /* no pop/push after this, TOPs ok */
414 if (op->op_flags & OPf_MOD) {
415 if (op->op_private & OPpLVAL_INTRO)
416 if (!(op->op_private & OPpPAD_STATE))
417 save_clearsv(padentry);
418 if (op->op_private & OPpDEREF) {
419 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather
420 than TARG reduces the scope of TARG, so it does not
421 span the call to save_clearsv, resulting in smaller
423 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
435 tryAMAGICunTARGETlist(iter_amg, 0);
436 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
438 else PL_last_in_gv = PL_argvgv, PL_stack_sp--;
439 if (!isGV_with_GP(PL_last_in_gv)) {
440 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
441 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));
444 XPUSHs(MUTABLE_SV(PL_last_in_gv));
447 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
448 if (PL_last_in_gv == (GV *)&PL_sv_undef)
449 PL_last_in_gv = NULL;
451 assert(isGV_with_GP(PL_last_in_gv));
454 return do_readline();
462 tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
466 (SvIOK_notUV(left) && SvIOK_notUV(right))
467 ? (SvIVX(left) == SvIVX(right))
468 : ( do_ncmp(left, right) == 0)
474 /* also used for: pp_i_preinc() */
478 SV *sv = *PL_stack_sp;
480 if (LIKELY(((sv->sv_flags &
481 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
482 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
484 && SvIVX(sv) != IV_MAX)
486 SvIV_set(sv, SvIVX(sv) + 1);
488 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
495 /* also used for: pp_i_predec() */
499 SV *sv = *PL_stack_sp;
501 if (LIKELY(((sv->sv_flags &
502 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
503 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
505 && SvIVX(sv) != IV_MIN)
507 SvIV_set(sv, SvIVX(sv) - 1);
509 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
516 /* also used for: pp_orassign() */
525 if (PL_op->op_type == OP_OR)
527 RETURNOP(cLOGOP->op_other);
532 /* also used for: pp_dor() pp_dorassign() */
539 const int op_type = PL_op->op_type;
540 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);
545 if (UNLIKELY(!sv || !SvANY(sv))) {
546 if (op_type == OP_DOR)
548 RETURNOP(cLOGOP->op_other);
554 if (UNLIKELY(!sv || !SvANY(sv)))
559 switch (SvTYPE(sv)) {
561 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
565 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
569 if (CvROOT(sv) || CvXSUB(sv))
582 if(op_type == OP_DOR)
584 RETURNOP(cLOGOP->op_other);
586 /* assuming OP_DEFINED */
596 dSP; dATARGET; bool useleft; SV *svl, *svr;
598 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
602 #ifdef PERL_PRESERVE_IVUV
604 /* special-case some simple common cases */
605 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
607 U32 flags = (svl->sv_flags & svr->sv_flags);
608 if (flags & SVf_IOK) {
609 /* both args are simple IVs */
614 topl = ((UV)il) >> (UVSIZE * 8 - 2);
615 topr = ((UV)ir) >> (UVSIZE * 8 - 2);
617 /* if both are in a range that can't under/overflow, do a
618 * simple integer add: if the top of both numbers
619 * are 00 or 11, then it's safe */
620 if (!( ((topl+1) | (topr+1)) & 2)) {
622 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
628 else if (flags & SVf_NOK) {
629 /* both args are NVs */
634 #if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
635 !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
636 && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
638 nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
641 /* nothing was lost by converting to IVs */
644 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
652 useleft = USE_LEFT(svl);
653 /* We must see if we can perform the addition with integers if possible,
654 as the integer code detects overflow while the NV code doesn't.
655 If either argument hasn't had a numeric conversion yet attempt to get
656 the IV. It's important to do this now, rather than just assuming that
657 it's not IOK as a PV of "9223372036854775806" may not take well to NV
658 addition, and an SV which is NOK, NV=6.0 ought to be coerced to
659 integer in case the second argument is IV=9223372036854775806
660 We can (now) rely on sv_2iv to do the right thing, only setting the
661 public IOK flag if the value in the NV (or PV) slot is truly integer.
663 A side effect is that this also aggressively prefers integer maths over
664 fp maths for integer values.
666 How to detect overflow?
668 C 99 section 6.2.6.1 says
670 The range of nonnegative values of a signed integer type is a subrange
671 of the corresponding unsigned integer type, and the representation of
672 the same value in each type is the same. A computation involving
673 unsigned operands can never overflow, because a result that cannot be
674 represented by the resulting unsigned integer type is reduced modulo
675 the number that is one greater than the largest value that can be
676 represented by the resulting type.
680 which I read as "unsigned ints wrap."
682 signed integer overflow seems to be classed as "exception condition"
684 If an exceptional condition occurs during the evaluation of an
685 expression (that is, if the result is not mathematically defined or not
686 in the range of representable values for its type), the behavior is
689 (6.5, the 5th paragraph)
691 I had assumed that on 2s complement machines signed arithmetic would
692 wrap, hence coded pp_add and pp_subtract on the assumption that
693 everything perl builds on would be happy. After much wailing and
694 gnashing of teeth it would seem that irix64 knows its ANSI spec well,
695 knows that it doesn't need to, and doesn't. Bah. Anyway, the all-
696 unsigned code below is actually shorter than the old code. :-)
699 if (SvIV_please_nomg(svr)) {
700 /* Unless the left argument is integer in range we are going to have to
701 use NV maths. Hence only attempt to coerce the right argument if
702 we know the left is integer. */
710 /* left operand is undef, treat as zero. + 0 is identity,
711 Could SETi or SETu right now, but space optimise by not adding
712 lots of code to speed up what is probably a rarish case. */
714 /* Left operand is defined, so is it IV? */
715 if (SvIV_please_nomg(svl)) {
716 if ((auvok = SvUOK(svl)))
719 const IV aiv = SvIVX(svl);
722 auvok = 1; /* Now acting as a sign flag. */
724 auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
731 bool result_good = 0;
734 bool buvok = SvUOK(svr);
739 const IV biv = SvIVX(svr);
744 buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
746 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
747 else "IV" now, independent of how it came in.
748 if a, b represents positive, A, B negative, a maps to -A etc
753 all UV maths. negate result if A negative.
754 add if signs same, subtract if signs differ. */
760 /* Must get smaller */
766 /* result really should be -(auv-buv). as its negation
767 of true value, need to swap our result flag */
784 if (result <= (UV)IV_MIN)
785 SETi(result == (UV)IV_MIN
786 ? IV_MIN : -(IV)result);
788 /* result valid, but out of range for IV. */
793 } /* Overflow, drop through to NVs. */
798 useleft = USE_LEFT(svl);
802 NV value = SvNV_nomg(svr);
805 /* left operand is undef, treat as zero. + 0.0 is identity. */
809 SETn( value + SvNV_nomg(svl) );
815 /* also used for: pp_aelemfast_lex() */
820 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
821 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
822 const U32 lval = PL_op->op_flags & OPf_MOD;
823 const I8 key = (I8)PL_op->op_private;
827 assert(SvTYPE(av) == SVt_PVAV);
831 /* inlined av_fetch() for simple cases ... */
832 if (!SvRMAGICAL(av) && key >= 0 && key <= AvFILLp(av)) {
833 sv = AvARRAY(av)[key];
840 /* ... else do it the hard way */
841 svp = av_fetch(av, key, lval);
842 sv = (svp ? *svp : &PL_sv_undef);
844 if (UNLIKELY(!svp && lval))
845 DIE(aTHX_ PL_no_aelem, (int)key);
847 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
857 do_join(TARG, *MARK, MARK, SP);
863 /* Oversized hot code. */
865 /* also used for: pp_say() */
869 dSP; dMARK; dORIGMARK;
873 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;
877 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
880 if (MARK == ORIGMARK) {
881 /* If using default handle then we need to make space to
882 * pass object as 1st arg, so move other args up ...
886 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
889 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
891 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
892 | (PL_op->op_type == OP_SAY
893 ? TIED_METHOD_SAY : 0)), sp - mark);
896 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
897 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
900 SETERRNO(EBADF,RMS_IFI);
903 else if (!(fp = IoOFP(io))) {
905 report_wrongway_fh(gv, '<');
908 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);
912 SV * const ofs = GvSV(PL_ofsgv); /* $, */
914 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
916 if (!do_print(*MARK, fp))
920 /* don't use 'ofs' here - it may be invalidated by magic callbacks */
921 if (!do_print(GvSV(PL_ofsgv), fp)) {
930 if (!do_print(*MARK, fp))
938 if (PL_op->op_type == OP_SAY) {
939 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))
942 else if (PL_ors_sv && SvOK(PL_ors_sv))
943 if (!do_print(PL_ors_sv, fp)) /* $\ */
946 if (IoFLAGS(io) & IOf_FLUSH)
947 if (PerlIO_flush(fp) == EOF)
957 XPUSHs(&PL_sv_undef);
962 /* also used for: pp_rv2hv() */
963 /* also called directly by pp_lvavref */
968 const U8 gimme = GIMME_V;
969 static const char an_array[] = "an ARRAY";
970 static const char a_hash[] = "a HASH";
971 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
972 || PL_op->op_type == OP_LVAVREF;
973 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
977 if (UNLIKELY(SvAMAGIC(sv))) {
978 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
981 if (UNLIKELY(SvTYPE(sv) != type))
982 /* diag_listed_as: Not an ARRAY reference */
983 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
984 else if (UNLIKELY(PL_op->op_flags & OPf_MOD
985 && PL_op->op_private & OPpLVAL_INTRO))
986 Perl_croak(aTHX_ "%s", PL_no_localize_ref);
988 else if (UNLIKELY(SvTYPE(sv) != type)) {
991 if (!isGV_with_GP(sv)) {
992 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
1000 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
1001 if (PL_op->op_private & OPpLVAL_INTRO)
1002 sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
1004 if (PL_op->op_flags & OPf_REF) {
1008 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
1009 const I32 flags = is_lvalue_sub();
1010 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1011 if (gimme != G_ARRAY)
1012 goto croak_cant_return;
1019 AV *const av = MUTABLE_AV(sv);
1020 /* The guts of pp_rv2av */
1021 if (gimme == G_ARRAY) {
1027 else if (gimme == G_SCALAR) {
1029 const SSize_t maxarg = AvFILL(av) + 1;
1033 /* The guts of pp_rv2hv */
1034 if (gimme == G_ARRAY) { /* array wanted */
1036 return Perl_do_kv(aTHX);
1038 else if ((PL_op->op_private & OPpTRUEBOOL
1039 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
1040 && block_gimme() == G_VOID ))
1041 && (!SvRMAGICAL(sv) || !mg_find(sv, PERL_MAGIC_tied)))
1042 SETs(HvUSEDKEYS(sv) ? &PL_sv_yes : sv_2mortal(newSViv(0)));
1043 else if (gimme == G_SCALAR) {
1045 TARG = Perl_hv_scalar(aTHX_ MUTABLE_HV(sv));
1052 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
1053 is_pp_rv2av ? "array" : "hash");
1058 S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
1060 PERL_ARGS_ASSERT_DO_ODDBALL;
1063 if (ckWARN(WARN_MISC)) {
1065 if (oddkey == firstkey &&
1067 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
1068 SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
1070 err = "Reference found where even-sized list expected";
1073 err = "Odd number of elements in hash assignment";
1074 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
1081 /* Do a mark and sweep with the SVf_BREAK flag to detect elements which
1082 * are common to both the LHS and RHS of an aassign, and replace them
1083 * with copies. All these copies are made before the actual list assign is
1086 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
1087 * element ($b) to the first LH element ($a), modifies $a; when the
1088 * second assignment is done, the second RH element now has the wrong
1089 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
1090 * Note that we don't need to make a mortal copy of $b.
1092 * The algorithm below works by, for every RHS element, mark the
1093 * corresponding LHS target element with SVf_BREAK. Then if the RHS
1094 * element is found with SVf_BREAK set, it means it would have been
1095 * modified, so make a copy.
1096 * Note that by scanning both LHS and RHS in lockstep, we avoid
1097 * unnecessary copies (like $b above) compared with a naive
1098 * "mark all LHS; copy all marked RHS; unmark all LHS".
1100 * If the LHS element is a 'my' declaration and has a refcount of 1, then
1101 * it can't be common and can be skipped.
1103 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
1104 * that we thought we didn't need to call S_aassign_copy_common(), but we
1105 * have anyway for sanity checking. If we find we need to copy, then panic.
1108 PERL_STATIC_INLINE void
1109 S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
1110 SV **firstrelem, SV **lastrelem
1119 SSize_t lcount = lastlelem - firstlelem + 1;
1120 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
1121 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
1122 bool copy_all = FALSE;
1124 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
1125 assert(firstlelem < lastlelem); /* at least 2 LH elements */
1126 assert(firstrelem < lastrelem); /* at least 2 RH elements */
1130 /* we never have to copy the first RH element; it can't be corrupted
1131 * by assigning something to the corresponding first LH element.
1132 * So this scan does in a loop: mark LHS[N]; test RHS[N+1]
1134 relem = firstrelem + 1;
1136 for (; relem <= lastrelem; relem++) {
1139 /* mark next LH element */
1141 if (--lcount >= 0) {
1144 if (UNLIKELY(!svl)) {/* skip AV alias marker */
1145 assert (lelem <= lastlelem);
1151 if (SvSMAGICAL(svl)) {
1154 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
1157 /* this LH element will consume all further args;
1158 * no need to mark any further LH elements (if any).
1159 * But we still need to scan any remaining RHS elements;
1160 * set lcount negative to distinguish from lcount == 0,
1161 * so the loop condition continues being true
1164 lelem--; /* no need to unmark this element */
1166 else if (!(do_rc1 && SvREFCNT(svl) == 1) && svl != &PL_sv_undef) {
1167 assert(!SvIMMORTAL(svl));
1168 SvFLAGS(svl) |= SVf_BREAK;
1172 /* don't check RH element if no SVf_BREAK flags set yet */
1179 /* see if corresponding RH element needs copying */
1185 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
1189 /* op_dump(PL_op); */
1191 "panic: aassign skipped needed copy of common RH elem %"
1192 UVuf, (UV)(relem - firstrelem));
1196 TAINT_NOT; /* Each item is independent */
1198 /* Dear TODO test in t/op/sort.t, I love you.
1199 (It's relying on a panic, not a "semi-panic" from newSVsv()
1200 and then an assertion failure below.) */
1201 if (UNLIKELY(SvIS_FREED(svr))) {
1202 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
1205 /* avoid break flag while copying; otherwise COW etc
1207 SvFLAGS(svr) &= ~SVf_BREAK;
1208 /* Not newSVsv(), as it does not allow copy-on-write,
1209 resulting in wasteful copies.
1210 Also, we use SV_NOSTEAL in case the SV is used more than
1211 once, e.g. (...) = (f())[0,0]
1212 Where the same SV appears twice on the RHS without a ref
1213 count bump. (Although I suspect that the SV won't be
1214 stealable here anyway - DAPM).
1216 *relem = sv_mortalcopy_flags(svr,
1217 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1218 /* ... but restore afterwards in case it's needed again,
1219 * e.g. ($a,$b,$c) = (1,$a,$a)
1221 SvFLAGS(svr) |= SVf_BREAK;
1233 while (lelem > firstlelem) {
1234 SV * const svl = *(--lelem);
1236 SvFLAGS(svl) &= ~SVf_BREAK;
1245 SV **lastlelem = PL_stack_sp;
1246 SV **lastrelem = PL_stack_base + POPMARK;
1247 SV **firstrelem = PL_stack_base + POPMARK + 1;
1248 SV **firstlelem = lastrelem + 1;
1261 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
1262 * only need to save locally, not on the save stack */
1263 U16 old_delaymagic = PL_delaymagic;
1268 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
1270 /* If there's a common identifier on both sides we have to take
1271 * special care that assigning the identifier on the left doesn't
1272 * clobber a value on the right that's used later in the list.
1275 /* at least 2 LH and RH elements, or commonality isn't an issue */
1276 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1277 for (relem = firstrelem+1; relem <= lastrelem; relem++) {
1278 if (SvGMAGICAL(*relem))
1281 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1282 if (*lelem && SvSMAGICAL(*lelem))
1285 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
1286 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
1287 /* skip the scan if all scalars have a ref count of 1 */
1288 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1290 if (!sv || SvREFCNT(sv) == 1)
1292 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVAV)
1299 S_aassign_copy_common(aTHX_
1300 firstlelem, lastlelem, firstrelem, lastrelem
1310 /* on debugging builds, do the scan even if we've concluded we
1311 * don't need to, then panic if we find commonality. Note that the
1312 * scanner assumes at least 2 elements */
1313 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1321 lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
1328 while (LIKELY(lelem <= lastlelem)) {
1330 TAINT_NOT; /* Each item stands on its own, taintwise. */
1332 if (UNLIKELY(!sv)) {
1335 ASSUME(SvTYPE(sv) == SVt_PVAV);
1337 switch (SvTYPE(sv)) {
1339 bool already_copied = FALSE;
1340 ary = MUTABLE_AV(sv);
1341 magic = SvMAGICAL(ary) != 0;
1343 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1345 /* We need to clear ary. There is a danger that if we do this,
1346 * elements on the RHS may be prematurely freed, e.g.
1348 * In the case of possible commonality, make a copy of each
1349 * RHS SV *before* clearing the array, and add a reference
1350 * from the tmps stack, so that it doesn't leak on death.
1351 * Otherwise, make a copy of each RHS SV only as we're storing
1352 * it into the array - that way we don't have to worry about
1353 * it being leaked if we die, but don't incur the cost of
1354 * mortalising everything.
1357 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1358 && (relem <= lastrelem)
1359 && (magic || AvFILL(ary) != -1))
1362 EXTEND_MORTAL(lastrelem - relem + 1);
1363 for (svp = relem; svp <= lastrelem; svp++) {
1364 /* see comment in S_aassign_copy_common about SV_NOSTEAL */
1365 *svp = sv_mortalcopy_flags(*svp,
1366 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1369 already_copied = TRUE;
1373 if (relem <= lastrelem)
1374 av_extend(ary, lastrelem - relem);
1377 while (relem <= lastrelem) { /* gobble up all the rest */
1379 if (LIKELY(!alias)) {
1384 /* before newSV, in case it dies */
1387 /* see comment in S_aassign_copy_common about
1389 sv_setsv_flags(sv, *relem,
1390 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
1395 if (!already_copied)
1398 DIE(aTHX_ "Assigned value is not a reference");
1399 if (SvTYPE(SvRV(*relem)) > SVt_PVLV)
1400 /* diag_listed_as: Assigned value is not %s reference */
1402 "Assigned value is not a SCALAR reference");
1403 if (lval && !already_copied)
1404 *relem = sv_mortalcopy(*relem);
1405 /* XXX else check for weak refs? */
1406 sv = SvREFCNT_inc_NN(SvRV(*relem));
1410 SvREFCNT_inc_simple_void_NN(sv); /* undo mortal free */
1411 didstore = av_store(ary,i++,sv);
1420 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
1421 SvSETMAGIC(MUTABLE_SV(ary));
1426 case SVt_PVHV: { /* normal hash */
1430 SV** topelem = relem;
1431 SV **firsthashrelem = relem;
1432 bool already_copied = FALSE;
1434 hash = MUTABLE_HV(sv);
1435 magic = SvMAGICAL(hash) != 0;
1437 odd = ((lastrelem - firsthashrelem)&1)? 0 : 1;
1438 if (UNLIKELY(odd)) {
1439 do_oddball(lastrelem, firsthashrelem);
1440 /* we have firstlelem to reuse, it's not needed anymore
1442 *(lastrelem+1) = &PL_sv_undef;
1446 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1448 /* We need to clear hash. There is a danger that if we do this,
1449 * elements on the RHS may be prematurely freed, e.g.
1450 * %h = (foo => $h{bar});
1451 * In the case of possible commonality, make a copy of each
1452 * RHS SV *before* clearing the hash, and add a reference
1453 * from the tmps stack, so that it doesn't leak on death.
1456 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1457 && (relem <= lastrelem)
1458 && (magic || HvUSEDKEYS(hash)))
1461 EXTEND_MORTAL(lastrelem - relem + 1);
1462 for (svp = relem; svp <= lastrelem; svp++) {
1463 *svp = sv_mortalcopy_flags(*svp,
1464 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1467 already_copied = TRUE;
1472 while (LIKELY(relem < lastrelem+odd)) { /* gobble up all the rest */
1475 /* Copy the key if aassign is called in lvalue context,
1476 to avoid having the next op modify our rhs. Copy
1477 it also if it is gmagical, lest it make the
1478 hv_store_ent call below croak, leaking the value. */
1479 sv = (lval || SvGMAGICAL(*relem)) && !already_copied
1480 ? sv_mortalcopy(*relem)
1489 sv_setsv_nomg(tmpstr,*relem++); /* value */
1492 if (gimme == G_ARRAY) {
1493 if (hv_exists_ent(hash, sv, 0))
1494 /* key overwrites an existing entry */
1497 /* copy element back: possibly to an earlier
1498 * stack location if we encountered dups earlier,
1499 * possibly to a later stack location if odd */
1501 *topelem++ = tmpstr;
1505 SvREFCNT_inc_simple_void_NN(tmpstr); /* undo mortal free */
1506 didstore = hv_store_ent(hash,sv,tmpstr,0);
1508 if (!didstore) sv_2mortal(tmpstr);
1514 if (duplicates && gimme == G_ARRAY) {
1515 /* at this point we have removed the duplicate key/value
1516 * pairs from the stack, but the remaining values may be
1517 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
1518 * the (a 2), but the stack now probably contains
1519 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
1520 * obliterates the earlier key. So refresh all values. */
1521 lastrelem -= duplicates;
1522 relem = firsthashrelem;
1523 while (relem < lastrelem+odd) {
1525 he = hv_fetch_ent(hash, *relem++, 0, 0);
1526 *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
1529 if (odd && gimme == G_ARRAY) lastrelem++;
1533 if (SvIMMORTAL(sv)) {
1534 if (relem <= lastrelem)
1538 if (relem <= lastrelem) {
1540 SvTEMP(sv) && !SvSMAGICAL(sv) && SvREFCNT(sv) == 1 &&
1541 (!isGV_with_GP(sv) || SvFAKE(sv)) && ckWARN(WARN_MISC)
1544 packWARN(WARN_MISC),
1545 "Useless assignment to a temporary"
1547 sv_setsv(sv, *relem);
1551 sv_setsv(sv, &PL_sv_undef);
1556 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
1557 /* Will be used to set PL_tainting below */
1558 Uid_t tmp_uid = PerlProc_getuid();
1559 Uid_t tmp_euid = PerlProc_geteuid();
1560 Gid_t tmp_gid = PerlProc_getgid();
1561 Gid_t tmp_egid = PerlProc_getegid();
1563 /* XXX $> et al currently silently ignore failures */
1564 if (PL_delaymagic & DM_UID) {
1565 #ifdef HAS_SETRESUID
1567 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1568 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
1571 # ifdef HAS_SETREUID
1573 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1574 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
1577 if ((PL_delaymagic & DM_UID) == DM_RUID) {
1578 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
1579 PL_delaymagic &= ~DM_RUID;
1581 # endif /* HAS_SETRUID */
1583 if ((PL_delaymagic & DM_UID) == DM_EUID) {
1584 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
1585 PL_delaymagic &= ~DM_EUID;
1587 # endif /* HAS_SETEUID */
1588 if (PL_delaymagic & DM_UID) {
1589 if (PL_delaymagic_uid != PL_delaymagic_euid)
1590 DIE(aTHX_ "No setreuid available");
1591 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
1593 # endif /* HAS_SETREUID */
1594 #endif /* HAS_SETRESUID */
1596 tmp_uid = PerlProc_getuid();
1597 tmp_euid = PerlProc_geteuid();
1599 /* XXX $> et al currently silently ignore failures */
1600 if (PL_delaymagic & DM_GID) {
1601 #ifdef HAS_SETRESGID
1603 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1604 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
1607 # ifdef HAS_SETREGID
1609 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1610 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
1613 if ((PL_delaymagic & DM_GID) == DM_RGID) {
1614 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
1615 PL_delaymagic &= ~DM_RGID;
1617 # endif /* HAS_SETRGID */
1619 if ((PL_delaymagic & DM_GID) == DM_EGID) {
1620 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
1621 PL_delaymagic &= ~DM_EGID;
1623 # endif /* HAS_SETEGID */
1624 if (PL_delaymagic & DM_GID) {
1625 if (PL_delaymagic_gid != PL_delaymagic_egid)
1626 DIE(aTHX_ "No setregid available");
1627 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
1629 # endif /* HAS_SETREGID */
1630 #endif /* HAS_SETRESGID */
1632 tmp_gid = PerlProc_getgid();
1633 tmp_egid = PerlProc_getegid();
1635 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
1636 #ifdef NO_TAINT_SUPPORT
1637 PERL_UNUSED_VAR(tmp_uid);
1638 PERL_UNUSED_VAR(tmp_euid);
1639 PERL_UNUSED_VAR(tmp_gid);
1640 PERL_UNUSED_VAR(tmp_egid);
1643 PL_delaymagic = old_delaymagic;
1645 if (gimme == G_VOID)
1646 SP = firstrelem - 1;
1647 else if (gimme == G_SCALAR) {
1650 SETi(lastrelem - firstrelem + 1);
1654 /* note that in this case *firstlelem may have been overwritten
1655 by sv_undef in the odd hash case */
1658 SP = firstrelem + (lastlelem - firstlelem);
1659 lelem = firstlelem + (relem - firstrelem);
1661 *relem++ = (lelem <= lastlelem) ? *lelem++ : &PL_sv_undef;
/* Fragment of pp_qr (the qr// operator). Builds a fresh, blessed Regexp
 * object from the compiled pattern: copies the regexp, clones any attached
 * anonymous CV, blesses into the overriding package (if any) and propagates
 * pattern taint onto the returned object.
 * NOTE(review): surrounding lines are elided in this view; comments are
 * based only on the visible code. */
1671 PMOP * const pm = cPMOP;
1672 REGEXP * rx = PM_GETRE(pm);
1673 SV * const pkg = rx ? CALLREG_PACKAGE(rx) : NULL;
1674 SV * const rv = sv_newmortal();
1678 SvUPGRADE(rv, SVt_IV);
1679 /* For a subroutine describing itself as "This is a hacky workaround" I'm
1680 loath to use it here, but it seems to be the right fix. Or close.
1681 The key part appears to be that it's essential for pp_qr to return a new
1682 object (SV), which implies that there needs to be an effective way to
1683 generate a new SV from the existing SV that is pre-compiled in the
1685 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));
1688 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
1689 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
1690 *cvp = cv_clone(cv);
1691 SvREFCNT_dec_NN(cv);
1695 HV *const stash = gv_stashsv(pkg, GV_ADD);
1696 SvREFCNT_dec_NN(pkg);
1697 (void)sv_bless(rv, stash);
1700 if (UNLIKELY(RX_ISTAINTED(rx))) {
1702 SvTAINTED_on(SvRV(rv));
/* Fragment of pp_match (m// and implicit matches). Visible here: fetching
 * the target string, ?? once-only handling, minlen short-circuit, pos()
 * retrieval for //g, PERL_SAWAMPERSAND copy-flag setup, the CALLREGEXEC
 * call, taint propagation, pos() update and pushing captures in list
 * context.
 * NOTE(review): many lines are elided in this view; comments describe only
 * the visible code. */
1715 SSize_t curpos = 0; /* initial pos() or current $+[0] */
1718 const char *truebase; /* Start of string */
1719 REGEXP *rx = PM_GETRE(pm);
1721 const U8 gimme = GIMME_V;
1723 const I32 oldsave = PL_savestack_ix;
1724 I32 had_zerolen = 0;
1727 if (PL_op->op_flags & OPf_STACKED)
1736 PUTBACK; /* EVAL blocks need stack_sp. */
1737 /* Skip get-magic if this is a qr// clone, because regcomp has
1739 truebase = ReANY(rx)->mother_re
1740 ? SvPV_nomg_const(TARG, len)
1741 : SvPV_const(TARG, len);
1743 DIE(aTHX_ "panic: pp_match");
1744 strend = truebase + len;
1745 rxtainted = (RX_ISTAINTED(rx) ||
1746 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
1749 /* We need to know this in case we fail out early - pos() must be reset */
1750 global = dynpm->op_pmflags & PMf_GLOBAL;
1752 /* PMdf_USED is set after a ?? matches once */
1755 SvREADONLY(PL_regex_pad[pm->op_pmoffset])
1757 pm->op_pmflags & PMf_USED
1760 DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once"));
1764 /* empty pattern special-cased to use last successful pattern if
1765 possible, except for qr// */
1766 if (!ReANY(rx)->mother_re && !RX_PRELEN(rx)
/* fail fast: target is shorter than the shortest possible match */
1772 if (RX_MINLEN(rx) >= 0 && (STRLEN)RX_MINLEN(rx) > len) {
1773 DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
1774 UVuf" < %"IVdf")\n",
1775 (UV)len, (IV)RX_MINLEN(rx)));
1779 /* get pos() if //g */
1781 mg = mg_find_mglob(TARG);
1782 if (mg && mg->mg_len >= 0) {
1783 curpos = MgBYTEPOS(mg, TARG, truebase, len);
1784 /* last time pos() was set, it was zero-length match */
1785 if (mg->mg_flags & MGf_MINMATCH)
1790 #ifdef PERL_SAWAMPERSAND
1793 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
1794 || (dynpm->op_pmflags & PMf_KEEPCOPY)
1798 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
1799 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
1800 * only on the first iteration. Therefore we need to copy $' as well
1801 * as $&, to make the rest of the string available for captures in
1802 * subsequent iterations */
1803 if (! (global && gimme == G_ARRAY))
1804 r_flags |= REXEC_COPY_SKIP_POST;
1806 #ifdef PERL_SAWAMPERSAND
1807 if (dynpm->op_pmflags & PMf_KEEPCOPY)
1808 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
1809 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
1816 s = truebase + curpos;
1818 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
1819 had_zerolen, TARG, NULL, r_flags))
/* ?? patterns: mark as used so they don't match again this run */
1823 if (dynpm->op_pmflags & PMf_ONCE)
1825 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
1827 dynpm->op_pmflags |= PMf_USED;
1831 RX_MATCH_TAINTED_on(rx);
1832 TAINT_IF(RX_MATCH_TAINTED(rx));
/* update pos() via mglob magic for //g in scalar context or /c */
1836 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
1838 mg = sv_magicext_mglob(TARG);
1839 MgBYTEPOS_set(mg, TARG, truebase, RX_OFFS(rx)[0].end);
1840 if (RX_ZERO_LEN(rx))
1841 mg->mg_flags |= MGf_MINMATCH;
1843 mg->mg_flags &= ~MGf_MINMATCH;
1846 if ((!RX_NPARENS(rx) && !global) || gimme != G_ARRAY) {
1847 LEAVE_SCOPE(oldsave);
1851 /* push captures on stack */
1854 const I32 nparens = RX_NPARENS(rx);
1855 I32 i = (global && !nparens) ? 1 : 0;
1857 SPAGAIN; /* EVAL blocks could move the stack. */
1858 EXTEND(SP, nparens + i);
1859 EXTEND_MORTAL(nparens + i);
1860 for (i = !i; i <= nparens; i++) {
1861 PUSHs(sv_newmortal());
1862 if (LIKELY((RX_OFFS(rx)[i].start != -1)
1863 && RX_OFFS(rx)[i].end != -1 ))
1865 const I32 len = RX_OFFS(rx)[i].end - RX_OFFS(rx)[i].start;
1866 const char * const s = RX_OFFS(rx)[i].start + truebase;
1867 if (UNLIKELY(RX_OFFS(rx)[i].end < 0 || RX_OFFS(rx)[i].start < 0
1868 || len < 0 || len > strend - s))
1869 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
1870 "start=%ld, end=%ld, s=%p, strend=%p, len=%"UVuf,
1871 (long) i, (long) RX_OFFS(rx)[i].start,
1872 (long)RX_OFFS(rx)[i].end, s, strend, (UV) len);
1873 sv_setpvn(*SP, s, len);
1874 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
1879 curpos = (UV)RX_OFFS(rx)[0].end;
1880 had_zerolen = RX_ZERO_LEN(rx);
1881 PUTBACK; /* EVAL blocks may use stack */
1882 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
1885 LEAVE_SCOPE(oldsave);
1888 NOT_REACHED; /* NOTREACHED */
/* match failed: reset pos() unless //g with /c ("continue") semantics */
1891 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
1893 mg = mg_find_mglob(TARG);
1897 LEAVE_SCOPE(oldsave);
1898 if (gimme == G_ARRAY)
/* Perl_do_readline: implements <FH>, <>, readline() and glob() (OP_READLINE,
 * OP_RCATLINE, OP_GLOB). Handles tied handles, the magic ARGV handle
 * (iterating @ARGV / defaulting to "-" i.e. stdin), closed/wrong-way handle
 * warnings, line reading via sv_gets(), taint, glob post-processing and
 * buffer-size tuning.
 * NOTE(review): many lines are elided in this view; comments describe only
 * the visible code. */
1904 Perl_do_readline(pTHX)
1906 dSP; dTARGETSTACKED;
1911 IO * const io = GvIO(PL_last_in_gv);
1912 const I32 type = PL_op->op_type;
1913 const U8 gimme = GIMME_V;
/* tied handle: delegate to the READLINE method */
1916 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
1918 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
1919 if (gimme == G_SCALAR) {
1921 SvSetSV_nosteal(TARG, TOPs);
1931 if (IoFLAGS(io) & IOf_ARGV) {
1932 if (IoFLAGS(io) & IOf_START) {
/* empty @ARGV: read from "-" (stdin) */
1934 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) {
1935 IoFLAGS(io) &= ~IOf_START;
1936 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
1937 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
1938 sv_setpvs(GvSVn(PL_last_in_gv), "-");
1939 SvSETMAGIC(GvSV(PL_last_in_gv));
1944 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
1945 if (!fp) { /* Note: fp != IoIFP(io) */
1946 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
1949 else if (type == OP_GLOB)
1950 fp = Perl_start_glob(aTHX_ POPs, io);
1952 else if (type == OP_GLOB)
1954 else if (IoTYPE(io) == IoTYPE_WRONLY) {
1955 report_wrongway_fh(PL_last_in_gv, '>');
/* no usable filehandle: warn (if enabled) and return undef/empty */
1959 if ((!io || !(IoFLAGS(io) & IOf_START))
1960 && ckWARN(WARN_CLOSED)
1963 report_evil_fh(PL_last_in_gv);
1965 if (gimme == G_SCALAR) {
1966 /* undef TARG, and push that undefined value */
1967 if (type != OP_RCATLINE) {
1968 sv_setsv(TARG,NULL);
1975 if (gimme == G_SCALAR) {
1977 if (type == OP_RCATLINE && SvGMAGICAL(sv))
1980 if (type == OP_RCATLINE)
1981 SvPV_force_nomg_nolen(sv);
1985 else if (isGV_with_GP(sv)) {
1986 SvPV_force_nomg_nolen(sv);
1988 SvUPGRADE(sv, SVt_PV);
1989 tmplen = SvLEN(sv); /* remember if already alloced */
1990 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
1991 /* try short-buffering it. Please update t/op/readline.t
1992 * if you change the growth length.
1997 if (type == OP_RCATLINE && SvOK(sv)) {
1999 SvPV_force_nomg_nolen(sv);
2005 sv = sv_2mortal(newSV(80));
2009 /* This should not be marked tainted if the fp is marked clean */
2010 #define MAYBE_TAINT_LINE(io, sv) \
2011 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
2016 /* delay EOF state for a snarfed empty file */
2017 #define SNARF_EOF(gimme,rs,io,sv) \
2018 (gimme != G_SCALAR || SvCUR(sv) \
2019 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
/* main read; on EOF/error, advance to the next @ARGV file if applicable */
2023 if (!sv_gets(sv, fp, offset)
2025 || SNARF_EOF(gimme, PL_rs, io, sv)
2026 || PerlIO_error(fp)))
2028 PerlIO_clearerr(fp);
2029 if (IoFLAGS(io) & IOf_ARGV) {
2030 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
2033 (void)do_close(PL_last_in_gv, FALSE);
2035 else if (type == OP_GLOB) {
2036 if (!do_close(PL_last_in_gv, FALSE)) {
2037 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
2038 "glob failed (child exited with status %d%s)",
2039 (int)(STATUS_CURRENT >> 8),
2040 (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
2043 if (gimme == G_SCALAR) {
2044 if (type != OP_RCATLINE) {
2045 SV_CHECK_THINKFIRST_COW_DROP(TARG);
2051 MAYBE_TAINT_LINE(io, sv);
2054 MAYBE_TAINT_LINE(io, sv);
2056 IoFLAGS(io) |= IOf_NOLINE;
2060 if (type == OP_GLOB) {
/* glob: strip trailing record separator from each result */
2064 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
2065 char * const tmps = SvEND(sv) - 1;
2066 if (*tmps == *SvPVX_const(PL_rs)) {
2068 SvCUR_set(sv, SvCUR(sv) - 1);
2071 for (t1 = SvPVX_const(sv); *t1; t1++)
2073 if (strchr("*%?", *t1))
2075 if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
2078 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
2079 (void)POPs; /* Unmatched wildcard? Chuck it... */
2082 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
2083 if (ckWARN(WARN_UTF8)) {
2084 const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
2085 const STRLEN len = SvCUR(sv) - offset;
2088 if (!is_utf8_string_loc(s, len, &f))
2089 /* Emulate :encoding(utf8) warning in the same case. */
2090 Perl_warner(aTHX_ packWARN(WARN_UTF8),
2091 "utf8 \"\\x%02X\" does not map to Unicode",
2092 f < (U8*)SvEND(sv) ? *f : 0);
2095 if (gimme == G_ARRAY) {
2096 if (SvLEN(sv) - SvCUR(sv) > 20) {
2097 SvPV_shrink_to_cur(sv);
2099 sv = sv_2mortal(newSV(80));
2102 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
2103 /* try to reclaim a bit of scalar space (only on 1st alloc) */
2104 const STRLEN new_len
2105 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */
2106 SvPV_renew(sv, new_len);
/* Fragment of pp_helem: fetch (or lvalue-create) a single hash element.
 * Visible here: argument popping, tied-hash existence probing, deferred
 * lvalue handling via PERL_MAGIC_defelem, local() bookkeeping, OPpDEREF
 * vivification and the compromise get-magic call at the end.
 * NOTE(review): several lines are elided in this view. */
2117 SV * const keysv = POPs;
2118 HV * const hv = MUTABLE_HV(POPs);
2119 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2120 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2122 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2123 bool preeminent = TRUE;
2125 if (SvTYPE(hv) != SVt_PVHV)
2132 /* If we can determine whether the element exists,
2133 * try to preserve the existence of a tied hash
2134 * element by using EXISTS and DELETE if possible.
2135 * Fall back to FETCH and STORE otherwise. */
2136 if (SvCANEXISTDELETE(hv))
2137 preeminent = hv_exists_ent(hv, keysv, 0);
2140 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2141 svp = he ? &HeVAL(he) : NULL;
2143 if (!svp || !*svp || *svp == &PL_sv_undef) {
2147 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
/* deferred lvalue: hand back a defelem proxy instead of vivifying now */
2149 lv = sv_newmortal();
2150 sv_upgrade(lv, SVt_PVLV);
2152 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
2153 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
2154 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2160 if (HvNAME_get(hv) && isGV(*svp))
2161 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
2162 else if (preeminent)
2163 save_helem_flags(hv, keysv, svp,
2164 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
2166 SAVEHDELETE(hv, keysv);
2168 else if (PL_op->op_private & OPpDEREF) {
2169 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
2173 sv = (svp && *svp ? *svp : &PL_sv_undef);
2174 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
2175 * was to make C<local $tied{foo} = $tied{foo}> possible.
2176 * However, it seems no longer to be needed for that purpose, and
2177 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g>
2178 * would loop endlessly since the pos magic is getting set on the
2179 * mortal copy and lost. However, the copy has the effect of
2180 * triggering the get magic, and losing it altogether made things like
2181 * c<$tied{foo};> in void context no longer do get magic, which some
2182 * code relied on. Also, delayed triggering of magic on @+ and friends
2183 * meant the original regex may be out of scope by now. So as a
2184 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
2185 * being called too many times). */
2186 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
2193 /* a stripped-down version of Perl_softref2xv() for use by
2194 * pp_multideref(), which doesn't use PL_op->op_flags */
/* Resolves a symbolic ("soft") reference to a GV of the requested type.
 * Under 'use strict "refs"' it dies with PL_no_symref_sv; an unusable
 * (presumably undef/empty — elided guards) name dies with PL_no_usym.
 * Otherwise fetches, creating if necessary, the named package variable.
 * NOTE(review): several guard lines are elided in this view. */
2197 S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
2200 if (PL_op->op_private & HINT_STRICT_REFS) {
2202 Perl_die(aTHX_ PL_no_symref_sv, sv,
2203 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
2205 Perl_die(aTHX_ PL_no_usym, what);
2208 Perl_die(aTHX_ PL_no_usym, what);
2209 return gv_fetchsv_nomg(sv, GV_ADD, type);
2213 /* Handle one or more aggregate derefs and array/hash indexings, e.g.
2214 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
2216 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
2217 * Each of these either contains a set of actions, or an argument, such as
2218 * an IV to use as an array index, or a lexical var to retrieve.
2219 * Several actions are stored per UV; we keep shifting new actions off the
2220 * one UV, and only reload when it becomes zero.
/* Body fragment of pp_multideref: interprets the action list in op_aux,
 * unrolling chains like $a[0]{$k}[$i] without executing separate ops.
 * Visible here: the AV half (fetch/vivify array, index retrieval, aelem
 * with exists/delete/lvalue handling) and the parallel HV half (helem).
 * NOTE(review): many lines are elided in this view; comments describe only
 * the visible code. */
2225 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
2226 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
2227 UV actions = items->uv;
2230 /* this tells find_uninit_var() where we're up to */
2231 PL_multideref_pc = items;
2234 /* there are three main classes of action; the first retrieve
2235 * the initial AV or HV from a variable or the stack; the second
2236 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
2237 * the third an unrolled (/DREFHV, rv2hv, helem).
2239 switch (actions & MDEREF_ACTION_MASK) {
2242 actions = (++items)->uv;
2245 case MDEREF_AV_padav_aelem: /* $lex[...] */
2246 sv = PAD_SVl((++items)->pad_offset);
2249 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
2250 sv = UNOP_AUX_item_sv(++items);
2251 assert(isGV_with_GP(sv));
2252 sv = (SV*)GvAVn((GV*)sv);
2255 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
2260 goto do_AV_rv2av_aelem;
2263 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
2264 sv = UNOP_AUX_item_sv(++items);
2265 assert(isGV_with_GP(sv));
2266 sv = GvSVn((GV*)sv);
2267 goto do_AV_vivify_rv2av_aelem;
2269 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
2270 sv = PAD_SVl((++items)->pad_offset);
2273 do_AV_vivify_rv2av_aelem:
2274 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
2275 /* this is the OPpDEREF action normally found at the end of
2276 * ops like aelem, helem, rv2sv */
2277 sv = vivify_ref(sv, OPpDEREF_AV);
2281 /* this is basically a copy of pp_rv2av when it just has the
2284 if (LIKELY(SvROK(sv))) {
2285 if (UNLIKELY(SvAMAGIC(sv))) {
2286 sv = amagic_deref_call(sv, to_av_amg);
2289 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
2290 DIE(aTHX_ "Not an ARRAY reference");
2292 else if (SvTYPE(sv) != SVt_PVAV) {
2293 if (!isGV_with_GP(sv))
2294 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
2295 sv = MUTABLE_SV(GvAVn((GV*)sv));
2301 /* retrieve the key; this may be either a lexical or package
2302 * var (whose index/ptr is stored as an item) or a signed
2303 * integer constant stored as an item.
2306 IV elem = 0; /* to shut up stupid compiler warnings */
2309 assert(SvTYPE(sv) == SVt_PVAV);
2311 switch (actions & MDEREF_INDEX_MASK) {
2312 case MDEREF_INDEX_none:
2314 case MDEREF_INDEX_const:
2315 elem = (++items)->iv;
2317 case MDEREF_INDEX_padsv:
2318 elemsv = PAD_SVl((++items)->pad_offset);
2320 case MDEREF_INDEX_gvsv:
2321 elemsv = UNOP_AUX_item_sv(++items);
2322 assert(isGV_with_GP(elemsv));
2323 elemsv = GvSVn((GV*)elemsv);
2325 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
2326 && ckWARN(WARN_MISC)))
2327 Perl_warner(aTHX_ packWARN(WARN_MISC),
2328 "Use of reference \"%"SVf"\" as array index",
2330 /* the only time that S_find_uninit_var() needs this
2331 * is to determine which index value triggered the
2332 * undef warning. So just update it here. Note that
2333 * since we don't save and restore this var (e.g. for
2334 * tie or overload execution), its value will be
2335 * meaningless apart from just here */
2336 PL_multideref_pc = items;
2337 elem = SvIV(elemsv);
2342 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
2344 if (!(actions & MDEREF_FLAG_last)) {
2345 SV** svp = av_fetch((AV*)sv, elem, 1);
2346 if (!svp || ! (sv=*svp))
2347 DIE(aTHX_ PL_no_aelem, elem);
2351 if (PL_op->op_private &
2352 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2354 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2355 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
2358 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2359 sv = av_delete((AV*)sv, elem, discard);
2367 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2368 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2369 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2370 bool preeminent = TRUE;
2371 AV *const av = (AV*)sv;
2374 if (UNLIKELY(localizing)) {
2378 /* If we can determine whether the element exists,
2379 * try to preserve the existence of a tied array
2380 * element by using EXISTS and DELETE if possible.
2381 * Fall back to FETCH and STORE otherwise. */
2382 if (SvCANEXISTDELETE(av))
2383 preeminent = av_exists(av, elem);
2386 svp = av_fetch(av, elem, lval && !defer);
2389 if (!svp || !(sv = *svp)) {
2392 DIE(aTHX_ PL_no_aelem, elem);
2393 len = av_tindex(av);
2394 sv = sv_2mortal(newSVavdefelem(av,
2395 /* Resolve a negative index now, unless it points
2396 * before the beginning of the array, in which
2397 * case record it for error reporting in
2398 * magic_setdefelem. */
2399 elem < 0 && len + elem >= 0
2400 ? len + elem : elem, 1));
2403 if (UNLIKELY(localizing)) {
2405 save_aelem(av, elem, svp);
2406 sv = *svp; /* may have changed */
2409 SAVEADELETE(av, elem);
2414 sv = (svp ? *svp : &PL_sv_undef);
2415 /* see note in pp_helem() */
2416 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
/* ---- HV half: mirrors the AV cases above, but for hash elements ---- */
2433 case MDEREF_HV_padhv_helem: /* $lex{...} */
2434 sv = PAD_SVl((++items)->pad_offset);
2437 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
2438 sv = UNOP_AUX_item_sv(++items);
2439 assert(isGV_with_GP(sv));
2440 sv = (SV*)GvHVn((GV*)sv);
2443 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
2448 goto do_HV_rv2hv_helem;
2451 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
2452 sv = UNOP_AUX_item_sv(++items);
2453 assert(isGV_with_GP(sv));
2454 sv = GvSVn((GV*)sv);
2455 goto do_HV_vivify_rv2hv_helem;
2457 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
2458 sv = PAD_SVl((++items)->pad_offset);
2461 do_HV_vivify_rv2hv_helem:
2462 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
2463 /* this is the OPpDEREF action normally found at the end of
2464 * ops like aelem, helem, rv2sv */
2465 sv = vivify_ref(sv, OPpDEREF_HV);
2469 /* this is basically a copy of pp_rv2hv when it just has the
2470 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
2473 if (LIKELY(SvROK(sv))) {
2474 if (UNLIKELY(SvAMAGIC(sv))) {
2475 sv = amagic_deref_call(sv, to_hv_amg);
2478 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
2479 DIE(aTHX_ "Not a HASH reference");
2481 else if (SvTYPE(sv) != SVt_PVHV) {
2482 if (!isGV_with_GP(sv))
2483 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
2484 sv = MUTABLE_SV(GvHVn((GV*)sv));
2490 /* retrieve the key; this may be either a lexical / package
2491 * var or a string constant, whose index/ptr is stored as an
2494 SV *keysv = NULL; /* to shut up stupid compiler warnings */
2496 assert(SvTYPE(sv) == SVt_PVHV);
2498 switch (actions & MDEREF_INDEX_MASK) {
2499 case MDEREF_INDEX_none:
2502 case MDEREF_INDEX_const:
2503 keysv = UNOP_AUX_item_sv(++items);
2506 case MDEREF_INDEX_padsv:
2507 keysv = PAD_SVl((++items)->pad_offset);
2510 case MDEREF_INDEX_gvsv:
2511 keysv = UNOP_AUX_item_sv(++items);
2512 keysv = GvSVn((GV*)keysv);
2516 /* see comment above about setting this var */
2517 PL_multideref_pc = items;
2520 /* ensure that candidate CONSTs have been HEKified */
2521 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
2522 || SvTYPE(keysv) >= SVt_PVMG
2525 || SvIsCOW_shared_hash(keysv));
2527 /* this is basically a copy of pp_helem with OPpDEREF skipped */
2529 if (!(actions & MDEREF_FLAG_last)) {
2530 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
2531 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
2532 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2536 if (PL_op->op_private &
2537 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2539 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2540 sv = hv_exists_ent((HV*)sv, keysv, 0)
2541 ? &PL_sv_yes : &PL_sv_no;
2544 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2545 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
2553 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2554 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2555 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2556 bool preeminent = TRUE;
2558 HV * const hv = (HV*)sv;
2561 if (UNLIKELY(localizing)) {
2565 /* If we can determine whether the element exists,
2566 * try to preserve the existence of a tied hash
2567 * element by using EXISTS and DELETE if possible.
2568 * Fall back to FETCH and STORE otherwise. */
2569 if (SvCANEXISTDELETE(hv))
2570 preeminent = hv_exists_ent(hv, keysv, 0);
2573 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2574 svp = he ? &HeVAL(he) : NULL;
2578 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
2582 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2583 lv = sv_newmortal();
2584 sv_upgrade(lv, SVt_PVLV);
2586 sv_magic(lv, key2 = newSVsv(keysv),
2587 PERL_MAGIC_defelem, NULL, 0);
2588 /* sv_magic() increments refcount */
2589 SvREFCNT_dec_NN(key2);
2590 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2596 if (HvNAME_get(hv) && isGV(sv))
2597 save_gp(MUTABLE_GV(sv),
2598 !(PL_op->op_flags & OPf_SPECIAL));
2599 else if (preeminent) {
2600 save_helem_flags(hv, keysv, svp,
2601 (PL_op->op_flags & OPf_SPECIAL)
2602 ? 0 : SAVEf_SETMAGIC);
2603 sv = *svp; /* may have changed */
2606 SAVEHDELETE(hv, keysv);
2611 sv = (svp && *svp ? *svp : &PL_sv_undef);
2612 /* see note in pp_helem() */
2613 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
2622 actions >>= MDEREF_SHIFT;
/* Body fragment of pp_iter: advance a foreach loop one step. Dispatches on
 * the loop context type: string range (LAZYSV), integer range (LAZYIV),
 * literal list on the stack (LOOP_LIST) and array (LOOP_ARY). Visible here
 * is the per-type "compute next value into *itersvp" logic, including the
 * RC==1 reuse optimisation for the iterator SV.
 * NOTE(review): many lines are elided in this view; comments describe only
 * the visible code. */
2641 itersvp = CxITERVAR(cx);
2644 switch (CxTYPE(cx)) {
2646 case CXt_LOOP_LAZYSV: /* string increment */
2648 SV* cur = cx->blk_loop.state_u.lazysv.cur;
2649 SV *end = cx->blk_loop.state_u.lazysv.end;
2650 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
2651 It has SvPVX of "" and SvCUR of 0, which is what we want. */
2653 const char *max = SvPV_const(end, maxlen);
2654 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
2658 /* NB: on the first iteration, oldsv will have a ref count of at
2659 * least 2 (one extra from blk_loop.itersave), so the GV or pad
2660 * slot will get localised; on subsequent iterations the RC==1
2661 * optimisation may kick in and the SV will be reused. */
2662 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2663 /* safe to reuse old SV */
2664 sv_setsv(oldsv, cur);
2668 /* we need a fresh SV every time so that loop body sees a
2669 * completely new SV for closures/references to work as
2671 *itersvp = newSVsv(cur);
2672 SvREFCNT_dec(oldsv);
2674 if (strEQ(SvPVX_const(cur), max))
2675 sv_setiv(cur, 0); /* terminate next time */
2681 case CXt_LOOP_LAZYIV: /* integer increment */
2683 IV cur = cx->blk_loop.state_u.lazyiv.cur;
2684 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
2688 /* see NB comment above */
2689 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2690 /* safe to reuse old SV */
2692 if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV))
2695 /* Cheap SvIOK_only().
2696 * Assert that flags which SvIOK_only() would test or
2697 * clear can't be set, because we're SVt_IV */
2698 assert(!(SvFLAGS(oldsv) &
2699 (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK)))));
2700 SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK);
2701 /* SvIV_set() where sv_any points to head */
2702 oldsv->sv_u.svu_iv = cur;
2706 sv_setiv(oldsv, cur);
2710 /* we need a fresh SV every time so that loop body sees a
2711 * completely new SV for closures/references to work as they
2713 *itersvp = newSViv(cur);
2714 SvREFCNT_dec(oldsv);
2717 if (UNLIKELY(cur == IV_MAX)) {
2718 /* Handle end of range at IV_MAX */
2719 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
2721 ++cx->blk_loop.state_u.lazyiv.cur;
2725 case CXt_LOOP_LIST: /* for (1,2,3) */
2727 assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */
2728 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
2729 ix = (cx->blk_loop.state_u.stack.ix += inc);
2730 if (UNLIKELY(inc > 0
2731 ? ix > cx->blk_oldsp
2732 : ix <= cx->blk_loop.state_u.stack.basesp)
2736 sv = PL_stack_base[ix];
2738 goto loop_ary_common;
2740 case CXt_LOOP_ARY: /* for (@ary) */
2742 av = cx->blk_loop.state_u.ary.ary;
2743 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
2744 ix = (cx->blk_loop.state_u.ary.ix += inc);
2745 if (UNLIKELY(inc > 0
/* tied/magical arrays must go through av_fetch() */
2751 if (UNLIKELY(SvRMAGICAL(av))) {
2752 SV * const * const svp = av_fetch(av, ix, FALSE);
2753 sv = svp ? *svp : NULL;
2756 sv = AvARRAY(av)[ix];
2761 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
2762 SvSetMagicSV(*itersvp, sv);
2767 if (UNLIKELY(SvIS_FREED(sv))) {
2769 Perl_croak(aTHX_ "Use of freed value in iteration");
2776 SvREFCNT_inc_simple_void_NN(sv);
2780 sv = newSVavdefelem(av, ix, 0);
2787 SvREFCNT_dec(oldsv);
2791 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
2799 /* pp_enteriter should have pre-extended the stack */
2800 assert(PL_stack_sp < PL_stack_max);
2801 *++PL_stack_sp =retsv;
2803 return PL_op->op_next;
2807 A description of how taint works in pattern matching and substitution.
2809 This is all conditional on NO_TAINT_SUPPORT not being defined. Under
2810 NO_TAINT_SUPPORT, taint-related operations should become no-ops.
2812 While the pattern is being assembled/concatenated and then compiled,
2813 PL_tainted will get set (via TAINT_set) if any component of the pattern
2814 is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
2815 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
2816 TAINT_get). It will also be set if any component of the pattern matches
2817 based on locale-dependent behavior.
2819 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
2820 the pattern is marked as tainted. This means that subsequent usage, such
2821 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
2822 on the new pattern too.
2824 RXf_TAINTED_SEEN is used post-execution by the get magic code
2825 of $1 et al to indicate whether the returned value should be tainted.
2826 It is the responsibility of the caller of the pattern (i.e. pp_match,
2827 pp_subst etc) to set this flag for any other circumstances where $1 needs
2830 The taint behaviour of pp_subst (and pp_substcont) is quite complex.
2832 There are three possible sources of taint
2834 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
2835 * the replacement string (or expression under /e)
2837 There are four destinations of taint and they are affected by the sources
2838 according to the rules below:
2840 * the return value (not including /r):
2841 tainted by the source string and pattern, but only for the
2842 number-of-iterations case; boolean returns aren't tainted;
2843 * the modified string (or modified copy under /r):
2844 tainted by the source string, pattern, and replacement strings;
2846 tainted by the pattern, and under 'use re "taint"', by the source
2848 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
2849 should always be unset before executing subsequent code.
2851 The overall action of pp_subst is:
2853 * at the start, set bits in rxtainted indicating the taint status of
2854 the various sources.
2856 * After each pattern execution, update the SUBST_TAINT_PAT bit in
2857 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
2858 pattern has subsequently become tainted via locale ops.
2860 * If control is being passed to pp_substcont to execute a /e block,
2861 save rxtainted in the CXt_SUBST block, for future use by
2864 * Whenever control is being returned to perl code (either by falling
2865 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
2866 use the flag bits in rxtainted to make all the appropriate types of
2867 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
2868 et al will appear tainted.
2870 pp_match is just a simpler version of the above.
2886 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
2887 See "how taint works" above */
2890 REGEXP *rx = PM_GETRE(pm);
2892 int force_on_match = 0;
2893 const I32 oldsave = PL_savestack_ix;
2895 bool doutf8 = FALSE; /* whether replacement is in utf8 */
2900 /* known replacement string? */
2901 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
2905 if (PL_op->op_flags & OPf_STACKED)
2914 SvGETMAGIC(TARG); /* must come before cow check */
2916 /* note that a string might get converted to COW during matching */
2917 was_cow = cBOOL(SvIsCOW(TARG));
2919 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
2920 #ifndef PERL_ANY_COW
2922 sv_force_normal_flags(TARG,0);
2924 if ((SvREADONLY(TARG)
2925 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
2926 || SvTYPE(TARG) > SVt_PVLV)
2927 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
2928 Perl_croak_no_modify();
2932 orig = SvPV_nomg(TARG, len);
2933 /* note we don't (yet) force the var into being a string; if we fail
2934 * to match, we leave as-is; on successful match however, we *will*
2935 * coerce into a string, then repeat the match */
2936 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
2939 /* only replace once? */
2940 once = !(rpm->op_pmflags & PMf_GLOBAL);
2942 /* See "how taint works" above */
2945 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
2946 | (RX_ISTAINTED(rx) ? SUBST_TAINT_PAT : 0)
2947 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
2948 | ((once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
2949 ? SUBST_TAINT_BOOLRET : 0));
2955 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
2957 strend = orig + len;
2958 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len;
2959 maxiters = 2 * slen + 10; /* We can match twice at each
2960 position, once with zero-length,
2961 second time with non-zero. */
2963 if (!RX_PRELEN(rx) && PL_curpm
2964 && !ReANY(rx)->mother_re) {
2969 #ifdef PERL_SAWAMPERSAND
2970 r_flags = ( RX_NPARENS(rx)
2972 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
2973 || (rpm->op_pmflags & PMf_KEEPCOPY)
2978 r_flags = REXEC_COPY_STR;
2981 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
2984 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
2985 LEAVE_SCOPE(oldsave);
2990 /* known replacement string? */
2992 /* replacement needing upgrading? */
2993 if (DO_UTF8(TARG) && !doutf8) {
2994 nsv = sv_newmortal();
2996 sv_utf8_upgrade(nsv);
2997 c = SvPV_const(nsv, clen);
3001 c = SvPV_const(dstr, clen);
3002 doutf8 = DO_UTF8(dstr);
3005 if (SvTAINTED(dstr))
3006 rxtainted |= SUBST_TAINT_REPL;
3013 /* can do inplace substitution? */
3018 && (I32)clen <= RX_MINLENRET(rx)
3020 || !(r_flags & REXEC_COPY_STR)
3021 || (!SvGMAGICAL(dstr) && !(RX_EXTFLAGS(rx) & RXf_EVAL_SEEN))
3023 && !(RX_EXTFLAGS(rx) & RXf_NO_INPLACE_SUBST)
3024 && (!doutf8 || SvUTF8(TARG))
3025 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
3029 /* string might have got converted to COW since we set was_cow */
3030 if (SvIsCOW(TARG)) {
3031 if (!force_on_match)
3033 assert(SvVOK(TARG));
3036 if (force_on_match) {
3037 /* redo the first match, this time with the orig var
3038 * forced into being a string */
3040 orig = SvPV_force_nomg(TARG, len);
3046 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3047 rxtainted |= SUBST_TAINT_PAT;
3048 m = orig + RX_OFFS(rx)[0].start;
3049 d = orig + RX_OFFS(rx)[0].end;
3051 if (m - s > strend - d) { /* faster to shorten from end */
3054 Copy(c, m, clen, char);
3059 Move(d, m, i, char);
3063 SvCUR_set(TARG, m - s);
3065 else { /* faster from front */
3069 Move(s, d - i, i, char);
3072 Copy(c, d, clen, char);
3079 d = s = RX_OFFS(rx)[0].start + orig;
3082 if (UNLIKELY(iters++ > maxiters))
3083 DIE(aTHX_ "Substitution loop");
3084 if (UNLIKELY(RX_MATCH_TAINTED(rx))) /* run time pattern taint, eg locale */
3085 rxtainted |= SUBST_TAINT_PAT;
3086 m = RX_OFFS(rx)[0].start + orig;
3089 Move(s, d, i, char);
3093 Copy(c, d, clen, char);
3096 s = RX_OFFS(rx)[0].end + orig;
3097 } while (CALLREGEXEC(rx, s, strend, orig,
3098 s == m, /* don't match same null twice */
3100 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3103 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
3104 Move(s, d, i+1, char); /* include the NUL */
3114 if (force_on_match) {
3115 /* redo the first match, this time with the orig var
3116 * forced into being a string */
3118 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3119 /* I feel that it should be possible to avoid this mortal copy
3120 given that the code below copies into a new destination.
3121 However, I suspect it isn't worth the complexity of
3122 unravelling the C<goto force_it> for the small number of
3123 cases where it would be viable to drop into the copy code. */
3124 TARG = sv_2mortal(newSVsv(TARG));
3126 orig = SvPV_force_nomg(TARG, len);
3132 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3133 rxtainted |= SUBST_TAINT_PAT;
3135 s = RX_OFFS(rx)[0].start + orig;
3136 dstr = newSVpvn_flags(orig, s-orig,
3137 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
3142 /* note that a whole bunch of local vars are saved here for
3143 * use by pp_substcont: here's a list of them in case you're
3144 * searching for places in this sub that uses a particular var:
3145 * iters maxiters r_flags oldsave rxtainted orig dstr targ
3146 * s m strend rx once */
3148 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
3152 if (UNLIKELY(iters++ > maxiters))
3153 DIE(aTHX_ "Substitution loop");
3154 if (UNLIKELY(RX_MATCH_TAINTED(rx)))
3155 rxtainted |= SUBST_TAINT_PAT;
3156 if (RX_MATCH_COPIED(rx) && RX_SUBBEG(rx) != orig) {
3158 char *old_orig = orig;
3159 assert(RX_SUBOFFSET(rx) == 0);
3161 orig = RX_SUBBEG(rx);
3162 s = orig + (old_s - old_orig);
3163 strend = s + (strend - old_s);
3165 m = RX_OFFS(rx)[0].start + orig;
3166 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
3167 s = RX_OFFS(rx)[0].end + orig;
3169 /* replacement already stringified */
3171 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
3175 sv_catsv(dstr, repl);
3176 if (UNLIKELY(SvTAINTED(repl)))
3177 rxtainted |= SUBST_TAINT_REPL;
3181 } while (CALLREGEXEC(rx, s, strend, orig,
3182 s == m, /* Yields minend of 0 or 1 */
3184 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3185 assert(strend >= s);
3186 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
3188 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3189 /* From here on down we're using the copy, and leaving the original
3196 /* The match may make the string COW. If so, brilliant, because
3197 that's just saved us one malloc, copy and free - the regexp has
3198 donated the old buffer, and we malloc an entirely new one, rather
3199 than the regexp malloc()ing a buffer and copying our original,
3200 only for us to throw it away here during the substitution. */
3201 if (SvIsCOW(TARG)) {
3202 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
3208 SvPV_set(TARG, SvPVX(dstr));
3209 SvCUR_set(TARG, SvCUR(dstr));
3210 SvLEN_set(TARG, SvLEN(dstr));
3211 SvFLAGS(TARG) |= SvUTF8(dstr);
3212 SvPV_set(dstr, NULL);
3219 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3220 (void)SvPOK_only_UTF8(TARG);
3223 /* See "how taint works" above */
3225 if ((rxtainted & SUBST_TAINT_PAT) ||
3226 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
3227 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
3229 (RX_MATCH_TAINTED_on(rx)); /* taint $1 et al */
3231 if (!(rxtainted & SUBST_TAINT_BOOLRET)
3232 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
3234 SvTAINTED_on(TOPs); /* taint return value */
3236 SvTAINTED_off(TOPs); /* may have got tainted earlier */
3238 /* needed for mg_set below */
3240 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
3244 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
3246 LEAVE_SCOPE(oldsave);
3255 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
3256 ++*PL_markstack_ptr;
3258 LEAVE_with_name("grep_item"); /* exit inner scope */
3261 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
3263 const U8 gimme = GIMME_V;
3265 LEAVE_with_name("grep"); /* exit outer scope */
3266 (void)POPMARK; /* pop src */
3267 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
3268 (void)POPMARK; /* pop dst */
3269 SP = PL_stack_base + POPMARK; /* pop original mark */
3270 if (gimme == G_SCALAR) {
3274 else if (gimme == G_ARRAY)
3281 ENTER_with_name("grep_item"); /* enter inner scope */
3284 src = PL_stack_base[TOPMARK];
3285 if (SvPADTMP(src)) {
3286 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
3292 RETURNOP(cLOGOP->op_other);
3296 /* leave_adjust_stacks():
3298 * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp),
3299 * positioning them at to_sp+1 onwards, and do the equivalent of a
3300 * FREEMPS and TAINT_NOT.
3302 * Not intended to be called in void context.
3304 * When leaving a sub, eval, do{} or other scope, the things that need
3305 * doing to process the return args are:
3306 * * in scalar context, only return the last arg (or PL_sv_undef if none);
3307 * * for the types of return that return copies of their args (such
3308 * as rvalue sub return), make a mortal copy of every return arg,
3309 * except where we can optimise the copy away without it being
3310 * semantically visible;
3311 * * make sure that the arg isn't prematurely freed; in the case of an
3312 * arg not copied, this may involve mortalising it. For example, in
3313 * C<sub f { my $x = ...; $x }>, $x would be freed when we do
3314 * CX_LEAVE_SCOPE(cx) unless it's protected or copied.
3316 * What condition to use when deciding whether to pass the arg through
3317 * or make a copy, is determined by the 'pass' arg; its valid values are:
3318 * 0: rvalue sub/eval exit
3319 * 1: other rvalue scope exit
3320 * 2: :lvalue sub exit in rvalue context
3321 * 3: :lvalue sub exit in lvalue context and other lvalue scope exits
3323 * There is a big issue with doing a FREETMPS. We would like to free any
3324 * temps created by the last statement which the sub executed, rather than
3325 * leaving them for the caller. In a situation where a sub call isn't
3326 * soon followed by a nextstate (e.g. nested recursive calls, a la
3327 * fibonacci()), temps can accumulate, causing memory and performance
3330 * On the other hand, we don't want to free any TEMPs which are keeping
3331 * alive any return args that we skipped copying; nor do we wish to undo
3332 * any mortalising done here.
3334 * The solution is to split the temps stack frame into two, with a cut
3335 * point delineating the two halves. We arrange that by the end of this
3336 * function, all the temps stack frame entries we wish to keep are in the
3337 * range PL_tmps_floor+1.. tmps_base-1, while the ones to free now are in
3338 * the range tmps_base .. PL_tmps_ix. During the course of this
3339 * function, tmps_base starts off as PL_tmps_floor+1, then increases
3340 * whenever we find or create a temp that we know should be kept. In
3341 * general the stuff above tmps_base is undecided until we reach the end,
3342 * and we may need a sort stage for that.
3344 * To determine whether a TEMP is keeping a return arg alive, every
3345 * arg that is kept rather than copied and which has the SvTEMP flag
3346 * set, has the flag temporarily unset, to mark it. At the end we scan
3347 * the temps stack frame above the cut for entries without SvTEMP and
3348 * keep them, while turning SvTEMP on again. Note that if we die before
3349 * the SvTEMPs flags are set again, its safe: at worst, subsequent use of
3350 * those SVs may be slightly less efficient.
3352 * In practice various optimisations for some common cases mean we can
3353 * avoid most of the scanning and swapping about with the temps stack.
3357 Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass)
3361     SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
3364     PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS;
     /* Step 1: establish nargs, the number of return args to process.
      * In list context that's everything above from_sp; in scalar context
      * there is at most one arg, with PL_sv_undef supplied if the scope
      * returned nothing.  (See the large comment above this function for
      * the overall scheme and the meaning of 'pass'.) */
3368     if (gimme == G_ARRAY) {
3369 	nargs = SP - from_sp;
3373 	assert(gimme == G_SCALAR);
3374 	if (UNLIKELY(from_sp >= SP)) {
3375 	    /* no return args */
3376 	    assert(from_sp == SP);
3378 	    *++SP = &PL_sv_undef;
3388     /* common code for G_SCALAR and G_ARRAY */
     /* Step 2: set up the "cut point" on the temps stack: entries at
      * indices >= tmps_base are the ones we currently intend to free;
      * entries we decide to keep get moved (or the cut moved) below it. */
3390     tmps_base = PL_tmps_floor + 1;
3394     /* pointer version of tmps_base. Not safe across temp stack
3398     EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
3399     tmps_basep = PL_tmps_stack + tmps_base;
3401     /* process each return arg */
3404 	SV *sv = *from_sp++;
3406 	assert(PL_tmps_ix + nargs < PL_tmps_max);
3408 	/* PADTMPs with container set magic shouldn't appear in the
3409 	 * wild. This assert is more important for pp_leavesublv(),
3410 	 * but by testing for it here, we're more likely to catch
3411 	 * bad cases (what with :lvalue subs not being widely
3412 	 * deployed). The two issues are that for something like
3413 	 *     sub :lvalue { $tied{foo} }
3415 	 *     sub :lvalue { substr($foo,1,2) }
3416 	 * pp_leavesublv() will croak if the sub returns a PADTMP,
3417 	 * and currently functions like pp_substr() return a mortal
3418 	 * rather than using their PADTMP when returning a PVLV.
3419 	 * This is because the PVLV will hold a ref to $foo,
3420 	 * so $foo would get delayed in being freed while
3421 	 * the PADTMP SV remained in the PAD.
3422 	 * So if this assert fails it means either:
3423 	 *  1) there is pp code similar to pp_substr that is
3424 	 *     returning a PADTMP instead of a mortal, and probably
3426 	 *  2) pp_leavesublv is making unwarranted assumptions
3427 	 *     about always croaking on a PADTMP
3429 	if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
3431 	    for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
3432 		assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
     /* Step 3: per-arg decision — can this arg be passed through without
      * a copy?  The condition depends on 'pass' as documented above the
      * function (0: rvalue sub/eval exit; 1: other rvalue scope exit;
      * 2: :lvalue sub exit in rvalue context; 3: lvalue exits). */
3438 	      pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3439 	    : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3440 	    : pass == 2 ? (!SvPADTMP(sv))
3443 	    /* pass through: skip copy for logic or optimisation
3444 	     * reasons; instead mortalise it, except that ... */
3448 		/* ... since this SV is an SvTEMP , we don't need to
3449 		 * re-mortalise it; instead we just need to ensure
3450 		 * that its existing entry in the temps stack frame
3451 		 * ends up below the cut and so avoids being freed
3452 		 * this time round. We mark it as needing to be kept
3453 		 * by temporarily unsetting SvTEMP; then at the end,
3454 		 * we shuffle any !SvTEMP entries on the tmps stack
3455 		 * back below the cut.
3456 		 * However, there's a significant chance that there's
3457 		 * a 1:1 correspondence between the first few (or all)
3458 		 * elements in the return args stack frame and those
3459 		 * in the temps stack frame; e,g.:
3460 		 *      sub f { ....; map {...} .... },
3461 		 * or if we're exiting multiple scopes and one of the
3462 		 * inner scopes has already made mortal copies of each
3465 		 * If so, this arg sv will correspond to the next item
3466 		 * on the tmps stack above the cut, and so can be kept
3467 		 * merely by moving the cut boundary up one, rather
3468 		 * than messing with SvTEMP.  If all args are 1:1 then
3469 		 * we can avoid the sorting stage below completely.
3471 		 * If there are no items above the cut on the tmps
3472 		 * stack, then the SvTEMP must comne from an item
3473 		 * below the cut, so there's nothing to do.
3475 		if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) {
3476 		    if (sv == *tmps_basep)
3482 	    else if (!SvPADTMP(sv)) {
3483 		/* mortalise arg to avoid it being freed during save
3484 		 * stack unwinding. Pad tmps don't need mortalising as
3485 		 * they're never freed. This is the equivalent of
3486 		 * sv_2mortal(SvREFCNT_inc(sv)), except that:
3487 		 *  * it assumes that the temps stack has already been
3489 		 *  * it puts the new item at the cut rather than at
3490 		 *    ++PL_tmps_ix, moving the previous occupant there
3493 		if (!SvIMMORTAL(sv)) {
3494 		    SvREFCNT_inc_simple_void_NN(sv);
3496 		    /* Note that if there's nothing above the cut,
3497 		     * this copies the garbage one slot above
3498 		     * PL_tmps_ix onto itself. This is harmless (the
3499 		     * stack's already been extended), but might in
3500 		     * theory trigger warnings from tools like ASan
3502 		    PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3508 	    /* Make a mortal copy of the SV.
3509 	     * The following code is the equivalent of sv_mortalcopy()
3511 	     *  * it assumes the temps stack has already been extended;
3512 	     *  * it optimises the copying for some simple SV types;
3513 	     *  * it puts the new item at the cut rather than at
3514 	     *    ++PL_tmps_ix, moving the previous occupant there
3517 	    SV *newsv = newSV(0);
3519 	    PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3520 	    /* put it on the tmps stack early so it gets freed if we die */
3521 	    *tmps_basep++ = newsv;
     /* Fast copy for bodyless SVs: undef, plain IV/UV, or a reference
      * can be duplicated by copying head fields and flags directly,
      * avoiding the generality (and cost) of sv_setsv_flags(). */
3524 	    if (SvTYPE(sv) <= SVt_IV) {
3525 		/* arg must be one of undef, IV/UV, or RV: skip
3526 		 * sv_setsv_flags() and do the copy directly */
3528 		U32 srcflags = SvFLAGS(sv);
3530 		assert(!SvGMAGICAL(sv));
3531 		if (srcflags & (SVf_IOK|SVf_ROK)) {
3532 		    SET_SVANY_FOR_BODYLESS_IV(newsv);
3534 		    if (srcflags & SVf_ROK) {
3535 			newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
3536 			/* SV type plus flags */
3537 			dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
3540 			/* both src and dst are <= SVt_IV, so sv_any
3541 			 * points to the head; so access the heads
3542 			 * directly rather than going via sv_any.
3544 			assert(    &(sv->sv_u.svu_iv)
3545 				== &(((XPVIV*) SvANY(sv))->xiv_iv));
3546 			assert(    &(newsv->sv_u.svu_iv)
3547 				== &(((XPVIV*) SvANY(newsv))->xiv_iv));
3548 			newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
3549 			/* SV type plus flags */
3550 			dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
3551 				    |(srcflags & SVf_IVisUV));
3555 		    assert(!(srcflags & SVf_OK));
3556 		    dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
3558 		SvFLAGS(newsv) = dstflags;
3562 		/* do the full sv_setsv() */
     /* sv_setsv_flags may run get-magic and hence arbitrary code, which
      * can push temps or realloc the temps stack: remember the cut as an
      * index (not a pointer) across the call, then re-derive the pointer. */
3566 		old_base = tmps_basep - PL_tmps_stack;
3568 		sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
3569 		/* the mg_get or sv_setsv might have created new temps
3570 		 * or realloced the tmps stack; regrow and reload */
3571 		EXTEND_MORTAL(nargs);
3572 		tmps_basep = PL_tmps_stack + old_base;
3573 		TAINT_NOT;	/* Each item is independent */
3579     /* If there are any temps left above the cut, we need to sort
3580      * them into those to keep and those to free. The only ones to
3581      * keep are those for which we've temporarily unset SvTEMP.
3582      * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
3583      * swapping pairs as necessary. Stop when we meet in the middle.
3586 	SV **top = PL_tmps_stack + PL_tmps_ix;
3587 	while (tmps_basep <= top) {
3600 	tmps_base = tmps_basep - PL_tmps_stack;
     /* commit the new stack top for the caller */
3603     PL_stack_sp = to_sp;
3605     /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
3606     while (PL_tmps_ix >= tmps_base) {
3607 	SV* const sv = PL_tmps_stack[PL_tmps_ix--];
3609 	    PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
3613 	SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
3619 /* also tail-called by pp_return */
3629 assert(CxTYPE(cx) == CXt_SUB);
3631 if (CxMULTICALL(cx)) {
3632 /* entry zero of a stack is always PL_sv_undef, which
3633 * simplifies converting a '()' return into undef in scalar context */
3634 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
3638 gimme = cx->blk_gimme;
3639 oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */
3641 if (gimme == G_VOID)
3642 PL_stack_sp = oldsp;
3644 leave_adjust_stacks(oldsp, oldsp, gimme, 0);
3647 cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */
3649 retop = cx->blk_sub.retop;
3656 /* clear (if possible) or abandon the current @_. If 'abandon' is true,
3657  * forces an abandon */
3660 Perl_clear_defarray(pTHX_ AV* av, bool abandon)
3662     const SSize_t fill = AvFILLp(av);
3664     PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
     /* Fast path: @_ is unshared (single reference) and unmagical, so it is
      * safe to reuse in place.  (The branch body is not visible in this
      * excerpt — presumably it just empties av; confirm against av.c.) */
3666     if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) {
     /* Slow path (shared, magical, or forced abandon): leave the old @_
      * to whoever else holds a reference, and install a brand-new AV of
      * the same capacity into pad slot 0.  AvREIFY_only marks the new
      * array as a non-real @_ whose elements get reified on demand. */
3671 	AV *newav = newAV();
3672 	av_extend(newav, fill);
3673 	AvREIFY_only(newav);
3674 	PAD_SVl(0) = MUTABLE_SV(newav);
     /* drop this function's claim on the old @_ */
3675 	SvREFCNT_dec_NN(av);
3686 I32 old_savestack_ix;
3691 /* Locate the CV to call:
3692 * - most common case: RV->CV: f(), $ref->():
3693 * note that if a sub is compiled before its caller is compiled,
3694 * the stash entry will be a ref to a CV, rather than being a GV.
3695 * - second most common case: CV: $ref->method()
3698 /* a non-magic-RV -> CV ? */
3699 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
3700 cv = MUTABLE_CV(SvRV(sv));
3701 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
3705 cv = MUTABLE_CV(sv);
3708 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
3709 /* handle all the weird cases */
3710 switch (SvTYPE(sv)) {
3712 if (!isGV_with_GP(sv))
3716 cv = GvCVu((const GV *)sv);
3717 if (UNLIKELY(!cv)) {
3719 cv = sv_2cv(sv, &stash, &gv, 0);
3721 old_savestack_ix = PL_savestack_ix;
3732 if (UNLIKELY(SvAMAGIC(sv))) {
3733 sv = amagic_deref_call(sv, to_cv_amg);
3734 /* Don't SPAGAIN here. */
3740 if (UNLIKELY(!SvOK(sv)))
3741 DIE(aTHX_ PL_no_usym, "a subroutine");
3743 if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
3744 if (PL_op->op_flags & OPf_STACKED) /* hasargs */
3745 SP = PL_stack_base + POPMARK;
3748 if (GIMME_V == G_SCALAR)
3749 PUSHs(&PL_sv_undef);
3753 sym = SvPV_nomg_const(sv, len);
3754 if (PL_op->op_private & HINT_STRICT_REFS)
3755 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
3756 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
3759 cv = MUTABLE_CV(SvRV(sv));
3760 if (LIKELY(SvTYPE(cv) == SVt_PVCV))
3766 DIE(aTHX_ "Not a CODE reference");
3770 /* At this point we want to save PL_savestack_ix, either by doing a
3771 * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final
3772 * CV we will be using (so we don't know whether its XS, so we can't
3773 * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on
3774 * the save stack. So remember where we are currently on the save
3775 * stack, and later update the CX or scopestack entry accordingly. */
3776 old_savestack_ix = PL_savestack_ix;
3778 /* these two fields are in a union. If they ever become separate,
3779 * we have to test for both of them being null below */
3781 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
3782 while (UNLIKELY(!CvROOT(cv))) {
3786 /* anonymous or undef'd function leaves us no recourse */
3787 if (CvLEXICAL(cv) && CvHASGV(cv))
3788 DIE(aTHX_ "Undefined subroutine &%"SVf" called",
3789 SVfARG(cv_name(cv, NULL, 0)));
3790 if (CvANON(cv) || !CvHASGV(cv)) {
3791 DIE(aTHX_ "Undefined subroutine called");
3794 /* autoloaded stub? */
3795 if (cv != GvCV(gv = CvGV(cv))) {
3798 /* should call AUTOLOAD now? */
3801 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
3802 (GvNAMEUTF8(gv) ? SVf_UTF8 : 0)
3803 |(PL_op->op_flags & OPf_REF
3804 ? GV_AUTOLOAD_ISMETHOD
3806 cv = autogv ? GvCV(autogv) : NULL;
3809 sub_name = sv_newmortal();
3810 gv_efullname3(sub_name, gv, NULL);
3811 DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
3815 /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
3816 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
3817 DIE(aTHX_ "Closure prototype called");
3819 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
3822 Perl_get_db_sub(aTHX_ &sv, cv);
3824 PL_curcopdb = PL_curcop;
3826 /* check for lsub that handles lvalue subroutines */
3827 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
3828 /* if lsub not found then fall back to DB::sub */
3829 if (!cv) cv = GvCV(PL_DBsub);
3831 cv = GvCV(PL_DBsub);
3834 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
3835 DIE(aTHX_ "No DB::sub routine defined");
3838 if (!(CvISXSUB(cv))) {
3839 /* This path taken at least 75% of the time */
3846 /* keep PADTMP args alive throughout the call (we need to do this
3847 * because @_ isn't refcounted). Note that we create the mortals
3848 * in the caller's tmps frame, so they won't be freed until after
3849 * we return from the sub.
3858 *svp = sv = sv_mortalcopy(sv);
3864 cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix);
3865 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
3866 cx_pushsub(cx, cv, PL_op->op_next, hasargs);
3868 padlist = CvPADLIST(cv);
3869 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2))
3870 pad_push(padlist, depth);
3871 PAD_SET_CUR_NOSAVE(padlist, depth);
3872 if (LIKELY(hasargs)) {
3873 AV *const av = MUTABLE_AV(PAD_SVl(0));
3877 defavp = &GvAV(PL_defgv);
3878 cx->blk_sub.savearray = *defavp;
3879 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
3881 /* it's the responsibility of whoever leaves a sub to ensure
3882 * that a clean, empty AV is left in pad[0]. This is normally
3883 * done by cx_popsub() */
3884 assert(!AvREAL(av) && AvFILLp(av) == -1);
3887 if (UNLIKELY(items - 1 > AvMAX(av))) {
3888 SV **ary = AvALLOC(av);
3889 AvMAX(av) = items - 1;
3890 Renew(ary, items, SV*);
3895 Copy(MARK+1,AvARRAY(av),items,SV*);
3896 AvFILLp(av) = items - 1;
3898 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3900 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3901 SVfARG(cv_name(cv, NULL, 0)));
3902 /* warning must come *after* we fully set up the context
3903 * stuff so that __WARN__ handlers can safely dounwind()
3906 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
3907 && ckWARN(WARN_RECURSION)
3908 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
3909 sub_crush_depth(cv);
3910 RETURNOP(CvSTART(cv));
3913 SSize_t markix = TOPMARK;
3917 /* pretend we did the ENTER earlier */
3918 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
3923 if (UNLIKELY(((PL_op->op_private
3924 & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
3925 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3927 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3928 SVfARG(cv_name(cv, NULL, 0)));
3930 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
3931 /* Need to copy @_ to stack. Alternative may be to
3932 * switch stack to @_, and copy return values
3933 * back. This would allow popping @_ in XSUB, e.g.. XXXX */
3934 AV * const av = GvAV(PL_defgv);
3935 const SSize_t items = AvFILL(av) + 1;
3939 const bool m = cBOOL(SvRMAGICAL(av));
3940 /* Mark is at the end of the stack. */
3942 for (; i < items; ++i)
3946 SV ** const svp = av_fetch(av, i, 0);
3947 sv = svp ? *svp : NULL;
3949 else sv = AvARRAY(av)[i];
3950 if (sv) SP[i+1] = sv;
3952 SP[i+1] = newSVavdefelem(av, i, 1);
3960 SV **mark = PL_stack_base + markix;
3961 SSize_t items = SP - mark;
3964 if (*mark && SvPADTMP(*mark)) {
3965 *mark = sv_mortalcopy(*mark);
3969 /* We assume first XSUB in &DB::sub is the called one. */
3970 if (UNLIKELY(PL_curcopdb)) {
3971 SAVEVPTR(PL_curcop);
3972 PL_curcop = PL_curcopdb;
3975 /* Do we need to open block here? XXXX */
3977 /* calculate gimme here as PL_op might get changed and then not
3978 * restored until the LEAVE further down */
3979 is_scalar = (GIMME_V == G_SCALAR);
3981 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
3983 CvXSUB(cv)(aTHX_ cv);
3985 /* Enforce some sanity in scalar context. */
3987 SV **svp = PL_stack_base + markix + 1;
3988 if (svp != PL_stack_sp) {
3989 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
3999 Perl_sub_crush_depth(pTHX_ CV *cv)
     /* Emit a "Deep recursion" warning for cv; called from pp_entersub once
      * CvDEPTH passes PERL_SUB_DEPTH_WARN (see the check further up this
      * file).  Anonymous subs get the generic message; named subs are
      * reported by their qualified name via cv_name(). */
4001     PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
     /* NOTE(review): the branch condition selecting between the two
      * messages (presumably a CvANON(cv)-style test) is not visible here */
4004 	Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
4006 	Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%"SVf"\"",
4007 		    SVfARG(cv_name(cv,NULL,0)));
4013 /* like croak, but report in context of caller */
4016 Perl_croak_caller(const char *pat, ...)
     /* 'pat' is a printf-style format, consumed with the trailing varargs,
      * exactly as for croak(); the only difference is where the error is
      * attributed.  This function does not return. */
4020     const PERL_CONTEXT *cx = caller_cx(0, NULL);
4022     /* make error appear at call site */
     /* point PL_curcop at the caller's cop so the file/line in the
      * diagnostic name the call site rather than this frame */
4024 	PL_curcop = cx->blk_oldcop;
4026     va_start(args, pat);
     /* the actual throw (a vcroak-style call) follows; NOTE(review): it is
      * not visible in this excerpt */
4028     NOT_REACHED; /* NOTREACHED */
4037 SV* const elemsv = POPs;
4038 IV elem = SvIV(elemsv);
4039 AV *const av = MUTABLE_AV(POPs);
4040 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
4041 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
4042 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
4043 bool preeminent = TRUE;
4046 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
4047 Perl_warner(aTHX_ packWARN(WARN_MISC),
4048 "Use of reference \"%"SVf"\" as array index",
4050 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
4053 if (UNLIKELY(localizing)) {
4057 /* If we can determine whether the element exist,
4058 * Try to preserve the existenceness of a tied array
4059 * element by using EXISTS and DELETE if possible.
4060 * Fallback to FETCH and STORE otherwise. */
4061 if (SvCANEXISTDELETE(av))
4062 preeminent = av_exists(av, elem);
4065 svp = av_fetch(av, elem, lval && !defer);
4067 #ifdef PERL_MALLOC_WRAP
4068 if (SvUOK(elemsv)) {
4069 const UV uv = SvUV(elemsv);
4070 elem = uv > IV_MAX ? IV_MAX : uv;
4072 else if (SvNOK(elemsv))
4073 elem = (IV)SvNV(elemsv);
4075 static const char oom_array_extend[] =
4076 "Out of memory during array extend"; /* Duplicated in av.c */
4077 MEM_WRAP_CHECK_1(elem,SV*,oom_array_extend);
4080 if (!svp || !*svp) {
4083 DIE(aTHX_ PL_no_aelem, elem);
4084 len = av_tindex(av);
4085 mPUSHs(newSVavdefelem(av,
4086 /* Resolve a negative index now, unless it points before the
4087 beginning of the array, in which case record it for error
4088 reporting in magic_setdefelem. */
4089 elem < 0 && len + elem >= 0 ? len + elem : elem,
4093 if (UNLIKELY(localizing)) {
4095 save_aelem(av, elem, svp);
4097 SAVEADELETE(av, elem);
4099 else if (PL_op->op_private & OPpDEREF) {
4100 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
4104 sv = (svp ? *svp : &PL_sv_undef);
4105 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
4112 Perl_vivify_ref(pTHX_ SV *sv, U32 to_what)
     /* Autovivification helper: upgrade sv (typically an undef lvalue, e.g.
      * from pp_aelem above) into a reference to a fresh SV, AV or HV.
      * 'to_what' selects the kind — per the callers it is the OPpDEREF
      * bits of op_private.  Croaks rather than modify a read-only sv. */
4114     PERL_ARGS_ASSERT_VIVIFY_REF;
4119 	    Perl_croak_no_modify();
     /* convert sv's body so it can hold a reference */
4120 	prepare_SV_for_RV(sv);
     /* one SvRV_set per target kind; NOTE(review): the dispatch on
      * to_what (presumably a switch over OPpDEREF_SV/AV/HV) is not
      * visible in this excerpt */
4123 	    SvRV_set(sv, newSV(0));
4126 	    SvRV_set(sv, MUTABLE_SV(newAV()));
4129 	    SvRV_set(sv, MUTABLE_SV(newHV()));
     /* if sv has get-magic, return a magic-free mortal copy so the
      * caller's subsequent use doesn't trigger the magic a second time */
4136     if (SvGMAGICAL(sv)) {
4137 	/* copy the sv without magic to prevent magic from being
4139 	SV* msv = sv_newmortal();
4140 	sv_setsv_nomg(msv, sv);
4146 PERL_STATIC_INLINE HV *
4147 S_opmethod_stash(pTHX_ SV* meth)
     /* Resolve the stash (package HV) in which the method named by 'meth'
      * should be looked up, based on the invocant sitting on the stack at
      * TOPMARK+1.  Handles blessed references, class-name strings, globs
      * and filehandle names; croaks for a missing or undefined invocant or
      * an unblessed reference.  As a side effect, glob/filehandle invocants
      * are replaced on the stack by a reference.  Shared by the pp_method*
      * ops below. */
4152     SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
4153 	? (Perl_croak(aTHX_ "Can't call method \"%"SVf"\" without a "
4154 			    "package or object reference", SVfARG(meth)),
4156 	: *(PL_stack_base + TOPMARK + 1);
4158     PERL_ARGS_ASSERT_OPMETHOD_STASH;
4162 	Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on an undefined value",
     /* run get-magic first so we classify the invocant's real value */
4165     if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
     /* fast path: a shared-hash-key COW string is a literal class name */
4166     else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
4167 	stash = gv_stashsv(sv, GV_CACHE_ONLY);
4168 	if (stash) return stash;
     /* a reference: the invocant object is what it points at */
4172 	ob = MUTABLE_SV(SvRV(sv));
4173     else if (!SvOK(sv)) goto undefined;
     /* a glob invocant (e.g. a filehandle) */
4174     else if (isGV_with_GP(sv)) {
4176 	    Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
4177 		       "without a package or object reference",
     /* 'y'-type PVLV: a defelem proxy — use its target */
4180 	if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') {
4181 	    assert(!LvTARGLEN(ob));
     /* normalise the stack entry to a reference to the glob */
4185 	    *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob));
4188     /* this isn't a reference */
     /* plain string invocant: try it as a package name, then as the name
      * of an open filehandle */
4191 	const char * const packname = SvPV_nomg_const(sv, packlen);
4192 	const U32 packname_utf8 = SvUTF8(sv);
4193 	stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
4194 	if (stash) return stash;
4196 	if (!(iogv = gv_fetchpvn_flags(
4197 	        packname, packlen, packname_utf8, SVt_PVIO
4199 	     !(ob=MUTABLE_SV(GvIO(iogv))))
4201 	    /* this isn't the name of a filehandle either */
4204 		Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
4205 			   "without a package or object reference",
4208 	    /* assume it's a package name */
4209 	    stash = gv_stashpvn(packname, packlen, packname_utf8);
4210 	    if (stash) return stash;
     /* no such package yet: let gv_fetchmethod work from the name SV */
4211 	    else return MUTABLE_HV(sv);
4213 	/* it _is_ a filehandle name -- replace with a reference */
4214 	*(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
4217     /* if we got here, ob should be an object or a glob */
4218     if (!ob || !(SvOBJECT(ob)
4219 		 || (isGV_with_GP(ob)
4220 		     && (ob = MUTABLE_SV(GvIO((const GV *)ob)))
     /* unblessed reference: report the method name; a precompiled "isa"
      * is displayed as "DOES" here (NOTE(review): presumably because
      * UNIVERSAL::isa forwards to DOES — confirm against universal.c) */
4223 	Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on unblessed reference",
4224 		   SVfARG((SvSCREAM(meth) && strEQ(SvPV_nolen_const(meth),"isa"))
4225 		       ? newSVpvs_flags("DOES", SVs_TEMP)
4237 SV* const meth = TOPs;
4240 SV* const rmeth = SvRV(meth);
4241 if (SvTYPE(rmeth) == SVt_PVCV) {
4247 stash = opmethod_stash(meth);
4249 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4252 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4256 #define METHOD_CHECK_CACHE(stash,cache,meth) \
4257 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
4259 gv = MUTABLE_GV(HeVAL(he)); \
4260 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
4261 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
4263 XPUSHs(MUTABLE_SV(GvCV(gv))); \
4272 SV* const meth = cMETHOPx_meth(PL_op);
4273 HV* const stash = opmethod_stash(meth);
4275 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
4276 METHOD_CHECK_CACHE(stash, stash, meth);
4279 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4282 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4291 SV* const meth = cMETHOPx_meth(PL_op);
4292 HV* const stash = CopSTASH(PL_curcop);
4293 /* Actually, SUPER doesn't need real object's (or class') stash at all,
4294 * as it uses CopSTASH. However, we must ensure that object(class) is
4295 * correct (this check is done by S_opmethod_stash) */
4296 opmethod_stash(meth);
4298 if ((cache = HvMROMETA(stash)->super)) {
4299 METHOD_CHECK_CACHE(stash, cache, meth);
4302 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4305 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4313 SV* const meth = cMETHOPx_meth(PL_op);
4314 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4315 opmethod_stash(meth); /* not used but needed for error checks */
4317 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
4318 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4320 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4323 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4327 PP(pp_method_redir_super)
     /* Implements $obj->Other::Class::SUPER::meth() style calls: resolve
      * 'meth' with SUPER semantics starting from the redirect class stored
      * in the METHOP (cMETHOPx_rclass), not from the invocant's class.
      * Pushes the resolved CV (or GV fallback) for pp_entersub. */
4332     SV* const meth = cMETHOPx_meth(PL_op);
4333     HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
     /* validate the invocant (croaks on undef/missing); its stash is
      * deliberately unused — resolution starts from the redirect class */
4334     opmethod_stash(meth); /* not used but needed for error checks */
     /* no such package yet: fall back to the class-name SV itself */
4336     if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
     /* try the MRO's SUPER method cache before a full lookup */
4337     else if ((cache = HvMROMETA(stash)->super)) {
4338         METHOD_CHECK_CACHE(stash, cache, meth);
     /* full lookup with SUPER semantics; autoloads, croaks if not found */
4341     gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4344     XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4349 * ex: set ts=8 sts=4 sw=4 et: