3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
6 * You may distribute under the terms of either the GNU General Public
7 * License or the Artistic License, as specified in the README file.
12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
15 * Awake! Awake! Fear, Fire, Foes! Awake!
18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
21 /* This file contains 'hot' pp ("push/pop") functions that
22 * execute the opcodes that make up a perl program. A typical pp function
23 * expects to find its arguments on the stack, and usually pushes its
24 * results onto the stack, hence the 'pp' terminology. Each OP structure
25 * contains a pointer to the relevant pp_foo() function.
27 * By 'hot', we mean common ops whose execution speed is critical.
28 * By gathering them together into a single file, we encourage
29 * CPU cache hits on hot code. Also it could be taken as a warning not to
30 * change any code in this file unless you're sure it won't affect
35 #define PERL_IN_PP_HOT_C
49 PL_curcop = (COP*)PL_op;
50 TAINT_NOT; /* Each statement is presumed innocent */
51 PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;
61 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
62 PUSHs(save_scalar(cGVOP_gv));
64 PUSHs(GvSVn(cGVOP_gv));
69 /* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
76 /* This is sometimes called directly by pp_coreargs, pp_grepstart and
80 PUSHMARK(PL_stack_sp);
91 /* no PUTBACK, SETs doesn't inc/dec SP */
98 XPUSHs(MUTABLE_SV(cGVOP_gv));
103 /* also used for: pp_andassign() */
109 /* SP is not used to remove a variable that is saved across the
110 sv_2bool_flags call in SvTRUE_NN, if a RISC/CISC or low/high machine
111 register or load/store vs direct mem ops macro is introduced, this
112 should be a define block between direct PL_stack_sp and dSP operations,
113 presently, using PL_stack_sp is bias towards CISC cpus */
114 SV * const sv = *PL_stack_sp;
118 if (PL_op->op_type == OP_AND)
120 return cLOGOP->op_other;
128 /* sassign keeps its args in the optree traditionally backwards.
129 So we pop them differently.
131 SV *left = POPs; SV *right = TOPs;
133 if (PL_op->op_private & OPpASSIGN_BACKWARDS) {
134 SV * const temp = left;
135 left = right; right = temp;
137 assert(TAINTING_get || !TAINT_get);
138 if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
140 if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
142 SV * const cv = SvRV(right);
143 const U32 cv_type = SvTYPE(cv);
144 const bool is_gv = isGV_with_GP(left);
145 const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
151 /* Can do the optimisation if left (LVALUE) is not a typeglob,
152 right (RVALUE) is a reference to something, and we're in void
154 if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
155 /* Is the target symbol table currently empty? */
156 GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
157 if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
158 /* Good. Create a new proxy constant subroutine in the target.
159 The gv becomes a(nother) reference to the constant. */
160 SV *const value = SvRV(cv);
162 SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
163 SvPCS_IMPORTED_on(gv);
165 SvREFCNT_inc_simple_void(value);
171 /* Need to fix things up. */
173 /* Need to fix GV. */
174 left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
178 /* We've been returned a constant rather than a full subroutine,
179 but they expect a subroutine reference to apply. */
181 ENTER_with_name("sassign_coderef");
182 SvREFCNT_inc_void(SvRV(cv));
183 /* newCONSTSUB takes a reference count on the passed in SV
184 from us. We set the name to NULL, otherwise we get into
185 all sorts of fun as the reference to our new sub is
186 donated to the GV that we're about to assign to.
188 SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
191 LEAVE_with_name("sassign_coderef");
193 /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
195 First: ops for \&{"BONK"}; return us the constant in the
197 Second: ops for *{"BONK"} cause that symbol table entry
198 (and our reference to it) to be upgraded from RV
200 Thirdly: We get here. cv is actually PVGV now, and its
201 GvCV() is actually the subroutine we're looking for
203 So change the reference so that it points to the subroutine
204 of that typeglob, as that's what they were after all along.
206 GV *const upgraded = MUTABLE_GV(cv);
207 CV *const source = GvCV(upgraded);
210 assert(CvFLAGS(source) & CVf_CONST);
212 SvREFCNT_inc_simple_void_NN(source);
213 SvREFCNT_dec_NN(upgraded);
214 SvRV_set(right, MUTABLE_SV(source));
220 UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
221 (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
224 packWARN(WARN_MISC), "Useless assignment to a temporary"
226 SvSetMagicSV(left, right);
236 RETURNOP(cLOGOP->op_other);
238 RETURNOP(cLOGOP->op_next);
245 TAINT_NOT; /* Each statement is presumed innocent */
247 PL_stack_sp = PL_stack_base + cx->blk_oldsp;
249 if (!(PL_op->op_flags & OPf_SPECIAL)) {
250 assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx));
258 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
263 const char *rpv = NULL;
265 bool rcopied = FALSE;
267 if (TARG == right && right != left) { /* $r = $l.$r */
268 rpv = SvPV_nomg_const(right, rlen);
269 rbyte = !DO_UTF8(right);
270 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
271 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */
275 if (TARG != left) { /* not $l .= $r */
277 const char* const lpv = SvPV_nomg_const(left, llen);
278 lbyte = !DO_UTF8(left);
279 sv_setpvn(TARG, lpv, llen);
285 else { /* $l .= $r and left == TARG */
287 if ((left == right /* $l .= $l */
288 || (PL_op->op_private & OPpTARGET_MY)) /* $l = $l . $r */
289 && ckWARN(WARN_UNINITIALIZED)
295 SvPV_force_nomg_nolen(left);
297 lbyte = !DO_UTF8(left);
303 rpv = SvPV_nomg_const(right, rlen);
304 rbyte = !DO_UTF8(right);
306 if (lbyte != rbyte) {
308 sv_utf8_upgrade_nomg(TARG);
311 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
312 sv_utf8_upgrade_nomg(right);
313 rpv = SvPV_nomg_const(right, rlen);
316 sv_catpvn_nomg(TARG, rpv, rlen);
323 /* push the elements of av onto the stack.
324 * XXX Note that padav has similar code but without the mg_get().
325 * I suspect that the mg_get is no longer needed, but while padav
326 * differs, it can't share this function */
329 S_pushav(pTHX_ AV* const av)
332 const SSize_t maxarg = AvFILL(av) + 1;
334 if (UNLIKELY(SvRMAGICAL(av))) {
336 for (i=0; i < (PADOFFSET)maxarg; i++) {
337 SV ** const svp = av_fetch(av, i, FALSE);
338 /* See note in pp_helem, and bug id #27839 */
340 ? SvGMAGICAL(*svp) ? (mg_get(*svp), *svp) : *svp
346 for (i=0; i < (PADOFFSET)maxarg; i++) {
347 SV * const sv = AvARRAY(av)[i];
348 SP[i+1] = LIKELY(sv) ? sv : &PL_sv_undef;
356 /* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */
361 PADOFFSET base = PL_op->op_targ;
362 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
364 if (PL_op->op_flags & OPf_SPECIAL) {
365 /* fake the RHS of my ($x,$y,..) = @_ */
367 S_pushav(aTHX_ GvAVn(PL_defgv));
371 /* note, this is only skipped for compile-time-known void cxt */
372 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
375 for (i = 0; i <count; i++)
376 *++SP = PAD_SV(base+i);
378 if (PL_op->op_private & OPpLVAL_INTRO) {
379 SV **svp = &(PAD_SVl(base));
380 const UV payload = (UV)(
381 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
382 | (count << SAVE_TIGHT_SHIFT)
383 | SAVEt_CLEARPADRANGE);
384 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
385 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT)) == base);
392 for (i = 0; i <count; i++)
393 SvPADSTALE_off(*svp++); /* mark lexical as active */
404 OP * const op = PL_op;
405 /* access PL_curpad once */
406 SV ** const padentry = &(PAD_SVl(op->op_targ));
411 PUTBACK; /* no pop/push after this, TOPs ok */
413 if (op->op_flags & OPf_MOD) {
414 if (op->op_private & OPpLVAL_INTRO)
415 if (!(op->op_private & OPpPAD_STATE))
416 save_clearsv(padentry);
417 if (op->op_private & OPpDEREF) {
418 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather
419 than TARG reduces the scope of TARG, so it does not
420 span the call to save_clearsv, resulting in smaller
422 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
434 tryAMAGICunTARGETlist(iter_amg, 0);
435 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
437 else PL_last_in_gv = PL_argvgv, PL_stack_sp--;
438 if (!isGV_with_GP(PL_last_in_gv)) {
439 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
440 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));
443 XPUSHs(MUTABLE_SV(PL_last_in_gv));
446 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
447 if (PL_last_in_gv == (GV *)&PL_sv_undef)
448 PL_last_in_gv = NULL;
450 assert(isGV_with_GP(PL_last_in_gv));
453 return do_readline();
461 tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
465 (SvIOK_notUV(left) && SvIOK_notUV(right))
466 ? (SvIVX(left) == SvIVX(right))
467 : ( do_ncmp(left, right) == 0)
473 /* also used for: pp_i_preinc() */
477 SV *sv = *PL_stack_sp;
479 if (LIKELY(((sv->sv_flags &
480 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
481 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
483 && SvIVX(sv) != IV_MAX)
485 SvIV_set(sv, SvIVX(sv) + 1);
487 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
494 /* also used for: pp_i_predec() */
498 SV *sv = *PL_stack_sp;
500 if (LIKELY(((sv->sv_flags &
501 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
502 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
504 && SvIVX(sv) != IV_MIN)
506 SvIV_set(sv, SvIVX(sv) - 1);
508 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
515 /* also used for: pp_orassign() */
524 if (PL_op->op_type == OP_OR)
526 RETURNOP(cLOGOP->op_other);
531 /* also used for: pp_dor() pp_dorassign() */
538 const int op_type = PL_op->op_type;
539 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);
544 if (UNLIKELY(!sv || !SvANY(sv))) {
545 if (op_type == OP_DOR)
547 RETURNOP(cLOGOP->op_other);
553 if (UNLIKELY(!sv || !SvANY(sv)))
558 switch (SvTYPE(sv)) {
560 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
564 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
568 if (CvROOT(sv) || CvXSUB(sv))
581 if(op_type == OP_DOR)
583 RETURNOP(cLOGOP->op_other);
585 /* assuming OP_DEFINED */
595 dSP; dATARGET; bool useleft; SV *svl, *svr;
597 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
601 #ifdef PERL_PRESERVE_IVUV
603 /* special-case some simple common cases */
604 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
606 U32 flags = (svl->sv_flags & svr->sv_flags);
607 if (flags & SVf_IOK) {
608 /* both args are simple IVs */
613 topl = ((UV)il) >> (UVSIZE * 8 - 2);
614 topr = ((UV)ir) >> (UVSIZE * 8 - 2);
616 /* if both are in a range that can't under/overflow, do a
617 * simple integer add: if the top of both numbers
618 * are 00 or 11, then it's safe */
619 if (!( ((topl+1) | (topr+1)) & 2)) {
621 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
627 else if (flags & SVf_NOK) {
628 /* both args are NVs */
633 #if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
634 !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
635 && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
637 nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
640 /* nothing was lost by converting to IVs */
643 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
651 useleft = USE_LEFT(svl);
652 /* We must see if we can perform the addition with integers if possible,
653 as the integer code detects overflow while the NV code doesn't.
654 If either argument hasn't had a numeric conversion yet attempt to get
655 the IV. It's important to do this now, rather than just assuming that
656 it's not IOK as a PV of "9223372036854775806" may not take well to NV
657 addition, and an SV which is NOK, NV=6.0 ought to be coerced to
658 integer in case the second argument is IV=9223372036854775806
659 We can (now) rely on sv_2iv to do the right thing, only setting the
660 public IOK flag if the value in the NV (or PV) slot is truly integer.
662 A side effect is that this also aggressively prefers integer maths over
663 fp maths for integer values.
665 How to detect overflow?
667 C 99 section 6.2.6.1 says
669 The range of nonnegative values of a signed integer type is a subrange
670 of the corresponding unsigned integer type, and the representation of
671 the same value in each type is the same. A computation involving
672 unsigned operands can never overflow, because a result that cannot be
673 represented by the resulting unsigned integer type is reduced modulo
674 the number that is one greater than the largest value that can be
675 represented by the resulting type.
679 which I read as "unsigned ints wrap."
681 signed integer overflow seems to be classed as "exception condition"
683 If an exceptional condition occurs during the evaluation of an
684 expression (that is, if the result is not mathematically defined or not
685 in the range of representable values for its type), the behavior is
688 (6.5, the 5th paragraph)
690 I had assumed that on 2s complement machines signed arithmetic would
691 wrap, hence coded pp_add and pp_subtract on the assumption that
692 everything perl builds on would be happy. After much wailing and
693 gnashing of teeth it would seem that irix64 knows its ANSI spec well,
694 knows that it doesn't need to, and doesn't. Bah. Anyway, the all-
695 unsigned code below is actually shorter than the old code. :-)
698 if (SvIV_please_nomg(svr)) {
699 /* Unless the left argument is integer in range we are going to have to
700 use NV maths. Hence only attempt to coerce the right argument if
701 we know the left is integer. */
709 /* left operand is undef, treat as zero. + 0 is identity,
710 Could SETi or SETu right now, but space optimise by not adding
711 lots of code to speed up what is probably a rarish case. */
713 /* Left operand is defined, so is it IV? */
714 if (SvIV_please_nomg(svl)) {
715 if ((auvok = SvUOK(svl)))
718 const IV aiv = SvIVX(svl);
721 auvok = 1; /* Now acting as a sign flag. */
723 auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
730 bool result_good = 0;
733 bool buvok = SvUOK(svr);
738 const IV biv = SvIVX(svr);
743 buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
745 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
746 else "IV" now, independent of how it came in.
747 if a, b represents positive, A, B negative, a maps to -A etc
752 all UV maths. negate result if A negative.
753 add if signs same, subtract if signs differ. */
759 /* Must get smaller */
765 /* result really should be -(auv-buv). as its negation
766 of true value, need to swap our result flag */
783 if (result <= (UV)IV_MIN)
784 SETi(result == (UV)IV_MIN
785 ? IV_MIN : -(IV)result);
787 /* result valid, but out of range for IV. */
792 } /* Overflow, drop through to NVs. */
797 useleft = USE_LEFT(svl);
801 NV value = SvNV_nomg(svr);
804 /* left operand is undef, treat as zero. + 0.0 is identity. */
808 SETn( value + SvNV_nomg(svl) );
814 /* also used for: pp_aelemfast_lex() */
819 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
820 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
821 const U32 lval = PL_op->op_flags & OPf_MOD;
822 SV** const svp = av_fetch(av, (I8)PL_op->op_private, lval);
823 SV *sv = (svp ? *svp : &PL_sv_undef);
825 if (UNLIKELY(!svp && lval))
826 DIE(aTHX_ PL_no_aelem, (int)(I8)PL_op->op_private);
829 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
839 do_join(TARG, *MARK, MARK, SP);
850 * We ass_u_me that LvTARGOFF() comes first, and that two STRLENs
851 * will be enough to hold an OP*.
853 SV* const sv = sv_newmortal();
854 sv_upgrade(sv, SVt_PVLV);
856 Copy(&PL_op, &LvTARGOFF(sv), 1, OP*);
859 XPUSHs(MUTABLE_SV(PL_op));
864 /* Oversized hot code. */
866 /* also used for: pp_say() */
870 dSP; dMARK; dORIGMARK;
874 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;
878 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
881 if (MARK == ORIGMARK) {
882 /* If using default handle then we need to make space to
883 * pass object as 1st arg, so move other args up ...
887 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
890 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
892 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
893 | (PL_op->op_type == OP_SAY
894 ? TIED_METHOD_SAY : 0)), sp - mark);
897 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
898 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
901 SETERRNO(EBADF,RMS_IFI);
904 else if (!(fp = IoOFP(io))) {
906 report_wrongway_fh(gv, '<');
909 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);
913 SV * const ofs = GvSV(PL_ofsgv); /* $, */
915 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
917 if (!do_print(*MARK, fp))
921 /* don't use 'ofs' here - it may be invalidated by magic callbacks */
922 if (!do_print(GvSV(PL_ofsgv), fp)) {
931 if (!do_print(*MARK, fp))
939 if (PL_op->op_type == OP_SAY) {
940 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))
943 else if (PL_ors_sv && SvOK(PL_ors_sv))
944 if (!do_print(PL_ors_sv, fp)) /* $\ */
947 if (IoFLAGS(io) & IOf_FLUSH)
948 if (PerlIO_flush(fp) == EOF)
958 XPUSHs(&PL_sv_undef);
963 /* also used for: pp_rv2hv() */
964 /* also called directly by pp_lvavref */
969 const U8 gimme = GIMME_V;
970 static const char an_array[] = "an ARRAY";
971 static const char a_hash[] = "a HASH";
972 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
973 || PL_op->op_type == OP_LVAVREF;
974 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
978 if (UNLIKELY(SvAMAGIC(sv))) {
979 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
982 if (UNLIKELY(SvTYPE(sv) != type))
983 /* diag_listed_as: Not an ARRAY reference */
984 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
985 else if (UNLIKELY(PL_op->op_flags & OPf_MOD
986 && PL_op->op_private & OPpLVAL_INTRO))
987 Perl_croak(aTHX_ "%s", PL_no_localize_ref);
989 else if (UNLIKELY(SvTYPE(sv) != type)) {
992 if (!isGV_with_GP(sv)) {
993 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
1001 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
1002 if (PL_op->op_private & OPpLVAL_INTRO)
1003 sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
1005 if (PL_op->op_flags & OPf_REF) {
1009 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
1010 const I32 flags = is_lvalue_sub();
1011 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1012 if (gimme != G_ARRAY)
1013 goto croak_cant_return;
1020 AV *const av = MUTABLE_AV(sv);
1021 /* The guts of pp_rv2av */
1022 if (gimme == G_ARRAY) {
1028 else if (gimme == G_SCALAR) {
1030 const SSize_t maxarg = AvFILL(av) + 1;
1034 /* The guts of pp_rv2hv */
1035 if (gimme == G_ARRAY) { /* array wanted */
1037 return Perl_do_kv(aTHX);
1039 else if ((PL_op->op_private & OPpTRUEBOOL
1040 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
1041 && block_gimme() == G_VOID ))
1042 && (!SvRMAGICAL(sv) || !mg_find(sv, PERL_MAGIC_tied)))
1043 SETs(HvUSEDKEYS(sv) ? &PL_sv_yes : sv_2mortal(newSViv(0)));
1044 else if (gimme == G_SCALAR) {
1046 TARG = Perl_hv_scalar(aTHX_ MUTABLE_HV(sv));
1053 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
1054 is_pp_rv2av ? "array" : "hash");
1059 S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
1061 PERL_ARGS_ASSERT_DO_ODDBALL;
1064 if (ckWARN(WARN_MISC)) {
1066 if (oddkey == firstkey &&
1068 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
1069 SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
1071 err = "Reference found where even-sized list expected";
1074 err = "Odd number of elements in hash assignment";
1075 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
1082 /* Do a mark and sweep with the SVf_BREAK flag to detect elements which
1083 * are common to both the LHS and RHS of an aassign, and replace them
1084 * with copies. All these copies are made before the actual list assign is
1087 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
1088 * element ($b) to the first LH element ($a), modifies $a; when the
1089 * second assignment is done, the second RH element now has the wrong
1090 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
1091 * Note that we don't need to make a mortal copy of $b.
1093 * The algorithm below works by, for every RHS element, mark the
1094 * corresponding LHS target element with SVf_BREAK. Then if the RHS
1095 * element is found with SVf_BREAK set, it means it would have been
1096 * modified, so make a copy.
1097 * Note that by scanning both LHS and RHS in lockstep, we avoid
1098 * unnecessary copies (like $b above) compared with a naive
1099 * "mark all LHS; copy all marked RHS; unmark all LHS".
1101 * If the LHS element is a 'my' declaration' and has a refcount of 1, then
1102 * it can't be common and can be skipped.
1104 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
1105 * that we thought we didn't need to call S_aassign_copy_common(), but we
1106 * have anyway for sanity checking. If we find we need to copy, then panic.
1109 PERL_STATIC_INLINE void
1110 S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
1111 SV **firstrelem, SV **lastrelem
1120 SSize_t lcount = lastlelem - firstlelem + 1;
1121 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
1122 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
1123 bool copy_all = FALSE;
1125 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
1126 assert(firstlelem < lastlelem); /* at least 2 LH elements */
1127 assert(firstrelem < lastrelem); /* at least 2 RH elements */
1131 /* we never have to copy the first RH element; it can't be corrupted
1132 * by assigning something to the corresponding first LH element.
1133 * So this scan does in a loop: mark LHS[N]; test RHS[N+1]
1135 relem = firstrelem + 1;
1137 for (; relem <= lastrelem; relem++) {
1140 /* mark next LH element */
1142 if (--lcount >= 0) {
1145 if (UNLIKELY(!svl)) {/* skip AV alias marker */
1146 assert (lelem <= lastlelem);
1152 if (SvSMAGICAL(svl)) {
1155 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
1158 /* this LH element will consume all further args;
1159 * no need to mark any further LH elements (if any).
1160 * But we still need to scan any remaining RHS elements;
1161 * set lcount negative to distinguish from lcount == 0,
1162 * so the loop condition continues being true
1165 lelem--; /* no need to unmark this element */
1167 else if (!(do_rc1 && SvREFCNT(svl) == 1) && svl != &PL_sv_undef) {
1168 assert(!SvIMMORTAL(svl));
1169 SvFLAGS(svl) |= SVf_BREAK;
1173 /* don't check RH element if no SVf_BREAK flags set yet */
1180 /* see if corresponding RH element needs copying */
1186 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
1190 /* op_dump(PL_op); */
1192 "panic: aassign skipped needed copy of common RH elem %"
1193 UVuf, (UV)(relem - firstrelem));
1197 TAINT_NOT; /* Each item is independent */
1199 /* Dear TODO test in t/op/sort.t, I love you.
1200 (It's relying on a panic, not a "semi-panic" from newSVsv()
1201 and then an assertion failure below.) */
1202 if (UNLIKELY(SvIS_FREED(svr))) {
1203 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
1206 /* avoid break flag while copying; otherwise COW etc
1208 SvFLAGS(svr) &= ~SVf_BREAK;
1209 /* Not newSVsv(), as it does not allow copy-on-write,
1210 resulting in wasteful copies.
1211 Also, we use SV_NOSTEAL in case the SV is used more than
1212 once, e.g. (...) = (f())[0,0]
1213 Where the same SV appears twice on the RHS without a ref
1214 count bump. (Although I suspect that the SV won't be
1215 stealable here anyway - DAPM).
1217 *relem = sv_mortalcopy_flags(svr,
1218 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1219 /* ... but restore afterwards in case it's needed again,
1220 * e.g. ($a,$b,$c) = (1,$a,$a)
1222 SvFLAGS(svr) |= SVf_BREAK;
1234 while (lelem > firstlelem) {
1235 SV * const svl = *(--lelem);
1237 SvFLAGS(svl) &= ~SVf_BREAK;
1246 SV **lastlelem = PL_stack_sp;
1247 SV **lastrelem = PL_stack_base + POPMARK;
1248 SV **firstrelem = PL_stack_base + POPMARK + 1;
1249 SV **firstlelem = lastrelem + 1;
1262 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
1263 * only need to save locally, not on the save stack */
1264 U16 old_delaymagic = PL_delaymagic;
1269 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
1271 /* If there's a common identifier on both sides we have to take
1272 * special care that assigning the identifier on the left doesn't
1273 * clobber a value on the right that's used later in the list.
1276 /* at least 2 LH and RH elements, or commonality isn't an issue */
1277 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1278 for (relem = firstrelem+1; relem <= lastrelem; relem++) {
1279 if (SvGMAGICAL(*relem))
1282 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1283 if (*lelem && SvSMAGICAL(*lelem))
1286 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
1287 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
1288 /* skip the scan if all scalars have a ref count of 1 */
1289 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1291 if (!sv || SvREFCNT(sv) == 1)
1293 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVAV)
1300 S_aassign_copy_common(aTHX_
1301 firstlelem, lastlelem, firstrelem, lastrelem
1311 /* on debugging builds, do the scan even if we've concluded we
1312 * don't need to, then panic if we find commonality. Note that the
1313 * scanner assumes at least 2 elements */
1314 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1322 lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
1329 while (LIKELY(lelem <= lastlelem)) {
1331 TAINT_NOT; /* Each item stands on its own, taintwise. */
1333 if (UNLIKELY(!sv)) {
1336 ASSUME(SvTYPE(sv) == SVt_PVAV);
1338 switch (SvTYPE(sv)) {
1340 bool already_copied = FALSE;
1341 ary = MUTABLE_AV(sv);
1342 magic = SvMAGICAL(ary) != 0;
1344 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1346 /* We need to clear ary. The is a danger that if we do this,
1347 * elements on the RHS may be prematurely freed, e.g.
1349 * In the case of possible commonality, make a copy of each
1350 * RHS SV *before* clearing the array, and add a reference
1351 * from the tmps stack, so that it doesn't leak on death.
1352 * Otherwise, make a copy of each RHS SV only as we're storing
1353 * it into the array - that way we don't have to worry about
1354 * it being leaked if we die, but don't incur the cost of
1355 * mortalising everything.
1358 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1359 && (relem <= lastrelem)
1360 && (magic || AvFILL(ary) != -1))
1363 EXTEND_MORTAL(lastrelem - relem + 1);
1364 for (svp = relem; svp <= lastrelem; svp++) {
1365 /* see comment in S_aassign_copy_common about SV_NOSTEAL */
1366 *svp = sv_mortalcopy_flags(*svp,
1367 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1370 already_copied = TRUE;
1374 if (relem <= lastrelem)
1375 av_extend(ary, lastrelem - relem);
1378 while (relem <= lastrelem) { /* gobble up all the rest */
1380 if (LIKELY(!alias)) {
1385 /* before newSV, in case it dies */
1388 /* see comment in S_aassign_copy_common about
1390 sv_setsv_flags(sv, *relem,
1391 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
1396 if (!already_copied)
1399 DIE(aTHX_ "Assigned value is not a reference");
1400 if (SvTYPE(SvRV(*relem)) > SVt_PVLV)
1401 /* diag_listed_as: Assigned value is not %s reference */
1403 "Assigned value is not a SCALAR reference");
1404 if (lval && !already_copied)
1405 *relem = sv_mortalcopy(*relem);
1406 /* XXX else check for weak refs? */
1407 sv = SvREFCNT_inc_NN(SvRV(*relem));
1411 SvREFCNT_inc_simple_void_NN(sv); /* undo mortal free */
1412 didstore = av_store(ary,i++,sv);
1421 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
1422 SvSETMAGIC(MUTABLE_SV(ary));
1427 case SVt_PVHV: { /* normal hash */
1431 SV** topelem = relem;
1432 SV **firsthashrelem = relem;
1433 bool already_copied = FALSE;
1435 hash = MUTABLE_HV(sv);
1436 magic = SvMAGICAL(hash) != 0;
1438 odd = ((lastrelem - firsthashrelem)&1)? 0 : 1;
1439 if (UNLIKELY(odd)) {
1440 do_oddball(lastrelem, firsthashrelem);
1441 /* we have firstlelem to reuse, it's not needed anymore
1443 *(lastrelem+1) = &PL_sv_undef;
1447 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1449 /* We need to clear hash. The is a danger that if we do this,
1450 * elements on the RHS may be prematurely freed, e.g.
1451 * %h = (foo => $h{bar});
1452 * In the case of possible commonality, make a copy of each
1453 * RHS SV *before* clearing the hash, and add a reference
1454 * from the tmps stack, so that it doesn't leak on death.
1457 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1458 && (relem <= lastrelem)
1459 && (magic || HvUSEDKEYS(hash)))
1462 EXTEND_MORTAL(lastrelem - relem + 1);
1463 for (svp = relem; svp <= lastrelem; svp++) {
1464 *svp = sv_mortalcopy_flags(*svp,
1465 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1468 already_copied = TRUE;
1473 while (LIKELY(relem < lastrelem+odd)) { /* gobble up all the rest */
1476 /* Copy the key if aassign is called in lvalue context,
1477 to avoid having the next op modify our rhs. Copy
1478 it also if it is gmagical, lest it make the
1479 hv_store_ent call below croak, leaking the value. */
1480 sv = (lval || SvGMAGICAL(*relem)) && !already_copied
1481 ? sv_mortalcopy(*relem)
1490 sv_setsv_nomg(tmpstr,*relem++); /* value */
1493 if (gimme == G_ARRAY) {
1494 if (hv_exists_ent(hash, sv, 0))
1495 /* key overwrites an existing entry */
1498 /* copy element back: possibly to an earlier
1499 * stack location if we encountered dups earlier,
1500 * possibly to a later stack location if odd */
1502 *topelem++ = tmpstr;
1506 SvREFCNT_inc_simple_void_NN(tmpstr); /* undo mortal free */
1507 didstore = hv_store_ent(hash,sv,tmpstr,0);
1509 if (!didstore) sv_2mortal(tmpstr);
1515 if (duplicates && gimme == G_ARRAY) {
1516 /* at this point we have removed the duplicate key/value
1517 * pairs from the stack, but the remaining values may be
1518 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
1519 * the (a 2), but the stack now probably contains
1520 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
1521 * obliterates the earlier key. So refresh all values. */
1522 lastrelem -= duplicates;
1523 relem = firsthashrelem;
1524 while (relem < lastrelem+odd) {
1526 he = hv_fetch_ent(hash, *relem++, 0, 0);
1527 *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
1530 if (odd && gimme == G_ARRAY) lastrelem++;
1534 if (SvIMMORTAL(sv)) {
1535 if (relem <= lastrelem)
1539 if (relem <= lastrelem) {
1541 SvTEMP(sv) && !SvSMAGICAL(sv) && SvREFCNT(sv) == 1 &&
1542 (!isGV_with_GP(sv) || SvFAKE(sv)) && ckWARN(WARN_MISC)
1545 packWARN(WARN_MISC),
1546 "Useless assignment to a temporary"
1548 sv_setsv(sv, *relem);
1552 sv_setsv(sv, &PL_sv_undef);
1557 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
1558 /* Will be used to set PL_tainting below */
1559 Uid_t tmp_uid = PerlProc_getuid();
1560 Uid_t tmp_euid = PerlProc_geteuid();
1561 Gid_t tmp_gid = PerlProc_getgid();
1562 Gid_t tmp_egid = PerlProc_getegid();
1564 /* XXX $> et al currently silently ignore failures */
1565 if (PL_delaymagic & DM_UID) {
1566 #ifdef HAS_SETRESUID
1568 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1569 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
1572 # ifdef HAS_SETREUID
1574 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1575 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
1578 if ((PL_delaymagic & DM_UID) == DM_RUID) {
1579 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
1580 PL_delaymagic &= ~DM_RUID;
1582 # endif /* HAS_SETRUID */
1584 if ((PL_delaymagic & DM_UID) == DM_EUID) {
1585 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
1586 PL_delaymagic &= ~DM_EUID;
1588 # endif /* HAS_SETEUID */
1589 if (PL_delaymagic & DM_UID) {
1590 if (PL_delaymagic_uid != PL_delaymagic_euid)
1591 DIE(aTHX_ "No setreuid available");
1592 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
1594 # endif /* HAS_SETREUID */
1595 #endif /* HAS_SETRESUID */
1597 tmp_uid = PerlProc_getuid();
1598 tmp_euid = PerlProc_geteuid();
1600 /* XXX $> et al currently silently ignore failures */
1601 if (PL_delaymagic & DM_GID) {
1602 #ifdef HAS_SETRESGID
1604 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1605 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
1608 # ifdef HAS_SETREGID
1610 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1611 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
1614 if ((PL_delaymagic & DM_GID) == DM_RGID) {
1615 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
1616 PL_delaymagic &= ~DM_RGID;
1618 # endif /* HAS_SETRGID */
1620 if ((PL_delaymagic & DM_GID) == DM_EGID) {
1621 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
1622 PL_delaymagic &= ~DM_EGID;
1624 # endif /* HAS_SETEGID */
1625 if (PL_delaymagic & DM_GID) {
1626 if (PL_delaymagic_gid != PL_delaymagic_egid)
1627 DIE(aTHX_ "No setregid available");
1628 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
1630 # endif /* HAS_SETREGID */
1631 #endif /* HAS_SETRESGID */
1633 tmp_gid = PerlProc_getgid();
1634 tmp_egid = PerlProc_getegid();
1636 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
1637 #ifdef NO_TAINT_SUPPORT
1638 PERL_UNUSED_VAR(tmp_uid);
1639 PERL_UNUSED_VAR(tmp_euid);
1640 PERL_UNUSED_VAR(tmp_gid);
1641 PERL_UNUSED_VAR(tmp_egid);
1644 PL_delaymagic = old_delaymagic;
1646 if (gimme == G_VOID)
1647 SP = firstrelem - 1;
1648 else if (gimme == G_SCALAR) {
1651 SETi(lastrelem - firstrelem + 1);
1655 /* note that in this case *firstlelem may have been overwritten
1656 by sv_undef in the odd hash case */
1659 SP = firstrelem + (lastlelem - firstlelem);
1660 lelem = firstlelem + (relem - firstrelem);
1662 *relem++ = (lelem <= lastlelem) ? *lelem++ : &PL_sv_undef;
1672 PMOP * const pm = cPMOP;
1673 REGEXP * rx = PM_GETRE(pm);
1674 SV * const pkg = rx ? CALLREG_PACKAGE(rx) : NULL;
1675 SV * const rv = sv_newmortal();
1679 SvUPGRADE(rv, SVt_IV);
1680 /* For a subroutine describing itself as "This is a hacky workaround" I'm
1681 loath to use it here, but it seems to be the right fix. Or close.
1682 The key part appears to be that it's essential for pp_qr to return a new
1683 object (SV), which implies that there needs to be an effective way to
1684 generate a new SV from the existing SV that is pre-compiled in the
1686 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));
1689 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
1690 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
1691 *cvp = cv_clone(cv);
1692 SvREFCNT_dec_NN(cv);
1696 HV *const stash = gv_stashsv(pkg, GV_ADD);
1697 SvREFCNT_dec_NN(pkg);
1698 (void)sv_bless(rv, stash);
1701 if (UNLIKELY(RX_ISTAINTED(rx))) {
1703 SvTAINTED_on(SvRV(rv));
1716 SSize_t curpos = 0; /* initial pos() or current $+[0] */
1719 const char *truebase; /* Start of string */
1720 REGEXP *rx = PM_GETRE(pm);
1722 const U8 gimme = GIMME_V;
1724 const I32 oldsave = PL_savestack_ix;
1725 I32 had_zerolen = 0;
1728 if (PL_op->op_flags & OPf_STACKED)
1737 PUTBACK; /* EVAL blocks need stack_sp. */
1738 /* Skip get-magic if this is a qr// clone, because regcomp has
1740 truebase = ReANY(rx)->mother_re
1741 ? SvPV_nomg_const(TARG, len)
1742 : SvPV_const(TARG, len);
1744 DIE(aTHX_ "panic: pp_match");
1745 strend = truebase + len;
1746 rxtainted = (RX_ISTAINTED(rx) ||
1747 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
1750 /* We need to know this in case we fail out early - pos() must be reset */
1751 global = dynpm->op_pmflags & PMf_GLOBAL;
1753 /* PMdf_USED is set after a ?? matches once */
1756 SvREADONLY(PL_regex_pad[pm->op_pmoffset])
1758 pm->op_pmflags & PMf_USED
1761 DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once"));
1765 /* empty pattern special-cased to use last successful pattern if
1766 possible, except for qr// */
1767 if (!ReANY(rx)->mother_re && !RX_PRELEN(rx)
1773 if (RX_MINLEN(rx) >= 0 && (STRLEN)RX_MINLEN(rx) > len) {
1774 DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
1775 UVuf" < %"IVdf")\n",
1776 (UV)len, (IV)RX_MINLEN(rx)));
1780 /* get pos() if //g */
1782 mg = mg_find_mglob(TARG);
1783 if (mg && mg->mg_len >= 0) {
1784 curpos = MgBYTEPOS(mg, TARG, truebase, len);
1785 /* last time pos() was set, it was zero-length match */
1786 if (mg->mg_flags & MGf_MINMATCH)
1791 #ifdef PERL_SAWAMPERSAND
1794 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
1795 || (dynpm->op_pmflags & PMf_KEEPCOPY)
1799 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
1800 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
1801 * only on the first iteration. Therefore we need to copy $' as well
1802 * as $&, to make the rest of the string available for captures in
1803 * subsequent iterations */
1804 if (! (global && gimme == G_ARRAY))
1805 r_flags |= REXEC_COPY_SKIP_POST;
1807 #ifdef PERL_SAWAMPERSAND
1808 if (dynpm->op_pmflags & PMf_KEEPCOPY)
1809 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
1810 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
1817 s = truebase + curpos;
1819 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
1820 had_zerolen, TARG, NULL, r_flags))
1824 if (dynpm->op_pmflags & PMf_ONCE)
1826 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
1828 dynpm->op_pmflags |= PMf_USED;
1832 RX_MATCH_TAINTED_on(rx);
1833 TAINT_IF(RX_MATCH_TAINTED(rx));
1837 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
1839 mg = sv_magicext_mglob(TARG);
1840 MgBYTEPOS_set(mg, TARG, truebase, RX_OFFS(rx)[0].end);
1841 if (RX_ZERO_LEN(rx))
1842 mg->mg_flags |= MGf_MINMATCH;
1844 mg->mg_flags &= ~MGf_MINMATCH;
1847 if ((!RX_NPARENS(rx) && !global) || gimme != G_ARRAY) {
1848 LEAVE_SCOPE(oldsave);
1852 /* push captures on stack */
1855 const I32 nparens = RX_NPARENS(rx);
1856 I32 i = (global && !nparens) ? 1 : 0;
1858 SPAGAIN; /* EVAL blocks could move the stack. */
1859 EXTEND(SP, nparens + i);
1860 EXTEND_MORTAL(nparens + i);
1861 for (i = !i; i <= nparens; i++) {
1862 PUSHs(sv_newmortal());
1863 if (LIKELY((RX_OFFS(rx)[i].start != -1)
1864 && RX_OFFS(rx)[i].end != -1 ))
1866 const I32 len = RX_OFFS(rx)[i].end - RX_OFFS(rx)[i].start;
1867 const char * const s = RX_OFFS(rx)[i].start + truebase;
1868 if (UNLIKELY(RX_OFFS(rx)[i].end < 0 || RX_OFFS(rx)[i].start < 0
1869 || len < 0 || len > strend - s))
1870 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
1871 "start=%ld, end=%ld, s=%p, strend=%p, len=%"UVuf,
1872 (long) i, (long) RX_OFFS(rx)[i].start,
1873 (long)RX_OFFS(rx)[i].end, s, strend, (UV) len);
1874 sv_setpvn(*SP, s, len);
1875 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
1880 curpos = (UV)RX_OFFS(rx)[0].end;
1881 had_zerolen = RX_ZERO_LEN(rx);
1882 PUTBACK; /* EVAL blocks may use stack */
1883 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
1886 LEAVE_SCOPE(oldsave);
1889 NOT_REACHED; /* NOTREACHED */
1892 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
1894 mg = mg_find_mglob(TARG);
1898 LEAVE_SCOPE(oldsave);
1899 if (gimme == G_ARRAY)
1905 Perl_do_readline(pTHX)
1907 dSP; dTARGETSTACKED;
1912 IO * const io = GvIO(PL_last_in_gv);
1913 const I32 type = PL_op->op_type;
1914 const U8 gimme = GIMME_V;
1917 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
1919 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
1920 if (gimme == G_SCALAR) {
1922 SvSetSV_nosteal(TARG, TOPs);
1932 if (IoFLAGS(io) & IOf_ARGV) {
1933 if (IoFLAGS(io) & IOf_START) {
1935 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) {
1936 IoFLAGS(io) &= ~IOf_START;
1937 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
1938 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
1939 sv_setpvs(GvSVn(PL_last_in_gv), "-");
1940 SvSETMAGIC(GvSV(PL_last_in_gv));
1945 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
1946 if (!fp) { /* Note: fp != IoIFP(io) */
1947 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
1950 else if (type == OP_GLOB)
1951 fp = Perl_start_glob(aTHX_ POPs, io);
1953 else if (type == OP_GLOB)
1955 else if (IoTYPE(io) == IoTYPE_WRONLY) {
1956 report_wrongway_fh(PL_last_in_gv, '>');
1960 if ((!io || !(IoFLAGS(io) & IOf_START))
1961 && ckWARN(WARN_CLOSED)
1964 report_evil_fh(PL_last_in_gv);
1966 if (gimme == G_SCALAR) {
1967 /* undef TARG, and push that undefined value */
1968 if (type != OP_RCATLINE) {
1969 sv_setsv(TARG,NULL);
1976 if (gimme == G_SCALAR) {
1978 if (type == OP_RCATLINE && SvGMAGICAL(sv))
1981 if (type == OP_RCATLINE)
1982 SvPV_force_nomg_nolen(sv);
1986 else if (isGV_with_GP(sv)) {
1987 SvPV_force_nomg_nolen(sv);
1989 SvUPGRADE(sv, SVt_PV);
1990 tmplen = SvLEN(sv); /* remember if already alloced */
1991 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
1992 /* try short-buffering it. Please update t/op/readline.t
1993 * if you change the growth length.
1998 if (type == OP_RCATLINE && SvOK(sv)) {
2000 SvPV_force_nomg_nolen(sv);
2006 sv = sv_2mortal(newSV(80));
2010 /* This should not be marked tainted if the fp is marked clean */
2011 #define MAYBE_TAINT_LINE(io, sv) \
2012 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
2017 /* delay EOF state for a snarfed empty file */
2018 #define SNARF_EOF(gimme,rs,io,sv) \
2019 (gimme != G_SCALAR || SvCUR(sv) \
2020 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
2024 if (!sv_gets(sv, fp, offset)
2026 || SNARF_EOF(gimme, PL_rs, io, sv)
2027 || PerlIO_error(fp)))
2029 PerlIO_clearerr(fp);
2030 if (IoFLAGS(io) & IOf_ARGV) {
2031 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
2034 (void)do_close(PL_last_in_gv, FALSE);
2036 else if (type == OP_GLOB) {
2037 if (!do_close(PL_last_in_gv, FALSE)) {
2038 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
2039 "glob failed (child exited with status %d%s)",
2040 (int)(STATUS_CURRENT >> 8),
2041 (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
2044 if (gimme == G_SCALAR) {
2045 if (type != OP_RCATLINE) {
2046 SV_CHECK_THINKFIRST_COW_DROP(TARG);
2052 MAYBE_TAINT_LINE(io, sv);
2055 MAYBE_TAINT_LINE(io, sv);
2057 IoFLAGS(io) |= IOf_NOLINE;
2061 if (type == OP_GLOB) {
2065 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
2066 char * const tmps = SvEND(sv) - 1;
2067 if (*tmps == *SvPVX_const(PL_rs)) {
2069 SvCUR_set(sv, SvCUR(sv) - 1);
2072 for (t1 = SvPVX_const(sv); *t1; t1++)
2074 if (strchr("*%?", *t1))
2076 if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
2079 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
2080 (void)POPs; /* Unmatched wildcard? Chuck it... */
2083 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
2084 if (ckWARN(WARN_UTF8)) {
2085 const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
2086 const STRLEN len = SvCUR(sv) - offset;
2089 if (!is_utf8_string_loc(s, len, &f))
2090 /* Emulate :encoding(utf8) warning in the same case. */
2091 Perl_warner(aTHX_ packWARN(WARN_UTF8),
2092 "utf8 \"\\x%02X\" does not map to Unicode",
2093 f < (U8*)SvEND(sv) ? *f : 0);
2096 if (gimme == G_ARRAY) {
2097 if (SvLEN(sv) - SvCUR(sv) > 20) {
2098 SvPV_shrink_to_cur(sv);
2100 sv = sv_2mortal(newSV(80));
2103 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
2104 /* try to reclaim a bit of scalar space (only on 1st alloc) */
2105 const STRLEN new_len
2106 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */
2107 SvPV_renew(sv, new_len);
2118 SV * const keysv = POPs;
2119 HV * const hv = MUTABLE_HV(POPs);
2120 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2121 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2123 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2124 bool preeminent = TRUE;
2126 if (SvTYPE(hv) != SVt_PVHV)
2133 /* If we can determine whether the element exists,
2134 * try to preserve the existence state of a tied hash
2135 * element by using EXISTS and DELETE if possible.
2136 * Fall back to FETCH and STORE otherwise. */
2137 if (SvCANEXISTDELETE(hv))
2138 preeminent = hv_exists_ent(hv, keysv, 0);
2141 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2142 svp = he ? &HeVAL(he) : NULL;
2144 if (!svp || !*svp || *svp == &PL_sv_undef) {
2148 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2150 lv = sv_newmortal();
2151 sv_upgrade(lv, SVt_PVLV);
2153 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
2154 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
2155 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2161 if (HvNAME_get(hv) && isGV(*svp))
2162 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
2163 else if (preeminent)
2164 save_helem_flags(hv, keysv, svp,
2165 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
2167 SAVEHDELETE(hv, keysv);
2169 else if (PL_op->op_private & OPpDEREF) {
2170 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
2174 sv = (svp && *svp ? *svp : &PL_sv_undef);
2175 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
2176 * was to make C<local $tied{foo} = $tied{foo}> possible.
2177 * However, it seems no longer to be needed for that purpose, and
2178 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g>
2179 * would loop endlessly since the pos magic is getting set on the
2180 * mortal copy and lost. However, the copy has the effect of
2181 * triggering the get magic, and losing it altogether made things like
2182 * c<$tied{foo};> in void context no longer do get magic, which some
2183 * code relied on. Also, delayed triggering of magic on @+ and friends
2184 * meant the original regex may be out of scope by now. So as a
2185 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
2186 * being called too many times). */
2187 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
2194 /* a stripped-down version of Perl_softref2xv() for use by
2195 * pp_multideref(), which doesn't use PL_op->op_flags */
2198 S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
2201 if (PL_op->op_private & HINT_STRICT_REFS) {
2203 Perl_die(aTHX_ PL_no_symref_sv, sv,
2204 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
2206 Perl_die(aTHX_ PL_no_usym, what);
2209 Perl_die(aTHX_ PL_no_usym, what);
2210 return gv_fetchsv_nomg(sv, GV_ADD, type);
2214 /* Handle one or more aggregate derefs and array/hash indexings, e.g.
2215 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
2217 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
2218 * Each of these either contains a set of actions, or an argument, such as
2219 * an IV to use as an array index, or a lexical var to retrieve.
2220 * Several actions are stored per UV; we keep shifting new actions off the
2221 * one UV, and only reload when it becomes zero.
2226 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
2227 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
2228 UV actions = items->uv;
2231 /* this tells find_uninit_var() where we're up to */
2232 PL_multideref_pc = items;
2235 /* there are three main classes of action; the first retrieves
2236 * the initial AV or HV from a variable or the stack; the second
2237 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
2238 * the third an unrolled (/DREFHV, rv2hv, helem).
2240 switch (actions & MDEREF_ACTION_MASK) {
2243 actions = (++items)->uv;
2246 case MDEREF_AV_padav_aelem: /* $lex[...] */
2247 sv = PAD_SVl((++items)->pad_offset);
2250 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
2251 sv = UNOP_AUX_item_sv(++items);
2252 assert(isGV_with_GP(sv));
2253 sv = (SV*)GvAVn((GV*)sv);
2256 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
2261 goto do_AV_rv2av_aelem;
2264 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
2265 sv = UNOP_AUX_item_sv(++items);
2266 assert(isGV_with_GP(sv));
2267 sv = GvSVn((GV*)sv);
2268 goto do_AV_vivify_rv2av_aelem;
2270 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
2271 sv = PAD_SVl((++items)->pad_offset);
2274 do_AV_vivify_rv2av_aelem:
2275 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
2276 /* this is the OPpDEREF action normally found at the end of
2277 * ops like aelem, helem, rv2sv */
2278 sv = vivify_ref(sv, OPpDEREF_AV);
2282 /* this is basically a copy of pp_rv2av when it just has the
2285 if (LIKELY(SvROK(sv))) {
2286 if (UNLIKELY(SvAMAGIC(sv))) {
2287 sv = amagic_deref_call(sv, to_av_amg);
2290 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
2291 DIE(aTHX_ "Not an ARRAY reference");
2293 else if (SvTYPE(sv) != SVt_PVAV) {
2294 if (!isGV_with_GP(sv))
2295 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
2296 sv = MUTABLE_SV(GvAVn((GV*)sv));
2302 /* retrieve the key; this may be either a lexical or package
2303 * var (whose index/ptr is stored as an item) or a signed
2304 * integer constant stored as an item.
2307 IV elem = 0; /* to shut up stupid compiler warnings */
2310 assert(SvTYPE(sv) == SVt_PVAV);
2312 switch (actions & MDEREF_INDEX_MASK) {
2313 case MDEREF_INDEX_none:
2315 case MDEREF_INDEX_const:
2316 elem = (++items)->iv;
2318 case MDEREF_INDEX_padsv:
2319 elemsv = PAD_SVl((++items)->pad_offset);
2321 case MDEREF_INDEX_gvsv:
2322 elemsv = UNOP_AUX_item_sv(++items);
2323 assert(isGV_with_GP(elemsv));
2324 elemsv = GvSVn((GV*)elemsv);
2326 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
2327 && ckWARN(WARN_MISC)))
2328 Perl_warner(aTHX_ packWARN(WARN_MISC),
2329 "Use of reference \"%"SVf"\" as array index",
2331 /* the only time that S_find_uninit_var() needs this
2332 * is to determine which index value triggered the
2333 * undef warning. So just update it here. Note that
2334 * since we don't save and restore this var (e.g. for
2335 * tie or overload execution), its value will be
2336 * meaningless apart from just here */
2337 PL_multideref_pc = items;
2338 elem = SvIV(elemsv);
2343 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
2345 if (!(actions & MDEREF_FLAG_last)) {
2346 SV** svp = av_fetch((AV*)sv, elem, 1);
2347 if (!svp || ! (sv=*svp))
2348 DIE(aTHX_ PL_no_aelem, elem);
2352 if (PL_op->op_private &
2353 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2355 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2356 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
2359 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2360 sv = av_delete((AV*)sv, elem, discard);
2368 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2369 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2370 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2371 bool preeminent = TRUE;
2372 AV *const av = (AV*)sv;
2375 if (UNLIKELY(localizing)) {
2379 /* If we can determine whether the element exists,
2380 * try to preserve the existence state of a tied array
2381 * element by using EXISTS and DELETE if possible.
2382 * Fall back to FETCH and STORE otherwise. */
2383 if (SvCANEXISTDELETE(av))
2384 preeminent = av_exists(av, elem);
2387 svp = av_fetch(av, elem, lval && !defer);
2390 if (!svp || !(sv = *svp)) {
2393 DIE(aTHX_ PL_no_aelem, elem);
2394 len = av_tindex(av);
2395 sv = sv_2mortal(newSVavdefelem(av,
2396 /* Resolve a negative index now, unless it points
2397 * before the beginning of the array, in which
2398 * case record it for error reporting in
2399 * magic_setdefelem. */
2400 elem < 0 && len + elem >= 0
2401 ? len + elem : elem, 1));
2404 if (UNLIKELY(localizing)) {
2406 save_aelem(av, elem, svp);
2407 sv = *svp; /* may have changed */
2410 SAVEADELETE(av, elem);
2415 sv = (svp ? *svp : &PL_sv_undef);
2416 /* see note in pp_helem() */
2417 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
2434 case MDEREF_HV_padhv_helem: /* $lex{...} */
2435 sv = PAD_SVl((++items)->pad_offset);
2438 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
2439 sv = UNOP_AUX_item_sv(++items);
2440 assert(isGV_with_GP(sv));
2441 sv = (SV*)GvHVn((GV*)sv);
2444 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
2449 goto do_HV_rv2hv_helem;
2452 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
2453 sv = UNOP_AUX_item_sv(++items);
2454 assert(isGV_with_GP(sv));
2455 sv = GvSVn((GV*)sv);
2456 goto do_HV_vivify_rv2hv_helem;
2458 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
2459 sv = PAD_SVl((++items)->pad_offset);
2462 do_HV_vivify_rv2hv_helem:
2463 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
2464 /* this is the OPpDEREF action normally found at the end of
2465 * ops like aelem, helem, rv2sv */
2466 sv = vivify_ref(sv, OPpDEREF_HV);
2470 /* this is basically a copy of pp_rv2hv when it just has the
2471 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
2474 if (LIKELY(SvROK(sv))) {
2475 if (UNLIKELY(SvAMAGIC(sv))) {
2476 sv = amagic_deref_call(sv, to_hv_amg);
2479 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
2480 DIE(aTHX_ "Not a HASH reference");
2482 else if (SvTYPE(sv) != SVt_PVHV) {
2483 if (!isGV_with_GP(sv))
2484 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
2485 sv = MUTABLE_SV(GvHVn((GV*)sv));
2491 /* retrieve the key; this may be either a lexical / package
2492 * var or a string constant, whose index/ptr is stored as an
2495 SV *keysv = NULL; /* to shut up stupid compiler warnings */
2497 assert(SvTYPE(sv) == SVt_PVHV);
2499 switch (actions & MDEREF_INDEX_MASK) {
2500 case MDEREF_INDEX_none:
2503 case MDEREF_INDEX_const:
2504 keysv = UNOP_AUX_item_sv(++items);
2507 case MDEREF_INDEX_padsv:
2508 keysv = PAD_SVl((++items)->pad_offset);
2511 case MDEREF_INDEX_gvsv:
2512 keysv = UNOP_AUX_item_sv(++items);
2513 keysv = GvSVn((GV*)keysv);
2517 /* see comment above about setting this var */
2518 PL_multideref_pc = items;
2521 /* ensure that candidate CONSTs have been HEKified */
2522 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
2523 || SvTYPE(keysv) >= SVt_PVMG
2526 || SvIsCOW_shared_hash(keysv));
2528 /* this is basically a copy of pp_helem with OPpDEREF skipped */
2530 if (!(actions & MDEREF_FLAG_last)) {
2531 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
2532 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
2533 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2537 if (PL_op->op_private &
2538 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2540 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2541 sv = hv_exists_ent((HV*)sv, keysv, 0)
2542 ? &PL_sv_yes : &PL_sv_no;
2545 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2546 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
2554 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2555 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2556 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2557 bool preeminent = TRUE;
2559 HV * const hv = (HV*)sv;
2562 if (UNLIKELY(localizing)) {
2566 /* If we can determine whether the element exists,
2567 * try to preserve the existence state of a tied hash
2568 * element by using EXISTS and DELETE if possible.
2569 * Fall back to FETCH and STORE otherwise. */
2570 if (SvCANEXISTDELETE(hv))
2571 preeminent = hv_exists_ent(hv, keysv, 0);
2574 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2575 svp = he ? &HeVAL(he) : NULL;
2579 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
2583 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2584 lv = sv_newmortal();
2585 sv_upgrade(lv, SVt_PVLV);
2587 sv_magic(lv, key2 = newSVsv(keysv),
2588 PERL_MAGIC_defelem, NULL, 0);
2589 /* sv_magic() increments refcount */
2590 SvREFCNT_dec_NN(key2);
2591 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2597 if (HvNAME_get(hv) && isGV(sv))
2598 save_gp(MUTABLE_GV(sv),
2599 !(PL_op->op_flags & OPf_SPECIAL));
2600 else if (preeminent) {
2601 save_helem_flags(hv, keysv, svp,
2602 (PL_op->op_flags & OPf_SPECIAL)
2603 ? 0 : SAVEf_SETMAGIC);
2604 sv = *svp; /* may have changed */
2607 SAVEHDELETE(hv, keysv);
2612 sv = (svp && *svp ? *svp : &PL_sv_undef);
2613 /* see note in pp_helem() */
2614 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
2623 actions >>= MDEREF_SHIFT;
2642 itersvp = CxITERVAR(cx);
2645 switch (CxTYPE(cx)) {
2647 case CXt_LOOP_LAZYSV: /* string increment */
2649 SV* cur = cx->blk_loop.state_u.lazysv.cur;
2650 SV *end = cx->blk_loop.state_u.lazysv.end;
2651 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
2652 It has SvPVX of "" and SvCUR of 0, which is what we want. */
2654 const char *max = SvPV_const(end, maxlen);
2655 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
2659 /* NB: on the first iteration, oldsv will have a ref count of at
2660 * least 2 (one extra from blk_loop.itersave), so the GV or pad
2661 * slot will get localised; on subsequent iterations the RC==1
2662 * optimisation may kick in and the SV will be reused. */
2663 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2664 /* safe to reuse old SV */
2665 sv_setsv(oldsv, cur);
2669 /* we need a fresh SV every time so that loop body sees a
2670 * completely new SV for closures/references to work as
2672 *itersvp = newSVsv(cur);
2673 SvREFCNT_dec(oldsv);
2675 if (strEQ(SvPVX_const(cur), max))
2676 sv_setiv(cur, 0); /* terminate next time */
2682 case CXt_LOOP_LAZYIV: /* integer increment */
2684 IV cur = cx->blk_loop.state_u.lazyiv.cur;
2685 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
2689 /* see NB comment above */
2690 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2691 /* safe to reuse old SV */
2693 if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV))
2696 /* Cheap SvIOK_only().
2697 * Assert that flags which SvIOK_only() would test or
2698 * clear can't be set, because we're SVt_IV */
2699 assert(!(SvFLAGS(oldsv) &
2700 (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK)))));
2701 SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK);
2702 /* SvIV_set() where sv_any points to head */
2703 oldsv->sv_u.svu_iv = cur;
2707 sv_setiv(oldsv, cur);
2711 /* we need a fresh SV every time so that loop body sees a
2712 * completely new SV for closures/references to work as they
2714 *itersvp = newSViv(cur);
2715 SvREFCNT_dec(oldsv);
2718 if (UNLIKELY(cur == IV_MAX)) {
2719 /* Handle end of range at IV_MAX */
2720 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
2722 ++cx->blk_loop.state_u.lazyiv.cur;
2726 case CXt_LOOP_LIST: /* for (1,2,3) */
2728 assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */
2729 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
2730 ix = (cx->blk_loop.state_u.stack.ix += inc);
2731 if (UNLIKELY(inc > 0
2732 ? ix > cx->blk_oldsp
2733 : ix <= cx->blk_loop.state_u.stack.basesp)
2737 sv = PL_stack_base[ix];
2739 goto loop_ary_common;
2741 case CXt_LOOP_ARY: /* for (@ary) */
2743 av = cx->blk_loop.state_u.ary.ary;
2744 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
2745 ix = (cx->blk_loop.state_u.ary.ix += inc);
2746 if (UNLIKELY(inc > 0
2752 if (UNLIKELY(SvRMAGICAL(av))) {
2753 SV * const * const svp = av_fetch(av, ix, FALSE);
2754 sv = svp ? *svp : NULL;
2757 sv = AvARRAY(av)[ix];
2762 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
2763 SvSetMagicSV(*itersvp, sv);
2768 if (UNLIKELY(SvIS_FREED(sv))) {
2770 Perl_croak(aTHX_ "Use of freed value in iteration");
2777 SvREFCNT_inc_simple_void_NN(sv);
2781 sv = newSVavdefelem(av, ix, 0);
2788 SvREFCNT_dec(oldsv);
2792 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
2800 /* pp_enteriter should have pre-extended the stack */
2801 assert(PL_stack_sp < PL_stack_max);
2802 *++PL_stack_sp =retsv;
2804 return PL_op->op_next;
2808 A description of how taint works in pattern matching and substitution.
2810 This is all conditional on NO_TAINT_SUPPORT not being defined. Under
2811 NO_TAINT_SUPPORT, taint-related operations should become no-ops.
2813 While the pattern is being assembled/concatenated and then compiled,
2814 PL_tainted will get set (via TAINT_set) if any component of the pattern
2815 is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
2816 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
2817 TAINT_get). It will also be set if any component of the pattern matches
2818 based on locale-dependent behavior.
2820 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
2821 the pattern is marked as tainted. This means that subsequent usage, such
2822 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
2823 on the new pattern too.
2825 RXf_TAINTED_SEEN is used post-execution by the get magic code
2826 of $1 et al to indicate whether the returned value should be tainted.
2827 It is the responsibility of the caller of the pattern (i.e. pp_match,
2828 pp_subst etc) to set this flag for any other circumstances where $1 needs
2831 The taint behaviour of pp_subst (and pp_substcont) is quite complex.
2833 There are three possible sources of taint
2835 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
2836 * the replacement string (or expression under /e)
2838 There are four destinations of taint and they are affected by the sources
2839 according to the rules below:
2841 * the return value (not including /r):
2842 tainted by the source string and pattern, but only for the
2843 number-of-iterations case; boolean returns aren't tainted;
2844 * the modified string (or modified copy under /r):
2845 tainted by the source string, pattern, and replacement strings;
2847 tainted by the pattern, and under 'use re "taint"', by the source
2849 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
2850 should always be unset before executing subsequent code.
2852 The overall action of pp_subst is:
2854 * at the start, set bits in rxtainted indicating the taint status of
2855 the various sources.
2857 * After each pattern execution, update the SUBST_TAINT_PAT bit in
2858 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
2859 pattern has subsequently become tainted via locale ops.
2861 * If control is being passed to pp_substcont to execute a /e block,
2862 save rxtainted in the CXt_SUBST block, for future use by
2865 * Whenever control is being returned to perl code (either by falling
2866 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
2867 use the flag bits in rxtainted to make all the appropriate types of
2868 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
2869 et al will appear tainted.
2871 pp_match is just a simpler version of the above.
2887 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
2888 See "how taint works" above */
2891 REGEXP *rx = PM_GETRE(pm);
2893 int force_on_match = 0;
2894 const I32 oldsave = PL_savestack_ix;
2896 bool doutf8 = FALSE; /* whether replacement is in utf8 */
2901 /* known replacement string? */
2902 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
2906 if (PL_op->op_flags & OPf_STACKED)
2915 SvGETMAGIC(TARG); /* must come before cow check */
2917 /* note that a string might get converted to COW during matching */
2918 was_cow = cBOOL(SvIsCOW(TARG));
2920 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
2921 #ifndef PERL_ANY_COW
2923 sv_force_normal_flags(TARG,0);
2925 if ((SvREADONLY(TARG)
2926 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
2927 || SvTYPE(TARG) > SVt_PVLV)
2928 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
2929 Perl_croak_no_modify();
2933 orig = SvPV_nomg(TARG, len);
2934 /* note we don't (yet) force the var into being a string; if we fail
2935 * to match, we leave as-is; on successful match however, we *will*
2936 * coerce into a string, then repeat the match */
2937 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
2940 /* only replace once? */
2941 once = !(rpm->op_pmflags & PMf_GLOBAL);
2943 /* See "how taint works" above */
2946 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
2947 | (RX_ISTAINTED(rx) ? SUBST_TAINT_PAT : 0)
2948 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
2949 | ((once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
2950 ? SUBST_TAINT_BOOLRET : 0));
2956 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
2958 strend = orig + len;
2959 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len;
2960 maxiters = 2 * slen + 10; /* We can match twice at each
2961 position, once with zero-length,
2962 second time with non-zero. */
2964 if (!RX_PRELEN(rx) && PL_curpm
2965 && !ReANY(rx)->mother_re) {
2970 #ifdef PERL_SAWAMPERSAND
2971 r_flags = ( RX_NPARENS(rx)
2973 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
2974 || (rpm->op_pmflags & PMf_KEEPCOPY)
2979 r_flags = REXEC_COPY_STR;
2982 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
2985 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
2986 LEAVE_SCOPE(oldsave);
2991 /* known replacement string? */
2993 /* replacement needing upgrading? */
2994 if (DO_UTF8(TARG) && !doutf8) {
2995 nsv = sv_newmortal();
2998 sv_recode_to_utf8(nsv, _get_encoding());
3000 sv_utf8_upgrade(nsv);
3001 c = SvPV_const(nsv, clen);
3005 c = SvPV_const(dstr, clen);
3006 doutf8 = DO_UTF8(dstr);
3009 if (SvTAINTED(dstr))
3010 rxtainted |= SUBST_TAINT_REPL;
3017 /* can do inplace substitution? */
3022 && (I32)clen <= RX_MINLENRET(rx)
3024 || !(r_flags & REXEC_COPY_STR)
3025 || (!SvGMAGICAL(dstr) && !(RX_EXTFLAGS(rx) & RXf_EVAL_SEEN))
3027 && !(RX_EXTFLAGS(rx) & RXf_NO_INPLACE_SUBST)
3028 && (!doutf8 || SvUTF8(TARG))
3029 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
3033 /* string might have got converted to COW since we set was_cow */
3034 if (SvIsCOW(TARG)) {
3035 if (!force_on_match)
3037 assert(SvVOK(TARG));
3040 if (force_on_match) {
3041 /* redo the first match, this time with the orig var
3042 * forced into being a string */
3044 orig = SvPV_force_nomg(TARG, len);
3050 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3051 rxtainted |= SUBST_TAINT_PAT;
3052 m = orig + RX_OFFS(rx)[0].start;
3053 d = orig + RX_OFFS(rx)[0].end;
3055 if (m - s > strend - d) { /* faster to shorten from end */
3058 Copy(c, m, clen, char);
3063 Move(d, m, i, char);
3067 SvCUR_set(TARG, m - s);
3069 else { /* faster from front */
3073 Move(s, d - i, i, char);
3076 Copy(c, d, clen, char);
3083 d = s = RX_OFFS(rx)[0].start + orig;
3086 if (UNLIKELY(iters++ > maxiters))
3087 DIE(aTHX_ "Substitution loop");
3088 if (UNLIKELY(RX_MATCH_TAINTED(rx))) /* run time pattern taint, eg locale */
3089 rxtainted |= SUBST_TAINT_PAT;
3090 m = RX_OFFS(rx)[0].start + orig;
3093 Move(s, d, i, char);
3097 Copy(c, d, clen, char);
3100 s = RX_OFFS(rx)[0].end + orig;
3101 } while (CALLREGEXEC(rx, s, strend, orig,
3102 s == m, /* don't match same null twice */
3104 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3107 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
3108 Move(s, d, i+1, char); /* include the NUL */
3118 if (force_on_match) {
3119 /* redo the first match, this time with the orig var
3120 * forced into being a string */
3122 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3123 /* I feel that it should be possible to avoid this mortal copy
3124 given that the code below copies into a new destination.
3125 However, I suspect it isn't worth the complexity of
3126 unravelling the C<goto force_it> for the small number of
3127 cases where it would be viable to drop into the copy code. */
3128 TARG = sv_2mortal(newSVsv(TARG));
3130 orig = SvPV_force_nomg(TARG, len);
3136 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3137 rxtainted |= SUBST_TAINT_PAT;
3139 s = RX_OFFS(rx)[0].start + orig;
3140 dstr = newSVpvn_flags(orig, s-orig,
3141 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
3146 /* note that a whole bunch of local vars are saved here for
3147 * use by pp_substcont: here's a list of them in case you're
3148 * searching for places in this sub that uses a particular var:
3149 * iters maxiters r_flags oldsave rxtainted orig dstr targ
3150 * s m strend rx once */
3152 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
3156 if (UNLIKELY(iters++ > maxiters))
3157 DIE(aTHX_ "Substitution loop");
3158 if (UNLIKELY(RX_MATCH_TAINTED(rx)))
3159 rxtainted |= SUBST_TAINT_PAT;
3160 if (RX_MATCH_COPIED(rx) && RX_SUBBEG(rx) != orig) {
3162 char *old_orig = orig;
3163 assert(RX_SUBOFFSET(rx) == 0);
3165 orig = RX_SUBBEG(rx);
3166 s = orig + (old_s - old_orig);
3167 strend = s + (strend - old_s);
3169 m = RX_OFFS(rx)[0].start + orig;
3170 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
3171 s = RX_OFFS(rx)[0].end + orig;
3173 /* replacement already stringified */
3175 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
3180 if (!nsv) nsv = sv_newmortal();
3181 sv_copypv(nsv, repl);
3182 if (!DO_UTF8(nsv)) sv_recode_to_utf8(nsv, _get_encoding());
3183 sv_catsv(dstr, nsv);
3185 else sv_catsv(dstr, repl);
3186 if (UNLIKELY(SvTAINTED(repl)))
3187 rxtainted |= SUBST_TAINT_REPL;
3191 } while (CALLREGEXEC(rx, s, strend, orig,
3192 s == m, /* Yields minend of 0 or 1 */
3194 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3195 assert(strend >= s);
3196 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
3198 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3199 /* From here on down we're using the copy, and leaving the original
3206 /* The match may make the string COW. If so, brilliant, because
3207 that's just saved us one malloc, copy and free - the regexp has
3208 donated the old buffer, and we malloc an entirely new one, rather
3209 than the regexp malloc()ing a buffer and copying our original,
3210 only for us to throw it away here during the substitution. */
3211 if (SvIsCOW(TARG)) {
3212 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
3218 SvPV_set(TARG, SvPVX(dstr));
3219 SvCUR_set(TARG, SvCUR(dstr));
3220 SvLEN_set(TARG, SvLEN(dstr));
3221 SvFLAGS(TARG) |= SvUTF8(dstr);
3222 SvPV_set(dstr, NULL);
3229 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3230 (void)SvPOK_only_UTF8(TARG);
3233 /* See "how taint works" above */
3235 if ((rxtainted & SUBST_TAINT_PAT) ||
3236 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
3237 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
3239 (RX_MATCH_TAINTED_on(rx)); /* taint $1 et al */
3241 if (!(rxtainted & SUBST_TAINT_BOOLRET)
3242 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
3244 SvTAINTED_on(TOPs); /* taint return value */
3246 SvTAINTED_off(TOPs); /* may have got tainted earlier */
3248 /* needed for mg_set below */
3250 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
3254 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
3256 LEAVE_SCOPE(oldsave);
3265 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
3266 ++*PL_markstack_ptr;
3268 LEAVE_with_name("grep_item"); /* exit inner scope */
3271 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
3273 const U8 gimme = GIMME_V;
3275 LEAVE_with_name("grep"); /* exit outer scope */
3276 (void)POPMARK; /* pop src */
3277 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
3278 (void)POPMARK; /* pop dst */
3279 SP = PL_stack_base + POPMARK; /* pop original mark */
3280 if (gimme == G_SCALAR) {
3284 else if (gimme == G_ARRAY)
3291 ENTER_with_name("grep_item"); /* enter inner scope */
3294 src = PL_stack_base[TOPMARK];
3295 if (SvPADTMP(src)) {
3296 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
3302 RETURNOP(cLOGOP->op_other);
3306 /* leave_adjust_stacks():
3308 * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp),
3309 * positioning them at to_sp+1 onwards, and do the equivalent of a
3310 * FREEMPS and TAINT_NOT.
3312 * Not intended to be called in void context.
3314 * When leaving a sub, eval, do{} or other scope, the things that need
3315 * doing to process the return args are:
3316 * * in scalar context, only return the last arg (or PL_sv_undef if none);
3317 * * for the types of return that return copies of their args (such
3318 * as rvalue sub return), make a mortal copy of every return arg,
3319 * except where we can optimise the copy away without it being
3320 * semantically visible;
3321 * * make sure that the arg isn't prematurely freed; in the case of an
3322 * arg not copied, this may involve mortalising it. For example, in
3323 * C<sub f { my $x = ...; $x }>, $x would be freed when we do
3324 * CX_LEAVE_SCOPE(cx) unless it's protected or copied.
3326 * What condition to use when deciding whether to pass the arg through
3327 * or make a copy, is determined by the 'pass' arg; its valid values are:
3328 * 0: rvalue sub/eval exit
3329 * 1: other rvalue scope exit
3330 * 2: :lvalue sub exit in rvalue context
3331 * 3: :lvalue sub exit in lvalue context and other lvalue scope exits
3333 * There is a big issue with doing a FREETMPS. We would like to free any
3334 * temps created by the last statement which the sub executed, rather than
3335 * leaving them for the caller. In a situation where a sub call isn't
3336 * soon followed by a nextstate (e.g. nested recursive calls, a la
3337 * fibonacci()), temps can accumulate, causing memory and performance
3340 * On the other hand, we don't want to free any TEMPs which are keeping
3341 * alive any return args that we skipped copying; nor do we wish to undo
3342 * any mortalising done here.
3344 * The solution is to split the temps stack frame into two, with a cut
3345 * point delineating the two halves. We arrange that by the end of this
3346 * function, all the temps stack frame entries we wish to keep are in the
3347 * range PL_tmps_floor+1.. tmps_base-1, while the ones to free now are in
3348 * the range tmps_base .. PL_tmps_ix. During the course of this
3349 * function, tmps_base starts off as PL_tmps_floor+1, then increases
3350 * whenever we find or create a temp that we know should be kept. In
3351 * general the stuff above tmps_base is undecided until we reach the end,
3352 * and we may need a sort stage for that.
3354 * To determine whether a TEMP is keeping a return arg alive, every
3355 * arg that is kept rather than copied and which has the SvTEMP flag
3356 * set, has the flag temporarily unset, to mark it. At the end we scan
3357 * the temps stack frame above the cut for entries without SvTEMP and
3358 * keep them, while turning SvTEMP on again. Note that if we die before
3359 * the SvTEMP flags are set again, it's safe: at worst, subsequent use of
3360 * those SVs may be slightly less efficient.
3362 * In practice various optimisations for some common cases mean we can
3363 * avoid most of the scanning and swapping about with the temps stack.
3367 Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass)
3371 SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
3374 PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS;
3378 if (gimme == G_ARRAY) {
3379 nargs = SP - from_sp;
3383 assert(gimme == G_SCALAR);
3384 if (UNLIKELY(from_sp >= SP)) {
3385 /* no return args */
3386 assert(from_sp == SP);
3388 *++SP = &PL_sv_undef;
3398 /* common code for G_SCALAR and G_ARRAY */
3400 tmps_base = PL_tmps_floor + 1;
3404 /* pointer version of tmps_base. Not safe across temp stack
3408 EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
3409 tmps_basep = PL_tmps_stack + tmps_base;
3411 /* process each return arg */
3414 SV *sv = *from_sp++;
3416 assert(PL_tmps_ix + nargs < PL_tmps_max);
3418 /* PADTMPs with container set magic shouldn't appear in the
3419 * wild. This assert is more important for pp_leavesublv(),
3420 * but by testing for it here, we're more likely to catch
3421 * bad cases (what with :lvalue subs not being widely
3422 * deployed). The two issues are that for something like
3423 * sub :lvalue { $tied{foo} }
3425 * sub :lvalue { substr($foo,1,2) }
3426 * pp_leavesublv() will croak if the sub returns a PADTMP,
3427 * and currently functions like pp_substr() return a mortal
3428 * rather than using their PADTMP when returning a PVLV.
3429 * This is because the PVLV will hold a ref to $foo,
3430 * so $foo would get delayed in being freed while
3431 * the PADTMP SV remained in the PAD.
3432 * So if this assert fails it means either:
3433 * 1) there is pp code similar to pp_substr that is
3434 * returning a PADTMP instead of a mortal, and probably
3436 * 2) pp_leavesublv is making unwarranted assumptions
3437 * about always croaking on a PADTMP
3439 if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
3441 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
3442 assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
3448 pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3449 : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3450 : pass == 2 ? (!SvPADTMP(sv))
3453 /* pass through: skip copy for logic or optimisation
3454 * reasons; instead mortalise it, except that ... */
3458 /* ... since this SV is an SvTEMP , we don't need to
3459 * re-mortalise it; instead we just need to ensure
3460 * that its existing entry in the temps stack frame
3461 * ends up below the cut and so avoids being freed
3462 * this time round. We mark it as needing to be kept
3463 * by temporarily unsetting SvTEMP; then at the end,
3464 * we shuffle any !SvTEMP entries on the tmps stack
3465 * back below the cut.
3466 * However, there's a significant chance that there's
3467 * a 1:1 correspondence between the first few (or all)
3468 * elements in the return args stack frame and those
3469 * in the temps stack frame; e.g.:
3470 * sub f { ....; map {...} .... },
3471 * or if we're exiting multiple scopes and one of the
3472 * inner scopes has already made mortal copies of each
3475 * If so, this arg sv will correspond to the next item
3476 * on the tmps stack above the cut, and so can be kept
3477 * merely by moving the cut boundary up one, rather
3478 * than messing with SvTEMP. If all args are 1:1 then
3479 * we can avoid the sorting stage below completely.
3481 * If there are no items above the cut on the tmps
3482 * stack, then the SvTEMP must come from an item
3483 * below the cut, so there's nothing to do.
3485 if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) {
3486 if (sv == *tmps_basep)
3492 else if (!SvPADTMP(sv)) {
3493 /* mortalise arg to avoid it being freed during save
3494 * stack unwinding. Pad tmps don't need mortalising as
3495 * they're never freed. This is the equivalent of
3496 * sv_2mortal(SvREFCNT_inc(sv)), except that:
3497 * * it assumes that the temps stack has already been
3499 * * it puts the new item at the cut rather than at
3500 * ++PL_tmps_ix, moving the previous occupant there
3503 if (!SvIMMORTAL(sv)) {
3504 SvREFCNT_inc_simple_void_NN(sv);
3506 /* Note that if there's nothing above the cut,
3507 * this copies the garbage one slot above
3508 * PL_tmps_ix onto itself. This is harmless (the
3509 * stack's already been extended), but might in
3510 * theory trigger warnings from tools like ASan
3512 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3518 /* Make a mortal copy of the SV.
3519 * The following code is the equivalent of sv_mortalcopy()
3521 * * it assumes the temps stack has already been extended;
3522 * * it optimises the copying for some simple SV types;
3523 * * it puts the new item at the cut rather than at
3524 * ++PL_tmps_ix, moving the previous occupant there
3527 SV *newsv = newSV(0);
3529 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3530 /* put it on the tmps stack early so it gets freed if we die */
3531 *tmps_basep++ = newsv;
3534 if (SvTYPE(sv) <= SVt_IV) {
3535 /* arg must be one of undef, IV/UV, or RV: skip
3536 * sv_setsv_flags() and do the copy directly */
3538 U32 srcflags = SvFLAGS(sv);
3540 assert(!SvGMAGICAL(sv));
3541 if (srcflags & (SVf_IOK|SVf_ROK)) {
3542 SET_SVANY_FOR_BODYLESS_IV(newsv);
3544 if (srcflags & SVf_ROK) {
3545 newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
3546 /* SV type plus flags */
3547 dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
3550 /* both src and dst are <= SVt_IV, so sv_any
3551 * points to the head; so access the heads
3552 * directly rather than going via sv_any.
3554 assert( &(sv->sv_u.svu_iv)
3555 == &(((XPVIV*) SvANY(sv))->xiv_iv));
3556 assert( &(newsv->sv_u.svu_iv)
3557 == &(((XPVIV*) SvANY(newsv))->xiv_iv));
3558 newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
3559 /* SV type plus flags */
3560 dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
3561 |(srcflags & SVf_IVisUV));
3565 assert(!(srcflags & SVf_OK));
3566 dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
3568 SvFLAGS(newsv) = dstflags;
3572 /* do the full sv_setsv() */
3576 old_base = tmps_basep - PL_tmps_stack;
3578 sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
3579 /* the mg_get or sv_setsv might have created new temps
3580 * or realloced the tmps stack; regrow and reload */
3581 EXTEND_MORTAL(nargs);
3582 tmps_basep = PL_tmps_stack + old_base;
3583 TAINT_NOT; /* Each item is independent */
3589 /* If there are any temps left above the cut, we need to sort
3590 * them into those to keep and those to free. The only ones to
3591 * keep are those for which we've temporarily unset SvTEMP.
3592 * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
3593 * swapping pairs as necessary. Stop when we meet in the middle.
3596 SV **top = PL_tmps_stack + PL_tmps_ix;
3597 while (tmps_basep <= top) {
3610 tmps_base = tmps_basep - PL_tmps_stack;
3613 PL_stack_sp = to_sp;
3615 /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
3616 while (PL_tmps_ix >= tmps_base) {
3617 SV* const sv = PL_tmps_stack[PL_tmps_ix--];
3619 PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
3623 SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
3629 /* also tail-called by pp_return */
3639 assert(CxTYPE(cx) == CXt_SUB);
3641 if (CxMULTICALL(cx)) {
3642 /* entry zero of a stack is always PL_sv_undef, which
3643 * simplifies converting a '()' return into undef in scalar context */
3644 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
3648 gimme = cx->blk_gimme;
3649 oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */
3651 if (gimme == G_VOID)
3652 PL_stack_sp = oldsp;
3654 leave_adjust_stacks(oldsp, oldsp, gimme, 0);
3657 cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */
3659 retop = cx->blk_sub.retop;
3666 /* clear (if possible) or abandon the current @_. If 'abandon' is true,
3667 * forces an abandon */
3670 Perl_clear_defarray(pTHX_ AV* av, bool abandon)
3672 const SSize_t fill = AvFILLp(av);
3674 PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
3676 if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) {
3681 AV *newav = newAV();
3682 av_extend(newav, fill);
3683 AvREIFY_only(newav);
3684 PAD_SVl(0) = MUTABLE_SV(newav);
3685 SvREFCNT_dec_NN(av);
3696 I32 old_savestack_ix;
3701 /* Locate the CV to call:
3702 * - most common case: RV->CV: f(), $ref->():
3703 * note that if a sub is compiled before its caller is compiled,
3704 * the stash entry will be a ref to a CV, rather than being a GV.
3705 * - second most common case: CV: $ref->method()
3708 /* a non-magic-RV -> CV ? */
3709 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
3710 cv = MUTABLE_CV(SvRV(sv));
3711 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
3715 cv = MUTABLE_CV(sv);
3718 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
3719 /* handle all the weird cases */
3720 switch (SvTYPE(sv)) {
3722 if (!isGV_with_GP(sv))
3726 cv = GvCVu((const GV *)sv);
3727 if (UNLIKELY(!cv)) {
3729 cv = sv_2cv(sv, &stash, &gv, 0);
3731 old_savestack_ix = PL_savestack_ix;
3742 if (UNLIKELY(SvAMAGIC(sv))) {
3743 sv = amagic_deref_call(sv, to_cv_amg);
3744 /* Don't SPAGAIN here. */
3750 if (UNLIKELY(!SvOK(sv)))
3751 DIE(aTHX_ PL_no_usym, "a subroutine");
3753 if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
3754 if (PL_op->op_flags & OPf_STACKED) /* hasargs */
3755 SP = PL_stack_base + POPMARK;
3758 if (GIMME_V == G_SCALAR)
3759 PUSHs(&PL_sv_undef);
3763 sym = SvPV_nomg_const(sv, len);
3764 if (PL_op->op_private & HINT_STRICT_REFS)
3765 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
3766 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
3769 cv = MUTABLE_CV(SvRV(sv));
3770 if (LIKELY(SvTYPE(cv) == SVt_PVCV))
3776 DIE(aTHX_ "Not a CODE reference");
3780 /* At this point we want to save PL_savestack_ix, either by doing a
3781 * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final
3782 * CV we will be using (so we don't know whether its XS, so we can't
3783 * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on
3784 * the save stack. So remember where we are currently on the save
3785 * stack, and later update the CX or scopestack entry accordingly. */
3786 old_savestack_ix = PL_savestack_ix;
3788 /* these two fields are in a union. If they ever become separate,
3789 * we have to test for both of them being null below */
3791 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
3792 while (UNLIKELY(!CvROOT(cv))) {
3796 /* anonymous or undef'd function leaves us no recourse */
3797 if (CvLEXICAL(cv) && CvHASGV(cv))
3798 DIE(aTHX_ "Undefined subroutine &%"SVf" called",
3799 SVfARG(cv_name(cv, NULL, 0)));
3800 if (CvANON(cv) || !CvHASGV(cv)) {
3801 DIE(aTHX_ "Undefined subroutine called");
3804 /* autoloaded stub? */
3805 if (cv != GvCV(gv = CvGV(cv))) {
3808 /* should call AUTOLOAD now? */
3811 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
3812 GvNAMEUTF8(gv) ? SVf_UTF8 : 0);
3813 cv = autogv ? GvCV(autogv) : NULL;
3816 sub_name = sv_newmortal();
3817 gv_efullname3(sub_name, gv, NULL);
3818 DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
3822 /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
3823 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
3824 DIE(aTHX_ "Closure prototype called");
3826 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
3829 Perl_get_db_sub(aTHX_ &sv, cv);
3831 PL_curcopdb = PL_curcop;
3833 /* check for lsub that handles lvalue subroutines */
3834 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
3835 /* if lsub not found then fall back to DB::sub */
3836 if (!cv) cv = GvCV(PL_DBsub);
3838 cv = GvCV(PL_DBsub);
3841 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
3842 DIE(aTHX_ "No DB::sub routine defined");
3845 if (!(CvISXSUB(cv))) {
3846 /* This path taken at least 75% of the time */
3853 /* keep PADTMP args alive throughout the call (we need to do this
3854 * because @_ isn't refcounted). Note that we create the mortals
3855 * in the caller's tmps frame, so they won't be freed until after
3856 * we return from the sub.
3865 *svp = sv = sv_mortalcopy(sv);
3871 cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix);
3872 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
3873 cx_pushsub(cx, cv, PL_op->op_next, hasargs);
3875 padlist = CvPADLIST(cv);
3876 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2))
3877 pad_push(padlist, depth);
3878 PAD_SET_CUR_NOSAVE(padlist, depth);
3879 if (LIKELY(hasargs)) {
3880 AV *const av = MUTABLE_AV(PAD_SVl(0));
3884 defavp = &GvAV(PL_defgv);
3885 cx->blk_sub.savearray = *defavp;
3886 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
3888 /* it's the responsibility of whoever leaves a sub to ensure
3889 * that a clean, empty AV is left in pad[0]. This is normally
3890 * done by cx_popsub() */
3891 assert(!AvREAL(av) && AvFILLp(av) == -1);
3894 if (UNLIKELY(items - 1 > AvMAX(av))) {
3895 SV **ary = AvALLOC(av);
3896 AvMAX(av) = items - 1;
3897 Renew(ary, items, SV*);
3902 Copy(MARK+1,AvARRAY(av),items,SV*);
3903 AvFILLp(av) = items - 1;
3905 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3907 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3908 SVfARG(cv_name(cv, NULL, 0)));
3909 /* warning must come *after* we fully set up the context
3910 * stuff so that __WARN__ handlers can safely dounwind()
3913 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
3914 && ckWARN(WARN_RECURSION)
3915 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
3916 sub_crush_depth(cv);
3917 RETURNOP(CvSTART(cv));
3920 SSize_t markix = TOPMARK;
3924 /* pretend we did the ENTER earlier */
3925 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
3930 if (UNLIKELY(((PL_op->op_private
3931 & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
3932 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3934 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3935 SVfARG(cv_name(cv, NULL, 0)));
3937 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
3938 /* Need to copy @_ to stack. Alternative may be to
3939 * switch stack to @_, and copy return values
3940 * back. This would allow popping @_ in XSUB, e.g.. XXXX */
3941 AV * const av = GvAV(PL_defgv);
3942 const SSize_t items = AvFILL(av) + 1;
3946 const bool m = cBOOL(SvRMAGICAL(av));
3947 /* Mark is at the end of the stack. */
3949 for (; i < items; ++i)
3953 SV ** const svp = av_fetch(av, i, 0);
3954 sv = svp ? *svp : NULL;
3956 else sv = AvARRAY(av)[i];
3957 if (sv) SP[i+1] = sv;
3959 SP[i+1] = newSVavdefelem(av, i, 1);
3967 SV **mark = PL_stack_base + markix;
3968 SSize_t items = SP - mark;
3971 if (*mark && SvPADTMP(*mark)) {
3972 *mark = sv_mortalcopy(*mark);
3976 /* We assume first XSUB in &DB::sub is the called one. */
3977 if (UNLIKELY(PL_curcopdb)) {
3978 SAVEVPTR(PL_curcop);
3979 PL_curcop = PL_curcopdb;
3982 /* Do we need to open block here? XXXX */
3984 /* calculate gimme here as PL_op might get changed and then not
3985 * restored until the LEAVE further down */
3986 is_scalar = (GIMME_V == G_SCALAR);
3988 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
3990 CvXSUB(cv)(aTHX_ cv);
3992 /* Enforce some sanity in scalar context. */
3994 SV **svp = PL_stack_base + markix + 1;
3995 if (svp != PL_stack_sp) {
3996 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
4006 Perl_sub_crush_depth(pTHX_ CV *cv)
4008 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
4011 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
4013 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%"SVf"\"",
4014 SVfARG(cv_name(cv,NULL,0)));
4022 SV* const elemsv = POPs;
4023 IV elem = SvIV(elemsv);
4024 AV *const av = MUTABLE_AV(POPs);
4025 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
4026 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
4027 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
4028 bool preeminent = TRUE;
4031 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
4032 Perl_warner(aTHX_ packWARN(WARN_MISC),
4033 "Use of reference \"%"SVf"\" as array index",
4035 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
4038 if (UNLIKELY(localizing)) {
4042 /* If we can determine whether the element exists,
4043 * try to preserve the existence state of a tied array
4044 * element by using EXISTS and DELETE if possible.
4045 * Fall back to FETCH and STORE otherwise. */
4046 if (SvCANEXISTDELETE(av))
4047 preeminent = av_exists(av, elem);
4050 svp = av_fetch(av, elem, lval && !defer);
4052 #ifdef PERL_MALLOC_WRAP
4053 if (SvUOK(elemsv)) {
4054 const UV uv = SvUV(elemsv);
4055 elem = uv > IV_MAX ? IV_MAX : uv;
4057 else if (SvNOK(elemsv))
4058 elem = (IV)SvNV(elemsv);
4060 static const char oom_array_extend[] =
4061 "Out of memory during array extend"; /* Duplicated in av.c */
4062 MEM_WRAP_CHECK_1(elem,SV*,oom_array_extend);
4065 if (!svp || !*svp) {
4068 DIE(aTHX_ PL_no_aelem, elem);
4069 len = av_tindex(av);
4070 mPUSHs(newSVavdefelem(av,
4071 /* Resolve a negative index now, unless it points before the
4072 beginning of the array, in which case record it for error
4073 reporting in magic_setdefelem. */
4074 elem < 0 && len + elem >= 0 ? len + elem : elem,
4078 if (UNLIKELY(localizing)) {
4080 save_aelem(av, elem, svp);
4082 SAVEADELETE(av, elem);
4084 else if (PL_op->op_private & OPpDEREF) {
4085 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
4089 sv = (svp ? *svp : &PL_sv_undef);
4090 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
4097 Perl_vivify_ref(pTHX_ SV *sv, U32 to_what)
4099 PERL_ARGS_ASSERT_VIVIFY_REF;
4104 Perl_croak_no_modify();
4105 prepare_SV_for_RV(sv);
4108 SvRV_set(sv, newSV(0));
4111 SvRV_set(sv, MUTABLE_SV(newAV()));
4114 SvRV_set(sv, MUTABLE_SV(newHV()));
4121 if (SvGMAGICAL(sv)) {
4122 /* copy the sv without magic to prevent magic from being
4124 SV* msv = sv_newmortal();
4125 sv_setsv_nomg(msv, sv);
4131 PERL_STATIC_INLINE HV *
4132 S_opmethod_stash(pTHX_ SV* meth)
4137 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
4138 ? (Perl_croak(aTHX_ "Can't call method \"%"SVf"\" without a "
4139 "package or object reference", SVfARG(meth)),
4141 : *(PL_stack_base + TOPMARK + 1);
4143 PERL_ARGS_ASSERT_OPMETHOD_STASH;
4147 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on an undefined value",
4150 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
4151 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
4152 stash = gv_stashsv(sv, GV_CACHE_ONLY);
4153 if (stash) return stash;
4157 ob = MUTABLE_SV(SvRV(sv));
4158 else if (!SvOK(sv)) goto undefined;
4159 else if (isGV_with_GP(sv)) {
4161 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
4162 "without a package or object reference",
4165 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') {
4166 assert(!LvTARGLEN(ob));
4170 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob));
4173 /* this isn't a reference */
4176 const char * const packname = SvPV_nomg_const(sv, packlen);
4177 const U32 packname_utf8 = SvUTF8(sv);
4178 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
4179 if (stash) return stash;
4181 if (!(iogv = gv_fetchpvn_flags(
4182 packname, packlen, packname_utf8, SVt_PVIO
4184 !(ob=MUTABLE_SV(GvIO(iogv))))
4186 /* this isn't the name of a filehandle either */
4189 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
4190 "without a package or object reference",
4193 /* assume it's a package name */
4194 stash = gv_stashpvn(packname, packlen, packname_utf8);
4195 if (stash) return stash;
4196 else return MUTABLE_HV(sv);
4198 /* it _is_ a filehandle name -- replace with a reference */
4199 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
4202 /* if we got here, ob should be an object or a glob */
4203 if (!ob || !(SvOBJECT(ob)
4204 || (isGV_with_GP(ob)
4205 && (ob = MUTABLE_SV(GvIO((const GV *)ob)))
4208 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on unblessed reference",
4209 SVfARG((SvSCREAM(meth) && strEQ(SvPV_nolen_const(meth),"isa"))
4210 ? newSVpvs_flags("DOES", SVs_TEMP)
4222 SV* const meth = TOPs;
4225 SV* const rmeth = SvRV(meth);
4226 if (SvTYPE(rmeth) == SVt_PVCV) {
4232 stash = opmethod_stash(meth);
4234 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4237 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4241 #define METHOD_CHECK_CACHE(stash,cache,meth) \
4242 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
4244 gv = MUTABLE_GV(HeVAL(he)); \
4245 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
4246 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
4248 XPUSHs(MUTABLE_SV(GvCV(gv))); \
4257 SV* const meth = cMETHOPx_meth(PL_op);
4258 HV* const stash = opmethod_stash(meth);
4260 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
4261 METHOD_CHECK_CACHE(stash, stash, meth);
4264 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4267 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4276 SV* const meth = cMETHOPx_meth(PL_op);
4277 HV* const stash = CopSTASH(PL_curcop);
4278 /* Actually, SUPER doesn't need real object's (or class') stash at all,
4279 * as it uses CopSTASH. However, we must ensure that object(class) is
4280 * correct (this check is done by S_opmethod_stash) */
4281 opmethod_stash(meth);
4283 if ((cache = HvMROMETA(stash)->super)) {
4284 METHOD_CHECK_CACHE(stash, cache, meth);
4287 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4290 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4298 SV* const meth = cMETHOPx_meth(PL_op);
4299 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4300 opmethod_stash(meth); /* not used but needed for error checks */
4302 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
4303 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4305 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4308 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4312 PP(pp_method_redir_super)
4317 SV* const meth = cMETHOPx_meth(PL_op);
4318 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4319 opmethod_stash(meth); /* not used but needed for error checks */
4321 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4322 else if ((cache = HvMROMETA(stash)->super)) {
4323 METHOD_CHECK_CACHE(stash, cache, meth);
4326 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4329 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4334 * ex: set ts=8 sts=4 sw=4 et: