3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
6 * You may distribute under the terms of either the GNU General Public
7 * License or the Artistic License, as specified in the README file.
12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
15 * Awake! Awake! Fear, Fire, Foes! Awake!
18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
21 /* This file contains 'hot' pp ("push/pop") functions that
22 * execute the opcodes that make up a perl program. A typical pp function
23 * expects to find its arguments on the stack, and usually pushes its
24 * results onto the stack, hence the 'pp' terminology. Each OP structure
25 * contains a pointer to the relevant pp_foo() function.
27 * By 'hot', we mean common ops whose execution speed is critical.
28 * By gathering them together into a single file, we encourage
29 * CPU cache hits on hot code. Also it could be taken as a warning not to
30 * change any code in this file unless you're sure it won't affect
35 #define PERL_IN_PP_HOT_C
/* pp_nextstate fragment (SOURCE is a non-contiguous sample; the function
 * header is not visible).  Start of a new Perl statement: remember the
 * current COP for diagnostics/line numbers, reset the taint flag, and
 * drop any temporaries the previous statement left on the stack by
 * restoring SP to the enclosing block's saved depth. */
49 PL_curcop = (COP*)PL_op;
50 TAINT_NOT; /* Each statement is presumed innocent */
51 PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;
/* pp_gvsv fragment: push the scalar slot of the GV addressed by this op.
 * With OPpLVAL_INTRO ('local $x') the scalar is saved/localized first.
 * NOTE(review): line 63 of the original (presumably the 'else') is
 * missing from this sample, so the two PUSHs calls are alternatives,
 * not sequential. */
61 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
62 PUSHs(save_scalar(cGVOP_gv));
64 PUSHs(GvSVn(cGVOP_gv));
69 /* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
76 /* This is sometimes called directly by pp_coreargs, pp_grepstart and
80 PUSHMARK(PL_stack_sp);
91 /* no PUTBACK, SETs doesn't inc/dec SP */
98 XPUSHs(MUTABLE_SV(cGVOP_gv));
103 /* also used for: pp_andassign() */
/* pp_and fragment: short-circuit '&&'.  Reads the topmost SV directly via
 * PL_stack_sp (deliberately avoiding dSP, per the comment below) and, for
 * OP_AND, jumps to op_other (the right-hand operand).  The boolean test
 * itself (original lines 115-119) is missing from this sample, so the
 * exact branch sense cannot be confirmed here — TODO verify against the
 * full source. */
109 /* SP is not used to remove a variable that is saved across the
110 sv_2bool_flags call in SvTRUE_NN, if a RISC/CISC or low/high machine
111 register or load/store vs direct mem ops macro is introduced, this
112 should be a define block between direct PL_stack_sp and dSP operations,
113 presently, using PL_stack_sp is bias towards CISC cpus */
114 SV * const sv = *PL_stack_sp;
118 if (PL_op->op_type == OP_AND)
120 return cLOGOP->op_other;
/* pp_sassign fragment (many interior lines, including the function header
 * and several closing braces, are missing from this sample).  Scalar
 * assignment: normalize operand order, propagate taint, handle the
 * special '*glob = \&code' path (possibly via a proxy constant sub),
 * then perform the actual magic-aware copy. */
128 /* sassign keeps its args in the optree traditionally backwards.
129 So we pop them differently.
131 SV *left = POPs; SV *right = TOPs;
133 if (PL_op->op_private & OPpASSIGN_BACKWARDS) {
134 SV * const temp = left;
135 left = right; right = temp;
137 assert(TAINTING_get || !TAINT_get);
138 if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
/* Assigning a code ref to a glob: may be optimisable into a placeholder
 * ("proxy constant subroutine") instead of a full glob. */
140 if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
142 SV * const cv = SvRV(right);
143 const U32 cv_type = SvTYPE(cv);
144 const bool is_gv = isGV_with_GP(left);
145 const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
151 /* Can do the optimisation if left (LVALUE) is not a typeglob,
152 right (RVALUE) is a reference to something, and we're in void
154 if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
155 /* Is the target symbol table currently empty? */
156 GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
157 if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
158 /* Good. Create a new proxy constant subroutine in the target.
159 The gv becomes a(nother) reference to the constant. */
160 SV *const value = SvRV(cv);
162 SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
163 SvPCS_IMPORTED_on(gv);
165 SvREFCNT_inc_simple_void(value);
171 /* Need to fix things up. */
173 /* Need to fix GV. */
174 left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
178 /* We've been returned a constant rather than a full subroutine,
179 but they expect a subroutine reference to apply. */
181 ENTER_with_name("sassign_coderef");
182 SvREFCNT_inc_void(SvRV(cv));
183 /* newCONSTSUB takes a reference count on the passed in SV
184 from us. We set the name to NULL, otherwise we get into
185 all sorts of fun as the reference to our new sub is
186 donated to the GV that we're about to assign to.
188 SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
191 LEAVE_with_name("sassign_coderef");
193 /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
195 First: ops for \&{"BONK"}; return us the constant in the
197 Second: ops for *{"BONK"} cause that symbol table entry
198 (and our reference to it) to be upgraded from RV
200 Thirdly: We get here. cv is actually PVGV now, and its
201 GvCV() is actually the subroutine we're looking for
203 So change the reference so that it points to the subroutine
204 of that typeglob, as that's what they were after all along.
206 GV *const upgraded = MUTABLE_GV(cv);
207 CV *const source = GvCV(upgraded);
210 assert(CvFLAGS(source) & CVf_CONST);
/* Swap the reference from the upgraded glob to its constant CV,
 * keeping reference counts balanced. */
212 SvREFCNT_inc_simple_void_NN(source);
213 SvREFCNT_dec_NN(upgraded);
214 SvRV_set(right, MUTABLE_SV(source));
/* Warn when the target is a throwaway temporary — the assignment can
 * have no observable effect. */
220 UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
221 (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
224 packWARN(WARN_MISC), "Useless assignment to a temporary"
/* The actual assignment, with set-magic invoked on the target. */
226 SvSetMagicSV(left, right);
236 RETURNOP(cLOGOP->op_other);
238 RETURNOP(cLOGOP->op_next);
/* pp_unstack fragment: reset per-statement state inside a loop body —
 * clear taint and pop the stack back to the enclosing block's depth.
 * The OPf_SPECIAL test distinguishes the while-BLOCK case (assumption
 * based on the assert; surrounding lines are missing — verify). */
245 TAINT_NOT; /* Each statement is presumed innocent */
247 PL_stack_sp = PL_stack_base + cx->blk_oldsp;
249 if (!(PL_op->op_flags & OPf_SPECIAL)) {
250 assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx));
/* pp_concat fragment: string concatenation ('.', '.='), with overload
 * (amagic) dispatch first.  Three operand layouts are handled:
 *   TARG == right  ($r = $l . $r): right must be copied before TARG is
 *                  overwritten with the left string;
 *   TARG != left   (plain $x = $l . $r): TARG is seeded from left;
 *   TARG == left   ($l .= $r): left is forced to a PV in place.
 * If exactly one side is UTF-8, both TARG and the right string are
 * upgraded so the final sv_catpvn_nomg concatenates like encodings. */
258 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
263 const char *rpv = NULL;
265 bool rcopied = FALSE;
267 if (TARG == right && right != left) { /* $r = $l.$r */
268 rpv = SvPV_nomg_const(right, rlen);
269 rbyte = !DO_UTF8(right);
/* Copy right into a mortal before TARG (== right) gets clobbered. */
270 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
271 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */
275 if (TARG != left) { /* not $l .= $r */
277 const char* const lpv = SvPV_nomg_const(left, llen);
278 lbyte = !DO_UTF8(left);
279 sv_setpvn(TARG, lpv, llen);
285 else { /* $l .= $r and left == TARG */
287 if (left == right && ckWARN(WARN_UNINITIALIZED)) /* $l .= $l */
288 report_uninit(right);
292 SvPV_force_nomg_nolen(left);
294 lbyte = !DO_UTF8(left);
300 rpv = SvPV_nomg_const(right, rlen);
301 rbyte = !DO_UTF8(right);
/* Mixed byte/UTF-8 operands: upgrade both sides to UTF-8. */
303 if (lbyte != rbyte) {
305 sv_utf8_upgrade_nomg(TARG);
308 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
309 sv_utf8_upgrade_nomg(right);
310 rpv = SvPV_nomg_const(right, rlen);
313 sv_catpvn_nomg(TARG, rpv, rlen);
320 /* push the elements of av onto the stack.
321 * XXX Note that padav has similar code but without the mg_get().
322 * I suspect that the mg_get is no longer needed, but while padav
323 * differs, it can't share this function */
/* Two paths: magical arrays go through av_fetch (which honours tie/get
 * magic, see the #27839 note); plain arrays copy AvARRAY directly,
 * substituting &PL_sv_undef for NULL holes. */
326 S_pushav(pTHX_ AV* const av)
329 const SSize_t maxarg = AvFILL(av) + 1;
331 if (UNLIKELY(SvRMAGICAL(av))) {
333 for (i=0; i < (PADOFFSET)maxarg; i++) {
334 SV ** const svp = av_fetch(av, i, FALSE);
335 /* See note in pp_helem, and bug id #27839 */
337 ? SvGMAGICAL(*svp) ? (mg_get(*svp), *svp) : *svp
/* Fast path: no magic, raw copy of the AV's element array. */
343 for (i=0; i < (PADOFFSET)maxarg; i++) {
344 SV * const sv = AvARRAY(av)[i];
345 SP[i+1] = LIKELY(sv) ? sv : &PL_sv_undef;
353 /* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */
/* pp_padrange fragment: pushes a contiguous range of pad lexicals in one
 * op.  base/count are packed into op_targ/op_private.  With OPf_SPECIAL
 * it fakes the RHS @_ for 'my (...) = @_'.  For OPpLVAL_INTRO it pushes
 * a single packed save-stack entry (SAVEt_CLEARPADRANGE) covering the
 * whole range instead of one save per lexical. */
358 PADOFFSET base = PL_op->op_targ;
359 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
361 if (PL_op->op_flags & OPf_SPECIAL) {
362 /* fake the RHS of my ($x,$y,..) = @_ */
364 S_pushav(aTHX_ GvAVn(PL_defgv));
368 /* note, this is only skipped for compile-time-known void cxt */
369 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
372 for (i = 0; i <count; i++)
373 *++SP = PAD_SV(base+i);
375 if (PL_op->op_private & OPpLVAL_INTRO) {
376 SV **svp = &(PAD_SVl(base));
377 const UV payload = (UV)(
378 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
379 | (count << SAVE_TIGHT_SHIFT)
380 | SAVEt_CLEARPADRANGE);
/* Compile-time and run-time checks that base/count fit the packing. */
381 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
382 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT)) == base);
389 for (i = 0; i <count; i++)
390 SvPADSTALE_off(*svp++); /* mark lexical as active */
/* pp_padsv fragment: push a single pad lexical.  In lvalue (OPf_MOD)
 * context: arrange for a non-state 'my' variable to be cleared on scope
 * exit (save_clearsv), and auto-vivify a reference when OPpDEREF asks
 * for it (e.g. $x->[0] where $x is undef). */
401 OP * const op = PL_op;
402 /* access PL_curpad once */
403 SV ** const padentry = &(PAD_SVl(op->op_targ));
408 PUTBACK; /* no pop/push after this, TOPs ok */
410 if (op->op_flags & OPf_MOD) {
411 if (op->op_private & OPpLVAL_INTRO)
412 if (!(op->op_private & OPpPAD_STATE))
413 save_clearsv(padentry);
414 if (op->op_private & OPpDEREF) {
415 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather
416 than TARG reduces the scope of TARG, so it does not
417 span the call to save_clearsv, resulting in smaller
419 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
/* pp_readline fragment: resolve the filehandle for <...> into
 * PL_last_in_gv — from the stack, from PL_argvgv (bare <>), or by
 * dereferencing a glob reference — then delegate to do_readline().
 * The branch structure is incomplete in this sample (several 'if'/'else'
 * lines are missing), so the exact flow between the alternatives cannot
 * be confirmed here. */
431 tryAMAGICunTARGETlist(iter_amg, 0);
432 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
434 else PL_last_in_gv = PL_argvgv, PL_stack_sp--;
435 if (!isGV_with_GP(PL_last_in_gv)) {
436 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
437 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));
440 XPUSHs(MUTABLE_SV(PL_last_in_gv));
443 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
444 if (PL_last_in_gv == (GV *)&PL_sv_undef)
445 PL_last_in_gv = NULL;
447 assert(isGV_with_GP(PL_last_in_gv));
450 return do_readline();
/* pp_eq fragment: numeric '=='.  After overload dispatch, take the fast
 * path when both operands are plain (non-UV-flagged) IVs and compare
 * the raw IV slots; otherwise fall back to the general numeric
 * comparison do_ncmp(). */
458 tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
462 (SvIOK_notUV(left) && SvIOK_notUV(right))
463 ? (SvIVX(left) == SvIVX(right))
464 : ( do_ncmp(left, right) == 0)
470 /* also used for: pp_i_preinc() */
/* pp_preinc fragment: ++$x.  Fast path: the SV carries none of the
 * "think first" / magic / non-plain-IV flags and is not at IV_MAX, so
 * the IV slot can be bumped directly; otherwise sv_inc() handles magic,
 * string increment, and overflow to NV. */
474 SV *sv = *PL_stack_sp;
476 if (LIKELY(((sv->sv_flags &
477 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
478 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
480 && SvIVX(sv) != IV_MAX)
482 SvIV_set(sv, SvIVX(sv) + 1);
484 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
491 /* also used for: pp_i_predec() */
/* pp_predec fragment: --$x, mirror image of pp_preinc — direct IV
 * decrement when flags allow and the value is not IV_MIN, else the
 * general sv_dec() path. */
495 SV *sv = *PL_stack_sp;
497 if (LIKELY(((sv->sv_flags &
498 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
499 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
501 && SvIVX(sv) != IV_MIN)
503 SvIV_set(sv, SvIVX(sv) - 1);
505 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
512 /* also used for: pp_orassign() */
/* pp_or fragment: short-circuit '||' — for OP_OR, branch to the
 * right-hand operand (op_other).  The preceding truth test is missing
 * from this sample. */
521 if (PL_op->op_type == OP_OR)
523 RETURNOP(cLOGOP->op_other);
528 /* also used for: pp_dor() pp_dorassign() */
/* pp_defined fragment: shared implementation of defined(), '//' and
 * '//='.  An SV with no body (!SvANY) is undefined.  For container
 * types, definedness uses type-specific tests: an AV with elements
 * (or magic), an HV with allocated buckets (or magic), a CV with a
 * body or XSUB.  OP_DOR branches to op_other when defined. */
535 const int op_type = PL_op->op_type;
536 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);
541 if (UNLIKELY(!sv || !SvANY(sv))) {
542 if (op_type == OP_DOR)
544 RETURNOP(cLOGOP->op_other);
550 if (UNLIKELY(!sv || !SvANY(sv)))
555 switch (SvTYPE(sv)) {
557 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
561 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
565 if (CvROOT(sv) || CvXSUB(sv))
578 if(op_type == OP_DOR)
580 RETURNOP(cLOGOP->op_other);
582 /* assuming OP_DEFINED */
592 dSP; dATARGET; bool useleft; SV *svl, *svr;
594 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
598 #ifdef PERL_PRESERVE_IVUV
600 /* special-case some simple common cases */
601 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
603 U32 flags = (svl->sv_flags & svr->sv_flags);
604 if (flags & SVf_IOK) {
605 /* both args are simple IVs */
610 topl = ((UV)il) >> (UVSIZE * 8 - 2);
611 topr = ((UV)ir) >> (UVSIZE * 8 - 2);
613 /* if both are in a range that can't under/overflow, do a
614 * simple integer add: if the top of both numbers
615 * are 00 or 11, then it's safe */
616 if (!( ((topl+1) | (topr+1)) & 2)) {
618 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
624 else if (flags & SVf_NOK) {
625 /* both args are NVs */
630 #if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
631 !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
632 && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
634 nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
637 /* nothing was lost by converting to IVs */
640 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
648 useleft = USE_LEFT(svl);
649 /* We must see if we can perform the addition with integers if possible,
650 as the integer code detects overflow while the NV code doesn't.
651 If either argument hasn't had a numeric conversion yet attempt to get
652 the IV. It's important to do this now, rather than just assuming that
653 it's not IOK as a PV of "9223372036854775806" may not take well to NV
654 addition, and an SV which is NOK, NV=6.0 ought to be coerced to
655 integer in case the second argument is IV=9223372036854775806
656 We can (now) rely on sv_2iv to do the right thing, only setting the
657 public IOK flag if the value in the NV (or PV) slot is truly integer.
659 A side effect is that this also aggressively prefers integer maths over
660 fp maths for integer values.
662 How to detect overflow?
664 C 99 section 6.2.6.1 says
666 The range of nonnegative values of a signed integer type is a subrange
667 of the corresponding unsigned integer type, and the representation of
668 the same value in each type is the same. A computation involving
669 unsigned operands can never overflow, because a result that cannot be
670 represented by the resulting unsigned integer type is reduced modulo
671 the number that is one greater than the largest value that can be
672 represented by the resulting type.
676 which I read as "unsigned ints wrap."
678 signed integer overflow seems to be classed as "exception condition"
680 If an exceptional condition occurs during the evaluation of an
681 expression (that is, if the result is not mathematically defined or not
682 in the range of representable values for its type), the behavior is
685 (6.5, the 5th paragraph)
687 I had assumed that on 2s complement machines signed arithmetic would
688 wrap, hence coded pp_add and pp_subtract on the assumption that
689 everything perl builds on would be happy. After much wailing and
690 gnashing of teeth it would seem that irix64 knows its ANSI spec well,
691 knows that it doesn't need to, and doesn't. Bah. Anyway, the all-
692 unsigned code below is actually shorter than the old code. :-)
695 if (SvIV_please_nomg(svr)) {
696 /* Unless the left argument is integer in range we are going to have to
697 use NV maths. Hence only attempt to coerce the right argument if
698 we know the left is integer. */
706 /* left operand is undef, treat as zero. + 0 is identity,
707 Could SETi or SETu right now, but space optimise by not adding
708 lots of code to speed up what is probably a rarish case. */
710 /* Left operand is defined, so is it IV? */
711 if (SvIV_please_nomg(svl)) {
712 if ((auvok = SvUOK(svl)))
715 const IV aiv = SvIVX(svl);
718 auvok = 1; /* Now acting as a sign flag. */
720 auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
727 bool result_good = 0;
730 bool buvok = SvUOK(svr);
735 const IV biv = SvIVX(svr);
740 buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
742 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
743 else "IV" now, independent of how it came in.
744 if a, b represents positive, A, B negative, a maps to -A etc
749 all UV maths. negate result if A negative.
750 add if signs same, subtract if signs differ. */
756 /* Must get smaller */
762 /* result really should be -(auv-buv). as its negation
763 of true value, need to swap our result flag */
780 if (result <= (UV)IV_MIN)
781 SETi(result == (UV)IV_MIN
782 ? IV_MIN : -(IV)result);
784 /* result valid, but out of range for IV. */
789 } /* Overflow, drop through to NVs. */
794 useleft = USE_LEFT(svl);
798 NV value = SvNV_nomg(svr);
801 /* left operand is undef, treat as zero. + 0.0 is identity. */
805 SETn( value + SvNV_nomg(svl) );
811 /* also used for: pp_aelemfast_lex() */
/* pp_aelemfast fragment: fetch an array element whose index is a small
 * compile-time constant stored in op_private (signed, hence the I8
 * casts).  The array is either a pad lexical (AELEMFAST_LEX) or the
 * package array from the op's GV.  lval fetches that fail are fatal. */
816 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
817 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
818 const U32 lval = PL_op->op_flags & OPf_MOD;
819 SV** const svp = av_fetch(av, (I8)PL_op->op_private, lval);
820 SV *sv = (svp ? *svp : &PL_sv_undef);
822 if (UNLIKELY(!svp && lval))
823 DIE(aTHX_ PL_no_aelem, (int)(I8)PL_op->op_private);
826 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
836 do_join(TARG, *MARK, MARK, SP);
847 * We ass_u_me that LvTARGOFF() comes first, and that two STRLENs
848 * will be enough to hold an OP*.
850 SV* const sv = sv_newmortal();
851 sv_upgrade(sv, SVt_PVLV);
853 Copy(&PL_op, &LvTARGOFF(sv), 1, OP*);
856 XPUSHs(MUTABLE_SV(PL_op));
861 /* Oversized hot code. */
863 /* also used for: pp_say() */
867 dSP; dMARK; dORIGMARK;
871 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;
875 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
878 if (MARK == ORIGMARK) {
879 /* If using default handle then we need to make space to
880 * pass object as 1st arg, so move other args up ...
884 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
887 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
889 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
890 | (PL_op->op_type == OP_SAY
891 ? TIED_METHOD_SAY : 0)), sp - mark);
894 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
895 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
898 SETERRNO(EBADF,RMS_IFI);
901 else if (!(fp = IoOFP(io))) {
903 report_wrongway_fh(gv, '<');
906 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);
910 SV * const ofs = GvSV(PL_ofsgv); /* $, */
912 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
914 if (!do_print(*MARK, fp))
918 /* don't use 'ofs' here - it may be invalidated by magic callbacks */
919 if (!do_print(GvSV(PL_ofsgv), fp)) {
928 if (!do_print(*MARK, fp))
936 if (PL_op->op_type == OP_SAY) {
937 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))
940 else if (PL_ors_sv && SvOK(PL_ors_sv))
941 if (!do_print(PL_ors_sv, fp)) /* $\ */
944 if (IoFLAGS(io) & IOf_FLUSH)
945 if (PerlIO_flush(fp) == EOF)
955 XPUSHs(&PL_sv_undef);
960 /* also used for: pp_rv2hv() */
961 /* also called directly by pp_lvavref */
966 const U8 gimme = GIMME_V;
967 static const char an_array[] = "an ARRAY";
968 static const char a_hash[] = "a HASH";
969 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
970 || PL_op->op_type == OP_LVAVREF;
971 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
975 if (UNLIKELY(SvAMAGIC(sv))) {
976 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
979 if (UNLIKELY(SvTYPE(sv) != type))
980 /* diag_listed_as: Not an ARRAY reference */
981 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
982 else if (UNLIKELY(PL_op->op_flags & OPf_MOD
983 && PL_op->op_private & OPpLVAL_INTRO))
984 Perl_croak(aTHX_ "%s", PL_no_localize_ref);
986 else if (UNLIKELY(SvTYPE(sv) != type)) {
989 if (!isGV_with_GP(sv)) {
990 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
998 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
999 if (PL_op->op_private & OPpLVAL_INTRO)
1000 sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
1002 if (PL_op->op_flags & OPf_REF) {
1006 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
1007 const I32 flags = is_lvalue_sub();
1008 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1009 if (gimme != G_ARRAY)
1010 goto croak_cant_return;
1017 AV *const av = MUTABLE_AV(sv);
1018 /* The guts of pp_rv2av */
1019 if (gimme == G_ARRAY) {
1025 else if (gimme == G_SCALAR) {
1027 const SSize_t maxarg = AvFILL(av) + 1;
1031 /* The guts of pp_rv2hv */
1032 if (gimme == G_ARRAY) { /* array wanted */
1034 return Perl_do_kv(aTHX);
1036 else if ((PL_op->op_private & OPpTRUEBOOL
1037 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
1038 && block_gimme() == G_VOID ))
1039 && (!SvRMAGICAL(sv) || !mg_find(sv, PERL_MAGIC_tied)))
1040 SETs(HvUSEDKEYS(sv) ? &PL_sv_yes : sv_2mortal(newSViv(0)));
1041 else if (gimme == G_SCALAR) {
1043 TARG = Perl_hv_scalar(aTHX_ MUTABLE_HV(sv));
1050 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
1051 is_pp_rv2av ? "array" : "hash");
/* Warn about an odd number of elements on the RHS of a hash assignment.
 * If the single odd element is the very first one and is an array/hash
 * reference, the user probably wrote %h = \(...) or similar, so a more
 * specific "Reference found where even-sized list expected" is issued. */
1056 S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
1058 PERL_ARGS_ASSERT_DO_ODDBALL;
1061 if (ckWARN(WARN_MISC)) {
1063 if (oddkey == firstkey &&
1065 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
1066 SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
1068 err = "Reference found where even-sized list expected";
1071 err = "Odd number of elements in hash assignment";
1072 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
1079 /* Do a mark and sweep with the SVf_BREAK flag to detect elements which
1080 * are common to both the LHS and RHS of an aassign, and replace them
1081 * with copies. All these copies are made before the actual list assign is
1084 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
1085 * element ($b) to the first LH element ($a), modifies $a; when the
1086 * second assignment is done, the second RH element now has the wrong
1087 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
1088 * Note that we don't need to make a mortal copy of $b.
1090 * The algorithm below works by, for every RHS element, mark the
1091 * corresponding LHS target element with SVf_BREAK. Then if the RHS
1092 * element is found with SVf_BREAK set, it means it would have been
1093 * modified, so make a copy.
1094 * Note that by scanning both LHS and RHS in lockstep, we avoid
1095 * unnecessary copies (like $b above) compared with a naive
1096 * "mark all LHS; copy all marked RHS; unmark all LHS".
1098 * If the LHS element is a 'my' declaration' and has a refcount of 1, then
1099 * it can't be common and can be skipped.
1101 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
1102 * that we thought we didn't need to call S_aassign_copy_common(), but we
1103 * have anyway for sanity checking. If we find we need to copy, then panic.
1106 PERL_STATIC_INLINE void
1107 S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
1108 SV **firstrelem, SV **lastrelem
1117 SSize_t lcount = lastlelem - firstlelem + 1;
1118 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
1119 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
1120 bool copy_all = FALSE;
1122 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
1123 assert(firstlelem < lastlelem); /* at least 2 LH elements */
1124 assert(firstrelem < lastrelem); /* at least 2 RH elements */
1128 /* we never have to copy the first RH element; it can't be corrupted
1129 * by assigning something to the corresponding first LH element.
1130 * So this scan does in a loop: mark LHS[N]; test RHS[N+1]
1132 relem = firstrelem + 1;
1134 for (; relem <= lastrelem; relem++) {
1137 /* mark next LH element */
1139 if (--lcount >= 0) {
1142 if (UNLIKELY(!svl)) {/* skip AV alias marker */
1143 assert (lelem <= lastlelem);
1149 if (SvSMAGICAL(svl)) {
1152 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
1155 /* this LH element will consume all further args;
1156 * no need to mark any further LH elements (if any).
1157 * But we still need to scan any remaining RHS elements;
1158 * set lcount negative to distinguish from lcount == 0,
1159 * so the loop condition continues being true
1162 lelem--; /* no need to unmark this element */
1164 else if (!(do_rc1 && SvREFCNT(svl) == 1) && svl != &PL_sv_undef) {
1165 assert(!SvIMMORTAL(svl));
1166 SvFLAGS(svl) |= SVf_BREAK;
1170 /* don't check RH element if no SVf_BREAK flags set yet */
1177 /* see if corresponding RH element needs copying */
1183 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
1187 /* op_dump(PL_op); */
1189 "panic: aassign skipped needed copy of common RH elem %"
1190 UVuf, (UV)(relem - firstrelem));
1194 TAINT_NOT; /* Each item is independent */
1196 /* Dear TODO test in t/op/sort.t, I love you.
1197 (It's relying on a panic, not a "semi-panic" from newSVsv()
1198 and then an assertion failure below.) */
1199 if (UNLIKELY(SvIS_FREED(svr))) {
1200 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
1203 /* avoid break flag while copying; otherwise COW etc
1205 SvFLAGS(svr) &= ~SVf_BREAK;
1206 /* Not newSVsv(), as it does not allow copy-on-write,
1207 resulting in wasteful copies.
1208 Also, we use SV_NOSTEAL in case the SV is used more than
1209 once, e.g. (...) = (f())[0,0]
1210 Where the same SV appears twice on the RHS without a ref
1211 count bump. (Although I suspect that the SV won't be
1212 stealable here anyway - DAPM).
1214 *relem = sv_mortalcopy_flags(svr,
1215 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1216 /* ... but restore afterwards in case it's needed again,
1217 * e.g. ($a,$b,$c) = (1,$a,$a)
1219 SvFLAGS(svr) |= SVf_BREAK;
1231 while (lelem > firstlelem) {
1232 SV * const svl = *(--lelem);
1234 SvFLAGS(svl) &= ~SVf_BREAK;
1243 SV **lastlelem = PL_stack_sp;
1244 SV **lastrelem = PL_stack_base + POPMARK;
1245 SV **firstrelem = PL_stack_base + POPMARK + 1;
1246 SV **firstlelem = lastrelem + 1;
1259 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
1260 * only need to save locally, not on the save stack */
1261 U16 old_delaymagic = PL_delaymagic;
1266 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
1268 /* If there's a common identifier on both sides we have to take
1269 * special care that assigning the identifier on the left doesn't
1270 * clobber a value on the right that's used later in the list.
1273 /* at least 2 LH and RH elements, or commonality isn't an issue */
1274 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1275 for (relem = firstrelem+1; relem <= lastrelem; relem++) {
1276 if (SvGMAGICAL(*relem))
1279 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1280 if (*lelem && SvSMAGICAL(*lelem))
/* pp_aassign fragment: decide whether the expensive commonality scan
 * (S_aassign_copy_common) can be skipped because every LHS scalar has a
 * reference count of 1. */
1283 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
1284 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
1285 /* skip the scan if all scalars have a ref count of 1 */
1286 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1288 if (!sv || SvREFCNT(sv) == 1)
/* NOTE(review): the condition below tests SVt_PVAV twice, so the second
 * comparison is dead — it almost certainly should read SVt_PVHV (skip
 * aggregates, i.e. both arrays AND hashes).  As written, a hash on the
 * LHS with refcount > 1 falls through here.  Confirm against upstream
 * perl, where this was later corrected. */
1290 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVAV)
1297 S_aassign_copy_common(aTHX_
1298 firstlelem, lastlelem, firstrelem, lastrelem
1308 /* on debugging builds, do the scan even if we've concluded we
1309 * don't need to, then panic if we find commonality. Note that the
1310 * scanner assumes at least 2 elements */
1311 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1319 lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
1326 while (LIKELY(lelem <= lastlelem)) {
1328 TAINT_NOT; /* Each item stands on its own, taintwise. */
1330 if (UNLIKELY(!sv)) {
1333 ASSUME(SvTYPE(sv) == SVt_PVAV);
1335 switch (SvTYPE(sv)) {
1337 bool already_copied = FALSE;
1338 ary = MUTABLE_AV(sv);
1339 magic = SvMAGICAL(ary) != 0;
1341 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1343 /* We need to clear ary. There is a danger that if we do this,
1344 * elements on the RHS may be prematurely freed, e.g.
1346 * In the case of possible commonality, make a copy of each
1347 * RHS SV *before* clearing the array, and add a reference
1348 * from the tmps stack, so that it doesn't leak on death.
1349 * Otherwise, make a copy of each RHS SV only as we're storing
1350 * it into the array - that way we don't have to worry about
1351 * it being leaked if we die, but don't incur the cost of
1352 * mortalising everything.
1355 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1356 && (relem <= lastrelem)
1357 && (magic || AvFILL(ary) != -1))
1360 EXTEND_MORTAL(lastrelem - relem + 1);
1361 for (svp = relem; svp <= lastrelem; svp++) {
1362 /* see comment in S_aassign_copy_common about SV_NOSTEAL */
1363 *svp = sv_mortalcopy_flags(*svp,
1364 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1367 already_copied = TRUE;
1371 if (relem <= lastrelem)
1372 av_extend(ary, lastrelem - relem);
1375 while (relem <= lastrelem) { /* gobble up all the rest */
1377 if (LIKELY(!alias)) {
1382 /* before newSV, in case it dies */
1385 /* see comment in S_aassign_copy_common about
1387 sv_setsv_flags(sv, *relem,
1388 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
1393 if (!already_copied)
1396 DIE(aTHX_ "Assigned value is not a reference");
1397 if (SvTYPE(SvRV(*relem)) > SVt_PVLV)
1398 /* diag_listed_as: Assigned value is not %s reference */
1400 "Assigned value is not a SCALAR reference");
1401 if (lval && !already_copied)
1402 *relem = sv_mortalcopy(*relem);
1403 /* XXX else check for weak refs? */
1404 sv = SvREFCNT_inc_NN(SvRV(*relem));
1408 SvREFCNT_inc_simple_void_NN(sv); /* undo mortal free */
1409 didstore = av_store(ary,i++,sv);
1418 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
1419 SvSETMAGIC(MUTABLE_SV(ary));
1424 case SVt_PVHV: { /* normal hash */
1428 SV** topelem = relem;
1429 SV **firsthashrelem = relem;
1430 bool already_copied = FALSE;
1432 hash = MUTABLE_HV(sv);
1433 magic = SvMAGICAL(hash) != 0;
1435 odd = ((lastrelem - firsthashrelem)&1)? 0 : 1;
1436 if (UNLIKELY(odd)) {
1437 do_oddball(lastrelem, firsthashrelem);
1438 /* we have firstlelem to reuse, it's not needed anymore
1440 *(lastrelem+1) = &PL_sv_undef;
1444 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1446 /* We need to clear hash. There is a danger that if we do this,
1447 * elements on the RHS may be prematurely freed, e.g.
1448 * %h = (foo => $h{bar});
1449 * In the case of possible commonality, make a copy of each
1450 * RHS SV *before* clearing the hash, and add a reference
1451 * from the tmps stack, so that it doesn't leak on death.
1454 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1455 && (relem <= lastrelem)
1456 && (magic || HvUSEDKEYS(hash)))
1459 EXTEND_MORTAL(lastrelem - relem + 1);
1460 for (svp = relem; svp <= lastrelem; svp++) {
1461 *svp = sv_mortalcopy_flags(*svp,
1462 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1465 already_copied = TRUE;
1470 while (LIKELY(relem < lastrelem+odd)) { /* gobble up all the rest */
1473 /* Copy the key if aassign is called in lvalue context,
1474 to avoid having the next op modify our rhs. Copy
1475 it also if it is gmagical, lest it make the
1476 hv_store_ent call below croak, leaking the value. */
1477 sv = (lval || SvGMAGICAL(*relem)) && !already_copied
1478 ? sv_mortalcopy(*relem)
1487 sv_setsv_nomg(tmpstr,*relem++); /* value */
1490 if (gimme == G_ARRAY) {
1491 if (hv_exists_ent(hash, sv, 0))
1492 /* key overwrites an existing entry */
1495 /* copy element back: possibly to an earlier
1496 * stack location if we encountered dups earlier,
1497 * possibly to a later stack location if odd */
1499 *topelem++ = tmpstr;
1503 SvREFCNT_inc_simple_void_NN(tmpstr); /* undo mortal free */
1504 didstore = hv_store_ent(hash,sv,tmpstr,0);
1506 if (!didstore) sv_2mortal(tmpstr);
1512 if (duplicates && gimme == G_ARRAY) {
1513 /* at this point we have removed the duplicate key/value
1514 * pairs from the stack, but the remaining values may be
1515 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
1516 * the (a 2), but the stack now probably contains
1517 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
1518 * obliterates the earlier key. So refresh all values. */
1519 lastrelem -= duplicates;
1520 relem = firsthashrelem;
1521 while (relem < lastrelem+odd) {
1523 he = hv_fetch_ent(hash, *relem++, 0, 0);
1524 *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
1527 if (odd && gimme == G_ARRAY) lastrelem++;
1531 if (SvIMMORTAL(sv)) {
1532 if (relem <= lastrelem)
1536 if (relem <= lastrelem) {
1538 SvTEMP(sv) && !SvSMAGICAL(sv) && SvREFCNT(sv) == 1 &&
1539 (!isGV_with_GP(sv) || SvFAKE(sv)) && ckWARN(WARN_MISC)
1542 packWARN(WARN_MISC),
1543 "Useless assignment to a temporary"
1545 sv_setsv(sv, *relem);
1549 sv_setsv(sv, &PL_sv_undef);
1554 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
1555 /* Will be used to set PL_tainting below */
1556 Uid_t tmp_uid = PerlProc_getuid();
1557 Uid_t tmp_euid = PerlProc_geteuid();
1558 Gid_t tmp_gid = PerlProc_getgid();
1559 Gid_t tmp_egid = PerlProc_getegid();
1561 /* XXX $> et al currently silently ignore failures */
1562 if (PL_delaymagic & DM_UID) {
1563 #ifdef HAS_SETRESUID
1565 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1566 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
1569 # ifdef HAS_SETREUID
1571 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1572 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
1575 if ((PL_delaymagic & DM_UID) == DM_RUID) {
1576 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
1577 PL_delaymagic &= ~DM_RUID;
1579 # endif /* HAS_SETRUID */
1581 if ((PL_delaymagic & DM_UID) == DM_EUID) {
1582 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
1583 PL_delaymagic &= ~DM_EUID;
1585 # endif /* HAS_SETEUID */
1586 if (PL_delaymagic & DM_UID) {
1587 if (PL_delaymagic_uid != PL_delaymagic_euid)
1588 DIE(aTHX_ "No setreuid available");
1589 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
1591 # endif /* HAS_SETREUID */
1592 #endif /* HAS_SETRESUID */
1594 tmp_uid = PerlProc_getuid();
1595 tmp_euid = PerlProc_geteuid();
1597 /* XXX $> et al currently silently ignore failures */
1598 if (PL_delaymagic & DM_GID) {
1599 #ifdef HAS_SETRESGID
1601 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1602 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
1605 # ifdef HAS_SETREGID
1607 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1608 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
1611 if ((PL_delaymagic & DM_GID) == DM_RGID) {
1612 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
1613 PL_delaymagic &= ~DM_RGID;
1615 # endif /* HAS_SETRGID */
1617 if ((PL_delaymagic & DM_GID) == DM_EGID) {
1618 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
1619 PL_delaymagic &= ~DM_EGID;
1621 # endif /* HAS_SETEGID */
1622 if (PL_delaymagic & DM_GID) {
1623 if (PL_delaymagic_gid != PL_delaymagic_egid)
1624 DIE(aTHX_ "No setregid available");
1625 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
1627 # endif /* HAS_SETREGID */
1628 #endif /* HAS_SETRESGID */
1630 tmp_gid = PerlProc_getgid();
1631 tmp_egid = PerlProc_getegid();
1633 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
1634 #ifdef NO_TAINT_SUPPORT
1635 PERL_UNUSED_VAR(tmp_uid);
1636 PERL_UNUSED_VAR(tmp_euid);
1637 PERL_UNUSED_VAR(tmp_gid);
1638 PERL_UNUSED_VAR(tmp_egid);
1641 PL_delaymagic = old_delaymagic;
1643 if (gimme == G_VOID)
1644 SP = firstrelem - 1;
1645 else if (gimme == G_SCALAR) {
1648 SETi(lastrelem - firstrelem + 1);
1652 /* note that in this case *firstlelem may have been overwritten
1653 by sv_undef in the odd hash case */
1656 SP = firstrelem + (lastlelem - firstlelem);
1657 lelem = firstlelem + (relem - firstrelem);
1659 *relem++ = (lelem <= lastlelem) ? *lelem++ : &PL_sv_undef;
/* pp_qr fragment (heavily elided excerpt).
 * Builds the blessed qr// object: a mortal RV pointing at a copy of the
 * compiled REGEXP, blessed into the package reported by the regex engine
 * (CALLREG_PACKAGE), with taint propagated from the pattern.
 * NOTE(review): several original lines are missing from this excerpt
 * (e.g. the PP(pp_qr) header and the final RETURN); the comments below
 * describe only the lines that are visible. */
1669 PMOP * const pm = cPMOP;
1670 REGEXP * rx = PM_GETRE(pm);
/* pkg: class to bless the object into; NULL if there is no regexp yet */
1671 SV * const pkg = rx ? CALLREG_PACKAGE(rx) : NULL;
1672 SV * const rv = sv_newmortal();
1676 SvUPGRADE(rv, SVt_IV);
1677 /* For a subroutine describing itself as "This is a hacky workaround" I'm
1678 loath to use it here, but it seems to be the right fix. Or close.
1679 The key part appears to be that it's essential for pp_qr to return a new
1680 object (SV), which implies that there needs to be an effective way to
1681 generate a new SV from the existing SV that is pre-compiled in the
/* attach a copy of the compiled regexp as the RV's referent */
1683 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));
/* if the qr// carries an anon CV (from embedded code blocks) that is
 * marked cloneable, clone it and drop the reference to the original */
1686 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
1687 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
1688 *cvp = cv_clone(cv);
1689 SvREFCNT_dec_NN(cv);
/* bless into pkg; gv_stashsv(GV_ADD) autovivifies the stash.  pkg was
 * returned to us with a refcount we own, so release it after use. */
1693 HV *const stash = gv_stashsv(pkg, GV_ADD);
1694 SvREFCNT_dec_NN(pkg);
1695 (void)sv_bless(rv, stash);
/* a tainted pattern taints the qr object itself (see the "how taint
 * works" commentary later in this file) */
1698 if (UNLIKELY(RX_ISTAINTED(rx))) {
1700 SvTAINTED_on(SvRV(rv));
1713 SSize_t curpos = 0; /* initial pos() or current $+[0] */
1716 const char *truebase; /* Start of string */
1717 REGEXP *rx = PM_GETRE(pm);
1719 const U8 gimme = GIMME_V;
1721 const I32 oldsave = PL_savestack_ix;
1722 I32 had_zerolen = 0;
1725 if (PL_op->op_flags & OPf_STACKED)
1734 PUTBACK; /* EVAL blocks need stack_sp. */
1735 /* Skip get-magic if this is a qr// clone, because regcomp has
1737 truebase = ReANY(rx)->mother_re
1738 ? SvPV_nomg_const(TARG, len)
1739 : SvPV_const(TARG, len);
1741 DIE(aTHX_ "panic: pp_match");
1742 strend = truebase + len;
1743 rxtainted = (RX_ISTAINTED(rx) ||
1744 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
1747 /* We need to know this in case we fail out early - pos() must be reset */
1748 global = dynpm->op_pmflags & PMf_GLOBAL;
1750 /* PMdf_USED is set after a ?? matches once */
1753 SvREADONLY(PL_regex_pad[pm->op_pmoffset])
1755 pm->op_pmflags & PMf_USED
1758 DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once"));
1762 /* empty pattern special-cased to use last successful pattern if
1763 possible, except for qr// */
1764 if (!ReANY(rx)->mother_re && !RX_PRELEN(rx)
1770 if (RX_MINLEN(rx) >= 0 && (STRLEN)RX_MINLEN(rx) > len) {
1771 DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
1772 UVuf" < %"IVdf")\n",
1773 (UV)len, (IV)RX_MINLEN(rx)));
1777 /* get pos() if //g */
1779 mg = mg_find_mglob(TARG);
1780 if (mg && mg->mg_len >= 0) {
1781 curpos = MgBYTEPOS(mg, TARG, truebase, len);
1782 /* last time pos() was set, it was zero-length match */
1783 if (mg->mg_flags & MGf_MINMATCH)
1788 #ifdef PERL_SAWAMPERSAND
1791 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
1792 || (dynpm->op_pmflags & PMf_KEEPCOPY)
1796 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
1797 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
1798 * only on the first iteration. Therefore we need to copy $' as well
1799 * as $&, to make the rest of the string available for captures in
1800 * subsequent iterations */
1801 if (! (global && gimme == G_ARRAY))
1802 r_flags |= REXEC_COPY_SKIP_POST;
1804 #ifdef PERL_SAWAMPERSAND
1805 if (dynpm->op_pmflags & PMf_KEEPCOPY)
1806 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
1807 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
1814 s = truebase + curpos;
1816 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
1817 had_zerolen, TARG, NULL, r_flags))
1821 if (dynpm->op_pmflags & PMf_ONCE)
1823 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
1825 dynpm->op_pmflags |= PMf_USED;
1829 RX_MATCH_TAINTED_on(rx);
1830 TAINT_IF(RX_MATCH_TAINTED(rx));
1834 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
1836 mg = sv_magicext_mglob(TARG);
1837 MgBYTEPOS_set(mg, TARG, truebase, RX_OFFS(rx)[0].end);
1838 if (RX_ZERO_LEN(rx))
1839 mg->mg_flags |= MGf_MINMATCH;
1841 mg->mg_flags &= ~MGf_MINMATCH;
1844 if ((!RX_NPARENS(rx) && !global) || gimme != G_ARRAY) {
1845 LEAVE_SCOPE(oldsave);
1849 /* push captures on stack */
1852 const I32 nparens = RX_NPARENS(rx);
1853 I32 i = (global && !nparens) ? 1 : 0;
1855 SPAGAIN; /* EVAL blocks could move the stack. */
1856 EXTEND(SP, nparens + i);
1857 EXTEND_MORTAL(nparens + i);
1858 for (i = !i; i <= nparens; i++) {
1859 PUSHs(sv_newmortal());
1860 if (LIKELY((RX_OFFS(rx)[i].start != -1)
1861 && RX_OFFS(rx)[i].end != -1 ))
1863 const I32 len = RX_OFFS(rx)[i].end - RX_OFFS(rx)[i].start;
1864 const char * const s = RX_OFFS(rx)[i].start + truebase;
1865 if (UNLIKELY(RX_OFFS(rx)[i].end < 0 || RX_OFFS(rx)[i].start < 0
1866 || len < 0 || len > strend - s))
1867 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
1868 "start=%ld, end=%ld, s=%p, strend=%p, len=%"UVuf,
1869 (long) i, (long) RX_OFFS(rx)[i].start,
1870 (long)RX_OFFS(rx)[i].end, s, strend, (UV) len);
1871 sv_setpvn(*SP, s, len);
1872 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
1877 curpos = (UV)RX_OFFS(rx)[0].end;
1878 had_zerolen = RX_ZERO_LEN(rx);
1879 PUTBACK; /* EVAL blocks may use stack */
1880 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
1883 LEAVE_SCOPE(oldsave);
1886 NOT_REACHED; /* NOTREACHED */
1889 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
1891 mg = mg_find_mglob(TARG);
1895 LEAVE_SCOPE(oldsave);
1896 if (gimme == G_ARRAY)
1902 Perl_do_readline(pTHX)
1904 dSP; dTARGETSTACKED;
1909 IO * const io = GvIO(PL_last_in_gv);
1910 const I32 type = PL_op->op_type;
1911 const U8 gimme = GIMME_V;
1914 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
1916 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
1917 if (gimme == G_SCALAR) {
1919 SvSetSV_nosteal(TARG, TOPs);
1929 if (IoFLAGS(io) & IOf_ARGV) {
1930 if (IoFLAGS(io) & IOf_START) {
1932 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) {
1933 IoFLAGS(io) &= ~IOf_START;
1934 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
1935 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
1936 sv_setpvs(GvSVn(PL_last_in_gv), "-");
1937 SvSETMAGIC(GvSV(PL_last_in_gv));
1942 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
1943 if (!fp) { /* Note: fp != IoIFP(io) */
1944 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
1947 else if (type == OP_GLOB)
1948 fp = Perl_start_glob(aTHX_ POPs, io);
1950 else if (type == OP_GLOB)
1952 else if (IoTYPE(io) == IoTYPE_WRONLY) {
1953 report_wrongway_fh(PL_last_in_gv, '>');
1957 if ((!io || !(IoFLAGS(io) & IOf_START))
1958 && ckWARN(WARN_CLOSED)
1961 report_evil_fh(PL_last_in_gv);
1963 if (gimme == G_SCALAR) {
1964 /* undef TARG, and push that undefined value */
1965 if (type != OP_RCATLINE) {
1966 sv_setsv(TARG,NULL);
1973 if (gimme == G_SCALAR) {
1975 if (type == OP_RCATLINE && SvGMAGICAL(sv))
1978 if (type == OP_RCATLINE)
1979 SvPV_force_nomg_nolen(sv);
1983 else if (isGV_with_GP(sv)) {
1984 SvPV_force_nomg_nolen(sv);
1986 SvUPGRADE(sv, SVt_PV);
1987 tmplen = SvLEN(sv); /* remember if already alloced */
1988 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
1989 /* try short-buffering it. Please update t/op/readline.t
1990 * if you change the growth length.
1995 if (type == OP_RCATLINE && SvOK(sv)) {
1997 SvPV_force_nomg_nolen(sv);
2003 sv = sv_2mortal(newSV(80));
2007 /* This should not be marked tainted if the fp is marked clean */
2008 #define MAYBE_TAINT_LINE(io, sv) \
2009 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
2014 /* delay EOF state for a snarfed empty file */
2015 #define SNARF_EOF(gimme,rs,io,sv) \
2016 (gimme != G_SCALAR || SvCUR(sv) \
2017 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
2021 if (!sv_gets(sv, fp, offset)
2023 || SNARF_EOF(gimme, PL_rs, io, sv)
2024 || PerlIO_error(fp)))
2026 PerlIO_clearerr(fp);
2027 if (IoFLAGS(io) & IOf_ARGV) {
2028 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
2031 (void)do_close(PL_last_in_gv, FALSE);
2033 else if (type == OP_GLOB) {
2034 if (!do_close(PL_last_in_gv, FALSE)) {
2035 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
2036 "glob failed (child exited with status %d%s)",
2037 (int)(STATUS_CURRENT >> 8),
2038 (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
2041 if (gimme == G_SCALAR) {
2042 if (type != OP_RCATLINE) {
2043 SV_CHECK_THINKFIRST_COW_DROP(TARG);
2049 MAYBE_TAINT_LINE(io, sv);
2052 MAYBE_TAINT_LINE(io, sv);
2054 IoFLAGS(io) |= IOf_NOLINE;
2058 if (type == OP_GLOB) {
2062 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
2063 char * const tmps = SvEND(sv) - 1;
2064 if (*tmps == *SvPVX_const(PL_rs)) {
2066 SvCUR_set(sv, SvCUR(sv) - 1);
2069 for (t1 = SvPVX_const(sv); *t1; t1++)
2071 if (strchr("*%?", *t1))
2073 if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
2076 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
2077 (void)POPs; /* Unmatched wildcard? Chuck it... */
2080 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
2081 if (ckWARN(WARN_UTF8)) {
2082 const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
2083 const STRLEN len = SvCUR(sv) - offset;
2086 if (!is_utf8_string_loc(s, len, &f))
2087 /* Emulate :encoding(utf8) warning in the same case. */
2088 Perl_warner(aTHX_ packWARN(WARN_UTF8),
2089 "utf8 \"\\x%02X\" does not map to Unicode",
2090 f < (U8*)SvEND(sv) ? *f : 0);
2093 if (gimme == G_ARRAY) {
2094 if (SvLEN(sv) - SvCUR(sv) > 20) {
2095 SvPV_shrink_to_cur(sv);
2097 sv = sv_2mortal(newSV(80));
2100 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
2101 /* try to reclaim a bit of scalar space (only on 1st alloc) */
2102 const STRLEN new_len
2103 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */
2104 SvPV_renew(sv, new_len);
/* pp_helem fragment (elided excerpt): fetch (or lvalue-fetch) one hash
 * element.  Pops key and hash from the stack; handles lvalue context
 * (vivify, or return a deferred PVLV element), local() via
 * save_helem/SAVEHDELETE, and get-magic on tied hashes.
 * NOTE(review): the PP(pp_helem) header and several interior lines
 * (braces, RETURN paths) are missing from this excerpt; the comments
 * below describe only the visible lines. */
2115 SV * const keysv = POPs;
2116 HV * const hv = MUTABLE_HV(POPs);
/* lval: element is used as an lvalue (assignment target, foreach alias…) */
2117 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
/* defer: don't vivify now; hand back a magical deferred element instead */
2118 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
/* localizing: this is C<local $h{key}> */
2120 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2121 bool preeminent = TRUE; /* did the element exist before this op ran? */
2123 if (SvTYPE(hv) != SVt_PVHV)
2130 /* If we can determine whether the element exists,
2131 * try to preserve the existence status of a tied hash
2132 * element by using EXISTS and DELETE if possible.
2133 * Fall back to FETCH and STORE otherwise. */
2134 if (SvCANEXISTDELETE(hv))
2135 preeminent = hv_exists_ent(hv, keysv, 0);
/* vivify only for a non-deferred lvalue fetch */
2138 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2139 svp = he ? &HeVAL(he) : NULL;
/* element missing (or placeholder) */
2141 if (!svp || !*svp || *svp == &PL_sv_undef) {
/* presumably the non-deferred lvalue failure path: die via the
 * PL_no_helem_sv message — TODO confirm against the elided guard */
2145 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
/* deferred lvalue: build a PVLV that remembers hv + key and only
 * creates the real element if/when it is actually assigned to */
2147 lv = sv_newmortal();
2148 sv_upgrade(lv, SVt_PVLV);
2150 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
2151 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
2152 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
/* local $h{key}: pick the matching save/restore mechanism */
2158 if (HvNAME_get(hv) && isGV(*svp))
2159 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
2160 else if (preeminent)
2161 save_helem_flags(hv, keysv, svp,
2162 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
/* element didn't exist before: restore by deleting it on scope exit */
2164 SAVEHDELETE(hv, keysv);
2166 else if (PL_op->op_private & OPpDEREF) {
/* $h{key}[...] / $h{key}{...}: auto-vivify the element to a reference */
2167 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
2171 sv = (svp && *svp ? *svp : &PL_sv_undef);
2172 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
2173 * was to make C<local $tied{foo} = $tied{foo}> possible.
2174 * However, it seems no longer to be needed for that purpose, and
2175 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g>
2176 * would loop endlessly since the pos magic is getting set on the
2177 * mortal copy and lost. However, the copy has the effect of
2178 * triggering the get magic, and losing it altogether made things like
2179 * C<$tied{foo};> in void context no longer do get magic, which some
2180 * code relied on. Also, delayed triggering of magic on @+ and friends
2181 * meant the original regex may be out of scope by now. So as a
2182 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
2183 * being called too many times). */
2184 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
2191 /* a stripped-down version of Perl_softref2xv() for use by
2192 * pp_multideref(), which doesn't use PL_op->op_flags */
/* Resolve a symbolic reference: treat the string in sv as the name of a
 * package variable and fetch (creating if necessary) the corresponding
 * GV.  'what' names the wanted aggregate for diagnostics, e.g.
 * "an ARRAY".  Dies under C<use strict 'refs'> and for unusable names.
 * NOTE(review): the return-type line, the tail of the parameter list
 * (where 'type' is declared) and the guard conditions are elided from
 * this excerpt; comments hedge accordingly. */
2195 S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
2198 if (PL_op->op_private & HINT_STRICT_REFS) {
/* defined name under strict refs: die via PL_no_symref_sv; an "..."
 * ellipsis argument is supplied for names longer than 32 chars */
2200 Perl_die(aTHX_ PL_no_symref_sv, sv,
2201 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
/* presumably the !SvOK branch under strict refs — TODO confirm */
2203 Perl_die(aTHX_ PL_no_usym, what);
/* same diagnostic for a name unusable even without strict refs */
2206 Perl_die(aTHX_ PL_no_usym, what);
/* GV_ADD autovivifies the glob; the _nomg variant skips get-magic */
2207 return gv_fetchsv_nomg(sv, GV_ADD, type);
2211 /* Handle one or more aggregate derefs and array/hash indexings, e.g.
2212 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
2214 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
2215 * Each of these either contains a set of actions, or an argument, such as
2216 * an IV to use as an array index, or a lexical var to retrieve.
2217 * Several actions are stored per UV; we keep shifting new actions off the
2218 * one UV, and only reload when it becomes zero.
2223 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
2224 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
2225 UV actions = items->uv;
2228 /* this tells find_uninit_var() where we're up to */
2229 PL_multideref_pc = items;
2232 /* there are three main classes of action; the first retrieve
2233 * the initial AV or HV from a variable or the stack; the second
2234 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
2235 * the third an unrolled (/DREFHV, rv2hv, helem).
2237 switch (actions & MDEREF_ACTION_MASK) {
2240 actions = (++items)->uv;
2243 case MDEREF_AV_padav_aelem: /* $lex[...] */
2244 sv = PAD_SVl((++items)->pad_offset);
2247 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
2248 sv = UNOP_AUX_item_sv(++items);
2249 assert(isGV_with_GP(sv));
2250 sv = (SV*)GvAVn((GV*)sv);
2253 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
2258 goto do_AV_rv2av_aelem;
2261 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
2262 sv = UNOP_AUX_item_sv(++items);
2263 assert(isGV_with_GP(sv));
2264 sv = GvSVn((GV*)sv);
2265 goto do_AV_vivify_rv2av_aelem;
2267 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
2268 sv = PAD_SVl((++items)->pad_offset);
2271 do_AV_vivify_rv2av_aelem:
2272 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
2273 /* this is the OPpDEREF action normally found at the end of
2274 * ops like aelem, helem, rv2sv */
2275 sv = vivify_ref(sv, OPpDEREF_AV);
2279 /* this is basically a copy of pp_rv2av when it just has the
2282 if (LIKELY(SvROK(sv))) {
2283 if (UNLIKELY(SvAMAGIC(sv))) {
2284 sv = amagic_deref_call(sv, to_av_amg);
2287 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
2288 DIE(aTHX_ "Not an ARRAY reference");
2290 else if (SvTYPE(sv) != SVt_PVAV) {
2291 if (!isGV_with_GP(sv))
2292 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
2293 sv = MUTABLE_SV(GvAVn((GV*)sv));
2299 /* retrieve the key; this may be either a lexical or package
2300 * var (whose index/ptr is stored as an item) or a signed
2301 * integer constant stored as an item.
2304 IV elem = 0; /* to shut up stupid compiler warnings */
2307 assert(SvTYPE(sv) == SVt_PVAV);
2309 switch (actions & MDEREF_INDEX_MASK) {
2310 case MDEREF_INDEX_none:
2312 case MDEREF_INDEX_const:
2313 elem = (++items)->iv;
2315 case MDEREF_INDEX_padsv:
2316 elemsv = PAD_SVl((++items)->pad_offset);
2318 case MDEREF_INDEX_gvsv:
2319 elemsv = UNOP_AUX_item_sv(++items);
2320 assert(isGV_with_GP(elemsv));
2321 elemsv = GvSVn((GV*)elemsv);
2323 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
2324 && ckWARN(WARN_MISC)))
2325 Perl_warner(aTHX_ packWARN(WARN_MISC),
2326 "Use of reference \"%"SVf"\" as array index",
2328 /* the only time that S_find_uninit_var() needs this
2329 * is to determine which index value triggered the
2330 * undef warning. So just update it here. Note that
2331 * since we don't save and restore this var (e.g. for
2332 * tie or overload execution), its value will be
2333 * meaningless apart from just here */
2334 PL_multideref_pc = items;
2335 elem = SvIV(elemsv);
2340 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
2342 if (!(actions & MDEREF_FLAG_last)) {
2343 SV** svp = av_fetch((AV*)sv, elem, 1);
2344 if (!svp || ! (sv=*svp))
2345 DIE(aTHX_ PL_no_aelem, elem);
2349 if (PL_op->op_private &
2350 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2352 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2353 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
2356 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2357 sv = av_delete((AV*)sv, elem, discard);
2365 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2366 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2367 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2368 bool preeminent = TRUE;
2369 AV *const av = (AV*)sv;
2372 if (UNLIKELY(localizing)) {
2376 /* If we can determine whether the element exists,
2377 * try to preserve the existence status of a tied array
2378 * element by using EXISTS and DELETE if possible.
2379 * Fall back to FETCH and STORE otherwise. */
2380 if (SvCANEXISTDELETE(av))
2381 preeminent = av_exists(av, elem);
2384 svp = av_fetch(av, elem, lval && !defer);
2387 if (!svp || !(sv = *svp)) {
2390 DIE(aTHX_ PL_no_aelem, elem);
2391 len = av_tindex(av);
2392 sv = sv_2mortal(newSVavdefelem(av,
2393 /* Resolve a negative index now, unless it points
2394 * before the beginning of the array, in which
2395 * case record it for error reporting in
2396 * magic_setdefelem. */
2397 elem < 0 && len + elem >= 0
2398 ? len + elem : elem, 1));
2401 if (UNLIKELY(localizing)) {
2403 save_aelem(av, elem, svp);
2404 sv = *svp; /* may have changed */
2407 SAVEADELETE(av, elem);
2412 sv = (svp ? *svp : &PL_sv_undef);
2413 /* see note in pp_helem() */
2414 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
2431 case MDEREF_HV_padhv_helem: /* $lex{...} */
2432 sv = PAD_SVl((++items)->pad_offset);
2435 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
2436 sv = UNOP_AUX_item_sv(++items);
2437 assert(isGV_with_GP(sv));
2438 sv = (SV*)GvHVn((GV*)sv);
2441 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
2446 goto do_HV_rv2hv_helem;
2449 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
2450 sv = UNOP_AUX_item_sv(++items);
2451 assert(isGV_with_GP(sv));
2452 sv = GvSVn((GV*)sv);
2453 goto do_HV_vivify_rv2hv_helem;
2455 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
2456 sv = PAD_SVl((++items)->pad_offset);
2459 do_HV_vivify_rv2hv_helem:
2460 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
2461 /* this is the OPpDEREF action normally found at the end of
2462 * ops like aelem, helem, rv2sv */
2463 sv = vivify_ref(sv, OPpDEREF_HV);
2467 /* this is basically a copy of pp_rv2hv when it just has the
2468 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
2471 if (LIKELY(SvROK(sv))) {
2472 if (UNLIKELY(SvAMAGIC(sv))) {
2473 sv = amagic_deref_call(sv, to_hv_amg);
2476 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
2477 DIE(aTHX_ "Not a HASH reference");
2479 else if (SvTYPE(sv) != SVt_PVHV) {
2480 if (!isGV_with_GP(sv))
2481 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
2482 sv = MUTABLE_SV(GvHVn((GV*)sv));
2488 /* retrieve the key; this may be either a lexical / package
2489 * var or a string constant, whose index/ptr is stored as an
2492 SV *keysv = NULL; /* to shut up stupid compiler warnings */
2494 assert(SvTYPE(sv) == SVt_PVHV);
2496 switch (actions & MDEREF_INDEX_MASK) {
2497 case MDEREF_INDEX_none:
2500 case MDEREF_INDEX_const:
2501 keysv = UNOP_AUX_item_sv(++items);
2504 case MDEREF_INDEX_padsv:
2505 keysv = PAD_SVl((++items)->pad_offset);
2508 case MDEREF_INDEX_gvsv:
2509 keysv = UNOP_AUX_item_sv(++items);
2510 keysv = GvSVn((GV*)keysv);
2514 /* see comment above about setting this var */
2515 PL_multideref_pc = items;
2518 /* ensure that candidate CONSTs have been HEKified */
2519 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
2520 || SvTYPE(keysv) >= SVt_PVMG
2523 || SvIsCOW_shared_hash(keysv));
2525 /* this is basically a copy of pp_helem with OPpDEREF skipped */
2527 if (!(actions & MDEREF_FLAG_last)) {
2528 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
2529 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
2530 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2534 if (PL_op->op_private &
2535 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2537 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2538 sv = hv_exists_ent((HV*)sv, keysv, 0)
2539 ? &PL_sv_yes : &PL_sv_no;
2542 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2543 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
2551 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2552 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2553 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2554 bool preeminent = TRUE;
2556 HV * const hv = (HV*)sv;
2559 if (UNLIKELY(localizing)) {
2563 /* If we can determine whether the element exists,
2564 * try to preserve the existence status of a tied hash
2565 * element by using EXISTS and DELETE if possible.
2566 * Fall back to FETCH and STORE otherwise. */
2567 if (SvCANEXISTDELETE(hv))
2568 preeminent = hv_exists_ent(hv, keysv, 0);
2571 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2572 svp = he ? &HeVAL(he) : NULL;
2576 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
2580 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2581 lv = sv_newmortal();
2582 sv_upgrade(lv, SVt_PVLV);
2584 sv_magic(lv, key2 = newSVsv(keysv),
2585 PERL_MAGIC_defelem, NULL, 0);
2586 /* sv_magic() increments refcount */
2587 SvREFCNT_dec_NN(key2);
2588 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2594 if (HvNAME_get(hv) && isGV(sv))
2595 save_gp(MUTABLE_GV(sv),
2596 !(PL_op->op_flags & OPf_SPECIAL));
2597 else if (preeminent) {
2598 save_helem_flags(hv, keysv, svp,
2599 (PL_op->op_flags & OPf_SPECIAL)
2600 ? 0 : SAVEf_SETMAGIC);
2601 sv = *svp; /* may have changed */
2604 SAVEHDELETE(hv, keysv);
2609 sv = (svp && *svp ? *svp : &PL_sv_undef);
2610 /* see note in pp_helem() */
2611 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
2620 actions >>= MDEREF_SHIFT;
/* pp_iter fragment (heavily elided excerpt): advance a C<foreach> loop
 * iterator.  itersvp points at the loop variable's SV slot (GV or pad);
 * each switch case computes the next value for one loop flavour
 * (string range, integer range, literal list, array) and either reuses
 * the old SV when unshared (the RC==1 optimisation) or installs a fresh
 * SV so closures capture distinct values.
 * NOTE(review): the PP(pp_iter) header, many closing braces and the
 * assignments to retsv are elided from this excerpt; the comments below
 * describe only the visible lines. */
2639 itersvp = CxITERVAR(cx);
2642 switch (CxTYPE(cx)) {
2644 case CXt_LOOP_LAZYSV: /* string increment */
2646 SV* cur = cx->blk_loop.state_u.lazysv.cur;
2647 SV *end = cx->blk_loop.state_u.lazysv.end;
2648 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
2649 It has SvPVX of "" and SvCUR of 0, which is what we want. */
2651 const char *max = SvPV_const(end, maxlen);
/* cur numeric, or already longer than the end string: iteration done */
2652 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
2656 /* NB: on the first iteration, oldsv will have a ref count of at
2657 * least 2 (one extra from blk_loop.itersave), so the GV or pad
2658 * slot will get localised; on subsequent iterations the RC==1
2659 * optimisation may kick in and the SV will be reused. */
2660 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2661 /* safe to reuse old SV */
2662 sv_setsv(oldsv, cur);
2666 /* we need a fresh SV every time so that loop body sees a
2667 * completely new SV for closures/references to work as
2669 *itersvp = newSVsv(cur);
2670 SvREFCNT_dec(oldsv);
/* reached the end string: arrange for termination on the next call */
2672 if (strEQ(SvPVX_const(cur), max))
2673 sv_setiv(cur, 0); /* terminate next time */
2679 case CXt_LOOP_LAZYIV: /* integer increment */
2681 IV cur = cx->blk_loop.state_u.lazyiv.cur;
2682 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
2686 /* see NB comment above */
2687 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2688 /* safe to reuse old SV */
2690 if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV))
2693 /* Cheap SvIOK_only().
2694 * Assert that flags which SvIOK_only() would test or
2695 * clear can't be set, because we're SVt_IV */
2696 assert(!(SvFLAGS(oldsv) &
2697 (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK)))));
2698 SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK);
2699 /* SvIV_set() where sv_any points to head */
2700 oldsv->sv_u.svu_iv = cur;
2704 sv_setiv(oldsv, cur);
2708 /* we need a fresh SV every time so that loop body sees a
2709 * completely new SV for closures/references to work as they
2711 *itersvp = newSViv(cur);
2712 SvREFCNT_dec(oldsv);
2715 if (UNLIKELY(cur == IV_MAX)) {
2716 /* Handle end of range at IV_MAX */
2717 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
2719 ++cx->blk_loop.state_u.lazyiv.cur;
2723 case CXt_LOOP_LIST: /* for (1,2,3) */
2725 assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */
2726 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
2727 ix = (cx->blk_loop.state_u.stack.ix += inc);
2728 if (UNLIKELY(inc > 0
2729 ? ix > cx->blk_oldsp
2730 : ix <= cx->blk_loop.state_u.stack.basesp)
/* iterate directly over the SVs left on the stack by pp_enteriter */
2734 sv = PL_stack_base[ix];
2736 goto loop_ary_common;
2738 case CXt_LOOP_ARY: /* for (@ary) */
2740 av = cx->blk_loop.state_u.ary.ary;
2741 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
2742 ix = (cx->blk_loop.state_u.ary.ix += inc);
2743 if (UNLIKELY(inc > 0
/* tied/magical arrays must go through av_fetch() */
2749 if (UNLIKELY(SvRMAGICAL(av))) {
2750 SV * const * const svp = av_fetch(av, ix, FALSE);
2751 sv = svp ? *svp : NULL;
2754 sv = AvARRAY(av)[ix];
/* CXp_FOR_LVREF: presumably the C<foreach my $x> ref-aliasing form;
 * copy into the existing slot rather than aliasing — TODO confirm */
2759 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
2760 SvSetMagicSV(*itersvp, sv);
/* guard against the loop body freeing the element out from under us */
2765 if (UNLIKELY(SvIS_FREED(sv))) {
2767 Perl_croak(aTHX_ "Use of freed value in iteration");
2774 SvREFCNT_inc_simple_void_NN(sv);
/* nonexistent element: alias a deferred-element proxy for the slot */
2778 sv = newSVavdefelem(av, ix, 0);
2785 SvREFCNT_dec(oldsv);
2789 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
2797 /* pp_enteriter should have pre-extended the stack */
2798 assert(PL_stack_sp < PL_stack_max);
/* push the result; retsv is assigned in code elided from this excerpt */
2799 *++PL_stack_sp =retsv;
2801 return PL_op->op_next;
2805 A description of how taint works in pattern matching and substitution.
2807 This is all conditional on NO_TAINT_SUPPORT not being defined. Under
2808 NO_TAINT_SUPPORT, taint-related operations should become no-ops.
2810 While the pattern is being assembled/concatenated and then compiled,
2811 PL_tainted will get set (via TAINT_set) if any component of the pattern
2812 is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
2813 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
2814 TAINT_get). It will also be set if any component of the pattern matches
2815 based on locale-dependent behavior.
2817 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
2818 the pattern is marked as tainted. This means that subsequent usage, such
2819 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
2820 on the new pattern too.
2822 RXf_TAINTED_SEEN is used post-execution by the get magic code
2823 of $1 et al to indicate whether the returned value should be tainted.
2824 It is the responsibility of the caller of the pattern (i.e. pp_match,
2825 pp_subst etc) to set this flag for any other circumstances where $1 needs
2828 The taint behaviour of pp_subst (and pp_substcont) is quite complex.
2830 There are three possible sources of taint
2832 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
2833 * the replacement string (or expression under /e)
2835 There are four destinations of taint and they are affected by the sources
2836 according to the rules below:
2838 * the return value (not including /r):
2839 tainted by the source string and pattern, but only for the
2840 number-of-iterations case; boolean returns aren't tainted;
2841 * the modified string (or modified copy under /r):
2842 tainted by the source string, pattern, and replacement strings;
2844 tainted by the pattern, and under 'use re "taint"', by the source
2846 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
2847 should always be unset before executing subsequent code.
2849 The overall action of pp_subst is:
2851 * at the start, set bits in rxtainted indicating the taint status of
2852 the various sources.
2854 * After each pattern execution, update the SUBST_TAINT_PAT bit in
2855 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
2856 pattern has subsequently become tainted via locale ops.
2858 * If control is being passed to pp_substcont to execute a /e block,
2859 save rxtainted in the CXt_SUBST block, for future use by
2862 * Whenever control is being returned to perl code (either by falling
2863 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
2864 use the flag bits in rxtainted to make all the appropriate types of
/* NOTE(review): fragmentary extract of pp_subst, the s/// operator.
 * Many original lines are missing from this view (the leading numbers are
 * original source line numbers, and they jump); the comments added below
 * describe only what is visible here — confirm against the full pp_hot.c. */
2865 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
2866 et al will appear tainted.
2868 pp_match is just a simpler version of the above.
2884 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
2885 See "how taint works" above */
2888 REGEXP *rx = PM_GETRE(pm);
2890 int force_on_match = 0;
2891 const I32 oldsave = PL_savestack_ix;
2893 bool doutf8 = FALSE; /* whether replacement is in utf8 */
2898 /* known replacement string? */
2899 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
2903 if (PL_op->op_flags & OPf_STACKED)
2912 SvGETMAGIC(TARG); /* must come before cow check */
2914 /* note that a string might get converted to COW during matching */
2915 was_cow = cBOOL(SvIsCOW(TARG));
/* refuse to modify read-only / glob-ish targets unless this is r/// */
2917 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
2918 #ifndef PERL_ANY_COW
2920 sv_force_normal_flags(TARG,0);
2922 if ((SvREADONLY(TARG)
2923 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
2924 || SvTYPE(TARG) > SVt_PVLV)
2925 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
2926 Perl_croak_no_modify();
2930 orig = SvPV_nomg(TARG, len);
2931 /* note we don't (yet) force the var into being a string; if we fail
2932 * to match, we leave as-is; on successful match however, we *will*
2933 * coerce into a string, then repeat the match */
2934 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
2937 /* only replace once? */
2938 once = !(rpm->op_pmflags & PMf_GLOBAL);
2940 /* See "how taint works" above */
2943 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
2944 | (RX_ISTAINTED(rx) ? SUBST_TAINT_PAT : 0)
2945 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
2946 | ((once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
2947 ? SUBST_TAINT_BOOLRET : 0));
2953 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
2955 strend = orig + len;
/* maxiters bounds the /g loop below: at most two matches per character
 * position (one zero-length, one non-zero), guarding "Substitution loop" */
2956 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len;
2957 maxiters = 2 * slen + 10; /* We can match twice at each
2958 position, once with zero-length,
2959 second time with non-zero. */
2961 if (!RX_PRELEN(rx) && PL_curpm
2962 && !ReANY(rx)->mother_re) {
2967 #ifdef PERL_SAWAMPERSAND
2968 r_flags = ( RX_NPARENS(rx)
2970 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
2971 || (rpm->op_pmflags & PMf_KEEPCOPY)
2976 r_flags = REXEC_COPY_STR;
/* first match attempt; on failure push false (or TARG for r///) and bail */
2979 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
2982 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
2983 LEAVE_SCOPE(oldsave);
2988 /* known replacement string? */
2990 /* replacement needing upgrading? */
2991 if (DO_UTF8(TARG) && !doutf8) {
2992 nsv = sv_newmortal();
2995 sv_recode_to_utf8(nsv, _get_encoding());
2997 sv_utf8_upgrade(nsv);
2998 c = SvPV_const(nsv, clen);
3002 c = SvPV_const(dstr, clen);
3003 doutf8 = DO_UTF8(dstr);
3006 if (SvTAINTED(dstr))
3007 rxtainted |= SUBST_TAINT_REPL;
3014 /* can do inplace substitution? */
3019 && (I32)clen <= RX_MINLENRET(rx)
3021 || !(r_flags & REXEC_COPY_STR)
3022 || (!SvGMAGICAL(dstr) && !(RX_EXTFLAGS(rx) & RXf_EVAL_SEEN))
3024 && !(RX_EXTFLAGS(rx) & RXf_NO_INPLACE_SUBST)
3025 && (!doutf8 || SvUTF8(TARG))
3026 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
3030 /* string might have got converted to COW since we set was_cow */
3031 if (SvIsCOW(TARG)) {
3032 if (!force_on_match)
3034 assert(SvVOK(TARG));
3037 if (force_on_match) {
3038 /* redo the first match, this time with the orig var
3039 * forced into being a string */
3041 orig = SvPV_force_nomg(TARG, len);
3047 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3048 rxtainted |= SUBST_TAINT_PAT;
/* in-place single substitution: shuffle bytes with Move/Copy directly */
3049 m = orig + RX_OFFS(rx)[0].start;
3050 d = orig + RX_OFFS(rx)[0].end;
3052 if (m - s > strend - d) { /* faster to shorten from end */
3055 Copy(c, m, clen, char);
3060 Move(d, m, i, char);
3064 SvCUR_set(TARG, m - s);
3066 else { /* faster from front */
3070 Move(s, d - i, i, char);
3073 Copy(c, d, clen, char);
3080 d = s = RX_OFFS(rx)[0].start + orig;
/* in-place /g loop: repeat until CALLREGEXEC fails */
3083 if (UNLIKELY(iters++ > maxiters))
3084 DIE(aTHX_ "Substitution loop");
3085 if (UNLIKELY(RX_MATCH_TAINTED(rx))) /* run time pattern taint, eg locale */
3086 rxtainted |= SUBST_TAINT_PAT;
3087 m = RX_OFFS(rx)[0].start + orig;
3090 Move(s, d, i, char);
3094 Copy(c, d, clen, char);
3097 s = RX_OFFS(rx)[0].end + orig;
3098 } while (CALLREGEXEC(rx, s, strend, orig,
3099 s == m, /* don't match same null twice */
3101 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3104 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
3105 Move(s, d, i+1, char); /* include the NUL */
/* non-in-place path: build the result in a fresh dstr SV */
3115 if (force_on_match) {
3116 /* redo the first match, this time with the orig var
3117 * forced into being a string */
3119 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3120 /* I feel that it should be possible to avoid this mortal copy
3121 given that the code below copies into a new destination.
3122 However, I suspect it isn't worth the complexity of
3123 unravelling the C<goto force_it> for the small number of
3124 cases where it would be viable to drop into the copy code. */
3125 TARG = sv_2mortal(newSVsv(TARG));
3127 orig = SvPV_force_nomg(TARG, len);
3133 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3134 rxtainted |= SUBST_TAINT_PAT;
3136 s = RX_OFFS(rx)[0].start + orig;
3137 dstr = newSVpvn_flags(orig, s-orig,
3138 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
3143 /* note that a whole bunch of local vars are saved here for
3144 * use by pp_substcont: here's a list of them in case you're
3145 * searching for places in this sub that uses a particular var:
3146 * iters maxiters r_flags oldsave rxtainted orig dstr targ
3147 * s m strend rx once */
3149 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
3153 if (UNLIKELY(iters++ > maxiters))
3154 DIE(aTHX_ "Substitution loop");
3155 if (UNLIKELY(RX_MATCH_TAINTED(rx)))
3156 rxtainted |= SUBST_TAINT_PAT;
/* the regexp engine may have copied the subject; rebase our pointers */
3157 if (RX_MATCH_COPIED(rx) && RX_SUBBEG(rx) != orig) {
3159 char *old_orig = orig;
3160 assert(RX_SUBOFFSET(rx) == 0);
3162 orig = RX_SUBBEG(rx);
3163 s = orig + (old_s - old_orig);
3164 strend = s + (strend - old_s);
3166 m = RX_OFFS(rx)[0].start + orig;
3167 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
3168 s = RX_OFFS(rx)[0].end + orig;
3170 /* replacement already stringified */
3172 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
3177 if (!nsv) nsv = sv_newmortal();
3178 sv_copypv(nsv, repl);
3179 if (!DO_UTF8(nsv)) sv_recode_to_utf8(nsv, _get_encoding());
3180 sv_catsv(dstr, nsv);
3182 else sv_catsv(dstr, repl);
3183 if (UNLIKELY(SvTAINTED(repl)))
3184 rxtainted |= SUBST_TAINT_REPL;
3188 } while (CALLREGEXEC(rx, s, strend, orig,
3189 s == m, /* Yields minend of 0 or 1 */
3191 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3192 assert(strend >= s);
3193 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
3195 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3196 /* From here on down we're using the copy, and leaving the original
3203 /* The match may make the string COW. If so, brilliant, because
3204 that's just saved us one malloc, copy and free - the regexp has
3205 donated the old buffer, and we malloc an entirely new one, rather
3206 than the regexp malloc()ing a buffer and copying our original,
3207 only for us to throw it away here during the substitution. */
3208 if (SvIsCOW(TARG)) {
3209 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
/* steal dstr's string buffer into TARG rather than copying it */
3215 SvPV_set(TARG, SvPVX(dstr));
3216 SvCUR_set(TARG, SvCUR(dstr));
3217 SvLEN_set(TARG, SvLEN(dstr));
3218 SvFLAGS(TARG) |= SvUTF8(dstr);
3219 SvPV_set(dstr, NULL);
3226 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3227 (void)SvPOK_only_UTF8(TARG);
3230 /* See "how taint works" above */
3232 if ((rxtainted & SUBST_TAINT_PAT) ||
3233 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
3234 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
3236 (RX_MATCH_TAINTED_on(rx)); /* taint $1 et al */
3238 if (!(rxtainted & SUBST_TAINT_BOOLRET)
3239 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
3241 SvTAINTED_on(TOPs); /* taint return value */
3243 SvTAINTED_off(TOPs); /* may have got tainted earlier */
3245 /* needed for mg_set below */
3247 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
3251 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
3253 LEAVE_SCOPE(oldsave);
/* NOTE(review): fragmentary extract of pp_grepwhile (the per-item op of
 * grep BLOCK, LIST). Visible logic: keep/advance the dst mark, and when the
 * src mark passes SP the grep is finished — pop the three marks and return
 * the count (scalar) or kept items (list); otherwise re-enter the item
 * scope and run the block again via cLOGOP->op_other. Lines are missing. */
3262 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
3263 ++*PL_markstack_ptr;
3265 LEAVE_with_name("grep_item"); /* exit inner scope */
3268 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
3270 const U8 gimme = GIMME_V;
3272 LEAVE_with_name("grep"); /* exit outer scope */
3273 (void)POPMARK; /* pop src */
3274 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
3275 (void)POPMARK; /* pop dst */
3276 SP = PL_stack_base + POPMARK; /* pop original mark */
3277 if (gimme == G_SCALAR) {
3281 else if (gimme == G_ARRAY)
3288 ENTER_with_name("grep_item"); /* enter inner scope */
/* PADTMP source values must be copied: the pad slot may be reused */
3291 src = PL_stack_base[TOPMARK];
3292 if (SvPADTMP(src)) {
3293 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
3299 RETURNOP(cLOGOP->op_other);
/* NOTE(review): fragmentary extract of Perl_leave_adjust_stacks(); the
 * original block comment below already documents the design in detail.
 * Interior lines are missing from this view, so the added notes describe
 * only what is visible. */
3303 /* leave_adjust_stacks():
3305 * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp),
3306 * positioning them at to_sp+1 onwards, and do the equivalent of a
3307 * FREEMPS and TAINT_NOT.
3309 * Not intended to be called in void context.
3311 * When leaving a sub, eval, do{} or other scope, the things that need
3312 * doing to process the return args are:
3313 * * in scalar context, only return the last arg (or PL_sv_undef if none);
3314 * * for the types of return that return copies of their args (such
3315 * as rvalue sub return), make a mortal copy of every return arg,
3316 * except where we can optimise the copy away without it being
3317 * semantically visible;
3318 * * make sure that the arg isn't prematurely freed; in the case of an
3319 * arg not copied, this may involve mortalising it. For example, in
3320 * C<sub f { my $x = ...; $x }>, $x would be freed when we do
3321 * CX_LEAVE_SCOPE(cx) unless it's protected or copied.
3323 * What condition to use when deciding whether to pass the arg through
3324 * or make a copy, is determined by the 'pass' arg; its valid values are:
3325 * 0: rvalue sub/eval exit
3326 * 1: other rvalue scope exit
3327 * 2: :lvalue sub exit in rvalue context
3328 * 3: :lvalue sub exit in lvalue context and other lvalue scope exits
3330 * There is a big issue with doing a FREETMPS. We would like to free any
3331 * temps created by the last statement which the sub executed, rather than
3332 * leaving them for the caller. In a situation where a sub call isn't
3333 * soon followed by a nextstate (e.g. nested recursive calls, a la
3334 * fibonacci()), temps can accumulate, causing memory and performance
3337 * On the other hand, we don't want to free any TEMPs which are keeping
3338 * alive any return args that we skipped copying; nor do we wish to undo
3339 * any mortalising done here.
3341 * The solution is to split the temps stack frame into two, with a cut
3342 * point delineating the two halves. We arrange that by the end of this
3343 * function, all the temps stack frame entries we wish to keep are in the
3344 * range PL_tmps_floor+1.. tmps_base-1, while the ones to free now are in
3345 * the range tmps_base .. PL_tmps_ix. During the course of this
3346 * function, tmps_base starts off as PL_tmps_floor+1, then increases
3347 * whenever we find or create a temp that we know should be kept. In
3348 * general the stuff above tmps_base is undecided until we reach the end,
3349 * and we may need a sort stage for that.
3351 * To determine whether a TEMP is keeping a return arg alive, every
3352 * arg that is kept rather than copied and which has the SvTEMP flag
3353 * set, has the flag temporarily unset, to mark it. At the end we scan
3354 * the temps stack frame above the cut for entries without SvTEMP and
3355 * keep them, while turning SvTEMP on again. Note that if we die before
3356 * the SvTEMPs flags are set again, its safe: at worst, subsequent use of
3357 * those SVs may be slightly less efficient.
3359 * In practice various optimisations for some common cases mean we can
3360 * avoid most of the scanning and swapping about with the temps stack.
3364 Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass)
3368 SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
3371 PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS;
3375 if (gimme == G_ARRAY) {
3376 nargs = SP - from_sp;
3380 assert(gimme == G_SCALAR);
3381 if (UNLIKELY(from_sp >= SP)) {
3382 /* no return args */
3383 assert(from_sp == SP);
3385 *++SP = &PL_sv_undef;
3395 /* common code for G_SCALAR and G_ARRAY */
3397 tmps_base = PL_tmps_floor + 1;
3401 /* pointer version of tmps_base. Not safe across temp stack
3405 EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
3406 tmps_basep = PL_tmps_stack + tmps_base;
3408 /* process each return arg */
3411 SV *sv = *from_sp++;
3413 assert(PL_tmps_ix + nargs < PL_tmps_max);
3415 /* PADTMPs with container set magic shouldn't appear in the
3416 * wild. This assert is more important for pp_leavesublv(),
3417 * but by testing for it here, we're more likely to catch
3418 * bad cases (what with :lvalue subs not being widely
3419 * deployed). The two issues are that for something like
3420 * sub :lvalue { $tied{foo} }
3422 * sub :lvalue { substr($foo,1,2) }
3423 * pp_leavesublv() will croak if the sub returns a PADTMP,
3424 * and currently functions like pp_substr() return a mortal
3425 * rather than using their PADTMP when returning a PVLV.
3426 * This is because the PVLV will hold a ref to $foo,
3427 * so $foo would get delayed in being freed while
3428 * the PADTMP SV remained in the PAD.
3429 * So if this assert fails it means either:
3430 * 1) there is pp code similar to pp_substr that is
3431 * returning a PADTMP instead of a mortal, and probably
3433 * 2) pp_leavesublv is making unwarranted assumptions
3434 * about always croaking on a PADTMP
3436 if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
3438 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
3439 assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
/* per-'pass' predicate deciding pass-through vs mortal copy (see the
 * 0..3 values documented in the block comment above) */
3445 pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3446 : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
3447 : pass == 2 ? (!SvPADTMP(sv))
3450 /* pass through: skip copy for logic or optimisation
3451 * reasons; instead mortalise it, except that ... */
3455 /* ... since this SV is an SvTEMP , we don't need to
3456 * re-mortalise it; instead we just need to ensure
3457 * that its existing entry in the temps stack frame
3458 * ends up below the cut and so avoids being freed
3459 * this time round. We mark it as needing to be kept
3460 * by temporarily unsetting SvTEMP; then at the end,
3461 * we shuffle any !SvTEMP entries on the tmps stack
3462 * back below the cut.
3463 * However, there's a significant chance that there's
3464 * a 1:1 correspondence between the first few (or all)
3465 * elements in the return args stack frame and those
3466 * in the temps stack frame; e,g.:
3467 * sub f { ....; map {...} .... },
3468 * or if we're exiting multiple scopes and one of the
3469 * inner scopes has already made mortal copies of each
3472 * If so, this arg sv will correspond to the next item
3473 * on the tmps stack above the cut, and so can be kept
3474 * merely by moving the cut boundary up one, rather
3475 * than messing with SvTEMP. If all args are 1:1 then
3476 * we can avoid the sorting stage below completely.
3478 * If there are no items above the cut on the tmps
3479 * stack, then the SvTEMP must comne from an item
3480 * below the cut, so there's nothing to do.
3482 if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) {
3483 if (sv == *tmps_basep)
3489 else if (!SvPADTMP(sv)) {
3490 /* mortalise arg to avoid it being freed during save
3491 * stack unwinding. Pad tmps don't need mortalising as
3492 * they're never freed. This is the equivalent of
3493 * sv_2mortal(SvREFCNT_inc(sv)), except that:
3494 * * it assumes that the temps stack has already been
3496 * * it puts the new item at the cut rather than at
3497 * ++PL_tmps_ix, moving the previous occupant there
3500 if (!SvIMMORTAL(sv)) {
3501 SvREFCNT_inc_simple_void_NN(sv);
3503 /* Note that if there's nothing above the cut,
3504 * this copies the garbage one slot above
3505 * PL_tmps_ix onto itself. This is harmless (the
3506 * stack's already been extended), but might in
3507 * theory trigger warnings from tools like ASan
3509 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3515 /* Make a mortal copy of the SV.
3516 * The following code is the equivalent of sv_mortalcopy()
3518 * * it assumes the temps stack has already been extended;
3519 * * it optimises the copying for some simple SV types;
3520 * * it puts the new item at the cut rather than at
3521 * ++PL_tmps_ix, moving the previous occupant there
3524 SV *newsv = newSV(0);
3526 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
3527 /* put it on the tmps stack early so it gets freed if we die */
3528 *tmps_basep++ = newsv;
3531 if (SvTYPE(sv) <= SVt_IV) {
3532 /* arg must be one of undef, IV/UV, or RV: skip
3533 * sv_setsv_flags() and do the copy directly */
3535 U32 srcflags = SvFLAGS(sv);
3537 assert(!SvGMAGICAL(sv));
3538 if (srcflags & (SVf_IOK|SVf_ROK)) {
3539 SET_SVANY_FOR_BODYLESS_IV(newsv);
3541 if (srcflags & SVf_ROK) {
3542 newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
3543 /* SV type plus flags */
3544 dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
3547 /* both src and dst are <= SVt_IV, so sv_any
3548 * points to the head; so access the heads
3549 * directly rather than going via sv_any.
3551 assert( &(sv->sv_u.svu_iv)
3552 == &(((XPVIV*) SvANY(sv))->xiv_iv));
3553 assert( &(newsv->sv_u.svu_iv)
3554 == &(((XPVIV*) SvANY(newsv))->xiv_iv));
3555 newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
3556 /* SV type plus flags */
3557 dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
3558 |(srcflags & SVf_IVisUV));
3562 assert(!(srcflags & SVf_OK));
3563 dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
3565 SvFLAGS(newsv) = dstflags;
3569 /* do the full sv_setsv() */
3573 old_base = tmps_basep - PL_tmps_stack;
3575 sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
3576 /* the mg_get or sv_setsv might have created new temps
3577 * or realloced the tmps stack; regrow and reload */
3578 EXTEND_MORTAL(nargs);
3579 tmps_basep = PL_tmps_stack + old_base;
3580 TAINT_NOT; /* Each item is independent */
3586 /* If there are any temps left above the cut, we need to sort
3587 * them into those to keep and those to free. The only ones to
3588 * keep are those for which we've temporarily unset SvTEMP.
3589 * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
3590 * swapping pairs as necessary. Stop when we meet in the middle.
3593 SV **top = PL_tmps_stack + PL_tmps_ix;
3594 while (tmps_basep <= top) {
3607 tmps_base = tmps_basep - PL_tmps_stack;
3610 PL_stack_sp = to_sp;
3612 /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
3613 while (PL_tmps_ix >= tmps_base) {
3614 SV* const sv = PL_tmps_stack[PL_tmps_ix--];
3616 PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
3620 SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
/* NOTE(review): fragment of what appears to be pp_leavesub (the header is
 * not visible here): pops the CXt_SUB context, adjusts the return stack via
 * leave_adjust_stacks() (pass 0 = rvalue sub exit), and resumes at
 * cx->blk_sub.retop. Confirm against the full source. */
3634 assert(CxTYPE(cx) == CXt_SUB);
3636 if (CxMULTICALL(cx)) {
3637 /* entry zero of a stack is always PL_sv_undef, which
3638 * simplifies converting a '()' return into undef in scalar context */
3639 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
3643 gimme = cx->blk_gimme;
3644 oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */
3646 if (gimme == G_VOID)
3647 PL_stack_sp = oldsp;
3649 leave_adjust_stacks(oldsp, oldsp, gimme, 0);
3652 cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */
3654 retop = cx->blk_sub.retop;
/* NOTE(review): partial extract of Perl_clear_defarray(). Fast path: when
 * @_ is unshared (refcount 1) and unmagical, it is reused; otherwise (or
 * when 'abandon' is set) a fresh AV of the same capacity replaces it in
 * pad slot 0 and the old one is released. Some lines are missing. */
3661 /* clear (if possible) or abandon the current @_. If 'abandon' is true,
3662 * forces an abandon */
3665 Perl_clear_defarray(pTHX_ AV* av, bool abandon)
3667 const SSize_t fill = AvFILLp(av);
3669 PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
3671 if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) {
3676 AV *newav = newAV();
3677 av_extend(newav, fill);
3678 AvREIFY_only(newav);
3679 PAD_SVl(0) = MUTABLE_SV(newav);
3680 SvREFCNT_dec_NN(av);
/* NOTE(review): fragmentary extract of pp_entersub, the sub-call op. The
 * visible flow: resolve the CV from the stacked SV (RV->CV fast path, then
 * GV / string-ref / other fallbacks), handle undefined subs and AUTOLOAD,
 * hook in the debugger's DB::sub, then either push a CXt_SUB context and
 * populate @_ (pure-perl subs) or invoke CvXSUB directly (XSUBs). Many
 * original lines are missing from this view. */
3691 I32 old_savestack_ix;
3696 /* Locate the CV to call:
3697 * - most common case: RV->CV: f(), $ref->():
3698 * note that if a sub is compiled before its caller is compiled,
3699 * the stash entry will be a ref to a CV, rather than being a GV.
3700 * - second most common case: CV: $ref->method()
3703 /* a non-magic-RV -> CV ? */
3704 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
3705 cv = MUTABLE_CV(SvRV(sv));
3706 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
3710 cv = MUTABLE_CV(sv);
3713 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
3714 /* handle all the weird cases */
3715 switch (SvTYPE(sv)) {
3717 if (!isGV_with_GP(sv))
3721 cv = GvCVu((const GV *)sv);
3722 if (UNLIKELY(!cv)) {
3724 cv = sv_2cv(sv, &stash, &gv, 0);
3726 old_savestack_ix = PL_savestack_ix;
3737 if (UNLIKELY(SvAMAGIC(sv))) {
3738 sv = amagic_deref_call(sv, to_cv_amg);
3739 /* Don't SPAGAIN here. */
3745 if (UNLIKELY(!SvOK(sv)))
3746 DIE(aTHX_ PL_no_usym, "a subroutine");
3748 if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
3749 if (PL_op->op_flags & OPf_STACKED) /* hasargs */
3750 SP = PL_stack_base + POPMARK;
3753 if (GIMME_V == G_SCALAR)
3754 PUSHs(&PL_sv_undef);
3758 sym = SvPV_nomg_const(sv, len);
3759 if (PL_op->op_private & HINT_STRICT_REFS)
3760 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
3761 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
3764 cv = MUTABLE_CV(SvRV(sv));
3765 if (LIKELY(SvTYPE(cv) == SVt_PVCV))
3771 DIE(aTHX_ "Not a CODE reference");
3775 /* At this point we want to save PL_savestack_ix, either by doing a
3776 * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final
3777 * CV we will be using (so we don't know whether its XS, so we can't
3778 * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on
3779 * the save stack. So remember where we are currently on the save
3780 * stack, and later update the CX or scopestack entry accordingly. */
3781 old_savestack_ix = PL_savestack_ix;
3783 /* these two fields are in a union. If they ever become separate,
3784 * we have to test for both of them being null below */
3786 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
/* loop until we have a CV with a body (AUTOLOAD may substitute one) */
3787 while (UNLIKELY(!CvROOT(cv))) {
3791 /* anonymous or undef'd function leaves us no recourse */
3792 if (CvLEXICAL(cv) && CvHASGV(cv))
3793 DIE(aTHX_ "Undefined subroutine &%"SVf" called",
3794 SVfARG(cv_name(cv, NULL, 0)));
3795 if (CvANON(cv) || !CvHASGV(cv)) {
3796 DIE(aTHX_ "Undefined subroutine called");
3799 /* autoloaded stub? */
3800 if (cv != GvCV(gv = CvGV(cv))) {
3803 /* should call AUTOLOAD now? */
3806 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
3807 GvNAMEUTF8(gv) ? SVf_UTF8 : 0);
3808 cv = autogv ? GvCV(autogv) : NULL;
3811 sub_name = sv_newmortal();
3812 gv_efullname3(sub_name, gv, NULL);
3813 DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
3817 /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
3818 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
3819 DIE(aTHX_ "Closure prototype called");
3821 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
3824 Perl_get_db_sub(aTHX_ &sv, cv);
3826 PL_curcopdb = PL_curcop;
3828 /* check for lsub that handles lvalue subroutines */
3829 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
3830 /* if lsub not found then fall back to DB::sub */
3831 if (!cv) cv = GvCV(PL_DBsub);
3833 cv = GvCV(PL_DBsub);
3836 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
3837 DIE(aTHX_ "No DB::sub routine defined");
3840 if (!(CvISXSUB(cv))) {
3841 /* This path taken at least 75% of the time */
3848 /* keep PADTMP args alive throughout the call (we need to do this
3849 * because @_ isn't refcounted). Note that we create the mortals
3850 * in the caller's tmps frame, so they won't be freed until after
3851 * we return from the sub.
3860 *svp = sv = sv_mortalcopy(sv);
3866 cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix);
3867 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
3868 cx_pushsub(cx, cv, PL_op->op_next, hasargs);
3870 padlist = CvPADLIST(cv);
3871 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2))
3872 pad_push(padlist, depth);
3873 PAD_SET_CUR_NOSAVE(padlist, depth);
3874 if (LIKELY(hasargs)) {
3875 AV *const av = MUTABLE_AV(PAD_SVl(0));
3879 defavp = &GvAV(PL_defgv);
3880 cx->blk_sub.savearray = *defavp;
3881 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
3883 /* it's the responsibility of whoever leaves a sub to ensure
3884 * that a clean, empty AV is left in pad[0]. This is normally
3885 * done by cx_popsub() */
3886 assert(!AvREAL(av) && AvFILLp(av) == -1);
3889 if (UNLIKELY(items - 1 > AvMAX(av))) {
3890 SV **ary = AvALLOC(av);
3891 AvMAX(av) = items - 1;
3892 Renew(ary, items, SV*);
3897 Copy(MARK+1,AvARRAY(av),items,SV*);
3898 AvFILLp(av) = items - 1;
3900 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3902 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3903 SVfARG(cv_name(cv, NULL, 0)));
3904 /* warning must come *after* we fully set up the context
3905 * stuff so that __WARN__ handlers can safely dounwind()
3908 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
3909 && ckWARN(WARN_RECURSION)
3910 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
3911 sub_crush_depth(cv);
3912 RETURNOP(CvSTART(cv));
/* XSUB path: no CXt_SUB context is pushed; the C body is called directly */
3915 SSize_t markix = TOPMARK;
3919 /* pretend we did the ENTER earlier */
3920 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
3925 if (UNLIKELY(((PL_op->op_private
3926 & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
3927 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3929 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3930 SVfARG(cv_name(cv, NULL, 0)));
3932 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
3933 /* Need to copy @_ to stack. Alternative may be to
3934 * switch stack to @_, and copy return values
3935 * back. This would allow popping @_ in XSUB, e.g.. XXXX */
3936 AV * const av = GvAV(PL_defgv);
3937 const SSize_t items = AvFILL(av) + 1;
3941 const bool m = cBOOL(SvRMAGICAL(av));
3942 /* Mark is at the end of the stack. */
3944 for (; i < items; ++i)
3948 SV ** const svp = av_fetch(av, i, 0);
3949 sv = svp ? *svp : NULL;
3951 else sv = AvARRAY(av)[i];
3952 if (sv) SP[i+1] = sv;
3954 SP[i+1] = newSVavdefelem(av, i, 1);
3962 SV **mark = PL_stack_base + markix;
3963 SSize_t items = SP - mark;
3966 if (*mark && SvPADTMP(*mark)) {
3967 *mark = sv_mortalcopy(*mark);
3971 /* We assume first XSUB in &DB::sub is the called one. */
3972 if (UNLIKELY(PL_curcopdb)) {
3973 SAVEVPTR(PL_curcop);
3974 PL_curcop = PL_curcopdb;
3977 /* Do we need to open block here? XXXX */
3979 /* calculate gimme here as PL_op might get changed and then not
3980 * restored until the LEAVE further down */
3981 is_scalar = (GIMME_V == G_SCALAR);
3983 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
3985 CvXSUB(cv)(aTHX_ cv);
3987 /* Enforce some sanity in scalar context. */
3989 SV **svp = PL_stack_base + markix + 1;
3990 if (svp != PL_stack_sp) {
3991 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
/* NOTE(review): partial extract of Perl_sub_crush_depth(): emits the
 * "Deep recursion" warning, naming the sub when it has a GV (the branch
 * selecting anonymous vs named is partly missing from this view). */
4001 Perl_sub_crush_depth(pTHX_ CV *cv)
4003 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
4006 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
4008 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%"SVf"\"",
4009 SVfARG(cv_name(cv,NULL,0)));
/* NOTE(review): fragmentary extract of pp_aelem ($array[$idx]). Visible
 * logic: pop index and AV, warn on a reference used as an index, handle
 * lvalue/localized/deferred element fetch (defelem SVs for nonexistent
 * lvalue elements), and OPpDEREF autovivification. Lines are missing. */
4017 SV* const elemsv = POPs;
4018 IV elem = SvIV(elemsv);
4019 AV *const av = MUTABLE_AV(POPs);
4020 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
4021 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
4022 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
4023 bool preeminent = TRUE;
4026 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
4027 Perl_warner(aTHX_ packWARN(WARN_MISC),
4028 "Use of reference \"%"SVf"\" as array index",
4030 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
4033 if (UNLIKELY(localizing)) {
4037 /* If we can determine whether the element exist,
4038 * Try to preserve the existenceness of a tied array
4039 * element by using EXISTS and DELETE if possible.
4040 * Fallback to FETCH and STORE otherwise. */
4041 if (SvCANEXISTDELETE(av))
4042 preeminent = av_exists(av, elem);
4045 svp = av_fetch(av, elem, lval && !defer);
4047 #ifdef PERL_MALLOC_WRAP
/* clamp/convert odd index SVs before the malloc-wrap size check */
4048 if (SvUOK(elemsv)) {
4049 const UV uv = SvUV(elemsv);
4050 elem = uv > IV_MAX ? IV_MAX : uv;
4052 else if (SvNOK(elemsv))
4053 elem = (IV)SvNV(elemsv);
4055 static const char oom_array_extend[] =
4056 "Out of memory during array extend"; /* Duplicated in av.c */
4057 MEM_WRAP_CHECK_1(elem,SV*,oom_array_extend);
4060 if (!svp || !*svp) {
4063 DIE(aTHX_ PL_no_aelem, elem);
4064 len = av_tindex(av);
4065 mPUSHs(newSVavdefelem(av,
4066 /* Resolve a negative index now, unless it points before the
4067 beginning of the array, in which case record it for error
4068 reporting in magic_setdefelem. */
4069 elem < 0 && len + elem >= 0 ? len + elem : elem,
4073 if (UNLIKELY(localizing)) {
4075 save_aelem(av, elem, svp);
4077 SAVEADELETE(av, elem);
4079 else if (PL_op->op_private & OPpDEREF) {
4080 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
4084 sv = (svp ? *svp : &PL_sv_undef);
4085 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
/* NOTE(review): partial extract of Perl_vivify_ref(): autovivifies an
 * undef SV into a reference to a new scalar/array/hash per 'to_what'
 * (the switch labels are among the missing lines); croaks on read-only
 * targets, and copies without magic for gmagical SVs at the end. */
4092 Perl_vivify_ref(pTHX_ SV *sv, U32 to_what)
4094 PERL_ARGS_ASSERT_VIVIFY_REF;
4099 Perl_croak_no_modify();
4100 prepare_SV_for_RV(sv);
4103 SvRV_set(sv, newSV(0));
4106 SvRV_set(sv, MUTABLE_SV(newAV()));
4109 SvRV_set(sv, MUTABLE_SV(newHV()));
4116 if (SvGMAGICAL(sv)) {
4117 /* copy the sv without magic to prevent magic from being
4119 SV* msv = sv_newmortal();
4120 sv_setsv_nomg(msv, sv);
/* NOTE(review): partial extract of S_opmethod_stash(): given the method
 * name SV, inspects the invocant (first arg after TOPMARK) and returns the
 * stash to dispatch in — handling shared-hash class names, blessed refs,
 * globs/filehandles (replaced by a reference on the stack), and bare
 * package names. Croaks on undef or unblessed invocants. Lines missing. */
4126 PERL_STATIC_INLINE HV *
4127 S_opmethod_stash(pTHX_ SV* meth)
4132 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
4133 ? (Perl_croak(aTHX_ "Can't call method \"%"SVf"\" without a "
4134 "package or object reference", SVfARG(meth)),
4136 : *(PL_stack_base + TOPMARK + 1);
4138 PERL_ARGS_ASSERT_OPMETHOD_STASH;
4142 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on an undefined value",
4145 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
4146 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
4147 stash = gv_stashsv(sv, GV_CACHE_ONLY);
4148 if (stash) return stash;
4152 ob = MUTABLE_SV(SvRV(sv));
4153 else if (!SvOK(sv)) goto undefined;
4154 else if (isGV_with_GP(sv)) {
4156 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
4157 "without a package or object reference",
4160 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') {
4161 assert(!LvTARGLEN(ob));
4165 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob));
4168 /* this isn't a reference */
4171 const char * const packname = SvPV_nomg_const(sv, packlen);
4172 const U32 packname_utf8 = SvUTF8(sv);
4173 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
4174 if (stash) return stash;
4176 if (!(iogv = gv_fetchpvn_flags(
4177 packname, packlen, packname_utf8, SVt_PVIO
4179 !(ob=MUTABLE_SV(GvIO(iogv))))
4181 /* this isn't the name of a filehandle either */
4184 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
4185 "without a package or object reference",
4188 /* assume it's a package name */
4189 stash = gv_stashpvn(packname, packlen, packname_utf8);
4190 if (stash) return stash;
4191 else return MUTABLE_HV(sv);
4193 /* it _is_ a filehandle name -- replace with a reference */
4194 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
4197 /* if we got here, ob should be an object or a glob */
4198 if (!ob || !(SvOBJECT(ob)
4199 || (isGV_with_GP(ob)
4200 && (ob = MUTABLE_SV(GvIO((const GV *)ob)))
4203 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on unblessed reference",
4204 SVfARG((SvSCREAM(meth) && strEQ(SvPV_nolen_const(meth),"isa"))
4205 ? newSVpvs_flags("DOES", SVs_TEMP)
/* NOTE(review): partial extract of pp_method ($obj->$meth with a dynamic
 * method name): a CODE ref used as the method is taken as-is; otherwise
 * the stash is resolved via opmethod_stash() and the method fetched with
 * GV_AUTOLOAD|GV_CROAK. Lines are missing from this view. */
4217 SV* const meth = TOPs;
4220 SV* const rmeth = SvRV(meth);
4221 if (SvTYPE(rmeth) == SVt_PVCV) {
4227 stash = opmethod_stash(meth);
4229 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4232 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* NOTE(review): partial extract of the METHOD_CHECK_CACHE macro: probes a
 * method cache hash for 'meth' and, when the cached GV's CV generation is
 * still valid against PL_sub_generation + the stash's MRO cache_gen,
 * pushes the cached CV. Interior lines (and the final line of the macro)
 * are missing from this view, so no comments are added inside it. */
4236 #define METHOD_CHECK_CACHE(stash,cache,meth) \
4237 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
4239 gv = MUTABLE_GV(HeVAL(he)); \
4240 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
4241 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
4243 XPUSHs(MUTABLE_SV(GvCV(gv))); \
/* NOTE(review): partial extract of pp_method_named (compile-time-constant
 * method name): tries METHOD_CHECK_CACHE on the stash itself, then falls
 * back to gv_fetchmethod_sv_flags with GV_AUTOLOAD|GV_CROAK. */
4252 SV* const meth = cMETHOPx_meth(PL_op);
4253 HV* const stash = opmethod_stash(meth);
4255 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
4256 METHOD_CHECK_CACHE(stash, stash, meth);
4259 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4262 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* NOTE(review): partial extract of pp_method_super (SUPER::meth): uses
 * CopSTASH(PL_curcop) for dispatch (per the original comment), calls
 * opmethod_stash() purely for invocant validation, consults the MRO
 * 'super' cache, then fetches with GV_SUPER. */
4271 SV* const meth = cMETHOPx_meth(PL_op);
4272 HV* const stash = CopSTASH(PL_curcop);
4273 /* Actually, SUPER doesn't need real object's (or class') stash at all,
4274 * as it uses CopSTASH. However, we must ensure that object(class) is
4275 * correct (this check is done by S_opmethod_stash) */
4276 opmethod_stash(meth);
4278 if ((cache = HvMROMETA(stash)->super)) {
4279 METHOD_CHECK_CACHE(stash, cache, meth);
4282 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4285 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* NOTE(review): partial extract of pp_method_redir (redirected method
 * call): dispatches in the op's recorded class (cMETHOPx_rclass) rather
 * than the invocant's; opmethod_stash() is still called for the invocant
 * validity checks, as the original comment notes. */
4293 SV* const meth = cMETHOPx_meth(PL_op);
4294 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4295 opmethod_stash(meth); /* not used but needed for error checks */
4297 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
4298 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4300 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
4303 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* NOTE(review): partial extract of pp_method_redir_super: like
 * pp_method_redir but resolves through the redirected class's SUPER chain
 * (MRO 'super' cache, GV_SUPER flag). Lines are missing from this view. */
4307 PP(pp_method_redir_super)
4312 SV* const meth = cMETHOPx_meth(PL_op);
4313 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
4314 opmethod_stash(meth); /* not used but needed for error checks */
4316 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
4317 else if ((cache = HvMROMETA(stash)->super)) {
4318 METHOD_CHECK_CACHE(stash, cache, meth);
4321 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
4324 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
4329 * ex: set ts=8 sts=4 sw=4 et: