3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
6 * You may distribute under the terms of either the GNU General Public
7 * License or the Artistic License, as specified in the README file.
12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
15 * Awake! Awake! Fear, Fire, Foes! Awake!
18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
21 /* This file contains 'hot' pp ("push/pop") functions that
22 * execute the opcodes that make up a perl program. A typical pp function
23 * expects to find its arguments on the stack, and usually pushes its
24 * results onto the stack, hence the 'pp' terminology. Each OP structure
25 * contains a pointer to the relevant pp_foo() function.
27 * By 'hot', we mean common ops whose execution speed is critical.
28 * By gathering them together into a single file, we encourage
29 * CPU cache hits on hot code. Also it could be taken as a warning not to
30 * change any code in this file unless you're sure it won't affect
35 #define PERL_IN_PP_HOT_C
/* Excerpt (lines elided): per-statement housekeeping — appears to be from
 * pp_nextstate. Records the current COP, clears the taint flag for the new
 * statement, and resets the stack pointer to the enclosing context's base.
 * NOTE(review): surrounding lines are missing from this excerpt; the
 * function identity is inferred from the visible code — confirm in full file. */
50 PL_curcop = (COP*)PL_op;
51 TAINT_NOT; /* Each statement is presumed innocent */
52 PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;
/* Excerpt (lines elided): push the scalar slot of the GV attached to this OP
 * (pp_gvsv-style). Under OPpLVAL_INTRO ('local $x') the old value is first
 * saved via save_scalar(), which also returns the fresh scalar to push.
 * NOTE(review): the if/else structure between the two PUSHs lines is partly
 * elided here. */
61 assert(SvTYPE(cGVOP_gv) == SVt_PVGV);
63 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
64 PUSHs(save_scalar(cGVOP_gv));
66 PUSHs(GvSVn(cGVOP_gv));
/* Excerpt (lines elided): fragments of the no-op/pushmark ops. The PUSHMARK
 * records the current stack position so a later op can find the start of a
 * list. NOTE(review): these lines span more than one pp function in the
 * original file; boundaries are not visible in this excerpt. */
71 /* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
78 /* This is sometimes called directly by pp_coreargs, pp_grepstart and
82 PUSHMARK(PL_stack_sp);
93 /* no PUTBACK, SETs doesn't inc/dec SP */
/* Excerpt: push the GV attached to this OP (pp_gv-style). The assert allows
 * either a real glob or a lighter SV holding an RV to a CV (the optimised
 * "sub ref in the GV slot" representation). */
100 /* cGVOP_gv might be a real GV or might be an RV to a CV */
101 assert(SvTYPE(cGVOP_gv) == SVt_PVGV ||
102 (SvTYPE(cGVOP_gv) <= SVt_PVMG && SvROK(cGVOP_gv) && SvTYPE(SvRV(cGVOP_gv)) == SVt_PVCV));
103 XPUSHs(MUTABLE_SV(cGVOP_gv));
/* Excerpt (lines elided): short-circuit logical AND. Reads the value on top
 * of the stack directly through PL_stack_sp (no dSP), and for OP_AND jumps
 * to op_other (the right-hand side) when appropriate. NOTE(review): the
 * truth test between these lines is elided; control-flow details cannot be
 * confirmed from this excerpt. */
108 /* also used for: pp_andassign() */
114 /* SP is not used to remove a variable that is saved across the
115 sv_2bool_flags call in SvTRUE_NN, if a RISC/CISC or low/high machine
116 register or load/store vs direct mem ops macro is introduced, this
117 should be a define block between direct PL_stack_sp and dSP operations,
118 presently, using PL_stack_sp is bias towards CISC cpus */
119 SV * const sv = *PL_stack_sp;
123 if (PL_op->op_type == OP_AND)
125 return cLOGOP->op_other;
/* Excerpt (lines elided): combined padsv + sassign op. Assigns the value on
 * top of the stack into a pad lexical, handling 'my' introduction
 * (save_clearsv) and the useless-assignment-to-temporary warning, then does
 * the actual magic-aware copy with SvSetMagicSV(). */
131 * Mashup of simple padsv + sassign OPs
132 * Doesn't support the following lengthy and unlikely sassign case:
133 * (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV))
134 * These cases have a separate optimization, so are not handled here:
135 * (PL_op->op_private & OPpASSIGN_BACKWARDS) {or,and,dor}assign
141 OP * const op = PL_op;
142 SV** const padentry = &PAD_SVl(op->op_targ);
143 SV* targ = *padentry; /* lvalue to assign into */
144 SV* const val = TOPs; /* RHS value to assign */
146 /* !OPf_STACKED is not handled by this OP */
147 assert(op->op_flags & OPf_STACKED);
149 /* Inlined, simplified pp_padsv here */
150 if ((op->op_private & (OPpLVAL_INTRO|OPpPAD_STATE)) == OPpLVAL_INTRO) {
151 save_clearsv(padentry);
154 /* Inlined, simplified pp_sassign from here */
155 assert(TAINTING_get || !TAINT_get);
156 if (UNLIKELY(TAINT_get) && !SvTAINTED(val))
160 UNLIKELY(SvTEMP(targ)) && !SvSMAGICAL(targ) && SvREFCNT(targ) == 1 &&
161 (!isGV_with_GP(targ) || SvFAKE(targ)) && ckWARN(WARN_MISC)
164 packWARN(WARN_MISC), "Useless assignment to a temporary"
166 SvSetMagicSV(targ, val);
/* Excerpt (lines elided): combined AELEMFAST_LEX + SASSIGN — store the value
 * on top of the stack into a fixed (compile-time-known, signed 8-bit) index
 * of a lexical array, with an inlined fast-path av_fetch and the same
 * tainting / useless-assignment warning logic as pp_sassign. */
172 /* A mashup of simplified AELEMFAST_LEX + SASSIGN OPs */
174 PP(pp_aelemfastlex_store)
177 OP * const op = PL_op;
178 SV* const val = TOPs; /* RHS value to assign */
179 AV * const av = MUTABLE_AV(PAD_SV(op->op_targ));
180 const I8 key = (I8)PL_op->op_private;
183 /* !OPf_STACKED is not handled by this OP */
184 assert(op->op_flags & OPf_STACKED);
186 /* Inlined, simplified pp_aelemfast here */
187 assert(SvTYPE(av) == SVt_PVAV);
190 /* inlined av_fetch() for simple cases ... */
191 if (!SvRMAGICAL(av) && key <= AvFILLp(av)) {
192 targ = AvARRAY(av)[key];
194 /* ... else do it the hard way */
196 SV **svp = av_fetch(av, key, 1);
201 DIE(aTHX_ PL_no_aelem, (int)key);
204 /* Inlined, simplified pp_sassign from here */
205 assert(TAINTING_get || !TAINT_get);
206 if (UNLIKELY(TAINT_get) && !SvTAINTED(val))
210 UNLIKELY(SvTEMP(targ)) && !SvSMAGICAL(targ) && SvREFCNT(targ) == 1 &&
211 (!isGV_with_GP(targ) || SvFAKE(targ)) && ckWARN(WARN_MISC)
214 packWARN(WARN_MISC), "Useless assignment to a temporary"
216 SvSetMagicSV(targ, val);
/* Excerpt (lines elided): scalar assignment. Handles the reversed-operand
 * convention, tainting, the OPpASSIGN_CV_TO_GV glob/coderef special cases
 * (including the proxy-constant-sub optimisation and the *{"BONK"} corner
 * case), the useless-assignment warning, and finally the magic-aware copy.
 * NOTE(review): many closing braces / else branches are elided here, so the
 * exact nesting cannot be verified from this excerpt. */
225 /* sassign keeps its args in the optree traditionally backwards.
226 So we pop them differently.
228 SV *left = POPs; SV *right = TOPs;
230 if (PL_op->op_private & OPpASSIGN_BACKWARDS) { /* {or,and,dor}assign */
231 SV * const temp = left;
232 left = right; right = temp;
234 assert(TAINTING_get || !TAINT_get);
235 if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
237 if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
239 SV * const cv = SvRV(right);
240 const U32 cv_type = SvTYPE(cv);
241 const bool is_gv = isGV_with_GP(left);
242 const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
248 /* Can do the optimisation if left (LVALUE) is not a typeglob,
249 right (RVALUE) is a reference to something, and we're in void
251 if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
252 /* Is the target symbol table currently empty? */
253 GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
254 if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
255 /* Good. Create a new proxy constant subroutine in the target.
256 The gv becomes a(nother) reference to the constant. */
257 SV *const value = SvRV(cv);
259 SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
260 SvPCS_IMPORTED_on(gv);
262 SvREFCNT_inc_simple_void(value);
268 /* Need to fix things up. */
270 /* Need to fix GV. */
271 left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
275 /* We've been returned a constant rather than a full subroutine,
276 but they expect a subroutine reference to apply. */
278 ENTER_with_name("sassign_coderef");
279 SvREFCNT_inc_void(SvRV(cv));
280 /* newCONSTSUB takes a reference count on the passed in SV
281 from us. We set the name to NULL, otherwise we get into
282 all sorts of fun as the reference to our new sub is
283 donated to the GV that we're about to assign to.
285 SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
288 LEAVE_with_name("sassign_coderef");
290 /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
292 First: ops for \&{"BONK"}; return us the constant in the
294 Second: ops for *{"BONK"} cause that symbol table entry
295 (and our reference to it) to be upgraded from RV
297 Thirdly: We get here. cv is actually PVGV now, and its
298 GvCV() is actually the subroutine we're looking for
300 So change the reference so that it points to the subroutine
301 of that typeglob, as that's what they were after all along.
303 GV *const upgraded = MUTABLE_GV(cv);
304 CV *const source = GvCV(upgraded);
307 assert(CvFLAGS(source) & CVf_CONST);
309 SvREFCNT_inc_simple_void_NN(source);
310 SvREFCNT_dec_NN(upgraded);
311 SvRV_set(right, MUTABLE_SV(source));
317 UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
318 (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
321 packWARN(WARN_MISC), "Useless assignment to a temporary"
323 SvSetMagicSV(left, right);
/* Excerpt: ternary dispatch — branch to op_other when sv is true, op_next
 * otherwise. NOTE(review): the enclosing function (presumably pp_cond_expr)
 * is not visible in this excerpt. */
335 RETURNOP(SvTRUE_NN(sv) ? cLOGOP->op_other : cLOGOP->op_next);
/* Excerpt (lines elided): loop-body cleanup (pp_unstack-style) — reset taint
 * and the stack pointer to the context base; the OPf_SPECIAL check guards
 * extra per-iteration work for real block/loop contexts. */
342 TAINT_NOT; /* Each statement is presumed innocent */
344 PL_stack_sp = PL_stack_base + cx->blk_oldsp;
346 if (!(PL_op->op_flags & OPf_SPECIAL)) {
347 assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx));
/* Excerpt (lines elided): core of string concatenation: targ = left . right.
 * Handles the aliasing case where targ == right (copy right to a temp
 * first), the append case (targ == left, with an uninitialized warning for
 * self-concat), and byte/utf8 mismatches (upgrade both sides before the
 * final sv_catpvn_nomg). */
354 /* The main body of pp_concat, not including the magic/overload and
356 * It does targ = left . right.
357 * Moved into a separate function so that pp_multiconcat() can use it
361 PERL_STATIC_INLINE void
362 S_do_concat(pTHX_ SV *left, SV *right, SV *targ, U8 targmy)
366 const char *rpv = NULL;
368 bool rcopied = FALSE;
370 if (TARG == right && right != left) { /* $r = $l.$r */
371 rpv = SvPV_nomg_const(right, rlen);
372 rbyte = !DO_UTF8(right);
373 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
374 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */
378 if (TARG != left) { /* not $l .= $r */
380 const char* const lpv = SvPV_nomg_const(left, llen);
381 lbyte = !DO_UTF8(left);
382 sv_setpvn(TARG, lpv, llen);
388 else { /* $l .= $r and left == TARG */
390 if ((left == right /* $l .= $l */
391 || targmy) /* $l = $l . $r */
392 && ckWARN(WARN_UNINITIALIZED)
398 SvPV_force_nomg_nolen(left);
400 lbyte = !DO_UTF8(left);
406 rpv = SvPV_nomg_const(right, rlen);
407 rbyte = !DO_UTF8(right);
409 if (lbyte != rbyte) {
411 sv_utf8_upgrade_nomg(TARG);
414 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
415 sv_utf8_upgrade_nomg(right);
416 rpv = SvPV_nomg_const(right, rlen);
419 sv_catpvn_nomg(TARG, rpv, rlen);
/* Excerpt: pp_concat proper — overload/magic handling via tryAMAGICbin_MG,
 * then delegate the actual concatenation to S_do_concat(). */
426 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
429 S_do_concat(aTHX_ left, right, targ, PL_op->op_private & OPpTARGET_MY);
438 Concatenate one or more args, possibly interleaved with constant string
439 segments. The result may be assigned to, or appended to, a variable or
442 Several op_flags and/or op_private bits indicate what the target is, and
443 whether it's appended to. Valid permutations are:
445 - (PADTMP) = (A.B.C....)
446 OPpTARGET_MY $lex = (A.B.C....)
447 OPpTARGET_MY,OPpLVAL_INTRO my $lex = (A.B.C....)
448 OPpTARGET_MY,OPpMULTICONCAT_APPEND $lex .= (A.B.C....)
449 OPf_STACKED expr = (A.B.C....)
450 OPf_STACKED,OPpMULTICONCAT_APPEND expr .= (A.B.C....)
452 Other combinations like (A.B).(C.D) are not optimised into a multiconcat
453 op, as it's too hard to get the correct ordering of ties, overload etc.
457 OPpMULTICONCAT_FAKE: not a real concat, instead an optimised
458 sprintf "...%s...". Don't call '.'
459 overloading: only use '""' overloading.
461 OPpMULTICONCAT_STRINGIFY: the RHS was of the form
462 "...$a...$b..." rather than
463 "..." . $a . "..." . $b . "..."
465 An OP_MULTICONCAT is of type UNOP_AUX. The fixed slots of the aux array are
466 defined with PERL_MULTICONCAT_IX_FOO constants, where:
469 FOO index description
470 -------- ----- ----------------------------------
471 NARGS 0 number of arguments
472 PLAIN_PV 1 non-utf8 constant string
473 PLAIN_LEN 2 non-utf8 constant string length
474 UTF8_PV 3 utf8 constant string
475 UTF8_LEN 4 utf8 constant string length
476 LENGTHS 5 first of nargs+1 const segment lengths
478 The idea is that a general string concatenation will have a fixed (known
479 at compile time) number of variable args, interspersed with constant
480 strings, e.g. "a=$a b=$b\n"
482 All the constant string segments "a=", " b=" and "\n" are stored as a
483 single string "a= b=\n", pointed to from the PLAIN_PV/UTF8_PV slot, along
484 with a series of segment lengths: e.g. 2,3,1. In the case where the
485 constant string is plain but has a different utf8 representation, both
486 variants are stored, and two sets of (nargs+1) segment lengths are stored
487 in the slots beginning at PERL_MULTICONCAT_IX_LENGTHS.
489 A segment length of -1 indicates that there is no constant string at that
490 point; this distinguishes between e.g. ($a . $b) and ($a . "" . $b), which
491 have differing overloading behaviour.
/* Excerpt (lines elided): pp_multiconcat, local declarations and aux-array
 * setup. nargs and the append flag come from the UNOP_AUX data attached to
 * the op; svpv_buf caches one SvPV() result per argument. */
498 SV *targ; /* The SV to be assigned or appended to */
499 char *targ_pv; /* where within SvPVX(targ) we're writing to */
500 STRLEN targ_len; /* SvCUR(targ) */
501 SV **toparg; /* the highest arg position on the stack */
502 UNOP_AUX_item *aux; /* PL_op->op_aux buffer */
503 UNOP_AUX_item *const_lens; /* the segment length array part of aux */
504 const char *const_pv; /* the current segment of the const string buf */
505 SSize_t nargs; /* how many args were expected */
506 SSize_t stack_adj; /* how much to adjust SP on return */
507 STRLEN grow; /* final size of destination string (targ) */
508 UV targ_count; /* how many times targ has appeared on the RHS */
509 bool is_append; /* OPpMULTICONCAT_APPEND flag is set */
510 bool slow_concat; /* args too complex for quick concat */
511 U32 dst_utf8; /* the result will be utf8 (indicate this with
512 SVf_UTF8 in a U32, rather than using bool,
513 for ease of testing and setting) */
514 /* for each arg, holds the result of an SvPV() call */
515 struct multiconcat_svpv {
519 *targ_chain, /* chain of slots where targ has appeared on RHS */
520 *svpv_p, /* ptr for looping through svpv_buf */
521 *svpv_base, /* first slot (may be greater than svpv_buf), */
522 *svpv_end, /* and slot after highest result so far, of: */
523 svpv_buf[PERL_MULTICONCAT_MAXARG]; /* buf for storing SvPV() results */
525 aux = cUNOP_AUXx(PL_op)->op_aux;
526 stack_adj = nargs = aux[PERL_MULTICONCAT_IX_NARGS].ssize;
527 is_append = cBOOL(PL_op->op_private & OPpMULTICONCAT_APPEND);
/* Excerpt (lines elided): pp_multiconcat phase 1 — locate targ (stack or
 * pad) and stringify every argument into svpv_buf, deferring targ itself
 * via targ_chain and bailing out to the slow 'do_magical' path on any
 * magical/overloaded/undef argument. */
529 /* get targ from the stack or pad */
531 if (PL_op->op_flags & OPf_STACKED) {
533 /* for 'expr .= ...', expr is the bottom item on the stack */
538 /* for 'expr = ...', expr is the top item on the stack */
542 SV **svp = &(PAD_SVl(PL_op->op_targ));
544 if (PL_op->op_private & OPpLVAL_INTRO) {
545 assert(PL_op->op_private & OPpTARGET_MY);
549 /* $lex .= "const" doesn't cause anything to be pushed */
555 grow = 1; /* allow for '\0' at minimum */
560 /* only utf8 variants of the const strings? */
561 dst_utf8 = aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv ? 0 : SVf_UTF8;
564 /* --------------------------------------------------------------
567 * stringify (i.e. SvPV()) every arg and store the resultant pv/len/utf8
568 * triplets in svpv_buf[]. Also increment 'grow' by the args' lengths.
570 * utf8 is indicated by storing a negative length.
572 * Where an arg is actually targ, the stringification is deferred:
573 * the length is set to 0, and the slot is added to targ_chain.
575 * If a magic, overloaded, or otherwise weird arg is found, which
576 * might have side effects when stringified, the loop is abandoned and
577 * we goto a code block where a more basic 'emulate calling
578 * pp_concat() on each arg in turn' is done.
581 for (; SP <= toparg; SP++, svpv_end++) {
586 assert(svpv_end - svpv_buf < PERL_MULTICONCAT_MAXARG);
590 /* this if/else chain is arranged so that common/simple cases
591 * take few conditionals */
593 if (LIKELY((SvFLAGS(sv) & (SVs_GMG|SVf_ROK|SVf_POK)) == SVf_POK)) {
594 /* common case: sv is a simple non-magical PV */
596 /* targ appears on RHS.
597 * Delay storing PV pointer; instead, add slot to targ_chain
598 * so it can be populated later, after targ has been grown and
599 * we know its final SvPVX() address.
602 svpv_end->len = 0; /* zeroing here means we can skip
603 updating later if targ_len == 0 */
604 svpv_end->pv = (char*)targ_chain;
605 targ_chain = svpv_end;
611 svpv_end->pv = SvPVX(sv);
613 else if (UNLIKELY(SvFLAGS(sv) & (SVs_GMG|SVf_ROK)))
614 /* may have side effects: tie, overload etc.
615 * Abandon 'stringify everything first' and handle
616 * args in strict order. Note that already-stringified args
617 * will be reprocessed, which is safe because each first
618 * stringification would have been idempotent.
621 else if (SvNIOK(sv)) {
624 /* stringify general valid scalar */
625 svpv_end->pv = sv_2pv_flags(sv, &len, 0);
627 else if (!SvOK(sv)) {
628 if (ckWARN(WARN_UNINITIALIZED))
629 /* an undef value in the presence of warnings may trigger
636 goto do_magical; /* something weird */
638 utf8 = (SvFLAGS(sv) & SVf_UTF8);
640 ASSUME(len < SSize_t_MAX);
641 svpv_end->len = utf8 ? -(SSize_t)len : (SSize_t)len;
/* Excerpt (lines elided): pp_multiconcat phase 2 — prepare targ: force-
 * stringify it when appending or when it appears on the RHS (upgrading to
 * utf8 if needed), warn on undef, divert weird LHS types (>= SVt_REGEXP) to
 * the slow path, and otherwise clear it via an unrolled SvPVCLEAR(). */
645 /* --------------------------------------------------------------
650 * if targ appears on the RHS or is appended to, force stringify it;
651 * otherwise set it to "". Then set targ_len.
655 /* abandon quick route if using targ might have side effects */
656 if (UNLIKELY(SvFLAGS(targ) & (SVs_GMG|SVf_ROK)))
662 SvPV_force_nomg_nolen(targ);
663 targ_utf8 = SvFLAGS(targ) & SVf_UTF8;
664 if (UNLIKELY(dst_utf8 & ~targ_utf8)) {
665 if (LIKELY(!IN_BYTES))
666 sv_utf8_upgrade_nomg(targ);
669 dst_utf8 |= targ_utf8;
671 targ_len = SvCUR(targ);
672 grow += targ_len * (targ_count + is_append);
675 else if (ckWARN(WARN_UNINITIALIZED))
676 /* warning might have side effects */
678 /* the undef targ will be silently SvPVCLEAR()ed below */
680 else if (UNLIKELY(SvTYPE(targ) >= SVt_REGEXP)) {
681 /* Assigning to some weird LHS type. Don't force the LHS to be an
682 * empty string; instead, do things 'long hand' by using the
683 * overload code path, which concats to a TEMP sv and does
684 * sv_catsv() calls rather than COPY()s. This ensures that even
685 * bizarre code like this doesn't break or crash:
687 * (which makes the 'F' typeglob an alias to the
688 * '*main::F*main::F' typeglob).
693 /* targ was found on RHS.
694 * Force stringify it, using the same code as the append branch
695 * above, except that we don't need the magic/overload/undef
696 * checks as these will already have been done in the phase 1
701 /* unrolled SvPVCLEAR() - mostly: no need to grow or set SvCUR() to 0;
702 * those will be done later. */
703 SV_CHECK_THINKFIRST_COW_DROP(targ);
704 SvUPGRADE(targ, SVt_PV);
705 SvFLAGS(targ) &= ~(SVf_OK|SVf_IVisUV|SVf_UTF8);
706 SvFLAGS(targ) |= (SVf_POK|SVp_POK|dst_utf8);
/* Excerpt (lines elided): pp_multiconcat phase 3 — resolve utf8-ness,
 * account for variant-byte expansion in 'grow', pick the plain or utf8
 * constant-string/segment-length set, and grow targ's buffer in one go. */
710 /* --------------------------------------------------------------
713 * UTF-8 tweaks and grow targ:
715 * Now that we know the length and utf8-ness of both the targ and
716 * args, grow targ to the size needed to accumulate all the args, based
717 * on whether targ appears on the RHS, whether we're appending, and
718 * whether any non-utf8 args expand in size if converted to utf8.
720 * For the latter, if dst_utf8 we scan non-utf8 args looking for
721 * variant chars, and adjust the svpv->len value of those args to the
722 * utf8 size and negate it to flag them. At the same time we un-negate
723 * the lens of any utf8 args since after this phase we no longer care
724 * whether an arg is utf8 or not.
726 * Finally, initialise const_lens and const_pv based on utf8ness.
727 * Note that there are 3 permutations:
729 * * If the constant string is invariant whether utf8 or not (e.g. "abc"),
730 * then aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN] are the same as
731 * aux[PERL_MULTICONCAT_IX_UTF8_PV/LEN] and there is one set of
734 * * If the string is fully utf8, e.g. "\x{100}", then
735 * aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN] == (NULL,0) and there is
736 * one set of segment lengths.
738 * * If the string has different plain and utf8 representations
739 * (e.g. "\x80"), then aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN]]
740 * holds the plain rep, while aux[PERL_MULTICONCAT_IX_UTF8_PV/LEN]
741 * holds the utf8 rep, and there are 2 sets of segment lengths,
742 * with the utf8 set following after the plain set.
744 * On entry to this section the (pv,len) pairs in svpv_buf have the
745 * following meanings:
746 * (pv, len) a plain string
747 * (pv, -len) a utf8 string
748 * (NULL, 0) left-most targ \ linked together R-to-L
749 * (next, 0) other targ / in targ_chain
752 /* turn off utf8 handling if 'use bytes' is in scope */
753 if (UNLIKELY(dst_utf8 && IN_BYTES)) {
756 /* undo all the negative lengths which flag utf8-ness */
757 for (svpv_p = svpv_buf; svpv_p < svpv_end; svpv_p++) {
758 SSize_t len = svpv_p->len;
764 /* grow += total of lengths of constant string segments */
767 len = aux[dst_utf8 ? PERL_MULTICONCAT_IX_UTF8_LEN
768 : PERL_MULTICONCAT_IX_PLAIN_LEN].ssize;
769 slow_concat = cBOOL(len);
773 const_lens = aux + PERL_MULTICONCAT_IX_LENGTHS;
776 const_pv = aux[PERL_MULTICONCAT_IX_UTF8_PV].pv;
777 if ( aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv
778 && const_pv != aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv)
779 /* separate sets of lengths for plain and utf8 */
780 const_lens += nargs + 1;
782 /* If the result is utf8 but some of the args aren't,
783 * calculate how much extra growth is needed for all the chars
784 * which will expand to two utf8 bytes.
785 * Also, if the growth is non-zero, negate the length to indicate
786 * that this is a variant string. Conversely, un-negate the
787 * length on utf8 args (which was only needed to flag non-utf8
788 * args in this loop */
789 for (svpv_p = svpv_buf; svpv_p < svpv_end; svpv_p++) {
798 extra = variant_under_utf8_count((U8 *) svpv_p->pv,
799 (U8 *) svpv_p->pv + len);
800 if (UNLIKELY(extra)) {
802 /* -ve len indicates special handling */
803 svpv_p->len = -(len + extra);
809 const_pv = aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv;
811 /* unrolled SvGROW(), except don't check for SVf_IsCOW, which should
812 * already have been dropped */
813 assert(!SvIsCOW(targ));
814 targ_pv = (SvLEN(targ) < (grow) ? sv_grow(targ,grow) : SvPVX(targ));
/* Excerpt (lines elided): pp_multiconcat phase 4 — now that targ's buffer
 * is final, move the existing targ contents to the first place they are
 * needed and back-fill the targ_chain slots with the real (pv,len). */
817 /* --------------------------------------------------------------
820 * Now that targ has been grown, we know the final address of the targ
821 * PVX, if needed. Preserve / move targ contents if appending or if
822 * targ appears on RHS.
824 * Also update svpv_buf slots in targ_chain.
826 * Don't bother with any of this if the target length is zero:
827 * targ_len is set to zero unless we're appending or targ appears on
828 * RHS. And even if it is, we can optimise by skipping this chunk of
829 * code for zero targ_len. In the latter case, we don't need to update
830 * the slots in targ_chain with the (zero length) target string, since
831 * we set the len in such slots to 0 earlier, and since the Copy() is
832 * skipped on zero length, it doesn't matter what svpv_p->pv contains.
834 * On entry to this section the (pv,len) pairs in svpv_buf have the
835 * following meanings:
836 * (pv, len) a pure-plain or utf8 string
837 * (pv, -(len+extra)) a plain string which will expand by 'extra'
838 * bytes when converted to utf8
839 * (NULL, 0) left-most targ \ linked together R-to-L
840 * (next, 0) other targ / in targ_chain
842 * On exit, the targ contents will have been moved to the
843 * earliest place they are needed (e.g. $x = "abc$x" will shift them
844 * 3 bytes, while $x .= ... will leave them at the beginning);
845 * and dst_pv will point to the location within SvPVX(targ) where the
846 * next arg should be copied.
849 svpv_base = svpv_buf;
852 struct multiconcat_svpv *tc_stop;
853 char *targ_buf = targ_pv; /* ptr to original targ string */
855 assert(is_append || targ_count);
862 /* The targ appears on RHS, e.g. '$t = $a . $t . $t'.
863 * Move the current contents of targ to the first
864 * position where it's needed, and use that as the src buffer
865 * for any further uses (such as the second RHS $t above).
866 * In calculating the first position, we need to sum the
867 * lengths of all consts and args before that.
870 UNOP_AUX_item *lens = const_lens;
871 /* length of first const string segment */
872 STRLEN offset = lens->ssize > 0 ? lens->ssize : 0;
880 break; /* the first targ argument */
881 /* add lengths of the next arg and const string segment */
883 if (len < 0) /* variant args have this */
885 offset += (STRLEN)len;
886 len = (++lens)->ssize;
887 offset += (len >= 0) ? (STRLEN)len : 0;
889 /* all args and consts so far are empty; update
890 * the start position for the concat later */
895 assert(svpv_p < svpv_end);
900 Move(targ_pv, targ_buf, targ_len, char);
901 /* a negative length implies don't Copy(), but do increment */
902 svpv_p->len = -((SSize_t)targ_len);
906 /* skip the first targ copy */
912 /* Don't populate the first targ slot in the loop below; it's
913 * either not used because we advanced svpv_base beyond it, or
914 * we already stored the special -targ_len value in it
919 /* populate slots in svpv_buf representing targ on RHS */
920 while (targ_chain != tc_stop) {
921 struct multiconcat_svpv *p = targ_chain;
922 targ_chain = (struct multiconcat_svpv *)(p->pv);
924 p->len = (SSize_t)targ_len;
/* Excerpt (lines elided): pp_multiconcat phase 5 — the actual copy loop:
 * either a simple arg-only fast loop, or an interleaved loop appending
 * nargs args and nargs+1 constant segments, expanding variant bytes to
 * utf8 where flagged by a negative length; finally set SvCUR. */
929 /* --------------------------------------------------------------
932 * Append all the args in svpv_buf, plus the const strings, to targ.
934 * On entry to this section the (pv,len) pairs in svpv_buf have the
935 * following meanings:
936 * (pv, len) a pure-plain or utf8 string (which may be targ)
937 * (pv, -(len+extra)) a plain string which will expand by 'extra'
938 * bytes when converted to utf8
939 * (0, -len) left-most targ, whose content has already
940 * been copied. Just advance targ_pv by len.
943 /* If there are no constant strings and no special case args
944 * (svpv_p->len < 0), use a simpler, more efficient concat loop
947 for (svpv_p = svpv_base; svpv_p < svpv_end; svpv_p++) {
948 SSize_t len = svpv_p->len;
951 Copy(svpv_p->pv, targ_pv, len, char);
954 const_lens += (svpv_end - svpv_base + 1);
957 /* Note that we iterate the loop nargs+1 times: to append nargs
958 * arguments and nargs+1 constant strings. For example, "-$a-$b-"
960 svpv_p = svpv_base - 1;
963 SSize_t len = (const_lens++)->ssize;
965 /* append next const string segment */
967 Copy(const_pv, targ_pv, len, char);
972 if (++svpv_p == svpv_end)
975 /* append next arg */
978 if (LIKELY(len > 0)) {
979 Copy(svpv_p->pv, targ_pv, len, char);
982 else if (UNLIKELY(len < 0)) {
983 /* negative length indicates two special cases */
984 const char *p = svpv_p->pv;
987 /* copy plain-but-variant pv to a utf8 targ */
988 char * end_pv = targ_pv + len;
990 while (targ_pv < end_pv) {
992 append_utf8_from_native_byte(c, (U8**)&targ_pv);
996 /* arg is already-copied targ */
1004 SvCUR_set(targ, targ_pv - SvPVX(targ));
1005 assert(grow >= SvCUR(targ) + 1);
1006 assert(SvLEN(targ) >= SvCUR(targ) + 1);
/* Excerpt (lines elided): pp_multiconcat slow 'do_magical' path — emulate
 * pp_concat() per argument so that magic, overloading ('.' and '""'),
 * tainting and uninit warnings behave exactly as in the unoptimised op
 * sequence; then adjust SP and do the final assign/append to targ. */
1008 /* --------------------------------------------------------------
1019 /* --------------------------------------------------------------
1022 * We only get here if any of the args (or targ too in the case of
1023 * append) have something which might cause side effects, such
1024 * as magic, overload, or an undef value in the presence of warnings.
1025 * In that case, any earlier attempt to stringify the args will have
1026 * been abandoned, and we come here instead.
1028 * Here, we concat each arg in turn the old-fashioned way: essentially
1029 * emulating pp_concat() in a loop. This means that all the weird edge
1030 * cases will be handled correctly, if not necessarily speedily.
1032 * Note that some args may already have been stringified - those are
1033 * processed again, which is safe, since only args without side-effects
1034 * were stringified earlier.
1046 const char *cpv = aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv;
1047 UNOP_AUX_item *lens = aux + PERL_MULTICONCAT_IX_LENGTHS;
1048 Size_t arg_count = 0; /* how many args have been processed */
1051 cpv = aux[PERL_MULTICONCAT_IX_UTF8_PV].pv;
1055 svp = toparg - nargs + 1;
1059 * plus possible nargs+1 consts,
1060 * plus, if appending, a final targ in an extra last iteration
1064 for (i = 0; i <= n; i++) {
1067 /* if necessary, stringify the final RHS result in
1068 * something like $targ .= "$a$b$c" - simulating
1072 && (PL_op->op_private &OPpMULTICONCAT_STRINGIFY)
1074 /* extra conditions for backwards compatibility:
1075 * probably incorrect, but keep the existing behaviour
1076 * for now. The rules are:
1077 * $x = "$ov" single arg: stringify;
1078 * $x = "$ov$y" multiple args: don't stringify,
1079 * $lex = "$ov$y$z" except TARGMY with at least 2 concats
1084 && (PL_op->op_private & OPpTARGET_MY)
1085 && !(PL_op->op_private & OPpLVAL_INTRO)
1090 SV *tmp = newSV_type_mortal(SVt_PV);
1091 sv_copypv(tmp, left);
1096 /* do one extra iteration to handle $targ in $targ .= ... */
1097 if (i == n && !is_append)
1100 /* get the next arg SV or regen the next const SV */
1101 len = lens[i >> 1].ssize;
1103 /* handle the final targ .= (....) */
1108 right = svp[(i >> 1)];
1110 continue; /* no const in this position */
1112 right = newSVpvn_flags(cpv, len, (utf8 | SVs_TEMP));
1118 if (arg_count <= 1) {
1120 continue; /* need at least two SVs to concat together */
1123 if (arg_count == 2 && i < n) {
1124 /* for the first concat, create a mortal acting like the
1125 * padtmp from OP_CONST. In later iterations this will
1127 nexttarg = sv_newmortal();
1135 /* Handle possible overloading.
1136 * This is basically an unrolled
1137 * tryAMAGICbin_MG(concat_amg, AMGf_assign);
1139 * Perl_try_amagic_bin()
1140 * call, but using left and right rather than SP[-1], SP[0],
1141 * and not relying on OPf_STACKED implying .=
1144 if ((SvFLAGS(left)|SvFLAGS(right)) & (SVf_ROK|SVs_GMG)) {
1149 if ((SvAMAGIC(left) || SvAMAGIC(right))
1150 /* sprintf doesn't do concat overloading,
1151 * but allow for $x .= sprintf(...)
1153 && ( !(PL_op->op_private & OPpMULTICONCAT_FAKE)
1157 SV * const tmpsv = amagic_call(left, right, concat_amg,
1158 (nextappend ? AMGf_assign: 0));
1160 /* NB: tryAMAGICbin_MG() includes an OPpTARGET_MY test
1161 * here, which isn't needed as any implicit
1162 * assign done under OPpTARGET_MY is done after
1165 sv_setsv(left, tmpsv);
1174 /* if both args are the same magical value, make one a copy */
1175 if (left == right && SvGMAGICAL(left)) {
1176 SV * targetsv = right;
1177 /* Print the uninitialized warning now, so it includes the
1180 if (ckWARN(WARN_UNINITIALIZED))
1181 report_uninit(right);
1182 targetsv = &PL_sv_no;
1184 left = sv_mortalcopy_flags(targetsv, 0);
1189 /* nexttarg = left . right */
1190 S_do_concat(aTHX_ left, right, nexttarg, 0);
1194 SP = toparg - stack_adj + 1;
1196 /* Return the result of all RHS concats, unless this op includes
1197 * an assign ($lex = x.y.z or expr = x.y.z), in which case copy
1198 * to target (which will be $lex or expr).
1199 * If we are appending, targ will already have been appended to in
1202 && ( (PL_op->op_flags & OPf_STACKED)
1203 || (PL_op->op_private & OPpTARGET_MY))
1205 sv_setsv(targ, left);
/* Excerpt (lines elided): push every element of av onto the stack. The
 * magical-AV branch must go via av_fetch(); the plain branch reads
 * AvARRAY directly, with OPf_MOD deciding what to push for holes. */
1216 /* push the elements of av onto the stack.
1217 * Returns PL_op->op_next to allow tail-call optimisation of its callers */
1220 S_pushav(pTHX_ AV* const av)
1223 const SSize_t maxarg = AvFILL(av) + 1;
1225 if (UNLIKELY(SvRMAGICAL(av))) {
1227 for (i=0; i < (PADOFFSET)maxarg; i++) {
1228 SV ** const svp = av_fetch(av, i, FALSE);
1229 SP[i+1] = LIKELY(svp)
1231 : UNLIKELY(PL_op->op_flags & OPf_MOD)
1238 for (i=0; i < (PADOFFSET)maxarg; i++) {
1239 SV *sv = AvARRAY(av)[i];
1240 SP[i+1] = LIKELY(sv)
1242 : UNLIKELY(PL_op->op_flags & OPf_MOD)
/* Excerpt (lines elided): pp_padrange — push a contiguous range of pad
 * lexicals in one op. With OPf_SPECIAL it first fakes the '= @_' RHS;
 * with OPpLVAL_INTRO it records a single packed SAVEt_CLEARPADRANGE
 * savestack entry (base/count encoded into one UV) instead of one save
 * per lexical, then marks each lexical as active. */
1253 /* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */
1258 PADOFFSET base = PL_op->op_targ;
1259 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
1260 if (PL_op->op_flags & OPf_SPECIAL) {
1261 /* fake the RHS of my ($x,$y,..) = @_ */
1263 (void)S_pushav(aTHX_ GvAVn(PL_defgv));
1267 /* note, this is only skipped for compile-time-known void cxt */
1268 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
1273 for (i = 0; i <count; i++)
1274 *++SP = PAD_SV(base+i);
1276 if (PL_op->op_private & OPpLVAL_INTRO) {
1277 SV **svp = &(PAD_SVl(base));
1278 const UV payload = (UV)(
1279 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
1280 | (count << SAVE_TIGHT_SHIFT)
1281 | SAVEt_CLEARPADRANGE);
1284 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
1285 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT))
1293 for (i = 0; i <count; i++)
1294 SvPADSTALE_off(*svp++); /* mark lexical as active */
/* Excerpt (lines elided): pp_padsv — push a single pad lexical, handling
 * 'my' introduction (save_clearsv, skipped for 'state') and lvalue
 * autovivification (OPpDEREF) under OPf_MOD. */
1305 OP * const op = PL_op;
1306 /* access PL_curpad once */
1307 SV ** const padentry = &(PAD_SVl(op->op_targ));
1312 PUTBACK; /* no pop/push after this, TOPs ok */
1314 if (op->op_flags & OPf_MOD) {
1315 if (op->op_private & OPpLVAL_INTRO)
1316 if (!(op->op_private & OPpPAD_STATE))
1317 save_clearsv(padentry);
1318 if (op->op_private & OPpDEREF) {
1319 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather
1320 than TARG reduces the scope of TARG, so it does not
1321 span the call to save_clearsv, resulting in smaller
1323 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
/* Excerpt (lines elided): pp_readline — resolve PL_last_in_gv from the
 * stack (defaulting to ARGV for CORE::readline() with no args), deref an
 * RV-to-glob, otherwise coerce via rv2gv, then hand off to do_readline(). */
1333 /* pp_coreargs pushes a NULL to indicate no args passed to
1334 * CORE::readline() */
1337 tryAMAGICunTARGETlist(iter_amg, 0);
1338 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
1340 else PL_last_in_gv = PL_argvgv, PL_stack_sp--;
1341 if (!isGV_with_GP(PL_last_in_gv)) {
1342 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
1343 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));
1346 XPUSHs(MUTABLE_SV(PL_last_in_gv));
1348 Perl_pp_rv2gv(aTHX);
1349 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
1350 assert((SV*)PL_last_in_gv == &PL_sv_undef || isGV_with_GP(PL_last_in_gv));
1353 return do_readline();
1360 U32 flags_and, flags_or;
1362 tryAMAGICbin_MG(eq_amg, AMGf_numeric);
1365 flags_and = SvFLAGS(left) & SvFLAGS(right);
1366 flags_or = SvFLAGS(left) | SvFLAGS(right);
1369 ( (flags_and & SVf_IOK) && ((flags_or & SVf_IVisUV) ==0 ) )
1370 ? (SvIVX(left) == SvIVX(right))
1371 : (flags_and & SVf_NOK)
1372 ? (SvNVX(left) == SvNVX(right))
1373 : ( do_ncmp(left, right) == 0)
1379 /* also used for: pp_i_preinc() */
1383 SV *sv = *PL_stack_sp;
1385 if (LIKELY(((sv->sv_flags &
1386 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
1387 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
1389 && SvIVX(sv) != IV_MAX)
1391 SvIV_set(sv, SvIVX(sv) + 1);
1393 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
1400 /* also used for: pp_i_predec() */
1404 SV *sv = *PL_stack_sp;
1406 if (LIKELY(((sv->sv_flags &
1407 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
1408 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
1410 && SvIVX(sv) != IV_MIN)
1412 SvIV_set(sv, SvIVX(sv) - 1);
1414 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
1421 /* also used for: pp_orassign() */
1432 if (PL_op->op_type == OP_OR)
1434 RETURNOP(cLOGOP->op_other);
1439 /* also used for: pp_dor() pp_dorassign() */
1445 bool defined = FALSE;
1446 const int op_type = PL_op->op_type;
1447 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);
1451 if (UNLIKELY(!sv || !SvANY(sv))) {
1452 if (op_type == OP_DOR)
1454 RETURNOP(cLOGOP->op_other);
1459 if (UNLIKELY(!sv || !SvANY(sv)))
1463 /* Historically what followed was a switch on SvTYPE(sv), handling SVt_PVAV,
1464 * SVt_PVCV, SVt_PVHV and "default". `defined &sub` is still valid syntax,
1465 * hence we still need the special case PVCV code. But AVs and HVs now
1466 * should never arrive here... */
1468 assert(SvTYPE(sv) != SVt_PVAV);
1469 assert(SvTYPE(sv) != SVt_PVHV);
1472 if (UNLIKELY(SvTYPE(sv) == SVt_PVCV)) {
1473 if (CvROOT(sv) || CvXSUB(sv))
1485 if(op_type == OP_DOR)
1487 RETURNOP(cLOGOP->op_other);
1489 /* assuming OP_DEFINED */
1499 dSP; dATARGET; bool useleft; SV *svl, *svr;
1501 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
1505 #ifdef PERL_PRESERVE_IVUV
1507 /* special-case some simple common cases */
1508 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
1510 U32 flags = (svl->sv_flags & svr->sv_flags);
1511 if (flags & SVf_IOK) {
1512 /* both args are simple IVs */
1517 topl = ((UV)il) >> (UVSIZE * 8 - 2);
1518 topr = ((UV)ir) >> (UVSIZE * 8 - 2);
1520 /* if both are in a range that can't under/overflow, do a
1521 * simple integer add: if the top of both numbers
1522 * are 00 or 11, then it's safe */
1523 if (!( ((topl+1) | (topr+1)) & 2)) {
1525 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
1531 else if (flags & SVf_NOK) {
1532 /* both args are NVs */
1536 if (lossless_NV_to_IV(nl, &il) && lossless_NV_to_IV(nr, &ir)) {
1537 /* nothing was lost by converting to IVs */
1541 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
1549 useleft = USE_LEFT(svl);
1550 /* We must see if we can perform the addition with integers if possible,
1551 as the integer code detects overflow while the NV code doesn't.
1552 If either argument hasn't had a numeric conversion yet attempt to get
1553 the IV. It's important to do this now, rather than just assuming that
1554 it's not IOK as a PV of "9223372036854775806" may not take well to NV
1555 addition, and an SV which is NOK, NV=6.0 ought to be coerced to
1556 integer in case the second argument is IV=9223372036854775806
1557 We can (now) rely on sv_2iv to do the right thing, only setting the
1558 public IOK flag if the value in the NV (or PV) slot is truly integer.
1560 A side effect is that this also aggressively prefers integer maths over
1561 fp maths for integer values.
1563 How to detect overflow?
1565 C 99 section 6.2.6.1 says
1567 The range of nonnegative values of a signed integer type is a subrange
1568 of the corresponding unsigned integer type, and the representation of
1569 the same value in each type is the same. A computation involving
1570 unsigned operands can never overflow, because a result that cannot be
1571 represented by the resulting unsigned integer type is reduced modulo
1572 the number that is one greater than the largest value that can be
1573 represented by the resulting type.
1577 which I read as "unsigned ints wrap."
1579 signed integer overflow seems to be classed as "exception condition"
1581 If an exceptional condition occurs during the evaluation of an
1582 expression (that is, if the result is not mathematically defined or not
1583 in the range of representable values for its type), the behavior is
1586 (6.5, the 5th paragraph)
1588 I had assumed that on 2s complement machines signed arithmetic would
1589 wrap, hence coded pp_add and pp_subtract on the assumption that
1590 everything perl builds on would be happy. After much wailing and
1591 gnashing of teeth it would seem that irix64 knows its ANSI spec well,
1592 knows that it doesn't need to, and doesn't. Bah. Anyway, the all-
1593 unsigned code below is actually shorter than the old code. :-)
1596 if (SvIV_please_nomg(svr)) {
1597 /* Unless the left argument is integer in range we are going to have to
1598 use NV maths. Hence only attempt to coerce the right argument if
1599 we know the left is integer. */
1606 a_valid = auvok = 1;
1607 /* left operand is undef, treat as zero. + 0 is identity,
1608 Could SETi or SETu right now, but space optimise by not adding
1609 lots of code to speed up what is probably a rarish case. */
1611 /* Left operand is defined, so is it IV? */
1612 if (SvIV_please_nomg(svl)) {
1613 if ((auvok = SvUOK(svl)))
1616 const IV aiv = SvIVX(svl);
1619 auvok = 1; /* Now acting as a sign flag. */
1621 /* Using 0- here and later to silence bogus warning
1623 auv = (UV) (0 - (UV) aiv);
1630 bool result_good = 0;
1633 bool buvok = SvUOK(svr);
1638 const IV biv = SvIVX(svr);
1643 buv = (UV) (0 - (UV) biv);
1645 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
1646 else "IV" now, independent of how it came in.
1647 if a, b represents positive, A, B negative, a maps to -A etc
1652 all UV maths. negate result if A negative.
1653 add if signs same, subtract if signs differ. */
1655 if (auvok ^ buvok) {
1659 /* Must get smaller */
1664 if (result <= buv) {
1665 /* result really should be -(auv-buv). as its negation
1666 of true value, need to swap our result flag */
1683 if (result <= (UV)IV_MIN)
1684 SETi(result == (UV)IV_MIN
1685 ? IV_MIN : -(IV)result);
1687 /* result valid, but out of range for IV. */
1688 SETn( -(NV)result );
1692 } /* Overflow, drop through to NVs. */
1697 useleft = USE_LEFT(svl);
1701 NV value = SvNV_nomg(svr);
1704 /* left operand is undef, treat as zero. + 0.0 is identity. */
1708 SETn( value + SvNV_nomg(svl) );
1714 /* also used for: pp_aelemfast_lex() */
1719 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
1720 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
1721 const U32 lval = PL_op->op_flags & OPf_MOD;
1722 const I8 key = (I8)PL_op->op_private;
1726 assert(SvTYPE(av) == SVt_PVAV);
1730 /* inlined av_fetch() for simple cases ... */
1731 if (!SvRMAGICAL(av) && key >= 0 && key <= AvFILLp(av)) {
1732 sv = AvARRAY(av)[key];
1737 PUSHs(&PL_sv_undef);
1742 /* ... else do it the hard way */
1743 svp = av_fetch(av, key, lval);
1744 sv = (svp ? *svp : &PL_sv_undef);
1746 if (UNLIKELY(!svp && lval))
1747 DIE(aTHX_ PL_no_aelem, (int)key);
1749 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
1757 dSP; dMARK; dTARGET;
1759 do_join(TARG, *MARK, MARK, SP);
1765 /* Oversized hot code. */
1767 /* also used for: pp_say() */
1771 dSP; dMARK; dORIGMARK;
1775 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;
1779 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
1782 if (MARK == ORIGMARK) {
1783 /* If using default handle then we need to make space to
1784 * pass object as 1st arg, so move other args up ...
1788 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
1791 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
1793 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
1794 | (PL_op->op_type == OP_SAY
1795 ? TIED_METHOD_SAY : 0)), sp - mark);
1798 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
1799 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
1802 SETERRNO(EBADF,RMS_IFI);
1805 else if (!(fp = IoOFP(io))) {
1807 report_wrongway_fh(gv, '<');
1810 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);
1814 SV * const ofs = GvSV(PL_ofsgv); /* $, */
1816 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
1817 while (MARK <= SP) {
1818 if (!do_print(*MARK, fp))
1822 /* don't use 'ofs' here - it may be invalidated by magic callbacks */
1823 if (!do_print(GvSV(PL_ofsgv), fp)) {
1831 while (MARK <= SP) {
1832 if (!do_print(*MARK, fp))
1840 if (PL_op->op_type == OP_SAY) {
1841 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))
1844 else if (PL_ors_sv && SvOK(PL_ors_sv))
1845 if (!do_print(PL_ors_sv, fp)) /* $\ */
1848 if (IoFLAGS(io) & IOf_FLUSH)
1849 if (PerlIO_flush(fp) == EOF)
1859 XPUSHs(&PL_sv_undef);
1864 /* do the common parts of pp_padhv() and pp_rv2hv()
1865 * It assumes the caller has done EXTEND(SP, 1) or equivalent.
1866 * 'is_keys' indicates the OPpPADHV_ISKEYS/OPpRV2HV_ISKEYS flag is set.
1867 * 'has_targ' indicates that the op has a target - this should
1868    * be a compile-time constant so that the code can be constant-folded as
1872 PERL_STATIC_INLINE OP*
1873 S_padhv_rv2hv_common(pTHX_ HV *hv, U8 gimme, bool is_keys, bool has_targ)
1882 assert(PL_op->op_type == OP_PADHV || PL_op->op_type == OP_RV2HV);
1884 if (gimme == G_LIST) {
1890 /* 'keys %h' masquerading as '%h': reset iterator */
1891 (void)hv_iterinit(hv);
1893 if (gimme == G_VOID)
1896 is_bool = ( PL_op->op_private & OPpTRUEBOOL
1897 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
1898 && block_gimme() == G_VOID));
1899 is_tied = SvRMAGICAL(hv) && (mg = mg_find(MUTABLE_SV(hv), PERL_MAGIC_tied));
1901 if (UNLIKELY(is_tied)) {
1902 if (is_keys && !is_bool) {
1904 while (hv_iternext(hv))
1909 sv = magic_scalarpack(hv, mg);
1914 #if defined(DYNAMIC_ENV_FETCH) && defined(VMS)
1915 /* maybe nothing set up %ENV for iteration yet...
1916 do this always (not just if HvUSEDKEYS(hv) is currently 0) because
1917 we ought to give a *consistent* answer to "how many keys?"
1918 whether we ask this op in scalar context, or get the list of all
1919 keys then check its length, and whether we do either with or without
1920 an %ENV lookup first. prime_env_iter() returns quickly if nothing
1922 if (SvRMAGICAL((const SV *)hv)
1923 && mg_find((const SV *)hv, PERL_MAGIC_env)) {
1929 sv = i ? &PL_sv_yes : &PL_sv_zero;
1941 /* parent op should be an unused OP_KEYS whose targ we can
1946 assert(!OpHAS_SIBLING(PL_op));
1947 k = PL_op->op_sibparent;
1948 assert(k->op_type == OP_KEYS);
1949 TARG = PAD_SV(k->op_targ);
1962 /* This is also called directly by pp_lvavref. */
1967 assert(SvTYPE(TARG) == SVt_PVAV);
1968 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO ))
1969 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) ))
1970 SAVECLEARSV(PAD_SVl(PL_op->op_targ));
1973 if (PL_op->op_flags & OPf_REF) {
1977 else if (PL_op->op_private & OPpMAYBE_LVSUB) {
1978 const I32 flags = is_lvalue_sub();
1979 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1980 if (GIMME_V == G_SCALAR)
1981 /* diag_listed_as: Can't return %s to lvalue scalar context */
1982 Perl_croak(aTHX_ "Can't return array to lvalue scalar context");
1989 if (gimme == G_LIST)
1990 return S_pushav(aTHX_ (AV*)TARG);
1992 if (gimme == G_SCALAR) {
1993 const SSize_t maxarg = AvFILL(MUTABLE_AV(TARG)) + 1;
1996 else if (PL_op->op_private & OPpTRUEBOOL)
2010 assert(SvTYPE(TARG) == SVt_PVHV);
2011 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO ))
2012 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) ))
2013 SAVECLEARSV(PAD_SVl(PL_op->op_targ));
2017 if (PL_op->op_flags & OPf_REF) {
2021 else if (PL_op->op_private & OPpMAYBE_LVSUB) {
2022 const I32 flags = is_lvalue_sub();
2023 if (flags && !(flags & OPpENTERSUB_INARGS)) {
2024 if (GIMME_V == G_SCALAR)
2025 /* diag_listed_as: Can't return %s to lvalue scalar context */
2026 Perl_croak(aTHX_ "Can't return hash to lvalue scalar context");
2034 return S_padhv_rv2hv_common(aTHX_ (HV*)TARG, gimme,
2035 cBOOL(PL_op->op_private & OPpPADHV_ISKEYS),
2040 /* also used for: pp_rv2hv() */
2041 /* also called directly by pp_lvavref */
2046 const U8 gimme = GIMME_V;
2047 static const char an_array[] = "an ARRAY";
2048 static const char a_hash[] = "a HASH";
2049 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
2050 || PL_op->op_type == OP_LVAVREF;
2051 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
2055 if (UNLIKELY(SvAMAGIC(sv))) {
2056 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
2059 if (UNLIKELY(SvTYPE(sv) != type))
2060 /* diag_listed_as: Not an ARRAY reference */
2061 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
2062 else if (UNLIKELY(PL_op->op_flags & OPf_MOD
2063 && PL_op->op_private & OPpLVAL_INTRO))
2064 Perl_croak(aTHX_ "%s", PL_no_localize_ref);
2066 else if (UNLIKELY(SvTYPE(sv) != type)) {
2069 if (!isGV_with_GP(sv)) {
2070 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
2076 gv = MUTABLE_GV(sv);
2078 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
2079 if (PL_op->op_private & OPpLVAL_INTRO)
2080 sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
2082 if (PL_op->op_flags & OPf_REF) {
2086 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
2087 const I32 flags = is_lvalue_sub();
2088 if (flags && !(flags & OPpENTERSUB_INARGS)) {
2089 if (gimme != G_LIST)
2090 goto croak_cant_return;
2097 AV *const av = MUTABLE_AV(sv);
2099 if (gimme == G_LIST) {
2102 return S_pushav(aTHX_ av);
2105 if (gimme == G_SCALAR) {
2106 const SSize_t maxarg = AvFILL(av) + 1;
2107 if (PL_op->op_private & OPpTRUEBOOL)
2108 SETs(maxarg ? &PL_sv_yes : &PL_sv_zero);
2117 return S_padhv_rv2hv_common(aTHX_ (HV*)sv, gimme,
2118 cBOOL(PL_op->op_private & OPpRV2HV_ISKEYS),
2124 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
2125 is_pp_rv2av ? "array" : "hash");
2130 S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
2132 PERL_ARGS_ASSERT_DO_ODDBALL;
2135 if (ckWARN(WARN_MISC)) {
2137 if (oddkey == firstkey &&
2139 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
2140 SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
2142 err = "Reference found where even-sized list expected";
2145 err = "Odd number of elements in hash assignment";
2146 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
2153 /* Do a mark and sweep with the SVf_BREAK flag to detect elements which
2154 * are common to both the LHS and RHS of an aassign, and replace them
2155 * with copies. All these copies are made before the actual list assign is
2158 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
2159 * element ($b) to the first LH element ($a), modifies $a; when the
2160 * second assignment is done, the second RH element now has the wrong
2161 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
2162 * Note that we don't need to make a mortal copy of $b.
2164    * The algorithm below works by, for every RHS element, marking the
2165 * corresponding LHS target element with SVf_BREAK. Then if the RHS
2166 * element is found with SVf_BREAK set, it means it would have been
2167 * modified, so make a copy.
2168 * Note that by scanning both LHS and RHS in lockstep, we avoid
2169 * unnecessary copies (like $b above) compared with a naive
2170 * "mark all LHS; copy all marked RHS; unmark all LHS".
2172    * If the LHS element is a 'my' declaration and has a refcount of 1, then
2173 * it can't be common and can be skipped.
2175 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
2176 * that we thought we didn't need to call S_aassign_copy_common(), but we
2177 * have anyway for sanity checking. If we find we need to copy, then panic.
2180 PERL_STATIC_INLINE void
2181 S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
2182 SV **firstrelem, SV **lastrelem
2190 SSize_t lcount = lastlelem - firstlelem + 1;
2191 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
2192 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
2193 bool copy_all = FALSE;
2195 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
2196 assert(firstlelem < lastlelem); /* at least 2 LH elements */
2197 assert(firstrelem < lastrelem); /* at least 2 RH elements */
2201 /* we never have to copy the first RH element; it can't be corrupted
2202 * by assigning something to the corresponding first LH element.
2203 * So this scan does in a loop: mark LHS[N]; test RHS[N+1]
2205 relem = firstrelem + 1;
2207 for (; relem <= lastrelem; relem++) {
2210 /* mark next LH element */
2212 if (--lcount >= 0) {
2215 if (UNLIKELY(!svl)) {/* skip AV alias marker */
2216 assert (lelem <= lastlelem);
2222 if (SvSMAGICAL(svl)) {
2225 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
2228 /* this LH element will consume all further args;
2229 * no need to mark any further LH elements (if any).
2230 * But we still need to scan any remaining RHS elements;
2231 * set lcount negative to distinguish from lcount == 0,
2232 * so the loop condition continues being true
2235 lelem--; /* no need to unmark this element */
2237 else if (!(do_rc1 && SvREFCNT(svl) == 1) && !SvIMMORTAL(svl)) {
2238 SvFLAGS(svl) |= SVf_BREAK;
2242 /* don't check RH element if no SVf_BREAK flags set yet */
2249 /* see if corresponding RH element needs copying */
2255 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
2256 U32 brk = (SvFLAGS(svr) & SVf_BREAK);
2260 /* op_dump(PL_op); */
2262 "panic: aassign skipped needed copy of common RH elem %"
2263 UVuf, (UV)(relem - firstrelem));
2267 TAINT_NOT; /* Each item is independent */
2269 /* Dear TODO test in t/op/sort.t, I love you.
2270 (It's relying on a panic, not a "semi-panic" from newSVsv()
2271 and then an assertion failure below.) */
2272 if (UNLIKELY(SvIS_FREED(svr))) {
2273 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
2276 /* avoid break flag while copying; otherwise COW etc
2278 SvFLAGS(svr) &= ~SVf_BREAK;
2279 /* Not newSVsv(), as it does not allow copy-on-write,
2280 resulting in wasteful copies.
2281 Also, we use SV_NOSTEAL in case the SV is used more than
2282 once, e.g. (...) = (f())[0,0]
2283 Where the same SV appears twice on the RHS without a ref
2284 count bump. (Although I suspect that the SV won't be
2285 stealable here anyway - DAPM).
2287 *relem = sv_mortalcopy_flags(svr,
2288 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
2289 /* ... but restore afterwards in case it's needed again,
2290 * e.g. ($a,$b,$c) = (1,$a,$a)
2292 SvFLAGS(svr) |= brk;
2304 while (lelem > firstlelem) {
2305 SV * const svl = *(--lelem);
2307 SvFLAGS(svl) &= ~SVf_BREAK;
2316 SV **lastlelem = PL_stack_sp;
2317 SV **lastrelem = PL_stack_base + POPMARK;
2318 SV **firstrelem = PL_stack_base + POPMARK + 1;
2319 SV **firstlelem = lastrelem + 1;
2324 /* PL_delaymagic is restored by JMPENV_POP on dieing, so we
2325 * only need to save locally, not on the save stack */
2326 U16 old_delaymagic = PL_delaymagic;
2331 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
2333 /* If there's a common identifier on both sides we have to take
2334 * special care that assigning the identifier on the left doesn't
2335 * clobber a value on the right that's used later in the list.
2338 /* at least 2 LH and RH elements, or commonality isn't an issue */
2339 if (firstlelem < lastlelem && firstrelem < lastrelem) {
2340 for (relem = firstrelem+1; relem <= lastrelem; relem++) {
2341 if (SvGMAGICAL(*relem))
2344 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
2345 if (*lelem && SvSMAGICAL(*lelem))
2348 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
2349 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
2350 /* skip the scan if all scalars have a ref count of 1 */
2351 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
2353 if (!sv || SvREFCNT(sv) == 1)
2355 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVAV)
2362 S_aassign_copy_common(aTHX_
2363 firstlelem, lastlelem, firstrelem, lastrelem
2373 /* on debugging builds, do the scan even if we've concluded we
2374 * don't need to, then panic if we find commonality. Note that the
2375 * scanner assumes at least 2 elements */
2376 if (firstlelem < lastlelem && firstrelem < lastrelem) {
2387 if (relem > lastrelem)
2390 /* first lelem loop while there are still relems */
2391 while (LIKELY(lelem <= lastlelem)) {
2395 TAINT_NOT; /* Each item stands on its own, taintwise. */
2397 assert(relem <= lastrelem);
2398 if (UNLIKELY(!lsv)) {
2401 ASSUME(SvTYPE(lsv) == SVt_PVAV);
2404 switch (SvTYPE(lsv)) {
2409 SSize_t nelems = lastrelem - relem + 1;
2410 AV *ary = MUTABLE_AV(lsv);
2412 /* Assigning to an aggregate is tricky. First there is the
2413 * issue of commonality, e.g. @a = ($a[0]). Since the
2414 * stack isn't refcounted, clearing @a prior to storing
2415 * elements will free $a[0]. Similarly with
2416 * sub FETCH { $status[$_[1]] } @status = @tied[0,1];
2418 * The way to avoid these issues is to make the copy of each
2419 * SV (and we normally store a *copy* in the array) *before*
2420 * clearing the array. But this has a problem in that
2421 * if the code croaks during copying, the not-yet-stored copies
2422 * could leak. One way to avoid this is to make all the copies
2423 * mortal, but that's quite expensive.
2425 * The current solution to these issues is to use a chunk
2426 * of the tmps stack as a temporary refcounted-stack. SVs
2427 * will be put on there during processing to avoid leaks,
2428 * but will be removed again before the end of this block,
2429 * so free_tmps() is never normally called. Also, the
2430 * sv_refcnt of the SVs doesn't have to be manipulated, since
2431 * the ownership of 1 reference count is transferred directly
2432 * from the tmps stack to the AV when the SV is stored.
2434 * We disarm slots in the temps stack by storing PL_sv_undef
2435 * there: it doesn't matter if that SV's refcount is
2436 * repeatedly decremented during a croak. But usually this is
2437 * only an interim measure. By the end of this code block
2438 * we try where possible to not leave any PL_sv_undef's on the
2439 * tmps stack e.g. by shuffling newer entries down.
2441 * There is one case where we don't copy: non-magical
2442 * SvTEMP(sv)'s with a ref count of 1. The only owner of these
2443 * is on the tmps stack, so its safe to directly steal the SV
2444 * rather than copying. This is common in things like function
2445 * returns, map etc, which all return a list of such SVs.
2447 * Note however something like @a = (f())[0,0], where there is
2448 * a danger of the same SV being shared: this avoided because
2449 * when the SV is stored as $a[0], its ref count gets bumped,
2450 * so the RC==1 test fails and the second element is copied
2453 * We also use one slot in the tmps stack to hold an extra
2454 * ref to the array, to ensure it doesn't get prematurely
2455 * freed. Again, this is removed before the end of this block.
2457 * Note that OPpASSIGN_COMMON_AGG is used to flag a possible
2458 * @a = ($a[0]) case, but the current implementation uses the
2459 * same algorithm regardless, so ignores that flag. (It *is*
2460 * used in the hash branch below, however).
2463 /* Reserve slots for ary, plus the elems we're about to copy,
2464 * then protect ary and temporarily void the remaining slots
2465 * with &PL_sv_undef */
2466 EXTEND_MORTAL(nelems + 1);
2467 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(ary);
2468 tmps_base = PL_tmps_ix + 1;
2469 for (i = 0; i < nelems; i++)
2470 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
2471 PL_tmps_ix += nelems;
2473 /* Make a copy of each RHS elem and save on the tmps_stack
2474 * (or pass through where we can optimise away the copy) */
2476 if (UNLIKELY(alias)) {
2477 U32 lval = (gimme == G_LIST)
2478 ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
2479 for (svp = relem; svp <= lastrelem; svp++) {
2484 DIE(aTHX_ "Assigned value is not a reference");
2485 if (SvTYPE(SvRV(rsv)) > SVt_PVLV)
2486 /* diag_listed_as: Assigned value is not %s reference */
2488 "Assigned value is not a SCALAR reference");
2490 *svp = rsv = sv_mortalcopy(rsv);
2491 /* XXX else check for weak refs? */
2492 rsv = SvREFCNT_inc_NN(SvRV(rsv));
2493 assert(tmps_base <= PL_tmps_max);
2494 PL_tmps_stack[tmps_base++] = rsv;
2498 for (svp = relem; svp <= lastrelem; svp++) {
2501 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
2502 /* can skip the copy */
2503 SvREFCNT_inc_simple_void_NN(rsv);
2508 /* see comment in S_aassign_copy_common about
2510 nsv = newSVsv_flags(rsv,
2511 (SV_DO_COW_SVSETSV|SV_NOSTEAL|SV_GMAGIC));
2515 assert(tmps_base <= PL_tmps_max);
2516 PL_tmps_stack[tmps_base++] = rsv;
2520 if (SvRMAGICAL(ary) || AvFILLp(ary) >= 0) /* may be non-empty */
2523 /* store in the array, the SVs that are in the tmps stack */
2525 tmps_base -= nelems;
2527 if (SvMAGICAL(ary) || SvREADONLY(ary) || !AvREAL(ary)) {
2528 /* for arrays we can't cheat with, use the official API */
2529 av_extend(ary, nelems - 1);
2530 for (i = 0; i < nelems; i++) {
2531 SV **svp = &(PL_tmps_stack[tmps_base + i]);
2533 /* A tied store won't take ownership of rsv, so keep
2534 * the 1 refcnt on the tmps stack; otherwise disarm
2535 * the tmps stack entry */
2536 if (av_store(ary, i, rsv))
2537 *svp = &PL_sv_undef;
2538 /* av_store() may have added set magic to rsv */;
2541 /* disarm ary refcount: see comments below about leak */
2542 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
2545 /* directly access/set the guts of the AV */
2546 SSize_t fill = nelems - 1;
2547 if (fill > AvMAX(ary))
2548 av_extend_guts(ary, fill, &AvMAX(ary), &AvALLOC(ary),
2550 AvFILLp(ary) = fill;
2551 Copy(&(PL_tmps_stack[tmps_base]), AvARRAY(ary), nelems, SV*);
2552 /* Quietly remove all the SVs from the tmps stack slots,
2553 * since ary has now taken ownership of the refcnt.
2554 * Also remove ary: which will now leak if we die before
2555 * the SvREFCNT_dec_NN(ary) below */
2556 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
2557 Move(&PL_tmps_stack[tmps_base + nelems],
2558 &PL_tmps_stack[tmps_base - 1],
2559 PL_tmps_ix - (tmps_base + nelems) + 1,
2561 PL_tmps_ix -= (nelems + 1);
2564 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
2565 /* its assumed @ISA set magic can't die and leak ary */
2566 SvSETMAGIC(MUTABLE_SV(ary));
2567 SvREFCNT_dec_NN(ary);
2569 relem = lastrelem + 1;
2573 case SVt_PVHV: { /* normal hash */
2579 SSize_t nelems = lastrelem - relem + 1;
2580 HV *hash = MUTABLE_HV(lsv);
2582 if (UNLIKELY(nelems & 1)) {
2583 do_oddball(lastrelem, relem);
2584 /* we have firstlelem to reuse, it's not needed any more */
2585 *++lastrelem = &PL_sv_undef;
2589 /* See the SVt_PVAV branch above for a long description of
2590 * how the following all works. The main difference for hashes
2591 * is that we treat keys and values separately (and have
2592 * separate loops for them): as for arrays, values are always
2593 * copied (except for the SvTEMP optimisation), since they
2594 * need to be stored in the hash; while keys are only
2595 * processed where they might get prematurely freed or
2598 /* tmps stack slots:
2599 * * reserve a slot for the hash keepalive;
2600 * * reserve slots for the hash values we're about to copy;
2601 * * preallocate for the keys we'll possibly copy or refcount bump
2603 * then protect hash and temporarily void the remaining
2604 * value slots with &PL_sv_undef */
2605 EXTEND_MORTAL(nelems + 1);
2607 /* convert to number of key/value pairs */
2610 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(hash);
2611 tmps_base = PL_tmps_ix + 1;
2612 for (i = 0; i < nelems; i++)
2613 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
2614 PL_tmps_ix += nelems;
2616 /* Make a copy of each RHS hash value and save on the tmps_stack
2617 * (or pass through where we can optimise away the copy) */
2619 for (svp = relem + 1; svp <= lastrelem; svp += 2) {
2622 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
2623 /* can skip the copy */
2624 SvREFCNT_inc_simple_void_NN(rsv);
2629 /* see comment in S_aassign_copy_common about
2631 nsv = newSVsv_flags(rsv,
2632 (SV_DO_COW_SVSETSV|SV_NOSTEAL|SV_GMAGIC));
2636 assert(tmps_base <= PL_tmps_max);
2637 PL_tmps_stack[tmps_base++] = rsv;
2639 tmps_base -= nelems;
2642 /* possibly protect keys */
2644 if (UNLIKELY(gimme == G_LIST)) {
2646 * @a = ((%h = ($$r, 1)), $r = "x");
2647 * $_++ for %h = (1,2,3,4);
2649 EXTEND_MORTAL(nelems);
2650 for (svp = relem; svp <= lastrelem; svp += 2)
2651 *svp = sv_mortalcopy_flags(*svp,
2652 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
2654 else if (PL_op->op_private & OPpASSIGN_COMMON_AGG) {
2655 /* for possible commonality, e.g.
2657 * avoid premature freeing RHS keys by mortalising
2659 * For a magic element, make a copy so that its magic is
2660 * called *before* the hash is emptied (which may affect
2661 * a tied value for example).
2662 * In theory we should check for magic keys in all
2663 * cases, not just under OPpASSIGN_COMMON_AGG, but in
2664 * practice, !OPpASSIGN_COMMON_AGG implies only
2665 * constants or padtmps on the RHS.
2667 EXTEND_MORTAL(nelems);
2668 for (svp = relem; svp <= lastrelem; svp += 2) {
2670 if (UNLIKELY(SvGMAGICAL(rsv))) {
2672 *svp = sv_mortalcopy_flags(*svp,
2673 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
2674 /* allow other branch to continue pushing
2675 * onto tmps stack without checking each time */
2676 n = (lastrelem - relem) >> 1;
2680 PL_tmps_stack[++PL_tmps_ix] =
2681 SvREFCNT_inc_simple_NN(rsv);
2685 if (SvRMAGICAL(hash) || HvUSEDKEYS(hash))
2688 /* "nelems" was converted to the number of pairs earlier. */
2689 if (nelems > PERL_HASH_DEFAULT_HvMAX) {
/* NOTE(review): fragmentary excerpt (lines elided) from the hash-assignment
 * arm of pp_aassign, i.e. C<%h = (...)>. Keys/values sit on the stack between
 * relem..lastrelem; their refcounts are parked on the tmps stack starting at
 * tmps_base so they survive a die() during the stores. */
2690 hv_ksplit(hash, nelems);
2693 /* now assign the keys and values to the hash */
2697 if (UNLIKELY(gimme == G_LIST)) {
2698 /* @a = (%h = (...)) etc */
/* list context must also return the assigned list, minus duplicate keys */
2700 SV **topelem = relem;
2702 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
2705 /* remove duplicates from list we return */
2706 if (!hv_exists_ent(hash, key, 0)) {
2707 /* copy key back: possibly to an earlier
2708 * stack location if we encountered dups earlier,
2709 * The values will be updated later
2714 /* A tied store won't take ownership of val, so keep
2715 * the 1 refcnt on the tmps stack; otherwise disarm
2716 * the tmps stack entry */
2717 if (hv_store_ent(hash, key, val, 0))
2718 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
2721 /* hv_store_ent() may have added set magic to val */;
2724 if (topelem < svp) {
2725 /* at this point we have removed the duplicate key/value
2726 * pairs from the stack, but the remaining values may be
2727 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
2728 * the (a 2), but the stack now probably contains
2729 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
2730 * obliterates the earlier key. So refresh all values. */
2731 lastrelem = topelem - 1;
2732 while (relem < lastrelem) {
2734 he = hv_fetch_ent(hash, *relem++, 0, 0);
2735 *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
/* non-list context: plain store loop, no duplicate filtering needed */
2741 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
2744 if (hv_store_ent(hash, key, val, 0))
2745 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
2748 /* hv_store_ent() may have added set magic to val */;
2754 /* there are still some 'live' recounts on the tmps stack
2755 * - usually caused by storing into a tied hash. So let
2756 * free_tmps() do the proper but slow job later.
2757 * Just disarm hash refcount: see comments below about leak
2759 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
2762 /* Quietly remove all the SVs from the tmps stack slots,
2763 * since hash has now taken ownership of the refcnt.
2764 * Also remove hash: which will now leak if we die before
2765 * the SvREFCNT_dec_NN(hash) below */
2766 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
2767 Move(&PL_tmps_stack[tmps_base + nelems],
2768 &PL_tmps_stack[tmps_base - 1],
2769 PL_tmps_ix - (tmps_base + nelems) + 1,
2771 PL_tmps_ix -= (nelems + 1);
/* drop the refcount that was protecting the hash during the stores */
2774 SvREFCNT_dec_NN(hash);
2776 relem = lastrelem + 1;
/* NOTE(review): fragmentary excerpt from pp_aassign — assignment to a
 * scalar lvalue element, then the cleanup loop run when the rvalue list
 * has been exhausted but lvalues remain (they get emptied/undefined). */
2781 if (!SvIMMORTAL(lsv)) {
/* warn on assigning to a temporary that nothing else references */
2785 SvTEMP(lsv) && !SvSMAGICAL(lsv) && SvREFCNT(lsv) == 1 &&
2786 (!isGV_with_GP(lsv) || SvFAKE(lsv)) && ckWARN(WARN_MISC)
2789 packWARN(WARN_MISC),
2790 "Useless assignment to a temporary"
2793 /* avoid freeing $$lsv if it might be needed for further
2794 * elements, e.g. ($ref, $foo) = (1, $$ref) */
2796 && ( ((ref = SvRV(lsv)), SvREFCNT(ref)) == 1)
2797 && lelem <= lastlelem
/* keep the referent alive by mortalising it instead of freeing now */
2800 SvREFCNT_inc_simple_void_NN(ref);
2801 /* an unrolled sv_2mortal */
2803 if (UNLIKELY(ix >= PL_tmps_max))
2804 /* speculatively grow enough to cover other
2806 (void)tmps_grow_p(ix + (lastlelem - lelem));
2807 PL_tmps_stack[ix] = ref;
2810 sv_setsv(lsv, *relem);
2814 if (++relem > lastrelem)
2823 /* simplified lelem loop for when there are no relems left */
2824 while (LIKELY(lelem <= lastlelem)) {
2827 TAINT_NOT; /* Each item stands on its own, taintwise. */
2829 if (UNLIKELY(!lsv)) {
2831 ASSUME(SvTYPE(lsv) == SVt_PVAV);
/* dispatch on lvalue type: arrays/hashes get cleared, scalars undef'd */
2834 switch (SvTYPE(lsv)) {
2836 if (SvRMAGICAL(lsv) || AvFILLp((SV*)lsv) >= 0) {
2838 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
2844 if (SvRMAGICAL(lsv) || HvUSEDKEYS((HV*)lsv))
2849 if (!SvIMMORTAL(lsv)) {
2858 TAINT_NOT; /* result of list assign isn't tainted */
/* NOTE(review): fragmentary excerpt from pp_aassign — the delayed-magic
 * epilogue. Assignments like ($<,$>) = (...) set bits in PL_delaymagic so
 * real/effective uid/gid are changed atomically here, using whichever of
 * setresuid/setreuid/setruid+seteuid (and the gid equivalents) Configure
 * found. Afterwards taint mode is recomputed, then the context-dependent
 * return value of the list assignment is pushed. */
2860 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
2861 /* Will be used to set PL_tainting below */
2862 Uid_t tmp_uid = PerlProc_getuid();
2863 Uid_t tmp_euid = PerlProc_geteuid();
2864 Gid_t tmp_gid = PerlProc_getgid();
2865 Gid_t tmp_egid = PerlProc_getegid();
2867 /* XXX $> et al currently silently ignore failures */
2868 if (PL_delaymagic & DM_UID) {
2869 #ifdef HAS_SETRESUID
/* best case: one call sets real and effective uid together */
2871 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
2872 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
2874 #elif defined(HAS_SETREUID)
2876 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
2877 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
2880 if ((PL_delaymagic & DM_UID) == DM_RUID) {
2881 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
2882 PL_delaymagic &= ~DM_RUID;
2884 # endif /* HAS_SETRUID */
2886 if ((PL_delaymagic & DM_UID) == DM_EUID) {
2887 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
2888 PL_delaymagic &= ~DM_EUID;
2890 # endif /* HAS_SETEUID */
2891 if (PL_delaymagic & DM_UID) {
/* last resort: setuid() can only set both ids to the same value */
2892 if (PL_delaymagic_uid != PL_delaymagic_euid)
2893 DIE(aTHX_ "No setreuid available");
2894 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
2896 #endif /* HAS_SETRESUID */
2898 tmp_uid = PerlProc_getuid();
2899 tmp_euid = PerlProc_geteuid();
2901 /* XXX $> et al currently silently ignore failures */
/* same cascade again for group ids */
2902 if (PL_delaymagic & DM_GID) {
2903 #ifdef HAS_SETRESGID
2905 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
2906 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
2908 #elif defined(HAS_SETREGID)
2910 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
2911 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
2914 if ((PL_delaymagic & DM_GID) == DM_RGID) {
2915 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
2916 PL_delaymagic &= ~DM_RGID;
2918 # endif /* HAS_SETRGID */
2920 if ((PL_delaymagic & DM_GID) == DM_EGID) {
2921 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
2922 PL_delaymagic &= ~DM_EGID;
2924 # endif /* HAS_SETEGID */
2925 if (PL_delaymagic & DM_GID) {
2926 if (PL_delaymagic_gid != PL_delaymagic_egid)
2927 DIE(aTHX_ "No setregid available");
2928 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
2930 #endif /* HAS_SETRESGID */
2932 tmp_gid = PerlProc_getgid();
2933 tmp_egid = PerlProc_getegid();
/* taint mode turns on when running setuid/setgid (ids differ) */
2935 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
2936 #ifdef NO_TAINT_SUPPORT
2937 PERL_UNUSED_VAR(tmp_uid);
2938 PERL_UNUSED_VAR(tmp_euid);
2939 PERL_UNUSED_VAR(tmp_gid);
2940 PERL_UNUSED_VAR(tmp_egid);
2943 PL_delaymagic = old_delaymagic;
/* return value: nothing in void context, element count in scalar context */
2945 if (gimme == G_VOID)
2946 SP = firstrelem - 1;
2947 else if (gimme == G_SCALAR) {
2950 if (PL_op->op_private & OPpASSIGN_TRUEBOOL)
2951 SETs((firstlelem - firstrelem) ? &PL_sv_yes : &PL_sv_zero);
2954 SETi(firstlelem - firstrelem);
/* NOTE(review): fragmentary excerpt of pp_qr (the qr// operator). Builds a
 * fresh blessed Regexp object wrapping a copy of the precompiled pattern,
 * clones any attached anon CV closure, and propagates taint. */
2966 PMOP * const pm = cPMOP;
2967 REGEXP * rx = PM_GETRE(pm);
2968 regexp *prog = ReANY(rx);
/* package to bless into comes from the regex engine (usually "Regexp") */
2969 SV * const pkg = RXp_ENGINE(prog)->qr_package(aTHX_ (rx));
2970 SV * const rv = newSV_type_mortal(SVt_IV);
2974 SvUPGRADE(rv, SVt_IV);
2975 /* For a subroutine describing itself as "This is a hacky workaround" I'm
2976 loathe to use it here, but it seems to be the right fix. Or close.
2977 The key part appears to be that it's essential for pp_qr to return a new
2978 object (SV), which implies that there needs to be an effective way to
2979 generate a new SV from the existing SV that is pre-compiled in the
2981 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));
2984 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
/* clone a closure-carrying anon CV so each qr// gets its own pad */
2985 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
2986 *cvp = cv_clone(cv);
2987 SvREFCNT_dec_NN(cv);
2991 HV *const stash = gv_stashsv(pkg, GV_ADD);
2992 SvREFCNT_dec_NN(pkg);
2993 (void)sv_bless(rv, stash);
/* a tainted pattern taints the object it compiles into */
2996 if (UNLIKELY(RXp_ISTAINTED(prog))) {
2998 SvTAINTED_on(SvRV(rv));
/* NOTE(review): fragmentary — return type/braces elided in this excerpt. */
3005 S_are_we_in_Debug_EXECUTE_r(pTHX)
3007 /* Given a 'use re' is in effect, does it ask for outputting execution
3010 * This is separated from the sole place it's called, an inline function,
3011 * because it is the large-ish slow portion of the function */
3013 DECLARE_AND_GET_RE_DEBUG_FLAGS_NON_REGEX;
3015 return cBOOL(RE_DEBUG_FLAG(RE_DEBUG_EXECUTE_MASK));
/* NOTE(review): fragmentary — some interior lines and the closing brace are
 * elided in this excerpt. */
3018 PERL_STATIC_INLINE bool
3019 S_should_we_output_Debug_r(pTHX_ regexp *prog)
3021 PERL_ARGS_ASSERT_SHOULD_WE_OUTPUT_DEBUG_R;
3023 /* pp_match can output regex debugging info. This function returns a
3024 * boolean as to whether or not it should.
3026 * Under -Dr, it should. Any reasonable compiler will optimize this bit of
3027 * code away on non-debugging builds. */
3028 if (UNLIKELY(DEBUG_r_TEST)) {
3032 /* If the regex engine is using the non-debugging execution routine, then
3033 * no debugging should be output. Same if the field is NULL that pluggable
3034 * engines are not supposed to fill. */
3035 if ( LIKELY(prog->engine->exec == &Perl_regexec_flags)
3036 || UNLIKELY(prog->engine->op_comp == NULL))
3041 /* Otherwise have to check */
/* slow path factored out into the non-inline helper above */
3042 return S_are_we_in_Debug_EXECUTE_r(aTHX);
/* NOTE(review): fragmentary excerpt of pp_match (m// and the match side of
 * s///): set up the target string, honour //g pos() state, call the regex
 * engine, then either set a boolean result or push capture groups. Many
 * interior lines are elided. */
3052 SSize_t curpos = 0; /* initial pos() or current $+[0] */
3055 const char *truebase; /* Start of string */
3056 REGEXP *rx = PM_GETRE(pm);
3057 regexp *prog = ReANY(rx);
3059 const U8 gimme = GIMME_V;
3061 const I32 oldsave = PL_savestack_ix;
3062 I32 had_zerolen = 0;
3065 if (PL_op->op_flags & OPf_STACKED)
3076 PUTBACK; /* EVAL blocks need stack_sp. */
3077 /* Skip get-magic if this is a qr// clone, because regcomp has
3079 truebase = prog->mother_re
3080 ? SvPV_nomg_const(TARG, len)
3081 : SvPV_const(TARG, len);
3083 DIE(aTHX_ "panic: pp_match");
3084 strend = truebase + len;
3085 rxtainted = (RXp_ISTAINTED(prog) ||
3086 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
3089 /* We need to know this in case we fail out early - pos() must be reset */
3090 global = dynpm->op_pmflags & PMf_GLOBAL;
3092 /* PMdf_USED is set after a ?? matches once */
3095 SvREADONLY(PL_regex_pad[pm->op_pmoffset])
3097 pm->op_pmflags & PMf_USED
3100 if (UNLIKELY(should_we_output_Debug_r(prog))) {
3101 PerlIO_printf(Perl_debug_log, "?? already matched once");
3106 /* handle the empty pattern */
/* an empty pattern reuses the last successful pattern (PL_curpm) */
3107 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
3108 if (PL_curpm == PL_reg_curpm) {
3109 if (PL_curpm_under) {
3110 if (PL_curpm_under == PL_reg_curpm) {
3111 Perl_croak(aTHX_ "Infinite recursion via empty pattern");
3113 pm = PL_curpm_under;
/* cheap early reject: string shorter than the minimum possible match */
3123 if (RXp_MINLEN(prog) >= 0 && (STRLEN)RXp_MINLEN(prog) > len) {
3124 if (UNLIKELY(should_we_output_Debug_r(prog))) {
3125 PerlIO_printf(Perl_debug_log,
3126 "String shorter than min possible regex match (%zd < %zd)\n",
3127 len, RXp_MINLEN(prog));
3132 /* get pos() if //g */
3134 mg = mg_find_mglob(TARG);
3135 if (mg && mg->mg_len >= 0) {
3136 curpos = MgBYTEPOS(mg, TARG, truebase, len);
3137 /* last time pos() was set, it was zero-length match */
3138 if (mg->mg_flags & MGf_MINMATCH)
3143 #ifdef PERL_SAWAMPERSAND
3144 if ( RXp_NPARENS(prog)
3146 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
3147 || (dynpm->op_pmflags & PMf_KEEPCOPY)
3151 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
3152 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
3153 * only on the first iteration. Therefore we need to copy $' as well
3154 * as $&, to make the rest of the string available for captures in
3155 * subsequent iterations */
3156 if (! (global && gimme == G_LIST))
3157 r_flags |= REXEC_COPY_SKIP_POST;
3159 #ifdef PERL_SAWAMPERSAND
3160 if (dynpm->op_pmflags & PMf_KEEPCOPY)
3161 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
3162 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
3169 s = truebase + curpos;
/* the actual engine call; zero return means no match */
3171 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
3172 had_zerolen, TARG, NULL, r_flags))
3176 if (dynpm->op_pmflags & PMf_ONCE)
3178 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
3180 dynpm->op_pmflags |= PMf_USED;
3184 RXp_MATCH_TAINTED_on(prog);
3185 TAINT_IF(RXp_MATCH_TAINTED(prog));
3189 if (global && (gimme != G_LIST || (dynpm->op_pmflags & PMf_CONTINUE))) {
/* record pos() for the next //g iteration via mglob magic */
3191 mg = sv_magicext_mglob(TARG);
3192 MgBYTEPOS_set(mg, TARG, truebase, RXp_OFFS(prog)[0].end);
3193 if (RXp_ZERO_LEN(prog))
3194 mg->mg_flags |= MGf_MINMATCH;
3196 mg->mg_flags &= ~MGf_MINMATCH;
3199 if ((!RXp_NPARENS(prog) && !global) || gimme != G_LIST) {
3200 LEAVE_SCOPE(oldsave);
3204 /* push captures on stack */
3207 const I32 nparens = RXp_NPARENS(prog);
3208 I32 i = (global && !nparens) ? 1 : 0;
3210 SPAGAIN; /* EVAL blocks could move the stack. */
3211 EXTEND(SP, nparens + i);
3212 EXTEND_MORTAL(nparens + i);
3213 for (i = !i; i <= nparens; i++) {
3214 if (LIKELY((RXp_OFFS(prog)[i].start != -1)
3215 && RXp_OFFS(prog)[i].end != -1 ))
3217 const I32 len = RXp_OFFS(prog)[i].end - RXp_OFFS(prog)[i].start;
3218 const char * const s = RXp_OFFS(prog)[i].start + truebase;
3219 if (UNLIKELY( RXp_OFFS(prog)[i].end < 0
3220 || RXp_OFFS(prog)[i].start < 0
3222 || len > strend - s)
3224 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
3225 "start=%ld, end=%ld, s=%p, strend=%p, len=%" UVuf,
3226 (long) i, (long) RXp_OFFS(prog)[i].start,
3227 (long)RXp_OFFS(prog)[i].end, s, strend, (UV) len);
3228 PUSHs(newSVpvn_flags(s, len,
/* unmatched group: push undef placeholder */
3234 PUSHs(sv_newmortal());
3238 curpos = (UV)RXp_OFFS(prog)[0].end;
3239 had_zerolen = RXp_ZERO_LEN(prog);
3240 PUTBACK; /* EVAL blocks may use stack */
3241 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
3244 LEAVE_SCOPE(oldsave);
3247 NOT_REACHED; /* NOTREACHED */
/* failure path: reset pos() unless //gc asked to keep it */
3250 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
3252 mg = mg_find_mglob(TARG);
3256 LEAVE_SCOPE(oldsave);
3257 if (gimme == G_LIST)
/* NOTE(review): fragmentary excerpt of Perl_do_readline — implements
 * <FH>, readline(), .= <FH> (OP_RCATLINE) and glob(). Handles tied
 * handles, the ARGV pseudo-file, buffer sizing, taint, and the post-read
 * glob filtering / UTF-8 validity warning. Interior lines elided. */
3263 Perl_do_readline(pTHX)
3265 dSP; dTARGETSTACKED;
3270 IO * const io = GvIO(PL_last_in_gv);
3271 const I32 type = PL_op->op_type;
3272 const U8 gimme = GIMME_V;
/* tied handle: delegate to the READLINE method */
3275 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
3277 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
3278 if (gimme == G_SCALAR) {
3280 SvSetSV_nosteal(TARG, TOPs);
3290 if (IoFLAGS(io) & IOf_ARGV) {
3291 if (IoFLAGS(io) & IOf_START) {
/* empty @ARGV: read stdin, presented as filename "-" */
3293 if (av_count(GvAVn(PL_last_in_gv)) == 0) {
3294 IoFLAGS(io) &= ~IOf_START;
3295 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
3296 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
3297 sv_setpvs(GvSVn(PL_last_in_gv), "-");
3298 SvSETMAGIC(GvSV(PL_last_in_gv));
3303 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
3304 if (!fp) { /* Note: fp != IoIFP(io) */
3305 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
3308 else if (type == OP_GLOB)
3309 fp = Perl_start_glob(aTHX_ POPs, io);
3311 else if (type == OP_GLOB)
3313 else if (IoTYPE(io) == IoTYPE_WRONLY) {
3314 report_wrongway_fh(PL_last_in_gv, '>');
/* no usable filehandle: maybe warn, then return undef/empty list */
3318 if ((!io || !(IoFLAGS(io) & IOf_START))
3319 && ckWARN(WARN_CLOSED)
3322 report_evil_fh(PL_last_in_gv);
3324 if (gimme == G_SCALAR) {
3325 /* undef TARG, and push that undefined value */
3326 if (type != OP_RCATLINE) {
3334 if (gimme == G_SCALAR) {
3336 if (type == OP_RCATLINE && SvGMAGICAL(sv))
3339 if (type == OP_RCATLINE)
3340 SvPV_force_nomg_nolen(sv);
3344 else if (isGV_with_GP(sv)) {
3345 SvPV_force_nomg_nolen(sv);
3347 SvUPGRADE(sv, SVt_PV);
3348 tmplen = SvLEN(sv); /* remember if already alloced */
3349 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
3350 /* try short-buffering it. Please update t/op/readline.t
3351 * if you change the growth length.
3356 if (type == OP_RCATLINE && SvOK(sv)) {
3358 SvPV_force_nomg_nolen(sv);
/* list context: accumulate lines in fresh mortals */
3364 sv = sv_2mortal(newSV(80));
3368 /* This should not be marked tainted if the fp is marked clean */
3369 #define MAYBE_TAINT_LINE(io, sv) \
3370 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
3375 /* delay EOF state for a snarfed empty file */
3376 #define SNARF_EOF(gimme,rs,io,sv) \
3377 (gimme != G_SCALAR || SvCUR(sv) \
3378 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
/* main read loop: sv_gets() fills sv; a short/failed read means EOF/error */
3382 if (!sv_gets(sv, fp, offset)
3384 || SNARF_EOF(gimme, PL_rs, io, sv)
3385 || PerlIO_error(fp)))
3387 if (IoFLAGS(io) & IOf_ARGV) {
3388 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
3392 (void)do_close(PL_last_in_gv, FALSE);
3394 else if (type == OP_GLOB) {
3395 /* clear any errors here so we only fail on the pclose()
3396 failing, which should only happen on the child
3399 PerlIO_clearerr(fp);
3400 if (!do_close(PL_last_in_gv, FALSE)) {
3401 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
3402 "glob failed (child exited with status %d%s)",
3403 (int)(STATUS_CURRENT >> 8),
3404 (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
3407 if (gimme == G_SCALAR) {
3408 if (type != OP_RCATLINE) {
3409 SV_CHECK_THINKFIRST_COW_DROP(TARG);
3415 MAYBE_TAINT_LINE(io, sv);
3418 MAYBE_TAINT_LINE(io, sv);
3420 IoFLAGS(io) |= IOf_NOLINE;
3424 if (type == OP_GLOB) {
/* glob: strip trailing record separator, then reject patterns whose
 * metacharacters survived to the output (unmatched wildcard) */
3428 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
3429 char * const tmps = SvEND(sv) - 1;
3430 if (*tmps == *SvPVX_const(PL_rs)) {
3432 SvCUR_set(sv, SvCUR(sv) - 1);
3435 for (t1 = SvPVX_const(sv); *t1; t1++)
3437 if (memCHRs("*%?", *t1))
3439 if (memCHRs("$&*(){}[]'\";\\|?<>~`", *t1))
3442 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
3443 (void)POPs; /* Unmatched wildcard? Chuck it... */
3446 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
3447 if (ckWARN(WARN_UTF8)) {
3448 const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
3449 const STRLEN len = SvCUR(sv) - offset;
3452 if (!is_utf8_string_loc(s, len, &f))
3453 /* Emulate :encoding(utf8) warning in the same case. */
3454 Perl_warner(aTHX_ packWARN(WARN_UTF8),
3455 "utf8 \"\\x%02X\" does not map to Unicode",
3456 f < (U8*)SvEND(sv) ? *f : 0);
3459 if (gimme == G_LIST) {
3460 if (SvLEN(sv) - SvCUR(sv) > 20) {
3461 SvPV_shrink_to_cur(sv);
3463 sv = sv_2mortal(newSV(80));
3466 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
3467 /* try to reclaim a bit of scalar space (only on 1st alloc) */
3468 const STRLEN new_len
3469 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */
3470 SvPV_renew(sv, new_len);
/* NOTE(review): fragmentary excerpt of pp_helem — fetch $hash{key},
 * handling lvalue context, deferred-element LVs, local() restoration,
 * and OPpDEREF autovivification. Interior lines elided. */
3481 SV * const keysv = POPs;
3482 HV * const hv = MUTABLE_HV(POPs);
3483 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
3484 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
3486 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
3487 bool preeminent = TRUE;
3489 if (SvTYPE(hv) != SVt_PVHV)
3496 /* If we can determine whether the element exists,
3497 * Try to preserve the existenceness of a tied hash
3498 * element by using EXISTS and DELETE if possible.
3499 * Fallback to FETCH and STORE otherwise. */
3500 if (SvCANEXISTDELETE(hv))
3501 preeminent = hv_exists_ent(hv, keysv, 0);
/* lvalue && !defer forces creation of the element now */
3504 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
3505 svp = he ? &HeVAL(he) : NULL;
3507 if (!svp || !*svp || *svp == &PL_sv_undef) {
3511 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
/* deferred lvalue: return a PVLV that only creates the element on store */
3513 lv = newSV_type_mortal(SVt_PVLV);
3515 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
3516 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
3517 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
3523 if (HvNAME_get(hv) && isGV_or_RVCV(*svp))
3524 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
3525 else if (preeminent)
3526 save_helem_flags(hv, keysv, svp,
3527 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
/* element didn't pre-exist: local() should delete it on scope exit */
3529 SAVEHDELETE(hv, keysv);
3531 else if (PL_op->op_private & OPpDEREF) {
3532 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
3536 sv = (svp && *svp ? *svp : &PL_sv_undef);
3537 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
3538 * was to make C<local $tied{foo} = $tied{foo}> possible.
3539 * However, it seems no longer to be needed for that purpose, and
3540 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g>
3541 * would loop endlessly since the pos magic is getting set on the
3542 * mortal copy and lost. However, the copy has the effect of
3543 * triggering the get magic, and losing it altogether made things like
3544 * c<$tied{foo};> in void context no longer do get magic, which some
3545 * code relied on. Also, delayed triggering of magic on @+ and friends
3546 * meant the original regex may be out of scope by now. So as a
3547 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
3548 * being called too many times). */
3549 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
3556 /* a stripped-down version of Perl_softref2xv() for use by
3557 * pp_multideref(), which doesn't use PL_op->op_flags */
/* NOTE(review): fragmentary — return type, parameter tail and braces are
 * elided in this excerpt. Converts a symbolic reference (string) to a GV,
 * dying under 'use strict refs' or for an unusable (undef/empty) name. */
3560 S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
3563 if (PL_op->op_private & HINT_STRICT_REFS) {
3565 Perl_die(aTHX_ PL_no_symref_sv, sv,
3566 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
3568 Perl_die(aTHX_ PL_no_usym, what);
3571 Perl_die(aTHX_ PL_no_usym, what);
3572 return gv_fetchsv_nomg(sv, GV_ADD, type);
3576 /* Handle one or more aggregate derefs and array/hash indexings, e.g.
3577 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
3579 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
3580 * Each of these either contains a set of actions, or an argument, such as
3581 * an IV to use as an array index, or a lexical var to retrieve.
3582 * Several actions re stored per UV; we keep shifting new actions off the
3583 * one UV, and only reload when it becomes zero.
/* NOTE(review): fragmentary excerpt of pp_multideref, array half. Interior
 * lines (including the PP(pp_multideref) header and loop scaffolding) are
 * elided in this excerpt. */
3588 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
3589 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
3590 UV actions = items->uv;
3593 /* this tells find_uninit_var() where we're up to */
3594 PL_multideref_pc = items;
3597 /* there are three main classes of action; the first retrieve
3598 * the initial AV or HV from a variable or the stack; the second
3599 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
3600 * the third an unrolled (/DREFHV, rv2hv, helem).
3602 switch (actions & MDEREF_ACTION_MASK) {
3605 actions = (++items)->uv;
3608 case MDEREF_AV_padav_aelem: /* $lex[...] */
3609 sv = PAD_SVl((++items)->pad_offset);
3612 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
3613 sv = UNOP_AUX_item_sv(++items);
3614 assert(isGV_with_GP(sv));
3615 sv = (SV*)GvAVn((GV*)sv);
3618 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
3623 goto do_AV_rv2av_aelem;
3626 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
3627 sv = UNOP_AUX_item_sv(++items);
3628 assert(isGV_with_GP(sv));
3629 sv = GvSVn((GV*)sv);
3630 goto do_AV_vivify_rv2av_aelem;
3632 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
3633 sv = PAD_SVl((++items)->pad_offset);
3636 do_AV_vivify_rv2av_aelem:
3637 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
3638 /* this is the OPpDEREF action normally found at the end of
3639 * ops like aelem, helem, rv2sv */
3640 sv = vivify_ref(sv, OPpDEREF_AV);
3644 /* this is basically a copy of pp_rv2av when it just has the
3647 if (LIKELY(SvROK(sv))) {
3648 if (UNLIKELY(SvAMAGIC(sv))) {
3649 sv = amagic_deref_call(sv, to_av_amg);
3652 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
3653 DIE(aTHX_ "Not an ARRAY reference");
3655 else if (SvTYPE(sv) != SVt_PVAV) {
3656 if (!isGV_with_GP(sv))
3657 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
3658 sv = MUTABLE_SV(GvAVn((GV*)sv));
3664 /* retrieve the key; this may be either a lexical or package
3665 * var (whose index/ptr is stored as an item) or a signed
3666 * integer constant stored as an item.
3669 IV elem = 0; /* to shut up stupid compiler warnings */
3672 assert(SvTYPE(sv) == SVt_PVAV);
3674 switch (actions & MDEREF_INDEX_MASK) {
3675 case MDEREF_INDEX_none:
3677 case MDEREF_INDEX_const:
3678 elem = (++items)->iv;
3680 case MDEREF_INDEX_padsv:
3681 elemsv = PAD_SVl((++items)->pad_offset);
3683 case MDEREF_INDEX_gvsv:
3684 elemsv = UNOP_AUX_item_sv(++items);
3685 assert(isGV_with_GP(elemsv));
3686 elemsv = GvSVn((GV*)elemsv);
3688 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
3689 && ckWARN(WARN_MISC)))
3690 Perl_warner(aTHX_ packWARN(WARN_MISC),
3691 "Use of reference \"%" SVf "\" as array index",
3693 /* the only time that S_find_uninit_var() needs this
3694 * is to determine which index value triggered the
3695 * undef warning. So just update it here. Note that
3696 * since we don't save and restore this var (e.g. for
3697 * tie or overload execution), its value will be
3698 * meaningless apart from just here */
3699 PL_multideref_pc = items;
3700 elem = SvIV(elemsv);
3705 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
/* intermediate step of a chain: fetch (vivifying) and keep walking */
3707 if (!(actions & MDEREF_FLAG_last)) {
3708 SV** svp = av_fetch((AV*)sv, elem, 1);
3709 if (!svp || ! (sv=*svp))
3710 DIE(aTHX_ PL_no_aelem, elem);
3714 if (PL_op->op_private &
3715 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
3717 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
3718 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
3721 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
3722 sv = av_delete((AV*)sv, elem, discard);
/* final element: mirrors pp_aelem's lvalue/local/defer handling */
3730 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
3731 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
3732 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
3733 bool preeminent = TRUE;
3734 AV *const av = (AV*)sv;
3737 if (UNLIKELY(localizing)) {
3741 /* If we can determine whether the element exist,
3742 * Try to preserve the existenceness of a tied array
3743 * element by using EXISTS and DELETE if possible.
3744 * Fallback to FETCH and STORE otherwise. */
3745 if (SvCANEXISTDELETE(av))
3746 preeminent = av_exists(av, elem);
3749 svp = av_fetch(av, elem, lval && !defer);
3752 if (!svp || !(sv = *svp)) {
3755 DIE(aTHX_ PL_no_aelem, elem);
3756 len = av_top_index(av);
3757 /* Resolve a negative index that falls within
3758 * the array. Leave it negative it if falls
3759 * outside the array. */
3760 if (elem < 0 && len + elem >= 0)
3762 if (elem >= 0 && elem <= len)
3763 /* Falls within the array. */
3764 sv = av_nonelem(av,elem);
3766 /* Falls outside the array. If it is neg-
3767 ative, magic_setdefelem will use the
3768 index for error reporting. */
3769 sv = sv_2mortal(newSVavdefelem(av,elem,1));
3772 if (UNLIKELY(localizing)) {
3774 save_aelem(av, elem, svp);
3775 sv = *svp; /* may have changed */
3778 SAVEADELETE(av, elem);
3783 sv = (svp ? *svp : &PL_sv_undef);
3784 /* see note in pp_helem() */
3785 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
/* NOTE(review): fragmentary excerpt of pp_multideref, hash half — the
 * mirror image of the array actions above: resolve the HV, fetch the key
 * item, then an unrolled pp_helem. Interior lines elided. */
3802 case MDEREF_HV_padhv_helem: /* $lex{...} */
3803 sv = PAD_SVl((++items)->pad_offset);
3806 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
3807 sv = UNOP_AUX_item_sv(++items);
3808 assert(isGV_with_GP(sv));
3809 sv = (SV*)GvHVn((GV*)sv);
3812 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
3817 goto do_HV_rv2hv_helem;
3820 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
3821 sv = UNOP_AUX_item_sv(++items);
3822 assert(isGV_with_GP(sv));
3823 sv = GvSVn((GV*)sv);
3824 goto do_HV_vivify_rv2hv_helem;
3826 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
3827 sv = PAD_SVl((++items)->pad_offset);
3830 do_HV_vivify_rv2hv_helem:
3831 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
3832 /* this is the OPpDEREF action normally found at the end of
3833 * ops like aelem, helem, rv2sv */
3834 sv = vivify_ref(sv, OPpDEREF_HV);
3838 /* this is basically a copy of pp_rv2hv when it just has the
3839 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
3842 if (LIKELY(SvROK(sv))) {
3843 if (UNLIKELY(SvAMAGIC(sv))) {
3844 sv = amagic_deref_call(sv, to_hv_amg);
3847 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
3848 DIE(aTHX_ "Not a HASH reference");
3850 else if (SvTYPE(sv) != SVt_PVHV) {
3851 if (!isGV_with_GP(sv))
3852 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
3853 sv = MUTABLE_SV(GvHVn((GV*)sv));
3859 /* retrieve the key; this may be either a lexical / package
3860 * var or a string constant, whose index/ptr is stored as an
3863 SV *keysv = NULL; /* to shut up stupid compiler warnings */
3865 assert(SvTYPE(sv) == SVt_PVHV);
3867 switch (actions & MDEREF_INDEX_MASK) {
3868 case MDEREF_INDEX_none:
3871 case MDEREF_INDEX_const:
3872 keysv = UNOP_AUX_item_sv(++items);
3875 case MDEREF_INDEX_padsv:
3876 keysv = PAD_SVl((++items)->pad_offset);
3879 case MDEREF_INDEX_gvsv:
3880 keysv = UNOP_AUX_item_sv(++items);
3881 keysv = GvSVn((GV*)keysv);
3885 /* see comment above about setting this var */
3886 PL_multideref_pc = items;
3889 /* ensure that candidate CONSTs have been HEKified */
3890 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
3891 || SvTYPE(keysv) >= SVt_PVMG
3894 || SvIsCOW_shared_hash(keysv));
3896 /* this is basically a copy of pp_helem with OPpDEREF skipped */
/* intermediate step of a chain: fetch (vivifying) and keep walking */
3898 if (!(actions & MDEREF_FLAG_last)) {
3899 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
3900 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
3901 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
3905 if (PL_op->op_private &
3906 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
3908 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
3909 sv = hv_exists_ent((HV*)sv, keysv, 0)
3910 ? &PL_sv_yes : &PL_sv_no;
3913 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
3914 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
/* final element: mirrors pp_helem's lvalue/local/defer handling */
3922 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
3923 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
3924 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
3925 bool preeminent = TRUE;
3927 HV * const hv = (HV*)sv;
3930 if (UNLIKELY(localizing)) {
3934 /* If we can determine whether the element exist,
3935 * Try to preserve the existenceness of a tied hash
3936 * element by using EXISTS and DELETE if possible.
3937 * Fallback to FETCH and STORE otherwise. */
3938 if (SvCANEXISTDELETE(hv))
3939 preeminent = hv_exists_ent(hv, keysv, 0);
3942 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
3943 svp = he ? &HeVAL(he) : NULL;
3947 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
3951 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
3952 lv = newSV_type_mortal(SVt_PVLV);
3954 sv_magic(lv, key2 = newSVsv(keysv),
3955 PERL_MAGIC_defelem, NULL, 0);
3956 /* sv_magic() increments refcount */
3957 SvREFCNT_dec_NN(key2);
3958 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
3964 if (HvNAME_get(hv) && isGV_or_RVCV(sv))
3965 save_gp(MUTABLE_GV(sv),
3966 !(PL_op->op_flags & OPf_SPECIAL));
3967 else if (preeminent) {
3968 save_helem_flags(hv, keysv, svp,
3969 (PL_op->op_flags & OPf_SPECIAL)
3970 ? 0 : SAVEf_SETMAGIC);
3971 sv = *svp; /* may have changed */
3974 SAVEHDELETE(hv, keysv);
3979 sv = (svp && *svp ? *svp : &PL_sv_undef);
3980 /* see note in pp_helem() */
3981 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
3990 actions >>= MDEREF_SHIFT;
/* NOTE(review): fragmentary excerpt of pp_iter — the per-iteration step of
 * foreach loops, covering string ranges ('a'..'z'), integer ranges, literal
 * lists and arrays, plus the many-at-a-time lexical case. Interior lines are
 * elided and the function continues past the end of this excerpt. */
3998 PERL_CONTEXT *cx = CX_CUR();
3999 SV **itersvp = CxITERVAR(cx);
4000 const U8 type = CxTYPE(cx);
4002 /* Classic "for" syntax iterates one-at-a-time.
4003 Many-at-a-time for loops are only for lexicals declared as part of the
4004 for loop, and rely on all the lexicals being in adjacent pad slots.
4006 Curiously, even if the iterator variable is a lexical, the pad offset is
4007 stored in the targ slot of the ENTERITER op, meaning that targ of this OP
4008 has always been zero. Hence we can use this op's targ to hold "how many"
4009 for many-at-a-time. We actually store C<how_many - 1>, so that for the
4010 case of one-at-a-time we have zero (as before), as this makes all the
4011 logic of the for loop below much simpler, with all the other
4012 one-at-a-time cases just falling out of this "naturally". */
4013 PADOFFSET how_many = PL_op->op_targ;
4018 for (; i <= how_many; ++i ) {
4027 case CXt_LOOP_LAZYSV: /* string increment */
4029 SV* cur = cx->blk_loop.state_u.lazysv.cur;
4030 SV *end = cx->blk_loop.state_u.lazysv.end;
4031 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
4032 It has SvPVX of "" and SvCUR of 0, which is what we want. */
4034 const char *max = SvPV_const(end, maxlen);
4035 bool pad_it = FALSE;
4036 if (DO_UTF8(end) && IN_UNI_8_BIT)
4037 maxlen = sv_len_utf8_nomg(end);
/* SvNIOK(cur) is the "terminate next time" marker set below */
4038 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen)) {
4042 /* We are looping n-at-a-time and the range isn't a multiple
4043 of n, so we fill the rest of the lexicals with undef.
4044 This only happens on the last iteration of the loop, and
4045 we will have already set up the "terminate next time"
4046 condition earlier in this for loop for this call of the
4047 ITER op when we set up the lexical corresponding to the
4048 last value in the range. Hence we don't goto retno (yet),
4049 and just below we don't repeat the setup for "terminate
4055 /* NB: on the first iteration, oldsv will have a ref count of at
4056 * least 2 (one extra from blk_loop.itersave), so the GV or pad
4057 * slot will get localised; on subsequent iterations the RC==1
4058 * optimisation may kick in and the SV will be reused. */
4059 if (UNLIKELY(pad_it)) {
4060 *itersvp = &PL_sv_undef;
4061 SvREFCNT_dec(oldsv);
4063 else if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
4064 /* safe to reuse old SV */
4065 sv_setsv(oldsv, cur);
4068 /* we need a fresh SV every time so that loop body sees a
4069 * completely new SV for closures/references to work as
4071 *itersvp = newSVsv(cur);
4072 SvREFCNT_dec(oldsv);
4075 if (UNLIKELY(pad_it)) {
4076 /* We're "beyond the end" of the iterator here, filling the
4077 extra lexicals with undef, so we mustn't do anything
4078 (further) to the the iterator itself at this point.
4079 (Observe how the other two blocks modify the iterator's
4082 else if (strEQ(SvPVX_const(cur), max))
4083 sv_setiv(cur, 0); /* terminate next time */
4089 case CXt_LOOP_LAZYIV: /* integer increment */
4091 IV cur = cx->blk_loop.state_u.lazyiv.cur;
4092 bool pad_it = FALSE;
4093 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end)) {
4101 /* see NB comment above */
4102 if (UNLIKELY(pad_it)) {
4103 *itersvp = &PL_sv_undef;
4104 SvREFCNT_dec(oldsv);
4106 else if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
4107 /* safe to reuse old SV */
4109 if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV))
4111 /* Cheap SvIOK_only().
4112 * Assert that flags which SvIOK_only() would test or
4113 * clear can't be set, because we're SVt_IV */
4114 assert(!(SvFLAGS(oldsv) &
4115 (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK)))));
4116 SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK);
4117 /* SvIV_set() where sv_any points to head */
4118 oldsv->sv_u.svu_iv = cur;
4122 sv_setiv(oldsv, cur);
4125 /* we need a fresh SV every time so that loop body sees a
4126 * completely new SV for closures/references to work as they
4128 *itersvp = newSViv(cur);
4129 SvREFCNT_dec(oldsv);
4132 if (UNLIKELY(pad_it)) {
4133 /* We're good (see "We are looping n-at-a-time" comment
4136 else if (UNLIKELY(cur == IV_MAX)) {
4137 /* Handle end of range at IV_MAX */
/* sentinel: makes the next cur > end test fire without overflowing */
4138 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
4140 ++cx->blk_loop.state_u.lazyiv.cur;
4144 case CXt_LOOP_LIST: /* for (1,2,3) */
4146 assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */
4147 inc = (IV)1 - (IV)(PL_op->op_private & OPpITER_REVERSED);
4148 ix = (cx->blk_loop.state_u.stack.ix += inc);
4149 if (UNLIKELY(inc > 0
4150 ? ix > cx->blk_oldsp
4151 : ix <= cx->blk_loop.state_u.stack.basesp)
4160 sv = PL_stack_base[ix];
4164 goto loop_ary_common;
4166 case CXt_LOOP_ARY: /* for (@ary) */
4168 av = cx->blk_loop.state_u.ary.ary;
4169 inc = (IV)1 - (IV)(PL_op->op_private & OPpITER_REVERSED);
4170 ix = (cx->blk_loop.state_u.ary.ix += inc);
4171 if (UNLIKELY(inc > 0
4180 } else if (UNLIKELY(SvRMAGICAL(av))) {
/* tied array: must go through av_fetch for FETCH magic */
4181 SV * const * const svp = av_fetch(av, ix, FALSE);
4182 sv = svp ? *svp : NULL;
4185 sv = AvARRAY(av)[ix];
4190 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
4191 SvSetMagicSV(*itersvp, sv);
4196 if (UNLIKELY(SvIS_FREED(sv))) {
4198 Perl_croak(aTHX_ "Use of freed value in iteration");
4205 SvREFCNT_inc_simple_void_NN(sv);
/* hole in the array: hand out a deferred-element placeholder */
4209 sv = newSVavdefelem(av, ix, 0);
4216 SvREFCNT_dec(oldsv);
4220 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
4223 /* Only relevant for a many-at-a-time loop: */
4227 /* Try to bypass pushing &PL_sv_yes and calling pp_and(); instead
4228 * jump straight to the AND op's op_other */
4229 assert(PL_op->op_next->op_type == OP_AND);
4230 if (PL_op->op_next->op_ppaddr == Perl_pp_and) {
4231 return cLOGOPx(PL_op->op_next)->op_other;
4234 /* An XS module has replaced the op_ppaddr, so fall back to the slow,
4236 /* pp_enteriter should have pre-extended the stack */
4237 EXTEND_SKIP(PL_stack_sp, 1);
4238 *++PL_stack_sp = &PL_sv_yes;
4239 return PL_op->op_next;
4243 /* Try to bypass pushing &PL_sv_no and calling pp_and(); instead
4244 * jump straight to the AND op's op_next */
4245 assert(PL_op->op_next->op_type == OP_AND);
4246 /* pp_enteriter should have pre-extended the stack */
4247 EXTEND_SKIP(PL_stack_sp, 1);
4248 /* we only need this for the rare case where the OP_AND isn't
4249 * in void context, e.g. $x = do { for (..) {...} };
4250 * (or for when an XS module has replaced the op_ppaddr)
4251 * but it's cheaper to just push it rather than testing first
4253 *++PL_stack_sp = &PL_sv_no;
4254 if (PL_op->op_next->op_ppaddr == Perl_pp_and) {
4255 return PL_op->op_next->op_next;
4258 /* An XS module has replaced the op_ppaddr, so fall back to the slow,
4260 return PL_op->op_next;
4266 A description of how taint works in pattern matching and substitution.
4268 This is all conditional on NO_TAINT_SUPPORT remaining undefined (the default).
4269 Under NO_TAINT_SUPPORT, taint-related operations should become no-ops.
4271 While the pattern is being assembled/concatenated and then compiled,
4272 PL_tainted will get set (via TAINT_set) if any component of the pattern
4273 is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
4274 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
4275 TAINT_get). It will also be set if any component of the pattern matches
4276 based on locale-dependent behavior.
4278 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
4279 the pattern is marked as tainted. This means that subsequent usage, such
4280 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
4281 on the new pattern too.
4283 RXf_TAINTED_SEEN is used post-execution by the get magic code
4284 of $1 et al to indicate whether the returned value should be tainted.
4285 It is the responsibility of the caller of the pattern (i.e. pp_match,
4286 pp_subst etc) to set this flag for any other circumstances where $1 needs
4289 The taint behaviour of pp_subst (and pp_substcont) is quite complex.
4291 There are three possible sources of taint
4293 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
4294 * the replacement string (or expression under /e)
4296 There are four destinations of taint and they are affected by the sources
4297 according to the rules below:
4299 * the return value (not including /r):
4300 tainted by the source string and pattern, but only for the
4301 number-of-iterations case; boolean returns aren't tainted;
4302 * the modified string (or modified copy under /r):
4303 tainted by the source string, pattern, and replacement strings;
4305 tainted by the pattern, and under 'use re "taint"', by the source
4307 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
4308 should always be unset before executing subsequent code.
4310 The overall action of pp_subst is:
4312 * at the start, set bits in rxtainted indicating the taint status of
4313 the various sources.
4315 * After each pattern execution, update the SUBST_TAINT_PAT bit in
4316 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
4317 pattern has subsequently become tainted via locale ops.
4319 * If control is being passed to pp_substcont to execute a /e block,
4320 save rxtainted in the CXt_SUBST block, for future use by
4323 * Whenever control is being returned to perl code (either by falling
4324 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
4325 use the flag bits in rxtainted to make all the appropriate types of
4326 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
4327 et al will appear tainted.
4329 pp_match is just a simpler version of the above.
4345 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
4346 See "how taint works" above */
4349 REGEXP *rx = PM_GETRE(pm);
4350 regexp *prog = ReANY(rx);
4352 int force_on_match = 0;
4353 const I32 oldsave = PL_savestack_ix;
4354 bool doutf8 = FALSE; /* whether replacement is in utf8 */
4359 /* known replacement string? */
4360 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
4364 if (PL_op->op_flags & OPf_STACKED)
4375 SvGETMAGIC(TARG); /* must come before cow check */
4377 /* note that a string might get converted to COW during matching */
4378 was_cow = cBOOL(SvIsCOW(TARG));
4380 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
4381 #ifndef PERL_ANY_COW
4383 sv_force_normal_flags(TARG,0);
4385 if ((SvREADONLY(TARG)
4386 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
4387 || SvTYPE(TARG) > SVt_PVLV)
4388 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
4389 Perl_croak_no_modify();
4393 orig = SvPV_nomg(TARG, len);
4394 /* note we don't (yet) force the var into being a string; if we fail
4395 * to match, we leave as-is; on successful match however, we *will*
4396 * coerce into a string, then repeat the match */
4397 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
4400 /* only replace once? */
4401 once = !(rpm->op_pmflags & PMf_GLOBAL);
4403 /* See "how taint works" above */
4406 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
4407 | (RXp_ISTAINTED(prog) ? SUBST_TAINT_PAT : 0)
4408 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
4409 | (( (once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
4410 || (PL_op->op_private & OPpTRUEBOOL)) ? SUBST_TAINT_BOOLRET : 0));
4416 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
4418 strend = orig + len;
4419 /* We can match twice at each position, once with zero-length,
4420 * second time with non-zero.
4421 * Don't handle utf8 specially; we can use length-in-bytes as an
4422 * upper bound on length-in-characters, and avoid the cpu-cost of
4423 * computing a tighter bound. */
4424 maxiters = 2 * len + 10;
4426 /* handle the empty pattern */
4427 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
4428 if (PL_curpm == PL_reg_curpm) {
4429 if (PL_curpm_under) {
4430 if (PL_curpm_under == PL_reg_curpm) {
4431 Perl_croak(aTHX_ "Infinite recursion via empty pattern");
4433 pm = PL_curpm_under;
4443 #ifdef PERL_SAWAMPERSAND
4444 r_flags = ( RXp_NPARENS(prog)
4446 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
4447 || (rpm->op_pmflags & PMf_KEEPCOPY)
4452 r_flags = REXEC_COPY_STR;
4455 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
4458 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
4459 LEAVE_SCOPE(oldsave);
4464 /* known replacement string? */
4466 /* replacement needing upgrading? */
4467 if (DO_UTF8(TARG) && !doutf8) {
4468 nsv = sv_newmortal();
4470 sv_utf8_upgrade(nsv);
4471 c = SvPV_const(nsv, clen);
4475 c = SvPV_const(dstr, clen);
4476 doutf8 = DO_UTF8(dstr);
4479 if (UNLIKELY(TAINT_get))
4480 rxtainted |= SUBST_TAINT_REPL;
4487 /* can do inplace substitution? */
4492 && (I32)clen <= RXp_MINLENRET(prog)
4494 || !(r_flags & REXEC_COPY_STR)
4495 || (!SvGMAGICAL(dstr) && !(RXp_EXTFLAGS(prog) & RXf_EVAL_SEEN))
4497 && !(RXp_EXTFLAGS(prog) & RXf_NO_INPLACE_SUBST)
4498 && (!doutf8 || SvUTF8(TARG))
4499 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
4503 /* string might have got converted to COW since we set was_cow */
4504 if (SvIsCOW(TARG)) {
4505 if (!force_on_match)
4507 assert(SvVOK(TARG));
4510 if (force_on_match) {
4511 /* redo the first match, this time with the orig var
4512 * forced into being a string */
4514 orig = SvPV_force_nomg(TARG, len);
4520 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
4521 rxtainted |= SUBST_TAINT_PAT;
4522 m = orig + RXp_OFFS(prog)[0].start;
4523 d = orig + RXp_OFFS(prog)[0].end;
4525 if (m - s > strend - d) { /* faster to shorten from end */
4528 Copy(c, m, clen, char);
4533 Move(d, m, i, char);
4537 SvCUR_set(TARG, m - s);
4539 else { /* faster from front */
4543 Move(s, d - i, i, char);
4546 Copy(c, d, clen, char);
4553 d = s = RXp_OFFS(prog)[0].start + orig;
4556 if (UNLIKELY(iters++ > maxiters))
4557 DIE(aTHX_ "Substitution loop");
4558 /* run time pattern taint, eg locale */
4559 if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
4560 rxtainted |= SUBST_TAINT_PAT;
4561 m = RXp_OFFS(prog)[0].start + orig;
4564 Move(s, d, i, char);
4568 Copy(c, d, clen, char);
4571 s = RXp_OFFS(prog)[0].end + orig;
4572 } while (CALLREGEXEC(rx, s, strend, orig,
4573 s == m, /* don't match same null twice */
4575 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
4578 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
4579 Move(s, d, i+1, char); /* include the NUL */
4583 if (PL_op->op_private & OPpTRUEBOOL)
4593 if (force_on_match) {
4594 /* redo the first match, this time with the orig var
4595 * forced into being a string */
4597 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
4598 /* I feel that it should be possible to avoid this mortal copy
4599 given that the code below copies into a new destination.
4600 However, I suspect it isn't worth the complexity of
4601 unravelling the C<goto force_it> for the small number of
4602 cases where it would be viable to drop into the copy code. */
4603 TARG = sv_2mortal(newSVsv(TARG));
4605 orig = SvPV_force_nomg(TARG, len);
4611 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
4612 rxtainted |= SUBST_TAINT_PAT;
4614 s = RXp_OFFS(prog)[0].start + orig;
4615 dstr = newSVpvn_flags(orig, s-orig,
4616 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
4621 /* note that a whole bunch of local vars are saved here for
4622 * use by pp_substcont: here's a list of them in case you're
4623 * searching for places in this sub that uses a particular var:
4624 * iters maxiters r_flags oldsave rxtainted orig dstr targ
4625 * s m strend rx once */
4627 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
4631 if (UNLIKELY(iters++ > maxiters))
4632 DIE(aTHX_ "Substitution loop");
4633 if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
4634 rxtainted |= SUBST_TAINT_PAT;
4635 if (RXp_MATCH_COPIED(prog) && RXp_SUBBEG(prog) != orig) {
4637 char *old_orig = orig;
4638 assert(RXp_SUBOFFSET(prog) == 0);
4640 orig = RXp_SUBBEG(prog);
4641 s = orig + (old_s - old_orig);
4642 strend = s + (strend - old_s);
4644 m = RXp_OFFS(prog)[0].start + orig;
4645 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
4646 s = RXp_OFFS(prog)[0].end + orig;
4648 /* replacement already stringified */
4650 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
4654 sv_catsv(dstr, repl);
4658 } while (CALLREGEXEC(rx, s, strend, orig,
4659 s == m, /* Yields minend of 0 or 1 */
4661 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
4662 assert(strend >= s);
4663 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
4665 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
4666 /* From here on down we're using the copy, and leaving the original
4673 /* The match may make the string COW. If so, brilliant, because
4674 that's just saved us one malloc, copy and free - the regexp has
4675 donated the old buffer, and we malloc an entirely new one, rather
4676 than the regexp malloc()ing a buffer and copying our original,
4677 only for us to throw it away here during the substitution. */
4678 if (SvIsCOW(TARG)) {
4679 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
4685 SvPV_set(TARG, SvPVX(dstr));
4686 SvCUR_set(TARG, SvCUR(dstr));
4687 SvLEN_set(TARG, SvLEN(dstr));
4688 SvFLAGS(TARG) |= SvUTF8(dstr);
4689 SvPV_set(dstr, NULL);
4692 if (PL_op->op_private & OPpTRUEBOOL)
4699 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
4700 (void)SvPOK_only_UTF8(TARG);
4703 /* See "how taint works" above */
4705 if ((rxtainted & SUBST_TAINT_PAT) ||
4706 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
4707 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
4709 (RXp_MATCH_TAINTED_on(prog)); /* taint $1 et al */
4711 if (!(rxtainted & SUBST_TAINT_BOOLRET)
4712 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
4714 SvTAINTED_on(TOPs); /* taint return value */
4716 SvTAINTED_off(TOPs); /* may have got tainted earlier */
4718 /* needed for mg_set below */
4720 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
4724 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
4726 LEAVE_SCOPE(oldsave);
4736 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
4737 ++*PL_markstack_ptr;
4739 LEAVE_with_name("grep_item"); /* exit inner scope */
4742 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
4744 const U8 gimme = GIMME_V;
4746 LEAVE_with_name("grep"); /* exit outer scope */
4747 (void)POPMARK; /* pop src */
4748 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
4749 (void)POPMARK; /* pop dst */
4750 SP = PL_stack_base + POPMARK; /* pop original mark */
4751 if (gimme == G_SCALAR) {
4752 if (PL_op->op_private & OPpTRUEBOOL)
4753 PUSHs(items ? &PL_sv_yes : &PL_sv_zero);
4759 else if (gimme == G_LIST)
4766 ENTER_with_name("grep_item"); /* enter inner scope */
4769 src = PL_stack_base[TOPMARK];
4770 if (SvPADTMP(src)) {
4771 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
4777 RETURNOP(cLOGOP->op_other);
4781 /* leave_adjust_stacks():
4783 * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp),
4784 * positioning them at to_sp+1 onwards, and do the equivalent of a
4785 * FREEMPS and TAINT_NOT.
4787 * Not intended to be called in void context.
4789 * When leaving a sub, eval, do{} or other scope, the things that need
4790 * doing to process the return args are:
4791 * * in scalar context, only return the last arg (or PL_sv_undef if none);
4792 * * for the types of return that return copies of their args (such
4793 * as rvalue sub return), make a mortal copy of every return arg,
4794 * except where we can optimise the copy away without it being
4795 * semantically visible;
4796 * * make sure that the arg isn't prematurely freed; in the case of an
4797 * arg not copied, this may involve mortalising it. For example, in
4798 * C<sub f { my $x = ...; $x }>, $x would be freed when we do
4799 * CX_LEAVE_SCOPE(cx) unless it's protected or copied.
4801 * What condition to use when deciding whether to pass the arg through
4802 * or make a copy, is determined by the 'pass' arg; its valid values are:
4803 * 0: rvalue sub/eval exit
4804 * 1: other rvalue scope exit
4805 * 2: :lvalue sub exit in rvalue context
4806 * 3: :lvalue sub exit in lvalue context and other lvalue scope exits
4808 * There is a big issue with doing a FREETMPS. We would like to free any
4809 * temps created by the last statement which the sub executed, rather than
4810 * leaving them for the caller. In a situation where a sub call isn't
4811 * soon followed by a nextstate (e.g. nested recursive calls, a la
4812 * fibonacci()), temps can accumulate, causing memory and performance
4815 * On the other hand, we don't want to free any TEMPs which are keeping
4816 * alive any return args that we skipped copying; nor do we wish to undo
4817 * any mortalising done here.
4819 * The solution is to split the temps stack frame into two, with a cut
4820 * point delineating the two halves. We arrange that by the end of this
4821 * function, all the temps stack frame entries we wish to keep are in the
4822 * range PL_tmps_floor+1.. tmps_base-1, while the ones to free now are in
4823 * the range tmps_base .. PL_tmps_ix. During the course of this
4824 * function, tmps_base starts off as PL_tmps_floor+1, then increases
4825 * whenever we find or create a temp that we know should be kept. In
4826 * general the stuff above tmps_base is undecided until we reach the end,
4827 * and we may need a sort stage for that.
4829 * To determine whether a TEMP is keeping a return arg alive, every
4830 * arg that is kept rather than copied and which has the SvTEMP flag
4831 * set, has the flag temporarily unset, to mark it. At the end we scan
4832 * the temps stack frame above the cut for entries without SvTEMP and
4833 * keep them, while turning SvTEMP on again. Note that if we die before
4834 * the SvTEMPs flags are set again, its safe: at worst, subsequent use of
4835 * those SVs may be slightly less efficient.
4837 * In practice various optimisations for some common cases mean we can
4838 * avoid most of the scanning and swapping about with the temps stack.
4842 Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass)
4845 SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
4848 PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS;
4852 if (gimme == G_LIST) {
4853 nargs = SP - from_sp;
4857 assert(gimme == G_SCALAR);
4858 if (UNLIKELY(from_sp >= SP)) {
4859 /* no return args */
4860 assert(from_sp == SP);
4862 *++SP = &PL_sv_undef;
4872 /* common code for G_SCALAR and G_LIST */
4874 tmps_base = PL_tmps_floor + 1;
4878 /* pointer version of tmps_base. Not safe across temp stack
4882 EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
4883 tmps_basep = PL_tmps_stack + tmps_base;
4885 /* process each return arg */
4888 SV *sv = *from_sp++;
4890 assert(PL_tmps_ix + nargs < PL_tmps_max);
4892 /* PADTMPs with container set magic shouldn't appear in the
4893 * wild. This assert is more important for pp_leavesublv(),
4894 * but by testing for it here, we're more likely to catch
4895 * bad cases (what with :lvalue subs not being widely
4896 * deployed). The two issues are that for something like
4897 * sub :lvalue { $tied{foo} }
4899 * sub :lvalue { substr($foo,1,2) }
4900 * pp_leavesublv() will croak if the sub returns a PADTMP,
4901 * and currently functions like pp_substr() return a mortal
4902 * rather than using their PADTMP when returning a PVLV.
4903 * This is because the PVLV will hold a ref to $foo,
4904 * so $foo would get delayed in being freed while
4905 * the PADTMP SV remained in the PAD.
4906 * So if this assert fails it means either:
4907 * 1) there is pp code similar to pp_substr that is
4908 * returning a PADTMP instead of a mortal, and probably
4910 * 2) pp_leavesublv is making unwarranted assumptions
4911 * about always croaking on a PADTMP
4913 if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
4915 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
4916 assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
4922 pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
4923 : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
4924 : pass == 2 ? (!SvPADTMP(sv))
4927 /* pass through: skip copy for logic or optimisation
4928 * reasons; instead mortalise it, except that ... */
4932 /* ... since this SV is an SvTEMP , we don't need to
4933 * re-mortalise it; instead we just need to ensure
4934 * that its existing entry in the temps stack frame
4935 * ends up below the cut and so avoids being freed
4936 * this time round. We mark it as needing to be kept
4937 * by temporarily unsetting SvTEMP; then at the end,
4938 * we shuffle any !SvTEMP entries on the tmps stack
4939 * back below the cut.
4940 * However, there's a significant chance that there's
4941 * a 1:1 correspondence between the first few (or all)
4942 * elements in the return args stack frame and those
4943 * in the temps stack frame; e,g.:
4944 * sub f { ....; map {...} .... },
4945 * or if we're exiting multiple scopes and one of the
4946 * inner scopes has already made mortal copies of each
4949 * If so, this arg sv will correspond to the next item
4950 * on the tmps stack above the cut, and so can be kept
4951 * merely by moving the cut boundary up one, rather
4952 * than messing with SvTEMP. If all args are 1:1 then
4953 * we can avoid the sorting stage below completely.
4955 * If there are no items above the cut on the tmps
4956                 * stack, then the SvTEMP must come from an item
4957 * below the cut, so there's nothing to do.
4959 if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) {
4960 if (sv == *tmps_basep)
4966 else if (!SvPADTMP(sv)) {
4967 /* mortalise arg to avoid it being freed during save
4968 * stack unwinding. Pad tmps don't need mortalising as
4969 * they're never freed. This is the equivalent of
4970 * sv_2mortal(SvREFCNT_inc(sv)), except that:
4971 * * it assumes that the temps stack has already been
4973 * * it puts the new item at the cut rather than at
4974 * ++PL_tmps_ix, moving the previous occupant there
4977 if (!SvIMMORTAL(sv)) {
4978 SvREFCNT_inc_simple_void_NN(sv);
4980 /* Note that if there's nothing above the cut,
4981 * this copies the garbage one slot above
4982 * PL_tmps_ix onto itself. This is harmless (the
4983 * stack's already been extended), but might in
4984 * theory trigger warnings from tools like ASan
4986 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
4992 /* Make a mortal copy of the SV.
4993 * The following code is the equivalent of sv_mortalcopy()
4995 * * it assumes the temps stack has already been extended;
4996 * * it optimises the copying for some simple SV types;
4997 * * it puts the new item at the cut rather than at
4998 * ++PL_tmps_ix, moving the previous occupant there
5001 SV *newsv = newSV_type(SVt_NULL);
5003 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
5004 /* put it on the tmps stack early so it gets freed if we die */
5005 *tmps_basep++ = newsv;
5008 if (SvTYPE(sv) <= SVt_IV) {
5009 /* arg must be one of undef, IV/UV, or RV: skip
5010 * sv_setsv_flags() and do the copy directly */
5012 U32 srcflags = SvFLAGS(sv);
5014 assert(!SvGMAGICAL(sv));
5015 if (srcflags & (SVf_IOK|SVf_ROK)) {
5016 SET_SVANY_FOR_BODYLESS_IV(newsv);
5018 if (srcflags & SVf_ROK) {
5019 newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
5020 /* SV type plus flags */
5021 dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
5024 /* both src and dst are <= SVt_IV, so sv_any
5025 * points to the head; so access the heads
5026 * directly rather than going via sv_any.
5028 assert( &(sv->sv_u.svu_iv)
5029 == &(((XPVIV*) SvANY(sv))->xiv_iv));
5030 assert( &(newsv->sv_u.svu_iv)
5031 == &(((XPVIV*) SvANY(newsv))->xiv_iv));
5032 newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
5033 /* SV type plus flags */
5034 dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
5035 |(srcflags & SVf_IVisUV));
5039 assert(!(srcflags & SVf_OK));
5040 dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
5042 SvFLAGS(newsv) = dstflags;
5046 /* do the full sv_setsv() */
5050 old_base = tmps_basep - PL_tmps_stack;
5052 sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
5053 /* the mg_get or sv_setsv might have created new temps
5054 * or realloced the tmps stack; regrow and reload */
5055 EXTEND_MORTAL(nargs);
5056 tmps_basep = PL_tmps_stack + old_base;
5057 TAINT_NOT; /* Each item is independent */
5063 /* If there are any temps left above the cut, we need to sort
5064 * them into those to keep and those to free. The only ones to
5065 * keep are those for which we've temporarily unset SvTEMP.
5066 * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
5067 * swapping pairs as necessary. Stop when we meet in the middle.
5070 SV **top = PL_tmps_stack + PL_tmps_ix;
5071 while (tmps_basep <= top) {
5084 tmps_base = tmps_basep - PL_tmps_stack;
5087 PL_stack_sp = to_sp;
5089 /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
5090 while (PL_tmps_ix >= tmps_base) {
5091 SV* const sv = PL_tmps_stack[PL_tmps_ix--];
5093 PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
5097 SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
5103 /* also tail-called by pp_return */
5113 assert(CxTYPE(cx) == CXt_SUB);
5115 if (CxMULTICALL(cx)) {
5116 /* entry zero of a stack is always PL_sv_undef, which
5117 * simplifies converting a '()' return into undef in scalar context */
5118 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
5122 gimme = cx->blk_gimme;
5123 oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */
5125 if (gimme == G_VOID)
5126 PL_stack_sp = oldsp;
5128 leave_adjust_stacks(oldsp, oldsp, gimme, 0);
5131 cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */
5133 retop = cx->blk_sub.retop;
5140 /* clear (if possible) or abandon the current @_. If 'abandon' is true,
5141 * forces an abandon */
5144 Perl_clear_defarray(pTHX_ AV* av, bool abandon)
5146 PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
5148 if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) {
5153 const SSize_t size = AvFILLp(av) + 1;
5154 /* The ternary gives consistency with av_extend() */
5155 AV *newav = newAV_alloc_x(size < 4 ? 4 : size);
5156 AvREIFY_only(newav);
5157 PAD_SVl(0) = MUTABLE_SV(newav);
5158 SvREFCNT_dec_NN(av);
5169 I32 old_savestack_ix;
5174 /* Locate the CV to call:
5175 * - most common case: RV->CV: f(), $ref->():
5176 * note that if a sub is compiled before its caller is compiled,
5177 * the stash entry will be a ref to a CV, rather than being a GV.
5178 * - second most common case: CV: $ref->method()
5181 /* a non-magic-RV -> CV ? */
5182 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
5183 cv = MUTABLE_CV(SvRV(sv));
5184 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
5188 cv = MUTABLE_CV(sv);
5191 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
5192 /* handle all the weird cases */
5193 switch (SvTYPE(sv)) {
5195 if (!isGV_with_GP(sv))
5199 cv = GvCVu((const GV *)sv);
5200 if (UNLIKELY(!cv)) {
5202 cv = sv_2cv(sv, &stash, &gv, 0);
5204 old_savestack_ix = PL_savestack_ix;
5215 if (UNLIKELY(SvAMAGIC(sv))) {
5216 sv = amagic_deref_call(sv, to_cv_amg);
5217 /* Don't SPAGAIN here. */
5223 if (UNLIKELY(!SvOK(sv)))
5224 DIE(aTHX_ PL_no_usym, "a subroutine");
5226 sym = SvPV_nomg_const(sv, len);
5227 if (PL_op->op_private & HINT_STRICT_REFS)
5228 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
5229 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
5232 cv = MUTABLE_CV(SvRV(sv));
5233 if (LIKELY(SvTYPE(cv) == SVt_PVCV))
5239 DIE(aTHX_ "Not a CODE reference");
5243 /* At this point we want to save PL_savestack_ix, either by doing a
5244 * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final
5245 * CV we will be using (so we don't know whether its XS, so we can't
5246 * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on
5247 * the save stack. So remember where we are currently on the save
5248 * stack, and later update the CX or scopestack entry accordingly. */
5249 old_savestack_ix = PL_savestack_ix;
5251 /* these two fields are in a union. If they ever become separate,
5252 * we have to test for both of them being null below */
5254 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
5255 while (UNLIKELY(!CvROOT(cv))) {
5259 /* anonymous or undef'd function leaves us no recourse */
5260 if (CvLEXICAL(cv) && CvHASGV(cv))
5261 DIE(aTHX_ "Undefined subroutine &%" SVf " called",
5262 SVfARG(cv_name(cv, NULL, 0)));
5263 if (CvANON(cv) || !CvHASGV(cv)) {
5264 DIE(aTHX_ "Undefined subroutine called");
5267 /* autoloaded stub? */
5268 if (cv != GvCV(gv = CvGV(cv))) {
5271 /* should call AUTOLOAD now? */
5274 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
5275 (GvNAMEUTF8(gv) ? SVf_UTF8 : 0)
5276 |(PL_op->op_flags & OPf_REF
5277 ? GV_AUTOLOAD_ISMETHOD
5279 cv = autogv ? GvCV(autogv) : NULL;
5282 sub_name = sv_newmortal();
5283 gv_efullname3(sub_name, gv, NULL);
5284 DIE(aTHX_ "Undefined subroutine &%" SVf " called", SVfARG(sub_name));
5288 /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
5289 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
5290 DIE(aTHX_ "Closure prototype called");
5292 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
5295 Perl_get_db_sub(aTHX_ &sv, cv);
5297 PL_curcopdb = PL_curcop;
5299 /* check for lsub that handles lvalue subroutines */
5300 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
5301 /* if lsub not found then fall back to DB::sub */
5302 if (!cv) cv = GvCV(PL_DBsub);
5304 cv = GvCV(PL_DBsub);
5307 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
5308 DIE(aTHX_ "No DB::sub routine defined");
5311 if (!(CvISXSUB(cv))) {
5312 /* This path taken at least 75% of the time */
5319 /* keep PADTMP args alive throughout the call (we need to do this
5320 * because @_ isn't refcounted). Note that we create the mortals
5321 * in the caller's tmps frame, so they won't be freed until after
5322 * we return from the sub.
5331 *svp = sv = sv_mortalcopy(sv);
5337 cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix);
5338 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
5339 cx_pushsub(cx, cv, PL_op->op_next, hasargs);
5341 padlist = CvPADLIST(cv);
5342 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2))
5343 pad_push(padlist, depth);
5344 PAD_SET_CUR_NOSAVE(padlist, depth);
5345 if (LIKELY(hasargs)) {
5346 AV *const av = MUTABLE_AV(PAD_SVl(0));
5350 defavp = &GvAV(PL_defgv);
/* NOTE(review): fragment of pp_entersub (sub-call entry).  This listing is
 * elided -- the embedded original line numbers are discontiguous, so code is
 * missing between many of the statements below (opening of the function,
 * several condition halves, closing braces).  Tokens left byte-identical;
 * comments only.
 *
 * First part: perl-level (non-XS) sub call -- bind the caller's args to @_. */
5351 cx->blk_sub.savearray = *defavp;
5352 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
5354 /* it's the responsibility of whoever leaves a sub to ensure
5355 * that a clean, empty AV is left in pad[0]. This is normally
5356 * done by cx_popsub() */
5357 assert(!AvREAL(av) && AvFILLp(av) == -1);
/* Grow @_'s SV* allocation when the arg count exceeds its current capacity. */
5360 if (UNLIKELY(items - 1 > AvMAX(av))) {
5361 SV **ary = AvALLOC(av);
5362 Renew(ary, items, SV*);
5363 AvMAX(av) = items - 1;
/* Copy the stack args (MARK+1..MARK+items) into @_ and set its fill. */
5369 Copy(MARK+1,AvARRAY(av),items,SV*);
5370 AvFILLp(av) = items - 1;
/* Refuse "foo() = ..." on a non-lvalue sub.  NOTE(review): the second half
 * of this condition (presumably the !CvLVALUE test) is elided from the
 * listing -- confirm against upstream pp_hot.c. */
5372 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
5374 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
5375 SVfARG(cv_name(cv, NULL, 0)));
5376 /* warning must come *after* we fully set up the context
5377 * stuff so that __WARN__ handlers can safely dounwind()
/* Emit the "Deep recursion" warning at the depth threshold, but not for
 * the debugger's own DB::sub. */
5380 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
5381 && ckWARN(WARN_RECURSION)
5382 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
5383 sub_crush_depth(cv);
/* Enter the sub body: continue execution at its first op. */
5384 RETURNOP(CvSTART(cv));
/* Second part: XSUB call path. */
5387 SSize_t markix = TOPMARK;
5391 /* pretend we did the ENTER earlier */
5392 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
/* Same non-lvalue-sub check as above, for the XS path. */
5397 if (UNLIKELY(((PL_op->op_private
5398 & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
5399 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
5401 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
5402 SVfARG(cv_name(cv, NULL, 0)));
/* &foo-style call with no arg list: the XSUB receives the current @_,
 * which must be copied onto the argument stack. */
5404 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
5405 /* Need to copy @_ to stack. Alternative may be to
5406 * switch stack to @_, and copy return values
5407 * back. This would allow popping @_ in XSUB, e.g.. XXXX */
5408 AV * const av = GvAV(PL_defgv);
5409 const SSize_t items = AvFILL(av) + 1;
/* m: @_ has get-magic (e.g. tied), so elements must go through av_fetch(). */
5413 const bool m = cBOOL(SvRMAGICAL(av));
5414 /* Mark is at the end of the stack. */
5416 for (; i < items; ++i)
5420 SV ** const svp = av_fetch(av, i, 0);
5421 sv = svp ? *svp : NULL;
5423 else sv = AvARRAY(av)[i];
5424 if (sv) SP[i+1] = sv;
/* Hole in @_: push a defelem placeholder instead of NULL. */
5426 SP[i+1] = av_nonelem(av, i);
5434 SV **mark = PL_stack_base + markix;
5435 SSize_t items = SP - mark;
/* A PADTMP arg must be copied so the XSUB can't see it mutate later. */
5438 if (*mark && SvPADTMP(*mark)) {
5439 *mark = sv_mortalcopy(*mark);
5443 /* We assume first XSUB in &DB::sub is the called one. */
5444 if (UNLIKELY(PL_curcopdb)) {
5445 SAVEVPTR(PL_curcop);
5446 PL_curcop = PL_curcopdb;
5449 /* Do we need to open block here? XXXX */
5451 /* calculate gimme here as PL_op might get changed and then not
5452 * restored until the LEAVE further down */
5453 is_scalar = (GIMME_V == G_SCALAR);
5455 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
/* Invoke the XSUB's C body. */
5457 CvXSUB(cv)(aTHX_ cv);
5459 #if defined DEBUGGING && !defined DEBUGGING_RE_ONLY
5460 /* This duplicates the check done in runops_debug(), but provides more
5461 * information in the common case of the fault being with an XSUB.
5463 * It should also catch an XSUB pushing more than it extends
5464 * in scalar context.
5466 if (PL_curstackinfo->si_stack_hwm < PL_stack_sp - PL_stack_base)
5467 Perl_croak_nocontext(
5468 "panic: XSUB %s::%s (%s) failed to extend arg stack: "
5469 "base=%p, sp=%p, hwm=%p\n",
5470 HvNAME(GvSTASH(CvGV(cv))), GvNAME(CvGV(cv)), CvFILE(cv),
5471 PL_stack_base, PL_stack_sp,
5472 PL_stack_base + PL_curstackinfo->si_stack_hwm);
5474 /* Enforce some sanity in scalar context. */
/* In scalar context, collapse the XSUB's return list to exactly one value:
 * undef if it returned nothing, otherwise its last value. */
5476 SV **svp = PL_stack_base + markix + 1;
5477 if (svp != PL_stack_sp) {
5478 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
/* Perl_sub_crush_depth: emit the "Deep recursion" warning for cv.
 * NOTE(review): listing is elided -- the return type, braces, and the
 * conditional choosing between the anonymous and named messages
 * (presumably CvANON(cv) || !CvGV(cv)) are missing here. */
5488 Perl_sub_crush_depth(pTHX_ CV *cv)
5490 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
/* Anonymous sub: no name available for the message. */
5493 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
/* Named sub: include its qualified name in the warning. */
5495 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%" SVf "\"",
5496 SVfARG(cv_name(cv,NULL,0)));
5502 /* like croak, but report in context of caller */
/* NOTE(review): elided listing -- the return type, dTHX, va_list declaration,
 * and the vcroak() call between va_start and NOT_REACHED are missing. */
5505 Perl_croak_caller(const char *pat, ...)
/* Find the nearest perl-level caller frame. */
5509 const PERL_CONTEXT *cx = caller_cx(0, NULL);
5511 /* make error appear at call site */
5513 PL_curcop = cx->blk_oldcop;
5515 va_start(args, pat);
5517 NOT_REACHED; /* NOTREACHED */
/* NOTE(review): fragment of pp_aelem ($array[$idx] fetch / lvalue fetch).
 * The PP(pp_aelem) opener and a number of interior lines are elided from
 * this listing; tokens left byte-identical, comments only. */
5526 SV* const elemsv = POPs;
5527 IV elem = SvIV(elemsv);
5528 AV *const av = MUTABLE_AV(POPs);
/* lval: op is being used as a modifiable target (assignment, foreach alias). */
5529 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
/* defer: delay vivification of the element (e.g. as a sub argument). */
5530 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
/* localizing: "local $a[$i]". */
5531 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
5532 bool preeminent = TRUE;
/* Using a reference as an index is almost always a mistake -- warn. */
5535 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
5536 Perl_warner(aTHX_ packWARN(WARN_MISC),
5537 "Use of reference \"%" SVf "\" as array index",
5539 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
5542 if (UNLIKELY(localizing)) {
5546 /* If we can determine whether the element exists,
5547 * try to preserve the existence of a tied array
5548 * element by using EXISTS and DELETE if possible.
5549 * Fall back to FETCH and STORE otherwise. */
5550 if (SvCANEXISTDELETE(av))
5551 preeminent = av_exists(av, elem);
/* Fetch the element; vivify it when we need a modifiable slot now. */
5554 svp = av_fetch(av, elem, lval && !defer);
5556 #ifdef PERL_MALLOC_WRAP
/* Sanity-clamp the index for the allocation-size check below. */
5557 if (SvUOK(elemsv)) {
5558 const UV uv = SvUV(elemsv);
5559 elem = uv > IV_MAX ? IV_MAX : uv;
5561 else if (SvNOK(elemsv))
5562 elem = (IV)SvNV(elemsv);
5564 MEM_WRAP_CHECK_s(elem,SV*,"Out of memory during array extend");
/* Element doesn't exist (deferred-lvalue or rvalue-on-missing cases). */
5567 if (!svp || !*svp) {
5570 DIE(aTHX_ PL_no_aelem, elem);
5571 len = av_top_index(av);
5572 /* Resolve a negative index that falls within the array. Leave
5573 it negative if it falls outside the array. */
5574 if (elem < 0 && len + elem >= 0)
5576 if (elem >= 0 && elem <= len)
5577 /* Falls within the array. */
5578 PUSHs(av_nonelem(av,elem));
5580 /* Falls outside the array. If it is negative,
5581 magic_setdefelem will use the index for error reporting.
5583 mPUSHs(newSVavdefelem(av, elem, 1));
/* local $a[$i]: save the old value (or arrange deletion on scope exit
 * when the element didn't previously exist). */
5586 if (UNLIKELY(localizing)) {
5588 save_aelem(av, elem, svp);
5590 SAVEADELETE(av, elem);
/* $a[$i]{...} / $a[$i][...]: autovivify the element into a ref. */
5592 else if (PL_op->op_private & OPpDEREF) {
5593 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
5597 sv = (svp ? *svp : &PL_sv_undef);
5598 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
/* Perl_vivify_ref: autovivify sv into a reference of the kind requested by
 * to_what (scalar/array/hash deref).  NOTE(review): elided listing -- the
 * return type, the SvOK test, the switch(to_what) framing around the three
 * SvRV_set calls, and the function's return are missing here. */
5605 Perl_vivify_ref(pTHX_ SV *sv, U32 to_what)
5607 PERL_ARGS_ASSERT_VIVIFY_REF;
/* Read-only values cannot be vivified. */
5612 Perl_croak_no_modify();
5613 prepare_SV_for_RV(sv);
/* One of these runs depending on to_what: new scalar, array, or hash. */
5616 SvRV_set(sv, newSV_type(SVt_NULL));
5619 SvRV_set(sv, MUTABLE_SV(newAV()));
5622 SvRV_set(sv, MUTABLE_SV(newHV()));
5629 if (SvGMAGICAL(sv)) {
5630 /* copy the sv without magic to prevent magic from being
5632 SV* msv = sv_newmortal();
5633 sv_setsv_nomg(msv, sv);
/* S_opmethod_stash: resolve the invocant (first arg on the stack) of a
 * method call to its package stash.  Handles class-name strings, blessed
 * references, globs/filehandles, and tied lvalues; croaks on undef or
 * unblessed invocants.  NOTE(review): elided listing -- braces, some labels
 * (e.g. the "undefined:" target) and condition halves are missing. */
5639 PERL_STATIC_INLINE HV *
5640 S_opmethod_stash(pTHX_ SV* meth)
/* Invocant is the SV just above the current mark; croak if the stack
 * holds no arguments at all. */
5645 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
5646 ? (Perl_croak(aTHX_ "Can't call method \"%" SVf "\" without a "
5647 "package or object reference", SVfARG(meth)),
5649 : *(PL_stack_base + TOPMARK + 1);
5651 PERL_ARGS_ASSERT_OPMETHOD_STASH;
5655 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on an undefined value",
5658 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
/* Shared-hash-key COW string: almost certainly a bareword class name,
 * so try the stash cache first. */
5659 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
5660 stash = gv_stashsv(sv, GV_CACHE_ONLY);
5661 if (stash) return stash;
/* Reference invocant: look at the referent. */
5665 ob = MUTABLE_SV(SvRV(sv));
5666 else if (!SvOK(sv)) goto undefined;
5667 else if (isGV_with_GP(sv)) {
5669 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
5670 "without a package or object reference",
/* Tied/defelem lvalue glob: replace the stack slot with a real ref. */
5673 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') {
5674 assert(!LvTARGLEN(ob));
5678 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob));
5681 /* this isn't a reference */
/* Plain string invocant: treat it as a package name. */
5684 const char * const packname = SvPV_nomg_const(sv, packlen);
5685 const U32 packname_utf8 = SvUTF8(sv);
5686 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
5687 if (stash) return stash;
/* Not a known package -- maybe it names a filehandle. */
5689 if (!(iogv = gv_fetchpvn_flags(
5690 packname, packlen, packname_utf8, SVt_PVIO
5692 !(ob=MUTABLE_SV(GvIO(iogv))))
5694 /* this isn't the name of a filehandle either */
5697 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
5698 "without a package or object reference",
5701 /* assume it's a package name */
5702 stash = gv_stashpvn(packname, packlen, packname_utf8);
5703 if (stash) return stash;
5704 else return MUTABLE_HV(sv);
5706 /* it _is_ a filehandle name -- replace with a reference */
5707 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
5710 /* if we got here, ob should be an object or a glob */
5711 if (!ob || !(SvOBJECT(ob)
5712 || (isGV_with_GP(ob)
5713 && (ob = MUTABLE_SV(GvIO((const GV *)ob)))
/* Special-case the message for $obj->isa/DOES via PL_isa_DOES. */
5716 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on unblessed reference",
5717 SVfARG((SvPOK(meth) && SvPVX(meth) == PL_isa_DOES)
5718 ? newSVpvs_flags("DOES", SVs_TEMP)
/* NOTE(review): fragment of pp_method (dynamic $obj->$meth call).  The
 * PP(pp_method) opener and several lines are elided from this listing. */
5730 SV* const meth = TOPs;
/* $obj->$coderef: a code-ref "method name" is used directly as the CV. */
5733 SV* const rmeth = SvRV(meth);
5734 if (SvTYPE(rmeth) == SVt_PVCV) {
/* Otherwise resolve the invocant's stash and look the method up by name,
 * honouring AUTOLOAD and croaking if not found. */
5740 stash = opmethod_stash(meth);
5742 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
/* Replace the method-name SV on the stack with the resolved CV (or GV). */
5745 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* METHOD_CHECK_CACHE(stash, cache, meth): fast path shared by the
 * pp_method_* ops -- look meth up in the given method cache hash, and if
 * the cached GV's CV generation is still current for stash's MRO, push the
 * CV and (presumably) return from the op early.  NOTE(review): this macro
 * is elided in the listing (the `if (he)` framing and the tail after
 * XPUSHs are missing), so no comments are inserted between its
 * backslash-continued lines. */
5749 #define METHOD_CHECK_CACHE(stash,cache,meth) \
5750 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
5752 gv = MUTABLE_GV(HeVAL(he)); \
5753 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
5754 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
5756 XPUSHs(MUTABLE_SV(GvCV(gv))); \
/* NOTE(review): fragment of pp_method_named (compile-time-constant method
 * name, e.g. $obj->foo).  The PP(...) opener and some lines are elided. */
5765 SV* const meth = cMETHOP_meth;
5766 HV* const stash = opmethod_stash(meth);
/* Real stash: try the method cache before the full lookup. */
5768 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
5769 METHOD_CHECK_CACHE(stash, stash, meth);
/* Slow path: full method resolution with AUTOLOAD; croak if absent. */
5772 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
5775 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* NOTE(review): fragment of pp_method_super ($obj->SUPER::meth).  The
 * PP(...) opener and some lines are elided from this listing. */
5784 SV* const meth = cMETHOP_meth;
/* SUPER:: resolves relative to the compiling package, not the invocant. */
5785 HV* const stash = CopSTASH(PL_curcop);
5786 /* Actually, SUPER doesn't need real object's (or class') stash at all,
5787 * as it uses CopSTASH. However, we must ensure that object(class) is
5788 * correct (this check is done by S_opmethod_stash) */
5789 opmethod_stash(meth);
/* Try the per-stash SUPER method cache first. */
5791 if ((cache = HvMROMETA(stash)->super)) {
5792 METHOD_CHECK_CACHE(stash, cache, meth);
/* Slow path: full lookup starting at the parent classes (GV_SUPER). */
5795 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
5798 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* NOTE(review): fragment of pp_method_redir ($obj->Other::meth -- method
 * call redirected to an explicit class).  The PP(...) opener and some
 * lines are elided from this listing. */
5806 SV* const meth = cMETHOP_meth;
/* Look up the redirect class named at compile time. */
5807 HV* stash = gv_stashsv(cMETHOP_rclass, 0);
5808 opmethod_stash(meth); /* not used but needed for error checks */
5810 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
/* No such stash: fall back to using the class-name SV itself. */
5811 else stash = MUTABLE_HV(cMETHOP_rclass);
5813 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
5816 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* pp_method_redir_super: $obj->Other::SUPER::meth -- SUPER lookup relative
 * to an explicitly named class.  NOTE(review): elided listing -- dSP and
 * variable declarations, braces, and the op's RETURN are missing here. */
5820 PP(pp_method_redir_super)
5825 SV* const meth = cMETHOP_meth;
/* Resolve the compile-time redirect class; fall back to the name SV. */
5826 HV* stash = gv_stashsv(cMETHOP_rclass, 0);
5827 opmethod_stash(meth); /* not used but needed for error checks */
5829 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOP_rclass);
/* Known stash: try its SUPER method cache first. */
5830 else if ((cache = HvMROMETA(stash)->super)) {
5831 METHOD_CHECK_CACHE(stash, cache, meth);
/* Slow path: full lookup in the parent classes (GV_SUPER). */
5834 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
5837 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
5842 * ex: set ts=8 sts=4 sw=4 et: