3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
6 * You may distribute under the terms of either the GNU General Public
7 * License or the Artistic License, as specified in the README file.
12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
15 * Awake! Awake! Fear, Fire, Foes! Awake!
18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
21 /* This file contains 'hot' pp ("push/pop") functions that
22 * execute the opcodes that make up a perl program. A typical pp function
23 * expects to find its arguments on the stack, and usually pushes its
24 * results onto the stack, hence the 'pp' terminology. Each OP structure
25 * contains a pointer to the relevant pp_foo() function.
27 * By 'hot', we mean common ops whose execution speed is critical.
28 * By gathering them together into a single file, we encourage
29 * CPU cache hits on hot code. Also it could be taken as a warning not to
30 * change any code in this file unless you're sure it won't affect
35 #define PERL_IN_PP_HOT_C
/* NOTE(review): fragment of pp_nextstate. This extract is non-contiguous;
 * the leading numerals on each line are extraction artifacts (original
 * pp_hot.c line numbers), not code. Visible behavior: record the current
 * statement's COP, clear the taint flag, and reset the stack pointer to
 * the enclosing block's saved base. */
49 PL_curcop = (COP*)PL_op;
50 TAINT_NOT; /* Each statement is presumed innocent */
51 PL_stack_sp = PL_stack_base + cxstack[cxstack_ix].blk_oldsp;
/* NOTE(review): fragment of pp_gvsv. With OPpLVAL_INTRO (e.g. "local $x")
 * push a save-stack-protected scalar; otherwise push the GV's scalar slot
 * (GvSVn vivifies it if needed). Intervening lines are missing from this
 * extract. */
61 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
62 PUSHs(save_scalar(cGVOP_gv));
64 PUSHs(GvSVn(cGVOP_gv));
/* NOTE(review): fragments of three adjacent tiny ops (pp_null alias list,
 * pp_pushmark, pp_gv); most of each body is missing from this extract. */
69 /* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
76 /* This is sometimes called directly by pp_coreargs, pp_grepstart and
/* pp_pushmark: record the current stack pointer as a MARK */
80 PUSHMARK(PL_stack_sp);
91 /* no PUTBACK, SETs doesn't inc/dec SP */
/* pp_gv: push the op's embedded GV itself onto the stack */
98 XPUSHs(MUTABLE_SV(cGVOP_gv));
103 /* also used for: pp_andassign() */
/* NOTE(review): fragment of pp_and -- short-circuit '&&'. Reads the top of
 * stack directly via *PL_stack_sp (deliberately avoiding dSP, per the
 * comment below); when the value is false the op jumps to op_other,
 * leaving the false value on the stack. The truth test and fall-through
 * lines are missing from this extract. */
109 /* SP is not used to remove a variable that is saved across the
110 sv_2bool_flags call in SvTRUE_NN, if a RISC/CISC or low/high machine
111 register or load/store vs direct mem ops macro is introduced, this
112 should be a define block between direct PL_stack_sp and dSP operations,
113 presently, using PL_stack_sp is bias towards CISC cpus */
114 SV * const sv = *PL_stack_sp;
118 if (PL_op->op_type == OP_AND)
120 return cLOGOP->op_other;
/* NOTE(review): fragment of pp_sassign (scalar assignment). The extract is
 * missing many interior lines; comments added here describe only what the
 * visible lines establish. Overall shape: normalize operand order, handle
 * taint propagation, special-case "*glob = \&code" (OPpASSIGN_CV_TO_GV)
 * including the proxy-constant-sub optimisation, then do the ordinary
 * SvSetMagicSV assignment. */
128 /* sassign keeps its args in the optree traditionally backwards.
129 So we pop them differently.
131 SV *left = POPs; SV *right = TOPs;
133 if (PL_op->op_private & OPpASSIGN_BACKWARDS) {
134 SV * const temp = left;
135 left = right; right = temp;
137 assert(TAINTING_get || !TAINT_get);
138 if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
/* Special case: assigning a code ref (or glob) to a glob. */
140 if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
142 SV * const cv = SvRV(right);
143 const U32 cv_type = SvTYPE(cv);
144 const bool is_gv = isGV_with_GP(left);
145 const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
151 /* Can do the optimisation if left (LVALUE) is not a typeglob,
152 right (RVALUE) is a reference to something, and we're in void
154 if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
155 /* Is the target symbol table currently empty? */
156 GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
157 if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
158 /* Good. Create a new proxy constant subroutine in the target.
159 The gv becomes a(nother) reference to the constant. */
160 SV *const value = SvRV(cv);
162 SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
163 SvPCS_IMPORTED_on(gv);
165 SvREFCNT_inc_simple_void(value);
171 /* Need to fix things up. */
173 /* Need to fix GV. */
174 left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
178 /* We've been returned a constant rather than a full subroutine,
179 but they expect a subroutine reference to apply. */
181 ENTER_with_name("sassign_coderef");
182 SvREFCNT_inc_void(SvRV(cv));
183 /* newCONSTSUB takes a reference count on the passed in SV
184 from us. We set the name to NULL, otherwise we get into
185 all sorts of fun as the reference to our new sub is
186 donated to the GV that we're about to assign to.
188 SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
191 LEAVE_with_name("sassign_coderef");
193 /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
195 First: ops for \&{"BONK"}; return us the constant in the
197 Second: ops for *{"BONK"} cause that symbol table entry
198 (and our reference to it) to be upgraded from RV
200 Thirdly: We get here. cv is actually PVGV now, and its
201 GvCV() is actually the subroutine we're looking for
203 So change the reference so that it points to the subroutine
204 of that typeglob, as that's what they were after all along.
206 GV *const upgraded = MUTABLE_GV(cv);
207 CV *const source = GvCV(upgraded);
210 assert(CvFLAGS(source) & CVf_CONST);
212 SvREFCNT_inc_simple_void_NN(source);
213 SvREFCNT_dec_NN(upgraded);
214 SvRV_set(right, MUTABLE_SV(source));
/* Warn when assigning to a refcount-1 temporary: the result is discarded */
220 UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
221 (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
224 packWARN(WARN_MISC), "Useless assignment to a temporary"
226 SvSetMagicSV(left, right);
236 RETURNOP(cLOGOP->op_other);
238 RETURNOP(cLOGOP->op_next);
/* NOTE(review): fragment of pp_unstack -- per-iteration loop cleanup:
 * clear taint, reset the stack to the block base, and (unless OPf_SPECIAL)
 * unwind anything saved on the scope stack since the body started. */
244 TAINT_NOT; /* Each statement is presumed innocent */
245 PL_stack_sp = PL_stack_base + cxstack[cxstack_ix].blk_oldsp;
247 if (!(PL_op->op_flags & OPf_SPECIAL)) {
248 I32 oldsave = PL_scopestack[PL_scopestack_ix - 1];
249 LEAVE_SCOPE(oldsave);
/* NOTE(review): fragment of pp_concat -- string concatenation '.'/'.='.
 * Visible logic: snapshot the right operand if it aliases TARG, copy the
 * left operand into TARG unless it already is TARG, upgrade operands to
 * UTF-8 if their encodings differ, then append. Interior lines (operand
 * setup, RETURN) are missing from this extract. */
256 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
261 const char *rpv = NULL;
263 bool rcopied = FALSE;
265 if (TARG == right && right != left) { /* $r = $l.$r */
266 rpv = SvPV_nomg_const(right, rlen);
267 rbyte = !DO_UTF8(right);
268 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
269 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */
273 if (TARG != left) { /* not $l .= $r */
275 const char* const lpv = SvPV_nomg_const(left, llen);
276 lbyte = !DO_UTF8(left);
277 sv_setpvn(TARG, lpv, llen);
283 else { /* $l .= $r and left == TARG */
285 if (left == right && ckWARN(WARN_UNINITIALIZED)) /* $l .= $l */
286 report_uninit(right);
290 SvPV_force_nomg_nolen(left);
292 lbyte = !DO_UTF8(left);
298 rpv = SvPV_nomg_const(right, rlen);
299 rbyte = !DO_UTF8(right);
/* encodings differ: upgrade the byte-encoded side to UTF-8 first */
301 if (lbyte != rbyte) {
303 sv_utf8_upgrade_nomg(TARG);
306 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
307 sv_utf8_upgrade_nomg(right);
308 rpv = SvPV_nomg_const(right, rlen);
311 sv_catpvn_nomg(TARG, rpv, rlen);
318 /* push the elements of av onto the stack.
319 * XXX Note that padav has similar code but without the mg_get().
320 * I suspect that the mg_get is no longer needed, but while padav
321 * differs, it can't share this function */
/* NOTE(review): fragment of static helper S_pushav. Magical arrays go
 * through av_fetch (which honors tie/overload magic); plain arrays are
 * copied straight out of AvARRAY, substituting &PL_sv_undef for NULL
 * slots. Declaration of 'i' and the SP extension lines are missing from
 * this extract. */
324 S_pushav(pTHX_ AV* const av)
327 const SSize_t maxarg = AvFILL(av) + 1;
329 if (UNLIKELY(SvRMAGICAL(av))) {
331 for (i=0; i < (PADOFFSET)maxarg; i++) {
332 SV ** const svp = av_fetch(av, i, FALSE);
333 /* See note in pp_helem, and bug id #27839 */
335 ? SvGMAGICAL(*svp) ? (mg_get(*svp), *svp) : *svp
341 for (i=0; i < (PADOFFSET)maxarg; i++) {
342 SV * const sv = AvARRAY(av)[i];
343 SP[i+1] = LIKELY(sv) ? sv : &PL_sv_undef;
351 /* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */
/* NOTE(review): fragment of pp_padrange -- pushes a contiguous range of
 * pad lexicals in one op. 'base' is the first pad slot, 'count' the number
 * of slots (packed into op_private). With OPpLVAL_INTRO it also pushes a
 * single packed SAVEt_CLEARPADRANGE save-stack entry instead of one save
 * per lexical. */
356 PADOFFSET base = PL_op->op_targ;
357 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
359 if (PL_op->op_flags & OPf_SPECIAL) {
360 /* fake the RHS of my ($x,$y,..) = @_ */
362 S_pushav(aTHX_ GvAVn(PL_defgv));
366 /* note, this is only skipped for compile-time-known void cxt */
367 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
370 for (i = 0; i <count; i++)
371 *++SP = PAD_SV(base+i);
373 if (PL_op->op_private & OPpLVAL_INTRO) {
374 SV **svp = &(PAD_SVl(base));
375 const UV payload = (UV)(
376 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
377 | (count << SAVE_TIGHT_SHIFT)
378 | SAVEt_CLEARPADRANGE);
379 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
380 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT)) == base);
387 for (i = 0; i <count; i++)
388 SvPADSTALE_off(*svp++); /* mark lexical as active */
/* NOTE(review): fragment of pp_padsv -- push one pad lexical. In lvalue
 * context (OPf_MOD): arrange for "my" (non-state) intro to clear the slot
 * at scope exit, and auto-vivify a reference if OPpDEREF requests it. */
399 OP * const op = PL_op;
400 /* access PL_curpad once */
401 SV ** const padentry = &(PAD_SVl(op->op_targ));
406 PUTBACK; /* no pop/push after this, TOPs ok */
408 if (op->op_flags & OPf_MOD) {
409 if (op->op_private & OPpLVAL_INTRO)
410 if (!(op->op_private & OPpPAD_STATE))
411 save_clearsv(padentry);
412 if (op->op_private & OPpDEREF) {
413 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather
414 than TARG reduces the scope of TARG, so it does not
415 span the call to save_clearsv, resulting in smaller
417 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
/* NOTE(review): fragment of pp_readline -- resolve the filehandle for
 * <FH>/<>/readline into PL_last_in_gv (dereferencing a glob ref, mapping
 * undef to NULL, defaulting to *ARGV), then delegate to do_readline().
 * Several branch-header lines are missing from this extract. */
429 tryAMAGICunTARGETlist(iter_amg, 0);
430 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
432 else PL_last_in_gv = PL_argvgv, PL_stack_sp--;
433 if (!isGV_with_GP(PL_last_in_gv)) {
434 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
435 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));
438 XPUSHs(MUTABLE_SV(PL_last_in_gv));
441 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
442 if (PL_last_in_gv == (GV *)&PL_sv_undef)
443 PL_last_in_gv = NULL;
445 assert(isGV_with_GP(PL_last_in_gv));
448 return do_readline();
/* NOTE(review): fragment of pp_eq -- numeric '=='. Fast path compares raw
 * IVs when both operands are plain non-UV integers; otherwise falls back
 * to the general numeric comparison do_ncmp(). */
456 tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
460 (SvIOK_notUV(left) && SvIOK_notUV(right))
461 ? (SvIVX(left) == SvIVX(right))
462 : ( do_ncmp(left, right) == 0)
468 /* also used for: pp_i_preinc() */
/* NOTE(review): fragment of pp_preinc -- '++$x'. Fast path: if the SV is a
 * plain IV (no magic, no read-only/shared flags, no NV/PV/ROK state) and
 * not at IV_MAX, bump the IV in place; everything else defers to sv_inc. */
472 SV *sv = *PL_stack_sp;
474 if (LIKELY(((sv->sv_flags &
475 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
476 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
478 && SvIVX(sv) != IV_MAX)
480 SvIV_set(sv, SvIVX(sv) + 1);
482 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
489 /* also used for: pp_i_predec() */
/* NOTE(review): fragment of pp_predec -- '--$x'; mirror image of
 * pp_preinc, guarding against IV_MIN underflow before decrementing in
 * place, otherwise deferring to sv_dec. */
493 SV *sv = *PL_stack_sp;
495 if (LIKELY(((sv->sv_flags &
496 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
497 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
499 && SvIVX(sv) != IV_MIN)
501 SvIV_set(sv, SvIVX(sv) - 1);
503 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
510 /* also used for: pp_orassign() */
/* NOTE(review): fragment of pp_or -- short-circuit '||': when the value on
 * the stack is false, jump to op_other to evaluate the right-hand side.
 * The truth test itself is missing from this extract. */
519 if (PL_op->op_type == OP_OR)
521 RETURNOP(cLOGOP->op_other);
526 /* also used for: pp_dor() pp_dorassign() */
/* NOTE(review): fragment of pp_defined, shared with '//' (defined-or).
 * NULL / bodiless SVs are undefined; AVs, HVs and CVs get type-specific
 * definedness tests (elements present, keys present, sub body present, or
 * tie/get magic). Branch bodies and the final SvOK test are missing from
 * this extract. */
533 const int op_type = PL_op->op_type;
534 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);
539 if (UNLIKELY(!sv || !SvANY(sv))) {
540 if (op_type == OP_DOR)
542 RETURNOP(cLOGOP->op_other);
548 if (UNLIKELY(!sv || !SvANY(sv)))
553 switch (SvTYPE(sv)) {
555 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
559 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
563 if (CvROOT(sv) || CvXSUB(sv))
576 if(op_type == OP_DOR)
578 RETURNOP(cLOGOP->op_other);
580 /* assuming OP_DEFINED */
/* NOTE(review): fragment of pp_add -- numeric '+'. Structure visible here:
 * (1) fast paths for two plain IVs (with a two-top-bits overflow check)
 * and two plain NVs that fit in IVs; (2) under PERL_PRESERVE_IVUV, careful
 * all-unsigned integer addition with sign bookkeeping so signed overflow
 * (UB in C) is never executed; (3) NV fallback. Many interior lines are
 * missing from this extract. */
590 dSP; dATARGET; bool useleft; SV *svl, *svr;
592 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
596 #ifdef PERL_PRESERVE_IVUV
598 /* special-case some simple common cases */
599 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
601 U32 flags = (svl->sv_flags & svr->sv_flags);
602 if (flags & SVf_IOK) {
603 /* both args are simple IVs */
608 topl = ((UV)il) >> (UVSIZE * 8 - 2);
609 topr = ((UV)ir) >> (UVSIZE * 8 - 2);
611 /* if both are in a range that can't under/overflow, do a
612 * simple integer add: if the top of both numbers
613 * are 00 or 11, then it's safe */
614 if (!( ((topl+1) | (topr+1)) & 2)) {
616 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
622 else if (flags & SVf_NOK) {
623 /* both args are NVs */
628 #if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
629 !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
630 && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
632 nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
635 /* nothing was lost by converting to IVs */
638 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
646 useleft = USE_LEFT(svl);
647 /* We must see if we can perform the addition with integers if possible,
648 as the integer code detects overflow while the NV code doesn't.
649 If either argument hasn't had a numeric conversion yet attempt to get
650 the IV. It's important to do this now, rather than just assuming that
651 it's not IOK as a PV of "9223372036854775806" may not take well to NV
652 addition, and an SV which is NOK, NV=6.0 ought to be coerced to
653 integer in case the second argument is IV=9223372036854775806
654 We can (now) rely on sv_2iv to do the right thing, only setting the
655 public IOK flag if the value in the NV (or PV) slot is truly integer.
657 A side effect is that this also aggressively prefers integer maths over
658 fp maths for integer values.
660 How to detect overflow?
662 C 99 section 6.2.6.1 says
664 The range of nonnegative values of a signed integer type is a subrange
665 of the corresponding unsigned integer type, and the representation of
666 the same value in each type is the same. A computation involving
667 unsigned operands can never overflow, because a result that cannot be
668 represented by the resulting unsigned integer type is reduced modulo
669 the number that is one greater than the largest value that can be
670 represented by the resulting type.
674 which I read as "unsigned ints wrap."
676 signed integer overflow seems to be classed as "exception condition"
678 If an exceptional condition occurs during the evaluation of an
679 expression (that is, if the result is not mathematically defined or not
680 in the range of representable values for its type), the behavior is
683 (6.5, the 5th paragraph)
685 I had assumed that on 2s complement machines signed arithmetic would
686 wrap, hence coded pp_add and pp_subtract on the assumption that
687 everything perl builds on would be happy. After much wailing and
688 gnashing of teeth it would seem that irix64 knows its ANSI spec well,
689 knows that it doesn't need to, and doesn't. Bah. Anyway, the all-
690 unsigned code below is actually shorter than the old code. :-)
693 if (SvIV_please_nomg(svr)) {
694 /* Unless the left argument is integer in range we are going to have to
695 use NV maths. Hence only attempt to coerce the right argument if
696 we know the left is integer. */
704 /* left operand is undef, treat as zero. + 0 is identity,
705 Could SETi or SETu right now, but space optimise by not adding
706 lots of code to speed up what is probably a rarish case. */
708 /* Left operand is defined, so is it IV? */
709 if (SvIV_please_nomg(svl)) {
710 if ((auvok = SvUOK(svl)))
713 const IV aiv = SvIVX(svl);
716 auvok = 1; /* Now acting as a sign flag. */
718 auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
725 bool result_good = 0;
728 bool buvok = SvUOK(svr);
733 const IV biv = SvIVX(svr);
738 buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
740 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
741 else "IV" now, independent of how it came in.
742 if a, b represents positive, A, B negative, a maps to -A etc
747 all UV maths. negate result if A negative.
748 add if signs same, subtract if signs differ. */
754 /* Must get smaller */
760 /* result really should be -(auv-buv). as its negation
761 of true value, need to swap our result flag */
778 if (result <= (UV)IV_MIN)
779 SETi(result == (UV)IV_MIN
780 ? IV_MIN : -(IV)result)
782 /* result valid, but out of range for IV. */
787 } /* Overflow, drop through to NVs. */
792 useleft = USE_LEFT(svl);
796 NV value = SvNV_nomg(svr);
799 /* left operand is undef, treat as zero. + 0.0 is identity. */
803 SETn( value + SvNV_nomg(svl) );
809 /* also used for: pp_aelemfast_lex() */
/* NOTE(review): fragment of pp_aelemfast -- fetch one array element whose
 * (small, signed I8) index is baked into op_private; the array comes from
 * the pad for OP_AELEMFAST_LEX or from the op's GV otherwise. */
814 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
815 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
816 const U32 lval = PL_op->op_flags & OPf_MOD;
817 SV** const svp = av_fetch(av, (I8)PL_op->op_private, lval);
818 SV *sv = (svp ? *svp : &PL_sv_undef);
820 if (UNLIKELY(!svp && lval))
821 DIE(aTHX_ PL_no_aelem, (int)(I8)PL_op->op_private);
824 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
/* NOTE(review): fragments of pp_join (single do_join call) and pp_pushre,
 * which smuggles the current OP inside a mortal PVLV's LvTARGOFF slot (or,
 * in the simpler build variant, pushes PL_op directly). */
834 do_join(TARG, *MARK, MARK, SP);
845 * We ass_u_me that LvTARGOFF() comes first, and that two STRLENs
846 * will be enough to hold an OP*.
848 SV* const sv = sv_newmortal();
849 sv_upgrade(sv, SVt_PVLV);
851 Copy(&PL_op, &LvTARGOFF(sv), 1, OP*);
854 XPUSHs(MUTABLE_SV(PL_op));
859 /* Oversized hot code. */
861 /* also used for: pp_say() */
/* NOTE(review): fragment of pp_print/pp_say. Resolves the target handle
 * (explicit GV on the stack or $defoutgv), delegates to the tied PRINT
 * method for tied handles, validates the filehandle (EBADF / wrong-way
 * warnings), prints each argument with $, between items, then appends
 * "\n" for say or $\ for print, flushing if IOf_FLUSH is set. */
865 dSP; dMARK; dORIGMARK;
869 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;
873 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
876 if (MARK == ORIGMARK) {
877 /* If using default handle then we need to make space to
878 * pass object as 1st arg, so move other args up ...
882 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
885 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
887 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
888 | (PL_op->op_type == OP_SAY
889 ? TIED_METHOD_SAY : 0)), sp - mark);
892 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
893 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
896 SETERRNO(EBADF,RMS_IFI);
899 else if (!(fp = IoOFP(io))) {
901 report_wrongway_fh(gv, '<');
904 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);
908 SV * const ofs = GvSV(PL_ofsgv); /* $, */
910 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
912 if (!do_print(*MARK, fp))
916 /* don't use 'ofs' here - it may be invalidated by magic callbacks */
917 if (!do_print(GvSV(PL_ofsgv), fp)) {
926 if (!do_print(*MARK, fp))
934 if (PL_op->op_type == OP_SAY) {
935 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))
938 else if (PL_ors_sv && SvOK(PL_ors_sv))
939 if (!do_print(PL_ors_sv, fp)) /* $\ */
942 if (IoFLAGS(io) & IOf_FLUSH)
943 if (PerlIO_flush(fp) == EOF)
953 XPUSHs(&PL_sv_undef);
958 /* also used for: pp_rv2hv() */
959 /* also called directly by pp_lvavref */
/* NOTE(review): fragment of pp_rv2av/pp_rv2hv -- dereference a scalar into
 * an AV or HV. Handles overload-deref, "Not an ARRAY/HASH reference"
 * errors, soft references via softref2xv, localization, OPf_REF / lvalue
 * sub contexts, then the per-type guts: flatten the array (or count it in
 * scalar context) for AVs, produce key/value pairs, a boolean, or the
 * bucket summary for HVs. Interior lines are missing from this extract. */
964 const I32 gimme = GIMME_V;
965 static const char an_array[] = "an ARRAY";
966 static const char a_hash[] = "a HASH";
967 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
968 || PL_op->op_type == OP_LVAVREF;
969 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
973 if (UNLIKELY(SvAMAGIC(sv))) {
974 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
977 if (UNLIKELY(SvTYPE(sv) != type))
978 /* diag_listed_as: Not an ARRAY reference */
979 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
980 else if (UNLIKELY(PL_op->op_flags & OPf_MOD
981 && PL_op->op_private & OPpLVAL_INTRO))
982 Perl_croak(aTHX_ "%s", PL_no_localize_ref);
984 else if (UNLIKELY(SvTYPE(sv) != type)) {
987 if (!isGV_with_GP(sv)) {
988 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
996 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
997 if (PL_op->op_private & OPpLVAL_INTRO)
998 sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
1000 if (PL_op->op_flags & OPf_REF) {
1004 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
1005 const I32 flags = is_lvalue_sub();
1006 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1007 if (gimme != G_ARRAY)
1008 goto croak_cant_return;
1015 AV *const av = MUTABLE_AV(sv);
1016 /* The guts of pp_rv2av */
1017 if (gimme == G_ARRAY) {
1023 else if (gimme == G_SCALAR) {
1025 const SSize_t maxarg = AvFILL(av) + 1;
1029 /* The guts of pp_rv2hv */
1030 if (gimme == G_ARRAY) { /* array wanted */
1032 return Perl_do_kv(aTHX);
1034 else if ((PL_op->op_private & OPpTRUEBOOL
1035 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
1036 && block_gimme() == G_VOID ))
1037 && (!SvRMAGICAL(sv) || !mg_find(sv, PERL_MAGIC_tied)))
1038 SETs(HvUSEDKEYS(sv) ? &PL_sv_yes : sv_2mortal(newSViv(0)));
1039 else if (gimme == G_SCALAR) {
1041 TARG = Perl_hv_scalar(aTHX_ MUTABLE_HV(sv));
1048 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
1049 is_pp_rv2av ? "array" : "hash");
/* NOTE(review): static helper S_do_oddball -- emit the right warning when a
 * list assigned to a hash has an odd element count: a reference in the
 * first key position suggests the user meant an aggregate ("Reference
 * found where even-sized list expected"), otherwise the generic "Odd
 * number of elements" warning. Some interior lines are missing from this
 * extract. */
1054 S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
1056 PERL_ARGS_ASSERT_DO_ODDBALL;
1059 if (ckWARN(WARN_MISC)) {
1061 if (oddkey == firstkey &&
1063 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
1064 SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
1066 err = "Reference found where even-sized list expected";
1069 err = "Odd number of elements in hash assignment";
1070 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
1077 /* Do a mark and sweep with the SVf_BREAK flag to detect elements which
1078 * are common to both the LHS and RHS of an aassign, and replace them
1079 * with copies. All these copies are made before the actual list assign is
1082 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
1083 * element ($b) to the first LH element ($a), modifies $a; when the
1084 * second assignment is done, the second RH element now has the wrong
1085 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
1086 * Note that we don't need to make a mortal copy of $b.
1088 * The algorithm below works by, for every RHS element, mark the
1089 * corresponding LHS target element with SVf_BREAK. Then if the RHS
1090 * element is found with SVf_BREAK set, it means it would have been
1091 * modified, so make a copy.
1092 * Note that by scanning both LHS and RHS in lockstep, we avoid
1093 * unnecessary copies (like $b above) compared with a naive
1094 * "mark all LHS; copy all marked RHS; unmark all LHS".
1096 * If the LHS element is a 'my' declaration and has a refcount of 1, then
1097 * it can't be common and can be skipped.
1099 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
1100 * that we thought we didn't need to call S_aassign_copy_common(), but we
1101 * have anyway for sanity checking. If we find we need to copy, then panic.
/* NOTE(review): fragment of S_aassign_copy_common -- the mark-and-sweep
 * described in the comment block above: walk LHS and RHS in lockstep,
 * tag each LHS target with SVf_BREAK, and mortal-copy any RHS element
 * already tagged (it would otherwise be clobbered before being read).
 * Finally strip SVf_BREAK from all marked LHS elements. Interior lines
 * are missing from this extract. */
1104 PERL_STATIC_INLINE void
1105 S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
1106 SV **firstrelem, SV **lastrelem
1115 SSize_t lcount = lastlelem - firstlelem + 1;
1116 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
1117 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
1118 bool copy_all = FALSE;
1120 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
1121 assert(firstlelem < lastlelem); /* at least 2 LH elements */
1122 assert(firstrelem < lastrelem); /* at least 2 RH elements */
1126 /* we never have to copy the first RH element; it can't be corrupted
1127 * by assigning something to the corresponding first LH element.
1128 * So this scan does in a loop: mark LHS[N]; test RHS[N+1]
1130 relem = firstrelem + 1;
1132 for (; relem <= lastrelem; relem++) {
1135 /* mark next LH element */
1137 if (--lcount >= 0) {
1140 if (UNLIKELY(!svl)) {/* skip AV alias marker */
1141 assert (lelem <= lastlelem);
1147 if (SvSMAGICAL(svl)) {
1150 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
1153 /* this LH element will consume all further args;
1154 * no need to mark any further LH elements (if any).
1155 * But we still need to scan any remaining RHS elements;
1156 * set lcount negative to distinguish from lcount == 0,
1157 * so the loop condition continues being true
1160 lelem--; /* no need to unmark this element */
1162 else if (!(do_rc1 && SvREFCNT(svl) == 1) && svl != &PL_sv_undef) {
1163 assert(!SvIMMORTAL(svl));
1164 SvFLAGS(svl) |= SVf_BREAK;
1168 /* don't check RH element if no SVf_BREAK flags set yet */
1175 /* see if corresponding RH element needs copying */
1181 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
1185 /* op_dump(PL_op); */
1187 "panic: aassign skipped needed copy of common RH elem %"
1188 UVuf, (UV)(relem - firstrelem));
1192 TAINT_NOT; /* Each item is independent */
1194 /* Dear TODO test in t/op/sort.t, I love you.
1195 (It's relying on a panic, not a "semi-panic" from newSVsv()
1196 and then an assertion failure below.) */
1197 if (UNLIKELY(SvIS_FREED(svr))) {
1198 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
1201 /* avoid break flag while copying; otherwise COW etc
1203 SvFLAGS(svr) &= ~SVf_BREAK;
1204 /* Not newSVsv(), as it does not allow copy-on-write,
1205 resulting in wasteful copies.
1206 Also, we use SV_NOSTEAL in case the SV is used more than
1207 once, e.g. (...) = (f())[0,0]
1208 Where the same SV appears twice on the RHS without a ref
1209 count bump. (Although I suspect that the SV won't be
1210 stealable here anyway - DAPM).
1212 *relem = sv_mortalcopy_flags(svr,
1213 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1214 /* ... but restore afterwards in case it's needed again,
1215 * e.g. ($a,$b,$c) = (1,$a,$a)
1217 SvFLAGS(svr) |= SVf_BREAK;
1229 while (lelem > firstlelem) {
1230 SV * const svl = *(--lelem);
1232 SvFLAGS(svl) &= ~SVf_BREAK;
/* NOTE(review): fragment of pp_aassign (list assignment) -- setup phase:
 * locate the LHS/RHS element ranges from the two stack marks, save
 * PL_delaymagic locally (restored on die by JUMPENV_POP), and begin the
 * debug-build pre-scan for get/set magic on both sides. Continues below. */
1241 SV **lastlelem = PL_stack_sp;
1242 SV **lastrelem = PL_stack_base + POPMARK;
1243 SV **firstrelem = PL_stack_base + POPMARK + 1;
1244 SV **firstlelem = lastrelem + 1;
1257 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
1258 * only need to save locally, not on the save stack */
1259 U16 old_delaymagic = PL_delaymagic;
1264 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
1266 /* If there's a common identifier on both sides we have to take
1267 * special care that assigning the identifier on the left doesn't
1268 * clobber a value on the right that's used later in the list.
1271 /* at least 2 LH and RH elements, or commonality isn't an issue */
1272 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1273 for (relem = firstrelem+1; relem <= lastrelem; relem++) {
1274 if (SvGMAGICAL(*relem))
1277 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1278 if (*lelem && SvSMAGICAL(*lelem))
/* pp_aassign, COMMON_RC1 fast path: the op flagged possible LHS/RHS
 * commonality, but if every LHS scalar has refcount 1 (and is not an
 * aggregate) no element can alias an RHS value, so the expensive
 * S_aassign_copy_common() scan can be skipped. */
1281 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
1282 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
1283 /* skip the scan if all scalars have a ref count of 1 */
1284 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
1286 if (!sv || SvREFCNT(sv) == 1)
/* FIX(review): this condition previously tested SVt_PVAV twice, making the
 * second clause redundant and mis-classifying LHS hashes: a hash with
 * refcount > 1 matched "not an aggregate" and forced the full commonality
 * scan. The intent is "neither an array nor a hash", so the second test
 * must be SVt_PVHV. */
1288 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVHV)
1295 S_aassign_copy_common(aTHX_
1296 firstlelem, lastlelem, firstrelem, lastrelem
/* NOTE(review): fragment of the body of pp_aassign; the function continues
 * beyond the end of this extract. Visible phases: the per-LHS-element
 * assignment switch (AV case: optionally pre-copy RHS on possible
 * commonality, clear + refill the array; HV case: same pre-copy logic,
 * odd-element warning, duplicate-key value refresh; scalar case:
 * plain sv_setsv with the useless-temporary warning), then the delayed
 * $</$>/$(/$) uid/gid magic, and finally context-dependent return-value
 * fixup. Many interior lines are missing. */
1306 /* on debugging builds, do the scan even if we've concluded we
1307 * don't need to, then panic if we find commonality. Note that the
1308 * scanner assumes at least 2 elements */
1309 if (firstlelem < lastlelem && firstrelem < lastrelem) {
1317 lval = (gimme == G_ARRAY) ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
1324 while (LIKELY(lelem <= lastlelem)) {
1326 TAINT_NOT; /* Each item stands on its own, taintwise. */
1328 if (UNLIKELY(!sv)) {
1331 ASSUME(SvTYPE(sv) == SVt_PVAV);
1333 switch (SvTYPE(sv)) {
/* ---- LHS element is an array: it consumes all remaining RHS values ---- */
1335 bool already_copied = FALSE;
1336 ary = MUTABLE_AV(sv);
1337 magic = SvMAGICAL(ary) != 0;
1339 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1341 /* We need to clear ary. The is a danger that if we do this,
1342 * elements on the RHS may be prematurely freed, e.g.
1344 * In the case of possible commonality, make a copy of each
1345 * RHS SV *before* clearing the array, and add a reference
1346 * from the tmps stack, so that it doesn't leak on death.
1347 * Otherwise, make a copy of each RHS SV only as we're storing
1348 * it into the array - that way we don't have to worry about
1349 * it being leaked if we die, but don't incur the cost of
1350 * mortalising everything.
1353 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1354 && (relem <= lastrelem)
1355 && (magic || AvFILL(ary) != -1))
1358 EXTEND_MORTAL(lastrelem - relem + 1);
1359 for (svp = relem; svp <= lastrelem; svp++) {
1360 /* see comment in S_aassign_copy_common about SV_NOSTEAL */
1361 *svp = sv_mortalcopy_flags(*svp,
1362 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1365 already_copied = TRUE;
1369 if (relem <= lastrelem)
1370 av_extend(ary, lastrelem - relem);
1373 while (relem <= lastrelem) { /* gobble up all the rest */
1375 if (LIKELY(!alias)) {
1380 /* before newSV, in case it dies */
1383 /* see comment in S_aassign_copy_common about
1385 sv_setsv_flags(sv, *relem,
1386 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
1391 if (!already_copied)
1394 DIE(aTHX_ "Assigned value is not a reference");
1395 if (SvTYPE(SvRV(*relem)) > SVt_PVLV)
1396 /* diag_listed_as: Assigned value is not %s reference */
1398 "Assigned value is not a SCALAR reference");
1399 if (lval && !already_copied)
1400 *relem = sv_mortalcopy(*relem);
1401 /* XXX else check for weak refs? */
1402 sv = SvREFCNT_inc_NN(SvRV(*relem));
1406 SvREFCNT_inc_simple_void_NN(sv); /* undo mortal free */
1407 didstore = av_store(ary,i++,sv);
1416 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
1417 SvSETMAGIC(MUTABLE_SV(ary));
/* ---- LHS element is a hash: consumes remaining RHS as key/value pairs ---- */
1422 case SVt_PVHV: { /* normal hash */
1426 SV** topelem = relem;
1427 SV **firsthashrelem = relem;
1428 bool already_copied = FALSE;
1430 hash = MUTABLE_HV(sv);
1431 magic = SvMAGICAL(hash) != 0;
1433 odd = ((lastrelem - firsthashrelem)&1)? 0 : 1;
1434 if (UNLIKELY(odd)) {
1435 do_oddball(lastrelem, firsthashrelem);
1436 /* we have firstlelem to reuse, it's not needed anymore
1438 *(lastrelem+1) = &PL_sv_undef;
1442 SAVEFREESV(SvREFCNT_inc_simple_NN(sv));
1444 /* We need to clear hash. The is a danger that if we do this,
1445 * elements on the RHS may be prematurely freed, e.g.
1446 * %h = (foo => $h{bar});
1447 * In the case of possible commonality, make a copy of each
1448 * RHS SV *before* clearing the hash, and add a reference
1449 * from the tmps stack, so that it doesn't leak on death.
1452 if ( (PL_op->op_private & OPpASSIGN_COMMON_AGG)
1453 && (relem <= lastrelem)
1454 && (magic || HvUSEDKEYS(hash)))
1457 EXTEND_MORTAL(lastrelem - relem + 1);
1458 for (svp = relem; svp <= lastrelem; svp++) {
1459 *svp = sv_mortalcopy_flags(*svp,
1460 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
1463 already_copied = TRUE;
1468 while (LIKELY(relem < lastrelem+odd)) { /* gobble up all the rest */
1471 /* Copy the key if aassign is called in lvalue context,
1472 to avoid having the next op modify our rhs. Copy
1473 it also if it is gmagical, lest it make the
1474 hv_store_ent call below croak, leaking the value. */
1475 sv = (lval || SvGMAGICAL(*relem)) && !already_copied
1476 ? sv_mortalcopy(*relem)
1485 sv_setsv_nomg(tmpstr,*relem++); /* value */
1488 if (gimme == G_ARRAY) {
1489 if (hv_exists_ent(hash, sv, 0))
1490 /* key overwrites an existing entry */
1493 /* copy element back: possibly to an earlier
1494 * stack location if we encountered dups earlier,
1495 * possibly to a later stack location if odd */
1497 *topelem++ = tmpstr;
1501 SvREFCNT_inc_simple_void_NN(tmpstr); /* undo mortal free */
1502 didstore = hv_store_ent(hash,sv,tmpstr,0);
1504 if (!didstore) sv_2mortal(tmpstr);
1510 if (duplicates && gimme == G_ARRAY) {
1511 /* at this point we have removed the duplicate key/value
1512 * pairs from the stack, but the remaining values may be
1513 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
1514 * the (a 2), but the stack now probably contains
1515 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
1516 * obliterates the earlier key. So refresh all values. */
1517 lastrelem -= duplicates;
1518 relem = firsthashrelem;
1519 while (relem < lastrelem+odd) {
1521 he = hv_fetch_ent(hash, *relem++, 0, 0);
1522 *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
1525 if (odd && gimme == G_ARRAY) lastrelem++;
/* ---- LHS element is a plain scalar ---- */
1529 if (SvIMMORTAL(sv)) {
1530 if (relem <= lastrelem)
1534 if (relem <= lastrelem) {
1536 SvTEMP(sv) && !SvSMAGICAL(sv) && SvREFCNT(sv) == 1 &&
1537 (!isGV_with_GP(sv) || SvFAKE(sv)) && ckWARN(WARN_MISC)
1540 packWARN(WARN_MISC),
1541 "Useless assignment to a temporary"
1543 sv_setsv(sv, *relem);
1547 sv_setsv(sv, &PL_sv_undef);
/* ---- delayed uid/gid magic: apply $< $> $( $) changes atomically ---- */
1552 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
1553 /* Will be used to set PL_tainting below */
1554 Uid_t tmp_uid = PerlProc_getuid();
1555 Uid_t tmp_euid = PerlProc_geteuid();
1556 Gid_t tmp_gid = PerlProc_getgid();
1557 Gid_t tmp_egid = PerlProc_getegid();
1559 /* XXX $> et al currently silently ignore failures */
1560 if (PL_delaymagic & DM_UID) {
1561 #ifdef HAS_SETRESUID
1563 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1564 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
1567 # ifdef HAS_SETREUID
1569 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
1570 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
1573 if ((PL_delaymagic & DM_UID) == DM_RUID) {
1574 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
1575 PL_delaymagic &= ~DM_RUID;
1577 # endif /* HAS_SETRUID */
1579 if ((PL_delaymagic & DM_UID) == DM_EUID) {
1580 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
1581 PL_delaymagic &= ~DM_EUID;
1583 # endif /* HAS_SETEUID */
1584 if (PL_delaymagic & DM_UID) {
1585 if (PL_delaymagic_uid != PL_delaymagic_euid)
1586 DIE(aTHX_ "No setreuid available");
1587 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
1589 # endif /* HAS_SETREUID */
1590 #endif /* HAS_SETRESUID */
1592 tmp_uid = PerlProc_getuid();
1593 tmp_euid = PerlProc_geteuid();
1595 /* XXX $> et al currently silently ignore failures */
1596 if (PL_delaymagic & DM_GID) {
1597 #ifdef HAS_SETRESGID
1599 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1600 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
1603 # ifdef HAS_SETREGID
1605 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
1606 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
1609 if ((PL_delaymagic & DM_GID) == DM_RGID) {
1610 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
1611 PL_delaymagic &= ~DM_RGID;
1613 # endif /* HAS_SETRGID */
1615 if ((PL_delaymagic & DM_GID) == DM_EGID) {
1616 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
1617 PL_delaymagic &= ~DM_EGID;
1619 # endif /* HAS_SETEGID */
1620 if (PL_delaymagic & DM_GID) {
1621 if (PL_delaymagic_gid != PL_delaymagic_egid)
1622 DIE(aTHX_ "No setregid available");
1623 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
1625 # endif /* HAS_SETREGID */
1626 #endif /* HAS_SETRESGID */
1628 tmp_gid = PerlProc_getgid();
1629 tmp_egid = PerlProc_getegid();
1631 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
1632 #ifdef NO_TAINT_SUPPORT
1633 PERL_UNUSED_VAR(tmp_uid);
1634 PERL_UNUSED_VAR(tmp_euid);
1635 PERL_UNUSED_VAR(tmp_gid);
1636 PERL_UNUSED_VAR(tmp_egid);
1639 PL_delaymagic = old_delaymagic;
/* ---- return value: nothing in void, count in scalar, LHS list in list ---- */
1641 if (gimme == G_VOID)
1642 SP = firstrelem - 1;
1643 else if (gimme == G_SCALAR) {
1646 SETi(lastrelem - firstrelem + 1);
1650 /* note that in this case *firstlelem may have been overwritten
1651 by sv_undef in the odd hash case */
1654 SP = firstrelem + (lastlelem - firstlelem);
1655 lelem = firstlelem + (relem - firstrelem);
1657 *relem++ = (lelem <= lastlelem) ? *lelem++ : &PL_sv_undef;
/* pp_qr: run-time op for qr// — wraps the compiled REGEXP in a fresh
 * blessed reference SV and pushes it.
 * NOTE(review): this listing is elided (the PP(pp_qr) opening and some
 * interior lines are not visible); comments describe only the visible
 * statements. */
1667 PMOP * const pm = cPMOP;
1668 REGEXP * rx = PM_GETRE(pm);
/* pkg is the class to bless into, as reported by the regex engine
 * (NULL if there is no rx — presumably falls back elsewhere; confirm). */
1669 SV * const pkg = rx ? CALLREG_PACKAGE(rx) : NULL;
1670 SV * const rv = sv_newmortal();
1674 SvUPGRADE(rv, SVt_IV);
1675 /* For a subroutine describing itself as "This is a hacky workaround" I'm
1676 loath to use it here, but it seems to be the right fix. Or close.
1677 The key part appears to be that it's essential for pp_qr to return a new
1678 object (SV), which implies that there needs to be an effective way to
1679 generate a new SV from the existing SV that is pre-compiled in the
/* reg_temp_copy() makes that new REGEXP copy; rv becomes a ref to it. */
1681 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));
/* If the pattern carries an anonymous code block (qr/(?{...})/) whose CV
 * is marked cloneable, clone it now so the new object owns its own CV. */
1684 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
1685 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
1686 *cvp = cv_clone(cv);
1687 SvREFCNT_dec_NN(cv);
/* Bless the new ref into the engine-reported package (usually "Regexp"
 * — presumed from CALLREG_PACKAGE; confirm against regcomp). */
1691 HV *const stash = gv_stashsv(pkg, GV_ADD);
1692 SvREFCNT_dec_NN(pkg);
1693 (void)sv_bless(rv, stash);
/* Propagate pattern taint to the referent; see "how taint works" below. */
1696 if (UNLIKELY(RX_ISTAINTED(rx))) {
1698 SvTAINTED_on(SvRV(rv));
/* pp_match: execute a pattern match (m//, //g, ??) against TARG and, in
 * list context, push the captures as mortals; otherwise push a boolean.
 * NOTE(review): elided listing — the PP(pp_match) opening, several
 * control-flow lines and closing braces are not visible here. */
1711 SSize_t curpos = 0; /* initial pos() or current $+[0] */
1714 const char *truebase; /* Start of string */
1715 REGEXP *rx = PM_GETRE(pm);
1717 const I32 gimme = GIMME_V;
1719 const I32 oldsave = PL_savestack_ix;
1720 I32 had_zerolen = 0;
1723 if (PL_op->op_flags & OPf_STACKED)
1732 PUTBACK; /* EVAL blocks need stack_sp. */
1733 /* Skip get-magic if this is a qr// clone, because regcomp has
/* Stringify the target; mother_re set means a qr// clone, whose get-magic
 * was already triggered, hence the _nomg variant. */
1735 truebase = ReANY(rx)->mother_re
1736 ? SvPV_nomg_const(TARG, len)
1737 : SvPV_const(TARG, len);
1739 DIE(aTHX_ "panic: pp_match");
1740 strend = truebase + len;
/* Taint status of this match: compile-time pattern taint, or re-taint
 * requested via PMf_RETAINT while PL_tainted is set. */
1741 rxtainted = (RX_ISTAINTED(rx) ||
1742 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
1745 /* We need to know this in case we fail out early - pos() must be reset */
1746 global = dynpm->op_pmflags & PMf_GLOBAL;
1748 /* PMdf_USED is set after a ?? matches once */
1751 SvREADONLY(PL_regex_pad[pm->op_pmoffset])
1753 pm->op_pmflags & PMf_USED
1756 DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once"));
1760 /* empty pattern special-cased to use last successful pattern if
1761 possible, except for qr// */
1762 if (!ReANY(rx)->mother_re && !RX_PRELEN(rx)
/* Cheap reject: target shorter than the engine's minimum match length. */
1768 if (RX_MINLEN(rx) >= 0 && (STRLEN)RX_MINLEN(rx) > len) {
1769 DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
1770 UVuf" < %"IVdf")\n",
1771 (UV)len, (IV)RX_MINLEN(rx)));
1775 /* get pos() if //g */
1777 mg = mg_find_mglob(TARG);
1778 if (mg && mg->mg_len >= 0) {
1779 curpos = MgBYTEPOS(mg, TARG, truebase, len);
1780 /* last time pos() was set, it was zero-length match */
1781 if (mg->mg_flags & MGf_MINMATCH)
1786 #ifdef PERL_SAWAMPERSAND
1789 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
1790 || (dynpm->op_pmflags & PMf_KEEPCOPY)
1794 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
1795 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
1796 * only on the first iteration. Therefore we need to copy $' as well
1797 * as $&, to make the rest of the string available for captures in
1798 * subsequent iterations */
1799 if (! (global && gimme == G_ARRAY))
1800 r_flags |= REXEC_COPY_SKIP_POST;
1802 #ifdef PERL_SAWAMPERSAND
1803 if (dynpm->op_pmflags & PMf_KEEPCOPY)
1804 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
1805 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
/* Run the engine starting at pos() (curpos); failure jumps to the
 * nope/fail handling (elided here). */
1812 s = truebase + curpos;
1814 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
1815 had_zerolen, TARG, NULL, r_flags))
/* ?? patterns: mark as used so they never match again this run. */
1819 if (dynpm->op_pmflags & PMf_ONCE)
1821 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
1823 dynpm->op_pmflags |= PMf_USED;
1827 RX_MATCH_TAINTED_on(rx);
1828 TAINT_IF(RX_MATCH_TAINTED(rx));
/* //g in scalar context (or /c): record the new pos() via mglob magic,
 * remembering whether this was a zero-length match (MGf_MINMATCH). */
1832 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
1834 mg = sv_magicext_mglob(TARG);
1835 MgBYTEPOS_set(mg, TARG, truebase, RX_OFFS(rx)[0].end);
1836 if (RX_ZERO_LEN(rx))
1837 mg->mg_flags |= MGf_MINMATCH;
1839 mg->mg_flags &= ~MGf_MINMATCH;
/* Scalar/boolean return path: no captures to push. */
1842 if ((!RX_NPARENS(rx) && !global) || gimme != G_ARRAY) {
1843 LEAVE_SCOPE(oldsave);
1847 /* push captures on stack */
1850 const I32 nparens = RX_NPARENS(rx);
/* i==1 when a capture-less //g match should still push $& (one value). */
1851 I32 i = (global && !nparens) ? 1 : 0;
1853 SPAGAIN; /* EVAL blocks could move the stack. */
1854 EXTEND(SP, nparens + i);
1855 EXTEND_MORTAL(nparens + i);
1856 for (i = !i; i <= nparens; i++) {
1857 PUSHs(sv_newmortal());
1858 if (LIKELY((RX_OFFS(rx)[i].start != -1)
1859 && RX_OFFS(rx)[i].end != -1 ))
1861 const I32 len = RX_OFFS(rx)[i].end - RX_OFFS(rx)[i].start;
1862 const char * const s = RX_OFFS(rx)[i].start + truebase;
/* Sanity-check the engine's offsets before copying from them. */
1863 if (UNLIKELY(RX_OFFS(rx)[i].end < 0 || RX_OFFS(rx)[i].start < 0
1864 || len < 0 || len > strend - s))
1865 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
1866 "start=%ld, end=%ld, s=%p, strend=%p, len=%"UVuf,
1867 (long) i, (long) RX_OFFS(rx)[i].start,
1868 (long)RX_OFFS(rx)[i].end, s, strend, (UV) len);
1869 sv_setpvn(*SP, s, len);
1870 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
/* //g list context: advance past this match and loop for the next one. */
1875 curpos = (UV)RX_OFFS(rx)[0].end;
1876 had_zerolen = RX_ZERO_LEN(rx);
1877 PUTBACK; /* EVAL blocks may use stack */
1878 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
1881 LEAVE_SCOPE(oldsave);
1884 NOT_REACHED; /* NOTREACHED */
/* Failure path: reset pos() unless /c (PMf_CONTINUE) asked to keep it. */
1887 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
1889 mg = mg_find_mglob(TARG);
1893 LEAVE_SCOPE(oldsave);
1894 if (gimme == G_ARRAY)
/* Perl_do_readline: implements <FH>, readline(), glob() (OP_GLOB) and the
 * .. += <FH> form (OP_RCATLINE). Reads one line (scalar context) or all
 * lines (list context) from PL_last_in_gv's IO, honouring tied handles,
 * ARGV magic (<>), $/ snarf mode, and utf8 checking.
 * NOTE(review): elided listing — the return-type line, OP dispatch and
 * several closing braces are not visible here. */
1900 Perl_do_readline(pTHX)
1902 dSP; dTARGETSTACKED;
1907 IO * const io = GvIO(PL_last_in_gv);
1908 const I32 type = PL_op->op_type;
1909 const I32 gimme = GIMME_V;
/* Tied handle: delegate to the READLINE method and copy/return its result. */
1912 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
1914 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
1915 if (gimme == G_SCALAR) {
1917 SvSetSV_nosteal(TARG, TOPs);
/* <> / <ARGV>: on first use with an empty @ARGV, fake a single "-" entry
 * (stdin) and open it. */
1927 if (IoFLAGS(io) & IOf_ARGV) {
1928 if (IoFLAGS(io) & IOf_START) {
1930 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) {
1931 IoFLAGS(io) &= ~IOf_START;
1932 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
1933 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
1934 sv_setpvs(GvSVn(PL_last_in_gv), "-");
1935 SvSETMAGIC(GvSV(PL_last_in_gv));
1940 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
1941 if (!fp) { /* Note: fp != IoIFP(io) */
1942 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
1945 else if (type == OP_GLOB)
1946 fp = Perl_start_glob(aTHX_ POPs, io);
1948 else if (type == OP_GLOB)
/* Reading from a write-only handle: warn rather than read. */
1950 else if (IoTYPE(io) == IoTYPE_WRONLY) {
1951 report_wrongway_fh(PL_last_in_gv, '>');
/* No usable fp: emit a "closed filehandle" style warning if enabled... */
1955 if ((!io || !(IoFLAGS(io) & IOf_START))
1956 && ckWARN(WARN_CLOSED)
1959 report_evil_fh(PL_last_in_gv);
1961 if (gimme == G_SCALAR) {
1962 /* undef TARG, and push that undefined value */
1963 if (type != OP_RCATLINE) {
1964 sv_setsv(TARG,NULL);
/* Pick/prepare the destination SV: TARG in scalar context (forced to a
 * plain PV; RCATLINE appends so keeps existing content), else a fresh
 * 80-byte mortal per line in list context. */
1971 if (gimme == G_SCALAR) {
1973 if (type == OP_RCATLINE && SvGMAGICAL(sv))
1976 if (type == OP_RCATLINE)
1977 SvPV_force_nomg_nolen(sv);
1981 else if (isGV_with_GP(sv)) {
1982 SvPV_force_nomg_nolen(sv);
1984 SvUPGRADE(sv, SVt_PV);
1985 tmplen = SvLEN(sv); /* remember if already alloced */
1986 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
1987 /* try short-buffering it. Please update t/op/readline.t
1988 * if you change the growth length.
1993 if (type == OP_RCATLINE && SvOK(sv)) {
1995 SvPV_force_nomg_nolen(sv);
2001 sv = sv_2mortal(newSV(80));
2005 /* This should not be marked tainted if the fp is marked clean */
2006 #define MAYBE_TAINT_LINE(io, sv) \
2007 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
2012 /* delay EOF state for a snarfed empty file */
2013 #define SNARF_EOF(gimme,rs,io,sv) \
2014 (gimme != G_SCALAR || SvCUR(sv) \
2015 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
/* Main read loop: sv_gets() fills sv; on EOF/error, either advance to the
 * next @ARGV file, report a failed glob child, or return undef/end list. */
2019 if (!sv_gets(sv, fp, offset)
2021 || SNARF_EOF(gimme, PL_rs, io, sv)
2022 || PerlIO_error(fp)))
2024 PerlIO_clearerr(fp);
2025 if (IoFLAGS(io) & IOf_ARGV) {
2026 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
2029 (void)do_close(PL_last_in_gv, FALSE);
2031 else if (type == OP_GLOB) {
2032 if (!do_close(PL_last_in_gv, FALSE)) {
2033 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
2034 "glob failed (child exited with status %d%s)",
2035 (int)(STATUS_CURRENT >> 8),
2036 (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
2039 if (gimme == G_SCALAR) {
2040 if (type != OP_RCATLINE) {
2041 SV_CHECK_THINKFIRST_COW_DROP(TARG);
2047 MAYBE_TAINT_LINE(io, sv);
2050 MAYBE_TAINT_LINE(io, sv);
2052 IoFLAGS(io) |= IOf_NOLINE;
/* OP_GLOB post-processing: strip the trailing $/ char, then lstat() the
 * result when it contains no metacharacters to drop non-matching
 * literal "wildcards". */
2056 if (type == OP_GLOB) {
2060 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
2061 char * const tmps = SvEND(sv) - 1;
2062 if (*tmps == *SvPVX_const(PL_rs)) {
2064 SvCUR_set(sv, SvCUR(sv) - 1);
2067 for (t1 = SvPVX_const(sv); *t1; t1++)
2069 if (strchr("*%?", *t1))
2071 if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
2074 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
2075 (void)POPs; /* Unmatched wildcard? Chuck it... */
2078 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
2079 if (ckWARN(WARN_UTF8)) {
2080 const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
2081 const STRLEN len = SvCUR(sv) - offset;
2084 if (!is_utf8_string_loc(s, len, &f))
2085 /* Emulate :encoding(utf8) warning in the same case. */
2086 Perl_warner(aTHX_ packWARN(WARN_UTF8),
2087 "utf8 \"\\x%02X\" does not map to Unicode",
2088 f < (U8*)SvEND(sv) ? *f : 0);
/* Buffer hygiene: shrink over-allocated list-context lines; in scalar
 * context reclaim slop only on the first allocation (tmplen == 0). */
2091 if (gimme == G_ARRAY) {
2092 if (SvLEN(sv) - SvCUR(sv) > 20) {
2093 SvPV_shrink_to_cur(sv);
2095 sv = sv_2mortal(newSV(80));
2098 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
2099 /* try to reclaim a bit of scalar space (only on 1st alloc) */
2100 const STRLEN new_len
2101 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */
2102 SvPV_renew(sv, new_len);
/* pp_helem: fetch/vivify a single hash element ($h{key}), handling lvalue
 * context, deferred lvalues (PVLV defelem proxies), local() of elements,
 * and OPpDEREF auto-vivification.
 * NOTE(review): elided listing — the PP(pp_helem) opening and several
 * braces/else-arms are not visible here. */
2113 SV * const keysv = POPs;
2114 HV * const hv = MUTABLE_HV(POPs);
2115 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2116 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2118 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2119 bool preeminent = TRUE;
2121 if (SvTYPE(hv) != SVt_PVHV)
2128 /* If we can determine whether the element exists,
2129 * Try to preserve the existenceness of a tied hash
2130 * element by using EXISTS and DELETE if possible.
2131 * Fallback to FETCH and STORE otherwise. */
2132 if (SvCANEXISTDELETE(hv))
2133 preeminent = hv_exists_ent(hv, keysv, 0);
2136 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2137 svp = he ? &HeVAL(he) : NULL;
/* Element missing (or placeholder): in deferred-lvalue mode build a PVLV
 * proxy that remembers hv+key and only vivifies on actual store. */
2139 if (!svp || !*svp || *svp == &PL_sv_undef) {
2143 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2145 lv = sv_newmortal();
2146 sv_upgrade(lv, SVt_PVLV);
2148 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
2149 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
2150 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
/* local $h{key}: save a GP for stash entries, else save the element value
 * (preeminent) or schedule deletion on scope exit. */
2156 if (HvNAME_get(hv) && isGV(*svp))
2157 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
2158 else if (preeminent)
2159 save_helem_flags(hv, keysv, svp,
2160 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
2162 SAVEHDELETE(hv, keysv);
2164 else if (PL_op->op_private & OPpDEREF) {
2165 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
2169 sv = (svp && *svp ? *svp : &PL_sv_undef);
2170 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
2171 * was to make C<local $tied{foo} = $tied{foo}> possible.
2172 * However, it seems no longer to be needed for that purpose, and
2173 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g>
2174 * would loop endlessly since the pos magic is getting set on the
2175 * mortal copy and lost. However, the copy has the effect of
2176 * triggering the get magic, and losing it altogether made things like
2177 * c<$tied{foo};> in void context no longer do get magic, which some
2178 * code relied on. Also, delayed triggering of magic on @+ and friends
2179 * meant the original regex may be out of scope by now. So as a
2180 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
2181 * being called too many times). */
2182 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
2189 /* a stripped-down version of Perl_softref2xv() for use by
2190 * pp_multideref(), which doesn't use PL_op->op_flags */
/* Converts a symbolic (string) reference into a GV of the requested type,
 * dying under "use strict 'refs'" (HINT_STRICT_REFS) or for unusable
 * (empty/undef — presumed; the guarding conditions are elided here)
 * values. Returns the fetched/created GV. */
2193 S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
2196 if (PL_op->op_private & HINT_STRICT_REFS) {
2198 Perl_die(aTHX_ PL_no_symref_sv, sv,
2199 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
2201 Perl_die(aTHX_ PL_no_usym, what);
2204 Perl_die(aTHX_ PL_no_usym, what);
/* GV_ADD: autovivify the glob so the caller can fetch its AV/HV slot. */
2205 return gv_fetchsv_nomg(sv, GV_ADD, type);
2209 /* Handle one or more aggregate derefs and array/hash indexings, e.g.
2210 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
2212 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
2213 * Each of these either contains a set of actions, or an argument, such as
2214 * an IV to use as an array index, or a lexical var to retrieve.
2215 * Several actions are stored per UV; we keep shifting new actions off the
2216 * one UV, and only reload when it becomes zero.
/* NOTE(review): elided listing — the PP(pp_multideref) opening, loop
 * braces and break statements are not visible. The visible structure is
 * one big switch over (actions & MDEREF_ACTION_MASK), repeated per
 * deref level via the shift at the bottom. */
2221 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
2222 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
2223 UV actions = items->uv;
2226 /* this tells find_uninit_var() where we're up to */
2227 PL_multideref_pc = items;
2230 /* there are three main classes of action; the first retrieve
2231 * the initial AV or HV from a variable or the stack; the second
2232 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
2233 * the third an unrolled (/DREFHV, rv2hv, helem).
2235 switch (actions & MDEREF_ACTION_MASK) {
2238 actions = (++items)->uv;
2241 case MDEREF_AV_padav_aelem: /* $lex[...] */
2242 sv = PAD_SVl((++items)->pad_offset);
2245 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
2246 sv = UNOP_AUX_item_sv(++items);
2247 assert(isGV_with_GP(sv));
2248 sv = (SV*)GvAVn((GV*)sv);
2251 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
2256 goto do_AV_rv2av_aelem;
2259 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
2260 sv = UNOP_AUX_item_sv(++items);
2261 assert(isGV_with_GP(sv));
2262 sv = GvSVn((GV*)sv);
2263 goto do_AV_vivify_rv2av_aelem;
2265 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
2266 sv = PAD_SVl((++items)->pad_offset);
2269 do_AV_vivify_rv2av_aelem:
2270 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
2271 /* this is the OPpDEREF action normally found at the end of
2272 * ops like aelem, helem, rv2sv */
2273 sv = vivify_ref(sv, OPpDEREF_AV);
2277 /* this is basically a copy of pp_rv2av when it just has the
2280 if (LIKELY(SvROK(sv))) {
2281 if (UNLIKELY(SvAMAGIC(sv))) {
2282 sv = amagic_deref_call(sv, to_av_amg);
2285 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
2286 DIE(aTHX_ "Not an ARRAY reference");
2288 else if (SvTYPE(sv) != SVt_PVAV) {
2289 if (!isGV_with_GP(sv))
2290 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
2291 sv = MUTABLE_SV(GvAVn((GV*)sv));
2297 /* retrieve the key; this may be either a lexical or package
2298 * var (whose index/ptr is stored as an item) or a signed
2299 * integer constant stored as an item.
2302 IV elem = 0; /* to shut up stupid compiler warnings */
2305 assert(SvTYPE(sv) == SVt_PVAV);
2307 switch (actions & MDEREF_INDEX_MASK) {
2308 case MDEREF_INDEX_none:
2310 case MDEREF_INDEX_const:
2311 elem = (++items)->iv;
2313 case MDEREF_INDEX_padsv:
2314 elemsv = PAD_SVl((++items)->pad_offset);
2316 case MDEREF_INDEX_gvsv:
2317 elemsv = UNOP_AUX_item_sv(++items);
2318 assert(isGV_with_GP(elemsv));
2319 elemsv = GvSVn((GV*)elemsv);
2321 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
2322 && ckWARN(WARN_MISC)))
2323 Perl_warner(aTHX_ packWARN(WARN_MISC),
2324 "Use of reference \"%"SVf"\" as array index",
2326 /* the only time that S_find_uninit_var() needs this
2327 * is to determine which index value triggered the
2328 * undef warning. So just update it here. Note that
2329 * since we don't save and restore this var (e.g. for
2330 * tie or overload execution), its value will be
2331 * meaningless apart from just here */
2332 PL_multideref_pc = items;
2333 elem = SvIV(elemsv);
2338 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
2340 if (!(actions & MDEREF_FLAG_last)) {
2341 SV** svp = av_fetch((AV*)sv, elem, 1);
2342 if (!svp || ! (sv=*svp))
2343 DIE(aTHX_ PL_no_aelem, elem);
/* Last deref level: handle exists()/delete() variants inline. */
2347 if (PL_op->op_private &
2348 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2350 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2351 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
2354 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2355 sv = av_delete((AV*)sv, elem, discard);
/* Plain last-level fetch: same lval/defer/local handling as pp_aelem. */
2363 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2364 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2365 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2366 bool preeminent = TRUE;
2367 AV *const av = (AV*)sv;
2370 if (UNLIKELY(localizing)) {
2374 /* If we can determine whether the element exists,
2375 * Try to preserve the existenceness of a tied array
2376 * element by using EXISTS and DELETE if possible.
2377 * Fallback to FETCH and STORE otherwise. */
2378 if (SvCANEXISTDELETE(av))
2379 preeminent = av_exists(av, elem);
2382 svp = av_fetch(av, elem, lval && !defer);
2385 if (!svp || !(sv = *svp)) {
2388 DIE(aTHX_ PL_no_aelem, elem);
2389 len = av_tindex(av);
2390 sv = sv_2mortal(newSVavdefelem(av,
2391 /* Resolve a negative index now, unless it points
2392 * before the beginning of the array, in which
2393 * case record it for error reporting in
2394 * magic_setdefelem. */
2395 elem < 0 && len + elem >= 0
2396 ? len + elem : elem, 1));
2399 if (UNLIKELY(localizing)) {
2401 save_aelem(av, elem, svp);
2402 sv = *svp; /* may have changed */
2405 SAVEADELETE(av, elem);
2410 sv = (svp ? *svp : &PL_sv_undef);
2411 /* see note in pp_helem() */
2412 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
/* --- hash actions: mirror of the AV cases above, for %h / ->{...} --- */
2429 case MDEREF_HV_padhv_helem: /* $lex{...} */
2430 sv = PAD_SVl((++items)->pad_offset);
2433 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
2434 sv = UNOP_AUX_item_sv(++items);
2435 assert(isGV_with_GP(sv));
2436 sv = (SV*)GvHVn((GV*)sv);
2439 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
2444 goto do_HV_rv2hv_helem;
2447 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
2448 sv = UNOP_AUX_item_sv(++items);
2449 assert(isGV_with_GP(sv));
2450 sv = GvSVn((GV*)sv);
2451 goto do_HV_vivify_rv2hv_helem;
2453 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
2454 sv = PAD_SVl((++items)->pad_offset);
2457 do_HV_vivify_rv2hv_helem:
2458 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
2459 /* this is the OPpDEREF action normally found at the end of
2460 * ops like aelem, helem, rv2sv */
2461 sv = vivify_ref(sv, OPpDEREF_HV);
2465 /* this is basically a copy of pp_rv2hv when it just has the
2466 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
2469 if (LIKELY(SvROK(sv))) {
2470 if (UNLIKELY(SvAMAGIC(sv))) {
2471 sv = amagic_deref_call(sv, to_hv_amg);
2474 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
2475 DIE(aTHX_ "Not a HASH reference");
2477 else if (SvTYPE(sv) != SVt_PVHV) {
2478 if (!isGV_with_GP(sv))
2479 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
2480 sv = MUTABLE_SV(GvHVn((GV*)sv));
2486 /* retrieve the key; this may be either a lexical / package
2487 * var or a string constant, whose index/ptr is stored as an
2490 SV *keysv = NULL; /* to shut up stupid compiler warnings */
2492 assert(SvTYPE(sv) == SVt_PVHV);
2494 switch (actions & MDEREF_INDEX_MASK) {
2495 case MDEREF_INDEX_none:
2498 case MDEREF_INDEX_const:
2499 keysv = UNOP_AUX_item_sv(++items);
2502 case MDEREF_INDEX_padsv:
2503 keysv = PAD_SVl((++items)->pad_offset);
2506 case MDEREF_INDEX_gvsv:
2507 keysv = UNOP_AUX_item_sv(++items);
2508 keysv = GvSVn((GV*)keysv);
2512 /* see comment above about setting this var */
2513 PL_multideref_pc = items;
2516 /* ensure that candidate CONSTs have been HEKified */
2517 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
2518 || SvTYPE(keysv) >= SVt_PVMG
2521 || SvIsCOW_shared_hash(keysv));
2523 /* this is basically a copy of pp_helem with OPpDEREF skipped */
2525 if (!(actions & MDEREF_FLAG_last)) {
2526 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
2527 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
2528 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
/* Last deref level: exists()/delete() variants, as in the AV branch. */
2532 if (PL_op->op_private &
2533 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
2535 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
2536 sv = hv_exists_ent((HV*)sv, keysv, 0)
2537 ? &PL_sv_yes : &PL_sv_no;
2540 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
2541 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
2549 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
2550 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
2551 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
2552 bool preeminent = TRUE;
2554 HV * const hv = (HV*)sv;
2557 if (UNLIKELY(localizing)) {
2561 /* If we can determine whether the element exists,
2562 * Try to preserve the existenceness of a tied hash
2563 * element by using EXISTS and DELETE if possible.
2564 * Fallback to FETCH and STORE otherwise. */
2565 if (SvCANEXISTDELETE(hv))
2566 preeminent = hv_exists_ent(hv, keysv, 0);
2569 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
2570 svp = he ? &HeVAL(he) : NULL;
/* Missing element under deferred lvalue: PVLV defelem proxy, as in
 * pp_helem(). */
2574 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
2578 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
2579 lv = sv_newmortal();
2580 sv_upgrade(lv, SVt_PVLV);
2582 sv_magic(lv, key2 = newSVsv(keysv),
2583 PERL_MAGIC_defelem, NULL, 0);
2584 /* sv_magic() increments refcount */
2585 SvREFCNT_dec_NN(key2);
2586 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
2592 if (HvNAME_get(hv) && isGV(sv))
2593 save_gp(MUTABLE_GV(sv),
2594 !(PL_op->op_flags & OPf_SPECIAL));
2595 else if (preeminent) {
2596 save_helem_flags(hv, keysv, svp,
2597 (PL_op->op_flags & OPf_SPECIAL)
2598 ? 0 : SAVEf_SETMAGIC);
2599 sv = *svp; /* may have changed */
2602 SAVEHDELETE(hv, keysv);
2607 sv = (svp && *svp ? *svp : &PL_sv_undef);
2608 /* see note in pp_helem() */
2609 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
/* Consume this level's action bits and loop for the next deref level. */
2618 actions >>= MDEREF_SHIFT;
/* pp_iter: advance a foreach loop one step — set the iteration variable
 * (*itersvp) to the next value and (presumably, in elided code) return
 * the and/leave target when the range or array is exhausted.
 * NOTE(review): elided listing — the PP(pp_iter) opening, RETPUSHNO/YES
 * lines and several braces are not visible here. */
2632 cx = &cxstack[cxstack_ix];
2633 itersvp = CxITERVAR(cx);
2635 switch (CxTYPE(cx)) {
2637 case CXt_LOOP_LAZYSV: /* string increment */
2639 SV* cur = cx->blk_loop.state_u.lazysv.cur;
2640 SV *end = cx->blk_loop.state_u.lazysv.end;
2641 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
2642 It has SvPVX of "" and SvCUR of 0, which is what we want. */
2644 const char *max = SvPV_const(end, maxlen);
2645 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
/* Reuse the existing iterator SV only when nothing else can see it
 * (refcount 1, no magic); otherwise allocate fresh so closures keep
 * their own copy. */
2649 if (LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2650 /* safe to reuse old SV */
2651 sv_setsv(oldsv, cur);
2655 /* we need a fresh SV every time so that loop body sees a
2656 * completely new SV for closures/references to work as
2658 *itersvp = newSVsv(cur);
2659 SvREFCNT_dec_NN(oldsv);
2661 if (strEQ(SvPVX_const(cur), max))
2662 sv_setiv(cur, 0); /* terminate next time */
2668 case CXt_LOOP_LAZYIV: /* integer increment */
2670 IV cur = cx->blk_loop.state_u.lazyiv.cur;
2671 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
2675 /* don't risk potential race */
2676 if (LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
2677 /* safe to reuse old SV */
2678 sv_setiv(oldsv, cur);
2682 /* we need a fresh SV every time so that loop body sees a
2683 * completely new SV for closures/references to work as they
2685 *itersvp = newSViv(cur);
2686 SvREFCNT_dec_NN(oldsv);
2689 if (UNLIKELY(cur == IV_MAX)) {
2690 /* Handle end of range at IV_MAX */
/* setting end = IV_MIN makes the next "cur > end" test true without
 * overflowing cur itself */
2691 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
2693 ++cx->blk_loop.state_u.lazyiv.cur;
2697 case CXt_LOOP_FOR: /* iterate array */
2700 AV *av = cx->blk_loop.state_u.ary.ary;
2702 bool av_is_stack = FALSE;
/* Bump the index in the requested direction; bounds differ for a real
 * AV vs. a list left on the stack (av_is_stack). */
2709 if (PL_op->op_private & OPpITER_REVERSED) {
2710 ix = --cx->blk_loop.state_u.ary.ix;
2711 if (UNLIKELY(ix <= (av_is_stack ? cx->blk_loop.resetsp : -1)))
2715 ix = ++cx->blk_loop.state_u.ary.ix;
2716 if (UNLIKELY(ix > (av_is_stack ? cx->blk_oldsp : AvFILL(av))))
2720 if (UNLIKELY(SvMAGICAL(av) || AvREIFY(av))) {
2721 SV * const * const svp = av_fetch(av, ix, FALSE);
2722 sv = svp ? *svp : NULL;
2725 sv = AvARRAY(av)[ix];
/* foreach my \$x (...) — alias the lvalue-ref form via set-magic. */
2728 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
2729 SvSetMagicSV(*itersvp, sv);
2734 if (UNLIKELY(SvIS_FREED(sv))) {
2736 Perl_croak(aTHX_ "Use of freed value in iteration");
2743 SvREFCNT_inc_simple_void_NN(sv);
/* Hole in a real array: use a deferred-element proxy so assignment
 * through the loop var vivifies the slot. */
2746 else if (!av_is_stack) {
2747 sv = newSVavdefelem(av, ix, 0);
2754 SvREFCNT_dec(oldsv);
2759 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
2765 A description of how taint works in pattern matching and substitution.
2767 This is all conditional on NO_TAINT_SUPPORT not being defined. Under
2768 NO_TAINT_SUPPORT, taint-related operations should become no-ops.
2770 While the pattern is being assembled/concatenated and then compiled,
2771 PL_tainted will get set (via TAINT_set) if any component of the pattern
2772 is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
2773 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
2774 TAINT_get). It will also be set if any component of the pattern matches
2775 based on locale-dependent behavior.
2777 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
2778 the pattern is marked as tainted. This means that subsequent usage, such
2779 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
2780 on the new pattern too.
2782 RXf_TAINTED_SEEN is used post-execution by the get magic code
2783 of $1 et al to indicate whether the returned value should be tainted.
2784 It is the responsibility of the caller of the pattern (i.e. pp_match,
2785 pp_subst etc) to set this flag for any other circumstances where $1 needs
      to be tainted.
2788 The taint behaviour of pp_subst (and pp_substcont) is quite complex.
2790 There are three possible sources of taint
2792 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
2793 * the replacement string (or expression under /e)
2795 There are four destinations of taint and they are affected by the sources
2796 according to the rules below:
2798 * the return value (not including /r):
2799 tainted by the source string and pattern, but only for the
2800 number-of-iterations case; boolean returns aren't tainted;
2801 * the modified string (or modified copy under /r):
2802 tainted by the source string, pattern, and replacement strings;
2804 * $1 et al: tainted by the pattern, and under 'use re "taint"', by the source
      string too;
2806 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
2807 should always be unset before executing subsequent code.
2809 The overall action of pp_subst is:
2811 * at the start, set bits in rxtainted indicating the taint status of
2812 the various sources.
2814 * After each pattern execution, update the SUBST_TAINT_PAT bit in
2815 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
2816 pattern has subsequently become tainted via locale ops.
2818 * If control is being passed to pp_substcont to execute a /e block,
2819 save rxtainted in the CXt_SUBST block, for future use by
2822 * Whenever control is being returned to perl code (either by falling
2823 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
2824 use the flag bits in rxtainted to make all the appropriate types of
2825 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
2826 et al will appear tainted.
2828 pp_match is just a simpler version of the above.
2844 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
2845 See "how taint works" above */
2848 REGEXP *rx = PM_GETRE(pm);
2850 int force_on_match = 0;
2851 const I32 oldsave = PL_savestack_ix;
2853 bool doutf8 = FALSE; /* whether replacement is in utf8 */
2858 /* known replacement string? */
2859 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
2863 if (PL_op->op_flags & OPf_STACKED)
2872 SvGETMAGIC(TARG); /* must come before cow check */
2874 /* Awooga. Awooga. "bool" types that are actually char are dangerous,
2875 because they make integers such as 256 "false". */
2876 is_cow = SvIsCOW(TARG) ? TRUE : FALSE;
2879 sv_force_normal_flags(TARG,0);
2881 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)
2882 && (SvREADONLY(TARG)
2883 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
2884 || SvTYPE(TARG) > SVt_PVLV)
/* Continuation of pp_subst (the s/// operator) acting on TARG.
 * Visible here: read-only croak, taint bookkeeping via the
 * SUBST_TAINT_* bits, the first regexp match, the in-place
 * substitution fast path (constant replacement that fits in the
 * existing buffer), and the general path that builds the result in
 * a fresh SV (dstr), plus the final taint propagation.
 * NOTE(review): this excerpt has elided lines; comments below
 * describe only the code that is visible. */
2885 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
2886 Perl_croak_no_modify();
2889 orig = SvPV_nomg(TARG, len);
2890 /* note we don't (yet) force the var into being a string; if we fail
2891 * to match, we leave as-is; on successful match however, we *will*
2892 * coerce into a string, then repeat the match */
2893 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
2896 /* only replace once? */
2897 once = !(rpm->op_pmflags & PMf_GLOBAL);
2899 /* See "how taint works" above */
2902 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
2903 | (RX_ISTAINTED(rx) ? SUBST_TAINT_PAT : 0)
2904 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
2905 | ((once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
2906 ? SUBST_TAINT_BOOLRET : 0));
2912 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
2914 strend = orig + len;
/* slen is a character count under UTF-8, a byte count otherwise */
2915 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len;
2916 maxiters = 2 * slen + 10; /* We can match twice at each
2917 position, once with zero-length,
2918 second time with non-zero. */
2920 if (!RX_PRELEN(rx) && PL_curpm
2921 && !ReANY(rx)->mother_re) {
2926 #ifdef PERL_SAWAMPERSAND
2927 r_flags = ( RX_NPARENS(rx)
2929 || (RX_EXTFLAGS(rx) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
2930 || (rpm->op_pmflags & PMf_KEEPCOPY)
2935 r_flags = REXEC_COPY_STR;
/* first match attempt; on failure, push the no-substitution result
 * (TARG itself for s///r, false otherwise) and leave */
2938 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
2941 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
2942 LEAVE_SCOPE(oldsave);
2947 /* known replacement string? */
2949 /* replacement needing upgrading? */
2950 if (DO_UTF8(TARG) && !doutf8) {
2951 nsv = sv_newmortal();
2954 sv_recode_to_utf8(nsv, _get_encoding());
2956 sv_utf8_upgrade(nsv);
2957 c = SvPV_const(nsv, clen);
2961 c = SvPV_const(dstr, clen);
2962 doutf8 = DO_UTF8(dstr);
2965 if (SvTAINTED(dstr))
2966 rxtainted |= SUBST_TAINT_REPL;
2973 /* can do inplace substitution? */
2978 && (I32)clen <= RX_MINLENRET(rx)
2980 || !(r_flags & REXEC_COPY_STR)
2981 || (!SvGMAGICAL(dstr) && !(RX_EXTFLAGS(rx) & RXf_EVAL_SEEN))
2983 && !(RX_EXTFLAGS(rx) & RXf_NO_INPLACE_SUBST)
2984 && (!doutf8 || SvUTF8(TARG))
2985 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
2989 if (SvIsCOW(TARG)) {
2990 if (!force_on_match)
2992 assert(SvVOK(TARG));
2995 if (force_on_match) {
2996 /* redo the first match, this time with the orig var
2997 * forced into being a string */
2999 orig = SvPV_force_nomg(TARG, len);
3005 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3006 rxtainted |= SUBST_TAINT_PAT;
/* single in-place replacement: splice c (length clen) over the
 * matched span [start, end), shifting whichever tail is cheaper */
3007 m = orig + RX_OFFS(rx)[0].start;
3008 d = orig + RX_OFFS(rx)[0].end;
3010 if (m - s > strend - d) { /* faster to shorten from end */
3013 Copy(c, m, clen, char);
3018 Move(d, m, i, char);
3022 SvCUR_set(TARG, m - s);
3024 else { /* faster from front */
3028 Move(s, d - i, i, char);
3031 Copy(c, d, clen, char);
/* global in-place loop: d tracks the write position, s the read
 * position; iters is bounded by maxiters to catch runaway matches */
3038 d = s = RX_OFFS(rx)[0].start + orig;
3041 if (UNLIKELY(iters++ > maxiters))
3042 DIE(aTHX_ "Substitution loop");
3043 if (UNLIKELY(RX_MATCH_TAINTED(rx))) /* run time pattern taint, eg locale */
3044 rxtainted |= SUBST_TAINT_PAT;
3045 m = RX_OFFS(rx)[0].start + orig;
3048 Move(s, d, i, char);
3052 Copy(c, d, clen, char);
3055 s = RX_OFFS(rx)[0].end + orig;
3056 } while (CALLREGEXEC(rx, s, strend, orig,
3057 s == m, /* don't match same null twice */
3059 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3062 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
3063 Move(s, d, i+1, char); /* include the NUL */
/* general (non-in-place) path: accumulate the result in dstr */
3073 if (force_on_match) {
3074 /* redo the first match, this time with the orig var
3075 * forced into being a string */
3077 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3078 /* I feel that it should be possible to avoid this mortal copy
3079 given that the code below copies into a new destination.
3080 However, I suspect it isn't worth the complexity of
3081 unravelling the C<goto force_it> for the small number of
3082 cases where it would be viable to drop into the copy code. */
3083 TARG = sv_2mortal(newSVsv(TARG));
3085 orig = SvPV_force_nomg(TARG, len);
3091 if (RX_MATCH_TAINTED(rx)) /* run time pattern taint, eg locale */
3092 rxtainted |= SUBST_TAINT_PAT;
3094 s = RX_OFFS(rx)[0].start + orig;
3095 dstr = newSVpvn_flags(orig, s-orig,
3096 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
3101 /* note that a whole bunch of local vars are saved here for
3102 * use by pp_substcont: here's a list of them in case you're
3103 * searching for places in this sub that uses a particular var:
3104 * iters maxiters r_flags oldsave rxtainted orig dstr targ
3105 * s m strend rx once */
/* s///e: hand control to the replacement expression's ops;
 * pp_substcont resumes the loop after each evaluation */
3107 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
3111 if (UNLIKELY(iters++ > maxiters))
3112 DIE(aTHX_ "Substitution loop");
3113 if (UNLIKELY(RX_MATCH_TAINTED(rx)))
3114 rxtainted |= SUBST_TAINT_PAT;
/* the regexp engine may have copied the subject string; re-anchor
 * our pointers into the engine's buffer */
3115 if (RX_MATCH_COPIED(rx) && RX_SUBBEG(rx) != orig) {
3117 char *old_orig = orig;
3118 assert(RX_SUBOFFSET(rx) == 0);
3120 orig = RX_SUBBEG(rx);
3121 s = orig + (old_s - old_orig);
3122 strend = s + (strend - old_s);
3124 m = RX_OFFS(rx)[0].start + orig;
3125 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
3126 s = RX_OFFS(rx)[0].end + orig;
3128 /* replacement already stringified */
3130 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
3135 if (!nsv) nsv = sv_newmortal();
3136 sv_copypv(nsv, repl);
3137 if (!DO_UTF8(nsv)) sv_recode_to_utf8(nsv, _get_encoding());
3138 sv_catsv(dstr, nsv);
3140 else sv_catsv(dstr, repl);
3141 if (UNLIKELY(SvTAINTED(repl)))
3142 rxtainted |= SUBST_TAINT_REPL;
3146 } while (CALLREGEXEC(rx, s, strend, orig,
3147 s == m, /* Yields minend of 0 or 1 */
3149 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
3150 assert(strend >= s);
3151 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
3153 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
3154 /* From here on down we're using the copy, and leaving the original
3161 /* The match may make the string COW. If so, brilliant, because
3162 that's just saved us one malloc, copy and free - the regexp has
3163 donated the old buffer, and we malloc an entirely new one, rather
3164 than the regexp malloc()ing a buffer and copying our original,
3165 only for us to throw it away here during the substitution. */
3166 if (SvIsCOW(TARG)) {
3167 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
/* steal dstr's buffer into TARG instead of copying it */
3173 SvPV_set(TARG, SvPVX(dstr));
3174 SvCUR_set(TARG, SvCUR(dstr));
3175 SvLEN_set(TARG, SvLEN(dstr));
3176 SvFLAGS(TARG) |= SvUTF8(dstr);
3177 SvPV_set(dstr, NULL);
3184 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
3185 (void)SvPOK_only_UTF8(TARG);
3188 /* See "how taint works" above */
3190 if ((rxtainted & SUBST_TAINT_PAT) ||
3191 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
3192 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
3194 (RX_MATCH_TAINTED_on(rx)); /* taint $1 et al */
3196 if (!(rxtainted & SUBST_TAINT_BOOLRET)
3197 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
3199 SvTAINTED_on(TOPs); /* taint return value */
3201 SvTAINTED_off(TOPs); /* may have got tainted earlier */
3203 /* needed for mg_set below */
3205 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
3209 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
3211 LEAVE_SCOPE(oldsave);
/* Body of pp_grepwhile: runs after each evaluation of grep's BLOCK.
 * A true result keeps the current source element by copying it down
 * to the destination mark.  When the source list is exhausted, the
 * three marks (src, dst, original) are popped and either the kept
 * items (list context) or their count (scalar context) are returned;
 * otherwise $_ is set up for the next element and control loops back
 * via the op's op_other pointer.
 * NOTE(review): excerpt with elided lines. */
3220 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
3221 ++*PL_markstack_ptr;
3223 LEAVE_with_name("grep_item"); /* exit inner scope */
/* all srcs processed? */
3226 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
3228 const I32 gimme = GIMME_V;
3230 LEAVE_with_name("grep"); /* exit outer scope */
3231 (void)POPMARK; /* pop src */
3232 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
3233 (void)POPMARK; /* pop dst */
3234 SP = PL_stack_base + POPMARK; /* pop original mark */
3235 if (gimme == G_SCALAR) {
3239 else if (gimme == G_ARRAY)
3246 ENTER_with_name("grep_item"); /* enter inner scope */
/* next source element; PADTMPs must be copied so each iteration
 * sees a stable value */
3249 src = PL_stack_base[TOPMARK];
3250 if (SvPADTMP(src)) {
3251 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
3257 RETURNOP(cLOGOP->op_other);
/* Body of pp_leavesub: return from a Perl-level sub.  Return values
 * between the old mark and SP are copied or mortalized as required
 * by context (gimme) so they survive the FREETMPS/POPSUB below; the
 * SvTEMP/refcount==1/non-magical tests identify values that can be
 * passed through without a copy.  Finally the sub context is popped
 * and control transfers to the op saved in blk_sub.retop.
 * NOTE(review): excerpt with elided lines. */
3271 if (CxMULTICALL(&cxstack[cxstack_ix])) {
3272 /* entry zero of a stack is always PL_sv_undef, which
3273 * simplifies converting a '()' return into undef in scalar context */
3274 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
3279 cxstack_ix++; /* temporarily protect top context */
3282 if (gimme == G_SCALAR) {
3284 if (LIKELY(MARK <= SP)) {
/* recursive sub: values may live in a pad that is about to be
 * popped, so they need protecting */
3285 if (cx->blk_sub.cv && CvDEPTH(cx->blk_sub.cv) > 1) {
3286 if (SvTEMP(TOPs) && SvREFCNT(TOPs) == 1
3287 && !SvMAGICAL(TOPs)) {
3288 *MARK = SvREFCNT_inc(TOPs);
3293 sv = SvREFCNT_inc(TOPs); /* FREETMPS could clobber it */
3295 *MARK = sv_mortalcopy(sv);
3296 SvREFCNT_dec_NN(sv);
3299 else if (SvTEMP(TOPs) && SvREFCNT(TOPs) == 1
3300 && !SvMAGICAL(TOPs)) {
3304 *MARK = sv_mortalcopy(TOPs);
/* empty return list in scalar context yields undef */
3308 *MARK = &PL_sv_undef;
3312 else if (gimme == G_ARRAY) {
3313 for (MARK = newsp + 1; MARK <= SP; MARK++) {
3314 if (!SvTEMP(*MARK) || SvREFCNT(*MARK) != 1
3315 || SvMAGICAL(*MARK)) {
3316 *MARK = sv_mortalcopy(*MARK);
3317 TAINT_NOT; /* Each item is independent */
3324 POPSUB(cx,sv); /* Stack values are safe: release CV and @_ ... */
3326 PL_curpm = newpm; /* ... and pop $1 et al */
3329 return cx->blk_sub.retop;
/* Body of pp_entersub: call a subroutine.  First resolves the callee
 * SV on the stack to a CV: through a GV (the overwhelmingly common
 * case), a code ref, overload (amagic) dereference, or - when
 * "strict refs" is off - a symbolic name.  Undefined subs trigger
 * AUTOLOAD or die; under the debugger, DB::sub/DB::lsub is
 * substituted.  Then either a CXt_SUB context is pushed and @_ is
 * populated for a pure-Perl sub, or the XSUB's C function is called
 * directly.  NOTE(review): excerpt with elided lines. */
3339 const bool hasargs = (PL_op->op_flags & OPf_STACKED) != 0;
3342 DIE(aTHX_ "Not a CODE reference");
3343 /* This is overwhelmingly the most common case: */
3344 if (!LIKELY(SvTYPE(sv) == SVt_PVGV && (cv = GvCVu((const GV *)sv)))) {
3345 switch (SvTYPE(sv)) {
3348 if (!(cv = GvCVu((const GV *)sv))) {
3350 cv = sv_2cv(sv, &stash, &gv, 0);
3358 if(isGV_with_GP(sv)) goto we_have_a_glob;
3361 if (sv == &PL_sv_yes) { /* unfound import, ignore */
3363 SP = PL_stack_base + POPMARK;
3366 if (GIMME_V == G_SCALAR)
3367 PUSHs(&PL_sv_undef);
3373 sv = amagic_deref_call(sv, to_cv_amg);
3374 /* Don't SPAGAIN here. */
/* symbolic call: only allowed without "strict refs" */
3381 DIE(aTHX_ PL_no_usym, "a subroutine");
3382 sym = SvPV_nomg_const(sv, len);
3383 if (PL_op->op_private & HINT_STRICT_REFS)
3384 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
3385 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
3388 cv = MUTABLE_CV(SvRV(sv));
3389 if (SvTYPE(cv) == SVt_PVCV)
3394 DIE(aTHX_ "Not a CODE reference");
3395 /* This is the second most common case: */
3397 cv = MUTABLE_CV(sv);
3405 if (UNLIKELY(CvCLONE(cv) && ! CvCLONED(cv)))
3406 DIE(aTHX_ "Closure prototype called");
3407 if (UNLIKELY(!CvROOT(cv) && !CvXSUB(cv))) {
3411 /* anonymous or undef'd function leaves us no recourse */
3412 if (CvLEXICAL(cv) && CvHASGV(cv))
3413 DIE(aTHX_ "Undefined subroutine &%"SVf" called",
3414 SVfARG(cv_name(cv, NULL, 0)));
3415 if (CvANON(cv) || !CvHASGV(cv)) {
3416 DIE(aTHX_ "Undefined subroutine called");
3419 /* autoloaded stub? */
3420 if (cv != GvCV(gv = CvGV(cv))) {
3423 /* should call AUTOLOAD now? */
3426 if ((autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
3427 GvNAMEUTF8(gv) ? SVf_UTF8 : 0)))
3433 sub_name = sv_newmortal();
3434 gv_efullname3(sub_name, gv, NULL);
3435 DIE(aTHX_ "Undefined subroutine &%"SVf" called", SVfARG(sub_name));
/* debugger hook: route the call through DB::sub (or DB::lsub for
 * lvalue subs) */
3443 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
3446 Perl_get_db_sub(aTHX_ &sv, cv);
3448 PL_curcopdb = PL_curcop;
3450 /* check for lsub that handles lvalue subroutines */
3451 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
3452 /* if lsub not found then fall back to DB::sub */
3453 if (!cv) cv = GvCV(PL_DBsub);
3455 cv = GvCV(PL_DBsub);
3458 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
3459 DIE(aTHX_ "No DB::sub routine defined");
/* pure-Perl sub: push a CXt_SUB context and enter its optree */
3464 if (!(CvISXSUB(cv))) {
3465 /* This path taken at least 75% of the time */
3467 PADLIST * const padlist = CvPADLIST(cv);
3470 PUSHBLOCK(cx, CXt_SUB, MARK);
3472 cx->blk_sub.retop = PL_op->op_next;
/* recursion: make sure the pad has a frame for this depth */
3473 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2)) {
3474 PERL_STACK_OVERFLOW_CHECK();
3475 pad_push(padlist, depth);
3478 PAD_SET_CUR_NOSAVE(padlist, depth);
3479 if (LIKELY(hasargs)) {
/* pad slot 0 is the sub's @_ array */
3480 AV *const av = MUTABLE_AV(PAD_SVl(0));
3484 if (UNLIKELY(AvREAL(av))) {
3485 /* @_ is normally not REAL--this should only ever
3486 * happen when DB::sub() calls things that modify @_ */
3491 defavp = &GvAV(PL_defgv);
3492 cx->blk_sub.savearray = *defavp;
3493 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
3494 CX_CURPAD_SAVE(cx->blk_sub);
3495 cx->blk_sub.argarray = av;
3498 if (UNLIKELY(items - 1 > AvMAX(av))) {
3499 SV **ary = AvALLOC(av);
3500 AvMAX(av) = items - 1;
3501 Renew(ary, items, SV*);
/* alias the stacked args into @_ without copying */
3506 Copy(MARK+1,AvARRAY(av),items,SV*);
3507 AvFILLp(av) = items - 1;
3513 if (SvPADTMP(*MARK)) {
3514 *MARK = sv_mortalcopy(*MARK);
3522 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3524 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3525 SVfARG(cv_name(cv, NULL, 0)));
3526 /* warning must come *after* we fully set up the context
3527 * stuff so that __WARN__ handlers can safely dounwind()
3530 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
3531 && ckWARN(WARN_RECURSION)
3532 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
3533 sub_crush_depth(cv);
3534 RETURNOP(CvSTART(cv));
/* XSUB path: marshal arguments and call the C function directly */
3537 SSize_t markix = TOPMARK;
3542 if (UNLIKELY(((PL_op->op_private
3543 & PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
3544 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
3546 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%"SVf,
3547 SVfARG(cv_name(cv, NULL, 0)));
3549 if (UNLIKELY(!hasargs && GvAV(PL_defgv))) {
3550 /* Need to copy @_ to stack. Alternative may be to
3551 * switch stack to @_, and copy return values
3552 * back. This would allow popping @_ in XSUB, e.g.. XXXX */
3553 AV * const av = GvAV(PL_defgv);
3554 const SSize_t items = AvFILL(av) + 1;
3558 const bool m = cBOOL(SvRMAGICAL(av));
3559 /* Mark is at the end of the stack. */
3561 for (; i < items; ++i)
3565 SV ** const svp = av_fetch(av, i, 0);
3566 sv = svp ? *svp : NULL;
3568 else sv = AvARRAY(av)[i];
3569 if (sv) SP[i+1] = sv;
/* hole in @_: push a deferred-element SV in its place */
3571 SP[i+1] = newSVavdefelem(av, i, 1);
3579 SV **mark = PL_stack_base + markix;
3580 SSize_t items = SP - mark;
3583 if (*mark && SvPADTMP(*mark)) {
3584 *mark = sv_mortalcopy(*mark);
3588 /* We assume first XSUB in &DB::sub is the called one. */
3589 if (UNLIKELY(PL_curcopdb)) {
3590 SAVEVPTR(PL_curcop);
3591 PL_curcop = PL_curcopdb;
3594 /* Do we need to open block here? XXXX */
3596 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
3598 CvXSUB(cv)(aTHX_ cv);
3600 /* Enforce some sanity in scalar context. */
3601 if (gimme == G_SCALAR) {
3602 SV **svp = PL_stack_base + markix + 1;
3603 if (svp != PL_stack_sp) {
3604 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
/* Emit the "Deep recursion" warning for cv; called from pp_entersub
 * when CvDEPTH reaches PERL_SUB_DEPTH_WARN.  Anonymous subs get a
 * generic message since they have no name to report.
 * NOTE(review): excerpt with elided lines (the CvANON test between
 * the two Perl_warner calls is not visible here). */
3614 Perl_sub_crush_depth(pTHX_ CV *cv)
3616 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
3619 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
3621 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%"SVf"\"",
3622 SVfARG(cv_name(cv,NULL,0)));
/* Body of pp_aelem: fetch $array[$elem].  Handles lvalue context
 * (possibly deferred via a "defelem" SV when the element does not
 * exist yet), local()ization - preserving existence for tied arrays
 * where EXISTS/DELETE are available - OPpDEREF autovivification,
 * and the reference-used-as-index warning.
 * NOTE(review): excerpt with elided lines. */
3630 SV* const elemsv = POPs;
3631 IV elem = SvIV(elemsv);
3632 AV *const av = MUTABLE_AV(POPs);
3633 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
3634 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
3635 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
3636 bool preeminent = TRUE;
3639 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
3640 Perl_warner(aTHX_ packWARN(WARN_MISC),
3641 "Use of reference \"%"SVf"\" as array index",
3643 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
3646 if (UNLIKELY(localizing)) {
3650 /* If we can determine whether the element exists,
3651 * try to preserve the existence of a tied array
3652 * element by using EXISTS and DELETE if possible.
3653 * Fall back to FETCH and STORE otherwise. */
3654 if (SvCANEXISTDELETE(av))
3655 preeminent = av_exists(av, elem);
3658 svp = av_fetch(av, elem, lval && !defer);
3660 #ifdef PERL_MALLOC_WRAP
/* clamp the index to a sane IV before the OOM wrap check */
3661 if (SvUOK(elemsv)) {
3662 const UV uv = SvUV(elemsv);
3663 elem = uv > IV_MAX ? IV_MAX : uv;
3665 else if (SvNOK(elemsv))
3666 elem = (IV)SvNV(elemsv);
3668 static const char oom_array_extend[] =
3669 "Out of memory during array extend"; /* Duplicated in av.c */
3670 MEM_WRAP_CHECK_1(elem,SV*,oom_array_extend);
/* element doesn't exist (yet): deferred lvalue gets a defelem SV */
3673 if (!svp || !*svp) {
3676 DIE(aTHX_ PL_no_aelem, elem);
3677 len = av_tindex(av);
3678 mPUSHs(newSVavdefelem(av,
3679 /* Resolve a negative index now, unless it points before the
3680 beginning of the array, in which case record it for error
3681 reporting in magic_setdefelem. */
3682 elem < 0 && len + elem >= 0 ? len + elem : elem,
3686 if (UNLIKELY(localizing)) {
3688 save_aelem(av, elem, svp);
3690 SAVEADELETE(av, elem);
3692 else if (PL_op->op_private & OPpDEREF) {
3693 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
3697 sv = (svp ? *svp : &PL_sv_undef);
3698 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
/* Autovivify sv into a reference as part of a dereference op:
 * croak if it cannot be modified, then turn it into an RV pointing
 * at a fresh SV, AV or HV according to to_what (the OPpDEREF_*
 * selector).  A gmagical sv is returned as a magic-free mortal copy
 * so the caller does not trigger its get-magic again.
 * NOTE(review): excerpt with elided lines (the switch skeleton and
 * return statements are not visible here). */
3705 Perl_vivify_ref(pTHX_ SV *sv, U32 to_what)
3707 PERL_ARGS_ASSERT_VIVIFY_REF;
3712 Perl_croak_no_modify();
3713 prepare_SV_for_RV(sv);
3716 SvRV_set(sv, newSV(0));
3719 SvRV_set(sv, MUTABLE_SV(newAV()));
3722 SvRV_set(sv, MUTABLE_SV(newHV()));
3729 if (SvGMAGICAL(sv)) {
3730 /* copy the sv without magic to prevent magic from being
3732 SV* msv = sv_newmortal();
3733 sv_setsv_nomg(msv, sv);
/* Determine the stash in which to start method lookup, from the
 * invocant (the first stack item after the mark).  Accepts: a class
 * name as a shared-hash COW string (fast path), a blessed reference,
 * a glob/filehandle, or a bareword filehandle name - which is
 * replaced on the stack with a reference to its IO object.  Croaks
 * for undef or otherwise unusable invocants.  May return the
 * invocant SV itself (cast to HV*) when the named package has no
 * stash yet; callers check SvTYPE(stash) == SVt_PVHV.
 * NOTE(review): excerpt with elided lines. */
3739 PERL_STATIC_INLINE HV *
3740 S_opmethod_stash(pTHX_ SV* meth)
3745 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
3746 ? (Perl_croak(aTHX_ "Can't call method \"%"SVf"\" without a "
3747 "package or object reference", SVfARG(meth)),
3749 : *(PL_stack_base + TOPMARK + 1);
3751 PERL_ARGS_ASSERT_OPMETHOD_STASH;
3755 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on an undefined value",
3758 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
3759 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
3760 stash = gv_stashsv(sv, GV_CACHE_ONLY);
3761 if (stash) return stash;
3765 ob = MUTABLE_SV(SvRV(sv));
3766 else if (!SvOK(sv)) goto undefined;
3767 else if (isGV_with_GP(sv)) {
3769 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
3770 "without a package or object reference",
3773 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') {
3774 assert(!LvTARGLEN(ob));
3778 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob));
3781 /* this isn't a reference */
3784 const char * const packname = SvPV_nomg_const(sv, packlen);
3785 const U32 packname_utf8 = SvUTF8(sv);
3786 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
3787 if (stash) return stash;
3789 if (!(iogv = gv_fetchpvn_flags(
3790 packname, packlen, packname_utf8, SVt_PVIO
3792 !(ob=MUTABLE_SV(GvIO(iogv))))
3794 /* this isn't the name of a filehandle either */
3797 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" "
3798 "without a package or object reference",
3801 /* assume it's a package name */
3802 stash = gv_stashpvn(packname, packlen, packname_utf8);
3803 if (stash) return stash;
3804 else return MUTABLE_HV(sv);
3806 /* it _is_ a filehandle name -- replace with a reference */
3807 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
3810 /* if we got here, ob should be an object or a glob */
3811 if (!ob || !(SvOBJECT(ob)
3812 || (isGV_with_GP(ob)
3813 && (ob = MUTABLE_SV(GvIO((const GV *)ob)))
/* hint DOES instead of isa in the error, mirroring UNIVERSAL::isa */
3816 Perl_croak(aTHX_ "Can't call method \"%"SVf"\" on unblessed reference",
3817 SVfARG((SvSCREAM(meth) && strEQ(SvPV_nolen_const(meth),"isa"))
3818 ? newSVpvs_flags("DOES", SVs_TEMP)
/* Body of pp_method: fully dynamic method call ($obj->$meth) where
 * the method "name" SV is on top of the stack.  A code-ref method
 * is used directly; otherwise the name is resolved in the
 * invocant's stash with AUTOLOAD fallback, croaking if not found.
 * NOTE(review): excerpt with elided lines. */
3830 SV* const meth = TOPs;
3833 SV* const rmeth = SvRV(meth);
3834 if (SvTYPE(rmeth) == SVt_PVCV) {
3840 stash = opmethod_stash(meth);
3842 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
3845 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* Shared fast path for the method ops below: probe `cache` for
 * `meth` and, if the cached GV's CV is still valid for the current
 * PL_sub_generation / mro cache_gen, push that CV and (in the
 * elided tail of the macro) return early, skipping the full
 * gv_fetchmethod lookup.  NOTE(review): excerpt with elided lines;
 * comments cannot be placed inside the backslash-continued body. */
3849 #define METHOD_CHECK_CACHE(stash,cache,meth) \
3850 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
3852 gv = MUTABLE_GV(HeVAL(he)); \
3853 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
3854 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
3856 XPUSHs(MUTABLE_SV(GvCV(gv))); \
/* Body of pp_method_named: method call with a compile-time-constant
 * name stored in the METHOP.  Tries the stash's own method cache
 * first via METHOD_CHECK_CACHE, then falls back to the full lookup
 * with AUTOLOAD/croak.  NOTE(review): excerpt with elided lines. */
3865 SV* const meth = cMETHOPx_meth(PL_op);
3866 HV* const stash = opmethod_stash(meth);
/* opmethod_stash may return a non-HV (bare package-name SV) */
3868 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
3869 METHOD_CHECK_CACHE(stash, stash, meth);
3872 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
3875 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* Body of pp_method_super: SUPER::method call with a constant name.
 * Lookup starts from the compiling package's stash (CopSTASH), not
 * the invocant's, using the mro SUPER cache when available and the
 * GV_SUPER flag on the full lookup.
 * NOTE(review): excerpt with elided lines. */
3884 SV* const meth = cMETHOPx_meth(PL_op);
3885 HV* const stash = CopSTASH(PL_curcop);
3886 /* Actually, SUPER doesn't need real object's (or class') stash at all,
3887 * as it uses CopSTASH. However, we must ensure that object(class) is
3888 * correct (this check is done by S_opmethod_stash) */
3889 opmethod_stash(meth);
3891 if ((cache = HvMROMETA(stash)->super)) {
3892 METHOD_CHECK_CACHE(stash, cache, meth);
3895 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
3898 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* Body of pp_method_redir: constant-name method call redirected to
 * an explicitly named class (Foo::->method style, rclass stored in
 * the METHOP).  The invocant is still validated via opmethod_stash;
 * if the redirect class has no stash yet, its name SV stands in.
 * NOTE(review): excerpt with elided lines. */
3906 SV* const meth = cMETHOPx_meth(PL_op);
3907 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
3908 opmethod_stash(meth); /* not used but needed for error checks */
3910 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
3911 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
3913 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
3916 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
/* pp_method_redir_super: like pp_method_redir but resolves through
 * the redirect class's SUPER chain (mro SUPER cache when available,
 * GV_SUPER on the full lookup).
 * NOTE(review): excerpt with elided lines. */
3920 PP(pp_method_redir_super)
3925 SV* const meth = cMETHOPx_meth(PL_op);
3926 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
3927 opmethod_stash(meth); /* not used but needed for error checks */
3929 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
3930 else if ((cache = HvMROMETA(stash)->super)) {
3931 METHOD_CHECK_CACHE(stash, cache, meth);
3934 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
3937 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
3942 * ex: set ts=8 sts=4 sw=4 et: