[perl5.git] / pp_hot.c
Commit: MULTICONCAT - use distinct TMPS for const overload
1/* pp_hot.c
2 *
3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
5 *
6 * You may distribute under the terms of either the GNU General Public
7 * License or the Artistic License, as specified in the README file.
8 *
9 */
10
11/*
12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
13 * shaking the air.
14 *
15 * Awake! Awake! Fear, Fire, Foes! Awake!
16 * Fire, Foes! Awake!
17 *
18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
19 */
20
21/* This file contains 'hot' pp ("push/pop") functions that
22 * execute the opcodes that make up a perl program. A typical pp function
23 * expects to find its arguments on the stack, and usually pushes its
24 * results onto the stack, hence the 'pp' terminology. Each OP structure
25 * contains a pointer to the relevant pp_foo() function.
26 *
27 * By 'hot', we mean common ops whose execution speed is critical.
28 * By gathering them together into a single file, we encourage
29 * CPU cache hits on hot code. Also it could be taken as a warning not to
30 * change any code in this file unless you're sure it won't affect
31 * performance.
32 */
33
34#include "EXTERN.h"
35#define PERL_IN_PP_HOT_C
36#include "perl.h"
37
38/* Hot code. */
39
40PP(pp_const)
41{
42 dSP;
43 XPUSHs(cSVOP_sv);
44 RETURN;
45}
46
47PP(pp_nextstate)
48{
49 PL_curcop = (COP*)PL_op;
50 TAINT_NOT; /* Each statement is presumed innocent */
51 PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;
52 FREETMPS;
53 PERL_ASYNC_CHECK();
54 return NORMAL;
55}
56
57PP(pp_gvsv)
58{
59 dSP;
60 EXTEND(SP,1);
61 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
62 PUSHs(save_scalar(cGVOP_gv));
63 else
64 PUSHs(GvSVn(cGVOP_gv));
65 RETURN;
66}
67
68
69/* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
70
71PP(pp_null)
72{
73 return NORMAL;
74}
75
76/* This is sometimes called directly by pp_coreargs, pp_grepstart and
77 amagic_call. */
78PP(pp_pushmark)
79{
80 PUSHMARK(PL_stack_sp);
81 return NORMAL;
82}
83
84PP(pp_stringify)
85{
86 dSP; dTARGET;
87 SV * const sv = TOPs;
88 SETs(TARG);
89 sv_copypv(TARG, sv);
90 SvSETMAGIC(TARG);
91 /* no PUTBACK, SETs doesn't inc/dec SP */
92 return NORMAL;
93}
94
95PP(pp_gv)
96{
97 dSP;
98 XPUSHs(MUTABLE_SV(cGVOP_gv));
99 RETURN;
100}
101
102
103/* also used for: pp_andassign() */
104
105PP(pp_and)
106{
107 PERL_ASYNC_CHECK();
108 {
 109 /* SP is deliberately not used here, to avoid a variable being saved
 110 across the sv_2bool_flags call in SvTRUE_NN. If a RISC/CISC or low/high
 111 machine register or load/store vs direct mem ops macro is introduced,
 112 this should become a define block between direct PL_stack_sp and dSP
 113 operations; presently, using PL_stack_sp is biased towards CISC cpus */
114 SV * const sv = *PL_stack_sp;
115 if (!SvTRUE_NN(sv))
116 return NORMAL;
117 else {
118 if (PL_op->op_type == OP_AND)
119 --PL_stack_sp;
120 return cLOGOP->op_other;
121 }
122 }
123}
124
125PP(pp_sassign)
126{
127 dSP;
128 /* sassign keeps its args in the optree traditionally backwards.
129 So we pop them differently.
130 */
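    /* Illustrative note: for a plain '$x = $y', the optree evaluates the
     * RHS ($y) first and the LHS ($x) last, so the LHS sits on top of the
     * stack; hence POPs below yields 'left' and TOPs yields 'right'. */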
131 SV *left = POPs; SV *right = TOPs;
132
133 if (PL_op->op_private & OPpASSIGN_BACKWARDS) { /* {or,and,dor}assign */
134 SV * const temp = left;
135 left = right; right = temp;
136 }
137 assert(TAINTING_get || !TAINT_get);
138 if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
139 TAINT_NOT;
140 if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
141 /* *foo =\&bar */
142 SV * const cv = SvRV(right);
143 const U32 cv_type = SvTYPE(cv);
144 const bool is_gv = isGV_with_GP(left);
145 const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
146
147 if (!got_coderef) {
148 assert(SvROK(cv));
149 }
150
151 /* Can do the optimisation if left (LVALUE) is not a typeglob,
152 right (RVALUE) is a reference to something, and we're in void
153 context. */
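	    /* Illustrative example (roughly what a constant-exporting module
	     * does when importing a constant sub in void context):
	     *
	     *     package Foo; use constant BAR => 42;
	     *     # elsewhere, with *main::BAR not yet created:
	     *     *{"main::BAR"} = \&Foo::BAR;
	     *
	     * Instead of creating a full typeglob, the target symbol table
	     * entry becomes a lightweight proxy constant subroutine holding
	     * a(nother) reference to the constant's value. */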
154 if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
155 /* Is the target symbol table currently empty? */
156 GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
157 if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
158 /* Good. Create a new proxy constant subroutine in the target.
159 The gv becomes a(nother) reference to the constant. */
160 SV *const value = SvRV(cv);
161
162 SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
163 SvPCS_IMPORTED_on(gv);
164 SvRV_set(gv, value);
165 SvREFCNT_inc_simple_void(value);
166 SETs(left);
167 RETURN;
168 }
169 }
170
171 /* Need to fix things up. */
172 if (!is_gv) {
173 /* Need to fix GV. */
174 left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
175 }
176
177 if (!got_coderef) {
178 /* We've been returned a constant rather than a full subroutine,
179 but they expect a subroutine reference to apply. */
180 if (SvROK(cv)) {
181 ENTER_with_name("sassign_coderef");
182 SvREFCNT_inc_void(SvRV(cv));
183 /* newCONSTSUB takes a reference count on the passed in SV
184 from us. We set the name to NULL, otherwise we get into
185 all sorts of fun as the reference to our new sub is
186 donated to the GV that we're about to assign to.
187 */
188 SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
189 SvRV(cv))));
190 SvREFCNT_dec_NN(cv);
191 LEAVE_with_name("sassign_coderef");
192 } else {
193 /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
194 is that
195 First: ops for \&{"BONK"}; return us the constant in the
196 symbol table
197 Second: ops for *{"BONK"} cause that symbol table entry
198 (and our reference to it) to be upgraded from RV
 199 to a typeglob
200 Thirdly: We get here. cv is actually PVGV now, and its
201 GvCV() is actually the subroutine we're looking for
202
203 So change the reference so that it points to the subroutine
204 of that typeglob, as that's what they were after all along.
205 */
206 GV *const upgraded = MUTABLE_GV(cv);
207 CV *const source = GvCV(upgraded);
208
209 assert(source);
210 assert(CvFLAGS(source) & CVf_CONST);
211
212 SvREFCNT_inc_simple_void_NN(source);
213 SvREFCNT_dec_NN(upgraded);
214 SvRV_set(right, MUTABLE_SV(source));
215 }
216 }
217
218 }
219 if (
220 UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
221 (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
222 )
223 Perl_warner(aTHX_
224 packWARN(WARN_MISC), "Useless assignment to a temporary"
225 );
226 SvSetMagicSV(left, right);
227 SETs(left);
228 RETURN;
229}
230
231PP(pp_cond_expr)
232{
233 dSP;
234 SV *sv;
235
236 PERL_ASYNC_CHECK();
237 sv = POPs;
238 RETURNOP(SvTRUE_NN(sv) ? cLOGOP->op_other : cLOGOP->op_next);
239}
240
241PP(pp_unstack)
242{
243 PERL_CONTEXT *cx;
244 PERL_ASYNC_CHECK();
245 TAINT_NOT; /* Each statement is presumed innocent */
246 cx = CX_CUR();
247 PL_stack_sp = PL_stack_base + cx->blk_oldsp;
248 FREETMPS;
249 if (!(PL_op->op_flags & OPf_SPECIAL)) {
250 assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx));
251 CX_LEAVE_SCOPE(cx);
252 }
253 return NORMAL;
254}
255
256PP(pp_concat)
257{
258 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
259 {
260 dPOPTOPssrl;
261 bool lbyte;
262 STRLEN rlen;
263 const char *rpv = NULL;
264 bool rbyte = FALSE;
265 bool rcopied = FALSE;
266
267 if (TARG == right && right != left) { /* $r = $l.$r */
268 rpv = SvPV_nomg_const(right, rlen);
269 rbyte = !DO_UTF8(right);
270 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
271 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */
272 rcopied = TRUE;
273 }
274
275 if (TARG != left) { /* not $l .= $r */
276 STRLEN llen;
277 const char* const lpv = SvPV_nomg_const(left, llen);
278 lbyte = !DO_UTF8(left);
279 sv_setpvn(TARG, lpv, llen);
280 if (!lbyte)
281 SvUTF8_on(TARG);
282 else
283 SvUTF8_off(TARG);
284 }
285 else { /* $l .= $r and left == TARG */
286 if (!SvOK(left)) {
287 if ((left == right /* $l .= $l */
288 || (PL_op->op_private & OPpTARGET_MY)) /* $l = $l . $r */
289 && ckWARN(WARN_UNINITIALIZED)
290 )
291 report_uninit(left);
292 SvPVCLEAR(left);
293 }
294 else {
295 SvPV_force_nomg_nolen(left);
296 }
297 lbyte = !DO_UTF8(left);
298 if (IN_BYTES)
299 SvUTF8_off(left);
300 }
301
302 if (!rcopied) {
303 rpv = SvPV_nomg_const(right, rlen);
304 rbyte = !DO_UTF8(right);
305 }
306 if (lbyte != rbyte) {
307 if (lbyte)
308 sv_utf8_upgrade_nomg(TARG);
309 else {
310 if (!rcopied)
311 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
312 sv_utf8_upgrade_nomg(right);
313 rpv = SvPV_nomg_const(right, rlen);
314 }
315 }
316 sv_catpvn_nomg(TARG, rpv, rlen);
317
318 SETTARG;
319 RETURN;
320 }
321}
322
323
324/* pp_multiconcat()
325
326Concatenate one or more args, possibly interleaved with constant string
327segments. The result may be assigned to, or appended to, a variable or
328expression.
329
330Several op_flags and/or op_private bits indicate what the target is, and
331whether it's appended to. Valid permutations are:
332
333 - (PADTMP) = (A.B.C....)
334 OPpTARGET_MY $lex = (A.B.C....)
335 OPpTARGET_MY,OPpLVAL_INTRO my $lex = (A.B.C....)
336 OPpTARGET_MY,OPpMULTICONCAT_APPEND $lex .= (A.B.C....)
337 OPf_STACKED expr = (A.B.C....)
338 OPf_STACKED,OPpMULTICONCAT_APPEND expr .= (A.B.C....)
339
340Other combinations like (A.B).(C.D) are not optimised into a multiconcat
341op, as it's too hard to get the correct ordering of ties, overload etc.
342
343In addition:
344
345 OPpMULTICONCAT_FAKE: not a real concat, instead an optimised
346 sprintf "...%s...". Don't call '.'
347 overloading: only use '""' overloading.
348
349 OPpMULTICONCAT_STRINGIFY: (for Deparse's benefit) the RHS was of the
350 form "...$a...$b..." rather than
351 "..." . $a . "..." . $b . "..."
352
353An OP_MULTICONCAT is of type UNOP_AUX. The fixed slots of the aux array are
354defined with PERL_MULTICONCAT_IX_FOO constants, where:
355
356
357 FOO index description
358 -------- ----- ----------------------------------
359 NARGS 0 number of arguments
360 PLAIN_PV 1 non-utf8 constant string
361 PLAIN_LEN 2 non-utf8 constant string length
362 UTF8_PV 3 utf8 constant string
363 UTF8_LEN 4 utf8 constant string length
364 LENGTHS 5 first of nargs+1 const segment lengths
365
366The idea is that a general string concatenation will have a fixed (known
367at compile time) number of variable args, interspersed with constant
368strings, e.g. "a=$a b=$b\n"
369
370All the constant string segments "a=", " b=" and "\n" are stored as a
371single string "a= b=\n", pointed to from the PLAIN_PV/UTF8_PV slot, along
372with a series of segment lengths: e.g. 2,3,1. In the case where the
373constant string is plain but has a different utf8 representation, both
 374 variants are stored, and two sets of (nargs+1) segment lengths are stored
375in the slots beginning at PERL_MULTICONCAT_IX_LENGTHS.
376
377A segment length of -1 indicates that there is no constant string at that
378point; this distinguishes between e.g. ($a . $b) and ($a . "" . $b), which
379have differing overloading behaviour.
380
381*/
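/* Worked example (illustrative): for the expression

       "a=$a b=$b\n"

   the aux array described above would conceptually hold:

       NARGS      2
       PLAIN_PV   "a= b=\n"       PLAIN_LEN  6
       UTF8_PV    "a= b=\n"       UTF8_LEN   6    (string is utf8-invariant)
       LENGTHS    2, 3, 1                         (nargs+1 segment lengths)

   i.e. const segment "a=" (2), arg $a, const " b=" (3), arg $b,
   const "\n" (1).
*/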
382
383PP(pp_multiconcat)
384{
385 dSP;
386 SV *targ; /* The SV to be assigned or appended to */
387 SV *dsv; /* the SV to concat args to (often == targ) */
388 char *dsv_pv; /* where within SvPVX(dsv) we're writing to */
389 STRLEN targ_len; /* SvCUR(targ) */
390 SV **toparg; /* the highest arg position on the stack */
391 UNOP_AUX_item *aux; /* PL_op->op_aux buffer */
392 UNOP_AUX_item *const_lens; /* the segment length array part of aux */
393 const char *const_pv; /* the current segment of the const string buf */
394 SSize_t nargs; /* how many args were expected */
395 SSize_t stack_adj; /* how much to adjust SP on return */
396 STRLEN grow; /* final size of destination string (dsv) */
397 UV targ_count; /* how many times targ has appeared on the RHS */
398 bool is_append; /* OPpMULTICONCAT_APPEND flag is set */
399 bool slow_concat; /* args too complex for quick concat */
400 U32 dst_utf8; /* the result will be utf8 (indicate this with
401 SVf_UTF8 in a U32, rather than using bool,
402 for ease of testing and setting) */
403 /* for each arg, holds the result of an SvPV() call */
404 struct multiconcat_svpv {
405 char *pv;
406 SSize_t len;
407 }
408 *targ_chain, /* chain of slots where targ has appeared on RHS */
409 *svpv_p, /* ptr for looping through svpv_buf */
410 *svpv_base, /* first slot (may be greater than svpv_buf), */
411 *svpv_end, /* and slot after highest result so far, of: */
412 svpv_buf[PERL_MULTICONCAT_MAXARG]; /* buf for storing SvPV() results */
413
414 aux = cUNOP_AUXx(PL_op)->op_aux;
415 stack_adj = nargs = aux[PERL_MULTICONCAT_IX_NARGS].ssize;
416 is_append = cBOOL(PL_op->op_private & OPpMULTICONCAT_APPEND);
417
418 /* get targ from the stack or pad */
419
420 if (PL_op->op_flags & OPf_STACKED) {
421 if (is_append) {
422 /* for 'expr .= ...', expr is the bottom item on the stack */
423 targ = SP[-nargs];
424 stack_adj++;
425 }
426 else
427 /* for 'expr = ...', expr is the top item on the stack */
428 targ = POPs;
429 }
430 else {
431 SV **svp = &(PAD_SVl(PL_op->op_targ));
432 targ = *svp;
433 if (PL_op->op_private & OPpLVAL_INTRO) {
434 assert(PL_op->op_private & OPpTARGET_MY);
435 save_clearsv(svp);
436 }
437 if (!nargs)
438 /* $lex .= "const" doesn't cause anything to be pushed */
439 EXTEND(SP,1);
440 }
441
442 toparg = SP;
443 SP -= (nargs - 1);
444 dsv = targ; /* Set the destination for all concats. This is
445 initially targ; later on, dsv may be switched
446 to point to a TEMP SV if overloading is
447 encountered. */
448 grow = 1; /* allow for '\0' at minimum */
449 targ_count = 0;
450 targ_chain = NULL;
451 targ_len = 0;
452 svpv_end = svpv_buf;
453 /* only utf8 variants of the const strings? */
454 dst_utf8 = aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv ? 0 : SVf_UTF8;
455
456
457 /* --------------------------------------------------------------
458 * Phase 1:
459 *
460 * stringify (i.e. SvPV()) every arg and store the resultant pv/len/utf8
461 * triplets in svpv_buf[]. Also increment 'grow' by the args' lengths.
462 *
463 * utf8 is indicated by storing a negative length.
464 *
465 * Where an arg is actually targ, the stringification is deferred:
466 * the length is set to 0, and the slot is added to targ_chain.
467 *
468 * If an overloaded arg is found, the loop is abandoned at that point,
469 * and dsv is set to an SvTEMP SV where the results-so-far will be
470 * accumulated.
471 */
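    /* Illustrative sketch: for '$t = $a . $t . $t', where $t is targ and
     * $a is a plain PV "foo", svpv_buf conceptually ends up as:
     *
     *     [0]  { "foo",        3 }   plain arg, length 3
     *     [1]  { NULL,         0 }   first occurrence of targ
     *     [2]  { &svpv_buf[1], 0 }   second occurrence, linked R-to-L
     *
     * with targ_chain pointing at svpv_buf[2] and targ_count == 2.
     */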
472
473 for (; SP <= toparg; SP++, svpv_end++) {
474 bool simple_flags;
475 U32 utf8;
476 STRLEN len;
477 SV *sv;
478
479 assert(svpv_end - svpv_buf < PERL_MULTICONCAT_MAXARG);
480
481 sv = *SP;
482 simple_flags = (SvFLAGS(sv) & (SVs_GMG|SVf_ROK|SVf_POK)) == SVf_POK;
483
484 /* this if/else chain is arranged so that common/simple cases
485 * take few conditionals */
486
487 if (LIKELY(simple_flags && (sv != targ))) {
488 /* common case: sv is a simple PV and not the targ */
489 svpv_end->pv = SvPVX(sv);
490 len = SvCUR(sv);
491 }
492 else if (simple_flags) {
493 /* sv is targ (but can't be magic or overloaded).
494 * Delay storing PV pointer; instead, add slot to targ_chain
495 * so it can be populated later, after targ has been grown and
496 * we know its final SvPVX() address.
497 */
498 targ_on_rhs:
 499 svpv_end->len = 0; /* zeroing here means we can skip
500 updating later if targ_len == 0 */
501 svpv_end->pv = (char*)targ_chain;
502 targ_chain = svpv_end;
503 targ_count++;
504 continue;
505 }
506 else {
507 if (UNLIKELY(SvFLAGS(sv) & (SVs_GMG|SVf_ROK))) {
 508 /* it's got magic, is tied, and/or is overloaded */
509 SvGETMAGIC(sv);
510
511 if (UNLIKELY(SvAMAGIC(sv))
512 && !(PL_op->op_private & OPpMULTICONCAT_FAKE))
513 {
514 /* One of the RHS args is overloaded. Abandon stringifying
515 * the args at this point, then in the concat loop later
516 * on, concat the plain args stringified so far into a
517 * TEMP SV. At the end of this function the remaining
518 * args (including the current one) will be handled
519 * specially, using overload calls.
520 * FAKE implies an optimised sprintf which doesn't use
521 * concat overloading, only "" overloading.
522 */
523
524 if ( svpv_end == svpv_buf + 1
525 /* no const string segments */
526 && aux[PERL_MULTICONCAT_IX_LENGTHS].ssize == -1
527 && aux[PERL_MULTICONCAT_IX_LENGTHS + 1].ssize == -1
528 ) {
529 /* special case: if the overloaded sv is the
530 * second arg in the concat chain, stop at the
531 * first arg rather than this, so that
532 *
533 * $arg1 . $arg2
534 *
535 * invokes overloading as
536 *
537 * concat($arg2, $arg1, 1)
538 *
539 * rather than
540 *
541 * concat($arg2, "$arg1", 1)
542 *
543 * This means that if for example arg1 is a ref,
544 * it gets passed as-is to the concat method
545 * rather than a stringified copy. If it's not the
546 * first arg, it doesn't matter, as in $arg0 .
547 * $arg1 . $arg2, where the result of ($arg0 .
548 * $arg1) will already be a string.
 549 * This isn't perfect: we'll have already
 550 * done SvPV($arg1) on the previous iteration;
 551 * and are now throwing away that result and
 552 * hoping arg1 hasn't been affected.
553 */
554 svpv_end--;
555 SP--;
556 }
557
558 setup_overload:
559 dsv = newSVpvn_flags("", 0, SVs_TEMP);
560
561 if (targ_chain) {
562 /* Get the string value of targ and populate any
563 * RHS slots which use it */
564 char *pv = SvPV_nomg(targ, len);
565 dst_utf8 |= (SvFLAGS(targ) & SVf_UTF8);
566 grow += len * targ_count;
567 do {
568 struct multiconcat_svpv *p = targ_chain;
569 targ_chain = (struct multiconcat_svpv *)(p->pv);
570 p->pv = pv;
571 p->len = len;
572 } while (targ_chain);
573 }
574 else if (is_append)
575 SvGETMAGIC(targ);
576
577 goto phase3;
578 }
579
580 if (SvFLAGS(sv) & SVs_RMG) {
581 /* probably tied; copy it to guarantee separate values
582 * each time it's used, e.g. "-$tied-$tied-$tied-",
583 * since FETCH() isn't necessarily idempotent */
584 SV *nsv = newSV(0);
585 sv_setsv_flags(nsv, sv, SV_NOSTEAL);
586 sv_2mortal(nsv);
587 if ( sv == targ
588 && is_append
589 && nargs == 1
590 /* no const string segments */
591 && aux[PERL_MULTICONCAT_IX_LENGTHS].ssize == -1
592 && aux[PERL_MULTICONCAT_IX_LENGTHS+1].ssize == -1)
593 {
594 /* special-case $tied .= $tied.
595 *
596 * For something like
597 * sub FETCH { $i++ }
598 * then
599 * $tied .= $tied . $tied . $tied;
600 * will STORE "4123"
601 * while
602 * $tied .= $tied
603 * will STORE "12"
604 *
605 * i.e. for a single mutator concat, the LHS is
606 * retrieved first; in all other cases it is
607 * retrieved last. Whether this is sane behaviour
608 * is open to debate; but for now, multiconcat (as
609 * it is an optimisation) tries to reproduce
610 * existing behaviour.
611 */
612 sv_catsv(nsv, sv);
613 sv_setsv(sv,nsv);
614 SP++;
615 goto phase7; /* just return targ as-is */
616 }
617
618 sv = nsv;
619 }
620 }
621
622 if (sv == targ) {
623 /* must warn for each RH usage of targ, except that
624 * we will later get one warning when doing
625 * SvPV_force(targ), *except* on '.=' */
626 if ( !SvOK(sv)
627 && (targ_chain || is_append)
628 && ckWARN(WARN_UNINITIALIZED)
629 )
630 report_uninit(sv);
631 goto targ_on_rhs;
632 }
633
634 /* stringify general SV */
635 svpv_end->pv = sv_2pv_flags(sv, &len, 0);
636 }
637
638 utf8 = (SvFLAGS(sv) & SVf_UTF8);
639 dst_utf8 |= utf8;
640 ASSUME(len < SSize_t_MAX);
641 svpv_end->len = utf8 ? -(SSize_t)len : (SSize_t)len;
642 grow += len;
643 }
644
645 /* --------------------------------------------------------------
646 * Phase 2:
647 *
648 * Stringify targ:
649 *
650 * if targ appears on the RHS or is appended to, force stringify it;
651 * otherwise set it to "". Then set targ_len.
652 */
653
654 if (is_append) {
655 if (UNLIKELY(SvFLAGS(targ) & (SVs_GMG|SVf_ROK))) {
656 SvGETMAGIC(targ); /* must do before SvAMAGIC() check */
657 if (UNLIKELY(SvAMAGIC(targ))) {
658 /* $overloaded .= ....;
659 * accumulate RHS in a temp SV rather than targ,
660 * then append tmp to targ at the end using overload
661 */
662 assert(!targ_chain);
663 dsv = newSVpvn_flags("", 0, SVs_TEMP);
664 goto phase3;
665 }
666 }
667
668 if (SvOK(targ)) {
669 U32 targ_utf8;
670 stringify_targ:
671 SvPV_force_nomg_nolen(targ);
672 targ_utf8 = SvFLAGS(targ) & SVf_UTF8;
673 if (UNLIKELY(dst_utf8 & ~targ_utf8)) {
674 if (LIKELY(!IN_BYTES))
675 sv_utf8_upgrade_nomg(targ);
676 }
677 else
678 dst_utf8 |= targ_utf8;
679
680 targ_len = SvCUR(targ);
681 grow += targ_len * (targ_count + is_append);
682 goto phase3;
683 }
684 }
685 else if (UNLIKELY(SvTYPE(targ) >= SVt_REGEXP)) {
686 /* Assigning to some weird LHS type. Don't force the LHS to be an
687 * empty string; instead, do things 'long hand' by using the
688 * overload code path, which concats to a TEMP sv and does
689 * sv_catsv() calls rather than COPY()s. This ensures that even
690 * bizarre code like this doesn't break or crash:
691 * *F = *F . *F.
692 * (which makes the 'F' typeglob an alias to the
693 * '*main::F*main::F' typeglob).
694 */
695 goto setup_overload;
696 }
697 else if (targ_chain) {
698 /* targ was found on RHS.
699 * We don't need the SvGETMAGIC() call and SvAMAGIC() test as
700 * both were already done earlier in the SvPV() loop; other
701 * than that we can share the same code with the append
702 * branch below.
703 * Note that this goto jumps directly into the SvOK() branch
704 * even if targ isn't SvOK(), to force an 'uninitialised'
705 * warning; e.g.
706 * $undef .= .... targ only on LHS: don't warn
707 * $undef .= $undef .... targ on RHS too: warn
708 */
709 assert(!SvAMAGIC(targ));
710 goto stringify_targ;
711 }
712
713
714 /* unrolled SvPVCLEAR() - mostly: no need to grow or set SvCUR() to 0;
715 * those will be done later. */
716 assert(targ == dsv);
717 SV_CHECK_THINKFIRST_COW_DROP(targ);
718 SvUPGRADE(targ, SVt_PV);
719 SvFLAGS(targ) &= ~(SVf_OK|SVf_IVisUV|SVf_UTF8);
720 SvFLAGS(targ) |= (SVf_POK|SVp_POK|dst_utf8);
721
722 phase3:
723
724 /* --------------------------------------------------------------
725 * Phase 3:
726 *
727 * UTF-8 tweaks and grow dsv:
728 *
729 * Now that we know the length and utf8-ness of both the targ and
730 * args, grow dsv to the size needed to accumulate all the args, based
731 * on whether targ appears on the RHS, whether we're appending, and
732 * whether any non-utf8 args expand in size if converted to utf8.
733 *
734 * For the latter, if dst_utf8 we scan non-utf8 args looking for
735 * variant chars, and adjust the svpv->len value of those args to the
736 * utf8 size and negate it to flag them. At the same time we un-negate
737 * the lens of any utf8 args since after this phase we no longer care
738 * whether an arg is utf8 or not.
739 *
740 * Finally, initialise const_lens and const_pv based on utf8ness.
741 * Note that there are 3 permutations:
742 *
743 * * If the constant string is invariant whether utf8 or not (e.g. "abc"),
744 * then aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN] are the same as
745 * aux[PERL_MULTICONCAT_IX_UTF8_PV/LEN] and there is one set of
746 * segment lengths.
747 *
748 * * If the string is fully utf8, e.g. "\x{100}", then
749 * aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN] == (NULL,0) and there is
750 * one set of segment lengths.
751 *
752 * * If the string has different plain and utf8 representations
 753 * (e.g. "\x80"), then aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN]
754 * holds the plain rep, while aux[PERL_MULTICONCAT_IX_UTF8_PV/LEN]
755 * holds the utf8 rep, and there are 2 sets of segment lengths,
756 * with the utf8 set following after the plain set.
757 *
758 * On entry to this section the (pv,len) pairs in svpv_buf have the
759 * following meanings:
760 * (pv, len) a plain string
761 * (pv, -len) a utf8 string
762 * (NULL, 0) left-most targ \ linked together R-to-L
763 * (next, 0) other targ / in targ_chain
764 */
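    /* Illustrative example of the third permutation: for a constant
     * segment containing "\x80" (which is one byte as latin-1 but two
     * bytes, "\xC2\x80", as utf8):
     *
     *     PLAIN_PV = "\x80"       PLAIN_LEN = 1
     *     UTF8_PV  = "\xC2\x80"   UTF8_LEN  = 2
     *
     * and two sets of nargs+1 segment lengths are stored, the utf8 set
     * directly after the plain set; const_lens is advanced by nargs+1
     * below when dst_utf8 is set.
     */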
765
766 /* turn off utf8 handling if 'use bytes' is in scope */
767 if (UNLIKELY(dst_utf8 && IN_BYTES)) {
768 dst_utf8 = 0;
769 SvUTF8_off(dsv);
770 /* undo all the negative lengths which flag utf8-ness */
771 for (svpv_p = svpv_buf; svpv_p < svpv_end; svpv_p++) {
772 SSize_t len = svpv_p->len;
773 if (len < 0)
774 svpv_p->len = -len;
775 }
776 }
777
778 /* grow += total of lengths of constant string segments */
779 {
780 SSize_t len;
781 len = aux[dst_utf8 ? PERL_MULTICONCAT_IX_UTF8_LEN
782 : PERL_MULTICONCAT_IX_PLAIN_LEN].ssize;
783 slow_concat = cBOOL(len);
784 grow += len;
785 }
786
787 const_lens = aux + PERL_MULTICONCAT_IX_LENGTHS;
788
789 if (dst_utf8) {
790 const_pv = aux[PERL_MULTICONCAT_IX_UTF8_PV].pv;
791 if ( aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv
792 && const_pv != aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv)
793 /* separate sets of lengths for plain and utf8 */
794 const_lens += nargs + 1;
795
796 /* If the result is utf8 but some of the args aren't,
797 * calculate how much extra growth is needed for all the chars
798 * which will expand to two utf8 bytes.
799 * Also, if the growth is non-zero, negate the length to indicate
 800 * that this is a variant string. Conversely, un-negate the
 801 * length on utf8 args (which was only needed to flag non-utf8
 802 * args in this loop). */
803 for (svpv_p = svpv_buf; svpv_p < svpv_end; svpv_p++) {
804 char *p;
805 SSize_t len, l, extra;
806
807 len = svpv_p->len;
808 if (len <= 0) {
809 svpv_p->len = -len;
810 continue;
811 }
812
813 p = svpv_p->pv;
814 extra = 0;
815 l = len;
816 while (l--)
817 extra += !UTF8_IS_INVARIANT(*p++);
818 if (UNLIKELY(extra)) {
819 grow += extra;
820 /* -ve len indicates special handling */
821 svpv_p->len = -(len + extra);
822 slow_concat = TRUE;
823 }
824 }
825 }
826 else
827 const_pv = aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv;
828
829 /* unrolled SvGROW(), except don't check for SVf_IsCOW, which should
830 * already have been dropped */
831 assert(!SvIsCOW(dsv));
832 dsv_pv = (SvLEN(dsv) < (grow) ? sv_grow(dsv,grow) : SvPVX(dsv));
833
834
835 /* --------------------------------------------------------------
836 * Phase 4:
837 *
838 * Now that dsv (which is probably targ) has been grown, we know the
839 * final address of the targ PVX, if needed. Preserve / move targ
840 * contents if appending or if targ appears on RHS.
841 *
842 * Also update svpv_buf slots in targ_chain.
843 *
844 * Don't bother with any of this if the target length is zero:
845 * targ_len is set to zero unless we're appending or targ appears on
846 * RHS. And even if it is, we can optimise by skipping this chunk of
847 * code for zero targ_len. In the latter case, we don't need to update
848 * the slots in targ_chain with the (zero length) target string, since
849 * we set the len in such slots to 0 earlier, and since the Copy() is
850 * skipped on zero length, it doesn't matter what svpv_p->pv contains.
851 *
852 * On entry to this section the (pv,len) pairs in svpv_buf have the
853 * following meanings:
854 * (pv, len) a pure-plain or utf8 string
855 * (pv, -(len+extra)) a plain string which will expand by 'extra'
856 * bytes when converted to utf8
857 * (NULL, 0) left-most targ \ linked together R-to-L
858 * (next, 0) other targ / in targ_chain
859 *
860 * On exit, the targ contents will have been moved to the
861 * earliest place they are needed (e.g. $x = "abc$x" will shift them
862 * 3 bytes, while $x .= ... will leave them at the beginning);
863 * and dst_pv will point to the location within SvPVX(dsv) where the
864 * next arg should be copied.
865 */
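    /* Worked example (illustrative): for '$x = "abc" . $x' where lexical
     * $x currently holds "XY", targ_len is 2 and the first const segment
     * length is 3, so offset becomes 3; Move() shifts "XY" to byte offset
     * 3 of the grown buffer, the svpv slot for that $x occurrence gets its
     * len set to -2, and Phase 5 then copies "abc" into bytes 0..2 and
     * simply steps over the already-placed "XY", giving "abcXY".
     */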
866
867 svpv_base = svpv_buf;
868
869 if (targ_len) {
870 struct multiconcat_svpv *tc_stop;
871 char *targ_pv = dsv_pv;
872
873 assert(targ == dsv);
874 assert(is_append || targ_count);
875
876 if (is_append) {
877 dsv_pv += targ_len;
878 tc_stop = NULL;
879 }
880 else {
881 /* The targ appears on RHS, e.g. '$t = $a . $t . $t'.
882 * Move the current contents of targ to the first
883 * position where it's needed, and use that as the src buffer
884 * for any further uses (such as the second RHS $t above).
885 * In calculating the first position, we need to sum the
886 * lengths of all consts and args before that.
887 */
888
889 UNOP_AUX_item *lens = const_lens;
890 /* length of first const string segment */
891 STRLEN offset = lens->ssize > 0 ? lens->ssize : 0;
892
893 assert(targ_chain);
894 svpv_p = svpv_base;
895
896 for (;;) {
897 SSize_t len;
898 if (!svpv_p->pv)
899 break; /* the first targ argument */
900 /* add lengths of the next arg and const string segment */
901 len = svpv_p->len;
902 if (len < 0) /* variant args have this */
903 len = -len;
904 offset += (STRLEN)len;
905 len = (++lens)->ssize;
906 offset += (len >= 0) ? (STRLEN)len : 0;
907 if (!offset) {
908 /* all args and consts so far are empty; update
909 * the start position for the concat later */
910 svpv_base++;
911 const_lens++;
912 }
913 svpv_p++;
914 assert(svpv_p < svpv_end);
915 }
916
917 if (offset) {
918 targ_pv += offset;
919 Move(dsv_pv, targ_pv, targ_len, char);
920 /* a negative length implies don't Copy(), but do increment */
921 svpv_p->len = -targ_len;
922 slow_concat = TRUE;
923 }
924 else {
925 /* skip the first targ copy */
926 svpv_base++;
927 const_lens++;
928 dsv_pv += targ_len;
929 }
930
931 /* Don't populate the first targ slot in the loop below; it's
932 * either not used because we advanced svpv_base beyond it, or
933 * we already stored the special -targ_len value in it
934 */
935 tc_stop = svpv_p;
936 }
937
938 /* populate slots in svpv_buf representing targ on RHS */
939 while (targ_chain != tc_stop) {
940 struct multiconcat_svpv *p = targ_chain;
941 targ_chain = (struct multiconcat_svpv *)(p->pv);
942 p->pv = targ_pv;
943 p->len = (SSize_t)targ_len;
944 }
945 }
946
947
948 /* --------------------------------------------------------------
949 * Phase 5:
950 *
951 * Append all the args in svpv_buf, plus the const strings, to dsv.
952 *
953 * On entry to this section the (pv,len) pairs in svpv_buf have the
954 * following meanings:
955 * (pv, len) a pure-plain or utf8 string (which may be targ)
956 * (pv, -(len+extra)) a plain string which will expand by 'extra'
957 * bytes when converted to utf8
958 * (0, -len) left-most targ, whose content has already
959 * been copied. Just advance dsv_pv by len.
960 */
961
962 /* If there are no constant strings and no special case args
963 * (svpv_p->len < 0), use a simpler, more efficient concat loop
964 */
965 if (!slow_concat) {
966 for (svpv_p = svpv_base; svpv_p < svpv_end; svpv_p++) {
967 SSize_t len = svpv_p->len;
968 if (!len)
969 continue;
970 Copy(svpv_p->pv, dsv_pv, len, char);
971 dsv_pv += len;
972 }
973 const_lens += (svpv_end - svpv_base + 1);
974 }
975 else {
976 /* Note that we iterate the loop nargs+1 times: to append nargs
977 * arguments and nargs+1 constant strings. For example, "-$a-$b-"
978 */
979 svpv_p = svpv_base - 1;
980
981 for (;;) {
982 SSize_t len = (const_lens++)->ssize;
983
984 /* append next const string segment */
985 if (len > 0) {
986 Copy(const_pv, dsv_pv, len, char);
987 dsv_pv += len;
988 const_pv += len;
989 }
990
991 if (++svpv_p == svpv_end)
992 break;
993
994 /* append next arg */
995 len = svpv_p->len;
996
997 if (LIKELY(len > 0)) {
998 Copy(svpv_p->pv, dsv_pv, len, char);
999 dsv_pv += len;
1000 }
1001 else if (UNLIKELY(len < 0)) {
1002 /* negative length indicates two special cases */
1003 const char *p = svpv_p->pv;
1004 len = -len;
1005 if (UNLIKELY(p)) {
1006 /* copy plain-but-variant pv to a utf8 targ */
1007 char * end_pv = dsv_pv + len;
1008 assert(dst_utf8);
1009 while (dsv_pv < end_pv) {
1010 U8 c = (U8) *p++;
1011 append_utf8_from_native_byte(c, (U8**)&dsv_pv);
1012 }
1013 }
1014 else
1015 /* arg is already-copied targ */
1016 dsv_pv += len;
1017 }
1018
1019 }
1020 }
1021
1022 *dsv_pv = '\0';
1023 SvCUR_set(dsv, dsv_pv - SvPVX(dsv));
1024 assert(grow >= SvCUR(dsv) + 1);
1025 assert(SvLEN(dsv) >= SvCUR(dsv) + 1);
1026
1027 /* --------------------------------------------------------------
1028 * Phase 6:
1029 *
1030 * Handle overloading. If an overloaded arg or targ was detected
1031 * earlier, dsv will have been set to a new mortal, and any args and
1032 * consts to the left of the first overloaded arg will have been
1033 * accumulated to it. This section completes any further concatenation
1034 * steps with overloading handled.
1035 */
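    /* Illustrative example: for '$lex = $a . $obj . "-" . $b' where $obj
     * has '.' overloading, Phase 1 stops at $obj, so by this point dsv is
     * a TEMP holding the stringified $a and the const segments before
     * $obj; the remaining args ($obj, $b) and the const segment "-" are
     * then applied one at a time via concat overload calls (falling back
     * to sv_catsv_nomg() for non-overloaded operands), and the result is
     * finally assigned or appended to targ.
     */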
1036
1037 if (UNLIKELY(dsv != targ)) {
1038 SV *res;
1039
1040 SvFLAGS(dsv) |= dst_utf8;
1041
1042 if (SP <= toparg) {
1043 /* Stringifying the RHS was abandoned because *SP
1044 * is overloaded. dsv contains all the concatted strings
1045 * before *SP. Apply the rest of the args using overloading.
1046 */
1047 SV *left, *right, *res;
1048 int i;
1049 bool getmg = FALSE;
1050 /* number of args already concatted */
1051 SSize_t n = (nargs - 1) - (toparg - SP);
1052 /* current arg is either the first
1053 * or second value to be concatted
1054 * (including constant strings), so would
1055 * form part of the first concat */
1056 bool first_concat = ( n == 0
1057 || (n == 1 && const_lens[-2].ssize < 0
1058 && const_lens[-1].ssize < 0));
1059 int f_assign = first_concat ? 0 : AMGf_assign;
1060
1061 left = dsv;
1062
1063 for (; n < nargs; n++) {
1064 /* loop twice, first applying the arg, then the const segment */
1065 for (i = 0; i < 2; i++) {
1066 if (i) {
1067 /* append next const string segment */
1068 STRLEN len = (STRLEN)((const_lens++)->ssize);
1069 /* a length of -1 implies no constant string
1070 * rather than a zero-length one, e.g.
1071 * ($a . $b) versus ($a . "" . $b)
1072 */
1073 if ((SSize_t)len < 0)
1074 continue;
1075
1076 /* set right to the next constant string segment */
1077 right = newSVpvn_flags(const_pv, len,
1078 (dst_utf8 | SVs_TEMP));
1079 const_pv += len;
1080 }
1081 else {
1082 /* append next arg */
1083 right = *SP++;
1084 if (getmg)
1085 SvGETMAGIC(right);
1086 else
1087 /* SvGETMAGIC already called on this SV just
1088 * before we broke from the loop earlier */
1089 getmg = TRUE;
1090
1091 if (first_concat && n == 0 && const_lens[-1].ssize < 0) {
1092 /* nothing before the current arg; repeat the
1093 * loop to get a second arg */
1094 left = right;
1095 first_concat = FALSE;
1096 continue;
1097 }
1098 }
1099
1100 if ((SvAMAGIC(left) || SvAMAGIC(right))
1101 && (res = amagic_call(left, right, concat_amg, f_assign))
1102 )
1103 left = res;
1104 else {
1105 if (left != dsv) {
1106 sv_setsv(dsv, left);
1107 left = dsv;
1108 }
1109 sv_catsv_nomg(left, right);
1110 }
1111 f_assign = AMGf_assign;
1112 }
1113 }
1114 dsv = left;
1115 }
1116
1117 /* assign/append RHS (dsv) to LHS (targ) */
1118 if (is_append) {
1119 if ((SvAMAGIC(targ) || SvAMAGIC(dsv))
1120 && (res = amagic_call(targ, dsv, concat_amg, AMGf_assign))
1121 )
1122 sv_setsv(targ, res);
1123 else
1124 sv_catsv_nomg(targ, dsv);
1125 }
1126 else
1127 sv_setsv(targ, dsv);
1128 }
1129
1130 /* --------------------------------------------------------------
1131 * Phase 7:
1132 *
1133 * return result
1134 */
1135
1136 phase7:
1137
1138 SP -= stack_adj;
1139 SvTAINT(targ);
1140 SETTARG;
1141 RETURN;
1142}
1143
1144
1145/* push the elements of av onto the stack.
1146 * Returns PL_op->op_next to allow tail-call optimisation of its callers */
1147
1148STATIC OP*
1149S_pushav(pTHX_ AV* const av)
1150{
1151 dSP;
1152 const SSize_t maxarg = AvFILL(av) + 1;
1153 EXTEND(SP, maxarg);
1154 if (UNLIKELY(SvRMAGICAL(av))) {
1155 PADOFFSET i;
1156 for (i=0; i < (PADOFFSET)maxarg; i++) {
1157 SV ** const svp = av_fetch(av, i, FALSE);
1158 SP[i+1] = svp ? *svp : &PL_sv_undef;
1159 }
1160 }
1161 else {
1162 PADOFFSET i;
1163 for (i=0; i < (PADOFFSET)maxarg; i++) {
1164 SV * const sv = AvARRAY(av)[i];
1165 SP[i+1] = LIKELY(sv) ? sv : &PL_sv_undef;
1166 }
1167 }
1168 SP += maxarg;
1169 PUTBACK;
1170 return NORMAL;
1171}
1172
1173
1174/* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */
1175
1176PP(pp_padrange)
1177{
1178 dSP;
1179 PADOFFSET base = PL_op->op_targ;
1180 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
1181 if (PL_op->op_flags & OPf_SPECIAL) {
1182 /* fake the RHS of my ($x,$y,..) = @_ */
1183 PUSHMARK(SP);
1184 (void)S_pushav(aTHX_ GvAVn(PL_defgv));
1185 SPAGAIN;
1186 }
1187
1188 /* note, this is only skipped for compile-time-known void cxt */
1189 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
1190 int i;
1191
1192 EXTEND(SP, count);
1193 PUSHMARK(SP);
1194 for (i = 0; i <count; i++)
1195 *++SP = PAD_SV(base+i);
1196 }
1197 if (PL_op->op_private & OPpLVAL_INTRO) {
1198 SV **svp = &(PAD_SVl(base));
1199 const UV payload = (UV)(
1200 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
1201 | (count << SAVE_TIGHT_SHIFT)
1202 | SAVEt_CLEARPADRANGE);
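	/* The packed save-stack entry puts the save type in the low
	 * SAVE_TIGHT_SHIFT bits, the pad count in the next
	 * OPpPADRANGE_COUNTSHIFT bits, and the pad base above that;
	 * conceptually:
	 *
	 *     [ base | count | SAVEt_CLEARPADRANGE ]
	 *      high bits .................. low bits
	 */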
1203 int i;
1204
1205 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
1206 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT))
1207 == (Size_t)base);
1208 {
1209 dSS_ADD;
1210 SS_ADD_UV(payload);
1211 SS_ADD_END(1);
1212 }
1213
1214 for (i = 0; i <count; i++)
1215 SvPADSTALE_off(*svp++); /* mark lexical as active */
1216 }
1217 RETURN;
1218}
1219
1220
1221PP(pp_padsv)
1222{
1223 dSP;
1224 EXTEND(SP, 1);
1225 {
1226 OP * const op = PL_op;
1227 /* access PL_curpad once */
1228 SV ** const padentry = &(PAD_SVl(op->op_targ));
1229 {
1230 dTARG;
1231 TARG = *padentry;
1232 PUSHs(TARG);
1233 PUTBACK; /* no pop/push after this, TOPs ok */
1234 }
1235 if (op->op_flags & OPf_MOD) {
1236 if (op->op_private & OPpLVAL_INTRO)
1237 if (!(op->op_private & OPpPAD_STATE))
1238 save_clearsv(padentry);
1239 if (op->op_private & OPpDEREF) {
1240 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather
1241 than TARG reduces the scope of TARG, so it does not
1242 span the call to save_clearsv, resulting in smaller
1243 machine code. */
1244 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
1245 }
1246 }
1247 return op->op_next;
1248 }
1249}
1250
1251PP(pp_readline)
1252{
1253 dSP;
1254 /* pp_coreargs pushes a NULL to indicate no args passed to
1255 * CORE::readline() */
1256 if (TOPs) {
1257 SvGETMAGIC(TOPs);
1258 tryAMAGICunTARGETlist(iter_amg, 0);
1259 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
1260 }
1261 else PL_last_in_gv = PL_argvgv, PL_stack_sp--;
1262 if (!isGV_with_GP(PL_last_in_gv)) {
1263 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
1264 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));
1265 else {
1266 dSP;
1267 XPUSHs(MUTABLE_SV(PL_last_in_gv));
1268 PUTBACK;
1269 Perl_pp_rv2gv(aTHX);
1270 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
1271 assert((SV*)PL_last_in_gv == &PL_sv_undef || isGV_with_GP(PL_last_in_gv));
1272 }
1273 }
1274 return do_readline();
1275}
1276
1277PP(pp_eq)
1278{
1279 dSP;
1280 SV *left, *right;
1281
1282 tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
1283 right = POPs;
1284 left = TOPs;
1285 SETs(boolSV(
1286 (SvIOK_notUV(left) && SvIOK_notUV(right))
1287 ? (SvIVX(left) == SvIVX(right))
1288 : ( do_ncmp(left, right) == 0)
1289 ));
1290 RETURN;
1291}
1292
1293
1294/* also used for: pp_i_preinc() */
1295
1296PP(pp_preinc)
1297{
1298 SV *sv = *PL_stack_sp;
1299
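    /* The flag test below selects the in-place fast path only for a plain,
     * unmagical IOK-only scalar (not readonly/COW/ref, no NV or PV values,
     * not flagged as a UV) whose value isn't IV_MAX, so incrementing can't
     * overflow; everything else falls through to sv_inc(). */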
1300 if (LIKELY(((sv->sv_flags &
1301 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
1302 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
1303 == SVf_IOK))
1304 && SvIVX(sv) != IV_MAX)
1305 {
1306 SvIV_set(sv, SvIVX(sv) + 1);
1307 }
1308 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
1309 sv_inc(sv);
1310 SvSETMAGIC(sv);
1311 return NORMAL;
1312}
1313
1314
1315/* also used for: pp_i_predec() */
1316
1317PP(pp_predec)
1318{
1319 SV *sv = *PL_stack_sp;
1320
1321 if (LIKELY(((sv->sv_flags &
1322 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
1323 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
1324 == SVf_IOK))
1325 && SvIVX(sv) != IV_MIN)
1326 {
1327 SvIV_set(sv, SvIVX(sv) - 1);
1328 }
1329 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
1330 sv_dec(sv);
1331 SvSETMAGIC(sv);
1332 return NORMAL;
1333}
1334
1335
1336/* also used for: pp_orassign() */
1337
1338PP(pp_or)
1339{
1340 dSP;
1341 SV *sv;
1342 PERL_ASYNC_CHECK();
1343 sv = TOPs;
1344 if (SvTRUE_NN(sv))
1345 RETURN;
1346 else {
1347 if (PL_op->op_type == OP_OR)
1348 --SP;
1349 RETURNOP(cLOGOP->op_other);
1350 }
1351}
1352
1353
1354/* also used for: pp_dor() pp_dorassign() */
1355
1356PP(pp_defined)
1357{
1358 dSP;
1359 SV* sv;
1360 bool defined;
1361 const int op_type = PL_op->op_type;
1362 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);
1363
1364 if (is_dor) {
1365 PERL_ASYNC_CHECK();
1366 sv = TOPs;
1367 if (UNLIKELY(!sv || !SvANY(sv))) {
1368 if (op_type == OP_DOR)
1369 --SP;
1370 RETURNOP(cLOGOP->op_other);
1371 }
1372 }
1373 else {
1374 /* OP_DEFINED */
1375 sv = POPs;
1376 if (UNLIKELY(!sv || !SvANY(sv)))
1377 RETPUSHNO;
1378 }
1379
1380 defined = FALSE;
1381 switch (SvTYPE(sv)) {
1382 case SVt_PVAV:
1383 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
1384 defined = TRUE;
1385 break;
1386 case SVt_PVHV:
1387 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
1388 defined = TRUE;
1389 break;
1390 case SVt_PVCV:
1391 if (CvROOT(sv) || CvXSUB(sv))
1392 defined = TRUE;
1393 break;
1394 default:
1395 SvGETMAGIC(sv);
1396 if (SvOK(sv))
1397 defined = TRUE;
1398 break;
1399 }
1400
1401 if (is_dor) {
1402 if(defined)
1403 RETURN;
1404 if(op_type == OP_DOR)
1405 --SP;
1406 RETURNOP(cLOGOP->op_other);
1407 }
1408 /* assuming OP_DEFINED */
1409 if(defined)
1410 RETPUSHYES;
1411 RETPUSHNO;
1412}
1413
1414
1415
1416PP(pp_add)
1417{
1418 dSP; dATARGET; bool useleft; SV *svl, *svr;
1419
1420 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
1421 svr = TOPs;
1422 svl = TOPm1s;
1423
1424#ifdef PERL_PRESERVE_IVUV
1425
1426 /* special-case some simple common cases */
1427 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
1428 IV il, ir;
1429 U32 flags = (svl->sv_flags & svr->sv_flags);
1430 if (flags & SVf_IOK) {
1431 /* both args are simple IVs */
1432 UV topl, topr;
1433 il = SvIVX(svl);
1434 ir = SvIVX(svr);
1435 do_iv:
1436 topl = ((UV)il) >> (UVSIZE * 8 - 2);
1437 topr = ((UV)ir) >> (UVSIZE * 8 - 2);
1438
1439 /* if both are in a range that can't under/overflow, do a
1440 * simple integer add: if the top of both numbers
1441 * are 00 or 11, then it's safe */
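                /* To spell the test out: topl/topr are the top two bits of
                 * each operand viewed as a UV, and the check below passes
                 * only when each is 0b00 or 0b11 (top+1 is then 1 or 4, so
                 * bit 1 is clear). Top bits 00 means the IV lies in
                 * [0, 2**(UVSIZE*8-2)); top bits 11 means it lies in
                 * [-2**(UVSIZE*8-2), 0). The sum of any two such values
                 * fits in an IV, so the plain addition cannot overflow. */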
1442 if (!( ((topl+1) | (topr+1)) & 2)) {
1443 SP--;
1444 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
1445 SETs(TARG);
1446 RETURN;
1447 }
1448 goto generic;
1449 }
1450 else if (flags & SVf_NOK) {
1451 /* both args are NVs */
1452 NV nl = SvNVX(svl);
1453 NV nr = SvNVX(svr);
1454
1455 if (
1456#if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
1457 !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
1458 && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
1459#else
1460 nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
1461#endif
1462 )
1463 /* nothing was lost by converting to IVs */
1464 goto do_iv;
1465 SP--;
1466 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
1467 SETs(TARG);
1468 RETURN;
1469 }
1470 }
1471
1472 generic:
1473
1474 useleft = USE_LEFT(svl);
1475 /* We must see if we can perform the addition with integers if possible,
1476 as the integer code detects overflow while the NV code doesn't.
1477 If either argument hasn't had a numeric conversion yet attempt to get
1478 the IV. It's important to do this now, rather than just assuming that
1479 it's not IOK as a PV of "9223372036854775806" may not take well to NV
1480 addition, and an SV which is NOK, NV=6.0 ought to be coerced to
1481 integer in case the second argument is IV=9223372036854775806
1482 We can (now) rely on sv_2iv to do the right thing, only setting the
1483 public IOK flag if the value in the NV (or PV) slot is truly integer.
1484
1485 A side effect is that this also aggressively prefers integer maths over
1486 fp maths for integer values.
1487
1488 How to detect overflow?
1489
1490 C 99 section 6.2.6.1 says
1491
1492 The range of nonnegative values of a signed integer type is a subrange
1493 of the corresponding unsigned integer type, and the representation of
1494 the same value in each type is the same. A computation involving
1495 unsigned operands can never overflow, because a result that cannot be
1496 represented by the resulting unsigned integer type is reduced modulo
1497 the number that is one greater than the largest value that can be
1498 represented by the resulting type.
1499
1500 (the 9th paragraph)
1501
1502 which I read as "unsigned ints wrap."
1503
1504 signed integer overflow seems to be classed as "exception condition"
1505
1506 If an exceptional condition occurs during the evaluation of an
1507 expression (that is, if the result is not mathematically defined or not
1508 in the range of representable values for its type), the behavior is
1509 undefined.
1510
1511 (6.5, the 5th paragraph)
1512
1513 I had assumed that on 2s complement machines signed arithmetic would
1514 wrap, hence coded pp_add and pp_subtract on the assumption that
1515 everything perl builds on would be happy. After much wailing and
1516 gnashing of teeth it would seem that irix64 knows its ANSI spec well,
1517 knows that it doesn't need to, and doesn't. Bah. Anyway, the all-
1518 unsigned code below is actually shorter than the old code. :-)
1519 */
1520
1521 if (SvIV_please_nomg(svr)) {
1522 /* Unless the left argument is integer in range we are going to have to
1523 use NV maths. Hence only attempt to coerce the right argument if
1524 we know the left is integer. */
1525 UV auv = 0;
1526 bool auvok = FALSE;
1527 bool a_valid = 0;
1528
1529 if (!useleft) {
1530 auv = 0;
1531 a_valid = auvok = 1;
1532 /* left operand is undef, treat as zero. + 0 is identity,
1533 Could SETi or SETu right now, but space optimise by not adding
1534 lots of code to speed up what is probably a rarish case. */
1535 } else {
1536 /* Left operand is defined, so is it IV? */
1537 if (SvIV_please_nomg(svl)) {
1538 if ((auvok = SvUOK(svl)))
1539 auv = SvUVX(svl);
1540 else {
1541 const IV aiv = SvIVX(svl);
1542 if (aiv >= 0) {
1543 auv = aiv;
1544 auvok = 1; /* Now acting as a sign flag. */
1545 } else {
1546 auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
1547 }
1548 }
1549 a_valid = 1;
1550 }
1551 }
1552 if (a_valid) {
1553 bool result_good = 0;
1554 UV result;
1555 UV buv;
1556 bool buvok = SvUOK(svr);
1557
1558 if (buvok)
1559 buv = SvUVX(svr);
1560 else {
1561 const IV biv = SvIVX(svr);
1562 if (biv >= 0) {
1563 buv = biv;
1564 buvok = 1;
1565 } else
1566 buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
1567 }
1568 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve,
1569 else "IV" now, independent of how it came in.
1570 if a, b represents positive, A, B negative, a maps to -A etc
1571 a + b => (a + b)
1572 A + b => -(a - b)
1573 a + B => (a - b)
1574 A + B => -(a + b)
1575 all UV maths. negate result if A negative.
1576 add if signs same, subtract if signs differ. */
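	    /* Worked example (illustrative): for 5 + (-7), auv=5 (auvok
	     * true) and buv=7 (buvok false); the signs differ and buv > auv,
	     * so result = buv - auv = 2 and auvok is flipped to mark the
	     * result as negative, giving SETi(-2) below. For (-3) + (-4),
	     * the signs are the same, result = 7 with auvok false, giving
	     * SETi(-7). */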
1577
1578 if (auvok ^ buvok) {
1579 /* Signs differ. */
1580 if (auv >= buv) {
1581 result = auv - buv;
1582 /* Must get smaller */
1583 if (result <= auv)
1584 result_good = 1;
1585 } else {
1586 result = buv - auv;
1587 if (result <= buv) {
 1588 /* result really should be -(auv-buv); as it's the negation
 1589 of the true value, we need to swap our result flag */
1590 auvok = !auvok;
1591 result_good = 1;
1592 }
1593 }
1594 } else {
1595 /* Signs same */
1596 result = auv + buv;
1597 if (result >= auv)
1598 result_good = 1;
1599 }
1600 if (result_good) {
1601 SP--;
1602 if (auvok)
1603 SETu( result );
1604 else {
1605 /* Negate result */
1606 if (result <= (UV)IV_MIN)
1607 SETi(result == (UV)IV_MIN
1608 ? IV_MIN : -(IV)result);
1609 else {
1610 /* result valid, but out of range for IV. */
1611 SETn( -(NV)result );
1612 }
1613 }
1614 RETURN;
1615 } /* Overflow, drop through to NVs. */
1616 }
1617 }
1618
1619#else
1620 useleft = USE_LEFT(svl);
1621#endif
1622
1623 {
1624 NV value = SvNV_nomg(svr);
1625 (void)POPs;
1626 if (!useleft) {
1627 /* left operand is undef, treat as zero. + 0.0 is identity. */
1628 SETn(value);
1629 RETURN;
1630 }
1631 SETn( value + SvNV_nomg(svl) );
1632 RETURN;
1633 }
1634}
1635
1636
1637/* also used for: pp_aelemfast_lex() */
1638
1639PP(pp_aelemfast)
1640{
1641 dSP;
1642 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
1643 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
1644 const U32 lval = PL_op->op_flags & OPf_MOD;
1645 const I8 key = (I8)PL_op->op_private;
1646 SV** svp;
1647 SV *sv;
1648
1649 assert(SvTYPE(av) == SVt_PVAV);
1650
1651 EXTEND(SP, 1);
1652
1653 /* inlined av_fetch() for simple cases ... */
1654 if (!SvRMAGICAL(av) && key >= 0 && key <= AvFILLp(av)) {
1655 sv = AvARRAY(av)[key];
1656 if (sv) {
1657 PUSHs(sv);
1658 RETURN;
1659 }
1660 }
1661
1662 /* ... else do it the hard way */
1663 svp = av_fetch(av, key, lval);
1664 sv = (svp ? *svp : &PL_sv_undef);
1665
1666 if (UNLIKELY(!svp && lval))
1667 DIE(aTHX_ PL_no_aelem, (int)key);
1668
1669 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
1670 mg_get(sv);
1671 PUSHs(sv);
1672 RETURN;
1673}
1674
1675PP(pp_join)
1676{
1677 dSP; dMARK; dTARGET;
1678 MARK++;
1679 do_join(TARG, *MARK, MARK, SP);
1680 SP = MARK;
1681 SETs(TARG);
1682 RETURN;
1683}
1684
1685/* Oversized hot code. */
1686
1687/* also used for: pp_say() */
1688
1689PP(pp_print)
1690{
1691 dSP; dMARK; dORIGMARK;
1692 PerlIO *fp;
1693 MAGIC *mg;
1694 GV * const gv
1695 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;
1696 IO *io = GvIO(gv);
1697
1698 if (io
1699 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
1700 {
1701 had_magic:
1702 if (MARK == ORIGMARK) {
1703 /* If using default handle then we need to make space to
1704 * pass object as 1st arg, so move other args up ...
1705 */
1706 MEXTEND(SP, 1);
1707 ++MARK;
1708 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
1709 ++SP;
1710 }
1711 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
1712 mg,
1713 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
1714 | (PL_op->op_type == OP_SAY
1715 ? TIED_METHOD_SAY : 0)), sp - mark);
1716 }
1717 if (!io) {
1718 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
1719 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
1720 goto had_magic;
1721 report_evil_fh(gv);
1722 SETERRNO(EBADF,RMS_IFI);
1723 goto just_say_no;
1724 }
1725 else if (!(fp = IoOFP(io))) {
1726 if (IoIFP(io))
1727 report_wrongway_fh(gv, '<');
1728 else
1729 report_evil_fh(gv);
1730 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);
1731 goto just_say_no;
1732 }
1733 else {
1734 SV * const ofs = GvSV(PL_ofsgv); /* $, */
1735 MARK++;
1736 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
1737 while (MARK <= SP) {
1738 if (!do_print(*MARK, fp))
1739 break;
1740 MARK++;
1741 if (MARK <= SP) {
1742 /* don't use 'ofs' here - it may be invalidated by magic callbacks */
1743 if (!do_print(GvSV(PL_ofsgv), fp)) {
1744 MARK--;
1745 break;
1746 }
1747 }
1748 }
1749 }
1750 else {
1751 while (MARK <= SP) {
1752 if (!do_print(*MARK, fp))
1753 break;
1754 MARK++;
1755 }
1756 }
1757 if (MARK <= SP)
1758 goto just_say_no;
1759 else {
1760 if (PL_op->op_type == OP_SAY) {
1761 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))
1762 goto just_say_no;
1763 }
1764 else if (PL_ors_sv && SvOK(PL_ors_sv))
1765 if (!do_print(PL_ors_sv, fp)) /* $\ */
1766 goto just_say_no;
1767
1768 if (IoFLAGS(io) & IOf_FLUSH)
1769 if (PerlIO_flush(fp) == EOF)
1770 goto just_say_no;
1771 }
1772 }
1773 SP = ORIGMARK;
1774 XPUSHs(&PL_sv_yes);
1775 RETURN;
1776
1777 just_say_no:
1778 SP = ORIGMARK;
1779 XPUSHs(&PL_sv_undef);
1780 RETURN;
1781}
1782
1783
1784/* do the common parts of pp_padhv() and pp_rv2hv()
1785 * It assumes the caller has done EXTEND(SP, 1) or equivalent.
1786 * 'is_keys' indicates the OPpPADHV_ISKEYS/OPpRV2HV_ISKEYS flag is set.
1787 * 'has_targ' indicates that the op has a target - this should
1788 * be a compile-time constant so that the code can constant-folded as
1789 * appropriate
1790 * */
1791
1792PERL_STATIC_INLINE OP*
1793S_padhv_rv2hv_common(pTHX_ HV *hv, U8 gimme, bool is_keys, bool has_targ)
1794{
1795 bool is_tied;
1796 bool is_bool;
1797 MAGIC *mg;
1798 dSP;
1799 IV i;
1800 SV *sv;
1801
1802 assert(PL_op->op_type == OP_PADHV || PL_op->op_type == OP_RV2HV);
1803
1804 if (gimme == G_ARRAY) {
1805 hv_pushkv(hv, 3);
1806 return NORMAL;
1807 }
1808
1809 if (is_keys)
1810 /* 'keys %h' masquerading as '%h': reset iterator */
1811 (void)hv_iterinit(hv);
1812
1813 if (gimme == G_VOID)
1814 return NORMAL;
1815
1816 is_bool = ( PL_op->op_private & OPpTRUEBOOL
1817 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
1818 && block_gimme() == G_VOID));
1819 is_tied = SvRMAGICAL(hv) && (mg = mg_find(MUTABLE_SV(hv), PERL_MAGIC_tied));
1820
1821 if (UNLIKELY(is_tied)) {
1822 if (is_keys && !is_bool) {
1823 i = 0;
1824 while (hv_iternext(hv))
1825 i++;
1826 goto push_i;
1827 }
1828 else {
1829 sv = magic_scalarpack(hv, mg);
1830 goto push_sv;
1831 }
1832 }
1833 else {
1834 i = HvUSEDKEYS(hv);
1835 if (is_bool) {
1836 sv = i ? &PL_sv_yes : &PL_sv_zero;
1837 push_sv:
1838 PUSHs(sv);
1839 }
1840 else {
1841 push_i:
1842 if (has_targ) {
1843 dTARGET;
1844 PUSHi(i);
1845 }
1846 else
1847#ifdef PERL_OP_PARENT
1848 if (is_keys) {
1849 /* parent op should be an unused OP_KEYS whose targ we can
1850 * use */
1851 dTARG;
1852 OP *k;
1853
1854 assert(!OpHAS_SIBLING(PL_op));
1855 k = PL_op->op_sibparent;
1856 assert(k->op_type == OP_KEYS);
1857 TARG = PAD_SV(k->op_targ);
1858 PUSHi(i);
1859 }
1860 else
1861#endif
1862 mPUSHi(i);
1863 }
1864 }
1865
1866 PUTBACK;
1867 return NORMAL;
1868}
1869
1870
1871/* This is also called directly by pp_lvavref. */
1872PP(pp_padav)
1873{
1874 dSP; dTARGET;
1875 U8 gimme;
1876 assert(SvTYPE(TARG) == SVt_PVAV);
1877 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO ))
1878 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) ))
1879 SAVECLEARSV(PAD_SVl(PL_op->op_targ));
1880 EXTEND(SP, 1);
1881
1882 if (PL_op->op_flags & OPf_REF) {
1883 PUSHs(TARG);
1884 RETURN;
1885 }
1886 else if (PL_op->op_private & OPpMAYBE_LVSUB) {
1887 const I32 flags = is_lvalue_sub();
1888 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1889 if (GIMME_V == G_SCALAR)
1890 /* diag_listed_as: Can't return %s to lvalue scalar context */
1891 Perl_croak(aTHX_ "Can't return array to lvalue scalar context");
1892 PUSHs(TARG);
1893 RETURN;
1894 }
1895 }
1896
1897 gimme = GIMME_V;
1898 if (gimme == G_ARRAY)
1899 return S_pushav(aTHX_ (AV*)TARG);
1900
1901 if (gimme == G_SCALAR) {
1902 const SSize_t maxarg = AvFILL(MUTABLE_AV(TARG)) + 1;
1903 if (!maxarg)
1904 PUSHs(&PL_sv_zero);
1905 else if (PL_op->op_private & OPpTRUEBOOL)
1906 PUSHs(&PL_sv_yes);
1907 else
1908 mPUSHi(maxarg);
1909 }
1910 RETURN;
1911}
1912
1913
1914PP(pp_padhv)
1915{
1916 dSP; dTARGET;
1917 U8 gimme;
1918
1919 assert(SvTYPE(TARG) == SVt_PVHV);
1920 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO ))
1921 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) ))
1922 SAVECLEARSV(PAD_SVl(PL_op->op_targ));
1923
1924 EXTEND(SP, 1);
1925
1926 if (PL_op->op_flags & OPf_REF) {
1927 PUSHs(TARG);
1928 RETURN;
1929 }
1930 else if (PL_op->op_private & OPpMAYBE_LVSUB) {
1931 const I32 flags = is_lvalue_sub();
1932 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1933 if (GIMME_V == G_SCALAR)
1934 /* diag_listed_as: Can't return %s to lvalue scalar context */
1935 Perl_croak(aTHX_ "Can't return hash to lvalue scalar context");
1936 PUSHs(TARG);
1937 RETURN;
1938 }
1939 }
1940
1941 gimme = GIMME_V;
1942
1943 return S_padhv_rv2hv_common(aTHX_ (HV*)TARG, gimme,
1944 cBOOL(PL_op->op_private & OPpPADHV_ISKEYS),
1945 0 /* has_targ*/);
1946}
1947
1948
1949/* also used for: pp_rv2hv() */
1950/* also called directly by pp_lvavref */
1951
1952PP(pp_rv2av)
1953{
1954 dSP; dTOPss;
1955 const U8 gimme = GIMME_V;
1956 static const char an_array[] = "an ARRAY";
1957 static const char a_hash[] = "a HASH";
1958 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
1959 || PL_op->op_type == OP_LVAVREF;
1960 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
1961
1962 SvGETMAGIC(sv);
1963 if (SvROK(sv)) {
1964 if (UNLIKELY(SvAMAGIC(sv))) {
1965 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
1966 }
1967 sv = SvRV(sv);
1968 if (UNLIKELY(SvTYPE(sv) != type))
1969 /* diag_listed_as: Not an ARRAY reference */
1970 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
1971 else if (UNLIKELY(PL_op->op_flags & OPf_MOD
1972 && PL_op->op_private & OPpLVAL_INTRO))
1973 Perl_croak(aTHX_ "%s", PL_no_localize_ref);
1974 }
1975 else if (UNLIKELY(SvTYPE(sv) != type)) {
1976 GV *gv;
1977
1978 if (!isGV_with_GP(sv)) {
1979 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
1980 type, &sp);
1981 if (!gv)
1982 RETURN;
1983 }
1984 else {
1985 gv = MUTABLE_GV(sv);
1986 }
1987 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
1988 if (PL_op->op_private & OPpLVAL_INTRO)
1989 sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
1990 }
1991 if (PL_op->op_flags & OPf_REF) {
1992 SETs(sv);
1993 RETURN;
1994 }
1995 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
1996 const I32 flags = is_lvalue_sub();
1997 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1998 if (gimme != G_ARRAY)
1999 goto croak_cant_return;
2000 SETs(sv);
2001 RETURN;
2002 }
2003 }
2004
2005 if (is_pp_rv2av) {
2006 AV *const av = MUTABLE_AV(sv);
2007
2008 if (gimme == G_ARRAY) {
2009 SP--;
2010 PUTBACK;
2011 return S_pushav(aTHX_ av);
2012 }
2013
2014 if (gimme == G_SCALAR) {
2015 const SSize_t maxarg = AvFILL(av) + 1;
2016 if (PL_op->op_private & OPpTRUEBOOL)
2017 SETs(maxarg ? &PL_sv_yes : &PL_sv_zero);
2018 else {
2019 dTARGET;
2020 SETi(maxarg);
2021 }
2022 }
2023 }
2024 else {
2025 SP--; PUTBACK;
2026 return S_padhv_rv2hv_common(aTHX_ (HV*)sv, gimme,
2027 cBOOL(PL_op->op_private & OPpRV2HV_ISKEYS),
2028 1 /* has_targ*/);
2029 }
2030 RETURN;
2031
2032 croak_cant_return:
2033 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
2034 is_pp_rv2av ? "array" : "hash");
2035 RETURN;
2036}
2037
2038STATIC void
2039S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
2040{
2041 PERL_ARGS_ASSERT_DO_ODDBALL;
2042
2043 if (*oddkey) {
2044 if (ckWARN(WARN_MISC)) {
2045 const char *err;
2046 if (oddkey == firstkey &&
2047 SvROK(*oddkey) &&
2048 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
2049 SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
2050 {
2051 err = "Reference found where even-sized list expected";
2052 }
2053 else
2054 err = "Odd number of elements in hash assignment";
2055 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
2056 }
2057
2058 }
2059}
2060
2061
2062/* Do a mark and sweep with the SVf_BREAK flag to detect elements which
2063 * are common to both the LHS and RHS of an aassign, and replace them
2064 * with copies. All these copies are made before the actual list assign is
2065 * done.
2066 *
2067 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
2068 * element ($b) to the first LH element ($a) modifies $a; when the
2069 * second assignment is done, the second RH element now has the wrong
2070 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
2071 * Note that we don't need to make a mortal copy of $b.
2072 *
2073 * The algorithm below works by, for every RHS element, marking the
2074 * corresponding LHS target element with SVf_BREAK. Then, if an RHS
2075 * element is found with SVf_BREAK already set, it must be the same SV
2076 * as an earlier-marked LHS target and would be modified, so make a copy.
2077 * Note that by scanning both LHS and RHS in lockstep, we avoid
2078 * unnecessary copies (like $b above) compared with a naive
2079 * "mark all LHS; copy all marked RHS; unmark all LHS".
2080 *
2081 * If the LHS element is a 'my' declaration and has a refcount of 1, then
2082 * it can't be common and can be skipped.
2083 *
2084 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
2085 * that we thought we didn't need to call S_aassign_copy_common(), but we
2086 * have anyway for sanity checking. If we find we need to copy, then panic.
2087 */
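/* Illustrative example (a sketch, not part of the algorithm): for
 *     ($x, $y, $z) = ($y, 1, $x);
 * the lockstep scan marks $x and inspects RHS[1] (the constant 1,
 * unmarked), then marks $y and inspects RHS[2] ($x, already marked, so
 * it is replaced with a mortal copy).  RHS[0] ($y) is never inspected,
 * since the first RHS element can't have been clobbered yet, and $z is
 * never marked because the RHS runs out first. */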
2088
2089PERL_STATIC_INLINE void
2090S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
2091 SV **firstrelem, SV **lastrelem
2092#ifdef DEBUGGING
2093 , bool fake
2094#endif
2095)
2096{
2097 dVAR;
2098 SV **relem;
2099 SV **lelem;
2100 SSize_t lcount = lastlelem - firstlelem + 1;
2101 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
2102 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
2103 bool copy_all = FALSE;
2104
2105 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
2106 assert(firstlelem < lastlelem); /* at least 2 LH elements */
2107 assert(firstrelem < lastrelem); /* at least 2 RH elements */
2108
2109
2110 lelem = firstlelem;
2111 /* we never have to copy the first RH element; it can't be corrupted
2112 * by assigning something to the corresponding first LH element.
2113 * So each pass of the scan loop below does: mark LHS[N]; test RHS[N+1]
2114 */
2115 relem = firstrelem + 1;
2116
2117 for (; relem <= lastrelem; relem++) {
2118 SV *svr;
2119
2120 /* mark next LH element */
2121
2122 if (--lcount >= 0) {
2123 SV *svl = *lelem++;
2124
2125 if (UNLIKELY(!svl)) {/* skip AV alias marker */
2126 assert (lelem <= lastlelem);
2127 svl = *lelem++;
2128 lcount--;
2129 }
2130
2131 assert(svl);
2132 if (SvSMAGICAL(svl)) {
2133 copy_all = TRUE;
2134 }
2135 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
2136 if (!marked)
2137 return;
2138 /* this LH element will consume all further args;
2139 * no need to mark any further LH elements (if any).
2140 * But we still need to scan any remaining RHS elements;
2141 * set lcount negative to distinguish from lcount == 0,
2142 * so the loop condition continues being true
2143 */
2144 lcount = -1;
2145 lelem--; /* no need to unmark this element */
2146 }
2147 else if (!(do_rc1 && SvREFCNT(svl) == 1) && !SvIMMORTAL(svl)) {
2148 SvFLAGS(svl) |= SVf_BREAK;
2149 marked = TRUE;
2150 }
2151 else if (!marked) {
2152 /* don't check RH element if no SVf_BREAK flags set yet */
2153 if (!lcount)
2154 break;
2155 continue;
2156 }
2157 }
2158
2159 /* see if corresponding RH element needs copying */
2160
2161 assert(marked);
2162 svr = *relem;
2163 assert(svr);
2164
2165 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
2166 U32 brk = (SvFLAGS(svr) & SVf_BREAK);
2167
2168#ifdef DEBUGGING
2169 if (fake) {
2170 /* op_dump(PL_op); */
2171 Perl_croak(aTHX_
2172 "panic: aassign skipped needed copy of common RH elem %"
2173 UVuf, (UV)(relem - firstrelem));
2174 }
2175#endif
2176
2177 TAINT_NOT; /* Each item is independent */
2178
2179 /* Dear TODO test in t/op/sort.t, I love you.
2180 (It's relying on a panic, not a "semi-panic" from newSVsv()
2181 and then an assertion failure below.) */
2182 if (UNLIKELY(SvIS_FREED(svr))) {
2183 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
2184 (void*)svr);
2185 }
2186 /* avoid break flag while copying; otherwise COW etc
2187 * disabled... */
2188 SvFLAGS(svr) &= ~SVf_BREAK;
2189 /* Not newSVsv(), as it does not allow copy-on-write,
2190 resulting in wasteful copies.
2191 Also, we use SV_NOSTEAL in case the SV is used more than
2192 once, e.g. (...) = (f())[0,0]
2193 Where the same SV appears twice on the RHS without a ref
2194 count bump. (Although I suspect that the SV won't be
2195 stealable here anyway - DAPM).
2196 */
2197 *relem = sv_mortalcopy_flags(svr,
2198 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
2199 /* ... but restore afterwards in case it's needed again,
2200 * e.g. ($a,$b,$c) = (1,$a,$a)
2201 */
2202 SvFLAGS(svr) |= brk;
2203 }
2204
2205 if (!lcount)
2206 break;
2207 }
2208
2209 if (!marked)
2210 return;
2211
2212 /*unmark LHS */
2213
2214 while (lelem > firstlelem) {
2215 SV * const svl = *(--lelem);
2216 if (svl)
2217 SvFLAGS(svl) &= ~SVf_BREAK;
2218 }
2219}
2220
2221
2222
2223PP(pp_aassign)
2224{
2225 dVAR; dSP;
2226 SV **lastlelem = PL_stack_sp;
2227 SV **lastrelem = PL_stack_base + POPMARK;
2228 SV **firstrelem = PL_stack_base + POPMARK + 1;
2229 SV **firstlelem = lastrelem + 1;
2230
2231 SV **relem;
2232 SV **lelem;
2233 U8 gimme;
2234 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
2235 * only need to save locally, not on the save stack */
2236 U16 old_delaymagic = PL_delaymagic;
2237#ifdef DEBUGGING
2238 bool fake = 0;
2239#endif
2240
2241 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
2242
2243 /* If there's a common identifier on both sides we have to take
2244 * special care that assigning the identifier on the left doesn't
2245 * clobber a value on the right that's used later in the list.
2246 */
2247
2248 /* at least 2 LH and RH elements, or commonality isn't an issue */
2249 if (firstlelem < lastlelem && firstrelem < lastrelem) {
2250 for (relem = firstrelem+1; relem <= lastrelem; relem++) {
2251 if (SvGMAGICAL(*relem))
2252 goto do_scan;
2253 }
2254 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
2255 if (*lelem && SvSMAGICAL(*lelem))
2256 goto do_scan;
2257 }
2258 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
2259 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
2260 /* skip the scan if all scalars have a ref count of 1 */
2261 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
2262 SV *sv = *lelem;
2263 if (!sv || SvREFCNT(sv) == 1)
2264 continue;
2265 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVHV)
2266 goto do_scan;
2267 break;
2268 }
2269 }
2270 else {
2271 do_scan:
2272 S_aassign_copy_common(aTHX_
2273 firstlelem, lastlelem, firstrelem, lastrelem
2274#ifdef DEBUGGING
2275 , fake
2276#endif
2277 );
2278 }
2279 }
2280 }
2281#ifdef DEBUGGING
2282 else {
2283 /* on debugging builds, do the scan even if we've concluded we
2284 * don't need to, then panic if we find commonality. Note that the
2285 * scanner assumes at least 2 elements */
2286 if (firstlelem < lastlelem && firstrelem < lastrelem) {
2287 fake = 1;
2288 goto do_scan;
2289 }
2290 }
2291#endif
2292
2293 gimme = GIMME_V;
2294 relem = firstrelem;
2295 lelem = firstlelem;
2296
2297 if (relem > lastrelem)
2298 goto no_relems;
2299
2300 /* first lelem loop while there are still relems */
2301 while (LIKELY(lelem <= lastlelem)) {
2302 bool alias = FALSE;
2303 SV *lsv = *lelem++;
2304
2305 TAINT_NOT; /* Each item stands on its own, taintwise. */
2306
2307 assert(relem <= lastrelem);
2308 if (UNLIKELY(!lsv)) {
2309 alias = TRUE;
2310 lsv = *lelem++;
2311 ASSUME(SvTYPE(lsv) == SVt_PVAV);
2312 }
2313
2314 switch (SvTYPE(lsv)) {
2315 case SVt_PVAV: {
2316 SV **svp;
2317 SSize_t i;
2318 SSize_t tmps_base;
2319 SSize_t nelems = lastrelem - relem + 1;
2320 AV *ary = MUTABLE_AV(lsv);
2321
2322 /* Assigning to an aggregate is tricky. First there is the
2323 * issue of commonality, e.g. @a = ($a[0]). Since the
2324 * stack isn't refcounted, clearing @a prior to storing
2325 * elements will free $a[0]. Similarly with
2326 * sub FETCH { $status[$_[1]] } @status = @tied[0,1];
2327 *
2328 * The way to avoid these issues is to make the copy of each
2329 * SV (and we normally store a *copy* in the array) *before*
2330 * clearing the array. But this has a problem in that
2331 * if the code croaks during copying, the not-yet-stored copies
2332 * could leak. One way to avoid this is to make all the copies
2333 * mortal, but that's quite expensive.
2334 *
2335 * The current solution to these issues is to use a chunk
2336 * of the tmps stack as a temporary refcounted-stack. SVs
2337 * will be put on there during processing to avoid leaks,
2338 * but will be removed again before the end of this block,
2339 * so free_tmps() is never normally called. Also, the
2340 * sv_refcnt of the SVs doesn't have to be manipulated, since
2341 * the ownership of 1 reference count is transferred directly
2342 * from the tmps stack to the AV when the SV is stored.
2343 *
2344 * We disarm slots in the temps stack by storing PL_sv_undef
2345 * there: it doesn't matter if that SV's refcount is
2346 * repeatedly decremented during a croak. But usually this is
2347 * only an interim measure. By the end of this code block
2348 * we try where possible to not leave any PL_sv_undef's on the
2349 * tmps stack e.g. by shuffling newer entries down.
2350 *
2351 * There is one case where we don't copy: non-magical
2352 * SvTEMP(sv)'s with a ref count of 1. The only owner of these
2353 * is on the tmps stack, so it's safe to directly steal the SV
2354 * rather than copying. This is common in things like function
2355 * returns, map etc, which all return a list of such SVs.
2356 *
2357 * Note however something like @a = (f())[0,0], where there is
2358 * a danger of the same SV being shared: this is avoided because
2359 * when the SV is stored as $a[0], its ref count gets bumped,
2360 * so the RC==1 test fails and the second element is copied
2361 * instead.
2362 *
2363 * We also use one slot in the tmps stack to hold an extra
2364 * ref to the array, to ensure it doesn't get prematurely
2365 * freed. Again, this is removed before the end of this block.
2366 *
2367 * Note that OPpASSIGN_COMMON_AGG is used to flag a possible
2368 * @a = ($a[0]) case, but the current implementation uses the
2369 * same algorithm regardless, so ignores that flag. (It *is*
2370 * used in the hash branch below, however).
2371 */
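/* Rough picture of the tmps stack during this block (illustrative
 * sketch; the offsets follow from the code below):
 *
 *   ...older tmps | ary keepalive ref | copy 0 | copy 1 | ... | copy N-1
 *                   ^ tmps_base - 1     ^ tmps_base
 *
 * Each copy slot starts out as &PL_sv_undef and is filled in as the
 * RHS elements are processed, then disarmed back to &PL_sv_undef (or
 * shuffled away) once the AV owns the reference count. */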
2372
2373 /* Reserve slots for ary, plus the elems we're about to copy,
2374 * then protect ary and temporarily void the remaining slots
2375 * with &PL_sv_undef */
2376 EXTEND_MORTAL(nelems + 1);
2377 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(ary);
2378 tmps_base = PL_tmps_ix + 1;
2379 for (i = 0; i < nelems; i++)
2380 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
2381 PL_tmps_ix += nelems;
2382
2383 /* Make a copy of each RHS elem and save on the tmps_stack
2384 * (or pass through where we can optimise away the copy) */
2385
2386 if (UNLIKELY(alias)) {
2387 U32 lval = (gimme == G_ARRAY)
2388 ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
2389 for (svp = relem; svp <= lastrelem; svp++) {
2390 SV *rsv = *svp;
2391
2392 SvGETMAGIC(rsv);
2393 if (!SvROK(rsv))
2394 DIE(aTHX_ "Assigned value is not a reference");
2395 if (SvTYPE(SvRV(rsv)) > SVt_PVLV)
2396 /* diag_listed_as: Assigned value is not %s reference */
2397 DIE(aTHX_
2398 "Assigned value is not a SCALAR reference");
2399 if (lval)
2400 *svp = rsv = sv_mortalcopy(rsv);
2401 /* XXX else check for weak refs? */
2402 rsv = SvREFCNT_inc_NN(SvRV(rsv));
2403 assert(tmps_base <= PL_tmps_max);
2404 PL_tmps_stack[tmps_base++] = rsv;
2405 }
2406 }
2407 else {
2408 for (svp = relem; svp <= lastrelem; svp++) {
2409 SV *rsv = *svp;
2410
2411 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
2412 /* can skip the copy */
2413 SvREFCNT_inc_simple_void_NN(rsv);
2414 SvTEMP_off(rsv);
2415 }
2416 else {
2417 SV *nsv;
2418 /* do get before newSV, in case it dies and leaks */
2419 SvGETMAGIC(rsv);
2420 nsv = newSV(0);
2421 /* see comment in S_aassign_copy_common about
2422 * SV_NOSTEAL */
2423 sv_setsv_flags(nsv, rsv,
2424 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
2425 rsv = *svp = nsv;
2426 }
2427
2428 assert(tmps_base <= PL_tmps_max);
2429 PL_tmps_stack[tmps_base++] = rsv;
2430 }
2431 }
2432
2433 if (SvRMAGICAL(ary) || AvFILLp(ary) >= 0) /* may be non-empty */
2434 av_clear(ary);
2435
2436 /* store in the array, the SVs that are in the tmps stack */
2437
2438 tmps_base -= nelems;
2439
2440 if (SvMAGICAL(ary) || SvREADONLY(ary) || !AvREAL(ary)) {
2441 /* for arrays we can't cheat with, use the official API */
2442 av_extend(ary, nelems - 1);
2443 for (i = 0; i < nelems; i++) {
2444 SV **svp = &(PL_tmps_stack[tmps_base + i]);
2445 SV *rsv = *svp;
2446 /* A tied store won't take ownership of rsv, so keep
2447 * the 1 refcnt on the tmps stack; otherwise disarm
2448 * the tmps stack entry */
2449 if (av_store(ary, i, rsv))
2450 *svp = &PL_sv_undef;
2451 /* av_store() may have added set magic to rsv */
2452 SvSETMAGIC(rsv);
2453 }
2454 /* disarm ary refcount: see comments below about leak */
2455 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
2456 }
2457 else {
2458 /* directly access/set the guts of the AV */
2459 SSize_t fill = nelems - 1;
2460 if (fill > AvMAX(ary))
2461 av_extend_guts(ary, fill, &AvMAX(ary), &AvALLOC(ary),
2462 &AvARRAY(ary));
2463 AvFILLp(ary) = fill;
2464 Copy(&(PL_tmps_stack[tmps_base]), AvARRAY(ary), nelems, SV*);
2465 /* Quietly remove all the SVs from the tmps stack slots,
2466 * since ary has now taken ownership of the refcnt.
2467 * Also remove ary: which will now leak if we die before
2468 * the SvREFCNT_dec_NN(ary) below */
2469 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
2470 Move(&PL_tmps_stack[tmps_base + nelems],
2471 &PL_tmps_stack[tmps_base - 1],
2472 PL_tmps_ix - (tmps_base + nelems) + 1,
2473 SV*);
2474 PL_tmps_ix -= (nelems + 1);
2475 }
2476
2477 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
2478 /* it's assumed @ISA set magic can't die and leak ary */
2479 SvSETMAGIC(MUTABLE_SV(ary));
2480 SvREFCNT_dec_NN(ary);
2481
2482 relem = lastrelem + 1;
2483 goto no_relems;
2484 }
2485
2486 case SVt_PVHV: { /* normal hash */
2487
2488 SV **svp;
2489 bool dirty_tmps;
2490 SSize_t i;
2491 SSize_t tmps_base;
2492 SSize_t nelems = lastrelem - relem + 1;
2493 HV *hash = MUTABLE_HV(lsv);
2494
2495 if (UNLIKELY(nelems & 1)) {
2496 do_oddball(lastrelem, relem);
2497 /* we have firstlelem to reuse, it's not needed any more */
2498 *++lastrelem = &PL_sv_undef;
2499 nelems++;
2500 }
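/* Illustrative example of the padding above: after the "Odd number of
 * elements" warning,
 *     %h = (1, 2, 3);
 * proceeds as if it had been written  %h = (1, 2, 3, undef);  */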
2501
2502 /* See the SVt_PVAV branch above for a long description of
2503 * how the following all works. The main difference for hashes
2504 * is that we treat keys and values separately (and have
2505 * separate loops for them): as for arrays, values are always
2506 * copied (except for the SvTEMP optimisation), since they
2507 * need to be stored in the hash; while keys are only
2508 * processed where they might get prematurely freed or
2509 * whatever. */
2510
2511 /* tmps stack slots:
2512 * * reserve a slot for the hash keepalive;
2513 * * reserve slots for the hash values we're about to copy;
2514 * * preallocate for the keys we'll possibly copy or refcount bump
2515 * later;
2516 * then protect hash and temporarily void the remaining
2517 * value slots with &PL_sv_undef */
2518 EXTEND_MORTAL(nelems + 1);
2519
2520 /* convert to number of key/value pairs */
2521 nelems >>= 1;
2522
2523 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(hash);
2524 tmps_base = PL_tmps_ix + 1;
2525 for (i = 0; i < nelems; i++)
2526 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
2527 PL_tmps_ix += nelems;
2528
2529 /* Make a copy of each RHS hash value and save on the tmps_stack
2530 * (or pass through where we can optimise away the copy) */
2531
2532 for (svp = relem + 1; svp <= lastrelem; svp += 2) {
2533 SV *rsv = *svp;
2534
2535 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
2536 /* can skip the copy */
2537 SvREFCNT_inc_simple_void_NN(rsv);
2538 SvTEMP_off(rsv);
2539 }
2540 else {
2541 SV *nsv;
2542 /* do get before newSV, in case it dies and leaks */
2543 SvGETMAGIC(rsv);
2544 nsv = newSV(0);
2545 /* see comment in S_aassign_copy_common about
2546 * SV_NOSTEAL */
2547 sv_setsv_flags(nsv, rsv,
2548 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
2549 rsv = *svp = nsv;
2550 }
2551
2552 assert(tmps_base <= PL_tmps_max);
2553 PL_tmps_stack[tmps_base++] = rsv;
2554 }
2555 tmps_base -= nelems;
2556
2557
2558 /* possibly protect keys */
2559
2560 if (UNLIKELY(gimme == G_ARRAY)) {
2561 /* handle e.g.
2562 * @a = ((%h = ($$r, 1)), $r = "x");
2563 * $_++ for %h = (1,2,3,4);
2564 */
2565 EXTEND_MORTAL(nelems);
2566 for (svp = relem; svp <= lastrelem; svp += 2)
2567 *svp = sv_mortalcopy_flags(*svp,
2568 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
2569 }
2570 else if (PL_op->op_private & OPpASSIGN_COMMON_AGG) {
2571 /* for possible commonality, e.g.
2572 * %h = ($h{a},1)
2573 * avoid premature freeing RHS keys by mortalising
2574 * them.
2575 * For a magic element, make a copy so that its magic is
2576 * called *before* the hash is emptied (which may affect
2577 * a tied value for example).
2578 * In theory we should check for magic keys in all
2579 * cases, not just under OPpASSIGN_COMMON_AGG, but in
2580 * practice, !OPpASSIGN_COMMON_AGG implies only
2581 * constants or padtmps on the RHS.
2582 */
2583 EXTEND_MORTAL(nelems);
2584 for (svp = relem; svp <= lastrelem; svp += 2) {
2585 SV *rsv = *svp;
2586 if (UNLIKELY(SvGMAGICAL(rsv))) {
2587 SSize_t n;
2588 *svp = sv_mortalcopy_flags(*svp,
2589 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
2590 /* allow other branch to continue pushing
2591 * onto tmps stack without checking each time */
2592 n = (lastrelem - relem) >> 1;
2593 EXTEND_MORTAL(n);
2594 }
2595 else
2596 PL_tmps_stack[++PL_tmps_ix] =
2597 SvREFCNT_inc_simple_NN(rsv);
2598 }
2599 }
2600
2601 if (SvRMAGICAL(hash) || HvUSEDKEYS(hash))
2602 hv_clear(hash);
2603
2604 /* now assign the keys and values to the hash */
2605
2606 dirty_tmps = FALSE;
2607
2608 if (UNLIKELY(gimme == G_ARRAY)) {
2609 /* @a = (%h = (...)) etc */
2610 SV **svp;
2611 SV **topelem = relem;
2612
2613 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
2614 SV *key = *svp++;
2615 SV *val = *svp;
2616 /* remove duplicates from list we return */
2617 if (!hv_exists_ent(hash, key, 0)) {
2618 /* copy key back: possibly to an earlier
2619 * stack location if we encountered dups earlier;
2620 * the values will be updated later
2621 */
2622 *topelem = key;
2623 topelem += 2;
2624 }
2625 /* A tied store won't take ownership of val, so keep
2626 * the 1 refcnt on the tmps stack; otherwise disarm
2627 * the tmps stack entry */
2628 if (hv_store_ent(hash, key, val, 0))
2629 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
2630 else
2631 dirty_tmps = TRUE;
2632 /* hv_store_ent() may have added set magic to val */
2633 SvSETMAGIC(val);
2634 }
2635 if (topelem < svp) {
2636 /* at this point we have removed the duplicate key/value
2637 * pairs from the stack, but the remaining values may be
2638 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
2639 * the (a 2), but the stack now probably contains
2640 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) }
2641 * obliterates the earlier key. So refresh all values. */
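/* Illustrative example of the duplicate handling described above: in
 * list context,
 *     @r = (%h = (a => 1, a => 2));
 * leaves @r as ('a', 2): the duplicate key is dropped from the
 * returned list and the surviving value is re-fetched from the hash. */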
2642 lastrelem = topelem - 1;
2643 while (relem < lastrelem) {
2644 HE *he;
2645 he = hv_fetch_ent(hash, *relem++, 0, 0);
2646 *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
2647 }
2648 }
2649 }
2650 else {
2651 SV **svp;
2652 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
2653 SV *key = *svp++;
2654 SV *val = *svp;
2655 if (hv_store_ent(hash, key, val, 0))
2656 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
2657 else
2658 dirty_tmps = TRUE;
2659 /* hv_store_ent() may have added set magic to val */
2660 SvSETMAGIC(val);
2661 }
2662 }
2663
2664 if (dirty_tmps) {
2665 /* there are still some 'live' refcounts on the tmps stack
2666 * - usually caused by storing into a tied hash. So let
2667 * free_tmps() do the proper but slow job later.
2668 * Just disarm hash refcount: see comments below about leak
2669 */
2670 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
2671 }
2672 else {
2673 /* Quietly remove all the SVs from the tmps stack slots,
2674 * since hash has now taken ownership of the refcnt.
2675 * Also remove hash: which will now leak if we die before
2676 * the SvREFCNT_dec_NN(hash) below */
2677 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
2678 Move(&PL_tmps_stack[tmps_base + nelems],
2679 &PL_tmps_stack[tmps_base - 1],
2680 PL_tmps_ix - (tmps_base + nelems) + 1,
2681 SV*);
2682 PL_tmps_ix -= (nelems + 1);
2683 }
2684
2685 SvREFCNT_dec_NN(hash);
2686
2687 relem = lastrelem + 1;
2688 goto no_relems;
2689 }
2690
2691 default:
2692 if (!SvIMMORTAL(lsv)) {
2693 SV *ref;
2694
2695 if (UNLIKELY(
2696 SvTEMP(lsv) && !SvSMAGICAL(lsv) && SvREFCNT(lsv) == 1 &&
2697 (!isGV_with_GP(lsv) || SvFAKE(lsv)) && ckWARN(WARN_MISC)
2698 ))
2699 Perl_warner(aTHX_
2700 packWARN(WARN_MISC),
2701 "Useless assignment to a temporary"
2702 );
2703
2704 /* avoid freeing $$lsv if it might be needed for further
2705 * elements, e.g. ($ref, $foo) = (1, $$ref) */
2706 if ( SvROK(lsv)
2707 && ( ((ref = SvRV(lsv)), SvREFCNT(ref)) == 1)
2708 && lelem <= lastlelem
2709 ) {
2710 SSize_t ix;
2711 SvREFCNT_inc_simple_void_NN(ref);
2712 /* an unrolled sv_2mortal */
2713 ix = ++PL_tmps_ix;
2714 if (UNLIKELY(ix >= PL_tmps_max))
2715 /* speculatively grow enough to cover other
2716 * possible refs */
2717 (void)tmps_grow_p(ix + (lastlelem - lelem));
2718 PL_tmps_stack[ix] = ref;
2719 }
2720
2721 sv_setsv(lsv, *relem);
2722 *relem = lsv;
2723 SvSETMAGIC(lsv);
2724 }
2725 if (++relem > lastrelem)
2726 goto no_relems;
2727 break;
2728 } /* switch */
2729 } /* while */
2730
2731
2732 no_relems:
2733
2734 /* simplified lelem loop for when there are no relems left */
2735 while (LIKELY(lelem <= lastlelem)) {
2736 SV *lsv = *lelem++;
2737
2738 TAINT_NOT; /* Each item stands on its own, taintwise. */
2739
2740 if (UNLIKELY(!lsv)) {
2741 lsv = *lelem++;
2742 ASSUME(SvTYPE(lsv) == SVt_PVAV);
2743 }
2744
2745 switch (SvTYPE(lsv)) {
2746 case SVt_PVAV:
2747 if (SvRMAGICAL(lsv) || AvFILLp((SV*)lsv) >= 0) {
2748 av_clear((AV*)lsv);
2749 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
2750 SvSETMAGIC(lsv);
2751 }
2752 break;
2753
2754 case SVt_PVHV:
2755 if (SvRMAGICAL(lsv) || HvUSEDKEYS((HV*)lsv))
2756 hv_clear((HV*)lsv);
2757 break;
2758
2759 default:
2760 if (!SvIMMORTAL(lsv)) {
2761 sv_set_undef(lsv);
2762 SvSETMAGIC(lsv);
2763 *relem++ = lsv;
2764 }
2765 break;
2766 } /* switch */
2767 } /* while */
2768
2769 TAINT_NOT; /* result of list assign isn't tainted */
2770
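/* The block below applies any $<, $>, $( or $) assignments that were
 * deferred via DM_DELAY, so that e.g.
 *     ($<, $>) = ($>, $<);
 * can swap the real and effective uids in a single setre*id()-style
 * call where the platform provides one (illustrative; see perlvar). */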
2771 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
2772 /* Will be used to set PL_tainting below */
2773 Uid_t tmp_uid = PerlProc_getuid();
2774 Uid_t tmp_euid = PerlProc_geteuid();
2775 Gid_t tmp_gid = PerlProc_getgid();
2776 Gid_t tmp_egid = PerlProc_getegid();
2777
2778 /* XXX $> et al currently silently ignore failures */
2779 if (PL_delaymagic & DM_UID) {
2780#ifdef HAS_SETRESUID
2781 PERL_UNUSED_RESULT(
2782 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
2783 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
2784 (Uid_t)-1));
2785#elif defined(HAS_SETREUID)
2786 PERL_UNUSED_RESULT(
2787 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
2788 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
2789#else
2790# ifdef HAS_SETRUID
2791 if ((PL_delaymagic & DM_UID) == DM_RUID) {
2792 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
2793 PL_delaymagic &= ~DM_RUID;
2794 }
2795# endif /* HAS_SETRUID */
2796# ifdef HAS_SETEUID
2797 if ((PL_delaymagic & DM_UID) == DM_EUID) {
2798 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
2799 PL_delaymagic &= ~DM_EUID;
2800 }
2801# endif /* HAS_SETEUID */
2802 if (PL_delaymagic & DM_UID) {
2803 if (PL_delaymagic_uid != PL_delaymagic_euid)
2804 DIE(aTHX_ "No setreuid available");
2805 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
2806 }
2807#endif /* HAS_SETRESUID */
2808
2809 tmp_uid = PerlProc_getuid();
2810 tmp_euid = PerlProc_geteuid();
2811 }
2812 /* XXX $> et al currently silently ignore failures */
2813 if (PL_delaymagic & DM_GID) {
2814#ifdef HAS_SETRESGID
2815 PERL_UNUSED_RESULT(
2816 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
2817 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
2818 (Gid_t)-1));
2819#elif defined(HAS_SETREGID)
2820 PERL_UNUSED_RESULT(
2821 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
2822 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
2823#else
2824# ifdef HAS_SETRGID
2825 if ((PL_delaymagic & DM_GID) == DM_RGID) {
2826 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
2827 PL_delaymagic &= ~DM_RGID;
2828 }
2829# endif /* HAS_SETRGID */
2830# ifdef HAS_SETEGID
2831 if ((PL_delaymagic & DM_GID) == DM_EGID) {
2832 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
2833 PL_delaymagic &= ~DM_EGID;
2834 }
2835# endif /* HAS_SETEGID */
2836 if (PL_delaymagic & DM_GID) {
2837 if (PL_delaymagic_gid != PL_delaymagic_egid)
2838 DIE(aTHX_ "No setregid available");
2839 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
2840 }
2841#endif /* HAS_SETRESGID */
2842
2843 tmp_gid = PerlProc_getgid();
2844 tmp_egid = PerlProc_getegid();
2845 }
2846 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
2847#ifdef NO_TAINT_SUPPORT
2848 PERL_UNUSED_VAR(tmp_uid);
2849 PERL_UNUSED_VAR(tmp_euid);
2850 PERL_UNUSED_VAR(tmp_gid);
2851 PERL_UNUSED_VAR(tmp_egid);
2852#endif
2853 }
2854 PL_delaymagic = old_delaymagic;
2855
2856 if (gimme == G_VOID)
2857 SP = firstrelem - 1;
2858 else if (gimme == G_SCALAR) {
2859 SP = firstrelem;
2860 EXTEND(SP,1);
2861 if (PL_op->op_private & OPpASSIGN_TRUEBOOL)
2862 SETs((firstlelem - firstrelem) ? &PL_sv_yes : &PL_sv_zero);
2863 else {
2864 dTARGET;
2865 SETi(firstlelem - firstrelem);
2866 }
2867 }
2868 else
2869 SP = relem - 1;
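/* Illustrative note: in scalar context a list assignment yields the
 * number of elements on the RHS, e.g.
 *     $n = (($a, $b) = (1, 2, 3));   # $n is 3
 * which is what the (firstlelem - firstrelem) count above computes. */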
2870
2871 RETURN;
2872}
2873
2874PP(pp_qr)
2875{
2876 dSP;
2877 PMOP * const pm = cPMOP;
2878 REGEXP * rx = PM_GETRE(pm);
2879 regexp *prog = ReANY(rx);
2880 SV * const pkg = RXp_ENGINE(prog)->qr_package(aTHX_ (rx));
2881 SV * const rv = sv_newmortal();
2882 CV **cvp;
2883 CV *cv;
2884
2885 SvUPGRADE(rv, SVt_IV);
2886 /* For a subroutine describing itself as "This is a hacky workaround" I'm
2887 loath to use it here, but it seems to be the right fix. Or close.
2888 The key part appears to be that it's essential for pp_qr to return a new
2889 object (SV), which implies that there needs to be an effective way to
2890 generate a new SV from the existing SV that is pre-compiled in the
2891 optree. */
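/* Illustrative consequence (sketch): each runtime evaluation of qr//
 * must yield a distinct blessed Regexp object, so that e.g.
 *     push @res, qr/$_/ for @pats;
 * collects independent objects rather than aliases to one shared SV. */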
2892 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));
2893 SvROK_on(rv);
2894
2895 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
2896 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
2897 *cvp = cv_clone(cv);
2898 SvREFCNT_dec_NN(cv);
2899 }
2900
2901 if (pkg) {
2902 HV *const stash = gv_stashsv(pkg, GV_ADD);
2903 SvREFCNT_dec_NN(pkg);
2904 (void)sv_bless(rv, stash);
2905 }
2906
2907 if (UNLIKELY(RXp_ISTAINTED(prog))) {
2908 SvTAINTED_on(rv);
2909 SvTAINTED_on(SvRV(rv));
2910 }
2911 XPUSHs(rv);
2912 RETURN;
2913}
2914
2915PP(pp_match)
2916{
2917 dSP; dTARG;
2918 PMOP *pm = cPMOP;
2919 PMOP *dynpm = pm;
2920 const char *s;
2921 const char *strend;
2922 SSize_t curpos = 0; /* initial pos() or current $+[0] */
2923 I32 global;
2924 U8 r_flags = 0;
2925 const char *truebase; /* Start of string */
2926 REGEXP *rx = PM_GETRE(pm);
2927 regexp *prog = ReANY(rx);
2928 bool rxtainted;
2929 const U8 gimme = GIMME_V;
2930 STRLEN len;
2931 const I32 oldsave = PL_savestack_ix;
2932 I32 had_zerolen = 0;
2933 MAGIC *mg = NULL;
2934
2935 if (PL_op->op_flags & OPf_STACKED)
2936 TARG = POPs;
2937 else {
2938 if (ARGTARG)
2939 GETTARGET;
2940 else {
2941 TARG = DEFSV;
2942 }
2943 EXTEND(SP,1);
2944 }
2945
2946 PUTBACK; /* EVAL blocks need stack_sp. */
2947 /* Skip get-magic if this is a qr// clone, because regcomp has
2948 already done it. */
2949 truebase = prog->mother_re
2950 ? SvPV_nomg_const(TARG, len)
2951 : SvPV_const(TARG, len);
2952 if (!truebase)
2953 DIE(aTHX_ "panic: pp_match");
2954 strend = truebase + len;
2955 rxtainted = (RXp_ISTAINTED(prog) ||
2956 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
2957 TAINT_NOT;
2958
2959 /* We need to know this in case we fail out early - pos() must be reset */
2960 global = dynpm->op_pmflags & PMf_GLOBAL;
2961
2962 /* PMdf_USED is set after a ?? matches once */
2963 if (
2964#ifdef USE_ITHREADS
2965 SvREADONLY(PL_regex_pad[pm->op_pmoffset])
2966#else
2967 pm->op_pmflags & PMf_USED
2968#endif
2969 ) {
2970 DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once"));
2971 goto nope;
2972 }
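/* Illustrative note: this is the m?PATTERN? form, which matches at
 * most once between calls to reset(), e.g.
 *     while (<>) { print "first\n" if m?^start?; }
 * prints at most once, for the first matching line (sketch; perlop). */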
2973
2974 /* handle the empty pattern */
2975 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
2976 if (PL_curpm == PL_reg_curpm) {
2977 if (PL_curpm_under) {
2978 if (PL_curpm_under == PL_reg_curpm) {
2979 Perl_croak(aTHX_ "Infinite recursion via empty pattern");
2980 } else {
2981 pm = PL_curpm_under;
2982 }
2983 }
2984 } else {
2985 pm = PL_curpm;
2986 }
2987 rx = PM_GETRE(pm);
2988 prog = ReANY(rx);
2989 }
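/* Illustrative note: an empty pattern reuses the last successfully
 * matched pattern, e.g.
 *     "abc" =~ /b/;
 *     print "again" if "xbz" =~ //;   # reuses /b/, so this matches
 * The PL_curpm_under checks above guard against the "last pattern"
 * being the one currently executing, which would recurse forever. */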
2990
2991 if (RXp_MINLEN(prog) >= 0 && (STRLEN)RXp_MINLEN(prog) > len) {
2992 DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
2993 UVuf " < %" IVdf ")\n",
2994 (UV)len, (IV)RXp_MINLEN(prog)));
2995 goto nope;
2996 }
2997
2998 /* get pos() if //g */
2999 if (global) {
3000 mg = mg_find_mglob(TARG);
3001 if (mg && mg->mg_len >= 0) {
3002 curpos = MgBYTEPOS(mg, TARG, truebase, len);
3003 /* last time pos() was set, it was zero-length match */
3004 if (mg->mg_flags & MGf_MINMATCH)
3005 had_zerolen = 1;
3006 }
3007 }
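/* Illustrative note: this is what lets scalar-context //g resume where
 * the previous match left off, e.g.
 *     my $s = "a1b22";
 *     while ($s =~ /(\d+)/g) { push @n, $1 }   # @n becomes (1, 22)
 * with pos($s) carrying the offset between iterations (sketch). */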
3008
3009#ifdef PERL_SAWAMPERSAND
3010 if ( RXp_NPARENS(prog)
3011 || PL_sawampersand
3012 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
3013 || (dynpm->op_pmflags & PMf_KEEPCOPY)
3014 )
3015#endif
3016 {
3017 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
3018 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
3019 * only on the first iteration. Therefore we need to copy $' as well
3020 * as $&, to make the rest of the string available for captures in
3021 * subsequent iterations */
3022 if (! (global && gimme == G_ARRAY))
3023 r_flags |= REXEC_COPY_SKIP_POST;
3024 };
3025#ifdef PERL_SAWAMPERSAND
3026 if (dynpm->op_pmflags & PMf_KEEPCOPY)
3027 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
3028 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
3029#endif
3030
3031 s = truebase;
3032
3033 play_it_again:
3034 if (global)
3035 s = truebase + curpos;
3036
3037 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
3038 had_zerolen, TARG, NULL, r_flags))
3039 goto nope;
3040
3041 PL_curpm = pm;
3042 if (dynpm->op_pmflags & PMf_ONCE)
3043#ifdef USE_ITHREADS
3044 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
3045#else
3046 dynpm->op_pmflags |= PMf_USED;
3047#endif
3048
3049 if (rxtainted)
3050 RXp_MATCH_TAINTED_on(prog);
3051 TAINT_IF(RXp_MATCH_TAINTED(prog));
3052
3053 /* update pos */
3054
3055 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
3056 if (!mg)
3057 mg = sv_magicext_mglob(TARG);
3058 MgBYTEPOS_set(mg, TARG, truebase, RXp_OFFS(prog)[0].end);
3059 if (RXp_ZERO_LEN(prog))
3060 mg->mg_flags |= MGf_MINMATCH;
3061 else
3062 mg->mg_flags &= ~MGf_MINMATCH;
3063 }
3064
3065 if ((!RXp_NPARENS(prog) && !global) || gimme != G_ARRAY) {
3066 LEAVE_SCOPE(oldsave);
3067 RETPUSHYES;
3068 }
3069
3070 /* push captures on stack */
3071
3072 {
3073 const I32 nparens = RXp_NPARENS(prog);
3074 I32 i = (global && !nparens) ? 1 : 0;
3075
3076 SPAGAIN; /* EVAL blocks could move the stack. */
3077 EXTEND(SP, nparens + i);
3078 EXTEND_MORTAL(nparens + i);
3079 for (i = !i; i <= nparens; i++) {
3080 PUSHs(sv_newmortal());
3081 if (LIKELY((RXp_OFFS(prog)[i].start != -1)
3082 && RXp_OFFS(prog)[i].end != -1 ))
3083 {
3084 const I32 len = RXp_OFFS(prog)[i].end - RXp_OFFS(prog)[i].start;
3085 const char * const s = RXp_OFFS(prog)[i].start + truebase;
3086 if (UNLIKELY( RXp_OFFS(prog)[i].end < 0
3087 || RXp_OFFS(prog)[i].start < 0
3088 || len < 0
3089 || len > strend - s)
3090 )
3091 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
3092 "start=%ld, end=%ld, s=%p, strend=%p, len=%" UVuf,
3093 (long) i, (long) RXp_OFFS(prog)[i].start,
3094 (long)RXp_OFFS(prog)[i].end, s, strend, (UV) len);
3095 sv_setpvn(*SP, s, len);
3096 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
3097 SvUTF8_on(*SP);
3098 }
3099 }
3100 if (global) {
3101 curpos = (UV)RXp_OFFS(prog)[0].end;
3102 had_zerolen = RXp_ZERO_LEN(prog);
3103 PUTBACK; /* EVAL blocks may use stack */
3104 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
3105 goto play_it_again;
3106 }
3107 LEAVE_SCOPE(oldsave);
3108 RETURN;
3109 }
3110 NOT_REACHED; /* NOTREACHED */
3111
3112 nope:
3113 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
3114 if (!mg)
3115 mg = mg_find_mglob(TARG);
3116 if (mg)
3117 mg->mg_len = -1;
3118 }
3119 LEAVE_SCOPE(oldsave);
3120 if (gimme == G_ARRAY)
3121 RETURN;
3122 RETPUSHNO;
3123}
3124
3125OP *
3126Perl_do_readline(pTHX)
3127{
3128 dSP; dTARGETSTACKED;
3129 SV *sv;
3130 STRLEN tmplen = 0;
3131 STRLEN offset;
3132 PerlIO *fp;
3133 IO * const io = GvIO(PL_last_in_gv);
3134 const I32 type = PL_op->op_type;
3135 const U8 gimme = GIMME_V;
3136
3137 if (io) {
3138 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
3139 if (mg) {
3140 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
3141 if (gimme == G_SCALAR) {
3142 SPAGAIN;
3143 SvSetSV_nosteal(TARG, TOPs);
3144 SETTARG;
3145 }
3146 return NORMAL;
3147 }
3148 }
3149 fp = NULL;
3150 if (io) {
3151 fp = IoIFP(io);
3152 if (!fp) {
3153 if (IoFLAGS(io) & IOf_ARGV) {
3154 if (IoFLAGS(io) & IOf_START) {
3155 IoLINES(io) = 0;
3156 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) {
3157 IoFLAGS(io) &= ~IOf_START;
3158 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
3159 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
3160 sv_setpvs(GvSVn(PL_last_in_gv), "-");
3161 SvSETMAGIC(GvSV(PL_last_in_gv));
3162 fp = IoIFP(io);
3163 goto have_fp;
3164 }
3165 }
3166 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
3167 if (!fp) { /* Note: fp != IoIFP(io) */
3168 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
3169 }
3170 }
3171 else if (type == OP_GLOB)
3172 fp = Perl_start_glob(aTHX_ POPs, io);
3173 }
3174 else if (type == OP_GLOB)
3175 SP--;
3176 else if (IoTYPE(io) == IoTYPE_WRONLY) {
3177 report_wrongway_fh(PL_last_in_gv, '>');
3178 }
3179 }
3180 if (!fp) {
3181 if ((!io || !(IoFLAGS(io) & IOf_START))
3182 && ckWARN(WARN_CLOSED)
3183 && type != OP_GLOB)
3184 {
3185 report_evil_fh(PL_last_in_gv);
3186 }
3187 if (gimme == G_SCALAR) {
3188 /* undef TARG, and push that undefined value */
3189 if (type != OP_RCATLINE) {
3190 sv_set_undef(TARG);
3191 }
3192 PUSHTARG;
3193 }
3194 RETURN;
3195 }
3196 have_fp:
3197 if (gimme == G_SCALAR) {
3198 sv = TARG;
3199 if (type == OP_RCATLINE && SvGMAGICAL(sv))
3200 mg_get(sv);
3201 if (SvROK(sv)) {
3202 if (type == OP_RCATLINE)
3203 SvPV_force_nomg_nolen(sv);
3204 else
3205 sv_unref(sv);
3206 }
3207 else if (isGV_with_GP(sv)) {
3208 SvPV_force_nomg_nolen(sv);
3209 }
3210 SvUPGRADE(sv, SVt_PV);
3211 tmplen = SvLEN(sv); /* remember if already alloced */
3212 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
3213 /* try short-buffering it. Please update t/op/readline.t
3214 * if you change the growth length.
3215 */
3216 Sv_Grow(sv, 80);
3217 }
3218 offset = 0;
3219 if (type == OP_RCATLINE && SvOK(sv)) {
3220 if (!SvPOK(sv)) {
3221 SvPV_force_nomg_nolen(sv);
3222 }
3223 offset = SvCUR(sv);
3224 }
3225 }
3226 else {
3227 sv = sv_2mortal(newSV(80));
3228 offset = 0;
3229 }
3230
3231 /* This should not be marked tainted if the fp is marked clean */
3232#define MAYBE_TAINT_LINE(io, sv) \
3233 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
3234 TAINT; \
3235 SvTAINTED_on(sv); \
3236 }
3237
3238/* delay EOF state for a snarfed empty file */
3239#define SNARF_EOF(gimme,rs,io,sv) \
3240 (gimme != G_SCALAR || SvCUR(sv) \
3241 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
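/* Illustrative effect of SNARF_EOF (sketch): slurping an empty file,
 *     local $/;  my $data = <$fh>;
 * yields a defined empty string on the first read rather than undef;
 * only a subsequent read reports EOF. */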
3242
3243 for (;;) {
3244 PUTBACK;
3245 if (!sv_gets(sv, fp, offset)
3246 && (type == OP_GLOB
3247 || SNARF_EOF(gimme, PL_rs, io, sv)
3248 || PerlIO_error(fp)))
3249 {
3250 PerlIO_clearerr(fp);
3251 if (IoFLAGS(io) & IOf_ARGV) {
3252 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
3253 if (fp)
3254 continue;
3255 (void)do_close(PL_last_in_gv, FALSE);
3256 }
3257 else if (type == OP_GLOB) {
3258 if (!do_close(PL_last_in_gv, FALSE)) {
3259 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
3260 "glob failed (child exited with status %d%s)",
3261 (int)(STATUS_CURRENT >> 8),
3262 (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
3263 }
3264 }
3265 if (gimme == G_SCALAR) {
3266 if (type != OP_RCATLINE) {
3267 SV_CHECK_THINKFIRST_COW_DROP(TARG);
3268 SvOK_off(TARG);
3269 }
3270 SPAGAIN;
3271 PUSHTARG;
3272 }
3273 MAYBE_TAINT_LINE(io, sv);
3274 RETURN;
3275 }
3276 MAYBE_TAINT_LINE(io, sv);
3277 IoLINES(io)++;
3278 IoFLAGS(io) |= IOf_NOLINE;
3279 SvSETMAGIC(sv);
3280 SPAGAIN;
3281 XPUSHs(sv);
3282 if (type == OP_GLOB) {
3283 const char *t1;
3284 Stat_t statbuf;
3285
3286 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
3287 char * const tmps = SvEND(sv) - 1;
3288 if (*tmps == *SvPVX_const(PL_rs)) {
3289 *tmps = '\0';
3290 SvCUR_set(sv, SvCUR(sv) - 1);
3291 }
3292 }
3293 for (t1 = SvPVX_const(sv); *t1; t1++)
3294#ifdef __VMS
3295 if (strchr("*%?", *t1))
3296#else
3297 if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
3298#endif
3299 break;
3300 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
3301 (void)POPs; /* Unmatched wildcard? Chuck it... */
3302 continue;
3303 }
3304 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
3305 if (ckWARN(WARN_UTF8)) {
3306 const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
3307 const STRLEN len = SvCUR(sv) - offset;
3308 const U8 *f;
3309
3310 if (!is_utf8_string_loc(s, len, &f))
3311 /* Emulate :encoding(utf8) warning in the same case. */
3312 Perl_warner(aTHX_ packWARN(WARN_UTF8),
3313 "utf8 \"\\x%02X\" does not map to Unicode",
3314 f < (U8*)SvEND(sv) ? *f : 0);
3315 }
3316 }
3317 if (gimme == G_ARRAY) {
3318 if (SvLEN(sv) - SvCUR(sv) > 20) {
3319 SvPV_shrink_to_cur(sv);
3320 }
3321 sv = sv_2mortal(newSV(80));
3322 continue;
3323 }
3324 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
3325 /* try to reclaim a bit of scalar space (only on 1st alloc) */
3326 const STRLEN new_len
3327 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */
3328 SvPV_renew(sv, new_len);
3329 }
3330 RETURN;
3331 }
3332}
3333
3334PP(pp_helem)
3335{
3336 dSP;
3337 HE* he;
3338 SV **svp;
3339 SV * const keysv = POPs;
3340 HV * const hv = MUTABLE_HV(POPs);
3341 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
3342 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
3343 SV *sv;
3344 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
3345 bool preeminent = TRUE;
3346
3347 if (SvTYPE(hv) != SVt_PVHV)
3348 RETPUSHUNDEF;
3349
3350 if (localizing) {
3351 MAGIC *mg;
3352 HV *stash;
3353
3354 /* If we can determine whether the element exists,
3355 * try to preserve the existence of a tied hash
3356 * element by using EXISTS and DELETE if possible.
3357 * Fall back to FETCH and STORE otherwise. */
3358 if (SvCANEXISTDELETE(hv))
3359 preeminent = hv_exists_ent(hv, keysv, 0);
3360 }
3361
3362 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
3363 svp = he ? &HeVAL(he) : NULL;
3364 if (lval) {
3365 if (!svp || !*svp || *svp == &PL_sv_undef) {
3366 SV* lv;
3367 SV* key2;
3368 if (!defer) {
3369 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
3370 }
3371 lv = sv_newmortal();
3372 sv_upgrade(lv, SVt_PVLV);
3373 LvTYPE(lv) = 'y';
3374 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
3375 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
3376 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
3377 LvTARGLEN(lv) = 1;
3378 PUSHs(lv);
3379 RETURN;
3380 }
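/* Illustrative note: the deferred-element PVLV above is what lets e.g.
 *     sub f { $_[0] = 1 }
 *     f($h{nosuchkey});
 * create $h{nosuchkey} only if the sub actually assigns to $_[0]
 * (sketch; the store is done by magic_setdefelem). */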
3381 if (localizing) {
3382 if (HvNAME_get(hv) && isGV_or_RVCV(*svp))
3383 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
3384 else if (preeminent)
3385 save_helem_flags(hv, keysv, svp,
3386 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
3387 else
3388 SAVEHDELETE(hv, keysv);
3389 }
3390 else if (PL_op->op_private & OPpDEREF) {
3391 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
3392 RETURN;
3393 }
3394 }
3395 sv = (svp && *svp ? *svp : &PL_sv_undef);
3396 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
3397 * was to make C<local $tied{foo} = $tied{foo}> possible.
3398 * However, it seems no longer to be needed for that purpose, and
3399 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g)>
3400 * would loop endlessly since the pos magic is getting set on the
3401 * mortal copy and lost. However, the copy has the effect of
3402 * triggering the get magic, and losing it altogether made things like
3403 * C<$tied{foo};> in void context no longer do get magic, which some
3404 * code relied on. Also, delayed triggering of magic on @+ and friends
3405 * meant the original regex may be out of scope by now. So as a
3406 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
3407 * being called too many times). */
3408 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
3409 mg_get(sv);
3410 PUSHs(sv);
3411 RETURN;
3412}
3413
3414
3415/* a stripped-down version of Perl_softref2xv() for use by
3416 * pp_multideref(), which doesn't use PL_op->op_flags */
3417
3418STATIC GV *
3419S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
3420 const svtype type)
3421{
3422 if (PL_op->op_private & HINT_STRICT_REFS) {
3423 if (SvOK(sv))
3424 Perl_die(aTHX_ PL_no_symref_sv, sv,
3425 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
3426 else
3427 Perl_die(aTHX_ PL_no_usym, what);
3428 }
3429 if (!SvOK(sv))
3430 Perl_die(aTHX_ PL_no_usym, what);
3431 return gv_fetchsv_nomg(sv, GV_ADD, type);
3432}
3433
3434
3435/* Handle one or more aggregate derefs and array/hash indexings, e.g.
3436 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
3437 *
3438 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
3439 * Each of these either contains a set of actions, or an argument, such as
3440 * an IV to use as an array index, or a lexical var to retrieve.
3441 * Several actions are stored per UV; we keep shifting new actions off the
3442 * one UV, and only reload when it becomes zero.
3443 */
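/* Illustrative sketch (not the exact encoding): for
 *     my $x = $h->{foo}[0]{bar};
 * the aux list describes roughly: fetch the lexical $h; deref/vivify it
 * to an HV and look up "foo"; deref/vivify that to an AV and fetch
 * index 0; deref/vivify that to an HV and look up "bar". */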
3444
3445PP(pp_multideref)
3446{
3447 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
3448 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
3449 UV actions = items->uv;
3450
3451 assert(actions);
3452 /* this tells find_uninit_var() where we're up to */
3453 PL_multideref_pc = items;
3454
3455 while (1) {
3456 /* there are three main classes of action; the first retrieves
3457 * the initial AV or HV from a variable or the stack; the second
3458 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
3459 * the third an unrolled (/DREFHV, rv2hv, helem).
3460 */
3461 switch (actions & MDEREF_ACTION_MASK) {
3462
3463 case MDEREF_reload:
3464 actions = (++items)->uv;
3465 continue;
3466
3467 case MDEREF_AV_padav_aelem: /* $lex[...] */
3468 sv = PAD_SVl((++items)->pad_offset);
3469 goto do_AV_aelem;
3470
3471 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
3472 sv = UNOP_AUX_item_sv(++items);
3473 assert(isGV_with_GP(sv));
3474 sv = (SV*)GvAVn((GV*)sv);
3475 goto do_AV_aelem;
3476
3477 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
3478 {
3479 dSP;
3480 sv = POPs;
3481 PUTBACK;
3482 goto do_AV_rv2av_aelem;
3483 }
3484
3485 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
3486 sv = UNOP_AUX_item_sv(++items);
3487 assert(isGV_with_GP(sv));
3488 sv = GvSVn((GV*)sv);
3489 goto do_AV_vivify_rv2av_aelem;
3490
3491 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
3492 sv = PAD_SVl((++items)->pad_offset);
3493 /* FALLTHROUGH */
3494
3495 do_AV_vivify_rv2av_aelem:
3496 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
3497 /* this is the OPpDEREF action normally found at the end of
3498 * ops like aelem, helem, rv2sv */
3499 sv = vivify_ref(sv, OPpDEREF_AV);
3500 /* FALLTHROUGH */
3501
3502 do_AV_rv2av_aelem:
3503 /* this is basically a copy of pp_rv2av when it just has the
3504 * sKR/1 flags */
3505 SvGETMAGIC(sv);
3506 if (LIKELY(SvROK(sv))) {
3507 if (UNLIKELY(SvAMAGIC(sv))) {
3508 sv = amagic_deref_call(sv, to_av_amg);
3509 }
3510 sv = SvRV(sv);
3511 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
3512 DIE(aTHX_ "Not an ARRAY reference");
3513 }
3514 else if (SvTYPE(sv) != SVt_PVAV) {
3515 if (!isGV_with_GP(sv))
3516 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
3517 sv = MUTABLE_SV(GvAVn((GV*)sv));
3518 }
3519 /* FALLTHROUGH */
3520
3521 do_AV_aelem:
3522 {
3523 /* retrieve the key; this may be either a lexical or package
3524 * var (whose index/ptr is stored as an item) or a signed
3525 * integer constant stored as an item.
3526 */
3527 SV *elemsv;
3528 IV elem = 0; /* to shut up stupid compiler warnings */
3529
3530
3531 assert(SvTYPE(sv) == SVt_PVAV);
3532
3533 switch (actions & MDEREF_INDEX_MASK) {
3534 case MDEREF_INDEX_none:
3535 goto finish;
3536 case MDEREF_INDEX_const:
3537 elem = (++items)->iv;
3538 break;
3539 case MDEREF_INDEX_padsv:
3540 elemsv = PAD_SVl((++items)->pad_offset);
3541 goto check_elem;
3542 case MDEREF_INDEX_gvsv:
3543 elemsv = UNOP_AUX_item_sv(++items);
3544 assert(isGV_with_GP(elemsv));
3545 elemsv = GvSVn((GV*)elemsv);
3546 check_elem:
3547 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
3548 && ckWARN(WARN_MISC)))
3549 Perl_warner(aTHX_ packWARN(WARN_MISC),
3550 "Use of reference \"%" SVf "\" as array index",
3551 SVfARG(elemsv));
3552 /* the only time that S_find_uninit_var() needs this
3553 * is to determine which index value triggered the
3554 * undef warning. So just update it here. Note that
3555 * since we don't save and restore this var (e.g. for
3556 * tie or overload execution), its value will be
3557 * meaningless apart from just here */
3558 PL_multideref_pc = items;
3559 elem = SvIV(elemsv);
3560 break;
3561 }
3562
3563
3564 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
3565
3566 if (!(actions & MDEREF_FLAG_last)) {
3567 SV** svp = av_fetch((AV*)sv, elem, 1);
3568 if (!svp || ! (sv=*svp))
3569 DIE(aTHX_ PL_no_aelem, elem);
3570 break;
3571 }
3572
3573 if (PL_op->op_private &
3574 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
3575 {
3576 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
3577 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
3578 }
3579 else {
3580 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
3581 sv = av_delete((AV*)sv, elem, discard);
3582 if (discard)
3583 return NORMAL;
3584 if (!sv)
3585 sv = &PL_sv_undef;
3586 }
3587 }
3588 else {
3589 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
3590 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
3591 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
3592 bool preeminent = TRUE;
3593 AV *const av = (AV*)sv;
3594 SV** svp;
3595
3596 if (UNLIKELY(localizing)) {
3597 MAGIC *mg;
3598 HV *stash;
3599
3600 /* If we can determine whether the element exists,
3601 * try to preserve the existence of a tied array
3602 * element by using EXISTS and DELETE if possible.
3603 * Fall back to FETCH and STORE otherwise. */
3604 if (SvCANEXISTDELETE(av))
3605 preeminent = av_exists(av, elem);
3606 }
3607
3608 svp = av_fetch(av, elem, lval && !defer);
3609
3610 if (lval) {
3611 if (!svp || !(sv = *svp)) {
3612 IV len;
3613 if (!defer)
3614 DIE(aTHX_ PL_no_aelem, elem);
3615 len = av_tindex(av);
3616 sv = sv_2mortal(newSVavdefelem(av,
3617 /* Resolve a negative index now, unless it points
3618 * before the beginning of the array, in which
3619 * case record it for error reporting in
3620 * magic_setdefelem. */
3621 elem < 0 && len + elem >= 0
3622 ? len + elem : elem, 1));
3623 }
3624 else {
3625 if (UNLIKELY(localizing)) {
3626 if (preeminent) {
3627 save_aelem(av, elem, svp);
3628 sv = *svp; /* may have changed */
3629 }
3630 else
3631 SAVEADELETE(av, elem);
3632 }
3633 }
3634 }
3635 else {
3636 sv = (svp ? *svp : &PL_sv_undef);
3637 /* see note in pp_helem() */
3638 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
3639 mg_get(sv);
3640 }
3641 }
3642
3643 }
3644 finish:
3645 {
3646 dSP;
3647 XPUSHs(sv);
3648 RETURN;
3649 }
3650 /* NOTREACHED */
3651
3652
3653
3654
3655 case MDEREF_HV_padhv_helem: /* $lex{...} */
3656 sv = PAD_SVl((++items)->pad_offset);
3657 goto do_HV_helem;
3658
3659 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
3660 sv = UNOP_AUX_item_sv(++items);
3661 assert(isGV_with_GP(sv));
3662 sv = (SV*)GvHVn((GV*)sv);
3663 goto do_HV_helem;
3664
3665 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
3666 {
3667 dSP;
3668 sv = POPs;
3669 PUTBACK;
3670 goto do_HV_rv2hv_helem;
3671 }
3672
3673 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
3674 sv = UNOP_AUX_item_sv(++items);
3675 assert(isGV_with_GP(sv));
3676 sv = GvSVn((GV*)sv);
3677 goto do_HV_vivify_rv2hv_helem;
3678
3679 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
3680 sv = PAD_SVl((++items)->pad_offset);
3681 /* FALLTHROUGH */
3682
3683 do_HV_vivify_rv2hv_helem:
3684 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
3685 /* this is the OPpDEREF action normally found at the end of
3686 * ops like aelem, helem, rv2sv */
3687 sv = vivify_ref(sv, OPpDEREF_HV);
3688 /* FALLTHROUGH */
3689
3690 do_HV_rv2hv_helem:
3691 /* this is basically a copy of pp_rv2hv when it just has the
3692 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
3693
3694 SvGETMAGIC(sv);
3695 if (LIKELY(SvROK(sv))) {
3696 if (UNLIKELY(SvAMAGIC(sv))) {
3697 sv = amagic_deref_call(sv, to_hv_amg);
3698 }
3699 sv = SvRV(sv);
3700 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
3701 DIE(aTHX_ "Not a HASH reference");
3702 }
3703 else if (SvTYPE(sv) != SVt_PVHV) {
3704 if (!isGV_with_GP(sv))
3705 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
3706 sv = MUTABLE_SV(GvHVn((GV*)sv));
3707 }
3708 /* FALLTHROUGH */
3709
3710 do_HV_helem:
3711 {
3712 /* retrieve the key; this may be either a lexical / package
3713 * var or a string constant, whose index/ptr is stored as an
3714 * item
3715 */
3716 SV *keysv = NULL; /* to shut up stupid compiler warnings */
3717
3718 assert(SvTYPE(sv) == SVt_PVHV);
3719
3720 switch (actions & MDEREF_INDEX_MASK) {
3721 case MDEREF_INDEX_none:
3722 goto finish;
3723
3724 case MDEREF_INDEX_const:
3725 keysv = UNOP_AUX_item_sv(++items);
3726 break;
3727
3728 case MDEREF_INDEX_padsv:
3729 keysv = PAD_SVl((++items)->pad_offset);
3730 break;
3731
3732 case MDEREF_INDEX_gvsv:
3733 keysv = UNOP_AUX_item_sv(++items);
3734 keysv = GvSVn((GV*)keysv);
3735 break;
3736 }
3737
3738 /* see comment above about setting this var */
3739 PL_multideref_pc = items;
3740
3741
3742 /* ensure that candidate CONSTs have been HEKified */
3743 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
3744 || SvTYPE(keysv) >= SVt_PVMG
3745 || !SvOK(keysv)
3746 || SvROK(keysv)
3747 || SvIsCOW_shared_hash(keysv));
3748
3749 /* this is basically a copy of pp_helem with OPpDEREF skipped */
3750
3751 if (!(actions & MDEREF_FLAG_last)) {
3752 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
3753 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
3754 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
3755 break;
3756 }
3757
3758 if (PL_op->op_private &
3759 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
3760 {
3761 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
3762 sv = hv_exists_ent((HV*)sv, keysv, 0)
3763 ? &PL_sv_yes : &PL_sv_no;
3764 }
3765 else {
3766 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
3767 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
3768 if (discard)
3769 return NORMAL;
3770 if (!sv)
3771 sv = &PL_sv_undef;
3772 }
3773 }
3774 else {
3775 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
3776 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
3777 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
3778 bool preeminent = TRUE;
3779 SV **svp;
3780 HV * const hv = (HV*)sv;
3781 HE* he;
3782
3783 if (UNLIKELY(localizing)) {
3784 MAGIC *mg;
3785 HV *stash;
3786
3787 /* If we can determine whether the element exists,
3788 * try to preserve the existence of a tied hash
3789 * element by using EXISTS and DELETE if possible.
3790 * Fall back to FETCH and STORE otherwise. */
3791 if (SvCANEXISTDELETE(hv))
3792 preeminent = hv_exists_ent(hv, keysv, 0);
3793 }
3794
3795 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
3796 svp = he ? &HeVAL(he) : NULL;
3797
3798
3799 if (lval) {
3800 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
3801 SV* lv;
3802 SV* key2;
3803 if (!defer)
3804 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
3805 lv = sv_newmortal();
3806 sv_upgrade(lv, SVt_PVLV);
3807 LvTYPE(lv) = 'y';
3808 sv_magic(lv, key2 = newSVsv(keysv),
3809 PERL_MAGIC_defelem, NULL, 0);
3810 /* sv_magic() increments refcount */
3811 SvREFCNT_dec_NN(key2);
3812 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
3813 LvTARGLEN(lv) = 1;
3814 sv = lv;
3815 }
3816 else {
3817 if (localizing) {
3818 if (HvNAME_get(hv) && isGV_or_RVCV(sv))
3819 save_gp(MUTABLE_GV(sv),
3820 !(PL_op->op_flags & OPf_SPECIAL));
3821 else if (preeminent) {
3822 save_helem_flags(hv, keysv, svp,
3823 (PL_op->op_flags & OPf_SPECIAL)
3824 ? 0 : SAVEf_SETMAGIC);
3825 sv = *svp; /* may have changed */
3826 }
3827 else
3828 SAVEHDELETE(hv, keysv);
3829 }
3830 }
3831 }
3832 else {
3833 sv = (svp && *svp ? *svp : &PL_sv_undef);
3834 /* see note in pp_helem() */
3835 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
3836 mg_get(sv);
3837 }
3838 }
3839 goto finish;
3840 }
3841
3842 } /* switch */
3843
3844 actions >>= MDEREF_SHIFT;
3845 } /* while */
3846 /* NOTREACHED */
3847}
3848
3849
3850PP(pp_iter)
3851{
3852 PERL_CONTEXT *cx;
3853 SV *oldsv;
3854 SV **itersvp;
3855
3856 SV *sv;
3857 AV *av;
3858 IV ix;
3859 IV inc;
3860
3861 cx = CX_CUR();
3862 itersvp = CxITERVAR(cx);
3863 assert(itersvp);
3864
3865 switch (CxTYPE(cx)) {
3866
3867 case CXt_LOOP_LAZYSV: /* string increment */
3868 {
3869 SV* cur = cx->blk_loop.state_u.lazysv.cur;
3870 SV *end = cx->blk_loop.state_u.lazysv.end;
3871 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
3872 It has SvPVX of "" and SvCUR of 0, which is what we want. */
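/* Illustrative example (sketch):  foreach my $s ('aa' .. 'ad') { ... }
 * iterates here, with sv_inc() supplying the magic string increment
 * aa, ab, ac, ad; the strEQ-against-max / SvNIOK checks end the loop
 * after 'ad'. */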
3873 STRLEN maxlen = 0;
3874 const char *max = SvPV_const(end, maxlen);
3875 if (DO_UTF8(end) && IN_UNI_8_BIT)
3876 maxlen = sv_len_utf8_nomg(end);
3877 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
3878 goto retno;
3879
3880 oldsv = *itersvp;
3881 /* NB: on the first iteration, oldsv will have a ref count of at
3882 * least 2 (one extra from blk_loop.itersave), so the GV or pad
3883 * slot will get localised; on subsequent iterations the RC==1
3884 * optimisation may kick in and the SV will be reused. */
3885 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
3886 /* safe to reuse old SV */
3887 sv_setsv(oldsv, cur);
3888 }
3889 else
3890 {
3891 /* we need a fresh SV every time so that loop body sees a
3892 * completely new SV for closures/references to work as
3893 * they used to */
3894 *itersvp = newSVsv(cur);
3895 SvREFCNT_dec(oldsv);
3896 }
3897 if (strEQ(SvPVX_const(cur), max))
3898 sv_setiv(cur, 0); /* terminate next time */
3899 else
3900 sv_inc(cur);
3901 break;
3902 }
3903
3904 case CXt_LOOP_LAZYIV: /* integer increment */
3905 {
3906 IV cur = cx->blk_loop.state_u.lazyiv.cur;
3907 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
3908 goto retno;
3909
3910 oldsv = *itersvp;
3911 /* see NB comment above */
3912 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
3913 /* safe to reuse old SV */
3914
3915 if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV))
3916 == SVt_IV)
3917 {
3918 /* Cheap SvIOK_only().
3919 * Assert that flags which SvIOK_only() would test or
3920 * clear can't be set, because we're SVt_IV */
3921 assert(!(SvFLAGS(oldsv) &
3922 (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK)))));
3923 SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK);
3924 /* SvIV_set() where sv_any points to head */
3925 oldsv->sv_u.svu_iv = cur;
3926
3927 }
3928 else
3929 sv_setiv(oldsv, cur);
3930 }
3931 else
3932 {
3933 /* we need a fresh SV every time so that the loop body sees a
3934 * completely new SV, allowing closures/references to work as they
3935 * used to */
3936 *itersvp = newSViv(cur);
3937 SvREFCNT_dec(oldsv);
3938 }
3939
3940 if (UNLIKELY(cur == IV_MAX)) {
3941 /* Handle end of range at IV_MAX */
3942 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
3943 } else
3944 ++cx->blk_loop.state_u.lazyiv.cur;
3945 break;
3946 }
3947
3948 case CXt_LOOP_LIST: /* for (1,2,3) */
3949
3950 assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */
3951 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
3952 ix = (cx->blk_loop.state_u.stack.ix += inc);
3953 if (UNLIKELY(inc > 0
3954 ? ix > cx->blk_oldsp
3955 : ix <= cx->blk_loop.state_u.stack.basesp)
3956 )
3957 goto retno;
3958
3959 sv = PL_stack_base[ix];
3960 av = NULL;
3961 goto loop_ary_common;
3962
3963 case CXt_LOOP_ARY: /* for (@ary) */
3964
3965 av = cx->blk_loop.state_u.ary.ary;
3966 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
3967 ix = (cx->blk_loop.state_u.ary.ix += inc);
3968 if (UNLIKELY(inc > 0
3969 ? ix > AvFILL(av)
3970 : ix < 0)
3971 )
3972 goto retno;
3973
3974 if (UNLIKELY(SvRMAGICAL(av))) {
3975 SV * const * const svp = av_fetch(av, ix, FALSE);
3976 sv = svp ? *svp : NULL;
3977 }
3978 else {
3979 sv = AvARRAY(av)[ix];
3980 }
3981
3982 loop_ary_common:
3983
3984 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
3985 SvSetMagicSV(*itersvp, sv);
3986 break;
3987 }
3988
3989 if (LIKELY(sv)) {
3990 if (UNLIKELY(SvIS_FREED(sv))) {
3991 *itersvp = NULL;
3992 Perl_croak(aTHX_ "Use of freed value in iteration");
3993 }
3994 if (SvPADTMP(sv)) {
3995 sv = newSVsv(sv);
3996 }
3997 else {
3998 SvTEMP_off(sv);
3999 SvREFCNT_inc_simple_void_NN(sv);
4000 }
4001 }
4002 else if (av) {
4003 sv = newSVavdefelem(av, ix, 0);
4004 }
4005 else
4006 sv = &PL_sv_undef;
4007
4008 oldsv = *itersvp;
4009 *itersvp = sv;
4010 SvREFCNT_dec(oldsv);
4011 break;
4012
4013 default:
4014 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
4015 }
4016
4017 /* Bypass pushing &PL_sv_yes and calling pp_and(); instead
4018 * jump straight to the AND op's op_other */
4019 assert(PL_op->op_next->op_type == OP_AND);
4020 assert(PL_op->op_next->op_ppaddr == Perl_pp_and);
4021 return cLOGOPx(PL_op->op_next)->op_other;
4022
4023 retno:
4024 /* Bypass pushing &PL_sv_no and calling pp_and(); instead
4025 * jump straight to the AND op's op_next */
4026 assert(PL_op->op_next->op_type == OP_AND);
4027 assert(PL_op->op_next->op_ppaddr == Perl_pp_and);
4028 /* pp_enteriter should have pre-extended the stack */
4029 EXTEND_SKIP(PL_stack_sp, 1);
4030 /* we only need this for the rare case where the OP_AND isn't
4031 * in void context, e.g. $x = do { for (..) {...} };
4032 * but it's cheaper to just push it rather than testing first
4033 */
4034 *++PL_stack_sp = &PL_sv_no;
4035 return PL_op->op_next->op_next;
4036}
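
/* Illustrative Perl-level examples of the loop types dispatched on in
 * pp_iter() above (a rough sketch; which context type a given foreach
 * gets is decided by pp_enteriter):
 *
 *     for my $s ('aa' .. 'zz')   { ... }   # CXt_LOOP_LAZYSV: string range
 *     for my $i (1 .. 1_000_000) { ... }   # CXt_LOOP_LAZYIV: integer range
 *     for my $x (@ary)           { ... }   # CXt_LOOP_ARY:    over an array
 *     for my $x (1, 'b', $c)     { ... }   # CXt_LOOP_LIST:   over a stack list
 */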
4037
4038
4039/*
4040A description of how taint works in pattern matching and substitution.
4041
4042This is all conditional on NO_TAINT_SUPPORT not being defined. Under
4043NO_TAINT_SUPPORT, taint-related operations should become no-ops.
4044
4045While the pattern is being assembled/concatenated and then compiled,
4046PL_tainted will get set (via TAINT_set) if any component of the pattern
4047is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
4048the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
4049TAINT_get). It will also be set if any component of the pattern matches
4050based on locale-dependent behavior.
4051
4052When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
4053the pattern is marked as tainted. This means that subsequent usage, such
4054as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
4055on the new pattern too.
4056
4057RXf_TAINTED_SEEN is used post-execution by the get magic code
4058of $1 et al to indicate whether the returned value should be tainted.
4059It is the responsibility of the caller of the pattern (i.e. pp_match,
4060pp_subst etc) to set this flag for any other circumstances where $1 needs
4061to be tainted.
4062
4063The taint behaviour of pp_subst (and pp_substcont) is quite complex.
4064
4065There are three possible sources of taint
4066 * the source string
4067 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
4068 * the replacement string (or expression under /e)
4069
4070There are four destinations of taint and they are affected by the sources
4071according to the rules below:
4072
4073 * the return value (not including /r):
4074 tainted by the source string and pattern, but only for the
4075 number-of-iterations case; boolean returns aren't tainted;
4076 * the modified string (or modified copy under /r):
4077 tainted by the source string, pattern, and replacement strings;
4078 * $1 et al:
4079 tainted by the pattern, and under 'use re "taint"', by the source
4080 string too;
4081 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
4082 should always be unset before executing subsequent code.
4083
4084The overall action of pp_subst is:
4085
4086 * at the start, set bits in rxtainted indicating the taint status of
4087 the various sources.
4088
4089 * After each pattern execution, update the SUBST_TAINT_PAT bit in
4090 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
4091 pattern has subsequently become tainted via locale ops.
4092
4093 * If control is being passed to pp_substcont to execute a /e block,
4094 save rxtainted in the CXt_SUBST block, for future use by
4095 pp_substcont.
4096
4097 * Whenever control is being returned to perl code (either by falling
4098 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
4099 use the flag bits in rxtainted to make all the appropriate types of
4100 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
4101 et al will appear tainted.
4102
4103pp_match is just a simpler version of the above.
4104
4105*/
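
/* A small Perl-level illustration of the rules above (a sketch, assuming
 * the interpreter was started with -T so that TAINTING_get is true):
 *
 *     my $t = $ENV{PATH};        # tainted source string
 *     my $n = ($t =~ s/:/,/g);   # modified $t and iteration count $n: tainted
 *     my $b = ($t =~ s/,/:/);    # single (boolean-ish) return: $b not tainted
 *     "abc" =~ /(b)/;            # clean string and pattern: $1 stays clean
 */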
4106
4107PP(pp_subst)
4108{
4109 dSP; dTARG;
4110 PMOP *pm = cPMOP;
4111 PMOP *rpm = pm;
4112 char *s;
4113 char *strend;
4114 const char *c;
4115 STRLEN clen;
4116 SSize_t iters = 0;
4117 SSize_t maxiters;
4118 bool once;
4119 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
4120 See "how taint works" above */
4121 char *orig;
4122 U8 r_flags;
4123 REGEXP *rx = PM_GETRE(pm);
4124 regexp *prog = ReANY(rx);
4125 STRLEN len;
4126 int force_on_match = 0;
4127 const I32 oldsave = PL_savestack_ix;
4128 STRLEN slen;
4129 bool doutf8 = FALSE; /* whether replacement is in utf8 */
4130#ifdef PERL_ANY_COW
4131 bool was_cow;
4132#endif
4133 SV *nsv = NULL;
4134 /* known replacement string? */
4135 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
4136
4137 PERL_ASYNC_CHECK();
4138
4139 if (PL_op->op_flags & OPf_STACKED)
4140 TARG = POPs;
4141 else {
4142 if (ARGTARG)
4143 GETTARGET;
4144 else {
4145 TARG = DEFSV;
4146 }
4147 EXTEND(SP,1);
4148 }
4149
4150 SvGETMAGIC(TARG); /* must come before cow check */
4151#ifdef PERL_ANY_COW
4152 /* note that a string might get converted to COW during matching */
4153 was_cow = cBOOL(SvIsCOW(TARG));
4154#endif
4155 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
4156#ifndef PERL_ANY_COW
4157 if (SvIsCOW(TARG))
4158 sv_force_normal_flags(TARG,0);
4159#endif
4160 if ((SvREADONLY(TARG)
4161 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
4162 || SvTYPE(TARG) > SVt_PVLV)
4163 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
4164 Perl_croak_no_modify();
4165 }
4166 PUTBACK;
4167
4168 orig = SvPV_nomg(TARG, len);
4169 /* note we don't (yet) force the var into being a string; if we fail
4170 * to match, we leave it as-is; on a successful match, however, we *will*
4171 * coerce it into a string and then repeat the match */
4172 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
4173 force_on_match = 1;
4174
4175 /* only replace once? */
4176 once = !(rpm->op_pmflags & PMf_GLOBAL);
4177
4178 /* See "how taint works" above */
4179 if (TAINTING_get) {
4180 rxtainted = (
4181 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
4182 | (RXp_ISTAINTED(prog) ? SUBST_TAINT_PAT : 0)
4183 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
4184 | ((once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
4185 ? SUBST_TAINT_BOOLRET : 0));
4186 TAINT_NOT;
4187 }
4188
4189 force_it:
4190 if (!pm || !orig)
4191 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
4192
4193 strend = orig + len;
4194 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len;
4195 maxiters = 2 * slen + 10; /* We can match twice at each
4196 position, once with zero-length,
4197 second time with non-zero. */
4198
4199 /* handle the empty pattern */
4200 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
4201 if (PL_curpm == PL_reg_curpm) {
4202 if (PL_curpm_under) {
4203 if (PL_curpm_under == PL_reg_curpm) {
4204 Perl_croak(aTHX_ "Infinite recursion via empty pattern");
4205 } else {
4206 pm = PL_curpm_under;
4207 }
4208 }
4209 } else {
4210 pm = PL_curpm;
4211 }
4212 rx = PM_GETRE(pm);
4213 prog = ReANY(rx);
4214 }
4215
4216#ifdef PERL_SAWAMPERSAND
4217 r_flags = ( RXp_NPARENS(prog)
4218 || PL_sawampersand
4219 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
4220 || (rpm->op_pmflags & PMf_KEEPCOPY)
4221 )
4222 ? REXEC_COPY_STR
4223 : 0;
4224#else
4225 r_flags = REXEC_COPY_STR;
4226#endif
4227
4228 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
4229 {
4230 SPAGAIN;
4231 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
4232 LEAVE_SCOPE(oldsave);
4233 RETURN;
4234 }
4235 PL_curpm = pm;
4236
4237 /* known replacement string? */
4238 if (dstr) {
4239 /* replacement needing upgrading? */
4240 if (DO_UTF8(TARG) && !doutf8) {
4241 nsv = sv_newmortal();
4242 SvSetSV(nsv, dstr);
4243 sv_utf8_upgrade(nsv);
4244 c = SvPV_const(nsv, clen);
4245 doutf8 = TRUE;
4246 }
4247 else {
4248 c = SvPV_const(dstr, clen);
4249 doutf8 = DO_UTF8(dstr);
4250 }
4251
4252 if (UNLIKELY(TAINT_get))
4253 rxtainted |= SUBST_TAINT_REPL;
4254 }
4255 else {
4256 c = NULL;
4257 doutf8 = FALSE;
4258 }
4259
4260 /* can do inplace substitution? */
4261 if (c
4262#ifdef PERL_ANY_COW
4263 && !was_cow
4264#endif
4265 && (I32)clen <= RXp_MINLENRET(prog)
4266 && ( once
4267 || !(r_flags & REXEC_COPY_STR)
4268 || (!SvGMAGICAL(dstr) && !(RXp_EXTFLAGS(prog) & RXf_EVAL_SEEN))
4269 )
4270 && !(RXp_EXTFLAGS(prog) & RXf_NO_INPLACE_SUBST)
4271 && (!doutf8 || SvUTF8(TARG))
4272 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
4273 {
4274
4275#ifdef PERL_ANY_COW
4276 /* string might have got converted to COW since we set was_cow */
4277 if (SvIsCOW(TARG)) {
4278 if (!force_on_match)
4279 goto have_a_cow;
4280 assert(SvVOK(TARG));
4281 }
4282#endif
4283 if (force_on_match) {
4284 /* redo the first match, this time with the orig var
4285 * forced into being a string */
4286 force_on_match = 0;
4287 orig = SvPV_force_nomg(TARG, len);
4288 goto force_it;
4289 }
4290
4291 if (once) {
4292 char *d, *m;
4293 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
4294 rxtainted |= SUBST_TAINT_PAT;
4295 m = orig + RXp_OFFS(prog)[0].start;
4296 d = orig + RXp_OFFS(prog)[0].end;
4297 s = orig;
4298 if (m - s > strend - d) { /* faster to shorten from end */
4299 I32 i;
4300 if (clen) {
4301 Copy(c, m, clen, char);
4302 m += clen;
4303 }
4304 i = strend - d;
4305 if (i > 0) {
4306 Move(d, m, i, char);
4307 m += i;
4308 }
4309 *m = '\0';
4310 SvCUR_set(TARG, m - s);
4311 }
4312 else { /* faster from front */
4313 I32 i = m - s;
4314 d -= clen;
4315 if (i > 0)
4316 Move(s, d - i, i, char);
4317 sv_chop(TARG, d-i);
4318 if (clen)
4319 Copy(c, d, clen, char);
4320 }
4321 SPAGAIN;
4322 PUSHs(&PL_sv_yes);
4323 }
4324 else {
4325 char *d, *m;
4326 d = s = RXp_OFFS(prog)[0].start + orig;
4327 do {
4328 I32 i;
4329 if (UNLIKELY(iters++ > maxiters))
4330 DIE(aTHX_ "Substitution loop");
4331 /* run time pattern taint, eg locale */
4332 if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
4333 rxtainted |= SUBST_TAINT_PAT;
4334 m = RXp_OFFS(prog)[0].start + orig;
4335 if ((i = m - s)) {
4336 if (s != d)
4337 Move(s, d, i, char);
4338 d += i;
4339 }
4340 if (clen) {
4341 Copy(c, d, clen, char);
4342 d += clen;
4343 }
4344 s = RXp_OFFS(prog)[0].end + orig;
4345 } while (CALLREGEXEC(rx, s, strend, orig,
4346 s == m, /* don't match same null twice */
4347 TARG, NULL,
4348 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
4349 if (s != d) {
4350 I32 i = strend - s;
4351 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
4352 Move(s, d, i+1, char); /* include the NUL */
4353 }
4354 SPAGAIN;
4355 if (PL_op->op_private & OPpTRUEBOOL)
4356 PUSHs(iters ? &PL_sv_yes : &PL_sv_zero);
4357 else
4358 mPUSHi(iters);
4359 }
4360 }
4361 else {
4362 bool first;
4363 char *m;
4364 SV *repl;
4365 if (force_on_match) {
4366 /* redo the first match, this time with the orig var
4367 * forced into being a string */
4368 force_on_match = 0;
4369 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
4370 /* I feel that it should be possible to avoid this mortal copy
4371 given that the code below copies into a new destination.
4372 However, I suspect it isn't worth the complexity of
4373 unravelling the C<goto force_it> for the small number of
4374 cases where it would be viable to drop into the copy code. */
4375 TARG = sv_2mortal(newSVsv(TARG));
4376 }
4377 orig = SvPV_force_nomg(TARG, len);
4378 goto force_it;
4379 }
4380#ifdef PERL_ANY_COW
4381 have_a_cow:
4382#endif
4383 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
4384 rxtainted |= SUBST_TAINT_PAT;
4385 repl = dstr;
4386 s = RXp_OFFS(prog)[0].start + orig;
4387 dstr = newSVpvn_flags(orig, s-orig,
4388 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
4389 if (!c) {
4390 PERL_CONTEXT *cx;
4391 SPAGAIN;
4392 m = orig;
4393 /* note that a whole bunch of local vars are saved here for
4394 * use by pp_substcont: here's a list of them in case you're
4395 * searching for places in this sub that use a particular var:
4396 * iters maxiters r_flags oldsave rxtainted orig dstr targ
4397 * s m strend rx once */
4398 CX_PUSHSUBST(cx);
4399 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
4400 }
4401 first = TRUE;
4402 do {
4403 if (UNLIKELY(iters++ > maxiters))
4404 DIE(aTHX_ "Substitution loop");
4405 if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
4406 rxtainted |= SUBST_TAINT_PAT;
4407 if (RXp_MATCH_COPIED(prog) && RXp_SUBBEG(prog) != orig) {
4408 char *old_s = s;
4409 char *old_orig = orig;
4410 assert(RXp_SUBOFFSET(prog) == 0);
4411
4412 orig = RXp_SUBBEG(prog);
4413 s = orig + (old_s - old_orig);
4414 strend = s + (strend - old_s);
4415 }
4416 m = RXp_OFFS(prog)[0].start + orig;
4417 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
4418 s = RXp_OFFS(prog)[0].end + orig;
4419 if (first) {
4420 /* replacement already stringified */
4421 if (clen)
4422 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
4423 first = FALSE;
4424 }
4425 else {
4426 sv_catsv(dstr, repl);
4427 }
4428 if (once)
4429 break;
4430 } while (CALLREGEXEC(rx, s, strend, orig,
4431 s == m, /* Yields minend of 0 or 1 */
4432 TARG, NULL,
4433 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
4434 assert(strend >= s);
4435 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
4436
4437 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
4438 /* From here on down we're using the copy, and leaving the original
4439 untouched. */
4440 TARG = dstr;
4441 SPAGAIN;
4442 PUSHs(dstr);
4443 } else {
4444#ifdef PERL_ANY_COW
4445 /* The match may make the string COW. If so, brilliant, because
4446 that's just saved us one malloc, copy and free - the regexp has
4447 donated the old buffer, and we malloc an entirely new one, rather
4448 than the regexp malloc()ing a buffer and copying our original,
4449 only for us to throw it away here during the substitution. */
4450 if (SvIsCOW(TARG)) {
4451 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
4452 } else
4453#endif
4454 {
4455 SvPV_free(TARG);
4456 }
4457 SvPV_set(TARG, SvPVX(dstr));
4458 SvCUR_set(TARG, SvCUR(dstr));
4459 SvLEN_set(TARG, SvLEN(dstr));
4460 SvFLAGS(TARG) |= SvUTF8(dstr);
4461 SvPV_set(dstr, NULL);
4462
4463 SPAGAIN;
4464 mPUSHi(iters);
4465 }
4466 }
4467
4468 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
4469 (void)SvPOK_only_UTF8(TARG);
4470 }
4471
4472 /* See "how taint works" above */
4473 if (TAINTING_get) {
4474 if ((rxtainted & SUBST_TAINT_PAT) ||
4475 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
4476 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
4477 )
4478 (RXp_MATCH_TAINTED_on(prog)); /* taint $1 et al */
4479
4480 if (!(rxtainted & SUBST_TAINT_BOOLRET)
4481 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
4482 )
4483 SvTAINTED_on(TOPs); /* taint return value */
4484 else
4485 SvTAINTED_off(TOPs); /* may have got tainted earlier */
4486
4487 /* needed for mg_set below */
4488 TAINT_set(
4489 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
4490 );
4491 SvTAINT(TARG);
4492 }
4493 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
4494 TAINT_NOT;
4495 LEAVE_SCOPE(oldsave);
4496 RETURN;
4497}
4498
4499PP(pp_grepwhile)
4500{
4501 dSP;
4502 dPOPss;
4503
4504 if (SvTRUE_NN(sv))
4505 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
4506 ++*PL_markstack_ptr;
4507 FREETMPS;
4508 LEAVE_with_name("grep_item"); /* exit inner scope */
4509
4510 /* All done yet? */
4511 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
4512 I32 items;
4513 const U8 gimme = GIMME_V;
4514
4515 LEAVE_with_name("grep"); /* exit outer scope */
4516 (void)POPMARK; /* pop src */
4517 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
4518 (void)POPMARK; /* pop dst */
4519 SP = PL_stack_base + POPMARK; /* pop original mark */
4520 if (gimme == G_SCALAR) {
4521 if (PL_op->op_private & OPpTRUEBOOL)
4522 PUSHs(items ? &PL_sv_yes : &PL_sv_zero);
4523 else {
4524 dTARGET;
4525 PUSHi(items);
4526 }
4527 }
4528 else if (gimme == G_ARRAY)
4529 SP += items;
4530 RETURN;
4531 }
4532 else {
4533 SV *src;
4534
4535 ENTER_with_name("grep_item"); /* enter inner scope */
4536 SAVEVPTR(PL_curpm);
4537
4538 src = PL_stack_base[TOPMARK];
4539 if (SvPADTMP(src)) {
4540 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
4541 PL_tmps_floor++;
4542 }
4543 SvTEMP_off(src);
4544 DEFSV_set(src);
4545
4546 RETURNOP(cLOGOP->op_other);
4547 }
4548}
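
/* Illustrative Perl-level uses of grep that reach the branches above
 * (a sketch):
 *
 *     my @hits = grep { /x/ } @in;   # G_ARRAY: matched elements left on stack
 *     my $n    = grep { /x/ } @in;   # G_SCALAR: count pushed via PUSHi
 *     if (grep { /x/ } @in) { ... }  # boolean context: the peephole optimiser
 *                                    # may set OPpTRUEBOOL, so only
 *                                    # &PL_sv_yes / &PL_sv_zero gets pushed
 */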
4549
4550/* leave_adjust_stacks():
4551 *
4552 * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp),
4553 * positioning them at to_sp+1 onwards, and do the equivalent of a
4554 * FREEMPS and TAINT_NOT.
4555 *
4556 * Not intended to be called in void context.
4557 *
4558 * When leaving a sub, eval, do{} or other scope, the things that need
4559 * doing to process the return args are:
4560 * * in scalar context, only return the last arg (or PL_sv_undef if none);
4561 * * for the types of return that return copies of their args (such
4562 * as rvalue sub return), make a mortal copy of every return arg,
4563 * except where we can optimise the copy away without it being
4564 * semantically visible;
4565 * * make sure that the arg isn't prematurely freed; in the case of an
4566 * arg not copied, this may involve mortalising it. For example, in
4567 * C<sub f { my $x = ...; $x }>, $x would be freed when we do
4568 * CX_LEAVE_SCOPE(cx) unless it's protected or copied.
4569 *
4570 * What condition to use when deciding whether to pass the arg through
4571 * or make a copy, is determined by the 'pass' arg; its valid values are:
4572 * 0: rvalue sub/eval exit
4573 * 1: other rvalue scope exit
4574 * 2: :lvalue sub exit in rvalue context
4575 * 3: :lvalue sub exit in lvalue context and other lvalue scope exits
4576 *
4577 * There is a big issue with doing a FREETMPS. We would like to free any
4578 * temps created by the last statement which the sub executed, rather than
4579 * leaving them for the caller. In a situation where a sub call isn't
4580 * soon followed by a nextstate (e.g. nested recursive calls, a la
4581 * fibonacci()), temps can accumulate, causing memory and performance
4582 * issues.
4583 *
4584 * On the other hand, we don't want to free any TEMPs which are keeping
4585 * alive any return args that we skipped copying; nor do we wish to undo
4586 * any mortalising done here.
4587 *
4588 * The solution is to split the temps stack frame into two, with a cut
4589 * point delineating the two halves. We arrange that by the end of this
4590 * function, all the temps stack frame entries we wish to keep are in the
4591 * range PL_tmps_floor+1.. tmps_base-1, while the ones to free now are in
4592 * the range tmps_base .. PL_tmps_ix. During the course of this
4593 * function, tmps_base starts off as PL_tmps_floor+1, then increases
4594 * whenever we find or create a temp that we know should be kept. In
4595 * general the stuff above tmps_base is undecided until we reach the end,
4596 * and we may need a sort stage for that.
4597 *
4598 * To determine whether a TEMP is keeping a return arg alive, every
4599 * arg that is kept rather than copied and which has the SvTEMP flag
4600 * set, has the flag temporarily unset, to mark it. At the end we scan
4601 * the temps stack frame above the cut for entries without SvTEMP and
4602 * keep them, while turning SvTEMP on again. Note that if we die before
4603 * the SvTEMP flags are set again, it's safe: at worst, subsequent use of
4604 * those SVs may be slightly less efficient.
4605 *
4606 * In practice various optimisations for some common cases mean we can
4607 * avoid most of the scanning and swapping about with the temps stack.
4608 */
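
/* A rough picture of the temps-stack cut described above (illustrative
 * only):
 *
 *     PL_tmps_floor+1 .. tmps_base-1   entries to keep (they protect return
 *                                      args, or were mortalised here)
 *     tmps_base       .. PL_tmps_ix    entries freed by the unrolled
 *                                      FREETMPS loop at the end
 */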
4609
4610void
4611Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass)
4612{
4613 dVAR;
4614 dSP;
4615 SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
4616 SSize_t nargs;
4617
4618 PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS;
4619
4620 TAINT_NOT;
4621
4622 if (gimme == G_ARRAY) {
4623 nargs = SP - from_sp;
4624 from_sp++;
4625 }
4626 else {
4627 assert(gimme == G_SCALAR);
4628 if (UNLIKELY(from_sp >= SP)) {
4629 /* no return args */
4630 assert(from_sp == SP);
4631 EXTEND(SP, 1);
4632 *++SP = &PL_sv_undef;
4633 to_sp = SP;
4634 nargs = 0;
4635 }
4636 else {
4637 from_sp = SP;
4638 nargs = 1;
4639 }
4640 }
4641
4642 /* common code for G_SCALAR and G_ARRAY */
4643
4644 tmps_base = PL_tmps_floor + 1;
4645
4646 assert(nargs >= 0);
4647 if (nargs) {
4648 /* pointer version of tmps_base. Not safe across temp stack
4649 * reallocs. */
4650 SV **tmps_basep;
4651
4652 EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
4653 tmps_basep = PL_tmps_stack + tmps_base;
4654
4655 /* process each return arg */
4656
4657 do {
4658 SV *sv = *from_sp++;
4659
4660 assert(PL_tmps_ix + nargs < PL_tmps_max);
4661#ifdef DEBUGGING
4662 /* PADTMPs with container set magic shouldn't appear in the
4663 * wild. This assert is more important for pp_leavesublv(),
4664 * but by testing for it here, we're more likely to catch
4665 * bad cases (what with :lvalue subs not being widely
4666 * deployed). The two issues are that for something like
4667 * sub :lvalue { $tied{foo} }
4668 * or
4669 * sub :lvalue { substr($foo,1,2) }
4670 * pp_leavesublv() will croak if the sub returns a PADTMP,
4671 * and currently functions like pp_substr() return a mortal
4672 * rather than using their PADTMP when returning a PVLV.
4673 * This is because the PVLV will hold a ref to $foo,
4674 * so $foo would get delayed in being freed while
4675 * the PADTMP SV remained in the PAD.
4676 * So if this assert fails it means either:
4677 * 1) there is pp code similar to pp_substr that is
4678 * returning a PADTMP instead of a mortal, and probably
4679 * needs fixing, or
4680 * 2) pp_leavesublv is making unwarranted assumptions
4681 * about always croaking on a PADTMP
4682 */
4683 if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
4684 MAGIC *mg;
4685 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
4686 assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
4687 }
4688 }
4689#endif
4690
4691 if (
4692 pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
4693 : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
4694 : pass == 2 ? (!SvPADTMP(sv))
4695 : 1)
4696 {
4697 /* pass through: skip copy for logic or optimisation
4698 * reasons; instead mortalise it, except that ... */
4699 *++to_sp = sv;
4700
4701 if (SvTEMP(sv)) {
4702 /* ... since this SV is an SvTEMP, we don't need to
4703 * re-mortalise it; instead we just need to ensure
4704 * that its existing entry in the temps stack frame
4705 * ends up below the cut and so avoids being freed
4706 * this time round. We mark it as needing to be kept
4707 * by temporarily unsetting SvTEMP; then at the end,
4708 * we shuffle any !SvTEMP entries on the tmps stack
4709 * back below the cut.
4710 * However, there's a significant chance that there's
4711 * a 1:1 correspondence between the first few (or all)
4712 * elements in the return args stack frame and those
4713 * in the temps stack frame; e.g.:
4714 * sub f { ....; map {...} .... },
4715 * or if we're exiting multiple scopes and one of the
4716 * inner scopes has already made mortal copies of each
4717 * return arg.
4718 *
4719 * If so, this arg sv will correspond to the next item
4720 * on the tmps stack above the cut, and so can be kept
4721 * merely by moving the cut boundary up one, rather
4722 * than messing with SvTEMP. If all args are 1:1 then
4723 * we can avoid the sorting stage below completely.
4724 *
4725 * If there are no items above the cut on the tmps
4726 * stack, then the SvTEMP must come from an item
4727 * below the cut, so there's nothing to do.
4728 */
4729 if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) {
4730 if (sv == *tmps_basep)
4731 tmps_basep++;
4732 else
4733 SvTEMP_off(sv);
4734 }
4735 }
4736 else if (!SvPADTMP(sv)) {
4737 /* mortalise arg to avoid it being freed during save
4738 * stack unwinding. Pad tmps don't need mortalising as
4739 * they're never freed. This is the equivalent of
4740 * sv_2mortal(SvREFCNT_inc(sv)), except that:
4741 * * it assumes that the temps stack has already been
4742 * extended;
4743 * * it puts the new item at the cut rather than at
4744 * ++PL_tmps_ix, moving the previous occupant there
4745 * instead.
4746 */
4747 if (!SvIMMORTAL(sv)) {
4748 SvREFCNT_inc_simple_void_NN(sv);
4749 SvTEMP_on(sv);
4750 /* Note that if there's nothing above the cut,
4751 * this copies the garbage one slot above
4752 * PL_tmps_ix onto itself. This is harmless (the
4753 * stack's already been extended), but might in
4754 * theory trigger warnings from tools like ASan
4755 */
4756 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
4757 *tmps_basep++ = sv;
4758 }
4759 }
4760 }
4761 else {
4762 /* Make a mortal copy of the SV.
4763 * The following code is the equivalent of sv_mortalcopy()
4764 * except that:
4765 * * it assumes the temps stack has already been extended;
4766 * * it optimises the copying for some simple SV types;
4767 * * it puts the new item at the cut rather than at
4768 * ++PL_tmps_ix, moving the previous occupant there
4769 * instead.
4770 */
4771 SV *newsv = newSV(0);
4772
4773 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
4774 /* put it on the tmps stack early so it gets freed if we die */
4775 *tmps_basep++ = newsv;
4776 *++to_sp = newsv;
4777
4778 if (SvTYPE(sv) <= SVt_IV) {
4779 /* arg must be one of undef, IV/UV, or RV: skip
4780 * sv_setsv_flags() and do the copy directly */
4781 U32 dstflags;
4782 U32 srcflags = SvFLAGS(sv);
4783
4784 assert(!SvGMAGICAL(sv));
4785 if (srcflags & (SVf_IOK|SVf_ROK)) {
4786 SET_SVANY_FOR_BODYLESS_IV(newsv);
4787
4788 if (srcflags & SVf_ROK) {
4789 newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
4790 /* SV type plus flags */
4791 dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
4792 }
4793 else {
4794 /* both src and dst are <= SVt_IV, so sv_any
4795 * points to the head; so access the heads
4796 * directly rather than going via sv_any.
4797 */
4798 assert( &(sv->sv_u.svu_iv)
4799 == &(((XPVIV*) SvANY(sv))->xiv_iv));
4800 assert( &(newsv->sv_u.svu_iv)
4801 == &(((XPVIV*) SvANY(newsv))->xiv_iv));
4802 newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
4803 /* SV type plus flags */
4804 dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
4805 |(srcflags & SVf_IVisUV));
4806 }
4807 }
4808 else {
4809 assert(!(srcflags & SVf_OK));
4810 dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
4811 }
4812 SvFLAGS(newsv) = dstflags;
4813
4814 }
4815 else {
4816 /* do the full sv_setsv() */
4817 SSize_t old_base;
4818
4819 SvTEMP_on(newsv);
4820 old_base = tmps_basep - PL_tmps_stack;
4821 SvGETMAGIC(sv);
4822 sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
4823 /* the mg_get or sv_setsv might have created new temps
4824 * or realloced the tmps stack; regrow and reload */
4825 EXTEND_MORTAL(nargs);
4826 tmps_basep = PL_tmps_stack + old_base;
4827 TAINT_NOT; /* Each item is independent */
4828 }
4829
4830 }
4831 } while (--nargs);
4832
4833 /* If there are any temps left above the cut, we need to sort
4834 * them into those to keep and those to free. The only ones to
4835 * keep are those for which we've temporarily unset SvTEMP.
4836 * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
4837 * swapping pairs as necessary. Stop when we meet in the middle.
4838 */
4839 {
4840 SV **top = PL_tmps_stack + PL_tmps_ix;
4841 while (tmps_basep <= top) {
4842 SV *sv = *top;
4843 if (SvTEMP(sv))
4844 top--;
4845 else {
4846 SvTEMP_on(sv);
4847 *top = *tmps_basep;
4848 *tmps_basep = sv;
4849 tmps_basep++;
4850 }
4851 }
4852 }
4853
4854 tmps_base = tmps_basep - PL_tmps_stack;
4855 }
4856
4857 PL_stack_sp = to_sp;
4858
4859 /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
4860 while (PL_tmps_ix >= tmps_base) {
4861 SV* const sv = PL_tmps_stack[PL_tmps_ix--];
4862#ifdef PERL_POISON
4863 PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
4864#endif
4865 if (LIKELY(sv)) {
4866 SvTEMP_off(sv);
4867 SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
4868 }
4869 }
4870}
4871
4872
4873/* also tail-called by pp_return */
4874
4875PP(pp_leavesub)
4876{
4877 U8 gimme;
4878 PERL_CONTEXT *cx;
4879 SV **oldsp;
4880 OP *retop;
4881
4882 cx = CX_CUR();
4883 assert(CxTYPE(cx) == CXt_SUB);
4884
4885 if (CxMULTICALL(cx)) {
4886 /* entry zero of a stack is always PL_sv_undef, which
4887 * simplifies converting a '()' return into undef in scalar context */
4888 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
4889 return 0;
4890 }
4891
4892 gimme = cx->blk_gimme;
4893 oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */
4894
4895 if (gimme == G_VOID)
4896 PL_stack_sp = oldsp;
4897 else
4898 leave_adjust_stacks(oldsp, oldsp, gimme, 0);
4899
4900 CX_LEAVE_SCOPE(cx);
4901 cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */
4902 cx_popblock(cx);
4903 retop = cx->blk_sub.retop;
4904 CX_POP(cx);
4905
4906 return retop;
4907}
4908
4909
4910/* clear (if possible) or abandon the current @_. If 'abandon' is true,
4911 * always abandon it: a fresh AV replaces @_ in pad[0] */
4912
4913void
4914Perl_clear_defarray(pTHX_ AV* av, bool abandon)
4915{
4916 const SSize_t fill = AvFILLp(av);
4917
4918 PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
4919
4920 if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) {
4921 av_clear(av);
4922 AvREIFY_only(av);
4923 }
4924 else {
4925 AV *newav = newAV();
4926 av_extend(newav, fill);
4927 AvREIFY_only(newav);
4928 PAD_SVl(0) = MUTABLE_SV(newav);
4929 SvREFCNT_dec_NN(av);
4930 }
4931}
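
/* Illustrative Perl-level case (a sketch): if the sub has done something
 * like  my $r = \@_;  then @_ has been reified and may still be referenced
 * when the sub exits, so it is abandoned here and a fresh AV installed in
 * pad[0]; a reified but unshared, unmagical @_ is simply cleared and
 * reused instead. */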
4932
4933
4934PP(pp_entersub)
4935{
4936 dSP; dPOPss;
4937 GV *gv;
4938 CV *cv;
4939 PERL_CONTEXT *cx;
4940 I32 old_savestack_ix;
4941
4942 if (UNLIKELY(!sv))
4943 goto do_die;
4944
4945 /* Locate the CV to call:
4946 * - most common case: RV->CV: f(), $ref->():
4947 * note that if a sub is compiled before its caller is compiled,
4948 * the stash entry will be a ref to a CV, rather than being a GV.
4949 * - second most common case: CV: $ref->method()
4950 */
4951
4952 /* a non-magic-RV -> CV ? */
4953 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
4954 cv = MUTABLE_CV(SvRV(sv));
4955 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
4956 goto do_ref;
4957 }
4958 else
4959 cv = MUTABLE_CV(sv);
4960
4961 /* a CV ? */
4962 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
4963 /* handle all the weird cases */
4964 switch (SvTYPE(sv)) {
4965 case SVt_PVLV:
4966 if (!isGV_with_GP(sv))
4967 goto do_default;
4968 /* FALLTHROUGH */
4969 case SVt_PVGV:
4970 cv = GvCVu((const GV *)sv);
4971 if (UNLIKELY(!cv)) {
4972 HV *stash;
4973 cv = sv_2cv(sv, &stash, &gv, 0);
4974 if (!cv) {
4975 old_savestack_ix = PL_savestack_ix;
4976 goto try_autoload;
4977 }
4978 }
4979 break;
4980
4981 default:
4982 do_default:
4983 SvGETMAGIC(sv);
4984 if (SvROK(sv)) {
4985 do_ref:
4986 if (UNLIKELY(SvAMAGIC(sv))) {
4987 sv = amagic_deref_call(sv, to_cv_amg);
4988 /* Don't SPAGAIN here. */
4989 }
4990 }
4991 else {
4992 const char *sym;
4993 STRLEN len;
4994 if (UNLIKELY(!SvOK(sv)))
4995 DIE(aTHX_ PL_no_usym, "a subroutine");
4996
4997 if (UNLIKELY(sv == &PL_sv_yes)) { /* unfound import, ignore */
4998 if (PL_op->op_flags & OPf_STACKED) /* hasargs */
4999 SP = PL_stack_base + POPMARK;
5000 else
5001 (void)POPMARK;
5002 if (GIMME_V == G_SCALAR)
5003 PUSHs(&PL_sv_undef);
5004 RETURN;
5005 }
5006
5007 sym = SvPV_nomg_const(sv, len);
5008 if (PL_op->op_private & HINT_STRICT_REFS)
5009 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
5010 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
5011 break;
5012 }
5013 cv = MUTABLE_CV(SvRV(sv));
5014 if (LIKELY(SvTYPE(cv) == SVt_PVCV))
5015 break;
5016 /* FALLTHROUGH */
5017 case SVt_PVHV:
5018 case SVt_PVAV:
5019 do_die:
5020 DIE(aTHX_ "Not a CODE reference");
5021 }
5022 }
5023
5024 /* At this point we want to save PL_savestack_ix, either by doing a
5025 * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final
5026 * CV we will be using (so we don't know whether it's XS, so we can't
5027 * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on
5028 * the save stack. So remember where we are currently on the save
5029 * stack, and later update the CX or scopestack entry accordingly. */
5030 old_savestack_ix = PL_savestack_ix;
5031
5032 /* these two fields are in a union. If they ever become separate,
5033 * we have to test for both of them being null below */
5034 assert(cv);
5035 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
5036 while (UNLIKELY(!CvROOT(cv))) {
5037 GV* autogv;
5038 SV* sub_name;
5039
5040 /* anonymous or undef'd function leaves us no recourse */
5041 if (CvLEXICAL(cv) && CvHASGV(cv))
5042 DIE(aTHX_ "Undefined subroutine &%" SVf " called",
5043 SVfARG(cv_name(cv, NULL, 0)));
5044 if (CvANON(cv) || !CvHASGV(cv)) {
5045 DIE(aTHX_ "Undefined subroutine called");
5046 }
5047
5048 /* autoloaded stub? */
5049 if (cv != GvCV(gv = CvGV(cv))) {
5050 cv = GvCV(gv);
5051 }
5052 /* should call AUTOLOAD now? */
5053 else {
5054 try_autoload:
5055 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
5056 (GvNAMEUTF8(gv) ? SVf_UTF8 : 0)
5057 |(PL_op->op_flags & OPf_REF
5058 ? GV_AUTOLOAD_ISMETHOD
5059 : 0));
5060 cv = autogv ? GvCV(autogv) : NULL;
5061 }
5062 if (!cv) {
5063 sub_name = sv_newmortal();
5064 gv_efullname3(sub_name, gv, NULL);
5065 DIE(aTHX_ "Undefined subroutine &%" SVf " called", SVfARG(sub_name));
5066 }
5067 }
5068
5069 /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
5070 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
5071 DIE(aTHX_ "Closure prototype called");
5072
5073 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
5074 && !CvNODEBUG(cv)))
5075 {
5076 Perl_get_db_sub(aTHX_ &sv, cv);
5077 if (CvISXSUB(cv))
5078 PL_curcopdb = PL_curcop;
5079 if (CvLVALUE(cv)) {
5080 /* check for lsub that handles lvalue subroutines */
5081 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
5082 /* if lsub not found then fall back to DB::sub */
5083 if (!cv) cv = GvCV(PL_DBsub);
5084 } else {
5085 cv = GvCV(PL_DBsub);
5086 }
5087
5088 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
5089 DIE(aTHX_ "No DB::sub routine defined");
5090 }
5091
5092 if (!(CvISXSUB(cv))) {
5093 /* This path taken at least 75% of the time */
5094 dMARK;
5095 PADLIST *padlist;
5096 I32 depth;
5097 bool hasargs;
5098 U8 gimme;
5099
5100 /* keep PADTMP args alive throughout the call (we need to do this
5101 * because @_ isn't refcounted). Note that we create the mortals
5102 * in the caller's tmps frame, so they won't be freed until after
5103 * we return from the sub.
5104 */
5105 {
5106 SV **svp = MARK;
5107 while (svp < SP) {
5108 SV *sv = *++svp;
5109 if (!sv)
5110 continue;
5111 if (SvPADTMP(sv))
5112 *svp = sv = sv_mortalcopy(sv);
5113 SvTEMP_off(sv);
5114 }
5115 }
5116
5117 gimme = GIMME_V;
5118 cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix);
5119 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
5120 cx_pushsub(cx, cv, PL_op->op_next, hasargs);
5121
5122 padlist = CvPADLIST(cv);
5123 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2))
5124 pad_push(padlist, depth);
5125 PAD_SET_CUR_NOSAVE(padlist, depth);
5126 if (LIKELY(hasargs)) {
5127 AV *const av = MUTABLE_AV(PAD_SVl(0));
5128 SSize_t items;
5129 AV **defavp;
5130
5131 defavp = &GvAV(PL_defgv);
5132 cx->blk_sub.savearray = *defavp;
5133 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
5134
5135 /* it's the responsibility of whoever leaves a sub to ensure
5136 * that a clean, empty AV is left in pad[0]. This is normally
5137 * done by cx_popsub() */
5138 assert(!AvREAL(av) && AvFILLp(av) == -1);
5139
5140 items = SP - MARK;
5141 if (UNLIKELY(items - 1 > AvMAX(av))) {
5142 SV **ary = AvALLOC(av);
5143 Renew(ary, items, SV*);
5144 AvMAX(av) = items - 1;
5145 AvALLOC(av) = ary;
5146 AvARRAY(av) = ary;
5147 }
5148
5149 if (items)
5150 Copy(MARK+1,AvARRAY(av),items,SV*);
5151 AvFILLp(av) = items - 1;
5152 }
5153 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
5154 !CvLVALUE(cv)))
5155 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
5156 SVfARG(cv_name(cv, NULL, 0)));
5157 /* warning must come *after* we fully set up the context
5158 * stuff so that __WARN__ handlers can safely dounwind()
5159 * if they want to
5160 */
5161 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
5162 && ckWARN(WARN_RECURSION)
5163 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
5164 sub_crush_depth(cv);
5165 RETURNOP(CvSTART(cv));
5166 }
5167 else {
5168 SSize_t markix = TOPMARK;
5169 bool is_scalar;
5170
5171 ENTER;
5172 /* pretend we did the ENTER earlier */
5173 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
5174
5175 SAVETMPS;
5176 PUTBACK;
5177
5178 if (UNLIKELY(((PL_op->op_private
5179 & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
5180 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
5181 !CvLVALUE(cv)))
5182 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
5183 SVfARG(cv_name(cv, NULL, 0)));
5184
5185 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
5186 /* Need to copy @_ to the stack. An alternative would be to
5187 * switch the stack to @_ and copy the return values
5188 * back; that would allow an XSUB to pop @_. XXXX */
5189 AV * const av = GvAV(PL_defgv);
5190 const SSize_t items = AvFILL(av) + 1;
5191
5192 if (items) {
5193 SSize_t i = 0;
5194 const bool m = cBOOL(SvRMAGICAL(av));
5195 /* Mark is at the end of the stack. */
5196 EXTEND(SP, items);
5197 for (; i < items; ++i)
5198 {
5199 SV *sv;
5200 if (m) {
5201 SV ** const svp = av_fetch(av, i, 0);
5202 sv = svp ? *svp : NULL;
5203 }
5204 else sv = AvARRAY(av)[i];
5205 if (sv) SP[i+1] = sv;
5206 else {
5207 SP[i+1] = newSVavdefelem(av, i, 1);
5208 }
5209 }
5210 SP += items;
5211 PUTBACK ;
5212 }
5213 }
5214 else {
5215 SV **mark = PL_stack_base + markix;
5216 SSize_t items = SP - mark;
5217 while (items--) {
5218 mark++;
5219 if (*mark && SvPADTMP(*mark)) {
5220 *mark = sv_mortalcopy(*mark);
5221 }
5222 }
5223 }
5224 /* We assume the first XSUB in &DB::sub is the called one. */
5225 if (UNLIKELY(PL_curcopdb)) {
5226 SAVEVPTR(PL_curcop);
5227 PL_curcop = PL_curcopdb;
5228 PL_curcopdb = NULL;
5229 }
5230 /* Do we need to open a block here? XXXX */
5231
5232 /* calculate gimme here as PL_op might get changed and then not
5233 * restored until the LEAVE further down */
5234 is_scalar = (GIMME_V == G_SCALAR);
5235
5236 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
5237 assert(CvXSUB(cv));
5238 CvXSUB(cv)(aTHX_ cv);
5239
5240#if defined DEBUGGING && !defined DEBUGGING_RE_ONLY
5241 /* This duplicates the check done in runops_debug(), but provides more
5242 * information in the common case of the fault being with an XSUB.
5243 *
5244 * It should also catch an XSUB pushing more than it extends
5245 * in scalar context.
5246 */
5247 if (PL_curstackinfo->si_stack_hwm < PL_stack_sp - PL_stack_base)
5248 Perl_croak_nocontext(
5249 "panic: XSUB %s::%s (%s) failed to extend arg stack: "
5250 "base=%p, sp=%p, hwm=%p\n",
5251 HvNAME(GvSTASH(CvGV(cv))), GvNAME(CvGV(cv)), CvFILE(cv),
5252 PL_stack_base, PL_stack_sp,
5253 PL_stack_base + PL_curstackinfo->si_stack_hwm);
5254#endif
5255 /* Enforce some sanity in scalar context. */
5256 if (is_scalar) {
5257 SV **svp = PL_stack_base + markix + 1;
5258 if (svp != PL_stack_sp) {
5259 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
5260 PL_stack_sp = svp;
5261 }
5262 }
5263 LEAVE;
5264 return NORMAL;
5265 }
5266}
5267
5268void
5269Perl_sub_crush_depth(pTHX_ CV *cv)
5270{
5271 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
5272
5273 if (CvANON(cv))
5274 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
5275 else {
5276 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%" SVf "\"",
5277 SVfARG(cv_name(cv,NULL,0)));
5278 }
5279}
5280
5281
5282
5283/* like croak, but report in context of caller */
5284
5285void
5286Perl_croak_caller(const char *pat, ...)
5287{
5288 dTHX;
5289 va_list args;
5290 const PERL_CONTEXT *cx = caller_cx(0, NULL);
5291
5292 /* make error appear at call site */
5293 assert(cx);
5294 PL_curcop = cx->blk_oldcop;
5295
5296 va_start(args, pat);
5297 vcroak(pat, &args);
5298 NOT_REACHED; /* NOTREACHED */
5299 va_end(args);
5300}
5301
5302
5303PP(pp_aelem)
5304{
5305 dSP;
5306 SV** svp;
5307 SV* const elemsv = POPs;
5308 IV elem = SvIV(elemsv);
5309 AV *const av = MUTABLE_AV(POPs);
5310 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
5311 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
5312 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
5313 bool preeminent = TRUE;
5314 SV *sv;
5315
5316 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
5317 Perl_warner(aTHX_ packWARN(WARN_MISC),
5318 "Use of reference \"%" SVf "\" as array index",
5319 SVfARG(elemsv));
5320 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
5321 RETPUSHUNDEF;
5322
5323 if (UNLIKELY(localizing)) {
5324 MAGIC *mg;
5325 HV *stash;
5326
5327 /* If we can determine whether the element exists,
5328 * try to preserve the existence of a tied array
5329 * element by using EXISTS and DELETE if possible.
5330 * Fall back to FETCH and STORE otherwise. */
5331 if (SvCANEXISTDELETE(av))
5332 preeminent = av_exists(av, elem);
5333 }
5334
5335 svp = av_fetch(av, elem, lval && !defer);
5336 if (lval) {
5337#ifdef PERL_MALLOC_WRAP
5338 if (SvUOK(elemsv)) {
5339 const UV uv = SvUV(elemsv);
5340 elem = uv > IV_MAX ? IV_MAX : uv;
5341 }
5342 else if (SvNOK(elemsv))
5343 elem = (IV)SvNV(elemsv);
5344 if (elem > 0) {
5345 static const char oom_array_extend[] =
5346 "Out of memory during array extend"; /* Duplicated in av.c */
5347 MEM_WRAP_CHECK_1(elem,SV*,oom_array_extend);
5348 }
5349#endif
5350 if (!svp || !*svp) {
5351 IV len;
5352 if (!defer)
5353 DIE(aTHX_ PL_no_aelem, elem);
5354 len = av_tindex(av);
5355 mPUSHs(newSVavdefelem(av,
5356 /* Resolve a negative index now, unless it points before the
5357 beginning of the array, in which case record it for error
5358 reporting in magic_setdefelem. */
5359 elem < 0 && len + elem >= 0 ? len + elem : elem,
5360 1));
5361 RETURN;
5362 }
5363 if (UNLIKELY(localizing)) {
5364 if (preeminent)
5365 save_aelem(av, elem, svp);
5366 else
5367 SAVEADELETE(av, elem);
5368 }
5369 else if (PL_op->op_private & OPpDEREF) {
5370 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
5371 RETURN;
5372 }
5373 }
5374 sv = (svp ? *svp : &PL_sv_undef);
5375 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
5376 mg_get(sv);
5377 PUSHs(sv);
5378 RETURN;
5379}
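
/* Illustrative Perl-level forms that exercise the lvalue/defer handling
 * in pp_aelem() above (a sketch):
 *
 *     $a[5] = 1;      # plain lvalue: the slot is vivified by av_fetch()
 *     foo($a[5]);     # OPpLVAL_DEFER: a "defelem" PVLV is pushed, bound to
 *                     # a real slot only if foo() actually writes to it
 *     local $a[5];    # OPpLVAL_INTRO: save_aelem() or SAVEADELETE(),
 *                     # depending on whether the element already exists
 */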
5380
5381SV*
5382Perl_vivify_ref(pTHX_ SV *sv, U32 to_what)
5383{
5384 PERL_ARGS_ASSERT_VIVIFY_REF;
5385
5386 SvGETMAGIC(sv);
5387 if (!SvOK(sv)) {
5388 if (SvREADONLY(sv))
5389 Perl_croak_no_modify();
5390 prepare_SV_for_RV(sv);
5391 switch (to_what) {
5392 case OPpDEREF_SV:
5393 SvRV_set(sv, newSV(0));
5394 break;
5395 case OPpDEREF_AV:
5396 SvRV_set(sv, MUTABLE_SV(newAV()));
5397 break;
5398 case OPpDEREF_HV:
5399 SvRV_set(sv, MUTABLE_SV(newHV()));
5400 break;
5401 }
5402 SvROK_on(sv);
5403 SvSETMAGIC(sv);
5404 SvGETMAGIC(sv);
5405 }
5406 if (SvGMAGICAL(sv)) {
5407 /* copy the sv without magic to prevent magic from being
5408 executed twice */
5409 SV* msv = sv_newmortal();
5410 sv_setsv_nomg(msv, sv);
5411 return msv;
5412 }
5413 return sv;
5414}
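
/* Perl-level autovivification that ends up calling vivify_ref() above via
 * the OPpDEREF_* hints (illustrative only):
 *
 *     my ($x, $y, $z);
 *     $x->[0] = 1;    # OPpDEREF_AV: undef $x becomes an ARRAY reference
 *     $y->{k} = 1;    # OPpDEREF_HV: undef $y becomes a HASH reference
 *     $$z = 1;        # OPpDEREF_SV: undef $z becomes a SCALAR reference
 */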
5415
5416PERL_STATIC_INLINE HV *
5417S_opmethod_stash(pTHX_ SV* meth)
5418{
5419 SV* ob;
5420 HV* stash;
5421
5422 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
5423 ? (Perl_croak(aTHX_ "Can't call method \"%" SVf "\" without a "
5424 "package or object reference", SVfARG(meth)),
5425 (SV *)NULL)
5426 : *(PL_stack_base + TOPMARK + 1);
5427
5428 PERL_ARGS_ASSERT_OPMETHOD_STASH;
5429
5430 if (UNLIKELY(!sv))
5431 undefined:
5432 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on an undefined value",
5433 SVfARG(meth));
5434
5435 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
5436 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
5437 stash = gv_stashsv(sv, GV_CACHE_ONLY);
5438 if (stash) return stash;
5439 }
5440
5441 if (SvROK(sv))
5442 ob = MUTABLE_SV(SvRV(sv));
5443 else if (!SvOK(sv)) goto undefined;
5444 else if (isGV_with_GP(sv)) {
5445 if (!GvIO(sv))
5446 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
5447 "without a package or object reference",
5448 SVfARG(meth));
5449 ob = sv;
5450 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') {
5451 assert(!LvTARGLEN(ob));
5452 ob = LvTARG(ob);
5453 assert(ob);
5454 }
5455 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob));
5456 }
5457 else {
5458 /* this isn't a reference */
5459 GV* iogv;
5460 STRLEN packlen;
5461 const char * const packname = SvPV_nomg_const(sv, packlen);
5462 const U32 packname_utf8 = SvUTF8(sv);
5463 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
5464 if (stash) return stash;
5465
5466 if (!(iogv = gv_fetchpvn_flags(
5467 packname, packlen, packname_utf8, SVt_PVIO
5468 )) ||
5469 !(ob=MUTABLE_SV(GvIO(iogv))))
5470 {
5471 /* this isn't the name of a filehandle either */
5472 if (!packlen)
5473 {
5474 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
5475 "without a package or object reference",
5476 SVfARG(meth));
5477 }
5478 /* assume it's a package name */
5479 stash = gv_stashpvn(packname, packlen, packname_utf8);
5480 if (stash) return stash;
5481 else return MUTABLE_HV(sv);
5482 }
5483 /* it _is_ a filehandle name -- replace with a reference */
5484 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
5485 }
5486
5487 /* if we got here, ob should be an object or a glob */
5488 if (!ob || !(SvOBJECT(ob)
5489 || (isGV_with_GP(ob)
5490 && (ob = MUTABLE_SV(GvIO((const GV *)ob)))
5491 && SvOBJECT(ob))))
5492 {
5493 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on unblessed reference",
5494 SVfARG((SvPOK(meth) && SvPVX(meth) == PL_isa_DOES)
5495 ? newSVpvs_flags("DOES", SVs_TEMP)
5496 : meth));
5497 }
5498
5499 return SvSTASH(ob);
5500}
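
/* Roughly how Perl-level method-call syntax maps onto the method ops
 * below (a sketch; the op is chosen at compile time):
 *
 *     $obj->$name(@args);                # pp_method       (dynamic name)
 *     $obj->frob(@args);                 # pp_method_named (constant name)
 *     $obj->SUPER::frob(@args);          # pp_method_super
 *     $obj->Other::Class::frob(@args);   # pp_method_redir
 *     $obj->Other::Class::SUPER::frob(@args);  # pp_method_redir_super
 */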
5501
5502PP(pp_method)
5503{
5504 dSP;
5505 GV* gv;
5506 HV* stash;
5507 SV* const meth = TOPs;
5508
5509 if (SvROK(meth)) {
5510 SV* const rmeth = SvRV(meth);
5511 if (SvTYPE(rmeth) == SVt_PVCV) {
5512 SETs(rmeth);
5513 RETURN;
5514 }
5515 }
5516
5517 stash = opmethod_stash(meth);
5518
5519 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
5520 assert(gv);
5521
5522 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
5523 RETURN;
5524}
5525
5526#define METHOD_CHECK_CACHE(stash,cache,meth) \
5527 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
5528 if (he) { \
5529 gv = MUTABLE_GV(HeVAL(he)); \
5530 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
5531 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
5532 { \
5533 XPUSHs(MUTABLE_SV(GvCV(gv))); \
5534 RETURN; \
5535 } \
5536 } \
5537
5538PP(pp_method_named)
5539{
5540 dSP;
5541 GV* gv;
5542 SV* const meth = cMETHOPx_meth(PL_op);
5543 HV* const stash = opmethod_stash(meth);
5544
5545 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
5546 METHOD_CHECK_CACHE(stash, stash, meth);
5547 }
5548
5549 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
5550 assert(gv);
5551
5552 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
5553 RETURN;
5554}
5555
5556PP(pp_method_super)
5557{
5558 dSP;
5559 GV* gv;
5560 HV* cache;
5561 SV* const meth = cMETHOPx_meth(PL_op);
5562 HV* const stash = CopSTASH(PL_curcop);
5563 /* Actually, SUPER doesn't need the real object's (or class's) stash at
5564 * all, as it uses CopSTASH. However, we must still ensure that the
5565 * object (or class) is valid; that check is done by S_opmethod_stash */
5566 opmethod_stash(meth);
5567
5568 if ((cache = HvMROMETA(stash)->super)) {
5569 METHOD_CHECK_CACHE(stash, cache, meth);
5570 }
5571
5572 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
5573 assert(gv);
5574
5575 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
5576 RETURN;
5577}
5578
5579PP(pp_method_redir)
5580{
5581 dSP;
5582 GV* gv;
5583 SV* const meth = cMETHOPx_meth(PL_op);
5584 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
5585 opmethod_stash(meth); /* not used but needed for error checks */
5586
5587 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
5588 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
5589
5590 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
5591 assert(gv);
5592
5593 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
5594 RETURN;
5595}
5596
5597PP(pp_method_redir_super)
5598{
5599 dSP;
5600 GV* gv;
5601 HV* cache;
5602 SV* const meth = cMETHOPx_meth(PL_op);
5603 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
5604 opmethod_stash(meth); /* not used but needed for error checks */
5605
5606 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
5607 else if ((cache = HvMROMETA(stash)->super)) {
5608 METHOD_CHECK_CACHE(stash, cache, meth);
5609 }
5610
5611 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
5612 assert(gv);
5613
5614 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
5615 RETURN;
5616}
5617
5618/*
5619 * ex: set ts=8 sts=4 sw=4 et:
5620 */