[perl5.git] / pp_hot.c
1/* pp_hot.c
2 *
3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
5 *
6 * You may distribute under the terms of either the GNU General Public
7 * License or the Artistic License, as specified in the README file.
8 *
9 */
10
11/*
12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
13 * shaking the air.
14 *
15 * Awake! Awake! Fear, Fire, Foes! Awake!
16 * Fire, Foes! Awake!
17 *
18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
19 */
20
21/* This file contains 'hot' pp ("push/pop") functions that
22 * execute the opcodes that make up a perl program. A typical pp function
23 * expects to find its arguments on the stack, and usually pushes its
24 * results onto the stack, hence the 'pp' terminology. Each OP structure
25 * contains a pointer to the relevant pp_foo() function.
26 *
27 * By 'hot', we mean common ops whose execution speed is critical.
28 * By gathering them together into a single file, we encourage
29 * CPU cache hits on hot code. Also it could be taken as a warning not to
30 * change any code in this file unless you're sure it won't affect
31 * performance.
32 */
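/* A minimal sketch (illustrative only, not a real op) of the shape a pp
 * function takes; pp_const below is the simplest real instance:
 *
 *     PP(pp_example)
 *     {
 *         dSP;                // local copy of the stack pointer
 *         SV *arg = POPs;     // take an argument off the stack
 *         XPUSHs(arg);        // push a result, extending the stack if needed
 *         RETURN;             // sync SP back and continue with op_next
 *     }
 */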
33
34#include "EXTERN.h"
35#define PERL_IN_PP_HOT_C
36#include "perl.h"
37
38/* Hot code. */
39
40PP(pp_const)
41{
42 dSP;
43 XPUSHs(cSVOP_sv);
44 RETURN;
45}
46
47PP(pp_nextstate)
48{
49 PL_curcop = (COP*)PL_op;
50 TAINT_NOT; /* Each statement is presumed innocent */
51 PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;
52 FREETMPS;
53 PERL_ASYNC_CHECK();
54 return NORMAL;
55}
56
57PP(pp_gvsv)
58{
59 dSP;
60 EXTEND(SP,1);
61 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
62 PUSHs(save_scalar(cGVOP_gv));
63 else
64 PUSHs(GvSVn(cGVOP_gv));
65 RETURN;
66}
67
68
69/* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */
70
71PP(pp_null)
72{
73 return NORMAL;
74}
75
76/* This is sometimes called directly by pp_coreargs, pp_grepstart and
77 amagic_call. */
78PP(pp_pushmark)
79{
80 PUSHMARK(PL_stack_sp);
81 return NORMAL;
82}
83
84PP(pp_stringify)
85{
86 dSP; dTARGET;
87 SV * const sv = TOPs;
88 SETs(TARG);
89 sv_copypv(TARG, sv);
90 SvSETMAGIC(TARG);
91 /* no PUTBACK, SETs doesn't inc/dec SP */
92 return NORMAL;
93}
94
95PP(pp_gv)
96{
97 dSP;
98 XPUSHs(MUTABLE_SV(cGVOP_gv));
99 RETURN;
100}
101
102
103/* also used for: pp_andassign() */
104
105PP(pp_and)
106{
107 PERL_ASYNC_CHECK();
108 {
 109 /* SP is not used here, to avoid a variable that would have to be saved
 110 across the sv_2bool_flags call in SvTRUE_NN. If a RISC/CISC or low/high
 111 machine register or load/store vs direct mem ops macro is ever introduced,
 112 this should become a define block choosing between direct PL_stack_sp and
 113 dSP operations; at present, using PL_stack_sp is biased towards CISC cpus */
114 SV * const sv = *PL_stack_sp;
115 if (!SvTRUE_NN(sv))
116 return NORMAL;
117 else {
118 if (PL_op->op_type == OP_AND)
119 --PL_stack_sp;
120 return cLOGOP->op_other;
121 }
122 }
123}
124
125PP(pp_sassign)
126{
127 dSP;
128 /* sassign keeps its args in the optree traditionally backwards.
129 So we pop them differently.
130 */
131 SV *left = POPs; SV *right = TOPs;
132
133 if (PL_op->op_private & OPpASSIGN_BACKWARDS) { /* {or,and,dor}assign */
134 SV * const temp = left;
135 left = right; right = temp;
136 }
137 assert(TAINTING_get || !TAINT_get);
138 if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
139 TAINT_NOT;
140 if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
 141 /* *foo = \&bar */
142 SV * const cv = SvRV(right);
143 const U32 cv_type = SvTYPE(cv);
144 const bool is_gv = isGV_with_GP(left);
145 const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;
146
147 if (!got_coderef) {
148 assert(SvROK(cv));
149 }
150
151 /* Can do the optimisation if left (LVALUE) is not a typeglob,
152 right (RVALUE) is a reference to something, and we're in void
153 context. */
154 if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
155 /* Is the target symbol table currently empty? */
156 GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
157 if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
158 /* Good. Create a new proxy constant subroutine in the target.
159 The gv becomes a(nother) reference to the constant. */
160 SV *const value = SvRV(cv);
161
162 SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
163 SvPCS_IMPORTED_on(gv);
164 SvRV_set(gv, value);
165 SvREFCNT_inc_simple_void(value);
166 SETs(left);
167 RETURN;
168 }
169 }
170
171 /* Need to fix things up. */
172 if (!is_gv) {
173 /* Need to fix GV. */
174 left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
175 }
176
177 if (!got_coderef) {
178 /* We've been returned a constant rather than a full subroutine,
179 but they expect a subroutine reference to apply. */
180 if (SvROK(cv)) {
181 ENTER_with_name("sassign_coderef");
182 SvREFCNT_inc_void(SvRV(cv));
183 /* newCONSTSUB takes a reference count on the passed in SV
184 from us. We set the name to NULL, otherwise we get into
185 all sorts of fun as the reference to our new sub is
186 donated to the GV that we're about to assign to.
187 */
188 SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
189 SvRV(cv))));
190 SvREFCNT_dec_NN(cv);
191 LEAVE_with_name("sassign_coderef");
192 } else {
193 /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
 194 is that:
 195 First: the ops for \&{"BONK"} return us the constant in the
 196 symbol table
 197 Second: the ops for *{"BONK"} cause that symbol table entry
 198 (and our reference to it) to be upgraded from an RV
 199 to a typeglob
 200 Third: we get here. cv is actually a PVGV now, and its
 201 GvCV() is the subroutine we're looking for
202
203 So change the reference so that it points to the subroutine
204 of that typeglob, as that's what they were after all along.
205 */
206 GV *const upgraded = MUTABLE_GV(cv);
207 CV *const source = GvCV(upgraded);
208
209 assert(source);
210 assert(CvFLAGS(source) & CVf_CONST);
211
212 SvREFCNT_inc_simple_void_NN(source);
213 SvREFCNT_dec_NN(upgraded);
214 SvRV_set(right, MUTABLE_SV(source));
215 }
216 }
217
218 }
219 if (
220 UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
221 (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
222 )
223 Perl_warner(aTHX_
224 packWARN(WARN_MISC), "Useless assignment to a temporary"
225 );
226 SvSetMagicSV(left, right);
227 SETs(left);
228 RETURN;
229}
230
231PP(pp_cond_expr)
232{
233 dSP;
234 SV *sv;
235
236 PERL_ASYNC_CHECK();
237 sv = POPs;
238 RETURNOP(SvTRUE_NN(sv) ? cLOGOP->op_other : cLOGOP->op_next);
239}
240
241PP(pp_unstack)
242{
243 PERL_CONTEXT *cx;
244 PERL_ASYNC_CHECK();
245 TAINT_NOT; /* Each statement is presumed innocent */
246 cx = CX_CUR();
247 PL_stack_sp = PL_stack_base + cx->blk_oldsp;
248 FREETMPS;
249 if (!(PL_op->op_flags & OPf_SPECIAL)) {
250 assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx));
251 CX_LEAVE_SCOPE(cx);
252 }
253 return NORMAL;
254}
255
256PP(pp_concat)
257{
258 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign);
259 {
260 dPOPTOPssrl;
261 bool lbyte;
262 STRLEN rlen;
263 const char *rpv = NULL;
264 bool rbyte = FALSE;
265 bool rcopied = FALSE;
266
267 if (TARG == right && right != left) { /* $r = $l.$r */
268 rpv = SvPV_nomg_const(right, rlen);
269 rbyte = !DO_UTF8(right);
270 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
271 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */
272 rcopied = TRUE;
273 }
274
275 if (TARG != left) { /* not $l .= $r */
276 STRLEN llen;
277 const char* const lpv = SvPV_nomg_const(left, llen);
278 lbyte = !DO_UTF8(left);
279 sv_setpvn(TARG, lpv, llen);
280 if (!lbyte)
281 SvUTF8_on(TARG);
282 else
283 SvUTF8_off(TARG);
284 }
285 else { /* $l .= $r and left == TARG */
286 if (!SvOK(left)) {
287 if ((left == right /* $l .= $l */
288 || (PL_op->op_private & OPpTARGET_MY)) /* $l = $l . $r */
289 && ckWARN(WARN_UNINITIALIZED)
290 )
291 report_uninit(left);
292 SvPVCLEAR(left);
293 }
294 else {
295 SvPV_force_nomg_nolen(left);
296 }
297 lbyte = !DO_UTF8(left);
298 if (IN_BYTES)
299 SvUTF8_off(left);
300 }
301
302 if (!rcopied) {
303 rpv = SvPV_nomg_const(right, rlen);
304 rbyte = !DO_UTF8(right);
305 }
306 if (lbyte != rbyte) {
307 if (lbyte)
308 sv_utf8_upgrade_nomg(TARG);
309 else {
310 if (!rcopied)
311 right = newSVpvn_flags(rpv, rlen, SVs_TEMP);
312 sv_utf8_upgrade_nomg(right);
313 rpv = SvPV_nomg_const(right, rlen);
314 }
315 }
316 sv_catpvn_nomg(TARG, rpv, rlen);
317
318 SETTARG;
319 RETURN;
320 }
321}
322
323
324/* pp_multiconcat()
325
326Concatenate one or more args, possibly interleaved with constant string
327segments. The result may be assigned to, or appended to, a variable or
328expression.
329
330Several op_flags and/or op_private bits indicate what the target is, and
331whether it's appended to. Valid permutations are:
332
333 - (PADTMP) = (A.B.C....)
334 OPpTARGET_MY $lex = (A.B.C....)
335 OPpTARGET_MY,OPpLVAL_INTRO my $lex = (A.B.C....)
336 OPpTARGET_MY,OPpMULTICONCAT_APPEND $lex .= (A.B.C....)
337 OPf_STACKED expr = (A.B.C....)
338 OPf_STACKED,OPpMULTICONCAT_APPEND expr .= (A.B.C....)
339
340Other combinations like (A.B).(C.D) are not optimised into a multiconcat
341op, as it's too hard to get the correct ordering of ties, overload etc.
342
343In addition:
344
345 OPpMULTICONCAT_FAKE: not a real concat, instead an optimised
346 sprintf "...%s...". Don't call '.'
347 overloading: only use '""' overloading.
348
349 OPpMULTICONCAT_STRINGIFY: (for Deparse's benefit) the RHS was of the
350 form "...$a...$b..." rather than
351 "..." . $a . "..." . $b . "..."
352
353An OP_MULTICONCAT is of type UNOP_AUX. The fixed slots of the aux array are
354defined with PERL_MULTICONCAT_IX_FOO constants, where:
355
356
357 FOO index description
358 -------- ----- ----------------------------------
359 NARGS 0 number of arguments
360 PLAIN_PV 1 non-utf8 constant string
361 PLAIN_LEN 2 non-utf8 constant string length
362 UTF8_PV 3 utf8 constant string
363 UTF8_LEN 4 utf8 constant string length
364 LENGTHS 5 first of nargs+1 const segment lengths
365
366The idea is that a general string concatenation will have a fixed (known
367at compile time) number of variable args, interspersed with constant
368strings, e.g. "a=$a b=$b\n"
369
370All the constant string segments "a=", " b=" and "\n" are stored as a
371single string "a= b=\n", pointed to from the PLAIN_PV/UTF8_PV slot, along
372with a series of segment lengths: e.g. 2,3,1. In the case where the
373constant string is plain but has a different utf8 representation, both
 374 variants are stored, and two sets of (nargs+1) segment lengths are stored
375in the slots beginning at PERL_MULTICONCAT_IX_LENGTHS.
376
377A segment length of -1 indicates that there is no constant string at that
378point; this distinguishes between e.g. ($a . $b) and ($a . "" . $b), which
379have differing overloading behaviour.
380
381*/
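/* Illustrative sketch (assumed values, not part of the build): for an
 * expression such as  my $s = "a=$a b=$b\n";  the aux array described
 * above would conceptually hold:
 *
 *     aux[PERL_MULTICONCAT_IX_NARGS].ssize       == 2           two args: $a, $b
 *     aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv       == "a= b=\n"   all segments joined
 *     aux[PERL_MULTICONCAT_IX_PLAIN_LEN].ssize   == 6
 *     aux[PERL_MULTICONCAT_IX_LENGTHS + 0].ssize == 2           "a="
 *     aux[PERL_MULTICONCAT_IX_LENGTHS + 1].ssize == 3           " b="
 *     aux[PERL_MULTICONCAT_IX_LENGTHS + 2].ssize == 1           "\n"
 */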
382
383PP(pp_multiconcat)
384{
385 dSP;
386 SV *targ; /* The SV to be assigned or appended to */
387 SV *dsv; /* the SV to concat args to (often == targ) */
388 char *dsv_pv; /* where within SvPVX(dsv) we're writing to */
389 STRLEN targ_len; /* SvCUR(targ) */
390 SV **toparg; /* the highest arg position on the stack */
391 UNOP_AUX_item *aux; /* PL_op->op_aux buffer */
392 UNOP_AUX_item *const_lens; /* the segment length array part of aux */
393 const char *const_pv; /* the current segment of the const string buf */
394 SSize_t nargs; /* how many args were expected */
395 SSize_t stack_adj; /* how much to adjust SP on return */
396 STRLEN grow; /* final size of destination string (dsv) */
397 UV targ_count; /* how many times targ has appeared on the RHS */
398 bool is_append; /* OPpMULTICONCAT_APPEND flag is set */
399 bool slow_concat; /* args too complex for quick concat */
400 U32 dst_utf8; /* the result will be utf8 (indicate this with
401 SVf_UTF8 in a U32, rather than using bool,
402 for ease of testing and setting) */
403 /* for each arg, holds the result of an SvPV() call */
404 struct multiconcat_svpv {
405 char *pv;
406 SSize_t len;
407 }
408 *targ_chain, /* chain of slots where targ has appeared on RHS */
409 *svpv_p, /* ptr for looping through svpv_buf */
410 *svpv_base, /* first slot (may be greater than svpv_buf), */
411 *svpv_end, /* and slot after highest result so far, of: */
412 svpv_buf[PERL_MULTICONCAT_MAXARG]; /* buf for storing SvPV() results */
413
414 aux = cUNOP_AUXx(PL_op)->op_aux;
415 stack_adj = nargs = aux[PERL_MULTICONCAT_IX_NARGS].ssize;
416 is_append = cBOOL(PL_op->op_private & OPpMULTICONCAT_APPEND);
417
418 /* get targ from the stack or pad */
419
420 if (PL_op->op_flags & OPf_STACKED) {
421 if (is_append) {
422 /* for 'expr .= ...', expr is the bottom item on the stack */
423 targ = SP[-nargs];
424 stack_adj++;
425 }
426 else
427 /* for 'expr = ...', expr is the top item on the stack */
428 targ = POPs;
429 }
430 else {
431 SV **svp = &(PAD_SVl(PL_op->op_targ));
432 targ = *svp;
433 if (PL_op->op_private & OPpLVAL_INTRO) {
434 assert(PL_op->op_private & OPpTARGET_MY);
435 save_clearsv(svp);
436 }
437 if (!nargs)
438 /* $lex .= "const" doesn't cause anything to be pushed */
439 EXTEND(SP,1);
440 }
441
442 toparg = SP;
443 SP -= (nargs - 1);
444 dsv = targ; /* Set the destination for all concats. This is
445 initially targ; later on, dsv may be switched
446 to point to a TEMP SV if overloading is
447 encountered. */
448 grow = 1; /* allow for '\0' at minimum */
449 targ_count = 0;
450 targ_chain = NULL;
451 targ_len = 0;
452 svpv_end = svpv_buf;
453 /* only utf8 variants of the const strings? */
454 dst_utf8 = aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv ? 0 : SVf_UTF8;
455
456
457 /* --------------------------------------------------------------
458 * Phase 1:
459 *
460 * stringify (i.e. SvPV()) every arg and store the resultant pv/len/utf8
461 * triplets in svpv_buf[]. Also increment 'grow' by the args' lengths.
462 *
463 * utf8 is indicated by storing a negative length.
464 *
465 * Where an arg is actually targ, the stringification is deferred:
466 * the length is set to 0, and the slot is added to targ_chain.
467 *
468 * If an overloaded arg is found, the loop is abandoned at that point,
469 * and dsv is set to an SvTEMP SV where the results-so-far will be
470 * accumulated.
471 */
472
473 for (; SP <= toparg; SP++, svpv_end++) {
474 bool simple_flags;
475 U32 utf8;
476 STRLEN len;
477 SV *sv;
478
479 assert(svpv_end - svpv_buf < PERL_MULTICONCAT_MAXARG);
480
481 sv = *SP;
482 simple_flags = (SvFLAGS(sv) & (SVs_GMG|SVf_ROK|SVf_POK)) == SVf_POK;
483
484 /* this if/else chain is arranged so that common/simple cases
485 * take few conditionals */
486
487 if (LIKELY(simple_flags && (sv != targ))) {
488 /* common case: sv is a simple PV and not the targ */
489 svpv_end->pv = SvPVX(sv);
490 len = SvCUR(sv);
491 }
492 else if (simple_flags) {
493 /* sv is targ (but can't be magic or overloaded).
494 * Delay storing PV pointer; instead, add slot to targ_chain
495 * so it can be populated later, after targ has been grown and
496 * we know its final SvPVX() address.
497 */
498 targ_on_rhs:
 499 svpv_end->len = 0; /* zeroing here means we can skip
500 updating later if targ_len == 0 */
501 svpv_end->pv = (char*)targ_chain;
502 targ_chain = svpv_end;
503 targ_count++;
504 continue;
505 }
506 else {
507 if (UNLIKELY(SvFLAGS(sv) & (SVs_GMG|SVf_ROK))) {
 508 /* it's got magic, is tied, and/or is overloaded */
509 SvGETMAGIC(sv);
510
511 if (UNLIKELY(SvAMAGIC(sv))
512 && !(PL_op->op_private & OPpMULTICONCAT_FAKE))
513 {
514 /* One of the RHS args is overloaded. Abandon stringifying
515 * the args at this point, then in the concat loop later
516 * on, concat the plain args stringified so far into a
517 * TEMP SV. At the end of this function the remaining
518 * args (including the current one) will be handled
519 * specially, using overload calls.
520 * FAKE implies an optimised sprintf which doesn't use
521 * concat overloading, only "" overloading.
522 */
523
524 if ( svpv_end == svpv_buf + 1
525 /* no const string segments */
526 && aux[PERL_MULTICONCAT_IX_LENGTHS].ssize == -1
527 && aux[PERL_MULTICONCAT_IX_LENGTHS + 1].ssize == -1
528 ) {
529 /* special case: if the overloaded sv is the
530 * second arg in the concat chain, stop at the
531 * first arg rather than this, so that
532 *
533 * $arg1 . $arg2
534 *
535 * invokes overloading as
536 *
537 * concat($arg2, $arg1, 1)
538 *
539 * rather than
540 *
541 * concat($arg2, "$arg1", 1)
542 *
543 * This means that if for example arg1 is a ref,
544 * it gets passed as-is to the concat method
545 * rather than a stringified copy. If it's not the
546 * first arg, it doesn't matter, as in $arg0 .
547 * $arg1 . $arg2, where the result of ($arg0 .
548 * $arg1) will already be a string.
 549 * This isn't perfect: we'll have already
 550 * done SvPV($arg1) on the previous iteration,
 551 * and are now throwing away that result and
 552 * hoping arg1 hasn't been affected.
553 */
554 svpv_end--;
555 SP--;
556 }
557
558 setup_overload:
559 dsv = newSVpvn_flags("", 0, SVs_TEMP);
560
561 if (targ_chain) {
562 /* Get the string value of targ and populate any
563 * RHS slots which use it */
564 char *pv = SvPV_nomg(targ, len);
565 dst_utf8 |= (SvFLAGS(targ) & SVf_UTF8);
566 grow += len * targ_count;
567 do {
568 struct multiconcat_svpv *p = targ_chain;
569 targ_chain = (struct multiconcat_svpv *)(p->pv);
570 p->pv = pv;
571 p->len = len;
572 } while (targ_chain);
573 }
574 else if (is_append)
575 SvGETMAGIC(targ);
576
577 goto phase3;
578 }
579
580 if (SvFLAGS(sv) & SVs_RMG) {
581 /* probably tied; copy it to guarantee separate values
582 * each time it's used, e.g. "-$tied-$tied-$tied-",
583 * since FETCH() isn't necessarily idempotent */
584 SV *nsv = newSV(0);
585 sv_setsv_flags(nsv, sv, SV_NOSTEAL);
586 sv_2mortal(nsv);
587 if ( sv == targ
588 && is_append
589 && nargs == 1
590 /* no const string segments */
591 && aux[PERL_MULTICONCAT_IX_LENGTHS].ssize == -1
592 && aux[PERL_MULTICONCAT_IX_LENGTHS+1].ssize == -1)
593 {
594 /* special-case $tied .= $tied.
595 *
596 * For something like
597 * sub FETCH { $i++ }
598 * then
599 * $tied .= $tied . $tied . $tied;
600 * will STORE "4123"
601 * while
602 * $tied .= $tied
603 * will STORE "12"
604 *
605 * i.e. for a single mutator concat, the LHS is
606 * retrieved first; in all other cases it is
607 * retrieved last. Whether this is sane behaviour
608 * is open to debate; but for now, multiconcat (as
609 * it is an optimisation) tries to reproduce
610 * existing behaviour.
611 */
612 sv_catsv(nsv, sv);
613 sv_setsv(sv,nsv);
614 SP++;
615 goto phase7; /* just return targ as-is */
616 }
617
618 sv = nsv;
619 }
620 }
621
622 if (sv == targ) {
623 /* must warn for each RH usage of targ, except that
624 * we will later get one warning when doing
625 * SvPV_force(targ), *except* on '.=' */
626 if ( !SvOK(sv)
627 && (targ_chain || is_append)
628 && ckWARN(WARN_UNINITIALIZED)
629 )
630 report_uninit(sv);
631 goto targ_on_rhs;
632 }
633
634 /* stringify general SV */
635 svpv_end->pv = sv_2pv_flags(sv, &len, 0);
636 }
637
638 utf8 = (SvFLAGS(sv) & SVf_UTF8);
639 dst_utf8 |= utf8;
640 ASSUME(len < SSize_t_MAX);
641 svpv_end->len = utf8 ? -(SSize_t)len : (SSize_t)len;
642 grow += len;
643 }
644
645 /* --------------------------------------------------------------
646 * Phase 2:
647 *
648 * Stringify targ:
649 *
650 * if targ appears on the RHS or is appended to, force stringify it;
651 * otherwise set it to "". Then set targ_len.
652 */
653
654 if (is_append) {
655 if (UNLIKELY(SvFLAGS(targ) & (SVs_GMG|SVf_ROK))) {
656 SvGETMAGIC(targ); /* must do before SvAMAGIC() check */
657 if (UNLIKELY(SvAMAGIC(targ))) {
658 /* $overloaded .= ....;
659 * accumulate RHS in a temp SV rather than targ,
660 * then append tmp to targ at the end using overload
661 */
662 assert(!targ_chain);
663 dsv = newSVpvn_flags("", 0, SVs_TEMP);
664
665 if ( svpv_end == svpv_buf + 1
666 /* no const string segments */
667 && aux[PERL_MULTICONCAT_IX_LENGTHS].ssize == -1
668 ) {
669 /* special case $overloaded .= $arg1:
670 * avoid stringifying $arg1.
671 * Similar to the $arg1 . $arg2 case in phase1
672 */
673 svpv_end--;
674 SP--;
675 }
676
677 goto phase3;
678 }
679 }
680
681 if (SvOK(targ)) {
682 U32 targ_utf8;
683 stringify_targ:
684 SvPV_force_nomg_nolen(targ);
685 targ_utf8 = SvFLAGS(targ) & SVf_UTF8;
686 if (UNLIKELY(dst_utf8 & ~targ_utf8)) {
687 if (LIKELY(!IN_BYTES))
688 sv_utf8_upgrade_nomg(targ);
689 }
690 else
691 dst_utf8 |= targ_utf8;
692
693 targ_len = SvCUR(targ);
694 grow += targ_len * (targ_count + is_append);
695 goto phase3;
696 }
697 }
698 else if (UNLIKELY(SvTYPE(targ) >= SVt_REGEXP)) {
699 /* Assigning to some weird LHS type. Don't force the LHS to be an
700 * empty string; instead, do things 'long hand' by using the
701 * overload code path, which concats to a TEMP sv and does
702 * sv_catsv() calls rather than COPY()s. This ensures that even
703 * bizarre code like this doesn't break or crash:
704 * *F = *F . *F.
705 * (which makes the 'F' typeglob an alias to the
706 * '*main::F*main::F' typeglob).
707 */
708 goto setup_overload;
709 }
710 else if (targ_chain) {
711 /* targ was found on RHS.
712 * We don't need the SvGETMAGIC() call and SvAMAGIC() test as
713 * both were already done earlier in the SvPV() loop; other
714 * than that we can share the same code with the append
715 * branch below.
716 * Note that this goto jumps directly into the SvOK() branch
717 * even if targ isn't SvOK(), to force an 'uninitialised'
718 * warning; e.g.
719 * $undef .= .... targ only on LHS: don't warn
720 * $undef .= $undef .... targ on RHS too: warn
721 */
722 assert(!SvAMAGIC(targ));
723 goto stringify_targ;
724 }
725
726
727 /* unrolled SvPVCLEAR() - mostly: no need to grow or set SvCUR() to 0;
728 * those will be done later. */
729 assert(targ == dsv);
730 SV_CHECK_THINKFIRST_COW_DROP(targ);
731 SvUPGRADE(targ, SVt_PV);
732 SvFLAGS(targ) &= ~(SVf_OK|SVf_IVisUV|SVf_UTF8);
733 SvFLAGS(targ) |= (SVf_POK|SVp_POK|dst_utf8);
734
735 phase3:
736
737 /* --------------------------------------------------------------
738 * Phase 3:
739 *
740 * UTF-8 tweaks and grow dsv:
741 *
742 * Now that we know the length and utf8-ness of both the targ and
743 * args, grow dsv to the size needed to accumulate all the args, based
744 * on whether targ appears on the RHS, whether we're appending, and
745 * whether any non-utf8 args expand in size if converted to utf8.
746 *
747 * For the latter, if dst_utf8 we scan non-utf8 args looking for
748 * variant chars, and adjust the svpv->len value of those args to the
749 * utf8 size and negate it to flag them. At the same time we un-negate
750 * the lens of any utf8 args since after this phase we no longer care
751 * whether an arg is utf8 or not.
752 *
753 * Finally, initialise const_lens and const_pv based on utf8ness.
754 * Note that there are 3 permutations:
755 *
756 * * If the constant string is invariant whether utf8 or not (e.g. "abc"),
757 * then aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN] are the same as
758 * aux[PERL_MULTICONCAT_IX_UTF8_PV/LEN] and there is one set of
759 * segment lengths.
760 *
761 * * If the string is fully utf8, e.g. "\x{100}", then
762 * aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN] == (NULL,0) and there is
763 * one set of segment lengths.
764 *
765 * * If the string has different plain and utf8 representations
 766 * (e.g. "\x80"), then aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN]
767 * holds the plain rep, while aux[PERL_MULTICONCAT_IX_UTF8_PV/LEN]
768 * holds the utf8 rep, and there are 2 sets of segment lengths,
769 * with the utf8 set following after the plain set.
770 *
771 * On entry to this section the (pv,len) pairs in svpv_buf have the
772 * following meanings:
773 * (pv, len) a plain string
774 * (pv, -len) a utf8 string
775 * (NULL, 0) left-most targ \ linked together R-to-L
776 * (next, 0) other targ / in targ_chain
777 */
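 /* Worked example (assumption: an ASCII platform): for a constant segment
 * containing "\x80", the plain representation is the single byte 0x80
 * while the utf8 representation is the two bytes 0xC2 0x80, so the
 * PLAIN_PV/LEN and UTF8_PV/LEN slots differ and const_lens must be
 * advanced past the plain set of segment lengths to the utf8 set when
 * dst_utf8 is true (the third permutation above). */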
778
779 /* turn off utf8 handling if 'use bytes' is in scope */
780 if (UNLIKELY(dst_utf8 && IN_BYTES)) {
781 dst_utf8 = 0;
782 SvUTF8_off(dsv);
783 /* undo all the negative lengths which flag utf8-ness */
784 for (svpv_p = svpv_buf; svpv_p < svpv_end; svpv_p++) {
785 SSize_t len = svpv_p->len;
786 if (len < 0)
787 svpv_p->len = -len;
788 }
789 }
790
791 /* grow += total of lengths of constant string segments */
792 {
793 SSize_t len;
794 len = aux[dst_utf8 ? PERL_MULTICONCAT_IX_UTF8_LEN
795 : PERL_MULTICONCAT_IX_PLAIN_LEN].ssize;
796 slow_concat = cBOOL(len);
797 grow += len;
798 }
799
800 const_lens = aux + PERL_MULTICONCAT_IX_LENGTHS;
801
802 if (dst_utf8) {
803 const_pv = aux[PERL_MULTICONCAT_IX_UTF8_PV].pv;
804 if ( aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv
805 && const_pv != aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv)
806 /* separate sets of lengths for plain and utf8 */
807 const_lens += nargs + 1;
808
809 /* If the result is utf8 but some of the args aren't,
810 * calculate how much extra growth is needed for all the chars
811 * which will expand to two utf8 bytes.
812 * Also, if the growth is non-zero, negate the length to indicate
 813 * that this is a variant string. Conversely, un-negate the
 814 * length on utf8 args (which was only needed to flag non-utf8
 815 * args in this loop). */
816 for (svpv_p = svpv_buf; svpv_p < svpv_end; svpv_p++) {
817 SSize_t len, extra;
818
819 len = svpv_p->len;
820 if (len <= 0) {
821 svpv_p->len = -len;
822 continue;
823 }
824
825 extra = variant_under_utf8_count((U8 *) svpv_p->pv,
826 (U8 *) svpv_p->pv + len);
827 if (UNLIKELY(extra)) {
828 grow += extra;
829 /* -ve len indicates special handling */
830 svpv_p->len = -(len + extra);
831 slow_concat = TRUE;
832 }
833 }
834 }
835 else
836 const_pv = aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv;
837
838 /* unrolled SvGROW(), except don't check for SVf_IsCOW, which should
839 * already have been dropped */
840 assert(!SvIsCOW(dsv));
841 dsv_pv = (SvLEN(dsv) < (grow) ? sv_grow(dsv,grow) : SvPVX(dsv));
842
843
844 /* --------------------------------------------------------------
845 * Phase 4:
846 *
847 * Now that dsv (which is probably targ) has been grown, we know the
848 * final address of the targ PVX, if needed. Preserve / move targ
849 * contents if appending or if targ appears on RHS.
850 *
851 * Also update svpv_buf slots in targ_chain.
852 *
853 * Don't bother with any of this if the target length is zero:
854 * targ_len is set to zero unless we're appending or targ appears on
855 * RHS. And even if it is, we can optimise by skipping this chunk of
856 * code for zero targ_len. In the latter case, we don't need to update
857 * the slots in targ_chain with the (zero length) target string, since
858 * we set the len in such slots to 0 earlier, and since the Copy() is
859 * skipped on zero length, it doesn't matter what svpv_p->pv contains.
860 *
861 * On entry to this section the (pv,len) pairs in svpv_buf have the
862 * following meanings:
863 * (pv, len) a pure-plain or utf8 string
864 * (pv, -(len+extra)) a plain string which will expand by 'extra'
865 * bytes when converted to utf8
866 * (NULL, 0) left-most targ \ linked together R-to-L
867 * (next, 0) other targ / in targ_chain
868 *
869 * On exit, the targ contents will have been moved to the
870 * earliest place they are needed (e.g. $x = "abc$x" will shift them
871 * 3 bytes, while $x .= ... will leave them at the beginning);
 872 * and dsv_pv will point to the location within SvPVX(dsv) where the
873 * next arg should be copied.
874 */
875
876 svpv_base = svpv_buf;
877
878 if (targ_len) {
879 struct multiconcat_svpv *tc_stop;
880 char *targ_pv = dsv_pv;
881
882 assert(targ == dsv);
883 assert(is_append || targ_count);
884
885 if (is_append) {
886 dsv_pv += targ_len;
887 tc_stop = NULL;
888 }
889 else {
890 /* The targ appears on RHS, e.g. '$t = $a . $t . $t'.
891 * Move the current contents of targ to the first
892 * position where it's needed, and use that as the src buffer
893 * for any further uses (such as the second RHS $t above).
894 * In calculating the first position, we need to sum the
895 * lengths of all consts and args before that.
896 */
897
898 UNOP_AUX_item *lens = const_lens;
899 /* length of first const string segment */
900 STRLEN offset = lens->ssize > 0 ? lens->ssize : 0;
901
902 assert(targ_chain);
903 svpv_p = svpv_base;
904
905 for (;;) {
906 SSize_t len;
907 if (!svpv_p->pv)
908 break; /* the first targ argument */
909 /* add lengths of the next arg and const string segment */
910 len = svpv_p->len;
911 if (len < 0) /* variant args have this */
912 len = -len;
913 offset += (STRLEN)len;
914 len = (++lens)->ssize;
915 offset += (len >= 0) ? (STRLEN)len : 0;
916 if (!offset) {
917 /* all args and consts so far are empty; update
918 * the start position for the concat later */
919 svpv_base++;
920 const_lens++;
921 }
922 svpv_p++;
923 assert(svpv_p < svpv_end);
924 }
925
926 if (offset) {
927 targ_pv += offset;
928 Move(dsv_pv, targ_pv, targ_len, char);
929 /* a negative length implies don't Copy(), but do increment */
930 svpv_p->len = -targ_len;
931 slow_concat = TRUE;
932 }
933 else {
934 /* skip the first targ copy */
935 svpv_base++;
936 const_lens++;
937 dsv_pv += targ_len;
938 }
939
940 /* Don't populate the first targ slot in the loop below; it's
941 * either not used because we advanced svpv_base beyond it, or
942 * we already stored the special -targ_len value in it
943 */
944 tc_stop = svpv_p;
945 }
946
947 /* populate slots in svpv_buf representing targ on RHS */
948 while (targ_chain != tc_stop) {
949 struct multiconcat_svpv *p = targ_chain;
950 targ_chain = (struct multiconcat_svpv *)(p->pv);
951 p->pv = targ_pv;
952 p->len = (SSize_t)targ_len;
953 }
954 }
955
956
957 /* --------------------------------------------------------------
958 * Phase 5:
959 *
960 * Append all the args in svpv_buf, plus the const strings, to dsv.
961 *
962 * On entry to this section the (pv,len) pairs in svpv_buf have the
963 * following meanings:
964 * (pv, len) a pure-plain or utf8 string (which may be targ)
965 * (pv, -(len+extra)) a plain string which will expand by 'extra'
966 * bytes when converted to utf8
967 * (0, -len) left-most targ, whose content has already
968 * been copied. Just advance dsv_pv by len.
969 */
970
971 /* If there are no constant strings and no special case args
972 * (svpv_p->len < 0), use a simpler, more efficient concat loop
973 */
974 if (!slow_concat) {
975 for (svpv_p = svpv_base; svpv_p < svpv_end; svpv_p++) {
976 SSize_t len = svpv_p->len;
977 if (!len)
978 continue;
979 Copy(svpv_p->pv, dsv_pv, len, char);
980 dsv_pv += len;
981 }
982 const_lens += (svpv_end - svpv_base + 1);
983 }
984 else {
985 /* Note that we iterate the loop nargs+1 times: to append nargs
986 * arguments and nargs+1 constant strings. For example, "-$a-$b-"
987 */
988 svpv_p = svpv_base - 1;
989
990 for (;;) {
991 SSize_t len = (const_lens++)->ssize;
992
993 /* append next const string segment */
994 if (len > 0) {
995 Copy(const_pv, dsv_pv, len, char);
996 dsv_pv += len;
997 const_pv += len;
998 }
999
1000 if (++svpv_p == svpv_end)
1001 break;
1002
1003 /* append next arg */
1004 len = svpv_p->len;
1005
1006 if (LIKELY(len > 0)) {
1007 Copy(svpv_p->pv, dsv_pv, len, char);
1008 dsv_pv += len;
1009 }
1010 else if (UNLIKELY(len < 0)) {
1011 /* negative length indicates two special cases */
1012 const char *p = svpv_p->pv;
1013 len = -len;
1014 if (UNLIKELY(p)) {
1015 /* copy plain-but-variant pv to a utf8 targ */
1016 char * end_pv = dsv_pv + len;
1017 assert(dst_utf8);
1018 while (dsv_pv < end_pv) {
1019 U8 c = (U8) *p++;
1020 append_utf8_from_native_byte(c, (U8**)&dsv_pv);
1021 }
1022 }
1023 else
1024 /* arg is already-copied targ */
1025 dsv_pv += len;
1026 }
1027
1028 }
1029 }
1030
1031 *dsv_pv = '\0';
1032 SvCUR_set(dsv, dsv_pv - SvPVX(dsv));
1033 assert(grow >= SvCUR(dsv) + 1);
1034 assert(SvLEN(dsv) >= SvCUR(dsv) + 1);
1035
1036 /* --------------------------------------------------------------
1037 * Phase 6:
1038 *
1039 * Handle overloading. If an overloaded arg or targ was detected
1040 * earlier, dsv will have been set to a new mortal, and any args and
1041 * consts to the left of the first overloaded arg will have been
1042 * accumulated to it. This section completes any further concatenation
1043 * steps with overloading handled.
1044 */
1045
1046 if (UNLIKELY(dsv != targ)) {
1047 SV *res;
1048
1049 SvFLAGS(dsv) |= dst_utf8;
1050
1051 if (SP <= toparg) {
1052 /* Stringifying the RHS was abandoned because *SP
1053 * is overloaded. dsv contains all the concatted strings
1054 * before *SP. Apply the rest of the args using overloading.
1055 */
1056 SV *left, *right, *res;
1057 int i;
1058 bool getmg = FALSE;
1059 /* number of args already concatted */
1060 SSize_t n = (nargs - 1) - (toparg - SP);
1061 /* current arg is either the first
1062 * or second value to be concatted
1063 * (including constant strings), so would
1064 * form part of the first concat */
1065 bool first_concat = ( n == 0
1066 || (n == 1 && const_lens[-2].ssize < 0
1067 && const_lens[-1].ssize < 0));
1068 int f_assign = first_concat ? 0 : AMGf_assign;
1069
1070 left = dsv;
1071
1072 for (; n < nargs; n++) {
1073 /* loop twice, first applying the arg, then the const segment */
1074 for (i = 0; i < 2; i++) {
1075 if (i) {
1076 /* append next const string segment */
1077 STRLEN len = (STRLEN)((const_lens++)->ssize);
1078 /* a length of -1 implies no constant string
1079 * rather than a zero-length one, e.g.
1080 * ($a . $b) versus ($a . "" . $b)
1081 */
1082 if ((SSize_t)len < 0)
1083 continue;
1084
1085 /* set right to the next constant string segment */
1086 right = newSVpvn_flags(const_pv, len,
1087 (dst_utf8 | SVs_TEMP));
1088 const_pv += len;
1089 }
1090 else {
1091 /* append next arg */
1092 right = *SP++;
1093 if (getmg)
1094 SvGETMAGIC(right);
1095 else
1096 /* SvGETMAGIC already called on this SV just
1097 * before we broke from the loop earlier */
1098 getmg = TRUE;
1099
1100 if (first_concat && n == 0 && const_lens[-1].ssize < 0) {
1101 /* nothing before the current arg; repeat the
1102 * loop to get a second arg */
1103 left = right;
1104 first_concat = FALSE;
1105 continue;
1106 }
1107 }
1108
1109 if ((SvAMAGIC(left) || SvAMAGIC(right))
1110 && (res = amagic_call(left, right, concat_amg, f_assign))
1111 )
1112 left = res;
1113 else {
1114 if (left != dsv) {
1115 sv_setsv(dsv, left);
1116 left = dsv;
1117 }
1118 sv_catsv_nomg(left, right);
1119 }
1120 f_assign = AMGf_assign;
1121 }
1122 }
1123 dsv = left;
1124 }
1125
1126 /* assign/append RHS (dsv) to LHS (targ) */
1127 if (is_append) {
1128 if ((SvAMAGIC(targ) || SvAMAGIC(dsv))
1129 && (res = amagic_call(targ, dsv, concat_amg, AMGf_assign))
1130 )
1131 sv_setsv(targ, res);
1132 else
1133 sv_catsv_nomg(targ, dsv);
1134 }
1135 else
1136 sv_setsv(targ, dsv);
1137 }
1138
1139 /* --------------------------------------------------------------
1140 * Phase 7:
1141 *
1142 * return result
1143 */
1144
1145 phase7:
1146
1147 SP -= stack_adj;
1148 SvTAINT(targ);
1149 SETTARG;
1150 RETURN;
1151}
1152
1153
1154/* push the elements of av onto the stack.
1155 * Returns PL_op->op_next to allow tail-call optimisation of its callers */
1156
1157STATIC OP*
1158S_pushav(pTHX_ AV* const av)
1159{
1160 dSP;
1161 const SSize_t maxarg = AvFILL(av) + 1;
1162 EXTEND(SP, maxarg);
1163 if (UNLIKELY(SvRMAGICAL(av))) {
1164 PADOFFSET i;
1165 for (i=0; i < (PADOFFSET)maxarg; i++) {
1166 SV ** const svp = av_fetch(av, i, FALSE);
1167 SP[i+1] = svp ? *svp : &PL_sv_undef;
1168 }
1169 }
1170 else {
1171 PADOFFSET i;
1172 for (i=0; i < (PADOFFSET)maxarg; i++) {
1173 SV * const sv = AvARRAY(av)[i];
1174 SP[i+1] = LIKELY(sv) ? sv : &PL_sv_undef;
1175 }
1176 }
1177 SP += maxarg;
1178 PUTBACK;
1179 return NORMAL;
1180}
1181
1182
1183/* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */
1184
1185PP(pp_padrange)
1186{
1187 dSP;
1188 PADOFFSET base = PL_op->op_targ;
1189 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
1190 if (PL_op->op_flags & OPf_SPECIAL) {
1191 /* fake the RHS of my ($x,$y,..) = @_ */
1192 PUSHMARK(SP);
1193 (void)S_pushav(aTHX_ GvAVn(PL_defgv));
1194 SPAGAIN;
1195 }
1196
1197 /* note, this is only skipped for compile-time-known void cxt */
1198 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) {
1199 int i;
1200
1201 EXTEND(SP, count);
1202 PUSHMARK(SP);
1203 for (i = 0; i <count; i++)
1204 *++SP = PAD_SV(base+i);
1205 }
1206 if (PL_op->op_private & OPpLVAL_INTRO) {
1207 SV **svp = &(PAD_SVl(base));
1208 const UV payload = (UV)(
1209 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT))
1210 | (count << SAVE_TIGHT_SHIFT)
1211 | SAVEt_CLEARPADRANGE);
1212 int i;
1213
1214 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT));
1215 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT))
1216 == (Size_t)base);
1217 {
1218 dSS_ADD;
1219 SS_ADD_UV(payload);
1220 SS_ADD_END(1);
1221 }
1222
1223 for (i = 0; i <count; i++)
1224 SvPADSTALE_off(*svp++); /* mark lexical as active */
1225 }
1226 RETURN;
1227}
1228
1229
1230PP(pp_padsv)
1231{
1232 dSP;
1233 EXTEND(SP, 1);
1234 {
1235 OP * const op = PL_op;
1236 /* access PL_curpad once */
1237 SV ** const padentry = &(PAD_SVl(op->op_targ));
1238 {
1239 dTARG;
1240 TARG = *padentry;
1241 PUSHs(TARG);
1242 PUTBACK; /* no pop/push after this, TOPs ok */
1243 }
1244 if (op->op_flags & OPf_MOD) {
1245 if (op->op_private & OPpLVAL_INTRO)
1246 if (!(op->op_private & OPpPAD_STATE))
1247 save_clearsv(padentry);
1248 if (op->op_private & OPpDEREF) {
1249 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather
1250 than TARG reduces the scope of TARG, so it does not
1251 span the call to save_clearsv, resulting in smaller
1252 machine code. */
1253 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF);
1254 }
1255 }
1256 return op->op_next;
1257 }
1258}
1259
1260PP(pp_readline)
1261{
1262 dSP;
1263 /* pp_coreargs pushes a NULL to indicate no args passed to
1264 * CORE::readline() */
1265 if (TOPs) {
1266 SvGETMAGIC(TOPs);
1267 tryAMAGICunTARGETlist(iter_amg, 0);
1268 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
1269 }
1270 else PL_last_in_gv = PL_argvgv, PL_stack_sp--;
1271 if (!isGV_with_GP(PL_last_in_gv)) {
1272 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv)))
1273 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv));
1274 else {
1275 dSP;
1276 XPUSHs(MUTABLE_SV(PL_last_in_gv));
1277 PUTBACK;
1278 Perl_pp_rv2gv(aTHX);
1279 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--);
1280 assert((SV*)PL_last_in_gv == &PL_sv_undef || isGV_with_GP(PL_last_in_gv));
1281 }
1282 }
1283 return do_readline();
1284}
1285
1286PP(pp_eq)
1287{
1288 dSP;
1289 SV *left, *right;
1290
1291 tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric);
1292 right = POPs;
1293 left = TOPs;
1294 SETs(boolSV(
1295 (SvIOK_notUV(left) && SvIOK_notUV(right))
1296 ? (SvIVX(left) == SvIVX(right))
1297 : ( do_ncmp(left, right) == 0)
1298 ));
1299 RETURN;
1300}
1301
1302
1303/* also used for: pp_i_preinc() */
1304
1305PP(pp_preinc)
1306{
1307 SV *sv = *PL_stack_sp;
1308
1309 if (LIKELY(((sv->sv_flags &
1310 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
1311 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
1312 == SVf_IOK))
1313 && SvIVX(sv) != IV_MAX)
1314 {
1315 SvIV_set(sv, SvIVX(sv) + 1);
1316 }
1317 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */
1318 sv_inc(sv);
1319 SvSETMAGIC(sv);
1320 return NORMAL;
1321}
1322
1323
1324/* also used for: pp_i_predec() */
1325
1326PP(pp_predec)
1327{
1328 SV *sv = *PL_stack_sp;
1329
1330 if (LIKELY(((sv->sv_flags &
1331 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV|
1332 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK))
1333 == SVf_IOK))
1334 && SvIVX(sv) != IV_MIN)
1335 {
1336 SvIV_set(sv, SvIVX(sv) - 1);
1337 }
1338 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */
1339 sv_dec(sv);
1340 SvSETMAGIC(sv);
1341 return NORMAL;
1342}
1343
1344
1345/* also used for: pp_orassign() */
1346
1347PP(pp_or)
1348{
1349 dSP;
1350 SV *sv;
1351 PERL_ASYNC_CHECK();
1352 sv = TOPs;
1353 if (SvTRUE_NN(sv))
1354 RETURN;
1355 else {
1356 if (PL_op->op_type == OP_OR)
1357 --SP;
1358 RETURNOP(cLOGOP->op_other);
1359 }
1360}
1361
1362
1363/* also used for: pp_dor() pp_dorassign() */
1364
1365PP(pp_defined)
1366{
1367 dSP;
1368 SV* sv;
1369 bool defined;
1370 const int op_type = PL_op->op_type;
1371 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN);
1372
1373 if (is_dor) {
1374 PERL_ASYNC_CHECK();
1375 sv = TOPs;
1376 if (UNLIKELY(!sv || !SvANY(sv))) {
1377 if (op_type == OP_DOR)
1378 --SP;
1379 RETURNOP(cLOGOP->op_other);
1380 }
1381 }
1382 else {
1383 /* OP_DEFINED */
1384 sv = POPs;
1385 if (UNLIKELY(!sv || !SvANY(sv)))
1386 RETPUSHNO;
1387 }
1388
1389 defined = FALSE;
1390 switch (SvTYPE(sv)) {
1391 case SVt_PVAV:
1392 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
1393 defined = TRUE;
1394 break;
1395 case SVt_PVHV:
1396 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied)))
1397 defined = TRUE;
1398 break;
1399 case SVt_PVCV:
1400 if (CvROOT(sv) || CvXSUB(sv))
1401 defined = TRUE;
1402 break;
1403 default:
1404 SvGETMAGIC(sv);
1405 if (SvOK(sv))
1406 defined = TRUE;
1407 break;
1408 }
1409
1410 if (is_dor) {
1411 if(defined)
1412 RETURN;
1413 if(op_type == OP_DOR)
1414 --SP;
1415 RETURNOP(cLOGOP->op_other);
1416 }
1417 /* assuming OP_DEFINED */
1418 if(defined)
1419 RETPUSHYES;
1420 RETPUSHNO;
1421}
1422
1423
1424
1425PP(pp_add)
1426{
1427 dSP; dATARGET; bool useleft; SV *svl, *svr;
1428
1429 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric);
1430 svr = TOPs;
1431 svl = TOPm1s;
1432
1433#ifdef PERL_PRESERVE_IVUV
1434
1435 /* special-case some simple common cases */
1436 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) {
1437 IV il, ir;
1438 U32 flags = (svl->sv_flags & svr->sv_flags);
1439 if (flags & SVf_IOK) {
1440 /* both args are simple IVs */
1441 UV topl, topr;
1442 il = SvIVX(svl);
1443 ir = SvIVX(svr);
1444 do_iv:
1445 topl = ((UV)il) >> (UVSIZE * 8 - 2);
1446 topr = ((UV)ir) >> (UVSIZE * 8 - 2);
1447
1448 /* if both are in a range that can't under/overflow, do a
 1449 * simple integer add: if the top two bits of both numbers
1450 * are 00 or 11, then it's safe */
1451 if (!( ((topl+1) | (topr+1)) & 2)) {
1452 SP--;
1453 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */
1454 SETs(TARG);
1455 RETURN;
1456 }
1457 goto generic;
1458 }
1459 else if (flags & SVf_NOK) {
1460 /* both args are NVs */
1461 NV nl = SvNVX(svl);
1462 NV nr = SvNVX(svr);
1463
1464 if (
1465#if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan)
1466 !Perl_isnan(nl) && nl == (NV)(il = (IV)nl)
1467 && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr)
1468#else
1469 nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr)
1470#endif
1471 )
1472 /* nothing was lost by converting to IVs */
1473 goto do_iv;
1474 SP--;
1475 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */
1476 SETs(TARG);
1477 RETURN;
1478 }
1479 }
1480
1481 generic:
1482
1483 useleft = USE_LEFT(svl);
 1484 /* We must see if we can perform the addition with integers,
 1485 as the integer code detects overflow while the NV code doesn't.
 1486 If either argument hasn't had a numeric conversion yet, attempt to get
 1487 the IV. It's important to do this now, rather than just assuming that
 1488 it's not IOK, as a PV of "9223372036854775806" may not take well to NV
 1489 addition, and an SV which is NOK, NV=6.0 ought to be coerced to
 1490 an integer in case the second argument is IV=9223372036854775806.
1491 We can (now) rely on sv_2iv to do the right thing, only setting the
1492 public IOK flag if the value in the NV (or PV) slot is truly integer.
1493
1494 A side effect is that this also aggressively prefers integer maths over
1495 fp maths for integer values.
1496
1497 How to detect overflow?
1498
 1499 C99 section 6.2.6.1 says
1500
1501 The range of nonnegative values of a signed integer type is a subrange
1502 of the corresponding unsigned integer type, and the representation of
1503 the same value in each type is the same. A computation involving
1504 unsigned operands can never overflow, because a result that cannot be
1505 represented by the resulting unsigned integer type is reduced modulo
1506 the number that is one greater than the largest value that can be
1507 represented by the resulting type.
1508
1509 (the 9th paragraph)
1510
1511 which I read as "unsigned ints wrap."
1512
 1513 signed integer overflow seems to be classed as an "exceptional condition"
1514
1515 If an exceptional condition occurs during the evaluation of an
1516 expression (that is, if the result is not mathematically defined or not
1517 in the range of representable values for its type), the behavior is
1518 undefined.
1519
1520 (6.5, the 5th paragraph)
1521
1522 I had assumed that on 2s complement machines signed arithmetic would
1523 wrap, hence coded pp_add and pp_subtract on the assumption that
1524 everything perl builds on would be happy. After much wailing and
1525 gnashing of teeth it would seem that irix64 knows its ANSI spec well,
1526 knows that it doesn't need to, and doesn't. Bah. Anyway, the all-
1527 unsigned code below is actually shorter than the old code. :-)
1528 */
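    /* A minimal sketch (not compiled here) of the unsigned-wrap overflow
     * test relied on below, given two non-negative magnitudes held as UVs:
     *
     *     UV sum = auv + buv;     // unsigned add: wraps, never undefined
     *     if (sum >= auv)         // no wrap, so sum is the true magnitude
     *         result_good = 1;    // otherwise fall through to NV addition
     */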
1529
1530 if (SvIV_please_nomg(svr)) {
1531 /* Unless the left argument is integer in range we are going to have to
1532 use NV maths. Hence only attempt to coerce the right argument if
1533 we know the left is integer. */
1534 UV auv = 0;
1535 bool auvok = FALSE;
1536 bool a_valid = 0;
1537
1538 if (!useleft) {
1539 auv = 0;
1540 a_valid = auvok = 1;
 1541 /* left operand is undef, treat as zero. + 0 is identity.
 1542 We could SETi or SETu right now, but we space-optimise by not adding
 1543 lots of code to speed up what is probably a rarish case. */
1544 } else {
1545 /* Left operand is defined, so is it IV? */
1546 if (SvIV_please_nomg(svl)) {
1547 if ((auvok = SvUOK(svl)))
1548 auv = SvUVX(svl);
1549 else {
1550 const IV aiv = SvIVX(svl);
1551 if (aiv >= 0) {
1552 auv = aiv;
1553 auvok = 1; /* Now acting as a sign flag. */
1554 } else {
1555 auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv);
1556 }
1557 }
1558 a_valid = 1;
1559 }
1560 }
1561 if (a_valid) {
1562 bool result_good = 0;
1563 UV result;
1564 UV buv;
1565 bool buvok = SvUOK(svr);
1566
1567 if (buvok)
1568 buv = SvUVX(svr);
1569 else {
1570 const IV biv = SvIVX(svr);
1571 if (biv >= 0) {
1572 buv = biv;
1573 buvok = 1;
1574 } else
1575 buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv);
1576 }
 1577 /* ?uvok is set if the value is >= 0: basically, flagged as UV if it's +ve,
 1578 else "IV" now, independent of how it came in.
 1579 if a, b represent positive values and A, B negative ones, a maps to -A etc.
1580 a + b => (a + b)
1581 A + b => -(a - b)
1582 a + B => (a - b)
1583 A + B => -(a + b)
1584 all UV maths. negate result if A negative.
1585 add if signs same, subtract if signs differ. */
1586
1587 if (auvok ^ buvok) {
1588 /* Signs differ. */
1589 if (auv >= buv) {
1590 result = auv - buv;
1591 /* Must get smaller */
1592 if (result <= auv)
1593 result_good = 1;
1594 } else {
1595 result = buv - auv;
1596 if (result <= buv) {
 1597 /* result really should be -(auv-buv); as it's the negation
 1598 of the true value, we need to swap our result flag */
1599 auvok = !auvok;
1600 result_good = 1;
1601 }
1602 }
1603 } else {
1604 /* Signs same */
1605 result = auv + buv;
1606 if (result >= auv)
1607 result_good = 1;
1608 }
1609 if (result_good) {
1610 SP--;
1611 if (auvok)
1612 SETu( result );
1613 else {
1614 /* Negate result */
1615 if (result <= (UV)IV_MIN)
1616 SETi(result == (UV)IV_MIN
1617 ? IV_MIN : -(IV)result);
1618 else {
1619 /* result valid, but out of range for IV. */
1620 SETn( -(NV)result );
1621 }
1622 }
1623 RETURN;
1624 } /* Overflow, drop through to NVs. */
1625 }
1626 }
1627
1628#else
1629 useleft = USE_LEFT(svl);
1630#endif
1631
1632 {
1633 NV value = SvNV_nomg(svr);
1634 (void)POPs;
1635 if (!useleft) {
1636 /* left operand is undef, treat as zero. + 0.0 is identity. */
1637 SETn(value);
1638 RETURN;
1639 }
1640 SETn( value + SvNV_nomg(svl) );
1641 RETURN;
1642 }
1643}
1644
1645
1646/* also used for: pp_aelemfast_lex() */
1647
1648PP(pp_aelemfast)
1649{
1650 dSP;
1651 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX
1652 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv);
1653 const U32 lval = PL_op->op_flags & OPf_MOD;
1654 const I8 key = (I8)PL_op->op_private;
1655 SV** svp;
1656 SV *sv;
1657
1658 assert(SvTYPE(av) == SVt_PVAV);
1659
1660 EXTEND(SP, 1);
1661
1662 /* inlined av_fetch() for simple cases ... */
1663 if (!SvRMAGICAL(av) && key >= 0 && key <= AvFILLp(av)) {
1664 sv = AvARRAY(av)[key];
1665 if (sv) {
1666 PUSHs(sv);
1667 RETURN;
1668 }
1669 }
1670
1671 /* ... else do it the hard way */
1672 svp = av_fetch(av, key, lval);
1673 sv = (svp ? *svp : &PL_sv_undef);
1674
1675 if (UNLIKELY(!svp && lval))
1676 DIE(aTHX_ PL_no_aelem, (int)key);
1677
1678 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
1679 mg_get(sv);
1680 PUSHs(sv);
1681 RETURN;
1682}
1683
1684PP(pp_join)
1685{
1686 dSP; dMARK; dTARGET;
1687 MARK++;
1688 do_join(TARG, *MARK, MARK, SP);
1689 SP = MARK;
1690 SETs(TARG);
1691 RETURN;
1692}
1693
1694/* Oversized hot code. */
1695
1696/* also used for: pp_say() */
1697
1698PP(pp_print)
1699{
1700 dSP; dMARK; dORIGMARK;
1701 PerlIO *fp;
1702 MAGIC *mg;
1703 GV * const gv
1704 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv;
1705 IO *io = GvIO(gv);
1706
1707 if (io
1708 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
1709 {
1710 had_magic:
1711 if (MARK == ORIGMARK) {
 1712 /* If using the default handle then we need to make space to
 1713 * pass the object as the 1st arg, so move the other args up ...
1714 */
1715 MEXTEND(SP, 1);
1716 ++MARK;
1717 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*);
1718 ++SP;
1719 }
1720 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io),
1721 mg,
1722 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK
1723 | (PL_op->op_type == OP_SAY
1724 ? TIED_METHOD_SAY : 0)), sp - mark);
1725 }
1726 if (!io) {
1727 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv)))
1728 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar)))
1729 goto had_magic;
1730 report_evil_fh(gv);
1731 SETERRNO(EBADF,RMS_IFI);
1732 goto just_say_no;
1733 }
1734 else if (!(fp = IoOFP(io))) {
1735 if (IoIFP(io))
1736 report_wrongway_fh(gv, '<');
1737 else
1738 report_evil_fh(gv);
1739 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI);
1740 goto just_say_no;
1741 }
1742 else {
1743 SV * const ofs = GvSV(PL_ofsgv); /* $, */
1744 MARK++;
1745 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) {
1746 while (MARK <= SP) {
1747 if (!do_print(*MARK, fp))
1748 break;
1749 MARK++;
1750 if (MARK <= SP) {
1751 /* don't use 'ofs' here - it may be invalidated by magic callbacks */
1752 if (!do_print(GvSV(PL_ofsgv), fp)) {
1753 MARK--;
1754 break;
1755 }
1756 }
1757 }
1758 }
1759 else {
1760 while (MARK <= SP) {
1761 if (!do_print(*MARK, fp))
1762 break;
1763 MARK++;
1764 }
1765 }
1766 if (MARK <= SP)
1767 goto just_say_no;
1768 else {
1769 if (PL_op->op_type == OP_SAY) {
1770 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp))
1771 goto just_say_no;
1772 }
1773 else if (PL_ors_sv && SvOK(PL_ors_sv))
1774 if (!do_print(PL_ors_sv, fp)) /* $\ */
1775 goto just_say_no;
1776
1777 if (IoFLAGS(io) & IOf_FLUSH)
1778 if (PerlIO_flush(fp) == EOF)
1779 goto just_say_no;
1780 }
1781 }
1782 SP = ORIGMARK;
1783 XPUSHs(&PL_sv_yes);
1784 RETURN;
1785
1786 just_say_no:
1787 SP = ORIGMARK;
1788 XPUSHs(&PL_sv_undef);
1789 RETURN;
1790}
1791
1792
1793/* do the common parts of pp_padhv() and pp_rv2hv()
1794 * It assumes the caller has done EXTEND(SP, 1) or equivalent.
1795 * 'is_keys' indicates the OPpPADHV_ISKEYS/OPpRV2HV_ISKEYS flag is set.
1796 * 'has_targ' indicates that the op has a target - this should
 1797 * be a compile-time constant so that the code can be constant-folded as
1798 * appropriate
1799 * */
1800
1801PERL_STATIC_INLINE OP*
1802S_padhv_rv2hv_common(pTHX_ HV *hv, U8 gimme, bool is_keys, bool has_targ)
1803{
1804 bool is_tied;
1805 bool is_bool;
1806 MAGIC *mg;
1807 dSP;
1808 IV i;
1809 SV *sv;
1810
1811 assert(PL_op->op_type == OP_PADHV || PL_op->op_type == OP_RV2HV);
1812
1813 if (gimme == G_ARRAY) {
1814 hv_pushkv(hv, 3);
1815 return NORMAL;
1816 }
1817
1818 if (is_keys)
1819 /* 'keys %h' masquerading as '%h': reset iterator */
1820 (void)hv_iterinit(hv);
1821
1822 if (gimme == G_VOID)
1823 return NORMAL;
1824
1825 is_bool = ( PL_op->op_private & OPpTRUEBOOL
1826 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL
1827 && block_gimme() == G_VOID));
1828 is_tied = SvRMAGICAL(hv) && (mg = mg_find(MUTABLE_SV(hv), PERL_MAGIC_tied));
1829
1830 if (UNLIKELY(is_tied)) {
1831 if (is_keys && !is_bool) {
1832 i = 0;
1833 while (hv_iternext(hv))
1834 i++;
1835 goto push_i;
1836 }
1837 else {
1838 sv = magic_scalarpack(hv, mg);
1839 goto push_sv;
1840 }
1841 }
1842 else {
1843 i = HvUSEDKEYS(hv);
1844 if (is_bool) {
1845 sv = i ? &PL_sv_yes : &PL_sv_zero;
1846 push_sv:
1847 PUSHs(sv);
1848 }
1849 else {
1850 push_i:
1851 if (has_targ) {
1852 dTARGET;
1853 PUSHi(i);
1854 }
1855 else
1856#ifdef PERL_OP_PARENT
1857 if (is_keys) {
1858 /* parent op should be an unused OP_KEYS whose targ we can
1859 * use */
1860 dTARG;
1861 OP *k;
1862
1863 assert(!OpHAS_SIBLING(PL_op));
1864 k = PL_op->op_sibparent;
1865 assert(k->op_type == OP_KEYS);
1866 TARG = PAD_SV(k->op_targ);
1867 PUSHi(i);
1868 }
1869 else
1870#endif
1871 mPUSHi(i);
1872 }
1873 }
1874
1875 PUTBACK;
1876 return NORMAL;
1877}
1878
1879
1880/* This is also called directly by pp_lvavref. */
1881PP(pp_padav)
1882{
1883 dSP; dTARGET;
1884 U8 gimme;
1885 assert(SvTYPE(TARG) == SVt_PVAV);
1886 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO ))
1887 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) ))
1888 SAVECLEARSV(PAD_SVl(PL_op->op_targ));
1889 EXTEND(SP, 1);
1890
1891 if (PL_op->op_flags & OPf_REF) {
1892 PUSHs(TARG);
1893 RETURN;
1894 }
1895 else if (PL_op->op_private & OPpMAYBE_LVSUB) {
1896 const I32 flags = is_lvalue_sub();
1897 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1898 if (GIMME_V == G_SCALAR)
1899 /* diag_listed_as: Can't return %s to lvalue scalar context */
1900 Perl_croak(aTHX_ "Can't return array to lvalue scalar context");
1901 PUSHs(TARG);
1902 RETURN;
1903 }
1904 }
1905
1906 gimme = GIMME_V;
1907 if (gimme == G_ARRAY)
1908 return S_pushav(aTHX_ (AV*)TARG);
1909
1910 if (gimme == G_SCALAR) {
1911 const SSize_t maxarg = AvFILL(MUTABLE_AV(TARG)) + 1;
1912 if (!maxarg)
1913 PUSHs(&PL_sv_zero);
1914 else if (PL_op->op_private & OPpTRUEBOOL)
1915 PUSHs(&PL_sv_yes);
1916 else
1917 mPUSHi(maxarg);
1918 }
1919 RETURN;
1920}
1921
1922
1923PP(pp_padhv)
1924{
1925 dSP; dTARGET;
1926 U8 gimme;
1927
1928 assert(SvTYPE(TARG) == SVt_PVHV);
1929 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO ))
1930 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) ))
1931 SAVECLEARSV(PAD_SVl(PL_op->op_targ));
1932
1933 EXTEND(SP, 1);
1934
1935 if (PL_op->op_flags & OPf_REF) {
1936 PUSHs(TARG);
1937 RETURN;
1938 }
1939 else if (PL_op->op_private & OPpMAYBE_LVSUB) {
1940 const I32 flags = is_lvalue_sub();
1941 if (flags && !(flags & OPpENTERSUB_INARGS)) {
1942 if (GIMME_V == G_SCALAR)
1943 /* diag_listed_as: Can't return %s to lvalue scalar context */
1944 Perl_croak(aTHX_ "Can't return hash to lvalue scalar context");
1945 PUSHs(TARG);
1946 RETURN;
1947 }
1948 }
1949
1950 gimme = GIMME_V;
1951
1952 return S_padhv_rv2hv_common(aTHX_ (HV*)TARG, gimme,
1953 cBOOL(PL_op->op_private & OPpPADHV_ISKEYS),
1954 0 /* has_targ*/);
1955}
1956
1957
1958/* also used for: pp_rv2hv() */
1959/* also called directly by pp_lvavref */
1960
1961PP(pp_rv2av)
1962{
1963 dSP; dTOPss;
1964 const U8 gimme = GIMME_V;
1965 static const char an_array[] = "an ARRAY";
1966 static const char a_hash[] = "a HASH";
1967 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV
1968 || PL_op->op_type == OP_LVAVREF;
1969 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV;
1970
1971 SvGETMAGIC(sv);
1972 if (SvROK(sv)) {
1973 if (UNLIKELY(SvAMAGIC(sv))) {
1974 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg);
1975 }
1976 sv = SvRV(sv);
1977 if (UNLIKELY(SvTYPE(sv) != type))
1978 /* diag_listed_as: Not an ARRAY reference */
1979 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash);
1980 else if (UNLIKELY(PL_op->op_flags & OPf_MOD
1981 && PL_op->op_private & OPpLVAL_INTRO))
1982 Perl_croak(aTHX_ "%s", PL_no_localize_ref);
1983 }
1984 else if (UNLIKELY(SvTYPE(sv) != type)) {
1985 GV *gv;
1986
1987 if (!isGV_with_GP(sv)) {
1988 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash,
1989 type, &sp);
1990 if (!gv)
1991 RETURN;
1992 }
1993 else {
1994 gv = MUTABLE_GV(sv);
1995 }
1996 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv));
1997 if (PL_op->op_private & OPpLVAL_INTRO)
1998 sv = is_pp_rv2av ? MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv));
1999 }
2000 if (PL_op->op_flags & OPf_REF) {
2001 SETs(sv);
2002 RETURN;
2003 }
2004 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) {
2005 const I32 flags = is_lvalue_sub();
2006 if (flags && !(flags & OPpENTERSUB_INARGS)) {
2007 if (gimme != G_ARRAY)
2008 goto croak_cant_return;
2009 SETs(sv);
2010 RETURN;
2011 }
2012 }
2013
2014 if (is_pp_rv2av) {
2015 AV *const av = MUTABLE_AV(sv);
2016
2017 if (gimme == G_ARRAY) {
2018 SP--;
2019 PUTBACK;
2020 return S_pushav(aTHX_ av);
2021 }
2022
2023 if (gimme == G_SCALAR) {
2024 const SSize_t maxarg = AvFILL(av) + 1;
2025 if (PL_op->op_private & OPpTRUEBOOL)
2026 SETs(maxarg ? &PL_sv_yes : &PL_sv_zero);
2027 else {
2028 dTARGET;
2029 SETi(maxarg);
2030 }
2031 }
2032 }
2033 else {
2034 SP--; PUTBACK;
2035 return S_padhv_rv2hv_common(aTHX_ (HV*)sv, gimme,
2036 cBOOL(PL_op->op_private & OPpRV2HV_ISKEYS),
2037 1 /* has_targ*/);
2038 }
2039 RETURN;
2040
2041 croak_cant_return:
2042 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context",
2043 is_pp_rv2av ? "array" : "hash");
2044 RETURN;
2045}
2046
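/* S_do_oddball(): warn when a hash is being assigned an odd-length list.
 * Illustrative examples (not from the original source):
 *     my %h = (1, 2, 3);   # "Odd number of elements in hash assignment"
 *     my %h = ($href);     # "Reference found where even-sized list expected"
 * The single-reference form is singled out because it usually indicates a
 * missing dereference (e.g. %$href was probably intended).
 */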
2047STATIC void
2048S_do_oddball(pTHX_ SV **oddkey, SV **firstkey)
2049{
2050 PERL_ARGS_ASSERT_DO_ODDBALL;
2051
2052 if (*oddkey) {
2053 if (ckWARN(WARN_MISC)) {
2054 const char *err;
2055 if (oddkey == firstkey &&
2056 SvROK(*oddkey) &&
2057 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV ||
2058 SvTYPE(SvRV(*oddkey)) == SVt_PVHV))
2059 {
2060 err = "Reference found where even-sized list expected";
2061 }
2062 else
2063 err = "Odd number of elements in hash assignment";
2064 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err);
2065 }
2066
2067 }
2068}
2069
2070
2071/* Do a mark and sweep with the SVf_BREAK flag to detect elements which
2072 * are common to both the LHS and RHS of an aassign, and replace them
2073 * with copies. All these copies are made before the actual list assign is
2074 * done.
2075 *
2076 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS
2077 * element ($b) to the first LH element ($a) modifies $a; when the
2078 * second assignment is done, the second RH element now has the wrong
2079 * value. So we initially replace the RHS with ($b, mortalcopy($a)).
2080 * Note that we don't need to make a mortal copy of $b.
2081 *
2082 * The algorithm below works by, for every RHS element, marking the
2083 * corresponding LHS target element with SVf_BREAK. Then if the RHS
2084 * element is found with SVf_BREAK set, it means it would have been
2085 * modified, so make a copy.
2086 * Note that by scanning both LHS and RHS in lockstep, we avoid
2087 * unnecessary copies (like $b above) compared with a naive
2088 * "mark all LHS; copy all marked RHS; unmark all LHS".
2089 *
2090 * If the LHS element is a 'my' declaration and has a refcount of 1, then
2091 * it can't be common and can be skipped.
2092 *
2093 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means
2094 * that we thought we didn't need to call S_aassign_copy_common(), but we
2095 * call it anyway for sanity checking. If we find we need to copy, then panic.
2096 */
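/* Worked example (a rough trace, for illustration only):
 *     ($x, $y) = ($y, $x, 1)
 * relem starts at RHS[1], so:
 *   - mark LHS[0] ($x); RHS[1] is $x and is marked => replace with mortalcopy($x)
 *   - mark LHS[1] ($y); RHS[2] is the constant 1   => no copy needed
 * RHS[0] ($y) is never examined, matching the "first RH element" note below.
 * (This assumes $x and $y are not the refcount-1 'my' case that the RC1
 * shortcut skips.)  Finally SVf_BREAK is cleared from both LHS elements.
 */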
2097
2098PERL_STATIC_INLINE void
2099S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem,
2100 SV **firstrelem, SV **lastrelem
2101#ifdef DEBUGGING
2102 , bool fake
2103#endif
2104)
2105{
2106 dVAR;
2107 SV **relem;
2108 SV **lelem;
2109 SSize_t lcount = lastlelem - firstlelem + 1;
2110 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? */
2111 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1);
2112 bool copy_all = FALSE;
2113
2114 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */
2115 assert(firstlelem < lastlelem); /* at least 2 LH elements */
2116 assert(firstrelem < lastrelem); /* at least 2 RH elements */
2117
2118
2119 lelem = firstlelem;
2120 /* we never have to copy the first RH element; it can't be corrupted
2121 * by assigning something to the corresponding first LH element.
2122 * So each iteration of the scan below does: mark LHS[N]; test RHS[N+1]
2123 */
2124 relem = firstrelem + 1;
2125
2126 for (; relem <= lastrelem; relem++) {
2127 SV *svr;
2128
2129 /* mark next LH element */
2130
2131 if (--lcount >= 0) {
2132 SV *svl = *lelem++;
2133
2134 if (UNLIKELY(!svl)) {/* skip AV alias marker */
2135 assert (lelem <= lastlelem);
2136 svl = *lelem++;
2137 lcount--;
2138 }
2139
2140 assert(svl);
2141 if (SvSMAGICAL(svl)) {
2142 copy_all = TRUE;
2143 }
2144 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) {
2145 if (!marked)
2146 return;
2147 /* this LH element will consume all further args;
2148 * no need to mark any further LH elements (if any).
2149 * But we still need to scan any remaining RHS elements;
2150 * set lcount negative to distinguish from lcount == 0,
2151 * so the loop condition continues being true
2152 */
2153 lcount = -1;
2154 lelem--; /* no need to unmark this element */
2155 }
2156 else if (!(do_rc1 && SvREFCNT(svl) == 1) && !SvIMMORTAL(svl)) {
2157 SvFLAGS(svl) |= SVf_BREAK;
2158 marked = TRUE;
2159 }
2160 else if (!marked) {
2161 /* don't check RH element if no SVf_BREAK flags set yet */
2162 if (!lcount)
2163 break;
2164 continue;
2165 }
2166 }
2167
2168 /* see if corresponding RH element needs copying */
2169
2170 assert(marked);
2171 svr = *relem;
2172 assert(svr);
2173
2174 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) {
2175 U32 brk = (SvFLAGS(svr) & SVf_BREAK);
2176
2177#ifdef DEBUGGING
2178 if (fake) {
2179 /* op_dump(PL_op); */
2180 Perl_croak(aTHX_
2181 "panic: aassign skipped needed copy of common RH elem %"
2182 UVuf, (UV)(relem - firstrelem));
2183 }
2184#endif
2185
2186 TAINT_NOT; /* Each item is independent */
2187
2188 /* Dear TODO test in t/op/sort.t, I love you.
2189 (It's relying on a panic, not a "semi-panic" from newSVsv()
2190 and then an assertion failure below.) */
2191 if (UNLIKELY(SvIS_FREED(svr))) {
2192 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p",
2193 (void*)svr);
2194 }
2195 /* avoid break flag while copying; otherwise COW etc
2196 * disabled... */
2197 SvFLAGS(svr) &= ~SVf_BREAK;
2198 /* Not newSVsv(), as it does not allow copy-on-write,
2199 resulting in wasteful copies.
2200 Also, we use SV_NOSTEAL in case the SV is used more than
2201 once, e.g. (...) = (f())[0,0]
2202 Where the same SV appears twice on the RHS without a ref
2203 count bump. (Although I suspect that the SV won't be
2204 stealable here anyway - DAPM).
2205 */
2206 *relem = sv_mortalcopy_flags(svr,
2207 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
2208 /* ... but restore afterwards in case it's needed again,
2209 * e.g. ($a,$b,$c) = (1,$a,$a)
2210 */
2211 SvFLAGS(svr) |= brk;
2212 }
2213
2214 if (!lcount)
2215 break;
2216 }
2217
2218 if (!marked)
2219 return;
2220
2221 /*unmark LHS */
2222
2223 while (lelem > firstlelem) {
2224 SV * const svl = *(--lelem);
2225 if (svl)
2226 SvFLAGS(svl) &= ~SVf_BREAK;
2227 }
2228}
2229
2230
2231
2232PP(pp_aassign)
2233{
2234 dVAR; dSP;
2235 SV **lastlelem = PL_stack_sp;
2236 SV **lastrelem = PL_stack_base + POPMARK;
2237 SV **firstrelem = PL_stack_base + POPMARK + 1;
2238 SV **firstlelem = lastrelem + 1;
2239
2240 SV **relem;
2241 SV **lelem;
2242 U8 gimme;
2243 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
2244 * only need to save locally, not on the save stack */
2245 U16 old_delaymagic = PL_delaymagic;
2246#ifdef DEBUGGING
2247 bool fake = 0;
2248#endif
2249
2250 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
2251
2252 /* If there's a common identifier on both sides we have to take
2253 * special care that assigning the identifier on the left doesn't
2254 * clobber a value on the right that's used later in the list.
2255 */
2256
2257 /* at least 2 LH and RH elements, or commonality isn't an issue */
2258 if (firstlelem < lastlelem && firstrelem < lastrelem) {
2259 for (relem = firstrelem+1; relem <= lastrelem; relem++) {
2260 if (SvGMAGICAL(*relem))
2261 goto do_scan;
2262 }
2263 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
2264 if (*lelem && SvSMAGICAL(*lelem))
2265 goto do_scan;
2266 }
2267 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
2268 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
2269 /* skip the scan if all scalars have a ref count of 1 */
2270 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
2271 SV *sv = *lelem;
2272 if (!sv || SvREFCNT(sv) == 1)
2273 continue;
2274 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVHV)
2275 goto do_scan;
2276 break;
2277 }
2278 }
2279 else {
2280 do_scan:
2281 S_aassign_copy_common(aTHX_
2282 firstlelem, lastlelem, firstrelem, lastrelem
2283#ifdef DEBUGGING
2284 , fake
2285#endif
2286 );
2287 }
2288 }
2289 }
2290#ifdef DEBUGGING
2291 else {
2292 /* on debugging builds, do the scan even if we've concluded we
2293 * don't need to, then panic if we find commonality. Note that the
2294 * scanner assumes at least 2 elements */
2295 if (firstlelem < lastlelem && firstrelem < lastrelem) {
2296 fake = 1;
2297 goto do_scan;
2298 }
2299 }
2300#endif
2301
2302 gimme = GIMME_V;
2303 relem = firstrelem;
2304 lelem = firstlelem;
2305
2306 if (relem > lastrelem)
2307 goto no_relems;
2308
2309 /* first lelem loop while there are still relems */
2310 while (LIKELY(lelem <= lastlelem)) {
2311 bool alias = FALSE;
2312 SV *lsv = *lelem++;
2313
2314 TAINT_NOT; /* Each item stands on its own, taintwise. */
2315
2316 assert(relem <= lastrelem);
2317 if (UNLIKELY(!lsv)) {
2318 alias = TRUE;
2319 lsv = *lelem++;
2320 ASSUME(SvTYPE(lsv) == SVt_PVAV);
2321 }
2322
2323 switch (SvTYPE(lsv)) {
2324 case SVt_PVAV: {
2325 SV **svp;
2326 SSize_t i;
2327 SSize_t tmps_base;
2328 SSize_t nelems = lastrelem - relem + 1;
2329 AV *ary = MUTABLE_AV(lsv);
2330
2331 /* Assigning to an aggregate is tricky. First there is the
2332 * issue of commonality, e.g. @a = ($a[0]). Since the
2333 * stack isn't refcounted, clearing @a prior to storing
2334 * elements will free $a[0]. Similarly with
2335 * sub FETCH { $status[$_[1]] } @status = @tied[0,1];
2336 *
2337 * The way to avoid these issues is to make the copy of each
2338 * SV (and we normally store a *copy* in the array) *before*
2339 * clearing the array. But this has a problem in that
2340 * if the code croaks during copying, the not-yet-stored copies
2341 * could leak. One way to avoid this is to make all the copies
2342 * mortal, but that's quite expensive.
2343 *
2344 * The current solution to these issues is to use a chunk
2345 * of the tmps stack as a temporary refcounted-stack. SVs
2346 * will be put on there during processing to avoid leaks,
2347 * but will be removed again before the end of this block,
2348 * so free_tmps() is never normally called. Also, the
2349 * sv_refcnt of the SVs doesn't have to be manipulated, since
2350 * the ownership of 1 reference count is transferred directly
2351 * from the tmps stack to the AV when the SV is stored.
2352 *
2353 * We disarm slots in the temps stack by storing PL_sv_undef
2354 * there: it doesn't matter if that SV's refcount is
2355 * repeatedly decremented during a croak. But usually this is
2356 * only an interim measure. By the end of this code block
2357 * we try where possible to not leave any PL_sv_undef's on the
2358 * tmps stack e.g. by shuffling newer entries down.
2359 *
2360 * There is one case where we don't copy: non-magical
2361 * SvTEMP(sv)'s with a ref count of 1. The only owner of these
2362 * is on the tmps stack, so it's safe to directly steal the SV
2363 * rather than copying. This is common in things like function
2364 * returns, map etc, which all return a list of such SVs.
2365 *
2366 * Note however something like @a = (f())[0,0], where there is
2367 * a danger of the same SV being shared: this is avoided because
2368 * when the SV is stored as $a[0], its ref count gets bumped,
2369 * so the RC==1 test fails and the second element is copied
2370 * instead.
2371 *
2372 * We also use one slot in the tmps stack to hold an extra
2373 * ref to the array, to ensure it doesn't get prematurely
2374 * freed. Again, this is removed before the end of this block.
2375 *
2376 * Note that OPpASSIGN_COMMON_AGG is used to flag a possible
2377 * @a = ($a[0]) case, but the current implementation uses the
2378 * same algorithm regardless, so ignores that flag. (It *is*
2379 * used in the hash branch below, however).
2380 */
2381
2382 /* Reserve slots for ary, plus the elems we're about to copy,
2383 * then protect ary and temporarily void the remaining slots
2384 * with &PL_sv_undef */
2385 EXTEND_MORTAL(nelems + 1);
2386 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(ary);
2387 tmps_base = PL_tmps_ix + 1;
2388 for (i = 0; i < nelems; i++)
2389 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
2390 PL_tmps_ix += nelems;
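            /* The tmps stack now looks roughly like this (sketch):
             *    ... [ ary (extra ref) ][ undef ][ undef ] ... [ undef ]
             *                            ^tmps_base            ^PL_tmps_ix
             * Each undef slot is replaced below by the copied (or stolen)
             * RHS element before anything is stored into ary.
             */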
2391
2392 /* Make a copy of each RHS elem and save on the tmps_stack
2393 * (or pass through where we can optimise away the copy) */
2394
2395 if (UNLIKELY(alias)) {
2396 U32 lval = (gimme == G_ARRAY)
2397 ? (PL_op->op_flags & OPf_MOD || LVRET) : 0;
2398 for (svp = relem; svp <= lastrelem; svp++) {
2399 SV *rsv = *svp;
2400
2401 SvGETMAGIC(rsv);
2402 if (!SvROK(rsv))
2403 DIE(aTHX_ "Assigned value is not a reference");
2404 if (SvTYPE(SvRV(rsv)) > SVt_PVLV)
2405 /* diag_listed_as: Assigned value is not %s reference */
2406 DIE(aTHX_
2407 "Assigned value is not a SCALAR reference");
2408 if (lval)
2409 *svp = rsv = sv_mortalcopy(rsv);
2410 /* XXX else check for weak refs? */
2411 rsv = SvREFCNT_inc_NN(SvRV(rsv));
2412 assert(tmps_base <= PL_tmps_max);
2413 PL_tmps_stack[tmps_base++] = rsv;
2414 }
2415 }
2416 else {
2417 for (svp = relem; svp <= lastrelem; svp++) {
2418 SV *rsv = *svp;
2419
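                    /* Typical beneficiary of the steal below (illustrative):
                     *     @a = map { $_ + 1 } @b;
                     * map's results are refcount-1 SvTEMPs owned only by the
                     * tmps stack, so they can be moved rather than copied. */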
2420 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
2421 /* can skip the copy */
2422 SvREFCNT_inc_simple_void_NN(rsv);
2423 SvTEMP_off(rsv);
2424 }
2425 else {
2426 SV *nsv;
2427 /* do get before newSV, in case it dies and leaks */
2428 SvGETMAGIC(rsv);
2429 nsv = newSV(0);
2430 /* see comment in S_aassign_copy_common about
2431 * SV_NOSTEAL */
2432 sv_setsv_flags(nsv, rsv,
2433 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
2434 rsv = *svp = nsv;
2435 }
2436
2437 assert(tmps_base <= PL_tmps_max);
2438 PL_tmps_stack[tmps_base++] = rsv;
2439 }
2440 }
2441
2442 if (SvRMAGICAL(ary) || AvFILLp(ary) >= 0) /* may be non-empty */
2443 av_clear(ary);
2444
2445 /* store in the array, the SVs that are in the tmps stack */
2446
2447 tmps_base -= nelems;
2448
2449 if (SvMAGICAL(ary) || SvREADONLY(ary) || !AvREAL(ary)) {
2450 /* for arrays we can't cheat with, use the official API */
2451 av_extend(ary, nelems - 1);
2452 for (i = 0; i < nelems; i++) {
2453 SV **svp = &(PL_tmps_stack[tmps_base + i]);
2454 SV *rsv = *svp;
2455 /* A tied store won't take ownership of rsv, so keep
2456 * the 1 refcnt on the tmps stack; otherwise disarm
2457 * the tmps stack entry */
2458 if (av_store(ary, i, rsv))
2459 *svp = &PL_sv_undef;
2460 /* av_store() may have added set magic to rsv */
2461 SvSETMAGIC(rsv);
2462 }
2463 /* disarm ary refcount: see comments below about leak */
2464 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
2465 }
2466 else {
2467 /* directly access/set the guts of the AV */
2468 SSize_t fill = nelems - 1;
2469 if (fill > AvMAX(ary))
2470 av_extend_guts(ary, fill, &AvMAX(ary), &AvALLOC(ary),
2471 &AvARRAY(ary));
2472 AvFILLp(ary) = fill;
2473 Copy(&(PL_tmps_stack[tmps_base]), AvARRAY(ary), nelems, SV*);
2474 /* Quietly remove all the SVs from the tmps stack slots,
2475 * since ary has now taken ownership of the refcnt.
2476 * Also remove ary: which will now leak if we die before
2477 * the SvREFCNT_dec_NN(ary) below */
2478 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
2479 Move(&PL_tmps_stack[tmps_base + nelems],
2480 &PL_tmps_stack[tmps_base - 1],
2481 PL_tmps_ix - (tmps_base + nelems) + 1,
2482 SV*);
2483 PL_tmps_ix -= (nelems + 1);
2484 }
2485
2486 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
2487 /* it's assumed @ISA set magic can't die and leak ary */
2488 SvSETMAGIC(MUTABLE_SV(ary));
2489 SvREFCNT_dec_NN(ary);
2490
2491 relem = lastrelem + 1;
2492 goto no_relems;
2493 }
2494
2495 case SVt_PVHV: { /* normal hash */
2496
2497 SV **svp;
2498 bool dirty_tmps;
2499 SSize_t i;
2500 SSize_t tmps_base;
2501 SSize_t nelems = lastrelem - relem + 1;
2502 HV *hash = MUTABLE_HV(lsv);
2503
2504 if (UNLIKELY(nelems & 1)) {
2505 do_oddball(lastrelem, relem);
2506 /* we have firstlelem to reuse, it's not needed any more */
2507 *++lastrelem = &PL_sv_undef;
2508 nelems++;
2509 }
2510
2511 /* See the SVt_PVAV branch above for a long description of
2512 * how the following all works. The main difference for hashes
2513 * is that we treat keys and values separately (and have
2514 * separate loops for them): as for arrays, values are always
2515 * copied (except for the SvTEMP optimisation), since they
2516 * need to be stored in the hash; while keys are only
2517 * processed where they might get prematurely freed or
2518 * whatever. */
2519
2520 /* tmps stack slots:
2521 * * reserve a slot for the hash keepalive;
2522 * * reserve slots for the hash values we're about to copy;
2523 * * preallocate for the keys we'll possibly copy or refcount bump
2524 * later;
2525 * then protect hash and temporarily void the remaining
2526 * value slots with &PL_sv_undef */
2527 EXTEND_MORTAL(nelems + 1);
2528
2529 /* convert to number of key/value pairs */
2530 nelems >>= 1;
2531
2532 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(hash);
2533 tmps_base = PL_tmps_ix + 1;
2534 for (i = 0; i < nelems; i++)
2535 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
2536 PL_tmps_ix += nelems;
2537
2538 /* Make a copy of each RHS hash value and save on the tmps_stack
2539 * (or pass through where we can optimise away the copy) */
2540
2541 for (svp = relem + 1; svp <= lastrelem; svp += 2) {
2542 SV *rsv = *svp;
2543
2544 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) {
2545 /* can skip the copy */
2546 SvREFCNT_inc_simple_void_NN(rsv);
2547 SvTEMP_off(rsv);
2548 }
2549 else {
2550 SV *nsv;
2551 /* do get before newSV, in case it dies and leaks */
2552 SvGETMAGIC(rsv);
2553 nsv = newSV(0);
2554 /* see comment in S_aassign_copy_common about
2555 * SV_NOSTEAL */
2556 sv_setsv_flags(nsv, rsv,
2557 (SV_DO_COW_SVSETSV|SV_NOSTEAL));
2558 rsv = *svp = nsv;
2559 }
2560
2561 assert(tmps_base <= PL_tmps_max);
2562 PL_tmps_stack[tmps_base++] = rsv;
2563 }
2564 tmps_base -= nelems;
2565
2566
2567 /* possibly protect keys */
2568
2569 if (UNLIKELY(gimme == G_ARRAY)) {
2570 /* handle e.g.
2571 * @a = ((%h = ($$r, 1)), $r = "x");
2572 * $_++ for %h = (1,2,3,4);
2573 */
2574 EXTEND_MORTAL(nelems);
2575 for (svp = relem; svp <= lastrelem; svp += 2)
2576 *svp = sv_mortalcopy_flags(*svp,
2577 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
2578 }
2579 else if (PL_op->op_private & OPpASSIGN_COMMON_AGG) {
2580 /* for possible commonality, e.g.
2581 * %h = ($h{a},1)
2582 * avoid prematurely freeing RHS keys by mortalising
2583 * them.
2584 * For a magic element, make a copy so that its magic is
2585 * called *before* the hash is emptied (which may affect
2586 * a tied value for example).
2587 * In theory we should check for magic keys in all
2588 * cases, not just under OPpASSIGN_COMMON_AGG, but in
2589 * practice, !OPpASSIGN_COMMON_AGG implies only
2590 * constants or padtmps on the RHS.
2591 */
2592 EXTEND_MORTAL(nelems);
2593 for (svp = relem; svp <= lastrelem; svp += 2) {
2594 SV *rsv = *svp;
2595 if (UNLIKELY(SvGMAGICAL(rsv))) {
2596 SSize_t n;
2597 *svp = sv_mortalcopy_flags(*svp,
2598 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL);
2599 /* allow other branch to continue pushing
2600 * onto tmps stack without checking each time */
2601 n = (lastrelem - relem) >> 1;
2602 EXTEND_MORTAL(n);
2603 }
2604 else
2605 PL_tmps_stack[++PL_tmps_ix] =
2606 SvREFCNT_inc_simple_NN(rsv);
2607 }
2608 }
2609
2610 if (SvRMAGICAL(hash) || HvUSEDKEYS(hash))
2611 hv_clear(hash);
2612
2613 /* now assign the keys and values to the hash */
2614
2615 dirty_tmps = FALSE;
2616
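                /* In list context the assignment returns the hash's final
                 * contents, e.g. (illustrative):
                 *     my @r = (%h = (a => 1, a => 2));   # @r is ("a", 2)
                 * so duplicate keys are dropped below and the surviving
                 * values re-fetched from the hash afterwards. */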
2617 if (UNLIKELY(gimme == G_ARRAY)) {
2618 /* @a = (%h = (...)) etc */
2619 SV **svp;
2620 SV **topelem = relem;
2621
2622 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
2623 SV *key = *svp++;
2624 SV *val = *svp;
2625 /* remove duplicates from list we return */
2626 if (!hv_exists_ent(hash, key, 0)) {
2627 /* copy key back: possibly to an earlier
2628 * stack location if we encountered dups earlier.
2629 * The values will be updated later.
2630 */
2631 *topelem = key;
2632 topelem += 2;
2633 }
2634 /* A tied store won't take ownership of val, so keep
2635 * the 1 refcnt on the tmps stack; otherwise disarm
2636 * the tmps stack entry */
2637 if (hv_store_ent(hash, key, val, 0))
2638 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
2639 else
2640 dirty_tmps = TRUE;
2641 /* hv_store_ent() may have added set magic to val */
2642 SvSETMAGIC(val);
2643 }
2644 if (topelem < svp) {
2645 /* at this point we have removed the duplicate key/value
2646 * pairs from the stack, but the remaining values may be
2647 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed
2648 * the (a 2), but the stack now probably contains
2649 * (a <freed> b 3), because storing a=>2 after a=>1
2650 * frees the earlier value. So refresh all values. */
2651 lastrelem = topelem - 1;
2652 while (relem < lastrelem) {
2653 HE *he;
2654 he = hv_fetch_ent(hash, *relem++, 0, 0);
2655 *relem++ = (he ? HeVAL(he) : &PL_sv_undef);
2656 }
2657 }
2658 }
2659 else {
2660 SV **svp;
2661 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) {
2662 SV *key = *svp++;
2663 SV *val = *svp;
2664 if (hv_store_ent(hash, key, val, 0))
2665 PL_tmps_stack[tmps_base + i] = &PL_sv_undef;
2666 else
2667 dirty_tmps = TRUE;
2668 /* hv_store_ent() may have added set magic to val */
2669 SvSETMAGIC(val);
2670 }
2671 }
2672
2673 if (dirty_tmps) {
2674 * there are still some 'live' refcounts on the tmps stack
2675 * - usually caused by storing into a tied hash. So let
2676 * free_tmps() do the proper but slow job later.
2677 * Just disarm hash refcount: see comments below about leak
2678 */
2679 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef;
2680 }
2681 else {
2682 /* Quietly remove all the SVs from the tmps stack slots,
2683 * since hash has now taken ownership of the refcnt.
2684 * Also remove hash: which will now leak if we die before
2685 * the SvREFCNT_dec_NN(hash) below */
2686 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems))
2687 Move(&PL_tmps_stack[tmps_base + nelems],
2688 &PL_tmps_stack[tmps_base - 1],
2689 PL_tmps_ix - (tmps_base + nelems) + 1,
2690 SV*);
2691 PL_tmps_ix -= (nelems + 1);
2692 }
2693
2694 SvREFCNT_dec_NN(hash);
2695
2696 relem = lastrelem + 1;
2697 goto no_relems;
2698 }
2699
2700 default:
2701 if (!SvIMMORTAL(lsv)) {
2702 SV *ref;
2703
2704 if (UNLIKELY(
2705 SvTEMP(lsv) && !SvSMAGICAL(lsv) && SvREFCNT(lsv) == 1 &&
2706 (!isGV_with_GP(lsv) || SvFAKE(lsv)) && ckWARN(WARN_MISC)
2707 ))
2708 Perl_warner(aTHX_
2709 packWARN(WARN_MISC),
2710 "Useless assignment to a temporary"
2711 );
2712
2713 /* avoid freeing $$lsv if it might be needed for further
2714 * elements, e.g. ($ref, $foo) = (1, $$ref) */
2715 if ( SvROK(lsv)
2716 && ( ((ref = SvRV(lsv)), SvREFCNT(ref)) == 1)
2717 && lelem <= lastlelem
2718 ) {
2719 SSize_t ix;
2720 SvREFCNT_inc_simple_void_NN(ref);
2721 /* an unrolled sv_2mortal */
2722 ix = ++PL_tmps_ix;
2723 if (UNLIKELY(ix >= PL_tmps_max))
2724 /* speculatively grow enough to cover other
2725 * possible refs */
2726 (void)tmps_grow_p(ix + (lastlelem - lelem));
2727 PL_tmps_stack[ix] = ref;
2728 }
2729
2730 sv_setsv(lsv, *relem);
2731 *relem = lsv;
2732 SvSETMAGIC(lsv);
2733 }
2734 if (++relem > lastrelem)
2735 goto no_relems;
2736 break;
2737 } /* switch */
2738 } /* while */
2739
2740
2741 no_relems:
2742
2743 /* simplified lelem loop for when there are no relems left */
2744 while (LIKELY(lelem <= lastlelem)) {
2745 SV *lsv = *lelem++;
2746
2747 TAINT_NOT; /* Each item stands on its own, taintwise. */
2748
2749 if (UNLIKELY(!lsv)) {
2750 lsv = *lelem++;
2751 ASSUME(SvTYPE(lsv) == SVt_PVAV);
2752 }
2753
2754 switch (SvTYPE(lsv)) {
2755 case SVt_PVAV:
2756 if (SvRMAGICAL(lsv) || AvFILLp((SV*)lsv) >= 0) {
2757 av_clear((AV*)lsv);
2758 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA))
2759 SvSETMAGIC(lsv);
2760 }
2761 break;
2762
2763 case SVt_PVHV:
2764 if (SvRMAGICAL(lsv) || HvUSEDKEYS((HV*)lsv))
2765 hv_clear((HV*)lsv);
2766 break;
2767
2768 default:
2769 if (!SvIMMORTAL(lsv)) {
2770 sv_set_undef(lsv);
2771 SvSETMAGIC(lsv);
2772 *relem++ = lsv;
2773 }
2774 break;
2775 } /* switch */
2776 } /* while */
2777
2778 TAINT_NOT; /* result of list assign isn't tainted */
2779
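    /* The delayed-magic block below is what lets e.g.
     *     ($<, $>) = ($>, $<);      # swap real and effective UID
     * work as a single operation: while DM_DELAY is set, the magic on
     * $< / $> / $( / $) only records the requested IDs, and the actual
     * set*uid()/set*gid() calls are made here once the whole list has
     * been assigned. (Illustrative summary.) */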
2780 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) {
2781 /* Will be used to set PL_tainting below */
2782 Uid_t tmp_uid = PerlProc_getuid();
2783 Uid_t tmp_euid = PerlProc_geteuid();
2784 Gid_t tmp_gid = PerlProc_getgid();
2785 Gid_t tmp_egid = PerlProc_getegid();
2786
2787 /* XXX $> et al currently silently ignore failures */
2788 if (PL_delaymagic & DM_UID) {
2789#ifdef HAS_SETRESUID
2790 PERL_UNUSED_RESULT(
2791 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
2792 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1,
2793 (Uid_t)-1));
2794#elif defined(HAS_SETREUID)
2795 PERL_UNUSED_RESULT(
2796 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1,
2797 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1));
2798#else
2799# ifdef HAS_SETRUID
2800 if ((PL_delaymagic & DM_UID) == DM_RUID) {
2801 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid));
2802 PL_delaymagic &= ~DM_RUID;
2803 }
2804# endif /* HAS_SETRUID */
2805# ifdef HAS_SETEUID
2806 if ((PL_delaymagic & DM_UID) == DM_EUID) {
2807 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid));
2808 PL_delaymagic &= ~DM_EUID;
2809 }
2810# endif /* HAS_SETEUID */
2811 if (PL_delaymagic & DM_UID) {
2812 if (PL_delaymagic_uid != PL_delaymagic_euid)
2813 DIE(aTHX_ "No setreuid available");
2814 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid));
2815 }
2816#endif /* HAS_SETRESUID */
2817
2818 tmp_uid = PerlProc_getuid();
2819 tmp_euid = PerlProc_geteuid();
2820 }
2821 /* XXX $> et al currently silently ignore failures */
2822 if (PL_delaymagic & DM_GID) {
2823#ifdef HAS_SETRESGID
2824 PERL_UNUSED_RESULT(
2825 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
2826 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1,
2827 (Gid_t)-1));
2828#elif defined(HAS_SETREGID)
2829 PERL_UNUSED_RESULT(
2830 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1,
2831 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1));
2832#else
2833# ifdef HAS_SETRGID
2834 if ((PL_delaymagic & DM_GID) == DM_RGID) {
2835 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid));
2836 PL_delaymagic &= ~DM_RGID;
2837 }
2838# endif /* HAS_SETRGID */
2839# ifdef HAS_SETEGID
2840 if ((PL_delaymagic & DM_GID) == DM_EGID) {
2841 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid));
2842 PL_delaymagic &= ~DM_EGID;
2843 }
2844# endif /* HAS_SETEGID */
2845 if (PL_delaymagic & DM_GID) {
2846 if (PL_delaymagic_gid != PL_delaymagic_egid)
2847 DIE(aTHX_ "No setregid available");
2848 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid));
2849 }
2850#endif /* HAS_SETRESGID */
2851
2852 tmp_gid = PerlProc_getgid();
2853 tmp_egid = PerlProc_getegid();
2854 }
2855 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) );
2856#ifdef NO_TAINT_SUPPORT
2857 PERL_UNUSED_VAR(tmp_uid);
2858 PERL_UNUSED_VAR(tmp_euid);
2859 PERL_UNUSED_VAR(tmp_gid);
2860 PERL_UNUSED_VAR(tmp_egid);
2861#endif
2862 }
2863 PL_delaymagic = old_delaymagic;
2864
2865 if (gimme == G_VOID)
2866 SP = firstrelem - 1;
2867 else if (gimme == G_SCALAR) {
2868 SP = firstrelem;
2869 EXTEND(SP,1);
2870 if (PL_op->op_private & OPpASSIGN_TRUEBOOL)
2871 SETs((firstlelem - firstrelem) ? &PL_sv_yes : &PL_sv_zero);
2872 else {
2873 dTARGET;
2874 SETi(firstlelem - firstrelem);
2875 }
2876 }
2877 else
2878 SP = relem - 1;
2879
2880 RETURN;
2881}
2882
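/* pp_qr: implements qr//. Illustrative usage:
 *     my $re = qr/ab+c/i;
 *     print ref $re;        # normally "Regexp" (the engine's qr_package)
 * A fresh mortal RV is returned each time, so every evaluation of qr//
 * yields a distinct object.
 */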
2883PP(pp_qr)
2884{
2885 dSP;
2886 PMOP * const pm = cPMOP;
2887 REGEXP * rx = PM_GETRE(pm);
2888 regexp *prog = ReANY(rx);
2889 SV * const pkg = RXp_ENGINE(prog)->qr_package(aTHX_ (rx));
2890 SV * const rv = sv_newmortal();
2891 CV **cvp;
2892 CV *cv;
2893
2894 SvUPGRADE(rv, SVt_IV);
2895 /* For a subroutine describing itself as "This is a hacky workaround" I'm
2896 loath to use it here, but it seems to be the right fix. Or close.
2897 The key part appears to be that it's essential for pp_qr to return a new
2898 object (SV), which implies that there needs to be an effective way to
2899 generate a new SV from the existing SV that is pre-compiled in the
2900 optree. */
2901 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx)));
2902 SvROK_on(rv);
2903
2904 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv);
2905 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) {
2906 *cvp = cv_clone(cv);
2907 SvREFCNT_dec_NN(cv);
2908 }
2909
2910 if (pkg) {
2911 HV *const stash = gv_stashsv(pkg, GV_ADD);
2912 SvREFCNT_dec_NN(pkg);
2913 (void)sv_bless(rv, stash);
2914 }
2915
2916 if (UNLIKELY(RXp_ISTAINTED(prog))) {
2917 SvTAINTED_on(rv);
2918 SvTAINTED_on(SvRV(rv));
2919 }
2920 XPUSHs(rv);
2921 RETURN;
2922}
2923
2924PP(pp_match)
2925{
2926 dSP; dTARG;
2927 PMOP *pm = cPMOP;
2928 PMOP *dynpm = pm;
2929 const char *s;
2930 const char *strend;
2931 SSize_t curpos = 0; /* initial pos() or current $+[0] */
2932 I32 global;
2933 U8 r_flags = 0;
2934 const char *truebase; /* Start of string */
2935 REGEXP *rx = PM_GETRE(pm);
2936 regexp *prog = ReANY(rx);
2937 bool rxtainted;
2938 const U8 gimme = GIMME_V;
2939 STRLEN len;
2940 const I32 oldsave = PL_savestack_ix;
2941 I32 had_zerolen = 0;
2942 MAGIC *mg = NULL;
2943
2944 if (PL_op->op_flags & OPf_STACKED)
2945 TARG = POPs;
2946 else {
2947 if (ARGTARG)
2948 GETTARGET;
2949 else {
2950 TARG = DEFSV;
2951 }
2952 EXTEND(SP,1);
2953 }
2954
2955 PUTBACK; /* EVAL blocks need stack_sp. */
2956 /* Skip get-magic if this is a qr// clone, because regcomp has
2957 already done it. */
2958 truebase = prog->mother_re
2959 ? SvPV_nomg_const(TARG, len)
2960 : SvPV_const(TARG, len);
2961 if (!truebase)
2962 DIE(aTHX_ "panic: pp_match");
2963 strend = truebase + len;
2964 rxtainted = (RXp_ISTAINTED(prog) ||
2965 (TAINT_get && (pm->op_pmflags & PMf_RETAINT)));
2966 TAINT_NOT;
2967
2968 /* We need to know this in case we fail out early - pos() must be reset */
2969 global = dynpm->op_pmflags & PMf_GLOBAL;
2970
2971 /* PMf_USED is set after a ?? matches once */
2972 if (
2973#ifdef USE_ITHREADS
2974 SvREADONLY(PL_regex_pad[pm->op_pmoffset])
2975#else
2976 pm->op_pmflags & PMf_USED
2977#endif
2978 ) {
2979 DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once"));
2980 goto nope;
2981 }
2982
2983 /* handle the empty pattern */
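    /* e.g. (illustrative)
     *     "abc" =~ /b/;
     *     print "again" if "abc" =~ //;   # // reuses the last successful pattern
     */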
2984 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
2985 if (PL_curpm == PL_reg_curpm) {
2986 if (PL_curpm_under) {
2987 if (PL_curpm_under == PL_reg_curpm) {
2988 Perl_croak(aTHX_ "Infinite recursion via empty pattern");
2989 } else {
2990 pm = PL_curpm_under;
2991 }
2992 }
2993 } else {
2994 pm = PL_curpm;
2995 }
2996 rx = PM_GETRE(pm);
2997 prog = ReANY(rx);
2998 }
2999
3000 if (RXp_MINLEN(prog) >= 0 && (STRLEN)RXp_MINLEN(prog) > len) {
3001 DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%"
3002 UVuf " < %" IVdf ")\n",
3003 (UV)len, (IV)RXp_MINLEN(prog)));
3004 goto nope;
3005 }
3006
3007 /* get pos() if //g */
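    /* e.g. while ($s =~ /\d+/g) { ... } resumes each iteration from pos($s),
     * kept as a byte offset in the PERL_MAGIC_regex_global magic looked up
     * below. (Illustrative.) */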
3008 if (global) {
3009 mg = mg_find_mglob(TARG);
3010 if (mg && mg->mg_len >= 0) {
3011 curpos = MgBYTEPOS(mg, TARG, truebase, len);
3012 /* last time pos() was set, it was zero-length match */
3013 if (mg->mg_flags & MGf_MINMATCH)
3014 had_zerolen = 1;
3015 }
3016 }
3017
3018#ifdef PERL_SAWAMPERSAND
3019 if ( RXp_NPARENS(prog)
3020 || PL_sawampersand
3021 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
3022 || (dynpm->op_pmflags & PMf_KEEPCOPY)
3023 )
3024#endif
3025 {
3026 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE);
3027 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer
3028 * only on the first iteration. Therefore we need to copy $' as well
3029 * as $&, to make the rest of the string available for captures in
3030 * subsequent iterations */
3031 if (! (global && gimme == G_ARRAY))
3032 r_flags |= REXEC_COPY_SKIP_POST;
3033 };
3034#ifdef PERL_SAWAMPERSAND
3035 if (dynpm->op_pmflags & PMf_KEEPCOPY)
3036 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */
3037 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST);
3038#endif
3039
3040 s = truebase;
3041
3042 play_it_again:
3043 if (global)
3044 s = truebase + curpos;
3045
3046 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase,
3047 had_zerolen, TARG, NULL, r_flags))
3048 goto nope;
3049
3050 PL_curpm = pm;
3051 if (dynpm->op_pmflags & PMf_ONCE)
3052#ifdef USE_ITHREADS
3053 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]);
3054#else
3055 dynpm->op_pmflags |= PMf_USED;
3056#endif
3057
3058 if (rxtainted)
3059 RXp_MATCH_TAINTED_on(prog);
3060 TAINT_IF(RXp_MATCH_TAINTED(prog));
3061
3062 /* update pos */
3063
3064 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) {
3065 if (!mg)
3066 mg = sv_magicext_mglob(TARG);
3067 MgBYTEPOS_set(mg, TARG, truebase, RXp_OFFS(prog)[0].end);
3068 if (RXp_ZERO_LEN(prog))
3069 mg->mg_flags |= MGf_MINMATCH;
3070 else
3071 mg->mg_flags &= ~MGf_MINMATCH;
3072 }
3073
3074 if ((!RXp_NPARENS(prog) && !global) || gimme != G_ARRAY) {
3075 LEAVE_SCOPE(oldsave);
3076 RETPUSHYES;
3077 }
3078
3079 /* push captures on stack */
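    /* e.g. (illustrative)
     *     my ($d, $m, $y) = $date =~ m{(\d+)/(\d+)/(\d+)};  # captures returned
     *     my @words = $text =~ /\w+/g;   # no parens + //g: one $& per match
     */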
3080
3081 {
3082 const I32 nparens = RXp_NPARENS(prog);
3083 I32 i = (global && !nparens) ? 1 : 0;
3084
3085 SPAGAIN; /* EVAL blocks could move the stack. */
3086 EXTEND(SP, nparens + i);
3087 EXTEND_MORTAL(nparens + i);
3088 for (i = !i; i <= nparens; i++) {
3089 PUSHs(sv_newmortal());
3090 if (LIKELY((RXp_OFFS(prog)[i].start != -1)
3091 && RXp_OFFS(prog)[i].end != -1 ))
3092 {
3093 const I32 len = RXp_OFFS(prog)[i].end - RXp_OFFS(prog)[i].start;
3094 const char * const s = RXp_OFFS(prog)[i].start + truebase;
3095 if (UNLIKELY( RXp_OFFS(prog)[i].end < 0
3096 || RXp_OFFS(prog)[i].start < 0
3097 || len < 0
3098 || len > strend - s)
3099 )
3100 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, "
3101 "start=%ld, end=%ld, s=%p, strend=%p, len=%" UVuf,
3102 (long) i, (long) RXp_OFFS(prog)[i].start,
3103 (long)RXp_OFFS(prog)[i].end, s, strend, (UV) len);
3104 sv_setpvn(*SP, s, len);
3105 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len))
3106 SvUTF8_on(*SP);
3107 }
3108 }
3109 if (global) {
3110 curpos = (UV)RXp_OFFS(prog)[0].end;
3111 had_zerolen = RXp_ZERO_LEN(prog);
3112 PUTBACK; /* EVAL blocks may use stack */
3113 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST;
3114 goto play_it_again;
3115 }
3116 LEAVE_SCOPE(oldsave);
3117 RETURN;
3118 }
3119 NOT_REACHED; /* NOTREACHED */
3120
3121 nope:
3122 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) {
3123 if (!mg)
3124 mg = mg_find_mglob(TARG);
3125 if (mg)
3126 mg->mg_len = -1;
3127 }
3128 LEAVE_SCOPE(oldsave);
3129 if (gimme == G_ARRAY)
3130 RETURN;
3131 RETPUSHNO;
3132}
3133
3134OP *
3135Perl_do_readline(pTHX)
3136{
3137 dSP; dTARGETSTACKED;
3138 SV *sv;
3139 STRLEN tmplen = 0;
3140 STRLEN offset;
3141 PerlIO *fp;
3142 IO * const io = GvIO(PL_last_in_gv);
3143 const I32 type = PL_op->op_type;
3144 const U8 gimme = GIMME_V;
3145
3146 if (io) {
3147 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar);
3148 if (mg) {
3149 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0);
3150 if (gimme == G_SCALAR) {
3151 SPAGAIN;
3152 SvSetSV_nosteal(TARG, TOPs);
3153 SETTARG;
3154 }
3155 return NORMAL;
3156 }
3157 }
3158 fp = NULL;
3159 if (io) {
3160 fp = IoIFP(io);
3161 if (!fp) {
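            /* <ARGV> handling, i.e. the implicit filehandle of while (<>):
             * on the first read with an empty @ARGV it falls back to "-"
             * (standard input); otherwise nextargv() below opens each file
             * named in @ARGV in turn. (Illustrative summary.) */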
3162 if (IoFLAGS(io) & IOf_ARGV) {
3163 if (IoFLAGS(io) & IOf_START) {
3164 IoLINES(io) = 0;
3165 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) {
3166 IoFLAGS(io) &= ~IOf_START;
3167 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0);
3168 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */
3169 sv_setpvs(GvSVn(PL_last_in_gv), "-");
3170 SvSETMAGIC(GvSV(PL_last_in_gv));
3171 fp = IoIFP(io);
3172 goto have_fp;
3173 }
3174 }
3175 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
3176 if (!fp) { /* Note: fp != IoIFP(io) */
3177 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/
3178 }
3179 }
3180 else if (type == OP_GLOB)
3181 fp = Perl_start_glob(aTHX_ POPs, io);
3182 }
3183 else if (type == OP_GLOB)
3184 SP--;
3185 else if (IoTYPE(io) == IoTYPE_WRONLY) {
3186 report_wrongway_fh(PL_last_in_gv, '>');
3187 }
3188 }
3189 if (!fp) {
3190 if ((!io || !(IoFLAGS(io) & IOf_START))
3191 && ckWARN(WARN_CLOSED)
3192 && type != OP_GLOB)
3193 {
3194 report_evil_fh(PL_last_in_gv);
3195 }
3196 if (gimme == G_SCALAR) {
3197 /* undef TARG, and push that undefined value */
3198 if (type != OP_RCATLINE) {
3199 sv_set_undef(TARG);
3200 }
3201 PUSHTARG;
3202 }
3203 RETURN;
3204 }
3205 have_fp:
3206 if (gimme == G_SCALAR) {
3207 sv = TARG;
3208 if (type == OP_RCATLINE && SvGMAGICAL(sv))
3209 mg_get(sv);
3210 if (SvROK(sv)) {
3211 if (type == OP_RCATLINE)
3212 SvPV_force_nomg_nolen(sv);
3213 else
3214 sv_unref(sv);
3215 }
3216 else if (isGV_with_GP(sv)) {
3217 SvPV_force_nomg_nolen(sv);
3218 }
3219 SvUPGRADE(sv, SVt_PV);
3220 tmplen = SvLEN(sv); /* remember if already alloced */
3221 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) {
3222 /* try short-buffering it. Please update t/op/readline.t
3223 * if you change the growth length.
3224 */
3225 Sv_Grow(sv, 80);
3226 }
3227 offset = 0;
3228 if (type == OP_RCATLINE && SvOK(sv)) {
3229 if (!SvPOK(sv)) {
3230 SvPV_force_nomg_nolen(sv);
3231 }
3232 offset = SvCUR(sv);
3233 }
3234 }
3235 else {
3236 sv = sv_2mortal(newSV(80));
3237 offset = 0;
3238 }
3239
3240 /* This should not be marked tainted if the fp is marked clean */
3241#define MAYBE_TAINT_LINE(io, sv) \
3242 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \
3243 TAINT; \
3244 SvTAINTED_on(sv); \
3245 }
3246
3247/* delay EOF state for a snarfed empty file */
3248#define SNARF_EOF(gimme,rs,io,sv) \
3249 (gimme != G_SCALAR || SvCUR(sv) \
3250 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs))
3251
3252 for (;;) {
3253 PUTBACK;
3254 if (!sv_gets(sv, fp, offset)
3255 && (type == OP_GLOB
3256 || SNARF_EOF(gimme, PL_rs, io, sv)
3257 || PerlIO_error(fp)))
3258 {
3259 PerlIO_clearerr(fp);
3260 if (IoFLAGS(io) & IOf_ARGV) {
3261 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL);
3262 if (fp)
3263 continue;
3264 (void)do_close(PL_last_in_gv, FALSE);
3265 }
3266 else if (type == OP_GLOB) {
3267 if (!do_close(PL_last_in_gv, FALSE)) {
3268 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB),
3269 "glob failed (child exited with status %d%s)",
3270 (int)(STATUS_CURRENT >> 8),
3271 (STATUS_CURRENT & 0x80) ? ", core dumped" : "");
3272 }
3273 }
3274 if (gimme == G_SCALAR) {
3275 if (type != OP_RCATLINE) {
3276 SV_CHECK_THINKFIRST_COW_DROP(TARG);
3277 SvOK_off(TARG);
3278 }
3279 SPAGAIN;
3280 PUSHTARG;
3281 }
3282 MAYBE_TAINT_LINE(io, sv);
3283 RETURN;
3284 }
3285 MAYBE_TAINT_LINE(io, sv);
3286 IoLINES(io)++;
3287 IoFLAGS(io) |= IOf_NOLINE;
3288 SvSETMAGIC(sv);
3289 SPAGAIN;
3290 XPUSHs(sv);
3291 if (type == OP_GLOB) {
3292 const char *t1;
3293 Stat_t statbuf;
3294
3295 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) {
3296 char * const tmps = SvEND(sv) - 1;
3297 if (*tmps == *SvPVX_const(PL_rs)) {
3298 *tmps = '\0';
3299 SvCUR_set(sv, SvCUR(sv) - 1);
3300 }
3301 }
3302 for (t1 = SvPVX_const(sv); *t1; t1++)
3303#ifdef __VMS
3304 if (strchr("*%?", *t1))
3305#else
3306 if (strchr("$&*(){}[]'\";\\|?<>~`", *t1))
3307#endif
3308 break;
3309 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) {
3310 (void)POPs; /* Unmatched wildcard? Chuck it... */
3311 continue;
3312 }
3313 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */
3314 if (ckWARN(WARN_UTF8)) {
3315 const U8 * const s = (const U8*)SvPVX_const(sv) + offset;
3316 const STRLEN len = SvCUR(sv) - offset;
3317 const U8 *f;
3318
3319 if (!is_utf8_string_loc(s, len, &f))
3320 /* Emulate :encoding(utf8) warning in the same case. */
3321 Perl_warner(aTHX_ packWARN(WARN_UTF8),
3322 "utf8 \"\\x%02X\" does not map to Unicode",
3323 f < (U8*)SvEND(sv) ? *f : 0);
3324 }
3325 }
3326 if (gimme == G_ARRAY) {
3327 if (SvLEN(sv) - SvCUR(sv) > 20) {
3328 SvPV_shrink_to_cur(sv);
3329 }
3330 sv = sv_2mortal(newSV(80));
3331 continue;
3332 }
3333 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) {
3334 /* try to reclaim a bit of scalar space (only on 1st alloc) */
3335 const STRLEN new_len
3336 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */
3337 SvPV_renew(sv, new_len);
3338 }
3339 RETURN;
3340 }
3341}
3342
3343PP(pp_helem)
3344{
3345 dSP;
3346 HE* he;
3347 SV **svp;
3348 SV * const keysv = POPs;
3349 HV * const hv = MUTABLE_HV(POPs);
3350 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
3351 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
3352 SV *sv;
3353 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
3354 bool preeminent = TRUE;
3355
3356 if (SvTYPE(hv) != SVt_PVHV)
3357 RETPUSHUNDEF;
3358
3359 if (localizing) {
3360 MAGIC *mg;
3361 HV *stash;
3362
3363 /* If we can determine whether the element exists,
3364 * try to preserve the existence status of a tied hash
3365 * element by using EXISTS and DELETE if possible.
3366 * Fall back to FETCH and STORE otherwise. */
3367 if (SvCANEXISTDELETE(hv))
3368 preeminent = hv_exists_ent(hv, keysv, 0);
3369 }
3370
3371 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
3372 svp = he ? &HeVAL(he) : NULL;
3373 if (lval) {
3374 if (!svp || !*svp || *svp == &PL_sv_undef) {
3375 SV* lv;
3376 SV* key2;
3377 if (!defer) {
3378 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
3379 }
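            /* Deferred element: build a PERL_MAGIC_defelem lvalue proxy, as
             * used for e.g. foo($h{nosuchkey}) - the real hash entry is only
             * created if the sub actually writes to $_[0]. (Illustrative.) */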
3380 lv = sv_newmortal();
3381 sv_upgrade(lv, SVt_PVLV);
3382 LvTYPE(lv) = 'y';
3383 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0);
3384 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */
3385 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
3386 LvTARGLEN(lv) = 1;
3387 PUSHs(lv);
3388 RETURN;
3389 }
3390 if (localizing) {
3391 if (HvNAME_get(hv) && isGV_or_RVCV(*svp))
3392 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL));
3393 else if (preeminent)
3394 save_helem_flags(hv, keysv, svp,
3395 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC);
3396 else
3397 SAVEHDELETE(hv, keysv);
3398 }
3399 else if (PL_op->op_private & OPpDEREF) {
3400 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
3401 RETURN;
3402 }
3403 }
3404 sv = (svp && *svp ? *svp : &PL_sv_undef);
3405 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this
3406 * was to make C<local $tied{foo} = $tied{foo}> possible.
3407 * However, it seems no longer to be needed for that purpose, and
3408 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g)>
3409 * would loop endlessly since the pos magic is getting set on the
3410 * mortal copy and lost. However, the copy has the effect of
3411 * triggering the get magic, and losing it altogether made things like
3412 * C<$tied{foo};> in void context no longer do get magic, which some
3413 * code relied on. Also, delayed triggering of magic on @+ and friends
3414 * meant the original regex may be out of scope by now. So as a
3415 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it
3416 * being called too many times). */
3417 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv))
3418 mg_get(sv);
3419 PUSHs(sv);
3420 RETURN;
3421}
3422
3423
3424/* a stripped-down version of Perl_softref2xv() for use by
3425 * pp_multideref(), which doesn't use PL_op->op_flags */
3426
3427STATIC GV *
3428S_softref2xv_lite(pTHX_ SV *const sv, const char *const what,
3429 const svtype type)
3430{
3431 if (PL_op->op_private & HINT_STRICT_REFS) {
3432 if (SvOK(sv))
3433 Perl_die(aTHX_ PL_no_symref_sv, sv,
3434 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what);
3435 else
3436 Perl_die(aTHX_ PL_no_usym, what);
3437 }
3438 if (!SvOK(sv))
3439 Perl_die(aTHX_ PL_no_usym, what);
3440 return gv_fetchsv_nomg(sv, GV_ADD, type);
3441}
3442
3443
3444/* Handle one or more aggregate derefs and array/hash indexings, e.g.
3445 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
3446 *
3447 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
3448 * Each of these either contains a set of actions, or an argument, such as
3449 * an IV to use as an array index, or a lexical var to retrieve.
3450 * Several actions are stored per UV; we keep shifting new actions off the
3451 * one UV, and only reload when it becomes zero.
3452 */
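/* Rough shape of the aux list for my $v = $a[0]{$key} (an illustrative
 * sketch, not an exact dump):
 *     [ packed action words ]
 *     [ pad offset of @a    ]   MDEREF_AV_padav_aelem
 *     [ IV 0                ]   MDEREF_INDEX_const
 *     [ pad offset of $key  ]   MDEREF_HV_vivify_rv2hv_helem + MDEREF_INDEX_padsv
 * B::Concise on such an expression shows the real encoding.
 */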
3453
3454PP(pp_multideref)
3455{
3456 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
3457 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
3458 UV actions = items->uv;
3459
3460 assert(actions);
3461 /* this tells find_uninit_var() where we're up to */
3462 PL_multideref_pc = items;
3463
3464 while (1) {
3465 /* there are three main classes of action; the first retrieves
3466 * the initial AV or HV from a variable or the stack; the second
3467 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
3468 * the third an unrolled (/DREFHV, rv2hv, helem).
3469 */
3470 switch (actions & MDEREF_ACTION_MASK) {
3471
3472 case MDEREF_reload:
3473 actions = (++items)->uv;
3474 continue;
3475
3476 case MDEREF_AV_padav_aelem: /* $lex[...] */
3477 sv = PAD_SVl((++items)->pad_offset);
3478 goto do_AV_aelem;
3479
3480 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
3481 sv = UNOP_AUX_item_sv(++items);
3482 assert(isGV_with_GP(sv));
3483 sv = (SV*)GvAVn((GV*)sv);
3484 goto do_AV_aelem;
3485
3486 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
3487 {
3488 dSP;
3489 sv = POPs;
3490 PUTBACK;
3491 goto do_AV_rv2av_aelem;
3492 }
3493
3494 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
3495 sv = UNOP_AUX_item_sv(++items);
3496 assert(isGV_with_GP(sv));
3497 sv = GvSVn((GV*)sv);
3498 goto do_AV_vivify_rv2av_aelem;
3499
3500 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
3501 sv = PAD_SVl((++items)->pad_offset);
3502 /* FALLTHROUGH */
3503
3504 do_AV_vivify_rv2av_aelem:
3505 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
3506 /* this is the OPpDEREF action normally found at the end of
3507 * ops like aelem, helem, rv2sv */
3508 sv = vivify_ref(sv, OPpDEREF_AV);
3509 /* FALLTHROUGH */
3510
3511 do_AV_rv2av_aelem:
3512 /* this is basically a copy of pp_rv2av when it just has the
3513 * sKR/1 flags */
3514 SvGETMAGIC(sv);
3515 if (LIKELY(SvROK(sv))) {
3516 if (UNLIKELY(SvAMAGIC(sv))) {
3517 sv = amagic_deref_call(sv, to_av_amg);
3518 }
3519 sv = SvRV(sv);
3520 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
3521 DIE(aTHX_ "Not an ARRAY reference");
3522 }
3523 else if (SvTYPE(sv) != SVt_PVAV) {
3524 if (!isGV_with_GP(sv))
3525 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
3526 sv = MUTABLE_SV(GvAVn((GV*)sv));
3527 }
3528 /* FALLTHROUGH */
3529
3530 do_AV_aelem:
3531 {
3532 /* retrieve the key; this may be either a lexical or package
3533 * var (whose index/ptr is stored as an item) or a signed
3534 * integer constant stored as an item.
3535 */
3536 SV *elemsv;
3537 IV elem = 0; /* to shut up stupid compiler warnings */
3538
3539
3540 assert(SvTYPE(sv) == SVt_PVAV);
3541
3542 switch (actions & MDEREF_INDEX_MASK) {
3543 case MDEREF_INDEX_none:
3544 goto finish;
3545 case MDEREF_INDEX_const:
3546 elem = (++items)->iv;
3547 break;
3548 case MDEREF_INDEX_padsv:
3549 elemsv = PAD_SVl((++items)->pad_offset);
3550 goto check_elem;
3551 case MDEREF_INDEX_gvsv:
3552 elemsv = UNOP_AUX_item_sv(++items);
3553 assert(isGV_with_GP(elemsv));
3554 elemsv = GvSVn((GV*)elemsv);
3555 check_elem:
3556 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv)
3557 && ckWARN(WARN_MISC)))
3558 Perl_warner(aTHX_ packWARN(WARN_MISC),
3559 "Use of reference \"%" SVf "\" as array index",
3560 SVfARG(elemsv));
3561 /* the only time that S_find_uninit_var() needs this
3562 * is to determine which index value triggered the
3563 * undef warning. So just update it here. Note that
3564 * since we don't save and restore this var (e.g. for
3565 * tie or overload execution), its value will be
3566 * meaningless apart from just here */
3567 PL_multideref_pc = items;
3568 elem = SvIV(elemsv);
3569 break;
3570 }
3571
3572
3573 /* this is basically a copy of pp_aelem with OPpDEREF skipped */
3574
3575 if (!(actions & MDEREF_FLAG_last)) {
3576 SV** svp = av_fetch((AV*)sv, elem, 1);
3577 if (!svp || ! (sv=*svp))
3578 DIE(aTHX_ PL_no_aelem, elem);
3579 break;
3580 }
3581
3582 if (PL_op->op_private &
3583 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
3584 {
3585 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
3586 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no;
3587 }
3588 else {
3589 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
3590 sv = av_delete((AV*)sv, elem, discard);
3591 if (discard)
3592 return NORMAL;
3593 if (!sv)
3594 sv = &PL_sv_undef;
3595 }
3596 }
3597 else {
3598 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
3599 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
3600 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
3601 bool preeminent = TRUE;
3602 AV *const av = (AV*)sv;
3603 SV** svp;
3604
3605 if (UNLIKELY(localizing)) {
3606 MAGIC *mg;
3607 HV *stash;
3608
3609 /* If we can determine whether the element exists,
3610 * try to preserve the existence status of a tied array
3611 * element by using EXISTS and DELETE if possible.
3612 * Fall back to FETCH and STORE otherwise. */
3613 if (SvCANEXISTDELETE(av))
3614 preeminent = av_exists(av, elem);
3615 }
3616
3617 svp = av_fetch(av, elem, lval && !defer);
3618
3619 if (lval) {
3620 if (!svp || !(sv = *svp)) {
3621 IV len;
3622 if (!defer)
3623 DIE(aTHX_ PL_no_aelem, elem);
3624 len = av_tindex(av);
3625 sv = sv_2mortal(newSVavdefelem(av,
3626 /* Resolve a negative index now, unless it points
3627 * before the beginning of the array, in which
3628 * case record it for error reporting in
3629 * magic_setdefelem. */
3630 elem < 0 && len + elem >= 0
3631 ? len + elem : elem, 1));
3632 }
3633 else {
3634 if (UNLIKELY(localizing)) {
3635 if (preeminent) {
3636 save_aelem(av, elem, svp);
3637 sv = *svp; /* may have changed */
3638 }
3639 else
3640 SAVEADELETE(av, elem);
3641 }
3642 }
3643 }
3644 else {
3645 sv = (svp ? *svp : &PL_sv_undef);
3646 /* see note in pp_helem() */
3647 if (SvRMAGICAL(av) && SvGMAGICAL(sv))
3648 mg_get(sv);
3649 }
3650 }
3651
3652 }
3653 finish:
3654 {
3655 dSP;
3656 XPUSHs(sv);
3657 RETURN;
3658 }
3659 /* NOTREACHED */
3660
3661
3662
3663
3664 case MDEREF_HV_padhv_helem: /* $lex{...} */
3665 sv = PAD_SVl((++items)->pad_offset);
3666 goto do_HV_helem;
3667
3668 case MDEREF_HV_gvhv_helem: /* $pkg{...} */
3669 sv = UNOP_AUX_item_sv(++items);
3670 assert(isGV_with_GP(sv));
3671 sv = (SV*)GvHVn((GV*)sv);
3672 goto do_HV_helem;
3673
3674 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */
3675 {
3676 dSP;
3677 sv = POPs;
3678 PUTBACK;
3679 goto do_HV_rv2hv_helem;
3680 }
3681
3682 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */
3683 sv = UNOP_AUX_item_sv(++items);
3684 assert(isGV_with_GP(sv));
3685 sv = GvSVn((GV*)sv);
3686 goto do_HV_vivify_rv2hv_helem;
3687
3688 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */
3689 sv = PAD_SVl((++items)->pad_offset);
3690 /* FALLTHROUGH */
3691
3692 do_HV_vivify_rv2hv_helem:
3693 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */
3694 /* this is the OPpDEREF action normally found at the end of
3695 * ops like aelem, helem, rv2sv */
3696 sv = vivify_ref(sv, OPpDEREF_HV);
3697 /* FALLTHROUGH */
3698
3699 do_HV_rv2hv_helem:
3700 /* this is basically a copy of pp_rv2hv when it just has the
3701 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */
3702
3703 SvGETMAGIC(sv);
3704 if (LIKELY(SvROK(sv))) {
3705 if (UNLIKELY(SvAMAGIC(sv))) {
3706 sv = amagic_deref_call(sv, to_hv_amg);
3707 }
3708 sv = SvRV(sv);
3709 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV))
3710 DIE(aTHX_ "Not a HASH reference");
3711 }
3712 else if (SvTYPE(sv) != SVt_PVHV) {
3713 if (!isGV_with_GP(sv))
3714 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV);
3715 sv = MUTABLE_SV(GvHVn((GV*)sv));
3716 }
3717 /* FALLTHROUGH */
3718
3719 do_HV_helem:
3720 {
3721 /* retrieve the key; this may be either a lexical / package
3722 * var or a string constant, whose index/ptr is stored as an
3723 * item
3724 */
3725 SV *keysv = NULL; /* to shut up stupid compiler warnings */
3726
3727 assert(SvTYPE(sv) == SVt_PVHV);
3728
3729 switch (actions & MDEREF_INDEX_MASK) {
3730 case MDEREF_INDEX_none:
3731 goto finish;
3732
3733 case MDEREF_INDEX_const:
3734 keysv = UNOP_AUX_item_sv(++items);
3735 break;
3736
3737 case MDEREF_INDEX_padsv:
3738 keysv = PAD_SVl((++items)->pad_offset);
3739 break;
3740
3741 case MDEREF_INDEX_gvsv:
3742 keysv = UNOP_AUX_item_sv(++items);
3743 keysv = GvSVn((GV*)keysv);
3744 break;
3745 }
3746
3747 /* see comment above about setting this var */
3748 PL_multideref_pc = items;
3749
3750
3751 /* ensure that candidate CONSTs have been HEKified */
3752 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const)
3753 || SvTYPE(keysv) >= SVt_PVMG
3754 || !SvOK(keysv)
3755 || SvROK(keysv)
3756 || SvIsCOW_shared_hash(keysv));
3757
3758 /* this is basically a copy of pp_helem with OPpDEREF skipped */
3759
3760 if (!(actions & MDEREF_FLAG_last)) {
3761 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0);
3762 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef)
3763 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
3764 break;
3765 }
3766
3767 if (PL_op->op_private &
3768 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE))
3769 {
3770 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) {
3771 sv = hv_exists_ent((HV*)sv, keysv, 0)
3772 ? &PL_sv_yes : &PL_sv_no;
3773 }
3774 else {
3775 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0;
3776 sv = hv_delete_ent((HV*)sv, keysv, discard, 0);
3777 if (discard)
3778 return NORMAL;
3779 if (!sv)
3780 sv = &PL_sv_undef;
3781 }
3782 }
3783 else {
3784 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
3785 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
3786 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
3787 bool preeminent = TRUE;
3788 SV **svp;
3789 HV * const hv = (HV*)sv;
3790 HE* he;
3791
3792 if (UNLIKELY(localizing)) {
3793 MAGIC *mg;
3794 HV *stash;
3795
3796 /* If we can determine whether the element exists,
3797 * try to preserve the existence status of a tied hash
3798 * element by using EXISTS and DELETE if possible.
3799 * Fall back to FETCH and STORE otherwise. */
3800 if (SvCANEXISTDELETE(hv))
3801 preeminent = hv_exists_ent(hv, keysv, 0);
3802 }
3803
3804 he = hv_fetch_ent(hv, keysv, lval && !defer, 0);
3805 svp = he ? &HeVAL(he) : NULL;
3806
3807
3808 if (lval) {
3809 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) {
3810 SV* lv;
3811 SV* key2;
3812 if (!defer)
3813 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv));
3814 lv = sv_newmortal();
3815 sv_upgrade(lv, SVt_PVLV);
3816 LvTYPE(lv) = 'y';
3817 sv_magic(lv, key2 = newSVsv(keysv),
3818 PERL_MAGIC_defelem, NULL, 0);
3819 /* sv_magic() increments refcount */
3820 SvREFCNT_dec_NN(key2);
3821 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv);
3822 LvTARGLEN(lv) = 1;
3823 sv = lv;
3824 }
3825 else {
3826 if (localizing) {
3827 if (HvNAME_get(hv) && isGV_or_RVCV(sv))
3828 save_gp(MUTABLE_GV(sv),
3829 !(PL_op->op_flags & OPf_SPECIAL));
3830 else if (preeminent) {
3831 save_helem_flags(hv, keysv, svp,
3832 (PL_op->op_flags & OPf_SPECIAL)
3833 ? 0 : SAVEf_SETMAGIC);
3834 sv = *svp; /* may have changed */
3835 }
3836 else
3837 SAVEHDELETE(hv, keysv);
3838 }
3839 }
3840 }
3841 else {
3842 sv = (svp && *svp ? *svp : &PL_sv_undef);
3843 /* see note in pp_helem() */
3844 if (SvRMAGICAL(hv) && SvGMAGICAL(sv))
3845 mg_get(sv);
3846 }
3847 }
3848 goto finish;
3849 }
3850
3851 } /* switch */
3852
3853 actions >>= MDEREF_SHIFT;
3854 } /* while */
3855 /* NOTREACHED */
3856}
3857
3858
3859PP(pp_iter)
3860{
3861 PERL_CONTEXT *cx;
3862 SV *oldsv;
3863 SV **itersvp;
3864
3865 SV *sv;
3866 AV *av;
3867 IV ix;
3868 IV inc;
3869
3870 cx = CX_CUR();
3871 itersvp = CxITERVAR(cx);
3872 assert(itersvp);
3873
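    /* Which context type we see here depends on the foreach form, e.g.
     * (illustrative):
     *     for ('aa' .. 'zz') { }     # CXt_LOOP_LAZYSV - string range, lazy
     *     for (1 .. 1_000_000) { }   # CXt_LOOP_LAZYIV - integer range, lazy
     *     for (@ary) { }             # CXt_LOOP_ARY
     *     for (1, $x, f()) { }       # CXt_LOOP_LIST   - a flattened list
     */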
3874 switch (CxTYPE(cx)) {
3875
3876 case CXt_LOOP_LAZYSV: /* string increment */
3877 {
3878 SV* cur = cx->blk_loop.state_u.lazysv.cur;
3879 SV *end = cx->blk_loop.state_u.lazysv.end;
3880 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no.
3881 It has SvPVX of "" and SvCUR of 0, which is what we want. */
3882 STRLEN maxlen = 0;
3883 const char *max = SvPV_const(end, maxlen);
3884 if (DO_UTF8(end) && IN_UNI_8_BIT)
3885 maxlen = sv_len_utf8_nomg(end);
3886 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen))
3887 goto retno;
3888
3889 oldsv = *itersvp;
3890 /* NB: on the first iteration, oldsv will have a ref count of at
3891 * least 2 (one extra from blk_loop.itersave), so the GV or pad
3892 * slot will get localised; on subsequent iterations the RC==1
3893 * optimisation may kick in and the SV will be reused. */
3894 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
3895 /* safe to reuse old SV */
3896 sv_setsv(oldsv, cur);
3897 }
3898 else
3899 {
3900 /* we need a fresh SV every time so that the loop body sees a
3901 * completely new SV, for closures/references to work as
3902 * they used to */
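 /* A minimal Perl-level sketch (illustrative only) of the behaviour
  * being preserved here:
  *     my @refs;
  *     for my $c ('a' .. 'c') { push @refs, \$c }
  * Each iteration must leave behind a distinct SV, so the three stored
  * references don't all end up pointing at one reused scalar. */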
3903 *itersvp = newSVsv(cur);
3904 SvREFCNT_dec(oldsv);
3905 }
3906 if (strEQ(SvPVX_const(cur), max))
3907 sv_setiv(cur, 0); /* terminate next time */
3908 else
3909 sv_inc(cur);
3910 break;
3911 }
3912
3913 case CXt_LOOP_LAZYIV: /* integer increment */
3914 {
3915 IV cur = cx->blk_loop.state_u.lazyiv.cur;
3916 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end))
3917 goto retno;
3918
3919 oldsv = *itersvp;
3920 /* see NB comment above */
3921 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) {
3922 /* safe to reuse old SV */
3923
3924 if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV))
3925 == SVt_IV)
3926 {
3927 /* Cheap SvIOK_only().
3928 * Assert that flags which SvIOK_only() would test or
3929 * clear can't be set, because we're SVt_IV */
3930 assert(!(SvFLAGS(oldsv) &
3931 (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK)))));
3932 SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK);
3933 /* SvIV_set() where sv_any points to head */
3934 oldsv->sv_u.svu_iv = cur;
3935
3936 }
3937 else
3938 sv_setiv(oldsv, cur);
3939 }
3940 else
3941 {
3942 /* we need a fresh SV every time so that the loop body sees a
3943 * completely new SV, for closures/references to work as they
3944 * used to */
3945 *itersvp = newSViv(cur);
3946 SvREFCNT_dec(oldsv);
3947 }
3948
3949 if (UNLIKELY(cur == IV_MAX)) {
3950 /* Handle end of range at IV_MAX */
3951 cx->blk_loop.state_u.lazyiv.end = IV_MIN;
3952 } else
3953 ++cx->blk_loop.state_u.lazyiv.cur;
3954 break;
3955 }
3956
3957 case CXt_LOOP_LIST: /* for (1,2,3) */
3958
3959 assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */
3960 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
3961 ix = (cx->blk_loop.state_u.stack.ix += inc);
3962 if (UNLIKELY(inc > 0
3963 ? ix > cx->blk_oldsp
3964 : ix <= cx->blk_loop.state_u.stack.basesp)
3965 )
3966 goto retno;
3967
3968 sv = PL_stack_base[ix];
3969 av = NULL;
3970 goto loop_ary_common;
3971
3972 case CXt_LOOP_ARY: /* for (@ary) */
3973
3974 av = cx->blk_loop.state_u.ary.ary;
3975 inc = 1 - (PL_op->op_private & OPpITER_REVERSED);
3976 ix = (cx->blk_loop.state_u.ary.ix += inc);
3977 if (UNLIKELY(inc > 0
3978 ? ix > AvFILL(av)
3979 : ix < 0)
3980 )
3981 goto retno;
3982
3983 if (UNLIKELY(SvRMAGICAL(av))) {
3984 SV * const * const svp = av_fetch(av, ix, FALSE);
3985 sv = svp ? *svp : NULL;
3986 }
3987 else {
3988 sv = AvARRAY(av)[ix];
3989 }
3990
3991 loop_ary_common:
3992
3993 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) {
3994 SvSetMagicSV(*itersvp, sv);
3995 break;
3996 }
3997
3998 if (LIKELY(sv)) {
3999 if (UNLIKELY(SvIS_FREED(sv))) {
4000 *itersvp = NULL;
4001 Perl_croak(aTHX_ "Use of freed value in iteration");
4002 }
4003 if (SvPADTMP(sv)) {
4004 sv = newSVsv(sv);
4005 }
4006 else {
4007 SvTEMP_off(sv);
4008 SvREFCNT_inc_simple_void_NN(sv);
4009 }
4010 }
4011 else if (av) {
4012 sv = newSVavdefelem(av, ix, 0);
4013 }
4014 else
4015 sv = &PL_sv_undef;
4016
4017 oldsv = *itersvp;
4018 *itersvp = sv;
4019 SvREFCNT_dec(oldsv);
4020 break;
4021
4022 default:
4023 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx));
4024 }
4025
4026 /* Bypass pushing &PL_sv_yes and calling pp_and(); instead
4027 * jump straight to the AND op's op_other */
4028 assert(PL_op->op_next->op_type == OP_AND);
4029 assert(PL_op->op_next->op_ppaddr == Perl_pp_and);
4030 return cLOGOPx(PL_op->op_next)->op_other;
4031
4032 retno:
4033 /* Bypass pushing &PL_sv_no and calling pp_and(); instead
4034 * jump straight to the AND op's op_next */
4035 assert(PL_op->op_next->op_type == OP_AND);
4036 assert(PL_op->op_next->op_ppaddr == Perl_pp_and);
4037 /* pp_enteriter should have pre-extended the stack */
4038 EXTEND_SKIP(PL_stack_sp, 1);
4039 /* we only need this for the rare case where the OP_AND isn't
4040 * in void context, e.g. $x = do { for (..) {...} };
4041 * but it's cheaper to just push it rather than testing first
4042 */
4043 *++PL_stack_sp = &PL_sv_no;
4044 return PL_op->op_next->op_next;
4045}
4046
4047
4048/*
4049A description of how taint works in pattern matching and substitution.
4050
4051This is all conditional on NO_TAINT_SUPPORT not being defined. Under
4052NO_TAINT_SUPPORT, taint-related operations should become no-ops.
4053
4054While the pattern is being assembled/concatenated and then compiled,
4055PL_tainted will get set (via TAINT_set) if any component of the pattern
4056is tainted, e.g. /.*$tainted/. At the end of pattern compilation,
4057the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via
4058TAINT_get). It will also be set if any component of the pattern matches
4059based on locale-dependent behavior.
4060
4061When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to
4062the pattern is marked as tainted. This means that subsequent usage, such
4063as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED,
4064on the new pattern too.
4065
4066RXf_TAINTED_SEEN is used post-execution by the get magic code
4067of $1 et al to indicate whether the returned value should be tainted.
4068It is the responsibility of the caller of the pattern (i.e. pp_match,
4069pp_subst etc) to set this flag for any other circumstances where $1 needs
4070to be tainted.
4071
4072The taint behaviour of pp_subst (and pp_substcont) is quite complex.
4073
4074There are three possible sources of taint
4075 * the source string
4076 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN)
4077 * the replacement string (or expression under /e)
4078
4079There are four destinations of taint and they are affected by the sources
4080according to the rules below:
4081
4082 * the return value (not including /r):
4083 tainted by the source string and pattern, but only for the
4084 number-of-iterations case; boolean returns aren't tainted;
4085 * the modified string (or modified copy under /r):
4086 tainted by the source string, pattern, and replacement strings;
4087 * $1 et al:
4088 tainted by the pattern, and under 'use re "taint"', by the source
4089 string too;
4090 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted:
4091 should always be unset before executing subsequent code.
4092
4093The overall action of pp_subst is:
4094
4095 * at the start, set bits in rxtainted indicating the taint status of
4096 the various sources.
4097
4098 * After each pattern execution, update the SUBST_TAINT_PAT bit in
4099 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the
4100 pattern has subsequently become tainted via locale ops.
4101
4102 * If control is being passed to pp_substcont to execute a /e block,
4103 save rxtainted in the CXt_SUBST block, for future use by
4104 pp_substcont.
4105
4106 * Whenever control is being returned to perl code (either by falling
4107 off the "end" of pp_subst/pp_substcont, or by entering a /e block),
4108 use the flag bits in rxtainted to make all the appropriate types of
4109 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1
4110 et al will appear tainted.
4111
4112pp_match is just a simpler version of the above.
4113
4114*/
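/* A minimal Perl-level illustration of the rules above (assuming taint
 * mode is enabled, e.g. perl -T, and that $str and/or $pat is tainted):
 *
 *     my $count = ($str =~ s/$pat/xyz/g);
 *
 * Here the iteration count in $count and the modified $str become tainted
 * if either $str or $pat was tainted; $1 et al become tainted if the
 * pattern was tainted (or, under 'use re "taint"', if $str was); and
 * PL_tainted is cleared again before any subsequent statement runs. */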
4115
4116PP(pp_subst)
4117{
4118 dSP; dTARG;
4119 PMOP *pm = cPMOP;
4120 PMOP *rpm = pm;
4121 char *s;
4122 char *strend;
4123 const char *c;
4124 STRLEN clen;
4125 SSize_t iters = 0;
4126 SSize_t maxiters;
4127 bool once;
4128 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits.
4129 See "how taint works" above */
4130 char *orig;
4131 U8 r_flags;
4132 REGEXP *rx = PM_GETRE(pm);
4133 regexp *prog = ReANY(rx);
4134 STRLEN len;
4135 int force_on_match = 0;
4136 const I32 oldsave = PL_savestack_ix;
4137 STRLEN slen;
4138 bool doutf8 = FALSE; /* whether replacement is in utf8 */
4139#ifdef PERL_ANY_COW
4140 bool was_cow;
4141#endif
4142 SV *nsv = NULL;
4143 /* known replacement string? */
4144 SV *dstr = (pm->op_pmflags & PMf_CONST) ? POPs : NULL;
4145
4146 PERL_ASYNC_CHECK();
4147
4148 if (PL_op->op_flags & OPf_STACKED)
4149 TARG = POPs;
4150 else {
4151 if (ARGTARG)
4152 GETTARGET;
4153 else {
4154 TARG = DEFSV;
4155 }
4156 EXTEND(SP,1);
4157 }
4158
4159 SvGETMAGIC(TARG); /* must come before cow check */
4160#ifdef PERL_ANY_COW
4161 /* note that a string might get converted to COW during matching */
4162 was_cow = cBOOL(SvIsCOW(TARG));
4163#endif
4164 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
4165#ifndef PERL_ANY_COW
4166 if (SvIsCOW(TARG))
4167 sv_force_normal_flags(TARG,0);
4168#endif
4169 if ((SvREADONLY(TARG)
4170 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG))
4171 || SvTYPE(TARG) > SVt_PVLV)
4172 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG)))))
4173 Perl_croak_no_modify();
4174 }
4175 PUTBACK;
4176
4177 orig = SvPV_nomg(TARG, len);
4178 /* note we don't (yet) force the var into being a string; if we fail
4179 * to match, we leave it as-is; on a successful match, however, we *will*
4180 * coerce it into a string, then repeat the match */
4181 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG))
4182 force_on_match = 1;
4183
4184 /* only replace once? */
4185 once = !(rpm->op_pmflags & PMf_GLOBAL);
4186
4187 /* See "how taint works" above */
4188 if (TAINTING_get) {
4189 rxtainted = (
4190 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0)
4191 | (RXp_ISTAINTED(prog) ? SUBST_TAINT_PAT : 0)
4192 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0)
4193 | (( (once && !(rpm->op_pmflags & PMf_NONDESTRUCT))
4194 || (PL_op->op_private & OPpTRUEBOOL)) ? SUBST_TAINT_BOOLRET : 0));
4195 TAINT_NOT;
4196 }
4197
4198 force_it:
4199 if (!pm || !orig)
4200 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig);
4201
4202 strend = orig + len;
4203 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len;
4204 maxiters = 2 * slen + 10; /* We can match twice at each
4205 position, once with zero-length,
4206 second time with non-zero. */
4207
4208 /* handle the empty pattern */
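 /* (A short Perl-level sketch of what this implements: an empty pattern
  * re-uses the last successfully matched pattern, so after
  *     "abc" =~ /b/;
  *     $str =~ s//X/;
  * the substitution behaves like s/b/X/.) */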
4209 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) {
4210 if (PL_curpm == PL_reg_curpm) {
4211 if (PL_curpm_under) {
4212 if (PL_curpm_under == PL_reg_curpm) {
4213 Perl_croak(aTHX_ "Infinite recursion via empty pattern");
4214 } else {
4215 pm = PL_curpm_under;
4216 }
4217 }
4218 } else {
4219 pm = PL_curpm;
4220 }
4221 rx = PM_GETRE(pm);
4222 prog = ReANY(rx);
4223 }
4224
4225#ifdef PERL_SAWAMPERSAND
4226 r_flags = ( RXp_NPARENS(prog)
4227 || PL_sawampersand
4228 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY))
4229 || (rpm->op_pmflags & PMf_KEEPCOPY)
4230 )
4231 ? REXEC_COPY_STR
4232 : 0;
4233#else
4234 r_flags = REXEC_COPY_STR;
4235#endif
4236
4237 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags))
4238 {
4239 SPAGAIN;
4240 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no);
4241 LEAVE_SCOPE(oldsave);
4242 RETURN;
4243 }
4244 PL_curpm = pm;
4245
4246 /* known replacement string? */
4247 if (dstr) {
4248 /* replacement needing upgrading? */
4249 if (DO_UTF8(TARG) && !doutf8) {
4250 nsv = sv_newmortal();
4251 SvSetSV(nsv, dstr);
4252 sv_utf8_upgrade(nsv);
4253 c = SvPV_const(nsv, clen);
4254 doutf8 = TRUE;
4255 }
4256 else {
4257 c = SvPV_const(dstr, clen);
4258 doutf8 = DO_UTF8(dstr);
4259 }
4260
4261 if (UNLIKELY(TAINT_get))
4262 rxtainted |= SUBST_TAINT_REPL;
4263 }
4264 else {
4265 c = NULL;
4266 doutf8 = FALSE;
4267 }
4268
4269 /* can do inplace substitution? */
4270 if (c
4271#ifdef PERL_ANY_COW
4272 && !was_cow
4273#endif
4274 && (I32)clen <= RXp_MINLENRET(prog)
4275 && ( once
4276 || !(r_flags & REXEC_COPY_STR)
4277 || (!SvGMAGICAL(dstr) && !(RXp_EXTFLAGS(prog) & RXf_EVAL_SEEN))
4278 )
4279 && !(RXp_EXTFLAGS(prog) & RXf_NO_INPLACE_SUBST)
4280 && (!doutf8 || SvUTF8(TARG))
4281 && !(rpm->op_pmflags & PMf_NONDESTRUCT))
4282 {
4283
4284#ifdef PERL_ANY_COW
4285 /* string might have got converted to COW since we set was_cow */
4286 if (SvIsCOW(TARG)) {
4287 if (!force_on_match)
4288 goto have_a_cow;
4289 assert(SvVOK(TARG));
4290 }
4291#endif
4292 if (force_on_match) {
4293 /* redo the first match, this time with the orig var
4294 * forced into being a string */
4295 force_on_match = 0;
4296 orig = SvPV_force_nomg(TARG, len);
4297 goto force_it;
4298 }
4299
4300 if (once) {
4301 char *d, *m;
4302 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
4303 rxtainted |= SUBST_TAINT_PAT;
4304 m = orig + RXp_OFFS(prog)[0].start;
4305 d = orig + RXp_OFFS(prog)[0].end;
4306 s = orig;
4307 if (m - s > strend - d) { /* faster to shorten from end */
4308 I32 i;
4309 if (clen) {
4310 Copy(c, m, clen, char);
4311 m += clen;
4312 }
4313 i = strend - d;
4314 if (i > 0) {
4315 Move(d, m, i, char);
4316 m += i;
4317 }
4318 *m = '\0';
4319 SvCUR_set(TARG, m - s);
4320 }
4321 else { /* faster from front */
4322 I32 i = m - s;
4323 d -= clen;
4324 if (i > 0)
4325 Move(s, d - i, i, char);
4326 sv_chop(TARG, d-i);
4327 if (clen)
4328 Copy(c, d, clen, char);
4329 }
4330 SPAGAIN;
4331 PUSHs(&PL_sv_yes);
4332 }
4333 else {
4334 char *d, *m;
4335 d = s = RXp_OFFS(prog)[0].start + orig;
4336 do {
4337 I32 i;
4338 if (UNLIKELY(iters++ > maxiters))
4339 DIE(aTHX_ "Substitution loop");
4340 /* run time pattern taint, eg locale */
4341 if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
4342 rxtainted |= SUBST_TAINT_PAT;
4343 m = RXp_OFFS(prog)[0].start + orig;
4344 if ((i = m - s)) {
4345 if (s != d)
4346 Move(s, d, i, char);
4347 d += i;
4348 }
4349 if (clen) {
4350 Copy(c, d, clen, char);
4351 d += clen;
4352 }
4353 s = RXp_OFFS(prog)[0].end + orig;
4354 } while (CALLREGEXEC(rx, s, strend, orig,
4355 s == m, /* don't match same null twice */
4356 TARG, NULL,
4357 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
4358 if (s != d) {
4359 I32 i = strend - s;
4360 SvCUR_set(TARG, d - SvPVX_const(TARG) + i);
4361 Move(s, d, i+1, char); /* include the NUL */
4362 }
4363 SPAGAIN;
4364 assert(iters);
4365 if (PL_op->op_private & OPpTRUEBOOL)
4366 PUSHs(&PL_sv_yes);
4367 else
4368 mPUSHi(iters);
4369 }
4370 }
4371 else {
4372 bool first;
4373 char *m;
4374 SV *repl;
4375 if (force_on_match) {
4376 /* redo the first match, this time with the orig var
4377 * forced into being a string */
4378 force_on_match = 0;
4379 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
4380 /* I feel that it should be possible to avoid this mortal copy
4381 given that the code below copies into a new destination.
4382 However, I suspect it isn't worth the complexity of
4383 unravelling the C<goto force_it> for the small number of
4384 cases where it would be viable to drop into the copy code. */
4385 TARG = sv_2mortal(newSVsv(TARG));
4386 }
4387 orig = SvPV_force_nomg(TARG, len);
4388 goto force_it;
4389 }
4390#ifdef PERL_ANY_COW
4391 have_a_cow:
4392#endif
4393 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */
4394 rxtainted |= SUBST_TAINT_PAT;
4395 repl = dstr;
4396 s = RXp_OFFS(prog)[0].start + orig;
4397 dstr = newSVpvn_flags(orig, s-orig,
4398 SVs_TEMP | (DO_UTF8(TARG) ? SVf_UTF8 : 0));
4399 if (!c) {
4400 PERL_CONTEXT *cx;
4401 SPAGAIN;
4402 m = orig;
4403 /* note that a whole bunch of local vars are saved here for
4404 * use by pp_substcont: here's a list of them in case you're
4405 * searching for places in this sub that use a particular var:
4406 * iters maxiters r_flags oldsave rxtainted orig dstr targ
4407 * s m strend rx once */
4408 CX_PUSHSUBST(cx);
4409 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot);
4410 }
4411 first = TRUE;
4412 do {
4413 if (UNLIKELY(iters++ > maxiters))
4414 DIE(aTHX_ "Substitution loop");
4415 if (UNLIKELY(RXp_MATCH_TAINTED(prog)))
4416 rxtainted |= SUBST_TAINT_PAT;
4417 if (RXp_MATCH_COPIED(prog) && RXp_SUBBEG(prog) != orig) {
4418 char *old_s = s;
4419 char *old_orig = orig;
4420 assert(RXp_SUBOFFSET(prog) == 0);
4421
4422 orig = RXp_SUBBEG(prog);
4423 s = orig + (old_s - old_orig);
4424 strend = s + (strend - old_s);
4425 }
4426 m = RXp_OFFS(prog)[0].start + orig;
4427 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG));
4428 s = RXp_OFFS(prog)[0].end + orig;
4429 if (first) {
4430 /* replacement already stringified */
4431 if (clen)
4432 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8);
4433 first = FALSE;
4434 }
4435 else {
4436 sv_catsv(dstr, repl);
4437 }
4438 if (once)
4439 break;
4440 } while (CALLREGEXEC(rx, s, strend, orig,
4441 s == m, /* Yields minend of 0 or 1 */
4442 TARG, NULL,
4443 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW));
4444 assert(strend >= s);
4445 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG));
4446
4447 if (rpm->op_pmflags & PMf_NONDESTRUCT) {
4448 /* From here on down we're using the copy, and leaving the original
4449 untouched. */
4450 TARG = dstr;
4451 SPAGAIN;
4452 PUSHs(dstr);
4453 } else {
4454#ifdef PERL_ANY_COW
4455 /* The match may make the string COW. If so, brilliant, because
4456 that's just saved us one malloc, copy and free - the regexp has
4457 donated the old buffer, and we malloc an entirely new one, rather
4458 than the regexp malloc()ing a buffer and copying our original,
4459 only for us to throw it away here during the substitution. */
4460 if (SvIsCOW(TARG)) {
4461 sv_force_normal_flags(TARG, SV_COW_DROP_PV);
4462 } else
4463#endif
4464 {
4465 SvPV_free(TARG);
4466 }
4467 SvPV_set(TARG, SvPVX(dstr));
4468 SvCUR_set(TARG, SvCUR(dstr));
4469 SvLEN_set(TARG, SvLEN(dstr));
4470 SvFLAGS(TARG) |= SvUTF8(dstr);
4471 SvPV_set(dstr, NULL);
4472
4473 SPAGAIN;
4474 if (PL_op->op_private & OPpTRUEBOOL)
4475 PUSHs(&PL_sv_yes);
4476 else
4477 mPUSHi(iters);
4478 }
4479 }
4480
4481 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) {
4482 (void)SvPOK_only_UTF8(TARG);
4483 }
4484
4485 /* See "how taint works" above */
4486 if (TAINTING_get) {
4487 if ((rxtainted & SUBST_TAINT_PAT) ||
4488 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) ==
4489 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT))
4490 )
4491 (RXp_MATCH_TAINTED_on(prog)); /* taint $1 et al */
4492
4493 if (!(rxtainted & SUBST_TAINT_BOOLRET)
4494 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT))
4495 )
4496 SvTAINTED_on(TOPs); /* taint return value */
4497 else
4498 SvTAINTED_off(TOPs); /* may have got tainted earlier */
4499
4500 /* needed for mg_set below */
4501 TAINT_set(
4502 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL))
4503 );
4504 SvTAINT(TARG);
4505 }
4506 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */
4507 TAINT_NOT;
4508 LEAVE_SCOPE(oldsave);
4509 RETURN;
4510}
4511
4512PP(pp_grepwhile)
4513{
4514 dSP;
4515 dPOPss;
4516
4517 if (SvTRUE_NN(sv))
4518 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr];
4519 ++*PL_markstack_ptr;
4520 FREETMPS;
4521 LEAVE_with_name("grep_item"); /* exit inner scope */
4522
4523 /* All done yet? */
4524 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) {
4525 I32 items;
4526 const U8 gimme = GIMME_V;
4527
4528 LEAVE_with_name("grep"); /* exit outer scope */
4529 (void)POPMARK; /* pop src */
4530 items = --*PL_markstack_ptr - PL_markstack_ptr[-1];
4531 (void)POPMARK; /* pop dst */
4532 SP = PL_stack_base + POPMARK; /* pop original mark */
4533 if (gimme == G_SCALAR) {
4534 if (PL_op->op_private & OPpTRUEBOOL)
4535 PUSHs(items ? &PL_sv_yes : &PL_sv_zero);
4536 else {
4537 dTARGET;
4538 PUSHi(items);
4539 }
4540 }
4541 else if (gimme == G_ARRAY)
4542 SP += items;
4543 RETURN;
4544 }
4545 else {
4546 SV *src;
4547
4548 ENTER_with_name("grep_item"); /* enter inner scope */
4549 SAVEVPTR(PL_curpm);
4550
4551 src = PL_stack_base[TOPMARK];
4552 if (SvPADTMP(src)) {
4553 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src);
4554 PL_tmps_floor++;
4555 }
4556 SvTEMP_off(src);
4557 DEFSV_set(src);
4558
4559 RETURNOP(cLOGOP->op_other);
4560 }
4561}
4562
4563/* leave_adjust_stacks():
4564 *
4565 * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp),
4566 * positioning them at to_sp+1 onwards, and do the equivalent of a
4567 * FREEMPS and TAINT_NOT.
4568 *
4569 * Not intended to be called in void context.
4570 *
4571 * When leaving a sub, eval, do{} or other scope, the things that need
4572 * doing to process the return args are:
4573 * * in scalar context, only return the last arg (or PL_sv_undef if none);
4574 * * for the types of return that return copies of their args (such
4575 * as rvalue sub return), make a mortal copy of every return arg,
4576 * except where we can optimise the copy away without it being
4577 * semantically visible;
4578 * * make sure that the arg isn't prematurely freed; in the case of an
4579 * arg not copied, this may involve mortalising it. For example, in
4580 * C<sub f { my $x = ...; $x }>, $x would be freed when we do
4581 * CX_LEAVE_SCOPE(cx) unless it's protected or copied.
4582 *
4583 * What condition to use when deciding whether to pass the arg through
4584 * or make a copy, is determined by the 'pass' arg; its valid values are:
4585 * 0: rvalue sub/eval exit
4586 * 1: other rvalue scope exit
4587 * 2: :lvalue sub exit in rvalue context
4588 * 3: :lvalue sub exit in lvalue context and other lvalue scope exits
4589 *
4590 * There is a big issue with doing a FREETMPS. We would like to free any
4591 * temps created by the last statement which the sub executed, rather than
4592 * leaving them for the caller. In a situation where a sub call isn't
4593 * soon followed by a nextstate (e.g. nested recursive calls, a la
4594 * fibonacci()), temps can accumulate, causing memory and performance
4595 * issues.
4596 *
4597 * On the other hand, we don't want to free any TEMPs which are keeping
4598 * alive any return args that we skipped copying; nor do we wish to undo
4599 * any mortalising done here.
4600 *
4601 * The solution is to split the temps stack frame into two, with a cut
4602 * point delineating the two halves. We arrange that by the end of this
4603 * function, all the temps stack frame entries we wish to keep are in the
4604 * range PL_tmps_floor+1.. tmps_base-1, while the ones to free now are in
4605 * the range tmps_base .. PL_tmps_ix. During the course of this
4606 * function, tmps_base starts off as PL_tmps_floor+1, then increases
4607 * whenever we find or create a temp that we know should be kept. In
4608 * general the stuff above tmps_base is undecided until we reach the end,
4609 * and we may need a sort stage for that.
4610 *
4611 * To determine whether a TEMP is keeping a return arg alive, every
4612 * arg that is kept rather than copied and which has the SvTEMP flag
4613 * set, has the flag temporarily unset, to mark it. At the end we scan
4614 * the temps stack frame above the cut for entries without SvTEMP and
4615 * keep them, while turning SvTEMP on again. Note that if we die before
4616 * the SvTEMP flags are set again, it's safe: at worst, subsequent use of
4617 * those SVs may be slightly less efficient.
4618 *
4619 * In practice various optimisations for some common cases mean we can
4620 * avoid most of the scanning and swapping about with the temps stack.
4621 */
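/* For example, pp_leavesub() below calls this as
 *
 *     leave_adjust_stacks(oldsp, oldsp, gimme, 0);
 *
 * i.e. an rvalue sub exit (pass == 0), where each return arg is copied
 * unless it is already a single-referenced, non-magical temp that can be
 * passed through as-is. (Illustrative cross-reference only; see the
 * individual callers for the authoritative pass values.) */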
4622
4623void
4624Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass)
4625{
4626 dVAR;
4627 dSP;
4628 SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
4629 SSize_t nargs;
4630
4631 PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS;
4632
4633 TAINT_NOT;
4634
4635 if (gimme == G_ARRAY) {
4636 nargs = SP - from_sp;
4637 from_sp++;
4638 }
4639 else {
4640 assert(gimme == G_SCALAR);
4641 if (UNLIKELY(from_sp >= SP)) {
4642 /* no return args */
4643 assert(from_sp == SP);
4644 EXTEND(SP, 1);
4645 *++SP = &PL_sv_undef;
4646 to_sp = SP;
4647 nargs = 0;
4648 }
4649 else {
4650 from_sp = SP;
4651 nargs = 1;
4652 }
4653 }
4654
4655 /* common code for G_SCALAR and G_ARRAY */
4656
4657 tmps_base = PL_tmps_floor + 1;
4658
4659 assert(nargs >= 0);
4660 if (nargs) {
4661 /* pointer version of tmps_base. Not safe across temp stack
4662 * reallocs. */
4663 SV **tmps_basep;
4664
4665 EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
4666 tmps_basep = PL_tmps_stack + tmps_base;
4667
4668 /* process each return arg */
4669
4670 do {
4671 SV *sv = *from_sp++;
4672
4673 assert(PL_tmps_ix + nargs < PL_tmps_max);
4674#ifdef DEBUGGING
4675 /* PADTMPs with container set magic shouldn't appear in the
4676 * wild. This assert is more important for pp_leavesublv(),
4677 * but by testing for it here, we're more likely to catch
4678 * bad cases (what with :lvalue subs not being widely
4679 * deployed). The two issues are that for something like
4680 * sub :lvalue { $tied{foo} }
4681 * or
4682 * sub :lvalue { substr($foo,1,2) }
4683 * pp_leavesublv() will croak if the sub returns a PADTMP,
4684 * and currently functions like pp_substr() return a mortal
4685 * rather than using their PADTMP when returning a PVLV.
4686 * This is because the PVLV will hold a ref to $foo,
4687 * so $foo would get delayed in being freed while
4688 * the PADTMP SV remained in the PAD.
4689 * So if this assert fails it means either:
4690 * 1) there is pp code similar to pp_substr that is
4691 * returning a PADTMP instead of a mortal, and probably
4692 * needs fixing, or
4693 * 2) pp_leavesublv is making unwarranted assumptions
4694 * about always croaking on a PADTMP
4695 */
4696 if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
4697 MAGIC *mg;
4698 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
4699 assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
4700 }
4701 }
4702#endif
4703
4704 if (
4705 pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
4706 : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
4707 : pass == 2 ? (!SvPADTMP(sv))
4708 : 1)
4709 {
4710 /* pass through: skip copy for logic or optimisation
4711 * reasons; instead mortalise it, except that ... */
4712 *++to_sp = sv;
4713
4714 if (SvTEMP(sv)) {
4715 * ... since this SV is an SvTEMP, we don't need to
4716 * re-mortalise it; instead we just need to ensure
4717 * that its existing entry in the temps stack frame
4718 * ends up below the cut and so avoids being freed
4719 * this time round. We mark it as needing to be kept
4720 * by temporarily unsetting SvTEMP; then at the end,
4721 * we shuffle any !SvTEMP entries on the tmps stack
4722 * back below the cut.
4723 * However, there's a significant chance that there's
4724 * a 1:1 correspondence between the first few (or all)
4725 * elements in the return args stack frame and those
4726 * in the temps stack frame; e.g.:
4727 * sub f { ....; map {...} .... },
4728 * or if we're exiting multiple scopes and one of the
4729 * inner scopes has already made mortal copies of each
4730 * return arg.
4731 *
4732 * If so, this arg sv will correspond to the next item
4733 * on the tmps stack above the cut, and so can be kept
4734 * merely by moving the cut boundary up one, rather
4735 * than messing with SvTEMP. If all args are 1:1 then
4736 * we can avoid the sorting stage below completely.
4737 *
4738 * If there are no items above the cut on the tmps
4739 * stack, then the SvTEMP must come from an item
4740 * below the cut, so there's nothing to do.
4741 */
4742 if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) {
4743 if (sv == *tmps_basep)
4744 tmps_basep++;
4745 else
4746 SvTEMP_off(sv);
4747 }
4748 }
4749 else if (!SvPADTMP(sv)) {
4750 /* mortalise arg to avoid it being freed during save
4751 * stack unwinding. Pad tmps don't need mortalising as
4752 * they're never freed. This is the equivalent of
4753 * sv_2mortal(SvREFCNT_inc(sv)), except that:
4754 * * it assumes that the temps stack has already been
4755 * extended;
4756 * * it puts the new item at the cut rather than at
4757 * ++PL_tmps_ix, moving the previous occupant there
4758 * instead.
4759 */
4760 if (!SvIMMORTAL(sv)) {
4761 SvREFCNT_inc_simple_void_NN(sv);
4762 SvTEMP_on(sv);
4763 /* Note that if there's nothing above the cut,
4764 * this copies the garbage one slot above
4765 * PL_tmps_ix onto itself. This is harmless (the
4766 * stack's already been extended), but might in
4767 * theory trigger warnings from tools like ASan
4768 */
4769 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
4770 *tmps_basep++ = sv;
4771 }
4772 }
4773 }
4774 else {
4775 /* Make a mortal copy of the SV.
4776 * The following code is the equivalent of sv_mortalcopy()
4777 * except that:
4778 * * it assumes the temps stack has already been extended;
4779 * * it optimises the copying for some simple SV types;
4780 * * it puts the new item at the cut rather than at
4781 * ++PL_tmps_ix, moving the previous occupant there
4782 * instead.
4783 */
4784 SV *newsv = newSV(0);
4785
4786 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
4787 /* put it on the tmps stack early so it gets freed if we die */
4788 *tmps_basep++ = newsv;
4789 *++to_sp = newsv;
4790
4791 if (SvTYPE(sv) <= SVt_IV) {
4792 /* arg must be one of undef, IV/UV, or RV: skip
4793 * sv_setsv_flags() and do the copy directly */
4794 U32 dstflags;
4795 U32 srcflags = SvFLAGS(sv);
4796
4797 assert(!SvGMAGICAL(sv));
4798 if (srcflags & (SVf_IOK|SVf_ROK)) {
4799 SET_SVANY_FOR_BODYLESS_IV(newsv);
4800
4801 if (srcflags & SVf_ROK) {
4802 newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
4803 /* SV type plus flags */
4804 dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
4805 }
4806 else {
4807 /* both src and dst are <= SVt_IV, so sv_any
4808 * points to the head; so access the heads
4809 * directly rather than going via sv_any.
4810 */
4811 assert( &(sv->sv_u.svu_iv)
4812 == &(((XPVIV*) SvANY(sv))->xiv_iv));
4813 assert( &(newsv->sv_u.svu_iv)
4814 == &(((XPVIV*) SvANY(newsv))->xiv_iv));
4815 newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
4816 /* SV type plus flags */
4817 dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
4818 |(srcflags & SVf_IVisUV));
4819 }
4820 }
4821 else {
4822 assert(!(srcflags & SVf_OK));
4823 dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
4824 }
4825 SvFLAGS(newsv) = dstflags;
4826
4827 }
4828 else {
4829 /* do the full sv_setsv() */
4830 SSize_t old_base;
4831
4832 SvTEMP_on(newsv);
4833 old_base = tmps_basep - PL_tmps_stack;
4834 SvGETMAGIC(sv);
4835 sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
4836 /* the mg_get or sv_setsv might have created new temps
4837 * or realloced the tmps stack; regrow and reload */
4838 EXTEND_MORTAL(nargs);
4839 tmps_basep = PL_tmps_stack + old_base;
4840 TAINT_NOT; /* Each item is independent */
4841 }
4842
4843 }
4844 } while (--nargs);
4845
4846 /* If there are any temps left above the cut, we need to sort
4847 * them into those to keep and those to free. The only ones to
4848 * keep are those for which we've temporarily unset SvTEMP.
4849 * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
4850 * swapping pairs as necessary. Stop when we meet in the middle.
4851 */
4852 {
4853 SV **top = PL_tmps_stack + PL_tmps_ix;
4854 while (tmps_basep <= top) {
4855 SV *sv = *top;
4856 if (SvTEMP(sv))
4857 top--;
4858 else {
4859 SvTEMP_on(sv);
4860 *top = *tmps_basep;
4861 *tmps_basep = sv;
4862 tmps_basep++;
4863 }
4864 }
4865 }
4866
4867 tmps_base = tmps_basep - PL_tmps_stack;
4868 }
4869
4870 PL_stack_sp = to_sp;
4871
4872 /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
4873 while (PL_tmps_ix >= tmps_base) {
4874 SV* const sv = PL_tmps_stack[PL_tmps_ix--];
4875#ifdef PERL_POISON
4876 PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
4877#endif
4878 if (LIKELY(sv)) {
4879 SvTEMP_off(sv);
4880 SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
4881 }
4882 }
4883}
4884
4885
4886/* also tail-called by pp_return */
4887
4888PP(pp_leavesub)
4889{
4890 U8 gimme;
4891 PERL_CONTEXT *cx;
4892 SV **oldsp;
4893 OP *retop;
4894
4895 cx = CX_CUR();
4896 assert(CxTYPE(cx) == CXt_SUB);
4897
4898 if (CxMULTICALL(cx)) {
4899 /* entry zero of a stack is always PL_sv_undef, which
4900 * simplifies converting a '()' return into undef in scalar context */
4901 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef);
4902 return 0;
4903 }
4904
4905 gimme = cx->blk_gimme;
4906 oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */
4907
4908 if (gimme == G_VOID)
4909 PL_stack_sp = oldsp;
4910 else
4911 leave_adjust_stacks(oldsp, oldsp, gimme, 0);
4912
4913 CX_LEAVE_SCOPE(cx);
4914 cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */
4915 cx_popblock(cx);
4916 retop = cx->blk_sub.retop;
4917 CX_POP(cx);
4918
4919 return retop;
4920}
4921
4922
4923/* Clear (if possible) or abandon the current @_. If 'abandon' is true,
4924 * the abandon is forced even when the AV could have been cleared. */
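/* (Illustrative sketch: if the sub body did something like C<my $r = \@_>,
 * the old AV's refcount is no longer 1, so it is left intact for $r and a
 * fresh AV is installed in pad[0] instead.) */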
4925
4926void
4927Perl_clear_defarray(pTHX_ AV* av, bool abandon)
4928{
4929 const SSize_t fill = AvFILLp(av);
4930
4931 PERL_ARGS_ASSERT_CLEAR_DEFARRAY;
4932
4933 if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) {
4934 av_clear(av);
4935 AvREIFY_only(av);
4936 }
4937 else {
4938 AV *newav = newAV();
4939 av_extend(newav, fill);
4940 AvREIFY_only(newav);
4941 PAD_SVl(0) = MUTABLE_SV(newav);
4942 SvREFCNT_dec_NN(av);
4943 }
4944}
4945
4946
4947PP(pp_entersub)
4948{
4949 dSP; dPOPss;
4950 GV *gv;
4951 CV *cv;
4952 PERL_CONTEXT *cx;
4953 I32 old_savestack_ix;
4954
4955 if (UNLIKELY(!sv))
4956 goto do_die;
4957
4958 /* Locate the CV to call:
4959 * - most common case: RV->CV: f(), $ref->():
4960 * note that if a sub is compiled before its caller is compiled,
4961 * the stash entry will be a ref to a CV, rather than being a GV.
4962 * - second most common case: CV: $ref->method()
4963 */
4964
4965 /* a non-magic-RV -> CV ? */
4966 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) {
4967 cv = MUTABLE_CV(SvRV(sv));
4968 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */
4969 goto do_ref;
4970 }
4971 else
4972 cv = MUTABLE_CV(sv);
4973
4974 /* a CV ? */
4975 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) {
4976 /* handle all the weird cases */
4977 switch (SvTYPE(sv)) {
4978 case SVt_PVLV:
4979 if (!isGV_with_GP(sv))
4980 goto do_default;
4981 /* FALLTHROUGH */
4982 case SVt_PVGV:
4983 cv = GvCVu((const GV *)sv);
4984 if (UNLIKELY(!cv)) {
4985 HV *stash;
4986 cv = sv_2cv(sv, &stash, &gv, 0);
4987 if (!cv) {
4988 old_savestack_ix = PL_savestack_ix;
4989 goto try_autoload;
4990 }
4991 }
4992 break;
4993
4994 default:
4995 do_default:
4996 SvGETMAGIC(sv);
4997 if (SvROK(sv)) {
4998 do_ref:
4999 if (UNLIKELY(SvAMAGIC(sv))) {
5000 sv = amagic_deref_call(sv, to_cv_amg);
5001 /* Don't SPAGAIN here. */
5002 }
5003 }
5004 else {
5005 const char *sym;
5006 STRLEN len;
5007 if (UNLIKELY(!SvOK(sv)))
5008 DIE(aTHX_ PL_no_usym, "a subroutine");
5009
5010 sym = SvPV_nomg_const(sv, len);
5011 if (PL_op->op_private & HINT_STRICT_REFS)
5012 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : "");
5013 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv));
5014 break;
5015 }
5016 cv = MUTABLE_CV(SvRV(sv));
5017 if (LIKELY(SvTYPE(cv) == SVt_PVCV))
5018 break;
5019 /* FALLTHROUGH */
5020 case SVt_PVHV:
5021 case SVt_PVAV:
5022 do_die:
5023 DIE(aTHX_ "Not a CODE reference");
5024 }
5025 }
5026
5027 /* At this point we want to save PL_savestack_ix, either by doing a
5028 * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final
5029 * CV we will be using (so we don't know whether it's XS, so we can't
5030 * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on
5031 * the save stack. So remember where we are currently on the save
5032 * stack, and later update the CX or scopestack entry accordingly. */
5033 old_savestack_ix = PL_savestack_ix;
5034
5035 /* these two fields are in a union. If they ever become separate,
5036 * we have to test for both of them being null below */
5037 assert(cv);
5038 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv));
5039 while (UNLIKELY(!CvROOT(cv))) {
5040 GV* autogv;
5041 SV* sub_name;
5042
5043 /* anonymous or undef'd function leaves us no recourse */
5044 if (CvLEXICAL(cv) && CvHASGV(cv))
5045 DIE(aTHX_ "Undefined subroutine &%" SVf " called",
5046 SVfARG(cv_name(cv, NULL, 0)));
5047 if (CvANON(cv) || !CvHASGV(cv)) {
5048 DIE(aTHX_ "Undefined subroutine called");
5049 }
5050
5051 /* autoloaded stub? */
5052 if (cv != GvCV(gv = CvGV(cv))) {
5053 cv = GvCV(gv);
5054 }
5055 /* should call AUTOLOAD now? */
5056 else {
5057 try_autoload:
5058 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv),
5059 (GvNAMEUTF8(gv) ? SVf_UTF8 : 0)
5060 |(PL_op->op_flags & OPf_REF
5061 ? GV_AUTOLOAD_ISMETHOD
5062 : 0));
5063 cv = autogv ? GvCV(autogv) : NULL;
5064 }
5065 if (!cv) {
5066 sub_name = sv_newmortal();
5067 gv_efullname3(sub_name, gv, NULL);
5068 DIE(aTHX_ "Undefined subroutine &%" SVf " called", SVfARG(sub_name));
5069 }
5070 }
5071
5072 /* unrolled "CvCLONE(cv) && ! CvCLONED(cv)" */
5073 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE))
5074 DIE(aTHX_ "Closure prototype called");
5075
5076 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub)
5077 && !CvNODEBUG(cv)))
5078 {
5079 Perl_get_db_sub(aTHX_ &sv, cv);
5080 if (CvISXSUB(cv))
5081 PL_curcopdb = PL_curcop;
5082 if (CvLVALUE(cv)) {
5083 /* check for lsub that handles lvalue subroutines */
5084 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV));
5085 /* if lsub not found then fall back to DB::sub */
5086 if (!cv) cv = GvCV(PL_DBsub);
5087 } else {
5088 cv = GvCV(PL_DBsub);
5089 }
5090
5091 if (!cv || (!CvXSUB(cv) && !CvSTART(cv)))
5092 DIE(aTHX_ "No DB::sub routine defined");
5093 }
5094
5095 if (!(CvISXSUB(cv))) {
5096 /* This path taken at least 75% of the time */
5097 dMARK;
5098 PADLIST *padlist;
5099 I32 depth;
5100 bool hasargs;
5101 U8 gimme;
5102
5103 /* keep PADTMP args alive throughout the call (we need to do this
5104 * because @_ isn't refcounted). Note that we create the mortals
5105 * in the caller's tmps frame, so they won't be freed until after
5106 * we return from the sub.
5107 */
5108 {
5109 SV **svp = MARK;
5110 while (svp < SP) {
5111 SV *sv = *++svp;
5112 if (!sv)
5113 continue;
5114 if (SvPADTMP(sv))
5115 *svp = sv = sv_mortalcopy(sv);
5116 SvTEMP_off(sv);
5117 }
5118 }
5119
5120 gimme = GIMME_V;
5121 cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix);
5122 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED);
5123 cx_pushsub(cx, cv, PL_op->op_next, hasargs);
5124
5125 padlist = CvPADLIST(cv);
5126 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2))
5127 pad_push(padlist, depth);
5128 PAD_SET_CUR_NOSAVE(padlist, depth);
5129 if (LIKELY(hasargs)) {
5130 AV *const av = MUTABLE_AV(PAD_SVl(0));
5131 SSize_t items;
5132 AV **defavp;
5133
5134 defavp = &GvAV(PL_defgv);
5135 cx->blk_sub.savearray = *defavp;
5136 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av));
5137
5138 /* it's the responsibility of whoever leaves a sub to ensure
5139 * that a clean, empty AV is left in pad[0]. This is normally
5140 * done by cx_popsub() */
5141 assert(!AvREAL(av) && AvFILLp(av) == -1);
5142
5143 items = SP - MARK;
5144 if (UNLIKELY(items - 1 > AvMAX(av))) {
5145 SV **ary = AvALLOC(av);
5146 Renew(ary, items, SV*);
5147 AvMAX(av) = items - 1;
5148 AvALLOC(av) = ary;
5149 AvARRAY(av) = ary;
5150 }
5151
5152 if (items)
5153 Copy(MARK+1,AvARRAY(av),items,SV*);
5154 AvFILLp(av) = items - 1;
5155 }
5156 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
5157 !CvLVALUE(cv)))
5158 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
5159 SVfARG(cv_name(cv, NULL, 0)));
5160 /* warning must come *after* we fully set up the context
5161 * stuff so that __WARN__ handlers can safely dounwind()
5162 * if they want to
5163 */
5164 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN
5165 && ckWARN(WARN_RECURSION)
5166 && !(PERLDB_SUB && cv == GvCV(PL_DBsub))))
5167 sub_crush_depth(cv);
5168 RETURNOP(CvSTART(cv));
5169 }
5170 else {
5171 SSize_t markix = TOPMARK;
5172 bool is_scalar;
5173
5174 ENTER;
5175 /* pretend we did the ENTER earlier */
5176 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix;
5177
5178 SAVETMPS;
5179 PUTBACK;
5180
5181 if (UNLIKELY(((PL_op->op_private
5182 & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub)
5183 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO &&
5184 !CvLVALUE(cv)))
5185 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf,
5186 SVfARG(cv_name(cv, NULL, 0)));
5187
5188 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) {
5189 /* Need to copy @_ to stack. An alternative may be to
5190 * switch the stack to @_, and copy the return values
5191 * back. This would allow popping @_ in an XSUB, e.g. XXXX */
5192 AV * const av = GvAV(PL_defgv);
5193 const SSize_t items = AvFILL(av) + 1;
5194
5195 if (items) {
5196 SSize_t i = 0;
5197 const bool m = cBOOL(SvRMAGICAL(av));
5198 /* Mark is at the end of the stack. */
5199 EXTEND(SP, items);
5200 for (; i < items; ++i)
5201 {
5202 SV *sv;
5203 if (m) {
5204 SV ** const svp = av_fetch(av, i, 0);
5205 sv = svp ? *svp : NULL;
5206 }
5207 else sv = AvARRAY(av)[i];
5208 if (sv) SP[i+1] = sv;
5209 else {
5210 SP[i+1] = newSVavdefelem(av, i, 1);
5211 }
5212 }
5213 SP += items;
5214 PUTBACK ;
5215 }
5216 }
5217 else {
5218 SV **mark = PL_stack_base + markix;
5219 SSize_t items = SP - mark;
5220 while (items--) {
5221 mark++;
5222 if (*mark && SvPADTMP(*mark)) {
5223 *mark = sv_mortalcopy(*mark);
5224 }
5225 }
5226 }
5227 /* We assume the first XSUB in &DB::sub is the called one. */
5228 if (UNLIKELY(PL_curcopdb)) {
5229 SAVEVPTR(PL_curcop);
5230 PL_curcop = PL_curcopdb;
5231 PL_curcopdb = NULL;
5232 }
5233 /* Do we need to open block here? XXXX */
5234
5235 /* calculate gimme here as PL_op might get changed and then not
5236 * restored until the LEAVE further down */
5237 is_scalar = (GIMME_V == G_SCALAR);
5238
5239 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */
5240 assert(CvXSUB(cv));
5241 CvXSUB(cv)(aTHX_ cv);
5242
5243#if defined DEBUGGING && !defined DEBUGGING_RE_ONLY
5244 /* This duplicates the check done in runops_debug(), but provides more
5245 * information in the common case of the fault being with an XSUB.
5246 *
5247 * It should also catch an XSUB pushing more than it extends
5248 * in scalar context.
5249 */
5250 if (PL_curstackinfo->si_stack_hwm < PL_stack_sp - PL_stack_base)
5251 Perl_croak_nocontext(
5252 "panic: XSUB %s::%s (%s) failed to extend arg stack: "
5253 "base=%p, sp=%p, hwm=%p\n",
5254 HvNAME(GvSTASH(CvGV(cv))), GvNAME(CvGV(cv)), CvFILE(cv),
5255 PL_stack_base, PL_stack_sp,
5256 PL_stack_base + PL_curstackinfo->si_stack_hwm);
5257#endif
5258 /* Enforce some sanity in scalar context. */
5259 if (is_scalar) {
5260 SV **svp = PL_stack_base + markix + 1;
5261 if (svp != PL_stack_sp) {
5262 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp;
5263 PL_stack_sp = svp;
5264 }
5265 }
5266 LEAVE;
5267 return NORMAL;
5268 }
5269}
5270
5271void
5272Perl_sub_crush_depth(pTHX_ CV *cv)
5273{
5274 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH;
5275
5276 if (CvANON(cv))
5277 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine");
5278 else {
5279 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%" SVf "\"",
5280 SVfARG(cv_name(cv,NULL,0)));
5281 }
5282}
5283
5284
5285
5286/* like croak, but report in context of caller */
5287
5288void
5289Perl_croak_caller(const char *pat, ...)
5290{
5291 dTHX;
5292 va_list args;
5293 const PERL_CONTEXT *cx = caller_cx(0, NULL);
5294
5295 /* make error appear at call site */
5296 assert(cx);
5297 PL_curcop = cx->blk_oldcop;
5298
5299 va_start(args, pat);
5300 vcroak(pat, &args);
5301 NOT_REACHED; /* NOTREACHED */
5302 va_end(args);
5303}
5304
5305
5306PP(pp_aelem)
5307{
5308 dSP;
5309 SV** svp;
5310 SV* const elemsv = POPs;
5311 IV elem = SvIV(elemsv);
5312 AV *const av = MUTABLE_AV(POPs);
5313 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET;
5314 const U32 defer = PL_op->op_private & OPpLVAL_DEFER;
5315 const bool localizing = PL_op->op_private & OPpLVAL_INTRO;
5316 bool preeminent = TRUE;
5317 SV *sv;
5318
5319 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC)))
5320 Perl_warner(aTHX_ packWARN(WARN_MISC),
5321 "Use of reference \"%" SVf "\" as array index",
5322 SVfARG(elemsv));
5323 if (UNLIKELY(SvTYPE(av) != SVt_PVAV))
5324 RETPUSHUNDEF;
5325
5326 if (UNLIKELY(localizing)) {
5327 MAGIC *mg;
5328 HV *stash;
5329
5330 /* If we can determine whether the element exists,
5331 * try to preserve the existence of a tied array
5332 * element by using EXISTS and DELETE if possible.
5333 * Fall back to FETCH and STORE otherwise. */
5334 if (SvCANEXISTDELETE(av))
5335 preeminent = av_exists(av, elem);
5336 }
5337
5338 svp = av_fetch(av, elem, lval && !defer);
5339 if (lval) {
5340#ifdef PERL_MALLOC_WRAP
5341 if (SvUOK(elemsv)) {
5342 const UV uv = SvUV(elemsv);
5343 elem = uv > IV_MAX ? IV_MAX : uv;
5344 }
5345 else if (SvNOK(elemsv))
5346 elem = (IV)SvNV(elemsv);
5347 if (elem > 0) {
5348 static const char oom_array_extend[] =
5349 "Out of memory during array extend"; /* Duplicated in av.c */
5350 MEM_WRAP_CHECK_1(elem,SV*,oom_array_extend);
5351 }
5352#endif
5353 if (!svp || !*svp) {
5354 IV len;
5355 if (!defer)
5356 DIE(aTHX_ PL_no_aelem, elem);
5357 len = av_tindex(av);
5358 mPUSHs(newSVavdefelem(av,
5359 /* Resolve a negative index now, unless it points before the
5360 beginning of the array, in which case record it for error
5361 reporting in magic_setdefelem. */
5362 elem < 0 && len + elem >= 0 ? len + elem : elem,
5363 1));
5364 RETURN;
5365 }
5366 if (UNLIKELY(localizing)) {
5367 if (preeminent)
5368 save_aelem(av, elem, svp);
5369 else
5370 SAVEADELETE(av, elem);
5371 }
5372 else if (PL_op->op_private & OPpDEREF) {
5373 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF));
5374 RETURN;
5375 }
5376 }
5377 sv = (svp ? *svp : &PL_sv_undef);
5378 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */
5379 mg_get(sv);
5380 PUSHs(sv);
5381 RETURN;
5382}
5383
5384SV*
5385Perl_vivify_ref(pTHX_ SV *sv, U32 to_what)
5386{
5387 PERL_ARGS_ASSERT_VIVIFY_REF;
5388
5389 SvGETMAGIC(sv);
5390 if (!SvOK(sv)) {
5391 if (SvREADONLY(sv))
5392 Perl_croak_no_modify();
5393 prepare_SV_for_RV(sv);
5394 switch (to_what) {
5395 case OPpDEREF_SV:
5396 SvRV_set(sv, newSV(0));
5397 break;
5398 case OPpDEREF_AV:
5399 SvRV_set(sv, MUTABLE_SV(newAV()));
5400 break;
5401 case OPpDEREF_HV:
5402 SvRV_set(sv, MUTABLE_SV(newHV()));
5403 break;
5404 }
5405 SvROK_on(sv);
5406 SvSETMAGIC(sv);
5407 SvGETMAGIC(sv);
5408 }
5409 if (SvGMAGICAL(sv)) {
5410 /* copy the sv without magic to prevent magic from being
5411 executed twice */
5412 SV* msv = sv_newmortal();
5413 sv_setsv_nomg(msv, sv);
5414 return msv;
5415 }
5416 return sv;
5417}
5418
5419PERL_STATIC_INLINE HV *
5420S_opmethod_stash(pTHX_ SV* meth)
5421{
5422 SV* ob;
5423 HV* stash;
5424
5425 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp
5426 ? (Perl_croak(aTHX_ "Can't call method \"%" SVf "\" without a "
5427 "package or object reference", SVfARG(meth)),
5428 (SV *)NULL)
5429 : *(PL_stack_base + TOPMARK + 1);
5430
5431 PERL_ARGS_ASSERT_OPMETHOD_STASH;
5432
5433 if (UNLIKELY(!sv))
5434 undefined:
5435 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on an undefined value",
5436 SVfARG(meth));
5437
5438 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv);
5439 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */
5440 stash = gv_stashsv(sv, GV_CACHE_ONLY);
5441 if (stash) return stash;
5442 }
5443
5444 if (SvROK(sv))
5445 ob = MUTABLE_SV(SvRV(sv));
5446 else if (!SvOK(sv)) goto undefined;
5447 else if (isGV_with_GP(sv)) {
5448 if (!GvIO(sv))
5449 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
5450 "without a package or object reference",
5451 SVfARG(meth));
5452 ob = sv;
5453 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') {
5454 assert(!LvTARGLEN(ob));
5455 ob = LvTARG(ob);
5456 assert(ob);
5457 }
5458 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob));
5459 }
5460 else {
5461 /* this isn't a reference */
5462 GV* iogv;
5463 STRLEN packlen;
5464 const char * const packname = SvPV_nomg_const(sv, packlen);
5465 const U32 packname_utf8 = SvUTF8(sv);
5466 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY);
5467 if (stash) return stash;
5468
5469 if (!(iogv = gv_fetchpvn_flags(
5470 packname, packlen, packname_utf8, SVt_PVIO
5471 )) ||
5472 !(ob=MUTABLE_SV(GvIO(iogv))))
5473 {
5474 /* this isn't the name of a filehandle either */
5475 if (!packlen)
5476 {
5477 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" "
5478 "without a package or object reference",
5479 SVfARG(meth));
5480 }
5481 /* assume it's a package name */
5482 stash = gv_stashpvn(packname, packlen, packname_utf8);
5483 if (stash) return stash;
5484 else return MUTABLE_HV(sv);
5485 }
5486 /* it _is_ a filehandle name -- replace with a reference */
5487 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv)));
5488 }
5489
5490 /* if we got here, ob should be an object or a glob */
5491 if (!ob || !(SvOBJECT(ob)
5492 || (isGV_with_GP(ob)
5493 && (ob = MUTABLE_SV(GvIO((const GV *)ob)))
5494 && SvOBJECT(ob))))
5495 {
5496 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on unblessed reference",
5497 SVfARG((SvPOK(meth) && SvPVX(meth) == PL_isa_DOES)
5498 ? newSVpvs_flags("DOES", SVs_TEMP)
5499 : meth));
5500 }
5501
5502 return SvSTASH(ob);
5503}
5504
5505PP(pp_method)
5506{
5507 dSP;
5508 GV* gv;
5509 HV* stash;
5510 SV* const meth = TOPs;
5511
5512 if (SvROK(meth)) {
5513 SV* const rmeth = SvRV(meth);
5514 if (SvTYPE(rmeth) == SVt_PVCV) {
5515 SETs(rmeth);
5516 RETURN;
5517 }
5518 }
5519
5520 stash = opmethod_stash(meth);
5521
5522 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
5523 assert(gv);
5524
5525 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
5526 RETURN;
5527}
5528
5529#define METHOD_CHECK_CACHE(stash,cache,meth) \
5530 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \
5531 if (he) { \
5532 gv = MUTABLE_GV(HeVAL(he)); \
5533 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \
5534 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \
5535 { \
5536 XPUSHs(MUTABLE_SV(GvCV(gv))); \
5537 RETURN; \
5538 } \
5539 } \
5540
5541PP(pp_method_named)
5542{
5543 dSP;
5544 GV* gv;
5545 SV* const meth = cMETHOPx_meth(PL_op);
5546 HV* const stash = opmethod_stash(meth);
5547
5548 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) {
5549 METHOD_CHECK_CACHE(stash, stash, meth);
5550 }
5551
5552 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
5553 assert(gv);
5554
5555 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
5556 RETURN;
5557}
5558
5559PP(pp_method_super)
5560{
5561 dSP;
5562 GV* gv;
5563 HV* cache;
5564 SV* const meth = cMETHOPx_meth(PL_op);
5565 HV* const stash = CopSTASH(PL_curcop);
5566 /* Actually, SUPER doesn't need the real object's (or class's) stash at
5567 * all, as it uses CopSTASH. However, we must still ensure that the
5568 * object (or class) is valid; that check is done by S_opmethod_stash() */
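    /* A minimal Perl-level sketch of that rule:
     *     package Derived;
     *     our @ISA = ('Base');
     *     sub init { my $self = shift; $self->SUPER::init(@_) }
     * SUPER::init is resolved starting from the package the call is
     * compiled in (Derived, via CopSTASH), not from whatever class $self
     * is actually blessed into. */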
5569 opmethod_stash(meth);
5570
5571 if ((cache = HvMROMETA(stash)->super)) {
5572 METHOD_CHECK_CACHE(stash, cache, meth);
5573 }
5574
5575 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
5576 assert(gv);
5577
5578 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
5579 RETURN;
5580}
5581
5582PP(pp_method_redir)
5583{
5584 dSP;
5585 GV* gv;
5586 SV* const meth = cMETHOPx_meth(PL_op);
5587 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
5588 opmethod_stash(meth); /* not used but needed for error checks */
5589
5590 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); }
5591 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
5592
5593 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK);
5594 assert(gv);
5595
5596 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
5597 RETURN;
5598}
5599
5600PP(pp_method_redir_super)
5601{
5602 dSP;
5603 GV* gv;
5604 HV* cache;
5605 SV* const meth = cMETHOPx_meth(PL_op);
5606 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0);
5607 opmethod_stash(meth); /* not used but needed for error checks */
5608
5609 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op));
5610 else if ((cache = HvMROMETA(stash)->super)) {
5611 METHOD_CHECK_CACHE(stash, cache, meth);
5612 }
5613
5614 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER);
5615 assert(gv);
5616
5617 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv));
5618 RETURN;
5619}
5620
5621/*
5622 * ex: set ts=8 sts=4 sw=4 et:
5623 */