+ SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */
+ SSize_t nargs;
+
+ PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS;
+
+ TAINT_NOT;
+
+ if (gimme == G_ARRAY) {
+ nargs = SP - from_sp;
+ from_sp++;
+ }
+ else {
+ assert(gimme == G_SCALAR);
+ if (UNLIKELY(from_sp >= SP)) {
+ /* no return args */
+ assert(from_sp == SP);
+ EXTEND(SP, 1);
+ *++SP = &PL_sv_undef;
+ to_sp = SP;
+ nargs = 0;
+ }
+ else {
+ from_sp = SP;
+ nargs = 1;
+ }
+ }
+
+ /* common code for G_SCALAR and G_ARRAY */
+
+ tmps_base = PL_tmps_floor + 1;
+
+ assert(nargs >= 0);
+ if (nargs) {
+ /* pointer version of tmps_base. Not safe across temp stack
+ * reallocs. */
+ SV **tmps_basep;
+
+ EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */
+ tmps_basep = PL_tmps_stack + tmps_base;
+
+ /* process each return arg */
+
+ do {
+ SV *sv = *from_sp++;
+
+ assert(PL_tmps_ix + nargs < PL_tmps_max);
+#ifdef DEBUGGING
+ /* PADTMPs with container set magic shouldn't appear in the
+ * wild. This assert is more important for pp_leavesublv(),
+ * but by testing for it here, we're more likely to catch
+ * bad cases (what with :lvalue subs not being widely
+ * deployed). The two issues are that for something like
+ * sub :lvalue { $tied{foo} }
+ * or
+ * sub :lvalue { substr($foo,1,2) }
+ * pp_leavesublv() will croak if the sub returns a PADTMP,
+ * and currently functions like pp_substr() return a mortal
+ * rather than using their PADTMP when returning a PVLV.
+ * This is because the PVLV will hold a ref to $foo,
+ * so $foo would get delayed in being freed while
+ * the PADTMP SV remained in the PAD.
+ * So if this assert fails it means either:
+ * 1) there is pp code similar to pp_substr that is
+ * returning a PADTMP instead of a mortal, and probably
+ * needs fixing, or
+ * 2) pp_leavesublv is making unwarranted assumptions
+ * about always croaking on a PADTMP
+ */
+ if (SvPADTMP(sv) && SvSMAGICAL(sv)) {
+ MAGIC *mg;
+ for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) {
+ assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type));
+ }
+ }
+#endif
+
+ if (
+ pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
+ : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1)
+ : pass == 2 ? (!SvPADTMP(sv))
+ : 1)
+ {
+ /* pass through: skip copy for logic or optimisation
+ * reasons; instead mortalise it, except that ... */
+ *++to_sp = sv;
+
+ if (SvTEMP(sv)) {
+ /* ... since this SV is an SvTEMP, we don't need to
+ * re-mortalise it; instead we just need to ensure
+ * that its existing entry in the temps stack frame
+ * ends up below the cut and so avoids being freed
+ * this time round. We mark it as needing to be kept
+ * by temporarily unsetting SvTEMP; then at the end,
+ * we shuffle any !SvTEMP entries on the tmps stack
+ * back below the cut.
+ * However, there's a significant chance that there's
+ * a 1:1 correspondence between the first few (or all)
+ * elements in the return args stack frame and those
+ * in the temps stack frame; e.g.:
+ * sub f { ....; map {...} .... },
+ * or if we're exiting multiple scopes and one of the
+ * inner scopes has already made mortal copies of each
+ * return arg.
+ *
+ * If so, this arg sv will correspond to the next item
+ * on the tmps stack above the cut, and so can be kept
+ * merely by moving the cut boundary up one, rather
+ * than messing with SvTEMP. If all args are 1:1 then
+ * we can avoid the sorting stage below completely.
+ *
+ * If there are no items above the cut on the tmps
+ * stack, then the SvTEMP must come from an item
+ * below the cut, so there's nothing to do.
+ */
+ if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) {
+ if (sv == *tmps_basep)
+ tmps_basep++;
+ else
+ SvTEMP_off(sv);
+ }
+ }
+ else if (!SvPADTMP(sv)) {
+ /* mortalise arg to avoid it being freed during save
+ * stack unwinding. Pad tmps don't need mortalising as
+ * they're never freed. This is the equivalent of
+ * sv_2mortal(SvREFCNT_inc(sv)), except that:
+ * * it assumes that the temps stack has already been
+ * extended;
+ * * it puts the new item at the cut rather than at
+ * ++PL_tmps_ix, moving the previous occupant there
+ * instead.
+ */
+ if (!SvIMMORTAL(sv)) {
+ SvREFCNT_inc_simple_void_NN(sv);
+ SvTEMP_on(sv);
+ /* Note that if there's nothing above the cut,
+ * this copies the garbage one slot above
+ * PL_tmps_ix onto itself. This is harmless (the
+ * stack's already been extended), but might in
+ * theory trigger warnings from tools like ASan
+ */
+ PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
+ *tmps_basep++ = sv;
+ }
+ }
+ }
+ else {
+ /* Make a mortal copy of the SV.
+ * The following code is the equivalent of sv_mortalcopy()
+ * except that:
+ * * it assumes the temps stack has already been extended;
+ * * it optimises the copying for some simple SV types;
+ * * it puts the new item at the cut rather than at
+ * ++PL_tmps_ix, moving the previous occupant there
+ * instead.
+ */
+ SV *newsv = newSV(0);
+
+ PL_tmps_stack[++PL_tmps_ix] = *tmps_basep;
+ /* put it on the tmps stack early so it gets freed if we die */
+ *tmps_basep++ = newsv;
+ *++to_sp = newsv;
+
+ if (SvTYPE(sv) <= SVt_IV) {
+ /* arg must be one of undef, IV/UV, or RV: skip
+ * sv_setsv_flags() and do the copy directly */
+ U32 dstflags;
+ U32 srcflags = SvFLAGS(sv);
+
+ assert(!SvGMAGICAL(sv));
+ if (srcflags & (SVf_IOK|SVf_ROK)) {
+ SET_SVANY_FOR_BODYLESS_IV(newsv);
+
+ if (srcflags & SVf_ROK) {
+ newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv));
+ /* SV type plus flags */
+ dstflags = (SVt_IV|SVf_ROK|SVs_TEMP);
+ }
+ else {
+ /* both src and dst are <= SVt_IV, so sv_any
+ * points to the head; so access the heads
+ * directly rather than going via sv_any.
+ */
+ assert( &(sv->sv_u.svu_iv)
+ == &(((XPVIV*) SvANY(sv))->xiv_iv));
+ assert( &(newsv->sv_u.svu_iv)
+ == &(((XPVIV*) SvANY(newsv))->xiv_iv));
+ newsv->sv_u.svu_iv = sv->sv_u.svu_iv;
+ /* SV type plus flags */
+ dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP
+ |(srcflags & SVf_IVisUV));
+ }
+ }
+ else {
+ assert(!(srcflags & SVf_OK));
+ dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */
+ }
+ SvFLAGS(newsv) = dstflags;
+
+ }
+ else {
+ /* do the full sv_setsv() */
+ SSize_t old_base;
+
+ SvTEMP_on(newsv);
+ old_base = tmps_basep - PL_tmps_stack;
+ SvGETMAGIC(sv);
+ sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV);
+ /* the mg_get or sv_setsv might have created new temps
+ * or realloced the tmps stack; regrow and reload */
+ EXTEND_MORTAL(nargs);
+ tmps_basep = PL_tmps_stack + old_base;
+ TAINT_NOT; /* Each item is independent */
+ }
+
+ }
+ } while (--nargs);
+
+ /* If there are any temps left above the cut, we need to sort
+ * them into those to keep and those to free. The only ones to
+ * keep are those for which we've temporarily unset SvTEMP.
+ * Work inwards from the two ends at tmps_basep .. PL_tmps_ix,
+ * swapping pairs as necessary. Stop when we meet in the middle.
+ */
+ {
+ SV **top = PL_tmps_stack + PL_tmps_ix;
+ while (tmps_basep <= top) {
+ SV *sv = *top;
+ if (SvTEMP(sv))
+ top--;
+ else {
+ SvTEMP_on(sv);
+ *top = *tmps_basep;
+ *tmps_basep = sv;
+ tmps_basep++;
+ }
+ }
+ }
+
+ tmps_base = tmps_basep - PL_tmps_stack;
+ }
+
+ PL_stack_sp = to_sp;
+
+ /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */
+ while (PL_tmps_ix >= tmps_base) {
+ SV* const sv = PL_tmps_stack[PL_tmps_ix--];
+#ifdef PERL_POISON
+ PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB);
+#endif
+ if (LIKELY(sv)) {
+ SvTEMP_off(sv);
+ SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */
+ }
+ }
+}
+
+
+/* also tail-called by pp_return */
+
+PP(pp_leavesub)
+{
+ U8 gimme;