}
}
}
+#else /* !defined(PL_OP_SLAB_ALLOC) */
+
+/* See the explanatory comments above struct opslab in op.h. */
+
+# ifndef PERL_SLAB_SIZE
+# define PERL_SLAB_SIZE 64
+# endif
+# ifndef PERL_MAX_SLAB_SIZE
+# define PERL_MAX_SLAB_SIZE 2048
+# endif
+
+/* Convert a byte count to a count of pointer-sized units, rounding up. */
+# define SIZE_TO_PSIZE(x) (((x) + sizeof(I32 *) - 1)/sizeof(I32 *))
+# define DIFF(o,p) ((size_t)((I32 **)(p) - (I32**)(o)))
+
+/* Allocate a zeroed slab of sz pointer-sized units.  Op slots are carved
+   downwards from the high end of the slab (see Perl_Slab_Alloc), so
+   opslab_first initially points at the last pointer-sized unit.
+   NOTE(review): the PerlMemShared_calloc result is not NULL-checked here;
+   presumably the shared allocator croaks/aborts on failure -- confirm. */
+static OPSLAB *
+S_new_slab(pTHX_ size_t sz)
+{
+    OPSLAB *slab = (OPSLAB *)PerlMemShared_calloc(sz, sizeof(I32 *));
+    slab->opslab_first = (OPSLOT *)((I32 **)slab + sz - 1);
+    return slab;
+}
+
+/* requires double parens and aTHX_ */
+#define DEBUG_S_warn(args) \
+ DEBUG_S( \
+ PerlIO_printf(Perl_debug_log, "%s", SvPVx_nolen(Perl_mess args)) \
+ )
+
+/* Allocate zeroed memory for an op of sz bytes.  While a sub is being
+   compiled (PL_compcv set, CvROOT not yet set), ops come from that CV's
+   slab chain, whose head slab is smuggled into CvSTART; otherwise a plain
+   PerlMemShared_calloc is used.  Freed slots on the slab's free chain are
+   reused when big enough; when the current slab runs out, a new slab of
+   up to twice the size (capped at PERL_MAX_SLAB_SIZE) is linked in. */
+void *
+Perl_Slab_Alloc(pTHX_ size_t sz)
+{
+    dVAR;
+    OPSLAB *slab;
+    OPSLAB *slab2;
+    OPSLOT *slot;
+    OP *o;
+    size_t opsz, space;
+
+    /* Not compiling, compilation already finished (CvROOT set), or the CV
+       was started without a slab: fall back to the plain allocator. */
+    if (!PL_compcv || CvROOT(PL_compcv)
+     || (CvSTART(PL_compcv) && !CvSLABBED(PL_compcv)))
+	return PerlMemShared_calloc(1, sz);
+
+    if (!CvSTART(PL_compcv)) { /* sneak it in here */
+	CvSTART(PL_compcv) =
+	    (OP *)(slab = S_new_slab(aTHX_ PERL_SLAB_SIZE));
+	CvSLABBED_on(PL_compcv);
+	slab->opslab_refcnt = 2; /* one for the CV; one for the new OP */
+    }
+    else ++(slab = (OPSLAB *)CvSTART(PL_compcv))->opslab_refcnt;
+
+    /* From here on, opsz and sz are in pointer-sized units, not bytes;
+       sz includes the per-slot header. */
+    opsz = SIZE_TO_PSIZE(sz);
+    sz = opsz + OPSLOT_HEADER_P;
+
+    /* First try to reuse a previously freed slot that is large enough.
+       The free chain is threaded through op_next of OP_FREED ops. */
+    if (slab->opslab_freed) {
+	OP **too = &slab->opslab_freed;
+	o = *too;
+	DEBUG_S_warn((aTHX_ "found free op at %p, slab %p", o, slab));
+	while (o && DIFF(OpSLOT(o), OpSLOT(o)->opslot_next) < sz) {
+	    DEBUG_S_warn((aTHX_ "Alas! too small"));
+	    o = *(too = &o->op_next);
+	    if (o) { DEBUG_S_warn((aTHX_ "found another free op at %p", o)); }
+	}
+	if (o) {
+	    /* Unlink the slot from the free chain and hand it back zeroed. */
+	    *too = o->op_next;
+	    Zero(o, opsz, I32 *);
+	    o->op_slabbed = 1;
+	    return (void *)o;
+	}
+    }
+
+/* Thread a fresh slot onto the front of slab2's slot list and point o at
+   its op.  Uses the enclosing function's slab, slab2, slot and o. */
+# define INIT_OPSLOT \
+	slot->opslot_slab = slab; \
+	slot->opslot_next = slab2->opslab_first; \
+	slab2->opslab_first = slot; \
+	o = &slot->opslot_op; \
+	o->op_slabbed = 1
+
+    /* The partially-filled slab is next in the chain. */
+    slab2 = slab->opslab_next ? slab->opslab_next : slab;
+    if ((space = DIFF(&slab2->opslab_slots, slab2->opslab_first)) < sz) {
+	/* Remaining space is too small. */
+
+	/* If we can fit a BASEOP, add it to the free chain, so as not
+	   to waste it. */
+	if (space >= SIZE_TO_PSIZE(sizeof(OP)) + OPSLOT_HEADER_P) {
+	    slot = &slab2->opslab_slots;
+	    INIT_OPSLOT;
+	    o->op_type = OP_FREED;
+	    o->op_next = slab->opslab_freed;
+	    slab->opslab_freed = o;
+	}
+
+	/* Create a new slab.  Make this one twice as big. */
+	slot = slab2->opslab_first;
+	while (slot->opslot_next) slot = slot->opslot_next;
+	slab2 = S_new_slab(aTHX_ DIFF(slab2, slot)*2 > PERL_MAX_SLAB_SIZE
+			? PERL_MAX_SLAB_SIZE
+			: DIFF(slab2, slot)*2);
+	slab2->opslab_next = slab->opslab_next;
+	slab->opslab_next = slab2;
+    }
+    assert(DIFF(&slab2->opslab_slots, slab2->opslab_first) >= sz);
+
+    /* Create a new op slot, carved downwards from opslab_first. */
+    slot = (OPSLOT *)((I32 **)slab2->opslab_first - sz);
+    assert(slot >= &slab2->opslab_slots);
+    /* If the leftover gap below the new slot could not hold even a BASEOP,
+       absorb it into this slot rather than stranding it. */
+    if (DIFF(&slab2->opslab_slots, slot)
+	 < SIZE_TO_PSIZE(sizeof(OP)) + OPSLOT_HEADER_P)
+	slot = &slab2->opslab_slots;
+    INIT_OPSLOT;
+    DEBUG_S_warn((aTHX_ "allocating op at %p, slab %p", o, slab));
+    return (void *)o;
+}
+
+# undef INIT_OPSLOT
+
+/* This cannot possibly be right, but it was copied from the old slab
+ allocator, to which it was originally added, without explanation, in
+ commit 083fcd5. */
+# ifdef NETWARE
+# define PerlMemShared PerlMem
+# endif
+
+/* Release an op's memory.  Ops not allocated from a slab go straight back
+   to the shared allocator.  Slabbed ops are marked OP_FREED, pushed onto
+   their slab's free chain (threaded through op_next) for reuse, and the
+   slab's refcount is decremented, which may free the whole slab chain. */
+void
+Perl_Slab_Free(pTHX_ void *op)
+{
+    dVAR;
+    OP * const o = (OP *)op;
+    OPSLAB *slab;
+
+    PERL_ARGS_ASSERT_SLAB_FREE;
+
+    if (!o->op_slabbed) {
+	PerlMemShared_free(op);
+	return;
+    }
+
+    slab = OpSLAB(o);
+    /* If this op is already freed, our refcount will get screwy. */
+    assert(o->op_type != OP_FREED);
+    o->op_type = OP_FREED;
+    o->op_next = slab->opslab_freed;
+    slab->opslab_freed = o;
+    DEBUG_S_warn((aTHX_ "free op at %p, recorded in slab %p", o, slab));
+    OpslabREFCNT_dec_padok(slab);
+}
+
+/* Free a slab chain as opslab_free does, but first swap in a null pad
+   (restored via LEAVE) if a pad is currently active.
+   NOTE(review): presumably this protects code run during the free from
+   touching a pad that belongs to the CV being torn down -- confirm. */
+void
+Perl_opslab_free_nopad(pTHX_ OPSLAB *slab)
+{
+    dVAR;
+    const bool havepad = !!PL_comppad;
+    PERL_ARGS_ASSERT_OPSLAB_FREE_NOPAD;
+    if (havepad) {
+	ENTER;
+	PAD_SAVE_SETNULLPAD();
+    }
+    opslab_free(slab);
+    if (havepad) LEAVE;
+}
+
+/* Free an entire slab chain.  Must only be called once the head slab's
+   refcount has dropped to its final reference (asserted below).  Under
+   DEBUGGING the refcount is poisoned to ~0 so that a later use of a freed
+   slab can be detected (see the assertion added to op_free). */
+void
+Perl_opslab_free(pTHX_ OPSLAB *slab)
+{
+    dVAR;
+    OPSLAB *slab2;
+    PERL_ARGS_ASSERT_OPSLAB_FREE;
+    DEBUG_S_warn((aTHX_ "freeing slab %p", slab));
+    assert(slab->opslab_refcnt == 1);
+    for (; slab; slab = slab2) {
+	slab2 = slab->opslab_next;
+# ifdef DEBUGGING
+	slab->opslab_refcnt = ~(size_t)0;
+# endif
+	PerlMemShared_free(slab);
+    }
+}
+
+/* Walk every slot in the slab chain and op_free each op that is still
+   live (not OP_FREED) and not protected by the savestack (op_savefree),
+   then free the chain itself -- unless savestack entries still hold
+   references, in which case it is left for them to release. */
+void
+Perl_opslab_force_free(pTHX_ OPSLAB *slab)
+{
+    OPSLAB *slab2;
+    OPSLOT *slot;
+# ifdef DEBUGGING
+    size_t savestack_count = 0;
+# endif
+    PERL_ARGS_ASSERT_OPSLAB_FORCE_FREE;
+    slab2 = slab;
+    do {
+	for (slot = slab2->opslab_first;
+	     slot->opslot_next;
+	     slot = slot->opslot_next) {
+	    if (slot->opslot_op.op_type != OP_FREED
+	     && !(slot->opslot_op.op_savefree
+# ifdef DEBUGGING
+		  && ++savestack_count
+# endif
+		 )
+	    ) {
+		assert(slot->opslot_op.op_slabbed);
+		/* Hold an extra reference across op_free: freeing this op
+		   drops the refcount and could otherwise free the slab out
+		   from under the iteration. */
+		slab->opslab_refcnt++; /* op_free may free slab */
+		op_free(&slot->opslot_op);
+		if (!--slab->opslab_refcnt) goto free;
+	    }
+	}
+    } while ((slab2 = slab2->opslab_next));
+    /* > 1 because the CV still holds a reference count. */
+    if (slab->opslab_refcnt > 1) { /* still referenced by the savestack */
+# ifdef DEBUGGING
+	assert(savestack_count == slab->opslab_refcnt-1);
+# endif
+	return;
+    }
+    free:
+    opslab_free(slab);
+}
+
#endif
/*
* In the following definition, the ", (OP*)0" is just to make the compiler
dVAR;
OPCODE type;
- if (!o)
+#ifndef PL_OP_SLAB_ALLOC
+ /* Though ops may be freed twice, freeing the op after its slab is a
+ big no-no. */
+ assert(!o || !o->op_slabbed || OpSLAB(o)->opslab_refcnt != ~(size_t)0);
+#endif
+ /* During the forced freeing of ops after compilation failure, kidops
+ may be freed before their parents. */
+ if (!o || o->op_type == OP_FREED)
return;
if (o->op_latefreed) {
if (o->op_latefree)
PL_main_root->op_next = 0;
CALL_PEEP(PL_main_start);
finalize_optree(PL_main_root);
+ cv_forget_slab(PL_compcv);
PL_compcv = 0;
/* Register with debugger */
PERL_ARGS_ASSERT_OP_INTEGERIZE;
- /* integerize op, unless it happens to be C<-foo>.
- * XXX should pp_i_negate() do magic string negation instead? */
- if ((PL_opargs[type] & OA_OTHERINT) && (PL_hints & HINT_INTEGER)
- && !(type == OP_NEGATE && cUNOPo->op_first->op_type == OP_CONST
- && (cUNOPo->op_first->op_private & OPpCONST_BARE)))
+ /* integerize op. */
+ if ((PL_opargs[type] & OA_OTHERINT) && (PL_hints & HINT_INTEGER))
{
dVAR;
o->op_ppaddr = PL_ppaddr[type = ++(o->op_type)];
if (type == OP_RV2GV)
newop = newGVOP(OP_GV, 0, MUTABLE_GV(sv));
else
- newop = newSVOP(OP_CONST, 0, MUTABLE_SV(sv));
+ newop = newSVOP(OP_CONST, OPpCONST_FOLDED<<8, MUTABLE_SV(sv));
op_getmad(o,newop,'f');
return newop;
* confident that nothing used that CV's pad while the
* regex was parsed */
assert(AvFILLp(PL_comppad) == 0); /* just @_ */
+ /* But we know that one op is using this CV's slab. */
+ cv_forget_slab(PL_compcv);
LEAVE_SCOPE(floor);
pm->op_pmflags &= ~PMf_HAS_CV;
}
* pad_fixup_inner_anons() can find it */
(void)pad_add_anon(cv, o->op_type);
SvREFCNT_inc_simple_void(cv);
+
+ cv_forget_slab(cv);
}
else {
pm->op_code_list = expr;
svop->op_sv = sv;
svop->op_next = (OP*)svop;
svop->op_flags = (U8)flags;
+ svop->op_private = (U8)(0 | (flags >> 8));
if (PL_opargs[type] & OA_RETSCALAR)
scalar((OP*)svop);
if (PL_opargs[type] & OA_TARGET)
|| other->op_type == OP_TRANS)
/* Mark the op as being unbindable with =~ */
other->op_flags |= OPf_SPECIAL;
+ else if (other->op_type == OP_CONST)
+ other->op_private |= OPpCONST_FOLDED;
return other;
}
else {
|| live->op_type == OP_TRANS || live->op_type == OP_TRANSR)
/* Mark the op as being unbindable with =~ */
live->op_flags |= OPf_SPECIAL;
+ else if (live->op_type == OP_CONST)
+ live->op_private |= OPpCONST_FOLDED;
return live;
}
NewOp(1101, logop, 1, LOGOP);
scalar(listop);
o = new_logop(OP_AND, 0, &expr, &listop);
if (o == expr && o->op_type == OP_CONST && !SvTRUE(cSVOPo->op_sv)) {
- op_free(expr); /* oops, it's a while (0) */
op_free((OP*)loop);
- return NULL; /* listop already freed by new_logop */
+ return expr; /* listop already freed by new_logop */
}
if (listop)
((LISTOP*)listop)->op_last->op_next =
/* for my $x () sets OPpLVAL_INTRO;
* for our $x () sets OPpOUR_INTRO */
loop->op_private = (U8)iterpflags;
-#ifdef PL_OP_SLAB_ALLOC
+#ifndef PL_OP_SLAB_ALLOC
+ if (loop->op_slabbed
+ && DIFF(loop, OpSLOT(loop)->opslot_next)
+ < SIZE_TO_PSIZE(sizeof(LOOP)))
+#endif
{
LOOP *tmp;
NewOp(1234,tmp,1,LOOP);
S_op_destroy(aTHX_ (OP*)loop);
loop = tmp;
}
-#else
- loop = (LOOP*)PerlMemShared_realloc(loop, sizeof(LOOP));
+#ifndef PL_OP_SLAB_ALLOC
+ else if (!loop->op_slabbed)
+ loop = (LOOP*)PerlMemShared_realloc(loop, sizeof(LOOP));
#endif
loop->op_targ = padoff;
wop = newWHILEOP(flags, 1, loop, newOP(OP_ITER, 0), block, cont, 0);
SvREFCNT_inc_simple_void_NN(const_sv);
if (cv) {
assert(!CvROOT(cv) && !CvCONST(cv));
+ cv_forget_slab(cv);
sv_setpvs(MUTABLE_SV(cv), ""); /* prototype is "" */
CvXSUBANY(cv).any_ptr = const_sv;
CvXSUB(cv) = const_sv_xsub;
cv_flags_t existing_builtin_attrs = CvFLAGS(cv) & CVf_BUILTIN_ATTRS;
AV *const temp_av = CvPADLIST(cv);
CV *const temp_cv = CvOUTSIDE(cv);
+ const cv_flags_t slabbed = CvSLABBED(cv);
+ OP * const cvstart = CvSTART(cv);
assert(!CvWEAKOUTSIDE(cv));
assert(!CvCVGV_RC(cv));
CvPADLIST(cv) = CvPADLIST(PL_compcv);
CvOUTSIDE(PL_compcv) = temp_cv;
CvPADLIST(PL_compcv) = temp_av;
+ CvSTART(cv) = CvSTART(PL_compcv);
+ CvSTART(PL_compcv) = cvstart;
+ if (slabbed) CvSLABBED_on(PL_compcv);
+ else CvSLABBED_off(PL_compcv);
if (CvFILE(cv) && CvDYNFILE(cv)) {
Safefree(CvFILE(cv));
: newUNOP(OP_LEAVESUB, 0, scalarseq(block));
CvROOT(cv)->op_private |= OPpREFCOUNTED;
OpREFCNT_set(CvROOT(cv), 1);
+#ifndef PL_OP_SLAB_ALLOC
+ /* The cv no longer needs to hold a refcount on the slab, as CvROOT
+ itself has a refcount. */
+ CvSLABBED_off(cv);
+ OpslabREFCNT_dec_padok((OPSLAB *)CvSTART(cv));
+#endif
CvSTART(cv) = LINKLIST(CvROOT(cv));
CvROOT(cv)->op_next = 0;
CALL_PEEP(CvSTART(cv));
CvROOT(cv)->op_next = 0;
CALL_PEEP(CvSTART(cv));
finalize_optree(CvROOT(cv));
+ cv_forget_slab(cv);
#ifdef PERL_MAD
op_getmad(o,pegop,'n');
op_getmad_weak(block, pegop, 'b');
int descending;
GV *gv;
const char *gvname;
+ bool have_scopeop;
PERL_ARGS_ASSERT_SIMPLIFY_SORT;
GvMULTI_on(gv_fetchpvs("a", GV_ADD|GV_NOTQUAL, SVt_PV));
GvMULTI_on(gv_fetchpvs("b", GV_ADD|GV_NOTQUAL, SVt_PV));
kid = kUNOP->op_first; /* get past null */
- if (kid->op_type != OP_SCOPE)
+ if (!(have_scopeop = kid->op_type == OP_SCOPE)
+ && kid->op_type != OP_LEAVE)
return;
kid = kLISTOP->op_last; /* get past scope */
switch(kid->op_type) {
case OP_NCMP:
case OP_I_NCMP:
case OP_SCMP:
+ if (!have_scopeop) goto padkids;
break;
default:
return;
}
k = kid; /* remember this node*/
- if (kBINOP->op_first->op_type != OP_RV2SV)
+ if (kBINOP->op_first->op_type != OP_RV2SV
+ || kBINOP->op_last ->op_type != OP_RV2SV)
+ {
+ /*
+ Warn about my($a) or my($b) in a sort block, *if* $a or $b is
+ then used in a comparison. This catches most, but not
+ all cases. For instance, it catches
+ sort { my($a); $a <=> $b }
+ but not
+ sort { my($a); $a < $b ? -1 : $a == $b ? 0 : 1; }
+ (although why you'd do that is anyone's guess).
+ */
+
+ padkids:
+ if (!ckWARN(WARN_SYNTAX)) return;
+ kid = kBINOP->op_first;
+ do {
+ if (kid->op_type == OP_PADSV) {
+ SV * const name = AvARRAY(PL_comppad_name)[kid->op_targ];
+ if (SvCUR(name) == 2 && *SvPVX(name) == '$'
+ && (SvPVX(name)[1] == 'a' || SvPVX(name)[1] == 'b'))
+ /* diag_listed_as: "my %s" used in sort comparison */
+ Perl_warner(aTHX_ packWARN(WARN_SYNTAX),
+ "\"%s %s\" used in sort comparison",
+ SvPAD_STATE(name) ? "state" : "my",
+ SvPVX(name));
+ }
+ } while ((kid = kid->op_sibling));
return;
+ }
kid = kBINOP->op_first; /* get past cmp */
if (kUNOP->op_first->op_type != OP_GV)
return;
return;
kid = k; /* back to cmp */
- if (kBINOP->op_last->op_type != OP_RV2SV)
- return;
+ /* already checked above that it is rv2sv */
kid = kBINOP->op_last; /* down to 2nd arg */
if (kUNOP->op_first->op_type != OP_GV)
return;