OP *o;
size_t opsz, space;
+ /* We only allocate ops from the slab during subroutine compilation.
+ We find the slab via PL_compcv, hence that must be non-NULL. It could
+ also be pointing to a subroutine which is now fully set up (CvROOT()
+ pointing to the top of the optree for that sub), or a subroutine
+ which isn't using the slab allocator. If our sanity checks aren't met,
+ don't use a slab, but allocate the OP directly from the heap. */
if (!PL_compcv || CvROOT(PL_compcv)
|| (CvSTART(PL_compcv) && !CvSLABBED(PL_compcv)))
return PerlMemShared_calloc(1, sz);
- if (!CvSTART(PL_compcv)) { /* sneak it in here */
+#if defined(USE_ITHREADS) && IVSIZE > U32SIZE
+ /* Work around a goof with alignment on our part. For sparc32 (and
+ possibly other architectures), if built with -Duse64bitint, the IV
+ op_pmoffset in struct pmop should be 8 byte aligned, but the slab
+ allocator is only providing 4 byte alignment. The real fix is to change
+ the IV to a type the same size as a pointer, such as size_t, but we
+ can't do that without breaking the ABI, which is a no-no in a maint
+ release. So instead, simply allocate struct pmop directly, which will be
+ suitably aligned: */
+ if (sz == sizeof(struct pmop))
+ return PerlMemShared_calloc(1, sz);
+#endif
+
+ /* While the subroutine is under construction, the slabs are accessed via
+ CvSTART(), to avoid needing to expand PVCV by one pointer for something
+ unneeded at runtime. Once a subroutine is constructed, the slabs are
+ accessed via CvROOT(). So if CvSTART() is NULL, no slab has been
+ allocated yet. See the commit message for 8be227ab5eaa23f2 for more
+ details. */
+ if (!CvSTART(PL_compcv)) {
CvSTART(PL_compcv) =
(OP *)(slab = S_new_slab(aTHX_ PERL_SLAB_SIZE));
CvSLABBED_on(PL_compcv);
opsz = SIZE_TO_PSIZE(sz);
sz = opsz + OPSLOT_HEADER_P;
+ /* The slabs maintain a free list of OPs. In particular, constant folding
+ will free up OPs, so it makes sense to re-use them where possible. A
+ freed up slot is used in preference to a new allocation. */
if (slab->opslab_freed) {
OP **too = &slab->opslab_freed;
o = *too;
}
}
else if (len == 2 && name[1] == '_' && !is_our)
- /* diag_listed_as: Use of my $_ is deprecated */
- Perl_ck_warner_d(aTHX_ packWARN(WARN_DEPRECATED),
- "Use of %s $_ is deprecated",
+ /* diag_listed_as: Use of my $_ is experimental */
+ Perl_ck_warner_d(aTHX_ packWARN(WARN_EXPERIMENTAL__LEXICAL_TOPIC),
+ "Use of %s $_ is experimental",
PL_parser->in_my == KEY_state
? "state"
: "my");
FreeOp(o);
}
-#ifdef USE_ITHREADS
-# define forget_pmop(a,b) S_forget_pmop(aTHX_ a,b)
-#else
-# define forget_pmop(a,b) S_forget_pmop(aTHX_ a)
-#endif
-
/* Destructor */
void
#endif
if (still_valid) {
int try_downgrade = SvREFCNT(gv) == 2;
- SvREFCNT_dec(gv);
+ SvREFCNT_dec_NN(gv);
if (try_downgrade)
gv_try_downgrade(gv);
}
if (!(cPMOPo->op_pmflags & PMf_CODELIST_PRIVATE))
op_free(cPMOPo->op_code_list);
cPMOPo->op_code_list = NULL;
- forget_pmop(cPMOPo, 1);
+ forget_pmop(cPMOPo);
cPMOPo->op_pmreplrootu.op_pmreplroot = NULL;
/* we use the same protection as the "SAFE" version of the PM_ macros
* here since sv_clean_all might release some PMOPs
STATIC void
S_forget_pmop(pTHX_ PMOP *const o
-#ifdef USE_ITHREADS
- , U32 flags
-#endif
)
{
HV * const pmstash = PmopSTASH(o);
}
if (PL_curpm == o)
PL_curpm = NULL;
-#ifdef USE_ITHREADS
- if (flags)
- PmopSTASH_free(o);
-#endif
}
STATIC void
case OP_PUSHRE:
case OP_MATCH:
case OP_QR:
- forget_pmop((PMOP*)kid, 0);
+ forget_pmop((PMOP*)kid);
}
find_and_forget_pmops(kid);
kid = kid->op_sibling;
PERL_PV_PRETTY_DUMP
| PERL_PV_ESCAPE_NOCLEAR
| PERL_PV_ESCAPE_UNI_DETECT));
- SvREFCNT_dec(dsv);
+ SvREFCNT_dec_NN(dsv);
}
}
else if (SvOK(sv)) {
lexname = newSVpvn_share(key,
SvUTF8(sv) ? -(I32)keylen : (I32)keylen,
0);
- SvREFCNT_dec(sv);
+ SvREFCNT_dec_NN(sv);
*svp = lexname;
}
/* FALL THROUGH */
case OP_ASLICE:
case OP_HSLICE:
- if (type == OP_LEAVESUBLV)
- o->op_private |= OPpMAYBE_LVSUB;
localize = 1;
/* FALL THROUGH */
case OP_AASSIGN:
+ if (type == OP_LEAVESUBLV)
+ o->op_private |= OPpMAYBE_LVSUB;
+ /* FALL THROUGH */
case OP_NEXTSTATE:
case OP_DBSTATE:
PL_modcount = RETURN_UNLIMITED_NUMBER;
while (1) {
if (*s && strchr("@$%*", *s) && *++s
- && (isALNUM(*s) || UTF8_IS_CONTINUED(*s))) {
+ && (isWORDCHAR(*s) || UTF8_IS_CONTINUED(*s))) {
s++;
sigil = TRUE;
- while (*s && (isALNUM(*s) || UTF8_IS_CONTINUED(*s)))
+ while (*s && (isWORDCHAR(*s) || UTF8_IS_CONTINUED(*s)))
s++;
while (*s && (strchr(", \t\n", *s)))
s++;
case OP_LCFIRST:
case OP_UC:
case OP_LC:
+ case OP_FC:
case OP_SLT:
case OP_SGT:
case OP_SLE:
LINKLIST(expr);
- /* fix up DO blocks; treat each one as a separate little sub */
+ /* fix up DO blocks; treat each one as a separate little sub;
+ * also, mark any arrays as LIST/REF */
if (expr->op_type == OP_LIST) {
OP *o;
for (o = cLISTOPx(expr)->op_first; o; o = o->op_sibling) {
+
+ if (o->op_type == OP_PADAV || o->op_type == OP_RV2AV) {
+ assert( !(o->op_flags & OPf_WANT));
+ /* push the array rather than its contents. The regex
+ * engine will retrieve and join the elements later */
+ o->op_flags |= (OPf_WANT_LIST | OPf_REF);
+ continue;
+ }
+
if (!(o->op_type == OP_NULL && (o->op_flags & OPf_SPECIAL)))
continue;
o->op_next = NULL; /* undo temporary hack from above */
scalar(o);
LINKLIST(o);
if (cLISTOPo->op_first->op_type == OP_LEAVE) {
- LISTOP *leave = cLISTOPx(cLISTOPo->op_first);
+ LISTOP *leaveop = cLISTOPx(cLISTOPo->op_first);
/* skip ENTER */
- assert(leave->op_first->op_type == OP_ENTER);
- assert(leave->op_first->op_sibling);
- o->op_next = leave->op_first->op_sibling;
- /* skip LEAVE */
- assert(leave->op_flags & OPf_KIDS);
- assert(leave->op_last->op_next = (OP*)leave);
- leave->op_next = NULL; /* stop on last op */
- op_null((OP*)leave);
+ assert(leaveop->op_first->op_type == OP_ENTER);
+ assert(leaveop->op_first->op_sibling);
+ o->op_next = leaveop->op_first->op_sibling;
+ /* skip leave */
+ assert(leaveop->op_flags & OPf_KIDS);
+ assert(leaveop->op_last->op_next == (OP*)leaveop);
+ leaveop->op_next = NULL; /* stop on last op */
+ op_null((OP*)leaveop);
}
else {
/* skip SCOPE */
finalize_optree(o);
}
}
+ else if (expr->op_type == OP_PADAV || expr->op_type == OP_RV2AV) {
+ assert( !(expr->op_flags & OPf_WANT));
+ /* push the array rather than its contents. The regex
+ * engine will retrieve and join the elements later */
+ expr->op_flags |= (OPf_WANT_LIST | OPf_REF);
+ }
PL_hints |= HINT_BLOCK_SCOPE;
pm = (PMOP*)o;
U32 rx_flags = pm->op_pmflags & RXf_PMf_COMPILETIME;
regexp_engine const *eng = current_re_engine();
+ if (o->op_flags & OPf_SPECIAL)
+ rx_flags |= RXf_SPLIT;
+
if (!has_code || !eng->op_comp) {
/* compile-time simple constant pattern */
pm->op_pmflags |= PMf_CODELIST_PRIVATE;
}
+ if (o->op_flags & OPf_SPECIAL)
+ pm->op_pmflags |= PMf_SPLIT;
+
/* the OP_REGCMAYBE is a placeholder in the non-threaded case
* to allow its op_next to be pointed past the regcomp and
* preceding stacking ops;
Constructs, checks, and returns an op of any type that involves an
embedded C-level pointer (PV). I<type> is the opcode. I<flags> gives
the eight bits of C<op_flags>. I<pv> supplies the C-level pointer, which
-must have been allocated using L</PerlMemShared_malloc>; the memory will
+must have been allocated using C<PerlMemShared_malloc>; the memory will
be freed when the op is destroyed.
=cut
Constructs a state op (COP). The state op is normally a C<nextstate> op,
but will be a C<dbstate> op if debugging is enabled for currently-compiled
-code. The state op is populated from L</PL_curcop> (or L</PL_compiling>).
+code. The state op is populated from C<PL_curcop> (or C<PL_compiling>).
If I<label> is non-null, it supplies the name of a label to attach to
the state op; this function takes ownership of the memory pointed at by
I<label>, and will free it. I<flags> gives the eight bits of C<op_flags>
op_free(block);
SvREFCNT_dec(compcv);
PL_compcv = NULL;
- goto clone;
+ goto setname;
}
/* Checking whether outcv is CvOUTSIDE(compcv) is not sufficient to
determine whether this sub definition is in the same scope as its
cv = compcv;
*spot = cv;
}
+ setname:
if (!CvNAME_HEK(cv)) {
CvNAME_HEK_set(cv,
hek
0)
);
}
+ if (const_sv) goto clone;
+
CvFILE_set_from_cop(cv, PL_curcop);
CvSTASH_set(cv, PL_curstash);
&PadARRAY(PadlistARRAY(CvPADLIST(outcv))[CvDEPTH(outcv)])[pax];
if (reusable) cv_clone_into(clonee, *spot);
else *spot = cv_clone(clonee);
- SvREFCNT_dec(clonee);
+ SvREFCNT_dec_NN(clonee);
cv = *spot;
SvPADMY_on(cv);
}
),
cv, const_svp);
}
- SvREFCNT_dec(cv);
+ SvREFCNT_dec_NN(cv);
cv = NULL;
}
}
dVAR;
o->op_type = OP_PADCV;
o->op_ppaddr = PL_ppaddr[OP_PADCV];
- return o;
}
return newUNOP(OP_RV2CV, flags, scalar(o));
}
#endif
kUNOP->op_first = newop;
}
+ /* transforms OP_REFGEN into OP_SREFGEN, OP_CHOP into OP_SCHOP,
+ * and OP_CHOMP into OP_SCHOMP */
o->op_ppaddr = PL_ppaddr[++o->op_type];
return ck_fun(o);
}
* \ mark - glob - rv2cv
* | \ gv(CORE::GLOBAL::glob)
* |
- * \ null - const(wildcard) - const(ix)
+ * \ null - const(wildcard)
*/
o->op_flags |= OPf_SPECIAL;
o->op_targ = pad_alloc(OP_GLOB, SVs_PADTMP);
- op_append_elem(OP_GLOB, o,
- newSVOP(OP_CONST, 0, newSViv(PL_glob_index++)));
o = newLISTOP(OP_LIST, 0, o, NULL);
o = newUNOP(OP_ENTERSUB, OPf_STACKED,
op_append_elem(OP_LIST, o,
gv = (GV *)newSV(0);
gv_init(gv, 0, "", 0, 0);
gv_IOadd(gv);
-#ifndef PERL_EXTERNAL_GLOB
- sv_setiv(GvSVn(gv),PL_glob_index++);
-#endif
op_append_elem(OP_GLOB, o, newGVOP(OP_GV, 0, gv));
- SvREFCNT_dec(gv); /* newGVOP increased it */
+ SvREFCNT_dec_NN(gv); /* newGVOP increased it */
scalarkids(o);
return o;
}
if (kid)
kid = kid->op_sibling; /* get past "big" */
if (kid && kid->op_type == OP_CONST) {
- const bool save_taint = TAINT_get; /* accepted unused var warning if NO_TAINT_SUPPORT */
+ const bool save_taint = TAINT_get;
fbm_compile(((SVOP*)kid)->op_sv, 0);
TAINT_set(save_taint);
+#ifdef NO_TAINT_SUPPORT
+ PERL_UNUSED_VAR(save_taint);
+#endif
}
}
return ck_fun(o);
cLISTOPo->op_last = kid; /* There was only one element previously */
}
- if (kid->op_type == OP_CONST && !(kid->op_private & OPpCONST_FOLDED)) {
- SV * const sv = kSVOP->op_sv;
- if (SvPOK(sv) && SvCUR(sv) == 1 && *SvPVX(sv) == ' ')
- o->op_flags |= OPf_SPECIAL;
- }
if (kid->op_type != OP_MATCH || kid->op_flags & OPf_STACKED) {
OP * const sibl = kid->op_sibling;
kid->op_sibling = 0;
- kid = pmruntime( newPMOP(OP_MATCH, 0), kid, 0, 0);
+ kid = pmruntime( newPMOP(OP_MATCH, OPf_SPECIAL), kid, 0, 0); /* OPf_SPECIAL is used to trigger split " " behavior */
if (cLISTOPo->op_first == cLISTOPo->op_last)
cLISTOPo->op_last = kid;
cLISTOPo->op_first = kid;
=cut
*/
+/* shared by toke.c:yylex */
+/* Resolve the pad entry at offset 'off' in the currently-compiling sub
+   (PL_compcv) to the CV of a lexical sub.
+
+   If the pad name is marked OUTER, the sub is declared in an enclosing
+   scope: follow PARENT_PAD_INDEX up through CvOUTSIDE until we reach the
+   pad that actually owns the name.
+   NOTE(review): the loop reads CvOUTSIDE(PL_compcv), not CvOUTSIDE(compcv),
+   so each iteration restarts from PL_compcv — this matches the code it was
+   hoisted from (the removed OP_PADCV case in rv2cv_op_cv), but confirm it
+   is intended for names more than one scope away.
+
+   For a non-state (my) lexical sub that carries magic, the prototype CV is
+   stashed in PERL_MAGIC_proto magic on the pad name; otherwise the CV is
+   fetched from slot 'off' of pad [1] of the owning CV's padlist. */
+CV *
+Perl_find_lexical_cv(pTHX_ PADOFFSET off)
+{
+    PADNAME *name = PAD_COMPNAME(off);
+    CV *compcv = PL_compcv;
+    while (PadnameOUTER(name)) {
+	assert(PARENT_PAD_INDEX(name));
+	compcv = CvOUTSIDE(PL_compcv);
+	name = PadlistNAMESARRAY(CvPADLIST(compcv))
+	    [off = PARENT_PAD_INDEX(name)];
+    }
+    assert(!PadnameIsOUR(name));
+    if (!PadnameIsSTATE(name) && SvMAGICAL(name)) {
+	MAGIC * mg = mg_find(name, PERL_MAGIC_proto);
+	assert(mg);
+	assert(mg->mg_obj);
+	return (CV *)mg->mg_obj;
+    }
+    return (CV *)AvARRAY(PadlistARRAY(CvPADLIST(compcv))[1])[off];
+}
+
CV *
Perl_rv2cv_op_cv(pTHX_ OP *cvop, U32 flags)
{
gv = NULL;
} break;
case OP_PADCV: {
- PADNAME *name = PAD_COMPNAME(rvop->op_targ);
- CV *compcv = PL_compcv;
- PADOFFSET off = rvop->op_targ;
- while (PadnameOUTER(name)) {
- assert(PARENT_PAD_INDEX(name));
- compcv = CvOUTSIDE(PL_compcv);
- name = PadlistNAMESARRAY(CvPADLIST(compcv))
- [off = PARENT_PAD_INDEX(name)];
- }
- assert(!PadnameIsOUR(name));
- if (!PadnameIsSTATE(name)) {
- MAGIC * mg = mg_find(name, PERL_MAGIC_proto);
- assert(mg);
- assert(mg->mg_obj);
- cv = (CV *)mg->mg_obj;
- }
- else cv =
- (CV *)AvARRAY(PadlistARRAY(CvPADLIST(compcv))[1])[off];
+ cv = find_lexical_cv(rvop->op_targ);
gv = NULL;
} break;
default: {
really need is a new call checker API that accepts a
GV or string (or GV or CV). */
HEK * const hek = CvNAME_HEK(cv);
- assert(hek);
+ /* After a syntax error in a lexical sub, the cv that
+ rv2cv_op_cv returns may be a nameless stub. */
+	    if (!hek) return ck_entersub_args_list(o);
namegv = (GV *)sv_newmortal();
gv_init_pvn(namegv, PL_curstash, HEK_KEY(hek), HEK_LEN(hek),
SVf_UTF8 * !!HEK_UTF8(hek));