#define SIZE_TO_PSIZE(x) (((x) + sizeof(I32 *) - 1)/sizeof(I32 *))
#define DIFF(o,p) ((size_t)((I32 **)(p) - (I32**)(o)))
+/* malloc a new op slab (suitable for attaching to PL_compcv) */
+
static OPSLAB *
S_new_slab(pTHX_ size_t sz)
{
PerlIO_printf(Perl_debug_log, "%s", SvPVx_nolen(Perl_mess args)) \
)
+/* Returns a sz-sized block of memory (suitable for holding an op) from
+ * a free slot in the chain of op slabs attached to PL_compcv.
+ * Allocates a new slab if necessary.
+ * If PL_compcv isn't compiling, malloc() instead.
+ */
+
void *
Perl_Slab_Alloc(pTHX_ size_t sz)
{
}
#endif
+
+/* Return the block of memory used by an op to the free list of
+ * the OP slab associated with that op.
+ */
+
void
Perl_Slab_Free(pTHX_ void *op)
{
if (havepad) LEAVE;
}
+/* Free a chain of OP slabs. Should only be called after all ops contained
+ * in it have been freed. At this point, its reference count should be 1,
+ * because OpslabREFCNT_dec() skips doing rc-- when it detects that rc == 1,
+ * and just directly calls opslab_free().
+ * (Note that the reference count which PL_compcv held on the slab should
+ * have been removed once compilation of the sub was complete).
+ */
+
void
Perl_opslab_free(pTHX_ OPSLAB *slab)
{
} while (slab);
}
+/* like opslab_free(), but first calls op_free() on any ops in the slab
+ * not marked as OP_FREED
+ */
+
void
Perl_opslab_force_free(pTHX_ OPSLAB *slab)
{
if (o->op_flags & OPf_KIDS) {
OP *kid, *nextkid;
+ assert(cUNOPo->op_first); /* OPf_KIDS implies op_first non-null */
for (kid = cUNOPo->op_first; kid; kid = nextkid) {
nextkid = OpSIBLING(kid); /* Get before next freeing kid */
- if (!kid || kid->op_type == OP_FREED)
+ if (kid->op_type == OP_FREED)
/* During the forced freeing of ops after
compilation failure, kidops may be freed before
their parents. */
/* for a helem/hslice/kvslice, if its a fixed hash, croak on invalid
* const fields. Also, convert CONST keys to HEK-in-SVs.
- * rop is the op that retrieves the hash;
+ * rop is the op that retrieves the hash;
* key_op is the first key
+ * real: if false, only check (and possibly croak); don't update op
*/
STATIC void
-S_check_hash_fields_and_hekify(pTHX_ UNOP *rop, SVOP *key_op)
+S_check_hash_fields_and_hekify(pTHX_ UNOP *rop, SVOP *key_op, int real)
{
PADNAME *lexname;
GV **fields;
if ( !SvIsCOW_shared_hash(sv = *svp)
&& SvTYPE(sv) < SVt_PVMG
&& SvOK(sv)
- && !SvROK(sv))
+ && !SvROK(sv)
+ && real)
{
SSize_t keylen;
const char * const key = SvPV_const(sv, *(STRLEN*)&keylen);
/* see if any strings would grow if converted to utf8 */
if (!utf8) {
- char *p = (char*)argp->p;
- STRLEN len = argp->len;
- while (len--) {
- U8 c = *p++;
- if (!UTF8_IS_INVARIANT(c))
- variant++;
- }
+ variant += variant_under_utf8_count((U8 *) argp->p,
+ (U8 *) argp->p + argp->len);
}
}
check_keys:
if (o->op_private & OPpLVAL_INTRO || rop->op_type != OP_RV2HV)
rop = NULL;
- S_check_hash_fields_and_hekify(aTHX_ rop, key_op);
+ S_check_hash_fields_and_hekify(aTHX_ rop, key_op, 1);
break;
}
case OP_NULL:
start = LINKLIST(PL_main_root);
PL_main_root->op_next = 0;
S_process_optree(aTHX_ NULL, PL_main_root, start);
- cv_forget_slab(PL_compcv);
+ if (!PL_parser->error_count)
+ /* on error, leave CV slabbed so that ops left lying around
+ * will be cleaned up. Else unslab */
+ cv_forget_slab(PL_compcv);
PL_compcv = 0;
/* Register with debugger */
rx_flags |= RXf_SPLIT;
}
- /* Skip compiling if parser found an error for this pattern */
- if (pm->op_pmflags & PMf_HAS_ERROR) {
- return o;
- }
-
if (!has_code || !eng->op_comp) {
/* compile-time simple constant pattern */
# endif
}
#endif
- /* But we know that one op is using this CV's slab. */
- cv_forget_slab(PL_compcv);
+ /* This LEAVE_SCOPE will restore PL_compcv to point to the
+ * outer CV (the one whose slab holds the pm op). The
+ * inner CV (which holds expr) will be freed later, once
+ * all the entries on the parse stack have been popped on
+ * return from this function. Which is why it's safe to
+ * call op_free(expr) below.
+ */
LEAVE_SCOPE(floor);
pm->op_pmflags &= ~PMf_HAS_CV;
}
+ /* Skip compiling if parser found an error for this pattern */
+ if (pm->op_pmflags & PMf_HAS_ERROR) {
+ return o;
+ }
+
PM_SETRE(pm,
eng->op_comp
? eng->op_comp(aTHX_ NULL, 0, expr, eng, NULL, NULL,
}
else {
/* compile-time pattern that includes literal code blocks */
- REGEXP* re = eng->op_comp(aTHX_ NULL, 0, expr, eng, NULL, NULL,
+
+ REGEXP* re;
+
+ /* Skip compiling if parser found an error for this pattern */
+ if (pm->op_pmflags & PMf_HAS_ERROR) {
+ return o;
+ }
+
+ re = eng->op_comp(aTHX_ NULL, 0, expr, eng, NULL, NULL,
rx_flags,
(pm->op_pmflags |
((PL_hints & HINT_RE_EVAL) ? PMf_USE_RE_EVAL : 0))
Perl_vload_module(pTHX_ U32 flags, SV *name, SV *ver, va_list *args)
{
OP *veop, *imop;
- OP * const modname = newSVOP(OP_CONST, 0, name);
+ OP * modname;
+ I32 floor;
PERL_ARGS_ASSERT_VLOAD_MODULE;
+ /* utilize() fakes up a BEGIN { require ..; import ... }, so make sure
+ * that it has a PL_parser to play with while doing that, and also
+ * that it doesn't mess with any existing parser, by creating a tmp
+ * new parser with lex_start(). This won't actually be used for much,
+ * since pp_require() will create another parser for the real work.
+ * The ENTER/LEAVE pair protect callers from any side effects of use.
+ *
+ * start_subparse() creates a new PL_compcv. This means that any ops
+ * allocated below will be allocated from that CV's op slab, and so
+ * will be automatically freed if the utilize() fails
+ */
+
+ ENTER;
+ SAVEVPTR(PL_curcop);
+ lex_start(NULL, NULL, LEX_START_SAME_FILTER);
+ floor = start_subparse(FALSE, 0);
+
+ modname = newSVOP(OP_CONST, 0, name);
modname->op_private |= OPpCONST_BARE;
if (ver) {
veop = newSVOP(OP_CONST, 0, ver);
}
}
- /* utilize() fakes up a BEGIN { require ..; import ... }, so make sure
- * that it has a PL_parser to play with while doing that, and also
- * that it doesn't mess with any existing parser, by creating a tmp
- * new parser with lex_start(). This won't actually be used for much,
- * since pp_require() will create another parser for the real work.
- * The ENTER/LEAVE pair protect callers from any side effects of use. */
-
- ENTER;
- SAVEVPTR(PL_curcop);
- lex_start(NULL, NULL, LEX_START_SAME_FILTER);
- utilize(!(flags & PERL_LOADMOD_DENY), start_subparse(FALSE, 0),
- veop, modname, imop);
+ utilize(!(flags & PERL_LOADMOD_DENY), floor, veop, modname, imop);
LEAVE;
}
if (cv) { /* must reuse cv in case stub is referenced elsewhere */
/* transfer PL_compcv to cv */
if (block) {
+ bool free_file = CvFILE(cv) && CvDYNFILE(cv);
cv_flags_t preserved_flags =
CvFLAGS(cv) & (CVf_BUILTIN_ATTRS|CVf_NAMED);
PADLIST *const temp_padl = CvPADLIST(cv);
CvFLAGS(compcv) &= ~(CVf_SLABBED|CVf_WEAKOUTSIDE);
CvFLAGS(compcv) |= other_flags;
- if (CvFILE(cv) && CvDYNFILE(cv)) {
+ if (free_file) {
Safefree(CvFILE(cv));
+ CvFILE(cv) = NULL;
}
/* inner references to compcv must be fixed up ... */
if (const_sv)
goto clone;
+ if (CvFILE(cv) && CvDYNFILE(cv))
+ Safefree(CvFILE(cv));
CvFILE_set_from_cop(cv, PL_curcop);
CvSTASH_set(cv, PL_curstash);
if (cv) { /* must reuse cv if autoloaded */
/* transfer PL_compcv to cv */
if (block) {
+ bool free_file = CvFILE(cv) && CvDYNFILE(cv);
cv_flags_t existing_builtin_attrs = CvFLAGS(cv) & CVf_BUILTIN_ATTRS;
PADLIST *const temp_av = CvPADLIST(cv);
CV *const temp_cv = CvOUTSIDE(cv);
CvFLAGS(PL_compcv) &= ~(CVf_SLABBED|CVf_WEAKOUTSIDE);
CvFLAGS(PL_compcv) |= other_flags;
- if (CvFILE(cv) && CvDYNFILE(cv)) {
+ if (free_file) {
Safefree(CvFILE(cv));
}
CvFILE_set_from_cop(cv, PL_curcop);
return cv;
}
+/* Add a stub CV to a typeglob.
+ * This is the implementation of a forward declaration, 'sub foo;'
+ */
+
CV *
Perl_newSTUB(pTHX_ GV *gv, bool fake)
{
* the extra hassle for those edge cases */
break;
- if (pass) {
+ {
UNOP *rop = NULL;
OP * helem_op = o->op_next;
ASSUME( helem_op->op_type == OP_HELEM
- || helem_op->op_type == OP_NULL);
+ || helem_op->op_type == OP_NULL
+ || pass == 0);
if (helem_op->op_type == OP_HELEM) {
rop = (UNOP*)(((BINOP*)helem_op)->op_first);
if ( helem_op->op_private & OPpLVAL_INTRO
)
rop = NULL;
}
- S_check_hash_fields_and_hekify(aTHX_ rop, cSVOPo);
+ /* on first pass just check; on second pass
+ * hekify */
+ S_check_hash_fields_and_hekify(aTHX_ rop, cSVOPo,
+ pass);
+ }
+ if (pass) {
#ifdef USE_ITHREADS
/* Relocate sv to the pad for thread safety */
op_relocate_sv(&cSVOPo->op_sv, &o->op_targ);
=cut
*/
+
+/* use PERL_MAGIC_ext to call a function to free the xop structure when
+ * freeing PL_custom_ops */
+
+static int
+custom_op_register_free(pTHX_ SV *sv, MAGIC *mg)
+{
+    XOP *xop;
+
+    PERL_UNUSED_ARG(mg);
+    /* The XOP pointer was stored in the SV as a pointer-sized IV by the
+     * pre-5.14 backcompat registration path below; free the struct and
+     * the name/desc strings it owns (savepvn() copies made when the
+     * fake registration was created). */
+    xop = INT2PTR(XOP *, SvIV(sv));
+    safefree((void*)xop->xop_name);
+    safefree((void*)xop->xop_desc);
+    safefree(xop);
+    return 0;
+}
+
+
+/* Magic vtable whose only non-null entry is the 'free' hook, used to
+ * release the heap-allocated XOP of a faked-up backcompat registration
+ * when the PL_custom_ops entry is destroyed. */
+static const MGVTBL custom_op_register_vtbl = {
+    0, /* get */
+    0, /* set */
+    0, /* len */
+    0, /* clear */
+    custom_op_register_free, /* free */
+    0, /* copy */
+    0, /* dup */
+#ifdef MGf_LOCAL
+    0, /* local */
+#endif
+};
+
+
XOPRETANY
Perl_custom_op_get_field(pTHX_ const OP *o, const xop_flags_enum field)
{
if (PL_custom_ops)
he = hv_fetch_ent(PL_custom_ops, keysv, 0, 0);
- /* assume noone will have just registered a desc */
+ /* See if the op isn't registered, but its name *is* registered.
+ * That implies someone is using the pre-5.14 API, where only name and
+ * description could be registered. If so, fake up a real
+ * registration.
+ * We only check for an existing name, and assume no one will have
+ * just registered a desc */
if (!he && PL_custom_op_names &&
(he = hv_fetch_ent(PL_custom_op_names, keysv, 0, 0))
) {
XopENTRY_set(xop, xop_desc, savepvn(pv, l));
}
Perl_custom_op_register(aTHX_ o->op_ppaddr, xop);
+ he = hv_fetch_ent(PL_custom_ops, keysv, 0, 0);
+ /* add magic to the SV so that the xop struct (pointed to by
+ * SvIV(sv)) is freed. Normally a static xop is registered, but
+ * for this backcompat hack, we've alloced one */
+ (void)sv_magicext(HeVAL(he), NULL, PERL_MAGIC_ext,
+ &custom_op_register_vtbl, NULL, 0);
+
}
else {
if (!he)