X-Git-Url: https://perl5.git.perl.org/perl5.git/blobdiff_plain/51c777cac7e1ad2a9910a6b33dc5e64d48281952..a310a8f2bf41061e1bf6feadf7d6758f96b481c5:/op.c?ds=sidebyside diff --git a/op.c b/op.c index 67f0cf1..1406ffc 100644 --- a/op.c +++ b/op.c @@ -109,215 +109,52 @@ recursive, but it's recursive on basic blocks, not on tree nodes. #define CALL_RPEEP(o) PL_rpeepp(aTHX_ o) #define CALL_OPFREEHOOK(o) if (PL_opfreehook) PL_opfreehook(aTHX_ o) -#if defined(PL_OP_SLAB_ALLOC) +/* See the explanatory comments above struct opslab in op.h. */ #ifdef PERL_DEBUG_READONLY_OPS -# define PERL_SLAB_SIZE 4096 +# define PERL_SLAB_SIZE 128 +# define PERL_MAX_SLAB_SIZE 4096 # include #endif #ifndef PERL_SLAB_SIZE -#define PERL_SLAB_SIZE 2048 -#endif - -void * -Perl_Slab_Alloc(pTHX_ size_t sz) -{ - dVAR; - /* - * To make incrementing use count easy PL_OpSlab is an I32 * - * To make inserting the link to slab PL_OpPtr is I32 ** - * So compute size in units of sizeof(I32 *) as that is how Pl_OpPtr increments - * Add an overhead for pointer to slab and round up as a number of pointers - */ - sz = (sz + 2*sizeof(I32 *) -1)/sizeof(I32 *); - if ((PL_OpSpace -= sz) < 0) { -#ifdef PERL_DEBUG_READONLY_OPS - /* We need to allocate chunk by chunk so that we can control the VM - mapping */ - PL_OpPtr = (I32**) mmap(0, PERL_SLAB_SIZE*sizeof(I32*), PROT_READ|PROT_WRITE, - MAP_ANON|MAP_PRIVATE, -1, 0); - - DEBUG_m(PerlIO_printf(Perl_debug_log, "mapped %lu at %p\n", - (unsigned long) PERL_SLAB_SIZE*sizeof(I32*), - PL_OpPtr)); - if(PL_OpPtr == MAP_FAILED) { - perror("mmap failed"); - abort(); - } -#else - - PL_OpPtr = (I32 **) PerlMemShared_calloc(PERL_SLAB_SIZE,sizeof(I32*)); -#endif - if (!PL_OpPtr) { - return NULL; - } - /* We reserve the 0'th I32 sized chunk as a use count */ - PL_OpSlab = (I32 *) PL_OpPtr; - /* Reduce size by the use count word, and by the size we need. - * Latter is to mimic the '-=' in the if() above - */ - PL_OpSpace = PERL_SLAB_SIZE - (sizeof(I32)+sizeof(I32 **)-1)/sizeof(I32 **) - sz; - /* Allocation pointer starts at the top. - Theory: because we build leaves before trunk allocating at end - means that at run time access is cache friendly upward - */ - PL_OpPtr += PERL_SLAB_SIZE; - -#ifdef PERL_DEBUG_READONLY_OPS - /* We remember this slab. */ - /* This implementation isn't efficient, but it is simple. */ - PL_slabs = (I32**) realloc(PL_slabs, sizeof(I32**) * (PL_slab_count + 1)); - PL_slabs[PL_slab_count++] = PL_OpSlab; - DEBUG_m(PerlIO_printf(Perl_debug_log, "Allocate %p\n", PL_OpSlab)); -#endif - } - assert( PL_OpSpace >= 0 ); - /* Move the allocation pointer down */ - PL_OpPtr -= sz; - assert( PL_OpPtr > (I32 **) PL_OpSlab ); - *PL_OpPtr = PL_OpSlab; /* Note which slab it belongs to */ - (*PL_OpSlab)++; /* Increment use count of slab */ - assert( PL_OpPtr+sz <= ((I32 **) PL_OpSlab + PERL_SLAB_SIZE) ); - assert( *PL_OpSlab > 0 ); - return (void *)(PL_OpPtr + 1); -} - -#ifdef PERL_DEBUG_READONLY_OPS -void -Perl_pending_Slabs_to_ro(pTHX) { - /* Turn all the allocated op slabs read only. */ - U32 count = PL_slab_count; - I32 **const slabs = PL_slabs; - - /* Reset the array of pending OP slabs, as we're about to turn this lot - read only. Also, do it ahead of the loop in case the warn triggers, - and a warn handler has an eval */ - - PL_slabs = NULL; - PL_slab_count = 0; - - /* Force a new slab for any further allocation. 
*/ - PL_OpSpace = 0; - - while (count--) { - void *const start = slabs[count]; - const size_t size = PERL_SLAB_SIZE* sizeof(I32*); - if(mprotect(start, size, PROT_READ)) { - Perl_warn(aTHX_ "mprotect for %p %lu failed with %d", - start, (unsigned long) size, errno); - } - } - - free(slabs); -} - -STATIC void -S_Slab_to_rw(pTHX_ void *op) -{ - I32 * const * const ptr = (I32 **) op; - I32 * const slab = ptr[-1]; - - PERL_ARGS_ASSERT_SLAB_TO_RW; - - assert( ptr-1 > (I32 **) slab ); - assert( ptr < ( (I32 **) slab + PERL_SLAB_SIZE) ); - assert( *slab > 0 ); - if(mprotect(slab, PERL_SLAB_SIZE*sizeof(I32*), PROT_READ|PROT_WRITE)) { - Perl_warn(aTHX_ "mprotect RW for %p %lu failed with %d", - slab, (unsigned long) PERL_SLAB_SIZE*sizeof(I32*), errno); - } -} - -OP * -Perl_op_refcnt_inc(pTHX_ OP *o) -{ - if(o) { - Slab_to_rw(o); - ++o->op_targ; - } - return o; - -} - -PADOFFSET -Perl_op_refcnt_dec(pTHX_ OP *o) -{ - PERL_ARGS_ASSERT_OP_REFCNT_DEC; - Slab_to_rw(o); - return --o->op_targ; -} -#else -# define Slab_to_rw(op) +# define PERL_SLAB_SIZE 64 #endif - -void -Perl_Slab_Free(pTHX_ void *op) -{ - I32 * const * const ptr = (I32 **) op; - I32 * const slab = ptr[-1]; - PERL_ARGS_ASSERT_SLAB_FREE; - assert( ptr-1 > (I32 **) slab ); - assert( ptr < ( (I32 **) slab + PERL_SLAB_SIZE) ); - assert( *slab > 0 ); - Slab_to_rw(op); - if (--(*slab) == 0) { -# ifdef NETWARE -# define PerlMemShared PerlMem -# endif - -#ifdef PERL_DEBUG_READONLY_OPS - U32 count = PL_slab_count; - /* Need to remove this slab from our list of slabs */ - if (count) { - while (count--) { - if (PL_slabs[count] == slab) { - dVAR; - /* Found it. Move the entry at the end to overwrite it. */ - DEBUG_m(PerlIO_printf(Perl_debug_log, - "Deallocate %p by moving %p from %lu to %lu\n", - PL_OpSlab, - PL_slabs[PL_slab_count - 1], - PL_slab_count, count)); - PL_slabs[count] = PL_slabs[--PL_slab_count]; - /* Could realloc smaller at this point, but probably not - worth it. */ - if(munmap(slab, PERL_SLAB_SIZE*sizeof(I32*))) { - perror("munmap failed"); - abort(); - } - break; - } - } - } -#else - PerlMemShared_free(slab); +#ifndef PERL_MAX_SLAB_SIZE +# define PERL_MAX_SLAB_SIZE 2048 #endif - if (slab == PL_OpSlab) { - PL_OpSpace = 0; - } - } -} -#else /* !defined(PL_OP_SLAB_ALLOC) */ - -/* See the explanatory comments above struct opslab in op.h. 
*/ - -# ifndef PERL_SLAB_SIZE -# define PERL_SLAB_SIZE 64 -# endif /* rounds up to nearest pointer */ -# define SIZE_TO_PSIZE(x) (((x) + sizeof(I32 *) - 1)/sizeof(I32 *)) -# define DIFF(o,p) ((size_t)((I32 **)(p) - (I32**)(o))) +#define SIZE_TO_PSIZE(x) (((x) + sizeof(I32 *) - 1)/sizeof(I32 *)) +#define DIFF(o,p) ((size_t)((I32 **)(p) - (I32**)(o))) static OPSLAB * S_new_slab(pTHX_ size_t sz) { +#ifdef PERL_DEBUG_READONLY_OPS + OPSLAB *slab = (OPSLAB *) mmap(0, sz * sizeof(I32 *), + PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0); + DEBUG_m(PerlIO_printf(Perl_debug_log, "mapped %lu at %p\n", + (unsigned long) sz, slab)); + if (slab == MAP_FAILED) { + perror("mmap failed"); + abort(); + } + slab->opslab_size = (U16)sz; +#else OPSLAB *slab = (OPSLAB *)PerlMemShared_calloc(sz, sizeof(I32 *)); +#endif slab->opslab_first = (OPSLOT *)((I32 **)slab + sz - 1); return slab; } +/* requires double parens and aTHX_ */ +#define DEBUG_S_warn(args) \ + DEBUG_S( \ + PerlIO_printf(Perl_debug_log, "%s", SvPVx_nolen(Perl_mess args)) \ + ) + void * Perl_Slab_Alloc(pTHX_ size_t sz) { @@ -326,7 +163,7 @@ Perl_Slab_Alloc(pTHX_ size_t sz) OPSLAB *slab2; OPSLOT *slot; OP *o; - size_t space; + size_t opsz, space; if (!PL_compcv || CvROOT(PL_compcv) || (CvSTART(PL_compcv) && !CvSLABBED(PL_compcv))) @@ -340,28 +177,27 @@ Perl_Slab_Alloc(pTHX_ size_t sz) } else ++(slab = (OPSLAB *)CvSTART(PL_compcv))->opslab_refcnt; - sz = SIZE_TO_PSIZE(sz) + OPSLOT_HEADER_P; + opsz = SIZE_TO_PSIZE(sz); + sz = opsz + OPSLOT_HEADER_P; if (slab->opslab_freed) { OP **too = &slab->opslab_freed; o = *too; - DEBUG_S(Perl_warn(aTHX_ "found free op at %p, slab %p", o, slab)); + DEBUG_S_warn((aTHX_ "found free op at %p, slab %p", o, slab)); while (o && DIFF(OpSLOT(o), OpSLOT(o)->opslot_next) < sz) { - DEBUG_S(Perl_warn(aTHX_ "Alas! too small")); + DEBUG_S_warn((aTHX_ "Alas! too small")); o = *(too = &o->op_next); - DEBUG_S( - if(o) Perl_warn(aTHX_ "found another free op at %p", o) - ); + if (o) { DEBUG_S_warn((aTHX_ "found another free op at %p", o)); } } if (o) { *too = o->op_next; - Zero(o, DIFF(o, OpSLOT(o)->opslot_next), I32 *); + Zero(o, opsz, I32 *); o->op_slabbed = 1; return (void *)o; } } -# define INIT_OPSLOT \ +#define INIT_OPSLOT \ slot->opslot_slab = slab; \ slot->opslot_next = slab2->opslab_first; \ slab2->opslab_first = slot; \ @@ -373,8 +209,6 @@ Perl_Slab_Alloc(pTHX_ size_t sz) if ((space = DIFF(&slab2->opslab_slots, slab2->opslab_first)) < sz) { /* Remaining space is too small. */ - OPSLAB *newslab; - /* If we can fit a BASEOP, add it to the free chain, so as not to waste it. */ if (space >= SIZE_TO_PSIZE(sizeof(OP)) + OPSLOT_HEADER_P) { @@ -388,9 +222,12 @@ Perl_Slab_Alloc(pTHX_ size_t sz) /* Create a new slab. Make this one twice as big. */ slot = slab2->opslab_first; while (slot->opslot_next) slot = slot->opslot_next; - newslab = S_new_slab(aTHX_ DIFF(slab2, slot)*2); - newslab->opslab_next = slab->opslab_next; - slab->opslab_next = slab2 = newslab; + slab2 = S_new_slab(aTHX_ + (DIFF(slab2, slot)+1)*2 > PERL_MAX_SLAB_SIZE + ? 
PERL_MAX_SLAB_SIZE + : (DIFF(slab2, slot)+1)*2); + slab2->opslab_next = slab->opslab_next; + slab->opslab_next = slab2; } assert(DIFF(&slab2->opslab_slots, slab2->opslab_first) >= sz); @@ -401,22 +238,65 @@ Perl_Slab_Alloc(pTHX_ size_t sz) < SIZE_TO_PSIZE(sizeof(OP)) + OPSLOT_HEADER_P) slot = &slab2->opslab_slots; INIT_OPSLOT; - DEBUG_S(Perl_warn(aTHX_ "allocating op at %p, slab %p", o, slab)); + DEBUG_S_warn((aTHX_ "allocating op at %p, slab %p", o, slab)); return (void *)o; } -# undef INIT_OPSLOT +#undef INIT_OPSLOT + +#ifdef PERL_DEBUG_READONLY_OPS +void +Perl_Slab_to_ro(pTHX_ OPSLAB *slab) +{ + PERL_ARGS_ASSERT_SLAB_TO_RO; + + if (slab->opslab_readonly) return; + slab->opslab_readonly = 1; + for (; slab; slab = slab->opslab_next) { + /*DEBUG_U(PerlIO_printf(Perl_debug_log,"mprotect ->ro %lu at %p\n", + (unsigned long) slab->opslab_size, slab));*/ + if (mprotect(slab, slab->opslab_size * sizeof(I32 *), PROT_READ)) + Perl_warn(aTHX_ "mprotect for %p %lu failed with %d", slab, + (unsigned long)slab->opslab_size, errno); + } +} + +void +Perl_Slab_to_rw(pTHX_ OPSLAB *const slab) +{ + OPSLAB *slab2; + + PERL_ARGS_ASSERT_SLAB_TO_RW; + + if (!slab->opslab_readonly) return; + slab2 = slab; + for (; slab2; slab2 = slab2->opslab_next) { + /*DEBUG_U(PerlIO_printf(Perl_debug_log,"mprotect ->rw %lu at %p\n", + (unsigned long) size, slab2));*/ + if (mprotect((void *)slab2, slab2->opslab_size * sizeof(I32 *), + PROT_READ|PROT_WRITE)) { + Perl_warn(aTHX_ "mprotect RW for %p %lu failed with %d", slab, + (unsigned long)slab2->opslab_size, errno); + } + } + slab->opslab_readonly = 0; +} + +#else +# define Slab_to_rw(op) +#endif /* This cannot possibly be right, but it was copied from the old slab allocator, to which it was originally added, without explanation, in commit 083fcd5. 
*/ -# ifdef NETWARE +#ifdef NETWARE # define PerlMemShared PerlMem -# endif +#endif void Perl_Slab_Free(pTHX_ void *op) { + dVAR; OP * const o = (OP *)op; OPSLAB *slab; @@ -433,9 +313,7 @@ Perl_Slab_Free(pTHX_ void *op) o->op_type = OP_FREED; o->op_next = slab->opslab_freed; slab->opslab_freed = o; - DEBUG_S( - Perl_warn(aTHX_ "free op at %p, recorded in slab %p", o, slab) - ); + DEBUG_S_warn((aTHX_ "free op at %p, recorded in slab %p", o, slab)); OpslabREFCNT_dec_padok(slab); } @@ -456,16 +334,26 @@ Perl_opslab_free_nopad(pTHX_ OPSLAB *slab) void Perl_opslab_free(pTHX_ OPSLAB *slab) { + dVAR; OPSLAB *slab2; PERL_ARGS_ASSERT_OPSLAB_FREE; - DEBUG_S(Perl_warn(aTHX_ "freeing slab %p", slab)); + DEBUG_S_warn((aTHX_ "freeing slab %p", slab)); assert(slab->opslab_refcnt == 1); for (; slab; slab = slab2) { slab2 = slab->opslab_next; -# ifdef DEBUGGING +#ifdef DEBUGGING slab->opslab_refcnt = ~(size_t)0; -# endif +#endif +#ifdef PERL_DEBUG_READONLY_OPS + DEBUG_m(PerlIO_printf(Perl_debug_log, "Deallocate slab at %p\n", + slab)); + if (munmap(slab, slab->opslab_size * sizeof(I32 *))) { + perror("munmap failed"); + abort(); + } +#else PerlMemShared_free(slab); +#endif } } @@ -474,9 +362,9 @@ Perl_opslab_force_free(pTHX_ OPSLAB *slab) { OPSLAB *slab2; OPSLOT *slot; -# ifdef DEBUGGING +#ifdef DEBUGGING size_t savestack_count = 0; -# endif +#endif PERL_ARGS_ASSERT_OPSLAB_FORCE_FREE; slab2 = slab; do { @@ -485,9 +373,9 @@ Perl_opslab_force_free(pTHX_ OPSLAB *slab) slot = slot->opslot_next) { if (slot->opslot_op.op_type != OP_FREED && !(slot->opslot_op.op_savefree -# ifdef DEBUGGING +#ifdef DEBUGGING && ++savestack_count -# endif +#endif ) ) { assert(slot->opslot_op.op_slabbed); @@ -499,15 +387,50 @@ Perl_opslab_force_free(pTHX_ OPSLAB *slab) } while ((slab2 = slab2->opslab_next)); /* > 1 because the CV still holds a reference count. */ if (slab->opslab_refcnt > 1) { /* still referenced by the savestack */ -# ifdef DEBUGGING +#ifdef DEBUGGING assert(savestack_count == slab->opslab_refcnt-1); -# endif +#endif return; } free: opslab_free(slab); } +#ifdef PERL_DEBUG_READONLY_OPS +OP * +Perl_op_refcnt_inc(pTHX_ OP *o) +{ + if(o) { + OPSLAB *const slab = o->op_slabbed ? OpSLAB(o) : NULL; + if (slab && slab->opslab_readonly) { + Slab_to_rw(slab); + ++o->op_targ; + Slab_to_ro(slab); + } else { + ++o->op_targ; + } + } + return o; + +} + +PADOFFSET +Perl_op_refcnt_dec(pTHX_ OP *o) +{ + PADOFFSET result; + OPSLAB *const slab = o->op_slabbed ? OpSLAB(o) : NULL; + + PERL_ARGS_ASSERT_OP_REFCNT_DEC; + + if (slab && slab->opslab_readonly) { + Slab_to_rw(slab); + result = --o->op_targ; + Slab_to_ro(slab); + } else { + result = --o->op_targ; + } + return result; +} #endif /* * In the following definition, the ", (OP*)0" is just to make the compiler @@ -719,10 +642,6 @@ Perl_alloccopstash(pTHX_ HV *hv) static void S_op_destroy(pTHX_ OP *o) { - if (o->op_latefree) { - o->op_latefreed = 1; - return; - } FreeOp(o); } @@ -740,20 +659,13 @@ Perl_op_free(pTHX_ OP *o) dVAR; OPCODE type; -#ifndef PL_OP_SLAB_ALLOC /* Though ops may be freed twice, freeing the op after its slab is a big no-no. */ assert(!o || !o->op_slabbed || OpSLAB(o)->opslab_refcnt != ~(size_t)0); -#endif /* During the forced freeing of ops after compilation failure, kidops may be freed before their parents. 
*/ if (!o || o->op_type == OP_FREED) return; - if (o->op_latefreed) { - if (o->op_latefree) - return; - goto do_free; - } type = o->op_type; if (o->op_private & OPpREFCOUNTED) { @@ -788,35 +700,26 @@ Perl_op_free(pTHX_ OP *o) CALL_OPFREEHOOK(o); if (o->op_flags & OPf_KIDS) { - register OP *kid, *nextkid; + OP *kid, *nextkid; for (kid = cUNOPo->op_first; kid; kid = nextkid) { nextkid = kid->op_sibling; /* Get before next freeing kid */ op_free(kid); } } + if (type == OP_NULL) + type = (OPCODE)o->op_targ; -#ifdef PERL_DEBUG_READONLY_OPS - Slab_to_rw(o); -#endif + if (o->op_slabbed) { + Slab_to_rw(OpSLAB(o)); + } /* COP* is not cleared by op_clear() so that we may track line * numbers etc even after null() */ - if (type == OP_NEXTSTATE || type == OP_DBSTATE - || (type == OP_NULL /* the COP might have been null'ed */ - && ((OPCODE)o->op_targ == OP_NEXTSTATE - || (OPCODE)o->op_targ == OP_DBSTATE))) { + if (type == OP_NEXTSTATE || type == OP_DBSTATE) { cop_free((COP*)o); } - if (type == OP_NULL) - type = (OPCODE)o->op_targ; - op_clear(o); - if (o->op_latefree) { - o->op_latefreed = 1; - return; - } - do_free: FreeOp(o); #ifdef DEBUG_LEAKING_SCALARS if (PL_op == o) @@ -918,6 +821,7 @@ Perl_op_clear(pTHX_ OP *o) } #endif break; + case OP_DUMP: case OP_GOTO: case OP_NEXT: case OP_LAST: @@ -928,6 +832,7 @@ Perl_op_clear(pTHX_ OP *o) case OP_TRANS: case OP_TRANSR: if (o->op_private & (OPpTRANS_FROM_UTF|OPpTRANS_TO_UTF)) { + assert(o->op_type == OP_TRANS || o->op_type == OP_TRANSR); #ifdef USE_ITHREADS if (cPADOPo->op_padix > 0) { pad_swipe(cPADOPo->op_padix, TRUE); @@ -1151,7 +1056,7 @@ Perl_op_linklist(pTHX_ OP *o) /* establish postfix order */ first = cUNOPo->op_first; if (first) { - register OP *kid; + OP *kid; o->op_next = LINKLIST(first); kid = first; for (;;) { @@ -1193,8 +1098,11 @@ S_scalarboolean(pTHX_ OP *o) if (ckWARN(WARN_SYNTAX)) { const line_t oldline = CopLINE(PL_curcop); - if (PL_parser && PL_parser->copline != NOLINE) + if (PL_parser && PL_parser->copline != NOLINE) { + /* This ensures that warnings are reported at the first line + of the conditional, not the last. */ CopLINE_set(PL_curcop, PL_parser->copline); + } Perl_warner(aTHX_ packWARN(WARN_SYNTAX), "Found = in conditional, should be =="); CopLINE_set(PL_curcop, oldline); } @@ -1273,8 +1181,8 @@ Perl_scalarvoid(pTHX_ OP *o) { dVAR; OP *kid; + SV *useless_sv = NULL; const char* useless = NULL; - U32 useless_is_utf8 = 0; SV* sv; U8 want; @@ -1475,19 +1383,19 @@ Perl_scalarvoid(pTHX_ OP *o) useless = NULL; else { SV * const dsv = newSVpvs(""); - SV* msv = sv_2mortal(Perl_newSVpvf(aTHX_ - "a constant (%s)", - pv_pretty(dsv, maybe_macro, SvCUR(sv), 32, NULL, NULL, - PERL_PV_PRETTY_DUMP | PERL_PV_ESCAPE_NOCLEAR | PERL_PV_ESCAPE_UNI_DETECT ))); + useless_sv + = Perl_newSVpvf(aTHX_ + "a constant (%s)", + pv_pretty(dsv, maybe_macro, + SvCUR(sv), 32, NULL, NULL, + PERL_PV_PRETTY_DUMP + | PERL_PV_ESCAPE_NOCLEAR + | PERL_PV_ESCAPE_UNI_DETECT)); SvREFCNT_dec(dsv); - useless = SvPV_nolen(msv); - useless_is_utf8 = SvUTF8(msv); } } else if (SvOK(sv)) { - SV* msv = sv_2mortal(Perl_newSVpvf(aTHX_ - "a constant (%"SVf")", sv)); - useless = SvPV_nolen(msv); + useless_sv = Perl_newSVpvf(aTHX_ "a constant (%"SVf")", sv); } else useless = "a constant (undef)"; @@ -1614,10 +1522,18 @@ Perl_scalarvoid(pTHX_ OP *o) case OP_SCALAR: return scalar(o); } - if (useless) - Perl_ck_warner(aTHX_ packWARN(WARN_VOID), "Useless use of %"SVf" in void context", - newSVpvn_flags(useless, strlen(useless), - SVs_TEMP | ( useless_is_utf8 ? 
SVf_UTF8 : 0 ))); + + if (useless_sv) { + /* mortalise it, in case warnings are fatal. */ + Perl_ck_warner(aTHX_ packWARN(WARN_VOID), + "Useless use of %"SVf" in void context", + sv_2mortal(useless_sv)); + } + else if (useless) { + Perl_ck_warner(aTHX_ packWARN(WARN_VOID), + "Useless use of %s in void context", + useless); + } return o; } @@ -2534,31 +2450,20 @@ S_dup_attrlist(pTHX_ OP *o) } STATIC void -S_apply_attrs(pTHX_ HV *stash, SV *target, OP *attrs, bool for_my) +S_apply_attrs(pTHX_ HV *stash, SV *target, OP *attrs) { dVAR; - SV *stashsv; + SV * const stashsv = stash ? newSVhek(HvNAME_HEK(stash)) : &PL_sv_no; PERL_ARGS_ASSERT_APPLY_ATTRS; /* fake up C */ ENTER; /* need to protect against side-effects of 'use' */ - stashsv = stash ? newSVhek(HvNAME_HEK(stash)) : &PL_sv_no; #define ATTRSMODULE "attributes" #define ATTRSMODULE_PM "attributes.pm" - if (for_my) { - /* Don't force the C if we don't need it. */ - SV * const * const svp = hv_fetchs(GvHVn(PL_incgv), ATTRSMODULE_PM, FALSE); - if (svp && *svp != &PL_sv_undef) - NOOP; /* already in %INC */ - else - Perl_load_module(aTHX_ PERL_LOADMOD_NOIMPORT, - newSVpvs(ATTRSMODULE), NULL); - } - else { - Perl_load_module(aTHX_ PERL_LOADMOD_IMPORT_OPS, + Perl_load_module(aTHX_ PERL_LOADMOD_IMPORT_OPS, newSVpvs(ATTRSMODULE), NULL, op_prepend_elem(OP_LIST, @@ -2567,7 +2472,6 @@ S_apply_attrs(pTHX_ HV *stash, SV *target, OP *attrs, bool for_my) newSVOP(OP_CONST, 0, newRV(target)), dup_attrlist(attrs)))); - } LEAVE; } @@ -2576,7 +2480,7 @@ S_apply_attrs_my(pTHX_ HV *stash, OP *target, OP *attrs, OP **imopsp) { dVAR; OP *pack, *imop, *arg; - SV *meth, *stashsv; + SV *meth, *stashsv, **svp; PERL_ARGS_ASSERT_APPLY_ATTRS_MY; @@ -2588,7 +2492,15 @@ S_apply_attrs_my(pTHX_ HV *stash, OP *target, OP *attrs, OP **imopsp) target->op_type == OP_PADAV); /* Ensure that attributes.pm is loaded. */ - apply_attrs(stash, PAD_SV(target->op_targ), attrs, TRUE); + ENTER; /* need to protect against side-effects of 'use' */ + /* Don't force the C if we don't need it. */ + svp = hv_fetchs(GvHVn(PL_incgv), ATTRSMODULE_PM, FALSE); + if (svp && *svp != &PL_sv_undef) + NOOP; /* already in %INC */ + else + Perl_load_module(aTHX_ PERL_LOADMOD_NOIMPORT, + newSVpvs(ATTRSMODULE), NULL); + LEAVE; /* Need package name for method call. */ pack = newSVOP(OP_CONST, 0, newSVpvs(ATTRSMODULE)); @@ -2708,7 +2620,7 @@ S_my_kid(pTHX_ OP *o, OP *attrs, OP **imopsp) (type == OP_RV2SV ? GvSV(gv) : type == OP_RV2AV ? MUTABLE_SV(GvAV(gv)) : type == OP_RV2HV ? MUTABLE_SV(GvHV(gv)) : MUTABLE_SV(gv)), - attrs, FALSE); + attrs); } o->op_private |= OPpOUR_INTRO; return o; @@ -2943,6 +2855,18 @@ Perl_op_scope(pTHX_ OP *o) return o; } +OP * +Perl_op_unscope(pTHX_ OP *o) +{ + if (o && o->op_type == OP_LINESEQ) { + OP *kid = cLISTOPo->op_first; + for(; kid; kid = kid->op_sibling) + if (kid->op_type == OP_NEXTSTATE || kid->op_type == OP_DBSTATE) + op_null(kid); + } + return o; +} + int Perl_block_start(pTHX_ int full) { @@ -2966,6 +2890,7 @@ Perl_block_end(pTHX_ I32 floor, OP *seq) dVAR; const int needblockscope = PL_hints & HINT_BLOCK_SCOPE; OP* retval = scalarseq(seq); + OP *o; CALL_BLOCK_HOOKS(bhk_pre_end, &retval); @@ -2973,7 +2898,66 @@ Perl_block_end(pTHX_ I32 floor, OP *seq) CopHINTS_set(&PL_compiling, PL_hints); if (needblockscope) PL_hints |= HINT_BLOCK_SCOPE; /* propagate out */ - pad_leavemy(); + o = pad_leavemy(); + + if (o) { + /* pad_leavemy has created a sequence of introcv ops for all my + subs declared in the block. 
We have to replicate that list with + clonecv ops, to deal with this situation: + + sub { + my sub s1; + my sub s2; + sub s1 { state sub foo { \&s2 } } + }->() + + Originally, I was going to have introcv clone the CV and turn + off the stale flag. Since &s1 is declared before &s2, the + introcv op for &s1 is executed (on sub entry) before the one for + &s2. But the &foo sub inside &s1 (which is cloned when &s1 is + cloned, since it is a state sub) closes over &s2 and expects + to see it in its outer CV’s pad. If the introcv op clones &s1, + then &s2 is still marked stale. Since &s1 is not active, and + &foo closes over &s1’s implicit entry for &s2, we get a ‘Varia- + ble will not stay shared’ warning. Because it is the same stub + that will be used when the introcv op for &s2 is executed, clos- + ing over it is safe. Hence, we have to turn off the stale flag + on all lexical subs in the block before we clone any of them. + Hence, having introcv clone the sub cannot work. So we create a + list of ops like this: + + lineseq + | + +-- introcv + | + +-- introcv + | + +-- introcv + | + . + . + . + | + +-- clonecv + | + +-- clonecv + | + +-- clonecv + | + . + . + . + */ + OP *kid = o->op_flags & OPf_KIDS ? cLISTOPo->op_first : o; + OP * const last = o->op_flags & OPf_KIDS ? cLISTOPo->op_last : o; + for (;; kid = kid->op_sibling) { + OP *newkid = newOP(OP_CLONECV, 0); + newkid->op_targ = kid->op_targ; + o = op_append_elem(OP_LINESEQ, o, newkid); + if (kid == last) break; + } + retval = op_prepend_elem(OP_LINESEQ, o, retval); + } CALL_BLOCK_HOOKS(bhk_post_end, &retval); @@ -3054,6 +3038,32 @@ Perl_newPROG(pTHX_ OP *o) } else { if (o->op_type == OP_STUB) { + /* This block is entered if nothing is compiled for the main + program. This will be the case for an genuinely empty main + program, or one which only has BEGIN blocks etc, so already + run and freed. + + Historically (5.000) the guard above was !o. However, commit + f8a08f7b8bd67b28 (Jun 2001), integrated to blead as + c71fccf11fde0068, changed perly.y so that newPROG() is now + called with the output of block_end(), which returns a new + OP_STUB for the case of an empty optree. ByteLoader (and + maybe other things) also take this path, because they set up + PL_main_start and PL_main_root directly, without generating an + optree. + + If the parsing the main program aborts (due to parse errors, + or due to BEGIN or similar calling exit), then newPROG() + isn't even called, and hence this code path and its cleanups + are skipped. This shouldn't make a make a difference: + * a non-zero return from perl_parse is a failure, and + perl_destruct() should be called immediately. + * however, if exit(0) is called during the parse, then + perl_parse() returns 0, and perl_run() is called. As + PL_main_start will be NULL, perl_run() will return + promptly, and the exit code will remain 0. + */ + PL_comppad_name = 0; PL_compcv = 0; S_op_destroy(aTHX_ o); @@ -3181,11 +3191,8 @@ S_op_integerize(pTHX_ OP *o) PERL_ARGS_ASSERT_OP_INTEGERIZE; - /* integerize op, unless it happens to be C<-foo>. - * XXX should pp_i_negate() do magic string negation instead? */ - if ((PL_opargs[type] & OA_OTHERINT) && (PL_hints & HINT_INTEGER) - && !(type == OP_NEGATE && cUNOPo->op_first->op_type == OP_CONST - && (cUNOPo->op_first->op_private & OPpCONST_BARE))) + /* integerize op. 
*/ + if ((PL_opargs[type] & OA_OTHERINT) && (PL_hints & HINT_INTEGER)) { dVAR; o->op_ppaddr = PL_ppaddr[type = ++(o->op_type)]; @@ -3202,7 +3209,7 @@ static OP * S_fold_constants(pTHX_ register OP *o) { dVAR; - register OP * VOL curop; + OP * VOL curop; OP *newop; VOL I32 type = o->op_type; SV * VOL sv = NULL; @@ -3234,6 +3241,22 @@ S_fold_constants(pTHX_ register OP *o) if (IN_LOCALE_COMPILETIME) goto nope; break; + case OP_PACK: + if (!cLISTOPo->op_first->op_sibling + || cLISTOPo->op_first->op_sibling->op_type != OP_CONST) + goto nope; + { + SV * const sv = cSVOPx_sv(cLISTOPo->op_first->op_sibling); + if (!SvPOK(sv) || SvGMAGICAL(sv)) goto nope; + { + const char *s = SvPVX_const(sv); + while (s < SvEND(sv)) { + if (*s == 'p' || *s == 'P') goto nope; + s++; + } + } + } + break; case OP_REPEAT: if (o->op_private & OPpREPEAT_DOLIST) goto nope; } @@ -3324,7 +3347,7 @@ S_fold_constants(pTHX_ register OP *o) if (type == OP_RV2GV) newop = newGVOP(OP_GV, 0, MUTABLE_GV(sv)); else - newop = newSVOP(OP_CONST, 0, MUTABLE_SV(sv)); + newop = newSVOP(OP_CONST, OPpCONST_FOLDED<<8, MUTABLE_SV(sv)); op_getmad(o,newop,'f'); return newop; @@ -3336,7 +3359,7 @@ static OP * S_gen_constant_list(pTHX_ register OP *o) { dVAR; - register OP *curop; + OP *curop; const I32 oldtmps_floor = PL_tmps_floor; list(o); @@ -3895,9 +3918,6 @@ Perl_newOP(pTHX_ I32 type, I32 flags) o->op_type = (OPCODE)type; o->op_ppaddr = PL_ppaddr[type]; o->op_flags = (U8)flags; - o->op_latefree = 0; - o->op_latefreed = 0; - o->op_attached = 0; o->op_next = o; o->op_private = (U8)(0 | (flags >> 8)); @@ -4041,10 +4061,10 @@ S_pmtrans(pTHX_ OP *o, OP *expr, OP *repl) STRLEN rlen; const U8 *t = (U8*)SvPV_const(tstr, tlen); const U8 *r = (U8*)SvPV_const(rstr, rlen); - register I32 i; - register I32 j; + I32 i; + I32 j; I32 grows = 0; - register short *tbl; + short *tbl; const I32 complement = o->op_private & OPpTRANS_COMPLEMENT; const I32 squash = o->op_private & OPpTRANS_SQUASH; @@ -4632,8 +4652,6 @@ Perl_pmruntime(pTHX_ OP *o, OP *expr, bool isreg, I32 floor) * pad_fixup_inner_anons() can find it */ (void)pad_add_anon(cv, o->op_type); SvREFCNT_inc_simple_void(cv); - - cv_forget_slab(cv); } else { pm->op_code_list = expr; @@ -4844,6 +4862,7 @@ Perl_newSVOP(pTHX_ I32 type, I32 flags, SV *sv) svop->op_sv = sv; svop->op_next = (OP*)svop; svop->op_flags = (U8)flags; + svop->op_private = (U8)(0 | (flags >> 8)); if (PL_opargs[type] & OA_RETSCALAR) scalar((OP*)svop); if (PL_opargs[type] & OA_TARGET) @@ -5646,7 +5665,7 @@ Perl_newSTATEOP(pTHX_ I32 flags, char *label, OP *o) dVAR; const U32 seq = intro_my(); const U32 utf8 = flags & SVf_UTF8; - register COP *cop; + COP *cop; flags &= ~SVf_UTF8; @@ -5684,8 +5703,7 @@ Perl_newSTATEOP(pTHX_ I32 flags, char *label, OP *o) CopLINE_set(cop, CopLINE(PL_curcop)); else { CopLINE_set(cop, PL_parser->copline); - if (PL_parser) - PL_parser->copline = NOLINE; + PL_parser->copline = NOLINE; } #ifdef USE_ITHREADS CopFILE_set(cop, CopFILE(PL_curcop)); /* XXX share in a pvtable? */ @@ -5845,6 +5863,8 @@ S_new_logop(pTHX_ I32 type, I32 flags, OP** firstp, OP** otherp) || other->op_type == OP_TRANS) /* Mark the op as being unbindable with =~ */ other->op_flags |= OPf_SPECIAL; + else if (other->op_type == OP_CONST) + other->op_private |= OPpCONST_FOLDED; return other; } else { @@ -5909,6 +5929,8 @@ S_new_logop(pTHX_ I32 type, I32 flags, OP** firstp, OP** otherp) } if (warnop) { const line_t oldline = CopLINE(PL_curcop); + /* This ensures that warnings are reported at the first line + of the construction, not the last. 
*/ CopLINE_set(PL_curcop, PL_parser->copline); Perl_warner(aTHX_ packWARN(WARN_MISC), "Value of %s%s can be \"0\"; test with defined()", @@ -6002,6 +6024,8 @@ Perl_newCONDOP(pTHX_ I32 flags, OP *first, OP *trueop, OP *falseop) || live->op_type == OP_TRANS || live->op_type == OP_TRANSR) /* Mark the op as being unbindable with =~ */ live->op_flags |= OPf_SPECIAL; + else if (live->op_type == OP_CONST) + live->op_private |= OPpCONST_FOLDED; return live; } NewOp(1101, logop, 1, LOGOP); @@ -6274,9 +6298,8 @@ Perl_newWHILEOP(pTHX_ I32 flags, I32 debuggable, LOOP *loop, scalar(listop); o = new_logop(OP_AND, 0, &expr, &listop); if (o == expr && o->op_type == OP_CONST && !SvTRUE(cSVOPo->op_sv)) { - op_free(expr); /* oops, it's a while (0) */ op_free((OP*)loop); - return NULL; /* listop already freed by new_logop */ + return expr; /* listop already freed by new_logop */ } if (listop) ((LISTOP*)listop)->op_last->op_next = @@ -6439,10 +6462,9 @@ Perl_newFOROP(pTHX_ I32 flags, OP *sv, OP *expr, OP *block, OP *cont) /* for my $x () sets OPpLVAL_INTRO; * for our $x () sets OPpOUR_INTRO */ loop->op_private = (U8)iterpflags; -#ifndef PL_OP_SLAB_ALLOC - if (DIFF(loop, OpSLOT(loop)->opslot_next) + if (loop->op_slabbed + && DIFF(loop, OpSLOT(loop)->opslot_next) < SIZE_TO_PSIZE(sizeof(LOOP))) -#endif { LOOP *tmp; NewOp(1234,tmp,1,LOOP); @@ -6450,6 +6472,8 @@ Perl_newFOROP(pTHX_ I32 flags, OP *sv, OP *expr, OP *block, OP *cont) S_op_destroy(aTHX_ (OP*)loop); loop = tmp; } + else if (!loop->op_slabbed) + loop = (LOOP*)PerlMemShared_realloc(loop, sizeof(LOOP)); loop->op_targ = padoff; wop = newWHILEOP(flags, 1, loop, newOP(OP_ITER, 0), block, cont, 0); if (madsv) @@ -6463,7 +6487,7 @@ Perl_newFOROP(pTHX_ I32 flags, OP *sv, OP *expr, OP *block, OP *cont) Constructs, checks, and returns a loop-exiting op (such as C or C). I is the opcode. I
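
For reference, the allocation pattern introduced in the hunks above (sizes rounded up to pointer-sized units, slots carved downward from the top of each slab, freed ops kept on a per-slab chain for reuse, and each new slab doubled in size up to PERL_MAX_SLAB_SIZE) can be summarised in a stand-alone sketch. All names below (slab_t, SLAB_SIZE, slab_alloc, and so on) are invented for illustration only; they are not the actual op.h/op.c definitions, and the sketch omits the free-chain reuse and read-only (mprotect) handling shown in the diff.

/* Stand-alone sketch of the slab-allocation pattern used by the new
 * Perl_Slab_Alloc(): sizes are rounded up to pointer-sized units, slots
 * are carved downward from the top of a slab, and a new slab of double
 * the size (capped) is chained on when the current one is full. */
#include <assert.h>
#include <stdlib.h>
#include <string.h>

#define SLAB_SIZE      64     /* first slab, in pointer-sized units        */
#define MAX_SLAB_SIZE  2048   /* later slabs double, but never beyond this */

/* round a byte count up to whole pointer-sized units */
#define TO_PSIZE(x)  (((x) + sizeof(void *) - 1) / sizeof(void *))

typedef struct slab {
    struct slab  *next;     /* chain of slabs owned by one compiling CV   */
    void        **first;    /* lowest slot handed out so far (grows down) */
    size_t        size;     /* total slab size, in pointer-sized units    */
    void         *slots[1]; /* payload: ops are carved from the top down  */
} slab_t;

static slab_t *new_slab(size_t psize)
{
    slab_t *slab = calloc(psize, sizeof(void *));
    if (!slab)
        abort();
    slab->size  = psize;
    slab->first = (void **)slab + psize;  /* allocation starts at the top */
    return slab;
}

/* Hand out 'bytes' bytes from the slab chain, zeroed, growing the chain
 * with a doubled (but capped) slab when the current ones cannot fit it. */
static void *slab_alloc(slab_t *head, size_t bytes)
{
    const size_t psize = TO_PSIZE(bytes);
    slab_t *s = head;

    assert(psize <= MAX_SLAB_SIZE - TO_PSIZE(sizeof(slab_t)));
    while ((size_t)(s->first - s->slots) < psize) {
        if (!s->next) {
            size_t grow = s->size * 2;
            if (grow > MAX_SLAB_SIZE)
                grow = MAX_SLAB_SIZE;
            s->next = new_slab(grow);
        }
        s = s->next;
    }
    s->first -= psize;                    /* move the watermark down */
    memset(s->first, 0, psize * sizeof(void *));
    return s->first;
}

int main(void)
{
    slab_t *head = new_slab(SLAB_SIZE);
    void *op1 = slab_alloc(head, 48);     /* e.g. a small op       */
    void *op2 = slab_alloc(head, 160);    /* e.g. a larger list op */
    assert(op1 && op2 && op1 != op2);
    return 0;
}

Carving slots from the high end of the slab mirrors the comment retained from the old allocator: leaves are built before the trunk, so allocating downward tends to leave run-time traversal cache-friendly in the upward direction.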