PERL_UNUSED_RESULT(Gconvert((NV)(nv), (int)ndig, 0, buffer))
#endif
-#ifdef PERL_NEW_COPY_ON_WRITE
-# ifndef SV_COW_THRESHOLD
+#ifndef SV_COW_THRESHOLD
# define SV_COW_THRESHOLD 0 /* COW iff len > K */
-# endif
-# ifndef SV_COWBUF_THRESHOLD
+#endif
+#ifndef SV_COWBUF_THRESHOLD
# define SV_COWBUF_THRESHOLD 1250 /* COW iff len > K */
-# endif
-# ifndef SV_COW_MAX_WASTE_THRESHOLD
+#endif
+#ifndef SV_COW_MAX_WASTE_THRESHOLD
# define SV_COW_MAX_WASTE_THRESHOLD 80 /* COW iff (len - cur) < K */
-# endif
-# ifndef SV_COWBUF_WASTE_THRESHOLD
+#endif
+#ifndef SV_COWBUF_WASTE_THRESHOLD
# define SV_COWBUF_WASTE_THRESHOLD 80 /* COW iff (len - cur) < K */
-# endif
-# ifndef SV_COW_MAX_WASTE_FACTOR_THRESHOLD
+#endif
+#ifndef SV_COW_MAX_WASTE_FACTOR_THRESHOLD
# define SV_COW_MAX_WASTE_FACTOR_THRESHOLD 2 /* COW iff len < (cur * K) */
-# endif
-# ifndef SV_COWBUF_WASTE_FACTOR_THRESHOLD
+#endif
+#ifndef SV_COWBUF_WASTE_FACTOR_THRESHOLD
# define SV_COWBUF_WASTE_FACTOR_THRESHOLD 2 /* COW iff len < (cur * K) */
-# endif
#endif
/* Work around compiler warnings about unsigned >= THRESHOLD when thres-
hold is 0. */
? count * body_size \
: FIT_ARENA0 (body_size)
#define FIT_ARENA(count,body_size) \
- count \
+ (U32)(count \
? FIT_ARENAn (count, body_size) \
- : FIT_ARENA0 (body_size)
+ : FIT_ARENA0 (body_size))
/* Calculate the length to copy. Specifically work out the length less any
final padding the compiler needed to add. See the comment in sv_upgrade
return;
}
-static void
-S_glob_assign_ref(pTHX_ SV *const dstr, SV *const sstr)
+void
+Perl_gv_setref(pTHX_ SV *const dstr, SV *const sstr)
{
SV * const sref = SvRV(sstr);
SV *dref;
U8 import_flag = 0;
const U32 stype = SvTYPE(sref);
- PERL_ARGS_ASSERT_GLOB_ASSIGN_REF;
+ PERL_ARGS_ASSERT_GV_SETREF;
if (intro) {
GvINTRO_off(dstr); /* one-shot flag */
}
if (import_flag == GVf_IMPORTED_SV) {
if (intro) {
- dSS_ADD;
- SS_ADD_PTR(gp_ref(GvGP(dstr)));
- SS_ADD_UV(SAVEt_GP_ALIASED_SV
- | cBOOL(GvALIASED_SV(dstr)) << 8);
- SS_ADD_END(2);
+ save_aliased_sv((GV *)dstr);
}
/* Turn off the flag if sref is not referenced elsewhere,
even by weak refs. (SvRMAGICAL is a pessimistic check for
Perl_magic_clearisa(aTHX_ NULL, mg);
}
else if (stype == SVt_PVIO) {
- DEBUG_o(Perl_deb(aTHX_ "glob_assign_ref clearing PL_stashcache\n"));
+ DEBUG_o(Perl_deb(aTHX_ "gv_setref clearing PL_stashcache\n"));
/* It's a cache. It will rebuild itself quite happily.
It's a lot of effort to work out exactly which key (or keys)
might be invalidated by the creation of the this file handle.
if (dtype >= SVt_PV) {
if (isGV_with_GP(dstr)) {
- glob_assign_ref(dstr, sstr);
+ gv_setref(dstr, sstr);
return;
}
if (SvPVX_const(dstr)) {
* be allocated it is still not worth swiping PADTMPs for short
* strings, as the savings here are small.
*
- * If the rhs is already flagged as a copy-on-write string and COW
- * is possible here, we use copy-on-write and make both SVs share
- * the string buffer.
- *
- * If the rhs is not flagged as copy-on-write, then we see whether
- * it is worth upgrading it to such. If the lhs already has a buf-
+ * If swiping is not an option, then we see whether it is
+ * worth using copy-on-write. If the lhs already has a buf-
* fer big enough and the string is short, we skip it and fall back
* to method 3, since memcpy is faster for short strings than the
* later bookkeeping overhead that copy-on-write entails.
+
+ * If the rhs is not a copy-on-write string yet, then we also
+ * consider whether the buffer is too large relative to the string
+ * it holds. Some operations such as readline allocate a large
+ * buffer in the expectation of reusing it. But turning such into
+ * a COW buffer is counter-productive because it increases memory
+ * usage by making readline allocate a new large buffer the sec-
+ * ond time round. So, if the buffer is too large, again, we use
+ * method 3 (copy).
*
- * If there is no buffer on the left, or the buffer is too small,
- * then we use copy-on-write.
+ * Finally, if there is no buffer on the left, or the buffer is too
+ * small, then we use copy-on-write and make both SVs share the
+ * string buffer.
+ *
*/
/* Whichever path we take through the next code, we want this true,
=for apidoc sv_catpvn_flags
Concatenates the string onto the end of the string which is in the SV. The
-C<len> indicates number of bytes to copy. If the SV has the UTF-8
-status set, then the bytes appended should be valid UTF-8.
+C<len> indicates number of bytes to copy.
+
+By default, the string appended is assumed to be valid UTF-8 if the SV has
+the UTF-8 status set, and a string of bytes otherwise. One can force the
+appended string to be interpreted as UTF-8 by supplying the C<SV_CATUTF8>
+flag, and as bytes by supplying the C<SV_CATBYTES> flag; the SV or the
+string appended will be upgraded to UTF-8 if necessary.
+
If C<flags> has the C<SV_SMAGIC> bit set, will
C<mg_set> on C<dsv> afterwards if appropriate.
C<sv_catpvn> and C<sv_catpvn_nomg> are implemented
if (ssv) {
STRLEN slen;
const char *spv = SvPV_flags_const(ssv, slen, flags);
- if (spv) {
- if (flags & SV_GMAGIC)
+ if (flags & SV_GMAGIC)
SvGETMAGIC(dsv);
- sv_catpvn_flags(dsv, spv, slen,
+ sv_catpvn_flags(dsv, spv, slen,
DO_UTF8(ssv) ? SV_CATUTF8 : SV_CATBYTES);
- if (flags & SV_SMAGIC)
+ if (flags & SV_SMAGIC)
SvSETMAGIC(dsv);
- }
}
}
new_SV(sv);
if (len) {
- sv_upgrade(sv, SVt_PV);
- SvGROW(sv, len + 1);
+ sv_grow(sv, len + 1);
}
return sv;
}
float b, c, keep_earlier;
if (byte > cache[3]) {
/* New position is between the existing pair of pairs. */
- b = cache[3];
- c = byte;
+ b = (float)cache[3];
+ c = (float)byte;
} else {
/* New position is before the existing pair of pairs. */
- b = byte;
- c = cache[3];
+ b = (float)byte;
+ c = (float)cache[3];
}
keep_earlier = THREEWAY_SQUARE(0, b, c, blen);
if (byte > cache[3]) {
*/
#define PUSH_EXTEND_MORTAL__SV_C(AnSv) \
STMT_START { \
- EXTEND_MORTAL(1); \
- PL_tmps_stack[++PL_tmps_ix] = (AnSv); \
+ SSize_t ix = ++PL_tmps_ix; \
+ if (UNLIKELY(ix >= PL_tmps_max)) \
+ ix = tmps_grow_p(ix); \
+ PL_tmps_stack[ix] = (AnSv); \
} STMT_END
/*
{
dVAR;
if (!sv)
- return NULL;
+ return sv;
if (SvIMMORTAL(sv))
return sv;
PUSH_EXTEND_MORTAL__SV_C(sv);
SV *sv;
new_SV(sv);
- sv_upgrade(sv, type);
+ ASSUME(SvTYPE(sv) == SVt_FIRST);
+ if(type != SVt_FIRST)
+ sv_upgrade(sv, type);
return sv;
}
#if LONG_DOUBLEKIND == LONG_DOUBLE_IS_DOUBLEDOUBLE_128_BIT_LITTLE_ENDIAN || \
LONG_DOUBLEKIND == LONG_DOUBLE_IS_DOUBLEDOUBLE_128_BIT_BIG_ENDIAN
# define LONGDOUBLE_DOUBLEDOUBLE
-# define DOUBLEDOUBLE_MAXBITS 1028
+/* The first double can be as large as 2**1023, or '1' . '0' x 1023.
+ * The second double can be as small as 2**-1074, or '0' x 1073 . '1'.
+ * The sum of them can be '1' . '0' x 2096 . '1', with implied radix point
+ * after the first 1023 zero bits. */
+# define DOUBLEDOUBLE_MAXBITS 2098
#endif
/* vhex will contain the values (0..15) of the hex digits ("nybbles"
* of 4 bits); 1 for the implicit 1, and the mantissa bits, four bits
- * per xdigit. */
+ * per xdigit. For the double-double case, this can be rather many. */
#ifdef LONGDOUBLE_DOUBLEDOUBLE
# define VHEX_SIZE (1+DOUBLEDOUBLE_MAXBITS/4)
#else
if (vend) *v++ = ((nv) == 0.0) ? 0 : 1; else v++; \
} STMT_END
-#ifdef LONGDOUBLE_DOUBLEDOUBLE
-# define HEXTRACTSIZE (DOUBLEDOUBLE_MAXBITS/8)
+ /* HEXTRACTSIZE is the maximum number of xdigits. */
+#if defined(USE_LONG_DOUBLE) && defined(LONGDOUBLE_DOUBLEDOUBLE)
+# define HEXTRACTSIZE (DOUBLEDOUBLE_MAXBITS/4)
#else
-# define HEXTRACTSIZE NVSIZE
+# define HEXTRACTSIZE 2 * NVSIZE
#endif
const U8* nvp = (const U8*)(&nv);
- const U8* vmaxend = vhex + 2 * HEXTRACTSIZE + 1;
+ const U8* vmaxend = vhex + HEXTRACTSIZE;
(void)Perl_frexp(PERL_ABS(nv), exponent);
if (vend && (vend <= vhex || vend > vmaxend))
Perl_croak(aTHX_ "Hexadecimal float: internal error");
}
# else
HEXTRACT_LO_NYBBLE(1);
- for (ix = 2; ix < HEXTRACTSIZE; ix++) {
+ for (ix = 2; ix < NVSIZE; ix++) {
HEXTRACT_BYTE(ix);
}
# endif
/* For double-double the ixmin and ixmax stay at zero,
* which is convenient since the HEXTRACTSIZE is tricky
* for double-double. */
- ixmin < 0 || ixmax >= HEXTRACTSIZE ||
+ ixmin < 0 || ixmax >= NVSIZE ||
(vend && v != vend))
Perl_croak(aTHX_ "Hexadecimal float: internal error");
return v;
* Since each double has their own exponent, the
* doubles may float (haha) rather far from each
* other, and the number of required bits is much
- * larger, up to total of 1028 bits. (NOTE: this
- * is not actually implemented properly yet,
- * we are using just the first double, see
- * S_hextract() for details. But let's prepare
- * for the future.) */
-
- /* 2 hexdigits for each byte. */
+ * larger, up to total of DOUBLEDOUBLE_MAXBITS bits.
+ * See the definition of DOUBLEDOUBLE_MAXBITS.
+ *
+ * Need 2 hexdigits for each byte. */
need += (DOUBLEDOUBLE_MAXBITS/8 + 1) * 2;
/* the size for the exponent already added */
#endif
PL_DBsingle = sv_dup(proto_perl->IDBsingle, param);
PL_DBtrace = sv_dup(proto_perl->IDBtrace, param);
PL_DBsignal = sv_dup(proto_perl->IDBsignal, param);
+ Copy(proto_perl->IDBcontrol, PL_DBcontrol, DBVARMG_COUNT, IV);
/* symbol tables */
PL_defstash = hv_dup_inc(proto_perl->Idefstash, param);
if (match && subscript_type == FUV_SUBSCRIPT_WITHIN)
break;
- return varname(gv, hash ? '%' : '@', obase->op_targ,
+ return varname(gv, (char)(hash ? '%' : '@'), obase->op_targ,
keysv, index, subscript_type);
}
if (match)
break;
return varname(gv,
- (o->op_type == OP_PADAV || o->op_type == OP_RV2AV)
- ? '@' : '%',
+ (char)((o->op_type == OP_PADAV || o->op_type == OP_RV2AV)
+ ? '@' : '%'),
o->op_targ, NULL, 0, FUV_SUBSCRIPT_WITHIN);
}
NOT_REACHED; /* NOTREACHED */
{
if (PL_op) {
SV* varname = NULL;
+ const char *desc;
if (uninit_sv && PL_curpad) {
varname = find_uninit_var(PL_op, uninit_sv,0);
if (varname)
sv_insert(varname, 0, 0, " ", 1);
}
+ desc = PL_op->op_type == OP_STRINGIFY && PL_op->op_folded
+ ? "join or string"
+ : OP_DESC(PL_op);
/* PL_warn_uninit_sv is constant */
GCC_DIAG_IGNORE(-Wformat-nonliteral);
/* diag_listed_as: Use of uninitialized value%s */
Perl_warner(aTHX_ packWARN(WARN_UNINITIALIZED), PL_warn_uninit_sv,
SVfARG(varname ? varname : &PL_sv_no),
- " in ", OP_DESC(PL_op));
+ " in ", desc);
GCC_DIAG_RESTORE;
}
else {