},
'Compress::Raw::Zlib' => {
- 'DISTRIBUTION' => 'PMQS/Compress-Raw-Zlib-2.202.tar.gz',
+ 'DISTRIBUTION' => 'PMQS/Compress-Raw-Zlib-2.204.tar.gz',
'FILES' => q[cpan/Compress-Raw-Zlib],
'EXCLUDED' => [
qr{^examples/},
"Compress::Raw::Zlib::deflateStream", (void*)s);
XPUSHs(obj);
}
- if (GIMME == G_ARRAY) {
+ if (GIMME_V == G_ARRAY) {
SV * sv = sv_2mortal(newSViv(err)) ;
setDUALstatus(sv, err);
XPUSHs(sv) ;
(void*)s);
XPUSHs(obj);
}
- if (GIMME == G_ARRAY) {
+ if (GIMME_V == G_ARRAY) {
SV * sv = sv_2mortal(newSViv(err)) ;
setDUALstatus(sv, err);
XPUSHs(sv) ;
XPUSHs(sv_setref_pv(sv_newmortal(),
"Compress::Raw::Zlib::deflateStream", (void*)s));
- if (GIMME == G_ARRAY) {
+ if (GIMME_V == G_ARRAY) {
SV * sv = sv_2mortal(newSViv(err)) ;
setDUALstatus(sv, err);
XPUSHs(sv) ;
use bytes ;
our ($VERSION, $XS_VERSION, @ISA, @EXPORT, %EXPORT_TAGS, @EXPORT_OK, $AUTOLOAD, %DEFLATE_CONSTANTS, @DEFLATE_CONSTANTS);
-$VERSION = '2.202';
+$VERSION = '2.204';
$XS_VERSION = $VERSION;
$VERSION = eval $VERSION;
=head1 COPYRIGHT AND LICENSE
-Copyright (c) 2005-2022 Paul Marquess. All rights reserved.
+Copyright (c) 2005-2023 Paul Marquess. All rights reserved.
This program is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.
# endif
#endif
+/* If available, use the ARM processor CRC32 instruction. */
+#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32) && W == 8
+# define ARMCRC32
+#endif
+
/* Local functions. */
local z_crc_t multmodp OF((z_crc_t a, z_crc_t b));
local z_crc_t x2nmodp OF((z_off64_t n, unsigned k));
-/* If available, use the ARM processor CRC32 instruction. */
-#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32) && W == 8
-# define ARMCRC32
+#if defined(W) && (!defined(ARMCRC32) || defined(DYNAMIC_CRC_TABLE))
+ local z_word_t byte_swap OF((z_word_t word));
+#endif
+
+#if defined(W) && !defined(ARMCRC32)
+ local z_crc_t crc_word OF((z_word_t data));
+ local z_word_t crc_word_big OF((z_word_t data));
#endif
#if defined(W) && (!defined(ARMCRC32) || defined(DYNAMIC_CRC_TABLE))
len &= 7;
/* Do three interleaved CRCs to realize the throughput of one crc32x
- instruction per cycle. Each CRC is calcuated on Z_BATCH words. The three
- CRCs are combined into a single CRC after each set of batches. */
+ instruction per cycle. Each CRC is calculated on Z_BATCH words. The
+ three CRCs are combined into a single CRC after each set of batches. */
while (num >= 3 * Z_BATCH) {
crc1 = 0;
crc2 = 0;
uLong crc2,
z_off_t len2)
{
- return crc32_combine64(crc1, crc2, len2);
+ return crc32_combine64(crc1, crc2, (z_off64_t)len2);
}
/* ========================================================================= */
uLong ZEXPORT crc32_combine_gen(
z_off_t len2)
{
- return crc32_combine_gen64(len2);
+ return crc32_combine_gen64((z_off64_t)len2);
}
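/* Illustration only, not part of the upstream patch: crc32_combine() merges
 * two independently computed CRC-32 values as if the second buffer had been
 * appended to the first.  Assuming caller-supplied buf1/len1 and buf2/len2:
 *
 *     uLong crc1 = crc32(0L, buf1, len1);
 *     uLong crc2 = crc32(0L, buf2, len2);
 *     uLong both = crc32_combine(crc1, crc2, (z_off_t)len2);
 *
 * "both" then equals crc32() computed over the two buffers concatenated.
 */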
/* ========================================================================= */
#include "deflate.h"
const char deflate_copyright[] =
- " deflate 1.2.12 Copyright 1995-2022 Jean-loup Gailly and Mark Adler ";
+ " deflate 1.2.13 Copyright 1995-2022 Jean-loup Gailly and Mark Adler ";
/*
If you use the zlib library in a product, an acknowledgment is welcome
in the documentation of your product. If for some reason you cannot
local void putShortMSB OF((deflate_state *s, uInt b));
local void flush_pending OF((z_streamp strm));
local unsigned read_buf OF((z_streamp strm, Bytef *buf, unsigned size));
-#ifdef ASMV
-# pragma message("Assembler code may have bugs -- use at your own risk")
- void match_init OF((void)); /* asm code initialization */
- uInt longest_match OF((deflate_state *s, IPos cur_match));
-#else
local uInt longest_match OF((deflate_state *s, IPos cur_match));
-#endif
#ifdef ZLIB_DEBUG
local void check_match OF((deflate_state *s, IPos start, IPos match,
* characters, so that a running hash key can be computed from the previous
* key instead of complete recalculation each time.
*/
-#define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
+#define UPDATE_HASH(s,h,c) (h = (((h) << s->hash_shift) ^ (c)) & s->hash_mask)
/* ===========================================================================
*/
#define CLEAR_HASH(s) \
do { \
- s->head[s->hash_size-1] = NIL; \
+ s->head[s->hash_size - 1] = NIL; \
zmemzero((Bytef *)s->head, \
- (unsigned)(s->hash_size-1)*sizeof(*s->head)); \
+ (unsigned)(s->hash_size - 1)*sizeof(*s->head)); \
} while (0)
/* ===========================================================================
if (windowBits < 0) { /* suppress zlib wrapper */
wrap = 0;
+ if (windowBits < -15)
+ return Z_STREAM_ERROR;
windowBits = -windowBits;
}
#ifdef GZIP
s->hash_bits = (uInt)memLevel + 7;
s->hash_size = 1 << s->hash_bits;
s->hash_mask = s->hash_size - 1;
- s->hash_shift = ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);
+ s->hash_shift = ((s->hash_bits + MIN_MATCH-1) / MIN_MATCH);
s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
s->prev = (Posf *) ZALLOC(strm, s->w_size, sizeof(Pos));
* sym_buf value to read moves forward three bytes. From that symbol, up to
* 31 bits are written to pending_buf. The closest the written pending_buf
* bits gets to the next sym_buf symbol to read is just before the last
- * code is written. At that time, 31*(n-2) bits have been written, just
- * after 24*(n-2) bits have been consumed from sym_buf. sym_buf starts at
- * 8*n bits into pending_buf. (Note that the symbol buffer fills when n-1
+ * code is written. At that time, 31*(n - 2) bits have been written, just
+ * after 24*(n - 2) bits have been consumed from sym_buf. sym_buf starts at
+ * 8*n bits into pending_buf. (Note that the symbol buffer fills when n - 1
* symbols are written.) The closest the writing gets to what is unread is
- * then n+14 bits. Here n is lit_bufsize, which is 16384 by default, and
+ * then n + 14 bits. Here n is lit_bufsize, which is 16384 by default, and
* can range from 128 to 32768.
*
* Therefore, at a minimum, there are 142 bits of space between what is
}
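/* Not part of the upstream comment, just the overlay arithmetic spelled out:
 * at that worst-case moment the next unread symbol sits 8*n + 24*(n - 2) =
 * 32*n - 48 bits into pending_buf while the writer has reached 31*(n - 2) =
 * 31*n - 62 bits, a gap of n + 14 bits; with the minimum lit_bufsize of
 * n = 128 that is the 142 bits quoted above.
 */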
/* =========================================================================
- * For the default windowBits of 15 and memLevel of 8, this function returns
- * a close to exact, as well as small, upper bound on the compressed size.
- * They are coded as constants here for a reason--if the #define's are
- * changed, then this function needs to be changed as well. The return
- * value for 15 and 8 only works for those exact settings.
+ * For the default windowBits of 15 and memLevel of 8, this function returns a
+ * close to exact, as well as small, upper bound on the compressed size. This
+ * is an expansion of ~0.03%, plus a small constant.
+ *
+ * For any setting other than those defaults for windowBits and memLevel, one
+ * of two worst case bounds is returned. This is at most an expansion of ~4% or
+ * ~13%, plus a small constant.
*
- * For any setting other than those defaults for windowBits and memLevel,
- * the value returned is a conservative worst case for the maximum expansion
- * resulting from using fixed blocks instead of stored blocks, which deflate
- * can emit on compressed data for some combinations of the parameters.
+ * Both the 0.03% and 4% derive from the overhead of stored blocks. The first
+ * one is for stored blocks of 16383 bytes (memLevel == 8), whereas the second
+ * is for stored blocks of 127 bytes (the worst case memLevel == 1). The
+ * expansion results from five bytes of header for each stored block.
*
- * This function could be more sophisticated to provide closer upper bounds for
- * every combination of windowBits and memLevel. But even the conservative
- * upper bound of about 14% expansion does not seem onerous for output buffer
- * allocation.
+ * The larger expansion of 13% results from a window size less than or equal to
+ * the symbols buffer size (windowBits <= memLevel + 7). In that case some of
+ * the data being compressed may have slid out of the sliding window, impeding
+ * a stored block from being emitted. Then the only choice is a fixed or
+ * dynamic block, where a fixed block limits the maximum expansion to 9 bits
+ * per 8-bit byte, plus 10 bits for every block. The smallest block size for
+ * which this can occur is 255 (memLevel == 2).
+ *
+ * Shifts are used to approximate divisions, for speed.
*/
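/* A quick arithmetic check of the percentages above (not in the upstream
 * comment): the fixed-block bound adds 1/8 + 1/256 + 1/512 of sourceLen,
 * roughly 13.1%; the stored-block bound adds 1/32 + 1/128 + 1/2048, roughly
 * 3.95%; and the default-settings bound below adds 1/4096 + 1/16384 +
 * 1/33554432, roughly 0.03%.
 */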
uLong ZEXPORT deflateBound(
z_streamp strm,
uLong sourceLen)
{
deflate_state *s;
- uLong complen, wraplen;
+ uLong fixedlen, storelen, wraplen;
+
+ /* upper bound for fixed blocks with 9-bit literals and length 255
+ (memLevel == 2, which is the lowest that may not use stored blocks) --
+ ~13% overhead plus a small constant */
+ fixedlen = sourceLen + (sourceLen >> 3) + (sourceLen >> 8) +
+ (sourceLen >> 9) + 4;
- /* conservative upper bound for compressed data */
- complen = sourceLen +
- ((sourceLen + 7) >> 3) + ((sourceLen + 63) >> 6) + 5;
+ /* upper bound for stored blocks with length 127 (memLevel == 1) --
+ ~4% overhead plus a small constant */
+ storelen = sourceLen + (sourceLen >> 5) + (sourceLen >> 7) +
+ (sourceLen >> 11) + 7;
- /* if can't get parameters, return conservative bound plus zlib wrapper */
+ /* if can't get parameters, return larger bound plus a zlib wrapper */
if (deflateStateCheck(strm))
- return complen + 6;
+ return (fixedlen > storelen ? fixedlen : storelen) + 6;
/* compute wrapper length */
s = strm->state;
wraplen = 6;
}
- /* if not default parameters, return conservative bound */
+ /* if not default parameters, return one of the conservative bounds */
if (s->w_bits != 15 || s->hash_bits != 8 + 7)
- return complen + wraplen;
+ return (s->w_bits <= s->hash_bits ? fixedlen : storelen) + wraplen;
- /* default settings: return tight bound for that case */
+ /* default settings: return tight bound for that case -- ~0.03% overhead
+ plus a small constant */
return sourceLen + (sourceLen >> 12) + (sourceLen >> 14) +
(sourceLen >> 25) + 13 - 6 + wraplen;
}
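/* A minimal usage sketch, not taken from the patch: sizing a one-shot output
 * buffer with deflateBound().  strm is assumed to be a z_stream already set
 * up with deflateInit(); src and srcLen are caller-supplied.
 *
 *     uLong destLen = deflateBound(&strm, (uLong)srcLen);
 *     Bytef *dest = (Bytef *)malloc(destLen);
 *     int ret;
 *     strm.next_in = src;    strm.avail_in = (uInt)srcLen;
 *     strm.next_out = dest;  strm.avail_out = (uInt)destLen;
 *     ret = deflate(&strm, Z_FINISH);
 *
 * With at least deflateBound() bytes of output space, the single
 * deflate(..., Z_FINISH) call can consume all of the input and should
 * return Z_STREAM_END.
 */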
s->status = BUSY_STATE;
if (s->status == INIT_STATE) {
/* zlib header */
- uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
+ uInt header = (Z_DEFLATED + ((s->w_bits - 8) << 4)) << 8;
uInt level_flags;
if (s->strategy >= Z_HUFFMAN_ONLY || s->level < 2)
s->match_length = s->prev_length = MIN_MATCH-1;
s->match_available = 0;
s->ins_h = 0;
-#ifndef FASTEST
-#ifdef ASMV
- match_init(); /* initialize the asm code */
-#endif
-#endif
}
#ifndef FASTEST
* string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
* OUT assertion: the match length is not greater than s->lookahead.
*/
-#ifndef ASMV
-/* For 80x86 and 680x0, an optimized version will be provided in match.asm or
- * match.S. The code will be functionally equivalent.
- */
local uInt longest_match(
deflate_state *s,
IPos cur_match)
*/
register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
register ush scan_start = *(ushf*)scan;
- register ush scan_end = *(ushf*)(scan+best_len-1);
+ register ush scan_end = *(ushf*)(scan + best_len - 1);
#else
register Bytef *strend = s->window + s->strstart + MAX_MATCH;
- register Byte scan_end1 = scan[best_len-1];
+ register Byte scan_end1 = scan[best_len - 1];
register Byte scan_end = scan[best_len];
#endif
*/
if ((uInt)nice_match > s->lookahead) nice_match = (int)s->lookahead;
- Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");
+ Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
+ "need lookahead");
do {
Assert(cur_match < s->strstart, "no future");
/* This code assumes sizeof(unsigned short) == 2. Do not use
* UNALIGNED_OK if your compiler uses a different size.
*/
- if (*(ushf*)(match+best_len-1) != scan_end ||
+ if (*(ushf*)(match + best_len - 1) != scan_end ||
*(ushf*)match != scan_start) continue;
/* It is not necessary to compare scan[2] and match[2] since they are
* always equal when the other bytes match, given that the hash keys
* are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
- * strstart+3, +5, ... up to strstart+257. We check for insufficient
+ * strstart + 3, + 5, up to strstart + 257. We check for insufficient
* lookahead only every 4th comparison; the 128th check will be made
- * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
+ * at strstart + 257. If MAX_MATCH-2 is not a multiple of 8, it is
* necessary to put more guard bytes at the end of the window, or
* to check more often for insufficient lookahead.
*/
Assert(scan[2] == match[2], "scan[2]?");
scan++, match++;
do {
- } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
- *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
- *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
- *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
+ } while (*(ushf*)(scan += 2) == *(ushf*)(match += 2) &&
+ *(ushf*)(scan += 2) == *(ushf*)(match += 2) &&
+ *(ushf*)(scan += 2) == *(ushf*)(match += 2) &&
+ *(ushf*)(scan += 2) == *(ushf*)(match += 2) &&
scan < strend);
/* The funny "do {}" generates better code on most compilers */
- /* Here, scan <= window+strstart+257 */
- Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
+ /* Here, scan <= window + strstart + 257 */
+ Assert(scan <= s->window + (unsigned)(s->window_size - 1),
+ "wild scan");
if (*scan == *match) scan++;
- len = (MAX_MATCH - 1) - (int)(strend-scan);
+ len = (MAX_MATCH - 1) - (int)(strend - scan);
scan = strend - (MAX_MATCH-1);
#else /* UNALIGNED_OK */
- if (match[best_len] != scan_end ||
- match[best_len-1] != scan_end1 ||
- *match != *scan ||
- *++match != scan[1]) continue;
+ if (match[best_len] != scan_end ||
+ match[best_len - 1] != scan_end1 ||
+ *match != *scan ||
+ *++match != scan[1]) continue;
- /* The check at best_len-1 can be removed because it will be made
+ /* The check at best_len - 1 can be removed because it will be made
* again later. (This heuristic is not always a win.)
* It is not necessary to compare scan[2] and match[2] since they
* are always equal when the other bytes match, given that
Assert(*scan == *match, "match[2]?");
/* We check for insufficient lookahead only every 8th comparison;
- * the 256th check will be made at strstart+258.
+ * the 256th check will be made at strstart + 258.
*/
do {
} while (*++scan == *++match && *++scan == *++match &&
*++scan == *++match && *++scan == *++match &&
scan < strend);
- Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
+ Assert(scan <= s->window + (unsigned)(s->window_size - 1),
+ "wild scan");
len = MAX_MATCH - (int)(strend - scan);
scan = strend - MAX_MATCH;
best_len = len;
if (len >= nice_match) break;
#ifdef UNALIGNED_OK
- scan_end = *(ushf*)(scan+best_len-1);
+ scan_end = *(ushf*)(scan + best_len - 1);
#else
- scan_end1 = scan[best_len-1];
+ scan_end1 = scan[best_len - 1];
scan_end = scan[best_len];
#endif
}
if ((uInt)best_len <= s->lookahead) return (uInt)best_len;
return s->lookahead;
}
-#endif /* ASMV */
#else /* FASTEST */
*/
Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");
- Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");
+ Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
+ "need lookahead");
Assert(cur_match < s->strstart, "no future");
*/
if (match[0] != scan[0] || match[1] != scan[1]) return MIN_MATCH-1;
- /* The check at best_len-1 can be removed because it will be made
+ /* The check at best_len - 1 can be removed because it will be made
* again later. (This heuristic is not always a win.)
* It is not necessary to compare scan[2] and match[2] since they
* are always equal when the other bytes match, given that
Assert(*scan == *match, "match[2]?");
/* We check for insufficient lookahead only every 8th comparison;
- * the 256th check will be made at strstart+258.
+ * the 256th check will be made at strstart + 258.
*/
do {
} while (*++scan == *++match && *++scan == *++match &&
*++scan == *++match && *++scan == *++match &&
scan < strend);
- Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
+ Assert(scan <= s->window + (unsigned)(s->window_size - 1), "wild scan");
len = MAX_MATCH - (int)(strend - scan);
z_error("invalid match");
}
if (z_verbose > 1) {
- fprintf(stderr,"\\[%d,%d]", start-match, length);
+ fprintf(stderr,"\\[%d,%d]", start - match, length);
do { putc(s->window[start++], stderr); } while (--length != 0);
}
}
/* If the window is almost full and there is insufficient lookahead,
* move the upper half to the lower one to make room in the upper half.
*/
- if (s->strstart >= wsize+MAX_DIST(s)) {
+ if (s->strstart >= wsize + MAX_DIST(s)) {
- zmemcpy(s->window, s->window+wsize, (unsigned)wsize - more);
+ zmemcpy(s->window, s->window + wsize, (unsigned)wsize - more);
s->match_start -= wsize;
s->strstart -= wsize; /* we now have strstart >= MAX_DIST */
s->block_start -= (long) wsize;
*
* deflate_stored() is written to minimize the number of times an input byte is
* copied. It is most efficient with large input and output buffers, which
- * maximizes the opportunites to have a single copy from next_in to next_out.
+ * maximizes the opportunities to have a single copy from next_in to next_out.
*/
local block_state deflate_stored(
deflate_state *s,
if (s->lookahead == 0) break; /* flush the current block */
}
- /* Insert the string window[strstart .. strstart+2] in the
+ /* Insert the string window[strstart .. strstart + 2] in the
* dictionary, and set hash_head to the head of the hash chain:
*/
hash_head = NIL;
s->strstart += s->match_length;
s->match_length = 0;
s->ins_h = s->window[s->strstart];
- UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
+ UPDATE_HASH(s, s->ins_h, s->window[s->strstart + 1]);
#if MIN_MATCH != 3
Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
} else {
/* No match, output a literal byte */
Tracevv((stderr,"%c", s->window[s->strstart]));
- _tr_tally_lit (s, s->window[s->strstart], bflush);
+ _tr_tally_lit(s, s->window[s->strstart], bflush);
s->lookahead--;
s->strstart++;
}
if (s->lookahead == 0) break; /* flush the current block */
}
- /* Insert the string window[strstart .. strstart+2] in the
+ /* Insert the string window[strstart .. strstart + 2] in the
* dictionary, and set hash_head to the head of the hash chain:
*/
hash_head = NIL;
uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
/* Do not insert strings in hash table beyond this. */
- check_match(s, s->strstart-1, s->prev_match, s->prev_length);
+ check_match(s, s->strstart - 1, s->prev_match, s->prev_length);
- _tr_tally_dist(s, s->strstart -1 - s->prev_match,
+ _tr_tally_dist(s, s->strstart - 1 - s->prev_match,
s->prev_length - MIN_MATCH, bflush);
/* Insert in hash table all strings up to the end of the match.
- * strstart-1 and strstart are already inserted. If there is not
+ * strstart - 1 and strstart are already inserted. If there is not
* enough lookahead, the last two strings are not inserted in
* the hash table.
*/
- s->lookahead -= s->prev_length-1;
+ s->lookahead -= s->prev_length - 1;
s->prev_length -= 2;
do {
if (++s->strstart <= max_insert) {
* single literal. If there was a match but the current match
* is longer, truncate the previous match to a single literal.
*/
- Tracevv((stderr,"%c", s->window[s->strstart-1]));
- _tr_tally_lit(s, s->window[s->strstart-1], bflush);
+ Tracevv((stderr,"%c", s->window[s->strstart - 1]));
+ _tr_tally_lit(s, s->window[s->strstart - 1], bflush);
if (bflush) {
FLUSH_BLOCK_ONLY(s, 0);
}
}
Assert (flush != Z_NO_FLUSH, "no flush?");
if (s->match_available) {
- Tracevv((stderr,"%c", s->window[s->strstart-1]));
- _tr_tally_lit(s, s->window[s->strstart-1], bflush);
+ Tracevv((stderr,"%c", s->window[s->strstart - 1]));
+ _tr_tally_lit(s, s->window[s->strstart - 1], bflush);
s->match_available = 0;
}
s->insert = s->strstart < MIN_MATCH-1 ? s->strstart : MIN_MATCH-1;
if (s->match_length > s->lookahead)
s->match_length = s->lookahead;
}
- Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan");
+ Assert(scan <= s->window + (uInt)(s->window_size - 1),
+ "wild scan");
}
/* Emit match if have run of MIN_MATCH or longer, else emit literal */
} else {
/* No match, output a literal byte */
Tracevv((stderr,"%c", s->window[s->strstart]));
- _tr_tally_lit (s, s->window[s->strstart], bflush);
+ _tr_tally_lit(s, s->window[s->strstart], bflush);
s->lookahead--;
s->strstart++;
}
/* Output a literal byte */
s->match_length = 0;
Tracevv((stderr,"%c", s->window[s->strstart]));
- _tr_tally_lit (s, s->window[s->strstart], bflush);
+ _tr_tally_lit(s, s->window[s->strstart], bflush);
s->lookahead--;
s->strstart++;
if (bflush) FLUSH_BLOCK(s, 0);
# define _tr_tally_dist(s, distance, length, flush) \
{ uch len = (uch)(length); \
ush dist = (ush)(distance); \
- s->sym_buf[s->sym_next++] = dist; \
- s->sym_buf[s->sym_next++] = dist >> 8; \
+ s->sym_buf[s->sym_next++] = (uch)dist; \
+ s->sym_buf[s->sym_next++] = (uch)(dist >> 8); \
s->sym_buf[s->sym_next++] = len; \
dist--; \
s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
state->window = window;
state->wnext = 0;
state->whave = 0;
+ state->sane = 1;
return Z_OK;
}
break;
case DONE:
- /* inflate stream terminated properly -- write leftover output */
+ /* inflate stream terminated properly */
ret = Z_STREAM_END;
- if (left < state->wsize) {
- if (out(out_desc, state->window, state->wsize - left))
- ret = Z_BUF_ERROR;
- }
goto inf_leave;
case BAD:
ret = Z_DATA_ERROR;
goto inf_leave;
- default: /* can't happen, but makes compilers happy */
+ default:
+ /* can't happen, but makes compilers happy */
ret = Z_STREAM_ERROR;
goto inf_leave;
}
- /* Return unused input */
+ /* Write leftover output and return unused input */
inf_leave:
+ if (left < state->wsize) {
+ if (out(out_desc, state->window, state->wsize - left) &&
+ ret == Z_STREAM_END)
+ ret = Z_BUF_ERROR;
+ }
strm->next_in = next;
strm->avail_in = have;
return ret;
/* extract wrap request from windowBits parameter */
if (windowBits < 0) {
+ if (windowBits < -15)
+ return Z_STREAM_ERROR;
wrap = 0;
windowBits = -windowBits;
}
if (copy > have) copy = have;
if (copy) {
if (state->head != Z_NULL &&
- state->head->extra != Z_NULL) {
- len = state->head->extra_len - state->length;
+ state->head->extra != Z_NULL &&
+ (len = state->head->extra_len - state->length) <
+ state->head->extra_max) {
zmemcpy(state->head->extra + len, next,
len + copy > state->head->extra_max ?
state->head->extra_max - len : copy);
#define MAXBITS 15
const char inflate_copyright[] =
- " inflate 1.2.12 Copyright 1995-2022 Mark Adler ";
+ " inflate 1.2.13 Copyright 1995-2022 Mark Adler ";
/*
If you use the zlib library in a product, an acknowledgment is welcome
in the documentation of your product. If for some reason you cannot
35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
static const unsigned short lext[31] = { /* Length codes 257..285 extra */
16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18,
- 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 199, 202};
+ 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 194, 65};
static const unsigned short dbase[32] = { /* Distance codes 0..29 base */
1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
/* Maximum size of the dynamic table. The maximum number of code structures is
1444, which is the sum of 852 for literal/length codes and 592 for distance
codes. These values were found by exhaustive searches using the program
- examples/enough.c found in the zlib distribtution. The arguments to that
+ examples/enough.c found in the zlib distribution. The arguments to that
program are the number of symbols, the initial root table size, and the
maximum bit length of a code. "enough 286 9 15" for literal/length codes
returns 852, and "enough 30 6 15" for distance codes returns 592.
s->bits_sent += (ulg)length;
/* If not enough room in bi_buf, use (valid) bits from bi_buf and
- * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
+ * (16 - bi_valid) bits from value, leaving (width - (16 - bi_valid))
* unused bits in value.
*/
if (s->bi_valid > (int)Buf_size - length) {
length = 0;
for (code = 0; code < LENGTH_CODES-1; code++) {
base_length[code] = length;
- for (n = 0; n < (1<<extra_lbits[code]); n++) {
+ for (n = 0; n < (1 << extra_lbits[code]); n++) {
_length_code[length++] = (uch)code;
}
}
* in two different ways: code 284 + 5 bits or code 285, so we
* overwrite length_code[255] to use the best encoding:
*/
- _length_code[length-1] = (uch)code;
+ _length_code[length - 1] = (uch)code;
/* Initialize the mapping dist (0..32K) -> dist code (0..29) */
dist = 0;
for (code = 0 ; code < 16; code++) {
base_dist[code] = dist;
- for (n = 0; n < (1<<extra_dbits[code]); n++) {
+ for (n = 0; n < (1 << extra_dbits[code]); n++) {
_dist_code[dist++] = (uch)code;
}
}
dist >>= 7; /* from now on, all distances are divided by 128 */
for ( ; code < D_CODES; code++) {
base_dist[code] = dist << 7;
- for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
+ for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) {
_dist_code[256 + dist++] = (uch)code;
}
}
- Assert (dist == 256, "tr_static_init: 256+dist != 512");
+ Assert (dist == 256, "tr_static_init: 256 + dist != 512");
/* Construct the codes of the static literal tree */
for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
}
/* ===========================================================================
- * Genererate the file trees.h describing the static trees.
+ * Generate the file trees.h describing the static trees.
*/
#ifdef GEN_TREES_H
# ifndef ZLIB_DEBUG
# define SEPARATOR(i, last, width) \
((i) == (last)? "\n};\n\n" : \
- ((i) % (width) == (width)-1 ? ",\n" : ", "))
+ ((i) % (width) == (width) - 1 ? ",\n" : ", "))
void gen_trees_header()
{
while (j <= s->heap_len) {
/* Set j to the smallest of the two sons: */
if (j < s->heap_len &&
- smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
+ smaller(tree, s->heap[j + 1], s->heap[j], s->depth)) {
j++;
}
/* Exit if v is smaller than both sons */
*/
tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */
- for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
+ for (h = s->heap_max + 1; h < HEAP_SIZE; h++) {
n = s->heap[h];
bits = tree[tree[n].Dad].Len + 1;
if (bits > max_length) bits = max_length, overflow++;
s->bl_count[bits]++;
xbits = 0;
- if (n >= base) xbits = extra[n-base];
+ if (n >= base) xbits = extra[n - base];
f = tree[n].Freq;
s->opt_len += (ulg)f * (unsigned)(bits + xbits);
if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits);
/* Find the first bit length which could increase: */
do {
- bits = max_length-1;
+ bits = max_length - 1;
while (s->bl_count[bits] == 0) bits--;
- s->bl_count[bits]--; /* move one leaf down the tree */
- s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
+ s->bl_count[bits]--; /* move one leaf down the tree */
+ s->bl_count[bits + 1] += 2; /* move one overflow item as its brother */
s->bl_count[max_length]--;
/* The brother of the overflow item also moves one step up,
* but this does not affect bl_count[max_length]
* without bit reversal.
*/
for (bits = 1; bits <= MAX_BITS; bits++) {
- code = (code + bl_count[bits-1]) << 1;
+ code = (code + bl_count[bits - 1]) << 1;
next_code[bits] = (ush)code;
}
/* Check that the bit counts in bl_count are consistent. The last code
* must be all ones.
*/
- Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
+ Assert (code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
"inconsistent bit counts");
Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
tree[n].Code = (ush)bi_reverse(next_code[len]++, len);
Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
- n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
+ n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len] - 1));
}
}
int node; /* new node being created */
/* Construct the initial heap, with least frequent element in
- * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
+ * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n + 1].
* heap[0] is not used.
*/
s->heap_len = 0, s->heap_max = HEAP_SIZE;
}
desc->max_code = max_code;
- /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
+ /* The elements heap[heap_len/2 + 1 .. heap_len] are leaves of the tree,
* establish sub-heaps of increasing lengths:
*/
for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);
int min_count = 4; /* min repeat count */
if (nextlen == 0) max_count = 138, min_count = 3;
- tree[max_code+1].Len = (ush)0xffff; /* guard */
+ tree[max_code + 1].Len = (ush)0xffff; /* guard */
for (n = 0; n <= max_code; n++) {
- curlen = nextlen; nextlen = tree[n+1].Len;
+ curlen = nextlen; nextlen = tree[n + 1].Len;
if (++count < max_count && curlen == nextlen) {
continue;
} else if (count < min_count) {
int max_count = 7; /* max repeat count */
int min_count = 4; /* min repeat count */
- /* tree[max_code+1].Len = -1; */ /* guard already set */
+ /* tree[max_code + 1].Len = -1; */ /* guard already set */
if (nextlen == 0) max_count = 138, min_count = 3;
for (n = 0; n <= max_code; n++) {
- curlen = nextlen; nextlen = tree[n+1].Len;
+ curlen = nextlen; nextlen = tree[n + 1].Len;
if (++count < max_count && curlen == nextlen) {
continue;
} else if (count < min_count) {
send_code(s, curlen, s->bl_tree); count--;
}
Assert(count >= 3 && count <= 6, " 3_6?");
- send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);
+ send_code(s, REP_3_6, s->bl_tree); send_bits(s, count - 3, 2);
} else if (count <= 10) {
- send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);
+ send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count - 3, 3);
} else {
- send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
+ send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count - 11, 7);
}
count = 0; prevlen = curlen;
if (nextlen == 0) {
/* Build the bit length tree: */
build_tree(s, (tree_desc *)(&(s->bl_desc)));
- /* opt_len now includes the length of the tree representations, except
- * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
+ /* opt_len now includes the length of the tree representations, except the
+ * lengths of the bit lengths codes and the 5 + 5 + 4 bits for the counts.
*/
/* Determine the number of bit length codes to send. The pkzip format
if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
}
/* Update opt_len to include the bit length tree and counts */
- s->opt_len += 3*((ulg)max_blindex+1) + 5+5+4;
+ s->opt_len += 3*((ulg)max_blindex + 1) + 5 + 5 + 4;
Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
s->opt_len, s->static_len));
Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
"too many codes");
Tracev((stderr, "\nbl counts: "));
- send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
- send_bits(s, dcodes-1, 5);
- send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */
+ send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */
+ send_bits(s, dcodes - 1, 5);
+ send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */
for (rank = 0; rank < blcodes; rank++) {
Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
}
Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
- send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
+ send_tree(s, (ct_data *)s->dyn_ltree, lcodes - 1); /* literal tree */
Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
- send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
+ send_tree(s, (ct_data *)s->dyn_dtree, dcodes - 1); /* distance tree */
Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
}
ulg stored_len,
int last)
{
- send_bits(s, (STORED_BLOCK<<1)+last, 3); /* send block type */
+ send_bits(s, (STORED_BLOCK<<1) + last, 3); /* send block type */
bi_windup(s); /* align on byte boundary */
put_short(s, (ush)stored_len);
put_short(s, (ush)~stored_len);
s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
s->compressed_len += (stored_len + 4) << 3;
s->bits_sent += 2*16;
- s->bits_sent += stored_len<<3;
+ s->bits_sent += stored_len << 3;
#endif
}
max_blindex = build_bl_tree(s);
/* Determine the best encoding. Compute the block lengths in bytes. */
- opt_lenb = (s->opt_len+3+7)>>3;
- static_lenb = (s->static_len+3+7)>>3;
+ opt_lenb = (s->opt_len + 3 + 7) >> 3;
+ static_lenb = (s->static_len + 3 + 7) >> 3;
Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
s->sym_next / 3));
- if (static_lenb <= opt_lenb) opt_lenb = static_lenb;
+#ifndef FORCE_STATIC
+ if (static_lenb <= opt_lenb || s->strategy == Z_FIXED)
+#endif
+ opt_lenb = static_lenb;
} else {
Assert(buf != (char*)0, "lost buf");
#ifdef FORCE_STORED
if (buf != (char*)0) { /* force stored block */
#else
- if (stored_len+4 <= opt_lenb && buf != (char*)0) {
+ if (stored_len + 4 <= opt_lenb && buf != (char*)0) {
/* 4: two words for the lengths */
#endif
/* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
*/
_tr_stored_block(s, buf, stored_len, last);
-#ifdef FORCE_STATIC
- } else if (static_lenb >= 0) { /* force static trees */
-#else
- } else if (s->strategy == Z_FIXED || static_lenb == opt_lenb) {
-#endif
- send_bits(s, (STATIC_TREES<<1)+last, 3);
+ } else if (static_lenb == opt_lenb) {
+ send_bits(s, (STATIC_TREES<<1) + last, 3);
compress_block(s, (const ct_data *)static_ltree,
(const ct_data *)static_dtree);
#ifdef ZLIB_DEBUG
s->compressed_len += 3 + s->static_len;
#endif
} else {
- send_bits(s, (DYN_TREES<<1)+last, 3);
- send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
- max_blindex+1);
+ send_bits(s, (DYN_TREES<<1) + last, 3);
+ send_all_trees(s, s->l_desc.max_code + 1, s->d_desc.max_code + 1,
+ max_blindex + 1);
compress_block(s, (const ct_data *)s->dyn_ltree,
(const ct_data *)s->dyn_dtree);
#ifdef ZLIB_DEBUG
s->compressed_len += 7; /* align on byte boundary */
#endif
}
- Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
- s->compressed_len-7*last));
+ Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len >> 3,
+ s->compressed_len - 7*last));
}
/* ===========================================================================
unsigned dist,
unsigned lc)
{
- s->sym_buf[s->sym_next++] = dist;
- s->sym_buf[s->sym_next++] = dist >> 8;
- s->sym_buf[s->sym_next++] = lc;
+ s->sym_buf[s->sym_next++] = (uch)dist;
+ s->sym_buf[s->sym_next++] = (uch)(dist >> 8);
+ s->sym_buf[s->sym_next++] = (uch)lc;
if (dist == 0) {
/* lc is the unmatched char */
s->dyn_ltree[lc].Freq++;
(ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
(ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match");
- s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
+ s->dyn_ltree[_length_code[lc] + LITERALS + 1].Freq++;
s->dyn_dtree[d_code(dist)].Freq++;
}
return (s->sym_next == s->sym_end);
} else {
/* Here, lc is the match length - MIN_MATCH */
code = _length_code[lc];
- send_code(s, code+LITERALS+1, ltree); /* send the length code */
+ send_code(s, code + LITERALS + 1, ltree); /* send length code */
extra = extra_lbits[code];
if (extra != 0) {
lc -= base_length[code];
s->bi_buf = 0;
s->bi_valid = 0;
#ifdef ZLIB_DEBUG
- s->bits_sent = (s->bits_sent+7) & ~7;
+ s->bits_sent = (s->bits_sent + 7) & ~7;
#endif
}
# ifdef FAR
# undef FAR
# endif
+# ifndef WIN32_LEAN_AND_MEAN
+# define WIN32_LEAN_AND_MEAN
+# endif
# include <windows.h>
/* No need for _export, use ZLIB.DEF instead. */
/* For complete Windows compatibility, use WINAPI, not __stdcall. */
# undef _LARGEFILE64_SOURCE
#endif
-#if defined(__WATCOMC__) && !defined(Z_HAVE_UNISTD_H)
-# define Z_HAVE_UNISTD_H
+#ifndef Z_HAVE_UNISTD_H
+# ifdef __WATCOMC__
+# define Z_HAVE_UNISTD_H
+# endif
+#endif
+#ifndef Z_HAVE_UNISTD_H
+# if defined(_LARGEFILE64_SOURCE) && !defined(_WIN32)
+# define Z_HAVE_UNISTD_H
+# endif
#endif
#ifndef Z_SOLO
-# if defined(Z_HAVE_UNISTD_H) || defined(_LARGEFILE64_SOURCE)
+# if defined(Z_HAVE_UNISTD_H)
# include <unistd.h> /* for SEEK_*, off_t, and _LFS64_LARGEFILE */
# ifdef VMS
# include <unixio.h> /* for off_t */
/* zlib.h -- interface of the 'zlib' general purpose compression library
- version 1.2.12, March 11th, 2022
+ version 1.2.13, October 13th, 2022
Copyright (C) 1995-2022 Jean-loup Gailly and Mark Adler
extern "C" {
#endif
-#define ZLIB_VERSION "1.2.12"
-#define ZLIB_VERNUM 0x12c0
+#define ZLIB_VERSION "1.2.13"
+#define ZLIB_VERNUM 0x12d0
#define ZLIB_VER_MAJOR 1
#define ZLIB_VER_MINOR 2
-#define ZLIB_VER_REVISION 12
+#define ZLIB_VER_REVISION 13
#define ZLIB_VER_SUBREVISION 0
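/* Not part of the upstream header: dependent code can adapt to this release
 * by testing the packed hex version at compile time, e.g.
 *
 *     #if ZLIB_VERNUM >= 0x12d0
 *     ...behaviour that relies on 1.2.13 or newer...
 *     #endif
 *
 * or by comparing zlibVersion() against ZLIB_VERSION at run time.
 */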
/*
== 0), or after each call of deflate(). If deflate returns Z_OK and with
zero avail_out, it must be called again after making room in the output
buffer because there might be more output pending. See deflatePending(),
- which can be used if desired to determine whether or not there is more ouput
+ which can be used if desired to determine whether or not there is more output
in that case.
Normally the parameter flush is set to Z_NO_FLUSH, which allows deflate to
to dictionary. dictionary must have enough space, where 32768 bytes is
always enough. If deflateGetDictionary() is called with dictionary equal to
Z_NULL, then only the dictionary length is returned, and nothing is copied.
- Similary, if dictLength is Z_NULL, then it is not set.
+ Similarly, if dictLength is Z_NULL, then it is not set.
deflateGetDictionary() may return a length less than the window size, even
when more than the window size in input has been provided. It may return up
to dictionary. dictionary must have enough space, where 32768 bytes is
always enough. If inflateGetDictionary() is called with dictionary equal to
Z_NULL, then only the dictionary length is returned, and nothing is copied.
- Similary, if dictLength is Z_NULL, then it is not set.
+ Similarly, if dictLength is Z_NULL, then it is not set.
inflateGetDictionary returns Z_OK on success, or Z_STREAM_ERROR if the
stream state is inconsistent.
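     As an illustration only (not part of the upstream text), a caller could
   fetch the current dictionary into a stack buffer, since 32768 bytes is
   always enough; strm is assumed to be an active inflate stream:

       Byte dict[32768];
       uInt dictLength;
       if (inflateGetDictionary(&strm, dict, &dictLength) == Z_OK)
           ...dictLength bytes of window history are now in dict...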
In the event that the end of file is reached and only a partial item is
available at the end, i.e. the remaining uncompressed data length is not a
- multiple of size, then the final partial item is nevetheless read into buf
+ multiple of size, then the final partial item is nevertheless read into buf
and the end-of-file flag is set. The length of the partial item read is not
provided, but could be inferred from the result of gztell(). This behavior
is the same as the behavior of fread() implementations in common libraries,
but it prevents the direct use of gzfread() to read a concurrently written
- file, reseting and retrying on end-of-file, when size is not 1.
+ file, resetting and retrying on end-of-file, when size is not 1.
*/
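/* A usage sketch, not from the upstream header: reading fixed-size records
 * with gzfread(), where gz is an open gzFile and rec/NREC are hypothetical
 * caller-side names.  Because of the partial-item behaviour described above,
 * the return value says how many whole records were obtained:
 *
 *     int errnum;
 *     z_size_t got = gzfread(rec, sizeof(*rec), NREC, gz);
 *     if (got < NREC)
 *         ...use gzeof(gz) and gzerror(gz, &errnum) to tell EOF from error...
 */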
ZEXTERN int ZEXPORT gzwrite OF((gzFile file, voidpc buf, unsigned len));
ZEXTERN const z_crc_t FAR * ZEXPORT get_crc_table OF((void));
ZEXTERN int ZEXPORT inflateUndermine OF((z_streamp, int));
ZEXTERN int ZEXPORT inflateValidate OF((z_streamp, int));
-ZEXTERN unsigned long ZEXPORT inflateCodesUsed OF ((z_streamp));
+ZEXTERN unsigned long ZEXPORT inflateCodesUsed OF((z_streamp));
ZEXTERN int ZEXPORT inflateResetKeep OF((z_streamp));
ZEXTERN int ZEXPORT deflateResetKeep OF((z_streamp));
#if defined(_WIN32) && !defined(Z_SOLO)
#ifdef ZLIB_DEBUG
flags += 1 << 8;
#endif
+ /*
#if defined(ASMV) || defined(ASMINF)
flags += 1 << 9;
#endif
+ */
#ifdef ZLIB_WINAPI
flags += 1 << 10;
#endif
* a protected system like OS/2. Use Microsoft C instead.
*/
-voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, unsigned items, unsigned size)
+voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items, unsigned size)
{
voidpf buf;
ulg bsize = (ulg)items*size;
return buf;
}
-void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
+void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
{
int n;
# define _hfree hfree
#endif
-voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, uInt items, uInt size)
+voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, uInt items, uInt size)
{
(void)opaque;
return _halloc((long)items, size);
}
-void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
+void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
{
(void)opaque;
_hfree(ptr);
(!defined(_LARGEFILE64_SOURCE) || _LFS64_LARGEFILE-0 == 0)
ZEXTERN uLong ZEXPORT adler32_combine64 OF((uLong, uLong, z_off_t));
ZEXTERN uLong ZEXPORT crc32_combine64 OF((uLong, uLong, z_off_t));
+ ZEXTERN uLong ZEXPORT crc32_combine_gen64 OF((z_off_t));
#endif
/* common defaults */