diff -Npur zlib-1.2.11/deflate.c zlib-1.2.11-change/deflate.c
--- zlib-1.2.11/deflate.c	2017-01-16 01:29:40.000000000 +0800
+++ zlib-1.2.11-change/deflate.c	2022-07-28 04:48:30.310281281 +0800
@@ -252,10 +252,6 @@ int ZEXPORT deflateInit2_(strm, level, m
     int wrap = 1;
     static const char my_version[] = ZLIB_VERSION;

-    ushf *overlay;
-    /* We overlay pending_buf and d_buf+l_buf. This works since the average
-     * output size for (length,distance) codes is <= 24 bits.
-     */

     if (version == Z_NULL || version[0] != my_version[0] ||
         stream_size != sizeof(z_stream)) {
@@ -326,9 +322,47 @@ int ZEXPORT deflateInit2_(strm, level, m

     s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */

-    overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
-    s->pending_buf = (uchf *) overlay;
-    s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L);
+    /* We overlay pending_buf and sym_buf. This works since the average size
+     * for length/distance pairs over any compressed block is assured to be 31
+     * bits or less.
+     *
+     * Analysis: The longest fixed codes are a length code of 8 bits plus 5
+     * extra bits, for lengths 131 to 257. The longest fixed distance codes are
+     * 5 bits plus 13 extra bits, for distances 16385 to 32768. The longest
+     * possible fixed-codes length/distance pair is then 31 bits total.
+     *
+     * sym_buf starts one-fourth of the way into pending_buf. So there are
+     * three bytes in sym_buf for every four bytes in pending_buf. Each symbol
+     * in sym_buf is three bytes -- two for the distance and one for the
+     * literal/length. As each symbol is consumed, the pointer to the next
+     * sym_buf value to read moves forward three bytes. From that symbol, up to
+     * 31 bits are written to pending_buf. The closest the written pending_buf
+     * bits gets to the next sym_buf symbol to read is just before the last
+     * code is written. At that time, 31*(n-2) bits have been written, just
+     * after 24*(n-2) bits have been consumed from sym_buf. sym_buf starts at
+     * 8*n bits into pending_buf. (Note that the symbol buffer fills when n-1
+     * symbols are written.) The closest the writing gets to what is unread is
+     * then n+14 bits. Here n is lit_bufsize, which is 16384 by default, and
+     * can range from 128 to 32768.
+     *
+     * Therefore, at a minimum, there are 142 bits of space between what is
+     * written and what is read in the overlain buffers, so the symbols cannot
+     * be overwritten by the compressed data. That space is actually 139 bits,
+     * due to the three-bit fixed-code block header.
+     *
+     * That covers the case where either Z_FIXED is specified, forcing fixed
+     * codes, or when the use of fixed codes is chosen, because that choice
+     * results in a smaller compressed block than dynamic codes. That latter
+     * condition then assures that the above analysis also covers all dynamic
+     * blocks. A dynamic-code block will only be chosen to be emitted if it has
+     * fewer bits than a fixed-code block would for the same set of symbols.
+     * Therefore its average symbol length is assured to be less than 31. So
+     * the compressed data for a dynamic block also cannot overwrite the
+     * symbols from which it is being constructed.
+     */
+
+    s->pending_buf = (uchf *) ZALLOC(strm, s->lit_bufsize, 4);
+    s->pending_buf_size = (ulg)s->lit_bufsize * 4;

     if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
         s->pending_buf == Z_NULL) {
@@ -337,8 +371,12 @@ int ZEXPORT deflateInit2_(strm, level, m
         deflateEnd (strm);
         return Z_MEM_ERROR;
     }
-    s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
-    s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;
+    s->sym_buf = s->pending_buf + s->lit_bufsize;
+    s->sym_end = (s->lit_bufsize - 1) * 3;
+    /* We avoid equality with lit_bufsize*3 because of wraparound at 64K
+     * on 16 bit machines and because stored blocks are restricted to
+     * 64K-1 bytes.
+     */

     s->level = level;
     s->strategy = strategy;
@@ -549,7 +587,7 @@ int ZEXPORT deflatePrime (strm, bits, va

     if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
     s = strm->state;
-    if ((Bytef *)(s->d_buf) < s->pending_out + ((Buf_size + 7) >> 3))
+    if (s->sym_buf < s->pending_out + ((Buf_size + 7) >> 3))
         return Z_BUF_ERROR;
     do {
         put = Buf_size - s->bi_valid;
@@ -1108,7 +1146,6 @@ int ZEXPORT deflateCopy (dest, source)
 #else
     deflate_state *ds;
     deflate_state *ss;
-    ushf *overlay;


     if (deflateStateCheck(source) || dest == Z_NULL) {
@@ -1128,8 +1165,7 @@ int ZEXPORT deflateCopy (dest, source)
     ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
     ds->prev   = (Posf *)  ZALLOC(dest, ds->w_size, sizeof(Pos));
     ds->head   = (Posf *)  ZALLOC(dest, ds->hash_size, sizeof(Pos));
-    overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof(ush)+2);
-    ds->pending_buf = (uchf *) overlay;
+    ds->pending_buf = (uchf *) ZALLOC(dest, ds->lit_bufsize, 4);

     if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
         ds->pending_buf == Z_NULL) {
@@ -1143,8 +1179,7 @@ int ZEXPORT deflateCopy (dest, source)
     zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);

     ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
-    ds->d_buf = overlay + ds->lit_bufsize/sizeof(ush);
-    ds->l_buf = ds->pending_buf + (1+sizeof(ush))*ds->lit_bufsize;
+    ds->sym_buf = ds->pending_buf + ds->lit_bufsize;

     ds->l_desc.dyn_tree = ds->dyn_ltree;
     ds->d_desc.dyn_tree = ds->dyn_dtree;
@@ -1912,7 +1947,7 @@ local block_state deflate_fast(s, flush)
         FLUSH_BLOCK(s, 1);
         return finish_done;
     }
-    if (s->last_lit)
+    if (s->sym_next)
         FLUSH_BLOCK(s, 0);
     return block_done;
 }
@@ -2043,7 +2078,7 @@ local block_state deflate_slow(s, flush)
         FLUSH_BLOCK(s, 1);
         return finish_done;
     }
-    if (s->last_lit)
+    if (s->sym_next)
         FLUSH_BLOCK(s, 0);
     return block_done;
 }
@@ -2118,7 +2153,7 @@ local block_state deflate_rle(s, flush)
         FLUSH_BLOCK(s, 1);
         return finish_done;
     }
-    if (s->last_lit)
+    if (s->sym_next)
         FLUSH_BLOCK(s, 0);
     return block_done;
 }
@@ -2157,7 +2192,7 @@ local block_state deflate_huff(s, flush)
         FLUSH_BLOCK(s, 1);
         return finish_done;
     }
-    if (s->last_lit)
+    if (s->sym_next)
         FLUSH_BLOCK(s, 0);
     return block_done;
 }
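
The deflate.c hunks above replace the old overlay of pending_buf with d_buf/l_buf by a single sym_buf region starting one-fourth of the way into a pending_buf of lit_bufsize*4 bytes. The following is a minimal standalone sketch of that layout arithmetic, not zlib code: it assumes the default memLevel of 8 (so lit_bufsize = 16384) and only reuses the names from the patch for illustration.

#include <stdio.h>
#include <stdlib.h>

int main(void) {
    unsigned lit_bufsize = 1u << (8 + 6);               /* memLevel 8 default: 16384 */
    unsigned char *pending_buf = malloc((size_t)lit_bufsize * 4);
    if (pending_buf == NULL) return 1;
    unsigned char *sym_buf = pending_buf + lit_bufsize;  /* overlaid symbol region */
    unsigned sym_end = (lit_bufsize - 1) * 3;             /* capacity: lit_bufsize-1 symbols */

    printf("pending_buf: %u bytes, sym_buf offset: %u, max symbols: %u\n",
           lit_bufsize * 4, lit_bufsize, sym_end / 3);
    printf("sym_buf unused: %p\n", (void *)sym_buf);      /* layout demo only */
    free(pending_buf);
    return 0;
}

Three-fourths of the allocation is available for three-byte symbols, while the compressed output grows from the start of pending_buf, which is what the long comment in the patch proves safe.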
diff -Npur zlib-1.2.11/deflate.h zlib-1.2.11-change/deflate.h
--- zlib-1.2.11/deflate.h	2017-01-01 15:37:10.000000000 +0800
+++ zlib-1.2.11-change/deflate.h	2022-07-28 04:42:55.134287681 +0800
@@ -217,7 +217,7 @@ typedef struct internal_state {
     /* Depth of each subtree used as tie breaker for trees of equal frequency
      */

-    uchf *l_buf;          /* buffer for literals or lengths */
+    uchf *sym_buf;        /* buffer for distances and literals/lengths */

     uInt  lit_bufsize;
     /* Size of match buffer for literals/lengths.  There are 4 reasons for
@@ -239,13 +239,8 @@ typedef struct internal_state {
      *   - I can't count above 4
      */

-    uInt last_lit;      /* running index in l_buf */
-
-    ushf *d_buf;
-    /* Buffer for distances. To simplify the code, d_buf and l_buf have
-     * the same number of elements. To use different lengths, an extra flag
-     * array would be necessary.
-     */
+    uInt sym_next;      /* running index in sym_buf */
+    uInt sym_end;       /* symbol table full when sym_next reaches this */

     ulg opt_len;        /* bit length of current block with optimal trees */
     ulg static_len;     /* bit length of current block with static trees */
@@ -325,20 +320,22 @@ void ZLIB_INTERNAL _tr_stored_block OF((

 # define _tr_tally_lit(s, c, flush) \
   { uch cc = (c); \
-    s->d_buf[s->last_lit] = 0; \
-    s->l_buf[s->last_lit++] = cc; \
+    s->sym_buf[s->sym_next++] = 0; \
+    s->sym_buf[s->sym_next++] = 0; \
+    s->sym_buf[s->sym_next++] = cc; \
     s->dyn_ltree[cc].Freq++; \
-    flush = (s->last_lit == s->lit_bufsize-1); \
+    flush = (s->sym_next == s->sym_end); \
    }
 # define _tr_tally_dist(s, distance, length, flush) \
   { uch len = (uch)(length); \
     ush dist = (ush)(distance); \
-    s->d_buf[s->last_lit] = dist; \
-    s->l_buf[s->last_lit++] = len; \
+    s->sym_buf[s->sym_next++] = dist; \
+    s->sym_buf[s->sym_next++] = dist >> 8; \
+    s->sym_buf[s->sym_next++] = len; \
     dist--; \
     s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
     s->dyn_dtree[d_code(dist)].Freq++; \
-    flush = (s->last_lit == s->lit_bufsize-1); \
+    flush = (s->sym_next == s->sym_end); \
   }
 #else
 # define _tr_tally_lit(s, c, flush) flush = _tr_tally(s, 0, c)
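
The deflate.h hunks replace the parallel d_buf/l_buf arrays with a single sym_buf into which each symbol is packed as three bytes: distance low byte, distance high byte, then literal or length. The sketch below is a hedged, self-contained illustration of that packing, not the zlib macros themselves; tally_lit and tally_dist are hypothetical stand-ins for _tr_tally_lit and _tr_tally_dist.

#include <stdio.h>

static unsigned char sym_buf[3 * 8];   /* room for a few symbols */
static unsigned sym_next = 0;

static void tally_lit(unsigned char c) {              /* literal: distance == 0 */
    sym_buf[sym_next++] = 0;
    sym_buf[sym_next++] = 0;
    sym_buf[sym_next++] = c;
}

static void tally_dist(unsigned dist, unsigned char len) {  /* (length, distance) pair */
    sym_buf[sym_next++] = (unsigned char)dist;         /* distance, low byte */
    sym_buf[sym_next++] = (unsigned char)(dist >> 8);  /* distance, high byte */
    sym_buf[sym_next++] = len;                         /* length - MIN_MATCH */
}

int main(void) {
    tally_lit('z');
    tally_dist(16385, 10);
    printf("bytes used: %u (3 per symbol)\n", sym_next);
    return 0;
}

Because a literal now also consumes three bytes, the "buffer full" test becomes a single comparison of sym_next against sym_end instead of comparing last_lit against lit_bufsize-1.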
diff -Npur zlib-1.2.11/trees.c zlib-1.2.11-change/trees.c
--- zlib-1.2.11/trees.c	2017-01-16 01:07:14.000000000 +0800
+++ zlib-1.2.11-change/trees.c	2022-07-28 05:00:04.094268034 +0800
@@ -416,7 +416,7 @@ local void init_block(s)

     s->dyn_ltree[END_BLOCK].Freq = 1;
     s->opt_len = s->static_len = 0L;
-    s->last_lit = s->matches = 0;
+    s->sym_next = s->matches = 0;
 }

 #define SMALLEST 1
@@ -947,7 +947,7 @@ void ZLIB_INTERNAL _tr_flush_block(s, bu

         Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
                 opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
-                s->last_lit));
+                s->sym_next / 3));

         if (static_lenb <= opt_lenb) opt_lenb = static_lenb;

@@ -1016,8 +1016,9 @@ int ZLIB_INTERNAL _tr_tally (s, dist, lc
     unsigned dist;  /* distance of matched string */
     unsigned lc;    /* match length-MIN_MATCH or unmatched char (if dist==0) */
 {
-    s->d_buf[s->last_lit] = (ush)dist;
-    s->l_buf[s->last_lit++] = (uch)lc;
+    s->sym_buf[s->sym_next++] = dist;
+    s->sym_buf[s->sym_next++] = dist >> 8;
+    s->sym_buf[s->sym_next++] = lc;
     if (dist == 0) {
         /* lc is the unmatched char */
         s->dyn_ltree[lc].Freq++;
@@ -1032,30 +1033,7 @@ int ZLIB_INTERNAL _tr_tally (s, dist, lc
         s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
         s->dyn_dtree[d_code(dist)].Freq++;
     }
-
-#ifdef TRUNCATE_BLOCK
-    /* Try to guess if it is profitable to stop the current block here */
-    if ((s->last_lit & 0x1fff) == 0 && s->level > 2) {
-        /* Compute an upper bound for the compressed length */
-        ulg out_length = (ulg)s->last_lit*8L;
-        ulg in_length = (ulg)((long)s->strstart - s->block_start);
-        int dcode;
-        for (dcode = 0; dcode < D_CODES; dcode++) {
-            out_length += (ulg)s->dyn_dtree[dcode].Freq *
-                (5L+extra_dbits[dcode]);
-        }
-        out_length >>= 3;
-        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
-               s->last_lit, in_length, out_length,
-               100L - out_length*100L/in_length));
-        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
-    }
-#endif
-    return (s->last_lit == s->lit_bufsize-1);
-    /* We avoid equality with lit_bufsize because of wraparound at 64K
-     * on 16 bit machines and because stored blocks are restricted to
-     * 64K-1 bytes.
-     */
+    return (s->sym_next == s->sym_end);
 }

 /* ===========================================================================
@@ -1068,13 +1046,14 @@ local void compress_block(s, ltree, dtre
 {
     unsigned dist;      /* distance of matched string */
     int lc;             /* match length or unmatched char (if dist == 0) */
-    unsigned lx = 0;    /* running index in l_buf */
+    unsigned sx = 0;    /* running index in sym_buf */
     unsigned code;      /* the code to send */
     int extra;          /* number of extra bits to send */

-    if (s->last_lit != 0) do {
-        dist = s->d_buf[lx];
-        lc = s->l_buf[lx++];
+    if (s->sym_next != 0) do {
+        dist = s->sym_buf[sx++] & 0xff;
+        dist += (unsigned)(s->sym_buf[sx++] & 0xff) << 8;
+        lc = s->sym_buf[sx++];
         if (dist == 0) {
             send_code(s, lc, ltree); /* send a literal byte */
             Tracecv(isgraph(lc), (stderr," '%c' ", lc));
@@ -1099,11 +1078,10 @@ local void compress_block(s, ltree, dtre
             }
         } /* literal or match pair ? */

-        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
-        Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx,
-               "pendingBuf overflow");
+        /* Check that the overlay between pending_buf and sym_buf is ok: */
+        Assert(s->pending < s->lit_bufsize + sx, "pendingBuf overflow");

-    } while (lx < s->last_lit);
+    } while (sx < s->sym_next);

     send_code(s, END_BLOCK, ltree);
 }
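
The trees.c hunks change compress_block to read each symbol back as two little-endian distance bytes followed by one literal/length byte, mirroring how _tr_tally now writes them. Below is a hedged round-trip sketch of that decoding loop only; the sym_buf contents are invented sample values, not real deflate output.

#include <stdio.h>

int main(void) {
    unsigned char sym_buf[] = { 0, 0, 'z',            /* literal 'z' (dist == 0) */
                                0x01, 0x40, 10 };     /* dist 16385, length code 10 */
    unsigned sym_next = sizeof(sym_buf);              /* index just past the last symbol */
    unsigned sx = 0;

    while (sx < sym_next) {
        unsigned dist = sym_buf[sx++] & 0xff;                   /* distance, low byte */
        dist += (unsigned)(sym_buf[sx++] & 0xff) << 8;          /* distance, high byte */
        int lc = sym_buf[sx++];                                 /* literal or length */
        if (dist == 0)
            printf("literal '%c'\n", lc);
        else
            printf("match: dist %u, length code %d\n", dist, lc);
    }
    return 0;
}

Since both literals and matches advance sx by exactly three, the compressed bits written into the front of pending_buf can never catch up with the unread symbols, which is the property the overlay analysis in deflate.c establishes.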