/* +++ trees.c */
/* trees.c -- output deflated data using Huffman coding
 * Copyright (C) 1995-1996 Jean-loup Gailly
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 * ALGORITHM
 *
 * The "deflation" process uses several Huffman trees. The more
 * common source values are represented by shorter bit sequences.
 *
 * Each code tree is stored in a compressed form which is itself
 * a Huffman encoding of the lengths of all the code strings (in
 * ascending order by source values). The actual code strings are
 * reconstructed from the lengths in the inflate process, as described
 * in the deflate specification.
 *
 * REFERENCES
 *
 * Deutsch, L.P., "'Deflate' Compressed Data Format Specification".
 * Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
 *
 * Storer, James A.
 * Data Compression: Methods and Theory, pp. 49-50.
 * Computer Science Press, 1988. ISBN 0-7167-8156-5.
 *
 * Sedgewick, R.
 * Algorithms, p290.
 * Addison-Wesley, 1983. ISBN 0-201-06672-6.
 */
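
/*
 * As a concrete illustration of the encoding described in the specification
 * above: a match of length 10 at distance 300 is sent as literal/length
 * symbol 264 (no extra bits) from the literal tree, followed by distance
 * symbol 16 from the distance tree with 7 extra bits carrying 300 - 257 = 43.
 */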

/* From: trees.c,v 1.11 1996/07/24 13:41:06 me Exp $ */

/* #include "deflate.h" */

#include <linux/zutil.h>
#include <linux/bitrev.h>
#include "defutil.h"

#ifdef DEBUG_ZLIB
#  include <ctype.h>
#endif

/* ===========================================================================
 * Constants
 */

#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define END_BLOCK 256
/* end of block literal code */

#define REP_3_6 16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */

#define REPZ_3_10 17
/* repeat a zero length 3-10 times (3 bits of repeat count) */

#define REPZ_11_138 18
/* repeat a zero length 11-138 times (7 bits of repeat count) */

static const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
    = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};

static const int extra_dbits[D_CODES] /* extra bits for each distance code */
    = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};

static const int extra_blbits[BL_CODES]/* extra bits for each bit length code */
    = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};

static const uch bl_order[BL_CODES]
    = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
/* The lengths of the bit length codes are sent in order of decreasing
 * probability, to avoid transmitting the lengths for unused bit length codes.
 */

/* ===========================================================================
 * Local data. These are initialized only once.
 */

static ct_data static_ltree[L_CODES+2];
/* The static literal tree. Since the bit lengths are imposed, there is no
 * need for the L_CODES extra codes used during heap construction. However,
 * the codes 286 and 287 are needed to build a canonical tree (see zlib_tr_init
 * below).
 */

static ct_data static_dtree[D_CODES];
/* The static distance tree. (Actually a trivial tree since all codes use
 * 5 bits.)
 */

static uch dist_code[512];
/* Distance codes. The first 256 values correspond to the distances
 * 1 .. 256, the last 256 values correspond to the top 8 bits of
 * the 15 bit distances.
 */

static uch length_code[MAX_MATCH-MIN_MATCH+1];
/* length code for each normalized match length (0 == MIN_MATCH) */

static int base_length[LENGTH_CODES];
/* First normalized length for each code (0 = MIN_MATCH) */

static int base_dist[D_CODES];
/* First normalized distance for each code (0 = distance of 1) */

struct static_tree_desc_s {
    const ct_data *static_tree; /* static tree or NULL */
    const int *extra_bits;      /* extra bits for each code or NULL */
    int extra_base;             /* base index for extra_bits */
    int elems;                  /* max number of elements in the tree */
    int max_length;             /* max bit length for the codes */
};

static static_tree_desc static_l_desc =
{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};

static static_tree_desc static_d_desc =
{static_dtree, extra_dbits, 0, D_CODES, MAX_BITS};

static static_tree_desc static_bl_desc =
{(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS};

/* ===========================================================================
 * Local (static) routines in this file.
 */

static void tr_static_init (void);
static void init_block (deflate_state *s);
static void pqdownheap (deflate_state *s, ct_data *tree, int k);
static void gen_bitlen (deflate_state *s, tree_desc *desc);
static void gen_codes (ct_data *tree, int max_code, ush *bl_count);
static void build_tree (deflate_state *s, tree_desc *desc);
static void scan_tree (deflate_state *s, ct_data *tree, int max_code);
static void send_tree (deflate_state *s, ct_data *tree, int max_code);
static int build_bl_tree (deflate_state *s);
static void send_all_trees (deflate_state *s, int lcodes, int dcodes,
                            int blcodes);
static void compress_block (deflate_state *s, ct_data *ltree,
                            ct_data *dtree);
static void set_data_type (deflate_state *s);
static void bi_flush (deflate_state *s);
static void copy_block (deflate_state *s, char *buf, unsigned len,
                        int header);

#ifndef DEBUG_ZLIB
#  define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
/* Send a code of the given tree. c and tree must not have side effects */

#else /* DEBUG_ZLIB */
#  define send_code(s, c, tree) \
     { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
       send_bits(s, tree[c].Code, tree[c].Len); }
#endif

#define d_code(dist) \
    ((dist) < 256 ? dist_code[dist] : dist_code[256+((dist)>>7)])
/* Mapping from a distance to a distance code. dist is the distance - 1 and
 * must not have side effects. dist_code[256] and dist_code[257] are never
 * used.
 */
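/* For example, for an actual match distance of 300, dist is 299, so
 * d_code(299) = dist_code[256 + (299>>7)] = dist_code[258], which
 * tr_static_init() below sets to 16 (a code with 7 extra distance bits).
 */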

/* ===========================================================================
 * Initialize the various 'constant' tables. In a multi-threaded environment,
 * this function may be called by two threads concurrently, but this is
 * harmless since both invocations do exactly the same thing.
 */
static void tr_static_init(void)
{
    static int static_init_done;
    int n;      /* iterates over tree elements */
    int bits;   /* bit counter */
    int length; /* length value */
    int code;   /* code value */
    int dist;   /* distance index */
    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    if (static_init_done) return;

    /* Initialize the mapping length (0..255) -> length code (0..28) */
    length = 0;
    for (code = 0; code < LENGTH_CODES-1; code++) {
        base_length[code] = length;
        for (n = 0; n < (1<<extra_lbits[code]); n++) {
            length_code[length++] = (uch)code;
        }
    }
    Assert (length == 256, "tr_static_init: length != 256");
    /* Note that the length 255 (match length 258) can be represented
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
    length_code[length-1] = (uch)code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0 ; code < 16; code++) {
        base_dist[code] = dist;
        for (n = 0; n < (1<<extra_dbits[code]); n++) {
            dist_code[dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: dist != 256");
    dist >>= 7; /* from now on, all distances are divided by 128 */
    for ( ; code < D_CODES; code++) {
        base_dist[code] = dist << 7;
        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
            dist_code[256 + dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: 256+dist != 512");

    /* Construct the codes of the static literal tree */
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
    n = 0;
    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
    /* Codes 286 and 287 do not exist, but we must include them in the
     * tree construction to get a canonical Huffman tree (longest code
     * all ones)
     */
    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);

    /* The static distance tree is trivial: */
    for (n = 0; n < D_CODES; n++) {
        static_dtree[n].Len = 5;
        static_dtree[n].Code = bitrev32((u32)n) >> (32 - 5);
    }
    static_init_done = 1;
}

/* ===========================================================================
 * Initialize the tree data structures for a new zlib stream.
 */
void zlib_tr_init(
    deflate_state *s
)
{
    tr_static_init();

    s->compressed_len = 0L;

    s->l_desc.dyn_tree = s->dyn_ltree;
    s->l_desc.stat_desc = &static_l_desc;

    s->d_desc.dyn_tree = s->dyn_dtree;
    s->d_desc.stat_desc = &static_d_desc;

    s->bl_desc.dyn_tree = s->bl_tree;
    s->bl_desc.stat_desc = &static_bl_desc;

    s->bi_buf = 0;
    s->bi_valid = 0;
    s->last_eob_len = 8; /* enough lookahead for inflate */
#ifdef DEBUG_ZLIB
    s->bits_sent = 0L;
#endif

    /* Initialize the first block of the first file: */
    init_block(s);
}

/* ===========================================================================
 * Initialize a new block.
 */
static void init_block(
    deflate_state *s
)
{
    int n; /* iterates over tree elements */

    /* Initialize the trees. */
    for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;
    for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;
    for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;

    s->dyn_ltree[END_BLOCK].Freq = 1;
    s->opt_len = s->static_len = 0L;
    s->last_lit = s->matches = 0;
}

#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */


/* ===========================================================================
 * Remove the smallest element from the heap and recreate the heap with
 * one less element. Updates heap and heap_len.
 */
#define pqremove(s, tree, top) \
{\
    top = s->heap[SMALLEST]; \
    s->heap[SMALLEST] = s->heap[s->heap_len--]; \
    pqdownheap(s, tree, SMALLEST); \
}

/* ===========================================================================
 * Compares two subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length.
 */
#define smaller(tree, n, m, depth) \
    (tree[n].Freq < tree[m].Freq || \
    (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))

/* ===========================================================================
 * Restore the heap property by moving down the tree starting at node k,
 * exchanging a node with the smallest of its two sons if necessary, stopping
 * when the heap property is re-established (each father smaller than its
 * two sons).
 */
static void pqdownheap(
    deflate_state *s,
    ct_data *tree, /* the tree to restore */
    int k          /* node to move down */
)
{
    int v = s->heap[k];
    int j = k << 1; /* left son of k */
    while (j <= s->heap_len) {
        /* Set j to the smallest of the two sons: */
        if (j < s->heap_len &&
            smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
            j++;
        }
        /* Exit if v is smaller than both sons */
        if (smaller(tree, v, s->heap[j], s->depth)) break;

        /* Exchange v with the smallest son */
        s->heap[k] = s->heap[j]; k = j;

        /* And continue down the tree, setting j to the left son of k */
        j <<= 1;
    }
    s->heap[k] = v;
}

/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 * above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 * array bl_count contains the frequencies for each bit length.
 * The length opt_len is updated; static_len is also updated if stree is
 * not null.
 */
static void gen_bitlen(
    deflate_state *s,
    tree_desc *desc /* the tree descriptor */
)
{
    ct_data *tree = desc->dyn_tree;
    int max_code = desc->max_code;
    const ct_data *stree = desc->stat_desc->static_tree;
    const int *extra = desc->stat_desc->extra_bits;
    int base = desc->stat_desc->extra_base;
    int max_length = desc->stat_desc->max_length;
    int h;            /* heap index */
    int n, m;         /* iterate over the tree elements */
    int bits;         /* bit length */
    int xbits;        /* extra bits */
    ush f;            /* frequency */
    int overflow = 0; /* number of elements with bit length too large */

    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;

    /* In a first pass, compute the optimal bit lengths (which may
     * overflow in the case of the bit length tree).
     */
    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */

    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
        n = s->heap[h];
        bits = tree[tree[n].Dad].Len + 1;
        if (bits > max_length) bits = max_length, overflow++;
        tree[n].Len = (ush)bits;
        /* We overwrite tree[n].Dad which is no longer needed */

        if (n > max_code) continue; /* not a leaf node */

        s->bl_count[bits]++;
        xbits = 0;
        if (n >= base) xbits = extra[n-base];
        f = tree[n].Freq;
        s->opt_len += (ulg)f * (bits + xbits);
        if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
    }
    if (overflow == 0) return;

    Trace((stderr,"\nbit length overflow\n"));
    /* This happens for example on obj2 and pic of the Calgary corpus */

    /* Find the first bit length which could increase: */
    do {
        bits = max_length-1;
        while (s->bl_count[bits] == 0) bits--;
        s->bl_count[bits]--;      /* move one leaf down the tree */
        s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
        s->bl_count[max_length]--;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
         */
        overflow -= 2;
    } while (overflow > 0);

    /* Now recompute all bit lengths, scanning in increasing frequency.
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
     * lengths instead of fixing only the wrong ones. This idea is taken
     * from 'ar' written by Haruhiko Okumura.)
     */
    for (bits = max_length; bits != 0; bits--) {
        n = s->bl_count[bits];
        while (n != 0) {
            m = s->heap[--h];
            if (m > max_code) continue;
            if (tree[m].Len != (unsigned) bits) {
                Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
                s->opt_len += ((long)bits - (long)tree[m].Len)
                              *(long)tree[m].Freq;
                tree[m].Len = (ush)bits;
            }
            n--;
        }
    }
}

/* ===========================================================================
 * Generate the codes for a given tree and bit counts (which need not be
 * optimal).
 * IN assertion: the array bl_count contains the bit length statistics for
 * the given tree and the field len is set for all tree elements.
 * OUT assertion: the field code is set for all tree elements of non
 * zero code length.
 */
static void gen_codes(
    ct_data *tree, /* the tree to decorate */
    int max_code,  /* largest code with non zero frequency */
    ush *bl_count  /* number of codes at each bit length */
)
{
    ush next_code[MAX_BITS+1]; /* next code value for each bit length */
    ush code = 0;              /* running code value */
    int bits;                  /* bit index */
    int n;                     /* code index */

    /* The distribution counts are first used to generate the code values
     * without bit reversal.
     */
    for (bits = 1; bits <= MAX_BITS; bits++) {
        next_code[bits] = code = (code + bl_count[bits-1]) << 1;
    }
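    /* For example, if bl_count[1] == 1 and bl_count[2] == 2 (all other
     * counts zero), this yields next_code[1] == 0 and next_code[2] == 2,
     * i.e. the canonical codes 0, 10 and 11 before bit reversal.
     */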
    /* Check that the bit counts in bl_count are consistent. The last code
     * must be all ones.
     */
    Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
            "inconsistent bit counts");
    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

    for (n = 0; n <= max_code; n++) {
        int len = tree[n].Len;
        if (len == 0) continue;
        /* Now reverse the bits */
        tree[n].Code = bitrev32((u32)(next_code[len]++)) >> (32 - len);

        Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
                n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
    }
}

/* ===========================================================================
 * Construct one Huffman tree and assign the code bit strings and lengths.
 * Update the total bit length for the current block.
 * IN assertion: the field freq is set for all tree elements.
 * OUT assertions: the fields len and code are set to the optimal bit length
 * and corresponding code. The length opt_len is updated; static_len is
 * also updated if stree is not null. The field max_code is set.
 */
static void build_tree(
    deflate_state *s,
    tree_desc *desc /* the tree descriptor */
)
{
    ct_data *tree = desc->dyn_tree;
    const ct_data *stree = desc->stat_desc->static_tree;
    int elems = desc->stat_desc->elems;
    int n, m;          /* iterate over heap elements */
    int max_code = -1; /* largest code with non zero frequency */
    int node;          /* new node being created */

    /* Construct the initial heap, with least frequent element in
     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
     * heap[0] is not used.
     */
    s->heap_len = 0, s->heap_max = HEAP_SIZE;

    for (n = 0; n < elems; n++) {
        if (tree[n].Freq != 0) {
            s->heap[++(s->heap_len)] = max_code = n;
            s->depth[n] = 0;
        } else {
            tree[n].Len = 0;
        }
    }

    /* The pkzip format requires that at least one distance code exists,
     * and that at least one bit should be sent even if there is only one
     * possible code. So to avoid special checks later on we force at least
     * two codes of non zero frequency.
     */
    while (s->heap_len < 2) {
        node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
        tree[node].Freq = 1;
        s->depth[node] = 0;
        s->opt_len--; if (stree) s->static_len -= stree[node].Len;
        /* node is 0 or 1 so it does not have extra bits */
    }
    desc->max_code = max_code;

    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
     * establish sub-heaps of increasing lengths:
     */
    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);

    /* Construct the Huffman tree by repeatedly combining the least two
     * frequent nodes.
     */
    node = elems; /* next internal node of the tree */
    do {
        pqremove(s, tree, n);  /* n = node of least frequency */
        m = s->heap[SMALLEST]; /* m = node of next least frequency */

        s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
        s->heap[--(s->heap_max)] = m;

        /* Create a new node father of n and m */
        tree[node].Freq = tree[n].Freq + tree[m].Freq;
        s->depth[node] = (uch) (max(s->depth[n], s->depth[m]) + 1);
        tree[n].Dad = tree[m].Dad = (ush)node;
#ifdef DUMP_BL_TREE
        if (tree == s->bl_tree) {
            fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
                    node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
        }
#endif
        /* and insert the new node in the heap */
        s->heap[SMALLEST] = node++;
        pqdownheap(s, tree, SMALLEST);

    } while (s->heap_len >= 2);

    s->heap[--(s->heap_max)] = s->heap[SMALLEST];

    /* At this point, the fields freq and dad are set. We can now
     * generate the bit lengths.
     */
    gen_bitlen(s, (tree_desc *)desc);

    /* The field len is now set, we can generate the bit codes */
    gen_codes ((ct_data *)tree, max_code, s->bl_count);
}

/* ===========================================================================
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree.
 */
static void scan_tree(
    deflate_state *s,
    ct_data *tree, /* the tree to be scanned */
    int max_code   /* and its largest code of non zero frequency */
)
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    if (nextlen == 0) max_count = 138, min_count = 3;
    tree[max_code+1].Len = (ush)0xffff; /* guard */

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            s->bl_tree[curlen].Freq += count;
        } else if (curlen != 0) {
            if (curlen != prevlen) s->bl_tree[curlen].Freq++;
            s->bl_tree[REP_3_6].Freq++;
        } else if (count <= 10) {
            s->bl_tree[REPZ_3_10].Freq++;
        } else {
            s->bl_tree[REPZ_11_138].Freq++;
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}

/* ===========================================================================
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree.
 */
static void send_tree(
    deflate_state *s,
    ct_data *tree, /* the tree to be scanned */
    int max_code   /* and its largest code of non zero frequency */
)
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    /* tree[max_code+1].Len = -1; */ /* guard already set */
    if (nextlen == 0) max_count = 138, min_count = 3;

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);

        } else if (curlen != 0) {
            if (curlen != prevlen) {
                send_code(s, curlen, s->bl_tree); count--;
            }
            Assert(count >= 3 && count <= 6, " 3_6?");
            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);

        } else if (count <= 10) {
            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);

        } else {
            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}

/* ===========================================================================
 * Construct the Huffman tree for the bit lengths and return the index in
 * bl_order of the last bit length code to send.
 */
static int build_bl_tree(
    deflate_state *s
)
{
    int max_blindex; /* index of last bit length code of non zero freq */

    /* Determine the bit length frequencies for literal and distance trees */
    scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
    scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);

    /* Build the bit length tree: */
    build_tree(s, (tree_desc *)(&(s->bl_desc)));
    /* opt_len now includes the length of the tree representations, except
     * the lengths of the bit length codes and the 5+5+4 bits for the counts.
     */

    /* Determine the number of bit length codes to send. The pkzip format
     * requires that at least 4 bit length codes be sent. (appnote.txt says
     * 3 but the actual value used is 4.)
     */
    for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
        if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
    }
    /* Update opt_len to include the bit length tree and counts */
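    /* That is: 3 bits for each of the max_blindex+1 bit length code lengths
     * actually sent, plus the 5-bit HLIT, 5-bit HDIST and 4-bit HCLEN counts
     * of the dynamic block header (see send_all_trees() and the deflate
     * specification).
     */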
    s->opt_len += 3*(max_blindex+1) + 5+5+4;
    Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
            s->opt_len, s->static_len));

    return max_blindex;
}

/* ===========================================================================
 * Send the header for a block using dynamic Huffman trees: the counts, the
 * lengths of the bit length codes, the literal tree and the distance tree.
 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
 */
static void send_all_trees(
    deflate_state *s,
    int lcodes,  /* number of codes for each tree */
    int dcodes,  /* number of codes for each tree */
    int blcodes  /* number of codes for each tree */
)
{
    int rank; /* index in bl_order */

    Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
    Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
            "too many codes");
    Tracev((stderr, "\nbl counts: "));
    send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
    send_bits(s, dcodes-1, 5);
    send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */
    for (rank = 0; rank < blcodes; rank++) {
        Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
        send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
    }
    Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
    Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
    Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
}

/* ===========================================================================
 * Send a stored block
 */
void zlib_tr_stored_block(
    deflate_state *s,
    char *buf,      /* input block */
    ulg stored_len, /* length of input block */
    int eof         /* true if this is the last block for a file */
)
{
    send_bits(s, (STORED_BLOCK<<1)+eof, 3); /* send block type */
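    /* Round the bit count up to the next byte boundary, then account for the
     * 4 header bytes (LEN and its one's complement NLEN, written by
     * copy_block() below) plus the stored data itself, all counted in bits.
     */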
    s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
    s->compressed_len += (stored_len + 4) << 3;

    copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
}

/* Send just the `stored block' type code without any length bytes or data.
 */
void zlib_tr_stored_type_only(
    deflate_state *s
)
{
    send_bits(s, (STORED_BLOCK << 1), 3);
    bi_windup(s);
    s->compressed_len = (s->compressed_len + 3) & ~7L;
}


/* ===========================================================================
 * Send one empty static block to give enough lookahead for inflate.
 * This takes 10 bits, of which 7 may remain in the bit buffer.
 * The current inflate code requires 9 bits of lookahead. If the
 * last two codes for the previous block (real code plus EOB) were coded
 * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode
 * the last real code. In this case we send two empty static blocks instead
 * of one. (There are no problems if the previous block is stored or fixed.)
 * To simplify the code, we assume the worst case of last real code encoded
 * on one bit only.
 */
void zlib_tr_align(
    deflate_state *s
)
{
    send_bits(s, STATIC_TREES<<1, 3);
    send_code(s, END_BLOCK, static_ltree);
    s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
    bi_flush(s);
    /* Of the 10 bits for the empty block, we have already sent
     * (10 - bi_valid) bits. The lookahead for the last real code (before
     * the EOB of the previous block) was thus at least one plus the length
     * of the EOB plus what we have just sent of the empty static block.
     */
    if (1 + s->last_eob_len + 10 - s->bi_valid < 9) {
        send_bits(s, STATIC_TREES<<1, 3);
        send_code(s, END_BLOCK, static_ltree);
        s->compressed_len += 10L;
        bi_flush(s);
    }
    s->last_eob_len = 7;
}

/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and output the encoded block to the zip file. This function
 * returns the total compressed length for the file so far.
 */
ulg zlib_tr_flush_block(
    deflate_state *s,
    char *buf,      /* input block, or NULL if too old */
    ulg stored_len, /* length of input block */
    int eof         /* true if this is the last block for a file */
)
{
    ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
    int max_blindex = 0; /* index of last bit length code of non zero freq */

    /* Build the Huffman trees unless a stored block is forced */
    if (s->level > 0) {

        /* Check if the file is ascii or binary */
        if (s->data_type == Z_UNKNOWN) set_data_type(s);

        /* Construct the literal and distance trees */
        build_tree(s, (tree_desc *)(&(s->l_desc)));
        Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));

        build_tree(s, (tree_desc *)(&(s->d_desc)));
        Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));
        /* At this point, opt_len and static_len are the total bit lengths of
         * the compressed block data, excluding the tree representations.
         */

        /* Build the bit length tree for the above two trees, and get the index
         * in bl_order of the last bit length code to send.
         */
        max_blindex = build_bl_tree(s);

        /* Determine the best encoding. Compute first the block length in bytes*/
        opt_lenb = (s->opt_len+3+7)>>3;
        static_lenb = (s->static_len+3+7)>>3;

        Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
                opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
                s->last_lit));

        if (static_lenb <= opt_lenb) opt_lenb = static_lenb;

    } else {
        Assert(buf != (char*)0, "lost buf");
        opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
    }

    /* If compression failed and this is the first and last block,
     * and if the .zip file can be seeked (to rewrite the local header),
     * the whole file is transformed into a stored file:
     */
#ifdef STORED_FILE_OK
#  ifdef FORCE_STORED_FILE
    if (eof && s->compressed_len == 0L) { /* force stored file */
#  else
    if (stored_len <= opt_lenb && eof && s->compressed_len==0L && seekable()) {
#  endif
        /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
        if (buf == (char*)0) error ("block vanished");

        copy_block(s, buf, (unsigned)stored_len, 0); /* without header */
        s->compressed_len = stored_len << 3;
        s->method = STORED;
    } else
#endif /* STORED_FILE_OK */

#ifdef FORCE_STORED
    if (buf != (char*)0) { /* force stored block */
#else
    if (stored_len+4 <= opt_lenb && buf != (char*)0) {
        /* 4: two words for the lengths */
#endif
        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
         * Otherwise we can't have processed more than WSIZE input bytes since
         * the last block flush, because compression would have been
         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
         * transform a block into a stored block.
         */
        zlib_tr_stored_block(s, buf, stored_len, eof);

#ifdef FORCE_STATIC
    } else if (static_lenb >= 0) { /* force static trees */
#else
    } else if (static_lenb == opt_lenb) {
#endif
        send_bits(s, (STATIC_TREES<<1)+eof, 3);
        compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree);
        s->compressed_len += 3 + s->static_len;
    } else {
        send_bits(s, (DYN_TREES<<1)+eof, 3);
        send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
                       max_blindex+1);
        compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree);
        s->compressed_len += 3 + s->opt_len;
    }
    Assert (s->compressed_len == s->bits_sent, "bad compressed size");
    init_block(s);

    if (eof) {
        bi_windup(s);
        s->compressed_len += 7; /* align on byte boundary */
    }
    Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
            s->compressed_len-7*eof));

    return s->compressed_len >> 3;
}

/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
int zlib_tr_tally(
    deflate_state *s,
    unsigned dist, /* distance of matched string */
    unsigned lc    /* match length-MIN_MATCH or unmatched char (if dist==0) */
)
{
    s->d_buf[s->last_lit] = (ush)dist;
    s->l_buf[s->last_lit++] = (uch)lc;
    if (dist == 0) {
        /* lc is the unmatched char */
        s->dyn_ltree[lc].Freq++;
    } else {
        s->matches++;
        /* Here, lc is the match length - MIN_MATCH */
        dist--; /* dist = match distance - 1 */
        Assert((ush)dist < (ush)MAX_DIST(s) &&
               (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
               (ush)d_code(dist) < (ush)D_CODES, "zlib_tr_tally: bad match");

        s->dyn_ltree[length_code[lc]+LITERALS+1].Freq++;
        s->dyn_dtree[d_code(dist)].Freq++;
    }

    /* Try to guess if it is profitable to stop the current block here */
    if ((s->last_lit & 0xfff) == 0 && s->level > 2) {
        /* Compute an upper bound for the compressed length */
        ulg out_length = (ulg)s->last_lit*8L;
        ulg in_length = (ulg)((long)s->strstart - s->block_start);
        int dcode;
        for (dcode = 0; dcode < D_CODES; dcode++) {
            out_length += (ulg)s->dyn_dtree[dcode].Freq *
                          (5L+extra_dbits[dcode]);
        }
        out_length >>= 3;
        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
                s->last_lit, in_length, out_length,
                100L - out_length*100L/in_length));
        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
    }
    return (s->last_lit == s->lit_bufsize-1);
    /* We avoid equality with lit_bufsize because of wraparound at 64K
     * on 16 bit machines and because stored blocks are restricted to
     * 64K-1 bytes.
     */
}

/* ===========================================================================
 * Send the block data compressed using the given Huffman trees
 */
static void compress_block(
    deflate_state *s,
    ct_data *ltree, /* literal tree */
    ct_data *dtree  /* distance tree */
)
{
    unsigned dist;   /* distance of matched string */
    int lc;          /* match length or unmatched char (if dist == 0) */
    unsigned lx = 0; /* running index in l_buf */
    unsigned code;   /* the code to send */
    int extra;       /* number of extra bits to send */

    if (s->last_lit != 0) do {
        dist = s->d_buf[lx];
        lc = s->l_buf[lx++];
        if (dist == 0) {
            send_code(s, lc, ltree); /* send a literal byte */
            Tracecv(isgraph(lc), (stderr," '%c' ", lc));
        } else {
            /* Here, lc is the match length - MIN_MATCH */
            code = length_code[lc];
            send_code(s, code+LITERALS+1, ltree); /* send the length code */
            extra = extra_lbits[code];
            if (extra != 0) {
                lc -= base_length[code];
                send_bits(s, lc, extra); /* send the extra length bits */
            }
            dist--; /* dist is now the match distance - 1 */
            code = d_code(dist);
            Assert (code < D_CODES, "bad d_code");

            send_code(s, code, dtree); /* send the distance code */
            extra = extra_dbits[code];
            if (extra != 0) {
                dist -= base_dist[code];
                send_bits(s, dist, extra); /* send the extra distance bits */
            }
        } /* literal or match pair ? */

        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
        Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow");

    } while (lx < s->last_lit);

    send_code(s, END_BLOCK, ltree);
    s->last_eob_len = ltree[END_BLOCK].Len;
}

/* ===========================================================================
 * Set the data type to ASCII or BINARY, using a crude approximation:
 * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.
 * IN assertion: the fields freq of dyn_ltree are set and the total of all
 * frequencies does not exceed 64K (to fit in an int on 16 bit machines).
 */
static void set_data_type(
    deflate_state *s
)
{
    int n = 0;
    unsigned ascii_freq = 0;
    unsigned bin_freq = 0;
    while (n < 7) bin_freq += s->dyn_ltree[n++].Freq;
    while (n < 128) ascii_freq += s->dyn_ltree[n++].Freq;
    while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq;
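    /* The test below (bin_freq > ascii_freq/4) is roughly the 20% threshold
     * described above: it holds when bin_freq is about a fifth or more of
     * all the tallied bytes.
     */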
    s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? Z_BINARY : Z_ASCII);
}

/* ===========================================================================
 * Copy a stored block, storing first the length and its
 * one's complement if requested.
 */
static void copy_block(
    deflate_state *s,
    char *buf,    /* the input data */
    unsigned len, /* its length */
    int header    /* true if block header must be written */
)
{
    bi_windup(s);        /* align on byte boundary */
    s->last_eob_len = 8; /* enough lookahead for inflate */

    if (header) {
        put_short(s, (ush)len);
        put_short(s, (ush)~len);
#ifdef DEBUG_ZLIB
        s->bits_sent += 2*16;
#endif
    }
#ifdef DEBUG_ZLIB
    s->bits_sent += (ulg)len<<3;
#endif
    /* bundle up the put_byte(s, *buf++) calls */
    memcpy(&s->pending_buf[s->pending], buf, len);
    s->pending += len;
}
