deftree.c

/* +++ trees.c */
/* trees.c -- output deflated data using Huffman coding
 * Copyright (C) 1995-1996 Jean-loup Gailly
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 * ALGORITHM
 *
 *     The "deflation" process uses several Huffman trees. The more
 *     common source values are represented by shorter bit sequences.
 *
 *     Each code tree is stored in a compressed form which is itself
 *     a Huffman encoding of the lengths of all the code strings (in
 *     ascending order by source values). The actual code strings are
 *     reconstructed from the lengths in the inflate process, as described
 *     in the deflate specification.
 *
 * REFERENCES
 *
 *     Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
 *     Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
 *
 *     Storer, James A.
 *         Data Compression: Methods and Theory, pp. 49-50.
 *         Computer Science Press, 1988. ISBN 0-7167-8156-5.
 *
 *     Sedgewick, R.
 *         Algorithms, p290.
 *         Addison-Wesley, 1983. ISBN 0-201-06672-6.
 */

/* From: trees.c,v 1.11 1996/07/24 13:41:06 me Exp $ */

/* #include "deflate.h" */
#include <linux/zutil.h>
#include <linux/bitrev.h>
#include "defutil.h"

#ifdef DEBUG_ZLIB
# include <ctype.h>
#endif

/* ===========================================================================
 * Constants
 */

#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define END_BLOCK 256
/* end of block literal code */

#define REP_3_6      16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */

#define REPZ_3_10    17
/* repeat a zero length 3-10 times (3 bits of repeat count) */

#define REPZ_11_138  18
/* repeat a zero length 11-138 times (7 bits of repeat count) */

static const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
    = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};

static const int extra_dbits[D_CODES] /* extra bits for each distance code */
    = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};

static const int extra_blbits[BL_CODES] /* extra bits for each bit length code */
    = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};

static const uch bl_order[BL_CODES]
    = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
/* The lengths of the bit length codes are sent in order of decreasing
 * probability, to avoid transmitting the lengths for unused bit length codes.
 */

/* ===========================================================================
 * Local data. These are initialized only once.
 */

static ct_data static_ltree[L_CODES+2];
/* The static literal tree. Since the bit lengths are imposed, there is no
 * need for the L_CODES extra codes used during heap construction. However,
 * the codes 286 and 287 are needed to build a canonical tree (see zlib_tr_init
 * below).
 */

static ct_data static_dtree[D_CODES];
/* The static distance tree. (Actually a trivial tree since all codes use
 * 5 bits.)
 */

static uch dist_code[512];
/* distance codes. The first 256 values correspond to the distances
 * 3 .. 258, the last 256 values correspond to the top 8 bits of
 * the 15 bit distances.
 */

static uch length_code[MAX_MATCH-MIN_MATCH+1];
/* length code for each normalized match length (0 == MIN_MATCH) */

static int base_length[LENGTH_CODES];
/* First normalized length for each code (0 = MIN_MATCH) */

static int base_dist[D_CODES];
/* First normalized distance for each code (0 = distance of 1) */

struct static_tree_desc_s {
    const ct_data *static_tree;  /* static tree or NULL */
    const int     *extra_bits;   /* extra bits for each code or NULL */
    int            extra_base;   /* base index for extra_bits */
    int            elems;        /* max number of elements in the tree */
    int            max_length;   /* max bit length for the codes */
};

static static_tree_desc static_l_desc =
{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};

static static_tree_desc static_d_desc =
{static_dtree, extra_dbits, 0, D_CODES, MAX_BITS};

static static_tree_desc static_bl_desc =
{(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS};

/* ===========================================================================
 * Local (static) routines in this file.
 */

static void tr_static_init (void);
static void init_block     (deflate_state *s);
static void pqdownheap     (deflate_state *s, ct_data *tree, int k);
static void gen_bitlen     (deflate_state *s, tree_desc *desc);
static void gen_codes      (ct_data *tree, int max_code, ush *bl_count);
static void build_tree     (deflate_state *s, tree_desc *desc);
static void scan_tree      (deflate_state *s, ct_data *tree, int max_code);
static void send_tree      (deflate_state *s, ct_data *tree, int max_code);
static int  build_bl_tree  (deflate_state *s);
static void send_all_trees (deflate_state *s, int lcodes, int dcodes,
                            int blcodes);
static void compress_block (deflate_state *s, ct_data *ltree,
                            ct_data *dtree);
static void set_data_type  (deflate_state *s);
static void bi_flush       (deflate_state *s);
static void copy_block     (deflate_state *s, char *buf, unsigned len,
                            int header);

#ifndef DEBUG_ZLIB
# define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
  /* Send a code of the given tree. c and tree must not have side effects */

#else /* DEBUG_ZLIB */
# define send_code(s, c, tree) \
    { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
      send_bits(s, tree[c].Code, tree[c].Len); }
#endif

#define d_code(dist) \
    ((dist) < 256 ? dist_code[dist] : dist_code[256+((dist)>>7)])
/* Mapping from a distance to a distance code. dist is the distance - 1 and
 * must not have side effects. dist_code[256] and dist_code[257] are never
 * used.
 */
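/* Worked example (distances chosen for illustration): a match at distance 100
 * reaches d_code() with dist == 99; since 99 < 256 this selects
 * dist_code[99] == 13, the distance code covering distances 97..128
 * (5 extra bits). A match at distance 1000 gives dist == 999, so
 * dist_code[256 + (999 >> 7)] == dist_code[263] == 19, the code covering
 * distances 769..1024 (8 extra bits).
 */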
/* ===========================================================================
 * Initialize the various 'constant' tables. In a multi-threaded environment,
 * this function may be called by two threads concurrently, but this is
 * harmless since both invocations do exactly the same thing.
 */
static void tr_static_init(void)
{
    static int static_init_done;
    int n;        /* iterates over tree elements */
    int bits;     /* bit counter */
    int length;   /* length value */
    int code;     /* code value */
    int dist;     /* distance index */
    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    if (static_init_done) return;

    /* Initialize the mapping length (0..255) -> length code (0..28) */
    length = 0;
    for (code = 0; code < LENGTH_CODES-1; code++) {
        base_length[code] = length;
        for (n = 0; n < (1<<extra_lbits[code]); n++) {
            length_code[length++] = (uch)code;
        }
    }
    Assert (length == 256, "tr_static_init: length != 256");
    /* Note that the length 255 (match length 258) can be represented
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
    length_code[length-1] = (uch)code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0 ; code < 16; code++) {
        base_dist[code] = dist;
        for (n = 0; n < (1<<extra_dbits[code]); n++) {
            dist_code[dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: dist != 256");
    dist >>= 7; /* from now on, all distances are divided by 128 */
    for ( ; code < D_CODES; code++) {
        base_dist[code] = dist << 7;
        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
            dist_code[256 + dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: 256+dist != 512");

    /* Construct the codes of the static literal tree */
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
    n = 0;
    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
    /* Codes 286 and 287 do not exist, but we must include them in the
     * tree construction to get a canonical Huffman tree (longest code
     * all ones)
     */
    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);

    /* The static distance tree is trivial: */
    for (n = 0; n < D_CODES; n++) {
        static_dtree[n].Len = 5;
        static_dtree[n].Code = bitrev32((u32)n) >> (32 - 5);
    }
    static_init_done = 1;
}
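/* Note on the bit order: deflate writes Huffman codes most-significant bit
 * first into a bit stream that is packed least-significant bit first, so each
 * code is stored here already reversed (bitrev32(code) >> (32 - len)) and
 * send_bits() can emit tree[c].Code as-is. For example, the 5-bit static
 * distance code for value 1, 00001, is stored as 10000.
 */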
/* ===========================================================================
 * Initialize the tree data structures for a new zlib stream.
 */
void zlib_tr_init(
    deflate_state *s
)
{
    tr_static_init();

    s->compressed_len = 0L;

    s->l_desc.dyn_tree = s->dyn_ltree;
    s->l_desc.stat_desc = &static_l_desc;

    s->d_desc.dyn_tree = s->dyn_dtree;
    s->d_desc.stat_desc = &static_d_desc;

    s->bl_desc.dyn_tree = s->bl_tree;
    s->bl_desc.stat_desc = &static_bl_desc;

    s->bi_buf = 0;
    s->bi_valid = 0;
    s->last_eob_len = 8; /* enough lookahead for inflate */
#ifdef DEBUG_ZLIB
    s->bits_sent = 0L;
#endif

    /* Initialize the first block of the first file: */
    init_block(s);
}

/* ===========================================================================
 * Initialize a new block.
 */
static void init_block(
    deflate_state *s
)
{
    int n; /* iterates over tree elements */

    /* Initialize the trees. */
    for (n = 0; n < L_CODES;  n++) s->dyn_ltree[n].Freq = 0;
    for (n = 0; n < D_CODES;  n++) s->dyn_dtree[n].Freq = 0;
    for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;

    s->dyn_ltree[END_BLOCK].Freq = 1;
    s->opt_len = s->static_len = 0L;
    s->last_lit = s->matches = 0;
}

#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */

/* ===========================================================================
 * Remove the smallest element from the heap and recreate the heap with
 * one less element. Updates heap and heap_len.
 */
#define pqremove(s, tree, top) \
{\
    top = s->heap[SMALLEST]; \
    s->heap[SMALLEST] = s->heap[s->heap_len--]; \
    pqdownheap(s, tree, SMALLEST); \
}

/* ===========================================================================
 * Compares two subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length.
 */
#define smaller(tree, n, m, depth) \
    (tree[n].Freq < tree[m].Freq || \
    (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))

/* ===========================================================================
 * Restore the heap property by moving down the tree starting at node k,
 * exchanging a node with the smallest of its two sons if necessary, stopping
 * when the heap property is re-established (each father smaller than its
 * two sons).
 */
static void pqdownheap(
    deflate_state *s,
    ct_data *tree,  /* the tree to restore */
    int k           /* node to move down */
)
{
    int v = s->heap[k];
    int j = k << 1;  /* left son of k */
    while (j <= s->heap_len) {
        /* Set j to the smallest of the two sons: */
        if (j < s->heap_len &&
            smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
            j++;
        }
        /* Exit if v is smaller than both sons */
        if (smaller(tree, v, s->heap[j], s->depth)) break;

        /* Exchange v with the smallest son */
        s->heap[k] = s->heap[j]; k = j;

        /* And continue down the tree, setting j to the left son of k */
        j <<= 1;
    }
    s->heap[k] = v;
}

/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 *    above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 *     array bl_count contains the frequencies for each bit length.
 *     The length opt_len is updated; static_len is also updated if stree is
 *     not null.
 */
static void gen_bitlen(
    deflate_state *s,
    tree_desc *desc  /* the tree descriptor */
)
{
    ct_data *tree        = desc->dyn_tree;
    int max_code         = desc->max_code;
    const ct_data *stree = desc->stat_desc->static_tree;
    const int *extra     = desc->stat_desc->extra_bits;
    int base             = desc->stat_desc->extra_base;
    int max_length       = desc->stat_desc->max_length;
    int h;              /* heap index */
    int n, m;           /* iterate over the tree elements */
    int bits;           /* bit length */
    int xbits;          /* extra bits */
    ush f;              /* frequency */
    int overflow = 0;   /* number of elements with bit length too large */

    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;

    /* In a first pass, compute the optimal bit lengths (which may
     * overflow in the case of the bit length tree).
     */
    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */

    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
        n = s->heap[h];
        bits = tree[tree[n].Dad].Len + 1;
        if (bits > max_length) bits = max_length, overflow++;
        tree[n].Len = (ush)bits;
        /* We overwrite tree[n].Dad which is no longer needed */

        if (n > max_code) continue; /* not a leaf node */

        s->bl_count[bits]++;
        xbits = 0;
        if (n >= base) xbits = extra[n-base];
        f = tree[n].Freq;
        s->opt_len += (ulg)f * (bits + xbits);
        if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
    }
    if (overflow == 0) return;

    Trace((stderr,"\nbit length overflow\n"));
    /* This happens for example on obj2 and pic of the Calgary corpus */

    /* Find the first bit length which could increase: */
    do {
        bits = max_length-1;
        while (s->bl_count[bits] == 0) bits--;
        s->bl_count[bits]--;      /* move one leaf down the tree */
        s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
        s->bl_count[max_length]--;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
         */
        overflow -= 2;
    } while (overflow > 0);

    /* Now recompute all bit lengths, scanning in increasing frequency.
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
     * lengths instead of fixing only the wrong ones. This idea is taken
     * from 'ar' written by Haruhiko Okumura.)
     */
    for (bits = max_length; bits != 0; bits--) {
        n = s->bl_count[bits];
        while (n != 0) {
            m = s->heap[--h];
            if (m > max_code) continue;
            if (tree[m].Len != (unsigned) bits) {
                Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
                s->opt_len += ((long)bits - (long)tree[m].Len)
                              *(long)tree[m].Freq;
                tree[m].Len = (ush)bits;
            }
            n--;
        }
    }
}
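/* Illustration of the fix-up loop above (not tied to any particular input):
 * each pass takes one leaf at the deepest occupied level below max_length and
 * pushes it down one level, and re-homes one of the leaves counted at
 * max_length as its new brother at bits+1; hence bl_count[bits]--,
 * bl_count[bits+1] += 2 and bl_count[max_length]--. Repeating this frees one
 * slot at max_length per pass until no clamped code remains.
 */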
/* ===========================================================================
 * Generate the codes for a given tree and bit counts (which need not be
 * optimal).
 * IN assertion: the array bl_count contains the bit length statistics for
 * the given tree and the field len is set for all tree elements.
 * OUT assertion: the field code is set for all tree elements of non
 *     zero code length.
 */
static void gen_codes(
    ct_data *tree,  /* the tree to decorate */
    int max_code,   /* largest code with non zero frequency */
    ush *bl_count   /* number of codes at each bit length */
)
{
    ush next_code[MAX_BITS+1]; /* next code value for each bit length */
    ush code = 0;              /* running code value */
    int bits;                  /* bit index */
    int n;                     /* code index */

    /* The distribution counts are first used to generate the code values
     * without bit reversal.
     */
    for (bits = 1; bits <= MAX_BITS; bits++) {
        next_code[bits] = code = (code + bl_count[bits-1]) << 1;
    }
    /* Check that the bit counts in bl_count are consistent. The last code
     * must be all ones.
     */
    Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
            "inconsistent bit counts");
    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

    for (n = 0; n <= max_code; n++) {
        int len = tree[n].Len;
        if (len == 0) continue;
        /* Now reverse the bits */
        tree[n].Code = bitrev32((u32)(next_code[len]++)) >> (32 - len);

        Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
                n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
    }
}
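/* Worked example (lengths chosen for illustration): for four symbols with
 * code lengths 1, 2, 3, 3 the counts are bl_count[1] = 1, bl_count[2] = 1,
 * bl_count[3] = 2, so next_code[1] = 0, next_code[2] = 2 (binary 10) and
 * next_code[3] = 6 (binary 110). Assigning codes in symbol order yields
 * 0, 10, 110 and 111; the last 3-bit code is all ones, as the Assert above
 * requires. Each code is then bit-reversed before being stored so that
 * send_bits() can emit it least-significant bit first.
 */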
/* ===========================================================================
 * Construct one Huffman tree and assign the code bit strings and lengths.
 * Update the total bit length for the current block.
 * IN assertion: the field freq is set for all tree elements.
 * OUT assertions: the fields len and code are set to the optimal bit length
 *     and corresponding code. The length opt_len is updated; static_len is
 *     also updated if stree is not null. The field max_code is set.
 */
static void build_tree(
    deflate_state *s,
    tree_desc *desc  /* the tree descriptor */
)
{
    ct_data *tree        = desc->dyn_tree;
    const ct_data *stree = desc->stat_desc->static_tree;
    int elems            = desc->stat_desc->elems;
    int n, m;          /* iterate over heap elements */
    int max_code = -1; /* largest code with non zero frequency */
    int node;          /* new node being created */

    /* Construct the initial heap, with least frequent element in
     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
     * heap[0] is not used.
     */
    s->heap_len = 0, s->heap_max = HEAP_SIZE;

    for (n = 0; n < elems; n++) {
        if (tree[n].Freq != 0) {
            s->heap[++(s->heap_len)] = max_code = n;
            s->depth[n] = 0;
        } else {
            tree[n].Len = 0;
        }
    }

    /* The pkzip format requires that at least one distance code exists,
     * and that at least one bit should be sent even if there is only one
     * possible code. So to avoid special checks later on we force at least
     * two codes of non zero frequency.
     */
    while (s->heap_len < 2) {
        node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
        tree[node].Freq = 1;
        s->depth[node] = 0;
        s->opt_len--; if (stree) s->static_len -= stree[node].Len;
        /* node is 0 or 1 so it does not have extra bits */
    }
    desc->max_code = max_code;

    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
     * establish sub-heaps of increasing lengths:
     */
    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);

    /* Construct the Huffman tree by repeatedly combining the least two
     * frequent nodes.
     */
    node = elems;              /* next internal node of the tree */
    do {
        pqremove(s, tree, n);  /* n = node of least frequency */
        m = s->heap[SMALLEST]; /* m = node of next least frequency */

        s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
        s->heap[--(s->heap_max)] = m;

        /* Create a new node father of n and m */
        tree[node].Freq = tree[n].Freq + tree[m].Freq;
        s->depth[node] = (uch) (max(s->depth[n], s->depth[m]) + 1);
        tree[n].Dad = tree[m].Dad = (ush)node;
#ifdef DUMP_BL_TREE
        if (tree == s->bl_tree) {
            fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
                    node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
        }
#endif
        /* and insert the new node in the heap */
        s->heap[SMALLEST] = node++;
        pqdownheap(s, tree, SMALLEST);

    } while (s->heap_len >= 2);

    s->heap[--(s->heap_max)] = s->heap[SMALLEST];

    /* At this point, the fields freq and dad are set. We can now
     * generate the bit lengths.
     */
    gen_bitlen(s, (tree_desc *)desc);

    /* The field len is now set, we can generate the bit codes */
    gen_codes ((ct_data *)tree, max_code, s->bl_count);
}
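/* Worked example (frequencies chosen for illustration): with three used
 * symbols of frequencies {4, 1, 1}, the loop above first combines the two
 * frequency-1 leaves into an internal node of frequency 2, then combines that
 * node with the frequency-4 leaf into the root. gen_bitlen() then follows the
 * Dad links and assigns lengths 1, 2 and 2, i.e. 4*1 + 1*2 + 1*2 = 8 bits of
 * symbol data for the block.
 */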
/* ===========================================================================
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree.
 */
static void scan_tree(
    deflate_state *s,
    ct_data *tree,  /* the tree to be scanned */
    int max_code    /* and its largest code of non zero frequency */
)
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    if (nextlen == 0) max_count = 138, min_count = 3;
    tree[max_code+1].Len = (ush)0xffff; /* guard */

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            s->bl_tree[curlen].Freq += count;
        } else if (curlen != 0) {
            if (curlen != prevlen) s->bl_tree[curlen].Freq++;
            s->bl_tree[REP_3_6].Freq++;
        } else if (count <= 10) {
            s->bl_tree[REPZ_3_10].Freq++;
        } else {
            s->bl_tree[REPZ_11_138].Freq++;
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}
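/* Worked example (lengths chosen for illustration): a run of five codes of
 * length 8, preceded and followed by other lengths, is tallied as one
 * bl_tree[8] entry plus one REP_3_6 entry (length 8, then "repeat previous
 * length 4 times"); a run of 25 zero lengths is tallied as a single
 * REPZ_11_138 entry. send_tree() below emits the same sequence, adding the
 * 2-, 3- or 7-bit repeat counts.
 */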
/* ===========================================================================
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree.
 */
static void send_tree(
    deflate_state *s,
    ct_data *tree,  /* the tree to be scanned */
    int max_code    /* and its largest code of non zero frequency */
)
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    /* tree[max_code+1].Len = -1; */ /* guard already set */
    if (nextlen == 0) max_count = 138, min_count = 3;

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);
        } else if (curlen != 0) {
            if (curlen != prevlen) {
                send_code(s, curlen, s->bl_tree); count--;
            }
            Assert(count >= 3 && count <= 6, " 3_6?");
            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);
        } else if (count <= 10) {
            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);
        } else {
            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}

/* ===========================================================================
 * Construct the Huffman tree for the bit lengths and return the index in
 * bl_order of the last bit length code to send.
 */
static int build_bl_tree(
    deflate_state *s
)
{
    int max_blindex;  /* index of last bit length code of non zero freq */

    /* Determine the bit length frequencies for literal and distance trees */
    scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
    scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);

    /* Build the bit length tree: */
    build_tree(s, (tree_desc *)(&(s->bl_desc)));
    /* opt_len now includes the length of the tree representations, except
     * the lengths of the bit length codes and the 5+5+4 bits for the counts.
     */

    /* Determine the number of bit length codes to send. The pkzip format
     * requires that at least 4 bit length codes be sent. (appnote.txt says
     * 3 but the actual value used is 4.)
     */
    for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
        if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
    }
    /* Update opt_len to include the bit length tree and counts */
    s->opt_len += 3*(max_blindex+1) + 5+5+4;
    Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
            s->opt_len, s->static_len));

    return max_blindex;
}
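/* The 5+5+4 bits counted above are the dynamic block header fields of the
 * deflate format: HLIT (number of literal/length codes - 257, 5 bits),
 * HDIST (number of distance codes - 1, 5 bits) and HCLEN (number of bit
 * length codes - 4, 4 bits). Each of the HCLEN+4 bit length code lengths
 * sent in bl_order costs 3 bits, hence the 3*(max_blindex+1) term. These
 * fields are written by send_all_trees() below.
 */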
/* ===========================================================================
 * Send the header for a block using dynamic Huffman trees: the counts, the
 * lengths of the bit length codes, the literal tree and the distance tree.
 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
 */
static void send_all_trees(
    deflate_state *s,
    int lcodes,   /* number of literal/length codes */
    int dcodes,   /* number of distance codes */
    int blcodes   /* number of bit length codes */
)
{
    int rank;   /* index in bl_order */

    Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
    Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
            "too many codes");
    Tracev((stderr, "\nbl counts: "));
    send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
    send_bits(s, dcodes-1,   5);
    send_bits(s, blcodes-4,  4); /* not -3 as stated in appnote.txt */
    for (rank = 0; rank < blcodes; rank++) {
        Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
        send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
    }
    Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
    Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
    Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
}

/* ===========================================================================
 * Send a stored block
 */
void zlib_tr_stored_block(
    deflate_state *s,
    char *buf,       /* input block */
    ulg stored_len,  /* length of input block */
    int eof          /* true if this is the last block for a file */
)
{
    send_bits(s, (STORED_BLOCK<<1)+eof, 3);  /* send block type */
    s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
    s->compressed_len += (stored_len + 4) << 3;

    copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
}
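/* A stored block consists of the 3-bit block header, padding up to the next
 * byte boundary, a 16-bit LEN field, a 16-bit NLEN field (the one's
 * complement of LEN) and then the stored_len raw bytes. The rounding
 * "(... + 3 + 7) & ~7" above accounts for the header plus padding, and the
 * "+ 4" accounts for the LEN and NLEN words written by copy_block().
 */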
/* Send just the `stored block' type code without any length bytes or data.
 */
void zlib_tr_stored_type_only(
    deflate_state *s
)
{
    send_bits(s, (STORED_BLOCK << 1), 3);
    bi_windup(s);
    s->compressed_len = (s->compressed_len + 3) & ~7L;
}

/* ===========================================================================
 * Send one empty static block to give enough lookahead for inflate.
 * This takes 10 bits, of which 7 may remain in the bit buffer.
 * The current inflate code requires 9 bits of lookahead. If the
 * last two codes for the previous block (real code plus EOB) were coded
 * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode
 * the last real code. In this case we send two empty static blocks instead
 * of one. (There are no problems if the previous block is stored or fixed.)
 * To simplify the code, we assume the worst case of last real code encoded
 * on one bit only.
 */
void zlib_tr_align(
    deflate_state *s
)
{
    send_bits(s, STATIC_TREES<<1, 3);
    send_code(s, END_BLOCK, static_ltree);
    s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
    bi_flush(s);
    /* Of the 10 bits for the empty block, we have already sent
     * (10 - bi_valid) bits. The lookahead for the last real code (before
     * the EOB of the previous block) was thus at least one plus the length
     * of the EOB plus what we have just sent of the empty static block.
     */
    if (1 + s->last_eob_len + 10 - s->bi_valid < 9) {
        send_bits(s, STATIC_TREES<<1, 3);
        send_code(s, END_BLOCK, static_ltree);
        s->compressed_len += 10L;
        bi_flush(s);
    }
    s->last_eob_len = 7;
}

/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and output the encoded block to the zip file. This function
 * returns the total compressed length for the file so far.
 */
ulg zlib_tr_flush_block(
    deflate_state *s,
    char *buf,       /* input block, or NULL if too old */
    ulg stored_len,  /* length of input block */
    int eof          /* true if this is the last block for a file */
)
{
    ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
    int max_blindex = 0;  /* index of last bit length code of non zero freq */

    /* Build the Huffman trees unless a stored block is forced */
    if (s->level > 0) {

        /* Check if the file is ascii or binary */
        if (s->data_type == Z_UNKNOWN) set_data_type(s);

        /* Construct the literal and distance trees */
        build_tree(s, (tree_desc *)(&(s->l_desc)));
        Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));

        build_tree(s, (tree_desc *)(&(s->d_desc)));
        Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));
        /* At this point, opt_len and static_len are the total bit lengths of
         * the compressed block data, excluding the tree representations.
         */

        /* Build the bit length tree for the above two trees, and get the index
         * in bl_order of the last bit length code to send.
         */
        max_blindex = build_bl_tree(s);

        /* Determine the best encoding. Compute first the block length in bytes */
        opt_lenb = (s->opt_len+3+7)>>3;
        static_lenb = (s->static_len+3+7)>>3;

        Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
                opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
                s->last_lit));

        if (static_lenb <= opt_lenb) opt_lenb = static_lenb;

    } else {
        Assert(buf != (char*)0, "lost buf");
        opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
    }

    /* If compression failed and this is the first and last block,
     * and if the .zip file can be seeked (to rewrite the local header),
     * the whole file is transformed into a stored file:
     */
#ifdef STORED_FILE_OK
# ifdef FORCE_STORED_FILE
    if (eof && s->compressed_len == 0L) { /* force stored file */
# else
    if (stored_len <= opt_lenb && eof && s->compressed_len==0L && seekable()) {
# endif
        /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
        if (buf == (char*)0) error ("block vanished");

        copy_block(s, buf, (unsigned)stored_len, 0); /* without header */
        s->compressed_len = stored_len << 3;
        s->method = STORED;
    } else
#endif /* STORED_FILE_OK */

#ifdef FORCE_STORED
    if (buf != (char*)0) { /* force stored block */
#else
    if (stored_len+4 <= opt_lenb && buf != (char*)0) {
        /* 4: two words for the lengths */
#endif
        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
         * Otherwise we can't have processed more than WSIZE input bytes since
         * the last block flush, because compression would have been
         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
         * transform a block into a stored block.
         */
        zlib_tr_stored_block(s, buf, stored_len, eof);

#ifdef FORCE_STATIC
    } else if (static_lenb >= 0) { /* force static trees */
#else
    } else if (static_lenb == opt_lenb) {
#endif
        send_bits(s, (STATIC_TREES<<1)+eof, 3);
        compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree);
        s->compressed_len += 3 + s->static_len;
    } else {
        send_bits(s, (DYN_TREES<<1)+eof, 3);
        send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
                       max_blindex+1);
        compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree);
        s->compressed_len += 3 + s->opt_len;
    }
    Assert (s->compressed_len == s->bits_sent, "bad compressed size");
    init_block(s);

    if (eof) {
        bi_windup(s);
        s->compressed_len += 7;  /* align on byte boundary */
    }
    Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
            s->compressed_len-7*eof));

    return s->compressed_len >> 3;
}

/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
int zlib_tr_tally(
    deflate_state *s,
    unsigned dist,  /* distance of matched string */
    unsigned lc     /* match length-MIN_MATCH or unmatched char (if dist==0) */
)
{
    s->d_buf[s->last_lit] = (ush)dist;
    s->l_buf[s->last_lit++] = (uch)lc;
    if (dist == 0) {
        /* lc is the unmatched char */
        s->dyn_ltree[lc].Freq++;
    } else {
        s->matches++;
        /* Here, lc is the match length - MIN_MATCH */
        dist--;             /* dist = match distance - 1 */
        Assert((ush)dist < (ush)MAX_DIST(s) &&
               (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
               (ush)d_code(dist) < (ush)D_CODES, "zlib_tr_tally: bad match");

        s->dyn_ltree[length_code[lc]+LITERALS+1].Freq++;
        s->dyn_dtree[d_code(dist)].Freq++;
    }

    /* Try to guess if it is profitable to stop the current block here */
    if ((s->last_lit & 0xfff) == 0 && s->level > 2) {
        /* Compute an upper bound for the compressed length */
        ulg out_length = (ulg)s->last_lit*8L;
        ulg in_length = (ulg)((long)s->strstart - s->block_start);
        int dcode;
        for (dcode = 0; dcode < D_CODES; dcode++) {
            out_length += (ulg)s->dyn_dtree[dcode].Freq *
                          (5L+extra_dbits[dcode]);
        }
        out_length >>= 3;
        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
                s->last_lit, in_length, out_length,
                100L - out_length*100L/in_length));
        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
    }
    return (s->last_lit == s->lit_bufsize-1);
    /* We avoid equality with lit_bufsize because of wraparound at 64K
     * on 16 bit machines and because stored blocks are restricted to
     * 64K-1 bytes.
     */
}

/* ===========================================================================
 * Send the block data compressed using the given Huffman trees
 */
static void compress_block(
    deflate_state *s,
    ct_data *ltree,  /* literal tree */
    ct_data *dtree   /* distance tree */
)
{
    unsigned dist;      /* distance of matched string */
    int lc;             /* match length or unmatched char (if dist == 0) */
    unsigned lx = 0;    /* running index in l_buf */
    unsigned code;      /* the code to send */
    int extra;          /* number of extra bits to send */

    if (s->last_lit != 0) do {
        dist = s->d_buf[lx];
        lc = s->l_buf[lx++];
        if (dist == 0) {
            send_code(s, lc, ltree); /* send a literal byte */
            Tracecv(isgraph(lc), (stderr," '%c' ", lc));
        } else {
            /* Here, lc is the match length - MIN_MATCH */
            code = length_code[lc];
            send_code(s, code+LITERALS+1, ltree); /* send the length code */
            extra = extra_lbits[code];
            if (extra != 0) {
                lc -= base_length[code];
                send_bits(s, lc, extra);       /* send the extra length bits */
            }
            dist--; /* dist is now the match distance - 1 */
            code = d_code(dist);
            Assert (code < D_CODES, "bad d_code");

            send_code(s, code, dtree);       /* send the distance code */
            extra = extra_dbits[code];
            if (extra != 0) {
                dist -= base_dist[code];
                send_bits(s, dist, extra);   /* send the extra distance bits */
            }
        } /* literal or match pair ? */

        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
        Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow");

    } while (lx < s->last_lit);

    send_code(s, END_BLOCK, ltree);
    s->last_eob_len = ltree[END_BLOCK].Len;
}
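/* Worked example (values chosen for illustration): a match of length 10 at
 * distance 100 was tallied with lc = 10 - MIN_MATCH = 7 and dist = 100.
 * Here length_code[7] == 7, so literal/length code 7 + LITERALS + 1 = 264 is
 * sent with no extra length bits (extra_lbits[7] == 0). Then dist becomes 99,
 * d_code(99) == 13, so distance code 13 is sent followed by 5 extra bits
 * carrying 99 - base_dist[13] == 3.
 */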
/* ===========================================================================
 * Set the data type to ASCII or BINARY, using a crude approximation:
 * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.
 * IN assertion: the fields freq of dyn_ltree are set and the total of all
 * frequencies does not exceed 64K (to fit in an int on 16 bit machines).
 */
static void set_data_type(
    deflate_state *s
)
{
    int n = 0;
    unsigned ascii_freq = 0;
    unsigned bin_freq = 0;

    while (n < 7)        bin_freq += s->dyn_ltree[n++].Freq;
    while (n < 128)      ascii_freq += s->dyn_ltree[n++].Freq;
    while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq;
    s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? Z_BINARY : Z_ASCII);
}

/* ===========================================================================
 * Copy a stored block, storing first the length and its
 * one's complement if requested.
 */
static void copy_block(
    deflate_state *s,
    char *buf,     /* the input data */
    unsigned len,  /* its length */
    int header     /* true if block header must be written */
)
{
    bi_windup(s);        /* align on byte boundary */
    s->last_eob_len = 8; /* enough lookahead for inflate */

    if (header) {
        put_short(s, (ush)len);
        put_short(s, (ush)~len);
#ifdef DEBUG_ZLIB
        s->bits_sent += 2*16;
#endif
    }
#ifdef DEBUG_ZLIB
    s->bits_sent += (ulg)len<<3;
#endif
    /* bundle up the put_byte(s, *buf++) calls */
    memcpy(&s->pending_buf[s->pending], buf, len);
    s->pending += len;
}