/* trees.c -- output deflated data using Huffman coding
 * Copyright (C) 1995-2017 Jean-loup Gailly
 * detect_data_type() function provided freely by Cosmin Truta, 2006
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 *  ALGORITHM
 *
 *      The "deflation" process uses several Huffman trees. The more
 *      common source values are represented by shorter bit sequences.
 *
 *      Each code tree is stored in a compressed form which is itself
 *      a Huffman encoding of the lengths of all the code strings (in
 *      ascending order by source values). The actual code strings are
 *      reconstructed from the lengths in the inflate process, as described
 *      in the deflate specification.
 *
 *  REFERENCES
 *
 *      Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
 *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
 *
 *      Storer, James A.
 *          Data Compression: Methods and Theory, pp. 49-50.
 *          Computer Science Press, 1988. ISBN 0-7167-8156-5.
 *
 *      Sedgewick, R.
 *          Algorithms, p290.
 *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
 */

/* @(#) $Id$ */

/* #define GEN_TREES_H */

#include "deflate.h"

#ifdef ZLIB_DEBUG
# include <ctype.h>
#endif

/* ===========================================================================
 * Constants
 */

#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define END_BLOCK 256
/* end of block literal code */

#define REP_3_6 16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */

#define REPZ_3_10 17
/* repeat a zero length 3-10 times (3 bits of repeat count) */

#define REPZ_11_138 18
/* repeat a zero length 11-138 times (7 bits of repeat count) */

local const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
   = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};

local const int extra_dbits[D_CODES] /* extra bits for each distance code */
   = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};

local const int extra_blbits[BL_CODES]/* extra bits for each bit length code */
   = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};

local const uch bl_order[BL_CODES]
   = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
/* The lengths of the bit length codes are sent in order of decreasing
 * probability, to avoid transmitting the lengths for unused bit length codes.
 */

/* ===========================================================================
 * Local data. These are initialized only once.
 */

#define DIST_CODE_LEN 512 /* see definition of array dist_code below */

#if defined(GEN_TREES_H) || !defined(STDC)
/* non ANSI compilers may not accept trees.h */

local ct_data static_ltree[L_CODES+2];
/* The static literal tree. Since the bit lengths are imposed, there is no
 * need for the L_CODES extra codes used during heap construction. However,
 * the codes 286 and 287 are needed to build a canonical tree (see _tr_init
 * below).
 */

local ct_data static_dtree[D_CODES];
/* The static distance tree. (Actually a trivial tree since all codes use
 * 5 bits.)
 */

uch _dist_code[DIST_CODE_LEN];
/* Distance codes. The first 256 values correspond to the distances
 * 3 .. 258, the last 256 values correspond to the top 8 bits of
 * the 15 bit distances.
 */

uch _length_code[MAX_MATCH-MIN_MATCH+1];
/* length code for each normalized match length (0 == MIN_MATCH) */

local int base_length[LENGTH_CODES];
/* First normalized length for each code (0 = MIN_MATCH) */

local int base_dist[D_CODES];
/* First normalized distance for each code (0 = distance of 1) */

#else
# include "trees.h"
#endif /* GEN_TREES_H */
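
/* Illustrative note (added for clarity, not original zlib commentary): the
 * two halves of _dist_code correspond to the two cases of the d_code() macro
 * in deflate.h. A (distance - 1) value below 256 is looked up directly in
 * the first 256 entries; a larger value is looked up by its top bits in the
 * second 256 entries, roughly _dist_code[256 + ((distance-1) >> 7)].
 */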

struct static_tree_desc_s {
    const ct_data *static_tree; /* static tree or NULL */
    const intf *extra_bits;     /* extra bits for each code or NULL */
    int extra_base;             /* base index for extra_bits */
    int elems;                  /* max number of elements in the tree */
    int max_length;             /* max bit length for the codes */
};

local const static_tree_desc static_l_desc =
{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};

local const static_tree_desc static_d_desc =
{static_dtree, extra_dbits, 0, D_CODES, MAX_BITS};

local const static_tree_desc static_bl_desc =
{(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS};

/* ===========================================================================
 * Local (static) routines in this file.
 */

local void tr_static_init OF((void));
local void init_block     OF((deflate_state *s));
local void pqdownheap     OF((deflate_state *s, ct_data *tree, int k));
local void gen_bitlen     OF((deflate_state *s, tree_desc *desc));
local void gen_codes      OF((ct_data *tree, int max_code, ushf *bl_count));
local void build_tree     OF((deflate_state *s, tree_desc *desc));
local void scan_tree      OF((deflate_state *s, ct_data *tree, int max_code));
local void send_tree      OF((deflate_state *s, ct_data *tree, int max_code));
local int  build_bl_tree  OF((deflate_state *s));
local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
                              int blcodes));
local void compress_block OF((deflate_state *s, const ct_data *ltree,
                              const ct_data *dtree));
local int  detect_data_type OF((deflate_state *s));
local unsigned bi_reverse OF((unsigned value, int length));
local void bi_windup      OF((deflate_state *s));
local void bi_flush       OF((deflate_state *s));

#ifdef GEN_TREES_H
local void gen_trees_header OF((void));
#endif

#ifndef ZLIB_DEBUG
# define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
  /* Send a code of the given tree. c and tree must not have side effects */

#else /* !ZLIB_DEBUG */
# define send_code(s, c, tree) \
    { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
      send_bits(s, tree[c].Code, tree[c].Len); }
#endif

/* ===========================================================================
 * Output a short LSB first on the stream.
 * IN assertion: there is enough room in pendingBuf.
 */
#define put_short(s, w) { \
    put_byte(s, (uch)((w) & 0xff)); \
    put_byte(s, (uch)((ush)(w) >> 8)); \
}

/* ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
 */
#ifdef ZLIB_DEBUG
local void send_bits OF((deflate_state *s, int value, int length));

local void send_bits(s, value, length)
    deflate_state *s;
    int value;  /* value to send */
    int length; /* number of bits */
{
    Tracevv((stderr," l %2d v %4x ", length, value));
    Assert(length > 0 && length <= 15, "invalid length");
    s->bits_sent += (ulg)length;

    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
     * (16 - bi_valid) bits from value, leaving (length - (16-bi_valid))
     * unused bits in value.
     */
    if (s->bi_valid > (int)Buf_size - length) {
        s->bi_buf |= (ush)value << s->bi_valid;
        put_short(s, s->bi_buf);
        s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
        s->bi_valid += length - Buf_size;
    } else {
        s->bi_buf |= (ush)value << s->bi_valid;
        s->bi_valid += length;
    }
}
#else /* !ZLIB_DEBUG */

#define send_bits(s, value, length) \
{ int len = length;\
  if (s->bi_valid > (int)Buf_size - len) {\
    int val = (int)value;\
    s->bi_buf |= (ush)val << s->bi_valid;\
    put_short(s, s->bi_buf);\
    s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
    s->bi_valid += len - Buf_size;\
  } else {\
    s->bi_buf |= (ush)(value) << s->bi_valid;\
    s->bi_valid += len;\
  }\
}
#endif /* ZLIB_DEBUG */
/* the arguments must not have side effects */
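
/* Worked example (illustrative, assuming Buf_size == 16): with bi_valid == 14
 * and a 5-bit value to send, 14 > 16 - 5, so the low 2 bits of the value fill
 * bi_buf, the full 16-bit buffer is written out with put_short(), and the top
 * 3 bits of the value become the new bi_buf with bi_valid == 14 + 5 - 16 == 3.
 */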

/* ===========================================================================
 * Initialize the various 'constant' tables.
 */
local void tr_static_init()
{
#if defined(GEN_TREES_H) || !defined(STDC)
    static int static_init_done = 0;
    int n;        /* iterates over tree elements */
    int bits;     /* bit counter */
    int length;   /* length value */
    int code;     /* code value */
    int dist;     /* distance index */
    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    if (static_init_done) return;

    /* For some embedded targets, global variables are not initialized: */
#ifdef NO_INIT_GLOBAL_POINTERS
    static_l_desc.static_tree = static_ltree;
    static_l_desc.extra_bits = extra_lbits;
    static_d_desc.static_tree = static_dtree;
    static_d_desc.extra_bits = extra_dbits;
    static_bl_desc.extra_bits = extra_blbits;
#endif

    /* Initialize the mapping length (0..255) -> length code (0..28) */
    length = 0;
    for (code = 0; code < LENGTH_CODES-1; code++) {
        base_length[code] = length;
        for (n = 0; n < (1<<extra_lbits[code]); n++) {
            _length_code[length++] = (uch)code;
        }
    }
    Assert (length == 256, "tr_static_init: length != 256");
    /* Note that the length 255 (match length 258) can be represented
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
    _length_code[length-1] = (uch)code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0 ; code < 16; code++) {
        base_dist[code] = dist;
        for (n = 0; n < (1<<extra_dbits[code]); n++) {
            _dist_code[dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: dist != 256");
    dist >>= 7; /* from now on, all distances are divided by 128 */
    for ( ; code < D_CODES; code++) {
        base_dist[code] = dist << 7;
        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
            _dist_code[256 + dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: 256+dist != 512");

    /* Construct the codes of the static literal tree */
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
    n = 0;
    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
    /* Codes 286 and 287 do not exist, but we must include them in the
     * tree construction to get a canonical Huffman tree (longest code
     * all ones)
     */
    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);

    /* The static distance tree is trivial: */
    for (n = 0; n < D_CODES; n++) {
        static_dtree[n].Len = 5;
        static_dtree[n].Code = bi_reverse((unsigned)n, 5);
    }
    static_init_done = 1;

# ifdef GEN_TREES_H
    gen_trees_header();
# endif
#endif /* defined(GEN_TREES_H) || !defined(STDC) */
}
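
/* Example of the mappings built above (added for illustration): a match of
 * length 18 has normalized length 18 - MIN_MATCH == 15, so _length_code[15]
 * is 11 and base_length[11] is 14; the emitted symbol is length code 11
 * (literal/length symbol 268), followed by extra_lbits[11] == 1 extra bit
 * with value 15 - 14 == 1.
 */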

/* ===========================================================================
 * Generate the file trees.h describing the static trees.
 */
#ifdef GEN_TREES_H
# ifndef ZLIB_DEBUG
#  include <stdio.h>
# endif

# define SEPARATOR(i, last, width) \
      ((i) == (last)? "\n};\n\n" : \
       ((i) % (width) == (width)-1 ? ",\n" : ", "))

void gen_trees_header()
{
    FILE *header = fopen("trees.h", "w");
    int i;

    Assert (header != NULL, "Can't open trees.h");
    fprintf(header,
            "/* header created automatically with -DGEN_TREES_H */\n\n");

    fprintf(header, "local const ct_data static_ltree[L_CODES+2] = {\n");
    for (i = 0; i < L_CODES+2; i++) {
        fprintf(header, "{{%3u},{%3u}}%s", static_ltree[i].Code,
                static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5));
    }

    fprintf(header, "local const ct_data static_dtree[D_CODES] = {\n");
    for (i = 0; i < D_CODES; i++) {
        fprintf(header, "{{%2u},{%2u}}%s", static_dtree[i].Code,
                static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5));
    }

    fprintf(header, "const uch ZLIB_INTERNAL _dist_code[DIST_CODE_LEN] = {\n");
    for (i = 0; i < DIST_CODE_LEN; i++) {
        fprintf(header, "%2u%s", _dist_code[i],
                SEPARATOR(i, DIST_CODE_LEN-1, 20));
    }

    fprintf(header,
        "const uch ZLIB_INTERNAL _length_code[MAX_MATCH-MIN_MATCH+1]= {\n");
    for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) {
        fprintf(header, "%2u%s", _length_code[i],
                SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20));
    }

    fprintf(header, "local const int base_length[LENGTH_CODES] = {\n");
    for (i = 0; i < LENGTH_CODES; i++) {
        fprintf(header, "%1u%s", base_length[i],
                SEPARATOR(i, LENGTH_CODES-1, 20));
    }

    fprintf(header, "local const int base_dist[D_CODES] = {\n");
    for (i = 0; i < D_CODES; i++) {
        fprintf(header, "%5u%s", base_dist[i],
                SEPARATOR(i, D_CODES-1, 10));
    }

    fclose(header);
}
#endif /* GEN_TREES_H */

/* ===========================================================================
 * Initialize the tree data structures for a new zlib stream.
 */
void ZLIB_INTERNAL _tr_init(s)
    deflate_state *s;
{
    tr_static_init();

    s->l_desc.dyn_tree = s->dyn_ltree;
    s->l_desc.stat_desc = &static_l_desc;

    s->d_desc.dyn_tree = s->dyn_dtree;
    s->d_desc.stat_desc = &static_d_desc;

    s->bl_desc.dyn_tree = s->bl_tree;
    s->bl_desc.stat_desc = &static_bl_desc;

    s->bi_buf = 0;
    s->bi_valid = 0;
#ifdef ZLIB_DEBUG
    s->compressed_len = 0L;
    s->bits_sent = 0L;
#endif

    /* Initialize the first block of the first file: */
    init_block(s);
}

/* ===========================================================================
 * Initialize a new block.
 */
local void init_block(s)
    deflate_state *s;
{
    int n; /* iterates over tree elements */

    /* Initialize the trees. */
    for (n = 0; n < L_CODES;  n++) s->dyn_ltree[n].Freq = 0;
    for (n = 0; n < D_CODES;  n++) s->dyn_dtree[n].Freq = 0;
    for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;

    s->dyn_ltree[END_BLOCK].Freq = 1;
    s->opt_len = s->static_len = 0L;
    s->last_lit = s->matches = 0;
}

#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */

/* ===========================================================================
 * Remove the smallest element from the heap and recreate the heap with
 * one less element. Updates heap and heap_len.
 */
#define pqremove(s, tree, top) \
{\
    top = s->heap[SMALLEST]; \
    s->heap[SMALLEST] = s->heap[s->heap_len--]; \
    pqdownheap(s, tree, SMALLEST); \
}

/* ===========================================================================
 * Compares two subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length.
 */
#define smaller(tree, n, m, depth) \
   (tree[n].Freq < tree[m].Freq || \
   (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))

/* ===========================================================================
 * Restore the heap property by moving down the tree starting at node k,
 * exchanging a node with the smallest of its two sons if necessary, stopping
 * when the heap property is re-established (each father smaller than its
 * two sons).
 */
local void pqdownheap(s, tree, k)
    deflate_state *s;
    ct_data *tree; /* the tree to restore */
    int k;         /* node to move down */
{
    int v = s->heap[k];
    int j = k << 1; /* left son of k */
    while (j <= s->heap_len) {
        /* Set j to the smallest of the two sons: */
        if (j < s->heap_len &&
            smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
            j++;
        }
        /* Exit if v is smaller than both sons */
        if (smaller(tree, v, s->heap[j], s->depth)) break;

        /* Exchange v with the smallest son */
        s->heap[k] = s->heap[j]; k = j;

        /* And continue down the tree, setting j to the left son of k */
        j <<= 1;
    }
    s->heap[k] = v;
}

/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 *    above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 *     array bl_count contains the frequencies for each bit length.
 *     The length opt_len is updated; static_len is also updated if stree is
 *     not null.
 */
local void gen_bitlen(s, desc)
    deflate_state *s;
    tree_desc *desc; /* the tree descriptor */
{
    ct_data *tree        = desc->dyn_tree;
    int max_code         = desc->max_code;
    const ct_data *stree = desc->stat_desc->static_tree;
    const intf *extra    = desc->stat_desc->extra_bits;
    int base             = desc->stat_desc->extra_base;
    int max_length       = desc->stat_desc->max_length;
    int h;            /* heap index */
    int n, m;         /* iterate over the tree elements */
    int bits;         /* bit length */
    int xbits;        /* extra bits */
    ush f;            /* frequency */
    int overflow = 0; /* number of elements with bit length too large */

    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;

    /* In a first pass, compute the optimal bit lengths (which may
     * overflow in the case of the bit length tree).
     */
    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */

    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
        n = s->heap[h];
        bits = tree[tree[n].Dad].Len + 1;
        if (bits > max_length) bits = max_length, overflow++;
        tree[n].Len = (ush)bits;
        /* We overwrite tree[n].Dad which is no longer needed */

        if (n > max_code) continue; /* not a leaf node */

        s->bl_count[bits]++;
        xbits = 0;
        if (n >= base) xbits = extra[n-base];
        f = tree[n].Freq;
        s->opt_len += (ulg)f * (unsigned)(bits + xbits);
        if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits);
    }
    if (overflow == 0) return;

    Tracev((stderr,"\nbit length overflow\n"));
    /* This happens for example on obj2 and pic of the Calgary corpus */

    /* Find the first bit length which could increase: */
    do {
        bits = max_length-1;
        while (s->bl_count[bits] == 0) bits--;
        s->bl_count[bits]--;      /* move one leaf down the tree */
        s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
        s->bl_count[max_length]--;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
         */
        overflow -= 2;
    } while (overflow > 0);

    /* Now recompute all bit lengths, scanning in increasing frequency.
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
     * lengths instead of fixing only the wrong ones. This idea is taken
     * from 'ar' written by Haruhiko Okumura.)
     */
    for (bits = max_length; bits != 0; bits--) {
        n = s->bl_count[bits];
        while (n != 0) {
            m = s->heap[--h];
            if (m > max_code) continue;
            if ((unsigned) tree[m].Len != (unsigned) bits) {
                Tracev((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
                s->opt_len += ((ulg)bits - tree[m].Len) * tree[m].Freq;
                tree[m].Len = (ush)bits;
            }
            n--;
        }
    }
}

/* ===========================================================================
 * Generate the codes for a given tree and bit counts (which need not be
 * optimal).
 * IN assertion: the array bl_count contains the bit length statistics for
 * the given tree and the field len is set for all tree elements.
 * OUT assertion: the field code is set for all tree elements of non
 *     zero code length.
 */
local void gen_codes (tree, max_code, bl_count)
    ct_data *tree;  /* the tree to decorate */
    int max_code;   /* largest code with non zero frequency */
    ushf *bl_count; /* number of codes at each bit length */
{
    ush next_code[MAX_BITS+1]; /* next code value for each bit length */
    unsigned code = 0;         /* running code value */
    int bits;                  /* bit index */
    int n;                     /* code index */

    /* The distribution counts are first used to generate the code values
     * without bit reversal.
     */
    for (bits = 1; bits <= MAX_BITS; bits++) {
        code = (code + bl_count[bits-1]) << 1;
        next_code[bits] = (ush)code;
    }
    /* Check that the bit counts in bl_count are consistent. The last code
     * must be all ones.
     */
    Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
            "inconsistent bit counts");
    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

    for (n = 0; n <= max_code; n++) {
        int len = tree[n].Len;
        if (len == 0) continue;
        /* Now reverse the bits */
        tree[n].Code = (ush)bi_reverse(next_code[len]++, len);

        Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
             n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
    }
}
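
/* Worked example (added for illustration, taken from the deflate
 * specification, RFC 1951 section 3.2.2): for eight symbols with lengths
 * {3,3,3,3,3,2,4,4}, bl_count is {0,0,1,5,2} and next_code becomes
 * {0,0,0,2,14}, so the codes assigned above (before bit reversal) are
 * 010..110 for the 3-bit symbols, 00 for the 2-bit symbol, and 1110, 1111
 * for the 4-bit symbols.
 */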

/* ===========================================================================
 * Construct one Huffman tree and assign the code bit strings and lengths.
 * Update the total bit length for the current block.
 * IN assertion: the field freq is set for all tree elements.
 * OUT assertions: the fields len and code are set to the optimal bit length
 *     and corresponding code. The length opt_len is updated; static_len is
 *     also updated if stree is not null. The field max_code is set.
 */
local void build_tree(s, desc)
    deflate_state *s;
    tree_desc *desc; /* the tree descriptor */
{
    ct_data *tree        = desc->dyn_tree;
    const ct_data *stree = desc->stat_desc->static_tree;
    int elems            = desc->stat_desc->elems;
    int n, m;          /* iterate over heap elements */
    int max_code = -1; /* largest code with non zero frequency */
    int node;          /* new node being created */

    /* Construct the initial heap, with least frequent element in
     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
     * heap[0] is not used.
     */
    s->heap_len = 0, s->heap_max = HEAP_SIZE;

    for (n = 0; n < elems; n++) {
        if (tree[n].Freq != 0) {
            s->heap[++(s->heap_len)] = max_code = n;
            s->depth[n] = 0;
        } else {
            tree[n].Len = 0;
        }
    }

    /* The pkzip format requires that at least one distance code exists,
     * and that at least one bit should be sent even if there is only one
     * possible code. So to avoid special checks later on we force at least
     * two codes of non zero frequency.
     */
    while (s->heap_len < 2) {
        node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
        tree[node].Freq = 1;
        s->depth[node] = 0;
        s->opt_len--; if (stree) s->static_len -= stree[node].Len;
        /* node is 0 or 1 so it does not have extra bits */
    }
    desc->max_code = max_code;

    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
     * establish sub-heaps of increasing lengths:
     */
    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);

    /* Construct the Huffman tree by repeatedly combining the least two
     * frequent nodes.
     */
    node = elems;              /* next internal node of the tree */
    do {
        pqremove(s, tree, n);  /* n = node of least frequency */
        m = s->heap[SMALLEST]; /* m = node of next least frequency */

        s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
        s->heap[--(s->heap_max)] = m;

        /* Create a new node father of n and m */
        tree[node].Freq = tree[n].Freq + tree[m].Freq;
        s->depth[node] = (uch)((s->depth[n] >= s->depth[m] ?
                                s->depth[n] : s->depth[m]) + 1);
        tree[n].Dad = tree[m].Dad = (ush)node;
#ifdef DUMP_BL_TREE
        if (tree == s->bl_tree) {
            fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
                    node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
        }
#endif
        /* and insert the new node in the heap */
        s->heap[SMALLEST] = node++;
        pqdownheap(s, tree, SMALLEST);

    } while (s->heap_len >= 2);

    s->heap[--(s->heap_max)] = s->heap[SMALLEST];

    /* At this point, the fields freq and dad are set. We can now
     * generate the bit lengths.
     */
    gen_bitlen(s, (tree_desc *)desc);

    /* The field len is now set, we can generate the bit codes */
    gen_codes ((ct_data *)tree, max_code, s->bl_count);
}

/* ===========================================================================
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree.
 */
local void scan_tree (s, tree, max_code)
    deflate_state *s;
    ct_data *tree; /* the tree to be scanned */
    int max_code;  /* and its largest code of non zero frequency */
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    if (nextlen == 0) max_count = 138, min_count = 3;
    tree[max_code+1].Len = (ush)0xffff; /* guard */

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            s->bl_tree[curlen].Freq += count;
        } else if (curlen != 0) {
            if (curlen != prevlen) s->bl_tree[curlen].Freq++;
            s->bl_tree[REP_3_6].Freq++;
        } else if (count <= 10) {
            s->bl_tree[REPZ_3_10].Freq++;
        } else {
            s->bl_tree[REPZ_11_138].Freq++;
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}

/* ===========================================================================
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree.
 */
local void send_tree (s, tree, max_code)
    deflate_state *s;
    ct_data *tree; /* the tree to be scanned */
    int max_code;  /* and its largest code of non zero frequency */
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    /* tree[max_code+1].Len = -1; */ /* guard already set */
    if (nextlen == 0) max_count = 138, min_count = 3;

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);

        } else if (curlen != 0) {
            if (curlen != prevlen) {
                send_code(s, curlen, s->bl_tree); count--;
            }
            Assert(count >= 3 && count <= 6, " 3_6?");
            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);

        } else if (count <= 10) {
            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);

        } else {
            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}
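
/* Illustrative example (added for clarity): the code length sequence
 * 4,4,4,4,4,0,0,0,0,0,0,3 is emitted by send_tree() as four bit-length
 * symbols: the length 4 itself, REP_3_6 with a repeat count of 4,
 * REPZ_3_10 covering the six zeros, then the length 3. scan_tree() above
 * counts the frequencies of exactly these symbols so that the bit length
 * tree can be built before the lengths are actually sent.
 */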

/* ===========================================================================
 * Construct the Huffman tree for the bit lengths and return the index in
 * bl_order of the last bit length code to send.
 */
local int build_bl_tree(s)
    deflate_state *s;
{
    int max_blindex; /* index of last bit length code of non zero freq */

    /* Determine the bit length frequencies for literal and distance trees */
    scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
    scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);

    /* Build the bit length tree: */
    build_tree(s, (tree_desc *)(&(s->bl_desc)));
    /* opt_len now includes the length of the tree representations, except
     * the lengths of the bit length codes and the 5+5+4 bits for the counts.
     */

    /* Determine the number of bit length codes to send. The pkzip format
     * requires that at least 4 bit length codes be sent. (appnote.txt says
     * 3 but the actual value used is 4.)
     */
    for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
        if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
    }
    /* Update opt_len to include the bit length tree and counts */
    s->opt_len += 3*((ulg)max_blindex+1) + 5+5+4;
    Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
            s->opt_len, s->static_len));

    return max_blindex;
}

/* ===========================================================================
 * Send the header for a block using dynamic Huffman trees: the counts, the
 * lengths of the bit length codes, the literal tree and the distance tree.
 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
 */
local void send_all_trees(s, lcodes, dcodes, blcodes)
    deflate_state *s;
    int lcodes, dcodes, blcodes; /* number of codes for each tree */
{
    int rank; /* index in bl_order */

    Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
    Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
            "too many codes");
    Tracev((stderr, "\nbl counts: "));
    send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
    send_bits(s, dcodes-1,   5);
    send_bits(s, blcodes-4,  4); /* not -3 as stated in appnote.txt */
    for (rank = 0; rank < blcodes; rank++) {
        Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
        send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
    }
    Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
    Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
    Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
}

/* ===========================================================================
 * Send a stored block
 */
void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last)
    deflate_state *s;
    charf *buf;     /* input block */
    ulg stored_len; /* length of input block */
    int last;       /* one if this is the last block for a file */
{
    send_bits(s, (STORED_BLOCK<<1)+last, 3); /* send block type */
    bi_windup(s);                            /* align on byte boundary */
    put_short(s, (ush)stored_len);
    put_short(s, (ush)~stored_len);
    zmemcpy(s->pending_buf + s->pending, (Bytef *)buf, stored_len);
    s->pending += stored_len;
#ifdef ZLIB_DEBUG
    s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
    s->compressed_len += (stored_len + 4) << 3;
    s->bits_sent += 2*16;
    s->bits_sent += stored_len<<3;
#endif
}

/* ===========================================================================
 * Flush the bits in the bit buffer to pending output (leaves at most 7 bits)
 */
void ZLIB_INTERNAL _tr_flush_bits(s)
    deflate_state *s;
{
    bi_flush(s);
}

/* ===========================================================================
 * Send one empty static block to give enough lookahead for inflate.
 * This takes 10 bits, of which 7 may remain in the bit buffer.
 */
void ZLIB_INTERNAL _tr_align(s)
    deflate_state *s;
{
    send_bits(s, STATIC_TREES<<1, 3);
    send_code(s, END_BLOCK, static_ltree);
#ifdef ZLIB_DEBUG
    s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
#endif
    bi_flush(s);
}

/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and write out the encoded block.
 */
void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
    deflate_state *s;
    charf *buf;     /* input block, or NULL if too old */
    ulg stored_len; /* length of input block */
    int last;       /* one if this is the last block for a file */
{
    ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
    int max_blindex = 0; /* index of last bit length code of non zero freq */

    /* Build the Huffman trees unless a stored block is forced */
    if (s->level > 0) {

        /* Check if the file is binary or text */
        if (s->strm->data_type == Z_UNKNOWN)
            s->strm->data_type = detect_data_type(s);

        /* Construct the literal and distance trees */
        build_tree(s, (tree_desc *)(&(s->l_desc)));
        Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));

        build_tree(s, (tree_desc *)(&(s->d_desc)));
        Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));
        /* At this point, opt_len and static_len are the total bit lengths of
         * the compressed block data, excluding the tree representations.
         */

        /* Build the bit length tree for the above two trees, and get the index
         * in bl_order of the last bit length code to send.
         */
        max_blindex = build_bl_tree(s);

        /* Determine the best encoding. Compute the block lengths in bytes. */
        opt_lenb = (s->opt_len+3+7)>>3;
        static_lenb = (s->static_len+3+7)>>3;

        Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
                opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
                s->last_lit));

        if (static_lenb <= opt_lenb) opt_lenb = static_lenb;

    } else {
        Assert(buf != (char*)0, "lost buf");
        opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
    }

#ifdef FORCE_STORED
    if (buf != (char*)0) { /* force stored block */
#else
    if (stored_len+4 <= opt_lenb && buf != (char*)0) {
                       /* 4: two words for the lengths */
#endif
        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
         * Otherwise we can't have processed more than WSIZE input bytes since
         * the last block flush, because compression would have been
         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
         * transform a block into a stored block.
         */
        _tr_stored_block(s, buf, stored_len, last);

#ifdef FORCE_STATIC
    } else if (static_lenb >= 0) { /* force static trees */
#else
    } else if (s->strategy == Z_FIXED || static_lenb == opt_lenb) {
#endif
        send_bits(s, (STATIC_TREES<<1)+last, 3);
        compress_block(s, (const ct_data *)static_ltree,
                       (const ct_data *)static_dtree);
#ifdef ZLIB_DEBUG
        s->compressed_len += 3 + s->static_len;
#endif
    } else {
        send_bits(s, (DYN_TREES<<1)+last, 3);
        send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
                       max_blindex+1);
        compress_block(s, (const ct_data *)s->dyn_ltree,
                       (const ct_data *)s->dyn_dtree);
#ifdef ZLIB_DEBUG
        s->compressed_len += 3 + s->opt_len;
#endif
    }
    Assert (s->compressed_len == s->bits_sent, "bad compressed size");
    /* The above check is made mod 2^32, for files larger than 512 MB
     * and uLong implemented on 32 bits.
     */
    init_block(s);

    if (last) {
        bi_windup(s);
#ifdef ZLIB_DEBUG
        s->compressed_len += 7; /* align on byte boundary */
#endif
    }
    Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
           s->compressed_len-7*last));
}

/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
int ZLIB_INTERNAL _tr_tally (s, dist, lc)
    deflate_state *s;
    unsigned dist; /* distance of matched string */
    unsigned lc;   /* match length-MIN_MATCH or unmatched char (if dist==0) */
{
    s->d_buf[s->last_lit] = (ush)dist;
    s->l_buf[s->last_lit++] = (uch)lc;
    if (dist == 0) {
        /* lc is the unmatched char */
        s->dyn_ltree[lc].Freq++;
    } else {
        s->matches++;
        /* Here, lc is the match length - MIN_MATCH */
        dist--; /* dist = match distance - 1 */
        Assert((ush)dist < (ush)MAX_DIST(s) &&
               (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
               (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match");

        s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
        s->dyn_dtree[d_code(dist)].Freq++;
    }

#ifdef TRUNCATE_BLOCK
    /* Try to guess if it is profitable to stop the current block here */
    if ((s->last_lit & 0x1fff) == 0 && s->level > 2) {
        /* Compute an upper bound for the compressed length */
        ulg out_length = (ulg)s->last_lit*8L;
        ulg in_length = (ulg)((long)s->strstart - s->block_start);
        int dcode;
        for (dcode = 0; dcode < D_CODES; dcode++) {
            out_length += (ulg)s->dyn_dtree[dcode].Freq *
                (5L+extra_dbits[dcode]);
        }
        out_length >>= 3;
        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
               s->last_lit, in_length, out_length,
               100L - out_length*100L/in_length));
        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
    }
#endif
    return (s->last_lit == s->lit_bufsize-1);
    /* We avoid equality with lit_bufsize because of wraparound at 64K
     * on 16 bit machines and because stored blocks are restricted to
     * 64K-1 bytes.
     */
}
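
/* Example (added for illustration): a literal byte 'A' is tallied with
 * dist == 0 as s->dyn_ltree['A'].Freq++, while a match of length 6 at
 * distance 100 is tallied as s->dyn_ltree[_length_code[3]+LITERALS+1].Freq++
 * (literal/length symbol 260) and s->dyn_dtree[d_code(99)].Freq++
 * (distance code 13).
 */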

/* ===========================================================================
 * Send the block data compressed using the given Huffman trees
 */
local void compress_block(s, ltree, dtree)
    deflate_state *s;
    const ct_data *ltree; /* literal tree */
    const ct_data *dtree; /* distance tree */
{
    unsigned dist;   /* distance of matched string */
    int lc;          /* match length or unmatched char (if dist == 0) */
    unsigned lx = 0; /* running index in l_buf */
    unsigned code;   /* the code to send */
    int extra;       /* number of extra bits to send */

    if (s->last_lit != 0) do {
        dist = s->d_buf[lx];
        lc = s->l_buf[lx++];
        if (dist == 0) {
            send_code(s, lc, ltree); /* send a literal byte */
            Tracecv(isgraph(lc), (stderr," '%c' ", lc));
        } else {
            /* Here, lc is the match length - MIN_MATCH */
            code = _length_code[lc];
            send_code(s, code+LITERALS+1, ltree); /* send the length code */
            extra = extra_lbits[code];
            if (extra != 0) {
                lc -= base_length[code];
                send_bits(s, lc, extra);      /* send the extra length bits */
            }
            dist--; /* dist is now the match distance - 1 */
            code = d_code(dist);
            Assert (code < D_CODES, "bad d_code");

            send_code(s, code, dtree);        /* send the distance code */
            extra = extra_dbits[code];
            if (extra != 0) {
                dist -= (unsigned)base_dist[code];
                send_bits(s, dist, extra);    /* send the extra distance bits */
            }
        } /* literal or match pair ? */

        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
        Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx,
               "pendingBuf overflow");

    } while (lx < s->last_lit);

    send_code(s, END_BLOCK, ltree);
}

/* ===========================================================================
 * Check if the data type is TEXT or BINARY, using the following algorithm:
 * - TEXT if the two conditions below are satisfied:
 *    a) There are no non-portable control characters belonging to the
 *       "black list" (0..6, 14..25, 28..31).
 *    b) There is at least one printable character belonging to the
 *       "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255).
 * - BINARY otherwise.
 * - The following partially-portable control characters form a
 *   "gray list" that is ignored in this detection algorithm:
 *   (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}).
 * IN assertion: the fields Freq of dyn_ltree are set.
 */
local int detect_data_type(s)
    deflate_state *s;
{
    /* black_mask is the bit mask of black-listed bytes
     * set bits 0..6, 14..25, and 28..31
     * 0xf3ffc07f = binary 11110011111111111100000001111111
     */
    unsigned long black_mask = 0xf3ffc07fUL;
    int n;

    /* Check for non-textual ("black-listed") bytes. */
    for (n = 0; n <= 31; n++, black_mask >>= 1)
        if ((black_mask & 1) && (s->dyn_ltree[n].Freq != 0))
            return Z_BINARY;

    /* Check for textual ("white-listed") bytes. */
    if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0
            || s->dyn_ltree[13].Freq != 0)
        return Z_TEXT;
    for (n = 32; n < LITERALS; n++)
        if (s->dyn_ltree[n].Freq != 0)
            return Z_TEXT;

    /* There are no "black-listed" or "white-listed" bytes:
     * this stream either is empty or has tolerated ("gray-listed") bytes only.
     */
    return Z_BINARY;
}

/* ===========================================================================
 * Reverse the first len bits of a code, using straightforward code (a faster
 * method would use a table)
 * IN assertion: 1 <= len <= 15
 */
local unsigned bi_reverse(code, len)
    unsigned code; /* the value to invert */
    int len;       /* its bit length */
{
    register unsigned res = 0;
    do {
        res |= code & 1;
        code >>= 1, res <<= 1;
    } while (--len > 0);
    return res >> 1;
}
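
/* Worked example (added for illustration): bi_reverse(0x06, 5) reverses the
 * 5-bit pattern 00110 into 01100 and returns 0x0c. tr_static_init() and
 * gen_codes() use this to store codes in the LSB-first bit order that
 * deflate streams expect.
 */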

/* ===========================================================================
 * Flush the bit buffer, keeping at most 7 bits in it.
 */
local void bi_flush(s)
    deflate_state *s;
{
    if (s->bi_valid == 16) {
        put_short(s, s->bi_buf);
        s->bi_buf = 0;
        s->bi_valid = 0;
    } else if (s->bi_valid >= 8) {
        put_byte(s, (Byte)s->bi_buf);
        s->bi_buf >>= 8;
        s->bi_valid -= 8;
    }
}

/* ===========================================================================
 * Flush the bit buffer and align the output on a byte boundary
 */
local void bi_windup(s)
    deflate_state *s;
{
    if (s->bi_valid > 8) {
        put_short(s, s->bi_buf);
    } else if (s->bi_valid > 0) {
        put_byte(s, (Byte)s->bi_buf);
    }
    s->bi_buf = 0;
    s->bi_valid = 0;
#ifdef ZLIB_DEBUG
    s->bits_sent = (s->bits_sent+7) & ~7;
#endif
}