/* trees.c -- output deflated data using Huffman coding
 * Copyright (C) 1995-2017 Jean-loup Gailly
 * detect_data_type() function provided freely by Cosmin Truta, 2006
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 *  ALGORITHM
 *
 *      The "deflation" process uses several Huffman trees. The more
 *      common source values are represented by shorter bit sequences.
 *
 *      Each code tree is stored in a compressed form which is itself
 *      a Huffman encoding of the lengths of all the code strings (in
 *      ascending order by source values). The actual code strings are
 *      reconstructed from the lengths in the inflate process, as described
 *      in the deflate specification.
 *
 *  REFERENCES
 *
 *      Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
 *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
 *
 *      Storer, James A.
 *          Data Compression: Methods and Theory, pp. 49-50.
 *          Computer Science Press, 1988. ISBN 0-7167-8156-5.
 *
 *      Sedgewick, R.
 *          Algorithms, p290.
 *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
 */

/* @(#) $Id$ */

/* #define GEN_TREES_H */

#include "deflate.h"

#ifdef ZLIB_DEBUG
#  include <ctype.h>
#endif

/* ===========================================================================
 * Constants
 */

#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define END_BLOCK 256
/* end of block literal code */

#define REP_3_6      16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */

#define REPZ_3_10    17
/* repeat a zero length 3-10 times (3 bits of repeat count) */

#define REPZ_11_138  18
/* repeat a zero length 11-138 times (7 bits of repeat count) */

local const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
   = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};

local const int extra_dbits[D_CODES] /* extra bits for each distance code */
   = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};

local const int extra_blbits[BL_CODES]/* extra bits for each bit length code */
   = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};

local const uch bl_order[BL_CODES]
   = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
/* The lengths of the bit length codes are sent in order of decreasing
 * probability, to avoid transmitting the lengths for unused bit length codes.
 */

/* ===========================================================================
 * Local data. These are initialized only once.
 */

#define DIST_CODE_LEN  512 /* see definition of array dist_code below */

#if defined(GEN_TREES_H) || !defined(STDC)
/* non ANSI compilers may not accept trees.h */

local ct_data static_ltree[L_CODES+2];
/* The static literal tree. Since the bit lengths are imposed, there is no
 * need for the L_CODES extra codes used during heap construction. However
 * the codes 286 and 287 are needed to build a canonical tree (see _tr_init
 * below).
 */

local ct_data static_dtree[D_CODES];
/* The static distance tree. (Actually a trivial tree since all codes use
 * 5 bits.)
 */

uch _dist_code[DIST_CODE_LEN];
/* Distance codes. The first 256 values correspond to the distances
 * 3 .. 258, the last 256 values correspond to the top 8 bits of
 * the 15 bit distances.
 */

uch _length_code[MAX_MATCH-MIN_MATCH+1];
/* length code for each normalized match length (0 == MIN_MATCH) */

local int base_length[LENGTH_CODES];
/* First normalized length for each code (0 = MIN_MATCH) */

local int base_dist[D_CODES];
/* First normalized distance for each code (0 = distance of 1) */

#else
#  include "trees.h"
#endif /* GEN_TREES_H */

struct static_tree_desc_s {
    const ct_data *static_tree;  /* static tree or NULL */
    const intf *extra_bits;      /* extra bits for each code or NULL */
    int extra_base;              /* base index for extra_bits */
    int elems;                   /* max number of elements in the tree */
    int max_length;              /* max bit length for the codes */
};

local const static_tree_desc static_l_desc =
{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};

local const static_tree_desc static_d_desc =
{static_dtree, extra_dbits, 0,          D_CODES, MAX_BITS};

local const static_tree_desc static_bl_desc =
{(const ct_data *)0, extra_blbits, 0,   BL_CODES, MAX_BL_BITS};

/* ===========================================================================
 * Local (static) routines in this file.
 */

local void tr_static_init OF((void));
local void init_block     OF((deflate_state *s));
local void pqdownheap     OF((deflate_state *s, ct_data *tree, int k));
local void gen_bitlen     OF((deflate_state *s, tree_desc *desc));
local void gen_codes      OF((ct_data *tree, int max_code, ushf *bl_count));
local void build_tree     OF((deflate_state *s, tree_desc *desc));
local void scan_tree      OF((deflate_state *s, ct_data *tree, int max_code));
local void send_tree      OF((deflate_state *s, ct_data *tree, int max_code));
local int  build_bl_tree  OF((deflate_state *s));
local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
                              int blcodes));
local void compress_block OF((deflate_state *s, const ct_data *ltree,
                              const ct_data *dtree));
local int  detect_data_type OF((deflate_state *s));
local unsigned bi_reverse OF((unsigned value, int length));
local void bi_windup      OF((deflate_state *s));
local void bi_flush       OF((deflate_state *s));

#ifdef GEN_TREES_H
local void gen_trees_header OF((void));
#endif

#ifndef ZLIB_DEBUG
#  define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
   /* Send a code of the given tree. c and tree must not have side effects */

#else /* !ZLIB_DEBUG */
#  define send_code(s, c, tree) \
     { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
       send_bits(s, tree[c].Code, tree[c].Len); }
#endif

/* ===========================================================================
 * Output a short LSB first on the stream.
 * IN assertion: there is enough room in pendingBuf.
 */
#define put_short(s, w) { \
    put_byte(s, (uch)((w) & 0xff)); \
    put_byte(s, (uch)((ush)(w) >> 8)); \
}

/* ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
 */
#ifdef ZLIB_DEBUG
local void send_bits OF((deflate_state *s, int value, int length));

local void send_bits(
    deflate_state *s,
    int value,
    int length)
{
    Tracevv((stderr," l %2d v %4x ", length, value));
    Assert(length > 0 && length <= 15, "invalid length");
    s->bits_sent += (ulg)length;

    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
     * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
     * unused bits in value.
     */
    if (s->bi_valid > (int)Buf_size - length) {
        s->bi_buf |= (ush)value << s->bi_valid;
        put_short(s, s->bi_buf);
        s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
        s->bi_valid += length - Buf_size;
    } else {
        s->bi_buf |= (ush)value << s->bi_valid;
        s->bi_valid += length;
    }
}
#else /* !ZLIB_DEBUG */

#define send_bits(s, value, length) \
{ int len = length;\
  if (s->bi_valid > (int)Buf_size - len) {\
    int val = (int)value;\
    s->bi_buf |= (ush)val << s->bi_valid;\
    put_short(s, s->bi_buf);\
    s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
    s->bi_valid += len - Buf_size;\
  } else {\
    s->bi_buf |= (ush)(value) << s->bi_valid;\
    s->bi_valid += len;\
  }\
}
#endif /* ZLIB_DEBUG */
/* the arguments must not have side effects */
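
/* Illustrative sketch (editorial note, not part of the original source):
 * with Buf_size == 16, bits accumulate LSB-first in bi_buf. Assuming
 * bi_valid == 14 and a call send_bits(s, 0x5, 3): only 2 bits fit, so the
 * low 2 bits of the value land in the top of bi_buf, the full 16-bit buffer
 * is flushed with put_short(), and the remaining high bit of the value is
 * left in bi_buf with bi_valid == 1.
 */
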
/* ===========================================================================
 * Initialize the various 'constant' tables.
 */
local void tr_static_init()
{
#if defined(GEN_TREES_H) || !defined(STDC)
    static int static_init_done = 0;
    int n;        /* iterates over tree elements */
    int bits;     /* bit counter */
    int length;   /* length value */
    int code;     /* code value */
    int dist;     /* distance index */
    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    if (static_init_done) return;

    /* For some embedded targets, global variables are not initialized: */
#ifdef NO_INIT_GLOBAL_POINTERS
    static_l_desc.static_tree = static_ltree;
    static_l_desc.extra_bits = extra_lbits;
    static_d_desc.static_tree = static_dtree;
    static_d_desc.extra_bits = extra_dbits;
    static_bl_desc.extra_bits = extra_blbits;
#endif

    /* Initialize the mapping length (0..255) -> length code (0..28) */
    length = 0;
    for (code = 0; code < LENGTH_CODES-1; code++) {
        base_length[code] = length;
        for (n = 0; n < (1<<extra_lbits[code]); n++) {
            _length_code[length++] = (uch)code;
        }
    }
    Assert (length == 256, "tr_static_init: length != 256");
    /* Note that the length 255 (match length 258) can be represented
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
    _length_code[length-1] = (uch)code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0 ; code < 16; code++) {
        base_dist[code] = dist;
        for (n = 0; n < (1<<extra_dbits[code]); n++) {
            _dist_code[dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: dist != 256");
    dist >>= 7; /* from now on, all distances are divided by 128 */
    for ( ; code < D_CODES; code++) {
        base_dist[code] = dist << 7;
        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
            _dist_code[256 + dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: 256+dist != 512");

    /* Construct the codes of the static literal tree */
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
    n = 0;
    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
    /* Codes 286 and 287 do not exist, but we must include them in the
     * tree construction to get a canonical Huffman tree (longest code
     * all ones)
     */
    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);

    /* The static distance tree is trivial: */
    for (n = 0; n < D_CODES; n++) {
        static_dtree[n].Len = 5;
        static_dtree[n].Code = bi_reverse((unsigned)n, 5);
    }
    static_init_done = 1;

#  ifdef GEN_TREES_H
    gen_trees_header();
#  endif
#endif /* defined(GEN_TREES_H) || !defined(STDC) */
}
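
/* Illustrative note (editorial, not part of the original source): with the
 * tables built above, a match length of 10 normalizes to
 * _length_code[10-MIN_MATCH] == _length_code[7] == 7, i.e. literal/length
 * symbol 257+7 = 264 with no extra bits, while lengths 11 and 12 both map to
 * code 8 (symbol 265) and are told apart by one extra bit, since
 * extra_lbits[8] == 1 and base_length[8] == 8.
 */
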
/* ===========================================================================
 * Generate the file trees.h describing the static trees.
 */
#ifdef GEN_TREES_H
#  ifndef ZLIB_DEBUG
#    include <stdio.h>
#  endif

#  define SEPARATOR(i, last, width) \
      ((i) == (last)? "\n};\n\n" : \
       ((i) % (width) == (width)-1 ? ",\n" : ", "))

void gen_trees_header()
{
    FILE *header = fopen("trees.h", "w");
    int i;

    Assert (header != NULL, "Can't open trees.h");
    fprintf(header,
            "/* header created automatically with -DGEN_TREES_H */\n\n");

    fprintf(header, "local const ct_data static_ltree[L_CODES+2] = {\n");
    for (i = 0; i < L_CODES+2; i++) {
        fprintf(header, "{{%3u},{%3u}}%s", static_ltree[i].Code,
                static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5));
    }

    fprintf(header, "local const ct_data static_dtree[D_CODES] = {\n");
    for (i = 0; i < D_CODES; i++) {
        fprintf(header, "{{%2u},{%2u}}%s", static_dtree[i].Code,
                static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5));
    }

    fprintf(header, "const uch ZLIB_INTERNAL _dist_code[DIST_CODE_LEN] = {\n");
    for (i = 0; i < DIST_CODE_LEN; i++) {
        fprintf(header, "%2u%s", _dist_code[i],
                SEPARATOR(i, DIST_CODE_LEN-1, 20));
    }

    fprintf(header,
        "const uch ZLIB_INTERNAL _length_code[MAX_MATCH-MIN_MATCH+1]= {\n");
    for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) {
        fprintf(header, "%2u%s", _length_code[i],
                SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20));
    }

    fprintf(header, "local const int base_length[LENGTH_CODES] = {\n");
    for (i = 0; i < LENGTH_CODES; i++) {
        fprintf(header, "%1u%s", base_length[i],
                SEPARATOR(i, LENGTH_CODES-1, 20));
    }

    fprintf(header, "local const int base_dist[D_CODES] = {\n");
    for (i = 0; i < D_CODES; i++) {
        fprintf(header, "%5u%s", base_dist[i],
                SEPARATOR(i, D_CODES-1, 10));
    }

    fclose(header);
}
#endif /* GEN_TREES_H */

/* ===========================================================================
 * Initialize the tree data structures for a new zlib stream.
 */
void ZLIB_INTERNAL _tr_init(
    deflate_state *s)
{
    tr_static_init();

    s->l_desc.dyn_tree = s->dyn_ltree;
    s->l_desc.stat_desc = &static_l_desc;

    s->d_desc.dyn_tree = s->dyn_dtree;
    s->d_desc.stat_desc = &static_d_desc;

    s->bl_desc.dyn_tree = s->bl_tree;
    s->bl_desc.stat_desc = &static_bl_desc;

    s->bi_buf = 0;
    s->bi_valid = 0;
#ifdef ZLIB_DEBUG
    s->compressed_len = 0L;
    s->bits_sent = 0L;
#endif

    /* Initialize the first block of the first file: */
    init_block(s);
}

/* ===========================================================================
 * Initialize a new block.
 */
local void init_block(
    deflate_state *s)
{
    int n; /* iterates over tree elements */

    /* Initialize the trees. */
    for (n = 0; n < L_CODES;  n++) s->dyn_ltree[n].Freq = 0;
    for (n = 0; n < D_CODES;  n++) s->dyn_dtree[n].Freq = 0;
    for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;

    s->dyn_ltree[END_BLOCK].Freq = 1;
    s->opt_len = s->static_len = 0L;
    s->last_lit = s->matches = 0;
}

#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */

/* ===========================================================================
 * Remove the smallest element from the heap and recreate the heap with
 * one less element. Updates heap and heap_len.
 */
#define pqremove(s, tree, top) \
{\
    top = s->heap[SMALLEST]; \
    s->heap[SMALLEST] = s->heap[s->heap_len--]; \
    pqdownheap(s, tree, SMALLEST); \
}

/* ===========================================================================
 * Compares two subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length.
 */
#define smaller(tree, n, m, depth) \
   (tree[n].Freq < tree[m].Freq || \
   (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))

/* ===========================================================================
 * Restore the heap property by moving down the tree starting at node k,
 * exchanging a node with the smallest of its two sons if necessary, stopping
 * when the heap property is re-established (each father smaller than its
 * two sons).
 */
local void pqdownheap(
    deflate_state *s,
    ct_data *tree,
    int k)
{
    int v = s->heap[k];
    int j = k << 1;  /* left son of k */
    while (j <= s->heap_len) {
        /* Set j to the smallest of the two sons: */
        if (j < s->heap_len &&
            smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
            j++;
        }
        /* Exit if v is smaller than both sons */
        if (smaller(tree, v, s->heap[j], s->depth)) break;

        /* Exchange v with the smallest son */
        s->heap[k] = s->heap[j];  k = j;

        /* And continue down the tree, setting j to the left son of k */
        j <<= 1;
    }
    s->heap[k] = v;
}

/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 *    above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 *     array bl_count contains the frequencies for each bit length.
 *     The length opt_len is updated; static_len is also updated if stree is
 *     not null.
 */
local void gen_bitlen(
    deflate_state *s,
    tree_desc *desc)
{
    ct_data *tree        = desc->dyn_tree;
    int max_code         = desc->max_code;
    const ct_data *stree = desc->stat_desc->static_tree;
    const intf *extra    = desc->stat_desc->extra_bits;
    int base             = desc->stat_desc->extra_base;
    int max_length       = desc->stat_desc->max_length;
    int h;              /* heap index */
    int n, m;           /* iterate over the tree elements */
    int bits;           /* bit length */
    int xbits;          /* extra bits */
    ush f;              /* frequency */
    int overflow = 0;   /* number of elements with bit length too large */

    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;

    /* In a first pass, compute the optimal bit lengths (which may
     * overflow in the case of the bit length tree).
     */
    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */

    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
        n = s->heap[h];
        bits = tree[tree[n].Dad].Len + 1;
        if (bits > max_length) bits = max_length, overflow++;
        tree[n].Len = (ush)bits;
        /* We overwrite tree[n].Dad which is no longer needed */

        if (n > max_code) continue; /* not a leaf node */

        s->bl_count[bits]++;
        xbits = 0;
        if (n >= base) xbits = extra[n-base];
        f = tree[n].Freq;
        s->opt_len += (ulg)f * (unsigned)(bits + xbits);
        if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits);
    }
    if (overflow == 0) return;

    Tracev((stderr,"\nbit length overflow\n"));
    /* This happens for example on obj2 and pic of the Calgary corpus */

    /* Find the first bit length which could increase: */
    do {
        bits = max_length-1;
        while (s->bl_count[bits] == 0) bits--;
        s->bl_count[bits]--;      /* move one leaf down the tree */
        s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
        s->bl_count[max_length]--;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
         */
        overflow -= 2;
    } while (overflow > 0);

    /* Now recompute all bit lengths, scanning in increasing frequency.
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
     * lengths instead of fixing only the wrong ones. This idea is taken
     * from 'ar' written by Haruhiko Okumura.)
     */
    for (bits = max_length; bits != 0; bits--) {
        n = s->bl_count[bits];
        while (n != 0) {
            m = s->heap[--h];
            if (m > max_code) continue;
            if ((unsigned) tree[m].Len != (unsigned) bits) {
                Tracev((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
                s->opt_len += ((ulg)bits - tree[m].Len) * tree[m].Freq;
                tree[m].Len = (ush)bits;
            }
            n--;
        }
    }
}

/* ===========================================================================
 * Generate the codes for a given tree and bit counts (which need not be
 * optimal).
 * IN assertion: the array bl_count contains the bit length statistics for
 * the given tree and the field len is set for all tree elements.
 * OUT assertion: the field code is set for all tree elements of non
 *     zero code length.
 */
local void gen_codes (
    ct_data *tree,
    int max_code,
    ushf *bl_count)
{
    ush next_code[MAX_BITS+1]; /* next code value for each bit length */
    unsigned code = 0;         /* running code value */
    int bits;                  /* bit index */
    int n;                     /* code index */

    /* The distribution counts are first used to generate the code values
     * without bit reversal.
     */
    for (bits = 1; bits <= MAX_BITS; bits++) {
        code = (code + bl_count[bits-1]) << 1;
        next_code[bits] = (ush)code;
    }
    /* Check that the bit counts in bl_count are consistent. The last code
     * must be all ones.
     */
    Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
            "inconsistent bit counts");
    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

    for (n = 0;  n <= max_code; n++) {
        int len = tree[n].Len;
        if (len == 0) continue;
        /* Now reverse the bits */
        tree[n].Code = (ush)bi_reverse(next_code[len]++, len);

        Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
             n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
    }
}
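
/* Worked example (editorial, not part of the original source), in the spirit
 * of the one in RFC 1951: for eight symbols A..H with code lengths
 * 3,3,3,3,3,2,4,4, the counts are bl_count[2]=1, bl_count[3]=5, bl_count[4]=2,
 * which gives next_code[2]=0, next_code[3]=2, next_code[4]=14. Assigning
 * codes in symbol order then yields F=00, A=010, B=011, C=100, D=101, E=110,
 * G=1110, H=1111 (before the bit reversal applied above for LSB-first output).
 */
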
/* ===========================================================================
 * Construct one Huffman tree and assign the code bit strings and lengths.
 * Update the total bit length for the current block.
 * IN assertion: the field freq is set for all tree elements.
 * OUT assertions: the fields len and code are set to the optimal bit length
 *     and corresponding code. The length opt_len is updated; static_len is
 *     also updated if stree is not null. The field max_code is set.
 */
local void build_tree(
    deflate_state *s,
    tree_desc *desc)
{
    ct_data *tree        = desc->dyn_tree;
    const ct_data *stree = desc->stat_desc->static_tree;
    int elems            = desc->stat_desc->elems;
    int n, m;          /* iterate over heap elements */
    int max_code = -1; /* largest code with non zero frequency */
    int node;          /* new node being created */

    /* Construct the initial heap, with least frequent element in
     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
     * heap[0] is not used.
     */
    s->heap_len = 0, s->heap_max = HEAP_SIZE;

    for (n = 0; n < elems; n++) {
        if (tree[n].Freq != 0) {
            s->heap[++(s->heap_len)] = max_code = n;
            s->depth[n] = 0;
        } else {
            tree[n].Len = 0;
        }
    }

    /* The pkzip format requires that at least one distance code exists,
     * and that at least one bit should be sent even if there is only one
     * possible code. So to avoid special checks later on we force at least
     * two codes of non zero frequency.
     */
    while (s->heap_len < 2) {
        node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
        tree[node].Freq = 1;
        s->depth[node] = 0;
        s->opt_len--; if (stree) s->static_len -= stree[node].Len;
        /* node is 0 or 1 so it does not have extra bits */
    }
    desc->max_code = max_code;

    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
     * establish sub-heaps of increasing lengths:
     */
    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);

    /* Construct the Huffman tree by repeatedly combining the least two
     * frequent nodes.
     */
    node = elems;              /* next internal node of the tree */
    do {
        pqremove(s, tree, n);  /* n = node of least frequency */
        m = s->heap[SMALLEST]; /* m = node of next least frequency */

        s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
        s->heap[--(s->heap_max)] = m;

        /* Create a new node father of n and m */
        tree[node].Freq = tree[n].Freq + tree[m].Freq;
        s->depth[node] = (uch)((s->depth[n] >= s->depth[m] ?
                                s->depth[n] : s->depth[m]) + 1);
        tree[n].Dad = tree[m].Dad = (ush)node;
#ifdef DUMP_BL_TREE
        if (tree == s->bl_tree) {
            fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
                    node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
        }
#endif
        /* and insert the new node in the heap */
        s->heap[SMALLEST] = node++;
        pqdownheap(s, tree, SMALLEST);

    } while (s->heap_len >= 2);

    s->heap[--(s->heap_max)] = s->heap[SMALLEST];

    /* At this point, the fields freq and dad are set. We can now
     * generate the bit lengths.
     */
    gen_bitlen(s, (tree_desc *)desc);

    /* The field len is now set, we can generate the bit codes */
    gen_codes ((ct_data *)tree, max_code, s->bl_count);
}

/* ===========================================================================
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree.
 */
local void scan_tree (
    deflate_state *s,
    ct_data *tree,
    int max_code)
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    if (nextlen == 0) max_count = 138, min_count = 3;
    tree[max_code+1].Len = (ush)0xffff; /* guard */

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            s->bl_tree[curlen].Freq += count;
        } else if (curlen != 0) {
            if (curlen != prevlen) s->bl_tree[curlen].Freq++;
            s->bl_tree[REP_3_6].Freq++;
        } else if (count <= 10) {
            s->bl_tree[REPZ_3_10].Freq++;
        } else {
            s->bl_tree[REPZ_11_138].Freq++;
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}

/* ===========================================================================
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree.
 */
local void send_tree (
    deflate_state *s,
    ct_data *tree,
    int max_code)
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    /* tree[max_code+1].Len = -1; */  /* guard already set */
    if (nextlen == 0) max_count = 138, min_count = 3;

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);

        } else if (curlen != 0) {
            if (curlen != prevlen) {
                send_code(s, curlen, s->bl_tree); count--;
            }
            Assert(count >= 3 && count <= 6, " 3_6?");
            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);

        } else if (count <= 10) {
            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);

        } else {
            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}
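
/* Illustrative note (editorial, not part of the original source): with this
 * run-length coding, a run of 5 consecutive zero code lengths is sent as
 * REPZ_3_10 with 3 extra bits holding 5-3 = 2, and a run of 138 zeros as
 * REPZ_11_138 with 7 extra bits holding 138-11 = 127. A run of four copies
 * of a non-zero length (different from the previous one) is sent as the
 * length code itself followed by REP_3_6 with 2 extra bits set to 0, i.e.
 * "repeat 3 more times".
 */
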
/* ===========================================================================
 * Construct the Huffman tree for the bit lengths and return the index in
 * bl_order of the last bit length code to send.
 */
local int build_bl_tree(
    deflate_state *s)
{
    int max_blindex;  /* index of last bit length code of non zero freq */

    /* Determine the bit length frequencies for literal and distance trees */
    scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
    scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);

    /* Build the bit length tree: */
    build_tree(s, (tree_desc *)(&(s->bl_desc)));
    /* opt_len now includes the length of the tree representations, except
     * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
     */

    /* Determine the number of bit length codes to send. The pkzip format
     * requires that at least 4 bit length codes be sent. (appnote.txt says
     * 3 but the actual value used is 4.)
     */
    for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
        if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
    }
    /* Update opt_len to include the bit length tree and counts */
    s->opt_len += 3*((ulg)max_blindex+1) + 5+5+4;
    Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
            s->opt_len, s->static_len));

    return max_blindex;
}

/* ===========================================================================
 * Send the header for a block using dynamic Huffman trees: the counts, the
 * lengths of the bit length codes, the literal tree and the distance tree.
 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
 */
local void send_all_trees(
    deflate_state *s,
    int lcodes,
    int dcodes,
    int blcodes)
{
    int rank;                    /* index in bl_order */

    Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
    Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
            "too many codes");
    Tracev((stderr, "\nbl counts: "));
    send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
    send_bits(s, dcodes-1,   5);
    send_bits(s, blcodes-4,  4); /* not -3 as stated in appnote.txt */
    for (rank = 0; rank < blcodes; rank++) {
        Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
        send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
    }
    Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
    Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
    Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
}
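
/* Illustrative note (editorial, not part of the original source): the three
 * counts above are the HLIT, HDIST and HCLEN fields of RFC 1951. For
 * example, if all 286 literal/length codes, 30 distance codes and all 19 bit
 * length codes are used, the header starts with 29 in 5 bits, 29 in 5 bits
 * and 15 in 4 bits, followed by 19 code lengths of 3 bits each in bl_order.
 */
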
/* ===========================================================================
 * Send a stored block
 */
void ZLIB_INTERNAL _tr_stored_block(
    deflate_state *s,
    charf *buf,
    ulg stored_len,
    int last)
{
    send_bits(s, (STORED_BLOCK<<1)+last, 3);    /* send block type */
    bi_windup(s);        /* align on byte boundary */
    put_short(s, (ush)stored_len);
    put_short(s, (ush)~stored_len);
    zmemcpy(s->pending_buf + s->pending, (Bytef *)buf, stored_len);
    s->pending += stored_len;
#ifdef ZLIB_DEBUG
    s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
    s->compressed_len += (stored_len + 4) << 3;
    s->bits_sent += 2*16;
    s->bits_sent += stored_len<<3;
#endif
}
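
/* Illustrative note (editorial, not part of the original source): after the
 * 3-bit block header and byte alignment, a stored block carries LEN and its
 * one's complement NLEN as two little-endian 16-bit words. For a 1000-byte
 * block, for instance, LEN would be 0x03E8 and NLEN 0xFC17, followed by the
 * 1000 raw bytes; this is where the "stored_len + 4" byte overhead above
 * comes from.
 */
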
/* ===========================================================================
 * Flush the bits in the bit buffer to pending output (leaves at most 7 bits)
 */
void ZLIB_INTERNAL _tr_flush_bits(
    deflate_state *s)
{
    bi_flush(s);
}

/* ===========================================================================
 * Send one empty static block to give enough lookahead for inflate.
 * This takes 10 bits, of which 7 may remain in the bit buffer.
 */
void ZLIB_INTERNAL _tr_align(
    deflate_state *s)
{
    send_bits(s, STATIC_TREES<<1, 3);
    send_code(s, END_BLOCK, static_ltree);
#ifdef ZLIB_DEBUG
    s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
#endif
    bi_flush(s);
}

/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and write out the encoded block.
 */
void ZLIB_INTERNAL _tr_flush_block(
    deflate_state *s,
    charf *buf,
    ulg stored_len,
    int last)
{
    ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
    int max_blindex = 0;  /* index of last bit length code of non zero freq */

    /* Build the Huffman trees unless a stored block is forced */
    if (s->level > 0) {

        /* Check if the file is binary or text */
        if (s->strm->data_type == Z_UNKNOWN)
            s->strm->data_type = detect_data_type(s);

        /* Construct the literal and distance trees */
        build_tree(s, (tree_desc *)(&(s->l_desc)));
        Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));

        build_tree(s, (tree_desc *)(&(s->d_desc)));
        Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));
        /* At this point, opt_len and static_len are the total bit lengths of
         * the compressed block data, excluding the tree representations.
         */

        /* Build the bit length tree for the above two trees, and get the
         * index in bl_order of the last bit length code to send.
         */
        max_blindex = build_bl_tree(s);

        /* Determine the best encoding. Compute the block lengths in bytes. */
        opt_lenb = (s->opt_len+3+7)>>3;
        static_lenb = (s->static_len+3+7)>>3;

        Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
                opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
                s->last_lit));

        if (static_lenb <= opt_lenb) opt_lenb = static_lenb;

    } else {
        Assert(buf != (char*)0, "lost buf");
        opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
    }

#ifdef FORCE_STORED
    if (buf != (char*)0) { /* force stored block */
#else
    if (stored_len+4 <= opt_lenb && buf != (char*)0) {
                       /* 4: two words for the lengths */
#endif
        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
         * Otherwise we can't have processed more than WSIZE input bytes since
         * the last block flush, because compression would have been
         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
         * transform a block into a stored block.
         */
        _tr_stored_block(s, buf, stored_len, last);

#ifdef FORCE_STATIC
    } else if (static_lenb >= 0) { /* force static trees */
#else
    } else if (s->strategy == Z_FIXED || static_lenb == opt_lenb) {
#endif
        send_bits(s, (STATIC_TREES<<1)+last, 3);
        compress_block(s, (const ct_data *)static_ltree,
                       (const ct_data *)static_dtree);
#ifdef ZLIB_DEBUG
        s->compressed_len += 3 + s->static_len;
#endif
    } else {
        send_bits(s, (DYN_TREES<<1)+last, 3);
        send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
                       max_blindex+1);
        compress_block(s, (const ct_data *)s->dyn_ltree,
                       (const ct_data *)s->dyn_dtree);
#ifdef ZLIB_DEBUG
        s->compressed_len += 3 + s->opt_len;
#endif
    }
    Assert (s->compressed_len == s->bits_sent, "bad compressed size");
    /* The above check is made mod 2^32, for files larger than 512 MB
     * and uLong implemented on 32 bits.
     */
    init_block(s);

    if (last) {
        bi_windup(s);
#ifdef ZLIB_DEBUG
        s->compressed_len += 7;  /* align on byte boundary */
#endif
    }
    Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
           s->compressed_len-7*last));
}

/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
int ZLIB_INTERNAL _tr_tally (
    deflate_state *s,
    unsigned dist,
    unsigned lc)
{
    s->d_buf[s->last_lit] = (ush)dist;
    s->l_buf[s->last_lit++] = (uch)lc;
    if (dist == 0) {
        /* lc is the unmatched char */
        s->dyn_ltree[lc].Freq++;
    } else {
        s->matches++;
        /* Here, lc is the match length - MIN_MATCH */
        dist--;             /* dist = match distance - 1 */
        Assert((ush)dist < (ush)MAX_DIST(s) &&
               (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
               (ush)d_code(dist) < (ush)D_CODES,  "_tr_tally: bad match");

        s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
        s->dyn_dtree[d_code(dist)].Freq++;
    }

#ifdef TRUNCATE_BLOCK
    /* Try to guess if it is profitable to stop the current block here */
    if ((s->last_lit & 0x1fff) == 0 && s->level > 2) {
        /* Compute an upper bound for the compressed length */
        ulg out_length = (ulg)s->last_lit*8L;
        ulg in_length = (ulg)((long)s->strstart - s->block_start);
        int dcode;
        for (dcode = 0; dcode < D_CODES; dcode++) {
            out_length += (ulg)s->dyn_dtree[dcode].Freq *
                (5L+extra_dbits[dcode]);
        }
        out_length >>= 3;
        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
               s->last_lit, in_length, out_length,
               100L - out_length*100L/in_length));
        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
    }
#endif
    return (s->last_lit == s->lit_bufsize-1);
    /* We avoid equality with lit_bufsize because of wraparound at 64K
     * on 16 bit machines and because stored blocks are restricted to
     * 64K-1 bytes.
     */
}

/* ===========================================================================
 * Send the block data compressed using the given Huffman trees
 */
local void compress_block(
    deflate_state *s,
    const ct_data *ltree,
    const ct_data *dtree)
{
    unsigned dist;      /* distance of matched string */
    int lc;             /* match length or unmatched char (if dist == 0) */
    unsigned lx = 0;    /* running index in l_buf */
    unsigned code;      /* the code to send */
    int extra;          /* number of extra bits to send */

    if (s->last_lit != 0) do {
        dist = s->d_buf[lx];
        lc = s->l_buf[lx++];
        if (dist == 0) {
            send_code(s, lc, ltree); /* send a literal byte */
            Tracecv(isgraph(lc), (stderr," '%c' ", lc));
        } else {
            /* Here, lc is the match length - MIN_MATCH */
            code = _length_code[lc];
            send_code(s, code+LITERALS+1, ltree); /* send the length code */
            extra = extra_lbits[code];
            if (extra != 0) {
                lc -= base_length[code];
                send_bits(s, lc, extra);       /* send the extra length bits */
            }
            dist--; /* dist is now the match distance - 1 */
            code = d_code(dist);
            Assert (code < D_CODES, "bad d_code");

            send_code(s, code, dtree);       /* send the distance code */
            extra = extra_dbits[code];
            if (extra != 0) {
                dist -= (unsigned)base_dist[code];
                send_bits(s, dist, extra);   /* send the extra distance bits */
            }
        } /* literal or match pair ? */

        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
        Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx,
               "pendingBuf overflow");

    } while (lx < s->last_lit);

    send_code(s, END_BLOCK, ltree);
}
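
/* Illustrative note (editorial, not part of the original source): for a match
 * recorded by _tr_tally with distance 1 and length 3 (stored as dist == 1,
 * lc == 0), the loop above sends literal/length symbol 257 (length code 0,
 * no extra bits) from ltree, then distance code 0 (d_code(0), no extra bits)
 * from dtree. Longer matches and larger distances add the extra bits given
 * by extra_lbits[] and extra_dbits[].
 */
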
/* ===========================================================================
 * Check if the data type is TEXT or BINARY, using the following algorithm:
 * - TEXT if the two conditions below are satisfied:
 *    a) There are no non-portable control characters belonging to the
 *       "black list" (0..6, 14..25, 28..31).
 *    b) There is at least one printable character belonging to the
 *       "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255).
 * - BINARY otherwise.
 * - The following partially-portable control characters form a
 *   "gray list" that is ignored in this detection algorithm:
 *   (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}).
 * IN assertion: the fields Freq of dyn_ltree are set.
 */
local int detect_data_type(
    deflate_state *s)
{
    /* black_mask is the bit mask of black-listed bytes
     * set bits 0..6, 14..25, and 28..31
     * 0xf3ffc07f = binary 11110011111111111100000001111111
     */
    unsigned long black_mask = 0xf3ffc07fUL;
    int n;

    /* Check for non-textual ("black-listed") bytes. */
    for (n = 0; n <= 31; n++, black_mask >>= 1)
        if ((black_mask & 1) && (s->dyn_ltree[n].Freq != 0))
            return Z_BINARY;

    /* Check for textual ("white-listed") bytes. */
    if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0
            || s->dyn_ltree[13].Freq != 0)
        return Z_TEXT;
    for (n = 32; n < LITERALS; n++)
        if (s->dyn_ltree[n].Freq != 0)
            return Z_TEXT;

    /* There are no "black-listed" or "white-listed" bytes:
     * this stream either is empty or has tolerated ("gray-listed") bytes only.
     */
    return Z_BINARY;
}

/* ===========================================================================
 * Reverse the first len bits of a code, using straightforward code (a faster
 * method would use a table)
 * IN assertion: 1 <= len <= 15
 */
local unsigned bi_reverse(
    unsigned code,
    int len)
{
    register unsigned res = 0;
    do {
        res |= code & 1;
        code >>= 1, res <<= 1;
    } while (--len > 0);
    return res >> 1;
}
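
/* Illustrative note (editorial, not part of the original source): for
 * example, bi_reverse(0x03, 5) reverses the 5-bit value 00011 into 11000,
 * i.e. 0x18. The reversal is needed because Huffman codes are defined
 * most-significant-bit first, while send_bits() emits the bit buffer
 * least-significant-bit first.
 */
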
/* ===========================================================================
 * Flush the bit buffer, keeping at most 7 bits in it.
 */
local void bi_flush(
    deflate_state *s)
{
    if (s->bi_valid == 16) {
        put_short(s, s->bi_buf);
        s->bi_buf = 0;
        s->bi_valid = 0;
    } else if (s->bi_valid >= 8) {
        put_byte(s, (Byte)s->bi_buf);
        s->bi_buf >>= 8;
        s->bi_valid -= 8;
    }
}

/* ===========================================================================
 * Flush the bit buffer and align the output on a byte boundary
 */
local void bi_windup(
    deflate_state *s)
{
    if (s->bi_valid > 8) {
        put_short(s, s->bi_buf);
    } else if (s->bi_valid > 0) {
        put_byte(s, (Byte)s->bi_buf);
    }
    s->bi_buf = 0;
    s->bi_valid = 0;
#ifdef ZLIB_DEBUG
    s->bits_sent = (s->bits_sent+7) & ~7;
#endif
}