use core;
use core::cmp::min;

use super::super::alloc;
use super::backward_references::kHashMul32;
use super::bit_cost::BitsEntropy;
use super::brotli_bit_stream::{BrotliBuildAndStoreHuffmanTreeFast, BrotliStoreHuffmanTree};
use super::entropy_encode::{
    BrotliConvertBitDepthsToSymbols, BrotliCreateHuffmanTree, HuffmanTree,
};
use super::static_dict::{
    FindMatchLengthWithLimit, BROTLI_UNALIGNED_LOAD32, BROTLI_UNALIGNED_LOAD64,
    BROTLI_UNALIGNED_STORE64,
};
use super::util::{floatX, Log2FloorNonZero};

static kCompressFragmentTwoPassBlockSize: usize = (1i32 << 17) as usize;

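/// Emits the insert-length prefix code for `insertlen` into the next slot of
/// `commands` and advances the slice. Every command word packs its prefix code
/// into the low 8 bits and the extra bits into the upper 24 (`code | extra << 8`),
/// the layout that `StoreCommands` later unpacks. Returns the number of command
/// slots written (always 1).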
fn EmitInsertLen(insertlen: u32, commands: &mut &mut [u32]) -> usize {
    if insertlen < 6u32 {
        (*commands)[0] = insertlen;
    } else if insertlen < 130u32 {
        let tail: u32 = insertlen.wrapping_sub(2);
        let nbits: u32 = Log2FloorNonZero(tail as u64).wrapping_sub(1);
        let prefix: u32 = tail >> nbits;
        let inscode: u32 = (nbits << 1).wrapping_add(prefix).wrapping_add(2);
        let extra: u32 = tail.wrapping_sub(prefix << nbits);
        (*commands)[0] = inscode | extra << 8;
    } else if insertlen < 2114u32 {
        let tail: u32 = insertlen.wrapping_sub(66);
        let nbits: u32 = Log2FloorNonZero(tail as u64);
        let code: u32 = nbits.wrapping_add(10);
        let extra: u32 = tail.wrapping_sub(1u32 << nbits);
        (*commands)[0] = code | extra << 8;
    } else if insertlen < 6210u32 {
        let extra: u32 = insertlen.wrapping_sub(2114);
        (*commands)[0] = 21u32 | extra << 8;
    } else if insertlen < 22594u32 {
        let extra: u32 = insertlen.wrapping_sub(6210);
        (*commands)[0] = 22u32 | extra << 8;
    } else {
        let extra: u32 = insertlen.wrapping_sub(22594);
        (*commands)[0] = 23u32 | extra << 8;
    }
    let remainder = core::mem::take(commands);
    let _ = core::mem::replace(commands, &mut remainder[1..]);
    1
}

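/// Emits the distance prefix code for `distance` (biased by 3) and advances
/// the slice. Distance codes start at 80 in the 128-symbol command alphabet
/// used by this encoder. Returns the number of command slots written.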
fn EmitDistance(distance: u32, commands: &mut &mut [u32]) -> usize {
    let d: u32 = distance.wrapping_add(3);
    let nbits: u32 = Log2FloorNonZero(d as u64).wrapping_sub(1);
    let prefix: u32 = d >> nbits & 1u32;
    let offset: u32 = (2u32).wrapping_add(prefix) << nbits;
    let distcode: u32 = (2u32)
        .wrapping_mul(nbits.wrapping_sub(1))
        .wrapping_add(prefix)
        .wrapping_add(80);
    let extra: u32 = d.wrapping_sub(offset);
    (*commands)[0] = distcode | extra << 8;
    let remainder = core::mem::take(commands);
    let _ = core::mem::replace(commands, &mut remainder[1..]);
    1
}

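/// Emits the copy-length code for a copy that implicitly reuses the last
/// distance. Lengths below 72 fit in one command; longer copies additionally
/// emit code 64, the explicit last-distance code. Returns 1 or 2 accordingly.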
fn EmitCopyLenLastDistance(copylen: usize, commands: &mut &mut [u32]) -> usize {
    if copylen < 12usize {
        (*commands)[0] = copylen.wrapping_add(20) as u32;
        let remainder = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder[1..]);
        1
    } else if copylen < 72usize {
        let tail: usize = copylen.wrapping_sub(8);
        let nbits: usize = Log2FloorNonZero(tail as u64).wrapping_sub(1) as usize;
        let prefix: usize = tail >> nbits;
        let code: usize = (nbits << 1).wrapping_add(prefix).wrapping_add(28);
        let extra: usize = tail.wrapping_sub(prefix << nbits);
        (*commands)[0] = (code | extra << 8) as u32;
        let remainder = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder[1..]);
        1
    } else if copylen < 136usize {
        let tail: usize = copylen.wrapping_sub(8);
        let code: usize = (tail >> 5).wrapping_add(54);
        let extra: usize = tail & 31usize;
        (*commands)[0] = (code | extra << 8) as u32;
        let remainder = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder[1..]);
        (*commands)[0] = 64u32;
        let remainder2 = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder2[1..]);
        2
    } else if copylen < 2120usize {
        let tail: usize = copylen.wrapping_sub(72);
        let nbits: usize = Log2FloorNonZero(tail as u64) as usize;
        let code: usize = nbits.wrapping_add(52);
        let extra: usize = tail.wrapping_sub(1usize << nbits);
        (*commands)[0] = (code | extra << 8) as u32;
        let remainder = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder[1..]);
        (*commands)[0] = 64u32;
        let remainder2 = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder2[1..]);
        2
    } else {
        let extra: usize = copylen.wrapping_sub(2120);
        (*commands)[0] = (63usize | extra << 8) as u32;
        let remainder = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder[1..]);
        (*commands)[0] = 64u32;
        let remainder2 = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder2[1..]);
        2
    }
}
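/// Hashes `length` bytes of `v` (a word fetched with BROTLI_UNALIGNED_LOAD64)
/// starting `offset` bytes in, keeping the top `64 - shift` bits of the
/// multiplicative hash. Must produce the same values as `Hash` below, since
/// both probe and update the same table.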
fn HashBytesAtOffset(v: u64, offset: i32, shift: usize, length: usize) -> u32 {
    let h: u64 = (v >> (8i32 * offset) << ((8 - length) * 8)).wrapping_mul(kHashMul32 as u64);
    (h >> shift) as u32
}

fn EmitCopyLen(copylen: usize, commands: &mut &mut [u32]) -> usize {
    if copylen < 10usize {
        (*commands)[0] = copylen.wrapping_add(38) as u32;
    } else if copylen < 134usize {
        let tail: usize = copylen.wrapping_sub(6);
        let nbits: usize = Log2FloorNonZero(tail as u64).wrapping_sub(1) as usize;
        let prefix: usize = tail >> nbits;
        let code: usize = (nbits << 1).wrapping_add(prefix).wrapping_add(44);
        let extra: usize = tail.wrapping_sub(prefix << nbits);
        (*commands)[0] = (code | extra << 8) as u32;
    } else if copylen < 2118usize {
        let tail: usize = copylen.wrapping_sub(70);
        let nbits: usize = Log2FloorNonZero(tail as u64) as usize;
        let code: usize = nbits.wrapping_add(52);
        let extra: usize = tail.wrapping_sub(1usize << nbits);
        (*commands)[0] = (code | extra << 8) as u32;
    } else {
        let extra: usize = copylen.wrapping_sub(2118);
        (*commands)[0] = (63usize | extra << 8) as u32;
    }
    let remainder = core::mem::take(commands);
    let _ = core::mem::replace(commands, &mut remainder[1..]);
    1
}
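/// Multiplicative hash of the first `length` bytes at `p`; equivalent to
/// `HashBytesAtOffset(BROTLI_UNALIGNED_LOAD64(p), 0, shift, length)`.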
fn Hash(p: &[u8], shift: usize, length: usize) -> u32 {
    let h: u64 =
        (BROTLI_UNALIGNED_LOAD64(p) << ((8 - length) * 8)).wrapping_mul(kHashMul32 as u64);
    (h >> shift) as u32
}

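/// Tests whether `p1` and `p2` agree on the first `length` bytes, where
/// `length` is either 4 (one unaligned 32-bit compare) or 6 (plus bytes 4
/// and 5 compared individually).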
fn IsMatch(p1: &[u8], p2: &[u8], length: usize) -> bool {
    BROTLI_UNALIGNED_LOAD32(p1) == BROTLI_UNALIGNED_LOAD32(p2)
        && (length == 4 || (p1[4] == p2[4] && p1[5] == p2[5]))
}

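/// First pass of the two-pass scheme: scans one block of `base_ip` for matches
/// against the hash `table`, appending command words to `commands` and raw
/// bytes to `literals`. The `goto_emit_remainder` flag and the labeled loop
/// stand in for the `goto`s of the C original.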
#[allow(unused_assignments)]
fn CreateCommands(
    input_index: usize,
    block_size: usize,
    input_size: usize,
    base_ip: &[u8],
    table: &mut [i32],
    table_bits: usize,
    min_match: usize,
    literals: &mut &mut [u8],
    num_literals: &mut usize,
    commands: &mut &mut [u32],
    num_commands: &mut usize,
) {
    let mut ip_index: usize = input_index;
    let shift: usize = 64usize.wrapping_sub(table_bits);
    let ip_end: usize = input_index.wrapping_add(block_size);
    let mut next_emit: usize = input_index;
    let mut last_distance: i32 = -1i32;
    let kInputMarginBytes: usize = 16usize;

    if block_size >= kInputMarginBytes {
        let len_limit: usize = min(
            block_size.wrapping_sub(min_match),
            input_size.wrapping_sub(kInputMarginBytes),
        );
        let ip_limit: usize = input_index.wrapping_add(len_limit);
        let mut next_hash: u32;
        let mut goto_emit_remainder = false;
        next_hash = Hash(
            &base_ip[{
                ip_index = ip_index.wrapping_add(1);
                ip_index
            }..],
            shift,
            min_match,
        );
        while !goto_emit_remainder {
            let mut skip: u32 = 32u32;
            let mut next_ip: usize = ip_index;
            let mut candidate: usize = 0;
            loop {
                'break3: loop {
                    let hash: u32 = next_hash;
                    let bytes_between_hash_lookups: u32 = skip >> 5;
                    skip = skip.wrapping_add(1);
                    ip_index = next_ip;
                    next_ip = ip_index.wrapping_add(bytes_between_hash_lookups as usize);
                    if next_ip > ip_limit {
                        goto_emit_remainder = true;
                        break 'break3;
                    }
                    next_hash = Hash(&base_ip[next_ip..], shift, min_match);
                    candidate = ip_index.wrapping_sub(last_distance as usize);
                    if IsMatch(&base_ip[ip_index..], &base_ip[candidate..], min_match)
                        && candidate < ip_index
                    {
                        table[hash as usize] = ip_index as i32;
                        break 'break3;
                    }
                    candidate = table[hash as usize] as usize;
                    table[hash as usize] = ip_index as i32;
                    if IsMatch(&base_ip[ip_index..], &base_ip[candidate..], min_match) {
                        break;
                    }
                }
                if !(ip_index.wrapping_sub(candidate) > (1usize << 18).wrapping_sub(16)
                    && !goto_emit_remainder)
                {
                    break;
                }
            }
            if goto_emit_remainder {
                break;
            }
            {
                let base: usize = ip_index;
                let matched: usize = min_match.wrapping_add(FindMatchLengthWithLimit(
                    &base_ip[(candidate + min_match)..],
                    &base_ip[(ip_index + min_match)..],
                    ip_end.wrapping_sub(ip_index).wrapping_sub(min_match),
                ));
                let distance: i32 = base.wrapping_sub(candidate) as i32;
                let insert: i32 = base.wrapping_sub(next_emit) as i32;
                ip_index = ip_index.wrapping_add(matched);
                *num_commands += EmitInsertLen(insert as u32, commands);
                (*literals)[..(insert as usize)]
                    .clone_from_slice(&base_ip[next_emit..(next_emit + insert as usize)]);
                *num_literals += insert as usize;
                let new_literals = core::mem::take(literals);
                let _ = core::mem::replace(literals, &mut new_literals[(insert as usize)..]);
                if distance == last_distance {
                    // Same distance as the previous copy: emit the last-distance code.
                    (*commands)[0] = 64u32;
                    let remainder = core::mem::take(commands);
                    let _ = core::mem::replace(commands, &mut remainder[1..]);
                    *num_commands += 1;
                } else {
                    *num_commands += EmitDistance(distance as u32, commands);
                    last_distance = distance;
                }
                *num_commands += EmitCopyLenLastDistance(matched, commands);
                next_emit = ip_index;
                if ip_index >= ip_limit {
                    goto_emit_remainder = true;
                    break;
                }
                {
                    // Re-hash the last few positions of the match so later
                    // scans can find them.
                    let mut input_bytes: u64;
                    let mut prev_hash: u32;
                    let cur_hash: u32;
                    if min_match == 4 {
                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 3)..]);
                        cur_hash = HashBytesAtOffset(input_bytes, 3i32, shift, min_match);
                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(3) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(2) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(1) as i32;
                    } else {
                        assert!(ip_index >= 5);
                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 5)..]);
                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(5) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(4) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(3) as i32;
                        assert!(ip_index >= 2);
                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 2)..]);
                        cur_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(2) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(1) as i32;
                    }
                    candidate = table[cur_hash as usize] as usize;
                    table[cur_hash as usize] = ip_index as i32;
                }
            }
            while ip_index.wrapping_sub(candidate) <= (1usize << 18).wrapping_sub(16)
                && IsMatch(&base_ip[ip_index..], &base_ip[candidate..], min_match)
            {
                let base_index: usize = ip_index;
                let matched: usize = min_match.wrapping_add(FindMatchLengthWithLimit(
                    &base_ip[(candidate + min_match)..],
                    &base_ip[(ip_index + min_match)..],
                    ip_end.wrapping_sub(ip_index).wrapping_sub(min_match),
                ));
                ip_index = ip_index.wrapping_add(matched);
                last_distance = base_index.wrapping_sub(candidate) as i32;
                *num_commands += EmitCopyLen(matched, commands);
                *num_commands += EmitDistance(last_distance as u32, commands);
                next_emit = ip_index;
                if ip_index >= ip_limit {
                    goto_emit_remainder = true;
                    break;
                }
                {
                    assert!(ip_index >= 5);
                    let mut input_bytes: u64;
                    let cur_hash: u32;
                    let mut prev_hash: u32;
                    if min_match == 4 {
                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 3)..]);
                        cur_hash = HashBytesAtOffset(input_bytes, 3i32, shift, min_match);
                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(3) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(2) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(1) as i32;
                    } else {
                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 5)..]);
                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(5) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(4) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(3) as i32;
                        assert!(ip_index >= 2);
                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 2)..]);
                        cur_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(2) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(1) as i32;
                    }
                    candidate = table[cur_hash as usize] as usize;
                    table[cur_hash as usize] = ip_index as i32;
                }
            }
            if !goto_emit_remainder {
                next_hash = Hash(
                    &base_ip[{
                        ip_index = ip_index.wrapping_add(1);
                        ip_index
                    }..],
                    shift,
                    min_match,
                );
            }
        }
    }
    if next_emit < ip_end {
        // Emit the trailing bytes of the block as literals.
        let insert: u32 = ip_end.wrapping_sub(next_emit) as u32;
        *num_commands += EmitInsertLen(insert, commands);
        literals[..insert as usize]
            .clone_from_slice(&base_ip[next_emit..(next_emit + insert as usize)]);
        let new_literals = core::mem::take(literals);
        let _ = core::mem::replace(literals, &mut new_literals[(insert as usize)..]);
        *num_literals += insert as usize;
    }
}

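/// Decides whether a block is worth compressing: either fewer than 98% of its
/// bytes were emitted as literals, or a histogram sampled from every 43rd byte
/// shows entropy below 98% of 8 bits per byte.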
fn ShouldCompress(input: &[u8], input_size: usize, num_literals: usize) -> bool {
    let corpus_size = input_size as floatX;
    if (num_literals as floatX) < 0.98 * corpus_size {
        true
    } else {
        // Build an approximate literal histogram from every 43rd byte.
        let mut literal_histo: [u32; 256] = [0; 256];
        let max_total_bit_cost: floatX = corpus_size * 8.0 * 0.98 / 43.0;
        let mut i = 0usize;
        while i < input_size {
            literal_histo[input[i] as usize] += 1;
            i = i.wrapping_add(43);
        }
        BitsEntropy(&mut literal_histo[..], 256) < max_total_bit_cost
    }
}

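/// Writes the low `n_bits` of `bits` at bit position `*pos` and advances the
/// position. Implemented as a read-modify-write of an unaligned 64-bit word,
/// so the caller must keep at least eight writable bytes at `array[*pos >> 3]`,
/// and the bits beyond the current position must still be zero.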
pub fn BrotliWriteBits(n_bits: usize, bits: u64, pos: &mut usize, array: &mut [u8]) {
    let p = &mut array[(*pos >> 3)..];
    let mut v: u64 = p[0] as u64;
    v |= bits << (*pos & 7);
    BROTLI_UNALIGNED_STORE64(p, v);
    *pos = pos.wrapping_add(n_bits);
}

#[deprecated(note = "use store_meta_block_header instead")]
pub fn BrotliStoreMetaBlockHeader(
    len: usize,
    is_uncompressed: i32,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    store_meta_block_header(len, is_uncompressed != 0, storage_ix, storage);
}

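/// Writes a meta-block header: an ISLAST bit of zero, the nibble count
/// MNIBBLES (2 bits, stored as `nibbles - 4`), `MLEN - 1` in that many
/// nibbles, and the ISUNCOMPRESSED bit.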
pub(crate) fn store_meta_block_header(
    len: usize,
    is_uncompressed: bool,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut nibbles: u64 = 6;
    BrotliWriteBits(1, 0, storage_ix, storage); // ISLAST
    if len <= (1u32 << 16) as usize {
        nibbles = 4;
    } else if len <= (1u32 << 20) as usize {
        nibbles = 5;
    }
    BrotliWriteBits(2, nibbles.wrapping_sub(4), storage_ix, storage); // MNIBBLES
    BrotliWriteBits(
        nibbles.wrapping_mul(4) as usize,
        len.wrapping_sub(1) as u64,
        storage_ix,
        storage,
    ); // MLEN - 1
    BrotliWriteBits(1, u64::from(is_uncompressed), storage_ix, storage); // ISUNCOMPRESSED
}

pub fn memcpy<T: Sized + Clone>(
    dst: &mut [T],
    dst_offset: usize,
    src: &[T],
    src_offset: usize,
    size_to_copy: usize,
) {
    dst[dst_offset..(dst_offset + size_to_copy)]
        .clone_from_slice(&src[src_offset..(src_offset + size_to_copy)]);
}
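/// Builds and stores the prefix codes for the 64 command and 64 distance
/// symbols used by this encoder. The memcpy shuffling reorders the depths
/// because the symbols are kept in a different order than in the full
/// 704-symbol command alphabet, which the stored tree must follow.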
fn BuildAndStoreCommandPrefixCode(
    histogram: &[u32],
    depth: &mut [u8],
    bits: &mut [u16],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut tree = [HuffmanTree::new(0, 0, 0); 129];
    let mut cmd_depth: [u8; 704] = [0; 704];
    let mut cmd_bits: [u16; 64] = [0; 64];
    BrotliCreateHuffmanTree(histogram, 64usize, 15i32, &mut tree[..], depth);
    BrotliCreateHuffmanTree(
        &histogram[64..],
        64usize,
        14i32,
        &mut tree[..],
        &mut depth[64..],
    );
    // Permute the depths into symbol order before computing the bits.
    memcpy(&mut cmd_depth[..], 0, depth, 24, 24);
    memcpy(&mut cmd_depth[..], 24, depth, 0, 8);
    memcpy(&mut cmd_depth[..], 32, depth, 48, 8);
    memcpy(&mut cmd_depth[..], 40, depth, 8, 8);
    memcpy(&mut cmd_depth[..], 48, depth, 56, 8);
    memcpy(&mut cmd_depth[..], 56, depth, 16, 8);
    BrotliConvertBitDepthsToSymbols(&mut cmd_depth[..], 64usize, &mut cmd_bits[..]);
    memcpy(bits, 0, &cmd_bits[..], 24, 16);
    memcpy(bits, 8, &cmd_bits[..], 40, 8);
    memcpy(bits, 16, &cmd_bits[..], 56, 8);
    memcpy(bits, 24, &cmd_bits[..], 0, 48);
    memcpy(bits, 48, &cmd_bits[..], 32, 8);
    memcpy(bits, 56, &cmd_bits[..], 48, 8);
    BrotliConvertBitDepthsToSymbols(&mut depth[64..], 64usize, &mut bits[64..]);
    {
        // Create the depth array for the full 704-symbol command alphabet.
        for item in cmd_depth[..64].iter_mut() {
            *item = 0;
        }
        memcpy(&mut cmd_depth[..], 0, depth, 24, 8);
        memcpy(&mut cmd_depth[..], 64, depth, 32, 8);
        memcpy(&mut cmd_depth[..], 128, depth, 40, 8);
        memcpy(&mut cmd_depth[..], 192, depth, 48, 8);
        memcpy(&mut cmd_depth[..], 384, depth, 56, 8);
        for i in 0usize..8usize {
            cmd_depth[128 + 8 * i] = depth[i];
            cmd_depth[256 + 8 * i] = depth[i + 8];
            cmd_depth[448 + 8 * i] = depth[i + 16];
        }
        BrotliStoreHuffmanTree(
            &mut cmd_depth[..],
            704usize,
            &mut tree[..],
            storage_ix,
            storage,
        );
    }
    BrotliStoreHuffmanTree(
        &mut depth[64..],
        64usize,
        &mut tree[..],
        storage_ix,
        storage,
    );
}

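/// Second pass: entropy-codes the buffered block. Builds a literal code from
/// the literal histogram and a command code from the low bytes of the command
/// words, then interleaves command bits, extra bits, and, after each insert
/// command (code < 24), the inserted literals themselves.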
fn StoreCommands<AllocHT: alloc::Allocator<HuffmanTree>>(
    mht: &mut AllocHT,
    mut literals: &[u8],
    num_literals: usize,
    commands: &[u32],
    num_commands: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    static kNumExtraBits: [u32; 128] = [
        0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 7, 8, 9, 10, 12, 14, 24, 0, 0, 0, 0, 0,
        0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6,
        7, 8, 9, 10, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5,
        5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15, 16, 16, 17, 17,
        18, 18, 19, 19, 20, 20, 21, 21, 22, 22, 23, 23, 24, 24,
    ];
    static kInsertOffset: [u32; 24] = [
        0, 1, 2, 3, 4, 5, 6, 8, 10, 14, 18, 26, 34, 50, 66, 98, 130, 194, 322, 578, 1090, 2114,
        6210, 22594,
    ];
    let mut lit_depths: [u8; 256] = [0; 256];
    let mut lit_bits: [u16; 256] = [0; 256];
    let mut lit_histo: [u32; 256] = [0; 256];
    let mut cmd_depths: [u8; 128] = [0; 128];
    let mut cmd_bits: [u16; 128] = [0; 128];
    let mut cmd_histo: [u32; 128] = [0; 128];
    for &lit in literals[..num_literals].iter() {
        lit_histo[lit as usize] += 1;
    }
    BrotliBuildAndStoreHuffmanTreeFast(
        mht,
        &lit_histo[..],
        num_literals,
        8usize,
        &mut lit_depths[..],
        &mut lit_bits[..],
        storage_ix,
        storage,
    );
    for &cmd in commands[..num_commands].iter() {
        cmd_histo[(cmd & 0xffu32) as usize] += 1;
    }
    // Guarantee a nonzero depth for a handful of command symbols.
    cmd_histo[1] += 1;
    cmd_histo[2] += 1;
    cmd_histo[64] += 1;
    cmd_histo[84] += 1;
    BuildAndStoreCommandPrefixCode(
        &cmd_histo[..],
        &mut cmd_depths[..],
        &mut cmd_bits[..],
        storage_ix,
        storage,
    );
    for i in 0usize..num_commands {
        let cmd: u32 = commands[i];
        let code: u32 = cmd & 0xffu32;
        let extra: u32 = cmd >> 8;
        BrotliWriteBits(
            cmd_depths[code as usize] as usize,
            cmd_bits[code as usize] as u64,
            storage_ix,
            storage,
        );
        BrotliWriteBits(
            kNumExtraBits[code as usize] as usize,
            extra as u64,
            storage_ix,
            storage,
        );
        // Codes below 24 are insert commands: emit the inserted literals now.
        if code < 24u32 {
            let insert: u32 = kInsertOffset[code as usize].wrapping_add(extra);
            for &lit in literals[..(insert as usize)].iter() {
                BrotliWriteBits(
                    lit_depths[lit as usize] as usize,
                    lit_bits[lit as usize] as u64,
                    storage_ix,
                    storage,
                );
            }
            literals = &literals[insert as usize..];
        }
    }
}
fn EmitUncompressedMetaBlock(
    input: &[u8],
    input_size: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    store_meta_block_header(input_size, true, storage_ix, storage);
    // Pad to a byte boundary, copy the raw bytes, and clear the next byte so
    // that BrotliWriteBits can OR into it.
    *storage_ix = storage_ix.wrapping_add(7) & !7usize;
    memcpy(storage, *storage_ix >> 3, input, 0, input_size);
    *storage_ix = storage_ix.wrapping_add(input_size << 3);
    storage[*storage_ix >> 3] = 0u8;
}

#[allow(unused_variables)]
#[inline(always)]
fn compress_fragment_two_pass_impl<AllocHT: alloc::Allocator<HuffmanTree>>(
    m: &mut AllocHT,
    base_ip: &[u8],
    mut input_size: usize,
    is_last: bool,
    command_buf: &mut [u32],
    literal_buf: &mut [u8],
    table: &mut [i32],
    table_bits: usize,
    min_match: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut input_index: usize = 0usize;
    while input_size > 0usize {
        let block_size: usize = min(input_size, kCompressFragmentTwoPassBlockSize);
        let mut num_literals: usize = 0;
        let mut num_commands: usize = 0;
        {
            let mut literals = &mut literal_buf[..];
            let mut commands = &mut command_buf[..];
            CreateCommands(
                input_index,
                block_size,
                input_size,
                base_ip,
                table,
                table_bits,
                min_match,
                &mut literals,
                &mut num_literals,
                &mut commands,
                &mut num_commands,
            );
        }
        if ShouldCompress(&base_ip[input_index..], block_size, num_literals) {
            store_meta_block_header(block_size, false, storage_ix, storage);
            // No block splits, no contexts: a 13-bit run of zeros.
            BrotliWriteBits(13usize, 0, storage_ix, storage);
            StoreCommands(
                m,
                literal_buf,
                num_literals,
                command_buf,
                num_commands,
                storage_ix,
                storage,
            );
        } else {
            EmitUncompressedMetaBlock(&base_ip[input_index..], block_size, storage_ix, storage);
        }
        input_index = input_index.wrapping_add(block_size);
        input_size = input_size.wrapping_sub(block_size);
    }
}
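// One specialization per supported hash-table size; tables smaller than 2^15
// entries use a 4-byte minimum match, larger ones use 6 bytes.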
macro_rules! compress_specialization {
    ($table_bits: expr, $fname: ident) => {
        fn $fname<AllocHT: alloc::Allocator<HuffmanTree>>(
            mht: &mut AllocHT,
            input: &[u8],
            input_size: usize,
            is_last: bool,
            command_buf: &mut [u32],
            literal_buf: &mut [u8],
            table: &mut [i32],
            storage_ix: &mut usize,
            storage: &mut [u8],
        ) {
            let min_match = if $table_bits < 15 { 4 } else { 6 };
            compress_fragment_two_pass_impl(
                mht,
                input,
                input_size,
                is_last,
                command_buf,
                literal_buf,
                table,
                $table_bits,
                min_match,
                storage_ix,
                storage,
            );
        }
    };
}
compress_specialization!(8, BrotliCompressFragmentTwoPassImpl8);
compress_specialization!(9, BrotliCompressFragmentTwoPassImpl9);
compress_specialization!(10, BrotliCompressFragmentTwoPassImpl10);
compress_specialization!(11, BrotliCompressFragmentTwoPassImpl11);
compress_specialization!(12, BrotliCompressFragmentTwoPassImpl12);
compress_specialization!(13, BrotliCompressFragmentTwoPassImpl13);
compress_specialization!(14, BrotliCompressFragmentTwoPassImpl14);
compress_specialization!(15, BrotliCompressFragmentTwoPassImpl15);
compress_specialization!(16, BrotliCompressFragmentTwoPassImpl16);
compress_specialization!(17, BrotliCompressFragmentTwoPassImpl17);

fn RewindBitPosition(new_storage_ix: usize, storage_ix: &mut usize, storage: &mut [u8]) {
    let bitpos: usize = new_storage_ix & 7usize;
    let mask: usize = (1u32 << bitpos).wrapping_sub(1) as usize;
    // Keep only the bits below the new position in the current byte.
    storage[new_storage_ix >> 3] &= mask as u8;
    *storage_ix = new_storage_ix;
}

#[deprecated(note = "use compress_fragment_two_pass instead")]
pub fn BrotliCompressFragmentTwoPass<AllocHT: alloc::Allocator<HuffmanTree>>(
    m: &mut AllocHT,
    input: &[u8],
    input_size: usize,
    is_last: i32,
    command_buf: &mut [u32],
    literal_buf: &mut [u8],
    table: &mut [i32],
    table_size: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    compress_fragment_two_pass(
        m,
        input,
        input_size,
        is_last != 0,
        command_buf,
        literal_buf,
        table,
        table_size,
        storage_ix,
        storage,
    )
}

/// Compresses `input` as one or more meta-blocks using the two-pass strategy:
/// commands and literals are buffered by CreateCommands, then entropy-coded by
/// StoreCommands. Hash-table sizes from 2^8 to 2^17 entries are supported.
pub(crate) fn compress_fragment_two_pass<AllocHT: alloc::Allocator<HuffmanTree>>(
    m: &mut AllocHT,
    input: &[u8],
    input_size: usize,
    is_last: bool,
    command_buf: &mut [u32],
    literal_buf: &mut [u8],
    table: &mut [i32],
    table_size: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let initial_storage_ix: usize = *storage_ix;
    let table_bits: usize = Log2FloorNonZero(table_size as u64) as usize;
    match table_bits {
        8 => BrotliCompressFragmentTwoPassImpl8(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        9 => BrotliCompressFragmentTwoPassImpl9(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        10 => BrotliCompressFragmentTwoPassImpl10(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        11 => BrotliCompressFragmentTwoPassImpl11(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        12 => BrotliCompressFragmentTwoPassImpl12(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        13 => BrotliCompressFragmentTwoPassImpl13(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        14 => BrotliCompressFragmentTwoPassImpl14(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        15 => BrotliCompressFragmentTwoPassImpl15(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        16 => BrotliCompressFragmentTwoPassImpl16(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        17 => BrotliCompressFragmentTwoPassImpl17(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        _ => {}
    }
    // If the output grew past the uncompressed size plus a small header,
    // rewind and emit the block uncompressed instead.
    if storage_ix.wrapping_sub(initial_storage_ix) > (31usize).wrapping_add(input_size << 3) {
        RewindBitPosition(initial_storage_ix, storage_ix, storage);
        EmitUncompressedMetaBlock(input, input_size, storage_ix, storage);
    }
    if is_last {
        BrotliWriteBits(1, 1, storage_ix, storage); // ISLAST
        BrotliWriteBits(1, 1, storage_ix, storage); // ISEMPTY
        *storage_ix = storage_ix.wrapping_add(7) & !7usize;
    }
}
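
// Minimal sanity checks, assuming only the arithmetic of the Emit* helpers and
// the Hash/HashBytesAtOffset pair above; the expected command words are worked
// out by hand from the prefix-code formulas.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn insert_len_codes() {
        // insertlen 3 is below 6, so the command is the raw length: code 3.
        let mut buf = [0u32; 2];
        let mut cmds = &mut buf[..];
        assert_eq!(EmitInsertLen(3, &mut cmds), 1);
        assert_eq!(buf[0], 3);

        // insertlen 100: tail = 98, nbits = 5, prefix = 3 -> code 15, extra 2
        // (kInsertOffset[15] == 98 and 98 + 2 == 100).
        let mut buf = [0u32; 2];
        let mut cmds = &mut buf[..];
        assert_eq!(EmitInsertLen(100, &mut cmds), 1);
        assert_eq!(buf[0], 15 | (2 << 8));
    }

    #[test]
    fn distance_one_uses_first_explicit_code() {
        // distance 1: d = 4, nbits = 1, prefix = 0 -> code 80 with no extra bits.
        let mut buf = [0u32; 2];
        let mut cmds = &mut buf[..];
        assert_eq!(EmitDistance(1, &mut cmds), 1);
        assert_eq!(buf[0], 80);
    }

    #[test]
    fn hash_agrees_with_hash_bytes_at_offset_zero() {
        let p: [u8; 16] = [31, 41, 59, 26, 53, 58, 97, 93, 23, 84, 62, 64, 33, 83, 27, 95];
        let v = BROTLI_UNALIGNED_LOAD64(&p[..]);
        let shift = 64 - 14; // as for a table of 2^14 entries
        for &len in &[4usize, 6usize] {
            assert_eq!(Hash(&p[..], shift, len), HashBytesAtOffset(v, 0, shift, len));
        }
    }
}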