use core::cmp::min;

use super::super::alloc;
use super::backward_references::kHashMul32;
use super::brotli_bit_stream::{BrotliBuildAndStoreHuffmanTreeFast, BrotliStoreHuffmanTree};
use super::compress_fragment_two_pass::{memcpy, BrotliWriteBits};
use super::entropy_encode::{
    BrotliConvertBitDepthsToSymbols, BrotliCreateHuffmanTree, HuffmanTree,
};
use super::static_dict::{
    FindMatchLengthWithLimit, BROTLI_UNALIGNED_LOAD32, BROTLI_UNALIGNED_LOAD64,
};
use super::util::{FastLog2, Log2FloorNonZero};
use crate::enc::compress_fragment_two_pass::store_meta_block_header;
use crate::enc::floatX;

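/// Seed histogram for the 128 command/distance code symbols: symbols the fast
/// path can emit start with a count of one so that they always receive a code
/// when the command prefix code is rebuilt between blocks.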
static kCmdHistoSeed: [u32; 128] = [
    0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0,
];

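/// Hashes the five bytes at `p` (loaded as one little-endian 64-bit word) into
/// a `(64 - shift)`-bit hash-table index.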
fn Hash(p: &[u8], shift: usize) -> u32 {
    let h: u64 = (BROTLI_UNALIGNED_LOAD64(p) << 24).wrapping_mul(kHashMul32 as u64);
    (h >> shift) as u32
}

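/// Returns true if the first five bytes of `p1` and `p2` are equal.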
fn IsMatch(p1: &[u8], p2: &[u8]) -> bool {
    BROTLI_UNALIGNED_LOAD32(p1) == BROTLI_UNALIGNED_LOAD32(p2) && p1[4] == p2[4]
}

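/// Builds a prefix code over the literal bytes of `input`, stores the code into
/// the bit stream, and returns `literal_ratio`: 125 times the average code
/// length of a literal, in bits (used later to decide on uncompressed mode).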
fn BuildAndStoreLiteralPrefixCode<AllocHT: alloc::Allocator<HuffmanTree>>(
    mht: &mut AllocHT,
    input: &[u8],
    input_size: usize,
    depths: &mut [u8],
    bits: &mut [u16],
    storage_ix: &mut usize,
    storage: &mut [u8],
) -> usize {
    let mut histogram: [u32; 256] = [0; 256];
    let mut histogram_total: usize;
    if input_size < (1 << 15) {
        // Small input: histogram every byte.
        for i in 0..input_size {
            histogram[input[i] as usize] = histogram[input[i] as usize].wrapping_add(1);
        }
        histogram_total = input_size;
        for i in 0..256 {
            // Simple form of Laplace smoothing, so that no plausible literal
            // ends up with an excessively long code.
            let adjust: u32 = 2u32.wrapping_mul(min(histogram[i], 11));
            histogram[i] = histogram[i].wrapping_add(adjust);
            histogram_total = histogram_total.wrapping_add(adjust as usize);
        }
    } else {
        // Large input: histogram a sample of the bytes only.
        static kSampleRate: usize = 29;
        let mut i = 0;
        while i < input_size {
            histogram[input[i] as usize] = histogram[input[i] as usize].wrapping_add(1);
            i = i.wrapping_add(kSampleRate);
        }
        histogram_total = input_size
            .wrapping_add(kSampleRate)
            .wrapping_sub(1)
            .wrapping_div(kSampleRate);
        for i in 0..256 {
            // More aggressive smoothing, since the sampled histogram is noisier.
            let adjust: u32 = 1u32.wrapping_add(2u32.wrapping_mul(min(histogram[i], 11)));
            histogram[i] = histogram[i].wrapping_add(adjust);
            histogram_total = histogram_total.wrapping_add(adjust as usize);
        }
    }
    BrotliBuildAndStoreHuffmanTreeFast(
        mht,
        &mut histogram[..],
        histogram_total,
        8,
        depths,
        bits,
        storage_ix,
        storage,
    );
    {
        let mut literal_ratio: usize = 0;
        for i in 0..256 {
            if histogram[i] != 0 {
                literal_ratio = literal_ratio
                    .wrapping_add(histogram[i].wrapping_mul(depths[i] as u32) as usize);
            }
        }
        // 125 * (average code length of a literal, in bits).
        literal_ratio.wrapping_mul(125).wrapping_div(histogram_total)
    }
}
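/// State machine driving `compress_fragment_fast_impl`: emit commands for the
/// current block, emit the remaining literals, or start the next block.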
#[derive(PartialEq, Eq, Copy, Clone)]
pub enum CodeBlockState {
    EMIT_REMAINDER,
    EMIT_COMMANDS,
    NEXT_BLOCK,
}

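/// Writes the insert-length command for `insertlen` (code bits plus extra
/// bits) and updates the command histogram. Lengths of 6210 and above go
/// through `EmitLongInsertLen` instead.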
fn EmitInsertLen(
    insertlen: usize,
    depth: &[u8],
    bits: &[u16],
    histo: &mut [u32],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    if insertlen < 6 {
        let code: usize = insertlen.wrapping_add(40);
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        histo[code] = histo[code].wrapping_add(1);
    } else if insertlen < 130 {
        let tail: usize = insertlen.wrapping_sub(2);
        let nbits: u32 = Log2FloorNonZero(tail as u64).wrapping_sub(1);
        let prefix: usize = tail >> nbits;
        let inscode: usize = ((nbits << 1) as usize).wrapping_add(prefix).wrapping_add(42);
        BrotliWriteBits(
            depth[inscode] as usize,
            bits[inscode] as u64,
            storage_ix,
            storage,
        );
        BrotliWriteBits(
            nbits as usize,
            (tail as u64).wrapping_sub((prefix as u64) << nbits),
            storage_ix,
            storage,
        );
        histo[inscode] = histo[inscode].wrapping_add(1);
    } else if insertlen < 2114 {
        let tail: usize = insertlen.wrapping_sub(66);
        let nbits: u32 = Log2FloorNonZero(tail as u64);
        let code: usize = nbits.wrapping_add(50) as usize;
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        BrotliWriteBits(
            nbits as usize,
            (tail as u64).wrapping_sub(1 << nbits),
            storage_ix,
            storage,
        );
        histo[code] = histo[code].wrapping_add(1);
    } else {
        BrotliWriteBits(depth[61] as usize, bits[61] as u64, storage_ix, storage);
        BrotliWriteBits(
            12,
            (insertlen as u64).wrapping_sub(2114),
            storage_ix,
            storage,
        );
        histo[61] = histo[61].wrapping_add(1);
    }
}

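/// Heuristic for switching to an uncompressed meta-block: the pending literal
/// run must dominate what has been emitted since the start of the meta-block
/// (`delta`), and literals must be compressing poorly.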
fn ShouldUseUncompressedMode(delta: isize, insertlen: usize, literal_ratio: usize) -> bool {
    let compressed = delta as usize;
    compressed.wrapping_mul(50) <= insertlen && literal_ratio > 980
}
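/// Rewinds the bit writer to `new_storage_ix`, clearing the already-written
/// high bits of the byte it now points into.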
fn RewindBitPosition(new_storage_ix: usize, storage_ix: &mut usize, storage: &mut [u8]) {
    let bitpos: usize = new_storage_ix & 7;
    let mask: usize = (1u32 << bitpos).wrapping_sub(1) as usize;
    storage[new_storage_ix >> 3] &= mask as u8;
    *storage_ix = new_storage_ix;
}

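/// Replaces everything emitted since `storage_ix_start` with a single
/// uncompressed meta-block containing `len` bytes copied verbatim from `begin`.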
fn EmitUncompressedMetaBlock(
    begin: &[u8],
    len: usize,
    storage_ix_start: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    RewindBitPosition(storage_ix_start, storage_ix, storage);
    store_meta_block_header(len, true, storage_ix, storage);
    // Align to a byte boundary, then copy the raw bytes in.
    *storage_ix = storage_ix.wrapping_add(7) & !7usize;
    memcpy(storage, *storage_ix >> 3, begin, 0, len);
    *storage_ix = storage_ix.wrapping_add(len << 3);
    storage[*storage_ix >> 3] = 0;
}

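/// Writes an insert-length command for lengths of 6210 and above, using the
/// two longest insert codes (symbols 62 and 63).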
fn EmitLongInsertLen(
    insertlen: usize,
    depth: &[u8],
    bits: &[u16],
    histo: &mut [u32],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    if insertlen < 22594 {
        BrotliWriteBits(depth[62] as usize, bits[62] as u64, storage_ix, storage);
        BrotliWriteBits(
            14,
            (insertlen as u64).wrapping_sub(6210),
            storage_ix,
            storage,
        );
        histo[62] = histo[62].wrapping_add(1);
    } else {
        BrotliWriteBits(depth[63] as usize, bits[63] as u64, storage_ix, storage);
        BrotliWriteBits(
            24,
            (insertlen as u64).wrapping_sub(22594),
            storage_ix,
            storage,
        );
        histo[63] = histo[63].wrapping_add(1);
    }
}

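/// Writes `len` literals from `input` using the literal prefix code.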
fn EmitLiterals(
    input: &[u8],
    len: usize,
    depth: &[u8],
    bits: &[u16],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    for j in 0..len {
        let lit: u8 = input[j];
        BrotliWriteBits(
            depth[lit as usize] as usize,
            bits[lit as usize] as u64,
            storage_ix,
            storage,
        );
    }
}

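/// Writes the distance command for `distance` (prefix code plus extra bits)
/// and updates the command histogram.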
fn EmitDistance(
    distance: usize,
    depth: &[u8],
    bits: &[u16],
    histo: &mut [u32],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let d: u64 = distance.wrapping_add(3) as u64;
    let nbits: u32 = Log2FloorNonZero(d).wrapping_sub(1);
    let prefix: u64 = (d >> nbits) & 1;
    let offset: u64 = (2u64).wrapping_add(prefix) << nbits;
    let distcode: u64 = ((2u32).wrapping_mul(nbits.wrapping_sub(1)) as u64)
        .wrapping_add(prefix)
        .wrapping_add(80);
    BrotliWriteBits(
        depth[distcode as usize] as usize,
        bits[distcode as usize] as u64,
        storage_ix,
        storage,
    );
    BrotliWriteBits(nbits as usize, d.wrapping_sub(offset), storage_ix, storage);
    histo[distcode as usize] = histo[distcode as usize].wrapping_add(1);
}

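/// Writes the copy-length command for a copy that reuses the previous
/// distance. Copies shorter than 72 use command codes that imply the last
/// distance; longer ones also emit the explicit last-distance symbol (64).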
fn EmitCopyLenLastDistance(
    copylen: usize,
    depth: &[u8],
    bits: &[u16],
    histo: &mut [u32],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    if copylen < 12 {
        let code = copylen.wrapping_sub(4);
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        histo[code] = histo[code].wrapping_add(1);
    } else if copylen < 72 {
        let tail: usize = copylen.wrapping_sub(8);
        let nbits: u32 = Log2FloorNonZero(tail as u64).wrapping_sub(1);
        let prefix: usize = tail >> nbits;
        let code: usize = ((nbits << 1) as usize).wrapping_add(prefix).wrapping_add(4);
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        BrotliWriteBits(
            nbits as usize,
            tail.wrapping_sub(prefix << nbits) as u64,
            storage_ix,
            storage,
        );
        histo[code] = histo[code].wrapping_add(1);
    } else if copylen < 136 {
        let tail: usize = copylen.wrapping_sub(8);
        let code: usize = (tail >> 5).wrapping_add(30);
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        BrotliWriteBits(5, tail as u64 & 31, storage_ix, storage);
        BrotliWriteBits(depth[64] as usize, bits[64] as u64, storage_ix, storage);
        histo[code] = histo[code].wrapping_add(1);
        histo[64] = histo[64].wrapping_add(1);
    } else if copylen < 2120 {
        let tail: usize = copylen.wrapping_sub(72);
        let nbits: u32 = Log2FloorNonZero(tail as u64);
        let code: usize = nbits.wrapping_add(28) as usize;
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        BrotliWriteBits(
            nbits as usize,
            (tail as u64).wrapping_sub(1u64 << nbits),
            storage_ix,
            storage,
        );
        BrotliWriteBits(depth[64] as usize, bits[64] as u64, storage_ix, storage);
        histo[code] = histo[code].wrapping_add(1);
        histo[64] = histo[64].wrapping_add(1);
    } else {
        BrotliWriteBits(depth[39] as usize, bits[39] as u64, storage_ix, storage);
        BrotliWriteBits(
            24,
            copylen.wrapping_sub(2120) as u64,
            storage_ix,
            storage,
        );
        BrotliWriteBits(depth[64] as usize, bits[64] as u64, storage_ix, storage);
        histo[39] = histo[39].wrapping_add(1);
        histo[64] = histo[64].wrapping_add(1);
    }
}

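/// Like `Hash`, but hashes the five bytes starting at byte `offset` (0..=3)
/// of the preloaded 64-bit word `v`.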
fn HashBytesAtOffset(v: u64, offset: i32, shift: usize) -> u32 {
    let h: u64 = ((v >> (8i32 * offset)) << 24).wrapping_mul(kHashMul32 as u64);
    (h >> shift) as u32
}

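/// Writes the copy-length command for a copy whose distance is emitted
/// separately, and updates the command histogram.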
fn EmitCopyLen(
    copylen: usize,
    depth: &[u8],
    bits: &[u16],
    histo: &mut [u32],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    if copylen < 10 {
        let code = copylen.wrapping_add(14);
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        histo[code] = histo[code].wrapping_add(1);
    } else if copylen < 134 {
        let tail: usize = copylen.wrapping_sub(6);
        let nbits: u32 = Log2FloorNonZero(tail as u64).wrapping_sub(1);
        let prefix: usize = tail >> nbits;
        let code: usize = ((nbits << 1) as usize).wrapping_add(prefix).wrapping_add(20);
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        BrotliWriteBits(
            nbits as usize,
            (tail as u64).wrapping_sub((prefix as u64) << nbits),
            storage_ix,
            storage,
        );
        histo[code] = histo[code].wrapping_add(1);
    } else if copylen < 2118 {
        let tail: usize = copylen.wrapping_sub(70);
        let nbits: u32 = Log2FloorNonZero(tail as u64);
        let code: usize = nbits.wrapping_add(28) as usize;
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        BrotliWriteBits(
            nbits as usize,
            (tail as u64).wrapping_sub(1 << nbits),
            storage_ix,
            storage,
        );
        histo[code] = histo[code].wrapping_add(1);
    } else {
        BrotliWriteBits(depth[39] as usize, bits[39] as u64, storage_ix, storage);
        BrotliWriteBits(
            24,
            (copylen as u64).wrapping_sub(2118),
            storage_ix,
            storage,
        );
        histo[39] = histo[39].wrapping_add(1);
    }
}

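/// Estimates, from a sampled histogram, whether coding `data` with the current
/// literal code `depths` would be cheaper than starting a new meta-block.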
fn ShouldMergeBlock(data: &[u8], len: usize, depths: &[u8]) -> bool {
    let mut histo: [usize; 256] = [0; 256];
    static kSampleRate: usize = 43;
    let mut i = 0;
    while i < len {
        histo[data[i] as usize] = histo[data[i] as usize].wrapping_add(1);
        i = i.wrapping_add(kSampleRate);
    }
    let total: usize = len
        .wrapping_add(kSampleRate)
        .wrapping_sub(1)
        .wrapping_div(kSampleRate);
    // Merging pays off while the coded cost of the sample stays within its
    // entropy plus some fixed slack.
    let mut r: floatX = (FastLog2(total as u64) + 0.5) * (total as floatX) + 200.0;
    for i in 0..256 {
        r -= (histo[i] as floatX) * ((depths[i] as floatX) + FastLog2(histo[i] as u64));
    }
    r >= 0.0
}

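/// Overwrites `n_bits` bits of `array` at bit position `pos` with `bits`,
/// leaving the surrounding bits intact (used to patch the MLEN field of an
/// already-written meta-block header).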
fn UpdateBits(mut n_bits: usize, mut bits: u32, mut pos: usize, array: &mut [u8]) {
    while n_bits > 0 {
        let byte_pos: usize = pos >> 3;
        let n_unchanged_bits: usize = pos & 7;
        let n_changed_bits: usize = min(n_bits, 8usize.wrapping_sub(n_unchanged_bits));
        let total_bits: usize = n_unchanged_bits.wrapping_add(n_changed_bits);
        let mask: u32 =
            !(1u32 << total_bits).wrapping_sub(1) | (1u32 << n_unchanged_bits).wrapping_sub(1);
        let unchanged_bits: u32 = array[byte_pos] as u32 & mask;
        let changed_bits: u32 = bits & (1u32 << n_changed_bits).wrapping_sub(1);
        array[byte_pos] = (changed_bits << n_unchanged_bits | unchanged_bits) as u8;
        n_bits = n_bits.wrapping_sub(n_changed_bits);
        bits >>= n_changed_bits;
        pos = pos.wrapping_add(n_changed_bits);
    }
}

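/// Builds prefix codes for the 64 insert-and-copy length codes and the 64
/// distance codes from `histogram`, then stores both code tables into the bit
/// stream.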
fn BuildAndStoreCommandPrefixCode(
    histogram: &[u32],
    depth: &mut [u8],
    bits: &mut [u16],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut tree = [HuffmanTree::new(0, 0, 0); 129];
    let mut cmd_depth: [u8; 704] = [0; 704];
    let mut cmd_bits: [u16; 64] = [0; 64];
    BrotliCreateHuffmanTree(histogram, 64, 15, &mut tree[..], depth);
    BrotliCreateHuffmanTree(
        &histogram[64..],
        64,
        14,
        &mut tree[..],
        &mut depth[64..],
    );
    // Assign bits on a reshuffled copy of the depths (ranges reordered as
    // 0..24, 40..48, 24..32, 48..56, 32..40, 56..64), then map the resulting
    // bits back to the original symbol order.
    memcpy(&mut cmd_depth[..], 0, depth, 0, 24);
    memcpy(&mut cmd_depth[..], 24, depth, 40, 8);
    memcpy(&mut cmd_depth[..], 32, depth, 24, 8);
    memcpy(&mut cmd_depth[..], 40, depth, 48, 8);
    memcpy(&mut cmd_depth[..], 48, depth, 32, 8);
    memcpy(&mut cmd_depth[..], 56, depth, 56, 8);
    BrotliConvertBitDepthsToSymbols(&mut cmd_depth[..], 64, &mut cmd_bits[..]);
    memcpy(bits, 0, &cmd_bits[..], 0, 24);
    memcpy(bits, 24, &cmd_bits[..], 32, 8);
    memcpy(bits, 32, &cmd_bits[..], 48, 8);
    memcpy(bits, 40, &cmd_bits[..], 24, 8);
    memcpy(bits, 48, &cmd_bits[..], 40, 8);
    memcpy(bits, 56, &cmd_bits[..], 56, 8);
    BrotliConvertBitDepthsToSymbols(&mut depth[64..], 64, &mut bits[64..]);
    {
        // Spread the command depths over the 704-symbol alphabet layout used
        // when serializing the code, then store it.
        for item in cmd_depth[..64].iter_mut() {
            *item = 0;
        }
        memcpy(&mut cmd_depth[..], 0, depth, 0, 8);
        memcpy(&mut cmd_depth[..], 64, depth, 8, 8);
        memcpy(&mut cmd_depth[..], 128, depth, 16, 8);
        memcpy(&mut cmd_depth[..], 192, depth, 24, 8);
        memcpy(&mut cmd_depth[..], 384, depth, 32, 8);
        for i in 0..8 {
            cmd_depth[(128usize).wrapping_add((8usize).wrapping_mul(i))] =
                depth[i.wrapping_add(40)];
            cmd_depth[(256usize).wrapping_add((8usize).wrapping_mul(i))] =
                depth[i.wrapping_add(48)];
            cmd_depth[(448usize).wrapping_add((8usize).wrapping_mul(i))] =
                depth[i.wrapping_add(56)];
        }
        BrotliStoreHuffmanTree(&mut cmd_depth[..], 704, &mut tree[..], storage_ix, storage);
    }
    BrotliStoreHuffmanTree(&mut depth[64..], 64, &mut tree[..], storage_ix, storage);
}

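/// Core of the one-pass fast compressor: walks `input_ptr` with a greedy
/// hash-table match finder, emitting insert/copy commands block by block and
/// falling back to an uncompressed meta-block when compression is not paying
/// off.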
#[allow(unused_assignments)]
fn compress_fragment_fast_impl<AllocHT: alloc::Allocator<HuffmanTree>>(
    m: &mut AllocHT,
    input_ptr: &[u8],
    mut input_size: usize,
    is_last: bool,
    table: &mut [i32],
    table_bits: usize,
    cmd_depth: &mut [u8],
    cmd_bits: &mut [u16],
    cmd_code_numbits: &mut usize,
    cmd_code: &mut [u8],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut cmd_histo = [0u32; 128];
    let mut ip_end = 0usize;
    let mut next_emit = 0usize;
    let base_ip = 0usize;
    static kFirstBlockSize: usize = 3 << 15;
    static kMergeBlockSize: usize = 1 << 16;
    let kInputMarginBytes = 16usize;
    let kMinMatchLen = 5usize;
    let mut metablock_start = 0usize;
    let mut block_size = min(input_size, kFirstBlockSize);
    let mut total_block_size = block_size;
    // Bit position of the MLEN field of the current meta-block header, so the
    // length can be patched later if blocks get merged.
    let mut mlen_storage_ix = storage_ix.wrapping_add(3);
    let mut lit_depth = [0u8; 256];
    let mut lit_bits = [0u16; 256];
    let mut literal_ratio: usize;
    let mut input_index = 0usize;
    let mut last_distance: i32;
    let shift: usize = 64usize.wrapping_sub(table_bits);
    store_meta_block_header(block_size, false, storage_ix, storage);
    // No block splits, no contexts.
    BrotliWriteBits(13, 0, storage_ix, storage);
    literal_ratio = BuildAndStoreLiteralPrefixCode(
        m,
        &input_ptr[input_index..],
        block_size,
        &mut lit_depth[..],
        &mut lit_bits[..],
        storage_ix,
        storage,
    );
    // Store the pre-compressed command and distance prefix codes.
    {
        let mut i = 0usize;
        while i.wrapping_add(7) < *cmd_code_numbits {
            BrotliWriteBits(8, cmd_code[i >> 3] as u64, storage_ix, storage);
            i = i.wrapping_add(8);
        }
    }
    BrotliWriteBits(
        *cmd_code_numbits & 7,
        cmd_code[*cmd_code_numbits >> 3] as u64,
        storage_ix,
        storage,
    );
    let mut code_block_selection = CodeBlockState::EMIT_COMMANDS;
    'continue_to_next_block: loop {
        let mut ip_index: usize;
        if code_block_selection == CodeBlockState::EMIT_COMMANDS {
            // Seed the command histogram; it will drive the command prefix
            // code built for the next block.
            cmd_histo[..128].clone_from_slice(&kCmdHistoSeed[..]);
            ip_index = input_index;
            last_distance = -1;
            ip_end = input_index.wrapping_add(block_size);
            if block_size >= kInputMarginBytes {
                // Shorten the block so that every hashed position keeps at
                // least kInputMarginBytes of slack at the end of the input.
                let len_limit: usize = min(
                    block_size.wrapping_sub(kMinMatchLen),
                    input_size.wrapping_sub(kInputMarginBytes),
                );
                let ip_limit: usize = input_index.wrapping_add(len_limit);
                ip_index = ip_index.wrapping_add(1);
                let mut next_hash = Hash(&input_ptr[ip_index..], shift);
                loop {
                    // Step 1: find a 5-byte match, skipping ahead faster and
                    // faster the longer we go without finding one.
                    let mut skip = 32u32;
                    let mut next_ip = ip_index;
                    let mut candidate = 0usize;
                    loop {
                        loop {
                            let hash = next_hash;
                            let bytes_between_hash_lookups: u32 = skip >> 5;
                            skip = skip.wrapping_add(1);
                            ip_index = next_ip;
                            next_ip = ip_index.wrapping_add(bytes_between_hash_lookups as usize);
                            if next_ip > ip_limit {
                                code_block_selection = CodeBlockState::EMIT_REMAINDER;
                                break;
                            }
                            next_hash = Hash(&input_ptr[next_ip..], shift);
                            // First try the previous distance; it is cheap to encode.
                            candidate = ip_index.wrapping_sub(last_distance as usize);
                            if IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..])
                                && candidate < ip_index
                            {
                                table[hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
                                break;
                            }
                            candidate = base_ip.wrapping_add(table[hash as usize] as usize);
                            table[hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
                            if IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..]) {
                                break;
                            }
                        }
                        // Reject matches beyond the maximum backward distance.
                        if !(ip_index.wrapping_sub(candidate) > (1usize << 18).wrapping_sub(16)
                            && code_block_selection == CodeBlockState::EMIT_COMMANDS)
                        {
                            break;
                        }
                    }
                    if code_block_selection != CodeBlockState::EMIT_COMMANDS {
                        break;
                    }
                    // Step 2: emit the match together with the literals
                    // accumulated since the last emit.
                    let base: usize = ip_index;
                    let matched = 5usize.wrapping_add(FindMatchLengthWithLimit(
                        &input_ptr[candidate + 5..],
                        &input_ptr[ip_index + 5..],
                        ip_end.wrapping_sub(ip_index).wrapping_sub(5),
                    ));
                    let distance = base.wrapping_sub(candidate) as i32;
                    let insert = base.wrapping_sub(next_emit);
                    ip_index = ip_index.wrapping_add(matched);
                    if insert < 6210 {
                        EmitInsertLen(
                            insert,
                            cmd_depth,
                            cmd_bits,
                            &mut cmd_histo[..],
                            storage_ix,
                            storage,
                        );
                    } else if ShouldUseUncompressedMode(
                        (next_emit as isize) - (metablock_start as isize),
                        insert,
                        literal_ratio,
                    ) {
                        EmitUncompressedMetaBlock(
                            &input_ptr[metablock_start..],
                            base.wrapping_sub(metablock_start),
                            mlen_storage_ix.wrapping_sub(3),
                            storage_ix,
                            storage,
                        );
                        input_size = input_size.wrapping_sub(base.wrapping_sub(input_index));
                        input_index = base;
                        next_emit = input_index;
                        code_block_selection = CodeBlockState::NEXT_BLOCK;
                        continue 'continue_to_next_block;
                    } else {
                        EmitLongInsertLen(
                            insert,
                            cmd_depth,
                            cmd_bits,
                            &mut cmd_histo[..],
                            storage_ix,
                            storage,
                        );
                    }
                    EmitLiterals(
                        &input_ptr[next_emit..],
                        insert,
                        &lit_depth[..],
                        &lit_bits[..],
                        storage_ix,
                        storage,
                    );
                    if distance == last_distance {
                        BrotliWriteBits(
                            cmd_depth[64] as usize,
                            cmd_bits[64] as u64,
                            storage_ix,
                            storage,
                        );
                        cmd_histo[64] = cmd_histo[64].wrapping_add(1);
                    } else {
                        EmitDistance(
                            distance as usize,
                            cmd_depth,
                            cmd_bits,
                            &mut cmd_histo[..],
                            storage_ix,
                            storage,
                        );
                        last_distance = distance;
                    }
                    EmitCopyLenLastDistance(
                        matched,
                        cmd_depth,
                        cmd_bits,
                        &mut cmd_histo[..],
                        storage_ix,
                        storage,
                    );
                    next_emit = ip_index;
                    if ip_index >= ip_limit {
                        code_block_selection = CodeBlockState::EMIT_REMAINDER;
                        continue 'continue_to_next_block;
                    }
                    // Prime the hash table with the positions just stepped
                    // over, then look for another match right at ip_index.
                    assert!(ip_index >= 3);
                    let input_bytes: u64 = BROTLI_UNALIGNED_LOAD64(&input_ptr[ip_index - 3..]);
                    let mut prev_hash: u32 = HashBytesAtOffset(input_bytes, 0, shift);
                    let cur_hash: u32 = HashBytesAtOffset(input_bytes, 3, shift);
                    table[prev_hash as usize] =
                        ip_index.wrapping_sub(base_ip).wrapping_sub(3) as i32;
                    prev_hash = HashBytesAtOffset(input_bytes, 1, shift);
                    table[prev_hash as usize] =
                        ip_index.wrapping_sub(base_ip).wrapping_sub(2) as i32;
                    prev_hash = HashBytesAtOffset(input_bytes, 2, shift);
                    table[prev_hash as usize] =
                        ip_index.wrapping_sub(base_ip).wrapping_sub(1) as i32;
                    candidate = base_ip.wrapping_add(table[cur_hash as usize] as usize);
                    table[cur_hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
                    // Step 3: extend runs of back-to-back matches (no literals
                    // in between, so the insert length is zero).
                    while IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..]) {
                        let base: usize = ip_index;
                        let matched: usize = 5usize.wrapping_add(FindMatchLengthWithLimit(
                            &input_ptr[candidate + 5..],
                            &input_ptr[ip_index + 5..],
                            ip_end.wrapping_sub(ip_index).wrapping_sub(5),
                        ));
                        if ip_index.wrapping_sub(candidate) > (1usize << 18).wrapping_sub(16) {
                            break;
                        }
                        ip_index = ip_index.wrapping_add(matched);
                        last_distance = base.wrapping_sub(candidate) as i32;
                        EmitCopyLen(
                            matched,
                            cmd_depth,
                            cmd_bits,
                            &mut cmd_histo[..],
                            storage_ix,
                            storage,
                        );
                        EmitDistance(
                            last_distance as usize,
                            cmd_depth,
                            cmd_bits,
                            &mut cmd_histo[..],
                            storage_ix,
                            storage,
                        );
                        next_emit = ip_index;
                        if ip_index >= ip_limit {
                            code_block_selection = CodeBlockState::EMIT_REMAINDER;
                            continue 'continue_to_next_block;
                        }
                        assert!(ip_index >= 3);
                        let input_bytes: u64 =
                            BROTLI_UNALIGNED_LOAD64(&input_ptr[ip_index - 3..]);
                        let mut prev_hash: u32 = HashBytesAtOffset(input_bytes, 0, shift);
                        let cur_hash: u32 = HashBytesAtOffset(input_bytes, 3, shift);
                        table[prev_hash as usize] =
                            ip_index.wrapping_sub(base_ip).wrapping_sub(3) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 1, shift);
                        table[prev_hash as usize] =
                            ip_index.wrapping_sub(base_ip).wrapping_sub(2) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 2, shift);
                        table[prev_hash as usize] =
                            ip_index.wrapping_sub(base_ip).wrapping_sub(1) as i32;
                        candidate = base_ip.wrapping_add(table[cur_hash as usize] as usize);
                        table[cur_hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
                    }
                    if code_block_selection == CodeBlockState::EMIT_REMAINDER {
                        break;
                    }
                    if code_block_selection == CodeBlockState::EMIT_COMMANDS {
                        ip_index = ip_index.wrapping_add(1);
                        next_hash = Hash(&input_ptr[ip_index..], shift);
                    }
                }
            }
            code_block_selection = CodeBlockState::EMIT_REMAINDER;
            continue 'continue_to_next_block;
        } else if code_block_selection == CodeBlockState::EMIT_REMAINDER {
            input_index = input_index.wrapping_add(block_size);
            input_size = input_size.wrapping_sub(block_size);
            block_size = min(input_size, kMergeBlockSize);
            // If the next block still codes well with the current literal
            // code, merge it into the current meta-block by patching MLEN.
            if input_size > 0
                && total_block_size.wrapping_add(block_size) <= (1 << 20)
                && ShouldMergeBlock(&input_ptr[input_index..], block_size, &lit_depth[..])
            {
                total_block_size = total_block_size.wrapping_add(block_size);
                UpdateBits(
                    20,
                    total_block_size.wrapping_sub(1) as u32,
                    mlen_storage_ix,
                    storage,
                );
                code_block_selection = CodeBlockState::EMIT_COMMANDS;
                continue 'continue_to_next_block;
            }
            // Emit the remaining bytes as literals.
            if next_emit < ip_end {
                let insert: usize = ip_end.wrapping_sub(next_emit);
                if insert < 6210 {
                    EmitInsertLen(
                        insert,
                        cmd_depth,
                        cmd_bits,
                        &mut cmd_histo[..],
                        storage_ix,
                        storage,
                    );
                    EmitLiterals(
                        &input_ptr[next_emit..],
                        insert,
                        &lit_depth[..],
                        &lit_bits[..],
                        storage_ix,
                        storage,
                    );
                } else if ShouldUseUncompressedMode(
                    next_emit as isize - metablock_start as isize,
                    insert,
                    literal_ratio,
                ) {
                    EmitUncompressedMetaBlock(
                        &input_ptr[metablock_start..],
                        ip_end.wrapping_sub(metablock_start),
                        mlen_storage_ix.wrapping_sub(3),
                        storage_ix,
                        storage,
                    );
                } else {
                    EmitLongInsertLen(
                        insert,
                        cmd_depth,
                        cmd_bits,
                        &mut cmd_histo[..],
                        storage_ix,
                        storage,
                    );
                    EmitLiterals(
                        &input_ptr[next_emit..],
                        insert,
                        &lit_depth[..],
                        &lit_bits[..],
                        storage_ix,
                        storage,
                    );
                }
            }
            next_emit = ip_end;
            code_block_selection = CodeBlockState::NEXT_BLOCK;
            continue 'continue_to_next_block;
        } else if code_block_selection == CodeBlockState::NEXT_BLOCK {
            if input_size > 0 {
                // Start a new meta-block with fresh literal and command codes.
                metablock_start = input_index;
                block_size = min(input_size, kFirstBlockSize);
                total_block_size = block_size;
                mlen_storage_ix = storage_ix.wrapping_add(3);
                store_meta_block_header(block_size, false, storage_ix, storage);
                BrotliWriteBits(13, 0, storage_ix, storage);
                literal_ratio = BuildAndStoreLiteralPrefixCode(
                    m,
                    &input_ptr[input_index..],
                    block_size,
                    &mut lit_depth[..],
                    &mut lit_bits[..],
                    storage_ix,
                    storage,
                );
                BuildAndStoreCommandPrefixCode(
                    &cmd_histo[..],
                    cmd_depth,
                    cmd_bits,
                    storage_ix,
                    storage,
                );
                code_block_selection = CodeBlockState::EMIT_COMMANDS;
                continue 'continue_to_next_block;
            }
            break;
        }
    }
    if !is_last {
        // Serialize the command prefix code for the next fragment to reuse.
        cmd_code[0] = 0;
        *cmd_code_numbits = 0;
        BuildAndStoreCommandPrefixCode(
            &cmd_histo[..],
            cmd_depth,
            cmd_bits,
            cmd_code_numbits,
            cmd_code,
        );
    }
}

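/// Generates a wrapper that calls `compress_fragment_fast_impl` with a fixed
/// `table_bits`, so the hot loop can be specialized per hash-table size.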
macro_rules! compress_specialization {
    ($table_bits:expr, $fname:ident) => {
        fn $fname<AllocHT: alloc::Allocator<HuffmanTree>>(
            mht: &mut AllocHT,
            input: &[u8],
            input_size: usize,
            is_last: bool,
            table: &mut [i32],
            cmd_depth: &mut [u8],
            cmd_bits: &mut [u16],
            cmd_code_numbits: &mut usize,
            cmd_code: &mut [u8],
            storage_ix: &mut usize,
            storage: &mut [u8],
        ) {
            compress_fragment_fast_impl(
                mht,
                input,
                input_size,
                is_last,
                table,
                $table_bits,
                cmd_depth,
                cmd_bits,
                cmd_code_numbits,
                cmd_code,
                storage_ix,
                storage,
            );
        }
    };
}

compress_specialization!(9, BrotliCompressFragmentFastImpl9);
compress_specialization!(11, BrotliCompressFragmentFastImpl11);
compress_specialization!(13, BrotliCompressFragmentFastImpl13);
compress_specialization!(15, BrotliCompressFragmentFastImpl15);

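/// Deprecated C-style entry point, kept for API compatibility; forwards to
/// `compress_fragment_fast`.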
#[deprecated(note = "use compress_fragment_fast instead")]
pub fn BrotliCompressFragmentFast<AllocHT: alloc::Allocator<HuffmanTree>>(
    m: &mut AllocHT,
    input: &[u8],
    input_size: usize,
    is_last: i32,
    table: &mut [i32],
    table_size: usize,
    cmd_depth: &mut [u8],
    cmd_bits: &mut [u16],
    cmd_code_numbits: &mut usize,
    cmd_code: &mut [u8],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    compress_fragment_fast(
        m,
        input,
        input_size,
        is_last != 0,
        table,
        table_size,
        cmd_depth,
        cmd_bits,
        cmd_code_numbits,
        cmd_code,
        storage_ix,
        storage,
    )
}

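/// Compresses `input` as one fragment of a Brotli stream, dispatching to the
/// specialization matching `table_size` (expected to be one of 2^9, 2^11,
/// 2^13, or 2^15) and falling back to an uncompressed meta-block if the
/// compressed form came out larger than the input.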
pub(crate) fn compress_fragment_fast<AllocHT: alloc::Allocator<HuffmanTree>>(
    m: &mut AllocHT,
    input: &[u8],
    input_size: usize,
    is_last: bool,
    table: &mut [i32],
    table_size: usize,
    cmd_depth: &mut [u8],
    cmd_bits: &mut [u16],
    cmd_code_numbits: &mut usize,
    cmd_code: &mut [u8],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let initial_storage_ix: usize = *storage_ix;
    let table_bits: usize = Log2FloorNonZero(table_size as u64) as usize;
    if input_size == 0 {
        // An empty fragment still terminates the stream: emit ISLAST and
        // ISLASTEMPTY, then pad to a byte boundary.
        BrotliWriteBits(1, 1, storage_ix, storage);
        BrotliWriteBits(1, 1, storage_ix, storage);
        *storage_ix = storage_ix.wrapping_add(7) & !7usize;
        return;
    }
    match table_bits {
        9 => BrotliCompressFragmentFastImpl9(
            m, input, input_size, is_last, table, cmd_depth, cmd_bits, cmd_code_numbits, cmd_code,
            storage_ix, storage,
        ),
        11 => BrotliCompressFragmentFastImpl11(
            m, input, input_size, is_last, table, cmd_depth, cmd_bits, cmd_code_numbits, cmd_code,
            storage_ix, storage,
        ),
        13 => BrotliCompressFragmentFastImpl13(
            m, input, input_size, is_last, table, cmd_depth, cmd_bits, cmd_code_numbits, cmd_code,
            storage_ix, storage,
        ),
        15 => BrotliCompressFragmentFastImpl15(
            m, input, input_size, is_last, table, cmd_depth, cmd_bits, cmd_code_numbits, cmd_code,
            storage_ix, storage,
        ),
        _ => {}
    }
    // If the compressed form ended up larger than the input, rewind and emit
    // an uncompressed meta-block instead.
    if storage_ix.wrapping_sub(initial_storage_ix) > 31usize.wrapping_add(input_size << 3) {
        EmitUncompressedMetaBlock(input, input_size, initial_storage_ix, storage_ix, storage);
    }
    if is_last {
        // ISLAST, ISLASTEMPTY, then pad to a byte boundary.
        BrotliWriteBits(1, 1, storage_ix, storage);
        BrotliWriteBits(1, 1, storage_ix, storage);
        *storage_ix = storage_ix.wrapping_add(7) & !7usize;
    }
}