1#![allow(unknown_lints)]
2#![allow(unused_macros)]
3
4use core::cmp::{max, min};
5#[cfg(feature = "std")]
6use std::io::Write;
7
8use super::super::alloc::{Allocator, SliceWrapper, SliceWrapperMut};
9use super::super::dictionary::{
10 kBrotliDictionary, kBrotliDictionaryOffsetsByLength, kBrotliDictionarySizeBitsByLength,
11};
12use super::super::transform::TransformDictionaryWord;
13use super::super::{alloc, core};
14use super::block_split::BlockSplit;
15use super::combined_alloc::BrotliAlloc;
16use super::command::{Command, GetCopyLengthCode, GetInsertLengthCode};
17use super::constants::{
18 kCodeLengthBits, kCodeLengthDepth, kCopyBase, kCopyExtra, kInsBase, kInsExtra,
19 kNonZeroRepsBits, kNonZeroRepsDepth, kSigned3BitContextLookup, kStaticCommandCodeBits,
20 kStaticCommandCodeDepth, kStaticDistanceCodeBits, kStaticDistanceCodeDepth, kUTF8ContextLookup,
21 kZeroRepsBits, kZeroRepsDepth, BROTLI_CONTEXT_LUT, BROTLI_NUM_BLOCK_LEN_SYMBOLS,
22 BROTLI_NUM_COMMAND_SYMBOLS, BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS, BROTLI_NUM_LITERAL_SYMBOLS,
23};
24use super::context_map_entropy::{speed_to_tuple, ContextMapEntropy, SpeedAndMax};
25use super::entropy_encode::{
26 BrotliConvertBitDepthsToSymbols, BrotliCreateHuffmanTree, BrotliSetDepth,
27 BrotliWriteHuffmanTree, HuffmanComparator, HuffmanTree, SortHuffmanTreeItems,
28};
29use super::histogram::{
30 ContextType, HistogramAddItem, HistogramCommand, HistogramDistance, HistogramLiteral,
31};
32use super::input_pair::{InputPair, InputReference, InputReferenceMut};
33use super::interface::StaticCommand;
34use super::static_dict::kNumDistanceCacheEntries;
35use super::util::floatX;
36use super::{find_stride, interface, prior_eval, stride_eval};
37use crate::enc::backward_references::BrotliEncoderParams;
38use crate::enc::combined_alloc::{alloc_default, alloc_or_default, allocate};
39use crate::VERSION;
40
/// One entry of a prefix-code table: values in this bucket start at `offset`
/// and are distinguished by `nbits` extra bits (so each bucket covers
/// `1 << nbits` consecutive values — see `kBlockLengthPrefixCode` below).
pub struct PrefixCodeRange {
    // First value represented by this prefix symbol.
    pub offset: u32,
    // Number of extra bits transmitted after the prefix symbol.
    pub nbits: u32,
}
// Upper bound on the distance alphabet size in the "simple" distance-code
// case; used to size stack tables (callers are outside this chunk — confirm there).
pub const MAX_SIMPLE_DISTANCE_ALPHABET_SIZE: usize = 140;
46
/// Usable sliding-window size in bytes for a given log2 window parameter:
/// the full `2^lgwin` window minus a 16-byte gap.
fn window_size_from_lgwin(lgwin: i32) -> usize {
    let full_window = 1usize << lgwin;
    full_window - 16
}
50
/// Accumulates frozen IR commands (`StaticCommand`) for one metablock before
/// they are handed to the user callback in `free()`. Owns the scratch state
/// used by stride/entropy analysis so it can release it in one place.
struct CommandQueue<'a, Alloc: BrotliAlloc + 'a> {
    // The metablock input the commands refer to.
    mb: InputPair<'a>,
    #[allow(dead_code)]
    mb_byte_offset: usize,
    // Allocator used for `queue` and the analysis scratch buffers.
    mc: &'a mut Alloc,
    // Backing storage for pushed commands; grown on demand in `push`.
    queue: <Alloc as Allocator<StaticCommand>>::AllocatedMemory,
    pred_mode: interface::PredictionModeContextMap<InputReferenceMut<'a>>,
    // Number of valid entries in `queue`.
    loc: usize,
    entropy_tally_scratch: find_stride::EntropyTally<Alloc>,
    best_strides_per_block_type: <Alloc as Allocator<u8>>::AllocatedMemory,
    entropy_pyramid: find_stride::EntropyPyramid<Alloc>,
    context_map_entropy: ContextMapEntropy<'a, Alloc>,
    #[allow(dead_code)]
    stride_detection_quality: u8,
    #[allow(dead_code)]
    high_entropy_detection_quality: u8,
    block_type_literal: u8,
    #[allow(dead_code)]
    best_stride_index: usize,
    // Set when a push could not be stored even after growing; reported as
    // Err(()) from `free`.
    overfull: bool,
}
73
impl<'a, Alloc: BrotliAlloc> CommandQueue<'a, Alloc> {
    /// Builds a queue sized for `num_commands` plus ~6% headroom and a small
    /// constant (`* 17 / 16 + 4`) so block switches and split literal runs
    /// usually fit without reallocating.
    fn new(
        alloc: &'a mut Alloc,
        num_commands: usize,
        pred_mode: interface::PredictionModeContextMap<InputReferenceMut<'a>>,
        mb: InputPair<'a>,
        stride_detection_quality: u8,
        high_entropy_detection_quality: u8,
        context_map_entropy: ContextMapEntropy<'a, Alloc>,
        best_strides: <Alloc as Allocator<u8>>::AllocatedMemory,
        entropy_tally_scratch: find_stride::EntropyTally<Alloc>,
        entropy_pyramid: find_stride::EntropyPyramid<Alloc>,
    ) -> CommandQueue<'a, Alloc> {
        let queue = allocate::<StaticCommand, _>(alloc, num_commands * 17 / 16 + 4);
        CommandQueue {
            mc: alloc,
            queue,
            pred_mode,
            mb,
            mb_byte_offset: 0,
            loc: 0,
            best_strides_per_block_type: best_strides,
            entropy_tally_scratch,
            entropy_pyramid,
            stride_detection_quality,
            high_entropy_detection_quality,
            context_map_entropy,
            block_type_literal: 0,
            best_stride_index: 0,
            overfull: false,
        }
    }
    // True when every allocated slot holds a command.
    fn full(&self) -> bool {
        self.loc == self.queue.len()
    }
    // Records (but does not report) an overflow; surfaced later by `free`.
    fn error_if_full(&mut self) {
        if self.full() {
            self.overfull = true;
        }
    }
    // Resets the queue for reuse without releasing its storage.
    fn clear(&mut self) {
        self.loc = 0;
        self.block_type_literal = 0;
    }
    /// Delivers the accumulated commands to `callback`, then releases every
    /// buffer this queue owns. Returns Err(()) if any push overflowed.
    /// Must be called before drop (see the Drop impl's warning).
    fn free<Cb>(&mut self, callback: &mut Cb) -> Result<(), ()>
    where
        Cb: FnMut(
            &mut interface::PredictionModeContextMap<InputReferenceMut>,
            &mut [interface::StaticCommand],
            InputPair,
            &mut Alloc,
        ),
    {
        callback(
            &mut self.pred_mode,
            self.queue.slice_mut().split_at_mut(self.loc).0,
            self.mb,
            self.mc,
        );
        self.clear();
        // Free scratch state first, then the queue and stride buffers;
        // `core::mem::take` leaves empty placeholders so Drop stays safe.
        self.entropy_tally_scratch.free(self.mc);
        self.entropy_pyramid.free(self.mc);
        self.context_map_entropy.free(self.mc);
        <Alloc as Allocator<StaticCommand>>::free_cell(self.mc, core::mem::take(&mut self.queue));
        <Alloc as Allocator<u8>>::free_cell(
            self.mc,
            core::mem::take(&mut self.best_strides_per_block_type),
        );
        if self.overfull {
            return Err(());
        }
        Ok(())
    }
}
150
151impl<'a, Alloc: BrotliAlloc> interface::CommandProcessor<'a> for CommandQueue<'a, Alloc> {
152 fn push(&mut self, val: interface::Command<InputReference<'a>>) {
153 if self.full() {
154 let mut tmp = allocate::<StaticCommand, _>(self.mc, self.queue.slice().len() * 2);
155 tmp.slice_mut()
156 .split_at_mut(self.queue.slice().len())
157 .0
158 .clone_from_slice(self.queue.slice());
159 <Alloc as Allocator<StaticCommand>>::free_cell(
160 self.mc,
161 core::mem::replace(&mut self.queue, tmp),
162 );
163 }
164 if !self.full() {
165 self.queue.slice_mut()[self.loc] = val.freeze();
166 self.loc += 1;
167 } else {
168 self.error_if_full();
169 }
170 }
171 fn push_block_switch_literal(&mut self, block_type: u8) {
172 self.push(interface::Command::BlockSwitchLiteral(
173 interface::LiteralBlockSwitch::new(block_type, 0),
174 ))
175 }
176}
177
// Diagnostic emitted when a CommandQueue is dropped without `free()` having
// been called (its scratch allocations would leak).
#[cfg(feature = "std")]
fn warn_on_missing_free() {
    // Best-effort: the write result is deliberately ignored since there is
    // nothing useful to do if stderr itself fails.
    let _err = ::std::io::stderr()
        .write(b"Need to free entropy_tally_scratch before dropping CommandQueue\n");
}
#[cfg(not(feature = "std"))]
fn warn_on_missing_free() {
    // No stderr without std; the warning is silently dropped.
}
187impl<'a, Alloc: BrotliAlloc> Drop for CommandQueue<'a, Alloc> {
188 fn drop(&mut self) {
189 if !self.entropy_tally_scratch.is_free() {
190 warn_on_missing_free();
191 }
192 }
193}
// No-op unless the "billing" diagnostics feature is enabled.
#[cfg(not(feature = "billing"))]
fn best_singleton_speed_log(_name: &str, _data: &[SpeedAndMax; 2], _cost: &[floatX; 2]) {}
// Prints the best single (speed, max) pair and its cost for the low and high
// entropy buckets of one adaptation category.
#[cfg(feature = "billing")]
fn best_singleton_speed_log(name: &str, data: &[SpeedAndMax; 2], cost: &[floatX; 2]) {
    println!(
        "{} hi cost: {} lo cost: {} speeds {:?} {:?}",
        name, cost[1], cost[0], data[1], data[0]
    );
}
203
// No-op unless the "billing" diagnostics feature is enabled.
#[cfg(not(feature = "billing"))]
fn best_speed_log(_name: &str, _data: &[SpeedAndMax; 2], _cost: &[floatX; 2]) {}
// Prints one line per bucket (index 0 = "lo", 1 = "hi") with the chosen
// speed parameters and their cost.
#[cfg(feature = "billing")]
fn best_speed_log(name: &str, data: &[SpeedAndMax; 2], cost: &[floatX; 2]) {
    for high in 0..2 {
        println!(
            "{} Speed [ inc: {}, max: {}, algo: {} ] cost: {}",
            name,
            if high != 0 { "hi" } else { "lo" },
            data[high].0,
            data[high].1,
            cost[high]
        );
    }
}
219
/// Replays a metablock's `commands` over `input`, translating each into IR
/// events (literals, copies, dictionary words, block switches) delivered to
/// `command_queue`. Mirrors decoder-side state — a local distance cache and
/// per-category block-length countdowns — and returns the updated
/// `recoder_state` with `num_bytes_encoded` advanced past this metablock.
fn process_command_queue<'a, CmdProcessor: interface::CommandProcessor<'a>>(
    command_queue: &mut CmdProcessor,
    input: InputPair<'a>,
    commands: &[Command],
    dist_cache: &[i32; kNumDistanceCacheEntries],
    mut recoder_state: RecoderState,
    block_type: &MetaBlockSplitRefs,
    params: &BrotliEncoderParams,
    context_type: Option<ContextType>,
) -> RecoderState {
    let mut input_iter = input;
    // Working copy of the distance cache, updated as commands are replayed.
    let mut local_dist_cache = [0i32; kNumDistanceCacheEntries];
    local_dist_cache.clone_from_slice(&dist_cache[..]);
    let mut btypel_counter = 0usize;
    let mut btypec_counter = 0usize;
    let mut btyped_counter = 0usize;
    // Remaining length of the current block per category. A single block
    // type is modeled as one effectively unbounded block (1 << 31).
    let mut btypel_sub = if block_type.btypel.num_types == 1 {
        1u32 << 31
    } else {
        block_type.btypel.lengths[0]
    };
    let mut btypec_sub = if block_type.btypec.num_types == 1 {
        1u32 << 31
    } else {
        block_type.btypec.lengths[0]
    };
    let mut btyped_sub = if block_type.btyped.num_types == 1 {
        1u32 << 31
    } else {
        block_type.btyped.lengths[0]
    };
    {
        // Every metablock starts in literal block type 0.
        command_queue.push_block_switch_literal(0);
    }
    let mut mb_len = input.len();
    for cmd in commands.iter() {
        // Split off this command's insert (literal) bytes; `interim` starts
        // at the copy part.
        let (inserts, interim) = input_iter.split_at(min(cmd.insert_len_ as usize, mb_len));
        recoder_state.num_bytes_encoded += inserts.len();
        let _copy_cursor = input.len() - interim.len();
        let copylen_code = cmd.copy_len_code();

        // Resolve the command's distance: index 0 is an absolute distance,
        // otherwise it is cache-relative (1-based into the local cache).
        let (prev_dist_index, dist_offset) = cmd.distance_index_and_offset(&params.dist);
        let final_distance: usize;
        if prev_dist_index == 0 {
            final_distance = dist_offset as usize;
        } else {
            final_distance =
                (local_dist_cache[prev_dist_index - 1] as isize + dist_offset) as usize;
        }
        let copy_len = copylen_code as usize;
        let actual_copy_len: usize;
        // Distances beyond this bound refer to the static dictionary.
        let max_distance = min(
            recoder_state.num_bytes_encoded,
            window_size_from_lgwin(params.lgwin),
        );
        assert!(inserts.len() <= mb_len);
        if inserts.len() != 0 {
            // Emit literals, splitting the run at literal block boundaries.
            let mut tmp_inserts = inserts;
            while tmp_inserts.len() > btypel_sub as usize {
                let (in_a, in_b) = tmp_inserts.split_at(btypel_sub as usize);
                if in_a.len() != 0 {
                    if context_type.is_some() {
                        command_queue.push_literals(&in_a);
                    } else if params.high_entropy_detection_quality == 0 {
                        command_queue.push_literals(&in_a);
                    } else {
                        command_queue.push_rand_literals(&in_a);
                    }
                }
                mb_len -= in_a.len();
                tmp_inserts = in_b;
                btypel_counter += 1;
                if block_type.btypel.types.len() > btypel_counter {
                    btypel_sub = block_type.btypel.lengths[btypel_counter];
                    command_queue
                        .push_block_switch_literal(block_type.btypel.types[btypel_counter]);
                } else {
                    // Ran out of declared blocks: treat the rest as unbounded.
                    btypel_sub = 1u32 << 31;
                }
            }
            // Remainder of the insert run fits in the current block.
            if context_type.is_some() {
                command_queue.push_literals(&tmp_inserts);
            } else if params.high_entropy_detection_quality == 0 {
                command_queue.push_literals(&tmp_inserts);
            } else {
                command_queue.push_rand_literals(&tmp_inserts);
            }
            if tmp_inserts.len() != 0 {
                mb_len -= tmp_inserts.len();
                btypel_sub -= tmp_inserts.len() as u32;
            }
        }
        if final_distance > max_distance {
            // Static-dictionary reference: decode the word id and transform,
            // then verify the transformed word matches the actual input.
            assert!(copy_len >= 4);
            assert!(copy_len < 25);
            let dictionary_offset = final_distance - max_distance - 1;
            let ndbits = kBrotliDictionarySizeBitsByLength[copy_len] as usize;
            let action = dictionary_offset >> ndbits;
            let word_sub_index = dictionary_offset & ((1 << ndbits) - 1);
            let word_index =
                word_sub_index * copy_len + kBrotliDictionaryOffsetsByLength[copy_len] as usize;
            let raw_word = &kBrotliDictionary[word_index..word_index + copy_len];
            let mut transformed_word = [0u8; 38];
            actual_copy_len = TransformDictionaryWord(
                &mut transformed_word[..],
                raw_word,
                copy_len as i32,
                action as i32,
            ) as usize;
            if actual_copy_len <= mb_len {
                command_queue.push(interface::Command::Dict(interface::DictCommand {
                    word_size: copy_len as u8,
                    transform: action as u8,
                    final_size: actual_copy_len as u8,
                    empty: 0,
                    word_id: word_sub_index as u32,
                }));
                mb_len -= actual_copy_len;
                assert_eq!(
                    InputPair(
                        InputReference {
                            data: transformed_word.split_at(actual_copy_len).0,
                            orig_offset: 0
                        },
                        InputReference::default()
                    ),
                    interim.split_at(actual_copy_len).0
                );
            } else if mb_len != 0 {
                // Word overruns the metablock: emit what fits as literals.
                command_queue.push_literals(&interim.split_at(mb_len).0);
                mb_len = 0;
                assert_eq!(
                    InputPair(
                        InputReference {
                            data: transformed_word.split_at(mb_len).0,
                            orig_offset: 0
                        },
                        InputReference::default()
                    ),
                    interim.split_at(mb_len).0
                );
            }
        } else {
            // Ordinary backward copy, clamped to the remaining metablock.
            actual_copy_len = min(mb_len, copy_len);
            if actual_copy_len != 0 {
                command_queue.push(interface::Command::Copy(interface::CopyCommand {
                    distance: final_distance as u32,
                    num_bytes: actual_copy_len as u32,
                }));
            }
            mb_len -= actual_copy_len;
            // Distance cache update: skipped only for the "last distance"
            // shortcut (index 1, offset 0), matching the format's rules.
            if prev_dist_index != 1 || dist_offset != 0 {
                let mut tmp_dist_cache = [0i32; kNumDistanceCacheEntries - 1];
                tmp_dist_cache.clone_from_slice(&local_dist_cache[..kNumDistanceCacheEntries - 1]);
                local_dist_cache[1..].clone_from_slice(&tmp_dist_cache[..]);
                local_dist_cache[0] = final_distance as i32;
            }
        }
        {
            // Command block countdown: one command consumed.
            btypec_sub -= 1;
            if btypec_sub == 0 {
                btypec_counter += 1;
                if block_type.btypec.types.len() > btypec_counter {
                    btypec_sub = block_type.btypec.lengths[btypec_counter];
                    command_queue.push(interface::Command::BlockSwitchCommand(
                        interface::BlockSwitch(block_type.btypec.types[btypec_counter]),
                    ));
                } else {
                    btypec_sub = 1u32 << 31;
                }
            }
        }
        // Distance block countdown: only commands that actually carry a
        // distance code (cmd_prefix_ >= 128) consume a slot.
        if copy_len != 0 && cmd.cmd_prefix_ >= 128 {
            btyped_sub -= 1;
            if btyped_sub == 0 {
                btyped_counter += 1;
                if block_type.btyped.types.len() > btyped_counter {
                    btyped_sub = block_type.btyped.lengths[btyped_counter];
                    command_queue.push(interface::Command::BlockSwitchDistance(
                        interface::BlockSwitch(block_type.btyped.types[btyped_counter]),
                    ));
                } else {
                    btyped_sub = 1u32 << 31;
                }
            }
        }

        // Advance past the copied bytes for the next command.
        let (copied, remainder) = interim.split_at(actual_copy_len);
        recoder_state.num_bytes_encoded += copied.len();
        input_iter = remainder;
    }
    recoder_state
}
419
/// Analysis-and-report pipeline for one metablock: builds the prediction
/// mode / context maps, optionally runs stride, CDF-adaptation and prior
/// bitmask detection passes over the commands, then replays the commands
/// into a `CommandQueue` whose contents are handed to `callback`.
/// `recoder_state` is advanced past this metablock on success.
fn LogMetaBlock<'a, Alloc: BrotliAlloc, Cb>(
    alloc: &mut Alloc,
    commands: &[Command],
    input0: &'a [u8],
    input1: &'a [u8],
    dist_cache: &[i32; kNumDistanceCacheEntries],
    recoder_state: &mut RecoderState,
    block_type: MetaBlockSplitRefs,
    params: &BrotliEncoderParams,
    context_type: Option<ContextType>,
    callback: &mut Cb,
) where
    Cb: FnMut(
        &mut interface::PredictionModeContextMap<InputReferenceMut>,
        &mut [interface::StaticCommand],
        InputPair,
        &mut Alloc,
    ),
{
    // Stack copies of the context maps (max 256 block types x 64 contexts);
    // the distance map is offset to leave room for the prediction-mode header.
    let mut local_literal_context_map = [0u8; 256 * 64];
    let mut local_distance_context_map = [0u8; 256 * 64 + interface::DISTANCE_CONTEXT_MAP_OFFSET];
    // Block types must be densely numbered 0..num_types in each category.
    assert_eq!(
        *block_type.btypel.types.iter().max().unwrap_or(&0) as u32 + 1,
        block_type.btypel.num_types
    );
    assert_eq!(
        *block_type.btypec.types.iter().max().unwrap_or(&0) as u32 + 1,
        block_type.btypec.num_types
    );
    assert_eq!(
        *block_type.btyped.types.iter().max().unwrap_or(&0) as u32 + 1,
        block_type.btyped.num_types
    );
    if block_type.literal_context_map.len() <= 256 * 64 {
        for (index, item) in block_type.literal_context_map.iter().enumerate() {
            local_literal_context_map[index] = *item as u8;
        }
    }
    if block_type.distance_context_map.len() <= 256 * 64 {
        for (index, item) in block_type.distance_context_map.iter().enumerate() {
            local_distance_context_map[interface::DISTANCE_CONTEXT_MAP_OFFSET + index] =
                *item as u8;
        }
    }

    let mut prediction_mode = interface::PredictionModeContextMap::<InputReferenceMut> {
        literal_context_map: InputReferenceMut {
            data: local_literal_context_map
                .split_at_mut(block_type.literal_context_map.len())
                .0,
            orig_offset: 0,
        },
        predmode_speed_and_distance_context_map: InputReferenceMut {
            data: local_distance_context_map
                .split_at_mut(
                    interface::PredictionModeContextMap::<InputReference>::size_of_combined_array(
                        block_type.distance_context_map.len(),
                    ),
                )
                .0,
            orig_offset: 0,
        },
    };
    // Default every mixing value to the STRIDE1 prior.
    for item in prediction_mode.get_mixing_values_mut().iter_mut() {
        *item = prior_eval::WhichPrior::STRIDE1 as u8;
    }
    // Seed adaptation speeds from the encoder parameters: [0],[1] drive the
    // context map (and combined), [2],[3] drive the stride model.
    prediction_mode
        .set_stride_context_speed([params.literal_adaptation[2], params.literal_adaptation[3]]);
    prediction_mode
        .set_context_map_speed([params.literal_adaptation[0], params.literal_adaptation[1]]);
    prediction_mode.set_combined_stride_context_speed([
        params.literal_adaptation[0],
        params.literal_adaptation[1],
    ]);

    prediction_mode.set_literal_prediction_mode(interface::LiteralPredictionModeNibble(
        context_type.unwrap_or(ContextType::CONTEXT_LSB6) as u8,
    ));
    // Stride analysis scratch is only populated for quality 1 or 2;
    // otherwise cheap placeholders are used (still freed the same way).
    let mut entropy_tally_scratch;
    let mut entropy_pyramid;
    if params.stride_detection_quality == 1 || params.stride_detection_quality == 2 {
        entropy_tally_scratch = find_stride::EntropyTally::<Alloc>::new(alloc, None);
        entropy_pyramid = find_stride::EntropyPyramid::<Alloc>::new(alloc);
        entropy_pyramid.populate(input0, input1, &mut entropy_tally_scratch);
    } else {
        entropy_tally_scratch = find_stride::EntropyTally::<Alloc>::disabled_placeholder(alloc);
        entropy_pyramid = find_stride::EntropyPyramid::<Alloc>::disabled_placeholder(alloc);
    }
    // The metablock may span two input slices (ring-buffer wrap).
    let input = InputPair(
        InputReference {
            data: input0,
            orig_offset: 0,
        },
        InputReference {
            data: input1,
            orig_offset: input0.len(),
        },
    );
    let mut best_strides = alloc_default::<u8, Alloc>();
    // Pass 1 (optional): pick the best stride per literal block type.
    if params.stride_detection_quality > 2 {
        let mut stride_selector =
            stride_eval::StrideEval::<Alloc>::new(alloc, input, &prediction_mode, params);
        process_command_queue(
            &mut stride_selector,
            input,
            commands,
            dist_cache,
            *recoder_state,
            &block_type,
            params,
            context_type,
        );
        let ntypes = stride_selector.num_types();
        best_strides = allocate::<u8, _>(stride_selector.alloc(), ntypes);
        stride_selector.choose_stride(best_strides.slice_mut());
    }
    let mut context_map_entropy = ContextMapEntropy::<Alloc>::new(
        alloc,
        input,
        entropy_pyramid.stride_last_level_range(),
        prediction_mode,
        params.cdf_adaptation_detection,
    );
    // Pass 2 (optional): search for the best CDF adaptation speeds.
    if params.cdf_adaptation_detection != 0 {
        process_command_queue(
            &mut context_map_entropy,
            input,
            commands,
            dist_cache,
            *recoder_state,
            &block_type,
            params,
            context_type,
        );
        {
            // Diagnostics only (no-ops unless the "billing" feature is on).
            let (cm_speed, cm_cost) = context_map_entropy.best_singleton_speeds(true, false);
            let (stride_speed, stride_cost) =
                context_map_entropy.best_singleton_speeds(false, false);
            let (combined_speed, combined_cost) =
                context_map_entropy.best_singleton_speeds(false, true);
            best_singleton_speed_log("CM", &cm_speed, &cm_cost);
            best_singleton_speed_log("stride", &stride_speed, &stride_cost);
            best_singleton_speed_log("combined", &combined_speed, &combined_cost);
        }

        // Commit the winning speeds into the prediction mode header.
        let cm_speed = context_map_entropy.best_speeds(true, false);
        let stride_speed = context_map_entropy.best_speeds(false, false);
        let combined_speed = context_map_entropy.best_speeds(false, true);
        let acost = context_map_entropy.best_speeds_costs(true, false);
        let bcost = context_map_entropy.best_speeds_costs(false, false);
        let ccost = context_map_entropy.best_speeds_costs(false, true);
        context_map_entropy
            .prediction_mode_mut()
            .set_stride_context_speed(speed_to_tuple(stride_speed));
        context_map_entropy
            .prediction_mode_mut()
            .set_context_map_speed(speed_to_tuple(cm_speed));
        context_map_entropy
            .prediction_mode_mut()
            .set_combined_stride_context_speed(speed_to_tuple(combined_speed));

        best_speed_log("CM", &cm_speed, &acost);
        best_speed_log("Stride", &stride_speed, &bcost);
        best_speed_log("StrideCombined", &combined_speed, &ccost);
    }
    // Pass 3 (optional): choose the per-context prior bitmask.
    let mut prior_selector = prior_eval::PriorEval::<Alloc>::new(
        alloc,
        input,
        entropy_pyramid.stride_last_level_range(),
        context_map_entropy.take_prediction_mode(),
        params,
    );
    if params.prior_bitmask_detection != 0 {
        process_command_queue(
            &mut prior_selector,
            input,
            commands,
            dist_cache,
            *recoder_state,
            &block_type,
            params,
            context_type,
        );
        prior_selector.choose_bitmask();
    }
    let prediction_mode = prior_selector.take_prediction_mode();
    prior_selector.free(alloc);
    // Final pass: record the commands; CommandQueue takes ownership of the
    // scratch buffers and frees them in `free`.
    let mut command_queue = CommandQueue::new(
        alloc,
        commands.len(),
        prediction_mode,
        input,
        params.stride_detection_quality,
        params.high_entropy_detection_quality,
        context_map_entropy,
        best_strides,
        entropy_tally_scratch,
        entropy_pyramid,
    );

    *recoder_state = process_command_queue(
        &mut command_queue,
        input,
        commands,
        dist_cache,
        *recoder_state,
        &block_type,
        params,
        context_type,
    );
    // Panics if the queue overflowed even after growth (overfull).
    command_queue.free(callback).unwrap();
}
634
635static kBlockLengthPrefixCode: [PrefixCodeRange; BROTLI_NUM_BLOCK_LEN_SYMBOLS] = [
636 PrefixCodeRange {
637 offset: 1u32,
638 nbits: 2u32,
639 },
640 PrefixCodeRange {
641 offset: 5u32,
642 nbits: 2u32,
643 },
644 PrefixCodeRange {
645 offset: 9u32,
646 nbits: 2u32,
647 },
648 PrefixCodeRange {
649 offset: 13u32,
650 nbits: 2u32,
651 },
652 PrefixCodeRange {
653 offset: 17u32,
654 nbits: 3u32,
655 },
656 PrefixCodeRange {
657 offset: 25u32,
658 nbits: 3u32,
659 },
660 PrefixCodeRange {
661 offset: 33u32,
662 nbits: 3u32,
663 },
664 PrefixCodeRange {
665 offset: 41u32,
666 nbits: 3u32,
667 },
668 PrefixCodeRange {
669 offset: 49u32,
670 nbits: 4u32,
671 },
672 PrefixCodeRange {
673 offset: 65u32,
674 nbits: 4u32,
675 },
676 PrefixCodeRange {
677 offset: 81u32,
678 nbits: 4u32,
679 },
680 PrefixCodeRange {
681 offset: 97u32,
682 nbits: 4u32,
683 },
684 PrefixCodeRange {
685 offset: 113u32,
686 nbits: 5u32,
687 },
688 PrefixCodeRange {
689 offset: 145u32,
690 nbits: 5u32,
691 },
692 PrefixCodeRange {
693 offset: 177u32,
694 nbits: 5u32,
695 },
696 PrefixCodeRange {
697 offset: 209u32,
698 nbits: 5u32,
699 },
700 PrefixCodeRange {
701 offset: 241u32,
702 nbits: 6u32,
703 },
704 PrefixCodeRange {
705 offset: 305u32,
706 nbits: 6u32,
707 },
708 PrefixCodeRange {
709 offset: 369u32,
710 nbits: 7u32,
711 },
712 PrefixCodeRange {
713 offset: 497u32,
714 nbits: 8u32,
715 },
716 PrefixCodeRange {
717 offset: 753u32,
718 nbits: 9u32,
719 },
720 PrefixCodeRange {
721 offset: 1265u32,
722 nbits: 10u32,
723 },
724 PrefixCodeRange {
725 offset: 2289u32,
726 nbits: 11u32,
727 },
728 PrefixCodeRange {
729 offset: 4337u32,
730 nbits: 12u32,
731 },
732 PrefixCodeRange {
733 offset: 8433u32,
734 nbits: 13u32,
735 },
736 PrefixCodeRange {
737 offset: 16625u32,
738 nbits: 24u32,
739 },
740];
741
/// Appends the low `n_bits` of `bits` to the bit stream at bit position
/// `*pos` (LSB-first within each byte), then advances `*pos`.
///
/// Merges into the current partial byte and writes the containing 8-byte
/// window back little-endian, so `array` must have at least 7 bytes of
/// slack past the current byte and bytes beyond the write position must be
/// treated as scratch. Requires `n_bits <= 56` and no stray high bits.
fn BrotliWriteBits(n_bits: u8, bits: u64, pos: &mut usize, array: &mut [u8]) {
    assert_eq!(bits >> n_bits, 0);
    assert!(n_bits <= 56);
    let byte_pos = *pos >> 3;
    let bit_shift = (*pos as u64) & 7;
    // Fold the new bits into the existing partial byte, then store the whole
    // 64-bit window in one little-endian write.
    let merged = (array[byte_pos] as u64) | (bits << bit_shift);
    array[byte_pos..byte_pos + 8].copy_from_slice(&merged.to_le_bytes());
    *pos += usize::from(n_bits);
}
758
/// Zeroes the byte the next write will merge into, so stale data in the
/// output buffer cannot be OR-ed into the stream. `pos` must be
/// byte-aligned.
fn BrotliWriteBitsPrepareStorage(pos: usize, array: &mut [u8]) {
    assert_eq!(pos & 7, 0);
    let byte_pos = pos / 8;
    array[byte_pos] = 0;
}
763
764fn BrotliStoreHuffmanTreeOfHuffmanTreeToBitMask(
765 num_codes: i32,
766 code_length_bitdepth: &[u8],
767 storage_ix: &mut usize,
768 storage: &mut [u8],
769) {
770 static kStorageOrder: [u8; 18] = [1, 2, 3, 4, 0, 5, 17, 6, 16, 7, 8, 9, 10, 11, 12, 13, 14, 15];
771 static kHuffmanBitLengthHuffmanCodeSymbols: [u8; 6] = [0, 7, 3, 2, 1, 15];
772 static kHuffmanBitLengthHuffmanCodeBitLengths: [u8; 6] = [2, 4, 3, 2, 2, 4];
773 let mut skip_some: u64 = 0u64;
774 let mut codes_to_store: u64 = 18;
775 if num_codes > 1i32 {
776 'break5: while codes_to_store > 0 {
777 {
778 if code_length_bitdepth
779 [(kStorageOrder[codes_to_store.wrapping_sub(1) as usize] as usize)]
780 as i32
781 != 0i32
782 {
783 break 'break5;
784 }
785 }
786 codes_to_store = codes_to_store.wrapping_sub(1);
787 }
788 }
789 if code_length_bitdepth[(kStorageOrder[0] as usize)] as i32 == 0i32
790 && (code_length_bitdepth[(kStorageOrder[1] as usize)] as i32 == 0i32)
791 {
792 skip_some = 2;
793 if code_length_bitdepth[(kStorageOrder[2] as usize)] as i32 == 0i32 {
794 skip_some = 3;
795 }
796 }
797 BrotliWriteBits(2, skip_some, storage_ix, storage);
798
799 for i in skip_some..codes_to_store {
800 let l = code_length_bitdepth[kStorageOrder[i as usize] as usize] as usize;
801 BrotliWriteBits(
802 kHuffmanBitLengthHuffmanCodeBitLengths[l],
803 kHuffmanBitLengthHuffmanCodeSymbols[l] as u64,
804 storage_ix,
805 storage,
806 );
807 }
808}
809
810fn BrotliStoreHuffmanTreeToBitMask(
811 huffman_tree_size: usize,
812 huffman_tree: &[u8],
813 huffman_tree_extra_bits: &[u8],
814 code_length_bitdepth: &[u8],
815 code_length_bitdepth_symbols: &[u16],
816 storage_ix: &mut usize,
817 storage: &mut [u8],
818) {
819 for i in 0usize..huffman_tree_size {
820 let ix: usize = huffman_tree[i] as usize;
821 BrotliWriteBits(
822 code_length_bitdepth[ix],
823 code_length_bitdepth_symbols[ix] as (u64),
824 storage_ix,
825 storage,
826 );
827 if ix == 16usize {
828 BrotliWriteBits(2, huffman_tree_extra_bits[i] as (u64), storage_ix, storage);
829 } else if ix == 17usize {
830 BrotliWriteBits(3, huffman_tree_extra_bits[i] as (u64), storage_ix, storage);
831 }
832 }
833}
834
835pub fn BrotliStoreHuffmanTree(
836 depths: &[u8],
837 num: usize,
838 tree: &mut [HuffmanTree],
839 storage_ix: &mut usize,
840 storage: &mut [u8],
841) {
842 let mut huffman_tree = [0u8; 704];
843 let mut huffman_tree_extra_bits = [0u8; 704];
844 let mut huffman_tree_size = 0usize;
845 let mut code_length_bitdepth = [0u8; 18];
846 let mut code_length_bitdepth_symbols = [0u16; 18];
847 let mut huffman_tree_histogram = [0u32; 18];
848 let mut i: usize;
849 let mut num_codes: i32 = 0i32;
850 let mut code: usize = 0usize;
851
852 BrotliWriteHuffmanTree(
853 depths,
854 num,
855 &mut huffman_tree_size,
856 &mut huffman_tree[..],
857 &mut huffman_tree_extra_bits[..],
858 );
859 for i in 0usize..huffman_tree_size {
860 let _rhs = 1;
861 let _lhs = &mut huffman_tree_histogram[huffman_tree[i] as usize];
862 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
863 }
864 i = 0usize;
865 'break3: while i < 18usize {
866 {
867 if huffman_tree_histogram[i] != 0 {
868 if num_codes == 0i32 {
869 code = i;
870 num_codes = 1i32;
871 } else if num_codes == 1i32 {
872 num_codes = 2i32;
873 {
874 break 'break3;
875 }
876 }
877 }
878 }
879 i = i.wrapping_add(1);
880 }
881 BrotliCreateHuffmanTree(
882 &mut huffman_tree_histogram,
883 18usize,
884 5i32,
885 tree,
886 &mut code_length_bitdepth,
887 );
888 BrotliConvertBitDepthsToSymbols(
889 &mut code_length_bitdepth,
890 18usize,
891 &mut code_length_bitdepth_symbols,
892 );
893 BrotliStoreHuffmanTreeOfHuffmanTreeToBitMask(
894 num_codes,
895 &code_length_bitdepth,
896 storage_ix,
897 storage,
898 );
899 if num_codes == 1i32 {
900 code_length_bitdepth[code] = 0u8;
901 }
902 BrotliStoreHuffmanTreeToBitMask(
903 huffman_tree_size,
904 &huffman_tree,
905 &huffman_tree_extra_bits,
906 &code_length_bitdepth,
907 &code_length_bitdepth_symbols,
908 storage_ix,
909 storage,
910 );
911}
912
// Writes a precomputed 40-bit pattern for the code-length code; presumably
// the canned encoding matching the static command depth tables — confirm
// against the constants module.
fn StoreStaticCodeLengthCode(storage_ix: &mut usize, storage: &mut [u8]) {
    BrotliWriteBits(40, 0xff_5555_5554, storage_ix, storage);
}
916
/// Comparator used with `SortHuffmanTreeItems`: orders tree nodes by
/// ascending population count.
pub struct SimpleSortHuffmanTree {}

impl HuffmanComparator for SimpleSortHuffmanTree {
    fn Cmp(&self, v0: &HuffmanTree, v1: &HuffmanTree) -> bool {
        // Smaller total count sorts first.
        v0.total_count_ < v1.total_count_
    }
}
924
/// Fast path: builds a depth-limited (14) Huffman code for `histogram`,
/// fills `depth`/`bits`, and stores the code to the bit stream — either as
/// a "simple" code (<= 4 symbols) or via the RLE-less static code-length
/// code. `histogram_total` must equal the sum of the histogram entries.
pub fn BrotliBuildAndStoreHuffmanTreeFast<AllocHT: alloc::Allocator<HuffmanTree>>(
    m: &mut AllocHT,
    histogram: &[u32],
    histogram_total: usize,
    max_bits: usize,
    depth: &mut [u8],
    bits: &mut [u16],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    // Scan the histogram, recording the first four used symbols and the
    // total number used; `length` ends one past the last used symbol.
    let mut count: u64 = 0;
    let mut symbols: [u64; 4] = [0; 4];
    let mut length: u64 = 0;
    let mut total: usize = histogram_total;
    while total != 0usize {
        if histogram[(length as usize)] != 0 {
            if count < 4 {
                symbols[count as usize] = length;
            }
            count = count.wrapping_add(1);
            total = total.wrapping_sub(histogram[(length as usize)] as usize);
        }
        length = length.wrapping_add(1);
    }
    // Single-symbol alphabet: store it directly with a zero-length code.
    if count <= 1 {
        BrotliWriteBits(4, 1, storage_ix, storage);
        BrotliWriteBits(max_bits as u8, symbols[0], storage_ix, storage);
        depth[symbols[0] as usize] = 0u8;
        bits[symbols[0] as usize] = 0u16;
        return;
    }
    for depth_elem in depth[..(length as usize)].iter_mut() {
        *depth_elem = 0;
    }
    {
        // Worst-case node count: `length` leaves + internal nodes + sentinel.
        let max_tree_size: u64 = (2u64).wrapping_mul(length).wrapping_add(1);
        let mut tree = alloc_or_default::<HuffmanTree, _>(m, max_tree_size as usize);
        let mut count_limit: u32;
        // Retry with doubled minimum leaf counts until the resulting tree
        // fits the depth limit of 14 (BrotliSetDepth returns true).
        count_limit = 1u32;
        'break11: loop {
            {
                // Build leaves in descending symbol order, clamping counts
                // below `count_limit` up to it.
                let mut node_index: u32 = 0u32;
                let mut l: u64;
                l = length;
                while l != 0 {
                    l = l.wrapping_sub(1);
                    if histogram[l as usize] != 0 {
                        if histogram[l as usize] >= count_limit {
                            tree.slice_mut()[node_index as usize] =
                                HuffmanTree::new(histogram[l as usize], -1, l as i16);
                        } else {
                            tree.slice_mut()[node_index as usize] =
                                HuffmanTree::new(count_limit, -1, l as i16);
                        }
                        node_index = node_index.wrapping_add(1);
                    }
                }
                {
                    let n: i32 = node_index as i32;

                    // `i` walks the sorted leaves, `j` walks the internal
                    // nodes appended after the two sentinels.
                    let mut i: i32 = 0i32;
                    let mut j: i32 = n + 1i32;
                    let mut k: i32;
                    SortHuffmanTreeItems(tree.slice_mut(), n as usize, SimpleSortHuffmanTree {});
                    // Sentinels with maximal count terminate both scans.
                    let sentinel = HuffmanTree::new(u32::MAX, -1, -1);
                    tree.slice_mut()[(node_index.wrapping_add(1) as usize)] = sentinel;
                    tree.slice_mut()[(node_index as usize)] = sentinel;
                    node_index = node_index.wrapping_add(2);
                    k = n - 1i32;
                    // Merge the two cheapest available nodes n-1 times.
                    while k > 0i32 {
                        {
                            let left: i32;
                            let right: i32;
                            if (tree.slice()[(i as usize)]).total_count_
                                <= (tree.slice()[(j as usize)]).total_count_
                            {
                                left = i;
                                i += 1;
                            } else {
                                left = j;
                                j += 1;
                            }
                            if (tree.slice()[(i as usize)]).total_count_
                                <= (tree.slice()[(j as usize)]).total_count_
                            {
                                right = i;
                                i += 1;
                            } else {
                                right = j;
                                j += 1;
                            }
                            // The slot before the moving sentinel becomes the
                            // new internal node.
                            let sum_total = (tree.slice()[(left as usize)])
                                .total_count_
                                .wrapping_add((tree.slice()[(right as usize)]).total_count_);
                            let tree_ind = (node_index.wrapping_sub(1) as usize);
                            (tree.slice_mut()[tree_ind]).total_count_ = sum_total;
                            (tree.slice_mut()[tree_ind]).index_left_ = left as i16;
                            (tree.slice_mut()[tree_ind]).index_right_or_value_ = right as i16;
                            tree.slice_mut()[(node_index as usize)] = sentinel;
                            node_index = node_index.wrapping_add(1);
                        }
                        k -= 1;
                    }
                    // Success iff all depths fit within 14 bits.
                    if BrotliSetDepth(2i32 * n - 1i32, tree.slice_mut(), depth, 14i32) {
                        break 'break11;
                    }
                }
            }
            count_limit = count_limit.wrapping_mul(2);
        }
        {
            m.free_cell(core::mem::take(&mut tree));
        }
    }
    BrotliConvertBitDepthsToSymbols(depth, length as usize, bits);
    if count <= 4 {
        // Simple code: 2-bit marker, symbol count, then the symbols sorted
        // by depth (insertion sort over at most four entries).
        BrotliWriteBits(2, 1, storage_ix, storage);
        BrotliWriteBits(2, count.wrapping_sub(1), storage_ix, storage);
        for i in 0..count as usize {
            for j in i + 1..count as usize {
                if depth[symbols[j] as usize] < depth[symbols[i] as usize] {
                    symbols.swap(j, i);
                }
            }
        }
        if count == 2 {
            BrotliWriteBits(max_bits as u8, symbols[0], storage_ix, storage);
            BrotliWriteBits(max_bits as u8, symbols[1], storage_ix, storage);
        } else if count == 3 {
            BrotliWriteBits(max_bits as u8, symbols[0], storage_ix, storage);
            BrotliWriteBits(max_bits as u8, symbols[1], storage_ix, storage);
            BrotliWriteBits(max_bits as u8, symbols[2], storage_ix, storage);
        } else {
            BrotliWriteBits(max_bits as u8, symbols[0], storage_ix, storage);
            BrotliWriteBits(max_bits as u8, symbols[1], storage_ix, storage);
            BrotliWriteBits(max_bits as u8, symbols[2], storage_ix, storage);
            BrotliWriteBits(max_bits as u8, symbols[3], storage_ix, storage);
            // Extra bit distinguishes the two possible 4-symbol tree shapes.
            BrotliWriteBits(
                1,
                if depth[(symbols[0] as usize)] as i32 == 1i32 {
                    1i32
                } else {
                    0i32
                } as (u64),
                storage_ix,
                storage,
            );
        }
    } else {
        // Complex code: static code-length code, then run-length-encoded
        // depths (zero runs and nonzero repeats use dedicated rep codes).
        let mut previous_value: u8 = 8u8;
        let mut i: u64;
        StoreStaticCodeLengthCode(storage_ix, storage);
        i = 0;
        while i < length {
            let value: u8 = depth[(i as usize)];
            let mut reps: u64 = 1;
            let mut k: u64;
            // Measure the run of equal depths starting at i.
            k = i.wrapping_add(1);
            while k < length && (depth[(k as usize)] as i32 == value as i32) {
                {
                    reps = reps.wrapping_add(1);
                }
                k = k.wrapping_add(1);
            }
            i = i.wrapping_add(reps);
            if value as i32 == 0i32 {
                // Zero run: one table-driven emission covers the whole run.
                BrotliWriteBits(
                    kZeroRepsDepth[reps as usize] as u8,
                    kZeroRepsBits[reps as usize] as u64,
                    storage_ix,
                    storage,
                );
            } else {
                // Nonzero: emit the value once if it changed, then either
                // literal repeats (< 3) or a rep-code for longer runs.
                if previous_value as i32 != value as i32 {
                    BrotliWriteBits(
                        kCodeLengthDepth[value as usize],
                        kCodeLengthBits[value as usize] as (u64),
                        storage_ix,
                        storage,
                    );
                    reps = reps.wrapping_sub(1);
                }
                if reps < 3 {
                    while reps != 0 {
                        reps = reps.wrapping_sub(1);
                        BrotliWriteBits(
                            kCodeLengthDepth[value as usize],
                            kCodeLengthBits[value as usize] as (u64),
                            storage_ix,
                            storage,
                        );
                    }
                } else {
                    reps = reps.wrapping_sub(3);
                    BrotliWriteBits(
                        kNonZeroRepsDepth[reps as usize] as u8,
                        kNonZeroRepsBits[reps as usize] as u64,
                        storage_ix,
                        storage,
                    );
                }
                previous_value = value;
            }
        }
    }
}
1133
/// The result of splitting one meta-block into literal / command / distance
/// block types, together with the context maps and per-block-type histograms
/// built for each symbol category.
pub struct MetaBlockSplit<
    Alloc: alloc::Allocator<u8>
        + alloc::Allocator<u32>
        + alloc::Allocator<HistogramLiteral>
        + alloc::Allocator<HistogramCommand>
        + alloc::Allocator<HistogramDistance>,
> {
    // Block-type splits for the three symbol categories.
    pub literal_split: BlockSplit<Alloc>,
    pub command_split: BlockSplit<Alloc>,
    pub distance_split: BlockSplit<Alloc>,
    // Context map for literals (indexed by block_type << 6 | context in
    // `store_symbol_with_context`); `_size` is the number of entries in use.
    pub literal_context_map: <Alloc as Allocator<u32>>::AllocatedMemory,
    pub literal_context_map_size: usize,
    // Context map for distances (2 context bits) and its used length.
    pub distance_context_map: <Alloc as Allocator<u32>>::AllocatedMemory,
    pub distance_context_map_size: usize,
    // Histogram buffers plus the count of histograms actually in use; a
    // `_size` of 0 means the corresponding buffer was never populated.
    pub literal_histograms: <Alloc as Allocator<HistogramLiteral>>::AllocatedMemory,
    pub literal_histograms_size: usize,
    pub command_histograms: <Alloc as Allocator<HistogramCommand>>::AllocatedMemory,
    pub command_histograms_size: usize,
    pub distance_histograms: <Alloc as Allocator<HistogramDistance>>::AllocatedMemory,
    pub distance_histograms_size: usize,
}
1155impl<
1156 Alloc: alloc::Allocator<u8>
1157 + alloc::Allocator<u32>
1158 + alloc::Allocator<HistogramLiteral>
1159 + alloc::Allocator<HistogramCommand>
1160 + alloc::Allocator<HistogramDistance>,
1161 > Default for MetaBlockSplit<Alloc>
1162{
1163 fn default() -> Self {
1164 Self {
1165 literal_split: BlockSplit::default(),
1166 command_split: BlockSplit::default(),
1167 distance_split: BlockSplit::default(),
1168 literal_context_map: alloc_default::<u32, Alloc>(),
1169 literal_context_map_size: 0,
1170 distance_context_map: alloc_default::<u32, Alloc>(),
1171 distance_context_map_size: 0,
1172 literal_histograms: alloc_default::<HistogramLiteral, Alloc>(),
1173 literal_histograms_size: 0,
1174 command_histograms: alloc_default::<HistogramCommand, Alloc>(),
1175 command_histograms_size: 0,
1176 distance_histograms: alloc_default::<HistogramDistance, Alloc>(),
1177 distance_histograms_size: 0,
1178 }
1179 }
1180}
1181
impl<
        Alloc: alloc::Allocator<u8>
            + alloc::Allocator<u32>
            + alloc::Allocator<HistogramLiteral>
            + alloc::Allocator<HistogramCommand>
            + alloc::Allocator<HistogramDistance>,
    > MetaBlockSplit<Alloc>
{
    /// Creates an empty split; equivalent to `Default::default()`.
    pub fn new() -> Self {
        Self::default()
    }

    /// Releases every buffer owned by this split back to `alloc` and resets
    /// all size counters to zero. `mem::take` swaps each buffer for an empty
    /// default before freeing, so the struct stays valid afterwards.
    pub fn destroy(&mut self, alloc: &mut Alloc) {
        self.literal_split.destroy(alloc);
        self.command_split.destroy(alloc);
        self.distance_split.destroy(alloc);
        <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut self.literal_context_map));
        self.literal_context_map_size = 0;
        <Alloc as Allocator<u32>>::free_cell(
            alloc,
            core::mem::take(&mut self.distance_context_map),
        );
        self.distance_context_map_size = 0;
        <Alloc as Allocator<HistogramLiteral>>::free_cell(
            alloc,
            core::mem::take(&mut self.literal_histograms),
        );

        self.literal_histograms_size = 0;
        <Alloc as Allocator<HistogramCommand>>::free_cell(
            alloc,
            core::mem::take(&mut self.command_histograms),
        );
        self.command_histograms_size = 0;
        <Alloc as Allocator<HistogramDistance>>::free_cell(
            alloc,
            core::mem::take(&mut self.distance_histograms),
        );
        self.distance_histograms_size = 0;
    }
}
/// Rolling state for block-type codes: a block type is encoded as 1
/// ("previous type + 1"), 0 ("second-to-last type"), or the literal type
/// value plus 2 (see `NextBlockTypeCode`).
#[derive(Clone, Copy)]
pub struct BlockTypeCodeCalculator {
    // Most recently seen block type (initialized to 1 by `new`).
    pub last_type: usize,
    // Block type seen before `last_type` (initialized to 0 by `new`).
    pub second_last_type: usize,
}
1228
/// Entropy-coding state for serializing block switches: the running
/// block-type code calculator plus the Huffman depth/bit tables for the
/// block-type alphabet (up to 256 types + 2 special codes = 258 symbols)
/// and the 26 block-length prefix codes.
pub struct BlockSplitCode {
    pub type_code_calculator: BlockTypeCodeCalculator,
    pub type_depths: [u8; 258],
    pub type_bits: [u16; 258],
    pub length_depths: [u8; 26],
    pub length_bits: [u16; 26],
}
1236
/// Streams one category of symbols (literals, commands or distances),
/// tracking the position within its block split and holding the per-block-
/// type entropy codes used to emit each symbol.
pub struct BlockEncoder<'a, Alloc: alloc::Allocator<u8> + alloc::Allocator<u16>> {
    // Alphabet size of each histogram / entropy-code table in this category.
    pub histogram_length_: usize,
    pub num_block_types_: usize,
    // Borrowed block split: per-block type ids and lengths.
    pub block_types_: &'a [u8],
    pub block_lengths_: &'a [u32],
    pub num_blocks_: usize,
    pub block_split_code_: BlockSplitCode,
    // Cursor: index of the current block and symbols remaining in it.
    pub block_ix_: usize,
    pub block_len_: usize,
    // Base index for the current block type: an offset into `depths_`/`bits_`
    // in `store_symbol`, or into the context map in
    // `store_symbol_with_context`.
    pub entropy_ix_: usize,
    // Flattened per-histogram code tables (one `histogram_length_`-sized run
    // per histogram), filled by `build_and_store_entropy_codes`.
    pub depths_: <Alloc as Allocator<u8>>::AllocatedMemory,
    pub bits_: <Alloc as Allocator<u16>>::AllocatedMemory,
}
1254
/// floor(log2(n)) for n > 0.
///
/// Replaces the original shift-count loop with the `leading_zeros`
/// intrinsic (O(1) instead of O(bits)). Results are identical, including
/// the original's quirk that both `n == 0` and `n == 1` return 0.
fn Log2FloorNonZero(n: u64) -> u32 {
    if n == 0 {
        // The old loop fell straight through for 0; preserve that.
        0
    } else {
        // 63 - leading_zeros is the index of the highest set bit.
        63 - n.leading_zeros()
    }
}
1271
/// Computes the meta-block length field: `*nibblesbits` gets MNIBBLES - 4
/// (the 2-bit header value), `*numbits` the total bit width of the length
/// field (4 bits per nibble), and `*bits` the payload `length - 1`.
fn BrotliEncodeMlen(length: u32, bits: &mut u64, numbits: &mut u32, nibblesbits: &mut u32) {
    // Bit width of (length - 1); length == 1 is special-cased to 1.
    let lg: u32 = if length == 1 {
        1
    } else {
        // Inlined floor(log2(length - 1)) + 1 via a shift loop.
        let mut v = u64::from(length.wrapping_sub(1));
        let mut log = 0u32;
        loop {
            v >>= 1;
            if v == 0 {
                break;
            }
            log = log.wrapping_add(1);
        }
        log.wrapping_add(1)
    };
    assert!(length > 0);
    assert!(length <= (1 << 24));
    assert!(lg <= 24);
    // At least 4 nibbles are always emitted; beyond 16 bits, round the bit
    // width up to whole nibbles.
    let mnibbles: u32 = if lg < 16 { 16 } else { lg.wrapping_add(3) } / 4;
    *nibblesbits = mnibbles.wrapping_sub(4);
    *numbits = mnibbles.wrapping_mul(4);
    *bits = u64::from(length.wrapping_sub(1));
}
1291
/// Writes the compressed meta-block header: the ISLAST flag (plus an
/// ISLASTEMPTY=0 bit when final), the MNIBBLES field and the length-minus-
/// one payload, and for non-final blocks an ISUNCOMPRESSED=0 bit.
fn StoreCompressedMetaBlockHeader(
    is_final_block: bool,
    length: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut lenbits: u64 = 0;
    let mut nlenbits: u32 = 0;
    let mut nibblesbits: u32 = 0;
    // ISLAST flag.
    BrotliWriteBits(1, is_final_block.into(), storage_ix, storage);
    if is_final_block {
        // ISLASTEMPTY = 0: this final meta-block still carries data.
        BrotliWriteBits(1, 0, storage_ix, storage);
    }
    BrotliEncodeMlen(length as u32, &mut lenbits, &mut nlenbits, &mut nibblesbits);
    // MNIBBLES - 4 (2 bits), then MLEN - 1 in nlenbits bits.
    BrotliWriteBits(2, nibblesbits as u64, storage_ix, storage);
    BrotliWriteBits(nlenbits as u8, lenbits, storage_ix, storage);
    if !is_final_block {
        // ISUNCOMPRESSED = 0: the body is entropy coded.
        BrotliWriteBits(1, 0, storage_ix, storage);
    }
}
1312
impl BlockTypeCodeCalculator {
    /// Initial state: the implicit history starts as last type 1,
    /// second-to-last type 0, matching the decoder's starting assumption.
    fn new() -> Self {
        Self {
            last_type: 1,
            second_last_type: 0,
        }
    }
}
1321
impl<'a, Alloc: Allocator<u8> + Allocator<u16>> BlockEncoder<'a, Alloc> {
    /// Creates an encoder over one category's block split.
    ///
    /// `histogram_length` is the alphabet size per histogram; the type and
    /// length slices are borrowed for the encoder's lifetime. The running
    /// length counter starts at the first block's length, or 0 when there
    /// are no blocks. The depth/bit tables stay empty until
    /// `build_and_store_entropy_codes` fills them.
    fn new(
        histogram_length: usize,
        num_block_types: usize,
        block_types: &'a [u8],
        block_lengths: &'a [u32],
        num_blocks: usize,
    ) -> Self {
        let block_len = if num_blocks != 0 && !block_lengths.is_empty() {
            block_lengths[0] as usize
        } else {
            0
        };
        Self {
            histogram_length_: histogram_length,
            num_block_types_: num_block_types,
            block_types_: block_types,
            block_lengths_: block_lengths,
            num_blocks_: num_blocks,
            block_split_code_: BlockSplitCode {
                type_code_calculator: BlockTypeCodeCalculator::new(),
                type_depths: [0; 258],
                type_bits: [0; 258],
                length_depths: [0; 26],
                length_bits: [0; 26],
            },
            block_ix_: 0,
            block_len_: block_len,
            entropy_ix_: 0,
            depths_: alloc_default::<u8, Alloc>(),
            bits_: alloc_default::<u16, Alloc>(),
        }
    }
}
1356
1357fn NextBlockTypeCode(calculator: &mut BlockTypeCodeCalculator, type_: u8) -> usize {
1358 let type_code: usize = (if type_ as usize == calculator.last_type.wrapping_add(1) {
1359 1u32
1360 } else if type_ as usize == calculator.second_last_type {
1361 0u32
1362 } else {
1363 (type_ as u32).wrapping_add(2)
1364 }) as usize;
1365 calculator.second_last_type = calculator.last_type;
1366 calculator.last_type = type_ as usize;
1367 type_code
1368}
1369
1370fn BlockLengthPrefixCode(len: u32) -> u32 {
1371 let mut code: u32 = (if len >= 177u32 {
1372 if len >= 753u32 {
1373 20i32
1374 } else {
1375 14i32
1376 }
1377 } else if len >= 41u32 {
1378 7i32
1379 } else {
1380 0i32
1381 }) as u32;
1382 while code < (26i32 - 1i32) as u32
1383 && (len >= kBlockLengthPrefixCode[code.wrapping_add(1) as usize].offset)
1384 {
1385 code = code.wrapping_add(1);
1386 }
1387 code
1388}
1389
1390fn StoreVarLenUint8(n: u64, storage_ix: &mut usize, storage: &mut [u8]) {
1391 if n == 0 {
1392 BrotliWriteBits(1, 0, storage_ix, storage);
1393 } else {
1394 let nbits: u8 = Log2FloorNonZero(n) as u8;
1395 BrotliWriteBits(1, 1, storage_ix, storage);
1396 BrotliWriteBits(3, nbits as u64, storage_ix, storage);
1397 BrotliWriteBits(nbits, n.wrapping_sub(1u64 << nbits), storage_ix, storage);
1398 }
1399}
1400
1401fn StoreSimpleHuffmanTree(
1402 depths: &[u8],
1403 symbols: &mut [usize],
1404 num_symbols: usize,
1405 max_bits: usize,
1406 storage_ix: &mut usize,
1407 storage: &mut [u8],
1408) {
1409 BrotliWriteBits(2, 1, storage_ix, storage);
1410 BrotliWriteBits(2, num_symbols.wrapping_sub(1) as u64, storage_ix, storage);
1411 {
1412 for i in 0..num_symbols {
1413 for j in i + 1..num_symbols {
1414 if depths[symbols[j]] < depths[symbols[i]] {
1415 symbols.swap(j, i);
1416 }
1417 }
1418 }
1419 }
1420 if num_symbols == 2usize {
1421 BrotliWriteBits(max_bits as u8, symbols[0] as u64, storage_ix, storage);
1422 BrotliWriteBits(max_bits as u8, symbols[1] as u64, storage_ix, storage);
1423 } else if num_symbols == 3usize {
1424 BrotliWriteBits(max_bits as u8, symbols[0] as u64, storage_ix, storage);
1425 BrotliWriteBits(max_bits as u8, symbols[1] as u64, storage_ix, storage);
1426 BrotliWriteBits(max_bits as u8, symbols[2] as u64, storage_ix, storage);
1427 } else {
1428 BrotliWriteBits(max_bits as u8, symbols[0] as u64, storage_ix, storage);
1429 BrotliWriteBits(max_bits as u8, symbols[1] as u64, storage_ix, storage);
1430 BrotliWriteBits(max_bits as u8, symbols[2] as u64, storage_ix, storage);
1431 BrotliWriteBits(max_bits as u8, symbols[3] as u64, storage_ix, storage);
1432 BrotliWriteBits(
1433 1,
1434 if depths[symbols[0]] as i32 == 1i32 {
1435 1i32
1436 } else {
1437 0i32
1438 } as (u64),
1439 storage_ix,
1440 storage,
1441 );
1442 }
1443}
1444
/// Builds a Huffman code over `histogram` and writes its serialized form to
/// the bit stream, filling `depth` and `bits` with the resulting code.
///
/// Chooses among three encodings by the number of non-zero symbols: a
/// single-symbol tree, a 2..=4-symbol simple tree, or the full complex
/// tree serialization.
fn BuildAndStoreHuffmanTree(
    histogram: &[u32],
    histogram_length: usize,
    alphabet_size: usize,
    tree: &mut [HuffmanTree],
    depth: &mut [u8],
    bits: &mut [u16],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut count: usize = 0usize;
    // Indices of the first four non-zero symbols (inputs to the simple tree).
    let mut s4 = [0usize; 4];
    let mut i: usize;
    let mut max_bits: usize = 0usize;
    i = 0usize;
    // Count non-zero symbols, remembering the first four; stop scanning once
    // the count has exceeded four.
    'break31: while i < histogram_length {
        {
            if histogram[i] != 0 {
                if count < 4usize {
                    s4[count] = i;
                } else if count > 4usize {
                    break 'break31;
                }
                count = count.wrapping_add(1);
            }
        }
        i = i.wrapping_add(1);
    }
    {
        // max_bits = bit width of the largest symbol index (alphabet_size - 1).
        let mut max_bits_counter: usize = alphabet_size.wrapping_sub(1);
        while max_bits_counter != 0 {
            max_bits_counter >>= 1i32;
            max_bits = max_bits.wrapping_add(1);
        }
    }
    if count <= 1 {
        // Degenerate tree: write tree type 1 with NSYM-1 = 0, then the lone
        // symbol (s4[0] is 0 if the histogram was entirely empty).
        BrotliWriteBits(4, 1, storage_ix, storage);
        BrotliWriteBits(max_bits as u8, s4[0] as u64, storage_ix, storage);
        depth[s4[0]] = 0u8;
        bits[s4[0]] = 0u16;
        return;
    }

    // Reset depths before building the tree.
    for depth_elem in depth[..histogram_length].iter_mut() {
        *depth_elem = 0; }
    BrotliCreateHuffmanTree(histogram, histogram_length, 15i32, tree, depth);
    BrotliConvertBitDepthsToSymbols(depth, histogram_length, bits);
    if count <= 4usize {
        StoreSimpleHuffmanTree(depth, &mut s4[..], count, max_bits, storage_ix, storage);
    } else {
        BrotliStoreHuffmanTree(depth, histogram_length, tree, storage_ix, storage);
    }
}
1499
/// Looks up the block-length prefix code for `len`, returning the code
/// index, the number of extra bits, and the extra-bits value.
fn GetBlockLengthPrefixCode(len: u32, code: &mut usize, n_extra: &mut u32, extra: &mut u32) {
    *code = BlockLengthPrefixCode(len) as usize;
    *n_extra = kBlockLengthPrefixCode[*code].nbits;
    *extra = len.wrapping_sub(kBlockLengthPrefixCode[*code].offset);
}
1505
/// Writes one block switch: the block-type code (omitted for the first
/// block, though the type-code calculator state still advances), then the
/// block-length prefix code and its extra bits.
fn StoreBlockSwitch(
    code: &mut BlockSplitCode,
    block_len: u32,
    block_type: u8,
    is_first_block: bool,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    // Always computed so the last/second-last history stays in sync, even
    // when the first block's type code is not written.
    let typecode: usize = NextBlockTypeCode(&mut code.type_code_calculator, block_type);
    let mut lencode: usize = 0;
    let mut len_nextra: u32 = 0;
    let mut len_extra: u32 = 0;
    if !is_first_block {
        BrotliWriteBits(
            code.type_depths[typecode] as u8,
            code.type_bits[typecode] as (u64),
            storage_ix,
            storage,
        );
    }
    GetBlockLengthPrefixCode(block_len, &mut lencode, &mut len_nextra, &mut len_extra);
    BrotliWriteBits(
        code.length_depths[lencode],
        code.length_bits[lencode] as (u64),
        storage_ix,
        storage,
    );
    BrotliWriteBits(len_nextra as u8, len_extra as (u64), storage_ix, storage);
}
1535
1536fn BuildAndStoreBlockSplitCode(
1537 types: &[u8],
1538 lengths: &[u32],
1539 num_blocks: usize,
1540 num_types: usize,
1541 tree: &mut [HuffmanTree],
1542 code: &mut BlockSplitCode,
1543 storage_ix: &mut usize,
1544 storage: &mut [u8],
1545) {
1546 let mut type_histo: [u32; 258] = [0; 258];
1547 let mut length_histo: [u32; 26] = [0; 26];
1548 let mut i: usize;
1549 let mut type_code_calculator = BlockTypeCodeCalculator::new();
1550 i = 0usize;
1551 while i < num_blocks {
1552 {
1553 let type_code: usize = NextBlockTypeCode(&mut type_code_calculator, types[i]);
1554 if i != 0usize {
1555 let _rhs = 1;
1556 let _lhs = &mut type_histo[type_code];
1557 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
1558 }
1559 {
1560 let _rhs = 1;
1561 let _lhs = &mut length_histo[BlockLengthPrefixCode(lengths[i]) as usize];
1562 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
1563 }
1564 }
1565 i = i.wrapping_add(1);
1566 }
1567 StoreVarLenUint8(num_types.wrapping_sub(1) as u64, storage_ix, storage);
1568 if num_types > 1 {
1569 BuildAndStoreHuffmanTree(
1570 &mut type_histo[0..],
1571 num_types.wrapping_add(2),
1572 num_types.wrapping_add(2),
1573 tree,
1574 &mut code.type_depths[0..],
1575 &mut code.type_bits[0..],
1576 storage_ix,
1577 storage,
1578 );
1579 BuildAndStoreHuffmanTree(
1580 &mut length_histo[0..],
1581 super::constants::BROTLI_NUM_BLOCK_LEN_SYMBOLS, super::constants::BROTLI_NUM_BLOCK_LEN_SYMBOLS,
1583 tree,
1584 &mut code.length_depths[0..],
1585 &mut code.length_bits[0..],
1586 storage_ix,
1587 storage,
1588 );
1589 StoreBlockSwitch(code, lengths[0], types[0], true, storage_ix, storage);
1590 }
1591}
1592
impl<Alloc: Allocator<u8> + Allocator<u16>> BlockEncoder<'_, Alloc> {
    /// Thin wrapper: builds and serializes this encoder's block-split
    /// entropy codes into `self.block_split_code_`.
    fn build_and_store_block_switch_entropy_codes(
        &mut self,
        tree: &mut [HuffmanTree],
        storage_ix: &mut usize,
        storage: &mut [u8],
    ) {
        BuildAndStoreBlockSplitCode(
            self.block_types_,
            self.block_lengths_,
            self.num_blocks_,
            self.num_block_types_,
            tree,
            &mut self.block_split_code_,
            storage_ix,
            storage,
        );
    }
}
1612
/// Writes the "trivial" context map, where every context of block type `i`
/// maps to histogram `i`, using the run-length-encoded context-map format
/// (each row is one histogram id followed by a maximal zero run).
fn StoreTrivialContextMap(
    num_types: usize,
    context_bits: usize,
    tree: &mut [HuffmanTree],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    StoreVarLenUint8(num_types.wrapping_sub(1) as u64, storage_ix, storage);
    if num_types > 1 {
        // repeat_code: the run-length prefix used to encode a full row of
        // (2^context_bits - 1) zeros in one symbol.
        let repeat_code: usize = context_bits.wrapping_sub(1u32 as usize);
        let repeat_bits: usize = (1u32 << repeat_code).wrapping_sub(1) as usize;
        let alphabet_size: usize = num_types.wrapping_add(repeat_code);
        let mut histogram: [u32; 272] = [0; 272];
        let mut depths: [u8; 272] = [0; 272];
        let mut bits: [u16; 272] = [0; 272];
        // RLEMAX flag = 1, then RLEMAX - 1 in 4 bits.
        BrotliWriteBits(1u8, 1u64, storage_ix, storage);
        BrotliWriteBits(4u8, repeat_code.wrapping_sub(1) as u64, storage_ix, storage);
        // Build the histogram the emitted symbols will produce: one repeat
        // symbol per type row, one literal 0, and the shifted histogram ids.
        histogram[repeat_code] = num_types as u32;
        histogram[0] = 1;
        for i in context_bits..alphabet_size {
            histogram[i] = 1;
        }
        BuildAndStoreHuffmanTree(
            &mut histogram[..],
            alphabet_size,
            alphabet_size,
            tree,
            &mut depths[..],
            &mut bits[..],
            storage_ix,
            storage,
        );
        // Emit one row per block type: the histogram id symbol (ids are
        // shifted up past the run-length prefixes), the repeat symbol, and
        // its extra bits encoding the zero-run length.
        for i in 0usize..num_types {
            let code: usize = if i == 0usize {
                0usize
            } else {
                i.wrapping_add(context_bits).wrapping_sub(1)
            };
            BrotliWriteBits(depths[code], bits[code] as (u64), storage_ix, storage);
            BrotliWriteBits(
                depths[repeat_code],
                bits[repeat_code] as (u64),
                storage_ix,
                storage,
            );
            BrotliWriteBits(repeat_code as u8, repeat_bits as u64, storage_ix, storage);
        }
        // IMTF flag = 1 (inverse move-to-front applied by the decoder).
        BrotliWriteBits(1, 1, storage_ix, storage);
    }
}
1663
/// Linear search over the first `v_size` bytes of `v`.
///
/// Returns the index of the first occurrence of `value`, or `v_size` when
/// it is absent (the original loop's fall-through behavior). Rewritten with
/// the idiomatic `iter().position()` instead of a manual index loop.
fn IndexOf(v: &[u8], v_size: usize, value: u8) -> usize {
    v[..v_size]
        .iter()
        .position(|&b| b == value)
        .unwrap_or(v_size)
}
1676
/// Moves `v[index]` to the front of the slice, shifting the preceding
/// entries one slot toward the back (the move-to-front table update).
///
/// Rewritten as a single `rotate_right(1)` over `v[0..=index]`, which is
/// exactly the original manual shift loop (a no-op when `index == 0`).
fn MoveToFront(v: &mut [u8], index: usize) {
    v[..=index].rotate_right(1);
}
1689
1690fn MoveToFrontTransform(v_in: &[u32], v_size: usize, v_out: &mut [u32]) {
1691 let mut mtf: [u8; 256] = [0; 256];
1692 let mut max_value: u32;
1693 if v_size == 0usize {
1694 return;
1695 }
1696 max_value = v_in[0];
1697 for i in 1..v_size {
1698 if v_in[i] > max_value {
1699 max_value = v_in[i];
1700 }
1701 }
1702 for i in 0..=max_value as usize {
1703 mtf[i] = i as u8;
1704 }
1705 {
1706 let mtf_size: usize = max_value.wrapping_add(1) as usize;
1707 for i in 0usize..v_size {
1708 let index: usize = IndexOf(&mtf[..], mtf_size, v_in[i] as u8);
1709 v_out[i] = index as u32;
1710 MoveToFront(&mut mtf[..], index);
1711 }
1712 }
1713}
1714
/// Run-length-encodes the zero runs of `v` in place (context-map encoding).
///
/// Non-zero values are shifted up by the number of run-length prefix codes;
/// each zero run becomes one or more prefix symbols whose extra bits are
/// packed into bits 9 and above of the output word. On entry
/// `*max_run_length_prefix` is the caller's cap on prefix codes; on exit it
/// holds the number actually needed. `*out_size` receives the number of
/// symbols written (always <= `in_size`).
fn RunLengthCodeZeros(
    in_size: usize,
    v: &mut [u32],
    out_size: &mut usize,
    max_run_length_prefix: &mut u32,
) {
    let mut max_reps: u32 = 0u32;
    let mut i: usize;
    let mut max_prefix: u32;
    i = 0usize;
    // First pass: measure the longest zero run to size the prefix alphabet.
    while i < in_size {
        let mut reps: u32 = 0u32;
        while i < in_size && (v[i] != 0u32) {
            i = i.wrapping_add(1);
        }
        while i < in_size && (v[i] == 0u32) {
            {
                reps = reps.wrapping_add(1);
            }
            i = i.wrapping_add(1);
        }
        max_reps = max(reps, max_reps);
    }
    max_prefix = if max_reps > 0u32 {
        Log2FloorNonZero(max_reps as (u64))
    } else {
        0u32
    };
    // Never exceed the caller-imposed cap.
    max_prefix = min(max_prefix, *max_run_length_prefix);
    *max_run_length_prefix = max_prefix;
    // Second pass: rewrite v[..] in place; reads always stay ahead of writes
    // because every run of k zeros emits at most k symbols.
    *out_size = 0usize;
    i = 0usize;
    while i < in_size {
        if v[i] != 0u32 {
            // Non-zero symbol, shifted past the run-length prefix codes.
            v[*out_size] = (v[i]).wrapping_add(*max_run_length_prefix);
            i = i.wrapping_add(1);
            *out_size = out_size.wrapping_add(1);
        } else {
            // Count the zero run starting at i.
            let mut reps: u32 = 1u32;
            let mut k: usize;
            k = i.wrapping_add(1);
            while k < in_size && (v[k] == 0u32) {
                {
                    reps = reps.wrapping_add(1);
                }
                k = k.wrapping_add(1);
            }
            i = i.wrapping_add(reps as usize);
            // Emit maximal-length prefix symbols until the run fits in one.
            while reps != 0u32 {
                if reps < 2u32 << max_prefix {
                    // Final symbol: prefix code in bits 0..9, extra bits above.
                    let run_length_prefix: u32 = Log2FloorNonZero(reps as (u64));
                    let extra_bits: u32 = reps.wrapping_sub(1u32 << run_length_prefix);
                    v[*out_size] = run_length_prefix.wrapping_add(extra_bits << 9);
                    *out_size = out_size.wrapping_add(1);
                    {
                        break;
                    }
                } else {
                    // Longest representable run: prefix max_prefix with all
                    // extra bits set, covering 2^(max_prefix+1) - 1 zeros.
                    let extra_bits: u32 = (1u32 << max_prefix).wrapping_sub(1);
                    v[*out_size] = max_prefix.wrapping_add(extra_bits << 9);
                    reps = reps.wrapping_sub((2u32 << max_prefix).wrapping_sub(1));
                    *out_size = out_size.wrapping_add(1);
                }
            }
        }
    }
}
1782
/// Serializes a context map: move-to-front transform, run-length encoding
/// of zero runs, a Huffman code over the resulting symbols, and finally the
/// symbol stream itself. A single-cluster map is encoded by its size alone.
fn EncodeContextMap<AllocU32: alloc::Allocator<u32>>(
    m: &mut AllocU32,
    context_map: &[u32],
    context_map_size: usize,
    num_clusters: usize,
    tree: &mut [HuffmanTree],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut rle_symbols: AllocU32::AllocatedMemory;
    // Cap on run-length prefix codes; RunLengthCodeZeros lowers it to what
    // the data actually needs.
    let mut max_run_length_prefix: u32 = 6u32;
    let mut num_rle_symbols: usize = 0usize;
    // Low 9 bits of each RLE word hold the symbol; higher bits are extras.
    static kSymbolMask: u32 = (1u32 << 9) - 1;
    let mut depths: [u8; 272] = [0; 272];
    let mut bits: [u16; 272] = [0; 272];
    StoreVarLenUint8(num_clusters.wrapping_sub(1) as u64, storage_ix, storage);
    if num_clusters == 1 {
        return;
    }
    rle_symbols = alloc_or_default::<u32, _>(m, context_map_size);
    MoveToFrontTransform(context_map, context_map_size, rle_symbols.slice_mut());
    RunLengthCodeZeros(
        context_map_size,
        rle_symbols.slice_mut(),
        &mut num_rle_symbols,
        &mut max_run_length_prefix,
    );
    // Histogram the RLE symbols (extras masked off) for the Huffman code.
    let mut histogram: [u32; 272] = [0; 272];
    for i in 0usize..num_rle_symbols {
        let _rhs = 1;
        let _lhs = &mut histogram[(rle_symbols.slice()[i] & kSymbolMask) as usize];
        *_lhs = (*_lhs).wrapping_add(_rhs as u32);
    }
    {
        // RLEMAX flag, then RLEMAX - 1 in 4 bits when run lengths are used.
        let use_rle = max_run_length_prefix > 0;
        BrotliWriteBits(1, u64::from(use_rle), storage_ix, storage);
        if use_rle {
            BrotliWriteBits(
                4,
                max_run_length_prefix.wrapping_sub(1) as (u64),
                storage_ix,
                storage,
            );
        }
    }
    // Alphabet: cluster ids shifted past the run-length prefix codes.
    BuildAndStoreHuffmanTree(
        &mut histogram[..],
        num_clusters.wrapping_add(max_run_length_prefix as usize),
        num_clusters.wrapping_add(max_run_length_prefix as usize),
        tree,
        &mut depths[..],
        &mut bits[..],
        storage_ix,
        storage,
    );
    // Emit each symbol; run-length prefixes carry their extra bits inline.
    for i in 0usize..num_rle_symbols {
        let rle_symbol: u32 = rle_symbols.slice()[i] & kSymbolMask;
        let extra_bits_val: u32 = rle_symbols.slice()[i] >> 9;
        BrotliWriteBits(
            depths[rle_symbol as usize],
            bits[rle_symbol as usize] as (u64),
            storage_ix,
            storage,
        );
        if rle_symbol > 0u32 && (rle_symbol <= max_run_length_prefix) {
            BrotliWriteBits(
                rle_symbol as u8,
                extra_bits_val as (u64),
                storage_ix,
                storage,
            );
        }
    }
    // IMTF flag = 1 (decoder applies inverse move-to-front).
    BrotliWriteBits(1, 1, storage_ix, storage);
    m.free_cell(rle_symbols);
}
1859
impl<Alloc: Allocator<u8> + Allocator<u16>> BlockEncoder<'_, Alloc> {
    /// Builds and serializes one Huffman code per histogram, storing all
    /// depth/bit tables flattened into `self.depths_`/`self.bits_` (one
    /// `histogram_length_`-sized run per histogram). Must be called before
    /// `store_symbol`/`store_symbol_with_context`; `cleanup` frees the
    /// tables afterwards.
    fn build_and_store_entropy_codes<HistogramType: SliceWrapper<u32>>(
        &mut self,
        m: &mut Alloc,
        histograms: &[HistogramType],
        histograms_size: usize,
        alphabet_size: usize,
        tree: &mut [HuffmanTree],
        storage_ix: &mut usize,
        storage: &mut [u8],
    ) {
        let table_size: usize = histograms_size.wrapping_mul(self.histogram_length_);
        self.depths_ = alloc_or_default::<u8, _>(m, table_size);
        self.bits_ = alloc_or_default::<u16, _>(m, table_size);
        {
            for i in 0usize..histograms_size {
                // Offset of this histogram's run in the flattened tables.
                let ix: usize = i.wrapping_mul(self.histogram_length_);
                BuildAndStoreHuffmanTree(
                    &(histograms[i]).slice()[0..],
                    self.histogram_length_,
                    alphabet_size,
                    tree,
                    &mut self.depths_.slice_mut()[ix..],
                    &mut self.bits_.slice_mut()[ix..],
                    storage_ix,
                    storage,
                );
            }
        }
    }

    /// Writes one symbol using the current block type's entropy code,
    /// emitting a block switch first when the current block is exhausted.
    fn store_symbol(&mut self, symbol: usize, storage_ix: &mut usize, storage: &mut [u8]) {
        if self.block_len_ == 0usize {
            // Advance to the next block and emit its switch.
            let block_ix: usize = {
                self.block_ix_ = self.block_ix_.wrapping_add(1);
                self.block_ix_
            };
            let block_len: u32 = self.block_lengths_[block_ix];
            let block_type: u8 = self.block_types_[block_ix];
            self.block_len_ = block_len as usize;
            // Select the new block type's run in the flattened code tables.
            self.entropy_ix_ = (block_type as usize).wrapping_mul(self.histogram_length_);
            StoreBlockSwitch(
                &mut self.block_split_code_,
                block_len,
                block_type,
                false,
                storage_ix,
                storage,
            );
        }
        self.block_len_ = self.block_len_.wrapping_sub(1);
        {
            let ix: usize = self.entropy_ix_.wrapping_add(symbol);
            BrotliWriteBits(
                self.depths_.slice()[ix],
                self.bits_.slice()[ix] as (u64),
                storage_ix,
                storage,
            );
        }
    }
}
1922
impl Command {
    /// Decodes the copy-length code from the packed `copy_len_` field: the
    /// low 25 bits hold the copy length and the top 7 bits a modifier that
    /// is sign-extended (bit 6 widened into bit 7, then cast through i8)
    /// and added to the length to form the code.
    fn copy_len_code(&self) -> u32 {
        let modifier = self.copy_len_ >> 25;
        let delta: i32 = ((modifier | ((modifier & 0x40) << 1)) as u8) as i8 as i32;
        ((self.copy_len_ & 0x01ff_ffff) as i32 + delta) as u32
    }
}
1930
/// Number of extra bits carried by insert-length code `inscode`.
fn GetInsertExtra(inscode: u16) -> u32 {
    kInsExtra[inscode as usize]
}
1934
/// Smallest insert length represented by insert-length code `inscode`.
fn GetInsertBase(inscode: u16) -> u32 {
    kInsBase[inscode as usize]
}
1938
/// Smallest copy length represented by copy-length code `copycode`.
fn GetCopyBase(copycode: u16) -> u32 {
    kCopyBase[copycode as usize]
}
1942
/// Number of extra bits carried by copy-length code `copycode`.
fn GetCopyExtra(copycode: u16) -> u32 {
    kCopyExtra[copycode as usize]
}
1946
/// Writes the extra bits of a command's insert and copy lengths as a single
/// bit-write: insert extras in the low positions, copy extras above them.
fn StoreCommandExtra(cmd: &Command, storage_ix: &mut usize, storage: &mut [u8]) {
    let copylen_code = cmd.copy_len_code();
    let inscode: u16 = GetInsertLengthCode(cmd.insert_len_ as usize);
    let copycode: u16 = GetCopyLengthCode(copylen_code as usize);
    let insnumextra: u32 = GetInsertExtra(inscode);
    // Offsets of the actual lengths within their prefix-code buckets.
    let insextraval: u64 = cmd.insert_len_.wrapping_sub(GetInsertBase(inscode)) as (u64);
    let copyextraval: u64 = copylen_code.wrapping_sub(GetCopyBase(copycode)) as (u64);
    let bits: u64 = copyextraval << insnumextra | insextraval;
    BrotliWriteBits(
        insnumextra.wrapping_add(GetCopyExtra(copycode)) as u8,
        bits,
        storage_ix,
        storage,
    );
}
1962
1963fn Context(p1: u8, p2: u8, mode: ContextType) -> u8 {
1964 match mode {
1965 ContextType::CONTEXT_LSB6 => (p1 as i32 & 0x3fi32) as u8,
1966 ContextType::CONTEXT_MSB6 => (p1 as i32 >> 2) as u8,
1967 ContextType::CONTEXT_UTF8 => {
1968 (kUTF8ContextLookup[p1 as usize] as i32
1969 | kUTF8ContextLookup[(p2 as i32 + 256i32) as usize] as i32) as u8
1970 }
1971 ContextType::CONTEXT_SIGNED => {
1972 (((kSigned3BitContextLookup[p1 as usize] as i32) << 3)
1973 + kSigned3BitContextLookup[p2 as usize] as i32) as u8
1974 }
1975 }
1976 }
1978
impl<Alloc: Allocator<u8> + Allocator<u16>> BlockEncoder<'_, Alloc> {
    /// Writes one symbol using the entropy code selected by the context map
    /// at `(block_type << context_bits) + context`, emitting a block switch
    /// first when the current block is exhausted.
    fn store_symbol_with_context(
        &mut self,
        symbol: usize,
        context: usize,
        context_map: &[u32],
        storage_ix: &mut usize,
        storage: &mut [u8],
        context_bits: usize,
    ) {
        if self.block_len_ == 0 {
            // Advance to the next block and emit its switch.
            let block_ix: usize = {
                self.block_ix_ = self.block_ix_.wrapping_add(1);
                self.block_ix_
            };
            let block_len: u32 = self.block_lengths_[block_ix];
            let block_type: u8 = self.block_types_[block_ix];
            self.block_len_ = block_len as usize;
            // Here entropy_ix_ is a context-map base, not a table offset.
            self.entropy_ix_ = (block_type as usize) << context_bits;
            StoreBlockSwitch(
                &mut self.block_split_code_,
                block_len,
                block_type,
                false,
                storage_ix,
                storage,
            );
        }
        self.block_len_ = self.block_len_.wrapping_sub(1);
        {
            // The context map selects which histogram's code run to use.
            let histo_ix: usize = context_map[self.entropy_ix_.wrapping_add(context)] as usize;
            let ix: usize = histo_ix
                .wrapping_mul(self.histogram_length_)
                .wrapping_add(symbol);
            BrotliWriteBits(
                self.depths_.slice()[ix],
                self.bits_.slice()[ix] as (u64),
                storage_ix,
                storage,
            );
        }
    }
}
2022
impl<Alloc: Allocator<u8> + Allocator<u16>> BlockEncoder<'_, Alloc> {
    /// Returns the flattened depth/bit tables to the allocator, leaving
    /// empty (default) buffers in their place.
    fn cleanup(&mut self, m: &mut Alloc) {
        <Alloc as Allocator<u8>>::free_cell(m, core::mem::take(&mut self.depths_));
        <Alloc as Allocator<u16>>::free_cell(m, core::mem::take(&mut self.bits_));
    }
}
2029
/// Advances the bit cursor to the next byte boundary and zeroes the byte it
/// now points at, so later OR-style bit writes start on a clean byte.
///
/// Fix: the original mask was written `!7u32 as usize`, which parses as
/// `(!7u32) as usize` and zero-extends to a 32-bit-wide mask — bit
/// positions at or above 2^32 would have been silently truncated. Use a
/// full-width `usize` mask instead.
pub fn JumpToByteBoundary(storage_ix: &mut usize, storage: &mut [u8]) {
    *storage_ix = storage_ix.wrapping_add(7) & !7usize;
    storage[*storage_ix >> 3] = 0u8;
}
2034
/// Deprecated shim that forwards to `store_meta_block`, converting the
/// C-style `is_last: i32` flag to `bool`. All other arguments pass through
/// unchanged.
#[deprecated(note = "use store_meta_block instead")]
pub fn BrotliStoreMetaBlock<Alloc: BrotliAlloc, Cb>(
    alloc: &mut Alloc,
    input: &[u8],
    start_pos: usize,
    length: usize,
    mask: usize,
    prev_byte: u8,
    prev_byte2: u8,
    is_last: i32,
    params: &BrotliEncoderParams,
    literal_context_mode: ContextType,
    distance_cache: &[i32; kNumDistanceCacheEntries],
    commands: &[Command],
    n_commands: usize,
    mb: &mut MetaBlockSplit<Alloc>,
    recoder_state: &mut RecoderState,
    storage_ix: &mut usize,
    storage: &mut [u8],
    callback: &mut Cb,
) where
    Cb: FnMut(
        &mut interface::PredictionModeContextMap<InputReferenceMut>,
        &mut [StaticCommand],
        InputPair,
        &mut Alloc,
    ),
{
    store_meta_block(
        alloc,
        input,
        start_pos,
        length,
        mask,
        prev_byte,
        prev_byte2,
        is_last != 0,
        params,
        literal_context_mode,
        distance_cache,
        commands,
        n_commands,
        mb,
        recoder_state,
        storage_ix,
        storage,
        callback,
    )
}
2084
2085pub(crate) fn store_meta_block<Alloc: BrotliAlloc, Cb>(
2086 alloc: &mut Alloc,
2087 input: &[u8],
2088 start_pos: usize,
2089 length: usize,
2090 mask: usize,
2091 mut prev_byte: u8,
2092 mut prev_byte2: u8,
2093 is_last: bool,
2094 params: &BrotliEncoderParams,
2095 literal_context_mode: ContextType,
2096 distance_cache: &[i32; kNumDistanceCacheEntries],
2097 commands: &[Command],
2098 n_commands: usize,
2099 mb: &mut MetaBlockSplit<Alloc>,
2100 recoder_state: &mut RecoderState,
2101 storage_ix: &mut usize,
2102 storage: &mut [u8],
2103 callback: &mut Cb,
2104) where
2105 Cb: FnMut(
2106 &mut interface::PredictionModeContextMap<InputReferenceMut>,
2107 &mut [interface::StaticCommand],
2108 InputPair,
2109 &mut Alloc,
2110 ),
2111{
2112 let (input0, input1) = InputPairFromMaskedInput(input, start_pos, length, mask);
2113 if params.log_meta_block {
2114 LogMetaBlock(
2115 alloc,
2116 commands.split_at(n_commands).0,
2117 input0,
2118 input1,
2119 distance_cache,
2120 recoder_state,
2121 block_split_reference(mb),
2122 params,
2123 Some(literal_context_mode),
2124 callback,
2125 );
2126 }
2127 let mut pos: usize = start_pos;
2128 let num_distance_symbols = params.dist.alphabet_size;
2129 let mut num_effective_distance_symbols = num_distance_symbols as usize;
2130 let _literal_context_lut = BROTLI_CONTEXT_LUT(literal_context_mode);
2131 let mut literal_enc: BlockEncoder<Alloc>;
2132 let mut command_enc: BlockEncoder<Alloc>;
2133 let mut distance_enc: BlockEncoder<Alloc>;
2134 let dist = ¶ms.dist;
2135 if params.large_window && num_effective_distance_symbols > BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS
2136 {
2137 num_effective_distance_symbols = BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS;
2138 }
2139 StoreCompressedMetaBlockHeader(is_last, length, storage_ix, storage);
2140 let mut tree = allocate::<HuffmanTree, _>(alloc, 2 * 704 + 1);
2141 literal_enc = BlockEncoder::new(
2142 BROTLI_NUM_LITERAL_SYMBOLS,
2143 mb.literal_split.num_types,
2144 mb.literal_split.types.slice(),
2145 mb.literal_split.lengths.slice(),
2146 mb.literal_split.num_blocks,
2147 );
2148 command_enc = BlockEncoder::new(
2149 BROTLI_NUM_COMMAND_SYMBOLS,
2150 mb.command_split.num_types,
2151 mb.command_split.types.slice(),
2152 mb.command_split.lengths.slice(),
2153 mb.command_split.num_blocks,
2154 );
2155 distance_enc = BlockEncoder::new(
2156 num_effective_distance_symbols,
2157 mb.distance_split.num_types,
2158 mb.distance_split.types.slice(),
2159 mb.distance_split.lengths.slice(),
2160 mb.distance_split.num_blocks,
2161 );
2162 literal_enc.build_and_store_block_switch_entropy_codes(tree.slice_mut(), storage_ix, storage);
2163 command_enc.build_and_store_block_switch_entropy_codes(tree.slice_mut(), storage_ix, storage);
2164 distance_enc.build_and_store_block_switch_entropy_codes(tree.slice_mut(), storage_ix, storage);
2165 BrotliWriteBits(2, dist.distance_postfix_bits as (u64), storage_ix, storage);
2166 BrotliWriteBits(
2167 4,
2168 (dist.num_direct_distance_codes >> dist.distance_postfix_bits) as (u64),
2169 storage_ix,
2170 storage,
2171 );
2172 for _i in 0usize..mb.literal_split.num_types {
2173 BrotliWriteBits(2, literal_context_mode as (u64), storage_ix, storage);
2174 }
2175 if mb.literal_context_map_size == 0usize {
2176 StoreTrivialContextMap(
2177 mb.literal_histograms_size,
2178 6,
2179 tree.slice_mut(),
2180 storage_ix,
2181 storage,
2182 );
2183 } else {
2184 EncodeContextMap(
2185 alloc,
2186 mb.literal_context_map.slice(),
2187 mb.literal_context_map_size,
2188 mb.literal_histograms_size,
2189 tree.slice_mut(),
2190 storage_ix,
2191 storage,
2192 );
2193 }
2194 if mb.distance_context_map_size == 0usize {
2195 StoreTrivialContextMap(
2196 mb.distance_histograms_size,
2197 2usize,
2198 tree.slice_mut(),
2199 storage_ix,
2200 storage,
2201 );
2202 } else {
2203 EncodeContextMap(
2204 alloc,
2205 mb.distance_context_map.slice(),
2206 mb.distance_context_map_size,
2207 mb.distance_histograms_size,
2208 tree.slice_mut(),
2209 storage_ix,
2210 storage,
2211 );
2212 }
2213 literal_enc.build_and_store_entropy_codes(
2214 alloc,
2215 mb.literal_histograms.slice(),
2216 mb.literal_histograms_size,
2217 BROTLI_NUM_LITERAL_SYMBOLS,
2218 tree.slice_mut(),
2219 storage_ix,
2220 storage,
2221 );
2222 command_enc.build_and_store_entropy_codes(
2223 alloc,
2224 mb.command_histograms.slice(),
2225 mb.command_histograms_size,
2226 BROTLI_NUM_COMMAND_SYMBOLS,
2227 tree.slice_mut(),
2228 storage_ix,
2229 storage,
2230 );
2231 distance_enc.build_and_store_entropy_codes(
2232 alloc,
2233 mb.distance_histograms.slice(),
2234 mb.distance_histograms_size,
2235 num_distance_symbols as usize,
2236 tree.slice_mut(),
2237 storage_ix,
2238 storage,
2239 );
2240 {
2241 <Alloc as Allocator<HuffmanTree>>::free_cell(alloc, core::mem::take(&mut tree));
2242 }
2243 for i in 0usize..n_commands {
2244 let cmd: Command = commands[i];
2245 let cmd_code: usize = cmd.cmd_prefix_ as usize;
2246 command_enc.store_symbol(cmd_code, storage_ix, storage);
2247 StoreCommandExtra(&cmd, storage_ix, storage);
2248 if mb.literal_context_map_size == 0usize {
2249 let mut j: usize;
2250 j = cmd.insert_len_ as usize;
2251 while j != 0usize {
2252 {
2253 literal_enc.store_symbol(input[(pos & mask)] as usize, storage_ix, storage);
2254 pos = pos.wrapping_add(1);
2255 }
2256 j = j.wrapping_sub(1);
2257 }
2258 } else {
2259 let mut j: usize;
2260 j = cmd.insert_len_ as usize;
2261 while j != 0usize {
2262 {
2263 let context: usize =
2264 Context(prev_byte, prev_byte2, literal_context_mode) as usize;
2265 let literal: u8 = input[(pos & mask)];
2266 literal_enc.store_symbol_with_context(
2267 literal as usize,
2268 context,
2269 mb.literal_context_map.slice(),
2270 storage_ix,
2271 storage,
2272 6usize,
2273 );
2274 prev_byte2 = prev_byte;
2275 prev_byte = literal;
2276 pos = pos.wrapping_add(1);
2277 }
2278 j = j.wrapping_sub(1);
2279 }
2280 }
2281 pos = pos.wrapping_add(cmd.copy_len() as usize);
2282 if cmd.copy_len() != 0 {
2283 prev_byte2 = input[(pos.wrapping_sub(2) & mask)];
2284 prev_byte = input[(pos.wrapping_sub(1) & mask)];
2285 if cmd.cmd_prefix_ as i32 >= 128i32 {
2286 let dist_code: usize = cmd.dist_prefix_ as usize & 0x03ff;
2287 let distnumextra: u32 = u32::from(cmd.dist_prefix_) >> 10; let distextra: u64 = cmd.dist_extra_ as (u64);
2289 if mb.distance_context_map_size == 0usize {
2290 distance_enc.store_symbol(dist_code, storage_ix, storage);
2291 } else {
2292 distance_enc.store_symbol_with_context(
2293 dist_code,
2294 cmd.distance_context() as usize,
2295 mb.distance_context_map.slice(),
2296 storage_ix,
2297 storage,
2298 2usize,
2299 );
2300 }
2301 BrotliWriteBits(distnumextra as u8, distextra, storage_ix, storage);
2302 }
2303 }
2304 }
2305 distance_enc.cleanup(alloc);
2306 command_enc.cleanup(alloc);
2307 literal_enc.cleanup(alloc);
2308 if is_last {
2309 JumpToByteBoundary(storage_ix, storage);
2310 }
2311}
2312
2313fn BuildHistograms(
2314 input: &[u8],
2315 start_pos: usize,
2316 mask: usize,
2317 commands: &[Command],
2318 n_commands: usize,
2319 lit_histo: &mut HistogramLiteral,
2320 cmd_histo: &mut HistogramCommand,
2321 dist_histo: &mut HistogramDistance,
2322) {
2323 let mut pos: usize = start_pos;
2324 for i in 0usize..n_commands {
2325 let cmd: Command = commands[i];
2326 let mut j: usize;
2327 HistogramAddItem(cmd_histo, cmd.cmd_prefix_ as usize);
2328 j = cmd.insert_len_ as usize;
2329 while j != 0usize {
2330 {
2331 HistogramAddItem(lit_histo, input[(pos & mask)] as usize);
2332 pos = pos.wrapping_add(1);
2333 }
2334 j = j.wrapping_sub(1);
2335 }
2336 pos = pos.wrapping_add(cmd.copy_len() as usize);
2337 if cmd.copy_len() != 0 && cmd.cmd_prefix_ >= 128 {
2338 HistogramAddItem(dist_histo, cmd.dist_prefix_ as usize & 0x03ff);
2339 }
2340 }
2341}
2342fn StoreDataWithHuffmanCodes(
2343 input: &[u8],
2344 start_pos: usize,
2345 mask: usize,
2346 commands: &[Command],
2347 n_commands: usize,
2348 lit_depth: &[u8],
2349 lit_bits: &[u16],
2350 cmd_depth: &[u8],
2351 cmd_bits: &[u16],
2352 dist_depth: &[u8],
2353 dist_bits: &[u16],
2354 storage_ix: &mut usize,
2355 storage: &mut [u8],
2356) {
2357 let mut pos: usize = start_pos;
2358 for i in 0usize..n_commands {
2359 let cmd: Command = commands[i];
2360 let cmd_code: usize = cmd.cmd_prefix_ as usize;
2361 let mut j: usize;
2362 BrotliWriteBits(
2363 cmd_depth[cmd_code],
2364 cmd_bits[cmd_code] as (u64),
2365 storage_ix,
2366 storage,
2367 );
2368 StoreCommandExtra(&cmd, storage_ix, storage);
2369 j = cmd.insert_len_ as usize;
2370 while j != 0usize {
2371 {
2372 let literal: u8 = input[(pos & mask)];
2373 BrotliWriteBits(
2374 lit_depth[(literal as usize)],
2375 lit_bits[(literal as usize)] as (u64),
2376 storage_ix,
2377 storage,
2378 );
2379 pos = pos.wrapping_add(1);
2380 }
2381 j = j.wrapping_sub(1);
2382 }
2383 pos = pos.wrapping_add(cmd.copy_len() as usize);
2384 if cmd.copy_len() != 0 && cmd.cmd_prefix_ >= 128 {
2385 let dist_code: usize = cmd.dist_prefix_ as usize & 0x03ff;
2386 let distnumextra: u32 = u32::from(cmd.dist_prefix_) >> 10;
2387 let distextra: u32 = cmd.dist_extra_;
2388 BrotliWriteBits(
2389 dist_depth[dist_code],
2390 dist_bits[dist_code] as (u64),
2391 storage_ix,
2392 storage,
2393 );
2394 BrotliWriteBits(distnumextra as u8, distextra as (u64), storage_ix, storage);
2395 }
2396 }
2397}
2398
2399#[deprecated(note = "use store_meta_block_trivial instead")]
2400pub fn BrotliStoreMetaBlockTrivial<Alloc: BrotliAlloc, Cb>(
2401 alloc: &mut Alloc,
2402 input: &[u8],
2403 start_pos: usize,
2404 length: usize,
2405 mask: usize,
2406 is_last: i32,
2407 params: &BrotliEncoderParams,
2408 distance_cache: &[i32; kNumDistanceCacheEntries],
2409 commands: &[Command],
2410 n_commands: usize,
2411 recoder_state: &mut crate::enc::brotli_bit_stream::RecoderState,
2412 storage_ix: &mut usize,
2413 storage: &mut [u8],
2414 f: &mut Cb,
2415) where
2416 Cb: FnMut(
2417 &mut interface::PredictionModeContextMap<InputReferenceMut>,
2418 &mut [StaticCommand],
2419 InputPair,
2420 &mut Alloc,
2421 ),
2422{
2423 store_meta_block_trivial(
2424 alloc,
2425 input,
2426 start_pos,
2427 length,
2428 mask,
2429 is_last != 0,
2430 params,
2431 distance_cache,
2432 commands,
2433 n_commands,
2434 recoder_state,
2435 storage_ix,
2436 storage,
2437 f,
2438 )
2439}
2440
/// Stores a meta-block using a single block type per category and no
/// context modeling: one Huffman code each for literals, commands, and
/// distances.
///
/// Writes the compressed meta-block header, builds one-pass histograms over
/// the command stream, emits the three Huffman trees, then the data itself.
/// When `params.log_meta_block` is set, the meta-block is first reported to
/// the callback `f` via `LogMetaBlock`.
pub(crate) fn store_meta_block_trivial<Alloc: BrotliAlloc, Cb>(
    alloc: &mut Alloc,
    input: &[u8],
    start_pos: usize,
    length: usize,
    mask: usize,
    is_last: bool,
    params: &BrotliEncoderParams,
    distance_cache: &[i32; kNumDistanceCacheEntries],
    commands: &[Command],
    n_commands: usize,
    recoder_state: &mut RecoderState,
    storage_ix: &mut usize,
    storage: &mut [u8],
    f: &mut Cb,
) where
    Cb: FnMut(
        &mut interface::PredictionModeContextMap<InputReferenceMut>,
        &mut [interface::StaticCommand],
        InputPair,
        &mut Alloc,
    ),
{
    // Resolve the (possibly wrapping) ring-buffer span into two slices.
    let (input0, input1) = InputPairFromMaskedInput(input, start_pos, length, mask);
    if params.log_meta_block {
        // Instrumentation only: no split/context info in the trivial path.
        LogMetaBlock(
            alloc,
            commands.split_at(n_commands).0,
            input0,
            input1,
            distance_cache,
            recoder_state,
            block_split_nop(),
            params,
            Some(ContextType::CONTEXT_LSB6),
            f,
        );
    }
    // One histogram and one depth/bits table per symbol category.
    let mut lit_histo: HistogramLiteral = HistogramLiteral::default();
    let mut cmd_histo: HistogramCommand = HistogramCommand::default();
    let mut dist_histo: HistogramDistance = HistogramDistance::default();
    let mut lit_depth: [u8; 256] = [0; 256];
    let mut lit_bits: [u16; 256] = [0; 256];
    let mut cmd_depth: [u8; 704] = [0; 704];
    let mut cmd_bits: [u16; 704] = [0; 704];
    let mut dist_depth: [u8; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE] =
        [0; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE];
    let mut dist_bits: [u16; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE] =
        [0; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE];
    // Scratch tree sized for the largest alphabet (704 command symbols).
    const MAX_HUFFMAN_TREE_SIZE: usize = (2i32 * 704i32 + 1i32) as usize;
    let mut tree: [HuffmanTree; MAX_HUFFMAN_TREE_SIZE] = [HuffmanTree {
        total_count_: 0,
        index_left_: 0,
        index_right_or_value_: 0,
    }; MAX_HUFFMAN_TREE_SIZE];
    let num_distance_symbols = params.dist.alphabet_size;
    StoreCompressedMetaBlockHeader(is_last, length, storage_ix, storage);
    BuildHistograms(
        input,
        start_pos,
        mask,
        commands,
        n_commands,
        &mut lit_histo,
        &mut cmd_histo,
        &mut dist_histo,
    );
    // 13 zero bits. NOTE(review): presumably the trivial block-split and
    // context headers — confirm against the format spec.
    BrotliWriteBits(13, 0, storage_ix, storage);
    // Build and serialize the three Huffman codes, filling the depth/bits
    // tables that StoreDataWithHuffmanCodes consumes below.
    BuildAndStoreHuffmanTree(
        lit_histo.slice_mut(),
        BROTLI_NUM_LITERAL_SYMBOLS,
        BROTLI_NUM_LITERAL_SYMBOLS,
        &mut tree[..],
        &mut lit_depth[..],
        &mut lit_bits[..],
        storage_ix,
        storage,
    );
    BuildAndStoreHuffmanTree(
        cmd_histo.slice_mut(),
        BROTLI_NUM_COMMAND_SYMBOLS,
        BROTLI_NUM_COMMAND_SYMBOLS,
        &mut tree[..],
        &mut cmd_depth[..],
        &mut cmd_bits[..],
        storage_ix,
        storage,
    );
    BuildAndStoreHuffmanTree(
        dist_histo.slice_mut(),
        MAX_SIMPLE_DISTANCE_ALPHABET_SIZE,
        num_distance_symbols as usize,
        &mut tree[..],
        &mut dist_depth[..],
        &mut dist_bits[..],
        storage_ix,
        storage,
    );
    // Emit the actual command/literal/distance data with the codes above.
    StoreDataWithHuffmanCodes(
        input,
        start_pos,
        mask,
        commands,
        n_commands,
        &mut lit_depth[..],
        &mut lit_bits[..],
        &mut cmd_depth[..],
        &mut cmd_bits[..],
        &mut dist_depth[..],
        &mut dist_bits[..],
        storage_ix,
        storage,
    );
    if is_last {
        // The last meta-block must end on a byte boundary.
        JumpToByteBoundary(storage_ix, storage);
    }
}
2558
2559fn StoreStaticCommandHuffmanTree(storage_ix: &mut usize, storage: &mut [u8]) {
2560 BrotliWriteBits(56, 0x0092_6244_1630_7003, storage_ix, storage);
2561 BrotliWriteBits(3, 0, storage_ix, storage);
2562}
2563
2564fn StoreStaticDistanceHuffmanTree(storage_ix: &mut usize, storage: &mut [u8]) {
2565 BrotliWriteBits(28, 0x0369_dc03, storage_ix, storage);
2566}
2567
/// Borrowed view of one block split: per-block type ids and lengths plus
/// the number of distinct block types.
struct BlockSplitRef<'a> {
    // Block type id of each block, in stream order.
    types: &'a [u8],
    // Length of each block, parallel to `types`.
    lengths: &'a [u32],
    // Number of distinct block types referenced by `types`.
    num_types: u32,
}
2573
2574impl<'a> Default for BlockSplitRef<'a> {
2575 fn default() -> Self {
2576 BlockSplitRef {
2577 types: &[],
2578 lengths: &[],
2579 num_types: 1,
2580 }
2581 }
2582}
2583
/// Borrowed views of a meta-block's three block splits (literal, command,
/// distance) together with its literal and distance context maps.
#[derive(Default)]
struct MetaBlockSplitRefs<'a> {
    // Literal block split.
    btypel: BlockSplitRef<'a>,
    literal_context_map: &'a [u32],
    // Command block split.
    btypec: BlockSplitRef<'a>,
    // Distance block split.
    btyped: BlockSplitRef<'a>,
    distance_context_map: &'a [u32],
}
2592
2593fn block_split_nop() -> MetaBlockSplitRefs<'static> {
2594 MetaBlockSplitRefs::default()
2595}
2596
2597fn block_split_reference<'a, Alloc: BrotliAlloc>(
2598 mb: &'a MetaBlockSplit<Alloc>,
2599) -> MetaBlockSplitRefs<'a> {
2600 return MetaBlockSplitRefs::<'a> {
2601 btypel: BlockSplitRef {
2602 types: mb
2603 .literal_split
2604 .types
2605 .slice()
2606 .split_at(mb.literal_split.num_blocks)
2607 .0,
2608 lengths: mb
2609 .literal_split
2610 .lengths
2611 .slice()
2612 .split_at(mb.literal_split.num_blocks)
2613 .0,
2614 num_types: mb.literal_split.num_types as u32,
2615 },
2616 literal_context_map: mb
2617 .literal_context_map
2618 .slice()
2619 .split_at(mb.literal_context_map_size)
2620 .0,
2621 btypec: BlockSplitRef {
2622 types: mb
2623 .command_split
2624 .types
2625 .slice()
2626 .split_at(mb.command_split.num_blocks)
2627 .0,
2628 lengths: mb
2629 .command_split
2630 .lengths
2631 .slice()
2632 .split_at(mb.command_split.num_blocks)
2633 .0,
2634 num_types: mb.command_split.num_types as u32,
2635 },
2636 btyped: BlockSplitRef {
2637 types: mb
2638 .distance_split
2639 .types
2640 .slice()
2641 .split_at(mb.distance_split.num_blocks)
2642 .0,
2643 lengths: mb
2644 .distance_split
2645 .lengths
2646 .slice()
2647 .split_at(mb.distance_split.num_blocks)
2648 .0,
2649 num_types: mb.distance_split.num_types as u32,
2650 },
2651 distance_context_map: mb
2652 .distance_context_map
2653 .slice()
2654 .split_at(mb.distance_context_map_size)
2655 .0,
2656 };
2657}
2658
/// Running state threaded through the meta-block logging/recoding calls.
#[derive(Clone, Copy, Default)]
pub struct RecoderState {
    // Total number of input bytes processed so far.
    pub num_bytes_encoded: usize,
}
2663
2664impl RecoderState {
2665 pub fn new() -> Self {
2666 Self::default()
2667 }
2668}
2669
2670#[deprecated(note = "use store_meta_block_fast instead")]
2671pub fn BrotliStoreMetaBlockFast<Cb, Alloc: BrotliAlloc>(
2672 m: &mut Alloc,
2673 input: &[u8],
2674 start_pos: usize,
2675 length: usize,
2676 mask: usize,
2677 is_last: i32,
2678 params: &BrotliEncoderParams,
2679 dist_cache: &[i32; kNumDistanceCacheEntries],
2680 commands: &[Command],
2681 n_commands: usize,
2682 recoder_state: &mut RecoderState,
2683 storage_ix: &mut usize,
2684 storage: &mut [u8],
2685 cb: &mut Cb,
2686) where
2687 Cb: FnMut(
2688 &mut interface::PredictionModeContextMap<InputReferenceMut>,
2689 &mut [StaticCommand],
2690 InputPair,
2691 &mut Alloc,
2692 ),
2693{
2694 store_meta_block_fast(
2695 m,
2696 input,
2697 start_pos,
2698 length,
2699 mask,
2700 is_last != 0,
2701 params,
2702 dist_cache,
2703 commands,
2704 n_commands,
2705 recoder_state,
2706 storage_ix,
2707 storage,
2708 cb,
2709 );
2710}
2711
/// Stores a meta-block with fast heuristics and no context modeling.
///
/// For short command streams (<= 128 commands) only the literal code is
/// built from a histogram; commands and distances use the static codes.
/// Longer streams get quickly-built Huffman codes for all three categories.
/// When `params.log_meta_block` is set, the meta-block is first reported to
/// the callback `cb` via `LogMetaBlock`.
pub(crate) fn store_meta_block_fast<Cb, Alloc: BrotliAlloc>(
    m: &mut Alloc,
    input: &[u8],
    start_pos: usize,
    length: usize,
    mask: usize,
    is_last: bool,
    params: &BrotliEncoderParams,
    dist_cache: &[i32; kNumDistanceCacheEntries],
    commands: &[Command],
    n_commands: usize,
    recoder_state: &mut RecoderState,
    storage_ix: &mut usize,
    storage: &mut [u8],
    cb: &mut Cb,
) where
    Cb: FnMut(
        &mut interface::PredictionModeContextMap<InputReferenceMut>,
        &mut [StaticCommand],
        InputPair,
        &mut Alloc,
    ),
{
    // Resolve the (possibly wrapping) ring-buffer span into two slices.
    let (input0, input1) = InputPairFromMaskedInput(input, start_pos, length, mask);
    if params.log_meta_block {
        // Instrumentation only: no split/context info in the fast path.
        LogMetaBlock(
            m,
            commands.split_at(n_commands).0,
            input0,
            input1,
            dist_cache,
            recoder_state,
            block_split_nop(),
            params,
            Some(ContextType::CONTEXT_LSB6),
            cb,
        );
    }
    let num_distance_symbols = params.dist.alphabet_size;
    // Bits needed to index the distance alphabet (alphabet_size >= 2 assumed).
    let distance_alphabet_bits = Log2FloorNonZero(u64::from(num_distance_symbols) - 1) + 1;
    StoreCompressedMetaBlockHeader(is_last, length, storage_ix, storage);
    // 13 zero bits. NOTE(review): presumably the trivial block-split and
    // context headers — confirm against the format spec.
    BrotliWriteBits(13, 0, storage_ix, storage);
    if n_commands <= 128usize {
        // Short stream: build only the literal code; commands/distances
        // use the static Huffman codes.
        let mut histogram: [u32; 256] = [0; 256];
        let mut pos: usize = start_pos;
        let mut num_literals: usize = 0usize;
        let mut lit_depth: [u8; 256] = [0; 256];
        let mut lit_bits: [u16; 256] = [0; 256];
        for i in 0usize..n_commands {
            let cmd: Command = commands[i];
            let mut j: usize;
            j = cmd.insert_len_ as usize;
            // Tally every inserted literal byte.
            while j != 0usize {
                {
                    {
                        let _rhs = 1;
                        let _lhs = &mut histogram[input[(pos & mask)] as usize];
                        *_lhs = (*_lhs).wrapping_add(_rhs as u32);
                    }
                    pos = pos.wrapping_add(1);
                }
                j = j.wrapping_sub(1);
            }
            num_literals = num_literals.wrapping_add(cmd.insert_len_ as usize);
            // Copied region produces no literals; skip over it.
            pos = pos.wrapping_add(cmd.copy_len() as usize);
        }
        BrotliBuildAndStoreHuffmanTreeFast(
            m,
            &mut histogram[..],
            num_literals,
            8usize,
            &mut lit_depth[..],
            &mut lit_bits[..],
            storage_ix,
            storage,
        );
        StoreStaticCommandHuffmanTree(storage_ix, storage);
        StoreStaticDistanceHuffmanTree(storage_ix, storage);
        StoreDataWithHuffmanCodes(
            input,
            start_pos,
            mask,
            commands,
            n_commands,
            &mut lit_depth[..],
            &mut lit_bits[..],
            &kStaticCommandCodeDepth[..],
            &kStaticCommandCodeBits[..],
            &kStaticDistanceCodeDepth[..],
            &kStaticDistanceCodeBits[..],
            storage_ix,
            storage,
        );
    } else {
        // Longer stream: histogram all three categories and build fast
        // Huffman codes for each.
        let mut lit_histo: HistogramLiteral = HistogramLiteral::default();
        let mut cmd_histo: HistogramCommand = HistogramCommand::default();
        let mut dist_histo: HistogramDistance = HistogramDistance::default();
        let mut lit_depth: [u8; 256] = [0; 256];
        let mut lit_bits: [u16; 256] = [0; 256];
        let mut cmd_depth: [u8; 704] = [0; 704];
        let mut cmd_bits: [u16; 704] = [0; 704];
        let mut dist_depth: [u8; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE] =
            [0; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE];
        let mut dist_bits: [u16; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE] =
            [0; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE];
        BuildHistograms(
            input,
            start_pos,
            mask,
            commands,
            n_commands,
            &mut lit_histo,
            &mut cmd_histo,
            &mut dist_histo,
        );
        BrotliBuildAndStoreHuffmanTreeFast(
            m,
            lit_histo.slice(),
            lit_histo.total_count_,
            8usize,
            &mut lit_depth[..],
            &mut lit_bits[..],
            storage_ix,
            storage,
        );
        BrotliBuildAndStoreHuffmanTreeFast(
            m,
            cmd_histo.slice(),
            cmd_histo.total_count_,
            10usize,
            &mut cmd_depth[..],
            &mut cmd_bits[..],
            storage_ix,
            storage,
        );
        BrotliBuildAndStoreHuffmanTreeFast(
            m,
            dist_histo.slice(),
            dist_histo.total_count_,
            distance_alphabet_bits as usize,
            &mut dist_depth[..],
            &mut dist_bits[..],
            storage_ix,
            storage,
        );
        StoreDataWithHuffmanCodes(
            input,
            start_pos,
            mask,
            commands,
            n_commands,
            &mut lit_depth[..],
            &mut lit_bits[..],
            &mut cmd_depth[..],
            &mut cmd_bits[..],
            &mut dist_depth[..],
            &mut dist_bits[..],
            storage_ix,
            storage,
        );
    }
    if is_last {
        // The last meta-block must end on a byte boundary.
        JumpToByteBoundary(storage_ix, storage);
    }
}
2877fn BrotliStoreUncompressedMetaBlockHeader(
2878 length: usize,
2879 storage_ix: &mut usize,
2880 storage: &mut [u8],
2881) {
2882 let mut lenbits: u64 = 0;
2883 let mut nlenbits: u32 = 0;
2884 let mut nibblesbits: u32 = 0;
2885 BrotliWriteBits(1, 0, storage_ix, storage);
2886 BrotliEncodeMlen(length as u32, &mut lenbits, &mut nlenbits, &mut nibblesbits);
2887 BrotliWriteBits(2, nibblesbits as u64, storage_ix, storage);
2888 BrotliWriteBits(nlenbits as u8, lenbits, storage_ix, storage);
2889 BrotliWriteBits(1, 1, storage_ix, storage);
2890}
2891
/// Resolves a `(position, len)` span inside the ring buffer `input` (whose
/// capacity is `mask + 1`) into at most two contiguous slices: the part up
/// to the wrap point and, if the span wraps, the part starting at offset 0.
/// The second slice is empty when the span is contiguous.
fn InputPairFromMaskedInput(
    input: &[u8],
    position: usize,
    len: usize,
    mask: usize,
) -> (&[u8], &[u8]) {
    let start = position & mask;
    let capacity = mask.wrapping_add(1);
    if start.wrapping_add(len) > capacity {
        // The span wraps: split it at the physical end of the buffer.
        let head_len = capacity.wrapping_sub(start);
        (
            &input[start..start + head_len],
            &input[..len.wrapping_sub(head_len)],
        )
    } else {
        // Entirely contiguous.
        (&input[start..start + len], &[])
    }
}
2908
2909#[deprecated(note = "use store_uncompressed_meta_block instead")]
2910pub fn BrotliStoreUncompressedMetaBlock<Cb, Alloc: BrotliAlloc>(
2911 alloc: &mut Alloc,
2912 is_final_block: i32,
2913 input: &[u8],
2914 position: usize,
2915 mask: usize,
2916 params: &BrotliEncoderParams,
2917 len: usize,
2918 recoder_state: &mut RecoderState,
2919 storage_ix: &mut usize,
2920 storage: &mut [u8],
2921 suppress_meta_block_logging: bool,
2922 cb: &mut Cb,
2923) where
2924 Cb: FnMut(
2925 &mut interface::PredictionModeContextMap<InputReferenceMut>,
2926 &mut [StaticCommand],
2927 InputPair,
2928 &mut Alloc,
2929 ),
2930{
2931 store_uncompressed_meta_block(
2932 alloc,
2933 is_final_block != 0,
2934 input,
2935 position,
2936 mask,
2937 params,
2938 len,
2939 recoder_state,
2940 storage_ix,
2941 storage,
2942 suppress_meta_block_logging,
2943 cb,
2944 )
2945}
2946
/// Stores `len` bytes from the ring buffer as an uncompressed meta-block:
/// header, byte-alignment padding, then the raw bytes copied straight into
/// `storage`.
///
/// If `params.log_meta_block` is set (and logging is not suppressed), the
/// block is reported to `cb` as a single insert-only command. When
/// `is_final_block` is true, an empty last meta-block (ISLAST=1,
/// ISLASTEMPTY=1) is appended and the stream is byte-aligned.
pub(crate) fn store_uncompressed_meta_block<Cb, Alloc: BrotliAlloc>(
    alloc: &mut Alloc,
    is_final_block: bool,
    input: &[u8],
    position: usize,
    mask: usize,
    params: &BrotliEncoderParams,
    len: usize,
    recoder_state: &mut RecoderState,
    storage_ix: &mut usize,
    storage: &mut [u8],
    suppress_meta_block_logging: bool,
    cb: &mut Cb,
) where
    Cb: FnMut(
        &mut interface::PredictionModeContextMap<InputReferenceMut>,
        &mut [StaticCommand],
        InputPair,
        &mut Alloc,
    ),
{
    // The span may wrap the ring buffer, yielding up to two slices.
    let (input0, input1) = InputPairFromMaskedInput(input, position, len, mask);
    BrotliStoreUncompressedMetaBlockHeader(len, storage_ix, storage);
    // Raw bytes must start on a byte boundary.
    JumpToByteBoundary(storage_ix, storage);
    // Copy both pieces directly into the output; storage_ix counts bits,
    // so the byte offset is storage_ix >> 3 and each copy advances it by
    // 8 bits per byte.
    let dst_start0 = (*storage_ix >> 3);
    storage[dst_start0..(dst_start0 + input0.len())].clone_from_slice(input0);
    *storage_ix = storage_ix.wrapping_add(input0.len() << 3);
    let dst_start1 = (*storage_ix >> 3);
    storage[dst_start1..(dst_start1 + input1.len())].clone_from_slice(input1);
    *storage_ix = storage_ix.wrapping_add(input1.len() << 3);
    // Re-prime the bit writer after the raw byte copies.
    BrotliWriteBitsPrepareStorage(*storage_ix, storage);
    if params.log_meta_block && !suppress_meta_block_logging {
        // Represent the uncompressed block as one insert-only command.
        let cmds = [Command {
            insert_len_: len as u32,
            copy_len_: 0,
            dist_extra_: 0,
            cmd_prefix_: 0,
            dist_prefix_: 0,
        }];

        LogMetaBlock(
            alloc,
            &cmds,
            input0,
            input1,
            &[0, 0, 0, 0],
            recoder_state,
            block_split_nop(),
            params,
            None,
            cb,
        );
    }
    if is_final_block {
        // Empty last meta-block: ISLAST = 1, ISLASTEMPTY = 1, then align.
        BrotliWriteBits(1u8, 1u64, storage_ix, storage);
        BrotliWriteBits(1u8, 1u64, storage_ix, storage);
        JumpToByteBoundary(storage_ix, storage);
    }
}
3006
3007pub fn BrotliStoreSyncMetaBlock(storage_ix: &mut usize, storage: &mut [u8]) {
3008 BrotliWriteBits(6, 6, storage_ix, storage);
3009 JumpToByteBoundary(storage_ix, storage);
3010}
3011
3012pub fn BrotliWriteEmptyLastMetaBlock(storage_ix: &mut usize, storage: &mut [u8]) {
3013 BrotliWriteBits(1u8, 1u64, storage_ix, storage);
3014 BrotliWriteBits(1u8, 1u64, storage_ix, storage);
3015 JumpToByteBoundary(storage_ix, storage);
3016}
3017
/// Maximum byte length of a base-128 varint for a `u64` (ceil(64 / 7) = 10).
const MAX_SIZE_ENCODING: usize = 10;

/// Encodes `value` as a little-endian base-128 varint: 7 payload bits per
/// byte, continuation flag (0x80) set on every byte except the last.
/// Returns the number of bytes used and the fixed-size output buffer.
fn encode_base_128(mut value: u64) -> (usize, [u8; MAX_SIZE_ENCODING]) {
    let mut encoded = [0u8; MAX_SIZE_ENCODING];
    for (i, slot) in encoded.iter_mut().enumerate() {
        *slot = (value & 0x7f) as u8;
        value >>= 7;
        if value == 0 {
            // No more significant bits: this is the final byte.
            return (i + 1, encoded);
        }
        // More bits remain: mark this byte as a continuation.
        *slot |= 0x80;
    }
    (MAX_SIZE_ENCODING, encoded)
}
3033
3034pub fn BrotliWriteMetadataMetaBlock(
3035 params: &BrotliEncoderParams,
3036 storage_ix: &mut usize,
3037 storage: &mut [u8],
3038) {
3039 BrotliWriteBits(1u8, 0u64, storage_ix, storage); BrotliWriteBits(2u8, 3u64, storage_ix, storage); BrotliWriteBits(1u8, 0u64, storage_ix, storage); BrotliWriteBits(2u8, 1u64, storage_ix, storage); let (size_hint_count, size_hint_b128) = encode_base_128(params.size_hint as u64);
3044
3045 BrotliWriteBits(8u8, 3 + size_hint_count as u64, storage_ix, storage); JumpToByteBoundary(storage_ix, storage);
3047 let magic_number: [u8; 3] = if params.catable && !params.use_dictionary {
3048 [0xe1, 0x97, 0x81]
3049 } else if params.appendable {
3050 [0xe1, 0x97, 0x82]
3051 } else {
3052 [0xe1, 0x97, 0x80]
3053 };
3054 for magic in magic_number.iter() {
3055 BrotliWriteBits(8u8, u64::from(*magic), storage_ix, storage);
3056 }
3057 BrotliWriteBits(8u8, u64::from(VERSION), storage_ix, storage);
3058 for sh in size_hint_b128[..size_hint_count].iter() {
3059 BrotliWriteBits(8u8, u64::from(*sh), storage_ix, storage);
3060 }
3061}
3062
// Unit tests for the base-128 varint helper used by the metadata meta-block.
#[cfg(test)]
mod test {
    use crate::enc::brotli_bit_stream::{encode_base_128, MAX_SIZE_ENCODING};

    // Checks encoded length and little-endian 7-bit payloads across the
    // 1..=10 byte boundaries, up to u64::MAX.
    #[test]
    fn test_encode_base_128() {
        assert_eq!(encode_base_128(0), (1, [0u8; MAX_SIZE_ENCODING]));
        assert_eq!(encode_base_128(1), (1, [1, 0, 0, 0, 0, 0, 0, 0, 0, 0]));
        assert_eq!(encode_base_128(127), (1, [0x7f, 0, 0, 0, 0, 0, 0, 0, 0, 0]));
        assert_eq!(
            encode_base_128(128),
            (2, [0x80, 0x1, 0, 0, 0, 0, 0, 0, 0, 0])
        );
        assert_eq!(
            encode_base_128(16383),
            (2, [0xff, 0x7f, 0, 0, 0, 0, 0, 0, 0, 0])
        );
        assert_eq!(
            encode_base_128(16384),
            (3, [0x80, 0x80, 0x1, 0, 0, 0, 0, 0, 0, 0])
        );
        assert_eq!(
            encode_base_128(2097151),
            (3, [0xff, 0xff, 0x7f, 0, 0, 0, 0, 0, 0, 0])
        );
        assert_eq!(
            encode_base_128(2097152),
            (4, [0x80, 0x80, 0x80, 0x1, 0, 0, 0, 0, 0, 0])
        );
        assert_eq!(
            encode_base_128(4194303),
            (4, [0xff, 0xff, 0xff, 0x1, 0, 0, 0, 0, 0, 0])
        );
        assert_eq!(
            encode_base_128(4294967295),
            (5, [0xff, 0xff, 0xff, 0xff, 0xf, 0, 0, 0, 0, 0])
        );
        assert_eq!(
            encode_base_128(4294967296),
            (5, [0x80, 0x80, 0x80, 0x80, 0x10, 0, 0, 0, 0, 0])
        );
        assert_eq!(
            encode_base_128(9223372036854775808),
            (
                10,
                [0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x1]
            )
        );
        assert_eq!(
            encode_base_128(18446744073709551615),
            (
                10,
                [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1]
            )
        );
    }
}
3119}