1use alloc::Allocator;
2use core;
3use core::cmp::{max, min};
4
5use super::super::alloc;
6use super::super::alloc::{SliceWrapper, SliceWrapperMut};
7use super::backward_references::{
8 AdvHashSpecialization, AdvHasher, AnyHasher, BasicHasher, BrotliCreateBackwardReferences,
9 BrotliEncoderMode, BrotliEncoderParams, BrotliHasherParams, H2Sub, H3Sub, H4Sub, H54Sub, H5Sub,
10 H6Sub, HQ5Sub, HQ7Sub, HowPrepared, StoreLookaheadThenStore, Struct1, UnionHasher, H9,
11 H9_BLOCK_BITS, H9_BLOCK_SIZE, H9_BUCKET_BITS, H9_NUM_LAST_DISTANCES_TO_CHECK,
12};
13use super::bit_cost::{shannon_entropy, BitsEntropy};
14use super::brotli_bit_stream::{
15 store_meta_block, store_meta_block_fast, store_meta_block_trivial,
16 store_uncompressed_meta_block, BrotliWriteEmptyLastMetaBlock, BrotliWriteMetadataMetaBlock,
17 MetaBlockSplit, RecoderState,
18};
19use super::combined_alloc::BrotliAlloc;
20use super::command::{get_length_code, BrotliDistanceParams, Command};
21use super::compress_fragment::compress_fragment_fast;
22use super::compress_fragment_two_pass::{compress_fragment_two_pass, BrotliWriteBits};
23use super::constants::{
24 BROTLI_CONTEXT, BROTLI_CONTEXT_LUT, BROTLI_MAX_NDIRECT, BROTLI_MAX_NPOSTFIX,
25 BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS, BROTLI_WINDOW_GAP,
26};
27use super::hash_to_binary_tree::InitializeH10;
28use super::histogram::{
29 ContextType, CostAccessors, HistogramCommand, HistogramDistance, HistogramLiteral,
30};
31use super::interface;
32use super::metablock::{
33 BrotliBuildMetaBlock, BrotliBuildMetaBlockGreedy, BrotliInitDistanceParams,
34 BrotliOptimizeHistograms,
35};
36pub use super::parameters::BrotliEncoderParameter;
37use super::static_dict::{kNumDistanceCacheEntries, BrotliGetDictionary};
38use super::util::{floatX, Log2FloorNonZero};
39use crate::enc::combined_alloc::{alloc_default, allocate};
40use crate::enc::input_pair::InputReferenceMut;
41use crate::enc::utf8_util::is_mostly_utf8;
42
43static kCompressFragmentTwoPassBlockSize: usize = (1i32 << 17) as usize;
80
81static kMinUTF8Ratio: floatX = 0.75;
82
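// Sliding-window ring buffer for incoming data. `size_` is a power of two with
// `mask_ == size_ - 1`; the extra `tail_size_` region past the end mirrors the first
// bytes of the window, and `buffer_index` skips two guard bytes kept in front of
// position 0 (see RingBufferInitBuffer / RingBufferWrite below).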
83pub struct RingBuffer<AllocU8: alloc::Allocator<u8>> {
84 pub size_: u32,
85 pub mask_: u32,
86 pub tail_size_: u32,
87 pub total_size_: u32,
88 pub cur_size_: u32,
89 pub pos_: u32,
90 pub data_mo: AllocU8::AllocatedMemory,
91 pub buffer_index: usize,
92}
93
94#[derive(PartialEq, Eq, Copy, Clone)]
95#[repr(i32)]
96pub enum BrotliEncoderStreamState {
97 BROTLI_STREAM_PROCESSING = 0,
98 BROTLI_STREAM_FLUSH_REQUESTED = 1,
99 BROTLI_STREAM_FINISHED = 2,
100 BROTLI_STREAM_METADATA_HEAD = 3,
101 BROTLI_STREAM_METADATA_BODY = 4,
102}
103
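// Where pending output currently lives: an offset into the dynamically allocated
// `storage_`, an offset into the fixed 16-byte `tiny_buf_`, or nowhere.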
104#[derive(Clone, Copy, Debug)]
105enum NextOut {
106 DynamicStorage(u32),
107 TinyBuf(u32),
108 None,
109}
110fn GetNextOutInternal<'a>(
111 next_out: &NextOut,
112 storage: &'a mut [u8],
113 tiny_buf: &'a mut [u8; 16],
114) -> &'a mut [u8] {
115 match next_out {
116 &NextOut::DynamicStorage(offset) => &mut storage[offset as usize..],
117 &NextOut::TinyBuf(offset) => &mut tiny_buf[offset as usize..],
118 &NextOut::None => &mut [],
119 }
120}
121macro_rules! GetNextOut {
122 ($s : expr) => {
123 GetNextOutInternal(&$s.next_out_, $s.storage_.slice_mut(), &mut $s.tiny_buf_)
124 };
125}
126fn NextOutIncrement(next_out: &NextOut, inc: i32) -> NextOut {
127 match next_out {
128 &NextOut::DynamicStorage(offset) => NextOut::DynamicStorage((offset as i32 + inc) as u32),
129 &NextOut::TinyBuf(offset) => NextOut::TinyBuf((offset as i32 + inc) as u32),
130 &NextOut::None => NextOut::None,
131 }
132}
133fn IsNextOutNull(next_out: &NextOut) -> bool {
134 match next_out {
135 &NextOut::DynamicStorage(_) => false,
136 &NextOut::TinyBuf(_) => false,
137 &NextOut::None => true,
138 }
139}
140
141#[derive(Clone, Copy, Debug)]
142pub enum IsFirst {
143 NothingWritten,
144 HeaderWritten,
145 FirstCatableByteWritten,
146 BothCatableBytesWritten,
147}
148
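// Complete encoder state for the streaming API: parameters, allocator, hasher,
// input ring buffer, accumulated commands/literals, distance cache and pending
// output. Typically created with new(), configured via set_parameter() and driven
// through compress_stream().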
149pub struct BrotliEncoderStateStruct<Alloc: BrotliAlloc> {
150 pub params: BrotliEncoderParams,
151 pub m8: Alloc,
152 pub hasher_: UnionHasher<Alloc>,
153 pub input_pos_: u64,
154 pub ringbuffer_: RingBuffer<Alloc>,
155 pub cmd_alloc_size_: usize,
156 pub commands_: <Alloc as Allocator<Command>>::AllocatedMemory,
157 pub num_commands_: usize,
158 pub num_literals_: usize,
159 pub last_insert_len_: usize,
160 pub last_flush_pos_: u64,
161 pub last_processed_pos_: u64,
162 pub dist_cache_: [i32; 16],
163 pub saved_dist_cache_: [i32; kNumDistanceCacheEntries],
164 pub last_bytes_: u16,
165 pub last_bytes_bits_: u8,
166 pub prev_byte_: u8,
167 pub prev_byte2_: u8,
168 pub storage_size_: usize,
169 pub storage_: <Alloc as Allocator<u8>>::AllocatedMemory,
170 pub small_table_: [i32; 1024],
171 pub large_table_: <Alloc as Allocator<i32>>::AllocatedMemory,
172 pub cmd_depths_: [u8; 128],
174 pub cmd_bits_: [u16; 128],
175 pub cmd_code_: [u8; 512],
176 pub cmd_code_numbits_: usize,
177 pub command_buf_: <Alloc as Allocator<u32>>::AllocatedMemory,
178 pub literal_buf_: <Alloc as Allocator<u8>>::AllocatedMemory,
179 next_out_: NextOut,
180 pub available_out_: usize,
181 pub total_out_: u64,
182 pub tiny_buf_: [u8; 16],
183 pub remaining_metadata_bytes_: u32,
184 pub stream_state_: BrotliEncoderStreamState,
185 pub is_last_block_emitted_: bool,
186 pub is_initialized_: bool,
187 pub is_first_mb: IsFirst,
188 pub literal_scratch_space: <HistogramLiteral as CostAccessors>::i32vec,
189 pub command_scratch_space: <HistogramCommand as CostAccessors>::i32vec,
190 pub distance_scratch_space: <HistogramDistance as CostAccessors>::i32vec,
191 pub recoder_state: RecoderState,
192 custom_dictionary: bool,
193}
194
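// Applies a single BrotliEncoderParameter to `params`; returns false for unknown
// parameters or invalid values. For example, set_parameter(&mut params,
// BrotliEncoderParameter::BROTLI_PARAM_QUALITY, 9) selects quality 9.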
195pub fn set_parameter(
196 params: &mut BrotliEncoderParams,
197 p: BrotliEncoderParameter,
198 value: u32,
199) -> bool {
200 use crate::enc::parameters::BrotliEncoderParameter::*;
201 match p {
202 BROTLI_PARAM_MODE => {
203 params.mode = match value {
204 0 => BrotliEncoderMode::BROTLI_MODE_GENERIC,
205 1 => BrotliEncoderMode::BROTLI_MODE_TEXT,
206 2 => BrotliEncoderMode::BROTLI_MODE_FONT,
207 3 => BrotliEncoderMode::BROTLI_FORCE_LSB_PRIOR,
208 4 => BrotliEncoderMode::BROTLI_FORCE_MSB_PRIOR,
209 5 => BrotliEncoderMode::BROTLI_FORCE_UTF8_PRIOR,
210 6 => BrotliEncoderMode::BROTLI_FORCE_SIGNED_PRIOR,
211 _ => BrotliEncoderMode::BROTLI_MODE_GENERIC,
212 };
213 }
214 BROTLI_PARAM_QUALITY => params.quality = value as i32,
215 BROTLI_PARAM_STRIDE_DETECTION_QUALITY => params.stride_detection_quality = value as u8,
216 BROTLI_PARAM_HIGH_ENTROPY_DETECTION_QUALITY => {
217 params.high_entropy_detection_quality = value as u8
218 }
219 BROTLI_PARAM_CDF_ADAPTATION_DETECTION => params.cdf_adaptation_detection = value as u8,
220 BROTLI_PARAM_Q9_5 => params.q9_5 = (value != 0),
221 BROTLI_PARAM_PRIOR_BITMASK_DETECTION => params.prior_bitmask_detection = value as u8,
222 BROTLI_PARAM_SPEED => {
223 params.literal_adaptation[1].0 = value as u16;
224 if params.literal_adaptation[0] == (0, 0) {
225 params.literal_adaptation[0].0 = value as u16;
226 }
227 }
228 BROTLI_PARAM_SPEED_MAX => {
229 params.literal_adaptation[1].1 = value as u16;
230 if params.literal_adaptation[0].1 == 0 {
231 params.literal_adaptation[0].1 = value as u16;
232 }
233 }
234 BROTLI_PARAM_CM_SPEED => {
235 params.literal_adaptation[3].0 = value as u16;
236 if params.literal_adaptation[2] == (0, 0) {
237 params.literal_adaptation[2].0 = value as u16;
238 }
239 }
240 BROTLI_PARAM_CM_SPEED_MAX => {
241 params.literal_adaptation[3].1 = value as u16;
242 if params.literal_adaptation[2].1 == 0 {
243 params.literal_adaptation[2].1 = value as u16;
244 }
245 }
246 BROTLI_PARAM_SPEED_LOW => params.literal_adaptation[0].0 = value as u16,
247 BROTLI_PARAM_SPEED_LOW_MAX => params.literal_adaptation[0].1 = value as u16,
248 BROTLI_PARAM_CM_SPEED_LOW => params.literal_adaptation[2].0 = value as u16,
249 BROTLI_PARAM_CM_SPEED_LOW_MAX => params.literal_adaptation[2].1 = value as u16,
250 BROTLI_PARAM_LITERAL_BYTE_SCORE => params.hasher.literal_byte_score = value as i32,
251 BROTLI_METABLOCK_CALLBACK => params.log_meta_block = value != 0,
252 BROTLI_PARAM_LGWIN => params.lgwin = value as i32,
253 BROTLI_PARAM_LGBLOCK => params.lgblock = value as i32,
254 BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING => {
255 if value != 0 && value != 1 {
256 return false;
257 }
258 params.disable_literal_context_modeling = if value != 0 { 1 } else { 0 };
259 }
260 BROTLI_PARAM_SIZE_HINT => params.size_hint = value as usize,
261 BROTLI_PARAM_LARGE_WINDOW => params.large_window = value != 0,
262 BROTLI_PARAM_AVOID_DISTANCE_PREFIX_SEARCH => {
263 params.avoid_distance_prefix_search = value != 0
264 }
265 BROTLI_PARAM_CATABLE => {
266 params.catable = value != 0;
267 if !params.appendable {
268 params.appendable = value != 0;
269 }
270 params.use_dictionary = (value == 0);
271 }
272 BROTLI_PARAM_APPENDABLE => params.appendable = value != 0,
273 BROTLI_PARAM_MAGIC_NUMBER => params.magic_number = value != 0,
274 BROTLI_PARAM_FAVOR_EFFICIENCY => params.favor_cpu_efficiency = value != 0,
275 _ => return false,
276 }
277 true
278}
279
280impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
281 pub fn set_parameter(&mut self, p: BrotliEncoderParameter, value: u32) -> bool {
282 if self.is_initialized_ {
283 false
284 } else {
285 set_parameter(&mut self.params, p, value)
286 }
287 }
288}
289
290pub const BROTLI_LARGE_MAX_DISTANCE_BITS: u32 = 62;
292pub const BROTLI_LARGE_MIN_WBITS: u32 = 10;
293pub const BROTLI_LARGE_MAX_WBITS: u32 = 30;
294
295pub const BROTLI_MAX_DISTANCE_BITS: u32 = 24;
296pub const BROTLI_MAX_WINDOW_BITS: usize = BROTLI_MAX_DISTANCE_BITS as usize;
297pub const BROTLI_MAX_DISTANCE: usize = 0x03ff_fffc;
298pub const BROTLI_MAX_ALLOWED_DISTANCE: usize = 0x07ff_fffc;
299pub const BROTLI_NUM_DISTANCE_SHORT_CODES: u32 = 16;
300pub fn BROTLI_DISTANCE_ALPHABET_SIZE(NPOSTFIX: u32, NDIRECT: u32, MAXNBITS: u32) -> u32 {
301 BROTLI_NUM_DISTANCE_SHORT_CODES + (NDIRECT) + ((MAXNBITS) << ((NPOSTFIX) + 1))
302}
303
304pub const BROTLI_NUM_DISTANCE_SYMBOLS: usize = 1128;
309
310pub fn BrotliEncoderInitParams() -> BrotliEncoderParams {
311 BrotliEncoderParams {
312 dist: BrotliDistanceParams {
313 distance_postfix_bits: 0,
314 num_direct_distance_codes: 0,
315 alphabet_size: BROTLI_DISTANCE_ALPHABET_SIZE(0, 0, BROTLI_MAX_DISTANCE_BITS),
316 max_distance: BROTLI_MAX_DISTANCE,
317 },
318 mode: BrotliEncoderMode::BROTLI_MODE_GENERIC,
319 log_meta_block: false,
320 large_window: false,
321 avoid_distance_prefix_search: false,
322 quality: 11,
323 q9_5: false,
324 lgwin: 22i32,
325 lgblock: 0i32,
326 size_hint: 0usize,
327 disable_literal_context_modeling: 0i32,
328 stride_detection_quality: 0,
329 high_entropy_detection_quality: 0,
330 cdf_adaptation_detection: 0,
331 prior_bitmask_detection: 0,
332 literal_adaptation: [(0, 0); 4],
333 catable: false,
334 use_dictionary: true,
335 appendable: false,
336 magic_number: false,
337 favor_cpu_efficiency: false,
338 hasher: BrotliHasherParams {
339 type_: 6,
340 block_bits: 9 - 1,
341 bucket_bits: 15,
342 hash_len: 5,
343 num_last_distances_to_check: 16,
344 literal_byte_score: 0,
345 },
346 }
347}
348
349impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
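// Extends the copy length of the most recent command while the next input bytes
// still match at the same backward distance, then re-derives its length/command
// prefix code.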
350 fn extend_last_command(&mut self, bytes: &mut u32, wrapped_last_processed_pos: &mut u32) {
351 let last_command = &mut self.commands_.slice_mut()[self.num_commands_ - 1];
352
353 let mask = self.ringbuffer_.mask_;
354 let max_backward_distance: u64 = (1u64 << self.params.lgwin) - BROTLI_WINDOW_GAP as u64;
355 let last_copy_len = u64::from(last_command.copy_len_) & 0x01ff_ffff;
356 let last_processed_pos: u64 = self.last_processed_pos_ - last_copy_len;
357 let max_distance: u64 = if last_processed_pos < max_backward_distance {
358 last_processed_pos
359 } else {
360 max_backward_distance
361 };
362 let cmd_dist: u64 = self.dist_cache_[0] as u64;
363 let distance_code: u32 = last_command.restore_distance_code(&self.params.dist);
364 if (distance_code < BROTLI_NUM_DISTANCE_SHORT_CODES
365 || distance_code as u64 - (BROTLI_NUM_DISTANCE_SHORT_CODES - 1) as u64 == cmd_dist)
366 {
367 if (cmd_dist <= max_distance) {
368 while (*bytes != 0
369 && self.ringbuffer_.data_mo.slice()[self.ringbuffer_.buffer_index
370 + (*wrapped_last_processed_pos as usize & mask as usize)]
371 == self.ringbuffer_.data_mo.slice()[self.ringbuffer_.buffer_index
372 + (((*wrapped_last_processed_pos as usize)
373 .wrapping_sub(cmd_dist as usize))
374 & mask as usize)])
375 {
376 last_command.copy_len_ += 1;
377 (*bytes) -= 1;
378 (*wrapped_last_processed_pos) += 1;
379 }
380 }
381 get_length_code(
383 last_command.insert_len_ as usize,
384 ((last_command.copy_len_ & 0x01ff_ffff) as i32
385 + (last_command.copy_len_ >> 25) as i32) as usize,
386 (last_command.dist_prefix_ & 0x03ff) == 0,
387 &mut last_command.cmd_prefix_,
388 );
389 }
390 }
391}
392
393fn RingBufferInit<AllocU8: alloc::Allocator<u8>>() -> RingBuffer<AllocU8> {
394 RingBuffer {
395 size_: 0,
396 mask_: 0,
397 tail_size_: 0,
398 total_size_: 0,
399
400 cur_size_: 0,
401 pos_: 0,
402 data_mo: AllocU8::AllocatedMemory::default(),
403 buffer_index: 0usize,
404 }
405}
406
407impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
408 pub fn new(m8: Alloc) -> Self {
409 let cache: [i32; 16] = [4, 11, 15, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
410 Self {
411 params: BrotliEncoderInitParams(),
412 input_pos_: 0,
413 num_commands_: 0,
414 num_literals_: 0,
415 last_insert_len_: 0,
416 last_flush_pos_: 0,
417 last_processed_pos_: 0,
418 prev_byte_: 0,
419 prev_byte2_: 0,
420 storage_size_: 0,
421 storage_: alloc_default::<u8, Alloc>(),
422 hasher_: UnionHasher::<Alloc>::default(),
423 large_table_: alloc_default::<i32, Alloc>(),
424 cmd_code_numbits_: 0,
426 command_buf_: alloc_default::<u32, Alloc>(),
427 literal_buf_: alloc_default::<u8, Alloc>(),
428 next_out_: NextOut::None,
429 available_out_: 0,
430 total_out_: 0,
431 is_first_mb: IsFirst::NothingWritten,
432 stream_state_: BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING,
433 is_last_block_emitted_: false,
434 is_initialized_: false,
435 ringbuffer_: RingBufferInit(),
436 commands_: alloc_default::<Command, Alloc>(),
437 cmd_alloc_size_: 0,
438 dist_cache_: cache,
439 saved_dist_cache_: [cache[0], cache[1], cache[2], cache[3]],
440 cmd_bits_: [0; 128],
441 cmd_depths_: [0; 128],
442 last_bytes_: 0,
443 last_bytes_bits_: 0,
444 cmd_code_: [0; 512],
445 m8,
446 remaining_metadata_bytes_: 0,
447 small_table_: [0; 1024],
448 tiny_buf_: [0; 16],
449 literal_scratch_space: HistogramLiteral::make_nnz_storage(),
450 command_scratch_space: HistogramCommand::make_nnz_storage(),
451 distance_scratch_space: HistogramDistance::make_nnz_storage(),
452 recoder_state: RecoderState::new(),
453 custom_dictionary: false,
454 }
455 }
456}
457
458fn RingBufferFree<AllocU8: alloc::Allocator<u8>>(m: &mut AllocU8, rb: &mut RingBuffer<AllocU8>) {
459 m.free_cell(core::mem::take(&mut rb.data_mo));
460}
461fn DestroyHasher<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
462 m16: &mut Alloc,
463 handle: &mut UnionHasher<Alloc>,
464) {
465 handle.free(m16);
466}
467impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
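// Frees every allocation owned by the encoder state: output storage, the command
// array, the ring buffer, the hasher and the fast-path hash/command buffers.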
502 fn cleanup(&mut self) {
503 <Alloc as Allocator<u8>>::free_cell(&mut self.m8, core::mem::take(&mut self.storage_));
504 <Alloc as Allocator<Command>>::free_cell(
505 &mut self.m8,
506 core::mem::take(&mut self.commands_),
507 );
508 RingBufferFree(&mut self.m8, &mut self.ringbuffer_);
509 DestroyHasher(&mut self.m8, &mut self.hasher_);
510 <Alloc as Allocator<i32>>::free_cell(&mut self.m8, core::mem::take(&mut self.large_table_));
511 <Alloc as Allocator<u32>>::free_cell(&mut self.m8, core::mem::take(&mut self.command_buf_));
512 <Alloc as Allocator<u8>>::free_cell(&mut self.m8, core::mem::take(&mut self.literal_buf_));
513 }
514}
515
516pub fn BrotliEncoderDestroyInstance<Alloc: BrotliAlloc>(s: &mut BrotliEncoderStateStruct<Alloc>) {
523 s.cleanup()
524}
525
526#[cfg(not(feature = "disallow_large_window_size"))]
527fn check_large_window_ok() -> bool {
528 true
529}
530#[cfg(feature = "disallow_large_window_size")]
531fn check_large_window_ok() -> bool {
532 false
533}
534
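// Clamps quality to 0..=11 and lgwin to 10..=24 (10..=30 when large-window mode is
// enabled and allowed); requesting catable output also forces appendable output.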
535pub fn SanitizeParams(params: &mut BrotliEncoderParams) {
536 params.quality = min(11i32, max(0i32, params.quality));
537 if params.lgwin < 10i32 {
538 params.lgwin = 10i32;
539 } else if params.lgwin > 24i32 {
540 if params.large_window && check_large_window_ok() {
541 if params.lgwin > 30i32 {
542 params.lgwin = 30i32;
543 }
544 } else {
545 params.lgwin = 24i32;
546 }
547 }
548 if params.catable {
549 params.appendable = true;
550 }
551}
552
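// Picks the meta-block size exponent: qualities 0/1 use one block per window,
// qualities 2/3 use 14, higher qualities default to 16 (up to 18 when the window is
// larger at quality >= 9), and explicit values are clamped to 16..=24.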
553fn ComputeLgBlock(params: &BrotliEncoderParams) -> i32 {
554 let mut lgblock: i32 = params.lgblock;
555 if params.quality == 0i32 || params.quality == 1i32 {
556 lgblock = params.lgwin;
557 } else if params.quality < 4i32 {
558 lgblock = 14i32;
559 } else if lgblock == 0i32 {
560 lgblock = 16i32;
561 if params.quality >= 9i32 && (params.lgwin > lgblock) {
562 lgblock = min(18i32, params.lgwin);
563 }
564 } else {
565 lgblock = min(24i32, max(16i32, lgblock));
566 }
567 lgblock
568}
569
570fn ComputeRbBits(params: &BrotliEncoderParams) -> i32 {
571 1i32 + max(params.lgwin, params.lgblock)
572}
573
574fn RingBufferSetup<AllocU8: alloc::Allocator<u8>>(
575 params: &BrotliEncoderParams,
576 rb: &mut RingBuffer<AllocU8>,
577) {
578 let window_bits: i32 = ComputeRbBits(params);
579 let tail_bits: i32 = params.lgblock;
580 rb.size_ = 1u32 << window_bits;
581 rb.mask_ = (1u32 << window_bits).wrapping_sub(1);
582 rb.tail_size_ = 1u32 << tail_bits;
583 rb.total_size_ = rb.size_.wrapping_add(rb.tail_size_);
584}
585
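// Encodes the window-size bits of the stream header into `last_bytes`: 1 bit for
// lgwin == 16, 7 bits for lgwin == 17 or below 16, 4 bits for 18..=24, and a 14-bit
// form for large-window streams.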
586fn EncodeWindowBits(
587 lgwin: i32,
588 large_window: bool,
589 last_bytes: &mut u16,
590 last_bytes_bits: &mut u8,
591) {
592 if large_window {
593 *last_bytes = (((lgwin & 0x3F) << 8) | 0x11) as u16;
594 *last_bytes_bits = 14;
595 } else if lgwin == 16i32 {
596 *last_bytes = 0u16;
597 *last_bytes_bits = 1u8;
598 } else if lgwin == 17i32 {
599 *last_bytes = 1u16;
600 *last_bytes_bits = 7u8;
601 } else if lgwin > 17i32 {
602 *last_bytes = ((lgwin - 17i32) << 1 | 1i32) as u16;
603 *last_bytes_bits = 4u8;
604 } else {
605 *last_bytes = ((lgwin - 8i32) << 4 | 1i32) as u16;
606 *last_bytes_bits = 7u8;
607 }
608}
609
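// Installs the static command prefix code (depths, bit patterns and its
// pre-serialized header) that the quality-0 fast path starts from.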
610fn InitCommandPrefixCodes(
611 cmd_depths: &mut [u8],
612 cmd_bits: &mut [u16],
613 cmd_code: &mut [u8],
614 cmd_code_numbits: &mut usize,
615) {
616 static kDefaultCommandDepths: [u8; 128] = [
617 0, 4, 4, 5, 6, 6, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 0, 0, 0, 4, 4, 4, 4, 4, 5, 5, 6, 6, 6, 6,
618 7, 7, 7, 7, 10, 10, 10, 10, 10, 10, 0, 4, 4, 5, 5, 5, 6, 6, 7, 8, 8, 9, 10, 10, 10, 10, 10,
619 10, 10, 10, 10, 10, 10, 10, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 6, 6, 6,
620 6, 5, 5, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 6, 6, 7, 7, 7, 8, 10, 12, 12,
621 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 0, 0, 0, 0,
622 ];
623 static kDefaultCommandBits: [u16; 128] = [
624 0, 0, 8, 9, 3, 35, 7, 71, 39, 103, 23, 47, 175, 111, 239, 31, 0, 0, 0, 4, 12, 2, 10, 6, 13,
625 29, 11, 43, 27, 59, 87, 55, 15, 79, 319, 831, 191, 703, 447, 959, 0, 14, 1, 25, 5, 21, 19,
626 51, 119, 159, 95, 223, 479, 991, 63, 575, 127, 639, 383, 895, 255, 767, 511, 1023, 14, 0,
627 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 59, 7, 39, 23, 55, 30, 1, 17, 9, 25, 5, 0, 8,
628 4, 12, 2, 10, 6, 21, 13, 29, 3, 19, 11, 15, 47, 31, 95, 63, 127, 255, 767, 2815, 1791,
629 3839, 511, 2559, 1535, 3583, 1023, 3071, 2047, 4095, 0, 0, 0, 0,
630 ];
631 static kDefaultCommandCode: [u8; 57] = [
632 0xff, 0x77, 0xd5, 0xbf, 0xe7, 0xde, 0xea, 0x9e, 0x51, 0x5d, 0xde, 0xc6, 0x70, 0x57, 0xbc,
633 0x58, 0x58, 0x58, 0xd8, 0xd8, 0x58, 0xd5, 0xcb, 0x8c, 0xea, 0xe0, 0xc3, 0x87, 0x1f, 0x83,
634 0xc1, 0x60, 0x1c, 0x67, 0xb2, 0xaa, 0x6, 0x83, 0xc1, 0x60, 0x30, 0x18, 0xcc, 0xa1, 0xce,
635 0x88, 0x54, 0x94, 0x46, 0xe1, 0xb0, 0xd0, 0x4e, 0xb2, 0xf7, 0x4, 0x0,
636 ];
637 static kDefaultCommandCodeNumBits: usize = 448usize;
638 cmd_depths[..].clone_from_slice(&kDefaultCommandDepths[..]);
639 cmd_bits[..].clone_from_slice(&kDefaultCommandBits[..]);
640 cmd_code[..kDefaultCommandCode.len()].clone_from_slice(&kDefaultCommandCode[..]);
641 *cmd_code_numbits = kDefaultCommandCodeNumBits;
642}
643
644impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
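// One-time initialization on first use: sanitizes parameters, sizes the ring
// buffer, pre-computes the window-bits header and, for catable streams, poisons the
// distance cache so no short distance codes can refer to data before the stream start.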
645 fn ensure_initialized(&mut self) -> bool {
646 if self.is_initialized_ {
647 return true;
648 }
649 SanitizeParams(&mut self.params);
650 self.params.lgblock = ComputeLgBlock(&mut self.params);
651 ChooseDistanceParams(&mut self.params);
652 self.remaining_metadata_bytes_ = u32::MAX;
653 RingBufferSetup(&mut self.params, &mut self.ringbuffer_);
654 {
655 let mut lgwin: i32 = self.params.lgwin;
656 if self.params.quality == 0i32 || self.params.quality == 1i32 {
657 lgwin = max(lgwin, 18i32);
658 }
659 EncodeWindowBits(
660 lgwin,
661 self.params.large_window,
662 &mut self.last_bytes_,
663 &mut self.last_bytes_bits_,
664 );
665 }
666 if self.params.quality == 0i32 {
667 InitCommandPrefixCodes(
668 &mut self.cmd_depths_[..],
669 &mut self.cmd_bits_[..],
670 &mut self.cmd_code_[..],
671 &mut self.cmd_code_numbits_,
672 );
673 }
674 if self.params.catable {
675 for item in self.dist_cache_.iter_mut() {
679 *item = 0x7ffffff0;
680 }
681 for item in self.saved_dist_cache_.iter_mut() {
682 *item = 0x7ffffff0;
683 }
684 }
685 self.is_initialized_ = true;
686 true
687 }
688}
689
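// (Re)allocates the ring buffer to `buflen` bytes plus two leading guard bytes and
// seven bytes of trailing slack for 8-byte-at-a-time hashing, preserving any data
// already stored.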
690fn RingBufferInitBuffer<AllocU8: alloc::Allocator<u8>>(
691 m: &mut AllocU8,
692 buflen: u32,
693 rb: &mut RingBuffer<AllocU8>,
694) {
695 static kSlackForEightByteHashingEverywhere: usize = 7usize;
696 let mut new_data = m.alloc_cell(
697 ((2u32).wrapping_add(buflen) as usize).wrapping_add(kSlackForEightByteHashingEverywhere),
698 );
699 if !rb.data_mo.slice().is_empty() {
700 let lim: usize = ((2u32).wrapping_add(rb.cur_size_) as usize)
701 .wrapping_add(kSlackForEightByteHashingEverywhere);
702 new_data.slice_mut()[..lim].clone_from_slice(&rb.data_mo.slice()[..lim]);
703 m.free_cell(core::mem::take(&mut rb.data_mo));
704 }
705 let _ = core::mem::replace(&mut rb.data_mo, new_data);
706 rb.cur_size_ = buflen;
707 rb.buffer_index = 2usize;
708 rb.data_mo.slice_mut()[(rb.buffer_index.wrapping_sub(2))] = 0;
709 rb.data_mo.slice_mut()[(rb.buffer_index.wrapping_sub(1))] = 0;
710 for i in 0usize..kSlackForEightByteHashingEverywhere {
711 rb.data_mo.slice_mut()[rb
712 .buffer_index
713 .wrapping_add(rb.cur_size_ as usize)
714 .wrapping_add(i)] = 0;
715 }
716}
717
718fn RingBufferWriteTail<AllocU8: alloc::Allocator<u8>>(
719 bytes: &[u8],
720 n: usize,
721 rb: &mut RingBuffer<AllocU8>,
722) {
723 let masked_pos: usize = (rb.pos_ & rb.mask_) as usize;
724 if masked_pos < rb.tail_size_ as usize {
725 let p: usize = (rb.size_ as usize).wrapping_add(masked_pos);
726 let begin = rb.buffer_index.wrapping_add(p);
727 let lim = min(n, (rb.tail_size_ as usize).wrapping_sub(masked_pos));
728 rb.data_mo.slice_mut()[begin..(begin + lim)].clone_from_slice(&bytes[..lim]);
729 }
730}
731
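// Appends `n` bytes at the current position, duplicating data that lands near the
// start of the window into the tail mirror and keeping the two guard bytes in front
// of position 0 equal to the last two bytes of the window; `pos_` is kept within a
// 2^30-based wrapping scheme.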
732fn RingBufferWrite<AllocU8: alloc::Allocator<u8>>(
733 m: &mut AllocU8,
734 bytes: &[u8],
735 n: usize,
736 rb: &mut RingBuffer<AllocU8>,
737) {
738 if rb.pos_ == 0u32 && (n < rb.tail_size_ as usize) {
739 rb.pos_ = n as u32;
740 RingBufferInitBuffer(m, rb.pos_, rb);
741 rb.data_mo.slice_mut()[rb.buffer_index..(rb.buffer_index + n)]
742 .clone_from_slice(&bytes[..n]);
743 return;
744 }
745 if rb.cur_size_ < rb.total_size_ {
746 RingBufferInitBuffer(m, rb.total_size_, rb);
747 rb.data_mo.slice_mut()[rb
748 .buffer_index
749 .wrapping_add(rb.size_ as usize)
750 .wrapping_sub(2)] = 0u8;
751 rb.data_mo.slice_mut()[rb
752 .buffer_index
753 .wrapping_add(rb.size_ as usize)
754 .wrapping_sub(1)] = 0u8;
755 }
756 {
757 let masked_pos: usize = (rb.pos_ & rb.mask_) as usize;
758 RingBufferWriteTail(bytes, n, rb);
759 if masked_pos.wrapping_add(n) <= rb.size_ as usize {
760 let start = rb.buffer_index.wrapping_add(masked_pos);
762 rb.data_mo.slice_mut()[start..(start + n)].clone_from_slice(&bytes[..n]);
763 } else {
764 {
765 let start = rb.buffer_index.wrapping_add(masked_pos);
766 let mid = min(n, (rb.total_size_ as usize).wrapping_sub(masked_pos));
767 rb.data_mo.slice_mut()[start..(start + mid)].clone_from_slice(&bytes[..mid]);
768 }
769 let xstart = rb.buffer_index.wrapping_add(0);
770 let size = n.wrapping_sub((rb.size_ as usize).wrapping_sub(masked_pos));
771 let bytes_start = (rb.size_ as usize).wrapping_sub(masked_pos);
772 rb.data_mo.slice_mut()[xstart..(xstart + size)]
773 .clone_from_slice(&bytes[bytes_start..(bytes_start + size)]);
774 }
775 }
776 let data_2 = rb.data_mo.slice()[rb
777 .buffer_index
778 .wrapping_add(rb.size_ as usize)
779 .wrapping_sub(2)];
780 rb.data_mo.slice_mut()[rb.buffer_index.wrapping_sub(2)] = data_2;
781 let data_1 = rb.data_mo.slice()[rb
782 .buffer_index
783 .wrapping_add(rb.size_ as usize)
784 .wrapping_sub(1)];
785 rb.data_mo.slice_mut()[rb.buffer_index.wrapping_sub(1)] = data_1;
786 rb.pos_ = rb.pos_.wrapping_add(n as u32);
787 if rb.pos_ > 1u32 << 30 {
788 rb.pos_ = rb.pos_ & (1u32 << 30).wrapping_sub(1) | 1u32 << 30;
789 }
790}
791
792impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
793 pub fn copy_input_to_ring_buffer(&mut self, input_size: usize, input_buffer: &[u8]) {
794 if !self.ensure_initialized() {
795 return;
796 }
797 RingBufferWrite(
798 &mut self.m8,
799 input_buffer,
800 input_size,
801 &mut self.ringbuffer_,
802 );
803 self.input_pos_ = self.input_pos_.wrapping_add(input_size as u64);
804 if (self.ringbuffer_).pos_ <= (self.ringbuffer_).mask_ {
805 let start = (self.ringbuffer_)
806 .buffer_index
807 .wrapping_add((self.ringbuffer_).pos_ as usize);
808 for item in (self.ringbuffer_).data_mo.slice_mut()[start..(start + 7)].iter_mut() {
809 *item = 0;
810 }
811 }
812 }
813}
814
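// Chooses the backward-reference hasher and its table sizes from quality, window
// size and the size hint: small direct-mapped hashers (H2..H4, H54) for the lowest
// qualities, the bucketed H5/H6 family (or types 40..42 for small windows) in the
// middle, and H9/H10 for the top qualities.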
815fn ChooseHasher(params: &mut BrotliEncoderParams) {
816 let hparams = &mut params.hasher;
817 if params.quality >= 10 && !params.q9_5 {
818 hparams.type_ = 10;
819 } else if params.quality == 10 {
820 hparams.type_ = 9;
822 hparams.num_last_distances_to_check = H9_NUM_LAST_DISTANCES_TO_CHECK as i32;
823 hparams.block_bits = H9_BLOCK_BITS as i32;
824 hparams.bucket_bits = H9_BUCKET_BITS as i32;
825 hparams.hash_len = 4;
826 } else if params.quality == 9 {
827 hparams.type_ = 9;
828 hparams.num_last_distances_to_check = H9_NUM_LAST_DISTANCES_TO_CHECK as i32;
829 hparams.block_bits = H9_BLOCK_BITS as i32;
830 hparams.bucket_bits = H9_BUCKET_BITS as i32;
831 hparams.hash_len = 4;
832 } else if params.quality == 4 && (params.size_hint >= (1i32 << 20) as usize) {
833 hparams.type_ = 54i32;
834 } else if params.quality < 5 {
835 hparams.type_ = params.quality;
836 } else if params.lgwin <= 16 {
837 hparams.type_ = if params.quality < 7 {
838 40i32
839 } else if params.quality < 9 {
840 41i32
841 } else {
842 42i32
843 };
844 } else if ((params.q9_5 && params.size_hint > (1 << 20)) || params.size_hint > (1 << 22))
845 && (params.lgwin >= 19i32)
846 {
847 hparams.type_ = 6i32;
848 hparams.block_bits = min(params.quality - 1, 9);
849 hparams.bucket_bits = 15i32;
850 hparams.hash_len = 5i32;
851 hparams.num_last_distances_to_check = if params.quality < 7 {
852 4i32
853 } else if params.quality < 9 {
854 10i32
855 } else {
856 16i32
857 };
858 } else {
859 hparams.type_ = 5i32;
860 hparams.block_bits = min(params.quality - 1, 9);
861 hparams.bucket_bits = if params.quality < 7 && params.size_hint <= (1 << 20) {
862 14i32
863 } else {
864 15i32
865 };
866 hparams.num_last_distances_to_check = if params.quality < 7 {
867 4i32
868 } else if params.quality < 9 {
869 10i32
870 } else {
871 16i32
872 };
873 }
874}
875
876fn InitializeH2<AllocU32: alloc::Allocator<u32>>(
877 m32: &mut AllocU32,
878 params: &BrotliEncoderParams,
879) -> BasicHasher<H2Sub<AllocU32>> {
880 BasicHasher {
881 GetHasherCommon: Struct1 {
882 params: params.hasher,
883 is_prepared_: 1,
884 dict_num_lookups: 0,
885 dict_num_matches: 0,
886 },
887 buckets_: H2Sub {
888 buckets_: m32.alloc_cell(65537 + 8),
889 },
890 h9_opts: super::backward_references::H9Opts::new(¶ms.hasher),
891 }
892}
893fn InitializeH3<AllocU32: alloc::Allocator<u32>>(
894 m32: &mut AllocU32,
895 params: &BrotliEncoderParams,
896) -> BasicHasher<H3Sub<AllocU32>> {
897 BasicHasher {
898 GetHasherCommon: Struct1 {
899 params: params.hasher,
900 is_prepared_: 1,
901 dict_num_lookups: 0,
902 dict_num_matches: 0,
903 },
904 buckets_: H3Sub {
905 buckets_: m32.alloc_cell(65538 + 8),
906 },
907 h9_opts: super::backward_references::H9Opts::new(¶ms.hasher),
908 }
909}
910fn InitializeH4<AllocU32: alloc::Allocator<u32>>(
911 m32: &mut AllocU32,
912 params: &BrotliEncoderParams,
913) -> BasicHasher<H4Sub<AllocU32>> {
914 BasicHasher {
915 GetHasherCommon: Struct1 {
916 params: params.hasher,
917 is_prepared_: 1,
918 dict_num_lookups: 0,
919 dict_num_matches: 0,
920 },
921 buckets_: H4Sub {
922 buckets_: m32.alloc_cell(131072 + 8),
923 },
924 h9_opts: super::backward_references::H9Opts::new(¶ms.hasher),
925 }
926}
927fn InitializeH54<AllocU32: alloc::Allocator<u32>>(
928 m32: &mut AllocU32,
929 params: &BrotliEncoderParams,
930) -> BasicHasher<H54Sub<AllocU32>> {
931 BasicHasher {
932 GetHasherCommon: Struct1 {
933 params: params.hasher,
934 is_prepared_: 1,
935 dict_num_lookups: 0,
936 dict_num_matches: 0,
937 },
938 buckets_: H54Sub {
939 buckets_: m32.alloc_cell(1048580 + 8),
940 },
941 h9_opts: super::backward_references::H9Opts::new(¶ms.hasher),
942 }
943}
944
945fn InitializeH9<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
946 m16: &mut Alloc,
947 params: &BrotliEncoderParams,
948) -> H9<Alloc> {
949 H9 {
950 dict_search_stats_: Struct1 {
951 params: params.hasher,
952 is_prepared_: 1,
953 dict_num_lookups: 0,
954 dict_num_matches: 0,
955 },
956 num_: allocate::<u16, _>(m16, 1 << H9_BUCKET_BITS),
957 buckets_: allocate::<u32, _>(m16, H9_BLOCK_SIZE << H9_BUCKET_BITS),
958 h9_opts: super::backward_references::H9Opts::new(¶ms.hasher),
959 }
960}
961
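// H5/H6 share one layout: `bucket_size` buckets of `block_size` u32 slots plus a u16
// occupancy counter per bucket; the HQ5/HQ7 variants are picked when the configured
// block/bucket sizes match their fixed parameters.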
962fn InitializeH5<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
963 m16: &mut Alloc,
964 params: &BrotliEncoderParams,
965) -> UnionHasher<Alloc> {
966 let block_size = 1u64 << params.hasher.block_bits;
967 let bucket_size = 1u64 << params.hasher.bucket_bits;
968 let buckets: <Alloc as Allocator<u32>>::AllocatedMemory =
969 allocate::<u32, _>(m16, (bucket_size * block_size) as usize);
970 let num: <Alloc as Allocator<u16>>::AllocatedMemory =
971 allocate::<u16, _>(m16, bucket_size as usize);
972
973 if params.hasher.block_bits == (HQ5Sub {}).block_bits()
974 && (1 << params.hasher.bucket_bits) == (HQ5Sub {}).bucket_size()
975 {
976 return UnionHasher::H5q5(AdvHasher {
977 buckets,
978 h9_opts: super::backward_references::H9Opts::new(¶ms.hasher),
979 num,
980 GetHasherCommon: Struct1 {
981 params: params.hasher,
982 is_prepared_: 1,
983 dict_num_lookups: 0,
984 dict_num_matches: 0,
985 },
986 specialization: HQ5Sub {},
987 });
988 }
989 if params.hasher.block_bits == (HQ7Sub {}).block_bits()
990 && (1 << params.hasher.bucket_bits) == (HQ7Sub {}).bucket_size()
991 {
992 return UnionHasher::H5q7(AdvHasher {
993 buckets,
994 h9_opts: super::backward_references::H9Opts::new(¶ms.hasher),
995 num,
996 GetHasherCommon: Struct1 {
997 params: params.hasher,
998 is_prepared_: 1,
999 dict_num_lookups: 0,
1000 dict_num_matches: 0,
1001 },
1002 specialization: HQ7Sub {},
1003 });
1004 }
1005 UnionHasher::H5(AdvHasher {
1006 buckets,
1007 h9_opts: super::backward_references::H9Opts::new(¶ms.hasher),
1008 num,
1009 GetHasherCommon: Struct1 {
1010 params: params.hasher,
1011 is_prepared_: 1,
1012 dict_num_lookups: 0,
1013 dict_num_matches: 0,
1014 },
1015 specialization: H5Sub {
1016 hash_shift_: 32i32 - params.hasher.bucket_bits,
1017 bucket_size_: bucket_size as u32,
1018 block_bits_: params.hasher.block_bits,
1019 block_mask_: block_size.wrapping_sub(1) as u32,
1020 },
1021 })
1022}
1023fn InitializeH6<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
1024 m16: &mut Alloc,
1025 params: &BrotliEncoderParams,
1026) -> UnionHasher<Alloc> {
1027 let block_size = 1u64 << params.hasher.block_bits;
1028 let bucket_size = 1u64 << params.hasher.bucket_bits;
1029 let buckets: <Alloc as Allocator<u32>>::AllocatedMemory =
1030 allocate::<u32, _>(m16, (bucket_size * block_size) as usize);
1031 let num: <Alloc as Allocator<u16>>::AllocatedMemory =
1032 allocate::<u16, _>(m16, bucket_size as usize);
1033 UnionHasher::H6(AdvHasher {
1034 buckets,
1035 num,
1036 h9_opts: super::backward_references::H9Opts::new(¶ms.hasher),
1037 GetHasherCommon: Struct1 {
1038 params: params.hasher,
1039 is_prepared_: 1,
1040 dict_num_lookups: 0,
1041 dict_num_matches: 0,
1042 },
1043 specialization: H6Sub {
1044 bucket_size_: 1u32 << params.hasher.bucket_bits,
1045 block_bits_: params.hasher.block_bits,
1046 block_mask_: block_size.wrapping_sub(1) as u32,
1047 hash_mask: 0xffffffffffffffffu64 >> (64i32 - 8i32 * params.hasher.hash_len),
1048 hash_shift_: 64i32 - params.hasher.bucket_bits,
1049 },
1050 })
1051}
1052
1053fn BrotliMakeHasher<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
1054 m: &mut Alloc,
1055 params: &BrotliEncoderParams,
1056) -> UnionHasher<Alloc> {
1057 let hasher_type: i32 = params.hasher.type_;
1058 if hasher_type == 2i32 {
1059 return UnionHasher::H2(InitializeH2(m, params));
1060 }
1061 if hasher_type == 3i32 {
1062 return UnionHasher::H3(InitializeH3(m, params));
1063 }
1064 if hasher_type == 4i32 {
1065 return UnionHasher::H4(InitializeH4(m, params));
1066 }
1067 if hasher_type == 5i32 {
1068 return InitializeH5(m, params);
1069 }
1070 if hasher_type == 6i32 {
1071 return InitializeH6(m, params);
1072 }
1073 if hasher_type == 9i32 {
1074 return UnionHasher::H9(InitializeH9(m, params));
1075 }
1076 if hasher_type == 54i32 {
1088 return UnionHasher::H54(InitializeH54(m, params));
1089 }
1090 if hasher_type == 10i32 {
1091 return UnionHasher::H10(InitializeH10(m, false, params, 0));
1092 }
1093 InitializeH6(m, params)
1095
1096}
1098fn HasherReset<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(t: &mut UnionHasher<Alloc>) {
1099 match t {
1100 &mut UnionHasher::Uninit => {}
1101 _ => (t.GetHasherCommon()).is_prepared_ = 0i32,
1102 };
1103}
1104
1105#[deprecated(note = "Use hasher_setup instead")]
1106pub fn HasherSetup<Alloc: Allocator<u16> + Allocator<u32>>(
1107 m16: &mut Alloc,
1108 handle: &mut UnionHasher<Alloc>,
1109 params: &mut BrotliEncoderParams,
1110 data: &[u8],
1111 position: usize,
1112 input_size: usize,
1113 is_last: i32,
1114) {
1115 hasher_setup(
1116 m16,
1117 handle,
1118 params,
1119 data,
1120 position,
1121 input_size,
1122 is_last != 0,
1123 )
1124}
1125
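// Creates the hasher on first use (choosing its type from `params`) or re-prepares
// the existing one; dictionary hit statistics are reset when preparation starts from
// position 0.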
1126pub(crate) fn hasher_setup<Alloc: Allocator<u16> + Allocator<u32>>(
1127 m16: &mut Alloc,
1128 handle: &mut UnionHasher<Alloc>,
1129 params: &mut BrotliEncoderParams,
1130 data: &[u8],
1131 position: usize,
1132 input_size: usize,
1133 is_last: bool,
1134) {
1135 let one_shot = position == 0 && is_last;
1136 let is_uninit = match (handle) {
1137 &mut UnionHasher::Uninit => true,
1138 _ => false,
1139 };
1140 if is_uninit {
1141 ChooseHasher(&mut (*params));
1143 *handle = BrotliMakeHasher(m16, params);
1146 handle.GetHasherCommon().params = params.hasher;
1147 HasherReset(handle);
1148 handle.GetHasherCommon().is_prepared_ = 1;
1149 } else {
1150 match handle.Prepare(one_shot, input_size, data) {
1151 HowPrepared::ALREADY_PREPARED => {}
1152 HowPrepared::NEWLY_PREPARED => {
1153 if position == 0usize {
1154 let common = handle.GetHasherCommon();
1155 common.dict_num_lookups = 0usize;
1156 common.dict_num_matches = 0usize;
1157 }
1158 }
1159 }
1160 }
1161}
1162
1163fn HasherPrependCustomDictionary<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
1164 m: &mut Alloc,
1165 handle: &mut UnionHasher<Alloc>,
1166 params: &mut BrotliEncoderParams,
1167 size: usize,
1168 dict: &[u8],
1169) {
1170 hasher_setup(m, handle, params, dict, 0usize, size, false);
1171 match handle {
1172 &mut UnionHasher::H2(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1173 &mut UnionHasher::H3(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1174 &mut UnionHasher::H4(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1175 &mut UnionHasher::H5(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1176 &mut UnionHasher::H5q7(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1177 &mut UnionHasher::H5q5(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1178 &mut UnionHasher::H6(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1179 &mut UnionHasher::H9(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1180 &mut UnionHasher::H54(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1181 &mut UnionHasher::H10(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1182 &mut UnionHasher::Uninit => panic!("Uninitialized"),
1183 }
1184}
1185
1186impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
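// Prepends a caller-supplied dictionary: the last (1 << lgwin) - 16 bytes are copied
// into the ring buffer and pre-hashed (optionally reusing a precomputed hasher);
// qualities 0/1 and trivially small dictionaries only mark the stream
// catable/appendable instead.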
1187 pub fn set_custom_dictionary(&mut self, size: usize, dict: &[u8]) {
1188 self.set_custom_dictionary_with_optional_precomputed_hasher(size, dict, UnionHasher::Uninit)
1189 }
1190
1191 pub fn set_custom_dictionary_with_optional_precomputed_hasher(
1192 &mut self,
1193 size: usize,
1194 mut dict: &[u8],
1195 opt_hasher: UnionHasher<Alloc>,
1196 ) {
1197 let has_optional_hasher = if let UnionHasher::Uninit = opt_hasher {
1198 false
1199 } else {
1200 true
1201 };
1202 let max_dict_size: usize = (1usize << self.params.lgwin).wrapping_sub(16);
1203 self.hasher_ = opt_hasher;
1204 let mut dict_size: usize = size;
1205 if !self.ensure_initialized() {
1206 return;
1207 }
1208 if dict_size == 0 || self.params.quality == 0 || self.params.quality == 1 || size <= 1 {
1209 self.params.catable = true;
1210 self.params.appendable = true;
1211 return;
1212 }
1213 self.custom_dictionary = true;
1214 if size > max_dict_size {
1215 dict = &dict[size.wrapping_sub(max_dict_size)..];
1216 dict_size = max_dict_size;
1217 }
1218 self.copy_input_to_ring_buffer(dict_size, dict);
1219 self.last_flush_pos_ = dict_size as u64;
1220 self.last_processed_pos_ = dict_size as u64;
1221 if dict_size > 0 {
1222 self.prev_byte_ = dict[dict_size.wrapping_sub(1)];
1223 }
1224 if dict_size > 1 {
1225 self.prev_byte2_ = dict[dict_size.wrapping_sub(2)];
1226 }
1227 let m16 = &mut self.m8;
1228 if cfg!(debug_assertions) || !has_optional_hasher {
1229 let mut orig_hasher = UnionHasher::Uninit;
1230 if has_optional_hasher {
1231 orig_hasher = core::mem::replace(&mut self.hasher_, UnionHasher::Uninit);
1232 }
1233 HasherPrependCustomDictionary(
1234 m16,
1235 &mut self.hasher_,
1236 &mut self.params,
1237 dict_size,
1238 dict,
1239 );
1240 if has_optional_hasher {
1241 debug_assert!(orig_hasher == self.hasher_);
1242 DestroyHasher(m16, &mut orig_hasher);
1243 }
1244 }
1245 }
1246}
1247
1248pub fn BrotliEncoderMaxCompressedSizeMulti(input_size: usize, num_threads: usize) -> usize {
1249 BrotliEncoderMaxCompressedSize(input_size) + num_threads * 8
1250}
1251
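// Worst-case output size: the input carried in uncompressed meta-blocks plus their
// headers, a final empty meta-block, and 16 spare bytes for the optional magic
// number.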
1252pub fn BrotliEncoderMaxCompressedSize(input_size: usize) -> usize {
1253 let magic_size = 16usize;
1254 let num_large_blocks: usize = input_size >> 14;
1255 let tail: usize = input_size.wrapping_sub(num_large_blocks << 24);
1256 let tail_overhead: usize = (if tail > (1i32 << 20) as usize {
1257 4i32
1258 } else {
1259 3i32
1260 }) as usize;
1261 let overhead: usize = (2usize)
1262 .wrapping_add((4usize).wrapping_mul(num_large_blocks))
1263 .wrapping_add(tail_overhead)
1264 .wrapping_add(1);
1265 let result: usize = input_size.wrapping_add(overhead);
1266 if input_size == 0usize {
1267 return 1 + magic_size;
1268 }
1269 if result < input_size {
1270 0usize
1271 } else {
1272 result + magic_size
1273 }
1274}
1275
1276fn InitOrStitchToPreviousBlock<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
1277 m: &mut Alloc,
1278 handle: &mut UnionHasher<Alloc>,
1279 data: &[u8],
1280 mask: usize,
1281 params: &mut BrotliEncoderParams,
1282 position: usize,
1283 input_size: usize,
1284 is_last: bool,
1285) {
1286 hasher_setup(m, handle, params, data, position, input_size, is_last);
1287 handle.StitchToPreviousBlock(input_size, position, data, mask);
1288}
1289
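// Pre-flush heuristic: when a block is almost entirely literals, sample every 13th
// byte and report false if the sampled literal entropy approaches 8 bits per byte,
// letting the caller store the block uncompressed instead.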
1290fn should_compress(
1291 data: &[u8],
1292 mask: usize,
1293 last_flush_pos: u64,
1294 bytes: usize,
1295 num_literals: usize,
1296 num_commands: usize,
1297) -> bool {
1298 const K_SAMPLE_RATE: u32 = 13;
1299 const K_MIN_ENTROPY: floatX = 7.92;
1300
1301 if num_commands < (bytes >> 8) + 2 && num_literals as floatX > 0.99 * bytes as floatX {
1302 let mut literal_histo = [0u32; 256];
1303 let bit_cost_threshold = (bytes as floatX) * K_MIN_ENTROPY / (K_SAMPLE_RATE as floatX);
1304 let t = bytes
1305 .wrapping_add(K_SAMPLE_RATE as usize)
1306 .wrapping_sub(1)
1307 .wrapping_div(K_SAMPLE_RATE as usize);
1308 let mut pos = last_flush_pos as u32;
1309 for _ in 0..t {
1310 let value = &mut literal_histo[data[pos as usize & mask] as usize];
1311 *value = value.wrapping_add(1);
1312 pos = pos.wrapping_add(K_SAMPLE_RATE);
1313 }
1314 if BitsEntropy(&literal_histo[..], 256) > bit_cost_threshold {
1315 return false;
1316 }
1317 }
1318 true
1319}
1320
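// Honours an explicitly forced context mode; otherwise high-quality encodes of data
// that does not look like UTF-8 use the signed context and everything else uses the
// UTF-8 context.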
1321fn ChooseContextMode(
1323 params: &BrotliEncoderParams,
1324 data: &[u8],
1325 pos: usize,
1326 mask: usize,
1327 length: usize,
1328) -> ContextType {
1329 match params.mode {
1332 BrotliEncoderMode::BROTLI_FORCE_LSB_PRIOR => return ContextType::CONTEXT_LSB6,
1333 BrotliEncoderMode::BROTLI_FORCE_MSB_PRIOR => return ContextType::CONTEXT_MSB6,
1334 BrotliEncoderMode::BROTLI_FORCE_UTF8_PRIOR => return ContextType::CONTEXT_UTF8,
1335 BrotliEncoderMode::BROTLI_FORCE_SIGNED_PRIOR => return ContextType::CONTEXT_SIGNED,
1336 _ => {}
1337 }
1338 if (params.quality >= 10 && !is_mostly_utf8(data, pos, mask, length, kMinUTF8Ratio)) {
1339 return ContextType::CONTEXT_SIGNED;
1340 }
1341 ContextType::CONTEXT_UTF8
1342}
1343
1344#[derive(PartialEq, Eq, Copy, Clone)]
1345pub enum BrotliEncoderOperation {
1346 BROTLI_OPERATION_PROCESS = 0,
1347 BROTLI_OPERATION_FLUSH = 1,
1348 BROTLI_OPERATION_FINISH = 2,
1349 BROTLI_OPERATION_EMIT_METADATA = 3,
1350}
1351
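// Fallback framing: wraps `input` into raw (uncompressed) meta-blocks of at most
// 1 << 24 bytes each between a fixed stream header and a final empty last block,
// returning the number of bytes written to `output`.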
1352fn MakeUncompressedStream(input: &[u8], input_size: usize, output: &mut [u8]) -> usize {
1353 let mut size: usize = input_size;
1354 let mut result: usize = 0usize;
1355 let mut offset: usize = 0usize;
1356 if input_size == 0usize {
1357 output[0] = 6u8;
1358 return 1;
1359 }
1360 output[result] = 0x21u8;
1361 result = result.wrapping_add(1);
1362 output[result] = 0x3u8;
1363 result = result.wrapping_add(1);
1364 while size > 0usize {
1365 let mut nibbles: u32 = 0u32;
1366
1367 let chunk_size: u32 = if size > (1u32 << 24) as usize {
1368 1u32 << 24
1369 } else {
1370 size as u32
1371 };
1372 if chunk_size > 1u32 << 16 {
1373 nibbles = if chunk_size > 1u32 << 20 { 2i32 } else { 1i32 } as u32;
1374 }
1375 let bits: u32 = nibbles << 1
1376 | chunk_size.wrapping_sub(1) << 3
1377 | 1u32 << (19u32).wrapping_add((4u32).wrapping_mul(nibbles));
1378 output[result] = bits as u8;
1379 result = result.wrapping_add(1);
1380 output[result] = (bits >> 8) as u8;
1381 result = result.wrapping_add(1);
1382 output[result] = (bits >> 16) as u8;
1383 result = result.wrapping_add(1);
1384 if nibbles == 2u32 {
1385 output[result] = (bits >> 24) as u8;
1386 result = result.wrapping_add(1);
1387 }
1388 output[result..(result + chunk_size as usize)]
1389 .clone_from_slice(&input[offset..(offset + chunk_size as usize)]);
1390 result = result.wrapping_add(chunk_size as usize);
1391 offset = offset.wrapping_add(chunk_size as usize);
1392 size = size.wrapping_sub(chunk_size as usize);
1393 }
1394 output[result] = 3u8;
1395 result = result.wrapping_add(1);
1396 result
1397}
1398
1399#[deprecated(note = "Use encoder_compress instead")]
1400pub fn BrotliEncoderCompress<
1401 Alloc: BrotliAlloc,
1402 MetablockCallback: FnMut(
1403 &mut interface::PredictionModeContextMap<InputReferenceMut>,
1404 &mut [interface::StaticCommand],
1405 interface::InputPair,
1406 &mut Alloc,
1407 ),
1408>(
1409 empty_m8: Alloc,
1410 m8: &mut Alloc,
1411 quality: i32,
1412 lgwin: i32,
1413 mode: BrotliEncoderMode,
1414 input_size: usize,
1415 input_buffer: &[u8],
1416 encoded_size: &mut usize,
1417 encoded_buffer: &mut [u8],
1418 metablock_callback: &mut MetablockCallback,
1419) -> i32 {
1420 encoder_compress(
1421 empty_m8,
1422 m8,
1423 quality,
1424 lgwin,
1425 mode,
1426 input_size,
1427 input_buffer,
1428 encoded_size,
1429 encoded_buffer,
1430 metablock_callback,
1431 )
1432 .into()
1433}
1434
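// One-shot compression: drives a streaming encoder to completion over
// `input_buffer`; if that fails, or the result exceeds
// BrotliEncoderMaxCompressedSize, the input is stored as an uncompressed stream
// when the output buffer is large enough.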
1435pub(crate) fn encoder_compress<
1436 Alloc: BrotliAlloc,
1437 MetablockCallback: FnMut(
1438 &mut interface::PredictionModeContextMap<InputReferenceMut>,
1439 &mut [interface::StaticCommand],
1440 interface::InputPair,
1441 &mut Alloc,
1442 ),
1443>(
1444 empty_m8: Alloc,
1445 m8: &mut Alloc,
1446 mut quality: i32,
1447 lgwin: i32,
1448 mode: BrotliEncoderMode,
1449 input_size: usize,
1450 input_buffer: &[u8],
1451 encoded_size: &mut usize,
1452 encoded_buffer: &mut [u8],
1453 metablock_callback: &mut MetablockCallback,
1454) -> bool {
1455 let out_size: usize = *encoded_size;
1456 let input_start = input_buffer;
1457 let output_start = encoded_buffer;
1458 let max_out_size: usize = BrotliEncoderMaxCompressedSize(input_size);
1459 if out_size == 0 {
1460 return false;
1461 }
1462 if input_size == 0 {
1463 *encoded_size = 1;
1464 output_start[0] = 6;
1465 return true;
1466 }
1467 let mut is_fallback = false;
1468 let mut is_9_5 = false;
1469 if quality == 10 {
1470 quality = 9;
1471 is_9_5 = true;
1472 }
1473 if !is_fallback {
1474 let mut s_orig = BrotliEncoderStateStruct::new(core::mem::replace(m8, empty_m8));
1475 if is_9_5 {
1476 let mut params = BrotliEncoderParams::default();
1477 params.q9_5 = true;
1478 params.quality = 10;
1479 ChooseHasher(&mut params);
1480 s_orig.hasher_ = BrotliMakeHasher(m8, ¶ms);
1481 }
1482 let mut result: bool;
1483 {
1484 let s = &mut s_orig;
1485 let mut available_in: usize = input_size;
1486 let next_in_array: &[u8] = input_buffer;
1487 let mut next_in_offset: usize = 0;
1488 let mut available_out: usize = *encoded_size;
1489 let next_out_array: &mut [u8] = output_start;
1490 let mut next_out_offset: usize = 0;
1491 let mut total_out = Some(0);
1492 s.set_parameter(BrotliEncoderParameter::BROTLI_PARAM_QUALITY, quality as u32);
1493 s.set_parameter(BrotliEncoderParameter::BROTLI_PARAM_LGWIN, lgwin as u32);
1494 s.set_parameter(BrotliEncoderParameter::BROTLI_PARAM_MODE, mode as u32);
1495 s.set_parameter(
1496 BrotliEncoderParameter::BROTLI_PARAM_SIZE_HINT,
1497 input_size as u32,
1498 );
1499 if lgwin > BROTLI_MAX_WINDOW_BITS as i32 {
1500 s.set_parameter(BrotliEncoderParameter::BROTLI_PARAM_LARGE_WINDOW, 1);
1501 }
1502 result = s.compress_stream(
1503 BrotliEncoderOperation::BROTLI_OPERATION_FINISH,
1504 &mut available_in,
1505 next_in_array,
1506 &mut next_in_offset,
1507 &mut available_out,
1508 next_out_array,
1509 &mut next_out_offset,
1510 &mut total_out,
1511 metablock_callback,
1512 );
1513 if !s.is_finished() {
1514 result = false;
1515 }
1516
1517 *encoded_size = total_out.unwrap();
1518 BrotliEncoderDestroyInstance(s);
1519 }
1520 let _ = core::mem::replace(m8, s_orig.m8);
1521 if !result || max_out_size != 0 && (*encoded_size > max_out_size) {
1522 is_fallback = true;
1523 } else {
1524 return true;
1525 }
1526 }
1527 assert!(is_fallback);
1528 *encoded_size = 0;
1529 if max_out_size == 0 {
1530 return false;
1531 }
1532 if out_size >= max_out_size {
1533 *encoded_size = MakeUncompressedStream(input_start, input_size, output_start);
1534 return true;
1535 }
1536 false
1537}
1538
1539impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
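// Pads the pending partial byte with an empty metadata block (the 6-bit 0x6
// pattern) so that all output produced so far ends on a byte boundary.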
1540 fn inject_byte_padding_block(&mut self) {
1541 let mut seal: u32 = self.last_bytes_ as u32;
1542 let mut seal_bits: usize = self.last_bytes_bits_ as usize;
1543 let destination: &mut [u8];
1544 self.last_bytes_ = 0;
1545 self.last_bytes_bits_ = 0;
1546 seal |= 0x6u32 << seal_bits;
1547
1548 seal_bits = seal_bits.wrapping_add(6);
1549 if !IsNextOutNull(&self.next_out_) {
1550 destination = &mut GetNextOut!(*self)[self.available_out_..];
1551 } else {
1552 destination = &mut self.tiny_buf_[..];
1553 self.next_out_ = NextOut::TinyBuf(0);
1554 }
1555 destination[0] = seal as u8;
1556 if seal_bits > 8usize {
1557 destination[1] = (seal >> 8) as u8;
1558 }
1559 if seal_bits > 16usize {
1560 destination[2] = (seal >> 16) as u8;
1561 }
1562 self.available_out_ = self
1563 .available_out_
1564 .wrapping_add(seal_bits.wrapping_add(7) >> 3);
1565 }
1566
1567 fn inject_flush_or_push_output(
1568 &mut self,
1569 available_out: &mut usize,
1570 next_out_array: &mut [u8],
1571 next_out_offset: &mut usize,
1572 total_out: &mut Option<usize>,
1573 ) -> bool {
1574 if self.stream_state_ as i32
1575 == BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED as i32
1576 && (self.last_bytes_bits_ as i32 != 0i32)
1577 {
1578 self.inject_byte_padding_block();
1579 return true;
1580 }
1581 if self.available_out_ != 0usize && (*available_out != 0usize) {
1582 let copy_output_size: usize = min(self.available_out_, *available_out);
1583 (*next_out_array)[(*next_out_offset)..(*next_out_offset + copy_output_size)]
1584 .clone_from_slice(&GetNextOut!(self)[..copy_output_size]);
1585 *next_out_offset = next_out_offset.wrapping_add(copy_output_size);
1587 *available_out = available_out.wrapping_sub(copy_output_size);
1588 self.next_out_ = NextOutIncrement(&self.next_out_, (copy_output_size as i32));
1589 self.available_out_ = self.available_out_.wrapping_sub(copy_output_size);
1590 self.total_out_ = self.total_out_.wrapping_add(copy_output_size as u64);
1591 if let &mut Some(ref mut total_out_inner) = total_out {
1592 *total_out_inner = self.total_out_ as usize;
1593 }
1594 return true;
1595 }
1596 false
1597 }
1598
1599 fn unprocessed_input_size(&self) -> u64 {
1600 self.input_pos_.wrapping_sub(self.last_processed_pos_)
1601 }
1602
1603 fn update_size_hint(&mut self, available_in: usize) {
1604 if self.params.size_hint == 0usize {
1605 let delta: u64 = self.unprocessed_input_size();
1606 let tail: u64 = available_in as u64;
1607 let limit: u32 = 1u32 << 30;
1608 let total: u32;
1609 if delta >= u64::from(limit)
1610 || tail >= u64::from(limit)
1611 || delta.wrapping_add(tail) >= u64::from(limit)
1612 {
1613 total = limit;
1614 } else {
1615 total = delta.wrapping_add(tail) as u32;
1616 }
1617 self.params.size_hint = total as usize;
1618 }
1619 }
1620}
1621
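// Folds a 64-bit stream position into 32 bits once it passes 3 GiB: the low 30 bits
// are kept and bits 30/31 alternate between 1 and 2, so window-relative distances
// stay computable without the counter ever wrapping back to small values.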
1622fn WrapPosition(position: u64) -> u32 {
1623 let mut result: u32 = position as u32;
1624 let gb: u64 = position >> 30;
1625 if gb > 2 {
1626 result = result & (1u32 << 30).wrapping_sub(1)
1627 | ((gb.wrapping_sub(1) & 1) as u32).wrapping_add(1) << 30;
1628 }
1629 result
1630}
1631
1632impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
1633 fn get_brotli_storage(&mut self, size: usize) {
1634 if self.storage_size_ < size {
1635 <Alloc as Allocator<u8>>::free_cell(&mut self.m8, core::mem::take(&mut self.storage_));
1636 self.storage_ = allocate::<u8, _>(&mut self.m8, size);
1637 self.storage_size_ = size;
1638 }
1639 }
1640}
1641
1642fn MaxHashTableSize(quality: i32) -> usize {
1643 (if quality == 0i32 {
1644 1i32 << 15
1645 } else {
1646 1i32 << 17
1647 }) as usize
1648}
1649
1650fn HashTableSize(max_table_size: usize, input_size: usize) -> usize {
1651 let mut htsize: usize = 256usize;
1652 while htsize < max_table_size && (htsize < input_size) {
1653 htsize <<= 1i32;
1654 }
1655 htsize
1656}
1657
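// Hash table for the quality-0/1 fast paths: reuse the fixed 1024-entry
// `small_table_` when it is large enough, otherwise (re)allocate `large_table_`,
// and return a zeroed slice of `table_size` entries.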
1658macro_rules! GetHashTable {
1659 ($s : expr, $quality: expr, $input_size : expr, $table_size : expr) => {
1660 GetHashTableInternal(
1661 &mut $s.m8,
1662 &mut $s.small_table_,
1663 &mut $s.large_table_,
1664 $quality,
1665 $input_size,
1666 $table_size,
1667 )
1668 };
1669}
1670fn GetHashTableInternal<'a, AllocI32: alloc::Allocator<i32>>(
1671 mi32: &mut AllocI32,
1672 small_table_: &'a mut [i32; 1024],
1673 large_table_: &'a mut AllocI32::AllocatedMemory,
1674 quality: i32,
1675 input_size: usize,
1676 table_size: &mut usize,
1677) -> &'a mut [i32] {
1678 let max_table_size: usize = MaxHashTableSize(quality);
1679 let mut htsize: usize = HashTableSize(max_table_size, input_size);
1680 let table: &mut [i32];
1681 if quality == 0i32 && htsize & 0xaaaaausize == 0usize {
1682 htsize <<= 1i32;
1683 }
1684 if htsize <= small_table_.len() {
1685 table = &mut small_table_[..];
1686 } else {
1687 if htsize > large_table_.slice().len() {
1688 {
1690 mi32.free_cell(core::mem::take(large_table_));
1691 }
1692 *large_table_ = mi32.alloc_cell(htsize);
1693 }
1694 table = large_table_.slice_mut();
1695 }
1696 *table_size = htsize;
1697 for item in table[..htsize].iter_mut() {
1698 *item = 0;
1699 }
1700 table
1701}
1702
1703impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
1704 fn update_last_processed_pos(&mut self) -> bool {
1705 let wrapped_last_processed_pos: u32 = WrapPosition(self.last_processed_pos_);
1706 let wrapped_input_pos: u32 = WrapPosition(self.input_pos_);
1707 self.last_processed_pos_ = self.input_pos_;
1708 wrapped_input_pos < wrapped_last_processed_pos
1709 }
1710}
1711
1712fn MaxMetablockSize(params: &BrotliEncoderParams) -> usize {
1713 1 << min(ComputeRbBits(params), 24)
1714}
1715
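// Picks 1, 2 or 3 literal contexts for UTF-8-like data by comparing the entropy of
// byte-prefix bigram histograms at the candidate context granularities.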
1716fn ChooseContextMap(
1717 quality: i32,
1718 bigram_histo: &mut [u32],
1719 num_literal_contexts: &mut usize,
1720 literal_context_map: &mut &[u32],
1721) {
1722 static kStaticContextMapContinuation: [u32; 64] = [
1723 1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1724 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1725 0, 0, 0, 0,
1726 ];
1727 static kStaticContextMapSimpleUTF8: [u32; 64] = [
1728 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1729 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1730 0, 0, 0, 0,
1731 ];
1732 let mut monogram_histo = [0u32; 3];
1733 let mut two_prefix_histo = [0u32; 6];
1734
1735 let mut i: usize;
1736 let mut entropy = [0.0 as floatX; 4];
1737 i = 0usize;
1738 while i < 9usize {
1739 {
1740 {
1741 let _rhs = bigram_histo[i];
1742 let _lhs = &mut monogram_histo[i.wrapping_rem(3)];
1743 *_lhs = (*_lhs).wrapping_add(_rhs);
1744 }
1745 {
1746 let _rhs = bigram_histo[i];
1747 let _lhs = &mut two_prefix_histo[i.wrapping_rem(6)];
1748 *_lhs = (*_lhs).wrapping_add(_rhs);
1749 }
1750 }
1751 i = i.wrapping_add(1);
1752 }
1753 entropy[1] = shannon_entropy(&monogram_histo[..], 3).0;
1754 entropy[2] =
1755 shannon_entropy(&two_prefix_histo[..], 3).0 + shannon_entropy(&two_prefix_histo[3..], 3).0;
1756 entropy[3] = 0.0;
1757 for i in 0usize..3usize {
1758 entropy[3] += shannon_entropy(&bigram_histo[(3usize).wrapping_mul(i)..], 3).0;
1759 }
1760 let total: usize = monogram_histo[0]
1761 .wrapping_add(monogram_histo[1])
1762 .wrapping_add(monogram_histo[2]) as usize;
1763 entropy[0] = 1.0 / (total as floatX);
1764 entropy[1] *= entropy[0];
1765 entropy[2] *= entropy[0];
1766 entropy[3] *= entropy[0];
1767 if quality < 7i32 {
1768 entropy[3] = entropy[1] * 10.0;
1769 }
1770 if entropy[1] - entropy[2] < 0.2 && entropy[1] - entropy[3] < 0.2 {
1771 *num_literal_contexts = 1;
1772 } else if entropy[2] - entropy[3] < 0.02 {
1773 *num_literal_contexts = 2usize;
1774 *literal_context_map = &kStaticContextMapSimpleUTF8[..];
1775 } else {
1776 *num_literal_contexts = 3usize;
1777 *literal_context_map = &kStaticContextMapContinuation[..];
1778 }
1779}
1780
1781static kStaticContextMapComplexUTF8: [u32; 64] = [
1782 11, 11, 12, 12, 0, 0, 0, 0, 1, 1, 9, 9, 2, 2, 2, 2, 1, 1, 1, 1, 8, 3, 3, 3, 1, 1, 1, 1, 2, 2, 2, 2, 8, 4, 4, 4, 8, 7, 4, 4, 8, 0, 0, 0, 3, 3, 3, 3, 5, 5, 10, 5, 5, 5, 10, 5, 6, 6, 6, 6, 6, 6, 6, 6,
1797];
1798fn ShouldUseComplexStaticContextMap(
1802 input: &[u8],
1803 mut start_pos: usize,
1804 length: usize,
1805 mask: usize,
1806 quality: i32,
1807 size_hint: usize,
1808 num_literal_contexts: &mut usize,
1809 literal_context_map: &mut &[u32],
1810) -> bool {
1811 let _ = quality;
1812 if (size_hint < (1 << 20)) {
1815 false
1816 } else {
1817 let end_pos = start_pos + length;
1818 let mut combined_histo: [u32; 32] = [0; 32];
1822 let mut context_histo: [[u32; 32]; 13] = [[0; 32]; 13];
1823 let mut total = 0u32;
1824 let mut entropy = [0.0 as floatX; 3];
1825 let utf8_lut = BROTLI_CONTEXT_LUT(ContextType::CONTEXT_UTF8);
1826 while start_pos + 64 <= end_pos {
1827 let stride_end_pos = start_pos + 64;
1828 let mut prev2 = input[start_pos & mask];
1829 let mut prev1 = input[(start_pos + 1) & mask];
1830
1831 for pos in start_pos + 2..stride_end_pos {
1834 let literal = input[pos & mask];
1835 let context = kStaticContextMapComplexUTF8
1836 [BROTLI_CONTEXT(prev1, prev2, utf8_lut) as usize]
1837 as u8;
1838 total += 1;
1839 combined_histo[(literal >> 3) as usize] += 1;
1840 context_histo[context as usize][(literal >> 3) as usize] += 1;
1841 prev2 = prev1;
1842 prev1 = literal;
1843 }
1844 start_pos += 4096;
1845 }
1846 entropy[1] = shannon_entropy(&combined_histo[..], 32).0;
1847 entropy[2] = 0.0;
1848 for i in 0..13 {
1849 assert!(i < 13);
1850 entropy[2] += shannon_entropy(&context_histo[i][..], 32).0;
1851 }
1852 entropy[0] = 1.0 / (total as floatX);
1853 entropy[1] *= entropy[0];
1854 entropy[2] *= entropy[0];
        if entropy[2] > 3.0 || entropy[1] - entropy[2] < 0.2 {
1863 false
1864 } else {
1865 *num_literal_contexts = 13;
1866 *literal_context_map = &kStaticContextMapComplexUTF8;
1867 true
1868 }
1869 }
1870}
1871
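/// Decides how literals are split into context buckets for the next
/// meta-block. Below quality 5, or for blocks shorter than 64 bytes, the
/// single-context default is kept. Otherwise the complex 13-context UTF-8 map
/// is tried first; if it is rejected, 64-byte strides (one every 4096 bytes)
/// are scanned and each byte is classified by its top two bits into ASCII
/// (0x00..=0x7F), UTF-8 continuation (0x80..=0xBF) or UTF-8 lead (0xC0..=0xFF)
/// via `lut = [0, 0, 1, 2]`; the resulting 3x3 bigram class histogram is fed
/// to `ChooseContextMap`, which picks 1, 2 or 3 literal contexts.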
1872fn DecideOverLiteralContextModeling(
1873 input: &[u8],
1874 mut start_pos: usize,
1875 length: usize,
1876 mask: usize,
1877 quality: i32,
1878 size_hint: usize,
1879 num_literal_contexts: &mut usize,
1880 literal_context_map: &mut &[u32],
1881) {
1882 if quality < 5i32 || length < 64usize {
1883 } else if ShouldUseComplexStaticContextMap(
1884 input,
1885 start_pos,
1886 length,
1887 mask,
1888 quality,
1889 size_hint,
1890 num_literal_contexts,
1891 literal_context_map,
1892 ) {
1893 } else {
1894 let end_pos: usize = start_pos.wrapping_add(length);
1895 let mut bigram_prefix_histo = [0u32; 9];
1896 while start_pos.wrapping_add(64) <= end_pos {
1897 {
1898 static lut: [i32; 4] = [0, 0, 1, 2];
1899 let stride_end_pos: usize = start_pos.wrapping_add(64);
1900 let mut prev: i32 = lut[(input[(start_pos & mask)] as i32 >> 6) as usize] * 3i32;
1901 let mut pos: usize;
1902 pos = start_pos.wrapping_add(1);
1903 while pos < stride_end_pos {
1904 {
1905 let literal: u8 = input[(pos & mask)];
1906 {
1907 let _rhs = 1;
                    let cur_ind = prev + lut[(literal as i32 >> 6) as usize];
1909 let _lhs = &mut bigram_prefix_histo[cur_ind as usize];
1910 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
1911 }
1912 prev = lut[(literal as i32 >> 6) as usize] * 3i32;
1913 }
1914 pos = pos.wrapping_add(1);
1915 }
1916 }
1917 start_pos = start_pos.wrapping_add(4096);
1918 }
1919 ChooseContextMap(
1920 quality,
1921 &mut bigram_prefix_histo[..],
1922 num_literal_contexts,
1923 literal_context_map,
1924 );
1925 }
1926}
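/// Serializes one meta-block from the accumulated commands. Zero-length input
/// emits only a 2-bit empty-block marker; blocks that look incompressible are
/// stored uncompressed. Otherwise the block is encoded with
/// `store_meta_block_fast` (quality <= 2), `store_meta_block_trivial`
/// (quality 3) or a full `MetaBlockSplit` built greedily (quality < 10) or via
/// `BrotliBuildMetaBlock` (quality >= 10) and written by `store_meta_block`.
/// If the compressed form ends up larger than the raw bytes plus a small
/// margin, the output is rolled back and the block is re-emitted uncompressed.
/// When `params.appendable` is set the real last-block flag is suppressed and
/// an empty last meta-block is appended instead.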
1927fn WriteMetaBlockInternal<Alloc: BrotliAlloc, Cb>(
1928 alloc: &mut Alloc,
1929 data: &[u8],
1930 mask: usize,
1931 last_flush_pos: u64,
1932 bytes: usize,
1933 mut is_last: bool,
1934 literal_context_mode: ContextType,
1935 params: &BrotliEncoderParams,
1936 lit_scratch_space: &mut <HistogramLiteral as CostAccessors>::i32vec,
1937 cmd_scratch_space: &mut <HistogramCommand as CostAccessors>::i32vec,
1938 dst_scratch_space: &mut <HistogramDistance as CostAccessors>::i32vec,
1939 prev_byte: u8,
1940 prev_byte2: u8,
1941 num_literals: usize,
1942 num_commands: usize,
1943 commands: &mut [Command],
1944 saved_dist_cache: &[i32; kNumDistanceCacheEntries],
1945 dist_cache: &mut [i32; 16],
1946 recoder_state: &mut RecoderState,
1947 storage_ix: &mut usize,
1948 storage: &mut [u8],
1949 cb: &mut Cb,
1950) where
1951 Cb: FnMut(
1952 &mut interface::PredictionModeContextMap<InputReferenceMut>,
1953 &mut [interface::StaticCommand],
1954 interface::InputPair,
1955 &mut Alloc,
1956 ),
1957{
1958 let actual_is_last = is_last;
1959 if params.appendable {
1960 is_last = false;
1961 } else {
        assert!(!params.catable);
    }
1964 let wrapped_last_flush_pos: u32 = WrapPosition(last_flush_pos);
1965
1966 let literal_context_lut = BROTLI_CONTEXT_LUT(literal_context_mode);
1967 let mut block_params = params.clone();
1968 if bytes == 0usize {
1969 BrotliWriteBits(2usize, 3, storage_ix, storage);
1970 *storage_ix = storage_ix.wrapping_add(7u32 as usize) & !7u32 as usize;
1971 return;
1972 }
1973 if !should_compress(
1974 data,
1975 mask,
1976 last_flush_pos,
1977 bytes,
1978 num_literals,
1979 num_commands,
1980 ) {
1981 dist_cache[..4].clone_from_slice(&saved_dist_cache[..4]);
1982 store_uncompressed_meta_block(
1983 alloc,
1984 is_last,
1985 data,
1986 wrapped_last_flush_pos as usize,
1987 mask,
1988 params,
1989 bytes,
1990 recoder_state,
1991 storage_ix,
1992 storage,
1993 false,
1994 cb,
1995 );
1996 if actual_is_last != is_last {
1997 BrotliWriteEmptyLastMetaBlock(storage_ix, storage)
1998 }
1999 return;
2000 }
2001 let saved_byte_location = (*storage_ix) >> 3;
2002 let last_bytes: u16 =
2003 ((storage[saved_byte_location + 1] as u16) << 8) | storage[saved_byte_location] as u16;
2004 let last_bytes_bits: u8 = *storage_ix as u8;
2005 if params.quality <= 2 {
2014 store_meta_block_fast(
2015 alloc,
2016 data,
2017 wrapped_last_flush_pos as usize,
2018 bytes,
2019 mask,
2020 is_last,
2021 params,
2022 saved_dist_cache,
2023 commands,
2024 num_commands,
2025 recoder_state,
2026 storage_ix,
2027 storage,
2028 cb,
2029 );
2030 } else if params.quality < 4 {
2031 store_meta_block_trivial(
2032 alloc,
2033 data,
2034 wrapped_last_flush_pos as usize,
2035 bytes,
2036 mask,
2037 is_last,
2038 params,
2039 saved_dist_cache,
2040 commands,
2041 num_commands,
2042 recoder_state,
2043 storage_ix,
2044 storage,
2045 cb,
2046 );
2047 } else {
2048 let mut mb = MetaBlockSplit::<Alloc>::new();
2051 if params.quality < 10i32 {
2052 let mut num_literal_contexts: usize = 1;
2053 let mut literal_context_map: &[u32] = &[];
2054 if params.disable_literal_context_modeling == 0 {
2055 DecideOverLiteralContextModeling(
2056 data,
2057 wrapped_last_flush_pos as usize,
2058 bytes,
2059 mask,
2060 params.quality,
2061 params.size_hint,
2062 &mut num_literal_contexts,
2063 &mut literal_context_map,
2064 );
2065 }
2066 BrotliBuildMetaBlockGreedy(
2067 alloc,
2068 data,
2069 wrapped_last_flush_pos as usize,
2070 mask,
2071 prev_byte,
2072 prev_byte2,
2073 literal_context_mode,
2074 literal_context_lut,
2075 num_literal_contexts,
2076 literal_context_map,
2077 commands,
2078 num_commands,
2079 &mut mb,
2080 );
2081 } else {
2082 BrotliBuildMetaBlock(
2083 alloc,
2084 data,
2085 wrapped_last_flush_pos as usize,
2086 mask,
2087 &mut block_params,
2088 prev_byte,
2089 prev_byte2,
2090 commands,
2091 num_commands,
2092 literal_context_mode,
2093 lit_scratch_space,
2094 cmd_scratch_space,
2095 dst_scratch_space,
2096 &mut mb,
2097 );
2098 }
2099 if params.quality >= 4i32 {
2100 let mut num_effective_dist_codes = block_params.dist.alphabet_size;
2101 if num_effective_dist_codes > BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS as u32 {
2102 num_effective_dist_codes = BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS as u32;
2103 }
2104 BrotliOptimizeHistograms(num_effective_dist_codes as usize, &mut mb);
2105 }
2106 store_meta_block(
2107 alloc,
2108 data,
2109 wrapped_last_flush_pos as usize,
2110 bytes,
2111 mask,
2112 prev_byte,
2113 prev_byte2,
2114 is_last,
2115 &block_params,
2116 literal_context_mode,
2117 saved_dist_cache,
2118 commands,
2119 num_commands,
2120 &mut mb,
2121 recoder_state,
2122 storage_ix,
2123 storage,
2124 cb,
2125 );
2126 mb.destroy(alloc);
2127 }
2128 if bytes + 4 + saved_byte_location < (*storage_ix >> 3) {
2129 dist_cache[..4].clone_from_slice(&saved_dist_cache[..4]);
2130 storage[saved_byte_location] = last_bytes as u8;
2134 storage[saved_byte_location + 1] = (last_bytes >> 8) as u8;
2135 *storage_ix = last_bytes_bits as usize;
2136 store_uncompressed_meta_block(
2137 alloc,
2138 is_last,
2139 data,
2140 wrapped_last_flush_pos as usize,
2141 mask,
2142 params,
2143 bytes,
2144 recoder_state,
2145 storage_ix,
2146 storage,
2147 true,
2148 cb,
2149 );
2150 }
2151 if actual_is_last != is_last {
2152 BrotliWriteEmptyLastMetaBlock(storage_ix, storage)
2153 }
2154}
2155
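/// Picks the distance-code parameters (NPOSTFIX / NDIRECT) for the stream
/// header. For quality >= 4, font mode forces NPOSTFIX = 1 and NDIRECT = 12;
/// other modes take the values from `params.dist`, falling back to 0 / 0 when
/// they are out of range (NPOSTFIX > 3, NDIRECT > 120, or NDIRECT not
/// expressible as `m << NPOSTFIX` with `m` in 0..=15). Below quality 4 the
/// defaults 0 / 0 are always used.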
2156fn ChooseDistanceParams(params: &mut BrotliEncoderParams) {
2157 let mut num_direct_distance_codes = 0u32;
2158 let mut distance_postfix_bits = 0u32;
2159
2160 if params.quality >= 4 {
2161 if params.mode == BrotliEncoderMode::BROTLI_MODE_FONT {
2162 distance_postfix_bits = 1;
2163 num_direct_distance_codes = 12;
2164 } else {
2165 distance_postfix_bits = params.dist.distance_postfix_bits;
2166 num_direct_distance_codes = params.dist.num_direct_distance_codes;
2167 }
2168 let ndirect_msb = (num_direct_distance_codes >> distance_postfix_bits) & 0x0f;
2169 if distance_postfix_bits > BROTLI_MAX_NPOSTFIX as u32
2170 || num_direct_distance_codes > BROTLI_MAX_NDIRECT as u32
2171 || (ndirect_msb << distance_postfix_bits) != num_direct_distance_codes
2172 {
2173 distance_postfix_bits = 0;
2174 num_direct_distance_codes = 0;
2175 }
2176 }
2177 BrotliInitDistanceParams(params, distance_postfix_bits, num_direct_distance_codes);
2178 }
2199
2200impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
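    /// Core per-meta-block compression step shared by the streaming entry
    /// points. The caller's bytes already sit in the ring buffer; this writes
    /// the optional magic-number header and catable prologue, then either runs
    /// the quality 0/1 fragment compressors directly or builds backward
    /// references and a full meta-block via `WriteMetaBlockInternal`, placing
    /// the bit stream in internal storage and reporting its byte size through
    /// `out_size`. Returns false on invalid state, e.g. more input after the
    /// last block has been emitted.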
2201 fn encode_data<MetablockCallback>(
2202 &mut self,
2203 is_last: bool,
2204 force_flush: bool,
2205 out_size: &mut usize,
2206 callback: &mut MetablockCallback,
2207 ) -> bool
2209 where
2210 MetablockCallback: FnMut(
2211 &mut interface::PredictionModeContextMap<InputReferenceMut>,
2212 &mut [interface::StaticCommand],
2213 interface::InputPair,
2214 &mut Alloc,
2215 ),
2216 {
2217 let mut delta: u64 = self.unprocessed_input_size();
2218 let mut bytes: u32 = delta as u32;
2219 let mask = self.ringbuffer_.mask_;
2220 if !self.ensure_initialized() {
2221 return false;
2222 }
2223 let dictionary = BrotliGetDictionary();
2224 if self.is_last_block_emitted_ {
2225 return false;
2226 }
2227 if is_last {
2228 self.is_last_block_emitted_ = true;
2229 }
2230 if delta > self.input_block_size() as u64 {
2231 return false;
2232 }
2233 let mut storage_ix: usize = usize::from(self.last_bytes_bits_);
2234 {
2235 let meta_size = max(
2236 bytes as usize,
2237 self.input_pos_.wrapping_sub(self.last_flush_pos_) as usize,
2238 );
2239 self.get_brotli_storage((2usize).wrapping_mul(meta_size).wrapping_add(503 + 24));
2240 }
2241 {
2242 self.storage_.slice_mut()[0] = self.last_bytes_ as u8;
2243 self.storage_.slice_mut()[1] = (self.last_bytes_ >> 8) as u8;
2244 }
2245 let mut catable_header_size = 0;
2246 if let IsFirst::NothingWritten = self.is_first_mb {
2247 if self.params.magic_number {
2248 BrotliWriteMetadataMetaBlock(
2249 &self.params,
2250 &mut storage_ix,
2251 self.storage_.slice_mut(),
2252 );
2253 self.last_bytes_ = self.storage_.slice()[(storage_ix >> 3)] as u16
2254 | ((self.storage_.slice()[1 + (storage_ix >> 3)] as u16) << 8);
2255 self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
2256 self.next_out_ = NextOut::DynamicStorage(0);
2257 catable_header_size = storage_ix >> 3;
2258 *out_size = catable_header_size;
2259 self.is_first_mb = IsFirst::HeaderWritten;
2260 }
2261 }
2262 if let IsFirst::BothCatableBytesWritten = self.is_first_mb {
2263 } else if !self.params.catable {
2265 self.is_first_mb = IsFirst::BothCatableBytesWritten;
2266 } else if bytes != 0 {
2267 assert!(self.last_processed_pos_ < 2 || self.custom_dictionary);
2268 let num_bytes_to_write_uncompressed: usize = min(2, bytes as usize);
2269 {
2270 let data =
2271 &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..];
2272 store_uncompressed_meta_block(
2273 &mut self.m8,
2274 false,
2275 data,
2276 self.last_flush_pos_ as usize,
2277 mask as usize,
2278 &self.params,
2279 num_bytes_to_write_uncompressed,
2280 &mut self.recoder_state,
2281 &mut storage_ix,
2282 self.storage_.slice_mut(),
                    false,
                    callback,
2285 );
2286 self.last_bytes_ = self.storage_.slice()[(storage_ix >> 3)] as u16
2287 | ((self.storage_.slice()[1 + (storage_ix >> 3)] as u16) << 8);
2288 self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
2289 self.prev_byte2_ = self.prev_byte_;
2290 self.prev_byte_ = data[self.last_flush_pos_ as usize & mask as usize];
2291 if num_bytes_to_write_uncompressed == 2 {
2292 self.prev_byte2_ = self.prev_byte_;
2293 self.prev_byte_ = data[(self.last_flush_pos_ + 1) as usize & mask as usize];
2294 }
2295 }
2296 self.last_flush_pos_ += num_bytes_to_write_uncompressed as u64;
2297 bytes -= num_bytes_to_write_uncompressed as u32;
2298 self.last_processed_pos_ += num_bytes_to_write_uncompressed as u64;
2299 if num_bytes_to_write_uncompressed >= 2 {
2300 self.is_first_mb = IsFirst::BothCatableBytesWritten;
2301 } else if num_bytes_to_write_uncompressed == 1 {
2302 if let IsFirst::FirstCatableByteWritten = self.is_first_mb {
2303 self.is_first_mb = IsFirst::BothCatableBytesWritten;
2304 } else {
2305 self.is_first_mb = IsFirst::FirstCatableByteWritten;
2306 }
2307 }
2308 catable_header_size = storage_ix >> 3;
2309 self.next_out_ = NextOut::DynamicStorage(0);
2310 *out_size = catable_header_size;
2311 delta = self.unprocessed_input_size();
2312 }
2313 let mut wrapped_last_processed_pos: u32 = WrapPosition(self.last_processed_pos_);
2314 if self.params.quality == 1i32 && self.command_buf_.slice().is_empty() {
2315 let new_buf = allocate::<u32, _>(&mut self.m8, kCompressFragmentTwoPassBlockSize);
2316 self.command_buf_ = new_buf;
2317 let new_buf8 = allocate::<u8, _>(&mut self.m8, kCompressFragmentTwoPassBlockSize);
2318 self.literal_buf_ = new_buf8;
2319 }
2320 if self.params.quality == 0i32 || self.params.quality == 1i32 {
2321 let mut table_size: usize = 0;
2322 {
2323 if delta == 0 && !is_last {
2324 *out_size = catable_header_size;
2325 return true;
2326 }
2327 let data =
2328 &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..];
2329
2330 let table: &mut [i32] =
2334 GetHashTable!(self, self.params.quality, bytes as usize, &mut table_size);
2335
2336 if self.params.quality == 0i32 {
2337 compress_fragment_fast(
2338 &mut self.m8,
2339 &mut data[((wrapped_last_processed_pos & mask) as usize)..],
2340 bytes as usize,
2341 is_last,
2342 table,
2343 table_size,
2344 &mut self.cmd_depths_[..],
2345 &mut self.cmd_bits_[..],
2346 &mut self.cmd_code_numbits_,
2347 &mut self.cmd_code_[..],
2348 &mut storage_ix,
2349 self.storage_.slice_mut(),
2350 );
2351 } else {
2352 compress_fragment_two_pass(
2353 &mut self.m8,
2354 &mut data[((wrapped_last_processed_pos & mask) as usize)..],
2355 bytes as usize,
2356 is_last,
2357 self.command_buf_.slice_mut(),
2358 self.literal_buf_.slice_mut(),
2359 table,
2360 table_size,
2361 &mut storage_ix,
2362 self.storage_.slice_mut(),
2363 );
2364 }
2365 self.last_bytes_ = self.storage_.slice()[(storage_ix >> 3)] as u16
2366 | ((self.storage_.slice()[(storage_ix >> 3) + 1] as u16) << 8);
2367 self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
2368 }
2369 self.update_last_processed_pos();
            self.next_out_ = NextOut::DynamicStorage(0);
            *out_size = storage_ix >> 3;
2373 return true;
2374 }
2375 {
2376 let mut newsize: usize = self
2377 .num_commands_
2378 .wrapping_add(bytes.wrapping_div(2) as usize)
2379 .wrapping_add(1);
2380 if newsize > self.cmd_alloc_size_ {
2381 newsize = newsize.wrapping_add(bytes.wrapping_div(4).wrapping_add(16) as usize);
2382 self.cmd_alloc_size_ = newsize;
2383 let mut new_commands = allocate::<Command, _>(&mut self.m8, newsize);
2384 if !self.commands_.slice().is_empty() {
2385 new_commands.slice_mut()[..self.num_commands_]
2386 .clone_from_slice(&self.commands_.slice()[..self.num_commands_]);
2387 <Alloc as Allocator<Command>>::free_cell(
2388 &mut self.m8,
2389 core::mem::take(&mut self.commands_),
2390 );
2391 }
2392 self.commands_ = new_commands;
2393 }
2394 }
2395 InitOrStitchToPreviousBlock(
2396 &mut self.m8,
2397 &mut self.hasher_,
2398 &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..],
2399 mask as usize,
2400 &mut self.params,
2401 wrapped_last_processed_pos as usize,
2402 bytes as usize,
2403 is_last,
2404 );
2405 let literal_context_mode = ChooseContextMode(
2406 &self.params,
2407 self.ringbuffer_.data_mo.slice(),
2408 WrapPosition(self.last_flush_pos_) as usize,
2409 mask as usize,
2410 (self.input_pos_.wrapping_sub(self.last_flush_pos_)) as usize,
2411 );
2412 if self.num_commands_ != 0 && self.last_insert_len_ == 0 {
2413 self.extend_last_command(&mut bytes, &mut wrapped_last_processed_pos);
2414 }
2415 BrotliCreateBackwardReferences(
2416 &mut self.m8,
2417 dictionary,
2418 bytes as usize,
2419 wrapped_last_processed_pos as usize,
2420 &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..],
2421 mask as usize,
2422 &mut self.params,
2423 &mut self.hasher_,
2424 &mut self.dist_cache_,
2425 &mut self.last_insert_len_,
2426 &mut self.commands_.slice_mut()[self.num_commands_..],
2427 &mut self.num_commands_,
2428 &mut self.num_literals_,
2429 );
2430 {
2431 let max_length: usize = MaxMetablockSize(&mut self.params);
2432 let max_literals: usize = max_length.wrapping_div(8);
2433 let max_commands: usize = max_length.wrapping_div(8);
2434 let processed_bytes: usize =
2435 self.input_pos_.wrapping_sub(self.last_flush_pos_) as usize;
2436 let next_input_fits_metablock =
2437 processed_bytes.wrapping_add(self.input_block_size()) <= max_length;
2438 let should_flush = self.params.quality < 4
2439 && self.num_literals_.wrapping_add(self.num_commands_) >= 0x2fff;
2440 if !is_last
2441 && !force_flush
2442 && !should_flush
2443 && next_input_fits_metablock
2444 && self.num_literals_ < max_literals
2445 && self.num_commands_ < max_commands
2446 {
2447 if self.update_last_processed_pos() {
2448 HasherReset(&mut self.hasher_);
2449 }
2450 *out_size = catable_header_size;
2451 return true;
2452 }
2453 }
2454 if self.last_insert_len_ > 0usize {
2455 self.commands_.slice_mut()[self.num_commands_].init_insert(self.last_insert_len_);
2456 self.num_commands_ = self.num_commands_.wrapping_add(1);
2457 self.num_literals_ = self.num_literals_.wrapping_add(self.last_insert_len_);
2458 self.last_insert_len_ = 0usize;
2459 }
2460 if !is_last && self.input_pos_ == self.last_flush_pos_ {
2461 *out_size = catable_header_size;
2462 return true;
2463 }
2464 {
2465 let metablock_size: u32 = self.input_pos_.wrapping_sub(self.last_flush_pos_) as u32;
2466 WriteMetaBlockInternal(
2471 &mut self.m8,
2472 &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..],
2473 mask as usize,
2474 self.last_flush_pos_,
2475 metablock_size as usize,
2476 is_last,
2477 literal_context_mode,
2478 &mut self.params,
2479 &mut self.literal_scratch_space,
2480 &mut self.command_scratch_space,
2481 &mut self.distance_scratch_space,
2482 self.prev_byte_,
2483 self.prev_byte2_,
2484 self.num_literals_,
2485 self.num_commands_,
2486 self.commands_.slice_mut(),
2487 &mut self.saved_dist_cache_,
2488 &mut self.dist_cache_,
2489 &mut self.recoder_state,
2490 &mut storage_ix,
2491 self.storage_.slice_mut(),
2492 callback,
2493 );
2494
2495 self.last_bytes_ = self.storage_.slice()[(storage_ix >> 3)] as u16
2496 | ((self.storage_.slice()[1 + (storage_ix >> 3)] as u16) << 8);
2497 self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
2498 self.last_flush_pos_ = self.input_pos_;
2499 if self.update_last_processed_pos() {
2500 HasherReset(&mut self.hasher_);
2501 }
2502 let data = &self.ringbuffer_.data_mo.slice()[self.ringbuffer_.buffer_index..];
2503 if self.last_flush_pos_ > 0 {
2504 self.prev_byte_ =
2505 data[(((self.last_flush_pos_ as u32).wrapping_sub(1) & mask) as usize)];
2506 }
2507 if self.last_flush_pos_ > 1 {
2508 self.prev_byte2_ =
2509 data[((self.last_flush_pos_.wrapping_sub(2) as u32 & mask) as usize)];
2510 }
2511 self.num_commands_ = 0usize;
2512 self.num_literals_ = 0usize;
2513 self.saved_dist_cache_
2514 .clone_from_slice(self.dist_cache_.split_at(4).0);
            self.next_out_ = NextOut::DynamicStorage(0);
            *out_size = storage_ix >> 3;
2517 true
2518 }
2519 }
2520
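    /// Writes the header of a metadata meta-block (RFC 7932 meta-block header)
    /// into the tiny output buffer: ISLAST = 0, the 2-bit value 3 signalling
    /// zero length nibbles, a reserved 0 bit, MSKIPBYTES, and the little-endian
    /// value MSKIPLEN - 1. Returns the number of header bytes produced.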
2521 fn write_metadata_header(&mut self) -> usize {
2522 let block_size = self.remaining_metadata_bytes_ as usize;
2523 let header = GetNextOut!(*self);
2524 let mut storage_ix: usize;
2525 storage_ix = self.last_bytes_bits_ as usize;
2526 header[0] = self.last_bytes_ as u8;
2527 header[1] = (self.last_bytes_ >> 8) as u8;
2528 self.last_bytes_ = 0;
2529 self.last_bytes_bits_ = 0;
2530 BrotliWriteBits(1, 0, &mut storage_ix, header);
2531 BrotliWriteBits(2usize, 3, &mut storage_ix, header);
2532 BrotliWriteBits(1, 0, &mut storage_ix, header);
2533 if block_size == 0usize {
2534 BrotliWriteBits(2usize, 0, &mut storage_ix, header);
2535 } else {
2536 let nbits: u32 = if block_size == 1 {
2537 0u32
2538 } else {
2539 Log2FloorNonZero((block_size as u32).wrapping_sub(1) as (u64)).wrapping_add(1)
2540 };
2541 let nbytes: u32 = nbits.wrapping_add(7).wrapping_div(8);
2542 BrotliWriteBits(2usize, nbytes as (u64), &mut storage_ix, header);
2543 BrotliWriteBits(
2544 (8u32).wrapping_mul(nbytes) as usize,
2545 block_size.wrapping_sub(1) as u64,
2546 &mut storage_ix,
2547 header,
2548 );
2549 }
2550 storage_ix.wrapping_add(7u32 as usize) >> 3
2551 }
2552}
2553
2554impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
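    /// Drives the metadata emission used by `BROTLI_OPERATION_EMIT_METADATA`
    /// (at most 1 << 24 bytes). Any pending compressed data is flushed first,
    /// then the stream moves from `BROTLI_STREAM_METADATA_HEAD` (emit the skip
    /// header) to `BROTLI_STREAM_METADATA_BODY` (copy the raw metadata bytes to
    /// the output, directly or via the tiny buffer) and finally back to
    /// `BROTLI_STREAM_PROCESSING`.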
2555 fn process_metadata<
2556 MetaBlockCallback: FnMut(
2557 &mut interface::PredictionModeContextMap<InputReferenceMut>,
2558 &mut [interface::StaticCommand],
2559 interface::InputPair,
2560 &mut Alloc,
2561 ),
2562 >(
2563 &mut self,
2564 available_in: &mut usize,
2565 next_in_array: &[u8],
2566 next_in_offset: &mut usize,
2567 available_out: &mut usize,
2568 next_out_array: &mut [u8],
2569 next_out_offset: &mut usize,
2570 total_out: &mut Option<usize>,
2571 metablock_callback: &mut MetaBlockCallback,
2572 ) -> bool {
2573 if *available_in > (1u32 << 24) as usize {
2574 return false;
2575 }
2576 if self.stream_state_ as i32 == BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING as i32 {
2577 self.remaining_metadata_bytes_ = *available_in as u32;
2578 self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_METADATA_HEAD;
2579 }
2580 if self.stream_state_ as i32 != BrotliEncoderStreamState::BROTLI_STREAM_METADATA_HEAD as i32
2581 && (self.stream_state_ as i32
2582 != BrotliEncoderStreamState::BROTLI_STREAM_METADATA_BODY as i32)
2583 {
2584 return false;
2585 }
2586 loop {
2587 if self.inject_flush_or_push_output(
2588 available_out,
2589 next_out_array,
2590 next_out_offset,
2591 total_out,
2592 ) {
2593 continue;
2594 }
2595 if self.available_out_ != 0usize {
2596 break;
2597 }
2598 if self.input_pos_ != self.last_flush_pos_ {
2599 let mut avail_out: usize = self.available_out_;
2600 let result = self.encode_data(false, true, &mut avail_out, metablock_callback);
2601 self.available_out_ = avail_out;
2602 if !result {
2603 return false;
2604 }
2605 continue;
2606 }
2607 if self.stream_state_ as i32
2608 == BrotliEncoderStreamState::BROTLI_STREAM_METADATA_HEAD as i32
2609 {
2610 self.next_out_ = NextOut::TinyBuf(0);
2611 self.available_out_ = self.write_metadata_header();
2612 self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_METADATA_BODY;
2613 {
2614 continue;
2615 }
2616 } else {
2617 if self.remaining_metadata_bytes_ == 0u32 {
2618 self.remaining_metadata_bytes_ = u32::MAX;
2619 self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING;
2620 {
2621 break;
2622 }
2623 }
2624 if *available_out != 0 {
2625 let copy: u32 =
2626 min(self.remaining_metadata_bytes_ as usize, *available_out) as u32;
2627 next_out_array[*next_out_offset..(*next_out_offset + copy as usize)]
2628 .clone_from_slice(
2629 &next_in_array[*next_in_offset..(*next_in_offset + copy as usize)],
2630 );
2631 *next_in_offset += copy as usize;
2634 *available_in = available_in.wrapping_sub(copy as usize);
2635 self.remaining_metadata_bytes_ =
2636 self.remaining_metadata_bytes_.wrapping_sub(copy);
2637 *next_out_offset += copy as usize;
2638 *available_out = available_out.wrapping_sub(copy as usize);
2640 } else {
2641 let copy: u32 = min(self.remaining_metadata_bytes_, 16u32);
2642 self.next_out_ = NextOut::TinyBuf(0);
2643 GetNextOut!(self)[..(copy as usize)].clone_from_slice(
2644 &next_in_array[*next_in_offset..(*next_in_offset + copy as usize)],
2645 );
2646 *next_in_offset += copy as usize;
2649 *available_in = available_in.wrapping_sub(copy as usize);
2650 self.remaining_metadata_bytes_ =
2651 self.remaining_metadata_bytes_.wrapping_sub(copy);
2652 self.available_out_ = copy as usize;
2653 }
2654 {
2655 continue;
2656 }
2657 }
2658 }
2659 true
2660 }
2661}
2662fn CheckFlushCompleteInner(
2663 stream_state: &mut BrotliEncoderStreamState,
2664 available_out: usize,
2665 next_out: &mut NextOut,
2666) {
2667 if *stream_state == BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED
2668 && (available_out == 0)
2669 {
2670 *stream_state = BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING;
2671 *next_out = NextOut::None;
2672 }
2673}
2674
2675impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
2676 fn check_flush_complete(&mut self) {
2677 CheckFlushCompleteInner(
2678 &mut self.stream_state_,
2679 self.available_out_,
2680 &mut self.next_out_,
2681 );
2682 }
2683
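    /// Streaming path for qualities 0 and 1 only: input is compressed in
    /// window-sized blocks straight from the caller's buffer with
    /// `compress_fragment_fast` / `compress_fragment_two_pass`. When the
    /// worst-case output (2 * block_size + 503 bytes) fits in `available_out`,
    /// the bit stream is written directly into `next_out_array`; otherwise it
    /// goes through the internal storage buffer and is drained later.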
2684 fn compress_stream_fast(
2685 &mut self,
2686 op: BrotliEncoderOperation,
2687 available_in: &mut usize,
2688 next_in_array: &[u8],
2689 next_in_offset: &mut usize,
2690 available_out: &mut usize,
2691 next_out_array: &mut [u8],
2692 next_out_offset: &mut usize,
2693 total_out: &mut Option<usize>,
2694 ) -> bool {
2695 let block_size_limit: usize = 1 << self.params.lgwin;
2696 let buf_size: usize = min(
2697 kCompressFragmentTwoPassBlockSize,
2698 min(*available_in, block_size_limit),
2699 );
2700 let mut command_buf = alloc_default::<u32, Alloc>();
2701 let mut literal_buf = alloc_default::<u8, Alloc>();
2702 if self.params.quality != 0i32 && (self.params.quality != 1i32) {
2703 return false;
2704 }
2705 if self.params.quality == 1i32 {
2706 if self.command_buf_.slice().is_empty()
2707 && (buf_size == kCompressFragmentTwoPassBlockSize)
2708 {
2709 self.command_buf_ =
2710 allocate::<u32, _>(&mut self.m8, kCompressFragmentTwoPassBlockSize);
2711 self.literal_buf_ =
2712 allocate::<u8, _>(&mut self.m8, kCompressFragmentTwoPassBlockSize);
2713 }
2714 if !self.command_buf_.slice().is_empty() {
2715 command_buf = core::mem::take(&mut self.command_buf_);
2716 literal_buf = core::mem::take(&mut self.literal_buf_);
2717 } else {
2718 command_buf = allocate::<u32, _>(&mut self.m8, buf_size);
2719 literal_buf = allocate::<u8, _>(&mut self.m8, buf_size);
2720 }
2721 }
2722 loop {
2723 if self.inject_flush_or_push_output(
2724 available_out,
2725 next_out_array,
2726 next_out_offset,
2727 total_out,
2728 ) {
2729 continue;
2730 }
2731 if self.available_out_ == 0usize
2732 && (self.stream_state_ as i32
2733 == BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING as i32)
2734 && (*available_in != 0usize
2735 || op as i32 != BrotliEncoderOperation::BROTLI_OPERATION_PROCESS as i32)
2736 {
2737 let block_size: usize = min(block_size_limit, *available_in);
2738 let is_last = *available_in == block_size
2739 && op == BrotliEncoderOperation::BROTLI_OPERATION_FINISH;
2740 let force_flush = *available_in == block_size
2741 && op == BrotliEncoderOperation::BROTLI_OPERATION_FLUSH;
2742 let max_out_size: usize = (2usize).wrapping_mul(block_size).wrapping_add(503);
2743 let mut inplace: i32 = 1i32;
2744 let storage: &mut [u8];
2745 let mut storage_ix: usize = self.last_bytes_bits_ as usize;
2746 let mut table_size: usize = 0;
2747
2748 if force_flush && block_size == 0 {
2749 self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED;
2750 {
2751 continue;
2752 }
2753 }
2754 if max_out_size <= *available_out {
                    storage = &mut next_out_array[*next_out_offset..];
                } else {
2757 inplace = 0i32;
2758 self.get_brotli_storage(max_out_size);
2759 storage = self.storage_.slice_mut();
2760 }
2761 storage[0] = self.last_bytes_ as u8;
2762 storage[1] = (self.last_bytes_ >> 8) as u8;
2763 let table: &mut [i32] =
2764 GetHashTable!(self, self.params.quality, block_size, &mut table_size);
2765 if self.params.quality == 0i32 {
2766 compress_fragment_fast(
2767 &mut self.m8,
2768 &(next_in_array)[*next_in_offset..],
2769 block_size,
2770 is_last,
2771 table,
2772 table_size,
2773 &mut self.cmd_depths_[..],
2774 &mut self.cmd_bits_[..],
2775 &mut self.cmd_code_numbits_,
2776 &mut self.cmd_code_[..],
2777 &mut storage_ix,
2778 storage,
2779 );
2780 } else {
2781 compress_fragment_two_pass(
2782 &mut self.m8,
2783 &(next_in_array)[*next_in_offset..],
2784 block_size,
2785 is_last,
2786 command_buf.slice_mut(),
2787 literal_buf.slice_mut(),
2788 table,
2789 table_size,
2790 &mut storage_ix,
2791 storage,
2792 );
2793 }
2794 *next_in_offset += block_size;
2795 *available_in = available_in.wrapping_sub(block_size);
2796 if inplace != 0 {
2797 let out_bytes: usize = storage_ix >> 3;
2798 *next_out_offset += out_bytes;
2799 *available_out = available_out.wrapping_sub(out_bytes);
2800 self.total_out_ = self.total_out_.wrapping_add(out_bytes as u64);
2801 if let &mut Some(ref mut total_out_inner) = total_out {
2802 *total_out_inner = self.total_out_ as usize;
2803 }
2804 } else {
2805 let out_bytes: usize = storage_ix >> 3;
2806 self.next_out_ = NextOut::DynamicStorage(0);
2807 self.available_out_ = out_bytes;
2808 }
2809 self.last_bytes_ = storage[(storage_ix >> 3)] as u16
2810 | ((storage[1 + (storage_ix >> 3)] as u16) << 8);
2811 self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
2812 if force_flush {
2813 self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED;
2814 }
2815 if is_last {
2816 self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FINISHED;
2817 }
2818 {
2819 continue;
2820 }
2821 }
2822 {
2823 break;
2824 }
2825 }
2826 if command_buf.slice().len() == kCompressFragmentTwoPassBlockSize
2827 && self.command_buf_.slice().is_empty()
2828 {
2829 self.command_buf_ = core::mem::take(&mut command_buf);
2831 self.literal_buf_ = core::mem::take(&mut literal_buf);
2832 } else {
2833 <Alloc as Allocator<u32>>::free_cell(&mut self.m8, command_buf);
2834 <Alloc as Allocator<u8>>::free_cell(&mut self.m8, literal_buf);
2835 }
2836 self.check_flush_complete();
2837 true
2838 }
2839
2840 fn remaining_input_block_size(&mut self) -> usize {
2841 let delta: u64 = self.unprocessed_input_size();
2842 let block_size = self.input_block_size();
2843 if delta >= block_size as u64 {
2844 return 0usize;
2845 }
2846 (block_size as u64).wrapping_sub(delta) as usize
2847 }
2848
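    /// Public streaming entry point, mirroring `BrotliEncoderCompressStream`
    /// from the C API: it feeds bytes from `next_in_array` into the ring
    /// buffer, emits output into `next_out_array`, and reacts to `op`
    /// (PROCESS / FLUSH / FINISH / EMIT_METADATA). Quality 0/1 non-catable
    /// streams are routed to `compress_stream_fast`. Returns false on invalid
    /// usage, e.g. metadata calls in the wrong state or trailing input after
    /// the stream has finished.
    ///
    /// A minimal one-shot usage sketch (not compiled here; constructing the
    /// encoder state and sizing `output` to hold the whole compressed stream
    /// are assumed to be handled by the caller):
    ///
    /// ```ignore
    /// let mut avail_in = input.len();
    /// let mut in_off = 0usize;
    /// let mut avail_out = output.len();
    /// let mut out_off = 0usize;
    /// let mut total_out = Some(0);
    /// while !state.is_finished() {
    ///     assert!(state.compress_stream(
    ///         BrotliEncoderOperation::BROTLI_OPERATION_FINISH,
    ///         &mut avail_in,
    ///         &input,
    ///         &mut in_off,
    ///         &mut avail_out,
    ///         &mut output,
    ///         &mut out_off,
    ///         &mut total_out,
    ///         &mut |_, _, _, _| (),
    ///     ));
    /// }
    /// // out_off now holds the number of compressed bytes written.
    /// ```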
2849 pub fn compress_stream<
2850 MetablockCallback: FnMut(
2851 &mut interface::PredictionModeContextMap<InputReferenceMut>,
2852 &mut [interface::StaticCommand],
2853 interface::InputPair,
2854 &mut Alloc,
2855 ),
2856 >(
2857 &mut self,
2858 op: BrotliEncoderOperation,
2859 available_in: &mut usize,
2860 next_in_array: &[u8],
2861 next_in_offset: &mut usize,
2862 available_out: &mut usize,
2863 next_out_array: &mut [u8],
2864 next_out_offset: &mut usize,
2865 total_out: &mut Option<usize>,
2866 metablock_callback: &mut MetablockCallback,
2867 ) -> bool {
2868 if !self.ensure_initialized() {
2869 return false;
2870 }
2871 if self.remaining_metadata_bytes_ != u32::MAX {
2872 if *available_in != self.remaining_metadata_bytes_ as usize {
2873 return false;
2874 }
2875 if op as i32 != BrotliEncoderOperation::BROTLI_OPERATION_EMIT_METADATA as i32 {
2876 return false;
2877 }
2878 }
2879 if op as i32 == BrotliEncoderOperation::BROTLI_OPERATION_EMIT_METADATA as i32 {
2880 self.update_size_hint(0);
2881 return self.process_metadata(
2882 available_in,
2883 next_in_array,
2884 next_in_offset,
2885 available_out,
2886 next_out_array,
2887 next_out_offset,
2888 total_out,
2889 metablock_callback,
2890 );
2891 }
2892 if self.stream_state_ as i32 == BrotliEncoderStreamState::BROTLI_STREAM_METADATA_HEAD as i32
2893 || self.stream_state_ as i32
2894 == BrotliEncoderStreamState::BROTLI_STREAM_METADATA_BODY as i32
2895 {
2896 return false;
2897 }
2898 if self.stream_state_ as i32 != BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING as i32
2899 && (*available_in != 0usize)
2900 {
2901 return false;
2902 }
2903 if (self.params.quality == 0i32 || self.params.quality == 1i32) && !self.params.catable {
2904 return self.compress_stream_fast(
2906 op,
2907 available_in,
2908 next_in_array,
2909 next_in_offset,
2910 available_out,
2911 next_out_array,
2912 next_out_offset,
2913 total_out,
2914 );
2915 }
2916 loop {
2917 let remaining_block_size: usize = self.remaining_input_block_size();
2918 if remaining_block_size != 0usize && (*available_in != 0usize) {
2919 let copy_input_size: usize = min(remaining_block_size, *available_in);
2920 self.copy_input_to_ring_buffer(copy_input_size, &next_in_array[*next_in_offset..]);
2921 *next_in_offset += copy_input_size;
2922 *available_in = available_in.wrapping_sub(copy_input_size);
2923 {
2924 continue;
2925 }
2926 }
2927 if self.inject_flush_or_push_output(
2928 available_out,
2929 next_out_array,
2930 next_out_offset,
2931 total_out,
2932 ) {
2933 continue;
2934 }
2935 if self.available_out_ == 0usize
2936 && (self.stream_state_ as i32
2937 == BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING as i32)
2938 && (remaining_block_size == 0usize
2939 || op as i32 != BrotliEncoderOperation::BROTLI_OPERATION_PROCESS as i32)
2940 {
2941 let is_last =
2942 *available_in == 0 && op == BrotliEncoderOperation::BROTLI_OPERATION_FINISH;
2943 let force_flush =
2944 *available_in == 0 && op == BrotliEncoderOperation::BROTLI_OPERATION_FLUSH;
2945
2946 self.update_size_hint(*available_in);
2947 let mut avail_out = self.available_out_;
2948 let result =
2949 self.encode_data(is_last, force_flush, &mut avail_out, metablock_callback);
2950 self.available_out_ = avail_out;
2951 if !result {
2953 return false;
2954 }
2955 if force_flush {
2956 self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED;
2957 }
2958 if is_last {
2959 self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FINISHED;
2960 }
2961 {
2962 continue;
2963 }
2964 }
2965 {
2966 break;
2967 }
2968 }
2969 self.check_flush_complete();
2970 true
2971 }
2972
2973 pub fn is_finished(&self) -> bool {
2974 self.stream_state_ == BrotliEncoderStreamState::BROTLI_STREAM_FINISHED
2975 && !self.has_more_output()
2976 }
2977
2978 pub fn has_more_output(&self) -> bool {
2979 self.available_out_ != 0
2980 }
2981
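    /// Hands back a slice of pending output. If `*size` is zero all buffered
    /// bytes are released, otherwise at most `*size`; on return `*size` holds
    /// the number of valid bytes in the returned slice (which may be backed by
    /// a longer buffer). Returns an empty slice when nothing is pending.
    ///
    /// Typical drain loop (a sketch; `sink` is any byte sink the caller owns):
    ///
    /// ```ignore
    /// while state.has_more_output() {
    ///     let mut len = 0usize; // 0 means "give me everything available"
    ///     let chunk = state.take_output(&mut len);
    ///     sink.extend_from_slice(&chunk[..len]);
    /// }
    /// ```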
2982 pub fn take_output(&mut self, size: &mut usize) -> &[u8] {
2983 let mut consumed_size: usize = self.available_out_;
2984 let mut result: &[u8] = GetNextOut!(*self);
2985 if *size != 0 {
2986 consumed_size = min(*size, self.available_out_);
2987 }
2988 if consumed_size != 0 {
2989 self.next_out_ = NextOutIncrement(&self.next_out_, consumed_size as i32);
2990 self.available_out_ = self.available_out_.wrapping_sub(consumed_size);
2991 self.total_out_ = self.total_out_.wrapping_add(consumed_size as u64);
2992 CheckFlushCompleteInner(
2993 &mut self.stream_state_,
2994 self.available_out_,
2995 &mut self.next_out_,
2996 );
2997 *size = consumed_size;
2998 } else {
2999 *size = 0usize;
3000 result = &[];
3001 }
3002 result
3003 }
3004}
3005
3006pub fn BrotliEncoderVersion() -> u32 {
3007 0x0100_0f01
3008}
3009
3010impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
3011 pub fn input_block_size(&mut self) -> usize {
3012 if !self.ensure_initialized() {
3013 return 0;
3014 }
3015 1 << self.params.lgblock
3016 }
3017
3018 pub fn write_data<
3019 'a,
3020 MetablockCallback: FnMut(
3021 &mut interface::PredictionModeContextMap<InputReferenceMut>,
3022 &mut [interface::StaticCommand],
3023 interface::InputPair,
3024 &mut Alloc,
3025 ),
3026 >(
3027 &'a mut self,
3028 is_last: i32,
3030 force_flush: i32,
3032 out_size: &mut usize,
3034 output: &'a mut &'a mut [u8],
3036 metablock_callback: &mut MetablockCallback,
3037 ) -> bool {
3038 let ret = self.encode_data(is_last != 0, force_flush != 0, out_size, metablock_callback);
3039 *output = self.storage_.slice_mut();
3040 ret
3041 }
3042}
3043
3044#[cfg(feature = "std")]
3045mod test {
3046 #[cfg(test)]
3047 use alloc_stdlib::StandardAlloc;
3048
3049 #[test]
3050 fn test_encoder_compress() {
3051 let input = include_bytes!("../../testdata/alice29.txt");
3052 let mut output_buffer = [0; 100000];
3053 let mut output_len = output_buffer.len();
3054 let ret = super::encoder_compress(
3055 StandardAlloc::default(),
3056 &mut StandardAlloc::default(),
3057 9,
3058 16,
3059 super::BrotliEncoderMode::BROTLI_MODE_GENERIC,
3060 input.len(),
3061 input,
3062 &mut output_len,
3063 &mut output_buffer,
            &mut |_, _, _, _| (),
3065 );
3066 assert!(ret);
        assert_eq!(output_len, 51737);
        let mut roundtrip = [0u8; 200000];
        let (_, s, t) = super::super::test::oneshot_decompress(
            &output_buffer[..output_len],
            &mut roundtrip[..],
        );
3070 assert_eq!(roundtrip[..t], input[..]);
3071 assert_eq!(s, output_len);
3072 }
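
    // Round-trip check for the quality-1 two-pass fast path: a sketch mirroring
    // test_encoder_compress above. It assumes the quality-1 output for
    // alice29.txt fits in the 120000-byte buffer; no exact compressed size is
    // asserted.
    #[test]
    fn test_encoder_compress_q1_roundtrip() {
        let input = include_bytes!("../../testdata/alice29.txt");
        let mut output_buffer = [0; 120000];
        let mut output_len = output_buffer.len();
        let ret = super::encoder_compress(
            StandardAlloc::default(),
            &mut StandardAlloc::default(),
            1,
            16,
            super::BrotliEncoderMode::BROTLI_MODE_GENERIC,
            input.len(),
            input,
            &mut output_len,
            &mut output_buffer,
            &mut |_, _, _, _| (),
        );
        assert!(ret);
        assert!(output_len < output_buffer.len());
        let mut roundtrip = [0u8; 200000];
        let (_, s, t) = super::super::test::oneshot_decompress(
            &output_buffer[..output_len],
            &mut roundtrip[..],
        );
        assert_eq!(roundtrip[..t], input[..]);
        assert_eq!(s, output_len);
    }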
3073}