brotli/enc/backward_references/mod.rs

mod benchmark;
pub mod hash_to_binary_tree;
pub mod hq;
mod test;

use core::cmp::{max, min};

use super::super::alloc::{Allocator, SliceWrapper, SliceWrapperMut};
use super::command::{BrotliDistanceParams, Command, ComputeDistanceCode};
use super::dictionary_hash::kStaticDictionaryHash;
use super::hash_to_binary_tree::{H10Buckets, H10DefaultParams, ZopfliNode, H10};
use super::static_dict::{
    BrotliDictionary, FindMatchLengthWithLimit, FindMatchLengthWithLimitMin4,
    BROTLI_UNALIGNED_LOAD32, BROTLI_UNALIGNED_LOAD64,
};
use super::util::{floatX, Log2FloorNonZero};
use crate::enc::combined_alloc::allocate;

pub static kInvalidMatch: u32 = 0x0fff_ffff;
static kCutoffTransformsCount: u32 = 10;
static kCutoffTransforms: u64 = 0x071b_520a_da2d_3200;
pub static kHashMul32: u32 = 0x1e35_a7bd;
pub static kHashMul64: u64 = 0x1e35_a7bd_1e35_a7bd;
pub static kHashMul64Long: u64 = 0x1fe3_5a7b_d357_9bd3;

#[derive(PartialEq, Eq, Copy, Clone, Debug)]
#[repr(C)]
pub enum BrotliEncoderMode {
    BROTLI_MODE_GENERIC = 0,
    BROTLI_MODE_TEXT = 1,
    BROTLI_MODE_FONT = 2,
    BROTLI_FORCE_LSB_PRIOR = 3,
    BROTLI_FORCE_MSB_PRIOR = 4,
    BROTLI_FORCE_UTF8_PRIOR = 5,
    BROTLI_FORCE_SIGNED_PRIOR = 6,
}

#[derive(Clone, Copy, Debug, PartialEq)]
pub struct BrotliHasherParams {
    /// type of hasher to use (default: type 6, but others have tradeoffs of speed/memory)
    pub type_: i32,
    /// log2 of the number of buckets to have in the hash table (defaults to quality - 1)
    pub bucket_bits: i32,
    /// log2 of the number of potential matches to hold per bucket (hash collisions)
    pub block_bits: i32,
    /// number of bytes of a potential match to hash
    pub hash_len: i32,
    /// number of previous distance matches to check for future matches (defaults to 16)
    pub num_last_distances_to_check: i32,
    /// how much to weigh distance vs an extra byte of copy match when comparing possible copy sources
    pub literal_byte_score: i32,
}

#[derive(Clone, Debug)]
pub struct BrotliEncoderParams {
    pub dist: BrotliDistanceParams,
    /// whether this brotli file is generic, font or specifically text
    pub mode: BrotliEncoderMode,
    /// quality parameter between 0 and 11 (11 yields the smallest output but takes longest to encode)
    pub quality: i32,
    pub q9_5: bool,
    /// log2 of how big the ring buffer should be for copying prior data
    pub lgwin: i32,
    /// log2 of how often metablocks should be serialized
    pub lgblock: i32,
    /// how big the source file is (or 0 if no hint is provided)
    pub size_hint: usize,
    // FIXME: this should be bool
    /// avoid serializing out priors for literal sections in favor of decode speed
    pub disable_literal_context_modeling: i32,
    pub hasher: BrotliHasherParams,
    /// produce an IR of the compression file
    pub log_meta_block: bool,
    /// attempt to detect how many bytes before the current byte best predict it (the stride)
    /// * 0 = off (stride 1 always)
    /// * 1 = on per 16th of a file
    /// * 2 = on per block type switch
    pub stride_detection_quality: u8,
    /// if nonzero, will search for high entropy strings and log them differently to the IR
    pub high_entropy_detection_quality: u8,
    /// if nonzero, will search for the temporal locality and effectiveness of the priors
    /// for literals; the best adaptation and forgetfulness will be logged per metablock to the IR
    pub cdf_adaptation_detection: u8,
    /// whether to search for whether the previous byte or the context_map are better predictors on a per-context-map basis
    pub prior_bitmask_detection: u8,
    /// for prior bitmask detection: stride_low, stride_speed, cm_low, cm_speed
    pub literal_adaptation: [(u16, u16); 4],
    pub large_window: bool,
    /// avoid searching for the best ndirect vs npostfix parameters for distance
    pub avoid_distance_prefix_search: bool,
    /// construct brotli in such a way that it may be concatenated with another brotli file using appropriate bit ops
    pub catable: bool,
    /// can use the dictionary (default yes, unless catable is set)
    pub use_dictionary: bool,
    /// construct brotli in such a way that another concatenable brotli file may be appended
    pub appendable: bool,
    /// include a magic number, version number and size_hint at the beginning
    pub magic_number: bool,
    /// prefer to compute the map of previously seen strings
    /// just once for all the threads at the beginning, since they overlap significantly
    pub favor_cpu_efficiency: bool,
}

impl Default for BrotliEncoderParams {
    fn default() -> BrotliEncoderParams {
        super::encode::BrotliEncoderInitParams()
    }
}
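
// A minimal usage sketch (not part of the original source): defaults come from
// BrotliEncoderInitParams() above, and callers typically override a few fields
// before encoding. The field values below are illustrative only.
//
//     let mut params = BrotliEncoderParams::default();
//     params.quality = 9;             // trade encode time for density
//     params.lgwin = 22;              // ~4 MiB window for back-references
//     params.size_hint = input_len;   // helps pick hasher parameters up front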

#[derive(Clone, Copy, Default, PartialEq)]
pub struct H9Opts {
    pub literal_byte_score: u32,
}
pub enum HowPrepared {
    ALREADY_PREPARED,
    NEWLY_PREPARED,
}
#[derive(Clone, PartialEq)]
pub struct Struct1 {
    pub params: BrotliHasherParams,
    /// FIXME: this should be bool
    pub is_prepared_: i32,
    pub dict_num_lookups: usize,
    pub dict_num_matches: usize,
}

fn LiteralSpreeLengthForSparseSearch(params: &BrotliEncoderParams) -> usize {
    if params.quality < 9 {
        64
    } else {
        512
    }
}

pub struct HasherSearchResult {
    pub len: usize,
    pub len_x_code: usize,
    pub distance: usize,
    pub score: u64,
}

pub trait CloneWithAlloc<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> {
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self;
}

pub trait AnyHasher {
    fn Opts(&self) -> H9Opts;
    fn GetHasherCommon(&mut self) -> &mut Struct1;
    fn HashBytes(&self, data: &[u8]) -> usize;
    fn HashTypeLength(&self) -> usize;
    fn StoreLookahead(&self) -> usize;
    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]);
    fn FindLongestMatch(
        &mut self,
        dictionary: Option<&BrotliDictionary>,
        dictionary_hash: &[u16],
        data: &[u8],
        ring_buffer_mask: usize,
        distance_cache: &[i32],
        cur_ix: usize,
        max_length: usize,
        max_backward: usize,
        gap: usize,
        max_distance: usize,
        out: &mut HasherSearchResult,
    ) -> bool;
    fn Store(&mut self, data: &[u8], mask: usize, ix: usize);
    fn Store4Vec4(&mut self, data: &[u8], mask: usize, ix: usize) {
        for i in 0..4 {
            self.Store(data, mask, ix + i * 4);
        }
    }
    fn StoreEvenVec4(&mut self, data: &[u8], mask: usize, ix: usize) {
        for i in 0..4 {
            self.Store(data, mask, ix + i * 2);
        }
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize);
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize);
    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared;
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    );
}
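
// Typical driving pattern for an AnyHasher (illustrative sketch, not original
// source): the encoder prepares the table once, stores positions as it
// advances, and queries FindLongestMatch at each candidate position.
//
//     let _ = hasher.Prepare(true /* one_shot */, input.len(), input);
//     hasher.StoreRange(input, mask, 0, pos);
//     hasher.PrepareDistanceCache(&mut distance_cache);
//     let mut out = HasherSearchResult { len: 0, len_x_code: 0, distance: 0, score: 0 };
//     if hasher.FindLongestMatch(dict, dict_hash, input, mask, &distance_cache,
//                                pos, max_len, max_backward, gap, max_dist, &mut out) {
//         // out.len / out.distance / out.score describe the best match found
//     }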

pub fn StitchToPreviousBlockInternal<T: AnyHasher>(
    handle: &mut T,
    num_bytes: usize,
    position: usize,
    ringbuffer: &[u8],
    ringbuffer_mask: usize,
) {
    if num_bytes >= handle.HashTypeLength().wrapping_sub(1) && position >= 3 {
        handle.Store(ringbuffer, ringbuffer_mask, position.wrapping_sub(3));
        handle.Store(ringbuffer, ringbuffer_mask, position.wrapping_sub(2));
        handle.Store(ringbuffer, ringbuffer_mask, position.wrapping_sub(1));
    }
}

pub fn StoreLookaheadThenStore<T: AnyHasher>(hasher: &mut T, size: usize, dict: &[u8]) {
    let overlap = hasher.StoreLookahead().wrapping_sub(1);
    if size > overlap {
        hasher.BulkStoreRange(dict, usize::MAX, 0, size - overlap);
    }
}

pub trait BasicHashComputer {
    fn HashBytes(&self, data: &[u8]) -> u32;
    fn BUCKET_BITS(&self) -> i32;
    fn USE_DICTIONARY(&self) -> i32;
    fn BUCKET_SWEEP(&self) -> i32;
}
pub struct BasicHasher<Buckets: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> {
    pub GetHasherCommon: Struct1,
    pub buckets_: Buckets,
    pub h9_opts: H9Opts,
}

impl<A: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> PartialEq<BasicHasher<A>>
    for BasicHasher<A>
{
    fn eq(&self, other: &BasicHasher<A>) -> bool {
        self.GetHasherCommon == other.GetHasherCommon
            && self.h9_opts == other.h9_opts
            && self.buckets_.slice() == other.buckets_.slice()
    }
}

impl<T: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> BasicHasher<T> {
    fn StoreRangeOptBasic(
        &mut self,
        data: &[u8],
        mask: usize,
        ix_start: usize,
        ix_end: usize,
    ) -> usize {
        let lookahead = 8;
        if ix_end >= ix_start + lookahead * 2 {
            let chunk_count = (ix_end - ix_start) / 4;
            for chunk_id in 0..chunk_count {
                let i = (ix_start + chunk_id * 4) & mask;
                // hash four overlapping positions out of one 11-byte window
                let word11 = data.split_at(i).1.split_at(11).0;
                let mixed0 = self.HashBytes(word11);
                let mixed1 = self.HashBytes(word11.split_at(1).1);
                let mixed2 = self.HashBytes(word11.split_at(2).1);
                let mixed3 = self.HashBytes(word11.split_at(3).1);
                let off: u32 = (i >> 3).wrapping_rem(self.buckets_.BUCKET_SWEEP() as usize) as u32;
                let offset0: usize = mixed0 + off as usize;
                let offset1: usize = mixed1 + off as usize;
                let offset2: usize = mixed2 + off as usize;
                let offset3: usize = mixed3 + off as usize;
                self.buckets_.slice_mut()[offset0] = i as u32;
                self.buckets_.slice_mut()[offset1] = i as u32 + 1;
                self.buckets_.slice_mut()[offset2] = i as u32 + 2;
                self.buckets_.slice_mut()[offset3] = i as u32 + 3;
            }
            return ix_start + chunk_count * 4;
        }
        ix_start
    }
}
pub struct H2Sub<AllocU32: alloc::Allocator<u32>> {
    pub buckets_: AllocU32::AllocatedMemory, // 65537
}
impl<T: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> AnyHasher for BasicHasher<T> {
    #[inline(always)]
    fn Opts(&self) -> H9Opts {
        self.h9_opts
    }
    #[allow(unused_variables)]
    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {}
    #[inline(always)]
    fn HashTypeLength(&self) -> usize {
        8
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        8
    }
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    ) {
        StitchToPreviousBlockInternal(self, num_bytes, position, ringbuffer, ringbuffer_mask);
    }
    #[inline(always)]
    fn GetHasherCommon(&mut self) -> &mut Struct1 {
        &mut self.GetHasherCommon
    }
    #[inline(always)]
    fn HashBytes(&self, data: &[u8]) -> usize {
        self.buckets_.HashBytes(data) as usize
    }
    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
        let (_, data_window) = data.split_at(ix & mask);
        let key: u32 = self.HashBytes(data_window) as u32;
        let off: u32 = (ix >> 3).wrapping_rem(self.buckets_.BUCKET_SWEEP() as usize) as u32;
        self.buckets_.slice_mut()[key.wrapping_add(off) as usize] = ix as u32;
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in self.StoreRangeOptBasic(data, mask, ix_start, ix_end)..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        self.StoreRange(data, mask, ix_start, ix_end);
    }
    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared {
        if self.GetHasherCommon.is_prepared_ != 0 {
            return HowPrepared::ALREADY_PREPARED;
        }
        let partial_prepare_threshold = (4 << self.buckets_.BUCKET_BITS()) >> 7;
        if one_shot && input_size <= partial_prepare_threshold {
            for i in 0..input_size {
                let key = self.HashBytes(&data[i..]);
                let bs = self.buckets_.BUCKET_SWEEP() as usize;
                for item in self.buckets_.slice_mut()[key..(key + bs)].iter_mut() {
                    *item = 0;
                }
            }
        } else {
            for item in self.buckets_.slice_mut().iter_mut() {
                *item = 0;
            }
        }
        self.GetHasherCommon.is_prepared_ = 1;
        HowPrepared::NEWLY_PREPARED
    }

    fn FindLongestMatch(
        &mut self,
        dictionary: Option<&BrotliDictionary>,
        dictionary_hash: &[u16],
        data: &[u8],
        ring_buffer_mask: usize,
        distance_cache: &[i32],
        cur_ix: usize,
        max_length: usize,
        max_backward: usize,
        gap: usize,
        max_distance: usize,
        out: &mut HasherSearchResult,
    ) -> bool {
        let opts = self.Opts();
        let best_len_in: usize = out.len;
        let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
        let key: u32 = self.HashBytes(&data[cur_ix_masked..]) as u32;
        let mut compare_char: i32 = data[cur_ix_masked.wrapping_add(best_len_in)] as i32;
        let mut best_score: u64 = out.score;
        let mut best_len: usize = best_len_in;
        let cached_backward: usize = distance_cache[0] as usize;
        let mut prev_ix: usize = cur_ix.wrapping_sub(cached_backward);
        let mut is_match_found = false;
        out.len_x_code = 0usize;
        if prev_ix < cur_ix {
            prev_ix &= ring_buffer_mask as u32 as usize;
            if compare_char == data[prev_ix.wrapping_add(best_len)] as i32 {
                let len: usize = FindMatchLengthWithLimitMin4(
                    &data[prev_ix..],
                    &data[cur_ix_masked..],
                    max_length,
                );
                if len != 0 {
                    best_score = BackwardReferenceScoreUsingLastDistance(len, opts);
                    best_len = len;
                    out.len = len;
                    out.distance = cached_backward;
                    out.score = best_score;
                    compare_char = data[cur_ix_masked.wrapping_add(best_len)] as i32;
                    if self.buckets_.BUCKET_SWEEP() == 1i32 {
                        self.buckets_.slice_mut()[key as usize] = cur_ix as u32;
                        return true;
                    } else {
                        is_match_found = true;
                    }
                }
            }
        }
        let bucket_sweep = self.buckets_.BUCKET_SWEEP();
        if bucket_sweep == 1i32 {
            prev_ix = self.buckets_.slice()[key as usize] as usize;
            self.buckets_.slice_mut()[key as usize] = cur_ix as u32;
            let backward: usize = cur_ix.wrapping_sub(prev_ix);
            prev_ix &= ring_buffer_mask as u32 as usize;
            if compare_char != data[prev_ix.wrapping_add(best_len_in)] as i32 {
                return false;
            }
            if backward == 0usize || backward > max_backward {
                return false;
            }
            let len: usize =
                FindMatchLengthWithLimitMin4(&data[prev_ix..], &data[cur_ix_masked..], max_length);
            if len != 0 {
                out.len = len;
                out.distance = backward;
                out.score = BackwardReferenceScore(len, backward, opts);
                return true;
            }
        } else {
            for prev_ix_ref in
                self.buckets_.slice().split_at(key as usize).1[..bucket_sweep as usize].iter()
            {
                let mut prev_ix = *prev_ix_ref as usize;
                let backward: usize = cur_ix.wrapping_sub(prev_ix);
                prev_ix &= ring_buffer_mask as u32 as usize;
                if compare_char != data[prev_ix.wrapping_add(best_len)] as i32 {
                    continue;
                }
                if backward == 0usize || backward > max_backward {
                    continue;
                }
                let len = FindMatchLengthWithLimitMin4(
                    &data[prev_ix..],
                    &data[cur_ix_masked..],
                    max_length,
                );
                if len != 0 {
                    let score: u64 = BackwardReferenceScore(len, backward, opts);
                    if best_score < score {
                        best_score = score;
                        best_len = len;
                        out.len = best_len;
                        out.distance = backward;
                        out.score = score;
                        compare_char = data[cur_ix_masked.wrapping_add(best_len)] as i32;
                        is_match_found = true;
                    }
                }
            }
        }
        if dictionary.is_some() && self.buckets_.USE_DICTIONARY() != 0 && !is_match_found {
            is_match_found = SearchInStaticDictionary(
                dictionary.unwrap(),
                dictionary_hash,
                self,
                &data[cur_ix_masked..],
                max_length,
                max_backward.wrapping_add(gap),
                max_distance,
                out,
                true,
            );
        }
        self.buckets_.slice_mut()
            [(key as usize).wrapping_add((cur_ix >> 3).wrapping_rem(bucket_sweep as usize))] =
            cur_ix as u32;
        is_match_found
    }
}
impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H2Sub<AllocU32> {
    fn HashBytes(&self, data: &[u8]) -> u32 {
        let h: u64 =
            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 5i32)).wrapping_mul(kHashMul64);
        (h >> (64i32 - 16i32)) as u32
    }
    fn BUCKET_BITS(&self) -> i32 {
        16
    }
    fn BUCKET_SWEEP(&self) -> i32 {
        1
    }
    fn USE_DICTIONARY(&self) -> i32 {
        1
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H2Sub<AllocU32> {
    fn slice_mut(&mut self) -> &mut [u32] {
        self.buckets_.slice_mut()
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H2Sub<AllocU32> {
    fn slice(&self) -> &[u32] {
        self.buckets_.slice()
    }
}
pub struct H3Sub<AllocU32: alloc::Allocator<u32>> {
    pub buckets_: AllocU32::AllocatedMemory, // 65538
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H3Sub<AllocU32> {
    fn slice_mut(&mut self) -> &mut [u32] {
        self.buckets_.slice_mut()
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H3Sub<AllocU32> {
    fn slice(&self) -> &[u32] {
        self.buckets_.slice()
    }
}
impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H3Sub<AllocU32> {
    fn BUCKET_BITS(&self) -> i32 {
        16
    }
    fn BUCKET_SWEEP(&self) -> i32 {
        2
    }
    fn USE_DICTIONARY(&self) -> i32 {
        0
    }
    fn HashBytes(&self, data: &[u8]) -> u32 {
        let h: u64 =
            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 5i32)).wrapping_mul(kHashMul64);
        (h >> (64i32 - 16i32)) as u32
    }
}
pub struct H4Sub<AllocU32: alloc::Allocator<u32>> {
    pub buckets_: AllocU32::AllocatedMemory, // 131076
}
impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H4Sub<AllocU32> {
    fn BUCKET_BITS(&self) -> i32 {
        17
    }
    fn BUCKET_SWEEP(&self) -> i32 {
        4
    }
    fn USE_DICTIONARY(&self) -> i32 {
        1
    }
    fn HashBytes(&self, data: &[u8]) -> u32 {
        let h: u64 =
            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 5i32)).wrapping_mul(kHashMul64);
        (h >> (64i32 - 17i32)) as u32
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H4Sub<AllocU32> {
    fn slice_mut(&mut self) -> &mut [u32] {
        self.buckets_.slice_mut()
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H4Sub<AllocU32> {
    fn slice(&self) -> &[u32] {
        self.buckets_.slice()
    }
}
pub struct H54Sub<AllocU32: alloc::Allocator<u32>> {
    pub buckets_: AllocU32::AllocatedMemory,
}
impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H54Sub<AllocU32> {
    fn BUCKET_BITS(&self) -> i32 {
        20
    }
    fn BUCKET_SWEEP(&self) -> i32 {
        4
    }
    fn USE_DICTIONARY(&self) -> i32 {
        0
    }
    fn HashBytes(&self, data: &[u8]) -> u32 {
        let h: u64 =
            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 7i32)).wrapping_mul(kHashMul64);
        (h >> (64i32 - 20i32)) as u32
    }
}

impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H54Sub<AllocU32> {
    fn slice_mut(&mut self) -> &mut [u32] {
        self.buckets_.slice_mut()
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H54Sub<AllocU32> {
    fn slice(&self) -> &[u32] {
        self.buckets_.slice()
    }
}
pub const H9_BUCKET_BITS: usize = 15;
pub const H9_BLOCK_BITS: usize = 8;
pub const H9_NUM_LAST_DISTANCES_TO_CHECK: usize = 16;
pub const H9_BLOCK_SIZE: usize = 1 << H9_BLOCK_BITS;
const H9_BLOCK_MASK: usize = (1 << H9_BLOCK_BITS) - 1;
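
// Layout note (added for clarity): the H9 tables below form a two-level
// structure. `buckets_` holds H9_BLOCK_SIZE entries per hash bucket
// ((1 << 15) buckets of 256 slots each), and `num_[key]` counts how many
// positions have ever been stored under bucket `key`; the low H9_BLOCK_BITS
// bits of that counter select the ring slot, so each bucket retains the 256
// most recently stored positions for its hash.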

impl H9Opts {
    pub fn new(params: &BrotliHasherParams) -> H9Opts {
        H9Opts {
            literal_byte_score: if params.literal_byte_score != 0 {
                params.literal_byte_score as u32
            } else {
                540
            },
        }
    }
}

pub struct H9<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> {
    pub num_: <Alloc as Allocator<u16>>::AllocatedMemory, //[u16;1 << H9_BUCKET_BITS],
    pub buckets_: <Alloc as Allocator<u32>>::AllocatedMemory, //[u32; H9_BLOCK_SIZE << H9_BUCKET_BITS],
    pub dict_search_stats_: Struct1,
    pub h9_opts: H9Opts,
}

impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> PartialEq<H9<Alloc>> for H9<Alloc> {
    fn eq(&self, other: &H9<Alloc>) -> bool {
        self.dict_search_stats_ == other.dict_search_stats_
            && self.num_.slice() == other.num_.slice()
            && self.buckets_.slice() == other.buckets_.slice()
            && self.h9_opts == other.h9_opts
    }
}

fn adv_prepare_distance_cache(distance_cache: &mut [i32], num_distances: i32) {
    if num_distances > 4i32 {
        let last_distance: i32 = distance_cache[0];
        distance_cache[4] = last_distance - 1i32;
        distance_cache[5] = last_distance + 1i32;
        distance_cache[6] = last_distance - 2i32;
        distance_cache[7] = last_distance + 2i32;
        distance_cache[8] = last_distance - 3i32;
        distance_cache[9] = last_distance + 3i32;
        if num_distances > 10i32 {
            let next_last_distance: i32 = distance_cache[1];
            distance_cache[10] = next_last_distance - 1i32;
            distance_cache[11] = next_last_distance + 1i32;
            distance_cache[12] = next_last_distance - 2i32;
            distance_cache[13] = next_last_distance + 2i32;
            distance_cache[14] = next_last_distance - 3i32;
            distance_cache[15] = next_last_distance + 3i32;
        }
    }
}

pub const kDistanceCacheIndex: [u8; 16] = [0, 1, 2, 3, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1];

pub const kDistanceCacheOffset: [i8; 16] = [0, 0, 0, 0, -1, 1, -2, 2, -3, 3, -1, 1, -2, 2, -3, 3];
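
// Worked example (added for clarity): together these two tables replay the
// same candidates that adv_prepare_distance_cache() materializes. Candidate i
// is distance_cache[kDistanceCacheIndex[i]] + kDistanceCacheOffset[i]; e.g.
// i = 4 tries last_distance - 1 and i = 10 tries next_last_distance - 1,
// matching slots 4 and 10 written above.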

//const BROTLI_LITERAL_BYTE_SCORE: u64 = 540;
const BROTLI_DISTANCE_BIT_PENALTY: u32 = 120;

// Score must be positive after applying maximal penalty.
const BROTLI_SCORE_BASE: u32 = BROTLI_DISTANCE_BIT_PENALTY * 8 * 8; // 8 == sizeof(usize)
const kDistanceShortCodeCost: [u32; 16] = [
    /* Repeat last */
    BROTLI_SCORE_BASE + 60,
    /* 2nd, 3rd, 4th last */
    BROTLI_SCORE_BASE - 95,
    BROTLI_SCORE_BASE - 117,
    BROTLI_SCORE_BASE - 127,
    /* Last with offset */
    BROTLI_SCORE_BASE - 93,
    BROTLI_SCORE_BASE - 93,
    BROTLI_SCORE_BASE - 96,
    BROTLI_SCORE_BASE - 96,
    BROTLI_SCORE_BASE - 99,
    BROTLI_SCORE_BASE - 99,
    /* 2nd last with offset */
    BROTLI_SCORE_BASE - 105,
    BROTLI_SCORE_BASE - 105,
    BROTLI_SCORE_BASE - 115,
    BROTLI_SCORE_BASE - 115,
    BROTLI_SCORE_BASE - 125,
    BROTLI_SCORE_BASE - 125,
];

fn BackwardReferenceScoreH9(
    copy_length: usize,
    backward_reference_offset: usize,
    h9_opts: H9Opts,
) -> u64 {
    (u64::from(BROTLI_SCORE_BASE)
        .wrapping_add((h9_opts.literal_byte_score as u64).wrapping_mul(copy_length as u64))
        .wrapping_sub(
            (BROTLI_DISTANCE_BIT_PENALTY as u64)
                .wrapping_mul(Log2FloorNonZero(backward_reference_offset as u64) as u64),
        ))
        >> 2
}
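
// Worked example (added for clarity), with the default literal_byte_score of
// 540 and BROTLI_SCORE_BASE = 120 * 8 * 8 = 7680: a 4-byte match at distance 1
// scores (7680 + 540*4 - 120*log2(1)) >> 2 = 9840 >> 2 = 2460, while the same
// match at distance 1024 scores (7680 + 2160 - 120*10) >> 2 = 2160, so nearer
// matches of equal length always score higher.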

fn BackwardReferenceScoreUsingLastDistanceH9(
    copy_length: usize,
    distance_short_code: usize,
    h9_opts: H9Opts,
) -> u64 {
    ((h9_opts.literal_byte_score as u64)
        .wrapping_mul(copy_length as u64)
        .wrapping_add(u64::from(kDistanceShortCodeCost[distance_short_code])))
        >> 2
}

impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> AnyHasher for H9<Alloc> {
    #[inline(always)]
    fn Opts(&self) -> H9Opts {
        self.h9_opts
    }
    #[inline(always)]
    fn GetHasherCommon(&mut self) -> &mut Struct1 {
        &mut self.dict_search_stats_
    }
    #[inline(always)]
    fn HashBytes(&self, data: &[u8]) -> usize {
        let h: u32 = BROTLI_UNALIGNED_LOAD32(data).wrapping_mul(kHashMul32);
        let thirty_two: usize = 32;
        (h >> (thirty_two.wrapping_sub(H9_BUCKET_BITS))) as usize
    }
    #[inline(always)]
    fn HashTypeLength(&self) -> usize {
        4
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        4
    }
    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {
        let num_distances = H9_NUM_LAST_DISTANCES_TO_CHECK as i32;
        adv_prepare_distance_cache(distance_cache, num_distances);
    }
    fn FindLongestMatch(
        &mut self,
        dictionary: Option<&BrotliDictionary>,
        dictionary_hash: &[u16],
        data: &[u8],
        ring_buffer_mask: usize,
        distance_cache: &[i32],
        cur_ix: usize,
        max_length: usize,
        max_backward: usize,
        gap: usize,
        max_distance: usize,
        out: &mut HasherSearchResult,
    ) -> bool {
        let best_len_in: usize = out.len;
        let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
        let mut best_score: u64 = out.score;
        let mut best_len: usize = best_len_in;
        let mut is_match_found = false;
        out.len_x_code = 0usize;
        // first try the 16 short-code candidates derived from the distance cache
        for i in 0..H9_NUM_LAST_DISTANCES_TO_CHECK {
            let idx = kDistanceCacheIndex[i] as usize;
            let backward =
                (distance_cache[idx] as usize).wrapping_add(kDistanceCacheOffset[i] as usize);
            let mut prev_ix = cur_ix.wrapping_sub(backward);
            if prev_ix >= cur_ix {
                continue;
            }
            if backward > max_backward {
                continue;
            }
            prev_ix &= ring_buffer_mask;
            if cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask
                || prev_ix.wrapping_add(best_len) > ring_buffer_mask
                || data[cur_ix_masked.wrapping_add(best_len)]
                    != data[prev_ix.wrapping_add(best_len)]
            {
                continue;
            }
            {
                let len: usize =
                    FindMatchLengthWithLimit(&data[prev_ix..], &data[cur_ix_masked..], max_length);
                if len >= 3 || (len == 2 && i < 2) {
                    let score = BackwardReferenceScoreUsingLastDistanceH9(len, i, self.h9_opts);
                    if best_score < score {
                        best_score = score;
                        best_len = len;
                        out.len = best_len;
                        out.distance = backward;
                        out.score = best_score;
                        is_match_found = true;
                    }
                }
            }
        }
        // then walk this bucket's block of recently stored positions
        if max_length >= 4 && cur_ix_masked.wrapping_add(best_len) <= ring_buffer_mask {
            let key = self.HashBytes(data.split_at(cur_ix_masked).1);
            let bucket = &mut self
                .buckets_
                .slice_mut()
                .split_at_mut(key << H9_BLOCK_BITS)
                .1
                .split_at_mut(H9_BLOCK_SIZE)
                .0;
            assert!(bucket.len() > H9_BLOCK_MASK);
            assert_eq!(bucket.len(), H9_BLOCK_MASK + 1);
            let self_num_key = &mut self.num_.slice_mut()[key];
            let down = if *self_num_key > H9_BLOCK_SIZE as u16 {
                (*self_num_key as usize) - H9_BLOCK_SIZE
            } else {
                0usize
            };
            let mut i: usize = *self_num_key as usize;
            let mut prev_best_val = data[cur_ix_masked.wrapping_add(best_len)];
            while i > down {
                i -= 1;
                let mut prev_ix = bucket[i & H9_BLOCK_MASK] as usize;
                let backward = cur_ix.wrapping_sub(prev_ix);
                if backward > max_backward {
                    break;
                }
                prev_ix &= ring_buffer_mask;
                if prev_ix.wrapping_add(best_len) > ring_buffer_mask
                    || prev_best_val != data[prev_ix.wrapping_add(best_len)]
                {
                    continue;
                }
                {
                    let len = FindMatchLengthWithLimit(
                        data.split_at(prev_ix).1,
                        data.split_at(cur_ix_masked).1,
                        max_length,
                    );
                    if len >= 4 {
                        /* Comparing for >= 3 does not change the semantics, but just saves
                        a few unnecessary binary logarithms in the backward reference
                        score, since we are not interested in such short matches. */
                        let score = BackwardReferenceScoreH9(len, backward, self.h9_opts);
                        if best_score < score {
                            best_score = score;
                            best_len = len;
                            out.len = best_len;
                            out.distance = backward;
                            out.score = best_score;
                            is_match_found = true;
                            if cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask {
                                break;
                            }
                            prev_best_val = data[cur_ix_masked.wrapping_add(best_len)];
                        }
                    }
                }
            }
            bucket[*self_num_key as usize & H9_BLOCK_MASK] = cur_ix as u32;
            *self_num_key = self_num_key.wrapping_add(1);
        }
        if !is_match_found && dictionary.is_some() {
            let (_, cur_data) = data.split_at(cur_ix_masked);
            is_match_found = SearchInStaticDictionary(
                dictionary.unwrap(),
                dictionary_hash,
                self,
                cur_data,
                max_length,
                max_backward.wrapping_add(gap),
                max_distance,
                out,
                false,
            );
        }
        is_match_found
    }

    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
        let (_, data_window) = data.split_at(ix & mask);
        let key: u32 = self.HashBytes(data_window) as u32;
        let self_num_key = &mut self.num_.slice_mut()[key as usize];
        let minor_ix: usize = *self_num_key as usize & H9_BLOCK_MASK;
        self.buckets_.slice_mut()[minor_ix.wrapping_add((key as usize) << H9_BLOCK_BITS)] =
            ix as u32;
        *self_num_key = self_num_key.wrapping_add(1);
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in ix_start..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in ix_start..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn Prepare(&mut self, _one_shot: bool, _input_size: usize, _data: &[u8]) -> HowPrepared {
        if self.GetHasherCommon().is_prepared_ != 0 {
            return HowPrepared::ALREADY_PREPARED;
        }
        for item in self.num_.slice_mut().iter_mut() {
            *item = 0;
        }
        self.GetHasherCommon().is_prepared_ = 1;
        HowPrepared::NEWLY_PREPARED
    }
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    ) {
        StitchToPreviousBlockInternal(self, num_bytes, position, ringbuffer, ringbuffer_mask)
    }
}

pub trait AdvHashSpecialization: PartialEq<Self> {
    fn get_hash_mask(&self) -> u64;
    fn set_hash_mask(&mut self, params_hash_len: i32);
    fn get_k_hash_mul(&self) -> u64;
    fn HashTypeLength(&self) -> usize;
    fn StoreLookahead(&self) -> usize;
    fn load_and_mix_word(&self, data: &[u8]) -> u64;
    fn hash_shift(&self) -> i32;
    fn bucket_size(&self) -> u32;
    fn block_mask(&self) -> u32;
    fn block_size(&self) -> u32;
    fn block_bits(&self) -> i32;
}
pub struct AdvHasher<
    Specialization: AdvHashSpecialization + Sized + Clone,
    Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
> {
    pub GetHasherCommon: Struct1,
    pub specialization: Specialization, // contains hash_mask_
    pub num: <Alloc as Allocator<u16>>::AllocatedMemory,
    pub buckets: <Alloc as Allocator<u32>>::AllocatedMemory,
    pub h9_opts: H9Opts,
}

impl<
        Specialization: AdvHashSpecialization + Sized + Clone,
        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
    > PartialEq<AdvHasher<Specialization, Alloc>> for AdvHasher<Specialization, Alloc>
{
    fn eq(&self, other: &Self) -> bool {
        self.GetHasherCommon == other.GetHasherCommon
            && self.specialization == other.specialization
            && self.num.slice() == other.num.slice()
            && self.buckets.slice() == other.buckets.slice()
            && self.h9_opts == other.h9_opts
    }
}

#[derive(Clone, PartialEq)]
pub struct HQ5Sub {}
impl AdvHashSpecialization for HQ5Sub {
    #[inline(always)]
    fn hash_shift(&self) -> i32 {
        32i32 - 14 // 32 - bucket_bits
    }
    #[inline(always)]
    fn bucket_size(&self) -> u32 {
        1 << 14
    }
    #[inline(always)]
    fn block_bits(&self) -> i32 {
        4
    }
    #[inline(always)]
    fn block_size(&self) -> u32 {
        1 << 4
    }
    #[inline(always)]
    fn block_mask(&self) -> u32 {
        (1 << 4) - 1
    }
    #[inline(always)]
    fn get_hash_mask(&self) -> u64 {
        //return 0xffff_ffff_ffff_ffff;
        0xffff_ffff // make it 32 bit
    }
    #[inline(always)]
    fn get_k_hash_mul(&self) -> u64 {
        kHashMul32 as u64
    }
    #[inline(always)]
    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
        (BROTLI_UNALIGNED_LOAD32(data) as u64 * self.get_k_hash_mul()) & self.get_hash_mask()
    }
    #[inline(always)]
    fn set_hash_mask(&mut self, _params_hash_len: i32) {}
    fn HashTypeLength(&self) -> usize {
        4
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        4
    }
}

#[derive(Clone, PartialEq)]
pub struct HQ7Sub {}
impl AdvHashSpecialization for HQ7Sub {
    #[inline(always)]
    fn hash_shift(&self) -> i32 {
        32i32 - 15 // 32 - bucket_bits
    }
    #[inline(always)]
    fn bucket_size(&self) -> u32 {
        1 << 15
    }
    #[inline(always)]
    fn block_bits(&self) -> i32 {
        6
    }
    #[inline(always)]
    fn block_size(&self) -> u32 {
        1 << 6
    }
    #[inline(always)]
    fn block_mask(&self) -> u32 {
        (1 << 6) - 1
    }
    #[inline(always)]
    fn get_hash_mask(&self) -> u64 {
        //return 0xffff_ffff_ffff_ffff;
        0xffff_ffff // make it 32 bit
    }
    #[inline(always)]
    fn get_k_hash_mul(&self) -> u64 {
        kHashMul32 as u64
    }
    #[inline(always)]
    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
        (BROTLI_UNALIGNED_LOAD32(data) as u64 * self.get_k_hash_mul()) & self.get_hash_mask()
    }
    #[inline(always)]
    fn set_hash_mask(&mut self, _params_hash_len: i32) {}
    fn HashTypeLength(&self) -> usize {
        4
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        4
    }
}

#[derive(Clone, PartialEq)]
pub struct H5Sub {
    pub hash_shift_: i32,
    pub bucket_size_: u32,
    pub block_mask_: u32,
    pub block_bits_: i32,
}

impl AdvHashSpecialization for H5Sub {
    #[inline(always)]
    fn hash_shift(&self) -> i32 {
        self.hash_shift_
    }
    fn bucket_size(&self) -> u32 {
        self.bucket_size_
    }
    fn block_bits(&self) -> i32 {
        self.block_bits_
    }
    fn block_size(&self) -> u32 {
        1 << self.block_bits_
    }
    fn block_mask(&self) -> u32 {
        self.block_mask_
    }
    fn get_hash_mask(&self) -> u64 {
        //return 0xffff_ffff_ffff_ffff;
        0xffff_ffff // make it 32 bit
    }
    fn get_k_hash_mul(&self) -> u64 {
        kHashMul32 as u64
    }
    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
        (BROTLI_UNALIGNED_LOAD32(data) as u64 * self.get_k_hash_mul()) & self.get_hash_mask()
    }
    fn set_hash_mask(&mut self, _params_hash_len: i32) {}
    fn HashTypeLength(&self) -> usize {
        4
    }
    fn StoreLookahead(&self) -> usize {
        4
    }
}

#[derive(Clone, PartialEq)]
pub struct H6Sub {
    pub hash_mask: u64,
    pub hash_shift_: i32,
    pub bucket_size_: u32,
    pub block_mask_: u32,
    pub block_bits_: i32,
}

impl AdvHashSpecialization for H6Sub {
    #[inline(always)]
    fn hash_shift(&self) -> i32 {
        self.hash_shift_
    }
    #[inline(always)]
    fn bucket_size(&self) -> u32 {
        self.bucket_size_
    }
    fn block_bits(&self) -> i32 {
        self.block_bits_
    }
    fn block_size(&self) -> u32 {
        1 << self.block_bits_
    }
    #[inline(always)]
    fn block_mask(&self) -> u32 {
        self.block_mask_
    }
    #[inline(always)]
    fn get_hash_mask(&self) -> u64 {
        self.hash_mask
    }
    #[inline(always)]
    fn set_hash_mask(&mut self, params_hash_len: i32) {
        // FIXME: this assumes params_hash_len is at most 8; a larger value makes
        // 64 - 8 * params_hash_len negative and the shift overflows
        self.hash_mask = u64::MAX >> (64i32 - 8i32 * params_hash_len);
    }
    #[inline(always)]
    fn get_k_hash_mul(&self) -> u64 {
        kHashMul64Long
    }
    #[inline(always)]
    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
        (BROTLI_UNALIGNED_LOAD64(data) & self.get_hash_mask()).wrapping_mul(self.get_k_hash_mul())
    }
    #[inline(always)]
    fn HashTypeLength(&self) -> usize {
        8
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        8
    }
}

fn BackwardReferencePenaltyUsingLastDistance(distance_short_code: usize) -> u64 {
    // the same 0x0e constant masks both the (even) shift amount and the extracted penalty bits
    (39u64).wrapping_add((0x0001_ca10_u64 >> (distance_short_code & 0x0e) & 0x0e))
}
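
// Worked example (added for clarity): the constant 0x0001_ca10 packs small even
// penalties at even bit offsets. For distance_short_code = 0 the shift is 0 and
// 0x1ca10 & 0x0e == 0, giving penalty 39; for code 2 the shift is 2 and
// (0x1ca10 >> 2) & 0x0e == 4, giving 43.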

impl<
        Specialization: AdvHashSpecialization + Clone,
        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
    > AdvHasher<Specialization, Alloc>
{
    // 7 opt
    // returns a new ix_start
    fn StoreRangeOptBatch(
        &mut self,
        data: &[u8],
        mask: usize,
        ix_start: usize,
        ix_end: usize,
    ) -> usize {
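        // Batching strategy (comment added for clarity): each iteration loads 7
        // consecutive bytes into one u64 so that four overlapping 32-bit windows
        // (byte offsets 0..=3) can be hashed without touching memory again; the
        // four positions are then filed into their buckets in a single pass.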
        let lookahead = self.specialization.StoreLookahead();
        if ix_end >= ix_start + lookahead * 2 && lookahead == 4 {
            let num = self.num.slice_mut();
            let buckets = self.buckets.slice_mut();
            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
            assert_eq!(
                buckets.len(),
                self.specialization.bucket_size() as usize
                    * self.specialization.block_size() as usize
            );
            let shift = self.specialization.hash_shift();
            let chunk_count = (ix_end - ix_start) / 4;
            for chunk_id in 0..chunk_count {
                let i = (ix_start + chunk_id * 4) & mask;
                let ffffffff = 0xffff_ffff;
                let word = u64::from(data[i])
                    | (u64::from(data[i + 1]) << 8)
                    | (u64::from(data[i + 2]) << 16)
                    | (u64::from(data[i + 3]) << 24)
                    | (u64::from(data[i + 4]) << 32)
                    | (u64::from(data[i + 5]) << 40)
                    | (u64::from(data[i + 6]) << 48);
                let mixed0 = ((((word & ffffffff) * self.specialization.get_k_hash_mul())
                    & self.specialization.get_hash_mask())
                    >> shift) as usize;
                let mixed1 = (((((word >> 8) & ffffffff) * self.specialization.get_k_hash_mul())
                    & self.specialization.get_hash_mask())
                    >> shift) as usize;
                let mixed2 = (((((word >> 16) & ffffffff) * self.specialization.get_k_hash_mul())
                    & self.specialization.get_hash_mask())
                    >> shift) as usize;
                let mixed3 = (((((word >> 24) & ffffffff) * self.specialization.get_k_hash_mul())
                    & self.specialization.get_hash_mask())
                    >> shift) as usize;
                let mut num_ref0 = u32::from(num[mixed0]);
                num[mixed0] = num_ref0.wrapping_add(1) as u16;
                num_ref0 &= self.specialization.block_mask();
                let mut num_ref1 = u32::from(num[mixed1]);
                num[mixed1] = num_ref1.wrapping_add(1) as u16;
                num_ref1 &= self.specialization.block_mask();
                let mut num_ref2 = u32::from(num[mixed2]);
                num[mixed2] = num_ref2.wrapping_add(1) as u16;
                num_ref2 &= self.specialization.block_mask();
                let mut num_ref3 = u32::from(num[mixed3]);
                num[mixed3] = num_ref3.wrapping_add(1) as u16;
                num_ref3 &= self.specialization.block_mask();
                let offset0: usize =
                    (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
                let offset1: usize =
                    (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
                let offset2: usize =
                    (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
                let offset3: usize =
                    (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
                buckets[offset0] = i as u32;
                buckets[offset1] = (i + 1) as u32;
                buckets[offset2] = (i + 2) as u32;
                buckets[offset3] = (i + 3) as u32;
            }
            return ix_start + chunk_count * 4;
        }
        ix_start
    }

    fn BulkStoreRangeOptMemFetch(
        &mut self,
        data: &[u8],
        mask: usize,
        ix_start: usize,
        ix_end: usize,
    ) -> usize {
        const REG_SIZE: usize = 32usize;
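        // Staging note (comment added for clarity): when the mask is a no-op
        // (mask == usize::MAX), each chunk first copies a 35-byte window
        // (REG_SIZE + lookahead - 1) into a fixed-size local array, which lets
        // the 32-position inner loop index a small buffer of known length
        // instead of the full input slice.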
        let lookahead = self.specialization.StoreLookahead();
        if mask == usize::MAX && ix_end > ix_start + REG_SIZE && lookahead == 4 {
            const lookahead4: usize = 4;
            assert_eq!(lookahead4, lookahead);
            let mut data64 = [0u8; REG_SIZE + lookahead4 - 1];
            let del = (ix_end - ix_start) / REG_SIZE;
            let num = self.num.slice_mut();
            let buckets = self.buckets.slice_mut();
            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
            assert_eq!(
                buckets.len(),
                self.specialization.bucket_size() as usize
                    * self.specialization.block_size() as usize
            );
            let shift = self.specialization.hash_shift();
            for chunk_id in 0..del {
                let ix_offset = ix_start + chunk_id * REG_SIZE;
                data64[..REG_SIZE + lookahead4 - 1].clone_from_slice(
                    data.split_at(ix_offset)
                        .1
                        .split_at(REG_SIZE + lookahead4 - 1)
                        .0,
                );
                for quad_index in 0..(REG_SIZE >> 2) {
                    let i = quad_index << 2;
                    let ffffffff = 0xffff_ffff;
                    let word = u64::from(data64[i])
                        | (u64::from(data64[i + 1]) << 8)
                        | (u64::from(data64[i + 2]) << 16)
                        | (u64::from(data64[i + 3]) << 24)
                        | (u64::from(data64[i + 4]) << 32)
                        | (u64::from(data64[i + 5]) << 40)
                        | (u64::from(data64[i + 6]) << 48);
                    let mixed0 = ((((word & ffffffff) * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask())
                        >> shift) as usize;
                    let mixed1 = (((((word >> 8) & ffffffff)
                        * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask())
                        >> shift) as usize;
                    let mixed2 = (((((word >> 16) & ffffffff)
                        * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask())
                        >> shift) as usize;
                    let mixed3 = (((((word >> 24) & ffffffff)
                        * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask())
                        >> shift) as usize;
                    let mut num_ref0 = u32::from(num[mixed0]);
                    num[mixed0] = num_ref0.wrapping_add(1) as u16;
                    num_ref0 &= self.specialization.block_mask();
                    let mut num_ref1 = u32::from(num[mixed1]);
                    num[mixed1] = num_ref1.wrapping_add(1) as u16;
                    num_ref1 &= self.specialization.block_mask();
                    let mut num_ref2 = u32::from(num[mixed2]);
                    num[mixed2] = num_ref2.wrapping_add(1) as u16;
                    num_ref2 &= self.specialization.block_mask();
                    let mut num_ref3 = u32::from(num[mixed3]);
                    num[mixed3] = num_ref3.wrapping_add(1) as u16;
                    num_ref3 &= self.specialization.block_mask();
                    let offset0: usize =
                        (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
                    let offset1: usize =
                        (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
                    let offset2: usize =
                        (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
                    let offset3: usize =
                        (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
                    buckets[offset0] = (ix_offset + i) as u32;
                    buckets[offset1] = (ix_offset + i + 1) as u32;
                    buckets[offset2] = (ix_offset + i + 2) as u32;
                    buckets[offset3] = (ix_offset + i + 3) as u32;
                }
            }
            return ix_start + del * REG_SIZE;
        }
        ix_start
    }
1280
1281    #[cfg(feature = "benchmark")]
1282    fn BulkStoreRangeOptMemFetchLazyDupeUpdate(
1283        &mut self,
1284        data: &[u8],
1285        mask: usize,
1286        ix_start: usize,
1287        ix_end: usize,
1288    ) -> usize {
1289        const REG_SIZE: usize = 32usize;
1290        let lookahead = self.specialization.StoreLookahead();
1291        if mask == usize::MAX && ix_end > ix_start + REG_SIZE && lookahead == 4 {
1292            const lookahead4: usize = 4;
1293            assert_eq!(lookahead4, lookahead);
1294            let mut data64 = [0u8; REG_SIZE + lookahead4];
1295            let del = (ix_end - ix_start) / REG_SIZE;
1296            let num = self.num.slice_mut();
1297            let buckets = self.buckets.slice_mut();
1298            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
1299            assert_eq!(
1300                buckets.len(),
1301                self.specialization.bucket_size() as usize
1302                    * self.specialization.block_size() as usize
1303            );
1304            let shift = self.specialization.hash_shift();
1305            for chunk_id in 0..del {
1306                let ix_offset = ix_start + chunk_id * REG_SIZE;
1307                data64[..REG_SIZE + lookahead4]
1308                    .clone_from_slice(data.split_at(ix_offset).1.split_at(REG_SIZE + lookahead4).0);
1309                for quad_index in 0..(REG_SIZE >> 2) {
1310                    let i = quad_index << 2;
1311                    let ffffffff = 0xffff_ffff;
1312                    let word = u64::from(data64[i])
1313                        | (u64::from(data64[i + 1]) << 8)
1314                        | (u64::from(data64[i + 2]) << 16)
1315                        | (u64::from(data64[i + 3]) << 24)
1316                        | (u64::from(data64[i + 4]) << 32)
1317                        | (u64::from(data64[i + 5]) << 40)
1318                        | (u64::from(data64[i + 6]) << 48);
1319                    let mixed0 = ((((word & ffffffff) * self.specialization.get_k_hash_mul())
1320                        & self.specialization.get_hash_mask())
1321                        >> shift) as usize;
1322                    let mixed1 = (((((word >> 8) & ffffffff)
1323                        * self.specialization.get_k_hash_mul())
1324                        & self.specialization.get_hash_mask())
1325                        >> shift) as usize;
1326                    let mixed2 = (((((word >> 16) & ffffffff)
1327                        * self.specialization.get_k_hash_mul())
1328                        & self.specialization.get_hash_mask())
1329                        >> shift) as usize;
1330                    let mixed3 = (((((word >> 24) & ffffffff)
1331                        * self.specialization.get_k_hash_mul())
1332                        & self.specialization.get_hash_mask())
1333                        >> shift) as usize;
1334                    let mut num_ref0 = u32::from(num[mixed0]);
1335                    let mut num_ref1 = u32::from(num[mixed1]);
1336                    let mut num_ref2 = u32::from(num[mixed2]);
1337                    let mut num_ref3 = u32::from(num[mixed3]);
1338                    num[mixed0] = num_ref0.wrapping_add(1) as u16;
1339                    num[mixed1] = num_ref1.wrapping_add(1) as u16;
1340                    num[mixed2] = num_ref2.wrapping_add(1) as u16;
1341                    num[mixed3] = num_ref3.wrapping_add(1) as u16;
1342                    num_ref0 &= self.specialization.block_mask();
1343                    num_ref1 &= self.specialization.block_mask();
1344                    num_ref2 &= self.specialization.block_mask();
1345                    num_ref3 &= self.specialization.block_mask();
1346                    let offset0: usize =
1347                        (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
1348                    let offset1: usize =
1349                        (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
1350                    let offset2: usize =
1351                        (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
1352                    let offset3: usize =
1353                        (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
1354                    buckets[offset0] = (ix_offset + i) as u32;
1355                    buckets[offset1] = (ix_offset + i + 1) as u32;
1356                    buckets[offset2] = (ix_offset + i + 2) as u32;
1357                    buckets[offset3] = (ix_offset + i + 3) as u32;
1358                }
1359            }
1360            return ix_start + del * REG_SIZE;
1361        }
1362        ix_start
1363    }
1364
    #[cfg(feature = "benchmark")]
    fn BulkStoreRangeOptRandomDupeUpdate(
        &mut self,
        data: &[u8],
        mask: usize,
        ix_start: usize,
        ix_end: usize,
    ) -> usize {
        const REG_SIZE: usize = 32usize;
        let lookahead = self.specialization.StoreLookahead();
        if mask == usize::MAX && ix_end > ix_start + REG_SIZE && lookahead == 4 {
            const lookahead4: usize = 4;
            assert_eq!(lookahead4, lookahead);
            let mut data64 = [0u8; REG_SIZE + lookahead4];
            let del = (ix_end - ix_start) / REG_SIZE;
            let num = self.num.slice_mut();
            let buckets = self.buckets.slice_mut();
            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
            assert_eq!(
                buckets.len(),
                self.specialization.bucket_size() as usize
                    * self.specialization.block_size() as usize
            );
            let shift = self.specialization.hash_shift();
            for chunk_id in 0..del {
                let ix_offset = ix_start + chunk_id * REG_SIZE;
                data64[..REG_SIZE + lookahead4]
                    .clone_from_slice(data.split_at(ix_offset).1.split_at(REG_SIZE + lookahead4).0);
                for i in 0..REG_SIZE {
                    let mixed_word = ((u32::from(data64[i])
                        | (u32::from(data64[i + 1]) << 8)
                        | (u32::from(data64[i + 2]) << 16)
                        | (u32::from(data64[i + 3]) << 24))
                        as u64
                        * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask();
                    let key = mixed_word >> shift;
                    let minor_ix: usize = chunk_id & self.specialization.block_mask() as usize; // GIGANTIC HAX: overwrite the first option instead of `*num_ref as usize & block_mask`
                    let offset: usize =
                        minor_ix + (key << self.specialization.block_bits()) as usize;
                    buckets[offset] = (ix_offset + i) as u32;
                }
            }
            for (bucket_index, num_ref) in num.iter_mut().enumerate() {
                let region = buckets
                    .split_at_mut(bucket_index << self.specialization.block_bits())
                    .1
                    .split_at_mut(self.specialization.block_size() as usize)
                    .0;
                let mut lnum = 0usize;
                for block_index in 0..self.specialization.block_size() as usize {
                    if region[block_index] != 0 {
                        let byte_addr = region[block_index];
                        region[lnum] = byte_addr;
                        lnum += 1;
                    }
                }
                *num_ref = lnum as u16;
            }
            return ix_start + del * REG_SIZE;
        }
        ix_start
    }
}

impl<
        Specialization: AdvHashSpecialization + Clone,
        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
    > AnyHasher for AdvHasher<Specialization, Alloc>
{
    fn Opts(&self) -> H9Opts {
        self.h9_opts
    }
    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {
        let num_distances = self.GetHasherCommon.params.num_last_distances_to_check;
        adv_prepare_distance_cache(distance_cache, num_distances);
    }
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    ) {
        StitchToPreviousBlockInternal(self, num_bytes, position, ringbuffer, ringbuffer_mask);
    }
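    /// For small one-shot inputs only the buckets that will actually be
    /// touched are cleared; otherwise the whole `num` table is zeroed.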
    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared {
        if self.GetHasherCommon.is_prepared_ != 0 {
            return HowPrepared::ALREADY_PREPARED;
        }
        let partial_prepare_threshold = self.specialization.bucket_size() as usize >> 6;
        if one_shot && input_size <= partial_prepare_threshold {
            for i in 0..input_size {
                let key = self.HashBytes(&data[i..]);
                self.num.slice_mut()[key] = 0;
            }
        } else {
            for item in
                self.num.slice_mut()[..(self.specialization.bucket_size() as usize)].iter_mut()
            {
                *item = 0;
            }
        }
        self.GetHasherCommon.is_prepared_ = 1;
        HowPrepared::NEWLY_PREPARED
    }

    fn GetHasherCommon(&mut self) -> &mut Struct1 {
        &mut self.GetHasherCommon
    }
    fn HashTypeLength(&self) -> usize {
        self.specialization.HashTypeLength()
    }
    fn StoreLookahead(&self) -> usize {
        self.specialization.StoreLookahead()
    }
    fn HashBytes(&self, data: &[u8]) -> usize {
        let shift = self.specialization.hash_shift();
        let h: u64 = self.specialization.load_and_mix_word(data);
        (h >> shift) as u32 as usize
    }
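    /// Hashes four positions spaced two bytes apart (ix, ix+2, ix+4, ix+6)
    /// out of two wide loads instead of four scalar `Store` calls; falls
    /// back to scalar stores when the store lookahead is not 4.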
    fn StoreEvenVec4(&mut self, data: &[u8], mask: usize, ix: usize) {
        if self.specialization.StoreLookahead() != 4 {
            for i in 0..4 {
                self.Store(data, mask, ix + i * 2);
            }
            return;
        }
        let shift = self.specialization.hash_shift();
        let num = self.num.slice_mut();
        let buckets = self.buckets.slice_mut();
        let li = ix & mask;
        let lword = u64::from(data[li])
            | (u64::from(data[li + 1]) << 8)
            | (u64::from(data[li + 2]) << 16)
            | (u64::from(data[li + 3]) << 24)
            | (u64::from(data[li + 4]) << 32)
            | (u64::from(data[li + 5]) << 40)
            | (u64::from(data[li + 6]) << 48)
            | (u64::from(data[li + 7]) << 56);
        let hi = (ix + 8) & mask;
        let hword = u64::from(data[hi]) | (u64::from(data[hi + 1]) << 8);
        let mixed0 = ((((lword & 0xffff_ffff) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mixed1 = (((((lword >> 16) & 0xffff_ffff) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mixed2 = (((((lword >> 32) & 0xffff_ffff) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mixed3 = ((((((hword & 0xffff) << 16) | ((lword >> 48) & 0xffff))
            * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mut num_ref0 = u32::from(num[mixed0]);
        num[mixed0] = num_ref0.wrapping_add(1) as u16;
        num_ref0 &= self.specialization.block_mask();
        let mut num_ref1 = u32::from(num[mixed1]);
        num[mixed1] = num_ref1.wrapping_add(1) as u16;
        num_ref1 &= self.specialization.block_mask();
        let mut num_ref2 = u32::from(num[mixed2]);
        num[mixed2] = num_ref2.wrapping_add(1) as u16;
        num_ref2 &= self.specialization.block_mask();
        let mut num_ref3 = u32::from(num[mixed3]);
        num[mixed3] = num_ref3.wrapping_add(1) as u16;
        num_ref3 &= self.specialization.block_mask();
        let offset0: usize = (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
        let offset1: usize = (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
        let offset2: usize = (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
        let offset3: usize = (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
        buckets[offset0] = ix as u32;
        buckets[offset1] = (ix + 2) as u32;
        buckets[offset2] = (ix + 4) as u32;
        buckets[offset3] = (ix + 6) as u32;
    }
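    /// Same idea with a stride of four: hashes ix, ix+4, ix+8 and ix+12
    /// from four 32-bit loads.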
    fn Store4Vec4(&mut self, data: &[u8], mask: usize, ix: usize) {
        if self.specialization.StoreLookahead() != 4 {
            for i in 0..4 {
                self.Store(data, mask, ix + i * 4);
            }
            return;
        }
        let shift = self.specialization.hash_shift();
        let num = self.num.slice_mut();
        let buckets = self.buckets.slice_mut();
        let li = ix & mask;
        let llword = u32::from(data[li])
            | (u32::from(data[li + 1]) << 8)
            | (u32::from(data[li + 2]) << 16)
            | (u32::from(data[li + 3]) << 24);
        let luword = u32::from(data[li + 4])
            | (u32::from(data[li + 5]) << 8)
            | (u32::from(data[li + 6]) << 16)
            | (u32::from(data[li + 7]) << 24);
        let ui = (ix + 8) & mask;
        let ulword = u32::from(data[ui])
            | (u32::from(data[ui + 1]) << 8)
            | (u32::from(data[ui + 2]) << 16)
            | (u32::from(data[ui + 3]) << 24);

        let uuword = u32::from(data[ui + 4])
            | (u32::from(data[ui + 5]) << 8)
            | (u32::from(data[ui + 6]) << 16)
            | (u32::from(data[ui + 7]) << 24);

        let mixed0 = (((u64::from(llword) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mixed1 = (((u64::from(luword) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mixed2 = (((u64::from(ulword) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mixed3 = (((u64::from(uuword) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mut num_ref0 = u32::from(num[mixed0]);
        num[mixed0] = num_ref0.wrapping_add(1) as u16;
        num_ref0 &= self.specialization.block_mask();
        let mut num_ref1 = u32::from(num[mixed1]);
        num[mixed1] = num_ref1.wrapping_add(1) as u16;
        num_ref1 &= self.specialization.block_mask();
        let mut num_ref2 = u32::from(num[mixed2]);
        num[mixed2] = num_ref2.wrapping_add(1) as u16;
        num_ref2 &= self.specialization.block_mask();
        let mut num_ref3 = u32::from(num[mixed3]);
        num[mixed3] = num_ref3.wrapping_add(1) as u16;
        num_ref3 &= self.specialization.block_mask();
        let offset0: usize = (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
        let offset1: usize = (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
        let offset2: usize = (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
        let offset3: usize = (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
        buckets[offset0] = ix as u32;
        buckets[offset1] = (ix + 4) as u32;
        buckets[offset2] = (ix + 8) as u32;
        buckets[offset3] = (ix + 12) as u32;
    }
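    /// Scalar store: hashes one position and appends it to that bucket's
    /// ring of `block_size` slots, bumping the insertion counter in `num`.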
    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
        let (_, data_window) = data.split_at(ix & mask);
        let key: u32 = self.HashBytes(data_window) as u32;
        let minor_ix: usize =
            (self.num.slice()[key as usize] as u32 & self.specialization.block_mask()) as usize;
        let offset: usize =
            minor_ix.wrapping_add((key << self.specialization.block_bits()) as usize);
        self.buckets.slice_mut()[offset] = ix as u32;
        {
            let _lhs = &mut self.num.slice_mut()[key as usize];
            *_lhs = _lhs.wrapping_add(1);
        }
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in self.StoreRangeOptBatch(data, mask, ix_start, ix_end)..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, mut ix_start: usize, ix_end: usize) {
        /*
        if ix_start + 4096 < ix_end {
          for vec_offset in 0..(ix_end - ix_start - 4096) / 16 {
            self.Store4Vec4(data, mask, ix_start + vec_offset * 16);
          }
          ix_start += 16 * ((ix_end - ix_start - 4096) / 16);
        }
        if ix_start + 512 < ix_end {
          for vec_offset in 0..(ix_end - ix_start - 512) / 8 {
            self.StoreEvenVec4(data, mask, ix_start + vec_offset * 8);
            //self.StoreRange(data, mask, ix_start + vec_offset * 8, ix_start + (1 + vec_offset) * 8);
          }
          ix_start += 8 * ((ix_end - ix_start - 512) / 8);
        }
         */
        ix_start = self.BulkStoreRangeOptMemFetch(data, mask, ix_start, ix_end);
        for i in ix_start..ix_end {
            self.Store(data, mask, i);
        }
    }

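    /// Looks for the longest match at `cur_ix`, in order: the recent
    /// distances in `distance_cache`, then this position's hash bucket, and
    /// finally (only if nothing matched) the static dictionary.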
    fn FindLongestMatch(
        &mut self,
        dictionary: Option<&BrotliDictionary>,
        dictionary_hash: &[u16],
        data: &[u8],
        ring_buffer_mask: usize,
        distance_cache: &[i32],
        cur_ix: usize,
        max_length: usize,
        max_backward: usize,
        gap: usize,
        max_distance: usize,
        out: &mut HasherSearchResult,
    ) -> bool {
        let opts = self.Opts();
        let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
        let mut is_match_found = false;
        let mut best_score: u64 = out.score;
        let mut best_len: usize = out.len;
        out.len = 0usize;
        out.len_x_code = 0usize;
        let cur_data = data.split_at(cur_ix_masked).1;
        for i in 0..self.GetHasherCommon.params.num_last_distances_to_check as usize {
            let backward: usize = distance_cache[i] as usize;
            let mut prev_ix: usize = cur_ix.wrapping_sub(backward);
            if prev_ix >= cur_ix || backward > max_backward {
                continue;
            }
            prev_ix &= ring_buffer_mask;
            if cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask
                || prev_ix.wrapping_add(best_len) > ring_buffer_mask
                || cur_data[best_len] != data[prev_ix.wrapping_add(best_len)]
            {
                continue;
            }
            let prev_data = data.split_at(prev_ix).1;

            let len = FindMatchLengthWithLimit(prev_data, cur_data, max_length);
            if len >= 3 || (len == 2 && i < 2) {
                let mut score: u64 = BackwardReferenceScoreUsingLastDistance(len, opts);
                if best_score < score {
                    if i != 0 {
                        score = score.wrapping_sub(BackwardReferencePenaltyUsingLastDistance(i));
                    }
                    if best_score < score {
                        best_score = score;
                        best_len = len;
                        out.len = best_len;
                        out.distance = backward;
                        out.score = best_score;
                        is_match_found = true;
                    }
                }
            }
        }

        let key: u32 = self.HashBytes(cur_data) as u32;
        let common_block_bits = self.specialization.block_bits();
        let num_ref_mut = &mut self.num.slice_mut()[key as usize];
        let num_copy = *num_ref_mut;
        let bucket: &mut [u32] = self
            .buckets
            .slice_mut()
            .split_at_mut((key << common_block_bits) as usize)
            .1
            .split_at_mut(self.specialization.block_size() as usize)
            .0;
        assert!(bucket.len() > self.specialization.block_mask() as usize);
        if num_copy != 0 {
            let down: usize = max(
                i32::from(num_copy) - self.specialization.block_size() as i32,
                0,
            ) as usize;
            let mut i = num_copy as usize;
            while i > down {
                i -= 1;
                let mut prev_ix = bucket[i & self.specialization.block_mask() as usize] as usize;
                let backward = cur_ix.wrapping_sub(prev_ix);
                prev_ix &= ring_buffer_mask;
                if cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask
                    || prev_ix.wrapping_add(best_len) > ring_buffer_mask
                    || cur_data[best_len] != data[prev_ix.wrapping_add(best_len)]
                {
                    if backward > max_backward {
                        break;
                    }
                    continue;
                }
                if backward > max_backward {
                    break;
                }
                let prev_data = data.split_at(prev_ix).1;
                let len = FindMatchLengthWithLimitMin4(prev_data, cur_data, max_length);
                if len != 0 {
                    let score: u64 = BackwardReferenceScore(len, backward, opts);
                    if best_score < score {
                        best_score = score;
                        best_len = len;
                        out.len = best_len;
                        out.distance = backward;
                        out.score = best_score;
                        is_match_found = true;
                    }
                }
            }
        }
        bucket[(num_copy as u32 & self.specialization.block_mask()) as usize] = cur_ix as u32;
        *num_ref_mut = num_ref_mut.wrapping_add(1);

        if !is_match_found {
            if let Some(dictionary) = dictionary {
                let (_, cur_data) = data.split_at(cur_ix_masked);
                is_match_found = SearchInStaticDictionary(
                    dictionary,
                    dictionary_hash,
                    self,
                    cur_data,
                    max_length,
                    max_backward.wrapping_add(gap),
                    max_distance,
                    out,
                    false,
                );
            }
        }
        is_match_found
    }
}

pub struct BankH40 {
    pub slots: [SlotH40; 65536],
}

pub struct BankH41 {
    pub slots: [SlotH41; 65536],
}

pub struct BankH42 {
    pub slots: [SlotH42; 512],
}

pub struct SlotH40 {
    pub delta: u16,
    pub next: u16,
}
pub struct SlotH41 {
    pub delta: u16,
    pub next: u16,
}

pub struct SlotH42 {
    pub delta: u16,
    pub next: u16,
}

// UNSUPPORTED, for now.
pub struct H40 {
    pub common: Struct1,
    pub addr: [u32; 32768],
    pub head: [u16; 32768],
    pub tiny_hash: [u8; 65536],
    pub banks: [BankH40; 1],
    pub free_slot_idx: [u16; 1],
    pub max_hops: usize,
}

pub struct H41 {
    pub common: Struct1,
    pub addr: [u32; 32768],
    pub head: [u16; 32768],
    pub tiny_hash: [u8; 65536],
    pub banks: [BankH41; 1],
    pub free_slot_idx: [u16; 1],
    pub max_hops: usize,
}

pub struct H42 {
    pub common: Struct1,
    pub addr: [u32; 32768],
    pub head: [u16; 32768],
    pub tiny_hash: [u8; 65536],
    pub banks: [BankH42; 512],
    pub max_hops: usize,
}

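// Score model: (literal_byte_score / 4) per copied byte plus a constant
// 30 * 8 * sizeof(u64) base; ordinary backward references additionally pay
// 30 * log2(distance), while last-distance matches instead get a flat +15.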
fn BackwardReferenceScoreUsingLastDistance(copy_length: usize, h9_opts: H9Opts) -> u64 {
    ((h9_opts.literal_byte_score as u64) >> 2)
        .wrapping_mul(copy_length as u64)
        .wrapping_add((30u64 * 8u64).wrapping_mul(::core::mem::size_of::<u64>() as u64))
        .wrapping_add(15)
}

fn BackwardReferenceScore(
    copy_length: usize,
    backward_reference_offset: usize,
    h9_opts: H9Opts,
) -> u64 {
    (30u64 * 8u64)
        .wrapping_mul(::core::mem::size_of::<u64>() as u64)
        .wrapping_add(((h9_opts.literal_byte_score as usize) >> 2).wrapping_mul(copy_length) as u64)
        .wrapping_sub(
            (30u64).wrapping_mul(Log2FloorNonZero(backward_reference_offset as u64) as u64),
        )
}

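/// 14-bit multiplicative hash used to index the static dictionary hash table.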
fn Hash14(data: &[u8]) -> u32 {
    let h: u32 = BROTLI_UNALIGNED_LOAD32(data).wrapping_mul(kHashMul32);
    h >> (32i32 - 14i32)
}

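/// Decodes a packed dictionary item (word length in the low 5 bits, word
/// index above) and scores the resulting match; matches shorter than the
/// dictionary word are mapped onto one of the cutoff transforms.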
fn TestStaticDictionaryItem(
    dictionary: &BrotliDictionary,
    item: usize,
    data: &[u8],
    max_length: usize,
    max_backward: usize,
    max_distance: usize,
    h9_opts: H9Opts,
    out: &mut HasherSearchResult,
) -> i32 {
    let backward: usize;

    let len: usize = item & 0x1fusize;
    let dist: usize = item >> 5;
    let offset: usize =
        (dictionary.offsets_by_length[len] as usize).wrapping_add(len.wrapping_mul(dist));
    if len > max_length {
        return 0i32;
    }
    let matchlen: usize = FindMatchLengthWithLimit(data, &dictionary.data[offset..], len);
    if matchlen.wrapping_add(kCutoffTransformsCount as usize) <= len || matchlen == 0usize {
        return 0i32;
    }
    {
        let cut: u64 = len.wrapping_sub(matchlen) as u64;
        let transform_id: usize =
            (cut << 2).wrapping_add(kCutoffTransforms >> cut.wrapping_mul(6) & 0x3f) as usize;
        backward = max_backward
            .wrapping_add(dist)
            .wrapping_add(1)
            .wrapping_add(transform_id << dictionary.size_bits_by_length[len] as i32);
    }
    if backward > max_distance {
        return 0i32;
    }
    let score: u64 = BackwardReferenceScore(matchlen, backward, h9_opts);
    if score < out.score {
        return 0i32;
    }
    out.len = matchlen;
    out.len_x_code = len ^ matchlen;
    out.distance = backward;
    out.score = score;
    1i32
}

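/// Probes one (`shallow`) or two consecutive slots of the dictionary hash
/// table, but refuses to search at all once fewer than one lookup in 128
/// has produced a match.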
fn SearchInStaticDictionary<HasherType: AnyHasher>(
    dictionary: &BrotliDictionary,
    dictionary_hash: &[u16],
    handle: &mut HasherType,
    data: &[u8],
    max_length: usize,
    max_backward: usize,
    max_distance: usize,
    out: &mut HasherSearchResult,
    shallow: bool,
) -> bool {
    let mut is_match_found = false;
    let opts = handle.Opts();
    let xself: &mut Struct1 = handle.GetHasherCommon();
    if xself.dict_num_matches < xself.dict_num_lookups >> 7 {
        return false;
    }
    let mut key: usize = (Hash14(data) << 1) as usize; //FIXME: works for any kind of hasher??
    let mut i: usize = 0;
    while i < if shallow { 1 } else { 2 } {
        let item: usize = dictionary_hash[key] as usize;
        xself.dict_num_lookups = xself.dict_num_lookups.wrapping_add(1);
        if item != 0usize {
            let item_matches: i32 = TestStaticDictionaryItem(
                dictionary,
                item,
                data,
                max_length,
                max_backward,
                max_distance,
                opts,
                out,
            );
            if item_matches != 0 {
                xself.dict_num_matches = xself.dict_num_matches.wrapping_add(1);
                is_match_found = true;
            }
        }
        i = i.wrapping_add(1);
        key = key.wrapping_add(1);
    }
    is_match_found
}

impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
    for BasicHasher<H2Sub<Alloc>>
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut ret = BasicHasher::<H2Sub<Alloc>> {
            GetHasherCommon: self.GetHasherCommon.clone(),
            buckets_: H2Sub {
                buckets_: allocate::<u32, _>(m, self.buckets_.buckets_.len()),
            },
            h9_opts: self.h9_opts,
        };
        ret.buckets_
            .buckets_
            .slice_mut()
            .clone_from_slice(self.buckets_.buckets_.slice());
        ret
    }
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
    for BasicHasher<H3Sub<Alloc>>
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut ret = BasicHasher::<H3Sub<Alloc>> {
            GetHasherCommon: self.GetHasherCommon.clone(),
            buckets_: H3Sub::<Alloc> {
                buckets_: allocate::<u32, _>(m, self.buckets_.buckets_.len()),
            },
            h9_opts: self.h9_opts,
        };
        ret.buckets_
            .buckets_
            .slice_mut()
            .clone_from_slice(self.buckets_.buckets_.slice());
        ret
    }
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
    for BasicHasher<H4Sub<Alloc>>
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut ret = BasicHasher::<H4Sub<Alloc>> {
            GetHasherCommon: self.GetHasherCommon.clone(),
            buckets_: H4Sub::<Alloc> {
                buckets_: allocate::<u32, _>(m, self.buckets_.buckets_.len()),
            },
            h9_opts: self.h9_opts,
        };
        ret.buckets_
            .buckets_
            .slice_mut()
            .clone_from_slice(self.buckets_.buckets_.slice());
        ret
    }
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
    for BasicHasher<H54Sub<Alloc>>
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut ret = BasicHasher::<H54Sub<Alloc>> {
            GetHasherCommon: self.GetHasherCommon.clone(),
            buckets_: H54Sub::<Alloc> {
                buckets_: allocate::<u32, _>(m, self.buckets_.buckets_.len()),
            },
            h9_opts: self.h9_opts,
        };
        ret.buckets_
            .buckets_
            .slice_mut()
            .clone_from_slice(self.buckets_.buckets_.slice());
        ret
    }
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc> for H9<Alloc> {
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut num = allocate::<u16, _>(m, self.num_.len());
        num.slice_mut().clone_from_slice(self.num_.slice());
        let mut buckets = allocate::<u32, _>(m, self.buckets_.len());
        buckets.slice_mut().clone_from_slice(self.buckets_.slice());
        H9::<Alloc> {
            num_: num,
            buckets_: buckets,
            dict_search_stats_: self.dict_search_stats_.clone(),
            h9_opts: self.h9_opts,
        }
    }
}
impl<
        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
        Special: AdvHashSpecialization + Sized + Clone,
    > CloneWithAlloc<Alloc> for AdvHasher<Special, Alloc>
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut num = allocate::<u16, _>(m, self.num.len());
        num.slice_mut().clone_from_slice(self.num.slice());
        let mut buckets = allocate::<u32, _>(m, self.buckets.len());
        buckets.slice_mut().clone_from_slice(self.buckets.slice());
        AdvHasher::<Special, Alloc> {
            GetHasherCommon: self.GetHasherCommon.clone(),
            specialization: self.specialization.clone(),
            num,
            buckets,
            h9_opts: self.h9_opts,
        }
    }
}

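/// Tagged union over the concrete hasher implementations; the live variant
/// is selected from `BrotliHasherParams::type_` when the encoder is set up.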
pub enum UnionHasher<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> {
    Uninit,
    H2(BasicHasher<H2Sub<Alloc>>),
    H3(BasicHasher<H3Sub<Alloc>>),
    H4(BasicHasher<H4Sub<Alloc>>),
    H54(BasicHasher<H54Sub<Alloc>>),
    H5(AdvHasher<H5Sub, Alloc>),
    H5q7(AdvHasher<HQ7Sub, Alloc>),
    H5q5(AdvHasher<HQ5Sub, Alloc>),
    H6(AdvHasher<H6Sub, Alloc>),
    H9(H9<Alloc>),
    H10(H10<Alloc, H10Buckets<Alloc>, H10DefaultParams>),
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> PartialEq<UnionHasher<Alloc>>
    for UnionHasher<Alloc>
{
    fn eq(&self, other: &UnionHasher<Alloc>) -> bool {
        match (self, other) {
            (UnionHasher::H2(hasher), UnionHasher::H2(otherh)) => *hasher == *otherh,
            (UnionHasher::H3(hasher), UnionHasher::H3(otherh)) => *hasher == *otherh,
            (UnionHasher::H4(hasher), UnionHasher::H4(otherh)) => *hasher == *otherh,
            (UnionHasher::H54(hasher), UnionHasher::H54(otherh)) => *hasher == *otherh,
            (UnionHasher::H5(hasher), UnionHasher::H5(otherh)) => *hasher == *otherh,
            (UnionHasher::H5q7(hasher), UnionHasher::H5q7(otherh)) => *hasher == *otherh,
            (UnionHasher::H5q5(hasher), UnionHasher::H5q5(otherh)) => *hasher == *otherh,
            (UnionHasher::H6(hasher), UnionHasher::H6(otherh)) => *hasher == *otherh,
            (UnionHasher::H9(hasher), UnionHasher::H9(otherh)) => *hasher == *otherh,
            (UnionHasher::H10(hasher), UnionHasher::H10(otherh)) => *hasher == *otherh,
            (UnionHasher::Uninit, UnionHasher::Uninit) => true,
            _ => false,
        }
    }
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
    for UnionHasher<Alloc>
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        match *self {
            UnionHasher::H2(ref hasher) => UnionHasher::H2(hasher.clone_with_alloc(m)),
            UnionHasher::H3(ref hasher) => UnionHasher::H3(hasher.clone_with_alloc(m)),
            UnionHasher::H4(ref hasher) => UnionHasher::H4(hasher.clone_with_alloc(m)),
            UnionHasher::H5(ref hasher) => UnionHasher::H5(hasher.clone_with_alloc(m)),
            UnionHasher::H5q7(ref hasher) => UnionHasher::H5q7(hasher.clone_with_alloc(m)),
            UnionHasher::H5q5(ref hasher) => UnionHasher::H5q5(hasher.clone_with_alloc(m)),
            UnionHasher::H6(ref hasher) => UnionHasher::H6(hasher.clone_with_alloc(m)),
            UnionHasher::H54(ref hasher) => UnionHasher::H54(hasher.clone_with_alloc(m)),
            UnionHasher::H9(ref hasher) => UnionHasher::H9(hasher.clone_with_alloc(m)),
            UnionHasher::H10(ref hasher) => UnionHasher::H10(hasher.clone_with_alloc(m)),
            UnionHasher::Uninit => UnionHasher::Uninit,
        }
    }
}
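// Forward an AnyHasher method call to whichever variant is currently live.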
macro_rules! match_all_hashers_mut {
    ($xself : expr, $func_call : ident, $( $args:expr),*) => {
        match $xself {
            &mut UnionHasher::H2(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H3(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H4(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H5(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H5q7(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H5q5(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H6(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H54(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H9(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H10(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::Uninit => panic!("UNINITIALIZED"),
        }
    };
}
macro_rules! match_all_hashers {
    ($xself : expr, $func_call : ident, $( $args:expr),*) => {
        match $xself {
            &UnionHasher::H2(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H3(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H4(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H5(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H5q7(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H5q5(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H6(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H54(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H9(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H10(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::Uninit => panic!("UNINITIALIZED"),
        }
    };
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> AnyHasher for UnionHasher<Alloc> {
    fn Opts(&self) -> H9Opts {
        match_all_hashers!(self, Opts,)
    }
    fn GetHasherCommon(&mut self) -> &mut Struct1 {
        match_all_hashers_mut!(self, GetHasherCommon,)
    } /*
      fn GetH10Tree(&mut self) -> Option<&mut H10<AllocU32, H10Buckets, H10DefaultParams>> {
        return match_all_hashers_mut!(self, GetH10Tree,);
      }*/
    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared {
        match_all_hashers_mut!(self, Prepare, one_shot, input_size, data)
    }
    fn HashBytes(&self, data: &[u8]) -> usize {
        match_all_hashers!(self, HashBytes, data)
    }
    fn HashTypeLength(&self) -> usize {
        match_all_hashers!(self, HashTypeLength,)
    }
    fn StoreLookahead(&self) -> usize {
        match_all_hashers!(self, StoreLookahead,)
    }
    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {
        match_all_hashers!(self, PrepareDistanceCache, distance_cache)
    }
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    ) {
        match_all_hashers_mut!(
            self,
            StitchToPreviousBlock,
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask
        )
    }
    fn FindLongestMatch(
        &mut self,
        dictionary: Option<&BrotliDictionary>,
        dictionary_hash: &[u16],
        data: &[u8],
        ring_buffer_mask: usize,
        distance_cache: &[i32],
        cur_ix: usize,
        max_length: usize,
        max_backward: usize,
        gap: usize,
        max_distance: usize,
        out: &mut HasherSearchResult,
    ) -> bool {
        match_all_hashers_mut!(
            self,
            FindLongestMatch,
            dictionary,
            dictionary_hash,
            data,
            ring_buffer_mask,
            distance_cache,
            cur_ix,
            max_length,
            max_backward,
            gap,
            max_distance,
            out
        )
    }
    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
        match_all_hashers_mut!(self, Store, data, mask, ix)
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        match_all_hashers_mut!(self, StoreRange, data, mask, ix_start, ix_end)
    }
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        match_all_hashers_mut!(self, BulkStoreRange, data, mask, ix_start, ix_end)
    }
}

impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> UnionHasher<Alloc> {
    pub fn free(&mut self, alloc: &mut Alloc) {
        match self {
            &mut UnionHasher::H2(ref mut hasher) => {
                <Alloc as Allocator<u32>>::free_cell(
                    alloc,
                    core::mem::take(&mut hasher.buckets_.buckets_),
                );
            }
            &mut UnionHasher::H3(ref mut hasher) => {
                <Alloc as Allocator<u32>>::free_cell(
                    alloc,
                    core::mem::take(&mut hasher.buckets_.buckets_),
                );
            }
            &mut UnionHasher::H4(ref mut hasher) => {
                <Alloc as Allocator<u32>>::free_cell(
                    alloc,
                    core::mem::take(&mut hasher.buckets_.buckets_),
                );
            }
            &mut UnionHasher::H54(ref mut hasher) => {
                <Alloc as Allocator<u32>>::free_cell(
                    alloc,
                    core::mem::take(&mut hasher.buckets_.buckets_),
                );
            }
            &mut UnionHasher::H5q7(ref mut hasher) => {
                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
            }
            &mut UnionHasher::H5q5(ref mut hasher) => {
                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
            }
            &mut UnionHasher::H5(ref mut hasher) => {
                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
            }
            &mut UnionHasher::H6(ref mut hasher) => {
                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
            }
            &mut UnionHasher::H9(ref mut hasher) => {
                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num_));
                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets_));
            }
            &mut UnionHasher::H10(ref mut hasher) => {
                hasher.free(alloc);
            }
            &mut UnionHasher::Uninit => {}
        }
        *self = UnionHasher::<Alloc>::default();
    }
}

impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> Default for UnionHasher<Alloc> {
    fn default() -> Self {
        UnionHasher::Uninit
    }
}

/*UnionHasher::H2(BasicHasher {
GetHasherCommon:Struct1{params:BrotliHasherParams{
 type_:2,
 block_bits: 8,
 bucket_bits:16,
 hash_len: 4,
 num_last_distances_to_check:0},
is_prepared_:0,
dict_num_lookups:0,
dict_num_matches:0,
},
buckets_:H2Sub{
buckets_:[0;65537],
},
})
*/
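/// Greedy match loop shared by all non-Zopfli hashers: find the best match
/// at the current position, lazily re-check one byte later (a rival must
/// win by `cost_diff_lazy` to defer emission, at most four deferrals in a
/// row), emit a Command, then skip ahead; long match-free stretches switch
/// to progressively sparser hashing via the random-heuristics window.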
fn CreateBackwardReferences<AH: AnyHasher>(
    dictionary: Option<&BrotliDictionary>,
    dictionary_hash: &[u16],
    num_bytes: usize,
    mut position: usize,
    ringbuffer: &[u8],
    ringbuffer_mask: usize,
    params: &BrotliEncoderParams,
    hasher: &mut AH,
    dist_cache: &mut [i32],
    last_insert_len: &mut usize,
    mut commands: &mut [Command],
    num_commands: &mut usize,
    num_literals: &mut usize,
) {
    let gap = 0usize;
    let max_backward_limit: usize = (1usize << params.lgwin).wrapping_sub(16);
    let mut new_commands_count: usize = 0;
    let mut insert_length: usize = *last_insert_len;
    let pos_end: usize = position.wrapping_add(num_bytes);
    let store_end: usize = if num_bytes >= hasher.StoreLookahead() {
        position
            .wrapping_add(num_bytes)
            .wrapping_sub(hasher.StoreLookahead())
            .wrapping_add(1)
    } else {
        position
    };
    let random_heuristics_window_size: usize = LiteralSpreeLengthForSparseSearch(params);
    let mut apply_random_heuristics: usize = position.wrapping_add(random_heuristics_window_size);
    let kMinScore: u64 = (30u64 * 8)
        .wrapping_mul(::core::mem::size_of::<u64>() as u64)
        .wrapping_add(100);
    hasher.PrepareDistanceCache(dist_cache);
    while position.wrapping_add(hasher.HashTypeLength()) < pos_end {
        let mut max_length: usize = pos_end.wrapping_sub(position);
        let mut max_distance: usize = min(position, max_backward_limit);
        let mut sr = HasherSearchResult {
            len: 0,
            len_x_code: 0,
            distance: 0,
            score: kMinScore,
        };
        if hasher.FindLongestMatch(
            dictionary,
            dictionary_hash,
            ringbuffer,
            ringbuffer_mask,
            dist_cache,
            position,
            max_length,
            max_distance,
            gap,
            params.dist.max_distance,
            &mut sr,
        ) {
            let mut delayed_backward_references_in_row: i32 = 0i32;
            max_length = max_length.wrapping_sub(1);
            'break6: loop {
                'continue7: loop {
                    let cost_diff_lazy: u64 = 175;

                    let mut sr2 = HasherSearchResult {
                        len: if params.quality < 5 {
                            min(sr.len.wrapping_sub(1), max_length)
                        } else {
                            0usize
                        },
                        len_x_code: 0,
                        distance: 0,
                        score: kMinScore,
                    };
                    max_distance = min(position.wrapping_add(1), max_backward_limit);
                    let is_match_found: bool = hasher.FindLongestMatch(
                        dictionary,
                        dictionary_hash,
                        ringbuffer,
                        ringbuffer_mask,
                        dist_cache,
                        position.wrapping_add(1),
                        max_length,
                        max_distance,
                        gap,
                        params.dist.max_distance,
                        &mut sr2,
                    );
                    if is_match_found && (sr2.score >= sr.score.wrapping_add(cost_diff_lazy)) {
                        position = position.wrapping_add(1);
                        insert_length = insert_length.wrapping_add(1);
                        sr = sr2;
                        delayed_backward_references_in_row += 1;
                        if delayed_backward_references_in_row < 4i32
                            && (position.wrapping_add(hasher.HashTypeLength()) < pos_end)
                        {
                            break 'continue7;
                        }
                    }
                    break 'break6;
                }
                max_length = max_length.wrapping_sub(1);
            }
            apply_random_heuristics = position
                .wrapping_add((2usize).wrapping_mul(sr.len))
                .wrapping_add(random_heuristics_window_size);
            max_distance = min(position, max_backward_limit);
            {
                let distance_code: usize =
                    ComputeDistanceCode(sr.distance, max_distance, dist_cache);
                if sr.distance <= max_distance && (distance_code > 0usize) {
                    dist_cache[3] = dist_cache[2];
                    dist_cache[2] = dist_cache[1];
                    dist_cache[1] = dist_cache[0];
                    dist_cache[0] = sr.distance as i32;
                    hasher.PrepareDistanceCache(dist_cache);
                }
                new_commands_count += 1;

                let (old, new_commands) = core::mem::take(&mut commands).split_at_mut(1);
                commands = new_commands;
                old[0].init(
                    &params.dist,
                    insert_length,
                    sr.len,
                    sr.len ^ sr.len_x_code,
                    distance_code,
                );
            }
            *num_literals = num_literals.wrapping_add(insert_length);
            insert_length = 0usize;
            hasher.StoreRange(
                ringbuffer,
                ringbuffer_mask,
                position.wrapping_add(2),
                min(position.wrapping_add(sr.len), store_end),
            );
            position = position.wrapping_add(sr.len);
        } else {
            insert_length = insert_length.wrapping_add(1);
            position = position.wrapping_add(1);

            if position > apply_random_heuristics {
                let kMargin: usize = max(hasher.StoreLookahead().wrapping_sub(1), 4);
                if position.wrapping_add(16) >= pos_end.wrapping_sub(kMargin) {
                    insert_length = insert_length.wrapping_add(pos_end - position);
                    position = pos_end;
                } else if position
                    > apply_random_heuristics
                        .wrapping_add((4usize).wrapping_mul(random_heuristics_window_size))
                {
                    hasher.Store4Vec4(ringbuffer, ringbuffer_mask, position);
                    insert_length = insert_length.wrapping_add(16);
                    position = position.wrapping_add(16);
                } else {
                    hasher.StoreEvenVec4(ringbuffer, ringbuffer_mask, position);
                    insert_length = insert_length.wrapping_add(8);
                    position = position.wrapping_add(8);
                }
            }
        }
    }
    insert_length = insert_length.wrapping_add(pos_end.wrapping_sub(position));
    *last_insert_len = insert_length;
    *num_commands = num_commands.wrapping_add(new_commands_count);
}
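/// Entry point: dispatches on the live hasher variant. H10 routes to the
/// Zopfli graph-search paths (the iterated HQ variant at quality >= 11);
/// every other hasher runs the greedy loop above against the static
/// dictionary hash.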
pub fn BrotliCreateBackwardReferences<
    Alloc: alloc::Allocator<u16>
        + alloc::Allocator<u32>
        + alloc::Allocator<u64>
        + alloc::Allocator<floatX>
        + alloc::Allocator<ZopfliNode>,
>(
    alloc: &mut Alloc,
    dictionary: &BrotliDictionary,
    num_bytes: usize,
    position: usize,
    ringbuffer: &[u8],
    ringbuffer_mask: usize,
    params: &BrotliEncoderParams,
    hasher_union: &mut UnionHasher<Alloc>,
    dist_cache: &mut [i32],
    last_insert_len: &mut usize,
    commands: &mut [Command],
    num_commands: &mut usize,
    num_literals: &mut usize,
) {
    let opt_dictionary = if params.use_dictionary {
        Some(dictionary)
    } else {
        None
    };
    match hasher_union {
        &mut UnionHasher::Uninit => panic!("working with uninitialized hash map"),
        &mut UnionHasher::H10(ref mut hasher) => {
            if params.quality >= 11 {
                super::backward_references_hq::BrotliCreateHqZopfliBackwardReferences(
                    alloc,
                    opt_dictionary,
                    num_bytes,
                    position,
                    ringbuffer,
                    ringbuffer_mask,
                    params,
                    hasher,
                    dist_cache,
                    last_insert_len,
                    commands,
                    num_commands,
                    num_literals,
                )
            } else {
                super::backward_references_hq::BrotliCreateZopfliBackwardReferences(
                    alloc,
                    opt_dictionary,
                    num_bytes,
                    position,
                    ringbuffer,
                    ringbuffer_mask,
                    params,
                    hasher,
                    dist_cache,
                    last_insert_len,
                    commands,
                    num_commands,
                    num_literals,
                )
            }
        }
        &mut UnionHasher::H2(ref mut hasher) => CreateBackwardReferences(
            opt_dictionary,
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H3(ref mut hasher) => CreateBackwardReferences(
            opt_dictionary,
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H4(ref mut hasher) => CreateBackwardReferences(
            opt_dictionary,
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H5(ref mut hasher) => CreateBackwardReferences(
            opt_dictionary,
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H5q7(ref mut hasher) => CreateBackwardReferences(
            opt_dictionary,
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H5q5(ref mut hasher) => CreateBackwardReferences(
            opt_dictionary,
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H6(ref mut hasher) => CreateBackwardReferences(
            opt_dictionary,
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H9(ref mut hasher) => CreateBackwardReferences(
            opt_dictionary,
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H54(ref mut hasher) => CreateBackwardReferences(
            opt_dictionary,
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
    }
}