use std::{
    hash::{BuildHasher, Hash, Hasher},
    mem::{self, MaybeUninit},
    ptr,
    sync::{
        atomic::{self, AtomicUsize, Ordering},
        Arc, Mutex, TryLockError,
    },
};

#[cfg(feature = "unstable-debug-counters")]
use crate::common::concurrent::debug_counters;

use crossbeam_epoch::{Atomic, CompareExchangeError, Guard, Owned, Shared};

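// The default length of a new top-level `BucketArray`. Lengths must always be
// a power of two (see `with_length`).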
pub(crate) const BUCKET_ARRAY_DEFAULT_LENGTH: usize = 128;

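// A single array of buckets in the lock-free hash table. When the table is
// resized, entries are copied into `next`, and each copied bucket here is
// replaced by a sentinel pointer so readers and writers know to move on.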
pub(crate) struct BucketArray<K, V> {
    pub(crate) buckets: Box<[Atomic<Bucket<K, V>>]>,
    pub(crate) next: Atomic<BucketArray<K, V>>,
    pub(crate) epoch: usize,
    pub(crate) rehash_lock: Arc<Mutex<()>>,
    pub(crate) tombstone_count: AtomicUsize,
}

impl<K, V> Default for BucketArray<K, V> {
    fn default() -> Self {
        Self::with_length(0, BUCKET_ARRAY_DEFAULT_LENGTH)
    }
}

impl<K, V> BucketArray<K, V> {
    pub(crate) fn with_length(epoch: usize, length: usize) -> Self {
        assert!(length.is_power_of_two());
        let mut buckets = Vec::with_capacity(length);

        // Safety: `Atomic::null()` is represented by all-zero bits, so
        // zero-filling the allocation initializes every bucket to null.
        unsafe {
            ptr::write_bytes(buckets.as_mut_ptr(), 0, length);
            buckets.set_len(length);
        }

        let buckets = buckets.into_boxed_slice();

        #[cfg(feature = "unstable-debug-counters")]
        {
            use debug_counters::InternalGlobalDebugCounters as Counters;

            let size = (buckets.len() * std::mem::size_of::<Atomic<Bucket<K, V>>>()) as u64;
            Counters::bucket_array_created(size);
        }

        Self {
            buckets,
            next: Atomic::null(),
            epoch,
            rehash_lock: Arc::new(Mutex::new(())),
            tombstone_count: AtomicUsize::default(),
        }
    }

    pub(crate) fn capacity(&self) -> usize {
        assert!(self.buckets.len().is_power_of_two());

        // Only half of the buckets count as usable capacity.
        self.buckets.len() / 2
    }
}

#[cfg(feature = "unstable-debug-counters")]
impl<K, V> Drop for BucketArray<K, V> {
    fn drop(&mut self) {
        use debug_counters::InternalGlobalDebugCounters as Counters;

        let size = (self.buckets.len() * std::mem::size_of::<Atomic<Bucket<K, V>>>()) as u64;
        Counters::bucket_array_dropped(size);
    }
}

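// Lock-free read and write operations. Each method probes the buckets for the
// given hash and bails out when it encounters a sentinel, returning the
// caller-supplied state so the operation can be retried on the next array.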
impl<'g, K: 'g + Eq, V: 'g> BucketArray<K, V> {
    pub(crate) fn get(
        &self,
        guard: &'g Guard,
        hash: u64,
        mut eq: impl FnMut(&K) -> bool,
    ) -> Result<Shared<'g, Bucket<K, V>>, RelocatedError> {
        for bucket in self.probe(guard, hash) {
            let Ok((_, _, this_bucket_ptr)) = bucket else {
                // Found a sentinel; the entries have moved to the next array.
                return Err(RelocatedError);
            };

            let Some(this_bucket_ref) = (unsafe { this_bucket_ptr.as_ref() }) else {
                // Found an empty bucket; the key is not present.
                return Ok(Shared::null());
            };

            if !eq(&this_bucket_ref.key) {
                // Hash collision; keep probing.
                continue;
            }

            if is_tombstone(this_bucket_ptr) {
                return Ok(Shared::null());
            } else {
                return Ok(this_bucket_ptr);
            }
        }

        Ok(Shared::null())
    }

    pub(crate) fn remove_if<F>(
        &self,
        guard: &'g Guard,
        hash: u64,
        mut eq: impl FnMut(&K) -> bool,
        mut condition: F,
    ) -> Result<Shared<'g, Bucket<K, V>>, F>
    where
        F: FnMut(&K, &V) -> bool,
    {
        let mut probe = self.probe(guard, hash);
        while let Some(bucket) = probe.next() {
            let Ok((_, this_bucket, this_bucket_ptr)) = bucket else {
                // Found a sentinel; hand the condition back to the caller so
                // it can retry against the next array.
                return Err(condition);
            };

            let Some(this_bucket_ref) = (unsafe { this_bucket_ptr.as_ref() }) else {
                // Found an empty bucket; the key is not present.
                return Ok(Shared::null());
            };

            let this_key = &this_bucket_ref.key;

            if !eq(this_key) {
                // Hash collision; keep probing.
                continue;
            }

            if is_tombstone(this_bucket_ptr) {
                // The key was already removed.
                return Ok(Shared::null());
            }

            let this_value = unsafe { &*this_bucket_ref.maybe_value.as_ptr() };

            if !condition(this_key, this_value) {
                // The caller declined to remove this entry.
                return Ok(Shared::null());
            }

            // Mark the bucket as a tombstone rather than emptying it, so that
            // probe sequences passing through this index keep working.
            let new_bucket_ptr = this_bucket_ptr.with_tag(TOMBSTONE_TAG);

            match this_bucket.compare_exchange_weak(
                this_bucket_ptr,
                new_bucket_ptr,
                Ordering::AcqRel,
                Ordering::Relaxed,
                guard,
            ) {
                Ok(_) => return Ok(new_bucket_ptr),
                // Lost the race; reload this bucket and try again.
                Err(_) => probe.reload(),
            }
        }

        Ok(Shared::null())
    }

    pub(crate) fn insert_if_not_present<F>(
        &self,
        guard: &'g Guard,
        hash: u64,
        mut state: InsertOrModifyState<K, V, F>,
    ) -> Result<InsertionResult<'g, K, V>, InsertOrModifyState<K, V, F>>
    where
        F: FnOnce() -> V,
    {
        let mut probe = self.probe(guard, hash);
        while let Some(Ok((_, this_bucket, this_bucket_ptr))) = probe.next() {
            if let Some(this_bucket_ref) = unsafe { this_bucket_ptr.as_ref() } {
                if &this_bucket_ref.key != state.key() {
                    // Hash collision; keep probing.
                    continue;
                }

                if !is_tombstone(this_bucket_ptr) {
                    // The key is already present with a live value.
                    return Ok(InsertionResult::AlreadyPresent(this_bucket_ptr));
                }

                // The bucket holds a tombstone for this key; overwrite it below.
            }

            let new_bucket = state.into_insert_bucket();

            if let Err(CompareExchangeError { new, .. }) = this_bucket.compare_exchange_weak(
                this_bucket_ptr,
                new_bucket,
                Ordering::AcqRel,
                Ordering::Relaxed,
                guard,
            ) {
                // Lost the race; recover the bucket we built and retry.
                state = InsertOrModifyState::from_bucket_value(new, None);
                probe.reload();
            } else if unsafe { this_bucket_ptr.as_ref() }.is_some() {
                // We replaced a tombstone bucket; hand it back to the caller.
                return Ok(InsertionResult::ReplacedTombstone(this_bucket_ptr));
            } else {
                return Ok(InsertionResult::Inserted);
            }
        }

        Err(state)
    }

    #[allow(clippy::type_complexity)]
    pub(crate) fn insert_or_modify<F, G>(
        &self,
        guard: &'g Guard,
        hash: u64,
        mut state: InsertOrModifyState<K, V, F>,
        mut modifier: G,
    ) -> Result<Shared<'g, Bucket<K, V>>, (InsertOrModifyState<K, V, F>, G)>
    where
        F: FnOnce() -> V,
        G: FnMut(&K, &V) -> V,
    {
        let mut probe = self.probe(guard, hash);
        while let Some(bucket) = probe.next() {
            let Ok((_, this_bucket, this_bucket_ptr)) = bucket else {
                // Found a sentinel; hand the state back to the caller so it
                // can retry against the next array.
                return Err((state, modifier));
            };

            let (new_bucket, maybe_insert_value) =
                if let Some(this_bucket_ref) = unsafe { this_bucket_ptr.as_ref() } {
                    let this_key = &this_bucket_ref.key;

                    if this_key != state.key() {
                        // Hash collision; keep probing.
                        continue;
                    }

                    if is_tombstone(this_bucket_ptr) {
                        // The previous value was removed; insert a fresh one.
                        (state.into_insert_bucket(), None)
                    } else {
                        // The key is present; compute the replacement value.
                        let this_value = unsafe { &*this_bucket_ref.maybe_value.as_ptr() };
                        let new_value = modifier(this_key, this_value);

                        let (new_bucket, insert_value) = state.into_modify_bucket(new_value);

                        (new_bucket, Some(insert_value))
                    }
                } else {
                    // Found an empty bucket; insert a new entry.
                    (state.into_insert_bucket(), None)
                };

            if let Err(CompareExchangeError { new, .. }) = this_bucket.compare_exchange_weak(
                this_bucket_ptr,
                new_bucket,
                Ordering::AcqRel,
                Ordering::Relaxed,
                guard,
            ) {
                // Lost the race; recover the bucket we built and retry.
                state = InsertOrModifyState::from_bucket_value(new, maybe_insert_value);
                probe.reload();
            } else {
                return Ok(this_bucket_ptr);
            }
        }

        Err((state, modifier))
    }
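    // Copies a borrowed bucket from the previous array into this one during a
    // rehash. Returns the destination index on success, or `None` if the
    // bucket does not need to be (or can no longer be) copied.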
    fn insert_for_grow(
        &self,
        guard: &'g Guard,
        hash: u64,
        bucket_ptr: Shared<'g, Bucket<K, V>>,
    ) -> Option<usize> {
        assert!(!bucket_ptr.is_null());
        assert!(!is_sentinel(bucket_ptr));
        assert!(is_borrowed(bucket_ptr));

        let key = &unsafe { bucket_ptr.deref() }.key;

        let mut probe = self.probe(guard, hash);
        while let Some(bucket) = probe.next() {
            let Ok((i, this_bucket, this_bucket_ptr)) = bucket else {
                // This array has itself been relocated; give up.
                return None;
            };

            if let Some(Bucket { key: this_key, .. }) = unsafe { this_bucket_ptr.as_ref() } {
                if this_bucket_ptr == bucket_ptr {
                    // Another thread already copied this bucket.
                    return None;
                } else if this_key != key {
                    // Hash collision; keep probing.
                    continue;
                } else if !is_borrowed(this_bucket_ptr) {
                    // The key was written directly into this array after the
                    // copy began; that write supersedes the borrowed bucket.
                    return None;
                }
            }

            if this_bucket_ptr.is_null() && is_tombstone(bucket_ptr) {
                // Don't bother copying a tombstone into an empty bucket.
                return None;
            } else if this_bucket
                .compare_exchange_weak(
                    this_bucket_ptr,
                    bucket_ptr,
                    Ordering::AcqRel,
                    Ordering::Relaxed,
                    guard,
                )
                .is_ok()
            {
                return Some(i);
            } else {
                probe.reload();
            }
        }

        None
    }

    pub(crate) fn keys<F, T>(
        &self,
        guard: &'g Guard,
        with_key: &mut F,
    ) -> Result<Vec<T>, RelocatedError>
    where
        F: FnMut(&K) -> T,
    {
        let mut keys = Vec::new();

        for bucket in self.buckets.iter() {
            let bucket_ptr = bucket.load_consume(guard);

            if is_sentinel(bucket_ptr) {
                return Err(RelocatedError);
            }

            if let Some(bucket_ref) = unsafe { bucket_ptr.as_ref() } {
                if !is_tombstone(bucket_ptr) {
                    keys.push(with_key(&bucket_ref.key));
                }
            }
        }

        Ok(keys)
    }
}

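// A linear-probing iterator over the buckets for a given hash. It yields
// `Err(())` when it encounters a sentinel. After a failed compare-exchange,
// calling `reload` makes the iterator yield the same bucket again instead of
// advancing.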
struct Probe<'b, 'g, K: 'g, V: 'g> {
    buckets: &'b [Atomic<Bucket<K, V>>],
    guard: &'g Guard,
    this_bucket: (usize, &'b Atomic<Bucket<K, V>>),
    offset: usize,

    i: usize,
    reload: bool,
}

impl<'g, K: 'g, V: 'g> Probe<'_, 'g, K, V> {
    fn reload(&mut self) {
        self.reload = true;
    }
}

impl<'b, 'g, K: 'g, V: 'g> Iterator for Probe<'b, 'g, K, V> {
    type Item = Result<(usize, &'b Atomic<Bucket<K, V>>, Shared<'g, Bucket<K, V>>), ()>;

    fn next(&mut self) -> Option<Self::Item> {
        if !self.reload {
            // Advance to the next bucket, wrapping around the power-of-two
            // sized array. Stop after every bucket has been visited once.
            let max = self.buckets.len() - 1;
            if self.i >= max {
                return None;
            }
            self.i += 1;
            let i = self.i.wrapping_add(self.offset) & max;
            self.this_bucket = (i, &self.buckets[i]);
        }
        self.reload = false;

        let this_bucket_ptr = self.this_bucket.1.load_consume(self.guard);

        if is_sentinel(this_bucket_ptr) {
            return Some(Err(()));
        }

        let val = (self.this_bucket.0, self.this_bucket.1, this_bucket_ptr);
        Some(Ok(val))
    }
}

impl<'g, K: 'g, V: 'g> BucketArray<K, V> {
    fn probe(&self, guard: &'g Guard, hash: u64) -> Probe<'_, 'g, K, V> {
        let buckets = &self.buckets;
        let offset = hash as usize & (buckets.len() - 1);
        let this_bucket = (offset, &buckets[offset]);
        Probe {
            buckets,
            guard,
            this_bucket,
            offset,

            i: 0,
            // Start with `reload` set so the first call to `next` yields the
            // bucket at `offset` without advancing.
            reload: true,
        }
    }

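    // Copies all live entries into the next bucket array (allocating it first
    // if necessary) and replaces every copied bucket here with a sentinel.
    // Only one thread rehashes at a time; latecomers wait for it to finish
    // and return `None`.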
    pub(crate) fn rehash<H>(
        &self,
        guard: &'g Guard,
        build_hasher: &H,
        rehash_op: RehashOp,
    ) -> Option<&'g BucketArray<K, V>>
    where
        K: Hash + Eq,
        H: BuildHasher,
    {
        let lock = match self.rehash_lock.try_lock() {
            Ok(lk) => lk,
            Err(TryLockError::WouldBlock) => {
                // Another thread is already rehashing; wait until it finishes,
                // then return without rehashing.
                std::mem::drop(self.rehash_lock.lock());
                return None;
            }
            Err(e @ TryLockError::Poisoned(_)) => panic!("{e:?}"),
        };

        let next_array = self.next_array(guard, rehash_op);

        for this_bucket in self.buckets.iter() {
            let mut maybe_state: Option<(usize, Shared<'g, Bucket<K, V>>)> = None;

            loop {
                let this_bucket_ptr = this_bucket.load_consume(guard);

                if is_sentinel(this_bucket_ptr) {
                    break;
                }

                let to_put_ptr = this_bucket_ptr.with_tag(this_bucket_ptr.tag() | BORROWED_TAG);

                if let Some((index, mut next_bucket_ptr)) = maybe_state {
                    assert!(!this_bucket_ptr.is_null());

                    let next_bucket = &next_array.buckets[index];

                    // Keep re-publishing the latest state of this bucket into
                    // the next array until the copy sticks, or until a direct
                    // (non-borrowed) write to the next array supersedes it.
                    while is_borrowed(next_bucket_ptr)
                        && next_bucket
                            .compare_exchange_weak(
                                next_bucket_ptr,
                                to_put_ptr,
                                Ordering::AcqRel,
                                Ordering::Relaxed,
                                guard,
                            )
                            .is_err()
                    {
                        next_bucket_ptr = next_bucket.load_consume(guard);
                    }
                } else if let Some(this_bucket_ref) = unsafe { this_bucket_ptr.as_ref() } {
                    let key = &this_bucket_ref.key;
                    let hash = hash(build_hasher, key);

                    if let Some(index) = next_array.insert_for_grow(guard, hash, to_put_ptr) {
                        maybe_state = Some((index, to_put_ptr));
                    }
                }

                // Replace this bucket with a sentinel so that readers and
                // writers know to move on to the next array.
                if this_bucket
                    .compare_exchange_weak(
                        this_bucket_ptr,
                        Shared::null().with_tag(SENTINEL_TAG),
                        Ordering::AcqRel,
                        Ordering::Relaxed,
                        guard,
                    )
                    .is_ok()
                {
                    // A tombstone that was never copied can be destroyed now.
                    if !this_bucket_ptr.is_null()
                        && is_tombstone(this_bucket_ptr)
                        && maybe_state.is_none()
                    {
                        unsafe { defer_destroy_bucket(guard, this_bucket_ptr) };
                    }

                    break;
                }
            }
        }

        guard.flush();
        std::mem::drop(lock);

        Some(next_array)
    }

    fn next_array(&self, guard: &'g Guard, rehash_op: RehashOp) -> &'g BucketArray<K, V> {
        let mut maybe_new_next = None;

        loop {
            let next_ptr = self.next.load_consume(guard);

            if let Some(next_ref) = unsafe { next_ptr.as_ref() } {
                return next_ref;
            }

            let new_length = rehash_op.new_len(self.buckets.len());
            let new_next = maybe_new_next.unwrap_or_else(|| {
                Owned::new(BucketArray::with_length(self.epoch + 1, new_length))
            });

            match self.next.compare_exchange_weak(
                Shared::null(),
                new_next,
                Ordering::AcqRel,
                Ordering::Relaxed,
                guard,
            ) {
                Ok(p) => return unsafe { p.deref() },
                Err(CompareExchangeError { new, .. }) => {
                    // Lost the race; retry, reusing the array we allocated.
                    maybe_new_next = Some(new);
                }
            }
        }
    }
}

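// A key-value bucket. The 8-byte alignment keeps the three low bits of every
// bucket pointer zero, so they can carry the tag flags defined near the end
// of this file. `maybe_value` is initialized for live buckets; a tombstone's
// value may already have been moved out (see `defer_destroy_tombstone`).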
#[repr(align(8))]
#[derive(Debug)]
pub(crate) struct Bucket<K, V> {
    pub(crate) key: K,
    pub(crate) maybe_value: MaybeUninit<V>,
}

impl<K, V> Bucket<K, V> {
    pub(crate) fn new(key: K, value: V) -> Bucket<K, V> {
        #[cfg(feature = "unstable-debug-counters")]
        debug_counters::InternalGlobalDebugCounters::bucket_created();

        Self {
            key,
            maybe_value: MaybeUninit::new(value),
        }
    }
}

#[cfg(feature = "unstable-debug-counters")]
impl<K, V> Drop for Bucket<K, V> {
    fn drop(&mut self) {
        debug_counters::InternalGlobalDebugCounters::bucket_dropped();
    }
}

// Returned by read operations when they encounter a sentinel, meaning the
// entries have been relocated to a new bucket array.
#[derive(Debug, Eq, PartialEq)]
pub(crate) struct RelocatedError;

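// The caller-supplied key and value (or value-producing closure) for an
// insert or modify operation. Keeping the already-allocated bucket across
// failed compare-exchange attempts lets it be reused instead of reallocated.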
pub(crate) enum InsertOrModifyState<K, V, F: FnOnce() -> V> {
    New(K, F),
    AttemptedInsertion(Owned<Bucket<K, V>>),
    AttemptedModification(Owned<Bucket<K, V>>, ValueOrFunction<V, F>),
}

impl<K, V, F: FnOnce() -> V> InsertOrModifyState<K, V, F> {
    fn from_bucket_value(
        bucket: Owned<Bucket<K, V>>,
        value_or_function: Option<ValueOrFunction<V, F>>,
    ) -> Self {
        if let Some(value_or_function) = value_or_function {
            Self::AttemptedModification(bucket, value_or_function)
        } else {
            Self::AttemptedInsertion(bucket)
        }
    }

    fn key(&self) -> &K {
        match self {
            InsertOrModifyState::New(k, _) => k,
            InsertOrModifyState::AttemptedInsertion(b)
            | InsertOrModifyState::AttemptedModification(b, _) => &b.key,
        }
    }

    fn into_insert_bucket(self) -> Owned<Bucket<K, V>> {
        match self {
            InsertOrModifyState::New(k, f) => Owned::new(Bucket::new(k, f())),
            InsertOrModifyState::AttemptedInsertion(b) => b,
            InsertOrModifyState::AttemptedModification(mut b, v_or_f) => {
                // Replace the previously computed modification value with the
                // insertion value, dropping the old one.
                unsafe {
                    mem::drop(
                        mem::replace(&mut b.maybe_value, MaybeUninit::new(v_or_f.into_value()))
                            .assume_init(),
                    );
                };

                b
            }
        }
    }

    fn into_modify_bucket(self, value: V) -> (Owned<Bucket<K, V>>, ValueOrFunction<V, F>) {
        match self {
            InsertOrModifyState::New(k, f) => (
                Owned::new(Bucket::new(k, value)),
                ValueOrFunction::Function(f),
            ),
            InsertOrModifyState::AttemptedInsertion(mut b) => {
                // Swap the insertion value out of the bucket so it can be
                // kept for a later attempt.
                let insert_value = unsafe {
                    mem::replace(&mut b.maybe_value, MaybeUninit::new(value)).assume_init()
                };

                (b, ValueOrFunction::Value(insert_value))
            }
            InsertOrModifyState::AttemptedModification(mut b, v_or_f) => {
                unsafe {
                    mem::drop(
                        mem::replace(&mut b.maybe_value, MaybeUninit::new(value)).assume_init(),
                    );
                }

                (b, v_or_f)
            }
        }
    }
}

pub(crate) enum ValueOrFunction<V, F: FnOnce() -> V> {
    Value(V),
    Function(F),
}

impl<V, F: FnOnce() -> V> ValueOrFunction<V, F> {
    fn into_value(self) -> V {
        match self {
            ValueOrFunction::Value(v) => v,
            ValueOrFunction::Function(f) => f(),
        }
    }
}

pub(crate) fn hash<K, H>(build_hasher: &H, key: &K) -> u64
where
    K: ?Sized + Hash,
    H: BuildHasher,
{
    let mut hasher = build_hasher.build_hasher();
    key.hash(&mut hasher);

    hasher.finish()
}

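// The outcome of a successful `insert_if_not_present` call.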
pub(crate) enum InsertionResult<'g, K, V> {
    AlreadyPresent(Shared<'g, Bucket<K, V>>),
    Inserted,
    ReplacedTombstone(Shared<'g, Bucket<K, V>>),
}

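// Deferred destructors. Buckets and values may only be dropped after the
// epoch advances past every thread that could still hold a reference.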
pub(crate) unsafe fn defer_destroy_bucket<'g, K, V>(
    guard: &'g Guard,
    mut ptr: Shared<'g, Bucket<K, V>>,
) {
    assert!(!ptr.is_null());

    guard.defer_unchecked(move || {
        atomic::fence(Ordering::Acquire);

        // A tombstone's value has already been moved out or destroyed; only
        // drop the value of a live bucket.
        if !is_tombstone(ptr) {
            ptr::drop_in_place(ptr.deref_mut().maybe_value.as_mut_ptr());
        }

        mem::drop(ptr.into_owned());
    });
}

pub(crate) unsafe fn defer_destroy_tombstone<'g, K, V>(
    guard: &'g Guard,
    mut ptr: Shared<'g, Bucket<K, V>>,
) {
    assert!(!ptr.is_null());
    assert!(is_tombstone(ptr));

    atomic::fence(Ordering::Acquire);
    // Read the value now, but defer its destruction. `defer_destroy_bucket`
    // skips tombstone values, so the value cannot be dropped twice.
    let value = ptr::read(ptr.deref_mut().maybe_value.as_ptr());

    guard.defer_unchecked(move || mem::drop(value));
}

pub(crate) unsafe fn defer_acquire_destroy<'g, T>(guard: &'g Guard, ptr: Shared<'g, T>) {
    assert!(!ptr.is_null());

    guard.defer_unchecked(move || {
        atomic::fence(Ordering::Acquire);
        mem::drop(ptr.into_owned());
    });
}

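// The rehash operation to perform, decided from the current capacity, entry
// count, and tombstone count.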
#[derive(Clone, Copy)]
pub(crate) enum RehashOp {
    Expand,
    Shrink,
    GcOnly,
    Skip,
}

impl RehashOp {
    pub(crate) fn new(cap: usize, tombstone_count: &AtomicUsize, len: &AtomicUsize) -> Self {
        // `cap` is half the bucket count (see `BucketArray::capacity`), so
        // the real number of buckets is twice that.
        let real_cap = cap as f64 * 2.0;
        let quarter_cap = real_cap / 4.0;
        let tbc = tombstone_count.load(Ordering::Relaxed) as f64;
        let len = len.load(Ordering::Relaxed) as f64;

        // Collect garbage once tombstones pile up; shrink if the live entries
        // would fit in a quarter of the buckets.
        if tbc >= 25_000.0 || tbc / real_cap >= 0.1 {
            if len - tbc < quarter_cap && quarter_cap as usize >= BUCKET_ARRAY_DEFAULT_LENGTH {
                return Self::Shrink;
            } else {
                return Self::GcOnly;
            }
        }

        // Expand when the load factor exceeds 0.7 of the full bucket count.
        if len > real_cap * 0.7 {
            return Self::Expand;
        }

        Self::Skip
    }

    pub(crate) fn is_skip(self) -> bool {
        matches!(self, Self::Skip)
    }

    fn new_len(self, current_len: usize) -> usize {
        match self {
            Self::Expand => current_len * 2,
            Self::Shrink => current_len / 2,
            Self::GcOnly => current_len,
            Self::Skip => unreachable!(),
        }
    }
}

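// Pointer tags, stored in the three low bits that `Bucket`'s 8-byte alignment
// guarantees are zero.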
pub(crate) const SENTINEL_TAG: usize = 0b001; // the bucket has moved to the next array
pub(crate) const TOMBSTONE_TAG: usize = 0b010; // the bucket's value has been destroyed
pub(crate) const BORROWED_TAG: usize = 0b100; // the bucket was copied from the previous array

#[inline]
pub(crate) fn is_sentinel<K, V>(bucket_ptr: Shared<'_, Bucket<K, V>>) -> bool {
    bucket_ptr.tag() & SENTINEL_TAG != 0
}

#[inline]
pub(crate) fn is_tombstone<K, V>(bucket_ptr: Shared<'_, Bucket<K, V>>) -> bool {
    bucket_ptr.tag() & TOMBSTONE_TAG != 0
}

#[inline]
pub(crate) fn is_borrowed<K, V>(bucket_ptr: Shared<'_, Bucket<K, V>>) -> bool {
    bucket_ptr.tag() & BORROWED_TAG != 0
}

#[cfg(test)]
mod tests {
    use super::{
        defer_destroy_bucket, defer_destroy_tombstone, hash, is_tombstone, Bucket, BucketArray,
        InsertOrModifyState, InsertionResult, RelocatedError,
    };
    use crossbeam_epoch::{Guard, Shared};
    use std::{collections::hash_map::RandomState, sync::atomic::Ordering};

    #[test]
    fn get_insert_remove() {
        let build_hasher = RandomState::new();
        let buckets = BucketArray::with_length(0, 16);
        let guard = unsafe { crossbeam_epoch::unprotected() };

        let k1 = "foo";
        let h1 = hash(&build_hasher, k1);
        let v1 = 5;

        let k2 = "bar";
        let h2 = hash(&build_hasher, k2);
        let v2 = 10;

        let k3 = "baz";
        let h3 = hash(&build_hasher, k3);
        let v3 = 15;

        assert_eq!(buckets.get(guard, h1, |&k| k == k1), Ok(Shared::null()));
        assert_eq!(buckets.get(guard, h2, |&k| k == k2), Ok(Shared::null()));
        assert_eq!(buckets.get(guard, h3, |&k| k == k3), Ok(Shared::null()));

        assert!(matches!(
            insert(&buckets, guard, k1, h1, || v1),
            Ok(InsertionResult::Inserted)
        ));

        assert_eq!(
            into_value(buckets.get(guard, h1, |&k| k == k1)),
            Ok(Some(v1))
        );
        assert_eq!(buckets.get(guard, h2, |&k| k == k2), Ok(Shared::null()));
        assert_eq!(buckets.get(guard, h3, |&k| k == k3), Ok(Shared::null()));

        assert!(matches!(
            insert(&buckets, guard, k2, h2, || v2),
            Ok(InsertionResult::Inserted)
        ));

        assert_eq!(
            into_value(buckets.get(guard, h1, |&k| k == k1)),
            Ok(Some(v1))
        );
        assert_eq!(
            into_value(buckets.get(guard, h2, |&k| k == k2)),
            Ok(Some(v2))
        );
        assert_eq!(buckets.get(guard, h3, |&k| k == k3), Ok(Shared::null()));

        assert!(matches!(
            insert(&buckets, guard, k3, h3, || v3),
            Ok(InsertionResult::Inserted)
        ));

        assert_eq!(
            into_value(buckets.get(guard, h1, |&k| k == k1)),
            Ok(Some(v1))
        );
        assert_eq!(
            into_value(buckets.get(guard, h2, |&k| k == k2)),
            Ok(Some(v2))
        );
        assert_eq!(
            into_value(buckets.get(guard, h3, |&k| k == k3)),
            Ok(Some(v3))
        );

        let b1 = buckets
            .remove_if(guard, h1, |&k| k == k1, |_, _| true)
            .ok()
            .unwrap();
        assert!(is_tombstone(b1));
        unsafe { defer_destroy_tombstone(guard, b1) };

        let b2 = buckets
            .remove_if(guard, h2, |&k| k == k2, |_, _| true)
            .ok()
            .unwrap();
        assert!(is_tombstone(b2));
        unsafe { defer_destroy_tombstone(guard, b2) };

        let b3 = buckets
            .remove_if(guard, h3, |&k| k == k3, |_, _| true)
            .ok()
            .unwrap();
        assert!(is_tombstone(b3));
        unsafe { defer_destroy_tombstone(guard, b3) };

        assert_eq!(buckets.get(guard, h1, |&k| k == k1), Ok(Shared::null()));
        assert_eq!(buckets.get(guard, h2, |&k| k == k2), Ok(Shared::null()));
        assert_eq!(buckets.get(guard, h3, |&k| k == k3), Ok(Shared::null()));

        for this_bucket in buckets.buckets.iter() {
            let this_bucket_ptr = this_bucket.swap(Shared::null(), Ordering::Relaxed, guard);

            if this_bucket_ptr.is_null() {
                continue;
            }

            unsafe {
                defer_destroy_bucket(guard, this_bucket_ptr);
            }
        }
    }

    fn insert<'g, K, V, F>(
        buckets: &BucketArray<K, V>,
        guard: &'g Guard,
        key: K,
        hash: u64,
        value_init: F,
    ) -> Result<InsertionResult<'g, K, V>, InsertOrModifyState<K, V, F>>
    where
        K: Eq,
        F: FnOnce() -> V,
    {
        let state = InsertOrModifyState::New(key, value_init);
        buckets.insert_if_not_present(guard, hash, state)
    }

    fn into_value<K, V>(
        maybe_bucket_ptr: Result<Shared<'_, Bucket<K, V>>, RelocatedError>,
    ) -> Result<Option<V>, RelocatedError>
    where
        V: Clone,
    {
        maybe_bucket_ptr
            .map(|p| unsafe { p.as_ref() }.map(|b| unsafe { &*b.maybe_value.as_ptr() }.clone()))
    }
}