1use core::fmt::{Debug, Error, Formatter};
10use core::iter::FromIterator;
11use core::mem::{self, MaybeUninit};
12use core::ops::Index;
13use core::ops::IndexMut;
14use core::ptr;
15use core::slice::{from_raw_parts, from_raw_parts_mut};
16
17#[cfg(feature = "std")]
18use std::collections::{BTreeMap, HashMap};
19
20use typenum::U64;
21
22use bitmaps::{Bitmap, Bits, Iter as BitmapIter};
23
24use crate::types::ChunkLength;
25
26mod iter;
27pub use self::iter::{Drain, Iter, IterMut, OptionDrain, OptionIter, OptionIterMut};
28
29#[cfg(feature = "refpool")]
30mod refpool;
31
/// A fixed capacity sparse array.
///
/// An inline array of up to `N` slots, each of which may be occupied or
/// empty. Occupancy is tracked by the bitmap `map`; `data` is raw, possibly
/// uninitialised storage for the values.
pub struct SparseChunk<A, N: Bits + ChunkLength<A> = U64> {
    // One bit per slot: set = the slot holds an initialised `A`.
    map: Bitmap<N>,
    // Backing storage; only slots whose bit is set in `map` are initialised.
    data: MaybeUninit<N::SizedType>,
}
71
impl<A, N: Bits + ChunkLength<A>> Drop for SparseChunk<A, N> {
    /// Drop every initialised value in the chunk.
    fn drop(&mut self) {
        // Nothing to do unless `A` has a destructor.
        if mem::needs_drop::<A>() {
            // Copy the bitmap out first so we can borrow `self` mutably below.
            let bits = self.map;
            for index in &bits {
                // SAFETY: `map` flags exactly the initialised slots, so each
                // flagged value is valid and is dropped exactly once here.
                unsafe { ptr::drop_in_place(&mut self.values_mut()[index]) }
            }
        }
    }
}
82
83impl<A: Clone, N: Bits + ChunkLength<A>> Clone for SparseChunk<A, N> {
84 fn clone(&self) -> Self {
85 let mut out = Self::new();
86 for index in &self.map {
87 out.insert(index, self[index].clone());
88 }
89 out
90 }
91}
92
93impl<A, N> SparseChunk<A, N>
94where
95 N: Bits + ChunkLength<A>,
96{
97 pub const CAPACITY: usize = N::USIZE;
99
100 #[inline]
101 fn values(&self) -> &[A] {
102 unsafe { from_raw_parts(&self.data as *const _ as *const A, N::USIZE) }
103 }
104
105 #[inline]
106 fn values_mut(&mut self) -> &mut [A] {
107 unsafe { from_raw_parts_mut(&mut self.data as *mut _ as *mut A, N::USIZE) }
108 }
109
110 #[inline]
112 unsafe fn force_read(index: usize, chunk: &Self) -> A {
113 ptr::read(&chunk.values()[index as usize])
114 }
115
116 #[inline]
118 unsafe fn force_write(index: usize, value: A, chunk: &mut Self) {
119 ptr::write(&mut chunk.values_mut()[index as usize], value)
120 }
121
122 pub fn new() -> Self {
124 Self {
125 map: Bitmap::default(),
126 data: MaybeUninit::uninit(),
127 }
128 }
129
130 pub fn unit(index: usize, value: A) -> Self {
132 let mut chunk = Self::new();
133 chunk.insert(index, value);
134 chunk
135 }
136
137 pub fn pair(index1: usize, value1: A, index2: usize, value2: A) -> Self {
139 let mut chunk = Self::new();
140 chunk.insert(index1, value1);
141 chunk.insert(index2, value2);
142 chunk
143 }
144
145 #[inline]
147 pub fn len(&self) -> usize {
148 self.map.len()
149 }
150
151 #[inline]
153 pub fn is_empty(&self) -> bool {
154 self.map.len() == 0
155 }
156
157 #[inline]
159 pub fn is_full(&self) -> bool {
160 self.len() == N::USIZE
161 }
162
163 pub fn insert(&mut self, index: usize, value: A) -> Option<A> {
167 if index >= N::USIZE {
168 panic!("SparseChunk::insert: index out of bounds");
169 }
170 if self.map.set(index, true) {
171 Some(mem::replace(&mut self.values_mut()[index], value))
172 } else {
173 unsafe { SparseChunk::force_write(index, value, self) };
174 None
175 }
176 }
177
178 pub fn remove(&mut self, index: usize) -> Option<A> {
182 if index >= N::USIZE {
183 panic!("SparseChunk::remove: index out of bounds");
184 }
185 if self.map.set(index, false) {
186 Some(unsafe { SparseChunk::force_read(index, self) })
187 } else {
188 None
189 }
190 }
191
192 pub fn pop(&mut self) -> Option<A> {
196 self.first_index().and_then(|index| self.remove(index))
197 }
198
199 pub fn get(&self, index: usize) -> Option<&A> {
201 if index >= N::USIZE {
202 return None;
203 }
204 if self.map.get(index) {
205 Some(unsafe { self.get_unchecked(index) })
206 } else {
207 None
208 }
209 }
210
211 pub fn get_mut(&mut self, index: usize) -> Option<&mut A> {
213 if index >= N::USIZE {
214 return None;
215 }
216 if self.map.get(index) {
217 Some(unsafe { self.get_unchecked_mut(index) })
218 } else {
219 None
220 }
221 }
222
223 pub unsafe fn get_unchecked(&self, index: usize) -> &A {
230 self.values().get_unchecked(index)
231 }
232
233 pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut A {
240 self.values_mut().get_unchecked_mut(index)
241 }
242
243 pub fn indices(&self) -> BitmapIter<'_, N> {
245 self.map.into_iter()
246 }
247
248 pub fn first_index(&self) -> Option<usize> {
250 self.map.first_index()
251 }
252
253 pub fn iter(&self) -> Iter<'_, A, N> {
255 Iter {
256 indices: self.indices(),
257 chunk: self,
258 }
259 }
260
261 pub fn iter_mut(&mut self) -> IterMut<'_, A, N> {
264 IterMut {
265 bitmap: self.map,
266 chunk: self,
267 }
268 }
269
270 pub fn drain(self) -> Drain<A, N> {
272 Drain { chunk: self }
273 }
274
275 pub fn entries(&self) -> impl Iterator<Item = (usize, &A)> {
278 self.indices().zip(self.iter())
279 }
280
281 pub fn option_iter(&self) -> OptionIter<'_, A, N> {
286 OptionIter {
287 chunk: self,
288 index: 0,
289 }
290 }
291
292 pub fn option_iter_mut(&mut self) -> OptionIterMut<'_, A, N> {
297 OptionIterMut {
298 chunk: self,
299 index: 0,
300 }
301 }
302
303 pub fn option_drain(self) -> OptionDrain<A, N> {
308 OptionDrain {
309 chunk: self,
310 index: 0,
311 }
312 }
313}
314
315impl<A, N: Bits + ChunkLength<A>> Default for SparseChunk<A, N> {
316 fn default() -> Self {
317 Self::new()
318 }
319}
320
321impl<A, N: Bits + ChunkLength<A>> Index<usize> for SparseChunk<A, N> {
322 type Output = A;
323
324 #[inline]
325 fn index(&self, index: usize) -> &Self::Output {
326 self.get(index).unwrap()
327 }
328}
329
330impl<A, N: Bits + ChunkLength<A>> IndexMut<usize> for SparseChunk<A, N> {
331 #[inline]
332 fn index_mut(&mut self, index: usize) -> &mut Self::Output {
333 self.get_mut(index).unwrap()
334 }
335}
336
337impl<A, N: Bits + ChunkLength<A>> IntoIterator for SparseChunk<A, N> {
338 type Item = A;
339 type IntoIter = Drain<A, N>;
340
341 #[inline]
342 fn into_iter(self) -> Self::IntoIter {
343 self.drain()
344 }
345}
346
347impl<A, N: Bits + ChunkLength<A>> FromIterator<Option<A>> for SparseChunk<A, N> {
348 fn from_iter<I>(iter: I) -> Self
349 where
350 I: IntoIterator<Item = Option<A>>,
351 {
352 let mut out = Self::new();
353 for (index, value) in iter.into_iter().enumerate() {
354 if let Some(value) = value {
355 out.insert(index, value);
356 }
357 }
358 out
359 }
360}
361
362impl<A, N> PartialEq for SparseChunk<A, N>
363where
364 A: PartialEq,
365 N: Bits + ChunkLength<A>,
366{
367 fn eq(&self, other: &Self) -> bool {
368 if self.map != other.map {
369 return false;
370 }
371 for index in self.indices() {
372 if self.get(index) != other.get(index) {
373 return false;
374 }
375 }
376 true
377 }
378}
379
#[cfg(feature = "std")]
impl<A: PartialEq, N: Bits + ChunkLength<A>> PartialEq<BTreeMap<usize, A>> for SparseChunk<A, N> {
    /// A chunk equals a `BTreeMap` when both hold the same number of
    /// entries and agree on the value at every occupied index.
    fn eq(&self, other: &BTreeMap<usize, A>) -> bool {
        self.len() == other.len()
            && self
                .indices()
                .all(|index| self.get(index) == other.get(&index))
    }
}
398
#[cfg(feature = "std")]
impl<A: PartialEq, N: Bits + ChunkLength<A>> PartialEq<HashMap<usize, A>> for SparseChunk<A, N> {
    /// A chunk equals a `HashMap` when both hold the same number of
    /// entries and agree on the value at every occupied index.
    fn eq(&self, other: &HashMap<usize, A>) -> bool {
        self.len() == other.len()
            && self
                .indices()
                .all(|index| self.get(index) == other.get(&index))
    }
}
417
impl<A, N> Eq for SparseChunk<A, N>
where
    A: Eq,
    N: Bits + ChunkLength<A>,
{
    // Marker impl: equality is a full equivalence relation whenever `A: Eq`.
}
424
425impl<A, N> Debug for SparseChunk<A, N>
426where
427 A: Debug,
428 N: Bits + ChunkLength<A>,
429{
430 fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
431 f.write_str("SparseChunk")?;
432 f.debug_map().entries(self.entries()).finish()
433 }
434}
435
#[cfg(test)]
mod test {
    use super::*;
    use typenum::U32;

    // Exercises the full insert/replace/remove life cycle and checks that
    // iteration yields indices and values in ascending index order.
    #[test]
    fn insert_remove_iterate() {
        let mut chunk: SparseChunk<_, U32> = SparseChunk::new();
        assert_eq!(None, chunk.insert(5, 5));
        assert_eq!(None, chunk.insert(1, 1));
        assert_eq!(None, chunk.insert(24, 42));
        assert_eq!(None, chunk.insert(22, 22));
        // Inserting over an occupied slot returns the previous value.
        assert_eq!(Some(42), chunk.insert(24, 24));
        assert_eq!(None, chunk.insert(31, 31));
        assert_eq!(Some(24), chunk.remove(24));
        assert_eq!(4, chunk.len());
        let indices: Vec<_> = chunk.indices().collect();
        assert_eq!(vec![1, 5, 22, 31], indices);
        let values: Vec<_> = chunk.into_iter().collect();
        assert_eq!(vec![1, 5, 22, 31], values);
    }

    // A clone must contain exactly the same indices and values as the
    // original chunk.
    #[test]
    fn clone_chunk() {
        let mut chunk: SparseChunk<_, U32> = SparseChunk::new();
        assert_eq!(None, chunk.insert(5, 5));
        assert_eq!(None, chunk.insert(1, 1));
        assert_eq!(None, chunk.insert(24, 42));
        assert_eq!(None, chunk.insert(22, 22));
        let cloned = chunk.clone();
        let right_indices: Vec<_> = chunk.indices().collect();
        let left_indices: Vec<_> = cloned.indices().collect();
        let right: Vec<_> = chunk.into_iter().collect();
        let left: Vec<_> = cloned.into_iter().collect();
        assert_eq!(left, right);
        assert_eq!(left_indices, right_indices);
        assert_eq!(vec![1, 5, 22, 24], left_indices);
        assert_eq!(vec![1, 5, 22, 24], right_indices);
    }

    use crate::tests::DropTest;
    use std::sync::atomic::{AtomicUsize, Ordering};

    // DropTest bumps the counter on creation and decrements it on drop, so
    // the counter tracks the number of live values: removal must drop each
    // removed value, and dropping the chunk must drop all remaining values.
    #[test]
    fn dropping() {
        let counter = AtomicUsize::new(0);
        {
            let mut chunk: SparseChunk<DropTest<'_>> = SparseChunk::new();
            for i in 0..40 {
                chunk.insert(i, DropTest::new(&counter));
            }
            assert_eq!(40, counter.load(Ordering::Relaxed));
            for i in 0..20 {
                chunk.remove(i);
            }
            assert_eq!(20, counter.load(Ordering::Relaxed));
        }
        assert_eq!(0, counter.load(Ordering::Relaxed));
    }

    // Equality must detect both differing values at the same indices and
    // differing sets of occupied indices.
    #[test]
    fn equality() {
        let mut c1 = SparseChunk::<usize>::new();
        for i in 0..32 {
            c1.insert(i, i);
        }
        let mut c2 = c1.clone();
        assert_eq!(c1, c2);
        for i in 4..8 {
            c2.insert(i, 0);
        }
        assert_ne!(c1, c2);
        c2 = c1.clone();
        for i in 0..16 {
            c2.remove(i);
        }
        assert_ne!(c1, c2);
    }
}