typed_arena/lib.rs
//! The arena, a fast but limited type of allocator.
//!
//! **A fast (but limited) allocation arena for values of a single type.**
//!
//! Allocated objects are destroyed all at once, when the arena itself is
//! destroyed. There is no deallocation of individual objects while the arena
//! itself is still alive. The flipside is that allocation is fast: typically
//! just a vector push.
//!
//! There is also a method `into_vec()` to recover ownership of allocated
//! objects when the arena is no longer required, instead of destroying
//! everything.
//!
//! ## Example
//!
//! ```
//! use typed_arena::Arena;
//!
//! struct Monster {
//!     level: u32,
//! }
//!
//! let monsters = Arena::new();
//!
//! let goku = monsters.alloc(Monster { level: 9001 });
//! assert!(goku.level > 9000);
//! ```
//!
//! ## Safe Cycles
//!
//! All allocated objects get the same lifetime, so you can safely create cycles
//! between them. This can be useful for certain data structures, such as graphs
//! and trees with parent pointers.
//!
//! ```
//! use std::cell::Cell;
//! use typed_arena::Arena;
//!
//! struct CycleParticipant<'a> {
//!     other: Cell<Option<&'a CycleParticipant<'a>>>,
//! }
//!
//! let arena = Arena::new();
//!
//! let a = arena.alloc(CycleParticipant { other: Cell::new(None) });
//! let b = arena.alloc(CycleParticipant { other: Cell::new(None) });
//!
//! a.other.set(Some(b));
//! b.other.set(Some(a));
//! ```

// Potential optimizations:
// 1) add and stabilize a method for in-place reallocation of vecs.
// 2) add and stabilize placement new.
// 3) use an iterator. This may add far too much unsafe code.

#![deny(missing_docs)]
#![cfg_attr(not(any(feature = "std", test)), no_std)]

#[cfg(not(feature = "std"))]
extern crate alloc;

#[cfg(any(feature = "std", test))]
extern crate core;

#[cfg(not(feature = "std"))]
use alloc::vec::Vec;

use core::cell::RefCell;
use core::cmp;
use core::iter;
use core::mem;
use core::ptr;
use core::slice;
use core::str;

use mem::MaybeUninit;

#[cfg(test)]
mod test;

// Initial size in bytes.
const INITIAL_SIZE: usize = 1024;
// Minimum capacity. Must be larger than 0.
const MIN_CAPACITY: usize = 1;

/// An arena of objects of type `T`.
///
/// ## Example
///
/// ```
/// use typed_arena::Arena;
///
/// struct Monster {
///     level: u32,
/// }
///
/// let monsters = Arena::new();
///
/// let vegeta = monsters.alloc(Monster { level: 9001 });
/// assert!(vegeta.level > 9000);
/// ```
pub struct Arena<T> {
    chunks: RefCell<ChunkList<T>>,
}

struct ChunkList<T> {
    current: Vec<T>,
    rest: Vec<Vec<T>>,
}

impl<T> Arena<T> {
    /// Construct a new arena.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    /// # arena.alloc(1);
    /// ```
    pub fn new() -> Arena<T> {
        let size = cmp::max(1, mem::size_of::<T>());
        Arena::with_capacity(INITIAL_SIZE / size)
    }

    /// Construct a new arena with capacity for `n` values pre-allocated.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::with_capacity(1337);
    /// # arena.alloc(1);
    /// ```
    pub fn with_capacity(n: usize) -> Arena<T> {
        let n = cmp::max(MIN_CAPACITY, n);
        Arena {
            chunks: RefCell::new(ChunkList {
                current: Vec::with_capacity(n),
                rest: Vec::new(),
            }),
        }
    }

    /// Returns the number of values allocated in the arena.
    ///
    /// This is useful for sizing a new arena based on how many values a
    /// previous arena ended up holding.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::with_capacity(0);
    /// arena.alloc(1);
    /// arena.alloc(2);
    ///
    /// assert_eq!(arena.len(), 2);
    /// ```
    pub fn len(&self) -> usize {
        let chunks = self.chunks.borrow();

        let mut res = 0;
        for vec in chunks.rest.iter() {
            res += vec.len()
        }

        res + chunks.current.len()
    }

    /// Allocates a value in the arena, and returns a mutable reference
    /// to that value.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    /// let x = arena.alloc(42);
    /// assert_eq!(*x, 42);
    /// ```
    #[inline]
    pub fn alloc(&self, value: T) -> &mut T {
        self.alloc_fast_path(value)
            .unwrap_or_else(|value| self.alloc_slow_path(value))
    }

    #[inline]
    fn alloc_fast_path(&self, value: T) -> Result<&mut T, T> {
        let mut chunks = self.chunks.borrow_mut();
        let len = chunks.current.len();
        if len < chunks.current.capacity() {
            chunks.current.push(value);
            // Avoid going through `Vec::deref_mut`, which overlaps
            // other references we have already handed out!
            debug_assert!(len < chunks.current.len()); // bounds check
            Ok(unsafe { &mut *chunks.current.as_mut_ptr().add(len) })
        } else {
            Err(value)
        }
    }

    fn alloc_slow_path(&self, value: T) -> &mut T {
        &mut self.alloc_extend(iter::once(value))[0]
    }

    /// Uses the contents of an iterator to allocate values in the arena.
    /// Returns a mutable slice that contains these values.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    /// let abc = arena.alloc_extend("abcdefg".chars().take(3));
    /// assert_eq!(abc, ['a', 'b', 'c']);
    /// ```
    pub fn alloc_extend<I>(&self, iterable: I) -> &mut [T]
    where
        I: IntoIterator<Item = T>,
    {
        let mut iter = iterable.into_iter();

        let mut chunks = self.chunks.borrow_mut();

        let iter_min_len = iter.size_hint().0;
        let mut next_item_index;
        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if iter_min_len > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(iter_min_len);
            chunks.current.extend(iter);
            next_item_index = 0;
        } else {
            next_item_index = chunks.current.len();
            let mut i = 0;
            while let Some(elem) = iter.next() {
                if chunks.current.len() == chunks.current.capacity() {
                    // The iterator was larger than we could fit into the current chunk.
                    let chunks = &mut *chunks;
                    // Create a new chunk into which we can freely push the entire iterator.
                    chunks.reserve(i + 1);
                    let previous_chunk = chunks.rest.last_mut().unwrap();
                    let previous_chunk_len = previous_chunk.len();
                    // Move any elements we put into the previous chunk into this new chunk.
                    chunks
                        .current
                        .extend(previous_chunk.drain(previous_chunk_len - i..));
                    chunks.current.push(elem);
                    // And the remaining elements in the iterator.
                    chunks.current.extend(iter);
                    next_item_index = 0;
                    break;
                } else {
                    chunks.current.push(elem);
                }
                i += 1;
            }
        }

        // Extend the lifetime from that of `chunks_borrow` to that of `self`.
        // This is OK because we're careful to never move items
        // by never pushing to inner `Vec`s beyond their initial capacity.
        // The returned reference is unique (`&mut`):
        // the `Arena` never gives away references to existing items.
        unsafe {
            let new_len = chunks.current.len() - next_item_index;
            slice::from_raw_parts_mut(chunks.current.as_mut_ptr().add(next_item_index), new_len)
        }
    }

    /// Allocates space for a given number of values, but doesn't initialize it.
    ///
    /// ## Safety
    ///
    /// After calling this method, the arena considers the elements initialized. If you fail to
    /// initialize them (including by panicking during initialization), the arena will run
    /// destructors on the uninitialized memory. Therefore, you must initialize them.
    ///
    /// Considering how easy it is to cause undefined behaviour using this, you're advised to
    /// prefer the other (safe) methods, like [`alloc_extend`][Arena::alloc_extend].
    ///
    /// ## Example
    ///
    /// ```rust
    /// use std::mem::{self, MaybeUninit};
    /// use std::ptr;
    /// use typed_arena::Arena;
    ///
    /// // Transmute from MaybeUninit slice to slice of initialized T.
    /// // It is a separate function to preserve the lifetime of the reference.
    /// unsafe fn transmute_uninit<A>(r: &mut [MaybeUninit<A>]) -> &mut [A] {
    ///     mem::transmute(r)
    /// }
    ///
    /// let arena: Arena<bool> = Arena::new();
    /// let slice: &mut [bool];
    /// unsafe {
    ///     let uninitialized = arena.alloc_uninitialized(10);
    ///     for elem in uninitialized.iter_mut() {
    ///         ptr::write(elem.as_mut_ptr(), true);
    ///     }
    ///     slice = transmute_uninit(uninitialized);
    /// }
    /// ```
    ///
    /// ## Alternative allocation pattern
    ///
    /// To avoid dropping elements that are merely assumed to be initialized when a panic
    /// occurs, you can combine [`reserve_extend`][Arena::reserve_extend] with
    /// [`uninitialized_array`][Arena::uninitialized_array], initialize the elements, and then
    /// confirm them with this method. In that case, a panic during initialization leaks the
    /// already-initialized elements, but it doesn't cause UB.
    ///
    /// ```rust
    /// use std::mem::{self, MaybeUninit};
    /// use std::ptr;
    /// use typed_arena::Arena;
    ///
    /// unsafe fn transmute_uninit<A>(r: &mut [MaybeUninit<A>]) -> &mut [A] {
    ///     mem::transmute(r)
    /// }
    ///
    /// const COUNT: usize = 2;
    ///
    /// let arena: Arena<String> = Arena::new();
    ///
    /// arena.reserve_extend(COUNT);
    /// let slice: &mut [String];
    /// unsafe {
    ///     // Perform initialization before we claim the memory.
    ///     let uninitialized = arena.uninitialized_array();
    ///     assert!((*uninitialized).len() >= COUNT); // Ensured by reserve_extend
    ///     for elem in &mut (*uninitialized)[..COUNT] {
    ///         ptr::write(elem.as_mut_ptr(), "Hello".to_owned());
    ///     }
    ///     let addr = (*uninitialized).as_ptr() as usize;
    ///
    ///     // `alloc_uninitialized` returns the same memory, but "confirms" its allocation.
    ///     slice = transmute_uninit(arena.alloc_uninitialized(COUNT));
    ///     assert_eq!(addr, slice.as_ptr() as usize);
    ///     assert_eq!(slice, &["Hello".to_owned(), "Hello".to_owned()]);
    /// }
    /// ```
    pub unsafe fn alloc_uninitialized(&self, num: usize) -> &mut [MaybeUninit<T>] {
        let mut chunks = self.chunks.borrow_mut();

        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if num > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(num);
        }

        // At this point, the current chunk must have free capacity.
        let next_item_index = chunks.current.len();
        chunks.current.set_len(next_item_index + num);

        // Go through pointers, to make sure we never create a reference to uninitialized T.
        let start = chunks.current.as_mut_ptr().add(next_item_index);
        let start_uninit = start as *mut MaybeUninit<T>;
        slice::from_raw_parts_mut(start_uninit, num)
    }

    /// Makes sure there's enough contiguous space for at least `num` elements.
    ///
    /// This may save some work if called before [`alloc_extend`][Arena::alloc_extend]. It also
    /// allows a somewhat safer use pattern of [`alloc_uninitialized`][Arena::alloc_uninitialized].
    /// On the other hand, this might waste up to `num - 1` elements of space: if a new chunk
    /// needs to be allocated, the free slots remaining in the current chunk are never used.
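    ///
    /// ## Example
    ///
    /// A minimal sketch of the intended use: reserve space up front, then fill it with a bulk
    /// [`alloc_extend`][Arena::alloc_extend].
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena: Arena<u32> = Arena::new();
    ///
    /// // Reserve contiguous space for the values up front...
    /// arena.reserve_extend(100);
    /// // ...so the bulk allocation won't need to grow a chunk midway through.
    /// let slice = arena.alloc_extend(0..100);
    /// assert_eq!(slice.len(), 100);
    /// ```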
    pub fn reserve_extend(&self, num: usize) {
        let mut chunks = self.chunks.borrow_mut();

        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if num > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(num);
        }
    }

    /// Returns unused space.
    ///
    /// *This unused space is still not considered "allocated".* Therefore, it
    /// won't be dropped unless there are further calls to `alloc`,
    /// [`alloc_uninitialized`][Arena::alloc_uninitialized], or
    /// [`alloc_extend`][Arena::alloc_extend], which is why the method is safe.
    ///
    /// It returns a raw pointer to avoid creating multiple mutable references to the same place.
    /// It is up to the caller not to dereference it after any of the `alloc_` methods are called.
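    ///
    /// ## Example
    ///
    /// A small sketch: inspecting the unused space without claiming it. Reading the length is
    /// the only thing done with the raw pointer here.
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena: Arena<u8> = Arena::with_capacity(8);
    /// let unused = arena.uninitialized_array();
    /// // `with_capacity` pre-allocated at least this many slots.
    /// assert!(unsafe { (*unused).len() } >= 8);
    /// ```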
    pub fn uninitialized_array(&self) -> *mut [MaybeUninit<T>] {
        let mut chunks = self.chunks.borrow_mut();
        let len = chunks.current.capacity() - chunks.current.len();
        let next_item_index = chunks.current.len();

        unsafe {
            // Go through pointers, to make sure we never create a reference to uninitialized T.
            let start = chunks.current.as_mut_ptr().add(next_item_index);
            let start_uninit = start as *mut MaybeUninit<T>;
            ptr::slice_from_raw_parts_mut(start_uninit, len)
        }
    }

    /// Convert this `Arena` into a `Vec<T>`.
    ///
    /// Items in the resulting `Vec<T>` appear in the order that they were
    /// allocated in.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    ///
    /// arena.alloc("a");
    /// arena.alloc("b");
    /// arena.alloc("c");
    ///
    /// let easy_as_123 = arena.into_vec();
    ///
    /// assert_eq!(easy_as_123, vec!["a", "b", "c"]);
    /// ```
    pub fn into_vec(self) -> Vec<T> {
        let mut chunks = self.chunks.into_inner();
        // keep order of allocation in the resulting Vec
        let n = chunks
            .rest
            .iter()
            .fold(chunks.current.len(), |a, v| a + v.len());
        let mut result = Vec::with_capacity(n);
        for mut vec in chunks.rest {
            result.append(&mut vec);
        }
        result.append(&mut chunks.current);
        result
    }

    /// Returns an iterator that allows modifying each value.
    ///
    /// Items are yielded in the order that they were allocated.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// #[derive(Debug, PartialEq, Eq)]
    /// struct Point { x: i32, y: i32 }
    ///
    /// let mut arena = Arena::new();
    ///
    /// arena.alloc(Point { x: 0, y: 0 });
    /// arena.alloc(Point { x: 1, y: 1 });
    ///
    /// for point in arena.iter_mut() {
    ///     point.x += 10;
    /// }
    ///
    /// let points = arena.into_vec();
    ///
    /// assert_eq!(points, vec![Point { x: 10, y: 0 }, Point { x: 11, y: 1 }]);
    /// ```
    ///
    /// ## Immutable Iteration
    ///
    /// Note that there is no corresponding `iter` method. Access to the arena's contents
    /// requires mutable access to the arena itself.
    ///
    /// ```compile_fail
    /// use typed_arena::Arena;
    ///
    /// let mut arena = Arena::new();
    /// let x = arena.alloc(1);
    ///
    /// // borrow error!
    /// for i in arena.iter_mut() {
    ///     println!("i: {}", i);
    /// }
    ///
    /// // borrow error!
    /// *x = 2;
    /// ```
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<T> {
        let chunks = self.chunks.get_mut();
        let position = if !chunks.rest.is_empty() {
            let index = 0;
            let inner_iter = chunks.rest[index].iter_mut();
            // Extend the lifetime of the individual elements to that of the arena.
            // This is OK because we borrow the arena mutably to prevent new allocations
            // and we take care here to never move items inside the arena while the
            // iterator is alive.
            let inner_iter = unsafe { mem::transmute(inner_iter) };
            IterMutState::ChunkListRest { index, inner_iter }
        } else {
            // Extend the lifetime of the individual elements to that of the arena.
            let iter = unsafe { mem::transmute(chunks.current.iter_mut()) };
            IterMutState::ChunkListCurrent { iter }
        };
        IterMut {
            chunks,
            state: position,
        }
    }
}

impl Arena<u8> {
    /// Allocates a string slice and returns a mutable reference to it.
    ///
    /// This is on `Arena<u8>`, because string slices use byte slices (`[u8]`) as their backing
    /// storage.
    ///
    /// # Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena: Arena<u8> = Arena::new();
    /// let hello = arena.alloc_str("Hello world");
    /// assert_eq!("Hello world", hello);
    /// ```
    #[inline]
    pub fn alloc_str(&self, s: &str) -> &mut str {
        let buffer = self.alloc_extend(s.bytes());
        // Can't fail the utf8 validation, it already came in as utf8
        unsafe { str::from_utf8_unchecked_mut(buffer) }
    }
}

impl<T> Default for Arena<T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T> ChunkList<T> {
    #[inline(never)]
    #[cold]
    fn reserve(&mut self, additional: usize) {
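        // Growth strategy: the new chunk is at least double the current
        // chunk's capacity (keeping the chunk count logarithmic in the number
        // of elements), and at least `additional` rounded up to the next
        // power of two, so the caller's request always fits in one chunk.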
        let double_cap = self
            .current
            .capacity()
            .checked_mul(2)
            .expect("capacity overflow");
        let required_cap = additional
            .checked_next_power_of_two()
            .expect("capacity overflow");
        let new_capacity = cmp::max(double_cap, required_cap);
        let chunk = mem::replace(&mut self.current, Vec::with_capacity(new_capacity));
        self.rest.push(chunk);
    }
}

enum IterMutState<'a, T> {
    ChunkListRest {
        index: usize,
        inner_iter: slice::IterMut<'a, T>,
    },
    ChunkListCurrent {
        iter: slice::IterMut<'a, T>,
    },
}

/// Mutable arena iterator.
///
/// This struct is created by the [`iter_mut`](struct.Arena.html#method.iter_mut) method on [Arenas](struct.Arena.html).
pub struct IterMut<'a, T: 'a> {
    chunks: &'a mut ChunkList<T>,
    state: IterMutState<'a, T>,
}

impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;
    fn next(&mut self) -> Option<&'a mut T> {
        loop {
            self.state = match self.state {
                IterMutState::ChunkListRest {
                    mut index,
                    ref mut inner_iter,
                } => {
                    match inner_iter.next() {
                        Some(item) => return Some(item),
                        None => {
                            index += 1;
                            if index < self.chunks.rest.len() {
                                let inner_iter = self.chunks.rest[index].iter_mut();
                                // Extend the lifetime of the individual elements to that of the arena.
                                let inner_iter = unsafe { mem::transmute(inner_iter) };
                                IterMutState::ChunkListRest { index, inner_iter }
                            } else {
                                let iter = self.chunks.current.iter_mut();
                                // Extend the lifetime of the individual elements to that of the arena.
                                let iter = unsafe { mem::transmute(iter) };
                                IterMutState::ChunkListCurrent { iter }
                            }
                        }
                    }
                }
                IterMutState::ChunkListCurrent { ref mut iter } => return iter.next(),
            };
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let current_len = self.chunks.current.len();
        let current_cap = self.chunks.current.capacity();
        if self.chunks.rest.is_empty() {
            (current_len, Some(current_len))
        } else {
            let last_chunk_len = self
                .chunks
                .rest
                .last()
                .map(|chunk| chunk.len())
                .unwrap_or(0);

            // An estimate: count the last filled chunk and the current chunk
            // exactly, and allow up to `current_cap` more for the earlier
            // chunks (capacities at least double from one chunk to the next,
            // so the earlier chunks can't exceed that combined).
            let min = current_len + last_chunk_len;
            let max = min + current_cap;

            (min, Some(max))
        }
    }
}
635}