1use core::ptr::NonNull;
23#[doc(inline)]
4pub use alloc_crate::alloc::{alloc, alloc_zeroed, dealloc, handle_alloc_error, realloc};
56use crate::stable::{assume, invalid_mut};
78use super::{AllocError, Allocator, Layout};
/// The global memory allocator.
///
/// This zero-sized type implements the [`Allocator`] trait by forwarding
/// calls to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](crate#functions).
#[derive(Copy, Clone, Default, Debug)]
pub struct Global;
impl Global {
    /// Shared implementation of [`Allocator::allocate`] and
    /// [`Allocator::allocate_zeroed`].
    ///
    /// Zero-sized requests are never forwarded to the global allocator
    /// (which does not support them); instead a dangling, well-aligned
    /// pointer is returned. Non-zero requests go through [`alloc`] /
    /// [`alloc_zeroed`], with a null return mapped to [`AllocError`].
    #[inline(always)]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            // Zero-sized allocation: fabricate a dangling pointer whose
            // address equals the alignment. It is non-null (align >= 1)
            // and properly aligned, which is all callers may rely on for
            // a zero-length block.
            0 => Ok(unsafe {
                NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    invalid_mut(layout.align()),
                    0,
                ))
            }),
            // SAFETY: `layout` is non-zero in size,
            // which is all `alloc`/`alloc_zeroed` require of us here.
            size => unsafe {
                let raw_ptr = if zeroed {
                    alloc_zeroed(layout)
                } else {
                    alloc(layout)
                };
                // A null pointer from the global allocator signals
                // allocation failure; surface it as `AllocError`.
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    ptr.as_ptr(),
                    size,
                )))
            },
        }
    }

    /// Shared implementation of [`Allocator::grow`] and
    /// [`Allocator::grow_zeroed`].
    ///
    /// # Safety
    ///
    /// Same contract as [`Allocator::grow`]: `ptr` must denote a block
    /// currently allocated by this allocator with layout `old_layout`, and
    /// `new_layout.size()` must be greater than or equal to
    /// `old_layout.size()`.
    #[inline(always)]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // The old block was zero-sized, so nothing was actually
            // allocated for it; "growing" is simply a fresh allocation.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero because it is greater than or
            // equal to `old_size` (the caller's safety condition) and this
            // arm only matches when `old_size != 0`. The alignments are
            // equal, so `realloc`'s layout requirements hold. Other
            // conditions must be upheld by the caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                assume(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                // `realloc` preserves the old contents but leaves the
                // grown tail uninitialized; zero it for `grow_zeroed`.
                if zeroed {
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    ptr.as_ptr(),
                    new_size,
                )))
            },

            // Alignment changed, so `realloc` cannot be used: allocate a new
            // block, copy the old contents, then free the old block.
            //
            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                core::ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
// SAFETY: all methods forward to the registered global allocator (via the
// free functions `alloc`/`alloc_zeroed`/`realloc`/`dealloc`), and `Global`
// is a unit struct, so every copy of it behaves as the same allocator —
// blocks allocated through one copy may be freed through another, as the
// `Allocator` contract requires.
unsafe impl Allocator for Global {
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline(always)]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized "allocations" were never forwarded to the global
        // allocator (see `alloc_impl`), so there is nothing to free here.
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    #[inline(always)]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline(always)]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline(always)]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero: free the old block and return a dangling,
            // well-aligned pointer, mirroring `alloc_impl`'s zero-size path.
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    invalid_mut(new_layout.align()),
                    0,
                )))
            },

            // SAFETY: `new_size` is non-zero (this arm only matches when the
            // size is not 0) and the alignments are equal, so `realloc` may
            // be used. Other conditions must be upheld by the caller.
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                assume(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    ptr.as_ptr(),
                    new_size,
                )))
            },

            // Alignment changed, so `realloc` cannot be used: allocate a new
            // (smaller) block, copy the surviving prefix, free the old block.
            //
            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                core::ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}