1use crate::{Alignment, TryReserveError};
2use alloc::alloc::{alloc, dealloc, handle_alloc_error, realloc, Layout};
3use core::{
4 marker::PhantomData,
5 mem::{align_of, size_of},
6 ptr::{null_mut, NonNull},
7};
8
/// Raw aligned buffer: pointer, capacity, and an alignment policy.
///
/// Owns `capacity * size_of::<T>()` bytes allocated at the alignment given
/// by `self.align.alignment(align_of::<T>())`. It tracks no length and never
/// drops elements — the higher-level vector is responsible for its contents.
pub struct ARawVec<T, A: Alignment> {
    // Never null: either a live allocation or a dangling-but-aligned pointer
    // (see `new_unchecked`).
    pub ptr: NonNull<T>,
    // Number of `T` slots allocated; `usize::MAX` for zero-sized `T`.
    pub capacity: usize,
    // Alignment policy value (project-defined `Alignment` trait).
    pub align: A,
    // Marks logical ownership of `T` for variance/drop-check purposes.
    _marker: PhantomData<T>,
}
15
impl<T, A: Alignment> Drop for ARawVec<T, A> {
    #[inline]
    fn drop(&mut self) {
        // Frees the backing buffer only; elements are never dropped here.
        // For zero-sized `T` or capacity 0 this is zero bytes and nothing
        // was ever allocated, so we skip the dealloc.
        let size_bytes = self.capacity * size_of::<T>();
        if size_bytes > 0 {
            // SAFETY: a non-zero byte size means `ptr` came from the global
            // allocator with exactly this size and alignment, so the layout
            // round-trips and the pointer is valid to deallocate.
            unsafe {
                dealloc(
                    self.ptr.as_ptr() as *mut u8,
                    Layout::from_size_align_unchecked(
                        size_bytes,
                        self.align.alignment(align_of::<T>()),
                    ),
                )
            }
        }
    }
}
35
/// Panics with a "capacity overflow" message.
///
/// Marked `#[cold]` and `#[inline(never)]` so this failure path stays
/// out-of-line and does not pollute the hot allocation/growth paths
/// (the same treatment the standard library gives its RawVec error paths).
#[cold]
#[inline(never)]
pub fn capacity_overflow() -> ! {
    panic!("capacity overflow")
}
39
40impl<T, A: Alignment> ARawVec<T, A> {
    /// Creates an empty vector without allocating.
    ///
    /// For zero-sized `T` the capacity is `usize::MAX` (every "slot" is
    /// free); otherwise it is 0. The stored pointer is a dangling pointer
    /// at address `align`, which is therefore non-null and suitably aligned.
    ///
    /// # Safety
    ///
    /// `align` must be non-zero and a valid alignment for `T` — presumably a
    /// power of two at least `align_of::<T>()`; confirm against the
    /// `Alignment` trait's contract.
    #[inline]
    pub unsafe fn new_unchecked(align: usize) -> Self {
        let cap = if size_of::<T>() == 0 { usize::MAX } else { 0 };
        Self::from_raw_parts(null_mut::<u8>().wrapping_add(align) as *mut T, cap, align)
    }
50
    /// Allocates an uninitialized buffer for `capacity` elements of `T`.
    ///
    /// `capacity == 0` and zero-sized `T` fall back to the non-allocating
    /// [`Self::new_unchecked`]. Panics on capacity overflow and aborts via
    /// `handle_alloc_error` on allocation failure (inside the free helper).
    ///
    /// # Safety
    ///
    /// Same alignment contract as [`Self::new_unchecked`].
    #[inline]
    pub unsafe fn with_capacity_unchecked(capacity: usize, align: usize) -> Self {
        if capacity == 0 || size_of::<T>() == 0 {
            Self::new_unchecked(align)
        } else {
            Self {
                // SAFETY: the helper returns a non-null pointer or diverges
                // (panic / handle_alloc_error), so `new_unchecked` never
                // sees null here.
                ptr: NonNull::<T>::new_unchecked(with_capacity_unchecked(
                    capacity,
                    align,
                    size_of::<T>(),
                ) as *mut T),
                capacity,
                align: A::new(align, align_of::<T>()),
                _marker: PhantomData,
            }
        }
    }
72
    /// Fallible version of [`Self::with_capacity_unchecked`]: returns `Err`
    /// on capacity overflow or allocation failure instead of
    /// panicking/aborting.
    ///
    /// # Safety
    ///
    /// Same alignment contract as [`Self::new_unchecked`].
    #[inline]
    pub unsafe fn try_with_capacity_unchecked(
        capacity: usize,
        align: usize,
    ) -> Result<Self, TryReserveError> {
        if capacity == 0 || size_of::<T>() == 0 {
            Ok(Self::new_unchecked(align))
        } else {
            Ok(Self {
                // SAFETY: on success the helper returns a non-null pointer.
                ptr: NonNull::<T>::new_unchecked(try_with_capacity_unchecked(
                    capacity,
                    align,
                    size_of::<T>(),
                )? as *mut T),
                capacity,
                align: A::new(align, align_of::<T>()),
                _marker: PhantomData,
            })
        }
    }
97
    // Minimum capacity for the first real allocation, keyed off the element
    // size so tiny elements don't trigger many small reallocations while
    // huge elements don't over-allocate (same policy as std's RawVec).
    const MIN_NON_ZERO_CAP: usize = if size_of::<T>() == 1 {
        8
    } else if size_of::<T>() <= 1024 {
        4
    } else {
        1
    };
105
    /// Grows the buffer to hold at least `len + additional` elements,
    /// at least doubling the current capacity to amortize reallocations.
    ///
    /// # Safety
    ///
    /// `len` must not exceed the current capacity, and `additional` must be
    /// non-zero (debug-asserted only).
    pub unsafe fn grow_amortized(&mut self, len: usize, additional: usize) {
        debug_assert!(additional > 0);
        // First allocation: start at MIN_NON_ZERO_CAP so small vectors do
        // not reallocate on every push. (Dropping the old `*self` is fine:
        // capacity 0 means Drop deallocates nothing.)
        if self.capacity == 0 {
            *self = Self::with_capacity_unchecked(
                additional.max(Self::MIN_NON_ZERO_CAP),
                self.align.alignment(align_of::<T>()),
            );
            return;
        }

        // Zero-sized `T` starts at capacity usize::MAX, so any growth
        // request is by definition an overflow.
        if size_of::<T>() == 0 {
            debug_assert_eq!(self.capacity, usize::MAX);
            capacity_overflow();
        }

        let new_cap = match len.checked_add(additional) {
            Some(cap) => cap,
            None => capacity_overflow(),
        };

        // Amortize: never grow by less than a factor of two.
        let new_cap = new_cap.max(self.capacity * 2);
        let new_cap = new_cap.max(Self::MIN_NON_ZERO_CAP);

        let ptr = {
            grow_unchecked(
                self.as_mut_ptr() as *mut u8,
                self.capacity,
                new_cap,
                self.align.alignment(align_of::<T>()),
                size_of::<T>(),
            ) as *mut T
        };

        self.capacity = new_cap;
        // SAFETY: grow_unchecked returns a non-null pointer or diverges.
        self.ptr = NonNull::<T>::new_unchecked(ptr);
    }
143
    /// Grows the buffer to hold exactly `len + additional` elements
    /// (no amortized over-allocation).
    ///
    /// # Safety
    ///
    /// `len` must not exceed the current capacity, and `additional` must be
    /// non-zero (debug-asserted only).
    pub unsafe fn grow_exact(&mut self, len: usize, additional: usize) {
        debug_assert!(additional > 0);
        // Zero-sized `T` starts at capacity usize::MAX, so any growth
        // request is by definition an overflow.
        if size_of::<T>() == 0 {
            debug_assert_eq!(self.capacity, usize::MAX);
            capacity_overflow();
        }

        // First allocation: dropping the old `*self` is fine, capacity 0
        // means Drop deallocates nothing.
        if self.capacity == 0 {
            *self =
                Self::with_capacity_unchecked(additional, self.align.alignment(align_of::<T>()));
            return;
        }

        let new_cap = match len.checked_add(additional) {
            Some(cap) => cap,
            None => capacity_overflow(),
        };

        let ptr = grow_unchecked(
            self.as_mut_ptr() as *mut u8,
            self.capacity,
            new_cap,
            self.align.alignment(align_of::<T>()),
            size_of::<T>(),
        ) as *mut T;

        self.capacity = new_cap;
        // SAFETY: grow_unchecked returns a non-null pointer or diverges.
        self.ptr = NonNull::<T>::new_unchecked(ptr);
    }
173
    /// Fallible version of [`Self::grow_amortized`]: returns `Err` on
    /// capacity overflow or allocation failure instead of panicking/aborting.
    ///
    /// # Safety
    ///
    /// `len` must not exceed the current capacity, and `additional` must be
    /// non-zero (debug-asserted only).
    pub unsafe fn try_grow_amortized(
        &mut self,
        len: usize,
        additional: usize,
    ) -> Result<(), TryReserveError> {
        debug_assert!(additional > 0);
        // First allocation: start at MIN_NON_ZERO_CAP so small vectors do
        // not reallocate on every push. (Dropping the old `*self` is fine:
        // capacity 0 means Drop deallocates nothing.)
        if self.capacity == 0 {
            *self = Self::try_with_capacity_unchecked(
                additional.max(Self::MIN_NON_ZERO_CAP),
                self.align.alignment(align_of::<T>()),
            )?;
            return Ok(());
        }

        // Zero-sized `T` starts at capacity usize::MAX, so any growth
        // request is by definition an overflow.
        if size_of::<T>() == 0 {
            debug_assert_eq!(self.capacity, usize::MAX);
            return Err(TryReserveError::CapacityOverflow);
        }

        let new_cap = match len.checked_add(additional) {
            Some(cap) => cap,
            None => return Err(TryReserveError::CapacityOverflow),
        };

        // Amortize: never grow by less than a factor of two.
        let new_cap = new_cap.max(self.capacity * 2);
        let new_cap = new_cap.max(Self::MIN_NON_ZERO_CAP);

        let ptr = {
            try_grow_unchecked(
                self.as_mut_ptr() as *mut u8,
                self.capacity,
                new_cap,
                self.align.alignment(align_of::<T>()),
                size_of::<T>(),
            )? as *mut T
        };

        self.capacity = new_cap;
        // SAFETY: on success try_grow_unchecked returns a non-null pointer.
        self.ptr = NonNull::<T>::new_unchecked(ptr);
        Ok(())
    }
216
    /// Fallible version of [`Self::grow_exact`]: returns `Err` on capacity
    /// overflow or allocation failure instead of panicking/aborting.
    ///
    /// # Safety
    ///
    /// `len` must not exceed the current capacity, and `additional` must be
    /// non-zero (debug-asserted only).
    pub unsafe fn try_grow_exact(
        &mut self,
        len: usize,
        additional: usize,
    ) -> Result<(), TryReserveError> {
        debug_assert!(additional > 0);
        // Zero-sized `T` starts at capacity usize::MAX, so any growth
        // request is by definition an overflow.
        if size_of::<T>() == 0 {
            debug_assert_eq!(self.capacity, usize::MAX);
            return Err(TryReserveError::CapacityOverflow);
        }

        // First allocation: dropping the old `*self` is fine, capacity 0
        // means Drop deallocates nothing.
        if self.capacity == 0 {
            *self = Self::try_with_capacity_unchecked(
                additional,
                self.align.alignment(align_of::<T>()),
            )?;
            return Ok(());
        }

        let new_cap = match len.checked_add(additional) {
            Some(cap) => cap,
            None => return Err(TryReserveError::CapacityOverflow),
        };

        let ptr = try_grow_unchecked(
            self.as_mut_ptr() as *mut u8,
            self.capacity,
            new_cap,
            self.align.alignment(align_of::<T>()),
            size_of::<T>(),
        )? as *mut T;

        self.capacity = new_cap;
        // SAFETY: on success try_grow_unchecked returns a non-null pointer.
        self.ptr = NonNull::<T>::new_unchecked(ptr);
        Ok(())
    }
253
254 pub unsafe fn shrink_to(&mut self, len: usize) {
255 if size_of::<T>() == 0 {
256 return;
257 }
258
259 debug_assert!(len < self.capacity());
260 let size_of = size_of::<T>();
261 let old_capacity = self.capacity;
262 let align = self.align;
263 let old_ptr = self.ptr.as_ptr() as *mut u8;
264
265 let new_size_bytes = len * size_of;
268 let old_size_bytes = old_capacity * size_of;
269 let old_layout =
270 Layout::from_size_align_unchecked(old_size_bytes, align.alignment(align_of::<T>()));
271
272 let ptr = realloc(old_ptr, old_layout, new_size_bytes);
273 let ptr = ptr as *mut T;
274 self.capacity = len;
275 self.ptr = NonNull::<T>::new_unchecked(ptr);
276 }
277
    /// Builds an `ARawVec` from its raw parts without any checks.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null and either dangling-but-aligned (capacity 0 or
    /// zero-sized `T`) or an allocation of `capacity` elements made at
    /// alignment `align`; `align` must satisfy the same contract as
    /// [`Self::new_unchecked`].
    #[inline]
    pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize, align: usize) -> Self {
        Self {
            ptr: NonNull::<T>::new_unchecked(ptr),
            capacity,
            align: A::new(align, align_of::<T>()),
            _marker: PhantomData,
        }
    }
287
    /// Number of elements the buffer can hold without reallocating.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.capacity
    }
293
    /// Effective byte alignment of the buffer, as resolved by the
    /// `Alignment` policy against `align_of::<T>()`.
    #[inline]
    pub fn align(&self) -> usize {
        self.align.alignment(align_of::<T>())
    }
298
    /// Raw const pointer to the start of the buffer (may be dangling when
    /// capacity is 0; never null).
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self.ptr.as_ptr()
    }
303
    /// Raw mutable pointer to the start of the buffer (may be dangling when
    /// capacity is 0; never null).
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.ptr.as_ptr()
    }
308}
309
310pub unsafe fn with_capacity_unchecked(capacity: usize, align: usize, size_of: usize) -> *mut u8 {
311 let size_bytes = match capacity.checked_mul(size_of) {
312 Some(size_bytes) => size_bytes,
313 None => capacity_overflow(),
314 };
315 debug_assert!(size_bytes > 0);
316 let will_overflow = size_bytes > usize::MAX - (align - 1);
317 if will_overflow || !is_valid_alloc(size_bytes) {
318 capacity_overflow();
319 }
320
321 let layout = Layout::from_size_align_unchecked(size_bytes, align);
322 let ptr = alloc(layout);
323 if ptr.is_null() {
324 handle_alloc_error(layout);
325 }
326 ptr
327}
328
329unsafe fn grow_unchecked(
330 old_ptr: *mut u8,
331 old_capacity: usize,
332 new_capacity: usize,
333 align: usize,
334 size_of: usize,
335) -> *mut u8 {
336 let new_size_bytes = match new_capacity.checked_mul(size_of) {
337 Some(size_bytes) => size_bytes,
338 None => capacity_overflow(),
339 };
340 let will_overflow = new_size_bytes > usize::MAX - (align - 1);
341 if will_overflow || !is_valid_alloc(new_size_bytes) {
342 capacity_overflow();
343 }
344
345 let old_size_bytes = old_capacity * size_of;
347 let old_layout = Layout::from_size_align_unchecked(old_size_bytes, align);
348
349 let ptr = realloc(old_ptr, old_layout, new_size_bytes);
350
351 if ptr.is_null() {
352 let new_layout = Layout::from_size_align_unchecked(old_size_bytes, align);
353 handle_alloc_error(new_layout);
354 }
355
356 ptr
357}
358
359pub unsafe fn try_with_capacity_unchecked(
360 capacity: usize,
361 align: usize,
362 size_of: usize,
363) -> Result<*mut u8, TryReserveError> {
364 let size_bytes = match capacity.checked_mul(size_of) {
365 Some(size_bytes) => size_bytes,
366 None => return Err(TryReserveError::CapacityOverflow),
367 };
368 debug_assert!(size_bytes > 0);
369 let will_overflow = size_bytes > usize::MAX - (align - 1);
370 if will_overflow || !is_valid_alloc(size_bytes) {
371 return Err(TryReserveError::CapacityOverflow);
372 }
373
374 let layout = Layout::from_size_align_unchecked(size_bytes, align);
375 let ptr = alloc(layout);
376 if ptr.is_null() {
377 return Err(TryReserveError::AllocError { layout });
378 }
379 Ok(ptr)
380}
381
/// Fallible counterpart of `grow_unchecked`: returns `Err(TryReserveError)`
/// on overflow or reallocation failure instead of panicking/aborting.
///
/// # Safety
///
/// `old_ptr` must have been allocated with exactly `old_capacity * size_of`
/// bytes at alignment `align`, and `align` must be a valid alignment.
unsafe fn try_grow_unchecked(
    old_ptr: *mut u8,
    old_capacity: usize,
    new_capacity: usize,
    align: usize,
    size_of: usize,
) -> Result<*mut u8, TryReserveError> {
    let new_size_bytes = match new_capacity.checked_mul(size_of) {
        Some(size_bytes) => size_bytes,
        None => return Err(TryReserveError::CapacityOverflow),
    };
    // `Layout` requires that the size, rounded up to `align`, not overflow.
    let will_overflow = new_size_bytes > usize::MAX - (align - 1);
    if will_overflow || !is_valid_alloc(new_size_bytes) {
        return Err(TryReserveError::CapacityOverflow);
    }

    // SAFETY (caller): `old_ptr` was allocated with exactly this layout.
    let old_size_bytes = old_capacity * size_of;
    let old_layout = Layout::from_size_align_unchecked(old_size_bytes, align);

    let ptr = realloc(old_ptr, old_layout, new_size_bytes);

    if ptr.is_null() {
        // Report the layout that failed (the new, larger one).
        let layout = Layout::from_size_align_unchecked(new_size_bytes, align);
        return Err(TryReserveError::AllocError { layout });
    }

    Ok(ptr)
}
411
/// Sanity bound applied before calling the global allocator.
///
/// On targets narrower than 64 bits, sizes above `isize::MAX` are rejected
/// here; on 64-bit targets the allocator itself is trusted to fail first
/// (the same policy as the standard library's RawVec).
#[inline]
fn is_valid_alloc(alloc_size: usize) -> bool {
    if usize::BITS < 64 {
        alloc_size <= isize::MAX as usize
    } else {
        true
    }
}