use std::alloc;
use std::ptr::NonNull;

mod fixed;
mod growable;

pub use fixed::CompactBytesSlice;
pub use growable::CompactBytes;
const INLINE_MASK: u8 = 0b1000_0000;

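/// Bytes stored inline in the struct itself, avoiding a heap allocation.
/// `buffer` holds the data and `data` packs the inline tag with the length.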
#[repr(C, align(8))]
#[derive(Copy, Clone)]
struct InlineBytes<const CAPACITY: usize> {
    buffer: [u8; CAPACITY],
    data: u8,
}

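/// Inline capacity that makes the struct the same size as `Vec<u8>`: 23 buffer
/// bytes plus the tag/length byte is 24 bytes on 64-bit targets.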
type InlineBytes23 = InlineBytes<23>;
static_assertions::assert_eq_size!(InlineBytes23, Vec<u8>);

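/// Inline capacity that makes the struct the same size as `Box<[u8]>`: 15
/// buffer bytes plus the tag/length byte is 16 bytes on 64-bit targets.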
type InlineBytes15 = InlineBytes<15>;
static_assertions::assert_eq_size!(InlineBytes15, Box<[u8]>);

impl<const CAPACITY: usize> InlineBytes<CAPACITY> {
    const CAPACITY: usize = CAPACITY;

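    /// Copies `slice` into a new inline buffer.
    ///
    /// # Safety
    ///
    /// `slice.len()` must be at most `CAPACITY`; this is checked only with a
    /// `debug_assert!`.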
    #[inline]
    pub unsafe fn new(slice: &[u8]) -> Self {
        debug_assert!(slice.len() <= CAPACITY);

        let len = slice.len();
        let mut buffer = [0u8; CAPACITY];

        // SAFETY: the caller guarantees `len <= CAPACITY`, so the copy stays in
        // bounds of `buffer`, and a `&[u8]` cannot overlap our fresh local array.
        unsafe {
            buffer
                .as_mut_ptr()
                .copy_from_nonoverlapping(slice.as_ptr(), len)
        };

        let data = INLINE_MASK | (len as u8);

        InlineBytes { buffer, data }
    }

    #[inline]
    pub const fn empty() -> Self {
        let buffer = [0u8; CAPACITY];

        #[allow(clippy::identity_op)]
        let data = INLINE_MASK | 0;

        InlineBytes { buffer, data }
    }

    pub fn len(&self) -> usize {
        (self.data & !INLINE_MASK) as usize
    }

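    /// Updates the stored length without touching the buffer.
    ///
    /// # Safety
    ///
    /// `new_len` must be at most `CAPACITY` (checked only with a
    /// `debug_assert!`), and the first `new_len` bytes of `buffer` must
    /// contain the intended data.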
    unsafe fn set_len(&mut self, new_len: usize) {
        debug_assert!(new_len <= CAPACITY);
        self.data = INLINE_MASK | (new_len as u8);
    }
}

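/// Bytes stored on the heap with separately tracked length and capacity,
/// mirroring the layout of `Vec<u8>`.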
#[repr(C)]
struct HeapBytesGrowable {
    ptr: NonNull<u8>,
    len: usize,
    cap: usize,
}
static_assertions::assert_eq_size!(HeapBytesGrowable, Vec<u8>);

impl HeapBytesGrowable {
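    /// Smallest heap allocation we make; requests below two machine words are
    /// rounded up.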
    pub const MIN_ALLOCATION_SIZE: usize = std::mem::size_of::<usize>() * 2;
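
    /// Largest supported allocation, `isize::MAX`, matching the size limit
    /// that `alloc::Layout` imposes.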
    pub const MAX_ALLOCATION_SIZE: usize = usize::MAX >> 1;

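    /// Allocates a buffer of at least `MIN_ALLOCATION_SIZE` bytes and copies
    /// `slice` into it.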
    #[inline]
    pub fn new(slice: &[u8]) -> Self {
        let len = slice.len();
        let cap = len.max(Self::MIN_ALLOCATION_SIZE);

        debug_assert!(cap <= Self::MAX_ALLOCATION_SIZE, "too large of allocation");
        let ptr = heap::alloc_ptr(cap);

        // SAFETY: `ptr` points to a fresh allocation of `cap >= len` bytes,
        // which cannot overlap `slice`.
        unsafe { ptr.as_ptr().copy_from_nonoverlapping(slice.as_ptr(), len) };

        HeapBytesGrowable { ptr, len, cap }
    }

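    /// Allocates an empty buffer with room for at least `capacity` bytes.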
    pub fn with_capacity(capacity: usize) -> Self {
        assert!(
            capacity <= Self::MAX_ALLOCATION_SIZE,
            "too large of allocation"
        );

        let len = 0;
        let cap = capacity.max(Self::MIN_ALLOCATION_SIZE);
        let ptr = heap::alloc_ptr(cap);

        HeapBytesGrowable { ptr, len, cap }
    }

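    /// Copies `slice` into a new buffer with room for at least `additional`
    /// more bytes, growing amortized to avoid quadratic reallocation.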
    pub fn with_additional(slice: &[u8], additional: usize) -> Self {
        let new_capacity = Self::amortized_growth(slice.len(), additional);
        let mut bytes = Self::with_capacity(new_capacity);

        debug_assert!(bytes.cap > slice.len());

        // SAFETY: `bytes` was just allocated with capacity greater than
        // `slice.len()`, and a fresh allocation cannot overlap `slice`.
        unsafe {
            std::ptr::copy_nonoverlapping(slice.as_ptr(), bytes.ptr.as_ptr(), slice.len());
        };
        bytes.len = slice.len();

        bytes
    }

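    /// Sets the logical length of the buffer.
    ///
    /// # Safety
    ///
    /// `len` must be at most `self.cap`, and the first `len` bytes must have
    /// been initialized.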
    pub unsafe fn set_len(&mut self, len: usize) {
        self.len = len;
    }

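    /// Resizes the allocation to `new_capacity` bytes (rounded up to
    /// `MIN_ALLOCATION_SIZE`), returning the resulting capacity, or `Err` if
    /// the request is invalid or the allocator fails.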
    pub fn realloc(&mut self, new_capacity: usize) -> Result<usize, ()> {
        // We cannot shrink below the current length without dropping bytes.
        if new_capacity < self.len {
            return Err(());
        }
        // Zero-sized allocations are not supported.
        if new_capacity == 0 {
            return Err(());
        }

        let new_capacity = new_capacity.max(Self::MIN_ALLOCATION_SIZE);

        // Nothing to do if the capacity would not change.
        if new_capacity == self.cap {
            return Ok(new_capacity);
        }

        let cur_layout = heap::layout(self.cap);
        let new_layout = heap::layout(new_capacity);

        // Defensive check: a `[u8]` layout's size should always equal its
        // capacity.
        let new_size = new_layout.size();
        if new_size < new_capacity {
            return Err(());
        }

        // SAFETY: `self.ptr` was allocated with `cur_layout`, and `new_size`
        // is non-zero and within `Layout`'s size limit. On failure the old
        // allocation is untouched, so `self` remains valid.
        let raw_ptr = unsafe { alloc::realloc(self.ptr.as_ptr(), cur_layout, new_size) };
        let ptr = NonNull::new(raw_ptr).ok_or(())?;

        self.ptr = ptr;
        self.cap = new_capacity;

        Ok(new_capacity)
    }

    #[inline]
    fn dealloc(&mut self) {
        heap::dealloc_ptr(self.ptr, self.cap);
    }

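    /// Computes an amortized growth target: at least `cur_len + additional`,
    /// but no less than 1.5x the current length, so repeated growth costs
    /// amortized O(1) per byte.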
    #[inline(always)]
    pub fn amortized_growth(cur_len: usize, additional: usize) -> usize {
        let required = cur_len.saturating_add(additional);
        let amortized = cur_len.saturating_mul(3) / 2;
        amortized.max(required)
    }
}

impl Drop for HeapBytesGrowable {
    fn drop(&mut self) {
        self.dealloc()
    }
}

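/// Bytes stored on the heap whose length is fixed at construction, mirroring
/// the layout of `Box<[u8]>` (the length doubles as the capacity).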
#[repr(C)]
struct HeapBytesFixed {
    ptr: NonNull<u8>,
    len: usize,
}
static_assertions::assert_eq_size!(HeapBytesFixed, Box<[u8]>);

impl HeapBytesFixed {
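    /// Largest supported allocation, `isize::MAX`, matching the size limit
    /// that `alloc::Layout` imposes.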
    pub const MAX_ALLOCATION_SIZE: usize = usize::MAX >> 1;

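    /// Allocates a buffer of exactly `slice.len()` bytes and copies `slice`
    /// into it. `slice` should be non-empty: a zero-length slice trips the
    /// `debug_assert!` in `heap::layout`.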
    #[inline]
    pub fn new(slice: &[u8]) -> Self {
        let len = slice.len();
        debug_assert!(len <= Self::MAX_ALLOCATION_SIZE, "too large of allocation");

        let ptr = heap::alloc_ptr(len);
        // SAFETY: `ptr` points to a fresh allocation of exactly `len` bytes,
        // which cannot overlap `slice`.
        unsafe { ptr.as_ptr().copy_from_nonoverlapping(slice.as_ptr(), len) };

        HeapBytesFixed { ptr, len }
    }

    #[inline]
    fn dealloc(&mut self) {
        heap::dealloc_ptr(self.ptr, self.len);
    }
}

mod heap {
    use std::alloc;
    use std::ptr::NonNull;

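    /// Allocates `capacity` bytes, panicking if the allocator returns null.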
    #[inline]
    pub(crate) fn alloc_ptr(capacity: usize) -> NonNull<u8> {
        let layout = layout(capacity);
        debug_assert!(layout.size() > 0);

        // SAFETY: `layout` has non-zero size, as asserted above.
        let ptr = unsafe { alloc::alloc(layout) };

        NonNull::new(ptr).expect("failed to allocate HeapBytes")
    }

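    /// Deallocates `capacity` bytes behind `ptr`. The pointer must have been
    /// returned by `alloc_ptr` with this same `capacity`.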
    #[inline]
    pub(crate) fn dealloc_ptr(ptr: NonNull<u8>, capacity: usize) {
        let layout = layout(capacity);

        // SAFETY: relies on the caller passing a pointer that was allocated by
        // `alloc_ptr` with this same `capacity`, so `layout` matches the
        // original allocation.
        unsafe { alloc::dealloc(ptr.as_ptr(), layout) };
    }

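    /// Builds the `Layout` for a `capacity`-byte allocation. Zero capacities
    /// are rejected with a `debug_assert!`, since zero-sized allocations are
    /// not valid to pass to the global allocator.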
    #[inline(always)]
    pub(crate) fn layout(capacity: usize) -> alloc::Layout {
        debug_assert!(capacity > 0, "tried to allocate HeapBytes with 0 capacity");
        alloc::Layout::array::<u8>(capacity).expect("valid capacity")
    }
}
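
// Minimal sanity tests for the internal representations above; a sketch that
// exercises only behavior defined in this file.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn inline_bytes_roundtrip() {
        let data = b"hello world";
        // SAFETY: 11 <= 23, within the inline capacity.
        let inline = unsafe { InlineBytes23::new(data) };
        assert_eq!(inline.len(), data.len());
        assert_eq!(&inline.buffer[..inline.len()], data);
    }

    #[test]
    fn heap_bytes_growable_reallocates() {
        let mut heap = HeapBytesGrowable::new(b"hello");
        assert_eq!(heap.len, 5);
        // Small requests are rounded up to the minimum allocation size.
        assert!(heap.cap >= HeapBytesGrowable::MIN_ALLOCATION_SIZE);

        let new_cap = heap.realloc(64).expect("realloc should succeed");
        assert_eq!(new_cap, 64);
        assert_eq!(heap.cap, 64);
    }

    #[test]
    fn amortized_growth_takes_the_max() {
        // 1.5x growth dominates a small `additional`: max(10 + 2, 15) == 15.
        assert_eq!(HeapBytesGrowable::amortized_growth(10, 2), 15);
        // A large `additional` dominates 1.5x growth: max(4 + 10, 6) == 14.
        assert_eq!(HeapBytesGrowable::amortized_growth(4, 10), 14);
    }
}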