#![cfg_attr(feature = "alloc_trait", feature(allocator_api))]
#![allow(renamed_and_removed_lints)]
#![deny(missing_docs, broken_intra_doc_links)]
#![no_std]
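//! A jemalloc-backed allocator for Rust.
//!
//! The crate exposes [`Jemalloc`], a zero-sized handle that implements
//! `GlobalAlloc` (and, behind the `alloc_trait` feature, the unstable `Alloc`
//! trait) by forwarding to the jemalloc C API re-exported from
//! `tikv-jemalloc-sys`.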

#[cfg(feature = "alloc_trait")]
use core::alloc::{Alloc, AllocErr, CannotReallocInPlace, Excess};
use core::alloc::{GlobalAlloc, Layout};
#[cfg(feature = "alloc_trait")]
use core::ptr::NonNull;

use libc::{c_int, c_void};

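// The platform's `_Alignof(max_align_t)`: the largest alignment that the C
// `malloc` family is guaranteed to honor without an explicit alignment
// request.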
#[cfg(any(target_arch = "arm", target_arch = "mips", target_arch = "powerpc"))]
const ALIGNOF_MAX_ALIGN_T: usize = 8;
#[cfg(any(
    target_arch = "x86",
    target_arch = "x86_64",
    target_arch = "aarch64",
    target_arch = "powerpc64",
    target_arch = "loongarch64",
    target_arch = "mips64",
    target_arch = "riscv64",
    target_arch = "s390x",
    target_arch = "sparc64"
))]
const ALIGNOF_MAX_ALIGN_T: usize = 16;

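/// Maps a layout's `align`/`size` onto the flags passed to jemalloc's
/// `*allocx` functions.
///
/// Returns `0` when plain `malloc`/`calloc`/`realloc` can be used directly:
/// alignments no larger than `ALIGNOF_MAX_ALIGN_T` are already guaranteed for
/// allocations of at least `align` bytes. Otherwise the required alignment is
/// encoded with `MALLOCX_ALIGN`.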
fn layout_to_flags(align: usize, size: usize) -> c_int {
    if align <= ALIGNOF_MAX_ALIGN_T && align <= size {
        0
    } else {
        ffi::MALLOCX_ALIGN(align)
    }
}

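// `assume!(cond)` is `debug_assert!(cond)` in debug builds; in release builds
// the condition is assumed to hold, so violating it is undefined behavior
// (`unreachable_unchecked`). The macro may only be expanded in an `unsafe`
// context.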
macro_rules! assume {
    ($e:expr) => {
        debug_assert!($e);
        if !($e) {
            core::hint::unreachable_unchecked();
        }
    };
}

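/// Handle to the jemalloc allocator.
///
/// This zero-sized type implements `GlobalAlloc`, so it can be installed as a
/// program's global allocator. A minimal usage sketch, assuming the crate is
/// depended on under the name `tikv-jemallocator`:
///
/// ```rust,ignore
/// use tikv_jemallocator::Jemalloc;
///
/// #[global_allocator]
/// static GLOBAL: Jemalloc = Jemalloc;
/// ```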
#[derive(Copy, Clone, Default, Debug)]
pub struct Jemalloc;

unsafe impl GlobalAlloc for Jemalloc {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        assume!(layout.size() != 0);
        let flags = layout_to_flags(layout.align(), layout.size());
        let ptr = if flags == 0 {
            ffi::malloc(layout.size())
        } else {
            ffi::mallocx(layout.size(), flags)
        };
        ptr as *mut u8
    }

    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        assume!(layout.size() != 0);
        let flags = layout_to_flags(layout.align(), layout.size());
        let ptr = if flags == 0 {
            ffi::calloc(1, layout.size())
        } else {
            ffi::mallocx(layout.size(), flags | ffi::MALLOCX_ZERO)
        };
        ptr as *mut u8
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        assume!(!ptr.is_null());
        assume!(layout.size() != 0);
        let flags = layout_to_flags(layout.align(), layout.size());
        ffi::sdallocx(ptr as *mut c_void, layout.size(), flags)
    }

    #[inline]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        assume!(layout.size() != 0);
        assume!(new_size != 0);
        let flags = layout_to_flags(layout.align(), new_size);
        let ptr = if flags == 0 {
            ffi::realloc(ptr as *mut c_void, new_size)
        } else {
            ffi::rallocx(ptr as *mut c_void, new_size, flags)
        };
        ptr as *mut u8
    }
}

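// The unstable `Alloc` trait (nightly `allocator_api`), gated behind the
// `alloc_trait` Cargo feature. The basic methods delegate to the `GlobalAlloc`
// implementation above; the excess and in-place variants call jemalloc's
// `mallocx`/`rallocx`/`xallocx`/`nallocx` directly.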
#[cfg(feature = "alloc_trait")]
unsafe impl Alloc for Jemalloc {
    #[inline]
    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
        NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr)
    }

    #[inline]
    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
        NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr)
    }

    #[inline]
    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
    }

    #[inline]
    unsafe fn realloc(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
    ) -> Result<NonNull<u8>, AllocErr> {
        NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
    }

    #[inline]
    unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
        let flags = layout_to_flags(layout.align(), layout.size());
        let ptr = ffi::mallocx(layout.size(), flags);
        if let Some(nonnull) = NonNull::new(ptr as *mut u8) {
            let excess = ffi::nallocx(layout.size(), flags);
            Ok(Excess(nonnull, excess))
        } else {
            Err(AllocErr)
        }
    }

    #[inline]
    unsafe fn realloc_excess(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
    ) -> Result<Excess, AllocErr> {
        let flags = layout_to_flags(layout.align(), new_size);
        let ptr = ffi::rallocx(ptr.cast().as_ptr(), new_size, flags);
        if let Some(nonnull) = NonNull::new(ptr as *mut u8) {
            let excess = ffi::nallocx(new_size, flags);
            Ok(Excess(nonnull, excess))
        } else {
            Err(AllocErr)
        }
    }

    #[inline]
    fn usable_size(&self, layout: &Layout) -> (usize, usize) {
        let flags = layout_to_flags(layout.align(), layout.size());
        unsafe {
            let max = ffi::nallocx(layout.size(), flags);
            (layout.size(), max)
        }
    }

    #[inline]
    unsafe fn grow_in_place(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
    ) -> Result<(), CannotReallocInPlace> {
        let flags = layout_to_flags(layout.align(), new_size);
        // `xallocx` resizes the allocation in place and returns its new real
        // (usable) size, which is only >= `new_size` if the grow succeeded.
        let usable_size = ffi::xallocx(ptr.cast().as_ptr(), new_size, 0, flags);
        if usable_size >= new_size {
            Ok(())
        } else {
            // The allocation could not be grown in place and is left unchanged.
            Err(CannotReallocInPlace)
        }
    }

    #[inline]
    unsafe fn shrink_in_place(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
    ) -> Result<(), CannotReallocInPlace> {
        if new_size == layout.size() {
            return Ok(());
        }
        let flags = layout_to_flags(layout.align(), new_size);
        // `xallocx` returns the real (usable) size of the allocation after the
        // attempted in-place resize.
        let usable_size = ffi::xallocx(ptr.cast().as_ptr(), new_size, 0, flags);

        if usable_size < layout.size() {
            // The usable size dropped below the original size: jemalloc shrank
            // the allocation in place.
            Ok(())
        } else if usable_size == ffi::nallocx(new_size, flags) {
            // The usable size did not drop, but it already matches the size
            // class `new_size` maps to, so the allocation is as small as it
            // can get; the old and new sizes share a size class.
            debug_assert_eq!(
                ffi::nallocx(new_size, flags),
                ffi::nallocx(layout.size(), flags)
            );
            Ok(())
        } else {
            // The allocation still occupies a larger size class than
            // `new_size` requires; it could not be shrunk in place.
            Err(CannotReallocInPlace)
        }
    }
}

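/// Returns the usable size of the allocation pointed to by `ptr`, as reported
/// by jemalloc's `malloc_usable_size`. The returned value may be larger than
/// the size that was originally requested.
///
/// A minimal sketch of its use, assuming [`Jemalloc`] is installed as the
/// calling program's `#[global_allocator]` and `std` is available there:
///
/// ```rust,ignore
/// let boxed = Box::new([0u8; 100]);
/// let real = unsafe { tikv_jemallocator::usable_size(&*boxed) };
/// assert!(real >= 100);
/// ```
///
/// # Safety
///
/// `ptr` must point to a live allocation obtained from jemalloc.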
pub unsafe fn usable_size<T>(ptr: *const T) -> usize {
    ffi::malloc_usable_size(ptr as *const c_void)
}

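// Raw C bindings to jemalloc, re-exported from the `tikv-jemalloc-sys` crate.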
mod ffi {
    pub use tikv_jemalloc_sys::*;
}