Commit b92dcee

perf(utils): call const_[de]allocate directly instead of sub-allocating with rlsf
Since CTFE-heap deallocation is now [supported natively][1], we don't need our own sub-allocator anymore. This also speeds up compilation.

[1]: rust-lang/rust#92274
1 parent: 335790a
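For reference, here is a minimal sketch of the intrinsic pair this commit switches to. It is an illustration, not code from the commit: it assumes a contemporary nightly toolchain with the `core_intrinsics` and `const_heap` feature gates, and the function name is invented.

#![feature(core_intrinsics, const_heap)]

const fn ctfe_heap_roundtrip() {
    unsafe {
        // Allocate 64 bytes with 8-byte alignment on the CTFE heap. During
        // const evaluation this either succeeds or aborts compilation.
        let p = core::intrinsics::const_allocate(64, 8);
        // Hand the block back to the interpreter. This is the operation that
        // rust-lang/rust#92274 added; before it, CTFE-heap memory could only
        // be leaked or sub-allocated (hence the rlsf-based allocator).
        core::intrinsics::const_deallocate(p, 64, 8);
    }
}

// Force the round trip to run at compile time.
const _: () = ctfe_heap_roundtrip();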

File tree

2 files changed (+7, -71 lines)


src/r3_core/src/lib.rs (+1)
@@ -14,6 +14,7 @@
 #![feature(const_ptr_offset_from)]
 #![feature(maybe_uninit_slice)]
 #![feature(const_mut_refs)]
+#![feature(const_nonnull_new)]
 #![feature(const_slice_from_raw_parts)]
 #![feature(const_option)]
 #![feature(const_option_ext)]
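The new `const_nonnull_new` gate is what lets the rewritten `allocate` below call `NonNull::new` in a `const` context; the deleted code had to assert non-nullness and use `NonNull::new_unchecked` instead. A rough sketch of what the gate enables, assuming a nightly toolchain of the same era (the helper name is invented):

#![feature(const_nonnull_new)]
use core::ptr::NonNull;

// With the gate enabled, the null check inside `NonNull::new` is usable
// from a `const fn`, so fallible pointer wrapping works at compile time.
const fn as_non_null(p: *mut u8) -> Option<NonNull<u8>> {
    NonNull::new(p)
}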

src/r3_core/src/utils/alloc/allocator.rs (+6, -71)
@@ -12,18 +12,9 @@ macro_rules! const_try_result {
     };
 }
 
-// FIXME: Once `const_deallocate` is added, we won't have to sub-allocate
-// `const` heap allocations, and we won't need `rlsf`
-
 /// Compile-time allocator.
 ///
-/// It's implemented on top of [`core::intrinsics::const_allocate`][]. Although
-/// the deallocation counterpart of the intrinsic function `const_deallocate`
-/// doesn't exist yet, `ConstAllocator` is capable of reusing deallocated
-/// regions as long as they are created from the same instance of
-/// `ConstAllocator`. This is accomplished by, instead of making a call to
-/// `const_allocate` for each allocation request, slicing out each allocated
-/// region from larger blocks using a dynamic storage allocation algorithm.
+/// This is implemented on top of [`core::intrinsics::const_allocate`][].
 ///
 /// # Stability
 ///
@@ -37,17 +28,8 @@ pub struct ConstAllocator {
     /// - Live allocations created through `ConstAllocator as Allocator`.
     ///
     ref_count: *mut usize,
-    tlsf: *mut TheFlexTlsf,
 }
 
-type TheFlexTlsf = rlsf::FlexTlsf<
-    ConstFlexSource,
-    usize,
-    usize,
-    { usize::BITS as usize },
-    { usize::BITS as usize },
->;
-
 impl ConstAllocator {
     /// Call the specified closure, passing a reference to a `Self` constructed
     /// on the stack.
@@ -173,18 +155,7 @@ impl ConstAllocator {
         let mut ref_count = RefCountGuard(1);
         let ref_count = (&mut ref_count.0) as *mut _;
 
-        struct TlsfGuard(Option<TheFlexTlsf>);
-        impl const Drop for TlsfGuard {
-            fn drop(&mut self) {
-                self.0.take().unwrap().destroy();
-            }
-        }
-
-        let mut tlsf = TlsfGuard(Some(TheFlexTlsf::new(ConstFlexSource)));
-        let this = Self {
-            ref_count,
-            tlsf: tlsf.0.as_mut().unwrap(),
-        };
+        let this = Self { ref_count };
 
         f.call(&this)
     }
@@ -231,7 +202,6 @@ impl const Clone for ConstAllocator {
         unsafe { *self.ref_count += 1 };
         Self {
             ref_count: self.ref_count,
-            tlsf: self.tlsf,
        }
    }

@@ -426,52 +396,17 @@
 
 unsafe impl const Allocator for ConstAllocator {
     fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-        let tlsf = unsafe { &mut *self.tlsf };
-        if let Some(x) = tlsf.allocate(layout) {
+        let ptr = unsafe { core::intrinsics::const_allocate(layout.size(), layout.align()) };
+        if let Some(ptr) = NonNull::new(ptr) {
             unsafe { *self.ref_count += 1 };
-            Ok(rlsf::nonnull_slice_from_raw_parts(x, layout.size()))
+            Ok(rlsf::nonnull_slice_from_raw_parts(ptr, layout.size()))
         } else {
             Err(AllocError)
         }
     }
 
     unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
-        let tlsf = unsafe { &mut *self.tlsf };
-        unsafe { tlsf.deallocate(ptr, layout.align()) };
+        unsafe { core::intrinsics::const_deallocate(ptr.as_ptr(), layout.size(), layout.align()) };
         unsafe { *self.ref_count -= 1 };
     }
 }
-
-/// An implementation of `FlexSource` based on the CTFE heap.
-struct ConstFlexSource;
-
-/// Theoretically could be one, but a larger value is chosen to lessen the
-/// fragmentation
-const BLOCK_SIZE: usize = 1 << 16;
-
-const _: () = assert!(BLOCK_SIZE >= rlsf::ALIGN);
-
-unsafe impl const rlsf::FlexSource for ConstFlexSource {
-    unsafe fn alloc(&mut self, min_size: usize) -> Option<core::ptr::NonNull<[u8]>> {
-        // FIXME: Work-around for `?` being unsupported in `const fn`
-        let size = if let Some(size) = min_size.checked_add(BLOCK_SIZE - 1) {
-            size & !(BLOCK_SIZE - 1)
-        } else {
-            return None;
-        };
-
-        assert!(min_size != 0);
-
-        let ptr = unsafe { core::intrinsics::const_allocate(size, BLOCK_SIZE) };
-
-        // FIXME: `NonNull::new` is not `const fn` yet
-        assert!(!ptr.guaranteed_eq(core::ptr::null_mut()));
-        let ptr = unsafe { NonNull::new_unchecked(ptr) };
-
-        Some(rlsf::nonnull_slice_from_raw_parts(ptr, size))
-    }
-
-    fn min_align(&self) -> usize {
-        BLOCK_SIZE
-    }
-}
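With the sub-allocator gone, each `allocate` maps one-to-one onto `const_allocate`, and the allocator's leak check reduces to the `ref_count` bookkeeping kept by the two methods above. A hypothetical caller, sketched against nightly's `allocator_api` (the function name is invented):

#![feature(allocator_api)]
use core::alloc::{Allocator, Layout};

// Works for any `Allocator`, including `ConstAllocator`: every successful
// allocation must be paired with a deallocation so that `ref_count`
// returns to its initial value before the allocator is dropped.
fn exercise<A: Allocator>(alloc: &A) {
    let layout = Layout::new::<u64>();
    if let Ok(block) = alloc.allocate(layout) {
        // `block` is a `NonNull<[u8]>`; `deallocate` takes the thin pointer.
        unsafe { alloc.deallocate(block.cast::<u8>(), layout) };
    }
}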
