From 4ba8a455191083dbfcd6d91d97ce535b5e04fc17 Mon Sep 17 00:00:00 2001 From: Pierre Avital Date: Fri, 5 Jul 2024 13:46:42 +0200 Subject: [PATCH] WHY DID I NOT COMMIT ANYTHING EARLIER!? WHYYY!??? --- stabby-abi/build.rs | 3 + .../src/alloc/allocators/freelist_alloc.rs | 375 +++++---- stabby-abi/src/alloc/allocators/mod.rs | 81 +- stabby-abi/src/alloc/boxed.rs | 18 +- stabby-abi/src/alloc/collections/arc_btree.rs | 787 ++++++++++-------- stabby-abi/src/alloc/mod.rs | 161 ++-- stabby-abi/src/alloc/single_or_vec.rs | 2 +- stabby-abi/src/alloc/string.rs | 6 +- stabby-abi/src/alloc/sync.rs | 27 +- stabby-abi/src/alloc/vec.rs | 8 +- stabby-abi/src/istable.rs | 4 + stabby-abi/src/stable_impls/mod.rs | 46 +- stabby-abi/src/vtable/mod.rs | 6 +- stabby-macros/src/enums.rs | 12 +- stabby-macros/src/lib.rs | 258 +++++- stabby-macros/src/structs.rs | 6 +- stabby-macros/src/unions.rs | 4 +- stabby/Cargo.toml | 5 + stabby/benches/allocators.rs | 30 + 19 files changed, 1182 insertions(+), 657 deletions(-) create mode 100644 stabby/benches/allocators.rs diff --git a/stabby-abi/build.rs b/stabby-abi/build.rs index 9bdc000..eeda0f7 100644 --- a/stabby-abi/build.rs +++ b/stabby-abi/build.rs @@ -109,6 +109,9 @@ fn main() { .max(10); tuples(max_tuple).unwrap(); println!("cargo:rustc-check-cfg=cfg(stabby_nightly, values(none()))"); + println!( + r#"cargo:rustc-check-cfg=cfg(stabby_default_alloc, values(none(), "RustAlloc", "LibcAlloc"))"# + ); println!( r#"cargo:rustc-check-cfg=cfg(stabby_check_unreachable, values(none(), "true", "false"))"# ); diff --git a/stabby-abi/src/alloc/allocators/freelist_alloc.rs b/stabby-abi/src/alloc/allocators/freelist_alloc.rs index f7f50dd..07ec779 100644 --- a/stabby-abi/src/alloc/allocators/freelist_alloc.rs +++ b/stabby-abi/src/alloc/allocators/freelist_alloc.rs @@ -1,75 +1,159 @@ -use core::{ - ffi::c_void, - mem::MaybeUninit, - ptr::NonNull, - sync::atomic::{AtomicPtr, AtomicUsize, Ordering}, -}; +use core::{mem::MaybeUninit, ptr::NonNull, sync::atomic::AtomicPtr}; -/// A simple free-list based allocator. -/// +/// A global [`FreelistAlloc`]. /// +/// This allocator is 0-sized and thread safe (by spin-lock). #[crate::stabby] #[derive(Clone, Copy, Default)] -pub struct FreelistAlloc { +pub struct FreelistGlobalAlloc { inner: [u8; 0], } -impl core::fmt::Debug for FreelistAlloc { +impl core::fmt::Debug for FreelistGlobalAlloc { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { f.write_str("FreelistAlloc") } } -impl FreelistAlloc { +impl FreelistGlobalAlloc { /// Constructs the allocator. 
pub const fn new() -> Self { Self { inner: [] } } } -impl crate::alloc::IAlloc for FreelistAlloc { +fn allock() -> FreelistGlobalAllock { + loop { + let mut ptr = + GLOBAL_ALLOC.swap(usize::MAX as *mut _, core::sync::atomic::Ordering::Acquire); + if ptr as usize == usize::MAX { + core::hint::spin_loop(); + continue; + } + return FreelistGlobalAllock { + alloc: FreelistAlloc { + head: unsafe { ptr.as_mut() }, + end: NonNull::new(GLOBAL_ALLOC_END.load(core::sync::atomic::Ordering::Acquire)), + }, + }; + } +} +static GLOBAL_ALLOC: AtomicPtr = AtomicPtr::new(core::ptr::null_mut()); +static GLOBAL_ALLOC_END: AtomicPtr = AtomicPtr::new(core::ptr::null_mut()); +struct FreelistGlobalAllock { + alloc: FreelistAlloc, +} +impl Drop for FreelistGlobalAllock { + fn drop(&mut self) { + GLOBAL_ALLOC_END.store( + unsafe { core::mem::transmute::>, *mut Slot>(self.alloc.end) }, + core::sync::atomic::Ordering::Release, + ); + GLOBAL_ALLOC.store( + unsafe { + core::mem::transmute::, *mut Slot>(self.alloc.head.take()) + }, + core::sync::atomic::Ordering::Release, + ) + } +} +impl crate::alloc::IAlloc for FreelistGlobalAlloc { + fn alloc(&mut self, layout: crate::alloc::Layout) -> *mut () { + let mut alloc = allock(); + (&mut alloc.alloc).alloc(layout) + } + unsafe fn realloc( + &mut self, + ptr: *mut (), + prev_layout: crate::alloc::Layout, + new_size: usize, + ) -> *mut () { + let mut alloc = allock(); + (&mut alloc.alloc).realloc(ptr, prev_layout, new_size) + } + unsafe fn free(&mut self, ptr: *mut ()) { + let mut alloc = allock(); + (&mut alloc.alloc).free(ptr) + } +} + +/// A free-list based allocator. +#[crate::stabby] +#[derive(Default)] +pub struct FreelistAlloc { + head: Option<&'static mut Slot>, + end: Option>, +} +impl crate::alloc::IAlloc for &mut FreelistAlloc { fn alloc(&mut self, layout: crate::alloc::Layout) -> *mut () { let layout = layout.for_alloc(); - let crate::alloc::Layout { - mut size, - mut align, - } = layout; - match ALLOC.lock().take(size, align) { + match self.take(layout) { Some(slot) => { - let this = unsafe { (slot as *mut Slot).add(1).cast() }; - unsafe { this.write_bytes(0, size) }; + let this = unsafe { (slot as *mut Slot).add(1).cast::<()>() }; this } None => core::ptr::null_mut(), } } - unsafe fn realloc(&mut self, p: *mut c_void, new_size: usize) -> *mut c_void { - let slot = p.cast::().sub(1); - let this = unsafe { &mut *slot }; - let prev_size = this.size; - let align = this.align; - let alloc = ALLOC.lock(); - if alloc.try_extend(this, new_size).is_ok() { - p - } else { - let new_ptr = self.alloc(crate::alloc::Layout { - size: new_size, - align, - }); - unsafe { core::ptr::copy_nonoverlapping(p.cast::(), this.cast(), this.size) }; - new_ptr + unsafe fn realloc( + &mut self, + ptr: *mut (), + prev_layout: crate::alloc::Layout, + new_size: usize, + ) -> *mut () { + let slot = ptr.cast::().sub(1); + let mut slot = unsafe { &mut *slot }; + let prev_size = slot.size; + match self.try_extend(&mut slot, new_size) { + Ok(()) => ptr, + Err(()) => { + let new_ptr = self.alloc(crate::alloc::Layout { + size: new_size, + align: prev_layout.align, + }); + unsafe { + core::ptr::copy_nonoverlapping(ptr.cast::(), new_ptr.cast(), prev_size) + }; + self.insert(slot); + new_ptr + } } } unsafe fn free(&mut self, p: *mut ()) { let slot = p.cast::().sub(1); - ALLOC.lock().insert(&mut *slot); + self.insert(&mut *slot); + } +} +impl core::fmt::Debug for FreelistAlloc { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!( + f, + "FreelistAlloc(end={:?})", + 
self.end.map_or(core::ptr::null_mut(), |p| p.as_ptr()) + )?; + let mut f = f.debug_list(); + let mut head = &self.head; + while let Some(slot) = head { + f.entry(&slot); + head = &slot.lower; + } + f.finish() } } -#[repr(C)] -struct Slot { +#[crate::stabby] +pub struct Slot { size: usize, lower: Option<&'static mut Slot>, - padding: usize, + start: NonNull, align: usize, } +impl core::fmt::Debug for Slot { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct("Slot") + .field("start", &self.start()) + .field("end", &self.end()) + .field("size", &self.size) + .finish() + } +} impl core::cmp::Ord for Slot { fn cmp(&self, other: &Self) -> core::cmp::Ordering { (self as *const Self).cmp(&(other as *const Self)) @@ -90,26 +174,40 @@ impl Slot { const fn full_size(&self) -> usize { core::mem::size_of::() + self.size } - const fn start(&self) -> *const u8 { - unsafe { (self as *const Self).cast::().sub(self.padding) } + const fn start(&self) -> NonNull { + self.start + } + const fn end(&self) -> NonNull { + unsafe { + NonNull::new_unchecked( + (self as *const Self) + .cast::() + .add(self.full_size()) + .cast_mut() + .cast(), + ) + } } - const fn end(&self) -> *const Slot { - unsafe { (self as *const Self).cast::().add(self.full_size()) } + fn end_mut(&mut self) -> NonNull { + unsafe { + NonNull::new_unchecked( + (self as *mut Self) + .cast::() + .add(self.full_size()) + .cast(), + ) + } } fn shift(&'static mut self, target_align: usize) -> &'static mut Self { - let required_padding = target_align - core::mem::size_of::(); - let padding = self.padding; - if padding == required_padding { + let start = self.start().as_ptr().cast::(); + let align_offset = start.align_offset(target_align) as isize; + let self_offset = unsafe { (self as *const Self).cast::().offset_from(start) }; + self.align = target_align; + if align_offset == self_offset { return self; } - self.size += padding; - self.align = target_align; - self.padding = 0; - let new_addr = unsafe { - (self as *mut Self) - .cast::() - .offset(padding as isize - required_padding as isize) - }; + self.size = (self.size as isize + self_offset - align_offset) as usize; + let new_addr = unsafe { start.offset(align_offset) }; unsafe { core::ptr::copy( (self as *const Self).cast(), @@ -121,44 +219,34 @@ impl Slot { } fn split(self: &mut &'static mut Self, at: usize) -> Option<&'static mut Self> { let size = self.size; - (size > at + core::mem::size_of::()).then(move || { - self.size = at; - let slot = unsafe { &mut *(self.end() as *mut MaybeUninit) }; + (size > at).then(move || { + self.size = at - core::mem::size_of::(); + let start = self.end_mut(); + assert_eq!( + start + .as_ptr() + .cast::() + .align_offset(core::mem::align_of::()), + 0 + ); + let slot = unsafe { start.cast::>().as_mut() }; slot.write(Slot { - size: size - at + core::mem::size_of::(), + size: size - at, lower: None, - padding: 0, + start, align: 8, }) }) } } -#[repr(C)] -struct Allocator { - free_list: AtomicPtr, - end: AtomicPtr, -} -struct Slots { - list: Option<&'static mut Slot>, -} -impl Drop for Slots { - fn drop(&mut self) { - ALLOC.free_list.store( - unsafe { - core::mem::transmute::, *mut Slot>(self.list.take()) - }, - Ordering::Release, - ); - } -} -impl Slots { +impl FreelistAlloc { fn insert(&mut self, mut slot: &'static mut Slot) { - slot = slot.shift(core::mem::size_of::()); - let mut head = &mut self.list; + slot = slot.shift(8); + let mut head = &mut self.head; while let Some(h) = head { if *h < slot { - if core::ptr::eq(h.end(), 
slot.start()) { + if h.end_mut() == slot.start() { h.size += slot.full_size(); return; } @@ -173,12 +261,12 @@ impl Slots { slot.lower = head.take(); *head = Some(slot) } - fn take(&mut self, size: usize, align: usize) -> Option<&'static mut Slot> { - let req = size + align; - let slot_owner = self.select_slot(req)?; + fn take(&mut self, layout: crate::alloc::Layout) -> Option<&'static mut Slot> { + let req = layout.concat(crate::alloc::Layout::of::()); + let slot_owner = self.select_slot(req.size)?; let mut slot = slot_owner.take()?; let lower = slot.lower.take(); - *slot_owner = slot.split(size); + *slot_owner = slot.split(req.size); match slot_owner { Some(owner) => owner.lower = lower, None => *slot_owner = lower, @@ -188,7 +276,7 @@ impl Slots { fn select_slot(&mut self, size: usize) -> Option<&mut Option<&'static mut Slot>> { let mut head = unsafe { core::mem::transmute::<&mut Option<&'static mut Slot>, &mut Option<&'static mut Slot>>( - &mut self.list, + &mut self.head, ) }; while let Some(h) = head { @@ -203,85 +291,80 @@ impl Slots { return Some(head); } } - let alloc_end = ALLOC.end.load(Ordering::Relaxed); + let alloc_end = self.end; self.grow_take(alloc_end, size) } fn grow_take( &mut self, - alloc_end: *mut Slot, + alloc_end: Option>, mut size: usize, ) -> Option<&mut Option<&'static mut Slot>> { - let slot = unsafe { - crate::alloc::allocators::paging::memmap(alloc_end.cast(), &mut size)? - .cast::>() - .as_mut() - }; + let start = crate::alloc::allocators::paging::memmap( + alloc_end.map_or(core::ptr::null(), |p| p.as_ptr().cast()), + &mut size, + )?; + let slot = unsafe { start.cast::>().as_mut() }; let slot = slot.write(Slot { size: size - core::mem::size_of::(), lower: None, - padding: 0, + start: start.cast(), align: 8, }); - ALLOC.end.store(slot.end().cast_mut(), Ordering::Relaxed); + self.end = Some(slot.end_mut()); self.insert(slot); - Some(&mut self.list) + Some(&mut self.head) } - fn try_extend(&mut self, slot: &'static mut Slot, new_size: usize) -> Result<(), ()> { - let alloc_end = ALLOC.end.load(Ordering::Relaxed); - let prev_size = slot.size; - if core::ptr::eq(alloc_end, slot.end()) { - if self.grow_take(alloc_end, new_size - prev_size).is_some() { - slot.size = new_size; + fn try_extend(&mut self, slot: &mut &'static mut Slot, new_size: usize) -> Result<(), ()> { + 'a: loop { + let prev_size = slot.size; + if prev_size >= new_size { return Ok(()); } - } - let mut head = unsafe { - core::mem::transmute::<&mut Option<&'static mut Slot>, &mut Option<&'static mut Slot>>( - &mut self.list, - ) - }; - while let Some(h) = head { - match h.start().cmp(&slot.end()) { - core::cmp::Ordering::Less => return Err(()), - core::cmp::Ordering::Equal => { - let extension_size = unsafe { h.end().offset_from(h.start()) }; - if prev_size + extension_size >= new_size { - todo!("just extending the slot may steal too much capacity, yield some back if so") - } else if core::ptr::eq(alloc_end, h.end()) { - todo!("we might still be able to extend the newly acquired slot") - } - } - core::cmp::Ordering::Greater => { - head = unsafe { - core::mem::transmute::< - &mut Option<&'static mut Slot>, - &mut Option<&'static mut Slot>, - >(&mut h.lower) - }; + let alloc_end = self.end; + let slot_end = slot.end_mut(); + if alloc_end == Some(slot_end) { + if self.grow_take(alloc_end, new_size - prev_size).is_some() { + slot.size = new_size; + return Ok(()); } } - } - Err(()) - } -} -impl Allocator { - const fn new() -> Self { - Self { - free_list: AtomicPtr::new(core::ptr::null_mut()), - end: 
AtomicPtr::new(core::ptr::null_mut()), - } - } - fn lock(&self) -> Slots { - loop { - let list = self - .free_list - .swap(usize::MAX as *mut Slot, Ordering::AcqRel); - if list as usize != usize::MAX { - return Slots { - list: unsafe { list.as_mut() }, - }; + let mut head = unsafe { + core::mem::transmute::<&mut Option<&'static mut Slot>, &mut Option<&'static mut Slot>>( + &mut self.head, + ) + }; + while let Some(h) = head { + match h.start().cmp(&slot_end) { + core::cmp::Ordering::Less => return Err(()), + core::cmp::Ordering::Equal => { + let lower = h.lower.take(); + let extension = + unsafe { core::mem::replace(head, lower).unwrap_unchecked() }; + let extension_size = unsafe { + extension + .end_mut() + .as_ptr() + .offset_from(extension.start().as_ptr()) + as usize + }; + slot.size += extension_size; + if let Some(remainder) = slot.split(new_size) { + remainder.lower = head.take(); + *head = Some(remainder); + } + continue 'a; + } + core::cmp::Ordering::Greater => { + head = unsafe { + core::mem::transmute::< + &mut Option<&'static mut Slot>, + &mut Option<&'static mut Slot>, + >(&mut h.lower) + }; + } + } } - core::hint::spin_loop(); + return Err(()); } } } diff --git a/stabby-abi/src/alloc/allocators/mod.rs b/stabby-abi/src/alloc/allocators/mod.rs index 5d00843..c80bc41 100644 --- a/stabby-abi/src/alloc/allocators/mod.rs +++ b/stabby-abi/src/alloc/allocators/mod.rs @@ -6,7 +6,10 @@ /// A simple cross-platform allocator implementation /// /// This allocator is based on an ordered linked list of free memory blocks. -// pub mod freelist_alloc; +// #[cfg(any(target_arch = "wasm32", all(target_family = "unix", feature = "libc")))] +// mod freelist_alloc; +// #[cfg(any(target_arch = "wasm32", all(target_family = "unix", feature = "libc")))] +// pub use freelist_alloc::{FreelistAlloc, FreelistGlobalAlloc}; #[cfg(all(feature = "libc", not(target_arch = "wasm32")))] /// [`IAlloc`](crate::alloc::IAlloc) bindings for `libc::malloc` @@ -20,28 +23,27 @@ mod rust_alloc; #[cfg(feature = "alloc-rs")] pub use rust_alloc::RustAlloc; -#[cfg(target_arch = "wasm32")] -pub(crate) mod paging { - use core::ptr::NonNull; - pub(crate) const PAGESIZE: usize = 65536; - pub(crate) fn memmap(hint: *const (), requested_capacity: &mut usize) -> Option> { - let added_pages = (*requested_capacity / PAGESIZE) + 1; - let start = core::arch::wasm32::memory_grow(0, added_pages); - if start == usize::MAX { - return None; - } - *requested_capacity = added_pages * PAGESIZE; - unsafe { core::mem::transmute::>>(start * PAGESIZE) } - } - pub(crate) fn memunmap(hint: *mut (), max_unmap: usize) {} -} +// #[cfg(target_arch = "wasm32")] +// pub(crate) mod paging { +// use core::ptr::NonNull; +// pub(crate) const PAGESIZE: usize = 65536; +// pub(crate) fn memmap(_hint: *const (), requested_capacity: &mut usize) -> Option> { +// let added_pages = (*requested_capacity / PAGESIZE) + 1; +// let start = core::arch::wasm32::memory_grow(0, added_pages); +// if start == usize::MAX { +// return None; +// } +// *requested_capacity = added_pages * PAGESIZE; +// unsafe { core::mem::transmute::>>(start * PAGESIZE) } +// } +// pub(crate) fn memunmap(_addr: *mut (), _len: usize) {} +// } // #[cfg(all(target_family = "unix", feature = "libc"))] // pub(crate) mod paging { // use core::ptr::NonNull; // pub(crate) const PAGESIZE: usize = 65536; // pub(crate) fn memmap(hint: *const (), requested_capacity: &mut usize) -> Option> { -// const PAGESIZE: usize = 65536; // let added_pages = (*requested_capacity / PAGESIZE) + 1; // *requested_capacity = 
added_pages * PAGESIZE; // let start = unsafe { @@ -49,18 +51,49 @@ pub(crate) mod paging { // hint.cast_mut().cast(), // *requested_capacity, // libc::PROT_READ | libc::PROT_WRITE, -// libc::MAP_PRIVATE, -// -1, +// libc::MAP_PRIVATE | libc::MAP_ANONYMOUS, +// 0, // 0, // ) +// .cast::() // }; // if start as isize == -1 { // return None; // } -// NonNull::new(start.cast()) -// } -// pub(crate) fn memunmap(addr: *mut (), mut len: usize) { -// len -= len % PAGESIZE; -// unsafe { libc::munmap(addr.cast(), len) }; +// NonNull::new(start) // } +// // pub(crate) fn memunmap(addr: *mut (), mut len: usize) { +// // len -= len % PAGESIZE; +// // unsafe { libc::munmap(addr.cast(), len) }; +// // } // } + +#[cfg(any(stabby_default_alloc = "RustAlloc", feature = "alloc-rs"))] +/// The default allocator, depending on which of the following is available: +/// - RustAlloc: Rust's `GlobalAlloc`, through a vtable that ensures FFI-safety. +/// - LibcAlloc: libc::malloc, which is 0-sized. +/// - None. I _am_ working on getting a 0-dependy allocator working, but you should probably go with `feature = "alloc-rs"` anyway. +/// +/// You can also use the `stabby_default_alloc` cfg to override the default allocator regardless of feature flags. +pub(crate) type DefaultAllocator = RustAlloc; + +#[cfg(any( + stabby_default_alloc = "LibcAlloc", + all(feature = "libc", not(feature = "alloc-rs")) +))] +/// The default allocator, depending on which of the following is available: +/// - RustAlloc: Rust's `GlobalAlloc`, through a vtable that ensures FFI-safety. +/// - LibcAlloc: libc::malloc, which is 0-sized. +/// - None. I _am_ working on getting a 0-dependy allocator working, but you should probably go with `feature = "alloc-rs"` anyway. +/// +/// You can also use the `stabby_default_alloc` cfg to override the default allocator regardless of feature flags. +pub(crate) type DefaultAllocator = LibcAlloc; + +#[cfg(not(any(stabby_default_alloc, feature = "alloc-rs", feature = "libc")))] +/// The default allocator, depending on which of the following is available: +/// - RustAlloc: Rust's `GlobalAlloc`, through a vtable that ensures FFI-safety. +/// - LibcAlloc: libc::malloc, which is 0-sized. +/// - None. I _am_ working on getting a 0-dependy allocator working, but you should probably go with `feature = "alloc-rs"` anyway. +/// +/// You can also use the `stabby_default_alloc` cfg to override the default allocator regardless of feature flags. +pub(crate) type DefaultAllocator = core::convert::Infallible; diff --git a/stabby-abi/src/alloc/boxed.rs b/stabby-abi/src/alloc/boxed.rs index d2f914a..2761d09 100644 --- a/stabby-abi/src/alloc/boxed.rs +++ b/stabby-abi/src/alloc/boxed.rs @@ -26,8 +26,11 @@ unsafe impl Send for Box {} unsafe impl Sync for Box {} unsafe impl Send for BoxedSlice {} unsafe impl Sync for BoxedSlice {} -#[cfg(feature = "libc")] -impl Box { + +impl Box +where + super::DefaultAllocator: Default, +{ /// Attempts to allocate [`Self`], initializing it with `constructor`. /// /// Note that the allocation may or may not be zeroed. 
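// [Editor's note: illustrative sketch, not part of the original patch.]
// The `DefaultAllocator` selection introduced in allocators/mod.rs above is driven by
// feature flags and by the `stabby_default_alloc` cfg that build.rs now registers
// (`values(none(), "RustAlloc", "LibcAlloc")`), e.g. a build with
// `RUSTFLAGS='--cfg stabby_default_alloc="LibcAlloc"'`. Assuming at least one allocator
// feature is enabled (otherwise `DefaultAllocator` is `core::convert::Infallible` and the
// new `DefaultAllocator: Default` bounds cannot be satisfied), downstream code can stay
// allocator-agnostic:
//
//     use stabby_abi::alloc::{boxed::Box, DefaultAllocator};
//
//     // `new_in` takes the allocator explicitly; the allocator-less constructors now
//     // gated on `DefaultAllocator: Default` do the same with `DefaultAllocator::default()`.
//     let boxed: Box<u64> = Box::new_in(7, DefaultAllocator::default());
//     assert_eq!(*boxed, 7);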
@@ -67,7 +70,7 @@ impl Box { ) -> Result { let mut ptr = match AllocPtr::alloc(&mut alloc) { Some(mut ptr) => { - unsafe { core::ptr::write(&mut ptr.prefix_mut().alloc, alloc) }; + unsafe { ptr.prefix_mut().alloc.write(alloc) }; ptr } None => return Err((constructor, alloc)), @@ -106,7 +109,7 @@ impl Box { ) -> Self { let mut ptr = match AllocPtr::alloc(&mut alloc) { Some(mut ptr) => { - unsafe { core::ptr::write(&mut ptr.prefix_mut().alloc, alloc) }; + unsafe { ptr.prefix_mut().alloc.write(alloc) }; ptr } None => panic!("Allocation failed"), @@ -155,14 +158,17 @@ impl Box { impl Box { fn free(&mut self) { - let mut alloc = unsafe { core::ptr::read(&self.ptr.prefix().alloc) }; + let mut alloc = unsafe { self.ptr.prefix().alloc.assume_init_read() }; unsafe { self.ptr.free(&mut alloc) } } } impl Clone for Box { fn clone(&self) -> Self { - Box::new_in(T::clone(self), unsafe { self.ptr.prefix() }.alloc.clone()) + Box::new_in( + T::clone(self), + unsafe { self.ptr.prefix().alloc.assume_init_ref() }.clone(), + ) } } impl core::ops::Deref for Box { diff --git a/stabby-abi/src/alloc/collections/arc_btree.rs b/stabby-abi/src/alloc/collections/arc_btree.rs index 95f7453..b86e335 100644 --- a/stabby-abi/src/alloc/collections/arc_btree.rs +++ b/stabby-abi/src/alloc/collections/arc_btree.rs @@ -1,42 +1,33 @@ use core::{ - borrow::Borrow, marker::PhantomData, mem::MaybeUninit, ptr::NonNull, sync::atomic::AtomicPtr, + borrow::Borrow, marker::PhantomData, mem::MaybeUninit, ops::Deref, ptr::NonNull, + sync::atomic::AtomicPtr, }; -use crate::{ - alloc::{sync::Arc, AllocPtr, DefaultAllocator, IAlloc}, - IStable, -}; +use crate::alloc::{sync::Arc, AllocPtr, DefaultAllocator, IAlloc}; /// An [`ArcBTreeSet`] that can be atomically modified. -#[cfg(feature = "libc")] pub struct AtomicArcBTreeSet( AtomicPtr>, -); -#[cfg(feature = "libc")] +) +where + DefaultAllocator: core::default::Default + IAlloc; impl Default for AtomicArcBTreeSet +where + DefaultAllocator: core::default::Default + IAlloc, { fn default() -> Self { Self::new() } } -#[cfg(feature = "libc")] impl AtomicArcBTreeSet +where + DefaultAllocator: core::default::Default + IAlloc, { /// Constructs a new, empty set. pub const fn new() -> Self { - Self(AtomicPtr::new(unsafe { - core::mem::transmute::< - ArcBTreeSet, - *mut ArcBTreeSetNodeInner, - >(ArcBTreeSet::< - T, - DefaultAllocator, - REPLACE_ON_INSERT, - SPLIT_LIMIT, - >::new_in(DefaultAllocator::new())) - })) + Self(AtomicPtr::new(core::ptr::null_mut())) } /// Applies `f` to the current value, swapping the current value for the one returned by `f`. /// @@ -127,8 +118,14 @@ impl ArcBTreeMap Self { - Self(ArcBTreeSet::new_in(alloc)) + pub const fn new_in(alloc: Alloc) -> ArcBTreeMap { + ArcBTreeMap::from_alloc(alloc) + } + /// Constructs a new map, using the provided allocator. + /// + /// This operation does not allocate. 
+ pub const fn from_alloc(alloc: Alloc) -> Self { + Self(ArcBTreeSet::from_alloc(alloc)) } /// Returns the value associated to `key` pub fn get>(&self, key: &Q) -> Option<&V> { @@ -154,55 +151,46 @@ impl ArcBTreeMap(Result, Alloc>); - -unsafe impl - IStable for ArcBTreeSet +> { + root: Option>, + alloc: core::mem::MaybeUninit, +} +impl Clone + for ArcBTreeSet where - Alloc: IStable, - Arc: IStable, + T: Clone, + Alloc: Clone, { - type Size = <*const T as IStable>::Size; - type Align = <*const T as IStable>::Align; - type ForbiddenValues = <*const T as IStable>::ForbiddenValues; - type UnusedBits = <*const T as IStable>::UnusedBits; - type HasExactlyOneNiche = crate::B0; - type ContainsIndirections = crate::B1; - #[cfg(feature = "ctypes")] - type CType = <*const T as IStable>::CType; - const REPORT: &'static crate::report::TypeReport = &crate::report::TypeReport { - name: crate::str::Str::new("ArcBTreeSet"), - module: crate::str::Str::new("stabby_abi::alloc::collections::arc_btree"), - fields: crate::StableLike::new(Some(&crate::report::FieldReport { - name: crate::str::Str::new("T"), - ty: T::REPORT, - next_field: crate::StableLike::new(Some(&crate::report::FieldReport { - name: crate::str::Str::new("Alloc"), - ty: Alloc::REPORT, - next_field: crate::StableLike::new(None), - })), - })), - tyty: crate::report::TyTy::Struct, - version: 0, - }; - const ID: u64 = crate::report::gen_id(Self::REPORT); + fn clone(&self) -> Self { + match self.root.clone() { + Some(root) => Self { + root: Some(root), + alloc: core::mem::MaybeUninit::uninit(), + }, + None => unsafe { + Self { + root: None, + alloc: core::mem::MaybeUninit::new(self.alloc.assume_init_ref().clone()), + } + }, + } + } } - -#[cfg(feature = "libc")] -impl Default for ArcBTreeSet { +impl Default for ArcBTreeSet { fn default() -> Self { - Self::new() + Self::new_in(Alloc::default()) } } -#[cfg(feature = "libc")] -impl ArcBTreeSet { +impl ArcBTreeSet +where + DefaultAllocator: IAlloc, +{ /// Constructs an empty set in the [`DefaultAllocator`] pub const fn new() -> Self { Self::new_in(DefaultAllocator::new()) @@ -217,9 +205,9 @@ impl< { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { f.write_str("ArcBTreeSet")?; - match &self.0 { - Err(_) => f.write_str("{}"), - Ok(set) => set.fmt(f), + match self.root.as_ref() { + None => f.write_str("{}"), + Some(set) => set.fmt(f), } } } @@ -244,26 +232,73 @@ impl< f.finish() } } +impl Drop + for ArcBTreeSet +{ + fn drop(&mut self) { + if self.root.is_none() { + unsafe { self.alloc.assume_init_drop() } + } + } +} impl ArcBTreeSet +where + DefaultAllocator: core::default::Default, { + /// Takes a pointer to the root. const fn as_ptr( &self, ) -> *mut ArcBTreeSetNodeInner { - unsafe { core::mem::transmute(core::ptr::read(self)) } + unsafe { core::mem::transmute(core::ptr::read(&self.root)) } } + /// Reinterprets the pointer as a root, leaving partial ownership to the pointer. 
fn copy_from_ptr( ptr: *const ArcBTreeSetNodeInner, ) -> Self { - let owner: Self = unsafe { core::mem::transmute(ptr) }; - let this = owner.clone(); - core::mem::forget(owner); - this + match NonNull::new(ptr.cast_mut()) { + None => Self { + root: None, + alloc: core::mem::MaybeUninit::new(Default::default()), + }, + Some(ptr) => { + let owner: core::mem::ManuallyDrop< + Option>, + > = unsafe { core::mem::transmute(ptr) }; + let root = owner.deref().clone(); + Self { + root, + alloc: core::mem::MaybeUninit::uninit(), + } + } + } } + /// Reinterprets the pointer as a root, taking partial ownership from the pointer. unsafe fn take_ownership_from_ptr( ptr: *mut ArcBTreeSetNodeInner, ) -> Self { - unsafe { core::mem::transmute(ptr) } + match NonNull::new(ptr) { + None => Self { + root: None, + alloc: core::mem::MaybeUninit::new(Default::default()), + }, + Some(ptr) => { + let root = unsafe { core::mem::transmute(ptr) }; + Self { + root, + alloc: core::mem::MaybeUninit::uninit(), + } + } + } + } +} +impl ArcBTreeSet { + /// Constructs a new set in the provided allocator using the default const generics. + /// + /// Note that this doesn't actually allocate. + #[allow(clippy::let_unit_value)] + pub const fn new_in(alloc: Alloc) -> ArcBTreeSet { + ArcBTreeSet::from_alloc(alloc) } } impl @@ -275,17 +310,19 @@ impl Self { + pub const fn from_alloc(alloc: Alloc) -> Self { _ = Self::CHECK; - Self(Err(alloc)) + Self { + root: None, + alloc: core::mem::MaybeUninit::new(alloc), + } } /// Retrieves the value associated to `key` if it exists. pub fn get(&self, key: &K) -> Option<&T> where T: PartialOrd, { - self.0.as_ref().ok().and_then(|set| set.get(key)) + self.root.as_ref().and_then(|set| set.get(key)) } /// Inserts a value into the set. /// @@ -297,10 +334,11 @@ impl inner.insert(value), - Err(alloc) => { - self.0 = Ok(ArcBTreeSetNode(Arc::new_in( + match &mut self.root { + Some(inner) => inner.insert(value), + None => { + let alloc = unsafe { self.alloc.assume_init_read() }; + self.root = Some(ArcBTreeSetNode(Arc::new_in( ArcBTreeSetNodeInner::new( Some(ArcBTreeSetEntry { value, @@ -308,7 +346,7 @@ impl usize { - match &self.0 { - Err(_) => 0, - Ok(node) => node.len(), + match &self.root { + None => 0, + Some(node) => node.len(), } } /// Return `true` iff the set is empty. pub const fn is_empty(&self) -> bool { - self.0.is_err() + self.root.is_none() } } +use seal::*; +mod seal { + use super::*; + #[crate::stabby] + /// An immutable ArcBTreeMap. + pub struct ArcBTreeSetNode< + T, + Alloc: IAlloc, + const REPLACE_ON_INSERT: bool, + const SPLIT_LIMIT: usize, + >(pub Arc, Alloc>); -// #[stabby::stabby] -/// An immutable ArcBTreeMap. 
-struct ArcBTreeSetNode( - Arc, Alloc>, -); -struct ArcBTreeSetNodeInner< - T, - Alloc: IAlloc, - const REPLACE_ON_INSERT: bool, - const SPLIT_LIMIT: usize, -> { - entries: [MaybeUninit>; SPLIT_LIMIT], - len: usize, - greater: Option>, -} -impl - ArcBTreeSetNodeInner -{ - fn new( - entries: impl IntoIterator>, - greater: Option>, - ) -> Self { - let mut this = ArcBTreeSetNodeInner { - entries: [(); SPLIT_LIMIT].map(|_| MaybeUninit::uninit()), - len: 0, - greater, - }; - for entry in entries { - if this.len >= SPLIT_LIMIT - 1 { - panic!("Attempted to construct an node with too many entries"); + #[crate::stabby] + pub struct ArcBTreeSetNodeInner< + T, + Alloc: IAlloc, + const REPLACE_ON_INSERT: bool, + const SPLIT_LIMIT: usize, + > { + pub entries: + [MaybeUninit>; SPLIT_LIMIT], + pub len: usize, + pub greater: Option>, + } + impl + ArcBTreeSetNodeInner + { + pub fn new( + entries: impl IntoIterator< + Item = ArcBTreeSetEntry, + >, + greater: Option>, + ) -> Self { + let mut this = ArcBTreeSetNodeInner { + entries: [(); SPLIT_LIMIT].map(|_| MaybeUninit::uninit()), + len: 0, + greater, + }; + for entry in entries { + if this.len >= SPLIT_LIMIT - 1 { + panic!("Attempted to construct an node with too many entries"); + } + this.entries[this.len].write(entry); + this.len += 1; } - this.entries[this.len].write(entry); - this.len += 1; + this } - this - } -} -impl Clone - for ArcBTreeSetNode -{ - fn clone(&self) -> Self { - Self(self.0.clone()) - } -} -impl Drop - for ArcBTreeSetNodeInner -{ - fn drop(&mut self) { - unsafe { core::ptr::drop_in_place(self.entries_mut()) } } -} -impl Clone - for ArcBTreeSetNodeInner -{ - fn clone(&self) -> Self { - let mut entries: [MaybeUninit>; - SPLIT_LIMIT] = [(); SPLIT_LIMIT].map(|_| core::mem::MaybeUninit::uninit()); - unsafe { - for (i, entry) in self.entries().iter().enumerate() { - *entries.get_unchecked_mut(i) = MaybeUninit::new(entry.clone()) - } + impl Clone + for ArcBTreeSetNode + { + fn clone(&self) -> Self { + Self(self.0.clone()) } - Self { - entries, - len: self.len, - greater: self.greater.clone(), + } + impl Drop + for ArcBTreeSetNodeInner + { + fn drop(&mut self) { + unsafe { core::ptr::drop_in_place(self.entries_mut()) } } } -} -// #[stabby::stabby] -/// A node of an immutable ArcBTreeMap. -struct ArcBTreeSetEntry { - smaller: Option>, - value: T, -} -impl Clone - for ArcBTreeSetEntry -{ - fn clone(&self) -> Self { - Self { - value: self.value.clone(), - smaller: self.smaller.clone(), + impl Clone + for ArcBTreeSetNodeInner + { + fn clone(&self) -> Self { + let mut entries: [MaybeUninit< + ArcBTreeSetEntry, + >; SPLIT_LIMIT] = [(); SPLIT_LIMIT].map(|_| core::mem::MaybeUninit::uninit()); + unsafe { + for (i, entry) in self.entries().iter().enumerate() { + *entries.get_unchecked_mut(i) = MaybeUninit::new(entry.clone()) + } + } + Self { + entries, + len: self.len, + greater: self.greater.clone(), + } } } -} -impl - ArcBTreeSetNode -{ - fn len(&self) -> usize { - self.0.entries().iter().fold(0, |acc, it| { - acc + 1 + it.smaller.as_ref().map_or(0, |n| n.len()) - }) + self.0.greater.as_ref().map_or(0, |n| n.len()) + #[crate::stabby] + /// A node of an immutable ArcBTreeMap. + pub struct ArcBTreeSetEntry< + T, + Alloc: IAlloc, + const REPLACE_ON_INSERT: bool, + const SPLIT_LIMIT: usize, + > { + pub smaller: Option>, + pub value: T, } - fn get(&self, key: &K) -> Option<&T> - where - T: PartialOrd, + impl Clone + for ArcBTreeSetEntry { - use core::cmp::Ordering; - for entry in self.0.entries() { - match entry.value.partial_cmp(key)? 
{ - Ordering::Equal => return Some(&entry.value), - Ordering::Greater => return entry.smaller.as_ref()?.get(key), - _ => {} + fn clone(&self) -> Self { + Self { + value: self.value.clone(), + smaller: self.smaller.clone(), } } - self.0.greater.as_ref()?.get(key) } - fn insert(&mut self, value: T) -> Option - where - T: Clone, - Alloc: Clone, + + impl + ArcBTreeSetNode { - if !REPLACE_ON_INSERT && self.get(&value).is_some() { - return Some(value); + pub fn len(&self) -> usize { + self.0.entries().iter().fold(0, |acc, it| { + acc + 1 + it.smaller.as_ref().map_or(0, |n| n.len()) + }) + self.0.greater.as_ref().map_or(0, |n| n.len()) } - match self.insert_inner(value) { - Err(done) => done, - Ok((right, pivot)) => { - let entry = ArcBTreeSetEntry { - value: pivot, - smaller: Some(self.clone()), - }; - let mut inner = ArcBTreeSetNodeInner { - entries: [(); SPLIT_LIMIT].map(|_| MaybeUninit::uninit()), - len: 1, - greater: Some(right), - }; - inner.entries[0].write(entry); - self.0 = Arc::new_in(inner, Arc::allocator(&self.0).clone()); - None + pub fn get(&self, key: &K) -> Option<&T> + where + T: PartialOrd, + { + use core::cmp::Ordering; + for entry in self.0.entries() { + match entry.value.partial_cmp(key)? { + Ordering::Equal => return Some(&entry.value), + Ordering::Greater => return entry.smaller.as_ref()?.get(key), + _ => {} + } } + self.0.greater.as_ref()?.get(key) } - } - fn insert_inner(&mut self, value: T) -> Result<(Self, T), Option> - where - T: Clone, - Alloc: Clone, - { - use core::cmp::Ordering; - let inner = Arc::make_mut(&mut self.0); - let alloc = unsafe { - AllocPtr { - ptr: NonNull::new_unchecked(inner), - marker: PhantomData, + pub fn insert(&mut self, value: T) -> Option + where + T: Clone, + Alloc: Clone, + { + if !REPLACE_ON_INSERT && self.get(&value).is_some() { + return Some(value); } - }; - let alloc = &unsafe { alloc.prefix() }.alloc; - let entries = inner.entries_mut(); - for (i, entry) in entries.iter_mut().enumerate() { - match entry.value.cmp(&value) { - Ordering::Equal => return Err(Some(core::mem::replace(&mut entry.value, value))), - Ordering::Greater => match entry.smaller.as_mut() { - Some(smaller) => { - let (right, pivot) = smaller.insert_inner(value)?; - return match inner.insert(i, pivot, Some(right), alloc) { - None => Err(None), - Some(splits) => Ok(splits), - }; - } - None => { - return match inner.insert(i, value, None, alloc) { - None => Err(None), - Some(splits) => Ok(splits), - } - } - }, - _ => {} + match self.insert_inner(value) { + Err(done) => done, + Ok((right, pivot)) => { + let entry = ArcBTreeSetEntry { + value: pivot, + smaller: Some(self.clone()), + }; + let mut inner = ArcBTreeSetNodeInner { + entries: [(); SPLIT_LIMIT].map(|_| MaybeUninit::uninit()), + len: 1, + greater: Some(right), + }; + inner.entries[0].write(entry); + self.0 = Arc::new_in(inner, Arc::allocator(&self.0).clone()); + None + } } } - match inner.greater.as_mut() { - Some(greater) => { - let (right, pivot) = greater.insert_inner(value)?; - if let Some(splits) = inner.push(pivot, Some(right), alloc) { - return Ok(splits); + fn insert_inner(&mut self, value: T) -> Result<(Self, T), Option> + where + T: Clone, + Alloc: Clone, + { + use core::cmp::Ordering; + let inner = Arc::make_mut(&mut self.0); + let alloc = unsafe { + AllocPtr { + ptr: NonNull::new_unchecked(inner), + marker: PhantomData, + } + }; + let alloc = unsafe { alloc.prefix().alloc.assume_init_ref() }; + let entries = inner.entries_mut(); + for (i, entry) in entries.iter_mut().enumerate() { + match 
entry.value.cmp(&value) { + Ordering::Equal => { + return Err(Some(core::mem::replace(&mut entry.value, value))) + } + Ordering::Greater => match entry.smaller.as_mut() { + Some(smaller) => { + let (right, pivot) = smaller.insert_inner(value)?; + return match inner.insert(i, pivot, Some(right), alloc) { + None => Err(None), + Some(splits) => Ok(splits), + }; + } + None => { + return match inner.insert(i, value, None, alloc) { + None => Err(None), + Some(splits) => Ok(splits), + } + } + }, + _ => {} } } - None => { - if let Some(splits) = inner.push(value, None, alloc) { - return Ok(splits); + match inner.greater.as_mut() { + Some(greater) => { + let (right, pivot) = greater.insert_inner(value)?; + if let Some(splits) = inner.push(pivot, Some(right), alloc) { + return Ok(splits); + } + } + None => { + if let Some(splits) = inner.push(value, None, alloc) { + return Ok(splits); + } } } + Err(None) } - Err(None) - } - #[cfg(test)] - fn for_each(&self, f: &mut impl FnMut(&T)) { - for ArcBTreeSetEntry { value, smaller } in self.0.entries() { - if let Some(smaller) = smaller { - smaller.for_each(f); + #[cfg(test)] + pub fn for_each(&self, f: &mut impl FnMut(&T)) { + for ArcBTreeSetEntry { value, smaller } in self.0.entries() { + if let Some(smaller) = smaller { + smaller.for_each(f); + } + f(value) } - f(value) - } - if let Some(greater) = self.0.greater.as_ref() { - greater.for_each(f) - } - } -} -impl - ArcBTreeSetNodeInner -{ - fn insert( - &mut self, - i: usize, - value: T, - greater: Option>, - alloc: &Alloc, - ) -> Option<(ArcBTreeSetNode, T)> - where - Alloc: Clone, - { - unsafe { - for j in (i..self.len).rev() { - *self.entries.get_unchecked_mut(j + 1) = - MaybeUninit::new(self.entries.get_unchecked(j).assume_init_read()); + if let Some(greater) = self.0.greater.as_ref() { + greater.for_each(f) } - self.len += 1; - *self.entries.get_unchecked_mut(i) = MaybeUninit::new(ArcBTreeSetEntry { - value, - smaller: core::mem::replace( - &mut self - .entries - .get_unchecked_mut(i + 1) - .assume_init_mut() - .smaller, - greater, - ), - }); } - self.split(alloc) - } - fn push( - &mut self, - value: T, - greater: Option>, - alloc: &Alloc, - ) -> Option<(ArcBTreeSetNode, T)> - where - Alloc: Clone, + } + impl + ArcBTreeSetNodeInner { - unsafe { - self.entries - .get_unchecked_mut(self.len) - .write(ArcBTreeSetEntry { + fn insert( + &mut self, + i: usize, + value: T, + greater: Option>, + alloc: &Alloc, + ) -> Option<(ArcBTreeSetNode, T)> + where + Alloc: Clone, + { + unsafe { + for j in (i..self.len).rev() { + *self.entries.get_unchecked_mut(j + 1) = + MaybeUninit::new(self.entries.get_unchecked(j).assume_init_read()); + } + self.len += 1; + *self.entries.get_unchecked_mut(i) = MaybeUninit::new(ArcBTreeSetEntry { value, - smaller: core::mem::replace(&mut self.greater, greater), + smaller: core::mem::replace( + &mut self + .entries + .get_unchecked_mut(i + 1) + .assume_init_mut() + .smaller, + greater, + ), }); - self.len += 1; + } + self.split(alloc) } - self.split(alloc) - } - fn split( - &mut self, - alloc: &Alloc, - ) -> Option<(ArcBTreeSetNode, T)> - where - Alloc: Clone, - { - unsafe { - if self.len == SPLIT_LIMIT { - let ArcBTreeSetEntry { - value: pivot, - smaller, - } = self - .entries - .get_unchecked(SPLIT_LIMIT / 2) - .assume_init_read(); - let mut right = Self { - entries: [(); SPLIT_LIMIT].map(|_| core::mem::MaybeUninit::uninit()), - len: SPLIT_LIMIT / 2, - greater: self.greater.take(), - }; - core::ptr::copy_nonoverlapping( - self.entries.get_unchecked(SPLIT_LIMIT / 2 + 1), - 
right.entries.get_unchecked_mut(0), - SPLIT_LIMIT / 2, - ); - self.greater = smaller; - self.len = SPLIT_LIMIT / 2; - let right = ArcBTreeSetNode(Arc::new_in(right, alloc.clone())); - Some((right, pivot)) - } else { - None + fn push( + &mut self, + value: T, + greater: Option>, + alloc: &Alloc, + ) -> Option<(ArcBTreeSetNode, T)> + where + Alloc: Clone, + { + unsafe { + self.entries + .get_unchecked_mut(self.len) + .write(ArcBTreeSetEntry { + value, + smaller: core::mem::replace(&mut self.greater, greater), + }); + self.len += 1; + } + self.split(alloc) + } + fn split( + &mut self, + alloc: &Alloc, + ) -> Option<(ArcBTreeSetNode, T)> + where + Alloc: Clone, + { + unsafe { + if self.len == SPLIT_LIMIT { + let ArcBTreeSetEntry { + value: pivot, + smaller, + } = self + .entries + .get_unchecked(SPLIT_LIMIT / 2) + .assume_init_read(); + let mut right = Self { + entries: [(); SPLIT_LIMIT].map(|_| core::mem::MaybeUninit::uninit()), + len: SPLIT_LIMIT / 2, + greater: self.greater.take(), + }; + core::ptr::copy_nonoverlapping( + self.entries.get_unchecked(SPLIT_LIMIT / 2 + 1), + right.entries.get_unchecked_mut(0), + SPLIT_LIMIT / 2, + ); + self.greater = smaller; + self.len = SPLIT_LIMIT / 2; + let right = ArcBTreeSetNode(Arc::new_in(right, alloc.clone())); + Some((right, pivot)) + } else { + None + } } } } -} -impl - ArcBTreeSetNodeInner -{ - fn entries(&self) -> &[ArcBTreeSetEntry] { - unsafe { core::mem::transmute(self.entries.get_unchecked(..self.len)) } - } - fn entries_mut(&mut self) -> &mut [ArcBTreeSetEntry] { - unsafe { core::mem::transmute(self.entries.get_unchecked_mut(..self.len)) } + impl + ArcBTreeSetNodeInner + { + pub fn entries(&self) -> &[ArcBTreeSetEntry] { + unsafe { core::mem::transmute(self.entries.get_unchecked(..self.len)) } + } + pub fn entries_mut( + &mut self, + ) -> &mut [ArcBTreeSetEntry] { + unsafe { core::mem::transmute(self.entries.get_unchecked_mut(..self.len)) } + } } } #[test] +#[cfg(feature = "libc")] fn btree_insert_libc() { use rand::Rng; let mut rng = rand::thread_rng(); for i in 0..1000 { dbg!(i); - let mut vec = crate::alloc::vec::Vec::new(); - let mut btree = ArcBTreeSet::new(); + let mut vec = + crate::alloc::vec::Vec::new_in(crate::alloc::allocators::LibcAlloc::default()); + let mut btree = ArcBTreeSet::new_in(crate::alloc::allocators::LibcAlloc::default()); for _ in 0..rng.gen_range(0..800) { let val = rng.gen_range(0..100u8); if vec.binary_search(&val).is_ok() { @@ -670,8 +732,7 @@ fn btree_insert_rs() { dbg!(i); let mut vec = crate::alloc::vec::Vec::new_in(crate::alloc::allocators::RustAlloc::default()); - let mut btree = - ArcBTreeSet::<_, _, false, 5>::new_in(crate::alloc::allocators::RustAlloc::default()); + let mut btree = ArcBTreeSet::new_in(crate::alloc::allocators::RustAlloc::default()); for _ in 0..rng.gen_range(0..800) { let val = rng.gen_range(0..100); if vec.binary_search(&val).is_ok() { @@ -693,3 +754,37 @@ fn btree_insert_rs() { assert_eq!(iter.next(), None); } } + +// #[test] +// fn btree_insert_freelist() { +// use rand::Rng; +// let mut rng = rand::thread_rng(); +// for i in 0..1000 { +// dbg!(i); +// let mut vec = crate::alloc::vec::Vec::new_in( +// crate::alloc::allocators::FreelistGlobalAlloc::default(), +// ); +// let mut btree = ArcBTreeSet::<_, _, false, 5>::new_in( +// crate::alloc::allocators::FreelistGlobalAlloc::default(), +// ); +// for _ in 0..rng.gen_range(0..800) { +// let val = rng.gen_range(0..100); +// if vec.binary_search(&val).is_ok() { +// assert_eq!(btree.insert(val), Some(val)); +// } else { +// vec.push(val); +// 
vec.sort(); +// assert_eq!( +// btree.insert(val), +// None, +// "The BTree contained an unexpected value: {btree:?}, {vec:?}" +// ); +// } +// } +// vec.sort(); +// assert_eq!(vec.len(), btree.len()); +// let mut iter = vec.into_iter(); +// btree.for_each(|i| assert_eq!(Some(*i), iter.next())); +// assert_eq!(iter.next(), None); +// } +// } diff --git a/stabby-abi/src/alloc/mod.rs b/stabby-abi/src/alloc/mod.rs index 776ffd3..5ee7a08 100644 --- a/stabby-abi/src/alloc/mod.rs +++ b/stabby-abi/src/alloc/mod.rs @@ -19,11 +19,6 @@ use self::vec::ptr_diff; /// Allocators provided by `stabby` pub mod allocators; -#[cfg(all(feature = "libc", not(any(target_arch = "wasm32"))))] -/// The `libc_alloc` module, kept for API-stability. -pub mod libc_alloc { - pub use super::allocators::libc_alloc::LibcAlloc; -} /// A generic allocation error. #[crate::stabby] @@ -50,11 +45,13 @@ pub mod sync; /// [`alloc::vec`](https://doc.rust-lang.org/stable/alloc/vec/), but ABI-stable pub mod vec; -/// The default allocator: libc malloc based if the libc feature is enabled, or unavailable otherwise. -#[cfg(all(feature = "libc", not(any(target_arch = "wasm32"))))] -pub type DefaultAllocator = allocators::LibcAlloc; -#[cfg(not(all(feature = "libc", not(any(target_arch = "wasm32")))))] -pub type DefaultAllocator = core::convert::Infallible; +/// The default allocator, depending on which of the following is available: +/// - RustAlloc: Rust's `GlobalAlloc`, through a vtable that ensures FFI-safety. +/// - LibcAlloc: libc::malloc, which is 0-sized. +/// - None. I _am_ working on getting a 0-dependy allocator working, but you should probably go with `feature = "alloc-rs"` anyway. +/// +/// You can also use the `stabby_default_alloc` cfg to override the default allocator regardless of feature flags. +pub type DefaultAllocator = allocators::DefaultAllocator; #[crate::stabby] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -78,27 +75,20 @@ impl Layout { /// Note that while this ensures that even if `T`'s size is not a multiple of its alignment, /// the layout will have sufficient memory to store `n` of `T` in an aligned fashion. pub const fn array(n: usize) -> Self { - let Self { mut size, align } = Self::of::(); - let sizemodalign = size % align; - if sizemodalign != 0 { - size += align; - size -= sizemodalign; + let Self { size, align } = Self::of::(); + Layout { + size: size * n, + align, } - size *= n; - Layout { size, align } } /// Concatenates a layout to `self`, ensuring that alignment padding is taken into account. pub const fn concat(mut self, other: Self) -> Self { - let sizemodalign = self.size % other.align; - if sizemodalign != 0 { - self.size += other.align; - self.size -= sizemodalign; - } self.size += other.size; - if other.align > self.align { - self.align = other.align; - } - self + self.realign(if self.align < other.align { + other.align + } else { + self.align + }) } /// Returns the first pointer where `output >= ptr` such that `output % self.align == 0`. #[inline] @@ -108,14 +98,19 @@ impl Layout { } next_matching(self.align, ptr.cast()).cast() } - pub(crate) const fn for_alloc(mut self) -> Self { - if self.align >= 8 { - return self; - } - self.align = 8; - self.size = self.size + (8 - self.size % 8) * ((self.size % 8 != 0) as usize); + /// Changes the alignment of the layout, adding padding if necessary. 
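// [Editor's note: worked example, not part of the original patch.]
// The rule implemented by `realign` below rounds `size` up to the next multiple of
// `new_align` only when it is not already one (values computed from the arithmetic in
// the function body):
//
//     let l = Layout { size: 10, align: 4 };
//     assert_eq!(l.realign(8), Layout { size: 16, align: 8 }); // 10 padded up to 16
//
//     let l = Layout { size: 16, align: 4 };
//     assert_eq!(l.realign(8), Layout { size: 16, align: 8 }); // already a multiple, unchanged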
+ pub const fn realign(mut self, new_align: usize) -> Self { + self.align = new_align; + self.size = self.size + + (new_align - (self.size % new_align)) * (((self.size % new_align) != 0) as usize); self } + // pub(crate) fn for_alloc(mut self) -> Self { + // if self.align >= 8 { + // return self; + // } + // self.realign(8) + // } } /// An interface to an allocator. @@ -242,8 +237,10 @@ pub struct AllocPrefix { pub weak: core::sync::atomic::AtomicUsize, /// A slot to store a vector's capacity when it's turned into a boxed/arced slice. pub capacity: core::sync::atomic::AtomicUsize, + /// The origin of the prefix + pub origin: NonNull<()>, /// A slot for the allocator. - pub alloc: Alloc, + pub alloc: core::mem::MaybeUninit, } impl AllocPrefix { /// The offset between the prefix and a field of type `T`. @@ -311,21 +308,9 @@ impl AllocPtr { marker: PhantomData, } } - /// The offset between `self.ptr` and the prefix. - pub const fn prefix_skip() -> usize { - AllocPrefix::::skip_to::() - } ///The pointer to the prefix for this allocation const fn prefix_ptr(&self) -> NonNull> { - unsafe { - NonNull::new_unchecked( - self.ptr - .as_ptr() - .cast::() - .sub(Self::prefix_skip()) - .cast(), - ) - } + unsafe { NonNull::new_unchecked(self.ptr.as_ptr().cast::>().sub(1)) } } /// A reference to the prefix for this allocation. /// # Safety @@ -348,43 +333,47 @@ impl AllocPtr { pub unsafe fn prefix_mut(&mut self) -> &mut AllocPrefix { unsafe { self.prefix_ptr().as_mut() } } + /// Initializes any given pointer: + /// - The returned pointer is guaranteed to be correctly aligned for `T` + /// - It is guaranteed to preceded without padding by an `AllocPrefix` + /// # Safety + /// `ptr` MUST be word-aligned, and MUST be valid for writes for at least the size of + /// `#[repr(C)] struct { prefix: AllocPrefix, data: [T; capacity] }` + pub unsafe fn init(ptr: NonNull<()>, capacity: usize) -> Self { + let shifted_for_prefix = ptr + .as_ptr() + .cast::>() + .add(1) + .cast::(); + let inited = shifted_for_prefix + .cast::() + .add(shifted_for_prefix.align_offset(core::mem::align_of::())) + .cast::(); + let this: Self = AllocPtr { + ptr: NonNull::new_unchecked(inited), + marker: core::marker::PhantomData, + }; + this.prefix_ptr().as_ptr().write(AllocPrefix { + strong: AtomicUsize::new(1), + weak: AtomicUsize::new(1), + capacity: AtomicUsize::new(capacity), + origin: ptr, + alloc: core::mem::MaybeUninit::uninit(), + }); + this + } } impl AllocPtr { /// Allocates a pointer to a single element of `T`, prefixed by an [`AllocPrefix`] pub fn alloc(alloc: &mut Alloc) -> Option { - let ptr = alloc.alloc(Layout::of::>().concat(Layout::of::())); - NonNull::new(ptr).map(|prefix| unsafe { - prefix.cast::>().as_mut().capacity = AtomicUsize::new(1); - let this = Self { - ptr: NonNull::new_unchecked( - prefix.as_ptr().cast::().add(Self::prefix_skip()).cast(), - ), - marker: PhantomData, - }; - debug_assert!(core::ptr::eq( - prefix.as_ptr().cast(), - this.prefix() as *const _ - )); - this - }) + Self::alloc_array(alloc, 1) } /// Allocates a pointer to an array of `capacity` `T`, prefixed by an [`AllocPrefix`] pub fn alloc_array(alloc: &mut Alloc, capacity: usize) -> Option { - let ptr = - alloc.alloc(Layout::of::>().concat(Layout::array::(capacity))); - NonNull::new(ptr).map(|prefix| unsafe { - prefix.cast::>().as_mut().capacity = AtomicUsize::new(capacity); - let ptr = prefix.as_ptr().cast::().add(Self::prefix_skip()); - let this = Self { - ptr: NonNull::new_unchecked(ptr.cast()), - marker: PhantomData, - }; - 
debug_assert!(core::ptr::eq( - prefix.as_ptr().cast(), - this.prefix() as *const _ - )); - this - }) + let mut layout = Layout::of::>().concat(Layout::array::(capacity)); + layout.align = core::mem::align_of::>(); + let ptr = alloc.alloc(layout); + NonNull::new(ptr).map(|ptr| unsafe { Self::init(ptr, capacity) }) } /// Reallocates a pointer to an array of `capacity` `T`, prefixed by an [`AllocPrefix`]. /// @@ -398,7 +387,9 @@ impl AllocPtr { prev_capacity: usize, new_capacity: usize, ) -> Option { - let layout = Layout::of::>().concat(Layout::array::(prev_capacity)); + let mut layout = + Layout::of::>().concat(Layout::array::(prev_capacity)); + layout.align = core::mem::align_of::>(); let ptr = alloc.realloc( (self.prefix() as *const AllocPrefix) .cast_mut() @@ -408,25 +399,13 @@ impl AllocPtr { .concat(Layout::array::(new_capacity)) .size, ); - NonNull::new(ptr).map(|prefix| unsafe { - prefix.cast::>().as_mut().capacity = AtomicUsize::new(new_capacity); - let ptr = prefix.as_ptr().cast::().add(Self::prefix_skip()); - let this = Self { - ptr: NonNull::new_unchecked(ptr.cast()), - marker: PhantomData, - }; - debug_assert!(core::ptr::eq( - prefix.as_ptr().cast(), - this.prefix() as *const _ - )); - this - }) + NonNull::new(ptr).map(|ptr| unsafe { Self::init(ptr, new_capacity) }) } /// Reallocates a pointer to an array of `capacity` `T`, prefixed by an [`AllocPrefix`] /// # Safety /// `self` must not be dangling, and is freed after this returns. pub unsafe fn free(self, alloc: &mut Alloc) { - alloc.free(self.prefix() as *const _ as *mut _) + alloc.free(self.prefix().origin.as_ptr()) } } diff --git a/stabby-abi/src/alloc/single_or_vec.rs b/stabby-abi/src/alloc/single_or_vec.rs index c4fcd72..13b22df 100644 --- a/stabby-abi/src/alloc/single_or_vec.rs +++ b/stabby-abi/src/alloc/single_or_vec.rs @@ -26,9 +26,9 @@ where inner: crate::Result, Vec>, } -#[cfg(feature = "libc")] impl SingleOrVec where + DefaultAllocator: Default, Single: IDeterminantProvider>, Vec: IStable, crate::Result, Vec>: IStable, diff --git a/stabby-abi/src/alloc/string.rs b/stabby-abi/src/alloc/string.rs index a0597b5..5705464 100644 --- a/stabby-abi/src/alloc/string.rs +++ b/stabby-abi/src/alloc/string.rs @@ -13,8 +13,10 @@ pub struct String { pub(crate) inner: Vec, } -#[cfg(feature = "libc")] -impl String { +impl String +where + super::DefaultAllocator: Default, +{ /// Constructs a new string using the default allocator. 
pub const fn new() -> Self { Self { inner: Vec::new() } diff --git a/stabby-abi/src/alloc/sync.rs b/stabby-abi/src/alloc/sync.rs index 21b3620..d6e6af7 100644 --- a/stabby-abi/src/alloc/sync.rs +++ b/stabby-abi/src/alloc/sync.rs @@ -70,7 +70,7 @@ impl Arc { ) -> Result { let mut ptr = match AllocPtr::alloc(&mut alloc) { Some(mut ptr) => { - unsafe { core::ptr::write(&mut ptr.prefix_mut().alloc, alloc) }; + unsafe { ptr.prefix_mut().alloc.write(alloc) }; ptr } None => return Err((constructor, alloc)), @@ -110,10 +110,10 @@ impl Arc { mut alloc: Alloc, ) -> Self { let mut ptr = match AllocPtr::alloc(&mut alloc) { - Some(mut ptr) => { - unsafe { core::ptr::write(&mut ptr.prefix_mut().alloc, alloc) }; + Some(mut ptr) => unsafe { + ptr.prefix_mut().alloc.write(alloc); ptr - } + }, None => panic!("Allocation failed"), }; unsafe { @@ -204,7 +204,10 @@ impl Arc { Alloc: Clone, { if !Self::is_unique(self) { - *self = Self::new_in(T::clone(self), unsafe { self.ptr.prefix() }.alloc.clone()); + *self = Self::new_in( + T::clone(self), + unsafe { self.ptr.prefix().alloc.assume_init_ref() }.clone(), + ); } unsafe { Self::get_mut_unchecked(self) } } @@ -233,12 +236,12 @@ impl Arc { #[rustversion::since(1.73)] /// Returns a reference to the allocator used to construct `this` pub const fn allocator(this: &Self) -> &Alloc { - unsafe { &this.ptr.prefix().alloc } + unsafe { this.ptr.prefix().alloc.assume_init_ref() } } #[rustversion::before(1.73)] /// Returns a reference to the allocator used to construct `this` pub fn allocator(this: &Self) -> &Alloc { - unsafe { &this.ptr.prefix().alloc } + unsafe { this.ptr.prefix().alloc.assume_init_ref() } } } impl Drop for Arc { @@ -336,7 +339,7 @@ impl Drop for Weak { return; } unsafe { - let mut alloc = core::ptr::read(&self.ptr.prefix().alloc); + let mut alloc = self.ptr.prefix().alloc.assume_init_read(); self.ptr.free(&mut alloc) } } @@ -446,7 +449,7 @@ impl From> for ArcSlice { slice.start.prefix_mut().strong = AtomicUsize::new(1); slice.start.prefix_mut().weak = AtomicUsize::new(1); slice.start.prefix_mut().capacity = AtomicUsize::new(capacity); - core::ptr::write(&mut slice.start.prefix_mut().alloc, alloc); + slice.start.prefix_mut().alloc.write(alloc); } Self { inner: AllocSlice { @@ -467,7 +470,7 @@ impl From> for ArcSlice { start.ptr.cast::(), )) }; - core::ptr::write(&mut slice.start.prefix_mut().alloc, alloc); + slice.start.prefix_mut().alloc.write(alloc); } Self { inner: AllocSlice { @@ -493,7 +496,7 @@ impl TryFrom> for Vec { value.inner.start.ptr, value.inner.start.prefix().capacity.load(Ordering::Relaxed), ), - alloc: core::ptr::read(&value.inner.start.prefix().alloc), + alloc: value.inner.start.prefix().alloc.assume_init_read(), }, }; core::mem::forget(value); @@ -643,7 +646,7 @@ impl Drop for WeakSlice { { return; } - let mut alloc = unsafe { core::ptr::read(&self.inner.start.prefix().alloc) }; + let mut alloc = unsafe { self.inner.start.prefix().alloc.assume_init_read() }; unsafe { self.inner.start.free(&mut alloc) } } } diff --git a/stabby-abi/src/alloc/vec.rs b/stabby-abi/src/alloc/vec.rs index 5ba49b3..e6a8eec 100644 --- a/stabby-abi/src/alloc/vec.rs +++ b/stabby-abi/src/alloc/vec.rs @@ -55,8 +55,10 @@ pub(crate) const fn ptr_add(lhs: NonNull, rhs: usize) -> NonNull { } } -#[cfg(feature = "libc")] -impl Vec { +impl Vec +where + super::DefaultAllocator: Default, +{ /// Constructs a new vector with the default allocator. This doesn't actually allocate. 
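// [Editor's note: illustrative sketch, not part of the original patch.]
// The sync.rs hunks above move the allocator stored in the `AllocPrefix` behind a
// `MaybeUninit` and route every access through `assume_init_*`; from the caller's side,
// construction and allocator access are unchanged. Assuming the `RustAlloc` allocator
// from this patch (`feature = "alloc-rs"`):
//
//     use stabby_abi::alloc::{allocators::RustAlloc, sync::Arc};
//
//     let arc = Arc::new_in(42u32, RustAlloc::default());
//     assert_eq!(*arc, 42);
//     let _alloc: &RustAlloc = Arc::allocator(&arc);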
pub const fn new() -> Self { Self::new_in(super::DefaultAllocator::new()) @@ -792,7 +794,7 @@ impl std::io::Write for Vec { } } -#[cfg(all(feature = "std", feature = "libc"))] +#[cfg(feature = "std")] #[test] fn test() { use rand::Rng; diff --git a/stabby-abi/src/istable.rs b/stabby-abi/src/istable.rs index 06b1a61..6d824c7 100644 --- a/stabby-abi/src/istable.rs +++ b/stabby-abi/src/istable.rs @@ -559,3 +559,7 @@ unsafe impl IStable for Struct { type CType = (); primitive_report!("FP"); } + +/// Used by `stabby` to prevent proof cycles in types that contain indirections to themselves. +#[crate::stabby] +pub struct _Self; diff --git a/stabby-abi/src/stable_impls/mod.rs b/stabby-abi/src/stable_impls/mod.rs index 39721a7..e8ed5bc 100644 --- a/stabby-abi/src/stable_impls/mod.rs +++ b/stabby-abi/src/stable_impls/mod.rs @@ -20,7 +20,6 @@ macro_rules! same_as { type Size = <$t as IStable>::Size; type UnusedBits = <$t as IStable>::UnusedBits; type ForbiddenValues = <$t as IStable>::ForbiddenValues; - type HasExactlyOneNiche = <$t as IStable>::HasExactlyOneNiche; #[cfg(feature = "ctypes")] type CType = <$t as IStable>::CType; primitive_report!($($name)*); @@ -423,6 +422,7 @@ unsafe impl IStable for usize { same_as!(u32, "usize"); #[cfg(target_pointer_width = "16")] same_as!(u16, "usize"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } @@ -434,149 +434,185 @@ unsafe impl IStable for core::num::NonZeroUsize { same_as!(core::num::NonZeroU32, "core::num::NonZeroUsize"); #[cfg(target_pointer_width = "16")] same_as!(core::num::NonZeroU16, "core::num::NonZeroUsize"); + type HasExactlyOneNiche = B1; type ContainsIndirections = B0; } unsafe impl IStable for i8 { same_as!(u8, "i8"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::num::NonZeroI8 { same_as!(core::num::NonZeroU8, "core::num::NonZeroI8"); + type HasExactlyOneNiche = B1; type ContainsIndirections = B0; } unsafe impl IStable for i16 { same_as!(u16, "i16"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::num::NonZeroI16 { same_as!(core::num::NonZeroU16, "core::num::NonZeroI16"); + type HasExactlyOneNiche = B1; type ContainsIndirections = B0; } unsafe impl IStable for i32 { same_as!(u32, "i32"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::num::NonZeroI32 { same_as!(core::num::NonZeroU32, "core::num::NonZeroI32"); + type HasExactlyOneNiche = B1; type ContainsIndirections = B0; } unsafe impl IStable for i64 { same_as!(u64, "i64"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::num::NonZeroI64 { same_as!(core::num::NonZeroU64, "core::num::NonZeroI64"); + type HasExactlyOneNiche = B1; type ContainsIndirections = B0; } unsafe impl IStable for i128 { same_as!(u128, "i128"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::num::NonZeroI128 { same_as!(core::num::NonZeroU128, "core::num::NonZeroI128"); + type HasExactlyOneNiche = B1; type ContainsIndirections = B0; } unsafe impl IStable for isize { same_as!(usize, "isize"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::num::NonZeroIsize { same_as!(core::num::NonZeroUsize, "core::num::NonZeroIsize"); + type HasExactlyOneNiche = B1; type ContainsIndirections = B0; } unsafe impl IStable for core::mem::ManuallyDrop { same_as!(T, ::REPORT.name.as_str()); + type HasExactlyOneNiche = T::HasExactlyOneNiche; 
type ContainsIndirections = T::ContainsIndirections; } unsafe impl IStable for core::mem::MaybeUninit { same_as!(T, ::REPORT.name.as_str()); + type HasExactlyOneNiche = T::HasExactlyOneNiche; type ContainsIndirections = T::ContainsIndirections; } unsafe impl IStable for core::cell::UnsafeCell { same_as!(T, ::REPORT.name.as_str()); + type HasExactlyOneNiche = T::HasExactlyOneNiche; type ContainsIndirections = T::ContainsIndirections; } unsafe impl IStable for *const T { same_as!(usize, "*const", T); + type HasExactlyOneNiche = B0; type ContainsIndirections = B1; } unsafe impl IStable for *mut T { same_as!(usize, "*mut", T); + type HasExactlyOneNiche = B0; type ContainsIndirections = B1; } unsafe impl IStable for core::ptr::NonNull { same_as!(core::num::NonZeroUsize, "core::ptr::NonNull", T); + type HasExactlyOneNiche = B1; type ContainsIndirections = B1; } unsafe impl IStable for core::sync::atomic::AtomicPtr { same_as!(*mut T, "core::sync::atomic::AtomicPtr", T); + type HasExactlyOneNiche = B0; type ContainsIndirections = B1; } unsafe impl IStable for core::sync::atomic::AtomicBool { same_as!(bool, "core::sync::atomic::AtomicBool"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::sync::atomic::AtomicI8 { same_as!(i8, "core::sync::atomic::AtomicI8"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::sync::atomic::AtomicI16 { same_as!(i16, "core::sync::atomic::AtomicI16"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::sync::atomic::AtomicI32 { same_as!(i32, "core::sync::atomic::AtomicI32"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::sync::atomic::AtomicI64 { same_as!(i64, "core::sync::atomic::AtomicI64"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::sync::atomic::AtomicIsize { same_as!(isize, "core::sync::atomic::AtomicIsize"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::sync::atomic::AtomicU8 { same_as!(u8, "core::sync::atomic::AtomicU8"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::sync::atomic::AtomicU16 { same_as!(u16, "core::sync::atomic::AtomicU16"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::sync::atomic::AtomicU32 { same_as!(u32, "core::sync::atomic::AtomicU32"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::sync::atomic::AtomicU64 { same_as!(u64, "core::sync::atomic::AtomicU64"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for core::sync::atomic::AtomicUsize { same_as!(usize, "core::sync::atomic::AtomicUsize"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for &T { same_as!(core::num::NonZeroUsize, "&", T); + type HasExactlyOneNiche = B1; type ContainsIndirections = B1; } unsafe impl IStable for &mut T { same_as!(core::num::NonZeroUsize, "&mut", T); + type HasExactlyOneNiche = B1; type ContainsIndirections = B1; } unsafe impl IStable for core::pin::Pin { same_as!(T, "core::pin::Pin", T); + type HasExactlyOneNiche = T::HasExactlyOneNiche; type ContainsIndirections = T::ContainsIndirections; } unsafe impl IStable for f32 { same_as!(u32, "f32"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } unsafe impl IStable for f64 { same_as!(u64, 
"f64"); + type HasExactlyOneNiche = B0; type ContainsIndirections = B0; } @@ -731,20 +767,24 @@ macro_rules! fnstable { (-> $o: ident) => { unsafe impl<$o: IStable > IStable for extern "C" fn() -> $o { same_as!(core::num::NonZeroUsize, "extern \"C\" fn", $o); + type HasExactlyOneNiche = B1; type ContainsIndirections = B1; } unsafe impl<$o: IStable > IStable for unsafe extern "C" fn() -> $o { same_as!(core::num::NonZeroUsize, "unsafe extern \"C\" fn", $o); + type HasExactlyOneNiche = B1; type ContainsIndirections = B1; } #[rustversion::since(1.71)] unsafe impl<$o: IStable > IStable for extern "C-unwind" fn() -> $o { same_as!(core::num::NonZeroUsize, "extern \"C-unwind\" fn", $o); + type HasExactlyOneNiche = B1; type ContainsIndirections = B1; } #[rustversion::since(1.71)] unsafe impl<$o: IStable > IStable for unsafe extern "C-unwind" fn() -> $o { same_as!(core::num::NonZeroUsize, "unsafe extern \"C-unwind\" fn", $o); + type HasExactlyOneNiche = B1; type ContainsIndirections = B1; } }; @@ -752,21 +792,25 @@ macro_rules! fnstable { unsafe impl< $o , $t, $($tt,)* > IStable for extern "C" fn($t, $($tt,)*) -> $o where $o : IStable, $t: IStable, $($tt: IStable,)* { same_as!(core::num::NonZeroUsize, "extern \"C\" fn", union!($o, $t, $($tt,)*)); + type HasExactlyOneNiche = B1; type ContainsIndirections = B1; } unsafe impl< $o : IStable, $t: IStable, $($tt: IStable,)* > IStable for unsafe extern "C" fn($t, $($tt,)*) -> $o { same_as!(core::num::NonZeroUsize, "unsafe extern \"C\" fn", union!($o, $t, $($tt,)*)); + type HasExactlyOneNiche = B1; type ContainsIndirections = B1; } #[rustversion::since(1.71)] unsafe impl< $o , $t, $($tt,)* > IStable for extern "C-unwind" fn($t, $($tt,)*) -> $o where $o : IStable, $t: IStable, $($tt: IStable,)* { same_as!(core::num::NonZeroUsize, "extern \"C-unwind\" fn", union!($o, $t, $($tt,)*)); + type HasExactlyOneNiche = B1; type ContainsIndirections = B1; } #[rustversion::since(1.71)] unsafe impl< $o : IStable, $t: IStable, $($tt: IStable,)* > IStable for unsafe extern "C-unwind" fn($t, $($tt,)*) -> $o { same_as!(core::num::NonZeroUsize, "unsafe extern \"C-unwind\" fn", union!($o, $t, $($tt,)*)); + type HasExactlyOneNiche = B1; type ContainsIndirections = B1; } fnstable!($($tt,)* -> $o); diff --git a/stabby-abi/src/vtable/mod.rs b/stabby-abi/src/vtable/mod.rs index 4fa69a3..865952c 100644 --- a/stabby-abi/src/vtable/mod.rs +++ b/stabby-abi/src/vtable/mod.rs @@ -41,10 +41,10 @@ pub trait IConstConstructor<'a, Source>: 'a + Copy { } } -#[cfg(all(feature = "libc", feature = "test"))] +#[cfg(all(any(feature = "libc", feature = "alloc-rs"), feature = "test"))] pub use internal::{VTableRegistry, VtBtree, VtVec}; -#[cfg(feature = "libc")] +#[cfg(any(feature = "libc", feature = "alloc-rs"))] pub(crate) mod internal { use crate::alloc::{boxed::BoxedSlice, collections::arc_btree::AtomicArcBTreeSet}; use core::ptr::NonNull; @@ -212,7 +212,7 @@ pub(crate) mod internal { } #[cfg(all( - feature = "libc", + any(feature = "libc", feature = "alloc-rs"), any(stabby_vtables = "vec", stabby_vtables = "btree", not(stabby_vtables)) ))] #[rustversion::all(not(nightly), since(1.78.0))] diff --git a/stabby-macros/src/enums.rs b/stabby-macros/src/enums.rs index 09f35df..a85793b 100644 --- a/stabby-macros/src/enums.rs +++ b/stabby-macros/src/enums.rs @@ -18,6 +18,8 @@ use proc_macro2::TokenStream; use quote::quote; use syn::{Attribute, DataEnum, Generics, Ident, Visibility}; +use crate::Unself; + #[derive(Clone, Copy, PartialEq, Eq)] pub enum Repr { Stabby, @@ -174,7 +176,7 @@ pub fn stabby( let 
mut layout = quote!(()); let DataEnum { variants, .. } = &data; let mut has_non_empty_fields = false; - let unit = syn::parse2(quote!(())).unwrap(); + let unit: syn::Type = syn::parse2(quote!(())).unwrap(); let mut report = crate::Report::r#enum(ident.to_string(), version, module.clone()); for variant in variants { match &variant.fields { @@ -184,7 +186,7 @@ pub fn stabby( crate::Report::r#struct(variant.ident.to_string(), version, module.clone()); let mut variant_layout = quote!(()); for f in &f.named { - let ty = &f.ty; + let ty = f.ty.unself(&ident); variant_layout = quote!(#st::FieldPair<#variant_layout, #ty>); variant_report.add_field(f.ident.as_ref().unwrap().to_string(), ty); } @@ -202,7 +204,7 @@ pub fn stabby( crate::Report::r#struct(variant.ident.to_string(), version, module.clone()); let mut variant_layout = quote!(()); for (n, f) in f.unnamed.iter().enumerate() { - let ty = &f.ty; + let ty = f.ty.unself(&ident); variant_layout = quote!(#st::FieldPair<#variant_layout, #ty>); variant_report.add_field(n.to_string(), ty); } @@ -217,13 +219,13 @@ pub fn stabby( ); has_non_empty_fields = true; let f = f.unnamed.first().unwrap(); - let ty = &f.ty; + let ty = f.ty.unself(&ident); layout = quote!(#st::Union<#layout, core::mem::ManuallyDrop<#ty>>); report.add_field(variant.ident.to_string(), ty); } } syn::Fields::Unit => { - report.add_field(variant.ident.to_string(), &unit); + report.add_field(variant.ident.to_string(), unit.clone()); } } } diff --git a/stabby-macros/src/lib.rs b/stabby-macros/src/lib.rs index 993e5fc..f9fedce 100644 --- a/stabby-macros/src/lib.rs +++ b/stabby-macros/src/lib.rs @@ -258,17 +258,17 @@ pub fn gen_closures_impl(_: TokenStream) -> TokenStream { } #[derive(Clone)] -enum Type<'a> { - Syn(&'a syn::Type), - Report(Report<'a>), +enum Type { + Syn(syn::Type), + Report(Report), } -impl<'a> From<&'a syn::Type> for Type<'a> { - fn from(value: &'a syn::Type) -> Self { +impl From for Type { + fn from(value: syn::Type) -> Self { Self::Syn(value) } } -impl<'a> From> for Type<'a> { - fn from(value: Report<'a>) -> Self { +impl From for Type { + fn from(value: Report) -> Self { Self::Report(value) } } @@ -294,14 +294,14 @@ impl ToTokens for Tyty { } } #[derive(Clone)] -pub(crate) struct Report<'a> { +pub(crate) struct Report { name: String, - fields: Vec<(String, Type<'a>)>, + fields: Vec<(String, Type)>, version: u32, module: proc_macro2::TokenStream, pub tyty: Tyty, } -impl<'a> Report<'a> { +impl Report { pub fn r#struct( name: impl Into, version: u32, @@ -349,19 +349,19 @@ impl<'a> Report<'a> { tyty: Tyty::Union, } } - pub fn add_field(&mut self, name: String, ty: impl Into>) { + pub fn add_field(&mut self, name: String, ty: impl Into) { self.fields.push((name, ty.into())); } fn __bounds( &self, - bounded_types: &mut HashSet<&'a syn::Type>, + bounded_types: &mut HashSet, mut report_bounds: proc_macro2::TokenStream, st: &proc_macro2::TokenStream, ) -> proc_macro2::TokenStream { for (_, ty) in self.fields.iter() { match ty { Type::Syn(ty) => { - if bounded_types.insert(*ty) { + if bounded_types.insert(ty.clone()) { report_bounds = quote!(#ty: #st::IStable, #report_bounds); } } @@ -422,7 +422,7 @@ impl<'a> Report<'a> { } } } -impl ToTokens for Report<'_> { +impl ToTokens for Report { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { let st = crate::tl_mod(); let mut fields = quote!(None); @@ -482,3 +482,233 @@ pub fn canary_suffixes(_: TokenStream) -> TokenStream { } stream.into() } + +trait Unself { + fn unself(&self, this: &syn::Ident) -> Self; +} +impl Unself 
for syn::Path { + fn unself(&self, this: &syn::Ident) -> Self { + let syn::Path { + leading_colon, + segments, + } = self; + if self.is_ident("Self") || self.is_ident(this) { + let st = crate::tl_mod(); + return syn::parse2(quote! {#st::istable::_Self}).unwrap(); + } + syn::Path { + leading_colon: leading_colon.clone(), + segments: segments + .iter() + .map(|syn::PathSegment { ident, arguments }| syn::PathSegment { + ident: ident.clone(), + arguments: match arguments { + syn::PathArguments::None => syn::PathArguments::None, + syn::PathArguments::AngleBracketed( + syn::AngleBracketedGenericArguments { + colon2_token, + lt_token, + args, + gt_token, + }, + ) => syn::PathArguments::AngleBracketed( + syn::AngleBracketedGenericArguments { + colon2_token: colon2_token.clone(), + lt_token: lt_token.clone(), + args: args + .iter() + .map(|arg| match arg { + syn::GenericArgument::Type(ty) => { + syn::GenericArgument::Type(ty.unself(this)) + } + syn::GenericArgument::Binding(syn::Binding { + ident, + eq_token, + ty, + }) => syn::GenericArgument::Binding(syn::Binding { + ident: ident.clone(), + eq_token: eq_token.clone(), + ty: ty.unself(this), + }), + syn::GenericArgument::Constraint(syn::Constraint { + ident, + colon_token, + bounds, + }) => syn::GenericArgument::Constraint(syn::Constraint { + ident: ident.clone(), + colon_token: colon_token.clone(), + bounds: bounds.iter().map(|b| b.unself(this)).collect(), + }), + other => other.clone(), + }) + .collect(), + gt_token: gt_token.clone(), + }, + ), + syn::PathArguments::Parenthesized(syn::ParenthesizedGenericArguments { + paren_token, + inputs, + output, + }) => { + syn::PathArguments::Parenthesized(syn::ParenthesizedGenericArguments { + paren_token: paren_token.clone(), + inputs: inputs.iter().map(|t| t.unself(this)).collect(), + output: match output { + syn::ReturnType::Default => syn::ReturnType::Default, + syn::ReturnType::Type(arrow, ty) => syn::ReturnType::Type( + arrow.clone(), + Box::new(ty.unself(this)), + ), + }, + }) + } + }, + }) + .collect(), + } + } +} +impl Unself for syn::TypeParamBound { + fn unself(&self, this: &syn::Ident) -> Self { + match self { + TypeParamBound::Trait(syn::TraitBound { + paren_token, + modifier, + lifetimes, + path, + }) => TypeParamBound::Trait(syn::TraitBound { + paren_token: paren_token.clone(), + modifier: modifier.clone(), + lifetimes: lifetimes.clone(), + path: path.unself(this), + }), + TypeParamBound::Lifetime(l) => TypeParamBound::Lifetime(l.clone()), + } + } +} +impl Unself for syn::Type { + fn unself(&self, this: &syn::Ident) -> syn::Type { + match self { + syn::Type::Array(syn::TypeArray { + elem, + len, + bracket_token, + semi_token, + }) => syn::Type::Array(syn::TypeArray { + elem: Box::new(elem.unself(this)), + len: len.clone(), + bracket_token: bracket_token.clone(), + semi_token: semi_token.clone(), + }), + syn::Type::BareFn(syn::TypeBareFn { + lifetimes, + unsafety, + abi, + inputs, + output, + fn_token, + paren_token, + variadic, + }) => syn::Type::BareFn(syn::TypeBareFn { + lifetimes: lifetimes.clone(), + unsafety: unsafety.clone(), + abi: abi.clone(), + inputs: inputs + .iter() + .map(|syn::BareFnArg { attrs, name, ty }| syn::BareFnArg { + attrs: attrs.clone(), + name: name.clone(), + ty: ty.unself(this), + }) + .collect(), + output: match output { + syn::ReturnType::Default => syn::ReturnType::Default, + syn::ReturnType::Type(arrow, ret) => { + syn::ReturnType::Type(arrow.clone(), Box::new(ret.unself(this))) + } + }, + fn_token: fn_token.clone(), + paren_token: paren_token.clone(), + 
variadic: variadic.clone(), + }), + syn::Type::Group(syn::TypeGroup { group_token, elem }) => { + syn::Type::Group(syn::TypeGroup { + group_token: group_token.clone(), + elem: Box::new(elem.unself(this)), + }) + } + syn::Type::ImplTrait(syn::TypeImplTrait { impl_token, bounds }) => { + syn::Type::ImplTrait(syn::TypeImplTrait { + impl_token: impl_token.clone(), + bounds: bounds.into_iter().map(|p| p.unself(this)).collect(), + }) + } + syn::Type::Paren(syn::TypeParen { paren_token, elem }) => { + syn::Type::Paren(syn::TypeParen { + paren_token: paren_token.clone(), + elem: Box::new(elem.unself(this)), + }) + } + syn::Type::Path(syn::TypePath { qself, path }) => syn::Type::Path(syn::TypePath { + qself: qself.as_ref().map( + |syn::QSelf { + lt_token, + ty, + position, + as_token, + gt_token, + }| syn::QSelf { + lt_token: lt_token.clone(), + ty: Box::new(ty.unself(this)), + position: position.clone(), + as_token: as_token.clone(), + gt_token: gt_token.clone(), + }, + ), + path: path.unself(this), + }), + syn::Type::Ptr(syn::TypePtr { + star_token, + const_token, + mutability, + elem, + }) => syn::Type::Ptr(syn::TypePtr { + star_token: star_token.clone(), + const_token: const_token.clone(), + mutability: mutability.clone(), + elem: Box::new(elem.unself(this)), + }), + syn::Type::Reference(syn::TypeReference { + and_token, + lifetime, + mutability, + elem, + }) => syn::Type::Reference(syn::TypeReference { + and_token: and_token.clone(), + lifetime: lifetime.clone(), + mutability: mutability.clone(), + elem: Box::new(elem.unself(this)), + }), + syn::Type::Slice(syn::TypeSlice { + bracket_token, + elem, + }) => syn::Type::Slice(syn::TypeSlice { + bracket_token: bracket_token.clone(), + elem: Box::new(elem.unself(this)), + }), + syn::Type::TraitObject(syn::TypeTraitObject { dyn_token, bounds }) => { + syn::Type::TraitObject(syn::TypeTraitObject { + dyn_token: dyn_token.clone(), + bounds: bounds.iter().map(|b| b.unself(this)).collect(), + }) + } + syn::Type::Tuple(syn::TypeTuple { paren_token, elems }) => { + syn::Type::Tuple(syn::TypeTuple { + paren_token: paren_token.clone(), + elems: elems.iter().map(|ty| ty.unself(this)).collect(), + }) + } + o => o.clone(), + } + } +} diff --git a/stabby-macros/src/structs.rs b/stabby-macros/src/structs.rs index 58cca0a..9164524 100644 --- a/stabby-macros/src/structs.rs +++ b/stabby-macros/src/structs.rs @@ -16,6 +16,8 @@ use proc_macro2::Ident; use quote::quote; use syn::{Attribute, DataStruct, Generics, Visibility}; +use crate::Unself; + struct Args { optimize: bool, version: u32, @@ -81,7 +83,7 @@ pub fn stabby( syn::Fields::Named(fields) => { let fields = &fields.named; for field in fields { - let ty = &field.ty; + let ty = field.ty.unself(&ident); layout = Some(layout.map_or_else( || quote!(#ty), |layout| quote!(#st::FieldPair<#layout, #ty>), @@ -99,7 +101,7 @@ pub fn stabby( syn::Fields::Unnamed(fields) => { let fields = &fields.unnamed; for (i, field) in fields.iter().enumerate() { - let ty = &field.ty; + let ty = field.ty.unself(&ident); layout = Some(layout.map_or_else( || quote!(#ty), |layout| quote!(#st::FieldPair<#layout, #ty>), diff --git a/stabby-macros/src/unions.rs b/stabby-macros/src/unions.rs index 24d6eae..e253f05 100644 --- a/stabby-macros/src/unions.rs +++ b/stabby-macros/src/unions.rs @@ -16,6 +16,8 @@ use proc_macro2::TokenStream; use quote::quote; use syn::{Attribute, DataUnion, Generics, Ident, Visibility}; +use crate::Unself; + struct Args { version: u32, module: proc_macro2::TokenStream, @@ -68,7 +70,7 @@ pub fn stabby( let mut layout 
= quote!(()); let mut report = crate::Report::r#union(ident.to_string(), version, module); for field in &fields.named { - let ty = &field.ty; + let ty = field.ty.unself(&ident); layout = quote!(#st::Union<#layout, #ty>); report.add_field(field.ident.as_ref().unwrap().to_string(), ty); } diff --git a/stabby/Cargo.toml b/stabby/Cargo.toml index ab4654c..d948d71 100644 --- a/stabby/Cargo.toml +++ b/stabby/Cargo.toml @@ -59,3 +59,8 @@ harness = false [[bench]] name = "enums" harness = false + + +[[bench]] +name = "allocators" +harness = false diff --git a/stabby/benches/allocators.rs b/stabby/benches/allocators.rs new file mode 100644 index 0000000..bc9d5f5 --- /dev/null +++ b/stabby/benches/allocators.rs @@ -0,0 +1,30 @@ +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use rand::Rng; +use stabby::alloc::{allocators, collections::arc_btree::ArcBTreeSet, vec::Vec, IAlloc}; + +fn bench(c: &mut Criterion, set: &[i32]) { + c.bench_function(core::any::type_name::(), |b| { + b.iter(|| { + let mut vec = Vec::new_in(T::default()); + let mut btree = ArcBTreeSet::<_, _, false, 5>::new_in(T::default()); + for &i in set { + vec.push(i); + btree.insert(i); + } + black_box((vec, btree)); + }) + }); +} + +fn bench_allocs(c: &mut Criterion) { + let mut rng = rand::thread_rng(); + for n in [10, 100, 1000, 10000].into_iter() { + let set = (0..n).map(|_| rng.gen()).collect::>(); + // bench::(c, &set); + bench::(c, &set); + bench::(c, &set); + } +} + +criterion_group!(benches, bench_allocs); +criterion_main!(benches);
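A minimal sketch (not part of the patch, and hedged accordingly) of what the new `_Self` marker and the `Unself` rewrite are meant to enable: whenever a field type names `Self` or the type's own identifier, the derive macros now substitute `istable::_Self` in the generated layout report, so the `IStable` proof for a recursive type no longer chases its own definition. The module paths, the allocator feature, and the `Box` indirection below are illustrative assumptions, not code taken from this patch.

use stabby::alloc::boxed::Box;

// Hypothetical recursive type; assumes an allocator feature is enabled so that
// `Box` has a default allocator parameter available.
#[stabby::stabby]
pub struct Node {
    value: u32,
    // The derive reports this occurrence of `Node` as `_Self`, which is what
    // breaks the proof cycle during stability checking.
    next: Box<Node>,
}

The same `unself` substitution is applied to named and tuple fields of enum variants and to union fields, so recursive enums (a cons-list style variant holding a boxed `Self`, for instance) are covered by the same mechanism.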