Created
May 16, 2021 08:13
-
-
Save lachlansneff/eaaba2beda9ffafc2308361d86278f12 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#![feature( | |
allocator_api, | |
slice_ptr_get, | |
slice_ptr_len, | |
ptr_metadata, | |
maybe_uninit_uninit_array, | |
)] | |
use core::{ | |
alloc::{Layout, AllocError}, | |
ptr::{self, NonNull}, | |
mem::{self, MaybeUninit}, | |
cell::UnsafeCell, | |
slice, | |
marker::PhantomData, | |
}; | |
/// A memory allocator addressed through an abstract `Handle` rather than a
/// raw pointer. Because the caller re-resolves the handle via [`Allocator::get`]
/// before every access, an implementation may store the allocation *inline in
/// the handle itself* (see `InlineAlloc` below) — the bytes move whenever the
/// handle moves, and pointers are never held across a move.
///
/// # Safety
///
/// Implementors must ensure `get` on a handle returned by `allocate` yields a
/// block valid for reads and writes of at least the requested layout until
/// that handle is passed to `deallocate`.
/// NOTE(review): the contract for `deallocate` on a `default_handle` that was
/// never `allocate`d is not spelled out, yet the default `grow`/`shrink`
/// bodies (and `Vec::try_reserve`) rely on it being permitted — confirm.
pub unsafe trait Allocator {
    /// Token that addresses an allocation.
    // In most cases, this will just be `NonNull<[u8]>`.
    type Handle: Clone;

    /// Resolves `handle` to the memory block it currently addresses.
    /// Must be re-called after any move of the handle.
    fn get(&self, handle: &Self::Handle) -> NonNull<[u8]>;

    /// A handle usable before any allocation has been made. It may still
    /// expose non-zero capacity through `get` (as `InlineAlloc`'s does).
    fn default_handle(&self) -> Self::Handle;

    /// Attempts to allocate a block fitting `layout`.
    fn allocate(&self, layout: Layout) -> Result<Self::Handle, AllocError>;

    /// Like [`Allocator::allocate`], but the returned block is zero-filled.
    /// Note the whole block reported by `get` is zeroed, which may exceed
    /// `layout.size()` if the allocator over-allocates.
    fn allocate_zeroed(&self, layout: Layout) -> Result<Self::Handle, AllocError> {
        let handle = self.allocate(layout)?;
        let ptr = self.get(&handle);
        // SAFETY: `alloc` returns a valid memory block
        unsafe { ptr.as_non_null_ptr().as_ptr().write_bytes(0, ptr.len()) }
        Ok(handle)
    }

    /// Frees the block addressed by `handle`.
    ///
    /// # Safety
    /// `handle` must denote a live block obtained from this allocator, and
    /// `layout` must match the layout it was allocated with.
    unsafe fn deallocate(&self, handle: &Self::Handle, layout: Layout);

    /// Moves the allocation to a larger layout, copying the old contents and
    /// deallocating the old block on success.
    ///
    /// # Safety
    /// `old_layout` must describe the current allocation behind `handle`, and
    /// `new_layout.size()` must be >= `old_layout.size()`.
    unsafe fn grow(
        &self,
        handle: &Self::Handle,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<Self::Handle, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );
        let new_handle = self.allocate(new_layout)?;
        let new_ptr = self.get(&new_handle);
        let ptr = self.get(&handle).as_non_null_ptr();
        // SAFETY: because `new_layout.size()` must be greater than or equal to
        // `old_layout.size()`, both the old and new memory allocation are valid for reads and
        // writes for `old_layout.size()` bytes. Also, because the old allocation wasn't yet
        // deallocated, it cannot overlap `new_ptr`. Thus, the call to `copy_nonoverlapping` is
        // safe. The safety contract for `dealloc` must be upheld by the caller.
        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_layout.size());
            self.deallocate(&handle, old_layout);
        }
        Ok(new_handle)
    }

    /// Like [`Allocator::grow`], but bytes beyond the copied prefix come from
    /// a zeroed allocation (the old contents are copied over the zeroed
    /// block, so only the tail past `old_layout.size()` remains zero).
    ///
    /// # Safety
    /// Same contract as [`Allocator::grow`].
    unsafe fn grow_zeroed(
        &self,
        handle: &Self::Handle,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<Self::Handle, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );
        let new_handle = self.allocate_zeroed(new_layout)?;
        let new_ptr = self.get(&new_handle);
        let ptr = self.get(&handle).as_non_null_ptr();
        // SAFETY: because `new_layout.size()` must be greater than or equal to
        // `old_layout.size()`, both the old and new memory allocation are valid for reads and
        // writes for `old_layout.size()` bytes. Also, because the old allocation wasn't yet
        // deallocated, it cannot overlap `new_ptr`. Thus, the call to `copy_nonoverlapping` is
        // safe. The safety contract for `dealloc` must be upheld by the caller.
        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_layout.size());
            self.deallocate(&handle, old_layout);
        }
        Ok(new_handle)
    }

    /// Moves the allocation to a smaller layout, copying the truncated
    /// contents and deallocating the old block on success.
    ///
    /// # Safety
    /// `old_layout` must describe the current allocation behind `handle`, and
    /// `new_layout.size()` must be <= `old_layout.size()`.
    unsafe fn shrink(
        &self,
        handle: &Self::Handle,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<Self::Handle, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );
        let new_handle = self.allocate(new_layout)?;
        let new_ptr = self.get(&new_handle);
        let ptr = self.get(&handle).as_non_null_ptr();
        // SAFETY: because `new_layout.size()` must be lower than or equal to
        // `old_layout.size()`, both the old and new memory allocation are valid for reads and
        // writes for `new_layout.size()` bytes. Also, because the old allocation wasn't yet
        // deallocated, it cannot overlap `new_ptr`. Thus, the call to `copy_nonoverlapping` is
        // safe. The safety contract for `dealloc` must be upheld by the caller.
        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_layout.size());
            self.deallocate(&handle, old_layout);
        }
        Ok(new_handle)
    }

    /// Returns a shared reference to `self` (handy for composing allocators).
    #[inline(always)]
    fn by_ref(&self) -> &Self
    where
        Self: Sized,
    {
        self
    }
}
/// A growable vector generic over the handle-based [`Allocator`] trait above.
///
/// Storage is addressed through `handle` instead of a raw pointer, so an
/// allocator may keep the backing bytes *inside* the handle itself (see
/// `InlineAlloc`); the buffer pointer is re-derived from the handle on every
/// access and therefore survives moves of the whole `Vec`.
pub struct Vec<T, A: Allocator> {
    // Addresses (or contains) the element storage.
    handle: A::Handle,
    // Number of initialized elements; always <= capacity().
    len: usize,
    alloc: A,
    // Expresses logical ownership of `T` values for drop-check/variance.
    _marker: PhantomData<T>,
}
impl<T, A: Allocator> Vec<T, A> { | |
pub fn new_in(alloc: A) -> Self { | |
Self { | |
handle: alloc.default_handle(), | |
len: 0, | |
alloc, | |
_marker: PhantomData, | |
} | |
} | |
pub fn len(&self) -> usize { | |
self.len | |
} | |
pub fn capacity(&self) -> usize { | |
self.alloc.get(&self.handle).len() / mem::size_of::<T>() | |
} | |
pub fn as_ptr(&self) -> *const T { | |
self.alloc.get(&self.handle).as_mut_ptr() as *mut T as *const T | |
} | |
pub fn as_mut_ptr(&mut self) -> *mut T { | |
self.alloc.get(&self.handle).as_mut_ptr() as *mut T | |
} | |
pub fn as_slice(&self) -> &[T] { | |
unsafe { | |
slice::from_raw_parts(self.as_ptr(), self.len) | |
} | |
} | |
pub fn try_reserve(&mut self, additional: usize) -> Result<(), AllocError> { | |
let old_capacity = self.capacity(); | |
let old_layout = Layout::array::<T>(old_capacity).map_err(|_| AllocError)?; | |
let new_layout = Layout::array::<T>(old_capacity + additional).map_err(|_| AllocError)?; | |
self.handle = unsafe { self.alloc.grow(&self.handle, old_layout, new_layout)? }; | |
Ok(()) | |
} | |
pub fn try_push(&mut self, value: T) -> Result<(), AllocError> { | |
if self.len == self.capacity() { | |
self.try_reserve(1)?; | |
} | |
unsafe { | |
let end = self.as_mut_ptr().add(self.len); | |
ptr::write(end, value); | |
self.len += 1; | |
} | |
Ok(()) | |
} | |
} | |
/// A zero-sized "allocator" whose allocations live *inside the handle*:
/// each handle carries its own `N`-byte inline buffer, so no heap is used.
#[derive(Default)]
pub struct InlineAlloc<const N: usize>;
/// Handle for [`InlineAlloc`]: it *is* the backing storage.
///
/// `#[repr(align(4))]` guarantees the inline buffer satisfies alignments up
/// to 4, matching the alignment check in `InlineAlloc::allocate`. The
/// `UnsafeCell` permits writes through pointers derived from a `&Handle`.
#[repr(align(4))]
pub struct InlineAllocHandle<const N: usize>(UnsafeCell<[MaybeUninit<u8>; N]>);
impl<const N: usize> Clone for InlineAllocHandle<N> { | |
fn clone(&self) -> Self { | |
Self(UnsafeCell::new(unsafe { | |
self.0.get().read() | |
})) | |
} | |
} | |
unsafe impl<const N: usize> Allocator for InlineAlloc<N> { | |
type Handle = InlineAllocHandle<N>; | |
fn get(&self, handle: &Self::Handle) -> NonNull<[u8]> { | |
NonNull::from_raw_parts( | |
unsafe { NonNull::new_unchecked(handle.0.get() as *mut ()) }, | |
N, | |
) | |
} | |
fn default_handle(&self) -> Self::Handle { | |
InlineAllocHandle(UnsafeCell::new(MaybeUninit::uninit_array())) | |
} | |
fn allocate(&self, layout: Layout) -> Result<Self::Handle, AllocError> { | |
if layout.align() <= 4 && layout.size() <= N { | |
Ok(self.default_handle()) | |
} else { | |
Err(AllocError) | |
} | |
} | |
unsafe fn deallocate(&self, _handle: &Self::Handle, _layout: Layout) {} | |
} | |
fn main() { | |
let mut v: Vec<i32, InlineAlloc<{ 4 * 4 }>> = Vec::new_in(InlineAlloc::default()); | |
assert_eq!(mem::size_of_val(&v), 24); | |
assert_eq!(v.capacity(), 4); | |
v.try_push(1).unwrap(); | |
v.try_push(2).unwrap(); | |
v.try_push(3).unwrap(); | |
v.try_push(4).unwrap(); | |
v.try_push(5).unwrap_err(); | |
assert_eq!(&[1, 2, 3, 4] as &[_], v.as_slice()); | |
println!("{:?}", v.as_slice()); | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment