@lachlansneff
Last active April 13, 2020 22:06
// Imports assume a ~April 2020 nightly `AllocRef` API (the `AllocInit` /
// `MemoryBlock` flavor) and that this file lives inside a liballoc-style crate.
use core::alloc::{AllocErr, AllocInit, AllocRef, Layout, MemoryBlock};
use core::cell::UnsafeCell;
use core::mem::{align_of, forget, size_of, MaybeUninit};
use core::ptr::NonNull;
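/// Allocators that can tell whether a given block was allocated by them.
/// `Fallback` uses this to route `dealloc` calls to the right side.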
pub unsafe trait AllocOwner {
fn owns(&self, ptr: NonNull<u8>, layout: Layout) -> bool;
}
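/// Wraps an allocator in an `UnsafeCell` so that a *shared* reference to it can
/// be handed out to several containers at once, while the wrapped allocator is
/// still driven through `&mut A: AllocRef` internally.
///
/// A usage sketch, assuming the same nightly `AllocRef`/`new_in` APIs as the
/// tests below:
///
/// ```ignore
/// let alloc = Local::new(StackAlloc::<256>::default());
/// let a = Box::new_in(1u32, &alloc);
/// let b = Box::new_in(2u32, &alloc); // both boxes share one allocator
/// ```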
#[derive(Default)]
pub struct Local<A> {
allocator: UnsafeCell<A>,
}
impl<A> Local<A> {
pub fn new(allocator: A) -> Self {
Self {
allocator: UnsafeCell::new(allocator),
}
}
}
unsafe impl<A> AllocRef for &'_ Local<A>
where
for<'a> &'a mut A: AllocRef
{
fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
unsafe { &mut *self.allocator.get() }.alloc(layout, init)
}
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
(&mut *self.allocator.get()).dealloc(ptr, layout)
}
}
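/// A fixed-size bump allocator backed by an inline array of `N` bytes.
/// Allocation just bumps `offset`; `dealloc` is currently a no-op.
///
/// Sketch, mirroring `test_stack_alloc` below:
///
/// ```ignore
/// let mut alloc = StackAlloc::<64>::default();
/// let mut vec: Vec<i32, _> = Vec::new_in(&mut alloc);
/// vec.extend(&[1, 2, 3, 4]);
/// ```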
pub struct StackAlloc<const N: usize> {
store: [MaybeUninit<u8>; N],
offset: usize,
}
impl<const N: usize> Default for StackAlloc<N> {
fn default() -> Self {
Self {
store: [MaybeUninit::uninit(); N],
offset: 0,
}
}
}
unsafe impl<const N: usize> AllocRef for &'_ mut StackAlloc<N> {
fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
        // Pad `offset` so the returned pointer meets the requested alignment,
        // and check that the padded allocation still fits in the backing array.
        let pad = unsafe { self.store.as_mut_ptr().add(self.offset) }.align_offset(layout.align());
        if self.offset + pad + layout.size() > self.store.len() {
            return Err(AllocErr);
        }
        let ptr = unsafe { self.store.as_mut_ptr().add(self.offset + pad) };
        self.offset += pad + layout.size();
let mut block = MemoryBlock {
ptr: unsafe { NonNull::new_unchecked(ptr as _) },
size: layout.size(),
};
block.init(init);
Ok(block)
}
unsafe fn dealloc(&mut self, _ptr: NonNull<u8>, _layout: Layout) {
// TODO: Implement this by popping off the end of the array if possible.
}
}
unsafe impl<const N: usize> AllocOwner for &'_ StackAlloc<N> {
fn owns(&self, ptr: NonNull<u8>, layout: Layout) -> bool {
        // A block is ours if it lies entirely within the backing array.
        let start = self.store.as_ptr() as usize;
        let addr = ptr.as_ptr() as usize;
        addr >= start && addr + layout.size() <= start + self.store.len()
}
}
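/// Tries `primary` first and falls back to `secondary` once the primary fails.
/// On `dealloc`, `AllocOwner::owns` on the primary decides which side gets the
/// block back.
///
/// Sketch, mirroring `test_fallback_alloc` below:
///
/// ```ignore
/// let mut alloc = Fallback::<StackAlloc<256>, Global>::default();
/// let mut vec: Vec<i32, _> = Vec::new_in(&mut alloc);
/// vec.try_extend_from_slice(&[10; 500]).unwrap(); // spills over into `Global`
/// ```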
#[derive(Default)]
pub struct Fallback<Primary, Secondary> {
primary: Primary,
secondary: Secondary,
}
impl<Primary, Secondary> Fallback<Primary, Secondary> {
pub fn new(primary: Primary, secondary: Secondary) -> Self {
Self {
primary,
secondary,
}
}
}
unsafe impl<Primary, Secondary> AllocRef for &'_ mut Fallback<Primary, Secondary>
where
for<'a> &'a mut Primary: AllocRef,
for<'a> &'a Primary: AllocOwner,
for<'a> &'a mut Secondary: AllocRef,
{
fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
(&mut self.primary).alloc(layout, init).or_else(|_| (&mut self.secondary).alloc(layout, init))
}
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
if (&self.primary).owns(ptr, layout) {
(&mut self.primary).dealloc(ptr, layout);
} else {
(&mut self.secondary).dealloc(ptr, layout);
}
}
}
#[derive(Default)]
struct FreelistNode {
    next: Option<NonNull<FreelistNode>>,
    /// Size of the free block this node is stored in, so a block is only
    /// reused for requests it can actually satisfy.
    size: usize,
}
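/// Keeps a singly linked list of freed blocks (the node is stored inside the
/// freed block itself) and reuses them before asking `parent` for new memory.
///
/// Sketch, assuming the nightly `AllocRef` API used throughout this file:
///
/// ```ignore
/// let mut alloc = Freelist::<Global>::default();
/// let mut vec: Vec<u64, _> = Vec::new_in(&mut alloc);
/// vec.extend(&[1, 2, 3, 4]); // blocks freed as the vec regrows go back on the list
/// ```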
#[derive(Default)]
pub struct Freelist<A> {
parent: A,
root: Option<NonNull<FreelistNode>>,
}
impl<A> Freelist<A> {
pub fn new(parent: A) -> Self {
Self {
parent,
root: None,
}
}
}
unsafe impl<A> AllocRef for &'_ mut Freelist<A>
where
for<'a> &'a mut A: AllocRef,
{
    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
        match self.root {
            // Reuse the head of the free list only if that block is large
            // enough and suitably aligned for the requested layout.
            Some(next)
                if unsafe { next.as_ref() }.size >= layout.size()
                    && next.as_ptr() as usize % layout.align() == 0 =>
            {
                self.root = unsafe { &*next.as_ptr() }.next;
                let mut block = MemoryBlock {
                    ptr: next.cast(),
                    size: layout.size(),
                };
                block.init(init);
                Ok(block)
            }
            _ => (&mut self.parent).alloc(layout, init),
        }
    }

    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        // Only blocks that can hold (and are aligned for) a `FreelistNode` are
        // kept on the list; everything else goes straight back to the parent.
        if layout.size() < size_of::<FreelistNode>()
            || ptr.as_ptr() as usize % align_of::<FreelistNode>() != 0
        {
            (&mut self.parent).dealloc(ptr, layout)
        } else {
            let node: NonNull<FreelistNode> = ptr.cast();
            node.as_ptr().write(FreelistNode {
                next: self.root,
                size: layout.size(),
            });
            self.root = Some(node);
        }
    }
}
unsafe impl<A> AllocOwner for &'_ mut Freelist<A>
where
for<'a> &'a A: AllocOwner,
{
fn owns(&self, ptr: NonNull<u8>, layout: Layout) -> bool {
(&self.parent).owns(ptr, layout)
}
}
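/// Routes allocations of at most `Threshold` bytes to `small` and everything
/// larger to `large`. Chaining several `Segregator`s gives a simple size-class
/// allocator (see the `Alloc` alias in the tests).
///
/// Sketch of a hypothetical two-level composition, assuming the nightly
/// `AllocRef` API used in this file:
///
/// ```ignore
/// let mut alloc = Segregator::<Freelist<Global>, Global, 128>::new(Freelist::new(Global), Global);
/// let mut vec: Vec<u8, _> = Vec::new_in(&mut alloc);
/// vec.extend(core::iter::repeat(0u8).take(256)); // large requests go to `Global`
/// ```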
pub struct Segregator<Small, Large, const Threshold: usize> {
small: Small,
large: Large,
}
impl<Small, Large, const Threshold: usize> Segregator<Small, Large, Threshold> {
pub fn new(small: Small, large: Large) -> Self {
Self { small, large }
}
}
unsafe impl<Small, Large, const Threshold: usize> AllocRef for &'_ mut Segregator<Small, Large, Threshold>
where
for<'a> &'a mut Small: AllocRef,
for<'a> &'a mut Large: AllocRef,
{
fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
if layout.size() <= Threshold {
(&mut self.small).alloc(layout, init)
} else {
(&mut self.large).alloc(layout, init)
}
}
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
if layout.size() <= Threshold {
(&mut self.small).dealloc(ptr, layout)
} else {
(&mut self.large).dealloc(ptr, layout)
}
}
}
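/// Reserves room for a `Prefix` in front of and a `Suffix` behind every block,
/// cloning the stored `prefix`/`suffix` values into those slots on `alloc` and
/// dropping them again on `dealloc`. Useful for canaries, tags, or counters
/// (see `test_affix_alloc` below, which counts live allocations).
///
/// Sketch with a plain `u32` tag as the prefix, assuming the nightly
/// `AllocRef`/`new_in` APIs used in this file:
///
/// ```ignore
/// let alloc = Local::new(Affix::new(Global, 0xDEAD_BEEFu32, ()));
/// let b = Box::new_in(7u8, &alloc); // a cloned tag now sits just before the block
/// ```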
pub struct Affix<A, Prefix, Suffix> {
parent: A,
prefix: Prefix,
suffix: Suffix,
}
impl<A, Prefix, Suffix> Affix<A, Prefix, Suffix> {
pub fn new(parent: A, prefix: Prefix, suffix: Suffix) -> Self {
Self {
parent,
prefix,
suffix,
}
}
}
unsafe impl<A, Prefix, Suffix> AllocRef for &'_ mut Affix<A, Prefix, Suffix>
where
for<'a> &'a mut A: AllocRef,
Prefix: Clone,
Suffix: Clone,
{
fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
let prefix_layout = Layout::new::<Prefix>();
let suffix_layout = Layout::new::<Suffix>();
        let (total_layout, value_offset, suffix_offset) = prefix_layout
            .extend(layout)
            .and_then(|(layout, offset)| {
                let (total_layout, suffix_offset) = layout.extend(suffix_layout)?;
                Ok((total_layout, offset, suffix_offset))
            })
            .map_err(|_| AllocErr)?;
let total_block = (&mut self.parent).alloc(total_layout, init)?;
// Write prefix and suffix.
unsafe {
let prefix_pointer: *mut Prefix = total_block.ptr.cast().as_ptr();
let suffix_pointer: *mut Suffix = total_block.ptr.as_ptr().add(suffix_offset) as _;
prefix_pointer.write(self.prefix.clone());
suffix_pointer.write(self.suffix.clone());
}
Ok(MemoryBlock {
ptr: unsafe { NonNull::new_unchecked(total_block.ptr.as_ptr().add(value_offset)) },
size: layout.size(),
})
}
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
let prefix_layout = Layout::new::<Prefix>();
let suffix_layout = Layout::new::<Suffix>();
        // Recompute the same layout as in `alloc` so the offsets match exactly.
        let (total_layout, value_offset, suffix_offset) = prefix_layout
            .extend(layout)
            .and_then(|(layout, offset)| {
                let (total_layout, suffix_offset) = layout.extend(suffix_layout)?;
                Ok((total_layout, offset, suffix_offset))
            })
            .unwrap();
        // The pointer handed out by `alloc` sits `value_offset` bytes past the
        // start of the underlying block; that offset can exceed
        // `size_of::<Prefix>()` when alignment padding was inserted.
        let start_of_block = ptr.as_ptr().sub(value_offset);
// Drop the prefix and suffix.
let prefix_pointer: *mut Prefix = start_of_block.cast();
let suffix_pointer: *mut Suffix = start_of_block.add(suffix_offset).cast();
prefix_pointer.drop_in_place();
suffix_pointer.drop_in_place();
(&mut self.parent).dealloc(NonNull::new_unchecked(start_of_block), total_layout)
}
}
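/// An array of `BucketNum` sub-allocators, where bucket `i` is meant to serve
/// sizes in `[Min + i * Step, Min + (i + 1) * Step)`; this mapping is the
/// assumption behind the routing in `alloc`/`dealloc` below.
///
/// Sketch with the same parameters as the `Alloc` alias in the tests
/// (8 buckets of 32-byte classes covering sizes 33..=288):
///
/// ```ignore
/// let mut alloc = LinearBucket::<Freelist<Global>, 8, 33, 32>::default();
/// let layout = Layout::new::<[u8; 64]>();
/// let block = (&mut alloc).alloc(layout, AllocInit::Uninitialized).unwrap();
/// unsafe { (&mut alloc).dealloc(block.ptr, layout) };
/// ```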
pub struct LinearBucket<A, const BucketNum: usize, const Min: usize, const Step: usize> {
buckets: [A; BucketNum],
}
impl<A, const BucketNum: usize, const Min: usize, const Step: usize> Default for LinearBucket<A, BucketNum, Min, Step>
where
A: Default,
{
fn default() -> Self {
let mut array: [MaybeUninit<A>; BucketNum] = unsafe { MaybeUninit::uninit().assume_init() };
for element in array.iter_mut() {
*element = MaybeUninit::new(A::default());
}
let buckets = unsafe {
let ptr: *const [MaybeUninit<A>; BucketNum] = &array;
let ptr: *const [A; BucketNum] = ptr.cast();
ptr.read()
};
forget(array);
Self {
buckets,
}
}
}
// impl<A, const BucketNum: usize, const Min: usize, const Step: usize> LinearBucket<A, BucketNum, Min, Step> {
// pub fn new(allocator: impl Iterator<Item=A>) -> Self {
// Self {
// buckets:
// }
// }
// }
unsafe impl<A, const BucketNum: usize, const Min: usize, const Step: usize> AllocRef for &'_ mut LinearBucket<A, BucketNum, Min, Step>
where
    for<'a> &'a mut A: AllocRef,
{
    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
        // Assumed mapping: bucket `i` serves sizes in `[Min + i * Step, Min + (i + 1) * Step)`.
        if layout.size() < Min {
            return Err(AllocErr);
        }
        self.buckets.get_mut((layout.size() - Min) / Step).ok_or(AllocErr)?.alloc(layout, init)
    }

    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        if let Some(bucket) = layout.size().checked_sub(Min).and_then(|n| self.buckets.get_mut(n / Step)) {
            bucket.dealloc(ptr, layout)
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::alloc::Global; // assumed path to `Global` in this liballoc-style crate
    use crate::boxed::Box;
    use crate::vec::Vec;
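    // One way the pieces above compose into a size-segregated allocator stack;
    // the alias itself is not exercised by the tests below.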
type Alloc = Segregator<
Freelist<Global>,
Segregator<
LinearBucket<Freelist<Global>, 8, 33, 32>,
Segregator<
Freelist<Global>,
Segregator<
Freelist<Global>,
Segregator<
Freelist<Global>,
Global,
4096,
>,
2048,
>,
1024,
>,
256,
>,
32,
>;
#[test]
fn test_stack_alloc() {
let mut alloc = StackAlloc::<64>::default();
let mut vec: Vec<i32, _> = Vec::new_in(&mut alloc);
vec.extend(&[1, 2, 3, 4]);
assert_eq!(&vec, &[1, 2, 3, 4]);
}
#[test]
#[should_panic]
fn test_stack_alloc_fail() {
let mut alloc = StackAlloc::<8>::default();
let mut vec: Vec<i32, _> = Vec::new_in(&mut alloc);
vec.try_extend_from_slice(&[1, 2, 3, 4]).unwrap();
assert_eq!(&vec, &[1, 2, 3, 4]);
}
#[test]
fn test_fallback_alloc() {
type LocalAlloc = Fallback<
StackAlloc<256>,
Global,
>;
let mut alloc = LocalAlloc::default();
let mut vec: Vec<i32, _> = Vec::new_in(&mut alloc);
vec.try_extend_from_slice(&[10; 500]).unwrap();
assert!(
vec.iter().all(|&i| i == 10)
&& vec.len() == 500
);
}
#[test]
fn test_affix_alloc() {
use core::cell::Cell;
type LocalAlloc = Fallback<
StackAlloc<256>,
Global,
>;
struct AllocCounter<'a>(&'a Cell<usize>);
impl<'a> Clone for AllocCounter<'a> {
fn clone(&self) -> Self {
self.0.set(self.0.get() + 1);
                AllocCounter(self.0)
}
}
impl<'a> Drop for AllocCounter<'a> {
fn drop(&mut self) {
self.0.set(self.0.get() - 1);
}
}
let counter = Cell::new(1);
let alloc = Local::new(Affix::new(
LocalAlloc::default(),
AllocCounter(&counter),
(),
));
let b0 = Box::new_in(0, &alloc);
assert_eq!(counter.get(), 2);
let b1 = Box::new_in(1, &alloc);
assert_eq!(counter.get(), 3);
drop(b0);
assert_eq!(counter.get(), 2);
drop(b1);
assert_eq!(counter.get(), 1);
drop(alloc);
assert_eq!(counter.get(), 0);
}
}