@jsimmons
Created April 3, 2021 09:51
Upgradable Rc / Arc

main.rs
mod rc;

use rc::{BoxArc, BoxRc};

struct Widget {
    value: i32,
}

impl Widget {
    fn new(value: i32) -> Self {
        Self { value }
    }
}

impl Drop for Widget {
    fn drop(&mut self) {
        println!("dropping widget {}", self.value);
    }
}
fn main() {
    let widget = BoxRc::new(Widget::new(1));
    drop(widget);

    let widget = BoxRc::new(Widget::new(2));
    drop(widget);

    let widget = BoxRc::new(Widget::new(3));
    let widget2 = widget.clone();
    drop(widget);
    println!("drops now");
    drop(widget2);

    let widget = BoxRc::new(Widget::new(4));
    let widget2 = BoxArc::from_rc(&widget);
    drop(widget);
    println!("drops now");
    drop(widget2);
}
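
// Expected output, assuming the counting scheme in rc.rs works as described
// (widgets 1 and 2 drop immediately; 3 and 4 drop only once their second
// handle is released):
//
//   dropping widget 1
//   dropping widget 2
//   drops now
//   dropping widget 3
//   drops now
//   dropping widget 4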
rc.rs

use std::{
    marker::PhantomData,
    ops::Deref,
    ptr::{self, NonNull},
    sync::atomic::{fence, AtomicI32, Ordering},
};
struct RcInner<T: ?Sized> {
    // Strong reference count for the current value; the sign encodes the
    // counting mode.
    // A negative value indicates an Rc-style (thread-confined) count.
    // A positive value indicates an Arc-style (atomic) count.
    // The magnitude is the number of live handles, so zero never occurs while
    // a handle exists; the helpers below panic if they see it.
    strong: AtomicI32,
    // weak: isize,
    value: T,
}
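
// A few hypothetical counter values to illustrate the encoding above:
//   -1  a single BoxRc (the state RcInner::new starts in)
//   -3  three BoxRc clones, no upgrade has happened yet
//   +3  three live handles counted atomically, some time after an upgrade
//       via BoxArc::from_rc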
impl<T> RcInner<T> {
    #[inline]
    fn new(value: T) -> Self {
        Self {
            // Start in Rc mode with a single handle.
            strong: AtomicI32::new(-1),
            value,
        }
    }
}
impl<T: ?Sized> RcInner<T> {
    #[inline]
    fn incr_strong(&self) {
        let strong = self.strong.load(Ordering::Relaxed);
        if strong < 0 {
            // Rc mode: the handles are confined to one thread, so a plain
            // store is enough.
            self.strong.store(strong - 1, Ordering::Relaxed);
        } else if strong > 0 {
            // Arc mode: other threads may hold handles, so use an atomic RMW.
            self.strong.fetch_add(1, Ordering::Relaxed);
        } else {
            panic!();
        }
    }
    #[inline]
    fn incr_strong_arc(&self) {
        let strong = self.strong.load(Ordering::Relaxed);
        // If the count is currently negative, we're upgrading an Rc to an Arc.
        // Since a BoxRc isn't Send, the upgrade is necessarily a
        // single-threaded operation, so flipping the sign with a plain store
        // is safe.
        if strong < 0 {
            self.strong.store(-strong + 1, Ordering::Relaxed);
        } else if strong > 0 {
            self.strong.fetch_add(1, Ordering::Relaxed);
        } else {
            panic!()
        }
    }
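
    // Worked example of the sign flip above (hypothetical values): with two
    // live BoxRc handles the count is -2; BoxArc::from_rc stores -(-2) + 1 = 3,
    // i.e. the two existing BoxRc handles plus the new BoxArc, all counted
    // atomically from then on.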
    // Returns true while other handles remain after this decrement, and false
    // when the caller has just released the last handle and must destroy the
    // allocation.
    #[inline]
    fn decr_strong(&self) -> bool {
        let strong = self.strong.load(Ordering::Relaxed);
        let old_strong = if strong < 0 {
            self.strong.store(strong + 1, Ordering::Relaxed);
            strong
        } else if strong > 0 {
            // Release pairs with the Acquire fence in drop_slow so the final
            // owner sees every write made before the other handles let go.
            self.strong.fetch_sub(1, Ordering::Release)
        } else {
            panic!()
        };
        old_strong != 1 && old_strong != -1
    }
}
pub struct BoxRc<T: ?Sized> {
    ptr: NonNull<RcInner<T>>,
    phantom: PhantomData<RcInner<T>>,
}

impl<T> BoxRc<T> {
    pub fn new(value: T) -> Self {
        Self::from_inner(Box::leak(Box::new(RcInner::new(value))).into())
    }
}
impl<T: ?Sized> BoxRc<T> {
    fn from_inner(inner: NonNull<RcInner<T>>) -> Self {
        Self {
            ptr: inner.into(),
            phantom: PhantomData,
        }
    }

    #[inline]
    fn inner(&self) -> &RcInner<T> {
        unsafe { self.ptr.as_ref() }
    }

    #[cold]
    fn drop_slow(&self) {
        // Pairs with the Release decrement in decr_strong: every write made
        // before the other handles were dropped is visible before we free.
        fence(Ordering::Acquire);
        unsafe {
            drop(Box::from_raw(self.ptr.as_ptr()));
        }
        // To support weak refs the real Rc decouples destruction from
        // deallocation.
        //unsafe { ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value) }
    }
}
impl<T: ?Sized> Clone for BoxRc<T> {
    fn clone(&self) -> Self {
        self.inner().incr_strong();
        Self::from_inner(self.inner().into())
    }
}

impl<T: ?Sized> Drop for BoxRc<T> {
    fn drop(&mut self) {
        if self.inner().decr_strong() {
            return;
        }
        self.drop_slow()
    }
}

impl<T: ?Sized> Deref for BoxRc<T> {
    type Target = T;

    // The inner allocation stays valid for as long as this BoxRc exists.
    fn deref(&self) -> &Self::Target {
        &self.inner().value
    }
}
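
// Note that BoxRc has no Send or Sync impls: NonNull<RcInner<T>> is neither,
// so the auto traits keep every BoxRc on the thread that created it. That
// confinement is what makes the plain (non-RMW) count updates above sound.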
pub struct BoxArc<T: ?Sized> {
    ptr: NonNull<RcInner<T>>,
    phantom: PhantomData<RcInner<T>>,
}

unsafe impl<T: ?Sized + Sync + Send> Send for BoxArc<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for BoxArc<T> {}

impl<T> BoxArc<T> {
    pub fn new(value: T) -> Self {
        let inner = Box::leak(Box::new(RcInner::new(value)));
        // Start directly in Arc mode (+1) so clones from other threads never
        // hit the non-atomic Rc path.
        inner.strong.store(1, Ordering::Relaxed);
        Self::from_inner(inner.into())
    }
}
impl<T: ?Sized> BoxArc<T> {
    // Upgrade: flips the shared count into Arc mode (if it isn't already)
    // and adds one handle.
    pub fn from_rc(rc: &BoxRc<T>) -> Self {
        let inner = rc.inner();
        inner.incr_strong_arc();
        Self::from_inner(inner.into())
    }

    fn from_inner(inner: NonNull<RcInner<T>>) -> Self {
        Self {
            ptr: inner.into(),
            phantom: PhantomData,
        }
    }

    #[inline]
    fn inner(&self) -> &RcInner<T> {
        unsafe { self.ptr.as_ref() }
    }

    #[cold]
    fn drop_slow(&self) {
        // Pairs with the Release decrement in decr_strong.
        fence(Ordering::Acquire);
        unsafe {
            drop(Box::from_raw(self.ptr.as_ptr()));
        }
        // To support weak refs the real Arc decouples destruction from
        // deallocation.
        //unsafe { ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value) }
    }
}
impl<T: ?Sized> Clone for BoxArc<T> {
    fn clone(&self) -> Self {
        self.inner().incr_strong_arc();
        Self::from_inner(self.inner().into())
    }
}

impl<T: ?Sized> Drop for BoxArc<T> {
    fn drop(&mut self) {
        if self.inner().decr_strong() {
            return;
        }
        self.drop_slow()
    }
}

impl<T: ?Sized> Deref for BoxArc<T> {
    type Target = T;

    // The inner allocation stays valid for as long as this BoxArc exists.
    fn deref(&self) -> &Self::Target {
        &self.inner().value
    }
}
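
// A minimal usage sketch, assuming rc.rs is compiled as the `rc` module of
// the crate above: a BoxRc stays on its creating thread, while a BoxArc
// upgraded from it can be sent across threads. The test below and its name
// are illustrative additions, not part of the gist's API.
#[cfg(test)]
mod tests {
    use super::{BoxArc, BoxRc};

    #[test]
    fn upgrade_and_send() {
        let local = BoxRc::new(42_i32);
        let shared = BoxArc::from_rc(&local);
        // Dropping the BoxRc leaves the BoxArc as the sole remaining handle.
        drop(local);
        // BoxArc<i32> is Send, so it can be moved into another thread.
        let handle = std::thread::spawn(move || *shared + 1);
        assert_eq!(handle.join().unwrap(), 43);
    }
}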