@rkjnsn
Last active August 29, 2015 14:20
A ScopedRc type that performs cycle collection with correct lifetime enforcement.
use std::boxed;
use std::cell::{Cell, UnsafeCell};
use std::cmp::{Ordering, max};
use std::fmt;
use std::hash::{Hasher, Hash};
use std::intrinsics::{abort, assume};
use std::isize;
use std::marker::PhantomData;
use std::mem::{self, forget, min_align_of, size_of, transmute};
use std::ops::Deref;
use std::ptr;
use std::raw::TraitObject;
use std::rt::heap::{EMPTY, allocate, deallocate, reallocate};
extern crate core;
use self::core::nonzero::NonZero;
extern crate alloc;
// We need the layout to be predictable
#[repr(C)]
struct RcBox<T> {
value: T,
strong: Cell<usize>,
weak: Cell<usize>,
// These allow CoW and immediate removal from tracking list
cycle_breaker: *const CycleBreaker,
index: Cell<usize>,
}
// Used when the ScopedRc isn't guarded, to save some bytes
#[repr(C)]
struct UnguardedRcBox<T> {
value: T,
strong: Cell<usize>,
weak: Cell<usize>,
// Always NULL for this variant
cycle_breaker: *const CycleBreaker,
}
/// A reference-counted pointer type over an immutable value.
///
/// See the [module level documentation](./index.html) for more details.
#[unsafe_no_drop_flag]
pub struct ScopedRc<'a, T> {
// FIXME #12808: strange names to try to avoid interfering with field
// accesses of the contained type via Deref
_ptr: NonZero<*mut RcBox<T>>,
_marker: PhantomData<&'a ()>,
}
impl<'a, T> !Send for ScopedRc<'a, T> {}
impl<'a, T> !Sync for ScopedRc<'a, T> {}
impl<'a, T> ScopedRc<'a, T> {
/// Creates a `ScopedRc` without a guard. As this may lead to uncollected
/// cycles, `T` must be `'static`.
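///
/// # Examples
///
/// A minimal sketch:
///
/// ```
/// use scopedrc::ScopedRc;
///
/// let five = ScopedRc::unguarded(5);
/// assert_eq!(*five, 5);
/// ```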
pub fn unguarded(value: T) -> ScopedRc<'static, T> where T: 'static {
unsafe {ScopedRc::priv_new(ptr::null(), value)}
}
/// Allows creation of an unguarded `ScopedRc` with non-`'static` data. It is
/// the responsibility of the caller to ensure that no cycles (and thus no
/// leaks) are possible.
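///
/// # Examples
///
/// A minimal sketch; safe here because `&char` cannot form a cycle:
///
/// ```
/// use scopedrc::ScopedRc;
///
/// let c = 'i';
/// let rc = unsafe { ScopedRc::unsafe_unguarded(&c) };
/// assert_eq!(**rc, 'i');
/// ```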
pub unsafe fn unsafe_unguarded(value: T) -> ScopedRc<'a, T> where T: 'a {
ScopedRc::priv_new(ptr::null(), value)
}
unsafe fn priv_new(cycle_breaker: *const CycleBreaker, value: T) -> ScopedRc<'a, T> {
// there is an implicit weak pointer owned by all the strong
// pointers, which ensures that the weak destructor never frees
// the allocation while the strong destructor is running, even
// if the weak pointer is stored inside the strong one.
let ptr;
if !cycle_breaker.is_null() {
ptr = NonZero::new(boxed::into_raw(box RcBox {
value: value,
strong: Cell::new(1),
weak: Cell::new(1),
cycle_breaker: cycle_breaker,
index: Cell::new(0), // Filled in by register
}));
(*cycle_breaker).register(ptr);
} else {
// cycle_breaker is null, so we don't need space for index
ptr = transmute(NonZero::new(boxed::into_raw(box UnguardedRcBox {
value: value,
strong: Cell::new(1),
weak: Cell::new(1),
cycle_breaker: cycle_breaker,
})));
}
ScopedRc {
_ptr: ptr,
_marker: PhantomData,
}
}
/// Downgrades the `ScopedRc<'a, T>` to a `ScopedWeak<'a, T>` reference.
///
/// # Examples
///
/// ```
/// use scopedrc::{ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
/// let five = marker.new_rc(5);
///
/// let weak_five = five.downgrade();
/// ```
pub fn downgrade(&self) -> ScopedWeak<'a, T> {
self.inc_weak();
ScopedWeak { _ptr: self._ptr, _marker: PhantomData }
}
/// Dereferences the `ScopedRc` without checking whether it has been collected.
///
/// The caller must ensure that it is impossible for this method to be
/// called during collection.
#[inline(always)]
pub unsafe fn unchecked_deref(&self) -> &T {
&self.inner().value
}
}
/// Get the number of weak references to this value.
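///
/// # Examples
///
/// A minimal sketch:
///
/// ```
/// use scopedrc::{self, ScopedRcCreator, ScopedRcGuard};
///
/// let marker = ScopedRcGuard::new();
/// let five = marker.new_rc(5);
/// let _weak_five = five.downgrade();
///
/// assert_eq!(scopedrc::weak_count(&five), 1);
/// ```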
#[inline]
pub fn weak_count<'a, T>(this: &ScopedRc<'a, T>) -> usize { this.weak() - 1 }
/// Get the number of strong references to this value.
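///
/// # Examples
///
/// A minimal sketch:
///
/// ```
/// use scopedrc::{self, ScopedRcCreator, ScopedRcGuard};
///
/// let marker = ScopedRcGuard::new();
/// let five = marker.new_rc(5);
/// let _also_five = five.clone();
///
/// assert_eq!(scopedrc::strong_count(&five), 2);
/// ```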
#[inline]
pub fn strong_count<'a, T>(this: &ScopedRc<'a, T>) -> usize { this.strong() }
/// Returns true if there are no other `ScopedRc` or `ScopedWeak` values
/// that share the same inner value.
///
/// # Examples
///
/// ```
/// use scopedrc::{self, ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
/// let five = marker.new_rc(5);
///
/// scopedrc::is_unique(&five);
/// ```
#[inline]
pub fn is_unique<'a, T>(rc: &ScopedRc<'a, T>) -> bool {
weak_count(rc) == 0 && strong_count(rc) == 1
}
/// Unwraps the contained value if the `ScopedRc<'a, T>` is the only strong
/// reference.
///
/// Unlike get_mut, this works whenever `strong_count` is one, even if there
/// are weak references. If the `ScopedRc<'a, T>` is not unique, an `Err` is
/// returned with the same `ScopedRc<'a, T>`.
///
/// # Examples
///
/// ```
/// use scopedrc::{self, ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let x = marker.new_rc(3);
/// assert_eq!(scopedrc::try_unwrap(x), Ok(3));
///
/// let x = marker.new_rc(4);
/// let _y = x.clone();
/// assert_eq!(scopedrc::try_unwrap(x), Err(marker.new_rc(4)));
/// ```
#[inline]
pub fn try_unwrap<'a, T>(rc: ScopedRc<'a, T>) -> Result<T, ScopedRc<'a, T>> {
if is_unique(&rc) {
unsafe {
let val = ptr::read(&*rc); // copy the contained object
// destruct the box and skip our Drop
// we can ignore the refcounts because we know we're unique
if !rc.inner().cycle_breaker.is_null() {
// Remove the entry from the guard's tracking list before freeing the box.
(*rc.inner().cycle_breaker).remove(rc._ptr);
deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>());
} else {
deallocate(*rc._ptr as *mut u8, size_of::<UnguardedRcBox<T>>(),
min_align_of::<UnguardedRcBox<T>>());
}
forget(rc);
Ok(val)
}
} else {
Err(rc)
}
}
/// Returns a mutable reference to the contained value if the
/// `ScopedRc<'a, T>` is unique.
///
/// Returns `None` if the `ScopedRc<'a, T>` is not unique.
///
/// # Examples
///
/// ```
/// use scopedrc::{self, ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let mut x = marker.new_rc(3);
/// *scopedrc::get_mut(&mut x).unwrap() = 4;
/// assert_eq!(*x, 4);
///
/// let _y = x.clone();
/// assert!(scopedrc::get_mut(&mut x).is_none());
/// ```
#[inline]
pub fn get_mut<'a, 'b, T>(rc: &'b mut ScopedRc<'a, T>) -> Option<&'b mut T> {
// This is safe because the borrow is guaranteed to end before CycleBreaker
// could invalidate it.
if is_unique(rc) {
let inner = unsafe { &mut **rc._ptr };
Some(&mut inner.value)
} else {
None
}
}
impl<'a, T: Clone> ScopedRc<'a, T> {
/// Make a mutable reference from the given `ScopedRc<'a, T>`.
///
/// This is also referred to as a copy-on-write operation because the inner
/// data is cloned if the reference count is greater than one.
///
/// # Examples
///
/// ```
/// use scopedrc::{ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let mut five = marker.new_rc(5);
///
/// let mut_five = five.make_unique();
/// ```
#[inline]
pub fn make_unique(&mut self) -> &mut T {
if !is_unique(self) {
*self = unsafe {ScopedRc::priv_new(self.inner().cycle_breaker, (**self).clone())};
}
// This unsafety is ok because we're guaranteed that the pointer
// returned is the *only* pointer that will ever be returned to T. Our
// reference count is guaranteed to be 1 at this point, and we required
// the `ScopedRc<'a, T>` itself to be `mut`, so we're returning the
// only possible reference to the inner value.
let inner = unsafe { &mut **self._ptr };
&mut inner.value
}
}
impl<'a, T> Deref for ScopedRc<'a, T> {
type Target = T;
#[inline(always)]
fn deref(&self) -> &T {
if self.strong() == 0 { // Unchecked deref in cycle collection
unsafe { abort(); }
}
&self.inner().value
}
}
impl<'a, T> Drop for ScopedRc<'a, T> {
/// Drops the `ScopedRc<'a, T>`.
///
/// This will decrement the strong reference count. If the strong reference
/// count becomes zero and the only other references are `ScopedWeak`
/// ones, `drop`s the inner value.
fn drop(&mut self) {
unsafe {
let ptr = *self._ptr;
if !ptr.is_null() && ptr as usize != mem::POST_DROP_USIZE {
if self.strong() != 0 { // Not already collected
self.dec_strong();
if self.strong() == 0 {
// Can't use deref here
ptr::read(&self.inner().value); // destroy the contained object
if !self.inner().cycle_breaker.is_null() {
(*self.inner().cycle_breaker).remove(self._ptr); // remove from list
}
}
}
if self.strong() == 0 {
// If we just reached zero, we need to remove the implicit
// "strong weak" pointer now that we've destroyed the
// contents. If we had already been collected, we were
// converted to a weak pointer and we need to remove that.
self.dec_weak();
if self.weak() == 0 {
if !self.inner().cycle_breaker.is_null() {
deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
} else {
deallocate(ptr as *mut u8, size_of::<UnguardedRcBox<T>>(),
min_align_of::<UnguardedRcBox<T>>())
}
}
}
}
}
}
}
impl<'a, T> Clone for ScopedRc<'a, T> {
/// Makes a clone of the `ScopedRc<'a, T>`.
///
/// When you clone a `ScopedRc<'a, T>`, it will create another pointer to
/// the data and increase the strong reference counter.
///
/// # Examples
///
/// ```
/// use scopedrc::{ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let five = marker.new_rc(5);
///
/// five.clone();
/// ```
#[inline]
fn clone(&self) -> ScopedRc<'a, T> {
if self.strong() != 0 { // Make sure we don't resurrect a collected ref
self.inc_strong();
} else {
// Already collected: Drop will release this handle as a weak reference,
// so balance the weak count for the new clone here.
self.inc_weak();
}
ScopedRc { _ptr: self._ptr, _marker: PhantomData }
}
}
impl<'a, T: PartialEq> PartialEq for ScopedRc<'a, T> {
/// Equality for two `ScopedRc<'a, T>`s.
///
/// Two `ScopedRc<'a, T>`s are equal if their inner values are equal.
///
/// # Examples
///
/// ```
/// use scopedrc::{ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let five = marker.new_rc(5);
///
/// five == marker.new_rc(5);
/// ```
#[inline(always)]
fn eq(&self, other: &ScopedRc<'a, T>) -> bool { **self == **other }
/// Inequality for two `ScopedRc<'a, T>`s.
///
/// Two `ScopedRc<'a, T>`s are unequal if their inner values are unequal.
///
/// # Examples
///
/// ```
/// use scopedrc::{ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let five = marker.new_rc(5);
///
/// five != marker.new_rc(5);
/// ```
#[inline(always)]
fn ne(&self, other: &ScopedRc<'a, T>) -> bool { **self != **other }
}
impl<'a, T: Eq> Eq for ScopedRc<'a, T> {}
impl<'a, T: PartialOrd> PartialOrd for ScopedRc<'a, T> {
/// Partial comparison for two `ScopedRc<'a, T>`s.
///
/// The two are compared by calling `partial_cmp()` on their inner values.
///
/// # Examples
///
/// ```
/// use scopedrc::{ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let five = marker.new_rc(5);
///
/// five.partial_cmp(&marker.new_rc(5));
/// ```
#[inline(always)]
fn partial_cmp(&self, other: &ScopedRc<'a, T>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
/// Less-than comparison for two `ScopedRc<'a, T>`s.
///
/// The two are compared by calling `<` on their inner values.
///
/// # Examples
///
/// ```
/// use scopedrc::{ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let five = marker.new_rc(5);
///
/// five < marker.new_rc(5);
/// ```
#[inline(always)]
fn lt(&self, other: &ScopedRc<'a, T>) -> bool { **self < **other }
/// 'Less-than or equal to' comparison for two `ScopedRc<'a, T>`s.
///
/// The two are compared by calling `<=` on their inner values.
///
/// # Examples
///
/// ```
/// use scopedrc::{ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let five = marker.new_rc(5);
///
/// five <= marker.new_rc(5);
/// ```
#[inline(always)]
fn le(&self, other: &ScopedRc<'a, T>) -> bool { **self <= **other }
/// Greater-than comparison for two `ScopedRc<'a, T>`s.
///
/// The two are compared by calling `>` on their inner values.
///
/// # Examples
///
/// ```
/// use scopedrc::{ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let five = marker.new_rc(5);
///
/// five > marker.new_rc(5);
/// ```
#[inline(always)]
fn gt(&self, other: &ScopedRc<'a, T>) -> bool { **self > **other }
/// 'Greater-than or equal to' comparison for two `ScopedRc<'a, T>`s.
///
/// The two are compared by calling `>=` on their inner values.
///
/// # Examples
///
/// ```
/// use scopedrc::{ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let five = marker.new_rc(5);
///
/// five >= marker.new_rc(5);
/// ```
#[inline(always)]
fn ge(&self, other: &ScopedRc<'a, T>) -> bool { **self >= **other }
}
impl<'a, T: Ord> Ord for ScopedRc<'a, T> {
/// Comparison for two `ScopedRc<'a, T>`s.
///
/// The two are compared by calling `cmp()` on their inner values.
///
/// # Examples
///
/// ```
/// use scopedrc::{ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let five = marker.new_rc(5);
///
/// five.cmp(&marker.new_rc(5));
/// ```
#[inline]
fn cmp(&self, other: &ScopedRc<'a, T>) -> Ordering { (**self).cmp(&**other) }
}
// FIXME (#18248) Make `T` `Sized?`
impl<'a, T: Hash> Hash for ScopedRc<'a, T> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state);
}
}
impl<'a, T: fmt::Display> fmt::Display for ScopedRc<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
impl<'a, T: fmt::Debug> fmt::Debug for ScopedRc<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
impl<'a, T> fmt::Pointer for ScopedRc<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Pointer::fmt(&*self._ptr, f)
}
}
/// A weak version of `ScopedRc<'a, T>`.
///
/// Weak references do not count when determining if the inner value should be
/// dropped.
///
/// See the [module level documentation](./index.html) for more.
#[unsafe_no_drop_flag]
pub struct ScopedWeak<'a, T> {
// FIXME #12808: strange names to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: NonZero<*mut RcBox<T>>,
_marker: PhantomData<&'a ()>,
}
impl<'a, T> !Send for ScopedWeak<'a, T> {}
impl<'a, T> !Sync for ScopedWeak<'a, T> {}
impl<'a, T> ScopedWeak<'a, T> {
/// Upgrades a weak reference to a strong reference.
///
/// Upgrades the `ScopedWeak<'a, T>` reference to a `ScopedRc<'a, T>`,
/// if possible.
///
/// Returns `None` if there were no strong references and the data was
/// destroyed.
///
/// # Examples
///
/// ```
/// use scopedrc::{ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let five = marker.new_rc(5);
///
/// let weak_five = five.downgrade();
///
/// let strong_five: Option<ScopedRc<_>> = weak_five.upgrade();
/// ```
pub fn upgrade(&self) -> Option<ScopedRc<'a, T>> {
if self.strong() == 0 {
None
} else {
self.inc_strong();
Some(ScopedRc { _ptr: self._ptr, _marker: PhantomData })
}
}
}
impl<'a, T> Drop for ScopedWeak<'a, T> {
/// Drops the `ScopedWeak<'a, T>`.
///
/// This will decrement the weak reference count.
fn drop(&mut self) {
unsafe {
let ptr = *self._ptr;
if !ptr.is_null() && ptr as usize != mem::POST_DROP_USIZE {
self.dec_weak();
// the weak count starts at 1, and will only go to zero if all
// the strong pointers have disappeared.
if self.weak() == 0 {
if !self.inner().cycle_breaker.is_null() {
deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
} else {
deallocate(ptr as *mut u8, size_of::<UnguardedRcBox<T>>(),
min_align_of::<UnguardedRcBox<T>>())
}
}
}
}
}
}
impl<'a, T> Clone for ScopedWeak<'a, T> {
/// Makes a clone of the `ScopedWeak<'a, T>`.
///
/// This increases the weak reference count.
///
/// # Examples
///
/// ```
/// use scopedrc::{ScopedRcGuard, ScopedRcCreator, ScopedRc};
///
/// let marker = ScopedRcGuard::new();
///
/// let weak_five = marker.new_rc(5).downgrade();
///
/// weak_five.clone();
/// ```
#[inline]
fn clone(&self) -> ScopedWeak<'a, T> {
self.inc_weak();
ScopedWeak { _ptr: self._ptr, _marker: PhantomData }
}
}
impl<'a, T: fmt::Debug> fmt::Debug for ScopedWeak<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "(Weak)")
}
}
#[doc(hidden)]
trait RcBoxPtr<T> {
fn inner(&self) -> &RcBox<T>;
#[inline]
fn strong(&self) -> usize { self.inner().strong.get() }
#[inline]
fn inc_strong(&self) { self.inner().strong.set(self.strong() + 1); }
#[inline]
fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
#[inline]
fn weak(&self) -> usize { self.inner().weak.get() }
#[inline]
fn inc_weak(&self) { self.inner().weak.set(self.weak() + 1); }
#[inline]
fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); }
}
impl<'a, T> RcBoxPtr<T> for ScopedRc<'a, T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
unsafe {
&(**self._ptr)
}
}
}
impl<'a, T> RcBoxPtr<T> for ScopedWeak<'a, T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
unsafe {
&(**self._ptr)
}
}
}
/// Trait allowing construction of new `ScopedRc`s.
///
/// Implemented by both `NoCycleMarker` and `ScopedRcGuard`.
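///
/// # Examples
///
/// A minimal sketch using both implementors:
///
/// ```
/// use scopedrc::{NoCycleMarker, ScopedRcCreator, ScopedRcGuard};
///
/// let guard = ScopedRcGuard::new();
/// let marker = NoCycleMarker::new();
///
/// let a = guard.new_rc(1);
/// let b = marker.new_rc(2);
/// assert_eq!(*a + *b, 3);
/// ```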
pub trait ScopedRcCreator<'a> {
fn new_rc<'b, T: 'a>(&'b self, value: T) -> ScopedRc<'b, T>;
}
/// Marker used to prevent cycles.
///
/// NoCycleMarker is guaranteed to be strictly outlived by all data held by the
/// `ScopedRc`s created with it, and will itself outlive the created `ScopedRc`s.
/// This makes it impossible to create any cycles.
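///
/// # Examples
///
/// A minimal sketch; the referenced data strictly outlives the marker:
///
/// ```
/// use scopedrc::{NoCycleMarker, ScopedRcCreator};
///
/// let value = 5;
/// let marker = NoCycleMarker::new();
/// let rc = marker.new_rc(&value);
/// assert_eq!(**rc, 5);
/// ```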
#[derive(Clone)]
pub struct NoCycleMarker<'a> {
marker: PhantomData<Cell<&'a ()>>,
}
impl<'a> NoCycleMarker<'a> {
pub fn new() -> NoCycleMarker<'a> {
NoCycleMarker {
marker: PhantomData,
}
}
}
impl<'a> ScopedRcCreator<'a> for NoCycleMarker<'a> {
fn new_rc<'b, T: 'a>(&'b self, value: T) -> ScopedRc<'b, T> {
// Safe because cycles cannot be created using NoCycleMarker
unsafe {ScopedRc::unsafe_unguarded(value)}
}
}
impl<'a> Drop for NoCycleMarker<'a> {
fn drop(&mut self) {}
}
/// Guard used to clean up cycles.
///
/// ScopedRcGuard is guaranteed to live at least as long as all data in the
/// `ScopedRc`s created with it, and will itself outlive the created `ScopedRc`s.
/// When ScopedRcGuard goes out of scope, it will collect any remaining cycles
/// that were created using it.
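///
/// # Examples
///
/// A minimal sketch of a self-cycle that the guard cleans up when it is
/// dropped:
///
/// ```
/// use std::cell::RefCell;
/// use scopedrc::{ScopedRc, ScopedRcCreator, ScopedRcGuard};
///
/// struct Node<'a> { next: RefCell<Option<ScopedRc<'a, Node<'a>>>> }
///
/// let guard = ScopedRcGuard::new();
/// let node = guard.new_rc(Node { next: RefCell::new(None) });
/// *node.next.borrow_mut() = Some(node.clone()); // self-cycle
/// // When `guard` goes out of scope, the cycle is collected.
/// ```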
pub struct ScopedRcGuard<'a> {
marker: PhantomData<Cell<&'a ()>>, // 'a is invariant
// We don't implement Drop directly to allow the compiler to unify our
// lifetime with that of the created ScopedRcs, which is necessary to
// support cycles.
cycle_breaker: Option<CycleBreaker>,
}
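/// A guard for `'static` data. It has no `CycleBreaker`, so any cycles created
/// through it are never collected and will simply leak.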
pub const STATIC_GUARD: &'static ScopedRcGuard<'static> = &ScopedRcGuard {
marker: PhantomData,
cycle_breaker: None,
};
impl<'a> ScopedRcGuard<'a> {
pub fn new() -> ScopedRcGuard<'a> {
ScopedRcGuard {
marker: PhantomData,
cycle_breaker: Some(CycleBreaker::new()),
}
}
}
impl<'a> ScopedRcCreator<'a> for ScopedRcGuard<'a> {
// Since 'a is invariant, this ensures that T is valid at least as long as
// we are. The returned Rc borrows us to ensure that it can't outlive
// us.
fn new_rc<'b, T: 'a>(&'b self, value: T) -> ScopedRc<'b, T> {
unsafe {ScopedRc::priv_new(self.cycle_breaker.as_ref().map_or(ptr::null(), |x| &*x), value)}
}
}
// Breaks cycles of ScopedRc when the guard is dropped. This is a separate
// struct because ScopedRcGuard itself can't be Drop (if it were, cycles would
// not be possible).
struct CycleBreaker {
// Keeps a list of all ScopedRcs created with the containing guard to allow
// cycles to be cleaned up.
guard_refs: UnsafeCell<GuardRefVec>,
}
impl CycleBreaker {
fn new() -> CycleBreaker {
CycleBreaker {
guard_refs: UnsafeCell::new(GuardRefVec::new()),
}
}
// Register a new RcBox
unsafe fn register<T>(&self, rc_box: NonZero<*mut RcBox<T>>) {
let index = self.vec().len();
let guard_ref = DynamicGuardRef::new(GuardRef::new(rc_box));
guard_ref.set_index(index);
self.vec().push(guard_ref);
}
// Remove box whose strong count has reached zero
unsafe fn remove<T>(&self, rc_box: NonZero<*mut RcBox<T>>) {
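// Swap-remove: move the last entry into the vacated slot (updating its stored
// index) and shrink the list by one.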
let index = (**rc_box).index.get();
let last = self.vec().get_unchecked(self.vec().len() - 1);
last.set_index(index);
self.vec().set_unchecked(index, last);
self.vec().set_len(self.vec().len() - 1);
}
// Convenience method to get internal guard refs vector.
unsafe fn vec(&self) -> &mut GuardRefVec {
&mut *self.guard_refs.get()
}
}
impl Drop for CycleBreaker {
fn drop(&mut self) {
unsafe {
// force_drop removes the ref from the vector. Also, the destructor
// for the contained object may result in items getting added or
// removed. Thus, we just keep removing the elements until the
// vector is empty.
while self.vec().len() != 0 {
self.vec().get_unchecked(0).force_drop();
}
}
}
}
// Operations the guard can perform on the RcBoxes it tracks. Make this a
// trait so the compiler will make a vtable for us.
trait GuardOps {
// Moves the strong count to the weak count. All strong references will
// effectively become weak references.
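// Afterwards, outstanding ScopedRcs behave like weak handles: deref aborts,
// clone cannot resurrect the value, and Drop releases them as weak references.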
fn force_drop(&self);
// Update the index of the RcBox
fn set_index(&self, index: usize);
}
struct GuardRef<T> {
rc_box: NonZero<*mut RcBox<T>>,
}
impl<T> Copy for GuardRef<T> {}
impl<T> Clone for GuardRef<T> {
fn clone(&self) -> GuardRef<T> {
*self
}
}
impl<T> GuardRef<T> {
fn new(ptr: NonZero<*mut RcBox<T>>) -> GuardRef<T> {
GuardRef { rc_box: ptr }
}
}
impl<T> RcBoxPtr<T> for GuardRef<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
unsafe {
// It's safe to assume this here; if it weren't true, we'd be breaking
// the contract anyway.
// This allows the null check to be elided in the destructor if we
// manipulated the reference count in the same function.
assume(!self.rc_box.is_null());
&(**self.rc_box)
}
}
}
impl<T> GuardOps for GuardRef<T> {
fn force_drop(&self) {
if self.strong() != 0 {
// Make all strong references weak. (Minus 1 to release the "strong weak".)
self.inner().weak.set(self.weak() + self.strong() - 1);
self.inner().strong.set(0);
// Drop the contained value
unsafe {
ptr::read(&self.inner().value);
(*self.inner().cycle_breaker).remove(self.rc_box);
}
}
}
fn set_index(&self, index: usize) {
self.inner().index.set(index);
}
}
// Type erases a GuardRef (through very hacky means)
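// The GuardRef's data is stored inline as a GuardRef<()> (all GuardRefs share
// the same layout) next to the GuardOps vtable pointer; Deref reassembles the
// two halves into a &GuardOps trait object.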
struct DynamicGuardRef {
guard_ref: GuardRef<()>, // All GuardRefs are the same size
vtable: *mut (), // The compiler-generated GuardOps vtable for guard_ref
}
impl Copy for DynamicGuardRef {}
impl Clone for DynamicGuardRef {
fn clone(&self) -> DynamicGuardRef {
*self
}
}
impl DynamicGuardRef {
fn new<T>(guard_ref: GuardRef<T>) -> DynamicGuardRef {
unsafe {
let vtable = {
let ops_obj: &GuardOps = &guard_ref;
let trait_obj: TraitObject = transmute(ops_obj);
trait_obj.vtable
};
DynamicGuardRef {
guard_ref: transmute(guard_ref),
vtable: vtable,
}
}
}
}
impl Deref for DynamicGuardRef {
type Target = GuardOps;
// For some reason, writing just `-> &GuardOps` doesn't work (compiler bug?)
fn deref(&self) -> &<DynamicGuardRef as Deref>::Target {
unsafe {
let guard_ref_ptr: *const GuardRef<()> = &self.guard_ref;
transmute(TraitObject {data: guard_ref_ptr as *mut (), vtable: self.vtable})
}
}
}
struct GuardRefVec {
// We're basically reimplementing a subset of Vec here.
ptr: ptr::Unique<DynamicGuardRef>,
len: usize,
cap: usize,
}
const MAX_MEMORY_SIZE: usize = isize::MAX as usize;
impl GuardRefVec {
fn new() -> GuardRefVec {
// We want ptr to never be NULL so instead we set it to some arbitrary
// non-null value which is fine since we never call deallocate on the ptr
// if cap is 0. This is because a NULL slice pointer would break the null
// pointer optimization for enums.
unsafe {
GuardRefVec {
ptr: ptr::Unique::new(EMPTY as *mut DynamicGuardRef),
len: 0,
cap: 0,
}
}
}
fn push(&mut self, value: DynamicGuardRef) {
#[cold]
#[inline(never)]
fn resize(vec: &mut GuardRefVec) {
let old_size = vec.cap * size_of::<DynamicGuardRef>();
if old_size >= MAX_MEMORY_SIZE { panic!("capacity overflow") }
let mut size = max(old_size, 2 * size_of::<DynamicGuardRef>()) * 2;
if old_size > size || size > MAX_MEMORY_SIZE {
size = MAX_MEMORY_SIZE;
}
unsafe {
let ptr = if old_size == 0 {
allocate(size, min_align_of::<DynamicGuardRef>()) as *mut DynamicGuardRef
} else {
reallocate(*vec.ptr as *mut u8, old_size, size,
min_align_of::<DynamicGuardRef>()) as *mut DynamicGuardRef
};
if ptr.is_null() { alloc::oom() }
vec.ptr = ptr::Unique::new(ptr);
}
vec.cap = max(vec.cap, 2) * 2;
}
if self.len == self.cap {
resize(self);
}
unsafe {
let end = (*self.ptr).offset(self.len as isize);
ptr::write(&mut *end, value);
self.len += 1;
}
}
fn len(&self) -> usize { self.len }
// We don't want to use an iterator because the list could potentially grow
// while we're iterating in some cases. DynamicGuardRef is Copy, so it will
// stay valid even if we are reallocated while it's in use.
unsafe fn get_unchecked(&self, index: usize) -> DynamicGuardRef {
*self.ptr.offset(index as isize)
}
unsafe fn set_unchecked(&mut self, index: usize, val: DynamicGuardRef) {
*self.ptr.offset(index as isize) = val;
}
unsafe fn set_len(&mut self, len: usize) { self.len = len; }
}
impl Drop for GuardRefVec {
fn drop(&mut self) {
if self.cap != 0 {
unsafe {
deallocate(*self.ptr as *mut u8, self.cap * size_of::<DynamicGuardRef>(),
min_align_of::<DynamicGuardRef>());
}
}
}
}
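
// The remainder of the gist is a separate crate-root file: it enables the
// required nightly features, declares `mod scopedrc;`, and exercises the
// types defined above.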
#![feature(alloc)]
#![feature(box_syntax)]
#![feature(core)]
#![feature(filling_drop)]
#![feature(optin_builtin_traits)]
#![feature(unique)]
#![feature(unsafe_no_drop_flag)]
use scopedrc::{ScopedRc, ScopedRcCreator, ScopedRcGuard, NoCycleMarker, STATIC_GUARD};
use std::cell::{Cell, RefCell};
mod scopedrc;
struct Cycle<'a> {
next: RefCell<Option<ScopedRc<'a, Cycle<'a>>>>,
val: &'a char,
}
impl<'a> Drop for Cycle<'a> {
fn drop(&mut self) {
println!("{}", self.val);
}
}
// Static lifetimes
static D_VAL: char = 'd';
static E_VAL: char = 'e';
static F_VAL: char = 'f';
fn make_cycle<'a, T: ScopedRcCreator<'a>>(guard: &'a T) -> ScopedRc<'a, Cycle<'a>> {
let d = guard.new_rc(Cycle { next: RefCell::new(None), val: &D_VAL });
let e = guard.new_rc(Cycle { next: RefCell::new(None), val: &E_VAL });
let f = guard.new_rc(Cycle { next: RefCell::new(None), val: &F_VAL });
*d.next.borrow_mut() = Some(e.clone()); // Unreturned cycle
*e.next.borrow_mut() = Some(d.clone());
*f.next.borrow_mut() = Some(f.clone()); // self-cycle
return f;
}
static K_VAL: char = 'k';
fn no_cycle<'a, 'b, T: ScopedRcCreator<'a>>(guard: &'b T) -> ScopedRc<'b, Cycle<'a>> {
guard.new_rc(Cycle { next: RefCell::new(None), val: &K_VAL })
}
static G_VAL: char = 'g';
static H_VAL: char = 'h';
fn static_cycle() {
let g = STATIC_GUARD.new_rc(Cycle { next: RefCell::new(None), val: &G_VAL });
let h = ScopedRc::unguarded(Cycle { next: RefCell::new(None), val: &H_VAL });
*g.next.borrow_mut() = Some(g.clone()); // Static cycle will never be collected.
*h.next.borrow_mut() = Some(h.clone());
// Also make static cycle with make_cycle
make_cycle(STATIC_GUARD);
}
fn main() {
let guard1 = ScopedRcGuard::new();
let a_val = 'a';
let b_val = 'b';
let c_val = 'c';
let i_val = 'i';
let j_val = 'j';
// Error: &a_val is not valid for the static lifetime
//let a = STATIC_GUARD.new_rc(Cycle { next: RefCell::new(None), val: &a_val });
// Error: a_val does not outlive guard1
//let a = guard1.new_rc(Cycle { next: RefCell::new(None), val: &a_val });
// Error: guard2 does not outlive target
//let target
let guard2 = ScopedRcGuard::new();
// Ok
let target;
let a = guard2.new_rc(Cycle { next: RefCell::new(None), val: &a_val });
let b = guard2.new_rc(Cycle { next: RefCell::new(None), val: &b_val });
let c = guard2.new_rc(Cycle { next: RefCell::new(None), val: &c_val });
*a.next.borrow_mut() = Some(c.clone()); // Cycle
*b.next.borrow_mut() = Some(make_cycle(&guard2));
*c.next.borrow_mut() = Some(a.clone()); // Cycle
// Error: guard2 is borrowed
//drop(guard2);
target = b;
// Safe because &char type cannot contain cycles.
let i = unsafe { ScopedRc::unsafe_unguarded(&i_val) };
// Statically prevents cycles without a guard
let marker = NoCycleMarker::new();
// Ok: j_val strictly outlives marker
let j = marker.new_rc(Cycle { next: RefCell::new(None), val: &j_val });
// Error: j does not outlive marker
//*j.next.borrow_mut() = Some(j.clone());
// Ok: target does outlive marker
*j.next.borrow_mut() = Some(target.clone());
// Error: marker does not outlive itself
//make_cycle(&marker);
// Ok
let k1 = no_cycle(&marker);
// Ok
let k2 = no_cycle(&guard1);
// At end of scope, the b, i, j, and both k allocations are dropped
// immediately. The rest are cleaned up by the guard2 collector, except for
// the static cycles, which are leaked.
}