path: root/library/alloc/src/rc.rs
Diffstat (limited to 'library/alloc/src/rc.rs')
-rw-r--r--  library/alloc/src/rc.rs | 899
1 file changed, 757 insertions(+), 142 deletions(-)
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index b3305b8ca..c485680f9 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -313,13 +313,17 @@ fn rcbox_layout_for_value_layout(layout: Layout) -> Layout {
#[cfg_attr(not(test), rustc_diagnostic_item = "Rc")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
-pub struct Rc<T: ?Sized> {
+pub struct Rc<
+ T: ?Sized,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
ptr: NonNull<RcBox<T>>,
phantom: PhantomData<RcBox<T>>,
+ alloc: A,
}
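
With the defaulted parameter, `Rc<T>` is now shorthand for `Rc<T, Global>`, so existing code keeps compiling while a custom allocator can be threaded through explicitly. A minimal sketch of the two forms, assuming a nightly toolchain with `allocator_api` enabled:

    #![feature(allocator_api)]
    use std::alloc::{Global, System};
    use std::rc::Rc;

    // `Rc<i32>` and `Rc<i32, Global>` name the same type.
    let a: Rc<i32, Global> = Rc::new(1);
    // Any other `Allocator` is stored in the new `alloc` field.
    let b: Rc<i32, System> = Rc::new_in(2, System);
    assert_eq!(*a + *b, 3);
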
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> !Send for Rc<T> {}
+impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}
// Note that this negative impl isn't strictly necessary for correctness,
// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
@@ -327,20 +331,32 @@ impl<T: ?Sized> !Send for Rc<T> {}
// having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> !Sync for Rc<T> {}
+impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}
#[stable(feature = "catch_unwind", since = "1.9.0")]
-impl<T: RefUnwindSafe + ?Sized> UnwindSafe for Rc<T> {}
+impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
-impl<T: RefUnwindSafe + ?Sized> RefUnwindSafe for Rc<T> {}
+impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}
#[unstable(feature = "coerce_unsized", issue = "18598")]
-impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
impl<T: ?Sized> Rc<T> {
+ #[inline]
+ unsafe fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
+ unsafe { Self::from_inner_in(ptr, Global) }
+ }
+
+ #[inline]
+ unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
+ unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
+ }
+}
+
+impl<T: ?Sized, A: Allocator> Rc<T, A> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
// This unsafety is ok because while this Rc is alive we're guaranteed
@@ -348,12 +364,14 @@ impl<T: ?Sized> Rc<T> {
unsafe { self.ptr.as_ref() }
}
- unsafe fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
- Self { ptr, phantom: PhantomData }
+ #[inline]
+ unsafe fn from_inner_in(ptr: NonNull<RcBox<T>>, alloc: A) -> Self {
+ Self { ptr, phantom: PhantomData, alloc }
}
- unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
- unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
+ #[inline]
+ unsafe fn from_ptr_in(ptr: *mut RcBox<T>, alloc: A) -> Self {
+ unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
}
}
@@ -450,7 +468,7 @@ impl<T> Rc<T> {
let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
- let weak = Weak { ptr: init_ptr };
+ let weak = Weak { ptr: init_ptr, alloc: Global };
// It's important we don't give up ownership of the weak pointer, or
// else the memory might be freed by the time `data_fn` returns. If
@@ -504,7 +522,7 @@ impl<T> Rc<T> {
Rc::from_ptr(Rc::allocate_for_layout(
Layout::new::<T>(),
|layout| Global.allocate(layout),
- |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
+ <*mut u8>::cast,
))
}
}
@@ -537,7 +555,7 @@ impl<T> Rc<T> {
Rc::from_ptr(Rc::allocate_for_layout(
Layout::new::<T>(),
|layout| Global.allocate_zeroed(layout),
- |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
+ <*mut u8>::cast,
))
}
}
@@ -594,7 +612,7 @@ impl<T> Rc<T> {
Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
Layout::new::<T>(),
|layout| Global.allocate(layout),
- |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
+ <*mut u8>::cast,
)?))
}
}
@@ -627,7 +645,7 @@ impl<T> Rc<T> {
Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
Layout::new::<T>(),
|layout| Global.allocate_zeroed(layout),
- |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
+ <*mut u8>::cast,
)?))
}
}
@@ -639,6 +657,235 @@ impl<T> Rc<T> {
pub fn pin(value: T) -> Pin<Rc<T>> {
unsafe { Pin::new_unchecked(Rc::new(value)) }
}
+}
+
+impl<T, A: Allocator> Rc<T, A> {
+ /// Returns a reference to the underlying allocator.
+ ///
+ /// Note: this is an associated function, which means that you have
+ /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
+ /// is so that there is no conflict with a method on the inner type.
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn allocator(this: &Self) -> &A {
+ &this.alloc
+ }
+ /// Constructs a new `Rc` in the provided allocator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(allocator_api)]
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let five = Rc::new_in(5, System);
+ /// ```
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ #[inline]
+ pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
+ // NOTE: Prefer `match` over `unwrap_or_else` since the closure is sometimes
+ // not inlinable, which would increase code size.
+ match Self::try_new_in(value, alloc) {
+ Ok(m) => m,
+ Err(_) => handle_alloc_error(Layout::new::<RcBox<T>>()),
+ }
+ }
+
+ /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let mut five = Rc::<u32, _>::new_uninit_in(System);
+ ///
+ /// let five = unsafe {
+ /// // Deferred initialization:
+ /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+ ///
+ /// five.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*five, 5)
+ /// ```
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
+ unsafe {
+ Rc::from_ptr_in(
+ Rc::allocate_for_layout(
+ Layout::new::<T>(),
+ |layout| alloc.allocate(layout),
+ <*mut u8>::cast,
+ ),
+ alloc,
+ )
+ }
+ }
+
+ /// Constructs a new `Rc` with uninitialized contents, with the memory
+ /// being filled with `0` bytes, in the provided allocator.
+ ///
+ /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
+ /// incorrect usage of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let zero = Rc::<u32, _>::new_zeroed_in(System);
+ /// let zero = unsafe { zero.assume_init() };
+ ///
+ /// assert_eq!(*zero, 0)
+ /// ```
+ ///
+ /// [zeroed]: mem::MaybeUninit::zeroed
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
+ unsafe {
+ Rc::from_ptr_in(
+ Rc::allocate_for_layout(
+ Layout::new::<T>(),
+ |layout| alloc.allocate_zeroed(layout),
+ <*mut u8>::cast,
+ ),
+ alloc,
+ )
+ }
+ }
+
+ /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
+ /// fails.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(allocator_api)]
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let five = Rc::try_new_in(5, System);
+ /// # Ok::<(), std::alloc::AllocError>(())
+ /// ```
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ #[inline]
+ pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
+ // There is an implicit weak pointer owned by all the strong
+ // pointers, which ensures that the weak destructor never frees
+ // the allocation while the strong destructor is running, even
+ // if the weak pointer is stored inside the strong one.
+ let (ptr, alloc) = Box::into_unique(Box::try_new_in(
+ RcBox { strong: Cell::new(1), weak: Cell::new(1), value },
+ alloc,
+ )?);
+ Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
+ }
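
The "implicit weak" invariant described above is observable from safe code: the extra weak reference held collectively by the strong pointers is subtracted back out of `Rc::weak_count`, so a fresh `Rc` reports zero weak references even though `RcBox.weak` starts at 1. For instance:

    use std::rc::Rc;

    let five = Rc::new(5);
    assert_eq!(1, Rc::strong_count(&five));
    // `RcBox.weak` is 1 here (the implicit weak), but the reported
    // count subtracts it out.
    assert_eq!(0, Rc::weak_count(&five));
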
+
+ /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
+ /// error if the allocation fails.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(allocator_api, new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ ///
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
+ ///
+ /// let five = unsafe {
+ /// // Deferred initialization:
+ /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+ ///
+ /// five.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*five, 5);
+ /// # Ok::<(), std::alloc::AllocError>(())
+ /// ```
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
+ unsafe {
+ Ok(Rc::from_ptr_in(
+ Rc::try_allocate_for_layout(
+ Layout::new::<T>(),
+ |layout| alloc.allocate(layout),
+ <*mut u8>::cast,
+ )?,
+ alloc,
+ ))
+ }
+ }
+
+ /// Constructs a new `Rc` with uninitialized contents, with the memory
+ /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
+ /// fails.
+ ///
+ /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
+ /// incorrect usage of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(allocator_api, new_uninit)]
+ ///
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
+ /// let zero = unsafe { zero.assume_init() };
+ ///
+ /// assert_eq!(*zero, 0);
+ /// # Ok::<(), std::alloc::AllocError>(())
+ /// ```
+ ///
+ /// [zeroed]: mem::MaybeUninit::zeroed
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
+ unsafe {
+ Ok(Rc::from_ptr_in(
+ Rc::try_allocate_for_layout(
+ Layout::new::<T>(),
+ |layout| alloc.allocate_zeroed(layout),
+ <*mut u8>::cast,
+ )?,
+ alloc,
+ ))
+ }
+ }
+
+ /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
+ /// `value` will be pinned in memory and unable to be moved.
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ #[inline]
+ pub fn pin_in(value: T, alloc: A) -> Pin<Self> {
+ unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
+ }
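
`pin_in` carries no doc example yet; a minimal usage sketch in the style of the `new_in` example above (nightly `allocator_api` assumed):

    #![feature(allocator_api)]
    use std::rc::Rc;
    use std::alloc::System;

    let pinned = Rc::pin_in(5, System);
    assert_eq!(*pinned, 5);
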
/// Returns the inner value, if the `Rc` has exactly one strong reference.
///
@@ -665,13 +912,14 @@ impl<T> Rc<T> {
if Rc::strong_count(&this) == 1 {
unsafe {
let val = ptr::read(&*this); // copy the contained object
+ let alloc = ptr::read(&this.alloc); // copy the allocator
// Indicate to Weaks that they can't be promoted by decrementing
// the strong count, and then remove the implicit "strong weak"
// pointer while also handling drop logic by just crafting a
// fake Weak.
this.inner().dec_strong();
- let _weak = Weak { ptr: this.ptr };
+ let _weak = Weak { ptr: this.ptr, alloc };
forget(this);
Ok(val)
}
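
The crafted `Weak` above exists only so that its destructor runs: it decrements the weak count that the strong pointers collectively owned and, if that was the last reference, frees the (already-moved-out-of) allocation. From the caller's side, `try_unwrap` behaves as in the standard doc example:

    use std::rc::Rc;

    let x = Rc::new(3);
    assert_eq!(Rc::try_unwrap(x), Ok(3));

    let x = Rc::new(4);
    let _y = Rc::clone(&x);
    // Two strong references: the value cannot be unwrapped.
    assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
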
@@ -758,7 +1006,7 @@ impl<T> Rc<[T]> {
Layout::array::<T>(len).unwrap(),
|layout| Global.allocate_zeroed(layout),
|mem| {
- ptr::slice_from_raw_parts_mut(mem as *mut T, len)
+ ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
as *mut RcBox<[mem::MaybeUninit<T>]>
},
))
@@ -766,7 +1014,84 @@ impl<T> Rc<[T]> {
}
}
-impl<T> Rc<mem::MaybeUninit<T>> {
+impl<T, A: Allocator> Rc<[T], A> {
+ /// Constructs a new reference-counted slice with uninitialized contents, in the provided allocator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
+ ///
+ /// let values = unsafe {
+ /// // Deferred initialization:
+ /// Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
+ /// Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
+ /// Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
+ ///
+ /// values.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*values, [1, 2, 3])
+ /// ```
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
+ unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
+ }
+
+ /// Constructs a new reference-counted slice with uninitialized contents, with the memory
+ /// being filled with `0` bytes, in the provided allocator.
+ ///
+ /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
+ /// incorrect usage of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
+ /// let values = unsafe { values.assume_init() };
+ ///
+ /// assert_eq!(*values, [0, 0, 0])
+ /// ```
+ ///
+ /// [zeroed]: mem::MaybeUninit::zeroed
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
+ unsafe {
+ Rc::from_ptr_in(
+ Rc::allocate_for_layout(
+ Layout::array::<T>(len).unwrap(),
+ |layout| alloc.allocate_zeroed(layout),
+ |mem| {
+ ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
+ as *mut RcBox<[mem::MaybeUninit<T>]>
+ },
+ ),
+ alloc,
+ )
+ }
+ }
+}
+
+impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
/// Converts to `Rc<T>`.
///
/// # Safety
@@ -798,12 +1123,16 @@ impl<T> Rc<mem::MaybeUninit<T>> {
/// ```
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
- pub unsafe fn assume_init(self) -> Rc<T> {
- unsafe { Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) }
+ pub unsafe fn assume_init(self) -> Rc<T, A>
+ where
+ A: Clone,
+ {
+ let md_self = mem::ManuallyDrop::new(self);
+ unsafe { Rc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) }
}
}
-impl<T> Rc<[mem::MaybeUninit<T>]> {
+impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
/// Converts to `Rc<[T]>`.
///
/// # Safety
@@ -838,12 +1167,128 @@ impl<T> Rc<[mem::MaybeUninit<T>]> {
/// ```
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
- pub unsafe fn assume_init(self) -> Rc<[T]> {
- unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
+ pub unsafe fn assume_init(self) -> Rc<[T], A>
+ where
+ A: Clone,
+ {
+ let md_self = mem::ManuallyDrop::new(self);
+ unsafe { Rc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) }
}
}
impl<T: ?Sized> Rc<T> {
+ /// Constructs an `Rc<T>` from a raw pointer.
+ ///
+ /// The raw pointer must have been previously returned by a call to
+ /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size
+ /// and alignment as `T`. This is trivially true if `U` is `T`.
+ /// Note that if `U` is not `T` but has the same size and alignment, this is
+ /// basically like transmuting references of different types. See
+ /// [`mem::transmute`][transmute] for more information on what
+ /// restrictions apply in this case.
+ ///
+ /// The raw pointer must point to a block of memory allocated by the global allocator.
+ ///
+ /// The user of `from_raw` has to make sure a specific value of `T` is only
+ /// dropped once.
+ ///
+ /// This function is unsafe because improper use may lead to memory unsafety,
+ /// even if the returned `Rc<T>` is never accessed.
+ ///
+ /// [into_raw]: Rc::into_raw
+ /// [transmute]: core::mem::transmute
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let x = Rc::new("hello".to_owned());
+ /// let x_ptr = Rc::into_raw(x);
+ ///
+ /// unsafe {
+ /// // Convert back to an `Rc` to prevent leak.
+ /// let x = Rc::from_raw(x_ptr);
+ /// assert_eq!(&*x, "hello");
+ ///
+ /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
+ /// }
+ ///
+ /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
+ /// ```
+ #[inline]
+ #[stable(feature = "rc_raw", since = "1.17.0")]
+ pub unsafe fn from_raw(ptr: *const T) -> Self {
+ unsafe { Self::from_raw_in(ptr, Global) }
+ }
+
+ /// Increments the strong reference count on the `Rc<T>` associated with the
+ /// provided pointer by one.
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have been obtained through `Rc::into_raw`, the
+ /// associated `Rc` instance must be valid (i.e. the strong count must be at
+ /// least 1) for the duration of this method, and `ptr` must point to a block of memory
+ /// allocated by the global allocator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// unsafe {
+ /// let ptr = Rc::into_raw(five);
+ /// Rc::increment_strong_count(ptr);
+ ///
+ /// let five = Rc::from_raw(ptr);
+ /// assert_eq!(2, Rc::strong_count(&five));
+ /// }
+ /// ```
+ #[inline]
+ #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
+ pub unsafe fn increment_strong_count(ptr: *const T) {
+ unsafe { Self::increment_strong_count_in(ptr, Global) }
+ }
+
+ /// Decrements the strong reference count on the `Rc<T>` associated with the
+ /// provided pointer by one.
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have been obtained through `Rc::into_raw`, the
+ /// associated `Rc` instance must be valid (i.e. the strong count must be at
+ /// least 1) when invoking this method, and `ptr` must point to a block of memory
+ /// allocated by the global allocator. This method can be used to release the final `Rc` and
+ /// backing storage, but **should not** be called after the final `Rc` has been released.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// unsafe {
+ /// let ptr = Rc::into_raw(five);
+ /// Rc::increment_strong_count(ptr);
+ ///
+ /// let five = Rc::from_raw(ptr);
+ /// assert_eq!(2, Rc::strong_count(&five));
+ /// Rc::decrement_strong_count(ptr);
+ /// assert_eq!(1, Rc::strong_count(&five));
+ /// }
+ /// ```
+ #[inline]
+ #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
+ pub unsafe fn decrement_strong_count(ptr: *const T) {
+ unsafe { Self::decrement_strong_count_in(ptr, Global) }
+ }
+}
+
+impl<T: ?Sized, A: Allocator> Rc<T, A> {
/// Consumes the `Rc`, returning the wrapped pointer.
///
/// To avoid a memory leak the pointer must be converted back to an `Rc` using
@@ -891,16 +1336,18 @@ impl<T: ?Sized> Rc<T> {
unsafe { ptr::addr_of_mut!((*ptr).value) }
}
- /// Constructs an `Rc<T>` from a raw pointer.
+ /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
///
/// The raw pointer must have been previously returned by a call to
- /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size
+ /// [`Rc<U, A>::into_raw`][into_raw] where `U` must have the same size
/// and alignment as `T`. This is trivially true if `U` is `T`.
/// Note that if `U` is not `T` but has the same size and alignment, this is
/// basically like transmuting references of different types. See
/// [`mem::transmute`] for more information on what
/// restrictions apply in this case.
///
+ /// The raw pointer must point to a block of memory allocated by `alloc`.
+ ///
/// The user of `from_raw` has to make sure a specific value of `T` is only
/// dropped once.
///
@@ -912,14 +1359,17 @@ impl<T: ?Sized> Rc<T> {
/// # Examples
///
/// ```
+ /// #![feature(allocator_api)]
+ ///
/// use std::rc::Rc;
+ /// use std::alloc::System;
///
- /// let x = Rc::new("hello".to_owned());
+ /// let x = Rc::new_in("hello".to_owned(), System);
/// let x_ptr = Rc::into_raw(x);
///
/// unsafe {
/// // Convert back to an `Rc` to prevent leak.
- /// let x = Rc::from_raw(x_ptr);
+ /// let x = Rc::from_raw_in(x_ptr, System);
/// assert_eq!(&*x, "hello");
///
/// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
@@ -927,14 +1377,14 @@ impl<T: ?Sized> Rc<T> {
///
/// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
/// ```
- #[stable(feature = "rc_raw", since = "1.17.0")]
- pub unsafe fn from_raw(ptr: *const T) -> Self {
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
let offset = unsafe { data_offset(ptr) };
// Reverse the offset to find the original RcBox.
let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcBox<T> };
- unsafe { Self::from_ptr(rc_ptr) }
+ unsafe { Self::from_ptr_in(rc_ptr, alloc) }
}
/// Creates a new [`Weak`] pointer to this allocation.
@@ -951,11 +1401,14 @@ impl<T: ?Sized> Rc<T> {
#[must_use = "this returns a new `Weak` pointer, \
without modifying the original `Rc`"]
#[stable(feature = "rc_weak", since = "1.4.0")]
- pub fn downgrade(this: &Self) -> Weak<T> {
+ pub fn downgrade(this: &Self) -> Weak<T, A>
+ where
+ A: Clone,
+ {
this.inner().inc_weak();
// Make sure we do not create a dangling Weak
debug_assert!(!is_dangling(this.ptr.as_ptr()));
- Weak { ptr: this.ptr }
+ Weak { ptr: this.ptr, alloc: this.alloc.clone() }
}
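
`downgrade` now needs `A: Clone` because the returned `Weak` carries its own copy of the allocator; the observable behavior is unchanged:

    use std::rc::Rc;

    let five = Rc::new(5);
    let weak_five = Rc::downgrade(&five);
    assert_eq!(1, Rc::weak_count(&five));
    assert!(weak_five.upgrade().is_some());
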
/// Gets the number of [`Weak`] pointers to this allocation.
@@ -999,30 +1452,37 @@ impl<T: ?Sized> Rc<T> {
///
/// # Safety
///
- /// The pointer must have been obtained through `Rc::into_raw`, and the
+ /// The pointer must have been obtained through `Rc::into_raw`, the
/// associated `Rc` instance must be valid (i.e. the strong count must be at
- /// least 1) for the duration of this method.
+ /// least 1) for the duration of this method, and `ptr` must point to a block of memory
+ /// allocated by `alloc`.
///
/// # Examples
///
/// ```
+ /// #![feature(allocator_api)]
+ ///
/// use std::rc::Rc;
+ /// use std::alloc::System;
///
- /// let five = Rc::new(5);
+ /// let five = Rc::new_in(5, System);
///
/// unsafe {
/// let ptr = Rc::into_raw(five);
- /// Rc::increment_strong_count(ptr);
+ /// Rc::increment_strong_count_in(ptr, System);
///
- /// let five = Rc::from_raw(ptr);
+ /// let five = Rc::from_raw_in(ptr, System);
/// assert_eq!(2, Rc::strong_count(&five));
/// }
/// ```
#[inline]
- #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
- pub unsafe fn increment_strong_count(ptr: *const T) {
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
+ where
+ A: Clone,
+ {
// Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
- let rc = unsafe { mem::ManuallyDrop::new(Rc::<T>::from_raw(ptr)) };
+ let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
// Now increase refcount, but don't drop new refcount either
let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
}
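
The double `ManuallyDrop` here is what keeps the operation purely an increment: reconstructing the `Rc` must not be balanced by a drop, and the clone's only lasting effect is its `inc_strong`. The same pattern, written out against the stable `Global`-based entry points:

    use std::mem::ManuallyDrop;
    use std::rc::Rc;

    let five = Rc::new(5);
    let ptr = Rc::into_raw(five);
    unsafe {
        // Reconstruct without taking ownership back...
        let rc = ManuallyDrop::new(Rc::from_raw(ptr));
        // ...and clone without ever dropping the clone: the net effect is +1 strong.
        let _rc_clone: ManuallyDrop<_> = rc.clone();

        let five = Rc::from_raw(ptr);
        assert_eq!(2, Rc::strong_count(&five));
        // Balance the manual increment before `five` drops.
        Rc::decrement_strong_count(ptr);
    }
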
@@ -1032,33 +1492,36 @@ impl<T: ?Sized> Rc<T> {
///
/// # Safety
///
- /// The pointer must have been obtained through `Rc::into_raw`, and the
+ /// The pointer must have been obtained through `Rc::into_raw`, the
/// associated `Rc` instance must be valid (i.e. the strong count must be at
- /// least 1) when invoking this method. This method can be used to release
- /// the final `Rc` and backing storage, but **should not** be called after
- /// the final `Rc` has been released.
+ /// least 1) when invoking this method, and `ptr` must point to a block of memory
+ /// allocated by `alloc`. This method can be used to release the final `Rc` and backing storage,
+ /// but **should not** be called after the final `Rc` has been released.
///
/// # Examples
///
/// ```
+ /// #![feature(allocator_api)]
+ ///
/// use std::rc::Rc;
+ /// use std::alloc::System;
///
- /// let five = Rc::new(5);
+ /// let five = Rc::new_in(5, System);
///
/// unsafe {
/// let ptr = Rc::into_raw(five);
- /// Rc::increment_strong_count(ptr);
+ /// Rc::increment_strong_count_in(ptr, System);
///
- /// let five = Rc::from_raw(ptr);
+ /// let five = Rc::from_raw_in(ptr, System);
/// assert_eq!(2, Rc::strong_count(&five));
- /// Rc::decrement_strong_count(ptr);
+ /// Rc::decrement_strong_count_in(ptr, System);
/// assert_eq!(1, Rc::strong_count(&five));
/// }
/// ```
#[inline]
- #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
- pub unsafe fn decrement_strong_count(ptr: *const T) {
- unsafe { drop(Rc::from_raw(ptr)) };
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
+ unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
}
/// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
@@ -1188,7 +1651,7 @@ impl<T: ?Sized> Rc<T> {
}
}
-impl<T: Clone> Rc<T> {
+impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
/// Makes a mutable reference into the given `Rc`.
///
/// If there are other `Rc` pointers to the same allocation, then `make_mut` will
@@ -1246,7 +1709,7 @@ impl<T: Clone> Rc<T> {
if Rc::strong_count(this) != 1 {
// Gotta clone the data, there are other Rcs.
// Pre-allocate memory to allow writing the cloned value directly.
- let mut rc = Self::new_uninit();
+ let mut rc = Self::new_uninit_in(this.alloc.clone());
unsafe {
let data = Rc::get_mut_unchecked(&mut rc);
(**this).write_clone_into_raw(data.as_mut_ptr());
@@ -1254,7 +1717,7 @@ impl<T: Clone> Rc<T> {
}
} else if Rc::weak_count(this) != 0 {
// Can just steal the data, all that's left is Weaks
- let mut rc = Self::new_uninit();
+ let mut rc = Self::new_uninit_in(this.alloc.clone());
unsafe {
let data = Rc::get_mut_unchecked(&mut rc);
data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
@@ -1310,7 +1773,7 @@ impl<T: Clone> Rc<T> {
}
}
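
Both branches now place the fresh allocation in `this.alloc.clone()`, preserving the clone-on-write behavior of `make_mut` as seen from the caller:

    use std::rc::Rc;

    let mut data = Rc::new(5);
    *Rc::make_mut(&mut data) += 1;     // no other pointers: mutate in place
    let other = Rc::clone(&data);      // now two strong references
    *Rc::make_mut(&mut data) += 1;     // clones into a fresh allocation
    assert_eq!(*data, 7);
    assert_eq!(*other, 6);
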
-impl Rc<dyn Any> {
+impl<A: Allocator + Clone> Rc<dyn Any, A> {
/// Attempt to downcast the `Rc<dyn Any>` to a concrete type.
///
/// # Examples
@@ -1331,12 +1794,13 @@ impl Rc<dyn Any> {
/// ```
#[inline]
#[stable(feature = "rc_downcast", since = "1.29.0")]
- pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> {
+ pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
if (*self).is::<T>() {
unsafe {
let ptr = self.ptr.cast::<RcBox<T>>();
+ let alloc = self.alloc.clone();
forget(self);
- Ok(Rc::from_inner(ptr))
+ Ok(Rc::from_inner_in(ptr, alloc))
}
} else {
Err(self)
@@ -1371,11 +1835,12 @@ impl Rc<dyn Any> {
/// [`downcast`]: Self::downcast
#[inline]
#[unstable(feature = "downcast_unchecked", issue = "90850")]
- pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T> {
+ pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
unsafe {
let ptr = self.ptr.cast::<RcBox<T>>();
+ let alloc = self.alloc.clone();
mem::forget(self);
- Rc::from_inner(ptr)
+ Rc::from_inner_in(ptr, alloc)
}
}
}
@@ -1427,25 +1892,27 @@ impl<T: ?Sized> Rc<T> {
Ok(inner)
}
+}
+impl<T: ?Sized, A: Allocator> Rc<T, A> {
/// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
#[cfg(not(no_global_oom_handling))]
- unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
+ unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcBox<T> {
// Allocate for the `RcBox<T>` using the given value.
unsafe {
- Self::allocate_for_layout(
+ Rc::<T>::allocate_for_layout(
Layout::for_value(&*ptr),
- |layout| Global.allocate(layout),
+ |layout| alloc.allocate(layout),
|mem| mem.with_metadata_of(ptr as *const RcBox<T>),
)
}
}
#[cfg(not(no_global_oom_handling))]
- fn from_box(src: Box<T>) -> Rc<T> {
+ fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
unsafe {
let value_size = size_of_val(&*src);
- let ptr = Self::allocate_for_ptr(&*src);
+ let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
// Copy value as bytes
ptr::copy_nonoverlapping(
@@ -1455,10 +1922,11 @@ impl<T: ?Sized> Rc<T> {
);
// Free the allocation without dropping its contents
- let src = Box::from_raw(Box::into_raw(src) as *mut mem::ManuallyDrop<T>);
+ let (bptr, alloc) = Box::into_raw_with_allocator(src);
+ let src = Box::from_raw(bptr as *mut mem::ManuallyDrop<T>);
drop(src);
- Self::from_ptr(ptr)
+ Self::from_ptr_in(ptr, alloc)
}
}
}
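
Seen through the public `From` impl further down, `from_box_in` memcpys the (possibly unsized) value into a fresh `RcBox` and releases the box's own buffer without running the value's destructor:

    use std::rc::Rc;

    let boxed: Box<str> = Box::from("statue");
    // Goes through `from_box_in`: the bytes are copied into an `RcBox<str>`,
    // then the box's buffer is freed without dropping the `str` contents.
    let shared: Rc<str> = Rc::from(boxed);
    assert_eq!("statue", &shared[..]);
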
@@ -1471,7 +1939,7 @@ impl<T> Rc<[T]> {
Self::allocate_for_layout(
Layout::array::<T>(len).unwrap(),
|layout| Global.allocate(layout),
- |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>,
+ |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcBox<[T]>,
)
}
}
@@ -1538,6 +2006,21 @@ impl<T> Rc<[T]> {
}
}
+impl<T, A: Allocator> Rc<[T], A> {
+ /// Allocates an `RcBox<[T]>` with the given length.
+ #[inline]
+ #[cfg(not(no_global_oom_handling))]
+ unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcBox<[T]> {
+ unsafe {
+ Rc::<[T]>::allocate_for_layout(
+ Layout::array::<T>(len).unwrap(),
+ |layout| alloc.allocate(layout),
+ |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcBox<[T]>,
+ )
+ }
+ }
+}
+
/// Specialization trait used for `From<&[T]>`.
trait RcFromSlice<T> {
fn from_slice(slice: &[T]) -> Self;
@@ -1560,7 +2043,7 @@ impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> Deref for Rc<T> {
+impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
type Target = T;
#[inline(always)]
@@ -1573,7 +2056,7 @@ impl<T: ?Sized> Deref for Rc<T> {
impl<T: ?Sized> Receiver for Rc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
+unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
/// Drops the `Rc`.
///
/// This will decrement the strong reference count. If the strong reference
@@ -1611,7 +2094,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
self.inner().dec_weak();
if self.inner().weak() == 0 {
- Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
+ self.alloc.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
}
}
}
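
This two-stage teardown (drop the value when the strong count hits zero, deallocate only once the weak count follows) is observable whenever `Weak`s outlive the last `Rc`:

    use std::rc::Rc;

    struct Loud;
    impl Drop for Loud {
        fn drop(&mut self) {
            println!("value dropped");
        }
    }

    let strong = Rc::new(Loud);
    let weak = Rc::downgrade(&strong);
    drop(strong);                    // prints "value dropped"; allocation kept alive for `weak`
    assert!(weak.upgrade().is_none());
    drop(weak);                      // weak count hits zero: allocation freed
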
@@ -1619,7 +2102,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> Clone for Rc<T> {
+impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
/// Makes a clone of the `Rc` pointer.
///
/// This creates another pointer to the same allocation, increasing the
@@ -1635,10 +2118,10 @@ impl<T: ?Sized> Clone for Rc<T> {
/// let _ = Rc::clone(&five);
/// ```
#[inline]
- fn clone(&self) -> Rc<T> {
+ fn clone(&self) -> Self {
unsafe {
self.inner().inc_strong();
- Self::from_inner(self.ptr)
+ Self::from_inner_in(self.ptr, self.alloc.clone())
}
}
}
@@ -1663,20 +2146,20 @@ impl<T: Default> Default for Rc<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-trait RcEqIdent<T: ?Sized + PartialEq> {
- fn eq(&self, other: &Rc<T>) -> bool;
- fn ne(&self, other: &Rc<T>) -> bool;
+trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
+ fn eq(&self, other: &Rc<T, A>) -> bool;
+ fn ne(&self, other: &Rc<T, A>) -> bool;
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialEq> RcEqIdent<T> for Rc<T> {
+impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
#[inline]
- default fn eq(&self, other: &Rc<T>) -> bool {
+ default fn eq(&self, other: &Rc<T, A>) -> bool {
**self == **other
}
#[inline]
- default fn ne(&self, other: &Rc<T>) -> bool {
+ default fn ne(&self, other: &Rc<T, A>) -> bool {
**self != **other
}
}
@@ -1695,20 +2178,20 @@ impl<T: Eq> MarkerEq for T {}
///
/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + MarkerEq> RcEqIdent<T> for Rc<T> {
+impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
#[inline]
- fn eq(&self, other: &Rc<T>) -> bool {
+ fn eq(&self, other: &Rc<T, A>) -> bool {
Rc::ptr_eq(self, other) || **self == **other
}
#[inline]
- fn ne(&self, other: &Rc<T>) -> bool {
+ fn ne(&self, other: &Rc<T, A>) -> bool {
!Rc::ptr_eq(self, other) && **self != **other
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
+impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
/// Equality for two `Rc`s.
///
/// Two `Rc`s are equal if their inner values are equal, even if they are
@@ -1728,7 +2211,7 @@ impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
/// assert!(five == Rc::new(5));
/// ```
#[inline]
- fn eq(&self, other: &Rc<T>) -> bool {
+ fn eq(&self, other: &Rc<T, A>) -> bool {
RcEqIdent::eq(self, other)
}
@@ -1750,16 +2233,16 @@ impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
/// assert!(five != Rc::new(6));
/// ```
#[inline]
- fn ne(&self, other: &Rc<T>) -> bool {
+ fn ne(&self, other: &Rc<T, A>) -> bool {
RcEqIdent::ne(self, other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Eq> Eq for Rc<T> {}
+impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
+impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
/// Partial comparison for two `Rc`s.
///
/// The two are compared by calling `partial_cmp()` on their inner values.
@@ -1775,7 +2258,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
/// ```
#[inline(always)]
- fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> {
+ fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
@@ -1793,7 +2276,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// assert!(five < Rc::new(6));
/// ```
#[inline(always)]
- fn lt(&self, other: &Rc<T>) -> bool {
+ fn lt(&self, other: &Rc<T, A>) -> bool {
**self < **other
}
@@ -1811,7 +2294,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// assert!(five <= Rc::new(5));
/// ```
#[inline(always)]
- fn le(&self, other: &Rc<T>) -> bool {
+ fn le(&self, other: &Rc<T, A>) -> bool {
**self <= **other
}
@@ -1829,7 +2312,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// assert!(five > Rc::new(4));
/// ```
#[inline(always)]
- fn gt(&self, other: &Rc<T>) -> bool {
+ fn gt(&self, other: &Rc<T, A>) -> bool {
**self > **other
}
@@ -1847,13 +2330,13 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// assert!(five >= Rc::new(5));
/// ```
#[inline(always)]
- fn ge(&self, other: &Rc<T>) -> bool {
+ fn ge(&self, other: &Rc<T, A>) -> bool {
**self >= **other
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Ord> Ord for Rc<T> {
+impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
/// Comparison for two `Rc`s.
///
/// The two are compared by calling `cmp()` on their inner values.
@@ -1869,34 +2352,34 @@ impl<T: ?Sized + Ord> Ord for Rc<T> {
/// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
/// ```
#[inline]
- fn cmp(&self, other: &Rc<T>) -> Ordering {
+ fn cmp(&self, other: &Rc<T, A>) -> Ordering {
(**self).cmp(&**other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Hash> Hash for Rc<T> {
+impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state);
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + fmt::Display> fmt::Display for Rc<T> {
+impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
+impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> fmt::Pointer for Rc<T> {
+impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&(&**self as *const T), f)
}
@@ -1982,7 +2465,7 @@ impl From<String> for Rc<str> {
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
-impl<T: ?Sized> From<Box<T>> for Rc<T> {
+impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
/// Move a boxed object to a new, reference counted, allocation.
///
/// # Example
@@ -1994,31 +2477,37 @@ impl<T: ?Sized> From<Box<T>> for Rc<T> {
/// assert_eq!(1, *shared);
/// ```
#[inline]
- fn from(v: Box<T>) -> Rc<T> {
- Rc::from_box(v)
+ fn from(v: Box<T, A>) -> Rc<T, A> {
+ Rc::from_box_in(v)
}
}
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
-impl<T> From<Vec<T>> for Rc<[T]> {
+impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
/// Allocate a reference-counted slice and move `v`'s items into it.
///
/// # Example
///
/// ```
/// # use std::rc::Rc;
- /// let original: Box<Vec<i32>> = Box::new(vec![1, 2, 3]);
- /// let shared: Rc<Vec<i32>> = Rc::from(original);
- /// assert_eq!(vec![1, 2, 3], *shared);
+ /// let unique: Vec<i32> = vec![1, 2, 3];
+ /// let shared: Rc<[i32]> = Rc::from(unique);
+ /// assert_eq!(&[1, 2, 3], &shared[..]);
/// ```
#[inline]
- fn from(mut v: Vec<T>) -> Rc<[T]> {
+ fn from(v: Vec<T, A>) -> Rc<[T], A> {
unsafe {
- let rc = Rc::copy_from_slice(&v);
- // Allow the Vec to free its memory, but not destroy its contents
- v.set_len(0);
- rc
+ let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
+
+ let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
+ ptr::copy_nonoverlapping(vec_ptr, &mut (*rc_ptr).value as *mut [T] as *mut T, len);
+
+ // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
+ // without dropping its contents or the allocator
+ let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
+
+ Self::from_ptr_in(rc_ptr, alloc)
}
}
}
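
Note that the conversion allocates a buffer sized exactly for `len` elements in the vector's own allocator; spare `Vec` capacity is not reused. For example:

    use std::rc::Rc;

    let mut v = Vec::with_capacity(10);
    v.extend([1, 2, 3]);
    // The three items are moved into a new, exactly-sized `RcBox<[i32]>`;
    // the ten-element buffer is deallocated without dropping the items twice.
    let shared: Rc<[i32]> = Rc::from(v);
    assert_eq!(&[1, 2, 3], &shared[..]);
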
@@ -2189,7 +2678,10 @@ impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
///
/// [`upgrade`]: Weak::upgrade
#[stable(feature = "rc_weak", since = "1.4.0")]
-pub struct Weak<T: ?Sized> {
+pub struct Weak<
+ T: ?Sized,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
// This is a `NonNull` to allow optimizing the size of this type in enums,
// but it is not necessarily a valid pointer.
// `Weak::new` sets this to `usize::MAX` so that it doesn’t need
@@ -2197,15 +2689,16 @@ pub struct Weak<T: ?Sized> {
// will ever have because RcBox has alignment at least 2.
// This is only possible when `T: Sized`; unsized `T` never dangle.
ptr: NonNull<RcBox<T>>,
+ alloc: A,
}
#[stable(feature = "rc_weak", since = "1.4.0")]
-impl<T: ?Sized> !Send for Weak<T> {}
+impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
#[stable(feature = "rc_weak", since = "1.4.0")]
-impl<T: ?Sized> !Sync for Weak<T> {}
+impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}
#[unstable(feature = "coerce_unsized", issue = "18598")]
-impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
@@ -2224,16 +2717,45 @@ impl<T> Weak<T> {
/// let empty: Weak<i64> = Weak::new();
/// assert!(empty.upgrade().is_none());
/// ```
+ #[inline]
#[stable(feature = "downgraded_weak", since = "1.10.0")]
- #[rustc_const_unstable(feature = "const_weak_new", issue = "95091", reason = "recently added")]
+ #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
#[must_use]
pub const fn new() -> Weak<T> {
- Weak { ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) } }
+ Weak {
+ ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) },
+ alloc: Global,
+ }
+ }
+}
+
+impl<T, A: Allocator> Weak<T, A> {
+ /// Constructs a new `Weak<T, A>` in the provided allocator, without allocating any memory.
+ /// Calling [`upgrade`] on the return value always gives [`None`].
+ ///
+ /// [`upgrade`]: Weak::upgrade
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::rc::Weak;
+ /// use std::alloc::System;
+ ///
+ /// let empty: Weak<i64, _> = Weak::new_in(System);
+ /// assert!(empty.upgrade().is_none());
+ /// ```
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn new_in(alloc: A) -> Weak<T, A> {
+ Weak {
+ ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) },
+ alloc,
+ }
}
}
pub(crate) fn is_dangling<T: ?Sized>(ptr: *mut T) -> bool {
- (ptr as *mut ()).addr() == usize::MAX
+ (ptr.cast::<()>()).addr() == usize::MAX
}
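
`usize::MAX` can serve as the sentinel because a live `RcBox` is aligned to at least 2, so no real allocation can sit at that address; `Weak::new` relies on this:

    use std::rc::Weak;

    // `Weak::new` stores the usize::MAX sentinel instead of allocating, so
    // `upgrade` can detect the dangling pointer without ever dereferencing it.
    let empty: Weak<u64> = Weak::new();
    assert!(empty.upgrade().is_none());
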
/// Helper type to allow accessing the reference counts without
@@ -2244,6 +2766,56 @@ struct WeakInner<'a> {
}
impl<T: ?Sized> Weak<T> {
+ /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
+ ///
+ /// This can be used to safely get a strong reference (by calling [`upgrade`]
+ /// later) or to deallocate the weak count by dropping the `Weak<T>`.
+ ///
+ /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
+ /// as these don't own anything; the method still works on them).
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have originated from [`into_raw`] and must still own its potential
+ /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
+ ///
+ /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
+ /// takes ownership of one weak reference currently represented as a raw pointer (the weak
+ /// count is not modified by this operation) and therefore it must be paired with a previous
+ /// call to [`into_raw`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::{Rc, Weak};
+ ///
+ /// let strong = Rc::new("hello".to_owned());
+ ///
+ /// let raw_1 = Rc::downgrade(&strong).into_raw();
+ /// let raw_2 = Rc::downgrade(&strong).into_raw();
+ ///
+ /// assert_eq!(2, Rc::weak_count(&strong));
+ ///
+ /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
+ /// assert_eq!(1, Rc::weak_count(&strong));
+ ///
+ /// drop(strong);
+ ///
+ /// // Decrement the last weak count.
+ /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
+ /// ```
+ ///
+ /// [`into_raw`]: Weak::into_raw
+ /// [`upgrade`]: Weak::upgrade
+ /// [`new`]: Weak::new
+ #[inline]
+ #[stable(feature = "weak_into_raw", since = "1.45.0")]
+ pub unsafe fn from_raw(ptr: *const T) -> Self {
+ unsafe { Self::from_raw_in(ptr, Global) }
+ }
+}
+
+impl<T: ?Sized, A: Allocator> Weak<T, A> {
/// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
///
/// The pointer is valid only if there are some strong references. The pointer may be dangling,
@@ -2321,6 +2893,45 @@ impl<T: ?Sized> Weak<T> {
result
}
+ /// Consumes the `Weak<T>`, returning the wrapped raw pointer and the allocator.
+ ///
+ /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
+ /// one weak reference (the weak count is not modified by this operation). The pair can be
+ /// turned back into the `Weak<T>` with [`from_raw_in`].
+ ///
+ /// The same restrictions of accessing the target of the pointer as with
+ /// [`as_ptr`] apply.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(allocator_api)]
+ /// use std::rc::{Rc, Weak};
+ ///
+ /// let strong = Rc::new("hello".to_owned());
+ /// let weak = Rc::downgrade(&strong);
+ /// let (raw, alloc) = weak.into_raw_and_alloc();
+ ///
+ /// assert_eq!(1, Rc::weak_count(&strong));
+ /// assert_eq!("hello", unsafe { &*raw });
+ ///
+ /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
+ /// assert_eq!(0, Rc::weak_count(&strong));
+ /// ```
+ ///
+ /// [`from_raw_in`]: Weak::from_raw_in
+ /// [`as_ptr`]: Weak::as_ptr
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn into_raw_and_alloc(self) -> (*const T, A)
+ where
+ A: Clone,
+ {
+ let result = self.as_ptr();
+ let alloc = self.alloc.clone();
+ mem::forget(self);
+ (result, alloc)
+ }
+
/// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
///
/// This can be used to safely get a strong reference (by calling [`upgrade`]
@@ -2332,7 +2943,7 @@ impl<T: ?Sized> Weak<T> {
/// # Safety
///
/// The pointer must have originated from [`into_raw`] and must still own its potential
- /// weak reference.
+ /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
///
/// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
/// takes ownership of one weak reference currently represented as a raw pointer (the weak
@@ -2363,8 +2974,9 @@ impl<T: ?Sized> Weak<T> {
/// [`into_raw`]: Weak::into_raw
/// [`upgrade`]: Weak::upgrade
/// [`new`]: Weak::new
- #[stable(feature = "weak_into_raw", since = "1.45.0")]
- pub unsafe fn from_raw(ptr: *const T) -> Self {
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
// See Weak::as_ptr for context on how the input pointer is derived.
let ptr = if is_dangling(ptr as *mut T) {
@@ -2380,7 +2992,7 @@ impl<T: ?Sized> Weak<T> {
};
// SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
- Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } }
+ Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
}
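
The dangling branch above means even a `Weak::new` pointer survives a raw round trip; only non-dangling pointers get the `data_offset` adjustment:

    use std::rc::Weak;

    let empty: Weak<u64> = Weak::new();
    let raw = empty.into_raw();
    // The sentinel address is recognized by `from_raw` and passed through
    // unchanged instead of being offset back to an `RcBox`.
    let empty = unsafe { Weak::from_raw(raw) };
    assert!(empty.upgrade().is_none());
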
/// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
@@ -2409,7 +3021,10 @@ impl<T: ?Sized> Weak<T> {
#[must_use = "this returns a new `Rc`, \
without modifying the original weak pointer"]
#[stable(feature = "rc_weak", since = "1.4.0")]
- pub fn upgrade(&self) -> Option<Rc<T>> {
+ pub fn upgrade(&self) -> Option<Rc<T, A>>
+ where
+ A: Clone,
+ {
let inner = self.inner()?;
if inner.strong() == 0 {
@@ -2417,7 +3032,7 @@ impl<T: ?Sized> Weak<T> {
} else {
unsafe {
inner.inc_strong();
- Some(Rc::from_inner(self.ptr))
+ Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
}
}
}
@@ -2437,15 +3052,15 @@ impl<T: ?Sized> Weak<T> {
#[must_use]
#[stable(feature = "weak_counts", since = "1.41.0")]
pub fn weak_count(&self) -> usize {
- self.inner()
- .map(|inner| {
- if inner.strong() > 0 {
- inner.weak() - 1 // subtract the implicit weak ptr
- } else {
- 0
- }
- })
- .unwrap_or(0)
+ if let Some(inner) = self.inner() {
+ if inner.strong() > 0 {
+ inner.weak() - 1 // subtract the implicit weak ptr
+ } else {
+ 0
+ }
+ } else {
+ 0
+ }
}
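
The rewritten control flow keeps the same contract: one is subtracted for the implicit weak while any strong pointer is alive, and zero is reported once the strong count reaches zero:

    use std::rc::Rc;

    let five = Rc::new(5);
    let weak_five = Rc::downgrade(&five);
    assert_eq!(1, weak_five.weak_count());  // implicit weak subtracted out
    drop(five);
    assert_eq!(0, weak_five.weak_count());  // no strong pointers left
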
/// Returns `None` when the pointer is dangling and there is no allocated `RcBox`,
@@ -2513,7 +3128,7 @@ impl<T: ?Sized> Weak<T> {
}
#[stable(feature = "rc_weak", since = "1.4.0")]
-unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak<T> {
+unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
/// Drops the `Weak` pointer.
///
/// # Examples
@@ -2546,14 +3161,14 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak<T> {
// the strong pointers have disappeared.
if inner.weak() == 0 {
unsafe {
- Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
+ self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
}
}
}
}
#[stable(feature = "rc_weak", since = "1.4.0")]
-impl<T: ?Sized> Clone for Weak<T> {
+impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
/// Makes a clone of the `Weak` pointer that points to the same allocation.
///
/// # Examples
@@ -2566,16 +3181,16 @@ impl<T: ?Sized> Clone for Weak<T> {
/// let _ = Weak::clone(&weak_five);
/// ```
#[inline]
- fn clone(&self) -> Weak<T> {
+ fn clone(&self) -> Weak<T, A> {
if let Some(inner) = self.inner() {
inner.inc_weak()
}
- Weak { ptr: self.ptr }
+ Weak { ptr: self.ptr, alloc: self.alloc.clone() }
}
}
#[stable(feature = "rc_weak", since = "1.4.0")]
-impl<T: ?Sized> fmt::Debug for Weak<T> {
+impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "(Weak)")
}
@@ -2707,21 +3322,21 @@ impl<'a> RcInnerPtr for WeakInner<'a> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> borrow::Borrow<T> for Rc<T> {
+impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
fn borrow(&self) -> &T {
&**self
}
}
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
-impl<T: ?Sized> AsRef<T> for Rc<T> {
+impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
fn as_ref(&self) -> &T {
&**self
}
}
#[stable(feature = "pin", since = "1.33.0")]
-impl<T: ?Sized> Unpin for Rc<T> {}
+impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
/// Get the offset within an `RcBox` for the payload behind a pointer.
///
@@ -2822,7 +3437,7 @@ impl<T> UniqueRc<T> {
unsafe {
this.ptr.as_ref().inc_weak();
}
- Weak { ptr: this.ptr }
+ Weak { ptr: this.ptr, alloc: Global }
}
/// Converts the `UniqueRc` into a regular [`Rc`]