use core::ffi::c_void;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::iter::{ExactSizeIterator, Iterator};
use core::marker::PhantomData;
use core::mem::ManuallyDrop;
use core::ops::Deref;
use core::ptr;
use core::usize;

use super::{Arc, ArcInner, HeaderSliceWithLength, HeaderWithLength};

/// A "thin" `Arc` containing dynamically sized data.
///
/// This is functionally equivalent to `Arc<(H, [T])>`.
///
/// When you create an `Arc` containing a dynamically sized type
/// like `HeaderSlice<H, [T]>`, the `Arc` is represented on the stack
/// as a "fat pointer", where the length of the slice is stored
/// alongside the `Arc`'s pointer. In some situations you may wish to
/// have a thin pointer instead, perhaps for FFI compatibility
/// or space efficiency.
///
/// `ThinArc` solves this by storing the length in the allocation itself,
/// via `HeaderSliceWithLength`.
///
/// Note that we use `[T; 0]` in the pointee type in order to have the right
/// alignment for `T`.
#[repr(transparent)]
pub struct ThinArc<H, T> {
    ptr: ptr::NonNull<ArcInner<HeaderSliceWithLength<H, [T; 0]>>>,
    phantom: PhantomData<(H, T)>,
}

unsafe impl<H: Sync + Send, T: Sync + Send> Send for ThinArc<H, T> {}
unsafe impl<H: Sync + Send, T: Sync + Send> Sync for ThinArc<H, T> {}

// Synthesize a fat pointer from a thin pointer.
//
// See the comment around the analogous operation in from_header_and_iter.
fn thin_to_thick<H, T>(
    thin: *mut ArcInner<HeaderSliceWithLength<H, [T; 0]>>,
) -> *mut ArcInner<HeaderSliceWithLength<H, [T]>> {
    let len = unsafe { (*thin).data.header.length };
    let fake_slice = ptr::slice_from_raw_parts_mut(thin as *mut T, len);

    fake_slice as *mut ArcInner<HeaderSliceWithLength<H, [T]>>
}

impl<H, T> ThinArc<H, T> {
    /// Temporarily converts `self` into a bona fide `Arc` and exposes it to the
    /// provided callback. The refcount is not modified.
    #[inline]
    pub fn with_arc<F, U>(&self, f: F) -> U
    where
        F: FnOnce(&Arc<HeaderSliceWithLength<H, [T]>>) -> U,
    {
        // Synthesize a transient Arc, which never touches the refcount of the ArcInner.
        let transient = unsafe {
            ManuallyDrop::new(Arc {
                p: ptr::NonNull::new_unchecked(thin_to_thick(self.ptr.as_ptr())),
                phantom: PhantomData,
            })
        };

        // Expose the transient Arc to the callback, which may clone it if it wants,
        // and forward the result to the user.
        f(&transient)
    }

    /// Creates a `ThinArc` for a `HeaderSlice` using the given header struct and
    /// an iterator to generate the slice.
    pub fn from_header_and_iter<I>(header: H, items: I) -> Self
    where
        I: Iterator<Item = T> + ExactSizeIterator,
    {
        let header = HeaderWithLength::new(header, items.len());
        Arc::into_thin(Arc::from_header_and_iter(header, items))
    }

    /// Creates a `ThinArc` for a `HeaderSlice` using the given header struct and
    /// a slice to copy.
    pub fn from_header_and_slice(header: H, items: &[T]) -> Self
    where
        T: Copy,
    {
        let header = HeaderWithLength::new(header, items.len());
        Arc::into_thin(Arc::from_header_and_slice(header, items))
    }

    /// Returns the address on the heap of the `ThinArc` itself -- not the `T`
    /// within it -- for memory reporting.
    #[inline]
    pub fn ptr(&self) -> *const c_void {
        self.ptr.as_ptr() as *const ArcInner<HeaderSliceWithLength<H, [T; 0]>> as *const c_void
    }

    /// Returns the address on the heap of the `Arc` itself -- not the `T` within
    /// it -- for memory reporting.
    #[inline]
    pub fn heap_ptr(&self) -> *const c_void {
        self.ptr()
    }

    /// Constructs a `ThinArc` from a raw pointer.
    ///
    /// # Safety
    ///
    /// The raw pointer must have been previously returned by a call to
    /// `ThinArc::into_raw`.
    ///
    /// The caller of `from_raw` must ensure that each specific value of `T` is
    /// dropped only once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `ThinArc` is never accessed.
    #[inline]
    pub unsafe fn from_raw(ptr: *const c_void) -> Self {
        Self {
            ptr: ptr::NonNull::new_unchecked(ptr as *mut c_void).cast(),
            phantom: PhantomData,
        }
    }

    /// Consumes the `ThinArc` and returns the wrapped pointer.
    #[inline]
    pub fn into_raw(self) -> *const c_void {
        let this = ManuallyDrop::new(self);
        this.ptr.cast().as_ptr()
    }

    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `ThinArc` is not consumed.
    /// The pointer is valid for as long as there are strong counts in the `ThinArc`.
    #[inline]
    pub fn as_ptr(&self) -> *const c_void {
        self.ptr()
    }
}

impl<H, T> Deref for ThinArc<H, T> {
    type Target = HeaderSliceWithLength<H, [T]>;

    #[inline]
    fn deref(&self) -> &Self::Target {
        unsafe { &(*thin_to_thick(self.ptr.as_ptr())).data }
    }
}

impl<H, T> Clone for ThinArc<H, T> {
    #[inline]
    fn clone(&self) -> Self {
        ThinArc::with_arc(self, |a| Arc::into_thin(a.clone()))
    }
}

impl<H, T> Drop for ThinArc<H, T> {
    #[inline]
    fn drop(&mut self) {
        let _ = Arc::from_thin(ThinArc {
            ptr: self.ptr,
            phantom: PhantomData,
        });
    }
}

impl<H, T> Arc<HeaderSliceWithLength<H, [T]>> {
    /// Converts an `Arc` into a `ThinArc`. This consumes the `Arc`, so the refcount
    /// is not modified.
    #[inline]
    pub fn into_thin(a: Self) -> ThinArc<H, T> {
        let a = ManuallyDrop::new(a);
        assert_eq!(
            a.header.length,
            a.slice.len(),
            "Length needs to be correct for ThinArc to work"
        );
        let fat_ptr: *mut ArcInner<HeaderSliceWithLength<H, [T]>> = a.ptr();
        // Discard the pointer metadata (the slice length) to get a thin pointer;
        // the length stays stored in the allocation's header.
        let thin_ptr = fat_ptr as *mut [usize] as *mut usize;
        ThinArc {
            ptr: unsafe {
                ptr::NonNull::new_unchecked(
                    thin_ptr as *mut ArcInner<HeaderSliceWithLength<H, [T; 0]>>,
                )
            },
            phantom: PhantomData,
        }
    }

    /// Converts a `ThinArc` into an `Arc`. This consumes the `ThinArc`, so the refcount
    /// is not modified.
    #[inline]
    pub fn from_thin(a: ThinArc<H, T>) -> Self {
        let a = ManuallyDrop::new(a);
        let ptr = thin_to_thick(a.ptr.as_ptr());
        unsafe {
            Arc {
                p: ptr::NonNull::new_unchecked(ptr),
                phantom: PhantomData,
            }
        }
    }
}

impl<H: PartialEq, T: PartialEq> PartialEq for ThinArc<H, T> {
    #[inline]
    fn eq(&self, other: &ThinArc<H, T>) -> bool {
        ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| *a == *b))
    }
}

impl<H: Eq, T: Eq> Eq for ThinArc<H, T> {}

impl<H: Hash, T: Hash> Hash for ThinArc<H, T> {
    fn hash<HSR: Hasher>(&self, state: &mut HSR) {
        ThinArc::with_arc(self, |a| a.hash(state))
    }
}

impl<H: fmt::Debug, T: fmt::Debug> fmt::Debug for ThinArc<H, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<H, T> fmt::Pointer for ThinArc<H, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Pointer::fmt(&self.ptr(), f)
    }
}

#[cfg(test)]
mod tests {
    use crate::{Arc, HeaderWithLength, ThinArc};
    use alloc::vec;
    use core::clone::Clone;
    use core::ops::Drop;
    use core::sync::atomic;
    use core::sync::atomic::Ordering::{Acquire, SeqCst};

    #[derive(PartialEq)]
    struct Canary(*mut atomic::AtomicUsize);

    impl Drop for Canary {
        fn drop(&mut self) {
            unsafe {
                (*self.0).fetch_add(1, SeqCst);
            }
        }
    }

    #[test]
    fn empty_thin() {
        let header = HeaderWithLength::new(100u32, 0);
        let x = Arc::from_header_and_iter(header, core::iter::empty::<i32>());
        let y = Arc::into_thin(x.clone());
        assert_eq!(y.header.header, 100);
        assert!(y.slice.is_empty());
        assert_eq!(x.header.header, 100);
        assert!(x.slice.is_empty());
    }

    #[test]
    fn thin_assert_padding() {
        #[derive(Clone, Default)]
        #[repr(C)]
        struct Padded {
            i: u16,
        }

        // The header will have more alignment than `Padded`.
        let header = HeaderWithLength::new(0i32, 2);
        let items = vec![Padded { i: 0xdead }, Padded { i: 0xbeef }];
        let a = ThinArc::from_header_and_iter(header, items.into_iter());
        assert_eq!(a.slice.len(), 2);
        assert_eq!(a.slice[0].i, 0xdead);
        assert_eq!(a.slice[1].i, 0xbeef);
    }
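    // A minimal illustrative check of the "thin pointer" property documented on
    // `ThinArc`: the thin handle is a single word on the stack, while the
    // equivalent fat `Arc<HeaderSliceWithLength<H, [T]>>` also carries the slice
    // length alongside its pointer. The test name is illustrative, and it assumes
    // `HeaderSliceWithLength` is reachable via `crate::`, like the other imports
    // in this module.
    #[test]
    fn thin_is_pointer_sized() {
        use crate::HeaderSliceWithLength;
        use core::mem::size_of;

        // `ThinArc` is `#[repr(transparent)]` over a `NonNull` to a sized type,
        // so it is exactly one pointer wide.
        assert_eq!(size_of::<ThinArc<u32, u8>>(), size_of::<*const ()>());

        // The fat `Arc` to the slice-carrying payload is a (pointer, length) pair.
        assert_eq!(
            size_of::<Arc<HeaderSliceWithLength<u32, [u8]>>>(),
            2 * size_of::<*const ()>()
        );
    }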
    #[test]
    #[allow(clippy::redundant_clone, clippy::eq_op)]
    fn slices_and_thin() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            let x = Arc::into_thin(Arc::from_header_and_slice(header, &v));
            let y = ThinArc::with_arc(&x, |q| q.clone());
            let _ = y.clone();
            let _ = x == x;
            Arc::from_thin(x.clone());
        }
        assert_eq!(canary.load(Acquire), 1);
    }

    #[test]
    #[allow(clippy::redundant_clone, clippy::eq_op)]
    fn iter_and_thin() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            let x = Arc::into_thin(Arc::from_header_and_iter(header, v.into_iter()));
            let y = ThinArc::with_arc(&x, |q| q.clone());
            let _ = y.clone();
            let _ = x == x;
            Arc::from_thin(x.clone());
        }
        assert_eq!(canary.load(Acquire), 1);
    }

    #[test]
    fn into_raw_and_from_raw() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            type ThinArcCanary = ThinArc<Canary, u32>;
            let x: ThinArcCanary =
                Arc::into_thin(Arc::from_header_and_iter(header, v.into_iter()));
            let ptr = x.as_ptr();

            assert_eq!(x.into_raw(), ptr);

            let _x = unsafe { ThinArcCanary::from_raw(ptr) };
        }
        assert_eq!(canary.load(Acquire), 1);
    }
}
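
// A small usage sketch of `with_arc`, `Deref`, and the raw-pointer API defined in
// this file. The module and test names (`usage_sketch`, `with_arc_and_raw_roundtrip`)
// and the concrete `u32`/`u64` parameters are illustrative only.
#[cfg(test)]
mod usage_sketch {
    use crate::ThinArc;

    #[test]
    fn with_arc_and_raw_roundtrip() {
        // Header is a `u32`, slice elements are `u64`s; the slice is copied in.
        let a = ThinArc::from_header_and_slice(7u32, &[10u64, 20, 30]);

        // `with_arc` lends out a transient fat `Arc` without touching the refcount;
        // the closure's result is forwarded back to the caller.
        let sum = a.with_arc(|fat| fat.slice.iter().sum::<u64>());
        assert_eq!(sum, 60);

        // `Deref` exposes the same header and slice directly on the thin handle.
        assert_eq!(a.header.header, 7);
        assert_eq!(&a.slice[..], &[10, 20, 30]);

        // `into_raw`/`from_raw` round-trip through a single `*const c_void`,
        // which is the point of the thin representation for FFI.
        let raw = a.into_raw();
        let b = unsafe { ThinArc::<u32, u64>::from_raw(raw) };
        assert_eq!(b.header.header, 7);
    }
}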