Vendor things

This commit is contained in:
John Doty 2024-03-08 11:03:01 -08:00
parent 5deceec006
commit 977e3c17e5
19434 changed files with 10682014 additions and 0 deletions

1039
third-party/vendor/triomphe/src/arc.rs vendored Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,118 @@
use core::mem;
use core::mem::ManuallyDrop;
use core::ops::Deref;
use core::ptr;
use super::Arc;
/// A "borrowed `Arc`". This is a pointer to
/// a T that is known to have been allocated within an
/// `Arc`.
///
/// This is equivalent in guarantees to `&Arc<T>`, however it is
/// a bit more flexible. To obtain an `&Arc<T>` you must have
/// an `Arc<T>` instance somewhere pinned down until we're done with it.
/// It's also a direct pointer to `T`, so using this involves less pointer-chasing
///
/// However, C++ code may hand us refcounted things as pointers to T directly,
/// so we have to conjure up a temporary `Arc` on the stack each time. The
/// same happens for when the object is managed by a `OffsetArc`.
///
/// `ArcBorrow` lets us deal with borrows of known-refcounted objects
/// without needing to worry about where the `Arc<T>` is.
#[derive(Debug, Eq, PartialEq)]
#[repr(transparent)]
pub struct ArcBorrow<'a, T: ?Sized + 'a>(pub(crate) &'a T);
impl<'a, T> Copy for ArcBorrow<'a, T> {}
impl<'a, T> Clone for ArcBorrow<'a, T> {
#[inline]
fn clone(&self) -> Self {
*self
}
}
impl<'a, T> ArcBorrow<'a, T> {
    /// Clone this as an `Arc<T>`. This bumps the refcount.
    #[inline]
    pub fn clone_arc(&self) -> Arc<T> {
        // Safety (type invariant): `self.0` points into a live Arc allocation.
        let arc = unsafe { Arc::from_raw(self.0) };
        // addref it!
        // `arc.clone()` bumps the refcount; forgetting the clone leaves that
        // +1 to back the `arc` we return, so the borrow stays balanced.
        mem::forget(arc.clone());
        arc
    }

    /// For constructing from a reference known to be Arc-backed,
    /// e.g. if we obtain such a reference over FFI
    /// TODO: should from_ref be relaxed to unsized types? It can't be
    /// converted back to an Arc right now for unsized types.
    /// # Safety
    /// - The reference to `T` must have come from a Triomphe Arc, UniqueArc, or ArcBorrow.
    #[inline]
    pub unsafe fn from_ref(r: &'a T) -> Self {
        ArcBorrow(r)
    }

    /// Compare two `ArcBorrow`s via pointer equality. Will only return
    /// true if they come from the same allocation
    #[inline]
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        // Compares data addresses only; the values themselves are not read.
        ptr::eq(this.0 as *const T, other.0 as *const T)
    }

    /// Temporarily converts |self| into a bonafide Arc and exposes it to the
    /// provided callback. The refcount is not modified.
    #[inline]
    pub fn with_arc<F, U>(&self, f: F) -> U
    where
        F: FnOnce(&Arc<T>) -> U,
        T: 'static,
    {
        // Synthesize transient Arc, which never touches the refcount.
        // ManuallyDrop guarantees the transient Arc's Drop never runs, so the
        // count is neither incremented on entry nor decremented on exit.
        let transient = unsafe { ManuallyDrop::new(Arc::from_raw(self.0)) };
        // Expose the transient Arc to the callback, which may clone it if it wants
        // and forward the result to the user
        f(&transient)
    }

    /// Similar to deref, but uses the lifetime |a| rather than the lifetime of
    /// self, which is incompatible with the signature of the Deref trait.
    #[inline]
    pub fn get(&self) -> &'a T {
        self.0
    }
}
impl<'a, T> Deref for ArcBorrow<'a, T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
self.0
}
}
// Safety:
// This implementation must guarantee that it is sound to call replace_ptr with an unsized variant
// of the pointer returned in `as_sized_ptr`. We leverage unsizing the contained reference. This
// continues to point to the data of an ArcInner. The reference count remains untouched which is
// correct since the number of owners did not change. This implies the returned instance fulfills
// its safety invariants.
#[cfg(feature = "unsize")]
unsafe impl<'lt, T: 'lt, U: ?Sized + 'lt> unsize::CoerciblePtr<U> for ArcBorrow<'lt, T> {
    type Pointee = T;
    type Output = ArcBorrow<'lt, U>;

    fn as_sized_ptr(&mut self) -> *mut T {
        // Returns a pointer to the inner data. We do not need to care about any particular
        // provenance here, only the pointer value, which we need to reconstruct the new pointer.
        self.0 as *const T as *mut T
    }

    unsafe fn replace_ptr(self, new: *mut U) -> ArcBorrow<'lt, U> {
        // ManuallyDrop is not strictly required (`ArcBorrow` is `Copy` and has
        // no Drop), but it documents that `self` must not be released here.
        let inner = ManuallyDrop::new(self);
        // Safety: backed by the same Arc that backed `self`.
        ArcBorrow(inner.0.replace_ptr(new))
    }
}

View file

@ -0,0 +1,42 @@
use arc_swap::RefCnt;
use crate::{Arc, ThinArc};
use core::ffi::c_void;
// Safety: `ThinArc` is a refcounted pointer type; the conversions below
// round-trip the same raw pointer and never touch the reference count,
// which is what `arc_swap::RefCnt` requires.
unsafe impl<H, T> RefCnt for ThinArc<H, T> {
    // A `ThinArc` is a single thin pointer, so an opaque `c_void` suffices.
    type Base = c_void;

    /// Relinquishes ownership into a raw pointer (refcount unchanged).
    #[inline]
    fn into_ptr(me: Self) -> *mut Self::Base {
        ThinArc::into_raw(me) as *mut _
    }

    /// Peeks at the raw pointer without consuming or cloning.
    #[inline]
    fn as_ptr(me: &Self) -> *mut Self::Base {
        ThinArc::as_ptr(me) as *mut _
    }

    /// Reclaims ownership from a pointer produced by `into_ptr`.
    #[inline]
    unsafe fn from_ptr(ptr: *const Self::Base) -> Self {
        ThinArc::from_raw(ptr)
    }
}

// Safety: same reasoning as above; `Arc`'s base is the payload `T` itself,
// matching the `*const T` that `Arc::into_raw`/`from_raw` trade in.
unsafe impl<T> RefCnt for Arc<T> {
    type Base = T;

    /// Relinquishes ownership into a raw pointer (refcount unchanged).
    #[inline]
    fn into_ptr(me: Self) -> *mut Self::Base {
        Arc::into_raw(me) as *mut _
    }

    /// Peeks at the raw pointer without consuming or cloning.
    #[inline]
    fn as_ptr(me: &Self) -> *mut Self::Base {
        Arc::as_ptr(me) as *mut _
    }

    /// Reclaims ownership from a pointer produced by `into_ptr`.
    #[inline]
    unsafe fn from_ptr(ptr: *const Self::Base) -> Self {
        Arc::from_raw(ptr)
    }
}

View file

@ -0,0 +1,139 @@
use core::fmt;
use core::marker::PhantomData;
use core::ptr;
use core::usize;
use super::{Arc, ArcBorrow};
/// A tagged union that can represent `Arc<A>` or `Arc<B>` while only consuming a
/// single word. The type is also `NonNull`, and thus can be stored in an Option
/// without increasing size.
///
/// This is functionally equivalent to
/// `enum ArcUnion<A, B> { First(Arc<A>), Second(Arc<B>) }` but only takes
/// up a single word of stack space.
///
/// This could probably be extended to support four types if necessary.
pub struct ArcUnion<A, B> {
    // Tagged pointer: bit 0 clear means `Arc<A>`, bit 0 set means `Arc<B>`
    // (see `is_first`/`borrow`). The untagged value is the Arc's data pointer.
    p: ptr::NonNull<()>,
    phantom_a: PhantomData<A>,
    phantom_b: PhantomData<B>,
}

// Safety: an ArcUnion owns a refcounted allocation of either A or B, so it
// requires the same `Send + Sync` bounds on both payload types that an
// `Arc` of either payload would.
unsafe impl<A: Sync + Send, B: Send + Sync> Send for ArcUnion<A, B> {}
unsafe impl<A: Sync + Send, B: Send + Sync> Sync for ArcUnion<A, B> {}
impl<A: PartialEq, B: PartialEq> PartialEq for ArcUnion<A, B> {
fn eq(&self, other: &Self) -> bool {
use crate::ArcUnionBorrow::*;
match (self.borrow(), other.borrow()) {
(First(x), First(y)) => x == y,
(Second(x), Second(y)) => x == y,
(_, _) => false,
}
}
}
/// This represents a borrow of an `ArcUnion`.
#[derive(Debug)]
pub enum ArcUnionBorrow<'a, A: 'a, B: 'a> {
    /// The union holds an `Arc<A>`.
    First(ArcBorrow<'a, A>),
    /// The union holds an `Arc<B>`.
    Second(ArcBorrow<'a, B>),
}
impl<A, B> ArcUnion<A, B> {
    /// Wraps a (possibly tagged) raw pointer. `ptr` must be the data pointer
    /// of a live `Arc<A>` (tag bit clear) or `Arc<B>` (tag bit set); ownership
    /// of that Arc transfers to the new `ArcUnion`.
    unsafe fn new(ptr: *mut ()) -> Self {
        ArcUnion {
            p: ptr::NonNull::new_unchecked(ptr),
            phantom_a: PhantomData,
            phantom_b: PhantomData,
        }
    }

    /// Returns true if the two values are pointer-equal.
    #[inline]
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        // Tag bits are included, so pointers of different variants never match.
        this.p == other.p
    }

    /// Returns an enum representing a borrow of either A or B.
    pub fn borrow(&self) -> ArcUnionBorrow<A, B> {
        if self.is_first() {
            // Tag bit clear: the pointer is already the Arc<A> data pointer.
            let ptr = self.p.as_ptr() as *const A;
            let borrow = unsafe { ArcBorrow::from_ref(&*ptr) };
            ArcUnionBorrow::First(borrow)
        } else {
            // Strip the tag bit to recover the Arc<B> data pointer.
            let ptr = ((self.p.as_ptr() as usize) & !0x1) as *const B;
            let borrow = unsafe { ArcBorrow::from_ref(&*ptr) };
            ArcUnionBorrow::Second(borrow)
        }
    }

    /// Creates an `ArcUnion` from an instance of the first type.
    #[inline]
    pub fn from_first(other: Arc<A>) -> Self {
        unsafe { Self::new(Arc::into_raw(other) as *mut _) }
    }

    /// Creates an `ArcUnion` from an instance of the second type.
    #[inline]
    pub fn from_second(other: Arc<B>) -> Self {
        // NOTE(review): setting bit 0 as the tag assumes Arc data pointers
        // are always at least 2-aligned — confirm for 1-byte-aligned payloads.
        unsafe { Self::new(((Arc::into_raw(other) as usize) | 0x1) as *mut _) }
    }

    /// Returns true if this `ArcUnion` contains the first type.
    #[inline]
    pub fn is_first(&self) -> bool {
        self.p.as_ptr() as usize & 0x1 == 0
    }

    /// Returns true if this `ArcUnion` contains the second type.
    #[inline]
    pub fn is_second(&self) -> bool {
        !self.is_first()
    }

    /// Returns a borrow of the first type if applicable, otherwise `None`.
    pub fn as_first(&self) -> Option<ArcBorrow<A>> {
        match self.borrow() {
            ArcUnionBorrow::First(x) => Some(x),
            ArcUnionBorrow::Second(_) => None,
        }
    }

    /// Returns a borrow of the second type if applicable, otherwise None.
    pub fn as_second(&self) -> Option<ArcBorrow<B>> {
        match self.borrow() {
            ArcUnionBorrow::First(_) => None,
            ArcUnionBorrow::Second(x) => Some(x),
        }
    }
}
impl<A, B> Clone for ArcUnion<A, B> {
    /// Bumps the refcount of whichever variant is held and returns a new
    /// union carrying the same tag.
    fn clone(&self) -> Self {
        match self.borrow() {
            ArcUnionBorrow::First(borrowed) => Self::from_first(borrowed.clone_arc()),
            ArcUnionBorrow::Second(borrowed) => Self::from_second(borrowed.clone_arc()),
        }
    }
}
impl<A, B> Drop for ArcUnion<A, B> {
    fn drop(&mut self) {
        // `borrow()` already strips the tag bit, so `&*x` is the Arc's data
        // pointer; reconstructing the owning Arc and dropping it immediately
        // releases our reference.
        match self.borrow() {
            ArcUnionBorrow::First(x) => unsafe {
                let _ = Arc::from_raw(&*x);
            },
            ArcUnionBorrow::Second(x) => unsafe {
                let _ = Arc::from_raw(&*x);
            },
        }
    }
}
impl<A: fmt::Debug, B: fmt::Debug> fmt::Debug for ArcUnion<A, B> {
    /// Formats the borrowed view, so output shows the active variant.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let view = self.borrow();
        fmt::Debug::fmt(&view, f)
    }
}

View file

@ -0,0 +1,391 @@
use alloc::alloc::Layout;
use alloc::boxed::Box;
use alloc::string::String;
use alloc::vec::Vec;
use core::cmp::Ordering;
use core::iter::{ExactSizeIterator, Iterator};
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop};
use core::ptr::{self, addr_of_mut};
use core::usize;
use super::{Arc, ArcInner};
/// Structure to allow Arc-managing some fixed-sized data and a variably-sized
/// slice in a single allocation.
///
/// `repr(C)` pins `header` at the start, followed by `slice` — the layout
/// punning done elsewhere (e.g. `from_header_and_str`) relies on this.
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
#[repr(C)]
pub struct HeaderSlice<H, T: ?Sized> {
    /// The fixed-sized data.
    pub header: H,
    /// The dynamically-sized data.
    pub slice: T,
}
impl<H, T> Arc<HeaderSlice<H, [T]>> {
    /// Creates an Arc for a HeaderSlice using the given header struct and
    /// iterator to generate the slice. The resulting Arc will be fat.
    ///
    /// # Panics
    /// Panics if `T` is a zero-sized type, or if `items` yields fewer or
    /// more elements than `items.len()` reported.
    pub fn from_header_and_iter<I>(header: H, mut items: I) -> Self
    where
        I: Iterator<Item = T> + ExactSizeIterator,
    {
        assert_ne!(mem::size_of::<T>(), 0, "Need to think about ZST");
        let num_items = items.len();
        let inner = Arc::allocate_for_header_and_slice(num_items);
        unsafe {
            // Write the data.
            //
            // Note that any panics here (i.e. from the iterator) are safe, since
            // we'll just leak the uninitialized memory.
            ptr::write(&mut ((*inner.as_ptr()).data.header), header);
            if num_items != 0 {
                let mut current = (*inner.as_ptr()).data.slice.as_mut_ptr();
                for _ in 0..num_items {
                    ptr::write(
                        current,
                        items
                            .next()
                            .expect("ExactSizeIterator over-reported length"),
                    );
                    current = current.offset(1);
                }
            }
            // Fix: check exhaustion exactly once, after the loop. The vendored
            // copy duplicated this assertion inside the `num_items != 0`
            // branch, calling `next()` twice past the end — `Iterator` only
            // guarantees repeated `None` for `FusedIterator`, so the duplicate
            // call was both redundant and potentially panicky.
            assert!(
                items.next().is_none(),
                "ExactSizeIterator under-reported length"
            );
        }
        // Safety: ptr is valid & the inner structure is fully initialized
        Arc {
            p: inner,
            phantom: PhantomData,
        }
    }

    /// Creates an Arc for a HeaderSlice using the given header struct and
    /// a slice to copy. The resulting Arc will be fat.
    ///
    /// # Panics
    /// Panics if `T` is a zero-sized type.
    pub fn from_header_and_slice(header: H, items: &[T]) -> Self
    where
        T: Copy,
    {
        assert_ne!(mem::size_of::<T>(), 0, "Need to think about ZST");
        let num_items = items.len();
        let inner = Arc::allocate_for_header_and_slice(num_items);
        unsafe {
            // Write the data.
            ptr::write(&mut ((*inner.as_ptr()).data.header), header);
            // `T: Copy`, so a raw byte copy initializes the slice correctly.
            let dst = (*inner.as_ptr()).data.slice.as_mut_ptr();
            ptr::copy_nonoverlapping(items.as_ptr(), dst, num_items);
        }
        // Safety: ptr is valid & the inner structure is fully initialized
        Arc {
            p: inner,
            phantom: PhantomData,
        }
    }

    /// Creates an Arc for a HeaderSlice using the given header struct and
    /// vec to generate the slice. The resulting Arc will be fat.
    pub fn from_header_and_vec(header: H, mut v: Vec<T>) -> Self {
        let len = v.len();
        let inner = Arc::allocate_for_header_and_slice(len);
        unsafe {
            // Safety: inner is a valid pointer, so this can't go out of bounds
            let dst = addr_of_mut!((*inner.as_ptr()).data.header);
            // Safety: `dst` is valid for writes (just allocated)
            ptr::write(dst, header);
        }
        unsafe {
            let src = v.as_mut_ptr();
            // Safety: inner is a valid pointer, so this can't go out of bounds
            let dst = addr_of_mut!((*inner.as_ptr()).data.slice) as *mut T;
            // Safety:
            // - `src` is valid for reads for `len` (got from `Vec`)
            // - `dst` is valid for writes for `len` (just allocated, with layout for appropriate slice)
            // - `src` and `dst` don't overlap (separate allocations)
            ptr::copy_nonoverlapping(src, dst, len);
            // Deallocate vec without dropping `T` (ownership of the elements
            // moved into the new allocation above)
            //
            // Safety: 0..0 elements are always initialized, 0 <= cap for any cap
            v.set_len(0);
        }
        // Safety: ptr is valid & the inner structure is fully initialized
        Arc {
            p: inner,
            phantom: PhantomData,
        }
    }
}
impl<H> Arc<HeaderSlice<H, str>> {
    /// Creates an Arc for a HeaderSlice using the given header struct and
    /// a str slice to generate the slice. The resulting Arc will be fat.
    pub fn from_header_and_str(header: H, string: &str) -> Self {
        // Build the allocation as raw bytes first, then reinterpret as `str`.
        let bytes = Arc::from_header_and_slice(header, string.as_bytes());
        // Safety: `ArcInner` and `HeaderSlice` are `repr(C)`, `str` has the same layout as `[u8]`,
        // thus it's ok to "transmute" between `Arc<HeaderSlice<H, [u8]>>` and `Arc<HeaderSlice<H, str>>`.
        //
        // `bytes` are a valid string since we've just got them from a valid `str`.
        unsafe { Arc::from_raw_inner(Arc::into_raw_inner(bytes) as _) }
    }
}
/// Header data with an inline length. Consumers that use HeaderWithLength as the
/// Header type in HeaderSlice can take advantage of ThinArc.
#[derive(Debug, Eq, PartialEq, Hash)]
#[repr(C)]
pub struct HeaderWithLength<H> {
    /// The fixed-sized data.
    pub header: H,
    /// The slice length.
    pub length: usize,
}

impl<H> HeaderWithLength<H> {
    /// Creates a new HeaderWithLength.
    ///
    /// `length` should describe the length of the accompanying slice;
    /// `Arc::into_thin` asserts this before creating a `ThinArc`.
    #[inline]
    pub fn new(header: H, length: usize) -> Self {
        HeaderWithLength { header, length }
    }
}
impl<T: ?Sized> From<Arc<HeaderSlice<(), T>>> for Arc<T> {
    /// Strips a zero-sized `()` header, reinterpreting the allocation as
    /// holding just `T`. No data is copied.
    fn from(this: Arc<HeaderSlice<(), T>>) -> Self {
        // A `()` header occupies no space, so both views share one layout.
        debug_assert_eq!(
            Layout::for_value::<HeaderSlice<(), T>>(&this),
            Layout::for_value::<T>(&this.slice)
        );
        // Safety: `HeaderSlice<(), T>` and `T` has the same layout
        unsafe { Arc::from_raw_inner(Arc::into_raw_inner(this) as _) }
    }
}

impl<T: ?Sized> From<Arc<T>> for Arc<HeaderSlice<(), T>> {
    /// Inverse of the conversion above: views a plain `Arc<T>` as carrying an
    /// empty `()` header. No data is copied.
    fn from(this: Arc<T>) -> Self {
        // Safety: `T` and `HeaderSlice<(), T>` has the same layout
        unsafe { Arc::from_raw_inner(Arc::into_raw_inner(this) as _) }
    }
}
impl<T: Copy> From<&[T]> for Arc<[T]> {
    /// Copies `slice` into a fresh reference-counted slice allocation.
    fn from(slice: &[T]) -> Self {
        let with_header = Arc::from_header_and_slice((), slice);
        with_header.into()
    }
}

impl From<&str> for Arc<str> {
    /// Copies `s` into a fresh reference-counted string allocation.
    fn from(s: &str) -> Self {
        let with_header = Arc::from_header_and_str((), s);
        with_header.into()
    }
}

impl From<String> for Arc<str> {
    /// Copies the string's bytes into a reference-counted allocation.
    fn from(s: String) -> Self {
        Self::from(s.as_str())
    }
}
// FIXME: once `pointer::with_metadata_of` is stable or
// implementable on stable without assuming ptr layout
// this will be able to accept `T: ?Sized`.
impl<T> From<Box<T>> for Arc<T> {
    /// Moves the boxed value into a fresh refcounted allocation, then frees
    /// the box's memory without running `T`'s destructor (ownership moved).
    fn from(b: Box<T>) -> Self {
        let layout = Layout::for_value::<T>(&b);
        // Safety: the closure only changes the type of the pointer
        let inner = unsafe { Self::allocate_for_layout(layout, |mem| mem as *mut ArcInner<T>) };
        unsafe {
            let src = Box::into_raw(b);
            // Safety: inner is a valid pointer, so this can't go out of bounds
            let dst = addr_of_mut!((*inner.as_ptr()).data);
            // Safety:
            // - `src` is valid for reads (got from `Box`)
            // - `dst` is valid for writes (just allocated)
            // - `src` and `dst` don't overlap (separate allocations)
            ptr::copy_nonoverlapping(src, dst, 1);
            // Deallocate box without dropping `T`
            //
            // Safety:
            // - `src` has been got from `Box::into_raw`
            // - `ManuallyDrop<T>` is guaranteed to have the same layout as `T`
            drop(Box::<ManuallyDrop<T>>::from_raw(src as _));
        }
        // Safety: the inner structure is fully initialized above.
        Arc {
            p: inner,
            phantom: PhantomData,
        }
    }
}
impl<T> From<Vec<T>> for Arc<[T]> {
    /// Takes ownership of the vector's elements without cloning them.
    fn from(v: Vec<T>) -> Self {
        let with_header = Arc::from_header_and_vec((), v);
        with_header.into()
    }
}

/// A `HeaderSlice` whose header additionally records the slice length inline.
pub(crate) type HeaderSliceWithLength<H, T> = HeaderSlice<HeaderWithLength<H>, T>;

impl<H: PartialOrd, T: ?Sized + PartialOrd> PartialOrd for HeaderSliceWithLength<H, T> {
    /// Compares the user header first, then the slice; the stored length is
    /// not compared directly (it is implied by the slice itself).
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        let lhs = (&self.header.header, &self.slice);
        let rhs = (&other.header.header, &other.slice);
        lhs.partial_cmp(&rhs)
    }
}

impl<H: Ord, T: ?Sized + Ord> Ord for HeaderSliceWithLength<H, T> {
    /// Total-order counterpart of `partial_cmp` above.
    fn cmp(&self, other: &Self) -> Ordering {
        let lhs = (&self.header.header, &self.slice);
        let rhs = (&other.header.header, &other.slice);
        lhs.cmp(&rhs)
    }
}
#[cfg(test)]
mod tests {
    use alloc::boxed::Box;
    use alloc::string::String;
    use alloc::vec;
    use core::iter;
    use crate::{Arc, HeaderSlice};

    // Each `from_header_and_*` constructor is exercised with a populated
    // slice and with an empty one.
    #[test]
    fn from_header_and_iter_smoke() {
        let arc = Arc::from_header_and_iter(
            (42u32, 17u8),
            IntoIterator::into_iter([1u16, 2, 3, 4, 5, 6, 7]),
        );
        assert_eq!(arc.header, (42, 17));
        assert_eq!(arc.slice, [1, 2, 3, 4, 5, 6, 7]);
    }

    #[test]
    fn from_header_and_slice_smoke() {
        let arc = Arc::from_header_and_slice((42u32, 17u8), &[1u16, 2, 3, 4, 5, 6, 7]);
        assert_eq!(arc.header, (42, 17));
        assert_eq!(arc.slice, [1u16, 2, 3, 4, 5, 6, 7]);
    }

    #[test]
    fn from_header_and_vec_smoke() {
        let arc = Arc::from_header_and_vec((42u32, 17u8), vec![1u16, 2, 3, 4, 5, 6, 7]);
        assert_eq!(arc.header, (42, 17));
        assert_eq!(arc.slice, [1u16, 2, 3, 4, 5, 6, 7]);
    }

    #[test]
    fn from_header_and_iter_empty() {
        let arc = Arc::from_header_and_iter((42u32, 17u8), iter::empty::<u16>());
        assert_eq!(arc.header, (42, 17));
        assert_eq!(arc.slice, []);
    }

    #[test]
    fn from_header_and_slice_empty() {
        let arc = Arc::from_header_and_slice((42u32, 17u8), &[1u16; 0]);
        assert_eq!(arc.header, (42, 17));
        assert_eq!(arc.slice, []);
    }

    #[test]
    fn from_header_and_vec_empty() {
        let arc = Arc::from_header_and_vec((42u32, 17u8), vec![1u16; 0]);
        assert_eq!(arc.header, (42, 17));
        assert_eq!(arc.slice, []);
    }

    // NOTE(review): regression tests named after upstream triomphe issue #13;
    // the original failure mode isn't visible from this file.
    #[test]
    fn issue_13_empty() {
        crate::Arc::from_header_and_iter((), iter::empty::<usize>());
    }

    #[test]
    fn issue_13_consumption() {
        let s: &[u8] = &[0u8; 255];
        crate::Arc::from_header_and_iter((), s.iter().copied());
    }

    #[test]
    fn from_header_and_str_smoke() {
        let a = Arc::from_header_and_str(
            42,
            "The answer to the ultimate question of life, the universe, and everything",
        );
        assert_eq!(a.header, 42);
        assert_eq!(
            &a.slice,
            "The answer to the ultimate question of life, the universe, and everything"
        );
        let empty = Arc::from_header_and_str((), "");
        assert_eq!(empty.header, ());
        assert_eq!(&empty.slice, "");
    }

    // Round-trips a headered Arc through `Arc<[u32]>` and back, checking that
    // the `()`-header conversions preserve the contents.
    #[test]
    fn erase_and_create_from_thin_air_header() {
        let a: Arc<HeaderSlice<(), [u32]>> = Arc::from_header_and_slice((), &[12, 17, 16]);
        let b: Arc<[u32]> = a.into();
        assert_eq!(&*b, [12, 17, 16]);
        let c: Arc<HeaderSlice<(), [u32]>> = b.into();
        assert_eq!(&c.slice, [12, 17, 16]);
        assert_eq!(c.header, ());
    }

    // Covers `From<Box<T>>` and `From<Vec<T>>`, including a vec with spare
    // capacity (reserve) to check the length/capacity handling.
    #[test]
    fn from_box_and_vec() {
        let b = Box::new(String::from("xxx"));
        let b = Arc::<String>::from(b);
        assert_eq!(&*b, "xxx");
        let v = vec![String::from("1"), String::from("2"), String::from("3")];
        let v = Arc::<[_]>::from(v);
        assert_eq!(
            &*v,
            [String::from("1"), String::from("2"), String::from("3")]
        );
        let mut v = vec![String::from("1"), String::from("2"), String::from("3")];
        v.reserve(10);
        let v = Arc::<[_]>::from(v);
        assert_eq!(
            &*v,
            [String::from("1"), String::from("2"), String::from("3")]
        );
    }
}

View file

@ -0,0 +1,47 @@
/// Wrap an iterator and implement `ExactSizeIterator`
/// assuming the underlying iterator reports lower bound equal to upper bound.
///
/// It does not check the size is reported correctly (except in debug mode).
pub(crate) struct IteratorAsExactSizeIterator<I> {
    inner: I,
}

impl<I: Iterator> IteratorAsExactSizeIterator<I> {
    /// Wraps `iter`, debug-asserting that its size hint is exact.
    #[inline]
    pub(crate) fn new(iter: I) -> Self {
        let (lower, upper) = iter.size_hint();
        debug_assert_eq!(
            Some(lower),
            upper,
            "IteratorAsExactSizeIterator requires size hint lower == upper"
        );
        IteratorAsExactSizeIterator { inner: iter }
    }
}

impl<I: Iterator> Iterator for IteratorAsExactSizeIterator<I> {
    type Item = I::Item;

    /// Forwards directly to the wrapped iterator.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }

    /// Forwards the wrapped iterator's size hint unchanged.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<I: Iterator> ExactSizeIterator for IteratorAsExactSizeIterator<I> {
    /// Reports the size-hint lower bound, debug-asserting it is exact.
    #[inline]
    fn len(&self) -> usize {
        let (lower, upper) = self.inner.size_hint();
        debug_assert_eq!(
            Some(lower),
            upper,
            "IteratorAsExactSizeIterator requires size hint lower == upper"
        );
        lower
    }
}

95
third-party/vendor/triomphe/src/lib.rs vendored Normal file
View file

@ -0,0 +1,95 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Fork of Arc. This has the following advantages over std::sync::Arc:
//!
//! * `triomphe::Arc` doesn't support weak references: we save space by excluding the weak reference count, and we don't do extra read-modify-update operations to handle the possibility of weak references.
//! * `triomphe::UniqueArc` allows one to construct a temporarily-mutable `Arc` which can be converted to a regular `triomphe::Arc` later
//! * `triomphe::OffsetArc` can be used transparently from C++ code and is compatible with (and can be converted to/from) `triomphe::Arc`
//! * `triomphe::ArcBorrow` is functionally similar to `&triomphe::Arc<T>`, however in memory it's simply `&T`. This makes it more flexible for FFI; the source of the borrow need not be an `Arc` pinned on the stack (and can instead be a pointer from C++, or an `OffsetArc`). Additionally, this helps avoid pointer-chasing.
//! * `triomphe::Arc` can be constructed for dynamically-sized types via `from_header_and_iter`
//! * `triomphe::ThinArc` provides thin-pointer `Arc`s to dynamically sized types
//! * `triomphe::ArcUnion` is a union of two `triomphe::Arc`s which fits inside one word of memory
#![allow(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
extern crate alloc;
#[cfg(feature = "std")]
extern crate core;
#[cfg(feature = "arc-swap")]
extern crate arc_swap;
#[cfg(feature = "serde")]
extern crate serde;
#[cfg(feature = "stable_deref_trait")]
extern crate stable_deref_trait;
#[cfg(feature = "unsize")]
extern crate unsize;
/// Calculates the offset of the specified field from the start of the named struct.
/// This macro is impossible to be const until feature(const_ptr_offset_from) is stable.
macro_rules! offset_of {
    ($ty: path, $field: tt) => {{
        // ensure the type is a named struct
        // ensure the field exists and is accessible
        let $ty { $field: _, .. };
        // Compute the field address from an uninitialized dummy value; the
        // memory is never read, only its address arithmetic is used.
        let uninit = <::core::mem::MaybeUninit<$ty>>::uninit(); // const since 1.36
        let base_ptr: *const $ty = uninit.as_ptr(); // const since 1.59
        #[allow(unused_unsafe)]
        let field_ptr = unsafe { ::core::ptr::addr_of!((*base_ptr).$field) }; // since 1.51
        // // the const version requires feature(const_ptr_offset_from)
        // // https://github.com/rust-lang/rust/issues/92980
        // #[allow(unused_unsafe)]
        // unsafe { (field_ptr as *const u8).offset_from(base_ptr as *const u8) as usize }
        (field_ptr as usize) - (base_ptr as usize)
    }};
}
mod arc;
mod arc_borrow;
#[cfg(feature = "arc-swap")]
mod arc_swap_support;
mod arc_union;
mod header;
mod iterator_as_exact_size_iterator;
mod offset_arc;
mod thin_arc;
mod unique_arc;
pub use arc::*;
pub use arc_borrow::*;
pub use arc_union::*;
pub use header::*;
pub use offset_arc::*;
pub use thin_arc::*;
pub use unique_arc::*;
#[cfg(feature = "std")]
use std::process::abort;
// `no_std`-compatible abort by forcing a panic while already panicking.
#[cfg(not(feature = "std"))]
#[cold]
fn abort() -> ! {
    // Guard whose destructor panics: dropping it during the unwind below
    // produces a double panic, which terminates the process.
    struct DoublePanicGuard;
    impl Drop for DoublePanicGuard {
        fn drop(&mut self) {
            panic!() // second panic: aborts
        }
    }

    let _guard = DoublePanicGuard;
    panic!(); // first panic: starts unwinding, then `_guard` is dropped
}

View file

@ -0,0 +1,138 @@
use core::fmt;
use core::marker::PhantomData;
use core::mem::ManuallyDrop;
use core::ops::Deref;
use core::ptr;
use super::{Arc, ArcBorrow};
/// An `Arc`, except it holds a pointer to the T instead of to the
/// entire ArcInner.
///
/// An `OffsetArc<T>` has the same layout and ABI as a non-null
/// `const T*` in C, and may be used in FFI function signatures.
///
/// ```text
/// Arc<T> OffsetArc<T>
/// | |
/// v v
/// ---------------------
/// | RefCount | T (data) | [ArcInner<T>]
/// ---------------------
/// ```
///
/// This means that this is a direct pointer to
/// its contained data (and can be read from by both C++ and Rust),
/// but we can also convert it to a "regular" `Arc<T>` by removing the offset.
///
/// This is very useful if you have an Arc-containing struct shared between Rust and C++,
/// and wish for C++ to be able to read the data behind the `Arc` without incurring
/// an FFI call overhead.
#[derive(Eq)]
#[repr(transparent)]
pub struct OffsetArc<T> {
    // Points directly at the data field of a live ArcInner (one strong
    // reference is owned by this OffsetArc).
    pub(crate) ptr: ptr::NonNull<T>,
    pub(crate) phantom: PhantomData<T>,
}

// Safety: an OffsetArc owns a share of a refcounted `T`, so it uses the same
// `T: Sync + Send` bounds an `Arc<T>` would.
unsafe impl<T: Sync + Send> Send for OffsetArc<T> {}
unsafe impl<T: Sync + Send> Sync for OffsetArc<T> {}
impl<T> Deref for OffsetArc<T> {
    type Target = T;

    /// Borrows the pointed-to value directly; no offsetting is needed since
    /// the pointer already targets the data.
    #[inline]
    fn deref(&self) -> &Self::Target {
        // Safety: `ptr` points at live data for as long as `self` exists.
        unsafe { self.ptr.as_ref() }
    }
}

impl<T> Clone for OffsetArc<T> {
    /// Bumps the refcount and returns another `OffsetArc` to the same data.
    #[inline]
    fn clone(&self) -> Self {
        let owned = self.clone_arc();
        Arc::into_raw_offset(owned)
    }
}

impl<T> Drop for OffsetArc<T> {
    /// Reconstructs the owning `Arc` so its drop decrements the refcount.
    fn drop(&mut self) {
        let me = OffsetArc {
            ptr: self.ptr,
            phantom: PhantomData,
        };
        drop(Arc::from_raw_offset(me));
    }
}

impl<T: fmt::Debug> fmt::Debug for OffsetArc<T> {
    /// Formats the inner value, not the pointer.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let inner: &T = self;
        fmt::Debug::fmt(inner, f)
    }
}

impl<T: PartialEq> PartialEq for OffsetArc<T> {
    fn eq(&self, other: &OffsetArc<T>) -> bool {
        **self == **other
    }

    #[allow(clippy::partialeq_ne_impl)]
    fn ne(&self, other: &OffsetArc<T>) -> bool {
        **self != **other
    }
}
impl<T> OffsetArc<T> {
    /// Temporarily converts |self| into a bonafide Arc and exposes it to the
    /// provided callback. The refcount is not modified.
    #[inline]
    pub fn with_arc<F, U>(&self, f: F) -> U
    where
        F: FnOnce(&Arc<T>) -> U,
    {
        // Synthesize transient Arc, which never touches the refcount of the ArcInner.
        // ManuallyDrop keeps the transient Arc's Drop from ever running, so
        // the count is neither bumped on entry nor decremented on exit.
        let transient = unsafe { ManuallyDrop::new(Arc::from_raw(self.ptr.as_ptr())) };
        // Expose the transient Arc to the callback, which may clone it if it wants
        // and forward the result to the user
        f(&transient)
    }

    /// If uniquely owned, provide a mutable reference
    /// Else create a copy, and mutate that
    ///
    /// This is functionally the same thing as `Arc::make_mut`
    #[inline]
    pub fn make_mut(&mut self) -> &mut T
    where
        T: Clone,
    {
        unsafe {
            // extract the OffsetArc as an owned variable. This does not modify
            // the refcount and we should be careful to not drop `this`
            let this = ptr::read(self);
            // treat it as a real Arc, but wrapped in a ManuallyDrop
            // in case `Arc::make_mut()` panics in the clone impl
            let mut arc = ManuallyDrop::new(Arc::from_raw_offset(this));
            // obtain the mutable reference. Cast away the lifetime since
            // we have the right lifetime bounds in the parameters.
            // This may mutate `arc`.
            let ret = Arc::make_mut(&mut *arc) as *mut _;
            // Store the possibly-mutated arc back inside, after converting
            // it to a OffsetArc again. Release the ManuallyDrop.
            // This also does not modify the refcount or call drop on self
            ptr::write(self, Arc::into_raw_offset(ManuallyDrop::into_inner(arc)));
            &mut *ret
        }
    }

    /// Clone it as an `Arc`
    #[inline]
    pub fn clone_arc(&self) -> Arc<T> {
        // The transient Arc's clone() performs the refcount increment.
        OffsetArc::with_arc(self, |a| a.clone())
    }

    /// Produce a pointer to the data that can be converted back
    /// to an `Arc`
    #[inline]
    pub fn borrow_arc(&self) -> ArcBorrow<'_, T> {
        ArcBorrow(&**self)
    }
}

View file

@ -0,0 +1,451 @@
use core::cmp::Ordering;
use core::ffi::c_void;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::iter::{ExactSizeIterator, Iterator};
use core::marker::PhantomData;
use core::mem::ManuallyDrop;
use core::ops::Deref;
use core::ptr;
use core::usize;
use super::{Arc, ArcInner, HeaderSliceWithLength, HeaderWithLength};
/// A "thin" `Arc` containing dynamically sized data
///
/// This is functionally equivalent to `Arc<(H, [T])>`
///
/// When you create an `Arc` containing a dynamically sized type
/// like `HeaderSlice<H, [T]>`, the `Arc` is represented on the stack
/// as a "fat pointer", where the length of the slice is stored
/// alongside the `Arc`'s pointer. In some situations you may wish to
/// have a thin pointer instead, perhaps for FFI compatibility
/// or space efficiency.
///
/// Note that we use `[T; 0]` in order to have the right alignment for `T`.
///
/// `ThinArc` solves this by storing the length in the allocation itself,
/// via `HeaderSliceWithLength`.
#[repr(transparent)]
pub struct ThinArc<H, T> {
ptr: ptr::NonNull<ArcInner<HeaderSliceWithLength<H, [T; 0]>>>,
phantom: PhantomData<(H, T)>,
}
unsafe impl<H: Sync + Send, T: Sync + Send> Send for ThinArc<H, T> {}
unsafe impl<H: Sync + Send, T: Sync + Send> Sync for ThinArc<H, T> {}
// Synthesize a fat pointer from a thin pointer.
//
// See the comment around the analogous operation in from_header_and_iter.
#[inline]
fn thin_to_thick<H, T>(
thin: *mut ArcInner<HeaderSliceWithLength<H, [T; 0]>>,
) -> *mut ArcInner<HeaderSliceWithLength<H, [T]>> {
let len = unsafe { (*thin).data.header.length };
let fake_slice = ptr::slice_from_raw_parts_mut(thin as *mut T, len);
fake_slice as *mut ArcInner<HeaderSliceWithLength<H, [T]>>
}
impl<H, T> ThinArc<H, T> {
    /// Temporarily converts |self| into a bonafide Arc and exposes it to the
    /// provided callback. The refcount is not modified.
    #[inline]
    pub fn with_arc<F, U>(&self, f: F) -> U
    where
        F: FnOnce(&Arc<HeaderSliceWithLength<H, [T]>>) -> U,
    {
        // Synthesize transient Arc, which never touches the refcount of the ArcInner.
        // ManuallyDrop guarantees the transient Arc's Drop never runs, so the
        // count is neither bumped on entry nor decremented on exit.
        let transient = unsafe {
            ManuallyDrop::new(Arc {
                p: ptr::NonNull::new_unchecked(thin_to_thick(self.ptr.as_ptr())),
                phantom: PhantomData,
            })
        };
        // Expose the transient Arc to the callback, which may clone it if it wants
        // and forward the result to the user
        f(&transient)
    }

    /// Creates a `ThinArc` for a HeaderSlice using the given header struct and
    /// iterator to generate the slice.
    pub fn from_header_and_iter<I>(header: H, items: I) -> Self
    where
        I: Iterator<Item = T> + ExactSizeIterator,
    {
        // Record the length in the header so the fat pointer can be
        // reconstructed later (see `thin_to_thick`).
        let header = HeaderWithLength::new(header, items.len());
        Arc::into_thin(Arc::from_header_and_iter(header, items))
    }

    /// Creates a `ThinArc` for a HeaderSlice using the given header struct and
    /// a slice to copy.
    pub fn from_header_and_slice(header: H, items: &[T]) -> Self
    where
        T: Copy,
    {
        let header = HeaderWithLength::new(header, items.len());
        Arc::into_thin(Arc::from_header_and_slice(header, items))
    }

    /// Returns the address on the heap of the ThinArc itself -- not the T
    /// within it -- for memory reporting.
    #[inline]
    pub fn ptr(&self) -> *const c_void {
        self.ptr.as_ptr() as *const ArcInner<T> as *const c_void
    }

    /// Returns the address on the heap of the Arc itself -- not the T within it -- for memory
    /// reporting.
    #[inline]
    pub fn heap_ptr(&self) -> *const c_void {
        self.ptr()
    }

    /// # Safety
    ///
    /// Constructs an ThinArc from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// ThinArc::into_raw.
    ///
    /// The user of from_raw has to make sure a specific value of T is only dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned ThinArc is never accessed.
    #[inline]
    pub unsafe fn from_raw(ptr: *const c_void) -> Self {
        Self {
            ptr: ptr::NonNull::new_unchecked(ptr as *mut c_void).cast(),
            phantom: PhantomData,
        }
    }

    /// Consume ThinArc and returned the wrapped pointer.
    #[inline]
    pub fn into_raw(self) -> *const c_void {
        // ManuallyDrop suppresses our Drop, so ownership (and the refcount's
        // +1) moves out through the returned pointer.
        let this = ManuallyDrop::new(self);
        this.ptr.cast().as_ptr()
    }

    /// Provides a raw pointer to the data.
    /// The counts are not affected in any way and the ThinArc is not consumed.
    /// The pointer is valid for as long as there are strong counts in the ThinArc.
    #[inline]
    pub fn as_ptr(&self) -> *const c_void {
        self.ptr()
    }
}
impl<H, T> Deref for ThinArc<H, T> {
    type Target = HeaderSliceWithLength<H, [T]>;

    #[inline]
    fn deref(&self) -> &Self::Target {
        // Safety: `ptr` is a live allocation; `thin_to_thick` rebuilds the
        // fat pointer from the length stored in the header.
        unsafe { &(*thin_to_thick(self.ptr.as_ptr())).data }
    }
}

impl<H, T> Clone for ThinArc<H, T> {
    #[inline]
    fn clone(&self) -> Self {
        // Clone through a transient thick Arc (which bumps the refcount),
        // then convert the clone back to a thin pointer.
        ThinArc::with_arc(self, |a| {
            // Safety: `a` isn't mutable thus the header length remains valid
            unsafe { Arc::into_thin_unchecked(a.clone()) }
        })
    }
}

impl<H, T> Drop for ThinArc<H, T> {
    #[inline]
    fn drop(&mut self) {
        // Rebuild an owning thick Arc and let its Drop release our reference.
        let _ = Arc::from_thin(ThinArc {
            ptr: self.ptr,
            phantom: PhantomData,
        });
    }
}
impl<H, T> Arc<HeaderSliceWithLength<H, [T]>> {
    /// Converts an `Arc` into a `ThinArc`. This consumes the `Arc`, so the refcount
    /// is not modified.
    ///
    /// # Safety
    /// Assumes that the header length matches the slice length.
    #[inline]
    unsafe fn into_thin_unchecked(a: Self) -> ThinArc<H, T> {
        // ManuallyDrop suppresses the Arc's Drop: ownership (and its +1 on
        // the refcount) transfers to the returned ThinArc.
        let a = ManuallyDrop::new(a);
        debug_assert_eq!(
            a.header.length,
            a.slice.len(),
            "Length needs to be correct for ThinArc to work"
        );
        let fat_ptr: *mut ArcInner<HeaderSliceWithLength<H, [T]>> = a.ptr();
        // Discard the slice-length metadata, keeping only the data address;
        // `thin_to_thick` can recover it from the header later.
        let thin_ptr = fat_ptr as *mut [usize] as *mut usize;
        ThinArc {
            ptr: unsafe {
                ptr::NonNull::new_unchecked(
                    thin_ptr as *mut ArcInner<HeaderSliceWithLength<H, [T; 0]>>,
                )
            },
            phantom: PhantomData,
        }
    }

    /// Converts an `Arc` into a `ThinArc`. This consumes the `Arc`, so the refcount
    /// is not modified.
    ///
    /// # Panics
    /// Panics if the header's stored length disagrees with the slice length.
    #[inline]
    pub fn into_thin(a: Self) -> ThinArc<H, T> {
        assert_eq!(
            a.header.length,
            a.slice.len(),
            "Length needs to be correct for ThinArc to work"
        );
        unsafe { Self::into_thin_unchecked(a) }
    }

    /// Converts a `ThinArc` into an `Arc`. This consumes the `ThinArc`, so the refcount
    /// is not modified.
    #[inline]
    pub fn from_thin(a: ThinArc<H, T>) -> Self {
        // ManuallyDrop suppresses the ThinArc's Drop; ownership moves into
        // the returned Arc.
        let a = ManuallyDrop::new(a);
        let ptr = thin_to_thick(a.ptr.as_ptr());
        unsafe {
            Arc {
                p: ptr::NonNull::new_unchecked(ptr),
                phantom: PhantomData,
            }
        }
    }
}
impl<H: PartialEq, T: PartialEq> PartialEq for ThinArc<H, T> {
    /// Compares the pointed-to contents through temporary fat `Arc` views.
    #[inline]
    fn eq(&self, other: &ThinArc<H, T>) -> bool {
        ThinArc::with_arc(self, |lhs| ThinArc::with_arc(other, |rhs| *lhs == *rhs))
    }
}
impl<H: Eq, T: Eq> Eq for ThinArc<H, T> {}
impl<H: PartialOrd, T: PartialOrd> PartialOrd for ThinArc<H, T> {
    /// Partial ordering, delegated through temporary fat `Arc` views of the contents.
    #[inline]
    fn partial_cmp(&self, other: &ThinArc<H, T>) -> Option<Ordering> {
        ThinArc::with_arc(self, |lhs| {
            ThinArc::with_arc(other, |rhs| lhs.partial_cmp(rhs))
        })
    }
}
impl<H: Ord, T: Ord> Ord for ThinArc<H, T> {
    /// Total ordering, delegated through temporary fat `Arc` views of the contents.
    #[inline]
    fn cmp(&self, other: &ThinArc<H, T>) -> Ordering {
        ThinArc::with_arc(self, |lhs| {
            ThinArc::with_arc(other, |rhs| lhs.cmp(rhs))
        })
    }
}
impl<H: Hash, T: Hash> Hash for ThinArc<H, T> {
    /// Hashes by delegating to a temporary fat `Arc` view of the contents.
    fn hash<HSR: Hasher>(&self, state: &mut HSR) {
        ThinArc::with_arc(self, |inner| inner.hash(state))
    }
}
impl<H: fmt::Debug, T: fmt::Debug> fmt::Debug for ThinArc<H, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Debug-format the pointed-to header/slice, not the handle itself.
        let contents = &**self;
        fmt::Debug::fmt(contents, f)
    }
}
impl<H, T> fmt::Pointer for ThinArc<H, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Format the heap address of the backing allocation.
        let addr = self.ptr();
        fmt::Pointer::fmt(&addr, f)
    }
}
#[cfg(test)]
mod tests {
    use crate::{Arc, HeaderWithLength, ThinArc};
    use alloc::vec;
    use core::clone::Clone;
    use core::ops::Drop;
    use core::sync::atomic;
    use core::sync::atomic::Ordering::{Acquire, SeqCst};
    // Drop-counting probe: each drop bumps the pointed-to counter, letting
    // tests assert the payload was destroyed exactly once.
    #[derive(PartialEq)]
    struct Canary(*mut atomic::AtomicUsize);
    impl Drop for Canary {
        fn drop(&mut self) {
            unsafe {
                (*self.0).fetch_add(1, SeqCst);
            }
        }
    }
    // An Arc over an empty slice must survive the thin conversion with its
    // header intact, and the original fat Arc must be unaffected.
    #[test]
    fn empty_thin() {
        let header = HeaderWithLength::new(100u32, 0);
        let x = Arc::from_header_and_iter(header, core::iter::empty::<i32>());
        let y = Arc::into_thin(x.clone());
        assert_eq!(y.header.header, 100);
        assert!(y.slice.is_empty());
        assert_eq!(x.header.header, 100);
        assert!(x.slice.is_empty());
    }
    // Slice elements must be read back correctly even when the header has
    // stricter alignment than the element type.
    #[test]
    fn thin_assert_padding() {
        #[derive(Clone, Default)]
        #[repr(C)]
        struct Padded {
            i: u16,
        }
        // The header will have more alignment than `Padded`
        let header = HeaderWithLength::new(0i32, 2);
        let items = vec![Padded { i: 0xdead }, Padded { i: 0xbeef }];
        let a = ThinArc::from_header_and_iter(header, items.into_iter());
        assert_eq!(a.slice.len(), 2);
        assert_eq!(a.slice[0].i, 0xdead);
        assert_eq!(a.slice[1].i, 0xbeef);
    }
    // Clone/eq/convert churn on a slice-built ThinArc must drop the header
    // payload exactly once when every handle goes away.
    #[test]
    #[allow(clippy::redundant_clone, clippy::eq_op)]
    fn slices_and_thin() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            let x = Arc::into_thin(Arc::from_header_and_slice(header, &v));
            let y = ThinArc::with_arc(&x, |q| q.clone());
            let _ = y.clone();
            let _ = x == x;
            Arc::from_thin(x.clone());
        }
        assert_eq!(canary.load(Acquire), 1);
    }
    // Same churn as `slices_and_thin`, but the ThinArc is built from an iterator.
    #[test]
    #[allow(clippy::redundant_clone, clippy::eq_op)]
    fn iter_and_thin() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            let x = Arc::into_thin(Arc::from_header_and_iter(header, v.into_iter()));
            let y = ThinArc::with_arc(&x, |q| q.clone());
            let _ = y.clone();
            let _ = x == x;
            Arc::from_thin(x.clone());
        }
        assert_eq!(canary.load(Acquire), 1);
    }
    // into_raw/from_raw must round-trip the same address, and the payload must
    // still be dropped exactly once (no leak, no double free).
    #[test]
    fn into_raw_and_from_raw() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            type ThinArcCanary = ThinArc<Canary, u32>;
            let x: ThinArcCanary = Arc::into_thin(Arc::from_header_and_iter(header, v.into_iter()));
            let ptr = x.as_ptr();
            assert_eq!(x.into_raw(), ptr);
            let _x = unsafe { ThinArcCanary::from_raw(ptr) };
        }
        assert_eq!(canary.load(Acquire), 1);
    }
    // Comparison operators on ThinArc must agree with comparing the underlying
    // (header, slice) pairs, for totally ordered contents.
    #[test]
    fn thin_eq_and_cmp() {
        [
            [("*", &b"AB"[..]), ("*", &b"ab"[..])],
            [("*", &b"AB"[..]), ("*", &b"a"[..])],
            [("*", &b"A"[..]), ("*", &b"ab"[..])],
            [("A", &b"*"[..]), ("a", &b"*"[..])],
            [("a", &b"*"[..]), ("A", &b"*"[..])],
            [("AB", &b"*"[..]), ("a", &b"*"[..])],
            [("A", &b"*"[..]), ("ab", &b"*"[..])],
        ]
        .iter()
        .for_each(|[lt @ (lh, ls), rt @ (rh, rs)]| {
            let l = ThinArc::from_header_and_slice(lh, ls);
            let r = ThinArc::from_header_and_slice(rh, rs);
            assert_eq!(l, l);
            assert_eq!(r, r);
            assert_ne!(l, r);
            assert_ne!(r, l);
            assert_eq!(l <= l, lt <= lt, "{lt:?} <= {lt:?}");
            assert_eq!(l >= l, lt >= lt, "{lt:?} >= {lt:?}");
            assert_eq!(l < l, lt < lt, "{lt:?} < {lt:?}");
            assert_eq!(l > l, lt > lt, "{lt:?} > {lt:?}");
            assert_eq!(r <= r, rt <= rt, "{rt:?} <= {rt:?}");
            assert_eq!(r >= r, rt >= rt, "{rt:?} >= {rt:?}");
            assert_eq!(r < r, rt < rt, "{rt:?} < {rt:?}");
            assert_eq!(r > r, rt > rt, "{rt:?} > {rt:?}");
            assert_eq!(l < r, lt < rt, "{lt:?} < {rt:?}");
            assert_eq!(r > l, rt > lt, "{rt:?} > {lt:?}");
        })
    }
    // Same comparisons, but with floats: only PartialOrd is available, and the
    // operators must still agree with the underlying pairs.
    #[test]
    fn thin_eq_and_partial_cmp() {
        [
            [(0.0, &[0.0, 0.0][..]), (1.0, &[0.0, 0.0][..])],
            [(1.0, &[0.0, 0.0][..]), (0.0, &[0.0, 0.0][..])],
            [(0.0, &[0.0][..]), (0.0, &[0.0, 0.0][..])],
            [(0.0, &[0.0, 0.0][..]), (0.0, &[0.0][..])],
            [(0.0, &[1.0, 2.0][..]), (0.0, &[10.0, 20.0][..])],
        ]
        .iter()
        .for_each(|[lt @ (lh, ls), rt @ (rh, rs)]| {
            let l = ThinArc::from_header_and_slice(lh, ls);
            let r = ThinArc::from_header_and_slice(rh, rs);
            assert_eq!(l, l);
            assert_eq!(r, r);
            assert_ne!(l, r);
            assert_ne!(r, l);
            assert_eq!(l <= l, lt <= lt, "{lt:?} <= {lt:?}");
            assert_eq!(l >= l, lt >= lt, "{lt:?} >= {lt:?}");
            assert_eq!(l < l, lt < lt, "{lt:?} < {lt:?}");
            assert_eq!(l > l, lt > lt, "{lt:?} > {lt:?}");
            assert_eq!(r <= r, rt <= rt, "{rt:?} <= {rt:?}");
            assert_eq!(r >= r, rt >= rt, "{rt:?} >= {rt:?}");
            assert_eq!(r < r, rt < rt, "{rt:?} < {rt:?}");
            assert_eq!(r > r, rt > rt, "{rt:?} > {rt:?}");
            assert_eq!(l < r, lt < rt, "{lt:?} < {rt:?}");
            assert_eq!(r > l, rt > lt, "{rt:?} > {lt:?}");
        })
    }
    #[allow(dead_code)]
    const fn is_partial_ord<T: ?Sized + PartialOrd>() {}
    #[allow(dead_code)]
    const fn is_ord<T: ?Sized + Ord>() {}
    // compile-time check that PartialOrd/Ord is correctly derived
    const _: () = is_partial_ord::<ThinArc<f64, f64>>();
    const _: () = is_partial_ord::<ThinArc<f64, u64>>();
    const _: () = is_partial_ord::<ThinArc<u64, f64>>();
    const _: () = is_ord::<ThinArc<u64, u64>>();
}

// ---- vendored file boundary: third-party/vendor/triomphe/src/unique_arc.rs ----
use alloc::vec::Vec;
use alloc::{alloc::Layout, boxed::Box};
use core::convert::TryFrom;
use core::iter::FromIterator;
use core::marker::PhantomData;
use core::mem::{ManuallyDrop, MaybeUninit};
use core::ops::{Deref, DerefMut};
use core::ptr::{self, NonNull};
use core::sync::atomic::AtomicUsize;
use crate::iterator_as_exact_size_iterator::IteratorAsExactSizeIterator;
use crate::HeaderSlice;
use super::{Arc, ArcInner};
/// An `Arc` that is known to be uniquely owned
///
/// When `Arc`s are constructed, they are known to be
/// uniquely owned. In such a case it is safe to mutate
/// the contents of the `Arc`. Normally, one would just handle
/// this by mutating the data on the stack before allocating the
/// `Arc`, however it's possible the data is large or unsized
/// and you need to heap-allocate it earlier in such a way
/// that it can be freely converted into a regular `Arc` once you're
/// done.
///
/// `UniqueArc` exists for this purpose, when constructed it performs
/// the same allocations necessary for an `Arc`, however it allows mutable access.
/// Once the mutation is finished, you can call `.shareable()` and get a regular `Arc`
/// out of it.
///
/// ```rust
/// # use triomphe::UniqueArc;
/// let data = [1, 2, 3, 4, 5];
/// let mut x = UniqueArc::new(data);
/// x[4] = 7; // mutate!
/// let y = x.shareable(); // y is an Arc<T>
/// ```
// Invariant: the wrapped `Arc` is uniquely owned (strong count == 1) for the
// whole lifetime of the `UniqueArc` (see the constructors below), which is what
// makes handing out `&mut` to the payload sound. `repr(transparent)` allows
// `&mut Arc<T>` to be reinterpreted as `&mut UniqueArc<T>` in `from_arc_ref`.
#[repr(transparent)]
pub struct UniqueArc<T: ?Sized>(Arc<T>);
impl<T> UniqueArc<T> {
    #[inline]
    /// Construct a new UniqueArc
    pub fn new(data: T) -> Self {
        // A freshly created Arc has a refcount of 1, i.e. unique by construction.
        UniqueArc(Arc::new(data))
    }
    /// Construct an uninitialized arc
    ///
    /// The refcount is initialized but the payload is left as `MaybeUninit`;
    /// use [`UniqueArc::write`] and then [`UniqueArc::assume_init`] to finish
    /// initialization.
    #[inline]
    pub fn new_uninit() -> UniqueArc<MaybeUninit<T>> {
        unsafe {
            // Safety: we allocate with the layout of `ArcInner<MaybeUninit<T>>`
            // and initialize its `count` field before handing out the Arc; the
            // payload may remain uninitialized because it is `MaybeUninit`.
            let layout = Layout::new::<ArcInner<MaybeUninit<T>>>();
            let ptr = alloc::alloc::alloc(layout);
            let mut p = NonNull::new(ptr)
                .unwrap_or_else(|| alloc::alloc::handle_alloc_error(layout))
                .cast::<ArcInner<MaybeUninit<T>>>();
            ptr::write(&mut p.as_mut().count, AtomicUsize::new(1));
            UniqueArc(Arc {
                p,
                phantom: PhantomData,
            })
        }
    }
    /// Gets the inner value of the unique arc
    pub fn into_inner(this: Self) -> T {
        // Wrap the Arc in a `ManuallyDrop` so that its drop routine never runs
        let this = ManuallyDrop::new(this.0);
        // NOTE(review): the message says "non-zero ref count" but the check is
        // `is_unique()` (i.e. a count other than exactly one).
        debug_assert!(
            this.is_unique(),
            "attempted to call `.into_inner()` on a `UniqueArc` with a non-zero ref count",
        );
        // Safety: We have exclusive access to the inner data and the
        // arc will not perform its drop routine since we've
        // wrapped it in a `ManuallyDrop`
        unsafe { Box::from_raw(this.ptr()).data }
    }
}
impl<T: ?Sized> UniqueArc<T> {
    /// Convert to a shareable `Arc<T>` once we're done mutating it
    #[inline]
    pub fn shareable(self) -> Arc<T> {
        // Simply unwraps; the strong count (1) carries over unchanged.
        self.0
    }
    /// Creates a new [`UniqueArc`] from the given [`Arc`].
    ///
    /// An unchecked alternative to `Arc::try_unique()`
    ///
    /// # Safety
    ///
    /// The given `Arc` must have a reference count of exactly one
    pub(crate) unsafe fn from_arc(arc: Arc<T>) -> Self {
        debug_assert_eq!(Arc::count(&arc), 1);
        Self(arc)
    }
    /// Creates a new `&mut `[`UniqueArc`] from the given `&mut `[`Arc`].
    ///
    /// An unchecked alternative to `Arc::try_as_unique()`
    ///
    /// # Safety
    ///
    /// The given `Arc` must have a reference count of exactly one
    pub(crate) unsafe fn from_arc_ref(arc: &mut Arc<T>) -> &mut Self {
        debug_assert_eq!(Arc::count(arc), 1);
        // Safety: caller guarantees that `arc` is unique,
        // `UniqueArc` is `repr(transparent)`
        &mut *(arc as *mut Arc<T> as *mut UniqueArc<T>)
    }
}
impl<T> UniqueArc<MaybeUninit<T>> {
    /// Calls `MaybeUninit::write` on the contained value.
    ///
    /// Returns a mutable reference to the now-initialized value.
    pub fn write(&mut self, val: T) -> &mut T {
        unsafe {
            // Casting *mut MaybeUninit<T> -> *mut T is always fine
            let ptr = self.as_mut_ptr() as *mut T;
            // Safety: We have exclusive access to the inner data
            ptr.write(val);
            // Safety: the pointer was just written to
            &mut *ptr
        }
    }
    /// Obtain a mutable pointer to the stored `MaybeUninit<T>`.
    pub fn as_mut_ptr(&mut self) -> *mut MaybeUninit<T> {
        // Safety: `UniqueArc` guarantees unique ownership, so projecting a
        // `&mut` into the heap payload cannot alias another reference.
        unsafe { &mut (*self.0.ptr()).data }
    }
    /// Convert to an initialized Arc.
    ///
    /// # Safety
    ///
    /// This function is equivalent to `MaybeUninit::assume_init` and has the
    /// same safety requirements. You are responsible for ensuring that the `T`
    /// has actually been initialized before calling this method.
    #[inline]
    pub unsafe fn assume_init(this: Self) -> UniqueArc<T> {
        // Reuse the allocation: only the pointer's type changes
        // (`MaybeUninit<T>` -> `T`); `ManuallyDrop` prevents the old handle
        // from decrementing the refcount it hands over.
        UniqueArc(Arc {
            p: ManuallyDrop::new(this).0.p.cast(),
            phantom: PhantomData,
        })
    }
}
impl<T> UniqueArc<[MaybeUninit<T>]> {
    /// Create a `UniqueArc` containing an uninitialized slice `[MaybeUninit<T>]` of length `len`.
    pub fn new_uninit_slice(len: usize) -> Self {
        // Allocate an ArcInner sized for a `()` header plus `len` elements.
        let ptr: NonNull<ArcInner<HeaderSlice<(), [MaybeUninit<T>]>>> =
            Arc::allocate_for_header_and_slice(len);
        // Safety:
        // - `ArcInner` is properly allocated and initialized.
        // - `()` and `[MaybeUninit<T>]` do not require special initialization
        // - The `Arc` is just created and so -- unique.
        unsafe {
            let arc: Arc<HeaderSlice<(), [MaybeUninit<T>]>> = Arc::from_raw_inner(ptr.as_ptr());
            let arc: Arc<[MaybeUninit<T>]> = arc.into();
            UniqueArc(arc)
        }
    }
    /// Convert to an initialized `UniqueArc<[T]>`.
    ///
    /// # Safety
    ///
    /// Must initialize all fields before calling this function.
    #[inline]
    pub unsafe fn assume_init_slice(Self(this): Self) -> UniqueArc<[T]> {
        // Destructures `self` to reach the inner Arc and delegates to its
        // slice-form `assume_init` (defined elsewhere in this crate).
        UniqueArc(this.assume_init())
    }
}
impl<T: ?Sized> TryFrom<Arc<T>> for UniqueArc<T> {
type Error = Arc<T>;
fn try_from(arc: Arc<T>) -> Result<Self, Self::Error> {
Arc::try_unique(arc)
}
}
impl<T: ?Sized> Deref for UniqueArc<T> {
    type Target = T;
    /// Borrows the inner value through the wrapped `Arc`.
    #[inline]
    fn deref(&self) -> &T {
        &*self.0
    }
}
impl<T: ?Sized> DerefMut for UniqueArc<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut T {
        // We know this to be uniquely owned
        // Safety: `UniqueArc` guarantees a refcount of exactly one, so handing
        // out `&mut` to the heap payload cannot alias another reference.
        unsafe { &mut (*self.0.ptr()).data }
    }
}
impl<A> FromIterator<A> for UniqueArc<[A]> {
    /// Collects an iterator into a uniquely-owned `Arc` slice.
    ///
    /// When the iterator reports an exact size (`size_hint` lower == upper),
    /// the slice is built in place without an intermediate `Vec`.
    fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self {
        let it = iter.into_iter();
        let arc: Arc<[A]> = match it.size_hint() {
            // Exact size known up front: build the allocation directly.
            (lo, Some(hi)) if lo == hi => {
                Arc::from_header_and_iter((), IteratorAsExactSizeIterator::new(it)).into()
            }
            // Unknown size: buffer into a Vec first.
            _ => Arc::from(it.collect::<Vec<_>>()),
        };
        // Safety: the `Arc` was just created above, so its refcount is 1.
        unsafe { UniqueArc::from_arc(arc) }
    }
}
// Safety:
// This leverages the correctness of Arc's CoerciblePtr impl. Additionally, we must ensure that
// this can not be used to violate the safety invariants of UniqueArc, which require that we can not
// duplicate the Arc, such that replace_ptr returns a valid instance. This holds since it consumes
// a unique owner of the contained ArcInner.
// (Only compiled when the optional `unsize` feature is enabled.)
#[cfg(feature = "unsize")]
unsafe impl<T, U: ?Sized> unsize::CoerciblePtr<U> for UniqueArc<T> {
    type Pointee = T;
    type Output = UniqueArc<U>;
    fn as_sized_ptr(&mut self) -> *mut T {
        // Dispatch to the contained field.
        unsize::CoerciblePtr::<U>::as_sized_ptr(&mut self.0)
    }
    unsafe fn replace_ptr(self, new: *mut U) -> UniqueArc<U> {
        // Dispatch to the contained field, work around conflict of destructuring and Drop.
        let inner = ManuallyDrop::new(self);
        UniqueArc(ptr::read(&inner.0).replace_ptr(new))
    }
}
#[cfg(test)]
mod tests {
    use crate::{Arc, UniqueArc};
    use core::{convert::TryFrom, mem::MaybeUninit};
    // into_inner must hand back the stored value.
    #[test]
    fn unique_into_inner() {
        let unique = UniqueArc::new(10u64);
        assert_eq!(UniqueArc::into_inner(unique), 10);
    }
    // try_from must fail while a second handle exists, and succeed once the
    // remaining handle is the only owner.
    #[test]
    fn try_from_arc() {
        let x = Arc::new(10_000);
        let y = x.clone();
        assert!(UniqueArc::try_from(x).is_err());
        assert_eq!(
            UniqueArc::into_inner(UniqueArc::try_from(y).unwrap()),
            10_000,
        );
    }
    // write + assume_init round-trip on an uninitialized UniqueArc.
    #[test]
    #[allow(deprecated)]
    fn maybeuninit_smoke() {
        let mut arc: UniqueArc<MaybeUninit<_>> = UniqueArc::new_uninit();
        arc.write(999);
        let arc = unsafe { UniqueArc::assume_init(arc) };
        assert_eq!(*arc, 999);
    }
}