diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index abda5aefab645..b0b9eb80fc578 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -204,6 +204,8 @@
 #[macro_use]
 mod macros;
 
+#[cfg(not(no_rc))]
+mod raw_rc;
 mod raw_vec;
 
 // Heaps provided for low-level allocation strategies
diff --git a/library/alloc/src/raw_rc/mod.rs b/library/alloc/src/raw_rc/mod.rs
new file mode 100644
index 0000000000000..61a5289bc2e93
--- /dev/null
+++ b/library/alloc/src/raw_rc/mod.rs
@@ -0,0 +1,539 @@
+//! Base implementation for `rc::{Rc, UniqueRc, Weak}` and `sync::{Arc, UniqueArc, Weak}`.
+//!
+//! # Allocation Memory Layout
+//!
+//! The memory layout of a reference-counted allocation is designed so that the memory that stores
+//! the reference counts is at a fixed offset from the memory that stores the value. In this way,
+//! operations that only rely on reference counts can ignore the actual type of the contained value
+//! and only care about the address of the contained value, which allows us to share code between
+//! reference-counting pointers that have different types of contained values. This can potentially
+//! reduce the binary size.
+//!
+//! Assuming the type of the stored value is `T`, the allocation memory layout is designed as
+//! follows:
+//!
+//! - We use a `RefCounts` type to store the reference counts.
+//! - The alignment of the allocation is `align_of::<RefCounts>().max(align_of::<T>())`.
+//! - The value is stored at offset `size_of::<RefCounts>().next_multiple_of(align_of::<T>())`.
+//! - The size of the allocation is
+//!   `size_of::<RefCounts>().next_multiple_of(align_of::<T>()) + size_of::<T>()`.
+//! - The `RefCounts` object is stored at offset
+//!   `size_of::<RefCounts>().next_multiple_of(align_of::<T>()) - size_of::<RefCounts>()`.
+//!
+//! Here is a table showing the order and size of each component in a reference-counted allocation
+//! of a `T` value:
+//!
+//! | Component   | Size                                                                                |
+//! | ----------- | ----------------------------------------------------------------------------------- |
+//! | Padding     | `size_of::<RefCounts>().next_multiple_of(align_of::<T>()) - size_of::<RefCounts>()` |
+//! | `RefCounts` | `size_of::<RefCounts>()`                                                            |
+//! | `T`         | `size_of::<T>()`                                                                    |
+//!
+//! This works because:
+//!
+//! - Both the `RefCounts` object and the value are stored in the allocation without overlapping.
+//! - The `RefCounts` object is stored at offset
+//!   `size_of::<RefCounts>().next_multiple_of(align_of::<T>()) - size_of::<RefCounts>()`, which
+//!   has a valid alignment for `RefCounts` because:
+//!   - If `align_of::<T>() <= align_of::<RefCounts>()`, the offset is 0, which is trivially
+//!     aligned for `RefCounts`.
+//!   - If `align_of::<T>() > align_of::<RefCounts>()`, then `align_of::<T>()` is a multiple of
+//!     `align_of::<RefCounts>()`. Since `size_of::<RefCounts>()` is also a multiple of
+//!     `align_of::<RefCounts>()`, the offset is a difference of two multiples of
+//!     `align_of::<RefCounts>()` and therefore has a valid alignment for `RefCounts`.
+//! - The value is stored at offset `size_of::<RefCounts>().next_multiple_of(align_of::<T>())`,
+//!   which trivially satisfies the alignment requirement of `T`.
+//! - The distance between the `RefCounts` object and the value is `size_of::<RefCounts>()`, a fixed
+//!   value.
+//!
+//! So both the `RefCounts` object and the value object have their alignment and size requirements
+//! satisfied. And we get a fixed offset between those two objects.
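+//!
+//! As a concrete illustration (the numbers assume a 64-bit target, where
+//! `size_of::<RefCounts>()` is 16 and `align_of::<RefCounts>()` is 8; they are not part of the
+//! implementation):
+//!
+//! ```
+//! // The layout arithmetic described above, specialized to a 64-bit target.
+//! const REF_COUNTS_SIZE: usize = 16;
+//!
+//! const fn value_offset(value_align: usize) -> usize {
+//!     REF_COUNTS_SIZE.next_multiple_of(value_align)
+//! }
+//!
+//! // `u32` (alignment 4): value at offset 16, `RefCounts` at offset 0, no padding.
+//! const _: () = assert!(value_offset(4) == 16);
+//!
+//! // A hypothetical 32-byte-aligned type: value at offset 32, `RefCounts` at offset 16,
+//! // 16 bytes of leading padding.
+//! const _: () = assert!(value_offset(32) == 32);
+//! ```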
+//!
+//! # Reference-counting Pointer Design
+//!
+//! Both strong and weak reference-counting pointers store a pointer that points to the value
+//! object in a reference-counted allocation instead of the beginning of the allocation. This is
+//! based on the assumption that users access the contained value more frequently than the reference
+//! counters. Also, this potentially enables optimizations such as:
+//!
+//! - Giving reference-counting pointers an ABI-compatible representation with raw pointers so we
+//!   can use them directly in FFI interfaces (see the sketch after this list).
+//! - Converting `Option<Rc<T>>` to `Option<&T>` without checking for `None` values.
+//! - Converting `&[Rc<T>]` to `&[&T]` with zero cost.
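+//!
+//! A minimal sketch of the representation property that the points above rely on (`ValuePtr` is a
+//! hypothetical stand-in for a pointer-to-value representation, not a type used by this module):
+//!
+//! ```
+//! use core::mem::size_of;
+//! use core::ptr::NonNull;
+//!
+//! // A `#[repr(transparent)]` wrapper around a non-null pointer to the value has the same size
+//! // as a raw pointer, and `Option` of it can use the null value as its niche.
+//! #[repr(transparent)]
+//! struct ValuePtr<T>(NonNull<T>);
+//!
+//! const _: () = assert!(size_of::<ValuePtr<u32>>() == size_of::<*mut u32>());
+//! const _: () = assert!(size_of::<Option<ValuePtr<u32>>>() == size_of::<*mut u32>());
+//! ```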
+
+use core::alloc::{AllocError, Allocator};
+use core::cell::UnsafeCell;
+use core::ptr::NonNull;
+#[cfg(not(no_global_oom_handling))]
+use core::{mem, ptr};
+
+#[cfg(not(no_global_oom_handling))]
+use crate::alloc;
+pub(crate) use crate::raw_rc::raw_rc::RawRc;
+pub(crate) use crate::raw_rc::raw_unique_rc::RawUniqueRc;
+pub(crate) use crate::raw_rc::raw_weak::RawWeak;
+use crate::raw_rc::rc_layout::RcLayout;
+
+mod raw_rc;
+mod raw_unique_rc;
+mod raw_weak;
+mod rc_layout;
+
+/// The return value type for `RcOps::make_mut`.
+#[cfg(not(no_global_oom_handling))]
+pub(crate) enum MakeMutStrategy {
+    /// This `RawRc` is the only strong pointer that references the value, but there are weak
+    /// pointers also referencing the value. Before returning, the strong reference count has been
+    /// set to zero to prevent new strong pointers from being created through upgrading from weak
+    /// pointers.
+    Move,
+    /// There is more than one strong pointer that references the value.
+    Clone,
+}
+
+/// A trait for `rc` and `sync` modules to define their own implementation of reference-counting
+/// behaviors.
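+///
+/// For illustration, the expected semantics of the two basic counter operations are sketched
+/// below with a plain `Cell<usize>` instead of the `UnsafeCell`-based counters used here (an
+/// informal model, not an implementation of this trait):
+///
+/// ```
+/// use core::cell::Cell;
+///
+/// fn increment_ref_count(count: &Cell<usize>) {
+///     count.set(count.get().checked_add(1).unwrap());
+/// }
+///
+/// // Returns whether the count reached zero, i.e. whether the last reference was just released.
+/// fn decrement_ref_count(count: &Cell<usize>) -> bool {
+///     let new_count = count.get() - 1;
+///     count.set(new_count);
+///     new_count == 0
+/// }
+///
+/// let count = Cell::new(1);
+/// increment_ref_count(&count);
+/// assert!(!decrement_ref_count(&count));
+/// assert!(decrement_ref_count(&count)); // the last reference is gone
+/// ```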
+///
+/// # Safety
+///
+/// Implementors should implement each method according to its description.
+pub(crate) unsafe trait RcOps: Sized {
+    /// Increments a reference counter managed by `RawRc` and `RawWeak`. Currently, both strong and
+    /// weak reference counters are incremented by this method.
+    ///
+    /// # Safety
+    ///
+    /// - `count` should only be handled by the same `RcOps` implementation.
+    /// - The value of `count` should be non-zero.
+    unsafe fn increment_ref_count(count: &UnsafeCell<usize>);
+
+    /// Decrements a reference counter managed by `RawRc` and `RawWeak`. Currently, both strong and
+    /// weak reference counters are decremented by this method. Returns whether the reference count
+    /// becomes zero after decrementing.
+    ///
+    /// # Safety
+    ///
+    /// - `count` should only be handled by the same `RcOps` implementation.
+    /// - The value of `count` should be non-zero.
+    unsafe fn decrement_ref_count(count: &UnsafeCell<usize>) -> bool;
+
+    /// Increments `strong_count` if and only if `strong_count` is non-zero. Returns whether
+    /// incrementing is performed.
+    ///
+    /// # Safety
+    ///
+    /// - `strong_count` should only be handled by the same `RcOps` implementation.
+    /// - `strong_count` should be provided by a `RawWeak` object.
+    unsafe fn upgrade(strong_count: &UnsafeCell<usize>) -> bool;
+
+    /// Increments `weak_count`. This is required instead of `increment_ref_count` because `Arc`
+    /// requires additional synchronization with `is_unique`.
+    ///
+    /// # Safety
+    ///
+    /// - `weak_count` should only be handled by the same `RcOps` implementation.
+    /// - `weak_count` should be provided by a `RawRc` object.
+    unsafe fn downgrade(weak_count: &UnsafeCell<usize>);
+
+    /// Decrements `strong_count` if and only if `strong_count` is 1. Returns true if decrementing
+    /// is performed.
+    ///
+    /// # Safety
+    ///
+    /// - `strong_count` should only be handled by the same `RcOps` implementation.
+    /// - `strong_count` should be provided by a `RawRc` object.
+    unsafe fn lock_strong_count(strong_count: &UnsafeCell<usize>) -> bool;
+
+    /// Sets `strong_count` to 1.
+    ///
+    /// # Safety
+    ///
+    /// - `strong_count` should only be handled by the same `RcOps` implementation.
+    /// - `strong_count` should be provided by a `RawUniqueRc` object.
+    unsafe fn unlock_strong_count(strong_count: &UnsafeCell<usize>);
+
+    /// Returns whether both `strong_count` and `weak_count` are 1. Used by `RawRc::get_mut`.
+    ///
+    /// # Safety
+    ///
+    /// - `ref_counts` should only be handled by the same `RcOps` implementation.
+    /// - `ref_counts` should be provided by a `RawRc` object.
+    unsafe fn is_unique(ref_counts: &RefCounts) -> bool;
+
+    /// Determines how to make a mutable reference from a `RawRc`:
+    ///
+    /// - If both strong count and weak count are 1, returns `None`.
+    /// - If strong count is 1 and weak count is greater than 1, returns
+    ///   `Some(MakeMutStrategy::Move)`.
+    /// - If strong count is greater than 1, returns `Some(MakeMutStrategy::Clone)`.
+    ///
+    /// # Safety
+    ///
+    /// - `ref_counts` should only be handled by the same `RcOps` implementation.
+    /// - `ref_counts` should be provided by a `RawRc` object.
+    #[cfg(not(no_global_oom_handling))]
+    unsafe fn make_mut(ref_counts: &RefCounts) -> Option<MakeMutStrategy>;
+}
+
+/// Defines the `RefCounts` struct to store reference counts. The reference counters have suitable
+/// alignment for atomic operations.
+macro_rules! define_ref_counts {
+    ($($target_pointer_width:literal => $align:literal,)*) => {
+        $(
+            /// Stores reference counts.
+            #[cfg(target_pointer_width = $target_pointer_width)]
+            #[repr(C, align($align))]
+            pub(crate) struct RefCounts {
+                /// Weak reference count (plus one if there are non-zero strong reference counts).
+                pub(crate) weak: UnsafeCell<usize>,
+                /// Strong reference count.
+                pub(crate) strong: UnsafeCell<usize>,
+            }
+        )*
+    };
+}
+
+// This ensures reference counters have correct alignment so that they can be treated as atomic
+// reference counters for `Arc`.
+define_ref_counts! {
+    "16" => 2,
+    "32" => 4,
+    "64" => 8,
+}
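+
+// An illustrative compile-time check (added for clarity, not required by the code above): the
+// alignments chosen above are at least that of `AtomicUsize`, so `Arc` can treat the counters as
+// atomic integers.
+#[cfg(target_has_atomic = "ptr")]
+const _: () = assert!(
+    core::mem::align_of::<RefCounts>()
+        >= core::mem::align_of::<core::sync::atomic::AtomicUsize>()
+);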
+
+impl RefCounts {
+    /// Creates a `RefCounts` with weak count of `1` and strong count of `strong_count`.
+    const fn new(strong_count: usize) -> Self {
+        Self { weak: UnsafeCell::new(1), strong: UnsafeCell::new(strong_count) }
+    }
+}
+
+/// Gets a pointer to the `RefCounts` object in the same allocation as the value pointed to by
+/// `value_ptr`.
+///
+/// # Safety
+///
+/// - `value_ptr` must point to a value object (can be uninitialized or dropped) that lives in a
+///   reference-counted allocation.
+unsafe fn ref_counts_ptr_from_value_ptr(value_ptr: NonNull<()>) -> NonNull<RefCounts> {
+    // SAFETY: The caller guarantees that `value_ptr` points to the value inside some
+    // reference-counted allocation. Our layout guarantees that the `RefCounts` object is stored
+    // exactly `size_of::<RefCounts>()` bytes before the value, so we apply this offset.
+    unsafe { value_ptr.cast::<RefCounts>().sub(1) }
+}
+
+/// Gets a pointer to the strong counter object in the same allocation as the value pointed to by
+/// `value_ptr`.
+///
+/// # Safety
+///
+/// - `value_ptr` must point to a value object (can be uninitialized or dropped) that lives in a
+///   reference-counted allocation.
+unsafe fn strong_count_ptr_from_value_ptr(value_ptr: NonNull<()>) -> NonNull<UnsafeCell<usize>> {
+    let ref_counts_ptr = unsafe { ref_counts_ptr_from_value_ptr(value_ptr) };
+
+    unsafe { NonNull::new_unchecked(&raw mut (*ref_counts_ptr.as_ptr()).strong) }
+}
+
+/// Gets a pointer to the weak counter object in the same allocation as the value pointed to by
+/// `value_ptr`.
+///
+/// # Safety
+///
+/// - `value_ptr` must point to a value object (can be uninitialized or dropped) that lives in a
+///   reference-counted allocation.
+unsafe fn weak_count_ptr_from_value_ptr(value_ptr: NonNull<()>) -> NonNull<UnsafeCell<usize>> {
+    let ref_counts_ptr = unsafe { ref_counts_ptr_from_value_ptr(value_ptr) };
+
+    unsafe { NonNull::new_unchecked(&raw mut (*ref_counts_ptr.as_ptr()).weak) }
+}
+
+/// Allocates uninitialized memory for a reference-counted allocation with allocator `alloc` and
+/// layout `rc_layout`. Returns a pointer to the value location.
+#[inline]
+fn allocate_uninit_raw_bytes<A>(alloc: &A, rc_layout: RcLayout) -> Result<NonNull<()>, AllocError>
+where
+    A: Allocator,
+{
+    let allocation_result = alloc.allocate(rc_layout.get());
+
+    allocation_result
+        .map(|allocation_ptr| unsafe { allocation_ptr.cast().byte_add(rc_layout.value_offset()) })
+}
+
+/// Allocates zeroed memory for a reference-counted allocation with allocator `alloc` and layout
+/// `rc_layout`. Returns a pointer to the value location.
+#[inline]
+fn allocate_zeroed_raw_bytes<A>(alloc: &A, rc_layout: RcLayout) -> Result<NonNull<()>, AllocError>
+where
+    A: Allocator,
+{
+    let allocation_result = alloc.allocate_zeroed(rc_layout.get());
+
+    allocation_result
+        .map(|allocation_ptr| unsafe { allocation_ptr.cast().byte_add(rc_layout.value_offset()) })
+}
+
+/// Initializes the reference counters in the reference-counted allocation whose value is pointed
+/// to by `value_ptr`, with a strong count of `STRONG_COUNT` and a weak count of 1.
+///
+/// # Safety
+///
+/// - `value_ptr` points to the value location inside a valid reference-counted allocation.
+#[inline]
+unsafe fn init_rc_allocation<const STRONG_COUNT: usize>(value_ptr: NonNull<()>) {
+    unsafe {
+        ref_counts_ptr_from_value_ptr(value_ptr).write(const { RefCounts::new(STRONG_COUNT) });
+    }
+}
+
+/// Tries to allocate a chunk of reference-counted memory that is described by `rc_layout` with
+/// `alloc`. The allocated memory has strong count of `STRONG_COUNT` and weak count of 1.
+fn try_allocate_uninit_in<A, const STRONG_COUNT: usize>(
+    alloc: &A,
+    rc_layout: RcLayout,
+) -> Result<NonNull<()>, AllocError>
+where
+    A: Allocator,
+{
+    allocate_uninit_raw_bytes(alloc, rc_layout)
+        .inspect(|&value_ptr| unsafe { init_rc_allocation::<STRONG_COUNT>(value_ptr) })
+}
+
+/// Creates an allocator of type `A`, then tries to allocate a chunk of reference-counted memory
+/// that is described by `rc_layout`.
+fn try_allocate_uninit<A, const STRONG_COUNT: usize>(
+    rc_layout: RcLayout,
+) -> Result<(NonNull<()>, A), AllocError>
+where
+    A: Allocator + Default,
+{
+    let alloc = A::default();
+
+    try_allocate_uninit_in::<A, STRONG_COUNT>(&alloc, rc_layout).map(|value_ptr| (value_ptr, alloc))
+}
+
+/// Tries to allocate a chunk of reference-counted memory that is described by `rc_layout` with
+/// `alloc`. The allocated memory has a strong count of `STRONG_COUNT` and a weak count of 1, and
+/// the value memory is all zero bytes.
+fn try_allocate_zeroed_in<A, const STRONG_COUNT: usize>(
+    alloc: &A,
+    rc_layout: RcLayout,
+) -> Result<NonNull<()>, AllocError>
+where
+    A: Allocator,
+{
+    allocate_zeroed_raw_bytes(alloc, rc_layout)
+        .inspect(|&value_ptr| unsafe { init_rc_allocation::<STRONG_COUNT>(value_ptr) })
+}
+
+/// Creates an allocator of type `A`, then tries to allocate a chunk of reference-counted memory,
+/// described by `rc_layout`, with all bytes zeroed.
+fn try_allocate_zeroed<A, const STRONG_COUNT: usize>(
+    rc_layout: RcLayout,
+) -> Result<(NonNull<()>, A), AllocError>
+where
+    A: Allocator + Default,
+{
+    let alloc = A::default();
+
+    try_allocate_zeroed_in::<A, STRONG_COUNT>(&alloc, rc_layout).map(|value_ptr| (value_ptr, alloc))
+}
+
+/// If `allocation_result` is `Ok`, initializes the reference counts with a strong count of
+/// `STRONG_COUNT` and a weak count of 1 and returns a pointer to the value object; otherwise, a
+/// panic will be triggered by calling `alloc::handle_alloc_error`.
+#[cfg(not(no_global_oom_handling))]
+#[inline]
+unsafe fn handle_rc_allocation_result<const STRONG_COUNT: usize>(
+    allocation_result: Result<NonNull<()>, AllocError>,
+    rc_layout: RcLayout,
+) -> NonNull<()> {
+    match allocation_result {
+        Ok(value_ptr) => {
+            unsafe { init_rc_allocation::<STRONG_COUNT>(value_ptr) };
+
+            value_ptr
+        }
+        Err(AllocError) => alloc::handle_alloc_error(rc_layout.get()),
+    }
+}
+
+/// Allocates reference-counted memory that is described by `rc_layout` with `alloc`. The allocated
+/// memory has strong count of `STRONG_COUNT` and weak count of 1. If the allocation fails, panic
+/// will be triggered by calling `alloc::handle_alloc_error`.
+#[cfg(not(no_global_oom_handling))]
+#[inline]
+fn allocate_uninit_in<A, const STRONG_COUNT: usize>(alloc: &A, rc_layout: RcLayout) -> NonNull<()>
+where
+    A: Allocator,
+{
+    let allocation_result = allocate_uninit_raw_bytes(alloc, rc_layout);
+
+    unsafe { handle_rc_allocation_result::<STRONG_COUNT>(allocation_result, rc_layout) }
+}
+
+/// Creates an allocator of type `A`, then allocates a chunk of reference-counted memory that is
+/// described by `rc_layout`.
+#[cfg(not(no_global_oom_handling))]
+#[inline]
+fn allocate_uninit<A, const STRONG_COUNT: usize>(rc_layout: RcLayout) -> (NonNull<()>, A)
+where
+    A: Allocator + Default,
+{
+    let alloc = A::default();
+    let value_ptr = allocate_uninit_in::<A, STRONG_COUNT>(&alloc, rc_layout);
+
+    (value_ptr, alloc)
+}
+
+/// Allocates reference-counted memory that is described by `rc_layout` with `alloc`. The allocated
+/// memory has strong count of `STRONG_COUNT` and weak count of 1, and the value memory is all zero
+/// bytes. If the allocation fails, panic will be triggered by calling `alloc::handle_alloc_error`.
+#[cfg(not(no_global_oom_handling))]
+fn allocate_zeroed_in<A, const STRONG_COUNT: usize>(alloc: &A, rc_layout: RcLayout) -> NonNull<()>
+where
+    A: Allocator,
+{
+    let allocation_result = allocate_zeroed_raw_bytes(alloc, rc_layout);
+
+    unsafe { handle_rc_allocation_result::<STRONG_COUNT>(allocation_result, rc_layout) }
+}
+
+/// Creates an allocator of type `A`, then allocates a chunk of reference-counted memory, with all
+/// bytes zeroed, that is described by `rc_layout`.
+#[cfg(not(no_global_oom_handling))]
+fn allocate_zeroed<A, const STRONG_COUNT: usize>(rc_layout: RcLayout) -> (NonNull<()>, A)
+where
+    A: Allocator + Default,
+{
+    let alloc = A::default();
+    let value_ptr = allocate_zeroed_in::<A, STRONG_COUNT>(&alloc, rc_layout);
+
+    (value_ptr, alloc)
+}
+
+/// Allocates a reference-counted memory chunk for storing a value according to `rc_layout`, then
+/// initializes the value with `f`. If `f` panics, the allocated memory will be deallocated.
+#[cfg(not(no_global_oom_handling))]
+fn allocate_with_in<A, F, const STRONG_COUNT: usize>(
+    alloc: &A,
+    rc_layout: RcLayout,
+    f: F,
+) -> NonNull<()>
+where
+    A: Allocator,
+    F: FnOnce(NonNull<()>),
+{
+    struct Guard<'a, A>
+    where
+        A: Allocator,
+    {
+        value_ptr: NonNull<()>,
+        alloc: &'a A,
+        rc_layout: RcLayout,
+    }
+
+    impl<'a, A> Drop for Guard<'a, A>
+    where
+        A: Allocator,
+    {
+        fn drop(&mut self) {
+            unsafe { deallocate::<A>(self.value_ptr, self.alloc, self.rc_layout) };
+        }
+    }
+
+    let value_ptr = allocate_uninit_in::<A, STRONG_COUNT>(alloc, rc_layout);
+    let guard = Guard { value_ptr, alloc, rc_layout };
+
+    f(value_ptr);
+
+    mem::forget(guard);
+
+    value_ptr
+}
+
+/// Creates an allocator of type `A`, then allocates a chunk of reference-counted memory that is
+/// described by `rc_layout`. `f` will be called with a pointer that points to the value storage
+/// to initialize the allocated memory. If `f` panics, the allocated memory will be deallocated.
+#[cfg(not(no_global_oom_handling))]
+fn allocate_with<A, F, const STRONG_COUNT: usize>(rc_layout: RcLayout, f: F) -> (NonNull<()>, A)
+where
+    A: Allocator + Default,
+    F: FnOnce(NonNull<()>),
+{
+    let alloc = A::default();
+    let value_ptr = allocate_with_in::<A, F, STRONG_COUNT>(&alloc, rc_layout, f);
+
+    (value_ptr, alloc)
+}
+
+/// Allocates reference-counted memory that has strong count of `STRONG_COUNT` and weak count of 1.
+/// The value will be initialized with data pointed to by `src_ptr`.
+///
+/// # Safety
+///
+/// - The memory pointed to by `src_ptr` must contain enough bytes to fill the value in an
+///   allocation that is described by `rc_layout`.
+#[cfg(not(no_global_oom_handling))]
+unsafe fn allocate_with_bytes_in<A, const STRONG_COUNT: usize>(
+    src_ptr: NonNull<()>,
+    alloc: &A,
+    rc_layout: RcLayout,
+) -> NonNull<()>
+where
+    A: Allocator,
+{
+    let value_ptr = allocate_uninit_in::<A, STRONG_COUNT>(alloc, rc_layout);
+    let value_size = rc_layout.value_size();
+
+    unsafe {
+        ptr::copy_nonoverlapping::<u8>(
+            src_ptr.as_ptr().cast(),
+            value_ptr.as_ptr().cast(),
+            value_size,
+        );
+    }
+
+    value_ptr
+}
+
+/// Allocates a chunk of reference-counted memory with a value that is copied from `src`.
+#[cfg(not(no_global_oom_handling))]
+fn allocate_with_value_in<T, A, const STRONG_COUNT: usize>(src: &T, alloc: &A) -> NonNull<T>
+where
+    T: ?Sized,
+    A: Allocator,
+{
+    let src_ptr = NonNull::from(src);
+    let rc_layout = unsafe { RcLayout::from_value_ptr(src_ptr) };
+    let (src_ptr, metadata) = src_ptr.to_raw_parts();
+    let value_ptr = unsafe { allocate_with_bytes_in::<A, STRONG_COUNT>(src_ptr, alloc, rc_layout) };
+
+    NonNull::from_raw_parts(value_ptr, metadata)
+}
+
+/// Creates an allocator of type `A`, then allocates a chunk of reference-counted memory with value
+/// copied from `value`.
+#[cfg(not(no_global_oom_handling))]
+#[inline]
+fn allocate_with_value<T, A, const STRONG_COUNT: usize>(value: &T) -> (NonNull<T>, A)
+where
+    T: ?Sized,
+    A: Allocator + Default,
+{
+    let alloc = A::default();
+    let value_ptr = allocate_with_value_in::<T, A, STRONG_COUNT>(value, &alloc);
+
+    (value_ptr, alloc)
+}
+
+/// Deallocates a reference-counted allocation whose value object is pointed to by `value_ptr`.
+#[inline]
+unsafe fn deallocate<A>(value_ptr: NonNull<()>, alloc: &A, rc_layout: RcLayout)
+where
+    A: Allocator,
+{
+    let value_offset = rc_layout.value_offset();
+    let allocation_ptr = unsafe { value_ptr.byte_sub(value_offset) };
+
+    unsafe { alloc.deallocate(allocation_ptr.cast(), rc_layout.get()) }
+}
diff --git a/library/alloc/src/raw_rc/raw_rc.rs b/library/alloc/src/raw_rc/raw_rc.rs
new file mode 100644
index 0000000000000..21bb9ebeb051e
--- /dev/null
+++ b/library/alloc/src/raw_rc/raw_rc.rs
@@ -0,0 +1,1120 @@
+use core::alloc::{AllocError, Allocator};
+use core::any::Any;
+use core::cell::UnsafeCell;
+#[cfg(not(no_global_oom_handling))]
+use core::clone::CloneToUninit;
+use core::error::{Error, Request};
+use core::fmt::{self, Debug, Display, Formatter, Pointer};
+use core::hash::{Hash, Hasher};
+#[cfg(not(no_global_oom_handling))]
+use core::iter::TrustedLen;
+use core::marker::{PhantomData, Unsize};
+use core::mem::MaybeUninit;
+#[cfg(not(no_global_oom_handling))]
+use core::mem::{self, ManuallyDrop};
+use core::ops::{CoerceUnsized, DispatchFromDyn};
+use core::pin::PinCoerceUnsized;
+use core::ptr::NonNull;
+#[cfg(not(no_global_oom_handling))]
+use core::str;
+
+use crate::alloc::Global;
+#[cfg(not(no_global_oom_handling))]
+use crate::boxed::Box;
+use crate::raw_rc::RcOps;
+use crate::raw_rc::raw_weak::RawWeak;
+#[cfg(not(no_global_oom_handling))]
+use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt};
+#[cfg(not(no_global_oom_handling))]
+use crate::string::String;
+#[cfg(not(no_global_oom_handling))]
+use crate::vec::Vec;
+
+/// Decrements the strong reference count in the reference-counted allocation whose value object is
+/// pointed to by `value_ptr`.
+unsafe fn decrement_strong_ref_count<R>(value_ptr: NonNull<()>) -> bool
+where
+    R: RcOps,
+{
+    unsafe { R::decrement_ref_count(super::strong_count_ptr_from_value_ptr(value_ptr).as_ref()) }
+}
+
+/// Increments the strong reference count in the reference-counted allocation whose value object is
+/// pointed to by `value_ptr`.
+unsafe fn increment_strong_ref_count<R>(value_ptr: NonNull<()>)
+where
+    R: RcOps,
+{
+    unsafe { R::increment_ref_count(super::strong_count_ptr_from_value_ptr(value_ptr).as_ref()) }
+}
+
+unsafe fn is_unique<R>(value_ptr: NonNull<()>) -> bool
+where
+    R: RcOps,
+{
+    unsafe { R::is_unique(super::ref_counts_ptr_from_value_ptr(value_ptr).as_ref()) }
+}
+
+/// Base implementation of a strong pointer. `RawRc` does not implement `Drop`; the user should
+/// call `RawRc::drop` manually to drop this object.
+#[repr(transparent)]
+pub(crate) struct RawRc<T, A>
+where
+    T: ?Sized,
+{
+    /// A `RawRc` is just a `RawWeak` that owns one strong reference count. The weak pointer is
+    /// always non-dangling.
+    weak: RawWeak<T, A>,
+
+    // Defines the ownership of `T` for drop-check.
+    _phantom_data: PhantomData<T>,
+}
+
+impl<T, A> RawRc<T, A>
+where
+    T: ?Sized,
+{
+    /// # Safety
+    ///
+    /// - `ptr` points to a value inside a reference-counted allocation.
+    /// - The allocation can be freed by `A::default()`.
+    pub(crate) unsafe fn from_raw(ptr: NonNull<T>) -> Self
+    where
+        A: Default,
+    {
+        unsafe { Self::from_raw_parts(ptr, A::default()) }
+    }
+
+    /// # Safety
+    ///
+    /// - `ptr` points to a value inside a reference-counted allocation.
+    /// - The allocation can be freed by `alloc`.
+    pub(crate) unsafe fn from_raw_parts(ptr: NonNull<T>, alloc: A) -> Self {
+        unsafe { Self::from_weak(RawWeak::from_raw_parts(ptr, alloc)) }
+    }
+
+    /// # Safety
+    ///
+    /// `weak` must have at least one unowned strong reference count. The newly created `RawRc`
+    /// will take ownership of exactly one strong reference count.
+    pub(super) unsafe fn from_weak(weak: RawWeak<T, A>) -> Self {
+        Self { weak, _phantom_data: PhantomData }
+    }
+
+    pub(crate) fn allocator(&self) -> &A {
+        self.weak.allocator()
+    }
+
+    pub(crate) fn as_ptr(&self) -> NonNull<T> {
+        self.weak.as_ptr()
+    }
+
+    pub(crate) unsafe fn cast<U>(self) -> RawRc<U, A> {
+        unsafe { RawRc::from_weak(self.weak.cast()) }
+    }
+
+    #[inline]
+    pub(crate) unsafe fn cast_with<U, F>(self, f: F) -> RawRc<U, A>
+    where
+        U: ?Sized,
+        F: FnOnce(NonNull<T>) -> NonNull<U>,
+    {
+        unsafe { RawRc::from_weak(self.weak.cast_with(f)) }
+    }
+
+    #[inline]
+    pub(crate) unsafe fn clone<R>(&self) -> Self
+    where
+        A: Clone,
+        R: RcOps,
+    {
+        unsafe {
+            increment_strong_ref_count::<R>(self.weak.as_ptr().cast());
+
+            Self::from_raw_parts(self.weak.as_ptr(), self.allocator().clone())
+        }
+    }
+
+    pub(crate) unsafe fn decrement_strong_count<R: RcOps>(ptr: NonNull<T>)
+    where
+        A: Allocator + Default,
+    {
+        unsafe { Self::decrement_strong_count_in::<R>(ptr, A::default()) };
+    }
+
+    pub(crate) unsafe fn decrement_strong_count_in<R: RcOps>(ptr: NonNull<T>, alloc: A)
+    where
+        A: Allocator,
+    {
+        unsafe { RawRc::from_raw_parts(ptr, alloc).drop::<R>() };
+    }
+
+    pub(crate) unsafe fn increment_strong_count<R: RcOps>(ptr: NonNull<T>) {
+        unsafe { increment_strong_ref_count::<R>(ptr.cast()) };
+    }
+
+    pub(crate) unsafe fn downgrade<R>(&self) -> RawWeak<T, A>
+    where
+        A: Clone,
+        R: RcOps,
+    {
+        unsafe fn inner<R>(value_ptr: NonNull<()>)
+        where
+            R: RcOps,
+        {
+            unsafe { R::downgrade(super::weak_count_ptr_from_value_ptr(value_ptr).as_ref()) };
+        }
+
+        unsafe {
+            inner::<R>(self.weak.as_ptr().cast());
+
+            RawWeak::from_raw_parts(self.weak.as_ptr(), self.allocator().clone())
+        }
+    }
+
+    #[inline]
+    pub(crate) unsafe fn drop<R>(&mut self)
+    where
+        A: Allocator,
+        R: RcOps,
+    {
+        let is_last_strong_ref =
+            unsafe { decrement_strong_ref_count::<R>(self.weak.as_ptr().cast()) };
+
+        if is_last_strong_ref {
+            unsafe { self.weak.assume_init_drop::<R>() }
+        }
+    }
+
+    pub(crate) unsafe fn get_mut<R>(&mut self) -> Option<&mut T>
+    where
+        R: RcOps,
+    {
+        unsafe fn inner<R>(value_ptr: NonNull<()>) -> Option<NonNull<()>>
+        where
+            R: RcOps,
+        {
+            unsafe { is_unique::<R>(value_ptr) }.then_some(value_ptr)
+        }
+
+        let (ptr, metadata) = self.weak.as_ptr().to_raw_parts();
+
+        unsafe { inner::<R>(ptr) }
+            .map(|ptr| unsafe { NonNull::from_raw_parts(ptr, metadata).as_mut() })
+    }
+
+    /// Returns a mutable reference to the contained value.
+    ///
+    /// # Safety
+    ///
+    /// No other active references to the contained value may exist, and no new references to the
+    /// contained value may be acquired for the duration of the returned borrow.
+    pub(crate) unsafe fn get_mut_unchecked(&mut self) -> &mut T {
+        // SAFETY: The caller guarantees that we can access the contained value exclusively. Note
+        // that we can't create mutable references that have access to reference counters, because
+        // the caller only guarantees exclusive access to the contained value, not the reference
+        // counters.
+        unsafe { self.weak.as_ptr().as_mut() }
+    }
+
+    pub(crate) fn into_raw(self) -> NonNull<T> {
+        self.weak.into_raw()
+    }
+
+    pub(crate) fn into_raw_parts(self) -> (NonNull<T>, A) {
+        self.weak.into_raw_parts()
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn make_mut<R>(&mut self) -> &mut T
+    where
+        T: CloneToUninit,
+        A: Allocator + Clone,
+        R: RcOps,
+    {
+        use core::ptr;
+
+        use crate::raw_rc::MakeMutStrategy;
+        use crate::raw_rc::raw_weak::WeakGuard;
+
+        struct SetRcPtrOnDrop<'a, T, A>
+        where
+            T: ?Sized,
+        {
+            rc: &'a mut RawRc<T, A>,
+            new_ptr: NonNull<T>,
+        }
+
+        impl<T, A> Drop for SetRcPtrOnDrop<'_, T, A>
+        where
+            T: ?Sized,
+        {
+            fn drop(&mut self) {
+                unsafe { self.rc.weak.set_ptr(self.new_ptr) };
+            }
+        }
+
+        unsafe {
+            if let Some(strategy) = R::make_mut(self.ref_counts()) {
+                let rc_layout = RcLayout::from_value_ptr_unchecked(self.weak.as_ptr());
+
+                match strategy {
+                    MakeMutStrategy::Move => {
+                        // `R::make_mut` has set the strong reference count to zero, so the
+                        // `RawRc` object is essentially a `RawWeak` object but has its value
+                        // initialized. This means we are the only owner of the value and we can
+                        // safely move the value into a new allocation.
+
+                        // This guarantees that the old `RawRc` object is dropped even if the
+                        // allocation panics.
+                        let guard = WeakGuard::<T, A, R>::new(&mut self.weak);
+
+                        let new_ptr = super::allocate_with_bytes_in::<A, 1>(
+                            guard.as_ptr().cast(),
+                            &guard.allocator(),
+                            rc_layout,
+                        );
+
+                        // No panic happens, defuse the guard.
+                        mem::forget(guard);
+
+                        let new_ptr = NonNull::from_raw_parts(
+                            new_ptr,
+                            ptr::metadata(self.weak.as_ptr().as_ptr()),
+                        );
+
+                        // Ensure the value pointer in `self` is updated to `new_ptr`.
+                        let update_ptr_on_drop = SetRcPtrOnDrop { rc: self, new_ptr };
+
+                        // `MakeMutStrategy::Move` guarantees that the strong count is zero, and
+                        // we have copied the value to a new allocation, so the original `RawRc`
+                        // is now essentially a `RawWeak` object and we can call the `RawWeak`
+                        // destructor to finish the cleanup.
+                        update_ptr_on_drop.rc.weak.drop_unchecked::<R>();
+                    }
+                    MakeMutStrategy::Clone => {
+                        // There are multiple owners of the value, so we need to clone the value
+                        // into a new allocation.
+
+                        let new_ptr = super::allocate_with_in::<A, _, 1>(
+                            &self.allocator(),
+                            rc_layout,
+                            |dst_ptr| T::clone_to_uninit(self.as_ref(), dst_ptr.as_ptr().cast()),
+                        );
+
+                        let new_ptr = NonNull::from_raw_parts(
+                            new_ptr,
+                            ptr::metadata(self.weak.as_ptr().as_ptr()),
+                        );
+
+                        // Ensure the value pointer in `self` is updated to `new_ptr`.
+                        let update_ptr_on_drop = SetRcPtrOnDrop { rc: self, new_ptr };
+
+                        // Manually drop the old `RawRc`.
+                        update_ptr_on_drop.rc.drop::<R>();
+                    }
+                }
+            }
+
+            self.get_mut_unchecked()
+        }
+    }
+
+    pub(crate) fn ptr_eq(&self, other: &Self) -> bool {
+        RawWeak::ptr_eq(&self.weak, &other.weak)
+    }
+
+    pub(crate) fn ptr_ne(&self, other: &Self) -> bool {
+        RawWeak::ptr_ne(&self.weak, &other.weak)
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn ref_counts(&self) -> &crate::raw_rc::RefCounts {
+        unsafe { self.weak.ref_counts_unchecked() }
+    }
+
+    pub(crate) fn strong_count(&self) -> &UnsafeCell<usize> {
+        unsafe { self.weak.strong_count_unchecked() }
+    }
+
+    pub(crate) fn weak_count(&self) -> &UnsafeCell<usize> {
+        unsafe { self.weak.weak_count_unchecked() }
+    }
+}
+
+impl<T, A> RawRc<T, A> {
+    unsafe fn from_weak_with_value(weak: RawWeak<T, A>, value: T) -> Self {
+        unsafe {
+            weak.as_ptr().write(value);
+
+            Self::from_weak(weak)
+        }
+    }
+
+    #[inline]
+    pub(crate) fn try_new(value: T) -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        RawWeak::try_new_uninit::<1>()
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
+    #[inline]
+    pub(crate) fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        RawWeak::try_new_uninit_in::<1>(alloc)
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new(value: T) -> Self
+    where
+        A: Allocator + Default,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<1>(), value) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new_in(value: T, alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<1>(alloc), value) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    fn new_with<F>(f: F) -> Self
+    where
+        A: Allocator + Default,
+        F: FnOnce() -> T,
+    {
+        let (ptr, alloc) =
+            super::allocate_with::<A, _, 1>(T::RC_LAYOUT, |ptr| unsafe { ptr.cast().write(f()) });
+
+        unsafe { Self::from_raw_parts(ptr.cast(), alloc) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    unsafe fn new_cyclic_impl<F, R>(mut weak: RawWeak<T, A>, data_fn: F) -> Self
+    where
+        A: Allocator,
+        F: FnOnce(&RawWeak<T, A>) -> T,
+        R: RcOps,
+    {
+        use crate::raw_rc::raw_unique_rc::RawUniqueRc;
+        use crate::raw_rc::raw_weak::WeakGuard;
+
+        let guard = unsafe { WeakGuard::<T, A, R>::new(&mut weak) };
+        let data = data_fn(&guard);
+
+        mem::forget(guard);
+
+        unsafe { RawUniqueRc::from_weak_with_value(weak, data).into_rc::<R>() }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn new_cyclic<F, R>(data_fn: F) -> Self
+    where
+        A: Allocator + Default,
+        F: FnOnce(&RawWeak<T, A>) -> T,
+        R: RcOps,
+    {
+        let weak = RawWeak::new_uninit::<0>();
+
+        unsafe { Self::new_cyclic_impl::<F, R>(weak, data_fn) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn new_cyclic_in<F, R>(data_fn: F, alloc: A) -> Self
+    where
+        A: Allocator,
+        F: FnOnce(&RawWeak<T, A>) -> T,
+        R: RcOps,
+    {
+        let weak = RawWeak::new_uninit_in::<0>(alloc);
+
+        unsafe { Self::new_cyclic_impl::<F, R>(weak, data_fn) }
+    }
+
+    pub(crate) unsafe fn into_inner<R>(self) -> Option<T>
+    where
+        A: Allocator,
+        R: RcOps,
+    {
+        let is_last_strong_ref =
+            unsafe { decrement_strong_ref_count::<R>(self.weak.as_ptr().cast()) };
+
+        is_last_strong_ref.then(|| unsafe { self.weak.assume_init_into_inner::<R>() })
+    }
+
+    pub(crate) unsafe fn try_unwrap<R>(self) -> Result<T, RawRc<T, A>>
+    where
+        A: Allocator,
+        R: RcOps,
+    {
+        unsafe fn inner<R>(value_ptr: NonNull<()>) -> bool
+        where
+            R: RcOps,
+        {
+            unsafe {
+                R::lock_strong_count(super::strong_count_ptr_from_value_ptr(value_ptr).as_ref())
+            }
+        }
+
+        let is_last_strong_ref = unsafe { inner::<R>(self.weak.as_ptr().cast()) };
+
+        if is_last_strong_ref {
+            Ok(unsafe { self.weak.assume_init_into_inner::<R>() })
+        } else {
+            Err(self)
+        }
+    }
+
+    pub(crate) unsafe fn unwrap_or_clone<R>(self) -> T
+    where
+        T: Clone,
+        A: Allocator,
+        R: RcOps,
+    {
+        /// Calls `RawRc::drop` on drop.
+        struct Guard<'a, T, A, R>
+        where
+            T: ?Sized,
+            A: Allocator,
+            R: RcOps,
+        {
+            rc: &'a mut RawRc<T, A>,
+            _phantom_data: PhantomData<R>,
+        }
+
+        impl<T, A, R> Drop for Guard<'_, T, A, R>
+        where
+            T: ?Sized,
+            A: Allocator,
+            R: RcOps,
+        {
+            fn drop(&mut self) {
+                unsafe { self.rc.drop::<R>() };
+            }
+        }
+
+        unsafe {
+            self.try_unwrap::<R>().unwrap_or_else(|mut rc| {
+                let guard = Guard::<T, A, R> { rc: &mut rc, _phantom_data: PhantomData };
+
+                T::clone(guard.rc.as_ref())
+            })
+        }
+    }
+}
+
+impl<T, A> RawRc<MaybeUninit<T>, A> {
+    pub(crate) fn try_new_uninit() -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        RawWeak::try_new_uninit::<1>().map(|weak| unsafe { Self::from_weak(weak) })
+    }
+
+    pub(crate) fn try_new_uninit_in(alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        RawWeak::try_new_uninit_in::<1>(alloc).map(|weak| unsafe { Self::from_weak(weak) })
+    }
+
+    pub(crate) fn try_new_zeroed() -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        RawWeak::try_new_zeroed::<1>().map(|weak| unsafe { Self::from_weak(weak) })
+    }
+
+    pub(crate) fn try_new_zeroed_in(alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        RawWeak::try_new_zeroed_in::<1>(alloc).map(|weak| unsafe { Self::from_weak(weak) })
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit() -> Self
+    where
+        A: Allocator + Default,
+    {
+        unsafe { Self::from_weak(RawWeak::new_uninit::<1>()) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit_in(alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe { Self::from_weak(RawWeak::new_uninit_in::<1>(alloc)) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed() -> Self
+    where
+        A: Allocator + Default,
+    {
+        unsafe { Self::from_weak(RawWeak::new_zeroed::<1>()) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed_in(alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe { Self::from_weak(RawWeak::new_zeroed_in::<1>(alloc)) }
+    }
+
+    pub(crate) unsafe fn assume_init(self) -> RawRc<T, A> {
+        unsafe { self.cast() }
+    }
+}
+
+impl<T, A> RawRc<[T], A> {
+    #[cfg(not(no_global_oom_handling))]
+    fn from_trusted_len_iter<I>(iter: I) -> Self
+    where
+        A: Allocator + Default,
+        I: TrustedLen<Item = T>,
+    {
+        /// Used for dropping initialized elements in the slice if the iteration process panics.
+        struct Guard<T> {
+            head: NonNull<T>,
+            tail: NonNull<T>,
+        }
+
+        impl<T> Drop for Guard<T> {
+            fn drop(&mut self) {
+                unsafe {
+                    let length = self.tail.offset_from_unsigned(self.head);
+
+                    NonNull::<[T]>::slice_from_raw_parts(self.head, length).drop_in_place();
+                }
+            }
+        }
+
+        let (length, Some(high)) = iter.size_hint() else {
+            // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
+            // length exceeding `usize::MAX`.
+            // The default implementation would collect into a vec which would panic.
+            // Thus we panic here immediately without invoking `Vec` code.
+            panic!("capacity overflow");
+        };
+
+        debug_assert_eq!(
+            length,
+            high,
+            "TrustedLen iterator's size hint is not exact: {:?}",
+            (length, high)
+        );
+
+        let rc_layout = RcLayout::new_array::<T>(length);
+
+        let (ptr, alloc) = super::allocate_with::<A, _, 1>(rc_layout, |ptr| {
+            let ptr = ptr.cast::<T>();
+            let mut guard = Guard::<T> { head: ptr, tail: ptr };
+
+            // SAFETY: `iter` is `TrustedLen`, so we can assume we will write the correct number
+            // of elements to the buffer.
+            iter.for_each(|value| unsafe {
+                guard.tail.write(value);
+                guard.tail = guard.tail.add(1);
+            });
+
+            mem::forget(guard);
+        });
+
+        // SAFETY: We have written `length` values of type `T` to the buffer, so the buffer is now
+        // initialized.
+        unsafe {
+            Self::from_raw_parts(NonNull::slice_from_raw_parts(ptr.cast::<T>(), length), alloc)
+        }
+    }
+
+    pub(crate) unsafe fn into_array<const N: usize, R>(self) -> Option<RawRc<[T; N], A>>
+    where
+        A: Allocator,
+        R: RcOps,
+    {
+        match RawRc::<[T; N], A>::try_from(self) {
+            Ok(result) => Some(result),
+            Err(mut raw_rc) => {
+                unsafe { raw_rc.drop::<R>() };
+
+                None
+            }
+        }
+    }
+}
+
+impl<T, A> RawRc<[MaybeUninit<T>], A> {
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit_slice(length: usize) -> Self
+    where
+        A: Allocator + Default,
+    {
+        unsafe { Self::from_weak(RawWeak::new_uninit_slice::<1>(length)) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit_slice_in(length: usize, alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe { Self::from_weak(RawWeak::new_uninit_slice_in::<1>(length, alloc)) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed_slice(length: usize) -> Self
+    where
+        A: Allocator + Default,
+    {
+        unsafe { Self::from_weak(RawWeak::new_zeroed_slice::<1>(length)) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed_slice_in(length: usize, alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe { Self::from_weak(RawWeak::new_zeroed_slice_in::<1>(length, alloc)) }
+    }
+
+    pub(crate) unsafe fn assume_init(self) -> RawRc<[T], A> {
+        unsafe { self.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) }
+    }
+}
+
+impl<A> RawRc<dyn Any, A> {
+    pub(crate) fn downcast<T>(self) -> Result<RawRc<T, A>, Self>
+    where
+        T: Any,
+    {
+        if self.as_ref().is::<T>() { Ok(unsafe { self.downcast_unchecked() }) } else { Err(self) }
+    }
+
+    pub(crate) unsafe fn downcast_unchecked<T>(self) -> RawRc<T, A>
+    where
+        T: Any,
+    {
+        unsafe { self.cast() }
+    }
+}
+
+impl<T, A> AsRef<T> for RawRc<T, A>
+where
+    T: ?Sized,
+{
+    fn as_ref(&self) -> &T {
+        unsafe { self.weak.as_ptr().as_ref() }
+    }
+}
+
+impl<T, U, A> CoerceUnsized<RawRc<U, A>> for RawRc<T, A>
+where
+    T: Unsize<U> + ?Sized,
+    U: ?Sized,
+{
+}
+
+impl<T, A> Debug for RawRc<T, A>
+where
+    T: Debug + ?Sized,
+{
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        <T as Debug>::fmt(self.as_ref(), f)
+    }
+}
+
+impl<T, A> Display for RawRc<T, A>
+where
+    T: Display + ?Sized,
+{
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        <T as Display>::fmt(self.as_ref(), f)
+    }
+}
+
+impl<T, U> DispatchFromDyn<RawRc<U, Global>> for RawRc<T, Global>
+where
+    T: Unsize<U> + ?Sized,
+    U: ?Sized,
+{
+}
+
+impl<T, A> Error for RawRc<T, A>
+where
+    T: Error + ?Sized,
+{
+    fn source(&self) -> Option<&(dyn Error + 'static)> {
+        T::source(self.as_ref())
+    }
+
+    #[allow(deprecated, deprecated_in_future)]
+    fn description(&self) -> &str {
+        T::description(self.as_ref())
+    }
+
+    #[allow(deprecated)]
+    fn cause(&self) -> Option<&dyn Error> {
+        T::cause(self.as_ref())
+    }
+
+    fn provide<'a>(&'a self, request: &mut Request<'a>) {
+        T::provide(self.as_ref(), request);
+    }
+}
+
+impl<T, A> Pointer for RawRc<T, A>
+where
+    T: ?Sized,
+{
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        <&T as Pointer>::fmt(&self.as_ref(), f)
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T, A> Default for RawRc<T, A>
+where
+    T: Default,
+    A: Allocator + Default,
+{
+    fn default() -> Self {
+        Self::new_with(T::default)
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T, A> Default for RawRc<[T], A>
+where
+    A: Allocator + Default,
+{
+    fn default() -> Self {
+        RawRc::<[T; 0], A>::default()
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<A> Default for RawRc<str, A>
+where
+    A: Allocator + Default,
+{
+    fn default() -> Self {
+        let empty_slice = RawRc::<[u8], A>::default();
+
+        // SAFETY: Empty slice is a valid `str`.
+        unsafe { empty_slice.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as *mut _)) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T, A> From<T> for RawRc<T, A>
+where
+    A: Allocator + Default,
+{
+    fn from(value: T) -> Self {
+        Self::new(value)
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T, A> From<Box<T, A>> for RawRc<T, A>
+where
+    T: ?Sized,
+    A: Allocator,
+{
+    fn from(value: Box<T, A>) -> Self {
+        let value_ref = &*value;
+        let alloc_ref = Box::allocator(&value);
+
+        unsafe {
+            let value_ptr = super::allocate_with_value_in::<T, A, 1>(value_ref, alloc_ref);
+            let (box_ptr, alloc) = Box::into_raw_with_allocator(value);
+
+            drop(Box::from_raw_in(box_ptr as *mut ManuallyDrop<T>, &alloc));
+
+            Self::from_raw_parts(value_ptr, alloc)
+        }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+trait SpecRawRcFromSlice<T> {
+    fn spec_from_slice(slice: &[T]) -> Self;
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T, A> SpecRawRcFromSlice<T> for RawRc<[T], A>
+where
+    T: Clone,
+    A: Allocator + Default,
+{
+    default fn spec_from_slice(slice: &[T]) -> Self {
+        Self::from_trusted_len_iter(slice.iter().cloned())
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T, A> SpecRawRcFromSlice<T> for RawRc<[T], A>
+where
+    T: Copy,
+    A: Allocator + Default,
+{
+    fn spec_from_slice(slice: &[T]) -> Self {
+        let (ptr, alloc) = super::allocate_with_value::<[T], A, 1>(slice);
+
+        unsafe { Self::from_raw_parts(ptr, alloc) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T, A> From<&[T]> for RawRc<[T], A>
+where
+    T: Clone,
+    A: Allocator + Default,
+{
+    fn from(value: &[T]) -> Self {
+        Self::spec_from_slice(value)
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T, A> From<&mut [T]> for RawRc<[T], A>
+where
+    T: Clone,
+    A: Allocator + Default,
+{
+    fn from(value: &mut [T]) -> Self {
+        Self::from(&*value)
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<A> From<&str> for RawRc<str, A>
+where
+    A: Allocator + Default,
+{
+    #[inline]
+    fn from(value: &str) -> Self {
+        let rc_of_bytes = RawRc::<[u8], A>::from(value.as_bytes());
+
+        unsafe { rc_of_bytes.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<A> From<&mut str> for RawRc<str, A>
+where
+    A: Allocator + Default,
+{
+    fn from(value: &mut str) -> Self {
+        Self::from(&*value)
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl From<String> for RawRc<str, Global> {
+    fn from(value: String) -> Self {
+        let rc_of_bytes = RawRc::<[u8], Global>::from(value.into_bytes());
+
+        unsafe { rc_of_bytes.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) }
+    }
+}
+
+impl<A> From<RawRc<str, A>> for RawRc<[u8], A> {
+    fn from(value: RawRc<str, A>) -> Self {
+        unsafe { value.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T, const N: usize, A> From<[T; N]> for RawRc<[T], A>
+where
+    A: Allocator + Default,
+{
+    fn from(value: [T; N]) -> Self {
+        RawRc::new(value)
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T, A> From<Vec<T, A>> for RawRc<[T], A>
+where
+    A: Allocator,
+{
+    fn from(value: Vec<T, A>) -> Self {
+        let src = &*value;
+        let alloc = value.allocator();
+        let value_ptr = super::allocate_with_value_in::<[T], A, 1>(src, alloc);
+        let (vec_ptr, _length, capacity, alloc) = value.into_raw_parts_with_alloc();
+
+        unsafe {
+            drop(Vec::from_raw_parts_in(vec_ptr, 0, capacity, &alloc));
+
+            Self::from_raw_parts(value_ptr, alloc)
+        }
+    }
+}
+
+impl<T, const N: usize, A> TryFrom<RawRc<[T], A>> for RawRc<[T; N], A> {
+    type Error = RawRc<[T], A>;
+
+    fn try_from(value: RawRc<[T], A>) -> Result<Self, Self::Error> {
+        if value.as_ref().len() == N { Ok(unsafe { value.cast() }) } else { Err(value) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+trait SpecRawRcFromIter<I> {
+    fn spec_from_iter(iter: I) -> Self;
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<I> SpecRawRcFromIter<I> for RawRc<[I::Item], Global>
+where
+    I: Iterator,
+{
+    default fn spec_from_iter(iter: I) -> Self {
+        Self::from(iter.collect::<Vec<_>>())
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<I> SpecRawRcFromIter<I> for RawRc<[I::Item], Global>
+where
+    I: TrustedLen,
+{
+    fn spec_from_iter(iter: I) -> Self {
+        Self::from_trusted_len_iter(iter)
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T> FromIterator<T> for RawRc<[T], Global> {
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
+        Self::spec_from_iter(iter.into_iter())
+    }
+}
+
+impl<T, A> Hash for RawRc<T, A>
+where
+    T: Hash + ?Sized,
+{
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        T::hash(self.as_ref(), state);
+    }
+}
+
+// Hack to allow specializing on `Eq` even though `Eq` has a method.
+#[rustc_unsafe_specialization_marker]
+trait MarkerEq: PartialEq<Self> {}
+
+impl<T> MarkerEq for T where T: Eq {}
+
+trait SpecPartialEq {
+    fn spec_eq(&self, other: &Self) -> bool;
+    fn spec_ne(&self, other: &Self) -> bool;
+}
+
+impl<T, A> SpecPartialEq for RawRc<T, A>
+where
+    T: PartialEq + ?Sized,
+{
+    #[inline]
+    default fn spec_eq(&self, other: &Self) -> bool {
+        T::eq(self.as_ref(), other.as_ref())
+    }
+
+    #[inline]
+    default fn spec_ne(&self, other: &Self) -> bool {
+        T::ne(self.as_ref(), other.as_ref())
+    }
+}
+
+/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
+/// would otherwise add a cost to all equality checks on refs. We assume that `RawRc`s are used to
+/// store large values, that are slow to clone, but also heavy to check for equality, causing this
+/// cost to pay off more easily. It's also more likely to have two `RawRc` clones that point to
+/// the same value than two `&T`s.
+///
+/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
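+///
+/// A sketch of the shortcut with plain references instead of `RawRc` (illustrative only): for a
+/// reflexive `Eq` type, pointer identity alone already proves value equality.
+///
+/// ```
+/// fn fast_eq<T: Eq>(a: &T, b: &T) -> bool {
+///     core::ptr::eq(a, b) || a == b
+/// }
+///
+/// let value = String::from("some large value");
+/// assert!(fast_eq(&value, &value)); // the pointer check short-circuits the deep comparison
+/// ```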
+impl<T, A> SpecPartialEq for RawRc<T, A>
+where
+    T: MarkerEq + ?Sized,
+{
+    #[inline]
+    fn spec_eq(&self, other: &Self) -> bool {
+        Self::ptr_eq(self, other) || T::eq(self.as_ref(), other.as_ref())
+    }
+
+    #[inline]
+    fn spec_ne(&self, other: &Self) -> bool {
+        Self::ptr_ne(self, other) && T::ne(self.as_ref(), other.as_ref())
+    }
+}
+
+impl<T, A> PartialEq for RawRc<T, A>
+where
+    T: PartialEq + ?Sized,
+{
+    fn eq(&self, other: &Self) -> bool {
+        Self::spec_eq(self, other)
+    }
+
+    fn ne(&self, other: &Self) -> bool {
+        Self::spec_ne(self, other)
+    }
+}
+
+impl<T, A> Eq for RawRc<T, A> where T: Eq + ?Sized {}
+
+impl<T, A> PartialOrd for RawRc<T, A>
+where
+    T: PartialOrd + ?Sized,
+{
+    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
+        T::partial_cmp(self.as_ref(), other.as_ref())
+    }
+
+    fn lt(&self, other: &Self) -> bool {
+        T::lt(self.as_ref(), other.as_ref())
+    }
+
+    fn le(&self, other: &Self) -> bool {
+        T::le(self.as_ref(), other.as_ref())
+    }
+
+    fn gt(&self, other: &Self) -> bool {
+        T::gt(self.as_ref(), other.as_ref())
+    }
+
+    fn ge(&self, other: &Self) -> bool {
+        T::ge(self.as_ref(), other.as_ref())
+    }
+}
+
+impl<T, A> Ord for RawRc<T, A>
+where
+    T: Ord + ?Sized,
+{
+    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
+        T::cmp(self.as_ref(), other.as_ref())
+    }
+}
+
+unsafe impl<T, A> PinCoerceUnsized for RawRc<T, A>
+where
+    T: ?Sized,
+    A: Allocator,
+{
+}
diff --git a/library/alloc/src/raw_rc/raw_unique_rc.rs b/library/alloc/src/raw_rc/raw_unique_rc.rs
new file mode 100644
index 0000000000000..abb0c1e447231
--- /dev/null
+++ b/library/alloc/src/raw_rc/raw_unique_rc.rs
@@ -0,0 +1,223 @@
+use core::alloc::Allocator;
+use core::fmt::{self, Debug, Display, Formatter, Pointer};
+use core::hash::{Hash, Hasher};
+use core::marker::{PhantomData, Unsize};
+use core::ops::{CoerceUnsized, DispatchFromDyn};
+use core::ptr::NonNull;
+
+use crate::alloc::Global;
+use crate::raw_rc::RcOps;
+use crate::raw_rc::raw_rc::RawRc;
+use crate::raw_rc::raw_weak::RawWeak;
+
+/// A uniquely owned `RawRc` that allows multiple weak references but only one strong reference.
+/// `RawUniqueRc` does not implement `Drop`; the user should call `RawUniqueRc::drop` manually to
+/// drop this object.
+#[repr(transparent)]
+pub(crate) struct RawUniqueRc<T, A>
+where
+    T: ?Sized,
+{
+    // A `RawUniqueRc` is just a non-dangling `RawWeak` that has a strong count of zero but with
+    // the value initialized.
+    weak: RawWeak<T, A>,
+
+    // Defines the ownership of `T` for drop-check.
+    _marker: PhantomData<T>,
+
+    // Invariance is necessary for soundness: once other `RawWeak` references exist, we already have
+    // a form of shared mutability!
+    _marker2: PhantomData<*mut T>,
+}
+
+impl<T, A> RawUniqueRc<T, A>
+where
+    T: ?Sized,
+{
+    /// Increments the weak count and returns the corresponding `RawWeak` object.
+    ///
+    /// # Safety
+    ///
+    /// - `self` and any derived `RawWeak` or `RawRc` should only be handled by the same `RcOps`
+    ///   implementation.
+    pub(crate) unsafe fn downgrade<R>(&self) -> RawWeak<T, A>
+    where
+        A: Clone,
+        R: RcOps,
+    {
+        // SAFETY: The caller guarantees that we only use the same `RcOps` implementation, and
+        // `self.weak` is never dangling.
+        unsafe { self.weak.clone_unchecked::<R>() }
+    }
+
+    pub(crate) unsafe fn drop<R>(&mut self)
+    where
+        A: Allocator,
+        R: RcOps,
+    {
+        unsafe { self.weak.assume_init_drop::<R>() };
+    }
+
+    pub(crate) unsafe fn into_rc<R>(self) -> RawRc<T, A>
+    where
+        R: RcOps,
+    {
+        unsafe fn inner<R>(value_ptr: NonNull<()>)
+        where
+            R: RcOps,
+        {
+            unsafe {
+                R::unlock_strong_count(super::strong_count_ptr_from_value_ptr(value_ptr).as_ref());
+            }
+        }
+
+        unsafe {
+            inner::<R>(self.weak.as_ptr().cast());
+
+            RawRc::from_weak(self.weak)
+        }
+    }
+}
+
+impl<T, A> RawUniqueRc<T, A> {
+    #[cfg(not(no_global_oom_handling))]
+    pub(super) unsafe fn from_weak_with_value(weak: RawWeak<T, A>, value: T) -> Self {
+        unsafe { weak.as_ptr().write(value) };
+
+        Self { weak, _marker: PhantomData, _marker2: PhantomData }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new(value: T) -> Self
+    where
+        A: Allocator + Default,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<0>(), value) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_in(value: T, alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<0>(alloc), value) }
+    }
+}
+
+impl<T, A> AsMut<T> for RawUniqueRc<T, A>
+where
+    T: ?Sized,
+{
+    fn as_mut(&mut self) -> &mut T {
+        unsafe { self.weak.as_ptr().as_mut() }
+    }
+}
+
+impl<T, A> AsRef<T> for RawUniqueRc<T, A>
+where
+    T: ?Sized,
+{
+    fn as_ref(&self) -> &T {
+        unsafe { self.weak.as_ptr().as_ref() }
+    }
+}
+
+impl<T, U, A> CoerceUnsized<RawUniqueRc<U, A>> for RawUniqueRc<T, A>
+where
+    T: Unsize<U> + ?Sized,
+    U: ?Sized,
+    A: Allocator,
+{
+}
+
+impl<T, A> Debug for RawUniqueRc<T, A>
+where
+    T: Debug + ?Sized,
+{
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        <T as Debug>::fmt(self.as_ref(), f)
+    }
+}
+
+impl<T, U> DispatchFromDyn<RawUniqueRc<U, Global>> for RawUniqueRc<T, Global>
+where
+    T: Unsize<U> + ?Sized,
+    U: ?Sized,
+{
+}
+
+impl<T, A> Display for RawUniqueRc<T, A>
+where
+    T: Display + ?Sized,
+{
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        <T as Display>::fmt(self.as_ref(), f)
+    }
+}
+
+impl<T, A> Eq for RawUniqueRc<T, A> where T: Eq + ?Sized {}
+
+impl<T, A> Hash for RawUniqueRc<T, A>
+where
+    T: Hash + ?Sized,
+{
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        T::hash(self.as_ref(), state);
+    }
+}
+
+impl<T, A> Ord for RawUniqueRc<T, A>
+where
+    T: Ord + ?Sized,
+{
+    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
+        T::cmp(self.as_ref(), other.as_ref())
+    }
+}
+
+impl<T, A> PartialEq for RawUniqueRc<T, A>
+where
+    T: PartialEq + ?Sized,
+{
+    fn eq(&self, other: &Self) -> bool {
+        T::eq(self.as_ref(), other.as_ref())
+    }
+
+    fn ne(&self, other: &Self) -> bool {
+        T::ne(self.as_ref(), other.as_ref())
+    }
+}
+
+impl<T, A> PartialOrd for RawUniqueRc<T, A>
+where
+    T: PartialOrd + ?Sized,
+{
+    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
+        T::partial_cmp(self.as_ref(), other.as_ref())
+    }
+
+    fn lt(&self, other: &Self) -> bool {
+        T::lt(self.as_ref(), other.as_ref())
+    }
+
+    fn le(&self, other: &Self) -> bool {
+        T::le(self.as_ref(), other.as_ref())
+    }
+
+    fn gt(&self, other: &Self) -> bool {
+        T::gt(self.as_ref(), other.as_ref())
+    }
+
+    fn ge(&self, other: &Self) -> bool {
+        T::ge(self.as_ref(), other.as_ref())
+    }
+}
+
+impl<T, A> Pointer for RawUniqueRc<T, A>
+where
+    T: ?Sized,
+{
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        <&T as Pointer>::fmt(&self.as_ref(), f)
+    }
+}
diff --git a/library/alloc/src/raw_rc/raw_weak.rs b/library/alloc/src/raw_rc/raw_weak.rs
new file mode 100644
index 0000000000000..9ddc400ca6899
--- /dev/null
+++ b/library/alloc/src/raw_rc/raw_weak.rs
@@ -0,0 +1,566 @@
+use core::alloc::{AllocError, Allocator};
+use core::cell::UnsafeCell;
+use core::fmt::{self, Debug, Formatter};
+use core::marker::{PhantomData, Unsize};
+use core::mem;
+use core::num::NonZeroUsize;
+use core::ops::{CoerceUnsized, Deref, DerefMut, DispatchFromDyn};
+use core::ptr::{self, NonNull};
+
+use crate::alloc::Global;
+use crate::raw_rc::raw_rc::RawRc;
+use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt};
+use crate::raw_rc::{RcOps, RefCounts};
+
+// We choose `NonZeroUsize::MAX` as the address for dangling weak pointers because:
+//
+// - It does not point to any object that is stored inside a reference-counted allocation, because
+//   otherwise the corresponding `RefCounts` object would be placed at
+//   `NonZeroUsize::MAX - size_of::<RefCounts>()`, which is an odd number that violates
+//   `RefCounts`'s alignment requirement.
+// - All bytes in the byte representation of `NonZeroUsize::MAX` are the same, which makes it
+//   possible to utilize `memset` in certain situations like creating an array of dangling weak
+//   pointers.
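+//
+// For instance, on a 64-bit target where `RefCounts` is two `usize` counters (16 bytes with 8-byte
+// alignment), the hypothetical `RefCounts` would sit at `usize::MAX - 16`, an odd address that can
+// never satisfy the 8-byte alignment requirement.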
+const DANGLING_WEAK_ADDRESS: NonZeroUsize = NonZeroUsize::MAX;
+
+// Verify that `DANGLING_WEAK_ADDRESS` is a suitable address for dangling weak pointers.
+const _: () = assert!(
+    DANGLING_WEAK_ADDRESS.get().wrapping_sub(size_of::<RefCounts>()) % align_of::<RefCounts>() != 0
+);
+
+#[inline]
+fn is_dangling(value_ptr: NonNull<()>) -> bool {
+    value_ptr.addr() == DANGLING_WEAK_ADDRESS
+}
+
+/// Decrements weak reference count in a reference-counted allocation with a value object that is
+/// pointed to by `value_ptr`.
+unsafe fn decrement_weak_ref_count<R>(value_ptr: NonNull<()>) -> bool
+where
+    R: RcOps,
+{
+    unsafe { R::decrement_ref_count(super::weak_count_ptr_from_value_ptr(value_ptr).as_ref()) }
+}
+
+/// Increments weak reference count in a reference-counted allocation with a value object that is
+/// pointed to by `value_ptr`.
+unsafe fn increment_weak_ref_count<R>(value_ptr: NonNull<()>)
+where
+    R: RcOps,
+{
+    unsafe { R::increment_ref_count(super::weak_count_ptr_from_value_ptr(value_ptr).as_ref()) }
+}
+
+/// Calls `RawWeak::drop_unchecked` on drop.
+pub(super) struct WeakGuard<'a, T, A, R>
+where
+    T: ?Sized,
+    A: Allocator,
+    R: RcOps,
+{
+    weak: &'a mut RawWeak<T, A>,
+    _phantom_data: PhantomData<R>,
+}
+
+impl<'a, T, A, R> WeakGuard<'a, T, A, R>
+where
+    T: ?Sized,
+    A: Allocator,
+    R: RcOps,
+{
+    /// # Safety
+    ///
+    /// - `weak` is non-dangling.
+    /// - After the `WeakGuard` is dropped, the allocation pointed to by the weak pointer must not
+    ///   be accessed anymore.
+    pub(super) unsafe fn new(weak: &'a mut RawWeak<T, A>) -> Self {
+        Self { weak, _phantom_data: PhantomData }
+    }
+}
+
+impl<T, A, R> Deref for WeakGuard<'_, T, A, R>
+where
+    T: ?Sized,
+    A: Allocator,
+    R: RcOps,
+{
+    type Target = RawWeak<T, A>;
+
+    fn deref(&self) -> &Self::Target {
+        &*self.weak
+    }
+}
+
+impl<T, A, R> DerefMut for WeakGuard<'_, T, A, R>
+where
+    T: ?Sized,
+    A: Allocator,
+    R: RcOps,
+{
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        self.weak
+    }
+}
+
+impl<T, A, R> Drop for WeakGuard<'_, T, A, R>
+where
+    T: ?Sized,
+    A: Allocator,
+    R: RcOps,
+{
+    fn drop(&mut self) {
+        unsafe { self.weak.drop_unchecked::<R>() };
+    }
+}
+
+/// Base implementation of a weak pointer. `RawWeak` does not implement `Drop`; users should call
+/// `RawWeak::drop` or `RawWeak::drop_unchecked` manually to drop this object.
+///
+/// A `RawWeak` can be either dangling or non-dangling. A dangling `RawWeak` does not point to a
+/// valid value. A non-dangling `RawWeak` points to a valid reference-counted allocation. The value
+/// pointed to by a `RawWeak` may be uninitialized.
+pub(crate) struct RawWeak<T, A>
+where
+    T: ?Sized,
+{
+    /// Points to a (possibly uninitialized or dropped) `T` value inside of a reference-counted
+    /// allocation.
+    ptr: NonNull<T>,
+
+    /// The allocator for `ptr`.
+    alloc: A,
+}
+
+impl<T, A> RawWeak<T, A>
+where
+    T: ?Sized,
+{
+    pub(crate) const unsafe fn from_raw_parts(ptr: NonNull<T>, alloc: A) -> Self {
+        Self { ptr, alloc }
+    }
+
+    pub(crate) unsafe fn from_raw(ptr: NonNull<T>) -> Self
+    where
+        A: Default,
+    {
+        unsafe { Self::from_raw_parts(ptr, A::default()) }
+    }
+
+    pub(crate) fn allocator(&self) -> &A {
+        &self.alloc
+    }
+
+    pub(crate) fn as_ptr(&self) -> NonNull<T> {
+        self.ptr
+    }
+
+    #[inline(never)]
+    unsafe fn assume_init_drop_slow<R>(&mut self)
+    where
+        A: Allocator,
+        R: RcOps,
+    {
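+        // Create the guard before dropping the value so that the weak reference is still released
+        // (and the allocation deallocated if it was the last one) even if `T`'s destructor panics.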
+        let guard = unsafe { WeakGuard::<T, A, R>::new(self) };
+
+        unsafe { guard.weak.ptr.drop_in_place() };
+    }
+
+    /// Drops the value along with the `RawWeak` object, assuming the value pointed to by `ptr` is
+    /// initialized.
+    #[inline]
+    pub(super) unsafe fn assume_init_drop<R>(&mut self)
+    where
+        A: Allocator,
+        R: RcOps,
+    {
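+        // If `T` has no drop glue, dropping the value is a no-op, so skip the non-inlined slow
+        // path and just release the weak reference directly.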
+        if const { mem::needs_drop::<T>() } {
+            unsafe { self.assume_init_drop_slow::<R>() };
+        } else {
+            unsafe { self.drop_unchecked::<R>() };
+        }
+    }
+
+    pub(crate) unsafe fn cast<U>(self) -> RawWeak<U, A> {
+        unsafe { self.cast_with(NonNull::cast) }
+    }
+
+    #[inline]
+    pub(crate) unsafe fn cast_with<U, F>(self, f: F) -> RawWeak<U, A>
+    where
+        U: ?Sized,
+        F: FnOnce(NonNull<T>) -> NonNull<U>,
+    {
+        unsafe { RawWeak::from_raw_parts(f(self.ptr), self.alloc) }
+    }
+
+    /// Increments the weak count, and returns the corresponding `RawWeak` object.
+    ///
+    /// # Safety
+    ///
+    /// - `self` should only be handled by the same `RcOps` implementation.
+    #[inline]
+    pub(crate) unsafe fn clone<R>(&self) -> Self
+    where
+        A: Clone,
+        R: RcOps,
+    {
+        unsafe fn inner<R>(ptr: NonNull<()>)
+        where
+            R: RcOps,
+        {
+            if !is_dangling(ptr) {
+                unsafe { increment_weak_ref_count::<R>(ptr) };
+            }
+        }
+
+        unsafe {
+            inner::<R>(self.ptr.cast());
+
+            Self::from_raw_parts(self.ptr, self.alloc.clone())
+        }
+    }
+
+    /// Increments the weak count, and returns the corresponding `RawWeak` object, assuming `self`
+    /// is non-dangling.
+    ///
+    /// # Safety
+    ///
+    /// - `self` should only be handled by the same `RcOps` implementation.
+    /// - `self` is non-dangling.
+    pub(crate) unsafe fn clone_unchecked<R>(&self) -> Self
+    where
+        A: Clone,
+        R: RcOps,
+    {
+        unsafe {
+            increment_weak_ref_count::<R>(self.ptr.cast());
+
+            Self::from_raw_parts(self.ptr, self.alloc.clone())
+        }
+    }
+
+    /// Drops this weak pointer.
+    #[inline]
+    pub(crate) unsafe fn drop<R>(&mut self)
+    where
+        A: Allocator,
+        R: RcOps,
+    {
+        if !is_dangling(self.ptr.cast()) {
+            unsafe { self.drop_unchecked::<R>() };
+        }
+    }
+
+    /// Drops this weak pointer, assuming `self` is non-dangling.
+    #[inline]
+    pub(super) unsafe fn drop_unchecked<R>(&mut self)
+    where
+        A: Allocator,
+        R: RcOps,
+    {
+        let is_last_weak_ref = unsafe { decrement_weak_ref_count::<R>(self.ptr.cast()) };
+
+        if is_last_weak_ref {
+            let rc_layout = unsafe { RcLayout::from_value_ptr_unchecked(self.ptr) };
+
+            unsafe { super::deallocate::<A>(self.ptr.cast(), &self.alloc, rc_layout) }
+        }
+    }
+
+    pub(crate) fn into_raw(self) -> NonNull<T> {
+        self.ptr
+    }
+
+    pub(crate) fn into_raw_parts(self) -> (NonNull<T>, A) {
+        (self.ptr, self.alloc)
+    }
+
+    pub(crate) fn ptr_eq(&self, other: &Self) -> bool {
+        ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
+    }
+
+    pub(crate) fn ptr_ne(&self, other: &Self) -> bool {
+        !ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
+    }
+
+    /// Returns the `RefCounts` object inside the reference-counted allocation, assuming `self` is
+    /// non-dangling.
+    ///
+    /// # Safety
+    ///
+    /// `self` is non-dangling.
+    #[cfg(not(no_global_oom_handling))]
+    pub(super) unsafe fn ref_counts_unchecked(&self) -> &RefCounts {
+        unsafe { super::ref_counts_ptr_from_value_ptr(self.ptr.cast()).as_ref() }
+    }
+
+    /// Returns the strong reference count object inside the reference-counted allocation if `self`
+    /// is non-dangling.
+    pub(crate) fn strong_count(&self) -> Option<&UnsafeCell<usize>> {
+        (!is_dangling(self.ptr.cast())).then(|| unsafe { self.strong_count_unchecked() })
+    }
+
+    /// Returns the strong reference count object inside the reference-counted allocation, assuming
+    /// `self` is non-dangling.
+    ///
+    /// # Safety
+    ///
+    /// `self` is non-dangling.
+    pub(super) unsafe fn strong_count_unchecked(&self) -> &UnsafeCell<usize> {
+        unsafe { super::strong_count_ptr_from_value_ptr(self.ptr.cast()).as_ref() }
+    }
+
+    /// Returns the weak reference count object inside the reference-counted allocation if `self`
+    /// is non-dangling.
+    pub(crate) fn weak_count(&self) -> Option<&UnsafeCell<usize>> {
+        (!is_dangling(self.ptr.cast())).then(|| unsafe { self.weak_count_unchecked() })
+    }
+
+    /// Returns the weak reference count object inside the reference-counted allocation, assuming
+    /// `self` is non-dangling.
+    ///
+    /// # Safety
+    ///
+    /// `self` is non-dangling.
+    pub(super) unsafe fn weak_count_unchecked(&self) -> &UnsafeCell<usize> {
+        unsafe { super::weak_count_ptr_from_value_ptr(self.ptr.cast()).as_ref() }
+    }
+
+    /// Sets the contained pointer to a new value.
+    ///
+    /// # Safety
+    ///
+    /// - `ptr` should be a valid pointer to a value object that lives in a reference-counted
+    ///   allocation.
+    /// - The allocation can be deallocated with the associated allocator.
+    #[cfg(not(no_global_oom_handling))]
+    pub(super) unsafe fn set_ptr(&mut self, ptr: NonNull<T>) {
+        self.ptr = ptr;
+    }
+
+    /// Creates a `RawRc` object if the strong reference count is non-zero.
+    ///
+    /// # Safety
+    ///
+    /// `self` should only be handled by the same `RcOps` implementation.
+    pub(crate) unsafe fn upgrade<R>(&self) -> Option<RawRc<T, A>>
+    where
+        A: Clone,
+        R: RcOps,
+    {
+        unsafe fn inner<R>(value_ptr: NonNull<()>) -> bool
+        where
+            R: RcOps,
+        {
+            (!is_dangling(value_ptr))
+                && unsafe { R::upgrade(super::strong_count_ptr_from_value_ptr(value_ptr).as_ref()) }
+        }
+
+        unsafe {
+            inner::<R>(self.ptr.cast()).then(|| RawRc::from_raw_parts(self.ptr, self.alloc.clone()))
+        }
+    }
+}
+
+impl<T, A> RawWeak<T, A> {
+    pub(crate) fn new_dangling() -> Self
+    where
+        A: Default,
+    {
+        Self::new_dangling_in(A::default())
+    }
+
+    pub(crate) const fn new_dangling_in(alloc: A) -> Self {
+        unsafe { Self::from_raw_parts(NonNull::without_provenance(DANGLING_WEAK_ADDRESS), alloc) }
+    }
+
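+    // `STRONG_COUNT` below is the initial strong count stored in the freshly allocated
+    // `RefCounts`. For example, `RawUniqueRc::new` allocates with `STRONG_COUNT = 0` so that weak
+    // pointers cannot upgrade until `RawUniqueRc::into_rc` unlocks the strong count.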
+    pub(crate) fn try_new_uninit<const STRONG_COUNT: usize>() -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        super::try_allocate_uninit::<A, STRONG_COUNT>(T::RC_LAYOUT)
+            .map(|(ptr, alloc)| unsafe { Self::from_raw_parts(ptr.cast(), alloc) })
+    }
+
+    pub(crate) fn try_new_uninit_in<const STRONG_COUNT: usize>(alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        super::try_allocate_uninit_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT)
+            .map(|ptr| unsafe { Self::from_raw_parts(ptr.cast(), alloc) })
+    }
+
+    pub(crate) fn try_new_zeroed<const STRONG_COUNT: usize>() -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        super::try_allocate_zeroed::<A, STRONG_COUNT>(T::RC_LAYOUT)
+            .map(|(ptr, alloc)| unsafe { Self::from_raw_parts(ptr.cast(), alloc) })
+    }
+
+    pub(crate) fn try_new_zeroed_in<const STRONG_COUNT: usize>(alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        super::try_allocate_zeroed_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT)
+            .map(|ptr| unsafe { Self::from_raw_parts(ptr.cast(), alloc) })
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit<const STRONG_COUNT: usize>() -> Self
+    where
+        A: Allocator + Default,
+    {
+        let (ptr, alloc) = super::allocate_uninit::<A, STRONG_COUNT>(T::RC_LAYOUT);
+
+        unsafe { Self::from_raw_parts(ptr.cast(), alloc) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit_in<const STRONG_COUNT: usize>(alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe {
+            Self::from_raw_parts(
+                super::allocate_uninit_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT).cast(),
+                alloc,
+            )
+        }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed<const STRONG_COUNT: usize>() -> Self
+    where
+        A: Allocator + Default,
+    {
+        let (ptr, alloc) = super::allocate_zeroed::<A, STRONG_COUNT>(T::RC_LAYOUT);
+
+        unsafe { Self::from_raw_parts(ptr.cast(), alloc) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed_in<const STRONG_COUNT: usize>(alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe {
+            Self::from_raw_parts(
+                super::allocate_zeroed_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT).cast(),
+                alloc,
+            )
+        }
+    }
+
+    /// Consumes the `RawWeak` object and returns the contained value, assuming the value is
+    /// initialized.
+    ///
+    /// # Safety
+    ///
+    /// - `self` is non-dangling.
+    /// - The value pointed to by `self` is initialized.
+    /// - The strong reference count is zero.
+    pub(super) unsafe fn assume_init_into_inner<R>(mut self) -> T
+    where
+        A: Allocator,
+        R: RcOps,
+    {
+        unsafe {
+            let result = self.ptr.read();
+
+            self.drop_unchecked::<R>();
+
+            result
+        }
+    }
+}
+
+impl<T, A> RawWeak<[T], A> {
+    #[cfg(not(no_global_oom_handling))]
+    fn allocate<F>(length: usize, allocate_fn: F) -> Self
+    where
+        A: Allocator,
+        F: FnOnce(RcLayout) -> (NonNull<()>, A),
+    {
+        let rc_layout = RcLayout::new_array::<T>(length);
+        let (ptr, alloc) = allocate_fn(rc_layout);
+
+        unsafe { Self::from_raw_parts(NonNull::slice_from_raw_parts(ptr.cast(), length), alloc) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    fn allocate_in<F>(length: usize, alloc: A, allocate_fn: F) -> Self
+    where
+        A: Allocator,
+        F: FnOnce(&A, RcLayout) -> NonNull<()>,
+    {
+        let rc_layout = RcLayout::new_array::<T>(length);
+        let ptr = allocate_fn(&alloc, rc_layout);
+
+        unsafe { Self::from_raw_parts(NonNull::slice_from_raw_parts(ptr.cast(), length), alloc) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit_slice<const STRONG_COUNT: usize>(length: usize) -> Self
+    where
+        A: Allocator + Default,
+    {
+        Self::allocate(length, super::allocate_uninit::<A, STRONG_COUNT>)
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit_slice_in<const STRONG_COUNT: usize>(length: usize, alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        Self::allocate_in(length, alloc, super::allocate_uninit_in::<A, STRONG_COUNT>)
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed_slice<const STRONG_COUNT: usize>(length: usize) -> Self
+    where
+        A: Allocator + Default,
+    {
+        Self::allocate(length, super::allocate_zeroed::<A, STRONG_COUNT>)
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed_slice_in<const STRONG_COUNT: usize>(length: usize, alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        Self::allocate_in(length, alloc, super::allocate_zeroed_in::<A, STRONG_COUNT>)
+    }
+}
+
+impl<T, U, A> CoerceUnsized<RawWeak<U, A>> for RawWeak<T, A>
+where
+    T: Unsize<U> + ?Sized,
+    U: ?Sized,
+{
+}
+
+impl<T, A> Debug for RawWeak<T, A>
+where
+    T: ?Sized,
+{
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        f.write_str("(Weak)")
+    }
+}
+
+impl<T, A> Default for RawWeak<T, A>
+where
+    A: Default,
+{
+    fn default() -> Self {
+        Self::new_dangling()
+    }
+}
+
+impl<T, U> DispatchFromDyn<RawWeak<U, Global>> for RawWeak<T, Global>
+where
+    T: Unsize<U> + ?Sized,
+    U: ?Sized,
+{
+}
diff --git a/library/alloc/src/raw_rc/rc_layout.rs b/library/alloc/src/raw_rc/rc_layout.rs
new file mode 100644
index 0000000000000..957ee757b6b69
--- /dev/null
+++ b/library/alloc/src/raw_rc/rc_layout.rs
@@ -0,0 +1,170 @@
+use core::alloc::{Layout, LayoutError};
+use core::mem::SizedTypeProperties;
+use core::ptr::NonNull;
+
+use crate::raw_rc::RefCounts;
+
+/// A `Layout` that describes a reference-counted allocation.
+#[derive(Clone, Copy)]
+pub(crate) struct RcLayout(Layout);
+
+impl RcLayout {
+    /// Tries to create an `RcLayout` to store a value with layout `value_layout`. Returns `Err` if
+    /// `value_layout` is too big to store in a reference-counted allocation.
+    #[inline]
+    pub(crate) const fn try_from_value_layout(value_layout: Layout) -> Result<Self, LayoutError> {
+        match RefCounts::LAYOUT.extend(value_layout) {
+            Ok((rc_layout, _)) => Ok(Self(rc_layout)),
+            Err(error) => Err(error),
+        }
+    }
+
+    /// Creates an `RcLayout` to store a value with layout `value_layout`. Panics if `value_layout`
+    /// is too big to store in a reference-counted allocation.
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn from_value_layout(value_layout: Layout) -> Self {
+        Self::try_from_value_layout(value_layout).unwrap()
+    }
+
+    /// Creates an `RcLayout` to store a value with layout `value_layout`.
+    ///
+    /// # Safety
+    ///
+    /// `RcLayout::try_from_value_layout(value_layout)` must return `Ok`.
+    #[inline]
+    pub(crate) unsafe fn from_value_layout_unchecked(value_layout: Layout) -> Self {
+        unsafe { Self::try_from_value_layout(value_layout).unwrap_unchecked() }
+    }
+
+    /// Creates an `RcLayout` to store an array of `length` elements of type `T`. Panics if the array
+    /// is too big to store in a reference-counted allocation.
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_array<T>(length: usize) -> Self {
+        #[inline]
+        fn inner(value_layout: Layout, length: usize) -> RcLayout {
+            // We can use `repeat_packed` here because the outer function passes `T::LAYOUT` as the
+            // `value_layout`, which is already padded to a multiple of its alignment.
+            value_layout.repeat_packed(length).and_then(RcLayout::try_from_value_layout).unwrap()
+        }
+
+        inner(T::LAYOUT, length)
+    }
+
+    /// Returns a `Layout` object that describes the reference-counted allocation.
+    pub(crate) fn get(&self) -> Layout {
+        self.0
+    }
+
+    /// Returns the byte offset of the value stored in a reference-counted allocation that is
+    /// described by `self`.
+    #[inline]
+    pub(crate) fn value_offset(&self) -> usize {
+        // SAFETY:
+        //
+        // This essentially calculates `size_of::<RefCounts>().next_multiple_of(self.0.align())`.
+        //
+        // See comments in `Layout::size_rounded_up_to_custom_align` for detailed explanation.
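+        //
+        // For example, with `size_of::<RefCounts>() == 16` and an allocation alignment of 32, this
+        // computes `(16 + 31) & !31 == 32`, the same result as `16usize.next_multiple_of(32)`.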
+        unsafe {
+            let align_m1 = self.0.align().unchecked_sub(1);
+
+            size_of::<RefCounts>().unchecked_add(align_m1) & !align_m1
+        }
+    }
+
+    /// Returns the byte size of the value stored in a reference-counted allocation that is
+    /// described by `self`.
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn value_size(&self) -> usize {
+        unsafe { self.0.size().unchecked_sub(self.value_offset()) }
+    }
+
+    /// Creates an `RcLayout` for storing a value that is pointed to by `value_ptr`.
+    ///
+    /// # Safety
+    ///
+    /// `value_ptr` has correct metadata for a `T` object.
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn from_value_ptr<T>(value_ptr: NonNull<T>) -> Self
+    where
+        T: ?Sized,
+    {
+        /// A helper trait for computing `RcLayout` to store a `Self` object. If `Self` is
+        /// `Sized`, the `RcLayout` value is computed at compile time.
+        trait SpecRcLayout {
+            unsafe fn spec_rc_layout(value_ptr: NonNull<Self>) -> RcLayout;
+        }
+
+        impl<T> SpecRcLayout for T
+        where
+            T: ?Sized,
+        {
+            #[inline]
+            default unsafe fn spec_rc_layout(value_ptr: NonNull<Self>) -> RcLayout {
+                RcLayout::from_value_layout(unsafe { Layout::for_value_raw(value_ptr.as_ptr()) })
+            }
+        }
+
+        impl<T> SpecRcLayout for T {
+            #[inline]
+            unsafe fn spec_rc_layout(_: NonNull<Self>) -> RcLayout {
+                Self::RC_LAYOUT
+            }
+        }
+
+        unsafe { T::spec_rc_layout(value_ptr) }
+    }
+
+    /// Creates an `RcLayout` for storing a value that is pointed to by `value_ptr`, assuming the
+    /// value is small enough to fit inside a reference-counted allocation.
+    ///
+    /// # Safety
+    ///
+    /// - `value_ptr` has correct metadata for a `T` object.
+    /// - It is known that the memory layout described by `value_ptr` can be used to create an
+    ///   `RcLayout` successfully.
+    pub(crate) unsafe fn from_value_ptr_unchecked<T>(value_ptr: NonNull<T>) -> Self
+    where
+        T: ?Sized,
+    {
+        /// A helper trait for computing `RcLayout` to store a `Self` object. If `Self` is
+        /// `Sized`, the `RcLayout` value is computed at compile time.
+        trait SpecRcLayoutUnchecked {
+            unsafe fn spec_rc_layout_unchecked(value_ptr: NonNull<Self>) -> RcLayout;
+        }
+
+        impl<T> SpecRcLayoutUnchecked for T
+        where
+            T: ?Sized,
+        {
+            #[inline]
+            default unsafe fn spec_rc_layout_unchecked(value_ptr: NonNull<Self>) -> RcLayout {
+                unsafe {
+                    RcLayout::from_value_layout_unchecked(Layout::for_value_raw(value_ptr.as_ptr()))
+                }
+            }
+        }
+
+        impl<T> SpecRcLayoutUnchecked for T {
+            #[inline]
+            unsafe fn spec_rc_layout_unchecked(_: NonNull<Self>) -> RcLayout {
+                Self::RC_LAYOUT
+            }
+        }
+
+        unsafe { T::spec_rc_layout_unchecked(value_ptr) }
+    }
+}
+
+pub(crate) trait RcLayoutExt {
+    /// Computes `RcLayout` at compile time if `Self` is `Sized`.
+    const RC_LAYOUT: RcLayout;
+}
+
+impl<T> RcLayoutExt for T {
+    const RC_LAYOUT: RcLayout = match RcLayout::try_from_value_layout(T::LAYOUT) {
+        Ok(rc_layout) => rc_layout,
+        Err(_) => panic!("value is too big to store in a reference-counted allocation"),
+    };
+}
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 4b8ea708e7e57..4eddf5481dbc9 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -242,55 +242,146 @@
 #![stable(feature = "rust1", since = "1.0.0")]
 
 use core::any::Any;
-use core::cell::Cell;
+use core::cell::UnsafeCell;
 #[cfg(not(no_global_oom_handling))]
 use core::clone::CloneToUninit;
 use core::clone::UseCloned;
 use core::cmp::Ordering;
 use core::hash::{Hash, Hasher};
-use core::intrinsics::abort;
-#[cfg(not(no_global_oom_handling))]
-use core::iter;
-use core::marker::{PhantomData, Unsize};
-use core::mem::{self, ManuallyDrop, align_of_val_raw};
-use core::num::NonZeroUsize;
+use core::marker::Unsize;
+use core::mem::{self, ManuallyDrop};
 use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
 use core::panic::{RefUnwindSafe, UnwindSafe};
 #[cfg(not(no_global_oom_handling))]
 use core::pin::Pin;
 use core::pin::PinCoerceUnsized;
-use core::ptr::{self, NonNull, drop_in_place};
-#[cfg(not(no_global_oom_handling))]
-use core::slice::from_raw_parts_mut;
-use core::{borrow, fmt, hint};
+use core::ptr::{self, NonNull};
+use core::{borrow, fmt, hint, intrinsics};
 
-#[cfg(not(no_global_oom_handling))]
-use crate::alloc::handle_alloc_error;
-use crate::alloc::{AllocError, Allocator, Global, Layout};
+use crate::alloc::{AllocError, Allocator, Global};
 use crate::borrow::{Cow, ToOwned};
+#[cfg(not(no_global_oom_handling))]
 use crate::boxed::Box;
 #[cfg(not(no_global_oom_handling))]
+use crate::raw_rc::MakeMutStrategy;
+use crate::raw_rc::{self, RawRc, RawUniqueRc, RawWeak, RefCounts};
+#[cfg(not(no_global_oom_handling))]
 use crate::string::String;
 #[cfg(not(no_global_oom_handling))]
 use crate::vec::Vec;
 
-// This is repr(C) to future-proof against possible field-reordering, which
-// would interfere with otherwise safe [into|from]_raw() of transmutable
-// inner types.
-#[repr(C)]
-struct RcInner<T: ?Sized> {
-    strong: Cell<usize>,
-    weak: Cell<usize>,
-    value: T,
+enum RcOps {}
+
+unsafe impl raw_rc::RcOps for RcOps {
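+    // `Rc` and `Weak` are single-threaded, so all reference-count updates below can read and write
+    // the `UnsafeCell`s non-atomically; no other thread can observe the counts concurrently.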
+    #[inline]
+    unsafe fn increment_ref_count(count: &UnsafeCell<usize>) {
+        // NOTE: If you `mem::forget` `Rc`s (or `Weak`s), drop is skipped and the ref-count
+        // is not decremented, meaning the ref-count can overflow, and then you can
+        // free the allocation while outstanding `Rc`s (or `Weak`s) exist, which would be
+        // unsound. We abort because this is such a degenerate scenario that we don't
+        // care about what happens -- no real program should ever experience this.
+        //
+        // This should have negligible overhead since you don't actually need to
+        // clone these much in Rust thanks to ownership and move-semantics.
+
+        let count_ref = unsafe { &mut *count.get() };
+        let count = *count_ref;
+
+        // We insert an `assume` here to hint LLVM at an otherwise
+        // missed optimization.
+        // SAFETY: The reference count will never be zero when this is
+        // called.
+        unsafe { hint::assert_unchecked(count != 0) };
+
+        let (new_count, overflowed) = count.overflowing_add(1);
+
+        *count_ref = new_count;
+
+        // We want to abort on overflow instead of dropping the value.
+        // Checking for overflow after the store instead of before
+        // allows for slightly better code generation.
+        if intrinsics::unlikely(overflowed) {
+            intrinsics::abort();
+        }
+    }
+
+    #[inline]
+    unsafe fn decrement_ref_count(count: &UnsafeCell<usize>) -> bool {
+        let count = unsafe { &mut *count.get() };
+
+        *count -= 1;
+
+        *count == 0
+    }
+
+    #[inline]
+    unsafe fn upgrade(strong_count: &UnsafeCell<usize>) -> bool {
+        let strong_count = unsafe { &mut *strong_count.get() };
+
+        if *strong_count == 0 {
+            false
+        } else {
+            *strong_count += 1;
+
+            true
+        }
+    }
+
+    #[inline]
+    unsafe fn downgrade(weak_count: &UnsafeCell<usize>) {
+        unsafe { Self::increment_ref_count(weak_count) };
+    }
+
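+    // "Locking" sets a strong count of exactly 1 to 0: `upgrade` above treats a zero strong count
+    // as not upgradable, and `unlock_strong_count` later restores the single strong reference
+    // (see `RawUniqueRc::into_rc`).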
+    #[inline]
+    unsafe fn lock_strong_count(strong_count: &UnsafeCell<usize>) -> bool {
+        let strong_count = unsafe { &mut *strong_count.get() };
+
+        if *strong_count == 1 {
+            *strong_count = 0;
+
+            true
+        } else {
+            false
+        }
+    }
+
+    #[inline]
+    unsafe fn unlock_strong_count(strong_count: &UnsafeCell<usize>) {
+        unsafe { *strong_count.get() = 1 };
+    }
+
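+    // A weak count of 1 is only the implicit weak reference collectively owned by the strong
+    // pointers, so `strong == 1 && weak == 1` means no other `Rc` or `Weak` to this allocation
+    // exists.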
+    #[inline]
+    unsafe fn is_unique(ref_counts: &RefCounts) -> bool {
+        unsafe { *ref_counts.strong.get() == 1 && *ref_counts.weak.get() == 1 }
+    }
+
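+    // Decides how unique access is obtained: `None` when `strong == 1 && weak == 1` (already
+    // unique), `Some(MakeMutStrategy::Move)` when this is the only strong reference but other weak
+    // references exist (the strong count is zeroed so the remaining `Weak`s can no longer
+    // upgrade), and `Some(MakeMutStrategy::Clone)` when other strong references exist.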
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    unsafe fn make_mut(ref_counts: &RefCounts) -> Option<MakeMutStrategy> {
+        let strong_count = unsafe { &mut *ref_counts.strong.get() };
+
+        if *strong_count == 1 {
+            if unsafe { *ref_counts.weak.get() } == 1 {
+                None
+            } else {
+                *strong_count = 0;
+
+                Some(MakeMutStrategy::Move)
+            }
+        } else {
+            Some(MakeMutStrategy::Clone)
+        }
+    }
 }
 
-/// Calculate layout for `RcInner<T>` using the inner value's layout
-fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout {
-    // Calculate layout using the given value layout.
-    // Previously, layout was calculated on the expression
-    // `&*(ptr as *const RcInner<T>)`, but this created a misaligned
-    // reference (see #54908).
-    Layout::new::<RcInner<()>>().extend(layout).unwrap().0.pad_to_align()
+#[cfg(not(no_global_oom_handling))]
+#[inline]
+fn weak_fn_to_raw_weak_fn<F, T, A>(f: F) -> impl FnOnce(&RawWeak<T, A>) -> T
+where
+    F: FnOnce(&Weak<T, A>) -> T,
+    A: Allocator,
+{
+    move |raw_weak: &RawWeak<T, A>| f(Weak::ref_from_raw_weak(raw_weak))
 }
 
 /// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
@@ -307,13 +398,12 @@ fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout {
 #[rustc_diagnostic_item = "Rc"]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[rustc_insignificant_dtor]
+#[repr(transparent)]
 pub struct Rc<
     T: ?Sized,
     #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
 > {
-    ptr: NonNull<RcInner<T>>,
-    phantom: PhantomData<RcInner<T>>,
-    alloc: A,
+    raw_rc: RawRc<T, A>,
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -338,58 +428,6 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for
 #[unstable(feature = "dispatch_from_dyn", issue = "none")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
 
-impl<T: ?Sized> Rc<T> {
-    #[inline]
-    unsafe fn from_inner(ptr: NonNull<RcInner<T>>) -> Self {
-        unsafe { Self::from_inner_in(ptr, Global) }
-    }
-
-    #[inline]
-    unsafe fn from_ptr(ptr: *mut RcInner<T>) -> Self {
-        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
-    }
-}
-
-impl<T: ?Sized, A: Allocator> Rc<T, A> {
-    #[inline(always)]
-    fn inner(&self) -> &RcInner<T> {
-        // This unsafety is ok because while this Rc is alive we're guaranteed
-        // that the inner pointer is valid.
-        unsafe { self.ptr.as_ref() }
-    }
-
-    #[inline]
-    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
-        let this = mem::ManuallyDrop::new(this);
-        (this.ptr, unsafe { ptr::read(&this.alloc) })
-    }
-
-    #[inline]
-    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
-        Self { ptr, phantom: PhantomData, alloc }
-    }
-
-    #[inline]
-    unsafe fn from_ptr_in(ptr: *mut RcInner<T>, alloc: A) -> Self {
-        unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
-    }
-
-    // Non-inlined part of `drop`.
-    #[inline(never)]
-    unsafe fn drop_slow(&mut self) {
-        // Reconstruct the "strong weak" pointer and drop it when this
-        // variable goes out of scope. This ensures that the memory is
-        // deallocated even if the destructor of `T` panics.
-        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };
-
-        // Destroy the contained object.
-        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
-        unsafe {
-            ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
-        }
-    }
-}
-
 impl<T> Rc<T> {
     /// Constructs a new `Rc<T>`.
     ///
@@ -403,16 +441,7 @@ impl<T> Rc<T> {
     #[cfg(not(no_global_oom_handling))]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn new(value: T) -> Rc<T> {
-        // There is an implicit weak pointer owned by all the strong
-        // pointers, which ensures that the weak destructor never frees
-        // the allocation while the strong destructor is running, even
-        // if the weak pointer is stored inside the strong one.
-        unsafe {
-            Self::from_inner(
-                Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value }))
-                    .into(),
-            )
-        }
+        Self { raw_rc: RawRc::new(value) }
     }
 
     /// Constructs a new `Rc<T>` while giving you a `Weak<T>` to the allocation,
@@ -472,7 +501,10 @@ impl<T> Rc<T> {
     where
         F: FnOnce(&Weak<T>) -> T,
     {
-        Self::new_cyclic_in(data_fn, Global)
+        let data_fn = weak_fn_to_raw_weak_fn(data_fn);
+        let raw_rc = unsafe { RawRc::new_cyclic::<_, RcOps>(data_fn) };
+
+        Self { raw_rc }
     }
 
     /// Constructs a new `Rc` with uninitialized contents.
@@ -497,13 +529,7 @@ impl<T> Rc<T> {
     #[stable(feature = "new_uninit", since = "1.82.0")]
     #[must_use]
     pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
-        unsafe {
-            Rc::from_ptr(Rc::allocate_for_layout(
-                Layout::new::<T>(),
-                |layout| Global.allocate(layout),
-                <*mut u8>::cast,
-            ))
-        }
+        Rc { raw_rc: RawRc::new_uninit() }
     }
 
     /// Constructs a new `Rc` with uninitialized contents, with the memory
@@ -530,13 +556,7 @@ impl<T> Rc<T> {
     #[unstable(feature = "new_zeroed_alloc", issue = "129396")]
     #[must_use]
     pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
-        unsafe {
-            Rc::from_ptr(Rc::allocate_for_layout(
-                Layout::new::<T>(),
-                |layout| Global.allocate_zeroed(layout),
-                <*mut u8>::cast,
-            ))
-        }
+        Rc { raw_rc: RawRc::new_zeroed() }
     }
 
     /// Constructs a new `Rc<T>`, returning an error if the allocation fails
@@ -552,20 +572,7 @@ impl<T> Rc<T> {
     /// ```
     #[unstable(feature = "allocator_api", issue = "32838")]
     pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
-        // There is an implicit weak pointer owned by all the strong
-        // pointers, which ensures that the weak destructor never frees
-        // the allocation while the strong destructor is running, even
-        // if the weak pointer is stored inside the strong one.
-        unsafe {
-            Ok(Self::from_inner(
-                Box::leak(Box::try_new(RcInner {
-                    strong: Cell::new(1),
-                    weak: Cell::new(1),
-                    value,
-                })?)
-                .into(),
-            ))
-        }
+        RawRc::try_new(value).map(|raw_rc| Self { raw_rc })
     }
 
     /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
@@ -591,13 +598,7 @@ impl<T> Rc<T> {
     #[unstable(feature = "allocator_api", issue = "32838")]
     // #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
-        unsafe {
-            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
-                Layout::new::<T>(),
-                |layout| Global.allocate(layout),
-                <*mut u8>::cast,
-            )?))
-        }
+        RawRc::try_new_uninit().map(|raw_rc| Rc { raw_rc })
     }
 
     /// Constructs a new `Rc` with uninitialized contents, with the memory
@@ -624,13 +625,7 @@ impl<T> Rc<T> {
     #[unstable(feature = "allocator_api", issue = "32838")]
     //#[unstable(feature = "new_uninit", issue = "63291")]
     pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
-        unsafe {
-            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
-                Layout::new::<T>(),
-                |layout| Global.allocate_zeroed(layout),
-                <*mut u8>::cast,
-            )?))
-        }
+        RawRc::try_new_zeroed().map(|raw_rc| Rc { raw_rc })
     }
     /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
     /// `value` will be pinned in memory and unable to be moved.
@@ -658,12 +653,7 @@ impl<T, A: Allocator> Rc<T, A> {
     #[unstable(feature = "allocator_api", issue = "32838")]
     #[inline]
     pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
-        // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
-        // That would make code size bigger.
-        match Self::try_new_in(value, alloc) {
-            Ok(m) => m,
-            Err(_) => handle_alloc_error(Layout::new::<RcInner<T>>()),
-        }
+        Self { raw_rc: RawRc::new_in(value, alloc) }
     }
 
     /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
@@ -693,16 +683,7 @@ impl<T, A: Allocator> Rc<T, A> {
     // #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
-        unsafe {
-            Rc::from_ptr_in(
-                Rc::allocate_for_layout(
-                    Layout::new::<T>(),
-                    |layout| alloc.allocate(layout),
-                    <*mut u8>::cast,
-                ),
-                alloc,
-            )
-        }
+        Rc { raw_rc: RawRc::new_uninit_in(alloc) }
     }
 
     /// Constructs a new `Rc` with uninitialized contents, with the memory
@@ -731,16 +712,7 @@ impl<T, A: Allocator> Rc<T, A> {
     // #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
-        unsafe {
-            Rc::from_ptr_in(
-                Rc::allocate_for_layout(
-                    Layout::new::<T>(),
-                    |layout| alloc.allocate_zeroed(layout),
-                    <*mut u8>::cast,
-                ),
-                alloc,
-            )
-        }
+        Rc { raw_rc: RawRc::new_zeroed_in(alloc) }
     }
 
     /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
@@ -778,47 +750,10 @@ impl<T, A: Allocator> Rc<T, A> {
     where
         F: FnOnce(&Weak<T, A>) -> T,
     {
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
-            RcInner {
-                strong: Cell::new(0),
-                weak: Cell::new(1),
-                value: mem::MaybeUninit::<T>::uninit(),
-            },
-            alloc,
-        ));
-        let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
-        let init_ptr: NonNull<RcInner<T>> = uninit_ptr.cast();
+        let data_fn = weak_fn_to_raw_weak_fn(data_fn);
+        let raw_rc = unsafe { RawRc::new_cyclic_in::<_, RcOps>(data_fn, alloc) };
 
-        let weak = Weak { ptr: init_ptr, alloc };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(&raw mut (*inner).value, data);
-
-            let prev_value = (*inner).strong.get();
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-            (*inner).strong.set(1);
-
-            // Strong references should collectively own a shared weak reference,
-            // so don't run the destructor for our old weak reference.
-            // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
-            // and forgetting the weak reference.
-            let alloc = weak.into_raw_with_allocator().1;
-
-            Rc::from_inner_in(init_ptr, alloc)
-        };
-
-        strong
+        Self { raw_rc }
     }
 
     /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
@@ -837,15 +772,7 @@ impl<T, A: Allocator> Rc<T, A> {
     #[unstable(feature = "allocator_api", issue = "32838")]
     #[inline]
     pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
-        // There is an implicit weak pointer owned by all the strong
-        // pointers, which ensures that the weak destructor never frees
-        // the allocation while the strong destructor is running, even
-        // if the weak pointer is stored inside the strong one.
-        let (ptr, alloc) = Box::into_unique(Box::try_new_in(
-            RcInner { strong: Cell::new(1), weak: Cell::new(1), value },
-            alloc,
-        )?);
-        Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
+        RawRc::try_new_in(value, alloc).map(|raw_rc| Self { raw_rc })
     }
 
     /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
@@ -876,16 +803,7 @@ impl<T, A: Allocator> Rc<T, A> {
     // #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
-        unsafe {
-            Ok(Rc::from_ptr_in(
-                Rc::try_allocate_for_layout(
-                    Layout::new::<T>(),
-                    |layout| alloc.allocate(layout),
-                    <*mut u8>::cast,
-                )?,
-                alloc,
-            ))
-        }
+        RawRc::try_new_uninit_in(alloc).map(|raw_rc| Rc { raw_rc })
     }
 
     /// Constructs a new `Rc` with uninitialized contents, with the memory
@@ -915,16 +833,7 @@ impl<T, A: Allocator> Rc<T, A> {
     //#[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
-        unsafe {
-            Ok(Rc::from_ptr_in(
-                Rc::try_allocate_for_layout(
-                    Layout::new::<T>(),
-                    |layout| alloc.allocate_zeroed(layout),
-                    <*mut u8>::cast,
-                )?,
-                alloc,
-            ))
-        }
+        RawRc::try_new_zeroed_in(alloc).map(|raw_rc| Rc { raw_rc })
     }
 
     /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
@@ -961,22 +870,10 @@ impl<T, A: Allocator> Rc<T, A> {
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn try_unwrap(this: Self) -> Result<T, Self> {
-        if Rc::strong_count(&this) == 1 {
-            let this = ManuallyDrop::new(this);
-
-            let val: T = unsafe { ptr::read(&**this) }; // copy the contained object
-            let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator
-
-            // Indicate to Weaks that they can't be promoted by decrementing
-            // the strong count, and then remove the implicit "strong weak"
-            // pointer while also handling drop logic by just crafting a
-            // fake Weak.
-            this.inner().dec_strong();
-            let _weak = Weak { ptr: this.ptr, alloc };
-            Ok(val)
-        } else {
-            Err(this)
-        }
+        let raw_rc = Self::into_raw_rc(this);
+        let result = unsafe { raw_rc.try_unwrap::<RcOps>() };
+
+        result.map_err(|raw_rc| Self { raw_rc })
     }
 
     /// Returns the inner value, if the `Rc` has exactly one strong reference.
@@ -1012,7 +909,9 @@ impl<T, A: Allocator> Rc<T, A> {
     #[inline]
     #[stable(feature = "rc_into_inner", since = "1.70.0")]
     pub fn into_inner(this: Self) -> Option<T> {
-        Rc::try_unwrap(this).ok()
+        let raw_rc = Self::into_raw_rc(this);
+
+        unsafe { raw_rc.into_inner::<RcOps>() }
     }
 }
 
@@ -1042,7 +941,7 @@ impl<T> Rc<[T]> {
     #[stable(feature = "new_uninit", since = "1.82.0")]
     #[must_use]
     pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
-        unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
+        Rc { raw_rc: RawRc::new_uninit_slice(len) }
     }
 
     /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
@@ -1069,16 +968,7 @@ impl<T> Rc<[T]> {
     #[unstable(feature = "new_zeroed_alloc", issue = "129396")]
     #[must_use]
     pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
-        unsafe {
-            Rc::from_ptr(Rc::allocate_for_layout(
-                Layout::array::<T>(len).unwrap(),
-                |layout| Global.allocate_zeroed(layout),
-                |mem| {
-                    ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
-                        as *mut RcInner<[mem::MaybeUninit<T>]>
-                },
-            ))
-        }
+        Rc { raw_rc: RawRc::new_zeroed_slice(len) }
     }
 
     /// Converts the reference-counted slice into a reference-counted array.
@@ -1090,15 +980,10 @@ impl<T> Rc<[T]> {
     #[inline]
     #[must_use]
     pub fn into_array<const N: usize>(self) -> Option<Rc<[T; N]>> {
-        if self.len() == N {
-            let ptr = Self::into_raw(self) as *const [T; N];
+        let raw_rc = Self::into_raw_rc(self);
+        let result = unsafe { raw_rc.into_array::<N, RcOps>() };
 
-            // SAFETY: The underlying array of a slice has the exact same layout as an actual array `[T; N]` if `N` is equal to the slice's length.
-            let me = unsafe { Rc::from_raw(ptr) };
-            Some(me)
-        } else {
-            None
-        }
+        result.map(|raw_rc| Rc { raw_rc })
     }
 }
 
@@ -1132,7 +1017,7 @@ impl<T, A: Allocator> Rc<[T], A> {
     // #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
-        unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
+        Rc { raw_rc: RawRc::new_uninit_slice_in(len, alloc) }
     }
 
     /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
@@ -1161,19 +1046,7 @@ impl<T, A: Allocator> Rc<[T], A> {
     // #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
-        unsafe {
-            Rc::from_ptr_in(
-                Rc::allocate_for_layout(
-                    Layout::array::<T>(len).unwrap(),
-                    |layout| alloc.allocate_zeroed(layout),
-                    |mem| {
-                        ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
-                            as *mut RcInner<[mem::MaybeUninit<T>]>
-                    },
-                ),
-                alloc,
-            )
-        }
+        Rc { raw_rc: RawRc::new_zeroed_slice_in(len, alloc) }
     }
 }
 
@@ -1209,8 +1082,10 @@ impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
     #[stable(feature = "new_uninit", since = "1.82.0")]
     #[inline]
     pub unsafe fn assume_init(self) -> Rc<T, A> {
-        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
-        unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
+        let raw_rc = Self::into_raw_rc(self);
+        let raw_rc = unsafe { raw_rc.assume_init() };
+
+        Rc { raw_rc }
     }
 }
 
@@ -1249,8 +1124,10 @@ impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
     #[stable(feature = "new_uninit", since = "1.82.0")]
     #[inline]
     pub unsafe fn assume_init(self) -> Rc<[T], A> {
-        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
-        unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) }
+        let raw_rc = Self::into_raw_rc(self);
+        let raw_rc = unsafe { raw_rc.assume_init() };
+
+        Rc { raw_rc }
     }
 }
 
@@ -1319,7 +1196,7 @@ impl<T: ?Sized> Rc<T> {
     #[inline]
     #[stable(feature = "rc_raw", since = "1.17.0")]
     pub unsafe fn from_raw(ptr: *const T) -> Self {
-        unsafe { Self::from_raw_in(ptr, Global) }
+        Self { raw_rc: unsafe { RawRc::from_raw(NonNull::new_unchecked(ptr.cast_mut())) } }
     }
 
     /// Increments the strong reference count on the `Rc<T>` associated with the
@@ -1355,7 +1232,11 @@ impl<T: ?Sized> Rc<T> {
     #[inline]
     #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
     pub unsafe fn increment_strong_count(ptr: *const T) {
-        unsafe { Self::increment_strong_count_in(ptr, Global) }
+        unsafe {
+            RawRc::<T, Global>::increment_strong_count::<RcOps>(NonNull::new_unchecked(
+                ptr.cast_mut(),
+            ));
+        }
     }
 
     /// Decrements the strong reference count on the `Rc<T>` associated with the
@@ -1392,11 +1273,22 @@ impl<T: ?Sized> Rc<T> {
     #[inline]
     #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
     pub unsafe fn decrement_strong_count(ptr: *const T) {
-        unsafe { Self::decrement_strong_count_in(ptr, Global) }
+        unsafe {
+            RawRc::<T, Global>::decrement_strong_count::<RcOps>(NonNull::new_unchecked(
+                ptr.cast_mut(),
+            ))
+        }
     }
 }
 
 impl<T: ?Sized, A: Allocator> Rc<T, A> {
+    #[inline]
+    fn into_raw_rc(this: Self) -> RawRc<T, A> {
+        let this = ManuallyDrop::new(this);
+
+        unsafe { ptr::read(&this.raw_rc) }
+    }
+
     /// Returns a reference to the underlying allocator.
     ///
     /// Note: this is an associated function, which means that you have
@@ -1405,7 +1297,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
     pub fn allocator(this: &Self) -> &A {
-        &this.alloc
+        this.raw_rc.allocator()
     }
 
     /// Consumes the `Rc`, returning the wrapped pointer.
@@ -1428,8 +1320,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     #[stable(feature = "rc_raw", since = "1.17.0")]
     #[rustc_never_returns_null_ptr]
     pub fn into_raw(this: Self) -> *const T {
-        let this = ManuallyDrop::new(this);
-        Self::as_ptr(&*this)
+        Self::into_raw_rc(this).into_raw().as_ptr()
     }
 
     /// Consumes the `Rc`, returning the wrapped pointer and allocator.
@@ -1453,11 +1344,9 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     #[must_use = "losing the pointer will leak memory"]
     #[unstable(feature = "allocator_api", issue = "32838")]
     pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
-        let this = mem::ManuallyDrop::new(this);
-        let ptr = Self::as_ptr(&this);
-        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
-        let alloc = unsafe { ptr::read(&this.alloc) };
-        (ptr, alloc)
+        let (ptr, alloc) = Self::into_raw_rc(this).into_raw_parts();
+
+        (ptr.as_ptr(), alloc)
     }
 
     /// Provides a raw pointer to the data.
@@ -1479,12 +1368,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     #[stable(feature = "weak_into_raw", since = "1.45.0")]
     #[rustc_never_returns_null_ptr]
     pub fn as_ptr(this: &Self) -> *const T {
-        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);
-
-        // SAFETY: This cannot go through Deref::deref or Rc::inner because
-        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
-        // write through the pointer after the Rc is recovered through `from_raw`.
-        unsafe { &raw mut (*ptr).value }
+        this.raw_rc.as_ptr().as_ptr()
     }
 
     /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
@@ -1556,12 +1440,9 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     /// ```
     #[unstable(feature = "allocator_api", issue = "32838")]
     pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
-        let offset = unsafe { data_offset(ptr) };
-
-        // Reverse the offset to find the original RcInner.
-        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };
-
-        unsafe { Self::from_ptr_in(rc_ptr, alloc) }
+        unsafe {
+            Self { raw_rc: RawRc::from_raw_parts(NonNull::new_unchecked(ptr.cast_mut()), alloc) }
+        }
     }
 
     /// Creates a new [`Weak`] pointer to this allocation.
@@ -1582,10 +1463,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     where
         A: Clone,
     {
-        this.inner().inc_weak();
-        // Make sure we do not create a dangling Weak
-        debug_assert!(!is_dangling(this.ptr.as_ptr()));
-        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
+        Weak { raw_weak: unsafe { this.raw_rc.downgrade::<RcOps>() } }
     }
 
     /// Gets the number of [`Weak`] pointers to this allocation.
@@ -1603,7 +1481,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     #[inline]
     #[stable(feature = "rc_counts", since = "1.15.0")]
     pub fn weak_count(this: &Self) -> usize {
-        this.inner().weak() - 1
+        unsafe { *this.raw_rc.weak_count().get() - 1 }
     }
 
     /// Gets the number of strong (`Rc`) pointers to this allocation.
@@ -1621,7 +1499,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     #[inline]
     #[stable(feature = "rc_counts", since = "1.15.0")]
     pub fn strong_count(this: &Self) -> usize {
-        this.inner().strong()
+        unsafe { *this.raw_rc.strong_count().get() }
     }
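
For reference, the documented counting behavior these accessors implement:

```rust
use std::rc::Rc;

fn main() {
    let five = Rc::new(5);
    assert_eq!(Rc::strong_count(&five), 1);
    assert_eq!(Rc::weak_count(&five), 0);

    let weak_five = Rc::downgrade(&five);
    // `weak_count` excludes the implicit weak reference shared by all strong pointers.
    assert_eq!(Rc::weak_count(&five), 1);

    drop(weak_five);
    assert_eq!(Rc::weak_count(&five), 0);
}
```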
 
     /// Increments the strong reference count on the `Rc<T>` associated with the
@@ -1663,10 +1541,11 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     where
         A: Clone,
     {
-        // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
-        let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
-        // Now increase refcount, but don't drop new refcount either
-        let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
+        unsafe {
+            RawRc::<T, A>::increment_strong_count::<RcOps>(NonNull::new_unchecked(ptr.cast_mut()));
+        }
+
+        drop(alloc);
     }
 
     /// Decrements the strong reference count on the `Rc<T>` associated with the
@@ -1706,14 +1585,12 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
     pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
-        unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
-    }
-
-    /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
-    /// this allocation.
-    #[inline]
-    fn is_unique(this: &Self) -> bool {
-        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
+        unsafe {
+            RawRc::<T, A>::decrement_strong_count_in::<RcOps>(
+                NonNull::new_unchecked(ptr.cast_mut()),
+                alloc,
+            );
+        }
     }
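
The stable `Global`-allocator counterparts (`Rc::increment_strong_count` / `Rc::decrement_strong_count`) express the same operation; a minimal sketch along the lines of the standard documentation:

```rust
use std::rc::Rc;

fn main() {
    let five = Rc::new(5);

    unsafe {
        let ptr = Rc::into_raw(five);

        // Add a strong reference through the raw pointer alone.
        Rc::increment_strong_count(ptr);

        let five = Rc::from_raw(ptr);
        assert_eq!(Rc::strong_count(&five), 2);

        // Release the extra reference; `five` still owns the last one.
        Rc::decrement_strong_count(ptr);
        assert_eq!(Rc::strong_count(&five), 1);
    }
}
```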
 
     /// Returns a mutable reference into the given `Rc`, if there are
@@ -1743,7 +1620,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
-        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
+        unsafe { this.raw_rc.get_mut::<RcOps>() }
     }
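
`get_mut` still only succeeds when there are no other strong or weak pointers; for example:

```rust
use std::rc::Rc;

fn main() {
    let mut x = Rc::new(3);
    *Rc::get_mut(&mut x).unwrap() = 4;
    assert_eq!(*x, 4);

    // A second strong reference makes the value shared, so `get_mut` returns `None`.
    let _y = Rc::clone(&x);
    assert!(Rc::get_mut(&mut x).is_none());
}
```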
 
     /// Returns a mutable reference into the given `Rc`,
@@ -1809,9 +1686,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     #[inline]
     #[unstable(feature = "get_mut_unchecked", issue = "63292")]
     pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
-        // We are careful to *not* create a reference covering the "count" fields, as
-        // this would conflict with accesses to the reference counts (e.g. by `Weak`).
-        unsafe { &mut (*this.ptr.as_ptr()).value }
+        unsafe { this.raw_rc.get_mut_unchecked() }
     }
 
     #[inline]
@@ -1832,7 +1707,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     /// assert!(!Rc::ptr_eq(&five, &other_five));
     /// ```
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
-        ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
+        RawRc::ptr_eq(&this.raw_rc, &other.raw_rc)
     }
 }
 
@@ -1891,57 +1766,7 @@ impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn make_mut(this: &mut Self) -> &mut T {
-        let size_of_val = size_of_val::<T>(&**this);
-
-        if Rc::strong_count(this) != 1 {
-            // Gotta clone the data, there are other Rcs.
-
-            let this_data_ref: &T = &**this;
-            // `in_progress` drops the allocation if we panic before finishing initializing it.
-            let mut in_progress: UniqueRcUninit<T, A> =
-                UniqueRcUninit::new(this_data_ref, this.alloc.clone());
-
-            // Initialize with clone of this.
-            let initialized_clone = unsafe {
-                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
-                this_data_ref.clone_to_uninit(in_progress.data_ptr().cast());
-                // Cast type of pointer, now that it is initialized.
-                in_progress.into_rc()
-            };
-
-            // Replace `this` with newly constructed Rc.
-            *this = initialized_clone;
-        } else if Rc::weak_count(this) != 0 {
-            // Can just steal the data, all that's left is Weaks
-
-            // We don't need panic-protection like the above branch does, but we might as well
-            // use the same mechanism.
-            let mut in_progress: UniqueRcUninit<T, A> =
-                UniqueRcUninit::new(&**this, this.alloc.clone());
-            unsafe {
-                // Initialize `in_progress` with move of **this.
-                // We have to express this in terms of bytes because `T: ?Sized`; there is no
-                // operation that just copies a value based on its `size_of_val()`.
-                ptr::copy_nonoverlapping(
-                    ptr::from_ref(&**this).cast::<u8>(),
-                    in_progress.data_ptr().cast::<u8>(),
-                    size_of_val,
-                );
-
-                this.inner().dec_strong();
-                // Remove implicit strong-weak ref (no need to craft a fake
-                // Weak here -- we know other Weaks can clean up for us)
-                this.inner().dec_weak();
-                // Replace `this` with newly constructed Rc that has the moved data.
-                ptr::write(this, in_progress.into_rc());
-            }
-        }
-        // This unsafety is ok because we're guaranteed that the pointer
-        // returned is the *only* pointer that will ever be returned to T. Our
-        // reference count is guaranteed to be 1 at this point, and we required
-        // the `Rc<T>` itself to be `mut`, so we're returning the only possible
-        // reference to the allocation.
-        unsafe { &mut this.ptr.as_mut().value }
+        unsafe { this.raw_rc.make_mut::<RcOps>() }
     }
 }
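
For reference, the clone-on-write contract `make_mut` implements (now inside `RawRc::make_mut`):

```rust
use std::rc::Rc;

fn main() {
    let mut data = Rc::new(5);

    *Rc::make_mut(&mut data) += 1; // unique: mutated in place
    let other = Rc::clone(&data); // now shared
    *Rc::make_mut(&mut data) += 1; // shared: clones the value first

    // `data` and `other` now point at separate allocations.
    assert_eq!(*data, 7);
    assert_eq!(*other, 6);
    assert!(!Rc::ptr_eq(&data, &other));
}
```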
 
@@ -1977,7 +1802,9 @@ impl<T: Clone, A: Allocator> Rc<T, A> {
     #[inline]
     #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")]
     pub fn unwrap_or_clone(this: Self) -> T {
-        Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
+        let raw_rc = Self::into_raw_rc(this);
+
+        unsafe { raw_rc.unwrap_or_clone::<RcOps>() }
     }
 }
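
`unwrap_or_clone` keeps its documented contract: move the value out when unique, clone it otherwise. For example:

```rust
use std::rc::Rc;

fn main() {
    let inner = String::from("test");
    let ptr = inner.as_ptr();

    let rc = Rc::new(inner);
    let unique = Rc::unwrap_or_clone(rc);
    // The only reference: the original `String` is moved out, not cloned.
    assert!(std::ptr::eq(ptr, unique.as_ptr()));

    let rc = Rc::new(unique);
    let _shared = Rc::clone(&rc);
    let cloned = Rc::unwrap_or_clone(rc);
    // Two references: the value had to be cloned into a fresh buffer.
    assert!(!std::ptr::eq(ptr, cloned.as_ptr()));
}
```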
 
@@ -2003,13 +1830,9 @@ impl<A: Allocator> Rc<dyn Any, A> {
     #[inline]
     #[stable(feature = "rc_downcast", since = "1.29.0")]
     pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
-        if (*self).is::<T>() {
-            unsafe {
-                let (ptr, alloc) = Rc::into_inner_with_allocator(self);
-                Ok(Rc::from_inner_in(ptr.cast(), alloc))
-            }
-        } else {
-            Err(self)
+        match Self::into_raw_rc(self).downcast::<T>() {
+            Ok(raw_rc) => Ok(Rc { raw_rc }),
+            Err(raw_rc) => Err(Self { raw_rc }),
         }
     }
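
Downcasting keeps its documented contract, returning the original `Rc` on a type mismatch; for example:

```rust
use std::any::Any;
use std::rc::Rc;

fn main() {
    let value: Rc<dyn Any> = Rc::new(String::from("hello"));

    // The erased value really is a `String`, so the downcast succeeds...
    let string: Rc<String> = value.downcast::<String>().unwrap();
    assert_eq!(*string, "hello");

    // ...while a wrong guess hands the `Rc<dyn Any>` back unchanged as the error.
    let value: Rc<dyn Any> = Rc::new(0i32);
    assert!(value.downcast::<String>().is_err());
}
```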
 
@@ -2042,208 +1865,10 @@ impl<A: Allocator> Rc<dyn Any, A> {
     #[inline]
     #[unstable(feature = "downcast_unchecked", issue = "90850")]
     pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
-        unsafe {
-            let (ptr, alloc) = Rc::into_inner_with_allocator(self);
-            Rc::from_inner_in(ptr.cast(), alloc)
-        }
-    }
-}
-
-impl<T: ?Sized> Rc<T> {
-    /// Allocates an `RcInner<T>` with sufficient space for
-    /// a possibly-unsized inner value where the value has the layout provided.
-    ///
-    /// The function `mem_to_rc_inner` is called with the data pointer
-    /// and must return back a (potentially fat)-pointer for the `RcInner<T>`.
-    #[cfg(not(no_global_oom_handling))]
-    unsafe fn allocate_for_layout(
-        value_layout: Layout,
-        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
-        mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
-    ) -> *mut RcInner<T> {
-        let layout = rc_inner_layout_for_value_layout(value_layout);
-        unsafe {
-            Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner)
-                .unwrap_or_else(|_| handle_alloc_error(layout))
-        }
-    }
-
-    /// Allocates an `RcInner<T>` with sufficient space for
-    /// a possibly-unsized inner value where the value has the layout provided,
-    /// returning an error if allocation fails.
-    ///
-    /// The function `mem_to_rc_inner` is called with the data pointer
-    /// and must return back a (potentially fat)-pointer for the `RcInner<T>`.
-    #[inline]
-    unsafe fn try_allocate_for_layout(
-        value_layout: Layout,
-        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
-        mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
-    ) -> Result<*mut RcInner<T>, AllocError> {
-        let layout = rc_inner_layout_for_value_layout(value_layout);
-
-        // Allocate for the layout.
-        let ptr = allocate(layout)?;
-
-        // Initialize the RcInner
-        let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr());
-        unsafe {
-            debug_assert_eq!(Layout::for_value_raw(inner), layout);
-
-            (&raw mut (*inner).strong).write(Cell::new(1));
-            (&raw mut (*inner).weak).write(Cell::new(1));
-        }
-
-        Ok(inner)
-    }
-}
-
-impl<T: ?Sized, A: Allocator> Rc<T, A> {
-    /// Allocates an `RcInner<T>` with sufficient space for an unsized inner value
-    #[cfg(not(no_global_oom_handling))]
-    unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner<T> {
-        // Allocate for the `RcInner<T>` using the given value.
-        unsafe {
-            Rc::<T>::allocate_for_layout(
-                Layout::for_value_raw(ptr),
-                |layout| alloc.allocate(layout),
-                |mem| mem.with_metadata_of(ptr as *const RcInner<T>),
-            )
-        }
-    }
-
-    #[cfg(not(no_global_oom_handling))]
-    fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
-        unsafe {
-            let value_size = size_of_val(&*src);
-            let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
-
-            // Copy value as bytes
-            ptr::copy_nonoverlapping(
-                (&raw const *src) as *const u8,
-                (&raw mut (*ptr).value) as *mut u8,
-                value_size,
-            );
-
-            // Free the allocation without dropping its contents
-            let (bptr, alloc) = Box::into_raw_with_allocator(src);
-            let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop<T>, alloc.by_ref());
-            drop(src);
-
-            Self::from_ptr_in(ptr, alloc)
-        }
-    }
-}
-
-impl<T> Rc<[T]> {
-    /// Allocates an `RcInner<[T]>` with the given length.
-    #[cfg(not(no_global_oom_handling))]
-    unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> {
-        unsafe {
-            Self::allocate_for_layout(
-                Layout::array::<T>(len).unwrap(),
-                |layout| Global.allocate(layout),
-                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
-            )
-        }
-    }
-
-    /// Copy elements from slice into newly allocated `Rc<[T]>`
-    ///
-    /// Unsafe because the caller must either take ownership or bind `T: Copy`
-    #[cfg(not(no_global_oom_handling))]
-    unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
-        unsafe {
-            let ptr = Self::allocate_for_slice(v.len());
-            ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len());
-            Self::from_ptr(ptr)
-        }
-    }
-
-    /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
-    ///
-    /// Behavior is undefined should the size be wrong.
-    #[cfg(not(no_global_oom_handling))]
-    unsafe fn from_iter_exact(iter: impl Iterator<Item = T>, len: usize) -> Rc<[T]> {
-        // Panic guard while cloning T elements.
-        // In the event of a panic, elements that have been written
-        // into the new RcInner will be dropped, then the memory freed.
-        struct Guard<T> {
-            mem: NonNull<u8>,
-            elems: *mut T,
-            layout: Layout,
-            n_elems: usize,
-        }
-
-        impl<T> Drop for Guard<T> {
-            fn drop(&mut self) {
-                unsafe {
-                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
-                    ptr::drop_in_place(slice);
-
-                    Global.deallocate(self.mem, self.layout);
-                }
-            }
-        }
-
-        unsafe {
-            let ptr = Self::allocate_for_slice(len);
+        let raw_rc = Self::into_raw_rc(self);
+        let raw_rc = unsafe { raw_rc.downcast_unchecked() };
 
-            let mem = ptr as *mut _ as *mut u8;
-            let layout = Layout::for_value_raw(ptr);
-
-            // Pointer to first element
-            let elems = (&raw mut (*ptr).value) as *mut T;
-
-            let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
-
-            for (i, item) in iter.enumerate() {
-                ptr::write(elems.add(i), item);
-                guard.n_elems += 1;
-            }
-
-            // All clear. Forget the guard so it doesn't free the new RcInner.
-            mem::forget(guard);
-
-            Self::from_ptr(ptr)
-        }
-    }
-}
-
-impl<T, A: Allocator> Rc<[T], A> {
-    /// Allocates an `RcInner<[T]>` with the given length.
-    #[inline]
-    #[cfg(not(no_global_oom_handling))]
-    unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> {
-        unsafe {
-            Rc::<[T]>::allocate_for_layout(
-                Layout::array::<T>(len).unwrap(),
-                |layout| alloc.allocate(layout),
-                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
-            )
-        }
-    }
-}
-
-#[cfg(not(no_global_oom_handling))]
-/// Specialization trait used for `From<&[T]>`.
-trait RcFromSlice<T> {
-    fn from_slice(slice: &[T]) -> Self;
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
-    #[inline]
-    default fn from_slice(v: &[T]) -> Self {
-        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
-    }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
-    #[inline]
-    fn from_slice(v: &[T]) -> Self {
-        unsafe { Rc::copy_from_slice(v) }
+        Rc { raw_rc }
     }
 }
 
@@ -2253,7 +1878,7 @@ impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
 
     #[inline(always)]
     fn deref(&self) -> &T {
-        &self.inner().value
+        self.raw_rc.as_ref()
     }
 }
 
@@ -2306,12 +1931,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
     /// ```
     #[inline]
     fn drop(&mut self) {
-        unsafe {
-            self.inner().dec_strong();
-            if self.inner().strong() == 0 {
-                self.drop_slow();
-            }
-        }
+        unsafe { self.raw_rc.drop::<RcOps>() };
     }
 }
 
@@ -2333,10 +1953,7 @@ impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
     /// ```
     #[inline]
     fn clone(&self) -> Self {
-        unsafe {
-            self.inner().inc_strong();
-            Self::from_inner_in(self.ptr, self.alloc.clone())
-        }
+        Self { raw_rc: unsafe { self.raw_rc.clone::<RcOps>() } }
     }
 }
 
@@ -2358,15 +1975,7 @@ impl<T: Default> Default for Rc<T> {
     /// ```
     #[inline]
     fn default() -> Rc<T> {
-        unsafe {
-            Self::from_inner(
-                Box::leak(Box::write(
-                    Box::new_uninit(),
-                    RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() },
-                ))
-                .into(),
-            )
-        }
+        Self { raw_rc: RawRc::default() }
     }
 }
 
@@ -2378,9 +1987,7 @@ impl Default for Rc<str> {
     /// This may or may not share an allocation with other Rcs on the same thread.
     #[inline]
     fn default() -> Self {
-        let rc = Rc::<[u8]>::default();
-        // `[u8]` has the same layout as `str`.
-        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
+        Self { raw_rc: RawRc::default() }
     }
 }
 
@@ -2392,53 +1999,7 @@ impl<T> Default for Rc<[T]> {
     /// This may or may not share an allocation with other Rcs on the same thread.
     #[inline]
     fn default() -> Self {
-        let arr: [T; 0] = [];
-        Rc::from(arr)
-    }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
-    fn eq(&self, other: &Rc<T, A>) -> bool;
-    fn ne(&self, other: &Rc<T, A>) -> bool;
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
-    #[inline]
-    default fn eq(&self, other: &Rc<T, A>) -> bool {
-        **self == **other
-    }
-
-    #[inline]
-    default fn ne(&self, other: &Rc<T, A>) -> bool {
-        **self != **other
-    }
-}
-
-// Hack to allow specializing on `Eq` even though `Eq` has a method.
-#[rustc_unsafe_specialization_marker]
-pub(crate) trait MarkerEq: PartialEq<Self> {}
-
-impl<T: Eq> MarkerEq for T {}
-
-/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
-/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
-/// store large values, that are slow to clone, but also heavy to check for equality, causing this
-/// cost to pay off more easily. It's also more likely to have two `Rc` clones, that point to
-/// the same value, than two `&T`s.
-///
-/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
-    #[inline]
-    fn eq(&self, other: &Rc<T, A>) -> bool {
-        Rc::ptr_eq(self, other) || **self == **other
-    }
-
-    #[inline]
-    fn ne(&self, other: &Rc<T, A>) -> bool {
-        !Rc::ptr_eq(self, other) && **self != **other
+        Self { raw_rc: RawRc::default() }
     }
 }
 
@@ -2464,7 +2025,7 @@ impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
     /// ```
     #[inline]
     fn eq(&self, other: &Rc<T, A>) -> bool {
-        RcEqIdent::eq(self, other)
+        RawRc::eq(&self.raw_rc, &other.raw_rc)
     }
 
     /// Inequality for two `Rc`s.
@@ -2486,7 +2047,7 @@ impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
     /// ```
     #[inline]
     fn ne(&self, other: &Rc<T, A>) -> bool {
-        RcEqIdent::ne(self, other)
+        RawRc::ne(&self.raw_rc, &other.raw_rc)
     }
 }
 
@@ -2511,7 +2072,7 @@ impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
     /// ```
     #[inline(always)]
     fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
-        (**self).partial_cmp(&**other)
+        RawRc::partial_cmp(&self.raw_rc, &other.raw_rc)
     }
 
     /// Less-than comparison for two `Rc`s.
@@ -2529,7 +2090,7 @@ impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
     /// ```
     #[inline(always)]
     fn lt(&self, other: &Rc<T, A>) -> bool {
-        **self < **other
+        RawRc::lt(&self.raw_rc, &other.raw_rc)
     }
 
     /// 'Less than or equal to' comparison for two `Rc`s.
@@ -2547,7 +2108,7 @@ impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
     /// ```
     #[inline(always)]
     fn le(&self, other: &Rc<T, A>) -> bool {
-        **self <= **other
+        RawRc::le(&self.raw_rc, &other.raw_rc)
     }
 
     /// Greater-than comparison for two `Rc`s.
@@ -2565,7 +2126,7 @@ impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
     /// ```
     #[inline(always)]
     fn gt(&self, other: &Rc<T, A>) -> bool {
-        **self > **other
+        RawRc::gt(&self.raw_rc, &other.raw_rc)
     }
 
     /// 'Greater than or equal to' comparison for two `Rc`s.
@@ -2583,7 +2144,7 @@ impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
     /// ```
     #[inline(always)]
     fn ge(&self, other: &Rc<T, A>) -> bool {
-        **self >= **other
+        RawRc::ge(&self.raw_rc, &other.raw_rc)
     }
 }
 
@@ -2605,35 +2166,35 @@ impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
     /// ```
     #[inline]
     fn cmp(&self, other: &Rc<T, A>) -> Ordering {
-        (**self).cmp(&**other)
+        RawRc::cmp(&self.raw_rc, &other.raw_rc)
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
     fn hash<H: Hasher>(&self, state: &mut H) {
-        (**self).hash(state);
+        RawRc::hash(&self.raw_rc, state)
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        fmt::Display::fmt(&**self, f)
+        <RawRc<T, A> as fmt::Display>::fmt(&self.raw_rc, f)
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        fmt::Debug::fmt(&**self, f)
+        <RawRc<T, A> as fmt::Debug>::fmt(&self.raw_rc, f)
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        fmt::Pointer::fmt(&(&raw const **self), f)
+        <RawRc<T, A> as fmt::Pointer>::fmt(&self.raw_rc, f)
     }
 }
 
@@ -2654,7 +2215,7 @@ impl<T> From<T> for Rc<T> {
     /// assert_eq!(Rc::from(x), rc);
     /// ```
     fn from(t: T) -> Self {
-        Rc::new(t)
+        Self { raw_rc: RawRc::from(t) }
     }
 }
 
@@ -2675,7 +2236,7 @@ impl<T, const N: usize> From<[T; N]> for Rc<[T]> {
     /// ```
     #[inline]
     fn from(v: [T; N]) -> Rc<[T]> {
-        Rc::<[T; N]>::from(v)
+        Self { raw_rc: RawRc::from(v) }
     }
 }
 
@@ -2694,7 +2255,7 @@ impl<T: Clone> From<&[T]> for Rc<[T]> {
     /// ```
     #[inline]
     fn from(v: &[T]) -> Rc<[T]> {
-        <Self as RcFromSlice<T>>::from_slice(v)
+        Self { raw_rc: RawRc::from(v) }
     }
 }
 
@@ -2714,7 +2275,7 @@ impl<T: Clone> From<&mut [T]> for Rc<[T]> {
     /// ```
     #[inline]
     fn from(v: &mut [T]) -> Rc<[T]> {
-        Rc::from(&*v)
+        Self { raw_rc: RawRc::from(v) }
     }
 }
 
@@ -2732,8 +2293,7 @@ impl From<&str> for Rc<str> {
     /// ```
     #[inline]
     fn from(v: &str) -> Rc<str> {
-        let rc = Rc::<[u8]>::from(v.as_bytes());
-        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
+        Self { raw_rc: RawRc::from(v) }
     }
 }
 
@@ -2753,7 +2313,7 @@ impl From<&mut str> for Rc<str> {
     /// ```
     #[inline]
     fn from(v: &mut str) -> Rc<str> {
-        Rc::from(&*v)
+        Self { raw_rc: RawRc::from(v) }
     }
 }
 
@@ -2772,7 +2332,7 @@ impl From<String> for Rc<str> {
     /// ```
     #[inline]
     fn from(v: String) -> Rc<str> {
-        Rc::from(&v[..])
+        Self { raw_rc: RawRc::from(v) }
     }
 }
 
@@ -2791,7 +2351,7 @@ impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
     /// ```
     #[inline]
     fn from(v: Box<T, A>) -> Rc<T, A> {
-        Rc::from_box_in(v)
+        Self { raw_rc: RawRc::from(v) }
     }
 }
 
@@ -2810,18 +2370,7 @@ impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
     /// ```
     #[inline]
     fn from(v: Vec<T, A>) -> Rc<[T], A> {
-        unsafe {
-            let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
-
-            let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
-            ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len);
-
-            // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
-            // without dropping its contents or the allocator
-            let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
-
-            Self::from_ptr_in(rc_ptr, alloc)
-        }
+        Self { raw_rc: RawRc::from(v) }
     }
 }
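
These `From` impls now all bottom out in `RawRc::from`; their documented results are unchanged, e.g.:

```rust
use std::rc::Rc;

fn main() {
    let from_str: Rc<str> = Rc::from("hello");
    let from_string: Rc<str> = Rc::from(String::from("hello"));
    assert_eq!(from_str, from_string);

    let from_slice: Rc<[i32]> = Rc::from(&[1, 2, 3][..]);
    let from_vec: Rc<[i32]> = Rc::from(vec![1, 2, 3]);
    assert_eq!(from_slice, from_vec);

    let from_box: Rc<i32> = Rc::from(Box::new(7));
    assert_eq!(*from_box, 7);
}
```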
 
@@ -2866,8 +2415,7 @@ impl From<Rc<str>> for Rc<[u8]> {
     /// ```
     #[inline]
     fn from(rc: Rc<str>) -> Self {
-        // SAFETY: `str` has the same layout as `[u8]`.
-        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) }
+        Self { raw_rc: RawRc::from(Rc::into_raw_rc(rc)) }
     }
 }
 
@@ -2876,11 +2424,9 @@ impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
     type Error = Rc<[T], A>;
 
     fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
-        if boxed_slice.len() == N {
-            let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice);
-            Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) })
-        } else {
-            Err(boxed_slice)
+        match RawRc::try_from(Rc::into_raw_rc(boxed_slice)) {
+            Ok(raw_rc) => Ok(Self { raw_rc }),
+            Err(raw_rc) => Err(Rc { raw_rc }),
         }
     }
 }
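
The slice-to-array conversion succeeds only on an exact length match and otherwise returns the original `Rc<[T]>` as the error; a quick sketch:

```rust
use std::rc::Rc;

fn main() {
    let slice: Rc<[i32]> = Rc::from([1, 2, 3]);

    // Matching length: converted without copying the elements.
    let array: Rc<[i32; 3]> = slice.try_into().unwrap();
    assert_eq!(*array, [1, 2, 3]);

    // Mismatched length: the original slice comes back as the error.
    let slice: Rc<[i32]> = Rc::from([1, 2, 3]);
    let result: Result<Rc<[i32; 4]>, _> = slice.try_into();
    assert_eq!(result.unwrap_err().len(), 3);
}
```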
@@ -2927,47 +2473,7 @@ impl<T> FromIterator<T> for Rc<[T]> {
     /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
     /// ```
     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
-        ToRcSlice::to_rc_slice(iter.into_iter())
-    }
-}
-
-/// Specialization trait used for collecting into `Rc<[T]>`.
-#[cfg(not(no_global_oom_handling))]
-trait ToRcSlice<T>: Iterator<Item = T> + Sized {
-    fn to_rc_slice(self) -> Rc<[T]>;
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
-    default fn to_rc_slice(self) -> Rc<[T]> {
-        self.collect::<Vec<T>>().into()
-    }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
-    fn to_rc_slice(self) -> Rc<[T]> {
-        // This is the case for a `TrustedLen` iterator.
-        let (low, high) = self.size_hint();
-        if let Some(high) = high {
-            debug_assert_eq!(
-                low,
-                high,
-                "TrustedLen iterator's size hint is not exact: {:?}",
-                (low, high)
-            );
-
-            unsafe {
-                // SAFETY: We need to ensure that the iterator has an exact length and we have.
-                Rc::from_iter_exact(self, low)
-            }
-        } else {
-            // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
-            // length exceeding `usize::MAX`.
-            // The default implementation would collect into a vec which would panic.
-            // Thus we panic here immediately without invoking `Vec` code.
-            panic!("capacity overflow");
-        }
+        Self { raw_rc: RawRc::from_iter(iter) }
     }
 }
 
@@ -2995,18 +2501,12 @@ impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
 /// [`upgrade`]: Weak::upgrade
 #[stable(feature = "rc_weak", since = "1.4.0")]
 #[rustc_diagnostic_item = "RcWeak"]
+#[repr(transparent)]
 pub struct Weak<
     T: ?Sized,
     #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
 > {
-    // This is a `NonNull` to allow optimizing the size of this type in enums,
-    // but it is not necessarily a valid pointer.
-    // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
-    // to allocate space on the heap. That's not a value a real pointer
-    // will ever have because RcInner has alignment at least 2.
-    // This is only possible when `T: Sized`; unsized `T` never dangle.
-    ptr: NonNull<RcInner<T>>,
-    alloc: A,
+    raw_weak: RawWeak<T, A>,
 }
 
 #[stable(feature = "rc_weak", since = "1.4.0")]
@@ -3039,7 +2539,7 @@ impl<T> Weak<T> {
     #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
     #[must_use]
     pub const fn new() -> Weak<T> {
-        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc: Global }
+        Self { raw_weak: RawWeak::new_dangling_in(Global) }
     }
 }
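
For reference, the documented behavior of a dangling `Weak`:

```rust
use std::rc::Weak;

fn main() {
    // `Weak::new` never points at an allocation, so upgrading always fails...
    let empty: Weak<i64> = Weak::new();
    assert!(empty.upgrade().is_none());

    // ...and two such `Weak`s still compare equal under `ptr_eq`.
    assert!(empty.ptr_eq(&Weak::new()));
}
```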
 
@@ -3061,21 +2561,10 @@ impl<T, A: Allocator> Weak<T, A> {
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
     pub fn new_in(alloc: A) -> Weak<T, A> {
-        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc }
+        Self { raw_weak: RawWeak::new_dangling_in(alloc) }
     }
 }
 
-pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
-    (ptr.cast::<()>()).addr() == usize::MAX
-}
-
-/// Helper type to allow accessing the reference counts without
-/// making any assertions about the data field.
-struct WeakInner<'a> {
-    weak: &'a Cell<usize>,
-    strong: &'a Cell<usize>,
-}
-
 impl<T: ?Sized> Weak<T> {
     /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
     ///
@@ -3122,16 +2611,30 @@ impl<T: ?Sized> Weak<T> {
     #[inline]
     #[stable(feature = "weak_into_raw", since = "1.45.0")]
     pub unsafe fn from_raw(ptr: *const T) -> Self {
-        unsafe { Self::from_raw_in(ptr, Global) }
+        Self { raw_weak: unsafe { RawWeak::from_raw(NonNull::new_unchecked(ptr.cast_mut())) } }
     }
 }
 
 impl<T: ?Sized, A: Allocator> Weak<T, A> {
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    fn ref_from_raw_weak(raw_weak: &RawWeak<T, A>) -> &Self {
+        // SAFETY: `Weak` is a `#[repr(transparent)]` wrapper around `RawWeak`, so the two
+        // reference types have identical layout and validity requirements.
+        unsafe { mem::transmute(raw_weak) }
+    }
+
+    #[inline]
+    fn into_raw_weak(self) -> RawWeak<T, A> {
+        let this = ManuallyDrop::new(self);
+
+        // SAFETY: `this` is wrapped in `ManuallyDrop`, so the `RawWeak` (and its allocator) is
+        // moved out exactly once and will not be double-dropped.
+        unsafe { ptr::read(&this.raw_weak) }
+    }
+
     /// Returns a reference to the underlying allocator.
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
     pub fn allocator(&self) -> &A {
-        &self.alloc
+        self.raw_weak.allocator()
     }
 
     /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
@@ -3162,18 +2665,7 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     #[must_use]
     #[stable(feature = "rc_as_ptr", since = "1.45.0")]
     pub fn as_ptr(&self) -> *const T {
-        let ptr: *mut RcInner<T> = NonNull::as_ptr(self.ptr);
-
-        if is_dangling(ptr) {
-            // If the pointer is dangling, we return the sentinel directly. This cannot be
-            // a valid payload address, as the payload is at least as aligned as RcInner (usize).
-            ptr as *const T
-        } else {
-            // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
-            // The payload may be dropped at this point, and we have to maintain provenance,
-            // so use raw pointer manipulation.
-            unsafe { &raw mut (*ptr).value }
-        }
+        self.raw_weak.as_ptr().as_ptr()
     }
 
     /// Consumes the `Weak<T>` and turns it into a raw pointer.
@@ -3206,7 +2698,7 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     #[must_use = "losing the pointer will leak memory"]
     #[stable(feature = "weak_into_raw", since = "1.45.0")]
     pub fn into_raw(self) -> *const T {
-        mem::ManuallyDrop::new(self).as_ptr()
+        self.into_raw_weak().into_raw().as_ptr()
     }
 
     /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
@@ -3242,11 +2734,9 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
     pub fn into_raw_with_allocator(self) -> (*const T, A) {
-        let this = mem::ManuallyDrop::new(self);
-        let result = this.as_ptr();
-        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
-        let alloc = unsafe { ptr::read(&this.alloc) };
-        (result, alloc)
+        let (ptr, alloc) = self.into_raw_weak().into_raw_parts();
+
+        (ptr.as_ptr(), alloc)
     }
 
     /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
@@ -3294,22 +2784,11 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
     pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
-        // See Weak::as_ptr for context on how the input pointer is derived.
-
-        let ptr = if is_dangling(ptr) {
-            // This is a dangling Weak.
-            ptr as *mut RcInner<T>
-        } else {
-            // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
-            // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
-            let offset = unsafe { data_offset(ptr) };
-            // Thus, we reverse the offset to get the whole RcInner.
-            // SAFETY: the pointer originated from a Weak, so this offset is safe.
-            unsafe { ptr.byte_sub(offset) as *mut RcInner<T> }
-        };
-
-        // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
-        Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
+        Self {
+            raw_weak: unsafe {
+                RawWeak::from_raw_parts(NonNull::new_unchecked(ptr.cast_mut()), alloc)
+            },
+        }
     }
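
A brief sketch of the weak raw-pointer round trip with the stable `Global` variants:

```rust
use std::rc::{Rc, Weak};

fn main() {
    let strong = Rc::new(String::from("hello"));
    let raw: *const String = Rc::downgrade(&strong).into_raw();

    // The leaked `Weak` still holds a weak reference until it is reclaimed.
    assert_eq!(Rc::weak_count(&strong), 1);

    // SAFETY: `raw` came from `Weak::into_raw` and is converted back exactly once.
    let weak = unsafe { Weak::from_raw(raw) };
    assert_eq!(*weak.upgrade().unwrap(), "hello");
}
```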
 
     /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
@@ -3342,16 +2821,7 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     where
         A: Clone,
     {
-        let inner = self.inner()?;
-
-        if inner.strong() == 0 {
-            None
-        } else {
-            unsafe {
-                inner.inc_strong();
-                Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
-            }
-        }
+        unsafe { self.raw_weak.upgrade::<RcOps>() }.map(|raw_rc| Rc { raw_rc })
     }
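
Upgrading succeeds only while at least one strong reference is alive; for example:

```rust
use std::rc::Rc;

fn main() {
    let five = Rc::new(5);
    let weak_five = Rc::downgrade(&five);

    // A strong reference exists, so the upgrade succeeds...
    assert_eq!(weak_five.upgrade().as_deref(), Some(&5));

    // ...and fails once the last `Rc` has been dropped.
    drop(five);
    assert!(weak_five.upgrade().is_none());
}
```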
 
     /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
@@ -3360,7 +2830,7 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     #[must_use]
     #[stable(feature = "weak_counts", since = "1.41.0")]
     pub fn strong_count(&self) -> usize {
-        if let Some(inner) = self.inner() { inner.strong() } else { 0 }
+        self.raw_weak.strong_count().map_or(0, |count| unsafe { *count.get() })
     }
 
     /// Gets the number of `Weak` pointers pointing to this allocation.
@@ -3369,32 +2839,7 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     #[must_use]
     #[stable(feature = "weak_counts", since = "1.41.0")]
     pub fn weak_count(&self) -> usize {
-        if let Some(inner) = self.inner() {
-            if inner.strong() > 0 {
-                inner.weak() - 1 // subtract the implicit weak ptr
-            } else {
-                0
-            }
-        } else {
-            0
-        }
-    }
-
-    /// Returns `None` when the pointer is dangling and there is no allocated `RcInner`,
-    /// (i.e., when this `Weak` was created by `Weak::new`).
-    #[inline]
-    fn inner(&self) -> Option<WeakInner<'_>> {
-        if is_dangling(self.ptr.as_ptr()) {
-            None
-        } else {
-            // We are careful to *not* create a reference covering the "data" field, as
-            // the field may be mutated concurrently (for example, if the last `Rc`
-            // is dropped, the data field will be dropped in-place).
-            Some(unsafe {
-                let ptr = self.ptr.as_ptr();
-                WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
-            })
-        }
+        self.raw_weak.weak_count().map_or(0, |count| unsafe { *count.get() } - 1)
     }
 
     /// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if
@@ -3440,7 +2885,7 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     #[must_use]
     #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
     pub fn ptr_eq(&self, other: &Self) -> bool {
-        ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
+        RawWeak::ptr_eq(&self.raw_weak, &other.raw_weak)
     }
 }
 
@@ -3471,16 +2916,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let inner = if let Some(inner) = self.inner() { inner } else { return };
-
-        inner.dec_weak();
-        // the weak count starts at 1, and will only go to zero if all
-        // the strong pointers have disappeared.
-        if inner.weak() == 0 {
-            unsafe {
-                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
-            }
-        }
+        unsafe { self.raw_weak.drop::<RcOps>() };
     }
 }
 
@@ -3499,10 +2935,7 @@ impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T, A> {
-        if let Some(inner) = self.inner() {
-            inner.inc_weak()
-        }
-        Weak { ptr: self.ptr, alloc: self.alloc.clone() }
+        Self { raw_weak: unsafe { self.raw_weak.clone::<RcOps>() } }
     }
 }
 
@@ -3512,7 +2945,7 @@ impl<T: ?Sized, A: Allocator + Clone> UseCloned for Weak<T, A> {}
 #[stable(feature = "rc_weak", since = "1.4.0")]
 impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "(Weak)")
+        <RawWeak<T, A> as fmt::Debug>::fmt(&self.raw_weak, f)
     }
 }
 
@@ -3532,154 +2965,27 @@ impl<T> Default for Weak<T> {
     /// assert!(empty.upgrade().is_none());
     /// ```
     fn default() -> Weak<T> {
-        Weak::new()
-    }
-}
-
-// NOTE: If you mem::forget Rcs (or Weaks), drop is skipped and the ref-count
-// is not decremented, meaning the ref-count can overflow, and then you can
-// free the allocation while outstanding Rcs (or Weaks) exist, which would be
-// unsound. We abort because this is such a degenerate scenario that we don't
-// care about what happens -- no real program should ever experience this.
-//
-// This should have negligible overhead since you don't actually need to
-// clone these much in Rust thanks to ownership and move-semantics.
-
-#[doc(hidden)]
-trait RcInnerPtr {
-    fn weak_ref(&self) -> &Cell<usize>;
-    fn strong_ref(&self) -> &Cell<usize>;
-
-    #[inline]
-    fn strong(&self) -> usize {
-        self.strong_ref().get()
-    }
-
-    #[inline]
-    fn inc_strong(&self) {
-        let strong = self.strong();
-
-        // We insert an `assume` here to hint LLVM at an otherwise
-        // missed optimization.
-        // SAFETY: The reference count will never be zero when this is
-        // called.
-        unsafe {
-            hint::assert_unchecked(strong != 0);
-        }
-
-        let strong = strong.wrapping_add(1);
-        self.strong_ref().set(strong);
-
-        // We want to abort on overflow instead of dropping the value.
-        // Checking for overflow after the store instead of before
-        // allows for slightly better code generation.
-        if core::intrinsics::unlikely(strong == 0) {
-            abort();
-        }
-    }
-
-    #[inline]
-    fn dec_strong(&self) {
-        self.strong_ref().set(self.strong() - 1);
-    }
-
-    #[inline]
-    fn weak(&self) -> usize {
-        self.weak_ref().get()
-    }
-
-    #[inline]
-    fn inc_weak(&self) {
-        let weak = self.weak();
-
-        // We insert an `assume` here to hint LLVM at an otherwise
-        // missed optimization.
-        // SAFETY: The reference count will never be zero when this is
-        // called.
-        unsafe {
-            hint::assert_unchecked(weak != 0);
-        }
-
-        let weak = weak.wrapping_add(1);
-        self.weak_ref().set(weak);
-
-        // We want to abort on overflow instead of dropping the value.
-        // Checking for overflow after the store instead of before
-        // allows for slightly better code generation.
-        if core::intrinsics::unlikely(weak == 0) {
-            abort();
-        }
-    }
-
-    #[inline]
-    fn dec_weak(&self) {
-        self.weak_ref().set(self.weak() - 1);
-    }
-}
-
-impl<T: ?Sized> RcInnerPtr for RcInner<T> {
-    #[inline(always)]
-    fn weak_ref(&self) -> &Cell<usize> {
-        &self.weak
-    }
-
-    #[inline(always)]
-    fn strong_ref(&self) -> &Cell<usize> {
-        &self.strong
-    }
-}
-
-impl<'a> RcInnerPtr for WeakInner<'a> {
-    #[inline(always)]
-    fn weak_ref(&self) -> &Cell<usize> {
-        self.weak
-    }
-
-    #[inline(always)]
-    fn strong_ref(&self) -> &Cell<usize> {
-        self.strong
+        Self { raw_weak: RawWeak::default() }
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
     fn borrow(&self) -> &T {
-        &**self
+        self.raw_rc.as_ref()
     }
 }
 
 #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
 impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
     fn as_ref(&self) -> &T {
-        &**self
+        self.raw_rc.as_ref()
     }
 }
 
 #[stable(feature = "pin", since = "1.33.0")]
 impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
 
-/// Gets the offset within an `RcInner` for the payload behind a pointer.
-///
-/// # Safety
-///
-/// The pointer must point to (and have valid metadata for) a previously
-/// valid instance of T, but the T is allowed to be dropped.
-unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
-    // Align the unsized value to the end of the RcInner.
-    // Because RcInner is repr(C), it will always be the last field in memory.
-    // SAFETY: since the only unsized types possible are slices, trait objects,
-    // and extern types, the input safety requirement is currently enough to
-    // satisfy the requirements of align_of_val_raw; this is an implementation
-    // detail of the language that must not be relied upon outside of std.
-    unsafe { data_offset_align(align_of_val_raw(ptr)) }
-}
-
-#[inline]
-fn data_offset_align(align: usize) -> usize {
-    let layout = Layout::new::<RcInner<()>>();
-    layout.size() + layout.padding_needed_for(align)
-}
-
 /// A uniquely owned [`Rc`].
 ///
 /// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong
@@ -3717,17 +3023,12 @@ fn data_offset_align(align: usize) -> usize {
 /// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
 /// including fallible or async constructors.
 #[unstable(feature = "unique_rc_arc", issue = "112566")]
+#[repr(transparent)]
 pub struct UniqueRc<
     T: ?Sized,
     #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
 > {
-    ptr: NonNull<RcInner<T>>,
-    // Define the ownership of `RcInner<T>` for drop-check
-    _marker: PhantomData<RcInner<T>>,
-    // Invariance is necessary for soundness: once other `Weak`
-    // references exist, we already have a form of shared mutability!
-    _marker2: PhantomData<*mut T>,
-    alloc: A,
+    raw_unique_rc: RawUniqueRc<T, A>,
 }
 
 // Not necessary for correctness since `UniqueRc` contains `NonNull`,
@@ -3755,49 +3056,49 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<UniqueRc<U>> for UniqueRc
 #[unstable(feature = "unique_rc_arc", issue = "112566")]
 impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for UniqueRc<T, A> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        fmt::Display::fmt(&**self, f)
+        <RawUniqueRc<T, A> as fmt::Display>::fmt(&self.raw_unique_rc, f)
     }
 }
 
 #[unstable(feature = "unique_rc_arc", issue = "112566")]
 impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for UniqueRc<T, A> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        fmt::Debug::fmt(&**self, f)
+        <RawUniqueRc<T, A> as fmt::Debug>::fmt(&self.raw_unique_rc, f)
     }
 }
 
 #[unstable(feature = "unique_rc_arc", issue = "112566")]
 impl<T: ?Sized, A: Allocator> fmt::Pointer for UniqueRc<T, A> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        fmt::Pointer::fmt(&(&raw const **self), f)
+        <RawUniqueRc<T, A> as fmt::Pointer>::fmt(&self.raw_unique_rc, f)
     }
 }
 
 #[unstable(feature = "unique_rc_arc", issue = "112566")]
 impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for UniqueRc<T, A> {
     fn borrow(&self) -> &T {
-        &**self
+        self.raw_unique_rc.as_ref()
     }
 }
 
 #[unstable(feature = "unique_rc_arc", issue = "112566")]
 impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for UniqueRc<T, A> {
     fn borrow_mut(&mut self) -> &mut T {
-        &mut **self
+        self.raw_unique_rc.as_mut()
     }
 }
 
 #[unstable(feature = "unique_rc_arc", issue = "112566")]
 impl<T: ?Sized, A: Allocator> AsRef<T> for UniqueRc<T, A> {
     fn as_ref(&self) -> &T {
-        &**self
+        self.raw_unique_rc.as_ref()
     }
 }
 
 #[unstable(feature = "unique_rc_arc", issue = "112566")]
 impl<T: ?Sized, A: Allocator> AsMut<T> for UniqueRc<T, A> {
     fn as_mut(&mut self) -> &mut T {
-        &mut **self
+        self.raw_unique_rc.as_mut()
     }
 }
 
@@ -3822,7 +3123,7 @@ impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for UniqueRc<T, A> {
     /// ```
     #[inline]
     fn eq(&self, other: &Self) -> bool {
-        PartialEq::eq(&**self, &**other)
+        RawUniqueRc::eq(&self.raw_unique_rc, &other.raw_unique_rc)
     }
 
     /// Inequality for two `UniqueRc`s.
@@ -3841,7 +3142,7 @@ impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for UniqueRc<T, A> {
     /// ```
     #[inline]
     fn ne(&self, other: &Self) -> bool {
-        PartialEq::ne(&**self, &**other)
+        RawUniqueRc::ne(&self.raw_unique_rc, &other.raw_unique_rc)
     }
 }
 
@@ -3864,7 +3165,7 @@ impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
     /// ```
     #[inline(always)]
     fn partial_cmp(&self, other: &UniqueRc<T, A>) -> Option<Ordering> {
-        (**self).partial_cmp(&**other)
+        RawUniqueRc::partial_cmp(&self.raw_unique_rc, &other.raw_unique_rc)
     }
 
     /// Less-than comparison for two `UniqueRc`s.
@@ -3883,7 +3184,7 @@ impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
     /// ```
     #[inline(always)]
     fn lt(&self, other: &UniqueRc<T, A>) -> bool {
-        **self < **other
+        RawUniqueRc::lt(&self.raw_unique_rc, &other.raw_unique_rc)
     }
 
     /// 'Less than or equal to' comparison for two `UniqueRc`s.
@@ -3902,7 +3203,7 @@ impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
     /// ```
     #[inline(always)]
     fn le(&self, other: &UniqueRc<T, A>) -> bool {
-        **self <= **other
+        RawUniqueRc::le(&self.raw_unique_rc, &other.raw_unique_rc)
     }
 
     /// Greater-than comparison for two `UniqueRc`s.
@@ -3921,7 +3222,7 @@ impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
     /// ```
     #[inline(always)]
     fn gt(&self, other: &UniqueRc<T, A>) -> bool {
-        **self > **other
+        RawUniqueRc::gt(&self.raw_unique_rc, &other.raw_unique_rc)
     }
 
     /// 'Greater than or equal to' comparison for two `UniqueRc`s.
@@ -3940,7 +3241,7 @@ impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
     /// ```
     #[inline(always)]
     fn ge(&self, other: &UniqueRc<T, A>) -> bool {
-        **self >= **other
+        RawUniqueRc::ge(&self.raw_unique_rc, &other.raw_unique_rc)
     }
 }
 
@@ -3963,7 +3264,7 @@ impl<T: ?Sized + Ord, A: Allocator> Ord for UniqueRc<T, A> {
     /// ```
     #[inline]
     fn cmp(&self, other: &UniqueRc<T, A>) -> Ordering {
-        (**self).cmp(&**other)
+        RawUniqueRc::cmp(&self.raw_unique_rc, &other.raw_unique_rc)
     }
 }
 
@@ -3973,7 +3274,7 @@ impl<T: ?Sized + Eq, A: Allocator> Eq for UniqueRc<T, A> {}
 #[unstable(feature = "unique_rc_arc", issue = "112566")]
 impl<T: ?Sized + Hash, A: Allocator> Hash for UniqueRc<T, A> {
     fn hash<H: Hasher>(&self, state: &mut H) {
-        (**self).hash(state);
+        RawUniqueRc::hash(&self.raw_unique_rc, state);
     }
 }
 
@@ -3988,7 +3289,7 @@ impl<T> UniqueRc<T> {
     #[cfg(not(no_global_oom_handling))]
     #[unstable(feature = "unique_rc_arc", issue = "112566")]
     pub fn new(value: T) -> Self {
-        Self::new_in(value, Global)
+        Self { raw_unique_rc: RawUniqueRc::new(value) }
     }
 }
 
@@ -4002,17 +3303,7 @@ impl<T, A: Allocator> UniqueRc<T, A> {
     #[cfg(not(no_global_oom_handling))]
     #[unstable(feature = "unique_rc_arc", issue = "112566")]
     pub fn new_in(value: T, alloc: A) -> Self {
-        let (ptr, alloc) = Box::into_unique(Box::new_in(
-            RcInner {
-                strong: Cell::new(0),
-                // keep one weak reference so if all the weak pointers that are created are dropped
-                // the UniqueRc still stays valid.
-                weak: Cell::new(1),
-                value,
-            },
-            alloc,
-        ));
-        Self { ptr: ptr.into(), _marker: PhantomData, _marker2: PhantomData, alloc }
+        Self { raw_unique_rc: RawUniqueRc::new_in(value, alloc) }
     }
 }
 
@@ -4026,19 +3317,10 @@ impl<T: ?Sized, A: Allocator> UniqueRc<T, A> {
     /// references.
     #[unstable(feature = "unique_rc_arc", issue = "112566")]
     pub fn into_rc(this: Self) -> Rc<T, A> {
-        let mut this = ManuallyDrop::new(this);
-
-        // Move the allocator out.
-        // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in
-        // a `ManuallyDrop`.
-        let alloc: A = unsafe { ptr::read(&this.alloc) };
+        let this = ManuallyDrop::new(this);
+        let raw_rc = unsafe { ptr::read(&this.raw_unique_rc).into_rc::<RcOps>() };
 
-        // SAFETY: This pointer was allocated at creation time so we know it is valid.
-        unsafe {
-            // Convert our weak reference into a strong reference
-            this.ptr.as_mut().strong.set(1);
-            Rc::from_inner_in(this.ptr, alloc)
-        }
+        Rc { raw_rc }
     }
 }
 
@@ -4049,12 +3331,9 @@ impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
     /// to a [`Rc`] using [`UniqueRc::into_rc`].
     #[unstable(feature = "unique_rc_arc", issue = "112566")]
     pub fn downgrade(this: &Self) -> Weak<T, A> {
-        // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
-        // one strong reference before converting to a regular Rc.
-        unsafe {
-            this.ptr.as_ref().inc_weak();
-        }
-        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
+        let raw_weak = unsafe { this.raw_unique_rc.downgrade::<RcOps>() };
+
+        Weak { raw_weak }
     }
 }
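
`UniqueRc` remains nightly-only; a minimal sketch of the cyclic-initialization pattern that `downgrade` plus `into_rc` enable (assumes `#![feature(unique_rc_arc)]` on a nightly toolchain):

```rust
#![feature(unique_rc_arc)]

use std::rc::{Rc, UniqueRc, Weak};

struct Node {
    // Filled in with a self-reference before the node is shared.
    me: Weak<Node>,
}

fn main() {
    let mut unique = UniqueRc::new(Node { me: Weak::new() });

    // Weaks handed out now cannot be upgraded yet (there is no strong reference)...
    let weak = UniqueRc::downgrade(&unique);
    assert!(weak.upgrade().is_none());

    // ...but become upgradable once the `UniqueRc` turns into a regular `Rc`.
    unique.me = weak;
    let node: Rc<Node> = UniqueRc::into_rc(unique);
    assert!(Rc::ptr_eq(&node.me.upgrade().unwrap(), &node));
}
```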
 
@@ -4063,99 +3342,20 @@ impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
     type Target = T;
 
     fn deref(&self) -> &T {
-        // SAFETY: This pointer was allocated at creation time so we know it is valid.
-        unsafe { &self.ptr.as_ref().value }
+        self.raw_unique_rc.as_ref()
     }
 }
 
 #[unstable(feature = "unique_rc_arc", issue = "112566")]
 impl<T: ?Sized, A: Allocator> DerefMut for UniqueRc<T, A> {
     fn deref_mut(&mut self) -> &mut T {
-        // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we
-        // have unique ownership and therefore it's safe to make a mutable reference because
-        // `UniqueRc` owns the only strong reference to itself.
-        unsafe { &mut (*self.ptr.as_ptr()).value }
+        self.raw_unique_rc.as_mut()
     }
 }
 
 #[unstable(feature = "unique_rc_arc", issue = "112566")]
 unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
     fn drop(&mut self) {
-        unsafe {
-            // destroy the contained object
-            drop_in_place(DerefMut::deref_mut(self));
-
-            // remove the implicit "strong weak" pointer now that we've destroyed the contents.
-            self.ptr.as_ref().dec_weak();
-
-            if self.ptr.as_ref().weak() == 0 {
-                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
-            }
-        }
-    }
-}
-
-/// A unique owning pointer to a [`RcInner`] **that does not imply the contents are initialized,**
-/// but will deallocate it (without dropping the value) when dropped.
-///
-/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
-/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
-/// which `MaybeUninit` does not.
-#[cfg(not(no_global_oom_handling))]
-struct UniqueRcUninit<T: ?Sized, A: Allocator> {
-    ptr: NonNull<RcInner<T>>,
-    layout_for_value: Layout,
-    alloc: Option<A>,
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
-    /// Allocates a RcInner with layout suitable to contain `for_value` or a clone of it.
-    fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
-        let layout = Layout::for_value(for_value);
-        let ptr = unsafe {
-            Rc::allocate_for_layout(
-                layout,
-                |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
-                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
-            )
-        };
-        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
-    }
-
-    /// Returns the pointer to be written into to initialize the [`Rc`].
-    fn data_ptr(&mut self) -> *mut T {
-        let offset = data_offset_align(self.layout_for_value.align());
-        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
-    }
-
-    /// Upgrade this into a normal [`Rc`].
-    ///
-    /// # Safety
-    ///
-    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
-    unsafe fn into_rc(self) -> Rc<T, A> {
-        let mut this = ManuallyDrop::new(self);
-        let ptr = this.ptr;
-        let alloc = this.alloc.take().unwrap();
-
-        // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
-        // for having initialized the data.
-        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
-    }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
-    fn drop(&mut self) {
-        // SAFETY:
-        // * new() produced a pointer safe to deallocate.
-        // * We own the pointer unless into_rc() was called, which forgets us.
-        unsafe {
-            self.alloc.take().unwrap().deallocate(
-                self.ptr.cast(),
-                rc_inner_layout_for_value_layout(self.layout_for_value),
-            );
-        }
+        unsafe { self.raw_unique_rc.drop::<RcOps>() };
     }
 }
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 17090925cfa0c..977ebd0b5b05f 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -35,12 +35,15 @@ use crate::alloc::handle_alloc_error;
 use crate::alloc::{AllocError, Allocator, Global, Layout};
 use crate::borrow::{Cow, ToOwned};
 use crate::boxed::Box;
-use crate::rc::is_dangling;
 #[cfg(not(no_global_oom_handling))]
 use crate::string::String;
 #[cfg(not(no_global_oom_handling))]
 use crate::vec::Vec;
 
+/// Returns `true` for the `usize::MAX` address sentinel that `Weak::new` stores in place of a
+/// real allocation pointer.
+fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
+    (ptr.cast::<()>()).addr() == usize::MAX
+}
+
 /// A soft limit on the amount of references that may be made to an `Arc`.
 ///
 /// Going above this limit will abort your program (although not
@@ -3326,6 +3329,12 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
     }
 }
 
+// Hack to allow specializing on `Eq` even though `Eq` has a method.
+#[rustc_unsafe_specialization_marker]
+trait MarkerEq: PartialEq<Self> {}
+
+impl<T: Eq> MarkerEq for T {}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 trait ArcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
     fn eq(&self, other: &Arc<T, A>) -> bool;
@@ -3352,7 +3361,7 @@ impl<T: ?Sized + PartialEq, A: Allocator> ArcEqIdent<T, A> for Arc<T, A> {
 ///
 /// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + crate::rc::MarkerEq, A: Allocator> ArcEqIdent<T, A> for Arc<T, A> {
+impl<T: ?Sized + MarkerEq, A: Allocator> ArcEqIdent<T, A> for Arc<T, A> {
     #[inline]
     fn eq(&self, other: &Arc<T, A>) -> bool {
         Arc::ptr_eq(self, other) || **self == **other
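
The `T: Eq` bound matters because a plain `PartialEq` may be irreflexive, in which case short-circuiting on pointer identity would change the answer. A small illustration (`f32` is `PartialEq` but not `Eq`):

```rust
use std::sync::Arc;

fn main() {
    // `i32: Eq`, so comparing two handles to the same allocation may short-circuit
    // on pointer identity without changing the result.
    let a = Arc::new(5);
    assert!(a == a.clone());

    // `f32` is only `PartialEq`: NaN is unequal to itself, so the comparison must
    // actually dereference both sides to report that.
    let nan = Arc::new(f32::NAN);
    assert!(nan != nan.clone());
}
```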
diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py
index c8f4a32cb17e2..2d10ca532690b 100644
--- a/src/etc/gdb_providers.py
+++ b/src/etc/gdb_providers.py
@@ -182,14 +182,36 @@ def display_hint():
         return "array"
 
 
+_REF_COUNTS_PTR_TYPE = None
+
+
+def _get_ref_counts_ptr_type():
+    global _REF_COUNTS_PTR_TYPE
+
+    if _REF_COUNTS_PTR_TYPE is None:
+        _REF_COUNTS_PTR_TYPE = gdb.lookup_type("alloc::raw_rc::RefCounts").pointer()
+
+    return _REF_COUNTS_PTR_TYPE
+
+
 class StdRcProvider(printer_base):
     def __init__(self, valobj, is_atomic=False):
         self._valobj = valobj
         self._is_atomic = is_atomic
-        self._ptr = unwrap_unique_or_non_null(valobj["ptr"])
-        self._value = self._ptr["data" if is_atomic else "value"]
-        self._strong = self._ptr["strong"]["v" if is_atomic else "value"]["value"]
-        self._weak = self._ptr["weak"]["v" if is_atomic else "value"]["value"] - 1
+
+        if is_atomic:
+            self._ptr = unwrap_unique_or_non_null(valobj["ptr"])
+            self._value = self._ptr["data"]
+            self._strong = self._ptr["strong"]["v"]["value"]
+            self._weak = self._ptr["weak"]["v"]["value"] - 1
+        else:
+            self._ptr = unwrap_unique_or_non_null(valobj["raw_rc"]["weak"]["ptr"])
+            self._value = self._ptr.dereference()
+
+            ref_counts_ptr = self._ptr.reinterpret_cast(_get_ref_counts_ptr_type()) - 1
+
+            self._strong = ref_counts_ptr["strong"]["value"]
+            self._weak = ref_counts_ptr["weak"]["value"] - 1
 
     def to_string(self):
         if self._is_atomic:
diff --git a/src/etc/lldb_providers.py b/src/etc/lldb_providers.py
index 98426e4242398..cf84f45106462 100644
--- a/src/etc/lldb_providers.py
+++ b/src/etc/lldb_providers.py
@@ -3,6 +3,7 @@
 from typing import List, TYPE_CHECKING
 
 from lldb import (
+    SBAddress,
     SBData,
     SBError,
     eBasicTypeLong,
@@ -1157,6 +1158,18 @@ def StdRcSummaryProvider(valobj: SBValue, _dict: LLDBOpaque) -> str:
     return "strong={}, weak={}".format(strong, weak)
 
 
+_REF_COUNTS_TYPE = None
+
+
+def _get_or_init_ref_counts_type(target):
+    global _REF_COUNTS_TYPE
+
+    if _REF_COUNTS_TYPE is None:
+        _REF_COUNTS_TYPE = target.FindFirstType("alloc::raw_rc::RefCounts")
+
+    return _REF_COUNTS_TYPE
+
+
 class StdRcSyntheticProvider:
     """Pretty-printer for alloc::rc::Rc<T> and alloc::sync::Arc<T>
 
@@ -1176,20 +1189,50 @@ class StdRcSyntheticProvider:
     def __init__(self, valobj: SBValue, _dict: LLDBOpaque, is_atomic: bool = False):
         self.valobj = valobj
 
-        self.ptr = unwrap_unique_or_non_null(self.valobj.GetChildMemberWithName("ptr"))
+        if is_atomic:
+            self.ptr = unwrap_unique_or_non_null(
+                self.valobj.GetChildMemberWithName("ptr")
+            )
 
-        self.value = self.ptr.GetChildMemberWithName("data" if is_atomic else "value")
+            self.value = self.ptr.GetChildMemberWithName("data")
 
-        self.strong = (
-            self.ptr.GetChildMemberWithName("strong")
-            .GetChildAtIndex(0)
-            .GetChildMemberWithName("value")
-        )
-        self.weak = (
-            self.ptr.GetChildMemberWithName("weak")
-            .GetChildAtIndex(0)
-            .GetChildMemberWithName("value")
-        )
+            self.strong = (
+                self.ptr.GetChildMemberWithName("strong")
+                .GetChildAtIndex(0)
+                .GetChildMemberWithName("value")
+            )
+            self.weak = (
+                self.ptr.GetChildMemberWithName("weak")
+                .GetChildAtIndex(0)
+                .GetChildMemberWithName("value")
+            )
+        else:
+            ptr = (
+                self.valobj.GetChildMemberWithName("raw_rc")
+                .GetChildMemberWithName("weak")
+                .GetChildMemberWithName("ptr")
+                .GetChildMemberWithName("pointer")
+            )
+
+            self.value = ptr.deref.Clone("value")
+
+            target = valobj.GetTarget()
+            ref_counts_type = _get_or_init_ref_counts_type(target)
+            ref_counts_address = ptr.GetValueAsUnsigned() - ref_counts_type.size
+
+            ref_counts_value = target.CreateValueFromAddress(
+                "ref_counts",
+                SBAddress(ref_counts_address, target),
+                ref_counts_type,
+            )
+
+            self.strong = ref_counts_value.GetChildMemberWithName(
+                "strong"
+            ).GetChildMemberWithName("value")
+
+            self.weak = ref_counts_value.GetChildMemberWithName(
+                "weak"
+            ).GetChildMemberWithName("value")
 
         self.value_builder = ValueBuilder(valobj)
 
diff --git a/src/etc/natvis/liballoc.natvis b/src/etc/natvis/liballoc.natvis
index 1528a8b1226ca..9e86fd8bd7c85 100644
--- a/src/etc/natvis/liballoc.natvis
+++ b/src/etc/natvis/liballoc.natvis
@@ -73,59 +73,58 @@
   -->
   <!-- alloc::rc::Rc<T> -->
   <Type Name="alloc::rc::Rc&lt;*&gt;">
-    <DisplayString Optional="true">{ptr.pointer->value}</DisplayString>
+    <DisplayString Optional="true">{*raw_rc.weak.ptr.pointer}</DisplayString>
     <Expand>
       <!-- thin -->
-      <ExpandedItem Optional="true">ptr.pointer->value</ExpandedItem>
-      <Item Name="[Reference count]" Optional="true">ptr.pointer->strong</Item>
-      <Item Name="[Weak reference count]" Optional="true">ptr.pointer->weak</Item>
+      <ExpandedItem Optional="true">*raw_rc.weak.ptr.pointer</ExpandedItem>
+      <Item Name="[Reference count]" Optional="true">((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer - 1)->strong</Item>
+      <Item Name="[Weak reference count]" Optional="true">((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer - 1)->weak</Item>
 
       <!-- dyn -->
-      <Item Name="[Reference count]" Optional="true">ptr.pointer.pointer->strong</Item>
-      <Item Name="[Weak reference count]" Optional="true">ptr.pointer.pointer->weak</Item>
+      <Item Name="[Reference count]" Optional="true">((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.pointer - 1)->strong</Item>
+      <Item Name="[Weak reference count]" Optional="true">((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.pointer - 1)->weak</Item>
     </Expand>
   </Type>
 
   <!-- alloc::rc::Rc<[T]> -->
   <Type Name="alloc::rc::Rc&lt;slice2$&lt;*&gt;,*&gt;">
-    <DisplayString>{{ len={ptr.pointer.length} }}</DisplayString>
+    <DisplayString>{{ len={raw_rc.weak.ptr.pointer.length} }}</DisplayString>
     <Expand>
-      <Item Name="[Length]" ExcludeView="simple">ptr.pointer.length</Item>
-      <Item Name="[Reference count]">ptr.pointer.data_ptr->strong</Item>
-      <Item Name="[Weak reference count]">ptr.pointer.data_ptr->weak</Item>
+      <Item Name="[Length]" ExcludeView="simple">raw_rc.weak.ptr.pointer.length</Item>
+      <Item Name="[Reference count]">((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.data_ptr - 1)->strong</Item>
+      <Item Name="[Weak reference count]">((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.data_ptr - 1)->weak</Item>
       <ArrayItems>
-        <Size>ptr.pointer.length</Size>
-        <!-- We add +2 to the data_ptr in order to skip the ref count fields in the RcInner -->
-        <ValuePointer>($T1*)(((size_t*)ptr.pointer.data_ptr) + 2)</ValuePointer>
+        <Size>raw_rc.weak.ptr.pointer.length</Size>
+        <ValuePointer>($T1*)raw_rc.weak.ptr.pointer.data_ptr</ValuePointer>
       </ArrayItems>
     </Expand>
   </Type>
 
   <!-- alloc::rc::Weak<T> -->
   <Type Name="alloc::rc::Weak&lt;*&gt;">
-    <DisplayString Optional="true">{ptr.pointer->value}</DisplayString>
+    <DisplayString Optional="true">{*raw_weak.ptr.pointer}</DisplayString>
     <Expand>
       <!-- thin -->
-      <ExpandedItem Optional="true">ptr.pointer->value</ExpandedItem>
-      <Item Name="[Reference count]" Optional="true">ptr.pointer->strong</Item>
-      <Item Name="[Weak reference count]" Optional="true">ptr.pointer->weak</Item>
+      <ExpandedItem Optional="true">*raw_weak.ptr.pointer</ExpandedItem>
+      <Item Name="[Reference count]" Optional="true">((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer - 1)->strong</Item>
+      <Item Name="[Weak reference count]" Optional="true">((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer - 1)->weak</Item>
 
       <!-- dyn -->
-      <Item Name="[Reference count]" Optional="true">ptr.pointer.pointer->strong</Item>
-      <Item Name="[Weak reference count]" Optional="true">ptr.pointer.pointer->weak</Item>
+      <Item Name="[Reference count]" Optional="true">((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.pointer - 1)->strong</Item>
+      <Item Name="[Weak reference count]" Optional="true">((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.pointer - 1)->weak</Item>
     </Expand>
   </Type>
 
   <!-- alloc::rc::Weak<[T]> -->
   <Type Name="alloc::rc::Weak&lt;slice2$&lt;*&gt;,*&gt;">
-    <DisplayString>{{ len={ptr.pointer.length} }}</DisplayString>
+    <DisplayString>{{ len={raw_weak.ptr.pointer.length} }}</DisplayString>
     <Expand>
-      <Item Name="[Length]" ExcludeView="simple">ptr.pointer.length</Item>
-      <Item Name="[Reference count]">ptr.pointer.data_ptr->strong</Item>
-      <Item Name="[Weak reference count]">ptr.pointer.data_ptr->weak</Item>
+      <Item Name="[Length]" ExcludeView="simple">raw_weak.ptr.pointer.length</Item>
+      <Item Name="[Reference count]">((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.data_ptr - 1)->strong</Item>
+      <Item Name="[Weak reference count]">((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.data_ptr - 1)->weak</Item>
       <ArrayItems>
-        <Size>ptr.pointer.length</Size>
-        <ValuePointer>($T1*)(((size_t*)ptr.pointer.data_ptr) + 2)</ValuePointer>
+        <Size>raw_weak.ptr.pointer.length</Size>
+        <ValuePointer>($T1*)raw_weak.ptr.pointer.data_ptr</ValuePointer>
       </ArrayItems>
     </Expand>
   </Type>
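
The gdb, lldb, and natvis changes above all recover the reference counts the same way: the smart pointer now holds a pointer directly to the value, and the counts sit immediately before it at a fixed distance of `size_of::<RefCounts>()`, regardless of the pointee type. A minimal Rust sketch of that arithmetic, using a stand-in `RefCounts` with two pointer-sized counters (the field order here is an assumption; the providers only access the fields by name):

```rust
// Stand-in for `alloc::raw_rc::RefCounts`: two pointer-sized counters stored
// back to back (field order assumed for illustration only).
#[repr(C)]
struct RefCounts {
    strong: usize,
    weak: usize,
}

/// Recover a pointer to the `RefCounts` immediately preceding the value,
/// mirroring the `(RefCounts *)value_ptr - 1` casts in the gdb and natvis
/// expressions and the `address - sizeof(RefCounts)` computation in lldb.
unsafe fn ref_counts_of<T>(value_ptr: *const T) -> *const RefCounts {
    unsafe { value_ptr.cast::<RefCounts>().sub(1) }
}

fn main() {
    // Emulate one reference-counted allocation: counts first, value right
    // after, exactly one `RefCounts` apart.
    #[repr(C)]
    struct Allocation {
        counts: RefCounts,
        value: u64,
    }

    let allocation = Allocation { counts: RefCounts { strong: 3, weak: 1 }, value: 42 };
    let recovered = unsafe { &*ref_counts_of(&allocation.value) };
    assert_eq!((recovered.strong, recovered.weak), (3, 1));
}
```

Because the distance never depends on the pointee, the same cast works for thin, `dyn`, and slice values, which is also why the natvis slice entries no longer need the old `(size_t*)data_ptr + 2` skip: `data_ptr` now points straight at the first element.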
diff --git a/src/tools/miri/tests/fail/memleak_rc.stderr b/src/tools/miri/tests/fail/memleak_rc.stderr
index df12eeed6ac64..350fe88077878 100644
--- a/src/tools/miri/tests/fail/memleak_rc.stderr
+++ b/src/tools/miri/tests/fail/memleak_rc.stderr
@@ -1,10 +1,15 @@
 error: memory leaked: ALLOC (Rust heap, SIZE, ALIGN), allocated here:
-  --> RUSTLIB/alloc/src/rc.rs:LL:CC
+  --> RUSTLIB/alloc/src/raw_rc/mod.rs:LL:CC
    |
-LL |                 Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value }))
-   |                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL |     let allocation_result = alloc.allocate(rc_layout.get());
+   |                             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: BACKTRACE:
+   = note: inside `alloc::raw_rc::allocate_uninit_raw_bytes::<std::alloc::Global>` at RUSTLIB/alloc/src/raw_rc/mod.rs:LL:CC
+   = note: inside `alloc::raw_rc::allocate_uninit_in::<std::alloc::Global, 1>` at RUSTLIB/alloc/src/raw_rc/mod.rs:LL:CC
+   = note: inside `alloc::raw_rc::allocate_uninit::<std::alloc::Global, 1>` at RUSTLIB/alloc/src/raw_rc/mod.rs:LL:CC
+   = note: inside `alloc::raw_rc::raw_weak::RawWeak::<std::cell::RefCell<std::option::Option<Dummy>>, std::alloc::Global>::new_uninit::<TAG>` at RUSTLIB/alloc/src/raw_rc/raw_weak.rs:LL:CC
+   = note: inside `alloc::raw_rc::raw_rc::RawRc::<std::cell::RefCell<std::option::Option<Dummy>>, std::alloc::Global>::new` at RUSTLIB/alloc/src/raw_rc/raw_rc.rs:LL:CC
    = note: inside `std::rc::Rc::<std::cell::RefCell<std::option::Option<Dummy>>>::new` at RUSTLIB/alloc/src/rc.rs:LL:CC
 note: inside `main`
   --> tests/fail/memleak_rc.rs:LL:CC
diff --git a/tests/codegen/array-of-dangling-weak-uses-memset.rs b/tests/codegen/array-of-dangling-weak-uses-memset.rs
new file mode 100644
index 0000000000000..10ecfa34a22ef
--- /dev/null
+++ b/tests/codegen/array-of-dangling-weak-uses-memset.rs
@@ -0,0 +1,23 @@
+//@ compile-flags: -Z merge-functions=disabled
+
+#![crate_type = "lib"]
+
+use std::{rc, sync};
+
+#[no_mangle]
+pub fn array_of_rc_weak() -> [rc::Weak<u32>; 100] {
+    // CHECK-LABEL: @array_of_rc_weak(
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: call void @llvm.memset.
+    // CHECK-NEXT: ret void
+    [(); 100].map(|()| rc::Weak::new())
+}
+
+#[no_mangle]
+pub fn array_of_sync_weak() -> [sync::Weak<u32>; 100] {
+    // CHECK-LABEL: @array_of_sync_weak(
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: call void @llvm.memset.
+    // CHECK-NEXT: ret void
+    [(); 100].map(|()| sync::Weak::new())
+}
diff --git a/tests/codegen/option-rc-as-deref-no-cmp.rs b/tests/codegen/option-rc-as-deref-no-cmp.rs
new file mode 100644
index 0000000000000..822f9204cff8a
--- /dev/null
+++ b/tests/codegen/option-rc-as-deref-no-cmp.rs
@@ -0,0 +1,14 @@
+//@ compile-flags: -Z merge-functions=disabled
+
+#![crate_type = "lib"]
+
+use std::rc::Rc;
+
+#[no_mangle]
+pub fn option_rc_as_deref_no_cmp(rc: &Option<Rc<u32>>) -> Option<&u32> {
+    // CHECK-LABEL: @option_rc_as_deref_no_cmp(ptr
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: %[[RC:.+]] = load ptr, ptr %rc
+    // CHECK-NEXT: ret ptr %[[RC]]
+    rc.as_deref()
+}
diff --git a/tests/codegen/placement-new.rs b/tests/codegen/placement-new.rs
index 7f7f0033bece3..5022af8f52f19 100644
--- a/tests/codegen/placement-new.rs
+++ b/tests/codegen/placement-new.rs
@@ -22,9 +22,11 @@ pub fn box_default_inplace() -> Box<(String, String)> {
 #[no_mangle]
 pub fn rc_default_inplace() -> Rc<(String, String)> {
     // CHECK-NOT: alloca
-    // CHECK: [[RC:%.*]] = {{.*}}call {{.*}}__rust_alloc(
+    // CHECK: [[RC:%.*]] = {{.*}}call {{.*}}__rust_alloc(i[[#BITS:]]
     // CHECK-NOT: call void @llvm.memcpy
-    // CHECK: ret ptr [[RC]]
+    // CHECK: [[DATA:%.*]] = getelementptr inbounds{{( nuw)?}} i8, ptr [[RC]], i[[#BITS]] [[#div(BITS,4)]]
+    // CHECK-NOT: call void @llvm.memcpy
+    // CHECK: ret ptr [[DATA]]
     Rc::default()
 }
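
The tightened checks pin down where the returned pointer now lands: `__rust_alloc` yields the allocation start, and `Rc::default` returns that pointer advanced by the data offset, written in FileCheck as `div(BITS, 4)` bytes for a pointer width of `BITS` bits, i.e. two pointer-sized counters. A quick sanity check of that arithmetic (the two-counter size of `RefCounts` is an assumption for illustration):

```rust
fn main() {
    // FileCheck spells the data offset as `div(BITS, 4)` bytes; on the host
    // target that equals the assumed size of `RefCounts`, i.e. two usizes.
    let bits = usize::BITS as usize; // pointer width in bits
    let assumed_ref_counts_size = 2 * std::mem::size_of::<usize>();
    assert_eq!(assumed_ref_counts_size, bits / 4);
}
```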
 
diff --git a/tests/debuginfo/rc_arc.rs b/tests/debuginfo/rc_arc.rs
index f636c60702cde..6da8338f0ebba 100644
--- a/tests/debuginfo/rc_arc.rs
+++ b/tests/debuginfo/rc_arc.rs
@@ -27,13 +27,13 @@
 
 // cdb-command:dx rc,d
 // cdb-check:rc,d             : 111 [Type: alloc::rc::Rc<i32,alloc::alloc::Global>]
-// cdb-check:    [Reference count] : 11 [Type: core::cell::Cell<usize>]
-// cdb-check:    [Weak reference count] : 2 [Type: core::cell::Cell<usize>]
+// cdb-check:    [Reference count] : 11 [Type: core::cell::UnsafeCell<usize>]
+// cdb-check:    [Weak reference count] : 2 [Type: core::cell::UnsafeCell<usize>]
 
 // cdb-command:dx weak_rc,d
 // cdb-check:weak_rc,d        : 111 [Type: alloc::rc::Weak<i32,alloc::alloc::Global>]
-// cdb-check:    [Reference count] : 11 [Type: core::cell::Cell<usize>]
-// cdb-check:    [Weak reference count] : 2 [Type: core::cell::Cell<usize>]
+// cdb-check:    [Reference count] : 11 [Type: core::cell::UnsafeCell<usize>]
+// cdb-check:    [Weak reference count] : 2 [Type: core::cell::UnsafeCell<usize>]
 
 // cdb-command:dx arc,d
 // cdb-check:arc,d            : 222 [Type: alloc::sync::Arc<i32,alloc::alloc::Global>]
@@ -47,19 +47,19 @@
 
 // cdb-command:dx dyn_rc,d
 // cdb-check:dyn_rc,d         [Type: alloc::rc::Rc<dyn$<core::fmt::Debug>,alloc::alloc::Global>]
-// cdb-check:    [Reference count] : 31 [Type: core::cell::Cell<usize>]
-// cdb-check:    [Weak reference count] : 2 [Type: core::cell::Cell<usize>]
+// cdb-check:    [Reference count] : 31 [Type: core::cell::UnsafeCell<usize>]
+// cdb-check:    [Weak reference count] : 2 [Type: core::cell::UnsafeCell<usize>]
 
 // cdb-command:dx dyn_rc_weak,d
 // cdb-check:dyn_rc_weak,d    [Type: alloc::rc::Weak<dyn$<core::fmt::Debug>,alloc::alloc::Global>]
-// cdb-check:    [Reference count] : 31 [Type: core::cell::Cell<usize>]
-// cdb-check:    [Weak reference count] : 2 [Type: core::cell::Cell<usize>]
+// cdb-check:    [Reference count] : 31 [Type: core::cell::UnsafeCell<usize>]
+// cdb-check:    [Weak reference count] : 2 [Type: core::cell::UnsafeCell<usize>]
 
 // cdb-command:dx slice_rc,d
 // cdb-check:slice_rc,d       : { len=3 } [Type: alloc::rc::Rc<slice2$<u32>,alloc::alloc::Global>]
 // cdb-check:    [Length]         : 3 [Type: [...]]
-// cdb-check:    [Reference count] : 41 [Type: core::cell::Cell<usize>]
-// cdb-check:    [Weak reference count] : 2 [Type: core::cell::Cell<usize>]
+// cdb-check:    [Reference count] : 41 [Type: core::cell::UnsafeCell<usize>]
+// cdb-check:    [Weak reference count] : 2 [Type: core::cell::UnsafeCell<usize>]
 // cdb-check:    [0]              : 1 [Type: u32]
 // cdb-check:    [1]              : 2 [Type: u32]
 // cdb-check:    [2]              : 3 [Type: u32]
@@ -67,8 +67,8 @@
 // cdb-command:dx slice_rc_weak,d
 // cdb-check:slice_rc_weak,d  : { len=3 } [Type: alloc::rc::Weak<slice2$<u32>,alloc::alloc::Global>]
 // cdb-check:    [Length]         : 3 [Type: [...]]
-// cdb-check:    [Reference count] : 41 [Type: core::cell::Cell<usize>]
-// cdb-check:    [Weak reference count] : 2 [Type: core::cell::Cell<usize>]
+// cdb-check:    [Reference count] : 41 [Type: core::cell::UnsafeCell<usize>]
+// cdb-check:    [Weak reference count] : 2 [Type: core::cell::UnsafeCell<usize>]
 // cdb-check:    [0]              : 1 [Type: u32]
 // cdb-check:    [1]              : 2 [Type: u32]
 // cdb-check:    [2]              : 3 [Type: u32]
diff --git a/tests/debuginfo/strings-and-strs.rs b/tests/debuginfo/strings-and-strs.rs
index 7d550408bec37..b74df85127c9d 100644
--- a/tests/debuginfo/strings-and-strs.rs
+++ b/tests/debuginfo/strings-and-strs.rs
@@ -19,7 +19,7 @@
 // gdb-check:$4 = ("Hello", "World")
 
 // gdb-command:print str_in_rc
-// gdb-check:$5 = alloc::rc::Rc<&str, alloc::alloc::Global> {ptr: core::ptr::non_null::NonNull<alloc::rc::RcInner<&str>> {pointer: 0x[...]}, phantom: core::marker::PhantomData<alloc::rc::RcInner<&str>>, alloc: alloc::alloc::Global}
+// gdb-check:$5 = alloc::rc::Rc<&str, alloc::alloc::Global> {raw_rc: alloc::raw_rc::raw_rc::RawRc<&str, alloc::alloc::Global> {weak: alloc::raw_rc::raw_weak::RawWeak<&str, alloc::alloc::Global> {ptr: core::ptr::non_null::NonNull<&str> {pointer: 0x[...]}, alloc: alloc::alloc::Global}, _phantom_data: core::marker::PhantomData<&str>}}
 
 // === LLDB TESTS ==================================================================================
 // lldb-command:run
@@ -38,7 +38,6 @@
 // lldb-command:v str_in_rc
 // lldb-check:(alloc::rc::Rc<&str, alloc::alloc::Global>) str_in_rc = strong=1, weak=0 { value = "Hello" { [0] = 'H' [1] = 'e' [2] = 'l' [3] = 'l' [4] = 'o' } }
 
-
 #![allow(unused_variables)]
 #![feature(omit_gdb_pretty_printer_section)]
 #![omit_gdb_pretty_printer_section]