1//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
2//! Counted'.
3//!
4//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
5//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
6//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
7//! given allocation is destroyed, the value stored in that allocation (often
8//! referred to as "inner value") is also dropped.
9//!
10//! Shared references in Rust disallow mutation by default, and [`Rc`]
11//! is no exception: you cannot generally obtain a mutable reference to
12//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
13//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
14//! inside an `Rc`][mutability].
15//!
16//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
17//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
18//! does not implement [`Send`]. As a result, the Rust compiler
19//! will check *at compile time* that you are not sending [`Rc`]s between
20//! threads. If you need multi-threaded, atomic reference counting, use
21//! [`sync::Arc`][arc].
22//!
23//! The [`downgrade`][downgrade] method can be used to create a non-owning
24//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
25//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
26//! already been dropped. In other words, `Weak` pointers do not keep the value
27//! inside the allocation alive; however, they *do* keep the allocation
28//! (the backing store for the inner value) alive.
29//!
30//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
31//! [`Weak`] is used to break cycles. For example, a tree could have strong
32//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
33//! children back to their parents.
34//!
35//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
36//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
37//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
38//! functions, called using [fully qualified syntax]:
39//!
40//! ```
41//! use std::rc::Rc;
42//!
43//! let my_rc = Rc::new(());
44//! let my_weak = Rc::downgrade(&my_rc);
45//! ```
46//!
47//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
48//! fully qualified syntax. Some people prefer to use fully qualified syntax,
49//! while others prefer using method-call syntax.
50//!
51//! ```
52//! use std::rc::Rc;
53//!
54//! let rc = Rc::new(());
55//! // Method-call syntax
56//! let rc2 = rc.clone();
57//! // Fully qualified syntax
58//! let rc3 = Rc::clone(&rc);
59//! ```
60//!
61//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
62//! already been dropped.
63//!
64//! # Cloning references
65//!
66//! Creating a new reference to the same allocation as an existing reference counted pointer
67//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
68//!
69//! ```
70//! use std::rc::Rc;
71//!
72//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
73//! // The two syntaxes below are equivalent.
74//! let a = foo.clone();
75//! let b = Rc::clone(&foo);
76//! // a and b both point to the same memory location as foo.
77//! ```
78//!
79//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
80//! the meaning of the code. In the example above, this syntax makes it easier to see that
81//! this code is creating a new reference rather than copying the whole content of foo.
82//!
83//! # Examples
84//!
85//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
86//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
87//! unique ownership, because more than one gadget may belong to the same
88//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
89//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
90//!
91//! ```
92//! use std::rc::Rc;
93//!
94//! struct Owner {
95//! name: String,
96//! // ...other fields
97//! }
98//!
99//! struct Gadget {
100//! id: i32,
101//! owner: Rc<Owner>,
102//! // ...other fields
103//! }
104//!
105//! fn main() {
106//! // Create a reference-counted `Owner`.
107//! let gadget_owner: Rc<Owner> = Rc::new(
108//! Owner {
109//! name: "Gadget Man".to_string(),
110//! }
111//! );
112//!
113//! // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
114//! // gives us a new pointer to the same `Owner` allocation, incrementing
115//! // the reference count in the process.
116//! let gadget1 = Gadget {
117//! id: 1,
118//! owner: Rc::clone(&gadget_owner),
119//! };
120//! let gadget2 = Gadget {
121//! id: 2,
122//! owner: Rc::clone(&gadget_owner),
123//! };
124//!
125//! // Dispose of our local variable `gadget_owner`.
126//! drop(gadget_owner);
127//!
128//! // Despite dropping `gadget_owner`, we're still able to print out the name
129//! // of the `Owner` of the `Gadget`s. This is because we've only dropped a
130//! // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
131//! // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
132//! // live. The field projection `gadget1.owner.name` works because
133//! // `Rc<Owner>` automatically dereferences to `Owner`.
134//! println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
135//! println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
136//!
137//! // At the end of the function, `gadget1` and `gadget2` are destroyed, and
138//! // with them the last counted references to our `Owner`. Gadget Man now
139//! // gets destroyed as well.
140//! }
141//! ```
142//!
143//! If our requirements change, and we also need to be able to traverse from
144//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
145//! to `Gadget` introduces a cycle. This means that their
146//! reference counts can never reach 0, and the allocation will never be destroyed:
147//! a memory leak. In order to get around this, we can use [`Weak`]
148//! pointers.
149//!
150//! Rust actually makes it somewhat difficult to produce this loop in the first
151//! place. In order to end up with two values that point at each other, one of
152//! them needs to be mutable. This is difficult because [`Rc`] enforces
153//! memory safety by only giving out shared references to the value it wraps,
154//! and these don't allow direct mutation. We need to wrap the part of the
155//! value we wish to mutate in a [`RefCell`], which provides *interior
156//! mutability*: a method to achieve mutability through a shared reference.
157//! [`RefCell`] enforces Rust's borrowing rules at runtime.
158//!
159//! ```
160//! use std::rc::Rc;
161//! use std::rc::Weak;
162//! use std::cell::RefCell;
163//!
164//! struct Owner {
165//! name: String,
166//! gadgets: RefCell<Vec<Weak<Gadget>>>,
167//! // ...other fields
168//! }
169//!
170//! struct Gadget {
171//! id: i32,
172//! owner: Rc<Owner>,
173//! // ...other fields
174//! }
175//!
176//! fn main() {
177//! // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
178//! // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
179//! // a shared reference.
180//! let gadget_owner: Rc<Owner> = Rc::new(
181//! Owner {
182//! name: "Gadget Man".to_string(),
183//! gadgets: RefCell::new(vec![]),
184//! }
185//! );
186//!
187//! // Create `Gadget`s belonging to `gadget_owner`, as before.
188//! let gadget1 = Rc::new(
189//! Gadget {
190//! id: 1,
191//! owner: Rc::clone(&gadget_owner),
192//! }
193//! );
194//! let gadget2 = Rc::new(
195//! Gadget {
196//! id: 2,
197//! owner: Rc::clone(&gadget_owner),
198//! }
199//! );
200//!
201//! // Add the `Gadget`s to their `Owner`.
202//! {
203//! let mut gadgets = gadget_owner.gadgets.borrow_mut();
204//! gadgets.push(Rc::downgrade(&gadget1));
205//! gadgets.push(Rc::downgrade(&gadget2));
206//!
207//! // `RefCell` dynamic borrow ends here.
208//! }
209//!
210//! // Iterate over our `Gadget`s, printing their details out.
211//! for gadget_weak in gadget_owner.gadgets.borrow().iter() {
212//!
213//! // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
214//! // guarantee the allocation still exists, we need to call
215//! // `upgrade`, which returns an `Option<Rc<Gadget>>`.
216//! //
217//! // In this case we know the allocation still exists, so we simply
218//! // `unwrap` the `Option`. In a more complicated program, you might
219//! // need graceful error handling for a `None` result.
220//!
221//! let gadget = gadget_weak.upgrade().unwrap();
222//! println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
223//! }
224//!
225//! // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
226//! // are destroyed. There are now no strong (`Rc`) pointers to the
227//! // gadgets, so they are destroyed. This zeroes the reference count on
228//! // Gadget Man, so he gets destroyed as well.
229//! }
230//! ```
231//!
232//! [clone]: Clone::clone
233//! [`Cell`]: core::cell::Cell
234//! [`RefCell`]: core::cell::RefCell
235//! [arc]: crate::sync::Arc
236//! [`Deref`]: core::ops::Deref
237//! [downgrade]: Rc::downgrade
238//! [upgrade]: Weak::upgrade
239//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
240//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name
241
242#![stable(feature = "rust1", since = "1.0.0")]
243
244#[cfg(not(test))]
245use crate::boxed::Box;
246#[cfg(test)]
247use std::boxed::Box;
248
249use core::any::Any;
250use core::borrow;
251use core::cell::Cell;
252use core::cmp::Ordering;
253use core::fmt;
254use core::hash::{Hash, Hasher};
255use core::hint;
256use core::intrinsics::abort;
257#[cfg(not(no_global_oom_handling))]
258use core::iter;
259use core::marker::{PhantomData, Unsize};
260#[cfg(not(no_global_oom_handling))]
261use core::mem::size_of_val;
262use core::mem::{self, align_of_val_raw, forget, ManuallyDrop};
263use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, Receiver};
264use core::panic::{RefUnwindSafe, UnwindSafe};
265#[cfg(not(no_global_oom_handling))]
266use core::pin::Pin;
267use core::ptr::{self, drop_in_place, NonNull};
268#[cfg(not(no_global_oom_handling))]
269use core::slice::from_raw_parts_mut;
270
271#[cfg(not(no_global_oom_handling))]
272use crate::alloc::handle_alloc_error;
273#[cfg(not(no_global_oom_handling))]
274use crate::alloc::WriteCloneIntoRaw;
275use crate::alloc::{AllocError, Allocator, Global, Layout};
276use crate::borrow::{Cow, ToOwned};
277#[cfg(not(no_global_oom_handling))]
278use crate::string::String;
279#[cfg(not(no_global_oom_handling))]
280use crate::vec::Vec;
281
282#[cfg(test)]
283mod tests;
284
285// This is repr(C) to future-proof against possible field-reordering, which
286// would interfere with otherwise safe [into|from]_raw() of transmutable
287// inner types.
288#[repr(C)]
289struct RcBox<T: ?Sized> {
290 strong: Cell<usize>,
291 weak: Cell<usize>,
292 value: T,
293}
294
295/// Calculate layout for `RcBox<T>` using the inner value's layout
296fn rcbox_layout_for_value_layout(layout: Layout) -> Layout {
297 // Calculate layout using the given value layout.
298 // Previously, layout was calculated on the expression
299 // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
300 // reference (see #54908).
301 Layout::new::<RcBox<()>>().extend(next:layout).unwrap().0.pad_to_align()
302}
303
304/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
305/// Counted'.
306///
307/// See the [module-level documentation](./index.html) for more details.
308///
309/// The inherent methods of `Rc` are all associated functions, which means
310/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
311/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
312///
313/// [get_mut]: Rc::get_mut
314#[cfg_attr(not(test), rustc_diagnostic_item = "Rc")]
315#[stable(feature = "rust1", since = "1.0.0")]
316#[rustc_insignificant_dtor]
317pub struct Rc<
318 T: ?Sized,
319 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
320> {
321 ptr: NonNull<RcBox<T>>,
322 phantom: PhantomData<RcBox<T>>,
323 alloc: A,
324}
325
326#[stable(feature = "rust1", since = "1.0.0")]
327impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}
328
329// Note that this negative impl isn't strictly necessary for correctness,
330// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
331// However, given how important `Rc`'s `!Sync`-ness is,
332// having an explicit negative impl is nice for documentation purposes
333// and results in nicer error messages.
334#[stable(feature = "rust1", since = "1.0.0")]
335impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}
336
337#[stable(feature = "catch_unwind", since = "1.9.0")]
338impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
339#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
340impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}
341
342#[unstable(feature = "coerce_unsized", issue = "18598")]
343impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}
344
345#[unstable(feature = "dispatch_from_dyn", issue = "none")]
346impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
347
348impl<T: ?Sized> Rc<T> {
349 #[inline]
350 unsafe fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
351 unsafe { Self::from_inner_in(ptr, alloc:Global) }
352 }
353
354 #[inline]
355 unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
356 unsafe { Self::from_inner(ptr:NonNull::new_unchecked(ptr)) }
357 }
358}
359
360impl<T: ?Sized, A: Allocator> Rc<T, A> {
361 #[inline(always)]
362 fn inner(&self) -> &RcBox<T> {
363 // This unsafety is ok because while this Rc is alive we're guaranteed
364 // that the inner pointer is valid.
365 unsafe { self.ptr.as_ref() }
366 }
367
368 #[inline]
369 unsafe fn from_inner_in(ptr: NonNull<RcBox<T>>, alloc: A) -> Self {
370 Self { ptr, phantom: PhantomData, alloc }
371 }
372
373 #[inline]
374 unsafe fn from_ptr_in(ptr: *mut RcBox<T>, alloc: A) -> Self {
375 unsafe { Self::from_inner_in(ptr:NonNull::new_unchecked(ptr), alloc) }
376 }
377}
378
379impl<T> Rc<T> {
380 /// Constructs a new `Rc<T>`.
381 ///
382 /// # Examples
383 ///
384 /// ```
385 /// use std::rc::Rc;
386 ///
387 /// let five = Rc::new(5);
388 /// ```
389 #[cfg(not(no_global_oom_handling))]
390 #[stable(feature = "rust1", since = "1.0.0")]
391 pub fn new(value: T) -> Rc<T> {
392 // There is an implicit weak pointer owned by all the strong
393 // pointers, which ensures that the weak destructor never frees
394 // the allocation while the strong destructor is running, even
395 // if the weak pointer is stored inside the strong one.
396 unsafe {
397 Self::from_inner(
398 Box::leak(Box::new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value }))
399 .into(),
400 )
401 }
402 }
403
404 /// Constructs a new `Rc<T>` while giving you a `Weak<T>` to the allocation,
405 /// to allow you to construct a `T` which holds a weak pointer to itself.
406 ///
407 /// Generally, a structure circularly referencing itself, either directly or
408 /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
409 /// Using this function, you get access to the weak pointer during the
410 /// initialization of `T`, before the `Rc<T>` is created, such that you can
411 /// clone and store it inside the `T`.
412 ///
413 /// `new_cyclic` first allocates the managed allocation for the `Rc<T>`,
414 /// then calls your closure, giving it a `Weak<T>` to this allocation,
415 /// and only afterwards completes the construction of the `Rc<T>` by placing
416 /// the `T` returned from your closure into the allocation.
417 ///
418 /// Since the new `Rc<T>` is not fully-constructed until `Rc<T>::new_cyclic`
419 /// returns, calling [`upgrade`] on the weak reference inside your closure will
420 /// fail and result in a `None` value.
421 ///
422 /// # Panics
423 ///
424 /// If `data_fn` panics, the panic is propagated to the caller, and the
425 /// temporary [`Weak<T>`] is dropped normally.
426 ///
427 /// # Examples
428 ///
429 /// ```
430 /// # #![allow(dead_code)]
431 /// use std::rc::{Rc, Weak};
432 ///
433 /// struct Gadget {
434 /// me: Weak<Gadget>,
435 /// }
436 ///
437 /// impl Gadget {
438 /// /// Construct a reference counted Gadget.
439 /// fn new() -> Rc<Self> {
440 /// // `me` is a `Weak<Gadget>` pointing at the new allocation of the
441 /// // `Rc` we're constructing.
442 /// Rc::new_cyclic(|me| {
443 /// // Create the actual struct here.
444 /// Gadget { me: me.clone() }
445 /// })
446 /// }
447 ///
448 /// /// Return a reference counted pointer to Self.
449 /// fn me(&self) -> Rc<Self> {
450 /// self.me.upgrade().unwrap()
451 /// }
452 /// }
453 /// ```
454 /// [`upgrade`]: Weak::upgrade
455 #[cfg(not(no_global_oom_handling))]
456 #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
457 pub fn new_cyclic<F>(data_fn: F) -> Rc<T>
458 where
459 F: FnOnce(&Weak<T>) -> T,
460 {
461 // Construct the inner in the "uninitialized" state with a single
462 // weak reference.
463 let uninit_ptr: NonNull<_> = Box::leak(Box::new(RcBox {
464 strong: Cell::new(0),
465 weak: Cell::new(1),
466 value: mem::MaybeUninit::<T>::uninit(),
467 }))
468 .into();
469
470 let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
471
472 let weak = Weak { ptr: init_ptr, alloc: Global };
473
474 // It's important we don't give up ownership of the weak pointer, or
475 // else the memory might be freed by the time `data_fn` returns. If
476 // we really wanted to pass ownership, we could create an additional
477 // weak pointer for ourselves, but this would result in additional
478 // updates to the weak reference count which might not be necessary
479 // otherwise.
480 let data = data_fn(&weak);
481
482 let strong = unsafe {
483 let inner = init_ptr.as_ptr();
484 ptr::write(ptr::addr_of_mut!((*inner).value), data);
485
486 let prev_value = (*inner).strong.get();
487 debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
488 (*inner).strong.set(1);
489
490 Rc::from_inner(init_ptr)
491 };
492
493 // Strong references should collectively own a shared weak reference,
494 // so don't run the destructor for our old weak reference.
495 mem::forget(weak);
496 strong
497 }
498
499 /// Constructs a new `Rc` with uninitialized contents.
500 ///
501 /// # Examples
502 ///
503 /// ```
504 /// #![feature(new_uninit)]
505 /// #![feature(get_mut_unchecked)]
506 ///
507 /// use std::rc::Rc;
508 ///
509 /// let mut five = Rc::<u32>::new_uninit();
510 ///
511 /// // Deferred initialization:
512 /// Rc::get_mut(&mut five).unwrap().write(5);
513 ///
514 /// let five = unsafe { five.assume_init() };
515 ///
516 /// assert_eq!(*five, 5)
517 /// ```
518 #[cfg(not(no_global_oom_handling))]
519 #[unstable(feature = "new_uninit", issue = "63291")]
520 #[must_use]
521 pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
522 unsafe {
523 Rc::from_ptr(Rc::allocate_for_layout(
524 Layout::new::<T>(),
525 |layout| Global.allocate(layout),
526 <*mut u8>::cast,
527 ))
528 }
529 }
530
531 /// Constructs a new `Rc` with uninitialized contents, with the memory
532 /// being filled with `0` bytes.
533 ///
534 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
535 /// incorrect usage of this method.
536 ///
537 /// # Examples
538 ///
539 /// ```
540 /// #![feature(new_uninit)]
541 ///
542 /// use std::rc::Rc;
543 ///
544 /// let zero = Rc::<u32>::new_zeroed();
545 /// let zero = unsafe { zero.assume_init() };
546 ///
547 /// assert_eq!(*zero, 0)
548 /// ```
549 ///
550 /// [zeroed]: mem::MaybeUninit::zeroed
551 #[cfg(not(no_global_oom_handling))]
552 #[unstable(feature = "new_uninit", issue = "63291")]
553 #[must_use]
554 pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
555 unsafe {
556 Rc::from_ptr(Rc::allocate_for_layout(
557 Layout::new::<T>(),
558 |layout| Global.allocate_zeroed(layout),
559 <*mut u8>::cast,
560 ))
561 }
562 }
563
564 /// Constructs a new `Rc<T>`, returning an error if the allocation fails
565 ///
566 /// # Examples
567 ///
568 /// ```
569 /// #![feature(allocator_api)]
570 /// use std::rc::Rc;
571 ///
572 /// let five = Rc::try_new(5);
573 /// # Ok::<(), std::alloc::AllocError>(())
574 /// ```
575 #[unstable(feature = "allocator_api", issue = "32838")]
576 pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
577 // There is an implicit weak pointer owned by all the strong
578 // pointers, which ensures that the weak destructor never frees
579 // the allocation while the strong destructor is running, even
580 // if the weak pointer is stored inside the strong one.
581 unsafe {
582 Ok(Self::from_inner(
583 Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?)
584 .into(),
585 ))
586 }
587 }
588
589 /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
590 ///
591 /// # Examples
592 ///
593 /// ```
594 /// #![feature(allocator_api, new_uninit)]
595 /// #![feature(get_mut_unchecked)]
596 ///
597 /// use std::rc::Rc;
598 ///
599 /// let mut five = Rc::<u32>::try_new_uninit()?;
600 ///
601 /// // Deferred initialization:
602 /// Rc::get_mut(&mut five).unwrap().write(5);
603 ///
604 /// let five = unsafe { five.assume_init() };
605 ///
606 /// assert_eq!(*five, 5);
607 /// # Ok::<(), std::alloc::AllocError>(())
608 /// ```
609 #[unstable(feature = "allocator_api", issue = "32838")]
610 // #[unstable(feature = "new_uninit", issue = "63291")]
611 pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
612 unsafe {
613 Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
614 Layout::new::<T>(),
615 |layout| Global.allocate(layout),
616 <*mut u8>::cast,
617 )?))
618 }
619 }
620
621 /// Constructs a new `Rc` with uninitialized contents, with the memory
622 /// being filled with `0` bytes, returning an error if the allocation fails
623 ///
624 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
625 /// incorrect usage of this method.
626 ///
627 /// # Examples
628 ///
629 /// ```
630 /// #![feature(allocator_api, new_uninit)]
631 ///
632 /// use std::rc::Rc;
633 ///
634 /// let zero = Rc::<u32>::try_new_zeroed()?;
635 /// let zero = unsafe { zero.assume_init() };
636 ///
637 /// assert_eq!(*zero, 0);
638 /// # Ok::<(), std::alloc::AllocError>(())
639 /// ```
640 ///
641 /// [zeroed]: mem::MaybeUninit::zeroed
642 #[unstable(feature = "allocator_api", issue = "32838")]
643 //#[unstable(feature = "new_uninit", issue = "63291")]
644 pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
645 unsafe {
646 Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
647 Layout::new::<T>(),
648 |layout| Global.allocate_zeroed(layout),
649 <*mut u8>::cast,
650 )?))
651 }
652 }
653 /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
654 /// `value` will be pinned in memory and unable to be moved.
655 #[cfg(not(no_global_oom_handling))]
656 #[stable(feature = "pin", since = "1.33.0")]
657 #[must_use]
658 pub fn pin(value: T) -> Pin<Rc<T>> {
659 unsafe { Pin::new_unchecked(Rc::new(value)) }
660 }
661}
662
663impl<T, A: Allocator> Rc<T, A> {
664 /// Returns a reference to the underlying allocator.
665 ///
666 /// Note: this is an associated function, which means that you have
667 /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
668 /// is so that there is no conflict with a method on the inner type.
669 #[inline]
670 #[unstable(feature = "allocator_api", issue = "32838")]
671 pub fn allocator(this: &Self) -> &A {
672 &this.alloc
673 }
674 /// Constructs a new `Rc` in the provided allocator.
675 ///
676 /// # Examples
677 ///
678 /// ```
679 /// #![feature(allocator_api)]
680 /// use std::rc::Rc;
681 /// use std::alloc::System;
682 ///
683 /// let five = Rc::new_in(5, System);
684 /// ```
685 #[cfg(not(no_global_oom_handling))]
686 #[unstable(feature = "allocator_api", issue = "32838")]
687 #[inline]
688 pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
689 // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
690 // That would make code size bigger.
691 match Self::try_new_in(value, alloc) {
692 Ok(m) => m,
693 Err(_) => handle_alloc_error(Layout::new::<RcBox<T>>()),
694 }
695 }
696
697 /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
698 ///
699 /// # Examples
700 ///
701 /// ```
702 /// #![feature(new_uninit)]
703 /// #![feature(get_mut_unchecked)]
704 /// #![feature(allocator_api)]
705 ///
706 /// use std::rc::Rc;
707 /// use std::alloc::System;
708 ///
709 /// let mut five = Rc::<u32, _>::new_uninit_in(System);
710 ///
711 /// let five = unsafe {
712 /// // Deferred initialization:
713 /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
714 ///
715 /// five.assume_init()
716 /// };
717 ///
718 /// assert_eq!(*five, 5)
719 /// ```
720 #[cfg(not(no_global_oom_handling))]
721 #[unstable(feature = "allocator_api", issue = "32838")]
722 // #[unstable(feature = "new_uninit", issue = "63291")]
723 #[inline]
724 pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
725 unsafe {
726 Rc::from_ptr_in(
727 Rc::allocate_for_layout(
728 Layout::new::<T>(),
729 |layout| alloc.allocate(layout),
730 <*mut u8>::cast,
731 ),
732 alloc,
733 )
734 }
735 }
736
737 /// Constructs a new `Rc` with uninitialized contents, with the memory
738 /// being filled with `0` bytes, in the provided allocator.
739 ///
740 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
741 /// incorrect usage of this method.
742 ///
743 /// # Examples
744 ///
745 /// ```
746 /// #![feature(new_uninit)]
747 /// #![feature(allocator_api)]
748 ///
749 /// use std::rc::Rc;
750 /// use std::alloc::System;
751 ///
752 /// let zero = Rc::<u32, _>::new_zeroed_in(System);
753 /// let zero = unsafe { zero.assume_init() };
754 ///
755 /// assert_eq!(*zero, 0)
756 /// ```
757 ///
758 /// [zeroed]: mem::MaybeUninit::zeroed
759 #[cfg(not(no_global_oom_handling))]
760 #[unstable(feature = "allocator_api", issue = "32838")]
761 // #[unstable(feature = "new_uninit", issue = "63291")]
762 #[inline]
763 pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
764 unsafe {
765 Rc::from_ptr_in(
766 Rc::allocate_for_layout(
767 Layout::new::<T>(),
768 |layout| alloc.allocate_zeroed(layout),
769 <*mut u8>::cast,
770 ),
771 alloc,
772 )
773 }
774 }
775
776 /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
777 /// fails
778 ///
779 /// # Examples
780 ///
781 /// ```
782 /// #![feature(allocator_api)]
783 /// use std::rc::Rc;
784 /// use std::alloc::System;
785 ///
786 /// let five = Rc::try_new_in(5, System);
787 /// # Ok::<(), std::alloc::AllocError>(())
788 /// ```
789 #[unstable(feature = "allocator_api", issue = "32838")]
790 #[inline]
791 pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
792 // There is an implicit weak pointer owned by all the strong
793 // pointers, which ensures that the weak destructor never frees
794 // the allocation while the strong destructor is running, even
795 // if the weak pointer is stored inside the strong one.
796 let (ptr, alloc) = Box::into_unique(Box::try_new_in(
797 RcBox { strong: Cell::new(1), weak: Cell::new(1), value },
798 alloc,
799 )?);
800 Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
801 }
802
803 /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
804 /// error if the allocation fails
805 ///
806 /// # Examples
807 ///
808 /// ```
809 /// #![feature(allocator_api, new_uninit)]
810 /// #![feature(get_mut_unchecked)]
811 ///
812 /// use std::rc::Rc;
813 /// use std::alloc::System;
814 ///
815 /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
816 ///
817 /// let five = unsafe {
818 /// // Deferred initialization:
819 /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
820 ///
821 /// five.assume_init()
822 /// };
823 ///
824 /// assert_eq!(*five, 5);
825 /// # Ok::<(), std::alloc::AllocError>(())
826 /// ```
827 #[unstable(feature = "allocator_api", issue = "32838")]
828 // #[unstable(feature = "new_uninit", issue = "63291")]
829 #[inline]
830 pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
831 unsafe {
832 Ok(Rc::from_ptr_in(
833 Rc::try_allocate_for_layout(
834 Layout::new::<T>(),
835 |layout| alloc.allocate(layout),
836 <*mut u8>::cast,
837 )?,
838 alloc,
839 ))
840 }
841 }
842
843 /// Constructs a new `Rc` with uninitialized contents, with the memory
844 /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
845 /// fails
846 ///
847 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
848 /// incorrect usage of this method.
849 ///
850 /// # Examples
851 ///
852 /// ```
853 /// #![feature(allocator_api, new_uninit)]
854 ///
855 /// use std::rc::Rc;
856 /// use std::alloc::System;
857 ///
858 /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
859 /// let zero = unsafe { zero.assume_init() };
860 ///
861 /// assert_eq!(*zero, 0);
862 /// # Ok::<(), std::alloc::AllocError>(())
863 /// ```
864 ///
865 /// [zeroed]: mem::MaybeUninit::zeroed
866 #[unstable(feature = "allocator_api", issue = "32838")]
867 //#[unstable(feature = "new_uninit", issue = "63291")]
868 #[inline]
869 pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
870 unsafe {
871 Ok(Rc::from_ptr_in(
872 Rc::try_allocate_for_layout(
873 Layout::new::<T>(),
874 |layout| alloc.allocate_zeroed(layout),
875 <*mut u8>::cast,
876 )?,
877 alloc,
878 ))
879 }
880 }
881
882 /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
883 /// `value` will be pinned in memory and unable to be moved.
884 #[cfg(not(no_global_oom_handling))]
885 #[unstable(feature = "allocator_api", issue = "32838")]
886 #[inline]
887 pub fn pin_in(value: T, alloc: A) -> Pin<Self>
888 where
889 A: 'static,
890 {
891 unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
892 }
893
894 /// Returns the inner value, if the `Rc` has exactly one strong reference.
895 ///
896 /// Otherwise, an [`Err`] is returned with the same `Rc` that was
897 /// passed in.
898 ///
899 /// This will succeed even if there are outstanding weak references.
900 ///
901 /// # Examples
902 ///
903 /// ```
904 /// use std::rc::Rc;
905 ///
906 /// let x = Rc::new(3);
907 /// assert_eq!(Rc::try_unwrap(x), Ok(3));
908 ///
909 /// let x = Rc::new(4);
910 /// let _y = Rc::clone(&x);
911 /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
912 /// ```
913 #[inline]
914 #[stable(feature = "rc_unique", since = "1.4.0")]
915 pub fn try_unwrap(this: Self) -> Result<T, Self> {
916 if Rc::strong_count(&this) == 1 {
917 unsafe {
918 let val = ptr::read(&*this); // copy the contained object
919 let alloc = ptr::read(&this.alloc); // copy the allocator
920
921 // Indicate to Weaks that they can't be promoted by decrementing
922 // the strong count, and then remove the implicit "strong weak"
923 // pointer while also handling drop logic by just crafting a
924 // fake Weak.
925 this.inner().dec_strong();
926 let _weak = Weak { ptr: this.ptr, alloc };
927 forget(this);
928 Ok(val)
929 }
930 } else {
931 Err(this)
932 }
933 }
934
935 /// Returns the inner value, if the `Rc` has exactly one strong reference.
936 ///
937 /// Otherwise, [`None`] is returned and the `Rc` is dropped.
938 ///
939 /// This will succeed even if there are outstanding weak references.
940 ///
941 /// If `Rc::into_inner` is called on every clone of this `Rc`,
942 /// it is guaranteed that exactly one of the calls returns the inner value.
943 /// This means in particular that the inner value is not dropped.
944 ///
945 /// [`Rc::try_unwrap`] is conceptually similar to `Rc::into_inner`.
946 /// And while they are meant for different use-cases, `Rc::into_inner(this)`
947 /// is in fact equivalent to <code>[Rc::try_unwrap]\(this).[ok][Result::ok]()</code>.
948 /// (Note that the same kind of equivalence does **not** hold true for
949 /// [`Arc`](crate::sync::Arc), due to race conditions that do not apply to `Rc`!)
950 ///
951 /// # Examples
952 ///
953 /// ```
954 /// use std::rc::Rc;
955 ///
956 /// let x = Rc::new(3);
957 /// assert_eq!(Rc::into_inner(x), Some(3));
958 ///
959 /// let x = Rc::new(4);
960 /// let y = Rc::clone(&x);
961 ///
962 /// assert_eq!(Rc::into_inner(y), None);
963 /// assert_eq!(Rc::into_inner(x), Some(4));
964 /// ```
965 #[inline]
966 #[stable(feature = "rc_into_inner", since = "1.70.0")]
967 pub fn into_inner(this: Self) -> Option<T> {
968 Rc::try_unwrap(this).ok()
969 }
970}
971
972impl<T> Rc<[T]> {
973 /// Constructs a new reference-counted slice with uninitialized contents.
974 ///
975 /// # Examples
976 ///
977 /// ```
978 /// #![feature(new_uninit)]
979 /// #![feature(get_mut_unchecked)]
980 ///
981 /// use std::rc::Rc;
982 ///
983 /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
984 ///
985 /// // Deferred initialization:
986 /// let data = Rc::get_mut(&mut values).unwrap();
987 /// data[0].write(1);
988 /// data[1].write(2);
989 /// data[2].write(3);
990 ///
991 /// let values = unsafe { values.assume_init() };
992 ///
993 /// assert_eq!(*values, [1, 2, 3])
994 /// ```
995 #[cfg(not(no_global_oom_handling))]
996 #[unstable(feature = "new_uninit", issue = "63291")]
997 #[must_use]
998 pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
999 unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
1000 }
1001
1002 /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
1003 /// filled with `0` bytes.
1004 ///
1005 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
1006 /// incorrect usage of this method.
1007 ///
1008 /// # Examples
1009 ///
1010 /// ```
1011 /// #![feature(new_uninit)]
1012 ///
1013 /// use std::rc::Rc;
1014 ///
1015 /// let values = Rc::<[u32]>::new_zeroed_slice(3);
1016 /// let values = unsafe { values.assume_init() };
1017 ///
1018 /// assert_eq!(*values, [0, 0, 0])
1019 /// ```
1020 ///
1021 /// [zeroed]: mem::MaybeUninit::zeroed
1022 #[cfg(not(no_global_oom_handling))]
1023 #[unstable(feature = "new_uninit", issue = "63291")]
1024 #[must_use]
1025 pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
1026 unsafe {
1027 Rc::from_ptr(Rc::allocate_for_layout(
1028 Layout::array::<T>(len).unwrap(),
1029 |layout| Global.allocate_zeroed(layout),
1030 |mem| {
1031 ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
1032 as *mut RcBox<[mem::MaybeUninit<T>]>
1033 },
1034 ))
1035 }
1036 }
1037}
1038
1039impl<T, A: Allocator> Rc<[T], A> {
1040 /// Constructs a new reference-counted slice with uninitialized contents.
1041 ///
1042 /// # Examples
1043 ///
1044 /// ```
1045 /// #![feature(new_uninit)]
1046 /// #![feature(get_mut_unchecked)]
1047 /// #![feature(allocator_api)]
1048 ///
1049 /// use std::rc::Rc;
1050 /// use std::alloc::System;
1051 ///
1052 /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
1053 ///
1054 /// let values = unsafe {
1055 /// // Deferred initialization:
1056 /// Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
1057 /// Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
1058 /// Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
1059 ///
1060 /// values.assume_init()
1061 /// };
1062 ///
1063 /// assert_eq!(*values, [1, 2, 3])
1064 /// ```
1065 #[cfg(not(no_global_oom_handling))]
1066 #[unstable(feature = "allocator_api", issue = "32838")]
1067 // #[unstable(feature = "new_uninit", issue = "63291")]
1068 #[inline]
1069 pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
1070 unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
1071 }
1072
1073 /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
1074 /// filled with `0` bytes.
1075 ///
1076 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
1077 /// incorrect usage of this method.
1078 ///
1079 /// # Examples
1080 ///
1081 /// ```
1082 /// #![feature(new_uninit)]
1083 /// #![feature(allocator_api)]
1084 ///
1085 /// use std::rc::Rc;
1086 /// use std::alloc::System;
1087 ///
1088 /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
1089 /// let values = unsafe { values.assume_init() };
1090 ///
1091 /// assert_eq!(*values, [0, 0, 0])
1092 /// ```
1093 ///
1094 /// [zeroed]: mem::MaybeUninit::zeroed
1095 #[cfg(not(no_global_oom_handling))]
1096 #[unstable(feature = "allocator_api", issue = "32838")]
1097 // #[unstable(feature = "new_uninit", issue = "63291")]
1098 #[inline]
1099 pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
1100 unsafe {
1101 Rc::from_ptr_in(
1102 Rc::allocate_for_layout(
1103 Layout::array::<T>(len).unwrap(),
1104 |layout| alloc.allocate_zeroed(layout),
1105 |mem| {
1106 ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
1107 as *mut RcBox<[mem::MaybeUninit<T>]>
1108 },
1109 ),
1110 alloc,
1111 )
1112 }
1113 }
1114}
1115
1116impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
1117 /// Converts to `Rc<T>`.
1118 ///
1119 /// # Safety
1120 ///
1121 /// As with [`MaybeUninit::assume_init`],
1122 /// it is up to the caller to guarantee that the inner value
1123 /// really is in an initialized state.
1124 /// Calling this when the content is not yet fully initialized
1125 /// causes immediate undefined behavior.
1126 ///
1127 /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
1128 ///
1129 /// # Examples
1130 ///
1131 /// ```
1132 /// #![feature(new_uninit)]
1133 /// #![feature(get_mut_unchecked)]
1134 ///
1135 /// use std::rc::Rc;
1136 ///
1137 /// let mut five = Rc::<u32>::new_uninit();
1138 ///
1139 /// // Deferred initialization:
1140 /// Rc::get_mut(&mut five).unwrap().write(5);
1141 ///
1142 /// let five = unsafe { five.assume_init() };
1143 ///
1144 /// assert_eq!(*five, 5)
1145 /// ```
1146 #[unstable(feature = "new_uninit", issue = "63291")]
1147 #[inline]
1148 pub unsafe fn assume_init(self) -> Rc<T, A>
1149 where
1150 A: Clone,
1151 {
1152 let md_self = mem::ManuallyDrop::new(self);
1153 unsafe { Rc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) }
1154 }
1155}
1156
1157impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
1158 /// Converts to `Rc<[T]>`.
1159 ///
1160 /// # Safety
1161 ///
1162 /// As with [`MaybeUninit::assume_init`],
1163 /// it is up to the caller to guarantee that the inner value
1164 /// really is in an initialized state.
1165 /// Calling this when the content is not yet fully initialized
1166 /// causes immediate undefined behavior.
1167 ///
1168 /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
1169 ///
1170 /// # Examples
1171 ///
1172 /// ```
1173 /// #![feature(new_uninit)]
1174 /// #![feature(get_mut_unchecked)]
1175 ///
1176 /// use std::rc::Rc;
1177 ///
1178 /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
1179 ///
1180 /// // Deferred initialization:
1181 /// let data = Rc::get_mut(&mut values).unwrap();
1182 /// data[0].write(1);
1183 /// data[1].write(2);
1184 /// data[2].write(3);
1185 ///
1186 /// let values = unsafe { values.assume_init() };
1187 ///
1188 /// assert_eq!(*values, [1, 2, 3])
1189 /// ```
1190 #[unstable(feature = "new_uninit", issue = "63291")]
1191 #[inline]
1192 pub unsafe fn assume_init(self) -> Rc<[T], A>
1193 where
1194 A: Clone,
1195 {
1196 let md_self = mem::ManuallyDrop::new(self);
1197 unsafe { Rc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) }
1198 }
1199}
1200
1201impl<T: ?Sized> Rc<T> {
1202 /// Constructs an `Rc<T>` from a raw pointer.
1203 ///
1204 /// The raw pointer must have been previously returned by a call to
1205 /// [`Rc<U>::into_raw`][into_raw] with the following requirements:
1206 ///
1207 /// * If `U` is sized, it must have the same size and alignment as `T`. This
1208 /// is trivially true if `U` is `T`.
1209 /// * If `U` is unsized, its data pointer must have the same size and
1210 /// alignment as `T`. This is trivially true if `Rc<U>` was constructed
1211 /// through `Rc<T>` and then converted to `Rc<U>` through an [unsized
1212 /// coercion].
1213 ///
1214 /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
1215 /// and alignment, this is basically like transmuting references of
1216 /// different types. See [`mem::transmute`][transmute] for more information
1217 /// on what restrictions apply in this case.
1218 ///
1219 /// The raw pointer must point to a block of memory allocated by the global allocator
1220 ///
1221 /// The user of `from_raw` has to make sure a specific value of `T` is only
1222 /// dropped once.
1223 ///
1224 /// This function is unsafe because improper use may lead to memory unsafety,
1225 /// even if the returned `Rc<T>` is never accessed.
1226 ///
1227 /// [into_raw]: Rc::into_raw
1228 /// [transmute]: core::mem::transmute
1229 /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
1230 ///
1231 /// # Examples
1232 ///
1233 /// ```
1234 /// use std::rc::Rc;
1235 ///
1236 /// let x = Rc::new("hello".to_owned());
1237 /// let x_ptr = Rc::into_raw(x);
1238 ///
1239 /// unsafe {
1240 /// // Convert back to an `Rc` to prevent leak.
1241 /// let x = Rc::from_raw(x_ptr);
1242 /// assert_eq!(&*x, "hello");
1243 ///
1244 /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
1245 /// }
1246 ///
1247 /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
1248 /// ```
1249 ///
1250 /// Convert a slice back into its original array:
1251 ///
1252 /// ```
1253 /// use std::rc::Rc;
1254 ///
1255 /// let x: Rc<[u32]> = Rc::new([1, 2, 3]);
1256 /// let x_ptr: *const [u32] = Rc::into_raw(x);
1257 ///
1258 /// unsafe {
1259 /// let x: Rc<[u32; 3]> = Rc::from_raw(x_ptr.cast::<[u32; 3]>());
1260 /// assert_eq!(&*x, &[1, 2, 3]);
1261 /// }
1262 /// ```
1263 #[inline]
1264 #[stable(feature = "rc_raw", since = "1.17.0")]
1265 pub unsafe fn from_raw(ptr: *const T) -> Self {
1266 unsafe { Self::from_raw_in(ptr, Global) }
1267 }
1268
1269 /// Increments the strong reference count on the `Rc<T>` associated with the
1270 /// provided pointer by one.
1271 ///
1272 /// # Safety
1273 ///
1274 /// The pointer must have been obtained through `Rc::into_raw`, the
1275 /// associated `Rc` instance must be valid (i.e. the strong count must be at
1276 /// least 1) for the duration of this method, and `ptr` must point to a block of memory
1277 /// allocated by the global allocator.
1278 ///
1279 /// # Examples
1280 ///
1281 /// ```
1282 /// use std::rc::Rc;
1283 ///
1284 /// let five = Rc::new(5);
1285 ///
1286 /// unsafe {
1287 /// let ptr = Rc::into_raw(five);
1288 /// Rc::increment_strong_count(ptr);
1289 ///
1290 /// let five = Rc::from_raw(ptr);
1291 /// assert_eq!(2, Rc::strong_count(&five));
1292 /// }
1293 /// ```
1294 #[inline]
1295 #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
1296 pub unsafe fn increment_strong_count(ptr: *const T) {
1297 unsafe { Self::increment_strong_count_in(ptr, Global) }
1298 }
1299
1300 /// Decrements the strong reference count on the `Rc<T>` associated with the
1301 /// provided pointer by one.
1302 ///
1303 /// # Safety
1304 ///
1305 /// The pointer must have been obtained through `Rc::into_raw`, the
1306 /// associated `Rc` instance must be valid (i.e. the strong count must be at
1307 /// least 1) when invoking this method, and `ptr` must point to a block of memory
1308 /// allocated by the global allocator. This method can be used to release the final `Rc` and
1309 /// backing storage, but **should not** be called after the final `Rc` has been released.
1310 ///
1311 /// # Examples
1312 ///
1313 /// ```
1314 /// use std::rc::Rc;
1315 ///
1316 /// let five = Rc::new(5);
1317 ///
1318 /// unsafe {
1319 /// let ptr = Rc::into_raw(five);
1320 /// Rc::increment_strong_count(ptr);
1321 ///
1322 /// let five = Rc::from_raw(ptr);
1323 /// assert_eq!(2, Rc::strong_count(&five));
1324 /// Rc::decrement_strong_count(ptr);
1325 /// assert_eq!(1, Rc::strong_count(&five));
1326 /// }
1327 /// ```
1328 #[inline]
1329 #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
1330 pub unsafe fn decrement_strong_count(ptr: *const T) {
1331 unsafe { Self::decrement_strong_count_in(ptr, Global) }
1332 }
1333}
1334
1335impl<T: ?Sized, A: Allocator> Rc<T, A> {
1336 /// Consumes the `Rc`, returning the wrapped pointer.
1337 ///
1338 /// To avoid a memory leak the pointer must be converted back to an `Rc` using
1339 /// [`Rc::from_raw`].
1340 ///
1341 /// # Examples
1342 ///
1343 /// ```
1344 /// use std::rc::Rc;
1345 ///
1346 /// let x = Rc::new("hello".to_owned());
1347 /// let x_ptr = Rc::into_raw(x);
1348 /// assert_eq!(unsafe { &*x_ptr }, "hello");
1349 /// ```
1350 #[must_use = "losing the pointer will leak memory"]
1351 #[stable(feature = "rc_raw", since = "1.17.0")]
1352 #[rustc_never_returns_null_ptr]
1353 pub fn into_raw(this: Self) -> *const T {
1354 let ptr = Self::as_ptr(&this);
1355 mem::forget(this);
1356 ptr
1357 }
1358
1359 /// Provides a raw pointer to the data.
1360 ///
1361 /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
1362 /// for as long there are strong counts in the `Rc`.
1363 ///
1364 /// # Examples
1365 ///
1366 /// ```
1367 /// use std::rc::Rc;
1368 ///
1369 /// let x = Rc::new("hello".to_owned());
1370 /// let y = Rc::clone(&x);
1371 /// let x_ptr = Rc::as_ptr(&x);
1372 /// assert_eq!(x_ptr, Rc::as_ptr(&y));
1373 /// assert_eq!(unsafe { &*x_ptr }, "hello");
1374 /// ```
1375 #[stable(feature = "weak_into_raw", since = "1.45.0")]
1376 #[rustc_never_returns_null_ptr]
1377 pub fn as_ptr(this: &Self) -> *const T {
1378 let ptr: *mut RcBox<T> = NonNull::as_ptr(this.ptr);
1379
1380 // SAFETY: This cannot go through Deref::deref or Rc::inner because
1381 // this is required to retain raw/mut provenance such that e.g. `get_mut` can
1382 // write through the pointer after the Rc is recovered through `from_raw`.
1383 unsafe { ptr::addr_of_mut!((*ptr).value) }
1384 }
1385
1386 /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
1387 ///
1388 /// The raw pointer must have been previously returned by a call to [`Rc<U,
1389 /// A>::into_raw`][into_raw] with the following requirements:
1390 ///
1391 /// * If `U` is sized, it must have the same size and alignment as `T`. This
1392 /// is trivially true if `U` is `T`.
1393 /// * If `U` is unsized, its data pointer must have the same size and
1394 /// alignment as `T`. This is trivially true if `Rc<U>` was constructed
1395 /// through `Rc<T>` and then converted to `Rc<U>` through an [unsized
1396 /// coercion].
1397 ///
1398 /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
1399 /// and alignment, this is basically like transmuting references of
1400 /// different types. See [`mem::transmute`][transmute] for more information
1401 /// on what restrictions apply in this case.
1402 ///
1403 /// The raw pointer must point to a block of memory allocated by `alloc`
1404 ///
1405 /// The user of `from_raw` has to make sure a specific value of `T` is only
1406 /// dropped once.
1407 ///
1408 /// This function is unsafe because improper use may lead to memory unsafety,
1409 /// even if the returned `Rc<T>` is never accessed.
1410 ///
1411 /// [into_raw]: Rc::into_raw
1412 /// [transmute]: core::mem::transmute
1413 /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
1414 ///
1415 /// # Examples
1416 ///
1417 /// ```
1418 /// #![feature(allocator_api)]
1419 ///
1420 /// use std::rc::Rc;
1421 /// use std::alloc::System;
1422 ///
1423 /// let x = Rc::new_in("hello".to_owned(), System);
1424 /// let x_ptr = Rc::into_raw(x);
1425 ///
1426 /// unsafe {
1427 /// // Convert back to an `Rc` to prevent leak.
1428 /// let x = Rc::from_raw_in(x_ptr, System);
1429 /// assert_eq!(&*x, "hello");
1430 ///
1431 /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
1432 /// }
1433 ///
1434 /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
1435 /// ```
1436 ///
1437 /// Convert a slice back into its original array:
1438 ///
1439 /// ```
1440 /// #![feature(allocator_api)]
1441 ///
1442 /// use std::rc::Rc;
1443 /// use std::alloc::System;
1444 ///
1445 /// let x: Rc<[u32], _> = Rc::new_in([1, 2, 3], System);
1446 /// let x_ptr: *const [u32] = Rc::into_raw(x);
1447 ///
1448 /// unsafe {
1449 /// let x: Rc<[u32; 3], _> = Rc::from_raw_in(x_ptr.cast::<[u32; 3]>(), System);
1450 /// assert_eq!(&*x, &[1, 2, 3]);
1451 /// }
1452 /// ```
1453 #[unstable(feature = "allocator_api", issue = "32838")]
1454 pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
1455 let offset = unsafe { data_offset(ptr) };
1456
1457 // Reverse the offset to find the original RcBox.
1458 let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcBox<T> };
1459
1460 unsafe { Self::from_ptr_in(rc_ptr, alloc) }
1461 }
1462
1463 /// Creates a new [`Weak`] pointer to this allocation.
1464 ///
1465 /// # Examples
1466 ///
1467 /// ```
1468 /// use std::rc::Rc;
1469 ///
1470 /// let five = Rc::new(5);
1471 ///
1472 /// let weak_five = Rc::downgrade(&five);
1473 /// ```
1474 #[must_use = "this returns a new `Weak` pointer, \
1475 without modifying the original `Rc`"]
1476 #[stable(feature = "rc_weak", since = "1.4.0")]
1477 pub fn downgrade(this: &Self) -> Weak<T, A>
1478 where
1479 A: Clone,
1480 {
1481 this.inner().inc_weak();
1482 // Make sure we do not create a dangling Weak
1483 debug_assert!(!is_dangling(this.ptr.as_ptr()));
1484 Weak { ptr: this.ptr, alloc: this.alloc.clone() }
1485 }
1486
1487 /// Gets the number of [`Weak`] pointers to this allocation.
1488 ///
1489 /// # Examples
1490 ///
1491 /// ```
1492 /// use std::rc::Rc;
1493 ///
1494 /// let five = Rc::new(5);
1495 /// let _weak_five = Rc::downgrade(&five);
1496 ///
1497 /// assert_eq!(1, Rc::weak_count(&five));
1498 /// ```
1499 #[inline]
1500 #[stable(feature = "rc_counts", since = "1.15.0")]
1501 pub fn weak_count(this: &Self) -> usize {
1502 this.inner().weak() - 1
1503 }
1504
1505 /// Gets the number of strong (`Rc`) pointers to this allocation.
1506 ///
1507 /// # Examples
1508 ///
1509 /// ```
1510 /// use std::rc::Rc;
1511 ///
1512 /// let five = Rc::new(5);
1513 /// let _also_five = Rc::clone(&five);
1514 ///
1515 /// assert_eq!(2, Rc::strong_count(&five));
1516 /// ```
1517 #[inline]
1518 #[stable(feature = "rc_counts", since = "1.15.0")]
1519 pub fn strong_count(this: &Self) -> usize {
1520 this.inner().strong()
1521 }
1522
1523 /// Increments the strong reference count on the `Rc<T>` associated with the
1524 /// provided pointer by one.
1525 ///
1526 /// # Safety
1527 ///
1528 /// The pointer must have been obtained through `Rc::into_raw`, the
1529 /// associated `Rc` instance must be valid (i.e. the strong count must be at
1530 /// least 1) for the duration of this method, and `ptr` must point to a block of memory
1531 /// allocated by `alloc`
1532 ///
1533 /// # Examples
1534 ///
1535 /// ```
1536 /// #![feature(allocator_api)]
1537 ///
1538 /// use std::rc::Rc;
1539 /// use std::alloc::System;
1540 ///
1541 /// let five = Rc::new_in(5, System);
1542 ///
1543 /// unsafe {
1544 /// let ptr = Rc::into_raw(five);
1545 /// Rc::increment_strong_count_in(ptr, System);
1546 ///
1547 /// let five = Rc::from_raw_in(ptr, System);
1548 /// assert_eq!(2, Rc::strong_count(&five));
1549 /// }
1550 /// ```
1551 #[inline]
1552 #[unstable(feature = "allocator_api", issue = "32838")]
1553 pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
1554 where
1555 A: Clone,
1556 {
1557 // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
1558 let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
1559 // Now increase refcount, but don't drop new refcount either
1560 let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
1561 }
1562
1563 /// Decrements the strong reference count on the `Rc<T>` associated with the
1564 /// provided pointer by one.
1565 ///
1566 /// # Safety
1567 ///
1568 /// The pointer must have been obtained through `Rc::into_raw`, the
1569 /// associated `Rc` instance must be valid (i.e. the strong count must be at
1570 /// least 1) when invoking this method, and `ptr` must point to a block of memory
1571 /// allocated by `alloc`. This method can be used to release the final `Rc` and backing storage,
1572 /// but **should not** be called after the final `Rc` has been released.
1573 ///
1574 /// # Examples
1575 ///
1576 /// ```
1577 /// #![feature(allocator_api)]
1578 ///
1579 /// use std::rc::Rc;
1580 /// use std::alloc::System;
1581 ///
1582 /// let five = Rc::new_in(5, System);
1583 ///
1584 /// unsafe {
1585 /// let ptr = Rc::into_raw(five);
1586 /// Rc::increment_strong_count_in(ptr, System);
1587 ///
1588 /// let five = Rc::from_raw_in(ptr, System);
1589 /// assert_eq!(2, Rc::strong_count(&five));
1590 /// Rc::decrement_strong_count_in(ptr, System);
1591 /// assert_eq!(1, Rc::strong_count(&five));
1592 /// }
1593 /// ```
1594 #[inline]
1595 #[unstable(feature = "allocator_api", issue = "32838")]
1596 pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
1597 unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
1598 }
1599
1600 /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
1601 /// this allocation.
1602 #[inline]
1603 fn is_unique(this: &Self) -> bool {
1604 Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
1605 }
1606
1607 /// Returns a mutable reference into the given `Rc`, if there are
1608 /// no other `Rc` or [`Weak`] pointers to the same allocation.
1609 ///
1610 /// Returns [`None`] otherwise, because it is not safe to
1611 /// mutate a shared value.
1612 ///
1613 /// See also [`make_mut`][make_mut], which will [`clone`][clone]
1614 /// the inner value when there are other `Rc` pointers.
1615 ///
1616 /// [make_mut]: Rc::make_mut
1617 /// [clone]: Clone::clone
1618 ///
1619 /// # Examples
1620 ///
1621 /// ```
1622 /// use std::rc::Rc;
1623 ///
1624 /// let mut x = Rc::new(3);
1625 /// *Rc::get_mut(&mut x).unwrap() = 4;
1626 /// assert_eq!(*x, 4);
1627 ///
1628 /// let _y = Rc::clone(&x);
1629 /// assert!(Rc::get_mut(&mut x).is_none());
1630 /// ```
1631 #[inline]
1632 #[stable(feature = "rc_unique", since = "1.4.0")]
1633 pub fn get_mut(this: &mut Self) -> Option<&mut T> {
1634 if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
1635 }
1636
1637 /// Returns a mutable reference into the given `Rc`,
1638 /// without any check.
1639 ///
1640 /// See also [`get_mut`], which is safe and does appropriate checks.
1641 ///
1642 /// [`get_mut`]: Rc::get_mut
1643 ///
1644 /// # Safety
1645 ///
1646 /// If any other `Rc` or [`Weak`] pointers to the same allocation exist, then
1647 /// they must not be dereferenced or have active borrows for the duration
1648 /// of the returned borrow, and their inner type must be exactly the same as the
1649 /// inner type of this Rc (including lifetimes). This is trivially the case if no
1650 /// such pointers exist, for example immediately after `Rc::new`.
1651 ///
1652 /// # Examples
1653 ///
1654 /// ```
1655 /// #![feature(get_mut_unchecked)]
1656 ///
1657 /// use std::rc::Rc;
1658 ///
1659 /// let mut x = Rc::new(String::new());
1660 /// unsafe {
1661 /// Rc::get_mut_unchecked(&mut x).push_str("foo")
1662 /// }
1663 /// assert_eq!(*x, "foo");
1664 /// ```
1665 /// Other `Rc` pointers to the same allocation must be to the same type.
1666 /// ```no_run
1667 /// #![feature(get_mut_unchecked)]
1668 ///
1669 /// use std::rc::Rc;
1670 ///
1671 /// let x: Rc<str> = Rc::from("Hello, world!");
1672 /// let mut y: Rc<[u8]> = x.clone().into();
1673 /// unsafe {
1674 /// // this is Undefined Behavior, because x's inner type is str, not [u8]
1675 /// Rc::get_mut_unchecked(&mut y).fill(0xff); // 0xff is invalid in UTF-8
1676 /// }
1677 /// println!("{}", &*x); // Invalid UTF-8 in a str
1678 /// ```
1679 /// Other `Rc` pointers to the same allocation must be to the exact same type, including lifetimes.
1680 /// ```no_run
1681 /// #![feature(get_mut_unchecked)]
1682 ///
1683 /// use std::rc::Rc;
1684 ///
1685 /// let x: Rc<&str> = Rc::new("Hello, world!");
1686 /// {
1687 /// let s = String::from("Oh, no!");
1688 /// let mut y: Rc<&str> = x.clone().into();
1689 /// unsafe {
1690 /// // this is Undefined Behavior, because x's inner type
1691 /// // is &'long str, not &'short str
1692 /// *Rc::get_mut_unchecked(&mut y) = &s;
1693 /// }
1694 /// }
1695 /// println!("{}", &*x); // Use-after-free
1696 /// ```
1697 #[inline]
1698 #[unstable(feature = "get_mut_unchecked", issue = "63292")]
1699 pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
1700 // We are careful to *not* create a reference covering the "count" fields, as
1701 // this would conflict with accesses to the reference counts (e.g. by `Weak`).
1702 unsafe { &mut (*this.ptr.as_ptr()).value }
1703 }
1704
1705 #[inline]
1706 #[stable(feature = "ptr_eq", since = "1.17.0")]
1707 /// Returns `true` if the two `Rc`s point to the same allocation in a vein similar to
1708 /// [`ptr::eq`]. This function ignores the metadata of `dyn Trait` pointers.
1709 ///
1710 /// # Examples
1711 ///
1712 /// ```
1713 /// use std::rc::Rc;
1714 ///
1715 /// let five = Rc::new(5);
1716 /// let same_five = Rc::clone(&five);
1717 /// let other_five = Rc::new(5);
1718 ///
1719 /// assert!(Rc::ptr_eq(&five, &same_five));
1720 /// assert!(!Rc::ptr_eq(&five, &other_five));
1721 /// ```
1722 pub fn ptr_eq(this: &Self, other: &Self) -> bool {
1723 ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
1724 }
1725}
1726
1727impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
1728 /// Makes a mutable reference into the given `Rc`.
1729 ///
1730 /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
1731 /// [`clone`] the inner value to a new allocation to ensure unique ownership. This is also
1732 /// referred to as clone-on-write.
1733 ///
1734 /// However, if there are no other `Rc` pointers to this allocation, but some [`Weak`]
1735 /// pointers, then the [`Weak`] pointers will be disassociated and the inner value will not
1736 /// be cloned.
1737 ///
1738 /// See also [`get_mut`], which will fail rather than cloning the inner value
1739 /// or disassociating [`Weak`] pointers.
1740 ///
1741 /// [`clone`]: Clone::clone
1742 /// [`get_mut`]: Rc::get_mut
1743 ///
1744 /// # Examples
1745 ///
1746 /// ```
1747 /// use std::rc::Rc;
1748 ///
1749 /// let mut data = Rc::new(5);
1750 ///
1751 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
1752 /// let mut other_data = Rc::clone(&data); // Won't clone inner data
1753 /// *Rc::make_mut(&mut data) += 1; // Clones inner data
1754 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
1755 /// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything
1756 ///
1757 /// // Now `data` and `other_data` point to different allocations.
1758 /// assert_eq!(*data, 8);
1759 /// assert_eq!(*other_data, 12);
1760 /// ```
1761 ///
1762 /// [`Weak`] pointers will be disassociated:
1763 ///
1764 /// ```
1765 /// use std::rc::Rc;
1766 ///
1767 /// let mut data = Rc::new(75);
1768 /// let weak = Rc::downgrade(&data);
1769 ///
1770 /// assert!(75 == *data);
1771 /// assert!(75 == *weak.upgrade().unwrap());
1772 ///
1773 /// *Rc::make_mut(&mut data) += 1;
1774 ///
1775 /// assert!(76 == *data);
1776 /// assert!(weak.upgrade().is_none());
1777 /// ```
1778 #[cfg(not(no_global_oom_handling))]
1779 #[inline]
1780 #[stable(feature = "rc_unique", since = "1.4.0")]
1781 pub fn make_mut(this: &mut Self) -> &mut T {
1782 if Rc::strong_count(this) != 1 {
1783 // Gotta clone the data, there are other Rcs.
1784 // Pre-allocate memory to allow writing the cloned value directly.
1785 let mut rc = Self::new_uninit_in(this.alloc.clone());
1786 unsafe {
1787 let data = Rc::get_mut_unchecked(&mut rc);
1788 (**this).write_clone_into_raw(data.as_mut_ptr());
1789 *this = rc.assume_init();
1790 }
1791 } else if Rc::weak_count(this) != 0 {
1792 // Can just steal the data, all that's left is Weaks
1793 let mut rc = Self::new_uninit_in(this.alloc.clone());
1794 unsafe {
1795 let data = Rc::get_mut_unchecked(&mut rc);
1796 data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
1797
1798 this.inner().dec_strong();
1799 // Remove implicit strong-weak ref (no need to craft a fake
1800 // Weak here -- we know other Weaks can clean up for us)
1801 this.inner().dec_weak();
1802 ptr::write(this, rc.assume_init());
1803 }
1804 }
1805 // This unsafety is ok because we're guaranteed that the pointer
1806 // returned is the *only* pointer that will ever be returned to T. Our
1807 // reference count is guaranteed to be 1 at this point, and we required
1808 // the `Rc<T>` itself to be `mut`, so we're returning the only possible
1809 // reference to the allocation.
1810 unsafe { &mut this.ptr.as_mut().value }
1811 }
1812
1813 /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
1814 /// clone.
1815 ///
1816 /// Assuming `rc_t` is of type `Rc<T>`, this function is functionally equivalent to
1817 /// `(*rc_t).clone()`, but will avoid cloning the inner value where possible.
1818 ///
1819 /// # Examples
1820 ///
1821 /// ```
1822 /// # use std::{ptr, rc::Rc};
1823 /// let inner = String::from("test");
1824 /// let ptr = inner.as_ptr();
1825 ///
1826 /// let rc = Rc::new(inner);
1827 /// let inner = Rc::unwrap_or_clone(rc);
1828 /// // The inner value was not cloned
1829 /// assert!(ptr::eq(ptr, inner.as_ptr()));
1830 ///
1831 /// let rc = Rc::new(inner);
1832 /// let rc2 = rc.clone();
1833 /// let inner = Rc::unwrap_or_clone(rc);
1834 /// // Because there were 2 references, we had to clone the inner value.
1835 /// assert!(!ptr::eq(ptr, inner.as_ptr()));
1836 /// // `rc2` is the last reference, so when we unwrap it we get back
1837 /// // the original `String`.
1838 /// let inner = Rc::unwrap_or_clone(rc2);
1839 /// assert!(ptr::eq(ptr, inner.as_ptr()));
1840 /// ```
1841 #[inline]
1842 #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")]
1843 pub fn unwrap_or_clone(this: Self) -> T {
1844 Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
1845 }
1846}
1847
1848impl<A: Allocator + Clone> Rc<dyn Any, A> {
1849 /// Attempt to downcast the `Rc<dyn Any>` to a concrete type.
1850 ///
1851 /// # Examples
1852 ///
1853 /// ```
1854 /// use std::any::Any;
1855 /// use std::rc::Rc;
1856 ///
1857 /// fn print_if_string(value: Rc<dyn Any>) {
1858 /// if let Ok(string) = value.downcast::<String>() {
1859 /// println!("String ({}): {}", string.len(), string);
1860 /// }
1861 /// }
1862 ///
1863 /// let my_string = "Hello World".to_string();
1864 /// print_if_string(Rc::new(my_string));
1865 /// print_if_string(Rc::new(0i8));
1866 /// ```
1867 #[inline]
1868 #[stable(feature = "rc_downcast", since = "1.29.0")]
1869 pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
1870 if (*self).is::<T>() {
1871 unsafe {
1872 let ptr = self.ptr.cast::<RcBox<T>>();
1873 let alloc = self.alloc.clone();
1874 forget(self);
1875 Ok(Rc::from_inner_in(ptr, alloc))
1876 }
1877 } else {
1878 Err(self)
1879 }
1880 }
1881
1882 /// Downcasts the `Rc<dyn Any>` to a concrete type.
1883 ///
1884 /// For a safe alternative see [`downcast`].
1885 ///
1886 /// # Examples
1887 ///
1888 /// ```
1889 /// #![feature(downcast_unchecked)]
1890 ///
1891 /// use std::any::Any;
1892 /// use std::rc::Rc;
1893 ///
1894 /// let x: Rc<dyn Any> = Rc::new(1_usize);
1895 ///
1896 /// unsafe {
1897 /// assert_eq!(*x.downcast_unchecked::<usize>(), 1);
1898 /// }
1899 /// ```
1900 ///
1901 /// # Safety
1902 ///
1903 /// The contained value must be of type `T`. Calling this method
1904 /// with the incorrect type is *undefined behavior*.
1905 ///
1906 ///
1907 /// [`downcast`]: Self::downcast
1908 #[inline]
1909 #[unstable(feature = "downcast_unchecked", issue = "90850")]
1910 pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
1911 unsafe {
1912 let ptr = self.ptr.cast::<RcBox<T>>();
1913 let alloc = self.alloc.clone();
1914 mem::forget(self);
1915 Rc::from_inner_in(ptr, alloc)
1916 }
1917 }
1918}
1919
1920impl<T: ?Sized> Rc<T> {
1921 /// Allocates an `RcBox<T>` with sufficient space for
1922 /// a possibly-unsized inner value where the value has the layout provided.
1923 ///
1924 /// The function `mem_to_rcbox` is called with the data pointer
1925 /// and must return back a (potentially fat)-pointer for the `RcBox<T>`.
1926 #[cfg(not(no_global_oom_handling))]
1927 unsafe fn allocate_for_layout(
1928 value_layout: Layout,
1929 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
1930 mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
1931 ) -> *mut RcBox<T> {
1932 let layout = rcbox_layout_for_value_layout(value_layout);
1933 unsafe {
1934 Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rcbox)
1935 .unwrap_or_else(|_| handle_alloc_error(layout))
1936 }
1937 }
1938
1939 /// Allocates an `RcBox<T>` with sufficient space for
1940 /// a possibly-unsized inner value where the value has the layout provided,
1941 /// returning an error if allocation fails.
1942 ///
1943 /// The function `mem_to_rcbox` is called with the data pointer
1944 /// and must return back a (potentially fat)-pointer for the `RcBox<T>`.
1945 #[inline]
1946 unsafe fn try_allocate_for_layout(
1947 value_layout: Layout,
1948 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
1949 mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
1950 ) -> Result<*mut RcBox<T>, AllocError> {
1951 let layout = rcbox_layout_for_value_layout(value_layout);
1952
1953 // Allocate for the layout.
1954 let ptr = allocate(layout)?;
1955
1956 // Initialize the RcBox
1957 let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr());
1958 unsafe {
1959 debug_assert_eq!(Layout::for_value_raw(inner), layout);
1960
1961 ptr::addr_of_mut!((*inner).strong).write(Cell::new(1));
1962 ptr::addr_of_mut!((*inner).weak).write(Cell::new(1));
1963 }
1964
1965 Ok(inner)
1966 }
1967}
1968
1969impl<T: ?Sized, A: Allocator> Rc<T, A> {
1970 /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
1971 #[cfg(not(no_global_oom_handling))]
1972 unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcBox<T> {
1973 // Allocate for the `RcBox<T>` using the given value.
1974 unsafe {
1975 Rc::<T>::allocate_for_layout(
1976 Layout::for_value_raw(ptr),
1977 |layout| alloc.allocate(layout),
1978 |mem| mem.with_metadata_of(ptr as *const RcBox<T>),
1979 )
1980 }
1981 }
1982
1983 #[cfg(not(no_global_oom_handling))]
1984 fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
1985 unsafe {
1986 let value_size = size_of_val(&*src);
1987 let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
1988
1989 // Copy value as bytes
1990 ptr::copy_nonoverlapping(
1991 core::ptr::addr_of!(*src) as *const u8,
1992 ptr::addr_of_mut!((*ptr).value) as *mut u8,
1993 value_size,
1994 );
1995
1996 // Free the allocation without dropping its contents
1997 let (bptr, alloc) = Box::into_raw_with_allocator(src);
1998 let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop<T>, alloc.by_ref());
1999 drop(src);
2000
2001 Self::from_ptr_in(ptr, alloc)
2002 }
2003 }
2004}
2005
2006impl<T> Rc<[T]> {
2007 /// Allocates an `RcBox<[T]>` with the given length.
2008 #[cfg(not(no_global_oom_handling))]
2009 unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> {
2010 unsafe {
2011 Self::allocate_for_layout(
2012 Layout::array::<T>(len).unwrap(),
2013 |layout| Global.allocate(layout),
2014 |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcBox<[T]>,
2015 )
2016 }
2017 }
2018
2019 /// Copy elements from slice into newly allocated `Rc<[T]>`
2020 ///
2021 /// Unsafe because the caller must either take ownership or bind `T: Copy`
2022 #[cfg(not(no_global_oom_handling))]
2023 unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
2024 unsafe {
2025 let ptr = Self::allocate_for_slice(v.len());
2026 ptr::copy_nonoverlapping(
2027 v.as_ptr(),
2028 ptr::addr_of_mut!((*ptr).value) as *mut T,
2029 v.len(),
2030 );
2031 Self::from_ptr(ptr)
2032 }
2033 }
2034
2035 /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
2036 ///
2037 /// Behavior is undefined should the size be wrong.
2038 #[cfg(not(no_global_oom_handling))]
2039 unsafe fn from_iter_exact(iter: impl Iterator<Item = T>, len: usize) -> Rc<[T]> {
2040 // Panic guard while cloning T elements.
2041 // In the event of a panic, elements that have been written
2042 // into the new RcBox will be dropped, then the memory freed.
2043 struct Guard<T> {
2044 mem: NonNull<u8>,
2045 elems: *mut T,
2046 layout: Layout,
2047 n_elems: usize,
2048 }
2049
2050 impl<T> Drop for Guard<T> {
2051 fn drop(&mut self) {
2052 unsafe {
2053 let slice = from_raw_parts_mut(self.elems, self.n_elems);
2054 ptr::drop_in_place(slice);
2055
2056 Global.deallocate(self.mem, self.layout);
2057 }
2058 }
2059 }
2060
2061 unsafe {
2062 let ptr = Self::allocate_for_slice(len);
2063
2064 let mem = ptr as *mut _ as *mut u8;
2065 let layout = Layout::for_value_raw(ptr);
2066
2067 // Pointer to first element
2068 let elems = ptr::addr_of_mut!((*ptr).value) as *mut T;
2069
2070 let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
2071
2072 for (i, item) in iter.enumerate() {
2073 ptr::write(elems.add(i), item);
2074 guard.n_elems += 1;
2075 }
2076
2077 // All clear. Forget the guard so it doesn't free the new RcBox.
2078 forget(guard);
2079
2080 Self::from_ptr(ptr)
2081 }
2082 }
2083}
2084
2085impl<T, A: Allocator> Rc<[T], A> {
2086 /// Allocates an `RcBox<[T]>` with the given length.
2087 #[inline]
2088 #[cfg(not(no_global_oom_handling))]
2089 unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcBox<[T]> {
2090 unsafe {
2091 Rc::<[T]>::allocate_for_layout(
2092 value_layout:Layout::array::<T>(len).unwrap(),
2093 |layout| alloc.allocate(layout),
2094 |mem: *mut u8| ptr::slice_from_raw_parts_mut(data:mem.cast::<T>(), len) as *mut RcBox<[T]>,
2095 )
2096 }
2097 }
2098}
2099
2100#[cfg(not(no_global_oom_handling))]
2101/// Specialization trait used for `From<&[T]>`.
2102trait RcFromSlice<T> {
2103 fn from_slice(slice: &[T]) -> Self;
2104}
2105
2106#[cfg(not(no_global_oom_handling))]
2107impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
2108 #[inline]
2109 default fn from_slice(v: &[T]) -> Self {
2110 unsafe { Self::from_iter_exact(iter:v.iter().cloned(), v.len()) }
2111 }
2112}
2113
2114#[cfg(not(no_global_oom_handling))]
2115impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
2116 #[inline]
2117 fn from_slice(v: &[T]) -> Self {
2118 unsafe { Rc::copy_from_slice(v) }
2119 }
2120}
2121
2122#[stable(feature = "rust1", since = "1.0.0")]
2123impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
2124 type Target = T;
2125
2126 #[inline(always)]
2127 fn deref(&self) -> &T {
2128 &self.inner().value
2129 }
2130}
2131
2132#[unstable(feature = "deref_pure_trait", issue = "87121")]
2133unsafe impl<T: ?Sized, A: Allocator> DerefPure for Rc<T, A> {}
2134
2135#[unstable(feature = "receiver_trait", issue = "none")]
2136impl<T: ?Sized> Receiver for Rc<T> {}
2137
2138#[stable(feature = "rust1", since = "1.0.0")]
2139unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
2140 /// Drops the `Rc`.
2141 ///
2142 /// This will decrement the strong reference count. If the strong reference
2143 /// count reaches zero then the only other references (if any) are
2144 /// [`Weak`], so we `drop` the inner value.
2145 ///
2146 /// # Examples
2147 ///
2148 /// ```
2149 /// use std::rc::Rc;
2150 ///
2151 /// struct Foo;
2152 ///
2153 /// impl Drop for Foo {
2154 /// fn drop(&mut self) {
2155 /// println!("dropped!");
2156 /// }
2157 /// }
2158 ///
2159 /// let foo = Rc::new(Foo);
2160 /// let foo2 = Rc::clone(&foo);
2161 ///
2162 /// drop(foo); // Doesn't print anything
2163 /// drop(foo2); // Prints "dropped!"
2164 /// ```
2165 fn drop(&mut self) {
2166 unsafe {
2167 self.inner().dec_strong();
2168 if self.inner().strong() == 0 {
2169 // destroy the contained object
2170 ptr::drop_in_place(Self::get_mut_unchecked(self));
2171
2172 // remove the implicit "strong weak" pointer now that we've
2173 // destroyed the contents.
2174 self.inner().dec_weak();
2175
2176 if self.inner().weak() == 0 {
2177 self.alloc
2178 .deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
2179 }
2180 }
2181 }
2182 }
2183}
2184
2185#[stable(feature = "rust1", since = "1.0.0")]
2186impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
2187 /// Makes a clone of the `Rc` pointer.
2188 ///
2189 /// This creates another pointer to the same allocation, increasing the
2190 /// strong reference count.
2191 ///
2192 /// # Examples
2193 ///
2194 /// ```
2195 /// use std::rc::Rc;
2196 ///
2197 /// let five = Rc::new(5);
2198 ///
2199 /// let _ = Rc::clone(&five);
2200 /// ```
2201 #[inline]
2202 fn clone(&self) -> Self {
2203 unsafe {
2204 self.inner().inc_strong();
2205 Self::from_inner_in(self.ptr, self.alloc.clone())
2206 }
2207 }
2208}
2209
2210#[cfg(not(no_global_oom_handling))]
2211#[stable(feature = "rust1", since = "1.0.0")]
2212impl<T: Default> Default for Rc<T> {
2213 /// Creates a new `Rc<T>`, with the `Default` value for `T`.
2214 ///
2215 /// # Examples
2216 ///
2217 /// ```
2218 /// use std::rc::Rc;
2219 ///
2220 /// let x: Rc<i32> = Default::default();
2221 /// assert_eq!(*x, 0);
2222 /// ```
2223 #[inline]
2224 fn default() -> Rc<T> {
2225 Rc::new(Default::default())
2226 }
2227}
2228
2229#[stable(feature = "rust1", since = "1.0.0")]
2230trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
2231 fn eq(&self, other: &Rc<T, A>) -> bool;
2232 fn ne(&self, other: &Rc<T, A>) -> bool;
2233}
2234
2235#[stable(feature = "rust1", since = "1.0.0")]
2236impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2237 #[inline]
2238 default fn eq(&self, other: &Rc<T, A>) -> bool {
2239 **self == **other
2240 }
2241
2242 #[inline]
2243 default fn ne(&self, other: &Rc<T, A>) -> bool {
2244 **self != **other
2245 }
2246}
2247
2248// Hack to allow specializing on `Eq` even though `Eq` has a method.
2249#[rustc_unsafe_specialization_marker]
2250pub(crate) trait MarkerEq: PartialEq<Self> {}
2251
2252impl<T: Eq> MarkerEq for T {}
2253
2254/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
2255/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
2256/// store large values, that are slow to clone, but also heavy to check for equality, causing this
2257/// cost to pay off more easily. It's also more likely to have two `Rc` clones, that point to
2258/// the same value, than two `&T`s.
2259///
2260/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
2261#[stable(feature = "rust1", since = "1.0.0")]
2262impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2263 #[inline]
2264 fn eq(&self, other: &Rc<T, A>) -> bool {
2265 Rc::ptr_eq(self, other) || **self == **other
2266 }
2267
2268 #[inline]
2269 fn ne(&self, other: &Rc<T, A>) -> bool {
2270 !Rc::ptr_eq(self, other) && **self != **other
2271 }
2272}
2273
2274#[stable(feature = "rust1", since = "1.0.0")]
2275impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
2276 /// Equality for two `Rc`s.
2277 ///
2278 /// Two `Rc`s are equal if their inner values are equal, even if they are
2279 /// stored in different allocation.
2280 ///
2281 /// If `T` also implements `Eq` (implying reflexivity of equality),
2282 /// two `Rc`s that point to the same allocation are
2283 /// always equal.
2284 ///
2285 /// # Examples
2286 ///
2287 /// ```
2288 /// use std::rc::Rc;
2289 ///
2290 /// let five = Rc::new(5);
2291 ///
2292 /// assert!(five == Rc::new(5));
2293 /// ```
2294 #[inline]
2295 fn eq(&self, other: &Rc<T, A>) -> bool {
2296 RcEqIdent::eq(self, other)
2297 }
2298
2299 /// Inequality for two `Rc`s.
2300 ///
2301 /// Two `Rc`s are not equal if their inner values are not equal.
2302 ///
2303 /// If `T` also implements `Eq` (implying reflexivity of equality),
2304 /// two `Rc`s that point to the same allocation are
2305 /// always equal.
2306 ///
2307 /// # Examples
2308 ///
2309 /// ```
2310 /// use std::rc::Rc;
2311 ///
2312 /// let five = Rc::new(5);
2313 ///
2314 /// assert!(five != Rc::new(6));
2315 /// ```
2316 #[inline]
2317 fn ne(&self, other: &Rc<T, A>) -> bool {
2318 RcEqIdent::ne(self, other)
2319 }
2320}
2321
2322#[stable(feature = "rust1", since = "1.0.0")]
2323impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
2324
2325#[stable(feature = "rust1", since = "1.0.0")]
2326impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
2327 /// Partial comparison for two `Rc`s.
2328 ///
2329 /// The two are compared by calling `partial_cmp()` on their inner values.
2330 ///
2331 /// # Examples
2332 ///
2333 /// ```
2334 /// use std::rc::Rc;
2335 /// use std::cmp::Ordering;
2336 ///
2337 /// let five = Rc::new(5);
2338 ///
2339 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
2340 /// ```
2341 #[inline(always)]
2342 fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
2343 (**self).partial_cmp(&**other)
2344 }
2345
2346 /// Less-than comparison for two `Rc`s.
2347 ///
2348 /// The two are compared by calling `<` on their inner values.
2349 ///
2350 /// # Examples
2351 ///
2352 /// ```
2353 /// use std::rc::Rc;
2354 ///
2355 /// let five = Rc::new(5);
2356 ///
2357 /// assert!(five < Rc::new(6));
2358 /// ```
2359 #[inline(always)]
2360 fn lt(&self, other: &Rc<T, A>) -> bool {
2361 **self < **other
2362 }
2363
2364 /// 'Less than or equal to' comparison for two `Rc`s.
2365 ///
2366 /// The two are compared by calling `<=` on their inner values.
2367 ///
2368 /// # Examples
2369 ///
2370 /// ```
2371 /// use std::rc::Rc;
2372 ///
2373 /// let five = Rc::new(5);
2374 ///
2375 /// assert!(five <= Rc::new(5));
2376 /// ```
2377 #[inline(always)]
2378 fn le(&self, other: &Rc<T, A>) -> bool {
2379 **self <= **other
2380 }
2381
2382 /// Greater-than comparison for two `Rc`s.
2383 ///
2384 /// The two are compared by calling `>` on their inner values.
2385 ///
2386 /// # Examples
2387 ///
2388 /// ```
2389 /// use std::rc::Rc;
2390 ///
2391 /// let five = Rc::new(5);
2392 ///
2393 /// assert!(five > Rc::new(4));
2394 /// ```
2395 #[inline(always)]
2396 fn gt(&self, other: &Rc<T, A>) -> bool {
2397 **self > **other
2398 }
2399
2400 /// 'Greater than or equal to' comparison for two `Rc`s.
2401 ///
2402 /// The two are compared by calling `>=` on their inner values.
2403 ///
2404 /// # Examples
2405 ///
2406 /// ```
2407 /// use std::rc::Rc;
2408 ///
2409 /// let five = Rc::new(5);
2410 ///
2411 /// assert!(five >= Rc::new(5));
2412 /// ```
2413 #[inline(always)]
2414 fn ge(&self, other: &Rc<T, A>) -> bool {
2415 **self >= **other
2416 }
2417}
2418
2419#[stable(feature = "rust1", since = "1.0.0")]
2420impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
2421 /// Comparison for two `Rc`s.
2422 ///
2423 /// The two are compared by calling `cmp()` on their inner values.
2424 ///
2425 /// # Examples
2426 ///
2427 /// ```
2428 /// use std::rc::Rc;
2429 /// use std::cmp::Ordering;
2430 ///
2431 /// let five = Rc::new(5);
2432 ///
2433 /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
2434 /// ```
2435 #[inline]
2436 fn cmp(&self, other: &Rc<T, A>) -> Ordering {
2437 (**self).cmp(&**other)
2438 }
2439}
2440
2441#[stable(feature = "rust1", since = "1.0.0")]
2442impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
2443 fn hash<H: Hasher>(&self, state: &mut H) {
2444 (**self).hash(state);
2445 }
2446}
2447
2448#[stable(feature = "rust1", since = "1.0.0")]
2449impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
2450 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2451 fmt::Display::fmt(&**self, f)
2452 }
2453}
2454
2455#[stable(feature = "rust1", since = "1.0.0")]
2456impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
2457 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2458 fmt::Debug::fmt(&**self, f)
2459 }
2460}
2461
2462#[stable(feature = "rust1", since = "1.0.0")]
2463impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
2464 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2465 fmt::Pointer::fmt(&core::ptr::addr_of!(**self), f)
2466 }
2467}
2468
2469#[cfg(not(no_global_oom_handling))]
2470#[stable(feature = "from_for_ptrs", since = "1.6.0")]
2471impl<T> From<T> for Rc<T> {
2472 /// Converts a generic type `T` into an `Rc<T>`
2473 ///
2474 /// The conversion allocates on the heap and moves `t`
2475 /// from the stack into it.
2476 ///
2477 /// # Example
2478 /// ```rust
2479 /// # use std::rc::Rc;
2480 /// let x = 5;
2481 /// let rc = Rc::new(5);
2482 ///
2483 /// assert_eq!(Rc::from(x), rc);
2484 /// ```
2485 fn from(t: T) -> Self {
2486 Rc::new(t)
2487 }
2488}
2489
2490#[cfg(not(no_global_oom_handling))]
2491#[stable(feature = "shared_from_array", since = "1.74.0")]
2492impl<T, const N: usize> From<[T; N]> for Rc<[T]> {
2493 /// Converts a [`[T; N]`](prim@array) into an `Rc<[T]>`.
2494 ///
2495 /// The conversion moves the array into a newly allocated `Rc`.
2496 ///
2497 /// # Example
2498 ///
2499 /// ```
2500 /// # use std::rc::Rc;
2501 /// let original: [i32; 3] = [1, 2, 3];
2502 /// let shared: Rc<[i32]> = Rc::from(original);
2503 /// assert_eq!(&[1, 2, 3], &shared[..]);
2504 /// ```
2505 #[inline]
2506 fn from(v: [T; N]) -> Rc<[T]> {
2507 Rc::<[T; N]>::from(v)
2508 }
2509}
2510
2511#[cfg(not(no_global_oom_handling))]
2512#[stable(feature = "shared_from_slice", since = "1.21.0")]
2513impl<T: Clone> From<&[T]> for Rc<[T]> {
2514 /// Allocate a reference-counted slice and fill it by cloning `v`'s items.
2515 ///
2516 /// # Example
2517 ///
2518 /// ```
2519 /// # use std::rc::Rc;
2520 /// let original: &[i32] = &[1, 2, 3];
2521 /// let shared: Rc<[i32]> = Rc::from(original);
2522 /// assert_eq!(&[1, 2, 3], &shared[..]);
2523 /// ```
2524 #[inline]
2525 fn from(v: &[T]) -> Rc<[T]> {
2526 <Self as RcFromSlice<T>>::from_slice(v)
2527 }
2528}
2529
2530#[cfg(not(no_global_oom_handling))]
2531#[stable(feature = "shared_from_slice", since = "1.21.0")]
2532impl From<&str> for Rc<str> {
2533 /// Allocate a reference-counted string slice and copy `v` into it.
2534 ///
2535 /// # Example
2536 ///
2537 /// ```
2538 /// # use std::rc::Rc;
2539 /// let shared: Rc<str> = Rc::from("statue");
2540 /// assert_eq!("statue", &shared[..]);
2541 /// ```
2542 #[inline]
2543 fn from(v: &str) -> Rc<str> {
2544 let rc: Rc<[u8]> = Rc::<[u8]>::from(v.as_bytes());
2545 unsafe { Rc::from_raw(ptr:Rc::into_raw(this:rc) as *const str) }
2546 }
2547}
2548
2549#[cfg(not(no_global_oom_handling))]
2550#[stable(feature = "shared_from_slice", since = "1.21.0")]
2551impl From<String> for Rc<str> {
2552 /// Allocate a reference-counted string slice and copy `v` into it.
2553 ///
2554 /// # Example
2555 ///
2556 /// ```
2557 /// # use std::rc::Rc;
2558 /// let original: String = "statue".to_owned();
2559 /// let shared: Rc<str> = Rc::from(original);
2560 /// assert_eq!("statue", &shared[..]);
2561 /// ```
2562 #[inline]
2563 fn from(v: String) -> Rc<str> {
2564 Rc::from(&v[..])
2565 }
2566}
2567
2568#[cfg(not(no_global_oom_handling))]
2569#[stable(feature = "shared_from_slice", since = "1.21.0")]
2570impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
2571 /// Move a boxed object to a new, reference counted, allocation.
2572 ///
2573 /// # Example
2574 ///
2575 /// ```
2576 /// # use std::rc::Rc;
2577 /// let original: Box<i32> = Box::new(1);
2578 /// let shared: Rc<i32> = Rc::from(original);
2579 /// assert_eq!(1, *shared);
2580 /// ```
2581 #[inline]
2582 fn from(v: Box<T, A>) -> Rc<T, A> {
2583 Rc::from_box_in(src:v)
2584 }
2585}
2586
2587#[cfg(not(no_global_oom_handling))]
2588#[stable(feature = "shared_from_slice", since = "1.21.0")]
2589impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
2590 /// Allocate a reference-counted slice and move `v`'s items into it.
2591 ///
2592 /// # Example
2593 ///
2594 /// ```
2595 /// # use std::rc::Rc;
2596 /// let unique: Vec<i32> = vec![1, 2, 3];
2597 /// let shared: Rc<[i32]> = Rc::from(unique);
2598 /// assert_eq!(&[1, 2, 3], &shared[..]);
2599 /// ```
2600 #[inline]
2601 fn from(v: Vec<T, A>) -> Rc<[T], A> {
2602 unsafe {
2603 let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
2604
2605 let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
2606 ptr::copy_nonoverlapping(vec_ptr, ptr::addr_of_mut!((*rc_ptr).value) as *mut T, len);
2607
2608 // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
2609 // without dropping its contents or the allocator
2610 let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
2611
2612 Self::from_ptr_in(rc_ptr, alloc)
2613 }
2614 }
2615}
2616
2617#[stable(feature = "shared_from_cow", since = "1.45.0")]
2618impl<'a, B> From<Cow<'a, B>> for Rc<B>
2619where
2620 B: ToOwned + ?Sized,
2621 Rc<B>: From<&'a B> + From<B::Owned>,
2622{
2623 /// Create a reference-counted pointer from
2624 /// a clone-on-write pointer by copying its content.
2625 ///
2626 /// # Example
2627 ///
2628 /// ```rust
2629 /// # use std::rc::Rc;
2630 /// # use std::borrow::Cow;
2631 /// let cow: Cow<'_, str> = Cow::Borrowed("eggplant");
2632 /// let shared: Rc<str> = Rc::from(cow);
2633 /// assert_eq!("eggplant", &shared[..]);
2634 /// ```
2635 #[inline]
2636 fn from(cow: Cow<'a, B>) -> Rc<B> {
2637 match cow {
2638 Cow::Borrowed(s: &B) => Rc::from(s),
2639 Cow::Owned(s: ::Owned) => Rc::from(s),
2640 }
2641 }
2642}
2643
2644#[stable(feature = "shared_from_str", since = "1.62.0")]
2645impl From<Rc<str>> for Rc<[u8]> {
2646 /// Converts a reference-counted string slice into a byte slice.
2647 ///
2648 /// # Example
2649 ///
2650 /// ```
2651 /// # use std::rc::Rc;
2652 /// let string: Rc<str> = Rc::from("eggplant");
2653 /// let bytes: Rc<[u8]> = Rc::from(string);
2654 /// assert_eq!("eggplant".as_bytes(), bytes.as_ref());
2655 /// ```
2656 #[inline]
2657 fn from(rc: Rc<str>) -> Self {
2658 // SAFETY: `str` has the same layout as `[u8]`.
2659 unsafe { Rc::from_raw(ptr:Rc::into_raw(this:rc) as *const [u8]) }
2660 }
2661}
2662
2663#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
2664impl<T, const N: usize> TryFrom<Rc<[T]>> for Rc<[T; N]> {
2665 type Error = Rc<[T]>;
2666
2667 fn try_from(boxed_slice: Rc<[T]>) -> Result<Self, Self::Error> {
2668 if boxed_slice.len() == N {
2669 Ok(unsafe { Rc::from_raw(ptr:Rc::into_raw(this:boxed_slice) as *mut [T; N]) })
2670 } else {
2671 Err(boxed_slice)
2672 }
2673 }
2674}
2675
2676#[cfg(not(no_global_oom_handling))]
2677#[stable(feature = "shared_from_iter", since = "1.37.0")]
2678impl<T> FromIterator<T> for Rc<[T]> {
2679 /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
2680 ///
2681 /// # Performance characteristics
2682 ///
2683 /// ## The general case
2684 ///
2685 /// In the general case, collecting into `Rc<[T]>` is done by first
2686 /// collecting into a `Vec<T>`. That is, when writing the following:
2687 ///
2688 /// ```rust
2689 /// # use std::rc::Rc;
2690 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
2691 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2692 /// ```
2693 ///
2694 /// this behaves as if we wrote:
2695 ///
2696 /// ```rust
2697 /// # use std::rc::Rc;
2698 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
2699 /// .collect::<Vec<_>>() // The first set of allocations happens here.
2700 /// .into(); // A second allocation for `Rc<[T]>` happens here.
2701 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2702 /// ```
2703 ///
2704 /// This will allocate as many times as needed for constructing the `Vec<T>`
2705 /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
2706 ///
2707 /// ## Iterators of known length
2708 ///
2709 /// When your `Iterator` implements `TrustedLen` and is of an exact size,
2710 /// a single allocation will be made for the `Rc<[T]>`. For example:
2711 ///
2712 /// ```rust
2713 /// # use std::rc::Rc;
2714 /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
2715 /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
2716 /// ```
2717 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
2718 ToRcSlice::to_rc_slice(iter.into_iter())
2719 }
2720}
2721
2722/// Specialization trait used for collecting into `Rc<[T]>`.
2723#[cfg(not(no_global_oom_handling))]
2724trait ToRcSlice<T>: Iterator<Item = T> + Sized {
2725 fn to_rc_slice(self) -> Rc<[T]>;
2726}
2727
2728#[cfg(not(no_global_oom_handling))]
2729impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
2730 default fn to_rc_slice(self) -> Rc<[T]> {
2731 self.collect::<Vec<T>>().into()
2732 }
2733}
2734
2735#[cfg(not(no_global_oom_handling))]
2736impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
2737 fn to_rc_slice(self) -> Rc<[T]> {
2738 // This is the case for a `TrustedLen` iterator.
2739 let (low, high) = self.size_hint();
2740 if let Some(high) = high {
2741 debug_assert_eq!(
2742 low,
2743 high,
2744 "TrustedLen iterator's size hint is not exact: {:?}",
2745 (low, high)
2746 );
2747
2748 unsafe {
2749 // SAFETY: We need to ensure that the iterator has an exact length and we have.
2750 Rc::from_iter_exact(self, low)
2751 }
2752 } else {
2753 // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
2754 // length exceeding `usize::MAX`.
2755 // The default implementation would collect into a vec which would panic.
2756 // Thus we panic here immediately without invoking `Vec` code.
2757 panic!("capacity overflow");
2758 }
2759 }
2760}
2761
2762/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
2763/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
2764/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
2765///
2766/// Since a `Weak` reference does not count towards ownership, it will not
2767/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
2768/// guarantees about the value still being present. Thus it may return [`None`]
2769/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
2770/// itself (the backing store) from being deallocated.
2771///
2772/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
2773/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
2774/// prevent circular references between [`Rc`] pointers, since mutual owning references
2775/// would never allow either [`Rc`] to be dropped. For example, a tree could
2776/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
2777/// pointers from children back to their parents.
2778///
2779/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
2780///
2781/// [`upgrade`]: Weak::upgrade
2782#[stable(feature = "rc_weak", since = "1.4.0")]
2783#[cfg_attr(not(test), rustc_diagnostic_item = "RcWeak")]
2784pub struct Weak<
2785 T: ?Sized,
2786 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
2787> {
2788 // This is a `NonNull` to allow optimizing the size of this type in enums,
2789 // but it is not necessarily a valid pointer.
2790 // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
2791 // to allocate space on the heap. That's not a value a real pointer
2792 // will ever have because RcBox has alignment at least 2.
2793 // This is only possible when `T: Sized`; unsized `T` never dangle.
2794 ptr: NonNull<RcBox<T>>,
2795 alloc: A,
2796}
2797
2798#[stable(feature = "rc_weak", since = "1.4.0")]
2799impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
2800#[stable(feature = "rc_weak", since = "1.4.0")]
2801impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}
2802
2803#[unstable(feature = "coerce_unsized", issue = "18598")]
2804impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}
2805
2806#[unstable(feature = "dispatch_from_dyn", issue = "none")]
2807impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
2808
2809impl<T> Weak<T> {
2810 /// Constructs a new `Weak<T>`, without allocating any memory.
2811 /// Calling [`upgrade`] on the return value always gives [`None`].
2812 ///
2813 /// [`upgrade`]: Weak::upgrade
2814 ///
2815 /// # Examples
2816 ///
2817 /// ```
2818 /// use std::rc::Weak;
2819 ///
2820 /// let empty: Weak<i64> = Weak::new();
2821 /// assert!(empty.upgrade().is_none());
2822 /// ```
2823 #[inline]
2824 #[stable(feature = "downgraded_weak", since = "1.10.0")]
2825 #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
2826 #[must_use]
2827 pub const fn new() -> Weak<T> {
2828 Weak {
2829 ptr: unsafe {
2830 NonNull::new_unchecked(ptr::without_provenance_mut::<RcBox<T>>(usize::MAX))
2831 },
2832 alloc: Global,
2833 }
2834 }
2835}
2836
2837impl<T, A: Allocator> Weak<T, A> {
2838 /// Constructs a new `Weak<T>`, without allocating any memory, technically in the provided
2839 /// allocator.
2840 /// Calling [`upgrade`] on the return value always gives [`None`].
2841 ///
2842 /// [`upgrade`]: Weak::upgrade
2843 ///
2844 /// # Examples
2845 ///
2846 /// ```
2847 /// use std::rc::Weak;
2848 ///
2849 /// let empty: Weak<i64> = Weak::new();
2850 /// assert!(empty.upgrade().is_none());
2851 /// ```
2852 #[inline]
2853 #[unstable(feature = "allocator_api", issue = "32838")]
2854 pub fn new_in(alloc: A) -> Weak<T, A> {
2855 Weak {
2856 ptr: unsafe {
2857 NonNull::new_unchecked(ptr::without_provenance_mut::<RcBox<T>>(usize::MAX))
2858 },
2859 alloc,
2860 }
2861 }
2862}
2863
2864pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
2865 (ptr.cast::<()>()).addr() == usize::MAX
2866}
2867
2868/// Helper type to allow accessing the reference counts without
2869/// making any assertions about the data field.
2870struct WeakInner<'a> {
2871 weak: &'a Cell<usize>,
2872 strong: &'a Cell<usize>,
2873}
2874
2875impl<T: ?Sized> Weak<T> {
2876 /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
2877 ///
2878 /// This can be used to safely get a strong reference (by calling [`upgrade`]
2879 /// later) or to deallocate the weak count by dropping the `Weak<T>`.
2880 ///
2881 /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
2882 /// as these don't own anything; the method still works on them).
2883 ///
2884 /// # Safety
2885 ///
2886 /// The pointer must have originated from the [`into_raw`] and must still own its potential
2887 /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
2888 ///
2889 /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
2890 /// takes ownership of one weak reference currently represented as a raw pointer (the weak
2891 /// count is not modified by this operation) and therefore it must be paired with a previous
2892 /// call to [`into_raw`].
2893 ///
2894 /// # Examples
2895 ///
2896 /// ```
2897 /// use std::rc::{Rc, Weak};
2898 ///
2899 /// let strong = Rc::new("hello".to_owned());
2900 ///
2901 /// let raw_1 = Rc::downgrade(&strong).into_raw();
2902 /// let raw_2 = Rc::downgrade(&strong).into_raw();
2903 ///
2904 /// assert_eq!(2, Rc::weak_count(&strong));
2905 ///
2906 /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
2907 /// assert_eq!(1, Rc::weak_count(&strong));
2908 ///
2909 /// drop(strong);
2910 ///
2911 /// // Decrement the last weak count.
2912 /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
2913 /// ```
2914 ///
2915 /// [`into_raw`]: Weak::into_raw
2916 /// [`upgrade`]: Weak::upgrade
2917 /// [`new`]: Weak::new
2918 #[inline]
2919 #[stable(feature = "weak_into_raw", since = "1.45.0")]
2920 pub unsafe fn from_raw(ptr: *const T) -> Self {
2921 unsafe { Self::from_raw_in(ptr, Global) }
2922 }
2923}
2924
2925impl<T: ?Sized, A: Allocator> Weak<T, A> {
2926 /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
2927 ///
2928 /// The pointer is valid only if there are some strong references. The pointer may be dangling,
2929 /// unaligned or even [`null`] otherwise.
2930 ///
2931 /// # Examples
2932 ///
2933 /// ```
2934 /// use std::rc::Rc;
2935 /// use std::ptr;
2936 ///
2937 /// let strong = Rc::new("hello".to_owned());
2938 /// let weak = Rc::downgrade(&strong);
2939 /// // Both point to the same object
2940 /// assert!(ptr::eq(&*strong, weak.as_ptr()));
2941 /// // The strong here keeps it alive, so we can still access the object.
2942 /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
2943 ///
2944 /// drop(strong);
2945 /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
2946 /// // undefined behaviour.
2947 /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
2948 /// ```
2949 ///
2950 /// [`null`]: ptr::null
2951 #[must_use]
2952 #[stable(feature = "rc_as_ptr", since = "1.45.0")]
2953 pub fn as_ptr(&self) -> *const T {
2954 let ptr: *mut RcBox<T> = NonNull::as_ptr(self.ptr);
2955
2956 if is_dangling(ptr) {
2957 // If the pointer is dangling, we return the sentinel directly. This cannot be
2958 // a valid payload address, as the payload is at least as aligned as RcBox (usize).
2959 ptr as *const T
2960 } else {
2961 // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
2962 // The payload may be dropped at this point, and we have to maintain provenance,
2963 // so use raw pointer manipulation.
2964 unsafe { ptr::addr_of_mut!((*ptr).value) }
2965 }
2966 }
2967
2968 /// Consumes the `Weak<T>` and turns it into a raw pointer.
2969 ///
2970 /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
2971 /// one weak reference (the weak count is not modified by this operation). It can be turned
2972 /// back into the `Weak<T>` with [`from_raw`].
2973 ///
2974 /// The same restrictions of accessing the target of the pointer as with
2975 /// [`as_ptr`] apply.
2976 ///
2977 /// # Examples
2978 ///
2979 /// ```
2980 /// use std::rc::{Rc, Weak};
2981 ///
2982 /// let strong = Rc::new("hello".to_owned());
2983 /// let weak = Rc::downgrade(&strong);
2984 /// let raw = weak.into_raw();
2985 ///
2986 /// assert_eq!(1, Rc::weak_count(&strong));
2987 /// assert_eq!("hello", unsafe { &*raw });
2988 ///
2989 /// drop(unsafe { Weak::from_raw(raw) });
2990 /// assert_eq!(0, Rc::weak_count(&strong));
2991 /// ```
2992 ///
2993 /// [`from_raw`]: Weak::from_raw
2994 /// [`as_ptr`]: Weak::as_ptr
2995 #[must_use = "losing the pointer will leak memory"]
2996 #[stable(feature = "weak_into_raw", since = "1.45.0")]
2997 pub fn into_raw(self) -> *const T {
2998 let result = self.as_ptr();
2999 mem::forget(self);
3000 result
3001 }
3002
3003 /// Consumes the `Weak<T>` and turns it into a raw pointer.
3004 ///
3005 /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3006 /// one weak reference (the weak count is not modified by this operation). It can be turned
3007 /// back into the `Weak<T>` with [`from_raw`].
3008 ///
3009 /// The same restrictions of accessing the target of the pointer as with
3010 /// [`as_ptr`] apply.
3011 ///
3012 /// # Examples
3013 ///
3014 /// ```
3015 /// use std::rc::{Rc, Weak};
3016 ///
3017 /// let strong = Rc::new("hello".to_owned());
3018 /// let weak = Rc::downgrade(&strong);
3019 /// let raw = weak.into_raw();
3020 ///
3021 /// assert_eq!(1, Rc::weak_count(&strong));
3022 /// assert_eq!("hello", unsafe { &*raw });
3023 ///
3024 /// drop(unsafe { Weak::from_raw(raw) });
3025 /// assert_eq!(0, Rc::weak_count(&strong));
3026 /// ```
3027 ///
3028 /// [`from_raw`]: Weak::from_raw
3029 /// [`as_ptr`]: Weak::as_ptr
3030 #[inline]
3031 #[unstable(feature = "allocator_api", issue = "32838")]
3032 pub fn into_raw_and_alloc(self) -> (*const T, A)
3033 where
3034 A: Clone,
3035 {
3036 let result = self.as_ptr();
3037 let alloc = self.alloc.clone();
3038 mem::forget(self);
3039 (result, alloc)
3040 }
3041
3042 /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3043 ///
3044 /// This can be used to safely get a strong reference (by calling [`upgrade`]
3045 /// later) or to deallocate the weak count by dropping the `Weak<T>`.
3046 ///
3047 /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3048 /// as these don't own anything; the method still works on them).
3049 ///
3050 /// # Safety
3051 ///
3052 /// The pointer must have originated from the [`into_raw`] and must still own its potential
3053 /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
3054 ///
3055 /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3056 /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3057 /// count is not modified by this operation) and therefore it must be paired with a previous
3058 /// call to [`into_raw`].
3059 ///
3060 /// # Examples
3061 ///
3062 /// ```
3063 /// use std::rc::{Rc, Weak};
3064 ///
3065 /// let strong = Rc::new("hello".to_owned());
3066 ///
3067 /// let raw_1 = Rc::downgrade(&strong).into_raw();
3068 /// let raw_2 = Rc::downgrade(&strong).into_raw();
3069 ///
3070 /// assert_eq!(2, Rc::weak_count(&strong));
3071 ///
3072 /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3073 /// assert_eq!(1, Rc::weak_count(&strong));
3074 ///
3075 /// drop(strong);
3076 ///
3077 /// // Decrement the last weak count.
3078 /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3079 /// ```
3080 ///
3081 /// [`into_raw`]: Weak::into_raw
3082 /// [`upgrade`]: Weak::upgrade
3083 /// [`new`]: Weak::new
3084 #[inline]
3085 #[unstable(feature = "allocator_api", issue = "32838")]
3086 pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
3087 // See Weak::as_ptr for context on how the input pointer is derived.
3088
3089 let ptr = if is_dangling(ptr) {
3090 // This is a dangling Weak.
3091 ptr as *mut RcBox<T>
3092 } else {
3093 // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
3094 // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
3095 let offset = unsafe { data_offset(ptr) };
3096 // Thus, we reverse the offset to get the whole RcBox.
3097 // SAFETY: the pointer originated from a Weak, so this offset is safe.
3098 unsafe { ptr.byte_sub(offset) as *mut RcBox<T> }
3099 };
3100
3101 // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
3102 Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
3103 }
3104
3105 /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
3106 /// dropping of the inner value if successful.
3107 ///
3108 /// Returns [`None`] if the inner value has since been dropped.
3109 ///
3110 /// # Examples
3111 ///
3112 /// ```
3113 /// use std::rc::Rc;
3114 ///
3115 /// let five = Rc::new(5);
3116 ///
3117 /// let weak_five = Rc::downgrade(&five);
3118 ///
3119 /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
3120 /// assert!(strong_five.is_some());
3121 ///
3122 /// // Destroy all strong pointers.
3123 /// drop(strong_five);
3124 /// drop(five);
3125 ///
3126 /// assert!(weak_five.upgrade().is_none());
3127 /// ```
3128 #[must_use = "this returns a new `Rc`, \
3129 without modifying the original weak pointer"]
3130 #[stable(feature = "rc_weak", since = "1.4.0")]
3131 pub fn upgrade(&self) -> Option<Rc<T, A>>
3132 where
3133 A: Clone,
3134 {
3135 let inner = self.inner()?;
3136
3137 if inner.strong() == 0 {
3138 None
3139 } else {
3140 unsafe {
3141 inner.inc_strong();
3142 Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
3143 }
3144 }
3145 }
3146
3147 /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
3148 ///
3149 /// If `self` was created using [`Weak::new`], this will return 0.
3150 #[must_use]
3151 #[stable(feature = "weak_counts", since = "1.41.0")]
3152 pub fn strong_count(&self) -> usize {
3153 if let Some(inner) = self.inner() { inner.strong() } else { 0 }
3154 }
3155
3156 /// Gets the number of `Weak` pointers pointing to this allocation.
3157 ///
3158 /// If no strong pointers remain, this will return zero.
3159 #[must_use]
3160 #[stable(feature = "weak_counts", since = "1.41.0")]
3161 pub fn weak_count(&self) -> usize {
3162 if let Some(inner) = self.inner() {
3163 if inner.strong() > 0 {
3164 inner.weak() - 1 // subtract the implicit weak ptr
3165 } else {
3166 0
3167 }
3168 } else {
3169 0
3170 }
3171 }
3172
3173 /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`,
3174 /// (i.e., when this `Weak` was created by `Weak::new`).
3175 #[inline]
3176 fn inner(&self) -> Option<WeakInner<'_>> {
3177 if is_dangling(self.ptr.as_ptr()) {
3178 None
3179 } else {
3180 // We are careful to *not* create a reference covering the "data" field, as
3181 // the field may be mutated concurrently (for example, if the last `Rc`
3182 // is dropped, the data field will be dropped in-place).
3183 Some(unsafe {
3184 let ptr = self.ptr.as_ptr();
3185 WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
3186 })
3187 }
3188 }
3189
3190 /// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if
3191 /// both don't point to any allocation (because they were created with `Weak::new()`). However,
3192 /// this function ignores the metadata of `dyn Trait` pointers.
3193 ///
3194 /// # Notes
3195 ///
3196 /// Since this compares pointers it means that `Weak::new()` will equal each
3197 /// other, even though they don't point to any allocation.
3198 ///
3199 /// # Examples
3200 ///
3201 /// ```
3202 /// use std::rc::Rc;
3203 ///
3204 /// let first_rc = Rc::new(5);
3205 /// let first = Rc::downgrade(&first_rc);
3206 /// let second = Rc::downgrade(&first_rc);
3207 ///
3208 /// assert!(first.ptr_eq(&second));
3209 ///
3210 /// let third_rc = Rc::new(5);
3211 /// let third = Rc::downgrade(&third_rc);
3212 ///
3213 /// assert!(!first.ptr_eq(&third));
3214 /// ```
3215 ///
3216 /// Comparing `Weak::new`.
3217 ///
3218 /// ```
3219 /// use std::rc::{Rc, Weak};
3220 ///
3221 /// let first = Weak::new();
3222 /// let second = Weak::new();
3223 /// assert!(first.ptr_eq(&second));
3224 ///
3225 /// let third_rc = Rc::new(());
3226 /// let third = Rc::downgrade(&third_rc);
3227 /// assert!(!first.ptr_eq(&third));
3228 /// ```
3229 #[inline]
3230 #[must_use]
3231 #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
3232 pub fn ptr_eq(&self, other: &Self) -> bool {
3233 ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
3234 }
3235}
3236
3237#[stable(feature = "rc_weak", since = "1.4.0")]
3238unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
3239 /// Drops the `Weak` pointer.
3240 ///
3241 /// # Examples
3242 ///
3243 /// ```
3244 /// use std::rc::{Rc, Weak};
3245 ///
3246 /// struct Foo;
3247 ///
3248 /// impl Drop for Foo {
3249 /// fn drop(&mut self) {
3250 /// println!("dropped!");
3251 /// }
3252 /// }
3253 ///
3254 /// let foo = Rc::new(Foo);
3255 /// let weak_foo = Rc::downgrade(&foo);
3256 /// let other_weak_foo = Weak::clone(&weak_foo);
3257 ///
3258 /// drop(weak_foo); // Doesn't print anything
3259 /// drop(foo); // Prints "dropped!"
3260 ///
3261 /// assert!(other_weak_foo.upgrade().is_none());
3262 /// ```
3263 fn drop(&mut self) {
3264 let inner = if let Some(inner) = self.inner() { inner } else { return };
3265
3266 inner.dec_weak();
3267 // the weak count starts at 1, and will only go to zero if all
3268 // the strong pointers have disappeared.
3269 if inner.weak() == 0 {
3270 unsafe {
3271 self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
3272 }
3273 }
3274 }
3275}
3276
3277#[stable(feature = "rc_weak", since = "1.4.0")]
3278impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
3279 /// Makes a clone of the `Weak` pointer that points to the same allocation.
3280 ///
3281 /// # Examples
3282 ///
3283 /// ```
3284 /// use std::rc::{Rc, Weak};
3285 ///
3286 /// let weak_five = Rc::downgrade(&Rc::new(5));
3287 ///
3288 /// let _ = Weak::clone(&weak_five);
3289 /// ```
3290 #[inline]
3291 fn clone(&self) -> Weak<T, A> {
3292 if let Some(inner: WeakInner<'_>) = self.inner() {
3293 inner.inc_weak()
3294 }
3295 Weak { ptr: self.ptr, alloc: self.alloc.clone() }
3296 }
3297}
3298
3299#[stable(feature = "rc_weak", since = "1.4.0")]
3300impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
3301 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3302 write!(f, "(Weak)")
3303 }
3304}
3305
3306#[stable(feature = "downgraded_weak", since = "1.10.0")]
3307impl<T> Default for Weak<T> {
3308 /// Constructs a new `Weak<T>`, without allocating any memory.
3309 /// Calling [`upgrade`] on the return value always gives [`None`].
3310 ///
3311 /// [`upgrade`]: Weak::upgrade
3312 ///
3313 /// # Examples
3314 ///
3315 /// ```
3316 /// use std::rc::Weak;
3317 ///
3318 /// let empty: Weak<i64> = Default::default();
3319 /// assert!(empty.upgrade().is_none());
3320 /// ```
3321 fn default() -> Weak<T> {
3322 Weak::new()
3323 }
3324}
3325
3326// NOTE: We checked_add here to deal with mem::forget safely. In particular
3327// if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then
3328// you can free the allocation while outstanding Rcs (or Weaks) exist.
3329// We abort because this is such a degenerate scenario that we don't care about
3330// what happens -- no real program should ever experience this.
3331//
3332// This should have negligible overhead since you don't actually need to
3333// clone these much in Rust thanks to ownership and move-semantics.
3334
3335#[doc(hidden)]
3336trait RcInnerPtr {
3337 fn weak_ref(&self) -> &Cell<usize>;
3338 fn strong_ref(&self) -> &Cell<usize>;
3339
3340 #[inline]
3341 fn strong(&self) -> usize {
3342 self.strong_ref().get()
3343 }
3344
3345 #[inline]
3346 fn inc_strong(&self) {
3347 let strong = self.strong();
3348
3349 // We insert an `assume` here to hint LLVM at an otherwise
3350 // missed optimization.
3351 // SAFETY: The reference count will never be zero when this is
3352 // called.
3353 unsafe {
3354 hint::assert_unchecked(strong != 0);
3355 }
3356
3357 let strong = strong.wrapping_add(1);
3358 self.strong_ref().set(strong);
3359
3360 // We want to abort on overflow instead of dropping the value.
3361 // Checking for overflow after the store instead of before
3362 // allows for slightly better code generation.
3363 if core::intrinsics::unlikely(strong == 0) {
3364 abort();
3365 }
3366 }
3367
3368 #[inline]
3369 fn dec_strong(&self) {
3370 self.strong_ref().set(self.strong() - 1);
3371 }
3372
3373 #[inline]
3374 fn weak(&self) -> usize {
3375 self.weak_ref().get()
3376 }
3377
3378 #[inline]
3379 fn inc_weak(&self) {
3380 let weak = self.weak();
3381
3382 // We insert an `assume` here to hint LLVM at an otherwise
3383 // missed optimization.
3384 // SAFETY: The reference count will never be zero when this is
3385 // called.
3386 unsafe {
3387 hint::assert_unchecked(weak != 0);
3388 }
3389
3390 let weak = weak.wrapping_add(1);
3391 self.weak_ref().set(weak);
3392
3393 // We want to abort on overflow instead of dropping the value.
3394 // Checking for overflow after the store instead of before
3395 // allows for slightly better code generation.
3396 if core::intrinsics::unlikely(weak == 0) {
3397 abort();
3398 }
3399 }
3400
3401 #[inline]
3402 fn dec_weak(&self) {
3403 self.weak_ref().set(self.weak() - 1);
3404 }
3405}
3406
3407impl<T: ?Sized> RcInnerPtr for RcBox<T> {
3408 #[inline(always)]
3409 fn weak_ref(&self) -> &Cell<usize> {
3410 &self.weak
3411 }
3412
3413 #[inline(always)]
3414 fn strong_ref(&self) -> &Cell<usize> {
3415 &self.strong
3416 }
3417}
3418
3419impl<'a> RcInnerPtr for WeakInner<'a> {
3420 #[inline(always)]
3421 fn weak_ref(&self) -> &Cell<usize> {
3422 self.weak
3423 }
3424
3425 #[inline(always)]
3426 fn strong_ref(&self) -> &Cell<usize> {
3427 self.strong
3428 }
3429}
3430
3431#[stable(feature = "rust1", since = "1.0.0")]
3432impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
3433 fn borrow(&self) -> &T {
3434 &**self
3435 }
3436}
3437
3438#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
3439impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
3440 fn as_ref(&self) -> &T {
3441 &**self
3442 }
3443}
3444
3445#[stable(feature = "pin", since = "1.33.0")]
3446impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
3447
3448/// Get the offset within an `RcBox` for the payload behind a pointer.
3449///
3450/// # Safety
3451///
3452/// The pointer must point to (and have valid metadata for) a previously
3453/// valid instance of T, but the T is allowed to be dropped.
3454unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
3455 // Align the unsized value to the end of the RcBox.
3456 // Because RcBox is repr(C), it will always be the last field in memory.
3457 // SAFETY: since the only unsized types possible are slices, trait objects,
3458 // and extern types, the input safety requirement is currently enough to
3459 // satisfy the requirements of align_of_val_raw; this is an implementation
3460 // detail of the language that must not be relied upon outside of std.
3461 unsafe { data_offset_align(align_of_val_raw(val:ptr)) }
3462}
3463
3464#[inline]
3465fn data_offset_align(align: usize) -> usize {
3466 let layout: Layout = Layout::new::<RcBox<()>>();
3467 layout.size() + layout.padding_needed_for(align)
3468}
3469
3470/// A uniquely owned `Rc`
3471///
3472/// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong
3473/// reference. Multiple weak pointers can be created, but attempts to upgrade those to strong
3474/// references will fail unless the `UniqueRc` they point to has been converted into a regular `Rc`.
3475///
3476/// Because they are uniquely owned, the contents of a `UniqueRc` can be freely mutated. A common
3477/// use case is to have an object be mutable during its initialization phase but then have it become
3478/// immutable and converted to a normal `Rc`.
3479///
3480/// This can be used as a flexible way to create cyclic data structures, as in the example below.
3481///
3482/// ```
3483/// #![feature(unique_rc_arc)]
3484/// use std::rc::{Rc, Weak, UniqueRc};
3485///
3486/// struct Gadget {
3487/// #[allow(dead_code)]
3488/// me: Weak<Gadget>,
3489/// }
3490///
3491/// fn create_gadget() -> Option<Rc<Gadget>> {
3492/// let mut rc = UniqueRc::new(Gadget {
3493/// me: Weak::new(),
3494/// });
3495/// rc.me = UniqueRc::downgrade(&rc);
3496/// Some(UniqueRc::into_rc(rc))
3497/// }
3498///
3499/// create_gadget().unwrap();
3500/// ```
3501///
3502/// An advantage of using `UniqueRc` over [`Rc::new_cyclic`] to build cyclic data structures is that
3503/// [`Rc::new_cyclic`]'s `data_fn` parameter cannot be async or return a [`Result`]. As shown in the
3504/// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
3505/// including fallible or async constructors.
3506#[unstable(feature = "unique_rc_arc", issue = "112566")]
3507#[derive(Debug)]
3508pub struct UniqueRc<T> {
3509 ptr: NonNull<RcBox<T>>,
3510 phantom: PhantomData<RcBox<T>>,
3511}
3512
3513impl<T> UniqueRc<T> {
3514 /// Creates a new `UniqueRc`
3515 ///
3516 /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
3517 /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
3518 /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
3519 /// point to the new [`Rc`].
3520 #[cfg(not(no_global_oom_handling))]
3521 #[unstable(feature = "unique_rc_arc", issue = "112566")]
3522 pub fn new(value: T) -> Self {
3523 Self {
3524 ptr: Box::leak(Box::new(RcBox {
3525 strong: Cell::new(0),
3526 // keep one weak reference so if all the weak pointers that are created are dropped
3527 // the UniqueRc still stays valid.
3528 weak: Cell::new(1),
3529 value,
3530 }))
3531 .into(),
3532 phantom: PhantomData,
3533 }
3534 }
3535
3536 /// Creates a new weak reference to the `UniqueRc`
3537 ///
3538 /// Attempting to upgrade this weak reference will fail before the `UniqueRc` has been converted
3539 /// to a [`Rc`] using [`UniqueRc::into_rc`].
3540 #[unstable(feature = "unique_rc_arc", issue = "112566")]
3541 pub fn downgrade(this: &Self) -> Weak<T> {
3542 // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
3543 // one strong reference before converting to a regular Rc.
3544 unsafe {
3545 this.ptr.as_ref().inc_weak();
3546 }
3547 Weak { ptr: this.ptr, alloc: Global }
3548 }
3549
3550 /// Converts the `UniqueRc` into a regular [`Rc`]
3551 ///
3552 /// This consumes the `UniqueRc` and returns a regular [`Rc`] that contains the `value` that
3553 /// is passed to `into_rc`.
3554 ///
3555 /// Any weak references created before this method is called can now be upgraded to strong
3556 /// references.
3557 #[unstable(feature = "unique_rc_arc", issue = "112566")]
3558 pub fn into_rc(this: Self) -> Rc<T> {
3559 let mut this = ManuallyDrop::new(this);
3560 // SAFETY: This pointer was allocated at creation time so we know it is valid.
3561 unsafe {
3562 // Convert our weak reference into a strong reference
3563 this.ptr.as_mut().strong.set(1);
3564 Rc::from_inner(this.ptr)
3565 }
3566 }
3567}
3568
3569#[unstable(feature = "unique_rc_arc", issue = "112566")]
3570impl<T> Deref for UniqueRc<T> {
3571 type Target = T;
3572
3573 fn deref(&self) -> &T {
3574 // SAFETY: This pointer was allocated at creation time so we know it is valid.
3575 unsafe { &self.ptr.as_ref().value }
3576 }
3577}
3578
3579#[unstable(feature = "unique_rc_arc", issue = "112566")]
3580impl<T> DerefMut for UniqueRc<T> {
3581 fn deref_mut(&mut self) -> &mut T {
3582 // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we
3583 // have unique ownership and therefore it's safe to make a mutable reference because
3584 // `UniqueRc` owns the only strong reference to itself.
3585 unsafe { &mut (*self.ptr.as_ptr()).value }
3586 }
3587}
3588
3589#[unstable(feature = "unique_rc_arc", issue = "112566")]
3590unsafe impl<#[may_dangle] T> Drop for UniqueRc<T> {
3591 fn drop(&mut self) {
3592 unsafe {
3593 // destroy the contained object
3594 drop_in_place(to_drop:DerefMut::deref_mut(self));
3595
3596 // remove the implicit "strong weak" pointer now that we've destroyed the contents.
3597 self.ptr.as_ref().dec_weak();
3598
3599 if self.ptr.as_ref().weak() == 0 {
3600 Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
3601 }
3602 }
3603 }
3604}
3605