//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
//! Counted'.
//!
//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
//! given allocation is destroyed, the value stored in that allocation (often
//! referred to as "inner value") is also dropped.
//!
//! Shared references in Rust disallow mutation by default, and [`Rc`]
//! is no exception: you cannot generally obtain a mutable reference to
//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
//! inside an `Rc`][mutability].
//!
//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
//! does not implement [`Send`]. As a result, the Rust compiler
//! will check *at compile time* that you are not sending [`Rc`]s between
//! threads. If you need multi-threaded, atomic reference counting, use
//! [`sync::Arc`][arc].
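//!
//! For example, the following sketch fails to compile because `Rc` is not
//! [`Send`] (the value and the closure body are illustrative):
//!
//! ```compile_fail
//! use std::rc::Rc;
//! use std::thread;
//!
//! let rc = Rc::new(5);
//! // error: `Rc<i32>` cannot be sent between threads safely
//! thread::spawn(move || println!("{rc}"));
//! ```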
//!
//! The [`downgrade`][downgrade] method can be used to create a non-owning
//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
//! already been dropped. In other words, `Weak` pointers do not keep the value
//! inside the allocation alive; however, they *do* keep the allocation
//! (the backing store for the inner value) alive.
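//!
//! A minimal sketch of that behavior:
//!
//! ```
//! use std::rc::Rc;
//!
//! let strong = Rc::new("hello".to_string());
//! let weak = Rc::downgrade(&strong);
//! // While a strong pointer exists, `upgrade` succeeds...
//! assert!(weak.upgrade().is_some());
//! drop(strong);
//! // ...but once the last `Rc` is dropped, the inner value is dropped too,
//! // and `upgrade` returns `None`.
//! assert!(weak.upgrade().is_none());
//! ```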
//!
//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
//! [`Weak`] is used to break cycles. For example, a tree could have strong
//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
//! children back to their parents.
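//!
//! A sketch of that tree shape (the `Node` type here is illustrative, not part
//! of this module):
//!
//! ```
//! use std::cell::RefCell;
//! use std::rc::{Rc, Weak};
//!
//! struct Node {
//!     parent: RefCell<Weak<Node>>,      // child -> parent: weak
//!     children: RefCell<Vec<Rc<Node>>>, // parent -> child: strong
//! }
//!
//! let leaf = Rc::new(Node {
//!     parent: RefCell::new(Weak::new()),
//!     children: RefCell::new(vec![]),
//! });
//! let branch = Rc::new(Node {
//!     parent: RefCell::new(Weak::new()),
//!     children: RefCell::new(vec![Rc::clone(&leaf)]),
//! });
//! *leaf.parent.borrow_mut() = Rc::downgrade(&branch);
//!
//! // `branch` owns `leaf` strongly, while `leaf` only points back weakly,
//! // so no cycle of strong pointers exists and both nodes can be freed.
//! assert_eq!(branch.children.borrow().len(), 1);
//! assert!(leaf.parent.borrow().upgrade().is_some());
//! ```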
//!
//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
//! functions, called using [fully qualified syntax]:
//!
//! ```
//! use std::rc::Rc;
//!
//! let my_rc = Rc::new(());
//! let my_weak = Rc::downgrade(&my_rc);
//! ```
//!
//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
//! fully qualified syntax. Some people prefer to use fully qualified syntax,
//! while others prefer using method-call syntax.
//!
//! ```
//! use std::rc::Rc;
//!
//! let rc = Rc::new(());
//! // Method-call syntax
//! let rc2 = rc.clone();
//! // Fully qualified syntax
//! let rc3 = Rc::clone(&rc);
//! ```
//!
//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
//! already been dropped.
//!
//! # Cloning references
//!
//! Creating a new reference to the same allocation as an existing reference counted pointer
//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
//!
//! ```
//! use std::rc::Rc;
//!
//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
//! // The two syntaxes below are equivalent.
//! let a = foo.clone();
//! let b = Rc::clone(&foo);
//! // a and b both point to the same memory location as foo.
//! ```
//!
//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
//! the meaning of the code. In the example above, this syntax makes it easier to see that
//! this code is creating a new reference rather than copying the whole content of `foo`.
//!
//! # Examples
//!
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
//! unique ownership, because more than one gadget may belong to the same
//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
//!
//! ```
//! use std::rc::Rc;
//!
//! struct Owner {
//!     name: String,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
//!     // gives us a new pointer to the same `Owner` allocation, incrementing
//!     // the reference count in the process.
//!     let gadget1 = Gadget {
//!         id: 1,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!     let gadget2 = Gadget {
//!         id: 2,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!
//!     // Dispose of our local variable `gadget_owner`.
//!     drop(gadget_owner);
//!
//!     // Despite dropping `gadget_owner`, we're still able to print out the name
//!     // of the `Owner` of the `Gadget`s. This is because we've only dropped a
//!     // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
//!     // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
//!     // live. The field projection `gadget1.owner.name` works because
//!     // `Rc<Owner>` automatically dereferences to `Owner`.
//!     println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
//!     println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
//!
//!     // At the end of the function, `gadget1` and `gadget2` are destroyed, and
//!     // with them the last counted references to our `Owner`. Gadget Man now
//!     // gets destroyed as well.
//! }
//! ```
//!
//! If our requirements change, and we also need to be able to traverse from
//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
//! to `Gadget` introduces a cycle. This means that their
//! reference counts can never reach 0, and the allocation will never be destroyed:
//! a memory leak. In order to get around this, we can use [`Weak`]
//! pointers.
//!
//! Rust actually makes it somewhat difficult to produce this loop in the first
//! place. In order to end up with two values that point at each other, one of
//! them needs to be mutable. This is difficult because [`Rc`] enforces
//! memory safety by only giving out shared references to the value it wraps,
//! and these don't allow direct mutation. We need to wrap the part of the
//! value we wish to mutate in a [`RefCell`], which provides *interior
//! mutability*: a method to achieve mutability through a shared reference.
//! [`RefCell`] enforces Rust's borrowing rules at runtime.
//!
//! ```
//! use std::rc::Rc;
//! use std::rc::Weak;
//! use std::cell::RefCell;
//!
//! struct Owner {
//!     name: String,
//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
//!     // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
//!     // a shared reference.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!             gadgets: RefCell::new(vec![]),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`, as before.
//!     let gadget1 = Rc::new(
//!         Gadget {
//!             id: 1,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!     let gadget2 = Rc::new(
//!         Gadget {
//!             id: 2,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!
//!     // Add the `Gadget`s to their `Owner`.
//!     {
//!         let mut gadgets = gadget_owner.gadgets.borrow_mut();
//!         gadgets.push(Rc::downgrade(&gadget1));
//!         gadgets.push(Rc::downgrade(&gadget2));
//!
//!         // `RefCell` dynamic borrow ends here.
//!     }
//!
//!     // Iterate over our `Gadget`s, printing their details out.
//!     for gadget_weak in gadget_owner.gadgets.borrow().iter() {
//!
//!         // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
//!         // guarantee the allocation still exists, we need to call
//!         // `upgrade`, which returns an `Option<Rc<Gadget>>`.
//!         //
//!         // In this case we know the allocation still exists, so we simply
//!         // `unwrap` the `Option`. In a more complicated program, you might
//!         // need graceful error handling for a `None` result.
//!
//!         let gadget = gadget_weak.upgrade().unwrap();
//!         println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
//!     }
//!
//!     // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
//!     // are destroyed. There are now no strong (`Rc`) pointers to the
//!     // gadgets, so they are destroyed. This zeroes the reference count on
//!     // Gadget Man, so he gets destroyed as well.
//! }
//! ```
//!
//! [clone]: Clone::clone
//! [`Cell`]: core::cell::Cell
//! [`RefCell`]: core::cell::RefCell
//! [arc]: crate::sync::Arc
//! [`Deref`]: core::ops::Deref
//! [downgrade]: Rc::downgrade
//! [upgrade]: Weak::upgrade
//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name

#![stable(feature = "rust1", since = "1.0.0")]

#[cfg(not(test))]
use crate::boxed::Box;
#[cfg(test)]
use std::boxed::Box;

use core::any::Any;
use core::borrow;
use core::cell::Cell;
use core::cmp::Ordering;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::hint;
use core::intrinsics::abort;
#[cfg(not(no_global_oom_handling))]
use core::iter;
use core::marker::{PhantomData, Unsize};
#[cfg(not(no_global_oom_handling))]
use core::mem::size_of_val;
use core::mem::{self, align_of_val_raw, forget, ManuallyDrop};
use core::ops::{CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Receiver};
use core::panic::{RefUnwindSafe, UnwindSafe};
#[cfg(not(no_global_oom_handling))]
use core::pin::Pin;
use core::ptr::{self, drop_in_place, NonNull};
#[cfg(not(no_global_oom_handling))]
use core::slice::from_raw_parts_mut;

#[cfg(not(no_global_oom_handling))]
use crate::alloc::handle_alloc_error;
#[cfg(not(no_global_oom_handling))]
use crate::alloc::WriteCloneIntoRaw;
use crate::alloc::{AllocError, Allocator, Global, Layout};
use crate::borrow::{Cow, ToOwned};
#[cfg(not(no_global_oom_handling))]
use crate::string::String;
#[cfg(not(no_global_oom_handling))]
use crate::vec::Vec;

#[cfg(test)]
mod tests;

// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
#[repr(C)]
struct RcBox<T: ?Sized> {
    strong: Cell<usize>,
    weak: Cell<usize>,
    value: T,
}

/// Calculate layout for `RcBox<T>` using the inner value's layout
fn rcbox_layout_for_value_layout(layout: Layout) -> Layout {
    // Calculate layout using the given value layout.
    // Previously, layout was calculated on the expression
    // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
    // reference (see #54908).
    Layout::new::<RcBox<()>>().extend(layout).unwrap().0.pad_to_align()
}

/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
/// Counted'.
///
/// See the [module-level documentation](./index.html) for more details.
///
/// The inherent methods of `Rc` are all associated functions, which means
/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
///
/// [get_mut]: Rc::get_mut
#[cfg_attr(not(test), rustc_diagnostic_item = "Rc")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct Rc<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    ptr: NonNull<RcBox<T>>,
    phantom: PhantomData<RcBox<T>>,
    alloc: A,
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}

// Note that this negative impl isn't strictly necessary for correctness,
// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
// However, given how important `Rc`'s `!Sync`-ness is,
// having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}

#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}

#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}

impl<T: ?Sized> Rc<T> {
    #[inline]
    unsafe fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
        unsafe { Self::from_inner_in(ptr, Global) }
    }

    #[inline]
    unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
    }
}

impl<T: ?Sized, A: Allocator> Rc<T, A> {
    #[inline(always)]
    fn inner(&self) -> &RcBox<T> {
        // This unsafety is ok because while this Rc is alive we're guaranteed
        // that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    #[inline]
    unsafe fn from_inner_in(ptr: NonNull<RcBox<T>>, alloc: A) -> Self {
        Self { ptr, phantom: PhantomData, alloc }
    }

    #[inline]
    unsafe fn from_ptr_in(ptr: *mut RcBox<T>, alloc: A) -> Self {
        unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
    }
}

impl<T> Rc<T> {
    /// Constructs a new `Rc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(value: T) -> Rc<T> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Self::from_inner(
                Box::leak(Box::new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value }))
                    .into(),
            )
        }
    }

    /// Constructs a new `Rc<T>` while giving you a `Weak<T>` to the allocation,
    /// to allow you to construct a `T` which holds a weak pointer to itself.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
    /// Using this function, you get access to the weak pointer during the
    /// initialization of `T`, before the `Rc<T>` is created, such that you can
    /// clone and store it inside the `T`.
    ///
    /// `new_cyclic` first allocates the managed allocation for the `Rc<T>`,
    /// then calls your closure, giving it a `Weak<T>` to this allocation,
    /// and only afterwards completes the construction of the `Rc<T>` by placing
    /// the `T` returned from your closure into the allocation.
    ///
    /// Since the new `Rc<T>` is not fully-constructed until `Rc<T>::new_cyclic`
    /// returns, calling [`upgrade`] on the weak reference inside your closure will
    /// fail and result in a `None` value.
    ///
    /// # Panics
    ///
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![allow(dead_code)]
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Gadget {
    ///     me: Weak<Gadget>,
    /// }
    ///
    /// impl Gadget {
    ///     /// Construct a reference counted Gadget.
    ///     fn new() -> Rc<Self> {
    ///         // `me` is a `Weak<Gadget>` pointing at the new allocation of the
    ///         // `Rc` we're constructing.
    ///         Rc::new_cyclic(|me| {
    ///             // Create the actual struct here.
    ///             Gadget { me: me.clone() }
    ///         })
    ///     }
    ///
    ///     /// Return a reference counted pointer to Self.
    ///     fn me(&self) -> Rc<Self> {
    ///         self.me.upgrade().unwrap()
    ///     }
    /// }
    /// ```
    ///
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
    pub fn new_cyclic<F>(data_fn: F) -> Rc<T>
    where
        F: FnOnce(&Weak<T>) -> T,
    {
        // Construct the inner in the "uninitialized" state with a single
        // weak reference.
        let uninit_ptr: NonNull<_> = Box::leak(Box::new(RcBox {
            strong: Cell::new(0),
            weak: Cell::new(1),
            value: mem::MaybeUninit::<T>::uninit(),
        }))
        .into();

        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();

        let weak = Weak { ptr: init_ptr, alloc: Global };

        // It's important we don't give up ownership of the weak pointer, or
        // else the memory might be freed by the time `data_fn` returns. If
        // we really wanted to pass ownership, we could create an additional
        // weak pointer for ourselves, but this would result in additional
        // updates to the weak reference count which might not be necessary
        // otherwise.
        let data = data_fn(&weak);

        let strong = unsafe {
            let inner = init_ptr.as_ptr();
            ptr::write(ptr::addr_of_mut!((*inner).value), data);

            let prev_value = (*inner).strong.get();
            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
            (*inner).strong.set(1);

            Rc::from_inner(init_ptr)
        };

        // Strong references should collectively own a shared weak reference,
        // so don't run the destructor for our old weak reference.
        mem::forget(weak);
        strong
    }

    /// Constructs a new `Rc` with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[must_use]
    pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::new_zeroed();
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[must_use]
    pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc<T>`, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    ///
    /// let five = Rc::try_new(5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Ok(Self::from_inner(
                Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?)
                    .into(),
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::try_new_uninit()?;
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, returning an error if the allocation fails
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::try_new_zeroed()?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "pin", since = "1.33.0")]
    #[must_use]
    pub fn pin(value: T) -> Pin<Rc<T>> {
        unsafe { Pin::new_unchecked(Rc::new(value)) }
    }
}

impl<T, A: Allocator> Rc<T, A> {
    /// Returns a reference to the underlying allocator.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
    /// is so that there is no conflict with a method on the inner type.
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(this: &Self) -> &A {
        &this.alloc
    }

    /// Constructs a new `Rc` in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
        // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
        // That would make code size bigger.
        match Self::try_new_in(value, alloc) {
            Ok(m) => m,
            Err(_) => handle_alloc_error(Layout::new::<RcBox<T>>()),
        }
    }

    /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut five = Rc::<u32, _>::new_uninit_in(System);
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate(layout),
                    <*mut u8>::cast,
                ),
                alloc,
            )
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, in the provided allocator.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let zero = Rc::<u32, _>::new_zeroed_in(System);
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate_zeroed(layout),
                    <*mut u8>::cast,
                ),
                alloc,
            )
        }
    }

    /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
    /// fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::try_new_in(5, System);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        let (ptr, alloc) = Box::into_unique(Box::try_new_in(
            RcBox { strong: Cell::new(1), weak: Cell::new(1), value },
            alloc,
        )?);
        Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
    }

    /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
    /// error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr_in(
                Rc::try_allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate(layout),
                    <*mut u8>::cast,
                )?,
                alloc,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
    /// fails
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr_in(
                Rc::try_allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate_zeroed(layout),
                    <*mut u8>::cast,
                )?,
                alloc,
            ))
        }
    }

    /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn pin_in(value: T, alloc: A) -> Pin<Self> {
        unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
    }

    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
    /// passed in.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
    ///
    /// let x = Rc::new(4);
    /// let _y = Rc::clone(&x);
    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
    /// ```
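    ///
    /// Outstanding [`Weak`] pointers do not prevent unwrapping (a short sketch
    /// of the behavior stated above); they are simply left dangling:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// let weak = Rc::downgrade(&x);
    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
    /// // The inner value has been moved out, so the weak pointer
    /// // can no longer be upgraded.
    /// assert!(weak.upgrade().is_none());
    /// ```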
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        if Rc::strong_count(&this) == 1 {
            unsafe {
                let val = ptr::read(&*this); // copy the contained object
                let alloc = ptr::read(&this.alloc); // copy the allocator

                // Indicate to Weaks that they can't be promoted by decrementing
                // the strong count, and then remove the implicit "strong weak"
                // pointer while also handling drop logic by just crafting a
                // fake Weak.
                this.inner().dec_strong();
                let _weak = Weak { ptr: this.ptr, alloc };
                forget(this);
                Ok(val)
            }
        } else {
            Err(this)
        }
    }

    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, [`None`] is returned and the `Rc` is dropped.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// If `Rc::into_inner` is called on every clone of this `Rc`,
    /// it is guaranteed that exactly one of the calls returns the inner value.
    /// This means in particular that the inner value is not dropped.
    ///
    /// [`Rc::try_unwrap`] is conceptually similar to `Rc::into_inner`.
    /// And while they are meant for different use-cases, `Rc::into_inner(this)`
    /// is in fact equivalent to <code>[Rc::try_unwrap]\(this).[ok][Result::ok]()</code>.
    /// (Note that the same kind of equivalence does **not** hold true for
    /// [`Arc`](crate::sync::Arc), due to race conditions that do not apply to `Rc`!)
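    ///
    /// # Examples
    ///
    /// A minimal sketch of the guarantee described above: with two clones,
    /// exactly one of the `into_inner` calls returns the inner value.
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// let y = Rc::clone(&x);
    ///
    /// // Two strong references exist, so the first call returns `None`
    /// // and simply drops its `Rc`...
    /// assert_eq!(Rc::into_inner(y), None);
    /// // ...and the call on the last remaining clone yields the value.
    /// assert_eq!(Rc::into_inner(x), Some(3));
    /// ```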
    #[inline]
    #[stable(feature = "rc_into_inner", since = "1.70.0")]
    pub fn into_inner(this: Self) -> Option<T> {
        Rc::try_unwrap(this).ok()
    }
}

impl<T> Rc<[T]> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// // Deferred initialization:
    /// let data = Rc::get_mut(&mut values).unwrap();
    /// data[0].write(1);
    /// data[1].write(2);
    /// data[2].write(3);
    ///
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[must_use]
    pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// use std::rc::Rc;
    ///
    /// let values = Rc::<[u32]>::new_zeroed_slice(3);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[must_use]
    pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate_zeroed(layout),
                |mem| {
                    ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
                        as *mut RcBox<[mem::MaybeUninit<T>]>
                },
            ))
        }
    }
}

impl<T, A: Allocator> Rc<[T], A> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
        unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::array::<T>(len).unwrap(),
                    |layout| alloc.allocate_zeroed(layout),
                    |mem| {
                        ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
                            as *mut RcBox<[mem::MaybeUninit<T>]>
                    },
                ),
                alloc,
            )
        }
    }
}

impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
    /// Converts to `Rc<T>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<T, A>
    where
        A: Clone,
    {
        let md_self = mem::ManuallyDrop::new(self);
        unsafe { Rc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) }
    }
}

impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
    /// Converts to `Rc<[T]>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// // Deferred initialization:
    /// let data = Rc::get_mut(&mut values).unwrap();
    /// data[0].write(1);
    /// data[1].write(2);
    /// data[2].write(3);
    ///
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<[T], A>
    where
        A: Clone,
    {
        let md_self = mem::ManuallyDrop::new(self);
        unsafe { Rc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) }
    }
}

impl<T: ?Sized> Rc<T> {
    /// Constructs an `Rc<T>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size
    /// and alignment as `T`. This is trivially true if `U` is `T`.
    /// Note that if `U` is not `T` but has the same size and alignment, this is
    /// basically like transmuting references of different types. See
    /// [`mem::transmute`][transmute] for more information on what
    /// restrictions apply in this case.
    ///
    /// The raw pointer must point to a block of memory allocated by the global allocator.
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [transmute]: core::mem::transmute
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw(x_ptr);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    #[inline]
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        unsafe { Self::from_raw_in(ptr, Global) }
    }

    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
    /// allocated by the global allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn increment_strong_count(ptr: *const T) {
        unsafe { Self::increment_strong_count_in(ptr, Global) }
    }

    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method, and `ptr` must point to a block of memory
    /// allocated by the global allocator. This method can be used to release the final `Rc` and
    /// backing storage, but **should not** be called after the final `Rc` has been released.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     Rc::decrement_strong_count(ptr);
    ///     assert_eq!(1, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn decrement_strong_count(ptr: *const T) {
        unsafe { Self::decrement_strong_count_in(ptr, Global) }
    }
}

impl<T: ?Sized, A: Allocator> Rc<T, A> {
    /// Consumes the `Rc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// ```
    #[stable(feature = "rc_raw", since = "1.17.0")]
    #[rustc_never_returns_null_ptr]
    pub fn into_raw(this: Self) -> *const T {
        let ptr = Self::as_ptr(&this);
        mem::forget(this);
        ptr
    }

    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
    /// for as long as there are strong counts in the `Rc`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let y = Rc::clone(&x);
    /// let x_ptr = Rc::as_ptr(&x);
    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// ```
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    #[rustc_never_returns_null_ptr]
    pub fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut RcBox<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or Rc::inner because
        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
        // write through the pointer after the Rc is recovered through `from_raw`.
        unsafe { ptr::addr_of_mut!((*ptr).value) }
    }

    /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// [`Rc<U, A>::into_raw`][into_raw] where `U` must have the same size
    /// and alignment as `T`. This is trivially true if `U` is `T`.
    /// Note that if `U` is not `T` but has the same size and alignment, this is
    /// basically like transmuting references of different types. See
    /// [`mem::transmute`] for more information on what
    /// restrictions apply in this case.
    ///
    /// The raw pointer must point to a block of memory allocated by `alloc`.
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x = Rc::new_in("hello".to_owned(), System);
    /// let x_ptr = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw_in(x_ptr, System);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
        let offset = unsafe { data_offset(ptr) };

        // Reverse the offset to find the original RcBox.
        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcBox<T> };

        unsafe { Self::from_ptr_in(rc_ptr, alloc) }
    }

    /// Creates a new [`Weak`] pointer to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let weak_five = Rc::downgrade(&five);
    /// ```
    #[must_use = "this returns a new `Weak` pointer, \
                  without modifying the original `Rc`"]
    #[stable(feature = "rc_weak", since = "1.4.0")]
    pub fn downgrade(this: &Self) -> Weak<T, A>
    where
        A: Clone,
    {
        this.inner().inc_weak();
        // Make sure we do not create a dangling Weak
        debug_assert!(!is_dangling(this.ptr.as_ptr()));
        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
    }

    /// Gets the number of [`Weak`] pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _weak_five = Rc::downgrade(&five);
    ///
    /// assert_eq!(1, Rc::weak_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "rc_counts", since = "1.15.0")]
    pub fn weak_count(this: &Self) -> usize {
        this.inner().weak() - 1
    }

    /// Gets the number of strong (`Rc`) pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _also_five = Rc::clone(&five);
    ///
    /// assert_eq!(2, Rc::strong_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "rc_counts", since = "1.15.0")]
    pub fn strong_count(this: &Self) -> usize {
        this.inner().strong()
    }

    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
    /// allocated by `alloc`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count_in(ptr, System);
    ///
    ///     let five = Rc::from_raw_in(ptr, System);
    ///     assert_eq!(2, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
    where
        A: Clone,
    {
        // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
        let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
        // Now increase refcount, but don't drop new refcount either
        let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
    }

    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw`, the
    /// associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method, and `ptr` must point to a block of memory
    /// allocated by `alloc`. This method can be used to release the final `Rc` and backing storage,
    /// but **should not** be called after the final `Rc` has been released.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count_in(ptr, System);
    ///
    ///     let five = Rc::from_raw_in(ptr, System);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     Rc::decrement_strong_count_in(ptr, System);
    ///     assert_eq!(1, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
        unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
    }

    /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
    /// this allocation.
    #[inline]
    fn is_unique(this: &Self) -> bool {
        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
    }

    /// Returns a mutable reference into the given `Rc`, if there are
    /// no other `Rc` or [`Weak`] pointers to the same allocation.
    ///
    /// Returns [`None`] otherwise, because it is not safe to
    /// mutate a shared value.
    ///
    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
    /// the inner value when there are other `Rc` pointers.
    ///
    /// [make_mut]: Rc::make_mut
    /// [clone]: Clone::clone
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut x = Rc::new(3);
    /// *Rc::get_mut(&mut x).unwrap() = 4;
    /// assert_eq!(*x, 4);
    ///
    /// let _y = Rc::clone(&x);
    /// assert!(Rc::get_mut(&mut x).is_none());
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
    }

    /// Returns a mutable reference into the given `Rc`,
    /// without any check.
    ///
    /// See also [`get_mut`], which is safe and does appropriate checks.
    ///
    /// [`get_mut`]: Rc::get_mut
    ///
    /// # Safety
    ///
    /// If any other `Rc` or [`Weak`] pointers to the same allocation exist, then
    /// they must not be dereferenced or have active borrows for the duration
    /// of the returned borrow, and their inner type must be exactly the same as the
    /// inner type of this Rc (including lifetimes). This is trivially the case if no
    /// such pointers exist, for example immediately after `Rc::new`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut x = Rc::new(String::new());
    /// unsafe {
    ///     Rc::get_mut_unchecked(&mut x).push_str("foo")
    /// }
    /// assert_eq!(*x, "foo");
    /// ```
    ///
    /// Other `Rc` pointers to the same allocation must be to the same type.
    ///
    /// ```no_run
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let x: Rc<str> = Rc::from("Hello, world!");
    /// let mut y: Rc<[u8]> = x.clone().into();
    /// unsafe {
    ///     // this is Undefined Behavior, because x's inner type is str, not [u8]
    ///     Rc::get_mut_unchecked(&mut y).fill(0xff); // 0xff is invalid in UTF-8
    /// }
    /// println!("{}", &*x); // Invalid UTF-8 in a str
    /// ```
    ///
    /// Other `Rc` pointers to the same allocation must be to the exact same type, including lifetimes.
    ///
    /// ```no_run
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let x: Rc<&str> = Rc::new("Hello, world!");
    /// {
    ///     let s = String::from("Oh, no!");
    ///     let mut y: Rc<&str> = x.clone().into();
    ///     unsafe {
    ///         // this is Undefined Behavior, because x's inner type
    ///         // is &'long str, not &'short str
    ///         *Rc::get_mut_unchecked(&mut y) = &s;
    ///     }
    /// }
    /// println!("{}", &*x); // Use-after-free
    /// ```
    #[inline]
    #[unstable(feature = "get_mut_unchecked", issue = "63292")]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
        // We are careful to *not* create a reference covering the "count" fields, as
        // this would conflict with accesses to the reference counts (e.g. by `Weak`).
        unsafe { &mut (*this.ptr.as_ptr()).value }
    }

    /// Returns `true` if the two `Rc`s point to the same allocation in a vein similar to
    /// [`ptr::eq`]. This function ignores the metadata of `dyn Trait` pointers.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let same_five = Rc::clone(&five);
    /// let other_five = Rc::new(5);
    ///
    /// assert!(Rc::ptr_eq(&five, &same_five));
    /// assert!(!Rc::ptr_eq(&five, &other_five));
    /// ```
    #[inline]
    #[stable(feature = "ptr_eq", since = "1.17.0")]
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
    }
}

impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
    /// Makes a mutable reference into the given `Rc`.
    ///
    /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
    /// [`clone`] the inner value to a new allocation to ensure unique ownership. This is also
    /// referred to as clone-on-write.
    ///
    /// However, if there are no other `Rc` pointers to this allocation, but some [`Weak`]
    /// pointers, then the [`Weak`] pointers will be disassociated and the inner value will not
    /// be cloned.
    ///
    /// See also [`get_mut`], which will fail rather than cloning the inner value
    /// or disassociating [`Weak`] pointers.
    ///
    /// [`clone`]: Clone::clone
    /// [`get_mut`]: Rc::get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut data = Rc::new(5);
    ///
    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
    /// let mut other_data = Rc::clone(&data); // Won't clone inner data
    /// *Rc::make_mut(&mut data) += 1;         // Clones inner data
    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
    /// *Rc::make_mut(&mut other_data) *= 2;   // Won't clone anything
    ///
    /// // Now `data` and `other_data` point to different allocations.
    /// assert_eq!(*data, 8);
    /// assert_eq!(*other_data, 12);
    /// ```
    ///
    /// [`Weak`] pointers will be disassociated:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut data = Rc::new(75);
    /// let weak = Rc::downgrade(&data);
    ///
    /// assert!(75 == *data);
    /// assert!(75 == *weak.upgrade().unwrap());
    ///
    /// *Rc::make_mut(&mut data) += 1;
    ///
    /// assert!(76 == *data);
    /// assert!(weak.upgrade().is_none());
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn make_mut(this: &mut Self) -> &mut T {
        if Rc::strong_count(this) != 1 {
            // Gotta clone the data, there are other Rcs.
            // Pre-allocate memory to allow writing the cloned value directly.
            let mut rc = Self::new_uninit_in(this.alloc.clone());
            unsafe {
                let data = Rc::get_mut_unchecked(&mut rc);
                (**this).write_clone_into_raw(data.as_mut_ptr());
                *this = rc.assume_init();
            }
        } else if Rc::weak_count(this) != 0 {
            // Can just steal the data, all that's left is Weaks
            let mut rc = Self::new_uninit_in(this.alloc.clone());
            unsafe {
                let data = Rc::get_mut_unchecked(&mut rc);
                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);

                this.inner().dec_strong();
                // Remove implicit strong-weak ref (no need to craft a fake
                // Weak here -- we know other Weaks can clean up for us)
                this.inner().dec_weak();
                ptr::write(this, rc.assume_init());
            }
        }
        // This unsafety is ok because we're guaranteed that the pointer
        // returned is the *only* pointer that will ever be returned to T. Our
        // reference count is guaranteed to be 1 at this point, and we required
        // the `Rc<T>` itself to be `mut`, so we're returning the only possible
        // reference to the allocation.
        unsafe { &mut this.ptr.as_mut().value }
    }

    /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
    /// clone.
    ///
    /// Assuming `rc_t` is of type `Rc<T>`, this function is functionally equivalent to
    /// `(*rc_t).clone()`, but will avoid cloning the inner value where possible.
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::{ptr, rc::Rc};
    /// let inner = String::from("test");
    /// let ptr = inner.as_ptr();
    ///
    /// let rc = Rc::new(inner);
    /// let inner = Rc::unwrap_or_clone(rc);
    /// // The inner value was not cloned
    /// assert!(ptr::eq(ptr, inner.as_ptr()));
    ///
    /// let rc = Rc::new(inner);
    /// let rc2 = rc.clone();
    /// let inner = Rc::unwrap_or_clone(rc);
    /// // Because there were 2 references, we had to clone the inner value.
    /// assert!(!ptr::eq(ptr, inner.as_ptr()));
    /// // `rc2` is the last reference, so when we unwrap it we get back
    /// // the original `String`.
    /// let inner = Rc::unwrap_or_clone(rc2);
    /// assert!(ptr::eq(ptr, inner.as_ptr()));
    /// ```
    #[inline]
1775 #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")]
1776 pub fn unwrap_or_clone(this: Self) -> T {
1777 Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
1778 }
1779}
1780
1781impl<A: Allocator + Clone> Rc<dyn Any, A> {
1782 /// Attempt to downcast the `Rc<dyn Any>` to a concrete type.
1783 ///
1784 /// # Examples
1785 ///
1786 /// ```
1787 /// use std::any::Any;
1788 /// use std::rc::Rc;
1789 ///
1790 /// fn print_if_string(value: Rc<dyn Any>) {
1791 /// if let Ok(string) = value.downcast::<String>() {
1792 /// println!("String ({}): {}", string.len(), string);
1793 /// }
1794 /// }
1795 ///
1796 /// let my_string = "Hello World".to_string();
1797 /// print_if_string(Rc::new(my_string));
1798 /// print_if_string(Rc::new(0i8));
1799 /// ```
1800 #[inline]
1801 #[stable(feature = "rc_downcast", since = "1.29.0")]
1802 pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
1803 if (*self).is::<T>() {
1804 unsafe {
1805 let ptr = self.ptr.cast::<RcBox<T>>();
1806 let alloc = self.alloc.clone();
1807 forget(self);
1808 Ok(Rc::from_inner_in(ptr, alloc))
1809 }
1810 } else {
1811 Err(self)
1812 }
1813 }
1814
1815 /// Downcasts the `Rc<dyn Any>` to a concrete type.
1816 ///
1817 /// For a safe alternative see [`downcast`].
1818 ///
1819 /// # Examples
1820 ///
1821 /// ```
1822 /// #![feature(downcast_unchecked)]
1823 ///
1824 /// use std::any::Any;
1825 /// use std::rc::Rc;
1826 ///
1827 /// let x: Rc<dyn Any> = Rc::new(1_usize);
1828 ///
1829 /// unsafe {
1830 /// assert_eq!(*x.downcast_unchecked::<usize>(), 1);
1831 /// }
1832 /// ```
1833 ///
1834 /// # Safety
1835 ///
1836 /// The contained value must be of type `T`. Calling this method
1837 /// with the incorrect type is *undefined behavior*.
    ///
1840 /// [`downcast`]: Self::downcast
1841 #[inline]
1842 #[unstable(feature = "downcast_unchecked", issue = "90850")]
1843 pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
1844 unsafe {
1845 let ptr = self.ptr.cast::<RcBox<T>>();
1846 let alloc = self.alloc.clone();
1847 mem::forget(self);
1848 Rc::from_inner_in(ptr, alloc)
1849 }
1850 }
1851}
1852
1853impl<T: ?Sized> Rc<T> {
1854 /// Allocates an `RcBox<T>` with sufficient space for
1855 /// a possibly-unsized inner value where the value has the layout provided.
1856 ///
1857 /// The function `mem_to_rcbox` is called with the data pointer
    /// and must return back a (potentially fat) pointer for the `RcBox<T>`.
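    ///
    /// For example, the slice constructors below pass a `mem_to_rcbox` of
    /// `|mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcBox<[T]>`
    /// to attach the length metadata to the otherwise thin allocation pointer.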
1859 #[cfg(not(no_global_oom_handling))]
1860 unsafe fn allocate_for_layout(
1861 value_layout: Layout,
1862 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
1863 mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
1864 ) -> *mut RcBox<T> {
1865 let layout = rcbox_layout_for_value_layout(value_layout);
1866 unsafe {
1867 Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rcbox)
1868 .unwrap_or_else(|_| handle_alloc_error(layout))
1869 }
1870 }
1871
1872 /// Allocates an `RcBox<T>` with sufficient space for
1873 /// a possibly-unsized inner value where the value has the layout provided,
1874 /// returning an error if allocation fails.
1875 ///
1876 /// The function `mem_to_rcbox` is called with the data pointer
    /// and must return back a (potentially fat) pointer for the `RcBox<T>`.
1878 #[inline]
1879 unsafe fn try_allocate_for_layout(
1880 value_layout: Layout,
1881 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
1882 mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
1883 ) -> Result<*mut RcBox<T>, AllocError> {
1884 let layout = rcbox_layout_for_value_layout(value_layout);
1885
1886 // Allocate for the layout.
1887 let ptr = allocate(layout)?;
1888
1889 // Initialize the RcBox
1890 let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr());
1891 unsafe {
1892 debug_assert_eq!(Layout::for_value_raw(inner), layout);
1893
1894 ptr::addr_of_mut!((*inner).strong).write(Cell::new(1));
1895 ptr::addr_of_mut!((*inner).weak).write(Cell::new(1));
1896 }
1897
1898 Ok(inner)
1899 }
1900}
1901
1902impl<T: ?Sized, A: Allocator> Rc<T, A> {
1903 /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
1904 #[cfg(not(no_global_oom_handling))]
1905 unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcBox<T> {
1906 // Allocate for the `RcBox<T>` using the given value.
1907 unsafe {
1908 Rc::<T>::allocate_for_layout(
1909 Layout::for_value_raw(ptr),
1910 |layout| alloc.allocate(layout),
1911 |mem| mem.with_metadata_of(ptr as *const RcBox<T>),
1912 )
1913 }
1914 }
1915
1916 #[cfg(not(no_global_oom_handling))]
1917 fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
1918 unsafe {
1919 let value_size = size_of_val(&*src);
1920 let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
1921
1922 // Copy value as bytes
1923 ptr::copy_nonoverlapping(
1924 &*src as *const T as *const u8,
1925 ptr::addr_of_mut!((*ptr).value) as *mut u8,
1926 value_size,
1927 );
1928
1929 // Free the allocation without dropping its contents
1930 let (bptr, alloc) = Box::into_raw_with_allocator(src);
1931 let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop<T>, alloc.by_ref());
1932 drop(src);
1933
1934 Self::from_ptr_in(ptr, alloc)
1935 }
1936 }
1937}
1938
1939impl<T> Rc<[T]> {
1940 /// Allocates an `RcBox<[T]>` with the given length.
1941 #[cfg(not(no_global_oom_handling))]
1942 unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> {
1943 unsafe {
1944 Self::allocate_for_layout(
1945 Layout::array::<T>(len).unwrap(),
1946 |layout| Global.allocate(layout),
1947 |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcBox<[T]>,
1948 )
1949 }
1950 }
1951
1952 /// Copy elements from slice into newly allocated `Rc<[T]>`
1953 ///
1954 /// Unsafe because the caller must either take ownership or bind `T: Copy`
1955 #[cfg(not(no_global_oom_handling))]
1956 unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
1957 unsafe {
1958 let ptr = Self::allocate_for_slice(v.len());
1959 ptr::copy_nonoverlapping(
1960 v.as_ptr(),
1961 ptr::addr_of_mut!((*ptr).value) as *mut T,
1962 v.len(),
1963 );
1964 Self::from_ptr(ptr)
1965 }
1966 }
1967
1968 /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
1969 ///
1970 /// Behavior is undefined should the size be wrong.
1971 #[cfg(not(no_global_oom_handling))]
1972 unsafe fn from_iter_exact(iter: impl Iterator<Item = T>, len: usize) -> Rc<[T]> {
1973 // Panic guard while cloning T elements.
1974 // In the event of a panic, elements that have been written
1975 // into the new RcBox will be dropped, then the memory freed.
1976 struct Guard<T> {
1977 mem: NonNull<u8>,
1978 elems: *mut T,
1979 layout: Layout,
1980 n_elems: usize,
1981 }
1982
1983 impl<T> Drop for Guard<T> {
1984 fn drop(&mut self) {
1985 unsafe {
1986 let slice = from_raw_parts_mut(self.elems, self.n_elems);
1987 ptr::drop_in_place(slice);
1988
1989 Global.deallocate(self.mem, self.layout);
1990 }
1991 }
1992 }
1993
1994 unsafe {
1995 let ptr = Self::allocate_for_slice(len);
1996
1997 let mem = ptr as *mut _ as *mut u8;
1998 let layout = Layout::for_value_raw(ptr);
1999
2000 // Pointer to first element
2001 let elems = ptr::addr_of_mut!((*ptr).value) as *mut T;
2002
2003 let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
2004
2005 for (i, item) in iter.enumerate() {
2006 ptr::write(elems.add(i), item);
2007 guard.n_elems += 1;
2008 }
2009
2010 // All clear. Forget the guard so it doesn't free the new RcBox.
2011 forget(guard);
2012
2013 Self::from_ptr(ptr)
2014 }
2015 }
2016}
2017
2018impl<T, A: Allocator> Rc<[T], A> {
2019 /// Allocates an `RcBox<[T]>` with the given length.
2020 #[inline]
2021 #[cfg(not(no_global_oom_handling))]
2022 unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcBox<[T]> {
2023 unsafe {
2024 Rc::<[T]>::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| alloc.allocate(layout),
                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcBox<[T]>,
2028 )
2029 }
2030 }
2031}
2032
2033#[cfg(not(no_global_oom_handling))]
2034/// Specialization trait used for `From<&[T]>`.
2035trait RcFromSlice<T> {
2036 fn from_slice(slice: &[T]) -> Self;
2037}
2038
2039#[cfg(not(no_global_oom_handling))]
2040impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
2041 #[inline]
2042 default fn from_slice(v: &[T]) -> Self {
        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
2044 }
2045}
2046
2047#[cfg(not(no_global_oom_handling))]
2048impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
2049 #[inline]
2050 fn from_slice(v: &[T]) -> Self {
2051 unsafe { Rc::copy_from_slice(v) }
2052 }
2053}
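
// For example, `Rc::<[String]>::from(&v[..])` takes the `Clone` path above and
// clones each element in turn, while `Rc::<[u8]>::from(&v[..])` is served by the
// `Copy` specialization and reduces to a single memcpy of the bytes.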
2054
2055#[stable(feature = "rust1", since = "1.0.0")]
2056impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
2057 type Target = T;
2058
2059 #[inline(always)]
2060 fn deref(&self) -> &T {
2061 &self.inner().value
2062 }
2063}
2064
2065#[unstable(feature = "receiver_trait", issue = "none")]
2066impl<T: ?Sized> Receiver for Rc<T> {}
2067
2068#[stable(feature = "rust1", since = "1.0.0")]
2069unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
2070 /// Drops the `Rc`.
2071 ///
2072 /// This will decrement the strong reference count. If the strong reference
2073 /// count reaches zero then the only other references (if any) are
2074 /// [`Weak`], so we `drop` the inner value.
2075 ///
2076 /// # Examples
2077 ///
2078 /// ```
2079 /// use std::rc::Rc;
2080 ///
2081 /// struct Foo;
2082 ///
2083 /// impl Drop for Foo {
2084 /// fn drop(&mut self) {
2085 /// println!("dropped!");
2086 /// }
2087 /// }
2088 ///
2089 /// let foo = Rc::new(Foo);
2090 /// let foo2 = Rc::clone(&foo);
2091 ///
2092 /// drop(foo); // Doesn't print anything
2093 /// drop(foo2); // Prints "dropped!"
2094 /// ```
2095 fn drop(&mut self) {
2096 unsafe {
2097 self.inner().dec_strong();
2098 if self.inner().strong() == 0 {
2099 // destroy the contained object
2100 ptr::drop_in_place(Self::get_mut_unchecked(self));
2101
2102 // remove the implicit "strong weak" pointer now that we've
2103 // destroyed the contents.
2104 self.inner().dec_weak();
2105
2106 if self.inner().weak() == 0 {
2107 self.alloc
2108 .deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
2109 }
2110 }
2111 }
2112 }
2113}
2114
2115#[stable(feature = "rust1", since = "1.0.0")]
2116impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
2117 /// Makes a clone of the `Rc` pointer.
2118 ///
2119 /// This creates another pointer to the same allocation, increasing the
2120 /// strong reference count.
2121 ///
2122 /// # Examples
2123 ///
2124 /// ```
2125 /// use std::rc::Rc;
2126 ///
2127 /// let five = Rc::new(5);
2128 ///
2129 /// let _ = Rc::clone(&five);
2130 /// ```
2131 #[inline]
2132 fn clone(&self) -> Self {
2133 unsafe {
2134 self.inner().inc_strong();
2135 Self::from_inner_in(self.ptr, self.alloc.clone())
2136 }
2137 }
2138}
2139
2140#[cfg(not(no_global_oom_handling))]
2141#[stable(feature = "rust1", since = "1.0.0")]
2142impl<T: Default> Default for Rc<T> {
2143 /// Creates a new `Rc<T>`, with the `Default` value for `T`.
2144 ///
2145 /// # Examples
2146 ///
2147 /// ```
2148 /// use std::rc::Rc;
2149 ///
2150 /// let x: Rc<i32> = Default::default();
2151 /// assert_eq!(*x, 0);
2152 /// ```
2153 #[inline]
2154 fn default() -> Rc<T> {
2155 Rc::new(Default::default())
2156 }
2157}
2158
2159#[stable(feature = "rust1", since = "1.0.0")]
2160trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
2161 fn eq(&self, other: &Rc<T, A>) -> bool;
2162 fn ne(&self, other: &Rc<T, A>) -> bool;
2163}
2164
2165#[stable(feature = "rust1", since = "1.0.0")]
2166impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2167 #[inline]
2168 default fn eq(&self, other: &Rc<T, A>) -> bool {
2169 **self == **other
2170 }
2171
2172 #[inline]
2173 default fn ne(&self, other: &Rc<T, A>) -> bool {
2174 **self != **other
2175 }
2176}
2177
2178// Hack to allow specializing on `Eq` even though `Eq` has a method.
2179#[rustc_unsafe_specialization_marker]
2180pub(crate) trait MarkerEq: PartialEq<Self> {}
2181
2182impl<T: Eq> MarkerEq for T {}
2183
/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
/// store large values that are slow to clone, but also heavy to check for equality, causing this
/// cost to pay off more easily. It's also more likely to have two `Rc` clones that point to
/// the same value than two `&T`s.
2189///
2190/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
2191#[stable(feature = "rust1", since = "1.0.0")]
2192impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2193 #[inline]
2194 fn eq(&self, other: &Rc<T, A>) -> bool {
2195 Rc::ptr_eq(self, other) || **self == **other
2196 }
2197
2198 #[inline]
2199 fn ne(&self, other: &Rc<T, A>) -> bool {
2200 !Rc::ptr_eq(self, other) && **self != **other
2201 }
2202}
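
// For example, given `let a = Rc::new(vec![0u8; 1_000_000]);` and
// `let b = Rc::clone(&a);`, the comparison `a == b` is answered by the pointer
// check alone, without scanning the two identical million-byte vectors.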
2203
2204#[stable(feature = "rust1", since = "1.0.0")]
2205impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
2206 /// Equality for two `Rc`s.
2207 ///
    /// Two `Rc`s are equal if their inner values are equal, even if they are
    /// stored in different allocations.
2210 ///
2211 /// If `T` also implements `Eq` (implying reflexivity of equality),
2212 /// two `Rc`s that point to the same allocation are
2213 /// always equal.
2214 ///
2215 /// # Examples
2216 ///
2217 /// ```
2218 /// use std::rc::Rc;
2219 ///
2220 /// let five = Rc::new(5);
2221 ///
2222 /// assert!(five == Rc::new(5));
2223 /// ```
2224 #[inline]
2225 fn eq(&self, other: &Rc<T, A>) -> bool {
2226 RcEqIdent::eq(self, other)
2227 }
2228
2229 /// Inequality for two `Rc`s.
2230 ///
2231 /// Two `Rc`s are not equal if their inner values are not equal.
2232 ///
    /// If `T` also implements `Eq` (implying reflexivity of equality),
    /// two `Rc`s that point to the same allocation are
    /// never unequal.
2236 ///
2237 /// # Examples
2238 ///
2239 /// ```
2240 /// use std::rc::Rc;
2241 ///
2242 /// let five = Rc::new(5);
2243 ///
2244 /// assert!(five != Rc::new(6));
2245 /// ```
2246 #[inline]
2247 fn ne(&self, other: &Rc<T, A>) -> bool {
2248 RcEqIdent::ne(self, other)
2249 }
2250}
2251
2252#[stable(feature = "rust1", since = "1.0.0")]
2253impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
2254
2255#[stable(feature = "rust1", since = "1.0.0")]
2256impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
2257 /// Partial comparison for two `Rc`s.
2258 ///
2259 /// The two are compared by calling `partial_cmp()` on their inner values.
2260 ///
2261 /// # Examples
2262 ///
2263 /// ```
2264 /// use std::rc::Rc;
2265 /// use std::cmp::Ordering;
2266 ///
2267 /// let five = Rc::new(5);
2268 ///
2269 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
2270 /// ```
2271 #[inline(always)]
2272 fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
2273 (**self).partial_cmp(&**other)
2274 }
2275
2276 /// Less-than comparison for two `Rc`s.
2277 ///
2278 /// The two are compared by calling `<` on their inner values.
2279 ///
2280 /// # Examples
2281 ///
2282 /// ```
2283 /// use std::rc::Rc;
2284 ///
2285 /// let five = Rc::new(5);
2286 ///
2287 /// assert!(five < Rc::new(6));
2288 /// ```
2289 #[inline(always)]
2290 fn lt(&self, other: &Rc<T, A>) -> bool {
2291 **self < **other
2292 }
2293
2294 /// 'Less than or equal to' comparison for two `Rc`s.
2295 ///
2296 /// The two are compared by calling `<=` on their inner values.
2297 ///
2298 /// # Examples
2299 ///
2300 /// ```
2301 /// use std::rc::Rc;
2302 ///
2303 /// let five = Rc::new(5);
2304 ///
2305 /// assert!(five <= Rc::new(5));
2306 /// ```
2307 #[inline(always)]
2308 fn le(&self, other: &Rc<T, A>) -> bool {
2309 **self <= **other
2310 }
2311
2312 /// Greater-than comparison for two `Rc`s.
2313 ///
2314 /// The two are compared by calling `>` on their inner values.
2315 ///
2316 /// # Examples
2317 ///
2318 /// ```
2319 /// use std::rc::Rc;
2320 ///
2321 /// let five = Rc::new(5);
2322 ///
2323 /// assert!(five > Rc::new(4));
2324 /// ```
2325 #[inline(always)]
2326 fn gt(&self, other: &Rc<T, A>) -> bool {
2327 **self > **other
2328 }
2329
2330 /// 'Greater than or equal to' comparison for two `Rc`s.
2331 ///
2332 /// The two are compared by calling `>=` on their inner values.
2333 ///
2334 /// # Examples
2335 ///
2336 /// ```
2337 /// use std::rc::Rc;
2338 ///
2339 /// let five = Rc::new(5);
2340 ///
2341 /// assert!(five >= Rc::new(5));
2342 /// ```
2343 #[inline(always)]
2344 fn ge(&self, other: &Rc<T, A>) -> bool {
2345 **self >= **other
2346 }
2347}
2348
2349#[stable(feature = "rust1", since = "1.0.0")]
2350impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
2351 /// Comparison for two `Rc`s.
2352 ///
2353 /// The two are compared by calling `cmp()` on their inner values.
2354 ///
2355 /// # Examples
2356 ///
2357 /// ```
2358 /// use std::rc::Rc;
2359 /// use std::cmp::Ordering;
2360 ///
2361 /// let five = Rc::new(5);
2362 ///
2363 /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
2364 /// ```
2365 #[inline]
2366 fn cmp(&self, other: &Rc<T, A>) -> Ordering {
2367 (**self).cmp(&**other)
2368 }
2369}
2370
2371#[stable(feature = "rust1", since = "1.0.0")]
2372impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
2373 fn hash<H: Hasher>(&self, state: &mut H) {
2374 (**self).hash(state);
2375 }
2376}
2377
2378#[stable(feature = "rust1", since = "1.0.0")]
2379impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
2380 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2381 fmt::Display::fmt(&**self, f)
2382 }
2383}
2384
2385#[stable(feature = "rust1", since = "1.0.0")]
2386impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
2387 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2388 fmt::Debug::fmt(&**self, f)
2389 }
2390}
2391
2392#[stable(feature = "rust1", since = "1.0.0")]
2393impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
2394 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2395 fmt::Pointer::fmt(&(&**self as *const T), f)
2396 }
2397}
2398
2399#[cfg(not(no_global_oom_handling))]
2400#[stable(feature = "from_for_ptrs", since = "1.6.0")]
2401impl<T> From<T> for Rc<T> {
2402 /// Converts a generic type `T` into an `Rc<T>`
2403 ///
2404 /// The conversion allocates on the heap and moves `t`
2405 /// from the stack into it.
2406 ///
2407 /// # Example
2408 /// ```rust
2409 /// # use std::rc::Rc;
2410 /// let x = 5;
2411 /// let rc = Rc::new(5);
2412 ///
2413 /// assert_eq!(Rc::from(x), rc);
2414 /// ```
2415 fn from(t: T) -> Self {
2416 Rc::new(t)
2417 }
2418}
2419
2420#[cfg(not(no_global_oom_handling))]
2421#[stable(feature = "shared_from_array", since = "1.74.0")]
2422impl<T, const N: usize> From<[T; N]> for Rc<[T]> {
2423 /// Converts a [`[T; N]`](prim@array) into an `Rc<[T]>`.
2424 ///
2425 /// The conversion moves the array into a newly allocated `Rc`.
2426 ///
2427 /// # Example
2428 ///
2429 /// ```
2430 /// # use std::rc::Rc;
2431 /// let original: [i32; 3] = [1, 2, 3];
2432 /// let shared: Rc<[i32]> = Rc::from(original);
2433 /// assert_eq!(&[1, 2, 3], &shared[..]);
2434 /// ```
2435 #[inline]
2436 fn from(v: [T; N]) -> Rc<[T]> {
2437 Rc::<[T; N]>::from(v)
2438 }
2439}
2440
2441#[cfg(not(no_global_oom_handling))]
2442#[stable(feature = "shared_from_slice", since = "1.21.0")]
2443impl<T: Clone> From<&[T]> for Rc<[T]> {
2444 /// Allocate a reference-counted slice and fill it by cloning `v`'s items.
2445 ///
2446 /// # Example
2447 ///
2448 /// ```
2449 /// # use std::rc::Rc;
2450 /// let original: &[i32] = &[1, 2, 3];
2451 /// let shared: Rc<[i32]> = Rc::from(original);
2452 /// assert_eq!(&[1, 2, 3], &shared[..]);
2453 /// ```
2454 #[inline]
2455 fn from(v: &[T]) -> Rc<[T]> {
2456 <Self as RcFromSlice<T>>::from_slice(v)
2457 }
2458}
2459
2460#[cfg(not(no_global_oom_handling))]
2461#[stable(feature = "shared_from_slice", since = "1.21.0")]
2462impl From<&str> for Rc<str> {
2463 /// Allocate a reference-counted string slice and copy `v` into it.
2464 ///
2465 /// # Example
2466 ///
2467 /// ```
2468 /// # use std::rc::Rc;
2469 /// let shared: Rc<str> = Rc::from("statue");
2470 /// assert_eq!("statue", &shared[..]);
2471 /// ```
2472 #[inline]
2473 fn from(v: &str) -> Rc<str> {
2474 let rc: Rc<[u8]> = Rc::<[u8]>::from(v.as_bytes());
        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2476 }
2477}
2478
2479#[cfg(not(no_global_oom_handling))]
2480#[stable(feature = "shared_from_slice", since = "1.21.0")]
2481impl From<String> for Rc<str> {
2482 /// Allocate a reference-counted string slice and copy `v` into it.
2483 ///
2484 /// # Example
2485 ///
2486 /// ```
2487 /// # use std::rc::Rc;
2488 /// let original: String = "statue".to_owned();
2489 /// let shared: Rc<str> = Rc::from(original);
2490 /// assert_eq!("statue", &shared[..]);
2491 /// ```
2492 #[inline]
2493 fn from(v: String) -> Rc<str> {
2494 Rc::from(&v[..])
2495 }
2496}
2497
2498#[cfg(not(no_global_oom_handling))]
2499#[stable(feature = "shared_from_slice", since = "1.21.0")]
2500impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
2501 /// Move a boxed object to a new, reference counted, allocation.
2502 ///
2503 /// # Example
2504 ///
2505 /// ```
2506 /// # use std::rc::Rc;
2507 /// let original: Box<i32> = Box::new(1);
2508 /// let shared: Rc<i32> = Rc::from(original);
2509 /// assert_eq!(1, *shared);
2510 /// ```
2511 #[inline]
2512 fn from(v: Box<T, A>) -> Rc<T, A> {
        Rc::from_box_in(v)
2514 }
2515}
2516
2517#[cfg(not(no_global_oom_handling))]
2518#[stable(feature = "shared_from_slice", since = "1.21.0")]
2519impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
2520 /// Allocate a reference-counted slice and move `v`'s items into it.
2521 ///
2522 /// # Example
2523 ///
2524 /// ```
2525 /// # use std::rc::Rc;
2526 /// let unique: Vec<i32> = vec![1, 2, 3];
2527 /// let shared: Rc<[i32]> = Rc::from(unique);
2528 /// assert_eq!(&[1, 2, 3], &shared[..]);
2529 /// ```
2530 #[inline]
2531 fn from(v: Vec<T, A>) -> Rc<[T], A> {
2532 unsafe {
2533 let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
2534
2535 let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
2536 ptr::copy_nonoverlapping(vec_ptr, ptr::addr_of_mut!((*rc_ptr).value) as *mut T, len);
2537
2538 // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
2539 // without dropping its contents or the allocator
2540 let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
2541
2542 Self::from_ptr_in(rc_ptr, alloc)
2543 }
2544 }
2545}
2546
2547#[stable(feature = "shared_from_cow", since = "1.45.0")]
2548impl<'a, B> From<Cow<'a, B>> for Rc<B>
2549where
2550 B: ToOwned + ?Sized,
2551 Rc<B>: From<&'a B> + From<B::Owned>,
2552{
2553 /// Create a reference-counted pointer from
2554 /// a clone-on-write pointer by copying its content.
2555 ///
2556 /// # Example
2557 ///
2558 /// ```rust
2559 /// # use std::rc::Rc;
2560 /// # use std::borrow::Cow;
2561 /// let cow: Cow<'_, str> = Cow::Borrowed("eggplant");
2562 /// let shared: Rc<str> = Rc::from(cow);
2563 /// assert_eq!("eggplant", &shared[..]);
2564 /// ```
2565 #[inline]
2566 fn from(cow: Cow<'a, B>) -> Rc<B> {
2567 match cow {
            Cow::Borrowed(s) => Rc::from(s),
            Cow::Owned(s) => Rc::from(s),
2570 }
2571 }
2572}
2573
2574#[stable(feature = "shared_from_str", since = "1.62.0")]
2575impl From<Rc<str>> for Rc<[u8]> {
2576 /// Converts a reference-counted string slice into a byte slice.
2577 ///
2578 /// # Example
2579 ///
2580 /// ```
2581 /// # use std::rc::Rc;
2582 /// let string: Rc<str> = Rc::from("eggplant");
2583 /// let bytes: Rc<[u8]> = Rc::from(string);
2584 /// assert_eq!("eggplant".as_bytes(), bytes.as_ref());
2585 /// ```
2586 #[inline]
2587 fn from(rc: Rc<str>) -> Self {
2588 // SAFETY: `str` has the same layout as `[u8]`.
        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) }
2590 }
2591}
2592
2593#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
2594impl<T, const N: usize> TryFrom<Rc<[T]>> for Rc<[T; N]> {
2595 type Error = Rc<[T]>;
2596
2597 fn try_from(boxed_slice: Rc<[T]>) -> Result<Self, Self::Error> {
2598 if boxed_slice.len() == N {
            Ok(unsafe { Rc::from_raw(Rc::into_raw(boxed_slice) as *mut [T; N]) })
2600 } else {
2601 Err(boxed_slice)
2602 }
2603 }
2604}
2605
2606#[cfg(not(no_global_oom_handling))]
2607#[stable(feature = "shared_from_iter", since = "1.37.0")]
2608impl<T> FromIterator<T> for Rc<[T]> {
2609 /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
2610 ///
2611 /// # Performance characteristics
2612 ///
2613 /// ## The general case
2614 ///
2615 /// In the general case, collecting into `Rc<[T]>` is done by first
2616 /// collecting into a `Vec<T>`. That is, when writing the following:
2617 ///
2618 /// ```rust
2619 /// # use std::rc::Rc;
2620 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
2621 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2622 /// ```
2623 ///
2624 /// this behaves as if we wrote:
2625 ///
2626 /// ```rust
2627 /// # use std::rc::Rc;
2628 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
2629 /// .collect::<Vec<_>>() // The first set of allocations happens here.
2630 /// .into(); // A second allocation for `Rc<[T]>` happens here.
2631 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2632 /// ```
2633 ///
2634 /// This will allocate as many times as needed for constructing the `Vec<T>`
2635 /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
2636 ///
2637 /// ## Iterators of known length
2638 ///
2639 /// When your `Iterator` implements `TrustedLen` and is of an exact size,
2640 /// a single allocation will be made for the `Rc<[T]>`. For example:
2641 ///
2642 /// ```rust
2643 /// # use std::rc::Rc;
2644 /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
2645 /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
2646 /// ```
2647 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
2648 ToRcSlice::to_rc_slice(iter.into_iter())
2649 }
2650}
2651
2652/// Specialization trait used for collecting into `Rc<[T]>`.
2653#[cfg(not(no_global_oom_handling))]
2654trait ToRcSlice<T>: Iterator<Item = T> + Sized {
2655 fn to_rc_slice(self) -> Rc<[T]>;
2656}
2657
2658#[cfg(not(no_global_oom_handling))]
2659impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
2660 default fn to_rc_slice(self) -> Rc<[T]> {
2661 self.collect::<Vec<T>>().into()
2662 }
2663}
2664
2665#[cfg(not(no_global_oom_handling))]
2666impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
2667 fn to_rc_slice(self) -> Rc<[T]> {
2668 // This is the case for a `TrustedLen` iterator.
2669 let (low, high) = self.size_hint();
2670 if let Some(high) = high {
2671 debug_assert_eq!(
2672 low,
2673 high,
2674 "TrustedLen iterator's size hint is not exact: {:?}",
2675 (low, high)
2676 );
2677
2678 unsafe {
                // SAFETY: We need the iterator to have an exact length, and
                // `TrustedLen` with `low == high` (checked above) guarantees it.
2680 Rc::from_iter_exact(self, low)
2681 }
2682 } else {
2683 // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
2684 // length exceeding `usize::MAX`.
2685 // The default implementation would collect into a vec which would panic.
2686 // Thus we panic here immediately without invoking `Vec` code.
2687 panic!("capacity overflow");
2688 }
2689 }
2690}
2691
2692/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
2693/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
2694/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
2695///
2696/// Since a `Weak` reference does not count towards ownership, it will not
2697/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
2698/// guarantees about the value still being present. Thus it may return [`None`]
2699/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
2700/// itself (the backing store) from being deallocated.
2701///
2702/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
2703/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
2704/// prevent circular references between [`Rc`] pointers, since mutual owning references
2705/// would never allow either [`Rc`] to be dropped. For example, a tree could
2706/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
2707/// pointers from children back to their parents.
2708///
2709/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
2710///
2711/// [`upgrade`]: Weak::upgrade
2712#[stable(feature = "rc_weak", since = "1.4.0")]
2713#[cfg_attr(not(test), rustc_diagnostic_item = "RcWeak")]
2714pub struct Weak<
2715 T: ?Sized,
2716 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
2717> {
2718 // This is a `NonNull` to allow optimizing the size of this type in enums,
2719 // but it is not necessarily a valid pointer.
2720 // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
2721 // to allocate space on the heap. That's not a value a real pointer
2722 // will ever have because RcBox has alignment at least 2.
2723 // This is only possible when `T: Sized`; unsized `T` never dangle.
2724 ptr: NonNull<RcBox<T>>,
2725 alloc: A,
2726}
2727
2728#[stable(feature = "rc_weak", since = "1.4.0")]
2729impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
2730#[stable(feature = "rc_weak", since = "1.4.0")]
2731impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}
2732
2733#[unstable(feature = "coerce_unsized", issue = "18598")]
2734impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}
2735
2736#[unstable(feature = "dispatch_from_dyn", issue = "none")]
2737impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
2738
2739impl<T> Weak<T> {
2740 /// Constructs a new `Weak<T>`, without allocating any memory.
2741 /// Calling [`upgrade`] on the return value always gives [`None`].
2742 ///
2743 /// [`upgrade`]: Weak::upgrade
2744 ///
2745 /// # Examples
2746 ///
2747 /// ```
2748 /// use std::rc::Weak;
2749 ///
2750 /// let empty: Weak<i64> = Weak::new();
2751 /// assert!(empty.upgrade().is_none());
2752 /// ```
2753 #[inline]
2754 #[stable(feature = "downgraded_weak", since = "1.10.0")]
2755 #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
2756 #[must_use]
2757 pub const fn new() -> Weak<T> {
2758 Weak {
2759 ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) },
2760 alloc: Global,
2761 }
2762 }
2763}
2764
2765impl<T, A: Allocator> Weak<T, A> {
    /// Constructs a new `Weak<T>` in the provided allocator, without allocating
    /// any memory.
2768 /// Calling [`upgrade`] on the return value always gives [`None`].
2769 ///
2770 /// [`upgrade`]: Weak::upgrade
2771 ///
2772 /// # Examples
2773 ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64, _> = Weak::new_in(System);
    /// assert!(empty.upgrade().is_none());
    /// ```
2780 #[inline]
2781 #[unstable(feature = "allocator_api", issue = "32838")]
2782 pub fn new_in(alloc: A) -> Weak<T, A> {
2783 Weak {
            ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) },
2785 alloc,
2786 }
2787 }
2788}
2789
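/// Returns `true` if the pointer holds the sentinel address (`usize::MAX`) that
/// `Weak::new` stores in place of a real allocation.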
2790pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
2791 (ptr.cast::<()>()).addr() == usize::MAX
2792}
2793
2794/// Helper type to allow accessing the reference counts without
2795/// making any assertions about the data field.
2796struct WeakInner<'a> {
2797 weak: &'a Cell<usize>,
2798 strong: &'a Cell<usize>,
2799}
2800
2801impl<T: ?Sized> Weak<T> {
2802 /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
2803 ///
2804 /// This can be used to safely get a strong reference (by calling [`upgrade`]
2805 /// later) or to deallocate the weak count by dropping the `Weak<T>`.
2806 ///
2807 /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
2808 /// as these don't own anything; the method still works on them).
2809 ///
2810 /// # Safety
2811 ///
2812 /// The pointer must have originated from the [`into_raw`] and must still own its potential
2813 /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
2814 ///
2815 /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
2816 /// takes ownership of one weak reference currently represented as a raw pointer (the weak
2817 /// count is not modified by this operation) and therefore it must be paired with a previous
2818 /// call to [`into_raw`].
2819 ///
2820 /// # Examples
2821 ///
2822 /// ```
2823 /// use std::rc::{Rc, Weak};
2824 ///
2825 /// let strong = Rc::new("hello".to_owned());
2826 ///
2827 /// let raw_1 = Rc::downgrade(&strong).into_raw();
2828 /// let raw_2 = Rc::downgrade(&strong).into_raw();
2829 ///
2830 /// assert_eq!(2, Rc::weak_count(&strong));
2831 ///
2832 /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
2833 /// assert_eq!(1, Rc::weak_count(&strong));
2834 ///
2835 /// drop(strong);
2836 ///
2837 /// // Decrement the last weak count.
2838 /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
2839 /// ```
2840 ///
2841 /// [`into_raw`]: Weak::into_raw
2842 /// [`upgrade`]: Weak::upgrade
2843 /// [`new`]: Weak::new
2844 #[inline]
2845 #[stable(feature = "weak_into_raw", since = "1.45.0")]
2846 pub unsafe fn from_raw(ptr: *const T) -> Self {
2847 unsafe { Self::from_raw_in(ptr, Global) }
2848 }
2849}
2850
2851impl<T: ?Sized, A: Allocator> Weak<T, A> {
2852 /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
2853 ///
2854 /// The pointer is valid only if there are some strong references. The pointer may be dangling,
2855 /// unaligned or even [`null`] otherwise.
2856 ///
2857 /// # Examples
2858 ///
2859 /// ```
2860 /// use std::rc::Rc;
2861 /// use std::ptr;
2862 ///
2863 /// let strong = Rc::new("hello".to_owned());
2864 /// let weak = Rc::downgrade(&strong);
2865 /// // Both point to the same object
2866 /// assert!(ptr::eq(&*strong, weak.as_ptr()));
2867 /// // The strong here keeps it alive, so we can still access the object.
2868 /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
2869 ///
2870 /// drop(strong);
2871 /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
2872 /// // undefined behaviour.
2873 /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
2874 /// ```
2875 ///
2876 /// [`null`]: ptr::null
2877 #[must_use]
2878 #[stable(feature = "rc_as_ptr", since = "1.45.0")]
2879 pub fn as_ptr(&self) -> *const T {
2880 let ptr: *mut RcBox<T> = NonNull::as_ptr(self.ptr);
2881
2882 if is_dangling(ptr) {
2883 // If the pointer is dangling, we return the sentinel directly. This cannot be
2884 // a valid payload address, as the payload is at least as aligned as RcBox (usize).
2885 ptr as *const T
2886 } else {
2887 // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
2888 // The payload may be dropped at this point, and we have to maintain provenance,
2889 // so use raw pointer manipulation.
2890 unsafe { ptr::addr_of_mut!((*ptr).value) }
2891 }
2892 }
2893
2894 /// Consumes the `Weak<T>` and turns it into a raw pointer.
2895 ///
2896 /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
2897 /// one weak reference (the weak count is not modified by this operation). It can be turned
2898 /// back into the `Weak<T>` with [`from_raw`].
2899 ///
2900 /// The same restrictions of accessing the target of the pointer as with
2901 /// [`as_ptr`] apply.
2902 ///
2903 /// # Examples
2904 ///
2905 /// ```
2906 /// use std::rc::{Rc, Weak};
2907 ///
2908 /// let strong = Rc::new("hello".to_owned());
2909 /// let weak = Rc::downgrade(&strong);
2910 /// let raw = weak.into_raw();
2911 ///
2912 /// assert_eq!(1, Rc::weak_count(&strong));
2913 /// assert_eq!("hello", unsafe { &*raw });
2914 ///
2915 /// drop(unsafe { Weak::from_raw(raw) });
2916 /// assert_eq!(0, Rc::weak_count(&strong));
2917 /// ```
2918 ///
2919 /// [`from_raw`]: Weak::from_raw
2920 /// [`as_ptr`]: Weak::as_ptr
2921 #[must_use = "`self` will be dropped if the result is not used"]
2922 #[stable(feature = "weak_into_raw", since = "1.45.0")]
2923 pub fn into_raw(self) -> *const T {
2924 let result = self.as_ptr();
2925 mem::forget(self);
2926 result
2927 }
2928
    /// Consumes the `Weak<T>`, turning it into a raw pointer and also returning the allocator.
2930 ///
2931 /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
2932 /// one weak reference (the weak count is not modified by this operation). It can be turned
2933 /// back into the `Weak<T>` with [`from_raw`].
2934 ///
2935 /// The same restrictions of accessing the target of the pointer as with
2936 /// [`as_ptr`] apply.
2937 ///
2938 /// # Examples
2939 ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::{Rc, Weak};
    ///
    /// let strong = Rc::new("hello".to_owned());
    /// let weak = Rc::downgrade(&strong);
    /// let (raw, _alloc) = weak.into_raw_and_alloc();
    ///
    /// assert_eq!(1, Rc::weak_count(&strong));
    /// assert_eq!("hello", unsafe { &*raw });
    ///
    /// drop(unsafe { Weak::from_raw(raw) });
    /// assert_eq!(0, Rc::weak_count(&strong));
2952 /// ```
2953 ///
2954 /// [`from_raw`]: Weak::from_raw
2955 /// [`as_ptr`]: Weak::as_ptr
2956 #[inline]
2957 #[unstable(feature = "allocator_api", issue = "32838")]
2958 pub fn into_raw_and_alloc(self) -> (*const T, A)
2959 where
2960 A: Clone,
2961 {
2962 let result = self.as_ptr();
2963 let alloc = self.alloc.clone();
2964 mem::forget(self);
2965 (result, alloc)
2966 }
2967
2968 /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
2969 ///
2970 /// This can be used to safely get a strong reference (by calling [`upgrade`]
2971 /// later) or to deallocate the weak count by dropping the `Weak<T>`.
2972 ///
2973 /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
2974 /// as these don't own anything; the method still works on them).
2975 ///
2976 /// # Safety
2977 ///
2978 /// The pointer must have originated from the [`into_raw`] and must still own its potential
2979 /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
2980 ///
2981 /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
2982 /// takes ownership of one weak reference currently represented as a raw pointer (the weak
2983 /// count is not modified by this operation) and therefore it must be paired with a previous
2984 /// call to [`into_raw`].
2985 ///
2986 /// # Examples
2987 ///
2988 /// ```
2989 /// use std::rc::{Rc, Weak};
2990 ///
2991 /// let strong = Rc::new("hello".to_owned());
2992 ///
2993 /// let raw_1 = Rc::downgrade(&strong).into_raw();
2994 /// let raw_2 = Rc::downgrade(&strong).into_raw();
2995 ///
2996 /// assert_eq!(2, Rc::weak_count(&strong));
2997 ///
2998 /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
2999 /// assert_eq!(1, Rc::weak_count(&strong));
3000 ///
3001 /// drop(strong);
3002 ///
3003 /// // Decrement the last weak count.
3004 /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3005 /// ```
3006 ///
3007 /// [`into_raw`]: Weak::into_raw
3008 /// [`upgrade`]: Weak::upgrade
3009 /// [`new`]: Weak::new
3010 #[inline]
3011 #[unstable(feature = "allocator_api", issue = "32838")]
3012 pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
3013 // See Weak::as_ptr for context on how the input pointer is derived.
3014
3015 let ptr = if is_dangling(ptr) {
3016 // This is a dangling Weak.
3017 ptr as *mut RcBox<T>
3018 } else {
3019 // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
3020 // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
3021 let offset = unsafe { data_offset(ptr) };
3022 // Thus, we reverse the offset to get the whole RcBox.
3023 // SAFETY: the pointer originated from a Weak, so this offset is safe.
3024 unsafe { ptr.byte_sub(offset) as *mut RcBox<T> }
3025 };
3026
3027 // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
3028 Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
3029 }
3030
3031 /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
3032 /// dropping of the inner value if successful.
3033 ///
3034 /// Returns [`None`] if the inner value has since been dropped.
3035 ///
3036 /// # Examples
3037 ///
3038 /// ```
3039 /// use std::rc::Rc;
3040 ///
3041 /// let five = Rc::new(5);
3042 ///
3043 /// let weak_five = Rc::downgrade(&five);
3044 ///
3045 /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
3046 /// assert!(strong_five.is_some());
3047 ///
3048 /// // Destroy all strong pointers.
3049 /// drop(strong_five);
3050 /// drop(five);
3051 ///
3052 /// assert!(weak_five.upgrade().is_none());
3053 /// ```
3054 #[must_use = "this returns a new `Rc`, \
3055 without modifying the original weak pointer"]
3056 #[stable(feature = "rc_weak", since = "1.4.0")]
3057 pub fn upgrade(&self) -> Option<Rc<T, A>>
3058 where
3059 A: Clone,
3060 {
3061 let inner = self.inner()?;
3062
3063 if inner.strong() == 0 {
3064 None
3065 } else {
3066 unsafe {
3067 inner.inc_strong();
3068 Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
3069 }
3070 }
3071 }
3072
3073 /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
3074 ///
3075 /// If `self` was created using [`Weak::new`], this will return 0.
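    ///
    /// # Examples
    ///
    /// A minimal illustration:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    ///
    /// assert_eq!(1, weak_five.strong_count());
    /// drop(five);
    /// assert_eq!(0, weak_five.strong_count());
    /// ```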
3076 #[must_use]
3077 #[stable(feature = "weak_counts", since = "1.41.0")]
3078 pub fn strong_count(&self) -> usize {
3079 if let Some(inner) = self.inner() { inner.strong() } else { 0 }
3080 }
3081
3082 /// Gets the number of `Weak` pointers pointing to this allocation.
3083 ///
3084 /// If no strong pointers remain, this will return zero.
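    ///
    /// # Examples
    ///
    /// A minimal illustration:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.weak_count());
    ///
    /// let also_weak = weak_five.clone();
    /// assert_eq!(2, also_weak.weak_count());
    ///
    /// drop(five);
    /// // No strong pointers remain.
    /// assert_eq!(0, weak_five.weak_count());
    /// ```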
3085 #[must_use]
3086 #[stable(feature = "weak_counts", since = "1.41.0")]
3087 pub fn weak_count(&self) -> usize {
3088 if let Some(inner) = self.inner() {
3089 if inner.strong() > 0 {
3090 inner.weak() - 1 // subtract the implicit weak ptr
3091 } else {
3092 0
3093 }
3094 } else {
3095 0
3096 }
3097 }
3098
    /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`
    /// (i.e., when this `Weak` was created by `Weak::new`).
3101 #[inline]
3102 fn inner(&self) -> Option<WeakInner<'_>> {
3103 if is_dangling(self.ptr.as_ptr()) {
3104 None
3105 } else {
3106 // We are careful to *not* create a reference covering the "data" field, as
3107 // the field may be mutated concurrently (for example, if the last `Rc`
3108 // is dropped, the data field will be dropped in-place).
3109 Some(unsafe {
3110 let ptr = self.ptr.as_ptr();
3111 WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
3112 })
3113 }
3114 }
3115
3116 /// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if
3117 /// both don't point to any allocation (because they were created with `Weak::new()`). However,
3118 /// this function ignores the metadata of `dyn Trait` pointers.
3119 ///
3120 /// # Notes
3121 ///
    /// Since this compares pointers, any two `Weak`s created by `Weak::new()` will
    /// compare equal to each other, even though they don't point to any allocation.
3124 ///
3125 /// # Examples
3126 ///
3127 /// ```
3128 /// use std::rc::Rc;
3129 ///
3130 /// let first_rc = Rc::new(5);
3131 /// let first = Rc::downgrade(&first_rc);
3132 /// let second = Rc::downgrade(&first_rc);
3133 ///
3134 /// assert!(first.ptr_eq(&second));
3135 ///
3136 /// let third_rc = Rc::new(5);
3137 /// let third = Rc::downgrade(&third_rc);
3138 ///
3139 /// assert!(!first.ptr_eq(&third));
3140 /// ```
3141 ///
3142 /// Comparing `Weak::new`.
3143 ///
3144 /// ```
3145 /// use std::rc::{Rc, Weak};
3146 ///
3147 /// let first = Weak::new();
3148 /// let second = Weak::new();
3149 /// assert!(first.ptr_eq(&second));
3150 ///
3151 /// let third_rc = Rc::new(());
3152 /// let third = Rc::downgrade(&third_rc);
3153 /// assert!(!first.ptr_eq(&third));
3154 /// ```
3155 #[inline]
3156 #[must_use]
3157 #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
3158 pub fn ptr_eq(&self, other: &Self) -> bool {
3159 ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
3160 }
3161}
3162
3163#[stable(feature = "rc_weak", since = "1.4.0")]
3164unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
3165 /// Drops the `Weak` pointer.
3166 ///
3167 /// # Examples
3168 ///
3169 /// ```
3170 /// use std::rc::{Rc, Weak};
3171 ///
3172 /// struct Foo;
3173 ///
3174 /// impl Drop for Foo {
3175 /// fn drop(&mut self) {
3176 /// println!("dropped!");
3177 /// }
3178 /// }
3179 ///
3180 /// let foo = Rc::new(Foo);
3181 /// let weak_foo = Rc::downgrade(&foo);
3182 /// let other_weak_foo = Weak::clone(&weak_foo);
3183 ///
3184 /// drop(weak_foo); // Doesn't print anything
3185 /// drop(foo); // Prints "dropped!"
3186 ///
3187 /// assert!(other_weak_foo.upgrade().is_none());
3188 /// ```
3189 fn drop(&mut self) {
3190 let inner = if let Some(inner) = self.inner() { inner } else { return };
3191
3192 inner.dec_weak();
3193 // the weak count starts at 1, and will only go to zero if all
3194 // the strong pointers have disappeared.
3195 if inner.weak() == 0 {
3196 unsafe {
3197 self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
3198 }
3199 }
3200 }
3201}
3202
3203#[stable(feature = "rc_weak", since = "1.4.0")]
3204impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
3205 /// Makes a clone of the `Weak` pointer that points to the same allocation.
3206 ///
3207 /// # Examples
3208 ///
3209 /// ```
3210 /// use std::rc::{Rc, Weak};
3211 ///
3212 /// let weak_five = Rc::downgrade(&Rc::new(5));
3213 ///
3214 /// let _ = Weak::clone(&weak_five);
3215 /// ```
3216 #[inline]
3217 fn clone(&self) -> Weak<T, A> {
        if let Some(inner) = self.inner() {
3219 inner.inc_weak()
3220 }
3221 Weak { ptr: self.ptr, alloc: self.alloc.clone() }
3222 }
3223}
3224
3225#[stable(feature = "rc_weak", since = "1.4.0")]
3226impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
3227 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3228 write!(f, "(Weak)")
3229 }
3230}
3231
3232#[stable(feature = "downgraded_weak", since = "1.10.0")]
3233impl<T> Default for Weak<T> {
3234 /// Constructs a new `Weak<T>`, without allocating any memory.
3235 /// Calling [`upgrade`] on the return value always gives [`None`].
3236 ///
3237 /// [`upgrade`]: Weak::upgrade
3238 ///
3239 /// # Examples
3240 ///
3241 /// ```
3242 /// use std::rc::Weak;
3243 ///
3244 /// let empty: Weak<i64> = Default::default();
3245 /// assert!(empty.upgrade().is_none());
3246 /// ```
3247 fn default() -> Weak<T> {
3248 Weak::new()
3249 }
3250}
3251
// NOTE: If you mem::forget Rcs (or Weaks), the ref-count can overflow, and then
// you can free the allocation while outstanding Rcs (or Weaks) exist. To guard
// against this, `inc_strong` and `inc_weak` below abort on overflow instead of
// wrapping. We abort because this is such a degenerate scenario that we don't
// care about what happens -- no real program should ever experience this.
//
// This should have negligible overhead since you don't actually need to
// clone these much in Rust thanks to ownership and move-semantics.
3260
3261#[doc(hidden)]
3262trait RcInnerPtr {
3263 fn weak_ref(&self) -> &Cell<usize>;
3264 fn strong_ref(&self) -> &Cell<usize>;
3265
3266 #[inline]
3267 fn strong(&self) -> usize {
3268 self.strong_ref().get()
3269 }
3270
3271 #[inline]
3272 fn inc_strong(&self) {
3273 let strong = self.strong();
3274
3275 // We insert an `assume` here to hint LLVM at an otherwise
3276 // missed optimization.
3277 // SAFETY: The reference count will never be zero when this is
3278 // called.
3279 unsafe {
3280 hint::assert_unchecked(strong != 0);
3281 }
3282
3283 let strong = strong.wrapping_add(1);
3284 self.strong_ref().set(strong);
3285
3286 // We want to abort on overflow instead of dropping the value.
3287 // Checking for overflow after the store instead of before
3288 // allows for slightly better code generation.
3289 if core::intrinsics::unlikely(strong == 0) {
3290 abort();
3291 }
3292 }
3293
3294 #[inline]
3295 fn dec_strong(&self) {
3296 self.strong_ref().set(self.strong() - 1);
3297 }
3298
3299 #[inline]
3300 fn weak(&self) -> usize {
3301 self.weak_ref().get()
3302 }
3303
3304 #[inline]
3305 fn inc_weak(&self) {
3306 let weak = self.weak();
3307
3308 // We insert an `assume` here to hint LLVM at an otherwise
3309 // missed optimization.
3310 // SAFETY: The reference count will never be zero when this is
3311 // called.
3312 unsafe {
3313 hint::assert_unchecked(weak != 0);
3314 }
3315
3316 let weak = weak.wrapping_add(1);
3317 self.weak_ref().set(weak);
3318
3319 // We want to abort on overflow instead of dropping the value.
3320 // Checking for overflow after the store instead of before
3321 // allows for slightly better code generation.
3322 if core::intrinsics::unlikely(weak == 0) {
3323 abort();
3324 }
3325 }
3326
3327 #[inline]
3328 fn dec_weak(&self) {
3329 self.weak_ref().set(self.weak() - 1);
3330 }
3331}
3332
3333impl<T: ?Sized> RcInnerPtr for RcBox<T> {
3334 #[inline(always)]
3335 fn weak_ref(&self) -> &Cell<usize> {
3336 &self.weak
3337 }
3338
3339 #[inline(always)]
3340 fn strong_ref(&self) -> &Cell<usize> {
3341 &self.strong
3342 }
3343}
3344
3345impl<'a> RcInnerPtr for WeakInner<'a> {
3346 #[inline(always)]
3347 fn weak_ref(&self) -> &Cell<usize> {
3348 self.weak
3349 }
3350
3351 #[inline(always)]
3352 fn strong_ref(&self) -> &Cell<usize> {
3353 self.strong
3354 }
3355}
3356
3357#[stable(feature = "rust1", since = "1.0.0")]
3358impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
3359 fn borrow(&self) -> &T {
3360 &**self
3361 }
3362}
3363
3364#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
3365impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
3366 fn as_ref(&self) -> &T {
3367 &**self
3368 }
3369}
3370
3371#[stable(feature = "pin", since = "1.33.0")]
3372impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
3373
3374/// Get the offset within an `RcBox` for the payload behind a pointer.
3375///
3376/// # Safety
3377///
3378/// The pointer must point to (and have valid metadata for) a previously
3379/// valid instance of T, but the T is allowed to be dropped.
3380unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
3381 // Align the unsized value to the end of the RcBox.
3382 // Because RcBox is repr(C), it will always be the last field in memory.
3383 // SAFETY: since the only unsized types possible are slices, trait objects,
3384 // and extern types, the input safety requirement is currently enough to
3385 // satisfy the requirements of align_of_val_raw; this is an implementation
3386 // detail of the language that must not be relied upon outside of std.
    unsafe { data_offset_align(align_of_val_raw(ptr)) }
3388}
3389
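// For example, with the two `usize` reference counts at the start of `RcBox`, a
// payload whose alignment is at most `align_of::<usize>()` needs no padding, so
// its offset is exactly `2 * size_of::<usize>()` bytes.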
3390#[inline]
3391fn data_offset_align(align: usize) -> usize {
    let layout = Layout::new::<RcBox<()>>();
3393 layout.size() + layout.padding_needed_for(align)
3394}
3395
3396/// A uniquely owned `Rc`
3397///
3398/// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong
3399/// reference. Multiple weak pointers can be created, but attempts to upgrade those to strong
3400/// references will fail unless the `UniqueRc` they point to has been converted into a regular `Rc`.
3401///
3402/// Because they are uniquely owned, the contents of a `UniqueRc` can be freely mutated. A common
3403/// use case is to have an object be mutable during its initialization phase but then have it become
3404/// immutable and converted to a normal `Rc`.
3405///
3406/// This can be used as a flexible way to create cyclic data structures, as in the example below.
3407///
3408/// ```
3409/// #![feature(unique_rc_arc)]
3410/// use std::rc::{Rc, Weak, UniqueRc};
3411///
3412/// struct Gadget {
3413/// #[allow(dead_code)]
3414/// me: Weak<Gadget>,
3415/// }
3416///
3417/// fn create_gadget() -> Option<Rc<Gadget>> {
3418/// let mut rc = UniqueRc::new(Gadget {
3419/// me: Weak::new(),
3420/// });
3421/// rc.me = UniqueRc::downgrade(&rc);
3422/// Some(UniqueRc::into_rc(rc))
3423/// }
3424///
3425/// create_gadget().unwrap();
3426/// ```
3427///
3428/// An advantage of using `UniqueRc` over [`Rc::new_cyclic`] to build cyclic data structures is that
3429/// [`Rc::new_cyclic`]'s `data_fn` parameter cannot be async or return a [`Result`]. As shown in the
3430/// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
3431/// including fallible or async constructors.
3432#[unstable(feature = "unique_rc_arc", issue = "112566")]
3433#[derive(Debug)]
3434pub struct UniqueRc<T> {
3435 ptr: NonNull<RcBox<T>>,
3436 phantom: PhantomData<RcBox<T>>,
3437}
3438
3439impl<T> UniqueRc<T> {
3440 /// Creates a new `UniqueRc`
3441 ///
3442 /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
3443 /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
3444 /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
3445 /// point to the new [`Rc`].
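    ///
    /// # Examples
    ///
    /// A minimal sketch of the create-then-freeze pattern:
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let mut rc = UniqueRc::new(String::new());
    /// // While uniquely owned, the contents can be mutated freely.
    /// rc.push_str("hello");
    /// let rc = UniqueRc::into_rc(rc);
    /// assert_eq!(*rc, "hello");
    /// ```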
3446 #[cfg(not(no_global_oom_handling))]
3447 #[unstable(feature = "unique_rc_arc", issue = "112566")]
3448 pub fn new(value: T) -> Self {
3449 Self {
3450 ptr: Box::leak(Box::new(RcBox {
3451 strong: Cell::new(0),
3452 // keep one weak reference so if all the weak pointers that are created are dropped
3453 // the UniqueRc still stays valid.
3454 weak: Cell::new(1),
3455 value,
3456 }))
3457 .into(),
3458 phantom: PhantomData,
3459 }
3460 }
3461
3462 /// Creates a new weak reference to the `UniqueRc`
3463 ///
3464 /// Attempting to upgrade this weak reference will fail before the `UniqueRc` has been converted
    /// to an [`Rc`] using [`UniqueRc::into_rc`].
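    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let rc = UniqueRc::new(5);
    /// let weak = UniqueRc::downgrade(&rc);
    /// // Upgrading fails while the value is still uniquely owned.
    /// assert!(weak.upgrade().is_none());
    /// ```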
3466 #[unstable(feature = "unique_rc_arc", issue = "112566")]
3467 pub fn downgrade(this: &Self) -> Weak<T> {
3468 // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
3469 // one strong reference before converting to a regular Rc.
3470 unsafe {
3471 this.ptr.as_ref().inc_weak();
3472 }
3473 Weak { ptr: this.ptr, alloc: Global }
3474 }
3475
3476 /// Converts the `UniqueRc` into a regular [`Rc`]
3477 ///
3478 /// This consumes the `UniqueRc` and returns a regular [`Rc`] that contains the `value` that
3479 /// is passed to `into_rc`.
3480 ///
3481 /// Any weak references created before this method is called can now be upgraded to strong
3482 /// references.
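    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::{Rc, UniqueRc};
    ///
    /// let unique = UniqueRc::new(5);
    /// let weak = UniqueRc::downgrade(&unique);
    /// assert!(weak.upgrade().is_none());
    ///
    /// let rc: Rc<i32> = UniqueRc::into_rc(unique);
    /// // The weak reference created earlier can now be upgraded.
    /// assert_eq!(*weak.upgrade().unwrap(), 5);
    /// assert_eq!(*rc, 5);
    /// ```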
3483 #[unstable(feature = "unique_rc_arc", issue = "112566")]
3484 pub fn into_rc(this: Self) -> Rc<T> {
3485 let mut this = ManuallyDrop::new(this);
3486 // SAFETY: This pointer was allocated at creation time so we know it is valid.
3487 unsafe {
3488 // Convert our weak reference into a strong reference
3489 this.ptr.as_mut().strong.set(1);
3490 Rc::from_inner(this.ptr)
3491 }
3492 }
3493}
3494
3495#[unstable(feature = "unique_rc_arc", issue = "112566")]
3496impl<T> Deref for UniqueRc<T> {
3497 type Target = T;
3498
3499 fn deref(&self) -> &T {
3500 // SAFETY: This pointer was allocated at creation time so we know it is valid.
3501 unsafe { &self.ptr.as_ref().value }
3502 }
3503}
3504
3505#[unstable(feature = "unique_rc_arc", issue = "112566")]
3506impl<T> DerefMut for UniqueRc<T> {
3507 fn deref_mut(&mut self) -> &mut T {
3508 // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we
3509 // have unique ownership and therefore it's safe to make a mutable reference because
3510 // `UniqueRc` owns the only strong reference to itself.
3511 unsafe { &mut (*self.ptr.as_ptr()).value }
3512 }
3513}
3514
3515#[unstable(feature = "unique_rc_arc", issue = "112566")]
3516unsafe impl<#[may_dangle] T> Drop for UniqueRc<T> {
3517 fn drop(&mut self) {
3518 unsafe {
3519 // destroy the contained object
            ptr::drop_in_place(DerefMut::deref_mut(self));
3521
3522 // remove the implicit "strong weak" pointer now that we've destroyed the contents.
3523 self.ptr.as_ref().dec_weak();
3524
3525 if self.ptr.as_ref().weak() == 0 {
3526 Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
3527 }
3528 }
3529 }
3530}
3531