1 | 1 | use core::alloc::{AllocError, Allocator};
2 | 2 | use core::cell::UnsafeCell;
3 | 3 | use core::clone::CloneToUninit;
| 4 | +#[cfg(not(no_global_oom_handling))]
| 5 | +use core::iter::TrustedLen;
4 | 6 | use core::marker::PhantomData;
5 | 7 | #[cfg(not(no_global_oom_handling))]
6 | 8 | use core::mem::{self, SizedTypeProperties};
@@ -623,6 +625,139 @@ impl<T, A> RawRc<MaybeUninit<T>, A> {
623 | 625 | }
624 | 626 | }
625 | 627 |
| 628 | +impl<T, A> RawRc<[T], A> { |
| 629 | + #[cfg(not(no_global_oom_handling))] |
| 630 | + fn from_trusted_len_iter<I>(iter: I) -> Self |
| 631 | + where |
| 632 | + A: Allocator + Default, |
| 633 | + I: TrustedLen<Item = T>, |
| 634 | + { |
| 635 | + /// Returns a drop guard that runs the destructors of the elements in `head..tail` when
| 636 | + /// it is dropped.
| 637 | + ///
| 638 | + /// # Safety
| 639 | + ///
| 640 | + /// - `head..tail` must describe a valid consecutive slice of initialized `T` values when
| 641 | + /// the destructor of the returned guard is called.
| 642 | + /// - After the guard's destructor has run, the values must not be accessed anymore.
| 643 | + unsafe fn drop_range_on_drop<T>( |
| 644 | + head: NonNull<T>, |
| 645 | + tail: NonNull<T>, |
| 646 | + ) -> impl DerefMut<Target = (NonNull<T>, NonNull<T>)> { |
| 647 | + // SAFETY: The caller guarantees that `head..tail` describes a valid consecutive slice of initialized `T` values when this guard's destructor runs.
| 648 | + DropGuard::new((head, tail), |(head, tail)| unsafe { |
| 649 | + let length = tail.offset_from_unsigned(head); |
| 650 | + |
| 651 | + NonNull::<[T]>::slice_from_raw_parts(head, length).drop_in_place(); |
| 652 | + }) |
| 653 | + } |
| 654 | + |
| 655 | + let (length, Some(high)) = iter.size_hint() else { |
| 656 | + // The `TrustedLen` contract guarantees that an upper bound of `None` implies an
| 657 | + // iterator length exceeding `usize::MAX`.
| 658 | + // The default implementation would collect into a `Vec`, which would panic.
| 659 | + // Thus we panic here immediately without invoking any `Vec` code.
| 660 | + panic!("capacity overflow"); |
| 661 | + }; |
| 662 | + |
| 663 | + debug_assert_eq!( |
| 664 | + length, |
| 665 | + high, |
| 666 | + "TrustedLen iterator's size hint is not exact: {:?}", |
| 667 | + (length, high) |
| 668 | + ); |
| 669 | + |
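|  | + // Note: `RcLayout::new_array::<T>(length)` presumably computes the layout of a
|  | + // reference-counted allocation whose value part is `length` values of `T` (header plus
|  | + // array; see `RcLayout`).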
| 670 | + let rc_layout = RcLayout::new_array::<T>(length); |
| 671 | + |
| 672 | + let (ptr, alloc) = rc_alloc::allocate_with::<A, _, 1>(rc_layout, |ptr| { |
| 673 | + let ptr = ptr.as_ptr().cast::<T>(); |
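|  | + // The guard initially covers the empty range `head == tail`; `tail` is advanced past
|  | + // each element as it is written, so if `iter` panics, exactly the elements written so
|  | + // far are dropped.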
| 674 | + let mut guard = unsafe { drop_range_on_drop::<T>(ptr, ptr) }; |
| 675 | + |
| 676 | + // SAFETY: `iter` is `TrustedLen`, so it yields exactly `length` elements and every
| 677 | + // write below stays within the allocated buffer.
| 678 | + iter.for_each(|value| unsafe { |
| 679 | + guard.1.write(value); |
| 680 | + guard.1 = guard.1.add(1); |
| 681 | + }); |
| 682 | + |
| 683 | + mem::forget(guard); |
| 684 | + }); |
| 685 | + |
| 686 | + // SAFETY: We have written `length` values of type `T` to the buffer, so it is now
| 687 | + // fully initialized.
| 688 | + unsafe { |
| 689 | + Self::from_raw_parts( |
| 690 | + NonNull::slice_from_raw_parts(ptr.as_ptr().cast::<T>(), length), |
| 691 | + alloc, |
| 692 | + ) |
| 693 | + } |
| 694 | + } |
| 695 | + |
| 696 | + fn try_into_array<const N: usize>(self) -> Result<RawRc<[T; N], A>, Self> { |
| 697 | + if unsafe { self.as_ptr().as_ref() }.len() == N { |
| 698 | + Ok(unsafe { self.cast() }) |
| 699 | + } else { |
| 700 | + Err(self) |
| 701 | + } |
| 702 | + } |
| 703 | + |
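|  | + /// # Safety
|  | + ///
|  | + /// `R` must be the `RefCounter` implementation that is used for all other reference-count
|  | + /// operations on this allocation (assumed contract; it is forwarded to `drop::<R>`).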
| 704 | + pub(crate) unsafe fn into_array<const N: usize, R>(self) -> Option<RawRc<[T; N], A>> |
| 705 | + where |
| 706 | + A: Allocator, |
| 707 | + R: RefCounter, |
| 708 | + { |
| 709 | + match self.try_into_array::<N>() { |
| 710 | + Ok(result) => Some(result), |
| 711 | + Err(mut raw_rc) => { |
| 712 | + unsafe { raw_rc.drop::<R>() }; |
| 713 | + |
| 714 | + None |
| 715 | + } |
| 716 | + } |
| 717 | + } |
| 718 | +} |
| 719 | + |
| 720 | +impl<T, A> RawRc<[MaybeUninit<T>], A> { |
| 721 | + #[cfg(not(no_global_oom_handling))] |
| 722 | + pub(crate) fn new_uninit_slice_in(length: usize, alloc: A) -> Self |
| 723 | + where |
| 724 | + A: Allocator, |
| 725 | + { |
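|  | + // The `1` const argument is presumably the initial strong reference count of the new
|  | + // allocation; see `RawWeak::new_uninit_slice_in`.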
| 726 | + unsafe { Self::from_weak(RawWeak::new_uninit_slice_in::<1>(length, alloc)) } |
| 727 | + } |
| 728 | + |
| 729 | + #[cfg(not(no_global_oom_handling))] |
| 730 | + pub(crate) fn new_uninit_slice(length: usize) -> Self |
| 731 | + where |
| 732 | + A: Allocator + Default, |
| 733 | + { |
| 734 | + unsafe { Self::from_weak(RawWeak::new_uninit_slice::<1>(length)) } |
| 735 | + } |
| 736 | + |
| 737 | + #[cfg(not(no_global_oom_handling))] |
| 738 | + pub(crate) fn new_zeroed_slice_in(length: usize, alloc: A) -> Self |
| 739 | + where |
| 740 | + A: Allocator, |
| 741 | + { |
| 742 | + unsafe { Self::from_weak(RawWeak::new_zeroed_slice_in::<1>(length, alloc)) } |
| 743 | + } |
| 744 | + |
| 745 | + #[cfg(not(no_global_oom_handling))] |
| 746 | + pub(crate) fn new_zeroed_slice(length: usize) -> Self |
| 747 | + where |
| 748 | + A: Allocator + Default, |
| 749 | + { |
| 750 | + unsafe { Self::from_weak(RawWeak::new_zeroed_slice::<1>(length)) } |
| 751 | + } |
| 752 | + |
| 753 | + /// # Safety |
| 754 | + /// |
| 755 | + /// All `MaybeUninit<T>` values contained in `self` must be initialized.
| 756 | + pub(crate) unsafe fn assume_init(self) -> RawRc<[T], A> { |
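|  | + // SAFETY (cast): `MaybeUninit<T>` is guaranteed to have the same layout as `T`, and the
|  | + // caller guarantees that every element is initialized; the cast preserves the address
|  | + // and the slice length metadata.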
| 757 | + unsafe { self.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) } |
| 758 | + } |
| 759 | +} |
| 760 | + |
626 | 761 | /// Decrements strong reference count in a reference-counted allocation with a value object that is |
627 | 762 | /// pointed to by `value_ptr`. |
628 | 763 | #[inline] |
|