@@ -2,10 +2,11 @@ use core::alloc::{AllocError, Allocator};
 use core::cell::UnsafeCell;
 use core::clone::CloneToUninit;
 use core::marker::PhantomData;
+use core::mem::DropGuard;
 #[cfg(not(no_global_oom_handling))]
-use core::mem::{self, DropGuard};
+use core::mem::{self, MaybeUninit, SizedTypeProperties};
 #[cfg(not(no_global_oom_handling))]
-use core::ops::DerefMut;
+use core::ops::{ControlFlow, DerefMut, Try};
 #[cfg(not(no_global_oom_handling))]
 use core::ptr;
 use core::ptr::NonNull;
@@ -16,7 +17,7 @@ use crate::raw_rc::MakeMutStrategy;
 use crate::raw_rc::raw_weak;
 use crate::raw_rc::raw_weak::RawWeak;
 #[cfg(not(no_global_oom_handling))]
-use crate::raw_rc::rc_layout::RcLayout;
+use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt};
 use crate::raw_rc::rc_value_pointer::RcValuePointer;
 use crate::raw_rc::{RefCounter, rc_alloc};
 
@@ -366,6 +367,196 @@ where
     }
 }
 
+impl<T, A> RawRc<T, A> {
+    /// # Safety
+    ///
+    /// `weak` must be non-dangling.
+    unsafe fn from_weak_with_value(weak: RawWeak<T, A>, value: T) -> Self {
+        unsafe {
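+            // Initialize the value slot before converting the weak reference into a
+            // strong one; the caller guarantees `weak` is non-dangling, so `as_ptr`
+            // points at a valid, uninitialized slot.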
+            weak.as_ptr().write(value);
+
+            Self::from_weak(weak)
+        }
+    }
+
+    #[inline]
+    pub(crate) fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
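+        // The const argument `1` is the initial strong count: the fresh allocation is
+        // immediately claimed by the `RawRc` built from it, with no extra increment.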
+        RawWeak::try_new_uninit_in::<1>(alloc)
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
+    #[inline]
+    pub(crate) fn try_new(value: T) -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        RawWeak::try_new_uninit::<1>()
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new_in(value: T, alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<1>(alloc), value) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new(value: T) -> Self
+    where
+        A: Allocator + Default,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<1>(), value) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    fn new_with<F>(f: F) -> Self
+    where
+        A: Allocator + Default,
+        F: FnOnce() -> T,
+    {
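+        // Allocate the reference-counted block, initializing the value slot in place
+        // with the result of calling `f`.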
+        let (ptr, alloc) = rc_alloc::allocate_with::<A, _, 1>(T::RC_LAYOUT, |ptr| unsafe {
+            ptr.as_ptr().cast().write(f())
+        });
+
+        unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }
+    }
+
+    /// Attempts to map the value in an `Rc`, reusing the allocation if possible.
+    ///
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn try_map<R, U>(
+        mut self,
+        f: impl FnOnce(&T) -> U,
+    ) -> ControlFlow<U::Residual, RawRc<U::Output, A>>
+    where
+        A: Allocator,
+        R: RefCounter,
+        U: Try,
+    {
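+        // Fast path: if the source and target value types share a layout and this is
+        // the only strong (and weak) reference, write the mapped value back into the
+        // existing allocation instead of allocating a new one.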
+        let result = if T::LAYOUT == U::Output::LAYOUT && unsafe { self.is_unique::<R>() } {
+            let value = unsafe { self.as_ptr().read() };
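+            // `read` moves the value out, so the allocation is reinterpreted as
+            // holding an uninitialized `U::Output` from here on.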
+            let mut allocation = unsafe { self.cast::<MaybeUninit<U::Output>>() };
+
+            // Drop `self` as `RawRc<MaybeUninit<U::Output>, A>` if `f` panics or
+            // returns a failure value.
+            let guard = unsafe { new_rc_guard::<MaybeUninit<U::Output>, A, R>(&mut allocation) };
+
+            let mapped_value = f(&value).branch()?;
+
+            drop(value);
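+            // Success: disarm the guard so the allocation survives to hold the
+            // mapped value.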
+            mem::forget(guard);
+
+            unsafe {
+                allocation.get_mut_unchecked().write(mapped_value);
+
+                allocation.cast()
+            }
+        } else {
+            // Drop `self` if `f` panics or returns a failure value.
+            let guard = unsafe { new_rc_guard::<T, A, R>(&mut self) };
+
+            let mapped_value = f(unsafe { guard.as_ptr().as_ref() }).branch()?;
+
+            drop(guard);
+
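+            // The guard has already released the old strong reference; only the
+            // allocator is still needed, to back the new allocation.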
+            let alloc = self.into_raw_parts().1;
+
+            RawRc::new_in(mapped_value, alloc)
+        };
+
+        ControlFlow::Continue(result)
+    }
+
+    /// Maps the value in a `RawRc`, reusing the allocation if possible.
+    ///
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn map<R, U>(self, f: impl FnOnce(&T) -> U) -> RawRc<U, A>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
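+        // Adapt the infallible closure into a `Try`-returning one so `try_map` can
+        // be reused. `ControlFlow<!, U>` has no `Break` value, so the `match` below
+        // is exhaustive with only the `Continue` arm.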
+        fn wrap_fn<T, U>(f: impl FnOnce(&T) -> U) -> impl FnOnce(&T) -> ControlFlow<!, U> {
+            |x| ControlFlow::Continue(f(x))
+        }
+
+        let f = wrap_fn(f);
+
+        match unsafe { self.try_map::<R, _>(f) } {
+            ControlFlow::Continue(output) => output,
+        }
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn into_inner<R>(self) -> Option<T>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
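+        // Release one strong reference; only when it was the last one may the value
+        // be moved out of the allocation.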
+        let is_last_strong_ref = unsafe { decrement_strong_ref_count::<R>(self.value_ptr()) };
+
+        is_last_strong_ref.then(|| unsafe { self.weak.assume_init_into_inner::<R>() })
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn try_unwrap<R>(self) -> Result<T, RawRc<T, A>>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        unsafe fn inner<R>(value_ptr: RcValuePointer) -> bool
+        where
+            R: RefCounter,
+        {
+            unsafe {
+                R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).try_lock_strong_count()
+            }
+        }
+
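+        // `try_lock_strong_count` succeeds only when this is the last strong
+        // reference, in which case ownership of the value can be claimed.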
+        let is_last_strong_ref = unsafe { inner::<R>(self.value_ptr()) };
+
+        if is_last_strong_ref {
+            Ok(unsafe { self.weak.assume_init_into_inner::<R>() })
+        } else {
+            Err(self)
+        }
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn unwrap_or_clone<R>(self) -> T
+    where
+        T: Clone,
+        A: Allocator,
+        R: RefCounter,
+    {
+        // SAFETY: Caller guarantees `rc` will only be accessed with the same `RefCounter`
+        // implementation.
+        unsafe { self.try_unwrap::<R>() }.unwrap_or_else(|mut rc| {
+            let guard = unsafe { new_rc_guard::<T, A, R>(&mut rc) };
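+            // The guard drops the `RawRc` once the clone finishes, and also if
+            // `Clone::clone` panics.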
+
+            T::clone(unsafe { guard.as_ptr().as_ref() })
+        })
+    }
+}
+
 /// Decrements strong reference count in a reference-counted allocation with a value object that is
 /// pointed to by `value_ptr`.
 #[inline]
@@ -397,3 +588,15 @@ where
         R::is_unique(R::from_raw_counter(&ref_counts.strong), R::from_raw_counter(&ref_counts.weak))
     }
 }
+
+/// Returns a drop guard that calls `RawRc::drop::<R>()` on drop.
+unsafe fn new_rc_guard<'a, T, A, R>(
+    rc: &'a mut RawRc<T, A>,
+) -> DropGuard<&'a mut RawRc<T, A>, impl FnOnce(&'a mut RawRc<T, A>)>
+where
+    T: ?Sized,
+    A: Allocator,
+    R: RefCounter,
+{
+    DropGuard::new(rc, |rc| unsafe { rc.drop::<R>() })
+}