@@ -1,12 +1,13 @@
-use core::alloc::Allocator;
+use core::alloc::{AllocError, Allocator};
 use core::cell::UnsafeCell;
 #[cfg(not(no_global_oom_handling))]
 use core::clone::CloneToUninit;
 use core::marker::PhantomData;
+use core::mem::DropGuard;
 #[cfg(not(no_global_oom_handling))]
-use core::mem::{self, DropGuard};
+use core::mem::{self, MaybeUninit, SizedTypeProperties};
 #[cfg(not(no_global_oom_handling))]
-use core::ops::DerefMut;
+use core::ops::{ControlFlow, DerefMut, Try};
 #[cfg(not(no_global_oom_handling))]
 use core::ptr;
 use core::ptr::NonNull;
@@ -20,7 +21,7 @@ use crate::raw_rc::raw_weak::RawWeak;
 #[cfg(not(no_global_oom_handling))]
 use crate::raw_rc::rc_alloc;
 #[cfg(not(no_global_oom_handling))]
-use crate::raw_rc::rc_layout::RcLayout;
+use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt};
 use crate::raw_rc::rc_value_pointer::RcValuePointer;
 
 /// Base implementation of a strong pointer. `RawRc` does not implement `Drop`; the user should call
@@ -336,6 +337,196 @@ where
     }
 }
 
+impl<T, A> RawRc<T, A> {
+    /// # Safety
+    ///
+    /// `weak` must be non-dangling.
+    unsafe fn from_weak_with_value(weak: RawWeak<T, A>, value: T) -> Self {
+        unsafe {
+            weak.as_ptr().write(value);
+
+            Self::from_weak(weak)
+        }
+    }
+
+    #[inline]
+    pub(crate) fn try_new(value: T) -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        RawWeak::try_new_uninit::<1>()
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
+    #[inline]
+    pub(crate) fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        RawWeak::try_new_uninit_in::<1>(alloc)
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new(value: T) -> Self
+    where
+        A: Allocator + Default,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<1>(), value) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new_in(value: T, alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<1>(alloc), value) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    fn new_with<F>(f: F) -> Self
+    where
+        A: Allocator + Default,
+        F: FnOnce() -> T,
+    {
+        let (ptr, alloc) = rc_alloc::allocate_with::<A, _, 1>(T::RC_LAYOUT, |ptr| unsafe {
+            ptr.as_ptr().cast().write(f())
+        });
+
+        unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }
+    }
+
+    /// Maps the value in a `RawRc`, reusing the allocation if possible.
+    ///
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn map<R, U>(self, f: impl FnOnce(&T) -> U) -> RawRc<U, A>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        fn wrap_fn<T, U>(f: impl FnOnce(&T) -> U) -> impl FnOnce(&T) -> ControlFlow<!, U> {
+            |x| ControlFlow::Continue(f(x))
+        }
+
+        let f = wrap_fn(f);
+
+        match unsafe { self.try_map::<R, _>(f) } {
+            ControlFlow::Continue(output) => output,
+        }
+    }
+
+    /// Attempts to map the value in a `RawRc`, reusing the allocation if possible.
+    ///
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn try_map<R, U>(
+        mut self,
+        f: impl FnOnce(&T) -> U,
+    ) -> ControlFlow<U::Residual, RawRc<U::Output, A>>
+    where
+        A: Allocator,
+        R: RefCounter,
+        U: Try,
+    {
+        let result = if T::LAYOUT == U::Output::LAYOUT && unsafe { self.is_unique::<R>() } {
+            let value = unsafe { self.as_ptr().read() };
+            let mut allocation = unsafe { self.cast::<MaybeUninit<U::Output>>() };
+
+            // Drop `self` as a `RawRc<MaybeUninit<U::Output>, A>` if `f` panics or returns a
+            // failure value.
+            let guard = unsafe { new_rc_guard::<MaybeUninit<U::Output>, A, R>(&mut allocation) };
+
+            let mapped_value = f(&value).branch()?;
+
+            drop(value);
+            mem::forget(guard);
+
+            unsafe {
+                allocation.get_mut_unchecked().write(mapped_value);
+
+                allocation.cast()
+            }
+        } else {
+            // Drop `self` if `f` panics or returns a failure value.
+            let guard = unsafe { new_rc_guard::<T, A, R>(&mut self) };
+
+            let mapped_value = f(unsafe { guard.as_ptr().as_ref() }).branch()?;
+
+            drop(guard);
+
+            let alloc = self.into_raw_parts().1;
+
+            RawRc::new_in(mapped_value, alloc)
+        };
+
+        ControlFlow::Continue(result)
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn into_inner<R>(self) -> Option<T>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        let is_last_strong_ref = unsafe { decrement_strong_ref_count::<R>(self.value_ptr()) };
+
+        is_last_strong_ref.then(|| unsafe { self.weak.assume_init_into_inner::<R>() })
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn try_unwrap<R>(self) -> Result<T, RawRc<T, A>>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        unsafe fn inner<R>(value_ptr: RcValuePointer) -> bool
+        where
+            R: RefCounter,
+        {
+            unsafe {
+                R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).try_lock_strong_count()
+            }
+        }
+
+        let is_last_strong_ref = unsafe { inner::<R>(self.value_ptr()) };
+
+        if is_last_strong_ref {
+            Ok(unsafe { self.weak.assume_init_into_inner::<R>() })
+        } else {
+            Err(self)
+        }
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn unwrap_or_clone<R>(self) -> T
+    where
+        T: Clone,
+        A: Allocator,
+        R: RefCounter,
+    {
+        // SAFETY: Caller guarantees `rc` will only be accessed with the same `RefCounter`
+        // implementation.
+        unsafe { self.try_unwrap::<R>() }.unwrap_or_else(|mut rc| {
+            let guard = unsafe { new_rc_guard::<T, A, R>(&mut rc) };
+
+            T::clone(unsafe { guard.as_ptr().as_ref() })
+        })
+    }
+}
+
 /// Decrements strong reference count in a reference-counted allocation with a value object that is
 /// pointed to by `value_ptr`.
 #[inline]
@@ -367,3 +558,15 @@ where
         R::is_unique(R::from_raw_counter(&ref_counts.strong), R::from_raw_counter(&ref_counts.weak))
     }
 }
+
+/// Returns a drop guard that calls `RawRc::drop::<R>()` on drop.
+unsafe fn new_rc_guard<'a, T, A, R>(
+    rc: &'a mut RawRc<T, A>,
+) -> DropGuard<&'a mut RawRc<T, A>, impl FnOnce(&'a mut RawRc<T, A>)>
+where
+    T: ?Sized,
+    A: Allocator,
+    R: RefCounter,
+{
+    DropGuard::new(rc, |rc| unsafe { rc.drop::<R>() })
+}
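`new_rc_guard` centralizes the cleanup-on-unwind pattern used by `try_map` and `unwrap_or_clone`: the guard borrows the `RawRc` and destroys it on drop unless the success path defuses it with `mem::forget`. `core::mem::DropGuard` is unstable; a rough stable-Rust stand-in is sketched below, with a hypothetical `Guard` type for illustration only (the real `DropGuard` additionally derefs to the wrapped value, which is how `guard.as_ptr()` above works; this stand-in omits that):

use std::mem::{self, ManuallyDrop};

// Owns a value plus a cleanup closure; runs the closure on the value when
// dropped. `mem::forget` defuses it on the success path.
struct Guard<T, F: FnOnce(T)> {
    value: ManuallyDrop<T>,
    f: ManuallyDrop<F>,
}

impl<T, F: FnOnce(T)> Guard<T, F> {
    fn new(value: T, f: F) -> Self {
        Guard { value: ManuallyDrop::new(value), f: ManuallyDrop::new(f) }
    }
}

impl<T, F: FnOnce(T)> Drop for Guard<T, F> {
    fn drop(&mut self) {
        // SAFETY: each field is taken exactly once, here in `drop`.
        let value = unsafe { ManuallyDrop::take(&mut self.value) };
        let f = unsafe { ManuallyDrop::take(&mut self.f) };
        f(value);
    }
}

fn main() {
    let guard = Guard::new(vec![1, 2, 3], |v| println!("cleaning up {v:?}"));
    // ... fallible work here; the cleanup runs if we unwind or return early ...
    mem::forget(guard); // success path: skip the cleanup
}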