@@ -109,6 +109,10 @@ pub(crate) struct Vtable {
109109 /// fn(data, ptr, len)
110110 pub clone : unsafe fn ( & AtomicPtr < ( ) > , * const u8 , usize ) -> Bytes ,
111111 /// fn(data, ptr, len)
112+ ///
113+ /// Consumes the `Bytes`, returning the underlying storage as an owned `Vec<u8>`.
114+ pub to_vec : unsafe fn ( & AtomicPtr < ( ) > , * const u8 , usize ) -> Vec < u8 > ,
115+ /// fn(data, ptr, len)
112116 pub drop : unsafe fn ( & mut AtomicPtr < ( ) > , * const u8 , usize ) ,
113117}
114118
@@ -845,6 +849,13 @@ impl From<String> for Bytes {
845849 }
846850}
847851
852+ impl From < Bytes > for Vec < u8 > {
853+ fn from ( bytes : Bytes ) -> Vec < u8 > {
854+ let bytes = mem:: ManuallyDrop :: new ( bytes) ;
855+ unsafe { ( bytes. vtable . to_vec ) ( & bytes. data , bytes. ptr , bytes. len ) }
856+ }
857+ }
858+
848859// ===== impl Vtable =====
849860
850861impl fmt:: Debug for Vtable {
@@ -860,6 +871,7 @@ impl fmt::Debug for Vtable {
860871
861872const STATIC_VTABLE : Vtable = Vtable {
862873 clone : static_clone,
874+ to_vec : static_to_vec,
863875 drop : static_drop,
864876} ;
865877
@@ -868,6 +880,11 @@ unsafe fn static_clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
868880 Bytes :: from_static ( slice)
869881}
870882
/// `to_vec` for `&'static [u8]`-backed `Bytes`: the data is borrowed for
/// the whole program, so the only way to produce an owned `Vec` is to copy.
unsafe fn static_to_vec(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
    slice::from_raw_parts(ptr, len).to_vec()
}
887+
/// `drop` for `&'static [u8]`-backed `Bytes`: static data is never owned,
/// so there is nothing to release.
unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
    // nothing to drop for &'static [u8]
}
@@ -876,11 +893,13 @@ unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
876893
877894static PROMOTABLE_EVEN_VTABLE : Vtable = Vtable {
878895 clone : promotable_even_clone,
896+ to_vec : promotable_even_to_vec,
879897 drop : promotable_even_drop,
880898} ;
881899
882900static PROMOTABLE_ODD_VTABLE : Vtable = Vtable {
883901 clone : promotable_odd_clone,
902+ to_vec : promotable_odd_to_vec,
884903 drop : promotable_odd_drop,
885904} ;
886905
@@ -897,6 +916,38 @@ unsafe fn promotable_even_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize
897916 }
898917}
899918
919+ unsafe fn promotable_to_vec (
920+ data : & AtomicPtr < ( ) > ,
921+ ptr : * const u8 ,
922+ len : usize ,
923+ f : fn ( * mut ( ) ) -> * mut u8 ,
924+ ) -> Vec < u8 > {
925+ let shared = data. load ( Ordering :: Acquire ) ;
926+ let kind = shared as usize & KIND_MASK ;
927+
928+ if kind == KIND_ARC {
929+ shared_to_vec_impl ( shared. cast ( ) , ptr, len)
930+ } else {
931+ // If Bytes holds a Vec, then the offset must be 0.
932+ debug_assert_eq ! ( kind, KIND_VEC ) ;
933+
934+ let buf = f ( shared) ;
935+
936+ let cap = ( ptr as usize - buf as usize ) + len;
937+
938+ // Copy back buffer
939+ ptr:: copy ( ptr, buf, len) ;
940+
941+ Vec :: from_raw_parts ( buf, len, cap)
942+ }
943+ }
944+
945+ unsafe fn promotable_even_to_vec ( data : & AtomicPtr < ( ) > , ptr : * const u8 , len : usize ) -> Vec < u8 > {
946+ promotable_to_vec ( data, ptr, len, |shared| {
947+ ptr_map ( shared. cast ( ) , |addr| addr & !KIND_MASK )
948+ } )
949+ }
950+
900951unsafe fn promotable_even_drop ( data : & mut AtomicPtr < ( ) > , ptr : * const u8 , len : usize ) {
901952 data. with_mut ( |shared| {
902953 let shared = * shared;
@@ -924,6 +975,10 @@ unsafe fn promotable_odd_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize)
924975 }
925976}
926977
978+ unsafe fn promotable_odd_to_vec ( data : & AtomicPtr < ( ) > , ptr : * const u8 , len : usize ) -> Vec < u8 > {
979+ promotable_to_vec ( data, ptr, len, |shared| shared. cast ( ) )
980+ }
981+
927982unsafe fn promotable_odd_drop ( data : & mut AtomicPtr < ( ) > , ptr : * const u8 , len : usize ) {
928983 data. with_mut ( |shared| {
929984 let shared = * shared;
@@ -967,6 +1022,7 @@ const _: [(); 0 - mem::align_of::<Shared>() % 2] = []; // Assert that the alignm
9671022
9681023static SHARED_VTABLE : Vtable = Vtable {
9691024 clone : shared_clone,
1025+ to_vec : shared_to_vec,
9701026 drop : shared_drop,
9711027} ;
9721028
@@ -979,6 +1035,39 @@ unsafe fn shared_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Byte
9791035 shallow_clone_arc ( shared as _ , ptr, len)
9801036}
9811037
1038+ unsafe fn shared_to_vec_impl ( shared : * mut Shared , ptr : * const u8 , len : usize ) -> Vec < u8 > {
1039+ // Check that the ref_cnt is 1 (unique).
1040+ //
1041+ // If it is unique, then it is set to 0 with AcqRel fence for the same
1042+ // reason in release_shared.
1043+ //
1044+ // Otherwise, we take the other branch and call release_shared.
1045+ if ( * shared)
1046+ . ref_cnt
1047+ . compare_exchange ( 1 , 0 , Ordering :: AcqRel , Ordering :: Relaxed )
1048+ . is_ok ( )
1049+ {
1050+ let buf = ( * shared) . buf ;
1051+ let cap = ( * shared) . cap ;
1052+
1053+ // Deallocate Shared
1054+ drop ( Box :: from_raw ( shared as * mut mem:: ManuallyDrop < Shared > ) ) ;
1055+
1056+ // Copy back buffer
1057+ ptr:: copy ( ptr, buf, len) ;
1058+
1059+ Vec :: from_raw_parts ( buf, len, cap)
1060+ } else {
1061+ let v = slice:: from_raw_parts ( ptr, len) . to_vec ( ) ;
1062+ release_shared ( shared) ;
1063+ v
1064+ }
1065+ }
1066+
1067+ unsafe fn shared_to_vec ( data : & AtomicPtr < ( ) > , ptr : * const u8 , len : usize ) -> Vec < u8 > {
1068+ shared_to_vec_impl ( data. load ( Ordering :: Relaxed ) . cast ( ) , ptr, len)
1069+ }
1070+
9821071unsafe fn shared_drop ( data : & mut AtomicPtr < ( ) > , _ptr : * const u8 , _len : usize ) {
9831072 data. with_mut ( |shared| {
9841073 release_shared ( shared. cast ( ) ) ;
0 commit comments