@@ -4,7 +4,6 @@ use std::num::Wrapping;
 
 use vm_memory::{AtomicAccess, GuestMemoryError, GuestMemoryRegion, MemoryRegionAddress};
 
-use std::io::{Read, Write};
 use std::mem::MaybeUninit;
 use vm_memory::ByteValued;
 
@@ -69,9 +68,9 @@ impl GuestMemoryRegion for StubRegion {
         self.region_start
     }
 
-    fn bitmap(&self) -> &Self::B {
+    fn bitmap(&self) -> Self::B {
         // For Kani, we do not need a bitmap, so we return an empty tuple.
-        &()
+        ()
     }
 }
 
@@ -118,36 +117,6 @@ impl Bytes<MemoryRegionAddress> for StubRegion {
         Ok(())
     }
 
-    fn read_from<F: Read>(
-        &self,
-        addr: MemoryRegionAddress,
-        src: &mut F,
-        count: usize,
-    ) -> Result<usize, Self::E> {
-        let mut temp = vec![0u8; count];
-        src.read_exact(&mut temp)
-            .map_err(|_| GuestMemoryError::PartialBuffer {
-                expected: count,
-                completed: 0,
-            })?;
-        self.write(&temp, addr)
-    }
-
-    fn read_exact_from<F: Read>(
-        &self,
-        addr: MemoryRegionAddress,
-        src: &mut F,
-        count: usize,
-    ) -> Result<(), Self::E> {
-        let mut temp = vec![0u8; count];
-        src.read_exact(&mut temp)
-            .map_err(|_| GuestMemoryError::PartialBuffer {
-                expected: count,
-                completed: 0,
-            })?;
-        self.write_slice(&temp, addr)
-    }
-
     fn read_obj<T: ByteValued>(&self, addr: MemoryRegionAddress) -> Result<T, Self::E> {
         let size = std::mem::size_of::<T>();
         let offset = addr.0 as usize;
@@ -168,12 +137,37 @@ impl Bytes<MemoryRegionAddress> for StubRegion {
         Ok(result)
     }
 
-    fn write_to<F: Write>(
+    fn write_obj<T: ByteValued>(&self, val: T, addr: MemoryRegionAddress) -> Result<(), Self::E> {
+        let size = std::mem::size_of::<T>();
+        let offset = addr.0 as usize;
+        let end = offset
+            .checked_add(size)
+            .ok_or(GuestMemoryError::InvalidGuestAddress(GuestAddress(addr.0)))?;
+        if end > self.region_len as usize {
+            return Err(GuestMemoryError::InvalidGuestAddress(GuestAddress(addr.0)));
+        }
+        let bytes = val.as_slice();
+        unsafe {
+            std::ptr::copy_nonoverlapping(bytes.as_ptr(), self.buffer.add(offset), size);
+        }
+        Ok(())
+    }
+
+    // The non-volatile Read/Write stream helpers are no longer part of the
+    // `vm_memory::Bytes` trait in this workspace. Instead, implement the
+    // volatile helpers the trait expects as minimal bounds-checked shims over
+    // the backing buffer. This is sufficient for the Kani proofs here, which
+    // don't exercise volatile stream IO.
+
+    fn read_volatile_from<F>(
         &self,
         addr: MemoryRegionAddress,
-        dst: &mut F,
+        src: &mut F,
         count: usize,
-    ) -> Result<usize, Self::E> {
+    ) -> Result<usize, Self::E>
+    where
+        F: vm_memory::ReadVolatile,
+    {
         let offset = addr.0 as usize;
         let end = offset
             .checked_add(count)
@@ -182,39 +176,59 @@ impl Bytes<MemoryRegionAddress> for StubRegion {
             return Err(GuestMemoryError::InvalidGuestAddress(GuestAddress(addr.0)));
         }
         unsafe {
-            let slice = std::slice::from_raw_parts(self.buffer.add(offset), count);
-            dst.write_all(slice)
-                .map_err(|_| GuestMemoryError::PartialBuffer {
-                    expected: count,
-                    completed: 0,
-                })?;
+            let slice = std::slice::from_raw_parts_mut(self.buffer.add(offset), count);
+            let mut v = vm_memory::volatile_memory::VolatileSlice::from(slice);
+            return src.read_volatile(&mut v).map_err(Into::into);
         }
-        Ok(count)
     }
 
-    fn write_obj<T: ByteValued>(&self, val: T, addr: MemoryRegionAddress) -> Result<(), Self::E> {
-        let size = std::mem::size_of::<T>();
+    fn read_exact_volatile_from<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        src: &mut F,
+        count: usize,
+    ) -> Result<(), Self::E>
+    where
+        F: vm_memory::ReadVolatile,
+    {
+        // Reuse read_volatile_from, which performs bounds checks and delegates to `ReadVolatile`.
+        let _ = self.read_volatile_from(addr, src, count)?;
+        Ok(())
+    }
+
+    fn write_volatile_to<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        dst: &mut F,
+        count: usize,
+    ) -> Result<usize, Self::E>
+    where
+        F: vm_memory::WriteVolatile,
+    {
         let offset = addr.0 as usize;
         let end = offset
-            .checked_add(size)
+            .checked_add(count)
             .ok_or(GuestMemoryError::InvalidGuestAddress(GuestAddress(addr.0)))?;
         if end > self.region_len as usize {
             return Err(GuestMemoryError::InvalidGuestAddress(GuestAddress(addr.0)));
         }
-        let bytes = val.as_slice();
         unsafe {
-            std::ptr::copy_nonoverlapping(bytes.as_ptr(), self.buffer.add(offset), size);
+            let slice = std::slice::from_raw_parts_mut(self.buffer.add(offset), count);
+            let v = vm_memory::volatile_memory::VolatileSlice::from(slice);
+            return dst.write_volatile(&v).map_err(Into::into);
         }
-        Ok(())
     }
 
-    fn write_all_to<F: Write>(
+    fn write_all_volatile_to<F>(
         &self,
         addr: MemoryRegionAddress,
         dst: &mut F,
         count: usize,
-    ) -> Result<(), Self::E> {
-        self.write_to(addr, dst, count)?;
+    ) -> Result<(), Self::E>
+    where
+        F: vm_memory::WriteVolatile,
+    {
+        let _ = self.write_volatile_to(addr, dst, count)?;
         Ok(())
     }
 
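For orientation, below is a minimal sketch of the kind of round-trip property the stubbed `Bytes` implementation above is meant to support in the Kani harnesses. It is illustrative only and not part of this commit; the helper name and generic bound are assumptions, while the `write_obj`/`read_obj` calls mirror the methods in the diff.

    // Illustrative sketch only; not part of this commit.
    use vm_memory::{Bytes, GuestMemoryError, MemoryRegionAddress};

    fn obj_roundtrip<M>(region: &M, addr: MemoryRegionAddress, val: u64) -> Result<(), GuestMemoryError>
    where
        M: Bytes<MemoryRegionAddress, E = GuestMemoryError>,
    {
        // Write a value and read it back from the same offset; a Kani harness would
        // assert equality for nondeterministic (but in-bounds) `addr` and `val`.
        region.write_obj(val, addr)?;
        let back: u64 = region.read_obj(addr)?;
        assert_eq!(back, val);
        Ok(())
    }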