@@ -272,90 +272,74 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
 
 /// A partial, owned list of provenance to transfer into another allocation.
 ///
-/// Offsets are already adjusted to the destination allocation.
+/// Offsets are relative to the beginning of the copied range.
 pub struct ProvenanceCopy<Prov> {
-    dest_ptrs: Option<Box<[(Size, Prov)]>>,
-    dest_bytes: Option<Box<[(Size, (Prov, u8))]>>,
+    ptrs: Box<[(Size, Prov)]>,
+    bytes: Box<[(Size, (Prov, u8))]>,
 }
 
 impl<Prov: Provenance> ProvenanceMap<Prov> {
-    pub fn prepare_copy(
-        &self,
-        src: AllocRange,
-        dest: Size,
-        count: u64,
-        cx: &impl HasDataLayout,
-    ) -> ProvenanceCopy<Prov> {
-        let shift_offset = move |idx, offset| {
-            // compute offset for current repetition
-            let dest_offset = dest + src.size * idx; // `Size` operations
-            // shift offsets from source allocation to destination allocation
-            (offset - src.start) + dest_offset // `Size` operations
-        };
+    pub fn prepare_copy(&self, range: AllocRange, cx: &impl HasDataLayout) -> ProvenanceCopy<Prov> {
+        let shift_offset = move |offset| offset - range.start;
         let ptr_size = cx.data_layout().pointer_size();
 
         // # Pointer-sized provenances
         // Get the provenances that are entirely within this range.
         // (Different from `range_get_ptrs` which asks if they overlap the range.)
         // Only makes sense if we are copying at least one pointer worth of bytes.
-        let mut dest_ptrs_box = None;
-        if src.size >= ptr_size {
-            let adjusted_end = Size::from_bytes(src.end().bytes() - (ptr_size.bytes() - 1));
-            let ptrs = self.ptrs.range(src.start..adjusted_end);
-            // If `count` is large, this is rather wasteful -- we are allocating a big array here, which
-            // is mostly filled with redundant information since it's just N copies of the same `Prov`s
-            // at slightly adjusted offsets. The reason we do this is so that in `mark_provenance_range`
-            // we can use `insert_presorted`. That wouldn't work with an `Iterator` that just produces
-            // the right sequence of provenance for all N copies.
-            // Basically, this large array would have to be created anyway in the target allocation.
-            let mut dest_ptrs = Vec::with_capacity(ptrs.len() * (count as usize));
-            for i in 0..count {
-                dest_ptrs
-                    .extend(ptrs.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)));
-            }
-            debug_assert_eq!(dest_ptrs.len(), dest_ptrs.capacity());
-            dest_ptrs_box = Some(dest_ptrs.into_boxed_slice());
+        let mut ptrs_box: Box<[_]> = Box::new([]);
+        if range.size >= ptr_size {
+            let adjusted_end = Size::from_bytes(range.end().bytes() - (ptr_size.bytes() - 1));
+            let ptrs = self.ptrs.range(range.start..adjusted_end);
+            ptrs_box = ptrs.iter().map(|&(offset, reloc)| (shift_offset(offset), reloc)).collect();
         };
 
         // # Byte-sized provenances
         // This includes the existing bytewise provenance in the range, and ptr provenance
         // that overlaps with the begin/end of the range.
-        let mut dest_bytes_box = None;
-        let begin_overlap = self.range_ptrs_get(alloc_range(src.start, Size::ZERO), cx).first();
-        let end_overlap = self.range_ptrs_get(alloc_range(src.end(), Size::ZERO), cx).first();
+        let mut bytes_box: Box<[_]> = Box::new([]);
+        let begin_overlap = self.range_ptrs_get(alloc_range(range.start, Size::ZERO), cx).first();
+        let end_overlap = self.range_ptrs_get(alloc_range(range.end(), Size::ZERO), cx).first();
        // We only need to go here if there is some overlap or some bytewise provenance.
        if begin_overlap.is_some() || end_overlap.is_some() || self.bytes.is_some() {
            let mut bytes: Vec<(Size, (Prov, u8))> = Vec::new();
            // First, if there is a part of a pointer at the start, add that.
            if let Some(entry) = begin_overlap {
                trace!("start overlapping entry: {entry:?}");
-                // For really small copies, make sure we don't run off the end of the `src` range.
-                let entry_end = cmp::min(entry.0 + ptr_size, src.end());
-                for offset in src.start..entry_end {
-                    bytes.push((offset, (entry.1, (offset - entry.0).bytes() as u8)));
+                // For really small copies, make sure we don't run off the end of the range.
+                let entry_end = cmp::min(entry.0 + ptr_size, range.end());
+                for offset in range.start..entry_end {
+                    bytes.push((shift_offset(offset), (entry.1, (offset - entry.0).bytes() as u8)));
                }
            } else {
                trace!("no start overlapping entry");
            }
 
            // Then the main part, bytewise provenance from `self.bytes`.
-            bytes.extend(self.range_bytes_get(src));
+            bytes.extend(
+                self.range_bytes_get(range)
+                    .iter()
+                    .map(|&(offset, reloc)| (shift_offset(offset), reloc)),
+            );
 
            // And finally possibly parts of a pointer at the end.
            if let Some(entry) = end_overlap {
                trace!("end overlapping entry: {entry:?}");
-                // For really small copies, make sure we don't start before `src` does.
-                let entry_start = cmp::max(entry.0, src.start);
-                for offset in entry_start..src.end() {
+                // For really small copies, make sure we don't start before `range` does.
+                let entry_start = cmp::max(entry.0, range.start);
+                for offset in entry_start..range.end() {
                    if bytes.last().is_none_or(|bytes_entry| bytes_entry.0 < offset) {
                        // The last entry, if it exists, has a lower offset than us, so we
                        // can add it at the end and remain sorted.
-                        bytes.push((offset, (entry.1, (offset - entry.0).bytes() as u8)));
+                        bytes.push((
+                            shift_offset(offset),
+                            (entry.1, (offset - entry.0).bytes() as u8),
+                        ));
                    } else {
                        // There already is an entry for this offset in there! This can happen when the
                        // start and end range checks actually end up hitting the same pointer, so we
                        // already added this in the "pointer at the start" part above.
-                        assert!(entry.0 <= src.start);
+                        assert!(entry.0 <= range.start);
                    }
                }
            } else {
@@ -364,29 +348,30 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
            trace!("byte provenances: {bytes:?}");
 
            // And again a buffer for the new list on the target side.
-            let mut dest_bytes = Vec::with_capacity(bytes.len() * (count as usize));
-            for i in 0..count {
-                dest_bytes
-                    .extend(bytes.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)));
-            }
-            debug_assert_eq!(dest_bytes.len(), dest_bytes.capacity());
-            dest_bytes_box = Some(dest_bytes.into_boxed_slice());
+            bytes_box = bytes.into_boxed_slice();
        }
 
-        ProvenanceCopy { dest_ptrs: dest_ptrs_box, dest_bytes: dest_bytes_box }
+        ProvenanceCopy { ptrs: ptrs_box, bytes: bytes_box }
    }
 
    /// Applies a provenance copy.
    /// The affected range, as defined in the parameters to `prepare_copy` is expected
    /// to be clear of provenance.
-    pub fn apply_copy(&mut self, copy: ProvenanceCopy<Prov>) {
-        if let Some(dest_ptrs) = copy.dest_ptrs {
-            self.ptrs.insert_presorted(dest_ptrs.into());
+    pub fn apply_copy(&mut self, copy: ProvenanceCopy<Prov>, range: AllocRange, repeat: u64) {
+        let shift_offset = |idx: u64, offset: Size| offset + range.start + idx * range.size;
+        if !copy.ptrs.is_empty() {
+            for i in 0..repeat {
+                self.ptrs.insert_presorted(
+                    copy.ptrs.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)),
+                );
+            }
        }
-        if let Some(dest_bytes) = copy.dest_bytes
-            && !dest_bytes.is_empty()
-        {
-            self.bytes.get_or_insert_with(Box::default).insert_presorted(dest_bytes.into());
+        if !copy.bytes.is_empty() {
+            for i in 0..repeat {
+                self.bytes.get_or_insert_with(Box::default).insert_presorted(
+                    copy.bytes.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)),
+                );
+            }
        }
    }
}
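
Taken together, the two hunks move all destination-dependent work out of `prepare_copy`: the prepared list now stores offsets relative to the copied range, computed once, and `apply_copy` re-shifts that same list once per repetition instead of materializing `count` copies of it up front. Below is a minimal, self-contained sketch of that split, not the rustc code itself: `Size` is modeled as a plain `u64`, provenance as a `char` tag, and `insert_presorted` as an extend-then-sort on a `Vec`, so only the offset arithmetic mirrors the diff above.

// Toy model of the prepare/apply split; all names below are stand-ins
// for the rustc types, not the real implementation.
type Size = u64;

#[derive(Clone, Copy)]
struct AllocRange {
    start: Size,
    size: Size,
}

impl AllocRange {
    fn end(&self) -> Size {
        self.start + self.size
    }
}

struct ProvenanceMap {
    ptrs: Vec<(Size, char)>, // kept sorted by offset
}

/// Offsets are relative to the beginning of the copied range.
struct ProvenanceCopy {
    ptrs: Box<[(Size, char)]>,
}

impl ProvenanceMap {
    /// Extract the provenance in `range`, shifted to range-relative offsets.
    fn prepare_copy(&self, range: AllocRange) -> ProvenanceCopy {
        let ptrs = self
            .ptrs
            .iter()
            .filter(|&&(offset, _)| range.start <= offset && offset < range.end())
            .map(|&(offset, prov)| (offset - range.start, prov))
            .collect();
        ProvenanceCopy { ptrs }
    }

    /// Insert the prepared provenance at `range`, repeated `repeat` times.
    /// Each repetition re-shifts the same prepared list, so no buffer
    /// proportional to `repeat` is ever allocated.
    fn apply_copy(&mut self, copy: &ProvenanceCopy, range: AllocRange, repeat: u64) {
        for i in 0..repeat {
            let base = range.start + i * range.size;
            self.ptrs.extend(copy.ptrs.iter().map(|&(offset, prov)| (base + offset, prov)));
        }
        // Stand-in for `insert_presorted`: keep the map sorted by offset.
        self.ptrs.sort_by_key(|&(offset, _)| offset);
    }
}

fn main() {
    // One provenance entry at offset 4 of the source allocation.
    let src = ProvenanceMap { ptrs: vec![(4, 'A')] };

    // Copy 8 bytes starting at offset 0; the prepared offset is range-relative.
    let copy = src.prepare_copy(AllocRange { start: 0, size: 8 });
    assert_eq!(&copy.ptrs[..], &[(4, 'A')][..]);

    // Paste it three times starting at offset 16 of the destination.
    let mut dest = ProvenanceMap { ptrs: Vec::new() };
    dest.apply_copy(&copy, AllocRange { start: 16, size: 8 }, 3);
    assert_eq!(dest.ptrs, vec![(20, 'A'), (28, 'A'), (36, 'A')]);
}

Under this split, `prepare_copy` costs O(entries in range) regardless of the repetition count, and the only per-repetition work is the re-shifted insertion in `apply_copy`, which is where the diff's removal of the `count`-sized `dest_ptrs`/`dest_bytes` buffers comes from.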