@@ -333,13 +333,6 @@ impl Default for DroplessArena {
 }
 
 impl DroplessArena {
-    #[inline]
-    fn align(&self, align: usize) {
-        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
-        self.ptr.set(final_address as *mut u8);
-        assert!(self.ptr <= self.end);
-    }
-
     #[inline(never)]
     #[cold]
     fn grow(&self, additional: usize) {
@@ -370,30 +363,50 @@ impl DroplessArena {
         }
     }
 
+    /// Allocates a byte slice with specified size and alignment from the
+    /// current memory chunk. Returns `None` if there is no free space left to
+    /// satisfy the request.
     #[inline]
-    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
-        unsafe {
-            assert!(bytes != 0);
-
-            self.align(align);
+    fn alloc_raw_without_grow(&self, bytes: usize, align: usize) -> Option<*mut u8> {
+        let ptr = self.ptr.get() as usize;
+        let end = self.end.get() as usize;
+        // The allocation request fits into the current chunk iff:
+        //
+        // let aligned = align_to(ptr, align);
+        // ptr <= aligned && aligned + bytes <= end
+        //
+        // Except that we work with fixed-width integers and need to be careful
+        // about potential overflow in the calculation. If the overflow does
+        // happen, then we definitely don't have enough free space and need to
+        // grow the arena.
+        let aligned = ptr.checked_add(align - 1)? & !(align - 1);
+        let new_ptr = aligned.checked_add(bytes)?;
+        if new_ptr <= end {
+            self.ptr.set(new_ptr as *mut u8);
+            Some(aligned as *mut u8)
+        } else {
+            None
+        }
+    }
 
-            let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
-            if (future_end as *mut u8) > self.end.get() {
-                self.grow(bytes);
+    #[inline]
+    pub fn alloc_raw(&self, bytes: usize, align: usize) -> *mut u8 {
+        assert!(bytes != 0);
+        loop {
+            if let Some(a) = self.alloc_raw_without_grow(bytes, align) {
+                break a;
             }
-
-            let ptr = self.ptr.get();
-            // Set the pointer past ourselves
-            self.ptr.set(intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8);
-            slice::from_raw_parts_mut(ptr, bytes)
+            // No free space left. Allocate a new chunk to satisfy the request.
+            // On failure the grow will panic or abort.
+            self.grow(bytes);
         }
     }
 
     #[inline]
     pub fn alloc<T>(&self, object: T) -> &mut T {
         assert!(!mem::needs_drop::<T>());
 
-        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
+        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
 
         unsafe {
             // Write into uninitialized memory.
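
The fit test in `alloc_raw_without_grow` above boils down to an overflow-checked align-up on `usize` addresses. A minimal standalone sketch of that arithmetic (illustrative only; `align_up_checked` is not part of this patch):

```rust
/// Rounds `ptr` up to the next multiple of `align` (a power of two),
/// returning `None` if the addition overflows `usize` -- the same case in
/// which the current chunk cannot satisfy the request and the arena must grow.
fn align_up_checked(ptr: usize, align: usize) -> Option<usize> {
    debug_assert!(align.is_power_of_two());
    Some(ptr.checked_add(align - 1)? & !(align - 1))
}

fn main() {
    // 13 rounded up to an 8-byte boundary is 16.
    assert_eq!(align_up_checked(13, 8), Some(16));
    // Near the top of the address space the addition overflows, so the
    // caller knows it has to fall back to growing the arena.
    assert_eq!(align_up_checked(usize::MAX - 3, 8), None);
}
```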
@@ -418,13 +431,11 @@ impl DroplessArena {
         assert!(mem::size_of::<T>() != 0);
         assert!(!slice.is_empty());
 
-        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut _
-            as *mut T;
+        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
 
         unsafe {
-            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
-            arena_slice.copy_from_slice(slice);
-            arena_slice
+            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
+            slice::from_raw_parts_mut(mem, slice.len())
         }
     }
 
@@ -467,7 +478,7 @@ impl DroplessArena {
                     return &mut [];
                 }
                 let size = len.checked_mul(mem::size_of::<T>()).unwrap();
-                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
+                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut T;
                 unsafe { self.write_from_iter(iter, len, mem) }
             }
             (_, _) => {
@@ -482,7 +493,7 @@ impl DroplessArena {
                         let len = vec.len();
                         let start_ptr = self
                             .alloc_raw(len * mem::size_of::<T>(), mem::align_of::<T>())
-                            as *mut _ as *mut T;
+                            as *mut T;
                         vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                         vec.set_len(0);
                         slice::from_raw_parts_mut(start_ptr, len)
@@ -526,8 +537,7 @@ pub struct DropArena {
 impl DropArena {
     #[inline]
     pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
-        let mem =
-            self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
+        let mem = self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
         // Write into uninitialized memory.
         ptr::write(mem, object);
         let result = &mut *mem;
@@ -550,7 +560,7 @@ impl DropArena {
         let start_ptr = self
             .arena
             .alloc_raw(len.checked_mul(mem::size_of::<T>()).unwrap(), mem::align_of::<T>())
-            as *mut T;
+            as *mut T;
 
         let mut destructors = self.destructors.borrow_mut();
         // Reserve space for the destructors so we can't panic while adding them