@@ -155,8 +155,8 @@ impl CtfeValidationMode {
 
 /// State for tracking recursive validation of references
 pub struct RefTracking<T, PATH = ()> {
-    pub seen: FxHashSet<T>,
-    pub todo: Vec<(T, PATH)>,
+    seen: FxHashSet<T>,
+    todo: Vec<(T, PATH)>,
 }
 
 impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
@@ -169,8 +169,11 @@ impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH>
         ref_tracking_for_consts.seen.insert(op);
         ref_tracking_for_consts
     }
+    pub fn next(&mut self) -> Option<(T, PATH)> {
+        self.todo.pop()
+    }
 
-    pub fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
+    fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
         if self.seen.insert(op.clone()) {
             trace!("Recursing below ptr {:#?}", op);
             let path = path();
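// --- Illustrative sketch, not part of the diff above ---
// A minimal example of the worklist pattern `RefTracking` implements, assuming the
// code sits in this same module (the fields and `track` are now module-private).
// The element type `u32` and the string paths are placeholders for illustration only.
fn ref_tracking_example() {
    let mut tracking: RefTracking<u32, Vec<&'static str>> = RefTracking::empty();
    // `track` only enqueues an item the first time it is seen; the path closure is
    // only evaluated in that case.
    tracking.track(1, || vec!["root"]);
    tracking.track(1, || vec!["ignored: already seen"]);
    // `next` is the new public accessor for draining pending work in LIFO order.
    while let Some((item, path)) = tracking.next() {
        println!("validating {item} reached via {path:?}");
    }
}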
@@ -435,95 +438,112 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
         if self.ecx.scalar_may_be_null(Scalar::from_maybe_pointer(place.ptr(), self.ecx))? {
             throw_validation_failure!(self.path, NullPtr { ptr_kind })
         }
-        // Do not allow pointers to uninhabited types.
+        // Do not allow references to uninhabited types.
         if place.layout.abi.is_uninhabited() {
             let ty = place.layout.ty;
             throw_validation_failure!(self.path, PtrToUninhabited { ptr_kind, ty })
         }
         // Recursive checking
         if let Some(ref_tracking) = self.ref_tracking.as_deref_mut() {
-            // Determine whether this pointer expects to be pointing to something mutable.
-            let ptr_expected_mutbl = match ptr_kind {
-                PointerKind::Box => Mutability::Mut,
-                PointerKind::Ref(mutbl) => {
-                    // We do not take into account interior mutability here since we cannot know if
-                    // there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
-                    // that in the recursive descent behind this reference (controlled by
-                    // `allow_immutable_unsafe_cell`).
-                    mutbl
-                }
-            };
             // Proceed recursively even for ZST, no reason to skip them!
             // `!` is a ZST and we want to validate it.
-            if let Ok((alloc_id, _offset, _prov)) = self.ecx.ptr_try_get_alloc_id(place.ptr(), 0) {
+            if let Some(ctfe_mode) = self.ctfe_mode {
                 let mut skip_recursive_check = false;
-                if let Some(GlobalAlloc::Static(did)) = self.ecx.tcx.try_get_global_alloc(alloc_id)
+                // CTFE imposes restrictions on what references can point to.
+                if let Ok((alloc_id, _offset, _prov)) =
+                    self.ecx.ptr_try_get_alloc_id(place.ptr(), 0)
                 {
-                    let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else { bug!() };
-                    // Special handling for pointers to statics (irrespective of their type).
-                    assert!(!self.ecx.tcx.is_thread_local_static(did));
-                    assert!(self.ecx.tcx.is_static(did));
-                    // Mode-specific checks
-                    match self.ctfe_mode {
-                        Some(
-                            CtfeValidationMode::Static { .. } | CtfeValidationMode::Promoted { .. },
-                        ) => {
-                            // We skip recursively checking other statics. These statics must be sound by
-                            // themselves, and the only way to get broken statics here is by using
-                            // unsafe code.
-                            // The reasons we don't check other statics is twofold. For one, in all
-                            // sound cases, the static was already validated on its own, and second, we
-                            // trigger cycle errors if we try to compute the value of the other static
-                            // and that static refers back to us (potentially through a promoted).
-                            // This could miss some UB, but that's fine.
-                            // We still walk nested allocations, as they are fundamentally part of this validation run.
-                            // This means we will also recurse into nested statics of *other*
-                            // statics, even though we do not recurse into other statics directly.
-                            // That's somewhat inconsistent but harmless.
-                            skip_recursive_check = !nested;
-                        }
-                        Some(CtfeValidationMode::Const { .. }) => {
-                            // We can't recursively validate `extern static`, so we better reject them.
-                            if self.ecx.tcx.is_foreign_item(did) {
-                                throw_validation_failure!(self.path, ConstRefToExtern);
+                    if let Some(GlobalAlloc::Static(did)) =
+                        self.ecx.tcx.try_get_global_alloc(alloc_id)
+                    {
+                        let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else {
+                            bug!()
+                        };
+                        // Special handling for pointers to statics (irrespective of their type).
+                        assert!(!self.ecx.tcx.is_thread_local_static(did));
+                        assert!(self.ecx.tcx.is_static(did));
+                        // Mode-specific checks
+                        match ctfe_mode {
+                            CtfeValidationMode::Static { .. }
+                            | CtfeValidationMode::Promoted { .. } => {
+                                // We skip recursively checking other statics. These statics must be sound by
+                                // themselves, and the only way to get broken statics here is by using
+                                // unsafe code.
+                                // The reasons we don't check other statics is twofold. For one, in all
+                                // sound cases, the static was already validated on its own, and second, we
+                                // trigger cycle errors if we try to compute the value of the other static
+                                // and that static refers back to us (potentially through a promoted).
+                                // This could miss some UB, but that's fine.
+                                // We still walk nested allocations, as they are fundamentally part of this validation run.
+                                // This means we will also recurse into nested statics of *other*
+                                // statics, even though we do not recurse into other statics directly.
+                                // That's somewhat inconsistent but harmless.
+                                skip_recursive_check = !nested;
+                            }
+                            CtfeValidationMode::Const { .. } => {
+                                // We can't recursively validate `extern static`, so we better reject them.
+                                if self.ecx.tcx.is_foreign_item(did) {
+                                    throw_validation_failure!(self.path, ConstRefToExtern);
+                                }
                             }
                         }
-                        None => {}
                     }
-                }
 
-                // Dangling and Mutability check.
-                let (size, _align, alloc_kind) = self.ecx.get_alloc_info(alloc_id);
-                if alloc_kind == AllocKind::Dead {
-                    // This can happen for zero-sized references. We can't have *any* references to non-existing
-                    // allocations though, interning rejects them all as the rest of rustc isn't happy with them...
-                    // so we throw an error, even though this isn't really UB.
-                    // A potential future alternative would be to resurrect this as a zero-sized allocation
-                    // (which codegen will then compile to an aligned dummy pointer anyway).
-                    throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
-                }
-                // If this allocation has size zero, there is no actual mutability here.
-                if size != Size::ZERO {
-                    let alloc_actual_mutbl = mutability(self.ecx, alloc_id);
-                    // Mutable pointer to immutable memory is no good.
-                    if ptr_expected_mutbl == Mutability::Mut
-                        && alloc_actual_mutbl == Mutability::Not
-                    {
-                        throw_validation_failure!(self.path, MutableRefToImmutable);
+                    // Dangling and Mutability check.
+                    let (size, _align, alloc_kind) = self.ecx.get_alloc_info(alloc_id);
+                    if alloc_kind == AllocKind::Dead {
+                        // This can happen for zero-sized references. We can't have *any* references to
+                        // non-existing allocations in const-eval though, interning rejects them all as
+                        // the rest of rustc isn't happy with them... so we throw an error, even though
+                        // this isn't really UB.
+                        // A potential future alternative would be to resurrect this as a zero-sized allocation
+                        // (which codegen will then compile to an aligned dummy pointer anyway).
+                        throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
                     }
-                    // In a const, everything must be completely immutable.
-                    if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. })) {
+                    // If this allocation has size zero, there is no actual mutability here.
+                    if size != Size::ZERO {
+                        // Determine whether this pointer expects to be pointing to something mutable.
+                        let ptr_expected_mutbl = match ptr_kind {
+                            PointerKind::Box => Mutability::Mut,
+                            PointerKind::Ref(mutbl) => {
+                                // We do not take into account interior mutability here since we cannot know if
+                                // there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
+                                // that in the recursive descent behind this reference (controlled by
+                                // `allow_immutable_unsafe_cell`).
+                                mutbl
+                            }
+                        };
+                        // Determine what it actually points to.
+                        let alloc_actual_mutbl = mutability(self.ecx, alloc_id);
+                        // Mutable pointer to immutable memory is no good.
                         if ptr_expected_mutbl == Mutability::Mut
-                            || alloc_actual_mutbl == Mutability::Mut
+                            && alloc_actual_mutbl == Mutability::Not
                         {
-                            throw_validation_failure!(self.path, ConstRefToMutable);
+                            throw_validation_failure!(self.path, MutableRefToImmutable);
+                        }
+                        // In a const, everything must be completely immutable.
+                        if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. })) {
+                            if ptr_expected_mutbl == Mutability::Mut
+                                || alloc_actual_mutbl == Mutability::Mut
+                            {
+                                throw_validation_failure!(self.path, ConstRefToMutable);
+                            }
                         }
                     }
                 }
                 // Potentially skip recursive check.
                 if skip_recursive_check {
                     return Ok(());
                 }
+            } else {
+                // This is not CTFE, so it's Miri with recursive checking.
+                // FIXME: we do *not* check behind boxes, since creating a new box first creates it uninitialized
+                // and then puts the value in there, so briefly we have a box with uninit contents.
+                // FIXME: should we also skip `UnsafeCell` behind shared references? Currently that is not
+                // needed since validation reads bypass Stacked Borrows and data race checks.
+                if matches!(ptr_kind, PointerKind::Box) {
+                    return Ok(());
+                }
             }
             let path = &self.path;
             ref_tracking.track(place, || {
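// --- Illustrative sketch, not part of the diff above ---
// The mutability rule applied in the hunk above, restated as a self-contained function.
// `Mut`, `Kind`, and `check_ref_mutability` are stand-ins invented for illustration,
// not compiler items: a `Box` or `&mut` expects mutable memory; a mutable pointer to
// immutable memory is rejected, and in a `const` any mutability on either side is rejected.
#[derive(Clone, Copy, PartialEq)]
enum Mut { Not, Yes }
#[derive(Clone, Copy)]
enum Kind { Box, Ref(Mut) }

fn check_ref_mutability(kind: Kind, alloc_actual: Mut, in_const: bool) -> Result<(), &'static str> {
    // What the pointer type promises.
    let expected = match kind {
        Kind::Box => Mut::Yes,
        Kind::Ref(m) => m,
    };
    // Mutable pointer to immutable memory is no good.
    if expected == Mut::Yes && alloc_actual == Mut::Not {
        return Err("MutableRefToImmutable");
    }
    // In a const, everything must be completely immutable.
    if in_const && (expected == Mut::Yes || alloc_actual == Mut::Yes) {
        return Err("ConstRefToMutable");
    }
    Ok(())
}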
@@ -1072,11 +1092,23 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
     /// `op` is assumed to cover valid memory if it is an indirect operand.
     /// It will error if the bits at the destination do not match the ones described by the layout.
     #[inline(always)]
-    pub fn validate_operand(&self, op: &OpTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
+    pub fn validate_operand(
+        &self,
+        op: &OpTy<'tcx, M::Provenance>,
+        recursive: bool,
+    ) -> InterpResult<'tcx> {
         // Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's
         // still correct to not use `ctfe_mode`: that mode is for validation of the final constant
-        // value, it rules out things like `UnsafeCell` in awkward places. It also can make checking
-        // recurse through references which, for now, we don't want here, either.
-        self.validate_operand_internal(op, vec![], None, None)
+        // value, it rules out things like `UnsafeCell` in awkward places.
+        if !recursive {
+            return self.validate_operand_internal(op, vec![], None, None);
+        }
+        // Do a recursive check.
+        let mut ref_tracking = RefTracking::empty();
+        self.validate_operand_internal(op, vec![], Some(&mut ref_tracking), None)?;
+        while let Some((mplace, path)) = ref_tracking.todo.pop() {
+            self.validate_operand_internal(&mplace.into(), path, Some(&mut ref_tracking), None)?;
+        }
+        Ok(())
     }
 }
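// --- Illustrative sketch, not part of the diff above ---
// A hypothetical helper showing the changed `validate_operand` signature in use,
// assuming an `InterpCx` and operand are already in scope. Passing `false` keeps the
// old shallow check (e.g. for `-Zextra-const-ub-checks`); passing `true` opts into the
// new worklist-driven recursion through references.
fn validate_maybe_recursively<'tcx, M: Machine<'tcx>>(
    ecx: &InterpCx<'tcx, M>,
    op: &OpTy<'tcx, M::Provenance>,
    deep: bool,
) -> InterpResult<'tcx> {
    ecx.validate_operand(op, deep)
}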