@@ -27,7 +27,7 @@ use rustc_span::sym;
 
 use super::{AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy};
 use crate::const_eval;
-use crate::errors::{DanglingPtrInFinal, MutablePtrInFinal};
+use crate::errors::MutablePtrInFinal;
 
 pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
     'mir,
@@ -62,26 +62,13 @@ impl HasStaticRootDefId for const_eval::CompileTimeInterpreter<'_, '_> {
 fn intern_shallow<'rt, 'mir, 'tcx, T, M: CompileTimeMachine<'mir, 'tcx, T>>(
     ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
     alloc_id: AllocId,
-    mutability: Mutability,
 ) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, ()> {
     trace!("intern_shallow {:?}", alloc_id);
     // remove allocation
     // FIXME(#120456) - is `swap_remove` correct?
-    let Some((_kind, mut alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
+    let Some((_kind, alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
         return Err(());
     };
-    // Set allocation mutability as appropriate. This is used by LLVM to put things into
-    // read-only memory, and also by Miri when evaluating other globals that
-    // access this one.
-    match mutability {
-        Mutability::Not => {
-            alloc.mutability = Mutability::Not;
-        }
-        Mutability::Mut => {
-            // This must be already mutable, we won't "un-freeze" allocations ever.
-            assert_eq!(alloc.mutability, Mutability::Mut);
-        }
-    }
     // link the alloc id to the actual allocation
     let alloc = ecx.tcx.mk_const_alloc(alloc);
     if let Some(static_id) = ecx.machine.static_def_id() {
@@ -123,14 +110,9 @@ pub enum InternKind {
     Promoted,
 }
 
-/// Intern `ret` and everything it references.
-///
-/// This *cannot raise an interpreter error*. Doing so is left to validation, which
-/// tracks where in the value we are and thus can show much better error messages.
-///
-/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
-#[instrument(level = "debug", skip(ecx))]
-pub fn intern_const_alloc_recursive<
+/// Now that evaluation is finished, and we are not going to modify allocations anymore,
+/// recursively mark all allocations as immutable if the item kind calls for it (const/promoted/immut static).
+pub fn patch_mutability_of_allocs<
     'mir,
     'tcx: 'mir,
     M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
@@ -141,12 +123,12 @@ pub fn intern_const_alloc_recursive<
 ) -> Result<(), ErrorGuaranteed> {
     // We are interning recursively, and for mutability we are distinguishing the "root" allocation
     // that we are starting in, and all other allocations that we are encountering recursively.
-    let (base_mutability, inner_mutability, is_static) = match intern_kind {
+    let (base_mutability, inner_mutability) = match intern_kind {
         InternKind::Constant | InternKind::Promoted => {
             // Completely immutable. Interning anything mutably here can only lead to unsoundness,
             // since all consts are conceptually independent values but share the same underlying
             // memory.
-            (Mutability::Not, Mutability::Not, false)
+            (Mutability::Not, Mutability::Not)
         }
         InternKind::Static(Mutability::Not) => {
             (
@@ -159,30 +141,79 @@ pub fn intern_const_alloc_recursive<
                 // Inner allocations are never mutable. They can only arise via the "tail
                 // expression" / "outer scope" rule, and we treat them consistently with `const`.
                 Mutability::Not,
-                true,
             )
         }
         InternKind::Static(Mutability::Mut) => {
             // Just make everything mutable. We accept code like
             // `static mut X = &mut [42]`, so even inner allocations need to be mutable.
-            (Mutability::Mut, Mutability::Mut, true)
+            (Mutability::Mut, Mutability::Mut)
         }
     };
+    let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
+    let mut todo: Vec<_> = {
+        let base_alloc = &mut ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap().1;
+        base_alloc.mutability = base_mutability;
+        base_alloc.provenance().ptrs().iter().copied().collect()
+    };
+    let mut seen = FxHashSet::default();
+    seen.insert(base_alloc_id);
+    while let Some((_, prov)) = todo.pop() {
+        if !seen.insert(prov.alloc_id()) {
+            // Already processed
+            continue;
+        }
+        let Some((_, alloc)) = &mut ecx.memory.alloc_map.get_mut(&prov.alloc_id()) else {
+            continue;
+        };
+        // We always intern with `inner_mutability`, and furthermore we ensured above that if
+        // that is "immutable", then there are *no* mutable pointers anywhere in the newly
+        // interned memory -- justifying that we can indeed intern immutably. However this also
+        // means we can *not* easily intern immutably here if `prov.immutable()` is true and
+        // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
+        // we'd have to somehow check that they are *all* immutable before deciding that this
+        // allocation can be made immutable. In the future we could consider analyzing all
+        // pointers before deciding which allocations can be made immutable; but for now we are
+        // okay with losing some potential for immutability here. This can anyway only affect
+        // `static mut`.
+        alloc.mutability = inner_mutability;
+        todo.extend(alloc.provenance().ptrs().iter().copied());
+    }
+    Ok(())
+}
+
+/// Intern `ret` and everything it references.
+///
+/// This *cannot raise an interpreter error*. Doing so is left to validation, which
+/// tracks where in the value we are and thus can show much better error messages.
+///
+/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
+#[instrument(level = "debug", skip(ecx))]
+pub fn intern_const_alloc_recursive<
+    'mir,
+    'tcx: 'mir,
+    M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
+>(
+    ecx: &mut InterpCx<'mir, 'tcx, M>,
+    intern_kind: InternKind,
+    ret: &MPlaceTy<'tcx>,
+) -> Result<(), ErrorGuaranteed> {
+    let (inner_mutability, is_static) = match intern_kind {
+        InternKind::Constant | InternKind::Promoted => (Mutability::Not, false),
+        InternKind::Static(mutability) => (mutability, true),
+    };
 
     // Intern the base allocation, and initialize todo list for recursive interning.
     let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
-    trace!(?base_alloc_id, ?base_mutability);
+    trace!(?base_alloc_id);
     // First we intern the base allocation, as it requires a different mutability.
     // This gives us the initial set of nested allocations, which will then all be processed
     // recursively in the loop below.
     let mut todo: Vec<_> = if is_static {
         // Do not steal the root allocation, we need it later to create the return value of `eval_static_initializer`.
-        // But still change its mutability to match the requested one.
-        let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
-        alloc.1.mutability = base_mutability;
+        let alloc = ecx.memory.alloc_map.get(&base_alloc_id).unwrap();
         alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
     } else {
-        intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().collect()
+        intern_shallow(ecx, base_alloc_id).unwrap().collect()
     };
     // We need to distinguish "has just been interned" from "was already in `tcx`",
     // so we track this in a separate set.
@@ -248,19 +279,7 @@ pub fn intern_const_alloc_recursive<
             continue;
         }
         just_interned.insert(alloc_id);
-        // We always intern with `inner_mutability`, and furthermore we ensured above that if
-        // that is "immutable", then there are *no* mutable pointers anywhere in the newly
-        // interned memory -- justifying that we can indeed intern immutably. However this also
-        // means we can *not* easily intern immutably here if `prov.immutable()` is true and
-        // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
-        // we'd have to somehow check that they are *all* immutable before deciding that this
-        // allocation can be made immutable. In the future we could consider analyzing all
-        // pointers before deciding which allocations can be made immutable; but for now we are
-        // okay with losing some potential for immutability here. This can anyway only affect
-        // `static mut`.
-        todo.extend(intern_shallow(ecx, alloc_id, inner_mutability).map_err(|()| {
-            ecx.tcx.dcx().emit_err(DanglingPtrInFinal { span: ecx.tcx.span, kind: intern_kind })
-        })?);
+        todo.extend(intern_shallow(ecx, alloc_id).unwrap());
     }
     if found_bad_mutable_pointer {
         let err_diag = MutablePtrInFinal { span: ecx.tcx.span, kind: intern_kind };
@@ -291,7 +310,8 @@ pub fn intern_const_alloc_for_constprop<
         return Ok(());
     }
     // Move allocation to `tcx`.
-    for _ in intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))? {
+    ecx.memory.alloc_map.get_mut(&alloc_id).unwrap().1.mutability = Mutability::Not;
+    for _ in intern_shallow(ecx, alloc_id).map_err(|()| err_ub!(DeadLocal))? {
         // We are not doing recursive interning, so we don't currently support provenance.
         // (If this assertion ever triggers, we should just implement a
         // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
@@ -318,7 +338,8 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
         let dest = self.allocate(layout, MemoryKind::Stack)?;
         f(self, &dest.clone().into())?;
         let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
-        for prov in intern_shallow(self, alloc_id, Mutability::Not).unwrap() {
+        self.memory.alloc_map.get_mut(&alloc_id).unwrap().1.mutability = Mutability::Not;
+        for prov in intern_shallow(self, alloc_id).unwrap() {
             // We are not doing recursive interning, so we don't currently support provenance.
             // (If this assertion ever triggers, we should just implement a
             // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
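
Note: the call sites are outside these hunks, but the split above implies that callers which previously relied on `intern_const_alloc_recursive` (via `intern_shallow`) to set allocation mutability now patch mutability first and intern afterwards, since interning removes allocations from the machine's `alloc_map`. A minimal sketch of that intended sequence, assuming the usual const-eval setup with an `ecx`, `intern_kind`, and the evaluated `ret` place already in scope (the surrounding driver code is illustrative, not part of this commit):

    // Hypothetical call site: first freeze (or keep mutable) the evaluated
    // allocations while they still live in `ecx.memory.alloc_map`...
    patch_mutability_of_allocs(&mut ecx, intern_kind, &ret)?;
    // ...then move them into global `tcx` memory. `intern_shallow` no longer
    // touches mutability, so patching must happen before interning.
    intern_const_alloc_recursive(&mut ecx, intern_kind, &ret)?;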