@@ -44,31 +44,14 @@ struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_ev
     /// The ectx from which we intern.
     ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
     /// Previously encountered safe references.
-    ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, InternMode)>,
+    ref_tracking: &'rt mut RefTracking<MPlaceTy<'tcx>>,
     /// A list of all encountered allocations. After type-based interning, we traverse this list to
     /// also intern allocations that are only referenced by a raw pointer or inside a union.
     leftover_allocations: &'rt mut FxIndexSet<AllocId>,
-    /// The root kind of the value that we're looking at. This field is never mutated for a
-    /// particular allocation. It is primarily used to make as many allocations as possible
-    /// read-only so LLVM can place them in const memory.
-    mode: InternMode,
-    /// This field stores whether we are *currently* inside an `UnsafeCell`. This can affect
-    /// the intern mode of references we encounter.
-    inside_unsafe_cell: bool,
     /// The mutability with which to intern the pointers we find.
     intern_mutability: Mutability,
 }
 
-#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
-enum InternMode {
-    /// A static and its current mutability. Below shared references inside a `static mut`,
-    /// this is *immutable*, and below mutable references inside an `UnsafeCell`, this
-    /// is *mutable*.
-    Static(hir::Mutability),
-    /// A `const`.
-    Const,
-}
-
 /// Signalling data structure to ensure we don't recurse
 /// into the memory of other constants or statics
 struct IsStaticOrFn;
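
For context: the deleted `InternMode` distinguished interning a `static` (with its mutability) from interning a `const`. Below is a minimal user-level sketch, illustrative only and not rustc-internal code, of the item kinds involved; per the surrounding diff, that distinction is now carried by the base/inner `Mutability` that ends up in `intern_mutability`.

```rust
// Illustrative only: the item kinds whose interning the removed `InternMode`
// used to distinguish. After this change, the distinction comes from the
// base/inner `Mutability` passed to the visitor as `intern_mutability`.
use std::sync::atomic::{AtomicU32, Ordering};

static RO: &[u8] = b"read-only data"; // plain `static`: candidate for read-only memory
static mut RW: u32 = 0; // `static mut`: the backing allocation must stay mutable
static COUNTER: AtomicU32 = AtomicU32::new(0); // interior mutability (`UnsafeCell` inside)
const C: &u32 = &42; // `const`: interned immutably

fn main() {
    COUNTER.fetch_add(1, Ordering::Relaxed);
    // SAFETY: no other threads access `RW` in this single-threaded example.
    unsafe { RW += 1 };
    println!("{} {} {}", RO.len(), COUNTER.load(Ordering::Relaxed), C);
}
```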
@@ -151,7 +134,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
         // Raw pointers (and boxes) are handled by the `leftover_allocations` logic.
         let tcx = self.ecx.tcx;
         let ty = mplace.layout.ty;
-        if let ty::Ref(_, referenced_ty, ref_mutability) = *ty.kind() {
+        if let ty::Ref(_, referenced_ty, _ref_mutability) = *ty.kind() {
             let value = self.ecx.read_immediate(mplace)?;
             let mplace = self.ecx.ref_to_mplace(&value)?;
             assert_eq!(mplace.layout.ty, referenced_ty);
@@ -174,56 +157,14 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
         // Check if we have encountered this pointer+layout combination before.
         // Only recurse for allocation-backed pointers.
         if let Some(alloc_id) = mplace.ptr().provenance {
-            // Compute the mode with which we intern this. Our goal here is to make as many
-            // statics as we can immutable so they can be placed in read-only memory by LLVM.
-            let ref_mode = match self.mode {
-                InternMode::Static(mutbl) => {
-                    // In statics, merge outer mutability with reference mutability and
-                    // take into account whether we are in an `UnsafeCell`.
-
-                    // The only way a mutable reference actually works as a mutable reference is
-                    // by being in a `static mut` directly or behind another mutable reference.
-                    // If there's an immutable reference or we are inside a `static`, then our
-                    // mutable reference is equivalent to an immutable one. As an example:
-                    // `&&mut Foo` is semantically equivalent to `&&Foo`
-                    match ref_mutability {
-                        _ if self.inside_unsafe_cell => {
-                            // Inside an `UnsafeCell` is like inside a `static mut`, the "outer"
-                            // mutability does not matter.
-                            InternMode::Static(ref_mutability)
-                        }
-                        Mutability::Not => {
-                            // A shared reference, things become immutable.
-                            // We do *not* consider `freeze` here: `intern_shallow` considers
-                            // `freeze` for the actual mutability of this allocation; the intern
-                            // mode for references contained in this allocation is tracked more
-                            // precisely when traversing the referenced data (by tracking
-                            // `UnsafeCell`). This makes sure that `&(&i32, &Cell<i32>)` still
-                            // has the left inner reference interned into a read-only
-                            // allocation.
-                            InternMode::Static(Mutability::Not)
-                        }
-                        Mutability::Mut => {
-                            // Mutable reference.
-                            InternMode::Static(mutbl)
-                        }
-                    }
-                }
-                InternMode::Const => {
-                    // Ignore `UnsafeCell`, everything is immutable. Validity does some sanity
-                    // checking for mutable references that we encounter -- they must all be
-                    // ZST.
-                    InternMode::Const
-                }
-            };
             match self.intern_shallow(alloc_id) {
                 // No need to recurse, these are interned already and statics may have
                 // cycles, so we don't want to recurse there
                 Some(IsStaticOrFn) => {}
                 // intern everything referenced by this value. The mutability is taken from the
                 // reference. It is checked above that mutable references only happen in
                 // `static mut`
-                None => self.ref_tracking.track((mplace, ref_mode), || ()),
+                None => self.ref_tracking.track(mplace, || ()),
             }
         }
         Ok(())
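
The removed comment above notes that `&&mut Foo` is semantically equivalent to `&&Foo`: behind a shared reference, a mutable reference cannot actually be used for mutation. A minimal stand-alone illustration (plain user code, unrelated to the interner itself):

```rust
// Illustrative only: once a `&mut` sits behind a shared reference, it can no
// longer be used to mutate, which is why the removed code treated `&&mut Foo`
// like `&&Foo` when deciding how to intern.
fn main() {
    let mut x = 5;
    let inner: &mut i32 = &mut x;
    let outer: &&mut i32 = &inner;
    // **outer += 1; // ERROR: cannot assign to `**outer`, which is behind a `&` reference
    println!("{}", **outer); // reading through the shared reference is fine
}
```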
@@ -272,19 +213,6 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
             return Ok(());
         }
 
-        if let Some(def) = mplace.layout.ty.ty_adt_def() {
-            if def.is_unsafe_cell() {
-                // We are crossing over an `UnsafeCell`, we can mutate again. This means that
-                // References we encounter inside here are interned as pointing to mutable
-                // allocations.
-                // Remember the `old` value to handle nested `UnsafeCell`.
-                let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
-                let walked = self.walk_value(mplace);
-                self.inside_unsafe_cell = old;
-                return walked;
-            }
-        }
-
         self.walk_value(mplace)
     }
 }
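
The deleted block relied on the fact that crossing an `UnsafeCell` makes data mutable again even behind shared references, which is what the `inside_unsafe_cell` flag tracked during traversal. A small stand-alone illustration using `Cell` (which wraps `UnsafeCell`), not interner code:

```rust
// Illustrative only: interior mutability means mutation is possible even
// through a shared reference, so such data cannot be interned as read-only.
use std::cell::Cell;

fn main() {
    let c = Cell::new(1);
    let shared: &Cell<i32> = &c; // a shared reference...
    shared.set(2);               // ...yet mutation is allowed through the `UnsafeCell` inside
    assert_eq!(c.get(), 2);
}
```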
@@ -314,11 +242,6 @@ pub fn intern_const_alloc_recursive<
     intern_kind: InternKind,
     ret: &MPlaceTy<'tcx>,
 ) -> Result<(), ErrorGuaranteed> {
-    let base_intern_mode = match intern_kind {
-        InternKind::Static(mutbl) => InternMode::Static(mutbl),
-        // `Constant` includes array lengths.
-        InternKind::Constant | InternKind::Promoted => InternMode::Const,
-    };
     // We are interning recursively, and for mutability we are distinguishing the "root" allocation
     // that we are starting in, and all other allocations that we are encountering recursively.
     let (base_mutability, inner_mutability) = match intern_kind {
@@ -363,20 +286,18 @@ pub fn intern_const_alloc_recursive<
363
286
base_mutability,
364
287
) ;
365
288
366
- ref_tracking. track ( ( ret. clone ( ) , base_intern_mode ) , || ( ) ) ;
289
+ ref_tracking. track( ret. clone( ) , || ( ) ) ;
367
290
368
291
// We do a type-based traversal to find more allocations to intern. The interner is currently
369
292
// mid-refactoring; eventually the type-based traversal will be replaced but a simple traversal
370
293
// of all provenance we see in the allocations, but for now we avoid changing rustc error
371
294
// messages or accepting extra programs by keeping the old type-based interner around.
372
- while let Some ( ( ( mplace, mode ) , _) ) = ref_tracking. todo . pop ( ) {
295
+ while let Some ( ( mplace, _) ) = ref_tracking. todo. pop( ) {
373
296
let res = InternVisitor {
374
297
ref_tracking: & mut ref_tracking ,
375
298
ecx,
376
- mode,
377
299
leftover_allocations,
378
300
intern_mutability : inner_mutability ,
379
- inside_unsafe_cell : false ,
380
301
}
381
302
. visit_value( & mplace ) ;
382
303
// We deliberately *ignore* interpreter errors here. When there is a problem, the remaining
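
The `track`/`todo.pop` pair above is what keeps recursion through cyclic statics finite: each place is queued at most once. Below is a simplified stand-in for that worklist, using a hypothetical `WorkList` type rather than rustc's actual `RefTracking` (whose `track` also takes a lazily-built extra-data closure, the `|| ()` seen in the diff).

```rust
use std::collections::HashSet;
use std::hash::Hash;

/// Hypothetical, simplified stand-in for the interner's worklist: each item is
/// tracked at most once, so walking cyclic graphs (e.g. statics that point at
/// each other) terminates.
struct WorkList<T: Eq + Hash + Clone> {
    seen: HashSet<T>,
    todo: Vec<T>,
}

impl<T: Eq + Hash + Clone> WorkList<T> {
    fn new() -> Self {
        WorkList { seen: HashSet::new(), todo: Vec::new() }
    }

    /// Queue `item` unless it was queued before.
    fn track(&mut self, item: T) {
        if self.seen.insert(item.clone()) {
            self.todo.push(item);
        }
    }
}

fn main() {
    let mut wl = WorkList::new();
    wl.track("root");
    while let Some(item) = wl.todo.pop() {
        // "Visit" the item; visiting may discover more items to track.
        println!("visiting {item}");
        if item == "root" {
            wl.track("child");
            wl.track("root"); // already seen: ignored, so no infinite loop
        }
    }
}
```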