@@ -20,36 +20,43 @@ use std::collections::hash_map::Entry;
 use std::fmt;
 use std::iter;
 use std::ops::Index;
-use std::usize;
 
 use super::dataflow::BitDenotation;
 use super::abs_domain::{AbstractElem, Lift};
 
-macro_rules! new_index {
-    ($Index:ident, $INVALID_INDEX:ident) => {
-        #[derive(Copy, Clone, PartialEq, Eq, Debug)]
-        pub struct $Index(usize);
-
-        const $INVALID_INDEX: $Index = $Index(usize::MAX);
-
-        impl $Index {
-            pub fn idx(&self) -> Option<usize> {
-                if *self == $INVALID_INDEX {
-                    None
-                } else {
-                    Some(self.0)
+// This submodule holds some newtype'd Index wrappers that are using
+// NonZero to ensure that Option<Index> occupies only a single word.
+// They are in a submodule to impose privacy restrictions; namely, to
+// ensure that other code does not accidentally access `index.0`
+// (which is likely to yield a subtle off-by-one error).
+mod indexes {
+    use core::nonzero::NonZero;
+
+    macro_rules! new_index {
+        ($Index:ident) => {
+            #[derive(Copy, Clone, PartialEq, Eq, Debug)]
+            pub struct $Index(NonZero<usize>);
+
+            impl $Index {
+                pub fn new(idx: usize) -> Self {
+                    unsafe { $Index(NonZero::new(idx + 1)) }
+                }
+                pub fn idx(&self) -> usize {
+                    *self.0 - 1
                 }
             }
         }
     }
-}
 
-/// Index into MovePathData.move_paths
-new_index!(MovePathIndex, INVALID_MOVE_PATH_INDEX);
+    /// Index into MovePathData.move_paths
+    new_index!(MovePathIndex);
 
-/// Index into MoveData.moves.
-new_index!(MoveOutIndex, INVALID_MOVE_OUT_INDEX);
+    /// Index into MoveData.moves.
+    new_index!(MoveOutIndex);
+}
 
+pub use self::indexes::MovePathIndex;
+pub use self::indexes::MoveOutIndex;
 
 /// `MovePath` is a canonicalized representation of a path that is
 /// moved or assigned to.
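The comment block above motivates the switch from a sentinel-based `usize` index to a `NonZero`-wrapped newtype. As a quick illustration of the size guarantee it relies on, here is a standalone sketch, not part of the patch, that uses the stable `std::num::NonZeroUsize` as a stand-in for the compiler-internal `core::nonzero::NonZero<usize>` used in the diff:

```rust
// Standalone sketch illustrating why the newtype wraps a non-zero value:
// the compiler can reuse the forbidden all-zero bit pattern to encode
// `None`, so `Option<Index>` stays one word wide.
use std::mem::size_of;
use std::num::NonZeroUsize;

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Index(NonZeroUsize);

impl Index {
    pub fn new(idx: usize) -> Self {
        // Shift by one so that index 0 is representable as the non-zero value 1.
        Index(NonZeroUsize::new(idx + 1).unwrap())
    }
    pub fn idx(&self) -> usize {
        self.0.get() - 1
    }
}

fn main() {
    // The point of the change: no extra discriminant word for Option<Index>.
    assert_eq!(size_of::<Option<Index>>(), size_of::<usize>());
    assert_eq!(Index::new(0).idx(), 0);
    println!("Option<Index> is {} bytes", size_of::<Option<Index>>());
}
```

The `+ 1` on construction and `- 1` on readback are what keep index 0 representable while reserving the zero bit pattern for `None`.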
@@ -65,9 +72,9 @@ new_index!(MoveOutIndex, INVALID_MOVE_OUT_INDEX);
 /// they both have the MovePath representing `x` as their parent.
 #[derive(Clone)]
 pub struct MovePath<'tcx> {
-    pub next_sibling: MovePathIndex,
-    pub first_child: MovePathIndex,
-    pub parent: MovePathIndex,
+    pub next_sibling: Option<MovePathIndex>,
+    pub first_child: Option<MovePathIndex>,
+    pub parent: Option<MovePathIndex>,
     pub lvalue: Lvalue<'tcx>,
 }
 
@@ -76,9 +83,9 @@ pub struct MovePath<'tcx> {
 /// children of each path.
 #[derive(Clone)]
 struct PreMovePath<'tcx> {
-    pub next_sibling: MovePathIndex,
-    pub first_child: Cell<MovePathIndex>,
-    pub parent: MovePathIndex,
+    pub next_sibling: Option<MovePathIndex>,
+    pub first_child: Cell<Option<MovePathIndex>>,
+    pub parent: Option<MovePathIndex>,
     pub lvalue: Lvalue<'tcx>,
 }
 
@@ -96,14 +103,14 @@ impl<'tcx> PreMovePath<'tcx> {
 impl<'tcx> fmt::Debug for MovePath<'tcx> {
     fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
         try!(write!(w, "MovePath {{"));
-        if self.parent != INVALID_MOVE_PATH_INDEX {
-            try!(write!(w, " parent: {:?},", self.parent));
+        if let Some(parent) = self.parent {
+            try!(write!(w, " parent: {:?},", parent));
         }
-        if self.first_child != INVALID_MOVE_PATH_INDEX {
-            try!(write!(w, " first_child: {:?},", self.first_child));
+        if let Some(first_child) = self.first_child {
+            try!(write!(w, " first_child: {:?},", first_child));
         }
-        if self.next_sibling != INVALID_MOVE_PATH_INDEX {
-            try!(write!(w, " next_sibling: {:?}", self.next_sibling));
+        if let Some(next_sibling) = self.next_sibling {
+            try!(write!(w, " next_sibling: {:?}", next_sibling));
         }
         write!(w, " lvalue: {:?} }}", self.lvalue)
     }
@@ -147,8 +154,7 @@ pub struct PathMap {
 impl Index<MovePathIndex> for PathMap {
     type Output = [MoveOutIndex];
     fn index(&self, index: MovePathIndex) -> &Self::Output {
-        assert!(index != INVALID_MOVE_PATH_INDEX);
-        &self.map[index.0]
+        &self.map[index.idx()]
     }
 }
 
@@ -168,7 +174,7 @@ pub struct MoveOut {
 
 impl fmt::Debug for MoveOut {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-        write!(fmt, "p{}@{:?}", self.path.0, self.source)
+        write!(fmt, "p{}@{:?}", self.path.idx(), self.source)
     }
 }
 
@@ -194,13 +200,13 @@ pub struct MovePathData<'tcx> {
 impl<'tcx> Index<MovePathIndex> for MovePathData<'tcx> {
     type Output = MovePath<'tcx>;
     fn index(&self, i: MovePathIndex) -> &MovePath<'tcx> {
-        &self.move_paths[i.idx().unwrap()]
+        &self.move_paths[i.idx()]
     }
 }
 
-/// MovePathRevIndex maps from a uint in an lvalue-category to the
+/// MovePathInverseMap maps from a uint in an lvalue-category to the
 /// MovePathIndex for the MovePath for that lvalue.
-type MovePathRevIndex = Vec<MovePathIndex>;
+type MovePathInverseMap = Vec<Option<MovePathIndex>>;
 
 struct MovePathDataBuilder<'a, 'tcx: 'a> {
     mir: &'a Mir<'tcx>,
@@ -210,9 +216,9 @@ struct MovePathDataBuilder<'a, 'tcx: 'a> {
 
 /// Tables mapping from an l-value to its MovePathIndex.
 pub struct MovePathLookup<'tcx> {
-    vars: MovePathRevIndex,
-    temps: MovePathRevIndex,
-    args: MovePathRevIndex,
+    vars: MovePathInverseMap,
+    temps: MovePathInverseMap,
+    args: MovePathInverseMap,
     statics: FnvHashMap<DefId, MovePathIndex>,
     return_ptr: Option<MovePathIndex>,
 
@@ -254,7 +260,7 @@ enum LookupKind { Generate, Reuse }
 struct Lookup<T>(LookupKind, T);
 
 impl Lookup<MovePathIndex> {
-    fn idx(&self) -> usize { (self.1).0 }
+    fn idx(&self) -> usize { (self.1).idx() }
 }
 
 impl<'tcx> MovePathLookup<'tcx> {
@@ -266,28 +272,31 @@ impl<'tcx> MovePathLookup<'tcx> {
             statics: Default::default(),
             return_ptr: None,
             projections: vec![],
-            next_index: MovePathIndex(0),
+            next_index: MovePathIndex::new(0),
         }
     }
 
     fn next_index(next: &mut MovePathIndex) -> MovePathIndex {
         let i = *next;
-        *next = MovePathIndex(i.0 + 1);
+        *next = MovePathIndex::new(i.idx() + 1);
         i
     }
 
-    fn lookup_or_generate(vec: &mut Vec<MovePathIndex>,
+    fn lookup_or_generate(vec: &mut Vec<Option<MovePathIndex>>,
                           idx: u32,
                           next_index: &mut MovePathIndex) -> Lookup<MovePathIndex> {
         let idx = idx as usize;
-        vec.fill_to_with(idx, INVALID_MOVE_PATH_INDEX);
+        vec.fill_to_with(idx, None);
         let entry = &mut vec[idx];
-        if *entry == INVALID_MOVE_PATH_INDEX {
-            let i = Self::next_index(next_index);
-            *entry = i;
-            Lookup(LookupKind::Generate, i)
-        } else {
-            Lookup(LookupKind::Reuse, *entry)
+        match *entry {
+            None => {
+                let i = Self::next_index(next_index);
+                *entry = Some(i);
+                Lookup(LookupKind::Generate, i)
+            }
+            Some(entry_idx) => {
+                Lookup(LookupKind::Reuse, entry_idx)
+            }
         }
     }
 
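For context on the hunk above: `lookup_or_generate` now records absent slots as `None` instead of the `INVALID_MOVE_PATH_INDEX` sentinel, so the type system forces every caller to handle the missing case. Below is a standalone sketch of the same generate-or-reuse pattern, with `Vec::resize` approximating the crate-local `fill_to_with` helper and a plain `usize` standing in for the newtype'd index:

```rust
// Sketch of the generate-or-reuse lookup over a sparse table of
// Option entries: missing slots are literal `None`s rather than a
// magic sentinel value.
#[derive(Debug, PartialEq)]
enum LookupKind { Generate, Reuse }

fn lookup_or_generate(vec: &mut Vec<Option<usize>>,
                      idx: usize,
                      next_index: &mut usize) -> (LookupKind, usize) {
    // Grow the table with explicit "absent" entries up to `idx`.
    if vec.len() <= idx {
        vec.resize(idx + 1, None);
    }
    match vec[idx] {
        None => {
            // First time this slot is seen: allocate a fresh index.
            let i = *next_index;
            *next_index += 1;
            vec[idx] = Some(i);
            (LookupKind::Generate, i)
        }
        Some(existing) => (LookupKind::Reuse, existing),
    }
}

fn main() {
    let mut table = Vec::new();
    let mut next = 0;
    assert_eq!(lookup_or_generate(&mut table, 3, &mut next), (LookupKind::Generate, 0));
    assert_eq!(lookup_or_generate(&mut table, 3, &mut next), (LookupKind::Reuse, 0));
}
```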
@@ -342,8 +351,8 @@ impl<'tcx> MovePathLookup<'tcx> {
                        base: MovePathIndex) -> Lookup<MovePathIndex> {
         let MovePathLookup { ref mut projections,
                              ref mut next_index, .. } = *self;
-        projections.fill_to(base.0);
-        match projections[base.0].entry(proj.elem.lift()) {
+        projections.fill_to(base.idx());
+        match projections[base.idx()].entry(proj.elem.lift()) {
             Entry::Occupied(ent) => {
                 Lookup(LookupKind::Reuse, *ent.get())
             }
@@ -362,14 +371,14 @@ impl<'tcx> MovePathLookup<'tcx> {
     // unknown l-value; it will simply panic.
     pub fn find(&self, lval: &Lvalue<'tcx>) -> MovePathIndex {
         match *lval {
-            Lvalue::Var(var_idx) => self.vars[var_idx as usize],
-            Lvalue::Temp(temp_idx) => self.temps[temp_idx as usize],
-            Lvalue::Arg(arg_idx) => self.args[arg_idx as usize],
+            Lvalue::Var(var_idx) => self.vars[var_idx as usize].unwrap(),
+            Lvalue::Temp(temp_idx) => self.temps[temp_idx as usize].unwrap(),
+            Lvalue::Arg(arg_idx) => self.args[arg_idx as usize].unwrap(),
             Lvalue::Static(ref def_id) => self.statics[def_id],
             Lvalue::ReturnPointer => self.return_ptr.unwrap(),
             Lvalue::Projection(ref proj) => {
                 let base_index = self.find(&proj.base);
-                self.projections[base_index.0 as usize][&proj.elem.lift()]
+                self.projections[base_index.idx()][&proj.elem.lift()]
             }
         }
     }
@@ -418,8 +427,8 @@ impl<'a, 'tcx> MovePathDataBuilder<'a, 'tcx> {
         match *lval {
             Lvalue::Var(_) | Lvalue::Temp(_) | Lvalue::Arg(_) |
             Lvalue::Static(_) | Lvalue::ReturnPointer => {
-                sibling = INVALID_MOVE_PATH_INDEX;
-                parent = INVALID_MOVE_PATH_INDEX;
+                sibling = None;
+                parent = None;
             }
             Lvalue::Projection(ref proj) => {
                 // Here, install new MovePath as new first_child.
@@ -428,22 +437,23 @@ impl<'a, 'tcx> MovePathDataBuilder<'a, 'tcx> {
 
                 // Note: `parent` previously allocated (Projection
                 // case of match above established this).
-                parent = self.move_path_for(&proj.base);
+                let idx = self.move_path_for(&proj.base);
+                parent = Some(idx);
 
                 pre_move_paths = self.pre_move_paths.borrow_mut();
-                let parent_move_path = &mut pre_move_paths[parent.0];
+                let parent_move_path = &mut pre_move_paths[idx.idx()];
 
                 // At last: Swap in the new first_child.
                 sibling = parent_move_path.first_child.get();
-                parent_move_path.first_child.set(mpi);
+                parent_move_path.first_child.set(Some(mpi));
             }
         };
 
         let move_path = PreMovePath {
             next_sibling: sibling,
             parent: parent,
             lvalue: lval.clone(),
-            first_child: Cell::new(INVALID_MOVE_PATH_INDEX),
+            first_child: Cell::new(None),
         };
 
         pre_move_paths.push(move_path);
@@ -610,8 +620,8 @@ fn gather_moves<'tcx>(mir: &Mir<'tcx>, tcx: &ty::TyCtxt<'tcx>) -> MoveData<'tcx>
     let mut seen: Vec<_> = move_paths.iter().map(|_| false).collect();
     for (j, &MoveOut { ref path, ref source }) in moves.iter().enumerate() {
         debug!("MovePathData moves[{}]: MoveOut {{ path: {:?} = {:?}, source: {:?} }}",
-               j, path, move_paths[path.0], source);
-        seen[path.0] = true;
+               j, path, move_paths[path.idx()], source);
+        seen[path.idx()] = true;
     }
     for (j, path) in move_paths.iter().enumerate() {
         if !seen[j] {
@@ -664,24 +674,24 @@ impl<'b, 'a: 'b, 'tcx: 'a> BlockContext<'b, 'a, 'tcx> {
             return;
         }
         let i = source.index;
-        let index = MoveOutIndex(self.moves.len());
+        let index = MoveOutIndex::new(self.moves.len());
 
         let path = builder.move_path_for(lval);
         self.moves.push(MoveOut { path: path, source: source.clone() });
-        self.path_map.fill_to(path.0);
+        self.path_map.fill_to(path.idx());
 
         debug!("ctxt: {:?} add consume of lval: {:?} \
                 at index: {:?} \
                 to path_map for path: {:?} and \
                 to loc_map for loc: {:?}",
                stmt_kind, lval, index, path, source);
 
-        debug_assert!(path.0 < self.path_map.len());
+        debug_assert!(path.idx() < self.path_map.len());
         // this is actually a questionable assert; at the very
         // least, incorrect input code can probably cause it to
         // fire.
-        assert!(self.path_map[path.0].iter().find(|idx| **idx == index).is_none());
-        self.path_map[path.0].push(index);
+        assert!(self.path_map[path.idx()].iter().find(|idx| **idx == index).is_none());
+        self.path_map[path.idx()].push(index);
 
         debug_assert!(i < self.loc_map_bb.len());
         debug_assert!(self.loc_map_bb[i].iter().find(|idx| **idx == index).is_none());