 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
                                            StableHashingContextProvider};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::indexed_vec::{Idx, IndexVec};
 use session::config::OutputType;
 use std::cell::{Ref, RefCell};
+use std::hash::Hash;
 use std::rc::Rc;
 use util::common::{ProfileQueriesMsg, profq_msg};
 
@@ -22,7 +24,7 @@ use super::dep_node::{DepNode, DepKind, WorkProductId};
 use super::query::DepGraphQuery;
 use super::raii;
 use super::safe::DepGraphSafe;
-use super::edges::{DepGraphEdges, DepNodeIndex};
+use super::edges::{self, DepGraphEdges};
 
 #[derive(Clone)]
 pub struct DepGraph {
@@ -38,10 +40,34 @@ pub struct DepGraph {
     fingerprints: Rc<RefCell<FxHashMap<DepNode, Fingerprint>>>
 }
 
+/// As a temporary measure, while transitioning to the new DepGraph
+/// implementation, we maintain the old and the new dep-graph encoding in
+/// parallel, so a DepNodeIndex actually contains two indices, one for each
+/// version.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub struct DepNodeIndex {
+    legacy: edges::DepNodeIndex,
+    new: DepNodeIndexNew,
+}
+
+impl DepNodeIndex {
+    pub const INVALID: DepNodeIndex = DepNodeIndex {
+        legacy: edges::DepNodeIndex::INVALID,
+        new: DepNodeIndexNew::INVALID,
+    };
+}
+
 struct DepGraphData {
-    /// The actual graph data.
+    /// The old, initial encoding of the dependency graph. This will soon go
+    /// away.
     edges: RefCell<DepGraphEdges>,
 
+    /// The new encoding of the dependency graph, optimized for red/green
+    /// tracking. The `current` field is the dependency graph of only the
+    /// current compilation session: we don't merge the previous dep-graph
+    /// into the current one anymore.
+    current: RefCell<CurrentDepGraph>,
+
     /// When we load, there may be `.o` files, cached mir, or other such
     /// things available to us. If we find that they are not dirty, we
     /// load the path to the file storing those work-products here into
@@ -63,6 +89,7 @@ impl DepGraph {
                 work_products: RefCell::new(FxHashMap()),
                 edges: RefCell::new(DepGraphEdges::new()),
                 dep_node_debug: RefCell::new(FxHashMap()),
+                current: RefCell::new(CurrentDepGraph::new()),
             }))
         } else {
             None
@@ -82,7 +109,8 @@ impl DepGraph {
     }
 
     pub fn in_ignore<'graph>(&'graph self) -> Option<raii::IgnoreTask<'graph>> {
-        self.data.as_ref().map(|data| raii::IgnoreTask::new(&data.edges))
+        self.data.as_ref().map(|data| raii::IgnoreTask::new(&data.edges,
+                                                            &data.current))
     }
 
     pub fn with_ignore<OP, R>(&self, op: OP) -> R
@@ -130,6 +158,7 @@ impl DepGraph {
     {
         if let Some(ref data) = self.data {
             data.edges.borrow_mut().push_task(key);
+            data.current.borrow_mut().push_task(key);
             if cfg!(debug_assertions) {
                 profq_msg(ProfileQueriesMsg::TaskBegin(key.clone()))
             };
@@ -145,7 +174,9 @@ impl DepGraph {
             if cfg!(debug_assertions) {
                 profq_msg(ProfileQueriesMsg::TaskEnd)
             };
-            let dep_node_index = data.edges.borrow_mut().pop_task(key);
+
+            let dep_node_index_legacy = data.edges.borrow_mut().pop_task(key);
+            let dep_node_index_new = data.current.borrow_mut().pop_task(key);
 
             let mut stable_hasher = StableHasher::new();
             result.hash_stable(&mut hcx, &mut stable_hasher);
@@ -155,7 +186,10 @@ impl DepGraph {
                               .insert(key, stable_hasher.finish())
                               .is_none());
 
-            (result, dep_node_index)
+            (result, DepNodeIndex {
+                legacy: dep_node_index_legacy,
+                new: dep_node_index_new,
+            })
         } else {
             if key.kind.fingerprint_needed_for_crate_hash() {
                 let mut hcx = cx.create_stable_hashing_context();
@@ -180,9 +214,14 @@ impl DepGraph {
     {
         if let Some(ref data) = self.data {
            data.edges.borrow_mut().push_anon_task();
+            data.current.borrow_mut().push_anon_task();
             let result = op();
-            let dep_node = data.edges.borrow_mut().pop_anon_task(dep_kind);
-            (result, dep_node)
+            let dep_node_index_legacy = data.edges.borrow_mut().pop_anon_task(dep_kind);
+            let dep_node_index_new = data.current.borrow_mut().pop_anon_task(dep_kind);
+            (result, DepNodeIndex {
+                legacy: dep_node_index_legacy,
+                new: dep_node_index_new,
+            })
         } else {
             (op(), DepNodeIndex::INVALID)
         }
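An anonymous task has no pre-determined DepNode key; as the `pop_anon_task` implementation further down shows, the node's identity is derived from what the task read, so two anonymous tasks with identical dependencies map to the same node. A minimal sketch of that idea, using the standard library's DefaultHasher in place of rustc's StableHasher/Fingerprint; the names and types here are illustrative only.

use std::collections::HashMap;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// A fake node key; in rustc this would be DepNode { kind, hash }.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct Node(u64);

struct Graph {
    nodes: Vec<Node>,
    node_to_index: HashMap<Node, usize>,
}

impl Graph {
    // Derive the anonymous node's identity from its reads: hash the keys of
    // the read nodes together and reuse an existing node if one with the
    // same signature was created before.
    fn pop_anon_task(&mut self, reads: &[usize]) -> usize {
        let mut hasher = DefaultHasher::new();
        for &r in reads {
            self.nodes[r].hash(&mut hasher);
        }
        let anon_node = Node(hasher.finish());

        if let Some(&index) = self.node_to_index.get(&anon_node) {
            return index; // same reads => same node
        }
        let index = self.nodes.len();
        self.nodes.push(anon_node);
        self.node_to_index.insert(anon_node, index);
        index
    }
}

fn main() {
    let mut g = Graph {
        nodes: vec![Node(1), Node(2)],
        node_to_index: HashMap::new(),
    };
    let a = g.pop_anon_task(&[0, 1]);
    let b = g.pop_anon_task(&[0, 1]);
    assert_eq!(a, b); // identical anonymous tasks are shared
}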
@@ -192,13 +231,15 @@ impl DepGraph {
     pub fn read(&self, v: DepNode) {
         if let Some(ref data) = self.data {
             data.edges.borrow_mut().read(v);
+            data.current.borrow_mut().read(v);
         }
     }
 
     #[inline]
     pub fn read_index(&self, v: DepNodeIndex) {
         if let Some(ref data) = self.data {
-            data.edges.borrow_mut().read_index(v);
+            data.edges.borrow_mut().read_index(v.legacy);
+            data.current.borrow_mut().read_index(v.new);
         }
     }
 
@@ -215,7 +256,13 @@ impl DepGraph {
 
     pub fn alloc_input_node(&self, node: DepNode) -> DepNodeIndex {
         if let Some(ref data) = self.data {
-            data.edges.borrow_mut().add_node(node)
+            let dep_node_index_legacy = data.edges.borrow_mut().add_node(node);
+            let dep_node_index_new = data.current.borrow_mut()
+                                         .alloc_node(node, Vec::new());
+            DepNodeIndex {
+                legacy: dep_node_index_legacy,
+                new: dep_node_index_new,
+            }
         } else {
             DepNodeIndex::INVALID
         }
@@ -335,3 +382,208 @@ pub struct WorkProduct {
     /// Saved files associated with this CGU
     pub saved_files: Vec<(OutputType, String)>,
 }
+
+pub(super) struct CurrentDepGraph {
+    nodes: IndexVec<DepNodeIndexNew, DepNode>,
+    edges: IndexVec<DepNodeIndexNew, Vec<DepNodeIndexNew>>,
+    node_to_node_index: FxHashMap<DepNode, DepNodeIndexNew>,
+
+    task_stack: Vec<OpenTask>,
+}
+
+impl CurrentDepGraph {
+    fn new() -> CurrentDepGraph {
+        CurrentDepGraph {
+            nodes: IndexVec::new(),
+            edges: IndexVec::new(),
+            node_to_node_index: FxHashMap(),
+            task_stack: Vec::new(),
+        }
+    }
+
+    pub(super) fn push_ignore(&mut self) {
+        self.task_stack.push(OpenTask::Ignore);
+    }
+
+    pub(super) fn pop_ignore(&mut self) {
+        let popped_node = self.task_stack.pop().unwrap();
+        debug_assert_eq!(popped_node, OpenTask::Ignore);
+    }
+
+    pub(super) fn push_task(&mut self, key: DepNode) {
+        self.task_stack.push(OpenTask::Regular {
+            node: key,
+            reads: Vec::new(),
+            read_set: FxHashSet(),
+        });
+    }
+
+    pub(super) fn pop_task(&mut self, key: DepNode) -> DepNodeIndexNew {
+        let popped_node = self.task_stack.pop().unwrap();
+
+        if let OpenTask::Regular {
+            node,
+            read_set: _,
+            reads
+        } = popped_node {
+            debug_assert_eq!(node, key);
+            self.alloc_node(node, reads)
+        } else {
+            bug!("pop_task() - Expected regular task to be popped")
+        }
+    }
+
+    fn push_anon_task(&mut self) {
+        self.task_stack.push(OpenTask::Anon {
+            reads: Vec::new(),
+            read_set: FxHashSet(),
+        });
+    }
+
+    fn pop_anon_task(&mut self, kind: DepKind) -> DepNodeIndexNew {
+        let popped_node = self.task_stack.pop().unwrap();
+
+        if let OpenTask::Anon {
+            read_set: _,
+            reads
+        } = popped_node {
+            let mut fingerprint = Fingerprint::zero();
+            let mut hasher = StableHasher::new();
+
+            for &read in reads.iter() {
+                let read_dep_node = self.nodes[read];
+
+                ::std::mem::discriminant(&read_dep_node.kind).hash(&mut hasher);
+
+                // Fingerprint::combine() is faster than sending Fingerprint
+                // through the StableHasher (at least as long as StableHasher
+                // is so slow).
+                fingerprint = fingerprint.combine(read_dep_node.hash);
+            }
+
+            fingerprint = fingerprint.combine(hasher.finish());
+
+            let target_dep_node = DepNode {
+                kind,
+                hash: fingerprint,
+            };
+
+            if let Some(&index) = self.node_to_node_index.get(&target_dep_node) {
+                return index;
+            }
+
+            self.alloc_node(target_dep_node, reads)
+        } else {
+            bug!("pop_anon_task() - Expected anonymous task to be popped")
+        }
+    }
+
+    fn read(&mut self, source: DepNode) {
+        let dep_node_index = self.maybe_alloc_node(source);
+        self.read_index(dep_node_index);
+    }
+
+    fn read_index(&mut self, source: DepNodeIndexNew) {
+        match self.task_stack.last_mut() {
+            Some(&mut OpenTask::Regular {
+                ref mut reads,
+                ref mut read_set,
+                node: _,
+            }) => {
+                if read_set.insert(source) {
+                    reads.push(source);
+                }
+            }
+            Some(&mut OpenTask::Anon {
+                ref mut reads,
+                ref mut read_set,
+            }) => {
+                if read_set.insert(source) {
+                    reads.push(source);
+                }
+            }
+            Some(&mut OpenTask::Ignore) | None => {
+                // ignore
+            }
+        }
+    }
+
+    fn alloc_node(&mut self,
+                  dep_node: DepNode,
+                  edges: Vec<DepNodeIndexNew>)
+                  -> DepNodeIndexNew {
+        debug_assert_eq!(self.edges.len(), self.nodes.len());
+        debug_assert_eq!(self.node_to_node_index.len(), self.nodes.len());
+        debug_assert!(!self.node_to_node_index.contains_key(&dep_node));
+        let dep_node_index = DepNodeIndexNew::new(self.nodes.len());
+        self.nodes.push(dep_node);
+        self.node_to_node_index.insert(dep_node, dep_node_index);
+        self.edges.push(edges);
+        dep_node_index
+    }
+
+    fn maybe_alloc_node(&mut self,
+                        dep_node: DepNode)
+                        -> DepNodeIndexNew {
+        debug_assert_eq!(self.edges.len(), self.nodes.len());
+        debug_assert_eq!(self.node_to_node_index.len(), self.nodes.len());
+
+        let CurrentDepGraph {
+            ref mut node_to_node_index,
+            ref mut nodes,
+            ref mut edges,
+            ..
+        } = *self;
+
+        *node_to_node_index.entry(dep_node).or_insert_with(|| {
+            let next_id = nodes.len();
+            nodes.push(dep_node);
+            edges.push(Vec::new());
+            DepNodeIndexNew::new(next_id)
+        })
+    }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub(super) struct DepNodeIndexNew {
+    index: u32,
+}
+
+impl Idx for DepNodeIndexNew {
+    fn new(idx: usize) -> Self {
+        DepNodeIndexNew::new(idx)
+    }
+    fn index(self) -> usize {
+        self.index()
+    }
+}
+
+impl DepNodeIndexNew {
+
+    const INVALID: DepNodeIndexNew = DepNodeIndexNew {
+        index: ::std::u32::MAX,
+    };
+
+    fn new(v: usize) -> DepNodeIndexNew {
+        assert!((v & 0xFFFF_FFFF) == v);
+        DepNodeIndexNew { index: v as u32 }
+    }
+
+    fn index(self) -> usize {
+        self.index as usize
+    }
+}
+
+#[derive(Clone, Debug, PartialEq)]
+enum OpenTask {
+    Regular {
+        node: DepNode,
+        reads: Vec<DepNodeIndexNew>,
+        read_set: FxHashSet<DepNodeIndexNew>,
+    },
+    Anon {
+        reads: Vec<DepNodeIndexNew>,
+        read_set: FxHashSet<DepNodeIndexNew>,
+    },
+    Ignore,
+}
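The heart of `CurrentDepGraph` is the `task_stack`: while a task is open, every read lands on the innermost `OpenTask` frame, deduplicated by `read_set` but kept in first-read order in `reads`, and popping the task turns exactly those reads into the edge list of the newly allocated node. A self-contained sketch of that mechanism with simplified types (not the rustc API):

use std::collections::HashSet;

type NodeIndex = usize;

// One open task on the stack: the reads it has performed so far,
// in first-read order, plus a set for O(1) duplicate detection.
struct OpenFrame {
    reads: Vec<NodeIndex>,
    read_set: HashSet<NodeIndex>,
}

struct TaskTracker {
    // edges[n] lists the dependencies recorded for node n.
    edges: Vec<Vec<NodeIndex>>,
    task_stack: Vec<OpenFrame>,
}

impl TaskTracker {
    fn push_task(&mut self) {
        self.task_stack.push(OpenFrame {
            reads: Vec::new(),
            read_set: HashSet::new(),
        });
    }

    // Record a read against the innermost open task; reads outside any
    // open task are dropped, mirroring OpenTask::Ignore / an empty stack.
    fn read(&mut self, source: NodeIndex) {
        if let Some(frame) = self.task_stack.last_mut() {
            if frame.read_set.insert(source) {
                frame.reads.push(source);
            }
        }
    }

    // Close the task and allocate a node whose edges are exactly the
    // deduplicated reads.
    fn pop_task(&mut self) -> NodeIndex {
        let frame = self.task_stack.pop().expect("no open task");
        let index = self.edges.len();
        self.edges.push(frame.reads);
        index
    }
}

fn main() {
    let mut tracker = TaskTracker {
        edges: vec![Vec::new(), Vec::new()],
        task_stack: Vec::new(),
    };
    tracker.push_task();
    tracker.read(0);
    tracker.read(1);
    tracker.read(0); // duplicate, recorded only once
    let node = tracker.pop_task();
    assert_eq!(tracker.edges[node], vec![0, 1]);
}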