@@ -19,14 +19,21 @@ pub enum ExhaustiveDepId {
   ObjectProperty(ObjectId, ObjectPropertyId),
 }
 
+#[derive(Debug)]
+pub struct ExhaustiveData {
+  pub clean: bool,
+  pub temp_deps: Option<FxHashSet<ExhaustiveDepId>>,
+  pub register_deps: Option<FxHashSet<ExhaustiveDepId>>,
+}
+
 #[derive(Clone)]
 pub struct ExhaustiveCallback<'a> {
   pub handler: Rc<dyn Fn(&mut Analyzer<'a>) + 'a>,
-  pub once: bool,
+  pub drain: bool,
 }
 impl<'a> PartialEq for ExhaustiveCallback<'a> {
   fn eq(&self, other: &Self) -> bool {
-    self.once == other.once && Rc::ptr_eq(&self.handler, &other.handler)
+    self.drain == other.drain && Rc::ptr_eq(&self.handler, &other.handler)
   }
 }
 impl<'a> Eq for ExhaustiveCallback<'a> {}
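The new `ExhaustiveData` replaces the previous single dependency set with two optional ones, so a scope only pays for the tracking it needs: `temp_deps` is present only when the scope iterates to a fixed point, and `register_deps` only when callbacks are registered on the collected deps afterwards. A minimal sketch of how the two flags populate the struct through `bool::then`, with `std`'s `HashSet` standing in for `FxHashSet` and `make_data` as an illustrative helper that is not part of the diff:

```rust
// Sketch: how (drain, register) map onto ExhaustiveData's optional sets.
use std::collections::HashSet;

type Id = u32; // stands in for ExhaustiveDepId

fn make_data(drain: bool, register: bool) -> (bool, Option<HashSet<Id>>, Option<HashSet<Id>>) {
  (
    true,                            // clean: no invalidating write seen yet
    drain.then(HashSet::default),    // temp_deps: per-round reads, for fixed-point iteration
    register.then(HashSet::default), // register_deps: reads to attach callbacks to afterwards
  )
}

fn main() {
  let (clean, temp, reg) = make_data(true, false);
  assert!(clean && temp.is_some() && reg.is_none());
}
```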
@@ -40,7 +47,7 @@ impl<'a> Analyzer<'a> {
   pub fn exec_loop(&mut self, runner: impl Fn(&mut Analyzer<'a>) + 'a) {
     let runner = Rc::new(runner);
 
-    self.exec_exhaustively("loop", runner.clone(), false);
+    self.exec_exhaustively("loop", true, false, runner.clone());
 
     let cf_scope = self.cf_scope();
     if cf_scope.referred_state != ReferredState::ReferredClean && cf_scope.deps.may_not_referred() {
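Note the signature change: `exec_exhaustively(kind, runner, once)` becomes `exec_exhaustively(kind, drain, register, runner)`. From the three call sites in this diff, the flag pairs appear to be the following; the constant names are illustrative, and reading `drain = false` as "single pass" is inferred from the removed `once` branch:

```rust
// Illustrative summary only; these constants are not in the source.
const LOOP: (bool, bool) = (true, false);               // fixed-point iteration, no callback registration
const CONSUMED_FN: (bool, bool) = (true, true);         // fixed-point iteration + register callbacks
const ASYNC_OR_GENERATOR: (bool, bool) = (false, true); // single pass, but register callbacks
```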
@@ -66,51 +73,55 @@ impl<'a> Analyzer<'a> {
       }
       analyzer.pop_cf_scope();
     });
-    let deps = self.exec_exhaustively(kind, runner.clone(), false);
-    self.register_exhaustive_callbacks(false, runner, deps);
+    self.exec_exhaustively(kind, true, true, runner);
   }
 
   pub fn exec_async_or_generator_fn(&mut self, runner: impl Fn(&mut Analyzer<'a>) + 'a) {
-    let runner = Rc::new(runner);
-    let deps = self.exec_exhaustively("async/generator", runner.clone(), true);
-    self.register_exhaustive_callbacks(true, runner, deps);
+    self.exec_exhaustively("async/generator", false, true, Rc::new(runner));
   }
 
   fn exec_exhaustively(
     &mut self,
     _kind: &str,
+    drain: bool,
+    register: bool,
     runner: Rc<dyn Fn(&mut Analyzer<'a>) + 'a>,
-    once: bool,
-  ) -> FxHashSet<ExhaustiveDepId> {
-    self.push_cf_scope(CfScopeKind::Exhaustive(Default::default()), Some(false));
+  ) {
+    self.push_cf_scope(
+      CfScopeKind::Exhaustive(ExhaustiveData {
+        clean: true,
+        temp_deps: drain.then(FxHashSet::default),
+        register_deps: register.then(Default::default),
+      }),
+      Some(false),
+    );
     let mut round_counter = 0;
-    while self.cf_scope_mut().iterate_exhaustively() {
+    loop {
       #[cfg(feature = "flame")]
       let _scope_guard = flame::start_guard(format!(
         "!{_kind}@{:06X} x{}",
         (Rc::as_ptr(&runner) as *const () as usize) & 0xFFFFFF,
         round_counter
       ));
-
       runner(self);
       round_counter += 1;
-      if once {
-        let data = self.cf_scope_mut().exhaustive_data_mut().unwrap();
-        data.clean = true;
-        break;
-      }
       if round_counter > 1000 {
         unreachable!("Exhaustive loop is too deep");
       }
+      if !self.cf_scope_mut().post_exhaustive_iterate() {
+        break;
+      }
     }
     let id = self.pop_cf_scope();
     let data = self.scoping.cf.get_mut(id).exhaustive_data_mut().unwrap();
-    mem::take(&mut data.deps)
+    if let Some(register_deps) = data.register_deps.take() {
+      self.register_exhaustive_callbacks(drain, runner, register_deps);
+    }
   }
 
   fn register_exhaustive_callbacks(
     &mut self,
-    once: bool,
+    drain: bool,
     handler: Rc<dyn Fn(&mut Analyzer<'a>) + 'a>,
     deps: FxHashSet<ExhaustiveDepId>,
   ) {
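The body now always enters `loop` and delegates the exit decision to `post_exhaustive_iterate`, which presumably reports whether a dependency read this round was later written (resetting the per-round state), while the old `once` early-exit collapses into the `drain = false` configuration. A self-contained sketch of the same fixed-point pattern, with illustrative names:

```rust
// Run `body` until a clean pass, mirroring the rewritten loop above.
// `body` returns true when a dep it read was invalidated by a later write.
fn run_to_fixed_point(mut body: impl FnMut() -> bool) {
  let mut rounds = 0;
  loop {
    let dirty = body(); // one analysis pass: record reads, detect conflicting writes
    rounds += 1;
    if rounds > 1000 {
      unreachable!("Exhaustive loop is too deep");
    }
    if !dirty {
      break; // clean pass: no read dep was invalidated, fixed point reached
    }
  }
}

fn main() {
  let mut remaining = 3;
  run_to_fixed_point(|| {
    remaining -= 1;
    remaining > 0 // dirty until the state stabilizes
  });
  assert_eq!(remaining, 0);
}
```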
@@ -119,43 +130,58 @@ impl<'a> Analyzer<'a> {
         .exhaustive_callbacks
         .entry(id)
         .or_default()
-        .insert(ExhaustiveCallback { handler: handler.clone(), once });
+        .insert(ExhaustiveCallback { handler: handler.clone(), drain });
     }
   }
 
   pub fn mark_exhaustive_read(&mut self, id: ExhaustiveDepId, target: usize) {
-    for depth in target..self.scoping.cf.stack.len() {
-      self.scoping.cf.get_mut_from_depth(depth).mark_exhaustive_read(id);
+    let mut registered = false;
+    for depth in (target..self.scoping.cf.stack.len()).rev() {
+      let scope = self.scoping.cf.get_mut_from_depth(depth);
+      if let Some(data) = scope.exhaustive_data_mut() {
+        if data.clean {
+          if let Some(temp_deps) = data.temp_deps.as_mut() {
+            temp_deps.insert(id);
+          }
+        }
+        if !registered {
+          if let Some(register_deps) = data.register_deps.as_mut() {
+            registered = true;
+            register_deps.insert(id);
+          }
+        }
+      }
     }
   }
 
   pub fn mark_exhaustive_write(&mut self, id: ExhaustiveDepId, target: usize) -> (bool, bool) {
-    let mut should_consume = false;
+    let mut exhaustive = false;
     let mut indeterminate = false;
+    let mut need_mark = true;
     for depth in target..self.scoping.cf.stack.len() {
       let scope = self.scoping.cf.get_mut_from_depth(depth);
-      if !should_consume {
-        should_consume |= scope.mark_exhaustive_write(id);
-      }
       indeterminate |= scope.is_indeterminate();
+      if let Some(data) = scope.exhaustive_data_mut() {
+        exhaustive = true;
+        if (need_mark || data.register_deps.is_some()) && data.clean {
+          if let Some(temp_deps) = &data.temp_deps {
+            if temp_deps.contains(&id) {
+              data.clean = false;
+            }
+            need_mark = false;
+          }
+        }
+      }
     }
-    (should_consume, indeterminate)
+    (exhaustive, indeterminate)
   }
 
-  pub fn request_exhaustive_callbacks(
-    &mut self,
-    should_consume: bool,
-    id: ExhaustiveDepId,
-  ) -> bool {
+  pub fn request_exhaustive_callbacks(&mut self, id: ExhaustiveDepId) -> bool {
     if let Some(runners) = self.exhaustive_callbacks.get_mut(&id) {
       if runners.is_empty() {
         false
       } else {
-        if should_consume {
-          self.pending_deps.extend(runners.drain());
-        } else {
-          self.pending_deps.extend(runners.iter().cloned());
-        }
+        self.pending_deps.extend(runners.drain());
         true
       }
     } else {
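Two details worth noting in this hunk: `mark_exhaustive_read` now walks the scope stack innermost-first (`.rev()`), so an id lands in at most one `register_deps` set (the nearest registering scope) while every clean scope's `temp_deps` still records it; and `mark_exhaustive_write` clears `clean` exactly when the written id was read earlier in the current round. A toy single-scope model of that invalidation rule, assuming a cleared `clean` is what forces another round:

```rust
// Toy model of the read/write tracking; HashSet stands in for FxHashSet.
use std::collections::HashSet;

struct ExhaustiveScope {
  clean: bool,
  temp_deps: Option<HashSet<u32>>,
}

fn mark_read(scope: &mut ExhaustiveScope, id: u32) {
  if scope.clean {
    if let Some(temp_deps) = scope.temp_deps.as_mut() {
      temp_deps.insert(id); // remember what this round depends on
    }
  }
}

fn mark_write(scope: &mut ExhaustiveScope, id: u32) {
  if scope.clean {
    if let Some(temp_deps) = &scope.temp_deps {
      if temp_deps.contains(&id) {
        scope.clean = false; // a read-then-written dep forces another round
      }
    }
  }
}

fn main() {
  let mut scope = ExhaustiveScope { clean: true, temp_deps: Some(HashSet::new()) };
  mark_read(&mut scope, 7);
  mark_write(&mut scope, 7);
  assert!(!scope.clean); // next round is required
}
```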
@@ -171,9 +197,8 @@ impl<'a> Analyzer<'a> {
     let runners = mem::take(&mut self.pending_deps);
     for runner in runners {
       // let old_count = self.referred_deps.debug_count();
-      let ExhaustiveCallback { handler: runner, once } = runner;
-      let deps = self.exec_exhaustively("dep", runner.clone(), once);
-      self.register_exhaustive_callbacks(once, runner, deps);
+      let ExhaustiveCallback { handler: runner, drain } = runner;
+      self.exec_exhaustively("dep", drain, true, runner.clone());
       // let new_count = self.referred_deps.debug_count();
       // self.debug += 1;
     }
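With re-registration folded into `exec_exhaustively`, `request_exhaustive_callbacks` no longer needs its `should_consume` split: pending runners are always drained, and a runner that still depends on the id re-registers itself on its next run. A toy version of that drain-and-rerun cycle, with illustrative names and types:

```rust
// Drain-and-rerun worklist matching the simplified flow above.
use std::collections::HashMap;

fn main() {
  // dep id -> callback ids registered on it
  let mut callbacks: HashMap<u32, Vec<u32>> = HashMap::new();
  callbacks.entry(1).or_default().extend([10, 11]);

  // a write to dep 1 drains its runners into the pending queue
  let mut pending: Vec<u32> = Vec::new();
  if let Some(runners) = callbacks.get_mut(&1) {
    pending.extend(runners.drain(..)); // always drain; no cloned() branch left
  }

  // each pending runner executes; runner 10 still depends on dep 1,
  // so it re-registers itself (as exec_exhaustively now does internally)
  for runner in std::mem::take(&mut pending) {
    if runner == 10 {
      callbacks.entry(1).or_default().push(runner);
    }
  }
  assert_eq!(callbacks[&1], vec![10]);
}
```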
}
0 commit comments