@@ -71,15 +71,35 @@ impl fmt::Debug for Item {
71
71
}
72
72
73
73
/// Extra per-location state.
// NOTE: `PartialEq`/`Eq` are implemented manually (comparing `borrows` only),
// because every other field is a derived cache that must not affect semantic
// equality of two stacks.
#[derive(Clone, Debug)]
pub struct Stack {
    /// Used *mostly* as a stack; never empty.
    /// Invariants:
    /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`.
    /// * Except for `Untagged`, no tag occurs in the stack more than once.
    borrows: Vec<Item>,
    /// We often want to know what (if any) tag in the stack exists for some `SbTag`.
    /// The tag we're looking for does not occur often enough at the end of the stack for a linear
    /// search to be efficient, so we use the uniqueness guarantee to keep a map from tag to
    /// position in the stack.
    cache: FxHashMap<PtrId, usize>,
    /// `Untagged` may occur multiple times so we store it outside of the map.
    /// Kept sorted ascending, so iterating it in reverse walks the stack
    /// top-to-bottom.
    untagged: Vec<usize>,
    /// On a read, we need to disable all `Unique` above the granting item. We can avoid most of
    /// this scan by keeping track of the region of the borrow stack that may contain `Unique`s.
    // `first_unique == last_unique == 0` is the reset state used when no
    // `Unique` items remain (see the access path).
    first_unique: usize,
    last_unique: usize,
}
93
+
94
+ impl PartialEq for Stack {
95
+ fn eq ( & self , other : & Self ) -> bool {
96
+ // All the semantics of Stack are in self.borrows, everything else is caching
97
+ self . borrows == other. borrows
98
+ }
81
99
}
82
100
101
// Sound: equality is delegated to `Vec<Item>`, whose `==` is a full
// equivalence relation. Implemented manually (not derived) to match the
// manual `PartialEq` above, which ignores the cache fields.
impl Eq for Stack {}
102
+
83
103
/// Extra per-allocation state.
84
104
#[ derive( Clone , Debug ) ]
85
105
pub struct Stacks {
@@ -256,17 +276,23 @@ impl<'tcx> Stack {
256
276
/// Find the item granting the given kind of access to the given tag, and return where
257
277
/// it is on the stack.
258
278
fn find_granting ( & self , access : AccessKind , tag : SbTag ) -> Option < usize > {
259
- self . borrows
260
- . iter ( )
261
- . enumerate ( ) // we also need to know *where* in the stack
262
- . rev ( ) // search top-to-bottom
263
- // Return permission of first item that grants access.
264
- // We require a permission with the right tag, ensuring U3 and F3.
265
- . find_map (
266
- |( idx, item) | {
267
- if tag == item. tag && item. perm . grants ( access) { Some ( idx) } else { None }
268
- } ,
269
- )
279
+ match tag {
280
+ SbTag :: Untagged => {
281
+ // Search top-to-bottom
282
+ for idx in self . untagged . iter ( ) . rev ( ) {
283
+ // Return permission of the first item that grants access.
284
+ // We require a permission with the right tag, ensuring U3 and F3.
285
+ if self . borrows [ * idx] . perm . grants ( access) {
286
+ return Some ( * idx) ;
287
+ }
288
+ }
289
+ return None ;
290
+ }
291
+ SbTag :: Tagged ( id) =>
292
+ self . cache . get ( & id) . and_then ( |idx| {
293
+ if self . borrows [ * idx] . perm . grants ( access) { Some ( * idx) } else { None }
294
+ } ) ,
295
+ }
270
296
}
271
297
272
298
/// Find the first write-incompatible item above the given one --
@@ -349,6 +375,23 @@ impl<'tcx> Stack {
349
375
for item in self . borrows . drain ( first_incompatible_idx..) . rev ( ) {
350
376
trace ! ( "access: popping item {:?}" , item) ;
351
377
Stack :: check_protector ( & item, Some ( tag) , global) ?;
378
+ // The drain removes elements from `borrows`, but we need to remove those elements
379
+ // from the lookup cache too.
380
+ if let SbTag :: Tagged ( id) = item. tag {
381
+ assert ! ( self . cache. remove( & id) . is_some( ) ) ;
382
+ }
383
+ }
384
+
385
+ while self . untagged . last ( ) >= Some ( & first_incompatible_idx) {
386
+ self . untagged . pop ( ) ;
387
+ }
388
+ if first_incompatible_idx <= self . first_unique {
389
+ // We removed all the Unique items
390
+ self . first_unique = 0 ;
391
+ self . last_unique = 0 ;
392
+ } else {
393
+ // Ensure the range doesn't extend past the new top of the stack
394
+ self . last_unique = self . last_unique . min ( first_incompatible_idx. saturating_sub ( 1 ) ) ;
352
395
}
353
396
} else {
354
397
// On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses.
@@ -359,14 +402,26 @@ impl<'tcx> Stack {
359
402
// This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared
360
403
// reference and use that.
361
404
// We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs.
362
- for idx in ( ( granting_idx + 1 ) ..self . borrows . len ( ) ) . rev ( ) {
363
- let item = & mut self . borrows [ idx] ;
364
- if item. perm == Permission :: Unique {
365
- trace ! ( "access: disabling item {:?}" , item) ;
366
- Stack :: check_protector ( item, Some ( tag) , global) ?;
367
- item. perm = Permission :: Disabled ;
405
+ if granting_idx < self . last_unique {
406
+ // add 1 so we don't disable the granting item
407
+ let lower = self . first_unique . max ( granting_idx + 1 ) ;
408
+ //let upper = (self.last_unique + 1).min(self.borrows.len());
409
+ for item in & mut self . borrows [ lower..=self . last_unique ] {
410
+ if item. perm == Permission :: Unique {
411
+ trace ! ( "access: disabling item {:?}" , item) ;
412
+ Stack :: check_protector ( & item, Some ( tag) , global) ?;
413
+ item. perm = Permission :: Disabled ;
414
+ }
368
415
}
369
416
}
417
+ if granting_idx < self . first_unique {
418
+ // We disabled all Unique items
419
+ self . first_unique = 0 ;
420
+ self . last_unique = 0 ;
421
+ } else {
422
+ // Truncate the range to granting_idx
423
+ self . last_unique = self . last_unique . min ( granting_idx) ;
424
+ }
370
425
}
371
426
372
427
// Done.
@@ -394,6 +449,11 @@ impl<'tcx> Stack {
394
449
Stack :: check_protector ( & item, None , global) ?;
395
450
}
396
451
452
+ self . cache . clear ( ) ;
453
+ self . untagged . clear ( ) ;
454
+ self . first_unique = 0 ;
455
+ self . last_unique = 0 ;
456
+
397
457
Ok ( ( ) )
398
458
}
399
459
@@ -452,6 +512,43 @@ impl<'tcx> Stack {
452
512
} else {
453
513
trace ! ( "reborrow: adding item {:?}" , new) ;
454
514
self . borrows . insert ( new_idx, new) ;
515
+ // The above insert changes the meaning of every index in the cache >= new_idx, so now
516
+ // we need to find every one of those indexes and increment it.
517
+
518
+ // Adjust the possibly-unique range if an insert occurs before or within it
519
+ if self . first_unique >= new_idx {
520
+ self . first_unique += 1 ;
521
+ }
522
+ if self . last_unique >= new_idx {
523
+ self . last_unique += 1 ;
524
+ }
525
+ if new. perm == Permission :: Unique {
526
+ // Make sure the possibly-unique range contains the new borrow
527
+ self . first_unique = self . first_unique . min ( new_idx) ;
528
+ self . last_unique = self . last_unique . max ( new_idx) ;
529
+ }
530
+ // Repair the lookup cache; indices have shifted if an insert was not at the end
531
+ if new_idx != self . borrows . len ( ) - 1 {
532
+ for item in & self . borrows [ new_idx + 1 ..] {
533
+ if let SbTag :: Tagged ( id) = item. tag {
534
+ * self . cache . get_mut ( & id) . unwrap ( ) += 1 ;
535
+ }
536
+ }
537
+ for idx in self . untagged . iter_mut ( ) . rev ( ) . take_while ( |idx| * * idx >= new_idx) {
538
+ * idx += 1 ;
539
+ }
540
+ }
541
+ // We've now made a (conceptual) hole in the tag lookup cache.
542
+ // Nothing maps to new_idx, because we've adjusted everything >= it.
543
+ match new. tag {
544
+ SbTag :: Untagged => {
545
+ self . untagged . push ( new_idx) ;
546
+ self . untagged . sort ( ) ;
547
+ }
548
+ SbTag :: Tagged ( id) => {
549
+ self . cache . insert ( id, new_idx) ;
550
+ }
551
+ }
455
552
}
456
553
457
554
Ok ( ( ) )
@@ -464,7 +561,17 @@ impl<'tcx> Stacks {
464
561
/// Creates new stack with initial tag.
465
562
fn new ( size : Size , perm : Permission , tag : SbTag ) -> Self {
466
563
let item = Item { perm, tag, protector : None } ;
467
- let stack = Stack { borrows : vec ! [ item] } ;
564
+ let mut cache = FxHashMap :: default ( ) ;
565
+ let mut untagged = Vec :: new ( ) ;
566
+ match item. tag {
567
+ SbTag :: Untagged => {
568
+ untagged. push ( 0 ) ;
569
+ }
570
+ SbTag :: Tagged ( id) => {
571
+ cache. insert ( id, 0 ) ;
572
+ }
573
+ }
574
+ let stack = Stack { borrows : vec ! [ item] , cache, untagged, first_unique : 0 , last_unique : 0 } ;
468
575
469
576
Stacks { stacks : RefCell :: new ( RangeMap :: new ( size, stack) ) }
470
577
}
0 commit comments