Skip to content

Commit fe4bb5c

Browse files
committed
Remove linear searches of borrow stacks
Adding a HashMap from tags to stack locations lets us find a granting item in amortized constant time, or via a linear search of the very small array of untagged locations. Additionally, we keep a range that denotes where in the stack an item granting Unique permission might be, so that when we invalidate Uniques we do not need to scan much of the stack — and often scan nothing at all.
1 parent dadcbeb commit fe4bb5c

File tree

1 file changed

+126
-19
lines changed

1 file changed

+126
-19
lines changed

src/stacked_borrows.rs

Lines changed: 126 additions & 19 deletions
Original file line number | Diff line number | Diff line change
@@ -71,15 +71,35 @@ impl fmt::Debug for Item {
7171
}
7272

7373
/// Extra per-location state.
74-
#[derive(Clone, Debug, PartialEq, Eq)]
74+
#[derive(Clone, Debug)]
7575
pub struct Stack {
7676
/// Used *mostly* as a stack; never empty.
7777
/// Invariants:
7878
/// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`.
7979
/// * Except for `Untagged`, no tag occurs in the stack more than once.
8080
borrows: Vec<Item>,
81+
/// We often want to know what (if any) tag in the stack exists for some `SbTag`.
82+
/// The tag we're looking for does not occur often enough at the end of the stack for a linear
83+
/// search to be efficient, so we use the uniqueness guarantee to keep a map from tag to
84+
/// position in the stack.
85+
cache: FxHashMap<PtrId, usize>,
86+
/// `Untagged` may occur multiple times so we store it outside of the map.
87+
untagged: Vec<usize>,
88+
/// On a read, we need to disable all `Unique` above the granting item. We can avoid most of
89+
/// this scan by keeping track of the region of the borrow stack that may contain `Unique`s.
90+
first_unique: usize,
91+
last_unique: usize,
92+
}
93+
94+
impl PartialEq for Stack {
95+
fn eq(&self, other: &Self) -> bool {
96+
// All the semantics of Stack are in self.borrows, everything else is caching
97+
self.borrows == other.borrows
98+
}
8199
}
82100

101+
impl Eq for Stack {}
102+
83103
/// Extra per-allocation state.
84104
#[derive(Clone, Debug)]
85105
pub struct Stacks {
@@ -256,17 +276,23 @@ impl<'tcx> Stack {
256276
/// Find the item granting the given kind of access to the given tag, and return where
257277
/// it is on the stack.
258278
fn find_granting(&self, access: AccessKind, tag: SbTag) -> Option<usize> {
259-
self.borrows
260-
.iter()
261-
.enumerate() // we also need to know *where* in the stack
262-
.rev() // search top-to-bottom
263-
// Return permission of first item that grants access.
264-
// We require a permission with the right tag, ensuring U3 and F3.
265-
.find_map(
266-
|(idx, item)| {
267-
if tag == item.tag && item.perm.grants(access) { Some(idx) } else { None }
268-
},
269-
)
279+
match tag {
280+
SbTag::Untagged => {
281+
// Search top-to-bottom
282+
for idx in self.untagged.iter().rev() {
283+
// Return permission of the first item that grants access.
284+
// We require a permission with the right tag, ensuring U3 and F3.
285+
if self.borrows[*idx].perm.grants(access) {
286+
return Some(*idx);
287+
}
288+
}
289+
return None;
290+
}
291+
SbTag::Tagged(id) =>
292+
self.cache.get(&id).and_then(|idx| {
293+
if self.borrows[*idx].perm.grants(access) { Some(*idx) } else { None }
294+
}),
295+
}
270296
}
271297

272298
/// Find the first write-incompatible item above the given one --
@@ -349,6 +375,23 @@ impl<'tcx> Stack {
349375
for item in self.borrows.drain(first_incompatible_idx..).rev() {
350376
trace!("access: popping item {:?}", item);
351377
Stack::check_protector(&item, Some(tag), global)?;
378+
// The drain removes elements from `borrows`, but we need to remove those elements
379+
// from the lookup cache too.
380+
if let SbTag::Tagged(id) = item.tag {
381+
assert!(self.cache.remove(&id).is_some());
382+
}
383+
}
384+
385+
while self.untagged.last() >= Some(&first_incompatible_idx) {
386+
self.untagged.pop();
387+
}
388+
if first_incompatible_idx <= self.first_unique {
389+
// We removed all the Unique items
390+
self.first_unique = 0;
391+
self.last_unique = 0;
392+
} else {
393+
// Ensure the range doesn't extend past the new top of the stack
394+
self.last_unique = self.last_unique.min(first_incompatible_idx.saturating_sub(1));
352395
}
353396
} else {
354397
// On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses.
@@ -359,14 +402,26 @@ impl<'tcx> Stack {
359402
// This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared
360403
// reference and use that.
361404
// We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs.
362-
for idx in ((granting_idx + 1)..self.borrows.len()).rev() {
363-
let item = &mut self.borrows[idx];
364-
if item.perm == Permission::Unique {
365-
trace!("access: disabling item {:?}", item);
366-
Stack::check_protector(item, Some(tag), global)?;
367-
item.perm = Permission::Disabled;
405+
if granting_idx < self.last_unique {
406+
// add 1 so we don't disable the granting item
407+
let lower = self.first_unique.max(granting_idx + 1);
408+
//let upper = (self.last_unique + 1).min(self.borrows.len());
409+
for item in &mut self.borrows[lower..=self.last_unique] {
410+
if item.perm == Permission::Unique {
411+
trace!("access: disabling item {:?}", item);
412+
Stack::check_protector(&item, Some(tag), global)?;
413+
item.perm = Permission::Disabled;
414+
}
368415
}
369416
}
417+
if granting_idx < self.first_unique {
418+
// We disabled all Unique items
419+
self.first_unique = 0;
420+
self.last_unique = 0;
421+
} else {
422+
// Truncate the range to granting_idx
423+
self.last_unique = self.last_unique.min(granting_idx);
424+
}
370425
}
371426

372427
// Done.
@@ -394,6 +449,11 @@ impl<'tcx> Stack {
394449
Stack::check_protector(&item, None, global)?;
395450
}
396451

452+
self.cache.clear();
453+
self.untagged.clear();
454+
self.first_unique = 0;
455+
self.last_unique = 0;
456+
397457
Ok(())
398458
}
399459

@@ -452,6 +512,43 @@ impl<'tcx> Stack {
452512
} else {
453513
trace!("reborrow: adding item {:?}", new);
454514
self.borrows.insert(new_idx, new);
515+
// The above insert changes the meaning of every index in the cache >= new_idx, so now
516+
// we need to find every one of those indexes and increment it.
517+
518+
// Adjust the possibly-unique range if an insert occurs before or within it
519+
if self.first_unique >= new_idx {
520+
self.first_unique += 1;
521+
}
522+
if self.last_unique >= new_idx {
523+
self.last_unique += 1;
524+
}
525+
if new.perm == Permission::Unique {
526+
// Make sure the possibly-unique range contains the new borrow
527+
self.first_unique = self.first_unique.min(new_idx);
528+
self.last_unique = self.last_unique.max(new_idx);
529+
}
530+
// Repair the lookup cache; indices have shifted if an insert was not at the end
531+
if new_idx != self.borrows.len() - 1 {
532+
for item in &self.borrows[new_idx + 1..] {
533+
if let SbTag::Tagged(id) = item.tag {
534+
*self.cache.get_mut(&id).unwrap() += 1;
535+
}
536+
}
537+
for idx in self.untagged.iter_mut().rev().take_while(|idx| **idx >= new_idx) {
538+
*idx += 1;
539+
}
540+
}
541+
// We've now made a (conceptual) hole in the tag lookup cache.
542+
// Nothing maps to new_idx, because we've adjusted everything >= it.
543+
match new.tag {
544+
SbTag::Untagged => {
545+
self.untagged.push(new_idx);
546+
self.untagged.sort();
547+
}
548+
SbTag::Tagged(id) => {
549+
self.cache.insert(id, new_idx);
550+
}
551+
}
455552
}
456553

457554
Ok(())
@@ -464,7 +561,17 @@ impl<'tcx> Stacks {
464561
/// Creates new stack with initial tag.
465562
fn new(size: Size, perm: Permission, tag: SbTag) -> Self {
466563
let item = Item { perm, tag, protector: None };
467-
let stack = Stack { borrows: vec![item] };
564+
let mut cache = FxHashMap::default();
565+
let mut untagged = Vec::new();
566+
match item.tag {
567+
SbTag::Untagged => {
568+
untagged.push(0);
569+
}
570+
SbTag::Tagged(id) => {
571+
cache.insert(id, 0);
572+
}
573+
}
574+
let stack = Stack { borrows: vec![item], cache, untagged, first_unique: 0, last_unique: 0 };
468575

469576
Stacks { stacks: RefCell::new(RangeMap::new(size, stack)) }
470577
}

0 commit comments

Comments (0)