
Commit 1962a70

Committed May 23, 2018
Auto merge of #50866 - michaelwoerister:relocations-in-vec, r=oli-obk
Use a different data structure for MIRI relocations

This PR makes relocations in MIRI use a sorted vector instead of a `BTreeMap`, which should make a few common operations more efficient. Let's see if that's true.

r? @oli-obk
2 parents c3733a7 + 95fac99 commit 1962a70
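For orientation: a relocation table maps a byte offset inside an allocation to the `AllocId` of the allocation that the pointer stored at that offset points to. Below is a minimal sketch of the representational change, using simplified stand-in types rather than the compiler's real `Size` and `AllocId`: a flat sorted vector replaces the tree, so lookups binary-search and iteration is a plain slice walk.

    // Sketch only; `Size` and `AllocId` here are stand-ins, not the
    // compiler's definitions.
    type Size = u64;    // stand-in for ty::layout::Size
    type AllocId = u64; // stand-in for mir::interpret::AllocId

    // Lookup in a sorted Vec<(Size, AllocId)>: binary search on the offset.
    fn lookup(relocations: &[(Size, AllocId)], offset: Size) -> Option<AllocId> {
        relocations
            .binary_search_by_key(&offset, |&(o, _)| o)
            .ok()
            .map(|i| relocations[i].1)
    }

    fn main() {
        let relocations = vec![(0, 1), (8, 2), (16, 3)]; // sorted by offset
        assert_eq!(lookup(&relocations, 8), Some(2));
        assert_eq!(lookup(&relocations, 4), None);
    }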

5 files changed: +545 −26 lines changed


src/librustc/mir/interpret/mod.rs

Lines changed: 34 additions & 4 deletions

@@ -12,7 +12,6 @@ pub use self::error::{EvalError, EvalResult, EvalErrorKind, AssertMessage};
 
 pub use self::value::{PrimVal, PrimValKind, Value, Pointer, ConstValue};
 
-use std::collections::BTreeMap;
 use std::fmt;
 use mir;
 use hir::def_id::DefId;
@@ -21,9 +20,11 @@ use ty::layout::{self, Align, HasDataLayout, Size};
 use middle::region;
 use std::iter;
 use std::io;
+use std::ops::{Deref, DerefMut};
 use std::hash::Hash;
 use syntax::ast::Mutability;
 use rustc_serialize::{Encoder, Decoder, Decodable, Encodable};
+use rustc_data_structures::sorted_map::SortedMap;
 use rustc_data_structures::fx::FxHashMap;
 use byteorder::{WriteBytesExt, ReadBytesExt, LittleEndian, BigEndian};
 
@@ -341,7 +342,7 @@ pub struct Allocation {
     pub bytes: Vec<u8>,
     /// Maps from byte addresses to allocations.
     /// Only the first byte of a pointer is inserted into the map.
-    pub relocations: BTreeMap<Size, AllocId>,
+    pub relocations: Relocations,
     /// Denotes undefined memory. Reading from undefined memory is forbidden in miri
     pub undef_mask: UndefMask,
     /// The alignment of the allocation to detect unaligned reads.
@@ -358,7 +359,7 @@ impl Allocation {
         undef_mask.grow(Size::from_bytes(slice.len() as u64), true);
         Self {
             bytes: slice.to_owned(),
-            relocations: BTreeMap::new(),
+            relocations: Relocations::new(),
             undef_mask,
             align,
             runtime_mutability: Mutability::Immutable,
@@ -373,7 +374,7 @@ impl Allocation {
         assert_eq!(size.bytes() as usize as u64, size.bytes());
         Allocation {
            bytes: vec![0; size.bytes() as usize],
-            relocations: BTreeMap::new(),
+            relocations: Relocations::new(),
             undef_mask: UndefMask::new(size),
             align,
             runtime_mutability: Mutability::Immutable,
@@ -383,6 +384,35 @@
 
 impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {}
 
+#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
+pub struct Relocations(SortedMap<Size, AllocId>);
+
+impl Relocations {
+    pub fn new() -> Relocations {
+        Relocations(SortedMap::new())
+    }
+
+    // The caller must guarantee that the given relocations are already sorted
+    // by address and contain no duplicates.
+    pub fn from_presorted(r: Vec<(Size, AllocId)>) -> Relocations {
+        Relocations(SortedMap::from_presorted_elements(r))
+    }
+}
+
+impl Deref for Relocations {
+    type Target = SortedMap<Size, AllocId>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl DerefMut for Relocations {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 // Methods to access integers in the target endianness
 ////////////////////////////////////////////////////////////////////////////////
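Note on the `Relocations` newtype above: because it implements `Deref`/`DerefMut` to `SortedMap<Size, AllocId>`, the rest of the compiler keeps calling map methods on `alloc.relocations` directly. A compilable sketch of that pattern, with a hypothetical stub `SortedMap` over stand-in `u64` keys and values (not the real type from this PR):

    use std::ops::{Deref, DerefMut};

    // Stand-in for the real rustc_data_structures::sorted_map::SortedMap.
    struct SortedMap<K: Ord, V> {
        data: Vec<(K, V)>,
    }

    impl<K: Ord, V> SortedMap<K, V> {
        fn new() -> Self {
            SortedMap { data: Vec::new() }
        }

        fn insert(&mut self, key: K, value: V) {
            match self.data.binary_search_by(|&(ref k, _)| k.cmp(&key)) {
                Ok(i) => self.data[i].1 = value,
                Err(i) => self.data.insert(i, (key, value)),
            }
        }

        fn len(&self) -> usize {
            self.data.len()
        }
    }

    struct Relocations(SortedMap<u64, u64>);

    impl Deref for Relocations {
        type Target = SortedMap<u64, u64>;

        fn deref(&self) -> &Self::Target {
            &self.0
        }
    }

    impl DerefMut for Relocations {
        fn deref_mut(&mut self) -> &mut Self::Target {
            &mut self.0
        }
    }

    fn main() {
        let mut relocations = Relocations(SortedMap::new());
        relocations.insert(8, 42);        // resolved through DerefMut
        assert_eq!(relocations.len(), 1); // resolved through Deref
    }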

src/librustc_codegen_llvm/mir/constant.rs

Lines changed: 1 addition & 1 deletion

@@ -83,7 +83,7 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx, alloc: &Allocation) -> ValueRef {
     let pointer_size = layout.pointer_size.bytes() as usize;
 
     let mut next_offset = 0;
-    for (&offset, &alloc_id) in &alloc.relocations {
+    for &(offset, alloc_id) in alloc.relocations.iter() {
        let offset = offset.bytes();
        assert_eq!(offset as usize as u64, offset);
        let offset = offset as usize;
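The loop's binding pattern changes because `BTreeMap` iteration yields `(&K, &V)` pairs, while iterating the sorted vector yields `&(K, V)` tuple references. A minimal sketch of the two shapes, again with stand-in `u64` types:

    use std::collections::BTreeMap;

    fn main() {
        let tree: BTreeMap<u64, u64> = [(0, 1), (8, 2)].iter().cloned().collect();
        for (&offset, &alloc_id) in &tree { // old pattern: (&K, &V)
            let _ = (offset, alloc_id);
        }

        let vec: Vec<(u64, u64)> = vec![(0, 1), (8, 2)];
        for &(offset, alloc_id) in vec.iter() { // new pattern: &(K, V)
            let _ = (offset, alloc_id);
        }
    }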

src/librustc_data_structures/lib.rs

Lines changed: 1 addition & 0 deletions

@@ -73,6 +73,7 @@ pub mod control_flow_graph;
 pub mod flock;
 pub mod sync;
 pub mod owning_ref;
+pub mod sorted_map;
 
 pub struct OnDrop<F: Fn()>(pub F);
 

src/librustc_data_structures/sorted_map.rs (new file)

Lines changed: 488 additions & 0 deletions

@@ -0,0 +1,488 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::borrow::Borrow;
+use std::cmp::Ordering;
+use std::convert::From;
+use std::mem;
+use std::ops::{RangeBounds, Bound, Index, IndexMut};
+
+/// `SortedMap` is a data structure with similar characteristics to `BTreeMap`
+/// but slightly different trade-offs: lookup is O(log(N)), insertion and
+/// removal are O(N), and elements can be iterated in order cheaply.
+///
+/// `SortedMap` can be faster than a `BTreeMap` for small sizes (<50) since it
+/// stores data in a more compact way. It also supports accessing contiguous
+/// ranges of elements as a slice, and slices of already sorted elements can be
+/// inserted efficiently.
+#[derive(Clone, PartialEq, Eq, Hash, Default, Debug, RustcEncodable, RustcDecodable)]
+pub struct SortedMap<K: Ord, V> {
+    data: Vec<(K, V)>
+}
+
+impl<K: Ord, V> SortedMap<K, V> {
+
+    #[inline]
+    pub fn new() -> SortedMap<K, V> {
+        SortedMap {
+            data: vec![]
+        }
+    }
+
+    /// Construct a `SortedMap` from a presorted set of elements. This is faster
+    /// than creating an empty map and then inserting the elements individually.
+    ///
+    /// It is up to the caller to make sure that the elements are sorted by key
+    /// and that there are no duplicates.
+    #[inline]
+    pub fn from_presorted_elements(elements: Vec<(K, V)>) -> SortedMap<K, V>
+    {
+        debug_assert!(elements.windows(2).all(|w| w[0].0 < w[1].0));
+
+        SortedMap {
+            data: elements
+        }
+    }
+
+    #[inline]
+    pub fn insert(&mut self, key: K, mut value: V) -> Option<V> {
+        match self.lookup_index_for(&key) {
+            Ok(index) => {
+                let mut slot = unsafe {
+                    self.data.get_unchecked_mut(index)
+                };
+                mem::swap(&mut slot.1, &mut value);
+                Some(value)
+            }
+            Err(index) => {
+                self.data.insert(index, (key, value));
+                None
+            }
+        }
+    }
+
+    #[inline]
+    pub fn remove(&mut self, key: &K) -> Option<V> {
+        match self.lookup_index_for(key) {
+            Ok(index) => {
+                Some(self.data.remove(index).1)
+            }
+            Err(_) => {
+                None
+            }
+        }
+    }
+
+    #[inline]
+    pub fn get(&self, key: &K) -> Option<&V> {
+        match self.lookup_index_for(key) {
+            Ok(index) => {
+                unsafe {
+                    Some(&self.data.get_unchecked(index).1)
+                }
+            }
+            Err(_) => {
+                None
+            }
+        }
+    }
+
+    #[inline]
+    pub fn get_mut(&mut self, key: &K) -> Option<&mut V> {
+        match self.lookup_index_for(key) {
+            Ok(index) => {
+                unsafe {
+                    Some(&mut self.data.get_unchecked_mut(index).1)
+                }
+            }
+            Err(_) => {
+                None
+            }
+        }
+    }
+
+    #[inline]
+    pub fn clear(&mut self) {
+        self.data.clear();
+    }
+
+    /// Iterate over elements, sorted by key
+    #[inline]
+    pub fn iter(&self) -> ::std::slice::Iter<(K, V)> {
+        self.data.iter()
+    }
+
+    /// Iterate over the keys, sorted
+    #[inline]
+    pub fn keys(&self) -> impl Iterator<Item=&K> + ExactSizeIterator {
+        self.data.iter().map(|&(ref k, _)| k)
+    }
+
+    /// Iterate over values, sorted by key
+    #[inline]
+    pub fn values(&self) -> impl Iterator<Item=&V> + ExactSizeIterator {
+        self.data.iter().map(|&(_, ref v)| v)
+    }
+
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.data.len()
+    }
+
+    #[inline]
+    pub fn range<R>(&self, range: R) -> &[(K, V)]
+        where R: RangeBounds<K>
+    {
+        let (start, end) = self.range_slice_indices(range);
+        (&self.data[start .. end])
+    }
+
+    #[inline]
+    pub fn remove_range<R>(&mut self, range: R)
+        where R: RangeBounds<K>
+    {
+        let (start, end) = self.range_slice_indices(range);
+        self.data.splice(start .. end, ::std::iter::empty());
+    }
+
+    /// Mutate all keys with the given function `f`. This mutation must not
+    /// change the sort-order of keys.
+    #[inline]
+    pub fn offset_keys<F>(&mut self, f: F)
+        where F: Fn(&mut K)
+    {
+        self.data.iter_mut().map(|&mut (ref mut k, _)| k).for_each(f);
+    }
+
+    /// Inserts a presorted range of elements into the map. If the range can be
+    /// inserted as a whole in between two existing elements of the map, this
+    /// will be faster than inserting the elements individually.
+    ///
+    /// It is up to the caller to make sure that the elements are sorted by key
+    /// and that there are no duplicates.
+    #[inline]
+    pub fn insert_presorted(&mut self, mut elements: Vec<(K, V)>) {
+        if elements.is_empty() {
+            return
+        }
+
+        debug_assert!(elements.windows(2).all(|w| w[0].0 < w[1].0));
+
+        let start_index = self.lookup_index_for(&elements[0].0);
+
+        let drain = match start_index {
+            Ok(index) => {
+                let mut drain = elements.drain(..);
+                self.data[index] = drain.next().unwrap();
+                drain
+            }
+            Err(index) => {
+                if index == self.data.len() ||
+                   elements.last().unwrap().0 < self.data[index].0 {
+                    // We can copy the whole range without having to mix with
+                    // existing elements.
+                    self.data.splice(index .. index, elements.drain(..));
+                    return
+                }
+
+                let mut drain = elements.drain(..);
+                self.data.insert(index, drain.next().unwrap());
+                drain
+            }
+        };
+
+        // Insert the rest
+        for (k, v) in drain {
+            self.insert(k, v);
+        }
+    }
+
+    /// Looks up the key in `self.data` via `slice::binary_search()`.
+    #[inline(always)]
+    fn lookup_index_for(&self, key: &K) -> Result<usize, usize> {
+        self.data.binary_search_by(|&(ref x, _)| x.cmp(key))
+    }
+
+    #[inline]
+    fn range_slice_indices<R>(&self, range: R) -> (usize, usize)
+        where R: RangeBounds<K>
+    {
+        let start = match range.start() {
+            Bound::Included(ref k) => {
+                match self.lookup_index_for(k) {
+                    Ok(index) | Err(index) => index
+                }
+            }
+            Bound::Excluded(ref k) => {
+                match self.lookup_index_for(k) {
+                    Ok(index) => index + 1,
+                    Err(index) => index,
+                }
+            }
+            Bound::Unbounded => 0,
+        };
+
+        let end = match range.end() {
+            Bound::Included(ref k) => {
+                match self.lookup_index_for(k) {
+                    Ok(index) => index + 1,
+                    Err(index) => index,
+                }
+            }
+            Bound::Excluded(ref k) => {
+                match self.lookup_index_for(k) {
+                    Ok(index) | Err(index) => index,
+                }
+            }
+            Bound::Unbounded => self.data.len(),
+        };
+
+        (start, end)
+    }
+}
+
+impl<K: Ord, V> IntoIterator for SortedMap<K, V> {
+    type Item = (K, V);
+    type IntoIter = ::std::vec::IntoIter<(K, V)>;
+    fn into_iter(self) -> Self::IntoIter {
+        self.data.into_iter()
+    }
+}
+
+impl<K: Ord, V, Q: Borrow<K>> Index<Q> for SortedMap<K, V> {
+    type Output = V;
+    fn index(&self, index: Q) -> &Self::Output {
+        let k: &K = index.borrow();
+        self.get(k).unwrap()
+    }
+}
+
+impl<K: Ord, V, Q: Borrow<K>> IndexMut<Q> for SortedMap<K, V> {
+    fn index_mut(&mut self, index: Q) -> &mut Self::Output {
+        let k: &K = index.borrow();
+        self.get_mut(k).unwrap()
+    }
+}
+
+impl<K: Ord, V, I: Iterator<Item=(K, V)>> From<I> for SortedMap<K, V> {
+    fn from(data: I) -> Self {
+        let mut data: Vec<(K, V)> = data.collect();
+        data.sort_unstable_by(|&(ref k1, _), &(ref k2, _)| k1.cmp(k2));
+        data.dedup_by(|&mut (ref k1, _), &mut (ref k2, _)| {
+            k1.cmp(k2) == Ordering::Equal
+        });
+        SortedMap {
+            data
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::SortedMap;
+
+    #[test]
+    fn test_insert_and_iter() {
+        let mut map = SortedMap::new();
+        let mut expected = Vec::new();
+
+        for x in 0 .. 100 {
+            assert_eq!(map.iter().cloned().collect::<Vec<_>>(), expected);
+
+            let x = 1000 - x * 2;
+            map.insert(x, x);
+            expected.insert(0, (x, x));
+        }
+    }
+
+    #[test]
+    fn test_get_and_index() {
+        let mut map = SortedMap::new();
+        let mut expected = Vec::new();
+
+        for x in 0 .. 100 {
+            let x = 1000 - x;
+            if x & 1 == 0 {
+                map.insert(x, x);
+            }
+            expected.push(x);
+        }
+
+        for mut x in expected {
+            if x & 1 == 0 {
+                assert_eq!(map.get(&x), Some(&x));
+                assert_eq!(map.get_mut(&x), Some(&mut x));
+                assert_eq!(map[&x], x);
+                assert_eq!(&mut map[&x], &mut x);
+            } else {
+                assert_eq!(map.get(&x), None);
+                assert_eq!(map.get_mut(&x), None);
+            }
+        }
+    }
+
+    #[test]
+    fn test_range() {
+        let mut map = SortedMap::new();
+        map.insert(1, 1);
+        map.insert(3, 3);
+        map.insert(6, 6);
+        map.insert(9, 9);
+
+        let keys = |s: &[(_, _)]| {
+            s.into_iter().map(|e| e.0).collect::<Vec<u32>>()
+        };
+
+        for start in 0 .. 11 {
+            for end in 0 .. 11 {
+                if end < start {
+                    continue
+                }
+
+                let mut expected = vec![1, 3, 6, 9];
+                expected.retain(|&x| x >= start && x < end);
+
+                assert_eq!(keys(map.range(start..end)), expected, "range = {}..{}", start, end);
+            }
+        }
+    }
+
+
+    #[test]
+    fn test_offset_keys() {
+        let mut map = SortedMap::new();
+        map.insert(1, 1);
+        map.insert(3, 3);
+        map.insert(6, 6);
+
+        map.offset_keys(|k| *k += 1);
+
+        let mut expected = SortedMap::new();
+        expected.insert(2, 1);
+        expected.insert(4, 3);
+        expected.insert(7, 6);
+
+        assert_eq!(map, expected);
+    }
+
+    fn keys(s: SortedMap<u32, u32>) -> Vec<u32> {
+        s.into_iter().map(|(k, _)| k).collect::<Vec<u32>>()
+    }
+
+    fn elements(s: SortedMap<u32, u32>) -> Vec<(u32, u32)> {
+        s.into_iter().collect::<Vec<(u32, u32)>>()
+    }
+
+    #[test]
+    fn test_remove_range() {
+        let mut map = SortedMap::new();
+        map.insert(1, 1);
+        map.insert(3, 3);
+        map.insert(6, 6);
+        map.insert(9, 9);
+
+        for start in 0 .. 11 {
+            for end in 0 .. 11 {
+                if end < start {
+                    continue
+                }
+
+                let mut expected = vec![1, 3, 6, 9];
+                expected.retain(|&x| x < start || x >= end);
+
+                let mut map = map.clone();
+                map.remove_range(start .. end);
+
+                assert_eq!(keys(map), expected, "range = {}..{}", start, end);
+            }
+        }
+    }
+
+    #[test]
+    fn test_remove() {
+        let mut map = SortedMap::new();
+        let mut expected = Vec::new();
+
+        for x in 0..10 {
+            map.insert(x, x);
+            expected.push((x, x));
+        }
+
+        for x in 0 .. 10 {
+            let mut map = map.clone();
+            let mut expected = expected.clone();
+
+            assert_eq!(map.remove(&x), Some(x));
+            expected.remove(x as usize);
+
+            assert_eq!(map.iter().cloned().collect::<Vec<_>>(), expected);
+        }
+    }
+
+    #[test]
+    fn test_insert_presorted_non_overlapping() {
+        let mut map = SortedMap::new();
+        map.insert(2, 0);
+        map.insert(8, 0);
+
+        map.insert_presorted(vec![(3, 0), (7, 0)]);
+
+        let expected = vec![2, 3, 7, 8];
+        assert_eq!(keys(map), expected);
+    }
+
+    #[test]
+    fn test_insert_presorted_first_elem_equal() {
+        let mut map = SortedMap::new();
+        map.insert(2, 2);
+        map.insert(8, 8);
+
+        map.insert_presorted(vec![(2, 0), (7, 7)]);
+
+        let expected = vec![(2, 0), (7, 7), (8, 8)];
+        assert_eq!(elements(map), expected);
+    }
+
+    #[test]
+    fn test_insert_presorted_last_elem_equal() {
+        let mut map = SortedMap::new();
+        map.insert(2, 2);
+        map.insert(8, 8);
+
+        map.insert_presorted(vec![(3, 3), (8, 0)]);
+
+        let expected = vec![(2, 2), (3, 3), (8, 0)];
+        assert_eq!(elements(map), expected);
+    }
+
+    #[test]
+    fn test_insert_presorted_shuffle() {
+        let mut map = SortedMap::new();
+        map.insert(2, 2);
+        map.insert(7, 7);
+
+        map.insert_presorted(vec![(1, 1), (3, 3), (8, 8)]);
+
+        let expected = vec![(1, 1), (2, 2), (3, 3), (7, 7), (8, 8)];
+        assert_eq!(elements(map), expected);
+    }
+
+    #[test]
+    fn test_insert_presorted_at_end() {
+        let mut map = SortedMap::new();
+        map.insert(1, 1);
+        map.insert(2, 2);
+
+        map.insert_presorted(vec![(3, 3), (8, 8)]);
+
+        let expected = vec![(1, 1), (2, 2), (3, 3), (8, 8)];
+        assert_eq!(elements(map), expected);
+    }
+}
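A usage sketch of the `SortedMap` API defined above, exercising the operations the interpreter code relies on (this assumes the file above is compiled in and in scope as module `sorted_map`):

    use sorted_map::SortedMap;

    fn main() {
        let mut map: SortedMap<u64, &str> = SortedMap::new();
        map.insert(8, "a");
        map.insert(0, "b");
        map.insert(16, "c");

        // Lookup binary-searches the backing vector.
        assert_eq!(map.get(&8), Some(&"a"));

        // Ranges come back as plain slices of (key, value) pairs.
        assert_eq!(map.range(0..16), &[(0, "b"), (8, "a")]);

        // A presorted batch that fits between two existing elements is
        // spliced in with a single shift of the tail.
        map.insert_presorted(vec![(9, "d"), (10, "e")]);
        assert_eq!(map.len(), 5);

        // remove_range drops a contiguous run of keys at once.
        map.remove_range(8..=10);
        assert_eq!(map.len(), 2);
    }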

src/librustc_mir/interpret/memory.rs

Lines changed: 21 additions & 21 deletions

@@ -1,4 +1,4 @@
-use std::collections::{btree_map, VecDeque};
+use std::collections::VecDeque;
 use std::ptr;
 
 use rustc::hir::def_id::DefId;
@@ -519,7 +519,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
 
    fn get_bytes(&self, ptr: MemoryPointer, size: Size, align: Align) -> EvalResult<'tcx, &[u8]> {
        assert_ne!(size.bytes(), 0);
-        if self.relocations(ptr, size)?.count() != 0 {
+        if self.relocations(ptr, size)?.len() != 0 {
            return err!(ReadPointerAsBytes);
        }
        self.check_defined(ptr, size)?;
@@ -614,9 +614,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
        // first copy the relocations to a temporary buffer, because
        // `get_bytes_mut` will clear the relocations, which is correct,
        // since we don't want to keep any relocations at the target.
-
        let relocations: Vec<_> = self.relocations(src, size)?
-            .map(|(&offset, &alloc_id)| {
+            .iter()
+            .map(|&(offset, alloc_id)| {
                // Update relocation offsets for the new positions in the destination allocation.
                (offset + dest.offset - src.offset, alloc_id)
            })
@@ -648,7 +648,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
 
        self.copy_undef_mask(src, dest, size)?;
        // copy back the relocations
-        self.get_mut(dest.alloc_id)?.relocations.extend(relocations);
+        self.get_mut(dest.alloc_id)?.relocations.insert_presorted(relocations);
 
        Ok(())
    }
@@ -660,7 +660,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
        match alloc.bytes[offset..].iter().position(|&c| c == 0) {
            Some(size) => {
                let p1 = Size::from_bytes((size + 1) as u64);
-                if self.relocations(ptr, p1)?.count() != 0 {
+                if self.relocations(ptr, p1)?.len() != 0 {
                    return err!(ReadPointerAsBytes);
                }
                self.check_defined(ptr, p1)?;
@@ -720,7 +720,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
        let bytes = read_target_uint(endianness, bytes).unwrap();
        // See if we got a pointer
        if size != self.pointer_size() {
-            if self.relocations(ptr, size)?.count() != 0 {
+            if self.relocations(ptr, size)?.len() != 0 {
                return err!(ReadPointerAsBytes);
            }
        } else {
@@ -808,24 +808,26 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
        &self,
        ptr: MemoryPointer,
        size: Size,
-    ) -> EvalResult<'tcx, btree_map::Range<Size, AllocId>> {
+    ) -> EvalResult<'tcx, &[(Size, AllocId)]> {
        let start = ptr.offset.bytes().saturating_sub(self.pointer_size().bytes() - 1);
        let end = ptr.offset + size;
        Ok(self.get(ptr.alloc_id)?.relocations.range(Size::from_bytes(start)..end))
    }
 
    fn clear_relocations(&mut self, ptr: MemoryPointer, size: Size) -> EvalResult<'tcx> {
-        // Find all relocations overlapping the given range.
-        let keys: Vec<_> = self.relocations(ptr, size)?.map(|(&k, _)| k).collect();
-        if keys.is_empty() {
-            return Ok(());
-        }
-
        // Find the start and end of the given range and its outermost relocations.
+        let (first, last) = {
+            // Find all relocations overlapping the given range.
+            let relocations = self.relocations(ptr, size)?;
+            if relocations.is_empty() {
+                return Ok(());
+            }
+
+            (relocations.first().unwrap().0,
+             relocations.last().unwrap().0 + self.pointer_size())
+        };
        let start = ptr.offset;
        let end = start + size;
-        let first = *keys.first().unwrap();
-        let last = *keys.last().unwrap() + self.pointer_size();
 
        let alloc = self.get_mut(ptr.alloc_id)?;
 
@@ -839,16 +841,14 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
        }
 
        // Forget all the relocations.
-        for k in keys {
-            alloc.relocations.remove(&k);
-        }
+        alloc.relocations.remove_range(first ..= last);
 
        Ok(())
    }
 
    fn check_relocation_edges(&self, ptr: MemoryPointer, size: Size) -> EvalResult<'tcx> {
-        let overlapping_start = self.relocations(ptr, Size::from_bytes(0))?.count();
-        let overlapping_end = self.relocations(ptr.offset(size, self)?, Size::from_bytes(0))?.count();
+        let overlapping_start = self.relocations(ptr, Size::from_bytes(0))?.len();
+        let overlapping_end = self.relocations(ptr.offset(size, self)?, Size::from_bytes(0))?.len();
        if overlapping_start + overlapping_end != 0 {
            return err!(ReadPointerAsBytes);
        }