Skip to content

Commit d1cd084

Browse files
authored
Merge pull request #71 from rust-osdev/dont-leak-back-padding
Consider regions that lead to very small back paddings as unsuitable
2 parents bde3370 + a0c162e commit d1cd084

File tree

3 files changed

+90
-71
lines changed

3 files changed

+90
-71
lines changed

.github/workflows/build.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -135,7 +135,7 @@ jobs:
135135
name: "Miri tests"
136136
runs-on: ubuntu-latest
137137
env:
138-
MIRIFLAGS: "-Zmiri-disable-isolation -Zmiri-strict-provenance -Zmiri-tag-raw-pointers -Zmiri-ignore-leaks"
138+
MIRIFLAGS: "-Zmiri-disable-isolation -Zmiri-strict-provenance -Zmiri-tag-raw-pointers"
139139
steps:
140140
- uses: actions/checkout@v1
141141
- run: rustup toolchain install nightly --profile minimal --component rust-src miri

src/hole.rs

Lines changed: 26 additions & 51 deletions
Original file line numberDiff line numberDiff line change
@@ -120,31 +120,31 @@ impl Cursor {
120120
alloc_ptr = aligned_addr;
121121
alloc_size = required_size;
122122

123-
// Okay, time to move onto the back padding. Here, we are opportunistic -
124-
// if it fits, we sits. Otherwise we just skip adding the back padding, and
125-
// sort of assume that the allocation is actually a bit larger than it
126-
// actually needs to be.
127-
//
128-
// NOTE: Because we always use `HoleList::align_layout`, the size of
129-
// the new allocation is always "rounded up" to cover any partial gaps that
130-
// would have occurred. For this reason, we DON'T need to "round up"
131-
// to account for an unaligned hole spot.
132-
let hole_layout = Layout::new::<Hole>();
133-
let back_padding_start = align_up(allocation_end, hole_layout.align());
134-
let back_padding_end = back_padding_start.wrapping_add(hole_layout.size());
135-
136-
// Will the proposed new back padding actually fit in the old hole slot?
137-
back_padding = if back_padding_end <= hole_end {
138-
// Yes, it does! Place a back padding node
139-
Some(HoleInfo {
140-
addr: back_padding_start,
141-
size: (hole_end as usize) - (back_padding_start as usize),
142-
})
143-
} else {
144-
// No, it does not. We are now pretending the allocation now
145-
// holds the extra 0..size_of::<Hole>() bytes that are not
146-
// big enough to hold what SHOULD be back_padding
123+
// Okay, time to move onto the back padding.
124+
let back_padding_size = hole_end as usize - allocation_end as usize;
125+
back_padding = if back_padding_size == 0 {
147126
None
127+
} else {
128+
// NOTE: Because we always use `HoleList::align_layout`, the size of
129+
// the new allocation is always "rounded up" to cover any partial gaps that
130+
// would have occurred. For this reason, we DON'T need to "round up"
131+
// to account for an unaligned hole spot.
132+
let hole_layout = Layout::new::<Hole>();
133+
let back_padding_start = align_up(allocation_end, hole_layout.align());
134+
let back_padding_end = back_padding_start.wrapping_add(hole_layout.size());
135+
136+
// Will the proposed new back padding actually fit in the old hole slot?
137+
if back_padding_end <= hole_end {
138+
// Yes, it does! Place a back padding node
139+
Some(HoleInfo {
140+
addr: back_padding_start,
141+
size: back_padding_size,
142+
})
143+
} else {
144+
// No, it does not. We don't want to leak any heap bytes, so we
145+
// consider this hole unsuitable for the requested allocation.
146+
return Err(self);
147+
}
148148
};
149149
}
150150

@@ -697,34 +697,9 @@ fn deallocate(list: &mut HoleList, addr: *mut u8, size: usize) {
697697
#[cfg(test)]
698698
pub mod test {
699699
use super::HoleList;
700-
use crate::{align_down_size, Heap};
700+
use crate::{align_down_size, test::new_heap};
701701
use core::mem::size_of;
702-
use std::{alloc::Layout, convert::TryInto, mem::MaybeUninit, prelude::v1::*, ptr::NonNull};
703-
704-
#[repr(align(128))]
705-
struct Chonk<const N: usize> {
706-
data: [MaybeUninit<u8>; N],
707-
}
708-
709-
impl<const N: usize> Chonk<N> {
710-
pub fn new() -> Self {
711-
Self {
712-
data: [MaybeUninit::uninit(); N],
713-
}
714-
}
715-
}
716-
717-
fn new_heap() -> Heap {
718-
const HEAP_SIZE: usize = 1000;
719-
let heap_space = Box::leak(Box::new(Chonk::<HEAP_SIZE>::new()));
720-
let data = &mut heap_space.data;
721-
let assumed_location = data.as_mut_ptr().cast();
722-
723-
let heap = Heap::from_slice(data);
724-
assert_eq!(heap.bottom(), assumed_location);
725-
assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));
726-
heap
727-
}
702+
use std::{alloc::Layout, convert::TryInto, prelude::v1::*, ptr::NonNull};
728703

729704
#[test]
730705
fn cursor() {

src/test.rs

Lines changed: 63 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,12 @@
11
use super::*;
2-
use core::alloc::Layout;
3-
use std::mem::{align_of, size_of, MaybeUninit};
4-
use std::prelude::v1::*;
2+
use core::{
3+
alloc::Layout,
4+
ops::{Deref, DerefMut},
5+
};
6+
use std::{
7+
mem::{align_of, size_of, MaybeUninit},
8+
prelude::v1::*,
9+
};
510

611
#[repr(align(128))]
712
struct Chonk<const N: usize> {
@@ -16,30 +21,69 @@ impl<const N: usize> Chonk<N> {
1621
}
1722
}
1823

19-
fn new_heap() -> Heap {
24+
pub struct OwnedHeap<F> {
25+
heap: Heap,
26+
_drop: F,
27+
}
28+
29+
impl<F> Deref for OwnedHeap<F> {
30+
type Target = Heap;
31+
32+
fn deref(&self) -> &Self::Target {
33+
&self.heap
34+
}
35+
}
36+
37+
impl<F> DerefMut for OwnedHeap<F> {
38+
fn deref_mut(&mut self) -> &mut Self::Target {
39+
&mut self.heap
40+
}
41+
}
42+
43+
pub fn new_heap() -> OwnedHeap<impl Sized> {
2044
const HEAP_SIZE: usize = 1000;
21-
let heap_space = Box::leak(Box::new(Chonk::<HEAP_SIZE>::new()));
45+
let mut heap_space = Box::new(Chonk::<HEAP_SIZE>::new());
2246
let data = &mut heap_space.data;
2347
let assumed_location = data.as_mut_ptr().cast();
2448

25-
let heap = Heap::from_slice(data);
49+
let heap = unsafe { Heap::new(data.as_mut_ptr().cast(), data.len()) };
2650
assert_eq!(heap.bottom(), assumed_location);
2751
assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));
28-
heap
52+
53+
let drop = move || {
54+
let _ = heap_space;
55+
};
56+
OwnedHeap { heap, _drop: drop }
2957
}
3058

31-
fn new_max_heap() -> Heap {
59+
fn new_max_heap() -> OwnedHeap<impl Sized> {
3260
const HEAP_SIZE: usize = 1024;
3361
const HEAP_SIZE_MAX: usize = 2048;
34-
let heap_space = Box::leak(Box::new(Chonk::<HEAP_SIZE_MAX>::new()));
62+
let mut heap_space = Box::new(Chonk::<HEAP_SIZE_MAX>::new());
3563
let data = &mut heap_space.data;
3664
let start_ptr = data.as_mut_ptr().cast();
3765

3866
// Unsafe so that we have provenance over the whole allocation.
3967
let heap = unsafe { Heap::new(start_ptr, HEAP_SIZE) };
4068
assert_eq!(heap.bottom(), start_ptr);
4169
assert_eq!(heap.size(), HEAP_SIZE);
42-
heap
70+
71+
let drop = move || {
72+
let _ = heap_space;
73+
};
74+
OwnedHeap { heap, _drop: drop }
75+
}
76+
77+
fn new_heap_skip(ct: usize) -> OwnedHeap<impl Sized> {
78+
const HEAP_SIZE: usize = 1000;
79+
let mut heap_space = Box::new(Chonk::<HEAP_SIZE>::new());
80+
let data = &mut heap_space.data[ct..];
81+
let heap = unsafe { Heap::new(data.as_mut_ptr().cast(), data.len()) };
82+
83+
let drop = move || {
84+
let _ = heap_space;
85+
};
86+
OwnedHeap { heap, _drop: drop }
4387
}
4488

4589
#[test]
@@ -51,7 +95,15 @@ fn empty() {
5195

5296
#[test]
5397
fn oom() {
54-
let mut heap = new_heap();
98+
const HEAP_SIZE: usize = 1000;
99+
let mut heap_space = Box::new(Chonk::<HEAP_SIZE>::new());
100+
let data = &mut heap_space.data;
101+
let assumed_location = data.as_mut_ptr().cast();
102+
103+
let mut heap = unsafe { Heap::new(data.as_mut_ptr().cast(), data.len()) };
104+
assert_eq!(heap.bottom(), assumed_location);
105+
assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));
106+
55107
let layout = Layout::from_size_align(heap.size() + 1, align_of::<usize>());
56108
let addr = heap.allocate_first_fit(layout.unwrap());
57109
assert!(addr.is_err());
@@ -388,14 +440,6 @@ fn allocate_multiple_unaligned() {
388440
}
389441
}
390442

391-
fn new_heap_skip(ct: usize) -> Heap {
392-
const HEAP_SIZE: usize = 1000;
393-
let heap_space = Box::leak(Box::new(Chonk::<HEAP_SIZE>::new()));
394-
let data = &mut heap_space.data[ct..];
395-
let heap = Heap::from_slice(data);
396-
heap
397-
}
398-
399443
#[test]
400444
fn allocate_usize() {
401445
let mut heap = new_heap();

0 commit comments

Comments (0)