Commit 3fdb3d8

[WIP] Create linked list allocator

5 files changed: +373 −0 lines

.gitignore

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
target
Cargo.lock

Cargo.toml

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
[package]
name = "linked_list_allocator"
version = "0.1.0"
authors = ["Philipp Oppermann <[email protected]>"]

[dependencies]

src/hole.rs

Lines changed: 91 additions & 0 deletions
@@ -0,0 +1,91 @@
use core::ptr::Unique;
use core::mem::{self, size_of};
use core::intrinsics;

use super::align_up;

pub struct Hole {
    pub size: usize,
    pub next: Option<Unique<Hole>>,
}

impl Hole {
    // Returns the first hole that is big enough, starting the search at the **next** hole. The
    // reason is that the holes form a singly linked list (we need to update the previous
    // pointer), so even if _this_ hole were large enough, it won't be used.
    pub fn get_first_fit(&mut self, size: usize, align: usize) -> Option<Unique<Hole>> {
        assert!(size % size_of::<usize>() == 0);
        // align must be a power of two
        assert!(unsafe { intrinsics::ctpop(align) } == 1); // exactly one bit set

        // take the next hole and set `self.next` to None
        match self.next.take() {
            None => None,
            Some(mut next) => {
                let next_addr = *next as usize;
                let start_addr = align_up(next_addr, align);

                // the needed padding for the desired alignment
                let padding = start_addr - next_addr;
                assert!(padding == 0 || padding >= size_of::<usize>() * 2); // TODO
                let next_real_size = unsafe { next.get() }.size - padding;

                if next_real_size == size {
                    // exact fit: unlink `next` and hand it out
                    let next_next: Option<Unique<_>> = unsafe { next.get_mut() }.next.take();
                    self.next = next_next;
                    Some(next)
                } else if next_real_size > size {
                    // larger than needed: split it and keep the rest in the list
                    let next_next: Option<Unique<_>> = unsafe { next.get_mut() }.next.take();
                    let new_hole = Hole {
                        size: next_real_size - size,
                        next: next_next,
                    };
                    unsafe {
                        let mut new_hole_ptr = Unique::new((start_addr + size) as *mut Hole);
                        mem::forget(mem::replace(new_hole_ptr.get_mut(), new_hole));
                        self.next = Some(new_hole_ptr);
                    }
                    Some(next)
                } else {
                    // too small: continue the search at the next hole
                    let ret = unsafe { next.get_mut().get_first_fit(size, align) };
                    self.next = Some(next);
                    ret
                }
            }
        }
    }

    pub fn add_hole(&mut self, mut hole: Unique<Hole>) {
        unsafe {
            if hole.get().size == 0 {
                return;
            }
            assert!(hole.get().size % size_of::<usize>() == 0);
            assert!(hole.get().next.is_none());
        }

        let hole_addr = *hole as usize;

        if self.next.as_mut().map_or(true, |n| hole_addr < **n as usize) {
            // hole is before the start of the next hole or this is the last hole
            let self_addr = self as *mut _ as usize;

            if hole_addr == self_addr + self.size {
                // the new hole is right behind this hole, so we can just increase this hole's
                // size
                self.size += unsafe { hole.get().size };
            } else {
                // insert the hole behind this hole
                unsafe { hole.get_mut() }.next = self.next.take();
                self.next = Some(hole);
            }
        } else {
            // hole is behind the next hole
            assert!(self.next.is_some());
            let next = self.next.as_mut().unwrap();
            assert!(hole_addr > **next as usize);

            // insert it behind the next hole
            unsafe { next.get_mut().add_hole(hole) };
        }
    }
}
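
The dummy-head trick above is why get_first_fit starts at the *next* hole: unlinking a node from a singly linked list needs a mutable handle on its predecessor. A minimal safe-Rust model of the same walk, using Box in place of Unique and omitting the alignment and splitting logic (an illustrative sketch, not part of this commit):

// First-fit search on a singly linked list with a dummy head: the search
// inspects `self.next`, never `self`, so removal is a simple relink.
struct Node {
    size: usize,
    next: Option<Box<Node>>,
}

impl Node {
    // returns the first node with at least `size` bytes, unlinking it
    fn take_first_fit(&mut self, size: usize) -> Option<Box<Node>> {
        match self.next.take() {
            None => None,
            Some(mut next) => {
                if next.size >= size {
                    // big enough: unlink `next` and hand it out
                    self.next = next.next.take();
                    Some(next)
                } else {
                    // too small: continue behind it, then relink it
                    let ret = next.take_first_fit(size);
                    self.next = Some(next);
                    ret
                }
            }
        }
    }
}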

src/lib.rs

Lines changed: 206 additions & 0 deletions
@@ -0,0 +1,206 @@
#![feature(const_fn)]
#![feature(unique)]
#![feature(core_intrinsics)]
#![no_std]

#[cfg(test)]
#[macro_use]
extern crate std;

use core::ptr::Unique;
use core::mem::{self, size_of};

use hole::Hole;
use small_hole::SmallHole;

mod hole;
mod small_hole;

pub struct Heap {
    holes: Hole,            // dummy
    small_holes: SmallHole, // dummy
}

impl Heap {
    pub const fn empty() -> Heap {
        Heap {
            holes: Hole {
                size: 0,
                next: None,
            },
            small_holes: SmallHole { next: None },
        }
    }

    pub fn new(heap_bottom: usize, heap_top: usize) -> Heap {
        assert!(size_of::<SmallHole>() == size_of::<usize>());
        assert!(size_of::<Hole>() == size_of::<usize>() * 2);

        let first_hole = Hole {
            size: heap_top - heap_bottom,
            next: None,
        };

        // write the hole header into the (otherwise unused) heap memory itself
        let mut first_hole_ptr = unsafe { Unique::new(heap_bottom as *mut Hole) };
        unsafe { mem::forget(mem::replace(first_hole_ptr.get_mut(), first_hole)) };

        let mut heap = Heap::empty();
        heap.holes.next = Some(first_hole_ptr);
        heap
    }
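
    // A note on the `mem::forget(mem::replace(..))` pattern in `new` above: `mem::replace`
    // writes the new `Hole` through the pointer and returns the old bytes, interpreted as a
    // `Hole`. Those bytes are uninitialized heap memory, so the returned value must not be
    // dropped; `mem::forget` discards it without running destructors. `Hole` currently has no
    // `Drop` impl, but the pattern stays safe if one is ever added.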

    pub fn allocate_first_fit(&mut self, mut size: usize, align: usize) -> Option<*mut u8> {
        // every allocation is rounded up to a multiple of the word size
        size = align_up(size, size_of::<usize>());
        let mut ret = None;

        // word-sized requests are served from the separate small-hole list first
        if size == size_of::<SmallHole>() {
            ret = ret.or_else(|| {
                self.small_holes.get_first_fit(align).map(|hole| {
                    let hole_start_addr = *hole as usize;
                    assert!(hole_start_addr % align == 0);
                    hole_start_addr as *mut u8
                })
            });
        }

        ret = ret.or_else(|| {
            self.holes.get_first_fit(size, align).map(|hole| {
                let hole_start_addr = *hole as usize;
                let aligned_address = align_up(hole_start_addr, align);
                let padding = aligned_address - hole_start_addr;
                if padding > 0 {
                    // hand the alignment padding back to the allocator as a new hole
                    assert!(unsafe { hole.get().size } - padding >= size);
                    self.deallocate(*hole as *mut u8, padding, 1);
                }
                aligned_address as *mut u8
            })
        });

        ret
    }
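
    // A worked example of the padding path above, with illustrative numbers (assuming a 64-bit
    // target): if `get_first_fit` returns a hole at address 1000 with size 64 for a request of
    // size 16 and align 32, then aligned_address = align_up(1000, 32) = 1024 and padding = 24.
    // That satisfies the `padding == 0 || padding >= 2 * size_of::<usize>()` assertion in
    // hole.rs; the 24 padding bytes in front go back via `deallocate`, and the caller gets 1024.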

    pub fn deallocate(&mut self, ptr: *mut u8, mut size: usize, _align: usize) {
        if size <= size_of::<SmallHole>() {
            // a block of at most one word goes on the small-hole list
            let hole = SmallHole { next: None };
            let mut hole_ptr = unsafe { Unique::new(ptr as *mut SmallHole) };
            unsafe { mem::forget(mem::replace(hole_ptr.get_mut(), hole)) };

            self.small_holes.add_hole(hole_ptr);
        } else {
            // a regular hole needs room for its header (size field plus next pointer)
            if size < size_of::<Hole>() {
                size = size_of::<Hole>();
            }
            let hole = Hole {
                size: size,
                next: None,
            };
            let mut hole_ptr = unsafe { Unique::new(ptr as *mut Hole) };
            unsafe { mem::forget(mem::replace(hole_ptr.get_mut(), hole)) };

            self.holes.add_hole(hole_ptr);
        }
    }
}

fn align_down(value: usize, align: usize) -> usize {
    value / align * align
}

fn align_up(value: usize, align: usize) -> usize {
    align_down(value + align - 1, align)
}
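
// A worked example for the two helpers above: `align_down` rounds toward zero via integer
// division, so align_down(13, 8) = 13 / 8 * 8 = 8, and align_up(13, 8) = align_down(20, 8) = 16,
// while an already aligned value is unchanged: align_up(16, 8) = align_down(23, 8) = 16.
// The arithmetic only assumes a non-zero `align`; callers additionally assert a power of two.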

#[cfg(test)]
mod test {
    use std::prelude::v1::*;
    use std::mem::{size_of, align_of};
    use super::*;

    fn new_heap() -> Heap {
        const HEAP_SIZE: usize = 1000;
        let dummy = Box::into_raw(Box::new([0u8; HEAP_SIZE]));

        let heap_bottom = dummy as usize;
        let heap_top = heap_bottom + HEAP_SIZE;
        Heap::new(heap_bottom, heap_top)
    }

    #[test]
    fn allocate_double_usize() {
        let mut heap = new_heap();
        assert!(heap.allocate_first_fit(size_of::<usize>() * 2, align_of::<usize>()).is_some());
    }

    #[test]
    fn allocate_and_free_double_usize() {
        let mut heap = new_heap();

        let x = heap.allocate_first_fit(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
        unsafe {
            *(x as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);
        }
        heap.deallocate(x, size_of::<usize>() * 2, align_of::<usize>());
    }

    #[test]
    fn reallocate_double_usize() {
        let mut heap = new_heap();

        let x = heap.allocate_first_fit(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
        heap.deallocate(x, size_of::<usize>() * 2, align_of::<usize>());

        let y = heap.allocate_first_fit(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
        heap.deallocate(y, size_of::<usize>() * 2, align_of::<usize>());

        assert_eq!(x, y);
    }

    #[test]
    fn allocate_multiple_sizes() {
        let mut heap = new_heap();
        let base_size = size_of::<usize>();
        let base_align = align_of::<usize>();

        let x = heap.allocate_first_fit(base_size * 2, base_align).unwrap();
        let y = heap.allocate_first_fit(base_size * 7, base_align).unwrap();
        assert_eq!(y as usize, x as usize + base_size * 2);
        let z = heap.allocate_first_fit(base_size * 3, base_align * 4).unwrap();
        assert_eq!(z as usize % (base_size * 4), 0);

        heap.deallocate(x, base_size * 2, base_align);

        let a = heap.allocate_first_fit(base_size * 4, base_align).unwrap();
        let b = heap.allocate_first_fit(base_size * 2, base_align).unwrap();
        assert_eq!(b, x);

        heap.deallocate(y, base_size * 7, base_align);
        heap.deallocate(z, base_size * 3, base_align * 4);
        heap.deallocate(a, base_size * 4, base_align);
        heap.deallocate(b, base_size * 2, base_align);
    }

    #[test]
    fn allocate_usize() {
        let mut heap = new_heap();

        assert!(heap.allocate_first_fit(size_of::<usize>(), 1).is_some());
    }

    #[test]
    fn allocate_usize_in_bigger_block() {
        let mut heap = new_heap();

        let x = heap.allocate_first_fit(size_of::<usize>() * 2, 1).unwrap();
        let y = heap.allocate_first_fit(size_of::<usize>() * 2, 1).unwrap();
        heap.deallocate(x, size_of::<usize>() * 2, 1);

        let z = heap.allocate_first_fit(size_of::<usize>(), 1);
        assert!(z.is_some());
        let z = z.unwrap();
        assert_eq!(x, z);

        heap.deallocate(y, size_of::<usize>() * 2, 1);
        heap.deallocate(z, size_of::<usize>(), 1);
    }
}
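
One contract the tests above make visible: the allocator keeps no per-allocation metadata, so deallocate must receive the same size (and align) that were passed to allocate_first_fit. A minimal round-trip sketch, assuming the Heap type from this commit is in scope (illustrative only, not part of the commit):

fn round_trip(heap: &mut Heap) {
    let size = core::mem::size_of::<usize>() * 2;
    let align = core::mem::align_of::<usize>();
    if let Some(ptr) = heap.allocate_first_fit(size, align) {
        // ... use the pointed-to memory ...
        // the identical size/align pair must be passed back on free
        heap.deallocate(ptr, size, align);
    }
}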

src/small_hole.rs

Lines changed: 68 additions & 0 deletions
@@ -0,0 +1,68 @@
use core::ptr::Unique;
use core::mem::size_of;
use core::intrinsics;

// A hole with size == size_of::<usize>()
pub struct SmallHole {
    pub next: Option<Unique<SmallHole>>,
}

impl SmallHole {
    // Returns the first hole that has the desired alignment, starting the search at the **next**
    // hole. The reason is that the holes form a singly linked list (we need to update the
    // previous pointer), so even if _this_ hole had the right alignment, it won't be used.
    pub fn get_first_fit(&mut self, align: usize) -> Option<Unique<SmallHole>> {
        // align must be a power of two
        assert!(unsafe { intrinsics::ctpop(align) } == 1); // exactly one bit set

        // take the next hole and set `self.next` to None
        match self.next.take() {
            None => None,
            Some(mut next) => {
                let next_addr = *next as usize;

                if next_addr % align == 0 {
                    // correctly aligned: unlink `next` and hand it out
                    let next_next: Option<Unique<_>> = unsafe { next.get_mut() }.next.take();
                    self.next = next_next;
                    Some(next)
                } else {
                    // misaligned: continue the search at the next hole
                    let ret = unsafe { next.get_mut().get_first_fit(align) };
                    self.next = Some(next);
                    ret
                }
            }
        }
    }

    pub fn add_hole(&mut self, mut hole: Unique<SmallHole>) {
        unsafe {
            assert!(hole.get().next.is_none());
        }

        let hole_addr = *hole as usize;

        if self.next.as_mut().map_or(true, |n| hole_addr < **n as usize) {
            // hole is before the start of the next hole or this is the last hole
            let self_addr = self as *mut _ as usize;

            if hole_addr == self_addr + size_of::<usize>() {
                // The new hole is right behind this hole, so we want to increase this hole's
                // size. But that forms a normal-sized hole, so we need to remove this block from
                // the small list.
                unimplemented!();
            } else {
                // insert the hole behind this hole
                unsafe { hole.get_mut() }.next = self.next.take();
                self.next = Some(hole);
            }
        } else {
            // hole is behind the next hole
            assert!(self.next.is_some());
            let next = self.next.as_mut().unwrap();
            assert!(hole_addr > **next as usize);

            // insert it behind the next hole
            unsafe { next.get_mut().add_hole(hole) };
        }
    }
}
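
Both add_hole implementations keep the free list sorted by address; that ordering is what makes the adjacency check (hole_addr == self_addr + size) possible, and is what the unimplemented!() merge above will rely on. A safe-Rust model of the same address-ordered insertion, with Box in place of Unique and integer keys in place of real addresses (an illustrative sketch, not part of this commit):

// Sorted insertion into a singly linked free list with a dummy head.
struct Node {
    addr: usize,
    next: Option<Box<Node>>,
}

impl Node {
    fn add_sorted(&mut self, mut node: Box<Node>) {
        if self.next.as_ref().map_or(true, |n| node.addr < n.addr) {
            // the new node belongs right after `self` (or the list ends here)
            node.next = self.next.take();
            self.next = Some(node);
        } else {
            // otherwise it belongs somewhere after `next`: keep walking
            self.next.as_mut().unwrap().add_sorted(node);
        }
    }
}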
