Implements bsan_alloc and bsan_dealloc #16

Open · wants to merge 6 commits into base: bsan
25 changes: 24 additions & 1 deletion Cargo.lock
@@ -309,8 +309,9 @@ dependencies = [
"cbindgen",
"hashbrown 0.15.2",
"libc",
"log",
"rustc-hash 2.1.1",
"smallvec",
"test-log",
]

[[package]]
@@ -5282,6 +5283,28 @@ dependencies = [
"rayon",
]

[[package]]
name = "test-log"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7f46083d221181166e5b6f6b1e5f1d499f3a76888826e6cb1d057554157cd0f"
dependencies = [
"env_logger",
"test-log-macros",
"tracing-subscriber",
]

[[package]]
name = "test-log-macros"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "888d0c3c6db53c0fdab160d2ed5e12ba745383d3e85813f2ea0f2b1475ab553f"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.87",
]

[[package]]
name = "thin-vec"
version = "0.2.13"
2 changes: 1 addition & 1 deletion src/llvm-project
5 changes: 4 additions & 1 deletion src/tools/bsan/bsan-rt/Cargo.toml
@@ -4,10 +4,13 @@ version = "0.1.0"
edition = "2021"

[dependencies]
log = "0.4"
libc = { version = "0.2.169", default-features = false }
hashbrown = { version = "0.15.2", default-features = false, features = ["default-hasher", "nightly", "inline-more"] }
rustc-hash = { version = "2.1.1", default-features = false }
smallvec = { version = "1.14.0" }

[dev-dependencies]
test-log = "0.2.17"

[lib]
name = "bsan_rt"
49 changes: 41 additions & 8 deletions src/tools/bsan/bsan-rt/src/block.rs
@@ -11,34 +11,65 @@ use crate::*;
/// of a singly-linked list. For this implementation to be sound,
/// the pointer that is returned must not be mutated concurrently.
pub unsafe trait Linkable<T: Sized> {
fn next(&self) -> *mut *mut T;
fn next(&mut self) -> *mut *mut T;
Collaborator:
I think we should make the parameter something like `node: *mut T` instead of `&mut self`, in case there's a situation where multiple threads could call this method. I don't think that's the case at the moment, since this should only be called in the critical section where we lock the free list, but I'm uncertain about the soundness of that with respect to aliasing.

}
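
A minimal sketch of the reviewer's suggestion (hypothetical; the name `node` is introduced here for illustration). Passing the node as a raw pointer means no `&mut` reference to it is ever materialized, sidestepping the aliasing question:

```rust
// Hypothetical variant of Linkable per the review comment: the link is
// reached through a raw pointer, so the caller never creates a &mut T
// that could alias other live references to the node.
pub unsafe trait Linkable<T: Sized> {
    fn next(node: *mut T) -> *mut *mut T;
}
```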

/// An mmap-ed chunk of memory that will munmap the chunk on drop.
#[derive(Debug)]
pub struct Block<T: Sized> {
pub size: NonZeroUsize,
pub num_elements: NonZeroUsize,
pub base: NonNull<T>,
pub munmap: MUnmap,
}

impl<T: Sized> Block<T> {
/// The number of instances of T that can fit within the block.
#[inline]
fn len(&self) -> NonZeroUsize {
self.num_elements
}

/// The byte width of the block.
#[inline]
fn byte_width(&self) -> NonZeroUsize {
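// NOTE: checked multiplication under cfg(test) to catch misuse early; the
// release path assumes (per this diff) that Layout::array in generate_block
// has already ruled out overflow.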
#[cfg(test)]
let result = self.len().get().checked_mul(mem::size_of::<T>()).unwrap();
#[cfg(not(test))]
let result = unsafe { self.len().get().unchecked_mul(mem::size_of::<T>()) };

unsafe { NonZeroUsize::new_unchecked(result) }
}

/// The last valid, addressable location within the block (at its high-end)
#[inline]
fn last(&self) -> *mut T {
unsafe { self.base.as_ptr().add(self.size.get() - 1) }
unsafe { self.base.as_ptr().add(self.len().get() - 1) }
}

/// The first valid, addressable location within the block (at its low-end)
#[inline]
fn first(&self) -> *mut T {
self.base.as_ptr()
}
}

impl<T> fmt::Debug for Block<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Block")
.field("base", &self.base)
.field("first", &self.first())
.field("last", &self.last())
.field("reserved for num_elements", &self.num_elements)
.finish()
}
}

impl<T> Drop for Block<T> {
fn drop(&mut self) {
// SAFETY: our munmap pointer will be valid by construction of the GlobalCtx.
// We can safely transmute it to c_void since that's what it was originally when
// it was allocated by mmap
let success = unsafe { (self.munmap)(mem::transmute(self.base.as_ptr()), self.size.get()) };
let success =
unsafe { (self.munmap)(mem::transmute(self.base.as_ptr()), self.byte_width().get()) };
if success != 0 {
panic!("Failed to unmap block!");
}
@@ -67,7 +98,7 @@ unsafe impl<T: Linkable<T>> Sync for BlockAllocator<T> {}

impl<T: Linkable<T>> BlockAllocator<T> {
/// Initializes a BlockAllocator for the given block.
fn new(block: Block<T>) -> Self {
pub fn new(block: Block<T>) -> Self {
BlockAllocator {
// we begin at the high-end of the block and decrement downward
cursor: AtomicPtr::new(block.last() as *mut MaybeUninit<T>),
@@ -80,7 +111,7 @@ impl<T: Linkable<T>> BlockAllocator<T> {
/// Allocates a new instance from the block.
/// If a prior allocation has been freed, it will be reused instead of
/// incrementing the internal cursor.
fn alloc(&self) -> Option<NonNull<MaybeUninit<T>>> {
pub fn alloc(&self) -> Option<NonNull<MaybeUninit<T>>> {
if !self.free_lock.swap(true, Ordering::Acquire) {
let curr = unsafe { *self.free_list.get() };
let curr = if !curr.is_null() {
@@ -139,6 +170,8 @@ mod test {
use std::sync::Arc;
use std::thread;

use test_log::test;

use super::*;
use crate::global::test::TEST_HOOKS;
use crate::*;
Expand All @@ -147,7 +180,7 @@ mod test {
}

unsafe impl Linkable<Link> for Link {
fn next(&self) -> *mut *mut Link {
fn next(&mut self) -> *mut *mut Link {
unsafe { mem::transmute(self.link.get()) }
}
}
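
As a usage sketch assembled only from signatures visible in this diff (the surrounding test body is truncated here), the allocator could be exercised like this; `ctx` is an assumed `&GlobalCtx`:

```rust
// Sketch: carve a Block out of mmap-ed memory, wrap it in a BlockAllocator,
// and claim one uninitialized slot. Error handling is elided.
let block: Block<Link> = ctx.new_block(NonZeroUsize::new(1024).unwrap());
let allocator = BlockAllocator::new(block);
let slot: Option<NonNull<MaybeUninit<Link>>> = allocator.alloc();
assert!(slot.is_some());
```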
48 changes: 34 additions & 14 deletions src/tools/bsan/bsan-rt/src/global.rs
@@ -30,8 +30,11 @@ use crate::*;
#[derive(Debug)]
pub struct GlobalCtx {
hooks: BsanHooks,
// TODO(obraunsdorf): Does the counter need to be AtomicU64, because it would weaken security
// with a counter that wraps around often if we are on fewer-bit architectures?

Collaborator:

It seems like 32-bit platforms don't have support for AtomicU64: "PowerPC and MIPS platforms with 32-bit pointers do not have AtomicU64 or AtomicI64 types." (https://doc.rust-lang.org/std/sync/atomic/#portability)
next_alloc_id: AtomicUsize,
next_thread_id: AtomicUsize,
alloc_metadata_map: BlockAllocator<AllocMetadata>,
}
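
One portable answer to the TODO above (a sketch, not part of this PR) is to select the counter type with the `target_has_atomic` cfg, so a 64-bit counter is used exactly where the platform provides one; the field would then be declared as `next_alloc_id: AllocIdCounter`:

```rust
// Hypothetical cfg-gated alias: prefer a 64-bit counter where the target
// supports 64-bit atomics, and fall back to the pointer-width AtomicUsize
// (what this PR uses) everywhere else.
#[cfg(target_has_atomic = "64")]
type AllocIdCounter = core::sync::atomic::AtomicU64;
#[cfg(not(target_has_atomic = "64"))]
type AllocIdCounter = core::sync::atomic::AtomicUsize;
```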

const BSAN_MMAP_PROT: i32 = libc::PROT_READ | libc::PROT_WRITE;
Expand All @@ -41,36 +44,53 @@ impl GlobalCtx {
/// Creates a new instance of `GlobalCtx` using the given `BsanHooks`.
/// This function will also initialize our shadow heap
fn new(hooks: BsanHooks) -> Self {
let page_size = unsafe { libc::sysconf(libc::_SC_PAGESIZE) } as usize;
let alloc_metadata_size = mem::size_of::<AllocMetadata>();
debug_assert!(0 < alloc_metadata_size && alloc_metadata_size <= page_size);
//let num_elements = NonZeroUsize::new(page_size / mem::size_of::<AllocMetadata>()).unwrap();
let num_elements = NonZeroUsize::new(1024).unwrap();
let block = Self::generate_block(hooks.mmap, hooks.munmap, num_elements);
Self {
hooks,
next_alloc_id: AtomicUsize::new(AllocId::min().get()),
next_thread_id: AtomicUsize::new(0),
alloc_metadata_map: BlockAllocator::new(block),
}
}

pub fn new_block<T>(&self, num_elements: NonZeroUsize) -> Block<T> {
pub(crate) unsafe fn allocate_lock_location(&self) -> NonNull<MaybeUninit<AllocMetadata>> {
Collaborator:
I don't think this needs to be an unsafe function. Instead, you can propagate the option upward.

match self.alloc_metadata_map.alloc() {
Some(a) => a,
None => {
log::error!("Failed to allocate lock location");
panic!("Failed to allocate lock location");
}
}
}
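
Following the review suggestion, a safe variant might simply hand the `Option` to the caller (a sketch keeping the same name for illustration):

```rust
// Hypothetical safe rewrite: nothing in this body has a safety
// precondition, so the function need not be unsafe; callers decide how to
// react when the block is exhausted.
pub(crate) fn allocate_lock_location(&self) -> Option<NonNull<MaybeUninit<AllocMetadata>>> {
    self.alloc_metadata_map.alloc()
}
```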

fn generate_block<T>(mmap: MMap, munmap: MUnmap, num_elements: NonZeroUsize) -> Block<T> {
let layout = Layout::array::<T>(num_elements.into()).unwrap();
let size = NonZeroUsize::new(layout.size()).unwrap();
let size: NonZero<usize> = NonZeroUsize::new(layout.size()).unwrap();

debug_assert!(mmap as usize != 0);
let base = unsafe {
(self.hooks.mmap)(
ptr::null_mut(),
layout.size(),
BSAN_MMAP_PROT,
BSAN_MMAP_FLAGS,
-1,
0,
)
(mmap)(ptr::null_mut(), layout.size(), BSAN_MMAP_PROT, BSAN_MMAP_FLAGS, -1, 0)
};

if base.is_null() {
panic!("Allocation failed");
}
let base = unsafe { NonNull::new_unchecked(mem::transmute(base)) };
let munmap = self.hooks.munmap;
Block { size, base, munmap }
let munmap = munmap;

Block { num_elements, base, munmap }
}

pub fn new_block<T>(&self, num_elements: NonZeroUsize) -> Block<T> {
Self::generate_block(self.hooks.mmap, self.hooks.munmap, num_elements)
}

fn allocator(&self) -> BsanAllocHooks {
pub(crate) fn allocator(&self) -> BsanAllocHooks {
self.hooks.alloc
}

@@ -281,7 +301,7 @@ pub unsafe fn global_ctx() -> *mut GlobalCtx {
}

#[cfg(test)]
pub mod test {
pub(crate) mod test {
use crate::*;

unsafe extern "C" fn test_print(ptr: *const c_char) {