Skip to content

Implemented Computationally Efficient Shadowheap #14

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 8 commits into
base: bsan
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/tools/bsan/bsan-rt/src/block.rs
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ mod test {
#[test]
fn allocate_from_page_in_parallel() {
let ctx = unsafe { init_global_ctx(TEST_HOOKS.clone()) };
let ctx = unsafe { &*ctx };
let ctx = unsafe { ctx };
let block = ctx.new_block::<Link>(unsafe { NonZero::new_unchecked(200) });
let page = Arc::new(BlockAllocator::<Link>::new(block));
let mut threads = Vec::new();
Expand Down
21 changes: 16 additions & 5 deletions src/tools/bsan/bsan-rt/src/global.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ use block::*;
use hashbrown::{DefaultHashBuilder, HashMap};
use rustc_hash::FxBuildHasher;

use crate::shadow::ShadowHeap;
use crate::*;

/// Every action that requires a heap allocation must be performed through a globally
Expand All @@ -32,6 +33,7 @@ pub struct GlobalCtx {
hooks: BsanHooks,
next_alloc_id: AtomicUsize,
next_thread_id: AtomicUsize,
shadow_heap: ShadowHeap<Provenance>,
}

const BSAN_MMAP_PROT: i32 = libc::PROT_READ | libc::PROT_WRITE;
Expand All @@ -42,12 +44,21 @@ impl GlobalCtx {
/// This function will also initialize our shadow heap
fn new(hooks: BsanHooks) -> Self {
Self {
hooks,
hooks: hooks.clone(),
next_alloc_id: AtomicUsize::new(AllocId::min().get()),
next_thread_id: AtomicUsize::new(0),
shadow_heap: ShadowHeap::new(&hooks),
}
}

pub fn shadow_heap(&self) -> &ShadowHeap<Provenance> {
&self.shadow_heap
}

pub fn hooks(&self) -> &BsanHooks {
&self.hooks
}

pub fn new_block<T>(&self, num_elements: NonZeroUsize) -> Block<T> {
let layout = Layout::array::<T>(num_elements.into()).unwrap();
let size = NonZeroUsize::new(layout.size()).unwrap();
Expand Down Expand Up @@ -266,7 +277,7 @@ pub static GLOBAL_CTX: SyncUnsafeCell<MaybeUninit<GlobalCtx>> =
/// It is marked as `unsafe`, because it relies on the set of function pointers in
/// `BsanHooks` to be valid.
#[inline]
pub unsafe fn init_global_ctx(hooks: BsanHooks) -> *mut GlobalCtx {
pub unsafe fn init_global_ctx<'a>(hooks: BsanHooks) -> &'a GlobalCtx {
(*GLOBAL_CTX.get()).write(GlobalCtx::new(hooks));
global_ctx()
}
Expand All @@ -285,9 +296,9 @@ pub unsafe fn deinit_global_ctx() {
/// The user needs to ensure that the context is initialized, e.g. `bsan_init`
/// has been called and `bsan_deinit` has not yet been called.
#[inline]
pub unsafe fn global_ctx() -> *mut GlobalCtx {
let ctx: *mut MaybeUninit<GlobalCtx> = GLOBAL_CTX.get();
mem::transmute(ctx)
pub unsafe fn global_ctx<'a>() -> &'a GlobalCtx {
let ctx = GLOBAL_CTX.get();
&*mem::transmute::<*mut MaybeUninit<GlobalCtx>, *mut GlobalCtx>(ctx)
}

#[cfg(test)]
Expand Down
44 changes: 31 additions & 13 deletions src/tools/bsan/bsan-rt/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,12 +56,14 @@ unsafe impl Allocator for BsanAllocHooks {
unsafe {
match layout.size() {
0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
// SAFETY: `layout` is non-zero in size,
size => unsafe {
let raw_ptr: *mut u8 = mem::transmute((self.malloc)(layout.size()));
let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
size => {
let ptr = (self.malloc)(layout.size());
if ptr.is_null() {
return Err(AllocError);
}
let ptr = NonNull::new_unchecked(ptr as *mut u8);
Ok(NonNull::slice_from_raw_parts(ptr, size))
},
}
}
}
}
Expand Down Expand Up @@ -153,13 +155,19 @@ pub type Span = usize;
/// and a borrow tag. We also include a pointer to the "lock" location for the allocation,
/// which contains all other metadata used to detect undefined behavior.
#[repr(C)]
#[derive(Clone, Copy)]
#[derive(Clone, Copy, Debug)]
pub struct Provenance {
pub alloc_id: AllocId,
pub bor_tag: BorTag,
pub alloc_info: *mut c_void,
}

impl Default for Provenance {
fn default() -> Self {
Provenance::null()
}
}

impl Provenance {
/// The default provenance value, which is assigned to dangling or invalid
/// pointers.
Expand Down Expand Up @@ -215,7 +223,7 @@ impl AllocInfo {
#[no_mangle]
unsafe extern "C" fn bsan_init(hooks: BsanHooks) {
let ctx = init_global_ctx(hooks);
let ctx = unsafe { &*ctx };
let ctx = unsafe { ctx };
init_local_ctx(ctx);
ui_test!(ctx, "bsan_init");
}
Expand All @@ -225,7 +233,7 @@ unsafe extern "C" fn bsan_init(hooks: BsanHooks) {
/// will be called after this function has executed.
#[no_mangle]
unsafe extern "C" fn bsan_deinit() {
let global_ctx = unsafe { &*global_ctx() };
let global_ctx = unsafe { global_ctx() };
ui_test!(global_ctx, "bsan_deinit");
deinit_local_ctx();
deinit_global_ctx();
Expand Down Expand Up @@ -258,15 +266,25 @@ extern "C" fn bsan_shadow_clear(addr: usize, access_size: usize) {}
/// Loads the provenance of a given address from shadow memory and stores
/// the result in the return pointer.
#[no_mangle]
extern "C" fn bsan_load_prov(prov: *mut MaybeUninit<Provenance>, addr: usize) {
unsafe {
(*prov).write(Provenance::null());
}
unsafe extern "C" fn bsan_load_prov(prov: *mut Provenance, addr: usize) {
debug_assert!(!prov.is_null());

let ctx = global_ctx();
let heap = ctx.shadow_heap();

*prov = heap.load_prov(addr);
}

/// Stores the given provenance value into shadow memory at the location for the given address.
#[no_mangle]
extern "C" fn bsan_store_prov(prov: *const Provenance, addr: usize) {}
unsafe extern "C" fn bsan_store_prov(prov: *const Provenance, addr: usize) {
debug_assert!(!prov.is_null());

let ctx = global_ctx();
let heap = ctx.shadow_heap();

heap.store_prov(ctx.hooks(), prov, addr);
}

/// Pushes a shadow stack frame
#[no_mangle]
Expand Down
18 changes: 13 additions & 5 deletions src/tools/bsan/bsan-rt/src/local.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,9 @@ pub static LOCAL_CTX: UnsafeCell<MaybeUninit<LocalCtx>> = UnsafeCell::new(MaybeU
///
/// This function should only be called once, when a thread is initialized.
#[inline]
pub unsafe fn init_local_ctx(ctx: &GlobalCtx) -> *mut LocalCtx {
pub unsafe fn init_local_ctx(ctx: &GlobalCtx) -> &LocalCtx {
(*LOCAL_CTX.get()).write(LocalCtx::new(ctx));
local_ctx()
local_ctx_mut()
}

/// Deinitializes the local context object.
Expand All @@ -42,9 +42,17 @@ pub unsafe fn deinit_local_ctx() {
/// # Safety
/// The user needs to ensure that the context is initialized.
#[inline]
pub unsafe fn local_ctx() -> *mut LocalCtx {
let ctx: *mut MaybeUninit<LocalCtx> = LOCAL_CTX.get();
mem::transmute(ctx)
pub unsafe fn local_ctx<'a>() -> &'a LocalCtx {
let ctx = LOCAL_CTX.get();
&*local_ctx_mut()
}

/// # Safety
/// The user needs to ensure that the context is initialized.
#[inline]
pub unsafe fn local_ctx_mut<'a>() -> &'a mut LocalCtx {
let ctx = LOCAL_CTX.get();
&mut *mem::transmute::<*mut MaybeUninit<LocalCtx>, *mut LocalCtx>(ctx)
}

impl Drop for LocalCtx {
Expand Down
Loading