Skip to content

Commit e42cd8c

Browse files
Authored commit e42cd8c — "v0.6 (#4)" — 1 parent: 03fa36f

File tree

6 files changed

+101
-29
lines changed

6 files changed

+101
-29
lines changed

Cargo.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[package]
22
name = "toolshed"
3-
version = "0.5.0"
3+
version = "0.6.0"
44
authors = ["maciejhirsz <[email protected]>"]
55
license = "MIT/Apache-2.0"
66
description = "Arena allocator and a handful of useful data structures"

README.md

+13-7
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,6 @@ of `enum`s and find yourself in pain having to put everything in
3232
## Example
3333

3434
```rust
35-
3635
extern crate toolshed;
3736

3837
use toolshed::Arena;
@@ -51,9 +50,15 @@ fn main() {
5150
// Create a new arena
5251
let arena = Arena::new();
5352

54-
// The reference will live until `arena` goes out of scope
55-
let child = arena.alloc(Foo::Integer(42));
56-
let parent = arena.alloc(Foo::Nested(child));
53+
// We allocate first instance of `Foo` in the arena.
54+
//
55+
// Please note that the `alloc` method returns a `&mut` reference.
56+
// Since we want to share our references around, we are going to
57+
// dereference and re-reference them to immutable ones with `&*`.
58+
let child: &Foo = &*arena.alloc(Foo::Integer(42));
59+
60+
// Next instance of `Foo` will contain the child reference.
61+
let parent: &Foo = &*arena.alloc(Foo::Nested(child));
5762

5863
// Empty map does not allocate
5964
let map = Map::new();
@@ -62,8 +67,10 @@ fn main() {
6267
// The reference can be shared, since `Arena` uses interior mutability.
6368
map.insert(&arena, "child", child);
6469

65-
// We can put our `map` on the arena as well.
66-
let map: &Map<&str, &Foo> = arena.alloc(map);
70+
// We can put our `map` on the arena as well. Once again we use the `&*`
71+
// operation to change the reference to be immutable, just to demonstrate
72+
// that our `Map` implementation is perfectly happy with internal mutability.
73+
let map: &Map<&str, &Foo> = &*arena.alloc(map);
6774

6875
// Each insert allocates a small chunk of data on the arena. Since arena is
6976
// preallocated on the heap, these inserts are very, very fast.
@@ -77,7 +84,6 @@ fn main() {
7784
assert_eq!(map.get("parent"), Some(&Foo::Nested(&Foo::Integer(42))));
7885
assert_eq!(map.get("heh"), None);
7986
}
80-
8187
```
8288

8389
## Benches

src/arena.rs

+69-11
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
//! `Arena` is exported at the root of the crate.
33
44
use std::mem::size_of;
5+
use std::ops::Deref;
56
use std::cell::Cell;
67
use std::borrow::Cow;
78

@@ -28,10 +29,10 @@ pub struct Uninitialized<'arena, T: 'arena> {
2829
impl<'arena, T: 'arena> Uninitialized<'arena, T> {
2930
/// Initialize the memory at the pointer with a given value.
3031
#[inline]
31-
pub fn init(self, value: T) -> &'arena T {
32+
pub fn init(self, value: T) -> &'arena mut T {
3233
*self.pointer = value;
3334

34-
&*self.pointer
35+
self.pointer
3536
}
3637

3738
/// Get a reference to the pointer without writing to it.
@@ -46,7 +47,7 @@ impl<'arena, T: 'arena> Uninitialized<'arena, T> {
4647
///
4748
/// **Reading from this reference without calling `init` is undefined behavior.**
4849
#[inline]
49-
pub unsafe fn into_mut(self) -> &'arena mut T {
50+
pub unsafe fn as_mut_ref(self) -> &'arena mut T {
5051
self.pointer
5152
}
5253

@@ -69,6 +70,60 @@ impl<'arena, T: 'arena> From<&'arena mut T> for Uninitialized<'arena, T> {
6970
}
7071
}
7172

73+
/// A wrapper around a `str` slice that has an extra `0` byte allocated following
74+
/// its contents.
75+
pub struct NulTermStr<'arena>(&'arena str);
76+
77+
impl<'arena> NulTermStr<'arena> {
78+
/// Read byte at a given `index`. This does not check for length boundaries,
79+
/// but is guaranteed to return `0` for `index` equal to the length.
80+
///
81+
/// This can be a very useful optimization when reading a long string one
82+
/// byte at a time until termination, if checking for `0` can replace what
83+
/// would otherwise have to be length checks.
84+
///
85+
/// ```rust
86+
/// # extern crate toolshed;
87+
/// # use toolshed::Arena;
88+
/// # fn main() {
89+
/// let arena = Arena::new();
90+
/// let str = arena.alloc_nul_term_str("foo");
91+
///
92+
/// // We can safely get the underlying `&str` at any time.
93+
/// assert_eq!(&str[..], "foo");
94+
///
95+
/// unsafe {
96+
/// // First 3 bytes are known to us
97+
/// assert_eq!(str.byte_unchecked(0), b'f');
98+
/// assert_eq!(str.byte_unchecked(1), b'o');
99+
/// assert_eq!(str.byte_unchecked(2), b'o');
100+
///
101+
/// // Following is safe and guaranteed to be '0'
102+
/// assert_eq!(str.byte_unchecked(3), 0);
103+
///
104+
/// // Reading index 4 would be undefined behavior!
105+
/// }
106+
/// # }
107+
/// ```
108+
pub unsafe fn byte_unchecked(&self, index: usize) -> u8 {
109+
*self.0.as_ptr().add(index)
110+
}
111+
}
112+
113+
impl<'arena> AsRef<str> for NulTermStr<'arena> {
114+
fn as_ref(&self) -> &str {
115+
self.0
116+
}
117+
}
118+
119+
impl<'arena> Deref for NulTermStr<'arena> {
120+
type Target = str;
121+
122+
fn deref(&self) -> &str {
123+
self.0
124+
}
125+
}
126+
72127
impl Arena {
73128
/// Create a new arena with a single preallocated 64KiB page.
74129
pub fn new() -> Self {
@@ -84,7 +139,7 @@ impl Arena {
84139

85140
/// Put the value onto the page of the arena and return a reference to it.
86141
#[inline]
87-
pub fn alloc<'arena, T: Sized + Copy>(&'arena self, value: T) -> &'arena T {
142+
pub fn alloc<'arena, T: Sized + Copy>(&'arena self, value: T) -> &'arena mut T {
88143
self.alloc_uninitialized().init(value)
89144
}
90145

@@ -159,16 +214,19 @@ impl Arena {
159214
/// No checks are performed on the source and whether or not it already contains
160215
/// any nul bytes. While this does not create any memory issues, it assumes that
161216
/// the reader of the source can deal with malformed source.
162-
pub fn alloc_str_with_nul<'arena>(&'arena self, val: &str) -> *const u8 {
217+
pub fn alloc_nul_term_str<'arena>(&'arena self, val: &str) -> NulTermStr {
163218
let len_with_zero = val.len() + 1;
164219
let ptr = self.require(len_with_zero);
165220

166221
unsafe {
167222
use std::ptr::copy_nonoverlapping;
223+
use std::slice::from_raw_parts;
224+
use std::str::from_utf8_unchecked;
168225

169226
copy_nonoverlapping(val.as_ptr(), ptr, val.len());
170-
*ptr.offset(val.len() as isize) = 0;
171-
ptr
227+
*ptr.add(val.len()) = 0;
228+
229+
NulTermStr(from_utf8_unchecked(from_raw_parts(ptr, val.len())))
172230
}
173231
}
174232

@@ -223,7 +281,7 @@ impl Arena {
223281
self.ptr.get()
224282
} else {
225283
self.offset.set(cap);
226-
unsafe { self.ptr.get().offset(offset as isize) }
284+
unsafe { self.ptr.get().add(offset) }
227285
}
228286
}
229287

@@ -361,10 +419,10 @@ mod test {
361419
}
362420

363421
#[test]
364-
fn alloc_str_with_nul() {
422+
fn alloc_nul_term_str() {
365423
let arena = Arena::new();
366-
let ptr = arena.alloc_str_with_nul("abcdefghijk");
367-
let allocated = unsafe { ::std::slice::from_raw_parts(ptr, 12) };
424+
let nts = arena.alloc_nul_term_str("abcdefghijk");
425+
let allocated = unsafe { ::std::slice::from_raw_parts(nts.as_ptr(), 12) };
368426

369427
assert_eq!(arena.offset.get(), 16);
370428
assert_eq!(

src/lib.rs

+16-8
Original file line numberDiff line numberDiff line change
@@ -51,9 +51,15 @@
5151
//! // Create a new arena
5252
//! let arena = Arena::new();
5353
//!
54-
//! // The reference will live until `arena` goes out of scope
55-
//! let child = arena.alloc(Foo::Integer(42));
56-
//! let parent = arena.alloc(Foo::Nested(child));
54+
//! // We allocate first instance of `Foo` in the arena.
55+
//! //
56+
//! // Please note that the `alloc` method returns a `&mut` reference.
57+
//! // Since we want to share our references around, we are going to
58+
//! // dereference and re-reference them to immutable ones with `&*`.
59+
//! let child: &Foo = &*arena.alloc(Foo::Integer(42));
60+
//!
61+
//! // Next instance of `Foo` will contain the child reference.
62+
//! let parent: &Foo = &*arena.alloc(Foo::Nested(child));
5763
//!
5864
//! // Empty map does not allocate
5965
//! let map = Map::new();
@@ -62,8 +68,10 @@
6268
//! // The reference can be shared, since `Arena` uses interior mutability.
6369
//! map.insert(&arena, "child", child);
6470
//!
65-
//! // We can put our `map` on the arena as well.
66-
//! let map: &Map<&str, &Foo> = arena.alloc(map);
71+
//! // We can put our `map` on the arena as well. Once again we use the `&*`
72+
//! // operation to change the reference to be immutable, just to demonstrate
73+
//! // that our `Map` implementation is perfectly happy with internal mutability.
74+
//! let map: &Map<&str, &Foo> = &*arena.alloc(map);
6775
//!
6876
//! // Each insert allocates a small chunk of data on the arena. Since arena is
6977
//! // preallocated on the heap, these inserts are very, very fast.
@@ -92,17 +100,17 @@ extern crate serde_json;
92100

93101
extern crate fxhash;
94102

95-
pub mod cell;
103+
mod cell;
96104
pub mod map;
97105
pub mod set;
98106
pub mod list;
99-
pub mod arena;
107+
mod arena;
100108
mod bloom;
101109
mod impl_partial_eq;
102110
mod impl_debug;
103111

104112
#[cfg(feature = "impl_serialize")]
105113
mod impl_serialize;
106114

107-
pub use arena::Arena;
115+
pub use arena::{Arena, Uninitialized, NulTermStr};
108116
pub use cell::CopyCell;

src/list.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -201,7 +201,7 @@ where
201201
/// Push a new item at the end of the `List`.
202202
#[inline]
203203
pub fn push(&self, arena: &'arena Arena, item: T) {
204-
let next = Some(arena.alloc(ListNode {
204+
let next = Some(&*arena.alloc(ListNode {
205205
value: item,
206206
next: CopyCell::new(None)
207207
}));

src/map.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -143,7 +143,7 @@ where
143143
Some(old)
144144
},
145145
None => {
146-
let new = Some(arena.alloc(MapNode::new(key, hash, value)));
146+
let new = Some(&*arena.alloc(MapNode::new(key, hash, value)));
147147

148148
if let Some(last) = self.last.get() {
149149
last.next.set(new);

0 commit comments

Comments (0)