|
| 1 | +//@revisions: stack tree |
| 2 | +//@[tree]compile-flags: -Zmiri-tree-borrows |
| 3 | +#![feature(allocator_api)] |
| 4 | +use std::ptr; |
| 5 | + |
// Test various aliasing-model-related things.
// Each helper below exercises one specific pattern of references and raw
// pointers that the aliasing model(s) under test must accept.
fn main() {
    read_does_not_invalidate1();
    read_does_not_invalidate2();
    mut_raw_then_mut_shr();
    mut_shr_then_mut_raw();
    mut_raw_mut();
    partially_invalidate_mut();
    drop_after_sharing();
    // This case is intentionally disabled here:
    // direct_mut_to_const_raw();
    two_raw();
    shr_and_raw();
    disjoint_mutable_subborrows();
    raw_ref_to_part();
    array_casts();
    mut_below_shr();
    wide_raw_ptr_in_tuple();
    not_unpin_not_protected();
    write_does_not_invalidate_all_aliases();
    box_into_raw_allows_interior_mutable_alias();
}
| 27 | + |
// Make sure that reading from an `&mut` does, like reborrowing to `&`,
// NOT invalidate other reborrows.
fn read_does_not_invalidate1() {
    fn foo(x: &mut (i32, i32)) -> &i32 {
        let xraw = x as *mut (i32, i32);
        // Create a shared reference through the raw pointer; it aliases `x`.
        let ret = unsafe { &(*xraw).1 };
        let _val = x.1; // we just read, this does NOT invalidate the reborrows.
        ret
    }
    // If the read through `x` had invalidated `ret`, this deref would be UB.
    assert_eq!(*foo(&mut (1, 2)), 2);
}
// Same as above, but this time we first create a raw, then read from `&mut`
// and then freeze from the raw.
fn read_does_not_invalidate2() {
    fn foo(x: &mut (i32, i32)) -> &i32 {
        let xraw = x as *mut (i32, i32);
        let _val = x.1; // we just read, this does NOT invalidate the raw reborrow.
        // Only now is the shared reference derived from the raw pointer.
        let ret = unsafe { &(*xraw).1 };
        ret
    }
    assert_eq!(*foo(&mut (1, 2)), 2);
}
| 50 | + |
// Escape a mut to raw, then share the same mut and use the share, then the raw.
// That should work.
fn mut_raw_then_mut_shr() {
    let mut x = 2;
    let xref = &mut x;
    let xraw = &mut *xref as *mut _;
    let xshr = &*xref;
    // Read through the shared reborrow first...
    assert_eq!(*xshr, 2);
    // ...then write through the earlier-created raw pointer.
    unsafe {
        *xraw = 4;
    }
    assert_eq!(x, 4);
}
| 64 | + |
// Create first a shared reference and then a raw pointer from a `&mut`
// should permit mutation through that raw pointer.
fn mut_shr_then_mut_raw() {
    let xref = &mut 2;
    let _xshr = &*xref;
    let xraw = xref as *mut _;
    unsafe {
        *xraw = 3;
    }
    // The original `&mut` observes the write done through the raw pointer.
    assert_eq!(*xref, 3);
}
| 76 | + |
// Ensure that if we derive from a mut a raw, and then from that a mut,
// and then read through the original mut, that does not invalidate the raw.
// This shows that the read-exception for `&mut` applies even if the `Shr` item
// on the stack is not at the top.
fn mut_raw_mut() {
    let mut x = 2;
    {
        let xref1 = &mut x;
        let xraw = xref1 as *mut _;
        let _xref2 = unsafe { &mut *xraw };
        // Read through the original `&mut`; this must NOT invalidate `xraw`.
        let _val = *xref1;
        unsafe {
            *xraw = 4;
        }
        // we can now use both xraw and xref1, for reading
        assert_eq!(*xref1, 4);
        assert_eq!(unsafe { *xraw }, 4);
        assert_eq!(*xref1, 4);
        assert_eq!(unsafe { *xraw }, 4);
        // we cannot use xref2; see `compile-fail/stacked-borrows/illegal_read4.rs`
    }
    assert_eq!(x, 4);
}
| 100 | + |
// Write through `data` to field 1 while a raw-derived `&mut` to field 0 is
// live: the *pointers* overlap but the *accesses* are disjoint, so both must
// remain usable.
fn partially_invalidate_mut() {
    let data = &mut (0u8, 0u8);
    let reborrow = &mut *data as *mut (u8, u8);
    let shard = unsafe { &mut (*reborrow).0 };
    data.1 += 1; // the deref overlaps with `shard`, but that is ok; the access does not overlap.
    *shard += 1; // so we can still use `shard`.
    assert_eq!(*data, (1, 1));
}
| 109 | + |
// Make sure that we can handle the situation where a location is frozen when being dropped.
fn drop_after_sharing() {
    let x = String::from("hello!");
    // `len` takes `&self`, so a shared borrow has happened before `x` is
    // dropped at end of scope.
    let _len = x.len();
}
| 115 | + |
// Make sure that we can create two raw pointers from a mutable reference and use them both.
fn two_raw() {
    unsafe {
        let x = &mut 0;
        let y1 = x as *mut _;
        let y2 = x as *mut _;
        // Interleaved writes through both raw pointers must be accepted.
        *y1 += 2;
        *y2 += 1;
    }
}
| 126 | + |
// Make sure that creating a *mut does not invalidate existing shared references.
fn shr_and_raw() {
    unsafe {
        use std::mem;
        let x = &mut 0;
        let y1: &i32 = mem::transmute(&*x); // launder lifetimes
        let y2 = x as *mut _;
        // Reading through the shared ref after the raw pointer was created
        // must still be allowed...
        let _val = *y1;
        // ...and the raw pointer stays usable for writing afterwards.
        *y2 += 1;
    }
}
| 138 | + |
// Two `&'a mut` borrows of *disjoint fields*, each created through the same
// raw pointer, may coexist and be used interleaved.
fn disjoint_mutable_subborrows() {
    struct Foo {
        a: String,
        b: Vec<u32>,
    }

    // SAFETY (caller): `this` must point to a valid `Foo`, and the returned
    // borrow must only be used while no overlapping access occurs.
    unsafe fn borrow_field_a<'a>(this: *mut Foo) -> &'a mut String {
        &mut (*this).a
    }

    // SAFETY (caller): same contract as `borrow_field_a`, for field `b`.
    unsafe fn borrow_field_b<'a>(this: *mut Foo) -> &'a mut Vec<u32> {
        &mut (*this).b
    }

    let mut foo = Foo { a: "hello".into(), b: vec![0, 1, 2] };

    let ptr = &mut foo as *mut Foo;

    let a = unsafe { borrow_field_a(ptr) };
    let b = unsafe { borrow_field_b(ptr) };
    // Use the two borrows interleaved: mutate `b`, then `a`, then read both.
    b.push(4);
    a.push_str(" world");
    assert_eq!(format!("{:?} {:?}", a, b), r#""hello world" [0, 1, 2, 4]"#);
}
| 163 | + |
// Take a raw pointer to the first field (at offset 0 thanks to `repr(C)`) and
// cast it back to the whole-struct type; the resulting `&mut Whole` must still
// give access to the sibling field `extra`.
fn raw_ref_to_part() {
    struct Part {
        _lame: i32,
    }

    #[repr(C)]
    struct Whole {
        part: Part,
        extra: i32,
    }

    let it = Box::new(Whole { part: Part { _lame: 0 }, extra: 42 });
    let whole = ptr::addr_of_mut!(*Box::leak(it));
    let part = unsafe { ptr::addr_of_mut!((*whole).part) };
    let typed = unsafe { &mut *(part as *mut Whole) };
    assert!(typed.extra == 42);
    // Reconstruct the Box so the leaked allocation is freed again.
    drop(unsafe { Box::from_raw(whole) });
}
| 182 | + |
/// When casting an array reference to a raw element ptr, that should cover the whole array.
fn array_casts() {
    let mut x: [usize; 2] = [0, 0];
    let p = &mut x as *mut usize;
    unsafe {
        // `p` was derived pointing at element 0, but `p.add(1)` must still be
        // within the borrow created by the cast above.
        *p.add(1) = 1;
    }

    // Same exercise with a shared reference / const pointer.
    let x: [usize; 2] = [0, 1];
    let p = &x as *const usize;
    assert_eq!(unsafe { *p.add(1) }, 1);
}
| 195 | + |
/// Transmuting &&i32 to &&mut i32 is fine.
fn mut_below_shr() {
    let x = 0;
    let y = &x;
    // The `&mut` sits *below* an outer shared reference, and is only ever
    // used for reading here.
    let p = unsafe { core::mem::transmute::<&&i32, &&mut i32>(&y) };
    let r = &**p;
    let _val = *r;
}
| 204 | + |
// Putting a wide raw pointer inside a tuple (which gets retagged as a whole)
// must not end up retagging the vtable part of that wide pointer.
fn wide_raw_ptr_in_tuple() {
    let mut x: Box<dyn std::any::Any> = Box::new("ouch");
    let r = &mut *x as *mut dyn std::any::Any;
    // This triggers the visitor-based recursive retagging. It is *not* supposed to retag raw
    // pointers, but then the visitor might recurse into the "fields" of a wide raw pointer and
    // finds a reference (to a vtable) there that it wants to retag... and that would be Wrong.
    let pair = (r, &0);
    let r = unsafe { &mut *pair.0 };
    // Make sure the fn ptr part of the vtable is still fine.
    r.type_id();
}
| 216 | + |
fn not_unpin_not_protected() {
    // `&mut !Unpin`, at least for now, does not get `noalias` nor `dereferenceable`, so we also
    // don't add protectors. (We could, but until we have a better idea for where we want to go with
    // the self-referential-coroutine situation, it does not seem worth the potential trouble.)
    use std::marker::PhantomPinned;

    // The `PhantomPinned` field makes this type `!Unpin`.
    pub struct NotUnpin(#[allow(dead_code)] i32, PhantomPinned);

    fn inner(x: &mut NotUnpin, f: fn(&mut NotUnpin)) {
        // `f` is allowed to deallocate `x`.
        f(x)
    }

    inner(Box::leak(Box::new(NotUnpin(0, PhantomPinned))), |x| {
        // Deallocating the pointee while `inner`'s `&mut` argument is still
        // nominally live is fine here precisely because no protector was added.
        let raw = x as *mut _;
        drop(unsafe { Box::from_raw(raw) });
    });
}
| 235 | + |
// A write through `x` must not invalidate *every* other pointer to the same
// location: the raw pointer smuggled into `other::S` below stays usable.
fn write_does_not_invalidate_all_aliases() {
    mod other {
        /// Some private memory to store stuff in.
        static mut S: *mut i32 = 0 as *mut i32;

        pub fn lib1(x: &&mut i32) {
            unsafe {
                // Reinterpret the `&&mut i32` as a pointer-to-pointer and read
                // out the inner `*mut i32`, stashing it in `S`.
                S = (x as *const &mut i32).cast::<*mut i32>().read();
            }
        }

        pub fn lib2() {
            unsafe {
                // Write through the stashed pointer.
                *S = 1337;
            }
        }
    }

    let x = &mut 0;
    other::lib1(&x);
    *x = 42; // a write to x -- invalidates other pointers?
    other::lib2();
    assert_eq!(*x, 1337); // oops, the value changed! I guess not all pointers were invalidated
}
| 260 | + |
// After `Box::into_raw`, a shared reference to the `Cell` and a raw pointer
// that bypasses the `Cell` may be used interleaved for writes.
fn box_into_raw_allows_interior_mutable_alias() {
    unsafe {
        let b = Box::new(std::cell::Cell::new(42));
        let raw = Box::into_raw(b);
        let c = &*raw;
        let d = raw.cast::<i32>(); // bypassing `Cell` -- only okay in Miri tests
        // `c` and `d` should permit arbitrary aliasing with each other now.
        *d = 1;
        c.set(2);
        // Reconstruct the Box so the allocation is freed.
        drop(Box::from_raw(raw));
    }
}
0 commit comments