@@ -285,6 +285,9 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
    /// A raw pointer variant of `get_bytes_mut` that avoids invalidating existing aliases into this memory.
    pub fn get_bytes_mut_ptr(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> *mut [u8] {
        self.mark_init(range, true);
+        // This also clears relocations that just overlap with the written range. So writing to some
+        // byte can de-initialize its neighbors! See
+        // <https://github.com/rust-lang/rust/issues/87184> for details.
        self.clear_relocations(cx, range);

        assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds-check
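For context on the effect the new comment describes, here is a minimal sketch, not part of this diff, of how it can surface under Miri: overwriting a single byte of a pointer's in-memory representation clears the relocation covering that pointer, so the neighboring bytes may afterwards read as uninitialized. The variable names and the exact diagnostic are assumptions for illustration.

fn main() {
    let x = 0u32;
    let mut p = &x as *const u32;
    unsafe {
        // Overwrite one byte of `p`'s storage. Per the comment above, this can
        // clear the relocation for the whole pointer and de-initialize its
        // remaining (neighboring) bytes.
        *(&mut p as *mut *const u32 as *mut u8) = 0;
        // Miri may report a read of uninitialized memory here.
        let _copy = p;
    }
}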
@@ -327,7 +330,11 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> AllocResult<ScalarMaybeUninit<Tag>> {
-        // `get_bytes_unchecked` tests relocation edges.
+        // `get_bytes_with_uninit_and_ptr` tests relocation edges.
+        // We deliberately error when loading data that partially has provenance, or partially
+        // initialized data (that's the check below), into a scalar. The LLVM semantics of this are
+        // unclear so we are conservative. See <https://github.com/rust-lang/rust/issues/69488> for
+        // further discussion.
        let bytes = self.get_bytes_with_uninit_and_ptr(cx, range)?;
        // Uninit check happens *after* we established that the alignment is correct.
        // We must not return `Ok()` for unaligned pointers!
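The conservative path the second comment describes can be exercised from ordinary Rust run under Miri. The sketch below is not part of this diff and its names are illustrative; it loads a `u16` whose second byte was never written, i.e. partially initialized data, into a scalar.

use std::mem::MaybeUninit;

fn main() {
    let mut buf = MaybeUninit::<u16>::uninit();
    unsafe {
        // Initialize only the first byte; the other byte of the u16 stays uninit.
        buf.as_mut_ptr().cast::<u8>().write(0xAB);
        // Loading the whole u16 pulls partially initialized data into a scalar;
        // per the comment above, the interpreter rejects this conservatively
        // rather than guess at the unclear LLVM semantics.
        let _v = buf.assume_init();
    }
}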