
Commit 944ffbf

initialize unsized locals when copying to them for the first time
1 parent ae1f8ab · commit 944ffbf

2 files changed: +57, -33 lines

src/librustc_mir/interpret/eval_context.rs

Lines changed: 8 additions & 3 deletions
@@ -702,10 +702,15 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tc
                 LocalValue::Dead => write!(msg, " is dead").unwrap(),
                 LocalValue::Uninitialized => write!(msg, " is uninitialized").unwrap(),
                 LocalValue::Live(Operand::Indirect(mplace)) => {
-                    let (ptr, align) = mplace.to_scalar_ptr_align();
-                    match ptr {
+                    match mplace.ptr {
                         Scalar::Ptr(ptr) => {
-                            write!(msg, " by align({}) ref:", align.bytes()).unwrap();
+                            write!(msg, " by align({}){} ref:",
+                                mplace.align.bytes(),
+                                match mplace.meta {
+                                    Some(meta) => format!(" meta({:?})", meta),
+                                    None => String::new()
+                                }
+                            ).unwrap();
                             allocs.push(ptr.alloc_id);
                         }
                         ptr => write!(msg, " by integral ref: {:?}", ptr).unwrap(),
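
For context, this hunk only changes how a local's backing place is dumped for debugging: the place's metadata (e.g. a slice length or a vtable pointer) is now printed when present. A minimal, self-contained sketch of the same formatting pattern, using plain placeholder values instead of the interpreter's `MemPlace`:

    use std::fmt::Write;

    fn main() {
        // Placeholder stand-ins for `mplace.align.bytes()` and `mplace.meta`.
        let align_bytes: u64 = 8;
        let meta: Option<u64> = Some(3); // e.g. the length of a `[u64]` slice

        let mut msg = String::new();
        // Same pattern as the diff: append " meta(..)" only when metadata exists.
        write!(msg, " by align({}){} ref:",
            align_bytes,
            match meta {
                Some(meta) => format!(" meta({:?})", meta),
                None => String::new(),
            }
        ).unwrap();
        assert_eq!(msg, " by align(8) meta(3) ref:");
    }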

src/librustc_mir/interpret/place.rs

Lines changed: 49 additions & 30 deletions
@@ -826,8 +826,6 @@ where
         src: OpTy<'tcx, M::PointerTag>,
         dest: PlaceTy<'tcx, M::PointerTag>,
     ) -> EvalResult<'tcx> {
-        debug_assert!(!src.layout.is_unsized() && !dest.layout.is_unsized(),
-            "Cannot copy unsized data");
         // We do NOT compare the types for equality, because well-typed code can
         // actually "transmute" `&mut T` to `&T` in an assignment without a cast.
         assert!(src.layout.details == dest.layout.details,
@@ -836,6 +834,7 @@ where
         // Let us see if the layout is simple so we take a shortcut, avoid force_allocation.
         let src = match self.try_read_immediate(src)? {
             Ok(src_val) => {
+                assert!(!src.layout.is_unsized(), "cannot have unsized immediates");
                 // Yay, we got a value that we can write directly.
                 // FIXME: Add a check to make sure that if `src` is indirect,
                 // it does not overlap with `dest`.
@@ -846,13 +845,19 @@ where
         // Slow path, this does not fit into an immediate. Just memcpy.
         trace!("copy_op: {:?} <- {:?}: {}", *dest, src, dest.layout.ty);
 
-        let dest = self.force_allocation(dest)?;
-        let (src_ptr, src_align) = src.to_scalar_ptr_align();
-        let (dest_ptr, dest_align) = dest.to_scalar_ptr_align();
+        // This interprets `src.meta` with the `dest` local's layout, if an unsized local
+        // is being initialized!
+        let (dest, size) = self.force_allocation_maybe_sized(dest, src.meta)?;
+        let size = size.unwrap_or_else(|| {
+            assert!(!dest.layout.is_unsized(),
+                "Cannot copy into already initialized unsized place");
+            dest.layout.size
+        });
+        assert_eq!(src.meta, dest.meta, "Can only copy between equally-sized instances");
         self.memory.copy(
-            src_ptr, src_align,
-            dest_ptr, dest_align,
-            dest.layout.size,
+            src.ptr, src.align,
+            dest.ptr, dest.align,
+            size,
             /*nonoverlapping*/ true,
         )?;
 
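This slow path is the one that now runs when an unsized local is initialized for the first time: `force_allocation_maybe_sized` uses `src.meta` to size the fresh allocation. At the source level, the situation looks roughly like the following (requires a nightly compiler with the unstable `unsized_locals` feature; shown only to illustrate which programs reach this code path):

    #![feature(unsized_locals)]

    fn main() {
        let b: Box<[i32]> = Box::new([1, 2, 3]);
        // Moving `*b` creates a local of the unsized type `[i32]`.
        // Copying into it is exactly the case where the destination's size
        // must be taken from the source's metadata (here, the length 3).
        let x: [i32] = *b;
        assert_eq!(x.len(), 3);
    }
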
@@ -870,11 +875,13 @@ where
             // Fast path: Just use normal `copy_op`
             return self.copy_op(src, dest);
         }
-        // We still require the sizes to match
-        debug_assert!(!src.layout.is_unsized() && !dest.layout.is_unsized(),
-            "Cannot copy unsized data");
+        // We still require the sizes to match.
         assert!(src.layout.size == dest.layout.size,
             "Size mismatch when transmuting!\nsrc: {:#?}\ndest: {:#?}", src, dest);
+        // Unsized copies rely on interpreting `src.meta` with `dest.layout`, we want
+        // to avoid that here.
+        assert!(!src.layout.is_unsized() && !dest.layout.is_unsized(),
+            "Cannot transmute unsized data");
 
         // The hard case is `ScalarPair`. `src` is already read from memory in this case,
         // using `src.layout` to figure out which bytes to use for the 1st and 2nd field.
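
The sized-only restriction here mirrors what a transmuting copy needs at the language level: both sides must have a statically known, equal size. A small surface-level illustration (ordinary Rust, not interpreter code):

    fn main() {
        // Both types are 4 bytes; the copy just reinterprets the bytes.
        let n: u32 = 0x0102_0304;
        let bytes: [u8; 4] = unsafe { std::mem::transmute(n) };
        // Round-tripping through native-endian bytes recovers the original value.
        assert_eq!(u32::from_ne_bytes(bytes), 0x0102_0304);
    }
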
@@ -902,11 +909,16 @@ where
     /// If the place currently refers to a local that doesn't yet have a matching allocation,
     /// create such an allocation.
     /// This is essentially `force_to_memplace`.
-    pub fn force_allocation(
+    ///
+    /// This supports unsized types and returns the computed size to avoid some
+    /// redundant computation when copying; use `force_allocation` for a simpler, sized-only
+    /// version.
+    pub fn force_allocation_maybe_sized(
         &mut self,
         place: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
-        let mplace = match place.place {
+        meta: Option<Scalar<M::PointerTag>>,
+    ) -> EvalResult<'tcx, (MPlaceTy<'tcx, M::PointerTag>, Option<Size>)> {
+        let (mplace, size) = match place.place {
             Place::Local { frame, local } => {
                 match self.stack[frame].locals[local].access_mut()? {
                     Ok(local_val) => {
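
The new entry point threads optional metadata in and hands the computed size back out, while the common sized-only case keeps its old signature as a thin wrapper (added in the next hunk). A generic sketch of that API shape, with hypothetical placeholder types rather than the interpreter's own:

    // Hypothetical stand-ins for the interpreter's place types.
    struct Place;
    struct MemPlace;

    // The "maybe sized" variant reports the size it had to compute itself
    // (Some(..) only when it freshly allocated, possibly for an unsized local).
    fn force_allocation_maybe_sized(_place: Place, _meta: Option<u64>) -> (MemPlace, Option<u64>) {
        (MemPlace, None)
    }

    // The sized-only helper stays a one-line wrapper, as in the diff below.
    fn force_allocation(place: Place) -> MemPlace {
        force_allocation_maybe_sized(place, None).0
    }

    fn main() {
        let _mplace = force_allocation(Place);
    }
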
@@ -926,44 +938,51 @@ where
                         // We need the layout of the local. We can NOT use the layout we got,
                         // that might e.g., be an inner field of a struct with `Scalar` layout,
                         // that has different alignment than the outer field.
+                        // We also need to support unsized types, and hence cannot use `allocate`.
                         let local_layout = self.layout_of_local(&self.stack[frame], local, None)?;
-                        let ptr = self.allocate(local_layout, MemoryKind::Stack);
+                        let (size, align) = self.size_and_align_of(meta, local_layout)?
+                            .expect("Cannot allocate for non-dyn-sized type");
+                        let ptr = self.memory.allocate(size, align, MemoryKind::Stack);
+                        let ptr = M::tag_new_allocation(self, ptr, MemoryKind::Stack);
+                        let mplace = MemPlace { ptr: ptr.into(), align, meta };
                         if let Some(value) = old_val {
                             // Preserve old value.
                             // We don't have to validate as we can assume the local
                             // was already valid for its type.
-                            self.write_immediate_to_mplace_no_validate(value, ptr)?;
+                            let mplace = MPlaceTy { mplace, layout: local_layout };
+                            self.write_immediate_to_mplace_no_validate(value, mplace)?;
                         }
-                        let mplace = ptr.mplace;
                         // Now we can call `access_mut` again, asserting it goes well,
                         // and actually overwrite things.
                         *self.stack[frame].locals[local].access_mut().unwrap().unwrap() =
                             LocalValue::Live(Operand::Indirect(mplace));
-                        mplace
+                        (mplace, Some(size))
                     }
-                    Err(mplace) => mplace, // this already was an indirect local
+                    Err(mplace) => (mplace, None), // this already was an indirect local
                 }
             }
-            Place::Ptr(mplace) => mplace
+            Place::Ptr(mplace) => (mplace, None)
         };
         // Return with the original layout, so that the caller can go on
-        Ok(MPlaceTy { mplace, layout: place.layout })
+        Ok((MPlaceTy { mplace, layout: place.layout }, size))
+    }
+
+    #[inline(always)]
+    pub fn force_allocation(
+        &mut self,
+        place: PlaceTy<'tcx, M::PointerTag>,
+    ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
+        Ok(self.force_allocation_maybe_sized(place, None)?.0)
     }
 
     pub fn allocate(
         &mut self,
         layout: TyLayout<'tcx>,
         kind: MemoryKind<M::MemoryKinds>,
     ) -> MPlaceTy<'tcx, M::PointerTag> {
-        if layout.is_unsized() {
-            assert!(self.tcx.features().unsized_locals, "cannot alloc memory for unsized type");
-            // FIXME: What should we do here? We should definitely also tag!
-            MPlaceTy::dangling(layout, self)
-        } else {
-            let ptr = self.memory.allocate(layout.size, layout.align.abi, kind);
-            let ptr = M::tag_new_allocation(self, ptr, kind);
-            MPlaceTy::from_aligned_ptr(ptr, layout)
-        }
+        let ptr = self.memory.allocate(layout.size, layout.align.abi, kind);
+        let ptr = M::tag_new_allocation(self, ptr, kind);
+        MPlaceTy::from_aligned_ptr(ptr, layout)
     }
 
     pub fn write_discriminant_index(
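
For the fresh allocation above, `size_and_align_of(meta, local_layout)` computes the dynamic size from the metadata. A rough sketch of what that computation amounts to for a slice type, with an ordinary generic function standing in for the interpreter's layout machinery (the real code also handles `dyn Trait`, whose metadata is a vtable pointer carrying size and alignment):

    use std::mem::{align_of, size_of};

    // For `[T]`, the metadata is the element count; the element layout is static.
    fn slice_size_and_align<T>(len: usize) -> (usize, usize) {
        (len * size_of::<T>(), align_of::<T>())
    }

    fn main() {
        // A `[i32]` local initialized from three elements needs 12 bytes, aligned to 4.
        assert_eq!(slice_size_and_align::<i32>(3), (12, 4));
    }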
