
Commit 9762de1 (1 parent: 5abfe06)

Changes to const allocations: they will now more often use const buffers, even when they are underaligned. Updated the README.

File tree

7 files changed (+118, -21 lines)


README.md
Lines changed: 4 additions & 4 deletions

```diff
@@ -77,7 +77,7 @@ However, if you have access to such a compiler, I can assist you in adding suppo
 
 | Name | Pass | Faliure | Crash \ Timeout| OK precentage
 |--------------------|--------|-------|-------|------|
-| Core tests | 1662 | 39 | 12 | 97.02% |
+| Core tests | 1764 | 48 | 20 | 96.29% |
 | Alloc tests | 616 |8 | 40 | 92.77% |
 | Alloc benches | 464 | 0 | 0 | 100.00% |
 | Test Harness tests | 57 | 0 | 100.00% |
@@ -86,9 +86,9 @@ However, if you have access to such a compiler, I can assist you in adding suppo
 
 C
 
-| Name | Pass | Faliure | OK precentage
-|--------------------|--------|-------|------|
-| Core tests | 1419 | 294 | 82.83% |
+| Name | Pass | Faliure| Crash \ Timeout | OK precentage
+|--------------------|--------|------|-------|------|
+| Core tests | 1712 | 71 | 8 | 95.59%|
 
 ## FAQ
```

cilly/src/bin/linker/main.rs
Lines changed: 2 additions & 0 deletions

```diff
@@ -434,6 +434,8 @@ fn main() {
     cilly::builtins::select::generate_int_selects(&mut final_assembly, &mut overrides);
     cilly::builtins::insert_swap_at_generic(&mut final_assembly, &mut overrides);
     cilly::builtins::insert_bounds_check(&mut final_assembly, &mut overrides);
+    cilly::builtins::unaligned_read(&mut final_assembly, &mut overrides);
+
     cilly::builtins::casts::insert_casts(&mut final_assembly, &mut overrides);
     cilly::builtins::insert_heap(&mut final_assembly, &mut overrides, *C_MODE);
     cilly::builtins::rust_assert(&mut final_assembly, &mut overrides);
```
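For context: each `cilly::builtins::*` call above registers a generator with the linker's missing-method patcher, so when the linked assembly references a body-less method named `unaligned_read`, its body is synthesized on demand. A minimal sketch of that pattern, using simplified stand-in types (the real `MissingMethodPatcher`, `Assembly`, and `MethodImpl` live in `cilly`):

```rust
use std::collections::HashMap;

// Stand-ins for cilly's types, for illustration only.
struct Assembly;
struct MethodImpl;
type Generator = Box<dyn Fn(&mut Assembly) -> MethodImpl>;

#[derive(Default)]
struct MissingMethodPatcher {
    generators: HashMap<String, Generator>,
}

impl MissingMethodPatcher {
    // Register a generator under the name of a method with no body yet.
    fn insert(&mut self, name: &str, generator: Generator) {
        self.generators.insert(name.to_string(), generator);
    }
    // Invoked by the linker for every referenced-but-missing method body.
    fn patch(&self, name: &str, asm: &mut Assembly) -> Option<MethodImpl> {
        self.generators.get(name).map(|gen| gen(asm))
    }
}
```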

cilly/src/v2/builtins/mod.rs
Lines changed: 20 additions & 0 deletions

```diff
@@ -65,6 +65,26 @@ pub fn insert_bounds_check(asm: &mut Assembly, patcher: &mut MissingMethodPatcher
     };
     patcher.insert(name, Box::new(generator));
 }
+pub fn unaligned_read(asm: &mut Assembly, patcher: &mut MissingMethodPatcher) {
+    let name = asm.alloc_string("unaligned_read");
+    let generator = move |mref: Interned<MethodRef>, asm: &mut Assembly| {
+        let tpe = asm[asm[mref].sig()].output();
+        let tpe = asm.alloc_type(*tpe);
+        // Copy to a local
+        let ptr = asm.alloc_node(CILNode::LdArg(0));
+        let local = asm.alloc_node(CILNode::LdLocA(0));
+        let size = asm.size_of(tpe);
+        let copy = asm.alloc_root(CILRoot::CpBlk(Box::new((local, ptr, size))));
+        // Ret
+        let local = asm.alloc_node(CILNode::LdLoc(0));
+        let ret = asm.alloc_root(CILRoot::Ret(local));
+        MethodImpl::MethodBody {
+            blocks: vec![BasicBlock::new(vec![copy, ret], 0, None)],
+            locals: vec![(None, tpe)],
+        }
+    };
+    patcher.insert(name, Box::new(generator));
+}
 
 fn insert_rust_alloc(asm: &mut Assembly, patcher: &mut MissingMethodPatcher) {
     let name = asm.alloc_string("__rust_alloc");
```
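Semantically, the generated body is the CIL counterpart of `core::ptr::read_unaligned`: `CpBlk` byte-copies `size_of(T)` bytes from the argument pointer into a properly aligned local, and `Ret` returns that local. The same behavior expressed in plain Rust, for reference (the actual builtin is emitted as CIL):

```rust
use core::{mem, ptr};

/// Reads a `T` through a pointer that may not satisfy `T`'s alignment.
/// This mirrors `core::ptr::read_unaligned`.
///
/// # Safety
/// `src` must point to `size_of::<T>()` readable bytes holding a valid `T`.
unsafe fn unaligned_read<T>(src: *const T) -> T {
    let mut local = mem::MaybeUninit::<T>::uninit();
    // A byte-wise copy places no alignment requirement on `src`.
    ptr::copy_nonoverlapping(
        src.cast::<u8>(),
        local.as_mut_ptr().cast::<u8>(),
        mem::size_of::<T>(),
    );
    local.assume_init()
}
```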

cilly/src/v2/c_exporter/utilis.rs
Lines changed: 1 addition & 0 deletions

```diff
@@ -133,6 +133,7 @@ pub(super) fn mref_to_name(mref: &MethodRef, asm: &Assembly) -> String {
         .any(|tpe| matches!(tpe, Type::SIMDVector(_)))
         || mname == "transmute"
         || mname == "create_slice"
+        || mname == "unaligned_read"
         || mname == "ovf_check_tuple"
         || mname == "_Unwind_Backtrace"
     {
```
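Presumably, listing `unaligned_read` here exempts it from name mangling in the C exporter, like the other special-cased names (`transmute`, `create_slice`, `ovf_check_tuple`), so that C-mode call sites match the definition the patcher generates.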

cilly/src/v2/method.rs
Lines changed: 20 additions & 2 deletions

```diff
@@ -5,7 +5,8 @@ use super::{
     bimap::{BiMapIndex, Interned, IntoBiMapIndex},
     cilnode::{IsPure, MethodKind},
     class::ClassDefIdx,
-    Access, Assembly, BasicBlock, CILIterElem, CILNode, ClassDef, ClassRef, FnSig, Int, Type,
+    Access, Assembly, BasicBlock, CILIterElem, CILNode, ClassDef, ClassRef, FnSig, Int,
+    IntoAsmIndex, Type,
 };
 use crate::{cil_node::CallOpArgs, cilnode::PtrCastRes, iter::TpeIter};
 use crate::{CILRoot, IString};
@@ -18,7 +19,24 @@ pub struct MethodRef {
     kind: MethodKind,
     generics: Box<[Type]>,
 }
-
+impl Interned<MethodRef> {
+    pub fn builtin(
+        asm: &mut Assembly,
+        name: &str,
+        inputs: &[Type],
+        output: impl IntoAsmIndex<Type>,
+    ) -> Self {
+        let main_module = asm.main_module();
+        let inputs: Box<_> = inputs.to_vec().into();
+        let output = output.into_idx(asm);
+        let sig = asm.alloc_sig(FnSig::new(inputs, output));
+        asm.new_methodref(*main_module, name, sig, MethodKind::Static, [])
+    }
+    pub fn unaligned_read(asm: &mut Assembly, tpe: Type) -> Self {
+        let tpe_ptr = asm.alloc_type(tpe);
+        Self::builtin(asm, "unaligned_read", &[Type::Ptr(tpe_ptr)], tpe)
+    }
+}
 impl MethodRef {
     #[must_use]
     pub fn into_def(
```
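`Interned::<MethodRef>::builtin` is a small convenience for declaring a static method on the main module from a name, input types, and output type; `unaligned_read` uses it to declare `T unaligned_read(T*)`. A hypothetical call site, sketched under the assumption that an `asm: &mut Assembly` and cilly's `Type`/`Int` are in scope:

```rust
// Hypothetical usage sketch: declare `u64 unaligned_read(u64*)`.
fn declare_u64_unaligned_read(asm: &mut Assembly) -> Interned<MethodRef> {
    Interned::unaligned_read(asm, Type::Int(Int::U64))
    // ...which expands to roughly:
    // Interned::builtin(asm, "unaligned_read", &[Type::Ptr(u64_idx)], Type::Int(Int::U64))
}
```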

rustc_codegen_clr_ctx/src/lib.rs
Lines changed: 4 additions & 0 deletions

```diff
@@ -107,6 +107,10 @@ impl<'tcx, 'asm> MethodCompileCtx<'tcx, 'asm> {
     pub fn asm<'s: 'a, 'a>(&'s self) -> &'a Assembly {
         self.asm
     }
+
+    pub fn const_align(&self) -> u64 {
+        1
+    }
 }
 impl<'tcx> rustc_middle::ty::layout::HasTyCtxt<'tcx> for MethodCompileCtx<'tcx, '_> {
     fn tcx(&self) -> TyCtxt<'tcx> {
```
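`const_align` gives the compilation context a single source of truth for the alignment that raw const byte buffers are guaranteed to provide. It currently returns 1, i.e. no guarantee beyond byte alignment, which is the conservative assumption; `constant.rs` below compares each allocation's required alignment against it, and raising the guarantee later only requires changing this one method.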

rustc_codgen_clr_operand/src/constant.rs
Lines changed: 67 additions & 15 deletions

```diff
@@ -58,15 +58,25 @@ fn create_const_from_data<'tcx>(
             Scalar::from_u128(u128::from_ne_bytes(bytes.as_slice().try_into().unwrap()));
         return load_const_scalar(scalar, ty, ctx);
     }
-    let ptr = alloc_ptr(alloc_id, &alloc, ctx);
-    let ty = ctx.monomorphize(ty);
+    let (ptr, align) = alloc_ptr_unaligned(alloc_id, &alloc, ctx);
+    if align == u64::MAX {
+        let ty = ctx.monomorphize(ty);
 
-    let tpe = ctx.type_from_cache(ty);
-    let tpe_ptr = ctx.nptr(tpe);
-    return CILNode::LdObj {
-        ptr: Box::new(ptr.cast_ptr(tpe_ptr)),
-        obj: Box::new(tpe),
-    };
+        let tpe = ctx.type_from_cache(ty);
+        let tpe_ptr = ctx.nptr(tpe);
+        return CILNode::LdObj {
+            ptr: Box::new(ptr.cast_ptr(tpe_ptr)),
+            obj: Box::new(tpe),
+        };
+    } else {
+        let ty = ctx.monomorphize(ty);
+
+        let tpe = ctx.type_from_cache(ty);
+        let tpe_ptr = ctx.nptr(tpe);
+
+        let unaligned_read = Interned::unaligned_read(ctx, tpe);
+        return call!(unaligned_read, [ptr.cast_ptr(tpe_ptr)]);
+    }
 }
 let ptr = CILNode::LoadGlobalAllocPtr {
     alloc_id: alloc_id.0.into(),
@@ -221,22 +231,64 @@ fn load_scalar_ptr(
     }
     //panic!("alloc_id:{alloc_id:?}")
 }
+/// Returns a pointer to an immutable buffer, representing a given allocation.
 fn alloc_ptr<'tcx>(
     alloc_id: AllocId,
     const_alloc: &rustc_middle::mir::interpret::ConstAllocation,
     ctx: &mut MethodCompileCtx<'tcx, '_>,
 ) -> CILNode {
-    let const_alloc = const_alloc.inner();
+    let (ptr, align) = alloc_ptr_unaligned(alloc_id, const_alloc, ctx);
     // If aligement is small enough to be *guaranteed*, and no pointers are present.
-    if const_alloc.align.bytes() <= 1 && const_alloc.provenance().ptrs().is_empty() {
-        CILNode::V2(ctx.bytebuffer(
-            const_alloc.inspect_with_uninit_and_ptr_outside_interpreter(0..const_alloc.len()),
-            Int::U8,
-        ))
-    } else {
+    if align == u64::MAX || align <= ctx.const_align() {
         CILNode::LoadGlobalAllocPtr {
             alloc_id: alloc_id.0.into(),
         }
+    } else {
+        ptr
+    }
+}
+/// Returns a pointer to an immutable buffer, representing a given allocation. Pointer may be underaligned; an aligement of `u64::MAX` signals that the pointer
+/// will be sufficently aligned for `const_alloc`.
+fn alloc_ptr_unaligned<'tcx>(
+    alloc_id: AllocId,
+    const_alloc: &rustc_middle::mir::interpret::ConstAllocation,
+    ctx: &mut MethodCompileCtx<'tcx, '_>,
+) -> (CILNode, u64) {
+    let const_alloc = const_alloc.inner();
+    // If aligement is small enough to be *guaranteed*, and no pointers are present.
+    if const_alloc.provenance().ptrs().is_empty() {
+        if const_alloc.align.bytes() <= ctx.const_align() {
+            //unaligned_read
+            (
+                CILNode::V2(ctx.bytebuffer(
+                    const_alloc
+                        .inspect_with_uninit_and_ptr_outside_interpreter(0..const_alloc.len()),
+                    Int::U8,
+                )),
+                u64::MAX,
+            )
+        } else {
+            //unaligned_read
+            (
+                CILNode::V2(ctx.bytebuffer(
+                    const_alloc
+                        .inspect_with_uninit_and_ptr_outside_interpreter(0..const_alloc.len()),
+                    Int::U8,
+                )),
+                ctx.const_align(),
+            )
+        }
+    } else {
+        (
+            CILNode::LoadGlobalAllocPtr {
+                alloc_id: alloc_id.0.into(),
+            },
+            u64::MAX,
+        )
+    }
 }
 fn load_const_scalar<'tcx>(
```
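Putting it together: `alloc_ptr_unaligned` returns both a pointer and the alignment it can guarantee, with `u64::MAX` as the sentinel for "sufficiently aligned", and `create_const_from_data` picks the load strategy from that. A minimal stand-in sketch of just that decision (the enum and function are hypothetical; the real code builds CIL nodes instead):

```rust
/// How the constant is ultimately loaded (stand-in for the CIL built above).
enum ConstLoad {
    /// `LdObj` straight through the pointer; fine because alignment is guaranteed.
    Direct,
    /// Call the `unaligned_read` builtin, which byte-copies into an aligned local.
    ViaUnalignedRead,
}

fn choose_load(align: u64) -> ConstLoad {
    // `u64::MAX` signals the pointer is sufficiently aligned for the allocation;
    // any other value is the (smaller) alignment a raw const buffer guarantees.
    if align == u64::MAX {
        ConstLoad::Direct
    } else {
        ConstLoad::ViaUnalignedRead
    }
}
```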
