
Commit b5398b0

Update codegen
1 parent f6d49fb commit b5398b0

1,475 files changed: +1,229 −2,208,692 lines

.gitmodules
Lines changed: 3 additions & 0 deletions

@@ -32,3 +32,6 @@
 [submodule "library/backtrace"]
 	path = library/backtrace
 	url = https://github.com/rust-lang/backtrace-rs.git
+[submodule "src/tools/enzyme"]
+	path = src/tools/enzyme
+	url = https://github.com/EnzymeAD/Enzyme.git
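
The new entries register Enzyme (EnzymeAD/Enzyme) as a git submodule at src/tools/enzyme. A usage note, not part of this commit: in a local checkout the new submodule can be fetched with the standard git command below; whether the repository's build tooling also initializes it automatically is not shown in this diff.

    git submodule update --init --recursive src/tools/enzyme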

compiler/rustc_codegen_llvm/src/back/lto.rs
Lines changed: 43 additions & 44 deletions

(The removed and added lines below appear identical because the changes in this file are whitespace/indentation-only; exact indentation is not preserved in this view.)

@@ -43,7 +43,7 @@ pub fn crate_type_allows_lto(crate_type: CrateType) -> bool {
 fn prepare_lto(
     cgcx: &CodegenContext<LlvmCodegenBackend>,
     diag_handler: &Handler,
-) -> Result<(Vec<CString>, Vec<(SerializedModule<ModuleBuffer>, CString)>), FatalError> {
+) -> Result<(Vec<CString>, Vec<(SerializedModule<ModuleBuffer>, CString)>), FatalError> {
     let export_threshold = match cgcx.lto {
         // We're just doing LTO for our one crate
         Lto::ThinLocal => SymbolExportLevel::Rust,
@@ -169,8 +169,7 @@ pub(crate) fn run_fat(
     cgcx: &CodegenContext<LlvmCodegenBackend>,
     modules: Vec<FatLTOInput<LlvmCodegenBackend>>,
     cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
-) -> Result<LtoModuleCodegen<LlvmCodegenBackend>, FatalError> {
-
+) -> Result<LtoModuleCodegen<LlvmCodegenBackend>, FatalError> {
     let diag_handler = cgcx.create_diag_handler();
     let (symbols_below_threshold, upstream_modules) = prepare_lto(cgcx, &diag_handler)?;
     let symbols_below_threshold =
@@ -182,7 +181,7 @@ pub(crate) fn run_fat(
         cached_modules,
         upstream_modules,
         &symbols_below_threshold,
-    )
+    )
 }
 
 /// Performs thin LTO by performing necessary global analysis and returning two
@@ -192,7 +191,7 @@ pub(crate) fn run_thin(
     cgcx: &CodegenContext<LlvmCodegenBackend>,
     modules: Vec<(String, ThinBuffer)>,
     cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
-) -> Result<(Vec<LtoModuleCodegen<LlvmCodegenBackend>>, Vec<WorkProduct>), FatalError> {
+) -> Result<(Vec<LtoModuleCodegen<LlvmCodegenBackend>>, Vec<WorkProduct>), FatalError> {
     let diag_handler = cgcx.create_diag_handler();
     let (symbols_below_threshold, upstream_modules) = prepare_lto(cgcx, &diag_handler)?;
     let symbols_below_threshold =
@@ -201,7 +200,7 @@ pub(crate) fn run_thin(
         unreachable!(
             "We should never reach this case if the LTO step \
              is deferred to the linker"
-        );
+        );
     }
     thin_lto(
         cgcx,
@@ -210,7 +209,7 @@ pub(crate) fn run_thin(
         upstream_modules,
         cached_modules,
         &symbols_below_threshold,
-    )
+    )
 }
 
 pub(crate) fn prepare_thin(module: ModuleCodegen<ModuleLlvm>) -> (String, ThinBuffer) {
@@ -226,7 +225,7 @@ fn fat_lto(
     cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
     mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
     symbols_below_threshold: &[*const libc::c_char],
-) -> Result<LtoModuleCodegen<LlvmCodegenBackend>, FatalError> {
+) -> Result<LtoModuleCodegen<LlvmCodegenBackend>, FatalError> {
     let _timer = cgcx.prof.generic_activity("LLVM_fat_lto_build_monolithic_module");
     info!("going for a fat lto");
 
@@ -273,7 +272,7 @@ fn fat_lto(
             let cost = unsafe { llvm::LLVMRustModuleCost(module.module_llvm.llmod()) };
             (cost, i)
         })
-        .max();
+        .max();
 
     // If we found a costliest module, we're good to go. Otherwise all our
     // inputs were serialized which could happen in the case, for example, that
@@ -347,7 +346,7 @@ fn fat_lto(
                     llmod,
                     ptr as *const *const libc::c_char,
                     symbols_below_threshold.len() as libc::size_t,
-                );
+                );
                 save_temp_bitcode(cgcx, &module, "lto.after-restriction");
             }
         }
@@ -368,7 +367,7 @@ impl<'a> Linker<'a> {
                 self.0,
                 bytecode.as_ptr() as *const libc::c_char,
                 bytecode.len(),
-            ) {
+            ) {
                 Ok(())
             } else {
                 Err(())
@@ -422,7 +421,7 @@ fn thin_lto(
     serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
     cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
     symbols_below_threshold: &[*const libc::c_char],
-) -> Result<(Vec<LtoModuleCodegen<LlvmCodegenBackend>>, Vec<WorkProduct>), FatalError> {
+) -> Result<(Vec<LtoModuleCodegen<LlvmCodegenBackend>>, Vec<WorkProduct>), FatalError> {
     let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_global_analysis");
     unsafe {
         info!("going for that thin, thin LTO");
@@ -500,22 +499,22 @@ fn thin_lto(
 
         let (key_map_path, prev_key_map, curr_key_map) = if let Some(ref incr_comp_session_dir) =
             cgcx.incr_comp_session_dir
-        {
-            let path = incr_comp_session_dir.join(THIN_LTO_KEYS_INCR_COMP_FILE_NAME);
-            // If the previous file was deleted, or we get an IO error
-            // reading the file, then we'll just use `None` as the
-            // prev_key_map, which will force the code to be recompiled.
-            let prev =
-                if path.exists() { ThinLTOKeysMap::load_from_file(&path).ok() } else { None };
-            let curr = ThinLTOKeysMap::from_thin_lto_modules(&data, &thin_modules, &module_names);
-            (Some(path), prev, curr)
-        } else {
-            // If we don't compile incrementally, we don't need to load the
-            // import data from LLVM.
-            assert!(green_modules.is_empty());
-            let curr = ThinLTOKeysMap::default();
-            (None, None, curr)
-        };
+        {
+            let path = incr_comp_session_dir.join(THIN_LTO_KEYS_INCR_COMP_FILE_NAME);
+            // If the previous file was deleted, or we get an IO error
+            // reading the file, then we'll just use `None` as the
+            // prev_key_map, which will force the code to be recompiled.
+            let prev =
+                if path.exists() { ThinLTOKeysMap::load_from_file(&path).ok() } else { None };
+            let curr = ThinLTOKeysMap::from_thin_lto_modules(&data, &thin_modules, &module_names);
+            (Some(path), prev, curr)
+        } else {
+            // If we don't compile incrementally, we don't need to load the
+            // import data from LLVM.
+            assert!(green_modules.is_empty());
+            let curr = ThinLTOKeysMap::default();
+            (None, None, curr)
+        };
         info!("thin LTO cache key map loaded");
         info!("prev_key_map: {:#?}", prev_key_map);
         info!("curr_key_map: {:#?}", curr_key_map);
@@ -539,20 +538,20 @@ fn thin_lto(
             let module_name = module_name_to_str(module_name);
             if let (Some(prev_key_map), true) =
                 (prev_key_map.as_ref(), green_modules.contains_key(module_name))
-            {
+            {
+                assert!(cgcx.incr_comp_session_dir.is_some());
+
+                // If a module exists in both the current and the previous session,
+                // and has the same LTO cache key in both sessions, then we can re-use it
+                if prev_key_map.keys.get(module_name) == curr_key_map.keys.get(module_name) {
+                    let work_product = green_modules[module_name].clone();
+                    copy_jobs.push(work_product);
+                    info!(" - {}: re-used", module_name);
                 assert!(cgcx.incr_comp_session_dir.is_some());
-
-                // If a module exists in both the current and the previous session,
-                // and has the same LTO cache key in both sessions, then we can re-use it
-                if prev_key_map.keys.get(module_name) == curr_key_map.keys.get(module_name) {
-                    let work_product = green_modules[module_name].clone();
-                    copy_jobs.push(work_product);
-                    info!(" - {}: re-used", module_name);
-                    assert!(cgcx.incr_comp_session_dir.is_some());
-                    cgcx.cgu_reuse_tracker.set_actual_reuse(module_name, CguReuse::PostLto);
-                    continue;
-                }
+                    cgcx.cgu_reuse_tracker.set_actual_reuse(module_name, CguReuse::PostLto);
+                    continue;
                 }
+            }
 
             info!(" - {}: re-compiled", module_name);
             opt_jobs.push(LtoModuleCodegen::Thin(ThinModule {
@@ -686,7 +685,7 @@ impl Drop for ThinBuffer {
 pub unsafe fn optimize_thin_module(
     thin_module: ThinModule<LlvmCodegenBackend>,
     cgcx: &CodegenContext<LlvmCodegenBackend>,
-) -> Result<ModuleCodegen<ModuleLlvm>, FatalError> {
+) -> Result<ModuleCodegen<ModuleLlvm>, FatalError> {
     let diag_handler = cgcx.create_diag_handler();
 
     let module_name = &thin_module.shared.module_names[thin_module.idx];
@@ -855,7 +854,7 @@ impl ThinLTOKeysMap {
         data: &ThinData,
         modules: &[llvm::ThinLTOModule],
         names: &[CString],
-    ) -> Self {
+    ) -> Self {
         let keys = iter::zip(modules, names)
             .map(|(module, name)| {
                 let key = build_string(|rust_str| unsafe {
@@ -864,7 +863,7 @@ impl ThinLTOKeysMap {
                     .expect("Invalid ThinLTO module key");
                 (name.clone().into_string().unwrap(), key)
             })
-            .collect();
+            .collect();
         Self { keys }
     }
 }
@@ -880,7 +879,7 @@ pub fn parse_module<'a>(
     name: &CStr,
     data: &[u8],
     diag_handler: &Handler,
-) -> Result<&'a llvm::Module, FatalError> {
+) -> Result<&'a llvm::Module, FatalError> {
     unsafe {
         llvm::LLVMRustParseBitcodeForLTO(cx, data.as_ptr(), data.len(), name.as_ptr())
             .ok_or_else(|| write::llvm_err(diag_handler, LlvmError::ParseBitcode))
