Commit d764fff

Auto merge of #7311 - ehuss:pipeline-timing, r=alexcrichton
Experiment: Create timing report.

This is just an experiment, so I'm not sure if we'll want to merge it. It adds an HTML report which is saved to disk when the build is finished. It is primarily geared toward identifying slow dependencies and visualizing how pipelining affects the build. Here's an example: https://ehuss.github.io/cargo-timing.html. You can mouse over the blocks to highlight the reverse dependencies that are released when a unit finishes; `syn` is a really good example. It does a few other things, like displaying a message after each unit is finished. See the docs for more information.
2 parents: 35c55a9 + 8be10f7
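
The report is gated behind the new `-Z timings` unstable flag added in `src/bin/cargo/cli.rs` below. Following the `cargo -Z [FLAG] [SUBCOMMAND]` form shown in that help text, an invocation on a nightly toolchain would look something like this (the report location and any flag options are covered by the docs this commit adds, which are not part of this excerpt):

```console
$ cargo -Z timings build
```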

15 files changed: +1319 −68 lines


Cargo.toml (+1)

```diff
@@ -37,6 +37,7 @@ git2-curl = "0.11.0"
 glob = "0.3.0"
 hex = "0.3"
 home = "0.5"
+humantime = "1.2.0"
 ignore = "0.4.7"
 lazy_static = "1.2.0"
 jobserver = "0.1.13"
```
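
The only manifest change is the new `humantime` dependency. Where it is used is not visible in this excerpt; presumably it formats elapsed times for the per-unit messages and the report described in the commit message. As a stand-alone illustration of the crate's `format_duration` API under that assumption (the duration value here is made up):

```rust
use std::time::Duration;

fn main() {
    // humantime renders a std::time::Duration in human-readable units.
    let elapsed = Duration::from_millis(61_250);
    // Prints something like: finished in 1m 1s 250ms
    println!("finished in {}", humantime::format_duration(elapsed));
}
```

(The example assumes the `humantime = "1.2.0"` dependency from the line above is available.)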

src/bin/cargo/cli.rs (+1)

```diff
@@ -36,6 +36,7 @@ Available unstable (nightly-only) flags:
     -Z config-profile   -- Read profiles from .cargo/config files
     -Z install-upgrade  -- `cargo install` will upgrade instead of failing
     -Z cache-messages   -- Cache compiler messages
+    -Z timings          -- Display concurrency information
 
 Run with 'cargo -Z [FLAG] [SUBCOMMAND]'"
         );
```

src/cargo/core/compiler/context/mod.rs (+2 −2)

```diff
@@ -129,7 +129,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
         export_dir: Option<PathBuf>,
         exec: &Arc<dyn Executor>,
     ) -> CargoResult<Compilation<'cfg>> {
-        let mut queue = JobQueue::new(self.bcx);
+        let mut queue = JobQueue::new(self.bcx, units);
         let mut plan = BuildPlan::new();
         let build_plan = self.bcx.build_config.build_plan;
         self.prepare_units(export_dir, units)?;
@@ -523,6 +523,6 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
     /// Returns whether when `unit` is built whether it should emit metadata as
     /// well because some compilations rely on that.
     pub fn rmeta_required(&self, unit: &Unit<'a>) -> bool {
-        self.rmeta_required.contains(unit)
+        self.rmeta_required.contains(unit) || self.bcx.config.cli_unstable().timings.is_some()
     }
 }
```

src/cargo/core/compiler/job_queue.rs (+26 −10)

```diff
@@ -16,6 +16,7 @@ use super::job::{
     Freshness::{self, Dirty, Fresh},
     Job,
 };
+use super::timings::Timings;
 use super::{BuildContext, BuildPlan, CompileMode, Context, Unit};
 use crate::core::{PackageId, TargetKind};
 use crate::handle_error;
@@ -41,6 +42,7 @@ pub struct JobQueue<'a, 'cfg> {
     is_release: bool,
     progress: Progress<'cfg>,
     next_id: u32,
+    timings: Timings<'a, 'cfg>,
 }
 
 pub struct JobState<'a> {
@@ -82,7 +84,7 @@ enum Artifact {
 }
 
 enum Message {
-    Run(String),
+    Run(u32, String),
     BuildPlanMsg(String, ProcessBuilder, Arc<Vec<OutputFile>>),
     Stdout(String),
     Stderr(String),
@@ -93,7 +95,7 @@ enum Message {
 
 impl<'a> JobState<'a> {
     pub fn running(&self, cmd: &ProcessBuilder) {
-        let _ = self.tx.send(Message::Run(cmd.to_string()));
+        let _ = self.tx.send(Message::Run(self.id, cmd.to_string()));
     }
 
     pub fn build_plan(
@@ -121,7 +123,6 @@ impl<'a> JobState<'a> {
     /// This should only be called once because a metadata file can only be
     /// produced once!
     pub fn rmeta_produced(&self) {
-        assert!(self.rmeta_required.get());
         self.rmeta_required.set(false);
         let _ = self
             .tx
@@ -130,9 +131,10 @@ impl<'a> JobState<'a> {
 }
 
 impl<'a, 'cfg> JobQueue<'a, 'cfg> {
-    pub fn new(bcx: &BuildContext<'a, 'cfg>) -> JobQueue<'a, 'cfg> {
+    pub fn new(bcx: &BuildContext<'a, 'cfg>, root_units: &[Unit<'a>]) -> JobQueue<'a, 'cfg> {
         let (tx, rx) = channel();
         let progress = Progress::with_style("Building", ProgressStyle::Ratio, bcx.config);
+        let timings = Timings::new(bcx, root_units);
         JobQueue {
             queue: DependencyQueue::new(),
             tx,
@@ -144,6 +146,7 @@ impl<'a, 'cfg> JobQueue<'a, 'cfg> {
             is_release: bcx.build_config.release,
             progress,
             next_id: 0,
+            timings,
         }
     }
 
@@ -318,6 +321,9 @@ impl<'a, 'cfg> JobQueue<'a, 'cfg> {
             // to the jobserver itself.
             tokens.truncate(self.active.len() - 1);
 
+            self.timings
+                .mark_concurrency(self.active.len(), queue.len(), self.queue.len());
+
             // Drain all events at once to avoid displaying the progress bar
             // unnecessarily.
             let events: Vec<_> = self.rx.try_iter().collect();
@@ -330,18 +336,18 @@ impl<'a, 'cfg> JobQueue<'a, 'cfg> {
 
             for event in events {
                 match event {
-                    Message::Run(cmd) => {
+                    Message::Run(id, cmd) => {
                         cx.bcx
                             .config
                             .shell()
                             .verbose(|c| c.status("Running", &cmd))?;
+                        self.timings.unit_start(id, self.active[&id]);
                     }
                     Message::BuildPlanMsg(module_name, cmd, filenames) => {
                         plan.update(&module_name, &cmd, &filenames)?;
                     }
                     Message::Stdout(out) => {
-                        self.progress.clear();
-                        println!("{}", out);
+                        cx.bcx.config.shell().stdout_println(out);
                     }
                     Message::Stderr(err) => {
                         let mut shell = cx.bcx.config.shell();
@@ -369,7 +375,7 @@ impl<'a, 'cfg> JobQueue<'a, 'cfg> {
                         };
                         info!("end ({:?}): {:?}", unit, result);
                         match result {
-                            Ok(()) => self.finish(&unit, artifact, cx)?,
+                            Ok(()) => self.finish(id, &unit, artifact, cx)?,
                             Err(e) => {
                                 let msg = "The following warnings were emitted during compilation:";
                                 self.emit_warnings(Some(msg), &unit, cx)?;
@@ -427,6 +433,7 @@ impl<'a, 'cfg> JobQueue<'a, 'cfg> {
             if !cx.bcx.build_config.build_plan {
                 cx.bcx.config.shell().status("Finished", message)?;
             }
+            self.timings.finished()?;
             Ok(())
         } else {
             debug!("queue: {:#?}", self.queue);
@@ -542,8 +549,12 @@ impl<'a, 'cfg> JobQueue<'a, 'cfg> {
         }
 
         match fresh {
-            Freshness::Fresh => doit(),
+            Freshness::Fresh => {
+                self.timings.add_fresh();
+                doit()
+            }
             Freshness::Dirty => {
+                self.timings.add_dirty();
                 scope.spawn(move |_| doit());
             }
         }
@@ -581,14 +592,19 @@ impl<'a, 'cfg> JobQueue<'a, 'cfg> {
 
     fn finish(
         &mut self,
+        id: u32,
         unit: &Unit<'a>,
         artifact: Artifact,
         cx: &mut Context<'_, '_>,
     ) -> CargoResult<()> {
         if unit.mode.is_run_custom_build() && cx.bcx.show_warnings(unit.pkg.package_id()) {
             self.emit_warnings(None, unit, cx)?;
         }
-        self.queue.finish(unit, &artifact);
+        let unlocked = self.queue.finish(unit, &artifact);
+        match artifact {
+            Artifact::All => self.timings.unit_finished(id, unlocked),
+            Artifact::Metadata => self.timings.unit_rmeta_finished(id, unlocked),
+        }
         Ok(())
     }
 
```
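
The `Timings` type referenced throughout this file lives in the new `src/cargo/core/compiler/timings.rs`, which is among the 15 changed files but is not shown in this excerpt. Its shape can be inferred from the call sites above; the following is a hypothetical, heavily simplified stand-in that records the same events against plain string names instead of Cargo's `Unit` values, just to illustrate the kind of data the report is built from (the field layout and method bodies are assumptions, only the method names come from the diff):

```rust
use std::collections::HashMap;
use std::time::{Duration, Instant};

/// Hypothetical stand-in for Cargo's `Timings` (the real type is in the new
/// timings.rs, which is not part of this excerpt).
struct Timings {
    start: Instant,
    /// Samples of (elapsed, active, waiting for a token, blocked on deps).
    concurrency: Vec<(Duration, usize, usize, usize)>,
    active: HashMap<u32, UnitTime>,
    done: Vec<UnitTime>,
    fresh: u32,
    dirty: u32,
}

struct UnitTime {
    name: String,
    start: Duration,
    /// When the unit's metadata was ready, i.e. when pipelined dependents could start.
    rmeta_done: Option<Duration>,
    done: Option<Duration>,
}

impl Timings {
    fn new() -> Timings {
        Timings {
            start: Instant::now(),
            concurrency: Vec::new(),
            active: HashMap::new(),
            done: Vec::new(),
            fresh: 0,
            dirty: 0,
        }
    }

    fn mark_concurrency(&mut self, active: usize, waiting: usize, inactive: usize) {
        self.concurrency
            .push((self.start.elapsed(), active, waiting, inactive));
    }

    fn add_fresh(&mut self) {
        self.fresh += 1;
    }

    fn add_dirty(&mut self) {
        self.dirty += 1;
    }

    fn unit_start(&mut self, id: u32, name: String) {
        let start = self.start.elapsed();
        let unit = UnitTime { name, start, rmeta_done: None, done: None };
        self.active.insert(id, unit);
    }

    // `unlocked` stands in for the reverse dependencies released by the event;
    // the report uses that relationship for the mouse-over highlighting.
    fn unit_rmeta_finished(&mut self, id: u32, _unlocked: &[String]) {
        if let Some(unit) = self.active.get_mut(&id) {
            unit.rmeta_done = Some(self.start.elapsed());
        }
    }

    fn unit_finished(&mut self, id: u32, _unlocked: &[String]) {
        if let Some(mut unit) = self.active.remove(&id) {
            unit.done = Some(self.start.elapsed());
            self.done.push(unit);
        }
    }

    fn finished(&self) {
        // The real implementation would render the HTML report here.
        println!("{} dirty, {} fresh", self.dirty, self.fresh);
        for u in &self.done {
            println!("{}: start {:?}, rmeta {:?}, done {:?}", u.name, u.start, u.rmeta_done, u.done);
        }
    }
}

fn main() {
    let mut timings = Timings::new();
    timings.add_dirty();
    timings.unit_start(0, "syn v1.0.0".to_string());
    timings.mark_concurrency(1, 0, 3);
    timings.unit_rmeta_finished(0, &["serde_derive v1.0.0".to_string()]);
    timings.unit_finished(0, &[]);
    timings.finished();
}
```

The real `Timings::new` also receives the `BuildContext` and the root units, and `finished()` presumably writes the HTML report described in the commit message.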

src/cargo/core/compiler/mod.rs (+4 −52)

```diff
@@ -11,20 +11,20 @@ mod layout;
 mod links;
 mod output_depinfo;
 pub mod standard_lib;
+mod timings;
 mod unit;
 pub mod unit_dependencies;
 
 use std::env;
 use std::ffi::{OsStr, OsString};
 use std::fs::{self, File};
 use std::io::Write;
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
 use std::sync::Arc;
 
 use failure::Error;
 use lazycell::LazyCell;
 use log::debug;
-use same_file::is_same_file;
 use serde::Serialize;
 
 pub use self::build_config::{BuildConfig, CompileMode, MessageFormat};
@@ -442,12 +442,12 @@ fn link_targets<'a, 'cfg>(
                 }
             };
             destinations.push(dst.clone());
-            hardlink_or_copy(src, dst)?;
+            paths::link_or_copy(src, dst)?;
             if let Some(ref path) = output.export_path {
                 let export_dir = export_dir.as_ref().unwrap();
                 paths::create_dir_all(export_dir)?;
 
-                hardlink_or_copy(src, path)?;
+                paths::link_or_copy(src, path)?;
             }
         }
 
@@ -476,54 +476,6 @@ fn link_targets<'a, 'cfg>(
     }))
 }
 
-/// Hardlink (file) or symlink (dir) src to dst if possible, otherwise copy it.
-fn hardlink_or_copy(src: &Path, dst: &Path) -> CargoResult<()> {
-    debug!("linking {} to {}", src.display(), dst.display());
-    if is_same_file(src, dst).unwrap_or(false) {
-        return Ok(());
-    }
-
-    // NB: we can't use dst.exists(), as if dst is a broken symlink,
-    // dst.exists() will return false. This is problematic, as we still need to
-    // unlink dst in this case. symlink_metadata(dst).is_ok() will tell us
-    // whether dst exists *without* following symlinks, which is what we want.
-    if fs::symlink_metadata(dst).is_ok() {
-        paths::remove_file(&dst)?;
-    }
-
-    let link_result = if src.is_dir() {
-        #[cfg(target_os = "redox")]
-        use std::os::redox::fs::symlink;
-        #[cfg(unix)]
-        use std::os::unix::fs::symlink;
-        #[cfg(windows)]
-        use std::os::windows::fs::symlink_dir as symlink;
-
-        let dst_dir = dst.parent().unwrap();
-        let src = if src.starts_with(dst_dir) {
-            src.strip_prefix(dst_dir).unwrap()
-        } else {
-            src
-        };
-        symlink(src, dst)
-    } else {
-        fs::hard_link(src, dst)
-    };
-    link_result
-        .or_else(|err| {
-            debug!("link failed {}. falling back to fs::copy", err);
-            fs::copy(src, dst).map(|_| ())
-        })
-        .chain_err(|| {
-            format!(
-                "failed to link or copy `{}` to `{}`",
-                src.display(),
-                dst.display()
-            )
-        })?;
-    Ok(())
-}
-
 // For all plugin dependencies, add their -L paths (now calculated and present
 // in `build_script_outputs`) to the dynamic library load path for the command
 // to execute.
```
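
The two call sites above now go through `paths::link_or_copy`, a shared helper added elsewhere in this commit and not shown in this excerpt. Assuming it keeps the behavior of the deleted `hardlink_or_copy` (hard-link the file, fall back to a plain copy on failure), a minimal stand-alone sketch of that fallback pattern could look like the following; directory symlinking, the `same_file` check, and Cargo's error chaining are omitted, and the function name is only borrowed for illustration:

```rust
use std::fs;
use std::io;
use std::path::Path;

/// Hypothetical simplification; not Cargo's actual `paths::link_or_copy`.
fn link_or_copy(src: &Path, dst: &Path) -> io::Result<()> {
    // A broken symlink at `dst` makes `dst.exists()` return false, so probe
    // with symlink_metadata before removing it (same reasoning as the deleted
    // helper above).
    if fs::symlink_metadata(dst).is_ok() {
        fs::remove_file(dst)?;
    }
    fs::hard_link(src, dst).or_else(|err| {
        // Hard links can fail (for example across filesystems); fall back to a copy.
        eprintln!("link failed ({}); falling back to copy", err);
        fs::copy(src, dst).map(|_| ())
    })
}

fn main() -> io::Result<()> {
    fs::write("a.txt", "hello")?;
    link_or_copy(Path::new("a.txt"), Path::new("b.txt"))
}
```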
