
Commit 73a1b76

Auto merge of #14377 - mo8it:context, r=weihanglo
Use `context` instead of `with_context`

Replace `.with_context(|| "…")` with `.context("…")` to avoid calling a trivial closure. It is also shorter :)

On the other hand, use `.with_context(|| format!(…))` instead of `.context(format!(…))` to avoid an unneeded string allocation.
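The rule of thumb, as a minimal sketch (the function, path, and error messages below are illustrative, not taken from cargo):

use anyhow::{Context, Result};
use std::fs;
use std::path::Path;

fn read_fixture(path: &Path) -> Result<String> {
    // Static message: pass the &str directly; wrapping a value that costs
    // nothing to construct in a closure adds only noise.
    let raw = fs::read_to_string(path).context("failed to read fixture")?;

    // Formatted message: the closure defers the format! allocation so it
    // only runs if an error actually occurs.
    let body = raw
        .strip_prefix("// header\n")
        .map(str::to_owned)
        .with_context(|| format!("missing header in {}", path.display()))?;
    Ok(body)
}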
2 parents b66cad8 + af04e54

22 files changed (+63, -68 lines)

crates/rustfix/tests/parse_and_replace.rs

Lines changed: 7 additions & 9 deletions
@@ -164,8 +164,8 @@ fn test_rustfix_with_file<P: AsRef<Path>>(file: P, mode: &str) -> Result<(), Err
 
     debug!("next up: {:?}", file);
     let code = fs::read_to_string(file)?;
-    let errors =
-        compile_and_get_json_errors(file).context(format!("could compile {}", file.display()))?;
+    let errors = compile_and_get_json_errors(file)
+        .with_context(|| format!("could not compile {}", file.display()))?;
     let suggestions =
         rustfix::get_suggestions_from_json(&errors, &HashSet::new(), filter_suggestions)
             .context("could not load suggestions")?;
@@ -175,10 +175,8 @@ fn test_rustfix_with_file<P: AsRef<Path>>(file: P, mode: &str) -> Result<(), Err
     }
 
     if std::env::var(settings::CHECK_JSON).is_ok() {
-        let expected_json = fs::read_to_string(&json_file).context(format!(
-            "could not load json fixtures for {}",
-            file.display()
-        ))?;
+        let expected_json = fs::read_to_string(&json_file)
+            .with_context(|| format!("could not load json fixtures for {}", file.display()))?;
         let expected_suggestions =
             rustfix::get_suggestions_from_json(&expected_json, &HashSet::new(), filter_suggestions)
                 .context("could not load expected suggestions")?;
@@ -194,7 +192,7 @@ fn test_rustfix_with_file<P: AsRef<Path>>(file: P, mode: &str) -> Result<(), Err
     }
 
     let fixed = apply_suggestions(&code, &suggestions)
-        .context(format!("could not apply suggestions to {}", file.display()))?
+        .with_context(|| format!("could not apply suggestions to {}", file.display()))?
         .replace('\r', "");
 
     if std::env::var(settings::RECORD_FIXED_RUST).is_ok() {
@@ -209,7 +207,7 @@ fn test_rustfix_with_file<P: AsRef<Path>>(file: P, mode: &str) -> Result<(), Err
     }
 
     let expected_fixed = fs::read_to_string(&fixed_file)
-        .context(format!("could read fixed file for {}", file.display()))?
+        .with_context(|| format!("could read fixed file for {}", file.display()))?
         .replace('\r', "");
     ensure!(
         fixed.trim() == expected_fixed.trim(),
@@ -236,7 +234,7 @@ fn get_fixture_files(p: &str) -> Result<Vec<PathBuf>, Error> {
 
 fn assert_fixtures(dir: &str, mode: &str) {
     let files = get_fixture_files(dir)
-        .context(format!("couldn't load dir `{}`", dir))
+        .with_context(|| format!("couldn't load dir `{dir}`"))
         .unwrap();
     let mut failures = 0;

src/cargo/core/compiler/build_runner/mod.rs

Lines changed: 4 additions & 4 deletions
@@ -100,8 +100,8 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
         let jobserver = match bcx.gctx.jobserver_from_env() {
             Some(c) => c.clone(),
             None => {
-                let client = Client::new(bcx.jobs() as usize)
-                    .with_context(|| "failed to create jobserver")?;
+                let client =
+                    Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
                 client.acquire_raw()?;
                 client
             }
@@ -354,11 +354,11 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
             .unwrap()
             .host
             .prepare()
-            .with_context(|| "couldn't prepare build directories")?;
+            .context("couldn't prepare build directories")?;
         for target in self.files.as_mut().unwrap().target.values_mut() {
             target
                 .prepare()
-                .with_context(|| "couldn't prepare build directories")?;
+                .context("couldn't prepare build directories")?;
         }
 
         let files = self.files.as_ref().unwrap();

src/cargo/core/compiler/custom_build.rs

Lines changed: 1 addition & 1 deletion
@@ -431,7 +431,7 @@ fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResul
     // If we have an old build directory, then just move it into place,
     // otherwise create it!
     paths::create_dir_all(&script_out_dir)
-        .with_context(|| "failed to create script output directory for build command")?;
+        .context("failed to create script output directory for build command")?;
 
     // For all our native lib dependencies, pick up their metadata to pass
     // along to this custom build command. We're also careful to augment our

src/cargo/core/compiler/future_incompat.rs

Lines changed: 2 additions & 2 deletions
@@ -211,9 +211,9 @@ impl OnDiskReports {
         report_file
             .file()
             .read_to_string(&mut file_contents)
-            .with_context(|| "failed to read report")?;
+            .context("failed to read report")?;
         let on_disk_reports: OnDiskReports =
-            serde_json::from_str(&file_contents).with_context(|| "failed to load report")?;
+            serde_json::from_str(&file_contents).context("failed to load report")?;
         if on_disk_reports.version != ON_DISK_VERSION {
             bail!("unable to read reports; reports were saved from a future version of Cargo");
         }

src/cargo/core/compiler/job_queue/mod.rs

Lines changed: 2 additions & 2 deletions
@@ -513,7 +513,7 @@ impl<'gctx> JobQueue<'gctx> {
             .into_helper_thread(move |token| {
                 messages.push(Message::Token(token));
             })
-            .with_context(|| "failed to create helper thread for jobserver management")?;
+            .context("failed to create helper thread for jobserver management")?;
 
         // Create a helper thread to manage the diagnostics for rustfix if
         // necessary.
@@ -700,7 +700,7 @@ impl<'gctx> DrainState<'gctx> {
                     .push(FutureIncompatReportPackage { package_id, items });
             }
             Message::Token(acquired_token) => {
-                let token = acquired_token.with_context(|| "failed to acquire jobserver token")?;
+                let token = acquired_token.context("failed to acquire jobserver token")?;
                 self.tokens.push(token);
             }
         }

src/cargo/core/compiler/timings.rs

Lines changed: 1 addition & 1 deletion
@@ -299,7 +299,7 @@ impl<'gctx> Timings<'gctx> {
             .sort_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap());
         if self.report_html {
             self.report_html(build_runner, error)
-                .with_context(|| "failed to save timing report")?;
+                .context("failed to save timing report")?;
         }
         Ok(())
     }

src/cargo/core/global_cache_tracker.rs

Lines changed: 2 additions & 2 deletions
@@ -543,7 +543,7 @@ impl GlobalCacheTracker {
     /// Deletes files from the global cache based on the given options.
     pub fn clean(&mut self, clean_ctx: &mut CleanContext<'_>, gc_opts: &GcOpts) -> CargoResult<()> {
         self.clean_inner(clean_ctx, gc_opts)
-            .with_context(|| "failed to clean entries from the global cache")
+            .context("failed to clean entries from the global cache")
     }
 
     #[tracing::instrument(skip_all)]
@@ -575,7 +575,7 @@ impl GlobalCacheTracker {
                 gc_opts.is_download_cache_size_set(),
                 &mut delete_paths,
             )
-            .with_context(|| "failed to sync tracking database")?
+            .context("failed to sync tracking database")?
         }
         if let Some(max_age) = gc_opts.max_index_age {
             let max_age = now - max_age.as_secs();

src/cargo/core/package.rs

Lines changed: 4 additions & 4 deletions
@@ -393,7 +393,7 @@ impl<'gctx> PackageSet<'gctx> {
         let multiplexing = gctx.http_config()?.multiplexing.unwrap_or(true);
         multi
             .pipelining(false, multiplexing)
-            .with_context(|| "failed to enable multiplexing/pipelining in curl")?;
+            .context("failed to enable multiplexing/pipelining in curl")?;
 
         // let's not flood crates.io with connections
         multi.set_max_host_connections(2)?;
@@ -681,7 +681,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
             .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
         let pkg = source
             .download(id)
-            .with_context(|| "unable to get packages from source")?;
+            .context("unable to get packages from source")?;
         let (url, descriptor, authorization) = match pkg {
             MaybePackage::Ready(pkg) => {
                 debug!("{} doesn't need a download", id);
@@ -951,7 +951,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
             self.set
                 .multi
                 .perform()
-                .with_context(|| "failed to perform http requests")
+                .context("failed to perform http requests")
         })?;
         debug!(target: "network", "handles remaining: {}", n);
         let results = &mut self.results;
@@ -981,7 +981,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
             self.set
                 .multi
                 .wait(&mut [], timeout)
-                .with_context(|| "failed to wait on curl `Multi`")?;
+                .context("failed to wait on curl `Multi`")?;
         }
     }
 }

src/cargo/core/registry.rs

Lines changed: 2 additions & 2 deletions
@@ -444,8 +444,8 @@ impl<'gctx> PackageRegistry<'gctx> {
                     patches must point to different sources",
                     dep.package_name(),
                     url
-                ))
-                .context(format!("failed to resolve patches for `{}`", url));
+                )
+                .context(format!("failed to resolve patches for `{}`", url)));
             }
             unlocked_summaries.push(summary);
         }
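Note that this hunk keeps an eager `.context(format!(…))` even though the commit message prefers `.with_context` for formatted messages. Presumably that is because here `.context` is the inherent method on the `anyhow::Error` built by the macro (which has no closure-taking variant), and the call sits inside a `return Err(…)`, so the `format!` only ever runs on the failing path. A sketch of the shape, with illustrative names:

use anyhow::anyhow;

fn resolve_patch(url: &str, conflicting: bool) -> anyhow::Result<()> {
    if conflicting {
        // `.context` is called on the anyhow::Error itself, inside Err(…);
        // the eager format! is fine because this path is already failing.
        return Err(anyhow!("patches for `{url}` must point to different sources")
            .context(format!("failed to resolve patches for `{url}`")));
    }
    Ok(())
}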

src/cargo/ops/cargo_add/mod.rs

Lines changed: 3 additions & 4 deletions
@@ -1085,10 +1085,9 @@ fn find_workspace_dep(toml_key: &str, root_manifest: &Path) -> CargoResult<Depen
         .context("could not find `dependencies` table in `workspace`")?
         .as_table_like()
         .context("could not make `dependencies` into a table")?;
-    let dep_item = dependencies.get(toml_key).context(format!(
-        "could not find {} in `workspace.dependencies`",
-        toml_key
-    ))?;
+    let dep_item = dependencies
+        .get(toml_key)
+        .with_context(|| format!("could not find {toml_key} in `workspace.dependencies`"))?;
     Dependency::from_toml(root_manifest.parent().unwrap(), toml_key, dep_item)
 }

src/cargo/ops/cargo_package.rs

Lines changed: 3 additions & 3 deletions
@@ -147,13 +147,13 @@ fn create_package(
         .status("Packaging", pkg.package_id().to_string())?;
     dst.file().set_len(0)?;
     let uncompressed_size = tar(ws, pkg, local_reg, ar_files, dst.file(), &filename)
-        .with_context(|| "failed to prepare local package for uploading")?;
+        .context("failed to prepare local package for uploading")?;
 
     dst.seek(SeekFrom::Start(0))?;
     let src_path = dst.path();
     let dst_path = dst.parent().join(&filename);
     fs::rename(&src_path, &dst_path)
-        .with_context(|| "failed to move temporary tarball into final location")?;
+        .context("failed to move temporary tarball into final location")?;
 
     let dst_metadata = dst
         .file()
@@ -331,7 +331,7 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<Fi
     if opts.verify {
         for (pkg, opts, tarball) in &outputs {
             run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
-                .with_context(|| "failed to verify package tarball")?
+                .context("failed to verify package tarball")?
         }
     }

src/cargo/ops/common_for_install_and_uninstall.rs

Lines changed: 2 additions & 3 deletions
@@ -111,7 +111,7 @@ impl InstallTracker {
             if contents.is_empty() {
                 Ok(CrateListingV1::default())
             } else {
-                Ok(toml::from_str(&contents).with_context(|| "invalid TOML found for metadata")?)
+                Ok(toml::from_str(&contents).context("invalid TOML found for metadata")?)
             }
         })()
         .with_context(|| {
@@ -127,8 +127,7 @@ impl InstallTracker {
         let mut v2 = if contents.is_empty() {
             CrateListingV2::default()
         } else {
-            serde_json::from_str(&contents)
-                .with_context(|| "invalid JSON found for metadata")?
+            serde_json::from_str(&contents).context("invalid JSON found for metadata")?
         };
         v2.sync_v1(&v1);
         Ok(v2)

src/cargo/ops/vendor.rs

Lines changed: 6 additions & 6 deletions
@@ -32,7 +32,7 @@ pub fn vendor(ws: &Workspace<'_>, opts: &VendorOptions<'_>) -> CargoResult<()> {
     }
     let workspaces = extra_workspaces.iter().chain(Some(ws)).collect::<Vec<_>>();
     let _lock = gctx.acquire_package_cache_lock(CacheLockMode::MutateExclusive)?;
-    let vendor_config = sync(gctx, &workspaces, opts).with_context(|| "failed to sync")?;
+    let vendor_config = sync(gctx, &workspaces, opts).context("failed to sync")?;
 
     if gctx.shell().verbosity() != Verbosity::Quiet {
         if vendor_config.source.is_empty() {
@@ -113,11 +113,11 @@ fn sync(
     // crate to work with.
     for ws in workspaces {
         let (packages, resolve) =
-            ops::resolve_ws(ws, dry_run).with_context(|| "failed to load pkg lockfile")?;
+            ops::resolve_ws(ws, dry_run).context("failed to load pkg lockfile")?;
 
         packages
             .get_many(resolve.iter())
-            .with_context(|| "failed to download packages")?;
+            .context("failed to download packages")?;
 
         for pkg in resolve.iter() {
             // Don't delete actual source code!
@@ -145,11 +145,11 @@ fn sync(
     // tables about them.
     for ws in workspaces {
         let (packages, resolve) =
-            ops::resolve_ws(ws, dry_run).with_context(|| "failed to load pkg lockfile")?;
+            ops::resolve_ws(ws, dry_run).context("failed to load pkg lockfile")?;
 
         packages
             .get_many(resolve.iter())
-            .with_context(|| "failed to download packages")?;
+            .context("failed to download packages")?;
 
         for pkg in resolve.iter() {
             // No need to vendor path crates since they're already in the
@@ -161,7 +161,7 @@ fn sync(
                 pkg,
                 packages
                     .get_one(pkg)
-                    .with_context(|| "failed to fetch package")?
+                    .context("failed to fetch package")?
                     .clone(),
             );

src/cargo/sources/git/oxide.rs

Lines changed: 2 additions & 3 deletions
@@ -187,7 +187,6 @@ fn amend_authentication_hints(
         _ => None,
     };
     if let Some(e) = e {
-        use anyhow::Context;
        let auth_message = match e {
            gix::protocol::handshake::Error::Credentials(_) => {
                "\n* attempted to find username/password via \
@@ -206,7 +205,7 @@ fn amend_authentication_hints(
                 "if a proxy or similar is necessary `net.git-fetch-with-cli` may help here\n",
                 "https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli"
             );
-            return Err(anyhow::Error::from(err)).context(msg);
+            return Err(anyhow::Error::from(err).context(msg));
         }
         _ => None,
     };
@@ -225,7 +224,7 @@ fn amend_authentication_hints(
             msg.push_str(
                 "https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli",
             );
-            return Err(anyhow::Error::from(err)).context(msg);
+            return Err(anyhow::Error::from(err).context(msg));
         }
     }
     Err(err.into())
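A side effect worth spelling out (this reading is inferred from the diff, not stated in the commit message): rewriting `Err(anyhow::Error::from(err)).context(msg)` as `Err(anyhow::Error::from(err).context(msg))` switches from the `anyhow::Context` extension trait on `Result` to the inherent `context` method on `anyhow::Error`, which is why the local `use anyhow::Context;` import in the first hunk could be dropped. Both forms attach the same context; a sketch of the two equivalent shapes:

use anyhow::Context as _; // needed only for the trait-based form

fn trait_form(err: std::io::Error, msg: String) -> anyhow::Result<()> {
    // Extension trait on Result<_, io::Error>: requires the import above.
    Err(err).context(msg)
}

fn inherent_form(err: std::io::Error, msg: String) -> anyhow::Result<()> {
    // Inherent method on anyhow::Error: no trait import needed.
    Err(anyhow::Error::from(err).context(msg))
}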

src/cargo/sources/registry/http_remote.rs

Lines changed: 3 additions & 3 deletions
@@ -268,7 +268,7 @@ impl<'gctx> HttpRegistry<'gctx> {
 
         self.multi
             .pipelining(false, self.multiplexing)
-            .with_context(|| "failed to enable multiplexing/pipelining in curl")?;
+            .context("failed to enable multiplexing/pipelining in curl")?;
 
         // let's not flood the server with connections
         self.multi.set_max_host_connections(2)?;
@@ -802,7 +802,7 @@ impl<'gctx> RegistryData for HttpRegistry<'gctx> {
         let remaining_in_multi = tls::set(&self.downloads, || {
             self.multi
                 .perform()
-                .with_context(|| "failed to perform http requests")
+                .context("failed to perform http requests")
         })?;
         trace!(target: "network", "{} transfers remaining", remaining_in_multi);
 
@@ -823,7 +823,7 @@ impl<'gctx> RegistryData for HttpRegistry<'gctx> {
                 .unwrap_or_else(|| Duration::new(1, 0));
             self.multi
                 .wait(&mut [], timeout)
-                .with_context(|| "failed to wait on curl `Multi`")?;
+                .context("failed to wait on curl `Multi`")?;
         }
     }
 }

src/cargo/sources/registry/mod.rs

Lines changed: 2 additions & 2 deletions
@@ -642,10 +642,10 @@ impl<'gctx> RegistrySource<'gctx> {
         let prefix = unpack_dir.file_name().unwrap();
         let parent = unpack_dir.parent().unwrap();
         for entry in tar.entries()? {
-            let mut entry = entry.with_context(|| "failed to iterate over archive")?;
+            let mut entry = entry.context("failed to iterate over archive")?;
             let entry_path = entry
                 .path()
-                .with_context(|| "failed to read entry path")?
+                .context("failed to read entry path")?
                 .into_owned();
 
             // We're going to unpack this tarball into the global source

src/cargo/util/cache_lock.rs

Lines changed: 2 additions & 2 deletions
@@ -308,7 +308,7 @@ impl RecursiveLock {
                 self.is_exclusive = true;
                 return Ok(());
             } else {
-                return Err(e).with_context(|| "failed to acquire package cache lock");
+                return Err(e).context("failed to acquire package cache lock");
             }
         }
     }
@@ -331,7 +331,7 @@ impl RecursiveLock {
                 self.is_exclusive = true;
                 return Ok(result);
             } else {
-                return Err(e).with_context(|| "failed to acquire package cache lock");
+                return Err(e).context("failed to acquire package cache lock");
             }
         }
     }
