25 changes: 25 additions & 0 deletions .github/workflows/spelling.yml
@@ -0,0 +1,25 @@
name: Spelling

permissions:
contents: read

on: [pull_request]

env:
RUST_BACKTRACE: 1
CARGO_TERM_COLOR: always
CLICOLOR: 1

concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"
cancel-in-progress: true

jobs:
spelling:
name: Spell Check with Typos
runs-on: ubuntu-latest
steps:
- name: Checkout Actions Repository
uses: actions/checkout@v5
- name: Spell Check Repo
uses: crate-ci/[email protected]
2 changes: 1 addition & 1 deletion crates/cargo-test-support/src/paths.rs
@@ -357,7 +357,7 @@ fn build_dir_ignored_path_patterns() -> Vec<String> {
// Ignore MacOS debug symbols as there are many files/directories that would clutter up
// tests few not a lot of benefit.
"[..].dSYM/[..]",
// Ignore Windows debub symbols files (.pdb)
// Ignore Windows debug symbols files (.pdb)
"[..].pdb",
]
.into_iter()
2 changes: 1 addition & 1 deletion crates/cargo-util-schemas/src/lockfile.rs
@@ -110,7 +110,7 @@ impl TomlLockfileSourceId {
.ok_or_else(|| TomlLockfileSourceIdErrorKind::InvalidSource(source.clone()))?;

// Sparse URLs store the kind prefix (sparse+) in the URL. Therefore, for sparse kinds, we
// want to use the raw `source` instead of the splitted `url`.
// want to use the raw `source` instead of the split `url`.
let url = Url::parse(if kind == "sparse" { &source } else { url }).map_err(|msg| {
TomlLockfileSourceIdErrorKind::InvalidUrl {
url: url.to_string(),
6 changes: 3 additions & 3 deletions src/cargo/core/compiler/build_runner/compilation_files.rs
@@ -235,16 +235,16 @@ impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> {
/// Note that some units may share the same directory, so care should be
/// taken in those cases!
fn pkg_dir(&self, unit: &Unit) -> String {
let seperator = match self.ws.gctx().cli_unstable().build_dir_new_layout {
let separator = match self.ws.gctx().cli_unstable().build_dir_new_layout {
true => "/",
false => "-",
};
let name = unit.pkg.package_id().name();
let meta = self.metas[unit];
if let Some(c_extra_filename) = meta.c_extra_filename() {
format!("{}{}{}", name, seperator, c_extra_filename)
format!("{}{}{}", name, separator, c_extra_filename)
} else {
format!("{}{}{}", name, seperator, self.target_short_hash(unit))
format!("{}{}{}", name, separator, self.target_short_hash(unit))
}
}

52 changes: 26 additions & 26 deletions src/cargo/core/compiler/fingerprint/mod.rs
@@ -1031,18 +1031,18 @@ impl Fingerprint {
}
(
LocalFingerprint::CheckDepInfo {
dep_info: adep,
dep_info: a_dep,
checksum: checksum_a,
},
LocalFingerprint::CheckDepInfo {
dep_info: bdep,
dep_info: b_dep,
checksum: checksum_b,
},
) => {
if adep != bdep {
if a_dep != b_dep {
return DirtyReason::DepInfoOutputChanged {
old: bdep.clone(),
new: adep.clone(),
old: b_dep.clone(),
new: a_dep.clone(),
};
}
if checksum_a != checksum_b {
@@ -1051,48 +1051,48 @@
}
(
LocalFingerprint::RerunIfChanged {
output: aout,
paths: apaths,
output: a_out,
paths: a_paths,
},
LocalFingerprint::RerunIfChanged {
output: bout,
paths: bpaths,
output: b_out,
paths: b_paths,
},
) => {
if aout != bout {
if a_out != b_out {
return DirtyReason::RerunIfChangedOutputFileChanged {
old: bout.clone(),
new: aout.clone(),
old: b_out.clone(),
new: a_out.clone(),
};
}
if apaths != bpaths {
if a_paths != b_paths {
return DirtyReason::RerunIfChangedOutputPathsChanged {
old: bpaths.clone(),
new: apaths.clone(),
old: b_paths.clone(),
new: a_paths.clone(),
};
}
}
(
LocalFingerprint::RerunIfEnvChanged {
var: akey,
val: avalue,
var: a_key,
val: a_value,
},
LocalFingerprint::RerunIfEnvChanged {
var: bkey,
val: bvalue,
var: b_key,
val: b_value,
},
) => {
if *akey != *bkey {
if *a_key != *b_key {
return DirtyReason::EnvVarsChanged {
old: bkey.clone(),
new: akey.clone(),
old: b_key.clone(),
new: a_key.clone(),
};
}
if *avalue != *bvalue {
if *a_value != *b_value {
return DirtyReason::EnvVarChanged {
name: akey.clone(),
old_value: bvalue.clone(),
new_value: avalue.clone(),
name: a_key.clone(),
old_value: b_value.clone(),
new_value: a_value.clone(),
};
}
}
2 changes: 1 addition & 1 deletion src/cargo/core/compiler/mod.rs
@@ -1675,7 +1675,7 @@ fn build_deps_args(
if build_runner.bcx.gctx.cli_unstable().build_dir_new_layout {
let mut map = BTreeMap::new();

// Recursively add all depenendency args to rustc process
// Recursively add all dependency args to rustc process
add_dep_arg(&mut map, build_runner, unit);

let paths = map.into_iter().map(|(_, path)| path).sorted_unstable();
2 changes: 1 addition & 1 deletion src/cargo/core/compiler/timings.rs
@@ -618,7 +618,7 @@ impl<'gctx> Timings<'gctx> {
AggregatedSections::Sections(mut sections) => {
// We draw the sections in the pipeline graph in a way where the frontend
// section has the "default" build color, and then additional sections
// (codegen, link) are overlayed on top with a different color.
// (codegen, link) are overlaid on top with a different color.
// However, there might be some time after the final (usually link) section,
// which definitely shouldn't be classified as "Frontend". We thus try to
// detect this situation and add a final "Other" section.
2 changes: 1 addition & 1 deletion src/cargo/core/shell.rs
@@ -364,7 +364,7 @@ impl Shell {
fn file_hyperlink(&mut self, path: &std::path::Path) -> Option<url::Url> {
let mut url = url::Url::from_file_path(path).ok()?;
// Do a best-effort of setting the host in the URL to avoid issues with opening a link
// scoped to the computer you've SSHed into
// scoped to the computer you've SSH'ed into
let hostname = if cfg!(windows) {
// Not supported correctly on windows
None
2 changes: 1 addition & 1 deletion src/cargo/core/workspace.rs
@@ -1327,7 +1327,7 @@ impl<'gctx> Workspace<'gctx> {

// This is a short term hack to allow `blanket_hint_mostly_unused`
// to run without requiring `-Zcargo-lints`, which should hopefully
// improve the testing expierience while we are collecting feedback
// improve the testing experience while we are collecting feedback
if self.gctx.cli_unstable().profile_hint_mostly_unused {
blanket_hint_mostly_unused(
self.root_maybe(),
38 changes: 19 additions & 19 deletions src/cargo/ops/cargo_compile/compile_filter.rs
@@ -102,12 +102,12 @@ impl CompileFilter {
lib_only: bool,
bins: Vec<String>,
all_bins: bool,
tsts: Vec<String>,
all_tsts: bool,
exms: Vec<String>,
all_exms: bool,
bens: Vec<String>,
all_bens: bool,
tests: Vec<String>,
all_tests: bool,
examples: Vec<String>,
all_examples: bool,
benches: Vec<String>,
all_benches: bool,
all_targets: bool,
) -> CompileFilter {
if all_targets {
@@ -119,34 +119,34 @@
LibRule::False
};
let rule_bins = FilterRule::new(bins, all_bins);
let rule_tsts = FilterRule::new(tsts, all_tsts);
let rule_exms = FilterRule::new(exms, all_exms);
let rule_bens = FilterRule::new(bens, all_bens);
let rule_tests = FilterRule::new(tests, all_tests);
let rule_examples = FilterRule::new(examples, all_examples);
let rule_benches = FilterRule::new(benches, all_benches);

CompileFilter::new(rule_lib, rule_bins, rule_tsts, rule_exms, rule_bens)
CompileFilter::new(rule_lib, rule_bins, rule_tests, rule_examples, rule_benches)
}

/// Constructs a filter from underlying primitives.
pub fn new(
rule_lib: LibRule,
rule_bins: FilterRule,
rule_tsts: FilterRule,
rule_exms: FilterRule,
rule_bens: FilterRule,
rule_tests: FilterRule,
rule_examples: FilterRule,
rule_benches: FilterRule,
) -> CompileFilter {
if rule_lib == LibRule::True
|| rule_bins.is_specific()
|| rule_tsts.is_specific()
|| rule_exms.is_specific()
|| rule_bens.is_specific()
|| rule_tests.is_specific()
|| rule_examples.is_specific()
|| rule_benches.is_specific()
{
CompileFilter::Only {
all_targets: false,
lib: rule_lib,
bins: rule_bins,
examples: rule_exms,
benches: rule_bens,
tests: rule_tsts,
examples: rule_examples,
benches: rule_benches,
tests: rule_tests,
}
} else {
CompileFilter::Default {
2 changes: 1 addition & 1 deletion src/cargo/util/context/de.rs
@@ -198,7 +198,7 @@ impl<'de, 'gctx> de::Deserializer<'de> for Deserializer<'gctx> {
let vals: Vec<String> = res
.into_iter()
.map(|val| match val {
CV::String(s, _defintion) => Ok(s),
CV::String(s, _definition) => Ok(s),
other => Err(ConfigError::expected(&self.key, "string", &other)),
})
.collect::<Result<_, _>>()?;
2 changes: 1 addition & 1 deletion src/cargo/util/toml/mod.rs
@@ -1308,7 +1308,7 @@ pub fn to_real_manifest(
let edition_msrv = RustVersion::try_from(edition_msrv).unwrap();
if !edition_msrv.is_compatible_with(pkg_msrv.as_partial()) {
bail!(
"rust-version {} is imcompatible with the version ({}) required by \
"rust-version {} is incompatible with the version ({}) required by \
the specified edition ({})",
pkg_msrv,
edition_msrv,
2 changes: 1 addition & 1 deletion src/doc/src/CHANGELOG.md
@@ -3077,7 +3077,7 @@
Some notable changes:
- Renamed `credential-process` to `credential-provider` in Cargo configurations.
- New JSON protocol for communicating with external credential providers via stdin/stdout.
- The GNOME Secert provider now dynamically loads `libsecert`.
- The GNOME Secret provider now dynamically loads `libsecert`.
- The 1password provider is no longer built-in.
- Changed the unstable key for asymmetric tokens from `registry-auth` to `credential-process`.
- ❗️ Removed `--keep-going` flag support from `cargo test` and `cargo bench`.
2 changes: 1 addition & 1 deletion src/doc/src/guide/build-performance.md
@@ -132,7 +132,7 @@ When invoking `cargo`,
However, when contributing to an application,
you may need to build and test various packages within the application,
which can cause extraneous rebuilds because different sets of features may be activated for common dependencies.
With [`feauture-unification`][feature-unification],
With [`feature-unification`][feature-unification],
you can reuse more dependency builds by ensuring the same set of dependency features are activated,
independent of which package you are currently building and testing.

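As context for the `feature-unification` link in the hunk above: the setting lives under the `[resolver]` table of Cargo configuration. A hedged sketch of what enabling it might look like, assuming the nightly `-Zfeature-unification` flag is still required on the current toolchain (this block is an editorial illustration, not part of this diff):

```toml
# .cargo/config.toml — hypothetical sketch, not part of this change.
[resolver]
# Resolve features once for the whole workspace so that switching between
# `cargo test -p a` and `cargo test -p b` reuses the same dependency builds.
feature-unification = "workspace"
```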
10 changes: 5 additions & 5 deletions src/doc/src/reference/build-script-examples.md
@@ -383,7 +383,7 @@ Here's an example:
# Cargo.toml

[package]
name = "zuser"
name = "z_user"
version = "0.1.0"
edition = "2024"

@@ -403,12 +403,12 @@ script:

fn main() {
let mut cfg = cc::Build::new();
cfg.file("src/zuser.c");
cfg.file("src/z_user.c");
if let Some(include) = std::env::var_os("DEP_Z_INCLUDE") {
cfg.include(include);
}
cfg.compile("zuser");
println!("cargo::rerun-if-changed=src/zuser.c");
cfg.compile("z_user");
println!("cargo::rerun-if-changed=src/z_user.c");
}
```

@@ -417,7 +417,7 @@ the zlib header, and it should find the header, even on systems where it isn't
already installed.

```c
// src/zuser.c
// src/z_user.c

#include "zlib.h"

8 changes: 4 additions & 4 deletions tests/testsuite/bad_config.rs
@@ -834,14 +834,14 @@ fn unused_keys() {
version = "0.5.0"
edition = "2015"
authors = ["[email protected]"]
bulid = "foo"
unused = "foo"
"#,
)
.file("src/lib.rs", "pub fn foo() {}")
.build();
p.cargo("check")
.with_stderr_data(str![[r#"
[WARNING] unused manifest key: package.bulid
[WARNING] unused manifest key: package.unused
[CHECKING] foo v0.5.0 ([ROOT]/foo)
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s

@@ -884,15 +884,15 @@ fn unused_keys_in_virtual_manifest() {
r#"
[workspace]
members = ["bar"]
bulid = "foo"
unused = "foo"
"#,
)
.file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
.file("bar/src/lib.rs", "")
.build();
p.cargo("check --workspace")
.with_stderr_data(str![[r#"
[WARNING] [ROOT]/foo/Cargo.toml: unused manifest key: workspace.bulid
[WARNING] [ROOT]/foo/Cargo.toml: unused manifest key: workspace.unused
[CHECKING] bar v0.0.1 ([ROOT]/foo/bar)
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s

10 changes: 5 additions & 5 deletions tests/testsuite/freshness.rs
@@ -1459,16 +1459,16 @@ fn fingerprint_cleaner(mut dir: PathBuf, timestamp: filetime::FileTime) {
// effecting any builds that happened since that time stamp.
let mut cleaned = false;
dir.push(".fingerprint");
for fing in fs::read_dir(&dir).unwrap() {
let fing = fing.unwrap();
for fingerprint in fs::read_dir(&dir).unwrap() {
let fingerprint = fingerprint.unwrap();

let outdated = |f: io::Result<fs::DirEntry>| {
filetime::FileTime::from_last_modification_time(&f.unwrap().metadata().unwrap())
<= timestamp
};
if fs::read_dir(fing.path()).unwrap().all(outdated) {
fs::remove_dir_all(fing.path()).unwrap();
println!("remove: {:?}", fing.path());
if fs::read_dir(fingerprint.path()).unwrap().all(outdated) {
fs::remove_dir_all(fingerprint.path()).unwrap();
println!("remove: {:?}", fingerprint.path());
// a real cleaner would remove the big files in deps and build as well
// but fingerprint is sufficient for our tests
cleaned = true;
2 changes: 1 addition & 1 deletion tests/testsuite/git.rs
@@ -2990,7 +2990,7 @@ fn templatedir_doesnt_cause_problems() {
&format!(
r#"
[package]
name = "fo"
name = "foo"
version = "0.5.0"
edition = "2015"
authors = []