chore: inline format args to improve readability
Using the command below, inlined format args to improve readability, and fixed a few minor related styling issues, such as trailing commas on single-line calls.

```
cargo clippy --all-targets --workspace -- -D clippy::uninlined_format_args
```
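
For context, `clippy::uninlined_format_args` flags format strings that pass plain variables as positional arguments when they could be captured inline (available since Rust 1.58). A minimal sketch of the before/after shape this commit applies everywhere, using hypothetical variables that mirror the changes below:

```rust
fn main() {
    let scheme = "s3";
    let version: i64 = 7;

    // Before: positional arguments; clippy::uninlined_format_args flags these.
    let url_old = format!("{}://", scheme);
    let name_old = format!("{:020}.json", version);

    // After: identifiers captured inline in the format string.
    let url_new = format!("{scheme}://");
    let name_new = format!("{version:020}.json"); // zero-padded to 20 digits

    assert_eq!(url_old, url_new);
    assert_eq!(name_old, name_new);
}
```

Only bare identifiers can be captured this way; expressions such as field accesses or method calls still need positional or named arguments, which is why the lint leaves those call sites alone.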

Signed-off-by: Yuri Astrakhan <[email protected]>
nyurik committed Feb 14, 2025
1 parent d563e10 commit 1329db9
Showing 53 changed files with 127 additions and 227 deletions.
7 changes: 3 additions & 4 deletions crates/aws/src/lib.rs
@@ -89,7 +89,7 @@ pub fn register_handlers(_additional_prefixes: Option<Url>) {
let object_stores = Arc::new(S3ObjectStoreFactory::default());
let log_stores = Arc::new(S3LogStoreFactory::default());
for scheme in ["s3", "s3a"].iter() {
- let url = Url::parse(&format!("{}://", scheme)).unwrap();
+ let url = Url::parse(&format!("{scheme}://")).unwrap();
factories().insert(url.clone(), object_stores.clone());
logstores().insert(url.clone(), log_stores.clone());
}
@@ -310,7 +310,7 @@ impl DynamoDbLockClient {
fn get_primary_key(&self, version: i64, table_path: &str) -> HashMap<String, AttributeValue> {
maplit::hashmap! {
constants::ATTR_TABLE_PATH.to_owned() => string_attr(table_path),
- constants::ATTR_FILE_NAME.to_owned() => string_attr(format!("{:020}.json", version)),
+ constants::ATTR_FILE_NAME.to_owned() => string_attr(format!("{version:020}.json")),
}
}

@@ -669,8 +669,7 @@ fn extract_required_string_field<'a>(
.as_s()
.map_err(|v| LockClientError::InconsistentData {
description: format!(
"mandatory string field '{field_name}' exists, but is not a string: {:#?}",
v,
"mandatory string field '{field_name}' exists, but is not a string: {v:#?}",
),
})
.map(|s| s.as_str())
2 changes: 1 addition & 1 deletion crates/aws/src/logstore/dynamodb_logstore.rs
@@ -277,7 +277,7 @@ impl LogStore for S3DynamoDbLogStore {
LockClientError::VersionAlreadyCompleted { version, .. } => {
error!("Trying to abort a completed commit");
TransactionError::LogStoreError {
- msg: format!("trying to abort a completed log entry: {}", version),
+ msg: format!("trying to abort a completed log entry: {version}"),
source: Box::new(err),
}
}
2 changes: 1 addition & 1 deletion crates/azure/src/lib.rs
@@ -82,7 +82,7 @@ impl LogStoreFactory for AzureFactory {
pub fn register_handlers(_additional_prefixes: Option<Url>) {
let factory = Arc::new(AzureFactory {});
for scheme in ["az", "adl", "azure", "abfs", "abfss"].iter() {
- let url = Url::parse(&format!("{}://", scheme)).unwrap();
+ let url = Url::parse(&format!("{scheme}://")).unwrap();
factories().insert(url.clone(), factory.clone());
logstores().insert(url.clone(), factory.clone());
}
10 changes: 2 additions & 8 deletions crates/azure/tests/context.rs
@@ -43,20 +43,14 @@ impl StorageIntegration for MsftIntegration {
std::env::var("AZURE_STORAGE_ACCOUNT_NAME").unwrap_or(String::from("onelake"));
let container_name = std::env::var("AZURE_STORAGE_CONTAINER_NAME")
.unwrap_or(String::from("delta-rs"));
- format!(
-     "{0}.dfs.fabric.microsoft.com/{1}",
-     account_name, container_name
- )
+ format!("{account_name}.dfs.fabric.microsoft.com/{container_name}")
}
Self::OnelakeAbfs => {
let account_name =
std::env::var("AZURE_STORAGE_ACCOUNT_NAME").unwrap_or(String::from("onelake"));
let container_name = std::env::var("AZURE_STORAGE_CONTAINER_NAME")
.unwrap_or(String::from("delta-rs"));
- format!(
-     "{0}@{1}.dfs.fabric.microsoft.com",
-     container_name, account_name
- )
+ format!("{container_name}@{account_name}.dfs.fabric.microsoft.com")
}
}
}
12 changes: 5 additions & 7 deletions crates/benchmarks/src/bin/merge.rs
@@ -247,9 +247,9 @@ async fn benchmark_merge_tpcds(

let duration = end.duration_since(start);

println!("Total File count: {}", file_count);
println!("File sample count: {}", file_sample_count);
println!("{:?}", metrics);
println!("Total File count: {file_count}");
println!("File sample count: {file_sample_count}");
println!("{metrics:?}");
println!("Seconds: {}", duration.as_secs_f32());

// Clean up and restore to original state.
@@ -597,10 +597,9 @@ async fn main() {
"
select name as before_name,
avg(cast(duration_ms as float)) as before_duration_avg
- from before where group_id = {}
+ from before where group_id = {before_group_id}
group by name
",
- before_group_id
))
.await
.unwrap();
@@ -610,10 +609,9 @@
"
select name as after_name,
avg(cast(duration_ms as float)) as after_duration_avg
- from after where group_id = {}
+ from after where group_id = {after_group_id}
group by name
",
- after_group_id
))
.await
.unwrap();
8 changes: 4 additions & 4 deletions crates/catalog-unity/src/credential.rs
@@ -154,7 +154,7 @@ impl TokenCredential for ClientSecretOAuthProvider {
.form(&[
("client_id", self.client_id.as_str()),
("client_secret", self.client_secret.as_str()),
("scope", &format!("{}/.default", DATABRICKS_RESOURCE_SCOPE)),
("scope", &format!("{DATABRICKS_RESOURCE_SCOPE}/.default")),
("grant_type", "client_credentials"),
])
.send()
@@ -250,7 +250,7 @@ impl TokenCredential for AzureCliCredential {

let token_response = serde_json::from_str::<AzureCliTokenResponse>(output)
.map_err(|err| UnityCatalogError::AzureCli {
message: format!("failed seserializing token response: {:?}", err),
message: format!("failed seserializing token response: {err:?}"),
})?;
if !token_response.token_type.eq_ignore_ascii_case("bearer") {
return Err(UnityCatalogError::AzureCli {
@@ -344,7 +344,7 @@ impl TokenCredential for WorkloadIdentityOAuthProvider {
"urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
),
("client_assertion", token_str.as_str()),
("scope", &format!("{}/.default", DATABRICKS_RESOURCE_SCOPE)),
("scope", &format!("{DATABRICKS_RESOURCE_SCOPE}/.default")),
("grant_type", "client_credentials"),
])
.send()
@@ -420,7 +420,7 @@ impl TokenCredential for ImdsManagedIdentityOAuthProvider {
&self,
_client: &ClientWithMiddleware,
) -> Result<TemporaryToken<String>, UnityCatalogError> {
- let resource_scope = format!("{}/.default", DATABRICKS_RESOURCE_SCOPE);
+ let resource_scope = format!("{DATABRICKS_RESOURCE_SCOPE}/.default");
let mut query_items = vec![
("api-version", MSI_API_VERSION),
("resource", &resource_scope),
2 changes: 1 addition & 1 deletion crates/core/src/delta_datafusion/cdf/scan.rs
@@ -21,7 +21,7 @@ impl DeltaCdfScan {

impl DisplayAs for DeltaCdfScan {
fn fmt_as(&self, _t: DisplayFormatType, f: &mut Formatter) -> std::fmt::Result {
- write!(f, "{:?}", self)
+ write!(f, "{self:?}")
}
}

16 changes: 6 additions & 10 deletions crates/core/src/delta_datafusion/mod.rs
@@ -415,8 +415,7 @@ impl DeltaScanConfigBuilder {
Some(name) => {
if column_names.contains(name) {
return Err(DeltaTableError::Generic(format!(
"Unable to add file path column since column with name {} exits",
name
"Unable to add file path column since column with name {name} exits"
)));
}

@@ -429,7 +428,7 @@

while column_names.contains(&name) {
idx += 1;
- name = format!("{}_{}", prefix, idx);
+ name = format!("{prefix}_{idx}");
}

Some(name)
@@ -1110,8 +1109,7 @@ pub(crate) fn get_null_of_arrow_type(t: &ArrowDataType) -> DeltaResult<ScalarVal
| ArrowDataType::LargeListView(_)
| ArrowDataType::ListView(_)
| ArrowDataType::Map(_, _) => Err(DeltaTableError::Generic(format!(
"Unsupported data type for Delta Lake {}",
t
"Unsupported data type for Delta Lake {t}"
))),
}
}
@@ -1620,8 +1618,7 @@ impl TreeNodeVisitor<'_> for FindFilesExprProperties {
}
_ => {
self.result = Err(DeltaTableError::Generic(format!(
"Find files predicate contains unsupported expression {}",
expr
"Find files predicate contains unsupported expression {expr}"
)));
return Ok(TreeNodeRecursion::Stop);
}
@@ -1668,8 +1665,7 @@ fn join_batches_with_add_actions(

for path in iter {
let path = path.ok_or(DeltaTableError::Generic(format!(
"{} cannot be null",
path_column
"{path_column} cannot be null"
)))?;

match actions.remove(path) {
@@ -2921,7 +2917,7 @@ mod tests {
} else if value.to_string().starts_with("part-") {
LocationType::Data
} else {
panic!("Unknown location type: {:?}", value)
panic!("Unknown location type: {value:?}")
}
}
}
14 changes: 5 additions & 9 deletions crates/core/src/kernel/arrow/extract.rs
@@ -34,10 +34,8 @@ pub(crate) fn extract_and_cast<'a, T: Array + 'static>(
arr: &'a dyn ProvidesColumnByName,
name: &'a str,
) -> DeltaResult<&'a T> {
- extract_and_cast_opt::<T>(arr, name).ok_or(DeltaTableError::Generic(format!(
-     "missing-column: {}",
-     name
- )))
+ extract_and_cast_opt::<T>(arr, name)
+     .ok_or(DeltaTableError::Generic(format!("missing-column: {name}")))
}

/// Extracts a column by name and casts it to the given type array type `T`.
@@ -63,8 +61,7 @@ pub(crate) fn extract_column<'a>(
let child = array
.column_by_name(path_step)
.ok_or(ArrowError::SchemaError(format!(
"No such field: {}",
path_step,
"No such field: {path_step}",
)))?;

if let Some(next_path_step) = remaining_path_steps.next() {
@@ -119,12 +116,11 @@ fn cast_column_as<'a, T: Array + 'static>(
column: &Option<&'a Arc<dyn Array>>,
) -> Result<&'a T, ArrowError> {
column
- .ok_or(ArrowError::SchemaError(format!("No such column: {}", name)))?
+ .ok_or(ArrowError::SchemaError(format!("No such column: {name}")))?
.as_any()
.downcast_ref::<T>()
.ok_or(ArrowError::SchemaError(format!(
"{} is not of esxpected type.",
name
"{name} is not of expected type."
)))
}

37 changes: 9 additions & 28 deletions crates/core/src/kernel/models/actions.rs
@@ -323,8 +323,7 @@ impl Protocol {
parsed_properties.insert(parsed_key, value.to_string());
} else if raise_if_not_exists {
return Err(Error::Generic(format!(
"Error parsing property '{}':'{}'",
key, value
"Error parsing property '{key}':'{value}'",
)));
}
}
@@ -340,17 +339,11 @@
}
}
_ => {
- return Err(Error::Generic(format!(
-     "delta.minReaderVersion = '{}' is invalid, valid values are ['1','2','3']",
-     min_reader_version
- )))
+ return Err(Error::Generic(format!("delta.minReaderVersion = '{min_reader_version}' is invalid, valid values are ['1','2','3']")))
}
},
Err(_) => {
- return Err(Error::Generic(format!(
-     "delta.minReaderVersion = '{}' is invalid, valid values are ['1','2','3']",
-     min_reader_version
- )))
+ return Err(Error::Generic(format!("delta.minReaderVersion = '{min_reader_version}' is invalid, valid values are ['1','2','3']")))
}
}
}
@@ -366,17 +359,11 @@
}
}
_ => {
- return Err(Error::Generic(format!(
-     "delta.minWriterVersion = '{}' is invalid, valid values are ['2','3','4','5','6','7']",
-     min_writer_version
- )))
+ return Err(Error::Generic(format!("delta.minWriterVersion = '{min_writer_version}' is invalid, valid values are ['2','3','4','5','6','7']")))
}
},
Err(_) => {
- return Err(Error::Generic(format!(
-     "delta.minWriterVersion = '{}' is invalid, valid values are ['2','3','4','5','6','7']",
-     min_writer_version
- )))
+ return Err(Error::Generic(format!("delta.minWriterVersion = '{min_writer_version}' is invalid, valid values are ['2','3','4','5','6','7']")))
}
}
}
@@ -403,10 +390,7 @@
}
Ok(false) => {}
_ => {
- return Err(Error::Generic(format!(
-     "delta.enableChangeDataFeed = '{}' is invalid, valid values are ['true']",
-     enable_cdf
- )))
+ return Err(Error::Generic(format!("delta.enableChangeDataFeed = '{enable_cdf}' is invalid, valid values are ['true']")))
}
}
}
@@ -436,10 +420,7 @@ impl Protocol {
}
Ok(false) => {}
_ => {
- return Err(Error::Generic(format!(
-     "delta.enableDeletionVectors = '{}' is invalid, valid values are ['true']",
-     enable_dv
- )))
+ return Err(Error::Generic(format!("delta.enableDeletionVectors = '{enable_dv}' is invalid, valid values are ['true']")))
}
}
}
@@ -694,7 +675,7 @@ impl DeletionVectorDescriptor {
}
let dv_path = parent
.join(&dv_suffix)
- .map_err(|_| Error::DeletionVector(format!("invalid path: {}", dv_suffix)))?;
+ .map_err(|_| Error::DeletionVector(format!("invalid path: {dv_suffix}")))?;
Ok(Some(dv_path))
}
StorageType::AbsolutePath => {
@@ -1221,7 +1202,7 @@ mod tests {
#[test]
fn test_primitive() {
let types: PrimitiveType = serde_json::from_str("\"string\"").unwrap();
println!("{:?}", types);
println!("{types:?}");
}

// #[test]
6 changes: 3 additions & 3 deletions crates/core/src/kernel/models/schema.rs
@@ -83,17 +83,17 @@ impl StructTypeExt for StructType {
)),
Value::Number(sql) => generated_cols.push(GeneratedColumn::new(
&field_path,
&format!("{}", sql),
&format!("{sql}"),
field.data_type(),
)),
Value::Bool(sql) => generated_cols.push(GeneratedColumn::new(
&field_path,
&format!("{}", sql),
&format!("{sql}"),
field.data_type(),
)),
Value::Array(sql) => generated_cols.push(GeneratedColumn::new(
&field_path,
&format!("{:?}", sql),
&format!("{sql:?}"),
field.data_type(),
)),
_ => (), // Other types not sure what to do then
2 changes: 1 addition & 1 deletion crates/core/src/kernel/scalars.rs
@@ -285,7 +285,7 @@ fn create_escaped_binary_string(data: &[u8]) -> String {
let mut escaped_string = String::new();
for &byte in data {
// Convert each byte to its two-digit hexadecimal representation
- let hex_representation = format!("{:04X}", byte);
+ let hex_representation = format!("{byte:04X}");
// Append the hexadecimal representation with an escape sequence
escaped_string.push_str("\\u");
escaped_string.push_str(&hex_representation);
2 changes: 1 addition & 1 deletion crates/core/src/kernel/snapshot/log_data.rs
@@ -893,6 +893,6 @@ mod tests {
.log_data();

let col_stats = file_stats.statistics();
println!("{:?}", col_stats);
println!("{col_stats:?}");
}
}
4 changes: 2 additions & 2 deletions crates/core/src/kernel/snapshot/log_segment.rs
@@ -787,7 +787,7 @@ pub(super) mod tests {
let mut actions = vec![Action::Metadata(metadata), Action::Protocol(protocol)];
for i in 0..10 {
actions.push(Action::Add(Add {
path: format!("part-{}.parquet", i),
path: format!("part-{i}.parquet"),
modification_time: chrono::Utc::now().timestamp_millis(),
..Default::default()
}));
@@ -811,7 +811,7 @@
// remove all but one file
for i in 0..9 {
actions.push(Action::Remove(Remove {
path: format!("part-{}.parquet", i),
path: format!("part-{i}.parquet"),
deletion_timestamp: Some(chrono::Utc::now().timestamp_millis()),
..Default::default()
}))
(The remaining 39 changed files are not shown.)
