
Commit 5aa689f

7.x: fix clippy (#2166)
* fix clippy
* fix if else
* fix clippy
* fix clippy
* fix clippy
* fix clippy
1 parent e7a72b4 commit 5aa689f

11 files changed: 17 additions and 25 deletions


ballista/rust/core/src/serde/logical_plan/to_proto.rs

Lines changed: 1 addition & 1 deletion
@@ -198,7 +198,7 @@ impl From<&DataType> for protobuf::arrow_type::ArrowTypeEnum {
         DataType::Timestamp(time_unit, timezone) => {
             ArrowTypeEnum::Timestamp(protobuf::Timestamp {
                 time_unit: protobuf::TimeUnit::from_arrow_time_unit(time_unit) as i32,
-                timezone: timezone.to_owned().unwrap_or_else(String::new),
+                timezone: timezone.to_owned().unwrap_or_default(),
             })
         }
         DataType::Date32 => ArrowTypeEnum::Date32(EmptyMessage {}),
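
The clippy fix above is the preference for unwrap_or_default() over unwrap_or_else(String::new): when the closure only calls the type's Default constructor, the dedicated method says the same thing more directly. A minimal sketch of the pattern using plain std types (not the DataFusion code itself):

    // Both forms produce an empty String when the Option is None.
    fn main() {
        let timezone: Option<String> = None;

        let tz_old = timezone.clone().unwrap_or_else(String::new); // flagged by clippy
        let tz_new = timezone.unwrap_or_default();                 // preferred

        assert_eq!(tz_old, tz_new);
        assert_eq!(tz_new, "");
    }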

datafusion/src/execution/context.rs

Lines changed: 3 additions & 3 deletions
@@ -1151,7 +1151,7 @@ impl ExecutionProps {
         var_type: VarType,
         provider: Arc<dyn VarProvider + Send + Sync>,
     ) -> Option<Arc<dyn VarProvider + Send + Sync>> {
-        let mut var_providers = self.var_providers.take().unwrap_or_else(HashMap::new);
+        let mut var_providers = self.var_providers.take().unwrap_or_default();

         let old_provider = var_providers.insert(var_type, provider);

@@ -3602,7 +3602,7 @@ mod tests {
         let logical_plan = ctx.create_logical_plan(sql)?;
         let logical_plan = ctx.optimize(&logical_plan)?;
         let physical_plan = ctx.create_physical_plan(&logical_plan).await?;
-        ctx.write_csv(physical_plan, out_dir.to_string()).await
+        ctx.write_csv(physical_plan, out_dir).await
     }

     /// Execute SQL and write results to partitioned parquet files
@@ -3615,7 +3615,7 @@ mod tests {
         let logical_plan = ctx.create_logical_plan(sql)?;
         let logical_plan = ctx.optimize(&logical_plan)?;
         let physical_plan = ctx.create_physical_plan(&logical_plan).await?;
-        ctx.write_parquet(physical_plan, out_dir.to_string(), writer_properties)
+        ctx.write_parquet(physical_plan, out_dir, writer_properties)
             .await
     }
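
Two patterns are touched here. The unwrap_or_else(HashMap::new) change is the same unwrap_or_default() cleanup as above, just for a HashMap. The write_csv/write_parquet changes drop a .to_string() on an argument the callee can take as a string slice, saving an allocation. A hedged sketch; write_to is a made-up stand-in, since the real write_csv signature is not shown in this diff:

    use std::collections::HashMap;

    // Hypothetical sink that accepts anything string-like.
    fn write_to(path: impl AsRef<str>) {
        println!("writing to {}", path.as_ref());
    }

    fn main() {
        // unwrap_or_else(HashMap::new) -> unwrap_or_default()
        let providers: Option<HashMap<String, u32>> = None;
        let providers = providers.unwrap_or_default();
        assert!(providers.is_empty());

        let out_dir = "/tmp/out";
        write_to(out_dir.to_string()); // needless allocation
        write_to(out_dir);             // passes the &str straight through
    }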

datafusion/src/logical_plan/builder.rs

Lines changed: 1 addition & 2 deletions
@@ -1041,14 +1041,13 @@ pub(crate) fn expand_wildcard(
     let columns_to_skip = using_columns
         .into_iter()
         // For each USING JOIN condition, only expand to one column in projection
-        .map(|cols| {
+        .flat_map(|cols| {
             let mut cols = cols.into_iter().collect::<Vec<_>>();
             // sort join columns to make sure we consistently keep the same
             // qualified column
             cols.sort();
             cols.into_iter().skip(1)
         })
-        .flatten()
         .collect::<HashSet<_>>();

     if columns_to_skip.is_empty() {
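
This is clippy's map_flatten lint: when the closure already returns an iterator, .map(..).flatten() collapses into a single .flat_map(..) call. The same rewrite shows up below in plan.rs, filter_push_down.rs, merge_fuzz.rs and order_spill_fuzz.rs. A self-contained sketch with made-up column names:

    use std::collections::HashSet;

    fn main() {
        // Each inner Vec stands in for one USING-join column group.
        let using_columns = vec![vec!["t1.id", "t2.id"], vec!["t1.name", "t3.name"]];

        // Old shape: .map(..) returning an iterator, then .flatten().
        let skip_old: HashSet<_> = using_columns
            .clone()
            .into_iter()
            .map(|mut cols| {
                cols.sort();
                cols.into_iter().skip(1)
            })
            .flatten()
            .collect();

        // New shape: a single .flat_map(..).
        let skip_new: HashSet<_> = using_columns
            .into_iter()
            .flat_map(|mut cols| {
                cols.sort();
                cols.into_iter().skip(1)
            })
            .collect();

        assert_eq!(skip_old, skip_new);
    }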

datafusion/src/logical_plan/plan.rs

Lines changed: 1 addition & 2 deletions
@@ -543,8 +543,7 @@ impl LogicalPlan {
         {
             self.using_columns.push(
                 on.iter()
-                    .map(|entry| [&entry.0, &entry.1])
-                    .flatten()
+                    .flat_map(|entry| [&entry.0, &entry.1])
                     .cloned()
                     .collect::<HashSet<Column>>(),
             );
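
Same map_flatten rewrite, with one detail worth calling out: the closure returns a fixed-size array, and flat_map can consume it because arrays implement IntoIterator by value since Rust 1.53. A standalone sketch where the (String, String) pairs stand in for the join's on columns:

    use std::collections::HashSet;

    fn main() {
        let on = vec![
            ("t1.id".to_string(), "t2.id".to_string()),
            ("t1.name".to_string(), "t2.name".to_string()),
        ];

        // The [&entry.0, &entry.1] array is itself iterable, so flat_map
        // yields every column reference; .cloned() turns &String into String.
        let columns: HashSet<String> = on
            .iter()
            .flat_map(|entry| [&entry.0, &entry.1])
            .cloned()
            .collect();

        assert_eq!(columns.len(), 4);
    }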

datafusion/src/optimizer/filter_push_down.rs

Lines changed: 1 addition & 2 deletions
@@ -228,14 +228,13 @@ fn get_pushable_join_predicates<'a>(
     let schema_columns = schema
         .fields()
         .iter()
-        .map(|f| {
+        .flat_map(|f| {
             [
                 f.qualified_column(),
                 // we need to push down filter using unqualified column as well
                 f.unqualified_column(),
             ]
         })
-        .flatten()
         .collect::<HashSet<_>>();

     state

datafusion/src/physical_plan/hash_aggregate.rs

Lines changed: 4 additions & 5 deletions
@@ -1176,12 +1176,11 @@ mod tests {
         _partition: usize,
         _runtime: Arc<RuntimeEnv>,
     ) -> Result<SendableRecordBatchStream> {
-        let stream;
-        if self.yield_first {
-            stream = TestYieldingStream::New;
+        let stream = if self.yield_first {
+            TestYieldingStream::New
         } else {
-            stream = TestYieldingStream::Yielded;
-        }
+            TestYieldingStream::Yielded
+        };
         Ok(Box::pin(stream))
     }
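
The rewrite above replaces a declare-then-assign sequence with a single binding: if/else is an expression in Rust, so it can initialize the variable directly (likely clippy's needless_late_init lint, assumed from the shape of the change). A sketch that reuses the variant names as a plain enum purely for illustration:

    enum TestYieldingStream {
        New,
        Yielded,
    }

    fn pick(yield_first: bool) -> TestYieldingStream {
        // Instead of `let stream;` followed by an assignment in each branch,
        // bind the result of the if/else expression directly.
        let stream = if yield_first {
            TestYieldingStream::New
        } else {
            TestYieldingStream::Yielded
        };
        stream
    }

    fn main() {
        assert!(matches!(pick(true), TestYieldingStream::New));
        assert!(matches!(pick(false), TestYieldingStream::Yielded));
    }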

datafusion/src/physical_plan/regex_expressions.rs

Lines changed: 1 addition & 1 deletion
@@ -139,7 +139,7 @@ pub fn regexp_replace<T: StringOffsetSizeTrait>(args: &[ArrayRef]) -> Result<Arr
     let (pattern, replace_all) = if flags == "g" {
         (pattern.to_string(), true)
     } else if flags.contains('g') {
-        (format!("(?{}){}", flags.to_string().replace("g", ""), pattern), true)
+        (format!("(?{}){}", flags.to_string().replace('g', ""), pattern), true)
     } else {
         (format!("(?{}){}", flags, pattern), false)
     };
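
The change swaps a one-character &str pattern for a char, which is clippy's single_char_pattern lint: the pattern-taking str methods accept a char, and matching a single char is cheaper than a substring search. A tiny sketch:

    fn main() {
        let flags = "gi";

        let old = flags.replace("g", "");  // one-character &str pattern
        let new = flags.replace('g', "");  // char pattern, same result

        assert_eq!(old, new);
        assert_eq!(new, "i");
    }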

datafusion/src/physical_plan/union.rs

Lines changed: 2 additions & 4 deletions
@@ -201,14 +201,12 @@ fn col_stats_union(
         .min_value
         .zip(right.min_value)
         .map(|(a, b)| expressions::helpers::min(&a, &b))
-        .map(Result::ok)
-        .flatten();
+        .and_then(Result::ok);
     left.max_value = left
         .max_value
         .zip(right.max_value)
         .map(|(a, b)| expressions::helpers::max(&a, &b))
-        .map(Result::ok)
-        .flatten();
+        .and_then(Result::ok);
     left.null_count = left.null_count.zip(right.null_count).map(|(a, b)| a + b);

     left
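
Because expressions::helpers::min/max return a Result, the .map(..) above yields an Option<Result<_>>; mapping Result::ok and then flattening builds an Option<Option<_>> only to collapse it again, while Option::and_then(Result::ok) does it in one step. A standalone sketch with a made-up fallible_min in place of the helper:

    // Stand-in for a fallible min helper; the real one lives in
    // expressions::helpers and returns a Result.
    fn fallible_min(a: i64, b: i64) -> Result<i64, String> {
        Ok(a.min(b))
    }

    fn main() {
        let left: Option<i64> = Some(3);
        let right: Option<i64> = Some(5);

        // Old shape: Option<Result<_>> -> Option<Option<_>> -> Option<_>.
        let old = left
            .zip(right)
            .map(|(a, b)| fallible_min(a, b))
            .map(Result::ok)
            .flatten();

        // New shape: and_then removes the extra layer directly.
        let new = left
            .zip(right)
            .map(|(a, b)| fallible_min(a, b))
            .and_then(Result::ok);

        assert_eq!(old, new);
        assert_eq!(new, Some(3));
    }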

datafusion/src/physical_plan/values.rs

Lines changed: 1 addition & 1 deletion
@@ -190,7 +190,7 @@ mod tests {
     async fn values_empty_case() -> Result<()> {
         let schema = test_util::aggr_test_schema();
         let empty = ValuesExec::try_new(schema, vec![]);
-        assert!(!empty.is_ok());
+        assert!(empty.is_err());
         Ok(())
     }
 }
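
assert!(empty.is_err()) states the expectation directly instead of negating is_ok(); clippy's nonminimal_bool check knows about such method pairs, though the exact lint that fired is an assumption here. A tiny sketch:

    fn main() {
        let bad = "not a number".parse::<i32>();

        assert!(!bad.is_ok()); // double negative: harder to read
        assert!(bad.is_err()); // says exactly what is being checked
    }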

datafusion/tests/merge_fuzz.rs

Lines changed: 1 addition & 2 deletions
@@ -104,8 +104,7 @@ async fn run_merge_test(input: Vec<Vec<RecordBatch>>) {
     for batch_size in batch_sizes {
         let first_batch = input
             .iter()
-            .map(|p| p.iter())
-            .flatten()
+            .flat_map(|p| p.iter())
             .next()
             .expect("at least one batch");
         let schema = first_batch.schema();

datafusion/tests/order_spill_fuzz.rs

Lines changed: 1 addition & 2 deletions
@@ -58,8 +58,7 @@ async fn run_sort(pool_size: usize, size_spill: Vec<(usize, bool)>) {
     let input = vec![make_staggered_batches(size)];
     let first_batch = input
         .iter()
-        .map(|p| p.iter())
-        .flatten()
+        .flat_map(|p| p.iter())
         .next()
         .expect("at least one batch");
     let schema = first_batch.schema();
