Commit a53b974

refactor: use std::slice::from_ref to remove some clones (#13518)
1 parent 58761ac commit a53b974

10 files changed: +18 −16 lines
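
Every change below follows the same pattern: a call site that only needs a read-only, one-element slice was building a temporary array with a clone (&[value.clone()]) and now borrows the existing value via std::slice::from_ref, which turns a &T into a &[T] of length one without allocating or copying. A minimal standard-library-only sketch of what from_ref does:

fn main() {
    let x: i32 = 42;

    // Reborrow a single value as a one-element slice: no allocation,
    // no copy, the slice element lives at the same address as `x`.
    let s: &[i32] = std::slice::from_ref(&x);

    assert_eq!(s.len(), 1);
    assert!(std::ptr::eq(&x, &s[0]));
}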

datafusion/core/src/dataframe/mod.rs

Lines changed: 1 addition & 1 deletion
@@ -1671,7 +1671,7 @@ impl DataFrame {
     /// # }
     /// ```
     pub fn with_column(self, name: &str, expr: Expr) -> Result<DataFrame> {
-        let window_func_exprs = find_window_exprs(&[expr.clone()]);
+        let window_func_exprs = find_window_exprs(std::slice::from_ref(&expr));
 
         let (window_fn_str, plan) = if window_func_exprs.is_empty() {
             (None, self.plan)
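
The old form find_window_exprs(&[expr.clone()]) had to clone because an array literal takes ownership of its elements, while the callee only reads the slice. A hedged sketch of that call-site pattern, with a clone counter to make the saved copy visible; the FakeExpr type and the scan function are hypothetical stand-ins, not DataFusion's Expr or find_window_exprs:

use std::sync::atomic::{AtomicUsize, Ordering};

static CLONES: AtomicUsize = AtomicUsize::new(0);

// Stand-in for an expression tree whose clone is a deep copy.
#[derive(Debug)]
struct FakeExpr(String);

impl Clone for FakeExpr {
    fn clone(&self) -> Self {
        CLONES.fetch_add(1, Ordering::Relaxed);
        FakeExpr(self.0.clone())
    }
}

// Stand-in for a read-only helper that takes a slice of expressions.
fn scan(exprs: &[FakeExpr]) -> usize {
    exprs.len()
}

fn main() {
    let expr = FakeExpr("sum(x) OVER ()".to_string());

    scan(&[expr.clone()]); // old form: one clone per call
    scan(std::slice::from_ref(&expr)); // new form: zero clones

    assert_eq!(CLONES.load(Ordering::Relaxed), 1);
}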

datafusion/core/src/datasource/file_format/arrow.rs

Lines changed: 2 additions & 2 deletions
@@ -476,7 +476,7 @@ mod tests {
             .infer_schema(
                 &state,
                 &(store.clone() as Arc<dyn ObjectStore>),
-                &[object_meta.clone()],
+                std::slice::from_ref(&object_meta),
             )
             .await?;
         let actual_fields = inferred_schema
@@ -515,7 +515,7 @@ mod tests {
             .infer_schema(
                 &state,
                 &(store.clone() as Arc<dyn ObjectStore>),
-                &[object_meta.clone()],
+                std::slice::from_ref(&object_meta),
             )
             .await;
 
datafusion/core/src/datasource/file_format/mod.rs

Lines changed: 3 additions & 1 deletion
@@ -573,7 +573,9 @@ pub(crate) mod test_util {
         let store = Arc::new(LocalFileSystem::new()) as _;
         let meta = local_unpartitioned_file(format!("{store_root}/{file_name}"));
 
-        let file_schema = format.infer_schema(state, &store, &[meta.clone()]).await?;
+        let file_schema = format
+            .infer_schema(state, &store, std::slice::from_ref(&meta))
+            .await?;
 
         let statistics = format
             .infer_stats(state, &store, file_schema.clone(), &meta)

datafusion/core/src/datasource/physical_plan/avro.rs

Lines changed: 3 additions & 3 deletions
@@ -284,7 +284,7 @@ mod tests {
         let meta = local_unpartitioned_file(filename);
 
         let file_schema = AvroFormat {}
-            .infer_schema(&state, &store, &[meta.clone()])
+            .infer_schema(&state, &store, std::slice::from_ref(&meta))
             .await?;
 
         let avro_exec = AvroExec::new(
@@ -349,7 +349,7 @@ mod tests {
         let object_store_url = ObjectStoreUrl::local_filesystem();
         let meta = local_unpartitioned_file(filename);
         let actual_schema = AvroFormat {}
-            .infer_schema(&state, &object_store, &[meta.clone()])
+            .infer_schema(&state, &object_store, std::slice::from_ref(&meta))
             .await?;
 
         let mut builder = SchemaBuilder::from(actual_schema.fields());
@@ -422,7 +422,7 @@ mod tests {
         let object_store_url = ObjectStoreUrl::local_filesystem();
         let meta = local_unpartitioned_file(filename);
         let file_schema = AvroFormat {}
-            .infer_schema(&state, &object_store, &[meta.clone()])
+            .infer_schema(&state, &object_store, std::slice::from_ref(&meta))
             .await?;
 
         let mut partitioned_file = PartitionedFile::from(meta);

datafusion/core/src/datasource/physical_plan/json.rs

Lines changed: 1 addition & 1 deletion
@@ -430,7 +430,7 @@ mod tests {
             .object_meta;
         let schema = JsonFormat::default()
             .with_file_compression_type(file_compression_type.to_owned())
-            .infer_schema(state, &store, &[meta.clone()])
+            .infer_schema(state, &store, std::slice::from_ref(&meta))
             .await
             .unwrap();
 
datafusion/core/src/datasource/physical_plan/parquet/mod.rs

Lines changed: 2 additions & 2 deletions
@@ -1702,7 +1702,7 @@ mod tests {
 
         let store = Arc::new(LocalFileSystem::new()) as _;
         let file_schema = ParquetFormat::default()
-            .infer_schema(&state, &store, &[meta.clone()])
+            .infer_schema(&state, &store, std::slice::from_ref(&meta))
             .await?;
 
         let group_empty = vec![vec![file_range(&meta, 0, 2)]];
@@ -1734,7 +1734,7 @@ mod tests {
         let meta = local_unpartitioned_file(filename);
 
         let schema = ParquetFormat::default()
-            .infer_schema(&state, &store, &[meta.clone()])
+            .infer_schema(&state, &store, std::slice::from_ref(&meta))
             .await
             .unwrap();
 
datafusion/core/tests/parquet/page_pruning.rs

Lines changed: 1 addition & 1 deletion
@@ -54,7 +54,7 @@ async fn get_parquet_exec(state: &SessionState, filter: Expr) -> ParquetExec {
     };
 
     let schema = ParquetFormat::default()
-        .infer_schema(state, &store, &[meta.clone()])
+        .infer_schema(state, &store, std::slice::from_ref(&meta))
         .await
         .unwrap();
 

datafusion/physical-plan/src/aggregates/topk_stream.rs

Lines changed: 2 additions & 2 deletions
@@ -123,7 +123,7 @@ impl Stream for GroupedTopKAggregateStream {
                     batch.num_rows()
                 );
                 if log::log_enabled!(Level::Trace) && batch.num_rows() < 20 {
-                    print_batches(&[batch.clone()])?;
+                    print_batches(std::slice::from_ref(&batch))?;
                 }
                 self.row_count += batch.num_rows();
                 let batches = &[batch];
@@ -165,7 +165,7 @@ impl Stream for GroupedTopKAggregateStream {
                     batch.num_rows()
                 );
                 if log::log_enabled!(Level::Trace) {
-                    print_batches(&[batch.clone()])?;
+                    print_batches(std::slice::from_ref(&batch))?;
                 }
                 return Poll::Ready(Some(Ok(batch)));
             }
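
In topk_stream.rs the one-element slice goes to Arrow's pretty printer: print_batches accepts &[RecordBatch], so a single batch can be traced without first cloning it into a temporary array. A small usage sketch, assuming the arrow crate with its default pretty-print support is available; the schema and values are made up for illustration:

use std::sync::Arc;

use arrow::array::{ArrayRef, Int32Array};
use arrow::datatypes::{DataType, Field, Schema};
use arrow::error::ArrowError;
use arrow::record_batch::RecordBatch;
use arrow::util::pretty::print_batches;

fn main() -> Result<(), ArrowError> {
    // One tiny batch with a single Int32 column named "v".
    let schema = Arc::new(Schema::new(vec![Field::new("v", DataType::Int32, false)]));
    let column: ArrayRef = Arc::new(Int32Array::from(vec![1, 2, 3]));
    let batch = RecordBatch::try_new(schema, vec![column])?;

    // Print the single batch without cloning it into a temporary array.
    print_batches(std::slice::from_ref(&batch))?;
    Ok(())
}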

datafusion/sql/src/expr/mod.rs

Lines changed: 1 addition & 1 deletion
@@ -140,7 +140,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
     ) -> Result<Expr> {
         let mut expr = self.sql_expr_to_logical_expr(sql, schema, planner_context)?;
         expr = self.rewrite_partial_qualifier(expr, schema);
-        self.validate_schema_satisfies_exprs(schema, &[expr.clone()])?;
+        self.validate_schema_satisfies_exprs(schema, std::slice::from_ref(&expr))?;
         let (expr, _) = expr.infer_placeholder_types(schema)?;
         Ok(expr)
     }

datafusion/sql/src/select.rs

Lines changed: 2 additions & 2 deletions
@@ -167,7 +167,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
                 let group_by_expr = normalize_col(group_by_expr, &projected_plan)?;
                 self.validate_schema_satisfies_exprs(
                     base_plan.schema(),
-                    &[group_by_expr.clone()],
+                    std::slice::from_ref(&group_by_expr),
                 )?;
                 Ok(group_by_expr)
             })
@@ -815,7 +815,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
 
         check_columns_satisfy_exprs(
             &column_exprs_post_aggr,
-            &[having_expr_post_aggr.clone()],
+            std::slice::from_ref(&having_expr_post_aggr),
            "HAVING clause references non-aggregate values",
         )?;
 