Skip to content

Commit 30813dc

Browse files
authored
Minor: pass ParquetFileMetrics to build_row_filter in parquet (#4161)
1 parent da76feb commit 30813dc

File tree

2 files changed: +7 additions, −4 deletions

datafusion/core/src/physical_plan/file_format/parquet.rs

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -414,8 +414,7 @@ impl FileOpener for ParquetOpener {
414414
table_schema.as_ref(),
415415
builder.metadata(),
416416
reorder_predicates,
417-
&file_metrics.pushdown_rows_filtered,
418-
&file_metrics.pushdown_eval_time,
417+
&file_metrics,
419418
);
420419

421420
match row_filter {

datafusion/core/src/physical_plan/file_format/parquet/row_filter.rs

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,8 @@ use std::sync::Arc;
3333

3434
use crate::physical_plan::metrics;
3535

36+
use super::ParquetFileMetrics;
37+
3638
/// This module contains utilities for enabling the pushdown of DataFusion filter predicates (which
3739
/// can be any DataFusion `Expr` that evaluates to a `BooleanArray`) to the parquet decoder level in `arrow-rs`.
3840
/// DataFusion will use a `ParquetRecordBatchStream` to read data from parquet into arrow `RecordBatch`es.
@@ -309,9 +311,11 @@ pub fn build_row_filter(
309311
table_schema: &Schema,
310312
metadata: &ParquetMetaData,
311313
reorder_predicates: bool,
312-
rows_filtered: &metrics::Count,
313-
time: &metrics::Time,
314+
file_metrics: &ParquetFileMetrics,
314315
) -> Result<Option<RowFilter>> {
316+
let rows_filtered = &file_metrics.pushdown_rows_filtered;
317+
let time = &file_metrics.pushdown_eval_time;
318+
315319
let predicates = split_conjunction_owned(expr);
316320

317321
let mut candidates: Vec<FilterCandidate> = predicates

Comments: 0