Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions datafusion/physical-plan/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,8 @@ log = { workspace = true }
parking_lot = { workspace = true }
pin-project-lite = "^0.2.7"
tokio = { workspace = true }
memmap2 = "0.9.5"
bytes = "1.10.1"

[dev-dependencies]
criterion = { workspace = true, features = ["async_futures"] }
Expand Down
24 changes: 18 additions & 6 deletions datafusion/physical-plan/src/joins/sort_merge_join.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ use std::cmp::Ordering;
use std::collections::{HashMap, VecDeque};
use std::fmt::Formatter;
use std::fs::File;
use std::io::BufReader;
use std::mem::size_of;
use std::ops::Range;
use std::pin::Pin;
Expand Down Expand Up @@ -56,6 +55,7 @@ use crate::{
};

use arrow::array::{types::UInt64Type, *};
use arrow::buffer::Buffer;
use arrow::compute::{
self, concat_batches, filter_record_batch, is_not_null, take, SortOptions,
};
Expand All @@ -75,7 +75,9 @@ use datafusion_physical_expr::PhysicalExprRef;
use datafusion_physical_expr_common::physical_expr::fmt_sql;
use datafusion_physical_expr_common::sort_expr::{LexOrdering, LexRequirement};

use crate::spill::IPCBufferDecoder;
use futures::{Stream, StreamExt};
use memmap2::Mmap;

/// Join execution plan that executes equi-join predicates on multiple partitions using Sort-Merge
/// join algorithm and applies an optional filter post join. Can be used to join arbitrarily large
Expand Down Expand Up @@ -2305,11 +2307,21 @@ fn fetch_right_columns_from_batch_by_idxs(
let mut buffered_cols: Vec<ArrayRef> =
Vec::with_capacity(buffered_indices.len());

let file = BufReader::new(File::open(spill_file.path())?);
let reader = StreamReader::try_new(file, None)?;

for batch in reader {
batch?.columns().iter().for_each(|column| {
// >>> REMOVE THIS BLOCK <<<
// -------------------------------------------
let file = File::open(spill_file.path())?;
let mmap = unsafe { Mmap::map(&file)? };
let bytes = bytes::Bytes::from_owner(mmap);
let buffer = Buffer::from(bytes);
let decoder = IPCBufferDecoder::new(buffer);
// -------------------------------------------

// >>> UNCOMMENT BELOW <<<
// let decoder = IPCBufferDecoder::new(spill_file.path());

for i in 0..decoder.num_batches(){
let batch = decoder.get_batch(i).unwrap().unwrap();
batch.columns().iter().for_each(|column| {
buffered_cols.extend(take(column, &buffered_indices, None))
});
}
Expand Down
6 changes: 3 additions & 3 deletions datafusion/physical-plan/src/spill/in_progress_spill_file.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,15 +24,15 @@ use arrow::array::RecordBatch;
use datafusion_common::exec_datafusion_err;
use datafusion_execution::disk_manager::RefCountedTempFile;

use super::{spill_manager::SpillManager, IPCStreamWriter};
use super::{spill_manager::SpillManager, IPCFileWriter};

/// Represents an in-progress spill file used for writing `RecordBatch`es to disk, created by `SpillManager`.
/// Caller is able to use this struct to incrementally append in-memory batches to
/// the file, and then finalize the file by calling the `finish` method.
pub struct InProgressSpillFile {
/// Owning manager; used to create the underlying temp file and writer.
pub(crate) spill_writer: Arc<SpillManager>,
/// Lazily initialized writer
/// (created on the first `append`, once the batch schema is known —
/// see the `writer.is_none()` check in the visible `impl` fragment).
writer: Option<IPCFileWriter>,
/// Lazily initialized in-progress file, it will be moved out when the `finish` method is invoked
in_progress_file: Option<RefCountedTempFile>,
}
Expand Down Expand Up @@ -64,7 +64,7 @@ impl InProgressSpillFile {
if self.writer.is_none() {
let schema = batch.schema();
if let Some(ref in_progress_file) = self.in_progress_file {
self.writer = Some(IPCStreamWriter::new(
self.writer = Some(IPCFileWriter::new(
in_progress_file.path(),
schema.as_ref(),
)?);
Expand Down
Loading
Loading