@@ -22,7 +22,7 @@ use crate::{errors::DataFusionError, expr::PyExpr};
 use datafusion::arrow::datatypes::Schema;
 use datafusion::arrow::pyarrow::{PyArrowType, ToPyArrow};
 use datafusion::arrow::util::pretty;
-use datafusion::dataframe::DataFrame;
+use datafusion::dataframe::{DataFrame, DataFrameWriteOptions};
 use datafusion::parquet::basic::{BrotliLevel, Compression, GzipLevel, ZstdLevel};
 use datafusion::parquet::file::properties::WriterProperties;
 use datafusion::prelude::*;
@@ -305,7 +305,13 @@ impl PyDataFrame {
 
     /// Write a `DataFrame` to a CSV file.
     fn write_csv(&self, path: &str, py: Python) -> PyResult<()> {
-        wait_for_future(py, self.df.as_ref().clone().write_csv(path))?;
+        wait_for_future(
+            py,
+            self.df
+                .as_ref()
+                .clone()
+                .write_csv(path, DataFrameWriteOptions::new(), None),
+        )?;
         Ok(())
     }
 
@@ -357,17 +363,24 @@ impl PyDataFrame {
 
         wait_for_future(
             py,
-            self.df
-                .as_ref()
-                .clone()
-                .write_parquet(path, Option::from(writer_properties)),
+            self.df.as_ref().clone().write_parquet(
+                path,
+                DataFrameWriteOptions::new(),
+                Option::from(writer_properties),
+            ),
         )?;
         Ok(())
     }
 
     /// Executes a query and writes the results to a partitioned JSON file.
     fn write_json(&self, path: &str, py: Python) -> PyResult<()> {
-        wait_for_future(py, self.df.as_ref().clone().write_json(path))?;
+        wait_for_future(
+            py,
+            self.df
+                .as_ref()
+                .clone()
+                .write_json(path, DataFrameWriteOptions::new()),
+        )?;
         Ok(())
     }
 
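A minimal sketch of the writer signatures the hunks above switch to, as they look when called directly from the DataFusion Rust API. It assumes a DataFusion version whose `DataFrame::write_csv`, `write_json`, and `write_parquet` take `DataFrameWriteOptions` (as in this diff), plus a Tokio runtime; `example.csv` and the `out/*` paths are hypothetical placeholders.

```rust
// Sketch only: exercises the updated writer signatures from the diff above.
// Assumes the `tokio` crate (with the "macros" feature) and a DataFusion
// version where the DataFrame writers take DataFrameWriteOptions.
// "example.csv" and the out/* paths are placeholders.
use datafusion::dataframe::DataFrameWriteOptions;
use datafusion::error::Result;
use datafusion::parquet::file::properties::WriterProperties;
use datafusion::prelude::*;

#[tokio::main]
async fn main() -> Result<()> {
    let ctx = SessionContext::new();
    let df = ctx.read_csv("example.csv", CsvReadOptions::new()).await?;

    // CSV: write options plus an optional writer configuration (None = defaults).
    df.clone()
        .write_csv("out/csv", DataFrameWriteOptions::new(), None)
        .await?;

    // JSON: only the write options.
    df.clone()
        .write_json("out/json", DataFrameWriteOptions::new())
        .await?;

    // Parquet: write options plus optional WriterProperties, mirroring the diff.
    let props = WriterProperties::builder().build();
    df.write_parquet("out/parquet", DataFrameWriteOptions::new(), Some(props))
        .await?;
    Ok(())
}
```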