
Commit 2addab3

fix: Fixup usages

1 parent: 308a442

File tree: 5 files changed, +15 -15 lines changed

benchmarks/src/bin/tpch.rs

Lines changed: 2 additions & 1 deletion

@@ -431,7 +431,8 @@ async fn get_table(
         }
         "parquet" => {
             let path = format!("{}/{}", path, table);
-            let format = ParquetFormat::default().with_enable_pruning(true);
+            let format = ParquetFormat::new(ctx.config.config_options())
+                .with_enable_pruning(true);

             (Arc::new(format), path, DEFAULT_PARQUET_EXTENSION)
         }
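
This hunk shows the migration applied throughout the commit: ParquetFormat::default() is replaced by ParquetFormat::new(...) seeded with shared config options, with reader settings layered on through the builder methods. A minimal sketch of the new construction pattern, not taken from this commit: the helper name parquet_format_for is illustrative only, and the public import paths datafusion::prelude::SessionContext and datafusion::datasource::file_format::parquet::ParquetFormat are assumptions.

    use std::sync::Arc;

    use datafusion::datasource::file_format::parquet::ParquetFormat;
    use datafusion::prelude::SessionContext;

    // Hypothetical helper sketching the pattern used in this commit.
    fn parquet_format_for(ctx: &SessionContext) -> Arc<ParquetFormat> {
        // Build the format from the session's shared config options instead of
        // ParquetFormat::default(), then chain the builder-style reader options
        // exercised elsewhere in this diff.
        let format = ParquetFormat::new(ctx.config_options())
            .with_enable_pruning(true)
            .with_metadata_size_hint(9); // hint value reused from the test hunks below
        Arc::new(format)
    }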

datafusion/core/src/config.rs

Lines changed: 2 additions & 1 deletion

@@ -250,7 +250,8 @@ impl BuiltInConfigs {
         ConfigDefinition::new_bool(
             OPT_PARQUET_ENABLE_PRUNING,
             "If true, the parquet reader attempts to skip entire row groups based \
-            on the predicate in the query.",
+            on the predicate in the query and the metadata (min/max values) stored in \
+            the parquet file.",
             true,
         ),
         ConfigDefinition::new_bool(

datafusion/core/src/datasource/file_format/parquet.rs

Lines changed: 4 additions & 8 deletions

@@ -618,8 +618,7 @@ mod tests {
         let (meta, _files) = store_parquet(vec![batch1, batch2]).await?;

         let ctx = SessionContext::new();
-        let config_options = ctx.config_options();
-        let format = ParquetFormat::new(config_options);
+        let format = ParquetFormat::new(ctx.config_options());
         let schema = format.infer_schema(&store, &meta).await.unwrap();

         let stats =
@@ -767,11 +766,7 @@ mod tests {
         assert_eq!(store.request_count(), 2);

         let ctx = SessionContext::new();
-        let config_options = ctx.config_options();
-        config_options
-            .write()
-            .set_u64(OPT_PARQUET_METADATA_SIZE_HINT, 9);
-        let format = ParquetFormat::default(config_options);
+        let format = ParquetFormat::new(ctx.config_options()).with_metadata_size_hint(9);
         let schema = format.infer_schema(&store.upcast(), &meta).await.unwrap();

         let stats =
@@ -798,7 +793,8 @@ mod tests {
         // ensure the requests were coalesced into a single request
         assert_eq!(store.request_count(), 1);

-        let format = ParquetFormat::default().with_metadata_size_hint(size_hint);
+        let format =
+            ParquetFormat::new(ctx.config_options()).with_metadata_size_hint(size_hint);
         let schema = format.infer_schema(&store.upcast(), &meta).await.unwrap();
         let stats = fetch_statistics(
             store.upcast().as_ref(),

datafusion/core/src/physical_plan/file_format/parquet.rs

Lines changed: 5 additions & 4 deletions

@@ -1164,6 +1164,7 @@ mod tests {
     use crate::config::ConfigOptions;
     use crate::datasource::file_format::parquet::test_util::store_parquet;
     use crate::datasource::file_format::test_util::scan_format;
+    use crate::datasource::file_format::FileFormat;
     use crate::datasource::listing::{FileRange, PartitionedFile};
     use crate::datasource::object_store::ObjectStoreUrl;
     use crate::execution::options::CsvReadOptions;
@@ -1660,7 +1661,7 @@ mod tests {
     async fn parquet_exec_with_projection() -> Result<()> {
         let testdata = crate::test_util::parquet_test_data();
         let filename = "alltypes_plain.parquet";
-        let format = ParquetFormat::default();
+        let format = ParquetFormat::new(ConfigOptions::new().into_shareable());
         let parquet_exec =
             scan_format(&format, &testdata, filename, Some(vec![0, 1, 2]), None)
                 .await
@@ -1742,7 +1743,7 @@ mod tests {
         let meta = local_unpartitioned_file(filename);

         let store = Arc::new(LocalFileSystem::new()) as _;
-        let file_schema = ParquetFormat::default()
+        let file_schema = ParquetFormat::new(session_ctx.config_options())
            .infer_schema(&store, &[meta.clone()])
            .await?;

@@ -1789,7 +1790,7 @@ mod tests {

         let meta = local_unpartitioned_file(filename);

-        let schema = ParquetFormat::default()
+        let schema = ParquetFormat::new(session_ctx.config_options())
            .infer_schema(&store, &[meta.clone()])
            .await
            .unwrap();
@@ -2477,7 +2478,7 @@ mod tests {

         let meta = local_unpartitioned_file(filename);

-        let schema = ParquetFormat::default()
+        let schema = ParquetFormat::new(session_ctx.config_options())
            .infer_schema(&store, &[meta.clone()])
            .await
            .unwrap();
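
For code with no SessionContext at hand, the parquet_exec_with_projection() hunk above builds the shareable options directly with ConfigOptions::new().into_shareable(). A standalone sketch of that variant; the helper name standalone_parquet_format is hypothetical, and the crate-external import path datafusion::config::ConfigOptions (crate::config::ConfigOptions inside the crate, per the use statement above) is an assumption.

    use std::sync::Arc;

    use datafusion::config::ConfigOptions;
    use datafusion::datasource::file_format::parquet::ParquetFormat;

    // Build a ParquetFormat without a SessionContext by creating fresh,
    // shareable config options, mirroring parquet_exec_with_projection() above.
    fn standalone_parquet_format() -> Arc<ParquetFormat> {
        Arc::new(ParquetFormat::new(ConfigOptions::new().into_shareable()))
    }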

datafusion/proto/src/logical_plan.rs

Lines changed: 2 additions & 1 deletion

@@ -368,7 +368,8 @@ impl AsLogicalPlan for LogicalPlanNode {
             &FileFormatType::Parquet(protobuf::ParquetFormat {
                 enable_pruning,
             }) => Arc::new(
-                ParquetFormat::default().with_enable_pruning(enable_pruning),
+                ParquetFormat::new(ctx.config_options())
+                    .with_enable_pruning(enable_pruning),
             ),
             FileFormatType::Csv(protobuf::CsvFormat {
                 has_header,
