14 changes: 8 additions & 6 deletions vortex-datafusion/src/convert/exprs.rs
@@ -372,12 +372,14 @@ mod tests {

let result = ExprRef::try_from_df(&like_expr).unwrap();

assert_snapshot!(result.display_tree().to_string(), @r#"
Like
├── child: GetItem(text_col)
│ └── Root
└── pattern: Literal(value: "test%", dtype: utf8)
"#);
insta::allow_duplicates! {
Contributor Author: This makes cargo test -p vortex-datafusion work (a short sketch of the allow_duplicates! mechanism follows this file's diff).

assert_snapshot!(result.display_tree().to_string(), @r#"
Like
├── child: GetItem(text_col)
│ └── Root
└── pattern: Literal(value: "test%", dtype: utf8)
"#);
}
}

#[rstest]
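A minimal standalone sketch, not part of this PR, of the insta behavior the comment above appears to refer to: insta rejects an inline snapshot assertion that executes more than once in a test run (as happens when #[rstest] cases, or a loop, re-run the same assertion), unless the assertion is wrapped in insta::allow_duplicates!. The test name and values below are invented for illustration.

use insta::{allow_duplicates, assert_snapshot};

#[test]
fn snapshot_asserted_more_than_once() {
    for pattern in ["test%", "test%"] {
        // Without allow_duplicates!, the second execution of this inline
        // snapshot assertion makes insta fail the run.
        allow_duplicates! {
            assert_snapshot!(pattern, @"test%");
        }
    }
}

Presumably cargo test -p vortex-datafusion runs every parameterized case of the surrounding #[rstest] test, which is what triggers the duplicate execution the wrapper allows.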
77 changes: 77 additions & 0 deletions vortex-datafusion/src/persistent/mod.rs
@@ -44,6 +44,7 @@ mod tests {
use datafusion::prelude::SessionContext;
use datafusion_datasource::file_format::format_as_file_type;
use datafusion_expr::LogicalPlanBuilder;
use datafusion_physical_plan::display::DisplayableExecutionPlan;
use insta::assert_snapshot;
use rstest::rstest;
use tempfile::{TempDir, tempdir};
@@ -188,4 +189,80 @@ mod tests {

Ok(())
}

#[tokio::test]
async fn create_table_ordered_by() -> anyhow::Result<()> {
let dir = TempDir::new().unwrap();

let factory: VortexFormatFactory = VortexFormatFactory::new();
let mut session_state_builder = SessionStateBuilder::new().with_default_features();
register_vortex_format_factory(factory, &mut session_state_builder);
let session = SessionContext::new_with_state(session_state_builder.build());

// Vortex
session
.sql(&format!(
"CREATE EXTERNAL TABLE my_tbl_vx \
(c1 VARCHAR NOT NULL, c2 INT NOT NULL) \
STORED AS vortex \
WITH ORDER (c1 ASC)
LOCATION '{}/vx/'",
dir.path().to_str().unwrap()
))
.await?;

session
.sql("INSERT INTO my_tbl_vx VALUES ('air', 5), ('balloon', 42)")
.await?
.collect()
.await?;

session
.sql("INSERT INTO my_tbl_vx VALUES ('zebra', 5)")
.await?
.collect()
.await?;

session
.sql("INSERT INTO my_tbl_vx VALUES ('texas', 2000), ('alabama', 2000)")
.await?
.collect()
.await?;

let df = session
.sql("SELECT * FROM my_tbl_vx ORDER BY c1 ASC limit 3")
.await?;
let (state, plan) = df.clone().into_parts();
let physical_plan = state.create_physical_plan(&plan).await?;

insta::assert_snapshot!(DisplayableExecutionPlan::new(physical_plan.as_ref())
.tree_render().to_string(), @r"
┌───────────────────────────┐
│ SortPreservingMergeExec │
│ -------------------- │
│ c1 ASC NULLS LASTlimit: │
│ 3 │
└─────────────┬─────────────┘
┌─────────────┴─────────────┐
│ DataSourceExec │
│ -------------------- │
│ files: 3 │
│ format: vortex │
└───────────────────────────┘
");
Contributor Author: Shows the plan is correct: it has no explicit SortExec node (a hedged follow-up sketch appears after this file's diff).


let r = df.collect().await?;

insta::assert_snapshot!(pretty_format_batches(&r)?.to_string(), @r"
+---------+------+
| c1      | c2   |
+---------+------+
| air     | 5    |
| alabama | 2000 |
| balloon | 42   |
+---------+------+
");

Ok(())
}
}
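A hedged follow-up to the review comment above, not part of the PR: inside create_table_ordered_by, with physical_plan still in scope, the absence of a SortExec could also be asserted directly on an indented plan rendering instead of relying on the tree snapshot. A rough sketch:

// Hypothetical extra assertion (not in the diff): because the table was created
// WITH ORDER (c1 ASC) and the opener now emits chunks in order, DataFusion can
// satisfy ORDER BY c1 with a SortPreservingMergeExec alone, so no SortExec node
// should appear anywhere in the plan text.
let rendered = DisplayableExecutionPlan::new(physical_plan.as_ref())
    .indent(false)
    .to_string();
assert!(!rendered.contains("SortExec"));

Checking the rendered plan text this way stays valid even if the tree rendering layout changes.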
7 changes: 6 additions & 1 deletion vortex-datafusion/src/persistent/opener.rs
@@ -55,6 +55,8 @@ pub(crate) struct VortexOpener {
pub limit: Option<usize>,
pub metrics: VortexMetrics,
pub layout_readers: Arc<DashMap<Path, Weak<dyn LayoutReader>>>,
/// Whether the query has output ordering specified
pub has_output_ordering: bool,
}

impl FileOpener for VortexOpener {
@@ -71,6 +73,7 @@ impl FileOpener for VortexOpener {
let limit = self.limit;
let metrics = self.metrics.clone();
let layout_reader = self.layout_readers.clone();
let has_output_ordering = self.has_output_ordering;

let projected_schema = match projection.as_ref() {
None => logical_schema.clone(),
@@ -224,7 +227,7 @@ impl FileOpener for VortexOpener {
.with_metrics(metrics)
.with_projection(projection_expr)
.with_some_filter(filter)
.with_ordered(false)
.with_ordered(has_output_ordering)
.map(|chunk| chunk.to_struct().into_record_batch())
.into_stream()
.map_err(|e| {
@@ -432,6 +435,7 @@ mod tests {
limit: None,
metrics: Default::default(),
layout_readers: Default::default(),
has_output_ordering: false,
};

// filter matches partition value
@@ -512,6 +516,7 @@ mod tests {
limit: None,
metrics: Default::default(),
layout_readers: Default::default(),
has_output_ordering: false,
};

let filter = col("a").lt(lit(100_i32));
1 change: 1 addition & 0 deletions vortex-datafusion/src/persistent/source.rs
@@ -148,6 +148,7 @@ impl FileSource for VortexSource {
limit: base_config.limit,
metrics: partition_metrics,
layout_readers: self.layout_readers.clone(),
has_output_ordering: !base_config.output_ordering.is_empty(),
};

Arc::new(opener)