diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 1cee5139e3fb..216123132476 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -75,6 +75,12 @@ jobs:
         env:
           CARGO_HOME: "/github/home/.cargo"
           CARGO_TARGET_DIR: "/github/home/target"
+      - name: Check Workspace builds with all features
+        run: |
+          cargo check --workspace --benches --features avro,jit,scheduler
+        env:
+          CARGO_HOME: "/github/home/.cargo"
+          CARGO_TARGET_DIR: "/github/home/target"

   # test the crate
   linux-test:
@@ -115,7 +121,7 @@ jobs:
         run: |
           export ARROW_TEST_DATA=$(pwd)/testing/data
           export PARQUET_TEST_DATA=$(pwd)/parquet-testing/data
-          cargo test --features avro
+          cargo test --features avro,jit,scheduler
           # test datafusion-sql examples
           cargo run --example sql
           # test datafusion examples
diff --git a/datafusion/core/Cargo.toml b/datafusion/core/Cargo.toml
index f42c33fac84d..848d01e53025 100644
--- a/datafusion/core/Cargo.toml
+++ b/datafusion/core/Cargo.toml
@@ -45,7 +45,7 @@ default = ["crypto_expressions", "regex_expressions", "unicode_expressions"]
 # Used for testing ONLY: causes all values to hash to the same value (test for collisions)
 force_hash_collisions = []
 # Used to enable JIT code generation
-jit = ["datafusion-jit"]
+jit = ["datafusion-jit", "datafusion-row/jit"]
 pyarrow = ["pyo3", "arrow/pyarrow", "datafusion-common/pyarrow"]
 regex_expressions = ["datafusion-physical-expr/regex_expressions"]
 # Used to enable scheduler
diff --git a/datafusion/core/benches/parquet_query_sql.rs b/datafusion/core/benches/parquet_query_sql.rs
index de4dcf66a9d4..e9204f86e52c 100644
--- a/datafusion/core/benches/parquet_query_sql.rs
+++ b/datafusion/core/benches/parquet_query_sql.rs
@@ -145,7 +145,7 @@ where
 fn generate_file() -> NamedTempFile {
     let now = Instant::now();

-    let named_file = tempfile::Builder::new()
+    let mut named_file = tempfile::Builder::new()
         .prefix("parquet_query_sql")
         .suffix(".parquet")
         .tempfile()
@@ -260,8 +260,9 @@ fn criterion_benchmark(c: &mut Criterion) {
                 local_rt.block_on(async {
                     let query = context.sql(&query).await.unwrap();
                     let plan = query.create_physical_plan().await.unwrap();
-                    let mut stream =
-                        scheduler.schedule(plan, context.task_ctx()).unwrap();
+                    let results = scheduler.schedule(plan, context.task_ctx()).unwrap();
+
+                    let mut stream = results.stream();
                     while stream.next().await.transpose().unwrap().is_some() {}
                 });
             });
diff --git a/datafusion/core/src/scheduler/mod.rs b/datafusion/core/src/scheduler/mod.rs
index 4cdc34b728e4..77c7036bd7f9 100644
--- a/datafusion/core/src/scheduler/mod.rs
+++ b/datafusion/core/src/scheduler/mod.rs
@@ -51,7 +51,7 @@
 //! ```rust
 //! # use futures::TryStreamExt;
 //! # use datafusion::prelude::{CsvReadOptions, SessionConfig, SessionContext};
-//! # use datafusion_scheduler::Scheduler;
+//! # use datafusion::scheduler::Scheduler;
 //!
 //! # #[tokio::main]
 //! # async fn main() {
@@ -68,8 +68,8 @@
 //!     .unwrap();
 //!
 //! let task = context.task_ctx();
-//! let stream = scheduler.schedule(plan, task).unwrap();
-//! let scheduled: Vec<_> = stream.try_collect().await.unwrap();
+//! let results = scheduler.schedule(plan, task).unwrap();
+//! let scheduled: Vec<_> = results.stream().try_collect().await.unwrap();
 //! # }
 //! ```
 //!
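
For context, the `schedule` API change in this diff (results handle with a `stream()` method, rather than a stream returned directly) boils down to a standalone program like the sketch below. It is assembled from the doc example in `datafusion/core/src/scheduler/mod.rs` above; the CSV path and the `gap` column are illustrative assumptions, and the crate must be built with the `scheduler` feature enabled.

```rust
use datafusion::prelude::{CsvReadOptions, SessionConfig, SessionContext};
use datafusion::scheduler::Scheduler;
use futures::TryStreamExt;

#[tokio::main]
async fn main() {
    // Dedicated scheduler with four worker threads.
    let scheduler = Scheduler::new(4);

    let config = SessionConfig::new().with_target_partitions(4);
    let context = SessionContext::with_config(config);

    // Hypothetical input file; any CSV with a `gap` column would do.
    context
        .register_csv("example", "example.csv", CsvReadOptions::new())
        .await
        .unwrap();

    let plan = context
        .sql("SELECT MIN(gap) FROM example")
        .await
        .unwrap()
        .create_physical_plan()
        .await
        .unwrap();

    // New shape of the API: `schedule` returns a results handle, and the
    // record batch stream is obtained from it via `stream()`.
    let results = scheduler.schedule(plan, context.task_ctx()).unwrap();
    let batches: Vec<_> = results.stream().try_collect().await.unwrap();
    println!("collected {} record batches", batches.len());
}
```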