Merge remote-tracking branch 'apache/main' into alamb/window_test22
alamb committed May 2, 2024
2 parents b8591b3 + 97148bd commit db3d706
Showing 15 changed files with 13 additions and 148 deletions.
2 changes: 1 addition & 1 deletion datafusion-cli/src/exec.rs
@@ -81,7 +81,7 @@ pub async fn exec_from_lines(
                 Ok(_) => {}
                 Err(err) => eprintln!("{err}"),
             }
-            query = "".to_owned();
+            query = "".to_string();
         } else {
             query.push('\n');
         }
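Aside: the change above is purely stylistic. `"".to_owned()` and `"".to_string()` build the same `String`, and `str::to_string` has long been specialized to bypass the `Display` formatting machinery, so there is no performance difference; a minimal sketch:

    fn main() {
        let a: String = "query".to_owned();  // ToOwned::to_owned for str
        let b: String = "query".to_string(); // specialized ToString for str
        assert_eq!(a, b);
    }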
8 changes: 2 additions & 6 deletions datafusion/core/src/datasource/avro_to_arrow/arrow_array_reader.rs
@@ -203,13 +203,9 @@ impl<'a, R: Read> AvroArrowArrayReader<'a, R> {
         Arc::new(builder.finish())
     }
 
-    fn build_primitive_array<T: ArrowPrimitiveType + Resolver>(
-        &self,
-        rows: RecordSlice,
-        col_name: &str,
-    ) -> ArrayRef
+    fn build_primitive_array<T>(&self, rows: RecordSlice, col_name: &str) -> ArrayRef
     where
-        T: ArrowNumericType,
+        T: ArrowNumericType + Resolver,
         T::Native: num_traits::cast::NumCast,
     {
         Arc::new(
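Aside: the rewrite above folds bounds that were split between the angle brackets and the `where` clause into one list, and drops the explicit `ArrowPrimitiveType` bound, which `ArrowNumericType` already implies as a supertrait in arrow-rs. A minimal sketch of the pattern, using hypothetical traits:

    trait Resolver {}

    // Before: bounds scattered across two places in the signature.
    fn build_a<T: Clone + Resolver>(value: T) -> T
    where
        T: Default,
    {
        value
    }

    // After: a single consolidated bound list; callers see the same contract.
    fn build_b<T>(value: T) -> T
    where
        T: Clone + Resolver + Default,
    {
        value
    }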
2 changes: 1 addition & 1 deletion datafusion/core/src/datasource/file_format/json.rs
@@ -219,7 +219,7 @@ impl BatchSerializer for JsonSerializer {
 pub struct JsonSink {
     /// Config options for writing data
     config: FileSinkConfig,
-    ///
+    /// Writer options for underlying Json writer
     writer_options: JsonWriterOptions,
 }
 
2 changes: 1 addition & 1 deletion datafusion/core/src/datasource/file_format/parquet.rs
@@ -536,7 +536,7 @@ async fn fetch_statistics(
 pub struct ParquetSink {
     /// Config options for writing data
     config: FileSinkConfig,
-    ///
+    /// Underlying parquet options
     parquet_options: TableParquetOptions,
     /// File metadata from successfully produced parquet files. The Mutex is only used
     /// to allow inserting to HashMap from behind borrowed reference in DataSink::write_all.
13 changes: 0 additions & 13 deletions datafusion/core/src/datasource/physical_plan/file_stream.rs
@@ -519,16 +519,13 @@ mod tests {
     use std::sync::Arc;
 
     use super::*;
-    use crate::datasource::file_format::write::BatchSerializer;
     use crate::datasource::object_store::ObjectStoreUrl;
     use crate::prelude::SessionContext;
     use crate::test::{make_partition, object_store::register_test_store};
 
     use arrow_schema::Schema;
     use datafusion_common::{internal_err, Statistics};
 
-    use bytes::Bytes;
-
     /// Test `FileOpener` which will simulate errors during file opening or scanning
     #[derive(Default)]
     struct TestOpener {
@@ -974,14 +971,4 @@
 
         Ok(())
     }
-
-    struct TestSerializer {
-        bytes: Bytes,
-    }
-
-    impl BatchSerializer for TestSerializer {
-        fn serialize(&self, _batch: RecordBatch, _initial: bool) -> Result<Bytes> {
-            Ok(self.bytes.clone())
-        }
-    }
 }
26 changes: 0 additions & 26 deletions datafusion/core/src/execution/context/avro.rs
@@ -57,29 +57,3 @@ impl SessionContext {
         Ok(())
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    use async_trait::async_trait;
-
-    // Test for compilation error when calling read_* functions from an #[async_trait] function.
-    // See https://github.com/apache/datafusion/issues/1154
-    #[async_trait]
-    trait CallReadTrait {
-        async fn call_read_avro(&self) -> DataFrame;
-    }
-
-    struct CallRead {}
-
-    #[async_trait]
-    impl CallReadTrait for CallRead {
-        async fn call_read_avro(&self) -> DataFrame {
-            let ctx = SessionContext::new();
-            ctx.read_avro("dummy", AvroReadOptions::default())
-                .await
-                .unwrap()
-        }
-    }
-}
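Aside: the deleted module (repeated in the csv and parquet contexts below) was a compile-only regression test for apache/datafusion#1154. Roughly, `#[async_trait]` rewrites each async method into one returning a boxed future, so calls made inside it must satisfy the future's inferred `Send` and lifetime bounds; a hedged sketch of the desugaring, with `DataFrame` as a stand-in type:

    use std::future::Future;
    use std::pin::Pin;

    struct DataFrame;

    trait CallReadTrait {
        // Approximately what `async fn call_read_avro(&self) -> DataFrame`
        // expands to under #[async_trait]: everything held across an
        // .await inside the body must be Send and outlive 'a.
        fn call_read_avro<'a>(
            &'a self,
        ) -> Pin<Box<dyn Future<Output = DataFrame> + Send + 'a>>;
    }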
18 changes: 0 additions & 18 deletions datafusion/core/src/execution/context/csv.rs
@@ -90,7 +90,6 @@ mod tests {
     use crate::assert_batches_eq;
     use crate::test_util::{plan_and_collect, populate_csv_partitions};
 
-    use async_trait::async_trait;
     use tempfile::TempDir;
 
     #[tokio::test]
@@ -125,21 +124,4 @@
 
         Ok(())
     }
-
-    // Test for compilation error when calling read_* functions from an #[async_trait] function.
-    // See https://github.com/apache/datafusion/issues/1154
-    #[async_trait]
-    trait CallReadTrait {
-        async fn call_read_csv(&self) -> DataFrame;
-    }
-
-    struct CallRead {}
-
-    #[async_trait]
-    impl CallReadTrait for CallRead {
-        async fn call_read_csv(&self) -> DataFrame {
-            let ctx = SessionContext::new();
-            ctx.read_csv("dummy", CsvReadOptions::new()).await.unwrap()
-        }
-    }
 }
20 changes: 0 additions & 20 deletions datafusion/core/src/execution/context/parquet.rs
@@ -84,7 +84,6 @@ mod tests {
     use datafusion_common::config::TableParquetOptions;
     use datafusion_execution::config::SessionConfig;
 
-    use async_trait::async_trait;
     use tempfile::tempdir;
 
     #[tokio::test]
@@ -331,23 +330,4 @@
         assert_eq!(total_rows, 5);
         Ok(())
     }
-
-    // Test for compilation error when calling read_* functions from an #[async_trait] function.
-    // See https://github.com/apache/datafusion/issues/1154
-    #[async_trait]
-    trait CallReadTrait {
-        async fn call_read_parquet(&self) -> DataFrame;
-    }
-
-    struct CallRead {}
-
-    #[async_trait]
-    impl CallReadTrait for CallRead {
-        async fn call_read_parquet(&self) -> DataFrame {
-            let ctx = SessionContext::new();
-            ctx.read_parquet("dummy", ParquetReadOptions::default())
-                .await
-                .unwrap()
-        }
-    }
 }
2 changes: 1 addition & 1 deletion datafusion/core/src/physical_optimizer/enforce_distribution.rs
@@ -411,7 +411,7 @@ fn adjust_input_keys_ordering(
     } else {
         // By default, push down the parent requirements to children
         for child in requirements.children.iter_mut() {
-            child.data = requirements.data.clone();
+            child.data.clone_from(&requirements.data);
         }
     }
     Ok(Transformed::yes(requirements))
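Aside: this and the similar rewrites below follow clippy's `assigning_clones` lint. `a = b.clone()` always constructs a fresh value and drops the old one, while `a.clone_from(&b)` lets types like `Vec` and `String` reuse the destination's existing allocation when its capacity suffices; a minimal sketch:

    fn main() {
        let src = vec![1, 2, 3];
        let mut dst: Vec<i32> = Vec::with_capacity(16);
        dst.clone_from(&src); // clones elements into dst's existing buffer
        assert_eq!(dst, src);
    }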
2 changes: 1 addition & 1 deletion datafusion/core/src/physical_planner.rs
@@ -2035,7 +2035,7 @@ impl DefaultPhysicalPlanner {
                 let config = &session_state.config_options().explain;
 
                 if !config.physical_plan_only {
-                    stringified_plans = e.stringified_plans.clone();
+                    stringified_plans.clone_from(&e.stringified_plans);
                     if e.logical_optimization_succeeded {
                         stringified_plans.push(e.plan.to_stringified(FinalLogicalPlan));
                     }
@@ -185,7 +185,7 @@ impl TableFunctionImpl for SimpleCsvTableFunc {
         for expr in exprs {
             match expr {
                 Expr::Literal(ScalarValue::Utf8(Some(ref path))) => {
-                    filepath = path.clone()
+                    filepath.clone_from(path);
                 }
                 expr => new_exprs.push(expr.clone()),
             }
48 changes: 0 additions & 48 deletions datafusion/expr/src/logical_plan/plan.rs
@@ -2961,54 +2961,6 @@ digraph {
             .unwrap()
     }
 
-    /// Extension plan that panic when trying to access its input plan
-    #[derive(Debug)]
-    struct NoChildExtension {
-        empty_schema: DFSchemaRef,
-    }
-
-    impl UserDefinedLogicalNode for NoChildExtension {
-        fn as_any(&self) -> &dyn std::any::Any {
-            unimplemented!()
-        }
-
-        fn name(&self) -> &str {
-            unimplemented!()
-        }
-
-        fn inputs(&self) -> Vec<&LogicalPlan> {
-            panic!("Should not be called")
-        }
-
-        fn schema(&self) -> &DFSchemaRef {
-            &self.empty_schema
-        }
-
-        fn expressions(&self) -> Vec<Expr> {
-            unimplemented!()
-        }
-
-        fn fmt_for_explain(&self, _: &mut fmt::Formatter) -> fmt::Result {
-            unimplemented!()
-        }
-
-        fn from_template(
-            &self,
-            _: &[Expr],
-            _: &[LogicalPlan],
-        ) -> Arc<dyn UserDefinedLogicalNode> {
-            unimplemented!()
-        }
-
-        fn dyn_hash(&self, _: &mut dyn Hasher) {
-            unimplemented!()
-        }
-
-        fn dyn_eq(&self, _: &dyn UserDefinedLogicalNode) -> bool {
-            unimplemented!()
-        }
-    }
-
     #[test]
     fn test_replace_invalid_placeholder() {
         // test empty placeholder
7 changes: 2 additions & 5 deletions datafusion/functions-array/src/resize.rs
@@ -112,15 +112,12 @@ pub(crate) fn array_resize_inner(arg: &[ArrayRef]) -> Result<ArrayRef> {
 }
 
 /// array_resize keep the original array and append the default element to the end
-fn general_list_resize<O: OffsetSizeTrait>(
+fn general_list_resize<O: OffsetSizeTrait + TryInto<i64>>(
     array: &GenericListArray<O>,
     count_array: &Int64Array,
     field: &FieldRef,
     default_element: Option<ArrayRef>,
-) -> Result<ArrayRef>
-where
-    O: TryInto<i64>,
-{
+) -> Result<ArrayRef> {
     let data_type = array.value_type();
 
     let values = array.values();
7 changes: 2 additions & 5 deletions datafusion/functions-array/src/reverse.rs
@@ -99,13 +99,10 @@ pub fn array_reverse_inner(arg: &[ArrayRef]) -> Result<ArrayRef> {
     }
 }
 
-fn general_array_reverse<O: OffsetSizeTrait>(
+fn general_array_reverse<O: OffsetSizeTrait + TryFrom<i64>>(
     array: &GenericListArray<O>,
     field: &FieldRef,
-) -> Result<ArrayRef>
-where
-    O: TryFrom<i64>,
-{
+) -> Result<ArrayRef> {
     let values = array.values();
     let original_data = values.to_data();
     let capacity = Capacities::Array(original_data.len());
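Aside: both signature rewrites above (resize and reverse) inline the `where` bound into the angle brackets, as in the Avro reader change earlier. The conversion bound itself exists because generic list offsets are `i32` for `List` and `i64` for `LargeList`; a minimal sketch (hypothetical helper) of the conversion it enables:

    use arrow::array::OffsetSizeTrait;

    // Offsets of either width always fit in i64, so the fallible
    // conversion cannot actually fail here.
    fn offset_as_i64<O: OffsetSizeTrait + TryInto<i64>>(offset: O) -> i64 {
        offset
            .try_into()
            .unwrap_or_else(|_| unreachable!("i32/i64 offsets fit in i64"))
    }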
2 changes: 1 addition & 1 deletion datafusion/optimizer/src/rewrite_disjunctive_predicate.rs
@@ -288,7 +288,7 @@ fn delete_duplicate_predicates(or_predicates: Vec<Predicate>) -> Predicate {
         Predicate::And { args } => {
             let args_num = args.len();
             if shortest_exprs.is_empty() || args_num < shortest_exprs_len {
-                shortest_exprs = (*args).clone();
+                shortest_exprs.clone_from(args);
                 shortest_exprs_len = args_num;
             }
         }
