Skip to content

Commit 9d2f049

Browse files
authored
toolchain upgrade and error fixes (apache#15625)
1 parent d09b45d commit 9d2f049

File tree

48 files changed

+140
-168
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

48 files changed

+140
-168
lines changed

datafusion-examples/examples/parquet_index.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -685,7 +685,7 @@ fn make_demo_file(path: impl AsRef<Path>, value_range: Range<i32>) -> Result<()>
685685

686686
let num_values = value_range.len();
687687
let file_names =
688-
StringArray::from_iter_values(std::iter::repeat(&filename).take(num_values));
688+
StringArray::from_iter_values(std::iter::repeat_n(&filename, num_values));
689689
let values = Int32Array::from_iter_values(value_range);
690690
let batch = RecordBatch::try_from_iter(vec![
691691
("file_name", Arc::new(file_names) as ArrayRef),

datafusion/common/src/scalar/mod.rs

Lines changed: 25 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ use std::convert::Infallible;
2727
use std::fmt;
2828
use std::hash::Hash;
2929
use std::hash::Hasher;
30-
use std::iter::repeat;
30+
use std::iter::repeat_n;
3131
use std::mem::{size_of, size_of_val};
3232
use std::str::FromStr;
3333
use std::sync::Arc;
@@ -802,12 +802,14 @@ fn dict_from_scalar<K: ArrowDictionaryKeyType>(
802802
let values_array = value.to_array_of_size(1)?;
803803

804804
// Create a key array with `size` elements, each of 0
805-
let key_array: PrimitiveArray<K> = repeat(if value.is_null() {
806-
None
807-
} else {
808-
Some(K::default_value())
809-
})
810-
.take(size)
805+
let key_array: PrimitiveArray<K> = repeat_n(
806+
if value.is_null() {
807+
None
808+
} else {
809+
Some(K::default_value())
810+
},
811+
size,
812+
)
811813
.collect();
812814

813815
// create a new DictionaryArray
@@ -2189,8 +2191,7 @@ impl ScalarValue {
21892191
scale: i8,
21902192
size: usize,
21912193
) -> Result<Decimal256Array> {
2192-
Ok(repeat(value)
2193-
.take(size)
2194+
Ok(repeat_n(value, size)
21942195
.collect::<Decimal256Array>()
21952196
.with_precision_and_scale(precision, scale)?)
21962197
}
@@ -2416,69 +2417,59 @@ impl ScalarValue {
24162417
}
24172418
ScalarValue::Utf8(e) => match e {
24182419
Some(value) => {
2419-
Arc::new(StringArray::from_iter_values(repeat(value).take(size)))
2420+
Arc::new(StringArray::from_iter_values(repeat_n(value, size)))
24202421
}
24212422
None => new_null_array(&DataType::Utf8, size),
24222423
},
24232424
ScalarValue::Utf8View(e) => match e {
24242425
Some(value) => {
2425-
Arc::new(StringViewArray::from_iter_values(repeat(value).take(size)))
2426+
Arc::new(StringViewArray::from_iter_values(repeat_n(value, size)))
24262427
}
24272428
None => new_null_array(&DataType::Utf8View, size),
24282429
},
24292430
ScalarValue::LargeUtf8(e) => match e {
24302431
Some(value) => {
2431-
Arc::new(LargeStringArray::from_iter_values(repeat(value).take(size)))
2432+
Arc::new(LargeStringArray::from_iter_values(repeat_n(value, size)))
24322433
}
24332434
None => new_null_array(&DataType::LargeUtf8, size),
24342435
},
24352436
ScalarValue::Binary(e) => match e {
24362437
Some(value) => Arc::new(
2437-
repeat(Some(value.as_slice()))
2438-
.take(size)
2439-
.collect::<BinaryArray>(),
2438+
repeat_n(Some(value.as_slice()), size).collect::<BinaryArray>(),
24402439
),
2441-
None => {
2442-
Arc::new(repeat(None::<&str>).take(size).collect::<BinaryArray>())
2443-
}
2440+
None => Arc::new(repeat_n(None::<&str>, size).collect::<BinaryArray>()),
24442441
},
24452442
ScalarValue::BinaryView(e) => match e {
24462443
Some(value) => Arc::new(
2447-
repeat(Some(value.as_slice()))
2448-
.take(size)
2449-
.collect::<BinaryViewArray>(),
2444+
repeat_n(Some(value.as_slice()), size).collect::<BinaryViewArray>(),
24502445
),
24512446
None => {
2452-
Arc::new(repeat(None::<&str>).take(size).collect::<BinaryViewArray>())
2447+
Arc::new(repeat_n(None::<&str>, size).collect::<BinaryViewArray>())
24532448
}
24542449
},
24552450
ScalarValue::FixedSizeBinary(s, e) => match e {
24562451
Some(value) => Arc::new(
24572452
FixedSizeBinaryArray::try_from_sparse_iter_with_size(
2458-
repeat(Some(value.as_slice())).take(size),
2453+
repeat_n(Some(value.as_slice()), size),
24592454
*s,
24602455
)
24612456
.unwrap(),
24622457
),
24632458
None => Arc::new(
24642459
FixedSizeBinaryArray::try_from_sparse_iter_with_size(
2465-
repeat(None::<&[u8]>).take(size),
2460+
repeat_n(None::<&[u8]>, size),
24662461
*s,
24672462
)
24682463
.unwrap(),
24692464
),
24702465
},
24712466
ScalarValue::LargeBinary(e) => match e {
24722467
Some(value) => Arc::new(
2473-
repeat(Some(value.as_slice()))
2474-
.take(size)
2475-
.collect::<LargeBinaryArray>(),
2476-
),
2477-
None => Arc::new(
2478-
repeat(None::<&str>)
2479-
.take(size)
2480-
.collect::<LargeBinaryArray>(),
2468+
repeat_n(Some(value.as_slice()), size).collect::<LargeBinaryArray>(),
24812469
),
2470+
None => {
2471+
Arc::new(repeat_n(None::<&str>, size).collect::<LargeBinaryArray>())
2472+
}
24822473
},
24832474
ScalarValue::List(arr) => {
24842475
Self::list_to_array_of_size(arr.as_ref() as &dyn Array, size)?
@@ -2606,7 +2597,7 @@ impl ScalarValue {
26062597
child_arrays.push(ar);
26072598
new_fields.push(field.clone());
26082599
}
2609-
let type_ids = repeat(*v_id).take(size);
2600+
let type_ids = repeat_n(*v_id, size);
26102601
let type_ids = ScalarBuffer::<i8>::from_iter(type_ids);
26112602
let value_offsets = match mode {
26122603
UnionMode::Sparse => None,
@@ -2674,7 +2665,7 @@ impl ScalarValue {
26742665
}
26752666

26762667
fn list_to_array_of_size(arr: &dyn Array, size: usize) -> Result<ArrayRef> {
2677-
let arrays = repeat(arr).take(size).collect::<Vec<_>>();
2668+
let arrays = repeat_n(arr, size).collect::<Vec<_>>();
26782669
let ret = match !arrays.is_empty() {
26792670
true => arrow::compute::concat(arrays.as_slice())?,
26802671
false => arr.slice(0, 0),

datafusion/common/src/utils/memory.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ use std::mem::size_of;
2525
/// # Parameters
2626
/// - `num_elements`: The number of elements expected in the hash table.
2727
/// - `fixed_size`: A fixed overhead size associated with the collection
28-
/// (e.g., HashSet or HashTable).
28+
/// (e.g., HashSet or HashTable).
2929
/// - `T`: The type of elements stored in the hash table.
3030
///
3131
/// # Details

datafusion/core/src/datasource/listing/table.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1183,7 +1183,7 @@ impl ListingTable {
11831183
/// # Arguments
11841184
/// * `files` - A stream of `Result<PartitionedFile>` items to process
11851185
/// * `limit` - An optional row count limit. If provided, the function will stop collecting files
1186-
/// once the accumulated number of rows exceeds this limit
1186+
/// once the accumulated number of rows exceeds this limit
11871187
/// * `collect_stats` - Whether to collect and accumulate statistics from the files
11881188
///
11891189
/// # Returns

datafusion/core/src/physical_planner.rs

Lines changed: 2 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1023,18 +1023,12 @@ impl DefaultPhysicalPlanner {
10231023
// Collect left & right field indices, the field indices are sorted in ascending order
10241024
let left_field_indices = cols
10251025
.iter()
1026-
.filter_map(|c| match left_df_schema.index_of_column(c) {
1027-
Ok(idx) => Some(idx),
1028-
_ => None,
1029-
})
1026+
.filter_map(|c| left_df_schema.index_of_column(c).ok())
10301027
.sorted()
10311028
.collect::<Vec<_>>();
10321029
let right_field_indices = cols
10331030
.iter()
1034-
.filter_map(|c| match right_df_schema.index_of_column(c) {
1035-
Ok(idx) => Some(idx),
1036-
_ => None,
1037-
})
1031+
.filter_map(|c| right_df_schema.index_of_column(c).ok())
10381032
.sorted()
10391033
.collect::<Vec<_>>();
10401034

datafusion/core/tests/fuzz_cases/aggregation_fuzzer/context_generator.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ use crate::fuzz_cases::aggregation_fuzzer::data_generator::Dataset;
4343
/// - `skip_partial parameters`
4444
/// - hint `sorted` or not
4545
/// - `spilling` or not (TODO, I think a special `MemoryPool` may be needed
46-
/// to support this)
46+
/// to support this)
4747
///
4848
pub struct SessionContextGenerator {
4949
/// Current testing dataset

datafusion/core/tests/fuzz_cases/aggregation_fuzzer/data_generator.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -33,12 +33,12 @@ use crate::fuzz_cases::record_batch_generator::{ColumnDescr, RecordBatchGenerato
3333
/// when you call `generate` function
3434
///
3535
/// - `rows_num_range`, the number of rows in the datasets will be randomly generated
36-
/// within this range
36+
/// within this range
3737
///
3838
/// - `sort_keys`, if `sort_keys` are defined, when you call the `generate` function, the generator
39-
/// will generate one `base dataset` firstly. Then the `base dataset` will be sorted
40-
/// based on each `sort_key` respectively. And finally `len(sort_keys) + 1` datasets
41-
/// will be returned
39+
/// will generate one `base dataset` firstly. Then the `base dataset` will be sorted
40+
/// based on each `sort_key` respectively. And finally `len(sort_keys) + 1` datasets
41+
/// will be returned
4242
///
4343
#[derive(Debug, Clone)]
4444
pub struct DatasetGeneratorConfig {

datafusion/core/tests/fuzz_cases/aggregation_fuzzer/fuzzer.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -270,7 +270,7 @@ impl AggregationFuzzer {
270270
/// - `sql`, the selected test sql
271271
///
272272
/// - `dataset_ref`, the input dataset, store it for error reported when found
273-
/// the inconsistency between the one for `ctx` and `expected results`.
273+
/// the inconsistency between the one for `ctx` and `expected results`.
274274
///
275275
struct AggregationFuzzTestTask {
276276
/// Generated session context in current test case

datafusion/core/tests/memory_limit/mod.rs

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -863,11 +863,10 @@ impl Scenario {
863863
single_row_batches,
864864
} => {
865865
use datafusion::physical_expr::expressions::col;
866-
let batches: Vec<Vec<_>> = std::iter::repeat(maybe_split_batches(
867-
dict_batches(),
868-
*single_row_batches,
869-
))
870-
.take(*partitions)
866+
let batches: Vec<Vec<_>> = std::iter::repeat_n(
867+
maybe_split_batches(dict_batches(), *single_row_batches),
868+
*partitions,
869+
)
871870
.collect();
872871

873872
let schema = batches[0][0].schema();

datafusion/core/tests/parquet/mod.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -611,7 +611,7 @@ fn make_bytearray_batch(
611611
large_binary_values: Vec<&[u8]>,
612612
) -> RecordBatch {
613613
let num_rows = string_values.len();
614-
let name: StringArray = std::iter::repeat(Some(name)).take(num_rows).collect();
614+
let name: StringArray = std::iter::repeat_n(Some(name), num_rows).collect();
615615
let service_string: StringArray = string_values.iter().map(Some).collect();
616616
let service_binary: BinaryArray = binary_values.iter().map(Some).collect();
617617
let service_fixedsize: FixedSizeBinaryArray = fixedsize_values
@@ -659,7 +659,7 @@ fn make_bytearray_batch(
659659
/// name | service.name
660660
fn make_names_batch(name: &str, service_name_values: Vec<&str>) -> RecordBatch {
661661
let num_rows = service_name_values.len();
662-
let name: StringArray = std::iter::repeat(Some(name)).take(num_rows).collect();
662+
let name: StringArray = std::iter::repeat_n(Some(name), num_rows).collect();
663663
let service_name: StringArray = service_name_values.iter().map(Some).collect();
664664

665665
let schema = Schema::new(vec![
@@ -698,31 +698,31 @@ fn make_int_batches_with_null(
698698
Int8Array::from_iter(
699699
v8.into_iter()
700700
.map(Some)
701-
.chain(std::iter::repeat(None).take(null_values)),
701+
.chain(std::iter::repeat_n(None, null_values)),
702702
)
703703
.to_data(),
704704
),
705705
make_array(
706706
Int16Array::from_iter(
707707
v16.into_iter()
708708
.map(Some)
709-
.chain(std::iter::repeat(None).take(null_values)),
709+
.chain(std::iter::repeat_n(None, null_values)),
710710
)
711711
.to_data(),
712712
),
713713
make_array(
714714
Int32Array::from_iter(
715715
v32.into_iter()
716716
.map(Some)
717-
.chain(std::iter::repeat(None).take(null_values)),
717+
.chain(std::iter::repeat_n(None, null_values)),
718718
)
719719
.to_data(),
720720
),
721721
make_array(
722722
Int64Array::from_iter(
723723
v64.into_iter()
724724
.map(Some)
725-
.chain(std::iter::repeat(None).take(null_values)),
725+
.chain(std::iter::repeat_n(None, null_values)),
726726
)
727727
.to_data(),
728728
),

0 commit comments

Comments (0)