Skip to content

Commit d0feddf

Browse files
committed
Revert "toolchain upgrade and error fixes (apache#15625)"
This reverts commit 9d2f049.
1 parent bdd0c05 commit d0feddf

File tree

47 files changed

+167
-139
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

47 files changed

+167
-139
lines changed

datafusion-examples/examples/parquet_index.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -685,7 +685,7 @@ fn make_demo_file(path: impl AsRef<Path>, value_range: Range<i32>) -> Result<()>
685685

686686
let num_values = value_range.len();
687687
let file_names =
688-
StringArray::from_iter_values(std::iter::repeat_n(&filename, num_values));
688+
StringArray::from_iter_values(std::iter::repeat(&filename).take(num_values));
689689
let values = Int32Array::from_iter_values(value_range);
690690
let batch = RecordBatch::try_from_iter(vec![
691691
("file_name", Arc::new(file_names) as ArrayRef),

datafusion/common/src/scalar/mod.rs

Lines changed: 34 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ use std::convert::Infallible;
2727
use std::fmt;
2828
use std::hash::Hash;
2929
use std::hash::Hasher;
30-
use std::iter::repeat_n;
30+
use std::iter::repeat;
3131
use std::mem::{size_of, size_of_val};
3232
use std::str::FromStr;
3333
use std::sync::Arc;
@@ -802,14 +802,12 @@ fn dict_from_scalar<K: ArrowDictionaryKeyType>(
802802
let values_array = value.to_array_of_size(1)?;
803803

804804
// Create a key array with `size` elements, each of 0
805-
let key_array: PrimitiveArray<K> = repeat_n(
806-
if value.is_null() {
807-
None
808-
} else {
809-
Some(K::default_value())
810-
},
811-
size,
812-
)
805+
let key_array: PrimitiveArray<K> = repeat(if value.is_null() {
806+
None
807+
} else {
808+
Some(K::default_value())
809+
})
810+
.take(size)
813811
.collect();
814812

815813
// create a new DictionaryArray
@@ -2191,7 +2189,8 @@ impl ScalarValue {
21912189
scale: i8,
21922190
size: usize,
21932191
) -> Result<Decimal256Array> {
2194-
Ok(repeat_n(value, size)
2192+
Ok(repeat(value)
2193+
.take(size)
21952194
.collect::<Decimal256Array>()
21962195
.with_precision_and_scale(precision, scale)?)
21972196
}
@@ -2417,59 +2416,69 @@ impl ScalarValue {
24172416
}
24182417
ScalarValue::Utf8(e) => match e {
24192418
Some(value) => {
2420-
Arc::new(StringArray::from_iter_values(repeat_n(value, size)))
2419+
Arc::new(StringArray::from_iter_values(repeat(value).take(size)))
24212420
}
24222421
None => new_null_array(&DataType::Utf8, size),
24232422
},
24242423
ScalarValue::Utf8View(e) => match e {
24252424
Some(value) => {
2426-
Arc::new(StringViewArray::from_iter_values(repeat_n(value, size)))
2425+
Arc::new(StringViewArray::from_iter_values(repeat(value).take(size)))
24272426
}
24282427
None => new_null_array(&DataType::Utf8View, size),
24292428
},
24302429
ScalarValue::LargeUtf8(e) => match e {
24312430
Some(value) => {
2432-
Arc::new(LargeStringArray::from_iter_values(repeat_n(value, size)))
2431+
Arc::new(LargeStringArray::from_iter_values(repeat(value).take(size)))
24332432
}
24342433
None => new_null_array(&DataType::LargeUtf8, size),
24352434
},
24362435
ScalarValue::Binary(e) => match e {
24372436
Some(value) => Arc::new(
2438-
repeat_n(Some(value.as_slice()), size).collect::<BinaryArray>(),
2437+
repeat(Some(value.as_slice()))
2438+
.take(size)
2439+
.collect::<BinaryArray>(),
24392440
),
2440-
None => Arc::new(repeat_n(None::<&str>, size).collect::<BinaryArray>()),
2441+
None => {
2442+
Arc::new(repeat(None::<&str>).take(size).collect::<BinaryArray>())
2443+
}
24412444
},
24422445
ScalarValue::BinaryView(e) => match e {
24432446
Some(value) => Arc::new(
2444-
repeat_n(Some(value.as_slice()), size).collect::<BinaryViewArray>(),
2447+
repeat(Some(value.as_slice()))
2448+
.take(size)
2449+
.collect::<BinaryViewArray>(),
24452450
),
24462451
None => {
2447-
Arc::new(repeat_n(None::<&str>, size).collect::<BinaryViewArray>())
2452+
Arc::new(repeat(None::<&str>).take(size).collect::<BinaryViewArray>())
24482453
}
24492454
},
24502455
ScalarValue::FixedSizeBinary(s, e) => match e {
24512456
Some(value) => Arc::new(
24522457
FixedSizeBinaryArray::try_from_sparse_iter_with_size(
2453-
repeat_n(Some(value.as_slice()), size),
2458+
repeat(Some(value.as_slice())).take(size),
24542459
*s,
24552460
)
24562461
.unwrap(),
24572462
),
24582463
None => Arc::new(
24592464
FixedSizeBinaryArray::try_from_sparse_iter_with_size(
2460-
repeat_n(None::<&[u8]>, size),
2465+
repeat(None::<&[u8]>).take(size),
24612466
*s,
24622467
)
24632468
.unwrap(),
24642469
),
24652470
},
24662471
ScalarValue::LargeBinary(e) => match e {
24672472
Some(value) => Arc::new(
2468-
repeat_n(Some(value.as_slice()), size).collect::<LargeBinaryArray>(),
2473+
repeat(Some(value.as_slice()))
2474+
.take(size)
2475+
.collect::<LargeBinaryArray>(),
2476+
),
2477+
None => Arc::new(
2478+
repeat(None::<&str>)
2479+
.take(size)
2480+
.collect::<LargeBinaryArray>(),
24692481
),
2470-
None => {
2471-
Arc::new(repeat_n(None::<&str>, size).collect::<LargeBinaryArray>())
2472-
}
24732482
},
24742483
ScalarValue::List(arr) => {
24752484
Self::list_to_array_of_size(arr.as_ref() as &dyn Array, size)?
@@ -2597,7 +2606,7 @@ impl ScalarValue {
25972606
child_arrays.push(ar);
25982607
new_fields.push(field.clone());
25992608
}
2600-
let type_ids = repeat_n(*v_id, size);
2609+
let type_ids = repeat(*v_id).take(size);
26012610
let type_ids = ScalarBuffer::<i8>::from_iter(type_ids);
26022611
let value_offsets = match mode {
26032612
UnionMode::Sparse => None,
@@ -2665,7 +2674,7 @@ impl ScalarValue {
26652674
}
26662675

26672676
fn list_to_array_of_size(arr: &dyn Array, size: usize) -> Result<ArrayRef> {
2668-
let arrays = repeat_n(arr, size).collect::<Vec<_>>();
2677+
let arrays = repeat(arr).take(size).collect::<Vec<_>>();
26692678
let ret = match !arrays.is_empty() {
26702679
true => arrow::compute::concat(arrays.as_slice())?,
26712680
false => arr.slice(0, 0),

datafusion/common/src/utils/memory.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ use std::mem::size_of;
2525
/// # Parameters
2626
/// - `num_elements`: The number of elements expected in the hash table.
2727
/// - `fixed_size`: A fixed overhead size associated with the collection
28-
/// (e.g., HashSet or HashTable).
28+
/// (e.g., HashSet or HashTable).
2929
/// - `T`: The type of elements stored in the hash table.
3030
///
3131
/// # Details

datafusion/core/src/datasource/listing/table.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1186,7 +1186,7 @@ impl ListingTable {
11861186
/// # Arguments
11871187
/// * `files` - A stream of `Result<PartitionedFile>` items to process
11881188
/// * `limit` - An optional row count limit. If provided, the function will stop collecting files
1189-
/// once the accumulated number of rows exceeds this limit
1189+
/// once the accumulated number of rows exceeds this limit
11901190
/// * `collect_stats` - Whether to collect and accumulate statistics from the files
11911191
///
11921192
/// # Returns

datafusion/core/src/physical_planner.rs

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1023,12 +1023,18 @@ impl DefaultPhysicalPlanner {
10231023
// Collect left & right field indices, the field indices are sorted in ascending order
10241024
let left_field_indices = cols
10251025
.iter()
1026-
.filter_map(|c| left_df_schema.index_of_column(c).ok())
1026+
.filter_map(|c| match left_df_schema.index_of_column(c) {
1027+
Ok(idx) => Some(idx),
1028+
_ => None,
1029+
})
10271030
.sorted()
10281031
.collect::<Vec<_>>();
10291032
let right_field_indices = cols
10301033
.iter()
1031-
.filter_map(|c| right_df_schema.index_of_column(c).ok())
1034+
.filter_map(|c| match right_df_schema.index_of_column(c) {
1035+
Ok(idx) => Some(idx),
1036+
_ => None,
1037+
})
10321038
.sorted()
10331039
.collect::<Vec<_>>();
10341040

datafusion/core/tests/fuzz_cases/aggregation_fuzzer/context_generator.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ use crate::fuzz_cases::aggregation_fuzzer::data_generator::Dataset;
4343
/// - `skip_partial parameters`
4444
/// - hint `sorted` or not
4545
/// - `spilling` or not (TODO, I think a special `MemoryPool` may be needed
46-
/// to support this)
46+
/// to support this)
4747
///
4848
pub struct SessionContextGenerator {
4949
/// Current testing dataset

datafusion/core/tests/fuzz_cases/aggregation_fuzzer/data_generator.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -33,12 +33,12 @@ use crate::fuzz_cases::record_batch_generator::{ColumnDescr, RecordBatchGenerato
3333
/// when you call `generate` function
3434
///
3535
/// - `rows_num_range`, the number of rows in the datasets will be randomly generated
36-
/// within this range
36+
/// within this range
3737
///
3838
/// - `sort_keys`, if `sort_keys` are defined, when you call the `generate` function, the generator
39-
/// will generate one `base dataset` firstly. Then the `base dataset` will be sorted
40-
/// based on each `sort_key` respectively. And finally `len(sort_keys) + 1` datasets
41-
/// will be returned
39+
/// will generate one `base dataset` firstly. Then the `base dataset` will be sorted
40+
/// based on each `sort_key` respectively. And finally `len(sort_keys) + 1` datasets
41+
/// will be returned
4242
///
4343
#[derive(Debug, Clone)]
4444
pub struct DatasetGeneratorConfig {

datafusion/core/tests/fuzz_cases/aggregation_fuzzer/fuzzer.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -270,7 +270,7 @@ impl AggregationFuzzer {
270270
/// - `sql`, the selected test sql
271271
///
272272
/// - `dataset_ref`, the input dataset, store it for error reported when found
273-
/// the inconsistency between the one for `ctx` and `expected results`.
273+
/// the inconsistency between the one for `ctx` and `expected results`.
274274
///
275275
struct AggregationFuzzTestTask {
276276
/// Generated session context in current test case

datafusion/core/tests/memory_limit/mod.rs

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -863,10 +863,11 @@ impl Scenario {
863863
single_row_batches,
864864
} => {
865865
use datafusion::physical_expr::expressions::col;
866-
let batches: Vec<Vec<_>> = std::iter::repeat_n(
867-
maybe_split_batches(dict_batches(), *single_row_batches),
868-
*partitions,
869-
)
866+
let batches: Vec<Vec<_>> = std::iter::repeat(maybe_split_batches(
867+
dict_batches(),
868+
*single_row_batches,
869+
))
870+
.take(*partitions)
870871
.collect();
871872

872873
let schema = batches[0][0].schema();

datafusion/core/tests/parquet/mod.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -611,7 +611,7 @@ fn make_bytearray_batch(
611611
large_binary_values: Vec<&[u8]>,
612612
) -> RecordBatch {
613613
let num_rows = string_values.len();
614-
let name: StringArray = std::iter::repeat_n(Some(name), num_rows).collect();
614+
let name: StringArray = std::iter::repeat(Some(name)).take(num_rows).collect();
615615
let service_string: StringArray = string_values.iter().map(Some).collect();
616616
let service_binary: BinaryArray = binary_values.iter().map(Some).collect();
617617
let service_fixedsize: FixedSizeBinaryArray = fixedsize_values
@@ -659,7 +659,7 @@ fn make_bytearray_batch(
659659
/// name | service.name
660660
fn make_names_batch(name: &str, service_name_values: Vec<&str>) -> RecordBatch {
661661
let num_rows = service_name_values.len();
662-
let name: StringArray = std::iter::repeat_n(Some(name), num_rows).collect();
662+
let name: StringArray = std::iter::repeat(Some(name)).take(num_rows).collect();
663663
let service_name: StringArray = service_name_values.iter().map(Some).collect();
664664

665665
let schema = Schema::new(vec![
@@ -698,31 +698,31 @@ fn make_int_batches_with_null(
698698
Int8Array::from_iter(
699699
v8.into_iter()
700700
.map(Some)
701-
.chain(std::iter::repeat_n(None, null_values)),
701+
.chain(std::iter::repeat(None).take(null_values)),
702702
)
703703
.to_data(),
704704
),
705705
make_array(
706706
Int16Array::from_iter(
707707
v16.into_iter()
708708
.map(Some)
709-
.chain(std::iter::repeat_n(None, null_values)),
709+
.chain(std::iter::repeat(None).take(null_values)),
710710
)
711711
.to_data(),
712712
),
713713
make_array(
714714
Int32Array::from_iter(
715715
v32.into_iter()
716716
.map(Some)
717-
.chain(std::iter::repeat_n(None, null_values)),
717+
.chain(std::iter::repeat(None).take(null_values)),
718718
)
719719
.to_data(),
720720
),
721721
make_array(
722722
Int64Array::from_iter(
723723
v64.into_iter()
724724
.map(Some)
725-
.chain(std::iter::repeat_n(None, null_values)),
725+
.chain(std::iter::repeat(None).take(null_values)),
726726
)
727727
.to_data(),
728728
),

0 commit comments

Comments (0)