chore: Update DataFusion (#6753)

* chore: Update datafusion

* fix: Update for changes

* chore: Run cargo hakari tasks

---------

Co-authored-by: CircleCI[bot] <circleci@influxdata.com>
Co-authored-by: kodiakhq[bot] <49736102+kodiakhq[bot]@users.noreply.github.com>
pull/24376/head
Andrew Lamb 2023-01-30 15:48:52 +01:00 committed by GitHub
parent 51e324378c
commit 5b14caa780
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 24 additions and 24 deletions

32
Cargo.lock generated
View File

@@ -1402,8 +1402,8 @@ dependencies = [
[[package]]
name = "datafusion"
version = "16.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=4a33f2708fe8e30f5f9b062a097fc6155f2db2ce#4a33f2708fe8e30f5f9b062a097fc6155f2db2ce"
version = "17.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=9c8bdfed1a51fd17e28dc1d7eb494844551a204b#9c8bdfed1a51fd17e28dc1d7eb494844551a204b"
dependencies = [
"ahash 0.8.3",
"arrow",
@@ -1448,8 +1448,8 @@ dependencies = [
[[package]]
name = "datafusion-common"
version = "16.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=4a33f2708fe8e30f5f9b062a097fc6155f2db2ce#4a33f2708fe8e30f5f9b062a097fc6155f2db2ce"
version = "17.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=9c8bdfed1a51fd17e28dc1d7eb494844551a204b#9c8bdfed1a51fd17e28dc1d7eb494844551a204b"
dependencies = [
"arrow",
"chrono",
@@ -1461,8 +1461,8 @@ dependencies = [
[[package]]
name = "datafusion-expr"
version = "16.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=4a33f2708fe8e30f5f9b062a097fc6155f2db2ce#4a33f2708fe8e30f5f9b062a097fc6155f2db2ce"
version = "17.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=9c8bdfed1a51fd17e28dc1d7eb494844551a204b#9c8bdfed1a51fd17e28dc1d7eb494844551a204b"
dependencies = [
"ahash 0.8.3",
"arrow",
@@ -1473,8 +1473,8 @@ dependencies = [
[[package]]
name = "datafusion-optimizer"
version = "16.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=4a33f2708fe8e30f5f9b062a097fc6155f2db2ce#4a33f2708fe8e30f5f9b062a097fc6155f2db2ce"
version = "17.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=9c8bdfed1a51fd17e28dc1d7eb494844551a204b#9c8bdfed1a51fd17e28dc1d7eb494844551a204b"
dependencies = [
"arrow",
"async-trait",
@@ -1489,8 +1489,8 @@ dependencies = [
[[package]]
name = "datafusion-physical-expr"
version = "16.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=4a33f2708fe8e30f5f9b062a097fc6155f2db2ce#4a33f2708fe8e30f5f9b062a097fc6155f2db2ce"
version = "17.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=9c8bdfed1a51fd17e28dc1d7eb494844551a204b#9c8bdfed1a51fd17e28dc1d7eb494844551a204b"
dependencies = [
"ahash 0.8.3",
"arrow",
@@ -1519,8 +1519,8 @@ dependencies = [
[[package]]
name = "datafusion-proto"
version = "16.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=4a33f2708fe8e30f5f9b062a097fc6155f2db2ce#4a33f2708fe8e30f5f9b062a097fc6155f2db2ce"
version = "17.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=9c8bdfed1a51fd17e28dc1d7eb494844551a204b#9c8bdfed1a51fd17e28dc1d7eb494844551a204b"
dependencies = [
"arrow",
"chrono",
@@ -1536,8 +1536,8 @@ dependencies = [
[[package]]
name = "datafusion-row"
version = "16.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=4a33f2708fe8e30f5f9b062a097fc6155f2db2ce#4a33f2708fe8e30f5f9b062a097fc6155f2db2ce"
version = "17.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=9c8bdfed1a51fd17e28dc1d7eb494844551a204b#9c8bdfed1a51fd17e28dc1d7eb494844551a204b"
dependencies = [
"arrow",
"datafusion-common",
@@ -1547,8 +1547,8 @@ dependencies = [
[[package]]
name = "datafusion-sql"
version = "16.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=4a33f2708fe8e30f5f9b062a097fc6155f2db2ce#4a33f2708fe8e30f5f9b062a097fc6155f2db2ce"
version = "17.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=9c8bdfed1a51fd17e28dc1d7eb494844551a204b#9c8bdfed1a51fd17e28dc1d7eb494844551a204b"
dependencies = [
"arrow-schema",
"datafusion-common",

View File

@@ -117,8 +117,8 @@ license = "MIT OR Apache-2.0"
[workspace.dependencies]
arrow = { version = "31.0.0" }
arrow-flight = { version = "31.0.0" }
datafusion = { git = "https://github.com/apache/arrow-datafusion.git", rev="4a33f2708fe8e30f5f9b062a097fc6155f2db2ce", default-features = false }
datafusion-proto = { git = "https://github.com/apache/arrow-datafusion.git", rev="4a33f2708fe8e30f5f9b062a097fc6155f2db2ce" }
datafusion = { git = "https://github.com/apache/arrow-datafusion.git", rev="9c8bdfed1a51fd17e28dc1d7eb494844551a204b", default-features = false }
datafusion-proto = { git = "https://github.com/apache/arrow-datafusion.git", rev="9c8bdfed1a51fd17e28dc1d7eb494844551a204b" }
hashbrown = { version = "0.13.2" }
parquet = { version = "31.0.0" }

View File

@@ -92,8 +92,8 @@
| | DeduplicateExec: [state@4 ASC,city@1 ASC,time@5 ASC], metrics=[elapsed_compute=1.234ms, mem_used=0, num_dupes=2, output_rows=5, spill_count=0, spilled_bytes=0] |
| | SortPreservingMergeExec: [state@4 ASC,city@1 ASC,time@5 ASC], metrics=[elapsed_compute=1.234ms, mem_used=0, output_rows=7, spill_count=0, spilled_bytes=0] |
| | UnionExec, metrics=[elapsed_compute=1.234ms, mem_used=0, output_rows=7, spill_count=0, spilled_bytes=0] |
| | ParquetExec: limit=None, partitions={1 group: [[1/1/1/1/00000000-0000-0000-0000-000000000000.parquet]]}, predicate=state = Dictionary(Int32, Utf8("MA")), pruning_predicate=state_min@0 <= MA AND MA <= state_max@1, output_ordering=[state@4 ASC, city@1 ASC, time@5 ASC], projection=[area, city, max_temp, min_temp, state, time], metrics=[bytes_scanned=474, elapsed_compute=1.234ms, mem_used=0, num_predicate_creation_errors=0, output_rows=4, page_index_eval_time=1.234ms, page_index_rows_filtered=0, predicate_evaluation_errors=0, pushdown_eval_time=1.234ms, pushdown_rows_filtered=0, row_groups_pruned=0, spill_count=0, spilled_bytes=0, time_elapsed_opening=1.234ms, time_elapsed_processing=1.234ms, time_elapsed_scanning=1.234ms] |
| | ParquetExec: limit=None, partitions={1 group: [[1/1/1/1/00000000-0000-0000-0000-000000000001.parquet]]}, predicate=state = Dictionary(Int32, Utf8("MA")), pruning_predicate=state_min@0 <= MA AND MA <= state_max@1, output_ordering=[state@4 ASC, city@1 ASC, time@5 ASC], projection=[area, city, max_temp, min_temp, state, time], metrics=[bytes_scanned=632, elapsed_compute=1.234ms, mem_used=0, num_predicate_creation_errors=0, output_rows=3, page_index_eval_time=1.234ms, page_index_rows_filtered=0, predicate_evaluation_errors=0, pushdown_eval_time=1.234ms, pushdown_rows_filtered=3, row_groups_pruned=0, spill_count=0, spilled_bytes=0, time_elapsed_opening=1.234ms, time_elapsed_processing=1.234ms, time_elapsed_scanning=1.234ms] |
| | ParquetExec: limit=None, partitions={2 groups: [[1/1/1/1/00000000-0000-0000-0000-000000000002.parquet], [1/1/1/1/00000000-0000-0000-0000-000000000003.parquet]]}, predicate=state = Dictionary(Int32, Utf8("MA")), pruning_predicate=state_min@0 <= MA AND MA <= state_max@1, projection=[area, city, max_temp, min_temp, state, time], metrics=[bytes_scanned=1219, elapsed_compute=1.234ms, mem_used=0, num_predicate_creation_errors=0, output_rows=5, page_index_eval_time=1.234ms, page_index_rows_filtered=0, predicate_evaluation_errors=0, pushdown_eval_time=1.234ms, pushdown_rows_filtered=5, row_groups_pruned=0, spill_count=0, spilled_bytes=0, time_elapsed_opening=1.234ms, time_elapsed_processing=1.234ms, time_elapsed_scanning=1.234ms] |
| | ParquetExec: limit=None, partitions={1 group: [[1/1/1/1/00000000-0000-0000-0000-000000000000.parquet]]}, predicate=state = Dictionary(Int32, Utf8("MA")), pruning_predicate=state_min@0 <= MA AND MA <= state_max@1, output_ordering=[state@4 ASC, city@1 ASC, time@5 ASC], projection=[area, city, max_temp, min_temp, state, time], metrics=[bytes_scanned=474, elapsed_compute=1.234ms, mem_used=0, num_predicate_creation_errors=0, output_rows=4, page_index_eval_time=1.234ms, page_index_rows_filtered=0, predicate_evaluation_errors=0, pushdown_eval_time=1.234ms, pushdown_rows_filtered=0, row_groups_pruned=0, spill_count=0, spilled_bytes=0, time_elapsed_opening=1.234ms, time_elapsed_processing=1.234ms, time_elapsed_scanning_total=1.234ms, time_elapsed_scanning_until_data=1.234ms] |
| | ParquetExec: limit=None, partitions={1 group: [[1/1/1/1/00000000-0000-0000-0000-000000000001.parquet]]}, predicate=state = Dictionary(Int32, Utf8("MA")), pruning_predicate=state_min@0 <= MA AND MA <= state_max@1, output_ordering=[state@4 ASC, city@1 ASC, time@5 ASC], projection=[area, city, max_temp, min_temp, state, time], metrics=[bytes_scanned=632, elapsed_compute=1.234ms, mem_used=0, num_predicate_creation_errors=0, output_rows=3, page_index_eval_time=1.234ms, page_index_rows_filtered=0, predicate_evaluation_errors=0, pushdown_eval_time=1.234ms, pushdown_rows_filtered=3, row_groups_pruned=0, spill_count=0, spilled_bytes=0, time_elapsed_opening=1.234ms, time_elapsed_processing=1.234ms, time_elapsed_scanning_total=1.234ms, time_elapsed_scanning_until_data=1.234ms] |
| | ParquetExec: limit=None, partitions={2 groups: [[1/1/1/1/00000000-0000-0000-0000-000000000002.parquet], [1/1/1/1/00000000-0000-0000-0000-000000000003.parquet]]}, predicate=state = Dictionary(Int32, Utf8("MA")), pruning_predicate=state_min@0 <= MA AND MA <= state_max@1, projection=[area, city, max_temp, min_temp, state, time], metrics=[bytes_scanned=1219, elapsed_compute=1.234ms, mem_used=0, num_predicate_creation_errors=0, output_rows=5, page_index_eval_time=1.234ms, page_index_rows_filtered=0, predicate_evaluation_errors=0, pushdown_eval_time=1.234ms, pushdown_rows_filtered=5, row_groups_pruned=0, spill_count=0, spilled_bytes=0, time_elapsed_opening=1.234ms, time_elapsed_processing=1.234ms, time_elapsed_scanning_total=1.234ms, time_elapsed_scanning_until_data=1.234ms] |
| | |
----------
----------

View File

@@ -244,7 +244,7 @@ fn partition_metrics(metrics: MetricsSet) -> HashMap<Option<usize>, MetricsSet>
let mut hashmap = HashMap::<_, MetricsSet>::new();
for metric in metrics.iter() {
hashmap
.entry(*metric.partition())
.entry(metric.partition())
.or_default()
.push(Arc::clone(metric))
}

View File

@@ -29,7 +29,7 @@ bytes = { version = "1", features = ["std"] }
chrono = { version = "0.4", default-features = false, features = ["alloc", "clock", "iana-time-zone", "serde", "std", "winapi"] }
crossbeam-utils = { version = "0.8", features = ["std"] }
crypto-common = { version = "0.1", default-features = false, features = ["std"] }
datafusion = { git = "https://github.com/apache/arrow-datafusion.git", rev = "4a33f2708fe8e30f5f9b062a097fc6155f2db2ce", features = ["async-compression", "bzip2", "compression", "crypto_expressions", "flate2", "regex_expressions", "unicode_expressions", "xz2"] }
datafusion = { git = "https://github.com/apache/arrow-datafusion.git", rev = "9c8bdfed1a51fd17e28dc1d7eb494844551a204b", features = ["async-compression", "bzip2", "compression", "crypto_expressions", "flate2", "regex_expressions", "unicode_expressions", "xz2"] }
digest = { version = "0.10", features = ["alloc", "block-buffer", "core-api", "mac", "std", "subtle"] }
either = { version = "1", features = ["use_std"] }
fixedbitset = { version = "0.4", features = ["std"] }