chore: Update datafusion and `arrow`/`parquet`/`arrow-flight` to `19.0.0` (#5229)

* chore: Update datafusion and `arrow`/`parquet`/`arrow-flight` to `19.0.0`

* chore: Run cargo hakari tasks

* fix: Update for API changes

* fix: clippy

Co-authored-by: CircleCI[bot] <circleci@influxdata.com>
Co-authored-by: kodiakhq[bot] <49736102+kodiakhq[bot]@users.noreply.github.com>
Andrew Lamb 2022-07-28 04:10:47 -04:00 committed by GitHub
parent fcce00bf09
commit 9215a534d0
29 changed files with 76 additions and 104 deletions

Cargo.lock (generated)
View File

@ -96,9 +96,9 @@ checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
[[package]]
name = "arrow"
version = "18.0.0"
version = "19.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5f89d2bc04fa746ee395d20c4cbfa508e4cce5c00bae816f0fae434fcfb9853"
checksum = "89b7e88e4739c3616cae75adce6660c9c1a80f2660545eb77afbe0e4a0f048a0"
dependencies = [
"ahash",
"bitflags",
@ -123,9 +123,9 @@ dependencies = [
[[package]]
name = "arrow-flight"
version = "18.0.0"
version = "19.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30421ea30c815daae635003d8a40875922a627d55bc098a581ab215f7b7f5d35"
checksum = "18eadbc2d726572e874ccef2bdd0115bc29ef237d3c7b9b7f4415fda3e198974"
dependencies = [
"arrow",
"base64",
@ -1118,7 +1118,7 @@ dependencies = [
[[package]]
name = "datafusion"
version = "10.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c#7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=cd3164918b0415b072a3109f3ccf654da7518ec1#cd3164918b0415b072a3109f3ccf654da7518ec1"
dependencies = [
"ahash",
"arrow",
@ -1157,7 +1157,7 @@ dependencies = [
[[package]]
name = "datafusion-common"
version = "10.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c#7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=cd3164918b0415b072a3109f3ccf654da7518ec1#cd3164918b0415b072a3109f3ccf654da7518ec1"
dependencies = [
"arrow",
"object_store",
@ -1169,7 +1169,7 @@ dependencies = [
[[package]]
name = "datafusion-expr"
version = "10.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c#7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=cd3164918b0415b072a3109f3ccf654da7518ec1#cd3164918b0415b072a3109f3ccf654da7518ec1"
dependencies = [
"ahash",
"arrow",
@ -1180,7 +1180,7 @@ dependencies = [
[[package]]
name = "datafusion-optimizer"
version = "10.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c#7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=cd3164918b0415b072a3109f3ccf654da7518ec1#cd3164918b0415b072a3109f3ccf654da7518ec1"
dependencies = [
"arrow",
"async-trait",
@ -1195,7 +1195,7 @@ dependencies = [
[[package]]
name = "datafusion-physical-expr"
version = "10.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c#7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=cd3164918b0415b072a3109f3ccf654da7518ec1#cd3164918b0415b072a3109f3ccf654da7518ec1"
dependencies = [
"ahash",
"arrow",
@ -1219,7 +1219,7 @@ dependencies = [
[[package]]
name = "datafusion-proto"
version = "10.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c#7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=cd3164918b0415b072a3109f3ccf654da7518ec1#cd3164918b0415b072a3109f3ccf654da7518ec1"
dependencies = [
"arrow",
"datafusion 10.0.0",
@ -1232,7 +1232,7 @@ dependencies = [
[[package]]
name = "datafusion-row"
version = "10.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c#7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=cd3164918b0415b072a3109f3ccf654da7518ec1#cd3164918b0415b072a3109f3ccf654da7518ec1"
dependencies = [
"arrow",
"datafusion-common",
@ -1243,7 +1243,7 @@ dependencies = [
[[package]]
name = "datafusion-sql"
version = "10.0.0"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c#7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c"
source = "git+https://github.com/apache/arrow-datafusion.git?rev=cd3164918b0415b072a3109f3ccf654da7518ec1#cd3164918b0415b072a3109f3ccf654da7518ec1"
dependencies = [
"ahash",
"arrow",
@ -3366,9 +3366,9 @@ dependencies = [
[[package]]
name = "parquet"
version = "18.0.0"
version = "19.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65f61759af307fad711e7656c705218402a8a79b776c893c20fef96e8ffd2a7d"
checksum = "2cfcf237362047888b342e4f0e213a9b303133b085853e447f2c58e65e00099d"
dependencies = [
"arrow",
"base64",
@ -4590,7 +4590,6 @@ version = "1.0.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7"
dependencies = [
"indexmap",
"itoa 1.0.2",
"ryu",
"serde",

View File

@ -7,7 +7,7 @@ description = "Apache Arrow utilities"
[dependencies]
ahash = { version = "0.7.5", default-features = false }
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
# used by arrow anyway (needed for printing workaround)
chrono = { version = "0.4", default-features = false }
comfy-table = { version = "6.0", default-features = false }
@ -18,5 +18,5 @@ snafu = "0.7"
workspace-hack = { path = "../workspace-hack"}
[dev-dependencies]
arrow-flight = "18.0.0"
arrow-flight = "19.0.0"
rand = "0.8.3"

View File

@ -401,7 +401,7 @@ mod tests {
#[test]
fn test_deep_clone_array() {
let mut builder = UInt32Array::builder(1000);
builder.append_slice(&[1, 2, 3, 4, 5, 6]).unwrap();
builder.append_slice(&[1, 2, 3, 4, 5, 6]);
let array: ArrayRef = Arc::new(builder.finish());
assert_eq!(array.len(), 6);
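
This hunk shows the arrow 19 change that drives most of the source edits in this commit: the typed array builders became infallible, so `append_slice`/`append_value`/`append_null` no longer return `Result` and the old `.unwrap()`/`.expect()` calls are simply deleted. A minimal standalone sketch of the new calling convention (values are illustrative):

    use std::sync::Arc;

    use arrow::array::{Array, ArrayRef, UInt32Array};

    fn main() {
        let mut builder = UInt32Array::builder(6);
        // Returns () as of arrow 19; nothing to unwrap.
        builder.append_slice(&[1, 2, 3, 4, 5, 6]);
        let array: ArrayRef = Arc::new(builder.finish());
        assert_eq!(array.len(), 6);
    }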

View File

@ -5,7 +5,7 @@ authors = ["Luke Bond <luke.n.bond@gmail.com>"]
edition = "2021"
[dependencies]
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
async-trait = "0.1.56"
backoff = { path = "../backoff" }
bytes = "1.2"

View File

@ -9,6 +9,6 @@ description = "Re-exports datafusion at a specific version"
# Rename to workaround doctest bug
# Turn off optional datafusion features (e.g. don't get support for crypto functions or avro)
upstream = { git = "https://github.com/apache/arrow-datafusion.git", rev="7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c", default-features = false, package = "datafusion" }
datafusion-proto = { git = "https://github.com/apache/arrow-datafusion.git", rev="7b0f2f846a7c8c2ffee2a4f29772cf3527a8d92c" }
upstream = { git = "https://github.com/apache/arrow-datafusion.git", rev="cd3164918b0415b072a3109f3ccf654da7518ec1", default-features = false, package = "datafusion" }
datafusion-proto = { git = "https://github.com/apache/arrow-datafusion.git", rev="cd3164918b0415b072a3109f3ccf654da7518ec1" }
workspace-hack = { path = "../workspace-hack"}
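
Every workspace crate consumes DataFusion through this shim, so bumping the pinned `rev` here moves the entire tree at once. The shim is presumably little more than a rename plus re-export; a sketch of what its lib.rs plausibly contains (illustrative, not the crate's verbatim source):

    // Forward the pinned git dependency under the usual `datafusion` name.
    // The `package = "datafusion"` rename in the manifest above exists only
    // to work around a doctest bug, per the comment there.
    pub use upstream::*;

    // Assumed: the proto crate is re-exported alongside for consumers.
    pub use datafusion_proto;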

View File

@ -37,7 +37,7 @@ trogging = { path = "../trogging", default-features = false, features = ["clap"]
# Crates.io dependencies, in alphabetical order
ansi_term = "0.12"
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
backtrace = "0.3"
bytes = "1.2"
clap = { version = "3", features = ["derive", "env"] }

View File

@ -20,8 +20,8 @@ mutable_batch_lp = { path = "../mutable_batch_lp", optional = true }
mutable_batch_pb = { path = "../mutable_batch_pb", optional = true }
# Crates.io dependencies, in alphabetical order
arrow = { version = "18.0.0", optional = true }
arrow-flight = { version = "18.0.0", optional = true }
arrow = { version = "19.0.0", optional = true }
arrow-flight = { version = "19.0.0", optional = true }
bytes = "1.2"
futures-util = { version = "0.3", optional = true }
prost = "0.10"

View File

@ -5,8 +5,8 @@ authors = ["Nga Tran <nga-tran@live.com>"]
edition = "2021"
[dependencies]
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow-flight = "18.0.0"
arrow = { version = "19.0.0", features = ["prettyprint"] }
arrow-flight = "19.0.0"
arrow_util = { path = "../arrow_util" }
async-trait = "0.1.56"
backoff = { path = "../backoff" }

View File

@ -14,7 +14,7 @@ description = "IOx Query Interface and Executor"
# 2. Allow for query logic testing without bringing in all the storage systems.
[dependencies] # In alphabetical order
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
arrow_util = { path = "../arrow_util" }
async-trait = "0.1"
chrono = { version = "0.4", default-features = false }

View File

@ -265,7 +265,7 @@ pub trait ExecutionContextProvider {
#[cfg(test)]
mod tests {
use arrow::{
array::{ArrayRef, Int64Array, StringBuilder},
array::{ArrayRef, Int64Array, StringArray},
datatypes::{DataType, Field, Schema, SchemaRef},
};
use datafusion::{
@ -379,10 +379,8 @@ mod tests {
// Ensure that nulls in the output set are handled reasonably
// (error, rather than silently ignored)
let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Utf8, true)]));
let mut builder = StringBuilder::new(2);
builder.append_value("foo").unwrap();
builder.append_null().unwrap();
let data = Arc::new(builder.finish());
let array = StringArray::from_iter(vec![Some("foo"), None]);
let data = Arc::new(array);
let batch = RecordBatch::try_new(Arc::clone(&schema), vec![data])
.expect("created new record batch");
let scan = make_plan(schema, vec![batch]);
@ -465,11 +463,8 @@ mod tests {
}
fn to_string_array(strs: &[&str]) -> ArrayRef {
let mut builder = StringBuilder::new(strs.len());
for s in strs {
builder.append_value(s).expect("appending string");
}
Arc::new(builder.finish())
let array: StringArray = strs.iter().map(|s| Some(*s)).collect();
Arc::new(array)
}
// creates a DataFusion plan that reads the RecordBatches into memory
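
Both rewrites above use the arrow 19 idiom of building arrays through `FromIterator` rather than the old fallible `StringBuilder` calls, which also makes nullability explicit in the element type. A self-contained sketch of the two patterns:

    use std::sync::Arc;

    use arrow::array::{Array, ArrayRef, StringArray};

    fn main() {
        // Nullable input: each element is an Option<&str>.
        let array = StringArray::from_iter(vec![Some("foo"), None]);
        assert_eq!(array.len(), 2);
        assert!(array.is_null(1));

        // All-valid input: wrap each &str in Some and collect.
        let strs = ["foo", "bar"];
        let array: StringArray = strs.iter().map(|s| Some(*s)).collect();
        let _data: ArrayRef = Arc::new(array);
    }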

View File

@ -6,7 +6,7 @@ edition = "2021"
description = "IOx test utils and tests"
[dependencies]
arrow = "18.0.0"
arrow = "19.0.0"
bytes = "1.2"
data_types = { path = "../data_types" }
datafusion = { path = "../datafusion" }

View File

@ -23,7 +23,7 @@ trace = { path = "../trace" }
write_buffer = { path = "../write_buffer" }
# Crates.io dependencies, in alphabetical order
arrow-flight = "18.0.0"
arrow-flight = "19.0.0"
async-trait = "0.1"
hyper = "0.14"
thiserror = "1.0.31"

View File

@ -5,7 +5,7 @@ edition = "2021"
description = "A mutable arrow RecordBatch"
[dependencies]
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
arrow_util = { path = "../arrow_util" }
chrono = { version = "0.4", default-features = false }
data_types = { path = "../data_types" }

View File

@ -5,11 +5,11 @@ authors = ["Andrew Lamb <andrew@nerdnetworks.org>"]
edition = "2021"
[dependencies] # In alphabetical order
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
influxdb_tsm = { path = "../influxdb_tsm" }
schema = { path = "../schema" }
snafu = "0.7"
parquet = "18.0.0"
parquet = "19.0.0"
workspace-hack = { path = "../workspace-hack"}
[dev-dependencies] # In alphabetical order

View File

@ -5,7 +5,7 @@ authors = ["Nga Tran <nga-tran@live.com>"]
edition = "2021"
[dependencies] # In alphabetical order
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
base64 = "0.13"
bytes = "1.2"
data_types = { path = "../data_types" }
@ -17,7 +17,7 @@ iox_time = { path = "../iox_time" }
object_store = "0.3.0"
observability_deps = { path = "../observability_deps" }
parking_lot = "0.12"
parquet = {version = "18.0.0", features = ["experimental"]}
parquet = {version = "19.0.0", features = ["experimental"]}
parquet-format = "4.0"
pbjson-types = "0.3"
predicate = { path = "../predicate" }

View File

@ -959,7 +959,7 @@ fn extract_iox_statistics(
mod tests {
use super::*;
use arrow::{
array::{ArrayRef, StringBuilder, TimestampNanosecondBuilder},
array::{ArrayRef, StringArray, TimestampNanosecondArray},
record_batch::RecordBatch,
};
use data_types::CompactionLevel;
@ -1010,9 +1010,8 @@ mod tests {
sort_key: None,
};
let mut builder = StringBuilder::new(1);
builder.append_value("bananas").expect("appending string");
let data: ArrayRef = Arc::new(builder.finish());
let array = StringArray::from_iter([Some("bananas")]);
let data: ArrayRef = Arc::new(array);
let timestamps = to_timestamp_array(&[1647695292000000000]);
@ -1077,10 +1076,7 @@ mod tests {
}
fn to_timestamp_array(timestamps: &[i64]) -> ArrayRef {
let mut builder = TimestampNanosecondBuilder::new(timestamps.len());
builder
.append_slice(timestamps)
.expect("failed to append timestamp values");
Arc::new(builder.finish())
let array: TimestampNanosecondArray = timestamps.iter().map(|v| Some(*v)).collect();
Arc::new(array)
}
}
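
The same `FromIterator` migration applies to primitive arrays: `TimestampNanosecondArray` collects from `Option<i64>` nanosecond values, replacing the fallible `append_slice` on the builder. A runnable sketch of the helper above:

    use std::sync::Arc;

    use arrow::array::{Array, ArrayRef, TimestampNanosecondArray};

    fn to_timestamp_array(timestamps: &[i64]) -> ArrayRef {
        // Collect Option<i64> values; no builder and no Result.
        let array: TimestampNanosecondArray = timestamps.iter().map(|v| Some(*v)).collect();
        Arc::new(array)
    }

    fn main() {
        let array = to_timestamp_array(&[1_647_695_292_000_000_000]);
        assert_eq!(array.len(), 1);
    }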

View File

@ -160,7 +160,7 @@ fn writer_props(meta: &IoxMetadata) -> Result<WriterProperties, prost::EncodeErr
mod tests {
use super::*;
use crate::metadata::IoxParquetMetaData;
use arrow::array::{ArrayRef, StringBuilder};
use arrow::array::{ArrayRef, StringArray};
use bytes::Bytes;
use data_types::{
CompactionLevel, NamespaceId, PartitionId, SequenceNumber, SequencerId, TableId,
@ -227,10 +227,7 @@ mod tests {
}
fn to_string_array(strs: &[&str]) -> ArrayRef {
let mut builder = StringBuilder::new(strs.len());
for s in strs {
builder.append_value(s).expect("appending string");
}
Arc::new(builder.finish())
let array: StringArray = strs.iter().map(|s| Some(*s)).collect();
Arc::new(array)
}
}

View File

@ -382,7 +382,7 @@ fn project_for_parquet_reader(
#[cfg(test)]
mod tests {
use super::*;
use arrow::array::{ArrayRef, Int64Builder, StringBuilder};
use arrow::array::{ArrayRef, Int64Array, StringArray};
use data_types::{
CompactionLevel, NamespaceId, PartitionId, SequenceNumber, SequencerId, TableId,
};
@ -723,19 +723,13 @@ mod tests {
}
fn to_string_array(strs: &[&str]) -> ArrayRef {
let mut builder = StringBuilder::new(strs.len());
for s in strs {
builder.append_value(s).expect("appending string");
}
Arc::new(builder.finish())
let array: StringArray = strs.iter().map(|s| Some(*s)).collect();
Arc::new(array)
}
fn to_int_array(vals: &[i64]) -> ArrayRef {
let mut builder = Int64Builder::new(vals.len());
for x in vals {
builder.append_value(*x).expect("appending string");
}
Arc::new(builder.finish())
let array: Int64Array = vals.iter().map(|v| Some(*v)).collect();
Arc::new(array)
}
fn meta() -> IoxMetadata {
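
Since none of these helpers ever produce nulls, the `Some(..)` wrapping exists only to satisfy `FromIterator`. Assuming `PrimitiveArray::from_iter_values` is available in this arrow release (it takes plain values and builds an array without a null bitmap), the all-valid case can be written more directly; a hedged sketch:

    use std::sync::Arc;

    use arrow::array::{Array, ArrayRef, Int64Array};

    fn main() {
        // from_iter_values skips the Option wrapper entirely (assumed API).
        let array: ArrayRef = Arc::new(Int64Array::from_iter_values([1i64, 2, 3]));
        assert_eq!(array.null_count(), 0);
    }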

View File

@ -1,7 +1,7 @@
use std::{collections::HashMap, sync::Arc};
use arrow::{
array::{ArrayRef, StringBuilder, TimestampNanosecondBuilder},
array::{ArrayRef, StringArray, TimestampNanosecondArray},
record_batch::RecordBatch,
};
use data_types::{
@ -31,7 +31,7 @@ async fn test_decoded_iox_metadata() {
"some_field",
to_string_array(&["bananas", "platanos", "manzana"]),
),
("null_field", null_array(3)),
("null_field", null_string_array(3)),
];
// And the metadata the batch would be encoded with if it came through the
@ -181,7 +181,7 @@ async fn test_decoded_many_columns_with_null_cols_iox_metadata() {
// First column is time
data.push((TIME_COLUMN_NAME.to_string(), to_timestamp_array(&time_arr)));
// Second column contains all nulls
data.push(("column_name_1".to_string(), null_array(num_rows)));
data.push(("column_name_1".to_string(), null_string_array(num_rows)));
// Names of other columns
fn make_col_name(i: usize) -> String {
"column_name_".to_string() + i.to_string().as_str()
@ -345,25 +345,16 @@ async fn test_derive_parquet_file_params() {
}
fn to_string_array(strs: &[&str]) -> ArrayRef {
let mut builder = StringBuilder::new(strs.len());
for s in strs {
builder.append_value(s).expect("appending string");
}
Arc::new(builder.finish())
let array: StringArray = strs.iter().map(|s| Some(*s)).collect();
Arc::new(array)
}
fn to_timestamp_array(timestamps: &[i64]) -> ArrayRef {
let mut builder = TimestampNanosecondBuilder::new(timestamps.len());
builder
.append_slice(timestamps)
.expect("failed to append timestamp values");
Arc::new(builder.finish())
let array: TimestampNanosecondArray = timestamps.iter().map(|v| Some(*v)).collect();
Arc::new(array)
}
fn null_array(num: usize) -> ArrayRef {
let mut builder = StringBuilder::new(num);
for _i in 0..num {
builder.append_null().expect("failed to append null values");
}
Arc::new(builder.finish())
fn null_string_array(num: usize) -> ArrayRef {
let array: StringArray = std::iter::repeat(None as Option<&str>).take(num).collect();
Arc::new(array)
}
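
For an all-null column the iterator form needs an explicit element type, since there is no `Some` value to infer it from; that is what the `None as Option<&str>` cast pins down. A standalone sketch:

    use std::sync::Arc;

    use arrow::array::{Array, ArrayRef, StringArray};

    fn null_string_array(num: usize) -> ArrayRef {
        // repeat(None) alone is untyped; the cast selects the &str impl.
        let array: StringArray = std::iter::repeat(None as Option<&str>).take(num).collect();
        Arc::new(array)
    }

    fn main() {
        let array = null_string_array(3);
        assert_eq!(array.null_count(), 3);
    }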

View File

@ -4,7 +4,7 @@ version = "0.1.0"
edition = "2021"
[dependencies]
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
chrono = { version = "0.4", default-features = false }
data_types = { path = "../data_types" }
datafusion = { path = "../datafusion" }

View File

@ -4,7 +4,7 @@ version = "0.1.0"
edition = "2021"
[dependencies]
arrow = "18.0.0"
arrow = "19.0.0"
async-trait = "0.1.56"
backoff = { path = "../backoff" }
cache_system = { path = "../cache_system" }

View File

@ -6,7 +6,7 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
chrono = { version = "0.4", default-features = false }
datafusion = { path = "../datafusion" }
itertools = "0.10.2"

View File

@ -6,7 +6,7 @@ edition = "2021"
description = "Tests of the query engine against different database configurations"
[dependencies]
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
arrow_util = { path = "../arrow_util" }
async-trait = "0.1"
backoff = { path = "../backoff" }
@ -33,7 +33,7 @@ workspace-hack = { path = "../workspace-hack"}
parquet_file = { version = "0.1.0", path = "../parquet_file" }
[dev-dependencies]
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
arrow_util = { path = "../arrow_util" }
snafu = "0.7"
tempfile = "3.1.0"

View File

@ -11,7 +11,7 @@ edition = "2021"
# 2. Keep change/compile/link time down during development when working on just this crate
[dependencies] # In alphabetical order
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
arrow_util = { path = "../arrow_util" }
croaring = "0.6"
data_types = { path = "../data_types" }

View File

@ -6,7 +6,7 @@ edition = "2021"
description = "IOx Schema definition"
[dependencies]
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
hashbrown = "0.12"
indexmap = { version = "1.9", features = ["std"] }
itertools = "0.10.1"

View File

@ -18,8 +18,8 @@ trace = { path = "../trace"}
tracker = { path = "../tracker" }
# Crates.io dependencies, in alphabetical order
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow-flight = "18.0.0"
arrow = { version = "19.0.0", features = ["prettyprint"] }
arrow-flight = "19.0.0"
bytes = "1.2"
futures = "0.3"
pin-project = "1.0"

View File

@ -18,7 +18,7 @@ trace = { path = "../trace"}
tracker = { path = "../tracker" }
# Crates.io dependencies, in alphabetical order
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
async-trait = "0.1"
futures = "0.3"
pin-project = "1.0"

View File

@ -4,7 +4,7 @@ version = "0.1.0"
edition = "2021"
[dependencies] # In alphabetical order
arrow = { version = "18.0.0", features = ["prettyprint"] }
arrow = { version = "19.0.0", features = ["prettyprint"] }
arrow_util = { path = "../arrow_util" }
assert_cmd = "2.0.2"
bytes = "1.2"

View File

@ -14,7 +14,7 @@ publish = false
### BEGIN HAKARI SECTION
[dependencies]
ahash = { version = "0.7", features = ["std"] }
arrow = { version = "18", features = ["comfy-table", "csv", "csv_crate", "flatbuffers", "ipc", "prettyprint", "rand", "test_utils"] }
arrow = { version = "19", features = ["comfy-table", "csv", "csv_crate", "flatbuffers", "ipc", "prettyprint", "rand", "test_utils"] }
base64 = { version = "0.13", features = ["std"] }
bitflags = { version = "1" }
byteorder = { version = "1", features = ["std"] }
@ -44,7 +44,7 @@ num-integer = { version = "0.1", default-features = false, features = ["i128", "
num-traits = { version = "0.2", features = ["i128", "libm", "std"] }
object_store = { git = "https://github.com/influxdata/object_store_rs", rev = "3c51870ac41a90942c2e45bb499a893d514ed1da", default-features = false, features = ["aws", "azure", "azure_core", "azure_storage", "azure_storage_blobs", "base64", "gcp", "hyper", "hyper-rustls", "reqwest", "rusoto_core", "rusoto_credential", "rusoto_s3", "rusoto_sts", "rustls-pemfile", "serde", "serde_json"] }
once_cell = { version = "1", features = ["alloc", "parking_lot", "parking_lot_core", "race", "std"] }
parquet = { version = "18", features = ["arrow", "async", "base64", "brotli", "experimental", "flate2", "futures", "lz4", "snap", "tokio", "zstd"] }
parquet = { version = "19", features = ["arrow", "async", "base64", "brotli", "experimental", "flate2", "futures", "lz4", "snap", "tokio", "zstd"] }
predicates = { version = "2", features = ["diff", "difflib", "float-cmp", "normalize-line-endings", "regex"] }
prost = { version = "0.10", features = ["prost-derive", "std"] }
prost-types = { version = "0.10", features = ["std"] }
@ -55,7 +55,7 @@ regex-syntax = { version = "0.6", features = ["unicode", "unicode-age", "unicode
reqwest = { version = "0.11", default-features = false, features = ["__rustls", "__tls", "hyper-rustls", "json", "rustls", "rustls-pemfile", "rustls-tls", "rustls-tls-webpki-roots", "serde_json", "stream", "tokio-rustls", "tokio-util", "webpki-roots"] }
ring = { version = "0.16", features = ["alloc", "dev_urandom_fallback", "once_cell", "std"] }
serde = { version = "1", features = ["derive", "rc", "serde_derive", "std"] }
serde_json = { version = "1", features = ["indexmap", "preserve_order", "raw_value", "std"] }
serde_json = { version = "1", features = ["raw_value", "std"] }
sha2 = { version = "0.10", features = ["std"] }
smallvec = { version = "1", default-features = false, features = ["union"] }
sqlx = { version = "0.6", features = ["_rt-tokio", "json", "macros", "migrate", "postgres", "runtime-tokio-rustls", "sqlx-macros", "tls", "uuid"] }
@ -111,7 +111,7 @@ regex-syntax = { version = "0.6", features = ["unicode", "unicode-age", "unicode
ring = { version = "0.16", features = ["alloc", "dev_urandom_fallback", "once_cell", "std"] }
serde = { version = "1", features = ["derive", "rc", "serde_derive", "std"] }
serde_derive = { version = "1" }
serde_json = { version = "1", features = ["indexmap", "preserve_order", "raw_value", "std"] }
serde_json = { version = "1", features = ["raw_value", "std"] }
sha2 = { version = "0.10", features = ["std"] }
smallvec = { version = "1", default-features = false, features = ["union"] }
sqlx-core = { version = "0.6", default-features = false, features = ["_rt-tokio", "_tls-rustls", "any", "base64", "crc", "dirs", "hkdf", "hmac", "json", "md-5", "migrate", "postgres", "rand", "runtime-tokio-rustls", "rustls", "rustls-pemfile", "serde", "serde_json", "sha-1", "sha2", "tokio-stream", "uuid", "webpki-roots", "whoami"] }