fix: Failing CI on main (#24562)

* fix: build, upgrade rustc, and deps

This commit upgrades Rust to 1.75.0, the latest release. We also
upgraded our dependencies to stay up to date and to clear out any
unneeded deps from the lockfile. To make sure everything works, this
also fixes the build by updating the workspace-hack crate with
cargo hakari and by removing the `[lints] workspace = true` entry in
influxdb3_write that didn't need to be there, probably left over from a
merge issue.

With this we can build influxdb3 as our default on main, but this alone
is not enough to fix CI; the remaining failures are addressed in the
commits that follow.

* fix: warnings for influxdb3 build

This commit fixes the warnings emitted by `cargo build` when compiling
influxdb3. Mainly it adds needed lifetimes and removes unnecessary
imports and function calls.
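
As a rough sketch of the kind of lifetime fix involved (the type and
function below are made up for illustration, not taken from this diff):
idiom lints such as `elided_lifetimes_in_paths`, when enabled, flag a
type path whose lifetime parameter is left implicit, and spelling the
lifetime out makes the borrow explicit.

    struct Catalog<'a> {
        name: &'a str,
    }

    // `elided_lifetimes_in_paths` (part of `rust_2018_idioms`) warns when
    // the lifetime parameter of `Catalog` is left implicit, e.g. `&Catalog`.
    // Naming it also ties the returned `&str` to the data inside the catalog.
    fn catalog_name<'a>(catalog: &Catalog<'a>) -> &'a str {
        catalog.name
    }

    fn main() {
        let catalog = Catalog { name: "influxdb3" };
        println!("{}", catalog_name(&catalog));
    }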

* fix: all of the clippy lints

For the most part this just applies clippy's suggested fixes, with a
few exceptions:

- Crates containing generated types had additional allows added, since
  we can't control what code gets generated
- Things that couldn't be fixed automatically were fixed by hand, in
  particular adding a Send bound for traits that create a Future that
  should be Send (a sketch of that fix follows below)
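
A minimal sketch of that manual Send fix, assuming the trait hands back
a boxed future and callers need to spawn it on a multi-threaded tokio
runtime (the trait and function names here are illustrative, not the
ones in this diff):

    use std::future::Future;
    use std::pin::Pin;

    trait WriteBuffer {
        // The `Send` bound on the returned future is the manual fix: without
        // it, code that only sees `dyn WriteBuffer` cannot prove the future
        // may be moved to another thread.
        fn write_lp(&self, lp: String) -> Pin<Box<dyn Future<Output = ()> + Send + '_>>;
    }

    struct NoopBuffer;

    impl WriteBuffer for NoopBuffer {
        fn write_lp(&self, lp: String) -> Pin<Box<dyn Future<Output = ()> + Send + '_>> {
            Box::pin(async move { println!("buffered {} bytes", lp.len()) })
        }
    }

    fn spawn_write(
        buffer: &'static dyn WriteBuffer,
        lp: String,
        handle: &tokio::runtime::Handle,
    ) -> tokio::task::JoinHandle<()> {
        // `Handle::spawn` requires a `Send + 'static` future; this only
        // compiles because the trait signature promises `Send`.
        handle.spawn(buffer.write_lp(lp))
    }

    fn main() {
        static BUFFER: NoopBuffer = NoopBuffer;
        let runtime = tokio::runtime::Runtime::new().unwrap();
        let task = spawn_write(&BUFFER, "cpu,host=a usage=1".to_string(), runtime.handle());
        runtime.block_on(task).unwrap();
    }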

We also had to fix a build issue caused by the dependency upgrade by
enabling the `compat` feature of tokio-util. The workspace-hack crate
was updated accordingly.

* fix: failing test due to rust panic message change

Between rustc 1.72 and rustc 1.75 the way panic messages are displayed
changed. One of our tests depended on that output, so this commit
updates the expected error message to the new form so that the test
passes.
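
For reference, a tiny sketch of the change in default panic output
between the two toolchains (the path and line numbers below are those of
this example, not of the real test); the assertion update in the
panic_logging test later in this diff reflects the same layout change:

    fn main() {
        // rustc 1.72:
        //   thread 'main' panicked at 'offset overflow', src/main.rs:8:5
        // rustc 1.75:
        //   thread 'main' panicked at src/main.rs:8:5:
        //   offset overflow
        // Tests that string-match the panic text need the new layout.
        panic!("offset overflow");
    }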

* fix: broken cargo doc link
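
The fix here amounts to pointing an intra-doc link at an item that
actually resolves. A hedged sketch of the pattern (the trait and type
below are adapted for illustration, not copied from the diff); an
unresolvable link target in a doc comment would trip the
`rustdoc::broken_intra_doc_links` lint that this workspace denies in
several crates:

    use std::any::Any;

    trait Instrumented {
        /// Returns the value as [`Any`] so that it can be downcast to its
        /// underlying type. `[`Any`]` resolves here because the item is in
        /// scope via the `use` above.
        fn as_any(&self) -> &dyn Any;
    }

    struct Counter(u64);

    impl Instrumented for Counter {
        fn as_any(&self) -> &dyn Any {
            self
        }
    }

    fn main() {
        let counter = Counter(42);
        assert_eq!(counter.as_any().downcast_ref::<Counter>().map(|c| c.0), Some(42));
    }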

* fix: cargo formatting run

* fix: add workspace-hack to influxdb3 crates

This was the last change needed to make sure that the workspace-hack
crate CI lint would pass.

* fix: remove tests that cannot run anymore

We removed iox code from this codebase, and as a result some tests can
no longer run. This commit removes them so that we can get a green
build.
Michael Gattozzi 2024-01-09 15:11:35 -05:00 committed by GitHub
parent 5831cf8cee
commit 8ee13bca48
34 changed files with 857 additions and 811 deletions

@ -237,18 +237,12 @@ jobs:
steps:
- checkout
- rust_components
- run:
name: Download flight-sql-jdbc-driver-10.0.0.jar
command: make -C influxdb_iox/tests/jdbc_client flight-sql-jdbc-driver-10.0.0.jar
- run:
name: cargo test -p influxdb2_client
command: cargo test -p influxdb2_client
- run:
name: cargo test -p iox_catalog
command: cargo test -p iox_catalog
- run:
name: cargo test --test end_to_end
command: cargo test --test end_to_end
# Run all tests (without external dependencies, like a developer would)
test:

Cargo.lock (generated) — 1456 changed lines; diff suppressed because it
is too large.


@ -281,7 +281,12 @@ mod tests {
}
fn iter_set_bools(bools: &[bool]) -> impl Iterator<Item = usize> + '_ {
bools.iter().enumerate().filter_map(|(x, y)| y.then(|| x))
bools
.iter()
.enumerate()
// Filter out all y that are not true and then return only x
.filter(|&(_, y)| *y)
.map(|(x, _)| x)
}
#[test]


@ -128,10 +128,7 @@ impl<K: AsPrimitive<usize> + FromPrimitive + Zero> StringDictionary<K> {
}
fn hash_str(hasher: &ahash::RandomState, value: &str) -> u64 {
use std::hash::{BuildHasher, Hash, Hasher};
let mut state = hasher.build_hasher();
value.hash(&mut state);
state.finish()
hasher.hash_one(value)
}
impl StringDictionary<i32> {


@ -478,9 +478,12 @@ mod tests {
}
fn peek(&self) -> Option<(&u8, &String, &i8)> {
#[allow(clippy::map_identity)]
self.inner
.iter()
.min_by_key(|(k, _v, o)| (o, k))
// This is a false positive as this actually changes
// Option<&(u8, String, i8)> -> Option<(&u8, &String, &i8)>
.map(|(k, v, o)| (k, v, o))
}


@ -98,7 +98,7 @@ impl From<tonic::transport::Error> for Error {
let details = source
.source()
.map(|e| format!(" ({e})"))
.unwrap_or_else(|| "".to_string());
.unwrap_or_default();
Self::TransportError { source, details }
}


@ -1521,7 +1521,7 @@ impl TableSummary {
pub fn total_count(&self) -> u64 {
// Assumes that all tables have the same number of rows, so
// pick the first one
let count = self.columns.get(0).map(|c| c.total_count()).unwrap_or(0);
let count = self.columns.first().map(|c| c.total_count()).unwrap_or(0);
// Validate that the counts are consistent across columns
for c in &self.columns {


@ -2,7 +2,11 @@
// crates because of all the generated code it contains that we don't have much
// control over.
#![deny(rustdoc::broken_intra_doc_links, rustdoc::bare_urls)]
#![allow(clippy::derive_partial_eq_without_eq, clippy::needless_borrow)]
#![allow(
clippy::derive_partial_eq_without_eq,
clippy::needless_borrow,
clippy::needless_borrows_for_generic_args
)]
#![warn(unused_crate_dependencies)]
// Workaround for "unused crate" lint false positives.


@ -19,5 +19,5 @@ schema = { path = "../schema" }
serde_json = "1.0.107"
thiserror = "1.0.48"
tokio = { version = "1.32" }
tokio-util = { version = "0.7.9" }
tokio-util = { version = "0.7.9", features = ["compat"] }
workspace-hack = { version = "0.1", path = "../workspace-hack" }


@ -72,7 +72,7 @@ impl RemoteExporter {
// parquet_files are part of the same table, use the table_id
// from the first parquet_file
let table_id = parquet_files
.get(0)
.first()
.map(|parquet_file| parquet_file.table_id);
if let Some(table_id) = table_id {
self.export_table_metadata(&output_directory, table_id)


@ -39,6 +39,7 @@ tikv-jemalloc-sys = { version = "0.5.4", optional = true, features = ["unprefixe
tokio = { version = "1.32", features = ["macros", "net", "parking_lot", "rt-multi-thread", "signal", "sync", "time", "io-std"] }
tokio-util = { version = "0.7.9" }
uuid = { version = "1", features = ["v4"] }
workspace-hack = { version = "0.1", path = "../workspace-hack" }
[features]
default = ["jemalloc_replacing_malloc"]


@ -45,6 +45,7 @@ serde_json = "1.0.107"
serde_urlencoded = "0.7.0"
tower = "0.4.13"
flate2 = "1.0.27"
workspace-hack = { version = "0.1", path = "../workspace-hack" }
[dev-dependencies]
parquet_file = { path = "../parquet_file" }


@ -179,12 +179,8 @@ mod tests {
let addr = get_free_port();
let trace_header_parser = trace_http::ctx::TraceHeaderParser::new();
let metrics = Arc::new(metric::Registry::new());
let common_state = crate::CommonServerState::new(
Arc::clone(&metrics),
None,
trace_header_parser,
addr.clone(),
);
let common_state =
crate::CommonServerState::new(Arc::clone(&metrics), None, trace_header_parser, addr);
let catalog = Arc::new(influxdb3_write::catalog::Catalog::new());
let object_store: Arc<DynObjectStore> = Arc::new(object_store::memory::InMemory::new());
let parquet_store =
@ -240,7 +236,7 @@ mod tests {
"| a | 1970-01-01T00:00:00.000000123 | 1 |",
"+------+-------------------------------+-----+",
];
let actual: Vec<_> = body.split("\n").into_iter().collect();
let actual: Vec<_> = body.split('\n').collect();
assert_eq!(
expected, actual,
"\n\nexpected:\n\n{:#?}\nactual:\n\n{:#?}\n\n",
@ -251,9 +247,9 @@ mod tests {
}
pub(crate) async fn write_lp(
server: impl Into<String>,
database: impl Into<String>,
lp: impl Into<String>,
server: impl Into<String> + Send,
database: impl Into<String> + Send,
lp: impl Into<String> + Send,
authorization: Option<&str>,
) -> Response<Body> {
let server = server.into();
@ -276,9 +272,9 @@ mod tests {
}
pub(crate) async fn query(
server: impl Into<String>,
database: impl Into<String>,
query: impl Into<String>,
server: impl Into<String> + Send,
database: impl Into<String> + Send,
query: impl Into<String> + Send,
authorization: Option<&str>,
) -> Response<Body> {
let client = Client::new();


@ -5,9 +5,6 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
[lints]
workspace = true
[dependencies]
data_types = { path = "../data_types" }
influxdb-line-protocol = { path = "../influxdb_line_protocol" }
@ -16,6 +13,7 @@ iox_query = { path = "../iox_query" }
object_store = { workspace = true }
observability_deps = { path = "../observability_deps" }
schema = { path = "../schema" }
workspace-hack = { version = "0.1", path = "../workspace-hack" }
arrow = { workspace = true }


@ -98,7 +98,7 @@ fn build_node(expr: &Expr, strings_are_regex: bool) -> Result<RPCNode> {
),
Expr::Cast { expr, data_type } => match data_type {
sqlparser::ast::DataType::Custom(ident, _modifiers) => {
if let Some(Ident { value, .. }) = ident.0.get(0) {
if let Some(Ident { value, .. }) = ident.0.first() {
// See https://docs.influxdata.com/influxdb/v1.8/query_language/explore-data/#syntax
match value.as_str() {
"field" => {


@ -13,7 +13,11 @@
missing_debug_implementations,
unused_crate_dependencies
)]
#![allow(clippy::derive_partial_eq_without_eq, clippy::needless_borrow)]
#![allow(
clippy::derive_partial_eq_without_eq,
clippy::needless_borrow,
clippy::needless_borrows_for_generic_args
)]
// Workaround for "unused crate" lint false positives.
use workspace_hack as _;


@ -341,7 +341,7 @@ impl GeneratedTagSets {
let parent_has_ones = self
.has_one_values
.entry(parent_has_one_key.as_str().to_owned())
.or_insert_with(ParentToHasOnes::default);
.or_default();
let has_one_values = self.values.get(has_one.as_str()).expect(
"add_has_ones should never be called before the values collection is created",
@ -354,10 +354,7 @@ impl GeneratedTagSets {
ones_iter.next().unwrap()
});
let has_one_map = parent_has_ones
.id_to_has_ones
.entry(parent.id)
.or_insert_with(BTreeMap::new);
let has_one_map = parent_has_ones.id_to_has_ones.entry(parent.id).or_default();
has_one_map.insert(Arc::clone(&parent_has_one_key), Arc::clone(one_val));
}
}
@ -414,7 +411,7 @@ impl GeneratedTagSets {
let child_vals = self
.child_values
.entry(child_values_key(belongs_to, &spec.name))
.or_insert_with(BTreeMap::new);
.or_default();
child_vals.insert(parent.id, parent_owned);
}
self.values.insert(spec.name.to_string(), all_children);


@ -625,6 +625,7 @@ impl PartialEq for SortableSeries {
impl Eq for SortableSeries {}
impl PartialOrd for SortableSeries {
#[allow(clippy::non_canonical_partial_ord_impl)]
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.tag_vals.partial_cmp(&other.tag_vals)
}


@ -620,7 +620,7 @@ mod test {
fn optimize(plan: &LogicalPlan) -> Result<Option<LogicalPlan>> {
let optimizer = Optimizer::with_rules(vec![Arc::new(HandleGapFill)]);
optimizer.optimize_recursively(
optimizer.rules.get(0).unwrap(),
optimizer.rules.first().unwrap(),
plan,
&OptimizerContext::new(),
)


@ -89,7 +89,7 @@ impl PhysicalOptimizerRule for DedupSortOrder {
.iter()
.filter(|sort_key| {
match sort_key.get_index_of(col) {
Some(idx) if idx == 0 => {
Some(0) => {
// Column next in sort order from this chunks PoV. This is good.
true
}


@ -81,9 +81,7 @@ impl TestDatabase {
/// Add a test chunk to the database
pub fn add_chunk(&self, partition_key: &str, chunk: Arc<TestChunk>) -> &Self {
let mut partitions = self.partitions.lock();
let chunks = partitions
.entry(partition_key.to_string())
.or_insert_with(BTreeMap::new);
let chunks = partitions.entry(partition_key.to_string()).or_default();
chunks.insert(chunk.id(), chunk);
self
}


@ -295,7 +295,7 @@ impl<'a> TypeEvaluator<'a> {
name @ ("sin" | "cos" | "tan" | "atan" | "exp" | "log" | "ln" | "log2" | "log10"
| "sqrt") => {
match arg_types
.get(0)
.first()
.ok_or_else(|| error::map::query(format!("{name} expects 1 argument")))?
{
Some(
@ -311,7 +311,7 @@ impl<'a> TypeEvaluator<'a> {
// These functions require a single float as input and return a float
name @ ("asin" | "acos") => {
match arg_types
.get(0)
.first()
.ok_or_else(|| error::map::query(format!("{name} expects 1 argument")))?
{
Some(VarRefDataType::Float) | None => Ok(Some(VarRefDataType::Float)),
@ -324,7 +324,7 @@ impl<'a> TypeEvaluator<'a> {
// These functions require two numeric arguments and return a float
name @ ("atan2" | "pow") => {
let (Some(arg0), Some(arg1)) = (arg_types.get(0), arg_types.get(1)) else {
let (Some(arg0), Some(arg1)) = (arg_types.first(), arg_types.get(1)) else {
return error::query(format!("{name} expects 2 arguments"));
};
@ -348,7 +348,7 @@ impl<'a> TypeEvaluator<'a> {
// These functions return the same data type as their input
name @ ("abs" | "floor" | "ceil" | "round") => {
match arg_types
.get(0)
.first()
.cloned()
.ok_or_else(|| error::map::query(format!("{name} expects 1 argument")))?
{


@ -288,7 +288,7 @@ impl<'a> Selector<'a> {
));
}
Ok(Self::First {
field_key: Self::identifier(call.args.get(0).unwrap())?,
field_key: Self::identifier(call.args.first().unwrap())?,
})
}
@ -300,7 +300,7 @@ impl<'a> Selector<'a> {
));
}
Ok(Self::Last {
field_key: Self::identifier(call.args.get(0).unwrap())?,
field_key: Self::identifier(call.args.first().unwrap())?,
})
}
@ -312,7 +312,7 @@ impl<'a> Selector<'a> {
));
}
Ok(Self::Max {
field_key: Self::identifier(call.args.get(0).unwrap())?,
field_key: Self::identifier(call.args.first().unwrap())?,
})
}
@ -324,7 +324,7 @@ impl<'a> Selector<'a> {
));
}
Ok(Self::Min {
field_key: Self::identifier(call.args.get(0).unwrap())?,
field_key: Self::identifier(call.args.first().unwrap())?,
})
}
@ -336,7 +336,7 @@ impl<'a> Selector<'a> {
));
}
Ok(Self::Percentile {
field_key: Self::identifier(call.args.get(0).unwrap())?,
field_key: Self::identifier(call.args.first().unwrap())?,
n: Self::literal_num(call.args.get(1).unwrap())?,
})
}
@ -349,7 +349,7 @@ impl<'a> Selector<'a> {
));
}
Ok(Self::Sample {
field_key: Self::identifier(call.args.get(0).unwrap())?,
field_key: Self::identifier(call.args.first().unwrap())?,
n: Self::literal_int(call.args.get(1).unwrap())?,
})
}


@ -4,7 +4,7 @@ use arrow::datatypes::{DataType, TimeUnit};
use datafusion::common::{Result, ScalarValue};
use datafusion::logical_expr::{PartitionEvaluator, Signature, TypeSignature, Volatility};
use once_cell::sync::Lazy;
use std::borrow::Borrow;
use std::sync::Arc;
/// The name of the derivative window function.
@ -99,7 +99,7 @@ impl PartitionEvaluator for DifferencePartitionEvaluator {
}
fn delta(curr: &ScalarValue, prev: &ScalarValue) -> Result<f64> {
match (curr.borrow(), prev.borrow()) {
match (curr, prev) {
(ScalarValue::Float64(Some(curr)), ScalarValue::Float64(Some(prev))) => Ok(*curr - *prev),
(ScalarValue::Int64(Some(curr)), ScalarValue::Int64(Some(prev))) => {
Ok(*curr as f64 - *prev as f64)


@ -30,7 +30,7 @@ impl From<tonic::transport::Error> for RpcError {
let details = source
.source()
.map(|e| format!(" ({e})"))
.unwrap_or_else(|| "".to_string());
.unwrap_or_default();
Self::TransportError { source, details }
}


@ -280,7 +280,7 @@ pub trait Instrument: std::fmt::Debug + Send + Sync {
/// - call finish_metric once complete
fn report(&self, reporter: &mut dyn Reporter);
/// Returns the type as [`Any`](std::any::Any) so that it can be downcast to
/// Returns the type as [`Any`] so that it can be downcast to
/// it underlying type
fn as_any(&self) -> &dyn Any;
}


@ -260,9 +260,12 @@ mod tests {
assert_eq!(
capture.to_string(),
"level = ERROR; message = Thread panic; panic_type = \"unknown\"; panic_info = panicked at 'it's bananas', panic_logging/src/lib.rs:227:13; \n\
level = ERROR; message = Thread panic; panic_type = \"offset_overflow\"; panic_info = panicked at 'offset', panic_logging/src/lib.rs:235:13; \n\
level = ERROR; message = Thread panic; panic_type = \"offset_overflow\"; panic_info = panicked at 'offset overflow', panic_logging/src/lib.rs:244:13; "
"level = ERROR; message = Thread panic; panic_type = \"unknown\"; panic_info = panicked at panic_logging/src/lib.rs:227:13:\n\
it's bananas; \n\
level = ERROR; message = Thread panic; panic_type = \"offset_overflow\"; panic_info = panicked at panic_logging/src/lib.rs:235:13:\n\
offset; \n\
level = ERROR; message = Thread panic; panic_type = \"offset_overflow\"; panic_info = panicked at panic_logging/src/lib.rs:244:13:\n\
offset overflow; "
);
}
}


@ -1,6 +1,6 @@
mod internal;
pub use internal::{Duration, Window};
pub use internal::Duration;
use schema::TIME_DATA_TYPE;
use std::sync::Arc;


@ -1,3 +1,3 @@
[toolchain]
channel = "1.72.1"
channel = "1.75.0"
components = ["rustfmt", "clippy"]


@ -404,8 +404,8 @@ pub fn adjust_sort_key_columns(
let existing_columns_without_time = catalog_sort_key
.iter()
.map(|(col, _opts)| col)
.cloned()
.filter(|col| TIME_COLUMN_NAME != col.as_ref());
.filter(|&col| TIME_COLUMN_NAME != col.as_ref())
.cloned();
let new_columns: Vec<_> = primary_key
.iter()
.filter(|col| !catalog_sort_key.contains(col))


@ -375,7 +375,7 @@ mod tests {
} else {
s
};
let s = panic_on_stream_timeout(s, Duration::from_millis(250));
s
(panic_on_stream_timeout(s, Duration::from_millis(250))) as _
}
}


@ -130,7 +130,8 @@ impl Display for RunQuery {
}
impl IoxGetRequest {
const READ_INFO_TYPE_URL: &str = "type.googleapis.com/influxdata.iox.querier.v1.ReadInfo";
const READ_INFO_TYPE_URL: &'static str =
"type.googleapis.com/influxdata.iox.querier.v1.ReadInfo";
/// Create a new request to run the specified query
pub fn new(database: impl Into<String>, query: RunQuery, is_debug: bool) -> Self {


@ -863,7 +863,7 @@ mod tests {
assert_eq!(wal_entries.len(), 2);
let write_op_entries = wal_entries.into_iter().flatten().collect::<Vec<_>>();
assert_eq!(write_op_entries.len(), 3);
assert_matches!(write_op_entries.get(0), Some(got_op1) => {
assert_matches!(write_op_entries.first(), Some(got_op1) => {
assert_op_shape(got_op1, &w1);
});
assert_matches!(write_op_entries.get(1), Some(got_op2) => {
@ -916,7 +916,7 @@ mod tests {
// error is thrown
assert_matches!(decoder.next(), Some(Ok(batch)) => {
assert_eq!(batch.len(), 1);
assert_op_shape(batch.get(0).unwrap(), &good_write);
assert_op_shape(batch.first().unwrap(), &good_write);
});
assert_matches!(
decoder.next(),


@ -21,9 +21,10 @@ arrow = { git = "https://github.com/alamb/arrow-rs.git", rev = "7c236c06bfb78c0c
arrow-array = { git = "https://github.com/alamb/arrow-rs.git", rev = "7c236c06bfb78c0c877055c1617d9373971511a5", default-features = false, features = ["chrono-tz"] }
arrow-flight = { git = "https://github.com/alamb/arrow-rs.git", rev = "7c236c06bfb78c0c877055c1617d9373971511a5", features = ["flight-sql-experimental"] }
arrow-string = { git = "https://github.com/alamb/arrow-rs.git", rev = "7c236c06bfb78c0c877055c1617d9373971511a5", default-features = false, features = ["dyn_cmp_dict"] }
bitflags = { version = "2", default-features = false, features = ["std"] }
byteorder = { version = "1" }
bytes = { version = "1" }
chrono = { version = "0.4", default-features = false, features = ["alloc", "clock", "serde"] }
chrono = { version = "0.4", features = ["alloc", "serde"] }
clap = { version = "4", features = ["derive", "env", "string"] }
clap_builder = { version = "4", default-features = false, features = ["color", "env", "help", "std", "string", "suggestions", "usage"] }
crossbeam-utils = { version = "0.8" }
@ -44,7 +45,6 @@ futures-util = { version = "0.3", features = ["channel", "io", "sink"] }
getrandom = { version = "0.2", default-features = false, features = ["std"] }
hashbrown = { version = "0.14", features = ["raw"] }
indexmap = { version = "2" }
itertools = { version = "0.10" }
libc = { version = "0.2", features = ["extra_traits"] }
lock_api = { version = "0.4", features = ["arc_lock"] }
log = { version = "0.4", default-features = false, features = ["std"] }
@ -58,17 +58,17 @@ parking_lot = { version = "0.12", features = ["arc_lock"] }
parquet = { git = "https://github.com/alamb/arrow-rs.git", rev = "7c236c06bfb78c0c877055c1617d9373971511a5", features = ["experimental", "object_store"] }
petgraph = { version = "0.6" }
phf_shared = { version = "0.11" }
predicates = { version = "3" }
proptest = { version = "1", default-features = false, features = ["std"] }
prost = { version = "0.11" }
prost-types = { version = "0.11" }
rand = { version = "0.8", features = ["small_rng"] }
rand_core = { version = "0.6", default-features = false, features = ["std"] }
regex = { version = "1" }
regex-automata = { version = "0.3", default-features = false, features = ["dfa-onepass", "dfa-search", "hybrid", "meta", "nfa-backtrack", "perf-inline", "perf-literal", "unicode"] }
regex-syntax = { version = "0.7" }
regex-automata = { version = "0.4", default-features = false, features = ["dfa-onepass", "dfa-search", "hybrid", "meta", "nfa-backtrack", "perf-inline", "perf-literal", "unicode"] }
regex-syntax-c38e5c1d305a1b54 = { package = "regex-syntax", version = "0.8" }
regex-syntax-ca01ad9e24f5d932 = { package = "regex-syntax", version = "0.7" }
reqwest = { version = "0.11", default-features = false, features = ["json", "rustls-tls", "stream"] }
ring = { version = "0.16", features = ["std"] }
ring = { version = "0.16", default-features = false, features = ["std"] }
rustls = { version = "0.21", default-features = false, features = ["dangerous_configuration", "logging", "tls12"] }
serde = { version = "1", features = ["derive", "rc"] }
serde_json = { version = "1", features = ["raw_value"] }
@ -79,15 +79,15 @@ sqlx = { version = "0.7", features = ["postgres", "runtime-tokio-rustls", "sqlit
sqlx-core = { version = "0.7", features = ["_rt-tokio", "_tls-rustls", "any", "json", "migrate", "offline", "uuid"] }
sqlx-postgres = { version = "0.7", default-features = false, features = ["any", "json", "migrate", "offline", "uuid"] }
sqlx-sqlite = { version = "0.7", default-features = false, features = ["any", "json", "migrate", "offline", "uuid"] }
strum = { version = "0.25", features = ["derive"] }
thrift = { version = "0.17" }
tokio = { version = "1", features = ["full", "test-util", "tracing"] }
tokio = { version = "1", features = ["full", "tracing"] }
tokio-stream = { version = "0.1", features = ["fs", "net"] }
tokio-util = { version = "0.7", features = ["codec", "compat", "io"] }
tonic = { version = "0.9", features = ["tls-webpki-roots"] }
tower = { version = "0.4", features = ["balance", "buffer", "limit", "timeout", "util"] }
tracing = { version = "0.1", features = ["log", "max_level_trace", "release_max_level_trace"] }
tracing-core = { version = "0.1" }
tracing-log = { version = "0.1" }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json", "parking_lot"] }
unicode-bidi = { version = "0.3" }
unicode-normalization = { version = "0.1" }
@ -96,6 +96,7 @@ uuid = { version = "1", features = ["v4"] }
[build-dependencies]
ahash = { version = "0.8" }
bitflags = { version = "2", default-features = false, features = ["std"] }
byteorder = { version = "1" }
bytes = { version = "1" }
cc = { version = "1", default-features = false, features = ["parallel"] }
@ -116,7 +117,6 @@ hashbrown = { version = "0.14", features = ["raw"] }
heck = { version = "0.4", features = ["unicode"] }
indexmap = { version = "2" }
itertools = { version = "0.10" }
libc = { version = "0.2", features = ["extra_traits"] }
lock_api = { version = "0.4", features = ["arc_lock"] }
log = { version = "0.4", default-features = false, features = ["std"] }
md-5 = { version = "0.10" }
@ -132,9 +132,8 @@ prost-types = { version = "0.11" }
rand = { version = "0.8", features = ["small_rng"] }
rand_core = { version = "0.6", default-features = false, features = ["std"] }
regex = { version = "1" }
regex-automata = { version = "0.3", default-features = false, features = ["dfa-onepass", "dfa-search", "hybrid", "meta", "nfa-backtrack", "perf-inline", "perf-literal", "unicode"] }
regex-syntax = { version = "0.7" }
ring = { version = "0.16", features = ["std"] }
regex-automata = { version = "0.4", default-features = false, features = ["dfa-onepass", "dfa-search", "hybrid", "meta", "nfa-backtrack", "perf-inline", "perf-literal", "unicode"] }
regex-syntax-c38e5c1d305a1b54 = { package = "regex-syntax", version = "0.8" }
rustls = { version = "0.21", default-features = false, features = ["dangerous_configuration", "logging", "tls12"] }
serde = { version = "1", features = ["derive", "rc"] }
serde_json = { version = "1", features = ["raw_value"] }
@ -146,7 +145,7 @@ sqlx-postgres = { version = "0.7", default-features = false, features = ["any",
sqlx-sqlite = { version = "0.7", default-features = false, features = ["any", "json", "migrate", "offline", "uuid"] }
syn-dff4ba8e3ae991db = { package = "syn", version = "1", features = ["extra-traits", "full"] }
syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "full", "visit", "visit-mut"] }
tokio = { version = "1", features = ["full", "test-util", "tracing"] }
tokio = { version = "1", features = ["full", "tracing"] }
tokio-stream = { version = "0.1", features = ["fs", "net"] }
tracing = { version = "0.1", features = ["log", "max_level_trace", "release_max_level_trace"] }
tracing-core = { version = "0.1" }
@ -156,47 +155,49 @@ url = { version = "2" }
uuid = { version = "1", features = ["v4"] }
[target.x86_64-unknown-linux-gnu.dependencies]
bitflags = { version = "2", default-features = false, features = ["std"] }
once_cell = { version = "1", default-features = false, features = ["unstable"] }
nix = { version = "0.27", features = ["fs", "signal", "user"] }
rustls = { version = "0.21" }
spin = { version = "0.9" }
[target.x86_64-unknown-linux-gnu.build-dependencies]
bitflags = { version = "2", default-features = false, features = ["std"] }
once_cell = { version = "1", default-features = false, features = ["unstable"] }
libc = { version = "0.2", features = ["extra_traits"] }
nix = { version = "0.27", features = ["fs", "signal", "user"] }
rustls = { version = "0.21" }
spin = { version = "0.9" }
[target.x86_64-apple-darwin.dependencies]
bitflags = { version = "2", default-features = false, features = ["std"] }
once_cell = { version = "1", default-features = false, features = ["unstable"] }
nix = { version = "0.27", features = ["fs", "signal", "user"] }
rustls = { version = "0.21" }
spin = { version = "0.9" }
[target.x86_64-apple-darwin.build-dependencies]
bitflags = { version = "2", default-features = false, features = ["std"] }
once_cell = { version = "1", default-features = false, features = ["unstable"] }
libc = { version = "0.2", features = ["extra_traits"] }
nix = { version = "0.27", features = ["fs", "signal", "user"] }
rustls = { version = "0.21" }
spin = { version = "0.9" }
[target.aarch64-apple-darwin.dependencies]
bitflags = { version = "2", default-features = false, features = ["std"] }
once_cell = { version = "1", default-features = false, features = ["unstable"] }
nix = { version = "0.27", features = ["fs", "signal", "user"] }
rustls = { version = "0.21" }
spin = { version = "0.9" }
[target.aarch64-apple-darwin.build-dependencies]
bitflags = { version = "2", default-features = false, features = ["std"] }
once_cell = { version = "1", default-features = false, features = ["unstable"] }
libc = { version = "0.2", features = ["extra_traits"] }
nix = { version = "0.27", features = ["fs", "signal", "user"] }
rustls = { version = "0.21" }
spin = { version = "0.9" }
[target.x86_64-pc-windows-msvc.dependencies]
once_cell = { version = "1", default-features = false, features = ["unstable"] }
rustls = { version = "0.21" }
scopeguard = { version = "1" }
winapi = { version = "0.3", default-features = false, features = ["basetsd", "cfg", "combaseapi", "consoleapi", "errhandlingapi", "evntrace", "fileapi", "handleapi", "heapapi", "ifdef", "in6addr", "inaddr", "ioapiset", "iphlpapi", "lmaccess", "lmapibuf", "lmcons", "memoryapi", "minwinbase", "minwindef", "netioapi", "ntlsa", "ntsecapi", "objidl", "oleauto", "pdh", "powerbase", "processenv", "psapi", "rpcdce", "sddl", "securitybaseapi", "shellapi", "std", "stringapiset", "synchapi", "sysinfoapi", "wbemcli", "winbase", "wincon", "windef", "winerror", "winioctl", "winnt", "winsock2", "winuser", "ws2ipdef", "ws2tcpip", "wtypesbase"] }
windows-sys = { version = "0.48", features = ["Win32_Foundation", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Shell"] }
spin = { version = "0.9" }
winapi = { version = "0.3", default-features = false, features = ["cfg", "combaseapi", "consoleapi", "errhandlingapi", "evntrace", "fileapi", "handleapi", "heapapi", "ifdef", "in6addr", "inaddr", "ioapiset", "iphlpapi", "lmaccess", "lmapibuf", "lmcons", "memoryapi", "minwinbase", "minwindef", "netioapi", "ntlsa", "ntsecapi", "ntstatus", "objidl", "oleauto", "pdh", "powerbase", "processenv", "psapi", "rpcdce", "sddl", "securitybaseapi", "shellapi", "std", "synchapi", "sysinfoapi", "wbemcli", "winbase", "wincon", "windef", "winerror", "winioctl", "winnt", "winsock2", "wtypesbase"] }
windows-sys-b21d60becc0929df = { package = "windows-sys", version = "0.52", features = ["Win32_Foundation", "Win32_NetworkManagement_IpHelper", "Win32_Networking_WinSock", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_Threading", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell"] }
windows-sys-c8eced492e86ede7 = { package = "windows-sys", version = "0.48", features = ["Win32_Foundation", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Shell"] }
[target.x86_64-pc-windows-msvc.build-dependencies]
once_cell = { version = "1", default-features = false, features = ["unstable"] }
rustls = { version = "0.21" }
scopeguard = { version = "1" }
winapi = { version = "0.3", default-features = false, features = ["basetsd", "cfg", "combaseapi", "consoleapi", "errhandlingapi", "evntrace", "fileapi", "handleapi", "heapapi", "ifdef", "in6addr", "inaddr", "ioapiset", "iphlpapi", "lmaccess", "lmapibuf", "lmcons", "memoryapi", "minwinbase", "minwindef", "netioapi", "ntlsa", "ntsecapi", "objidl", "oleauto", "pdh", "powerbase", "processenv", "psapi", "rpcdce", "sddl", "securitybaseapi", "shellapi", "std", "stringapiset", "synchapi", "sysinfoapi", "wbemcli", "winbase", "wincon", "windef", "winerror", "winioctl", "winnt", "winsock2", "winuser", "ws2ipdef", "ws2tcpip", "wtypesbase"] }
windows-sys = { version = "0.48", features = ["Win32_Foundation", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Shell"] }
spin = { version = "0.9" }
windows-sys-b21d60becc0929df = { package = "windows-sys", version = "0.52", features = ["Win32_Foundation", "Win32_NetworkManagement_IpHelper", "Win32_Networking_WinSock", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_Threading", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell"] }
windows-sys-c8eced492e86ede7 = { package = "windows-sys", version = "0.48", features = ["Win32_Foundation", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Shell"] }
### END HAKARI SECTION