refactor: Clean up some future clippy warnings from nightly (#3892)

* refactor: clean up new clippy lints

* refactor: complete other cleanups

* fix: ignore overzealous clippy

* fix: re-remove old code
Andrew Lamb 2022-03-03 14:14:27 -05:00 committed by GitHub
parent e304613546
commit 677a272095
11 changed files with 32 additions and 34 deletions


@@ -9,12 +9,12 @@ use std::collections::HashMap;
 impl Client {
     /// List all Labels
     pub async fn labels(&self) -> Result<LabelsResponse, RequestError> {
-        Ok(self.get_labels(None).await?)
+        self.get_labels(None).await
     }
 
     /// List all Labels by organization ID
     pub async fn labels_by_org(&self, org_id: &str) -> Result<LabelsResponse, RequestError> {
-        Ok(self.get_labels(Some(org_id)).await?)
+        self.get_labels(Some(org_id)).await
     }
 
     async fn get_labels(&self, org_id: Option<&str>) -> Result<LabelsResponse, RequestError> {
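
The two hunks above drop redundant Ok(..?) wrappers: each wrapped expression already evaluates to the function's Result type, so wrapping it in Ok() and immediately re-raising with ? changed nothing. This is the pattern clippy's needless_question_mark lint flags. A minimal standalone sketch (hypothetical function, not from this repo):

fn parse_port(s: &str) -> Result<u16, std::num::ParseIntError> {
    // Before: Ok(s.trim().parse()?) -- flagged by clippy::needless_question_mark
    // After: return the Result directly.
    s.trim().parse()
}

fn main() {
    assert_eq!(parse_port(" 8080 "), Ok(8080));
}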


@@ -55,7 +55,7 @@ impl Client {
         let body = Body::wrap_stream(body);
-        Ok(self.write_line_protocol(org, bucket, body).await?)
+        self.write_line_protocol(org, bucket, body).await
     }
 }


@@ -54,15 +54,14 @@ impl Client {
     /// Get information about a specific operation
     pub async fn get_operation(&mut self, id: usize) -> Result<IoxOperation, Error> {
-        Ok(self
-            .inner
+        self.inner
             .get_operation(GetOperationRequest {
                 name: id.to_string(),
             })
             .await?
             .into_inner()
             .try_into()
-            .map_err(Error::InvalidResponse)?)
+            .map_err(Error::InvalidResponse)
     }
 
     /// Cancel a given operation
@@ -83,8 +82,7 @@ impl Client {
         id: usize,
         timeout: Option<std::time::Duration>,
     ) -> Result<IoxOperation, Error> {
-        Ok(self
-            .inner
+        self.inner
             .wait_operation(WaitOperationRequest {
                 name: id.to_string(),
                 timeout: timeout.map(Into::into),
@@ -92,6 +90,6 @@ impl Client {
             .await?
             .into_inner()
             .try_into()
-            .map_err(Error::InvalidResponse)?)
+            .map_err(Error::InvalidResponse)
     }
 }
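
Same lint, applied to builder-style chains: removing the Ok( wrapper lets the chain start directly at self.inner, and the trailing ?) after map_err goes with it, because map_err already produces the Result the function returns.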


@@ -66,6 +66,8 @@ where
     }
 }
 
+// &mut Cow is used to avoid a copy, so allow it
+#[allow(clippy::ptr_arg)]
 async fn validate_mutable_batch<R>(
     mb: &MutableBatch,
     table_name: &str,
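
This allow is presumably the "overzealous clippy" mentioned in the commit message: the stricter nightly ptr_arg lint flags Cow parameters and suggests borrowing the inner type directly, but that would defeat the purpose of passing &mut Cow, which is mutated in place and cloned only when a change is actually needed. A sketch of that idea (hypothetical function, not the one in the diff):

use std::borrow::Cow;

// ptr_arg objects to the Cow parameter, but the Cow is what lets the
// caller avoid a copy on the unchanged path.
#[allow(clippy::ptr_arg)]
fn ensure_trailing_newline(s: &mut Cow<'_, str>) {
    if !s.ends_with('\n') {
        // to_mut() clones the borrowed data only when an edit happens.
        s.to_mut().push('\n');
    }
}

fn main() {
    let mut text: Cow<'_, str> = Cow::Borrowed("done\n");
    ensure_trailing_newline(&mut text);
    assert!(matches!(text, Cow::Borrowed(_))); // still borrowed: no copy was made
}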


@@ -615,7 +615,7 @@ struct FreeCandidate<'a, P> {
     access_metrics: AccessMetrics,
 }
 
-fn sort_free_candidates<P>(candidates: &mut Vec<FreeCandidate<'_, P>>) {
+fn sort_free_candidates<P>(candidates: &mut [FreeCandidate<'_, P>]) {
     candidates.sort_unstable_by(|a, b| match a.action.cmp(&b.action) {
         // Order candidates with the same FreeAction by last access time
         std::cmp::Ordering::Equal => a
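
Loosening &mut Vec<T> to &mut [T] is the standard ptr_arg fix: sorting never grows or shrinks the container, so a mutable slice suffices, and every existing caller keeps working because &mut Vec<T> coerces to &mut [T]. A minimal sketch with a stand-in element type:

// Hypothetical stand-in for sort_free_candidates.
fn sort_desc(values: &mut [u32]) {
    values.sort_unstable_by(|a, b| b.cmp(a));
}

fn main() {
    let mut v = vec![3, 1, 2];
    sort_desc(&mut v); // &mut Vec<u32> coerces to &mut [u32]
    assert_eq!(v, [3, 2, 1]);

    let mut a = [5, 6, 4];
    sort_desc(&mut a); // fixed-size arrays work too
    assert_eq!(a, [6, 5, 4]);
}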


@@ -146,8 +146,7 @@ mod tests {
         let expected: Vec<_> = ranges
             .iter()
-            .cloned()
-            .flatten()
+            .flat_map(|r| r.clone())
             .filter(|idx| predicate(data[*idx]))
             .collect();


@@ -87,8 +87,7 @@ struct Expected {
 fn filter_vec<T: Clone>(ranges: &[Range<usize>], src: &[T]) -> Vec<T> {
     ranges
         .iter()
-        .cloned()
-        .flatten()
+        .flat_map(|r| r.clone())
         .map(|x| src[x].clone())
         .collect()
 }
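
The two hunks above are behavior-preserving: .iter().cloned().flatten() and .iter().flat_map(|r| r.clone()) both clone each Range and then yield its indices in order; the flat_map spelling states that in a single adapter, which appears to be what clippy's map_flatten family of suggestions prefers for map-then-flatten chains. A self-contained sketch:

use std::ops::Range;

// Equivalent to ranges.iter().cloned().flatten().collect::<Vec<_>>().
fn indices(ranges: &[Range<usize>]) -> Vec<usize> {
    ranges.iter().flat_map(|r| r.clone()).collect()
}

fn main() {
    assert_eq!(indices(&[0..2, 5..7]), vec![0, 1, 5, 6]);
}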


@@ -303,7 +303,7 @@ impl PreservedCatalog {
     ///
     /// Note that wiping the catalog will NOT wipe any referenced parquet files.
     pub async fn wipe(iox_object_store: &IoxObjectStore) -> Result<()> {
-        Ok(iox_object_store.wipe_catalog().await.context(WriteSnafu)?)
+        iox_object_store.wipe_catalog().await.context(WriteSnafu)
     }
 
     /// Create new catalog w/o any data.
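
The wipe cleanup works because snafu's ResultExt::context already returns a Result in the target error type, so the surrounding Ok(..?) added nothing. A sketch assuming snafu 0.7-style context selectors (the WriteSnafu name follows that convention); the error variant and the I/O call here are hypothetical:

use snafu::{ResultExt, Snafu};

#[derive(Debug, Snafu)]
enum Error {
    #[snafu(display("cannot write catalog: {}", source))]
    Write { source: std::io::Error },
}

fn wipe(path: &std::path::Path) -> Result<(), Error> {
    // Before: Ok(std::fs::remove_file(path).context(WriteSnafu)?)
    std::fs::remove_file(path).context(WriteSnafu)
}

fn main() {
    let err = wipe(std::path::Path::new("/no/such/catalog")).unwrap_err();
    println!("{err}");
}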


@@ -76,7 +76,7 @@ pub async fn load_parquet_from_store_for_chunk(
     store: Arc<IoxObjectStore>,
 ) -> Result<Vec<u8>> {
     let path = chunk.path();
-    Ok(load_parquet_from_store_for_path(path, store).await?)
+    load_parquet_from_store_for_path(path, store).await
 }
 
 pub async fn load_parquet_from_store_for_path(
@@ -97,7 +97,7 @@ pub async fn load_parquet_from_store_for_path(
 fn create_column_tag(
     name: &str,
     data: Vec<Vec<Option<&str>>>,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
 ) {
@@ -140,7 +140,7 @@ fn create_column_tag(
 fn create_columns_tag(
     column_prefix: &str,
     test_size: TestSize,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
 ) {
@@ -184,7 +184,7 @@ fn create_columns_tag(
 fn create_column_field_string(
     name: &str,
     data: Vec<Vec<Option<&str>>>,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
 ) {
@@ -215,7 +215,7 @@ fn create_column_field_string(
 fn create_columns_field_string(
     column_prefix: &str,
     test_size: TestSize,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
 ) {
@@ -258,7 +258,7 @@ fn create_columns_field_string(
 fn create_column_field_i64(
     name: &str,
     data: Vec<Vec<Option<i64>>>,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
 ) {
@@ -275,7 +275,7 @@ fn create_column_field_i64(
 fn create_columns_field_i64(
     column_prefix: &str,
     test_size: TestSize,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
 ) {
@@ -319,7 +319,7 @@ fn create_columns_field_i64(
 fn create_column_field_u64(
     name: &str,
     data: Vec<Vec<Option<u64>>>,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
 ) {
@@ -336,7 +336,7 @@ fn create_column_field_u64(
 fn create_columns_field_u64(
     column_prefix: &str,
     test_size: TestSize,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
 ) {
@@ -379,7 +379,7 @@ fn create_columns_field_u64(
 fn create_column_field_f64(
     name: &str,
     data: Vec<Vec<Option<f64>>>,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
 ) {
@@ -425,7 +425,7 @@ fn create_column_field_f64(
 fn create_columns_field_f64(
     column_prefix: &str,
     test_size: TestSize,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
 ) {
@@ -509,7 +509,7 @@ fn create_columns_field_f64(
 fn create_column_field_bool(
     name: &str,
     data: Vec<Vec<Option<bool>>>,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
 ) {
@@ -526,7 +526,7 @@ fn create_column_field_bool(
 fn create_columns_field_bool(
     column_prefix: &str,
     test_size: TestSize,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
 ) {
@@ -562,7 +562,7 @@ fn create_columns_field_bool(
 fn create_column_field_generic<A, T, F>(
     name: &str,
     data: Vec<Vec<Option<T>>>,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
     f: F,
@@ -611,7 +611,7 @@ fn create_column_field_generic<A, T, F>(
 fn create_column_timestamp(
     data: Vec<Vec<i64>>,
-    arrow_cols: &mut Vec<Vec<(String, ArrayRef, bool)>>,
+    arrow_cols: &mut [Vec<(String, ArrayRef, bool)>],
     summaries: &mut Vec<ColumnSummary>,
     schema_builder: &mut SchemaBuilder,
 ) {
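
All of the test-helper hunks above are the same ptr_arg fix applied mechanically: each helper presumably needs only element access to the outer vector (it is the inner vectors that get pushed onto), so &mut Vec<Vec<..>> can be loosened to &mut [Vec<..>] with no caller changes, again via the &mut Vec<T> to &mut [T] coercion.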


@@ -875,10 +875,10 @@ impl Server {
         let database = self.database(&db_name)?;
 
         // attempt to save provided rules in the current state
-        Ok(database
+        database
             .update_provided_rules(rules)
             .await
-            .context(CanNotUpdateRulesSnafu { db_name })?)
+            .context(CanNotUpdateRulesSnafu { db_name })
     }
 
     /// Closes a chunk and starts moving its data to the read buffer, as a


@@ -197,7 +197,7 @@ mod tests {
         };
         let dsn = std::env::var("DATABASE_URL").unwrap();
         let captured_schema_name = schema_name.clone();
-        Ok(PgPoolOptions::new()
+        PgPoolOptions::new()
            .min_connections(1)
            .max_connections(5)
            .connect_timeout(Duration::from_secs(2))
@@ -222,7 +222,7 @@ mod tests {
                })
            })
            .connect(&dsn)
-           .await?)
+           .await
     }
 
     // The goal of this test is to verify that the hotswap pool can indeed replace
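
Here too the final .connect(&dsn).await already evaluates to a Result (sqlx's PgPoolOptions::connect yields Result<Pool<Postgres>, sqlx::Error>), so both the Ok( that opened the builder chain and the ?) that closed it were redundant.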