chore: Upgrade to Rust 1.64 (#5727)

* chore: Upgrade to Rust 1.64

* fix: Use iter find instead of a for loop, thanks clippy

* fix: Remove some needless borrows, thanks clippy

* fix: Use then_some rather than then with a closure, thanks clippy

* fix: Use iter retain rather than filter collect, thanks clippy

Co-authored-by: kodiakhq[bot] <49736102+kodiakhq[bot]@users.noreply.github.com>
pull/24376/head
Carol (Nichols || Goulding) 2022-09-22 14:04:00 -04:00 committed by GitHub
parent 61075d57e2
commit c8108f01e7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 15 additions and 23 deletions

View File

@@ -89,7 +89,7 @@ impl<K: AsPrimitive<usize> + FromPrimitive + Zero> PackedStringArray<K> {
.push_str(&other.storage[first_offset..end_offset]);
self.offsets.extend(
-(&other.offsets[(range.start + 1)..(range.end + 1)])
+other.offsets[(range.start + 1)..(range.end + 1)]
.iter()
.map(|x| {
K::from_usize(x.as_() - first_offset + insert_offset)

View File

@@ -255,7 +255,7 @@ pub async fn command(connection: Connection, config: Config) -> Result<()> {
let mut client = influxdb_storage_client::Client::new(connection);
// convert predicate with no root node into None.
-let predicate = config.predicate.root.is_some().then(|| config.predicate);
+let predicate = config.predicate.root.is_some().then_some(config.predicate);
let source = Client::read_source(&config.db_name, 0);
let now = std::time::Instant::now();

View File

@@ -239,7 +239,7 @@ where
T: BlockDecoder,
{
fn decode(&mut self, block: &Block) -> Result<BlockData, TsmError> {
-(&mut **self).decode(block)
+(**self).decode(block)
}
}

View File

@@ -277,7 +277,7 @@ mod tests {
// Merge schema of the batches
// The fields in the schema are sorted by column name
let batches = create_batches();
-let merged_schema = (&*merge_record_batch_schemas(&batches)).clone();
+let merged_schema = (*merge_record_batch_schemas(&batches)).clone();
// Expected Arrow schema
let arrow_schema = Arc::new(arrow::datatypes::Schema::new(vec![

View File

@@ -773,7 +773,7 @@ impl PartitionRepo for MemTxn {
let table_ids: HashSet<_> = stage
.tables
.iter()
-.filter_map(|table| (table.namespace_id == namespace_id).then(|| table.id))
+.filter_map(|table| (table.namespace_id == namespace_id).then_some(table.id))
.collect();
let partitions: Vec<_> = stage
.partitions
@@ -937,7 +937,7 @@ impl TombstoneRepo for MemTxn {
let table_ids: HashSet<_> = stage
.tables
.iter()
-.filter_map(|table| (table.namespace_id == namespace_id).then(|| table.id))
+.filter_map(|table| (table.namespace_id == namespace_id).then_some(table.id))
.collect();
let tombstones: Vec<_> = stage
.tombstones
@@ -1112,7 +1112,7 @@ impl ParquetFileRepo for MemTxn {
let table_ids: HashSet<_> = stage
.tables
.iter()
-.filter_map(|table| (table.namespace_id == namespace_id).then(|| table.id))
+.filter_map(|table| (table.namespace_id == namespace_id).then_some(table.id))
.collect();
let parquet_files: Vec<_> = stage
.parquet_files

View File

@@ -160,13 +160,7 @@ impl DataSpec {
/// Get the agent spec by its name
pub fn agent_by_name(&self, name: &str) -> Option<&AgentSpec> {
-for a in &self.agents {
-if a.name == name {
-return Some(a);
-}
-}
-None
+self.agents.iter().find(|&a| a.name == name)
}
}

View File

@@ -265,7 +265,8 @@ impl InfluxRpcPlanner {
}
}
-Ok((!chunks_full.is_empty()).then(|| (table_name, Some((predicate, chunks_full)))))
+Ok((!chunks_full.is_empty())
+.then_some((table_name, Some((predicate, chunks_full)))))
})
.try_collect()
.await?;
@@ -1371,7 +1372,7 @@ where
// rustc seems to heavily confused about the filter step here, esp. it dislikes `.try_filter` and even
// `.try_filter_map` requires some additional type annotations
.try_filter_map(|(table_name, predicate, chunks)| async move {
-Ok((!chunks.is_empty()).then(move || (table_name, predicate, chunks)))
+Ok((!chunks.is_empty()).then_some((table_name, predicate, chunks)))
as Result<Option<(&str, &Predicate, Vec<_>)>>
})
.and_then(|(table_name, predicate, chunks)| {

View File

@@ -43,10 +43,7 @@ impl IoxSystemTable for QueriesTable {
let mut entries = self.query_log.entries();
if let Some(namespace_id) = self.namespace_id_filter {
-entries = entries
-.into_iter()
-.filter(|entry| entry.namespace_id == namespace_id)
-.collect();
+entries.retain(|entry| entry.namespace_id == namespace_id);
}
let mut offset = 0;

View File

@@ -381,7 +381,7 @@ where
+ self
.run_lengths
.iter()
-.filter_map(|(rl, v)| v.is_some().then(|| *rl as usize * size_of::<Option<L>>()))
+.filter_map(|(rl, v)| v.is_some().then_some(*rl as usize * size_of::<Option<L>>()))
.sum::<usize>()
}

View File

@@ -1,3 +1,3 @@
[toolchain]
-channel = "1.63"
+channel = "1.64"
components = [ "rustfmt", "clippy" ]

View File

@@ -99,7 +99,7 @@ impl IoxHeaders {
span_context = match parser.parse(trace_collector, &headers) {
Ok(None) => None,
-Ok(Some(ctx)) => ctx.sampled.then(|| ctx),
+Ok(Some(ctx)) => ctx.sampled.then_some(ctx),
Err(e) => {
return Err(WriteBufferError::invalid_data(format!(
"Error decoding trace context: {}",