feat: Use the read buffer chunk cache in the querier

pull/24376/head
Carol (Nichols || Goulding) 2022-06-01 17:02:02 -04:00
parent 40d3a09296
commit a4f51d99f6
2 changed files with 12 additions and 2 deletions


@@ -47,6 +47,9 @@ pub struct CatalogCache {
     /// tombstone cache
     tombstone_cache: TombstoneCache,
+    /// Read buffer chunk cache
+    read_buffer_cache: ReadBufferCache,
     /// Time provider.
     time_provider: Arc<dyn TimeProvider>,
 }
@@ -104,6 +107,12 @@ impl CatalogCache {
         );
         let tombstone_cache = TombstoneCache::new(
             Arc::clone(&catalog),
             backoff_config.clone(),
             Arc::clone(&time_provider),
             &metric_registry,
             Arc::clone(&ram_pool),
         );
+        let read_buffer_cache = ReadBufferCache::new(
+            backoff_config,
+            Arc::clone(&time_provider),
+            &metric_registry,
@@ -118,6 +127,7 @@ impl CatalogCache {
             processed_tombstones_cache,
             parquet_file_cache,
             tombstone_cache,
+            read_buffer_cache,
             time_provider,
         }
     }
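
A note on the constructor hunks above: the tombstone cache is handed a clone of the backoff configuration (`backoff_config.clone()`), while `ReadBufferCache::new` takes the original `backoff_config` by value, which in Rust means it is the last consumer of that config inside the constructor. The following stand-alone sketch shows that move-vs-clone pattern; the `Demo*` types are hypothetical stand-ins, not the real querier types.

// Sketch only: DemoConfig, DemoTombstoneCache, and DemoReadBufferCache are
// hypothetical stand-ins for the types appearing in this diff.
#[derive(Clone, Debug)]
struct DemoConfig {
    max_retries: u32,
}

struct DemoTombstoneCache {
    config: DemoConfig,
}

struct DemoReadBufferCache {
    config: DemoConfig,
}

fn build_caches(config: DemoConfig) -> (DemoTombstoneCache, DemoReadBufferCache) {
    // Earlier consumers receive a clone of the non-Copy config...
    let tombstone = DemoTombstoneCache {
        config: config.clone(),
    };
    // ...and the last consumer moves the original, avoiding one extra clone.
    let read_buffer = DemoReadBufferCache { config };
    (tombstone, read_buffer)
}

fn main() {
    let (tombstone, read_buffer) = build_caches(DemoConfig { max_retries: 3 });
    println!(
        "tombstone retries: {}, read buffer retries: {}",
        tombstone.config.max_retries, read_buffer.config.max_retries
    );
}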
@@ -164,6 +174,6 @@ impl CatalogCache {
     /// Read buffer chunk cache.
     pub(crate) fn read_buffer(&self) -> &ReadBufferCache {
-        unimplemented!("Deliberately not hooking up this cache yet");
+        &self.read_buffer_cache
    }
 }
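
The hunk above replaces the deliberate `unimplemented!` stub with a borrow of the newly added field, so callers can now reach the read buffer chunk cache through `CatalogCache::read_buffer()`. As a minimal, self-contained sketch of that registry-plus-accessor pattern (the `Demo*` types below are hypothetical stand-ins, not the real IOx types):

// Sketch only: a cache registry that owns a sub-cache and lends it out
// through a getter instead of rebuilding it per call.
use std::collections::HashMap;

#[derive(Default)]
struct DemoReadBufferCache {
    // The real cache holds read buffer chunks; a map stands in for it here.
    chunks: HashMap<String, Vec<u8>>,
}

#[derive(Default)]
struct DemoCatalogCache {
    read_buffer_cache: DemoReadBufferCache,
}

impl DemoCatalogCache {
    /// Read buffer chunk cache (mirrors the accessor in the hunk above).
    fn read_buffer(&self) -> &DemoReadBufferCache {
        &self.read_buffer_cache
    }
}

fn main() {
    let catalog_cache = DemoCatalogCache::default();
    // Callers borrow the shared sub-cache from the registry.
    let read_buffer = catalog_cache.read_buffer();
    println!("cached read buffer chunks: {}", read_buffer.chunks.len());
}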


@@ -116,7 +116,7 @@ impl Reconciler {
         for cached_parquet_file in parquet_files {
             if let Some(chunk) = self
                 .chunk_adapter
-                .new_querier_parquet_chunk(&cached_parquet_file)
+                .new_rb_chunk(Arc::clone(&cached_parquet_file))
                 .await
             {
                 chunks_from_parquet.push(chunk);
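
In the second file, the reconciler now builds read buffer chunks via `new_rb_chunk` (presumably backed by the cache wired up above) instead of plain parquet chunks, and the new method takes an owned `Arc` of the cached parquet file rather than a reference, so the loop passes `Arc::clone(&cached_parquet_file)`, a cheap reference-count bump. A self-contained sketch of that calling convention follows; `DemoFile`, `DemoChunk`, and `DemoAdapter` are hypothetical stand-ins, and `futures::executor::block_on` is used only to drive the async call in a runnable example.

use std::sync::Arc;

struct DemoFile {
    id: i64,
}

struct DemoChunk {
    file: Arc<DemoFile>,
}

struct DemoAdapter;

impl DemoAdapter {
    // Takes the Arc by value, like `new_rb_chunk` in the hunk above.
    async fn new_rb_chunk(&self, file: Arc<DemoFile>) -> Option<DemoChunk> {
        Some(DemoChunk { file })
    }
}

fn main() {
    let adapter = DemoAdapter;
    let parquet_files = vec![Arc::new(DemoFile { id: 1 }), Arc::new(DemoFile { id: 2 })];

    let mut chunks_from_parquet = Vec::new();
    futures::executor::block_on(async {
        for cached_parquet_file in &parquet_files {
            // Arc::clone only bumps a reference count; the underlying cached
            // file is shared between the caller and the new chunk.
            if let Some(chunk) = adapter
                .new_rb_chunk(Arc::clone(cached_parquet_file))
                .await
            {
                chunks_from_parquet.push(chunk);
            }
        }
    });
    println!(
        "built {} chunks for file ids {:?}",
        chunks_from_parquet.len(),
        chunks_from_parquet.iter().map(|c| c.file.id).collect::<Vec<_>>()
    );
}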