fix: Remove test uses of parquet chunks and document them as unused

The querier is now using read buffer chunks only, but we're leaving the
parquet chunk code around for the moment.
pull/24376/head
Carol (Nichols || Goulding) 2022-06-02 15:46:18 -04:00
parent d3df9db1a6
commit aa510ae4e6
No known key found for this signature in database
GPG Key ID: E907EE5A736F87D4
2 changed files with 26 additions and 7 deletions

View File

@ -1114,6 +1114,7 @@ mod tests {
use iox_catalog::interface::INITIAL_COMPACTION_LEVEL;
use iox_tests::util::TestCatalog;
use iox_time::SystemProvider;
use parquet_file::chunk::DecodedParquetFile;
use querier::{
cache::CatalogCache,
chunk::{collect_read_filter, ChunkAdapter},
@ -1236,13 +1237,17 @@ mod tests {
);
// create chunks for 2 files
let files1 = files.pop().unwrap();
let decoded_parquet_files1 = DecodedParquetFile::new(files1);
let files0 = files.pop().unwrap();
let decoded_parquet_files0 = DecodedParquetFile::new(files0);
let chunk_0 = adapter
.new_querier_parquet_chunk_from_file_with_metadata(files0)
.new_rb_chunk(Arc::new(decoded_parquet_files0))
.await
.unwrap();
let chunk_1 = adapter
.new_querier_parquet_chunk_from_file_with_metadata(files1)
.new_rb_chunk(Arc::new(decoded_parquet_files1))
.await
.unwrap();
// query the chunks
@ -1457,13 +1462,17 @@ mod tests {
);
// create chunks for 2 files
let files2 = files.pop().unwrap();
let decoded_parquet_files2 = DecodedParquetFile::new(files2);
let files1 = files.pop().unwrap();
let decoded_parquet_files1 = DecodedParquetFile::new(files1);
let chunk_0 = adapter
.new_querier_parquet_chunk_from_file_with_metadata(files1)
.new_rb_chunk(Arc::new(decoded_parquet_files1))
.await
.unwrap();
let chunk_1 = adapter
.new_querier_parquet_chunk_from_file_with_metadata(files2)
.new_rb_chunk(Arc::new(decoded_parquet_files2))
.await
.unwrap();
// query the chunks

View File

@ -300,6 +300,9 @@ impl ChunkAdapter {
/// Create parquet chunk.
///
/// Returns `None` if some data required to create this chunk is already gone from the catalog.
///
/// CURRENTLY UNUSED: The querier is creating and caching read buffer chunks instead, using
/// the `new_rb_chunk` method.
async fn new_parquet_chunk(
&self,
decoded_parquet_file: &DecodedParquetFile,
@ -316,7 +319,11 @@ impl ChunkAdapter {
/// Create new querier Parquet chunk from a catalog record
///
/// Returns `None` if some data required to create this chunk is already gone from the catalog.
pub async fn new_querier_parquet_chunk_from_file_with_metadata(
///
/// CURRENTLY UNUSED: The querier is creating and caching read buffer chunks instead, using
/// the `new_rb_chunk` method.
#[allow(dead_code)]
async fn new_querier_parquet_chunk_from_file_with_metadata(
&self,
parquet_file_with_metadata: ParquetFileWithMetadata,
) -> Option<QuerierParquetChunk> {
@ -327,6 +334,9 @@ impl ChunkAdapter {
/// Create new querier Parquet chunk.
///
/// Returns `None` if some data required to create this chunk is already gone from the catalog.
///
/// CURRENTLY UNUSED: The querier is creating and caching read buffer chunks instead, using
/// the `new_rb_chunk` method.
pub async fn new_querier_parquet_chunk(
&self,
decoded_parquet_file: &DecodedParquetFile,
@ -427,7 +437,7 @@ impl ChunkAdapter {
}
/// collect data for the given chunk
pub async fn collect_read_filter(chunk: &QuerierParquetChunk) -> Vec<RecordBatch> {
pub async fn collect_read_filter(chunk: &dyn QueryChunk) -> Vec<RecordBatch> {
chunk
.read_filter(
IOxSessionContext::default(),
@ -487,7 +497,7 @@ pub mod tests {
// create chunk
let chunk = adapter
.new_querier_parquet_chunk(&DecodedParquetFile::new(parquet_file))
.new_rb_chunk(Arc::new(DecodedParquetFile::new(parquet_file)))
.await
.unwrap();