refactor: tighten up Read Buffer API
parent dae9f12593
commit d429cf9aeb
@@ -1,15 +1,15 @@
 #![deny(rust_2018_idioms)]
 #![warn(clippy::clone_on_ref_ptr, clippy::use_self)]
 #![allow(dead_code, clippy::too_many_arguments)]
-pub mod chunk;
-pub(crate) mod column;
-pub(crate) mod row_group;
+mod chunk;
+mod column;
+mod row_group;
 mod schema;
-pub(crate) mod table;
-pub(crate) mod value;
+mod table;
+mod value;
 
 // Identifiers that are exported as part of the public API.
-pub use chunk::Chunk;
+pub use chunk::{Chunk, Error};
 pub use row_group::{BinaryExpr, Predicate};
 pub use schema::*;
 pub use table::ReadFilterResults;

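The lib.rs hunk above is the heart of the change: the chunk, column, row_group, table and value modules all become private, and consumers rely solely on the identifiers re-exported from the crate root. A minimal consumer sketch under that assumption, using only names visible in this diff (Chunk, Error, and a Chunk::new that takes a chunk id, as in the test hunk at the end); the two helper functions are hypothetical:

use read_buffer::{Chunk as ReadBufferChunk, Error as ReadBufferError};

// Hypothetical helper for illustration: build a Read Buffer chunk for a given
// id, mirroring the `read_buffer::Chunk::new(22)` call in the test hunk below.
fn new_read_buffer_chunk(chunk_id: u32) -> ReadBufferChunk {
    ReadBufferChunk::new(chunk_id)
}

// Hypothetical helper: fallible Read Buffer operations now surface the
// re-exported `read_buffer::Error` rather than `read_buffer::chunk::Error`.
fn describe_error(err: &ReadBufferError) -> String {
    err.to_string()
}
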
@@ -26,7 +26,7 @@ use internal_types::{data::ReplicatedWrite, selection::Selection};
 use object_store::{memory::InMemory, ObjectStore};
 use parquet_file::{chunk::Chunk, storage::Storage};
 use query::{Database, DEFAULT_SCHEMA};
-use read_buffer::chunk::Chunk as ReadBufferChunk;
+use read_buffer::Chunk as ReadBufferChunk;
 use tracker::{MemRegistry, TaskTracker, TrackedFutureExt};
 
 use super::{buffer::Buffer, JobRegistry};

@@ -106,7 +106,7 @@ pub enum Error {
 
     #[snafu(display("Read Buffer Error in chunk {}: {}", chunk_id, source))]
     ReadBufferChunkError {
-        source: read_buffer::chunk::Error,
+        source: read_buffer::Error,
         chunk_id: u32,
     },
 

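The error hunk above only swaps the source type of the existing ReadBufferChunkError variant from read_buffer::chunk::Error to the re-exported read_buffer::Error; the variant shape and display message stay the same. A hedged sketch of how such a variant is usually populated via snafu's ResultExt, assuming a snafu release whose derive names the context selector after the variant (as releases of that era did); the wrapper function itself is hypothetical:

use snafu::ResultExt; // brings `.context(...)` into scope

// Hypothetical call site: wrap any `Result<_, read_buffer::Error>` into the
// `ReadBufferChunkError` variant above, attaching the chunk id that the
// snafu display message interpolates.
fn wrap_read_buffer_result<T>(
    res: Result<T, read_buffer::Error>,
    chunk_id: u32,
) -> Result<T, Error> {
    res.context(ReadBufferChunkError { chunk_id })
}
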
@@ -6,7 +6,7 @@ use data_types::{chunk::ChunkSummary, partition_metadata::TableSummary};
 use mutable_buffer::chunk::Chunk as MBChunk;
 use parquet_file::chunk::Chunk as ParquetChunk;
 use query::PartitionChunk;
-use read_buffer::chunk::Chunk as ReadBufferChunk;
+use read_buffer::Chunk as ReadBufferChunk;
 
 use crate::db::DBChunk;
 

@@ -5,7 +5,7 @@ use mutable_buffer::chunk::Chunk as MBChunk;
 use observability_deps::tracing::debug;
 use parquet_file::chunk::Chunk as ParquetChunk;
 use query::{exec::stringset::StringSet, predicate::Predicate, PartitionChunk};
-use read_buffer::chunk::Chunk as ReadBufferChunk;
+use read_buffer::Chunk as ReadBufferChunk;
 use snafu::{ResultExt, Snafu};
 
 use std::{collections::BTreeSet, sync::Arc};

@@ -24,7 +24,7 @@ pub enum Error {
 
     #[snafu(display("Read Buffer Error in chunk {}: {}", chunk_id, source))]
     ReadBufferChunkError {
-        source: read_buffer::chunk::Error,
+        source: read_buffer::Error,
         chunk_id: u32,
     },
 

@@ -473,7 +473,7 @@ mod tests {
             ..Default::default()
         };
 
-        let rb = Arc::new(read_buffer::chunk::Chunk::new(22));
+        let rb = Arc::new(read_buffer::Chunk::new(22));
 
         let chunks = vec![new_chunk(0, Some(0), Some(0))];
 