diff --git a/server/src/db.rs b/server/src/db.rs
index e942678bfa..8aeec09196 100644
--- a/server/src/db.rs
+++ b/server/src/db.rs
@@ -380,6 +380,28 @@ impl Db {
         Ok(DBChunk::snapshot(&chunk))
     }
 
+    pub fn load_chunk_to_parquet(&self,
+        partition_key: &str,
+        chunk_id: u32,
+    ) -> Result<Arc<DBChunk>> {
+        let chunk = {
+            let partition = self
+                .catalog
+                .valid_partition(partition_key)
+                .context(LoadingChunk {
+                    partition_key,
+                    chunk_id,
+                })?;
+            let partition = partition.read();
+
+            partition.chunk(chunk_id).context(LoadingChunk {
+                partition_key,
+                chunk_id,
+            })?
+        };
+        Ok(DBChunk::snapshot(&chunk))
+    }
+
     /// Returns the next write sequence number
     pub fn next_sequence(&self) -> u64 {
         self.sequence.fetch_add(1, Ordering::SeqCst)
diff --git a/server/src/db/chunk.rs b/server/src/db/chunk.rs
index f601a0e27d..88d2a0e97b 100644
--- a/server/src/db/chunk.rs
+++ b/server/src/db/chunk.rs
@@ -7,7 +7,7 @@ use read_buffer::Database as ReadBufferDb;
 use snafu::{ResultExt, Snafu};
 use tracing::debug;
 
-use std::sync::Arc;
+use std::{path::Path, sync::Arc};
 
 use super::{
     pred::{to_mutable_buffer_predicate, to_read_buffer_predicate},
@@ -68,7 +68,11 @@ pub enum DBChunk {
         partition_key: Arc<String>,
         chunk_id: u32,
     },
-    ParquetFile, // TODO add appropriate type here
+    ParquetFile {
+        path: String, // name of the parquet file
+        partition_key: Arc<String>,
+        chunk_id: u32, // ID of the chunk stored in this parquet file
+    },
 }
 
 impl DBChunk {
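
Not part of the diff: a minimal standalone sketch of why giving `ParquetFile` fields matters downstream. Once the variant carries data, every exhaustive `match` over the enum needs a new arm that destructures it. The enum and method below are simplified stand-ins, not the crate's actual `DBChunk` API; field shapes for `ReadBuffer` and `ParquetFile` mirror the diff, while the `MutableBuffer` shape, the method name, and the sample values are assumptions for illustration.

```rust
use std::sync::Arc;

// Miniature stand-in for the DBChunk enum after this change (simplified).
enum DbChunkSketch {
    MutableBuffer { chunk_id: u32 }, // assumed shape, for illustration only
    ReadBuffer { partition_key: Arc<String>, chunk_id: u32 },
    ParquetFile { path: String, partition_key: Arc<String>, chunk_id: u32 },
}

impl DbChunkSketch {
    /// Exhaustive matches must now destructure the ParquetFile variant too.
    fn id(&self) -> u32 {
        match self {
            Self::MutableBuffer { chunk_id } => *chunk_id,
            Self::ReadBuffer { chunk_id, .. } => *chunk_id,
            Self::ParquetFile { chunk_id, .. } => *chunk_id, // new arm
        }
    }
}

fn main() {
    // Hypothetical sample values; only the field names come from the diff.
    let chunk = DbChunkSketch::ParquetFile {
        path: "1234.parquet".to_string(),
        partition_key: Arc::new("2021-02-01".to_string()),
        chunk_id: 7,
    };
    println!("chunk id = {}", chunk.id());
}
```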