Merge pull request #186 from influxdata/alamb/refactor-parquet-deps
refactor: clean up parquet library deps and remove use of InputReaderAdapter (related to parquet dependencies)
commit ab22384009
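
The caller-facing effect of this refactor: `print_parquet_metadata` and `col_stats` no longer take an `input_size` argument and no longer wrap their input in the removed `InputReaderAdapter`; instead, the input type itself must implement the parquet `Length` and `TryClone` traits that `delorean_parquet` now re-exports. A minimal sketch of what an input type has to provide under the new bounds (the `SizedReader` type here is hypothetical, not part of this PR):

```rust
use std::io::{Cursor, Read, Seek, SeekFrom};

// Length, TryClone and ParquetError are the re-exports added by this PR.
use delorean_parquet::{Length, ParquetError, TryClone};

/// Hypothetical in-memory input, for illustration only.
#[derive(Debug)]
struct SizedReader {
    data: Cursor<Vec<u8>>,
    size: u64,
}

impl Read for SizedReader {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        self.data.read(buf)
    }
}

impl Seek for SizedReader {
    fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
        self.data.seek(pos)
    }
}

// Total input size in bytes; this replaces the explicit `input_size` argument.
impl Length for SizedReader {
    fn len(&self) -> u64 {
        self.size
    }
}

// Cloning is reported as not implemented, mirroring the InputReader impl
// added later in this diff (and what the old adapter did).
impl TryClone for SizedReader {
    fn try_clone(&self) -> Result<Self, ParquetError> {
        Err(ParquetError::NYI(String::from("TryClone for SizedReader")))
    }
}
```
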
@@ -687,7 +687,6 @@ dependencies = [
  "delorean_tsm",
  "env_logger",
  "log",
- "parquet",
  "snafu",
 ]
@@ -7,13 +7,6 @@ edition = "2018"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-# I get a build error when I use this one:
-# failed to run custom build command for `arrow-flight v0.17.0`
-#parquet = "0.17.0"
-# this, we are living on the edge and pull directly from the arrow repo.
-# https://github.com/apache/arrow/commit/04a1867eeb58f0c515e7ee5a6300a8f61045a6cd
-parquet = { git = "https://github.com/apache/arrow.git", rev="04a1867eeb58f0c515e7ee5a6300a8f61045a6cd", version = "1.0.0-SNAPSHOT" }
-
 snafu = "0.6.2"
 env_logger = "0.7.1"
@@ -8,12 +8,13 @@ use std::collections::{BTreeMap, BTreeSet};
 use std::io::{BufRead, Seek};

 use log::debug;
-use parquet::data_type::ByteArray;
 use snafu::{ResultExt, Snafu};

 use delorean_line_parser::{FieldValue, ParsedLine};
-use delorean_table::packers::{Packer, Packers};
-use delorean_table::{DeloreanTableWriter, DeloreanTableWriterSource, Error as TableError};
+use delorean_table::{
+    packers::{Packer, Packers},
+    ByteArray, DeloreanTableWriter, DeloreanTableWriterSource, Error as TableError,
+};
 use delorean_table_schema::{DataType, Schema, SchemaBuilder};
 use delorean_tsm::mapper::{map_field_columns, ColumnData, TSMMeasurementMapper};
 use delorean_tsm::reader::{TSMBlockReader, TSMIndexReader};
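
After this change, `ByteArray` reaches the ingest code through the `delorean_table` re-export (added further down in this diff) instead of a direct `parquet` import. A small sketch of the re-exported type in use; it assumes, as in the upstream parquet crate, that `ByteArray` can be built from a `&str`:

```rust
use delorean_table::ByteArray;

fn main() {
    // Same parquet-backed type as before, but named through delorean_table,
    // so this crate no longer needs its own `parquet` dependency.
    let value = ByteArray::from("cpu,host=A usage=0.5");
    println!("{} bytes", value.len());
}
```
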
@@ -703,7 +704,6 @@ mod delorean_ingest_tests {
     use delorean_table::{DeloreanTableWriter, DeloreanTableWriterSource, Error as TableError};
     use delorean_table_schema::ColumnDefinition;
     use delorean_test_helpers::approximately_equal;
-    use parquet::data_type::ByteArray;

     use std::sync::{Arc, Mutex};
@@ -2,55 +2,13 @@
 #![deny(rust_2018_idioms)]
 #![warn(missing_debug_implementations, clippy::explicit_iter_loop)]

+// Export the parts of the parquet crate that are needed to interact with code in this crate
+pub use parquet::{
+    errors::ParquetError,
+    file::reader::{Length, TryClone},
+};
-use std::io::{Read, Seek, SeekFrom};

 pub mod error;
 pub mod metadata;
 pub mod stats;
 pub mod writer;

-/// Adapts an object that implements Read+Seek to something that also
-/// implements the parquet TryClone interface, required by the parquet
-/// reader. This is provided so users of this crate do not have to
-/// implement a parquet specific trait
-struct InputReaderAdapter<R>
-where
-    R: Read + Seek,
-{
-    real_reader: R,
-    size: u64,
-}
-
-impl<R: Read + Seek> InputReaderAdapter<R> {
-    fn new(real_reader: R, size: u64) -> InputReaderAdapter<R> {
-        InputReaderAdapter { real_reader, size }
-    }
-}
-
-impl<R: Read + Seek> Read for InputReaderAdapter<R> {
-    fn read(&mut self, buf: &mut [u8]) -> Result<usize, std::io::Error> {
-        self.real_reader.read(buf)
-    }
-}
-
-impl<R: Read + Seek> Seek for InputReaderAdapter<R> {
-    fn seek(&mut self, pos: SeekFrom) -> Result<u64, std::io::Error> {
-        self.real_reader.seek(pos)
-    }
-}
-
-impl<R: Read + Seek> TryClone for InputReaderAdapter<R> {
-    fn try_clone(&self) -> std::result::Result<Self, ParquetError> {
-        Err(ParquetError::NYI(String::from("TryClone for input reader")))
-    }
-}
-
-impl<R: Read + Seek> Length for InputReaderAdapter<R> {
-    fn len(&self) -> u64 {
-        self.size
-    }
-}
@@ -10,7 +10,7 @@ use delorean_table_schema::DataType;

 use crate::{
     error::{Error, Result},
-    InputReaderAdapter,
+    Length, TryClone,
 };

 pub fn parquet_schema_as_string(parquet_schema: &schema::types::Type) -> String {
@@ -36,23 +36,22 @@ pub fn data_type_from_parquet_type(parquet_type: parquet::basic::Type) -> DataType {

 /// Print parquet metadata that can be read from `input`, with a total
 /// size of `input_size` byes
-pub fn print_parquet_metadata<R: 'static>(input: R, input_size: u64) -> Result<()>
+pub fn print_parquet_metadata<R: 'static>(input: R) -> Result<()>
 where
-    R: Read + Seek,
+    R: Read + Seek + TryClone + Length,
 {
-    let input_adapter = InputReaderAdapter::new(input, input_size);
+    let input_len = input.len();

-    let reader =
-        SerializedFileReader::new(input_adapter).map_err(|e| Error::ParquetLibraryError {
-            message: String::from("Creating parquet reader"),
-            source: e,
-        })?;
+    let reader = SerializedFileReader::new(input).map_err(|e| Error::ParquetLibraryError {
+        message: String::from("Creating parquet reader"),
+        source: e,
+    })?;

     let parquet_metadata = reader.metadata();
     let file_metadata = parquet_metadata.file_metadata();
     let num_columns = file_metadata.schema_descr().num_columns();

-    println!("Parquet file size: {} bytes", input_size);
+    println!("Parquet file size: {} bytes", input_len);
     println!(
         "Parquet file Schema: {}",
         parquet_schema_as_string(file_metadata.schema()).trim_end()
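
A usage sketch of the refactored signature. The `metadata` module path follows from the `metadata::data_type_from_parquet_type` import in the next hunk; the example assumes, as in the upstream parquet crate, that `std::fs::File` already implements the re-exported `Length` and `TryClone` traits, and `example.parquet` is a placeholder path:

```rust
use std::fs::File;

use delorean_parquet::metadata::print_parquet_metadata;

fn main() {
    let input = File::open("example.parquet").expect("open parquet file");

    // No separate `input_size` argument any more; the size that gets printed
    // comes from the reader's own Length impl.
    print_parquet_metadata(input).expect("print parquet metadata");
}
```
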
@@ -12,24 +12,21 @@ use delorean_table::stats::{ColumnStats, ColumnStatsBuilder};

 use crate::{
     error::{Error, Result},
     metadata::data_type_from_parquet_type,
-    InputReaderAdapter,
+    Length, TryClone,
 };

 /// Calculate storage statistics for a particular parquet file that can
 /// be read from `input`, with a total size of `input_size` byes
 ///
 /// Returns a Vec of ColumnStats, one for each column in the input
-pub fn col_stats<R: 'static>(input: R, input_size: u64) -> Result<Vec<ColumnStats>>
+pub fn col_stats<R: 'static>(input: R) -> Result<Vec<ColumnStats>>
 where
-    R: Read + Seek,
+    R: Read + Seek + TryClone + Length,
 {
-    let input_adapter = InputReaderAdapter::new(input, input_size);
-
-    let reader =
-        SerializedFileReader::new(input_adapter).map_err(|e| Error::ParquetLibraryError {
-            message: String::from("Creating parquet reader"),
-            source: e,
-        })?;
+    let reader = SerializedFileReader::new(input).map_err(|e| Error::ParquetLibraryError {
+        message: String::from("Creating parquet reader"),
+        source: e,
+    })?;

     let mut stats_builders = BTreeMap::new();
@@ -5,6 +5,7 @@ use snafu::Snafu;

 use delorean_table_schema::Schema;
 pub use packers::{Packer, Packers};
+pub use parquet::data_type::ByteArray;

 #[derive(Snafu, Debug)]
 pub enum Error {
@@ -40,8 +40,7 @@ pub fn dump_meta(input_filename: &str) -> Result<()> {
             Ok(())
         }
         FileType::Parquet => {
-            let input_len = input_reader.len();
-            print_parquet_metadata(input_reader, input_len)
+            print_parquet_metadata(input_reader)
                 .map_err(|e| Error::UnableDumpToParquetMetadata { source: e })?;
             Ok(())
         }
@@ -6,6 +6,7 @@ use std::io::{BufRead, BufReader, Cursor, Read, Seek, SeekFrom};
 use std::path::Path;

 use crate::commands::error::{Error, Result};
+use delorean_parquet::ParquetError;

 #[derive(Debug)]
 pub enum FileType {
@@ -93,6 +94,21 @@ impl Read for InputReader {
     }
 }

+impl delorean_parquet::Length for InputReader {
+    fn len(&self) -> u64 {
+        match self {
+            InputReader::FileInputType(file_input_reader) => file_input_reader.file_size,
+            InputReader::MemoryInputType(memory_input_reader) => memory_input_reader.file_size,
+        }
+    }
+}
+
+impl delorean_parquet::TryClone for InputReader {
+    fn try_clone(&self) -> std::result::Result<Self, ParquetError> {
+        Err(ParquetError::NYI(String::from("TryClone for input reader")))
+    }
+}
+
 impl BufRead for InputReader {
     fn fill_buf(&mut self) -> io::Result<&[u8]> {
         match self {
@@ -30,7 +30,7 @@ pub fn stats(input_filename: &str) -> Result<()> {
             let input_len = input_reader.len();
             (
                 input_len,
-                col_stats(input_reader, input_len)
+                col_stats(input_reader)
                     .map_err(|e| Error::UnableDumpToParquetMetadata { source: e })?,
             )
         }