refactor: Move time_to_ts fn into the only file it's used in

pull/24376/head
Carol (Nichols || Goulding) 2021-07-26 14:16:45 -04:00
parent 7c9a21632b
commit 09e48018a0
2 changed files with 7 additions and 10 deletions

View File

@ -14,7 +14,6 @@ use arrow::{
error::Result,
record_batch::RecordBatch,
};
use chrono::{DateTime, Utc};
use datafusion::{
catalog::schema::SchemaProvider,
datasource::{datasource::Statistics, TableProvider},
@ -153,11 +152,6 @@ where
}
}
// TODO: Use a custom proc macro or serde to reduce the boilerplate
/// Maps an optional UTC wall-clock time to its nanosecond Unix timestamp,
/// passing `None` through unchanged.
fn time_to_ts(time: Option<DateTime<Utc>>) -> Option<i64> {
    time.map(|datetime| datetime.timestamp_nanos())
}
/// Creates a DataFusion ExecutionPlan node that scans a single batch
/// of records.
fn scan_batch(

View File

@ -1,13 +1,11 @@
use crate::db::{
catalog::Catalog,
system_tables::{time_to_ts, IoxSystemTable},
};
use crate::db::{catalog::Catalog, system_tables::IoxSystemTable};
use arrow::{
array::{StringArray, TimestampNanosecondArray, UInt32Array, UInt64Array},
datatypes::{DataType, Field, Schema, SchemaRef, TimeUnit},
error::Result,
record_batch::RecordBatch,
};
use chrono::{DateTime, Utc};
use data_types::{chunk_metadata::ChunkSummary, error::ErrorLogger};
use std::sync::Arc;
@ -56,6 +54,11 @@ fn chunk_summaries_schema() -> SchemaRef {
]))
}
// TODO: Use a custom proc macro or serde to reduce the boilerplate
/// Converts an optional `DateTime<Utc>` into nanoseconds since the Unix
/// epoch; a `None` input yields `None`.
fn time_to_ts(time: Option<DateTime<Utc>>) -> Option<i64> {
    time.map(|datetime| datetime.timestamp_nanos())
}
fn from_chunk_summaries(schema: SchemaRef, chunks: Vec<ChunkSummary>) -> Result<RecordBatch> {
let id = chunks.iter().map(|c| Some(c.id)).collect::<UInt32Array>();
let partition_key = chunks