refactor: expose query_tests to benches crate

pull/24376/head
Edd Robinson, 2021-04-13 11:18:36 +01:00 (committed by kodiakhq[bot])
parent 8f2cf61e0e
commit 3cfbfe8ebf
6 changed files with 24 additions and 2 deletions

@@ -105,10 +105,14 @@ use std::num::NonZeroU32;
 pub mod buffer;
 mod config;
 pub mod db;
+mod query_tests;
 pub mod snapshot;
 
-#[cfg(test)]
-mod query_tests;
+// This module exposes `query_tests` outside of the crate so that it may be used
+// in benchmarks. Do not import this module for non-benchmark purposes!
+pub mod benchmarks {
+    pub use crate::query_tests::*;
+}
 
 type DatabaseError = Box<dyn std::error::Error + Send + Sync + 'static>;
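
For context, the new `pub mod benchmarks` re-export above is what a separate benches crate would import to reach these fixtures. Below is a minimal sketch of such a benchmark; the library crate name (`server`), the `scenarios` re-export path, the file name, and the use of Criterion as the harness are all assumptions, not taken from this commit:

    // benches/query_scenarios.rs -- hypothetical file in the benches crate.
    use criterion::{criterion_group, criterion_main, Criterion};
    // Reaches query_tests only through the re-export added in this commit.
    use server::benchmarks::scenarios::TwoMeasurements;

    fn bench_scenario_setup(c: &mut Criterion) {
        c.bench_function("construct TwoMeasurements scenario", |b| {
            // A real benchmark would build the Db via DBSetup and run queries
            // against it; that async setup is elided from this sketch.
            b.iter(|| TwoMeasurements {})
        });
    }

    criterion_group!(benches, bench_scenario_setup);
    criterion_main!(benches);

Note that the comment in the module is the only guard against misuse: nothing stops non-benchmark code from importing `server::benchmarks`, hence the explicit warning. Exposing `query_tests` outside `cfg(test)` is also presumably why the `#![allow(unused_imports, dead_code, unused_macros)]` attributes appear throughout the rest of this diff.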

@@ -1,3 +1,4 @@
+#![allow(unused_imports, dead_code, unused_macros)]
 pub mod field_columns;
 pub mod read_filter;
 pub mod read_group;

@@ -9,6 +9,7 @@ use query::{
     predicate::{Predicate, PredicateBuilder, EMPTY_PREDICATE},
 };
+#[derive(Debug)]
 pub struct TwoMeasurementsMultiSeries {}
 #[async_trait]
 impl DBSetup for TwoMeasurementsMultiSeries {
@@ -309,6 +310,7 @@ async fn test_read_filter_data_pred_unsupported_in_scan() {
     run_read_filter_test_case!(TwoMeasurementsMultiSeries {}, predicate, expected_results);
 }
+#[derive(Debug)]
 pub struct MeasurementsSortableTags {}
 #[async_trait]
 impl DBSetup for MeasurementsSortableTags {

@@ -1,5 +1,6 @@
 //! This module contains testing scenarios for Db
+#[allow(unused_imports, dead_code, unused_macros)]
 use query::PartitionChunk;
 use async_trait::async_trait;
@@ -9,6 +10,7 @@ use crate::db::{test_helpers::write_lp, Db};
 use super::utils::{count_mutable_buffer_chunks, count_read_buffer_chunks, make_db};
 /// Holds a database and a description of how its data was configured
+#[derive(Debug)]
 pub struct DBScenario {
     pub scenario_name: String,
     pub db: Db,
@@ -22,6 +24,7 @@ pub trait DBSetup {
 }
 /// No data
+#[derive(Debug)]
 pub struct NoData {}
 #[async_trait]
 impl DBSetup for NoData {
@@ -76,6 +79,7 @@ impl DBSetup for NoData {
 }
 /// Two measurements data in a single mutable buffer chunk
+#[derive(Debug)]
 pub struct TwoMeasurements {}
 #[async_trait]
 impl DBSetup for TwoMeasurements {
@@ -91,6 +95,7 @@ impl DBSetup for TwoMeasurements {
     }
 }
+#[derive(Debug)]
 pub struct TwoMeasurementsUnsignedType {}
 #[async_trait]
 impl DBSetup for TwoMeasurementsUnsignedType {
@@ -109,6 +114,7 @@ impl DBSetup for TwoMeasurementsUnsignedType {
 /// Single measurement that has several different chunks with
 /// different (but compatible) schema
+#[derive(Debug)]
 pub struct MultiChunkSchemaMerge {}
 #[async_trait]
 impl DBSetup for MultiChunkSchemaMerge {
@@ -128,6 +134,7 @@ impl DBSetup for MultiChunkSchemaMerge {
 }
 /// Two measurements data with many null values
+#[derive(Debug)]
 pub struct TwoMeasurementsManyNulls {}
 #[async_trait]
 impl DBSetup for TwoMeasurementsManyNulls {
@@ -149,6 +156,7 @@ impl DBSetup for TwoMeasurementsManyNulls {
     }
 }
+#[derive(Debug)]
 pub struct TwoMeasurementsManyFields {}
 #[async_trait]
 impl DBSetup for TwoMeasurementsManyFields {
@@ -168,6 +176,7 @@ impl DBSetup for TwoMeasurementsManyFields {
     }
 }
+#[derive(Debug)]
 pub struct TwoMeasurementsManyFieldsOneChunk {}
 #[async_trait]
 impl DBSetup for TwoMeasurementsManyFieldsOneChunk {
@@ -190,6 +199,7 @@ impl DBSetup for TwoMeasurementsManyFieldsOneChunk {
     }
 }
+#[derive(Debug)]
 pub struct OneMeasurementManyFields {}
 #[async_trait]
 impl DBSetup for OneMeasurementManyFields {
@@ -210,6 +220,7 @@ impl DBSetup for OneMeasurementManyFields {
 }
 /// This data (from end to end test)
+#[derive(Debug)]
 pub struct EndToEndTest {}
 #[async_trait]
 impl DBSetup for EndToEndTest {

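The repeated `#[derive(Debug)]` additions above presumably exist so that generic test or benchmark drivers can format the scenario values they are handed, which requires a `Debug` bound. A small self-contained illustration of that effect (the helper name here is invented for the example):

    // Unit-like scenario struct, mirroring the ones in this diff.
    #[derive(Debug)]
    pub struct TwoMeasurements {}

    // A generic driver can only format its input because of the derive above.
    fn label_scenario<S: std::fmt::Debug>(scenario: &S) -> String {
        format!("{:?}", scenario)
    }

    fn main() {
        assert_eq!(label_scenario(&TwoMeasurements {}), "TwoMeasurements");
    }
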
@@ -3,6 +3,8 @@
 //! wired all the pieces together (as well as ensure any particularly
 //! important SQL does not regress)
+#![allow(unused_imports, dead_code, unused_macros)]
+
 use super::scenarios::*;
 use arrow_deps::{
     arrow::record_batch::RecordBatch, assert_table_eq, datafusion::physical_plan::collect,

@@ -1,5 +1,7 @@
 //! Tests for the table_names implementation
+#![allow(unused_imports, dead_code, unused_macros)]
+
 use arrow_deps::arrow::datatypes::DataType;
 use internal_types::{schema::builder::SchemaBuilder, selection::Selection};
 use query::{Database, PartitionChunk};