fix: Print all timestamps using RFC3339 format (#2098)

* fix: Use IOx pretty printer rather than arrow pretty printer

* chore: update tests in the query crate

* chore: update influxdb_iox tests

* chore: Update end to end tests

* chore: update query_tests

* chore: update mutable_buffer tests

* refactor: update parquet_file tests

* refactor: update db tests

* chore: update kafka integration test output

* fix: merge conflict
Andrew Lamb 2021-07-22 15:04:52 -04:00 committed by GitHub
parent a27d8fd859
commit 01c79f1a1a
30 changed files with 928 additions and 931 deletions
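The gist of the change, as a minimal standalone sketch: the arrow pretty printer rendered timestamps as naive date-times, while the IOx pretty printer renders them as RFC3339 with an explicit UTC "Z" suffix. The sketch below uses chrono directly for illustration only; the actual change routes table rendering through `arrow_util::display::pretty_format_batches`, and this helper is not part of the diff.

```rust
use chrono::{SecondsFormat, TimeZone, Utc};

fn main() {
    // A nanosecond-precision timestamp, as IOx stores time columns.
    let ts = Utc.timestamp_nanos(1);

    // Roughly what the arrow pretty printer produced before (naive, no offset):
    assert_eq!(
        ts.naive_utc().format("%Y-%m-%d %H:%M:%S%.9f").to_string(),
        "1970-01-01 00:00:00.000000001"
    );

    // What the IOx pretty printer produces after this change (RFC3339, UTC "Z"):
    assert_eq!(
        ts.to_rfc3339_opts(SecondsFormat::Nanos, true),
        "1970-01-01T00:00:00.000000001Z"
    );
}
```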

Cargo.lock (generated)

@ -1693,6 +1693,7 @@ version = "0.1.0"
dependencies = [
"arrow",
"arrow-flight",
"arrow_util",
"futures-util",
"generated_types",
"http",


@ -97,6 +97,7 @@ fn optimize_dict_col(
#[cfg(test)]
mod tests {
use super::*;
use crate as arrow_util;
use crate::assert_batches_eq;
use arrow::array::{ArrayDataBuilder, DictionaryArray, Float64Array, Int32Array, StringArray};
use arrow::compute::concat;


@ -18,7 +18,7 @@ macro_rules! assert_batches_eq {
let expected_lines: Vec<String> =
$EXPECTED_LINES.into_iter().map(|s| s.to_string()).collect();
let formatted = arrow::util::pretty::pretty_format_batches($CHUNKS).unwrap();
let formatted = arrow_util::display::pretty_format_batches($CHUNKS).unwrap();
let actual_lines = formatted.trim().split('\n').collect::<Vec<_>>();
@ -51,7 +51,7 @@ macro_rules! assert_batches_sorted_eq {
expected_lines.as_mut_slice()[2..num_lines - 1].sort_unstable()
}
let formatted = arrow::util::pretty::pretty_format_batches($CHUNKS).unwrap();
let formatted = arrow_util::display::pretty_format_batches($CHUNKS).unwrap();
// fix for windows: \r\n -->
let mut actual_lines: Vec<&str> = formatted.trim().lines().collect();


@ -10,11 +10,12 @@ format = ["arrow"]
[dependencies]
# Workspace dependencies, in alphabetical order
arrow = { version = "5.0", optional = true }
arrow-flight = { version = "5.0", optional = true}
arrow_util = { path = "../arrow_util" }
generated_types = { path = "../generated_types" }
# Crates.io dependencies, in alphabetical order
arrow = { version = "5.0", optional = true }
arrow-flight = { version = "5.0", optional = true}
futures-util = { version = "0.3.1", optional = true }
http = "0.2.3"
hyper = "0.14"


@ -126,7 +126,7 @@ impl QueryOutputFormat {
}
fn batches_to_pretty(batches: &[RecordBatch]) -> Result<String> {
arrow::util::pretty::pretty_format_batches(batches).map_err(Error::PrettyArrow)
arrow_util::display::pretty_format_batches(batches).map_err(Error::PrettyArrow)
}
fn batches_to_csv(batches: &[RecordBatch]) -> Result<String> {
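End to end, the new behavior can be reproduced by pretty-printing a record batch that has a nanosecond timestamp column. A sketch under assumptions: arrow 5's `TimestampNanosecondArray::from_vec` constructor and this repository's `arrow_util` crate as a dependency.

```rust
use std::sync::Arc;

use arrow::array::TimestampNanosecondArray;
use arrow::datatypes::{DataType, Field, Schema, TimeUnit};
use arrow::record_batch::RecordBatch;

fn render_example() -> Result<String, arrow::error::ArrowError> {
    // A single "time" column holding one value: 1 ns after the epoch.
    let schema = Arc::new(Schema::new(vec![Field::new(
        "time",
        DataType::Timestamp(TimeUnit::Nanosecond, None),
        false,
    )]));
    let time = TimestampNanosecondArray::from_vec(vec![1], None);
    let batch = RecordBatch::try_new(schema, vec![Arc::new(time)])?;

    // The cell now renders as "1970-01-01T00:00:00.000000001Z"
    // instead of the old "1970-01-01 00:00:00.000000001".
    arrow_util::display::pretty_format_batches(&[batch])
}
```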


@ -436,17 +436,16 @@ mod tests {
let lp = vec!["cpu,host=a val=23 1", "cpu,host=b val=2 1"].join("\n");
let chunk = write_lp_to_new_chunk(&lp).unwrap();
assert_batches_eq!(
vec![
"+------+-------------------------------+-----+",
"| host | time | val |",
"+------+-------------------------------+-----+",
"| a | 1970-01-01 00:00:00.000000001 | 23 |",
"| b | 1970-01-01 00:00:00.000000001 | 2 |",
"+------+-------------------------------+-----+",
],
&chunk_to_batches(&chunk)
);
let expected = vec![
"+------+--------------------------------+-----+",
"| host | time | val |",
"+------+--------------------------------+-----+",
"| a | 1970-01-01T00:00:00.000000001Z | 23 |",
"| b | 1970-01-01T00:00:00.000000001Z | 2 |",
"+------+--------------------------------+-----+",
];
assert_batches_eq!(expected, &chunk_to_batches(&chunk));
}
#[test]
@ -478,19 +477,18 @@ mod tests {
let lp = vec!["cpu,host=a val=14 2"].join("\n");
write_lp_to_chunk(&lp, &mut chunk).unwrap();
assert_batches_eq!(
vec![
"+------+-------------------------------+-----+",
"| host | time | val |",
"+------+-------------------------------+-----+",
"| a | 1970-01-01 00:00:00.000000001 | 23 |",
"| b | 1970-01-01 00:00:00.000000001 | 2 |",
"| c | 1970-01-01 00:00:00.000000001 | 11 |",
"| a | 1970-01-01 00:00:00.000000002 | 14 |",
"+------+-------------------------------+-----+",
],
&chunk_to_batches(&chunk)
);
let expected = vec![
"+------+--------------------------------+-----+",
"| host | time | val |",
"+------+--------------------------------+-----+",
"| a | 1970-01-01T00:00:00.000000001Z | 23 |",
"| b | 1970-01-01T00:00:00.000000001Z | 2 |",
"| c | 1970-01-01T00:00:00.000000001Z | 11 |",
"| a | 1970-01-01T00:00:00.000000002Z | 14 |",
"+------+--------------------------------+-----+",
];
assert_batches_eq!(expected, &chunk_to_batches(&chunk));
}
#[test]


@ -86,14 +86,14 @@ mod tests {
// Now verify the returned results. This assert_batches_eq still works correctly without the metadata.
// We might modify it to also check metadata, or add a new comparison macro that prints the metadata too.
let expected = vec![
"+----------------+---------------+-------------------+------------------+-------------------------+------------------------+----------------------------+---------------------------+----------------------+----------------------+-------------------------+------------------------+----------------------+----------------------+-------------------------+------------------------+----------------------+-------------------+--------------------+------------------------+-----------------------+-------------------------+------------------------+-----------------------+--------------------------+-------------------------+----------------------------+",
"| foo_tag_normal | foo_tag_empty | foo_tag_null_some | foo_tag_null_all | foo_field_string_normal | foo_field_string_empty | foo_field_string_null_some | foo_field_string_null_all | foo_field_i64_normal | foo_field_i64_range | foo_field_i64_null_some | foo_field_i64_null_all | foo_field_u64_normal | foo_field_u64_range | foo_field_u64_null_some | foo_field_u64_null_all | foo_field_f64_normal | foo_field_f64_inf | foo_field_f64_zero | foo_field_f64_nan_some | foo_field_f64_nan_all | foo_field_f64_null_some | foo_field_f64_null_all | foo_field_bool_normal | foo_field_bool_null_some | foo_field_bool_null_all | time |",
"+----------------+---------------+-------------------+------------------+-------------------------+------------------------+----------------------------+---------------------------+----------------------+----------------------+-------------------------+------------------------+----------------------+----------------------+-------------------------+------------------------+----------------------+-------------------+--------------------+------------------------+-----------------------+-------------------------+------------------------+-----------------------+--------------------------+-------------------------+----------------------------+",
"| foo | | | | foo | | | | -1 | -9223372036854775808 | | | 1 | 0 | | | 10.1 | 0 | 0 | NaN | NaN | | | true | | | 1970-01-01 00:00:00.000001 |",
"| bar | | bar | | bar | | bar | | 2 | 9223372036854775807 | 2 | | 2 | 18446744073709551615 | 2 | | 20.1 | inf | -0 | 2 | NaN | 20.1 | | false | false | | 1970-01-01 00:00:00.000002 |",
"| baz | | baz | | baz | | baz | | 3 | -9223372036854775808 | 3 | | 3 | 0 | 3 | | 30.1 | -inf | 0 | 1 | NaN | 30.1 | | true | true | | 1970-01-01 00:00:00.000003 |",
"| foo | | | | foo | | | | 4 | 9223372036854775807 | | | 4 | 18446744073709551615 | | | 40.1 | 1 | -0 | NaN | NaN | | | false | | | 1970-01-01 00:00:00.000004 |",
"+----------------+---------------+-------------------+------------------+-------------------------+------------------------+----------------------------+---------------------------+----------------------+----------------------+-------------------------+------------------------+----------------------+----------------------+-------------------------+------------------------+----------------------+-------------------+--------------------+------------------------+-----------------------+-------------------------+------------------------+-----------------------+--------------------------+-------------------------+----------------------------+",
"+----------------+---------------+-------------------+------------------+-------------------------+------------------------+----------------------------+---------------------------+----------------------+----------------------+-------------------------+------------------------+----------------------+----------------------+-------------------------+------------------------+----------------------+-------------------+--------------------+------------------------+-----------------------+-------------------------+------------------------+-----------------------+--------------------------+-------------------------+-----------------------------+",
"| foo_tag_normal | foo_tag_empty | foo_tag_null_some | foo_tag_null_all | foo_field_string_normal | foo_field_string_empty | foo_field_string_null_some | foo_field_string_null_all | foo_field_i64_normal | foo_field_i64_range | foo_field_i64_null_some | foo_field_i64_null_all | foo_field_u64_normal | foo_field_u64_range | foo_field_u64_null_some | foo_field_u64_null_all | foo_field_f64_normal | foo_field_f64_inf | foo_field_f64_zero | foo_field_f64_nan_some | foo_field_f64_nan_all | foo_field_f64_null_some | foo_field_f64_null_all | foo_field_bool_normal | foo_field_bool_null_some | foo_field_bool_null_all | time |",
"+----------------+---------------+-------------------+------------------+-------------------------+------------------------+----------------------------+---------------------------+----------------------+----------------------+-------------------------+------------------------+----------------------+----------------------+-------------------------+------------------------+----------------------+-------------------+--------------------+------------------------+-----------------------+-------------------------+------------------------+-----------------------+--------------------------+-------------------------+-----------------------------+",
"| foo | | | | foo | | | | -1 | -9223372036854775808 | | | 1 | 0 | | | 10.1 | 0 | 0 | NaN | NaN | | | true | | | 1970-01-01T00:00:00.000001Z |",
"| bar | | bar | | bar | | bar | | 2 | 9223372036854775807 | 2 | | 2 | 18446744073709551615 | 2 | | 20.1 | inf | -0 | 2 | NaN | 20.1 | | false | false | | 1970-01-01T00:00:00.000002Z |",
"| baz | | baz | | baz | | baz | | 3 | -9223372036854775808 | 3 | | 3 | 0 | 3 | | 30.1 | -inf | 0 | 1 | NaN | 30.1 | | true | true | | 1970-01-01T00:00:00.000003Z |",
"| foo | | | | foo | | | | 4 | 9223372036854775807 | | | 4 | 18446744073709551615 | | | 40.1 | 1 | -0 | NaN | NaN | | | false | | | 1970-01-01T00:00:00.000004Z |",
"+----------------+---------------+-------------------+------------------+-------------------------+------------------------+----------------------------+---------------------------+----------------------+----------------------+-------------------------+------------------------+----------------------+----------------------+-------------------------+------------------------+----------------------+-------------------+--------------------+------------------------+-----------------------+-------------------------+------------------------+-----------------------+--------------------------+-------------------------+-----------------------------+",
];
assert_eq!(num_rows, actual_num_rows);
assert_batches_eq!(expected.clone(), &record_batches);


@ -302,27 +302,27 @@ mod test {
);
let expected = vec![
"+-----------+------+-------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+-------------------------------+",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | 1970-01-01 00:00:00.000005 |",
"+-----------+------+-------------------------------+",
"+-----------+------+--------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+--------------------------------+",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | 1970-01-01T00:00:00.000005Z |",
"+-----------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &raw_data(&[Arc::clone(&chunk1)]).await);
let expected = vec![
"+-----------+------------+------+----------------------------+",
"| field_int | field_int2 | tag1 | time |",
"+-----------+------------+------+----------------------------+",
"| 1000 | 1000 | WA | 1970-01-01 00:00:00.000028 |",
"| 10 | 10 | VT | 1970-01-01 00:00:00.000210 |",
"| 70 | 70 | UT | 1970-01-01 00:00:00.000220 |",
"| 50 | 50 | VT | 1970-01-01 00:00:00.000210 |",
"+-----------+------------+------+----------------------------+",
"+-----------+------------+------+-----------------------------+",
"| field_int | field_int2 | tag1 | time |",
"+-----------+------------+------+-----------------------------+",
"| 1000 | 1000 | WA | 1970-01-01T00:00:00.000028Z |",
"| 10 | 10 | VT | 1970-01-01T00:00:00.000210Z |",
"| 70 | 70 | UT | 1970-01-01T00:00:00.000220Z |",
"| 50 | 50 | VT | 1970-01-01T00:00:00.000210Z |",
"+-----------+------------+------+-----------------------------+",
];
assert_batches_eq!(&expected, &raw_data(&[Arc::clone(&chunk2)]).await);
@ -379,18 +379,18 @@ mod test {
.unwrap();
let expected = vec![
"+-----------+------------+------+-------------------------------+",
"| field_int | field_int2 | tag1 | time |",
"+-----------+------------+------+-------------------------------+",
"| 1000 | 1000 | WA | 1970-01-01 00:00:00.000028 |",
"| 50 | 50 | VT | 1970-01-01 00:00:00.000210 |",
"| 70 | 70 | UT | 1970-01-01 00:00:00.000220 |",
"| 1000 | | MT | 1970-01-01 00:00:00.000001 |",
"| 5 | | MT | 1970-01-01 00:00:00.000005 |",
"| 10 | | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | | AL | 1970-01-01 00:00:00.000000050 |",
"+-----------+------------+------+-------------------------------+",
"+-----------+------------+------+--------------------------------+",
"| field_int | field_int2 | tag1 | time |",
"+-----------+------------+------+--------------------------------+",
"| 1000 | 1000 | WA | 1970-01-01T00:00:00.000028Z |",
"| 50 | 50 | VT | 1970-01-01T00:00:00.000210Z |",
"| 70 | 70 | UT | 1970-01-01T00:00:00.000220Z |",
"| 1000 | | MT | 1970-01-01T00:00:00.000001Z |",
"| 5 | | MT | 1970-01-01T00:00:00.000005Z |",
"| 10 | | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | | AL | 1970-01-01T00:00:00.000000050Z |",
"+-----------+------------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &batches);
}
@ -437,13 +437,13 @@ mod test {
// Note sorted on time
let expected = vec![
"+-----------+------------+------+-------------------------------+",
"| field_int | field_int2 | tag1 | time |",
"+-----------+------------+------+-------------------------------+",
"| 100 | | AL | 1970-01-01 00:00:00.000000050 |",
"| 70 | | CT | 1970-01-01 00:00:00.000000100 |",
"| 1000 | | MT | 1970-01-01 00:00:00.000001 |",
"+-----------+------------+------+-------------------------------+",
"+-----------+------------+------+--------------------------------+",
"| field_int | field_int2 | tag1 | time |",
"+-----------+------------+------+--------------------------------+",
"| 100 | | AL | 1970-01-01T00:00:00.000000050Z |",
"| 70 | | CT | 1970-01-01T00:00:00.000000100Z |",
"| 1000 | | MT | 1970-01-01T00:00:00.000001Z |",
"+-----------+------------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &batches0);
@ -454,15 +454,15 @@ mod test {
// Note sorted on time
let expected = vec![
"+-----------+------------+------+----------------------------+",
"| field_int | field_int2 | tag1 | time |",
"+-----------+------------+------+----------------------------+",
"| 5 | | MT | 1970-01-01 00:00:00.000005 |",
"| 10 | | MT | 1970-01-01 00:00:00.000007 |",
"| 1000 | 1000 | WA | 1970-01-01 00:00:00.000028 |",
"| 50 | 50 | VT | 1970-01-01 00:00:00.000210 |",
"| 70 | 70 | UT | 1970-01-01 00:00:00.000220 |",
"+-----------+------------+------+----------------------------+",
"+-----------+------------+------+-----------------------------+",
"| field_int | field_int2 | tag1 | time |",
"+-----------+------------+------+-----------------------------+",
"| 5 | | MT | 1970-01-01T00:00:00.000005Z |",
"| 10 | | MT | 1970-01-01T00:00:00.000007Z |",
"| 1000 | 1000 | WA | 1970-01-01T00:00:00.000028Z |",
"| 50 | 50 | VT | 1970-01-01T00:00:00.000210Z |",
"| 70 | 70 | UT | 1970-01-01T00:00:00.000220Z |",
"+-----------+------------+------+-----------------------------+",
];
assert_batches_eq!(&expected, &batches1);
}


@ -911,15 +911,15 @@ mod test {
let batch = collect(Arc::clone(&input)).await.unwrap();
// data in its original non-sorted form
let expected = vec![
"+-----------+------+-------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+-------------------------------+",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | 1970-01-01 00:00:00.000005 |",
"+-----------+------+-------------------------------+",
"+-----------+------+--------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+--------------------------------+",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | 1970-01-01T00:00:00.000005Z |",
"+-----------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &batch);
@ -928,15 +928,15 @@ mod test {
let batch = collect(sort_plan.unwrap()).await.unwrap();
// data is sorted on (tag1, time)
let expected = vec![
"+-----------+------+-------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+-------------------------------+",
"| 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 5 | MT | 1970-01-01 00:00:00.000005 |",
"| 10 | MT | 1970-01-01 00:00:00.000007 |",
"+-----------+------+-------------------------------+",
"+-----------+------+--------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+--------------------------------+",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z |",
"| 5 | MT | 1970-01-01T00:00:00.000005Z |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z |",
"+-----------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &batch);
}
@ -991,15 +991,15 @@ mod test {
let batch = collect(Arc::clone(&input)).await.unwrap();
// data in its original non-sorted form
let expected = vec![
"+-----------+------+------+-------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+-------------------------------+",
"| 1000 | MT | CT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | AL | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | MA | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | AL | 1970-01-01 00:00:00.000005 |",
"+-----------+------+------+-------------------------------+",
"+-----------+------+------+--------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+--------------------------------+",
"| 1000 | MT | CT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | AL | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | MA | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | AL | 1970-01-01T00:00:00.000005Z |",
"+-----------+------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &batch);
@ -1008,15 +1008,15 @@ mod test {
let batch = collect(sort_plan.unwrap()).await.unwrap();
// with the provider stats, data is sorted on: (tag1, tag2, time)
let expected = vec![
"+-----------+------+------+-------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+-------------------------------+",
"| 100 | AL | MA | 1970-01-01 00:00:00.000000050 |",
"| 70 | CT | CT | 1970-01-01 00:00:00.000000100 |",
"| 5 | MT | AL | 1970-01-01 00:00:00.000005 |",
"| 10 | MT | AL | 1970-01-01 00:00:00.000007 |",
"| 1000 | MT | CT | 1970-01-01 00:00:00.000001 |",
"+-----------+------+------+-------------------------------+",
"+-----------+------+------+--------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+--------------------------------+",
"| 100 | AL | MA | 1970-01-01T00:00:00.000000050Z |",
"| 70 | CT | CT | 1970-01-01T00:00:00.000000100Z |",
"| 5 | MT | AL | 1970-01-01T00:00:00.000005Z |",
"| 10 | MT | AL | 1970-01-01T00:00:00.000007Z |",
"| 1000 | MT | CT | 1970-01-01T00:00:00.000001Z |",
"+-----------+------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &batch);
}
@ -1070,15 +1070,15 @@ mod test {
let batch = collect(sort_plan.unwrap()).await.unwrap();
// with provided stats, data is sorted on (tag1, tag2, time)
let expected = vec![
"+-----------+------+------+-------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+-------------------------------+",
"| 100 | AL | MA | 1970-01-01 00:00:00.000000050 |",
"| 70 | CT | CT | 1970-01-01 00:00:00.000000100 |",
"| 5 | MT | AL | 1970-01-01 00:00:00.000005 |",
"| 10 | MT | AL | 1970-01-01 00:00:00.000007 |",
"| 1000 | MT | CT | 1970-01-01 00:00:00.000001 |",
"+-----------+------+------+-------------------------------+",
"+-----------+------+------+--------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+--------------------------------+",
"| 100 | AL | MA | 1970-01-01T00:00:00.000000050Z |",
"| 70 | CT | CT | 1970-01-01T00:00:00.000000100Z |",
"| 5 | MT | AL | 1970-01-01T00:00:00.000005Z |",
"| 10 | MT | AL | 1970-01-01T00:00:00.000007Z |",
"| 1000 | MT | CT | 1970-01-01T00:00:00.000001Z |",
"+-----------+------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &batch);
}
@ -1149,20 +1149,20 @@ mod test {
// data in its original form
let expected = vec![
"+-----------+------+------+-------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+-------------------------------+",
"| 1000 | MT | CT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | AL | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | MA | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | AL | 1970-01-01 00:00:00.000005 |",
"| 1000 | MT | CT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | AL | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | MA | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | AL | 1970-01-01 00:00:00.000005 |",
"+-----------+------+------+-------------------------------+",
"+-----------+------+------+--------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+--------------------------------+",
"| 1000 | MT | CT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | AL | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | MA | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | AL | 1970-01-01T00:00:00.000005Z |",
"| 1000 | MT | CT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | AL | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | MA | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | AL | 1970-01-01T00:00:00.000005Z |",
"+-----------+------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &raw_data(&chunks).await);
@ -1175,15 +1175,15 @@ mod test {
let batch = collect(sort_plan.unwrap()).await.unwrap();
// data is sorted on primary key(tag1, tag2, time)
let expected = vec![
"+-----------+------+------+-------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+-------------------------------+",
"| 100 | AL | MA | 1970-01-01 00:00:00.000000050 |",
"| 70 | CT | CT | 1970-01-01 00:00:00.000000100 |",
"| 5 | MT | AL | 1970-01-01 00:00:00.000005 |",
"| 10 | MT | AL | 1970-01-01 00:00:00.000007 |",
"| 1000 | MT | CT | 1970-01-01 00:00:00.000001 |",
"+-----------+------+------+-------------------------------+",
"+-----------+------+------+--------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+--------------------------------+",
"| 100 | AL | MA | 1970-01-01T00:00:00.000000050Z |",
"| 70 | CT | CT | 1970-01-01T00:00:00.000000100Z |",
"| 5 | MT | AL | 1970-01-01T00:00:00.000005Z |",
"| 10 | MT | AL | 1970-01-01T00:00:00.000007Z |",
"| 1000 | MT | CT | 1970-01-01T00:00:00.000001Z |",
"+-----------+------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &batch);
}
@ -1252,20 +1252,20 @@ mod test {
// data in its original form
let expected = vec![
"+-----------+------+------+-------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+-------------------------------+",
"| 1000 | MT | CT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | AL | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | MA | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | AL | 1970-01-01 00:00:00.000005 |",
"| 1000 | MT | CT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | AL | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | MA | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | AL | 1970-01-01 00:00:00.000005 |",
"+-----------+------+------+-------------------------------+",
"+-----------+------+------+--------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+--------------------------------+",
"| 1000 | MT | CT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | AL | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | MA | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | AL | 1970-01-01T00:00:00.000005Z |",
"| 1000 | MT | CT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | AL | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | MA | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | AL | 1970-01-01T00:00:00.000005Z |",
"+-----------+------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &raw_data(&chunks).await);
@ -1286,15 +1286,15 @@ mod test {
let batch = collect(sort_plan.unwrap()).await.unwrap();
// expect only 5 values, with "f1" and "timestamp" (even though input has 10)
let expected = vec![
"+-----------+-------------------------------+",
"| field_int | time |",
"+-----------+-------------------------------+",
"| 100 | 1970-01-01 00:00:00.000000050 |",
"| 70 | 1970-01-01 00:00:00.000000100 |",
"| 5 | 1970-01-01 00:00:00.000005 |",
"| 10 | 1970-01-01 00:00:00.000007 |",
"| 1000 | 1970-01-01 00:00:00.000001 |",
"+-----------+-------------------------------+",
"+-----------+--------------------------------+",
"| field_int | time |",
"+-----------+--------------------------------+",
"| 100 | 1970-01-01T00:00:00.000000050Z |",
"| 70 | 1970-01-01T00:00:00.000000100Z |",
"| 5 | 1970-01-01T00:00:00.000005Z |",
"| 10 | 1970-01-01T00:00:00.000007Z |",
"| 1000 | 1970-01-01T00:00:00.000001Z |",
"+-----------+--------------------------------+",
];
assert_batches_eq!(&expected, &batch);
}
@ -1377,25 +1377,25 @@ mod test {
let chunks = vec![chunk1, chunk2, chunk3];
// data in its original form
let expected = vec![
"+-----------+------+-------------------------------+-------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+-------------------------------+-------------------------------+",
"| 1000 | MT | CT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | AL | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | MA | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | AL | 1970-01-01 00:00:00.000005 |",
"| 1000 | MT | 1970-01-01 00:00:00.000001 | |",
"| 10 | MT | 1970-01-01 00:00:00.000007 | |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 | |",
"| 100 | AL | 1970-01-01 00:00:00.000000050 | |",
"| 5 | MT | 1970-01-01 00:00:00.000005 | |",
"| 1000 | MT | 1970-01-01 00:00:00.000001 | |",
"| 10 | MT | 1970-01-01 00:00:00.000007 | |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 | |",
"| 100 | AL | 1970-01-01 00:00:00.000000050 | |",
"| 5 | MT | 1970-01-01 00:00:00.000005 | |",
"+-----------+------+-------------------------------+-------------------------------+",
"+-----------+------+--------------------------------+--------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+--------------------------------+--------------------------------+",
"| 1000 | MT | CT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | AL | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | MA | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | AL | 1970-01-01T00:00:00.000005Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z | |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z | |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z | |",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z | |",
"| 5 | MT | 1970-01-01T00:00:00.000005Z | |",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z | |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z | |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z | |",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z | |",
"| 5 | MT | 1970-01-01T00:00:00.000005Z | |",
"+-----------+------+--------------------------------+--------------------------------+",
];
assert_batches_eq!(&expected, &raw_data(&chunks).await);
@ -1530,25 +1530,25 @@ mod test {
let chunks = vec![chunk1, chunk2, chunk3];
// data in its original form
let expected = vec![
"+-----------+------+------+-------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+-------------------------------+",
"| 1000 | MT | CT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | AL | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | MA | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | AL | 1970-01-01 00:00:00.000005 |",
"| 1000 | MT | CT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | AL | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | AL | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | MT | 1970-01-01 00:00:00.000005 |",
"| 1000 | 1000 | CT | 1970-01-01 00:00:00.000001 |",
"| 10 | 10 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | 70 | AL | 1970-01-01 00:00:00.000000100 |",
"| 100 | 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 5 | 5 | MT | 1970-01-01 00:00:00.000005 |",
"+-----------+------+------+-------------------------------+",
"+-----------+------+------+--------------------------------+",
"| field_int | tag1 | tag2 | time |",
"+-----------+------+------+--------------------------------+",
"| 1000 | MT | CT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | AL | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | MA | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | AL | 1970-01-01T00:00:00.000005Z |",
"| 1000 | MT | CT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | AL | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | AL | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | MT | 1970-01-01T00:00:00.000005Z |",
"| 1000 | 1000 | CT | 1970-01-01T00:00:00.000001Z |",
"| 10 | 10 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | 70 | AL | 1970-01-01T00:00:00.000000100Z |",
"| 100 | 100 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 5 | 5 | MT | 1970-01-01T00:00:00.000005Z |",
"+-----------+------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &raw_data(&chunks).await);
@ -1561,25 +1561,25 @@ mod test {
let batch = collect(sort_plan.unwrap()).await.unwrap();
// with provided stats, data is sorted on (tag2, tag1, tag3, time)
let expected = vec![
"+-----------+------------+------+------+------+-------------------------------+",
"| field_int | field_int2 | tag1 | tag2 | tag3 | time |",
"+-----------+------------+------+------+------+-------------------------------+",
"| 100 | 100 | | | AL | 1970-01-01 00:00:00.000000050 |",
"| 70 | 70 | | | AL | 1970-01-01 00:00:00.000000100 |",
"| 1000 | 1000 | | | CT | 1970-01-01 00:00:00.000001 |",
"| 5 | 5 | | | MT | 1970-01-01 00:00:00.000005 |",
"| 10 | 10 | | | MT | 1970-01-01 00:00:00.000007 |",
"| 100 | | AL | | AL | 1970-01-01 00:00:00.000000050 |",
"| 70 | | CT | | AL | 1970-01-01 00:00:00.000000100 |",
"| 1000 | | MT | | CT | 1970-01-01 00:00:00.000001 |",
"| 5 | | MT | | MT | 1970-01-01 00:00:00.000005 |",
"| 10 | | MT | | MT | 1970-01-01 00:00:00.000007 |",
"| 5 | | MT | AL | | 1970-01-01 00:00:00.000005 |",
"| 10 | | MT | AL | | 1970-01-01 00:00:00.000007 |",
"| 70 | | CT | CT | | 1970-01-01 00:00:00.000000100 |",
"| 1000 | | MT | CT | | 1970-01-01 00:00:00.000001 |",
"| 100 | | AL | MA | | 1970-01-01 00:00:00.000000050 |",
"+-----------+------------+------+------+------+-------------------------------+",
"+-----------+------------+------+------+------+--------------------------------+",
"| field_int | field_int2 | tag1 | tag2 | tag3 | time |",
"+-----------+------------+------+------+------+--------------------------------+",
"| 100 | 100 | | | AL | 1970-01-01T00:00:00.000000050Z |",
"| 70 | 70 | | | AL | 1970-01-01T00:00:00.000000100Z |",
"| 1000 | 1000 | | | CT | 1970-01-01T00:00:00.000001Z |",
"| 5 | 5 | | | MT | 1970-01-01T00:00:00.000005Z |",
"| 10 | 10 | | | MT | 1970-01-01T00:00:00.000007Z |",
"| 100 | | AL | | AL | 1970-01-01T00:00:00.000000050Z |",
"| 70 | | CT | | AL | 1970-01-01T00:00:00.000000100Z |",
"| 1000 | | MT | | CT | 1970-01-01T00:00:00.000001Z |",
"| 5 | | MT | | MT | 1970-01-01T00:00:00.000005Z |",
"| 10 | | MT | | MT | 1970-01-01T00:00:00.000007Z |",
"| 5 | | MT | AL | | 1970-01-01T00:00:00.000005Z |",
"| 10 | | MT | AL | | 1970-01-01T00:00:00.000007Z |",
"| 70 | | CT | CT | | 1970-01-01T00:00:00.000000100Z |",
"| 1000 | | MT | CT | | 1970-01-01T00:00:00.000001Z |",
"| 100 | | AL | MA | | 1970-01-01T00:00:00.000000050Z |",
"+-----------+------------+------+------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &batch);
}
@ -1614,15 +1614,15 @@ mod test {
// data in its original form
let expected = vec![
"+-----------+------+-------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+-------------------------------+",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | 1970-01-01 00:00:00.000005 |",
"+-----------+------+-------------------------------+",
"+-----------+------+--------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+--------------------------------+",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | 1970-01-01T00:00:00.000005Z |",
"+-----------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &raw_data(&chunks).await);
@ -1631,17 +1631,6 @@ mod test {
deduplicator.build_scan_plan(Arc::from("t"), schema, chunks, Predicate::default());
let batch = collect(plan.unwrap()).await.unwrap();
// No duplicates so no sort at all. The data will stay in their original order
let expected = vec![
"+-----------+------+-------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+-------------------------------+",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | 1970-01-01 00:00:00.000005 |",
"+-----------+------+-------------------------------+",
];
assert_batches_eq!(&expected, &batch);
}
@ -1676,20 +1665,20 @@ mod test {
// data in its original form
let expected = vec![
"+-----------+------+-------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+-------------------------------+",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | 1970-01-01 00:00:00.000000005 |",
"| 1000 | MT | 1970-01-01 00:00:00.000002 |",
"| 20 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000500 |",
"| 10 | AL | 1970-01-01 00:00:00.000000050 |",
"| 30 | MT | 1970-01-01 00:00:00.000000005 |",
"+-----------+------+-------------------------------+",
"+-----------+------+--------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+--------------------------------+",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | 1970-01-01T00:00:00.000000005Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000002Z |",
"| 20 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000500Z |",
"| 10 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 30 | MT | 1970-01-01T00:00:00.000000005Z |",
"+-----------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &raw_data(&chunks).await);
@ -1699,17 +1688,17 @@ mod test {
let batch = collect(plan.unwrap()).await.unwrap();
// Data must be sorted on (tag1, time) and duplicates removed
let expected = vec![
"+-----------+------+-------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+-------------------------------+",
"| 10 | AL | 1970-01-01 00:00:00.000000050 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 70 | CT | 1970-01-01 00:00:00.000000500 |",
"| 30 | MT | 1970-01-01 00:00:00.000000005 |",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 1000 | MT | 1970-01-01 00:00:00.000002 |",
"| 20 | MT | 1970-01-01 00:00:00.000007 |",
"+-----------+------+-------------------------------+",
"+-----------+------+--------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+--------------------------------+",
"| 10 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000500Z |",
"| 30 | MT | 1970-01-01T00:00:00.000000005Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000002Z |",
"| 20 | MT | 1970-01-01T00:00:00.000007Z |",
"+-----------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &batch);
}
@ -1742,20 +1731,20 @@ mod test {
let chunks = vec![chunk];
// data in its original form
let expected = vec![
"+-----------+------+-------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+-------------------------------+",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | 1970-01-01 00:00:00.000000005 |",
"| 1000 | MT | 1970-01-01 00:00:00.000002 |",
"| 20 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000500 |",
"| 10 | AL | 1970-01-01 00:00:00.000000050 |",
"| 30 | MT | 1970-01-01 00:00:00.000000005 |",
"+-----------+------+-------------------------------+",
"+-----------+------+--------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+--------------------------------+",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | 1970-01-01T00:00:00.000000005Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000002Z |",
"| 20 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000500Z |",
"| 10 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 30 | MT | 1970-01-01T00:00:00.000000005Z |",
"+-----------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &raw_data(&chunks).await);
@ -1777,17 +1766,17 @@ mod test {
// expect just the 7 rows of de-duplicated data
let expected = vec![
"+-----------+-------------------------------+",
"| field_int | time |",
"+-----------+-------------------------------+",
"| 10 | 1970-01-01 00:00:00.000000050 |",
"| 70 | 1970-01-01 00:00:00.000000100 |",
"| 70 | 1970-01-01 00:00:00.000000500 |",
"| 30 | 1970-01-01 00:00:00.000000005 |",
"| 1000 | 1970-01-01 00:00:00.000001 |",
"| 1000 | 1970-01-01 00:00:00.000002 |",
"| 20 | 1970-01-01 00:00:00.000007 |",
"+-----------+-------------------------------+",
"+-----------+--------------------------------+",
"| field_int | time |",
"+-----------+--------------------------------+",
"| 10 | 1970-01-01T00:00:00.000000050Z |",
"| 70 | 1970-01-01T00:00:00.000000100Z |",
"| 70 | 1970-01-01T00:00:00.000000500Z |",
"| 30 | 1970-01-01T00:00:00.000000005Z |",
"| 1000 | 1970-01-01T00:00:00.000001Z |",
"| 1000 | 1970-01-01T00:00:00.000002Z |",
"| 20 | 1970-01-01T00:00:00.000007Z |",
"+-----------+--------------------------------+",
];
assert_batches_eq!(&expected, &batch);
}
@ -1841,25 +1830,25 @@ mod test {
// data in its original form
let expected = vec![
"+-----------+------+-------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+-------------------------------+",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | 1970-01-01 00:00:00.000000005 |",
"| 1000 | MT | 1970-01-01 00:00:00.000002 |",
"| 20 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000500 |",
"| 10 | AL | 1970-01-01 00:00:00.000000050 |",
"| 30 | MT | 1970-01-01 00:00:00.000000005 |",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | 1970-01-01 00:00:00.000005 |",
"+-----------+------+-------------------------------+",
"+-----------+------+--------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+--------------------------------+",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | 1970-01-01T00:00:00.000000005Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000002Z |",
"| 20 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000500Z |",
"| 10 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 30 | MT | 1970-01-01T00:00:00.000000005Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | 1970-01-01T00:00:00.000005Z |",
"+-----------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &raw_data(&chunks).await);
@ -1869,18 +1858,18 @@ mod test {
let batch = collect(plan.unwrap()).await.unwrap();
// Two overlapped chunks will be sort merged on (tag1, time) with duplicates removed
let expected = vec![
"+-----------+------+-------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+-------------------------------+",
"| 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 70 | CT | 1970-01-01 00:00:00.000000500 |",
"| 30 | MT | 1970-01-01 00:00:00.000000005 |",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 1000 | MT | 1970-01-01 00:00:00.000002 |",
"| 5 | MT | 1970-01-01 00:00:00.000005 |",
"| 10 | MT | 1970-01-01 00:00:00.000007 |",
"+-----------+------+-------------------------------+",
"+-----------+------+--------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+--------------------------------+",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000500Z |",
"| 30 | MT | 1970-01-01T00:00:00.000000005Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000002Z |",
"| 5 | MT | 1970-01-01T00:00:00.000005Z |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z |",
"+-----------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &batch);
}
@ -1980,32 +1969,32 @@ mod test {
// data in its original form
let expected = vec![
"+-----------+------+-------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+-------------------------------+",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | 1970-01-01 00:00:00.000000005 |",
"| 1000 | MT | 1970-01-01 00:00:00.000002 |",
"| 20 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000500 |",
"| 10 | AL | 1970-01-01 00:00:00.000000050 |",
"| 30 | MT | 1970-01-01 00:00:00.000000005 |",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 10 | MT | 1970-01-01 00:00:00.000007 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 5 | MT | 1970-01-01 00:00:00.000005 |",
"| 1000 | WA | 1970-01-01 00:00:00.000008 |",
"| 10 | VT | 1970-01-01 00:00:00.000010 |",
"| 70 | UT | 1970-01-01 00:00:00.000020 |",
"| 1000 | WA | 1970-01-01 00:00:00.000028 |",
"| 10 | VT | 1970-01-01 00:00:00.000210 |",
"| 70 | UT | 1970-01-01 00:00:00.000220 |",
"| 50 | VT | 1970-01-01 00:00:00.000210 |",
"+-----------+------+-------------------------------+",
"+-----------+------+--------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+--------------------------------+",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | 1970-01-01T00:00:00.000000005Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000002Z |",
"| 20 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000500Z |",
"| 10 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 30 | MT | 1970-01-01T00:00:00.000000005Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z |",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 5 | MT | 1970-01-01T00:00:00.000005Z |",
"| 1000 | WA | 1970-01-01T00:00:00.000008Z |",
"| 10 | VT | 1970-01-01T00:00:00.000010Z |",
"| 70 | UT | 1970-01-01T00:00:00.000020Z |",
"| 1000 | WA | 1970-01-01T00:00:00.000028Z |",
"| 10 | VT | 1970-01-01T00:00:00.000210Z |",
"| 70 | UT | 1970-01-01T00:00:00.000220Z |",
"| 50 | VT | 1970-01-01T00:00:00.000210Z |",
"+-----------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &raw_data(&chunks).await);
@ -2018,24 +2007,24 @@ mod test {
// . chunk3 will stay in its original (rows 1-3)
// . chunk4 will be sorted and deduplicated (rows 4-6)
let expected = vec![
"+-----------+------+-------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+-------------------------------+",
"| 1000 | WA | 1970-01-01 00:00:00.000008 |",
"| 10 | VT | 1970-01-01 00:00:00.000010 |",
"| 70 | UT | 1970-01-01 00:00:00.000020 |",
"| 70 | UT | 1970-01-01 00:00:00.000220 |",
"| 50 | VT | 1970-01-01 00:00:00.000210 |",
"| 1000 | WA | 1970-01-01 00:00:00.000028 |",
"| 100 | AL | 1970-01-01 00:00:00.000000050 |",
"| 70 | CT | 1970-01-01 00:00:00.000000100 |",
"| 70 | CT | 1970-01-01 00:00:00.000000500 |",
"| 30 | MT | 1970-01-01 00:00:00.000000005 |",
"| 1000 | MT | 1970-01-01 00:00:00.000001 |",
"| 1000 | MT | 1970-01-01 00:00:00.000002 |",
"| 5 | MT | 1970-01-01 00:00:00.000005 |",
"| 10 | MT | 1970-01-01 00:00:00.000007 |",
"+-----------+------+-------------------------------+",
"+-----------+------+--------------------------------+",
"| field_int | tag1 | time |",
"+-----------+------+--------------------------------+",
"| 1000 | WA | 1970-01-01T00:00:00.000008Z |",
"| 10 | VT | 1970-01-01T00:00:00.000010Z |",
"| 70 | UT | 1970-01-01T00:00:00.000020Z |",
"| 70 | UT | 1970-01-01T00:00:00.000220Z |",
"| 50 | VT | 1970-01-01T00:00:00.000210Z |",
"| 1000 | WA | 1970-01-01T00:00:00.000028Z |",
"| 100 | AL | 1970-01-01T00:00:00.000000050Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000100Z |",
"| 70 | CT | 1970-01-01T00:00:00.000000500Z |",
"| 30 | MT | 1970-01-01T00:00:00.000000005Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000001Z |",
"| 1000 | MT | 1970-01-01T00:00:00.000002Z |",
"| 5 | MT | 1970-01-01T00:00:00.000005Z |",
"| 10 | MT | 1970-01-01T00:00:00.000007Z |",
"+-----------+------+--------------------------------+",
];
assert_batches_eq!(&expected, &batch);
}


@ -148,11 +148,11 @@ async fn test_field_name_plan() {
.expect("ok running plan");
let expected = vec![
"+--------+--------+--------+--------+-------------------------------+",
"| field1 | field2 | field3 | field4 | time |",
"+--------+--------+--------+--------+-------------------------------+",
"| 70.5 | ss | 2 | | 1970-01-01 00:00:00.000000100 |",
"+--------+--------+--------+--------+-------------------------------+",
"+--------+--------+--------+--------+--------------------------------+",
"| field1 | field2 | field3 | field4 | time |",
"+--------+--------+--------+--------+--------------------------------+",
"| 70.5 | ss | 2 | | 1970-01-01T00:00:00.000000100Z |",
"+--------+--------+--------+--------+--------------------------------+",
];
assert_batches_eq!(expected, &results);


@ -57,7 +57,7 @@ macro_rules! run_read_filter_test_case {
assert_eq!(
expected_results, string_results,
"Error in scenario '{}'\n\nexpected:\n{:#?}\nactual:\n{:#?}",
"Error in scenario '{}'\n\nexpected:\n{:#?}\n\nactual:\n{:#?}",
scenario_name, expected_results, string_results
);
}
@ -86,14 +86,14 @@ async fn test_read_filter_data_no_pred() {
"start_row: 0",
"num_rows: 2",
"Batches:",
"+--------+-------+------+-------------------------------+",
"| city | state | temp | time |",
"+--------+-------+------+-------------------------------+",
"| Boston | MA | 70.4 | 1970-01-01 00:00:00.000000100 |",
"| Boston | MA | 72.4 | 1970-01-01 00:00:00.000000250 |",
"| LA | CA | 90 | 1970-01-01 00:00:00.000000200 |",
"| LA | CA | 90 | 1970-01-01 00:00:00.000000350 |",
"+--------+-------+------+-------------------------------+",
"+--------+-------+------+--------------------------------+",
"| city | state | temp | time |",
"+--------+-------+------+--------------------------------+",
"| Boston | MA | 70.4 | 1970-01-01T00:00:00.000000100Z |",
"| Boston | MA | 72.4 | 1970-01-01T00:00:00.000000250Z |",
"| LA | CA | 90 | 1970-01-01T00:00:00.000000200Z |",
"| LA | CA | 90 | 1970-01-01T00:00:00.000000350Z |",
"+--------+-------+------+--------------------------------+",
"SeriesSet",
"table_name: h2o",
"tags",
@ -104,14 +104,14 @@ async fn test_read_filter_data_no_pred() {
"start_row: 2",
"num_rows: 2",
"Batches:",
"+--------+-------+------+-------------------------------+",
"| city | state | temp | time |",
"+--------+-------+------+-------------------------------+",
"| Boston | MA | 70.4 | 1970-01-01 00:00:00.000000100 |",
"| Boston | MA | 72.4 | 1970-01-01 00:00:00.000000250 |",
"| LA | CA | 90 | 1970-01-01 00:00:00.000000200 |",
"| LA | CA | 90 | 1970-01-01 00:00:00.000000350 |",
"+--------+-------+------+-------------------------------+",
"+--------+-------+------+--------------------------------+",
"| city | state | temp | time |",
"+--------+-------+------+--------------------------------+",
"| Boston | MA | 70.4 | 1970-01-01T00:00:00.000000100Z |",
"| Boston | MA | 72.4 | 1970-01-01T00:00:00.000000250Z |",
"| LA | CA | 90 | 1970-01-01T00:00:00.000000200Z |",
"| LA | CA | 90 | 1970-01-01T00:00:00.000000350Z |",
"+--------+-------+------+--------------------------------+",
"SeriesSet",
"table_name: o2",
"tags",
@ -123,12 +123,12 @@ async fn test_read_filter_data_no_pred() {
"start_row: 0",
"num_rows: 2",
"Batches:",
"+--------+-------+---------+------+-------------------------------+",
"| city | state | reading | temp | time |",
"+--------+-------+---------+------+-------------------------------+",
"| Boston | MA | 50 | 50.4 | 1970-01-01 00:00:00.000000100 |",
"| Boston | MA | 51 | 53.4 | 1970-01-01 00:00:00.000000250 |",
"+--------+-------+---------+------+-------------------------------+",
"+--------+-------+---------+------+--------------------------------+",
"| city | state | reading | temp | time |",
"+--------+-------+---------+------+--------------------------------+",
"| Boston | MA | 50 | 50.4 | 1970-01-01T00:00:00.000000100Z |",
"| Boston | MA | 51 | 53.4 | 1970-01-01T00:00:00.000000250Z |",
"+--------+-------+---------+------+--------------------------------+",
];
run_read_filter_test_case!(TwoMeasurementsMultiSeries {}, predicate, expected_results);
@ -153,11 +153,11 @@ async fn test_read_filter_data_filter() {
"start_row: 0",
"num_rows: 1",
"Batches:",
"+------+-------+------+-------------------------------+",
"| city | state | temp | time |",
"+------+-------+------+-------------------------------+",
"| LA | CA | 90 | 1970-01-01 00:00:00.000000200 |",
"+------+-------+------+-------------------------------+",
"+------+-------+------+--------------------------------+",
"| city | state | temp | time |",
"+------+-------+------+--------------------------------+",
"| LA | CA | 90 | 1970-01-01T00:00:00.000000200Z |",
"+------+-------+------+--------------------------------+",
];
run_read_filter_test_case!(
@ -195,11 +195,11 @@ async fn test_read_filter_data_filter_fields() {
"start_row: 0",
"num_rows: 1",
"Batches:",
"+--------+-------+------------+-------------------------------+",
"| city | state | other_temp | time |",
"+--------+-------+------------+-------------------------------+",
"| Boston | CA | 72.4 | 1970-01-01 00:00:00.000000350 |",
"+--------+-------+------------+-------------------------------+",
"+--------+-------+------------+--------------------------------+",
"| city | state | other_temp | time |",
"+--------+-------+------------+--------------------------------+",
"| Boston | CA | 72.4 | 1970-01-01T00:00:00.000000350Z |",
"+--------+-------+------------+--------------------------------+",
"SeriesSet",
"table_name: o2",
"tags",
@ -208,11 +208,11 @@ async fn test_read_filter_data_filter_fields() {
"start_row: 0",
"num_rows: 1",
"Batches:",
"+------+-------+-------------------------------+",
"| city | state | time |",
"+------+-------+-------------------------------+",
"| | CA | 1970-01-01 00:00:00.000000300 |",
"+------+-------+-------------------------------+",
"+------+-------+--------------------------------+",
"| city | state | time |",
"+------+-------+--------------------------------+",
"| | CA | 1970-01-01T00:00:00.000000300Z |",
"+------+-------+--------------------------------+",
];
run_read_filter_test_case!(TwoMeasurementsManyFields {}, predicate, expected_results);
@ -246,12 +246,12 @@ async fn test_read_filter_data_pred_no_columns() {
"start_row: 0",
"num_rows: 2",
"Batches:",
"+--------+------+-------------------------------+",
"| region | user | time |",
"+--------+------+-------------------------------+",
"| west | 23.2 | 1970-01-01 00:00:00.000000100 |",
"| west | 21 | 1970-01-01 00:00:00.000000150 |",
"+--------+------+-------------------------------+",
"+--------+------+--------------------------------+",
"| region | user | time |",
"+--------+------+--------------------------------+",
"| west | 23.2 | 1970-01-01T00:00:00.000000100Z |",
"| west | 21 | 1970-01-01T00:00:00.000000150Z |",
"+--------+------+--------------------------------+",
"SeriesSet",
"table_name: disk",
"tags",
@ -261,11 +261,11 @@ async fn test_read_filter_data_pred_no_columns() {
"start_row: 0",
"num_rows: 1",
"Batches:",
"+--------+-------+-------------------------------+",
"| region | bytes | time |",
"+--------+-------+-------------------------------+",
"| east | 99 | 1970-01-01 00:00:00.000000200 |",
"+--------+-------+-------------------------------+",
"+--------+-------+--------------------------------+",
"| region | bytes | time |",
"+--------+-------+--------------------------------+",
"| east | 99 | 1970-01-01T00:00:00.000000200Z |",
"+--------+-------+--------------------------------+",
];
run_read_filter_test_case!(TwoMeasurements {}, predicate, expected_results);
@ -303,11 +303,11 @@ async fn test_read_filter_data_pred_using_regex_match() {
"start_row: 0",
"num_rows: 1",
"Batches:",
"+------+-------+------+-------------------------------+",
"| city | state | temp | time |",
"+------+-------+------+-------------------------------+",
"| LA | CA | 90 | 1970-01-01 00:00:00.000000200 |",
"+------+-------+------+-------------------------------+",
"+------+-------+------+--------------------------------+",
"| city | state | temp | time |",
"+------+-------+------+--------------------------------+",
"| LA | CA | 90 | 1970-01-01T00:00:00.000000200Z |",
"+------+-------+------+--------------------------------+",
];
run_read_filter_test_case!(TwoMeasurementsMultiSeries {}, predicate, expected_results);
@ -332,11 +332,11 @@ async fn test_read_filter_data_pred_using_regex_not_match() {
"start_row: 0",
"num_rows: 1",
"Batches:",
"+--------+-------+------+-------------------------------+",
"| city | state | temp | time |",
"+--------+-------+------+-------------------------------+",
"| Boston | MA | 72.4 | 1970-01-01 00:00:00.000000250 |",
"+--------+-------+------+-------------------------------+",
"+--------+-------+------+--------------------------------+",
"| city | state | temp | time |",
"+--------+-------+------+--------------------------------+",
"| Boston | MA | 72.4 | 1970-01-01T00:00:00.000000250Z |",
"+--------+-------+------+--------------------------------+",
"SeriesSet",
"table_name: o2",
"tags",
@ -348,11 +348,11 @@ async fn test_read_filter_data_pred_using_regex_not_match() {
"start_row: 0",
"num_rows: 1",
"Batches:",
"+--------+-------+---------+------+-------------------------------+",
"| city | state | reading | temp | time |",
"+--------+-------+---------+------+-------------------------------+",
"| Boston | MA | 51 | 53.4 | 1970-01-01 00:00:00.000000250 |",
"+--------+-------+---------+------+-------------------------------+",
"+--------+-------+---------+------+--------------------------------+",
"| city | state | reading | temp | time |",
"+--------+-------+---------+------+--------------------------------+",
"| Boston | MA | 51 | 53.4 | 1970-01-01T00:00:00.000000250Z |",
"+--------+-------+---------+------+--------------------------------+",
];
run_read_filter_test_case!(TwoMeasurementsMultiSeries {}, predicate, expected_results);
@ -384,12 +384,12 @@ async fn test_read_filter_data_pred_unsupported_in_scan() {
"start_row: 0",
"num_rows: 2",
"Batches:",
"+--------+-------+---------+------+-------------------------------+",
"| city | state | reading | temp | time |",
"+--------+-------+---------+------+-------------------------------+",
"| Boston | MA | 50 | 50.4 | 1970-01-01 00:00:00.000000100 |",
"| Boston | MA | 51 | 53.4 | 1970-01-01 00:00:00.000000250 |",
"+--------+-------+---------+------+-------------------------------+",
"+--------+-------+---------+------+--------------------------------+",
"| city | state | reading | temp | time |",
"+--------+-------+---------+------+--------------------------------+",
"| Boston | MA | 50 | 50.4 | 1970-01-01T00:00:00.000000100Z |",
"| Boston | MA | 51 | 53.4 | 1970-01-01T00:00:00.000000250Z |",
"+--------+-------+---------+------+--------------------------------+",
];
run_read_filter_test_case!(TwoMeasurementsMultiSeries {}, predicate, expected_results);
@ -430,15 +430,15 @@ async fn test_read_filter_data_plan_order() {
"start_row: 0",
"num_rows: 1",
"Batches:",
"+----------+-------+--------+-------+------+-------------------------------+",
"| city | state | zz_tag | other | temp | time |",
"+----------+-------+--------+-------+------+-------------------------------+",
"| Boston | CA | | | 70.3 | 1970-01-01 00:00:00.000000250 |",
"| Boston | MA | | 5 | 70.5 | 1970-01-01 00:00:00.000000250 |",
"| Boston | MA | A | | 70.4 | 1970-01-01 00:00:00.000001 |",
"| Kingston | MA | A | | 70.1 | 1970-01-01 00:00:00.000000800 |",
"| Kingston | MA | B | | 70.2 | 1970-01-01 00:00:00.000000100 |",
"+----------+-------+--------+-------+------+-------------------------------+",
"+----------+-------+--------+-------+------+--------------------------------+",
"| city | state | zz_tag | other | temp | time |",
"+----------+-------+--------+-------+------+--------------------------------+",
"| Boston | CA | | | 70.3 | 1970-01-01T00:00:00.000000250Z |",
"| Boston | MA | | 5 | 70.5 | 1970-01-01T00:00:00.000000250Z |",
"| Boston | MA | A | | 70.4 | 1970-01-01T00:00:00.000001Z |",
"| Kingston | MA | A | | 70.1 | 1970-01-01T00:00:00.000000800Z |",
"| Kingston | MA | B | | 70.2 | 1970-01-01T00:00:00.000000100Z |",
"+----------+-------+--------+-------+------+--------------------------------+",
"SeriesSet",
"table_name: h2o",
"tags",
@ -450,15 +450,15 @@ async fn test_read_filter_data_plan_order() {
"start_row: 1",
"num_rows: 1",
"Batches:",
"+----------+-------+--------+-------+------+-------------------------------+",
"| city | state | zz_tag | other | temp | time |",
"+----------+-------+--------+-------+------+-------------------------------+",
"| Boston | CA | | | 70.3 | 1970-01-01 00:00:00.000000250 |",
"| Boston | MA | | 5 | 70.5 | 1970-01-01 00:00:00.000000250 |",
"| Boston | MA | A | | 70.4 | 1970-01-01 00:00:00.000001 |",
"| Kingston | MA | A | | 70.1 | 1970-01-01 00:00:00.000000800 |",
"| Kingston | MA | B | | 70.2 | 1970-01-01 00:00:00.000000100 |",
"+----------+-------+--------+-------+------+-------------------------------+",
"+----------+-------+--------+-------+------+--------------------------------+",
"| city | state | zz_tag | other | temp | time |",
"+----------+-------+--------+-------+------+--------------------------------+",
"| Boston | CA | | | 70.3 | 1970-01-01T00:00:00.000000250Z |",
"| Boston | MA | | 5 | 70.5 | 1970-01-01T00:00:00.000000250Z |",
"| Boston | MA | A | | 70.4 | 1970-01-01T00:00:00.000001Z |",
"| Kingston | MA | A | | 70.1 | 1970-01-01T00:00:00.000000800Z |",
"| Kingston | MA | B | | 70.2 | 1970-01-01T00:00:00.000000100Z |",
"+----------+-------+--------+-------+------+--------------------------------+",
"SeriesSet",
"table_name: h2o",
"tags",
@ -471,15 +471,15 @@ async fn test_read_filter_data_plan_order() {
"start_row: 2",
"num_rows: 1",
"Batches:",
"+----------+-------+--------+-------+------+-------------------------------+",
"| city | state | zz_tag | other | temp | time |",
"+----------+-------+--------+-------+------+-------------------------------+",
"| Boston | CA | | | 70.3 | 1970-01-01 00:00:00.000000250 |",
"| Boston | MA | | 5 | 70.5 | 1970-01-01 00:00:00.000000250 |",
"| Boston | MA | A | | 70.4 | 1970-01-01 00:00:00.000001 |",
"| Kingston | MA | A | | 70.1 | 1970-01-01 00:00:00.000000800 |",
"| Kingston | MA | B | | 70.2 | 1970-01-01 00:00:00.000000100 |",
"+----------+-------+--------+-------+------+-------------------------------+",
"+----------+-------+--------+-------+------+--------------------------------+",
"| city | state | zz_tag | other | temp | time |",
"+----------+-------+--------+-------+------+--------------------------------+",
"| Boston | CA | | | 70.3 | 1970-01-01T00:00:00.000000250Z |",
"| Boston | MA | | 5 | 70.5 | 1970-01-01T00:00:00.000000250Z |",
"| Boston | MA | A | | 70.4 | 1970-01-01T00:00:00.000001Z |",
"| Kingston | MA | A | | 70.1 | 1970-01-01T00:00:00.000000800Z |",
"| Kingston | MA | B | | 70.2 | 1970-01-01T00:00:00.000000100Z |",
"+----------+-------+--------+-------+------+--------------------------------+",
"SeriesSet",
"table_name: h2o",
"tags",
@ -492,15 +492,15 @@ async fn test_read_filter_data_plan_order() {
"start_row: 3",
"num_rows: 1",
"Batches:",
"+----------+-------+--------+-------+------+-------------------------------+",
"| city | state | zz_tag | other | temp | time |",
"+----------+-------+--------+-------+------+-------------------------------+",
"| Boston | CA | | | 70.3 | 1970-01-01 00:00:00.000000250 |",
"| Boston | MA | | 5 | 70.5 | 1970-01-01 00:00:00.000000250 |",
"| Boston | MA | A | | 70.4 | 1970-01-01 00:00:00.000001 |",
"| Kingston | MA | A | | 70.1 | 1970-01-01 00:00:00.000000800 |",
"| Kingston | MA | B | | 70.2 | 1970-01-01 00:00:00.000000100 |",
"+----------+-------+--------+-------+------+-------------------------------+",
"+----------+-------+--------+-------+------+--------------------------------+",
"| city | state | zz_tag | other | temp | time |",
"+----------+-------+--------+-------+------+--------------------------------+",
"| Boston | CA | | | 70.3 | 1970-01-01T00:00:00.000000250Z |",
"| Boston | MA | | 5 | 70.5 | 1970-01-01T00:00:00.000000250Z |",
"| Boston | MA | A | | 70.4 | 1970-01-01T00:00:00.000001Z |",
"| Kingston | MA | A | | 70.1 | 1970-01-01T00:00:00.000000800Z |",
"| Kingston | MA | B | | 70.2 | 1970-01-01T00:00:00.000000100Z |",
"+----------+-------+--------+-------+------+--------------------------------+",
"SeriesSet",
"table_name: h2o",
"tags",
@ -513,15 +513,15 @@ async fn test_read_filter_data_plan_order() {
"start_row: 4",
"num_rows: 1",
"Batches:",
"+----------+-------+--------+-------+------+-------------------------------+",
"| city | state | zz_tag | other | temp | time |",
"+----------+-------+--------+-------+------+-------------------------------+",
"| Boston | CA | | | 70.3 | 1970-01-01 00:00:00.000000250 |",
"| Boston | MA | | 5 | 70.5 | 1970-01-01 00:00:00.000000250 |",
"| Boston | MA | A | | 70.4 | 1970-01-01 00:00:00.000001 |",
"| Kingston | MA | A | | 70.1 | 1970-01-01 00:00:00.000000800 |",
"| Kingston | MA | B | | 70.2 | 1970-01-01 00:00:00.000000100 |",
"+----------+-------+--------+-------+------+-------------------------------+",
"+----------+-------+--------+-------+------+--------------------------------+",
"| city | state | zz_tag | other | temp | time |",
"+----------+-------+--------+-------+------+--------------------------------+",
"| Boston | CA | | | 70.3 | 1970-01-01T00:00:00.000000250Z |",
"| Boston | MA | | 5 | 70.5 | 1970-01-01T00:00:00.000000250Z |",
"| Boston | MA | A | | 70.4 | 1970-01-01T00:00:00.000001Z |",
"| Kingston | MA | A | | 70.1 | 1970-01-01T00:00:00.000000800Z |",
"| Kingston | MA | B | | 70.2 | 1970-01-01T00:00:00.000000100Z |",
"+----------+-------+--------+-------+------+--------------------------------+",
];
run_read_filter_test_case!(MeasurementsSortableTags {}, predicate, expected_results);
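
Every expected table in the hunks above changes the same way: the `time` column grows one character wider and switches from arrow's `YYYY-MM-DD HH:MM:SS.fffffffff` rendering to RFC3339 (`T` date/time separator, trailing `Z`, trailing zero groups trimmed, so 2000 ns prints as `.000002` and a whole-second value prints with no fraction at all). A minimal sketch of that rendering, assuming chrono's `SecondsFormat::AutoSi` (the outputs in this diff are consistent with it, though this is not necessarily the exact code path inside the IOx printer):

```rust
use chrono::{SecondsFormat, TimeZone, Utc};

fn main() {
    // Nanosecond offsets taken from the expected tables above.
    for &nanos in &[250i64, 2_000, 10] {
        let ts = Utc.timestamp_nanos(nanos);
        // AutoSi keeps only whole groups of non-zero sub-second digits.
        println!("{}", ts.to_rfc3339_opts(SecondsFormat::AutoSi, true));
    }
    // -> 1970-01-01T00:00:00.000000250Z
    // -> 1970-01-01T00:00:00.000002Z
    // -> 1970-01-01T00:00:00.000000010Z

    // A whole-second timestamp (as in the month-window test below) has no
    // fractional part at all.
    let month = Utc.ymd(2020, 4, 1).and_hms(0, 0, 0);
    println!("{}", month.to_rfc3339_opts(SecondsFormat::AutoSi, true));
    // -> 2020-04-01T00:00:00Z
}
```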

@ -1,7 +1,7 @@
//! Tests for the Influx gRPC queries
use crate::scenarios::*;
use arrow::util::pretty::pretty_format_batches;
use arrow_util::display::pretty_format_batches;
use async_trait::async_trait;
use datafusion::prelude::*;
use query::{
@ -62,7 +62,7 @@ macro_rules! run_read_group_test_case {
assert_eq!(
expected_results, string_results,
"Error in scenario '{}'\n\nexpected:\n{:#?}\nactual:\n{:#?}",
"Error in scenario '{}'\n\nexpected:\n\n{:#?}\nactual:\n\n{:#?}",
scenario_name, expected_results, string_results
);
}
@ -107,11 +107,11 @@ async fn test_read_group_data_pred() {
let agg = Aggregate::Sum;
let group_columns = vec!["state"];
let expected_results = vec![
"+-------+------+------+-------------------------------+",
"| state | city | temp | time |",
"+-------+------+------+-------------------------------+",
"| CA | LA | 90 | 1970-01-01 00:00:00.000000200 |",
"+-------+------+------+-------------------------------+",
"+-------+------+------+--------------------------------+",
"| state | city | temp | time |",
"+-------+------+------+--------------------------------+",
"| CA | LA | 90 | 1970-01-01T00:00:00.000000200Z |",
"+-------+------+------+--------------------------------+",
];
run_read_group_test_case!(
@ -132,12 +132,12 @@ async fn test_read_group_data_field_restriction() {
let agg = Aggregate::Sum;
let group_columns = vec!["state"];
let expected_results = vec![
"+-------+--------+-------+-------------------------------+",
"| state | city | temp | time |",
"+-------+--------+-------+-------------------------------+",
"| CA | LA | 180 | 1970-01-01 00:00:00.000000350 |",
"| MA | Boston | 142.8 | 1970-01-01 00:00:00.000000250 |",
"+-------+--------+-------+-------------------------------+",
"+-------+--------+-------+--------------------------------+",
"| state | city | temp | time |",
"+-------+--------+-------+--------------------------------+",
"| CA | LA | 180 | 1970-01-01T00:00:00.000000350Z |",
"| MA | Boston | 142.8 | 1970-01-01T00:00:00.000000250Z |",
"+-------+--------+-------+--------------------------------+",
];
run_read_group_test_case!(
@ -190,12 +190,12 @@ async fn test_grouped_series_set_plan_sum() {
// The null field (after predicates) is not sent as a series
// Note order of city key (boston --> cambridge)
let expected_results = vec![
"+-------+-----------+----------+------+-------------------------------+",
"| state | city | humidity | temp | time |",
"+-------+-----------+----------+------+-------------------------------+",
"| MA | Boston | | 141 | 1970-01-01 00:00:00.000000400 |",
"| MA | Cambridge | | 163 | 1970-01-01 00:00:00.000000200 |",
"+-------+-----------+----------+------+-------------------------------+",
"+-------+-----------+----------+------+--------------------------------+",
"| state | city | humidity | temp | time |",
"+-------+-----------+----------+------+--------------------------------+",
"| MA | Boston | | 141 | 1970-01-01T00:00:00.000000400Z |",
"| MA | Cambridge | | 163 | 1970-01-01T00:00:00.000000200Z |",
"+-------+-----------+----------+------+--------------------------------+",
];
run_read_group_test_case!(
@ -224,12 +224,12 @@ async fn test_grouped_series_set_plan_count() {
let group_columns = vec!["state"];
let expected_results = vec![
"+-------+-----------+----------+------+-------------------------------+",
"| state | city | humidity | temp | time |",
"+-------+-----------+----------+------+-------------------------------+",
"| MA | Boston | 0 | 2 | 1970-01-01 00:00:00.000000400 |",
"| MA | Cambridge | 0 | 2 | 1970-01-01 00:00:00.000000200 |",
"+-------+-----------+----------+------+-------------------------------+",
"+-------+-----------+----------+------+--------------------------------+",
"| state | city | humidity | temp | time |",
"+-------+-----------+----------+------+--------------------------------+",
"| MA | Boston | 0 | 2 | 1970-01-01T00:00:00.000000400Z |",
"| MA | Cambridge | 0 | 2 | 1970-01-01T00:00:00.000000200Z |",
"+-------+-----------+----------+------+--------------------------------+",
];
run_read_group_test_case!(
@ -258,12 +258,12 @@ async fn test_grouped_series_set_plan_mean() {
let group_columns = vec!["state"];
let expected_results = vec![
"+-------+-----------+----------+------+-------------------------------+",
"| state | city | humidity | temp | time |",
"+-------+-----------+----------+------+-------------------------------+",
"| MA | Boston | | 70.5 | 1970-01-01 00:00:00.000000400 |",
"| MA | Cambridge | | 81.5 | 1970-01-01 00:00:00.000000200 |",
"+-------+-----------+----------+------+-------------------------------+",
"+-------+-----------+----------+------+--------------------------------+",
"| state | city | humidity | temp | time |",
"+-------+-----------+----------+------+--------------------------------+",
"| MA | Boston | | 70.5 | 1970-01-01T00:00:00.000000400Z |",
"| MA | Cambridge | | 81.5 | 1970-01-01T00:00:00.000000200Z |",
"+-------+-----------+----------+------+--------------------------------+",
];
run_read_group_test_case!(
@ -303,11 +303,11 @@ async fn test_grouped_series_set_plan_first() {
let group_columns = vec!["state"];
let expected_results = vec![
"+-------+-----------+------+----------------------------+---+----------------------------+---+----------------------------+---+----------------------------+",
"| state | city | b | time_b | f | time_f | i | time_i | s | time_s |",
"+-------+-----------+------+----------------------------+---+----------------------------+---+----------------------------+---+----------------------------+",
"| MA | Cambridge | true | 1970-01-01 00:00:00.000002 | 7 | 1970-01-01 00:00:00.000002 | 7 | 1970-01-01 00:00:00.000002 | c | 1970-01-01 00:00:00.000002 |",
"+-------+-----------+------+----------------------------+---+----------------------------+---+----------------------------+---+----------------------------+",
"+-------+-----------+------+-----------------------------+---+-----------------------------+---+-----------------------------+---+-----------------------------+",
"| state | city | b | time_b | f | time_f | i | time_i | s | time_s |",
"+-------+-----------+------+-----------------------------+---+-----------------------------+---+-----------------------------+---+-----------------------------+",
"| MA | Cambridge | true | 1970-01-01T00:00:00.000002Z | 7 | 1970-01-01T00:00:00.000002Z | 7 | 1970-01-01T00:00:00.000002Z | c | 1970-01-01T00:00:00.000002Z |",
"+-------+-----------+------+-----------------------------+---+-----------------------------+---+-----------------------------+---+-----------------------------+",
];
run_read_group_test_case!(
@ -330,11 +330,11 @@ async fn test_grouped_series_set_plan_last() {
let group_columns = vec!["state"];
let expected_results = vec![
"+-------+-----------+-------+----------------------------+---+----------------------------+---+----------------------------+---+----------------------------+",
"| state | city | b | time_b | f | time_f | i | time_i | s | time_s |",
"+-------+-----------+-------+----------------------------+---+----------------------------+---+----------------------------+---+----------------------------+",
"| MA | Cambridge | false | 1970-01-01 00:00:00.000003 | 6 | 1970-01-01 00:00:00.000003 | 6 | 1970-01-01 00:00:00.000003 | b | 1970-01-01 00:00:00.000003 |",
"+-------+-----------+-------+----------------------------+---+----------------------------+---+----------------------------+---+----------------------------+",
"+-------+-----------+-------+-----------------------------+---+-----------------------------+---+-----------------------------+---+-----------------------------+",
"| state | city | b | time_b | f | time_f | i | time_i | s | time_s |",
"+-------+-----------+-------+-----------------------------+---+-----------------------------+---+-----------------------------+---+-----------------------------+",
"| MA | Cambridge | false | 1970-01-01T00:00:00.000003Z | 6 | 1970-01-01T00:00:00.000003Z | 6 | 1970-01-01T00:00:00.000003Z | b | 1970-01-01T00:00:00.000003Z |",
"+-------+-----------+-------+-----------------------------+---+-----------------------------+---+-----------------------------+---+-----------------------------+",
];
run_read_group_test_case!(
@ -376,11 +376,11 @@ async fn test_grouped_series_set_plan_min() {
let group_columns = vec!["state"];
let expected_results = vec![
"+-------+-----------+-------+----------------------------+---+----------------------------+---+----------------------------+---+----------------------------+",
"| state | city | b | time_b | f | time_f | i | time_i | s | time_s |",
"+-------+-----------+-------+----------------------------+---+----------------------------+---+----------------------------+---+----------------------------+",
"| MA | Cambridge | false | 1970-01-01 00:00:00.000001 | 6 | 1970-01-01 00:00:00.000003 | 6 | 1970-01-01 00:00:00.000003 | a | 1970-01-01 00:00:00.000002 |",
"+-------+-----------+-------+----------------------------+---+----------------------------+---+----------------------------+---+----------------------------+",
"+-------+-----------+-------+-----------------------------+---+-----------------------------+---+-----------------------------+---+-----------------------------+",
"| state | city | b | time_b | f | time_f | i | time_i | s | time_s |",
"+-------+-----------+-------+-----------------------------+---+-----------------------------+---+-----------------------------+---+-----------------------------+",
"| MA | Cambridge | false | 1970-01-01T00:00:00.000001Z | 6 | 1970-01-01T00:00:00.000003Z | 6 | 1970-01-01T00:00:00.000003Z | a | 1970-01-01T00:00:00.000002Z |",
"+-------+-----------+-------+-----------------------------+---+-----------------------------+---+-----------------------------+---+-----------------------------+",
];
run_read_group_test_case!(
@ -420,11 +420,11 @@ async fn test_grouped_series_set_plan_max() {
let group_columns = vec!["state"];
let expected_results = vec![
"+-------+-----------+------+----------------------------+---+----------------------------+---+----------------------------+---+----------------------------+",
"| state | city | b | time_b | f | time_f | i | time_i | s | time_s |",
"+-------+-----------+------+----------------------------+---+----------------------------+---+----------------------------+---+----------------------------+",
"| MA | Cambridge | true | 1970-01-01 00:00:00.000003 | 7 | 1970-01-01 00:00:00.000002 | 7 | 1970-01-01 00:00:00.000002 | z | 1970-01-01 00:00:00.000004 |",
"+-------+-----------+------+----------------------------+---+----------------------------+---+----------------------------+---+----------------------------+",
"+-------+-----------+------+-----------------------------+---+-----------------------------+---+-----------------------------+---+-----------------------------+",
"| state | city | b | time_b | f | time_f | i | time_i | s | time_s |",
"+-------+-----------+------+-----------------------------+---+-----------------------------+---+-----------------------------+---+-----------------------------+",
"| MA | Cambridge | true | 1970-01-01T00:00:00.000003Z | 7 | 1970-01-01T00:00:00.000002Z | 7 | 1970-01-01T00:00:00.000002Z | z | 1970-01-01T00:00:00.000004Z |",
"+-------+-----------+------+-----------------------------+---+-----------------------------+---+-----------------------------+---+-----------------------------+",
];
run_read_group_test_case!(
@ -467,13 +467,13 @@ async fn test_grouped_series_set_plan_group_by_state_city() {
let group_columns = vec!["state", "city"];
let expected_results = vec![
"+-------+-----------+----------+------+-------------------------------+",
"| state | city | humidity | temp | time |",
"+-------+-----------+----------+------+-------------------------------+",
"| CA | LA | 21 | 181 | 1970-01-01 00:00:00.000000600 |",
"| MA | Boston | | 141 | 1970-01-01 00:00:00.000000400 |",
"| MA | Cambridge | | 243 | 1970-01-01 00:00:00.000000200 |",
"+-------+-----------+----------+------+-------------------------------+",
"+-------+-----------+----------+------+--------------------------------+",
"| state | city | humidity | temp | time |",
"+-------+-----------+----------+------+--------------------------------+",
"| CA | LA | 21 | 181 | 1970-01-01T00:00:00.000000600Z |",
"| MA | Boston | | 141 | 1970-01-01T00:00:00.000000400Z |",
"| MA | Cambridge | | 243 | 1970-01-01T00:00:00.000000200Z |",
"+-------+-----------+----------+------+--------------------------------+",
];
run_read_group_test_case!(
@ -495,13 +495,13 @@ async fn test_grouped_series_set_plan_group_by_city_state() {
// Test with alternate group key order (note the order of columns is different)
let expected_results = vec![
"+-----------+-------+----------+------+-------------------------------+",
"| city | state | humidity | temp | time |",
"+-----------+-------+----------+------+-------------------------------+",
"| Boston | MA | | 141 | 1970-01-01 00:00:00.000000400 |",
"| Cambridge | MA | | 243 | 1970-01-01 00:00:00.000000200 |",
"| LA | CA | 21 | 181 | 1970-01-01 00:00:00.000000600 |",
"+-----------+-------+----------+------+-------------------------------+",
"+-----------+-------+----------+------+--------------------------------+",
"| city | state | humidity | temp | time |",
"+-----------+-------+----------+------+--------------------------------+",
"| Boston | MA | | 141 | 1970-01-01T00:00:00.000000400Z |",
"| Cambridge | MA | | 243 | 1970-01-01T00:00:00.000000200Z |",
"| LA | CA | 21 | 181 | 1970-01-01T00:00:00.000000600Z |",
"+-----------+-------+----------+------+--------------------------------+",
];
run_read_group_test_case!(
@ -523,17 +523,17 @@ async fn test_grouped_series_set_plan_group_aggregate_none() {
// Expect order of the columns to begin with city/state
let expected_results = vec![
"+-----------+-------+----------+------+-------------------------------+",
"| city | state | humidity | temp | time |",
"+-----------+-------+----------+------+-------------------------------+",
"| Boston | MA | | 70 | 1970-01-01 00:00:00.000000300 |",
"| Boston | MA | | 71 | 1970-01-01 00:00:00.000000400 |",
"| Cambridge | MA | | 80 | 1970-01-01 00:00:00.000000050 |",
"| Cambridge | MA | | 81 | 1970-01-01 00:00:00.000000100 |",
"| Cambridge | MA | | 82 | 1970-01-01 00:00:00.000000200 |",
"| LA | CA | 10 | 90 | 1970-01-01 00:00:00.000000500 |",
"| LA | CA | 11 | 91 | 1970-01-01 00:00:00.000000600 |",
"+-----------+-------+----------+------+-------------------------------+",
"+-----------+-------+----------+------+--------------------------------+",
"| city | state | humidity | temp | time |",
"+-----------+-------+----------+------+--------------------------------+",
"| Boston | MA | | 70 | 1970-01-01T00:00:00.000000300Z |",
"| Boston | MA | | 71 | 1970-01-01T00:00:00.000000400Z |",
"| Cambridge | MA | | 80 | 1970-01-01T00:00:00.000000050Z |",
"| Cambridge | MA | | 81 | 1970-01-01T00:00:00.000000100Z |",
"| Cambridge | MA | | 82 | 1970-01-01T00:00:00.000000200Z |",
"| LA | CA | 10 | 90 | 1970-01-01T00:00:00.000000500Z |",
"| LA | CA | 11 | 91 | 1970-01-01T00:00:00.000000600Z |",
"+-----------+-------+----------+------+--------------------------------+",
];
run_read_group_test_case!(
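
As a cross-check within this file: the grouped aggregates above are consistent with these raw rows once the 50 ns Cambridge row is excluded (apparently by those tests' predicate). The mean test gives Boston (70 + 71) / 2 = 70.5 and Cambridge (81 + 82) / 2 = 81.5, the count test gives 2 and 2, the sum test gives 141 and 163, and each group reports its latest timestamp (400 ns and 200 ns). The state/city grouping, which does include all three Cambridge rows, sums to 80 + 81 + 82 = 243.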

@ -3,7 +3,7 @@ use crate::scenarios::*;
use server::{db::test_helpers::write_lp, utils::make_db};
use arrow::util::pretty::pretty_format_batches;
use arrow_util::display::pretty_format_batches;
use async_trait::async_trait;
use datafusion::prelude::*;
use query::{
@ -62,7 +62,7 @@ macro_rules! run_read_window_aggregate_test_case {
assert_eq!(
expected_results, string_results,
"Error in scenario '{}'\n\nexpected:\n{:#?}\nactual:\n{:#?}",
"Error in scenario '{}'\n\nexpected:\n{:#?}\n\nactual:\n{:#?}\n",
scenario_name, expected_results, string_results
);
}
@ -131,16 +131,16 @@ async fn test_read_window_aggregate_nanoseconds() {
// note the name of the field is "temp" even though it is the average
let expected_results = vec![
"+--------+-------+-------------------------------+------+",
"| city | state | time | temp |",
"+--------+-------+-------------------------------+------+",
"| Boston | MA | 1970-01-01 00:00:00.000000200 | 70 |",
"| Boston | MA | 1970-01-01 00:00:00.000000400 | 71.5 |",
"| Boston | MA | 1970-01-01 00:00:00.000000600 | 73 |",
"| LA | CA | 1970-01-01 00:00:00.000000200 | 90 |",
"| LA | CA | 1970-01-01 00:00:00.000000400 | 91.5 |",
"| LA | CA | 1970-01-01 00:00:00.000000600 | 93 |",
"+--------+-------+-------------------------------+------+",
"+--------+-------+--------------------------------+------+",
"| city | state | time | temp |",
"+--------+-------+--------------------------------+------+",
"| Boston | MA | 1970-01-01T00:00:00.000000200Z | 70 |",
"| Boston | MA | 1970-01-01T00:00:00.000000400Z | 71.5 |",
"| Boston | MA | 1970-01-01T00:00:00.000000600Z | 73 |",
"| LA | CA | 1970-01-01T00:00:00.000000200Z | 90 |",
"| LA | CA | 1970-01-01T00:00:00.000000400Z | 91.5 |",
"| LA | CA | 1970-01-01T00:00:00.000000600Z | 93 |",
"+--------+-------+--------------------------------+------+",
];
run_read_window_aggregate_test_case!(
@ -217,12 +217,12 @@ async fn test_read_window_aggregate_months() {
// note the name of the field is "temp" even though it is the average
let expected_results = vec![
"+--------+-------+---------------------+------+",
"| city | state | time | temp |",
"+--------+-------+---------------------+------+",
"| Boston | MA | 2020-04-01 00:00:00 | 70.5 |",
"| Boston | MA | 2020-05-01 00:00:00 | 72.5 |",
"+--------+-------+---------------------+------+",
"+--------+-------+----------------------+------+",
"| city | state | time | temp |",
"+--------+-------+----------------------+------+",
"| Boston | MA | 2020-04-01T00:00:00Z | 70.5 |",
"| Boston | MA | 2020-05-01T00:00:00Z | 72.5 |",
"+--------+-------+----------------------+------+",
];
run_read_window_aggregate_test_case!(

@ -1,6 +1,6 @@
use std::sync::Arc;
use arrow::util::pretty::pretty_format_batches;
use arrow_util::display::pretty_format_batches;
use query::{
exec::{
field::FieldIndexes,

@ -60,12 +60,12 @@ async fn chunk_pruning_sql() {
} = setup().await;
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000010 |",
"| 2 | 1970-01-01 00:00:00.000000020 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000010Z |",
"| 2 | 1970-01-01T00:00:00.000000020Z |",
"+-----+--------------------------------+",
];
let query = "select * from cpu where bar < 3.0";

@ -4,6 +4,7 @@ mod parse;
mod setup;
use arrow::record_batch::RecordBatch;
use arrow_util::display::pretty_format_batches;
use query::{
exec::{Executor, ExecutorType},
frontend::sql::SqlQueryPlanner,
@ -282,7 +283,7 @@ impl<W: Write> Runner<W> {
.await
.expect("Running plan");
let current_results = arrow::util::pretty::pretty_format_batches(&results)
let current_results = pretty_format_batches(&results)
.unwrap()
.trim()
.lines()
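
The call-site change above is mechanical: the IOx formatter is call-compatible with the arrow one, so only the import and path change. A hedged sketch of the drop-in shape, assuming the `pretty_format_batches(&[RecordBatch]) -> Result<String>` signature this diff implies:

```rust
use arrow::record_batch::RecordBatch;
use arrow_util::display::pretty_format_batches;

/// Render batches to trimmed lines, the way the runner above consumes them.
fn render_lines(batches: &[RecordBatch]) -> Vec<String> {
    pretty_format_batches(batches)
        .expect("formatting record batches")
        .trim()
        .lines()
        .map(str::to_string)
        .collect()
}
```
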
@ -352,11 +353,11 @@ SELECT * from disk;
const EXPECTED_OUTPUT: &str = r#"-- Test Setup: TwoMeasurements
-- SQL: SELECT * from disk;
+-------+--------+-------------------------------+
| bytes | region | time |
+-------+--------+-------------------------------+
| 99 | east | 1970-01-01 00:00:00.000000200 |
+-------+--------+-------------------------------+
+-------+--------+--------------------------------+
| bytes | region | time |
+-------+--------+--------------------------------+
| 99 | east | 1970-01-01T00:00:00.000000200Z |
+-------+--------+--------------------------------+
"#;
#[tokio::test]

@ -41,12 +41,12 @@ macro_rules! run_sql_test_case {
#[tokio::test]
async fn sql_select_from_cpu() {
let expected = vec![
"+--------+-------------------------------+------+",
"| region | time | user |",
"+--------+-------------------------------+------+",
"| west | 1970-01-01 00:00:00.000000100 | 23.2 |",
"| west | 1970-01-01 00:00:00.000000150 | 21 |",
"+--------+-------------------------------+------+",
"+--------+--------------------------------+------+",
"| region | time | user |",
"+--------+--------------------------------+------+",
"| west | 1970-01-01T00:00:00.000000100Z | 23.2 |",
"| west | 1970-01-01T00:00:00.000000150Z | 21 |",
"+--------+--------------------------------+------+",
];
run_sql_test_case!(TwoMeasurements {}, "SELECT * from cpu", &expected);
}
@ -72,11 +72,11 @@ async fn sql_select_from_cpu_with_projection() {
#[tokio::test]
async fn sql_select_from_cpu_pred() {
let expected = vec![
"+--------+-------------------------------+------+",
"| region | time | user |",
"+--------+-------------------------------+------+",
"| west | 1970-01-01 00:00:00.000000150 | 21 |",
"+--------+-------------------------------+------+",
"+--------+--------------------------------+------+",
"| region | time | user |",
"+--------+--------------------------------+------+",
"| west | 1970-01-01T00:00:00.000000150Z | 21 |",
"+--------+--------------------------------+------+",
];
run_sql_test_case!(
TwoMeasurements {},
@ -121,11 +121,11 @@ async fn sql_select_from_cpu_group() {
#[tokio::test]
async fn sql_select_from_disk() {
let expected = vec![
"+-------+--------+-------------------------------+",
"| bytes | region | time |",
"+-------+--------+-------------------------------+",
"| 99 | east | 1970-01-01 00:00:00.000000200 |",
"+-------+--------+-------------------------------+",
"+-------+--------+--------------------------------+",
"| bytes | region | time |",
"+-------+--------+--------------------------------+",
"| 99 | east | 1970-01-01T00:00:00.000000200Z |",
"+-------+--------+--------------------------------+",
];
run_sql_test_case!(TwoMeasurements {}, "SELECT * from disk", &expected);
}
@ -133,14 +133,14 @@ async fn sql_select_from_disk() {
#[tokio::test]
async fn sql_select_with_schema_merge() {
let expected = vec![
"+------+--------+--------+-------------------------------+------+",
"| host | region | system | time | user |",
"+------+--------+--------+-------------------------------+------+",
"| | west | 5 | 1970-01-01 00:00:00.000000100 | 23.2 |",
"| | west | 6 | 1970-01-01 00:00:00.000000150 | 21 |",
"| foo | east | | 1970-01-01 00:00:00.000000100 | 23.2 |",
"| bar | west | | 1970-01-01 00:00:00.000000250 | 21 |",
"+------+--------+--------+-------------------------------+------+",
"+------+--------+--------+--------------------------------+------+",
"| host | region | system | time | user |",
"+------+--------+--------+--------------------------------+------+",
"| | west | 5 | 1970-01-01T00:00:00.000000100Z | 23.2 |",
"| | west | 6 | 1970-01-01T00:00:00.000000150Z | 21 |",
"| bar | west | | 1970-01-01T00:00:00.000000250Z | 21 |",
"| foo | east | | 1970-01-01T00:00:00.000000100Z | 23.2 |",
"+------+--------+--------+--------------------------------+------+",
];
run_sql_test_case!(MultiChunkSchemaMerge {}, "SELECT * from cpu", &expected);
}
@ -412,17 +412,17 @@ async fn sql_select_with_schema_merge_subset() {
async fn sql_predicate_pushdown_correctness_1() {
// Test 1: Select everything
let expected = vec![
"+-------+--------+-------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+-------------------------------+-----------+",
"| 189 | 7 | 1970-01-01 00:00:00.000000110 | bedford |",
"| 372 | 5 | 1970-01-01 00:00:00.000000100 | lexington |",
"| 40000 | 5 | 1970-01-01 00:00:00.000000100 | andover |",
"| 471 | 6 | 1970-01-01 00:00:00.000000110 | tewsbury |",
"| 632 | 5 | 1970-01-01 00:00:00.000000120 | reading |",
"| 632 | 6 | 1970-01-01 00:00:00.000000130 | reading |",
"| 872 | 6 | 1970-01-01 00:00:00.000000110 | lawrence |",
"+-------+--------+-------------------------------+-----------+",
"+-------+--------+--------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+--------------------------------+-----------+",
"| 189 | 7 | 1970-01-01T00:00:00.000000110Z | bedford |",
"| 372 | 5 | 1970-01-01T00:00:00.000000100Z | lexington |",
"| 40000 | 5 | 1970-01-01T00:00:00.000000100Z | andover |",
"| 471 | 6 | 1970-01-01T00:00:00.000000110Z | tewsbury |",
"| 632 | 5 | 1970-01-01T00:00:00.000000120Z | reading |",
"| 632 | 6 | 1970-01-01T00:00:00.000000130Z | reading |",
"| 872 | 6 | 1970-01-01T00:00:00.000000110Z | lawrence |",
"+-------+--------+--------------------------------+-----------+",
];
run_sql_test_case!(
TwoMeasurementsPredicatePushDown {},
@ -435,16 +435,16 @@ async fn sql_predicate_pushdown_correctness_1() {
async fn sql_predicate_pushdown_correctness_2() {
// Test 2: One push-down expression: count > 200
let expected = vec![
"+-------+--------+-------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+-------------------------------+-----------+",
"| 372 | 5 | 1970-01-01 00:00:00.000000100 | lexington |",
"| 40000 | 5 | 1970-01-01 00:00:00.000000100 | andover |",
"| 471 | 6 | 1970-01-01 00:00:00.000000110 | tewsbury |",
"| 632 | 5 | 1970-01-01 00:00:00.000000120 | reading |",
"| 632 | 6 | 1970-01-01 00:00:00.000000130 | reading |",
"| 872 | 6 | 1970-01-01 00:00:00.000000110 | lawrence |",
"+-------+--------+-------------------------------+-----------+",
"+-------+--------+--------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+--------------------------------+-----------+",
"| 372 | 5 | 1970-01-01T00:00:00.000000100Z | lexington |",
"| 40000 | 5 | 1970-01-01T00:00:00.000000100Z | andover |",
"| 471 | 6 | 1970-01-01T00:00:00.000000110Z | tewsbury |",
"| 632 | 5 | 1970-01-01T00:00:00.000000120Z | reading |",
"| 632 | 6 | 1970-01-01T00:00:00.000000130Z | reading |",
"| 872 | 6 | 1970-01-01T00:00:00.000000110Z | lawrence |",
"+-------+--------+--------------------------------+-----------+",
];
run_sql_test_case!(
TwoMeasurementsPredicatePushDown {},
@ -457,15 +457,15 @@ async fn sql_predicate_pushdown_correctness_2() {
async fn sql_predicate_pushdown_correctness_3() {
// Test 3: Two push-down expressions: count > 200 and town != 'tewsbury'
let expected = vec![
"+-------+--------+-------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+-------------------------------+-----------+",
"| 372 | 5 | 1970-01-01 00:00:00.000000100 | lexington |",
"| 40000 | 5 | 1970-01-01 00:00:00.000000100 | andover |",
"| 632 | 5 | 1970-01-01 00:00:00.000000120 | reading |",
"| 632 | 6 | 1970-01-01 00:00:00.000000130 | reading |",
"| 872 | 6 | 1970-01-01 00:00:00.000000110 | lawrence |",
"+-------+--------+-------------------------------+-----------+",
"+-------+--------+--------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+--------------------------------+-----------+",
"| 372 | 5 | 1970-01-01T00:00:00.000000100Z | lexington |",
"| 40000 | 5 | 1970-01-01T00:00:00.000000100Z | andover |",
"| 632 | 5 | 1970-01-01T00:00:00.000000120Z | reading |",
"| 632 | 6 | 1970-01-01T00:00:00.000000130Z | reading |",
"| 872 | 6 | 1970-01-01T00:00:00.000000110Z | lawrence |",
"+-------+--------+--------------------------------+-----------+",
];
run_sql_test_case!(
TwoMeasurementsPredicatePushDown {},
@ -479,14 +479,14 @@ async fn sql_predicate_pushdown_correctness_4() {
// Test 4: Still two push-down expressions: count > 200 and town != 'tewsbury'
// even though the results are different
let expected = vec![
"+-------+--------+-------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+-------------------------------+-----------+",
"| 372 | 5 | 1970-01-01 00:00:00.000000100 | lexington |",
"| 40000 | 5 | 1970-01-01 00:00:00.000000100 | andover |",
"| 632 | 5 | 1970-01-01 00:00:00.000000120 | reading |",
"| 872 | 6 | 1970-01-01 00:00:00.000000110 | lawrence |",
"+-------+--------+-------------------------------+-----------+",
"+-------+--------+--------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+--------------------------------+-----------+",
"| 372 | 5 | 1970-01-01T00:00:00.000000100Z | lexington |",
"| 40000 | 5 | 1970-01-01T00:00:00.000000100Z | andover |",
"| 632 | 5 | 1970-01-01T00:00:00.000000120Z | reading |",
"| 872 | 6 | 1970-01-01T00:00:00.000000110Z | lawrence |",
"+-------+--------+--------------------------------+-----------+",
];
run_sql_test_case!(
TwoMeasurementsPredicatePushDown {},
@ -499,13 +499,13 @@ async fn sql_predicate_pushdown_correctness_4() {
async fn sql_predicate_pushdown_correctness_5() {
// Test 5: three push-down expressions: count > 200 and town != 'tewsbury' and count < 40000
let expected = vec![
"+-------+--------+-------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+-------------------------------+-----------+",
"| 372 | 5 | 1970-01-01 00:00:00.000000100 | lexington |",
"| 632 | 5 | 1970-01-01 00:00:00.000000120 | reading |",
"| 872 | 6 | 1970-01-01 00:00:00.000000110 | lawrence |",
"+-------+--------+-------------------------------+-----------+",
"+-------+--------+--------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+--------------------------------+-----------+",
"| 372 | 5 | 1970-01-01T00:00:00.000000100Z | lexington |",
"| 632 | 5 | 1970-01-01T00:00:00.000000120Z | reading |",
"| 872 | 6 | 1970-01-01T00:00:00.000000110Z | lawrence |",
"+-------+--------+--------------------------------+-----------+",
];
run_sql_test_case!(
TwoMeasurementsPredicatePushDown {},
@ -518,15 +518,15 @@ async fn sql_predicate_pushdown_correctness_5() {
async fn sql_predicate_pushdown_correctness_6() {
// Test 6: two push-down expressions: count > 200 and count < 40000
let expected = vec![
"+-------+--------+-------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+-------------------------------+-----------+",
"| 372 | 5 | 1970-01-01 00:00:00.000000100 | lexington |",
"| 471 | 6 | 1970-01-01 00:00:00.000000110 | tewsbury |",
"| 632 | 5 | 1970-01-01 00:00:00.000000120 | reading |",
"| 632 | 6 | 1970-01-01 00:00:00.000000130 | reading |",
"| 872 | 6 | 1970-01-01 00:00:00.000000110 | lawrence |",
"+-------+--------+-------------------------------+-----------+",
"+-------+--------+--------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+--------------------------------+-----------+",
"| 372 | 5 | 1970-01-01T00:00:00.000000100Z | lexington |",
"| 471 | 6 | 1970-01-01T00:00:00.000000110Z | tewsbury |",
"| 632 | 5 | 1970-01-01T00:00:00.000000120Z | reading |",
"| 632 | 6 | 1970-01-01T00:00:00.000000130Z | reading |",
"| 872 | 6 | 1970-01-01T00:00:00.000000110Z | lawrence |",
"+-------+--------+--------------------------------+-----------+",
];
run_sql_test_case!(
TwoMeasurementsPredicatePushDown {},
@ -539,16 +539,16 @@ async fn sql_predicate_pushdown_correctness_6() {
async fn sql_predicate_pushdown_correctness_7() {
// Test 7: two push-down expressions on float: system > 4.0 and system < 7.0
let expected = vec![
"+-------+--------+-------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+-------------------------------+-----------+",
"| 372 | 5 | 1970-01-01 00:00:00.000000100 | lexington |",
"| 40000 | 5 | 1970-01-01 00:00:00.000000100 | andover |",
"| 471 | 6 | 1970-01-01 00:00:00.000000110 | tewsbury |",
"| 632 | 5 | 1970-01-01 00:00:00.000000120 | reading |",
"| 632 | 6 | 1970-01-01 00:00:00.000000130 | reading |",
"| 872 | 6 | 1970-01-01 00:00:00.000000110 | lawrence |",
"+-------+--------+-------------------------------+-----------+",
"+-------+--------+--------------------------------+-----------+",
"| count | system | time | town |",
"+-------+--------+--------------------------------+-----------+",
"| 372 | 5 | 1970-01-01T00:00:00.000000100Z | lexington |",
"| 40000 | 5 | 1970-01-01T00:00:00.000000100Z | andover |",
"| 471 | 6 | 1970-01-01T00:00:00.000000110Z | tewsbury |",
"| 632 | 5 | 1970-01-01T00:00:00.000000120Z | reading |",
"| 632 | 6 | 1970-01-01T00:00:00.000000130Z | reading |",
"| 872 | 6 | 1970-01-01T00:00:00.000000110Z | lawrence |",
"+-------+--------+--------------------------------+-----------+",
];
run_sql_test_case!(
TwoMeasurementsPredicatePushDown {},
@ -561,13 +561,13 @@ async fn sql_predicate_pushdown_correctness_7() {
async fn sql_predicate_pushdown_correctness_8() {
// Test 8: two push-down expressions on float: system > 5.0 and system < 7.0
let expected = vec![
"+-------+--------+-------------------------------+----------+",
"| count | system | time | town |",
"+-------+--------+-------------------------------+----------+",
"| 471 | 6 | 1970-01-01 00:00:00.000000110 | tewsbury |",
"| 632 | 6 | 1970-01-01 00:00:00.000000130 | reading |",
"| 872 | 6 | 1970-01-01 00:00:00.000000110 | lawrence |",
"+-------+--------+-------------------------------+----------+",
"+-------+--------+--------------------------------+----------+",
"| count | system | time | town |",
"+-------+--------+--------------------------------+----------+",
"| 471 | 6 | 1970-01-01T00:00:00.000000110Z | tewsbury |",
"| 632 | 6 | 1970-01-01T00:00:00.000000130Z | reading |",
"| 872 | 6 | 1970-01-01T00:00:00.000000110Z | lawrence |",
"+-------+--------+--------------------------------+----------+",
];
run_sql_test_case!(
TwoMeasurementsPredicatePushDown {},
@ -580,12 +580,12 @@ async fn sql_predicate_pushdown_correctness_8() {
async fn sql_predicate_pushdown_correctness_9() {
// Test 9: three push-down expressions: system > 5.0 and town != 'tewsbury' and system < 7.0
let expected = vec![
"+-------+--------+-------------------------------+----------+",
"| count | system | time | town |",
"+-------+--------+-------------------------------+----------+",
"| 632 | 6 | 1970-01-01 00:00:00.000000130 | reading |",
"| 872 | 6 | 1970-01-01 00:00:00.000000110 | lawrence |",
"+-------+--------+-------------------------------+----------+",
"+-------+--------+--------------------------------+----------+",
"| count | system | time | town |",
"+-------+--------+--------------------------------+----------+",
"| 632 | 6 | 1970-01-01T00:00:00.000000130Z | reading |",
"| 872 | 6 | 1970-01-01T00:00:00.000000110Z | lawrence |",
"+-------+--------+--------------------------------+----------+",
];
run_sql_test_case!(
TwoMeasurementsPredicatePushDown {},
@ -599,11 +599,11 @@ async fn sql_predicate_pushdown_correctness_10() {
// Test 10: three push-down expressions: system > 5.0 and town != 'tewsbury' and system < 7.0
// even though there are more expressions (count = 632 or town = 'reading') in the filter
let expected = vec![
"+-------+--------+-------------------------------+---------+",
"| count | system | time | town |",
"+-------+--------+-------------------------------+---------+",
"| 632 | 6 | 1970-01-01 00:00:00.000000130 | reading |",
"+-------+--------+-------------------------------+---------+",
"+-------+--------+--------------------------------+---------+",
"| count | system | time | town |",
"+-------+--------+--------------------------------+---------+",
"| 632 | 6 | 1970-01-01T00:00:00.000000130Z | reading |",
"+-------+--------+--------------------------------+---------+",
];
run_sql_test_case!(
TwoMeasurementsPredicatePushDown {},
@ -651,11 +651,11 @@ async fn sql_predicate_pushdown_correctness_13() {
//
// Check correctness
let expected = vec![
"+-------+--------+-------------------------------+---------+",
"| count | system | time | town |",
"+-------+--------+-------------------------------+---------+",
"| 632 | 6 | 1970-01-01 00:00:00.000000130 | reading |",
"+-------+--------+-------------------------------+---------+",
"+-------+--------+--------------------------------+---------+",
"| count | system | time | town |",
"+-------+--------+--------------------------------+---------+",
"| 632 | 6 | 1970-01-01T00:00:00.000000130Z | reading |",
"+-------+--------+--------------------------------+---------+",
];
run_sql_test_case!(
TwoMeasurementsPredicatePushDown {},
@ -670,28 +670,28 @@ async fn sql_deduplicate_1() {
let sql =
"select time, state, city, min_temp, max_temp, area from h2o order by time, state, city";
let expected = vec![
"+-------------------------------+-------+---------+----------+----------+------+",
"| time | state | city | min_temp | max_temp | area |",
"+-------------------------------+-------+---------+----------+----------+------+",
"| 1970-01-01 00:00:00.000000050 | MA | Boston | 70.4 | | |",
"| 1970-01-01 00:00:00.000000150 | MA | Bedford | 71.59 | 78.75 | 742 |",
"| 1970-01-01 00:00:00.000000250 | MA | Andover | | 69.2 | |",
"| 1970-01-01 00:00:00.000000250 | MA | Boston | 65.4 | 75.4 | |",
"| 1970-01-01 00:00:00.000000250 | MA | Reading | 53.4 | | |",
"| 1970-01-01 00:00:00.000000300 | CA | SF | 79 | 87.2 | 500 |",
"| 1970-01-01 00:00:00.000000300 | CA | SJ | 78.5 | 88 | |",
"| 1970-01-01 00:00:00.000000350 | CA | SJ | 75.5 | 84.08 | |",
"| 1970-01-01 00:00:00.000000400 | MA | Bedford | 65.22 | 80.75 | 750 |",
"| 1970-01-01 00:00:00.000000400 | MA | Boston | 65.4 | 82.67 | |",
"| 1970-01-01 00:00:00.000000450 | CA | SJ | 77 | 90.7 | |",
"| 1970-01-01 00:00:00.000000500 | CA | SJ | 69.5 | 88.2 | |",
"| 1970-01-01 00:00:00.000000600 | MA | Bedford | | 88.75 | 742 |",
"| 1970-01-01 00:00:00.000000600 | MA | Boston | 67.4 | | |",
"| 1970-01-01 00:00:00.000000600 | MA | Reading | 60.4 | | |",
"| 1970-01-01 00:00:00.000000650 | CA | SF | 68.4 | 85.7 | 500 |",
"| 1970-01-01 00:00:00.000000650 | CA | SJ | 69.5 | 89.2 | |",
"| 1970-01-01 00:00:00.000000700 | CA | SJ | 75.5 | 84.08 | |",
"+-------------------------------+-------+---------+----------+----------+------+",
"+--------------------------------+-------+---------+----------+----------+------+",
"| time | state | city | min_temp | max_temp | area |",
"+--------------------------------+-------+---------+----------+----------+------+",
"| 1970-01-01T00:00:00.000000050Z | MA | Boston | 70.4 | | |",
"| 1970-01-01T00:00:00.000000150Z | MA | Bedford | 71.59 | 78.75 | 742 |",
"| 1970-01-01T00:00:00.000000250Z | MA | Andover | | 69.2 | |",
"| 1970-01-01T00:00:00.000000250Z | MA | Boston | 65.4 | 75.4 | |",
"| 1970-01-01T00:00:00.000000250Z | MA | Reading | 53.4 | | |",
"| 1970-01-01T00:00:00.000000300Z | CA | SF | 79 | 87.2 | 500 |",
"| 1970-01-01T00:00:00.000000300Z | CA | SJ | 78.5 | 88 | |",
"| 1970-01-01T00:00:00.000000350Z | CA | SJ | 75.5 | 84.08 | |",
"| 1970-01-01T00:00:00.000000400Z | MA | Bedford | 65.22 | 80.75 | 750 |",
"| 1970-01-01T00:00:00.000000400Z | MA | Boston | 65.4 | 82.67 | |",
"| 1970-01-01T00:00:00.000000450Z | CA | SJ | 77 | 90.7 | |",
"| 1970-01-01T00:00:00.000000500Z | CA | SJ | 69.5 | 88.2 | |",
"| 1970-01-01T00:00:00.000000600Z | MA | Bedford | | 88.75 | 742 |",
"| 1970-01-01T00:00:00.000000600Z | MA | Boston | 67.4 | | |",
"| 1970-01-01T00:00:00.000000600Z | MA | Reading | 60.4 | | |",
"| 1970-01-01T00:00:00.000000650Z | CA | SF | 68.4 | 85.7 | 500 |",
"| 1970-01-01T00:00:00.000000650Z | CA | SJ | 69.5 | 89.2 | |",
"| 1970-01-01T00:00:00.000000700Z | CA | SJ | 75.5 | 84.08 | |",
"+--------------------------------+-------+---------+----------+----------+------+",
];
run_sql_test_case!(OneMeasurementThreeChunksWithDuplicates {}, sql, &expected);
}
@ -712,14 +712,14 @@ async fn sql_select_non_keys() {
#[tokio::test]
async fn sql_select_all_different_tags_chunks() {
let expected = vec![
"+--------+------------+---------+-------+------+-------------------------------+",
"| city | other_temp | reading | state | temp | time |",
"+--------+------------+---------+-------+------+-------------------------------+",
"| | | | MA | 70.4 | 1970-01-01 00:00:00.000000050 |",
"| | 70.4 | | MA | | 1970-01-01 00:00:00.000000250 |",
"| Boston | | 51 | | 53.4 | 1970-01-01 00:00:00.000000050 |",
"| Boston | 72.4 | | | | 1970-01-01 00:00:00.000000350 |",
"+--------+------------+---------+-------+------+-------------------------------+",
"+--------+------------+---------+-------+------+--------------------------------+",
"| city | other_temp | reading | state | temp | time |",
"+--------+------------+---------+-------+------+--------------------------------+",
"| | | | MA | 70.4 | 1970-01-01T00:00:00.000000050Z |",
"| | 70.4 | | MA | | 1970-01-01T00:00:00.000000250Z |",
"| Boston | | 51 | | 53.4 | 1970-01-01T00:00:00.000000050Z |",
"| Boston | 72.4 | | | | 1970-01-01T00:00:00.000000350Z |",
"+--------+------------+---------+-------+------+--------------------------------+",
];
run_sql_test_case!(
OneMeasurementTwoChunksDifferentTagSet {},

@ -1385,11 +1385,11 @@ mod tests {
let batches = run_query(db, "select * from cpu").await;
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000010 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000010Z |",
"+-----+--------------------------------+",
];
assert_batches_eq!(expected, &batches);
}
@ -1539,12 +1539,12 @@ mod tests {
let batches = run_query(db, "select * from cpu order by time").await;
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 2 | 1970-01-01 00:00:00.000000020 |",
"| 3 | 1970-01-01 00:00:00.000000030 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 2 | 1970-01-01T00:00:00.000000020Z |",
"| 3 | 1970-01-01T00:00:00.000000030Z |",
"+-----+--------------------------------+",
];
assert_batches_eq!(expected, &batches);
}
@ -1626,11 +1626,11 @@ mod tests {
let batches = run_query(db, "select * from cpu").await;
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000010 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000010Z |",
"+-----+--------------------------------+",
];
assert_batches_eq!(expected, &batches);
}
@ -1927,11 +1927,11 @@ mod tests {
assert_eq!(mb_chunk.id(), 0);
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000010 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000010Z |",
"+-----+--------------------------------+",
];
let batches = run_query(Arc::clone(&db), "select * from cpu").await;
assert_batches_sorted_eq!(expected, &batches);
@ -1939,12 +1939,12 @@ mod tests {
// add new data
write_lp(db.as_ref(), "cpu bar=2 20").await;
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000010 |",
"| 2 | 1970-01-01 00:00:00.000000020 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000010Z |",
"| 2 | 1970-01-01T00:00:00.000000020Z |",
"+-----+--------------------------------+",
];
let batches = run_query(Arc::clone(&db), "select * from cpu").await;
assert_batches_sorted_eq!(&expected, &batches);
@ -1985,13 +1985,13 @@ mod tests {
assert_eq!(mb_chunk.id(), 0);
let expected = vec![
"+------+--------+-------------------------------+------+",
"| core | region | time | user |",
"+------+--------+-------------------------------+------+",
"| | west | 1970-01-01 00:00:00.000000010 | 23.2 |",
"| | | 1970-01-01 00:00:00.000000011 | 10 |",
"| one | | 1970-01-01 00:00:00.000000011 | 10 |",
"+------+--------+-------------------------------+------+",
"+------+--------+--------------------------------+------+",
"| core | region | time | user |",
"+------+--------+--------------------------------+------+",
"| | | 1970-01-01T00:00:00.000000011Z | 10 |",
"| | west | 1970-01-01T00:00:00.000000010Z | 23.2 |",
"| one | | 1970-01-01T00:00:00.000000011Z | 10 |",
"+------+--------+--------------------------------+------+",
];
let batches = run_query(Arc::clone(&db), "select * from cpu").await;
assert_batches_sorted_eq!(expected, &batches);
@ -2026,12 +2026,12 @@ mod tests {
// data should be readable
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000010 |",
"| 2 | 1970-01-01 00:00:00.000000020 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000010Z |",
"| 2 | 1970-01-01T00:00:00.000000020Z |",
"+-----+--------------------------------+",
];
let batches = run_query(Arc::clone(&db), "select * from cpu").await;
assert_batches_eq!(&expected, &batches);
@ -2109,12 +2109,12 @@ mod tests {
// data should be readable
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000010 |",
"| 2 | 1970-01-01 00:00:00.000000020 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000010Z |",
"| 2 | 1970-01-01T00:00:00.000000020Z |",
"+-----+--------------------------------+",
];
let batches = run_query(Arc::clone(&db), "select * from cpu").await;
assert_batches_eq!(&expected, &batches);
@ -2190,32 +2190,32 @@ mod tests {
assert_batches_eq!(
&[
"+-----+----------+------+-------------------------------+",
"| bar | tag1 | tag2 | time |",
"+-----+----------+------+-------------------------------+",
"| 1 | cupcakes | | 1970-01-01 00:00:00.000000010 |",
"| 2 | asfd | foo | 1970-01-01 00:00:00.000000020 |",
"| 2 | bingo | foo | 1970-01-01 00:00:00.000000010 |",
"| 2 | bongo | a | 1970-01-01 00:00:00.000000020 |",
"| 2 | bongo | a | 1970-01-01 00:00:00.000000010 |",
"| 3 | | a | 1970-01-01 00:00:00.000000005 |",
"+-----+----------+------+-------------------------------+",
"+-----+----------+------+--------------------------------+",
"| bar | tag1 | tag2 | time |",
"+-----+----------+------+--------------------------------+",
"| 1 | cupcakes | | 1970-01-01T00:00:00.000000010Z |",
"| 2 | asfd | foo | 1970-01-01T00:00:00.000000020Z |",
"| 2 | bingo | foo | 1970-01-01T00:00:00.000000010Z |",
"| 2 | bongo | a | 1970-01-01T00:00:00.000000020Z |",
"| 2 | bongo | a | 1970-01-01T00:00:00.000000010Z |",
"| 3 | | a | 1970-01-01T00:00:00.000000005Z |",
"+-----+----------+------+--------------------------------+",
],
&mb
);
assert_batches_eq!(
&[
"+-----+----------+------+-------------------------------+",
"| bar | tag1 | tag2 | time |",
"+-----+----------+------+-------------------------------+",
"| 1 | cupcakes | | 1970-01-01 00:00:00.000000010 |",
"| 3 | | a | 1970-01-01 00:00:00.000000005 |",
"| 2 | bongo | a | 1970-01-01 00:00:00.000000010 |",
"| 2 | bongo | a | 1970-01-01 00:00:00.000000020 |",
"| 2 | asfd | foo | 1970-01-01 00:00:00.000000020 |",
"| 2 | bingo | foo | 1970-01-01 00:00:00.000000010 |",
"+-----+----------+------+-------------------------------+",
"+-----+----------+------+--------------------------------+",
"| bar | tag1 | tag2 | time |",
"+-----+----------+------+--------------------------------+",
"| 1 | cupcakes | | 1970-01-01T00:00:00.000000010Z |",
"| 3 | | a | 1970-01-01T00:00:00.000000005Z |",
"| 2 | bongo | a | 1970-01-01T00:00:00.000000010Z |",
"| 2 | bongo | a | 1970-01-01T00:00:00.000000020Z |",
"| 2 | asfd | foo | 1970-01-01T00:00:00.000000020Z |",
"| 2 | bingo | foo | 1970-01-01T00:00:00.000000010Z |",
"+-----+----------+------+--------------------------------+",
],
&rb
);
@@ -2327,12 +2327,12 @@ mod tests {
read_data_from_parquet_data(Arc::clone(&schema.as_arrow()), parquet_data);
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000010 |",
"| 2 | 1970-01-01 00:00:00.000000020 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000010Z |",
"| 2 | 1970-01-01T00:00:00.000000020Z |",
"+-----+--------------------------------+",
];
assert_batches_eq!(expected, &record_batches);
}
@@ -2469,12 +2469,12 @@ mod tests {
read_data_from_parquet_data(Arc::clone(&schema.as_arrow()), parquet_data);
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000010 |",
"| 2 | 1970-01-01 00:00:00.000000020 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000010Z |",
"| 2 | 1970-01-01T00:00:00.000000020Z |",
"+-----+--------------------------------+",
];
assert_batches_eq!(expected, &record_batches);
}


@@ -183,13 +183,13 @@ mod tests {
];
let expected = vec![
"+----+---------------+------------+-------------------+------------------------------+--------------+--------------------+-----------+---------------------+---------------------+---------------------+-------------+",
"| id | partition_key | table_name | storage | lifecycle_action | memory_bytes | object_store_bytes | row_count | time_of_last_access | time_of_first_write | time_of_last_write | time_closed |",
"+----+---------------+------------+-------------------+------------------------------+--------------+--------------------+-----------+---------------------+---------------------+---------------------+-------------+",
"| 0 | p1 | table1 | OpenMutableBuffer | | 23754 | | 11 | | 1970-01-01 00:00:10 | | |",
"| 1 | p1 | table1 | OpenMutableBuffer | Persisting to Object Storage | 23455 | | 22 | 1970-01-01 00:12:34 | | 1970-01-01 00:01:20 | |",
"| 2 | p1 | table1 | ObjectStoreOnly | | 1234 | 5678 | 33 | 1970-01-01 00:00:05 | 1970-01-01 00:01:40 | 1970-01-01 00:03:20 | |",
"+----+---------------+------------+-------------------+------------------------------+--------------+--------------------+-----------+---------------------+---------------------+---------------------+-------------+",
"+----+---------------+------------+-------------------+------------------------------+--------------+--------------------+-----------+----------------------+----------------------+----------------------+-------------+",
"| id | partition_key | table_name | storage | lifecycle_action | memory_bytes | object_store_bytes | row_count | time_of_last_access | time_of_first_write | time_of_last_write | time_closed |",
"+----+---------------+------------+-------------------+------------------------------+--------------+--------------------+-----------+----------------------+----------------------+----------------------+-------------+",
"| 0 | p1 | table1 | OpenMutableBuffer | | 23754 | | 11 | | 1970-01-01T00:00:10Z | | |",
"| 1 | p1 | table1 | OpenMutableBuffer | Persisting to Object Storage | 23455 | | 22 | 1970-01-01T00:12:34Z | | 1970-01-01T00:01:20Z | |",
"| 2 | p1 | table1 | ObjectStoreOnly | | 1234 | 5678 | 33 | 1970-01-01T00:00:05Z | 1970-01-01T00:01:40Z | 1970-01-01T00:03:20Z | |",
"+----+---------------+------------+-------------------+------------------------------+--------------+--------------------+-----------+----------------------+----------------------+----------------------+-------------+",
];
let schema = chunk_summaries_schema();


@@ -139,12 +139,12 @@ mod tests {
];
let expected = vec![
"+---------------+------------+-----------+-------------------------------+-------------------------------+-------------------------------+-------------------------------+",
"| partition_key | table_name | row_count | time_of_first_write | time_of_last_write | min_timestamp | max_timestamp |",
"+---------------+------------+-----------+-------------------------------+-------------------------------+-------------------------------+-------------------------------+",
"| partition | table | 320 | 1970-01-01 00:00:00 | 1970-01-01 00:00:00.000000020 | 1970-01-01 00:00:00.000000050 | 1970-01-01 00:00:00.000000060 |",
"| partition | table | 2 | 1970-01-01 00:00:00.000000006 | 1970-01-01 00:00:00.000000021 | 1970-01-01 00:00:00.000000001 | 1970-01-01 00:00:00.000000002 |",
"+---------------+------------+-----------+-------------------------------+-------------------------------+-------------------------------+-------------------------------+",
"+---------------+------------+-----------+--------------------------------+--------------------------------+--------------------------------+--------------------------------+",
"| partition_key | table_name | row_count | time_of_first_write | time_of_last_write | min_timestamp | max_timestamp |",
"+---------------+------------+-----------+--------------------------------+--------------------------------+--------------------------------+--------------------------------+",
"| partition | table | 320 | 1970-01-01T00:00:00Z | 1970-01-01T00:00:00.000000020Z | 1970-01-01T00:00:00.000000050Z | 1970-01-01T00:00:00.000000060Z |",
"| partition | table | 2 | 1970-01-01T00:00:00.000000006Z | 1970-01-01T00:00:00.000000021Z | 1970-01-01T00:00:00.000000001Z | 1970-01-01T00:00:00.000000002Z |",
"+---------------+------------+-----------+--------------------------------+--------------------------------+--------------------------------+--------------------------------+",
];
let schema = persistence_windows_schema();


@@ -1627,11 +1627,11 @@ mod tests {
let batches = run_query(db, "select * from cpu").await;
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000010 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000010Z |",
"+-----+--------------------------------+",
];
assert_batches_eq!(expected, &batches);
}
@@ -1672,11 +1672,11 @@ mod tests {
let batches = run_query(db, "select * from cpu").await;
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000010 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000010Z |",
"+-----+--------------------------------+",
];
assert_batches_eq!(expected, &batches);


@@ -954,11 +954,11 @@ mod tests {
let batches = run_query(test_db, "select * from h2o_temperature").await;
let expected = vec![
"+----------------+--------------+-------+-----------------+---------------------+",
"| bottom_degrees | location | state | surface_degrees | time |",
"+----------------+--------------+-------+-----------------+---------------------+",
"| 50.4 | santa_monica | CA | 65.2 | 2021-04-01 14:10:24 |",
"+----------------+--------------+-------+-----------------+---------------------+",
"+----------------+--------------+-------+-----------------+----------------------+",
"| bottom_degrees | location | state | surface_degrees | time |",
"+----------------+--------------+-------+-----------------+----------------------+",
"| 50.4 | santa_monica | CA | 65.2 | 2021-04-01T14:10:24Z |",
"+----------------+--------------+-------+-----------------+----------------------+",
];
assert_batches_eq!(expected, &batches);
}
@@ -1102,14 +1102,14 @@ mod tests {
assert_eq!(get_content_type(&response), "text/plain");
let res =
"+----------------+--------------+-------+-----------------+---------------------+\n\
| bottom_degrees | location | state | surface_degrees | time |\n\
+----------------+--------------+-------+-----------------+---------------------+\n\
| 50.4 | santa_monica | CA | 65.2 | 2021-04-01 14:10:24 |\n\
+----------------+--------------+-------+-----------------+---------------------+\n";
let expected = r#"+----------------+--------------+-------+-----------------+----------------------+
| bottom_degrees | location | state | surface_degrees | time |
+----------------+--------------+-------+-----------------+----------------------+
| 50.4 | santa_monica | CA | 65.2 | 2021-04-01T14:10:24Z |
+----------------+--------------+-------+-----------------+----------------------+
"#;
check_response("query", response, StatusCode::OK, Some(res)).await;
check_response("query", response, StatusCode::OK, Some(expected)).await;
// same response is expected if we explicitly request 'format=pretty'
let response = client
@@ -1121,7 +1121,7 @@ mod tests {
.await;
assert_eq!(get_content_type(&response), "text/plain");
check_response("query", response, StatusCode::OK, Some(res)).await;
check_response("query", response, StatusCode::OK, Some(expected)).await;
}
#[tokio::test]
@@ -1220,11 +1220,11 @@ mod tests {
let batches = run_query(test_db, "select * from h2o_temperature").await;
let expected = vec![
"+----------------+--------------+-------+-----------------+---------------------+",
"| bottom_degrees | location | state | surface_degrees | time |",
"+----------------+--------------+-------+-----------------+---------------------+",
"| 50.4 | santa_monica | CA | 65.2 | 2021-04-01 14:10:24 |",
"+----------------+--------------+-------+-----------------+---------------------+",
"+----------------+--------------+-------+-----------------+----------------------+",
"| bottom_degrees | location | state | surface_degrees | time |",
"+----------------+--------------+-------+-----------------+----------------------+",
"| 50.4 | santa_monica | CA | 65.2 | 2021-04-01T14:10:24Z |",
"+----------------+--------------+-------+-----------------+----------------------+",
];
assert_batches_eq!(expected, &batches);
}


@@ -312,11 +312,11 @@ async fn assert_chunk_query_works(fixture: &ServerFixture, db_name: &str) {
let batches = collect_query(query_results).await;
let expected_read_data = vec![
"+--------+------+-------------------------------+",
"| region | user | time |",
"+--------+------+-------------------------------+",
"| west | 23.2 | 1970-01-01 00:00:00.000000100 |",
"+--------+------+-------------------------------+",
"+--------+------+--------------------------------+",
"| region | user | time |",
"+--------+------+--------------------------------+",
"| west | 23.2 | 1970-01-01T00:00:00.000000100Z |",
"+--------+------+--------------------------------+",
];
assert_batches_eq!(expected_read_data, &batches);


@@ -36,7 +36,7 @@ pub async fn test() {
assert_eq!(
lines, expected_read_data,
"Actual:\n{:#?}\nExpected:\n{:#?}",
"Actual:\n\n{:#?}\nExpected:\n\n{:#?}",
lines, expected_read_data
);
}


@@ -81,12 +81,15 @@ async fn create_database(db_name: &str, addr: &str) {
}
async fn test_read_default(db_name: &str, addr: &str) {
let expected = "+--------+-------------------------------+------+\n\
| region | time | user |\n\
+--------+-------------------------------+------+\n\
| west | 1970-01-01 00:00:00.000000100 | 23.2 |\n\
| west | 1970-01-01 00:00:00.000000150 | 21 |\n\
+--------+-------------------------------+------+";
let expected = r#"
+--------+--------------------------------+------+
| region | time | user |
+--------+--------------------------------+------+
| west | 1970-01-01T00:00:00.000000100Z | 23.2 |
| west | 1970-01-01T00:00:00.000000150Z | 21 |
+--------+--------------------------------+------+
"#
.trim();
Command::cargo_bin("influxdb_iox")
.unwrap()
@@ -102,12 +105,15 @@ async fn test_read_default(db_name: &str, addr: &str) {
}
async fn test_read_format_pretty(db_name: &str, addr: &str) {
let expected = "+--------+-------------------------------+------+\n\
| region | time | user |\n\
+--------+-------------------------------+------+\n\
| west | 1970-01-01 00:00:00.000000100 | 23.2 |\n\
| west | 1970-01-01 00:00:00.000000150 | 21 |\n\
+--------+-------------------------------+------+";
let expected = r#"
+--------+--------------------------------+------+
| region | time | user |
+--------+--------------------------------+------+
| west | 1970-01-01T00:00:00.000000100Z | 23.2 |
| west | 1970-01-01T00:00:00.000000150Z | 21 |
+--------+--------------------------------+------+
"#
.trim();
Command::cargo_bin("influxdb_iox")
.unwrap()


@@ -212,7 +212,7 @@ impl Scenario {
])
.unwrap();
arrow::util::pretty::pretty_format_batches(&[batch])
arrow_util::display::pretty_format_batches(&[batch])
.unwrap()
.trim()
.split('\n')


@@ -109,11 +109,11 @@ async fn test_sql_use_database() {
create_two_partition_database(&db_name, fixture.grpc_channel()).await;
let expected_output = r#"
+------+---------+----------+---------------------+-------+
| host | running | sleeping | time | total |
+------+---------+----------+---------------------+-------+
| foo | 4 | 514 | 2020-06-23 06:38:30 | 519 |
+------+---------+----------+---------------------+-------+
+------+---------+----------+----------------------+-------+
| host | running | sleeping | time | total |
+------+---------+----------+----------------------+-------+
| foo | 4 | 514 | 2020-06-23T06:38:30Z | 519 |
+------+---------+----------+----------------------+-------+
"#
.trim();


@@ -116,12 +116,12 @@ async fn test_write_entry() {
}
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000010 |",
"| 2 | 1970-01-01 00:00:00.000000020 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000010Z |",
"| 2 | 1970-01-01T00:00:00.000000020Z |",
"+-----+--------------------------------+",
];
assert_batches_sorted_eq!(&expected, &batches);
}
@@ -298,12 +298,12 @@ async fn test_write_routed() {
}
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000100 |",
"| 2 | 1970-01-01 00:00:00.000000200 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000100Z |",
"| 2 | 1970-01-01T00:00:00.000000200Z |",
"+-----+--------------------------------+",
];
assert_batches_sorted_eq!(&expected, &batches);
@@ -327,11 +327,11 @@ async fn test_write_routed() {
}
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 3 | 1970-01-01 00:00:00.000000300 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 3 | 1970-01-01T00:00:00.000000300Z |",
"+-----+--------------------------------+",
];
assert_batches_sorted_eq!(&expected, &batches);
@@ -357,11 +357,11 @@ async fn test_write_routed() {
}
let expected = vec![
"+-----+-------------------------------+",
"| baz | time |",
"+-----+-------------------------------+",
"| 4 | 1970-01-01 00:00:00.000000400 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| baz | time |",
"+-----+--------------------------------+",
"| 4 | 1970-01-01T00:00:00.000000400Z |",
"+-----+--------------------------------+",
];
assert_batches_sorted_eq!(&expected, &batches);
}
@@ -557,11 +557,11 @@ async fn test_write_routed_no_shard() {
}
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 1 | 1970-01-01 00:00:00.000000100 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 1 | 1970-01-01T00:00:00.000000100Z |",
"+-----+--------------------------------+",
];
assert_batches_sorted_eq!(&expected, &batches);
@@ -585,11 +585,11 @@ async fn test_write_routed_no_shard() {
}
let expected = vec![
"+-----+-------------------------------+",
"| bar | time |",
"+-----+-------------------------------+",
"| 2 | 1970-01-01 00:00:00.000000100 |",
"+-----+-------------------------------+",
"+-----+--------------------------------+",
"| bar | time |",
"+-----+--------------------------------+",
"| 2 | 1970-01-01T00:00:00.000000100Z |",
"+-----+--------------------------------+",
];
assert_batches_sorted_eq!(&expected, &batches);


@@ -163,14 +163,14 @@ async fn reads_come_from_kafka() {
}
let expected = vec![
"+--------+-------------------------------+------+",
"| region | time | user |",
"+--------+-------------------------------+------+",
"| east | 1970-01-01 00:00:00.000000300 | 76.2 |",
"| east | 1970-01-01 00:00:00.000000350 | 88.7 |",
"| west | 1970-01-01 00:00:00.000000100 | 23.2 |",
"| west | 1970-01-01 00:00:00.000000150 | 21 |",
"+--------+-------------------------------+------+",
"+--------+--------------------------------+------+",
"| region | time | user |",
"+--------+--------------------------------+------+",
"| east | 1970-01-01T00:00:00.000000300Z | 76.2 |",
"| east | 1970-01-01T00:00:00.000000350Z | 88.7 |",
"| west | 1970-01-01T00:00:00.000000100Z | 23.2 |",
"| west | 1970-01-01T00:00:00.000000150Z | 21 |",
"+--------+--------------------------------+------+",
];
assert_batches_sorted_eq!(&expected, &batches);
break;


@@ -73,11 +73,11 @@ pub async fn test_write_pb() {
}
let expected = vec![
"+--------+-------------------------------+",
"| mycol1 | time |",
"+--------+-------------------------------+",
"| 5 | 1970-01-01 00:00:00.000000003 |",
"+--------+-------------------------------+",
"+--------+--------------------------------+",
"| mycol1 | time |",
"+--------+--------------------------------+",
"| 5 | 1970-01-01T00:00:00.000000003Z |",
"+--------+--------------------------------+",
];
assert_batches_sorted_eq!(&expected, &batches);
}
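
For reference, every updated expectation above asserts the same rendering: a `T` date/time separator, a trailing `Z`, and sub-second digits only when they are non-zero (`1970-01-01T00:00:10Z` vs `1970-01-01T00:00:00.000000010Z`). A minimal sketch of that behaviour, assuming chrono's RFC3339 formatter with automatic sub-second precision matches the pretty printer exercised by these tests (illustrative only, not part of this diff):

// Illustrative sketch, not part of this diff: the assertion strings above
// are consistent with chrono's RFC3339 output using automatic sub-second
// precision (whether the IOx printer uses chrono this way is an assumption).
use chrono::{SecondsFormat, TimeZone, Utc};

fn main() {
    // Nanosecond-precision value: all nine fractional digits are printed.
    let t1 = Utc.timestamp_nanos(10);
    assert_eq!(
        t1.to_rfc3339_opts(SecondsFormat::AutoSi, true),
        "1970-01-01T00:00:00.000000010Z"
    );

    // Whole-second value: the fractional part is omitted entirely.
    let t2 = Utc.timestamp_nanos(10_000_000_000);
    assert_eq!(
        t2.to_rfc3339_opts(SecondsFormat::AutoSi, true),
        "1970-01-01T00:00:10Z"
    );
}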