Merge branch 'main' into er/fix/flux/2691

pull/24376/head
Edd Robinson 2021-10-22 16:28:21 +01:00 committed by GitHub
commit 6683a1e629
2 changed files with 19 additions and 5 deletions


@@ -240,12 +240,15 @@ impl fmt::Display for Predicate {
         }
         if !self.exprs.is_empty() {
-            // Expr doesn't implement `Display` yet, so just the debug version
-            // See https://github.com/apache/arrow-datafusion/issues/347
-            let display_exprs = self.exprs.iter().map(|e| format!("{:?}", e));
-            write!(f, " exprs: [{}]", iter_to_str(display_exprs))?;
+            write!(f, " exprs: [")?;
+            for (i, expr) in self.exprs.iter().enumerate() {
+                write!(f, "{}", expr)?;
+                if i < self.exprs.len() - 1 {
+                    write!(f, ", ")?;
+                }
+            }
+            write!(f, "]")?;
         }
         Ok(())
     }
 }
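
For context, the separator handling introduced above can be exercised on its own. The following is a minimal standalone sketch, not IOx code: the ExprList wrapper and its string contents are hypothetical, but the loop mirrors the new Display logic of writing ", " between elements and never after the last one.

use std::fmt;

// Hypothetical wrapper type, used only to illustrate the separator logic.
struct ExprList(Vec<String>);

impl fmt::Display for ExprList {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "exprs: [")?;
        for (i, expr) in self.0.iter().enumerate() {
            write!(f, "{}", expr)?;
            // Emit the separator between elements only, never after the last one.
            if i < self.0.len() - 1 {
                write!(f, ", ")?;
            }
        }
        write!(f, "]")
    }
}

fn main() {
    let exprs = ExprList(vec!["env = \"prod\"".to_string(), "region != \"west\"".to_string()]);
    assert_eq!(exprs.to_string(), r#"exprs: [env = "prod", region != "west"]"#);
}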


@@ -1026,6 +1026,7 @@ mod test {
             .field("sketchy_sensor", Int64)
             .non_null_field("active", Boolean)
             .field("msg", Utf8)
+            .field("all_null", Utf8)
             .timestamp()
             .build()
             .unwrap();
@@ -1049,6 +1050,7 @@
                 Some("message b"),
                 None,
             ])),
+            Arc::new(StringArray::from(vec![None, None, None])),
             Arc::new(TimestampNanosecondArray::from_vec(
                 vec![i, 2 * i, 3 * i],
                 None,
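
As a side note, the all-NULL column added here can be reproduced in isolation with the arrow crate. The sketch below is a standalone assumption, not code taken from the test; it uses an explicit None::<&str> so the element type is unambiguous outside the test's surrounding context.

use arrow::array::{Array, StringArray};

fn main() {
    // Three rows, all NULL, mirroring the column the test appends.
    let all_null = StringArray::from(vec![None::<&str>, None, None]);
    assert_eq!(all_null.len(), 3);
    assert_eq!(all_null.null_count(), 3);
    assert!(all_null.is_null(0));
}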
@@ -1110,6 +1112,7 @@
         );
         assert_rb_column_equals(&first_row_group, "active", &exp_active_values);
         assert_rb_column_equals(&first_row_group, "msg", &exp_msg_values);
+        assert_rb_column_equals(&first_row_group, "all_null", &Values::String(vec![None]));
         assert_rb_column_equals(&first_row_group, "time", &Values::I64(vec![100])); // first row from first record batch

         let second_row_group = itr.next().unwrap();
@@ -1122,8 +1125,16 @@
             &exp_sketchy_sensor_values,
         );
         assert_rb_column_equals(&first_row_group, "active", &exp_active_values);
+        assert_rb_column_equals(&first_row_group, "all_null", &Values::String(vec![None]));
         assert_rb_column_equals(&second_row_group, "time", &Values::I64(vec![200])); // first row from second record batch

+        // No rows returned when filtering on all_null column
+        let predicate = Predicate::new(vec![BinaryExpr::from(("all_null", "!=", "a string"))]);
+        let mut itr = chunk
+            .read_filter(predicate, Selection::All, vec![])
+            .unwrap();
+        assert!(itr.next().is_none());
+
         // Error when predicate is invalid
         let predicate =
             Predicate::with_time_range(&[BinaryExpr::from(("env", "=", 22.3))], 100, 205);
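
The new read_filter assertion leans on SQL-style NULL comparison semantics: a binary expression such as all_null != "a string" is never true for a NULL value, so a column that is entirely NULL selects no rows. The sketch below is a plain-Rust illustration of that rule, using no IOx types and modelling each row as an Option<&str>.

// NULL-aware comparison: a predicate applied to a NULL value never matches.
fn matches(value: Option<&str>, operand: &str) -> bool {
    value.map(|v| v != operand).unwrap_or(false)
}

fn main() {
    let all_null_column: Vec<Option<&str>> = vec![None, None, None];
    let selected: Vec<_> = all_null_column
        .iter()
        .copied()
        .filter(|v| matches(*v, "a string"))
        .collect();
    // No rows match, mirroring `assert!(itr.next().is_none())` in the test.
    assert!(selected.is_empty());
}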