From 82588d5c72e769c2fb8bce0b19160f78e9008755 Mon Sep 17 00:00:00 2001 From: "Carol (Nichols || Goulding)" Date: Mon, 5 Apr 2021 14:41:02 -0400 Subject: [PATCH] fix: Don't return Result from test functions --- data_types/src/database_rules.rs | 43 +-- influxdb2_client/src/api/ready.rs | 7 +- influxdb2_client/src/api/setup.rs | 18 +- influxdb2_client/src/data_point.rs | 104 +++--- influxdb2_client/src/lib.rs | 15 +- influxdb_line_protocol/src/lib.rs | 409 +++++++++-------------- influxdb_tsm/src/mapper.rs | 137 ++++---- ingest/src/lib.rs | 207 ++++++------ mutable_buffer/src/column.rs | 54 ++- object_store/src/aws.rs | 40 +-- object_store/src/azure.rs | 13 +- object_store/src/buffer.rs | 23 +- object_store/src/disk.rs | 49 ++- object_store/src/gcp.rs | 32 +- object_store/src/memory.rs | 27 +- query/src/exec.rs | 41 +-- query/src/exec/schema_pivot.rs | 24 +- query/src/exec/seriesset.rs | 30 +- server/src/lib.rs | 37 +- src/commands/run.rs | 16 +- src/influxdb_ioxd/http.rs | 26 +- src/influxdb_ioxd/rpc/storage/service.rs | 34 +- wal/src/lib.rs | 43 +-- wal/tests/no_concurrency.rs | 21 +- 24 files changed, 587 insertions(+), 863 deletions(-) diff --git a/data_types/src/database_rules.rs b/data_types/src/database_rules.rs index 6ee605cac6..ec923618a5 100644 --- a/data_types/src/database_rules.rs +++ b/data_types/src/database_rules.rs @@ -974,35 +974,28 @@ mod tests { use super::*; - type TestError = Box<dyn std::error::Error>; - type Result<T = (), E = TestError> = std::result::Result<T, E>; - #[test] - fn partition_key_with_table() -> Result { + fn partition_key_with_table() { let template = PartitionTemplate { parts: vec![TemplatePart::Table], }; let line = parse_line("cpu foo=1 10"); assert_eq!("cpu", template.partition_key(&line, &Utc::now()).unwrap()); - - Ok(()) } #[test] - fn partition_key_with_int_field() -> Result { + fn partition_key_with_int_field() { let template = PartitionTemplate { parts: vec![TemplatePart::Column("foo".to_string())], }; let line = parse_line("cpu foo=1 10"); assert_eq!("foo_1", template.partition_key(&line, &Utc::now()).unwrap()); - - Ok(()) } #[test] - fn partition_key_with_float_field() -> Result { + fn partition_key_with_float_field() { let template = PartitionTemplate { parts: vec![TemplatePart::Column("foo".to_string())], }; @@ -1012,12 +1005,10 @@ "foo_1.1", template.partition_key(&line, &Utc::now()).unwrap() ); - - Ok(()) } #[test] - fn partition_key_with_string_field() -> Result { + fn partition_key_with_string_field() { let template = PartitionTemplate { parts: vec![TemplatePart::Column("foo".to_string())], }; @@ -1027,12 +1018,10 @@ "foo_asdf", template.partition_key(&line, &Utc::now()).unwrap() ); - - Ok(()) } #[test] - fn partition_key_with_bool_field() -> Result { + fn partition_key_with_bool_field() { let template = PartitionTemplate { parts: vec![TemplatePart::Column("bar".to_string())], }; @@ -1042,12 +1031,10 @@ "bar_true", template.partition_key(&line, &Utc::now()).unwrap() ); - - Ok(()) } #[test] - fn partition_key_with_tag_column() -> Result { + fn partition_key_with_tag_column() { let template = PartitionTemplate { parts: vec![TemplatePart::Column("region".to_string())], }; @@ -1057,24 +1044,20 @@ "region_west", template.partition_key(&line, &Utc::now()).unwrap() ); - - Ok(()) } #[test] - fn partition_key_with_missing_column() -> Result { + fn partition_key_with_missing_column() { let template = PartitionTemplate { parts: vec![TemplatePart::Column("not_here".to_string())], }; let line = parse_line("cpu,foo=asdf bar=true 10"); assert_eq!("",
template.partition_key(&line, &Utc::now()).unwrap()); - - Ok(()) } #[test] - fn partition_key_with_time() -> Result { + fn partition_key_with_time() { let template = PartitionTemplate { parts: vec![TemplatePart::TimeFormat("%Y-%m-%d %H:%M:%S".to_string())], }; @@ -1084,12 +1067,10 @@ mod tests { "2020-10-10 13:54:57", template.partition_key(&line, &Utc::now()).unwrap() ); - - Ok(()) } #[test] - fn partition_key_with_default_time() -> Result { + fn partition_key_with_default_time() { let format_string = "%Y-%m-%d %H:%M:%S"; let template = PartitionTemplate { parts: vec![TemplatePart::TimeFormat(format_string.to_string())], @@ -1101,12 +1082,10 @@ mod tests { default_time.format(format_string).to_string(), template.partition_key(&line, &default_time).unwrap() ); - - Ok(()) } #[test] - fn partition_key_with_many_parts() -> Result { + fn partition_key_with_many_parts() { let template = PartitionTemplate { parts: vec![ TemplatePart::Table, @@ -1123,8 +1102,6 @@ mod tests { "cpu-region_west-usage_system_53.1-2020-10-10 13:54:57", template.partition_key(&line, &Utc::now()).unwrap() ); - - Ok(()) } fn parsed_lines(lp: &str) -> Vec> { diff --git a/influxdb2_client/src/api/ready.rs b/influxdb2_client/src/api/ready.rs index 56ff17feae..504b081771 100644 --- a/influxdb2_client/src/api/ready.rs +++ b/influxdb2_client/src/api/ready.rs @@ -33,11 +33,8 @@ mod tests { use super::*; use mockito::mock; - type Error = Box; - type Result = std::result::Result; - #[tokio::test] - async fn ready() -> Result { + async fn ready() { let token = "some-token"; let mock_server = mock("GET", "/ready") @@ -49,7 +46,5 @@ mod tests { let _result = client.ready().await; mock_server.assert(); - - Ok(()) } } diff --git a/influxdb2_client/src/api/setup.rs b/influxdb2_client/src/api/setup.rs index 03b4612695..3b2ac4eb59 100644 --- a/influxdb2_client/src/api/setup.rs +++ b/influxdb2_client/src/api/setup.rs @@ -117,11 +117,8 @@ mod tests { use super::*; use mockito::mock; - type Error = Box; - type Result = std::result::Result; - #[tokio::test] - async fn is_onboarding_allowed() -> Result { + async fn is_onboarding_allowed() { let token = "some-token"; let mock_server = mock("GET", "/api/v2/setup") @@ -133,11 +130,10 @@ mod tests { let _result = client.is_onboarding_allowed().await; mock_server.assert(); - Ok(()) } #[tokio::test] - async fn onboarding() -> Result { + async fn onboarding() { let token = "some-token"; let username = "some-user"; let org = "some-org"; @@ -169,11 +165,10 @@ mod tests { .await; mock_server.assert(); - Ok(()) } #[tokio::test] - async fn post_setup_user() -> Result { + async fn post_setup_user() { let token = "some-token"; let username = "some-user"; let org = "some-org"; @@ -205,11 +200,10 @@ mod tests { .await; mock_server.assert(); - Ok(()) } #[tokio::test] - async fn onboarding_opt() -> Result { + async fn onboarding_opt() { let token = "some-token"; let username = "some-user"; let org = "some-org"; @@ -233,11 +227,10 @@ mod tests { .await; mock_server.assert(); - Ok(()) } #[tokio::test] - async fn post_setup_user_opt() -> Result { + async fn post_setup_user_opt() { let token = "some-token"; let username = "some-user"; let org = "some-org"; @@ -261,6 +254,5 @@ mod tests { .await; mock_server.assert(); - Ok(()) } } diff --git a/influxdb2_client/src/data_point.rs b/influxdb2_client/src/data_point.rs index 13ea8c61fa..019d497bba 100644 --- a/influxdb2_client/src/data_point.rs +++ b/influxdb2_client/src/data_point.rs @@ -335,64 +335,60 @@ mod tests { use super::*; use std::str; - type Error = Box; - 
type Result = std::result::Result; - - fn assert_utf8_strings_eq(left: &[u8], right: &[u8]) -> Result { + fn assert_utf8_strings_eq(left: &[u8], right: &[u8]) { assert_eq!( left, right, "\n\nleft string value: `{}`,\nright string value: `{}`", - str::from_utf8(left)?, - str::from_utf8(right)? + str::from_utf8(left).unwrap(), + str::from_utf8(right).unwrap(), ); - Ok(()) } #[test] - fn point_builder_allows_setting_tags_and_fields() -> Result { + fn point_builder_allows_setting_tags_and_fields() { let point = DataPoint::builder("swap") .tag("host", "server01") .tag("name", "disk0") .field("in", 3_i64) .field("out", 4_i64) .timestamp(1) - .build()?; + .build() + .unwrap(); assert_utf8_strings_eq( - &point.data_point_to_vec()?, + &point.data_point_to_vec().unwrap(), b"swap,host=server01,name=disk0 in=3i,out=4i 1\n".as_ref(), - )?; - - Ok(()) + ); } #[test] - fn no_tags_or_timestamp() -> Result { + fn no_tags_or_timestamp() { let point = DataPoint::builder("m0") .field("f0", 1.0) .field("f1", 2_i64) - .build()?; + .build() + .unwrap(); - assert_utf8_strings_eq(&point.data_point_to_vec()?, b"m0 f0=1,f1=2i\n".as_ref())?; - - Ok(()) + assert_utf8_strings_eq( + &point.data_point_to_vec().unwrap(), + b"m0 f0=1,f1=2i\n".as_ref(), + ); } #[test] - fn no_timestamp() -> Result { + fn no_timestamp() { let point = DataPoint::builder("m0") .tag("t0", "v0") .tag("t1", "v1") .field("f1", 2_i64) - .build()?; + .build() + .unwrap(); assert_utf8_strings_eq( - &point.data_point_to_vec()?, + &point.data_point_to_vec().unwrap(), b"m0,t0=v0,t1=v1 f1=2i\n".as_ref(), - )?; - - Ok(()) + ); } #[test] @@ -405,80 +401,72 @@ mod tests { const ALL_THE_DELIMITERS: &str = r#"alpha,beta=delta gamma"epsilon"#; #[test] - fn special_characters_are_escaped_in_measurements() -> Result { + fn special_characters_are_escaped_in_measurements() { assert_utf8_strings_eq( - &ALL_THE_DELIMITERS.measurement_to_vec()?, + &ALL_THE_DELIMITERS.measurement_to_vec().unwrap(), br#"alpha\,beta=delta\ gamma"epsilon"#.as_ref(), - )?; - Ok(()) + ); } #[test] - fn special_characters_are_escaped_in_tag_keys() -> Result { + fn special_characters_are_escaped_in_tag_keys() { assert_utf8_strings_eq( - &ALL_THE_DELIMITERS.tag_key_to_vec()?, + &ALL_THE_DELIMITERS.tag_key_to_vec().unwrap(), br#"alpha\,beta\=delta\ gamma"epsilon"#.as_ref(), - )?; - Ok(()) + ); } #[test] - fn special_characters_are_escaped_in_tag_values() -> Result { + fn special_characters_are_escaped_in_tag_values() { assert_utf8_strings_eq( - &ALL_THE_DELIMITERS.tag_value_to_vec()?, + &ALL_THE_DELIMITERS.tag_value_to_vec().unwrap(), br#"alpha\,beta\=delta\ gamma"epsilon"#.as_ref(), - )?; - Ok(()) + ); } #[test] - fn special_characters_are_escaped_in_field_keys() -> Result { + fn special_characters_are_escaped_in_field_keys() { assert_utf8_strings_eq( - &ALL_THE_DELIMITERS.field_key_to_vec()?, + &ALL_THE_DELIMITERS.field_key_to_vec().unwrap(), br#"alpha\,beta\=delta\ gamma"epsilon"#.as_ref(), - )?; - Ok(()) + ); } #[test] - fn special_characters_are_escaped_in_field_values_of_strings() -> Result { + fn special_characters_are_escaped_in_field_values_of_strings() { assert_utf8_strings_eq( - &FieldValue::from(ALL_THE_DELIMITERS).field_value_to_vec()?, + &FieldValue::from(ALL_THE_DELIMITERS) + .field_value_to_vec() + .unwrap(), br#""alpha,beta=delta gamma\"epsilon""#.as_ref(), - )?; - Ok(()) + ); } #[test] - fn field_value_of_bool() -> Result { + fn field_value_of_bool() { let e = FieldValue::from(true); - assert_utf8_strings_eq(&e.field_value_to_vec()?, b"t")?; + 
assert_utf8_strings_eq(&e.field_value_to_vec().unwrap(), b"t"); let e = FieldValue::from(false); - assert_utf8_strings_eq(&e.field_value_to_vec()?, b"f")?; - - Ok(()) + assert_utf8_strings_eq(&e.field_value_to_vec().unwrap(), b"f"); } #[test] - fn field_value_of_float() -> Result { + fn field_value_of_float() { let e = FieldValue::from(42_f64); - assert_utf8_strings_eq(&e.field_value_to_vec()?, b"42")?; - Ok(()) + assert_utf8_strings_eq(&e.field_value_to_vec().unwrap(), b"42"); } #[test] - fn field_value_of_integer() -> Result { + fn field_value_of_integer() { let e = FieldValue::from(42_i64); - assert_utf8_strings_eq(&e.field_value_to_vec()?, b"42i")?; - Ok(()) + assert_utf8_strings_eq(&e.field_value_to_vec().unwrap(), b"42i"); } #[test] - fn field_value_of_string() -> Result { + fn field_value_of_string() { let e = FieldValue::from("hello"); - assert_utf8_strings_eq(&e.field_value_to_vec()?, br#""hello""#)?; - Ok(()) + assert_utf8_strings_eq(&e.field_value_to_vec().unwrap(), br#""hello""#); } // Clears up the boilerplate of writing to a vector from the tests diff --git a/influxdb2_client/src/lib.rs b/influxdb2_client/src/lib.rs index 314b911bec..9c417b6609 100644 --- a/influxdb2_client/src/lib.rs +++ b/influxdb2_client/src/lib.rs @@ -230,11 +230,8 @@ mod tests { use futures::stream; use mockito::mock; - type Error = Box; - type Result = std::result::Result; - #[tokio::test] - async fn writing_points() -> Result { + async fn writing_points() { let org = "some-org"; let bucket = "some-bucket"; let token = "some-token"; @@ -258,12 +255,14 @@ cpu,host=server01,region=us-west usage=0.87 DataPoint::builder("cpu") .tag("host", "server01") .field("usage", 0.5) - .build()?, + .build() + .unwrap(), DataPoint::builder("cpu") .tag("host", "server01") .tag("region", "us-west") .field("usage", 0.87) - .build()?, + .build() + .unwrap(), ]; // If the requests made are incorrect, Mockito returns status 501 and `write` @@ -274,11 +273,10 @@ cpu,host=server01,region=us-west usage=0.87 let _result = client.write(org, bucket, stream::iter(points)).await; mock_server.assert(); - Ok(()) } #[tokio::test] - async fn create_bucket() -> Result { + async fn create_bucket() { let org_id = "0000111100001111"; let bucket = "some-bucket"; let token = "some-token"; @@ -299,7 +297,6 @@ cpu,host=server01,region=us-west usage=0.87 let _result = client.create_bucket(org_id, bucket).await; mock_server.assert(); - Ok(()) } } diff --git a/influxdb_line_protocol/src/lib.rs b/influxdb_line_protocol/src/lib.rs index 992b78bbd6..d78d5b23d9 100644 --- a/influxdb_line_protocol/src/lib.rs +++ b/influxdb_line_protocol/src/lib.rs @@ -1068,9 +1068,6 @@ mod test { use smallvec::smallvec; use test_helpers::approximately_equal; - type Error = Box; - type Result = std::result::Result; - impl FieldValue<'_> { fn unwrap_i64(&self) -> i64 { match self { @@ -1109,7 +1106,7 @@ mod test { } #[test] - fn escaped_str_basic() -> Result { + fn escaped_str_basic() { // Demonstrate how strings without any escapes are handled. 
let es = EscapedStr::from("Foo"); assert_eq!(es, "Foo"); @@ -1120,14 +1117,13 @@ assert!(es.ends_with('o')); assert!(es.ends_with("oo")); assert!(es.ends_with("Foo")); - Ok(()) } #[test] - fn escaped_str_multi() -> Result { + fn escaped_str_multi() { // Get an EscapedStr that has multiple parts by parsing a // measurement name with a non-whitespace escape character - let (remaining, es) = measurement("Foo\\aBar")?; + let (remaining, es) = measurement("Foo\\aBar").unwrap(); assert!(remaining.is_empty()); assert_eq!(es, EscapedStr::from_slices(&["Foo", "\\", "a", "Bar"])); assert!(es.is_escaped()); @@ -1145,12 +1141,10 @@ mod test { assert!(es != "Fo"); assert!(es != "F"); assert!(es != ""); - - Ok(()) } #[test] - fn test_trim_leading() -> Result { + fn test_trim_leading() { assert_eq!(trim_leading(&String::from("")), ""); assert_eq!(trim_leading(&String::from(" a b c ")), "a b c "); assert_eq!(trim_leading(&String::from(" a ")), "a "); @@ -1164,12 +1158,10 @@ mod test { trim_leading(&String::from("#comment\n #comment2\n#comment\na")), "a" ); - - Ok(()) } #[test] - fn test_split_lines() -> Result { + fn test_split_lines() { assert_eq!(split_lines("").collect::<Vec<_>>(), vec![""]); assert_eq!(split_lines("foo").collect::<Vec<_>>(), vec!["foo"]); assert_eq!( @@ -1205,15 +1197,13 @@ mod test { split_lines("meas tag=val field=1,field=\\\"\nval\"\nnext").collect::<Vec<_>>(), vec!["meas tag=val field=1,field=\\\"", "val\"", "next"] ); - Ok(()) } #[test] - fn escaped_str_multi_to_string() -> Result { - let (_, es) = measurement("Foo\\aBar")?; + fn escaped_str_multi_to_string() { + let (_, es) = measurement("Foo\\aBar").unwrap(); // test the From<> implementation assert_eq!(es, "Foo\\aBar"); - Ok(()) } fn parse(s: &str) -> Result<Vec<ParsedLine<'_>>, super::Error> { parse_lines(s).collect() } #[test] - fn parse_empty() -> Result { + fn parse_empty() { let input = ""; let vals = parse(input); assert_eq!(vals.unwrap().len(), 0); - Ok(()) } #[test] - fn parse_no_fields() -> Result { + fn parse_no_fields() { let input = "foo 1234"; let vals = parse(input); assert!(matches!(vals, Err(super::Error::FieldSetMissing))); - - Ok(()) } #[test] - fn parse_single_field_integer() -> Result { + fn parse_single_field_integer() { let input = "foo asdf=23i 1234"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].timestamp, Some(1234)); assert_eq!(vals[0].field_set[0].0, "asdf"); assert_eq!(vals[0].field_set[0].1.unwrap_i64(), 23); - - Ok(()) } #[test] - fn parse_single_field_unteger() -> Result { + fn parse_single_field_unteger() { let input = "foo asdf=23u 1234"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].timestamp, Some(1234)); assert_eq!(vals[0].field_set[0].0, "asdf"); assert_eq!(vals[0].field_set[0].1.unwrap_u64(), 23); - - Ok(()) } #[test] - fn parse_single_field_float_no_decimal() -> Result { + fn parse_single_field_float_no_decimal() { let input = "foo asdf=44 546"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].timestamp, Some(546)); @@ -1276,14 +1259,12 @@ mod test { vals[0].field_set[0].1.unwrap_f64(), 44.0 )); - - Ok(()) } #[test] - fn parse_single_field_float_with_decimal() -> Result { + fn parse_single_field_float_with_decimal() { let input = "foo asdf=3.74 123"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo");
assert_eq!(vals[0].timestamp, Some(123)); @@ -1292,38 +1273,32 @@ mod test { vals[0].field_set[0].1.unwrap_f64(), 3.74 )); - - Ok(()) } #[test] - fn parse_single_field_string() -> Result { + fn parse_single_field_string() { let input = r#"foo asdf="the string value" 1234"#; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].timestamp, Some(1234)); assert_eq!(vals[0].field_set[0].0, "asdf"); assert_eq!(&vals[0].field_set[0].1.unwrap_string(), "the string value"); - - Ok(()) } #[test] - fn parse_single_field_bool() -> Result { + fn parse_single_field_bool() { let input = r#"foo asdf=true 1234"#; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].timestamp, Some(1234)); assert_eq!(vals[0].field_set[0].0, "asdf"); assert_eq!(vals[0].field_set[0].1.unwrap_bool(), true); - - Ok(()) } #[test] - fn parse_string_values() -> Result { + fn parse_string_values() { let test_data = vec![ (r#"foo asdf="""#, ""), (r#"foo asdf="str val""#, "str val"), @@ -1343,7 +1318,7 @@ mod test { ]; for (input, expected_parsed_string_value) in test_data { - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.tag_set, None); assert_eq!(vals[0].field_set.len(), 1); assert_eq!(vals[0].field_set[0].0, "asdf"); @@ -1352,12 +1327,10 @@ mod test { expected_parsed_string_value ); } - - Ok(()) } #[test] - fn parse_bool_values() -> Result { + fn parse_bool_values() { let test_data = vec![ (r#"foo asdf=t"#, true), (r#"foo asdf=T"#, true), @@ -1372,7 +1345,7 @@ mod test { ]; for (input, expected_parsed_bool_value) in test_data { - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.tag_set, None); assert_eq!(vals[0].field_set.len(), 1); assert_eq!(vals[0].field_set[0].0, "asdf"); @@ -1381,14 +1354,12 @@ mod test { expected_parsed_bool_value ); } - - Ok(()) } #[test] - fn parse_two_fields_integer() -> Result { + fn parse_two_fields_integer() { let input = "foo asdf=23i,bar=5i 1234"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].timestamp, Some(1234)); @@ -1398,14 +1369,12 @@ mod test { assert_eq!(vals[0].field_set[1].0, "bar"); assert_eq!(vals[0].field_set[1].1.unwrap_i64(), 5); - - Ok(()) } #[test] - fn parse_two_fields_unteger() -> Result { + fn parse_two_fields_unteger() { let input = "foo asdf=23u,bar=5u 1234"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].timestamp, Some(1234)); @@ -1415,14 +1384,12 @@ mod test { assert_eq!(vals[0].field_set[1].0, "bar"); assert_eq!(vals[0].field_set[1].1.unwrap_u64(), 5); - - Ok(()) } #[test] - fn parse_two_fields_float() -> Result { + fn parse_two_fields_float() { let input = "foo asdf=23.1,bar=5 1234"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].timestamp, Some(1234)); @@ -1438,14 +1405,12 @@ mod test { vals[0].field_set[1].1.unwrap_f64(), 5.0 )); - - Ok(()) } #[test] - fn parse_mixed_field_types() -> Result { + fn parse_mixed_field_types() { let input = r#"foo asdf=23.1,bar=-5i,qux=9u,baz="the string",frab=false 1234"#; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].timestamp, Some(1234)); @@ -1467,23 +1432,19 @@ mod test { 
assert_eq!(vals[0].field_set[4].0, "frab"); assert_eq!(vals[0].field_set[4].1.unwrap_bool(), false); - - Ok(()) } #[test] - fn parse_negative_integer() -> Result { + fn parse_negative_integer() { let input = "m0 field=-1i 99"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals.len(), 1); assert_eq!(vals[0].field_set[0].1.unwrap_i64(), -1); - - Ok(()) } #[test] - fn parse_negative_uinteger() -> Result { + fn parse_negative_uinteger() { let input = "m0 field=-1u 99"; let parsed = parse(input); @@ -1492,39 +1453,37 @@ mod test { "Wrong error: {:?}", parsed, ); - - Ok(()) } #[test] - fn parse_scientific_float() -> Result { + fn parse_scientific_float() { // Positive tests let input = "m0 field=-1.234456e+06 1615869152385000000"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals.len(), 1); let input = "m0 field=-1.234456E+3 1615869152385000000"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals.len(), 1); let input = "m0 field=1.234456e+02 1615869152385000000"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals.len(), 1); let input = "m0 field=1.234456E+16 1615869152385000000"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals.len(), 1); let input = "m0 field=1.234456E-16"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals.len(), 1); let input = "m0 field=1.234456e-03"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals.len(), 1); let input = "m0 field=1.234456e-0"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals.len(), 1); ///////////////////// @@ -1603,26 +1562,22 @@ mod test { "Wrong error: {:?}", parsed, ); - - Ok(()) } #[test] - fn parse_negative_float() -> Result { + fn parse_negative_float() { let input = "m0 field2=-1 99"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals.len(), 1); assert!(approximately_equal( vals[0].field_set[0].1.unwrap_f64(), -1.0 )); - - Ok(()) } #[test] - fn parse_out_of_range_integer() -> Result { + fn parse_out_of_range_integer() { let input = "m0 field=99999999999999999999999999999999i 99"; let parsed = parse(input); @@ -1631,12 +1586,10 @@ mod test { "Wrong error: {:?}", parsed, ); - - Ok(()) } #[test] - fn parse_out_of_range_uinteger() -> Result { + fn parse_out_of_range_uinteger() { let input = "m0 field=99999999999999999999999999999999u 99"; let parsed = parse(input); @@ -1645,12 +1598,10 @@ mod test { "Wrong error: {:?}", parsed, ); - - Ok(()) } #[test] - fn parse_out_of_range_float() -> Result { + fn parse_out_of_range_float() { let input = format!("m0 field={val}.{val} 99", val = "9".repeat(200)); let parsed = parse(&input); @@ -1659,14 +1610,12 @@ mod test { "Wrong error: {:?}", parsed, ); - - Ok(()) } #[test] - fn parse_tag_set_included_in_series() -> Result { + fn parse_tag_set_included_in_series() { let input = "foo,tag1=1,tag2=2 value=1 123"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); @@ -1677,25 +1626,21 @@ mod test { assert_eq!(vals[0].series.tag_set.as_ref().unwrap()[1].1, "2"); assert_eq!(vals[0].field_set[0].0, "value"); - - Ok(()) } #[test] - fn parse_tag_set_unsorted() -> Result { + fn parse_tag_set_unsorted() { let input = "foo,tag2=2,tag1=1"; - let (remaining, series) = series(input)?; + let (remaining, series) = series(input).unwrap(); assert!(remaining.is_empty()); - assert_eq!(series.generate_base()?, 
"foo,tag1=1,tag2=2"); - - Ok(()) + assert_eq!(series.generate_base().unwrap(), "foo,tag1=1,tag2=2"); } #[test] - fn parse_tag_set_duplicate_tags() -> Result { + fn parse_tag_set_duplicate_tags() { let input = "foo,tag=1,tag=2"; - let (remaining, series) = series(input)?; + let (remaining, series) = series(input).unwrap(); assert!(remaining.is_empty()); let err = series @@ -1706,15 +1651,13 @@ mod test { err.to_string(), r#"Must not contain duplicate tags, but "tag" was repeated"# ); - - Ok(()) } #[test] - fn parse_multiple_lines_become_multiple_points() -> Result { + fn parse_multiple_lines_become_multiple_points() { let input = r#"foo value1=1i 123 foo value2=2i 123"#; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].timestamp, Some(123)); @@ -1725,15 +1668,13 @@ foo value2=2i 123"#; assert_eq!(vals[1].timestamp, Some(123)); assert_eq!(vals[1].field_set[0].0, "value2"); assert_eq!(vals[1].field_set[0].1.unwrap_i64(), 2); - - Ok(()) } #[test] - fn parse_multiple_measurements_become_multiple_points() -> Result { + fn parse_multiple_measurements_become_multiple_points() { let input = r#"foo value1=1i 123 bar value2=2i 123"#; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].timestamp, Some(123)); @@ -1744,41 +1685,35 @@ bar value2=2i 123"#; assert_eq!(vals[1].timestamp, Some(123)); assert_eq!(vals[1].field_set[0].0, "value2"); assert_eq!(vals[1].field_set[0].1.unwrap_i64(), 2); - - Ok(()) } #[test] - fn parse_trailing_whitespace_is_fine() -> Result { + fn parse_trailing_whitespace_is_fine() { let input = r#"foo,tag=val value1=1i 123 "#; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals.len(), 1); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].timestamp, Some(123)); assert_eq!(vals[0].field_set[0].0, "value1"); assert_eq!(vals[0].field_set[0].1.unwrap_i64(), 1); - - Ok(()) } #[test] - fn parse_negative_timestamp() -> Result { + fn parse_negative_timestamp() { let input = r#"foo value1=1i -123"#; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].timestamp, Some(-123)); assert_eq!(vals[0].field_set[0].0, "value1"); assert_eq!(vals[0].field_set[0].1.unwrap_i64(), 1); - - Ok(()) } #[test] - fn parse_out_of_range_timestamp() -> Result { + fn parse_out_of_range_timestamp() { let input = "m0 field=1i 99999999999999999999999999999999"; let parsed = parse(input); @@ -1787,46 +1722,38 @@ bar value2=2i 123"#; "Wrong error: {:?}", parsed, ); - - Ok(()) } #[test] - fn parse_blank_lines_are_ignored() -> Result { + fn parse_blank_lines_are_ignored() { let input = "\n\n\n"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert!(vals.is_empty()); - - Ok(()) } #[test] - fn parse_commented_lines_are_ignored() -> Result { + fn parse_commented_lines_are_ignored() { let input = "# comment"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert!(vals.is_empty()); - - Ok(()) } #[test] - fn parse_multiple_whitespace_between_elements_is_allowed() -> Result { + fn parse_multiple_whitespace_between_elements_is_allowed() { let input = " measurement a=1i 123 "; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "measurement"); assert_eq!(vals[0].timestamp, Some(123)); assert_eq!(vals[0].field_set[0].0, "a"); 
assert_eq!(vals[0].field_set[0].1.unwrap_i64(), 1); - - Ok(()) } macro_rules! assert_fully_parsed { ($parse_result:expr, $output:expr $(,)?) => {{ - let (remaining, parsed) = $parse_result?; + let (remaining, parsed) = $parse_result.unwrap(); assert!( remaining.is_empty(), @@ -1834,246 +1761,232 @@ bar value2=2i 123"#; remaining, ); assert_eq!(parsed, $output, "Did not parse the expected output"); - - Ok(()) }}; } #[test] - fn measurement_allows_escaping_comma() -> Result { - assert_fully_parsed!(measurement(r#"wea\,ther"#), r#"wea,ther"#) + fn measurement_allows_escaping_comma() { + assert_fully_parsed!(measurement(r#"wea\,ther"#), r#"wea,ther"#); } #[test] - fn measurement_allows_escaping_space() -> Result { - assert_fully_parsed!(measurement(r#"wea\ ther"#), r#"wea ther"#) + fn measurement_allows_escaping_space() { + assert_fully_parsed!(measurement(r#"wea\ ther"#), r#"wea ther"#); } #[test] - fn measurement_allows_escaping_backslash() -> Result { - assert_fully_parsed!(measurement(r#"\\wea\\ther"#), r#"\wea\ther"#) + fn measurement_allows_escaping_backslash() { + assert_fully_parsed!(measurement(r#"\\wea\\ther"#), r#"\wea\ther"#); } #[test] - fn measurement_allows_backslash_with_unknown_escape() -> Result { - assert_fully_parsed!(measurement(r#"\wea\ther"#), r#"\wea\ther"#) + fn measurement_allows_backslash_with_unknown_escape() { + assert_fully_parsed!(measurement(r#"\wea\ther"#), r#"\wea\ther"#); } #[test] - fn measurement_allows_literal_newline_as_unknown_escape() -> Result { + fn measurement_allows_literal_newline_as_unknown_escape() { assert_fully_parsed!( measurement( r#"weat\ her"# ), "weat\\\nher", - ) + ); } #[test] - fn measurement_disallows_literal_newline() -> Result { + fn measurement_disallows_literal_newline() { let (remaining, parsed) = measurement( r#"weat her"#, - )?; + ) + .unwrap(); assert_eq!(parsed, "weat"); assert_eq!(remaining, "\nher"); - - Ok(()) } #[test] - fn measurement_disallows_ending_in_backslash() -> Result { + fn measurement_disallows_ending_in_backslash() { let parsed = measurement(r#"weather\"#); assert!(matches!( parsed, Err(nom::Err::Failure(super::Error::EndsWithBackslash)) )); - - Ok(()) } #[test] - fn tag_key_allows_escaping_comma() -> Result { - assert_fully_parsed!(tag_key(r#"wea\,ther"#), r#"wea,ther"#) + fn tag_key_allows_escaping_comma() { + assert_fully_parsed!(tag_key(r#"wea\,ther"#), r#"wea,ther"#); } #[test] - fn tag_key_allows_escaping_equal() -> Result { - assert_fully_parsed!(tag_key(r#"wea\=ther"#), r#"wea=ther"#) + fn tag_key_allows_escaping_equal() { + assert_fully_parsed!(tag_key(r#"wea\=ther"#), r#"wea=ther"#); } #[test] - fn tag_key_allows_escaping_space() -> Result { - assert_fully_parsed!(tag_key(r#"wea\ ther"#), r#"wea ther"#) + fn tag_key_allows_escaping_space() { + assert_fully_parsed!(tag_key(r#"wea\ ther"#), r#"wea ther"#); } #[test] - fn tag_key_allows_escaping_backslash() -> Result { - assert_fully_parsed!(tag_key(r#"\\wea\\ther"#), r#"\wea\ther"#) + fn tag_key_allows_escaping_backslash() { + assert_fully_parsed!(tag_key(r#"\\wea\\ther"#), r#"\wea\ther"#); } #[test] - fn tag_key_allows_backslash_with_unknown_escape() -> Result { - assert_fully_parsed!(tag_key(r#"\wea\ther"#), r#"\wea\ther"#) + fn tag_key_allows_backslash_with_unknown_escape() { + assert_fully_parsed!(tag_key(r#"\wea\ther"#), r#"\wea\ther"#); } #[test] - fn tag_key_allows_literal_newline_as_unknown_escape() -> Result { + fn tag_key_allows_literal_newline_as_unknown_escape() { assert_fully_parsed!( tag_key( r#"weat\ her"# ), "weat\\\nher", - ) + ); } 
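// Reviewer note: every hunk in this patch applies the same mechanical change;
// here it is once, on a self-contained example rather than the crate's own
// parsers. A test returning `Result` reports a failure only as an `Err` value,
// while a plain test that calls `.unwrap()` panics at the failing expression,
// which the test harness reports with its exact location; presumably that is
// the motivation for this commit. A minimal sketch: `double` is a hypothetical
// stand-in for the fallible functions exercised above, not code from this
// repository.

fn double(input: &str) -> Result<i64, std::num::ParseIntError> {
    let n: i64 = input.parse()?; // propagate any parse error to the caller
    Ok(n * 2)
}

// Before (the style this patch removes):
#[test]
fn doubles_result_style() -> Result<(), Box<dyn std::error::Error>> {
    let n = double("21")?; // a failure here surfaces only as Err(...)
    assert_eq!(n, 42);
    Ok(()) // trailing value required by the signature
}

// After (the style this patch adopts):
#[test]
fn doubles_unwrap_style() {
    let n = double("21").unwrap(); // a failure here panics with a backtrace
    assert_eq!(n, 42);
}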
#[test] - fn tag_key_disallows_literal_newline() -> Result { + fn tag_key_disallows_literal_newline() { let (remaining, parsed) = tag_key( r#"weat her"#, - )?; + ) + .unwrap(); assert_eq!(parsed, "weat"); assert_eq!(remaining, "\nher"); - - Ok(()) } #[test] - fn tag_key_disallows_ending_in_backslash() -> Result { + fn tag_key_disallows_ending_in_backslash() { let parsed = tag_key(r#"weather\"#); assert!(matches!( parsed, Err(nom::Err::Failure(super::Error::EndsWithBackslash)) )); - - Ok(()) } #[test] - fn tag_value_allows_escaping_comma() -> Result { - assert_fully_parsed!(tag_value(r#"wea\,ther"#), r#"wea,ther"#) + fn tag_value_allows_escaping_comma() { + assert_fully_parsed!(tag_value(r#"wea\,ther"#), r#"wea,ther"#); } #[test] - fn tag_value_allows_escaping_equal() -> Result { - assert_fully_parsed!(tag_value(r#"wea\=ther"#), r#"wea=ther"#) + fn tag_value_allows_escaping_equal() { + assert_fully_parsed!(tag_value(r#"wea\=ther"#), r#"wea=ther"#); } #[test] - fn tag_value_allows_escaping_space() -> Result { - assert_fully_parsed!(tag_value(r#"wea\ ther"#), r#"wea ther"#) + fn tag_value_allows_escaping_space() { + assert_fully_parsed!(tag_value(r#"wea\ ther"#), r#"wea ther"#); } #[test] - fn tag_value_allows_escaping_backslash() -> Result { - assert_fully_parsed!(tag_value(r#"\\wea\\ther"#), r#"\wea\ther"#) + fn tag_value_allows_escaping_backslash() { + assert_fully_parsed!(tag_value(r#"\\wea\\ther"#), r#"\wea\ther"#); } #[test] - fn tag_value_allows_backslash_with_unknown_escape() -> Result { - assert_fully_parsed!(tag_value(r#"\wea\ther"#), r#"\wea\ther"#) + fn tag_value_allows_backslash_with_unknown_escape() { + assert_fully_parsed!(tag_value(r#"\wea\ther"#), r#"\wea\ther"#); } #[test] - fn tag_value_allows_literal_newline_as_unknown_escape() -> Result { + fn tag_value_allows_literal_newline_as_unknown_escape() { assert_fully_parsed!( tag_value( r#"weat\ her"# ), "weat\\\nher", - ) + ); } #[test] - fn tag_value_disallows_literal_newline() -> Result { + fn tag_value_disallows_literal_newline() { let (remaining, parsed) = tag_value( r#"weat her"#, - )?; + ) + .unwrap(); assert_eq!(parsed, "weat"); assert_eq!(remaining, "\nher"); - - Ok(()) } #[test] - fn tag_value_disallows_ending_in_backslash() -> Result { + fn tag_value_disallows_ending_in_backslash() { let parsed = tag_value(r#"weather\"#); assert!(matches!( parsed, Err(nom::Err::Failure(super::Error::EndsWithBackslash)) )); - - Ok(()) } #[test] - fn field_key_allows_escaping_comma() -> Result { - assert_fully_parsed!(field_key(r#"wea\,ther"#), r#"wea,ther"#) + fn field_key_allows_escaping_comma() { + assert_fully_parsed!(field_key(r#"wea\,ther"#), r#"wea,ther"#); } #[test] - fn field_key_allows_escaping_equal() -> Result { - assert_fully_parsed!(field_key(r#"wea\=ther"#), r#"wea=ther"#) + fn field_key_allows_escaping_equal() { + assert_fully_parsed!(field_key(r#"wea\=ther"#), r#"wea=ther"#); } #[test] - fn field_key_allows_escaping_space() -> Result { - assert_fully_parsed!(field_key(r#"wea\ ther"#), r#"wea ther"#) + fn field_key_allows_escaping_space() { + assert_fully_parsed!(field_key(r#"wea\ ther"#), r#"wea ther"#); } #[test] - fn field_key_allows_escaping_backslash() -> Result { - assert_fully_parsed!(field_key(r#"\\wea\\ther"#), r#"\wea\ther"#) + fn field_key_allows_escaping_backslash() { + assert_fully_parsed!(field_key(r#"\\wea\\ther"#), r#"\wea\ther"#); } #[test] - fn field_key_allows_backslash_with_unknown_escape() -> Result { - assert_fully_parsed!(field_key(r#"\wea\ther"#), r#"\wea\ther"#) + fn 
field_key_allows_backslash_with_unknown_escape() { + assert_fully_parsed!(field_key(r#"\wea\ther"#), r#"\wea\ther"#); } #[test] - fn field_key_allows_literal_newline_as_unknown_escape() -> Result { + fn field_key_allows_literal_newline_as_unknown_escape() { assert_fully_parsed!( field_key( r#"weat\ her"# ), "weat\\\nher", - ) + ); } #[test] - fn field_key_disallows_literal_newline() -> Result { + fn field_key_disallows_literal_newline() { let (remaining, parsed) = field_key( r#"weat her"#, - )?; + ) + .unwrap(); assert_eq!(parsed, "weat"); assert_eq!(remaining, "\nher"); - - Ok(()) } #[test] - fn field_key_disallows_ending_in_backslash() -> Result { + fn field_key_disallows_ending_in_backslash() { let parsed = field_key(r#"weather\"#); assert!(matches!( parsed, Err(nom::Err::Failure(super::Error::EndsWithBackslash)) )); - - Ok(()) } #[test] - fn parse_no_time() -> Result { + fn parse_no_time() { let input = "foo,tag0=value1 asdf=23.1,bar=5i"; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].series.measurement, "foo"); assert_eq!(vals[0].series.tag_set.as_ref().unwrap()[0].0, "tag0"); @@ -2089,12 +2002,10 @@ her"#, assert_eq!(vals[0].field_set[1].0, "bar"); assert_eq!(vals[0].field_set[1].1.unwrap_i64(), 5); - - Ok(()) } #[test] - fn parse_advance_after_error() -> Result { + fn parse_advance_after_error() { // Note that the first line has an error (23.1.22 is not a number) // but there is valid data afterwards, let input = "foo,tag0=value1 asdf=23.1.22,jkl=4\n\ @@ -2128,12 +2039,10 @@ her"#, parsed_line.field_set[1].1.unwrap_f64(), 5. )); - - Ok(()) } #[test] - fn field_value_display() -> Result { + fn field_value_display() { assert_eq!(FieldValue::I64(-42).to_string(), "-42i"); assert_eq!(FieldValue::U64(42).to_string(), "42u"); assert_eq!(FieldValue::F64(42.11).to_string(), "42.11"); @@ -2143,22 +2052,20 @@ her"#, ); assert_eq!(FieldValue::Boolean(true).to_string(), "true"); assert_eq!(FieldValue::Boolean(false).to_string(), "false"); - Ok(()) } #[test] - fn series_display_no_tags() -> Result { + fn series_display_no_tags() { let series = Series { raw_input: "foo", measurement: EscapedStr::from("m"), tag_set: None, }; assert_eq!(series.to_string(), "m"); - Ok(()) } #[test] - fn series_display_one_tag() -> Result { + fn series_display_one_tag() { let series = Series { raw_input: "foo", measurement: EscapedStr::from("m"), tag_set: Some(smallvec![( EscapedStr::from("tag1"), EscapedStr::from("val1") )]), }; assert_eq!(series.to_string(), "m,tag1=val1"); - Ok(()) } #[test] - fn series_display_two_tags() -> Result { + fn series_display_two_tags() { let series = Series { raw_input: "foo", measurement: EscapedStr::from("m"), tag_set: Some(smallvec![ (EscapedStr::from("tag1"), EscapedStr::from("val1")), (EscapedStr::from("tag2"), EscapedStr::from("val2")), ]), }; assert_eq!(series.to_string(), "m,tag1=val1,tag2=val2"); - Ok(()) } #[test] - fn parsed_line_display_one_field_no_timestamp() -> Result { + fn parsed_line_display_one_field_no_timestamp() { let series = Series { raw_input: "foo", measurement: EscapedStr::from("m"), @@ -2204,11 +2109,10 @@ her"#, }; assert_eq!(parsed_line.to_string(), "m,tag1=val1 field1=42.1"); - Ok(()) } #[test] - fn parsed_line_display_one_field_timestamp() -> Result { + fn parsed_line_display_one_field_timestamp() { let series = Series { raw_input: "foo", measurement: EscapedStr::from("m"), @@ -2226,11 +2130,10 @@ her"#, }; assert_eq!(parsed_line.to_string(), "m,tag1=val1 field1=42.1 33"); - Ok(()) } #[test] - fn parsed_line_display_two_fields_timestamp() -> Result { + fn parsed_line_display_two_fields_timestamp() { let series = Series { raw_input: "foo", measurement:
EscapedStr::from("m"), @@ -2254,11 +2157,10 @@ her"#, parsed_line.to_string(), "m,tag1=val1 field1=42.1,field2=false 33" ); - Ok(()) } #[test] - fn parsed_line_display_escaped() -> Result { + fn parsed_line_display_escaped() { let series = Series { raw_input: "foo", measurement: EscapedStr::from("m,and m"), @@ -2282,42 +2184,37 @@ her"#, parsed_line.to_string(), r#"m\,and\ m,tag\ \,1=val\ \,1 field\ \,1=Foo\"Bar 33"# ); - Ok(()) } #[test] - fn field_value_returned() -> Result { + fn field_value_returned() { let input = r#"foo asdf=true 1234"#; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert!(vals[0].field_value("asdf").unwrap().unwrap_bool()); - Ok(()) } #[test] - fn field_value_missing() -> Result { + fn field_value_missing() { let input = r#"foo asdf=true 1234"#; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].field_value("jkl"), None); - Ok(()) } #[test] - fn tag_value_returned() -> Result { + fn tag_value_returned() { let input = r#"foo,test=stuff asdf=true 1234"#; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(*vals[0].tag_value("test").unwrap(), "stuff"); - Ok(()) } #[test] - fn tag_value_missing() -> Result { + fn tag_value_missing() { let input = r#"foo,test=stuff asdf=true 1234"#; - let vals = parse(input)?; + let vals = parse(input).unwrap(); assert_eq!(vals[0].tag_value("asdf"), None); - Ok(()) } } diff --git a/influxdb_tsm/src/mapper.rs b/influxdb_tsm/src/mapper.rs index b7c495c6c8..8118c5dac4 100644 --- a/influxdb_tsm/src/mapper.rs +++ b/influxdb_tsm/src/mapper.rs @@ -747,33 +747,37 @@ mod tests { } #[test] - fn conflicting_field_types() -> Result<(), TSMError> { + fn conflicting_field_types() { let mut table = MeasurementTable::new("cpu".to_string(), 0); - table.add_series_data( - vec![("region".to_string(), "west".to_string())], - "value".to_string(), - Block { - max_time: 0, - min_time: 0, - offset: 0, - size: 0, - typ: BlockType::Float, - reader_idx: 0, - }, - )?; + table + .add_series_data( + vec![("region".to_string(), "west".to_string())], + "value".to_string(), + Block { + max_time: 0, + min_time: 0, + offset: 0, + size: 0, + typ: BlockType::Float, + reader_idx: 0, + }, + ) + .unwrap(); - table.add_series_data( - vec![], - "value".to_string(), - Block { - max_time: 0, - min_time: 0, - offset: 0, - size: 0, - typ: BlockType::Integer, - reader_idx: 0, - }, - )?; + table + .add_series_data( + vec![], + "value".to_string(), + Block { + max_time: 0, + min_time: 0, + offset: 0, + size: 0, + typ: BlockType::Integer, + reader_idx: 0, + }, + ) + .unwrap(); // The block type for the value field should be Float because the // conflicting integer field should be ignored. 
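// Reviewer note: the mapper.rs and ingest hunks around this point look much
// bigger than the semantic change they make, because removing `?` from a
// multi-line call turns it into a method chain ending in `.unwrap()`, which
// rustfmt then re-lays out. A minimal sketch of that reshaping, using a
// hypothetical `fallible` helper rather than code from this repository:

fn fallible(flag: bool) -> Result<(), String> {
    if flag {
        Ok(())
    } else {
        Err("boom".to_string())
    }
}

fn before() -> Result<(), String> {
    fallible(true)?; // one line; the error propagates to the caller
    Ok(())
}

fn after() {
    // For a call this short rustfmt keeps a single line; the long calls in
    // the surrounding hunks additionally get split across lines (receiver on
    // its own line, one method per line), hence the apparent churn.
    fallible(true).unwrap(); // a failure now panics here instead
}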
@@ -781,50 +785,55 @@ mod tests { *table.field_columns().get("value").unwrap(), BlockType::Float, ); - Ok(()) } #[test] - fn merge_measurement_table() -> Result<(), TSMError> { + fn merge_measurement_table() { let mut table1 = MeasurementTable::new("cpu".to_string(), 0); - table1.add_series_data( - vec![("region".to_string(), "west".to_string())], - "value".to_string(), - Block { - min_time: 101, - max_time: 150, - offset: 0, - size: 0, - typ: BlockType::Float, - reader_idx: 0, - }, - )?; + table1 + .add_series_data( + vec![("region".to_string(), "west".to_string())], + "value".to_string(), + Block { + min_time: 101, + max_time: 150, + offset: 0, + size: 0, + typ: BlockType::Float, + reader_idx: 0, + }, + ) + .unwrap(); let mut table2 = MeasurementTable::new("cpu".to_string(), 1); - table2.add_series_data( - vec![("region".to_string(), "west".to_string())], - "value".to_string(), - Block { - min_time: 0, - max_time: 100, - offset: 0, - size: 0, - typ: BlockType::Float, - reader_idx: 0, - }, - )?; - table2.add_series_data( - vec![("server".to_string(), "a".to_string())], - "temp".to_string(), - Block { - min_time: 0, - max_time: 50, - offset: 0, - size: 0, - typ: BlockType::Str, - reader_idx: 0, - }, - )?; + table2 + .add_series_data( + vec![("region".to_string(), "west".to_string())], + "value".to_string(), + Block { + min_time: 0, + max_time: 100, + offset: 0, + size: 0, + typ: BlockType::Float, + reader_idx: 0, + }, + ) + .unwrap(); + table2 + .add_series_data( + vec![("server".to_string(), "a".to_string())], + "temp".to_string(), + Block { + min_time: 0, + max_time: 50, + offset: 0, + size: 0, + typ: BlockType::Str, + reader_idx: 0, + }, + ) + .unwrap(); table1.merge(&mut table2).unwrap(); assert_eq!(table1.name, "cpu"); @@ -882,8 +891,6 @@ mod tests { field_blocks_temp, ); assert_eq!(table1.tag_set_fields_blocks, exp_tag_set_field_blocks); - - Ok(()) } #[test] diff --git a/ingest/src/lib.rs b/ingest/src/lib.rs index 9f5d996cfb..b94dc6e15d 100644 --- a/ingest/src/lib.rs +++ b/ingest/src/lib.rs @@ -1439,7 +1439,7 @@ mod tests { } #[test] - fn measurement_writer_buffering() -> Result<(), Error> { + fn measurement_writer_buffering() { let log = Arc::new(Mutex::new(WriterLog::new())); let table_writer = Box::new(NoOpWriter::new(Arc::clone(&log), String::from("cpu"))); @@ -1465,20 +1465,26 @@ mod tests { assert_eq!(get_events(&log).len(), 0); // buffer size is 2 we don't expect any writes until three rows are pushed - writer.buffer_line(parsed_lines.next().expect("parse success"))?; + writer + .buffer_line(parsed_lines.next().expect("parse success")) + .unwrap(); assert_eq!(get_events(&log).len(), 0); - writer.buffer_line(parsed_lines.next().expect("parse success"))?; + writer + .buffer_line(parsed_lines.next().expect("parse success")) + .unwrap(); assert_eq!(get_events(&log).len(), 0); // this should cause a flush and write - writer.buffer_line(parsed_lines.next().expect("parse success"))?; + writer + .buffer_line(parsed_lines.next().expect("parse success")) + .unwrap(); assert_eq!( get_events(&log), vec!["[cpu] Wrote batch of 2 cols, 2 rows"] ); // finalize should write out the last line - writer.finalize()?; + writer.finalize().unwrap(); assert_eq!( get_events(&log), vec![ @@ -1487,8 +1493,6 @@ mod tests { "[cpu] Closed", ] ); - - Ok(()) } // ----- Tests for pack_data ----- @@ -1521,8 +1525,11 @@ mod tests { } #[test] - fn pack_data_schema() -> Result<(), Error> { - let schema = parse_data_into_sampler()?.deduce_schema_from_sample()?; + fn pack_data_schema() { + let schema = 
parse_data_into_sampler() + .unwrap() + .deduce_schema_from_sample() + .unwrap(); // Then the correct schema is extracted println!("Converted to {:#?}", schema); @@ -1553,14 +1560,12 @@ mod tests { "bool_field" ); assert_column_eq!(schema, 5, InfluxColumnType::Timestamp, "time"); - - Ok(()) } #[test] - fn pack_data_value() -> Result<(), Error> { - let mut sampler = parse_data_into_sampler()?; - let schema = sampler.deduce_schema_from_sample()?; + fn pack_data_value() { + let mut sampler = parse_data_into_sampler().unwrap(); + let schema = sampler.deduce_schema_from_sample().unwrap(); let packers = pack_lines(&schema, &sampler.schema_sample); @@ -1667,8 +1672,6 @@ mod tests { assert_eq!(timestamp_packer.get(6).unwrap(), &1_590_488_773_254_480_000); assert!(timestamp_packer.is_null(7)); assert_eq!(timestamp_packer.get(8).unwrap(), &1_590_488_773_254_490_000); - - Ok(()) } // ----- Tests for LineProtocolConverter ----- @@ -1692,7 +1695,7 @@ mod tests { } #[test] - fn conversion_with_multiple_measurements() -> Result<(), Error> { + fn conversion_with_multiple_measurements() { // These lines have interleaved measurements to force the // state machine in LineProtocolConverter::convert through all // the branches @@ -1737,14 +1740,12 @@ mod tests { "[h2o_temperature] Closed", ] ); - - Ok(()) } // ----- Tests for TSM Data ----- #[test] - fn process_measurement_table() -> Result<(), Box> { + fn process_measurement_table() { // Input data - in line protocol format // // cpu,region=east temp=1.2 0 @@ -1782,63 +1783,71 @@ mod tests { let mut table = MeasurementTable::new("cpu".to_string(), 0); // cpu region=east temp= - table.add_series_data( - vec![("region".to_string(), "east".to_string())], - "temp".to_string(), - Block { - min_time: 0, - max_time: 0, - offset: 0, - size: 0, - typ: BlockType::Float, - reader_idx: 0, - }, - )?; + table + .add_series_data( + vec![("region".to_string(), "east".to_string())], + "temp".to_string(), + Block { + min_time: 0, + max_time: 0, + offset: 0, + size: 0, + typ: BlockType::Float, + reader_idx: 0, + }, + ) + .unwrap(); // cpu region=east voltage= - table.add_series_data( - vec![("region".to_string(), "east".to_string())], - "voltage".to_string(), - Block { - min_time: 1, - max_time: 0, - offset: 0, - size: 0, - typ: BlockType::Float, - reader_idx: 0, - }, - )?; + table + .add_series_data( + vec![("region".to_string(), "east".to_string())], + "voltage".to_string(), + Block { + min_time: 1, + max_time: 0, + offset: 0, + size: 0, + typ: BlockType::Float, + reader_idx: 0, + }, + ) + .unwrap(); // cpu region=west,server=a temp= - table.add_series_data( - vec![ - ("region".to_string(), "west".to_string()), - ("server".to_string(), "a".to_string()), - ], - "temp".to_string(), - Block { - min_time: 2, - max_time: 0, - offset: 0, - size: 0, - typ: BlockType::Float, - reader_idx: 0, - }, - )?; + table + .add_series_data( + vec![ + ("region".to_string(), "west".to_string()), + ("server".to_string(), "a".to_string()), + ], + "temp".to_string(), + Block { + min_time: 2, + max_time: 0, + offset: 0, + size: 0, + typ: BlockType::Float, + reader_idx: 0, + }, + ) + .unwrap(); // cpu az=b watts= - table.add_series_data( - vec![("az".to_string(), "b".to_string())], - "watts".to_string(), - Block { - min_time: 3, - max_time: 0, - offset: 0, - size: 0, - typ: BlockType::Unsigned, - reader_idx: 0, - }, - )?; + table + .add_series_data( + vec![("az".to_string(), "b".to_string())], + "watts".to_string(), + Block { + min_time: 3, + max_time: 0, + offset: 0, + size: 0, + typ: 
BlockType::Unsigned, + reader_idx: 0, + }, + ) + .unwrap(); let mut block_map = BTreeMap::new(); block_map.insert( @@ -1875,7 +1884,8 @@ mod tests { ); let decoder = MockBlockDecoder::new(block_map); - let (schema, packers) = TSMFileConverter::process_measurement_table(decoder, &mut table)?; + let (schema, packers) = + TSMFileConverter::process_measurement_table(decoder, &mut table).unwrap(); assert_column_eq!(schema, 0, InfluxColumnType::Tag, "az"); assert_column_eq!(schema, 1, InfluxColumnType::Tag, "region"); @@ -2005,8 +2015,6 @@ mod tests { Some(4000), ])) ); - - Ok(()) } fn empty_block() -> Block { @@ -2021,33 +2029,39 @@ mod tests { } #[test] - fn merge_input_tables() -> Result<(), Box> { + fn merge_input_tables() { let mut inputs = vec![]; let mut table = MeasurementTable::new("cpu".to_string(), 0); - table.add_series_data( - vec![("region".to_string(), "east".to_string())], - "temp".to_string(), - empty_block(), - )?; + table + .add_series_data( + vec![("region".to_string(), "east".to_string())], + "temp".to_string(), + empty_block(), + ) + .unwrap(); inputs.push(Some(table.clone())); table = MeasurementTable::new("cpu".to_string(), 1); - table.add_series_data( - vec![("server".to_string(), "a".to_string())], - "temp".to_string(), - empty_block(), - )?; + table + .add_series_data( + vec![("server".to_string(), "a".to_string())], + "temp".to_string(), + empty_block(), + ) + .unwrap(); inputs.push(Some(table.clone())); table = MeasurementTable::new("disk".to_string(), 2); - table.add_series_data( - vec![("region".to_string(), "west".to_string())], - "temp".to_string(), - empty_block(), - )?; + table + .add_series_data( + vec![("region".to_string(), "west".to_string())], + "temp".to_string(), + empty_block(), + ) + .unwrap(); inputs.push(Some(table)); - let mut res = TSMFileConverter::merge_input_tables(inputs)?; + let mut res = TSMFileConverter::merge_input_tables(inputs).unwrap(); let mut merged = res.0.unwrap(); inputs = res.1; assert_eq!(merged.name, "cpu".to_string()); @@ -2055,15 +2069,14 @@ mod tests { assert_eq!(inputs[0], None); assert_eq!(inputs[1], None); - res = TSMFileConverter::merge_input_tables(inputs)?; + res = TSMFileConverter::merge_input_tables(inputs).unwrap(); merged = res.0.unwrap(); assert_eq!(merged.name, "disk".to_string()); assert_eq!(res.1, vec![None, None, None]); - Ok(()) } #[test] - fn conversion_tsm_file_single() -> Result<(), Error> { + fn conversion_tsm_file_single() { let file = File::open("../tests/fixtures/merge-tsm/merge_a.tsm.gz"); let mut decoder = GzDecoder::new(file.unwrap()); let mut buf = Vec::new(); @@ -2099,12 +2112,10 @@ mod tests { "[disk] Closed" ], ); - - Ok(()) } #[test] - fn conversion_tsm_files_none_overlapping() -> Result<(), Error> { + fn conversion_tsm_files_none_overlapping() { let mut index_streams = Vec::new(); let mut block_streams = Vec::new(); @@ -2154,7 +2165,5 @@ mod tests { "[disk] Closed" ], ); - - Ok(()) } } diff --git a/mutable_buffer/src/column.rs b/mutable_buffer/src/column.rs index 89a88e111a..2ba6a89ee9 100644 --- a/mutable_buffer/src/column.rs +++ b/mutable_buffer/src/column.rs @@ -339,32 +339,27 @@ impl Column { mod tests { use super::*; - type TestError = Box; - type Result = std::result::Result; - #[test] - fn test_has_i64_range() -> Result { + fn test_has_i64_range() { let mut stats = StatValues::new(1); stats.update(2); let col = Column::I64(vec![Some(1), None, Some(2)], stats.clone()); - assert!(!col.has_i64_range(-1, 0)?); - assert!(!col.has_i64_range(0, 1)?); - assert!(col.has_i64_range(1, 2)?); - 
assert!(col.has_i64_range(2, 3)?); - assert!(!col.has_i64_range(3, 4)?); + assert!(!col.has_i64_range(-1, 0).unwrap()); + assert!(!col.has_i64_range(0, 1).unwrap()); + assert!(col.has_i64_range(1, 2).unwrap()); + assert!(col.has_i64_range(2, 3).unwrap()); + assert!(!col.has_i64_range(3, 4).unwrap()); let col = Column::I64(vec![Some(2), None, Some(1)], stats); - assert!(!col.has_i64_range(-1, 0)?); - assert!(!col.has_i64_range(0, 1)?); - assert!(col.has_i64_range(1, 2)?); - assert!(col.has_i64_range(2, 3)?); - assert!(!col.has_i64_range(3, 4)?); - - Ok(()) + assert!(!col.has_i64_range(-1, 0).unwrap()); + assert!(!col.has_i64_range(0, 1).unwrap()); + assert!(col.has_i64_range(1, 2).unwrap()); + assert!(col.has_i64_range(2, 3).unwrap()); + assert!(!col.has_i64_range(3, 4).unwrap()); } #[test] - fn test_has_i64_range_does_not_panic() -> Result { + fn test_has_i64_range_does_not_panic() { // providing the wrong column type should get an internal error, not a panic let col = Column::F64(vec![Some(1.2)], StatValues::new(1.2)); let res = col.has_i64_range(-1, 0); @@ -377,11 +372,10 @@ mod tests { expected, res_string ); - Ok(()) } #[test] - fn test_has_non_null_i64_range_() -> Result { + fn test_has_non_null_i64_range_() { let none_col: Vec<Option<i64>> = vec![None, None, None]; let some_col: Vec<Option<i64>> = vec![Some(0), Some(0), Some(0)]; @@ -389,19 +383,17 @@ mod tests { stats.update(2); let col = Column::I64(vec![Some(1), None, Some(2)], stats); - assert!(!col.has_non_null_i64_range(&some_col, -1, 0)?); - assert!(!col.has_non_null_i64_range(&some_col, 0, 1)?); - assert!(col.has_non_null_i64_range(&some_col, 1, 2)?); - assert!(col.has_non_null_i64_range(&some_col, 2, 3)?); - assert!(!col.has_non_null_i64_range(&some_col, 3, 4)?); + assert!(!col.has_non_null_i64_range(&some_col, -1, 0).unwrap()); + assert!(!col.has_non_null_i64_range(&some_col, 0, 1).unwrap()); + assert!(col.has_non_null_i64_range(&some_col, 1, 2).unwrap()); + assert!(col.has_non_null_i64_range(&some_col, 2, 3).unwrap()); + assert!(!col.has_non_null_i64_range(&some_col, 3, 4).unwrap()); - assert!(!col.has_non_null_i64_range(&none_col, -1, 0)?); - assert!(!col.has_non_null_i64_range(&none_col, 0, 1)?); - assert!(!col.has_non_null_i64_range(&none_col, 1, 2)?); - assert!(!col.has_non_null_i64_range(&none_col, 2, 3)?); - assert!(!col.has_non_null_i64_range(&none_col, 3, 4)?); - - Ok(()) + assert!(!col.has_non_null_i64_range(&none_col, -1, 0).unwrap()); + assert!(!col.has_non_null_i64_range(&none_col, 0, 1).unwrap()); + assert!(!col.has_non_null_i64_range(&none_col, 1, 2).unwrap()); + assert!(!col.has_non_null_i64_range(&none_col, 2, 3).unwrap()); + assert!(!col.has_non_null_i64_range(&none_col, 3, 4).unwrap()); } #[test] diff --git a/object_store/src/aws.rs b/object_store/src/aws.rs index 16a6cb47af..3d56f35b75 100644 --- a/object_store/src/aws.rs +++ b/object_store/src/aws.rs @@ -482,7 +482,7 @@ mod tests { {} to run", unset_var_names ); - return Ok(()); + return; } else { AwsConfig { access_key_id: env::var("AWS_ACCESS_KEY_ID") @@ -515,7 +515,7 @@ } #[tokio::test] - async fn s3_test() -> Result<()> { + async fn s3_test() { let config = maybe_skip_integration!(); let integration = ObjectStore::new_amazon_s3( AmazonS3::new( .expect("Valid S3 config"), ); - check_credentials(put_get_delete_list(&integration).await)?; + check_credentials(put_get_delete_list(&integration).await).unwrap(); check_credentials(list_with_delimiter(&integration).await).unwrap(); - - Ok(()) } #[tokio::test] - async fn
s3_test_get_nonexistent_region() -> Result<()> { + async fn s3_test_get_nonexistent_region() { let mut config = maybe_skip_integration!(); // Assumes environment variables do not provide credentials to AWS US West 1 config.region = "us-west-1".into(); @@ -564,12 +562,10 @@ mod tests { } else { panic!("unexpected error type: {:?}", err); } - - Ok(()) } #[tokio::test] - async fn s3_test_get_nonexistent_location() -> Result<()> { + async fn s3_test_get_nonexistent_location() { let config = maybe_skip_integration!(); let integration = ObjectStore::new_amazon_s3( AmazonS3::new( @@ -605,12 +601,10 @@ mod tests { } else { panic!("unexpected error type: {:?}", err); } - - Ok(()) } #[tokio::test] - async fn s3_test_get_nonexistent_bucket() -> Result<()> { + async fn s3_test_get_nonexistent_bucket() { let mut config = maybe_skip_integration!(); config.bucket = NON_EXISTENT_NAME.into(); @@ -642,12 +636,10 @@ mod tests { } else { panic!("unexpected error type: {:?}", err); } - - Ok(()) } #[tokio::test] - async fn s3_test_put_nonexistent_region() -> Result<()> { + async fn s3_test_put_nonexistent_region() { let mut config = maybe_skip_integration!(); // Assumes environment variables do not provide credentials to AWS US West 1 config.region = "us-west-1".into(); @@ -691,12 +683,10 @@ mod tests { } else { panic!("unexpected error type: {:?}", err); } - - Ok(()) } #[tokio::test] - async fn s3_test_put_nonexistent_bucket() -> Result<()> { + async fn s3_test_put_nonexistent_bucket() { let mut config = maybe_skip_integration!(); config.bucket = NON_EXISTENT_NAME.into(); @@ -739,12 +729,10 @@ mod tests { } else { panic!("unexpected error type: {:?}", err); } - - Ok(()) } #[tokio::test] - async fn s3_test_delete_nonexistent_location() -> Result<()> { + async fn s3_test_delete_nonexistent_location() { let config = maybe_skip_integration!(); let integration = ObjectStore::new_amazon_s3( AmazonS3::new( @@ -762,12 +750,10 @@ mod tests { let result = integration.delete(&location).await; assert!(result.is_ok()); - - Ok(()) } #[tokio::test] - async fn s3_test_delete_nonexistent_region() -> Result<()> { + async fn s3_test_delete_nonexistent_region() { let mut config = maybe_skip_integration!(); // Assumes environment variables do not provide credentials to AWS US West 1 config.region = "us-west-1".into(); @@ -801,12 +787,10 @@ mod tests { } else { panic!("unexpected error type: {:?}", err); } - - Ok(()) } #[tokio::test] - async fn s3_test_delete_nonexistent_bucket() -> Result<()> { + async fn s3_test_delete_nonexistent_bucket() { let mut config = maybe_skip_integration!(); config.bucket = NON_EXISTENT_NAME.into(); @@ -839,7 +823,5 @@ mod tests { } else { panic!("unexpected error type: {:?}", err); } - - Ok(()) } } diff --git a/object_store/src/azure.rs b/object_store/src/azure.rs index 32fe8e9966..1638e92525 100644 --- a/object_store/src/azure.rs +++ b/object_store/src/azure.rs @@ -280,9 +280,6 @@ mod tests { use crate::ObjectStore; use std::env; - type Error = Box; - type Result = std::result::Result; - #[derive(Debug)] struct AzureConfig { storage_account: String, @@ -324,7 +321,7 @@ mod tests { {} to run", unset_var_names ); - return Ok(()); + return; } else { AzureConfig { storage_account: env::var("AZURE_STORAGE_ACCOUNT") @@ -339,7 +336,7 @@ mod tests { } #[tokio::test] - async fn azure_blob_test() -> Result<()> { + async fn azure_blob_test() { let config = maybe_skip_integration!(); let integration = ObjectStore::new_microsoft_azure(MicrosoftAzure::new( config.storage_account, @@ -347,9 +344,7 @@ mod tests { 
config.bucket,
         ));
 
-        put_get_delete_list(&integration).await?;
-        list_with_delimiter(&integration).await?;
-
-        Ok(())
+        put_get_delete_list(&integration).await.unwrap();
+        list_with_delimiter(&integration).await.unwrap();
     }
 }
diff --git a/object_store/src/buffer.rs b/object_store/src/buffer.rs
index 10d32483b4..4adfb537ae 100644
--- a/object_store/src/buffer.rs
+++ b/object_store/src/buffer.rs
@@ -96,7 +96,7 @@ mod tests {
             .interleave_pending()
     }
 
-    async fn check_stream<R>(buf_stream: BufferedStream<R>) -> Result<()>
+    async fn check_stream<R>(buf_stream: BufferedStream<R>)
     where
         R: AsyncRead + AsyncWrite + AsyncSeek + Unpin,
     {
@@ -106,25 +106,30 @@ mod tests {
         let content = buf_stream
             .map_ok(|b| bytes::BytesMut::from(&b[..]))
             .try_concat()
-            .await?;
+            .await
+            .unwrap();
 
         assert_eq!(content, "foobarbaz");
-        Ok(())
     }
 
     #[tokio::test]
-    async fn test_buffered_stream() -> Result<()> {
+    async fn test_buffered_stream() {
         let backing_store = std::io::Cursor::new(Vec::new()); // in-memory buffer
-        check_stream(BufferedStream::new(backing_store, test_data()).await?).await
+        check_stream(
+            BufferedStream::new(backing_store, test_data())
+                .await
+                .unwrap(),
+        )
+        .await;
     }
 
     #[tokio::test]
-    async fn test_slurp_stream_tempfile() -> Result<()> {
-        check_stream(slurp_stream_tempfile(test_data()).await?).await
+    async fn test_slurp_stream_tempfile() {
+        check_stream(slurp_stream_tempfile(test_data()).await.unwrap()).await;
    }
 
     #[tokio::test]
-    async fn test_slurp_stream_memory() -> Result<()> {
-        check_stream(slurp_stream_memory(test_data()).await?).await
+    async fn test_slurp_stream_memory() {
+        check_stream(slurp_stream_memory(test_data()).await.unwrap()).await;
     }
 }
diff --git a/object_store/src/disk.rs b/object_store/src/disk.rs
index 5384cb567a..a1b5e0d6f9 100644
--- a/object_store/src/disk.rs
+++ b/object_store/src/disk.rs
@@ -263,9 +263,6 @@ impl File {
 mod tests {
     use super::*;
 
-    type TestError = Box;
-    type Result = std::result::Result;
-
     use crate::{
         tests::{list_with_delimiter, put_get_delete_list},
        Error as ObjectStoreError, ObjectStore, ObjectStoreApi, ObjectStorePath,
@@ -274,19 +271,17 @@ mod tests {
     use tempfile::TempDir;
 
     #[tokio::test]
-    async fn file_test() -> Result<()> {
-        let root = TempDir::new()?;
+    async fn file_test() {
+        let root = TempDir::new().unwrap();
         let integration = ObjectStore::new_file(File::new(root.path()));
 
-        put_get_delete_list(&integration).await?;
-        list_with_delimiter(&integration).await?;
-
-        Ok(())
+        put_get_delete_list(&integration).await.unwrap();
+        list_with_delimiter(&integration).await.unwrap();
     }
 
     #[tokio::test]
-    async fn length_mismatch_is_an_error() -> Result<()> {
-        let root = TempDir::new()?;
+    async fn length_mismatch_is_an_error() {
+        let root = TempDir::new().unwrap();
         let integration = ObjectStore::new_file(File::new(root.path()));
 
         let bytes = stream::once(async { Ok(Bytes::from("hello world")) });
@@ -303,13 +298,11 @@ mod tests {
             }
         }
     ));
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn creates_dir_if_not_present() -> Result<()> {
-        let root = TempDir::new()?;
+    async fn creates_dir_if_not_present() {
+        let root = TempDir::new().unwrap();
         let integration = ObjectStore::new_file(File::new(root.path()));
 
         let data = Bytes::from("arbitrary data");
@@ -323,22 +316,23 @@ mod tests {
             futures::stream::once(async move { stream_data }),
             Some(data.len()),
         )
-        .await?;
+        .await
+        .unwrap();
 
         let read_data = integration
             .get(&location)
-            .await?
+ .await + .unwrap() .map_ok(|b| bytes::BytesMut::from(&b[..])) .try_concat() - .await?; + .await + .unwrap(); assert_eq!(&*read_data, data); - - Ok(()) } #[tokio::test] - async fn unknown_length() -> Result<()> { - let root = TempDir::new()?; + async fn unknown_length() { + let root = TempDir::new().unwrap(); let integration = ObjectStore::new_file(File::new(root.path())); let data = Bytes::from("arbitrary data"); @@ -352,16 +346,17 @@ mod tests { futures::stream::once(async move { stream_data }), None, ) - .await?; + .await + .unwrap(); let read_data = integration .get(&location) - .await? + .await + .unwrap() .map_ok(|b| bytes::BytesMut::from(&b[..])) .try_concat() - .await?; + .await + .unwrap(); assert_eq!(&*read_data, data); - - Ok(()) } } diff --git a/object_store/src/gcp.rs b/object_store/src/gcp.rs index eaaf833b27..26dc7ba0f6 100644 --- a/object_store/src/gcp.rs +++ b/object_store/src/gcp.rs @@ -264,9 +264,6 @@ mod test { use bytes::Bytes; use std::env; - type TestError = Box; - type Result = std::result::Result; - const NON_EXISTENT_NAME: &str = "nonexistentname"; #[derive(Debug)] @@ -305,7 +302,7 @@ mod test { {} to run", unset_var_names ); - return Ok(()); + return; } else { GoogleCloudConfig { bucket: env::var("INFLUXDB_IOX_BUCKET") @@ -318,20 +315,19 @@ mod test { } #[tokio::test] - async fn gcs_test() -> Result<()> { + async fn gcs_test() { let config = maybe_skip_integration!(); let integration = ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new( config.service_account, config.bucket, )); - put_get_delete_list(&integration).await?; - list_with_delimiter(&integration).await?; - Ok(()) + put_get_delete_list(&integration).await.unwrap(); + list_with_delimiter(&integration).await.unwrap(); } #[tokio::test] - async fn gcs_test_get_nonexistent_location() -> Result<()> { + async fn gcs_test_get_nonexistent_location() { let config = maybe_skip_integration!(); let integration = ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new( config.service_account, @@ -360,12 +356,10 @@ mod test { } else { panic!("unexpected error type: {:?}", err) } - - Ok(()) } #[tokio::test] - async fn gcs_test_get_nonexistent_bucket() -> Result<()> { + async fn gcs_test_get_nonexistent_bucket() { let mut config = maybe_skip_integration!(); config.bucket = NON_EXISTENT_NAME.into(); let integration = ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new( @@ -389,12 +383,10 @@ mod test { } else { panic!("unexpected error type: {:?}", err); } - - Ok(()) } #[tokio::test] - async fn gcs_test_delete_nonexistent_location() -> Result<()> { + async fn gcs_test_delete_nonexistent_location() { let config = maybe_skip_integration!(); let integration = ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new( config.service_account, @@ -421,12 +413,10 @@ mod test { } else { panic!("unexpected error type: {:?}", err) } - - Ok(()) } #[tokio::test] - async fn gcs_test_delete_nonexistent_bucket() -> Result<()> { + async fn gcs_test_delete_nonexistent_bucket() { let mut config = maybe_skip_integration!(); config.bucket = NON_EXISTENT_NAME.into(); let integration = ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new( @@ -454,12 +444,10 @@ mod test { } else { panic!("unexpected error type: {:?}", err) } - - Ok(()) } #[tokio::test] - async fn gcs_test_put_nonexistent_bucket() -> Result<()> { + async fn gcs_test_put_nonexistent_bucket() { let mut config = maybe_skip_integration!(); config.bucket = NON_EXISTENT_NAME.into(); let integration = 
ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new( @@ -497,7 +485,5 @@ mod test { } else { panic!("unexpected error type: {:?}", err); } - - Ok(()) } } diff --git a/object_store/src/memory.rs b/object_store/src/memory.rs index f46daac623..58aa22401e 100644 --- a/object_store/src/memory.rs +++ b/object_store/src/memory.rs @@ -176,9 +176,6 @@ impl InMemory { mod tests { use super::*; - type TestError = Box; - type Result = std::result::Result; - use crate::{ tests::{list_with_delimiter, put_get_delete_list}, Error as ObjectStoreError, ObjectStore, ObjectStoreApi, ObjectStorePath, @@ -186,18 +183,15 @@ mod tests { use futures::stream; #[tokio::test] - async fn in_memory_test() -> Result<()> { + async fn in_memory_test() { let integration = ObjectStore::new_in_memory(InMemory::new()); - put_get_delete_list(&integration).await?; - + put_get_delete_list(&integration).await.unwrap(); list_with_delimiter(&integration).await.unwrap(); - - Ok(()) } #[tokio::test] - async fn length_mismatch_is_an_error() -> Result<()> { + async fn length_mismatch_is_an_error() { let integration = ObjectStore::new_in_memory(InMemory::new()); let bytes = stream::once(async { Ok(Bytes::from("hello world")) }); @@ -214,12 +208,10 @@ mod tests { } } )); - - Ok(()) } #[tokio::test] - async fn unknown_length() -> Result<()> { + async fn unknown_length() { let integration = ObjectStore::new_in_memory(InMemory::new()); let data = Bytes::from("arbitrary data"); @@ -233,16 +225,17 @@ mod tests { futures::stream::once(async move { stream_data }), None, ) - .await?; + .await + .unwrap(); let read_data = integration .get(&location) - .await? + .await + .unwrap() .map_ok(|b| bytes::BytesMut::from(&b[..])) .try_concat() - .await?; + .await + .unwrap(); assert_eq!(&*read_data, data); - - Ok(()) } } diff --git a/query/src/exec.rs b/query/src/exec.rs index 24d7301dde..e3a3611f33 100644 --- a/query/src/exec.rs +++ b/query/src/exec.rs @@ -323,33 +323,30 @@ mod tests { use super::*; #[tokio::test] - async fn executor_known_string_set_plan_ok() -> Result<()> { + async fn executor_known_string_set_plan_ok() { let expected_strings = to_set(&["Foo", "Bar"]); let plan = StringSetPlan::Known(Arc::clone(&expected_strings)); let executor = Executor::default(); - let result_strings = executor.to_string_set(plan).await?; + let result_strings = executor.to_string_set(plan).await.unwrap(); assert_eq!(result_strings, expected_strings); - Ok(()) } #[tokio::test] - async fn executor_datafusion_string_set_single_plan_no_batches() -> Result<()> { + async fn executor_datafusion_string_set_single_plan_no_batches() { // Test with a single plan that produces no batches let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Utf8, true)])); let scan = make_plan(schema, vec![]); let plan: StringSetPlan = vec![scan].into(); let executor = Executor::new(); - let results = executor.to_string_set(plan).await?; + let results = executor.to_string_set(plan).await.unwrap(); assert_eq!(results, StringSetRef::new(StringSet::new())); - - Ok(()) } #[tokio::test] - async fn executor_datafusion_string_set_single_plan_one_batch() -> Result<()> { + async fn executor_datafusion_string_set_single_plan_one_batch() { // Test with a single plan that produces one record batch let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Utf8, true)])); let data = to_string_array(&["foo", "bar", "baz", "foo"]); @@ -359,15 +356,13 @@ mod tests { let plan: StringSetPlan = vec![scan].into(); let executor = Executor::new(); - let results = 
executor.to_string_set(plan).await?;
+        let results = executor.to_string_set(plan).await.unwrap();
 
         assert_eq!(results, to_set(&["foo", "bar", "baz"]));
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn executor_datafusion_string_set_single_plan_two_batch() -> Result<()> {
+    async fn executor_datafusion_string_set_single_plan_two_batch() {
         // Test with a single plan that produces multiple record batches
         let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Utf8, true)]));
         let data1 = to_string_array(&["foo", "bar"]);
@@ -380,15 +375,13 @@ mod tests {
         let plan: StringSetPlan = vec![scan].into();
 
         let executor = Executor::new();
-        let results = executor.to_string_set(plan).await?;
+        let results = executor.to_string_set(plan).await.unwrap();
 
         assert_eq!(results, to_set(&["foo", "bar", "baz"]));
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn executor_datafusion_string_set_multi_plan() -> Result<()> {
+    async fn executor_datafusion_string_set_multi_plan() {
         // Test with multiple datafusion logical plans
         let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Utf8, true)]));
 
@@ -405,15 +398,13 @@ mod tests {
         let plan: StringSetPlan = vec![scan1, scan2].into();
 
         let executor = Executor::new();
-        let results = executor.to_string_set(plan).await?;
+        let results = executor.to_string_set(plan).await.unwrap();
 
         assert_eq!(results, to_set(&["foo", "bar", "baz"]));
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn executor_datafusion_string_set_nulls() -> Result<()> {
+    async fn executor_datafusion_string_set_nulls() {
         // Ensure that nulls in the output set are handled reasonably
         // (error, rather than silently ignored)
         let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Utf8, true)]));
@@ -440,12 +431,10 @@ mod tests {
             expected_error, actual_error,
         );
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn executor_datafusion_string_set_bad_schema() -> Result<()> {
+    async fn executor_datafusion_string_set_bad_schema() {
         // Ensure that an incorrect schema (an int) gives a reasonable error
         let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Int64, true)]));
         let data = Arc::new(Int64Array::from(vec![1]));
@@ -469,12 +458,10 @@ mod tests {
             expected_error, actual_error
         );
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn make_schema_pivot_is_planned() -> Result<()> {
+    async fn make_schema_pivot_is_planned() {
         // Test that all the planning logic is wired up and that we
         // can make a plan using a SchemaPivot node
         let schema = Arc::new(Schema::new(vec![
@@ -498,8 +485,6 @@ mod tests {
         let results = executor.to_string_set(plan).await.expect("Executed plan");
 
         assert_eq!(results, to_set(&["f1", "f2"]));
-
-        Ok(())
     }
 
     /// return a set for testing
diff --git a/query/src/exec/schema_pivot.rs b/query/src/exec/schema_pivot.rs
index 8e367b679b..ffe885d142 100644
--- a/query/src/exec/schema_pivot.rs
+++ b/query/src/exec/schema_pivot.rs
@@ -301,7 +301,7 @@ mod tests {
     };
 
     #[tokio::test]
-    async fn schema_pivot_exec_all_null() -> Result<()> {
+    async fn schema_pivot_exec_all_null() {
         let case = SchemaTestCase {
             input_batches: &[TestBatch {
                 a: &[None, None],
@@ -315,12 +315,10 @@ mod tests {
             "TestCase: {:?}",
             case
         );
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn schema_pivot_exec_both_non_null() -> Result<()> {
+    async fn schema_pivot_exec_both_non_null() {
         let case = SchemaTestCase {
             input_batches: &[TestBatch {
                 a: &[Some(1), None],
@@ -334,12 +332,10 @@ mod tests {
             "TestCase: {:?}",
             case
         );
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn schema_pivot_exec_one_non_null() -> Result<()> {
+    async fn schema_pivot_exec_one_non_null() {
         let case = 
SchemaTestCase {
             input_batches: &[TestBatch {
                 a: &[Some(1), None],
@@ -353,12 +349,10 @@ mod tests {
             "TestCase: {:?}",
             case
         );
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn schema_pivot_exec_both_non_null_two_record_batches() -> Result<()> {
+    async fn schema_pivot_exec_both_non_null_two_record_batches() {
         let case = SchemaTestCase {
             input_batches: &[
                 TestBatch {
@@ -378,12 +372,10 @@ mod tests {
             "TestCase: {:?}",
             case
         );
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn schema_pivot_exec_one_non_null_in_second_record_batch() -> Result<()> {
+    async fn schema_pivot_exec_one_non_null_in_second_record_batch() {
         let case = SchemaTestCase {
             input_batches: &[
                 TestBatch {
@@ -403,12 +395,10 @@ mod tests {
             "TestCase: {:?}",
             case
         );
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn schema_pivot_exec_bad_partition() -> Result<()> {
+    async fn schema_pivot_exec_bad_partition() {
         // ensure passing in a bad partition generates a reasonable error
         let pivot = make_schema_pivot(SchemaTestCase::input_schema(), vec![]);
 
@@ -427,8 +417,6 @@ mod tests {
             expected_error, actual_error
         );
-
-        Ok(())
     }
 
     /// Return a StringSet extracted from the record batch
diff --git a/query/src/exec/seriesset.rs b/query/src/exec/seriesset.rs
index 9ded3b8680..4753697000 100644
--- a/query/src/exec/seriesset.rs
+++ b/query/src/exec/seriesset.rs
@@ -402,7 +402,7 @@ mod tests {
     use super::*;
 
     #[tokio::test]
-    async fn test_convert_empty() -> Result<()> {
+    async fn test_convert_empty() {
         let schema = Arc::new(Schema::new(vec![]));
         let empty_iterator = Box::pin(SizedRecordBatchStream::new(schema, vec![]));
 
@@ -412,12 +412,10 @@ mod tests {
         let results = convert(table_name, &tag_columns, &field_columns, empty_iterator).await;
 
         assert_eq!(results.len(), 0);
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn test_convert_single_series_no_tags() -> Result<()> {
+    async fn test_convert_single_series_no_tags() {
         // single series
         let schema = Arc::new(Schema::new(vec![
             Field::new("tag_a", DataType::Utf8, true),
@@ -467,11 +465,10 @@ mod tests {
             .collect::<Vec<_>>();
 
         assert_eq!(expected_data, actual_data);
-        Ok(())
     }
 
     #[tokio::test]
-    async fn test_convert_single_series_no_tags_nulls() -> Result<()> {
+    async fn test_convert_single_series_no_tags_nulls() {
         // single series
         let schema = Arc::new(Schema::new(vec![
             Field::new("tag_a", DataType::Utf8, true),
@@ -522,11 +519,10 @@ mod tests {
             .collect::<Vec<_>>();
 
         assert_eq!(expected_data, actual_data);
-        Ok(())
     }
 
     #[tokio::test]
-    async fn test_convert_single_series_one_tag() -> Result<()> {
+    async fn test_convert_single_series_one_tag() {
         // single series
         let schema = Arc::new(Schema::new(vec![
             Field::new("tag_a", DataType::Utf8, true),
@@ -558,12 +554,10 @@ mod tests {
         );
         assert_eq!(series_set.start_row, 0);
         assert_eq!(series_set.num_rows, 2);
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn test_convert_one_tag_multi_series() -> Result<()> {
+    async fn test_convert_one_tag_multi_series() {
         let schema = Arc::new(Schema::new(vec![
             Field::new("tag_a", DataType::Utf8, true),
             Field::new("tag_b", DataType::Utf8, true),
@@ -608,13 +602,11 @@ mod tests {
         );
         assert_eq!(series_set2.start_row, 3);
         assert_eq!(series_set2.num_rows, 2);
-
-        Ok(())
     }
 
     // two tag columns, three series
     #[tokio::test]
-    async fn test_convert_two_tag_multi_series() -> Result<()> {
+    async fn test_convert_two_tag_multi_series() {
         let schema = Arc::new(Schema::new(vec![
             Field::new("tag_a", DataType::Utf8, true),
             Field::new("tag_b", DataType::Utf8, true),
@@ -667,12 +659,10 @@ mod tests {
         );
         assert_eq!(series_set3.start_row, 3);
         assert_eq!(series_set3.num_rows, 2);
-
-        Ok(())
     }
 
     #[tokio::test]
-    
async fn test_convert_groups() -> Result<()> { + async fn test_convert_groups() { let schema = Arc::new(Schema::new(vec![ Field::new("tag_a", DataType::Utf8, true), Field::new("tag_b", DataType::Utf8, true), @@ -742,13 +732,11 @@ mod tests { ); assert_eq!(series_set3.start_row, 2); assert_eq!(series_set3.num_rows, 1); - - Ok(()) } // test with no group tags specified #[tokio::test] - async fn test_convert_groups_no_tags() -> Result<()> { + async fn test_convert_groups_no_tags() { let schema = Arc::new(Schema::new(vec![ Field::new("tag_a", DataType::Utf8, true), Field::new("tag_b", DataType::Utf8, true), @@ -795,8 +783,6 @@ mod tests { ); assert_eq!(series_set2.start_row, 1); assert_eq!(series_set2.num_rows, 1); - - Ok(()) } fn extract_group(item: &SeriesSetItem) -> &GroupDescription { diff --git a/server/src/lib.rs b/server/src/lib.rs index fd7514b42d..18896a650a 100644 --- a/server/src/lib.rs +++ b/server/src/lib.rs @@ -651,11 +651,8 @@ mod tests { use super::*; - type TestError = Box; - type Result = std::result::Result; - #[tokio::test] - async fn server_api_calls_return_error_with_no_id_set() -> Result { + async fn server_api_calls_return_error_with_no_id_set() { let manager = TestConnectionManager::new(); let store = Arc::new(ObjectStore::new_in_memory(InMemory::new())); let server = Server::new(manager, store); @@ -667,8 +664,6 @@ mod tests { let lines = parsed_lines("cpu foo=1 10"); let resp = server.write_lines("foo", &lines).await.unwrap_err(); assert!(matches!(resp, Error::IdNotSet)); - - Ok(()) } #[tokio::test] @@ -733,7 +728,7 @@ mod tests { } #[tokio::test] - async fn duplicate_database_name_rejected() -> Result { + async fn duplicate_database_name_rejected() { // Covers #643 let manager = TestConnectionManager::new(); @@ -758,12 +753,10 @@ mod tests { if !matches!(got, Error::DatabaseAlreadyExists {..}) { panic!("expected already exists error"); } - - Ok(()) } #[tokio::test] - async fn db_names_sorted() -> Result { + async fn db_names_sorted() { let manager = TestConnectionManager::new(); let store = Arc::new(ObjectStore::new_in_memory(InMemory::new())); let server = Server::new(manager, store); @@ -781,19 +774,20 @@ mod tests { let db_names_sorted = server.db_names_sorted(); assert_eq!(names, db_names_sorted); - - Ok(()) } #[tokio::test] - async fn writes_local() -> Result { + async fn writes_local() { let manager = TestConnectionManager::new(); let store = Arc::new(ObjectStore::new_in_memory(InMemory::new())); let server = Server::new(manager, store); server.set_id(NonZeroU32::new(1).unwrap()).unwrap(); + + let name = DatabaseName::new("foo".to_string()).unwrap(); server - .create_database(DatabaseRules::new(DatabaseName::new("foo").unwrap())) - .await?; + .create_database(DatabaseRules::new(name)) + .await + .unwrap(); let line = "cpu bar=1 10"; let lines: Vec<_> = parse_lines(line).map(|l| l.unwrap()).collect(); @@ -818,12 +812,10 @@ mod tests { "+-----+------+", ]; assert_table_eq!(expected, &batches); - - Ok(()) } #[tokio::test] - async fn close_chunk() -> Result { + async fn close_chunk() { test_helpers::maybe_start_logging(); let manager = TestConnectionManager::new(); let store = Arc::new(ObjectStore::new_in_memory(InMemory::new())); @@ -837,7 +829,8 @@ mod tests { let db_name = DatabaseName::new("foo").unwrap(); server .create_database(DatabaseRules::new(db_name.clone())) - .await?; + .await + .unwrap(); let line = "cpu bar=1 10"; let lines: Vec<_> = parse_lines(line).map(|l| l.unwrap()).collect(); @@ -882,8 +875,6 @@ mod tests { // ensure that we don't leave the 
server instance hanging around
         cancel_token.cancel();
         let _ = background_handle.await;
-
-        Ok(())
     }
 
     #[tokio::test]
@@ -940,7 +931,7 @@ partition_key:
     }
 
     #[tokio::test]
-    async fn background_task_cleans_jobs() -> Result {
+    async fn background_task_cleans_jobs() {
         let manager = TestConnectionManager::new();
         let store = Arc::new(ObjectStore::new_in_memory(InMemory::new()));
         let server = Arc::new(Server::new(manager, store));
@@ -959,8 +950,6 @@ partition_key:
         // ensure that we don't leave the server instance hanging around
         cancel_token.cancel();
         let _ = background_handle.await;
-
-        Ok(())
     }
 
     #[derive(Snafu, Debug, Clone)]
diff --git a/src/commands/run.rs b/src/commands/run.rs
index 761ad503d9..42111e9da9 100644
--- a/src/commands/run.rs
+++ b/src/commands/run.rs
@@ -304,18 +304,18 @@ mod tests {
     }
 
     #[test]
-    fn test_socketaddr() -> Result<(), clap::Error> {
-        let c = Config::from_iter_safe(
-            to_vec(&["server", "--api-bind", "127.0.0.1:1234"]).into_iter(),
-        )?;
+    fn test_socketaddr() {
+        let c =
+            Config::from_iter_safe(to_vec(&["server", "--api-bind", "127.0.0.1:1234"]).into_iter())
+                .unwrap();
         assert_eq!(
             c.http_bind_address,
             SocketAddr::from(([127, 0, 0, 1], 1234))
         );
 
-        let c = Config::from_iter_safe(
-            to_vec(&["server", "--api-bind", "localhost:1234"]).into_iter(),
-        )?;
+        let c =
+            Config::from_iter_safe(to_vec(&["server", "--api-bind", "localhost:1234"]).into_iter())
+                .unwrap();
         // depending on where the test runs, localhost will either resolve to an ipv4 or
         // an ipv6 addr.
         match c.http_bind_address {
@@ -336,7 +336,5 @@ mod tests {
             .expect_err("must fail"),
             clap::ErrorKind::ValueValidation
         );
-
-        Ok(())
     }
 }
diff --git a/src/influxdb_ioxd/http.rs b/src/influxdb_ioxd/http.rs
index 334a178671..b01a5eb77d 100644
--- a/src/influxdb_ioxd/http.rs
+++ b/src/influxdb_ioxd/http.rs
@@ -729,11 +729,8 @@ mod tests {
     use server::{db::Db, ConnectionManagerImpl};
     use std::num::NonZeroU32;
 
-    type Error = Box;
-    type Result = std::result::Result;
-
     #[tokio::test]
-    async fn test_health() -> Result<()> {
+    async fn test_health() {
         let test_storage = Arc::new(AppServer::new(
             ConnectionManagerImpl {},
             Arc::new(ObjectStore::new_in_memory(InMemory::new())),
@@ -745,11 +742,10 @@ mod tests {
 
         // Print the response so if the test fails, we have a log of what went wrong
         check_response("health", response, StatusCode::OK, "OK").await;
-        Ok(())
     }
 
     #[tokio::test]
-    async fn test_write() -> Result<()> {
+    async fn test_write() {
         let test_storage = Arc::new(AppServer::new(
             ConnectionManagerImpl {},
             Arc::new(ObjectStore::new_in_memory(InMemory::new())),
@@ -795,8 +791,6 @@ mod tests {
             "+----------------+--------------+-------+-----------------+------------+",
         ];
         assert_table_eq!(expected, &batches);
-
-        Ok(())
     }
 
     /// Sets up a test database with some data for testing the query endpoint
@@ -837,7 +831,7 @@ mod tests {
     }
 
     #[tokio::test]
-    async fn test_query_pretty() -> Result<()> {
+    async fn test_query_pretty() {
         let (client, server_url) = setup_test_data().await;
 
         // send query data
@@ -869,12 +863,10 @@ mod tests {
 
         assert_eq!(get_content_type(&response), "text/plain");
 
         check_response("query", response, StatusCode::OK, res).await;
-
-        Ok(())
     }
 
     #[tokio::test]
-    async fn test_query_csv() -> Result<()> {
+    async fn test_query_csv() {
         let (client, server_url) = setup_test_data().await;
 
         // send query data
@@ -891,12 +883,10 @@ mod tests {
         let res = "bottom_degrees,location,state,surface_degrees,time\n\
         50.4,santa_monica,CA,65.2,1568756160\n";
         check_response("query", response, StatusCode::OK, res).await;
-
-        Ok(())
     }
 
#[tokio::test]
-    async fn test_query_json() -> Result<()> {
+    async fn test_query_json() {
         let (client, server_url) = setup_test_data().await;
 
         // send a second line of data to demonstrate how that works
@@ -930,8 +920,6 @@ mod tests {
         // Note two json records: one record on each line
         let res = r#"[{"bottom_degrees":50.4,"location":"santa_monica","state":"CA","surface_degrees":65.2,"time":1568756160},{"location":"Boston","state":"MA","surface_degrees":50.2,"time":1568756160}]"#;
         check_response("query", response, StatusCode::OK, res).await;
-
-        Ok(())
     }
 
     fn gzip_str(s: &str) -> Vec<u8> {
@@ -943,7 +931,7 @@ mod tests {
     }
 
     #[tokio::test]
-    async fn test_gzip_write() -> Result<()> {
+    async fn test_gzip_write() {
         let test_storage = Arc::new(AppServer::new(
             ConnectionManagerImpl {},
             Arc::new(ObjectStore::new_in_memory(InMemory::new())),
@@ -990,8 +978,6 @@ mod tests {
             "+----------------+--------------+-------+-----------------+------------+",
         ];
         assert_table_eq!(expected, &batches);
-
-        Ok(())
     }
 
     #[tokio::test]
diff --git a/src/influxdb_ioxd/rpc/storage/service.rs b/src/influxdb_ioxd/rpc/storage/service.rs
index d64037db83..94ea311961 100644
--- a/src/influxdb_ioxd/rpc/storage/service.rs
+++ b/src/influxdb_ioxd/rpc/storage/service.rs
@@ -1091,7 +1091,7 @@ mod tests {
     }
 
     #[tokio::test]
-    async fn test_storage_rpc_capabilities() -> Result<(), tonic::Status> {
+    async fn test_storage_rpc_capabilities() {
         // Start a test gRPC server on a randomly allocated port
         let mut fixture = Fixture::new().await.expect("Connecting to test server");
 
@@ -1106,10 +1106,8 @@ mod tests {
 
         assert_eq!(
             expected_capabilities,
-            fixture.storage_client.capabilities().await?
+            fixture.storage_client.capabilities().await.unwrap()
         );
-
-        Ok(())
     }
 
     #[tokio::test]
@@ -1196,7 +1194,7 @@ mod tests {
     /// the right parameters are passed into the Database interface
     /// and that the returned values are sent back via gRPC.
     #[tokio::test]
-    async fn test_storage_rpc_tag_keys() -> Result<(), tonic::Status> {
+    async fn test_storage_rpc_tag_keys() {
         // Start a test gRPC server on a randomly allocated port
         let mut fixture = Fixture::new().await.expect("Connecting to test server");
 
@@ -1229,7 +1227,7 @@ mod tests {
             predicate: make_state_ma_predicate(),
         };
 
-        let actual_tag_keys = fixture.storage_client.tag_keys(request).await?;
+        let actual_tag_keys = fixture.storage_client.tag_keys(request).await.unwrap();
 
         let expected_tag_keys = vec!["_f(0xff)", "_m(0x00)", "k1", "k2", "k3", "k4"];
         assert_eq!(actual_tag_keys, expected_tag_keys,);
@@ -1256,11 +1254,10 @@ mod tests {
             "\nActual: {:?}\nExpected: {:?}",
             actual_predicate, expected_predicate
         );
-        Ok(())
     }
 
     #[tokio::test]
-    async fn test_storage_rpc_tag_keys_error() -> Result<(), tonic::Status> {
+    async fn test_storage_rpc_tag_keys_error() {
         // Start a test gRPC server on a randomly allocated port
         let mut fixture = Fixture::new().await.expect("Connecting to test server");
 
@@ -1293,15 +1290,13 @@ mod tests {
 
         let response = fixture.storage_client.tag_keys(request).await;
 
         assert_contains!(response.unwrap_err().to_string(), "Sugar we are going down");
-
-        Ok(())
     }
 
     /// test the plumbing of the RPC layer for measurement_tag_keys--
     /// specifically that the right parameters are passed into the Database
     /// interface and that the returned values are sent back via gRPC.
#[tokio::test]
-    async fn test_storage_rpc_measurement_tag_keys() -> Result<(), tonic::Status> {
+    async fn test_storage_rpc_measurement_tag_keys() {
         test_helpers::maybe_start_logging();
         // Start a test gRPC server on a randomly allocated port
         let mut fixture = Fixture::new().await.expect("Connecting to test server");
 
@@ -1340,7 +1335,11 @@ mod tests {
             predicate: make_state_ma_predicate(),
         };
 
-        let actual_tag_keys = fixture.storage_client.measurement_tag_keys(request).await?;
+        let actual_tag_keys = fixture
+            .storage_client
+            .measurement_tag_keys(request)
+            .await
+            .unwrap();
 
         let expected_tag_keys = vec!["_f(0xff)", "_m(0x00)", "k1", "k2", "k3", "k4"];
         assert_eq!(
@@ -1371,11 +1370,10 @@ mod tests {
             "\nActual: {:?}\nExpected: {:?}",
             actual_predicate, expected_predicate
         );
-        Ok(())
     }
 
     #[tokio::test]
-    async fn test_storage_rpc_measurement_tag_keys_error() -> Result<(), tonic::Status> {
+    async fn test_storage_rpc_measurement_tag_keys_error() {
         test_helpers::maybe_start_logging();
         // Start a test gRPC server on a randomly allocated port
         let mut fixture = Fixture::new().await.expect("Connecting to test server");
 
@@ -1412,8 +1410,6 @@ mod tests {
 
         let response = fixture.storage_client.measurement_tag_keys(request).await;
 
         assert_contains!(response.unwrap_err().to_string(), "This is an error");
-
-        Ok(())
     }
 
     /// test the plumbing of the RPC layer for tag_keys -- specifically that
@@ -1710,7 +1706,7 @@ mod tests {
     }
 
     #[tokio::test]
-    async fn test_log_on_panic() -> Result<(), tonic::Status> {
+    async fn test_log_on_panic() {
         // Send a message to a route that causes a panic and ensure:
         // 1. We don't use up all executors 2. The panic message
         // ends up in the log system
@@ -1759,10 +1755,8 @@ mod tests {
         }
 
         // Ensure there are still threads to answer actual client queries
-        let caps = fixture.storage_client.capabilities().await?;
+        let caps = fixture.storage_client.capabilities().await.unwrap();
         assert!(!caps.is_empty(), "Caps: {:?}", caps);
-
-        Ok(())
     }
 
     #[tokio::test]
diff --git a/wal/src/lib.rs b/wal/src/lib.rs
index db31a1619f..132ec37e3d 100644
--- a/wal/src/lib.rs
+++ b/wal/src/lib.rs
@@ -686,47 +686,42 @@ impl WritePayload {
 mod tests {
     use super::*;
 
-    type TestError = Box;
-    type Result = std::result::Result;
-
     #[test]
-    fn sequence_numbers_are_persisted() -> Result {
-        let dir = test_helpers::tmp_dir()?;
+    fn sequence_numbers_are_persisted() {
+        let dir = test_helpers::tmp_dir().unwrap();
         let builder = WalBuilder::new(dir.as_ref());
         let mut wal;
 
         // Create one in-memory WAL and sync it
         {
-            wal = builder.clone().wal()?;
+            wal = builder.clone().wal().unwrap();
 
             let data = Vec::from("somedata");
-            let data = WritePayload::new(data)?;
-            let seq = wal.append(data)?;
+            let data = WritePayload::new(data).unwrap();
+            let seq = wal.append(data).unwrap();
             assert_eq!(0, seq);
-            wal.sync_all()?;
+            wal.sync_all().unwrap();
         }
 
         // Pretend the process restarts
         {
-            wal = builder.wal()?;
+            wal = builder.wal().unwrap();
 
             assert_eq!(1, wal.sequence_number);
         }
-
-        Ok(())
     }
 
     #[test]
-    fn sequence_numbers_increase_by_number_of_pending_entries() -> Result {
-        let dir = test_helpers::tmp_dir()?;
+    fn sequence_numbers_increase_by_number_of_pending_entries() {
+        let dir = test_helpers::tmp_dir().unwrap();
         let builder = WalBuilder::new(dir.as_ref());
-        let mut wal = builder.wal()?;
+        let mut wal = builder.wal().unwrap();
 
         // Write 1 entry then sync
         let data = Vec::from("some");
-        let data = WritePayload::new(data)?;
-        let seq = wal.append(data)?;
-        wal.sync_all()?;
+        let data = WritePayload::new(data).unwrap();
+        
let seq = wal.append(data).unwrap();
+        wal.sync_all().unwrap();
         assert_eq!(0, seq);
 
         // Sequence number should increase by 1
@@ -734,19 +729,17 @@ mod tests {
 
         // Write 2 entries then sync
         let data = Vec::from("other");
-        let data = WritePayload::new(data)?;
-        let seq = wal.append(data)?;
+        let data = WritePayload::new(data).unwrap();
+        let seq = wal.append(data).unwrap();
         assert_eq!(1, seq);
 
         let data = Vec::from("again");
-        let data = WritePayload::new(data)?;
-        let seq = wal.append(data)?;
+        let data = WritePayload::new(data).unwrap();
+        let seq = wal.append(data).unwrap();
         assert_eq!(2, seq);
-        wal.sync_all()?;
+        wal.sync_all().unwrap();
 
         // Sequence number should increase by 2
         assert_eq!(3, wal.sequence_number);
-
-        Ok(())
     }
 }
diff --git a/wal/tests/no_concurrency.rs b/wal/tests/no_concurrency.rs
index 35c40c267c..cf1299b3c8 100644
--- a/wal/tests/no_concurrency.rs
+++ b/wal/tests/no_concurrency.rs
@@ -1,26 +1,21 @@
 use wal::{WalBuilder, WritePayload};
 
-type TestError = Box;
-type Result = std::result::Result;
-
 #[test]
-fn no_concurrency() -> Result {
-    let dir = test_helpers::tmp_dir()?;
+fn no_concurrency() {
+    let dir = test_helpers::tmp_dir().unwrap();
     let builder = WalBuilder::new(dir.as_ref());
-    let mut wal = builder.clone().wal()?;
+    let mut wal = builder.clone().wal().unwrap();
 
     let data = Vec::from("somedata");
-    let payload = WritePayload::new(data)?;
-    let sequence_number = wal.append(payload)?;
-    wal.sync_all()?;
+    let payload = WritePayload::new(data).unwrap();
+    let sequence_number = wal.append(payload).unwrap();
+    wal.sync_all().unwrap();
     assert_eq!(0, sequence_number);
 
-    let wal_entries: Result<Vec<_>, _> = builder.entries()?.collect();
-    let wal_entries = wal_entries?;
+    let wal_entries: Result<Vec<_>, _> = builder.entries().unwrap().collect();
+    let wal_entries = wal_entries.unwrap();
     assert_eq!(1, wal_entries.len());
     assert_eq!(b"somedata".as_ref(), wal_entries[0].as_data());
     assert_eq!(0, wal_entries[0].sequence_number());
-
-    Ok(())
 }
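
Every hunk above applies the same mechanical rewrite: a test that returned Result so its body could use `?` now calls .unwrap() on each fallible step, and the trailing Ok(()) disappears. A minimal standalone sketch of the two styles follows; the parse_port helper is hypothetical and not taken from this codebase. The benefit of the second form is that a failure panics with the file and line of the failing call, rather than the harness only reporting that the test returned an error.

    // Hypothetical helper, used only to illustrate the pattern; not part of this patch.
    fn parse_port(s: &str) -> Result<u16, std::num::ParseIntError> {
        s.parse()
    }

    #[cfg(test)]
    mod tests {
        use super::parse_port;

        // Before: returning Result lets the body use `?`, but a failure is
        // surfaced only as "the test returned Err", with no source location.
        #[test]
        fn result_style() -> Result<(), std::num::ParseIntError> {
            let port = parse_port("8080")?;
            assert_eq!(8080, port);
            Ok(())
        }

        // After: the style this patch converts the suite to; `.unwrap()`
        // panics at the exact failing line, so the test output points at it.
        #[test]
        fn unwrap_style() {
            let port = parse_port("8080").unwrap();
            assert_eq!(8080, port);
        }
    }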