From 1a4b2eac26e24cad371bad938732fd7e04fea5ea Mon Sep 17 00:00:00 2001 From: Brandon Sov <9397850+brandonsov@users.noreply.github.com> Date: Tue, 8 Dec 2020 22:29:28 -0800 Subject: [PATCH 01/10] fix: Report bucket/location when relevant with object store errors --- object_store/src/lib.rs | 495 +++++++++++++++++++++++++++++++++++++--- 1 file changed, 468 insertions(+), 27 deletions(-) diff --git a/object_store/src/lib.rs b/object_store/src/lib.rs index 8279f43b45..1c52716dba 100644 --- a/object_store/src/lib.rs +++ b/object_store/src/lib.rs @@ -153,19 +153,22 @@ impl GoogleCloudStorage { } ); - let location = location.to_string(); + let location_string = location.to_string(); let bucket_name = self.bucket_name.clone(); let _ = tokio::task::spawn_blocking(move || { cloud_storage::Object::create( &bucket_name, &temporary_non_streaming, - &location, + &location_string, "application/octet-stream", ) }) .await - .context(UnableToPutDataToGcs)?; + .context(UnableToPutDataToGcs { + bucket: self.bucket_name.clone(), + location, + })?; Ok(()) } @@ -174,28 +177,42 @@ impl GoogleCloudStorage { &self, location: &str, ) -> InternalResult>> { - let location = location.to_string(); + let location_string = location.to_string(); let bucket_name = self.bucket_name.clone(); let bytes = tokio::task::spawn_blocking(move || { - cloud_storage::Object::download(&bucket_name, &location) + cloud_storage::Object::download(&bucket_name, &location_string) }) .await - .context(UnableToGetDataFromGcs)? - .context(UnableToGetDataFromGcs2)?; + .context(UnableToGetDataFromGcs { + bucket: self.bucket_name.clone(), + location, + })? + .context(UnableToGetDataFromGcs2 { + bucket: self.bucket_name.clone(), + location, + })?; Ok(futures::stream::once(async move { Ok(bytes.into()) })) } /// Delete the object at the specified location. async fn delete(&self, location: &str) -> InternalResult<()> { - let location = location.to_string(); + let location_string = location.to_string(); let bucket_name = self.bucket_name.clone(); - tokio::task::spawn_blocking(move || cloud_storage::Object::delete(&bucket_name, &location)) - .await - .context(UnableToDeleteDataFromGcs)? - .context(UnableToDeleteDataFromGcs2)?; + tokio::task::spawn_blocking(move || { + cloud_storage::Object::delete(&bucket_name, &location_string) + }) + .await + .context(UnableToDeleteDataFromGcs { + bucket: self.bucket_name.clone(), + location, + })? + .context(UnableToDeleteDataFromGcs2 { + bucket: self.bucket_name.clone(), + location, + })?; Ok(()) } @@ -213,8 +230,12 @@ impl GoogleCloudStorage { None => cloud_storage::Object::list(&bucket_name), }) .await - .context(UnableToListDataFromGcs)? - .context(UnableToListDataFromGcs2)?; + .context(UnableToListDataFromGcs { + bucket: self.bucket_name.clone(), + })? + .context(UnableToListDataFromGcs2 { + bucket: self.bucket_name.clone(), + })?; Ok(futures::stream::once(async move { Ok(objects.into_iter().map(|o| o.name).collect()) @@ -272,7 +293,13 @@ impl AmazonS3 { ..Default::default() }; - self.client.put_object(put_request).await?; + self.client + .put_object(put_request) + .await + .context(UnableToPutDataToS3 { + bucket: self.bucket_name.to_owned(), + location: location.to_string(), + })?; Ok(()) } @@ -289,10 +316,20 @@ impl AmazonS3 { Ok(self .client .get_object(get_request) - .await? + .await + .context(UnableToGetDataFromS3 { + bucket: self.bucket_name.to_owned(), + location: location.to_owned(), + })? .body - .context(NoDataFromS3)? 
- .context(UnableToGetPieceOfDataFromS3) + .context(NoDataFromS3 { + bucket: self.bucket_name.to_owned(), + location: location.to_string(), + })? + .context(UnableToGetPieceOfDataFromS3 { + bucket: self.bucket_name.to_owned(), + location: location.to_string(), + }) .err_into()) } @@ -304,7 +341,13 @@ impl AmazonS3 { ..Default::default() }; - self.client.delete_object(delete_request).await?; + self.client + .delete_object(delete_request) + .await + .context(UnableToDeleteDataFromS3 { + bucket: self.bucket_name.to_owned(), + location: location.to_owned(), + })?; Ok(()) } @@ -342,7 +385,15 @@ impl AmazonS3 { let resp = match self.client.list_objects_v2(list_request).await { Ok(resp) => resp, - Err(e) => return Some((Err(e.into()), state)), + Err(e) => { + return Some(( + Err(InternalError::UnableToListDataFromS3 { + source: e, + bucket: self.bucket_name.clone(), + }), + state, + )) + } }; let contents = resp.contents.unwrap_or_default(); @@ -589,15 +640,22 @@ impl Error { match self.0 { UnableToPutDataToS3 { source: RusotoError::Credentials(_), + bucket: _, + location: _, } => true, UnableToGetDataFromS3 { source: RusotoError::Credentials(_), + bucket: _, + location: _, } => true, UnableToDeleteDataFromS3 { source: RusotoError::Credentials(_), + bucket: _, + location: _, } => true, UnableToListDataFromS3 { source: RusotoError::Credentials(_), + bucket: _, } => true, _ => false, } @@ -613,49 +671,70 @@ enum InternalError { UnableToPutDataToGcs { source: tokio::task::JoinError, + bucket: String, + location: String, }, UnableToListDataFromGcs { source: tokio::task::JoinError, + bucket: String, }, UnableToListDataFromGcs2 { source: cloud_storage::Error, + bucket: String, }, UnableToDeleteDataFromGcs { source: tokio::task::JoinError, + bucket: String, + location: String, }, UnableToDeleteDataFromGcs2 { source: cloud_storage::Error, + bucket: String, + location: String, }, UnableToGetDataFromGcs { source: tokio::task::JoinError, + bucket: String, + location: String, }, UnableToGetDataFromGcs2 { source: cloud_storage::Error, + bucket: String, + location: String, }, - #[snafu(context(false))] UnableToPutDataToS3 { source: rusoto_core::RusotoError, + bucket: String, + location: String, }, - #[snafu(context(false))] UnableToGetDataFromS3 { source: rusoto_core::RusotoError, + bucket: String, + location: String, }, - #[snafu(context(false))] UnableToDeleteDataFromS3 { source: rusoto_core::RusotoError, + bucket: String, + location: String, + }, + NoDataFromS3 { + bucket: String, + location: String, }, - NoDataFromS3, UnableToReadBytesFromS3 { source: std::io::Error, + bucket: String, + location: String, }, UnableToGetPieceOfDataFromS3 { source: std::io::Error, + bucket: String, + location: String, }, - - #[snafu(context(false))] UnableToListDataFromS3 { source: rusoto_core::RusotoError, + bucket: String, }, UnableToPutDataInMemory { @@ -721,6 +800,7 @@ mod tests { ) }; } + const NON_EXISTANT_NAME: &str = "nonexistantname"; async fn flatten_list_stream( storage: &ObjectStore, @@ -779,8 +859,26 @@ mod tests { Ok(()) } + async fn get_nonexistant_object( + storage: &ObjectStore, + location: Option<&str>, + ) -> Result { + let location = location.unwrap_or("this_file_should_not_exist"); + + let content_list = flatten_list_stream(storage, Some(location)).await?; + assert!(content_list.is_empty()); + + Ok(storage + .get(location) + .await? + .map_ok(|b| bytes::BytesMut::from(&b[..])) + .try_concat() + .await? 
+ .freeze()) + } + // Tests TODO: - // GET nonexisting location + // GET nonexisting location (in_memory/file) // DELETE nonexisting location // PUT overwriting @@ -807,6 +905,120 @@ mod tests { put_get_delete_list(&integration).await?; Ok(()) } + + #[tokio::test] + async fn gcs_test_get_nonexistant_location() -> Result<()> { + let bucket_name = bucket_name()?; + let location_name = NON_EXISTANT_NAME; + let integration = + ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new(&bucket_name)); + + let result = get_nonexistant_object(&integration, Some(location_name)) + .await + .unwrap(); + + assert_eq!( + result, + Bytes::from(format!("No such object: {}/{}", bucket_name, location_name)) + ); + + Ok(()) + } + + #[tokio::test] + async fn gcs_test_get_nonexistant_bucket() -> Result<()> { + let bucket_name = NON_EXISTANT_NAME; + let location_name = NON_EXISTANT_NAME; + let integration = + ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new(bucket_name)); + + let result = get_nonexistant_object(&integration, Some(location_name)) + .await + .unwrap(); + + assert_eq!(result, Bytes::from("Not Found")); + + Ok(()) + } + + #[tokio::test] + async fn gcs_test_delete_nonexistant_location() -> Result<()> { + let bucket_name = bucket_name()?; + let location_name = NON_EXISTANT_NAME; + let integration = + ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new(&bucket_name)); + + let result = integration.delete(location_name).await; + + if let Err(e) = result { + if let Error(InternalError::UnableToDeleteDataFromGcs2 { + source, + bucket, + location, + }) = e + { + assert!(matches!(source, cloud_storage::Error::Google(_))); + assert_eq!(bucket, bucket_name); + assert_eq!(location, location_name); + } else { + panic!() + } + } else { + panic!() + } + + Ok(()) + } + + #[tokio::test] + async fn gcs_test_delete_nonexistant_bucket() -> Result<()> { + let bucket_name = NON_EXISTANT_NAME; + let location_name = NON_EXISTANT_NAME; + let integration = + ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new(bucket_name)); + + let result = integration.delete(location_name).await; + + if let Err(e) = result { + if let Error(InternalError::UnableToDeleteDataFromGcs2 { + source, + bucket, + location, + }) = e + { + assert!(matches!(source, cloud_storage::Error::Google(_))); + assert_eq!(bucket, bucket_name); + assert_eq!(location, location_name); + } else { + panic!() + } + } else { + panic!() + } + + Ok(()) + } + + #[tokio::test] + async fn gcs_test_put_nonexistant_bucket() -> Result<()> { + let bucket_name = NON_EXISTANT_NAME; + let location_name = NON_EXISTANT_NAME; + let integration = + ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new(bucket_name)); + let data = Bytes::from("arbitrary data"); + let stream_data = std::io::Result::Ok(data.clone()); + + let result = integration + .put( + location_name, + futures::stream::once(async move { stream_data }), + data.len(), + ) + .await; + assert!(result.is_ok()); + + Ok(()) + } } #[cfg(test_aws)] @@ -825,6 +1037,235 @@ mod tests { Ok(()) } + #[tokio::test] + async fn s3_test_get_nonexistant_region() -> Result<()> { + // Assumes environment variables do not provide credentials to AWS US West 1 + let (_, bucket_name) = region_and_bucket_name()?; + let region = rusoto_core::Region::UsWest1; + let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, &bucket_name)); + let location_name = NON_EXISTANT_NAME; + + let result = get_nonexistant_object(&integration, Some(location_name)).await; + if let Err(e) = result { + if let 
Some(Error(InternalError::UnableToListDataFromS3 { source, bucket })) = + e.downcast_ref::() + { + assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); + assert_eq!(bucket, &bucket_name); + } else { + panic!() + } + } else { + panic!() + } + + Ok(()) + } + + #[tokio::test] + async fn s3_test_get_nonexistant_location() -> Result<()> { + let (region, bucket_name) = region_and_bucket_name()?; + let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, &bucket_name)); + let location_name = NON_EXISTANT_NAME; + + let result = get_nonexistant_object(&integration, Some(location_name)).await; + if let Err(e) = result { + if let Some(Error(InternalError::UnableToGetDataFromS3 { + source, + bucket, + location, + })) = e.downcast_ref::() + { + assert!(matches!( + source, + rusoto_core::RusotoError::Service(rusoto_s3::GetObjectError::NoSuchKey(_)) + )); + assert_eq!(bucket, &bucket_name); + assert_eq!(location, location_name); + } else { + panic!() + } + } else { + panic!() + } + + Ok(()) + } + + #[tokio::test] + async fn s3_test_get_nonexistant_bucket() -> Result<()> { + let (region, _) = region_and_bucket_name()?; + let bucket_name = NON_EXISTANT_NAME; + let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, bucket_name)); + let location_name = NON_EXISTANT_NAME; + + let result = get_nonexistant_object(&integration, Some(location_name)).await; + if let Err(e) = result { + if let Some(Error(InternalError::UnableToListDataFromS3 { source, bucket })) = + e.downcast_ref::() + { + assert!(matches!( + source, + rusoto_core::RusotoError::Service( + rusoto_s3::ListObjectsV2Error::NoSuchBucket(_), + ) + )); + assert_eq!(bucket, &bucket_name); + } else { + panic!() + } + } else { + panic!() + } + + Ok(()) + } + + #[tokio::test] + async fn s3_test_put_nonexistant_region() -> Result<()> { + // Assumes environment variables do not provide credentials to AWS US West 1 + let (_, bucket_name) = region_and_bucket_name()?; + let region = rusoto_core::Region::UsWest1; + let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, &bucket_name)); + let location_name = NON_EXISTANT_NAME; + let data = Bytes::from("arbitrary data"); + let stream_data = std::io::Result::Ok(data.clone()); + + let result = integration + .put( + location_name, + futures::stream::once(async move { stream_data }), + data.len(), + ) + .await; + + if let Err(e) = result { + if let Error(InternalError::UnableToPutDataToS3 { + source, + bucket, + location, + }) = e + { + assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); + assert_eq!(bucket, bucket_name); + assert_eq!(location, location_name); + } else { + panic!() + } + } else { + panic!() + } + + Ok(()) + } + + #[tokio::test] + async fn s3_test_put_nonexistant_bucket() -> Result<()> { + let (region, _) = region_and_bucket_name()?; + let bucket_name = NON_EXISTANT_NAME; + let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, bucket_name)); + let location_name = NON_EXISTANT_NAME; + let data = Bytes::from("arbitrary data"); + let stream_data = std::io::Result::Ok(data.clone()); + + let result = integration + .put( + location_name, + futures::stream::once(async move { stream_data }), + data.len(), + ) + .await; + + if let Err(e) = result { + if let Error(InternalError::UnableToPutDataToS3 { + source, + bucket, + location, + }) = e + { + assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); + assert_eq!(bucket, bucket_name); + assert_eq!(location, location_name); + } else { + panic!() + } + } else { + panic!() + } + + Ok(()) 
+ } + + #[tokio::test] + async fn s3_test_delete_nonexistant_location() -> Result<()> { + let (region, bucket_name) = region_and_bucket_name()?; + let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, &bucket_name)); + let location_name = NON_EXISTANT_NAME; + + let result = integration.delete(location_name).await; + + assert!(result.is_ok()); + + Ok(()) + } + + #[tokio::test] + async fn s3_test_delete_nonexistant_region() -> Result<()> { + // Assumes environment variables do not provide credentials to AWS US West 1 + let (_, bucket_name) = region_and_bucket_name()?; + let region = rusoto_core::Region::UsWest1; + let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, &bucket_name)); + let location_name = NON_EXISTANT_NAME; + + let result = integration.delete(location_name).await; + if let Err(e) = result { + if let Error(InternalError::UnableToDeleteDataFromS3 { + source, + bucket, + location, + }) = e + { + assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); + assert_eq!(bucket, bucket_name); + assert_eq!(location, location_name); + } else { + panic!() + } + } else { + panic!() + } + + Ok(()) + } + + #[tokio::test] + async fn s3_test_delete_nonexistant_bucket() -> Result<()> { + let (region, _) = region_and_bucket_name()?; + let bucket_name = NON_EXISTANT_NAME; + let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, bucket_name)); + let location_name = NON_EXISTANT_NAME; + + let result = integration.delete(location_name).await; + if let Err(e) = result { + if let Error(InternalError::UnableToDeleteDataFromS3 { + source, + bucket, + location, + }) = e + { + assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); + assert_eq!(bucket, bucket_name); + assert_eq!(location, location_name); + } else { + panic!() + } + } else { + panic!() + } + + Ok(()) + } + fn region_and_bucket_name() -> Result<(rusoto_core::Region, String)> { dotenv::dotenv().ok(); From 989d0ecad8bf84ae1329ab4644a363df149b9b76 Mon Sep 17 00:00:00 2001 From: Brandon Sov <9397850+brandonsov@users.noreply.github.com> Date: Tue, 8 Dec 2020 22:29:35 -0800 Subject: [PATCH 02/10] refactor: set valid format for default s3 bucket name example --- docs/env.example | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/env.example b/docs/env.example index 87be812bdd..1d476f8ffa 100644 --- a/docs/env.example +++ b/docs/env.example @@ -13,7 +13,7 @@ # AWS_ACCESS_KEY_ID=access_key_value # AWS_SECRET_ACCESS_KEY=secret_access_key_value # AWS_DEFAULT_REGION=us-east-2 -# AWS_S3_BUCKET_NAME=bucket_name +# AWS_S3_BUCKET_NAME=bucket-name # # If using Google Cloud Storage as an object store: # GCS_BUCKET_NAME=bucket_name From 62c14de2bc037947948f91d23436ad6c37601e9a Mon Sep 17 00:00:00 2001 From: Brandon Sov <9397850+brandonsov@users.noreply.github.com> Date: Tue, 8 Dec 2020 22:50:23 -0800 Subject: [PATCH 03/10] fix: Update pattern match to detect String --- object_store/src/lib.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/object_store/src/lib.rs b/object_store/src/lib.rs index 1c52716dba..ab97f533db 100644 --- a/object_store/src/lib.rs +++ b/object_store/src/lib.rs @@ -640,22 +640,22 @@ impl Error { match self.0 { UnableToPutDataToS3 { source: RusotoError::Credentials(_), - bucket: _, - location: _, + bucket: String(_), + location: String(_), } => true, UnableToGetDataFromS3 { source: RusotoError::Credentials(_), - bucket: _, - location: _, + bucket: String(_), + location: String(_), } => true, UnableToDeleteDataFromS3 { source: 
RusotoError::Credentials(_), - bucket: _, - location: _, + bucket: String(_), + location: String(_), } => true, UnableToListDataFromS3 { source: RusotoError::Credentials(_), - bucket: _, + bucket: String(_), } => true, _ => false, } From 4be47b1ccc1994871ffbaf22962d5dbbe52c7273 Mon Sep 17 00:00:00 2001 From: Brandon Sov <9397850+brandonsov@users.noreply.github.com> Date: Tue, 8 Dec 2020 23:34:52 -0800 Subject: [PATCH 04/10] fix: Move functions to the conditional compilation flag to pass linter --- object_store/src/lib.rs | 107 ++++++++++++++++++++++++---------------- 1 file changed, 64 insertions(+), 43 deletions(-) diff --git a/object_store/src/lib.rs b/object_store/src/lib.rs index ab97f533db..038e75fc59 100644 --- a/object_store/src/lib.rs +++ b/object_store/src/lib.rs @@ -800,7 +800,6 @@ mod tests { ) }; } - const NON_EXISTANT_NAME: &str = "nonexistantname"; async fn flatten_list_stream( storage: &ObjectStore, @@ -859,24 +858,6 @@ mod tests { Ok(()) } - async fn get_nonexistant_object( - storage: &ObjectStore, - location: Option<&str>, - ) -> Result { - let location = location.unwrap_or("this_file_should_not_exist"); - - let content_list = flatten_list_stream(storage, Some(location)).await?; - assert!(content_list.is_empty()); - - Ok(storage - .get(location) - .await? - .map_ok(|b| bytes::BytesMut::from(&b[..])) - .try_concat() - .await? - .freeze()) - } - // Tests TODO: // GET nonexisting location (in_memory/file) // DELETE nonexisting location @@ -888,6 +869,8 @@ mod tests { use super::*; + const NON_EXISTANT_NAME: &str = "nonexistantname"; + fn bucket_name() -> Result { dotenv::dotenv().ok(); let bucket_name = env::var("GCS_BUCKET_NAME") @@ -896,6 +879,24 @@ mod tests { Ok(bucket_name) } + async fn get_nonexistant_object( + storage: &ObjectStore, + location: Option<&str>, + ) -> Result { + let location = location.unwrap_or("this_file_should_not_exist"); + + let content_list = flatten_list_stream(storage, Some(location)).await?; + assert!(content_list.is_empty()); + + Ok(storage + .get(location) + .await? + .map_ok(|b| bytes::BytesMut::from(&b[..])) + .try_concat() + .await? + .freeze()) + } + #[tokio::test] async fn gcs_test() -> Result<()> { let bucket_name = bucket_name()?; @@ -1027,6 +1028,50 @@ mod tests { use super::*; + const NON_EXISTANT_NAME: &str = "nonexistantname"; + + fn region_and_bucket_name() -> Result<(rusoto_core::Region, String)> { + dotenv::dotenv().ok(); + + let region = env::var("AWS_DEFAULT_REGION") + .map_err(|_| "The environment variable AWS_DEFAULT_REGION must be set to a value like `us-east-2`")?; + let bucket_name = env::var("AWS_S3_BUCKET_NAME") + .map_err(|_| "The environment variable AWS_S3_BUCKET_NAME must be set")?; + + Ok((region.parse()?, bucket_name)) + } + + fn check_credentials(r: Result) -> Result { + if let Err(e) = &r { + let e = &**e; + if let Some(e) = e.downcast_ref::() { + if e.s3_error_due_to_credentials() { + eprintln!("Try setting the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables"); + } + } + } + + r + } + + async fn get_nonexistant_object( + storage: &ObjectStore, + location: Option<&str>, + ) -> Result { + let location = location.unwrap_or("this_file_should_not_exist"); + + let content_list = flatten_list_stream(storage, Some(location)).await?; + assert!(content_list.is_empty()); + + Ok(storage + .get(location) + .await? + .map_ok(|b| bytes::BytesMut::from(&b[..])) + .try_concat() + .await? 
+ .freeze()) + } + #[tokio::test] async fn s3_test() -> Result<()> { let (region, bucket_name) = region_and_bucket_name()?; @@ -1265,30 +1310,6 @@ mod tests { Ok(()) } - - fn region_and_bucket_name() -> Result<(rusoto_core::Region, String)> { - dotenv::dotenv().ok(); - - let region = env::var("AWS_DEFAULT_REGION") - .map_err(|_| "The environment variable AWS_DEFAULT_REGION must be set to a value like `us-east-2`")?; - let bucket_name = env::var("AWS_S3_BUCKET_NAME") - .map_err(|_| "The environment variable AWS_S3_BUCKET_NAME must be set")?; - - Ok((region.parse()?, bucket_name)) - } - - fn check_credentials(r: Result) -> Result { - if let Err(e) = &r { - let e = &**e; - if let Some(e) = e.downcast_ref::() { - if e.s3_error_due_to_credentials() { - eprintln!("Try setting the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables"); - } - } - } - - r - } } mod in_memory { From 625542c31043573c74271c28bcfdc504cc5f636b Mon Sep 17 00:00:00 2001 From: Brandon Sov <9397850+brandonsov@users.noreply.github.com> Date: Wed, 9 Dec 2020 10:14:50 -0800 Subject: [PATCH 05/10] fix: Update s3 error function to correct pattern --- object_store/src/lib.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/object_store/src/lib.rs b/object_store/src/lib.rs index 038e75fc59..733256f792 100644 --- a/object_store/src/lib.rs +++ b/object_store/src/lib.rs @@ -640,22 +640,22 @@ impl Error { match self.0 { UnableToPutDataToS3 { source: RusotoError::Credentials(_), - bucket: String(_), - location: String(_), + bucket: _, + location: _, } => true, UnableToGetDataFromS3 { source: RusotoError::Credentials(_), - bucket: String(_), - location: String(_), + bucket: _, + location: _, } => true, UnableToDeleteDataFromS3 { source: RusotoError::Credentials(_), - bucket: String(_), - location: String(_), + bucket: _, + location: _, } => true, UnableToListDataFromS3 { source: RusotoError::Credentials(_), - bucket: String(_), + bucket: _, } => true, _ => false, } From af8569378ff4e73b7b32fc3fce70c38d431874f6 Mon Sep 17 00:00:00 2001 From: Brandon Sov <9397850+brandonsov@users.noreply.github.com> Date: Wed, 9 Dec 2020 10:57:38 -0800 Subject: [PATCH 06/10] test: move common variable and function to general test usage --- object_store/src/lib.rs | 62 +++++++++++++++-------------------------- 1 file changed, 22 insertions(+), 40 deletions(-) diff --git a/object_store/src/lib.rs b/object_store/src/lib.rs index 733256f792..35a7117d22 100644 --- a/object_store/src/lib.rs +++ b/object_store/src/lib.rs @@ -791,6 +791,9 @@ mod tests { type Error = Box; type Result = std::result::Result; + #[cfg(any(test_aws, test_gcs))] + const NON_EXISTANT_NAME: &str = "nonexistantname"; + macro_rules! assert_error { ($res:expr, $error_pat:pat$(,)?) => { assert!( @@ -858,6 +861,25 @@ mod tests { Ok(()) } + #[cfg(any(test_aws, test_gcs))] + async fn get_nonexistant_object( + storage: &ObjectStore, + location: Option<&str>, + ) -> Result { + let location = location.unwrap_or("this_file_should_not_exist"); + + let content_list = flatten_list_stream(storage, Some(location)).await?; + assert!(content_list.is_empty()); + + Ok(storage + .get(location) + .await? + .map_ok(|b| bytes::BytesMut::from(&b[..])) + .try_concat() + .await? 
+ .freeze()) + } + // Tests TODO: // GET nonexisting location (in_memory/file) // DELETE nonexisting location @@ -869,8 +891,6 @@ mod tests { use super::*; - const NON_EXISTANT_NAME: &str = "nonexistantname"; - fn bucket_name() -> Result { dotenv::dotenv().ok(); let bucket_name = env::var("GCS_BUCKET_NAME") @@ -879,24 +899,6 @@ mod tests { Ok(bucket_name) } - async fn get_nonexistant_object( - storage: &ObjectStore, - location: Option<&str>, - ) -> Result { - let location = location.unwrap_or("this_file_should_not_exist"); - - let content_list = flatten_list_stream(storage, Some(location)).await?; - assert!(content_list.is_empty()); - - Ok(storage - .get(location) - .await? - .map_ok(|b| bytes::BytesMut::from(&b[..])) - .try_concat() - .await? - .freeze()) - } - #[tokio::test] async fn gcs_test() -> Result<()> { let bucket_name = bucket_name()?; @@ -1028,8 +1030,6 @@ mod tests { use super::*; - const NON_EXISTANT_NAME: &str = "nonexistantname"; - fn region_and_bucket_name() -> Result<(rusoto_core::Region, String)> { dotenv::dotenv().ok(); @@ -1054,24 +1054,6 @@ mod tests { r } - async fn get_nonexistant_object( - storage: &ObjectStore, - location: Option<&str>, - ) -> Result { - let location = location.unwrap_or("this_file_should_not_exist"); - - let content_list = flatten_list_stream(storage, Some(location)).await?; - assert!(content_list.is_empty()); - - Ok(storage - .get(location) - .await? - .map_ok(|b| bytes::BytesMut::from(&b[..])) - .try_concat() - .await? - .freeze()) - } - #[tokio::test] async fn s3_test() -> Result<()> { let (region, bucket_name) = region_and_bucket_name()?; From d179fe68d381d660e18c927a046ba85d995470a2 Mon Sep 17 00:00:00 2001 From: Brandon Sov <9397850+brandonsov@users.noreply.github.com> Date: Wed, 9 Dec 2020 11:03:19 -0800 Subject: [PATCH 07/10] refactor: replace bucket_name clones with references --- object_store/src/lib.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/object_store/src/lib.rs b/object_store/src/lib.rs index 35a7117d22..2a07ac1b4a 100644 --- a/object_store/src/lib.rs +++ b/object_store/src/lib.rs @@ -166,7 +166,7 @@ impl GoogleCloudStorage { }) .await .context(UnableToPutDataToGcs { - bucket: self.bucket_name.clone(), + bucket: &self.bucket_name, location, })?; @@ -185,11 +185,11 @@ impl GoogleCloudStorage { }) .await .context(UnableToGetDataFromGcs { - bucket: self.bucket_name.clone(), + bucket: &self.bucket_name, location, })? .context(UnableToGetDataFromGcs2 { - bucket: self.bucket_name.clone(), + bucket: &self.bucket_name, location, })?; @@ -206,11 +206,11 @@ impl GoogleCloudStorage { }) .await .context(UnableToDeleteDataFromGcs { - bucket: self.bucket_name.clone(), + bucket: &self.bucket_name, location, })? .context(UnableToDeleteDataFromGcs2 { - bucket: self.bucket_name.clone(), + bucket: &self.bucket_name, location, })?; @@ -231,10 +231,10 @@ impl GoogleCloudStorage { }) .await .context(UnableToListDataFromGcs { - bucket: self.bucket_name.clone(), + bucket: &self.bucket_name, })? 
.context(UnableToListDataFromGcs2 { - bucket: self.bucket_name.clone(), + bucket: &self.bucket_name, })?; Ok(futures::stream::once(async move { @@ -297,7 +297,7 @@ impl AmazonS3 { .put_object(put_request) .await .context(UnableToPutDataToS3 { - bucket: self.bucket_name.to_owned(), + bucket: &self.bucket_name, location: location.to_string(), })?; Ok(()) From 146bf59d8d82b547b41362a068124fb0c6f0de27 Mon Sep 17 00:00:00 2001 From: Brandon Sov <9397850+brandonsov@users.noreply.github.com> Date: Wed, 9 Dec 2020 11:16:53 -0800 Subject: [PATCH 08/10] test: simplify test error matching --- object_store/src/lib.rs | 244 ++++++++++++++++++---------------------- 1 file changed, 108 insertions(+), 136 deletions(-) diff --git a/object_store/src/lib.rs b/object_store/src/lib.rs index 2a07ac1b4a..0d72b5e87d 100644 --- a/object_store/src/lib.rs +++ b/object_store/src/lib.rs @@ -951,23 +951,19 @@ mod tests { let integration = ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new(&bucket_name)); - let result = integration.delete(location_name).await; + let err = integration.delete(location_name).await.unwrap_err(); - if let Err(e) = result { - if let Error(InternalError::UnableToDeleteDataFromGcs2 { - source, - bucket, - location, - }) = e - { - assert!(matches!(source, cloud_storage::Error::Google(_))); - assert_eq!(bucket, bucket_name); - assert_eq!(location, location_name); - } else { - panic!() - } + if let Error(InternalError::UnableToDeleteDataFromGcs2 { + source, + bucket, + location, + }) = err + { + assert!(matches!(source, cloud_storage::Error::Google(_))); + assert_eq!(bucket, bucket_name); + assert_eq!(location, location_name); } else { - panic!() + panic!("unexpected error type") } Ok(()) @@ -980,23 +976,19 @@ mod tests { let integration = ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new(bucket_name)); - let result = integration.delete(location_name).await; + let err = integration.delete(location_name).await.unwrap_err(); - if let Err(e) = result { - if let Error(InternalError::UnableToDeleteDataFromGcs2 { - source, - bucket, - location, - }) = e - { - assert!(matches!(source, cloud_storage::Error::Google(_))); - assert_eq!(bucket, bucket_name); - assert_eq!(location, location_name); - } else { - panic!() - } + if let Error(InternalError::UnableToDeleteDataFromGcs2 { + source, + bucket, + location, + }) = err + { + assert!(matches!(source, cloud_storage::Error::Google(_))); + assert_eq!(bucket, bucket_name); + assert_eq!(location, location_name); } else { - panic!() + panic!("unexpected error type") } Ok(()) @@ -1072,18 +1064,16 @@ mod tests { let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, &bucket_name)); let location_name = NON_EXISTANT_NAME; - let result = get_nonexistant_object(&integration, Some(location_name)).await; - if let Err(e) = result { - if let Some(Error(InternalError::UnableToListDataFromS3 { source, bucket })) = - e.downcast_ref::() - { - assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); - assert_eq!(bucket, &bucket_name); - } else { - panic!() - } + let err = get_nonexistant_object(&integration, Some(location_name)) + .await + .unwrap_err(); + if let Some(Error(InternalError::UnableToListDataFromS3 { source, bucket })) = + err.downcast_ref::() + { + assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); + assert_eq!(bucket, &bucket_name); } else { - panic!() + panic!("unexpected error type") } Ok(()) @@ -1095,25 +1085,23 @@ mod tests { let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, 
&bucket_name)); let location_name = NON_EXISTANT_NAME; - let result = get_nonexistant_object(&integration, Some(location_name)).await; - if let Err(e) = result { - if let Some(Error(InternalError::UnableToGetDataFromS3 { + let err = get_nonexistant_object(&integration, Some(location_name)) + .await + .unwrap_err(); + if let Some(Error(InternalError::UnableToGetDataFromS3 { + source, + bucket, + location, + })) = err.downcast_ref::() + { + assert!(matches!( source, - bucket, - location, - })) = e.downcast_ref::() - { - assert!(matches!( - source, - rusoto_core::RusotoError::Service(rusoto_s3::GetObjectError::NoSuchKey(_)) - )); - assert_eq!(bucket, &bucket_name); - assert_eq!(location, location_name); - } else { - panic!() - } + rusoto_core::RusotoError::Service(rusoto_s3::GetObjectError::NoSuchKey(_)) + )); + assert_eq!(bucket, &bucket_name); + assert_eq!(location, location_name); } else { - panic!() + panic!("unexpected error type") } Ok(()) @@ -1126,23 +1114,21 @@ mod tests { let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, bucket_name)); let location_name = NON_EXISTANT_NAME; - let result = get_nonexistant_object(&integration, Some(location_name)).await; - if let Err(e) = result { - if let Some(Error(InternalError::UnableToListDataFromS3 { source, bucket })) = - e.downcast_ref::() - { - assert!(matches!( - source, - rusoto_core::RusotoError::Service( - rusoto_s3::ListObjectsV2Error::NoSuchBucket(_), - ) - )); - assert_eq!(bucket, &bucket_name); - } else { - panic!() - } + let err = get_nonexistant_object(&integration, Some(location_name)) + .await + .unwrap_err(); + if let Some(Error(InternalError::UnableToListDataFromS3 { source, bucket })) = + e.downcast_ref::() + { + assert!(matches!( + source, + rusoto_core::RusotoError::Service( + rusoto_s3::ListObjectsV2Error::NoSuchBucket(_), + ) + )); + assert_eq!(bucket, &bucket_name); } else { - panic!() + panic!("unexpected error type") } Ok(()) @@ -1158,29 +1144,26 @@ mod tests { let data = Bytes::from("arbitrary data"); let stream_data = std::io::Result::Ok(data.clone()); - let result = integration + let err = integration .put( location_name, futures::stream::once(async move { stream_data }), data.len(), ) - .await; + .await + .unwrap_err(); - if let Err(e) = result { - if let Error(InternalError::UnableToPutDataToS3 { - source, - bucket, - location, - }) = e - { - assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); - assert_eq!(bucket, bucket_name); - assert_eq!(location, location_name); - } else { - panic!() - } + if let Error(InternalError::UnableToPutDataToS3 { + source, + bucket, + location, + }) = err + { + assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); + assert_eq!(bucket, bucket_name); + assert_eq!(location, location_name); } else { - panic!() + panic!("unexpected error type") } Ok(()) @@ -1195,29 +1178,26 @@ mod tests { let data = Bytes::from("arbitrary data"); let stream_data = std::io::Result::Ok(data.clone()); - let result = integration + let err = integration .put( location_name, futures::stream::once(async move { stream_data }), data.len(), ) - .await; + .await + .unwrap_err(); - if let Err(e) = result { - if let Error(InternalError::UnableToPutDataToS3 { - source, - bucket, - location, - }) = e - { - assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); - assert_eq!(bucket, bucket_name); - assert_eq!(location, location_name); - } else { - panic!() - } + if let Error(InternalError::UnableToPutDataToS3 { + source, + bucket, + location, + }) = err + { + 
assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); + assert_eq!(bucket, bucket_name); + assert_eq!(location, location_name); } else { - panic!() + panic!("unexpected error type") } Ok(()) @@ -1244,22 +1224,18 @@ mod tests { let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, &bucket_name)); let location_name = NON_EXISTANT_NAME; - let result = integration.delete(location_name).await; - if let Err(e) = result { - if let Error(InternalError::UnableToDeleteDataFromS3 { - source, - bucket, - location, - }) = e - { - assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); - assert_eq!(bucket, bucket_name); - assert_eq!(location, location_name); - } else { - panic!() - } + let err = integration.delete(location_name).await.unwrap_err(); + if let Error(InternalError::UnableToDeleteDataFromS3 { + source, + bucket, + location, + }) = err + { + assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); + assert_eq!(bucket, bucket_name); + assert_eq!(location, location_name); } else { - panic!() + panic!("unexpected error type") } Ok(()) @@ -1272,22 +1248,18 @@ mod tests { let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, bucket_name)); let location_name = NON_EXISTANT_NAME; - let result = integration.delete(location_name).await; - if let Err(e) = result { - if let Error(InternalError::UnableToDeleteDataFromS3 { - source, - bucket, - location, - }) = e - { - assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); - assert_eq!(bucket, bucket_name); - assert_eq!(location, location_name); - } else { - panic!() - } + let err = integration.delete(location_name).await.unwrap_err(); + if let Error(InternalError::UnableToDeleteDataFromS3 { + source, + bucket, + location, + }) = err + { + assert!(matches!(source, rusoto_core::RusotoError::Unknown(_))); + assert_eq!(bucket, bucket_name); + assert_eq!(location, location_name); } else { - panic!() + panic!("unexpected error type") } Ok(()) From 6247a011442dcf2381649a6abf004e7241a16293 Mon Sep 17 00:00:00 2001 From: Brandon Sov <9397850+brandonsov@users.noreply.github.com> Date: Thu, 10 Dec 2020 09:17:54 -0800 Subject: [PATCH 09/10] test: update typos --- object_store/src/lib.rs | 82 ++++++++++++++++++++--------------------- 1 file changed, 39 insertions(+), 43 deletions(-) diff --git a/object_store/src/lib.rs b/object_store/src/lib.rs index 0d72b5e87d..fef2c7b3c4 100644 --- a/object_store/src/lib.rs +++ b/object_store/src/lib.rs @@ -792,7 +792,7 @@ mod tests { type Result = std::result::Result; #[cfg(any(test_aws, test_gcs))] - const NON_EXISTANT_NAME: &str = "nonexistantname"; + const NON_EXISTENT_NAME: &str = "nonexistentname"; macro_rules! assert_error { ($res:expr, $error_pat:pat$(,)?) 
=> { @@ -862,7 +862,7 @@ mod tests { } #[cfg(any(test_aws, test_gcs))] - async fn get_nonexistant_object( + async fn get_nonexistent_object( storage: &ObjectStore, location: Option<&str>, ) -> Result { @@ -910,15 +910,13 @@ mod tests { } #[tokio::test] - async fn gcs_test_get_nonexistant_location() -> Result<()> { + async fn gcs_test_get_nonexistent_location() -> Result<()> { let bucket_name = bucket_name()?; - let location_name = NON_EXISTANT_NAME; + let location_name = NON_EXISTENT_NAME; let integration = ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new(&bucket_name)); - let result = get_nonexistant_object(&integration, Some(location_name)) - .await - .unwrap(); + let result = get_nonexistent_object(&integration, Some(location_name)).await?; assert_eq!( result, @@ -929,15 +927,13 @@ mod tests { } #[tokio::test] - async fn gcs_test_get_nonexistant_bucket() -> Result<()> { - let bucket_name = NON_EXISTANT_NAME; - let location_name = NON_EXISTANT_NAME; + async fn gcs_test_get_nonexistent_bucket() -> Result<()> { + let bucket_name = NON_EXISTENT_NAME; + let location_name = NON_EXISTENT_NAME; let integration = ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new(bucket_name)); - let result = get_nonexistant_object(&integration, Some(location_name)) - .await - .unwrap(); + let result = get_nonexistent_object(&integration, Some(location_name)).await?; assert_eq!(result, Bytes::from("Not Found")); @@ -945,9 +941,9 @@ mod tests { } #[tokio::test] - async fn gcs_test_delete_nonexistant_location() -> Result<()> { + async fn gcs_test_delete_nonexistent_location() -> Result<()> { let bucket_name = bucket_name()?; - let location_name = NON_EXISTANT_NAME; + let location_name = NON_EXISTENT_NAME; let integration = ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new(&bucket_name)); @@ -970,9 +966,9 @@ mod tests { } #[tokio::test] - async fn gcs_test_delete_nonexistant_bucket() -> Result<()> { - let bucket_name = NON_EXISTANT_NAME; - let location_name = NON_EXISTANT_NAME; + async fn gcs_test_delete_nonexistent_bucket() -> Result<()> { + let bucket_name = NON_EXISTENT_NAME; + let location_name = NON_EXISTENT_NAME; let integration = ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new(bucket_name)); @@ -995,9 +991,9 @@ mod tests { } #[tokio::test] - async fn gcs_test_put_nonexistant_bucket() -> Result<()> { - let bucket_name = NON_EXISTANT_NAME; - let location_name = NON_EXISTANT_NAME; + async fn gcs_test_put_nonexistent_bucket() -> Result<()> { + let bucket_name = NON_EXISTENT_NAME; + let location_name = NON_EXISTENT_NAME; let integration = ObjectStore::new_google_cloud_storage(GoogleCloudStorage::new(bucket_name)); let data = Bytes::from("arbitrary data"); @@ -1057,14 +1053,14 @@ mod tests { } #[tokio::test] - async fn s3_test_get_nonexistant_region() -> Result<()> { + async fn s3_test_get_nonexistent_region() -> Result<()> { // Assumes environment variables do not provide credentials to AWS US West 1 let (_, bucket_name) = region_and_bucket_name()?; let region = rusoto_core::Region::UsWest1; let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, &bucket_name)); - let location_name = NON_EXISTANT_NAME; + let location_name = NON_EXISTENT_NAME; - let err = get_nonexistant_object(&integration, Some(location_name)) + let err = get_nonexistent_object(&integration, Some(location_name)) .await .unwrap_err(); if let Some(Error(InternalError::UnableToListDataFromS3 { source, bucket })) = @@ -1080,12 +1076,12 @@ mod tests { } #[tokio::test] - async fn 
s3_test_get_nonexistant_location() -> Result<()> { + async fn s3_test_get_nonexistent_location() -> Result<()> { let (region, bucket_name) = region_and_bucket_name()?; let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, &bucket_name)); - let location_name = NON_EXISTANT_NAME; + let location_name = NON_EXISTENT_NAME; - let err = get_nonexistant_object(&integration, Some(location_name)) + let err = get_nonexistent_object(&integration, Some(location_name)) .await .unwrap_err(); if let Some(Error(InternalError::UnableToGetDataFromS3 { @@ -1108,13 +1104,13 @@ mod tests { } #[tokio::test] - async fn s3_test_get_nonexistant_bucket() -> Result<()> { + async fn s3_test_get_nonexistent_bucket() -> Result<()> { let (region, _) = region_and_bucket_name()?; - let bucket_name = NON_EXISTANT_NAME; + let bucket_name = NON_EXISTENT_NAME; let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, bucket_name)); - let location_name = NON_EXISTANT_NAME; + let location_name = NON_EXISTENT_NAME; - let err = get_nonexistant_object(&integration, Some(location_name)) + let err = get_nonexistent_object(&integration, Some(location_name)) .await .unwrap_err(); if let Some(Error(InternalError::UnableToListDataFromS3 { source, bucket })) = @@ -1135,12 +1131,12 @@ mod tests { } #[tokio::test] - async fn s3_test_put_nonexistant_region() -> Result<()> { + async fn s3_test_put_nonexistent_region() -> Result<()> { // Assumes environment variables do not provide credentials to AWS US West 1 let (_, bucket_name) = region_and_bucket_name()?; let region = rusoto_core::Region::UsWest1; let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, &bucket_name)); - let location_name = NON_EXISTANT_NAME; + let location_name = NON_EXISTENT_NAME; let data = Bytes::from("arbitrary data"); let stream_data = std::io::Result::Ok(data.clone()); @@ -1170,11 +1166,11 @@ mod tests { } #[tokio::test] - async fn s3_test_put_nonexistant_bucket() -> Result<()> { + async fn s3_test_put_nonexistent_bucket() -> Result<()> { let (region, _) = region_and_bucket_name()?; - let bucket_name = NON_EXISTANT_NAME; + let bucket_name = NON_EXISTENT_NAME; let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, bucket_name)); - let location_name = NON_EXISTANT_NAME; + let location_name = NON_EXISTENT_NAME; let data = Bytes::from("arbitrary data"); let stream_data = std::io::Result::Ok(data.clone()); @@ -1204,10 +1200,10 @@ mod tests { } #[tokio::test] - async fn s3_test_delete_nonexistant_location() -> Result<()> { + async fn s3_test_delete_nonexistent_location() -> Result<()> { let (region, bucket_name) = region_and_bucket_name()?; let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, &bucket_name)); - let location_name = NON_EXISTANT_NAME; + let location_name = NON_EXISTENT_NAME; let result = integration.delete(location_name).await; @@ -1217,12 +1213,12 @@ mod tests { } #[tokio::test] - async fn s3_test_delete_nonexistant_region() -> Result<()> { + async fn s3_test_delete_nonexistent_region() -> Result<()> { // Assumes environment variables do not provide credentials to AWS US West 1 let (_, bucket_name) = region_and_bucket_name()?; let region = rusoto_core::Region::UsWest1; let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, &bucket_name)); - let location_name = NON_EXISTANT_NAME; + let location_name = NON_EXISTENT_NAME; let err = integration.delete(location_name).await.unwrap_err(); if let Error(InternalError::UnableToDeleteDataFromS3 { @@ -1242,11 +1238,11 @@ mod tests { } #[tokio::test] 
- async fn s3_test_delete_nonexistant_bucket() -> Result<()> { + async fn s3_test_delete_nonexistent_bucket() -> Result<()> { let (region, _) = region_and_bucket_name()?; - let bucket_name = NON_EXISTANT_NAME; + let bucket_name = NON_EXISTENT_NAME; let integration = ObjectStore::new_amazon_s3(AmazonS3::new(region, bucket_name)); - let location_name = NON_EXISTANT_NAME; + let location_name = NON_EXISTENT_NAME; let err = integration.delete(location_name).await.unwrap_err(); if let Error(InternalError::UnableToDeleteDataFromS3 { From 568065d63f25b602b93ef317e979d870743cb9a7 Mon Sep 17 00:00:00 2001 From: Brandon Sov <9397850+brandonsov@users.noreply.github.com> Date: Thu, 10 Dec 2020 09:41:24 -0800 Subject: [PATCH 10/10] style: rename location_string to location_copy --- object_store/src/lib.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/object_store/src/lib.rs b/object_store/src/lib.rs index fef2c7b3c4..22d22d0b9a 100644 --- a/object_store/src/lib.rs +++ b/object_store/src/lib.rs @@ -153,14 +153,14 @@ impl GoogleCloudStorage { } ); - let location_string = location.to_string(); + let location_copy = location.to_string(); let bucket_name = self.bucket_name.clone(); let _ = tokio::task::spawn_blocking(move || { cloud_storage::Object::create( &bucket_name, &temporary_non_streaming, - &location_string, + &location_copy, "application/octet-stream", ) }) @@ -177,11 +177,11 @@ impl GoogleCloudStorage { &self, location: &str, ) -> InternalResult>> { - let location_string = location.to_string(); + let location_copy = location.to_string(); let bucket_name = self.bucket_name.clone(); let bytes = tokio::task::spawn_blocking(move || { - cloud_storage::Object::download(&bucket_name, &location_string) + cloud_storage::Object::download(&bucket_name, &location_copy) }) .await .context(UnableToGetDataFromGcs { @@ -198,11 +198,11 @@ impl GoogleCloudStorage { /// Delete the object at the specified location. async fn delete(&self, location: &str) -> InternalResult<()> { - let location_string = location.to_string(); + let location_copy = location.to_string(); let bucket_name = self.bucket_name.clone(); tokio::task::spawn_blocking(move || { - cloud_storage::Object::delete(&bucket_name, &location_string) + cloud_storage::Object::delete(&bucket_name, &location_copy) }) .await .context(UnableToDeleteDataFromGcs {
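Note on the pattern used throughout this series: snafu context selectors. Adding bucket/location fields to an error variant makes every .context(...) call site supply them, and removing #[snafu(context(false))] is what forces call sites through a selector at all, since a plain ?/From conversion cannot populate extra fields. The following is a minimal, self-contained sketch of that mechanism, assuming snafu 0.6-style selector names (un-suffixed, matching the patches above); the variant, function, and bucket names are illustrative only and are not part of object_store.

use snafu::{ResultExt, Snafu};

#[derive(Debug, Snafu)]
enum Error {
    // Fields other than `source` become parameters of the generated context
    // selector, so the caller must provide them when attaching context.
    #[snafu(display("Unable to get data from bucket {}, location {}: {}", bucket, location, source))]
    UnableToGetData {
        source: std::io::Error,
        bucket: String,
        location: String,
    },
}

fn get(bucket: &str, location: &str) -> Result<Vec<u8>, Error> {
    // Selector fields accept anything convertible Into<String>, so borrowed
    // &str values work here without cloning.
    std::fs::read(location).context(UnableToGetData { bucket, location })
}

fn main() {
    // Illustrative values; the read fails, taking the error path.
    let err = get("mybucket", "this_file_should_not_exist").unwrap_err();
    // The rendered error now names the bucket and location, which is the
    // behavior the patches above add for the GCS and S3 variants.
    assert!(err.to_string().contains("mybucket"));
    println!("{}", err);
}

Keeping the bucket and location as fields of the variant, rather than only in the display string, is what lets the tests in this series destructure the error and assert on the exact bucket and location values.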