chore: `cargo update`
dependabot currently doesn't work due to https://github.com/dependabot/dependabot-core/issues/4574. Excluded `quote` due to https://github.com/dtolnay/quote/issues/204
pull/24376/head
parent
d894b3b991
commit
37bb7f2120
File diff suppressed because it is too large
Load Diff
|
@ -1702,7 +1702,7 @@ mod tests {
|
||||||
.id();
|
.id();
|
||||||
|
|
||||||
// A chunk is now in the object store and still in read buffer
|
// A chunk is now in the object store and still in read buffer
|
||||||
let expected_parquet_size = 1233;
|
let expected_parquet_size = 1231;
|
||||||
catalog_chunk_size_bytes_metric_eq(registry, "read_buffer", expected_read_buffer_size);
|
catalog_chunk_size_bytes_metric_eq(registry, "read_buffer", expected_read_buffer_size);
|
||||||
// now also in OS
|
// now also in OS
|
||||||
catalog_chunk_size_bytes_metric_eq(registry, "object_store", expected_parquet_size);
|
catalog_chunk_size_bytes_metric_eq(registry, "object_store", expected_parquet_size);
|
||||||
|
@ -2133,7 +2133,7 @@ mod tests {
|
||||||
// Read buffer + Parquet chunk size
|
// Read buffer + Parquet chunk size
|
||||||
catalog_chunk_size_bytes_metric_eq(registry, "mutable_buffer", 0);
|
catalog_chunk_size_bytes_metric_eq(registry, "mutable_buffer", 0);
|
||||||
catalog_chunk_size_bytes_metric_eq(registry, "read_buffer", 1700);
|
catalog_chunk_size_bytes_metric_eq(registry, "read_buffer", 1700);
|
||||||
catalog_chunk_size_bytes_metric_eq(registry, "object_store", 1233);
|
catalog_chunk_size_bytes_metric_eq(registry, "object_store", 1231);
|
||||||
|
|
||||||
// All the chunks should have different IDs
|
// All the chunks should have different IDs
|
||||||
assert_ne!(mb_chunk.id(), rb_chunk.id());
|
assert_ne!(mb_chunk.id(), rb_chunk.id());
|
||||||
|
@ -2246,7 +2246,7 @@ mod tests {
|
||||||
let registry = test_db.metric_registry.as_ref();
|
let registry = test_db.metric_registry.as_ref();
|
||||||
|
|
||||||
// Read buffer + Parquet chunk size
|
// Read buffer + Parquet chunk size
|
||||||
let object_store_bytes = 1233;
|
let object_store_bytes = 1231;
|
||||||
catalog_chunk_size_bytes_metric_eq(registry, "mutable_buffer", 0);
|
catalog_chunk_size_bytes_metric_eq(registry, "mutable_buffer", 0);
|
||||||
catalog_chunk_size_bytes_metric_eq(registry, "read_buffer", 1700);
|
catalog_chunk_size_bytes_metric_eq(registry, "read_buffer", 1700);
|
||||||
catalog_chunk_size_bytes_metric_eq(registry, "object_store", object_store_bytes);
|
catalog_chunk_size_bytes_metric_eq(registry, "object_store", object_store_bytes);
|
||||||
|
@ -2725,7 +2725,7 @@ mod tests {
|
||||||
id: chunk_summaries[0].id,
|
id: chunk_summaries[0].id,
|
||||||
storage: ChunkStorage::ReadBufferAndObjectStore,
|
storage: ChunkStorage::ReadBufferAndObjectStore,
|
||||||
lifecycle_action,
|
lifecycle_action,
|
||||||
memory_bytes: 4078, // size of RB and OS chunks
|
memory_bytes: 4073, // size of RB and OS chunks
|
||||||
object_store_bytes: 1557, // size of parquet file
|
object_store_bytes: 1557, // size of parquet file
|
||||||
row_count: 2,
|
row_count: 2,
|
||||||
time_of_last_access: None,
|
time_of_last_access: None,
|
||||||
|
@ -2776,7 +2776,7 @@ mod tests {
|
||||||
|
|
||||||
assert_eq!(db.catalog.metrics().memory().mutable_buffer(), 2486 + 1335);
|
assert_eq!(db.catalog.metrics().memory().mutable_buffer(), 2486 + 1335);
|
||||||
assert_eq!(db.catalog.metrics().memory().read_buffer(), 2550);
|
assert_eq!(db.catalog.metrics().memory().read_buffer(), 2550);
|
||||||
assert_eq!(db.catalog.metrics().memory().object_store(), 1528);
|
assert_eq!(db.catalog.metrics().memory().object_store(), 1523);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
|
|
|
@ -302,7 +302,7 @@ File {
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
file_size_bytes: 3052,
|
file_size_bytes: 3052,
|
||||||
metadata: b"metadata omitted (934 bytes)",
|
metadata: b"metadata omitted (935 bytes)",
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
|
@ -418,7 +418,7 @@ File {
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
file_size_bytes: 3052,
|
file_size_bytes: 3052,
|
||||||
metadata: b"metadata omitted (934 bytes)",
|
metadata: b"metadata omitted (935 bytes)",
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
|
|
|
@ -1007,6 +1007,6 @@ mod tests {
|
||||||
let mut generator = ChunkGenerator::new().await;
|
let mut generator = ChunkGenerator::new().await;
|
||||||
let (chunk, _) = generator.generate().await.unwrap();
|
let (chunk, _) = generator.generate().await.unwrap();
|
||||||
let parquet_metadata = chunk.parquet_metadata();
|
let parquet_metadata = chunk.parquet_metadata();
|
||||||
assert_eq!(parquet_metadata.size(), 3729);
|
assert_eq!(parquet_metadata.size(), 3719);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -51,12 +51,10 @@ tracing-subscriber = { version = "0.3", features = ["alloc", "ansi", "ansi_term"
|
||||||
bytes = { version = "1", features = ["std"] }
|
bytes = { version = "1", features = ["std"] }
|
||||||
cc = { version = "1", default-features = false, features = ["jobserver", "parallel"] }
|
cc = { version = "1", default-features = false, features = ["jobserver", "parallel"] }
|
||||||
either = { version = "1", features = ["use_std"] }
|
either = { version = "1", features = ["use_std"] }
|
||||||
getrandom = { version = "0.2", default-features = false, features = ["std"] }
|
|
||||||
hashbrown = { version = "0.11", features = ["ahash", "inline-more", "raw"] }
|
hashbrown = { version = "0.11", features = ["ahash", "inline-more", "raw"] }
|
||||||
indexmap = { version = "1", default-features = false, features = ["std"] }
|
indexmap = { version = "1", default-features = false, features = ["std"] }
|
||||||
log = { version = "0.4", default-features = false, features = ["std"] }
|
log = { version = "0.4", default-features = false, features = ["std"] }
|
||||||
memchr = { version = "2", features = ["std"] }
|
memchr = { version = "2", features = ["std"] }
|
||||||
rand = { version = "0.8", features = ["alloc", "getrandom", "libc", "rand_chacha", "rand_hc", "small_rng", "std", "std_rng"] }
|
|
||||||
regex = { version = "1", features = ["aho-corasick", "memchr", "perf", "perf-cache", "perf-dfa", "perf-inline", "perf-literal", "std", "unicode", "unicode-age", "unicode-bool", "unicode-case", "unicode-gencat", "unicode-perl", "unicode-script", "unicode-segment"] }
|
regex = { version = "1", features = ["aho-corasick", "memchr", "perf", "perf-cache", "perf-dfa", "perf-inline", "perf-literal", "std", "unicode", "unicode-age", "unicode-bool", "unicode-case", "unicode-gencat", "unicode-perl", "unicode-script", "unicode-segment"] }
|
||||||
regex-syntax = { version = "0.6", features = ["unicode", "unicode-age", "unicode-bool", "unicode-case", "unicode-gencat", "unicode-perl", "unicode-script", "unicode-segment"] }
|
regex-syntax = { version = "0.6", features = ["unicode", "unicode-age", "unicode-bool", "unicode-case", "unicode-gencat", "unicode-perl", "unicode-script", "unicode-segment"] }
|
||||||
serde = { version = "1", features = ["derive", "serde_derive", "std"] }
|
serde = { version = "1", features = ["derive", "serde_derive", "std"] }
|
||||||
|
|
Loading…
Reference in New Issue