diff --git a/import/src/aggregate_tsm_schema/update_catalog.rs b/import/src/aggregate_tsm_schema/update_catalog.rs
index 6e1373d8bc..5522730af8 100644
--- a/import/src/aggregate_tsm_schema/update_catalog.rs
+++ b/import/src/aggregate_tsm_schema/update_catalog.rs
@@ -945,7 +945,7 @@ mod tests {
             .repositories()
             .await
             .topics()
-            .create_or_get("iox_shared")
+            .create_or_get("iox-shared")
             .await
             .expect("topic created");
         let (connection, _join_handle, requests) = create_test_shard_service(MapToShardResponse {
@@ -984,8 +984,8 @@ mod tests {
         let agg_schema: AggregateTSMSchema = json.try_into().unwrap();
         update_iox_catalog(
             &agg_schema,
-            "iox_shared",
-            Some("iox_shared"),
+            "iox-shared",
+            Some("iox-shared"),
             Some("inf"),
             Arc::clone(&catalog),
             connection,
diff --git a/querier/src/cache/read_buffer.rs b/querier/src/cache/read_buffer.rs
index c9baf6076f..78128d0aee 100644
--- a/querier/src/cache/read_buffer.rs
+++ b/querier/src/cache/read_buffer.rs
@@ -201,7 +201,7 @@ async fn read_buffer_chunk_from_stream(
     let schema = stream.schema();
 
     // create "global" metric object, so that we don't blow up prometheus w/ too many metrics
-    let metrics = ChunkMetrics::new(metric_registry, "iox_shared");
+    let metrics = ChunkMetrics::new(metric_registry, "iox-shared");
 
     let mut builder = read_buffer::RBChunkBuilder::new(schema).with_metrics(metrics);
 
@@ -512,7 +512,7 @@ mod tests {
             .get_instrument("read_buffer_row_group_total")
             .unwrap();
         let v = g
-            .get_observer(&Attributes::from(&[("db_name", "iox_shared")]))
+            .get_observer(&Attributes::from(&[("db_name", "iox-shared")]))
             .unwrap()
             .fetch();