fix: Use code backticks around invalid HTML tags in doc strings

pull/24376/head
Carol (Nichols || Goulding) 2022-12-21 14:42:28 -05:00
parent 5f06ab4609
commit 46ff8854ec
13 changed files with 36 additions and 31 deletions

View File

@ -1810,7 +1810,7 @@ impl<T> StatValues<T> {
/// updates the statistics keeping the min, max and incrementing count.
///
/// The type plumbing exists to allow calling with &str on a StatValues<String>
/// The type plumbing exists to allow calling with `&str` on a `StatValues<String>`.
pub fn update<U: ?Sized>(&mut self, other: &U)
where
T: Borrow<U>,
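To see why the `Borrow` bound lets callers pass `&str` to a `StatValues<String>`, here is a minimal runnable sketch; the `StatValues` below is a simplified stand-in, not the crate's actual struct:

```rust
use std::borrow::Borrow;

/// Simplified stand-in for the crate's `StatValues<T>`.
#[derive(Debug, Default)]
struct StatValues<T> {
    min: Option<T>,
    max: Option<T>,
    count: u64,
}

impl<T> StatValues<T> {
    /// Update min/max/count, comparing through the borrowed form of `T`.
    fn update<U: ?Sized>(&mut self, other: &U)
    where
        T: Borrow<U>,
        U: ToOwned<Owned = T> + PartialOrd,
    {
        if self.min.as_ref().map_or(true, |m| other < m.borrow()) {
            self.min = Some(other.to_owned());
        }
        if self.max.as_ref().map_or(true, |m| other > m.borrow()) {
            self.max = Some(other.to_owned());
        }
        self.count += 1;
    }
}

fn main() {
    let mut stats = StatValues::<String>::default();
    stats.update("cpu"); // a plain &str; a String is only built when min/max change
    stats.update("disk");
    assert_eq!(stats.min.as_deref(), Some("cpu"));
    assert_eq!(stats.max.as_deref(), Some("disk"));
    assert_eq!(stats.count, 2);
}
```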

View File

@ -114,7 +114,7 @@ message ClientHeader {
Metadata metadata = 1;
// The name of the RPC method, which looks something like:
// /<service>/<method>
// `/<service>/<method>`
// Note the leading "/" character.
string method_name = 2;
@ -122,7 +122,7 @@ message ClientHeader {
// servers with different identities.
// The authority is the name of such a server identity.
// It is typically a portion of the URI in the form of
// <host> or <host>:<port> .
// `<host>` or `<host>:<port>`.
string authority = 3;
// the RPC timeout
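For illustration, a small sketch of the shapes these comments describe; the helper and the service/method names are made up for the example, not part of the proto:

```rust
// Hypothetical helper showing the `/<service>/<method>` shape, with its
// leading "/" character.
fn split_method_name(method_name: &str) -> Option<(&str, &str)> {
    method_name.strip_prefix('/')?.split_once('/')
}

fn main() {
    let (service, method) =
        split_method_name("/influxdata.iox.Storage/ReadFilter").unwrap();
    assert_eq!(service, "influxdata.iox.Storage");
    assert_eq!(method, "ReadFilter");

    // An authority is `<host>` or `<host>:<port>`:
    let authority = "localhost:8082";
    let (host, port) = authority.split_once(':').unwrap_or((authority, ""));
    assert_eq!((host, port), ("localhost", "8082"));
}
```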

View File

@ -63,7 +63,7 @@ pub enum Expr {
/// A literal wildcard (`*`) with an optional data type selection.
Wildcard(Option<WildcardType>),
/// A DISTINCT <identifier> expression.
/// A DISTINCT `<identifier>` expression.
Distinct(Identifier),
/// Unary operation such as + 5 or - 1h3m
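A toy model of the two variants documented above (simplified types, not the parser's actual definitions):

```rust
#[derive(Debug, PartialEq)]
struct Identifier(String);

#[derive(Debug, PartialEq)]
enum WildcardType {
    Tag,
    Field,
}

#[derive(Debug, PartialEq)]
enum Expr {
    /// A literal wildcard (`*`) with an optional data type selection.
    Wildcard(Option<WildcardType>),
    /// A `DISTINCT <identifier>` expression.
    Distinct(Identifier),
}

fn main() {
    // `SELECT DISTINCT usage ...` would parse to something like:
    let distinct = Expr::Distinct(Identifier("usage".to_string()));
    // `SELECT *::tag ...` restricts the wildcard to tag columns:
    let wildcard = Expr::Wildcard(Some(WildcardType::Tag));
    assert_ne!(distinct, wildcard);
}
```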

View File

@ -71,26 +71,30 @@ pub enum DataError {
pub type Result<T, E = Error> = std::result::Result<T, E>;
/// parses the the measurement, field key and tag
/// set from a tsm index key
/// Parses the measurement, field key and tag set from a TSM index key
///
/// It does not provide access to the org and bucket ids on the key, these can
/// be accessed via org_id() and bucket_id() respectively.
/// It does not provide access to the org and bucket IDs on the key; these can be accessed via
/// `org_id()` and `bucket_id()` respectively.
///
/// Loosely based on [points.go](https://github.com/influxdata/influxdb/blob/751d70a213e5fdae837eda13d7ecb37763e69abb/models/points.go#L462)
///
/// The format looks roughly like:
///
/// ```text
/// <org_id bucket_id>,\x00=<measurement>,<tag_keys_str>,\xff=<field_key_str>#!~#<field_key_str>
/// ```
///
/// For example:
///
/// ```text
/// <org_id bucket_id>,\x00=http_api_request_duration_seconds,status=2XX,\xff=sum#!~#sum
///
/// measurement = "http_api_request_duration_seconds"
/// tags = [("status", "2XX")]
/// field = "sum"
/// ```
pub fn parse_tsm_key(key: &[u8]) -> Result<ParsedTsmKey, Error> {
// Wrap in an internal function to translate error types and add key context
parse_tsm_key_internal(key).context(ParsingTsmKeySnafu {
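A much-simplified sketch of the parse described above. The real `parse_tsm_key` works on raw `&[u8]` (0xff is not valid UTF-8), handles escaping, and strips the org/bucket prefix; here `\u{ff}` stands in for the 0xff byte and the prefix is assumed to be already removed:

```rust
fn parse_simplified(key: &str) -> Option<(String, Vec<(String, String)>, String)> {
    // Drop the trailing `#!~#<field_key_str>` duplicate of the field key.
    let key = key.split("#!~#").next()?;
    let (mut measurement, mut field, mut tags) = (None, None, Vec::new());
    for pair in key.split(',') {
        let (k, v) = pair.split_once('=')?;
        match k {
            "\x00" => measurement = Some(v.to_string()), // measurement marker
            "\u{ff}" => field = Some(v.to_string()),     // field-key marker
            _ => tags.push((k.to_string(), v.to_string())), // ordinary tag pair
        }
    }
    Some((measurement?, tags, field?))
}

fn main() {
    let key = "\x00=http_api_request_duration_seconds,status=2XX,\u{ff}=sum#!~#sum";
    let (measurement, tags, field) = parse_simplified(key).unwrap();
    assert_eq!(measurement, "http_api_request_duration_seconds");
    assert_eq!(tags, vec![("status".to_string(), "2XX".to_string())]);
    assert_eq!(field, "sum");
}
```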

View File

@ -297,7 +297,7 @@ where
}
}
/// Implement ChunkMeta for Arc<dyn QueryChunk>
/// Implement `ChunkMeta` for `Arc<dyn QueryChunk>`
impl QueryChunkMeta for Arc<dyn QueryChunk> {
fn summary(&self) -> Arc<TableSummary> {
self.as_ref().summary()
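The delegation pattern behind this impl, as a self-contained sketch with simplified stand-ins for `QueryChunkMeta`/`QueryChunk`:

```rust
use std::sync::Arc;

trait ChunkMeta {
    fn row_count(&self) -> usize;
}
trait Chunk: ChunkMeta {}

// Implement the metadata trait for the shared pointer by forwarding
// to the inner trait object, just as the diff does with `as_ref()`.
impl ChunkMeta for Arc<dyn Chunk> {
    fn row_count(&self) -> usize {
        self.as_ref().row_count()
    }
}

struct MemChunk {
    rows: usize,
}
impl ChunkMeta for MemChunk {
    fn row_count(&self) -> usize {
        self.rows
    }
}
impl Chunk for MemChunk {}

fn main() {
    let chunk: Arc<dyn Chunk> = Arc::new(MemChunk { rows: 42 });
    // Resolves via the `Arc<dyn Chunk>` impl, which delegates inward.
    assert_eq!(chunk.row_count(), 42);
}
```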

View File

@ -236,7 +236,7 @@ impl Stream for SchemaAdapterStream {
/// Describes how to create a column in the output.
#[derive(Debug)]
enum ColumnMapping {
/// Output column is found at <index> column of the input schema
/// Output column is found at `<index>` column of the input schema
FromInput(usize),
/// Output column should be synthesized with nulls of the specified type
MakeNull(DataType),
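A toy version of how such a mapping could be built; the `DataType` and the `plan_mappings` helper are simplified assumptions (the real stream works with Arrow schemas):

```rust
#[derive(Debug, PartialEq, Clone)]
enum DataType {
    Int64,
    Utf8,
}

#[derive(Debug, PartialEq)]
enum ColumnMapping {
    FromInput(usize),
    MakeNull(DataType),
}

/// For each desired output column, find it by name in the input schema,
/// or plan to synthesize it as nulls of the requested type.
fn plan_mappings(input: &[&str], output: &[(&str, DataType)]) -> Vec<ColumnMapping> {
    output
        .iter()
        .map(|(name, dt)| match input.iter().position(|i| i == name) {
            Some(idx) => ColumnMapping::FromInput(idx),
            None => ColumnMapping::MakeNull(dt.clone()),
        })
        .collect()
}

fn main() {
    let input = ["time", "host"];
    let output = [
        ("time", DataType::Int64),
        ("host", DataType::Utf8),
        ("region", DataType::Utf8), // missing from input -> filled with nulls
    ];
    assert_eq!(
        plan_mappings(&input, &output),
        vec![
            ColumnMapping::FromInput(0),
            ColumnMapping::FromInput(1),
            ColumnMapping::MakeNull(DataType::Utf8),
        ]
    );
}
```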

View File

@ -82,9 +82,8 @@ pub fn prune_chunks(
prune_summaries(table_schema, &summaries, predicate)
}
/// Given a Vec of pruning summaries, return a Vec<bool>
/// where `false` indicates that the predicate can be proven to evaluate to
/// `false` for every single row.
/// Given a `Vec` of pruning summaries, return a `Vec<bool>` where `false` indicates that the
/// predicate can be proven to evaluate to `false` for every single row.
pub fn prune_summaries(
table_schema: Arc<Schema>,
summaries: &Vec<Arc<TableSummary>>,
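A hand-rolled sketch of that contract, with a table summary reduced to one column's min/max range and the predicate to a closed interval (assumed simplifications, not the real pruning logic):

```rust
struct Summary {
    min: i64,
    max: i64,
}

/// Keep a chunk unless its [min, max] range provably misses the
/// predicate's [lo, hi] interval entirely.
fn prune_summaries(summaries: &[Summary], lo: i64, hi: i64) -> Vec<bool> {
    summaries.iter().map(|s| s.max >= lo && s.min <= hi).collect()
}

fn main() {
    let summaries = [
        Summary { min: 0, max: 10 },  // overlaps [5, 20] -> true (keep)
        Summary { min: 30, max: 40 }, // disjoint from [5, 20] -> false (prune)
    ];
    assert_eq!(prune_summaries(&summaries, 5, 20), vec![true, false]);
}
```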

View File

@ -291,7 +291,7 @@ impl Schema {
self.inner.fields().is_empty()
}
/// Returns an iterator of (Option<InfluxColumnType>, &Field) for
/// Returns an iterator of `(Option<InfluxColumnType>, &Field)` for
/// all the columns of this schema, in order
pub fn iter(&self) -> SchemaIter<'_> {
SchemaIter::new(self)
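A minimal stand-in for the iterator contract (simplified `Schema`/`Field` shapes, assumed here for illustration):

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum InfluxColumnType {
    Tag,
    Field,
    Timestamp,
}

struct Field {
    name: String,
}

struct Schema {
    columns: Vec<(Option<InfluxColumnType>, Field)>,
}

impl Schema {
    /// Iterate over `(Option<InfluxColumnType>, &Field)` pairs in column order.
    fn iter(&self) -> impl Iterator<Item = (Option<InfluxColumnType>, &Field)> {
        self.columns.iter().map(|(t, f)| (*t, f))
    }
}

fn main() {
    let schema = Schema {
        columns: vec![
            (Some(InfluxColumnType::Tag), Field { name: "host".into() }),
            (None, Field { name: "other".into() }),
        ],
    };
    for (col_type, field) in schema.iter() {
        println!("{col_type:?} {}", field.name);
    }
}
```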

View File

@ -38,22 +38,22 @@ pub enum Error {
pub type Result<T, E = Error> = std::result::Result<T, E>;
/// Convert a set of tag_keys into a form suitable for gRPC transport,
/// adding the special 0x00 (_m) and 0xff (_f) tag keys
/// adding the special `0x00` (`_m`) and `0xff` (`_f`) tag keys
///
/// Namely, a Vec<Vec<u8>>, including the measurement and field names
/// Namely, a `Vec<Vec<u8>>`, including the measurement and field names
pub fn tag_keys_to_byte_vecs(tag_keys: Arc<BTreeSet<String>>) -> Vec<Vec<u8>> {
// special case measurement (0x00) and field (0xff)
// special case measurement (`0x00`) and field (`0xff`)
// ensuring they are in the correct sort order (first and last, respectively)
let mut byte_vecs = Vec::with_capacity(2 + tag_keys.len());
byte_vecs.push(TAG_KEY_MEASUREMENT.to_vec()); // Shown as _m == _measurement
byte_vecs.push(TAG_KEY_MEASUREMENT.to_vec()); // Shown as `_m == _measurement`
tag_keys.iter().for_each(|name| {
byte_vecs.push(name.bytes().collect());
});
byte_vecs.push(TAG_KEY_FIELD.to_vec()); // Shown as _f == _field
byte_vecs.push(TAG_KEY_FIELD.to_vec()); // Shown as `_f == _field`
byte_vecs
}
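A self-contained usage sketch of the function above; the sentinel constants are assumed to be the single bytes `0x00` and `0xff` that the comments describe:

```rust
use std::collections::BTreeSet;
use std::sync::Arc;

// Stand-ins for the crate's constants (assumed values).
const TAG_KEY_MEASUREMENT: &[u8] = &[0x00];
const TAG_KEY_FIELD: &[u8] = &[0xff];

fn tag_keys_to_byte_vecs(tag_keys: Arc<BTreeSet<String>>) -> Vec<Vec<u8>> {
    let mut byte_vecs = Vec::with_capacity(2 + tag_keys.len());
    byte_vecs.push(TAG_KEY_MEASUREMENT.to_vec());
    tag_keys.iter().for_each(|name| byte_vecs.push(name.bytes().collect()));
    byte_vecs.push(TAG_KEY_FIELD.to_vec());
    byte_vecs
}

fn main() {
    let keys: BTreeSet<String> = ["host", "region"].iter().map(|s| s.to_string()).collect();
    let vecs = tag_keys_to_byte_vecs(Arc::new(keys));
    assert_eq!(vecs[0], vec![0x00]);       // `_measurement` sentinel sorts first
    assert_eq!(vecs[1], b"host".to_vec()); // user tag keys in BTreeSet order
    assert_eq!(vecs[2], b"region".to_vec());
    assert_eq!(vecs[3], vec![0xff]);       // `_field` sentinel sorts last
}
```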
/// Convert Series and Groups ` into a form suitable for gRPC transport:
/// Convert Series and Groups into a form suitable for gRPC transport:
///
/// ```text
/// (GroupFrame) potentially
@ -157,7 +157,7 @@ fn group_to_frame(group: series::Group) -> Frame {
Frame { data: Some(data) }
}
/// Convert the tag=value pairs from Arc<str> to Vec<u8> for gRPC transport
/// Convert the `tag=value` pairs from `Arc<str>` to `Vec<u8>` for gRPC transport
fn convert_tags(tags: Vec<series::Tag>, tag_key_binary_format: bool) -> Vec<Tag> {
let mut res: Vec<Tag> = tags
.into_iter()
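A hedged sketch of that tag conversion, with an assumed simplified `Tag` shape rather than the generated gRPC type:

```rust
use std::sync::Arc;

struct Tag {
    key: Vec<u8>,
    value: Vec<u8>,
}

/// Convert `tag=value` pairs from `Arc<str>` to `Vec<u8>` byte vectors.
fn convert_tags(tags: Vec<(Arc<str>, Arc<str>)>) -> Vec<Tag> {
    tags.into_iter()
        .map(|(k, v)| Tag {
            key: k.as_bytes().to_vec(),
            value: v.as_bytes().to_vec(),
        })
        .collect()
}

fn main() {
    let tags = vec![(Arc::from("host"), Arc::from("a"))];
    let out = convert_tags(tags);
    assert_eq!(out[0].key, b"host");
    assert_eq!(out[0].value, b"a");
}
```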

View File

@ -1652,7 +1652,7 @@ pub trait ErrorLogger {
/// Log the contents of self with a string of context. The context
/// should appear in a message such as
///
/// "Error <context>: <formatted error message>
/// "Error `<context>`: `<formatted error message>`
fn log_if_error(self, context: &str) -> Self;
/// Provided method to log an error via the `error!` macro
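A sketch of the trait's shape as described above, with `eprintln!` standing in for the `error!` tracing macro:

```rust
trait ErrorLogger {
    fn log_if_error(self, context: &str) -> Self;
}

impl<T, E: std::fmt::Display> ErrorLogger for Result<T, E> {
    fn log_if_error(self, context: &str) -> Self {
        if let Err(e) = &self {
            // The real implementation logs via the `error!` macro.
            eprintln!("Error {context}: {e}");
        }
        // Pass the Result through unchanged so the call can sit in a chain.
        self
    }
}

fn main() {
    let res: Result<(), String> = Err("disk full".to_string());
    let _ = res.log_if_error("while writing chunk"); // "Error while writing chunk: disk full"
}
```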

View File

@ -133,7 +133,7 @@ pub fn maybe_start_logging() {
/// Is a macro so test error
/// messages are on the same line as the failure;
///
/// Both arguments must be convertable into Strings (Into<String>)
/// Both arguments must be convertible into `String`s (`Into<String>`)
macro_rules! assert_contains {
($ACTUAL: expr, $EXPECTED: expr) => {
let actual_value: String = $ACTUAL.into();
@ -152,7 +152,7 @@ macro_rules! assert_contains {
/// a nice error message if that check fails. Is a macro so test error
/// messages are on the same line as the failure;
///
/// Both arguments must be convertable into Strings (Into<String>)
/// Both arguments must be convertible into `String`s (`Into<String>`)
macro_rules! assert_not_contains {
($ACTUAL: expr, $UNEXPECTED: expr) => {
let actual_value: String = $ACTUAL.into();
@ -182,10 +182,10 @@ macro_rules! assert_error {
}
#[macro_export]
/// Assert that `actual` and `expected` values are within `epsilon` of
/// each other. Used to compare values that may fluctuate from run to run (e.g. because they encode timestamps)
/// Assert that `actual` and `expected` values are within `epsilon` of each other. Used to compare
/// values that may fluctuate from run to run (e.g. because they encode timestamps)
///
/// Usage: assert_close!(actual, expected, epsilon);
/// Usage: `assert_close!(actual, expected, epsilon);`
macro_rules! assert_close {
($ACTUAL:expr, $EXPECTED:expr, $EPSILON:expr) => {{
{
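The core epsilon check, as a hedged re-implementation sketch; the real macro also produces a richer failure message than this:

```rust
macro_rules! assert_close {
    ($actual:expr, $expected:expr, $epsilon:expr) => {{
        let (a, e, eps) = ($actual, $expected, $epsilon);
        // Written as a macro so a failure points at the caller's line.
        assert!((a - e).abs() <= eps, "{} is not within {} of {}", a, eps, e);
    }};
}

fn main() {
    // Values that wobble from run to run (e.g. encoded timestamps):
    assert_close!(100.0_f64, 100.4, 0.5); // passes: within epsilon
}
```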

View File

@ -159,18 +159,20 @@ impl GrpcRequestBuilder {
self.regex_predicate(tag_key_name, pattern, Comparison::NotRegex)
}
/// Set predicate to tag_name <op> /pattern/
/// Set predicate to `tag_name <op> /pattern/`
///
/// where op is `Regex` or `NotRegex`
/// The structure of this request was determined by looking at a real request
/// made to storage, which looked like this:
///
/// ```text
/// root:<
/// node_type:COMPARISON_EXPRESSION
/// children:<node_type:TAG_REF tag_ref_value:"tag_key_name" >
/// children:<node_type:LITERAL regex_value:"pattern" >
/// comparison:REGEX
/// >
/// ```
pub fn regex_predicate(
self,
tag_key_name: impl Into<String>,
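A simplified stand-in for that comparison-expression tree (assumed names, not the generated protobuf types):

```rust
#[derive(Debug)]
enum Comparison {
    Regex,
    NotRegex,
}

#[derive(Debug)]
enum Node {
    /// Root comparison with the two operands as children.
    Comparison { op: Comparison, children: Vec<Node> },
    TagRef(String),
    RegexLiteral(String),
}

/// Build `tag_key_name <op> /pattern/` as a root comparison node.
fn regex_predicate(tag_key_name: &str, pattern: &str, op: Comparison) -> Node {
    Node::Comparison {
        op,
        children: vec![
            Node::TagRef(tag_key_name.to_string()),
            Node::RegexLiteral(pattern.to_string()),
        ],
    }
}

fn main() {
    let root = regex_predicate("tag_key_name", "pattern", Comparison::Regex);
    println!("{root:?}");
}
```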

View File

@ -27,13 +27,13 @@ pub type Result<T, E = Error> = std::result::Result<T, E>;
/// This struct contains sufficient information to determine the
/// current state of the write as a whole
#[derive(Debug, Default, Clone, PartialEq, Eq)]
/// Summary of a Vec<Vec<DmlMeta>>
/// Summary of a `Vec<Vec<DmlMeta>>`
pub struct WriteSummary {
/// Key is the shard index from the DmlMeta structure (aka kafka
/// Key is the shard index from the `DmlMeta` structure (aka Kafka
/// partition id), value is the sequence numbers from that
/// shard.
///
/// Note: BTreeMap to ensure the output is in a consistent order
/// Note: `BTreeMap` to ensure the output is in a consistent order
shards: BTreeMap<ShardIndex, Vec<SequenceNumber>>,
}
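A short sketch of why the `BTreeMap` keeps the summary's output stable, with assumed newtype shapes for the keys and values:

```rust
use std::collections::BTreeMap;

#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
struct ShardIndex(i32);

#[derive(Debug)]
struct SequenceNumber(u64);

fn main() {
    let mut shards: BTreeMap<ShardIndex, Vec<SequenceNumber>> = BTreeMap::new();
    shards.entry(ShardIndex(2)).or_default().push(SequenceNumber(7));
    shards.entry(ShardIndex(0)).or_default().push(SequenceNumber(3));

    // A BTreeMap iterates in key order, so output is deterministic
    // (shard 0 before shard 2) regardless of insertion order.
    for (shard, seqs) in &shards {
        println!("{shard:?}: {seqs:?}");
    }
}
```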