diff --git a/Cargo.lock b/Cargo.lock
index 223d803f6b..b5edda727c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1504,6 +1504,7 @@ dependencies = [
  "influxdb_tsm",
  "ingest",
  "lazy_static",
+ "logfmt",
  "mem_qe",
  "mutable_buffer",
  "object_store",
@@ -1721,6 +1722,16 @@ dependencies = [
  "cfg-if 1.0.0",
 ]
 
+[[package]]
+name = "logfmt"
+version = "0.1.0"
+dependencies = [
+ "lazy_static",
+ "regex",
+ "tracing",
+ "tracing-subscriber",
+]
+
 [[package]]
 name = "lz4"
 version = "1.23.2"
diff --git a/Cargo.toml b/Cargo.toml
index 4a797ef5d8..9d65ef9d11 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -14,6 +14,7 @@ members = [
     "influxdb_tsm",
     "influxdb2_client",
     "ingest",
+    "logfmt",
     "mem_qe",
     "mutable_buffer",
     "object_store",
@@ -41,6 +42,7 @@ influxdb_iox_client = { path = "influxdb_iox_client" }
 influxdb_line_protocol = { path = "influxdb_line_protocol" }
 influxdb_tsm = { path = "influxdb_tsm" }
 ingest = { path = "ingest" }
+logfmt = { path = "logfmt" }
 mem_qe = { path = "mem_qe" }
 mutable_buffer = { path = "mutable_buffer" }
 object_store = { path = "object_store" }
diff --git a/logfmt/Cargo.toml b/logfmt/Cargo.toml
new file mode 100644
index 0000000000..06d30e9cd0
--- /dev/null
+++ b/logfmt/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "logfmt"
+version = "0.1.0"
+authors = ["Andrew Lamb"]
+description = "tracing_subscriber layer for writing out logfmt formatted events"
+edition = "2018"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+tracing = { version = "0.1" }
+tracing-subscriber = "0.2.15"
+
+[dev-dependencies]
+lazy_static = "1.4.0"
+regex = "1.4.3"
\ No newline at end of file
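For orientation before the implementation below: the new crate hangs a formatting layer onto the `tracing_subscriber` registry and writes one logfmt line per event. A minimal sketch of wiring it up and the shape of line it produces (the field names and values here are illustrative, not taken from IOx):

```rust
use logfmt::LogFmtLayer;
use tracing::info;
use tracing_subscriber::prelude::*;

fn main() {
    // Write logfmt lines to stdout; combine with an EnvFilter (as the docs in
    // lib.rs below show) when level filtering is needed.
    tracing_subscriber::registry()
        .with(LogFmtLayer::new(std::io::stdout))
        .init();

    // Emits a single line shaped like:
    // level=info msg="starting server" port=8080 target="demo" location="src/main.rs:12" time=1612209178717290000
    info!(port = 8080, "starting server");
}
```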
diff --git a/logfmt/src/lib.rs b/logfmt/src/lib.rs
new file mode 100644
index 0000000000..b500f55e66
--- /dev/null
+++ b/logfmt/src/lib.rs
@@ -0,0 +1,336 @@
+use std::borrow::Cow;
+use std::{io::Write, time::SystemTime};
+use tracing::{
+    field::{Field, Visit},
+    subscriber::Interest,
+    Id, Level, Subscriber,
+};
+use tracing_subscriber::{self, fmt::MakeWriter, layer::Context, registry::LookupSpan, Layer};
+
+/// Implements a `tracing_subscriber::Layer` which generates
+/// [logfmt] formatted log entries, suitable for log ingestion
+///
+/// At the time of writing, I could find no good existing crate
+///
+/// https://github.com/mcountryman/logfmt_logger from @mcountryman
+/// looked very small and did not (obviously) work with the tracing subscriber
+///
+/// [logfmt](https://brandur.org/logfmt)
+pub struct LogFmtLayer<W> {
+    writer: W,
+}
+
+impl<W: MakeWriter> LogFmtLayer<W> {
+    /// Create a new logfmt Layer to pass into tracing_subscriber
+    ///
+    /// Note this layer simply formats and writes to the specified writer. It
+    /// does not do any filtering for levels itself. Filtering can be done
+    /// using an EnvFilter
+    ///
+    /// For example:
+    /// ```
+    /// use logfmt::LogFmtLayer;
+    /// use tracing_subscriber::{EnvFilter, prelude::*};
+    ///
+    /// // setup debug logging level
+    /// std::env::set_var("RUST_LOG", "debug");
+    ///
+    /// // setup formatter to write to stderr
+    /// let formatter =
+    ///    LogFmtLayer::new(std::io::stderr);
+    ///
+    /// tracing_subscriber::registry()
+    ///     .with(EnvFilter::from_default_env())
+    ///     .with(formatter)
+    ///     .init();
+    /// ```
+    pub fn new(writer: W) -> Self {
+        Self { writer }
+    }
+}
+
+impl<S, W> Layer<S> for LogFmtLayer<W>
+where
+    W: MakeWriter + 'static,
+    S: Subscriber + for<'a> LookupSpan<'a>,
+{
+    fn register_callsite(
+        &self,
+        _metadata: &'static tracing::Metadata<'static>,
+    ) -> tracing::subscriber::Interest {
+        Interest::always()
+    }
+
+    fn on_event(&self, event: &tracing::Event<'_>, ctx: Context<'_, S>) {
+        let writer = self.writer.make_writer();
+        let mut p = FieldPrinter::new(writer, event.metadata().level());
+        // record fields
+        event.record(&mut p);
+        if let Some(span) = ctx.lookup_current() {
+            p.write_span_id(&span.id())
+        }
+        // record source information
+        p.write_source_info(event);
+        p.write_timestamp();
+    }
+
+    fn new_span(&self, attrs: &tracing::span::Attributes<'_>, id: &Id, ctx: Context<'_, S>) {
+        let writer = self.writer.make_writer();
+        let metadata = ctx.metadata(id).expect("span should have metadata");
+        let mut p = FieldPrinter::new(writer, metadata.level());
+        p.write_span_name(metadata.name());
+        attrs.record(&mut p);
+        p.write_span_id(id);
+        p.write_timestamp();
+    }
+
+    fn max_level_hint(&self) -> Option<tracing::metadata::LevelFilter> {
+        None
+    }
+}
+
+/// This struct is responsible for actually printing log information to
+/// a writer
+struct FieldPrinter<W: Write> {
+    writer: W,
+}
+
+impl<W: Write> FieldPrinter<W> {
+    fn new(mut writer: W, level: &Level) -> Self {
+        let level_str = match *level {
+            Level::TRACE => "trace",
+            Level::DEBUG => "debug",
+            Level::INFO => "info",
+            Level::WARN => "warn",
+            Level::ERROR => "error",
+        };
+
+        write!(writer, r#"level={}"#, level_str).ok();
+
+        Self { writer }
+    }
+
+    fn write_span_name(&mut self, value: &str) {
+        write!(self.writer, " span_name=\"{}\"", quote_and_escape(value)).ok();
+    }
+
+    fn write_source_info(&mut self, event: &tracing::Event<'_>) {
+        let metadata = event.metadata();
+        write!(
+            self.writer,
+            " target=\"{}\"",
+            quote_and_escape(metadata.target())
+        )
+        .ok();
+
+        if let Some(module_path) = metadata.module_path() {
+            if metadata.target() != module_path {
+                write!(self.writer, " module_path=\"{}\"", module_path).ok();
+            }
+        }
+        if let (Some(file), Some(line)) = (metadata.file(), metadata.line()) {
+            write!(self.writer, " location=\"{}:{}\"", file, line).ok();
+        }
+    }
+
+    fn write_span_id(&mut self, id: &Id) {
+        write!(self.writer, " span={}", id.into_u64()).ok();
+    }
+
+    fn write_timestamp(&mut self) {
+        let ns_since_epoch = SystemTime::now()
+            .duration_since(SystemTime::UNIX_EPOCH)
+            .expect("System time should have been after the epoch")
+            .as_nanos();
+
+        write!(self.writer, " time={:?}", ns_since_epoch).ok();
+    }
+}
+
+impl<W: Write> Drop for FieldPrinter<W> {
+    fn drop(&mut self) {
+        // finish the log line
+        writeln!(self.writer).ok();
+    }
+}
+
+impl<W: Write> Visit for FieldPrinter<W> {
+    fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) {
+        // Note this appears to be invoked via the `debug!` and `info!` macros
+        let formatted_value = format!("{:?}", value);
+        write!(
+            self.writer,
+            " {}={}",
+            translate_field_name(field.name()),
+            quote_and_escape(&formatted_value)
+        )
+        .ok();
+    }
+
+    fn record_i64(&mut self, field: &Field, value: i64) {
+        write!(
+            self.writer,
+            " {}={}",
+            translate_field_name(field.name()),
+            value
+        )
+        .ok();
+    }
+
+    fn record_u64(&mut self, field: &Field, value: u64) {
+        write!(
+            self.writer,
+            " {}={}",
+            translate_field_name(field.name()),
+            value
+        )
+        .ok();
+    }
+
+    fn record_bool(&mut self, field: &Field, value: bool) {
+        write!(
+            self.writer,
+            " {}={}",
+            translate_field_name(field.name()),
+            value
+        )
+        .ok();
+    }
+
+    fn record_str(&mut self, field: &Field, value: &str) {
+        write!(
+            self.writer,
+            " {}={}",
+            translate_field_name(field.name()),
+            quote_and_escape(value)
+        )
+        .ok();
+    }
+
+    fn record_error(&mut self, field: &Field, value: &(dyn std::error::Error + 'static)) {
+        let field_name = translate_field_name(field.name());
+
+        let debug_formatted = format!("{:?}", value);
+        write!(
+            self.writer,
+            " {}={:?}",
+            field_name,
+            quote_and_escape(&debug_formatted)
+        )
+        .ok();
+
+        let display_formatted = format!("{}", value);
+        write!(
+            self.writer,
+            " {}.display={}",
+            field_name,
+            quote_and_escape(&display_formatted)
+        )
+        .ok();
+    }
+}
+
+/// Returns true if the string value needs to be quoted and escaped:
+/// its beginning/ending quotes are mismatched, it contains an unescaped
+/// quote, or it contains spaces without already being quoted
+fn needs_quotes_and_escaping(value: &str) -> bool {
+    // mismatched beginning / end quotes
+    if value.starts_with('"') != value.ends_with('"') {
+        return true;
+    }
+
+    // ignore beginning/ending quotes, if any
+    let pre_quoted = value.len() >= 2 && value.starts_with('"') && value.ends_with('"');
+
+    let value = if pre_quoted {
+        &value[1..value.len() - 1]
+    } else {
+        value
+    };
+
+    // unescaped quotes
+    let c0 = value.chars();
+    let c1 = value.chars().skip(1);
+    if c0.zip(c1).any(|(c0, c1)| c0 != '\\' && c1 == '"') {
+        return true;
+    }
+
+    value.contains(' ') && !pre_quoted
+}
+
+/// escape any characters in `value` as needed, otherwise return the string as is
+fn quote_and_escape(value: &'_ str) -> Cow<'_, str> {
+    if needs_quotes_and_escaping(value) {
+        Cow::Owned(format!("\"{}\"", value.replace("\"", "\\\"")))
+    } else {
+        Cow::Borrowed(value)
+    }
+}
+
+// Translate the field name from tracing into the logfmt style
+fn translate_field_name(name: &str) -> &str {
+    if name == "message" {
+        "msg"
+    } else {
+        name
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn quote_and_escape_len0() {
+        assert_eq!(quote_and_escape(""), "");
+    }
+
+    #[test]
+    fn quote_and_escape_len1() {
+        assert_eq!(quote_and_escape("f"), "f");
+    }
+
+    #[test]
+    fn quote_and_escape_len2() {
+        assert_eq!(quote_and_escape("fo"), "fo");
+    }
+
+    #[test]
+    fn quote_and_escape_len3() {
+        assert_eq!(quote_and_escape("foo"), "foo");
+    }
+
+    #[test]
+    fn quote_and_escape_len3_1quote_start() {
+        assert_eq!(quote_and_escape("\"foo"), "\"\\\"foo\"");
+    }
+
+    #[test]
+    fn quote_and_escape_len3_1quote_end() {
+        assert_eq!(quote_and_escape("foo\""), "\"foo\\\"\"");
+    }
+
+    #[test]
+    fn quote_and_escape_len3_2quote() {
+        assert_eq!(quote_and_escape("\"foo\""), "\"foo\"");
+    }
+
+    #[test]
+    fn quote_and_escape_space() {
+        assert_eq!(quote_and_escape("foo bar"), "\"foo bar\"");
+    }
+
+    #[test]
+    fn quote_and_escape_space_prequoted() {
+        assert_eq!(quote_and_escape("\"foo bar\""), "\"foo bar\"");
+    }
+
+    #[test]
+    fn quote_and_escape_space_prequoted_but_not_escaped() {
+        assert_eq!(quote_and_escape("\"foo \"bar\""), "\"\\\"foo \\\"bar\\\"\"");
+    }
+
+    #[test]
+    fn quote_and_escape_quoted_quotes() {
+        assert_eq!(quote_and_escape("foo:\"bar\""), "\"foo:\\\"bar\\\"\"");
+    }
+}
diff --git a/logfmt/tests/logging.rs b/logfmt/tests/logging.rs
new file mode 100644
index 0000000000..4ca028785e
--- /dev/null
+++ b/logfmt/tests/logging.rs
@@ -0,0 +1,357 @@
+// Note that this needs to be an integration test because the tracing
+// structures are global: once you set a logging subscriber you can't undo
+// that... so punting on that for now
+
+use logfmt::LogFmtLayer;
+use regex::Regex;
+use std::{
+    error::Error,
+    fmt,
+    io::{self, Cursor},
+    sync::Mutex,
+};
+use tracing::{debug, error, info, span, trace, warn, Level};
+use tracing_subscriber::{fmt::MakeWriter, prelude::*};
+
+use lazy_static::lazy_static;
+
+/// Compares the captured messages with the expected messages,
+/// normalizing for time and location
+#[macro_export]
+macro_rules! assert_logs {
+    ($CAPTURE: expr, $EXPECTED_LINES: expr) => {
+        let expected_lines: Vec<String> = $EXPECTED_LINES.iter().map(|&s| s.into()).collect();
+        let actual_lines = $CAPTURE.to_strings();
+
+        let normalized_expected = normalize(expected_lines.iter());
+        let normalized_actual = normalize(actual_lines.iter());
+
+        assert_eq!(
+            normalized_expected, normalized_actual,
+            "\n\nexpected:\n\n{:#?}\nactual:\n\n{:#?}\n\nnormalized_expected:\n\n{:#?}\nnormalized_actual:\n\n{:#?}\n\n",
+            expected_lines, actual_lines,
+            normalized_expected, normalized_actual
+        )
+    };
+}
+
+#[test]
+fn level() {
+    let capture = CapturedWriter::new();
+
+    info!("This is an info message");
+    debug!("This is a debug message");
+    trace!("This is a trace message");
+    warn!("This is a warn message");
+    error!("This is an error message");
+
+    let expected = vec![
+        "level=info msg=\"This is an info message\" target=\"logging\" location=\"logfmt/tests/logging.rs:36\" time=1612181556329599000",
+        "level=debug msg=\"This is a debug message\" target=\"logging\" location=\"logfmt/tests/logging.rs:37\" time=1612181556329618000",
+        "level=trace msg=\"This is a trace message\" target=\"logging\" location=\"logfmt/tests/logging.rs:38\" time=1612181556329634000",
+        "level=warn msg=\"This is a warn message\" target=\"logging\" location=\"logfmt/tests/logging.rs:39\" time=1612181556329646000",
+        "level=error msg=\"This is an error message\" target=\"logging\" location=\"logfmt/tests/logging.rs:40\" time=1612181556329661000",
+    ];
+
+    assert_logs!(capture, expected);
+}
+
+#[test]
+fn event_fields_strings() {
+    let capture = CapturedWriter::new();
+    info!(
+        event_name = "foo bar",
+        other_event = "baz",
+        "This is an info message"
+    );
+
+    let expected = vec![
+        "level=info msg=\"This is an info message\" event_name=\"foo bar\" other_event=baz target=\"logging\" location=\"logfmt/tests/logging.rs:59\" time=1612187170712973000",
+    ];
+
+    assert_logs!(capture, expected);
+}
+
+#[test]
+fn test_without_normalization() {
+    let capture = CapturedWriter::new();
+    info!(
+        event_name = "foo bar",
+        other_event = "baz",
+        "This is an info message"
+    );
+
+    // double assure that normalization isn't messing with things by
+    // checking for presence of strings as well
+    let log_string = normalize(capture.to_strings().iter()).join("\n");
+    assert!(log_string.contains("This is an info message"));
+    assert!(log_string.contains("event_name"));
+    assert!(log_string.contains("other_event"));
+    assert!(log_string.contains("baz"));
+    assert!(log_string.contains("foo bar"));
+}
+
+#[test]
+fn event_fields_numeric() {
+    let capture = CapturedWriter::new();
+    info!(bar = 1, frr = false, "This is an info message");
+
+    let expected = vec![
+        "level=info msg=\"This is an info message\" bar=1 frr=false target=\"logging\" location=\"logfmt/tests/logging.rs:72\" time=1612187170712947000",
+    ];
+
+    assert_logs!(capture, expected);
+}
+
+#[test]
+fn event_fields_repeated() {
+    let capture = CapturedWriter::new();
+    info!(bar = 1, bar = 2, "This is an info message");
+
+    let expected = vec![
+        "level=info msg=\"This is an info message\" bar=1 bar=2 target=\"logging\" location=\"logfmt/tests/logging.rs:84\" time=1612187170712948000",
+    ];
+
+    assert_logs!(capture, expected);
+}
+
+#[test]
+fn event_fields_errors() {
+    let capture = CapturedWriter::new();
+
+    let err: Box<dyn Error> =
+        io::Error::new(io::ErrorKind::Other, "shaving yak failed!").into();
+
+    error!(the_error = err.as_ref(), "This is an error message");
+    let expected = vec![
+        "level=error msg=\"This is an error message\" the_error=\"\\\"Custom { kind: Other, error: \\\\\\\"shaving yak failed!\\\\\\\" }\\\"\" the_error.display=\"shaving yak failed!\" target=\"logging\" location=\"logfmt/tests/logging.rs:99\" time=1612187170712947000",
+    ];
+    assert_logs!(capture, expected);
+}
+
+#[test]
+fn event_fields_structs() {
+    let capture = CapturedWriter::new();
+    let my_struct = TestDebugStruct::new();
+
+    info!(s = ?my_struct, "This is an info message");
+
+    let expected = vec![
+        "level=info msg=\"This is an info message\" s=\"TestDebugStruct { b: true, s: \\\"The String\\\" }\" target=\"logging\" location=\"logfmt/tests/logging.rs:111\" time=1612187170712937000",
+    ];
+
+    assert_logs!(capture, expected);
+}
+
+#[test]
+fn event_spans() {
+    // Demonstrate the inclusion of span_id (as `span`)
+    let capture = CapturedWriter::new();
+    let span = span!(Level::INFO, "my_span", foo = "bar");
+    let enter = span.enter();
+    info!(shave = "mo yak!", "info message in span");
+    std::mem::drop(enter);
+
+    let expected = vec![
+        "level=info span_name=\"my_span\" foo=bar span=1 time=1612209178717290000",
+        "level=info msg=\"info message in span\" shave=\"mo yak!\" span=1 target=\"logging\" location=\"logfmt/tests/logging.rs:132\" time=1612209178717329000",
+    ];
+
+    assert_logs!(capture, expected);
+}
+
+#[test]
+fn event_multi_span() {
+    // Demonstrate the inclusion of span_id (as `span`)
+    let capture = CapturedWriter::new();
+
+    let span1 = span!(Level::INFO, "my_span", foo = "bar");
+    let _ = span1.enter();
+    {
+        let span2 = span!(Level::INFO, "my_second_span", foo = "baz");
+        let _ = span2.enter();
+        info!(shave = "yak!", "info message in span 2");
+    }
+
+    {
+        let span3 = span!(Level::INFO, "my_second_span", foo = "brmp");
+        let _ = span3.enter();
+        info!(shave = "mo yak!", "info message in span 3");
+    }
+
+    let expected = vec![
+        "level=info span_name=\"my_span\" foo=bar span=1 time=1612209327939714000",
+        "level=info span_name=\"my_second_span\" foo=baz span=2 time=1612209327939743000",
+        "level=info msg=\"info message in span 2\" shave=yak! target=\"logging\" location=\"logfmt/tests/logging.rs:154\" time=1612209327939774000",
+        "level=info span_name=\"my_second_span\" foo=brmp span=3 time=1612209327939795000",
+        "level=info msg=\"info message in span 3\" shave=\"mo yak!\" target=\"logging\" location=\"logfmt/tests/logging.rs:160\" time=1612209327939828000",
+    ];
+
+    assert_logs!(capture, expected);
+}
+
+// TODO: it might be nice to write some tests for time and location, but for now
+// just punt
+
+/// Test structure that has a debug representation
+#[derive(Debug)]
+struct TestDebugStruct {
+    b: bool,
+    s: String,
+}
+impl TestDebugStruct {
+    fn new() -> Self {
+        Self {
+            b: true,
+            s: "The String".into(),
+        }
+    }
+}
+
+impl fmt::Display for TestDebugStruct {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(
+            f,
+            "Display for TestDebugStruct b:{} s:\"{}\"",
+            self.b, self.s
+        )
+    }
+}
+
+/// Normalize lines for easy comparison
+fn normalize<'a>(lines: impl Iterator<Item = &'a String>) -> Vec<String> {
+    let lines = lines
+        .map(|line| normalize_timestamp(line))
+        .map(|line| normalize_location(&line))
+        .collect();
+    normalize_spans(lines)
+}
+
+/// s/time=1612187170712947000/time=NORMALIZED/g
+fn normalize_timestamp(v: &str) -> String {
+    let re = Regex::new(r#"time=\d+"#).unwrap();
+    re.replace_all(v, "time=NORMALIZED").to_string()
+}
+
+/// s/location=\"logfmt/tests/logging.rs:128\"/location=NORMALIZED/g
+fn normalize_location(v: &str) -> String {
+    let re = Regex::new(r#"location=".*?""#).unwrap();
+    re.replace_all(v, "location=NORMALIZED").to_string()
+}
+
+/// s/span=1/span=SPAN1/g
+fn normalize_spans(lines: Vec<String>) -> Vec<String> {
+    // since there can be multiple unique span values, need to normalize them
+    // differently
+    //
+    // Note: we include leading and trailing spaces so that span=2
+    // doesn't also match span=21423
+    let re = Regex::new(r#" span=(\d+) "#).unwrap();
+    let span_ids: Vec<String> = lines
+        .iter()
+        .map(|line| re.find_iter(line))
+        .flatten()
+        .map(|m| m.as_str().to_string())
+        .collect();
+
+    // map span ids to something uniform
+    span_ids
+        .into_iter()
+        .enumerate()
+        .fold(lines, |lines, (idx, orig_id)| {
+            // replace old span
+            let new_id = format!(" span=SPAN{} ", idx);
+            let re = Regex::new(&orig_id).unwrap();
+            lines
+                .into_iter()
+                .map(|line| re.replace_all(&line as &str, &new_id as &str).to_string())
+                .collect()
+        })
+}
+
+// Each thread has a local collection of lines that is captured to.
+// This is needed because the Rust test framework potentially runs the
+// tests on multiple threads, but there is a single global logger.
+thread_local! {
+    static LOG_LINES: Mutex<Cursor<Vec<u8>>> = Mutex::new(Cursor::new(Vec::new()));
+}
+
+lazy_static! {
+    // Since we can only set up logging once, we need a global writer to
+    // share among test cases
+    static ref GLOBAL_WRITER: Mutex<CapturedWriter> = {
+        let capture = CapturedWriter::default();
+        tracing_subscriber::registry()
+            .with(LogFmtLayer::new(capture.clone()))
+            .init();
+        Mutex::new(capture)
+    };
+}
+
+// This thing captures log lines
+#[derive(Default, Clone)]
+struct CapturedWriter {
+    // all state is held in the LOG_LINES thread local variable
+}
+
+impl CapturedWriter {
+    fn new() -> Self {
+        let global_writer = GLOBAL_WRITER.lock().expect("mutex poisoned");
+        global_writer.clone().clear()
+    }
+
+    /// Clear all thread local state
+    fn clear(self) -> Self {
+        LOG_LINES.with(|lines| {
+            let mut cursor = lines.lock().expect("mutex poisoned");
+            cursor.get_mut().clear()
+        });
+        self
+    }
+
+    fn to_strings(&self) -> Vec<String> {
+        LOG_LINES.with(|lines| {
+            let cursor = lines.lock().expect("mutex poisoned");
+            let bytes: Vec<u8> = cursor.get_ref().clone();
+            String::from_utf8(bytes)
+                .expect("valid utf8")
+                .lines()
+                .map(|s| s.to_string())
+                .collect()
+        })
+    }
+}
+
+impl fmt::Display for CapturedWriter {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        for s in self.to_strings() {
+            writeln!(f, "{}", s)?
+        }
+        Ok(())
+    }
+}
+
+impl std::io::Write for CapturedWriter {
+    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
+        LOG_LINES.with(|lines| {
+            let mut cursor = lines.lock().expect("mutex poisoned");
+            cursor.write(buf)
+        })
+    }
+
+    fn flush(&mut self) -> std::io::Result<()> {
+        LOG_LINES.with(|lines| {
+            let mut cursor = lines.lock().expect("mutex poisoned");
+            cursor.flush()
+        })
+    }
+}
+
+impl MakeWriter for CapturedWriter {
+    type Writer = Self;
+
+    fn make_writer(&self) -> Self::Writer {
+        self.clone()
+    }
+}
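One note on writers before the CLI changes: `CapturedWriter` above implements `MakeWriter` by hand because the tests need to intercept output, but tracing-subscriber 0.2 also has a blanket `MakeWriter` impl for closures returning an `io::Write`, the same mechanism that lets `std::io::stderr` be passed to `LogFmtLayer::new`. A sketch (not part of this PR; the file path is made up) of pointing the layer at a file that way:

```rust
use logfmt::LogFmtLayer;
use tracing_subscriber::{prelude::*, EnvFilter};

fn init_logfmt_to_file() {
    // Any `Fn() -> impl io::Write` satisfies MakeWriter; re-opening the file for
    // every event keeps the sketch short, but is not something to do in production.
    let make_writer = || {
        std::fs::OpenOptions::new()
            .create(true)
            .append(true)
            .open("/tmp/iox-logfmt.log")
            .expect("failed to open log file")
    };

    tracing_subscriber::registry()
        .with(EnvFilter::from_default_env())
        .with(LogFmtLayer::new(make_writer))
        .init();
}
```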
diff --git a/src/commands/config.rs b/src/commands/config.rs
index 232febda7b..881398cf29 100644
--- a/src/commands/config.rs
+++ b/src/commands/config.rs
@@ -48,6 +48,13 @@ pub struct Config {
     #[structopt(long = "--log", env = "RUST_LOG")]
     pub rust_log: Option<String>,
 
+    /// Log message format. Can be one of:
+    ///
+    /// "rust" (default)
+    /// "logfmt" (logfmt/Heroku style - https://brandur.org/logfmt)
+    #[structopt(long = "--log_format", env = "INFLUXDB_IOX_LOG_FORMAT")]
+    pub log_format: Option<LogFormat>,
+
     /// This sets logging up with a pre-configured set of convenient log levels.
     ///
     /// -v means 'info' log levels
@@ -153,6 +160,48 @@ fn strip_server(args: impl Iterator<Item = String>) -> Vec<String> {
         .collect::<Vec<_>>()
 }
 
+/// How to format output logging messages
+#[derive(Debug, Clone, Copy)]
+pub enum LogFormat {
+    /// Default formatted logging
+    ///
+    /// Example:
+    /// ```
+    /// Jan 31 13:19:39.059  WARN influxdb_iox::influxdb_ioxd: NO PERSISTENCE: using memory for object storage
+    /// ```
+    Rust,
+
+    /// Use the (somewhat pretentiously named) Heroku / logfmt formatted output
+    /// format
+    ///
+    /// Example:
+    /// ```
+    /// level=warn msg="NO PERSISTENCE: using memory for object storage" target="influxdb_iox::influxdb_ioxd"
+    /// ```
+    LogFmt,
+}
+
+impl Default for LogFormat {
+    fn default() -> Self {
+        Self::Rust
+    }
+}
+
+impl std::str::FromStr for LogFormat {
+    type Err = String;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match s.to_ascii_lowercase().as_str() {
+            "rust" => Ok(Self::Rust),
+            "logfmt" => Ok(Self::LogFmt),
+            _ => Err(format!(
+                "Invalid log format '{}'. Valid options: rust, logfmt",
+                s
+            )),
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
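The parsing contract for the new flag, in test form (a sketch; it assumes `LogFormat` is imported from `commands::config`):

```rust
#[test]
fn log_format_parsing() {
    // Matching is ASCII-case-insensitive, so any casing selects the logfmt output.
    for s in &["logfmt", "LOGFMT", "LogFmt"] {
        assert!(matches!(s.parse::<LogFormat>(), Ok(LogFormat::LogFmt)));
    }
    assert!(matches!("rust".parse::<LogFormat>(), Ok(LogFormat::Rust)));

    // Unknown names are rejected with the "Invalid log format" error above.
    assert!("json".parse::<LogFormat>().is_err());
}
```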
diff --git a/src/commands/logging.rs b/src/commands/logging.rs
index b2d7f71b43..203a04d49e 100644
--- a/src/commands/logging.rs
+++ b/src/commands/logging.rs
@@ -2,7 +2,7 @@
 
 use tracing_subscriber::{prelude::*, EnvFilter};
 
-use super::config::Config;
+use super::config::{Config, LogFormat};
 
 /// Handles setting up logging levels
 #[derive(Debug)]
@@ -106,20 +106,32 @@ impl LoggingLevel {
             (None, None)
         };
 
-        // Configure the logger to write to stderr
-        let logger = tracing_subscriber::fmt::layer().with_writer(std::io::stderr);
-
         // Register the chain of event subscribers:
         //
         // - Jaeger tracing emitter
         // - Env filter (using RUST_LOG as the filter env)
-        // - A stdout logger
+        // - A stderr logger
         //
-        tracing_subscriber::registry()
+        let subscriber = tracing_subscriber::registry()
             .with(opentelemetry)
-            .with(EnvFilter::from_default_env())
-            .with(logger)
-            .init();
+            // filter messages to only those specified by RUST_LOG environment
+            .with(EnvFilter::from_default_env());
+
+        // Configure the logger to write to stderr and install it
+        let output_stream = std::io::stderr;
+
+        let log_format = config.log_format.as_ref().cloned().unwrap_or_default();
+
+        match log_format {
+            LogFormat::Rust => {
+                let logger = tracing_subscriber::fmt::layer().with_writer(output_stream);
+                subscriber.with(logger).init();
+            }
+            LogFormat::LogFmt => {
+                let logger = logfmt::LogFmtLayer::new(output_stream);
+                subscriber.with(logger).init();
+            }
+        };
 
         drop_handle
     }
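Putting the pieces together, the runtime selection in `logging.rs` reduces to the following sketch (OpenTelemetry layer omitted; `LogFormat` comes from `commands::config`, and the value arrives via `--log_format` or `INFLUXDB_IOX_LOG_FORMAT`):

```rust
use tracing_subscriber::{prelude::*, EnvFilter};

fn init_logging(log_format: Option<LogFormat>) {
    // RUST_LOG drives level filtering for either output format.
    let subscriber = tracing_subscriber::registry().with(EnvFilter::from_default_env());

    match log_format.unwrap_or_default() {
        // Default tracing_subscriber formatter, e.g. "Jan 31 13:19:39.059  WARN ..."
        LogFormat::Rust => subscriber
            .with(tracing_subscriber::fmt::layer().with_writer(std::io::stderr))
            .init(),
        // Heroku-style key=value lines from the new logfmt crate.
        LogFormat::LogFmt => subscriber
            .with(logfmt::LogFmtLayer::new(std::io::stderr))
            .init(),
    }
}
```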