From 8522def0525215c5ded57c2ad66949f29c89fdc5 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 13 Mar 2023 12:34:42 -0500 Subject: [PATCH 1/3] fix(telegraf): replace grok tool link, typo, and style (#4757) (#4793) --- content/telegraf/v1.10/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.11/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.12/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.13/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.14/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.15/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.16/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.17/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.18/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.19/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.20/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.21/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.22/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.23/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.24/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.25/data_formats/input/grok.md | 15 +++++++-------- content/telegraf/v1.9/data_formats/input/grok.md | 15 +++++++-------- 17 files changed, 119 insertions(+), 136 deletions(-) diff --git a/content/telegraf/v1.10/data_formats/input/grok.md b/content/telegraf/v1.10/data_formats/input/grok.md index 9ebef9bfd..4092bf722 100644 --- a/content/telegraf/v1.10/data_formats/input/grok.md +++ b/content/telegraf/v1.10/data_formats/input/grok.md @@ -8,10 +8,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -65,12 +65,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -168,8 +167,8 @@ grok will offset the timestamp accordingly. 
When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.11/data_formats/input/grok.md b/content/telegraf/v1.11/data_formats/input/grok.md index b5916697e..88c14ee33 100644 --- a/content/telegraf/v1.11/data_formats/input/grok.md +++ b/content/telegraf/v1.11/data_formats/input/grok.md @@ -8,10 +8,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -65,12 +65,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -168,8 +167,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.12/data_formats/input/grok.md b/content/telegraf/v1.12/data_formats/input/grok.md index 15f52a775..d48495c07 100644 --- a/content/telegraf/v1.12/data_formats/input/grok.md +++ b/content/telegraf/v1.12/data_formats/input/grok.md @@ -8,10 +8,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. 
-If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -65,12 +65,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -168,8 +167,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.13/data_formats/input/grok.md b/content/telegraf/v1.13/data_formats/input/grok.md index daf1ed005..7bd570594 100644 --- a/content/telegraf/v1.13/data_formats/input/grok.md +++ b/content/telegraf/v1.13/data_formats/input/grok.md @@ -8,10 +8,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -65,12 +65,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. 
-logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -168,8 +167,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.14/data_formats/input/grok.md b/content/telegraf/v1.14/data_formats/input/grok.md index ddbcd61da..37f753966 100644 --- a/content/telegraf/v1.14/data_formats/input/grok.md +++ b/content/telegraf/v1.14/data_formats/input/grok.md @@ -8,10 +8,10 @@ menu: parent: Input data formats (parsers) --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -65,12 +65,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -168,8 +167,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. 
The following config examples will parse this input file: diff --git a/content/telegraf/v1.15/data_formats/input/grok.md b/content/telegraf/v1.15/data_formats/input/grok.md index 0e2962191..09a7dcf7b 100644 --- a/content/telegraf/v1.15/data_formats/input/grok.md +++ b/content/telegraf/v1.15/data_formats/input/grok.md @@ -8,10 +8,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -65,12 +65,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -168,8 +167,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.16/data_formats/input/grok.md b/content/telegraf/v1.16/data_formats/input/grok.md index cf47845ac..72168dfdf 100644 --- a/content/telegraf/v1.16/data_formats/input/grok.md +++ b/content/telegraf/v1.16/data_formats/input/grok.md @@ -8,10 +8,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -65,12 +65,11 @@ See https://golang.org/pkg/time/#Parse for more details. 
Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -168,8 +167,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.17/data_formats/input/grok.md b/content/telegraf/v1.17/data_formats/input/grok.md index 141f5c82c..b03e48231 100644 --- a/content/telegraf/v1.17/data_formats/input/grok.md +++ b/content/telegraf/v1.17/data_formats/input/grok.md @@ -8,10 +8,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -65,12 +65,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -168,8 +167,8 @@ grok will offset the timestamp accordingly. 
When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.18/data_formats/input/grok.md b/content/telegraf/v1.18/data_formats/input/grok.md index af4efdd2f..9846e1f86 100644 --- a/content/telegraf/v1.18/data_formats/input/grok.md +++ b/content/telegraf/v1.18/data_formats/input/grok.md @@ -8,10 +8,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -65,12 +65,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -168,8 +167,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.19/data_formats/input/grok.md b/content/telegraf/v1.19/data_formats/input/grok.md index 7f3e01575..3a2f1c2e6 100644 --- a/content/telegraf/v1.19/data_formats/input/grok.md +++ b/content/telegraf/v1.19/data_formats/input/grok.md @@ -8,10 +8,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. 
-If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -65,12 +65,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -168,8 +167,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.20/data_formats/input/grok.md b/content/telegraf/v1.20/data_formats/input/grok.md index 7e7b71cdb..e7482bc7a 100644 --- a/content/telegraf/v1.20/data_formats/input/grok.md +++ b/content/telegraf/v1.20/data_formats/input/grok.md @@ -8,10 +8,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -65,12 +65,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. 
-logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -168,8 +167,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.21/data_formats/input/grok.md b/content/telegraf/v1.21/data_formats/input/grok.md index f7a4b15c7..c9780c6f0 100644 --- a/content/telegraf/v1.21/data_formats/input/grok.md +++ b/content/telegraf/v1.21/data_formats/input/grok.md @@ -9,10 +9,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -66,12 +66,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -169,8 +168,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. 
The following config examples will parse this input file: diff --git a/content/telegraf/v1.22/data_formats/input/grok.md b/content/telegraf/v1.22/data_formats/input/grok.md index 45bb90863..8a84bb691 100644 --- a/content/telegraf/v1.22/data_formats/input/grok.md +++ b/content/telegraf/v1.22/data_formats/input/grok.md @@ -9,10 +9,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -66,12 +66,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -169,8 +168,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.23/data_formats/input/grok.md b/content/telegraf/v1.23/data_formats/input/grok.md index 0c48e122f..847a5a1d6 100644 --- a/content/telegraf/v1.23/data_formats/input/grok.md +++ b/content/telegraf/v1.23/data_formats/input/grok.md @@ -9,10 +9,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -66,12 +66,11 @@ See https://golang.org/pkg/time/#Parse for more details. 
Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -169,8 +168,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.24/data_formats/input/grok.md b/content/telegraf/v1.24/data_formats/input/grok.md index ff6d3817c..729566051 100644 --- a/content/telegraf/v1.24/data_formats/input/grok.md +++ b/content/telegraf/v1.24/data_formats/input/grok.md @@ -9,10 +9,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -66,12 +66,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -169,8 +168,8 @@ grok will offset the timestamp accordingly. 
When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.25/data_formats/input/grok.md b/content/telegraf/v1.25/data_formats/input/grok.md index 4a5b87453..986cbd199 100644 --- a/content/telegraf/v1.25/data_formats/input/grok.md +++ b/content/telegraf/v1.25/data_formats/input/grok.md @@ -9,10 +9,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. -If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -66,12 +66,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -169,8 +168,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: diff --git a/content/telegraf/v1.9/data_formats/input/grok.md b/content/telegraf/v1.9/data_formats/input/grok.md index 4e03c4632..5b2146b22 100644 --- a/content/telegraf/v1.9/data_formats/input/grok.md +++ b/content/telegraf/v1.9/data_formats/input/grok.md @@ -8,10 +8,10 @@ menu: parent: Input data formats --- -The grok data format parses line delimited data using a regular expression-like +The grok data format parses line-delimited data using a regular expression-like language. 
-If you need to become familiar with grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) +For an introduction to grok patterns, see [Grok Basics](https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html#_grok_basics) in the Logstash documentation. The grok parser uses a slightly modified version of logstash "grok" patterns, using the format: @@ -65,12 +65,11 @@ See https://golang.org/pkg/time/#Parse for more details. Telegraf has many of its own [built-in patterns](https://github.com/influxdata/telegraf/blob/master/plugins/parsers/grok/influx_patterns.go), as well as support for most of -[logstash's builtin patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns. +[Logstash's core patterns](https://github.com/logstash-plugins/logstash-patterns-core/blob/main/patterns/ecs-v1/grok-patterns). _Golang regular expressions do not support lookahead or lookbehind. -logstash patterns that depend on these are not supported._ +Logstash patterns that depend on these are not supported._ -If you need help building patterns to match your logs, the -[Grok Debugger application](https://grokdebug.herokuapp.com) might be helpful. +If you need help building patterns to match your logs, [Grok Constructor](https://grokconstructor.appspot.com/) might be helpful. ## Configuration @@ -168,8 +167,8 @@ grok will offset the timestamp accordingly. When saving patterns to the configuration file, keep in mind the different TOML [string](https://github.com/toml-lang/toml#string) types and the escaping rules for each. These escaping rules must be applied in addition to the -escaping required by the grok syntax. Using the Multi-line line literal -syntax with `'''` may be useful. +escaping required by the grok syntax. Using the TOML multi-line literal +syntax (`'''`) may be useful. The following config examples will parse this input file: From fd1cf647da9e17a6a651a07b55cfc5e4aee06c4a Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 13 Mar 2023 13:35:05 -0500 Subject: [PATCH 2/3] fix(api): incorrect link for precision (closes #4675). (#4794) --- content/influxdb/cloud/write-data/developer-tools/api.md | 2 +- content/influxdb/v2.3/write-data/developer-tools/api.md | 2 +- content/influxdb/v2.4/write-data/developer-tools/api.md | 2 +- content/influxdb/v2.5/write-data/developer-tools/api.md | 2 +- content/influxdb/v2.6/write-data/developer-tools/api.md | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/content/influxdb/cloud/write-data/developer-tools/api.md b/content/influxdb/cloud/write-data/developer-tools/api.md index 3ea2d7661..06b2ec0a7 100644 --- a/content/influxdb/cloud/write-data/developer-tools/api.md +++ b/content/influxdb/cloud/write-data/developer-tools/api.md @@ -15,7 +15,7 @@ Use the `POST` request method and include the following in your request: |:----------- |:---------- | | Organization | Use the `org` query parameter in your request URL. | | Bucket | Use the `bucket` query parameter in your request URL. | -| Precision | Use the [`precision`](/influxdb/cloud/write-data/developer-tools/line-protocol/#timestamp-precision) query parameter in your request URL. Default is `ns` | +| Precision | Use the [`precision`](/influxdb/cloud/reference/glossary/#precision) query parameter in your request URL. Default is `ns` | | API token | Use the `Authorization: Token YOUR_API_TOKEN` header. | | Line protocol | Pass as plain text in your request body. 
| diff --git a/content/influxdb/v2.3/write-data/developer-tools/api.md b/content/influxdb/v2.3/write-data/developer-tools/api.md index 0ef97b15e..b97592120 100644 --- a/content/influxdb/v2.3/write-data/developer-tools/api.md +++ b/content/influxdb/v2.3/write-data/developer-tools/api.md @@ -15,7 +15,7 @@ Use the `POST` request method and include the following in your request: |:----------- |:---------- | | Organization | Use the `org` query parameter in your request URL. | | Bucket | Use the `bucket` query parameter in your request URL. | -| Timestamp precision | Use the [`precision`](/influxdb/v2.3/write-data/developer-tools/line-protocol/#timestamp-precision) query parameter in your request URL. Default is `ns`. | +| Timestamp precision | Use the [`precision`](/influxdb/v2.3/reference/glossary/#precision) query parameter in your request URL. Default is `ns`. | | API token | Use the `Authorization: Token YOUR_API_TOKEN` header. | | Line protocol | Pass as plain text in your request body. | diff --git a/content/influxdb/v2.4/write-data/developer-tools/api.md b/content/influxdb/v2.4/write-data/developer-tools/api.md index 9dfb2d73f..3e520001e 100644 --- a/content/influxdb/v2.4/write-data/developer-tools/api.md +++ b/content/influxdb/v2.4/write-data/developer-tools/api.md @@ -15,7 +15,7 @@ Use the `POST` request method and include the following in your request: |:----------- |:---------- | | Organization | Use the `org` query parameter in your request URL. | | Bucket | Use the `bucket` query parameter in your request URL. | -| Timestamp precision | Use the [`precision`](/influxdb/v2.4/write-data/developer-tools/line-protocol/#timestamp-precision) query parameter in your request URL. Default is `ns`. | +| Timestamp precision | Use the [`precision`](/influxdb/v2.4/reference/glossary/#precision) query parameter in your request URL. Default is `ns`. | | API token | Use the `Authorization: Token YOUR_API_TOKEN` header. | | Line protocol | Pass as plain text in your request body. | diff --git a/content/influxdb/v2.5/write-data/developer-tools/api.md b/content/influxdb/v2.5/write-data/developer-tools/api.md index 4ddac85e5..a892619a8 100644 --- a/content/influxdb/v2.5/write-data/developer-tools/api.md +++ b/content/influxdb/v2.5/write-data/developer-tools/api.md @@ -15,7 +15,7 @@ Use the `POST` request method and include the following in your request: |:----------- |:---------- | | Organization | Use the `org` query parameter in your request URL. | | Bucket | Use the `bucket` query parameter in your request URL. | -| Timestamp precision | Use the [`precision`](/influxdb/v2.5/write-data/developer-tools/line-protocol/#timestamp-precision) query parameter in your request URL. Default is `ns`. | +| Timestamp precision | Use the [`precision`](/influxdb/v2.5/reference/glossary/#precision) query parameter in your request URL. Default is `ns`. | | API token | Use the `Authorization: Token YOUR_API_TOKEN` header. | | Line protocol | Pass as plain text in your request body. | diff --git a/content/influxdb/v2.6/write-data/developer-tools/api.md b/content/influxdb/v2.6/write-data/developer-tools/api.md index ac2849bbd..cef5dba1b 100644 --- a/content/influxdb/v2.6/write-data/developer-tools/api.md +++ b/content/influxdb/v2.6/write-data/developer-tools/api.md @@ -15,7 +15,7 @@ Use the `POST` request method and include the following in your request: |:----------- |:---------- | | Organization | Use the `org` query parameter in your request URL. | | Bucket | Use the `bucket` query parameter in your request URL. 
| -| Timestamp precision | Use the [`precision`](/influxdb/v2.6/write-data/developer-tools/line-protocol/#timestamp-precision) query parameter in your request URL. Default is `ns`. | +| Timestamp precision | Use the [`precision`](/influxdb/v2.6/reference/glossary/#precision) query parameter in your request URL. Default is `ns`. | | API token | Use the `Authorization: Token YOUR_API_TOKEN` header. | | Line protocol | Pass as plain text in your request body. | From 61c88ca2b1a296762f23a6ddd4ddcaa5d2aff36c Mon Sep 17 00:00:00 2001 From: Sciator <39964450+Sciator@users.noreply.github.com> Date: Mon, 13 Mar 2023 20:11:01 +0100 Subject: [PATCH 3/3] docs: async-js (#4581) * docs: async-js * docs: added missing async keyword * docs: fixed indentation * docs: Apply suggestions from code review Co-authored-by: Jason Stirnaman --------- Co-authored-by: kelseiv <47797004+kelseiv@users.noreply.github.com> Co-authored-by: Jason Stirnaman --- .../client-libraries/nodejs/query.md | 22 +++---- .../client-libraries/nodejs/query.md | 20 +++---- .../client-libraries/nodejs/query.md | 20 +++---- .../client-library-starter/nodejs.md | 57 +++++++------------ .../v2.3/api-guide/tutorials/nodejs.md | 57 +++++++------------ .../client-libraries/nodejs/query.md | 20 +++---- .../v2.4/api-guide/tutorials/nodejs.md | 57 +++++++------------ .../nodejs/server/devices/_devices.js | 34 +++++------ shared/text/api/v2.0/query/query.mjs | 17 ++---- 9 files changed, 114 insertions(+), 190 deletions(-) diff --git a/content/influxdb/v2.0/api-guide/client-libraries/nodejs/query.md b/content/influxdb/v2.0/api-guide/client-libraries/nodejs/query.md index fbc1c90e5..10973f7f3 100644 --- a/content/influxdb/v2.0/api-guide/client-libraries/nodejs/query.md +++ b/content/influxdb/v2.0/api-guide/client-libraries/nodejs/query.md @@ -14,7 +14,7 @@ aliases: Use the [InfluxDB JavaScript client library](https://github.com/influxdata/influxdb-client-js) in a Node.js environment to query InfluxDB. -The following example sends a Flux query to an InfluxDB bucket and outputs rows from an observable table. +The following example sends a Flux query to an InfluxDB bucket and outputs rows as a JavaScript _asynchronous iterable_ object. ## Before you begin @@ -56,25 +56,21 @@ The following example sends a Flux query to an InfluxDB bucket and outputs rows ``` Replace *`YOUR_BUCKET`* with the name of your InfluxDB bucket. -4. Use the `queryRows()` method of the query client to query InfluxDB. - `queryRows()` takes a Flux query and an [RxJS **Observer**](http://reactivex.io/rxjs/manual/overview.html#observer) object. - The client returns [table](/{{% latest "influxdb" %}}/reference/syntax/annotated-csv/#tables) metadata and rows as an [RxJS **Observable**](http://reactivex.io/rxjs/manual/overview.html#observable). - `queryRows()` subscribes your observer to the observable. - Finally, the observer logs the rows from the response to the terminal. +4. Use the `iterateRows()` method of the query client to query InfluxDB. + `iterateRows()` takes a Flux query and returns the [table](/{{% latest "influxdb" %}}/reference/syntax/annotated-csv/#tables) of metadata and rows as an asynchronous iterable (`AsyncIterable`). 
+   The following example shows how to write an asynchronous function that uses the `iterateRows()` method to query a bucket and uses the JavaScript `for await...of` statement to iterate over the query results:
+
    ```js
-   const observer = {
-     next(row, tableMeta) {
-       const o = tableMeta.toObject(row)
+   const myQuery = async () => {
+     for await (const {values, tableMeta} of queryApi.iterateRows(fluxQuery)) {
+       const o = tableMeta.toObject(values)
        console.log(
          `${o._time} ${o._measurement} in '${o.location}' (${o.sensor_id}): ${o._field}=${o._value}`
        )
      }
    }
-
-   queryApi.queryRows(fluxQuery, observer)
-
-   ```
+   myQuery()

### Complete example

diff --git a/content/influxdb/v2.1/api-guide/client-libraries/nodejs/query.md b/content/influxdb/v2.1/api-guide/client-libraries/nodejs/query.md
index 1e0186b41..4331ae0dd 100644
--- a/content/influxdb/v2.1/api-guide/client-libraries/nodejs/query.md
+++ b/content/influxdb/v2.1/api-guide/client-libraries/nodejs/query.md
@@ -14,7 +14,7 @@ aliases:
 Use the [InfluxDB JavaScript client library](https://github.com/influxdata/influxdb-client-js) in a Node.js environment to query InfluxDB.

-The following example sends a Flux query to an InfluxDB bucket and outputs rows from an observable table.
+The following example sends a Flux query to an InfluxDB bucket and outputs rows as a JavaScript _asynchronous iterable_ object.

 ## Before you begin

@@ -56,24 +56,20 @@
    ```
    Replace *`YOUR_BUCKET`* with the name of your InfluxDB bucket.
-4. Use the `queryRows()` method of the query client to query InfluxDB.
-   `queryRows()` takes a Flux query and an [RxJS **Observer**](http://reactivex.io/rxjs/manual/overview.html#observer) object.
-   The client returns [table](/{{% latest "influxdb" %}}/reference/syntax/annotated-csv/#tables) metadata and rows as an [RxJS **Observable**](http://reactivex.io/rxjs/manual/overview.html#observable).
-   `queryRows()` subscribes your observer to the observable.
-   Finally, the observer logs the rows from the response to the terminal.
+4. Use the `iterateRows()` method of the query client to query InfluxDB.
+   `iterateRows()` takes a Flux query and returns the results as an asynchronous collection.
+   The client returns [table](/{{% latest "influxdb" %}}/reference/syntax/annotated-csv/#tables) metadata and rows as an `AsyncIterable`.

    ```js
-   const observer = {
-     next(row, tableMeta) {
-       const o = tableMeta.toObject(row)
+   const myQuery = async () => {
+     for await (const {values, tableMeta} of queryApi.iterateRows(fluxQuery)) {
+       const o = tableMeta.toObject(values)
        console.log(
          `${o._time} ${o._measurement} in '${o.location}' (${o.sensor_id}): ${o._field}=${o._value}`
        )
      }
    }
-
-   queryApi.queryRows(fluxQuery, observer)
-
+   myQuery()
    ```

### Complete example

diff --git a/content/influxdb/v2.2/api-guide/client-libraries/nodejs/query.md b/content/influxdb/v2.2/api-guide/client-libraries/nodejs/query.md
index 537d65c10..7ab9d2d3a 100644
--- a/content/influxdb/v2.2/api-guide/client-libraries/nodejs/query.md
+++ b/content/influxdb/v2.2/api-guide/client-libraries/nodejs/query.md
@@ -14,7 +14,7 @@ aliases:
 Use the [InfluxDB JavaScript client library](https://github.com/influxdata/influxdb-client-js) in a Node.js environment to query InfluxDB.

-The following example sends a Flux query to an InfluxDB bucket and outputs rows from an observable table.
+The following example sends a Flux query to an InfluxDB bucket and outputs rows as an asynchronous iterable.
## Before you begin @@ -56,24 +56,20 @@ The following example sends a Flux query to an InfluxDB bucket and outputs rows ``` Replace *`YOUR_BUCKET`* with the name of your InfluxDB bucket. -4. Use the `queryRows()` method of the query client to query InfluxDB. - `queryRows()` takes a Flux query and an [RxJS **Observer**](http://reactivex.io/rxjs/manual/overview.html#observer) object. - The client returns [table](/{{% latest "influxdb" %}}/reference/syntax/annotated-csv/#tables) metadata and rows as an [RxJS **Observable**](http://reactivex.io/rxjs/manual/overview.html#observable). - `queryRows()` subscribes your observer to the observable. - Finally, the observer logs the rows from the response to the terminal. +4. Use the `iterateRows()` method of the query client to query InfluxDB. + `iterateRows()` takes a Flux query and returns table as an asynchronous collection. + The client returns [table](/{{% latest "influxdb" %}}/reference/syntax/annotated-csv/#tables) metadata and rows as an as an AsyncIterable. ```js - const observer = { - next(row, tableMeta) { - const o = tableMeta.toObject(row) + const myQuery = async () => { + for await (const {values, tableMeta} of queryApi.iterateRows(fluxQuery)) { + const o = tableMeta.toObject(values) console.log( `${o._time} ${o._measurement} in '${o.location}' (${o.sensor_id}): ${o._field}=${o._value}` ) } } - - queryApi.queryRows(fluxQuery, observer) - + myQuery() ``` ### Complete example diff --git a/content/influxdb/v2.2/api-guide/tutorials/client-library-starter/nodejs.md b/content/influxdb/v2.2/api-guide/tutorials/client-library-starter/nodejs.md index 009ddc7d9..4ef4b47ce 100644 --- a/content/influxdb/v2.2/api-guide/tutorials/client-library-starter/nodejs.md +++ b/content/influxdb/v2.2/api-guide/tutorials/client-library-starter/nodejs.md @@ -249,26 +249,20 @@ const influxdb = new InfluxDB({url: process.env.INFLUX_URL, token: process.env.I |> last()` const devices = {} - return await new Promise((resolve, reject) => { - queryApi.queryRows(fluxQuery, { - next(row, tableMeta) { - const o = tableMeta.toObject(row) - const deviceId = o.deviceId - if (!deviceId) { - return - } - const device = devices[deviceId] || (devices[deviceId] = {deviceId}) - device[o._field] = o._value - if (!device.updatedAt || device.updatedAt < o._time) { - device.updatedAt = o._time - } - }, - error: reject, - complete() { - resolve(devices) - }, - }) - }) + for await (const {row, tableMeta} of queryApi.iterateRows(fluxQuery)) { + const o = tableMeta.toObject(row) + const deviceId = o.deviceId + if (!deviceId) { + return + } + const device = devices[deviceId] || (devices[deviceId] = {deviceId}) + device[o._field] = o._value + if (!device.updatedAt || device.updatedAt < o._time) { + device.updatedAt = o._time + } + } + + return devices } ``` @@ -284,26 +278,17 @@ for registered devices, processes the data, and returns a Promise with the resul If you invoke the function as `getDevices()` (without a _`deviceId`_), it retrieves all `deviceauth` points and returns a Promise with `{ DEVICE_ID: ROW_DATA }`. -To send the query and process results, the `getDevices(deviceId)` function uses the `QueryAPI queryRows(query, consumer)` method. -`queryRows` executes the `query` and provides the Annotated CSV result as an Observable to the `consumer`. -`queryRows` has the following TypeScript signature: +To send the query and process results, the `getDevices(deviceId)` function uses the `QueryAPI iterateRows(query)` asynchronous method. 
+`iterateRows` executes the `query` and provides the Annotated CSV result as an AsyncIterable.
+`iterateRows` has the following TypeScript signature:
 
 ```ts
-queryRows(
-  query: string | ParameterizedQuery,
-  consumer: FluxResultObserver
-): void
+iterateRows(
+  query: string | ParameterizedQuery
+): AsyncIterable
 ```
 
-{{% caption %}}[@influxdata/influxdb-client-js QueryAPI](https://github.com/influxdata/influxdb-client-js/blob/3db2942432b993048d152e0d0e8ec8499eedfa60/packages/core/src/QueryApi.ts){{% /caption %}}
-
-The `consumer` that you provide must implement the [`FluxResultObserver` interface](https://github.com/influxdata/influxdb-client-js/blob/3db2942432b993048d152e0d0e8ec8499eedfa60/packages/core/src/results/FluxResultObserver.ts) and provide the following callback functions:
-
-- `next(row, tableMeta)`: processes the next row and table metadata--for example, to prepare the response.
-- `error(error)`: receives and handles errors--for example, by rejecting the Promise.
-- `complete()`: signals when all rows have been consumed--for example, by resolving the Promise.
-
-To learn more about Observers, see the [RxJS Guide](https://rxjs.dev/guide/observer).
+{{% caption %}}[@influxdata/influxdb-client-js QueryAPI](https://github.com/influxdata/influxdb-client-js/blob/af7cf3b6c1003ff0400e91bcb6a0b860668d6458/packages/core/src/QueryApi.ts){{% /caption %}}
 
 ## Create the API to register devices
 
diff --git a/content/influxdb/v2.3/api-guide/tutorials/nodejs.md b/content/influxdb/v2.3/api-guide/tutorials/nodejs.md
index e7b72439a..f227cc0a5 100644
--- a/content/influxdb/v2.3/api-guide/tutorials/nodejs.md
+++ b/content/influxdb/v2.3/api-guide/tutorials/nodejs.md
@@ -259,26 +259,20 @@ const influxdb = new InfluxDB({url: process.env.INFLUX_URL, token: process.env.I
     |> last()`
   const devices = {}
-  return await new Promise((resolve, reject) => {
-    queryApi.queryRows(fluxQuery, {
-      next(row, tableMeta) {
-        const o = tableMeta.toObject(row)
-        const deviceId = o.deviceId
-        if (!deviceId) {
-          return
-        }
-        const device = devices[deviceId] || (devices[deviceId] = {deviceId})
-        device[o._field] = o._value
-        if (!device.updatedAt || device.updatedAt < o._time) {
-          device.updatedAt = o._time
-        }
-      },
-      error: reject,
-      complete() {
-        resolve(devices)
-      },
-    })
-  })
+  for await (const {values, tableMeta} of queryApi.iterateRows(fluxQuery)) {
+    const o = tableMeta.toObject(values)
+    const deviceId = o.deviceId
+    if (!deviceId) {
+      continue
+    }
+    const device = devices[deviceId] || (devices[deviceId] = {deviceId})
+    device[o._field] = o._value
+    if (!device.updatedAt || device.updatedAt < o._time) {
+      device.updatedAt = o._time
+    }
+  }
+
+  return devices
 }
 ```
 
@@ -294,26 +288,17 @@ for registered devices, processes the data, and returns a Promise with the resul
 If you invoke the function as `getDevices()` (without a _`deviceId`_), it retrieves all `deviceauth` points and returns a Promise with `{ DEVICE_ID: ROW_DATA }`.
 
-To send the query and process results, the `getDevices(deviceId)` function uses the `QueryAPI queryRows(query, consumer)` method.
-`queryRows` executes the `query` and provides the Annotated CSV result as an Observable to the `consumer`.
-`queryRows` has the following TypeScript signature:
+To send the query and process results, the `getDevices(deviceId)` function uses the `QueryAPI iterateRows(query)` asynchronous method.
+`iterateRows` executes the `query` and provides the Annotated CSV result as an AsyncIterable.
+`iterateRows` has the following TypeScript signature:
 
 ```ts
-queryRows(
-  query: string | ParameterizedQuery,
-  consumer: FluxResultObserver
-): void
+iterateRows(
+  query: string | ParameterizedQuery
+): AsyncIterable
 ```
 
-{{% caption %}}[@influxdata/influxdb-client-js QueryAPI](https://github.com/influxdata/influxdb-client-js/blob/3db2942432b993048d152e0d0e8ec8499eedfa60/packages/core/src/QueryApi.ts){{% /caption %}}
-
-The `consumer` that you provide must implement the [`FluxResultObserver` interface](https://github.com/influxdata/influxdb-client-js/blob/3db2942432b993048d152e0d0e8ec8499eedfa60/packages/core/src/results/FluxResultObserver.ts) and provide the following callback functions:
-
-- `next(row, tableMeta)`: processes the next row and table metadata--for example, to prepare the response.
-- `error(error)`: receives and handles errors--for example, by rejecting the Promise.
-- `complete()`: signals when all rows have been consumed--for example, by resolving the Promise.
-
-To learn more about Observers, see the [RxJS Guide](https://rxjs.dev/guide/observer).
+{{% caption %}}[@influxdata/influxdb-client-js QueryAPI](https://github.com/influxdata/influxdb-client-js/blob/af7cf3b6c1003ff0400e91bcb6a0b860668d6458/packages/core/src/QueryApi.ts){{% /caption %}}
 
 ## Create the API to register devices
 
diff --git a/content/influxdb/v2.4/api-guide/client-libraries/nodejs/query.md b/content/influxdb/v2.4/api-guide/client-libraries/nodejs/query.md
index b6af697d0..66e30ae15 100644
--- a/content/influxdb/v2.4/api-guide/client-libraries/nodejs/query.md
+++ b/content/influxdb/v2.4/api-guide/client-libraries/nodejs/query.md
@@ -14,7 +14,7 @@ aliases:
 Use the [InfluxDB JavaScript client library](https://github.com/influxdata/influxdb-client-js) in a Node.js environment to query InfluxDB.
 
-The following example sends a Flux query to an InfluxDB bucket and outputs rows from an observable table.
+The following example sends a Flux query to an InfluxDB bucket and outputs rows as an asynchronous iterable.
 
 ## Before you begin
 
@@ -56,24 +56,20 @@ The following example sends a Flux query to an InfluxDB bucket and outputs rows
    ```
 
    Replace *`YOUR_BUCKET`* with the name of your InfluxDB bucket.
-4. Use the `queryRows()` method of the query client to query InfluxDB.
-   `queryRows()` takes a Flux query and an [RxJS **Observer**](http://reactivex.io/rxjs/manual/overview.html#observer) object.
-   The client returns [table](/{{% latest "influxdb" %}}/reference/syntax/annotated-csv/#tables) metadata and rows as an [RxJS **Observable**](http://reactivex.io/rxjs/manual/overview.html#observable).
-   `queryRows()` subscribes your observer to the observable.
-   Finally, the observer logs the rows from the response to the terminal.
+4. Use the `iterateRows()` method of the query client to query InfluxDB.
+   `iterateRows()` takes a Flux query and returns query results as an asynchronous collection.
+   The client returns [table](/{{% latest "influxdb" %}}/reference/syntax/annotated-csv/#tables) metadata and rows as an AsyncIterable.
 
   ```js
-   const observer = {
-     next(row, tableMeta) {
-       const o = tableMeta.toObject(row)
+   const myQuery = async () => {
+     for await (const {values, tableMeta} of queryApi.iterateRows(fluxQuery)) {
+       const o = tableMeta.toObject(values)
        console.log(
          `${o._time} ${o._measurement} in '${o.location}' (${o.sensor_id}): ${o._field}=${o._value}`
        )
      }
    }
-
-   queryApi.queryRows(fluxQuery, observer)
-
+   myQuery()
    ```
 
 ### Complete example
diff --git a/content/influxdb/v2.4/api-guide/tutorials/nodejs.md b/content/influxdb/v2.4/api-guide/tutorials/nodejs.md
index 1782b4b4a..e0bd31854 100644
--- a/content/influxdb/v2.4/api-guide/tutorials/nodejs.md
+++ b/content/influxdb/v2.4/api-guide/tutorials/nodejs.md
@@ -259,26 +259,20 @@ const influxdb = new InfluxDB({url: process.env.INFLUX_URL, token: process.env.I
     |> last()`
   const devices = {}
-  return await new Promise((resolve, reject) => {
-    queryApi.queryRows(fluxQuery, {
-      next(row, tableMeta) {
-        const o = tableMeta.toObject(row)
-        const deviceId = o.deviceId
-        if (!deviceId) {
-          return
-        }
-        const device = devices[deviceId] || (devices[deviceId] = {deviceId})
-        device[o._field] = o._value
-        if (!device.updatedAt || device.updatedAt < o._time) {
-          device.updatedAt = o._time
-        }
-      },
-      error: reject,
-      complete() {
-        resolve(devices)
-      },
-    })
-  })
+  for await (const {values, tableMeta} of queryApi.iterateRows(fluxQuery)) {
+    const o = tableMeta.toObject(values)
+    const deviceId = o.deviceId
+    if (!deviceId) {
+      continue
+    }
+    const device = devices[deviceId] || (devices[deviceId] = {deviceId})
+    device[o._field] = o._value
+    if (!device.updatedAt || device.updatedAt < o._time) {
+      device.updatedAt = o._time
+    }
+  }
+
+  return devices
 }
 ```
 
@@ -294,26 +288,17 @@ for registered devices, processes the data, and returns a Promise with the resul
 If you invoke the function as `getDevices()` (without a _`deviceId`_), it retrieves all `deviceauth` points and returns a Promise with `{ DEVICE_ID: ROW_DATA }`.
 
-To send the query and process results, the `getDevices(deviceId)` function uses the `QueryAPI queryRows(query, consumer)` method.
-`queryRows` executes the `query` and provides the Annotated CSV result as an Observable to the `consumer`.
-`queryRows` has the following TypeScript signature:
+To send the query and process results, the `getDevices(deviceId)` function uses the `QueryAPI iterateRows(query)` asynchronous method.
+`iterateRows` executes the `query` and provides the Annotated CSV result as an AsyncIterable.
+`iterateRows` has the following TypeScript signature:
 
 ```ts
-queryRows(
-  query: string | ParameterizedQuery,
-  consumer: FluxResultObserver
-): void
+iterateRows(
+  query: string | ParameterizedQuery
+): AsyncIterable
 ```
 
-{{% caption %}}[@influxdata/influxdb-client-js QueryAPI](https://github.com/influxdata/influxdb-client-js/blob/3db2942432b993048d152e0d0e8ec8499eedfa60/packages/core/src/QueryApi.ts){{% /caption %}}
-
-The `consumer` that you provide must implement the [`FluxResultObserver` interface](https://github.com/influxdata/influxdb-client-js/blob/3db2942432b993048d152e0d0e8ec8499eedfa60/packages/core/src/results/FluxResultObserver.ts) and provide the following callback functions:
-
-- `next(row, tableMeta)`: processes the next row and table metadata--for example, to prepare the response.
-- `error(error)`: receives and handles errors--for example, by rejecting the Promise.
-- `complete()`: signals when all rows have been consumed--for example, by resolving the Promise.
-
-To learn more about Observers, see the [RxJS Guide](https://rxjs.dev/guide/observer).
+{{% caption %}}[@influxdata/influxdb-client-js QueryAPI](https://github.com/influxdata/influxdb-client-js/blob/af7cf3b6c1003ff0400e91bcb6a0b860668d6458/packages/core/src/QueryApi.ts){{% /caption %}}
 
 ## Create the API to register devices
 
diff --git a/shared/text/api/v2.0/client-library-examples/nodejs/server/devices/_devices.js b/shared/text/api/v2.0/client-library-examples/nodejs/server/devices/_devices.js
index 141fd5702..75ec0d48b 100644
--- a/shared/text/api/v2.0/client-library-examples/nodejs/server/devices/_devices.js
+++ b/shared/text/api/v2.0/client-library-examples/nodejs/server/devices/_devices.js
@@ -23,27 +23,19 @@ const INFLUX_BUCKET_AUTH = process.env.INFLUX_BUCKET_AUTH
     |> last()`
   const devices = {}
   console.log(`*** QUERY *** \n ${fluxQuery}`)
-  return await new Promise((resolve, reject) => {
-    queryApi.queryRows(fluxQuery, {
-      next(row, tableMeta) {
-        const o = tableMeta.toObject(row)
-        const deviceId = o.deviceId
-        if (!deviceId) {
-          return
-        }
-        const device = devices[deviceId] || (devices[deviceId] = {deviceId})
-        device[o._field] = o._value
-        if (!device.updatedAt || device.updatedAt < o._time) {
-          device.updatedAt = o._time
-        }
-      },
-      error: reject,
-      complete() {
-        console.log(JSON.stringify(devices))
-        resolve(devices)
-      },
-    })
-  })
+  for await (const {values, tableMeta} of queryApi.iterateRows(fluxQuery)) {
+    const o = tableMeta.toObject(values)
+    const deviceId = o.deviceId
+    if (!deviceId) {
+      continue
+    }
+    const device = devices[deviceId] || (devices[deviceId] = {deviceId})
+    device[o._field] = o._value
+    if (!device.updatedAt || device.updatedAt < o._time) {
+      device.updatedAt = o._time
+    }
+  }
+  return devices
 }
\ No newline at end of file
diff --git a/shared/text/api/v2.0/query/query.mjs b/shared/text/api/v2.0/query/query.mjs
index c72409948..ff7e704d0 100644
--- a/shared/text/api/v2.0/query/query.mjs
+++ b/shared/text/api/v2.0/query/query.mjs
@@ -21,21 +21,14 @@ const queryApi = new InfluxDB({url, token}).getQueryApi(org)
 
 /** To avoid SQL injection, use a string literal for the query. */
 const fluxQuery = 'from(bucket:"air_sensor") |> range(start: 0) |> filter(fn: (r) => r._measurement == "temperature")'
 
-const fluxObserver = {
-  next(row, tableMeta) {
-    const o = tableMeta.toObject(row)
+const myQuery = async () => {
+  for await (const {values, tableMeta} of queryApi.iterateRows(fluxQuery)) {
+    const o = tableMeta.toObject(values)
     console.log(
-      `${o._time} ${o._measurement} in ${o.region} (${o.sensor_id}): ${o._field}=${o._value}`
+      `${o._time} ${o._measurement} in '${o.location}' (${o.sensor_id}): ${o._field}=${o._value}`
     )
-  },
-  error(error) {
-    console.error(error)
-    console.log('\nFinished ERROR')
-  },
-  complete() {
-    console.log('\nFinished SUCCESS')
   }
 }
 
 /** Execute a query and receive line table metadata and rows. */
-queryApi.queryRows(fluxQuery, fluxObserver)
+myQuery()
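
The hunks above migrate every Node.js example from the `queryRows()` observer callbacks to the `iterateRows()` async iterable. The following standalone sketch is not part of the patch; the `url`, `token`, `org`, and bucket values are placeholders. It shows the migrated pattern end to end, with a `try`/`catch` taking over the roles of the removed `error()` and `complete()` callbacks:

```js
// Minimal sketch of the iterateRows() pattern the patch migrates to.
// The url, token, org, and bucket values below are placeholders.
import {InfluxDB} from '@influxdata/influxdb-client'

const queryApi = new InfluxDB({url: 'http://localhost:8086', token: 'my-token'})
  .getQueryApi('my-org')

const fluxQuery =
  'from(bucket:"air_sensor") |> range(start: 0) |> filter(fn: (r) => r._measurement == "temperature")'

const myQuery = async () => {
  try {
    // iterateRows() returns an AsyncIterable of {values, tableMeta} pairs.
    for await (const {values, tableMeta} of queryApi.iterateRows(fluxQuery)) {
      const o = tableMeta.toObject(values)
      console.log(`${o._time} ${o._measurement}: ${o._field}=${o._value}`)
    }
    // The loop ending normally replaces the observer's complete() callback.
    console.log('\nFinished SUCCESS')
  } catch (error) {
    // A failed query rejects the iteration and lands here, replacing error().
    console.error(error)
    console.log('\nFinished ERROR')
  }
}

myQuery()
```

Because the iterable is consumed with `for await...of`, completion is simply the end of the loop, and a query failure surfaces as a rejected iteration handled by the surrounding `catch`.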