Merge branch 'master' into jmercer/plugin-sync-test

commit 31ec636954
```diff
@@ -31,7 +31,8 @@ LogicalPlan
 [Mm]onitor
 MBs?
 PBs?
-Parquet|\b\w*-*parquet-\w*\b|\b--\w*parquet\w*\b|`[^`]*parquet[^`]*`
+Parquet
+\w*-?\w*parquet\w*-\w*
 Redoc
 SQLAlchemy
 SQLAlchemy
@@ -41,9 +42,11 @@ System.Data.Odbc
 TBs?
 \bUI\b
 URL
+\w*-?\w*url\w*-\w*
 US (East|West|Central|North|South|Northeast|Northwest|Southeast|Southwest)
 Unix
 WALs?
+\w*-?wal-\w*
 Webpack
 [pP]y.*\b
 \b\w+_\w+\b
@@ -82,7 +85,6 @@ quoteChar
 retentionRules
 sourceBucket
 tagKey
-url[s]?
 v2
 v3
 venv
```
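These entries read like case-sensitive accept patterns for a spelling/style linter. A quick sanity check of the new hyphenated-compound patterns, assuming POSIX extended regex semantics (`\w` as supported by GNU grep):

```bash
# Both lines should print a match for the newly added patterns.
echo "use the iceberg-parquet-export option" | grep -E '\w*-?\w*parquet\w*-\w*'
echo "configure wal-flush intervals" | grep -E '\w*-?wal-\w*'
```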
```diff
@@ -33,9 +33,6 @@ call_lefthook()
   then
     "$dir/node_modules/lefthook/bin/index.js" "$@"
 
-  elif go tool lefthook -h >/dev/null 2>&1
-  then
-    go tool lefthook "$@"
   elif bundle exec lefthook -h >/dev/null 2>&1
   then
     bundle exec lefthook "$@"
@@ -45,21 +42,12 @@ call_lefthook()
   elif pnpm lefthook -h >/dev/null 2>&1
   then
     pnpm lefthook "$@"
-  elif swift package lefthook >/dev/null 2>&1
+  elif swift package plugin lefthook >/dev/null 2>&1
   then
-    swift package --build-path .build/lefthook --disable-sandbox lefthook "$@"
+    swift package --disable-sandbox plugin lefthook "$@"
   elif command -v mint >/dev/null 2>&1
   then
     mint run csjones/lefthook-plugin "$@"
-  elif uv run lefthook -h >/dev/null 2>&1
-  then
-    uv run lefthook "$@"
-  elif mise exec -- lefthook -h >/dev/null 2>&1
-  then
-    mise exec -- lefthook "$@"
-  elif devbox run lefthook -h >/dev/null 2>&1
-  then
-    devbox run lefthook "$@"
   else
     echo "Can't find lefthook in PATH"
   fi
```
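The wrapper probes each runner in order and dispatches to the first one that responds; after this change the Go, uv, mise, and devbox probes are gone. To see which runner the wrapper actually picks on a given machine, you can trace it; a sketch, assuming lefthook installed its wrapper at the usual `.git/hooks` location:

```bash
# Trace the hook wrapper and show the first lefthook invocation it reaches.
sh -x .git/hooks/pre-commit 2>&1 | grep -m1 lefthook
```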
(The same two hunks are repeated verbatim in two more lefthook wrapper scripts.)
`````diff
@@ -19,7 +19,7 @@ Complete reference for custom Hugo shortcodes used in InfluxData documentation.
 - [Content Management](#content-management)
 - [Special Purpose](#special-purpose)
 
----
+***
 
 ## Notes and Warnings
 
@@ -146,7 +146,7 @@ Use the `{{< api-endpoint >}}` shortcode to generate a code block that contains
 - **method**: HTTP request method (get, post, patch, put, or delete)
 - **endpoint**: API endpoint
 - **api-ref**: Link the endpoint to a specific place in the API documentation
-- **influxdb_host**: Specify which InfluxDB product host to use _if the `endpoint` contains the `influxdb/host` shortcode_. Uses the current InfluxDB product as default. Supports the following product values:
+- **influxdb_host**: Specify which InfluxDB product host to use *if the `endpoint` contains the `influxdb/host` shortcode*. Uses the current InfluxDB product as default. Supports the following product values:
   - oss
   - cloud
   - serverless
@@ -268,11 +268,11 @@ To link to tabbed content, click on the tab and use the URL parameter shown. It
 
 Use the `{{< page-nav >}}` shortcode to add page navigation buttons to a page. These are useful for guiding users through a set of docs that should be read in sequential order. The shortcode has the following parameters:
 
-- **prev:** path of the previous document _(optional)_
-- **next:** path of the next document _(optional)_
-- **prevText:** override the button text linking to the previous document _(optional)_
-- **nextText:** override the button text linking to the next document _(optional)_
-- **keepTab:** include the currently selected tab in the button link _(optional)_
+- **prev:** path of the previous document *(optional)*
+- **next:** path of the next document *(optional)*
+- **prevText:** override the button text linking to the previous document *(optional)*
+- **nextText:** override the button text linking to the next document *(optional)*
+- **keepTab:** include the currently selected tab in the button link *(optional)*
 
 The shortcode generates buttons that link to both the previous and next documents. By default, the shortcode uses either the `list_title` or the `title` of the linked document, but you can use `prevText` and `nextText` to override button text.
 
@@ -308,7 +308,7 @@ The children shortcode can also be used to list only "section" articles (those w
 {{< children show="pages" >}}
 ```
 
-_By default, it displays both sections and pages._
+*By default, it displays both sections and pages.*
 
 Use the `type` argument to specify the format of the children list.
 
@@ -325,7 +325,7 @@ The following list types are available:
 
 #### Include a "Read more" link
 
-To include a "Read more" link with each child summary, set `readmore=true`. _Only the `articles` list type supports "Read more" links._
+To include a "Read more" link with each child summary, set `readmore=true`. *Only the `articles` list type supports "Read more" links.*
 
 ```md
 {{< children readmore=true >}}
@@ -333,7 +333,7 @@ To include a "Read more" link with each child summary, set `readmore=true`. _Onl
 
 #### Include a horizontal rule
 
-To include a horizontal rule after each child summary, set `hr=true`. _Only the `articles` list type supports horizontal rules._
+To include a horizontal rule after each child summary, set `hr=true`. *Only the `articles` list type supports horizontal rules.*
 
 ```md
 {{< children hr=true >}}
@@ -390,11 +390,11 @@ This is useful for maintaining and referencing sample code variants in their nat
 
 #### Include specific files from the same directory
 
-> [!Caution]
+> \[!Caution]
 > **Don't use for code examples**
 > Using this and `get-shared-text` shortcodes to include code examples prevents the code from being tested.
 
-To include the text from one file in another file in the same directory, use the `{{< get-leaf-text >}}` shortcode. The directory that contains both files must be a Hugo [_Leaf Bundle_](https://gohugo.io/content-management/page-bundles/#leaf-bundles), a directory that doesn't have any child directories.
+To include the text from one file in another file in the same directory, use the `{{< get-leaf-text >}}` shortcode. The directory that contains both files must be a Hugo [*Leaf Bundle*](https://gohugo.io/content-management/page-bundles/#leaf-bundles), a directory that doesn't have any child directories.
 
 In the following example, `api` is a leaf bundle. `content` isn't.
 
@@ -695,7 +695,7 @@ Column 2
 
 The following options are available:
 
-- half _(Default)_
+- half *(Default)*
 - third
 - quarter
 
@@ -721,10 +721,10 @@ Click {{< caps >}}Add Data{{< /caps >}}
 
 ### Authentication token link
 
-Use the `{{% token-link "<descriptor>" "<link_append>%}}` shortcode to automatically generate links to token management documentation. The shortcode accepts two _optional_ arguments:
+Use the `{{% token-link "<descriptor>" "<link_append>%}}` shortcode to automatically generate links to token management documentation. The shortcode accepts two *optional* arguments:
 
 - **descriptor**: An optional token descriptor
-- **link_append**: An optional path to append to the token management link path, `/<product>/<version>/admin/tokens/`.
+- **link\_append**: An optional path to append to the token management link path, `/<product>/<version>/admin/tokens/`.
 
 ```md
 {{% token-link "database" "resource/" %}}
@@ -775,7 +775,7 @@ Descriptions should follow consistent patterns:
   - Recommended: "your {{% token-link "database" %}}"{{% show-in "enterprise" %}} with permissions on the specified database{{% /show-in %}}
   - Avoid: "your token", "the token", "an authorization token"
 3. **Database names**:
-  - Recommended: "the name of the database to [action]"
+  - Recommended: "the name of the database to \[action]"
   - Avoid: "your database", "the database name"
 4. **Conditional content**:
   - Use `{{% show-in "enterprise" %}}` for content specific to enterprise versions
@@ -801,9 +801,71 @@ Descriptions should follow consistent patterns:
 - `{{% code-placeholder-key %}}`: Use this shortcode to define a placeholder key
 - `{{% /code-placeholder-key %}}`: Use this shortcode to close the key name
 
-_The `placeholders` attribute supercedes the deprecated `code-placeholders` shortcode._
+*The `placeholders` attribute supersedes the deprecated `code-placeholders` shortcode.*
 
-#### Example usage
+#### Automated placeholder syntax
+
+Use the `docs placeholders` command to automatically add placeholder syntax to code blocks and descriptions:
+
+```bash
+# Process a file
+npx docs placeholders content/influxdb3/core/admin/upgrade.md
+
+# Preview changes without modifying the file
+npx docs placeholders content/influxdb3/core/admin/upgrade.md --dry
+
+# Get help
+npx docs placeholders --help
+```
+
+**What it does:**
+
+1. Detects UPPERCASE placeholders in code blocks
+2. Adds `{ placeholders="..." }` attribute to code fences
+3. Wraps placeholder descriptions with `{{% code-placeholder-key %}}` shortcodes
+
+**Example transformation:**
+
+Before:
+
+````markdown
+```bash
+influxdb3 query \
+  --database SYSTEM_DATABASE \
+  --token ADMIN_TOKEN \
+  "SELECT * FROM system.version"
+```
+
+Replace the following:
+
+- **`SYSTEM_DATABASE`**: The name of your system database
+- **`ADMIN_TOKEN`**: An admin token with read permissions
+````
+
+After:
+
+````markdown
+```bash { placeholders="ADMIN_TOKEN|SYSTEM_DATABASE" }
+influxdb3 query \
+  --database SYSTEM_DATABASE \
+  --token ADMIN_TOKEN \
+  "SELECT * FROM system.version"
+```
+
+Replace the following:
+
+- {{% code-placeholder-key %}}`SYSTEM_DATABASE`{{% /code-placeholder-key %}}: The name of your system database
+- {{% code-placeholder-key %}}`ADMIN_TOKEN`{{% /code-placeholder-key %}}: An admin token with read permissions
+````
+
+**How it works:**
+
+- Pattern: Matches words with 2+ characters, all uppercase, can include underscores
+- Excludes common words: HTTP verbs (GET, POST), protocols (HTTP, HTTPS), SQL keywords (SELECT, FROM), etc.
+- Idempotent: Running multiple times won't duplicate syntax
+- Preserves existing `placeholders` attributes and already-wrapped descriptions
+
+#### Manual placeholder usage
 
 ```sh { placeholders "DATABASE_NAME|USERNAME|PASSWORD_OR_TOKEN|API_TOKEN|exampleuser@influxdata.com" }
 curl --request POST http://localhost:8086/write?db=DATABASE_NAME \
@@ -839,7 +901,7 @@ Sample dataset to output. Use either `set` argument name or provide the set as t
 
 #### includeNull
 
-Specify whether or not to include _null_ values in the dataset. Use either `includeNull` argument name or provide the boolean value as the second argument.
+Specify whether or not to include *null* values in the dataset. Use either `includeNull` argument name or provide the boolean value as the second argument.
 
 #### includeRange
 
@@ -1115,6 +1177,6 @@ The InfluxDB host placeholder that gets replaced by custom domains differs betwe
 {{< influxdb/host "serverless" >}}
 ```
 
----
+***
 
 **For working examples**: Test all shortcodes in [content/example.md](content/example.md)
`````
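As a usage note for the `api-endpoint` parameters above, a typical call might look like the following; this is a sketch with an illustrative endpoint and api-ref path, and the named-argument form is an assumption to confirm against content/example.md:

```md
{{< api-endpoint method="get" endpoint="/api/v2/buckets" api-ref="/influxdb/v2/api/#operation/GetBuckets" >}}
```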
README.md: 39 lines changed
````diff
@@ -2,9 +2,9 @@
 <img src="/static/img/influx-logo-cubo-dark.png" width="200">
 </p>
 
-# InfluxDB 2.0 Documentation
+# InfluxData Product Documentation
 
-This repository contains the InfluxDB 2.x documentation published at [docs.influxdata.com](https://docs.influxdata.com).
+This repository contains the InfluxData product documentation for InfluxDB and related tooling published at [docs.influxdata.com](https://docs.influxdata.com).
 
 ## Contributing
 
@@ -15,6 +15,26 @@ For information about contributing to the InfluxData documentation, see [Contrib
 
 For information about testing the documentation, including code block testing, link validation, and style linting, see [Testing guide](DOCS-TESTING.md).
 
+## Documentation Tools
+
+This repository includes a `docs` CLI tool for common documentation workflows:
+
+```sh
+# Create new documentation from a draft
+npx docs create drafts/new-feature.md --products influxdb3_core
+
+# Edit existing documentation from a URL
+npx docs edit https://docs.influxdata.com/influxdb3/core/admin/
+
+# Add placeholder syntax to code blocks
+npx docs placeholders content/influxdb3/core/admin/upgrade.md
+
+# Get help
+npx docs --help
+```
+
+The `docs` command is automatically configured when you run `yarn install`.
+
 ## Documentation
 
 Comprehensive reference documentation for contributors:
@@ -27,6 +47,7 @@ Comprehensive reference documentation for contributors:
 - **[API Documentation](api-docs/README.md)** - API reference generation
 
 ### Quick Links
 
 - [Style guidelines](DOCS-CONTRIBUTING.md#style-guidelines)
 - [Commit guidelines](DOCS-CONTRIBUTING.md#commit-guidelines)
 - [Code block testing](DOCS-TESTING.md#code-block-testing)
 
@@ -35,9 +56,9 @@ Comprehensive reference documentation for contributors:
 
 InfluxData takes security and our users' trust very seriously.
 If you believe you have found a security issue in any of our open source projects,
-please responsibly disclose it by contacting security@influxdata.com.
+please responsibly disclose it by contacting <security@influxdata.com>.
 More details about security vulnerability reporting,
-including our GPG key, can be found at https://www.influxdata.com/how-to-report-security-vulnerabilities/.
+including our GPG key, can be found at <https://www.influxdata.com/how-to-report-security-vulnerabilities/>.
 
 ## Running the docs locally
 
@@ -58,7 +79,13 @@ including our GPG key, can be found at https://www.influxdata.com/how-to-report-
    yarn install
    ```
 
-   _**Note:** The most recent version of Hugo tested with this documentation is **0.149.0**._
+   ***Note:** The most recent version of Hugo tested with this documentation is **0.149.0**.*
+
+   After installation, the `docs` command will be available via `npx`:
+
+   ```sh
+   npx docs --help
+   ```
 
 3. To generate the API docs, see [api-docs/README.md](api-docs/README.md).
 
@@ -71,6 +98,7 @@ including our GPG key, can be found at https://www.influxdata.com/how-to-report-
   ```sh
   npx hugo server
   ```
 
 5. View the docs at [localhost:1313](http://localhost:1313).
 
 ### Alternative: Use docker compose
 
@@ -84,4 +112,5 @@ including our GPG key, can be found at https://www.influxdata.com/how-to-report-
  ```sh
  docker compose up local-dev
  ```
 
 4. View the docs at [localhost:1313](http://localhost:1313).
````
```diff
@@ -334,6 +334,7 @@ services:
         target: /var/lib/influxdb3/plugins/custom
     environment:
       - INFLUXDB3_AUTH_TOKEN=/run/secrets/influxdb3-core-admin-token
+      - INFLUXDB3_PLUGIN_DIR=/var/lib/influxdb3/plugins
     secrets:
       - influxdb3-core-admin-token
   influxdb3-enterprise:
@@ -357,6 +358,7 @@ services:
       - --verbose
     environment:
      - INFLUXDB3_AUTH_TOKEN=/run/secrets/influxdb3-enterprise-admin-token
+     - INFLUXDB3_PLUGIN_DIR=/var/lib/influxdb3/plugins
    volumes:
      - type: bind
        source: test/.influxdb3/enterprise/data
```
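Both services now point the processing engine at the same in-container plugin directory. A quick way to confirm the variable is set inside a running container, assuming the service names shown above:

```bash
# Verify the plugin-directory variable inside the running core container.
docker compose exec influxdb3-core env | grep INFLUXDB3_PLUGIN_DIR
```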
@@ -0,0 +1,210 @@ (new file: Create and edit InfluxData docs)

---
title: Create and edit InfluxData docs
description: Learn how to create and edit InfluxData documentation.
tags: [documentation, guide, influxdata]
test_only: true
---

Learn how to create and edit InfluxData documentation.

- [Submit an issue to request new or updated documentation](#submit-an-issue-to-request-new-or-updated-documentation)
- [Edit an existing page in your browser](#edit-an-existing-page-in-your-browser)
- [Create and edit locally with the docs-v2 repository](#create-and-edit-locally-with-the-docs-v2-repository)
- [Other resources](#other-resources)

## Submit an issue to request new or updated documentation

- **Public**: <https://github.com/influxdata/docs-v2/issues/>
- **Private**: <https://github.com/influxdata/DAR/issues/>

## Edit an existing page in your browser

**Example**: Editing a product-specific page

1. Visit the public docs at <https://docs.influxdata.com>
2. Search, Ask AI, or navigate to find the page to edit--for example, <https://docs.influxdata.com/influxdb3/cloud-serverless/get-started/>
3. Click the "Edit this page" link at the bottom of the page.
   This opens the GitHub repository to the file that generates the page.
4. Click the pencil icon to edit the file in your browser.
5. [Commit and create a pull request](#commit-and-create-a-pull-request)

## Create and edit locally with the docs-v2 repository

Use `docs` scripts with AI agents to help you create and edit documentation locally, especially when working with shared content for multiple products.

**Prerequisites**:

1. [Clone or fork the docs-v2 repository](https://github.com/influxdata/docs-v2/):

   ```bash
   git clone https://github.com/influxdata/docs-v2.git
   cd docs-v2
   ```
2. [Install Yarn](https://yarnpkg.com/getting-started/install)
3. Run `yarn` in the repository root to install dependencies
4. Optional: [Set up GitHub CLI](https://cli.github.com/manual/)

> [!Tip]
> To run and test your changes locally, enter the following command in your terminal:
>
> ```bash
> yarn hugo server
> ```
>
> *To refresh shared content after making changes, `touch` or edit the frontmatter file, or stop the server (Ctrl+C) and restart it.*
>
> To list all available scripts, run:
>
> ```bash
> yarn run
> ```

### Edit an existing page locally

Use the `npx docs edit` command to open an existing page in your editor.

```bash
npx docs edit https://docs.influxdata.com/influxdb3/enterprise/get-started/
```

### Create content locally

Use the `npx docs create` command with your AI agent tool to scaffold frontmatter and generate new content.

- The `npx docs create` command accepts draft input from stdin or from a file path and generates a prompt file from the draft and your product selections
- The prompt file makes AI agents aware of InfluxData docs guidelines, shared content, and product-specific requirements
- `npx docs create` is designed to work automatically with `claude`, but you can use the generated prompt file with any AI agent (for example, `copilot` or `codex`)

> [!Tip]
> `docs-v2` contains custom configuration for agents like Claude and Copilot Agent mode.

<!-- Coming soon: generate content from an issue with labels -->

#### Generate content and frontmatter from a draft

{{% tabs-wrapper %}}
{{% tabs %}}
[Interactive (Claude Code)](#)
[Non-interactive (any agent)](#)
{{% /tabs %}}
{{% tab-content %}}

1. Open a Claude Code prompt:

   ```bash
   claude code
   ```

2. In the prompt, run the `docs create` command with the path to your draft file.
   Optionally, include the `--products` flag and product namespaces to preselect products--for example:

   ```bash
   npx docs create .context/drafts/"Upgrading Enterprise 3 (draft).md" \
     --products influxdb3_enterprise,influxdb3_core
   ```

   If you don't include the `--products` flag, you'll be prompted to select products after running the command.

   The script first generates a prompt file, then the agent automatically uses it to generate content and frontmatter based on the draft and the products you select.

{{% /tab-content %}}
{{% tab-content %}}

Use `npx docs create` to generate a prompt file and then pipe it to your preferred AI agent.
Include the `--products` flag and product namespaces to preselect products.

The following example uses Copilot to process a draft file:

```bash
npx docs create .context/drafts/"Upgrading Enterprise 3 (draft).md" \
  --products "influxdb3_enterprise,influxdb3_core" | \
  copilot --prompt --allow-all-tools
```

{{% /tab-content %}}
{{< /tabs-wrapper >}}

## Review, commit, and create a pull request

After you create or edit content, test and review your changes, and then create a pull request.

> [!Important]
>
> #### Check AI-generated content
>
> Always review and validate AI-generated content for accuracy.
> Make sure example commands are correct for the version you're documenting.

### Test and review your changes

Run a local Hugo server to preview your changes:

```bash
yarn hugo server
```

Visit <http://localhost:1313> to review your changes in the browser.

> [!Note]
> If you need to preview changes in a live production-like environment
> that you can also share with others,
> the Docs team can deploy your branch to the staging site.

### Commit and create a pull request

1. Commit your changes to a new branch
2. Fix any issues found by automated checks
3. Push the branch to your fork or to the docs-v2 repository

```bash
git add content
git commit -m "feat(product): Your commit message"
git push origin your-branch-name
```

### Create a pull request

1. Create a pull request against the `master` branch of the docs-v2 repository
2. Add reviewers:
   - `@influxdata/docs-team`
   - team members familiar with the product area
   - Optionally, assign Copilot to review
3. After approval and automated checks are successful, merge the pull request (if you have permissions) or wait for the docs team to merge it.

{{< tabs-wrapper >}}
{{% tabs %}}
[GitHub](#)
[gh CLI](#)
{{% /tabs %}}
{{% tab-content %}}

1. Visit [influxdata/docs-v2 pull requests on GitHub](https://github.com/influxdata/docs-v2/pulls)
2. Optional: edit PR title and description
3. Optional: set to draft if it needs more work
4. When ready for review, assign `@influxdata/docs-team` and other reviewers

{{% /tab-content %}}
{{% tab-content %}}

```bash
gh pr create \
  --base master \
  --head your-branch-name \
  --title "Your PR title" \
  --body "Your PR description" \
  --reviewer influxdata/docs-team,<other-reviewers>
```

{{% /tab-content %}}
{{< /tabs-wrapper >}}

## Other resources

- `DOCS-*.md`: Documentation standards and guidelines
- <http://localhost:1313/example/>: View shortcode examples
- <https://app.kapa.ai>: Review content gaps identified from Ask AI answers
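The tip above about refreshing shared content is easiest to apply with `touch` on the frontmatter file of the page that sources it; a sketch, with an illustrative path:

```bash
# Re-trigger Hugo's rebuild of a page that sources shared content.
touch content/influxdb3/enterprise/get-started/_index.md
```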
```diff
@@ -51,6 +51,8 @@ Default is `""`.
 The hostname of the [meta node](/enterprise_influxdb/v1/concepts/glossary/#meta-node).
 This must be resolvable and reachable by all other members of the cluster.
 
+If HTTPS is enabled with [`https-enabled`](#https-enabled), `hostname` must match a hostname or wildcard in the TLS certificate specified by [`https-certificate`](#https-certificate). If `hostname` does not match, then `http: TLS handshake error from 127.0.0.1` errors are output in the meta node logs and the cluster will not function properly.
+
 Environment variable: `INFLUXDB_HOSTNAME`
 
 -----
@@ -152,6 +154,8 @@ Use either:
 * PEM-encoded bundle with both the certificate and key (`[bundled-crt-and-key].pem`)
 * Certificate only (`[certificate].crt`)
 
+When HTTPS is enabled, [`hostname`](#hostname) must match a hostname or wildcard in the certificate.
+
 Environment variable: `INFLUXDB_META_HTTPS_CERTIFICATE`
 
 #### https-private-key
```
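To check whether a configured `hostname` is actually covered by the certificate, inspect the certificate's subject and SAN entries; a sketch, assuming a PEM certificate at `./meta.crt` (requires OpenSSL 1.1.1 or later for `-ext`):

```bash
# List the names the certificate is valid for, then compare against `hostname`.
openssl x509 -in meta.crt -noout -subject -ext subjectAltName
```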
@@ -0,0 +1,73 @@ (new file: Usage telemetry)

---
title: Usage telemetry
description: >
  InfluxData collects information, or _telemetry data_, about the usage of {{% product-name %}} to help improve the product.
  Learn what data {{% product-name %}} collects and sends to InfluxData, how it's used, and
  how you can opt out.
menu:
  influxdb_v2:
    name: Usage telemetry
    parent: Reference
weight: 8
related:
  - /influxdb/v2/reference/cli/influxd/
  - /influxdb/v2/reference/internals/metrics/
---

InfluxData collects information, or *telemetry data*, about the usage of {{% product-name %}} to help improve the product.
Learn what data {{% product-name %}} collects and sends to InfluxData, how it's used, and how you can opt out.

## Metrics Collection

For each InfluxDB 2.x installation, we collect the following at startup and then every 8 hours:

### Tags

| Tags | Description |
| :--- | :---------- |
| arch | Microarchitecture InfluxDB was compiled for |
| build date | Date associated with the InfluxDB build |
| commit | SHA of the commit associated with the InfluxDB build |
| cpus | Number of CPUs running InfluxDB |
| functions | Flux functions |
| id | Snowflake identifier for the InfluxDB instance |
| index partition | Identifies the index partition used by the underlying InfluxDB storage engine |
| ip | IP address of the inbound connection that reports the statistics. This is **not** the specific IP address of the machine running InfluxDB unless it is exposed directly on the public internet. |
| org | Identifier for an organization. Allows for grouping of statistics by organization within the InfluxDB instance |
| os | Operating system InfluxDB is running on |
| result | Text allowing grouping of Flux query invocation results |
| series file partition | Identifies the series files in use for the underlying InfluxDB storage engine. This is not the metadata about series. |
| status | Status of the write ahead log (associated with the number of successful/failed writes) |
| user\_agent | Typically set by the browser, InfluxDB client libraries (including the language \[Go, JavaScript, Java, C#, Ruby, Python, etc.] and version), and other technologies \[such as third-party dashboarding applications]. |
| version | InfluxDB version |

With those tag elements, we then use the unique combination of `id`, `ip`, and storage system specifics (where applicable) to capture usage counts of the various subsystems within InfluxDB.

### Fields

| Fields | Description |
| :----- | :---------- |
| buckets total counter | Total number of buckets present within the InfluxDB instance |
| bytes written counter | Total number of bytes written |
| bytes scanned counter | Total number of bytes scanned within the storage system via queries and tasks |
| dashboards total counter | Total number of dashboards present within the InfluxDB instance |
| flux function total counter | Total number of calls by function invoked within Flux |
| http api requests counter | Total number of API invocations by each API path |
| query duration histogram | Histogram counting duration of queries into bins |
| organizations total counter | Total number of organizations present within the InfluxDB instance |
| scrapers total counter | Total number of scrapers configured within the InfluxDB instance |
| series total counter | Total number of series present within the InfluxDB instance |
| storage total counter | Total number of bytes stored within the InfluxDB instance |
| task scheduler gauge | Number of tasks running within the InfluxDB instance |
| telegrafs total counter | Total number of Telegraf configurations within the InfluxDB instance |
| tokens total counter | Total number of tokens present within the InfluxDB instance |
| uptime gauge | Number of seconds InfluxDB has been continuously running |
| users total counter | Total number of users present within the InfluxDB instance |
| wal current segment gauge | Number of bytes in the current segments for the write ahead log |
| wal writes total counter | Total number of writes to the write ahead log by status (ok, fail, etc.) |

## Disable telemetry

To opt out of collecting and sending {{% product-name %}} telemetry data,
include the `--reporting-disabled` flag with the `influxd` command when starting {{% product-name %}}.
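For example, a minimal start command with telemetry reporting turned off:

```bash
# Start InfluxDB 2.x without sending usage telemetry.
influxd --reporting-disabled
```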
```diff
@@ -31,6 +31,7 @@ influxdb3 [GLOBAL-OPTIONS] [COMMAND]
 | [query](/influxdb3/enterprise/reference/cli/influxdb3/query/) | Query {{% product-name %}} |
 | [serve](/influxdb3/enterprise/reference/cli/influxdb3/serve/) | Run the {{% product-name %}} server |
 | [show](/influxdb3/enterprise/reference/cli/influxdb3/show/) | List resources |
+| [stop](/influxdb3/enterprise/reference/cli/influxdb3/stop/) | Mark nodes as stopped |
 | [test](/influxdb3/enterprise/reference/cli/influxdb3/test/) | Test plugins |
 | [update](/influxdb3/enterprise/reference/cli/influxdb3/update/) | Update resources |
 | [write](/influxdb3/enterprise/reference/cli/influxdb3/write/) | Write to {{% product-name %}} |
```
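Taken together with the `show nodes` reference later in this diff, the intended workflow appears to be: list nodes, then stop one by ID to free its licensed cores. A sketch; the node ID is illustrative, and the exact argument syntax should be confirmed against `influxdb3 stop node --help`:

```bash
# Find stoppable nodes, then mark one as stopped.
influxdb3 show nodes
influxdb3 stop node node-2
```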
@@ -0,0 +1,18 @@ (new file: `influxdb3 show nodes` frontmatter)

---
title: influxdb3 show nodes
description: >
  The `influxdb3 show nodes` command displays information about nodes in your InfluxDB 3 Enterprise cluster.
menu:
  influxdb3_enterprise:
    parent: influxdb3 show
    name: influxdb3 show nodes
weight: 301
related:
  - /influxdb3/enterprise/reference/cli/influxdb3/stop/node/
  - /influxdb3/enterprise/reference/cli/influxdb3/serve/
source: /shared/influxdb3-cli/show/nodes.md
---

<!--
//SOURCE - content/shared/influxdb3-cli/show/nodes.md
-->

@@ -0,0 +1,15 @@ (new file: `influxdb3 stop` frontmatter)

---
title: influxdb3 stop
description: >
  The `influxdb3 stop` command marks nodes as stopped in the catalog for your InfluxDB 3 Enterprise cluster.
menu:
  influxdb3_enterprise:
    parent: influxdb3
    name: influxdb3 stop
weight: 302
source: /shared/influxdb3-cli/stop/_index.md
---

<!--
//SOURCE - content/shared/influxdb3-cli/stop/_index.md
-->

@@ -0,0 +1,18 @@ (new file: `influxdb3 stop node` frontmatter)

---
title: influxdb3 stop node
description: >
  The `influxdb3 stop node` command marks a node as stopped in the catalog and frees up its licensed cores for other nodes.
menu:
  influxdb3_enterprise:
    parent: influxdb3 stop
    name: influxdb3 stop node
weight: 301
related:
  - /influxdb3/enterprise/reference/cli/influxdb3/show/nodes/
  - /influxdb3/enterprise/reference/cli/influxdb3/serve/
source: /shared/influxdb3-cli/stop/node.md
---

<!--
//SOURCE - content/shared/influxdb3-cli/stop/node.md
-->
````diff
@@ -32,8 +32,8 @@ Use the `--regenerate` flag with the
 {{% code-placeholders "OPERATOR_TOKEN" %}}
 ```bash
 influxdb3 create token --admin \
-  --regenerate
-  OPERATOR_TOKEN
+  --regenerate \
+  --token OPERATOR_TOKEN
 ```
 {{% /code-placeholders %}}
````
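The fix matters because, in the old snippet, `--regenerate` had no trailing backslash, so the shell would end the command there and try to run `OPERATOR_TOKEN` as a separate command. The corrected invocation continues the line and passes the operator token explicitly via `--token`:

```bash
# Regenerate the admin token, authenticating with the current operator token.
influxdb3 create token --admin \
  --regenerate \
  --token OPERATOR_TOKEN
```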
(One file diff suppressed because it is too large.)
````diff
@@ -1,54 +1,132 @@
-The `influxdb3 install package` command installs Python packages for use in [InfluxDB 3 processing engine plugins](/influxdb3/version/process/).
+The `influxdb3 install package` command installs Python packages within the plugin environment for use in [{{< product-name >}} processing engine plugins](/influxdb3/version/process/).
+Use this command to add external dependencies that your plugins require, such as data processing libraries, notification tools, or forecasting packages.
 
 ## Usage
 
-```bash { placeholders="PACKAGE_NAME" }
-influxdb3 install package --packages PACKAGE_NAME
+```bash
+influxdb3 install package [OPTIONS] [PACKAGES]...
 ```
 
-Replace the following:
+## Arguments
 
-- {{% code-placeholder-key %}}`PACKAGE_NAME`{{% /code-placeholder-key %}}: the name of the Python package to install
+- **`[PACKAGES]...`**: One or more package names to install (space-separated)
 
 ## Options
 
-| Option | Description | Default | Environment | Required |
-|--------|-------------|---------|-------------|----------|
-| `--packages` | Python package names to install (comma-separated) | | | |
-| `-r`, `--requirements` | Path to requirements.txt file | | | |
-| `-H`, `--host` | Host URL of the running {{< product-name >}} server | `http://127.0.0.1:8181` | `INFLUXDB3_HOST_URL` | |
-| `--token` | The token for authentication with the InfluxDB 3 server | | `INFLUXDB3_AUTH_TOKEN` | |
-| `--tls-ca` | Path to a custom TLS certificate authority for testing with self-signed certificates | | `INFLUXDB3_TLS_CA` | |
-| `--plugin-dir` | Location of the plugins directory | | `INFLUXDB3_PLUGIN_DIR` | |
-| `--virtual-env-location` | Location of the Python virtual environment | | `VIRTUAL_ENV` | |
-| `--package-manager` | Package manager to use for installing packages | `discover` | | |
+| Option | Description | Default | Environment Variable |
+| :----- | :---------- | :------ | :------------------- |
+| `-H`, `--host <HOST_URL>` | The host URL of the running {{< product-name >}} server | `http://127.0.0.1:8181` | `INFLUXDB3_HOST_URL` |
+| `--token <AUTH_TOKEN>` | The token for authentication with the {{< product-name >}} server | | `INFLUXDB3_AUTH_TOKEN` |
+| `--plugin-dir <PLUGIN_DIR>` | Location of the plugins directory | `/plugins` | `INFLUXDB3_PLUGIN_DIR` |
+| `--virtual-env-location <VIRTUAL_ENV_LOCATION>` | Custom virtual environment location | | `VIRTUAL_ENV` |
+| `--package-manager <PACKAGE_MANAGER>` | Package manager to use: `discover`, `pip`, `uv`, or `disabled` | `discover` | `INFLUXDB3_PACKAGE_MANAGER` |
+| `--plugin-repo <PLUGIN_REPO>` | Plugin repository URL | | `INFLUXDB3_PLUGIN_REPO` |
+| `-r`, `--requirements <REQUIREMENTS>` | Path to a `requirements.txt` file | | |
+| `--tls-ca <CA_CERT>` | Custom CA certificate for TLS (useful for self-signed certificates) | | `INFLUXDB3_TLS_CA` |
+| `-h`, `--help` | Print help information | | |
+| `--help-all` | Print detailed help information | | |
 
 ## Examples
 
 ### Install a single package
 
-```bash { placeholders="pandas" }
-influxdb3 install package --packages pandas
+```bash
+influxdb3 install package pandas
 ```
 
 ### Install multiple packages
 
 ```bash
-influxdb3 install package --packages pandas,numpy,scipy
+influxdb3 install package pint pandas requests
 ```
 
-### Install packages from requirements file
+### Install packages from a requirements file
 
 ```bash
 influxdb3 install package -r requirements.txt
 ```
 
-### Install with authentication
+### Install packages with custom host and authentication
 
-```bash { placeholders="AUTH_TOKEN|pandas" }
-influxdb3 install package --token AUTH_TOKEN --packages pandas
+```bash { placeholders="AUTH_TOKEN" }
+influxdb3 install package \
+  --host http://localhost:8181 \
+  --token AUTH_TOKEN \
+  pint pandas
 ```
 
 Replace the following:
 
 - {{% code-placeholder-key %}}`AUTH_TOKEN`{{% /code-placeholder-key %}}: your {{% token-link "admin" %}} for your {{< product-name >}} instance
+
+### Install packages with a specific package manager
+
+```bash
+influxdb3 install package \
+  --package-manager uv \
+  prophet adtk
+```
+
+### Install packages with a custom CA certificate
+
+```bash
+influxdb3 install package \
+  --tls-ca /path/to/ca-cert.pem \
+  requests
+```
+
+## Package management
+
+### Package manager selection
+
+By default (`--package-manager discover`), the CLI automatically detects and uses the best available package manager:
+
+1. **uv** (preferred): Faster package installation
+2. **pip** (fallback): Standard Python package manager
+
+### Virtual environment
+
+The CLI manages a virtual environment for plugin packages to avoid conflicts with system Python packages.
+You can customize the virtual environment location with `--virtual-env-location` or the `VIRTUAL_ENV` environment variable.
+
+### Security mode
+
+If your {{< product-name >}} server was started with [`--package-manager disabled`](/influxdb3/version/reference/config-options/#package-manager), the `influxdb3 install package` command is blocked for security and compliance requirements.
+
+When attempting to install packages with this command while the server has package installation disabled, the command fails with a `403 Forbidden` error:
+
+```
+Package installation has been disabled. Contact your administrator for more information.
+```
+
+The server's `--package-manager disabled` setting is designed for:
+
+- **Enterprise security requirements**: Prevent arbitrary package installation
+- **Compliance environments**: Control exactly which packages are available
+- **Air-gapped deployments**: Pre-install all dependencies before deployment
+- **Multi-tenant scenarios**: Prevent tenants from installing potentially malicious packages
+
+In these environments, administrators must pre-install all required Python packages into the server's virtual environment before starting {{< product-name >}}.
+
+For more information, see the [`package-manager`](/influxdb3/version/reference/config-options/#package-manager) configuration option.
+
+### Troubleshooting
+
+If package installation fails:
+
+- **Check if package installation is disabled**: If you receive a `403 Forbidden` error, contact your administrator. Package installation may be disabled on your {{< product-name >}} instance.
+- **Verify network connectivity**: Ensure your {{< product-name >}} instance can reach PyPI or your custom package repository
+- **Check package names**: Verify package names are correct and available in the package repository
+- **Review logs**: Check {{< product-name >}} server logs for detailed error messages
+- **Test with pip**: Try installing the package directly with `pip` to verify it's available
+- **Use requirements file**: For complex dependencies, use a `requirements.txt` file with version pinning
+- **Check Docker disk space** (Docker environments only): If running {{< product-name >}} in Docker and seeing "No space left on device" errors, free up disk space:
+
+  ```bash
+  # Check Docker disk usage
+  docker system df
+
+  # Remove unused images and build cache
+  docker image prune -af
+  docker buildx prune -af
+  ```
````
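Since the troubleshooting list recommends version pinning, a minimal pinned `requirements.txt` and its install command might look like the following; the package versions are illustrative:

```bash
# Write an illustrative pinned requirements file, then install from it.
cat > requirements.txt <<'EOF'
pandas==2.2.2
requests==2.32.3
EOF

influxdb3 install package -r requirements.txt
```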
```diff
@@ -1,4 +1,3 @@
-
 The `influxdb3 show` command lists resources in your {{< product-name >}} server.
 
 ## Usage
 
@@ -11,14 +10,15 @@ influxdb3 show <SUBCOMMAND>
 
 ## Subcommands
 
-| Subcommand | Description |
-| :---------------------------------------------------------------------- | :--------------------------------------------- |
-| [databases](/influxdb3/version/reference/cli/influxdb3/show/databases/) | List database |
-{{% show-in "enterprise" %}}| [license](/influxdb3/version/reference/cli/influxdb3/show/license/) | Display license information |{{% /show-in %}}
-| [plugins](/influxdb3/version/reference/cli/influxdb3/show/plugins/) | List loaded plugins |
-| [system](/influxdb3/version/reference/cli/influxdb3/show/system/) | Display system table data |
-| [tokens](/influxdb3/version/reference/cli/influxdb3/show/tokens/) | List authentication tokens |
-| help | Print command help or the help of a subcommand |
+| Subcommand | Description | | |
+| :---------------------------------------------------------------------- | :------------------------------------------------------------------ | --------------------------- | ---------------- |
+| [databases](/influxdb3/version/reference/cli/influxdb3/show/databases/) | List database | | |
+| {{% show-in "enterprise" %}} | [license](/influxdb3/version/reference/cli/influxdb3/show/license/) | Display license information | {{% /show-in %}} |
+| {{% show-in "enterprise" %}} | [nodes](/influxdb3/version/reference/cli/influxdb3/show/nodes/) | Display node information | {{% /show-in %}} |
+| [plugins](/influxdb3/version/reference/cli/influxdb3/show/plugins/) | List loaded plugins | | |
+| [system](/influxdb3/version/reference/cli/influxdb3/show/system/) | Display system table data | | |
+| [tokens](/influxdb3/version/reference/cli/influxdb3/show/tokens/) | List authentication tokens | | |
+| help | Print command help or the help of a subcommand | | |
 
 ## Options
```
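Any of the subcommands in the table above can be exercised directly for a quick check; for example, listing databases as JSON, assuming a local server and the `--format` option documented in the `show nodes` reference below:

```bash
influxdb3 show databases --format json
```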
@ -0,0 +1,122 @@

The `influxdb3 show nodes` command displays information about nodes in your {{< product-name >}} cluster, including their state, mode, and resource usage.

## Usage

<!--pytest.mark.skip-->

```bash
influxdb3 show nodes [OPTIONS]
```

## Options

| Option | | Description |
| :----- | :--------- | :--------------------------------------------------------------------------------------- |
| `-H` | `--host` | Host URL of the running {{< product-name >}} server (default is `http://127.0.0.1:8181`) |
| | `--format` | Output format: `pretty` (default), `json`, or `csv` |
| | `--token` | Authentication token |
| | `--tls-ca` | Path to a custom TLS certificate authority (for testing or self-signed certificates) |
| `-h` | `--help` | Print help information |

### Option environment variables

You can use the following environment variables to set command options:

| Environment Variable | Option |
| :--------------------- | :-------- |
| `INFLUXDB3_HOST_URL` | `--host` |
| `INFLUXDB3_AUTH_TOKEN` | `--token` |

## Output

The command displays the following information for each node:

- **node_id**: The unique identifier for the node
- **node_catalog_id**: The internal catalog identifier
- **instance_id**: The unique instance identifier
- **mode**: The node's operating modes (ingest, query, process, compact)
- **core_count**: Number of CPU cores allocated to the node
- **state**: Current node state (`running` or `stopped`)
- **updated_at**: Timestamp of the last update
- **cli_params**: Command-line parameters used to start the node

## Examples

- [List all nodes in pretty format](#list-all-nodes-in-pretty-format)
- [List nodes in JSON format](#list-nodes-in-json-format)
- [List nodes on a remote server](#list-nodes-on-a-remote-server)

### List all nodes in pretty format

<!--pytest.mark.skip-->

```bash
influxdb3 show nodes
```

<!--pytest-codeblocks:expected-output-->

```
+---------+-----------------+--------------------------------------+--------+------------+---------+-------------------------+
| node_id | node_catalog_id | instance_id                          | mode   | core_count | state   | updated_at              |
+---------+-----------------+--------------------------------------+--------+------------+---------+-------------------------+
| node-1  | 0               | e38944e4-1204-4bb4-92f3-71138894d674 | ingest | 1          | running | 2025-09-04T10:15:57.126 |
| node-2  | 1               | f5418c97-eb6d-47b5-8176-efc0ad7b882e | ingest | 1          | stopped | 2025-09-04T10:16:57.503 |
+---------+-----------------+--------------------------------------+--------+------------+---------+-------------------------+
```

### List nodes in JSON format

<!--pytest.mark.skip-->

```bash
influxdb3 show nodes --format json
```

The output is similar to the following:

<!--pytest-codeblocks:expected-output-->

```json
[
  {
    "node_id": "node-1",
    "node_catalog_id": 0,
    "instance_id": "e38944e4-1204-4bb4-92f3-71138894d674",
    "mode": [
      "ingest"
    ],
    "core_count": 1,
    "state": "running",
    "updated_at": "2025-09-04T10:15:57.126",
    "cli_params": "{\"http-bind\":\"127.0.0.1:8181\",\"node-id\":\"node-1\",\"data-dir\":\"/path/to/data\",\"object-store\":\"file\",\"mode\":\"ingest\"}"
  },
  {
    "node_id": "node-2",
    "node_catalog_id": 1,
    "instance_id": "f5418c97-eb6d-47b5-8176-efc0ad7b882e",
    "mode": [
      "ingest"
    ],
    "core_count": 1,
    "state": "stopped",
    "updated_at": "2025-09-04T10:16:57.503",
    "cli_params": "{\"http-bind\":\"127.0.0.1:8182\",\"node-id\":\"node-2\",\"data-dir\":\"/path/to/data\",\"object-store\":\"file\",\"mode\":\"ingest\"}"
  }
]
```

### List nodes on a remote server

<!--pytest.mark.skip-->

```bash { placeholders="AUTH_TOKEN|INFLUXDB_HOST" }
influxdb3 show nodes \
  --host INFLUXDB_HOST \
  --token AUTH_TOKEN
```

Replace the following:

- {{% code-placeholder-key %}}`AUTH_TOKEN`{{% /code-placeholder-key %}}: Authentication token with sufficient privileges
- {{% code-placeholder-key %}}`INFLUXDB_HOST`{{% /code-placeholder-key %}}: Host URL of the running {{< product-name >}} server

@@ -0,0 +1,23 @@

The `influxdb3 stop` command marks nodes as stopped in the catalog for your {{< product-name >}} cluster.

## Usage

<!--pytest.mark.skip-->

```bash
influxdb3 stop <SUBCOMMAND>
```

## Subcommands

| Subcommand | Description |
| :------------------------------------------------------------ | :--------------------------------------------- |
| [node](/influxdb3/version/reference/cli/influxdb3/stop/node/) | Mark a node as stopped |
| help | Print command help or the help of a subcommand |

## Options

| Option | | Description |
| :----- | :----------- | :------------------------------ |
| `-h` | `--help` | Print help information |
| | `--help-all` | Print detailed help information |

@@ -0,0 +1,116 @@

The `influxdb3 stop node` command marks a node as stopped in the catalog, freeing up the licensed cores it was using for other nodes.

> [!Important]
> This command is designed for cleaning up the catalog **after** you have already stopped the physical instance.
> It does not shut down the running process--you must stop the instance first (for example, using `kill` or stopping the container).

## Usage

<!--pytest.mark.skip-->

```bash { placeholders="NODE_ID" }
influxdb3 stop node [OPTIONS] --node-id <NODE_ID>
```

## Options

| Option | | Description |
| :----- | :---------- | :--------------------------------------------------------------------------------------- |
| | `--node-id` | *({{< req >}})* The node ID to stop |
| | `--force` | Skip confirmation prompt |
| `-H` | `--host` | Host URL of the running {{< product-name >}} server (default is `http://127.0.0.1:8181`) |
| | `--token` | Authentication token |
| | `--tls-ca` | Path to a custom TLS certificate authority (for testing or self-signed certificates) |
| `-h` | `--help` | Print help information |

### Option environment variables

You can use the following environment variables to set command options:

| Environment Variable | Option |
| :--------------------- | :-------- |
| `INFLUXDB3_HOST_URL` | `--host` |
| `INFLUXDB3_AUTH_TOKEN` | `--token` |

## Use case

Use this command when you have forcefully stopped a node instance (for example, using `kill -9` or stopping a container) and need to update the catalog to reflect the change.
This frees up the licensed cores from the stopped node so other nodes can use them.

## Behavior

When you run this command:

1. The command marks the specified node as `stopped` in the catalog
2. Licensed cores from the stopped node are freed for reuse by other nodes
3. Other nodes in the cluster see the update after their catalog sync interval (default 10 seconds)
4. The command requires authentication if the server has auth enabled

> [!Warning]
> **Stop the instance first**
>
> This command only updates catalog metadata.
> Always stop the physical instance **before** running this command.
> If the instance is still running, it may cause inconsistencies in the cluster.

## Examples

- [Clean up catalog after killing a node](#clean-up-catalog-after-killing-a-node)
- [Clean up catalog without confirmation](#clean-up-catalog-without-confirmation)
- [Clean up catalog on a remote server](#clean-up-catalog-on-a-remote-server)

In the examples below, replace the following:

- {{% code-placeholder-key %}}`NODE_ID`{{% /code-placeholder-key %}}:
  The node identifier for the stopped instance
- {{% code-placeholder-key %}}`AUTH_TOKEN`{{% /code-placeholder-key %}}:
  Authentication token with sufficient privileges
- {{% code-placeholder-key %}}`INFLUXDB_HOST`{{% /code-placeholder-key %}}:
  Host URL of the running {{< product-name >}} server

### Clean up catalog after killing a node

This example shows the typical workflow: first stop the instance, then clean up the catalog.

<!--pytest.mark.skip-->

```bash { placeholders="NODE_ID|PID" }
# First, stop the physical instance (for example, using kill)
kill -9 <PID>

# Then, clean up the catalog
influxdb3 stop node --node-id NODE_ID
```

The command prompts for confirmation.

### Clean up catalog without confirmation

<!--pytest.mark.skip-->

```bash { placeholders="NODE_ID" }
influxdb3 stop node --node-id NODE_ID --force
```

### Clean up catalog on a remote server

<!--pytest.mark.skip-->

```bash { placeholders="AUTH_TOKEN|INFLUXDB_HOST|NODE_ID" }
influxdb3 stop node \
  --host INFLUXDB_HOST \
  --node-id NODE_ID \
  --token AUTH_TOKEN
```

## Verify node status

After stopping a node, verify its status using the [`influxdb3 show nodes`](/influxdb3/version/reference/cli/influxdb3/show/nodes/) command:

<!--pytest.mark.skip-->

```bash
influxdb3 show nodes
```

The stopped node appears with `state: "stopped"` in the output.

@@ -19,14 +19,14 @@ As written data moves through {{% product-name %}}, it follows a structured path

- **Process**: InfluxDB validates incoming data before accepting it into the system.
- **Impact**: Prevents malformed or unsupported data from entering the database.
- **Details**: The database validates incoming data and stores it in the write buffer (in memory). If `no_sync=true`, the server sends a response to acknowledge the write [without waiting for persistence](/influxdb3/version/reference/cli/influxdb3/write/#write-line-protocol-and-immediately-return-a-response).
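
For example, a minimal sketch of an acknowledged-before-persistence write over HTTP (this assumes a local server on the default port and a database named `mydb`; the `no_sync` parameter behaves as described above):

<!--pytest.mark.skip-->

```bash
# Acknowledge the write as soon as it is buffered in memory,
# without waiting for the WAL flush to the object store.
curl -X POST "http://localhost:8181/api/v3/write_lp?db=mydb&no_sync=true" \
  --data-raw "home,room=kitchen temp=21.5"
```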

### Write-ahead log (WAL) persistence

- **Process**: The database flushes the write buffer to the WAL every second (default).
- **Impact**: Ensures durability by persisting data to object storage.
- **Tradeoff**: More frequent flushing improves durability but increases I/O overhead.
- **Details**: Every second (default), the database flushes the write buffer to the Write-Ahead Log (WAL) for persistence in the object store. If `no_sync=false` (default), the server sends a response to acknowledge the write.
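
The flush cadence is configurable. For example, a sketch that trades more I/O for a tighter durability window (this assumes the `wal-flush-interval` server option described in the configuration options reference; the other flags mirror the `cli_params` shown earlier):

<!--pytest.mark.skip-->

```bash
# Flush the write buffer to the WAL every 500ms instead of the 1s default
# (wal-flush-interval is an assumption; verify the option name in the config reference)
influxdb3 serve \
  --node-id node-1 \
  --object-store file \
  --data-dir /path/to/data \
  --wal-flush-interval 500ms
```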

### Query availability

@@ -40,7 +40,8 @@ As written data moves through {{% product-name %}}, it follows a structured path

- **Process**: Every ten minutes (default), data is persisted to Parquet files in object storage.
- **Impact**: Provides durable, long-term storage.
- **Tradeoff**: More frequent persistence reduces reliance on the WAL but increases I/O costs.
- **Details**: Every ten minutes (default), {{% product-name %}} persists the oldest data from the queryable buffer to the object store in Parquet format, and keeps the remaining data (the most recent 5 minutes) in memory.
- **Memory usage**: The persistence process uses memory from the configured memory pool ([`exec-mem-pool-bytes`](/influxdb3/version/reference/config-options/#exec-mem-pool-bytes)) when converting data to Parquet format. For write-heavy workloads, ensure adequate memory is allocated.

### In-memory cache

@@ -6,6 +6,7 @@ Learn how to avoid unexpected results and recover from errors when writing to

- [Review HTTP status codes](#review-http-status-codes)
- [Troubleshoot failures](#troubleshoot-failures)
- [Troubleshoot rejected points](#troubleshoot-rejected-points)
{{% show-in "core,enterprise" %}}- [Troubleshoot write performance issues](#troubleshoot-write-performance-issues){{% /show-in %}}

## Handle write responses

@@ -65,3 +66,43 @@ InfluxDB rejects points that don't match the schema of existing data.

Check for [field data type](/influxdb3/version/reference/syntax/line-protocol/#data-types-and-format)
differences between the rejected data point and points within the same
database--for example, did you attempt to write `string` data to an `int` field?

{{% show-in "core,enterprise" %}}

## Troubleshoot write performance issues

If you experience slow write performance or timeouts during high-volume ingestion,
consider the following:

### Memory configuration

{{% product-name %}} uses memory for both query processing and internal data operations,
including converting data to Parquet format during persistence.
For write-heavy workloads, insufficient memory allocation can cause performance issues.

**Symptoms of memory-related write issues:**

- Slow write performance during data persistence (typically every 10 minutes)
- Increased response times during high-volume ingestion
- Memory-related errors in server logs

**Solutions:**

- Increase the [`exec-mem-pool-bytes`](/influxdb3/version/reference/config-options/#exec-mem-pool-bytes)
  configuration to allocate more memory for data operations.
  For write-heavy workloads, consider setting this to 30-40% of available memory.
- Monitor memory usage during peak write periods to identify bottlenecks.
- Adjust the [`gen1-duration`](/influxdb3/version/reference/config-options/#gen1-duration)
  to control how frequently data is persisted to Parquet format.

### Example configuration for write-heavy workloads

```bash { placeholders="PERCENTAGE" }
influxdb3 serve \
  --exec-mem-pool-bytes PERCENTAGE \
  --gen1-duration 15m
# ... other options
```

Replace {{% code-placeholder-key %}}`PERCENTAGE`{{% /code-placeholder-key %}} with the percentage
of available memory to allocate (for example, `35%` for write-heavy workloads).

{{% /show-in %}}

@@ -16,9 +16,8 @@ settings to use when Telegraf starts.

Each Telegraf plugin has its own set of configuration options.
Telegraf also provides global options for configuring specific Telegraf settings.

> [!Note]
> See [Get started](/telegraf/v1/get_started/) to quickly get up and running with Telegraf.

## Generate a configuration file

@@ -26,6 +25,7 @@ The `telegraf config` command lets you generate a configuration file using Teleg

- [Create a configuration with default input and output plugins](#create-a-configuration-with-default-input-and-output-plugins)
- [Create a configuration with specific input and output plugins](#create-a-configuration-with-specific-input-and-output-plugins)
- [Windows PowerShell v5 encoding](#windows-powershell-v5-encoding)

### Create a configuration with default input and output plugins

@@ -118,6 +118,23 @@ config > telegraf.conf

For more advanced configuration details, see the
[configuration documentation](/telegraf/v1/administration/configuration/).

### Windows PowerShell v5 encoding

In PowerShell 5, the default encoding is UTF-16LE, not UTF-8.
Telegraf expects a valid UTF-8 file.
This is not an issue with PowerShell 6 or newer, the Command Prompt,
or the Git Bash shell.

When using PowerShell 5 or earlier, specify the output encoding when generating
a full configuration file:

```powershell
telegraf.exe config | Out-File -Encoding utf8 telegraf.conf
```

This generates a UTF-8 encoded file with a byte-order mark (BOM).
However, Telegraf correctly handles the leading BOM.

## Configuration file locations

When starting Telegraf, use the `--config` flag to specify the configuration file location:

@@ -136,62 +153,91 @@ Telegraf processes each configuration file separately, and

the effective configuration is the union of all the files.
If any file isn't a valid configuration, Telegraf returns an error.

> [!Warning]
> #### Telegraf doesn't support partial configurations
>
> Telegraf doesn't concatenate configuration files before processing them.
> Each configuration file that you provide must be a valid configuration.
>
> If you want to use separate files to manage a configuration, you can use your
> own custom code to concatenate and pre-process the files, and then provide the
> complete configuration to Telegraf--for example:
>
> 1. Configure plugin sections and assign partial configs a file extension different
>    from `.conf` to prevent Telegraf loading them--for example:
>
>    ```toml
>    # main.opcua: Main configuration file
>    ...
>    [[inputs.opcua_listener]]
>      name = "PluginSection"
>      endpoint = "opc.tcp://10.0.0.53:4840"
>    ...
>    ```
>
>    ```toml
>    # group_1.opcua
>    [[inputs.opcua_listener.group]]
>      name = "SubSection1"
>    ...
>    ```
>
>    ```toml
>    # group_2.opcua
>    [[inputs.opcua_listener.group]]
>      name = "SubSection2"
>    ...
>    ```
>
> 2. Before you start Telegraf, run your custom script to concatenate
>    `main.opcua`, `group_1.opcua`, and `group_2.opcua` into a valid `telegraf.conf`.
> 3. Start Telegraf with the complete, valid `telegraf.conf` configuration.

## Set environment variables

Use environment variables anywhere in the configuration file by enclosing them in `${}`.
For strings, variables must be in quotes (for example, `"test_${STR_VAR}"`).
For numbers and booleans, variables must be unquoted (for example, `${INT_VAR}`,
`${BOOL_VAR}`).

When using double quotes, escape any backslashes (for example, `"C:\\Program Files"`) or
other special characters.
If using an environment variable with a single backslash, enclose the variable
in single quotes to signify a string literal (for example,
`'C:\Program Files'`).

Telegraf also supports shell parameter expansion for environment variables, which
allows the following:

- `${VARIABLE:-default}`: evaluates to `default` if `VARIABLE` is unset or empty
  in the environment.
- `${VARIABLE-default}`: evaluates to `default` only if `VARIABLE` is unset in
  the environment.

Similarly, the following syntax allows you to specify mandatory variables:

- `${VARIABLE:?err}`: exits with an error message containing `err` if `VARIABLE`
  is unset or empty in the environment.
- `${VARIABLE?err}`: exits with an error message containing `err` if `VARIABLE`
  is unset in the environment.
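
For example, a minimal sketch of a fallback default in a plugin option (the variable name and URL are illustrative):

```toml
[[outputs.influxdb_v2]]
  # Uses INFLUX_HOST if set and non-empty; otherwise falls back to localhost
  urls = ["${INFLUX_HOST:-http://localhost:8086}"]
```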

When using the `.deb` or `.rpm` packages, you can define environment variables
in the `/etc/default/telegraf` file.

You can also set environment variables using the Linux `export` command:

<!--pytest.mark.skip-->

```bash
export password=mypassword
```

> **Note:** Use a secret store or environment variables to store sensitive credentials.

### Example: Telegraf environment variables

Set environment variables in the Telegraf environment variables file
(`/etc/default/telegraf`).

#### For InfluxDB 1.x:

<!--pytest.mark.skip-->

@@ -199,7 +245,31 @@ Set environment variables in the Telegraf environment variables file (`/etc/defa

```sh
USER="alice"
INFLUX_URL="http://localhost:8086"
INFLUX_SKIP_DATABASE_CREATION="true"
INFLUX_PASSWORD="passw0rd123"
```

#### For InfluxDB OSS v2:

<!--pytest.mark.skip-->

```sh
INFLUX_HOST="http://localhost:8086"
INFLUX_TOKEN="replace_with_your_token"
INFLUX_ORG="your_username"
INFLUX_BUCKET="replace_with_your_bucket_name"
```

#### For InfluxDB Cloud Serverless:

<!--pytest.mark.skip-->

```sh
# For AWS West (Oregon)
INFLUX_HOST="https://us-west-2-1.aws.cloud2.influxdata.com"
# Other Cloud URLs at https://docs.influxdata.com/influxdb/cloud/reference/regions/
INFLUX_TOKEN="replace_with_your_token"
INFLUX_ORG="yourname@yourcompany.com"
INFLUX_BUCKET="replace_with_your_bucket_name"
```

In the Telegraf configuration file (`/etc/telegraf.conf`), reference the variables--for example:

@@ -210,10 +280,25 @@ In the Telegraf configuration file (`/etc/telegraf.conf`), reference the variabl

```toml
[[inputs.mem]]

# For InfluxDB 1.x:
[[outputs.influxdb]]
  urls = ["${INFLUX_URL}"]
  skip_database_creation = ${INFLUX_SKIP_DATABASE_CREATION}
  password = "${INFLUX_PASSWORD}"

# For InfluxDB OSS 2:
[[outputs.influxdb_v2]]
  urls = ["${INFLUX_HOST}"]
  token = "${INFLUX_TOKEN}"
  organization = "${INFLUX_ORG}"
  bucket = "${INFLUX_BUCKET}"

# For InfluxDB Cloud:
[[outputs.influxdb_v2]]
  urls = ["${INFLUX_HOST}"]
  token = "${INFLUX_TOKEN}"
  organization = "${INFLUX_ORG}"
  bucket = "${INFLUX_BUCKET}"
```

When Telegraf runs, the effective configuration is the following:

@@ -222,68 +307,174 @@ When Telegraf runs, the effective configuration is the following:

```toml
[global_tags]
user = "alice"

# For InfluxDB 1.x:
[[outputs.influxdb]]
  urls = ["http://localhost:8086"]
  skip_database_creation = true
  password = "passw0rd123"

# For InfluxDB OSS 2:
[[outputs.influxdb_v2]]
  urls = ["http://localhost:8086"]
  token = "replace_with_your_token"
  organization = "your_username"
  bucket = "replace_with_your_bucket_name"

# For InfluxDB Cloud:
[[outputs.influxdb_v2]]
  urls = ["https://us-west-2-1.aws.cloud2.influxdata.com"]
  token = "replace_with_your_token"
  organization = "yourname@yourcompany.com"
  bucket = "replace_with_your_bucket_name"
```


## Secret stores

Telegraf also supports secret stores for providing credentials or similar.
Configure one or more secret store plugins and then reference the secret in
your plugin configurations.

Reference secrets using the following syntax:

```txt
@{<secret_store_id>:<secret_name>}
```

- `secret_store_id`: the unique ID you define for your secret store plugin.
- `secret_name`: the name of the secret to use.

> [!Note]
> Both `secret_store_id` and `secret_name` only support alphanumeric
> characters and underscores.

### Example: Use secret stores

This example illustrates the use of secret stores in plugins:

```toml
[global_tags]
user = "alice"

[[secretstores.os]]
  id = "local_secrets"

[[secretstores.jose]]
  id = "cloud_secrets"
  path = "/etc/telegraf/secrets"
  # Optional reference to another secret store to unlock this one.
  password = "@{local_secrets:cloud_store_passwd}"

[[inputs.http]]
  urls = ["http://server.company.org/metrics"]
  username = "@{local_secrets:company_server_http_metric_user}"
  password = "@{local_secrets:company_server_http_metric_pass}"

[[outputs.influxdb_v2]]
  urls = ["https://us-west-2-1.aws.cloud2.influxdata.com"]
  token = "@{cloud_secrets:influxdb_token}"
  organization = "yourname@yourcompany.com"
  bucket = "replace_with_your_bucket_name"
```

### Notes on secret stores

Not all plugins support secrets.
When using plugins that support secrets, Telegraf locks the memory pages
containing the secrets.
Therefore, the locked memory limit has to be set to a suitable value.
Telegraf checks the limit and the number of used secrets at startup and
warns if your limit is too low.
In this case, increase the limit via `ulimit -l`.

If you are running Telegraf in a jail, you might need to allow locked pages in
that jail by setting `allow.mlock = 1;` in your config.

## Global tags

Global tags can be specified in the `[global_tags]` section of the configuration
file in `key="value"` format.
Telegraf applies the global tags to all metrics gathered on this host.
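
For example, a short sketch (the tag keys and values are illustrative):

```toml
[global_tags]
  dc = "us-east-1"
  rack = "1a"
```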

## Agent configuration

The `[agent]` section contains the following configuration options
(a minimal example follows the list):

- **interval**: Default data collection interval for all inputs.
- **round_interval**: Rounds collection interval to `interval`.
  For example, if `interval` is set to `10s`, then the agent collects on :00, :10, :20, etc.
- **metric_batch_size**: Telegraf sends metrics to outputs in batches of at
  most `metric_batch_size` metrics.
  This controls the size of writes that Telegraf sends to output plugins.
- **metric_buffer_limit**: Maximum number of unwritten metrics per output.
  Increasing this value allows for longer periods of output downtime without
  dropping metrics, at the cost of higher maximum memory usage.
  The oldest metrics are overwritten in favor of new ones when the buffer fills up.
- **collection_jitter**: Jitter the collection by a random interval.
  Each plugin sleeps for a random time within the defined jitter before collecting.
  Use this to avoid many plugins querying things like sysfs at the
  same time, which can have a measurable effect on the system.
- **collection_offset**: Shift the collection by the given interval.
  Use this to avoid many plugins querying constrained devices
  at the same time by manually scheduling them in time.
- **flush_interval**: Default flushing interval for all outputs.
  The maximum time before a flush is `flush_interval` + `flush_jitter`.
- **flush_jitter**: Default flush jitter for all outputs.
  This jitters the flush interval by a random amount.
  This is primarily to avoid large write spikes for users
  running a large number of Telegraf instances.
  For example, a jitter of `5s` and an interval of `10s` means flushes happen
  every 10-15 seconds.
- **precision**: Round collected metrics to the precision specified as an interval.
  Precision is _not_ used for service inputs.
  It is up to each individual service input to set the timestamp at the appropriate precision.
- **debug**: Log at debug level.
- **quiet**: Log only error level messages.
- **logformat**: Log format controls the way messages are logged and can be one
  of `text`, `structured` or, on Windows, `eventlog`.
  The output file (if any) is determined by the `logfile` setting.
- **structured_log_message_key**: Message key for structured logs, to override
  the default of `msg`.
  Ignored if `logformat` is not `structured`.
- **logfile**: Name of the file to be logged to, or stderr if unset or empty.
  This setting is ignored for the `eventlog` format.
- **logfile_rotation_interval**: The logfile rotates after the specified time interval.
  When set to `0`, no time-based rotation is performed.
- **logfile_rotation_max_size**: The logfile rotates when it becomes larger than the specified size.
  When set to `0`, no size-based rotation is performed.
- **logfile_rotation_max_archives**: Maximum number of rotated archives to keep;
  any older logs are deleted.
  If set to `-1`, no archives are removed.
- **log_with_timezone**: Pick a timezone to use when logging, or `local` for local
  time--for example, `America/Chicago`.
  [See this page for timezone options and formats.](https://socketloop.com/tutorials/golang-display-list-of-timezones-with-gmt)
- **hostname**: Override the default hostname; if empty, use `os.Hostname()`.
- **omit_hostname**: If set to true, do not set the `host` tag in the Telegraf agent.
- **snmp_translator**: Method of translating SNMP objects.
  Can be `netsnmp` (deprecated), which translates by calling the external programs
  `snmptranslate` and `snmptable`, or `gosmi`, which translates using the built-in
  gosmi library.
- **statefile**: Name of the file to load the states of plugins from and store the states to.
  If uncommented and not empty, this file is used to save the state of stateful
  plugins on termination of Telegraf.
  If the file exists on start, the state in the file is restored for the plugins.
- **always_include_local_tags**: Ensure tags explicitly defined in a plugin _always_ pass
  tag-filtering via `taginclude` or `tagexclude`.
  This removes the need to specify local tags twice.
- **always_include_global_tags**: Ensure tags explicitly defined in the `global_tags` section
  _always_ pass tag-filtering via `taginclude` or `tagexclude`.
  This removes the need to specify those tags twice.
- **skip_processors_after_aggregators**: By default, processors are run a second time after
  aggregators.
  Setting this to true skips the second run of processors.
- **buffer_strategy**: The type of buffer to use for Telegraf output plugins.
  Supported modes are `memory`, the default and original buffer type, and `disk`,
  an experimental disk-backed buffer which serializes all metrics to disk as
  needed to improve data durability and reduce the chance for data loss.
  This is only supported at the agent level.
- **buffer_directory**: The directory to use when in `disk` buffer mode.
  Each output plugin makes another subdirectory in this directory with the
  output plugin's ID.
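
A minimal `[agent]` sketch using a few of the options above (the values are illustrative, not recommendations):

```toml
[agent]
  interval = "10s"          # collect from inputs every 10 seconds
  round_interval = true     # align collection to :00, :10, :20, ...
  metric_batch_size = 1000  # write to outputs in batches of up to 1000 metrics
  metric_buffer_limit = 10000
  flush_interval = "10s"
  flush_jitter = "5s"       # flushes happen every 10-15 seconds
  omit_hostname = false
```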

## Input configuration

@@ -371,40 +562,79 @@ Filters can be configured per input, output, processor, or aggregator.

### Filters

Filters fall under two categories:

- [Selectors](#selectors)
- [Modifiers](#modifiers)

#### Selectors

Selector filters include or exclude entire metrics.
When a metric is excluded from an input or output plugin, the metric is dropped.
If a metric is excluded from a processor or aggregator plugin, it skips the
plugin and is sent onwards to the next stage of processing.

- **namepass**: An array of glob pattern strings.
  Only metrics whose measurement name matches a pattern in this list are emitted.
  Additionally, a custom list of separators can be specified using `namepass_separator`.
  These separators are excluded from wildcard glob pattern matching.
- **namedrop**: The inverse of `namepass`.
  If a match is found, the metric is discarded.
  This is tested on metrics after they have passed the `namepass` test.
  Additionally, a custom list of separators can be specified using `namedrop_separator`.
  These separators are excluded from wildcard glob pattern matching.
- **tagpass**: A table mapping tag keys to arrays of glob pattern strings.
  Only metrics that contain a tag key in the table and a tag value matching one of its
  patterns are emitted.
  This can use either the explicit table syntax (for example, a subsection using a `[...]` header)
  or inline table syntax (for example, a JSON-like table with `{...}`); see the sketch
  after this list and the notes below on specifying the table.
- **tagdrop**: The inverse of `tagpass`.
  If a match is found, the metric is discarded.
  This is tested on metrics after they have passed the `tagpass` test.
- **metricpass**: A Common Expression Language (CEL) expression with a boolean result, where
  `true` allows the metric to pass; otherwise the metric is discarded.
  This filter expression is more general compared to `namepass` and also
  supports time-based filtering.
  Further details, such as available functions and expressions, are provided in the
  CEL language definition as well as in the extension documentation or the CEL language introduction.

> [!Note]
> Because CEL is an _interpreted_ language, this type of filtering is much slower
> than `namepass`, `namedrop`, and others.
> Consider using the more restrictive filter options where possible in
> high-throughput scenarios.
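
For example, a sketch of the two `tagpass` syntaxes (the plugins and tag values are illustrative; as noted below, `tagpass` tables must come at the end of the plugin definition):

```toml
# Inline table syntax
[[inputs.cpu]]
  percpu = true
  tagpass = { cpu = ["cpu0", "cpu3"] }

# Explicit table syntax
[[inputs.disk]]
  [inputs.disk.tagpass]
    fstype = ["ext4", "xfs"]
```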

#### Modifiers

Modifier filters remove tags and fields from a metric.
If all fields are removed, the metric is removed and, as a result, not passed through
to the following processors or any output plugin.
Tags and fields are modified before a metric is passed to a processor,
aggregator, or output plugin.
When used with an input plugin, the filter applies after the input runs.

- **fieldinclude**: An array of glob pattern strings.
  Only fields whose field key matches a pattern in this list are emitted.
- **fieldexclude**: The inverse of `fieldinclude`.
  Fields with a field key matching one of the patterns are discarded from the metric.
  This is tested on metrics after they have passed the `fieldinclude` test.
- **taginclude**: An array of glob pattern strings.
  Only tags with a tag key matching one of the patterns are emitted.
  In contrast to `tagpass`, which passes an entire metric based on its tag,
  `taginclude` removes all non-matching tags from the metric.
  Any tag can be filtered, including global tags and the agent `host` tag.
- **tagexclude**: The inverse of `taginclude`.
  Tags with a tag key matching one of the patterns are discarded from the metric.
  Any tag can be filtered, including global tags and the agent `host` tag.
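
For example, a sketch that keeps only `usage_*` fields from the CPU input (the field pattern is illustrative):

```toml
[[inputs.cpu]]
  percpu = true
  # Keep only fields whose key matches usage_*; all other fields are dropped
  fieldinclude = ["usage_*"]
```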

> [!Note]
> #### Include tagpass and tagdrop at the end of your plugin definition
>
> Due to the way TOML is parsed, `tagpass` and `tagdrop` parameters
> must be defined at the _end_ of the plugin definition, otherwise subsequent
> plugin configuration options are interpreted as part of the tagpass and tagdrop
> tables.

To learn more about metric filtering, watch the following video:

@@ -415,7 +645,9 @@ To learn more about metric filtering, watch the following video:

#### Input configuration examples

The following example configuration collects per-cpu data, drops any
fields that begin with `time_`, tags measurements with `dc="denver-1"`, and then
outputs measurements at a 10 second interval to an InfluxDB database named
`telegraf` at the address `192.168.59.103:8086`.

```toml
[global_tags]
@@ -491,6 +723,26 @@ interpreted as part of the tagpass and tagdrop tables.

  namepass = ["rest_client_*"]
```

#### Input Config: `namepass` and `namedrop` with separators

```toml
# Pass all metrics of type 'A.C.B' and drop all others like 'A.C.D.B'
[[inputs.socket_listener]]
  data_format = "graphite"
  templates = ["measurement*"]

  namepass = ["A.*.B"]
  namepass_separator = "."

# Drop all metrics of type 'A.C.B' and pass all others like 'A.C.D.B'
[[inputs.socket_listener]]
  data_format = "graphite"
  templates = ["measurement*"]

  namedrop = ["A.*.B"]
  namedrop_separator = "."
```

#### Input Config: `taginclude` and `tagexclude`

```toml
@@ -625,3 +877,55 @@ to the system load metrics due to the `namepass` parameter.

To learn more about configuring the Telegraf agent, watch the following video:

{{< youtube txUcAxMDBlQ >}}

## Plugin selection via labels and selectors

You can control which plugin instances are enabled by adding labels to plugin
configurations and passing one or more selectors on the command line.

### Selectors

Provide selectors with one or more `--select` flags when starting Telegraf.
Each `--select` value is a semicolon-separated list of key=value pairs:

```text
<key>=<value>[;<key>=<value>]
```

- Pairs in a single `--select` value are combined with logical AND (all must match).
- Multiple `--select` flags are combined with logical OR (a plugin is enabled if it matches any selector set).

Selectors support simple glob patterns in values (for example, `region=us-*`).

Example:

```console
telegraf --config config.conf --config-directory directory/ \
  --select="app=payments;region=us-*" \
  --select="env=prod" \
  --watch-config --print-plugin-config-source=true
```

### Labels

Add an optional `labels` table to a plugin, similar to `tags`.
Keys and values are plain strings.

Example:

```toml
[[inputs.cpu]]
  [inputs.cpu.labels]
    app = "payments"
    region = "us-east"
    env = "prod"
```

Telegraf matches the command-line selectors against a plugin's labels to decide
whether that plugin instance should be enabled.
For details on supported syntax and matching rules, see the labels selectors spec.

## Transport Layer Security (TLS)

Many Telegraf plugins support TLS configuration for secure communication.
Reference the detailed TLS documentation for configuration options and examples.

@@ -141,12 +141,14 @@ following libraries are available for loading:

- json: `load("json.star", "json")` provides the functions `json.encode()`,
  `json.decode()`, `json.indent()`. See json.star
  for an example. For more details about the functions, please refer to the
  [library documentation](https://pkg.go.dev/go.starlark.net/lib/json).
- log: `load("logging.star", "log")` provides the functions `log.debug()`,
  `log.info()`, `log.warn()`, `log.error()`. See `logging.star` for an example.
- math: `load('math.star', 'math')` provides basic mathematical constants and functions.
  See math.star for an example. For more details, please refer to the
  [library documentation](https://pkg.go.dev/go.starlark.net/lib/math).
- time: `load('time.star', 'time')` provides time-related constants and functions.
  See time_date.star,
  time_duration.star and
  time_timestamp.star for examples. For

@@ -212,19 +212,6 @@ influxdb_cloud:

  - How is Cloud 2 different from Cloud Serverless?
  - How do I manage auth tokens in InfluxDB Cloud 2?

telegraf:
  name: Telegraf
  namespace: telegraf

@@ -4,6 +4,9 @@

  "version": "1.0.0",
  "description": "InfluxDB documentation",
  "license": "MIT",
  "bin": {
    "docs": "scripts/docs-cli.js"
  },
  "resolutions": {
    "serialize-javascript": "^6.0.2"
  },

@@ -40,6 +43,7 @@

    "vanillajs-datepicker": "^1.3.4"
  },
  "scripts": {
    "postinstall": "node scripts/setup-local-bin.js",
    "docs:create": "node scripts/docs-create.js",
    "docs:edit": "node scripts/docs-edit.js",
    "docs:add-placeholders": "node scripts/add-placeholders.js",

@@ -82,5 +86,8 @@

    "test": "test"
  },
  "keywords": [],
  "author": "",
  "optionalDependencies": {
    "copilot": "^0.0.2"
  }
}

@@ -1,108 +0,0 @@
-# Add Placeholders Script
-
-Automatically adds placeholder syntax to code blocks and placeholder descriptions in markdown files.
-
-## What it does
-
-This script finds UPPERCASE placeholders in code blocks and:
-
-1. **Adds `{ placeholders="PATTERN1|PATTERN2" }` attribute** to code block fences
-2. **Wraps placeholder descriptions** with `{{% code-placeholder-key %}}` shortcodes
-
-## Usage
-
-### Direct usage
-
-```bash
-# Process a single file
-node scripts/add-placeholders.js <file.md>
-
-# Dry run to preview changes
-node scripts/add-placeholders.js <file.md> --dry
-
-# Example
-node scripts/add-placeholders.js content/influxdb3/enterprise/admin/upgrade.md
-```
-
-### Using npm script
-
-```bash
-# Process a file
-yarn docs:add-placeholders <file.md>
-
-# Dry run
-yarn docs:add-placeholders <file.md> --dry
-```
-
-## Example transformations
-
-### Before
-
-````markdown
-```bash
-influxdb3 query \
-  --database SYSTEM_DATABASE \
-  --token ADMIN_TOKEN \
-  "SELECT * FROM system.version"
-```
-
-Replace the following:
-
-- **`SYSTEM_DATABASE`**: The name of your system database
-- **`ADMIN_TOKEN`**: An admin token with read permissions
-````
-
-### After
-
-````markdown
-```bash { placeholders="ADMIN_TOKEN|SYSTEM_DATABASE" }
-influxdb3 query \
-  --database SYSTEM_DATABASE \
-  --token ADMIN_TOKEN \
-  "SELECT * FROM system.version"
-```
-
-Replace the following:
-
-- {{% code-placeholder-key %}}`SYSTEM_DATABASE`{{% /code-placeholder-key %}}: The name of your system database
-- {{% code-placeholder-key %}}`ADMIN_TOKEN`{{% /code-placeholder-key %}}: An admin token with read permissions
-````
-
-## How it works
-
-### Placeholder detection
-
-The script automatically detects UPPERCASE placeholders in code blocks using these rules:
-
-- **Pattern**: Matches words with 2+ characters, all uppercase, can include underscores
-- **Excludes common words**: HTTP verbs (GET, POST), protocols (HTTP, HTTPS), SQL keywords (SELECT, FROM), etc.
-
-### Code block processing
-
-1. Finds all code blocks (including indented ones)
-2. Extracts UPPERCASE placeholders
-3. Adds `{ placeholders="..." }` attribute to the fence line
-4. Preserves indentation and language identifiers
-
-### Description wrapping
-
-1. Detects "Replace the following:" sections
-2. Wraps placeholder descriptions matching `- **`PLACEHOLDER`**: description`
-3. Preserves indentation and formatting
-4. Skips already-wrapped descriptions
-
-## Options
-
-- `--dry` or `-d`: Preview changes without modifying files
-
-## Notes
-
-- The script is idempotent - running it multiple times on the same file won't duplicate syntax
-- Preserves existing `placeholders` attributes in code blocks
-- Works with both indented and non-indented code blocks
-- Handles multiple "Replace the following:" sections in a single file
-
-## Related documentation
-
-- [DOCS-SHORTCODES.md](../DOCS-SHORTCODES.md) - Complete shortcode reference
-- [DOCS-CONTRIBUTING.md](../DOCS-CONTRIBUTING.md) - Placeholder conventions and style guidelines
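The detection and wrapping rules described in the removed README still live in `add-placeholders.js` itself. As a rough illustration of those two rules — the exclusion list and function names below are assumptions for the sketch, not the script's actual code:

```js
// Sketch only — not the add-placeholders.js implementation.
// Rule 1: detect UPPERCASE placeholders (2+ chars, digits/underscores allowed),
// minus common words such as HTTP verbs and SQL keywords.
const EXCLUDED = new Set(['GET', 'POST', 'HTTP', 'HTTPS', 'SELECT', 'FROM']);

function findPlaceholders(codeBlock) {
  const words = codeBlock.match(/\b[A-Z][A-Z0-9_]+\b/g) || [];
  return [...new Set(words)].filter((w) => !EXCLUDED.has(w)).sort();
}

// Rule 2: wrap a `- **PLACEHOLDER**: description` line in the shortcode,
// preserving indentation and skipping nothing else on the line.
function wrapDescription(line) {
  return line.replace(
    /^(\s*)- \*\*`([A-Z][A-Z0-9_]+)`\*\*:/,
    '$1- {{% code-placeholder-key %}}`$2`{{% /code-placeholder-key %}}:'
  );
}

// Example: building the fence attribute from detected placeholders
const found = findPlaceholders('--database SYSTEM_DATABASE --token ADMIN_TOKEN');
console.log(`{ placeholders="${found.join('|')}" }`);
// → { placeholders="ADMIN_TOKEN|SYSTEM_DATABASE" }
```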
@@ -16,7 +16,7 @@ import { readFileSync, writeFileSync } from 'fs';
 import { parseArgs } from 'node:util';
 
 // Parse command-line arguments
-const { positionals } = parseArgs({
+const { positionals, values } = parseArgs({
   allowPositionals: true,
   options: {
     dry: {
@@ -24,19 +24,47 @@ const { positionals } = parseArgs({
       short: 'd',
       default: false,
     },
+    help: {
+      type: 'boolean',
+      short: 'h',
+      default: false,
+    },
   },
 });
 
+// Show help if requested
+if (values.help) {
+  console.log(`
+Add placeholder syntax to code blocks
+
+Usage:
+  docs placeholders <file.md> [options]
+
+Options:
+  --dry, -d    Preview changes without modifying files
+  --help, -h   Show this help message
+
+Examples:
+  docs placeholders content/influxdb3/enterprise/admin/upgrade.md
+  docs placeholders content/influxdb3/core/admin/databases/create.md --dry
+
+What it does:
+  1. Finds UPPERCASE placeholders in code blocks
+  2. Adds { placeholders="PATTERN1|PATTERN2" } attribute to code fences
+  3. Wraps placeholder descriptions with {{% code-placeholder-key %}} shortcodes
+`);
+  process.exit(0);
+}
+
 if (positionals.length === 0) {
-  console.error('Usage: node scripts/add-placeholders.js <file.md> [--dry]');
-  console.error(
-    'Example: node scripts/add-placeholders.js content/influxdb3/enterprise/admin/upgrade.md'
-  );
+  console.error('Error: Missing file path argument');
+  console.error('Usage: docs placeholders <file.md> [--dry]');
+  console.error('Run "docs placeholders --help" for more information');
   process.exit(1);
 }
 
 const filePath = positionals[0];
-const isDryRun = process.argv.includes('--dry') || process.argv.includes('-d');
+const isDryRun = values.dry;
 
 /**
  * Extract UPPERCASE placeholders from a code block
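For reference, a minimal standalone sketch of the `node:util` `parseArgs` pattern the hunk above adopts; the hard-coded `args` array stands in for real CLI input (by default `parseArgs` reads `process.argv.slice(2)`):

```js
import { parseArgs } from 'node:util';

const { positionals, values } = parseArgs({
  args: ['file.md', '--dry'], // stand-in for real CLI arguments
  allowPositionals: true,
  options: {
    dry: { type: 'boolean', short: 'd', default: false },
    help: { type: 'boolean', short: 'h', default: false },
  },
});

console.log(positionals); // ['file.md']
console.log(values.dry); // true — replaces manual process.argv scanning
```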
@@ -0,0 +1,236 @@
+#!/usr/bin/env node
+
+/**
+ * Main CLI entry point for docs tools
+ * Supports subcommands: create, edit, placeholders
+ *
+ * Usage:
+ *   docs create <draft-path> [options]
+ *   docs edit <url> [options]
+ *   docs placeholders <file.md> [options]
+ */
+
+import { fileURLToPath } from 'url';
+import { dirname, join } from 'path';
+import { spawn } from 'child_process';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
+
+// Get subcommand and remaining arguments
+const subcommand = process.argv[2];
+const args = process.argv.slice(3);
+
+// Map subcommands to script files
+const subcommands = {
+  create: 'docs-create.js',
+  edit: 'docs-edit.js',
+  placeholders: 'add-placeholders.js',
+};
+
+/**
+ * Print usage information
+ */
+function printUsage() {
+  console.log(`
+Usage: docs <command> [options]
+
+Commands:
+  create <draft-path>     Create new documentation from draft
+  edit <url>              Edit existing documentation
+  placeholders <file.md>  Add placeholder syntax to code blocks
+  test                    Run test suite to verify CLI functionality
+
+Examples:
+  docs create drafts/new-feature.md --products influxdb3_core
+  docs edit https://docs.influxdata.com/influxdb3/core/admin/
+  docs placeholders content/influxdb3/core/admin/upgrade.md
+  docs test
+
+For command-specific help:
+  docs create --help
+  docs edit --help
+  docs placeholders --help
+`);
+}
+
+// Handle test command (async, so don't continue)
+if (subcommand === 'test') {
+  runTests();
+} else if (!subcommand || subcommand === '--help' || subcommand === '-h') {
+  // Handle no subcommand or help
+  printUsage();
+  process.exit(subcommand ? 0 : 1);
+} else if (!subcommands[subcommand]) {
+  // Validate subcommand
+  console.error(`Error: Unknown command '${subcommand}'`);
+  console.error(`Run 'docs --help' for usage information`);
+  process.exit(1);
+} else {
+  // Execute the appropriate script
+  const scriptPath = join(__dirname, subcommands[subcommand]);
+  const child = spawn('node', [scriptPath, ...args], {
+    stdio: 'inherit',
+    env: process.env,
+  });
+
+  child.on('exit', (code) => {
+    process.exit(code || 0);
+  });
+
+  child.on('error', (err) => {
+    console.error(`Failed to execute ${subcommand}:`, err.message);
+    process.exit(1);
+  });
+}
+
+/**
+ * Test function to verify docs CLI functionality
+ * Run with: npx docs test
+ */
+function runTests() {
+  import('child_process').then(({ execSync }) => {
+    const tests = [];
+    const testResults = [];
+
+    console.log('\n🧪 Testing docs CLI functionality...\n');
+
+    // Test 1: docs --help
+    tests.push({
+      name: 'docs --help',
+      command: 'npx docs --help',
+      expectedInOutput: [
+        'create <draft-path>',
+        'edit <url>',
+        'placeholders <file.md>',
+      ],
+    });
+
+    // Test 2: docs create --help
+    tests.push({
+      name: 'docs create --help',
+      command: 'npx docs create --help',
+      expectedInOutput: [
+        'Documentation Content Scaffolding',
+        '--products',
+        'Pipe to external agent',
+      ],
+    });
+
+    // Test 3: docs edit --help
+    tests.push({
+      name: 'docs edit --help',
+      command: 'npx docs edit --help',
+      expectedInOutput: ['Documentation File Opener', '--list'],
+    });
+
+    // Test 4: docs placeholders --help
+    tests.push({
+      name: 'docs placeholders --help',
+      command: 'npx docs placeholders --help',
+      expectedInOutput: [
+        'Add placeholder syntax',
+        '--dry',
+        'code-placeholder-key',
+      ],
+    });
+
+    // Test 5: docs placeholders with missing args shows error
+    tests.push({
+      name: 'docs placeholders (no args)',
+      command: 'npx docs placeholders 2>&1',
+      expectedInOutput: ['Error: Missing file path'],
+      expectFailure: true,
+    });
+
+    // Test 6: Verify symlink exists
+    tests.push({
+      name: 'symlink exists',
+      command: 'ls -la node_modules/.bin/docs',
+      expectedInOutput: ['scripts/docs-cli.js'],
+    });
+
+    // Test 7: Unknown command shows error
+    tests.push({
+      name: 'unknown command',
+      command: 'npx docs invalid-command 2>&1',
+      expectedInOutput: ['Error: Unknown command'],
+      expectFailure: true,
+    });
+
+    // Run tests
+    for (const test of tests) {
+      try {
+        const output = execSync(test.command, {
+          encoding: 'utf8',
+          stdio: 'pipe',
+        });
+
+        const passed = test.expectedInOutput.every((expected) =>
+          output.includes(expected)
+        );
+
+        if (passed) {
+          console.log(`✅ ${test.name}`);
+          testResults.push({ name: test.name, passed: true });
+        } else {
+          console.log(`❌ ${test.name} - Expected output not found`);
+          console.log(`   Expected: ${test.expectedInOutput.join(', ')}`);
+          testResults.push({
+            name: test.name,
+            passed: false,
+            reason: 'Expected output not found',
+          });
+        }
+      } catch (error) {
+        if (test.expectFailure) {
+          // Expected to fail - check if error output contains expected strings
+          const errorOutput =
+            error.stderr?.toString() || error.stdout?.toString() || '';
+          const passed = test.expectedInOutput.every((expected) =>
+            errorOutput.includes(expected)
+          );
+
+          if (passed) {
+            console.log(`✅ ${test.name} (expected failure)`);
+            testResults.push({ name: test.name, passed: true });
+          } else {
+            console.log(`❌ ${test.name} - Expected error message not found`);
+            console.log(`   Expected: ${test.expectedInOutput.join(', ')}`);
+            testResults.push({
+              name: test.name,
+              passed: false,
+              reason: 'Expected error message not found',
+            });
+          }
+        } else {
+          console.log(`❌ ${test.name} - Command failed unexpectedly`);
+          console.log(`   Error: ${error.message}`);
+          testResults.push({
+            name: test.name,
+            passed: false,
+            reason: error.message,
+          });
+        }
+      }
+    }
+
+    const passed = testResults.filter((r) => r.passed).length;
+    const failed = testResults.filter((r) => !r.passed).length;
+
+    console.log(`\n📊 Test Results: ${passed}/${tests.length} passed`);
+
+    if (failed > 0) {
+      console.log(`\n❌ Failed tests:`);
+      testResults
+        .filter((r) => !r.passed)
+        .forEach((r) => {
+          console.log(`  - ${r.name}: ${r.reason}`);
+        });
+      process.exit(1);
+    } else {
+      console.log(`\n✅ All tests passed!\n`);
+      process.exit(0);
+    }
+  });
+}
@@ -23,7 +23,12 @@ import {
   loadProducts,
   analyzeStructure,
 } from './lib/content-scaffolding.js';
-import { writeJson, readJson, fileExists } from './lib/file-operations.js';
+import {
+  writeJson,
+  readJson,
+  fileExists,
+  readDraft,
+} from './lib/file-operations.js';
 import { parseMultipleURLs } from './lib/url-parser.js';
 
 const __filename = fileURLToPath(import.meta.url);
@@ -36,6 +41,7 @@ const REPO_ROOT = join(__dirname, '..');
 const TMP_DIR = join(REPO_ROOT, '.tmp');
 const CONTEXT_FILE = join(TMP_DIR, 'scaffold-context.json');
 const PROPOSAL_FILE = join(TMP_DIR, 'scaffold-proposal.yml');
+const PROMPT_FILE = join(TMP_DIR, 'scaffold-prompt.txt');
 
 // Colors for console output
 const colors = {
@@ -49,25 +55,53 @@ const colors = {
 };
 
 /**
- * Print colored output
+ * Print colored output to stderr (so it doesn't interfere with piped output)
  */
 function log(message, color = 'reset') {
-  console.log(`${colors[color]}${message}${colors.reset}`);
+  // Write to stderr so logs don't interfere with stdout (prompt path/text)
+  console.error(`${colors[color]}${message}${colors.reset}`);
+}
+
+/**
+ * Check if running in Claude Code environment
+ * @returns {boolean} True if Task function is available (Claude Code)
+ */
+function isClaudeCode() {
+  return typeof Task !== 'undefined';
+}
+
+/**
+ * Output prompt for use with external tools
+ * @param {string} prompt - The generated prompt text
+ * @param {boolean} printPrompt - If true, force print to stdout
+ */
+function outputPromptForExternalUse(prompt, printPrompt = false) {
+  // Auto-detect if stdout is being piped
+  const isBeingPiped = !process.stdout.isTTY;
+
+  // Print prompt text if explicitly requested OR if being piped
+  const shouldPrintText = printPrompt || isBeingPiped;
+
+  if (shouldPrintText) {
+    // Output prompt text to stdout
+    console.log(prompt);
+  } else {
+    // Write prompt to file and output file path
+    writeFileSync(PROMPT_FILE, prompt, 'utf8');
+    console.log(PROMPT_FILE);
+  }
+  process.exit(0);
 }
 
 /**
  * Prompt user for input (works in TTY and non-TTY environments)
  */
 async function promptUser(question) {
-  // For non-TTY environments, return empty string
-  if (!process.stdin.isTTY) {
-    return '';
-  }
-
   const readline = await import('readline');
   const rl = readline.createInterface({
     input: process.stdin,
     output: process.stdout,
+    terminal: process.stdin.isTTY !== undefined ? process.stdin.isTTY : true,
   });
 
   return new Promise((resolve) => {
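The piping auto-detection added above relies on a standard Node behavior: `process.stdout.isTTY` is `true` when stdout is a terminal and `undefined` when it is piped. A quick way to observe it (file name is illustrative); the message goes to stderr for the same reason `log()` now does:

```js
// check-pipe.js — compare `node check-pipe.js` vs `node check-pipe.js | cat`
const isBeingPiped = !process.stdout.isTTY;
console.error(isBeingPiped ? 'stdout is piped' : 'stdout is a terminal');
```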
@@ -91,30 +125,28 @@ function divider() {
 function parseArguments() {
   const { values, positionals } = parseArgs({
     options: {
-      draft: { type: 'string' },
-      from: { type: 'string' },
+      'from-draft': { type: 'string' },
       url: { type: 'string', multiple: true },
       urls: { type: 'string' },
       products: { type: 'string' },
       ai: { type: 'string', default: 'claude' },
       execute: { type: 'boolean', default: false },
       'context-only': { type: 'boolean', default: false },
+      'print-prompt': { type: 'boolean', default: false },
       proposal: { type: 'string' },
       'dry-run': { type: 'boolean', default: false },
       yes: { type: 'boolean', default: false },
       help: { type: 'boolean', default: false },
+      'follow-external': { type: 'boolean', default: false },
     },
     allowPositionals: true,
   });
 
   // First positional argument is treated as draft path
-  if (positionals.length > 0 && !values.draft && !values.from) {
+  if (positionals.length > 0 && !values['from-draft']) {
     values.draft = positionals[0];
-  }
-
-  // --from is an alias for --draft
-  if (values.from && !values.draft) {
-    values.draft = values.from;
+  } else if (values['from-draft']) {
+    values.draft = values['from-draft'];
   }
 
   // Normalize URLs into array
@@ -141,63 +173,101 @@ function printUsage() {
 ${colors.bright}Documentation Content Scaffolding${colors.reset}
 
 ${colors.bright}Usage:${colors.reset}
-  yarn docs:create <draft-path>               Create from draft
-  yarn docs:create --url <url> --draft <path> Create at URL with draft content
+  docs create <draft-path>                    Create from draft
+  docs create --url <url> --from-draft <path> Create at URL with draft
+
+  # Or use with yarn:
+  yarn docs:create <draft-path>
+  yarn docs:create --url <url> --from-draft <path>
 
 ${colors.bright}Options:${colors.reset}
   <draft-path>         Path to draft markdown file (positional argument)
-  --draft <path>       Path to draft markdown file
-  --from <path>        Alias for --draft
+  --from-draft <path>  Path to draft markdown file
   --url <url>          Documentation URL for new content location
+  --products <list>    Comma-separated product keys (required for stdin)
+                       Examples: influxdb3_core, influxdb3_enterprise
+  --follow-external    Include external (non-docs.influxdata.com) URLs
+                       when extracting links from draft. Without this flag,
+                       only local documentation links are followed.
   --context-only       Stop after context preparation
                        (for non-Claude tools)
+  --print-prompt       Force prompt text output (auto-enabled when piping)
   --proposal <path>    Import and execute proposal from JSON file
   --dry-run            Show what would be created without creating
   --yes                Skip confirmation prompt
   --help               Show this help message
 
-${colors.bright}Workflow (Create from draft):${colors.reset}
+${colors.bright}Stdin Support:${colors.reset}
+  When piping content from stdin, you must specify target products:
+
+    cat draft.md | docs create --products influxdb3_core
+    echo "# Content" | docs create --products influxdb3_core,influxdb3_enterprise
+
+${colors.bright}Link Following:${colors.reset}
+  By default, the script extracts links from your draft and prompts you
+  to select which ones to include as context. This helps the AI:
+  - Maintain consistent terminology
+  - Avoid duplicating content
+  - Add appropriate \`related\` frontmatter links
+
+  Local documentation links are always available for selection.
+  Use --follow-external to also include external URLs (GitHub, etc.)
+
+${colors.bright}Workflow (Inside Claude Code):${colors.reset}
   1. Create a draft markdown file with your content
-  2. Run: yarn docs:create drafts/new-feature.md
+  2. Run: docs create drafts/new-feature.md
   3. Script runs all agents automatically
   4. Review and confirm to create files
 
-${colors.bright}Workflow (Create at specific URL):${colors.reset}
+${colors.bright}Workflow (Pipe to external agent):${colors.reset}
   1. Create draft: vim drafts/new-feature.md
-  2. Run: yarn docs:create \\
-     --url https://docs.influxdata.com/influxdb3/core/admin/new-feature/ \\
-     --draft drafts/new-feature.md
-  3. Script determines structure from URL and uses draft content
-  4. Review and confirm to create files
+  2. Pipe to your AI tool (prompt auto-detected):
+     docs create drafts/new-feature.md --products X | claude -p
+     docs create drafts/new-feature.md --products X | copilot -p
+  3. AI generates files based on prompt
 
-${colors.bright}Workflow (Manual - for non-Claude tools):${colors.reset}
-  1. Prepare context:
-     yarn docs:create --context-only drafts/new-feature.md
-  2. Run your AI tool with templates from scripts/templates/
-  3. Save proposal to .tmp/scaffold-proposal.json
-  4. Execute:
-     yarn docs:create --proposal .tmp/scaffold-proposal.json
-
 ${colors.bright}Examples:${colors.reset}
-  # Create from draft (AI determines location)
+  # Inside Claude Code - automatic execution
+  docs create drafts/new-feature.md
+
+  # Pipe to external AI tools - prompt auto-detected
+  docs create drafts/new-feature.md --products influxdb3_core | claude -p
+  docs create drafts/new-feature.md --products influxdb3_core | copilot -p
+
+  # Pipe from stdin
+  cat drafts/quick-note.md | docs create --products influxdb3_core | claude -p
+  echo "# Quick note" | docs create --products influxdb3_core | copilot -p
+
+  # Get prompt file path (when not piping)
+  docs create drafts/new-feature.md  # Outputs: .tmp/scaffold-prompt.txt
+
+  # Still works with yarn
   yarn docs:create drafts/new-feature.md
 
-  # Create at specific URL with draft content
-  yarn docs:create --url /influxdb3/core/admin/new-feature/ \\
-     --draft drafts/new-feature.md
+  # Include external links for context selection
+  docs create --follow-external drafts/api-guide.md
 
-  # Preview changes
-  yarn docs:create --draft drafts/new-feature.md --dry-run
+${colors.bright}Smart Behavior:${colors.reset}
+  INSIDE Claude Code:
+    → Automatically runs Task() agent to generate files
+
+  PIPING to another tool:
+    → Auto-detects piping and outputs prompt text
+    → No --print-prompt flag needed
+
+  INTERACTIVE (not piping):
+    → Outputs prompt file path: .tmp/scaffold-prompt.txt
+    → Use with: code .tmp/scaffold-prompt.txt
 
 ${colors.bright}Note:${colors.reset}
-  To edit existing pages, use: yarn docs:edit <url>
+  To edit existing pages, use: docs edit <url>
 `);
 }
 /**
  * Phase 1a: Prepare context from URLs
  */
-async function prepareURLPhase(urls, draftPath, options) {
+async function prepareURLPhase(urls, draftPath, options, stdinContent = null) {
   log('\n🔍 Analyzing URLs and finding files...', 'bright');
 
   try {
 
@@ -258,9 +328,18 @@ async function prepareURLPhase(urls, draftPath, options) {
 
     // Build context (include URL analysis)
     let context = null;
-    if (draftPath) {
+    let draft;
+
+    if (stdinContent) {
+      // Use stdin content
+      draft = stdinContent;
+      log('✓ Using draft from stdin', 'green');
+      context = prepareContext(draft);
+    } else if (draftPath) {
       // Use draft content if provided
-      context = prepareContext(draftPath);
+      draft = readDraft(draftPath);
+      draft.path = draftPath;
+      context = prepareContext(draft);
     } else {
       // Minimal context for editing existing pages
       const products = loadProducts();
 
@@ -351,18 +430,83 @@ async function prepareURLPhase(urls, draftPath, options) {
 /**
  * Phase 1b: Prepare context from draft
  */
-async function preparePhase(draftPath, options) {
+async function preparePhase(draftPath, options, stdinContent = null) {
   log('\n🔍 Analyzing draft and repository structure...', 'bright');
 
+  let draft;
+
+  // Handle stdin vs file
+  if (stdinContent) {
+    draft = stdinContent;
+    log('✓ Using draft from stdin', 'green');
+  } else {
     // Validate draft exists
     if (!fileExists(draftPath)) {
      log(`✗ Draft file not found: ${draftPath}`, 'red');
      process.exit(1);
    }
+    draft = readDraft(draftPath);
+    draft.path = draftPath;
+  }
 
   try {
     // Prepare context
-    const context = prepareContext(draftPath);
+    const context = prepareContext(draft);
+
+    // Extract links from draft
+    const { extractLinks, followLocalLinks, fetchExternalLinks } = await import(
+      './lib/content-scaffolding.js'
+    );
+
+    const links = extractLinks(draft.content);
+
+    if (links.localFiles.length > 0 || links.external.length > 0) {
+      // Filter external links if flag not set
+      if (!options['follow-external']) {
+        links.external = [];
+      }
+
+      // Let user select which external links to follow
+      // (local files are automatically included)
+      const selected = await selectLinksToFollow(links);
+
+      // Follow selected links
+      const linkedContent = [];
+
+      if (selected.selectedLocal.length > 0) {
+        log('\n📄 Loading local files...', 'cyan');
+        // Determine base path for resolving relative links
+        const basePath = draft.path
+          ? dirname(join(REPO_ROOT, draft.path))
+          : REPO_ROOT;
+        const localResults = followLocalLinks(selected.selectedLocal, basePath);
+        linkedContent.push(...localResults);
+        const successCount = localResults.filter((r) => !r.error).length;
+        log(`✓ Loaded ${successCount} local file(s)`, 'green');
+      }
+
+      if (selected.selectedExternal.length > 0) {
+        log('\n🌐 Fetching external URLs...', 'cyan');
+        const externalResults = await fetchExternalLinks(
+          selected.selectedExternal
+        );
+        linkedContent.push(...externalResults);
+        const successCount = externalResults.filter((r) => !r.error).length;
+        log(`✓ Fetched ${successCount} external page(s)`, 'green');
+      }
+
+      // Add to context
+      if (linkedContent.length > 0) {
+        context.linkedContent = linkedContent;
+
+        // Show any errors
+        const errors = linkedContent.filter((lc) => lc.error);
+        if (errors.length > 0) {
+          log('\n⚠️  Some links could not be loaded:', 'yellow');
+          errors.forEach((e) => log(`  • ${e.url}: ${e.error}`, 'yellow'));
+        }
+      }
+    }
 
     // Write context to temp file
     writeJson(CONTEXT_FILE, context);
 
@@ -382,6 +526,12 @@ async function preparePhase(draftPath, options) {
       `✓ Found ${context.structure.existingPaths.length} existing pages`,
       'green'
     );
+    if (context.linkedContent) {
+      log(
+        `✓ Included ${context.linkedContent.length} linked page(s) as context`,
+        'green'
+      );
+    }
     log(
       `✓ Prepared context → ${CONTEXT_FILE.replace(REPO_ROOT, '.')}`,
       'green'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Sort products: detected first, then alphabetically within each group
|
||||||
|
allProducts.sort((a, b) => {
|
||||||
|
const aDetected = detected.includes(a);
|
||||||
|
const bDetected = detected.includes(b);
|
||||||
|
|
||||||
|
// Detected products first
|
||||||
|
if (aDetected && !bDetected) return -1;
|
||||||
|
if (!aDetected && bDetected) return 1;
|
||||||
|
|
||||||
|
// Then alphabetically
|
||||||
|
return a.localeCompare(b);
|
||||||
|
});
|
||||||
|
|
||||||
// Case 1: Explicit flag provided
|
// Case 1: Explicit flag provided
|
||||||
if (options.products) {
|
if (options.products) {
|
||||||
const requested = options.products.split(',').map((p) => p.trim());
|
const requestedKeys = options.products.split(',').map((p) => p.trim());
|
||||||
const invalid = requested.filter((p) => !allProducts.includes(p));
|
|
||||||
|
|
||||||
if (invalid.length > 0) {
|
// Map product keys to display names
|
||||||
|
const requestedNames = [];
|
||||||
|
const invalidKeys = [];
|
||||||
|
|
||||||
|
for (const key of requestedKeys) {
|
||||||
|
const product = context.products[key];
|
||||||
|
|
||||||
|
if (product) {
|
||||||
|
// Valid product key found
|
||||||
|
if (product.versions && product.versions.length > 1) {
|
||||||
|
// Multi-version product: add all versions
|
||||||
|
product.versions.forEach((version) => {
|
||||||
|
const displayName = `${product.name} ${version}`;
|
||||||
|
if (allProducts.includes(displayName)) {
|
||||||
|
requestedNames.push(displayName);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// Single version product
|
||||||
|
if (allProducts.includes(product.name)) {
|
||||||
|
requestedNames.push(product.name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (allProducts.includes(key)) {
|
||||||
|
// It's already a display name (backwards compatibility)
|
||||||
|
requestedNames.push(key);
|
||||||
|
} else {
|
||||||
|
invalidKeys.push(key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (invalidKeys.length > 0) {
|
||||||
|
const validKeys = Object.keys(context.products).join(', ');
|
||||||
log(
|
log(
|
||||||
`\n✗ Invalid products: ${invalid.join(', ')}\n` +
|
`\n✗ Invalid product keys: ${invalidKeys.join(', ')}\n` +
|
||||||
`Valid products: ${allProducts.join(', ')}`,
|
`Valid keys: ${validKeys}`,
|
||||||
'red'
|
'red'
|
||||||
);
|
);
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
}
|
}
|
||||||
|
|
||||||
log(
|
log(
|
||||||
`✓ Using products from --products flag: ${requested.join(', ')}`,
|
`✓ Using products from --products flag: ${requestedNames.join(', ')}`,
|
||||||
'green'
|
'green'
|
||||||
);
|
);
|
||||||
return requested;
|
return requestedNames;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Case 2: Unambiguous (single product detected)
|
// Case 2: Unambiguous (single product detected)
|
||||||
|
|
@ -514,6 +708,74 @@ async function selectProducts(context, options) {
|
||||||
return selected;
|
return selected;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Prompt user to select which external links to include
|
||||||
|
* Local file paths are automatically followed
|
||||||
|
* @param {object} links - {localFiles, external} from extractLinks
|
||||||
|
* @returns {Promise<object>} {selectedLocal, selectedExternal}
|
||||||
|
*/
|
||||||
|
async function selectLinksToFollow(links) {
|
||||||
|
// Local files are followed automatically (no user prompt)
|
||||||
|
// External links require user selection
|
||||||
|
if (links.external.length === 0) {
|
||||||
|
return {
|
||||||
|
selectedLocal: links.localFiles || [],
|
||||||
|
selectedExternal: [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
log('\n🔗 Found external links in draft:\n', 'bright');
|
||||||
|
|
||||||
|
const allLinks = [];
|
||||||
|
let index = 1;
|
||||||
|
|
||||||
|
// Show external links for selection
|
||||||
|
links.external.forEach((link) => {
|
||||||
|
log(` ${index}. ${link}`, 'yellow');
|
||||||
|
allLinks.push({ type: 'external', url: link });
|
||||||
|
index++;
|
||||||
|
});
|
||||||
|
|
||||||
|
const answer = await promptUser(
|
||||||
|
'\nSelect external links to include as context ' +
|
||||||
|
'(comma-separated numbers, or "all"): '
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!answer || answer.toLowerCase() === 'none') {
|
||||||
|
return {
|
||||||
|
selectedLocal: links.localFiles || [],
|
||||||
|
selectedExternal: [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
let selectedIndices;
|
||||||
|
if (answer.toLowerCase() === 'all') {
|
||||||
|
selectedIndices = Array.from({ length: allLinks.length }, (_, i) => i);
|
||||||
|
} else {
|
||||||
|
selectedIndices = answer
|
||||||
|
.split(',')
|
||||||
|
.map((s) => parseInt(s.trim()) - 1)
|
||||||
|
.filter((i) => i >= 0 && i < allLinks.length);
|
||||||
|
}
|
||||||
|
|
||||||
|
const selectedExternal = [];
|
||||||
|
|
||||||
|
selectedIndices.forEach((i) => {
|
||||||
|
const link = allLinks[i];
|
||||||
|
selectedExternal.push(link.url);
|
||||||
|
});
|
||||||
|
|
||||||
|
log(
|
||||||
|
`\n✓ Following ${links.localFiles?.length || 0} local file(s) ` +
|
||||||
|
`and ${selectedExternal.length} external link(s)`,
|
||||||
|
'green'
|
||||||
|
);
|
||||||
|
return {
|
||||||
|
selectedLocal: links.localFiles || [],
|
||||||
|
selectedExternal,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Run single content generator agent with direct file generation (Claude Code)
|
* Run single content generator agent with direct file generation (Claude Code)
|
||||||
*/
|
*/
|
||||||
|
|
@@ -577,6 +839,30 @@ function generateClaudePrompt(
 **Target Products**: Use \`context.selectedProducts\` field (${selectedProducts.join(', ')})
 **Mode**: ${mode === 'edit' ? 'Edit existing content' : 'Create new documentation'}
 ${isURLBased ? `**URLs**: ${context.urls.map((u) => u.url).join(', ')}` : ''}
+${
+  context.linkedContent?.length > 0
+    ? `
+**Linked References**: The draft references ${context.linkedContent.length} page(s) from existing documentation.
+
+These are provided for context to help you:
+- Maintain consistent terminology and style
+- Avoid duplicating existing content
+- Understand related concepts and their structure
+- Add appropriate links to the \`related\` frontmatter field
+
+Linked content details available in \`context.linkedContent\`:
+${context.linkedContent
+  .map((lc) =>
+    lc.error
+      ? `- ❌ ${lc.url} (${lc.error})`
+      : `- ✓ [${lc.type}] ${lc.title} (${lc.path || lc.url})`
+  )
+  .join('\n')}
+
+**Important**: Use this content for context and reference, but do not copy it verbatim. Consider adding relevant pages to the \`related\` field in frontmatter.
+`
+    : ''
+}
 
 **Your Task**: Generate complete documentation files directly (no proposal step).
@@ -908,16 +1194,40 @@ async function executePhase(options) {
 async function main() {
   const options = parseArguments();
 
-  // Show help
+  // Show help first (don't wait for stdin)
   if (options.help) {
     printUsage();
     process.exit(0);
   }
 
+  // Check for stdin only if no draft file was provided
+  const hasStdin = !process.stdin.isTTY;
+  let stdinContent = null;
+
+  if (hasStdin && !options.draft) {
+    // Stdin requires --products option
+    if (!options.products) {
+      log(
+        '\n✗ Error: --products is required when piping content from stdin',
+        'red'
+      );
+      log(
+        'Example: echo "# Content" | yarn docs:create --products influxdb3_core',
+        'yellow'
+      );
+      process.exit(1);
+    }
+
+    // Import readDraftFromStdin
+    const { readDraftFromStdin } = await import('./lib/file-operations.js');
+    log('📥 Reading draft from stdin...', 'cyan');
+    stdinContent = await readDraftFromStdin();
+  }
+
   // Determine workflow
   if (options.url && options.url.length > 0) {
     // URL-based workflow requires draft content
-    if (!options.draft) {
+    if (!options.draft && !stdinContent) {
       log('\n✗ Error: --url requires --draft <path>', 'red');
       log('The --url option specifies WHERE to create content.', 'yellow');
       log(
 
@@ -934,29 +1244,75 @@ async function main() {
       process.exit(1);
     }
 
-    const context = await prepareURLPhase(options.url, options.draft, options);
+    const context = await prepareURLPhase(
+      options.url,
+      options.draft,
+      options,
+      stdinContent
+    );
 
     if (options['context-only']) {
       // Stop after context preparation
       process.exit(0);
     }
 
-    // Continue with AI analysis (Phase 2)
+    // Generate prompt for product selection
+    const selectedProducts = await selectProducts(context, options);
+    const mode = context.urls?.length > 0 ? 'create' : 'create';
+    const isURLBased = true;
+    const hasExistingContent =
+      context.existingContent &&
+      Object.keys(context.existingContent).length > 0;
+
+    const prompt = generateClaudePrompt(
+      context,
+      selectedProducts,
+      mode,
+      isURLBased,
+      hasExistingContent
+    );
+
+    // Check environment and handle prompt accordingly
+    if (!isClaudeCode()) {
+      // Not in Claude Code: output prompt for external use
+      outputPromptForExternalUse(prompt, options['print-prompt']);
+    }
+
+    // In Claude Code: continue with AI analysis (Phase 2)
     log('\n🤖 Running AI analysis with specialized agents...\n', 'bright');
     await runAgentAnalysis(context, options);
 
     // Execute proposal (Phase 3)
     await executePhase(options);
-  } else if (options.draft) {
-    // Draft-based workflow
-    const context = await preparePhase(options.draft, options);
+  } else if (options.draft || stdinContent) {
+    // Draft-based workflow (from file or stdin)
+    const context = await preparePhase(options.draft, options, stdinContent);
 
     if (options['context-only']) {
       // Stop after context preparation
       process.exit(0);
     }
 
-    // Continue with AI analysis (Phase 2)
+    // Generate prompt for product selection
+    const selectedProducts = await selectProducts(context, options);
+    const mode = 'create';
+    const isURLBased = false;
+
+    const prompt = generateClaudePrompt(
+      context,
+      selectedProducts,
+      mode,
+      isURLBased,
+      false
+    );
+
+    // Check environment and handle prompt accordingly
+    if (!isClaudeCode()) {
+      // Not in Claude Code: output prompt for external use
+      outputPromptForExternalUse(prompt, options['print-prompt']);
+    }
+
+    // In Claude Code: continue with AI analysis (Phase 2)
     log('\n🤖 Running AI analysis with specialized agents...\n', 'bright');
     await runAgentAnalysis(context, options);
@@ -4,7 +4,7 @@
  */
 
 import { readdirSync, readFileSync, existsSync, statSync } from 'fs';
-import { join, dirname } from 'path';
+import { join, dirname, resolve } from 'path';
 import { fileURLToPath } from 'url';
 import yaml from 'js-yaml';
 import matter from 'gray-matter';
 
@@ -314,12 +314,19 @@ export function findSiblingWeights(dirPath) {
 
 /**
  * Prepare complete context for AI analysis
- * @param {string} draftPath - Path to draft file
+ * @param {string|object} draftPathOrObject - Path to draft file or draft object
  * @returns {object} Context object
  */
-export function prepareContext(draftPath) {
-  // Read draft
-  const draft = readDraft(draftPath);
+export function prepareContext(draftPathOrObject) {
+  // Read draft - handle both file path and draft object
+  let draft;
+  if (typeof draftPathOrObject === 'string') {
+    draft = readDraft(draftPathOrObject);
+    draft.path = draftPathOrObject;
+  } else {
+    // Already a draft object from stdin
+    draft = draftPathOrObject;
+  }
 
   // Load products
   const products = loadProducts();
 
@@ -349,7 +356,7 @@ export function prepareContext(draftPath) {
   // Build context
   const context = {
     draft: {
-      path: draftPath,
+      path: draft.path || draftPathOrObject,
       content: draft.content,
       existingFrontmatter: draft.frontmatter,
     },
 
@@ -616,7 +623,7 @@ export function detectSharedContent(filePath) {
     if (parsed.data && parsed.data.source) {
       return parsed.data.source;
     }
-  } catch (error) {
+  } catch (_error) {
     // Can't parse, assume not shared
     return null;
   }
 
@@ -663,13 +670,13 @@ export function findSharedContentVariants(sourcePath) {
           const relativePath = fullPath.replace(REPO_ROOT + '/', '');
           variants.push(relativePath);
         }
-      } catch (error) {
+      } catch (_error) {
         // Skip files that can't be parsed
        continue;
      }
    }
  }
-  } catch (error) {
+  } catch (_error) {
     // Skip directories we can't read
   }
 }
 
@@ -758,3 +765,127 @@ export function analyzeURLs(parsedURLs) {
 
   return results;
 }
+
+/**
+ * Extract and categorize links from markdown content
+ * @param {string} content - Markdown content
+ * @returns {object} {localFiles: string[], external: string[]}
+ */
+export function extractLinks(content) {
+  const localFiles = [];
+  const external = [];
+
+  // Match markdown links: [text](url)
+  const linkRegex = /\[([^\]]+)\]\(([^)]+)\)/g;
+  let match;
+
+  while ((match = linkRegex.exec(content)) !== null) {
+    const url = match[2];
+
+    // Skip anchor links and mailto
+    if (url.startsWith('#') || url.startsWith('mailto:')) {
+      continue;
+    }
+
+    // Local file paths (relative paths) - automatically followed
+    if (url.startsWith('../') || url.startsWith('./')) {
+      localFiles.push(url);
+    }
+    // All HTTP/HTTPS URLs (including docs.influxdata.com) - user selects
+    else if (url.startsWith('http://') || url.startsWith('https://')) {
+      external.push(url);
+    }
+    // Absolute paths starting with / are ignored (no base context to resolve)
+  }
+
+  return {
+    localFiles: [...new Set(localFiles)],
+    external: [...new Set(external)],
+  };
+}
+
+/**
+ * Follow local file links (relative paths)
+ * @param {string[]} links - Array of relative file paths
+ * @param {string} basePath - Base path to resolve relative links from
+ * @returns {object[]} Array of {url, title, content, path, frontmatter}
+ */
+export function followLocalLinks(links, basePath = REPO_ROOT) {
+  const results = [];
+
+  for (const link of links) {
+    try {
+      // Resolve relative path from base path
+      const filePath = resolve(basePath, link);
+
+      // Check if file exists
+      if (existsSync(filePath)) {
+        const fileContent = readFileSync(filePath, 'utf8');
+        const parsed = matter(fileContent);
+
+        results.push({
+          url: link,
+          title: parsed.data?.title || 'Untitled',
+          content: parsed.content,
+          path: filePath.replace(REPO_ROOT + '/', ''),
+          frontmatter: parsed.data,
+          type: 'local',
+        });
+      } else {
+        results.push({
+          url: link,
+          error: 'File not found',
+          type: 'local',
+        });
+      }
+    } catch (error) {
+      results.push({
+        url: link,
+        error: error.message,
+        type: 'local',
+      });
+    }
+  }
+
+  return results;
+}
+
+/**
+ * Fetch external URLs
+ * @param {string[]} urls - Array of external URLs
+ * @returns {Promise<object[]>} Array of {url, title, content, type}
+ */
+export async function fetchExternalLinks(urls) {
+  // Dynamic import axios
+  const axios = (await import('axios')).default;
+  const results = [];
+
+  for (const url of urls) {
+    try {
+      const response = await axios.get(url, {
+        timeout: 10000,
+        headers: { 'User-Agent': 'InfluxData-Docs-Bot/1.0' },
+      });
+
+      // Extract title from HTML or use URL
+      const titleMatch = response.data.match(/<title>([^<]+)<\/title>/i);
+      const title = titleMatch ? titleMatch[1] : url;
+
+      results.push({
+        url,
+        title,
+        content: response.data,
+        type: 'external',
+        contentType: response.headers['content-type'],
+      });
+    } catch (error) {
+      results.push({
+        url,
+        error: error.message,
+        type: 'external',
+      });
    }
  }
 
   return results;
 }
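Assuming the new exports above, a small usage sketch of how `extractLinks` categorizes draft links — the draft text here is invented for illustration:

```js
import { extractLinks } from './lib/content-scaffolding.js';

const draft = `
See [setup](./setup.md), the [glossary](../reference/glossary.md),
the [API](https://example.com/api), and [this section](#details).
`;

console.log(extractLinks(draft));
// → {
//     localFiles: [ './setup.md', '../reference/glossary.md' ],
//     external: [ 'https://example.com/api' ]
//   }
// Anchor and mailto: links are skipped; absolute "/" paths are ignored.
```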
@@ -28,6 +28,38 @@ export function readDraft(filePath) {
   };
 }
 
+/**
+ * Read draft content from stdin
+ * @returns {Promise<{content: string, frontmatter: object, raw: string, path: string}>}
+ */
+export async function readDraftFromStdin() {
+  return new Promise((resolve, reject) => {
+    let data = '';
+    process.stdin.setEncoding('utf8');
+
+    process.stdin.on('data', (chunk) => {
+      data += chunk;
+    });
+
+    process.stdin.on('end', () => {
+      try {
+        // Parse with gray-matter to extract frontmatter if present
+        const parsed = matter(data);
+        resolve({
+          content: parsed.content,
+          frontmatter: parsed.data || {},
+          raw: data,
+          path: '<stdin>',
+        });
+      } catch (error) {
+        reject(error);
+      }
+    });
+
+    process.stdin.on('error', reject);
+  });
+}
+
 /**
  * Write a markdown file with frontmatter
  * @param {string} filePath - Path to write to
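A sketch of consuming the new `readDraftFromStdin` export from an ES module — run it as, say, `echo "# Title" | node sketch.js` (file name illustrative):

```js
import { readDraftFromStdin } from './lib/file-operations.js';

if (!process.stdin.isTTY) {
  const draft = await readDraftFromStdin();
  // gray-matter has already split any frontmatter out of the stream.
  console.log(draft.path); // '<stdin>'
  console.log(draft.frontmatter); // {} when the draft has none
  console.log(draft.content.trim()); // '# Title'
}
```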
@@ -0,0 +1,43 @@
#!/usr/bin/env node

/**
 * Setup script to make the `docs` command available locally after yarn install.
 * Creates a symlink in node_modules/.bin/docs pointing to scripts/docs-cli.js
 */

import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
import { existsSync, mkdirSync, symlinkSync, unlinkSync, chmodSync } from 'fs';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const rootDir = join(__dirname, '..');

const binDir = join(rootDir, 'node_modules', '.bin');
const binLink = join(binDir, 'docs');
const targetScript = join(rootDir, 'scripts', 'docs-cli.js');

try {
  // Ensure node_modules/.bin directory exists
  if (!existsSync(binDir)) {
    mkdirSync(binDir, { recursive: true });
  }

  // Remove existing symlink if it exists
  if (existsSync(binLink)) {
    unlinkSync(binLink);
  }

  // Create symlink
  symlinkSync(targetScript, binLink, 'file');

  // Ensure the target script is executable
  chmodSync(targetScript, 0o755);

  console.log('✓ Created local `docs` command in node_modules/.bin/');
  console.log('  You can now use: npx docs <command>');
  console.log('  Or add node_modules/.bin to your PATH for direct access');
} catch (error) {
  console.error('Failed to setup local docs command:', error.message);
  process.exit(1);
}
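One way to sanity-check the script's effect (a sketch, not part of the change; it assumes you run it from the repository root after install):

```js
// Confirm the symlink created above resolves to the CLI script.
import { readlinkSync } from 'fs';
import { join } from 'path';

const link = join('node_modules', '.bin', 'docs');
console.log(readlinkSync(link)); // absolute path ending in scripts/docs-cli.js
```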
@@ -7,6 +7,7 @@ You are analyzing a documentation draft to generate an intelligent file structure
**Context file**: `.tmp/scaffold-context.json`

Read and analyze the context file, which contains:

- **draft**: The markdown content and any existing frontmatter
- **products**: Available InfluxDB products (Core, Enterprise, Cloud, etc.)
- **productHints**: Products mentioned or suggested based on content analysis
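For readers skimming the prompt, a sketch of the shape those three fields imply (all values are illustrative assumptions, not taken from the repository):

```js
// Illustrative shape of .tmp/scaffold-context.json (values assumed)
const context = {
  draft: {
    content: '# Manage databases\n...',  // markdown body
    frontmatter: {},                     // any existing frontmatter
  },
  products: ['Core', 'Enterprise', 'Cloud'],
  productHints: ['Core'],                // suggested by content analysis
};
```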
@@ -54,6 +55,7 @@ For each file, create complete frontmatter with:
### 4. Code Sample Considerations

Based on `versionInfo`:

- Use version-specific CLI commands (influxdb3, influx, influxctl)
- Reference appropriate API endpoints (/api/v3, /api/v2)
- Note testing requirements from `conventions.testing`
@@ -61,6 +63,7 @@ Based on `versionInfo`:
### 5. Style Compliance

Follow conventions from `conventions.namingRules`:

- Files: Use lowercase with hyphens (e.g., `manage-databases.md`)
- Directories: Use lowercase with hyphens
- Shared content: Place in appropriate `/content/shared/` subdirectory
@@ -133,4 +136,8 @@ Generate a JSON proposal matching the schema in `scripts/schemas/scaffold-propos
4. Generate complete frontmatter for all files
5. Save the proposal to `.tmp/scaffold-proposal.json`

The proposal will be validated and used by `yarn docs:create --proposal .tmp/scaffold-proposal.json` to create the files.
The following command validates and creates files from the proposal:

```bash
npx docs create --proposal .tmp/scaffold-proposal.json
```
yarn.lock (+12 lines)
@@ -194,6 +194,11 @@
  resolved "https://registry.yarnpkg.com/@evilmartians/lefthook/-/lefthook-1.12.3.tgz#081eca59a6d33646616af844244ce6842cd6b5a5"
  integrity sha512-MtXIt8h+EVTv5tCGLzh9UwbA/LRv6esdPJOHlxr8NDKHbFnbo8PvU5uVQcm3PAQTd4DZN3HoyokqrwGwntoq6w==

"@github/copilot@latest":
  version "0.0.353"
  resolved "https://registry.yarnpkg.com/@github/copilot/-/copilot-0.0.353.tgz#3c8d8a072b3defbd2200c9fe4fb636d633ac7f1e"
  integrity sha512-OYgCB4Jf7Y/Wor8mNNQcXEt1m1koYm/WwjGsr5mwABSVYXArWUeEfXqVbx+7O87ld5b+aWy2Zaa2bzKV8dmqaw==

"@humanfs/core@^0.19.1":
  version "0.19.1"
  resolved "https://registry.yarnpkg.com/@humanfs/core/-/core-0.19.1.tgz#17c55ca7d426733fe3c561906b8173c336b40a77"
@@ -1364,6 +1369,13 @@ confbox@^0.2.2:
  resolved "https://registry.yarnpkg.com/confbox/-/confbox-0.2.2.tgz#8652f53961c74d9e081784beed78555974a9c110"
  integrity sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==

copilot@^0.0.2:
  version "0.0.2"
  resolved "https://registry.yarnpkg.com/copilot/-/copilot-0.0.2.tgz#4712810c9182cd784820ed44627bedd32dd377f9"
  integrity sha512-nedf34AaYj9JnFhRmiJEZemAno2WDXMypq6FW5aCVR0N+QdpQ6viukP1JpvJDChpaMEVvbUkMjmjMifJbO/AgQ==
  dependencies:
    "@github/copilot" latest

core-util-is@1.0.2:
  version "1.0.2"
  resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"