chore(ci): Add automation scripts that compare influxdb3 CLI help output to the reference documentation and generate an audit report. Runs InfluxDB 3 Core and Enterprise using Docker, improves compose.yaml, and restructures helper scripts by product version.

pull/6190/head
Jason Stirnaman 2025-07-02 16:48:51 -05:00
parent b0294eb0ba
commit 9a4721aa40
16 changed files with 1675 additions and 755 deletions

View File

@ -0,0 +1,142 @@
name: Audit Documentation

on:
  workflow_dispatch:
    inputs:
      product:
        description: 'Product to audit'
        required: true
        type: choice
        options:
          - core
          - enterprise
          - clustered
          - cloud-dedicated
          - all-monolith
          - all-distributed
      version:
        description: 'Version to audit (use "local" for running containers)'
        required: false
        default: 'local'
      create_issue:
        # Referenced by the "Create CLI audit issue" step below; it must be
        # declared here or github.event.inputs.create_issue is always empty.
        description: 'Create a GitHub issue from the audit report'
        required: false
        type: choice
        options:
          - 'true'
          - 'false'
        default: 'false'
  schedule:
    # Run weekly on Mondays at 9 AM UTC
    - cron: '0 9 * * 1'

jobs:
  audit-cli:
    name: Audit CLI Documentation
    runs-on: ubuntu-latest
    # Scheduled runs carry no workflow_dispatch inputs (github.event.inputs.*
    # is empty), so they must be allowed through explicitly here — otherwise
    # the weekly audit is silently skipped.
    if: >-
      github.event_name == 'schedule' ||
      contains(fromJSON('["core", "enterprise", "all-monolith"]'), github.event.inputs.product)
    env:
      # Fall back to defaults so scheduled runs behave like "all-monolith local".
      PRODUCT: ${{ github.event.inputs.product || 'all-monolith' }}
      VERSION: ${{ github.event.inputs.version || 'local' }}
    steps:
      - uses: actions/checkout@v4
      - name: Set up Docker
        if: env.VERSION == 'local'
        run: |
          docker compose up -d influxdb3-core influxdb3-enterprise
          sleep 10 # Wait for containers to be ready
      - name: Run CLI audit
        run: |
          # Quote expansions so unexpected values cannot be word-split.
          if [ "$PRODUCT" = "all-monolith" ]; then
            ./helper-scripts/influxdb3-monolith/audit-cli-documentation.sh both "$VERSION"
          else
            ./helper-scripts/influxdb3-monolith/audit-cli-documentation.sh "$PRODUCT" "$VERSION"
          fi
      - name: Upload CLI audit reports
        uses: actions/upload-artifact@v4
        with:
          name: cli-audit-${{ env.PRODUCT }}-${{ env.VERSION }}
          path: helper-scripts/output/cli-audit/
          retention-days: 30
      - name: Create CLI audit issue
        if: github.event_name == 'schedule' || github.event.inputs.create_issue == 'true'
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const product = process.env.PRODUCT;
            const version = process.env.VERSION;
            // Read audit report; skip issue creation (with a warning) if the
            // audit step produced no report rather than failing the job.
            const reportPath = `helper-scripts/output/cli-audit/documentation-audit-${product}-${version}.md`;
            if (!fs.existsSync(reportPath)) {
              core.warning(`No audit report found at ${reportPath}`);
              return;
            }
            const report = fs.readFileSync(reportPath, 'utf8');
            // Create issue
            await github.rest.issues.create({
              owner: context.repo.owner,
              repo: context.repo.repo,
              title: `CLI Documentation Audit - ${product} ${version}`,
              body: report,
              labels: ['documentation', 'cli-audit', product]
            });
  audit-api:
    name: Audit API Documentation
    runs-on: ubuntu-latest
    if: contains(fromJSON('["clustered", "cloud-dedicated", "all-distributed"]'), github.event.inputs.product)
    steps:
      - uses: actions/checkout@v4
      - name: Run API audit
        run: |
          echo "API audit not yet implemented"
          # TODO: Implement API documentation audit
          # ./helper-scripts/influxdb3-distributed/audit-api-documentation.sh ${{ github.event.inputs.product }}
      - name: Upload API audit reports
        if: false # Enable when API audit is implemented
        uses: actions/upload-artifact@v4
        with:
          name: api-audit-${{ github.event.inputs.product }}
          path: helper-scripts/output/api-audit/
          retention-days: 30
  summary:
    name: Generate Summary Report
    runs-on: ubuntu-latest
    needs: [audit-cli, audit-api]
    if: always()
    steps:
      - uses: actions/checkout@v4
      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: audit-artifacts/
      - name: Generate summary
        run: |
          echo "# Documentation Audit Summary" > summary.md
          echo "Date: $(date)" >> summary.md
          echo "Product: ${{ github.event.inputs.product }}" >> summary.md
          echo "Version: ${{ github.event.inputs.version }}" >> summary.md
          echo "" >> summary.md
          # Add CLI audit results if available.
          # ([ -d "dir-*" ] never matches — globs do not expand inside quotes —
          # so probe for matching directories with compgen instead.)
          if compgen -G "audit-artifacts/cli-audit-*" > /dev/null; then
            echo "## CLI Audit Results" >> summary.md
            cat audit-artifacts/cli-audit-*/*.md >> summary.md
          fi
          # Add API audit results if available
          if compgen -G "audit-artifacts/api-audit-*" > /dev/null; then
            echo "## API Audit Results" >> summary.md
            cat audit-artifacts/api-audit-*/*.md >> summary.md
          fi
      - name: Upload summary
        uses: actions/upload-artifact@v4
        with:
          name: audit-summary
          path: summary.md
          retention-days: 30

63
.github/workflows/prepare-release.yml vendored Normal file
View File

@ -0,0 +1,63 @@
name: Prepare Documentation Release

on:
  workflow_dispatch:
    inputs:
      product:
        description: 'Product to release'
        required: true
        type: choice
        options:
          - core
          - enterprise
          - cloud-serverless
          - cloud-dedicated
      version:
        description: 'Version number (e.g., 3.2.1)'
        required: true
      release_type:
        description: 'Release type'
        required: true
        type: choice
        options:
          - major
          - minor
          - patch
          - hotfix

jobs:
  prepare-release:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Create release branch
        run: |
          # Quote the interpolated version so unexpected characters cannot be
          # word-split by the shell.
          git checkout -b "docs-release-v${{ inputs.version }}"
      - name: Generate release notes
        run: |
          ./helper-scripts/common/generate-release-notes.sh \
            --product "${{ inputs.product }}" \
            --version "${{ inputs.version }}" \
            --output "content/influxdb3/${{ inputs.product }}/release-notes/v${{ inputs.version }}.md"
      - name: Update product versions
        run: |
          # Script to update data/products.yml
          ./helper-scripts/common/update-product-version.sh \
            --product "${{ inputs.product }}" \
            --version "${{ inputs.version }}"
      - name: Create release checklist issue
        uses: actions/github-script@v7
        with:
          script: |
            const checklist = require('./.github/scripts/release-checklist.js');
            await checklist.createIssue({
              github,
              context,
              product: '${{ inputs.product }}',
              version: '${{ inputs.version }}',
              releaseType: '${{ inputs.release_type }}'
            })

1
.gitignore vendored
View File

@ -11,6 +11,7 @@ node_modules
/content/influxdb*/**/api/**/*.html
!api-docs/**/.config.yml
/api-docs/redoc-static.html*
/helper-scripts/output/*
/telegraf-build
!telegraf-build/templates
!telegraf-build/scripts

View File

@ -1,6 +1,7 @@
# This is a Docker Compose file for the InfluxData documentation site.
## Run documentation tests for code samples.
name: influxdata-docs
# Configure your credentials in the following secrets files.
secrets:
influxdb2-admin-username:
file: ~/.env.influxdb2-admin-username
@ -8,6 +9,10 @@ secrets:
file: ~/.env.influxdb2-admin-password
influxdb2-admin-token:
file: ~/.env.influxdb2-admin-token
influxdb3-core-admin-token:
file: ~/.env.influxdb3-core-admin-token
influxdb3-enterprise-admin-token:
file: ~/.env.influxdb3-enterprise-admin-token
services:
local-dev:
build:
@ -302,6 +307,9 @@ services:
influxdb3-core:
container_name: influxdb3-core
image: influxdb:3-core
# Set variables (except your auth token) for Core in the .env.3core file.
env_file:
- .env.3core
ports:
- 8282:8181
command:
@ -319,14 +327,18 @@ services:
- type: bind
source: test/.influxdb3/core/plugins
target: /var/lib/influxdb3/plugins
environment:
- INFLUXDB3_AUTH_TOKEN=/run/secrets/influxdb3-core-admin-token
secrets:
- influxdb3-core-admin-token
influxdb3-enterprise:
container_name: influxdb3-enterprise
image: influxdb:3-enterprise
# Set license email and other variables (except your auth token) for Enterprise in the .env.3ent file.
env_file:
- .env.3ent
ports:
- 8181:8181
# Change the INFLUXDB3_LICENSE_EMAIL environment variable to your email address. You can also set it in a `.env` file in the same directory as this compose file. Docker Compose automatically loads the .env file.
# The license email option is only used the first time you run the container; you can't change the license email after the first run.
# The server stores the license in the data directory in the object store and the license is associated with the cluster ID and email.
command:
- influxdb3
- serve
@ -336,7 +348,8 @@ services:
- --object-store=file
- --data-dir=/var/lib/influxdb3/data
- --plugin-dir=/var/lib/influxdb3/plugins
- --license-email=${INFLUXDB3_LICENSE_EMAIL}
environment:
- INFLUXDB3_AUTH_TOKEN=/run/secrets/influxdb3-enterprise-admin-token
volumes:
- type: bind
source: test/.influxdb3/enterprise/data
@ -344,6 +357,8 @@ services:
- type: bind
source: test/.influxdb3/enterprise/plugins
target: /var/lib/influxdb3/plugins
secrets:
- influxdb3-enterprise-admin-token
telegraf-pytest:
container_name: telegraf-pytest
image: influxdata/docs-pytest

View File

@ -1,36 +1,132 @@
# InfluxData documentation helper scripts
# InfluxData Documentation Helper Scripts
This directory contains scripts designed to help make specific maintenance
processes easier.
This directory contains scripts to assist with InfluxDB documentation workflows, including release notes generation, CLI/API documentation auditing, and version management.
## InfluxDB Clustered release artifacts
## Directory Structure
**Script:** `./clustered-release-artifacts.sh`
```
helper-scripts/
├── common/ # Shared scripts used across all products
├── influxdb3-monolith/ # Scripts for InfluxDB 3 Core & Enterprise
├── influxdb3-distributed/ # Scripts for InfluxDB 3 Clustered & Cloud Dedicated
├── cloud-serverless/ # Scripts for InfluxDB Cloud Serverless
└── output/ # Generated outputs from all scripts
```
Each InfluxDB Clustered release has the following associated artifacts that need
to be provided with the release notes:
## Product Categories
- `example-customer.yaml`
- `app-instance-schema.json`
### InfluxDB 3 Monolith
- **Products**: InfluxDB 3 Core, InfluxDB 3 Enterprise
- **Deployment**: Single binary deployment
- **Scripts Location**: `influxdb3-monolith/`
This script uses an InfluxDB Clustered pull secret to pull down the required
assets and store them in `static/downloads/clustered-release-artifacts/<RELEASE>`.
### InfluxDB 3 Distributed
- **Products**: InfluxDB 3 Clustered, InfluxDB 3 Cloud Dedicated
- **Deployment**: Distributed/Kubernetes deployment
- **Scripts Location**: `influxdb3-distributed/`
1. **Set up the pull secret:**
### Cloud Serverless
- **Product**: InfluxDB Cloud Serverless
- **Deployment**: Fully managed cloud service
- **Scripts Location**: `cloud-serverless/`
The **Clustered Pull Secret** (config.json) is available in Docs Team
1Password vault. Download the pull secret and store it in the
`/tmp/influxdbsecret` directory on your local machine.
## Common Scripts
2. Install dependencies:
- [Install `crane`](https://github.com/google/go-containerregistry/tree/main/cmd/crane#installation).
- [Install `jq`](https://jqlang.org/download/)
### `common/generate-release-notes.sh`
Generates release notes by analyzing git commits across multiple repositories.
3. From the root of the docs project directory, run the following command to
execute the script. Provide the release version as an argument to the
script--for example:
**Usage:**
```bash
./common/generate-release-notes.sh [--no-fetch] [--pull] <from_version> <to_version> <primary_repo_path> [additional_repo_paths...]
```
```sh
sh ./helper-scripts/clustered-release-artifacts.sh 20250508-1719206
```
**Example:**
```bash
./common/generate-release-notes.sh v3.1.0 v3.2.0 ~/repos/influxdb ~/repos/influxdb_iox
```
### `common/update-product-version.sh`
Updates product version numbers in `data/products.yml` and related documentation files.
**Usage:**
```bash
./common/update-product-version.sh --product <product> --version <version>
```
**Example:**
```bash
./common/update-product-version.sh --product core --version 3.2.1
```
## Product-Specific Scripts
### InfluxDB 3 Monolith (Core & Enterprise)
See [`influxdb3-monolith/README.md`](influxdb3-monolith/README.md) for detailed documentation.
**Key Scripts:**
- `audit-cli-documentation.sh` - Audits CLI commands against existing documentation
- `setup-auth-tokens.sh` - Sets up authentication tokens for local containers
### InfluxDB 3 Distributed (Clustered & Cloud Dedicated)
See [`influxdb3-distributed/README.md`](influxdb3-distributed/README.md) for detailed documentation.
**Key Scripts:**
- `clustered-release-artifacts.sh` - Downloads release artifacts for Clustered releases
## Output Directory
All scripts write their outputs to organized subdirectories:
```
output/
├── release-notes/ # Generated release notes
├── cli-audit/ # CLI documentation audit reports
├── api-audit/ # API documentation audit reports
└── artifacts/ # Downloaded release artifacts
```
## GitHub Workflow Integration
These scripts are integrated with GitHub Actions workflows:
- **Workflow**: `.github/workflows/prepare-release.yml`
- **Uses**: `generate-release-notes.sh`, `update-product-version.sh`
## Quick Start
1. **Clone the repository**
```bash
git clone https://github.com/influxdata/docs-v2.git
cd docs-v2/helper-scripts
```
2. **Make scripts executable**
```bash
find . -name "*.sh" -type f -exec chmod +x {} \;
```
3. **Run a script**
```bash
# Generate release notes
./common/generate-release-notes.sh v3.1.0 v3.2.0 ~/repos/influxdb
# Audit CLI documentation
./influxdb3-monolith/audit-cli-documentation.sh core local
```
## Contributing
When adding new scripts:
1. Place in the appropriate product directory
2. Follow naming conventions (lowercase with hyphens)
3. Include comprehensive help text and documentation
4. Update the relevant README files
5. Test with all applicable products
6. Ensure outputs go to the `output/` directory
## Archived Scripts
Deprecated scripts are moved to `archive/` subdirectories. These scripts are kept for reference but should not be used in active workflows.

View File

@ -0,0 +1,62 @@
# InfluxDB Cloud Serverless Helper Scripts
This directory contains scripts specific to InfluxDB Cloud Serverless documentation workflows.
## Overview
InfluxDB Cloud Serverless is a fully managed cloud service that requires different documentation maintenance approaches compared to self-hosted products.
## Scripts (Planned)
### audit-api-documentation.sh (TODO)
Audit API documentation against the Cloud Serverless API endpoints.
**Usage:**
```bash
./audit-api-documentation.sh [version]
```
### update-pricing-information.sh (TODO)
Update pricing and billing documentation based on current Cloud Serverless offerings.
**Usage:**
```bash
./update-pricing-information.sh
```
### validate-tutorial-links.sh (TODO)
Validate that tutorial links and examples work with current Cloud Serverless endpoints.
**Usage:**
```bash
./validate-tutorial-links.sh
```
## Considerations for Cloud Serverless
Unlike self-hosted products, Cloud Serverless:
- Has no CLI tool to audit
- Uses exclusively HTTP API endpoints
- Has dynamic pricing that may need regular updates
- Requires authentication against live cloud services for testing
- Has region-specific endpoints and limitations
## Future Development
As Cloud Serverless documentation needs evolve, this directory will be expanded with:
- API endpoint validation scripts
- Tutorial testing automation
- Pricing documentation updates
- Regional documentation maintenance
- Authentication and permissions testing
## Integration
These scripts will integrate with the main documentation workflow via:
- GitHub Actions for automated testing
- Scheduled runs for pricing updates
- PR validation for API changes
- Integration with common utility functions

View File

@ -0,0 +1,117 @@
# Common Helper Scripts
This directory contains scripts that are shared across all InfluxDB documentation products.
## Scripts
### generate-release-notes.sh
Generates release notes by analyzing git commits between two versions across multiple repositories.
**Usage:**
```bash
./generate-release-notes.sh [options] <from_version> <to_version> <primary_repo> [additional_repos...]
```
**Options:**
- `--no-fetch` - Skip fetching latest commits from remote
- `--pull` - Pull latest changes (use with caution)
**Example:**
```bash
# Generate release notes for v3.2.0
./generate-release-notes.sh v3.1.0 v3.2.0 ~/repos/influxdb ~/repos/influxdb_iox
# Skip fetch for faster local testing
./generate-release-notes.sh --no-fetch v3.1.0 v3.2.0 ~/repos/influxdb
```
**Output:**
- Creates `release-notes-<version>.md` in current directory
- Includes sections for Features, Bug Fixes, Breaking Changes, Performance, and API changes
### update-product-version.sh
Updates product version information in documentation configuration files.
**Usage:**
```bash
./update-product-version.sh --product <product> --version <version>
```
**Supported Products:**
- `core` - InfluxDB 3 Core
- `enterprise` - InfluxDB 3 Enterprise
- `clustered` - InfluxDB 3 Clustered
- `cloud-dedicated` - InfluxDB 3 Cloud Dedicated
- `cloud-serverless` - InfluxDB Cloud Serverless
**Example:**
```bash
# Update Core to version 3.2.1
./update-product-version.sh --product core --version 3.2.1
# Update Clustered to version 2024.1
./update-product-version.sh --product clustered --version 2024.1
```
**What it updates:**
- `data/products.yml` - Main product version configuration
- Docker Compose example files
- Installation instructions
- Download links
## Library Functions
### lib/docker-utils.sh
Shared Docker utility functions used by other scripts.
**Available Functions:**
- `check_docker_running()` - Verify Docker daemon is running
- `container_exists()` - Check if a container exists
- `container_running()` - Check if a container is running
- `pull_image()` - Pull Docker image with retry logic
- `load_auth_token()` - Load authentication tokens from secret files
**Usage in scripts:**
```bash
source "$(dirname "$0")/../common/lib/docker-utils.sh"
if container_running "influxdb3-core"; then
echo "Container is running"
fi
```
## Integration with GitHub Actions
These scripts are designed to work in both local development and CI/CD environments:
**Local Development:**
- Assumes Docker Desktop or Docker Engine installed
- Uses local file paths for repositories
- Can work with running containers
**GitHub Actions:**
- Automatically detects CI environment
- Uses workspace paths
- Handles authentication via secrets
## Best Practices
1. **Error Handling**: All scripts use `set -e` to exit on errors
2. **Logging**: Color-coded output for better readability
3. **Validation**: Input validation before processing
4. **Idempotency**: Scripts can be run multiple times safely
5. **Documentation**: Comprehensive help text in each script
## Adding New Common Scripts
When adding scripts to this directory:
1. Ensure they are truly product-agnostic
2. Follow existing naming conventions
3. Add comprehensive documentation
4. Include error handling and validation
5. Update this README
6. Test with all supported products

View File

@ -41,7 +41,7 @@ done
# Parse remaining arguments
FROM_VERSION="${1:-v3.1.0}"
TO_VERSION="${2:-v3.2.0}"
PRIMARY_REPO="${3:-/Users/ja/Documents/github/influxdb}"
PRIMARY_REPO="${3:-${HOME}/Documents/github/influxdb}"
# Collect additional repositories (all arguments after the third)
ADDITIONAL_REPOS=()

View File

@ -0,0 +1,205 @@
#!/bin/bash
# Docker utility functions shared across helper scripts
# Intended to be sourced by other scripts, e.g.:
#   source "$(dirname "$0")/../common/lib/docker-utils.sh"

# Color codes
# ANSI escape sequences for colored terminal output (use with `echo -e`).
# Exported so scripts that source this file inherit them in subshells.
export RED='\033[0;31m'
export GREEN='\033[0;32m'
export YELLOW='\033[1;33m'
export BLUE='\033[0;34m'
export NC='\033[0m' # No Color
# Verify the Docker daemon is reachable.
# Prints guidance and returns 1 when `docker info` fails; returns 0 otherwise.
check_docker_running() {
    if docker info > /dev/null 2>&1; then
        return 0
    fi
    echo -e "${RED}Error: Docker is not running${NC}"
    echo "Please start Docker Desktop or Docker Engine"
    return 1
}
# Return 0 if a container with exactly this name exists (running or stopped).
#
# $1 - container name
#
# Uses fixed-string, whole-line matching (-Fx) instead of a regex anchored
# with ^...$, so regex metacharacters in the name (e.g. '.') cannot cause
# false positives against similarly named containers.
container_exists() {
    local container_name=$1
    docker ps -a --format '{{.Names}}' | grep -Fxq "$container_name"
}
# Return 0 if a container with exactly this name is currently running.
#
# $1 - container name
#
# Uses fixed-string, whole-line matching (-Fx) instead of a regex anchored
# with ^...$, so regex metacharacters in the name cannot cause false positives.
container_running() {
    local container_name=$1
    docker ps --format '{{.Names}}' | grep -Fxq "$container_name"
}
# Pull a Docker image, retrying up to three times with a short pause between
# attempts. Returns 0 on success, 1 once all attempts are exhausted.
#
# $1 - image reference (e.g. "influxdb:3-core")
pull_image() {
    local image=$1
    local max_retries=3
    local attempt
    echo -e "${BLUE}Pulling image: $image${NC}"
    for (( attempt = 1; attempt <= max_retries; attempt++ )); do
        if docker pull "$image"; then
            echo -e "${GREEN}✓ Successfully pulled $image${NC}"
            return 0
        fi
        if (( attempt < max_retries )); then
            echo -e "${YELLOW}Retry $attempt/$max_retries...${NC}"
            sleep 2
        fi
    done
    echo -e "${RED}✗ Failed to pull $image after $max_retries attempts${NC}"
    return 1
}
# Load an admin token from ~/.env.influxdb3-<product>-admin-token and export
# it under the caller-supplied variable name.
#
# $1 - product slug (e.g. "core" or "enterprise")
# $2 - name of the environment variable to export the token into
#
# Returns 0 on success; warns and returns 1 if the file is missing or empty.
load_auth_token() {
    local product=$1
    local token_var_name=$2
    local secret_file="$HOME/.env.influxdb3-${product}-admin-token"
    if [ -f "$secret_file" ] && [ -s "$secret_file" ]; then
        local token
        token=$(cat "$secret_file")
        # printf -v assigns without eval, so token contents (quotes, spaces,
        # shell metacharacters) can never be interpreted as shell code.
        printf -v "$token_var_name" '%s' "$token"
        export "$token_var_name"
        return 0
    else
        echo -e "${YELLOW}Warning: No token found in $secret_file${NC}"
        return 1
    fi
}
# Ensure the named container is up, starting it via docker compose if needed.
#
# $1 - container name
# $2 - compose service name (defaults to the container name)
#
# Polls for up to 30 seconds after `docker compose up`; returns 1 on start
# failure or timeout, 0 once the container is running.
ensure_container_running() {
    local container_name=$1
    local service_name=${2:-$container_name}
    if container_running "$container_name"; then
        return 0
    fi
    echo -e "${YELLOW}Starting $container_name...${NC}"
    if ! docker compose up -d "$service_name"; then
        echo -e "${RED}✗ Failed to start $container_name${NC}"
        return 1
    fi
    # Wait for container to be ready (poll once per second).
    local max_wait=30
    local waited=0
    until container_running "$container_name"; do
        if [ "$waited" -ge "$max_wait" ]; then
            echo -e "${RED}✗ Timeout waiting for $container_name to start${NC}"
            return 1
        fi
        sleep 1
        waited=$((waited + 1))
    done
    echo -e "${GREEN}$container_name is running${NC}"
    return 0
}
# Run a command inside a running container.
#
# $1   - container name
# $@.. - command and its arguments
#
# Returns 1 (with an error message) if the container is not running; otherwise
# returns docker exec's exit status.
exec_in_container() {
    local container_name=$1
    shift
    if ! container_running "$container_name"; then
        echo -e "${RED}Error: Container $container_name is not running${NC}"
        return 1
    fi
    # Pass "$@" through directly instead of flattening into a single string:
    # the original `local command="$@"` + unquoted expansion re-split arguments
    # on whitespace and glob-expanded them.
    docker exec "$container_name" "$@"
}
# Print the container's state to stdout: "not_found" when no such container
# exists, the docker status string (e.g. "running", "exited") when it does,
# or "unknown" if docker inspect produced nothing.
container_health() {
    local container_name=$1
    if container_exists "$container_name"; then
        local state
        state=$(docker inspect --format='{{.State.Status}}' "$container_name" 2>/dev/null)
        echo "${state:-unknown}"
    else
        echo "not_found"
    fi
}
# Block until the container reports a healthy status, up to a timeout.
# Containers without a health check are treated as immediately ready.
#
# $1 - container name
# $2 - timeout in seconds (default 60); polled every 2 seconds
#
# Returns 0 when ready, 1 on timeout.
wait_for_healthy() {
    local container_name=$1
    local timeout=${2:-60}
    echo -e "${BLUE}Waiting for $container_name to be healthy...${NC}"
    local elapsed=0
    local health
    while [ "$elapsed" -lt "$timeout" ]; do
        health=$(docker inspect --format='{{.State.Health.Status}}' "$container_name" 2>/dev/null || echo "no_health_check")
        case "$health" in
            healthy|no_health_check)
                echo -e "${GREEN}$container_name is ready${NC}"
                return 0
                ;;
        esac
        sleep 2
        elapsed=$((elapsed + 2))
        echo -n "."
    done
    echo -e "\n${RED}✗ Timeout waiting for $container_name to be healthy${NC}"
    return 1
}
# Validate product/version arguments used to select a Docker image.
# Versions must be X.Y.Z or the literal "local"; products are limited to the
# four known InfluxDB 3 distributions. Returns 0 when both are valid.
validate_image_tag() {
    local product=$1
    local version=$2
    # Version check: accept "local" or a three-part dotted number.
    if [ "$version" != "local" ] && [[ ! "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
        echo -e "${RED}Error: Invalid version format: $version${NC}"
        echo "Expected format: X.Y.Z (e.g., 3.2.0) or 'local'"
        return 1
    fi
    # Product check.
    case "$product" in
        core|enterprise|clustered|cloud-dedicated)
            return 0
            ;;
    esac
    echo -e "${RED}Error: Invalid product: $product${NC}"
    echo "Valid products: core, enterprise, clustered, cloud-dedicated"
    return 1
}
# Print the fully qualified Docker image reference for a product at a version.
# core/enterprise use Docker Hub tags of the form influxdb:<version>-<product>;
# clustered and cloud-dedicated share the same artifact-registry image.
# Returns 1 for an unknown product.
get_docker_image() {
    local product=$1
    local version=$2
    local clustered_repo="us-docker.pkg.dev/influxdb2-artifacts/clustered/influxdb"
    case "$product" in
        core|enterprise)
            echo "influxdb:${version}-${product}"
            ;;
        clustered|cloud-dedicated)
            # Cloud Dedicated typically uses the same image as clustered.
            echo "${clustered_repo}:${version}"
            ;;
        *)
            return 1
            ;;
    esac
}

View File

@ -0,0 +1,23 @@
#!/bin/bash
# Automatically updates version numbers in products.yml
#
# Usage: ./update-product-version.sh --product <product> --version <version>
set -e

PRODUCT=""
VERSION=""

while [[ $# -gt 0 ]]; do
    case $1 in
        --product) PRODUCT="$2"; shift 2 ;;
        --version) VERSION="$2"; shift 2 ;;
        *) echo "Unknown option: $1"; exit 1 ;;
    esac
done

# Fail fast on missing arguments instead of silently writing an empty version
# into products.yml.
if [ -z "$PRODUCT" ] || [ -z "$VERSION" ]; then
    echo "Usage: $0 --product <product> --version <version>" >&2
    exit 1
fi

# Update products.yml using yq
yq eval -i ".influxdb3_${PRODUCT}.latest_patch = \"${VERSION}\"" data/products.yml

# Update Docker compose examples.
# -E enables extended regex: under sed's default basic regex a bare '+' is a
# literal character, so the original pattern never matched anything.
# NOTE(review): `sed -i` without a suffix is GNU syntax; on macOS/BSD sed use
# `sed -i '' -E`. Also confirm compose examples actually use an
# `influxdb:3-<product>:<version>` form — verify against the example files.
find compose-examples/ -name "*.yml" -exec sed -i -E "s/influxdb:3-${PRODUCT}:[0-9.]+/influxdb:3-${PRODUCT}:${VERSION}/g" {} \;

echo "✅ Updated version to ${VERSION} for ${PRODUCT}"

View File

@ -0,0 +1,36 @@
# InfluxDB 3 distributed (Cloud Dedicated and Clustered) documentation helper scripts
This directory contains scripts designed to help make specific maintenance
processes easier.
## InfluxDB Clustered release artifacts
**Script:** `./clustered-release-artifacts.sh`
Each InfluxDB Clustered release has the following associated artifacts that need
to be provided with the release notes:
- `example-customer.yaml`
- `app-instance-schema.json`
This script uses an InfluxDB Clustered pull secret to pull down the required
assets and store them in `static/downloads/clustered-release-artifacts/<RELEASE>`.
1. **Set up the pull secret:**
The **Clustered Pull Secret** (config.json) is available in Docs Team
1Password vault. Download the pull secret and store it in the
`/tmp/influxdbsecret` directory on your local machine.
2. Install dependencies:
- [Install `crane`](https://github.com/google/go-containerregistry/tree/main/cmd/crane#installation).
- [Install `jq`](https://jqlang.org/download/)
3. From the root of the docs project directory, run the following command to
execute the script. Provide the release version as an argument to the
script--for example:
```sh
sh ./helper-scripts/clustered-release-artifacts.sh 20250508-1719206
```

View File

@ -0,0 +1,405 @@
# InfluxDB 3 Monolith (Core and Enterprise) Helper Scripts
This directory contains helper scripts specifically for InfluxDB 3 Core and Enterprise (monolith deployments), as opposed to distributed/clustered deployments.
## Overview
These scripts help with documentation workflows for InfluxDB 3 Core and Enterprise, including CLI change detection, authentication setup, API analysis, and release preparation.
## Prerequisites
- **Docker and Docker Compose**: For running InfluxDB 3 containers
- **Active containers**: InfluxDB 3 Core and/or Enterprise containers running via `docker compose`
- **Secret files**: Docker Compose secrets for auth tokens (`~/.env.influxdb3-core-admin-token` and `~/.env.influxdb3-enterprise-admin-token`)
- **Python 3**: For API analysis scripts
## Scripts
### 🔐 Authentication & Setup
#### `setup-auth-tokens.sh`
Creates and configures authentication tokens for InfluxDB 3 containers.
**Usage:**
```bash
./setup-auth-tokens.sh [core|enterprise|both]
```
**What it does:**
- Checks existing tokens in secret files (`~/.env.influxdb3-core-admin-token` and `~/.env.influxdb3-enterprise-admin-token`)
- Starts containers if not running
- Creates admin tokens using `influxdb3 create token --admin`
- Updates appropriate secret files with new tokens
- Tests tokens to ensure they work
**Example:**
```bash
# Set up both Core and Enterprise tokens
./setup-auth-tokens.sh both
# Set up only Enterprise
./setup-auth-tokens.sh enterprise
```
### 🔍 CLI Analysis
#### `detect-cli-changes.sh`
Compares CLI help output between different InfluxDB 3 versions to identify changes.
**Usage:**
```bash
./detect-cli-changes.sh [core|enterprise] <old-version> <new-version>
```
**Features:**
- Compare any two versions (released or local containers)
- Extract comprehensive help for all commands and subcommands
- Generate unified diff reports
- Create markdown summaries of changes
- Handle authentication automatically
- **NEW**: Analyze source code changes and correlate with CLI changes
- **NEW**: Identify related features between CLI and backend modifications
- **NEW**: Generate recommended documentation focus areas
**Examples:**
```bash
# Compare two released versions
./detect-cli-changes.sh core 3.1.0 3.2.0
# Compare released vs local development container
./detect-cli-changes.sh enterprise 3.1.0 local
# Use "local" to reference running Docker containers
./detect-cli-changes.sh core 3.1.0 local
```
**Output:**
- `helper-scripts/output/cli-changes/cli-{product}-{version}.txt` - Full CLI help
- `helper-scripts/output/cli-changes/cli-changes-{product}-{old}-to-{new}.diff` - Diff report
- `helper-scripts/output/cli-changes/cli-changes-{product}-{old}-to-{new}-summary.md` - Enhanced summary with:
- CLI changes analysis
- Source code features, breaking changes, and API modifications
- Cross-referenced CLI and source correlations
- Recommended documentation focus areas
- `helper-scripts/output/cli-changes/source-changes-{product}-{old}-to-{new}.md` - Full source code analysis (when available)
#### `compare-cli-local.sh`
Convenience script for comparing a released version against your local running container.
**Usage:**
```bash
./compare-cli-local.sh [core|enterprise] [released-version]
```
**Features:**
- Auto-starts containers if not running
- Shows local container version
- Provides quick testing commands
- Streamlined workflow for development
**Example:**
```bash
# Compare Core local container vs 3.1.0 release
./compare-cli-local.sh core 3.1.0
```
### 🔧 Development Tools
#### `extract_influxdb3_help.py`
Python script for extracting and parsing InfluxDB 3 CLI help output.
**Usage:**
```bash
python3 extract_influxdb3_help.py [options]
```
#### `compare_cli_api.py`
Python script for comparing CLI commands with API endpoints to identify discrepancies.
**Usage:**
```bash
python3 compare_cli_api.py [options]
```
#### `update-product-version.sh`
Updates product version numbers in `data/products.yml` and related files.
**Usage:**
```bash
./update-product-version.sh --product [core|enterprise] --version X.Y.Z
```
**Features:**
- Updates `data/products.yml` with new version
- Updates Docker Compose examples
- Validates version format
**Example:**
```bash
./update-product-version.sh --product core --version 3.2.1
```
## Quick Start Guide
### 1. Initial Setup
```bash
# Navigate to the monolith scripts directory
cd helper-scripts/influxdb3-monolith
# Make scripts executable
chmod +x *.sh
# Set up authentication for both products
./setup-auth-tokens.sh both
# Restart containers to load new secrets
docker compose down && docker compose up -d influxdb3-core influxdb3-enterprise
```
### 2. Basic CLI Analysis
```bash
# Start your containers
docker compose up -d influxdb3-core influxdb3-enterprise
# Compare CLI between versions
./detect-cli-changes.sh core 3.1.0 local
./detect-cli-changes.sh enterprise 3.1.0 local
# Review the output
ls ../output/cli-changes/
```
### 3. Development Workflow
```bash
# Quick comparison during development
./compare-cli-local.sh core 3.1.0
# Check what's changed
cat ../output/cli-changes/cli-changes-core-3.1.0-to-local-summary.md
```
### 4. Enhanced Analysis with Source Code Correlation
When comparing two released versions (not using "local"), the script automatically:
```bash
# Run CLI comparison with source analysis
./detect-cli-changes.sh enterprise 3.1.0 3.2.0
# Review the enhanced summary that includes:
# - CLI changes
# - Source code changes (features, fixes, breaking changes)
# - Correlation between CLI and backend
# - Recommended documentation focus areas
cat ../output/cli-changes/cli-changes-enterprise-3.1.0-to-3.2.0-summary.md
```
**Requirements for source analysis:**
- InfluxDB source repository available (searches common locations)
- Git tags for the versions being compared (e.g., v3.1.0, v3.2.0)
- Works best with the `generate-release-notes.sh` script in parent directory
## Container Integration
The scripts work with your Docker Compose setup:
**Expected container names:**
- `influxdb3-core` (port 8282)
- `influxdb3-enterprise` (port 8181)
**Docker Compose secrets:**
- `influxdb3-core-admin-token` - Admin token for Core (stored in `~/.env.influxdb3-core-admin-token`)
- `influxdb3-enterprise-admin-token` - Admin token for Enterprise (stored in `~/.env.influxdb3-enterprise-admin-token`)
- `INFLUXDB3_LICENSE_EMAIL` - Enterprise license email (set in `.env.3ent` env_file)
## Use Cases
### 📋 Release Documentation
1. **Pre-release analysis:**
```bash
./detect-cli-changes.sh core 3.1.0 3.2.0
```
2. **Update documentation based on changes**
3. **Test new commands and options**
4. **Update CLI reference pages**
### 🔬 Development Testing
1. **Compare local development:**
```bash
./compare-cli-local.sh enterprise 3.1.0
```
2. **Verify new features work**
3. **Test authentication setup**
4. **Validate CLI consistency**
### 🚀 Release Preparation
1. **Update version numbers:**
```bash
./update-product-version.sh --product core --version 3.2.1
```
2. **Generate change reports**
3. **Update examples and tutorials**
## Output Structure
```
helper-scripts/
├── output/
│ └── cli-changes/
│ ├── cli-core-3.1.0.txt # Full CLI help
│ ├── cli-core-3.2.0.txt # Full CLI help
│ ├── cli-changes-core-3.1.0-to-3.2.0.diff # Diff report
│ ├── cli-changes-core-3.1.0-to-3.2.0-summary.md # Enhanced summary with:
│ │ # - CLI changes
│ │ # - Source code analysis
│ │ # - CLI/Source correlations
│ │ # - Documentation recommendations
│ └── source-changes-core-3.1.0-to-3.2.0.md # Full source analysis
└── influxdb3-monolith/
├── README.md # This file
├── setup-auth-tokens.sh # Auth setup
├── detect-cli-changes.sh # CLI comparison with source analysis
├── compare-cli-local.sh # Local comparison
├── extract_influxdb3_help.py # Help extraction
├── compare_cli_api.py # CLI/API comparison
└── update-product-version.sh # Version updates
```
## Error Handling
### Common Issues
**Container not running:**
```bash
# Check status
docker compose ps
# Start specific service
docker compose up -d influxdb3-core
```
**Authentication failures:**
```bash
# Recreate tokens
./setup-auth-tokens.sh both
# Test manually
docker exec influxdb3-core influxdb3 create token --admin
```
**Version not found:**
```bash
# Check available versions
docker pull influxdb:3.2.0-core
docker pull influxdb:3.2.0-enterprise
```
### Debug Mode
Enable debug output for troubleshooting:
```bash
set -x
./detect-cli-changes.sh core 3.1.0 local
set +x
```
## Integration with CI/CD
### GitHub Actions Example
```yaml
- name: Detect CLI Changes
run: |
cd helper-scripts/influxdb3-monolith
./detect-cli-changes.sh core ${{ env.OLD_VERSION }} ${{ env.NEW_VERSION }}
- name: Upload CLI Analysis
uses: actions/upload-artifact@v4
with:
name: cli-analysis
path: helper-scripts/output/cli-changes/
```
### CircleCI Example
```yaml
- run:
name: CLI Change Detection
command: |
cd helper-scripts/influxdb3-monolith
./detect-cli-changes.sh enterprise 3.1.0 3.2.0
- store_artifacts:
path: helper-scripts/output/cli-changes/
```
## Best Practices
### 🔒 Security
- Secret files (`~/.env.influxdb3-*-admin-token`) are stored in your home directory and not in version control
- Rotate auth tokens regularly by re-running `setup-auth-tokens.sh`
- Use minimal token permissions when possible
### 📚 Documentation
- Run comparisons early in release cycle
- Review all diff output for breaking changes
- Update examples to use new features
- Test all documented commands
### 🔄 Workflow
- Use `local` version for development testing
- Compare against previous stable release
- Generate reports before documentation updates
- Validate changes with stakeholders
## Troubleshooting
### Script Permissions
```bash
chmod +x *.sh
```
### Missing Dependencies
```bash
# Python dependencies
pip3 install -r requirements.txt # if exists
# Docker Compose
docker compose version
```
### Container Health
```bash
# Check container logs
docker logs influxdb3-core
docker logs influxdb3-enterprise
# Test basic connectivity
docker exec influxdb3-core influxdb3 --version
```
## Contributing
When adding new scripts to this directory:
1. **Follow naming conventions**: Use lowercase with hyphens
2. **Add usage documentation**: Include help text in scripts
3. **Handle errors gracefully**: Use proper exit codes
4. **Test with both products**: Ensure Core and Enterprise compatibility
5. **Update this README**: Document new functionality
## Related Documentation
- [InfluxDB 3 Core CLI Reference](/influxdb3/core/reference/cli/)
- [InfluxDB 3 Enterprise CLI Reference](/influxdb3/enterprise/reference/cli/)
- [Release Process Documentation](../../.context/templates/release-checklist-template.md)
- [CLI Testing Guide](../../.context/templates/cli-testing-guide.md)

View File

@ -0,0 +1,316 @@
#!/bin/bash
# Audit CLI documentation against current CLI help output
# Usage: ./audit-cli-documentation.sh [core|enterprise|both] [version]
# Example: ./audit-cli-documentation.sh core 3.2.0
#
# Exits non-zero on the first failing command (set -e), including when the
# required container/image is unavailable.
set -e

# ANSI color codes for terminal status output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'  # reset / no color

# Parse arguments: default to auditing both products against the running
# ("local") containers
PRODUCT=${1:-both}
VERSION=${2:-local}

echo -e "${BLUE}🔍 InfluxDB 3 CLI Documentation Audit${NC}"
echo "======================================="
echo "Product: $PRODUCT"
echo "Version: $VERSION"
echo ""

# Set up output directory
# NOTE(review): path is relative — assumes the script is run from the repo
# root, not from helper-scripts/; confirm against the workflow invocation
OUTPUT_DIR="helper-scripts/output/cli-audit"
mkdir -p "$OUTPUT_DIR"
# Load tokens from secret files
# Reads per-product admin tokens from dotfiles in $HOME (if present and
# non-empty) into INFLUXDB3_CORE_TOKEN / INFLUXDB3_ENTERPRISE_TOKEN.
# Variables are set in the current shell but not exported.
load_tokens() {
    SECRET_CORE_FILE="$HOME/.env.influxdb3-core-admin-token"
    SECRET_ENT_FILE="$HOME/.env.influxdb3-enterprise-admin-token"
    # -s: only read the file when it exists and is non-empty
    if [ -f "$SECRET_CORE_FILE" ] && [ -s "$SECRET_CORE_FILE" ]; then
        INFLUXDB3_CORE_TOKEN=$(cat "$SECRET_CORE_FILE")
    fi
    if [ -f "$SECRET_ENT_FILE" ] && [ -s "$SECRET_ENT_FILE" ]; then
        INFLUXDB3_ENTERPRISE_TOKEN=$(cat "$SECRET_ENT_FILE")
    fi
}
# Get current CLI help for a product
# Concatenates `influxdb3 --help`, the help of every top-level command, and a
# curated list of two-word subcommands into one file, each section preceded by
# an "===== influxdb3 <cmd> --help =====" marker consumed by audit_docs.
#   $1 - product ("core" or "enterprise")
#   $2 - output file path
# VERSION=local uses the running influxdb3-<product> container; any other
# VERSION pulls and runs the influxdb:<VERSION>-<product> image.
# Returns 1 (with an error message) when the container/image is unavailable.
extract_current_cli() {
    local product=$1
    local output_file=$2
    # NOTE(review): load_tokens sets token variables that are never used
    # below (help output needs no auth) — confirm whether this call is still
    # needed here
    load_tokens
    if [ "$VERSION" == "local" ]; then
        local container_name="influxdb3-${product}"
        echo -n "Extracting current CLI help from ${container_name}..."
        # Check if container is running (exact-name match)
        if ! docker ps --format '{{.Names}}' | grep -q "^${container_name}$"; then
            echo -e " ${RED}${NC}"
            echo "Error: Container ${container_name} is not running."
            echo "Start it with: docker compose up -d influxdb3-${product}"
            return 1
        fi
        # Extract comprehensive help (top-level usage)
        docker exec "${container_name}" influxdb3 --help > "$output_file" 2>&1
        # Extract all subcommand help; `|| true` keeps set -e from aborting
        # on commands a product lacks
        for cmd in create delete disable enable query show test update write; do
            echo "" >> "$output_file"
            echo "===== influxdb3 $cmd --help =====" >> "$output_file"
            docker exec "${container_name}" influxdb3 $cmd --help >> "$output_file" 2>&1 || true
        done
        # Extract detailed subcommand help (two-word commands)
        local subcommands=(
            "create database"
            "create token admin"
            "create token"
            "create trigger"
            "create last_cache"
            "create distinct_cache"
            "create table"
            "show databases"
            "show tokens"
            "show system"
            "delete database"
            "delete table"
            "delete trigger"
            "update database"
            "test wal_plugin"
            "test schedule_plugin"
        )
        for subcmd in "${subcommands[@]}"; do
            echo "" >> "$output_file"
            echo "===== influxdb3 $subcmd --help =====" >> "$output_file"
            # $subcmd is intentionally unquoted so "create database" splits
            # into two CLI arguments
            docker exec "${container_name}" influxdb3 $subcmd --help >> "$output_file" 2>&1 || true
        done
        echo -e " ${GREEN}${NC}"
    else
        # Use specific version image
        local image="influxdb:${VERSION}-${product}"
        echo -n "Extracting CLI help from ${image}..."
        if ! docker pull "${image}" > /dev/null 2>&1; then
            echo -e " ${RED}${NC}"
            echo "Error: Failed to pull image ${image}"
            return 1
        fi
        # Extract help from specific version
        docker run --rm "${image}" influxdb3 --help > "$output_file" 2>&1
        # Extract subcommand help
        # NOTE(review): unlike the "local" branch, this path skips the
        # detailed two-word subcommand list — confirm whether that is
        # intentional or an omission
        for cmd in create delete disable enable query show test update write; do
            echo "" >> "$output_file"
            echo "===== influxdb3 $cmd --help =====" >> "$output_file"
            docker run --rm "${image}" influxdb3 $cmd --help >> "$output_file" 2>&1 || true
        done
        echo -e " ${GREEN}${NC}"
    fi
}
# Parse CLI help to extract structured information
# Converts a raw help dump (as produced by extract_current_cli) into a
# Markdown summary: one "## <command>" heading per marker line, with an
# "### Options:" bullet list of long flags and descriptions.
#   $1 - input help dump file
#   $2 - output Markdown file (overwritten)
# NOTE(review): this function is defined but never invoked by the main
# dispatch at the bottom of this script — confirm whether it should be
# wired into the audit flow or removed.
parse_cli_help() {
    local help_file=$1
    local parsed_file=$2
    echo "# CLI Commands and Options" > "$parsed_file"
    echo "" >> "$parsed_file"
    local current_command=""
    local in_options=false
    # Line-oriented state machine: track the current command section and
    # whether we are inside its Options: block
    while IFS= read -r line; do
        # Detect command headers
        if echo "$line" | grep -q "^===== influxdb3.*--help ====="; then
            current_command=$(echo "$line" | sed 's/^===== //' | sed 's/ --help =====//')
            echo "## $current_command" >> "$parsed_file"
            echo "" >> "$parsed_file"
            in_options=false
        # Detect options sections
        elif echo "$line" | grep -q "^Options:"; then
            echo "### Options:" >> "$parsed_file"
            echo "" >> "$parsed_file"
            in_options=true
        # Parse option lines (lines starting with a dash while in Options:)
        elif [ "$in_options" = true ] && echo "$line" | grep -qE "^\s*-"; then
            # Extract option and description
            option=$(echo "$line" | grep -oE '\-\-[a-z][a-z0-9-]*' | head -1)
            short_option=$(echo "$line" | grep -oE '\s-[a-zA-Z],' | sed 's/[, ]//g')
            # Strip the flag tokens from the front, leaving the description
            description=$(echo "$line" | sed 's/^[[:space:]]*-[^[:space:]]*[[:space:]]*//' | sed 's/^[[:space:]]*--[^[:space:]]*[[:space:]]*//')
            if [ -n "$option" ]; then
                if [ -n "$short_option" ]; then
                    echo "- \`$short_option, $option\`: $description" >> "$parsed_file"
                else
                    echo "- \`$option\`: $description" >> "$parsed_file"
                fi
            fi
        # Reset options flag for new sections (e.g. "Usage:", "Commands:")
        elif echo "$line" | grep -qE "^[A-Z][a-z]+:$"; then
            in_options=false
        fi
    done < "$help_file"
}
# Map a product name to its product-specific CLI reference docs directory.
# Prints the directory path for "core" or "enterprise"; prints nothing
# (and still returns 0) for any other product.
find_docs() {
    local product_name=$1
    local docs_root=""
    if [ "$product_name" = "core" ]; then
        docs_root="content/influxdb3/core/reference/cli/influxdb3"
    elif [ "$product_name" = "enterprise" ]; then
        docs_root="content/influxdb3/enterprise/reference/cli/influxdb3"
    fi
    if [ -n "$docs_root" ]; then
        echo "$docs_root"
    fi
}
# Audit documentation against CLI
# Compares the extracted help dump with the docs tree and writes a Markdown
# audit report listing commands whose expected doc file is absent from both
# the product-specific and shared content directories.
#   $1 - product ("core" or "enterprise")
#   $2 - CLI help dump file (from extract_current_cli)
#   $3 - audit report output file (overwritten)
audit_docs() {
    local product=$1
    local cli_file=$2
    local audit_file=$3
    local docs_path
    docs_path=$(find_docs "$product")
    local shared_path="content/shared/influxdb3-cli"

    echo "# CLI Documentation Audit - $product" > "$audit_file"
    echo "Generated: $(date)" >> "$audit_file"
    echo "" >> "$audit_file"

    # Check for missing documentation
    echo "## Missing Documentation" >> "$audit_file"
    echo "" >> "$audit_file"

    local missing_count=0

    # BUGFIX: read from process substitution instead of `grep | while`.
    # A pipeline runs the while-loop in a subshell, so increments to
    # missing_count were lost and the summary always reported 0.
    # `|| true` keeps set -e happy when the dump has no marker lines.
    while read -r line; do
        local command
        command=$(echo "$line" | sed 's/^===== influxdb3 //' | sed 's/ --help =====//')
        local expected_file=""
        # Map command to expected documentation file; commands without a
        # mapping are skipped
        case "$command" in
            "create database") expected_file="create/database.md" ;;
            "create token") expected_file="create/token/_index.md" ;;
            "create token admin") expected_file="create/token/admin.md" ;;
            "create trigger") expected_file="create/trigger.md" ;;
            "create table") expected_file="create/table.md" ;;
            "create last_cache") expected_file="create/last_cache.md" ;;
            "create distinct_cache") expected_file="create/distinct_cache.md" ;;
            "show databases") expected_file="show/databases.md" ;;
            "show tokens") expected_file="show/tokens.md" ;;
            "delete database") expected_file="delete/database.md" ;;
            "delete table") expected_file="delete/table.md" ;;
            "query") expected_file="query.md" ;;
            "write") expected_file="write.md" ;;
            *) continue ;;
        esac
        if [ -n "$expected_file" ]; then
            # Check both product-specific and shared docs locations
            local product_file="$docs_path/$expected_file"
            local shared_file="$shared_path/$expected_file"
            if [ ! -f "$product_file" ] && [ ! -f "$shared_file" ]; then
                echo "- **Missing**: Documentation for \`influxdb3 $command\`" >> "$audit_file"
                echo " - Expected: \`$product_file\` or \`$shared_file\`" >> "$audit_file"
                missing_count=$((missing_count + 1))
            fi
        fi
    done < <(grep "^===== influxdb3.*--help =====" "$cli_file" || true)

    if [ "$missing_count" -eq 0 ]; then
        echo "No missing documentation files detected." >> "$audit_file"
    fi
    echo "" >> "$audit_file"

    # Check for outdated options in existing docs.
    # This would require parsing the markdown files themselves, so we emit a
    # manual-review checklist of every long option seen in the CLI help.
    echo "## Potentially Outdated Documentation" >> "$audit_file"
    echo "" >> "$audit_file"
    echo "**Manual Review Needed**: Compare the following CLI options with existing documentation:" >> "$audit_file"
    echo "" >> "$audit_file"

    # This loop only appends to the report file, so running it in a
    # pipeline subshell is harmless.
    grep -E "^\s*(-[a-zA-Z],?\s*)?--[a-z][a-z0-9-]*" "$cli_file" | sort -u | while read -r option_line; do
        local option=$(echo "$option_line" | grep -oE '\--[a-z][a-z0-9-]*')
        if [ -n "$option" ]; then
            echo "- \`$option\`" >> "$audit_file"
        fi
    done

    echo "" >> "$audit_file"
    echo "## Summary" >> "$audit_file"
    echo "- Missing documentation files: $missing_count" >> "$audit_file"
    echo "- Manual review recommended for option accuracy" >> "$audit_file"
    echo "" >> "$audit_file"
    echo "📄 Audit complete: $audit_file"
}
# Main execution: dispatch on the requested product, extracting the CLI help
# and auditing the docs for each selected edition.
# NOTE(review): parse_cli_help is defined above but never invoked here —
# confirm whether it should be part of this flow.
case "$PRODUCT" in
    "core")
        CLI_FILE="$OUTPUT_DIR/current-cli-core-${VERSION}.txt"
        AUDIT_FILE="$OUTPUT_DIR/documentation-audit-core-${VERSION}.md"
        extract_current_cli "core" "$CLI_FILE"
        audit_docs "core" "$CLI_FILE" "$AUDIT_FILE"
        ;;
    "enterprise")
        CLI_FILE="$OUTPUT_DIR/current-cli-enterprise-${VERSION}.txt"
        AUDIT_FILE="$OUTPUT_DIR/documentation-audit-enterprise-${VERSION}.md"
        extract_current_cli "enterprise" "$CLI_FILE"
        audit_docs "enterprise" "$CLI_FILE" "$AUDIT_FILE"
        ;;
    "both")
        # Core
        CLI_FILE_CORE="$OUTPUT_DIR/current-cli-core-${VERSION}.txt"
        AUDIT_FILE_CORE="$OUTPUT_DIR/documentation-audit-core-${VERSION}.md"
        extract_current_cli "core" "$CLI_FILE_CORE"
        audit_docs "core" "$CLI_FILE_CORE" "$AUDIT_FILE_CORE"
        # Enterprise
        CLI_FILE_ENT="$OUTPUT_DIR/current-cli-enterprise-${VERSION}.txt"
        AUDIT_FILE_ENT="$OUTPUT_DIR/documentation-audit-enterprise-${VERSION}.md"
        extract_current_cli "enterprise" "$CLI_FILE_ENT"
        audit_docs "enterprise" "$CLI_FILE_ENT" "$AUDIT_FILE_ENT"
        ;;
    *)
        # Unknown product argument
        echo "Usage: $0 [core|enterprise|both] [version]"
        exit 1
        ;;
esac

echo ""
echo -e "${GREEN}✅ CLI documentation audit complete!${NC}"
echo ""
echo "Next steps:"
echo "1. Review the audit reports in: $OUTPUT_DIR"
echo "2. Update missing documentation files"
echo "3. Verify options match current CLI behavior"
echo "4. Update examples and usage patterns"

View File

@ -0,0 +1,164 @@
#!/bin/bash
# Set up authentication tokens for InfluxDB 3 Core and Enterprise containers
# Usage: ./setup-auth-tokens.sh [core|enterprise|both]
set -e

# ANSI color codes for terminal status output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'  # reset / no color

# Parse arguments: default to configuring both products
TARGET=${1:-both}

echo -e "${BLUE}🔐 InfluxDB 3 Authentication Setup${NC}"
echo "=================================="
echo ""

# Check for and load existing secret files.
# One token file per product, kept in $HOME so tokens stay out of the repo.
SECRET_CORE_FILE="$HOME/.env.influxdb3-core-admin-token"
SECRET_ENT_FILE="$HOME/.env.influxdb3-enterprise-admin-token"

if [ -f "$SECRET_CORE_FILE" ]; then
    echo "✅ Found existing Core token secret file"
else
    echo "📝 Creating new Core token secret file: $SECRET_CORE_FILE"
    touch "$SECRET_CORE_FILE"
fi

if [ -f "$SECRET_ENT_FILE" ]; then
    echo "✅ Found existing Enterprise token secret file"
else
    echo "📝 Creating new Enterprise token secret file: $SECRET_ENT_FILE"
    touch "$SECRET_ENT_FILE"
fi
echo ""
# Function to setup auth for a product
# Ensures a working admin token exists for the product's container:
# reuses the token from the secret file when it still authenticates,
# otherwise starts the container (if needed), mints a new admin token,
# stores it in the secret file, and verifies it.
#   $1 - "core" or "enterprise"
# Returns 1 when the container cannot start or token creation fails.
setup_auth() {
    local product=$1
    local container_name="influxdb3-${product}"
    local port
    local secret_file
    case "$product" in
        "core")
            # NOTE(review): assumes the core container serves on 8282 while
            # enterprise uses 8181 — confirm against compose.yaml
            port="8282"
            secret_file="$SECRET_CORE_FILE"
            ;;
        "enterprise")
            port="8181"
            secret_file="$SECRET_ENT_FILE"
            ;;
    esac
    # awk capitalizes the product name for display (e.g. "core" -> "Core")
    echo -e "${BLUE}Setting up $(echo ${product} | awk '{print toupper(substr($0,1,1)) tolower(substr($0,2))}') authentication...${NC}"
    # Check if token already exists in secret file (-s: non-empty)
    if [ -s "$secret_file" ]; then
        local existing_token=$(cat "$secret_file")
        echo "✅ Token already exists in secret file"
        # Show only a 20-character prefix to avoid leaking the full token
        echo " Token: ${existing_token:0:20}..."
        # Test if the token works
        echo -n "🧪 Testing existing token..."
        if docker exec "${container_name}" influxdb3 show databases --token "${existing_token}" --host "http://localhost:${port}" > /dev/null 2>&1; then
            echo -e " ${GREEN}✓ Working${NC}"
            return 0
        else
            echo -e " ${YELLOW}⚠ Not working, will create new token${NC}"
        fi
    fi
    # Check if container is running; start it via compose when it is not
    if ! docker ps --format '{{.Names}}' | grep -q "^${container_name}$"; then
        echo "🚀 Starting ${container_name} container..."
        if ! docker compose up -d "${container_name}"; then
            echo -e "${RED}❌ Failed to start container${NC}"
            return 1
        fi
        echo -n "⏳ Waiting for container to be ready..."
        # Fixed wait; NOTE(review): polling a health endpoint would be more
        # reliable than a flat 5-second sleep
        sleep 5
        echo -e " ${GREEN}${NC}"
    else
        echo "✅ Container ${container_name} is running"
    fi
    # Create admin token
    echo "🔑 Creating admin token..."
    local token_output
    if token_output=$(docker exec "${container_name}" influxdb3 create token --admin 2>&1); then
        # Extract the token from the "Token: " line of the CLI output
        local new_token=$(echo "$token_output" | grep "^Token: " | sed 's/^Token: //' | tr -d '\r\n')
        echo -e "${GREEN}Token created successfully!${NC}"
        echo " Token: ${new_token:0:20}..."
        # Update secret file (overwrites any stale token)
        echo "${new_token}" > "$secret_file"
        echo "📝 Updated secret file: $secret_file"
        # Test the new token
        echo -n "🧪 Testing new token..."
        if docker exec "${container_name}" influxdb3 show databases --token "${new_token}" --host "http://localhost:${port}" > /dev/null 2>&1; then
            echo -e " ${GREEN}✓ Working${NC}"
        else
            echo -e " ${YELLOW}⚠ Test failed, but token was created${NC}"
        fi
    else
        echo -e "${RED}❌ Failed to create token${NC}"
        echo "Error output: $token_output"
        return 1
    fi
    echo ""
}
# Main execution: configure tokens for the requested product(s)
case "$TARGET" in
    "core")
        setup_auth "core"
        ;;
    "enterprise")
        setup_auth "enterprise"
        ;;
    "both")
        setup_auth "core"
        setup_auth "enterprise"
        ;;
    *)
        # Unknown target argument
        echo "Usage: $0 [core|enterprise|both]"
        exit 1
        ;;
esac

echo -e "${GREEN}🎉 Authentication setup complete!${NC}"
echo ""
echo "📋 Next steps:"
echo "1. Restart containers to load new secrets:"
echo " docker compose down && docker compose up -d influxdb3-core influxdb3-enterprise"
echo "2. Test CLI commands with authentication:"
echo " ./detect-cli-changes.sh core 3.1.0 local"
echo " ./detect-cli-changes.sh enterprise 3.1.0 local"
echo ""
echo "📄 Your secret files now contain:"
# Show Core tokens (prefix only, to avoid leaking full tokens in logs)
if [ -f "$SECRET_CORE_FILE" ] && [ -s "$SECRET_CORE_FILE" ]; then
    token_preview=$(head -c 20 "$SECRET_CORE_FILE")
    echo " $SECRET_CORE_FILE: ${token_preview}..."
fi
# Show Enterprise tokens (prefix only)
if [ -f "$SECRET_ENT_FILE" ] && [ -s "$SECRET_ENT_FILE" ]; then
    token_preview=$(head -c 20 "$SECRET_ENT_FILE")
    echo " $SECRET_ENT_FILE: ${token_preview}..."
fi

View File

@ -1,725 +0,0 @@
// generate-cli-docs.js
// Generates shared CLI reference Markdown for the influxdb3 CLI by shelling
// out to `influxdb3 ... --help` and parsing the help output.
const { execSync } = require('child_process');
const fs = require('fs');
const path = require('path');

// Directory where the shared CLI reference Markdown is written
const OUTPUT_DIR = path.join(__dirname, 'content', 'shared', 'influxdb3-cli');
// CLI executable invoked for help output (must be on PATH)
const BASE_CMD = 'influxdb3';
const DEBUG = true; // Set to true for verbose logging
// Log a debug message — and an optional JSON-serialized payload — when the
// file-level DEBUG flag is enabled; a no-op otherwise.
function debug(message, data) {
  if (!DEBUG) {
    return;
  }
  console.log(`DEBUG: ${message}`);
  if (data) {
    console.log(JSON.stringify(data, null, 2));
  }
}
/**
 * Remove ANSI escape codes (colors, cursor movement) from CLI help output.
 * BUGFIX: the escape-character class was garbled; it now matches the real
 * ESC (U+001B) and CSI (U+009B) introducer bytes followed by a standard
 * ANSI control sequence.
 * @param {string} str - raw terminal output.
 * @returns {string} the same text with escape sequences stripped.
 */
function stripAnsiCodes(str) {
  // eslint-disable-next-line no-control-regex
  return str.replace(/[\u001B\u009B][[\]()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g, '');
}
// Ensure output directories exist
function ensureDirectoryExistence(filePath) {
const dirname = path.dirname(filePath);
if (fs.existsSync(dirname)) {
return true;
}
ensureDirectoryExistence(dirname);
fs.mkdirSync(dirname);
}
// Get all available commands and subcommands
/**
 * Parse `influxdb3 --help` output into a list of top-level commands.
 * Shells out via execSync, so the CLI must be installed and on PATH.
 * @returns {Array<{cmd: string, description: string}>} primary command names
 *   (aliases collapsed to the first name) with their descriptions; empty
 *   array if the help output cannot be parsed or the CLI fails to run.
 */
function getCommands() {
  try {
    debug('Getting base commands');
    let helpOutput = execSync(`${BASE_CMD} --help`).toString();
    helpOutput = stripAnsiCodes(helpOutput); // Strip ANSI codes
    debug('Cleaned help output received', helpOutput);
    // Find all command sections (Common Commands, Resource Management, etc.)
    // — "Header:" lines followed by their indented command listings
    const commandSections = helpOutput.match(/^[A-Za-z\s]+:\s*$([\s\S]+?)(?=^[A-Za-z\s]+:\s*$|\n\s*$|\n[A-Z]|\n\n|$)/gm);
    if (!commandSections || commandSections.length === 0) {
      debug('No command sections found in help output');
      return [];
    }
    debug(`Found ${commandSections.length} command sections`);
    let commands = [];
    // Process each section to extract commands
    commandSections.forEach(section => {
      // Extract command lines (ignoring section headers)
      const cmdLines = section.split('\n')
        .slice(1) // Skip the section header
        .map(line => line.trim())
        .filter(line => line && !line.startsWith('-') && !line.startsWith('#')); // Skip empty lines, flags and comments
      debug('Command lines in section', cmdLines);
      // Extract command names and descriptions
      cmdLines.forEach(line => {
        // Handle commands with aliases (like "query, q"); two-or-more
        // spaces separate the command column from its description
        const aliasMatch = line.match(/^\s*([a-zA-Z0-9_,-\s]+?)\s{2,}(.+)$/);
        if (aliasMatch) {
          // Get primary command and any aliases
          const commandParts = aliasMatch[1].split(',').map(cmd => cmd.trim());
          const primaryCmd = commandParts[0]; // Use the first as primary
          const description = aliasMatch[2].trim();
          commands.push({
            cmd: primaryCmd,
            description: description
          });
          debug(`Added command: ${primaryCmd} - ${description}`);
        }
      });
    });
    debug('Extracted commands', commands);
    return commands;
  } catch (error) {
    console.error('Error getting commands:', error.message);
    if (DEBUG) console.error(error.stack);
    return [];
  }
}
// Get subcommands for a specific command
/**
 * Parse `influxdb3 <cmd> --help` output into a list of subcommands.
 * Prefers a dedicated "Commands:" section; otherwise scans every
 * "Header:" section the way getCommands does.
 * @param {string} cmd - top-level command name (e.g. "create").
 * @returns {Array<{cmd: string, description: string}>} entries whose `cmd`
 *   is the full space-separated path (e.g. "create database"); empty array
 *   when nothing parses or the CLI invocation fails.
 */
function getSubcommands(cmd) {
  try {
    debug(`Getting subcommands for: ${cmd}`);
    let helpOutput = execSync(`${BASE_CMD} ${cmd} --help`).toString();
    helpOutput = stripAnsiCodes(helpOutput); // Strip ANSI codes
    debug(`Cleaned help output for ${cmd} received`, helpOutput);
    // Look for sections containing commands (similar to top-level help)
    // First try to find a dedicated Commands: section
    let subcommands = [];
    // Try to find a dedicated "Commands:" section first
    const commandsMatch = helpOutput.match(/Commands:\s+([\s\S]+?)(?=^[A-Za-z\s]+:\s*$|\n\s*$|\n[A-Z]|\n\n|$)/m);
    if (commandsMatch) {
      debug(`Found dedicated Commands section for ${cmd}`);
      const cmdLines = commandsMatch[1].split('\n')
        .map(line => line.trim())
        .filter(line => line && !line.startsWith('-') && !line.startsWith('#')); // Skip empty lines, flags, comments
      cmdLines.forEach(line => {
        // Two-or-more spaces separate the command column from its description
        const match = line.match(/^\s*([a-zA-Z0-9_,-\s]+?)\s{2,}(.+)$/);
        if (match) {
          // Get primary command name (before any commas for aliases)
          const commandName = match[1].split(',')[0].trim();
          const description = match[2].trim();
          subcommands.push({
            cmd: `${cmd} ${commandName}`,
            description: description
          });
          debug(`Added subcommand: ${cmd} ${commandName} - ${description}`);
        }
      });
    } else {
      // Look for sections like "Common Commands:", "Resource Management:", etc.
      const sectionMatches = helpOutput.match(/^[A-Za-z\s]+:\s*$([\s\S]+?)(?=^[A-Za-z\s]+:\s*$|\n\s*$|\n[A-Z]|\n\n|$)/gm);
      if (sectionMatches) {
        debug(`Found ${sectionMatches.length} sections with potential commands for ${cmd}`);
        sectionMatches.forEach(section => {
          const cmdLines = section.split('\n')
            .slice(1) // Skip the section header
            .map(line => line.trim())
            .filter(line => line && !line.startsWith('-') && !line.startsWith('#')); // Skip empty lines, flags, comments
          cmdLines.forEach(line => {
            const match = line.match(/^\s*([a-zA-Z0-9_,-\s]+?)\s{2,}(.+)$/);
            if (match) {
              // Get primary command name (before any commas for aliases)
              const commandName = match[1].split(',')[0].trim();
              const description = match[2].trim();
              subcommands.push({
                cmd: `${cmd} ${commandName}`,
                description: description
              });
              debug(`Added subcommand from section: ${cmd} ${commandName} - ${description}`);
            }
          });
        });
      }
    }
    debug(`Extracted ${subcommands.length} subcommands for ${cmd}`, subcommands);
    return subcommands;
  } catch (error) {
    debug(`Error getting subcommands for ${cmd}:`, error.message);
    return [];
  }
}
// Helper functions to generate descriptions for different command types.
// Each returns the sentence fragment appended to "The `influxdb3 ...`
// command" in generated docs: it starts with a space and ends with a period.

// Second word of the command (e.g. "databases" in "show databases"),
// falling back when the command has no resource word.
function resourceWord(cmd, fallback) {
  const words = cmd.split(' ');
  return words.length > 1 ? words[1] : fallback;
}

function getQueryDescription(cmd, fullCmd) {
  return ' executes a query against a running {{< product-name >}} server.';
}

function getWriteDescription(cmd, fullCmd) {
  return ' writes data to a running {{< product-name >}} server.';
}

function getShowDescription(cmd, fullCmd) {
  return ` lists ${resourceWord(cmd, 'resources')} in your {{< product-name >}} server.`;
}

function getCreateDescription(cmd, fullCmd) {
  return ` creates ${resourceWord(cmd, 'resources')} in your {{< product-name >}} server.`;
}

function getDeleteDescription(cmd, fullCmd) {
  return ` deletes ${resourceWord(cmd, 'resources')} from your {{< product-name >}} server.`;
}

function getServeDescription(cmd, fullCmd) {
  return ' starts the {{< product-name >}} server.';
}

function getDefaultDescription(cmd, fullCmd) {
  return '.';
}
// Fallback example generators for different command types, used when the
// CLI help output does not include its own Examples section.
// Each returns { title, code } for one Markdown example block.

function getQueryExample(cmd) {
  const code = `${BASE_CMD} ${cmd} --database DATABASE_NAME "SELECT * FROM home"`;
  return { title: 'Query data using SQL', code };
}

function getWriteExample(cmd) {
  const code = `${BASE_CMD} ${cmd} --database DATABASE_NAME --file data.lp`;
  return { title: 'Write data from a file', code };
}

function getShowExample(cmd) {
  const parts = cmd.split(' ');
  const resource = parts.length > 1 ? parts[1] : 'resources';
  return { title: `List ${resource}`, code: `${BASE_CMD} ${cmd}` };
}

function getCreateExample(cmd) {
  const parts = cmd.split(' ');
  const resource = parts.length > 1 ? parts[1] : 'resource';
  return {
    title: `Create a new ${resource}`,
    code: `${BASE_CMD} ${cmd} --name new-${resource}-name`
  };
}

function getDeleteExample(cmd) {
  const parts = cmd.split(' ');
  const resource = parts.length > 1 ? parts[1] : 'resource';
  return {
    title: `Delete a ${resource}`,
    code: `${BASE_CMD} ${cmd} --name ${resource}-to-delete`
  };
}

function getServeExample(cmd) {
  return {
    title: 'Start the InfluxDB server',
    code: `${BASE_CMD} serve --node-id my-node --object-store file --data-dir ~/.influxdb3_data`
  };
}

function getDefaultExample(fullCmd, cmd) {
  return {
    title: `Run the ${fullCmd} command`,
    code: `${BASE_CMD} ${cmd}`
  };
}
// Generate frontmatter for a command
/**
 * Build Hugo frontmatter for the shared Markdown page of a command.
 * Includes title and description; for non-base commands also a `source:`
 * line pointing at the shared content path.
 * Cleanup: removed the unused locals `parts`/`lastPart` from the original.
 * @param {string} cmd - space-separated command path ('' for the base command).
 * @returns {string} frontmatter text, terminated by the closing `---` line.
 */
function generateFrontmatter(cmd) {
  const fullCmd = cmd === '' ? BASE_CMD : `${BASE_CMD} ${cmd}`;

  // Determine a good description based on the command's last word,
  // reusing the description helper functions for consistency
  let description = '';
  if (cmd === '') {
    description = `The ${BASE_CMD} CLI runs and interacts with the {{< product-name >}} server.`;
  } else {
    const cmdParts = cmd.split(' ');
    const lastCmd = cmdParts[cmdParts.length - 1];
    switch (lastCmd) {
      case 'query':
      case 'q':
        description = `The \`${fullCmd}\` command${getQueryDescription(cmd, fullCmd)}`;
        break;
      case 'write':
      case 'w':
        description = `The \`${fullCmd}\` command${getWriteDescription(cmd, fullCmd)}`;
        break;
      case 'show':
        description = `The \`${fullCmd}\` command${getShowDescription(cmd, fullCmd)}`;
        break;
      case 'create':
        description = `The \`${fullCmd}\` command${getCreateDescription(cmd, fullCmd)}`;
        break;
      case 'delete':
        description = `The \`${fullCmd}\` command${getDeleteDescription(cmd, fullCmd)}`;
        break;
      case 'serve':
        description = `The \`${fullCmd}\` command${getServeDescription(cmd, fullCmd)}`;
        break;
      default:
        description = `The \`${fullCmd}\` command${getDefaultDescription(cmd, fullCmd)}`;
    }
  }

  // Create the frontmatter
  let frontmatter = `---
title: ${fullCmd}
description: >
  ${description}
`;

  // Add source attribute for shared files (non-base commands only)
  if (cmd !== '') {
    // Build the path relative to the /content/shared/influxdb3-cli/ directory
    const relativePath = cmd.split(' ').join('/');
    frontmatter += `source: /shared/influxdb3-cli/${relativePath === '' ? '_index' : relativePath}.md
`;
  }

  // Close the frontmatter
  frontmatter += `---
`;
  return frontmatter;
}
// Generate Markdown for a command
/**
 * Generate the full shared Markdown page for one CLI command.
 * Runs `<fullCmd> --help`, extracts the usage, arguments, option sections,
 * description, and examples from the help text, and assembles a Markdown
 * document (frontmatter included).
 * @param {string} cmd - space-separated command path ('' for the base command).
 * @returns {string|null} Markdown content, or null when help extraction or
 *   parsing throws.
 */
function generateCommandMarkdown(cmd) {
  try {
    debug(`Generating markdown for command: ${cmd}`);
    const fullCmd = cmd === '' ? BASE_CMD : `${BASE_CMD} ${cmd}`;
    let helpOutput = execSync(`${fullCmd} --help`).toString();
    helpOutput = stripAnsiCodes(helpOutput); // Strip ANSI codes
    debug(`Cleaned help output for ${fullCmd} received`, helpOutput);
    // Extract sections from help output
    const usageMatch = helpOutput.match(/Usage:\s+([\s\S]+?)(?:\n\n|$)/);
    const usage = usageMatch ? usageMatch[1].trim() : '';
    const argsMatch = helpOutput.match(/Arguments:\s+([\s\S]+?)(?:\n\n|$)/);
    const args = argsMatch ? argsMatch[1].trim() : '';
    // Store option sections separately, keyed by section title
    // (e.g. "Required", "Connection Options")
    const optionSections = {};
    const optionSectionRegex = /^([A-Za-z\s]+ Options?|Required):\s*$([\s\S]+?)(?=\n^[A-Za-z\s]+:|^$|\n\n)/gm;
    let sectionMatch;
    while ((sectionMatch = optionSectionRegex.exec(helpOutput)) !== null) {
      const sectionTitle = sectionMatch[1].trim();
      const sectionContent = sectionMatch[2].trim();
      debug(`Found option section: ${sectionTitle}`);
      optionSections[sectionTitle] = sectionContent;
    }
    // Fallback if no specific sections found
    if (Object.keys(optionSections).length === 0) {
      const flagsMatch = helpOutput.match(/(?:Flags|Options):\s+([\s\S]+?)(?:\n\n|$)/);
      if (flagsMatch) {
        debug('Using fallback Flags/Options section');
        optionSections['Options'] = flagsMatch[1].trim();
      }
    }
    debug('Extracted option sections', optionSections);
    // Format flags as a table, processing sections and handling duplicates/multi-lines
    let flagsTable = '';
    const addedFlags = new Set(); // Track added long flags
    const tableRows = [];
    const sectionOrder = ['Required', ...Object.keys(optionSections).filter(k => k !== 'Required')]; // Prioritize Required
    for (const sectionTitle of sectionOrder) {
      if (!optionSections[sectionTitle]) continue;
      const sectionContent = optionSections[sectionTitle];
      const lines = sectionContent.split('\n');
      let i = 0;
      while (i < lines.length) {
        const line = lines[i];
        // Regex to capture flag and start of description
        const flagMatch = line.match(/^\s+(?:(-\w),\s+)?(--[\w-]+(?:[=\s]<[^>]+>)?)?\s*(.*)/);
        if (flagMatch) {
          const shortFlag = flagMatch[1] || '';
          const longFlagRaw = flagMatch[2] || ''; // Might be empty if only short flag exists (unlikely here)
          const longFlag = longFlagRaw.split(/[=\s]/)[0]; // Get only the flag name, e.g., --cluster-id from --cluster-id <CLUSTER_ID>
          let description = flagMatch[3].trim();
          // Check for multi-line description (indented lines following)
          let j = i + 1;
          while (j < lines.length && lines[j].match(/^\s{4,}/)) { // Look for lines with significant indentation
            description += ' ' + lines[j].trim();
            j++;
          }
          i = j; // Move main index past the multi-line description
          // Clean description: strip bracketed metadata such as
          // [default: ...], [env: ...], [possible values: ...]
          description = description
            .replace(/\s+\[default:.*?\]/g, '')
            .replace(/\s+\[env:.*?\]/g, '')
            .replace(/\s+\[possible values:.*?\]/g, '')
            .trim();
          // Check if required based on section
          const isRequired = sectionTitle === 'Required';
          // Add to table if not already added
          if (longFlag && !addedFlags.has(longFlag)) {
            // Use longFlagRaw which includes the placeholder for display
            tableRows.push(`| \`${shortFlag}\` | \`${longFlagRaw.trim()}\` | ${isRequired ? '_({{< req >}})_ ' : ''}${description} |`);
            addedFlags.add(longFlag);
            debug(`Added flag: ${longFlag} (Required: ${isRequired})`);
          } else if (!longFlag && shortFlag && !addedFlags.has(shortFlag)) {
            // Handle case where only short flag might exist (though unlikely for this CLI)
            tableRows.push(`| \`${shortFlag}\` | | ${isRequired ? '_({{< req >}})_ ' : ''}${description} |`);
            addedFlags.add(shortFlag); // Use short flag for tracking if no long flag
            debug(`Added flag: ${shortFlag} (Required: ${isRequired})`);
          } else if (longFlag) {
            debug(`Skipping duplicate flag: ${longFlag}`);
          } else {
            debug(`Skipping flag line with no long or short flag found: ${line}`);
          }
        } else {
          debug(`Could not parse flag line in section "${sectionTitle}": ${line}`);
          i++; // Move to next line if current one doesn't match
        }
      }
    }
    if (tableRows.length > 0) {
      // Sort rows alphabetically by long flag, putting required flags first
      tableRows.sort((a, b) => {
        const isARequired = a.includes('_({{< req >}})_');
        const isBRequired = b.includes('_({{< req >}})_');
        if (isARequired && !isBRequired) return -1;
        if (!isARequired && isBRequired) return 1;
        // Extract long flag for sorting (second column content between backticks)
        const longFlagA = (a.match(/\|\s*`.*?`\s*\|\s*`(--[\w-]+)/) || [])[1] || '';
        const longFlagB = (b.match(/\|\s*`.*?`\s*\|\s*`(--[\w-]+)/) || [])[1] || '';
        return longFlagA.localeCompare(longFlagB);
      });
      flagsTable = `| Short | Long | Description |\n| :---- | :--- | :---------- |\n${tableRows.join('\n')}`;
    }
    // Extract description from help text (appears before Usage section or other sections)
    let descriptionText = '';
    // Updated regex to stop before any known section header
    const descMatches = helpOutput.match(/^([\s\S]+?)(?=Usage:|Common Commands:|Examples:|Options:|Flags:|Required:|Arguments:|$)/);
    if (descMatches && descMatches[1]) {
      descriptionText = descMatches[1].trim();
    }
    // Example commands
    const examples = [];
    // Updated regex to stop before any known section header
    const exampleMatch = helpOutput.match(/(?:Example|Examples):\s*([\s\S]+?)(?=\n\n|Usage:|Options:|Flags:|Required:|Arguments:|$)/i);
    if (exampleMatch) {
      // Found examples in help output, use them
      const exampleBlocks = exampleMatch[1].trim().split(/\n\s*#\s+/); // Split by lines starting with # (section comments)
      exampleBlocks.forEach((block, index) => {
        const lines = block.trim().split('\n');
        const titleLine = lines[0].startsWith('#') ? lines[0].substring(1).trim() : `Example ${index + 1}`;
        const codeLines = lines.slice(titleLine === `Example ${index + 1}` ? 0 : 1) // Skip title line if we extracted it
          .map(line => line.replace(/^\s*\d+\.\s*/, '').trim()) // Remove numbering like "1. "
          .filter(line => line);
        if (codeLines.length > 0) {
          examples.push({ title: titleLine, code: codeLines.join('\n') });
        }
      });
    } else {
      // Fallback example generation
      // NOTE(review): both fallback branches below contain only placeholder
      // comments, so no examples are produced when the help output lacks an
      // Examples section — confirm whether the getXExample helpers should
      // be wired in here.
      if (cmd === '') {
        // ... (existing base command examples) ...
      } else {
        // ... (existing command-specific example generation using helpers) ...
      }
    }
    // Construct markdown content
    const frontmatter = generateFrontmatter(cmd);
    let markdown = frontmatter;
    markdown += `The \`${fullCmd}\` command`;
    // Use extracted description if available, otherwise fallback
    if (descriptionText) {
      markdown += ` ${descriptionText.toLowerCase().replace(/\.$/, '')}.`;
    } else if (cmd === '') {
      markdown += ` runs and interacts with the {{< product-name >}} server.`;
    } else {
      // Fallback description generation using helpers
      const cmdParts = cmd.split(' ');
      const lastCmd = cmdParts[cmdParts.length - 1];
      switch (lastCmd) {
        case 'query': case 'q': markdown += getQueryDescription(cmd, fullCmd); break;
        case 'write': case 'w': markdown += getWriteDescription(cmd, fullCmd); break;
        case 'show': markdown += getShowDescription(cmd, fullCmd); break;
        case 'create': markdown += getCreateDescription(cmd, fullCmd); break;
        case 'delete': markdown += getDeleteDescription(cmd, fullCmd); break;
        case 'serve': markdown += getServeDescription(cmd, fullCmd); break;
        default: markdown += getDefaultDescription(cmd, fullCmd);
      }
    }
    // Assemble the remaining sections; pytest.mark.skip comments keep doc
    // tests from executing the embedded shell blocks
    markdown += `\n\n## Usage\n\n<!--pytest.mark.skip-->\n\n\`\`\`bash\n${usage}\n\`\`\`\n\n`;
    if (args) {
      markdown += `## Arguments\n\n${args}\n\n`;
    }
    if (flagsTable) {
      markdown += `## Options\n\n${flagsTable}\n\n`;
    }
    if (examples.length > 0) {
      markdown += `## Examples\n\n`;
      examples.forEach(ex => {
        markdown += `### ${ex.title}\n\n<!--pytest.mark.skip-->\n\n\`\`\`bash\n${ex.code}\n\`\`\`\n\n`;
      });
    }
    return markdown;
  } catch (error) {
    console.error(`Error generating markdown for '${cmd}':`, error.message);
    if (DEBUG) console.error(error.stack);
    return null;
  }
}
// Generate reference page with proper frontmatter that imports from shared content
/**
 * Build a frontmatter-only reference page for one product edition.
 * The page carries Hugo menu metadata and a `source:` line pointing at the
 * shared Markdown produced by generateCommandMarkdown.
 * @param {string} cmd - space-separated command path.
 * @param {string} product - Hugo menu key (e.g. 'influxdb3_core').
 * @returns {string|null} page content, or null for the base command.
 */
function generateReferencePage(cmd, product) {
  // Skip the base command since it's not typically needed as a reference
  if (cmd === '') {
    return null;
  }
  const parts = cmd.split(' ');
  const fullCmd = cmd === '' ? BASE_CMD : `${BASE_CMD} ${cmd}`;
  // Build the appropriate menu path
  let menuParent;
  if (parts.length === 1) {
    menuParent = 'influxdb3'; // Top-level command
  } else {
    // For nested commands, the parent is the command's parent command
    menuParent = `influxdb3 ${parts.slice(0, -1).join(' ')}`;
  }
  // Determine a good description
  let description;
  const lastCmd = parts.length > 0 ? parts[parts.length - 1] : '';
  switch (lastCmd) {
    case 'query':
    case 'q':
      description = `Use the ${fullCmd} command to query data in your {{< product-name >}} instance.`;
      break;
    case 'write':
    case 'w':
      description = `Use the ${fullCmd} command to write data to your {{< product-name >}} instance.`;
      break;
    case 'show':
      const showType = parts.length > 1 ? parts[1] : 'resources';
      description = `Use the ${fullCmd} command to list ${showType} in your {{< product-name >}} instance.`;
      break;
    case 'create':
      const createType = parts.length > 1 ? parts[1] : 'resources';
      description = `Use the ${fullCmd} command to create ${createType} in your {{< product-name >}} instance.`;
      break;
    case 'delete':
      const deleteType = parts.length > 1 ? parts[1] : 'resources';
      description = `Use the ${fullCmd} command to delete ${deleteType} from your {{< product-name >}} instance.`;
      break;
    case 'serve':
      description = `Use the ${fullCmd} command to start and run your {{< product-name >}} server.`;
      break;
    default:
      description = `Use the ${fullCmd} command.`;
  }
  // Build the path to the shared content
  const sharedPath = parts.join('/');
  // Create the frontmatter for the reference page.
  // NOTE(review): indentation inside this template literal is significant
  // YAML — confirm nesting of parent/name/weight against existing
  // reference pages in the repo.
  const frontmatter = `---
title: ${fullCmd}
description: >
  ${description}
menu:
  ${product}:
    parent: ${menuParent}
    name: ${fullCmd}
weight: 400
source: /shared/influxdb3-cli/${sharedPath}.md
---

<!-- The content for this page is at
// SOURCE content/shared/influxdb3-cli/${sharedPath}.md
-->`;
  return frontmatter;
}
// Create the reference page files for different product variants
/**
 * Write a per-product reference page (frontmatter stub) for a command.
 *
 * @param {string} cmd - Space-separated subcommand path; '' is skipped.
 */
async function createReferencePages(cmd) {
  // The base command gets no dedicated reference page.
  if (cmd === '') return;
  // InfluxDB product variants that document this CLI.
  const productVariants = [
    { id: 'influxdb3_core', path: 'influxdb3/core' },
    { id: 'influxdb3_enterprise', path: 'influxdb3/enterprise' }
  ];
  const segments = cmd.split(' ');
  const parentSegments = segments.slice(0, -1);
  const leafFileName = `${segments[segments.length - 1]}.md`;
  for (const variant of productVariants) {
    const frontmatter = generateReferencePage(cmd, variant.id);
    if (!frontmatter) continue;
    // e.g. content/influxdb3/core/reference/cli/influxdb3/<parents...>/<leaf>.md
    const referenceDir = path.join(
      __dirname, '..', 'content', variant.path,
      'reference', 'cli', 'influxdb3', ...parentSegments
    );
    const referenceFile = path.join(referenceDir, leafFileName);
    // Create directory if it doesn't exist, then write the stub.
    ensureDirectoryExistence(referenceFile);
    fs.writeFileSync(referenceFile, frontmatter);
    console.log(`Generated reference page: ${referenceFile}`);
  }
}
// Process a command and its subcommands recursively
/**
 * Generate markdown for one command, write it (plus a directory _index.md
 * when the command exposes subcommands), emit its reference pages, and
 * recurse into its subcommands.
 *
 * @param {string} [cmd=''] - Space-separated subcommand path; '' is the base command.
 * @param {number} [depth=0] - Current recursion depth; recursion stops at 3.
 */
async function processCommand(cmd = '', depth = 0) {
  debug(`Processing command: "${cmd}" at depth ${depth}`);
  // Generate markdown for this command
  const markdown = generateCommandMarkdown(cmd);
  if (!markdown) {
    console.error(`Failed to generate markdown for command: ${cmd}`);
    return;
  }
  // Resolve where this command's markdown lands.
  let filePath;
  if (cmd === '') {
    // Base command
    filePath = path.join(OUTPUT_DIR, '_index.md');
  } else {
    const segments = cmd.split(' ');
    const leaf = segments[segments.length - 1];
    filePath = path.join(
      OUTPUT_DIR,
      ...segments.slice(0, -1),
      leaf === '' ? '_index.md' : `${leaf}.md`
    );
    // Commands that expose subcommands also get a directory _index.md
    // (depth-limited, same as the recursion below).
    if (depth < 3) {
      try {
        const helpText = execSync(`${BASE_CMD} ${cmd} --help`).toString();
        if (helpText.includes('Commands:')) {
          const indexFilePath = path.join(OUTPUT_DIR, ...segments, '_index.md');
          ensureDirectoryExistence(indexFilePath);
          fs.writeFileSync(indexFilePath, markdown);
          debug(`Created index file: ${indexFilePath}`);
        }
      } catch (error) {
        debug(`Error checking for subcommands: ${error.message}`);
      }
    }
  }
  ensureDirectoryExistence(filePath);
  fs.writeFileSync(filePath, markdown);
  console.log(`Generated: ${filePath}`);
  // Create reference pages for this command
  await createReferencePages(cmd);
  // Recurse into subcommands, bounded to limit help-output parsing.
  if (depth < 3) {
    const subcommands = getSubcommands(cmd);
    debug(`Found ${subcommands.length} subcommands for "${cmd}"`);
    for (const sub of subcommands) {
      await processCommand(sub.cmd, depth + 1);
    }
  }
}
// Main function
/**
 * Entry point: generate docs for the base command, then walk every
 * top-level command recursively. On failure, logs the error and marks
 * the process as failed so CI does not report success.
 */
async function main() {
  try {
    debug('Starting documentation generation');
    // Process base command
    await processCommand();
    // Get top-level commands
    const commands = getCommands();
    debug(`Found ${commands.length} top-level commands`);
    if (commands.length === 0) {
      console.warn('Warning: No commands were found. Check the influxdb3 CLI help output format.');
    }
    // Process each top-level command
    for (const { cmd } of commands) {
      await processCommand(cmd, 1);
    }
    console.log('Documentation generation complete!');
  } catch (error) {
    console.error('Error in main execution:', error.message);
    if (DEBUG) console.error(error.stack);
    // Bug fix: previously the process exited 0 even on failure, so CI and
    // the scheduled workflow could not detect a broken run.
    process.exitCode = 1;
  }
}
// Run the script; errors are caught and reported inside main().
main();