diff --git a/.ci/link-checker/default.lycherc.toml b/.ci/link-checker/default.lycherc.toml index c247f6730..4ba8e1ede 100644 --- a/.ci/link-checker/default.lycherc.toml +++ b/.ci/link-checker/default.lycherc.toml @@ -1,5 +1,5 @@ # Lychee link checker configuration -# Updated for link-checker v1.3.0 with severity-based classification +# Updated for link-checker v1.3.1 with severity-based classification # # With severity levels, we no longer need to exclude sites that return: # - 403/401/429 (classified as "info" - shown but don't fail CI) diff --git a/.ci/link-checker/production.lycherc.toml b/.ci/link-checker/production.lycherc.toml index 6f72bab1a..b50ed2a20 100644 --- a/.ci/link-checker/production.lycherc.toml +++ b/.ci/link-checker/production.lycherc.toml @@ -1,5 +1,5 @@ # Production Link Checker Configuration for InfluxData docs-v2 -# Updated for link-checker v1.3.0 with severity-based classification +# Updated for link-checker v1.3.1 with severity-based classification # # With severity levels, we no longer need to exclude sites that return: # - 403/401/429 (classified as "info" - shown but don't fail CI) @@ -51,10 +51,12 @@ exclude = [ # TODO: Remove after fixing canonical URL generation or link-checker domain replacement "^https://docs\\.influxdata\\.com/", - # Local file URLs with fragments (workaround for link-checker Hugo pretty URL bug) - # link-checker converts /path/to/page#fragment to file:///path/to/page#fragment - # but the actual file is at /path/to/page/index.html, causing false fragment errors - # TODO: Remove after fixing link-checker to handle Hugo pretty URLs with fragments + # Local file URLs with fragments — lychee resolves /path/to/page#fragment to + # file:///path/to/page#fragment, but the actual file is at /path/to/page/index.html. + # This causes false "Cannot find file" errors for valid pages with Hugo pretty URLs. + # NOTE: This also means lychee CANNOT validate fragments on local files. 
+ # Fragment validation for internal links is a known gap (lychee doesn't open + # index.html to check heading anchors). "^file://.*#", # Common documentation placeholders @@ -85,6 +87,8 @@ warning_codes = [500, 502, 503, 504] info_codes = [401, 403, 429] # Set to true to treat warnings as errors (stricter validation) +# NOTE: Missing local files (file-not-found) have no HTTP status code and +# default to "warning" severity. The workflow reclassifies these as errors. strict = false [ci] @@ -108,6 +112,8 @@ max_execution_time_minutes = 10 [reporting] # Report configuration +# NOTE: lychee's --include-fragments does not validate fragments on local file +# URLs. It only works for HTTP responses. Set to false to avoid confusion. include_fragments = false verbose = false no_progress = true # Disable progress bar in CI diff --git a/.github/workflows/pr-link-check.yml b/.github/workflows/pr-link-check.yml index f4f280fa2..43f739b6e 100644 --- a/.github/workflows/pr-link-check.yml +++ b/.github/workflows/pr-link-check.yml @@ -12,18 +12,18 @@ jobs: link-check: name: Check links in affected files runs-on: ubuntu-latest - + steps: - name: Checkout repository uses: actions/checkout@v4 with: fetch-depth: 0 - + - name: Detect content changes id: detect run: | echo "🔍 Detecting changes between ${{ github.base_ref }} and ${{ github.sha }}" - + # For PRs, use the GitHub Files API to get changed files if [[ "${{ github.event_name }}" == "pull_request" ]]; then echo "Using GitHub API to detect PR changes..." @@ -34,23 +34,23 @@ jobs: echo "Using git diff to detect changes..." 
git diff --name-only ${{ github.event.before }}..${{ github.sha }} > all_changed_files.txt fi - + # Filter for content markdown files CHANGED_FILES=$(grep '^content/.*\.md$' all_changed_files.txt || true) - + echo "📁 All changed files:" cat all_changed_files.txt echo "" echo "📝 Content markdown files:" echo "$CHANGED_FILES" - + if [[ -n "$CHANGED_FILES" ]]; then echo "✅ Found $(echo "$CHANGED_FILES" | wc -l) changed content file(s)" echo "has-changes=true" >> $GITHUB_OUTPUT echo "changed-content<<EOF" >> $GITHUB_OUTPUT echo "$CHANGED_FILES" >> $GITHUB_OUTPUT echo "EOF" >> $GITHUB_OUTPUT - + # Check if any shared content files were modified SHARED_CHANGES=$(echo "$CHANGED_FILES" | grep '^content/shared/' || true) if [[ -n "$SHARED_CHANGES" ]]; then @@ -64,57 +64,57 @@ jobs: echo "has-changes=false" >> $GITHUB_OUTPUT echo "has-shared-content=false" >> $GITHUB_OUTPUT fi - + - name: Skip if no content changes if: steps.detect.outputs.has-changes == 'false' run: | echo "No content changes detected in this PR - skipping link check" echo "✅ **No content changes detected** - link check skipped" >> $GITHUB_STEP_SUMMARY - + - name: Setup Node.js if: steps.detect.outputs.has-changes == 'true' uses: actions/setup-node@v4 with: node-version: '20' cache: 'yarn' - + - name: Install dependencies if: steps.detect.outputs.has-changes == 'true' run: yarn install --frozen-lockfile - + - name: Build Hugo site if: steps.detect.outputs.has-changes == 'true' run: npx hugo --minify - + - name: Download link-checker binary if: steps.detect.outputs.has-changes == 'true' run: | echo "Downloading link-checker binary from docs-v2 releases..."
- + # Download from docs-v2's own releases (always accessible) curl -L -H "Accept: application/vnd.github+json" \ -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \ -o link-checker-info.json \ - "https://api.github.com/repos/influxdata/docs-v2/releases/tags/link-checker-v1.3.0" - + "https://api.github.com/repos/influxdata/docs-v2/releases/tags/link-checker-v1.5.0" + # Extract download URL for linux binary DOWNLOAD_URL=$(jq -r '.assets[] | select(.name | test("link-checker.*linux")) | .url' link-checker-info.json) - + if [[ "$DOWNLOAD_URL" == "null" || -z "$DOWNLOAD_URL" ]]; then echo "❌ No linux binary found in release" echo "Available assets:" jq -r '.assets[].name' link-checker-info.json exit 1 fi - + echo "📥 Downloading: $DOWNLOAD_URL" curl -L -H "Accept: application/octet-stream" \ -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \ -o link-checker "$DOWNLOAD_URL" - + chmod +x link-checker ./link-checker --version - + - name: Verify link checker config exists if: steps.detect.outputs.has-changes == 'true' run: | @@ -124,26 +124,26 @@ jobs: exit 1 fi echo "✅ Using configuration: .ci/link-checker/production.lycherc.toml" - + - name: Map changed content to public files if: steps.detect.outputs.has-changes == 'true' id: mapping run: | echo "Mapping changed content files to public HTML files..." 
- + # Create temporary file with changed content files echo "${{ steps.detect.outputs.changed-content }}" > changed-files.txt - + # Map content files to public files PUBLIC_FILES=$(cat changed-files.txt | xargs -r ./link-checker map --existing-only) - + if [[ -n "$PUBLIC_FILES" ]]; then echo "Found affected public files:" echo "$PUBLIC_FILES" echo "public-files<<EOF" >> $GITHUB_OUTPUT echo "$PUBLIC_FILES" >> $GITHUB_OUTPUT echo "EOF" >> $GITHUB_OUTPUT - + # Count files for summary FILE_COUNT=$(echo "$PUBLIC_FILES" | wc -l) echo "file-count=$FILE_COUNT" >> $GITHUB_OUTPUT @@ -152,83 +152,174 @@ jobs: echo "public-files=" >> $GITHUB_OUTPUT echo "file-count=0" >> $GITHUB_OUTPUT fi - + - name: Run link checker if: steps.detect.outputs.has-changes == 'true' && steps.mapping.outputs.public-files != '' id: link-check run: | echo "Checking links in ${{ steps.mapping.outputs.file-count }} affected files..." - + # Create temporary file with public files list echo "${{ steps.mapping.outputs.public-files }}" > public-files.txt - + # Run link checker with detailed JSON output set +e # Don't fail immediately on error - + cat public-files.txt | xargs -r ./link-checker check \ --config .ci/link-checker/production.lycherc.toml \ --format json \ --output link-check-results.json - + EXIT_CODE=$?
- + if [[ -f link-check-results.json ]]; then - # Parse results - BROKEN_COUNT=$(jq -r '.summary.broken_count // 0' link-check-results.json) + # Parse results using actual v1.3.1 JSON structure + ERROR_COUNT=$(jq -r '.summary.error_count // 0' link-check-results.json) + WARNING_COUNT=$(jq -r '.summary.warning_count // 0' link-check-results.json) TOTAL_COUNT=$(jq -r '.summary.total_checked // 0' link-check-results.json) SUCCESS_RATE=$(jq -r '.summary.success_rate // 0' link-check-results.json) - - echo "broken-count=$BROKEN_COUNT" >> $GITHUB_OUTPUT + + # Reclassify file-not-found warnings as errors + # link-checker classifies missing local files as warnings (no HTTP status code), + # but these represent genuinely broken internal links and should fail CI. + FILE_NOT_FOUND_COUNT=$(jq '[.warnings[] | select(.error | test("Cannot find file"))] | length' link-check-results.json 2>/dev/null || echo 0) + if [[ $FILE_NOT_FOUND_COUNT -gt 0 ]]; then + echo "⚠️ Found $FILE_NOT_FOUND_COUNT missing local file(s) — reclassifying as errors" + # Move file-not-found entries from warnings to errors + jq ' + .errors += [.warnings[] | select(.error | test("Cannot find file")) | .severity = "error"] + | .warnings = [.warnings[] | select(.error | test("Cannot find file") | not)] + | .summary.error_count = (.errors | length) + | .summary.warning_count = (.warnings | length) + ' link-check-results.json > link-check-results-fixed.json + mv link-check-results-fixed.json link-check-results.json + + ERROR_COUNT=$(jq -r '.summary.error_count // 0' link-check-results.json) + WARNING_COUNT=$(jq -r '.summary.warning_count // 0' link-check-results.json) + fi + + echo "error-count=$ERROR_COUNT" >> $GITHUB_OUTPUT + echo "warning-count=$WARNING_COUNT" >> $GITHUB_OUTPUT echo "total-count=$TOTAL_COUNT" >> $GITHUB_OUTPUT echo "success-rate=$SUCCESS_RATE" >> $GITHUB_OUTPUT - - if [[ $BROKEN_COUNT -gt 0 ]]; then - echo "❌ Found $BROKEN_COUNT broken links out of $TOTAL_COUNT total links" + + if [[ 
$ERROR_COUNT -gt 0 ]]; then + echo "❌ Found $ERROR_COUNT broken links out of $TOTAL_COUNT total links" echo "check-result=failed" >> $GITHUB_OUTPUT else - echo "✅ All $TOTAL_COUNT links are valid" + echo "✅ All $TOTAL_COUNT links are valid ($WARNING_COUNT warnings)" echo "check-result=passed" >> $GITHUB_OUTPUT fi else echo "❌ Link check failed to generate results" echo "check-result=error" >> $GITHUB_OUTPUT fi - + exit $EXIT_CODE - + - name: Process and report results if: always() && steps.detect.outputs.has-changes == 'true' && steps.mapping.outputs.public-files != '' + env: + FILE_COUNT: ${{ steps.mapping.outputs.file-count }} + TOTAL_COUNT: ${{ steps.link-check.outputs.total-count }} + ERROR_COUNT: ${{ steps.link-check.outputs.error-count }} + WARNING_COUNT: ${{ steps.link-check.outputs.warning-count }} + SUCCESS_RATE: ${{ steps.link-check.outputs.success-rate }} + CHECK_RESULT: ${{ steps.link-check.outputs.check-result }} run: | if [[ -f link-check-results.json ]]; then - # Create detailed error annotations for broken links - if [[ "${{ steps.link-check.outputs.check-result }}" == "failed" ]]; then - echo "Creating error annotations for broken links..." - - jq -r '.broken_links[]? 
| - "::error file=\(.file // "unknown"),line=\(.line // 1)::Broken link: \(.url) - \(.error // "Unknown error")"' \ - link-check-results.json || true - fi - - # Generate summary comment - cat >> $GITHUB_STEP_SUMMARY << 'EOF' - ## Link Check Results - - **Files Checked:** ${{ steps.mapping.outputs.file-count }} - **Total Links:** ${{ steps.link-check.outputs.total-count }} - **Broken Links:** ${{ steps.link-check.outputs.broken-count }} - **Success Rate:** ${{ steps.link-check.outputs.success-rate }}% - - EOF - - if [[ "${{ steps.link-check.outputs.check-result }}" == "failed" ]]; then - echo "❌ **Link check failed** - see annotations above for details" >> $GITHUB_STEP_SUMMARY + # Generate summary header + echo "## Link Check Results" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "| Metric | Value |" >> $GITHUB_STEP_SUMMARY + echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY + echo "| Files Checked | ${FILE_COUNT} |" >> $GITHUB_STEP_SUMMARY + echo "| Total Links | ${TOTAL_COUNT} |" >> $GITHUB_STEP_SUMMARY + echo "| Errors | ${ERROR_COUNT} |" >> $GITHUB_STEP_SUMMARY + echo "| Warnings | ${WARNING_COUNT} |" >> $GITHUB_STEP_SUMMARY + echo "| Success Rate | ${SUCCESS_RATE}% |" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + + # Report broken links (errors) with annotations + if [[ "${CHECK_RESULT}" == "failed" ]]; then + echo "### Broken Links" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "| Severity | Source File | Broken URL | Error |" >> $GITHUB_STEP_SUMMARY + echo "|----------|------------|------------|-------|" >> $GITHUB_STEP_SUMMARY + + # Process errors (these fail CI) + jq -c '.errors[]?' 
link-check-results.json 2>/dev/null | while read -r entry; do + URL=$(echo "$entry" | jq -r '.url // "unknown"') + ERROR=$(echo "$entry" | jq -r '.error // "Unknown error"') + FILE=$(echo "$entry" | jq -r '.file // "unknown"') + LINE=$(echo "$entry" | jq -r '.line // empty') + + # Map public path to content path for annotations + CONTENT_FILE=$(echo "$FILE" | sed 's|.*/public/|content/|' | sed 's|/index\.html$|/_index.md|') + + # Create GitHub annotation + if [[ -n "$LINE" && "$LINE" != "null" ]]; then + echo "::error file=${CONTENT_FILE},line=${LINE}::Broken link: ${URL} (${ERROR})" + else + echo "::error file=${CONTENT_FILE}::Broken link: ${URL} (${ERROR})" + fi + + # Add row to summary table + SAFE_URL=$(echo "$URL" | sed 's/|/\\|/g') + SAFE_ERROR=$(echo "$ERROR" | sed 's/|/\\|/g' | cut -c1-80) + echo "| 🔴 error | \`${CONTENT_FILE}\` | ${SAFE_URL} | ${SAFE_ERROR} |" >> $GITHUB_STEP_SUMMARY + done + + echo "" >> $GITHUB_STEP_SUMMARY + echo "---" >> $GITHUB_STEP_SUMMARY + echo "❌ **Link check failed** — fix the broken links listed above before merging." >> $GITHUB_STEP_SUMMARY else echo "✅ **All links are valid**" >> $GITHUB_STEP_SUMMARY fi + + # Report warnings (don't fail CI, but useful context) + WARNING_ARRAY_LEN=$(jq '.warnings | length' link-check-results.json 2>/dev/null || echo 0) + if [[ "$WARNING_ARRAY_LEN" -gt 0 ]]; then + echo "" >> $GITHUB_STEP_SUMMARY + echo "<details>" >> $GITHUB_STEP_SUMMARY + echo "<summary>⚠️ ${WARNING_ARRAY_LEN} warning(s) (do not fail CI)</summary>" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "| Source File | URL | Issue |" >> $GITHUB_STEP_SUMMARY + echo "|------------|-----|-------|" >> $GITHUB_STEP_SUMMARY + + jq -c '.warnings[]?' link-check-results.json 2>/dev/null | head -20 | while read -r entry; do + URL=$(echo "$entry" | jq -r '.url // "unknown"') + ERROR=$(echo "$entry" | jq -r '.error // "Unknown"') + FILE=$(echo "$entry" | jq -r '.file // "unknown"') + CONTENT_FILE=$(echo "$FILE" | sed 's|.*/public/|content/|' | sed 's|/index\.html$|/_index.md|') + SAFE_URL=$(echo "$URL" | sed 's/|/\\|/g') + SAFE_ERROR=$(echo "$ERROR" | sed 's/|/\\|/g' | cut -c1-80) + echo "| \`${CONTENT_FILE}\` | ${SAFE_URL} | ${SAFE_ERROR} |" >> $GITHUB_STEP_SUMMARY + done + + if [[ "$WARNING_ARRAY_LEN" -gt 20 ]]; then + echo "" >> $GITHUB_STEP_SUMMARY + echo "_Showing first 20 of ${WARNING_ARRAY_LEN} warnings. Download the artifact for full results._" >> $GITHUB_STEP_SUMMARY + fi + echo "" >> $GITHUB_STEP_SUMMARY + echo "</details>" >> $GITHUB_STEP_SUMMARY + fi + + # Add helpful tips + echo "" >> $GITHUB_STEP_SUMMARY + echo "<details>" >> $GITHUB_STEP_SUMMARY + echo "<summary>💡 Troubleshooting Tips</summary>" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "- **404 errors**: The linked page doesn't exist. Check for typos or update the link." >> $GITHUB_STEP_SUMMARY + echo "- **Relative links**: Use relative paths starting with \`/\` for internal links." >> $GITHUB_STEP_SUMMARY + echo "- **Anchors**: Ensure heading anchors match the linked fragment exactly." >> $GITHUB_STEP_SUMMARY + echo "- **Warnings**: External sites may be temporarily unavailable — these don't fail CI." >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "</details>" >> $GITHUB_STEP_SUMMARY else - echo "⚠️ **Link check could not complete** - no results file generated" >> $GITHUB_STEP_SUMMARY + echo "⚠️ **Link check could not complete** — no results file generated" >> $GITHUB_STEP_SUMMARY fi - + - name: Upload detailed results if: always() && steps.detect.outputs.has-changes == 'true' && steps.mapping.outputs.public-files != '' uses: actions/upload-artifact@v4 @@ -238,4 +329,4 @@ jobs: link-check-results.json changed-files.txt public-files.txt - retention-days: 30 \ No newline at end of file + retention-days: 30 diff --git a/.github/workflows/sync-link-checker-binary.yml b/.github/workflows/sync-link-checker-binary.yml index b0ac46c68..ef7ab3338 100644 --- a/.github/workflows/sync-link-checker-binary.yml +++ b/.github/workflows/sync-link-checker-binary.yml @@ -12,34 +12,80 @@ jobs: sync-binary: name: Sync link-checker binary from docs-tooling runs-on: ubuntu-latest - + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Download binary from docs-tooling release run: | echo "Downloading link-checker ${{ inputs.version }} from docs-tooling..."
- - # Download binary from docs-tooling release - curl -L -H "Accept: application/octet-stream" \ - -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \ + + # Download binary from docs-tooling release using the GitHub API + # NOTE: requires DOCS_TOOLING_TOKEN secret with read access to docs-tooling releases + RELEASE_INFO=$(curl -sL \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${{ secrets.DOCS_TOOLING_TOKEN }}" \ + "https://api.github.com/repos/influxdata/docs-tooling/releases/tags/link-checker-${{ inputs.version }}") + + # Check if release was found + if echo "$RELEASE_INFO" | jq -e '.message == "Not Found"' >/dev/null 2>&1; then + echo "❌ Release link-checker-${{ inputs.version }} not found in docs-tooling" + exit 1 + fi + + # Download linux binary asset + BINARY_URL=$(echo "$RELEASE_INFO" | jq -r '.assets[] | select(.name == "link-checker-linux-x86_64") | .url') + if [[ -z "$BINARY_URL" || "$BINARY_URL" == "null" ]]; then + echo "❌ No linux binary found in release" + echo "Available assets:" + echo "$RELEASE_INFO" | jq -r '.assets[].name' + exit 1 + fi + + curl -sL \ + -H "Accept: application/octet-stream" \ + -H "Authorization: Bearer ${{ secrets.DOCS_TOOLING_TOKEN }}" \ -o link-checker-linux-x86_64 \ - "https://github.com/influxdata/docs-tooling/releases/download/link-checker-${{ inputs.version }}/link-checker-linux-x86_64" - - # Download checksums - curl -L -H "Accept: application/octet-stream" \ - -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \ - -o checksums.txt \ - "https://github.com/influxdata/docs-tooling/releases/download/link-checker-${{ inputs.version }}/checksums.txt" - - # Verify downloads - ls -la link-checker-linux-x86_64 checksums.txt - + "$BINARY_URL" + + # Download checksums if available + CHECKSUMS_URL=$(echo "$RELEASE_INFO" | jq -r '.assets[] | select(.name == "checksums.txt") | .url') + if [[ -n "$CHECKSUMS_URL" && "$CHECKSUMS_URL" != "null" ]]; then + curl -sL \ + -H "Accept: 
application/octet-stream" \ + -H "Authorization: Bearer ${{ secrets.DOCS_TOOLING_TOKEN }}" \ + -o checksums.txt \ + "$CHECKSUMS_URL" + fi + + # Verify the binary is valid (not an error page) + FILE_SIZE=$(stat -c%s link-checker-linux-x86_64 2>/dev/null || stat -f%z link-checker-linux-x86_64) + if [[ "$FILE_SIZE" -lt 1000 ]]; then + echo "❌ Downloaded binary is only ${FILE_SIZE} bytes - likely a failed download" + echo "Content:" + cat link-checker-linux-x86_64 + exit 1 + fi + + echo "✅ Downloaded binary: ${FILE_SIZE} bytes" + ls -la link-checker-linux-x86_64 + - name: Create docs-v2 release run: | echo "Creating link-checker-${{ inputs.version }} release in docs-v2..." - + + # Collect assets to upload + ASSETS="link-checker-linux-x86_64" + if [[ -f checksums.txt ]]; then + ASSETS="$ASSETS checksums.txt" + fi + gh release create \ + --repo "${{ github.repository }}" \ --title "Link Checker Binary ${{ inputs.version }}" \ - --notes "Link validation tooling binary for docs-v2 GitHub Actions workflows. + --notes "$(cat <}}, database and retention policy combinations are mapped to specific buckets (for more information, see [Database and retention policy mapping](/influxdb/v2/reference/api/influxdb-1x/dbrp/)). 
-Use the [`from()` function](/flux/v0/stdlib/universe/from) +Use the [`from()` function](/flux/v0/stdlib/influxdata/influxdb/from/) to specify the bucket to query: ###### InfluxQL diff --git a/content/influxdb/v2/install/upgrade/v2-to-v2.md b/content/influxdb/v2/install/upgrade/v2-to-v2.md index e4235487d..70f7a5dfa 100644 --- a/content/influxdb/v2/install/upgrade/v2-to-v2.md +++ b/content/influxdb/v2/install/upgrade/v2-to-v2.md @@ -20,7 +20,7 @@ Upgrade to InfluxDB {{< current-version >}} from an earlier version of InfluxDB {{% note %}} #### InfluxDB 2.0 beta-16 or earlier If you're upgrading from InfluxDB 2.0 beta-16 or earlier, you must first -[upgrade to InfluxDB 2.0](/influxdb/v2/upgrade/v2-beta-to-v2/), +[upgrade to InfluxDB 2.0](/influxdb/v2/install/upgrade/v2-beta-to-v2/), and then complete the steps below. {{% /note %}} diff --git a/content/platform/monitoring/influxdata-platform/monitoring-dashboards.md b/content/platform/monitoring/influxdata-platform/monitoring-dashboards.md index b89f93051..d93390718 100644 --- a/content/platform/monitoring/influxdata-platform/monitoring-dashboards.md +++ b/content/platform/monitoring/influxdata-platform/monitoring-dashboards.md @@ -32,7 +32,7 @@ Telegraf input plugins. To view prebuilt dashboards: ## Import monitoring dashboards Use the dashboards below to visualize and monitor key TICK stack metrics. Download the dashboard file and import it into Chronograf. -For detailed instructions, see [Importing a dashboard](/chronograf/v1/administration/import-export-dashboards/#importing-a-dashboard). +For detailed instructions, see [Import a dashboard](/chronograf/v1/administration/import-export-dashboards/#import-a-dashboard). - [Monitor InfluxDB OSS](#monitor-influxdb-oss) - [Monitor InfluxDB Enterprise](#monitor-influxdb-enterprise) @@ -44,7 +44,7 @@ Use the InfluxDB OSS Monitor dashboard to monitor InfluxDB OSS in Chronograf. 
Download InfluxDB OSS Monitor dashboard The InfluxDB OSS Monitor dashboard uses data from the `_internal` database -_([not recommended for production](/platform/monitoring/influxdata-platform/internal-vs-external/#disable-the-internal-database-in-production-clusters))_ +_([not recommended for production](/platform/monitoring/influxdata-platform/internal-vs-external/#disable-the-_internal-database-in-production-clusters))_ or collected by the [Telegraf `influxdb` input plugin](https://github.com/influxdata/telegraf/tree/master/plugins/inputs/influxdb). This dashboard contains the following cells: @@ -63,7 +63,7 @@ Use the InfluxDB Enterprise Monitor dashboard to monitor InfluxDB Enterprise in Download InfluxDB Enterprise Monitor dashboard The InfluxDB Enterprise Monitor dashboard uses data from the `_internal` database -_([not recommended for production](/platform/monitoring/influxdata-platform/internal-vs-external/#disable-the-internal-database-in-production-clusters))_ +_([not recommended for production](/platform/monitoring/influxdata-platform/internal-vs-external/#disable-the-_internal-database-in-production-clusters))_ or collected by the [Telegraf `influxdb` input plugin](https://github.com/influxdata/telegraf/tree/master/plugins/inputs/influxdb). This dashboard contains the following cells: