diff --git a/.github/scripts/parse-pr-urls.js b/.github/scripts/parse-pr-urls.js
index 61aed7495..27c2e1435 100644
--- a/.github/scripts/parse-pr-urls.js
+++ b/.github/scripts/parse-pr-urls.js
@@ -4,6 +4,42 @@
* Used when layout/asset changes require author-specified preview pages.
*/
+import { readFileSync } from 'fs';
+import { load } from 'js-yaml';
+import { dirname, join } from 'path';
+import { fileURLToPath } from 'url';
+
+const __dirname = dirname(fileURLToPath(import.meta.url));
+
+/**
+ * Load valid product namespaces from products.yml
+ * @returns {string[]} - Array of valid namespace prefixes
+ * @throws {Error} - If products.yml cannot be read
+ */
+function loadProductNamespaces() {
+ // Navigate from .github/scripts/ to data/products.yml
+ const productsPath = join(__dirname, '../../data/products.yml');
+ const productsYaml = readFileSync(productsPath, 'utf8');
+ const products = load(productsYaml);
+
+ // Extract unique namespaces from all products
+ const namespaces = new Set();
+ for (const product of Object.values(products)) {
+ if (product.namespace) {
+ namespaces.add(product.namespace);
+ }
+ }
+
+ if (namespaces.size === 0) {
+ throw new Error('No product namespaces found in products.yml');
+ }
+
+ return Array.from(namespaces);
+}
+
+// Load namespaces once at module initialization
+const PRODUCT_NAMESPACES = loadProductNamespaces();
+
/**
* Validate URL path for security
* @param {string} path - URL path to validate
@@ -24,20 +60,28 @@ function isValidUrlPath(path) {
// Must start with /
if (!path.startsWith('/')) return false;
- // Must start with known product prefix
- const validPrefixes = [
- '/influxdb3/',
- '/influxdb/',
- '/telegraf/',
- '/kapacitor/',
- '/chronograf/',
- '/flux/',
- '/enterprise_influxdb/'
- ];
+ // Must start with known product prefix (loaded from products.yml)
+ const validPrefixes = PRODUCT_NAMESPACES.map((ns) => `/${ns}/`);
- return validPrefixes.some(prefix => path.startsWith(prefix));
+ return validPrefixes.some((prefix) => path.startsWith(prefix));
}
+/**
+ * Build regex pattern for relative paths
+ * @returns {RegExp} - Pattern matching valid product URL paths
+ */
+function buildRelativePattern() {
+ const namespaceAlternation = PRODUCT_NAMESPACES.join('|');
+ // Match relative paths starting with known product prefixes
+ // Also captures paths in markdown links: [text](/influxdb3/core/)
+ return new RegExp(
+ `(?:^|\\s|\\]|\\)|\\()(\\/(?:${namespaceAlternation})[^\\s)\\]>"']*)`,
+ 'gm'
+ );
+}
+
+const RELATIVE_PATTERN = buildRelativePattern();
+
/**
* Extract documentation URLs from text
* @param {string} text - PR description or comment text
@@ -71,9 +115,9 @@ export function extractDocsUrls(text) {
// Pattern 3: Relative paths starting with known product prefixes
// /influxdb3/core/admin/ or /telegraf/v1/plugins/
- // Updated to also capture paths in markdown links: [text](/influxdb3/core/)
- const relativePattern = /(?:^|\s|\]|\)|\()(\/(?:influxdb3|influxdb|telegraf|kapacitor|chronograf|flux|enterprise_influxdb)[^\s)\]>"']*)/gm;
- while ((match = relativePattern.exec(text)) !== null) {
+ // Reset lastIndex to ensure fresh matching
+ RELATIVE_PATTERN.lastIndex = 0;
+ while ((match = RELATIVE_PATTERN.exec(text)) !== null) {
const path = normalizeUrlPath(match[1]);
if (isValidUrlPath(path)) {
urls.add(path);
@@ -106,7 +150,7 @@ function normalizeUrlPath(urlPath) {
* @returns {string[]} - Array of content file paths
*/
export function urlPathsToContentPaths(urlPaths) {
- return urlPaths.map(urlPath => {
+ return urlPaths.map((urlPath) => {
// Remove leading/trailing slashes and add content prefix
const cleanPath = urlPath.replace(/^\/|\/$/g, '');
return `content/${cleanPath}/_index.md`;
diff --git a/.github/scripts/prepare-preview-files.js b/.github/scripts/prepare-preview-files.js
index 89288e69d..a0c41f549 100644
--- a/.github/scripts/prepare-preview-files.js
+++ b/.github/scripts/prepare-preview-files.js
@@ -6,10 +6,21 @@
* Example: node prepare-preview-files.js '["/influxdb3/core/"]' public preview-staging
*/
-import { cpSync, mkdirSync, existsSync, readdirSync, statSync } from 'fs';
+import {
+ cpSync,
+ mkdirSync,
+ existsSync,
+ readdirSync,
+ statSync,
+ writeFileSync,
+} from 'fs';
import { dirname, join } from 'path';
-const GLOBAL_ASSETS = ['css', 'js', 'fonts', 'img', 'favicons'];
+// Asset directories to copy (Hugo outputs these as directories)
+const ASSET_DIRS = ['js', 'fonts', 'img', 'favicons'];
+
+// File patterns to copy from public root (Hugo fingerprints CSS at root level)
+const ROOT_FILE_PATTERNS = ['.css'];
/**
* Copy a file or directory, creating parent directories as needed
@@ -86,6 +97,45 @@ function copyPage(urlPath, publicDir, stagingDir) {
return success;
}
+/**
+ * Generate an index page listing all preview pages
+ * @param {string[]} pages - Array of URL paths
+ * @param {string} stagingDir - Staging directory
+ */
+function generateIndexPage(pages, stagingDir) {
+ const pageLinks = pages
+ .map((page) => `      <li><a href="${page}">${page}</a></li>`)
+ .join('\n');
+
+ const html = `<!DOCTYPE html>
+<html>
+<head>
+  <meta charset="utf-8">
+  <title>PR Preview</title>
+</head>
+<body>
+  <h1>PR Preview</h1>
+  <p>This preview contains ${pages.length} page(s):</p>
+  <ul>
+${pageLinks}
+  </ul>
+  <p>Generated: ${new Date().toISOString()}</p>
+</body>
+</html>`;
+
+ writeFileSync(join(stagingDir, 'index.html'), html);
+ console.log(' ✓ index.html (page listing)');
+}
+
/**
* Main function to prepare preview files
* @param {string[]} pages - Array of URL paths to deploy
@@ -108,9 +158,9 @@ function preparePreviewFiles(pages, publicDir, stagingDir) {
// Create staging directory
mkdirSync(stagingDir, { recursive: true });
- // Copy global assets first
+ // Copy asset directories
  console.log('📁 Copying global assets...');
- for (const asset of GLOBAL_ASSETS) {
+ for (const asset of ASSET_DIRS) {
const src = join(publicDir, asset);
const dest = join(stagingDir, asset);
if (safeCopy(src, dest)) {
@@ -118,6 +168,21 @@ function preparePreviewFiles(pages, publicDir, stagingDir) {
}
}
+ // Copy root-level CSS files (Hugo fingerprints these at root)
+ console.log('\n📁 Copying root CSS files...');
+ if (existsSync(publicDir)) {
+ const rootFiles = readdirSync(publicDir);
+ for (const file of rootFiles) {
+ if (ROOT_FILE_PATTERNS.some((pattern) => file.endsWith(pattern))) {
+ const src = join(publicDir, file);
+ const dest = join(stagingDir, file);
+ if (safeCopy(src, dest)) {
+ console.log(` ✓ ${file}`);
+ }
+ }
+ }
+ }
+
// Copy selected pages
console.log('\n📁 Copying pages...');
let copiedCount = 0;
@@ -127,6 +192,9 @@ function preparePreviewFiles(pages, publicDir, stagingDir) {
}
}
+ // Generate index page with clickable links
+ generateIndexPage(pages, stagingDir);
+
+ console.log(`\n✅ Prepared ${copiedCount} pages for preview`);
}
@@ -137,7 +205,9 @@ if (process.argv[1]?.endsWith('prepare-preview-files.js')) {
const stagingDir = process.argv[4] || 'preview-staging';
if (!pagesJson) {
- console.error('Usage: node prepare-preview-files.js <pages-json> [public-dir] [staging-dir]');
+ console.error(
+ 'Usage: node prepare-preview-files.js <pages-json> [public-dir] [staging-dir]'
+ );
process.exit(1);
}
diff --git a/.github/scripts/test-parse-pr-urls.js b/.github/scripts/test-parse-pr-urls.js
index 880e5ab13..414ba442a 100644
--- a/.github/scripts/test-parse-pr-urls.js
+++ b/.github/scripts/test-parse-pr-urls.js
@@ -48,7 +48,7 @@ Please preview:
[
'/influxdb/cloud/admin/tokens/',
'/influxdb3/core/get-started/',
- '/telegraf/v1/plugins/'
+ '/telegraf/v1/plugins/',
].sort(),
'Should extract all three URL formats'
);
@@ -58,7 +58,11 @@ Please preview:
test('Markdown link: [text](/influxdb3/core/)', () => {
const text = 'See [the docs](/influxdb3/core/) for details';
const result = extractDocsUrls(text);
- assertEquals(result, ['/influxdb3/core/'], 'Should extract path from markdown link');
+ assertEquals(
+ result,
+ ['/influxdb3/core/'],
+ 'Should extract path from markdown link'
+ );
});
test('Markdown link: multiple links in a line', () => {
@@ -154,7 +158,11 @@ test('JS injection attempt via single quote is truncated', () => {
// The injection payload after the ' is never captured by the regex
const text = "/influxdb3/test'];console.log('xss');//";
const result = extractDocsUrls(text);
- assertEquals(result, ['/influxdb3/test/'], 'Should truncate before injection payload');
+ assertEquals(
+ result,
+ ['/influxdb3/test/'],
+ 'Should truncate before injection payload'
+ );
});
// Test valid product prefixes
@@ -179,7 +187,21 @@ test('Valid prefix: /telegraf/', () => {
test('Valid prefix: /enterprise_influxdb/', () => {
const text = '/enterprise_influxdb/v1/';
const result = extractDocsUrls(text);
- assertEquals(result, ['/enterprise_influxdb/v1/'], 'Should accept enterprise_influxdb');
+ assertEquals(
+ result,
+ ['/enterprise_influxdb/v1/'],
+ 'Should accept enterprise_influxdb'
+ );
+});
+
+test('Valid prefix: /influxdb3_explorer/ (loaded from products.yml)', () => {
+ const text = '/influxdb3_explorer/explorer/';
+ const result = extractDocsUrls(text);
+ assertEquals(
+ result,
+ ['/influxdb3_explorer/explorer/'],
+ 'Should accept influxdb3_explorer from products.yml'
+ );
});
test('Invalid prefix: /random/', () => {
@@ -238,7 +260,11 @@ test('Deduplication: same URL multiple times', () => {
/influxdb3/core/
`;
const result = extractDocsUrls(text);
- assertEquals(result, ['/influxdb3/core/'], 'Should deduplicate identical URLs');
+ assertEquals(
+ result,
+ ['/influxdb3/core/'],
+ 'Should deduplicate identical URLs'
+ );
});
test('Deduplication: different formats, same path', () => {
@@ -248,7 +274,11 @@ http://localhost:1313/influxdb3/core/
/influxdb3/core/
`;
const result = extractDocsUrls(text);
- assertEquals(result, ['/influxdb3/core/'], 'Should deduplicate different URL formats');
+ assertEquals(
+ result,
+ ['/influxdb3/core/'],
+ 'Should deduplicate different URL formats'
+ );
});
// Test BASE_REF validation regex (from detect-preview-pages.js)
@@ -262,7 +292,11 @@ test('BASE_REF: simple branch name', () => {
test('BASE_REF: branch with slash (feature branch)', () => {
const isValid = BASE_REF_REGEX.test('origin/feature/new-auth');
- assertEquals(isValid, true, 'Should accept branches with / like feature/new-auth');
+ assertEquals(
+ isValid,
+ true,
+ 'Should accept branches with / like feature/new-auth'
+ );
});
test('BASE_REF: branch with multiple slashes', () => {
@@ -277,7 +311,11 @@ test('BASE_REF: branch with dots and numbers', () => {
test('BASE_REF: rejects command injection attempt', () => {
const isValid = BASE_REF_REGEX.test('origin/master; rm -rf /');
- assertEquals(isValid, false, 'Should reject command injection with semicolon');
+ assertEquals(
+ isValid,
+ false,
+ 'Should reject command injection with semicolon'
+ );
});
test('BASE_REF: rejects backtick injection', () => {