chore: Modify AI instructions build script to optimize and reduce instructions size. Add test examples for later

chore: Modify AI instructions build script to optimize and reduce instructions size. Add test examples for later

chore: Modify AI instructions build script to optimize and reduce instructions size. Add test examples for later

fix(qol): Agent-agnostic contributing instructions

chore(test): Untrack influxdb3 data and plugins used in Docker configurations and testing

chore(test): Untrack influxdb3 data and plugins used in Docker configurations and testing

chore: Modify AI instructions build script to optimize and reduce instructions size. Add test examples for later
pull/6185/head
Jason Stirnaman 2025-06-30 14:05:04 -05:00
parent b8c1156a1c
commit 75410f73fc
9 changed files with 147 additions and 1483 deletions

File diff suppressed because it is too large Load Diff

View File

@ -23,6 +23,7 @@ export { buildContributingInstructions };
/** Build instructions from CONTRIBUTING.md
* This script reads CONTRIBUTING.md, formats it appropriately,
* and saves it to .github/instructions/contributing.instructions.md
* Includes optimization to reduce file size for better performance
*/
function buildContributingInstructions() {
// Paths
@ -41,6 +42,9 @@ function buildContributingInstructions() {
// Read the CONTRIBUTING.md file
let content = fs.readFileSync(contributingPath, 'utf8');
// Optimize content by removing less critical sections for Copilot
content = optimizeContentForContext(content);
// Format the content for Copilot instructions with applyTo attribute
content = `---
applyTo: "content/**/*.md, layouts/**/*.html"
@ -59,7 +63,17 @@ ${content}`;
// Write the formatted content to the instructions file
fs.writeFileSync(instructionsPath, content);
console.log(`✅ Generated Copilot instructions at ${instructionsPath}`);
const fileSize = fs.statSync(instructionsPath).size;
const sizeInKB = (fileSize / 1024).toFixed(1);
console.log(
`✅ Generated instructions at ${instructionsPath} (${sizeInKB}KB)`
);
if (fileSize > 40000) {
console.warn(
`⚠️ Instructions file is large (${sizeInKB}KB > 40KB) and may impact performance`
);
}
// Add the file to git if it has changed
try {
@ -74,3 +88,58 @@ ${content}`;
console.warn('⚠️ Could not add instructions file to git:', error.message);
}
}
/**
 * Optimize content for Copilot by removing or condensing less critical
 * sections while preserving essential documentation guidance.
 *
 * @param {string} content - Raw Markdown read from CONTRIBUTING.md
 * @returns {string} The reduced Markdown
 */
function optimizeContentForContext(content) {
  // Remove or condense sections that are less relevant for context
  // assistance. Each heading pattern consumes from the heading up to (but
  // not including) the next level-2+ heading or end of input.
  const sectionsToRemove = [
    // Installation and setup sections (less relevant for writing docs)
    /### Install project dependencies[\s\S]*?(?=\n##|\n###|$)/g,
    /### Install Node\.js dependencies[\s\S]*?(?=\n##|\n###|$)/g,
    /### Install Docker[\s\S]*?(?=\n##|\n###|$)/g,
    /#### Build the test dependency image[\s\S]*?(?=\n##|\n###|$)/g,
    /### Install Visual Studio Code extensions[\s\S]*?(?=\n##|\n###|$)/g,
    /### Run the documentation locally[\s\S]*?(?=\n##|\n###|$)/g,
    // Testing and CI/CD sections (important but can be condensed)
    /### Set up test scripts and credentials[\s\S]*?(?=\n##|\n###|$)/g,
    /#### Test shell and python code blocks[\s\S]*?(?=\n##|\n###|$)/g,
    /#### Troubleshoot tests[\s\S]*?(?=\n##|\n###|$)/g,
    /### Pytest collected 0 items[\s\S]*?(?=\n##|\n###|$)/g,
    // Long code examples that can be referenced elsewhere.
    // FIX: the (?!```) guard stops the match at any fence, so one match can
    // no longer start at a short block's opening fence and lazily extend
    // past its closing fence, swallowing the prose between two blocks.
    /```(?:(?!```)[\s\S]){500,}?```/g,
    // Repetitive examples
    /#### Example[\s\S]*?(?=\n####|\n###|\n##|$)/g,
  ];

  // Apply every removal pattern without reassigning the parameter directly.
  let result = content;
  for (const pattern of sectionsToRemove) {
    result = result.replace(pattern, '');
  }

  // Collapse runs of 3+ newlines down to a single blank line
  result = result.replace(/\n{3,}/g, '\n\n');

  // Strip HTML comments entirely
  result = result.replace(/<!--[\s\S]*?-->/g, '');

  // Shorten repetitive shortcode content: when the same {% ... %} tag
  // brackets a long (>200 char) span, keep only the first chunk and point
  // readers at the full CONTRIBUTING.md.
  result = result.replace(/(\{%[^%]+%\})[\s\S]*?\1/g, (match) => {
    if (match.length > 200) {
      const firstOccurrence = match.split('\n\n')[0];
      return (
        firstOccurrence +
        '\n\n[Similar patterns apply - see full CONTRIBUTING.md for complete examples]'
      );
    }
    return match;
  });

  return result;
}

View File

@ -303,14 +303,7 @@ services:
container_name: influxdb3-core
image: influxdb:3-core
ports:
- 8181:8181
volumes:
- type: bind
source: test/influxdb3/core/data
target: /var/lib/influxdb3/data
- type: bind
source: test/influxdb3/core/plugins
target: /var/lib/influxdb3-plugins
- 8282:8181
command:
- influxdb3
- serve
@ -319,6 +312,13 @@ services:
- --object-store=file
- --data-dir=/var/lib/influxdb3/data
- --plugin-dir=/var/lib/influxdb3/plugins
volumes:
- type: bind
source: test/.influxdb3/core/data
target: /var/lib/influxdb3/data
- type: bind
source: test/.influxdb3/core/plugins
target: /var/lib/influxdb3/plugins
influxdb3-enterprise:
container_name: influxdb3-enterprise
image: influxdb:3-enterprise
@ -339,10 +339,10 @@ services:
- --license-email=${INFLUXDB3_LICENSE_EMAIL}
volumes:
- type: bind
source: test/influxdb3/enterprise/data
source: test/.influxdb3/enterprise/data
target: /var/lib/influxdb3/data
- type: bind
source: test/influxdb3/enterprise/plugins
source: test/.influxdb3/enterprise/plugins
target: /var/lib/influxdb3/plugins
telegraf-pytest:
container_name: telegraf-pytest

1
test/.gitignore vendored
View File

@ -10,5 +10,6 @@ tmp
.config*
.env*
**/.env.test
.influxdb3
.pytest_cache
.test-run.txt

View File

@ -1,24 +0,0 @@
def process_request(influxdb3_local, query_parameters, request_headers, request_body, args=None):
    """
    Handle an HTTP request routed to a custom processing-engine endpoint.

    Args:
        influxdb3_local: Local InfluxDB API client
        query_parameters: Query parameters from the HTTP request
        request_headers: Headers from the HTTP request
        request_body: Body of the HTTP request
        args: Optional arguments passed from the trigger configuration

    Returns:
        dict: Status payload (automatically converted to JSON by the engine).
    """
    influxdb3_local.info("Processing HTTP request to custom endpoint")

    # Echo the incoming query parameters for traceability
    influxdb3_local.info(f"Received request with parameters: {query_parameters}")

    # Decode and log the body only when one was actually supplied
    if request_body:
        import json

        influxdb3_local.info(f"Request data: {json.loads(request_body)}")

    return {"status": "success", "message": "Request processed"}

View File

@ -1,12 +0,0 @@
def process_scheduled_call(influxdb3_local, call_time, args=None):
    """
    Handle a scheduled-trigger invocation from the InfluxDB 3 processing engine.

    Args:
        influxdb3_local: Local InfluxDB API client
        call_time: Time when the trigger was called
        args: Optional arguments passed from the trigger configuration
    """
    # Build the log entries first, then emit them in order.
    entries = [f"Processing scheduled call at {call_time}"]
    if args:
        entries.append(f"With arguments: {args}")
    for entry in entries:
        influxdb3_local.info(entry)

View File

@ -1,18 +0,0 @@
def process_writes(influxdb3_local, table_batches, args=None):
    """
    Handle write batches from the InfluxDB 3 processing engine (data persisted
    to the object store) and write a per-table row-count summary back.

    Args:
        influxdb3_local: Local InfluxDB API client
        table_batches: Batches of written rows; each batch is a mapping with
            "table_name" and "rows" keys.
        args: Optional arguments passed from the trigger configuration
    """
    # Process data as it's written to the database
    for batch in table_batches:
        source = batch["table_name"]
        row_count = len(batch["rows"])

        # Log information about the write
        influxdb3_local.info(f"Processing {row_count} rows from {source}")

        # Write derived data back to the database.
        # NOTE(review): LineBuilder is not defined in this file — presumably
        # injected by the processing-engine runtime; confirm availability.
        summary = LineBuilder("processed_data")
        summary.tag("source_table", source)
        summary.int64_field("row_count", row_count)
        influxdb3_local.write(summary)

View File

@ -0,0 +1,26 @@
# [core3,enterprise3]
# Auth-scheme compatibility checks for InfluxDB 3 HTTP query/write endpoints.
# Requires INFLUXDB3_ENTERPRISE_ADMIN_TOKEN in the environment.

# Bearer auth works with v1 /query
curl -v http://localhost:8181/query \
--header "Authorization: Bearer ${INFLUXDB3_ENTERPRISE_ADMIN_TOKEN}" \
--data-urlencode "db=sensors" \
--data-urlencode "q=SELECT * FROM home"

# Bearer auth works with v1 /write
curl -v "http://localhost:8181/write?db=sensors" \
--header "Authorization: Bearer ${INFLUXDB3_ENTERPRISE_ADMIN_TOKEN}" \
--data-raw "sensors,location=home temperature=23.5 1622547800"

# Basic auth works with v1 /write (token supplied as the password)
curl -v "http://localhost:8181/write?db=sensors" \
--user "admin:${INFLUXDB3_ENTERPRISE_ADMIN_TOKEN}" \
--data-raw "sensors,location=home temperature=23.5 1622547800"

# URL auth works with v1 /write (u/p query parameters)
curl -v "http://localhost:8181/write?db=sensors&u=admin&p=${INFLUXDB3_ENTERPRISE_ADMIN_TOKEN}" \
--data-raw "sensors,location=home temperature=23.5 1622547800"

# Token auth works with /api/v2/write
# Fixed: this case previously targeted the v1 /write endpoint, contradicting
# the comment above; /api/v2/write takes a bucket parameter rather than db.
# Also quoted the URL so ? and & are not interpreted by the shell.
curl -v "http://localhost:8181/api/v2/write?bucket=sensors" \
--header "Authorization: Token ${INFLUXDB3_ENTERPRISE_ADMIN_TOKEN}" \
--data-raw "sensors,location=home temperature=23.5 1622547800"

View File

@ -0,0 +1,4 @@
# [core3,enterprise3]
# Delete a database with hard delete at date
# NOTE(review): hard_delete_at=20250701 looks like a YYYYMMDD date — confirm
# the expected timestamp format for this parameter (e.g. RFC3339) before
# relying on this example.
curl -v -X DELETE "http://localhost:8181/api/v3/configure/database?hard_delete_at=20250701&db=sensors" \
--header "Authorization: Bearer ${INFLUXDB3_ENTERPRISE_ADMIN_TOKEN}"