chore: refactor performance test to generate queries and data together (#22861)

pull/22876/head
Sam Arnold 2021-11-15 12:01:38 -05:00 committed by GitHub
parent fa9ba8e86f
commit 16e3b165ca
4 changed files with 168 additions and 197 deletions

.circleci/config.yml

@@ -30,28 +30,9 @@ executors:
resource_class: windows.medium
shell: bash.exe -eo pipefail
parameters:
aws_teardown:
default: false
type: boolean
aws_teardown_branch:
default: "n/a"
type: string
aws_teardown_sha:
default: "n/a"
type: string
aws_teardown_datestring:
default: "n/a"
type: string
aws_teardown_query_format:
default: "n/a"
type: string
workflows:
version: 2
build:
when:
not: << pipeline.parameters.aws_teardown >>
jobs:
- godeps
- test-race:
@@ -117,19 +98,7 @@ workflows:
requires:
- cross_build
- perf_test:
name: perf-test-flux
format: flux-http
record_ingest_results: true
requires:
- cross_build
filters:
branches:
only:
- "master"
- perf_test:
name: perf-test-influxql
format: http
record_ingest_results: false
record_results: true
requires:
- cross_build
filters:
@@ -167,10 +136,6 @@ workflows:
- "master"
jobs:
- aws_destroy_by_date
aws_destroy_callback:
when: << pipeline.parameters.aws_teardown >>
jobs:
- aws_destroy_by_name
nightly:
triggers:
@@ -694,28 +659,48 @@ jobs:
perf_test:
executor: linux-amd64
docker:
- image: cimg/base:2021.04
resource_class: small
parameters:
format:
type: string
record_ingest_results:
record_results:
type: boolean
steps:
- attach_workspace:
at: /tmp/workspace
- checkout
# To ssh into aws without failing host key checks
- add_ssh_keys:
fingerprints:
- "91:0a:5b:a7:f9:46:77:f3:5d:4a:cf:d2:44:c8:2c:5a"
- run:
name: Run test in AWS instance
no_output_timeout: 20m
command: >
AWS_ACCESS_KEY_ID=${TEST_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY=${TEST_AWS_SECRET_ACCESS_KEY}
TEST_FORMAT=<< parameters.format >>
TEST_RECORD_INGEST_RESULTS=<< parameters.record_ingest_results >>
name: Set up AWS CLI
command: |
./scripts/ci/install-aws-cli.sh
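# Each CircleCI run step starts a fresh shell, so settings needed by later steps are persisted in vars.sh and sourced there.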
echo "export AWS_ACCESS_KEY_ID=${TEST_AWS_ACCESS_KEY_ID}" >> vars.sh
echo "export AWS_SECRET_ACCESS_KEY=${TEST_AWS_SECRET_ACCESS_KEY}" >> vars.sh
echo "export TEST_RECORD_RESULTS=<< parameters.record_results >>" >> vars.sh
echo 'export DATA_I_TYPE="r5.2xlarge"' >> vars.sh
- run:
name: Set up AWS instance
command: |
scripts/ci/perf_test.sh
- run:
name: Run perf test
no_output_timeout: 20m
command: |
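# Run the benchmark synchronously over ssh (the old version backgrounded it with nohup and tore down via a CircleCI API callback); the always-run step below handles teardown.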
source vars.sh
set -x
ssh "ubuntu@$EC2_IP" "sudo ./run_perftest.sh"
- run:
name: Tear down AWS instance
when: always
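# Terminate the instance even if an earlier step failed, so test instances are not leaked.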
command: |
source vars.sh
set -x
if [[ -n "$EC2_INSTANCE_ID" ]] ; then
aws --region us-west-2 ec2 terminate-instances --instance-ids "$EC2_INSTANCE_ID"
fi
aws_destroy_by_date:
executor: linux-amd64
@@ -747,24 +732,6 @@ jobs:
AWS_ACCESS_KEY_ID=${TEST_AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY=${TEST_AWS_SECRET_ACCESS_KEY} aws --region us-west-2 ec2 terminate-instances --instance-ids $instance_id
fi
done
aws_destroy_by_name:
executor: linux-amd64
steps:
- attach_workspace:
at: /tmp/workspace
- checkout
- add_ssh_keys:
fingerprints:
- "91:0a:5b:a7:f9:46:77:f3:5d:4a:cf:d2:44:c8:2c:5a"
- run:
name: Destroy AWS instances by constructing name from arguments
no_output_timeout: 20m
command: |
set -x
name=oss-perftest-<< pipeline.parameters.aws_teardown_datestring >>-<< pipeline.parameters.aws_teardown_branch >>-<< pipeline.parameters.aws_teardown_sha >>-<< pipeline.parameters.aws_teardown_query_format >>
instance_id=$(AWS_ACCESS_KEY_ID=${TEST_AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY=${TEST_AWS_SECRET_ACCESS_KEY} aws --region us-west-2 ec2 describe-instances --filters "Name=tag:Name,Values=$name" --query 'Reservations[].Instances[].InstanceId' --output text)
AWS_ACCESS_KEY_ID=${TEST_AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY=${TEST_AWS_SECRET_ACCESS_KEY} aws --region us-west-2 ec2 terminate-instances --instance-ids $instance_id
deploy_nightly:
executor: linux-amd64
environment:

scripts/ci/install-aws-cli.sh (new executable file, +43)

@@ -0,0 +1,43 @@
#!/bin/bash -ex
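# Install the AWS CLI v2, verifying the downloaded installer against AWS's published PGP public key before installing.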
cat << EOF > aws_pub_key
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBF2Cr7UBEADJZHcgusOJl7ENSyumXh85z0TRV0xJorM2B/JL0kHOyigQluUG
ZMLhENaG0bYatdrKP+3H91lvK050pXwnO/R7fB/FSTouki4ciIx5OuLlnJZIxSzx
PqGl0mkxImLNbGWoi6Lto0LYxqHN2iQtzlwTVmq9733zd3XfcXrZ3+LblHAgEt5G
TfNxEKJ8soPLyWmwDH6HWCnjZ/aIQRBTIQ05uVeEoYxSh6wOai7ss/KveoSNBbYz
gbdzoqI2Y8cgH2nbfgp3DSasaLZEdCSsIsK1u05CinE7k2qZ7KgKAUIcT/cR/grk
C6VwsnDU0OUCideXcQ8WeHutqvgZH1JgKDbznoIzeQHJD238GEu+eKhRHcz8/jeG
94zkcgJOz3KbZGYMiTh277Fvj9zzvZsbMBCedV1BTg3TqgvdX4bdkhf5cH+7NtWO
lrFj6UwAsGukBTAOxC0l/dnSmZhJ7Z1KmEWilro/gOrjtOxqRQutlIqG22TaqoPG
fYVN+en3Zwbt97kcgZDwqbuykNt64oZWc4XKCa3mprEGC3IbJTBFqglXmZ7l9ywG
EEUJYOlb2XrSuPWml39beWdKM8kzr1OjnlOm6+lpTRCBfo0wa9F8YZRhHPAkwKkX
XDeOGpWRj4ohOx0d2GWkyV5xyN14p2tQOCdOODmz80yUTgRpPVQUtOEhXQARAQAB
tCFBV1MgQ0xJIFRlYW0gPGF3cy1jbGlAYW1hem9uLmNvbT6JAlQEEwEIAD4WIQT7
Xbd/1cEYuAURraimMQrMRnJHXAUCXYKvtQIbAwUJB4TOAAULCQgHAgYVCgkICwIE
FgIDAQIeAQIXgAAKCRCmMQrMRnJHXJIXEAChLUIkg80uPUkGjE3jejvQSA1aWuAM
yzy6fdpdlRUz6M6nmsUhOExjVIvibEJpzK5mhuSZ4lb0vJ2ZUPgCv4zs2nBd7BGJ
MxKiWgBReGvTdqZ0SzyYH4PYCJSE732x/Fw9hfnh1dMTXNcrQXzwOmmFNNegG0Ox
au+VnpcR5Kz3smiTrIwZbRudo1ijhCYPQ7t5CMp9kjC6bObvy1hSIg2xNbMAN/Do
ikebAl36uA6Y/Uczjj3GxZW4ZWeFirMidKbtqvUz2y0UFszobjiBSqZZHCreC34B
hw9bFNpuWC/0SrXgohdsc6vK50pDGdV5kM2qo9tMQ/izsAwTh/d/GzZv8H4lV9eO
tEis+EpR497PaxKKh9tJf0N6Q1YLRHof5xePZtOIlS3gfvsH5hXA3HJ9yIxb8T0H
QYmVr3aIUes20i6meI3fuV36VFupwfrTKaL7VXnsrK2fq5cRvyJLNzXucg0WAjPF
RrAGLzY7nP1xeg1a0aeP+pdsqjqlPJom8OCWc1+6DWbg0jsC74WoesAqgBItODMB
rsal1y/q+bPzpsnWjzHV8+1/EtZmSc8ZUGSJOPkfC7hObnfkl18h+1QtKTjZme4d
H17gsBJr+opwJw/Zio2LMjQBOqlm3K1A4zFTh7wBC7He6KPQea1p2XAMgtvATtNe
YLZATHZKTJyiqA==
=vYOk
-----END PGP PUBLIC KEY BLOCK-----
EOF
gpg --import aws_pub_key
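# Fetch the installer and its detached signature; under -e, a failed signature check aborts before anything is installed.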
curl -o awscliv2.sig https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip.sig
curl -o awscliv2.zip https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip
gpg --verify awscliv2.sig awscliv2.zip
unzip awscliv2.zip
sudo ./aws/install

scripts/ci/perf_test.sh

@@ -1,45 +1,26 @@
set -ex -o pipefail
case "${TEST_FORMAT}" in
http|flux-http)
;;
*)
>&2 echo "Unknown query format: ${TEST_FORMAT}"
exit 1
;;
esac
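# vars.sh is written by the CircleCI job; it supplies the AWS credentials, TEST_RECORD_RESULTS, and DATA_I_TYPE used below.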
source vars.sh
# get latest ubuntu 20.04 ami for us-west-2
ami_id=$(aws --region us-west-2 ssm get-parameters --names /aws/service/canonical/ubuntu/server/20.04/stable/current/amd64/hvm/ebs-gp2/ami-id --query 'Parameters[0].[Value]' --output text)
# get latest ubuntu 21.10 ami for us-west-2
ami_id=$(aws --region us-west-2 ssm get-parameters --names /aws/service/canonical/ubuntu/server/21.10/stable/current/amd64/hvm/ebs-gp2/ami-id --query 'Parameters[0].[Value]' --output text)
# launch ec2 instance
instance_type="r5.2xlarge"
datestring=$(date +%Y%m%d)
instance_info=$(aws --region us-west-2 ec2 run-instances \
--image-id $ami_id \
--instance-type $instance_type \
--instance-type $DATA_I_TYPE \
--block-device-mappings DeviceName=/dev/sda1,Ebs={VolumeSize=200} \
--key-name circleci-oss-test \
--security-group-ids sg-03004366a38eccc97 \
--subnet-id subnet-0c079d746f27ede5e \
--tag-specifications "ResourceType=instance,Tags=[{Key=Name,Value=oss-perftest-$datestring-${CIRCLE_BRANCH}-${CIRCLE_SHA1}-${TEST_FORMAT}}]")
--tag-specifications "ResourceType=instance,Tags=[{Key=Name,Value=oss-perftest-$datestring-${CIRCLE_BRANCH}-${CIRCLE_SHA1}}]")
# get instance info
ec2_instance_id=$(echo $instance_info | jq -r .Instances[].InstanceId)
echo "export EC2_INSTANCE_ID=$ec2_instance_id" >> vars.sh
# pull down the latest influx_tools
AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} aws --region us-west-2 s3 cp s3://perftest-binaries-influxdb/influx_tools/latest_1.8.txt ./latest.txt
latest_tools=$(cat latest.txt | cut -d ' ' -f1)
AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} aws --region us-west-2 s3 cp s3://perftest-binaries-influxdb/influx_tools/$latest_tools ./influx_tools
sleep 60
ec2_ip=$(aws \
--region us-west-2 \
ec2 describe-instances \
--instance-ids $ec2_instance_id \
--query "Reservations[].Instances[].PublicIpAddress" \
--output text)
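# A public IP is not always assigned immediately after launch; poll until one appears.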
ec2_ip=""
while [ -z $ec2_ip ]; do
sleep 5
ec2_ip=$(aws \
@@ -49,42 +30,29 @@ while [ -z $ec2_ip ]; do
--query "Reservations[].Instances[].PublicIpAddress" \
--output text)
done
trap "aws --region us-west-2 ec2 terminate-instances --instance-ids $ec2_instance_id" KILL
echo "export EC2_IP=$ec2_ip" >> vars.sh
# push binary and script to instance
debname=$(find /tmp/workspace/artifacts/influxdb2*amd64.deb)
base_debname=$(basename $debname)
source_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
scp influx_tools ubuntu@$ec2_ip:/home/ubuntu/influx_tools
# On the first try, add the host key to the list of known hosts
until ssh -o StrictHostKeyChecking=no ubuntu@$ec2_ip echo Connected ; do
echo Tried to ssh to ec2 instance, will retry
sleep 5
done
scp $debname ubuntu@$ec2_ip:/home/ubuntu/$base_debname
scp ${source_dir}/run_perftest.sh ubuntu@$ec2_ip:/home/ubuntu/run_perftest.sh
# install deb in remote vm and create ramdisk for dataset files
RAMDISK_DIR=/mnt/ramdisk
ssh ubuntu@$ec2_ip << EOF
sudo chmod +x /home/ubuntu/influx_tools
sudo DEBIAN_FRONTEND=noninteractive apt-get install --assume-yes /home/ubuntu/$base_debname
sudo systemctl unmask influxdb.service
sudo systemctl start influxdb
sudo mkdir -p ${RAMDISK_DIR}
sudo mount -t tmpfs -o size=32G tmpfs ${RAMDISK_DIR}
EOF
# setup influxdb2
export INFLUXDB2=true
export TEST_ORG=example_org
export TEST_TOKEN=token
result=$(ssh ubuntu@$ec2_ip "curl -s -o /dev/null -H \"Content-Type: application/json\" -XPOST -d '{\"username\": \"default\", \"password\": \"thisisnotused\", \"retentionPeriodSeconds\": 0, \"org\": \"$TEST_ORG\", \"bucket\": \"unused_bucket\", \"token\": \"$TEST_TOKEN\"}' http://localhost:8086/api/v2/setup -w %{http_code}")
if [ "$result" != "201" ]; then
echo "Influxdb2 failed to setup correctly"
exit 1
fi
# run tests
set +x
export COMMIT_TIME=$(git show -s --format=%ct)
echo "running 'ssh ubuntu@$ec2_ip \"nohup sudo AWS_ACCESS_KEY_ID=REDACTED AWS_SECRET_ACCESS_KEY=REDACTED CIRCLE_TEARDOWN=true CIRCLE_TOKEN=REDACTED CLOUD2_BUCKET=${CLOUD2_PERFTEST_BUCKET} CLOUD2_ORG=${CLOUD2_PERFTEST_ORG} DATASET_DIR=${RAMDISK_DIR} DATA_I_TYPE=${instance_type} DB_TOKEN=REDACTED INFLUXDB2=${INFLUXDB2} INFLUXDB_VERSION=${CIRCLE_BRANCH} TEST_FORMAT=${TEST_FORMAT} TEST_RECORD_INGEST_RESULTS=${TEST_RECORD_INGEST_RESULTS} NGINX_HOST=localhost TEST_COMMIT=${CIRCLE_SHA1} TEST_COMMIT_TIME=${COMMIT_TIME} TEST_ORG=${TEST_ORG} TEST_TOKEN=${TEST_TOKEN} CIRCLE_TEARDOWN_DATESTRING=$datestring ./run_perftest.sh > /home/ubuntu/perftest_log.txt 2>&1 &\"'"
ssh ubuntu@$ec2_ip "nohup sudo AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} CIRCLE_TEARDOWN=true CIRCLE_TOKEN=${CIRCLE_API_CALLBACK_TOKEN} CLOUD2_BUCKET=${CLOUD2_PERFTEST_BUCKET} CLOUD2_ORG=${CLOUD2_PERFTEST_ORG} DATASET_DIR=${RAMDISK_DIR} DATA_I_TYPE=${instance_type} DB_TOKEN=${CLOUD2_PERFTEST_TOKEN} INFLUXDB2=${INFLUXDB2} INFLUXDB_VERSION=${CIRCLE_BRANCH} TEST_FORMAT=${TEST_FORMAT} TEST_RECORD_INGEST_RESULTS=${TEST_RECORD_INGEST_RESULTS} NGINX_HOST=localhost TEST_COMMIT=${CIRCLE_SHA1} TEST_COMMIT_TIME=${COMMIT_TIME} TEST_ORG=${TEST_ORG} TEST_TOKEN=${TEST_TOKEN} CIRCLE_TEARDOWN_DATESTRING=$datestring ./run_perftest.sh > /home/ubuntu/perftest_log.txt 2>&1 &"
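# Append the remaining test configuration to vars.sh and ship it to the instance; run_perftest.sh sources /home/ubuntu/vars.sh on startup.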
echo "export TEST_COMMIT_TIME=$(git show -s --format=%ct)" >> vars.sh
echo "export CIRCLE_TEARDOWN=true" >> vars.sh
echo "export CIRCLE_TOKEN=${CIRCLE_API_CALLBACK_TOKEN}" >> vars.sh
echo "export CLOUD2_BUCKET=${CLOUD2_PERFTEST_BUCKET}" >> vars.sh
echo "export CLOUD2_ORG=${CLOUD2_PERFTEST_ORG}" >> vars.sh
echo "export DB_TOKEN=${CLOUD2_PERFTEST_TOKEN}" >> vars.sh
echo "export INFLUXDB_VERSION=${CIRCLE_BRANCH}" >> vars.sh
echo "export NGINX_HOST=localhost" >> vars.sh
echo "export TEST_COMMIT=${CIRCLE_SHA1}" >> vars.sh
scp vars.sh ubuntu@$ec2_ip:/home/ubuntu/vars.sh

scripts/ci/run_perftest.sh

@@ -2,6 +2,27 @@
echo "Running as user: $(whoami)"
# Source env variables
. /home/ubuntu/vars.sh
# Install influxdb
DEBIAN_FRONTEND=noninteractive apt-get install --assume-yes /home/ubuntu/influxdb2*amd64.deb
systemctl start influxdb
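# Keep generated dataset files on a tmpfs ramdisk so reading them back during ingest does not add disk I/O.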
DATASET_DIR=/mnt/ramdisk
mkdir -p "$DATASET_DIR"
mount -t tmpfs -o size=32G tmpfs "$DATASET_DIR"
# set up influxdb
export INFLUXDB2=true
export TEST_ORG=example_org
export TEST_TOKEN=token
result="$(curl -s -o /dev/null -H "Content-Type: application/json" -XPOST -d '{"username": "default", "password": "thisisnotused", "retentionPeriodSeconds": 0, "org": "'"$TEST_ORG"'", "bucket": "unused_bucket", "token": "'"$TEST_TOKEN"'"}' http://localhost:8086/api/v2/setup -w %{http_code})"
if [ "$result" != "201" ] ; then
echo "Influxdb2 failed to setup correctly"
exit 1
fi
# Install Telegraf
wget -qO- https://repos.influxdata.com/influxdb.key | apt-key add -
echo "deb https://repos.influxdata.com/ubuntu focal stable" | tee /etc/apt/sources.list.d/influxdb.list
@@ -9,25 +30,13 @@ echo "deb https://repos.influxdata.com/ubuntu focal stable" | tee /etc/apt/sourc
DEBIAN_FRONTEND=noninteractive apt-get update
DEBIAN_FRONTEND=noninteractive apt-get install -y git jq telegraf awscli
# we need libc6 version 2.32 (released in ubuntu for 20.10 and later) for influx_tools
cp /etc/apt/sources.list /etc/apt/sources.list.d/groovy.list
sed -i 's/focal/groovy/g' /etc/apt/sources.list.d/groovy.list
DEBIAN_FRONTEND=noninteractive apt-get update
DEBIAN_FRONTEND=noninteractive apt-get install -y libc6 -t groovy
# Install influx_tools
aws --region us-west-2 s3 cp s3://perftest-binaries-influxdb/influx_tools/influx_tools-d3be25b251256755d622792ec91826c5670c6106 ./influx_tools
mv ./influx_tools /usr/bin/influx_tools
chmod 755 /usr/bin/influx_tools
root_branch="$(echo "${INFLUXDB_VERSION}" | rev | cut -d '-' -f1 | rev)"
log_date=$(date +%Y%m%d%H%M%S)
cleanup() {
aws s3 cp /home/ubuntu/perftest_log.txt s3://perftest-logs-influxdb/oss/$root_branch/${TEST_COMMIT}-${log_date}.log
if [ "${CIRCLE_TEARDOWN}" = true ]; then
curl --request POST \
--url https://circleci.com/api/v2/project/github/influxdata/influxdb/pipeline \
--header "Circle-Token: ${CIRCLE_TOKEN}" \
--header 'content-type: application/json' \
--data "{\"branch\":\"${INFLUXDB_VERSION}\", \"parameters\":{\"aws_teardown\": true, \"aws_teardown_branch\":\"${INFLUXDB_VERSION}\", \"aws_teardown_sha\":\"${TEST_COMMIT}\", \"aws_teardown_datestring\":\"${CIRCLE_TEARDOWN_DATESTRING}\", \"aws_teardown_query_format\":\"${TEST_FORMAT}\"}}"
fi
}
trap "cleanup" EXIT KILL
working_dir=$(mktemp -d)
mkdir -p /etc/telegraf
@@ -105,25 +114,13 @@ if [ `whoami` = root ]; then
fi
go version
# clone influxdb comparisons
git clone https://github.com/influxdata/influxdb-comparisons.git $working_dir/influxdb-comparisons
cd $working_dir/influxdb-comparisons
# install cmds
# install influxdb-comparisons cmds
go get \
github.com/influxdata/influxdb-comparisons/cmd/bulk_data_gen \
github.com/influxdata/influxdb-comparisons/cmd/bulk_load_influx \
github.com/influxdata/influxdb-comparisons/cmd/bulk_query_gen \
github.com/influxdata/influxdb-comparisons/cmd/query_benchmarker_influxdb
# hack to get the daemon to start up again until https://github.com/influxdata/influxdb/issues/21757 is resolved
systemctl stop influxdb
sed -i 's/User=influxdb/User=root/g' /lib/systemd/system/influxdb.service
sed -i 's/Group=influxdb/Group=root/g' /lib/systemd/system/influxdb.service
systemctl daemon-reload
systemctl unmask influxdb.service
systemctl start influxdb
# Common variables used across all tests
datestring=${TEST_COMMIT_TIME}
seed=$datestring
@@ -163,12 +160,12 @@ force_compaction() {
set -e
for shard in $shards; do
if [ -n "$(find $shard -name '*.tsm')" ]; then
/home/ubuntu/influx_tools compact-shard -force -verbose -path $shard
# compact as the influxdb user in order to keep file permissions correct
sudo -u influxdb influx_tools compact-shard -force -verbose -path $shard
fi
done
# restart daemon
systemctl unmask influxdb.service
systemctl start influxdb
}
@@ -222,13 +219,13 @@ query_types() {
# clear. This function maps a dataset use case to the query use cases that
# should run against it. Effectively this means "for this dataset use case,
# run these query use cases".
query_usecase_alias() {
queries_for_dataset() {
case $1 in
window-agg|group-agg|bare-agg|ungrouped-agg|group-window-transpose|iot|group-window-transpose-low-card)
echo iot
iot)
echo window-agg group-agg bare-agg ungrouped-agg iot group-window-transpose-low-card
;;
metaquery|group-window-transpose-high-card|cardinality)
echo metaquery
metaquery)
echo metaquery group-window-transpose-high-card
;;
multi-measurement)
echo multi-measurement
@@ -265,27 +262,11 @@ curl -XPOST -H "Authorization: Token ${TEST_TOKEN}" \
## Run and record tests ##
##########################
# Generate queries to test.
query_files=""
for usecase in window-agg group-agg bare-agg ungrouped-agg group-window-transpose-low-card group-window-transpose-high-card iot metaquery multi-measurement; do
for type in $(query_types $usecase); do
query_fname="${TEST_FORMAT}_${usecase}_${type}"
$GOPATH/bin/bulk_query_gen \
-use-case=$usecase \
-query-type=$type \
-format=influx-${TEST_FORMAT} \
-timestamp-start=$(start_time $usecase) \
-timestamp-end=$(end_time $usecase) \
-queries=$queries \
-scale-var=$scale_var > \
${DATASET_DIR}/$query_fname
query_files="$query_files $query_fname"
done
done
# Generate and ingest bulk data. Record the time spent as an ingest test if
# specified, and run the query performance tests for each dataset.
for usecase in iot metaquery multi-measurement; do
USECASE_DIR="${DATASET_DIR}/$usecase"
mkdir "$USECASE_DIR"
data_fname="influx-bulk-records-usecase-$usecase"
$GOPATH/bin/bulk_data_gen \
-seed=$seed \
@@ -293,41 +274,51 @@ for usecase in iot metaquery multi-measurement; do
-scale-var=$scale_var \
-timestamp-start=$(start_time $usecase) \
-timestamp-end=$(end_time $usecase) > \
${DATASET_DIR}/$data_fname
${USECASE_DIR}/$data_fname
load_opts="-file=${DATASET_DIR}/$data_fname -batch-size=$batch -workers=$workers -urls=http://${NGINX_HOST}:8086 -do-abort-on-exist=false -do-db-create=true -backoff=1s -backoff-timeout=300m0s"
load_opts="-file=${USECASE_DIR}/$data_fname -batch-size=$batch -workers=$workers -urls=http://${NGINX_HOST}:8086 -do-abort-on-exist=false -do-db-create=true -backoff=1s -backoff-timeout=300m0s"
if [ -z $INFLUXDB2 ] || [ $INFLUXDB2 = true ]; then
load_opts="$load_opts -organization=$TEST_ORG -token=$TEST_TOKEN"
fi
# Run ingest tests. Only write the results to disk if this run should contribute to ingest-test results.
out=/dev/null
if [ "${TEST_RECORD_INGEST_RESULTS}" = true ]; then
out=$working_dir/test-ingest-$usecase.json
fi
$GOPATH/bin/bulk_load_influx $load_opts | \
jq ". += {branch: \"$INFLUXDB_VERSION\", commit: \"$TEST_COMMIT\", time: \"$datestring\", i_type: \"$DATA_I_TYPE\", use_case: \"$usecase\"}" > ${out}
jq ". += {branch: \"$INFLUXDB_VERSION\", commit: \"$TEST_COMMIT\", time: \"$datestring\", i_type: \"$DATA_I_TYPE\", use_case: \"$usecase\"}" > "$working_dir/test-ingest-$usecase.json"
# Cleanup from the data generation and loading.
force_compaction
rm ${DATASET_DIR}/$data_fname
rm ${USECASE_DIR}/$data_fname
# Generate a DBRP mapping for use by InfluxQL queries.
create_dbrp
# Generate queries to test.
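# Queries for both formats (InfluxQL over http, Flux over flux-http) are now generated per dataset, replacing the separate per-format CI jobs.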
query_files=""
for TEST_FORMAT in http flux-http ; do
for query_usecase in $(queries_for_dataset $usecase) ; do
for type in $(query_types $query_usecase) ; do
query_fname="${TEST_FORMAT}_${query_usecase}_${type}"
$GOPATH/bin/bulk_query_gen \
-use-case=$query_usecase \
-query-type=$type \
-format=influx-${TEST_FORMAT} \
-timestamp-start=$(start_time $query_usecase) \
-timestamp-end=$(end_time $query_usecase) \
-queries=$queries \
-scale-var=$scale_var > \
${USECASE_DIR}/$query_fname
query_files="$query_files $query_fname"
done
done
done
# Run the query tests applicable to this dataset.
for query_file in $query_files; do
for query_file in $query_files; do
format=$(echo $query_file | cut -d '_' -f1)
query_usecase=$(echo $query_file | cut -d '_' -f2)
type=$(echo $query_file | cut -d '_' -f3)
# Only run the query tests for queries applicable to this dataset.
if [ "$usecase" != "$(query_usecase_alias $query_usecase)" ]; then
continue
fi
${GOPATH}/bin/query_benchmarker_influxdb \
-file=${DATASET_DIR}/$query_file \
-file=${USECASE_DIR}/$query_file \
-urls=http://${NGINX_HOST}:8086 \
-debug=0 \
-print-interval=0 \
@@ -342,16 +333,18 @@ for usecase in iot metaquery multi-measurement; do
$working_dir/test-query-$format-$query_usecase-$type.json
# Restart daemon between query tests.
systemctl stop influxdb
systemctl unmask influxdb.service
systemctl start influxdb
systemctl restart influxdb
done
# Delete DB to start anew.
curl -X DELETE -H "Authorization: Token ${TEST_TOKEN}" http://${NGINX_HOST}:8086/api/v2/buckets/$(bucket_id)
rm -rf "$USECASE_DIR"
done
echo "Using Telegraph to report results from the following files:"
ls $working_dir
telegraf --debug --once
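# --once gathers and writes results to the configured outputs a single time; --test only prints them, so runs without TEST_RECORD_RESULTS stay unpublished.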
if [ "${TEST_RECORD_RESULTS}" = "true" ] ; then
telegraf --debug --once
else
telegraf --debug --test
fi