version: 2.1

orbs:
  aws-cli: circleci/aws-cli@4.1.2
  aws-s3: circleci/aws-s3@2.0.0

parameters:
  cross-container-tag:
    # When updating the Go version, also update the Go version in go.mod.
    description: Docker tag for the cross-build container from quay.io. Created by https://github.com/influxdata/edge/tree/master/dockerfiles/cross-builder .
    type: string
    default: go1.23.8-latest
  workflow:
    type: string
    default: build

jobs:
  build_binaries:
    docker:
      - image: quay.io/influxdb/cross-builder:<< pipeline.parameters.cross-container-tag >>
    steps:
      - attach_workspace:
          at: /tmp/workspace
      - checkout
      - restore_cache:
          keys:
            # We save the cache from this one, so don't restore a cache with old junk and then save new stuff alongside.
            # Start either with the exact right cache, or completely fresh.
            - influxdb-cache-v1-{{ checksum "go.mod" }}
      - run:
          name: Get InfluxDB Version
          command: |
            PREFIX=1.x .circleci/scripts/get-version
      - run:
          name: Build source tarball
          command: |
            set -x

            printf '{ "version": "%s", "branch": "%s", "sha": "%s" }' "${VERSION}" "${CIRCLE_BRANCH}" "${CIRCLE_SHA1}" >"$(pwd)/.metadata.json"

            # Prebuild the man pages so that consumers of the source tarball don't have to build them themselves.
            (cd man && make build && gzip -9 ./*.1)

            mkdir -p ./tarball
            TARBALL_NAME="./tarball/influxdb-src-${CIRCLE_SHA1}.tar.gz"
            tar --exclude-vcs --exclude './tarball' --transform 'flags=r;s|^|influxdb/|' -vczf "${TARBALL_NAME}" .
      - store_artifacts:
          path: tarball/
      - persist_to_workspace:
          root: .
          paths:
            - tarball
      - run:
          name: Build binaries
          command: |
            export CC="$(xcc linux x86_64)"
            export CGO_ENABLED=1

            # linux amd64 (static build)
            export GOOS=linux
            export GOARCH=amd64
            for cmd in github.com/influxdata/influxdb/cmd/{influxd,influx,influx_inspect}
            do
              go build \
                -tags "netgo,osusergo,static_build" \
                -buildmode=pie \
                -ldflags="-s -X \"main.version=${VERSION}\" -X \"main.branch=${CIRCLE_BRANCH}\" -X \"main.commit=${CIRCLE_SHA1}\" -linkmode=external -extld=${CC} -extldflags \"-fno-PIC -static-pie -Wl,-z,stack-size=8388608\"" \
                ${cmd}
            done

            mkdir -p ./bins
            target="bins/influxdb_bin_${GOOS}_${GOARCH}-${CIRCLE_SHA1}.tar.gz"
            tar -czf "${target}" \
              influx \
              influx_inspect \
              influxd
            md5sum    "${target}" > "${target}.md5"
            sha256sum "${target}" > "${target}.sha256"
      - store_artifacts:
          path: bins/
      - persist_to_workspace:
          root: .
          paths:
            - bins
      - save_cache:
          key: influxdb-cache-v1-{{ checksum "go.mod" }}
          paths:
            - /go/pkg/mod
            - /root/.cargo
            - /root/.cache/go-build

  build_packages:
    docker:
      - image: us-east1-docker.pkg.dev/influxdata-team-edge/ci-support/ci-packager:latest
        auth:
          username: _json_key
          password: $CISUPPORT_GCS_AUTHORIZATION
    steps:
      - checkout
      - attach_workspace:
          at: /tmp/workspace
      - run: |
          ( cd man ; make build ; gzip -9 ./*.1 )
          packager .circleci/packages/config.yaml
      - persist_to_workspace:
          root: .
          paths:
            - packages
      - store_artifacts:
          path: packages

  sign_packages:
    circleci_ip_ranges: true
    docker:
      - image: quay.io/influxdb/rsign:latest
        auth:
          username: $QUAY_RSIGN_USERNAME
          password: $QUAY_RSIGN_PASSWORD
    steps:
      - add_ssh_keys:
          fingerprints:
            - fc:7b:6e:a6:38:7c:63:5a:13:be:cb:bb:fa:33:b3:3c
      - attach_workspace:
          at: /tmp/workspace
      - run: |
          for target in /tmp/workspace/packages/*
          do
            case "${target}" in
              # rsign is shipped on Alpine Linux which uses "busybox ash" instead
              # of bash. ash is somewhat more POSIX-compliant and is missing some
              # extensions and niceties from bash.
              *.deb|*.rpm|*.tar.gz|*.zip)
                rsign "${target}"
                ;;
            esac

            if [ -f "${target}" ]
            then
              # Since all artifacts are present, sign them here. This saves
              # Circle credits over spinning up another instance just to
              # separate out the checksum job.
              sha256sum "${target}" >> "/tmp/workspace/packages/influxdb.${CIRCLE_TAG}.digests"

              md5sum    "${target}" >"${target}.md5"
              sha256sum "${target}" >"${target}.sha256"
            fi
          done
      - persist_to_workspace:
          root: /tmp/workspace
          paths:
            - packages
      - store_artifacts:
          path: /tmp/workspace/packages
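  # Verification sketch (not part of the pipeline): the per-file .sha256 and the
  # combined .digests files written above can be checked locally with coreutils,
  # e.g. by comparing the recorded hash against a fresh one. The artifact name
  # below is a placeholder; real names come from the packager step.
  #
  #   sha256sum influxdb-<version>.x86_64.rpm
  #   cat influxdb-<version>.x86_64.rpm.sha256
  #
  # Note the recorded lines contain the /tmp/workspace/packages/... path as seen
  # in CI, so "sha256sum -c" only works verbatim from that directory layout.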
  test_pkgs_64bit:
    machine:
      enabled: true
      docker_layer_caching: true
      image: ubuntu-2204:current
    steps:
      - attach_workspace:
          at: /tmp/workspace
      - checkout
      - run:
          name: Test 64 bit packages install
          command: |
            set -x
            export WORKING_DIR=$(pwd)

            # Use subshells with find to get the full path of the real package files, rather than predicting or hardcoding it.
            # The glob pattern with -prune causes find to only return files rooted in packages/,
            # thereby avoiding files whose names would match but live in subdirectories, i.e. packages/static.
            "${WORKING_DIR}/releng/packages/spec/clean_install/run.bash" -D \
              -p "$(find "/tmp/workspace/packages"/* -prune -name 'influxdb*amd64.deb')"

            "${WORKING_DIR}/releng/packages/spec/clean_install/run.bash" -R \
              -p "$(find "/tmp/workspace/packages"/* -prune -name 'influxdb*x86_64.rpm')"

  static_code_checks:
    docker:
      - image: quay.io/influxdb/cross-builder:<< pipeline.parameters.cross-container-tag >>
    steps:
      - attach_workspace:
          at: /tmp/workspace
      - checkout
      - run: ./checkfmt.sh
      - run: ./generate.sh
      - run: go vet ./...

  unit_test:
    docker:
      - image: quay.io/influxdb/cross-builder:<< pipeline.parameters.cross-container-tag >>
    resource_class: xlarge
    parameters:
      data:
        type: string
        default: inmem
      race:
        type: boolean
        default: false
    environment:
      INFLUXDB_DATA_INDEX_VERSION: << parameters.data >>
      GORACE: halt_on_error=1
    steps:
      - checkout
      - restore_cache:
          keys:
            - influxdb-cache-v1-{{ checksum "go.mod" }}
            - influxdb-cache-v1
      - when:
          condition: << parameters.race >>
          steps:
            - run:
                name: Execute Tests
                command: |
                  mkdir -p junit-race-<< parameters.data >>
                  # "resource_class: xlarge" creates a Docker container with eight
                  # virtual cpu cores. However, applications like "nproc" return
                  # the host machine's core count (which in this case is 36).
                  # When fewer cores are available than advertised, the tests
                  # sometimes fail.
                  #
                  # We'll manually reduce the number of available cores to what
                  # is specified by the CircleCI documentation:
                  # https://circleci.com/product/features/resource-classes/
                  taskset -c 0-7 \
                    gotestsum \
                      --format=standard-verbose \
                      --junitfile=junit-race-<< parameters.data >>/influxdb.junit.xml \
                      -- -race ./...
            - store_test_results:
                path: junit-race-<< parameters.data >>/
      - when:
          condition: { not: << parameters.race >> }
          steps:
            - run:
                name: Execute Tests
                command: |
                  mkdir -p junit-<< parameters.data >>
                  # "resource_class: xlarge" creates a Docker container with eight
                  # virtual cpu cores. However, applications like "nproc" return
                  # the host machine's core count (which in this case is 36).
                  # When fewer cores are available than advertised, the tests
                  # sometimes fail.
                  #
                  # We'll manually reduce the number of available cores to what
                  # is specified by the CircleCI documentation:
                  # https://circleci.com/product/features/resource-classes/
                  taskset -c 0-7 \
                    gotestsum \
                      --format=standard-verbose \
                      --junitfile=junit-<< parameters.data >>/influxdb.junit.xml \
                      -- ./...
            - store_test_results:
                path: junit-<< parameters.data >>/
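  # Local reproduction sketch for the unit_test job above (assumes gotestsum is
  # installed; the taskset pinning is only needed inside the CI container, and
  # the env values mirror the job's parameters/environment):
  #
  #   INFLUXDB_DATA_INDEX_VERSION=tsi1 GORACE=halt_on_error=1 \
  #     gotestsum --format=standard-verbose -- -race ./...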
  fluxtest:
    docker:
      - image: quay.io/influxdb/cross-builder:<< pipeline.parameters.cross-container-tag >>
    steps:
      - checkout
      - run:
          name: Execute test
          command: ./test-flux.sh || exit 1
          no_output_timeout: 1500s

  changelog:
    docker:
      - image: quay.io/influxdb/changelogger:d7093c409adedd8837ef51fa84be0d0f8319177a
    steps:
      - checkout
      - run:
          name: Generate Changelog
          command: |
            PRODUCT=OSS changelogger
      - store_artifacts:
          path: changelog_artifacts/
      - persist_to_workspace:
          root: .
          paths:
            - changelog_artifacts

  publish_changelog:
    parameters:
      workflow:
        type: string
    docker:
      - image: cimg/python:3.6
    steps:
      - attach_workspace:
          at: /tmp/workspace
      - when:
          condition:
            equal: [ << parameters.workflow >>, release ]
          steps:
            - aws-s3/copy:
                aws-region: RELEASE_AWS_REGION
                aws-access-key-id: RELEASE_AWS_ACCESS_KEY_ID
                aws-secret-access-key: RELEASE_AWS_SECRET_ACCESS_KEY
                from: /tmp/workspace/changelog_artifacts/CHANGELOG.md
                to: s3://dl.influxdata.com/influxdb/releases/<< pipeline.git.tag >>/CHANGELOG.<< pipeline.git.tag >>.md
      - when:
          condition:
            equal: [ << parameters.workflow >>, nightly ]
          steps:
            - aws-s3/copy:
                aws-region: RELEASE_AWS_REGION
                aws-access-key-id: RELEASE_AWS_ACCESS_KEY_ID
                aws-secret-access-key: RELEASE_AWS_SECRET_ACCESS_KEY
                from: /tmp/workspace/changelog_artifacts/CHANGELOG.md
                to: s3://dl.influxdata.com/influxdb/nightlies/<< pipeline.git.branch >>/CHANGELOG.md

  publish_packages:
    docker:
      - image: cimg/python:3.6
    steps:
      - attach_workspace:
          at: /tmp/workspace
      - aws-s3/sync:
          aws-region: RELEASE_AWS_REGION
          aws-access-key-id: RELEASE_AWS_ACCESS_KEY_ID
          aws-secret-access-key: RELEASE_AWS_SECRET_ACCESS_KEY
          from: /tmp/workspace/packages
          to: s3://dl.influxdata.com/influxdb/releases/<< pipeline.git.tag >>

  slack:
    docker:
      - image: us-east1-docker.pkg.dev/influxdata-team-edge/ci-support/ci-slack:latest
        auth:
          username: _json_key
          password: $CISUPPORT_GCS_AUTHORIZATION
    steps:
      - attach_workspace:
          at: /tmp/workspace
      - run:
          command: |
            SLACK_ARTIFACT_URL=https://dl.influxdata.com/influxdb/releases/<< pipeline.git.tag >> slack
          environment:
            SLACK_ARTIFACT_ROOT: /tmp/workspace/packages
            SLACK_RELEASE_MESSAGE: New InfluxDB Release

release_filter: &release_filter
  filters:
    tags:
      # This regex matches what is found in 'scripts/get-version' with the
      # '[[:digit:]]' transformed into '\d'. This also excludes release
      # candidate detection, because this filter only matches against
      # full releases.
      only: /^v(\d+)(?:\.(\d+))?(?:\.(\d+))?$/
    branches:
      ignore: /.*/
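# Worked examples for the tag filter above: "v1.11.5", "v1.11", and "v1" all
# match, while pre-release tags such as "v1.11.5rc0" do not, because the
# pattern is anchored at "$" and has no suffix group.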
workflows:
  version: 2.1
  release:
    when:
      equal: [ << pipeline.parameters.workflow >>, build ]
    jobs:
      - build_binaries:
          <<: *release_filter
      - build_packages:
          <<: *release_filter
          requires:
            - build_binaries
      - sign_packages:
          <<: *release_filter
          requires:
            - build_packages
      - test_pkgs_64bit:
          <<: *release_filter
          requires:
            - build_packages
      - changelog:
          <<: *release_filter
      - publish_changelog:
          <<: *release_filter
          workflow: release
          requires:
            - changelog
      - publish_packages:
          <<: *release_filter
          requires:
            - sign_packages
      - slack:
          <<: *release_filter
          requires:
            - publish_packages
      - static_code_checks:
          <<: *release_filter
      - fluxtest:
          <<: *release_filter
      - unit_test:
          <<: *release_filter
          name: unit_test_inmem
          data: inmem
      - unit_test:
          <<: *release_filter
          name: unit_test_tsi1
          data: tsi1
      - unit_test:
          <<: *release_filter
          name: unit_test_race
          race: true

  on_push:
    when:
      equal: [ << pipeline.parameters.workflow >>, build ]
    jobs:
      - build_binaries
      - build_packages:
          requires:
            - build_binaries
      - test_pkgs_64bit:
          requires:
            - build_packages
      - static_code_checks
      - fluxtest
      - unit_test:
          name: unit_test_inmem
          data: inmem
      - unit_test:
          name: unit_test_tsi1
          data: tsi1
      - unit_test:
          name: unit_test_race
          race: true

  nightly:
    when:
      and:
        # This requires a pipeline trigger with a custom "workflow" parameter
        # set to "nightly". Since we want to trigger this workflow on several
        # branches, we cannot use the trigger name as suggested by the
        # documentation.
        #
        # For more information:
        # https://circleci.com/docs/scheduled-pipelines/
        - equal: [ << pipeline.trigger_source >>, scheduled_pipeline ]
        - equal: [ << pipeline.parameters.workflow >>, nightly ]
    jobs:
      - changelog
      - publish_changelog:
          workflow: nightly
          requires:
            - changelog
      - static_code_checks
      - fluxtest
      - unit_test:
          name: unit_test_inmem
          data: inmem
      - unit_test:
          name: unit_test_tsi1
          data: tsi1
      - unit_test:
          name: unit_test_race
          race: true
      - build_binaries:
          requires:
            - changelog
            - static_code_checks
            - fluxtest
            - unit_test_inmem
            - unit_test_race
            - unit_test_tsi1
      - build_packages:
          requires:
            - build_binaries
      - test_pkgs_64bit:
          requires:
            - build_packages
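# Note: the nightly workflow only runs for scheduled pipelines. The schedule is
# configured outside this file (CircleCI project settings or API) and must pass
# the pipeline parameter "workflow" with the value "nightly"; pushes keep the
# default value "build" and therefore run the on_push/release workflows instead.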