---
# CI Overview
# -----------
#
# Every commit:
#
# The CI for every PR and merge to main runs tests, fmt, lints, and compiles debug binaries.
#
# On main, if all of these checks pass, it additionally compiles in "release" mode and
# publishes a docker image to quay.io/influxdb/influxdb3:$COMMIT_SHA
#
# Manually trigger build and push of container image for a branch:
#
# Navigate to https://app.circleci.com/pipelines/github/influxdata/influxdb?branch=<branch-name> (<- change this!)
# Then:
#
# - Click "Run Pipeline" in the top-right
# - Expand "Add Parameters"
# - Add a "boolean" parameter called "release_branch" with the value true
# - Click "Run Pipeline"
#
# You can also do this using the CircleCI API:
#
# Using `xh` (https://github.com/ducaale/xh):
#
# $ xh -a '<your personal circleCI token>:' POST \
# https://circleci.com/api/v2/project/github/influxdata/influxdb/pipeline \
# parameters:='{"release_branch": true}' branch=chore/ci-tidy-up
#
# ...or equivalent with `curl`:
# $ curl -XPOST -H "Content-Type: application/json" -H "Circle-Token: <your personal circleCI token>" \
# -d '{"parameters": {"release_branch": true}, "branch": "chore/ci-tidy-up"}' \
# https://circleci.com/api/v2/project/github/influxdata/influxdb/pipeline
version: 2.1
orbs:
aws-s3: circleci/aws-s3@2.0.0
terraform: circleci/terraform@2.1.0
rust: circleci/rust@1.6.1
# Unlike when a commit is pushed to a branch, CircleCI does not automatically
# execute a workflow when a tag is pushed to a repository. These filters
# allow the corresponding workflow to execute on any branch or tag.
any_filter: &any_filter
filters:
tags:
only: /.*/
branches:
only: /.*/
# This regex is used to trigger 'release builds' based on tags. For semver,
# package upgrades, etc., the tags must conform to a specific schema, e.g.:
#
# - v3.0.0-0.alpha.1 (1st alpha release)
# - v3.0.0-0.alpha.1.1 (2nd build of 1st alpha release; ie, no code changes)
# - v3.0.0-0.alpha.2 (2nd alpha release)
# - v3.0.0-0.beta.1 (1st beta release)
# - v3.0.0-0.rc.1 (1st rc release)
# - v3.0.0 (3.0.0 official release)
# - v3.0.0-2 (2nd build of 3.0.0 official release; ie, no code changes)
# - v3.0.1 (3.0.1 official release)
# - v3.1.0 (3.1.0 official release)
#
# If the tag does not conform to the above, then ci-support/ci-packager-next
# will generate snapshot versions. For easier maintenance, this regex is more
# open than ci-support/ci-packager-next (which enforces a number of
# constraints). See .circleci/packages/config.yaml for details.
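# To sanity-check a candidate tag against this regex locally, e.g.:
#
#   $ echo 'v3.0.0-0.beta.1' | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+(-[0-9]+(\.(alpha|beta|rc)\.[0-9]+(\.[0-9]+)?)?)?$'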
release_filter: &release_filter
filters:
tags:
only: /^v[0-9]+\.[0-9]+\.[0-9]+(-[0-9]+(\.(alpha|beta|rc)\.[0-9]+(\.[0-9]+)?)?)?$/
branches:
ignore: /.*/
main_filter: &main_filter
filters:
branches:
only: main
nofork_filter: &nofork_filter
filters:
branches:
ignore: /pull\/[0-9]+/
docker_filter: &docker_filter
filters:
branches:
only:
- main
- /.*docker.*/
# You can conditionally persist debug binaries for the build-dev job (so they
# show up in the 'Artifacts' tab of the job) by:
# 1. Go to https://app.circleci.com/pipelines/github/influxdata/influxdb?branch=<branch-name> (<- change this!)
# 2. Click "Trigger Pipeline" in the top-right
# 3. Adjust the "persist_debug" boolean parameter to true
# 4. Click "Run Pipeline"
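# As with release_branch above, this can also be done via the CircleCI API, e.g.:
#
#   $ curl -XPOST -H "Content-Type: application/json" -H "Circle-Token: <your personal circleCI token>" \
#       -d '{"parameters": {"persist_debug": true}, "branch": "<branch-name>"}' \
#       https://circleci.com/api/v2/project/github/influxdata/influxdb/pipeline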
parameters:
persist_debug:
type: boolean
default: false
# Consistent environment setup for Python Build Standalone
pbs_config: &pbs_config
PBS_DATE: "20250212"
PBS_VERSION: "3.13.2"
# Consistent Cargo environment configuration
cargo_env: &cargo_env
# Disable incremental compilation to avoid overhead. We are not preserving these files anyway.
CARGO_INCREMENTAL: "0"
# Disable full debug symbol generation to speed up CI build
# "1" means line tables only, which is useful for panic tracebacks.
CARGO_PROFILE_DEV_DEBUG: "1"
# https://github.com/rust-lang/cargo/issues/10280
CARGO_NET_GIT_FETCH_WITH_CLI: "true"
# The `2xlarge` resource class that we use has 32GB RAM but also 16 CPUs. This means we have 2GB RAM per core on
# average. At peak this is a bit tight, so lower the CPU count for cargo a bit.
CARGO_BUILD_JOBS: "12"
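# To approximate this environment for a local build, e.g.:
#
#   $ CARGO_INCREMENTAL=0 CARGO_PROFILE_DEV_DEBUG=1 \
#     CARGO_NET_GIT_FETCH_WITH_CLI=true CARGO_BUILD_JOBS=12 \
#     cargo build --workspace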
commands:
rust_components:
description: Verify installed components
steps:
- run:
name: Activate toolchain
command: |
rustup toolchain install
rustup component add clippy
rustup component add rustfmt
- run:
name: Verify installed components
command: |
rustup --version
rustup show
cargo fmt --version
cargo clippy --version
gcloud-docker-login:
steps:
- run:
name: configure-gar
command: |
gcloud auth activate-service-account "${GCLOUD_SERVICE_ACCOUNT_EMAIL}" --key-file <(echo "${GCLOUD_SERVICE_ACCOUNT_KEY}")
gcloud auth configure-docker us-docker.pkg.dev
quay-docker-login:
steps:
- run:
name: Login to Quay.io
command: echo "${QUAY_INFLUXDB3_PASSWORD}" |docker login --username="${QUAY_INFLUXDB3_USERNAME}" --password-stdin quay.io
setup_pyo3_env:
description: Setup Python PYO3 environment for the specified target
parameters:
target:
type: string
steps:
- run:
name: Extract python for this target
command: |
tar -C /tmp/workspace/python-artifacts -zxvf /tmp/workspace/python-artifacts/all.tar.gz ./<< parameters.target >>
- run:
name: Show PYO3_CONFIG_FILE
command: cat /tmp/workspace/python-artifacts/<< parameters.target >>/pyo3_config_file.txt
- run:
name: Install Target
command: rustup target add << parameters.target >>
- run:
name: Set PYO3 environment variables
command: |
echo "export PYO3_CONFIG_FILE=/tmp/workspace/python-artifacts/<< parameters.target >>/pyo3_config_file.txt" >> $BASH_ENV
echo "export PYO3_CROSS_PYTHON_VERSION=3.13" >> $BASH_ENV
jobs:
fmt:
docker:
- image: quay.io/influxdb/rust:ci
environment:
<<: *cargo_env
steps:
- checkout
- rust_components
- run:
name: Rust fmt
command: cargo fmt --all -- --check
lint:
docker:
- image: quay.io/influxdb/rust:ci
environment:
<<: *cargo_env
steps:
- checkout
- rust_components
- run:
name: Clippy
command: cargo clippy --all-targets --all-features --workspace -- -D warnings
- run:
name: Yamllint
command: yamllint --config-file .circleci/yamllint.yml --strict .
inclusivity:
docker:
- image: cimg/go:1.23
steps:
- checkout
- run:
name: Build and run inclusivity checker
command: |
go install github.com/jdstrand/language-checker@latest
language-checker --exit-1-on-failure .
cargo-audit:
docker:
- image: quay.io/influxdb/rust:ci
environment:
<<: *cargo_env
steps:
- checkout
- rust_components
- run:
name: Install cargo-deny
command: cargo install cargo-deny --locked
- run:
name: cargo-deny Checks
command: cargo deny check -s
doc:
docker:
- image: quay.io/influxdb/rust:ci
resource_class: large # smaller executors run out of memory
environment:
<<: *cargo_env
# Turn rustdoc warnings into errors (but allow private intra-doc links)
RUSTDOCFLAGS: "-D warnings -A rustdoc::private-intra-doc-links"
steps:
- checkout
- rust_components
- run:
name: Cargo doc
command: cargo doc --document-private-items --no-deps --workspace
- run:
name: Compress Docs
command: tar -cvzf rustdoc.tar.gz target/doc/
- store_artifacts:
path: rustdoc.tar.gz
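# To reproduce this check locally, e.g.:
#
#   $ RUSTDOCFLAGS="-D warnings -A rustdoc::private-intra-doc-links" \
#     cargo doc --document-private-items --no-deps --workspace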
# Run all tests
test:
docker:
- image: quay.io/influxdb/rust:ci
resource_class: 2xlarge+ # smaller executors tend to crash during linking
environment:
<<: *cargo_env
RUST_BACKTRACE: "1"
TARGET: "x86_64-unknown-linux-gnu" # Define target for PYO3 setup
steps:
- checkout
- attach_workspace:
at: /tmp/workspace
- rust_components
- setup_pyo3_env:
target: "x86_64-unknown-linux-gnu"
- run:
name: Setup LD_LIBRARY_PATH for Python
command: |
# Add Python lib directory to LD_LIBRARY_PATH
echo "export LD_LIBRARY_PATH=/tmp/workspace/python-artifacts/x86_64-unknown-linux-gnu/python/lib:$LD_LIBRARY_PATH" >> $BASH_ENV
source $BASH_ENV
# Verify the Python shared library is accessible
ls -la /tmp/workspace/python-artifacts/x86_64-unknown-linux-gnu/python/lib/
- run:
name: cargo nextest
command: |
TEST_LOG= RUST_LOG=info RUST_LOG_SPAN_EVENTS=full RUST_BACKTRACE=1 cargo nextest run --workspace --failure-output immediate-final --no-fail-fast
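# A rough local equivalent, assuming cargo-nextest is installed and the
# python-artifacts tree produced by fetch-python has been extracted for
# x86_64-unknown-linux-gnu:
#
#   $ export PYO3_CONFIG_FILE=<python-artifacts>/x86_64-unknown-linux-gnu/pyo3_config_file.txt
#   $ export LD_LIBRARY_PATH=<python-artifacts>/x86_64-unknown-linux-gnu/python/lib:$LD_LIBRARY_PATH
#   $ cargo nextest run --workspace --no-fail-fast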
# Fetch python-build-standalone for official builds
fetch-python:
machine:
image: ubuntu-2204:current
resource_class: medium
environment:
<<: *pbs_config
steps:
- checkout
- run:
name: pull Python Build Standalone
command: |
echo "PBS_DATE=$PBS_DATE"
.circleci/scripts/fetch-python-standalone.bash \
"python-artifacts" \
"$PBS_DATE" \
"$PBS_VERSION"
- store_artifacts:
path: python-artifacts
- persist_to_workspace:
root: .
paths:
- python-artifacts
# Build a dev binary with the default cargo profile
build-dev:
docker:
- image: us-east1-docker.pkg.dev/influxdata-team-edge/ci-support/ci-cross-influxdb3@sha256:98b05538182a2e4eac5ce96150a1f5c552de48a7d3ea8d312d38e45f0dd42611
auth:
username: _json_key
password: $CISUPPORT_GCS_AUTHORIZATION
resource_class: 2xlarge+ # smaller executors tend to crash during linking
environment:
<<: *cargo_env
TARGET: << parameters.target >>
parameters:
target:
type: string
steps:
- checkout
- attach_workspace:
at: /tmp/workspace
- setup_pyo3_env:
target: << parameters.target >>
- run:
name: Cargo build
command: |
target-env cargo build --target=<< parameters.target >> --workspace
- run:
# Run `git diff` twice: the first pass prints any changes so the dirty files are
# easy to see in the CircleCI output; the second, quiet pass fails the build with exit 1.
name: check build is dirty
command: |
git diff --exit-code
git diff --exit-code --quiet || exit 1
- when:
# persist_debug defaults to false; set it to true by manually triggering a pipeline (see above)
condition: << pipeline.parameters.persist_debug >>
steps:
- run:
name: tar and gzip debug build artifacts
command: |
mkdir -p artifacts
tar --ignore-failed-read -cvf "${PWD}/artifacts/influxdb3-core_<< parameters.target >>.tar" -C "${PWD}/target/<< parameters.target >>/debug" influxdb3{,.exe}
tar --ignore-failed-read -rvf "${PWD}/artifacts/influxdb3-core_<< parameters.target >>.tar" -C "/tmp/workspace/python-artifacts/<< parameters.target >>" python
gzip "${PWD}/artifacts/influxdb3-core_<< parameters.target >>.tar"
- store_artifacts:
path: artifacts
- persist_to_workspace:
root: .
paths:
- artifacts
- when:
condition:
not:
equal: [ << parameters.target >>, x86_64-pc-windows-gnu ]
steps:
- run:
name: Check benches compile
command: |
target-env cargo check --target=<< parameters.target >> --workspace --benches
- run:
name: Check extra features (like prod image)
command: |
target-env cargo check --target=<< parameters.target >> --no-default-features --features="aws,gcp,azure,jemalloc_replacing_malloc,tokio_console"
- when:
condition:
equal: [ << parameters.target >>, x86_64-pc-windows-gnu ]
steps:
- run:
name: Check extra features (like prod image)
command: |
target-env cargo check --target=<< parameters.target >> --no-default-features --features="aws,gcp,azure,jemalloc_replacing_malloc,tokio_console"
# Compile cargo "release" profile binaries for influxdb3 edge releases
build-release:
docker:
- image: us-east1-docker.pkg.dev/influxdata-team-edge/ci-support/ci-cross-influxdb3@sha256:98b05538182a2e4eac5ce96150a1f5c552de48a7d3ea8d312d38e45f0dd42611
auth:
username: _json_key
password: $CISUPPORT_GCS_AUTHORIZATION
resource_class: 2xlarge+
environment:
<<: *cargo_env
TARGET: << parameters.target >>
parameters:
target:
type: string
profile:
type: string
default: release
steps:
- checkout
- attach_workspace:
at: /tmp/workspace
- setup_pyo3_env:
target: << parameters.target >>
- run:
name: Cargo release build
command: |
target-env cargo build --target=<< parameters.target >> --profile=<< parameters.profile >> --workspace
# linking might take a while and doesn't produce CLI output
no_output_timeout: 30m
- when:
condition:
or:
- equal: [ << parameters.target >>, aarch64-unknown-linux-gnu ]
- equal: [ << parameters.target >>, x86_64-unknown-linux-gnu ]
steps:
- run:
# XXX: better to use 'cargo:rustc-link-arg=-Wl,-rpath,$ORIGIN/python/lib'
name: adjust RPATH for linux
command: |
# tarballs need $ORIGIN/python/lib, deb/rpm need $ORIGIN/../lib/influxdb3/python/lib
echo "Running: patchelf --set-rpath '$ORIGIN/python/lib:$ORIGIN/../lib/influxdb3/python/lib' '${PWD}/target/<< parameters.target >>/<< parameters.profile >>/influxdb3'"
patchelf --set-rpath '$ORIGIN/python/lib:$ORIGIN/../lib/influxdb3/python/lib' "${PWD}/target/<< parameters.target >>/<< parameters.profile >>/influxdb3"
- when:
condition:
equal: [ << parameters.target >>, aarch64-apple-darwin ]
steps:
- run:
# XXX: better to use 'cargo:rustc-link-arg=-Wl,-rpath,@executable_path/python/lib'
name: adjust LC_LOAD_DYLIB path for darwin
command: |
export PBS_LIBPYTHON=$(grep '^lib_name=' /tmp/workspace/python-artifacts/<< parameters.target >>/pyo3_config_file.txt | cut -d = -f 2)
echo "Running: /osxcross/bin/aarch64-apple-darwin22.2-install_name_tool -change '/install/lib/lib${PBS_LIBPYTHON}.dylib' '@executable_path/python/lib/lib${PBS_LIBPYTHON}.dylib' '${PWD}/target/<< parameters.target >>/<< parameters.profile >>/influxdb3'"
/osxcross/bin/aarch64-apple-darwin22.2-install_name_tool -change "/install/lib/lib${PBS_LIBPYTHON}.dylib" "@executable_path/python/lib/lib${PBS_LIBPYTHON}.dylib" "${PWD}/target/<< parameters.target >>/<< parameters.profile >>/influxdb3"
# re-sign after install_name_tool since osxcross won't do it
echo "Running: /usr/local/bin/rcodesign sign '${PWD}/target/<< parameters.target >>/<< parameters.profile >>/influxdb3'"
/usr/local/bin/rcodesign sign "${PWD}/target/<< parameters.target >>/<< parameters.profile >>/influxdb3"
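# A build.rs sketch of the XXX suggestions above (not implemented here; the exact
# rpath differs per platform, see the linux/darwin steps):
#
#   fn main() {
#       // linux: $ORIGIN/python/lib ; darwin: @executable_path/python/lib
#       println!("cargo:rustc-link-arg=-Wl,-rpath,$ORIGIN/python/lib");
#   }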
- run:
# Run `git diff` twice: the first pass prints any changes so the dirty files are
# easy to see in the CircleCI output; the second, quiet pass fails the build with exit 1.
name: check build is dirty
command: |
git diff --exit-code
git diff --exit-code --quiet || exit 1
- run:
name: tar build artifacts
command: |
mkdir -p artifacts
tar --ignore-failed-read -cvf "${PWD}/artifacts/influxdb3-core_<< parameters.target >>.tar" -C "${PWD}/target/<< parameters.target >>/<< parameters.profile >>" influxdb3{,.exe}
tar --ignore-failed-read -rvf "${PWD}/artifacts/influxdb3-core_<< parameters.target >>.tar" -C "/tmp/workspace/python-artifacts/<< parameters.target >>" python
- when:
condition:
equal: [ << parameters.target >>, x86_64-pc-windows-gnu ]
steps:
- run:
# Windows doesn't have a concept of RPATH; instead either PATH
# needs to be set ahead of time (by the user; in a .bat script)
# or the required DLLs need to be in the same directory as the
# executable. For now, copy the *.dll files from the
# python directory up next to the executable (we copy so that
# the python executable in the python directory can still find
# them). influxdb#26113
name: tar Windows DLLs next to influxdb3.exe
command: |
tar --ignore-failed-read -rvf "${PWD}/artifacts/influxdb3-core_<< parameters.target >>.tar" -C "/tmp/workspace/python-artifacts/<< parameters.target >>/python" $(find "/tmp/workspace/python-artifacts/<< parameters.target >>/python" -maxdepth 1 -name *.dll -printf '%f\n')
- run:
name: gzip build artifacts
command: |
gzip "${PWD}/artifacts/influxdb3-core_<< parameters.target >>.tar"
- store_artifacts:
path: artifacts
- persist_to_workspace:
root: .
paths:
- artifacts
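# Assemble the distributable OS packages (deb/rpm/tarballs) described in
# .circleci/packages/config.yaml from the build-release artifacts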
build-packages:
docker:
- image: us-east1-docker.pkg.dev/influxdata-team-edge/ci-support/ci-packager-next@sha256:8223348466129205be30413c597e573114949be4190e66e45eb3e2ff8af4cc25
auth:
username: _json_key
password: $CISUPPORT_GCS_AUTHORIZATION
steps:
- checkout
- attach_workspace:
at: /tmp/workspace
- run: packager .circleci/packages/config.yaml
- persist_to_workspace:
root: .
paths:
- artifacts
- store_artifacts:
path: artifacts/
check_package_deb_amd64:
machine:
image: ubuntu-2204:current
resource_class: medium
steps:
- attach_workspace:
at: /tmp/workspace
- checkout
- run:
name: Validate Debian Package (AMD64)
command: |
sudo .circleci/scripts/package-validation/debian \
/tmp/workspace/artifacts/influxdb3*amd64.deb
check_package_deb_arm64:
machine:
image: ubuntu-2204:current
resource_class: arm.medium
steps:
- attach_workspace:
at: /tmp/workspace
- checkout
- run:
name: Validate Debian Package (ARM64)
command: |
sudo .circleci/scripts/package-validation/debian \
/tmp/workspace/artifacts/influxdb3*arm64.deb
check_package_rpm:
machine:
image: ubuntu-2204:current
resource_class: arm.medium
parameters:
arch:
type: string
steps:
- attach_workspace:
at: /tmp/workspace
- add_ssh_keys:
fingerprints:
- 3a:d1:7a:b7:57:d7:85:0b:76:79:85:51:38:f3:e4:67
- checkout
- run: |
AWS_ACCESS_KEY_ID=$TEST_AWS_ACCESS_KEY_ID \
AWS_SECRET_ACCESS_KEY=$TEST_AWS_SECRET_ACCESS_KEY \
.circleci/scripts/package-validation/redhat << parameters.arch >> /tmp/workspace/artifacts/influxdb3*.<< parameters.arch >>.rpm
sign-packages:
circleci_ip_ranges: true
docker:
- image: quay.io/influxdb/rsign:latest
auth:
username: $QUAY_RSIGN_USERNAME
password: $QUAY_RSIGN_PASSWORD
steps:
- add_ssh_keys:
fingerprints:
- fc:7b:6e:a6:38:7c:63:5a:13:be:cb:bb:fa:33:b3:3c
- attach_workspace:
at: /tmp/workspace
- run: |
# We need this base so that we can filter it out of our checksums
# output and if that changes at all we only need to update it here
WORK_DIR="/tmp/workspace/artifacts/"
for target in ${WORK_DIR}*
do
case "${target}"
in
# rsign is shipped on Alpine Linux which uses "busybox ash" instead
# of bash. ash is somewhat more posix compliant and is missing some
# extensions and niceties from bash.
*.deb|*.rpm|*.tar.gz|*.zip)
rsign "${target}"
;;
esac
if [ -f "${target}" ]
then
# Since all artifacts are present, sign them here. This saves Circle
# credits over spinning up another instance just to separate out the
# checksum job.
sha256sum "${target}" | sed "s#$WORK_DIR##" >> "/tmp/workspace/artifacts/influxdb3-core.${CIRCLE_TAG}.digests"
# write individual checksums
md5sum "${target}" | sed "s#$WORK_DIR##" >> "${target}.md5"
sha256sum "${target}" | sed "s#$WORK_DIR##" >> "${target}.sha256"
fi
done
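# Downstream, a downloaded artifact can be verified against its checksum, e.g.:
#
#   $ sha256sum -c influxdb3-core_<target>.tar.gz.sha256
#
# or against the combined digests file with `sha256sum -c --ignore-missing`.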
- persist_to_workspace:
root: /tmp/workspace
paths:
- artifacts
- store_artifacts:
path: /tmp/workspace/artifacts
publish-packages:
docker:
- image: cimg/python:3.12.2
parameters:
# "destination" should be one of:
# - releases
# - nightlies
# - snapshots
destination:
type: string
steps:
- attach_workspace:
at: /tmp/workspace
- aws-s3/sync:
arguments: --acl public-read
aws-region: RELEASE_AWS_REGION
aws-access-key-id: RELEASE_AWS_ACCESS_KEY_ID
aws-secret-access-key: RELEASE_AWS_SECRET_ACCESS_KEY
from: /tmp/workspace/artifacts
to: s3://dl.influxdata.com/influxdb/<< parameters.destination >>
- run:
command: |
export AWS_REGION="${RELEASE_AWS_REGION}"
export AWS_ACCESS_KEY_ID="${RELEASE_AWS_ACCESS_KEY_ID}"
export AWS_SECRET_ACCESS_KEY="${RELEASE_AWS_SECRET_ACCESS_KEY}"
aws cloudfront create-invalidation --distribution-id "${RELEASE_ARTIFACTS_CLOUDFRONT}" --paths '/influxdb/<< parameters.destination >>/*'
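# The synced artifacts are served from the bucket above (fronted by the
# RELEASE_ARTIFACTS_CLOUDFRONT distribution), i.e. presumably at
# https://dl.influxdata.com/influxdb/<destination>/<artifact>.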
build-docker:
parameters:
platform:
type: string
resource_class:
type: string
image_name:
type: string
default: influxdb3-core
environment:
<<: *pbs_config
machine:
image: default
resource_class: << parameters.resource_class >>
steps:
- checkout
- run:
name: "Set Cargo profile based on branch"
command: |
if [ "$CIRCLE_BRANCH" = "main" ]; then
echo "export DOCKER_PROFILE=release" >> "$BASH_ENV"
else
echo "export DOCKER_PROFILE=quick-release" >> "$BASH_ENV"
fi
- run:
name: Build the docker image
command: |
.circleci/scripts/docker_build_release.bash \
"influxdb3" \
"aws,gcp,azure,jemalloc_replacing_malloc,tokio_console" \
"<< parameters.image_name >>:latest-<< parameters.platform >>" \
"$PBS_DATE" \
"$PBS_VERSION" \
"<< parameters.platform >>" \
"$DOCKER_PROFILE"
# linking might take a while and doesn't produce CLI output
no_output_timeout: 60m
- run: |
docker save << parameters.image_name >>:latest-<< parameters.platform >> > << parameters.image_name >>-<< parameters.platform >>.tar
- persist_to_workspace:
root: .
paths:
- << parameters.image_name >>-<< parameters.platform >>.tar
publish-docker:
docker:
- image: cimg/gcp:2023.02
parameters:
image_name:
type: string
default: influxdb3-core
resource_class: medium
steps:
- checkout
- setup_remote_docker
- quay-docker-login
- attach_workspace:
at: .
- run: |
docker load < << parameters.image_name >>-arm64.tar
docker load < << parameters.image_name >>-amd64.tar
.circleci/scripts/publish.bash << parameters.image_name >> ${CIRCLE_SHA1}
wait-for-docker:
resource_class: small
docker:
- image: busybox
steps:
- run: |
echo build executed successfully
workflows:
version: 2
snapshot:
jobs:
- fetch-python:
<<: *main_filter
- build-release:
<<: *main_filter
name: build-snapshot-<< matrix.target >>
matrix:
parameters:
profile:
- release
target:
- aarch64-apple-darwin
- aarch64-unknown-linux-gnu
- x86_64-pc-windows-gnu
- x86_64-unknown-linux-gnu
requires:
- fetch-python
- build-packages:
<<: *main_filter
requires:
- build-release
- sign-packages:
<<: *main_filter
requires:
- build-packages
- publish-packages:
<<: *main_filter
matrix:
parameters:
destination: [ snapshots ]
requires:
- build-release
- sign-packages
ci:
jobs:
- fmt:
<<: *any_filter
- lint:
<<: *any_filter
- inclusivity:
<<: *any_filter
- cargo-audit:
<<: *any_filter
- fetch-python:
<<: *any_filter
- test:
<<: *any_filter
requires:
- fetch-python
- build-dev:
# This job requires secrets stored in the environment. Secrets
# are not passed to workflows executed on forked repositories,
# so skip the job there; it will be executed on merge to main
# anyway.
<<: *nofork_filter
name: build-dev-<< matrix.target >>
matrix:
parameters:
target:
- aarch64-apple-darwin
- aarch64-unknown-linux-gnu
- x86_64-pc-windows-gnu
- x86_64-unknown-linux-gnu
requires:
- fetch-python
- doc:
<<: *any_filter
- build-release:
<<: *release_filter
name: build-release-<< matrix.target >>
matrix:
parameters:
target:
- aarch64-apple-darwin
- aarch64-unknown-linux-gnu
- x86_64-pc-windows-gnu
- x86_64-unknown-linux-gnu
requires:
- fetch-python
- build-packages:
<<: *release_filter
requires:
- build-release
- check_package_deb_arm64:
<<: *release_filter
requires:
- build-packages
- check_package_deb_amd64:
<<: *release_filter
requires:
- build-packages
- check_package_rpm:
<<: *nofork_filter
name:
check_package_rpm-<< matrix.arch >>
matrix:
parameters:
arch: [ x86_64, aarch64 ]
requires:
- build-packages
- sign-packages:
<<: *release_filter
requires:
- build-packages
- check_package_rpm
- check_package_deb_arm64
- check_package_deb_amd64
- test
- publish-packages:
<<: *release_filter
matrix:
parameters:
destination: [ releases ]
requires:
- build-release
- sign-packages
- test
- doc
- lint
- fmt
- cargo-audit
- build-docker:
<<: *docker_filter
name: build-docker-amd64
platform: amd64
resource_class: 2xlarge+
- build-docker:
<<: *docker_filter
name: build-docker-arm64
platform: arm64
resource_class: arm.2xlarge
- publish-docker:
<<: *docker_filter
requires:
- build-docker-amd64
- build-docker-arm64