feat: perform basic package validation (#23863)
* chore: remove unused build/ci scripts
* feat: validate packages during build
* chore: test CentOS aarch64 package
* fix: remove x86_64 from parameterized workflow
* fix: don't upgrade packages. Since some unrelated packages break during upgrade, this no longer upgrades the system before installing influxdb.
parent 26daa86648
commit 853d6157e3
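At a high level, the new CI jobs hand the freshly built artifacts to a small set of validation scripts added below. A rough sketch of the two entry points, assuming the packages sit under artifacts/ and that AWS credentials plus CIRCLE_JOB are exported for the RPM path, as they are in CI (the file globs are illustrative):

# Debian package: run the Ubuntu wrapper directly on the build machine.
sudo .circleci/scripts/package-validation/ubuntu artifacts/influxdb2*-amd64.deb

# RPM package: provision a CentOS Stream 9 EC2 instance with terraform,
# copy the package across, and run the shared "validate" script over ssh.
.circleci/scripts/package-validation/centos x86_64 artifacts/influxdb2*.x86_64.rpm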
@@ -65,6 +65,7 @@ nofork_filter: &nofork_filter
    branches:
      ignore: /pull\/[0-9]+/

workflows:
  version: 2
  build:
@@ -133,6 +134,22 @@ workflows:
            exclude:
              - { os: darwin, arch: arm64 }
              - { os: windows, arch: arm64 }
      - check_package_deb_amd64:
          requires:
            - build-package-linux-amd64
      - check_package_deb_arm64:
          requires:
            - build-package-linux-arm64
      - check_package_rpm:
          <<: *nofork_filter
          name:
            check_package_rpm-<< matrix.arch >>
          matrix:
            parameters:
              arch: [ x86_64, aarch64 ]
          requires:
            - build-package-linux-amd64
            - build-package-linux-arm64
      - test-downgrade:
          <<: *any_filter
          requires:
@@ -880,3 +897,54 @@ jobs:
          root: .
          paths:
            - changelog_artifacts

  check_package_deb_amd64:
    machine:
      image: ubuntu-2204:current
    resource_class: medium
    steps:
      - attach_workspace:
          at: /tmp/workspace
      - checkout
      - run:
          name: Validate Debian Package (AMD64)
          command: |
            sudo .circleci/scripts/package-validation/ubuntu \
              /tmp/workspace/artifacts/influxdb2*-amd64.deb

  check_package_deb_arm64:
    machine:
      image: ubuntu-2204:current
    resource_class: arm.medium
    steps:
      - attach_workspace:
          at: /tmp/workspace
      - checkout
      - run:
          name: Validate Debian Package (ARM64)
          command: |
            sudo .circleci/scripts/package-validation/ubuntu \
              /tmp/workspace/artifacts/influxdb2*-arm64.deb

  check_package_rpm:
    parameters:
      arch:
        type: string
    executor: linux-amd64
    steps:
      - attach_workspace:
          at: /tmp/workspace
      - checkout
      - run:
          name: Install Dependencies
          command: |
            sudo snap install --classic terraform
      - add_ssh_keys:
          fingerprints:
            - "91:0a:5b:a7:f9:46:77:f3:5d:4a:cf:d2:44:c8:2c:5a"
      - run:
          name: Validate RPM Package
          command: |
            export AWS_ACCESS_KEY_ID=$TEST_AWS_ACCESS_KEY_ID
            export AWS_SECRET_ACCESS_KEY=$TEST_AWS_SECRET_ACCESS_KEY
            .circleci/scripts/package-validation/centos << parameters.arch >> /tmp/workspace/artifacts/influxdb2*.<< parameters.arch >>.rpm
@@ -0,0 +1,69 @@
#!/bin/bash
set -o errexit \
    -o nounset \
    -o pipefail

# $1 -> architecture
# $2 -> package path
arch="${1}"
package="$(realpath "${2}")"

path="$(dirname "$(realpath "${BASH_SOURCE[0]}")")"

terraform_init() {
  pushd "${path}/tf" &>/dev/null

  # Unfortunately, CircleCI doesn't offer any RPM-based machine images.
  # This is required to test the functionality of the systemd services
  # (systemd doesn't run within docker containers). This will spawn a
  # CentOS 9 Stream EC2 instance in AWS.
  terraform init
  terraform apply \
    -auto-approve \
    -var "architecture=${1}" \
    -var "package_path=${2}" \
    -var "identifier=${CIRCLE_JOB}"

  popd &>/dev/null
}

terraform_free() {
  pushd "${path}/tf" &>/dev/null

  terraform destroy \
    -auto-approve \
    -var "architecture=${1}" \
    -var "package_path=${2}" \
    -var "identifier=${CIRCLE_JOB}"

  popd &>/dev/null
}

terraform_ip() {
  pushd "${path}/tf" &>/dev/null

  terraform output -raw node_ssh

  popd &>/dev/null
}


# This ensures that the associated resources within AWS are released
# upon exit or when encountering an error. This is set up before the
# call to "terraform apply" so even partially initialized resources
# are released.
# shellcheck disable=SC2064
trap "terraform_free \"${arch}\" \"${package}\"" \
  SIGINT \
  SIGTERM \
  ERR \
  EXIT

terraform_init "${arch}" "${package}"

printf 'Setup complete! Testing %s... (this takes several minutes!)\n' "${1}"

# Since terraform *just* created this instance, the host key is not
# known. Therefore, we'll disable StrictHostKeyChecking so ssh does
# not wait for user input.
ssh -o 'StrictHostKeyChecking=no' "ec2-user@$(terraform_ip)" 'sudo ./validate rpm ./influxdb2.rpm'
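The trap registration above is the piece doing the heavy lifting: cleanup is wired up before "terraform apply", so even a partially provisioned environment is destroyed on failure or interrupt. A minimal, self-contained sketch of the same pattern (the echo lines stand in for the terraform calls):

#!/bin/bash
set -o errexit -o nounset -o pipefail

cleanup() {
  # Stand-in for "terraform destroy -auto-approve ...".
  echo 'releasing resources'
}

# Register cleanup *before* provisioning, so an interrupted or
# partially failed apply still releases whatever was created.
trap cleanup SIGINT SIGTERM ERR EXIT

# Stand-in for "terraform apply -auto-approve ...".
echo 'provisioning'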
@@ -0,0 +1,112 @@
terraform {
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~> 2.70"
    }
  }
}

variable "architecture" {
  type = string
}

variable "identifier" {
  type = string
}

variable "package_path" {
  type = string
}

provider "aws" {
  region = "us-east-1"
}

data "aws_ami" "centos" {
  most_recent = true

  # This information is sourced from https://wiki.centos.org/Cloud/AWS
  # and should pull the latest AWS-provided CentOS Stream 9 image.
  filter {
    name   = "name"
    values = [format("CentOS Stream 9 %s*", var.architecture)]
  }
  filter {
    name   = "virtualization-type"
    values = ["hvm"]
  }

  owners = ["125523088429"]
}

resource "aws_security_group" "influxdb_test_package_sg" {
  ingress {
    description = "Allow ssh connection"
    from_port   = 22
    to_port     = 22
    protocol    = "tcp"
    cidr_blocks = ["0.0.0.0/0"]
  }

  egress {
    description = "Allow all outgoing"
    from_port   = 0
    to_port     = 0
    protocol    = "all"
    cidr_blocks = ["0.0.0.0/0"]
  }
}

resource "aws_instance" "centos" {
  count                  = 1
  ami                    = data.aws_ami.centos.id
  instance_type          = var.architecture == "x86_64" ? "t2.micro" : "c6g.medium"
  key_name               = "circleci-oss-test"
  vpc_security_group_ids = [aws_security_group.influxdb_test_package_sg.id]

  tags = {
    Name = format("circleci_%s_centos_%s", var.identifier, var.architecture)
  }

  provisioner "file" {
    source      = var.package_path
    destination = "/home/ec2-user/influxdb2.rpm"

    connection {
      type  = "ssh"
      user  = "ec2-user"
      host  = self.public_dns
      agent = true
    }
  }

  provisioner "file" {
    source      = "../validate"
    destination = "/home/ec2-user/validate"

    connection {
      type  = "ssh"
      user  = "ec2-user"
      host  = self.public_dns
      agent = true
    }
  }

  provisioner "remote-exec" {
    inline = [
      "chmod +x /home/ec2-user/validate",
    ]

    connection {
      type  = "ssh"
      user  = "ec2-user"
      host  = self.public_dns
      agent = true
    }
  }
}

output "node_ssh" {
  value = aws_instance.centos.0.public_dns
}
@@ -0,0 +1,8 @@
#!/bin/bash
set -o errexit \
    -o nounset \
    -o pipefail

path="$(dirname "$(realpath "${BASH_SOURCE[0]}")")"

"${path}/validate" deb "${1}"
@@ -0,0 +1,116 @@
#!/bin/bash
set -o errexit \
    -o nounset \
    -o pipefail

usage() {
  cat <<'EOF'
usage: validate [type] [path]

Program:
    This application performs sanity checks on the provided InfluxDB
    package. InfluxDB should *not* be installed on the system before
    running this application. This validates new installations and
    performs specific checks relevant only to InfluxDB.

Options:
    type    Must be "deb" or "rpm". This option instructs the
            application to use the package manager associated
            with "type".
    path    Path to InfluxDB package to validate.
EOF
}

if [[ ! "${1:-}" ]] || [[ ! "${2:-}" ]]
then
  (usage) && exit 1
fi
PACKAGE_TYPE="${1}"
PACKAGE_PATH="${2}"

install_deb() {
  # When installing the package, ensure that the latest repository listings
  # are available. This might be required so that all dependencies resolve.
  # Since this needs to be run by CI, we supply "noninteractive" and "-y"
  # so no prompts stall the pipeline.
  export DEBIAN_FRONTEND=noninteractive
  apt-get update
  # "apt-get install" should be used instead of "dpkg -i", because "dpkg"
  # does not resolve dependencies. "apt-get" requires that the package
  # path looks like a path (either fullpath or prefixed with "./").
  apt-get install -y binutils "$(realpath "${PACKAGE_PATH}")"
}

install_rpm() {
  # see "install_deb" for "update"
  yum update -y
  yum install -y binutils
  yum localinstall -y "$(realpath "${PACKAGE_PATH}")"
}

case ${PACKAGE_TYPE}
in
  deb)
    (install_deb)
    ;;
  rpm)
    (install_rpm)
    ;;
esac

if ! which influxd &>/dev/null
then
  printf 'ERROR: Failed to locate influxd executable!\n' >&2
  exit 2
fi

NEEDED="$(readelf -d "$(which influxd)" | (grep 'NEEDED' || true ))"

# shellcheck disable=SC2181
if [[ ${?} -ne 0 ]]
then
  cat <<'EOF'
ERROR: readelf could not analyze the influxd executable! This
       might be the consequence of installing a package built
       for another platform OR invalid compiler/linker flags.
EOF
  exit 2
fi

if [[ "${NEEDED:-}" ]]
then
  cat <<'EOF'
ERROR: influxd not statically linked! This may prevent all
       platforms from running influxd without installing
       separate dependencies.
EOF
  exit 2
fi

PIE="$(readelf -d "$(which influxd)" | (grep 'Flags: PIE' || true))"

if [[ ! "${PIE:-}" ]]
then
  printf 'ERROR: influxd not linked with "-fPIE"!\n'
  exit 2
fi

if ! systemctl is-active influxdb &>/dev/null
then
  systemctl start influxdb
fi

for i in {0..2}
do
  if ! systemctl is-active influxdb &>/dev/null
  then
    printf 'ERROR: influxdb service failed to start!\n'
    exit 2
  fi
  # Sometimes the service fails several seconds or minutes after
  # starting. This failure may not propagate to the original
  # "systemctl start influxdb" command. Therefore, we'll
  # poll the service several times before exiting.
  sleep 30
done

printf 'Finished validating influxdb!\n'
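For reference, the wrappers above end up invoking this script roughly as follows, run as root on a disposable machine (the .deb file name is illustrative; the RPM path matches the name the terraform provisioner uploads):

# Debian/Ubuntu host
sudo ./validate deb ./influxdb2-amd64.deb

# CentOS host (as run by the centos wrapper over ssh)
sudo ./validate rpm ./influxdb2.rpm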
@@ -1 +0,0 @@
891e7d47827e99947e46c82f509e479aa13acf24
@@ -1,30 +0,0 @@
#!/bin/bash

set -ex

flux_dir=$(go list -m -f '{{.Dir}}' github.com/influxdata/flux)
FLUX_RUST_VERSION=$(cat ${flux_dir}/Dockerfile_build | grep 'FROM rust:' | cut -d ' ' -f2 | cut -d ':' -f2)
RUST_LATEST_VERSION=${FLUX_RUST_VERSION:-1.53}
cd ..
rm -rf flux-repo

# For security, we specify a particular rustup version and a SHA256 hash, computed
# ourselves and hardcoded here. When updating `RUSTUP_LATEST_VERSION`:
#   1. Download the new rustup script from https://github.com/rust-lang/rustup/releases.
#   2. Audit the script and changes to it. You might want to grep for strange URLs...
#   3. Update `OUR_RUSTUP_SHA` with the result of running `sha256sum rustup-init.sh`.
RUSTUP_LATEST_VERSION=1.24.2
OUR_RUSTUP_SHA="40229562d4fa60e102646644e473575bae22ff56c3a706898a47d7241c9c031e"

# Download rustup script
curl --proto '=https' --tlsv1.2 -sSf \
    https://raw.githubusercontent.com/rust-lang/rustup/${RUSTUP_LATEST_VERSION}/rustup-init.sh -O

# Verify checksum of rustup script. Exit with error if check fails.
echo "${OUR_RUSTUP_SHA} rustup-init.sh" | sha256sum --check -- \
    || { echo "Checksum problem!"; exit 1; }

# Run rustup.
sh rustup-init.sh --default-toolchain "$RUST_LATEST_VERSION" -y
export PATH="${HOME}/.cargo/bin:${PATH}"
@@ -1,82 +0,0 @@
#!/bin/bash -ex

dependencies="git git-cliff"

for dependency in $dependencies
do
  if ! command -v $dependency &>/dev/null
  then
    echo "error: $dependency was not found in PATH" >&2
    exit 255
  fi
done

# The default "starting" commit is a somewhat arbitrary starting point for
# cataloging recent commits in a way that breaks from the old convention
DEFAULT_START_COMMIT="891e7d47827e99947e46c82f509e479aa13acf24"
DEFAULT_NEWEST_COMMIT="$(git rev-parse HEAD)"
DEFAULT_COMMIT_RANGE="${DEFAULT_START_COMMIT}..${DEFAULT_NEWEST_COMMIT}"

COMMIT_RANGE="${DEFAULT_COMMIT_RANGE}"

DEFAULT_GIT_CLIFF_OPTIONS=""
PREPEND_TARGET=""

function print-usage {
  cat << EOF >&2
usage: $0 [<options>] -- [<git cliff flags>] [<git cliff options>]

--commit-range <git commit range>   The specific range of commits from which to generate the changelog.
                                    A hardcoded default sets a range that is contemporaneous with the
                                    addition of this script to influxdb CI.
                                    Value: $COMMIT_RANGE

--prepend <file path>               Target a file to which to prepend the git-cliff output. This is not
                                    the same as the git-cliff prepend option, which can only be used with
                                    the -l or -u flags in that tool.
                                    Value: $PREPEND_TARGET


Options specified after '--' separator are used by git-cliff directly
EOF
}

while [[ $# -gt 0 ]]; do
  case $1 in
    --commit-range)
      COMMIT_RANGE="$2"
      shift
      ;;
    --prepend)
      PREPEND_TARGET="$2"
      shift
      ;;
    --help)
      print-usage
      exit 255
      ;;
    --)
      shift
      break
      ;;
    *)
      echo "error: unknown option '$1'" >&2
      exit 255
      ;;
  esac
  shift
done

output="$(git cliff ${@} ${DEFAULT_GIT_CLIFF_OPTIONS} ${COMMIT_RANGE})"

if [ -n "$PREPEND_TARGET" ] ; then
  if [ -n "$output" ]; then
    newline=$'\n\n'
    echo "${output}${newline}$(cat $PREPEND_TARGET)"
  else
    echo "$(cat $PREPEND_TARGET)"
  fi
else
  echo "$output"
fi
@@ -1 +0,0 @@
out
@@ -1,20 +0,0 @@
# Cross-compiler Builds

The scripts in this directory are used to build cross-compilers for InfluxDB from source.
We build & cache these artifacts separately to speed up cross-builds in CI.

## Building archives

The build scripts are stand-alone; any required variables are defined as constants within
their shell code. Running a script will produce a new `.tar.gz` archive under `out/` in this directory.
Archives are named after the version(s) of the software they contain + a build timestamp.

## Uploading builds

After building a new archive, follow these steps to add it to our CI image:
1. Log into the Errplane AWS console. Credentials are hosted in 1Password, within the `Engineering` vault.
2. Navigate to [dl.influxdata.com/influxdb-ci](https://s3.console.aws.amazon.com/s3/buckets/dl.influxdata.com?region=us-east-1&prefix=influxdb-ci/) in the S3 console.
3. Navigate to the appropriate sub-directory of `influxdb-ci` for the archive you're uploading. The path varies by cross-compiler:
   * Native AMD64 `musl-gcc` is hosted under `musl/<musl-version>/`
   * Cross-compilers for `musl-gcc` (i.e. ARM64) are hosted under `musl/<musl-version>/musl-cross/<musl-cross-make-version>/`
   * Cross-compilers for macOS `clang` are hosted under `osxcross/<osxcross-hash>/`
4. Use the S3 console to upload the `.tar.gz` archive into the directory.
5. Update our CircleCI config to point at the new archive.
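(Aside: step 4 of the removed README uses the S3 console; an equivalent upload with the aws CLI, assuming it is configured for the same account, would look roughly like the following, with a hypothetical build timestamp in the archive name.)

aws s3 cp out/musl-1.1.24-20221101000000.tar.gz \
    s3://dl.influxdata.com/influxdb-ci/musl/1.1.24/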
@@ -1,36 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

declare -r SCRIPT_DIR=$(cd $(dirname ${0}) >/dev/null 2>&1 && pwd)
declare -r OUT_DIR=${SCRIPT_DIR}/out

declare -r BUILD_IMAGE=ubuntu:20.04
declare -r MUSL_VERSION=1.1.24
declare -r MUSL_CROSS_MAKE_VERSION=0.9.9

docker run --rm -i -v ${OUT_DIR}:/out -w /tmp ${BUILD_IMAGE} bash <<EOF
set -euo pipefail

declare -r BUILD_TIME=\$(date -u '+%Y%m%d%H%M%S')
export DEBIAN_FRONTEND=noninteractive

# Install dependencies.
apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    ca-certificates \
    curl \
    git \
    make \
    patch

# Clone and build musl-cross-make's ARM64 target.
git clone https://github.com/richfelker/musl-cross-make.git && \
    cd musl-cross-make && \
    git checkout v${MUSL_CROSS_MAKE_VERSION} && \
    make MUSL_VER=${MUSL_VERSION} TARGET=aarch64-unknown-linux-musl DL_CMD="curl -C - -L -o" install && \
    mv output /tmp/musl-cross && \
    cd /tmp

# Archive the build output.
tar czf /out/musl-${MUSL_VERSION}-cross-aarch64-${MUSL_CROSS_MAKE_VERSION}-\${BUILD_TIME}.tar.gz musl-cross
EOF
@@ -1,33 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

declare -r SCRIPT_DIR=$(cd $(dirname ${0}) >/dev/null 2>&1 && pwd)
declare -r OUT_DIR=${SCRIPT_DIR}/out

declare -r BUILD_IMAGE=ubuntu:20.04
declare -r MUSL_VERSION=1.1.24

docker run --rm -i -v ${OUT_DIR}:/out -w /tmp ${BUILD_IMAGE} bash <<EOF
set -euo pipefail

declare -r BUILD_TIME=\$(date -u '+%Y%m%d%H%M%S')

# Install dependencies.
apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    ca-certificates \
    curl \
    patch

# Build MUSL from source.
curl https://musl.libc.org/releases/musl-${MUSL_VERSION}.tar.gz -O && \
    tar xzf musl-${MUSL_VERSION}.tar.gz && \
    cd musl-${MUSL_VERSION} &&
    ./configure &&
    make && \
    make install && \
    cd /tmp

# Archive the build output.
cd /usr/local && tar czf /out/musl-${MUSL_VERSION}-\${BUILD_TIME}.tar.gz musl && cd /tmp
EOF
@@ -1,42 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

declare -r SCRIPT_DIR=$(cd $(dirname ${0}) >/dev/null 2>&1 && pwd)
declare -r OUT_DIR=${SCRIPT_DIR}/out

declare -r BUILD_IMAGE=ubuntu:20.04
declare -r OSXCROSS_VERSION=5771a847950abefed9a37e2d16ee10e0dd90c641

docker run --rm -i -v ${OUT_DIR}:/out -w /tmp ${BUILD_IMAGE} bash <<EOF
set -euo pipefail

declare -r BUILD_TIME=\$(date -u '+%Y%m%d%H%M%S')
export DEBIAN_FRONTEND=noninteractive

# Install dependencies.
apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    ca-certificates \
    clang \
    cmake \
    curl \
    git \
    libssl-dev \
    libxml2-dev \
    llvm-dev \
    lzma-dev \
    patch \
    zlib1g-dev

# Clone and build osxcross.
git clone https://github.com/tpoechtrager/osxcross.git /usr/local/osxcross && \
    cd /usr/local/osxcross && \
    git checkout ${OSXCROSS_VERSION} && \
    curl -L -o ./tarballs/MacOSX10.12.sdk.tar.xz https://storage.googleapis.com/influxdata-team-flux/macos-sdks/MacOSX10.12.sdk.tar.xz && \
    UNATTENDED=1 PORTABLE=true OCDEBUG=1 ./build.sh && \
    rm -rf .git build tarballs && \
    cd /tmp

# Archive the build output.
cd /usr/local && tar czf /out/osxcross-${OSXCROSS_VERSION}-\${BUILD_TIME}.tar.gz osxcross && cd /tmp
EOF
@@ -1,19 +0,0 @@
#!/bin/bash

function die() {
    echo "$@" 1>&2
    exit 1
}

GOOS=${GOOS:-$(go env GOOS)}
GOARCH=${GOARCH:-$(go env GOARCH)}

case "${GOOS}_${GOARCH}" in
    linux_amd64)   CC=musl-gcc ;;
    linux_arm64)   CC=aarch64-unknown-linux-musl-gcc ;;
    darwin_amd64)  CC=x86_64-apple-darwin18-clang ;;
    windows_amd64) CC=x86_64-w64-mingw32-gcc ;;
    *) die "No cross-compiler set for ${GOOS}_${GOARCH}" ;;
esac

exec ${CC} "$@"