author    Austin S. Hemmelgarn <austin@netdata.cloud>  2021-03-15 08:31:16 -0400
committer GitHub <noreply@github.com>  2021-03-15 08:31:16 -0400
commit    e54982b6c814d1c6e1dda456fcf691d9b829eb91 (patch)
tree      8748ffc75bbc7413fb6ed059ad4cb0a3db619c5e /.github
parent    6757aa9239493589ea8f22a1db22aca62ce8259b (diff)
Docker-based packaging workflow in GitHub Actions. (#9964)
* Add a new GHA workflow for building and publishing binary packages. This greatly simplifies the package handling code, significantly reduces the amount of things we are running in Travis, adds better CI for package builds for PRs, finally eliminates LXC from any of our CI, and enables us to make further major improvements much easier.
* Migrate repo cleanup to GHA and remove dead code.
* Fix RPM package builds.
* Fix DEB installation.
Diffstat (limited to '.github')
-rwxr-xr-x  .github/scripts/old_package_purging.sh          88
-rwxr-xr-x  .github/scripts/package_cloud_wrapper.sh        48
-rw-r--r--  .github/scripts/parse_packagecloud_dist_id.py   39
-rw-r--r--  .github/workflows/packaging.yml                129
4 files changed, 304 insertions, 0 deletions
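
For orientation, each matrix entry in the packaging.yml below boils down to roughly the following local Docker invocation. This is a sketch only: the build arguments and image tag mirror the workflow's Build Packages and Extract Packages steps, and the version string is a placeholder.

    # Rough local equivalent of the Debian 10 / amd64 matrix entry; not the CI itself.
    docker build -f packaging/Dockerfile.packager \
      --build-arg ARCH=amd64 --build-arg DISTRO=debian \
      --build-arg TEST_BASE=debian --build-arg DISTRO_VERSION=10 \
      --build-arg PKG_VERSION=1.29.0 \
      -t local/package-builder:debian10 .
    mkdir -p artifacts
    docker run -v "$PWD/artifacts:/artifacts" local/package-builder:debian10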
diff --git a/.github/scripts/old_package_purging.sh b/.github/scripts/old_package_purging.sh
new file mode 100755
index 0000000000..c90c4b7801
--- /dev/null
+++ b/.github/scripts/old_package_purging.sh
@@ -0,0 +1,88 @@
+#!/usr/bin/env bash
+#
+# Script to handle package cloud retention policy
+# Our open source subscription is limited,
+# so we use this script to control the number of packages maintained historically
+#
+# Dependencies:
+# - PACKAGE_CLOUD_RETENTION_DAYS
+#   Indicates how many days' worth of RPM and DEB packages we keep on package cloud
+#
+# Copyright : SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Author : Pavlos Emm. Katsoulakis <paul@netdata.cloud>
+#
+set -e
+
+delete_files_for_version() {
+ local v="$1"
+
+ # Delete the selected filenames in version
+ FILES_IN_VERSION=$(jq --sort-keys --arg v "${v}" '.[] | select ( .version | contains($v))' "${PKG_LIST_FILE}" | grep filename | cut -d':' -f 2)
+
+ # Iterate through the files and delete them
+ for pkg in ${FILES_IN_VERSION/\\n/}; do
+ pkg=${pkg/,/}
+ pkg=${pkg/\"/}
+ pkg=${pkg/\"/}
+ echo "Attempting yank on ${pkg}.."
+ .github/scripts/package_cloud_wrapper.sh yank "${REPO}" "${pkg}" || echo "Nothing to yank or error on ${pkg}"
+ done
+}
+
+# Fail unless we are run from the top level directory of the netdata git repository
+TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
+CWD=$(git rev-parse --show-cdup)
+if [ -n "$CWD" ] || [ ! "${TOP_LEVEL}" == "netdata" ]; then
+ echo "Run as .github/scripts/$(basename "$0") from top level directory of netdata git repository"
+  echo "Old package yanking cancelled"
+ exit 1
+fi
+
+if [ -z "${REPO}" ]; then
+ echo "No REPO variable found"
+ exit 1
+fi
+
+if [ -z "${PKG_CLOUD_TOKEN}" ]; then
+ echo "No PKG_CLOUD_TOKEN variable found"
+ exit 1
+fi
+
+if [ -z "${PACKAGE_CLOUD_RETENTION_DAYS}" ]; then
+ echo "No PACKAGE_CLOUD_RETENTION_DAYS variable found"
+ exit 1
+fi
+
+TMP_DIR="$(mktemp -d /tmp/netdata-old-package-yanking-XXXXXX)"
+PKG_LIST_FILE="${TMP_DIR}/complete_package_list.json"
+DATE_EPOCH="1970-01-01"
+DATE_UNTIL_TO_DELETE=$(date --date="${PACKAGE_CLOUD_RETENTION_DAYS} day ago" +%Y-%m-%d)
+
+
+echo "Created temp directory: ${TMP_DIR}"
+echo "We will be purging contents up until ${DATE_UNTIL_TO_DELETE}"
+
+echo "Calling package could to retrieve all available packages on ${REPO}"
+curl -sS "https://${PKG_CLOUD_TOKEN}:@packagecloud.io/api/v1/repos/${REPO}/packages.json" > "${PKG_LIST_FILE}"
+
+# Get versions within the desired duration
+#
+VERSIONS_TO_PURGE=$(jq --arg s "${DATE_EPOCH}" --arg e "${DATE_UNTIL_TO_DELETE}" '
+[($s, $e) | strptime("%Y-%m-%d")[0:3]] as $r
+ | map(select(
+ (.created_at[:19] | strptime("%Y-%m-%dT%H:%M:%S")[0:3]) as $d
+ | $d >= $r[0] and $d <= $r[1]
+))' "${PKG_LIST_FILE}" | grep '"version":' | sort -u | sed -e 's/ //g' | cut -d':' -f2)
+
+echo "We will be deleting the following versions: ${VERSIONS_TO_PURGE}"
+for v in ${VERSIONS_TO_PURGE}; do
+ v=${v/\"/}
+ v=${v/\"/}
+ v=${v/,/}
+ echo "Remove all files for version $v"
+ delete_files_for_version "${v}"
+done
+
+# Done, clean up
+[ -d "${TMP_DIR}" ] && rm -rf "${TMP_DIR}"
diff --git a/.github/scripts/package_cloud_wrapper.sh b/.github/scripts/package_cloud_wrapper.sh
new file mode 100755
index 0000000000..0876b2a363
--- /dev/null
+++ b/.github/scripts/package_cloud_wrapper.sh
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+#
+# This is a tool to help remove packages from packagecloud.io.
+# It utilizes the package_cloud utility provided by packagecloud.io
+#
+# Depends on:
+# 1) package cloud gem (detects absence and installs it)
+#
+# Requires:
+# 1) PKG_CLOUD_TOKEN variable exported
+# 2) To properly install package_cloud when not found, it requires: ruby gcc gcc-c++ ruby-devel
+#
+# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Author : Pavlos Emm. Katsoulakis (paul@netdata.cloud)
+#shellcheck disable=SC2068,SC2145
+set -e
+PKG_CLOUD_CONFIG="$HOME/.package_cloud_configuration.cfg"
+
+# Fail unless we are run from the top level directory of the netdata git repository
+TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
+CWD=$(git rev-parse --show-cdup)
+if [ -n "$CWD" ] || [ ! "${TOP_LEVEL}" == "netdata" ]; then
+ echo "Run as .github/scripts/$(basename "$0") from top level directory of netdata git repository"
+  echo "package_cloud wrapper aborted"
+ exit 1
+fi
+
+# Install dependency if not there
+if ! command -v package_cloud > /dev/null 2>&1; then
+ echo "No package cloud gem found, installing"
+  gem install -V package_cloud || (echo "Package cloud installation failed. You might want to check if required dependencies are there (ruby gcc gcc-c++ ruby-devel)" && exit 1)
+else
+ echo "Found package_cloud gem, continuing"
+fi
+
+# Check for required token and prepare config
+if [ -z "${PKG_CLOUD_TOKEN}" ]; then
+ echo "Please set PKG_CLOUD_TOKEN to be able to use ${0}"
+ exit 1
+fi
+echo "{\"url\":\"https://packagecloud.io\",\"token\":\"${PKG_CLOUD_TOKEN}\"}" > "${PKG_CLOUD_CONFIG}"
+
+echo "Executing package_cloud with config ${PKG_CLOUD_CONFIG} and parameters $@"
+package_cloud $@ --config="${PKG_CLOUD_CONFIG}"
+
+rm -rf "${PKG_CLOUD_CONFIG}"
+echo "Done!"
diff --git a/.github/scripts/parse_packagecloud_dist_id.py b/.github/scripts/parse_packagecloud_dist_id.py
new file mode 100644
index 0000000000..55ddf4bec5
--- /dev/null
+++ b/.github/scripts/parse_packagecloud_dist_id.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+'''
+Parse the PackageCloud distributions JSON data to get a dist ID for uploads.
+
+This takes the JSON distributions data from PackageCloud on stdin and
+the package format, distribution name and version as arguments, and
+prints the matching distribution ID on stdout. Diagnostics and error
+messages go to stderr so they do not pollute the captured output.
+'''
+
+import json
+import sys
+
+fmt = sys.argv[1] # The package format ('deb' or 'rpm')
+distro = sys.argv[2] # The distro name
+version = sys.argv[3] # The distro version
+# Echo the arguments to stderr so they do not end up in the captured output.
+print(fmt, file=sys.stderr)
+print(distro, file=sys.stderr)
+print(version, file=sys.stderr)
+
+data = json.load(sys.stdin)
+versions = []
+
+for entry in data[fmt]:
+ if entry['display_name'] == distro:
+ versions = entry['versions']
+ break
+
+if not versions:
+    print('Could not find version information for the requested distribution.', file=sys.stderr)
+ sys.exit(-1)
+
+for entry in versions:
+ if entry['version_number'] == version:
+ print(entry['id'])
+ sys.exit(0)
+
+print('Unable to find id for requested version.', file=sys.stderr)
+sys.exit(-1)
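
The workflow's Upload step pipes the packagecloud distributions list into this parser; a rough standalone equivalent, with the API token as a placeholder, looks like:

    # Hypothetical manual lookup of the distribution ID for 'deb' packages on Debian 10.0.
    curl -sS "https://<api-token>:@packagecloud.io/api/v1/distributions.json" \
      | python3 .github/scripts/parse_packagecloud_dist_id.py deb Debian 10.0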
diff --git a/.github/workflows/packaging.yml b/.github/workflows/packaging.yml
new file mode 100644
index 0000000000..2aa4d4c53a
--- /dev/null
+++ b/.github/workflows/packaging.yml
@@ -0,0 +1,129 @@
+---
+# Handles building of binary packages for the agent.
+name: Packages
+on:
+ pull_request:
+ branches:
+ - master
+ - develop
+ workflow_dispatch:
+ inputs:
+ type:
+        description: Package build type
+ default: devel
+ required: true
+ version:
+        description: Package version
+ required: false
+jobs:
+ build:
+ name: Build
+ runs-on: ubuntu-latest
+ env:
+ DOCKER_CLI_EXPERIMENTAL: enabled
+ strategy:
+ matrix:
+ include:
+ - {distro: debian, version: "9", pkgclouddistro: Debian, pkgcloudversion: "9.0", format: deb, base_image: debian, platform: linux/amd64, arch: amd64}
+ - {distro: debian, version: "9", pkgclouddistro: Debian, pkgcloudversion: "9.0", format: deb, base_image: debian, platform: linux/i386, arch: i386}
+ - {distro: debian, version: "10", pkgclouddistro: Debian, pkgcloudversion: "10.0", format: deb, base_image: debian, platform: linux/amd64, arch: amd64}
+ - {distro: debian, version: "10", pkgclouddistro: Debian, pkgcloudversion: "10.0", format: deb, base_image: debian, platform: linux/i386, arch: i386}
+ - {distro: ubuntu, version: "16.04", pkgclouddistro: Ubuntu, pkgcloudversion: "16.04", format: deb, base_image: ubuntu, platform: linux/amd64, arch: amd64}
+ - {distro: ubuntu, version: "16.04", pkgclouddistro: Ubuntu, pkgcloudversion: "16.04", format: deb, base_image: ubuntu, platform: linux/i386, arch: i386}
+ - {distro: ubuntu, version: "18.04", pkgclouddistro: Ubuntu, pkgcloudversion: "18.04", format: deb, base_image: ubuntu, platform: linux/amd64, arch: amd64}
+ - {distro: ubuntu, version: "18.04", pkgclouddistro: Ubuntu, pkgcloudversion: "18.04", format: deb, base_image: ubuntu, platform: linux/i386, arch: i386}
+ - {distro: ubuntu, version: "20.04", pkgclouddistro: Ubuntu, pkgcloudversion: "20.04", format: deb, base_image: ubuntu, platform: linux/amd64, arch: amd64}
+ - {distro: ubuntu, version: "20.10", pkgclouddistro: Ubuntu, pkgcloudversion: "20.10", format: deb, base_image: ubuntu, platform: linux/amd64, arch: amd64}
+ - {distro: centos, version: "7", pkgclouddistro: Enterprise Linux, pkgcloudversion: "7.0", format: rpm, base_image: centos, platform: linux/amd64, arch: amd64}
+ - {distro: centos, version: "8", pkgclouddistro: Enterprise Linux, pkgcloudversion: "8.0", format: rpm, base_image: centos, platform: linux/amd64, arch: amd64}
+ - {distro: fedora, version: "32", pkgclouddistro: Fedora, pkgcloudversion: "32.0", format: rpm, base_image: fedora, platform: linux/amd64, arch: amd64}
+ - {distro: fedora, version: "33", pkgclouddistro: Fedora, pkgcloudversion: "33.0", format: rpm, base_image: fedora, platform: linux/amd64, arch: amd64}
+ - {distro: opensuse, version: "15.2", pkgclouddistro: openSUSE, pkgcloudversion: "15.2", format: rpm, base_image: opensuse/leap, platform: linux/amd64, arch: amd64}
+      # We intentionally disable the fail-fast behavior so that a
+ # build failure for one version doesn't prevent us from publishing
+ # successfully built and tested packages for another version.
+ fail-fast: false
+ steps:
+ - name: Checkout PR # Checkout the PR if it's a PR.
+ if: github.event_name == 'pull_request'
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0 # We need full history for versioning
+ - name: Checkout Tag # Otherwise check out the tag that triggered this.
+        if: github.event_name == 'workflow_dispatch'
+ uses: actions/checkout@v2
+ with:
+          ref: ${{ github.event.ref }}
+ fetch-depth: 0 # We need full history for versioning
+ - name: Check Base Branch
+ run: |
+ if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ echo "runtype=${{ github.event.inputs.type }}" >> $GITHUB_ENV
+ case "${{ github.event.inputs.type }}" in
+ "release")
+ echo "repo=${{ secrets.PACKAGE_CLOUD_REPO }}" >> $GITHUB_ENV
+ echo "pkg_version=${{ github.event.inputs.version }}" >> $GITHUB_ENV
+ echo "pkg_retention_days=365" >> $GITHUB_ENV
+ ;;
+ "nightly")
+ echo "repo=${{ secrets.PACKAGE_CLOUD_REPO }}-edge" >> $GITHUB_ENV
+ echo "pkg_version=${{ github.event.inputs.version }}" >> $GITHUB_ENV
+ echo "pkg_retention_days=30" >> $GITHUB_ENV
+ ;;
+ *)
+ echo "repo=${{ secrets.PACKAGE_CLOUD_REPO }}-devel" >> $GITHUB_ENV
+ echo "pkg_version=$(git describe | sed -e 's/^v//')" >> $GITHUB_ENV
+ echo "pkg_retention_days=30" >> $GITHUB_ENV
+ ;;
+ esac
+ else
+ echo "runtype=test" >> $GITHUB_ENV
+ echo "pkg_version=$(cut -d'-' -f 1 packaging/version | sed -e 's/^v//')" >> $GITHUB_ENV
+ fi
+ - name: Setup QEMU
+ if: matrix.platform != 'linux/amd64'
+ uses: docker/setup-qemu-action@v1
+ - name: Setup Buildx
+ uses: docker/setup-buildx-action@v1
+ - name: Prepare Docker Environment
+ shell: bash
+ run: |
+ echo '{"cgroup-parent": "/actions_job", "experimental": true}' | sudo tee /etc/docker/daemon.json 2>/dev/null
+ sudo service docker restart
+ - name: Build Packages
+ uses: docker/build-push-action@v2
+ with:
+ platforms: ${{ matrix.platform }}
+ file: packaging/Dockerfile.packager
+          tags: local/package-builder:${{ matrix.distro }}${{ matrix.version }}
+ push: false
+ load: true
+ build-args: |
+ ARCH=${{ matrix.arch }}
+ DISTRO=${{ matrix.distro }}
+ TEST_BASE=${{ matrix.base_image }}
+ DISTRO_VERSION=${{ matrix.version }}
+ PKG_VERSION=${{ env.pkg_version }}
+ - name: Extract Packages
+ shell: bash
+ run: |
+ mkdir -p artifacts
+ docker run --platform ${{ matrix.platform }} -v $PWD/artifacts:/artifacts local/package-builder:${{ matrix.distro }}${{ matrix.version }}
+ - name: Upload
+        if: env.runtype == 'release' || env.runtype == 'nightly' || env.runtype == 'devel'
+ shell: bash
+ run: |
+ # This figures out the distribution ID for the upload.
+ DIST_ID="$(curl https://${{ secrets.PACKAGE_CLOUD_API_TOKEN }}:@packagecloud.io/api/v1/distributions.json | python3 .github/scripts/parse_packagecloud_dist_id.py ${{ matrix.format }} ${{ matrix.pkgclouddistro }} ${{ matrix.pkgcloudversion }})"
+          for pkgfile in artifacts/*.${{ matrix.format }} ; do
+            curl -F "package[distro_version_id]=${DIST_ID}" \
+                 -F "package[package_file]=@${pkgfile}" \
+                 https://${{ secrets.PACKAGE_CLOUD_API_TOKEN }}:@packagecloud.io/api/v1/repos/${{ env.repo }}/packages.json || exit 1
+          done
+ - name: Clean
+        if: env.runtype == 'release' || env.runtype == 'nightly' || env.runtype == 'devel'
+ shell: bash
+ env:
+ REPO: ${{ env.repo }}
+ PKG_CLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_TOKEN }}
+ PACKAGE_CLOUD_RETENTION_DAYS: ${{ env.pkg_retention_days }}
+ run: .github/scripts/old_package_purging.sh
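
A manual run of this workflow feeds the Check Base Branch logic above through the workflow_dispatch inputs. One way to trigger a nightly build, sketched with the GitHub CLI and assuming gh workflow run is available (the version value is a placeholder):

    # Hypothetical manual dispatch of a nightly package build.
    gh workflow run packaging.yml --ref master -f type=nightly -f version=1.29.0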