summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorDaniel Schürmann <daschuer@mixxx.org>2021-03-28 23:17:20 +0200
committerGitHub <noreply@github.com>2021-03-28 23:17:20 +0200
commit937d82921b352764becd585995cbe3043f9245ab (patch)
tree5114100996511a79acd19ea8c5782a2519a8bbf8
parent7c519a50ef16d3d5f1515825fdd63129ab68f6fd (diff)
parentc71e64d211243e121b89fe19495576fbf58892cb (diff)
Merge pull request #3748 from mixxxdj/deploy-manifest
Deploy artifact metadata manifest
-rw-r--r--.github/workflows/build.yml59
-rwxr-xr-xtools/deploy.sh11
-rw-r--r--tools/generate_download_metadata.py211
3 files changed, 277 insertions, 4 deletions
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 1025e9f5c6..a7e5cb2338 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -74,6 +74,7 @@ jobs:
buildenv_script: tools/macos_buildenv.sh
artifacts_name: macOS DMG
artifacts_path: build/*.dmg
+ artifacts_slug: macos-macosintel
qt_qpa_platform: offscreen
- name: Windows 2019 (MSVC)
os: windows-2019
@@ -99,6 +100,7 @@ jobs:
buildenv_script: tools/windows_buildenv.bat
artifacts_name: Windows Installer
artifacts_path: build/*.msi
+ artifacts_slug: windows-windows64
qt_qpa_platform: windows
env:
@@ -110,6 +112,9 @@ jobs:
runs-on: ${{ matrix.os }}
name: ${{ matrix.name }}
+ outputs:
+ artifact-macos-macosintel: ${{ steps.generate_artifact_metadata.outputs.artifact-macos-macosintel }}
+ artifact-windows-windows64: ${{ steps.generate_artifact_metadata.outputs.artifact-windows-windows64 }}
steps:
# sccache's handling of the /fp:fast MSVC compiler option is broken, so use our fork with the fix.
@@ -327,13 +332,24 @@ jobs:
pacman -S --noconfirm coreutils bash rsync openssh
Add-Content -Path "$Env:GITHUB_ENV" -Value "PATH=$Env:PATH"
+ - name: "Generate Artifact Metadata"
+ # Generate metadata for file artifact and write it to the job output
+ # using the artifacts_slug value. This also sets the DEPLOY_DIR
+ # environment variable that is used in the deploy.sh script in the next
+ # step.
+ if: github.event_name == 'push'
+ run: python3 tools/generate_download_metadata.py artifact ${{ matrix.artifacts_path }} "${{ matrix.artifacts_slug }}"
+ env:
+ DEPLOY_BASEURL: "https://downloads.mixxx.org"
+ DESTDIR: builds/{git_branch}/${{ runner.os }}
+
- name: "[macOS/Windows] Upload build to downloads.mixxx.org"
# skip deploying Ubuntu builds to downloads.mixxx.org because these are deployed to the PPA
if: runner.os != 'Linux' && github.event_name == 'push' && env.SSH_PASSWORD != null
run: bash tools/deploy.sh ${{ matrix.artifacts_path }}
env:
- DESTDIR: public_html/downloads/builds
- OS: ${{ runner.os }}
+ DESTDIR: public_html/downloads/
+ DEPLOY_ONLY: 0
SSH_HOST: downloads-hostgator.mixxx.org
SSH_KEY: packaging/certificates/downloads-hostgator.mixxx.org.key
SSH_PASSWORD: ${{ secrets.DOWNLOADS_HOSTGATOR_DOT_MIXXX_DOT_ORG_KEY_PASSWORD }}
@@ -352,3 +368,42 @@ jobs:
with:
name: ${{ matrix.artifacts_name }}
path: ${{ matrix.artifacts_path }}
+
+ update_manifest:
+ name: "Update manifest file on download server"
+ runs-on: ubuntu-latest
+ needs: build
+ steps:
+ - name: "Check out repository"
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+
+ - name: "Collect Artifacts Metadata & Write Manifest"
+ # Retrieve the metadata from the matrix job's outputs, merge them into a
+ # single JSON document and then deploy to the server.
+ if: github.event_name == 'push' && env.SSH_PASSWORD != null
+ run: python3 tools/generate_download_metadata.py manifest
+ env:
+ JOB_DATA: ${{ toJSON(needs.build) }}
+ MANIFEST_URL: "https://downloads.mixxx.org/builds/{git_branch}/manifest.json"
+ DESTDIR: "builds/{git_branch}"
+ SSH_PASSWORD: ${{ secrets.DOWNLOADS_HOSTGATOR_DOT_MIXXX_DOT_ORG_KEY_PASSWORD }}
+
+ - name: "Deploy Manifest"
+ if: github.event_name == 'push' && env.SSH_PASSWORD != null && env.MANIFEST_DIRTY != null && env.MANIFEST_DIRTY != '0'
+ run: bash tools/deploy.sh manifest.json
+ env:
+ DESTDIR: public_html/downloads/
+ DEPLOY_ONLY: 1
+ SSH_HOST: downloads-hostgator.mixxx.org
+ SSH_KEY: packaging/certificates/downloads-hostgator.mixxx.org.key
+ SSH_PASSWORD: ${{ secrets.DOWNLOADS_HOSTGATOR_DOT_MIXXX_DOT_ORG_KEY_PASSWORD }}
+ SSH_USER: mixxx
+ UPLOAD_ID: ${{ github.run_id }}
+
+ - name: "Trigger Netlify build"
+ if: env.NETLIFY_BUILD_HOOK != null && env.MANIFEST_DIRTY != null && env.MANIFEST_DIRTY != '0'
+ run: curl -X POST -d '{}' ${{ env.NETLIFY_BUILD_HOOK }}
+ env:
+ NETLIFY_BUILD_HOOK: ${{ secrets.NETLIFY_BUILD_HOOK }}
diff --git a/tools/deploy.sh b/tools/deploy.sh
index 328f160ac3..ec2b417be8 100755
--- a/tools/deploy.sh
+++ b/tools/deploy.sh
@@ -9,12 +9,12 @@ set -eu -o pipefail
[ -z "${SSH_PASSWORD}" ] && echo "Please set the SSH_PASSWORD env var." >&2 && exit 1
[ -z "${SSH_USER}" ] && echo "Please set the SSH_USER env var." >&2 && exit 1
[ -z "${UPLOAD_ID}" ] && echo "Please set the UPLOAD_ID env var." >&2 && exit 1
-[ -z "${OS}" ] && echo "Please set the OS env var." >&2 && exit 1
[ -z "${DESTDIR}" ] && echo "Please set the DESTDIR env var." >&2 && exit 1
+[ -z "${DEPLOY_DIR}" ] && echo "Please set DEPLOY_DIR env var." >&2 && exit 1
SSH="ssh -i ${SSH_KEY} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
GIT_BRANCH="$(git rev-parse --abbrev-ref HEAD)"
-DEST_PATH="${DESTDIR}/${GIT_BRANCH}/${OS}"
+DEST_PATH="${DESTDIR}/${DEPLOY_DIR}"
TMP_PATH="${DESTDIR}/.tmp/${UPLOAD_ID}"
echo "Deploying to $TMP_PATH, then to $DEST_PATH."
@@ -42,6 +42,13 @@ do
# This prevents users from downloading an incomplete file from the server which has not yet finished deploying.
echo "Deploying artifact: ${FILEPATH}"
FILENAME="$(basename "${FILEPATH}")"
+
+ if [ "${DEPLOY_ONLY}" -eq 1 ]
+ then
+ rsync -e "${SSH}" -r --delete-after "${FILEPATH}" "${SSH_USER}@${SSH_HOST}:${DEST_PATH}"
+ continue
+ fi
+
FILENAME_HASH="${FILENAME}.sha256sum"
FILEPATH_HASH="${FILEPATH}.sha256sum"
diff --git a/tools/generate_download_metadata.py b/tools/generate_download_metadata.py
new file mode 100644
index 0000000000..f3dbc954bf
--- /dev/null
+++ b/tools/generate_download_metadata.py
@@ -0,0 +1,211 @@
+#!/usr/bin/env python3
+import argparse
+import datetime
+import functools
+import glob
+import hashlib
+import json
+import os
+import posixpath
+import subprocess
+import urllib.parse
+import urllib.request
+
+
+def url_fetch(url, headers=None, **kwargs):
+ """Make a web request to the given URL and return the response object."""
+ request_headers = {
+ # Override the User-Agent because our download server seems to block
+ # requests with the default UA value and responds "403 Forbidden".
+ "User-Agent": (
+ "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:86.0) Gecko/20100101 "
+ "Firefox/86.0"
+ ),
+ }
+ if headers:
+ request_headers.update(headers)
+ req = urllib.request.Request(url, headers=request_headers, **kwargs)
+ return urllib.request.urlopen(req, timeout=10)
+
+
+def url_exists(url):
+ """Make a HEAD request to the URL and check if the response is "200 OK"."""
+ try:
+ resp = url_fetch(url, method="HEAD")
+ except IOError:
+ return False
+ return resp.status == 200
+
+
+def url_download_json(url):
+    """Return the JSON object from the given URL, or None on failure."""
+ try:
+ resp = url_fetch(url)
+ manifest_data = resp.read().decode()
+ except IOError:
+ return None
+
+ return json.loads(manifest_data)
+
+
+def sha256(file_path):
+ """Returns the sha256 hexdigest for a file."""
+ with open(file_path, mode="rb") as fp:
+ read_chunk = functools.partial(fp.read, 1024)
+ m = hashlib.sha256()
+ for data in iter(read_chunk, b""):
+ m.update(data)
+ return m.hexdigest()
+
+
+def find_git_branch(path="."):
+ """Return the checked out git branch for the given path."""
+ return subprocess.check_output(
+ ("git", "rev-parse", "--abbrev-ref", "HEAD"),
+ cwd=path,
+ encoding="utf-8",
+ ).strip()
+
+
+def generate_file_metadata(file_path, destdir):
+ """
+    Generate the file metadata for file_path.
+
+    The destdir argument is used for generating the download URL.
+ """
+ file_stat = os.stat(file_path)
+ file_sha256 = sha256(file_path)
+ file_name = os.path.basename(file_path)
+
+ commit_id = os.environ["GITHUB_SHA"]
+ github_run_id = os.environ["GITHUB_RUN_ID"]
+ github_server_url = os.environ["GITHUB_SERVER_URL"]
+ github_repository = os.environ["GITHUB_REPOSITORY"]
+ baseurl = os.environ["DEPLOY_BASEURL"]
+
+ return {
+ "commit_id": commit_id,
+ "commit_url": (
+ f"{github_server_url}/{github_repository}/commit/{commit_id}"
+ ),
+ "build_log_url": (
+ f"{github_server_url}/{github_repository}/actions/"
+ f"runs/{github_run_id}"
+ ),
+ "file_url": f"{baseurl}/{destdir}/{file_name}",
+ "file_size": file_stat.st_size,
+ "file_date": datetime.datetime.fromtimestamp(
+ file_stat.st_ctime
+ ).isoformat(),
+ "sha256": file_sha256,
+ "sha256_url": f"{baseurl}/{destdir}/{file_name}.sha256sum",
+ }
+
+
+def collect_manifest_data(job_data):
+ """Parse the job metadata dict and return the manifest data."""
+ job_result = job_data["result"]
+ print(f"Build job result: {job_result}")
+ assert job_result == "success"
+
+ manifest_data = {}
+ for output_name, output_data in job_data["outputs"].items():
+ # Filter out unrelated job outputs that don't start with "artifact-".
+ prefix, _, slug = output_name.partition("-")
+ if prefix != "artifact" or not slug:
+ print(f"Ignoring output '{output_name}'...")
+ continue
+ artifact_data = json.loads(output_data)
+
+ url = artifact_data["file_url"]
+
+ # Make sure that the file actually exists on the download server
+ resp = url_fetch(url, method="HEAD")
+ if not resp.status == 200:
+ raise LookupError(f"Unable to find URL '{url}' on remote server")
+
+ manifest_data[slug] = artifact_data
+
+ return manifest_data
+
+
+def main(argv=None):
+ parser = argparse.ArgumentParser()
+ subparsers = parser.add_subparsers()
+ artifact_parser = subparsers.add_parser(
+ "artifact", help="Generate artifact metadata from file"
+ )
+ artifact_parser.add_argument("file")
+ artifact_parser.add_argument("slug")
+ artifact_parser.set_defaults(cmd="artifact")
+
+ manifest_parser = subparsers.add_parser(
+ "manifest",
+ help="Collect artifact metadata and generate manifest.json file",
+ )
+ manifest_parser.set_defaults(cmd="manifest")
+
+ args = parser.parse_args(argv)
+
+ git_branch = find_git_branch()
+ destdir = os.environ["DESTDIR"].format(
+ git_branch=git_branch,
+ )
+
+ if args.cmd == "artifact":
+ # Check that we have exactly one file artifact
+ artifact_paths = glob.glob(args.file)
+ assert len(artifact_paths) == 1
+ file_path = artifact_paths[0]
+
+ # Generate metadata and print it
+ metadata = generate_file_metadata(file_path, destdir)
+ print(json.dumps(metadata, indent=2, sort_keys=True))
+
+ if os.getenv("CI") == "true":
+ # Set GitHub Actions job output
+ print(
+ "::set-output name=artifact-{}::{}".format(
+ args.slug, json.dumps(metadata)
+ )
+ )
+
+ # Set the DEPLOY_DIR variable for the next build step
+ url = urllib.parse.urlparse(metadata["file_url"])
+ deploy_dir = posixpath.dirname(url.path)
+ with open(os.environ["GITHUB_ENV"], mode="a") as fp:
+ fp.write(f"DEPLOY_DIR={deploy_dir}\n")
+ elif args.cmd == "manifest":
+ # Parse the JOB_DATA JSON data, generate the manifest data and print it
+ job_data = json.loads(os.environ["JOB_DATA"])
+ manifest_data = collect_manifest_data(job_data)
+ print(json.dumps(manifest_data, indent=2, sort_keys=True))
+
+ # Write the manifest.json for subsequent deployment to the server
+ with open("manifest.json", mode="w") as fp:
+ json.dump(manifest_data, fp, indent=2, sort_keys=True)
+
+ if os.getenv("CI") == "true":
+ # Check if generated manifest.json file differs from the one that
+ # is currently deployed.
+ manifest_url = os.environ["MANIFEST_URL"].format(
+ git_branch=git_branch
+ )
+ try:
+ remote_manifest_data = url_fetch(manifest_url)
+ except IOError:
+ remote_manifest_data = None
+
+ if manifest_data == remote_manifest_data:
+ return
+
+ # The manifest data is different, so we set the DEPLOY_DIR and
+ # MANIFEST_DIRTY env vars.
+ deploy_dir = os.environ["DESTDIR"].format(git_branch=git_branch)
+ with open(os.environ["GITHUB_ENV"], mode="a") as fp:
+ fp.write(f"DEPLOY_DIR={deploy_dir}\n")
+ fp.write("MANIFEST_DIRTY=1\n")
+
+
+if __name__ == "__main__":
+ main()