authorJan Holthuis <jan.holthuis@ruhr-uni-bochum.de>2021-04-13 19:03:17 +0200
committerJan Holthuis <jan.holthuis@ruhr-uni-bochum.de>2021-04-14 20:58:18 +0200
commit2b3b915b1a8030e84f63157b60f1d25225213ae1 (patch)
tree6e91f488777b018d4bf38b2051423c9ca7a400a6
parente026044bc4bbb981acf57c8a2c90dd4029a3dd15 (diff)
CI: Add new deploy.py script
Instead of passing directories and variables around (which is error-prone and complicated), this script just prepares a directory structure that can then be rsync'ed to the server.
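For illustration (the flag names come from the script below; the slug, path pattern, URL, artifact path and rsync target are placeholder values), a CI job could run roughly:

    python3 tools/deploy.py prepare-deployment \
        --slug win64 \
        --dest-path "snapshots/{branch}/{filename}" \
        --dest-url "https://downloads.example.org" \
        build/artifact.zip
    rsync -r deploy/ user@downloads.example.org:/srv/downloads/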
-rw-r--r--  tools/deploy.py  327
1 file changed, 327 insertions, 0 deletions
diff --git a/tools/deploy.py b/tools/deploy.py
new file mode 100644
index 0000000000..3df9b105a8
--- /dev/null
+++ b/tools/deploy.py
@@ -0,0 +1,327 @@
+#!/usr/bin/env python3
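+"""Prepare build artifacts and metadata for deployment to the download server.
+
+The "prepare-deployment" subcommand copies a build artifact (together with a
+.sha256sum file and some metadata) into a local output directory that mirrors
+the layout on the download server, and the "generate-manifest" subcommand
+collects the per-artifact metadata into a manifest.json file. The resulting
+directory can then be rsync'ed to the server as-is.
+"""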
+import argparse
+import datetime
+import functools
+import hashlib
+import json
+import os
+import pathlib
+import shutil
+import subprocess
+import sys
+import urllib.parse
+import urllib.request
+
+
+def url_fetch(url, headers=None, **kwargs):
+ """Make a web request to the given URL and return the response object."""
+ request_headers = {
+ # Override the User-Agent because our download server seems to block
+ # requests with the default UA value and responds "403 Forbidden".
+ "User-Agent": (
+ "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:86.0) Gecko/20100101 "
+ "Firefox/86.0"
+ ),
+ }
+ if headers:
+ request_headers.update(headers)
+ req = urllib.request.Request(url, headers=request_headers, **kwargs)
+ return urllib.request.urlopen(req, timeout=10)
+
+
+def url_exists(url):
+ """Make a HEAD request to the URL and check if the response is "200 OK"."""
+ try:
+ resp = url_fetch(url, method="HEAD")
+ except IOError:
+ return False
+ return resp.status == 200
+
+
+def url_download_json(url):
+ """Returns the JSON object from the given URL or return None."""
+ try:
+ resp = url_fetch(url)
+ manifest_data = resp.read().decode()
+ except IOError:
+ return None
+
+ return json.loads(manifest_data)
+
+
+def sha256(file_path):
+ """Returns the sha256 hexdigest for a file."""
+ with open(file_path, mode="rb") as fp:
+ read_chunk = functools.partial(fp.read, 1024)
+ m = hashlib.sha256()
+ for data in iter(read_chunk, b""):
+ m.update(data)
+ return m.hexdigest()
+
+
+def git_info(info, path="."):
+ """Return the checked out git branch for the given path."""
+ if info == "branch":
+ cmd = ("git", "rev-parse", "--abbrev-ref", "HEAD")
+ elif info == "commit":
+ cmd = ("git", "rev-parse", "HEAD")
+ elif info == "describe":
+ cmd = ("git", "describe")
+ else:
+ raise ValueError("Invalid git info type!")
+
+ return subprocess.check_output(
+ cmd,
+ cwd=path,
+ encoding="utf-8",
+ ).strip()
+
+
+def tree(path):
+    """Yield all directory and file paths below path, relative to path."""
+ for dirpath, dirnames, filenames in os.walk(top=path):
+ relpath = os.path.relpath(dirpath, start=path)
+ if relpath != ".":
+ yield relpath
+ for filename in filenames:
+ yield os.path.join(relpath, filename)
+
+
+def prepare_deployment(args):
+ # Get artifact and build metadata
+ file_stat = os.stat(args.file)
+ file_sha256 = sha256(args.file)
+
+ try:
+ commit_id = os.environ["GITHUB_SHA"]
+ except KeyError:
+ commit_id = git_info("commit")
+
+ metadata = {
+ "commit_id": commit_id,
+ "file_size": file_stat.st_size,
+ "file_date": datetime.datetime.fromtimestamp(
+ file_stat.st_ctime
+ ).isoformat(),
+ "sha256": file_sha256,
+ }
+
+ if os.getenv("CI") == "true":
+ github_run_id = os.getenv("GITHUB_RUN_ID")
+ github_server_url = os.getenv("GITHUB_SERVER_URL")
+ github_repository = os.getenv("GITHUB_REPOSITORY")
+ metadata.update(
+ {
+ "commit_url": (
+ f"{github_server_url}/{github_repository}/"
+ f"commit/{commit_id}"
+ ),
+ "build_log_url": (
+ f"{github_server_url}/{github_repository}/actions/"
+ f"runs/{github_run_id}"
+ ),
+ }
+ )
+
+ # Build destination path scheme
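+    # The pattern may use the placeholders {filename}, {ext}, {branch},
+    # {commit_id}, {describe} and {slug} (see the format() call below),
+    # e.g. a hypothetical "snapshots/{branch}/{filename}".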
+ print(f"Destination path pattern: {args.dest_path}")
+ destpath = args.dest_path.format(
+ filename=os.path.basename(args.file),
+ ext=os.path.splitext(args.file)[1],
+ branch=git_info("branch"),
+ commit_id=commit_id,
+ describe=git_info("describe"),
+ slug=args.slug,
+ )
+ print(f"Destination path: {destpath}")
+
+ # Move files to deploy in place and create sha256sum file
+ output_destpath = os.path.join(args.output_dir, destpath)
+ os.makedirs(os.path.dirname(output_destpath), exist_ok=True)
+ shutil.copy2(args.file, output_destpath)
+
+ output_filename = os.path.basename(destpath)
+ with open(f"{output_destpath}.sha256sum", mode="w") as fp:
+        # Two spaces between digest and name, matching sha256sum's own output
+        fp.write(f"{file_sha256}  {output_filename}\n")
+
+ metadata.update(
+ {
+ "file_url": f"{args.dest_url}/{destpath}",
+ "sha256_url": f"{args.dest_url}/{destpath}.sha256sum",
+ }
+ )
+
+ # Show metadata and files to deploy
+ print("Metadata: ", json.dumps(metadata, indent=2, sort_keys=True))
+ print("Files:")
+ for path in tree(args.output_dir):
+ print(path)
+
+ # Write metadata to GitHub Actions step output, so that it can be used for
+ # manifest creation in the final job after all builds finished.
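+    # (This emits a workflow command of the form
+    # "::set-output name=artifact-<slug>::<metadata JSON>" on stdout.)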
+ if os.getenv("CI") == "true":
+ # Set GitHub Actions job output
+ print(
+ "::set-output name=artifact-{}::{}".format(
+ args.slug, json.dumps(metadata)
+ )
+ )
+
+ return 0
+
+
+def collect_manifest_data(job_data):
+ """Parse the job metadata dict and return the manifest data."""
+ job_result = job_data["result"]
+ print(f"Build job result: {job_result}")
+ assert job_result == "success"
+
+ manifest_data = {}
+ for output_name, output_data in job_data["outputs"].items():
+ # Filter out unrelated job outputs that don't start with "artifact-".
+ prefix, _, slug = output_name.partition("-")
+ if prefix != "artifact" or not slug:
+ print(f"Ignoring output '{output_name}'...")
+ continue
+ artifact_data = json.loads(output_data)
+
+ url = artifact_data["file_url"]
+
+        # Make sure that the file actually exists on the download server
+        if not url_exists(url):
+            raise LookupError(f"Unable to find URL '{url}' on remote server")
+
+ manifest_data[slug] = artifact_data
+
+ return manifest_data
+
+
+def generate_manifest(args):
+    try:
+        commit_id = os.environ["GITHUB_SHA"]
+    except KeyError:
+        commit_id = git_info("commit")
+
+ format_data = {
+ "branch": git_info("branch"),
+ "commit_id": commit_id,
+ "describe": git_info("describe"),
+ }
+
+ # Build destination path scheme
+ print(f"Destination path pattern: {args.dest_path}")
+ destpath = args.dest_path.format_map(format_data)
+ print(f"Destination path: {destpath}")
+
+ # Create the deployment directory
+ output_destpath = os.path.join(args.output_dir, destpath)
+ os.makedirs(os.path.dirname(output_destpath), exist_ok=True)
+
+ # Parse the JOB_DATA JSON data, generate the manifest data and print it
+ job_data = json.loads(os.environ["JOB_DATA"])
+ manifest_data = collect_manifest_data(job_data)
+ print("Manifest:", json.dumps(manifest_data, indent=2, sort_keys=True))
+
+ # Write the manifest.json for subsequent deployment to the server
+ with open(output_destpath, mode="w") as fp:
+ json.dump(manifest_data, fp, indent=2, sort_keys=True)
+
+    # If possible, check if the remote manifest is the same as our local one
+    remote_manifest_data = None
+    if args.dest_url:
+        # Check if the generated manifest.json file differs from the one
+        # that is currently deployed (None if it cannot be fetched).
+        manifest_url = f"{args.dest_url}/{destpath}"
+        manifest_url = manifest_url.format_map(format_data)
+        remote_manifest_data = url_download_json(manifest_url)
+
+ # Skip deployment if the remote manifest is the same as the local one.
+ if manifest_data != remote_manifest_data:
+ print("Remote manifest differs from local version.")
+ if os.getenv("CI") == "true":
+ with open(os.environ["GITHUB_ENV"], mode="a") as fp:
+ fp.write("MANIFEST_DIRTY=1\n")
+ else:
+ print("Remote manifest is the same as local version.")
+
+ print("Files:")
+ for path in tree(args.output_dir):
+ print(path)
+
+ return 0
+
+
+def main(argv=None):
+ parser = argparse.ArgumentParser()
+    subparsers = parser.add_subparsers(dest="command", required=True)
+
+    artifact_parser = subparsers.add_parser(
+        "prepare-deployment",
+        help="Prepare deployment and collect artifact metadata from file",
+    )
+ artifact_parser.set_defaults(cmd=prepare_deployment)
+ artifact_parser.add_argument(
+ "--slug",
+ action="store",
+ required=True,
+ help="Artifact identifier for the website's download page",
+ )
+ artifact_parser.add_argument(
+ "--output-dir",
+ action="store",
+ default="deploy",
+ help="Directory to write output to (default: 'deploy')",
+ )
+ artifact_parser.add_argument(
+ "--dest-path",
+ action="store",
+ required=True,
+ help="Destination path inside the output directory",
+ )
+ artifact_parser.add_argument(
+ "--dest-url",
+ action="store",
+ required=True,
+ help="Destination URL prefix",
+ )
+ artifact_parser.add_argument(
+ "file", type=pathlib.Path, help="Local file to deploy"
+ )
+
+ manifest_parser = subparsers.add_parser(
+ "generate-manifest",
+ help="Collect artifact metadata and generate manifest.json file",
+ )
+ manifest_parser.set_defaults(cmd=generate_manifest)
+ manifest_parser.add_argument(
+ "--output-dir",
+ action="store",
+ default="deploy",
+ help="Directory to write output to (default: 'deploy')",
+ )
+ manifest_parser.add_argument(
+ "--dest-path",
+ action="store",
+ required=True,
+ help="Destination path inside the output directory",
+ )
+ manifest_parser.add_argument(
+ "--dest-url", action="store", help="Destination URL prefix"
+ )
+
+ args = parser.parse_args(argv)
+
+ if os.path.exists(args.output_dir):
+ if not os.path.isdir(args.output_dir):
+ raise OSError("Output dir is not a directory!")
+ if os.listdir(args.output_dir):
+ raise OSError("Output dir is not empty!")
+
+ return args.cmd(args)
+
+
+if __name__ == "__main__":
+ sys.exit(main())