Diffstat (limited to 'ci/report')
-rwxr-xr-x  ci/report/build.sh                       |  12
-rwxr-xr-x  ci/report/build_local.sh                 |  12
-rwxr-xr-x  ci/report/download_workflow_artifact.py  | 235
-rwxr-xr-x  ci/report/final_result.py                |  35
-rwxr-xr-x  ci/report/report_builder.py              | 213
5 files changed, 507 insertions, 0 deletions
diff --git a/ci/report/build.sh b/ci/report/build.sh
new file mode 100755
index 00000000..aca5531b
--- /dev/null
+++ b/ci/report/build.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# Run this script to generate a report from the artifacts of a GitHub workflow run
+
+set -e
+
+python3 -m venv ~/env-builder
+source ~/env-builder/bin/activate
+pip3 install junitparser
+pip3 install junit2html
+
+./ci/report/report_builder.py --folder ./results thin-edge ci_pipeline.yml
diff --git a/ci/report/build_local.sh b/ci/report/build_local.sh
new file mode 100755
index 00000000..2af08917
--- /dev/null
+++ b/ci/report/build_local.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# Run this script to generate a report locally (it downloads the workflow artifacts first)
+
+set -e
+
+python3 -m venv ~/env-builder
+source ~/env-builder/bin/activate
+pip3 install junitparser
+pip3 install junit2html
+
+./ci/report/report_builder.py --folder ./results thin-edge ci_pipeline.yml --download
diff --git a/ci/report/download_workflow_artifact.py b/ci/report/download_workflow_artifact.py
new file mode 100755
index 00000000..38ced6a0
--- /dev/null
+++ b/ci/report/download_workflow_artifact.py
@@ -0,0 +1,235 @@
+#!/usr/bin/python3
+"""Download latest thin-edge build artifacts from GitHub.
+
+To avoid hitting GitHub API rate limits, set a GitHub token in $THEGHTOKEN.
+See https://github.com/settings/tokens to generate a token with the repo and
+workflow scopes.
+
+See also: https://docs.github.com/en/rest/reference/actions#download-an-artifact
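+
+Example (positional arguments are the GitHub user/org and the workflow file):
+    ./download_workflow_artifact.py thin-edge ci_pipeline.yml --filter results --ignore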
+"""
+
+import argparse
+import json
+import os
+import os.path
+import requests
+from requests.auth import HTTPBasicAuth
+
+
+def download_artifact(
+ url: str, name: str, token: str, user: str, workflowname: str, output: str = None
+) -> None:
+ """Download the artifact and store it as a zip file"""
+ failhard = False
+ headers = {"Accept": "application/vnd.github.v3+json"}
+ auth = HTTPBasicAuth(user, token)
+
+ assert workflowname.endswith(".yml")
+
+ workflowname = os.path.splitext(workflowname)[0]
+
+ print(f"Will try to download file {name}.zip")
+
+ if output:
+ artifact_filename = os.path.join(os.path.abspath(output), name + ".zip")
+ else:
+ artifact_filename = f"{workflowname}_{name}.zip"
+
+ if os.path.exists(artifact_filename):
+ print(f"Skipped {artifact_filename} as it is already there")
+ if failhard:
+ raise SystemError("File already there!")
+ return
+
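+    # Stream the download in chunks so large archives are not held in memory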
+ req = requests.get(url, auth=auth, headers=headers, stream=True)
+ req.raise_for_status()
+
+ with open(os.path.expanduser(artifact_filename), "wb") as thefile:
+ for chunk in req.iter_content(chunk_size=128):
+ thefile.write(chunk)
+ print(f"Downloaded {name}.zip as {artifact_filename}")
+
+
+def get_artifacts_for_runid(
+ runid: int,
+ token: str,
+ user: str,
+ myfilter: str,
+ workflowname: str,
+ output: str = None,
+) -> None:
+ """Download artifacts for a given runid"""
+
+ print("Getting artifacts of workflow")
+
+ url = f"https://api.github.com/repos/{user}/thin-edge.io/actions/runs/{runid}/artifacts"
+ headers = {"Accept": "application/vnd.github.v3+json"}
+ auth = HTTPBasicAuth(user, token)
+
+ req = requests.get(url, auth=auth, headers=headers)
+ req.raise_for_status()
+
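+    # Response shape (abridged): {"total_count": N,
+    #   "artifacts": [{"name": ..., "archive_download_url": ...}, ...]}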
+ text = json.loads(req.text)
+
+ try:
+ artifacts = text["artifacts"]
+ except KeyError as err:
+ print("Issue in response:")
+ raise err
+
+ print(f"Found {len(artifacts)} artifacts")
+
+ for artifact in artifacts:
+ try:
+ artifact_name = artifact["name"]
+ artifact_url = artifact["archive_download_url"]
+ except KeyError as err:
+ print("Issue in response:")
+ raise err
+
+ print("Found:", artifact_name)
+        # Download everything when no filter is given, otherwise only
+        # artifacts whose name starts with the filter prefix
+        if myfilter is None or artifact_name.startswith(myfilter):
+            download_artifact(
+                artifact_url, artifact_name, token, user, workflowname, output
+            )
+
+
+def get_workflow(token: str, user: str, name: str) -> int:
+ """
+ Derive the last run ID of a GitHub workflow.
+
+ :param token: GitHub token
+ :param user: GitHub user name
+ :param name: Name of the workflow
+ :return: ID of the workflow
+ """
+
+ print(f"Getting id of last execution of workflow {name}")
+
+ assert name.endswith(".yml")
+
+ headers = {"Accept": "application/vnd.github.v3+json"}
+ auth = HTTPBasicAuth(user, token)
+    index = 0  # pages 0 and 1 of this GitHub API return identical results
+    param = {"per_page": 1, "page": index}
+
+    # Look up the workflow object; its "id" is needed to query its runs
+ url = f"https://api.github.com/repos/{user}/thin-edge.io/actions/workflows/{name}"
+ req = requests.get(url, params=param, auth=auth, headers=headers)
+ req.raise_for_status()
+
+ stuff = json.loads(req.text)
+
+ wfid = stuff.get("id")
+ if not wfid:
+ raise SystemError(stuff)
+
+ print(f"ID of workflow {name} is {wfid}")
+
+ return wfid
+
+
+def get_valid_run(wfid: int, token: str, user: str, state: str, ignore: bool) -> int:
+ """Download the last valid run of workflow that is in requested state"""
+
+    index = 0  # pages 0 and 1 of this GitHub API return identical results
+ found = False
+ headers = {"Accept": "application/vnd.github.v3+json"}
+ auth = HTTPBasicAuth(user, token)
+
+ url = f"https://api.github.com/repos/{user}/thin-edge.io/actions/workflows/{wfid}/runs"
+
+ print("Getting execution of workflow")
+
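+    # Walk backwards through the run history, one run per page, until a run
+    # with the requested conclusion is found (or ignore is set)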
+ while not found:
+ param = {"per_page": 1, "page": index}
+ req = requests.get(url, params=param, auth=auth, headers=headers)
+ req.raise_for_status()
+
+ response = json.loads(req.text)
+
+ if not response.get("workflow_runs"):
+ print("GOT ERROR:")
+ print(json.dumps(response, indent=" "))
+ raise SystemError
+
+ try:
+ workflow = response["workflow_runs"][0]
+ workflowname = workflow["name"]
+ wfrunid = int(workflow["id"])
+ wfrun = workflow["run_number"]
+ conclusion = workflow["conclusion"]
+ status = workflow["status"]
+ creation = workflow["created_at"]
+ except KeyError as err:
+ print("Issue in response:")
+ raise err
+
+ print("Workflow : ", workflowname)
+ print("Conclusion : ", conclusion)
+ print("ID : ", wfrunid)
+ print("Run : ", wfrun)
+ print("Status :", status)
+ print("Creation :", creation)
+
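+        # Keep the raw run metadata as a JSON file for later inspection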
+ filename = f"{workflowname}_{wfrun}.json"
+ with open(filename, "w") as thefile:
+ thefile.write(json.dumps(response, indent=" "))
+
+ if (state == conclusion) or ignore:
+ found = True
+ else:
+ print(f"Workflow conclusion was {conclusion}. Trying an older one ...")
+ index += 1
+
+ return wfrunid
+
+
+def main():
+ """main entry point"""
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("username", type=str, help="GitHub Username")
+ parser.add_argument("workflowname", type=str, help="Name of workflow")
+    parser.add_argument(
+        "--filter", type=str, help="Download only artifacts whose name starts with this prefix"
+    )
+    parser.add_argument("-o", "--output", type=str, help="Folder to store the downloaded artifacts in")
+ parser.add_argument(
+ "-i", "--ignore", action="store_true", help="Ignore run conclusion"
+ )
+ args = parser.parse_args()
+
+ username = args.username
+ workflowname = os.path.basename(args.workflowname)
+ myfilter = args.filter
+ output = args.output
+ ignore = args.ignore
+
+ token = None
+
+ try:
+ token = os.environ["THEGHTOKEN"]
+ except KeyError:
+ print("Warning: Environment variable THEGHTOKEN not set")
+
+ wfid = get_workflow(token, username, workflowname)
+
+ runid = get_valid_run(wfid, token, username, "success", ignore)
+
+ get_artifacts_for_runid(runid, token, username, myfilter, workflowname, output)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ci/report/final_result.py b/ci/report/final_result.py
new file mode 100755
index 00000000..614b2d7e
--- /dev/null
+++ b/ci/report/final_result.py
@@ -0,0 +1,35 @@
+#!/usr/bin/python3
+
+"""Parse final xml and return an error if there are failures.
+"""
+
+import sys
+from xml.dom.minidom import parse
+
+dom = parse(sys.argv[1])
+
+errors = 0
+failures = 0
+
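+# The junit XML root element carries the aggregated "errors" and "failures"
+# counts as attributes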
+for node in dom.childNodes:
+    length = node.attributes.length
+
+    for i in range(length):
+        attr = node.attributes.item(i)
+
+        print(f"    {attr.name} : {attr.value}")
+        if attr.name == "failures":
+            failures = int(attr.value)
+        if attr.name == "errors":
+            errors = int(attr.value)
+
+print(f"Recorded {errors} errors and {failures} failures in {sys.argv[1]}")
+
+if errors == 0 and failures == 0:
+ print("Passed, no errors.")
+ sys.exit(0)
+else:
+ print("Failed, there are errors in the test run.")
+ sys.exit(1)
diff --git a/ci/report/report_builder.py b/ci/report/report_builder.py
new file mode 100755
index 00000000..c4821049
--- /dev/null
+++ b/ci/report/report_builder.py
@@ -0,0 +1,213 @@
+#!/usr/bin/python3
+
+"""
+Build a complete report for all our runners
+
+Example usage:
+
+python3 -m venv ~/env-builder
+source ~/env-builder/bin/activate
+pip3 install junitparser
+pip3 install junit2html
+
+./report_builder.py thin-edge ci_pipeline.yml
+./report_builder.py thin-edge ci_pipeline.yml --download
+
+TODO Export configuration to separate config file
+
+"""
+
+import argparse
+import os
+import subprocess
+import shutil
+
+
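+# One entry per CI runner: "name" is the folder the archive is unzipped to,
+# "archive" the artifact zip produced by that runner, and "tests" the junit
+# result folders (PySys/pysys_junit_xml_<tag>) expected inside it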
+runners_cfg = [
+ {
+ "name": "results_pysys_offsite_m32sd10a",
+ "repo": "thin-edge",
+ "archive": "results_pysys_offsite_m32sd10a.zip",
+ "tests": [
+ "all",
+ "apt",
+ "apama",
+ "docker",
+ "sm",
+ ],
+ },
+ {
+ "name": "results_pysys_offsite_m32sd10b",
+ "repo": "thin-edge",
+ "archive": "results_pysys_offsite_m32sd10b.zip",
+ "tests": [
+ "all",
+ "apt",
+ "apama",
+ "docker",
+ "sm",
+ ],
+ },
+ {
+ "name": "results_pysys_offsite_m32sd10c",
+ "repo": "thin-edge",
+ "archive": "results_pysys_offsite_m32sd10c.zip",
+ "tests": [
+ "all",
+ "apt",
+ "apama",
+ "docker",
+ "sm",
+ ],
+ },
+ {
+ "name": "results_pysys_offsite_m32sd10d",
+ "repo": "thin-edge",
+ "archive": "results_pysys_offsite_m32sd10d.zip",
+ "tests": [
+ "all",
+ "apt",
+ "apama",
+ "docker",
+ "sm",
+ ],
+ },
+]
+
+
+def download_results(repo, workflow):
+ """Download and unzip results from test workflows"""
+
+ scriptfolder = os.path.dirname(os.path.realpath(__file__))
+
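+    # --filter results: fetch only the results_* artifacts;
+    # --ignore: use the newest run regardless of its conclusion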
+ cmd = (
+ f"{scriptfolder}/download_workflow_artifact.py {repo} {workflow}"
+ + " -o ./ --filter results --ignore"
+ )
+ print(cmd)
+ subprocess.run(cmd, shell=True, check=True)
+
+
+def unpack_reports(runner):
+ """Unpack reports mentioned in the runner configuration"""
+
+ assert os.path.exists(runner["archive"])
+ name = runner["name"]
+ archive = runner["archive"]
+ cmd = f"unzip -q -o -d {name} {archive}"
+ print(cmd)
+ subprocess.run(cmd, shell=True, check=True)
+
+
+def postprocess_runner(runner):
+ """Postprocess results from a runner.
+    Fails if a test folder that is mentioned in the runner configuration
+    is missing.
+ """
+
+ name = runner["name"]
+ tests = runner["tests"]
+
+ print(f"Processing: {name} ")
+
+ tags = ["all", "apt", "apama", "docker", "sm", "analytics"]
+ files = ""
+
+ for tag in tags:
+ if tag in tests:
+ folder = f"{name}/PySys/pysys_junit_xml_{tag}"
+ if os.path.exists(folder):
+ files += f"{name}/PySys/pysys_junit_xml_{tag}/*.xml "
+ else:
+ raise SystemError("Folder Expected", folder)
+
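+    # Merge all junit XML files from this runner into a single <name>.xml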
+ cmd = f"junitparser merge {files} { name }.xml"
+ print(cmd)
+ subprocess.run(cmd, shell=True, check=True)
+
+
+def postprocess(runners):
+ """Create a combined reports from all sources"""
+
+ files = ""
+
+ for runner in runners:
+ name = runner["name"] + ".xml"
+ files += " " + name
+
+ print("Files: ", files)
+
+ # Print summary matrix
+ cmd = f"junit2html --summary-matrix {files}"
+ print(cmd)
+ subprocess.run(cmd, shell=True, check=True)
+
+ # Merge all reports
+ cmd = f"junitparser merge {files} all_reports.xml"
+ print(cmd)
+ subprocess.run(cmd, shell=True, check=True)
+
+ # Build report matrix
+ cmd = f"junit2html --report-matrix report-matrix.html {files}"
+ print(cmd)
+ subprocess.run(cmd, shell=True, check=True)
+
+ # Zip everything
+ cmd = "zip report.zip *.html *.json"
+ print(cmd)
+ subprocess.run(cmd, shell=True, check=True)
+
+
+def main(runners, repo, workflow, folder, download_reports=True):
+ """Main entry point to build the reports"""
+
+ if download_reports:
+        # remove and recreate the working folder so it starts empty
+ shutil.rmtree(folder, ignore_errors=True)
+ os.mkdir(folder)
+ else:
+ # reuse folder with downloaded zip files
+ pass
+
+ os.chdir(folder)
+
+ if download_reports:
+ download_results(repo, workflow)
+
+ for runner in runners:
+ unpack_reports(runner)
+
+ for runner in runners:
+ postprocess_runner(runner)
+
+ postprocess(runners)
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("repo", type=str, help="GitHub repository")
+ parser.add_argument("workflow", type=str, help="Name of workflow")
+ parser.add_argument(
+ "--folder",
+ type=str,
+ help="Working folder (Default ./report )",
+ default="./report",
+ )
+ parser.add_argument("--download", action="store_true", help="Download reports")
+
+ args = parser.parse_args()
+
+ main(
+ runners_cfg,
+ args.repo,
+ args.workflow,
+ folder=args.folder,
+ download_reports=args.download,
+ )