summaryrefslogtreecommitdiffstats
path: root/pkgs/common-updater/scripts
diff options
context:
space:
mode:
authorJosé Romildo <malaquias@gmail.com>2022-09-15 15:32:57 -0300
committerJosé Romildo <malaquias@gmail.com>2022-10-01 12:27:09 -0300
commit19a47178461e509bcd3821cea3b92e8deba20a40 (patch)
treef9384e792139c8f7bebf9c01975a1824a699b283 /pkgs/common-updater/scripts
parente78c23cf5b4e3b5534b480ff1d6a534620d6e395 (diff)
directoryListingUpdater: init
Update a package looking for available versions in an html page
Diffstat (limited to 'pkgs/common-updater/scripts')
-rwxr-xr-xpkgs/common-updater/scripts/list-directory-versions65
1 file changed, 65 insertions, 0 deletions
diff --git a/pkgs/common-updater/scripts/list-directory-versions b/pkgs/common-updater/scripts/list-directory-versions
new file mode 100755
index 000000000000..46c9e9d30a5d
--- /dev/null
+++ b/pkgs/common-updater/scripts/list-directory-versions
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+
+import argparse
+import requests
+import os
+import subprocess
+import json
+import re
+from bs4 import BeautifulSoup
+
# Command-line interface.  Defaults for --pname and --attr-path come from
# the UPDATE_NIX_* environment variables exported by the nixpkgs update
# machinery; --pname becomes mandatory when that variable is absent.
parser = argparse.ArgumentParser(
    description="Get all available versions listed for a package in a site."
)

_env_pname = os.environ.get("UPDATE_NIX_PNAME")
parser.add_argument(
    "--pname",
    help="name of the package",
    default=_env_pname,
    required=_env_pname is None,
)
parser.add_argument(
    "--attr-path",
    help="attribute path of the package",
    default=os.environ.get("UPDATE_NIX_ATTR_PATH"),
)
parser.add_argument("--url", help="url of the page that lists the package versions")
parser.add_argument("--file", help="file name for writing debugging information")
+
+
+if __name__ == "__main__":
+ args = parser.parse_args()
+
+ pname = args.pname
+
+ attr_path = args.attr_path or pname
+
+ url = args.url or json.loads(
+ subprocess.check_output(
+ [
+ "nix-instantiate",
+ "--json",
+ "--eval",
+ "-E",
+ f"with import ./. {{}}; dirOf (lib.head {attr_path}.src.urls)",
+ ],
+ text=True,
+ )
+ )
+
+ # print a debugging message
+ if args.file:
+ with open(args.file, "a") as f:
+ f.write(f"# Listing versions for {pname} from {url}\n")
+
+ page = requests.get(url)
+ soup = BeautifulSoup(page.content, "html.parser")
+ links = soup.find_all("a")
+ for link in links:
+ link_url = link.get("href", None)
+ if link_url is not None:
+ match = re.fullmatch(
+ rf"{args.pname}-([\d.]+?(-[\d\w.-]+?)?)(\.tar)?(\.[^.]*)", link_url
+ )
+ if match:
+ print(match.group(1))