import time
import hashlib
import re
import sys
import os
import glob

import requests
import toml
from github import Github, InputGitAuthor
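

# Overview: for each app that opts in, look up the latest upstream release or
# tag on GitHub, recompute asset URLs and sha256 checksums, rewrite
# manifest.toml accordingly, and open a pull request on the app's repository.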

STRATEGIES = ["latest_github_release", "latest_github_tag"]
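
# A source opts into auto-updating via an `autoupdate` section in its
# manifest.toml. An illustrative sketch (field names taken from the code
# below, values made up):
#
#     [resources.sources.main]
#     url = "https://github.com/foo/bar/archive/refs/tags/v1.2.3.tar.gz"
#     sha256 = "..."
#     autoupdate.strategy = "latest_github_release"  # or "latest_github_tag"
#     autoupdate.asset = "tarball"  # default; may also be a regex, or a table of regexes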


GITHUB_LOGIN = open(os.path.dirname(__file__) + "/../../.github_login").read().strip()
GITHUB_TOKEN = open(os.path.dirname(__file__) + "/../../.github_token").read().strip()
GITHUB_EMAIL = open(os.path.dirname(__file__) + "/../../.github_email").read().strip()

github = Github(GITHUB_TOKEN)
author = InputGitAuthor(GITHUB_LOGIN, GITHUB_EMAIL)


def apps_to_run_auto_update_for():
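    """Return the apps eligible for auto-update: flagged as 'working' in
    apps.toml, hosted under the YunoHost-Apps org, present in the local
    .apps_cache, and with at least one source declaring 'autoupdate'."""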
    catalog = toml.load(open(os.path.dirname(__file__) + "/../../apps.toml"))

    apps_flagged_as_working_and_on_yunohost_apps_org = [
        app
        for app, infos in catalog.items()
        if infos["state"] == "working"
        and "/github.com/yunohost-apps" in infos["url"].lower()
    ]

    manifest_tomls = glob.glob(
        os.path.dirname(__file__) + "/../../.apps_cache/*/manifest.toml"
    )
    apps_with_manifest_toml = [path.split("/")[-2] for path in manifest_tomls]

    relevant_apps = list(
        sorted(
            set(apps_flagged_as_working_and_on_yunohost_apps_org)
            & set(apps_with_manifest_toml)
        )
    )

    out = []
    for app in relevant_apps:
        manifest = toml.load(
            os.path.dirname(__file__) + f"/../../.apps_cache/{app}/manifest.toml"
        )
        sources = manifest.get("resources", {}).get("sources", {})
        if any("autoupdate" in source for source in sources.values()):
            out.append(app)
    return out


def filter_and_get_latest_tag(tags):
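    """Drop pre-release tags (rc, beta, alpha, ...) and tags that don't look
    like plain version numbers, then return the highest remaining version."""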
    filter_keywords = ["start", "rc", "beta", "alpha"]
    tags = [t for t in tags if not any(keyword in t for keyword in filter_keywords)]

    for t in tags:
        if not re.match(r"^v?[\d\.]*\d$", t):
            print(f"Ignoring tag {t}, doesn't look like a version number")
    tags = [t for t in tags if re.match(r"^v?[\d\.]*\d$", t)]

    tag_dict = {t: tag_to_int_tuple(t) for t in tags}
    tags = sorted(tags, key=tag_dict.get)
    return tags[-1]


def tag_to_int_tuple(tag):
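    """Parse a tag like 'v1.2.3' into a comparable int tuple, e.g. (1, 2, 3)."""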
    tag = tag.strip("v")
    int_tuple = tag.split(".")
    assert all(i.isdigit() for i in int_tuple), f"Can't convert {tag} to int tuple :/"
    return tuple(int(i) for i in int_tuple)


def sha256_of_remote_file(url):
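    """Download a file (streamed in 8 KiB chunks) and return its sha256
    hexdigest, or None if the download fails."""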
    print(f"Computing sha256sum for {url} ...")
    try:
        r = requests.get(url, stream=True)
        r.raise_for_status()  # don't silently hash an HTML error page
        m = hashlib.sha256()
        for data in r.iter_content(8192):
            m.update(data)
        return m.hexdigest()
    except Exception as e:
        print(f"Failed to compute sha256 for {url} : {e}")
        return None


class AppAutoUpdater:
    def __init__(self, app_id):
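        """Fetch manifest.toml for the app from its GitHub repo and parse out
        the current version, its sources, and the upstream code repository."""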
        # if not os.path.exists(app_path + "/manifest.toml"):
        #     raise Exception("manifest.toml doesn't exist?")

        # We actually want to look at the manifest on the "testing" (or default) branch
        self.repo = github.get_repo(f"Yunohost-Apps/{app_id}_ynh")
        # Determine the base branch: either `testing` or the default branch
        try:
            self.base_branch = self.repo.get_branch("testing").name
        except Exception:
            self.base_branch = self.repo.default_branch

        contents = self.repo.get_contents("manifest.toml", ref=self.base_branch)
        self.manifest_raw = contents.decoded_content.decode()
        self.manifest_raw_sha = contents.sha
        manifest = toml.loads(self.manifest_raw)

        self.current_version = manifest["version"].split("~")[0]
        self.sources = manifest.get("resources", {}).get("sources")

        if not self.sources:
            raise Exception("There's no resources.sources in manifest.toml ?")

        self.upstream = manifest.get("upstream", {}).get("code")

    def run(self):
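        """Check every source with an autoupdate strategy; if anything changed,
        push an updated manifest.toml to a new branch and open a pull request."""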
        todos = {}

        for source, infos in self.sources.items():
            if "autoupdate" not in infos:
                continue

            strategy = infos.get("autoupdate", {}).get("strategy")
            if strategy not in STRATEGIES:
                raise Exception(
                    f"Unknown strategy to autoupdate {source}, expected one of {STRATEGIES}, got {strategy}"
                )

            asset = infos.get("autoupdate", {}).get("asset", "tarball")

            print(f"Checking {source} ...")

            new_version, new_asset_urls = self.get_latest_version_and_asset(
                strategy, asset, infos
            )

            print(f"Current version in manifest: {self.current_version}")
            print(f"Newest version on upstream: {new_version}")

            if source == "main":
                if self.current_version == new_version:
                    print(
                        f"Version is still {new_version}, no update required for {source}"
                    )
                    continue
                else:
                    print(f"Update needed for {source}")
                    todos[source] = {
                        "new_asset_urls": new_asset_urls,
                        "old_assets": infos,
                        "new_version": new_version,
                    }
            else:
                if isinstance(new_asset_urls, str) and infos["url"] == new_asset_urls:
                    print(f"URL is still up to date for asset {source}")
                    continue
                elif isinstance(new_asset_urls, dict) and new_asset_urls == {
                    k: infos[k]["url"] for k in new_asset_urls.keys()
                }:
                    print(f"URLs are still up to date for asset {source}")
                    continue
                else:
                    print(f"Update needed for {source}")
                    todos[source] = {
                        "new_asset_urls": new_asset_urls,
                        "old_assets": infos,
                    }

        if not todos:
            return

        if "main" in todos:
            new_version = todos["main"]["new_version"]
            message = f"Upgrade to v{new_version}"
            new_branch = f"ci-auto-update-{new_version}"
        else:
            message = "Upgrade sources"
            new_branch = "ci-auto-update-sources"

        try:
            # Get the base commit for the new branch, and create the branch
            commit_sha = self.repo.get_branch(self.base_branch).commit.sha
            self.repo.create_git_ref(ref=f"refs/heads/{new_branch}", sha=commit_sha)
        except Exception:
            # The branch probably already exists from a previous run
            pass

        manifest_new = self.manifest_raw
        for source, infos in todos.items():
            manifest_new = self.replace_version_and_asset_in_manifest(
                manifest_new,
                infos.get("new_version"),
                infos["new_asset_urls"],
                infos["old_assets"],
                is_main=source == "main",
            )

        self.repo.update_file(
            "manifest.toml",
            message=message,
            content=manifest_new,
            sha=self.manifest_raw_sha,
            branch=new_branch,
            author=author,
        )

        # Wait a bit to preserve the API rate limit
        time.sleep(1.5)

        # Open the PR
        pr = self.repo.create_pull(
            title=message, body=message, head=new_branch, base=self.base_branch
        )

        print(f"Created PR #{pr.number} on {self.repo.full_name}")

    def get_latest_version_and_asset(self, strategy, asset, infos):
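        """Resolve the latest upstream version and its asset URL(s) according
        to the given strategy. Returns (version, url) when `asset` is 'tarball'
        or a single regex, or (version, {name: url}) for a table of regexes."""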
        if "github" in strategy:
            assert self.upstream and self.upstream.startswith(
                "https://github.com/"
            ), f"When using strategy {strategy}, having a defined upstream code repo on github.com is required"
            self.upstream_repo = self.upstream.replace("https://github.com/", "").strip(
                "/"
            )
            assert (
                len(self.upstream_repo.split("/")) == 2
            ), f"'{self.upstream}' doesn't seem to be a github repository ?"

        if strategy == "latest_github_release":
            releases = self.github(f"repos/{self.upstream_repo}/releases")
            tags = [
                release["tag_name"]
                for release in releases
                if not release["draft"] and not release["prerelease"]
            ]
            latest_version = filter_and_get_latest_tag(tags)
            if asset == "tarball":
                latest_tarball = (
                    f"{self.upstream}/archive/refs/tags/{latest_version}.tar.gz"
                )
                return latest_version.strip("v"), latest_tarball
            # FIXME
            else:
                latest_release = [
                    release
                    for release in releases
                    if release["tag_name"] == latest_version
                ][0]
                latest_assets = {
                    a["name"]: a["browser_download_url"]
                    for a in latest_release["assets"]
                    if not a["name"].endswith(".md5")
                }
                if isinstance(asset, str):
                    matching_assets_urls = [
                        url
                        for name, url in latest_assets.items()
                        if re.match(asset, name)
                    ]
                    if not matching_assets_urls:
                        raise Exception(
                            f"No assets matching regex '{asset}' for release {latest_version} among {list(latest_assets.keys())}"
                        )
                    elif len(matching_assets_urls) > 1:
                        raise Exception(
                            f"Too many assets matching regex '{asset}' for release {latest_version} : {matching_assets_urls}"
                        )
                    return latest_version.strip("v"), matching_assets_urls[0]
                elif isinstance(asset, dict):
                    matching_assets_dicts = {}
                    for asset_name, asset_regex in asset.items():
                        matching_assets_urls = [
                            url
                            for name, url in latest_assets.items()
                            if re.match(asset_regex, name)
                        ]
                        if not matching_assets_urls:
                            raise Exception(
                                f"No assets matching regex '{asset_regex}' for release {latest_version} among {list(latest_assets.keys())}"
                            )
                        elif len(matching_assets_urls) > 1:
                            raise Exception(
                                f"Too many assets matching regex '{asset_regex}' for release {latest_version} : {matching_assets_urls}"
                            )
                        matching_assets_dicts[asset_name] = matching_assets_urls[0]
                    return latest_version.strip("v"), matching_assets_dicts

        elif strategy == "latest_github_tag":
            if asset != "tarball":
                raise Exception(
                    "For the latest_github_tag strategy, only asset = 'tarball' is supported"
                )
            tags = self.github(f"repos/{self.upstream_repo}/tags")
            latest_version = filter_and_get_latest_tag([t["name"] for t in tags])
            latest_tarball = (
                f"{self.upstream}/archive/refs/tags/{latest_version}.tar.gz"
            )
            return latest_version.strip("v"), latest_tarball

    def github(self, uri):
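        """Call the GitHub REST API with the configured credentials and return
        the decoded JSON response."""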
        # print(f'https://api.github.com/{uri}')
        r = requests.get(
            f"https://api.github.com/{uri}", auth=(GITHUB_LOGIN, GITHUB_TOKEN)
        )
        assert r.status_code == 200, r
        return r.json()

    def replace_version_and_asset_in_manifest(
        self, content, new_version, new_assets_urls, current_assets, is_main
    ):
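        """Rewrite the raw manifest.toml text: bump the version (for the main
        source) and substitute old asset URLs and sha256 sums with new ones."""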
        if isinstance(new_assets_urls, str):
            sha256 = sha256_of_remote_file(new_assets_urls)
        elif isinstance(new_assets_urls, dict):
            sha256 = {
                url: sha256_of_remote_file(url) for url in new_assets_urls.values()
            }

        if is_main:

            def repl(m):
                return m.group(1) + new_version + m.group(3)

            content = re.sub(
                r"(\s*version\s*=\s*[\"\'])([\d\.]+)(\~ynh\d+[\"\'])", repl, content
            )
        if isinstance(new_assets_urls, str):
            content = content.replace(current_assets["url"], new_assets_urls)
            content = content.replace(current_assets["sha256"], sha256)
        elif isinstance(new_assets_urls, dict):
            for key, url in new_assets_urls.items():
                content = content.replace(current_assets[key]["url"], url)
                content = content.replace(current_assets[key]["sha256"], sha256[url])

        return content


# Progress bar helper, stolen from https://stackoverflow.com/a/34482761
def progressbar(it, prefix="", size=60, file=sys.stdout):
    it = list(it)
    count = len(it)

    def show(j, name=""):
        name += " "
        x = int(size * j / count)
        file.write(
            "%s[%s%s] %i/%i %s\r" % (prefix, "#" * x, "." * (size - x), j, count, name)
        )
        file.flush()

    show(0)
    for i, item in enumerate(it):
        yield item
        show(i + 1, item)
    file.write("\n")
    file.flush()


if __name__ == "__main__":
    for app in progressbar(apps_to_run_auto_update_for(), "Checking: ", 40):
        AppAutoUpdater(app).run()