Merge branch 'master' into support_gitlab_autoupgrade
commit 1d95d82137

2 changed files with 98 additions and 47 deletions
@@ -47,7 +47,6 @@ else:


 def apps_to_run_auto_update_for():
-
     catalog = toml.load(open(os.path.dirname(__file__) + "/../../apps.toml"))

     apps_flagged_as_working_and_on_yunohost_apps_org = [
@@ -105,7 +104,6 @@ def filter_and_get_latest_tag(tags, app_id):


 def tag_to_int_tuple(tag):
-
     tag = tag.strip("v").strip(".")
     int_tuple = tag.split(".")
     assert all(i.isdigit() for i in int_tuple), f"Cant convert {tag} to int tuple :/"
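
Note: the hunk above only reformats blank lines around tag_to_int_tuple. For readers skimming the diff, a minimal sketch of the helper's behavior; the trailing return is assumed (it is not shown in this hunk) and the example tags are hypothetical:

def tag_to_int_tuple(tag):
    tag = tag.strip("v").strip(".")
    int_tuple = tag.split(".")
    assert all(i.isdigit() for i in int_tuple), f"Cant convert {tag} to int tuple :/"
    return tuple(int(i) for i in int_tuple)  # assumed: not shown in this hunk

print(tag_to_int_tuple("v2.1.3"))   # (2, 1, 3)
print(tag_to_int_tuple("2024.06"))  # (2024, 6)
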
@@ -127,7 +125,6 @@ def sha256_of_remote_file(url):

 class AppAutoUpdater:
     def __init__(self, app_id, app_id_is_local_app_dir=False):
-
         if app_id_is_local_app_dir:
             if not os.path.exists(app_id + "/manifest.toml"):
                 raise Exception("manifest.toml doesnt exists?")
@@ -158,11 +155,9 @@ class AppAutoUpdater:
         self.main_upstream = manifest.get("upstream", {}).get("code")

     def run(self):
-
         todos = {}

         for source, infos in self.sources.items():
-
             if "autoupdate" not in infos:
                 continue

@@ -176,9 +171,16 @@ class AppAutoUpdater:

             print(f"\n Checking {source} ...")

-            new_version, new_asset_urls = self.get_latest_version_and_asset(
-                strategy, asset, infos, source
-            )
+            if strategy == "latest_github_release":
+                (
+                    new_version,
+                    new_asset_urls,
+                    changelog_url,
+                ) = self.get_latest_version_and_asset(strategy, asset, infos, source)
+            else:
+                (new_version, new_asset_urls) = self.get_latest_version_and_asset(
+                    strategy, asset, infos, source
+                )

             if source == "main":
                 print(f"Current version in manifest: {self.current_version}")
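
Note: after this merge, only the latest_github_release strategy returns a third element (the changelog URL), so the caller unpacks three values in that branch and two otherwise. A self-contained sketch of the same pattern, with hypothetical values:

def fake_get_latest(strategy):
    # Stand-in for get_latest_version_and_asset(); values are made up.
    if strategy == "latest_github_release":
        return "1.2.3", "https://example.org/app.tar.gz", "https://example.org/releases/1.2.3"
    return "1.2.3", "https://example.org/app.tar.gz"

for strategy in ("latest_github_release", "latest_gitlab_tag"):
    if strategy == "latest_github_release":
        new_version, new_asset_urls, changelog_url = fake_get_latest(strategy)
    else:
        new_version, new_asset_urls = fake_get_latest(strategy)
    print(strategy, new_version)
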
@@ -190,8 +192,12 @@ class AppAutoUpdater:
                 # Though we wrap this in a try/except pass, because don't want to miserably crash
                 # if the tag can't properly be converted to int tuple ...
                 try:
-                    if tag_to_int_tuple(self.current_version) > tag_to_int_tuple(new_version):
-                        print("Up to date (current version appears more recent than newest version found)")
+                    if tag_to_int_tuple(self.current_version) > tag_to_int_tuple(
+                        new_version
+                    ):
+                        print(
+                            "Up to date (current version appears more recent than newest version found)"
+                        )
                         continue
                 except:
                     pass
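
Note: the bare except/pass exists because not every upstream tag parses as an int tuple. A self-contained illustration (the versions are hypothetical):

def tag_to_int_tuple(tag):
    tag = tag.strip("v").strip(".")
    parts = tag.split(".")
    assert all(i.isdigit() for i in parts), f"Cant convert {tag} to int tuple :/"
    return tuple(int(i) for i in parts)

current, candidate = "2.1.0", "1.2.3-beta"
try:
    if tag_to_int_tuple(current) > tag_to_int_tuple(candidate):
        print("Up to date (current version appears more recent than newest version found)")
except Exception:
    pass  # "3-beta".isdigit() is False, so the assert fires and the comparison is skipped
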
@@ -200,9 +206,15 @@ class AppAutoUpdater:
                 print("Up to date")
                 continue

-            if (isinstance(new_asset_urls, dict) and isinstance(infos.get("url"), str)) \
-               or (isinstance(new_asset_urls, str) and not isinstance(infos.get("url"), str)):
-                raise Exception(f"It looks like there's an inconsistency between the old asset list and the new ones ... one is arch-specific, the other is not ... Did you forget to define arch-specific regexes ? ... New asset url is/are : {new_asset_urls}")
+            if (
+                isinstance(new_asset_urls, dict) and isinstance(infos.get("url"), str)
+            ) or (
+                isinstance(new_asset_urls, str)
+                and not isinstance(infos.get("url"), str)
+            ):
+                raise Exception(
+                    f"It looks like there's an inconsistency between the old asset list and the new ones ... one is arch-specific, the other is not ... Did you forget to define arch-specific regexes ? ... New asset url is/are : {new_asset_urls}"
+                )

             if isinstance(new_asset_urls, str) and infos["url"] == new_asset_urls:
                 print(f"URL for asset {source} is up to date")
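
Note: this check guards the shape of the asset spec: arch-specific sources carry a dict of per-arch URLs, single-asset sources a plain string, and the old and new shapes must agree. Toy values showing the mismatch it catches:

infos = {"url": "https://example.org/app-1.2.3.tar.gz"}         # old: one URL
new_asset_urls = {"amd64": "https://example.org/amd64.tar.gz"}  # new: per-arch dict
if (isinstance(new_asset_urls, dict) and isinstance(infos.get("url"), str)) or (
    isinstance(new_asset_urls, str) and not isinstance(infos.get("url"), str)
):
    print("inconsistent: did you forget to define arch-specific regexes?")
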
@@ -226,11 +238,15 @@ class AppAutoUpdater:
             return bool(todos)

         if "main" in todos:
+            if strategy == "latest_github_release":
+                title = f"Upgrade to v{new_version}"
+                message = f"Upgrade to v{new_version}\nChangelog: {changelog_url}"
+            else:
+                title = message = f"Upgrade to v{new_version}"
             new_version = todos["main"]["new_version"]
-            message = f"Upgrade to v{new_version}"
             new_branch = f"ci-auto-update-{new_version}"
         else:
-            message = "Upgrade sources"
+            title = message = "Upgrade sources"
             new_branch = "ci-auto-update-sources"

         try:
@@ -265,7 +281,7 @@ class AppAutoUpdater:

             # Open the PR
             pr = self.repo.create_pull(
-                title=message, body=message, head=new_branch, base=self.base_branch
+                title=title, body=message, head=new_branch, base=self.base_branch
             )

             print("Created PR " + self.repo.full_name + " updated with PR #" + str(pr.id))
@@ -273,12 +289,13 @@ class AppAutoUpdater:
         return bool(todos)

     def get_latest_version_and_asset(self, strategy, asset, infos, source):
-
-        upstream = infos.get("autoupdate", {}).get("upstream", self.main_upstream).strip("/")
+        upstream = (
+            infos.get("autoupdate", {}).get("upstream", self.main_upstream).strip("/")
+        )

         if "github" in strategy:
-            assert upstream and upstream.startswith(
-                "https://github.com/"
+            assert (
+                upstream and upstream.startswith("https://github.com/")
             ), f"When using strategy {strategy}, having a defined upstream code repo on github.com is required"
             api = GithubAPI(upstream, auth=auth)
         elif "gitlab" in strategy:
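
Note: the chained .get() calls give each source an optional autoupdate.upstream override that falls back to the manifest-wide upstream.code. Sketch with hypothetical values:

main_upstream = "https://github.com/example/app"  # from the manifest's upstream.code
infos = {"autoupdate": {"strategy": "latest_github_release"}}
upstream = infos.get("autoupdate", {}).get("upstream", main_upstream).strip("/")
print(upstream)  # no override defined, so this is main_upstream
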
@@ -294,24 +311,24 @@ class AppAutoUpdater:
             latest_version_orig, latest_version = filter_and_get_latest_tag(
                 tags, self.app_id
             )
+            latest_release = [
+                release
+                for release in releases
+                if release["tag_name"] == latest_version_orig
+            ][0]
+            latest_assets = {
+                a["name"]: a["browser_download_url"]
+                for a in latest_release["assets"]
+                if not a["name"].endswith(".md5")
+            }
+            latest_release_html_url = latest_release["html_url"]
             if asset == "tarball":
                 latest_tarball = (
                     api.url_for_ref(latest_version_orig, RefType.tags)
                 )
-                return latest_version, latest_tarball
+                return latest_version, latest_tarball, latest_release_html_url
             # FIXME
             else:
-                latest_release = [
-                    release
-                    for release in releases
-                    if release["tag_name"] == latest_version_orig
-                ][0]
-                latest_assets = {
-                    a["name"]: a["browser_download_url"]
-                    for a in latest_release["assets"]
-                    if not a["name"].endswith(".md5")
-                }
-                latest_release_html_url = latest_release["html_url"]
                 if isinstance(asset, str):
                     matching_assets_urls = [
                         url
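
Note: the release lookup and asset map move out of the else branch so that latest_release_html_url is also available on the tarball path. A self-contained illustration against a hypothetical GitHub-API-shaped payload:

releases = [
    {
        "tag_name": "v1.2.3",
        "html_url": "https://example.org/releases/v1.2.3",
        "assets": [
            {"name": "app-amd64.tar.gz", "browser_download_url": "https://example.org/amd64.tar.gz"},
            {"name": "app-amd64.tar.gz.md5", "browser_download_url": "https://example.org/amd64.md5"},
        ],
    }
]
latest_version_orig = "v1.2.3"
latest_release = [r for r in releases if r["tag_name"] == latest_version_orig][0]
latest_assets = {
    a["name"]: a["browser_download_url"]
    for a in latest_release["assets"]
    if not a["name"].endswith(".md5")  # checksum files are not real assets
}
print(latest_assets)  # {'app-amd64.tar.gz': 'https://example.org/amd64.tar.gz'}
print(latest_release["html_url"])
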
@@ -326,7 +343,11 @@ class AppAutoUpdater:
                         raise Exception(
                             f"Too many assets matching regex '{asset}' for release {latest_version} : {matching_assets_urls}. Full release details on {latest_release_html_url}"
                         )
-                    return latest_version, matching_assets_urls[0]
+                    return (
+                        latest_version,
+                        matching_assets_urls[0],
+                        latest_release_html_url,
+                    )
                 elif isinstance(asset, dict):
                     matching_assets_dicts = {}
                     for asset_name, asset_regex in asset.items():
@@ -344,7 +365,11 @@ class AppAutoUpdater:
                             f"Too many assets matching regex '{asset}' for release {latest_version} : {matching_assets_urls}. Full release details on {latest_release_html_url}"
                         )
                         matching_assets_dicts[asset_name] = matching_assets_urls[0]
-                    return latest_version.strip("v"), matching_assets_dicts
+                    return (
+                        latest_version.strip("v"),
+                        matching_assets_dicts,
+                        latest_release_html_url,
+                    )

         elif strategy == "latest_github_tag" or strategy == "latest_gitlab_tag":
             if asset != "tarball":
@@ -367,8 +392,12 @@ class AppAutoUpdater:
             latest_commit = commits[0]
             latest_tarball = api.url_for_ref(latest_commit["sha"], RefType.commits)
             # Let's have the version as something like "2023.01.23"
-            latest_commit_date = datetime.strptime(latest_commit["commit"]["author"]["date"][:10], "%Y-%m-%d")
-            version_format = infos.get("autoupdate", {}).get("force_version", "%Y.%m.%d")
+            latest_commit_date = datetime.strptime(
+                latest_commit["commit"]["author"]["date"][:10], "%Y-%m-%d"
+            )
+            version_format = infos.get("autoupdate", {}).get(
+                "force_version", "%Y.%m.%d"
+            )
             latest_version = latest_commit_date.strftime(version_format)

             return latest_version, latest_tarball
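
Note: for tag-less strategies the "version" is derived from the latest commit date, by default as YYYY.MM.DD. A runnable sketch (the commit payload is hypothetical):

from datetime import datetime

latest_commit = {"commit": {"author": {"date": "2023-01-23T17:45:00Z"}}}
latest_commit_date = datetime.strptime(
    latest_commit["commit"]["author"]["date"][:10], "%Y-%m-%d"
)
print(latest_commit_date.strftime("%Y.%m.%d"))  # 2023.01.23
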
@@ -376,7 +405,6 @@ class AppAutoUpdater:
     def replace_version_and_asset_in_manifest(
         self, content, new_version, new_assets_urls, current_assets, is_main
     ):
-
         if isinstance(new_assets_urls, str):
             sha256 = sha256_of_remote_file(new_assets_urls)
         elif isinstance(new_assets_urls, dict):
@@ -387,7 +415,7 @@ class AppAutoUpdater:
         if is_main:

             def repl(m):
-                return m.group(1) + new_version + "~ynh1\""
+                return m.group(1) + new_version + '~ynh1"'

             content = re.sub(
                 r"(\s*version\s*=\s*[\"\'])([\d\.]+)(\~ynh\d+[\"\'])", repl, content
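
Note: the quote change above ('~ynh1"' instead of "~ynh1\"") is behavior-preserving; the substitution itself bumps the manifest version and resets the package revision to ~ynh1. Runnable illustration with a toy manifest line and a hard-coded new version:

import re

def repl(m):
    return m.group(1) + "1.2.4" + '~ynh1"'  # "1.2.4" stands in for new_version

content = 'version = "1.2.3~ynh2"'
print(re.sub(r"(\s*version\s*=\s*[\"\'])([\d\.]+)(\~ynh\d+[\"\'])", repl, content))
# version = "1.2.4~ynh1"
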
@@ -412,7 +440,8 @@ def progressbar(it, prefix="", size=60, file=sys.stdout):
         name += " "
         x = int(size * j / count)
         file.write(
-            "\n%s[%s%s] %i/%i %s\n" % (prefix, "#" * x, "." * (size - x), j, count, name)
+            "\n%s[%s%s] %i/%i %s\n"
+            % (prefix, "#" * x, "." * (size - x), j, count, name)
         )
         file.flush()

@@ -432,14 +461,15 @@ def paste_on_haste(data):
     TIMEOUT = 3
     try:
         url = SERVER_URL + "/documents"
-        response = requests.post(url, data=data.encode('utf-8'), timeout=TIMEOUT)
+        response = requests.post(url, data=data.encode("utf-8"), timeout=TIMEOUT)
         response.raise_for_status()
-        dockey = response.json()['key']
+        dockey = response.json()["key"]
        return SERVER_URL + "/raw/" + dockey
     except requests.exceptions.RequestException as e:
         print("\033[31mError: {}\033[0m".format(e))
         sys.exit(1)

+
 if __name__ == "__main__":
     args = [arg for arg in sys.argv[1:] if arg != "--commit-and-create-PR"]

@@ -455,6 +485,7 @@ if __name__ == "__main__":
         except Exception as e:
             apps_failed.append(app)
             import traceback
+
             t = traceback.format_exc()
             apps_failed_details[app] = t
             print(t)
@@ -465,8 +496,15 @@ if __name__ == "__main__":
     if apps_failed:
         print(f"Apps failed: {', '.join(apps_failed)}")
         if os.path.exists("/usr/bin/sendxmpppy"):
-            paste = '\n=========\n'.join([app + "\n-------\n" + trace + "\n\n" for app, trace in apps_failed_details.items()])
-            paste_url = paste_on_haste(paste)
-            os.system(f"/usr/bin/sendxmpppy 'Failed to run the source auto-update for : {', '.join(apps_failed)}. Please run manually the `autoupdate_app_sources.py` script on these apps to debug what is happening! Debug log : {paste_url}'")
+            paste = "\n=========\n".join(
+                [
+                    app + "\n-------\n" + trace + "\n\n"
+                    for app, trace in apps_failed_details.items()
+                ]
+            )
+            paste_url = paste_on_haste(paste)
+            os.system(
+                f"/usr/bin/sendxmpppy 'Failed to run the source auto-update for : {', '.join(apps_failed)}. Please run manually the `autoupdate_app_sources.py` script on these apps to debug what is happening! Debug log : {paste_url}'"
+            )
     if apps_updated:
         print(f"Apps updated: {', '.join(apps_updated)}")
@@ -37,6 +37,12 @@ def get_wishlist() -> Dict[str, Dict[str, str]]:
     return toml.load(wishlist_path)


+@cache
+def get_graveyard() -> Dict[str, Dict[str, str]]:
+    wishlist_path = APPS_ROOT / "graveyard.toml"
+    return toml.load(wishlist_path)
+
+
 def validate_schema() -> Generator[str, None, None]:
     with open(APPS_ROOT / "schemas" / "apps.toml.schema.json", encoding="utf-8") as file:
         apps_catalog_schema = json.load(file)
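
Note: @cache (functools, Python >= 3.9) memoizes the zero-argument loader, so graveyard.toml is parsed at most once per run however many apps are checked. A self-contained illustration of that semantics:

from functools import cache

calls = 0

@cache
def get_data():
    global calls
    calls += 1
    return {"mailman": {}}  # stand-in for toml.load(...)

get_data()
get_data()
print(calls)  # 1 -- the body ran once; the second call hit the cache
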
@@ -50,9 +56,6 @@ def check_app(app: str, infos: Dict[str, Any]) -> Generator[Tuple[str, bool], None, None]:
         yield "state is missing", True
         return

-    if infos["state"] != "working":
-        return
-
     # validate that the app is not (anymore?) in the wishlist
     # we use fuzzy matching because the id in catalog may not be the same exact id as in the wishlist
     # some entries are ignore-hard-coded, because e.g. radarr an readarr are really different apps...
@@ -66,6 +69,16 @@ def check_app(app: str, infos: Dict[str, Any]) -> Generator[Tuple[str, bool], None, None]:
     if wishlist_matches:
         yield f"app seems to be listed in wishlist: {wishlist_matches}", True

+    ignored_graveyard_entries = ["mailman"]
+    graveyard_matches = [
+        grave
+        for grave in get_graveyard()
+        if grave not in ignored_graveyard_entries
+        and SequenceMatcher(None, app, grave).ratio() > 0.9
+    ]
+    if graveyard_matches:
+        yield f"app seems to be listed in graveyard: {graveyard_matches}", True
+
     repo_name = infos.get("url", "").split("/")[-1]
     if repo_name != f"{app}_ynh":
         yield f"repo name should be {app}_ynh, not in {repo_name}", True
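
Note: the graveyard check reuses the wishlist's fuzzy-matching approach; 0.9 is strict enough to catch renamed entries, but near-identical names of genuinely different apps (the radarr/readarr case from the comment above) still need the hard-coded ignore list. Quick check of the ratios:

from difflib import SequenceMatcher

print(SequenceMatcher(None, "radarr", "readarr").ratio())    # ~0.92: above 0.9, would match
print(SequenceMatcher(None, "nextcloud", "mailman").ratio()) # ~0.13: no match
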