#!/usr/bin/env python3

import argparse
import hashlib
import multiprocessing
import logging
from enum import Enum
from typing import Any, Optional, Union
import re
import sys
from pathlib import Path
from functools import cache
from datetime import datetime

import requests
import toml
import tqdm
import github

# add apps/tools to sys.path
sys.path.insert(0, str(Path(__file__).parent.parent))

from rest_api import (
    GithubAPI,
    GitlabAPI,
    GiteaForgejoAPI,
    DownloadPageAPI,
    RefType,
)  # noqa: E402,E501 pylint: disable=import-error,wrong-import-position
import appslib.get_apps_repo as get_apps_repo
import appslib.logging_sender  # noqa: E402 pylint: disable=import-error,wrong-import-position
from appslib.utils import (
    get_catalog,
)  # noqa: E402 pylint: disable=import-error,wrong-import-position

TOOLS_DIR = Path(__file__).resolve().parent.parent

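# Each strategy is named "latest_<forge>_<reftype>": get_latest_version_and_asset()
# splits the name on "_" to pick both the API client (github, gitlab, gitea,
# forgejo, webpage) and the kind of reference to inspect (release, tag, commit, link).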
STRATEGIES = [
    "latest_github_release",
    "latest_github_tag",
    "latest_github_commit",
    "latest_gitlab_release",
    "latest_gitlab_tag",
    "latest_gitlab_commit",
    "latest_gitea_release",
    "latest_gitea_tag",
    "latest_gitea_commit",
    "latest_forgejo_release",
    "latest_forgejo_tag",
    "latest_forgejo_commit",
    "latest_webpage_link",
]


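# The script consumes the `autoupdate` sub-section of each source in the app's
# manifest.toml. A rough, illustrative sketch of what such an entry can look
# like (exact fields vary per app):
#
#   [resources.sources.main]
#   url = "https://github.com/foo/bar/archive/refs/tags/v1.2.3.tar.gz"
#   sha256 = "..."
#   autoupdate.strategy = "latest_github_tag"
#   autoupdate.version_regex = 'v(\d+)\.(\d+)\.(\d+)'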
@cache
def get_github() -> tuple[
    Optional[tuple[str, str]],
    Optional[github.Github],
    Optional[github.InputGitAuthor],
]:
    try:
        github_login = (
            (TOOLS_DIR / ".github_login").open("r", encoding="utf-8").read().strip()
        )
        github_token = (
            (TOOLS_DIR / ".github_token").open("r", encoding="utf-8").read().strip()
        )
        github_email = (
            (TOOLS_DIR / ".github_email").open("r", encoding="utf-8").read().strip()
        )

        auth = (github_login, github_token)
        github_api = github.Github(github_token)
        author = github.InputGitAuthor(github_login, github_email)
        return auth, github_api, author
    except Exception as e:
        logging.warning(f"Could not get GitHub credentials: {e}")
        return None, None, None


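# Select catalog apps that are flagged "working", hosted under the
# YunoHost-Apps org on github.com, and declare at least one source
# with an `autoupdate` section in their cached manifest.toml.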
def apps_to_run_auto_update_for(cache_path: Path) -> list[str]:
    apps_flagged_as_working_and_on_yunohost_apps_org = [
        app
        for app, infos in get_catalog().items()
        if infos["state"] == "working"
        and "/github.com/yunohost-apps" in infos["url"].lower()
    ]

    relevant_apps = []
    for app in apps_flagged_as_working_and_on_yunohost_apps_org:
        try:
            manifest_toml = cache_path / app / "manifest.toml"
            if manifest_toml.exists():
                manifest = toml.load(manifest_toml.open("r", encoding="utf-8"))
                sources = manifest.get("resources", {}).get("sources", {})
                if any("autoupdate" in source for source in sources.values()):
                    relevant_apps.append(app)
        except Exception as e:
            logging.error(f"Error while loading {app}'s manifest: {e}")
            raise
    return relevant_apps


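# Abstraction over the two ways the updater can operate: `app` as a Path means
# a local working copy is edited in place; `app` as a str means the remote
# repository "Yunohost-Apps/{app}_ynh" is edited through the GitHub API.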
class LocalOrRemoteRepo:
    def __init__(self, app: Union[str, Path]) -> None:
        self.local = False
        self.remote = False

        self.app = app
        if isinstance(app, Path):
            # It's local
            self.local = True
            self.manifest_path = app / "manifest.toml"

            if not self.manifest_path.exists():
                raise RuntimeError(f"{app.name}: manifest.toml doesn't exist?")
            # app is in fact a path
            self.manifest_raw = (
                (app / "manifest.toml").open("r", encoding="utf-8").read()
            )

        elif isinstance(app, str):
            # It's remote
            self.remote = True
            github = get_github()[1]
            assert github, "Could not get github authentication!"
            self.repo = github.get_repo(f"Yunohost-Apps/{app}_ynh")
            self.pr_branch: Optional[str] = None
            # Determine the base branch: either `testing` or the default branch
            try:
                self.base_branch = self.repo.get_branch("testing").name
            except Exception:
                self.base_branch = self.repo.default_branch
            contents = self.repo.get_contents("manifest.toml", ref=self.base_branch)
            assert not isinstance(contents, list)
            self.manifest_raw = contents.decoded_content.decode()
            self.manifest_raw_sha = contents.sha

        else:
            raise TypeError(f"Invalid argument type for app: {type(app)}")

    def edit_manifest(self, content: str):
        self.manifest_raw = content
        if self.local:
            self.manifest_path.open("w", encoding="utf-8").write(content)

    def commit(self, message: str):
        if self.remote:
            author = get_github()[2]
            assert author, "Could not get Github author!"
            assert self.pr_branch is not None, "Did you forget to create a branch?"
            self.repo.update_file(
                "manifest.toml",
                message=message,
                content=self.manifest_raw,
                sha=self.manifest_raw_sha,
                branch=self.pr_branch,
                author=author,
            )

    def new_branch(self, name: str) -> bool:
        if self.local:
            logging.warning("Can't create branches for local repositories")
            return False
        if self.remote:
            self.pr_branch = name
            commit_sha = self.repo.get_branch(self.base_branch).commit.sha
            if self.pr_branch in [branch.name for branch in self.repo.get_branches()]:
                print("Branch already exists")
                return False
            self.repo.create_git_ref(ref=f"refs/heads/{name}", sha=commit_sha)
            return True
        return False

    def create_pr(self, branch: str, title: str, message: str) -> Optional[str]:
        if self.remote:
            # Open the PR
            pr = self.repo.create_pull(
                title=title, body=message, head=branch, base=self.base_branch
            )
            return pr.html_url
        logging.warning("Can't create pull requests for local repositories")
        return None

    def get_pr(self, branch: str) -> str:
        return next(pull.html_url for pull in self.repo.get_pulls(head=branch))


class State(Enum):
    up_to_date = 0
    already = 1
    created = 2
    failure = 3


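# High-level flow: for each source declared in the manifest, query the upstream
# forge for the latest release/tag/commit, rewrite the manifest's version and
# asset URLs/checksums, then (optionally) commit the change and open a PR.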
class AppAutoUpdater:
    def __init__(self, app_id: Union[str, Path]) -> None:
        self.repo = LocalOrRemoteRepo(app_id)
        self.manifest = toml.loads(self.repo.manifest_raw)

        self.app_id = self.manifest["id"]
        self.current_version = self.manifest["version"].split("~")[0]
        self.sources = self.manifest.get("resources", {}).get("sources")
        self.main_upstream = self.manifest.get("upstream", {}).get("code")

        if not self.sources:
            raise RuntimeError("There is no resources.sources in manifest.toml?")

        self.latest_commit_weekly = False

    def run(
        self, edit: bool = False, commit: bool = False, pr: bool = False
    ) -> tuple[State, str, str, str]:
        state = State.up_to_date
        main_version = ""
        pr_url = ""

        # Default message
        pr_title = commit_msg = "Upgrade sources"
        date = datetime.now().strftime("%y%m%d")
        branch_name = f"ci-auto-update-sources-{date}"

        for source, infos in self.sources.items():
            update = self.get_source_update(source, infos)
            if update is None:
                continue
            # We assume we'll create a PR
            state = State.created
            version, assets, msg = update

            if source == "main":
                main_version = version
                branch_name = f"ci-auto-update-{version}"
                pr_title = f"Upgrade to v{version}"

            if msg:
                commit_msg += f"\n- `{source}` v{version}: {msg}"

            self.repo.manifest_raw = self.replace_version_and_asset_in_manifest(
                self.repo.manifest_raw,
                version,
                assets,
                infos,
                is_main=source == "main",
            )

        if state == State.up_to_date:
            return (State.up_to_date, "", "", "")

        if main_version == "":
            self.repo.manifest_raw = self.bump_version(
                self.repo.manifest_raw, self.current_version, bump_ynh_level=True
            )

        if edit:
            self.repo.edit_manifest(self.repo.manifest_raw)

        try:
            if pr:
                self.repo.new_branch(branch_name)
        except github.GithubException as e:
            if e.status == 409:
                print("Branch already exists!")

        try:
            if commit:
                self.repo.commit(commit_msg)
        except github.GithubException as e:
            if e.status == 409:
                print("Commits were already committed on the branch!")
        try:
            if pr:
                pr_url = self.repo.create_pr(branch_name, pr_title, commit_msg) or ""
        except github.GithubException as e:
            if e.status in (422, 409):
                state = State.already
                pr_url = self.repo.get_pr(branch_name)
            else:
                raise
        return (state, self.current_version, main_version, pr_url)

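    # Tag selection happens in two stages: apply_version_regex() normalizes each
    # raw tag (manifest version_regex, leading "v"), then version_numbers()
    # filters out pre-releases and anything that doesn't parse as an X.Y.Z-ish
    # version number.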
    @staticmethod
    def relevant_versions(
        tags: list[str], app_id: str, version_regex: Optional[str]
    ) -> tuple[str, str]:
        def apply_version_regex(tag: str) -> Optional[str]:
            # First, preprocess according to the manifest's version_regex…
            if version_regex:
                match = re.match(version_regex, tag)
                if match is None:
                    return None
                # Basically: either groupdict if named capture groups, sorted by name, or groups()
                tag = ".".join(
                    dict(sorted(match.groupdict().items())).values() or match.groups()
                )

            # Then remove the leading "v"
            tag = tag.lstrip("v")
            return tag

        def version_numbers(tag: str) -> Optional[tuple[int, ...]]:
            filter_keywords = ["start", "rc", "beta", "alpha"]
            if any(keyword in tag for keyword in filter_keywords):
                logging.debug(
                    f"Tag {tag} contains a filtered keyword from {filter_keywords}."
                )
                return None

            t_to_check = tag
            if tag.startswith(app_id + "-"):
                t_to_check = tag.split("-", 1)[-1]
            # Boring special case for dokuwiki...
            elif tag.startswith("release-"):
                t_to_check = tag.split("-", 1)[-1].replace("-", ".")

            if re.match(r"^v?\d+(\.\d+)*(\-\d+)?$", t_to_check):
                return AppAutoUpdater.tag_to_int_tuple(t_to_check)
            print(f"Ignoring tag {t_to_check}, doesn't look like a version number")
            return None

        tags_dict: dict[tuple[int, ...], tuple[str, str]] = {}
        for tag in tags:
            tag_clean = apply_version_regex(tag)
            if tag_clean is None:
                continue
            tag_as_ints = version_numbers(tag_clean)
            if tag_as_ints is None:
                continue
            tags_dict[tag_as_ints] = (tag, tag_clean)

        if app_id == "focalboard":
            # Stupid ad-hoc patch for focalboard where 7.11.4 doesn't have the proper asset
            # because idk it was just a patch for mattermost or something
            # (tags_dict is keyed by int tuples, so drop the (7, 11, 4) entry)
            tags_dict.pop((7, 11, 4), None)

        # sorted will sort by keys, tag_as_ints
        # reverse=True will set the latest release as the first element
        tags_dict = dict(sorted(tags_dict.items(), reverse=True))
        if not tags_dict:
            raise RuntimeError("No tags were found after sanity filtering!")
        the_tag_list, (the_tag_orig, the_tag_clean) = next(iter(tags_dict.items()))
        assert the_tag_list is not None
        return the_tag_orig, the_tag_clean

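    # e.g. "v1.2-3" -> (1, 2, 3); raises AssertionError on non-numeric parts.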
    @staticmethod
    def tag_to_int_tuple(tag: str) -> tuple[int, ...]:
        tag = tag.lstrip("v").replace("-", ".").rstrip(".")
        int_tuple = tag.split(".")
        assert all(
            i.isdigit() for i in int_tuple
        ), f"Can't convert {tag} to int tuple :/"
        return tuple(int(i) for i in int_tuple)

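    # The asset is streamed in 8 KiB chunks so large tarballs never have to fit
    # in memory at once.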
    @staticmethod
    def sha256_of_remote_file(url: str) -> str:
        print(f"Computing sha256sum for {url} ...")
        try:
            r = requests.get(url, stream=True)
            m = hashlib.sha256()
            for data in r.iter_content(8192):
                m.update(data)
            return m.hexdigest()
        except Exception as e:
            raise RuntimeError(f"Failed to compute sha256 for {url} : {e}") from e

    def get_source_update(
        self, name: str, infos: dict[str, Any]
    ) -> Optional[tuple[str, Union[str, dict[str, str]], str]]:
        autoupdate = infos.get("autoupdate")
        if autoupdate is None:
            return None

        print(f"\n Checking {name} ...")
        asset = autoupdate.get("asset", "tarball")
        strategy = autoupdate.get("strategy")
        if strategy not in STRATEGIES:
            raise ValueError(
                f"Unknown update strategy '{strategy}' for '{name}', expected one of {STRATEGIES}"
            )

        result = self.get_latest_version_and_asset(strategy, asset, infos)
        if result is None:
            return None
        new_version, assets, more_info = result

        if name == "main":
            print(f"Current version in manifest: {self.current_version}")
            print(f"Newest version on upstream: {new_version}")

            # Maybe the new version is older than the current version, which can
            # happen for example if we manually release an RC that this script
            # ignores. We wrap the comparison in a try/except because we don't
            # want to miserably crash if the tag can't properly be converted
            # to an int tuple...
            if self.current_version == new_version:
                print("Up to date")
                return None
            try:
                if self.tag_to_int_tuple(self.current_version) > self.tag_to_int_tuple(
                    new_version
                ):
                    print(
                        "Up to date (current version appears more recent than newest version found)"
                    )
                    return None
            except (AssertionError, ValueError):
                pass

        if (
            isinstance(assets, dict)
            and isinstance(infos.get("url"), str)
            or isinstance(assets, str)
            and not isinstance(infos.get("url"), str)
        ):
            raise RuntimeError(
                "It looks like there's an inconsistency between the old asset list and the new ones... "
                "One is arch-specific, the other is not... Did you forget to define arch-specific regexes? "
                f"New asset URL(s): {assets}"
            )

        if isinstance(assets, str) and infos["url"] == assets:
            print(f"URL for asset {name} is up to date")
            return None
        if isinstance(assets, dict) and assets == {
            k: infos[k]["url"] for k in assets.keys()
        }:
            print(f"URLs for asset {name} are up to date")
            return None
        print(f"Update needed for {name}")
        return new_version, assets, more_info

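    # e.g. find_matching_asset({"app-amd64.tar.gz": some_url}, r"app-amd64.*")
    # returns ("app-amd64.tar.gz", some_url); exactly one asset must match.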
    @staticmethod
    def find_matching_asset(assets: dict[str, str], regex: str) -> tuple[str, str]:
        matching_assets = {
            name: url for name, url in assets.items() if re.match(regex, name)
        }
        if not matching_assets:
            raise RuntimeError(
                f"No assets matching regex '{regex}' in {list(assets.keys())}"
            )
        if len(matching_assets) > 1:
            raise RuntimeError(
                f"Too many assets matching regex '{regex}': {matching_assets}"
            )
        return next(iter(matching_assets.items()))

    def get_latest_version_and_asset(
        self, strategy: str, asset: Union[str, dict], infos: dict[str, Any]
    ) -> Optional[tuple[str, Union[str, dict[str, str]], str]]:
        autoupdate = infos.get("autoupdate")
        upstream = autoupdate.get("upstream", self.main_upstream)
        version_re = autoupdate.get("version_regex", None)
        allow_prereleases = autoupdate.get("allow_prereleases", False)
        _, remote_type, revision_type = strategy.split("_")

        api: Union[GithubAPI, GitlabAPI, GiteaForgejoAPI, DownloadPageAPI]
        if remote_type == "github":
            assert upstream and upstream.startswith(
                "https://github.com/"
            ), f"When using strategy {strategy}, having a defined upstream code repo on github.com is required"
            api = GithubAPI(upstream, auth=get_github()[0])
        if remote_type == "gitlab":
            api = GitlabAPI(upstream)
        if remote_type in ["gitea", "forgejo"]:
            api = GiteaForgejoAPI(upstream)

        if revision_type == "release":
            releases: dict[str, dict[str, Any]] = {
                release["tag_name"]: release for release in api.releases()
            }

            if not allow_prereleases:
                releases = {
                    name: info
                    for name, info in releases.items()
                    if not info["draft"] and not info["prerelease"]
                }

            latest_version_orig, latest_version = self.relevant_versions(
                list(releases.keys()), self.app_id, version_re
            )
            latest_release = releases[latest_version_orig]
            latest_assets = {
                a["name"]: a["browser_download_url"]
                for a in latest_release["assets"]
                if not a["name"].endswith(".md5")
            }
            if remote_type in ["gitea", "forgejo"] and not latest_assets:
                # if empty (so only the base asset), take the tarball_url
                latest_assets = latest_release["tarball_url"]
            # get the release changelog link
            latest_release_html_url = latest_release["html_url"]
            if not latest_release_html_url:
                latest_release_html_url = api.changelog_for_ref(
                    latest_version_orig, "", RefType.releases
                )

            if asset == "tarball":
                latest_tarball = api.url_for_ref(latest_version_orig, RefType.tags)
                return latest_version, latest_tarball, latest_release_html_url
            # FIXME
            if isinstance(asset, str):
                try:
                    _, url = self.find_matching_asset(latest_assets, asset)
                    return latest_version, url, latest_release_html_url
                except RuntimeError as e:
                    raise RuntimeError(
                        f"{e}.\nFull release details on {latest_release_html_url}."
                    ) from e

            if isinstance(asset, dict):
                new_assets = {}
                for asset_name, asset_regex in asset.items():
                    try:
                        _, url = self.find_matching_asset(latest_assets, asset_regex)
                        new_assets[asset_name] = url
                    except RuntimeError as e:
                        raise RuntimeError(
                            f"{e}.\nFull release details on {latest_release_html_url}."
                        ) from e
                return latest_version, new_assets, latest_release_html_url

            return None

        if revision_type == "tag":
            if asset != "tarball":
                raise ValueError(
                    "For the latest tag strategies, only asset = 'tarball' is supported"
                )
            tags = [t["name"] for t in api.tags()]
            latest_version_orig, latest_version = self.relevant_versions(
                tags, self.app_id, version_re
            )
            latest_tarball = api.url_for_ref(latest_version_orig, RefType.tags)
            return (
                latest_version,
                latest_tarball,
                api.changelog_for_ref(latest_version, "", RefType.tags),
            )

        if revision_type == "commit":
            if self.latest_commit_weekly and datetime.now().weekday() != 0:
                logging.warning("Skipped autoupdater because it's not Monday")
                return None
            if asset != "tarball":
                raise ValueError(
                    "For the latest commit strategies, only asset = 'tarball' is supported"
                )
            commits = api.commits()
            latest_commit = commits[0]
            latest_tarball = api.url_for_ref(latest_commit["sha"], RefType.commits)
            # Let's have the version as something like "2023.01.23"
            latest_commit_date = datetime.strptime(
                latest_commit["commit"]["author"]["date"][:10], "%Y-%m-%d"
            )
            version_format = autoupdate.get("force_version", "%Y.%m.%d")
            latest_version = latest_commit_date.strftime(version_format)
            return (
                latest_version,
                latest_tarball,
                api.changelog_for_ref(
                    latest_commit["sha"], self.get_old_ref(infos), RefType.commits
                ),
            )

        if remote_type == "webpage" and revision_type == "link":
            api = DownloadPageAPI(upstream)
            links = api.get_web_page_links()
            latest_version_orig, latest_version = self.relevant_versions(
                list(links.keys()), self.app_id, version_re
            )
            latest_url = links[latest_version_orig]
            return latest_version, latest_url, ""

        return None

    @staticmethod
    def get_old_ref(infos: dict[str, Any]) -> Optional[str]:
        regex = r".*[\/-]([a-f0-9]+)\."
        if isinstance(infos["url"], str):
            match = re.match(regex, infos["url"])
            return match.group(1) if match else None
        if isinstance(infos["url"], dict):
            # Take the old ref from the first arch-specific URL
            for url in infos["url"].values():
                match = re.match(regex, url)
                return match.group(1) if match else None
        return None

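    # The manifest is patched with plain string replacement rather than being
    # re-serialized from the parsed TOML, so comments and formatting survive.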
    def replace_version_and_asset_in_manifest(
        self,
        content: str,
        new_version: str,
        new_assets_urls: Union[str, dict],
        current_assets: dict,
        is_main: bool,
    ):
        replacements = []
        if isinstance(new_assets_urls, str):
            replacements = [
                (current_assets["url"], new_assets_urls),
                (current_assets["sha256"], self.sha256_of_remote_file(new_assets_urls)),
            ]
        if isinstance(new_assets_urls, dict):
            replacements = [
                repl
                for key, url in new_assets_urls.items()
                for repl in (
                    (current_assets[key]["url"], url),
                    (current_assets[key]["sha256"], self.sha256_of_remote_file(url)),
                )
            ]

        if is_main:
            content = self.bump_version(content, new_version)

        for old, new in replacements:
            content = content.replace(old, new)

        return content

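    # e.g. version = "1.2.3~ynh4" becomes "2.0.0~ynh1" for an upstream bump, or
    # "1.2.3~ynh5" when only the packaging changed (bump_ynh_level=True).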
    def bump_version(
        self, content: str, new_version: str, bump_ynh_level: bool = False
    ) -> str:
        ynh_level = 1
        if bump_ynh_level:
            ynh_level = (
                int(
                    re.search(
                        r"\s*version\s*=\s*[\"\'][^~\"\']+~ynh(\d+)[\"\']", content
                    ).group(1)
                )
                + 1
            )

        def repl(m: re.Match) -> str:
            return m.group(1) + new_version + f'~ynh{ynh_level}"'

        return re.sub(
            r"(\s*version\s*=\s*[\"\'])([^~\"\']+)(~ynh\d+[\"\'])", repl, content
        )


def paste_on_haste(data):
    # NB: we hardcode this here and can't use the yunopaste command
    # because this script runs on the same machine as haste is hosted on...
    # and doesn't have the proper front-end LE cert in this context
    SERVER_HOST = "http://paste.yunohost.org"
    TIMEOUT = 3
    try:
        url = f"{SERVER_HOST}/documents"
        response = requests.post(url, data=data.encode("utf-8"), timeout=TIMEOUT)
        response.raise_for_status()
        dockey = response.json()["key"]
        return f"{SERVER_HOST}/raw/{dockey}"
    except requests.exceptions.RequestException as e:
        logging.error("\033[31mError: {}\033[0m".format(e))
        raise


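# Workers run in parallel, so each one swallows its own stdout through
# StdoutSwitch; the captured text is only surfaced when an update fails.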
class StdoutSwitch:
    class DummyFile:
        def __init__(self) -> None:
            self.result = ""

        def write(self, x: str) -> None:
            self.result += x

    def __init__(self) -> None:
        self.save_stdout = sys.stdout
        sys.stdout = self.DummyFile()  # type: ignore

    def reset(self) -> str:
        result = ""
        if isinstance(sys.stdout, self.DummyFile):
            result = sys.stdout.result
            sys.stdout = self.save_stdout
        return result

    def __exit__(self) -> None:
        sys.stdout = self.save_stdout


def run_autoupdate_for_multiprocessing(data) -> tuple[str, tuple[State, str, str, str]]:
    app, edit, commit, pr, latest_commit_weekly = data
    stdoutswitch = StdoutSwitch()
    try:
        autoupdater = AppAutoUpdater(app)
        autoupdater.latest_commit_weekly = latest_commit_weekly
        result = autoupdater.run(edit=edit, commit=commit, pr=pr)
        return (app, result)
    except Exception:
        log_str = stdoutswitch.reset()
        import traceback

        t = traceback.format_exc()
        return (app, (State.failure, log_str, str(t), ""))


def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "apps",
        nargs="*",
        type=Path,
        help="If not passed, the script will run on the catalog. Github keys required.",
    )
    parser.add_argument(
        "-w",
        "--latest-commit-weekly",
        action=argparse.BooleanOptionalAction,
        default=False,
        help="For latest_commit versions, only run weekly to prevent too many PRs",
    )
    parser.add_argument(
        "--edit",
        action=argparse.BooleanOptionalAction,
        default=True,
        help="Edit the local files",
    )
    parser.add_argument(
        "--commit",
        action=argparse.BooleanOptionalAction,
        default=False,
        help="Create a commit with the changes",
    )
    parser.add_argument(
        "--pr",
        action=argparse.BooleanOptionalAction,
        default=False,
        help="Create a pull request with the changes",
    )
    parser.add_argument("--paste", action="store_true")
    parser.add_argument(
        "-j", "--processes", type=int, default=multiprocessing.cpu_count()
    )
    get_apps_repo.add_args(parser)
    args = parser.parse_args()

    appslib.logging_sender.enable()

    if args.commit and not args.edit:
        logging.error("--commit requires --edit")
        sys.exit(1)
    if args.pr and not args.commit:
        logging.error("--pr requires --commit")
        sys.exit(1)

    get_apps_repo.from_args(args)
    cache_path = get_apps_repo.cache_path(args)

    # Handle apps or no apps
    apps = list(args.apps) if args.apps else apps_to_run_auto_update_for(cache_path)
    apps_already = {}  # for which a PR already exists
    apps_updated = {}
    apps_failed = {}

    with multiprocessing.Pool(processes=args.processes) as pool:
        tasks = pool.imap(
            run_autoupdate_for_multiprocessing,
            (
                (app, args.edit, args.commit, args.pr, args.latest_commit_weekly)
                for app in apps
            ),
        )
        for app, result in tqdm.tqdm(tasks, total=len(apps), ascii=" ·#"):
            state, current_version, main_version, pr_url = result
            if state == State.up_to_date:
                continue
            if state == State.already:
                apps_already[app] = (current_version, main_version, pr_url)
            if state == State.created:
                apps_updated[app] = (current_version, main_version, pr_url)
            if state == State.failure:
                apps_failed[app] = current_version, main_version  # actually stores logs

    paste_message = ""
    matrix_message = "Autoupdater just ran, here are the results:\n"
    if apps_already:
        paste_message += f"\n{'=' * 80}\nApps already with an update PR:"
        matrix_message += f"\n- {len(apps_already)} pending update PRs"
        for app, info in apps_already.items():
            paste_message += f"\n- {app}"
            paste_message += (
                f" ({info[0]} -> {info[1]})" if info[1] else " (app version did not change)"
            )
            if info[2]:
                paste_message += f" see {info[2]}"

    if apps_updated:
        paste_message += f"\n{'=' * 80}\nApps updated:"
        matrix_message += f"\n- {len(apps_updated)} new app PRs"
        for app, info in apps_updated.items():
            paste_message += f"\n- {app}"
            paste_message += (
                f" ({info[0]} -> {info[1]})" if info[1] else " (app version did not change)"
            )
            if info[2]:
                paste_message += f" see {info[2]}"

    if apps_failed:
        paste_message += f"\n{'=' * 80}\nApps failed:"
        matrix_message += f"\n- {len(apps_failed)} failed app updates: {', '.join(str(app) for app in apps_failed.keys())}\n"
        for app, logs in apps_failed.items():
            paste_message += f"\n{'='*40}\n{app}\n{'-'*40}\n{logs[0]}\n{logs[1]}\n\n"

    if args.paste:
        paste_url = paste_on_haste(paste_message)
        matrix_message += f"\nSee the full log here: {paste_url}"

    appslib.logging_sender.notify(matrix_message, "apps", markdown=True)
    print(paste_message)


if __name__ == "__main__":
    main()