🎨 Format Python code with Black
commit d81f11f6ee (parent 359de4c5cf)
15 changed files with 670 additions and 251 deletions
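Every hunk below is the kind of rewrite Black performs mechanically: long calls exploded to one argument per line with a trailing comma, chained calls wrapped in parentheses, single quotes normalized to double quotes, and spaces added around operators. As a minimal sketch of the transformation (assuming the `black` package is installed; `format_str` and `Mode` are Black's Python API, which the project does not guarantee as stable, unlike the CLI):

    import black

    # Two lines touched by this commit, as they looked before formatting:
    before = 'x = int(size*j/count)\ntitle="[autopatch] Upgrade auto-updater"\n'

    # format_str() applies the same rules as running `black` over the files.
    print(black.format_str(before, mode=black.Mode()))
    # x = int(size * j / count)
    # title = "[autopatch] Upgrade auto-updater"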
@@ -9,8 +9,11 @@ from typing import Any

 import tqdm

-from appslib.utils import (REPO_APPS_ROOT,  # pylint: disable=import-error
-                           get_catalog, git_repo_age)
+from appslib.utils import (
+    REPO_APPS_ROOT,  # pylint: disable=import-error
+    get_catalog,
+    git_repo_age,
+)
 from git import Repo

@@ -31,7 +34,8 @@ def app_cache_clone(app: str, infos: dict[str, str]) -> None:
         infos["url"],
         to_path=app_cache_folder(app),
         depth=git_depths.get(infos["state"], git_depths["default"]),
-        single_branch=True, branch=infos.get("branch", "master"),
+        single_branch=True,
+        branch=infos.get("branch", "master"),
     )

@@ -23,10 +23,14 @@ def git(cmd: list[str], cwd: Optional[Path] = None) -> str:
     if cwd:
         full_cmd.extend(["-C", str(cwd)])
     full_cmd.extend(cmd)
-    return subprocess.check_output(
-        full_cmd,
-        # env=my_env,
-    ).strip().decode("utf-8")
+    return (
+        subprocess.check_output(
+            full_cmd,
+            # env=my_env,
+        )
+        .strip()
+        .decode("utf-8")
+    )


 def git_repo_age(path: Path) -> Union[bool, int]:
@@ -42,7 +46,8 @@ def get_catalog(working_only: bool = False) -> dict[str, dict[str, Any]]:
     catalog = toml.load((REPO_APPS_ROOT / "apps.toml").open("r", encoding="utf-8"))
     if working_only:
         catalog = {
-            app: infos for app, infos in catalog.items()
+            app: infos
+            for app, infos in catalog.items()
             if infos.get("state") != "notworking"
         }
     return catalog

@@ -7,7 +7,9 @@ import sys

 import requests

-catalog = requests.get("https://raw.githubusercontent.com/YunoHost/apps/master/apps.json").json()
+catalog = requests.get(
+    "https://raw.githubusercontent.com/YunoHost/apps/master/apps.json"
+).json()

 my_env = os.environ.copy()
 my_env["GIT_TERMINAL_PROMPT"] = "0"
@@ -44,15 +46,19 @@ def git(cmd, in_folder=None):
 def progressbar(it, prefix="", size=60, file=sys.stdout):
     it = list(it)
     count = len(it)

     def show(j, name=""):
         name += " "
-        x = int(size*j/count)
-        file.write("%s[%s%s] %i/%i %s\r" % (prefix, "#"*x, "."*(size-x), j, count, name))
+        x = int(size * j / count)
+        file.write(
+            "%s[%s%s] %i/%i %s\r" % (prefix, "#" * x, "." * (size - x), j, count, name)
+        )
         file.flush()

     show(0)
     for i, item in enumerate(it):
         yield item
-        show(i+1, item["id"])
+        show(i + 1, item["id"])
     file.write("\n")
     file.flush()

@@ -63,7 +69,10 @@ def build_cache():
         folder = os.path.join(".apps_cache", app["id"])
         reponame = app["url"].rsplit("/", 1)[-1]
         git(f"clone --quiet --depth 1 --single-branch {app['url']} {folder}")
-        git(f"remote add fork https://{login}:{token}@github.com/{login}/{reponame}", in_folder=folder)
+        git(
+            f"remote add fork https://{login}:{token}@github.com/{login}/{reponame}",
+            in_folder=folder,
+        )


 def apply(patch):
@@ -81,7 +90,11 @@ def diff():

     for app in apps():
         folder = os.path.join(".apps_cache", app["id"])
-        if bool(subprocess.check_output(f"cd {folder} && git diff", shell=True).strip().decode("utf-8")):
+        if bool(
+            subprocess.check_output(f"cd {folder} && git diff", shell=True)
+            .strip()
+            .decode("utf-8")
+        ):
             print("\n\n\n")
             print("=================================")
             print("Changes in : " + app["id"])
@@ -92,35 +105,50 @@ def diff():

 def push(patch):

-    title = "[autopatch] " + open(os.path.join("patches", patch, "pr_title.md")).read().strip()
+    title = (
+        "[autopatch] "
+        + open(os.path.join("patches", patch, "pr_title.md")).read().strip()
+    )

     def diff_not_empty(app):
         folder = os.path.join(".apps_cache", app["id"])
-        return bool(subprocess.check_output(f"cd {folder} && git diff", shell=True).strip().decode("utf-8"))
+        return bool(
+            subprocess.check_output(f"cd {folder} && git diff", shell=True)
+            .strip()
+            .decode("utf-8")
+        )

     def app_is_on_github(app):
         return "github.com" in app["url"]

-    apps_to_push = [app for app in apps() if diff_not_empty(app) and app_is_on_github(app)]
+    apps_to_push = [
+        app for app in apps() if diff_not_empty(app) and app_is_on_github(app)
+    ]

     with requests.Session() as s:
         s.headers.update({"Authorization": f"token {token}"})
         for app in progressbar(apps_to_push, "Forking: ", 40):
-            app["repo"] = app["url"][len("https://github.com/"):].strip("/")
+            app["repo"] = app["url"][len("https://github.com/") :].strip("/")
             fork_if_needed(app["repo"], s)

         for app in progressbar(apps_to_push, "Pushing: ", 40):
-            app["repo"] = app["url"][len("https://github.com/"):].strip("/")
+            app["repo"] = app["url"][len("https://github.com/") :].strip("/")
             app_repo_name = app["url"].rsplit("/", 1)[-1]
             folder = os.path.join(".apps_cache", app["id"])
             current_branch = git(f"symbolic-ref --short HEAD", in_folder=folder)
             git(f"reset origin/{current_branch}", in_folder=folder)
-            git(["commit", "-a", "-m", title, "--author='Yunohost-Bot <>'"], in_folder=folder)
+            git(
+                ["commit", "-a", "-m", title, "--author='Yunohost-Bot <>'"],
+                in_folder=folder,
+            )
             try:
                 git(f"remote remove fork", in_folder=folder)
             except Exception:
                 pass
-            git(f"remote add fork https://{login}:{token}@github.com/{login}/{app_repo_name}", in_folder=folder)
+            git(
+                f"remote add fork https://{login}:{token}@github.com/{login}/{app_repo_name}",
+                in_folder=folder,
+            )
             git(f"push fork {current_branch}:{patch} --quiet --force", in_folder=folder)
             create_pull_request(app["repo"], patch, current_branch, s)

@@ -141,11 +169,15 @@ def fork_if_needed(repo, s):

 def create_pull_request(repo, patch, base_branch, s):

-    PR = {"title": "[autopatch] " + open(os.path.join("patches", patch, "pr_title.md")).read().strip(),
-          "body": "This is an automatic PR\n\n" + open(os.path.join("patches", patch, "pr_body.md")).read().strip(),
-          "head": login + ":" + patch,
-          "base": base_branch,
-          "maintainer_can_modify": True}
+    PR = {
+        "title": "[autopatch] "
+        + open(os.path.join("patches", patch, "pr_title.md")).read().strip(),
+        "body": "This is an automatic PR\n\n"
+        + open(os.path.join("patches", patch, "pr_body.md")).read().strip(),
+        "head": login + ":" + patch,
+        "base": base_branch,
+        "maintainer_can_modify": True,
+    }

     r = s.post(github_api + f"/repos/{repo}/pulls", json.dumps(PR))

@@ -159,7 +191,8 @@ def main():

     action = sys.argv[1]
     if action == "--help":
-        print("""
+        print(
+            """
 Example usage:

 # Init local git clone for all apps
@@ -173,7 +206,8 @@ def main():

 # Push and create pull requests on all apps with non-empty diff
 ./autopatch.py --push explicit-php-version-in-deps
-""")
+"""
+        )

     elif action == "--build-cache":
         build_cache()

@@ -21,10 +21,20 @@ import github
 # add apps/tools to sys.path
 sys.path.insert(0, str(Path(__file__).parent.parent))

-from rest_api import GithubAPI, GitlabAPI, GiteaForgejoAPI, RefType  # noqa: E402,E501 pylint: disable=import-error,wrong-import-position
+from rest_api import (
+    GithubAPI,
+    GitlabAPI,
+    GiteaForgejoAPI,
+    RefType,
+)  # noqa: E402,E501 pylint: disable=import-error,wrong-import-position
 import appslib.logging_sender  # noqa: E402 pylint: disable=import-error,wrong-import-position
-from appslib.utils import REPO_APPS_ROOT, get_catalog  # noqa: E402 pylint: disable=import-error,wrong-import-position
-from app_caches import app_cache_folder  # noqa: E402 pylint: disable=import-error,wrong-import-position
+from appslib.utils import (
+    REPO_APPS_ROOT,
+    get_catalog,
+)  # noqa: E402 pylint: disable=import-error,wrong-import-position
+from app_caches import (
+    app_cache_folder,
+)  # noqa: E402 pylint: disable=import-error,wrong-import-position


 STRATEGIES = [
@@ -44,11 +54,30 @@ STRATEGIES = [


 @cache
-def get_github() -> tuple[Optional[tuple[str, str]], Optional[github.Github], Optional[github.InputGitAuthor]]:
+def get_github() -> tuple[
+    Optional[tuple[str, str]],
+    Optional[github.Github],
+    Optional[github.InputGitAuthor],
+]:
     try:
-        github_login = (REPO_APPS_ROOT / ".github_login").open("r", encoding="utf-8").read().strip()
-        github_token = (REPO_APPS_ROOT / ".github_token").open("r", encoding="utf-8").read().strip()
-        github_email = (REPO_APPS_ROOT / ".github_email").open("r", encoding="utf-8").read().strip()
+        github_login = (
+            (REPO_APPS_ROOT / ".github_login")
+            .open("r", encoding="utf-8")
+            .read()
+            .strip()
+        )
+        github_token = (
+            (REPO_APPS_ROOT / ".github_token")
+            .open("r", encoding="utf-8")
+            .read()
+            .strip()
+        )
+        github_email = (
+            (REPO_APPS_ROOT / ".github_email")
+            .open("r", encoding="utf-8")
+            .read()
+            .strip()
+        )

         auth = (github_login, github_token)
         github_api = github.Github(github_token)
@@ -96,7 +125,9 @@ class LocalOrRemoteRepo:
             if not self.manifest_path.exists():
                 raise RuntimeError(f"{app.name}: manifest.toml doesnt exists?")
             # app is in fact a path
-            self.manifest_raw = (app / "manifest.toml").open("r", encoding="utf-8").read()
+            self.manifest_raw = (
+                (app / "manifest.toml").open("r", encoding="utf-8").read()
+            )

         elif isinstance(app, str):
             # It's remote
@@ -187,7 +218,9 @@ class AppAutoUpdater:
         self.main_upstream = self.manifest.get("upstream", {}).get("code")

-    def run(self, edit: bool = False, commit: bool = False, pr: bool = False) -> tuple[State, str, str, str]:
+    def run(
+        self, edit: bool = False, commit: bool = False, pr: bool = False
+    ) -> tuple[State, str, str, str]:
         state = State.up_to_date
         main_version = ""
         pr_url = ""
@@ -212,7 +245,11 @@ class AppAutoUpdater:
                 commit_msg += f"\n{msg}"

                 self.repo.manifest_raw = self.replace_version_and_asset_in_manifest(
-                    self.repo.manifest_raw, version, assets, infos, is_main=source == "main",
+                    self.repo.manifest_raw,
+                    version,
+                    assets,
+                    infos,
+                    is_main=source == "main",
                 )

         if state == State.up_to_date:
@@ -246,7 +283,9 @@ class AppAutoUpdater:
         return (state, self.current_version, main_version, pr_url)

     @staticmethod
-    def relevant_versions(tags: list[str], app_id: str, version_regex: Optional[str]) -> tuple[str, str]:
+    def relevant_versions(
+        tags: list[str], app_id: str, version_regex: Optional[str]
+    ) -> tuple[str, str]:

         def apply_version_regex(tag: str) -> Optional[str]:
             # First preprocessing according to the manifest version_regex…
@@ -255,7 +294,9 @@ class AppAutoUpdater:
             if match is None:
                 return None
             # Basically: either groupdict if named capture gorups, sorted by names, or groups()
-            tag = ".".join(dict(sorted(match.groupdict().items())).values() or match.groups())
+            tag = ".".join(
+                dict(sorted(match.groupdict().items())).values() or match.groups()
+            )

             # Then remove leading v
             tag = tag.lstrip("v")
@@ -264,7 +305,9 @@ class AppAutoUpdater:
         def version_numbers(tag: str) -> Optional[tuple[int, ...]]:
             filter_keywords = ["start", "rc", "beta", "alpha"]
             if any(keyword in tag for keyword in filter_keywords):
-                logging.debug(f"Tag {tag} contains filtered keyword from {filter_keywords}.")
+                logging.debug(
+                    f"Tag {tag} contains filtered keyword from {filter_keywords}."
+                )
                 return None

             t_to_check = tag
@@ -302,7 +345,9 @@ class AppAutoUpdater:
     def tag_to_int_tuple(tag: str) -> tuple[int, ...]:
         tag = tag.lstrip("v").replace("-", ".").rstrip(".")
         int_tuple = tag.split(".")
-        assert all(i.isdigit() for i in int_tuple), f"Cant convert {tag} to int tuple :/"
+        assert all(
+            i.isdigit() for i in int_tuple
+        ), f"Cant convert {tag} to int tuple :/"
         return tuple(int(i) for i in int_tuple)

     @staticmethod
@@ -317,8 +362,9 @@ class AppAutoUpdater:
         except Exception as e:
             raise RuntimeError(f"Failed to compute sha256 for {url} : {e}") from e

-    def get_source_update(self, name: str, infos: dict[str, Any]
-                          ) -> Optional[tuple[str, Union[str, dict[str, str]], str]]:
+    def get_source_update(
+        self, name: str, infos: dict[str, Any]
+    ) -> Optional[tuple[str, Union[str, dict[str, str]], str]]:
         autoupdate = infos.get("autoupdate")
         if autoupdate is None:
             return None
@@ -327,7 +373,9 @@ class AppAutoUpdater:
         asset = autoupdate.get("asset", "tarball")
         strategy = autoupdate.get("strategy")
         if strategy not in STRATEGIES:
-            raise ValueError(f"Unknown update strategy '{strategy}' for '{name}', expected one of {STRATEGIES}")
+            raise ValueError(
+                f"Unknown update strategy '{strategy}' for '{name}', expected one of {STRATEGIES}"
+            )

         result = self.get_latest_version_and_asset(strategy, asset, autoupdate)
         if result is None:
@@ -347,14 +395,22 @@ class AppAutoUpdater:
             print("Up to date")
             return None
         try:
-            if self.tag_to_int_tuple(self.current_version) > self.tag_to_int_tuple(new_version):
-                print("Up to date (current version appears more recent than newest version found)")
+            if self.tag_to_int_tuple(self.current_version) > self.tag_to_int_tuple(
+                new_version
+            ):
+                print(
+                    "Up to date (current version appears more recent than newest version found)"
+                )
                 return None
         except (AssertionError, ValueError):
             pass

-        if isinstance(assets, dict) and isinstance(infos.get("url"), str) or \
-           isinstance(assets, str) and not isinstance(infos.get("url"), str):
+        if (
+            isinstance(assets, dict)
+            and isinstance(infos.get("url"), str)
+            or isinstance(assets, str)
+            and not isinstance(infos.get("url"), str)
+        ):
             raise RuntimeError(
                 "It looks like there's an inconsistency between the old asset list and the new ones... "
                 "One is arch-specific, the other is not... Did you forget to define arch-specific regexes? "
@@ -364,7 +420,9 @@ class AppAutoUpdater:
         if isinstance(assets, str) and infos["url"] == assets:
             print(f"URL for asset {name} is up to date")
             return None
-        if isinstance(assets, dict) and assets == {k: infos[k]["url"] for k in assets.keys()}:
+        if isinstance(assets, dict) and assets == {
+            k: infos[k]["url"] for k in assets.keys()
+        }:
             print(f"URLs for asset {name} are up to date")
             return None
         print(f"Update needed for {name}")
@@ -376,21 +434,26 @@ class AppAutoUpdater:
             name: url for name, url in assets.items() if re.match(regex, name)
         }
         if not matching_assets:
-            raise RuntimeError(f"No assets matching regex '{regex}' in {list(assets.keys())}")
+            raise RuntimeError(
+                f"No assets matching regex '{regex}' in {list(assets.keys())}"
+            )
         if len(matching_assets) > 1:
-            raise RuntimeError(f"Too many assets matching regex '{regex}': {matching_assets}")
+            raise RuntimeError(
+                f"Too many assets matching regex '{regex}': {matching_assets}"
+            )
         return next(iter(matching_assets.items()))

-    def get_latest_version_and_asset(self, strategy: str, asset: Union[str, dict], autoupdate
-                                     ) -> Optional[tuple[str, Union[str, dict[str, str]], str]]:
+    def get_latest_version_and_asset(
+        self, strategy: str, asset: Union[str, dict], autoupdate
+    ) -> Optional[tuple[str, Union[str, dict[str, str]], str]]:
         upstream = autoupdate.get("upstream", self.main_upstream).strip("/")
         version_re = autoupdate.get("version_regex", None)
         _, remote_type, revision_type = strategy.split("_")

         api: Union[GithubAPI, GitlabAPI, GiteaForgejoAPI]
         if remote_type == "github":
-            assert (
-                upstream and upstream.startswith("https://github.com/")
+            assert upstream and upstream.startswith(
+                "https://github.com/"
             ), f"When using strategy {strategy}, having a defined upstream code repo on github.com is required"
             api = GithubAPI(upstream, auth=get_github()[0])
         if remote_type == "gitlab":
@@ -404,7 +467,9 @@ class AppAutoUpdater:
                 for release in api.releases()
                 if not release["draft"] and not release["prerelease"]
             }
-            latest_version_orig, latest_version = self.relevant_versions(list(releases.keys()), self.app_id, version_re)
+            latest_version_orig, latest_version = self.relevant_versions(
+                list(releases.keys()), self.app_id, version_re
+            )
             latest_release = releases[latest_version_orig]
             latest_assets = {
                 a["name"]: a["browser_download_url"]
@@ -425,7 +490,9 @@ class AppAutoUpdater:
                     _, url = self.find_matching_asset(latest_assets, asset)
                     return latest_version, url, latest_release_html_url
                 except RuntimeError as e:
-                    raise RuntimeError(f"{e}.\nFull release details on {latest_release_html_url}.") from e
+                    raise RuntimeError(
+                        f"{e}.\nFull release details on {latest_release_html_url}."
+                    ) from e

             if isinstance(asset, dict):
                 new_assets = {}
@@ -434,34 +501,50 @@ class AppAutoUpdater:
                         _, url = self.find_matching_asset(latest_assets, asset_regex)
                         new_assets[asset_name] = url
                     except RuntimeError as e:
-                        raise RuntimeError(f"{e}.\nFull release details on {latest_release_html_url}.") from e
+                        raise RuntimeError(
+                            f"{e}.\nFull release details on {latest_release_html_url}."
+                        ) from e
                 return latest_version, new_assets, latest_release_html_url

             return None

         if revision_type == "tag":
             if asset != "tarball":
-                raise ValueError("For the latest tag strategies, only asset = 'tarball' is supported")
+                raise ValueError(
+                    "For the latest tag strategies, only asset = 'tarball' is supported"
+                )
             tags = [t["name"] for t in api.tags()]
-            latest_version_orig, latest_version = self.relevant_versions(tags, self.app_id, version_re)
+            latest_version_orig, latest_version = self.relevant_versions(
+                tags, self.app_id, version_re
+            )
             latest_tarball = api.url_for_ref(latest_version_orig, RefType.tags)
             return latest_version, latest_tarball, ""

         if revision_type == "commit":
             if asset != "tarball":
-                raise ValueError("For the latest commit strategies, only asset = 'tarball' is supported")
+                raise ValueError(
+                    "For the latest commit strategies, only asset = 'tarball' is supported"
+                )
             commits = api.commits()
             latest_commit = commits[0]
             latest_tarball = api.url_for_ref(latest_commit["sha"], RefType.commits)
             # Let's have the version as something like "2023.01.23"
-            latest_commit_date = datetime.strptime(latest_commit["commit"]["author"]["date"][:10], "%Y-%m-%d")
+            latest_commit_date = datetime.strptime(
+                latest_commit["commit"]["author"]["date"][:10], "%Y-%m-%d"
+            )
             version_format = autoupdate.get("force_version", "%Y.%m.%d")
             latest_version = latest_commit_date.strftime(version_format)
             return latest_version, latest_tarball, ""
         return None

-    def replace_version_and_asset_in_manifest(self, content: str, new_version: str, new_assets_urls: Union[str, dict],
-                                              current_assets: dict, is_main: bool):
+    def replace_version_and_asset_in_manifest(
+        self,
+        content: str,
+        new_version: str,
+        new_assets_urls: Union[str, dict],
+        current_assets: dict,
+        is_main: bool,
+    ):
         replacements = []
         if isinstance(new_assets_urls, str):
             replacements = [
@@ -471,16 +554,21 @@ class AppAutoUpdater:
         if isinstance(new_assets_urls, dict):
             replacements = [
                 repl
-                for key, url in new_assets_urls.items() for repl in (
+                for key, url in new_assets_urls.items()
+                for repl in (
                     (current_assets[key]["url"], url),
-                    (current_assets[key]["sha256"], self.sha256_of_remote_file(url))
+                    (current_assets[key]["sha256"], self.sha256_of_remote_file(url)),
                 )
             ]

         if is_main:

             def repl(m: re.Match) -> str:
                 return m.group(1) + new_version + '~ynh1"'
-            content = re.sub(r"(\s*version\s*=\s*[\"\'])([^~\"\']+)(\~ynh\d+[\"\'])", repl, content)
+
+            content = re.sub(
+                r"(\s*version\s*=\s*[\"\'])([^~\"\']+)(\~ynh\d+[\"\'])", repl, content
+            )

         for old, new in replacements:
             content = content.replace(old, new)
@@ -538,22 +626,41 @@ def run_autoupdate_for_multiprocessing(data) -> tuple[str, tuple[State, str, str
     except Exception:
         log_str = stdoutswitch.reset()
         import traceback

         t = traceback.format_exc()
         return (app, (State.failure, log_str, str(t), ""))


 def main() -> None:
     parser = argparse.ArgumentParser()
-    parser.add_argument("apps", nargs="*", type=Path,
-                        help="If not passed, the script will run on the catalog. Github keys required.")
-    parser.add_argument("--edit", action=argparse.BooleanOptionalAction, default=True,
-                        help="Edit the local files")
-    parser.add_argument("--commit", action=argparse.BooleanOptionalAction, default=False,
-                        help="Create a commit with the changes")
-    parser.add_argument("--pr", action=argparse.BooleanOptionalAction, default=False,
-                        help="Create a pull request with the changes")
+    parser.add_argument(
+        "apps",
+        nargs="*",
+        type=Path,
+        help="If not passed, the script will run on the catalog. Github keys required.",
+    )
+    parser.add_argument(
+        "--edit",
+        action=argparse.BooleanOptionalAction,
+        default=True,
+        help="Edit the local files",
+    )
+    parser.add_argument(
+        "--commit",
+        action=argparse.BooleanOptionalAction,
+        default=False,
+        help="Create a commit with the changes",
+    )
+    parser.add_argument(
+        "--pr",
+        action=argparse.BooleanOptionalAction,
+        default=False,
+        help="Create a pull request with the changes",
+    )
     parser.add_argument("--paste", action="store_true")
-    parser.add_argument("-j", "--processes", type=int, default=multiprocessing.cpu_count())
+    parser.add_argument(
+        "-j", "--processes", type=int, default=multiprocessing.cpu_count()
+    )
     args = parser.parse_args()

     appslib.logging_sender.enable()
@@ -572,8 +679,10 @@ def main() -> None:
     apps_failed = {}

     with multiprocessing.Pool(processes=args.processes) as pool:
-        tasks = pool.imap(run_autoupdate_for_multiprocessing,
-                          ((app, args.edit, args.commit, args.pr) for app in apps))
+        tasks = pool.imap(
+            run_autoupdate_for_multiprocessing,
+            ((app, args.edit, args.commit, args.pr) for app in apps),
+        )
         for app, result in tqdm.tqdm(tasks, total=len(apps), ascii=" ·#"):
             state, current_version, main_version, pr_url = result
             if state == State.up_to_date:
@@ -592,7 +701,9 @@ def main() -> None:
         matrix_message += f"\n- {len(apps_already)} pending update PRs"
     for app, info in apps_already.items():
         paste_message += f"\n- {app}"
-        paste_message += f" ({info[0]} -> {info[1]})" if info[1] else " (app version did not change)"
+        paste_message += (
+            f" ({info[0]} -> {info[1]})" if info[1] else " (app version did not change)"
+        )
         if info[2]:
             paste_message += f" see {info[2]}"

@@ -601,7 +712,9 @@ def main() -> None:
         matrix_message += f"\n- {len(apps_updated)} new apps PRs"
     for app, info in apps_updated.items():
         paste_message += f"\n- {app}"
-        paste_message += f" ({info[0]} -> {info[1]})" if info[1] else " (app version did not change)"
+        paste_message += (
+            f" ({info[0]} -> {info[1]})" if info[1] else " (app version did not change)"
+        )
        if info[2]:
            paste_message += f" see {info[2]}"

@@ -15,11 +15,10 @@ class RefType(Enum):
 class GithubAPI:
     def __init__(self, upstream: str, auth: Optional[tuple[str, str]] = None):
         self.upstream = upstream
-        self.upstream_repo = upstream.replace("https://github.com/", "")\
-            .strip("/")
+        self.upstream_repo = upstream.replace("https://github.com/", "").strip("/")
         assert (
             len(self.upstream_repo.split("/")) == 2
         ), f"'{upstream}' doesn't seem to be a github repository ?"
         self.auth = auth

     def internal_api(self, uri: str) -> Any:
@@ -74,7 +73,12 @@ class GitlabAPI:
         # Second chance for some buggy gitlab instances...
         name = self.project_path.split("/")[-1]
         projects = self.internal_api(f"projects?search={name}")
-        project = next(filter(lambda x: x.get("path_with_namespace") == self.project_path, projects))
+        project = next(
+            filter(
+                lambda x: x.get("path_with_namespace") == self.project_path,
+                projects,
+            )
+        )

         assert isinstance(project, dict)
         project_id = project.get("id", None)
@@ -95,13 +99,11 @@ class GitlabAPI:
         return [
             {
                 "sha": commit["id"],
-                "commit": {
-                    "author": {
-                        "date": commit["committed_date"]
-                    }
-                }
+                "commit": {"author": {"date": commit["committed_date"]}},
             }
-            for commit in self.internal_api(f"projects/{self.project_id}/repository/commits")
+            for commit in self.internal_api(
+                f"projects/{self.project_id}/repository/commits"
+            )
         ]

     def releases(self) -> list[dict[str, Any]]:
@@ -114,16 +116,21 @@ class GitlabAPI:
                 "prerelease": False,
                 "draft": False,
                 "html_url": release["_links"]["self"],
-                "assets": [{
-                    "name": asset["name"],
-                    "browser_download_url": asset["direct_asset_url"]
-                } for asset in release["assets"]["links"]],
+                "assets": [
+                    {
+                        "name": asset["name"],
+                        "browser_download_url": asset["direct_asset_url"],
+                    }
+                    for asset in release["assets"]["links"]
+                ],
             }
             for source in release["assets"]["sources"]:
-                r["assets"].append({
-                    "name": f"source.{source['format']}",
-                    "browser_download_url": source['url']
-                })
+                r["assets"].append(
+                    {
+                        "name": f"source.{source['format']}",
+                        "browser_download_url": source["url"],
+                    }
+                )
             retval.append(r)

         return retval

@@ -9,6 +9,7 @@ import urllib.request

 import github
 from github import Github
+
 # Debug
 from rich.traceback import install

@@ -24,23 +25,25 @@ install(width=150, show_locals=True, locals_max_length=None, locals_max_string=N
 g = Github(open(".github_token").read().strip())

 # Path to the file to be updated
-path=".github/workflows/updater.yml"
+path = ".github/workflows/updater.yml"

 # Title of the PR
-title="[autopatch] Upgrade auto-updater"
+title = "[autopatch] Upgrade auto-updater"

 # Body of the PR message
-body="""
+body = """
 Auto-updater actions need upgrading to continue working:
 - actions/checkout@v3
 - peter-evans/create-pull-request@v4
 """

 # Author of the commit
-author=github.InputGitAuthor(open(".github_login").read().strip(), open(".github_email").read().strip())
+author = github.InputGitAuthor(
+    open(".github_login").read().strip(), open(".github_email").read().strip()
+)

 # Name of the branch created for the PR
-new_branch="upgrade-auto-updater"
+new_branch = "upgrade-auto-updater"

 #####
 #
@@ -48,7 +51,7 @@ new_branch="upgrade-auto-updater"
 #
 #####

-with open('processed.txt') as f:
+with open("processed.txt") as f:
     processed = f.read().splitlines()

 #####
@@ -61,7 +64,7 @@ u = g.get_user("yunohost-bot")
 org = g.get_organization("yunohost-apps")

 # For each repositories belonging to the bot (user `u`)
-i=0
+i = 0
 for repo in org.get_repos():
     if repo.full_name not in processed:
@@ -73,50 +76,64 @@ for repo in org.get_repos():

         # Make sure the repository has an auto-updater
         try:
-            repo.get_contents(path, ref="refs/heads/"+base_branch)
+            repo.get_contents(path, ref="refs/heads/" + base_branch)
         except:
-            with open('processed.txt', 'a') as pfile:
-                pfile.write(repo.full_name+'\n')
+            with open("processed.txt", "a") as pfile:
+                pfile.write(repo.full_name + "\n")
             time.sleep(1.5)
             continue

         # Process the repo
-        print("Processing "+repo.full_name)
+        print("Processing " + repo.full_name)

         try:
             # Get the commit base for the new branch, and create it
             commit_sha = repo.get_branch(base_branch).commit.sha
-            new_branch_ref = repo.create_git_ref(ref="refs/heads/"+new_branch, sha=commit_sha)
+            new_branch_ref = repo.create_git_ref(
+                ref="refs/heads/" + new_branch, sha=commit_sha
+            )
         except:
-            new_branch_ref = repo.get_git_ref(ref="heads/"+new_branch)
+            new_branch_ref = repo.get_git_ref(ref="heads/" + new_branch)

         # Get current file contents
         contents = repo.get_contents(path, ref=new_branch_ref.ref)

         # Update the file
         updater_yml = contents.decoded_content.decode("unicode_escape")
-        updater_yml = re.sub(r'(?m)uses: actions/checkout@v[\d]+', "uses: actions/checkout@v3", updater_yml)
-        updater_yml = re.sub(r'(?m)uses: peter-evans/create-pull-request@v[\d]+', "uses: peter-evans/create-pull-request@v4", updater_yml)
-        updated = repo.update_file(contents.path,
-                                   message=title,
-                                   content=updater_yml,
-                                   sha=contents.sha,
-                                   branch=new_branch,
-                                   author=author)
+        updater_yml = re.sub(
+            r"(?m)uses: actions/checkout@v[\d]+",
+            "uses: actions/checkout@v3",
+            updater_yml,
+        )
+        updater_yml = re.sub(
+            r"(?m)uses: peter-evans/create-pull-request@v[\d]+",
+            "uses: peter-evans/create-pull-request@v4",
+            updater_yml,
+        )
+        updated = repo.update_file(
+            contents.path,
+            message=title,
+            content=updater_yml,
+            sha=contents.sha,
+            branch=new_branch,
+            author=author,
+        )

         # Wait a bit to preserve the API rate limit
         time.sleep(1.5)

         # Open the PR
-        pr = repo.create_pull(title="Upgrade auto-updater", body=body, head=new_branch, base=base_branch)
+        pr = repo.create_pull(
+            title="Upgrade auto-updater", body=body, head=new_branch, base=base_branch
+        )

-        print(repo.full_name+" updated with PR #"+ str(pr.id))
-        i=i+1
+        print(repo.full_name + " updated with PR #" + str(pr.id))
+        i = i + 1

     # Wait a bit to preserve the API rate limit
     time.sleep(1.5)

-    with open('processed.txt', 'a') as pfile:
-        pfile.write(repo.full_name+'\n')
+    with open("processed.txt", "a") as pfile:
+        pfile.write(repo.full_name + "\n")

-print("Done. "+str(i)+" repos processed")
+print("Done. " + str(i) + " repos processed")

@@ -10,17 +10,22 @@ u = g.get_user("yunohost-bot")

 # Let's build a minimalistic summary table
 print("| Repository ".ljust(22) + " | Decision |")
-print("| ".ljust(22, '-') + " | -------- |")
+print("| ".ljust(22, "-") + " | -------- |")

 # For each repositories belonging to the bot (user `u`)
 for repo in u.get_repos():
     # Proceed iff the repository is a fork (`parent` key is set) of a repository in our apps organization
-    if repo.parent.full_name.split('/')[0] != "YunoHost-Apps":
-        print("| "+repo.name.ljust(20) + " | Skipping |")
+    if repo.parent.full_name.split("/")[0] != "YunoHost-Apps":
+        print("| " + repo.name.ljust(20) + " | Skipping |")
     else:
         # If none of the PRs are opened by the bot, delete the repository
-        if not any([ (pr.user == u) for pr in list(repo.parent.get_pulls(state='open', sort='created')) ]):
-            print("| "+repo.name.ljust(20) + " | Deleting |")
+        if not any(
+            [
+                (pr.user == u)
+                for pr in list(repo.parent.get_pulls(state="open", sort="created"))
+            ]
+        ):
+            print("| " + repo.name.ljust(20) + " | Deleting |")
             repo.delete()
         else:
-            print("| "+repo.name.ljust(20) + " | Keeping |")
+            print("| " + repo.name.ljust(20) + " | Keeping |")

@@ -6,20 +6,29 @@ from difflib import SequenceMatcher
 from typing import Any, Dict, Generator, List, Tuple

 import jsonschema
-from appslib.utils import (REPO_APPS_ROOT,  # pylint: disable=import-error
-                           get_antifeatures, get_catalog, get_categories,
-                           get_graveyard, get_wishlist)
+from appslib.utils import (
+    REPO_APPS_ROOT,  # pylint: disable=import-error
+    get_antifeatures,
+    get_catalog,
+    get_categories,
+    get_graveyard,
+    get_wishlist,
+)


 def validate_schema() -> Generator[str, None, None]:
-    with open(REPO_APPS_ROOT / "schemas" / "apps.toml.schema.json", encoding="utf-8") as file:
+    with open(
+        REPO_APPS_ROOT / "schemas" / "apps.toml.schema.json", encoding="utf-8"
+    ) as file:
         apps_catalog_schema = json.load(file)
     validator = jsonschema.Draft202012Validator(apps_catalog_schema)
     for error in validator.iter_errors(get_catalog()):
         yield f"at .{'.'.join(error.path)}: {error.message}"


-def check_app(app: str, infos: Dict[str, Any]) -> Generator[Tuple[str, bool], None, None]:
+def check_app(
+    app: str, infos: Dict[str, Any]
+) -> Generator[Tuple[str, bool], None, None]:
     if "state" not in infos:
         yield "state is missing", True
         return

list_builder.py (123 changed lines)
@ -21,10 +21,15 @@ from git import Repo
|
||||||
import appslib.logging_sender # pylint: disable=import-error
|
import appslib.logging_sender # pylint: disable=import-error
|
||||||
from app_caches import app_cache_folder # pylint: disable=import-error
|
from app_caches import app_cache_folder # pylint: disable=import-error
|
||||||
from app_caches import apps_cache_update_all # pylint: disable=import-error
|
from app_caches import apps_cache_update_all # pylint: disable=import-error
|
||||||
from appslib.utils import (REPO_APPS_ROOT, # pylint: disable=import-error
|
from appslib.utils import (
|
||||||
get_antifeatures, get_catalog, get_categories)
|
REPO_APPS_ROOT, # pylint: disable=import-error
|
||||||
from packaging_v2.convert_v1_manifest_to_v2_for_catalog import \
|
get_antifeatures,
|
||||||
convert_v1_manifest_to_v2_for_catalog # pylint: disable=import-error
|
get_catalog,
|
||||||
|
get_categories,
|
||||||
|
)
|
||||||
|
from packaging_v2.convert_v1_manifest_to_v2_for_catalog import (
|
||||||
|
convert_v1_manifest_to_v2_for_catalog,
|
||||||
|
) # pylint: disable=import-error
|
||||||
|
|
||||||
now = time.time()
|
now = time.time()
|
||||||
|
|
||||||
|
@ -37,7 +42,7 @@ def categories_list():
|
||||||
infos["id"] = category_id
|
infos["id"] = category_id
|
||||||
for subtag_id, subtag_infos in infos.get("subtags", {}).items():
|
for subtag_id, subtag_infos in infos.get("subtags", {}).items():
|
||||||
subtag_infos["id"] = subtag_id
|
subtag_infos["id"] = subtag_id
|
||||||
infos["subtags"] = list(infos.get('subtags', {}).values())
|
infos["subtags"] = list(infos.get("subtags", {}).values())
|
||||||
return list(new_categories.values())
|
return list(new_categories.values())
|
||||||
|
|
||||||
|
|
||||||
|
@ -54,6 +59,7 @@ def antifeatures_list():
|
||||||
# Actual list build management #
|
# Actual list build management #
|
||||||
################################
|
################################
|
||||||
|
|
||||||
|
|
||||||
def __build_app_dict(data) -> Optional[tuple[str, dict[str, Any]]]:
|
def __build_app_dict(data) -> Optional[tuple[str, dict[str, Any]]]:
|
||||||
name, info = data
|
name, info = data
|
||||||
try:
|
try:
|
||||||
|
@ -93,13 +99,17 @@ def write_catalog_v2(base_catalog, target_dir: Path) -> None:
|
||||||
|
|
||||||
target_file = target_dir / "apps.json"
|
target_file = target_dir / "apps.json"
|
||||||
target_file.parent.mkdir(parents=True, exist_ok=True)
|
target_file.parent.mkdir(parents=True, exist_ok=True)
|
||||||
target_file.open("w", encoding="utf-8").write(json.dumps(full_catalog, sort_keys=True))
|
target_file.open("w", encoding="utf-8").write(
|
||||||
|
json.dumps(full_catalog, sort_keys=True)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def write_catalog_v3(base_catalog, target_dir: Path) -> None:
|
def write_catalog_v3(base_catalog, target_dir: Path) -> None:
|
||||||
result_dict_with_manifest_v2 = copy.deepcopy(base_catalog)
|
result_dict_with_manifest_v2 = copy.deepcopy(base_catalog)
|
||||||
for app in result_dict_with_manifest_v2.values():
|
for app in result_dict_with_manifest_v2.values():
|
||||||
packaging_format = float(str(app["manifest"].get("packaging_format", "")).strip() or "0")
|
packaging_format = float(
|
||||||
|
str(app["manifest"].get("packaging_format", "")).strip() or "0"
|
||||||
|
)
|
||||||
if packaging_format < 2:
|
if packaging_format < 2:
|
||||||
app["manifest"] = convert_v1_manifest_to_v2_for_catalog(app["manifest"])
|
app["manifest"] = convert_v1_manifest_to_v2_for_catalog(app["manifest"])
|
||||||
|
|
||||||
|
@ -117,7 +127,12 @@ def write_catalog_v3(base_catalog, target_dir: Path) -> None:
|
||||||
appid = appid.lower()
|
appid = appid.lower()
|
||||||
logo_source = REPO_APPS_ROOT / "logos" / f"{appid}.png"
|
logo_source = REPO_APPS_ROOT / "logos" / f"{appid}.png"
|
||||||
if logo_source.exists():
|
if logo_source.exists():
|
||||||
logo_hash = subprocess.check_output(["sha256sum", logo_source]).strip().decode("utf-8").split()[0]
|
logo_hash = (
|
||||||
|
subprocess.check_output(["sha256sum", logo_source])
|
||||||
|
.strip()
|
||||||
|
.decode("utf-8")
|
||||||
|
.split()[0]
|
||||||
|
)
|
||||||
shutil.copyfile(logo_source, logos_dir / f"{logo_hash}.png")
|
shutil.copyfile(logo_source, logos_dir / f"{logo_hash}.png")
|
||||||
# FIXME: implement something to cleanup old logo stuf in the builds/.../logos/ folder somehow
|
# FIXME: implement something to cleanup old logo stuf in the builds/.../logos/ folder somehow
|
||||||
else:
|
else:
|
||||||
|
@ -132,7 +147,9 @@ def write_catalog_v3(base_catalog, target_dir: Path) -> None:
|
||||||
|
|
||||||
target_file = target_dir / "apps.json"
|
target_file = target_dir / "apps.json"
|
||||||
target_file.parent.mkdir(parents=True, exist_ok=True)
|
target_file.parent.mkdir(parents=True, exist_ok=True)
|
||||||
target_file.open("w", encoding="utf-8").write(json.dumps(full_catalog, sort_keys=True))
|
target_file.open("w", encoding="utf-8").write(
|
||||||
|
json.dumps(full_catalog, sort_keys=True)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def write_catalog_doc(base_catalog, target_dir: Path) -> None:
|
def write_catalog_doc(base_catalog, target_dir: Path) -> None:
|
||||||
|
@ -160,14 +177,13 @@ def write_catalog_doc(base_catalog, target_dir: Path) -> None:
|
||||||
for k, v in base_catalog.items()
|
for k, v in base_catalog.items()
|
||||||
if v["state"] == "working"
|
if v["state"] == "working"
|
||||||
}
|
}
|
||||||
full_catalog = {
|
full_catalog = {"apps": result_dict_doc, "categories": categories_list()}
|
||||||
"apps": result_dict_doc,
|
|
||||||
"categories": categories_list()
|
|
||||||
}
|
|
||||||
|
|
||||||
target_file = target_dir / "apps.json"
|
target_file = target_dir / "apps.json"
|
||||||
target_file.parent.mkdir(parents=True, exist_ok=True)
|
target_file.parent.mkdir(parents=True, exist_ok=True)
|
||||||
target_file.open("w", encoding="utf-8").write(json.dumps(full_catalog, sort_keys=True))
|
target_file.open("w", encoding="utf-8").write(
|
||||||
|
json.dumps(full_catalog, sort_keys=True)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def build_app_dict(app, infos):
|
def build_app_dict(app, infos):
|
||||||
|
@ -177,15 +193,38 @@ def build_app_dict(app, infos):
|
||||||
|
|
||||||
repo = Repo(this_app_cache)
|
repo = Repo(this_app_cache)
|
||||||
|
|
||||||
commits_in_apps_json = Repo(REPO_APPS_ROOT).git.log(
|
commits_in_apps_json = (
|
||||||
"-S", f"\"{app}\"", "--first-parent", "--reverse", "--date=unix",
|
Repo(REPO_APPS_ROOT)
|
||||||
"--format=%cd", "--", "apps.json").split("\n")
|
.git.log(
|
||||||
|
"-S",
|
||||||
|
f'"{app}"',
|
||||||
|
"--first-parent",
|
||||||
|
"--reverse",
|
||||||
|
"--date=unix",
|
||||||
|
"--format=%cd",
|
||||||
|
"--",
|
||||||
|
"apps.json",
|
||||||
|
)
|
||||||
|
.split("\n")
|
||||||
|
)
|
||||||
if len(commits_in_apps_json) > 1:
|
if len(commits_in_apps_json) > 1:
|
||||||
first_commit = commits_in_apps_json[0]
|
first_commit = commits_in_apps_json[0]
|
||||||
else:
|
else:
|
||||||
commits_in_apps_toml = Repo(REPO_APPS_ROOT).git.log(
|
commits_in_apps_toml = (
|
||||||
"-S", f"[{app}]", "--first-parent", "--reverse", "--date=unix",
|
Repo(REPO_APPS_ROOT)
|
||||||
"--format=%cd", "--", "apps.json", "apps.toml").split("\n")
|
.git.log(
|
||||||
|
"-S",
|
||||||
|
f"[{app}]",
|
||||||
|
"--first-parent",
|
||||||
|
"--reverse",
|
||||||
|
"--date=unix",
|
||||||
|
"--format=%cd",
|
||||||
|
"--",
|
||||||
|
"apps.json",
|
||||||
|
"apps.toml",
|
||||||
|
)
|
||||||
|
.split("\n")
|
||||||
|
)
|
||||||
first_commit = commits_in_apps_toml[0]
|
first_commit = commits_in_apps_toml[0]
|
||||||
|
|
||||||
# Assume the first entry we get (= the oldest) is the time the app was added
|
# Assume the first entry we get (= the oldest) is the time the app was added
|
||||||
|
@@ -204,14 +243,18 @@ def build_app_dict(app, infos):
     try:
         _ = repo.commit(infos["revision"])
     except ValueError as err:
-        raise RuntimeError(f"Revision ain't in history ? {infos['revision']}") from err
+        raise RuntimeError(
+            f"Revision ain't in history ? {infos['revision']}"
+        ) from err

     # Find timestamp corresponding to that commit
     timestamp = repo.commit(infos["revision"]).committed_date

     # Build the dict with all the infos
     if (this_app_cache / "manifest.toml").exists():
-        manifest = toml.load((this_app_cache / "manifest.toml").open("r"), _dict=OrderedDict)
+        manifest = toml.load(
+            (this_app_cache / "manifest.toml").open("r"), _dict=OrderedDict
+        )
     else:
         manifest = json.load((this_app_cache / "manifest.json").open("r"))
@@ -227,27 +270,45 @@ def build_app_dict(app, infos):
         "manifest": manifest,
         "state": infos["state"],
         "level": infos.get("level", "?"),
-        "maintained": 'package-not-maintained' not in infos.get('antifeatures', []),
+        "maintained": "package-not-maintained" not in infos.get("antifeatures", []),
         "high_quality": infos.get("high_quality", False),
         "featured": infos.get("featured", False),
         "category": infos.get("category", None),
         "subtags": infos.get("subtags", []),
         "potential_alternative_to": infos.get("potential_alternative_to", []),
         "antifeatures": list(
-            set(list(manifest.get("antifeatures", {}).keys()) + infos.get("antifeatures", []))
+            set(
+                list(manifest.get("antifeatures", {}).keys())
+                + infos.get("antifeatures", [])
+            )
         ),
     }


 def main() -> None:
     parser = argparse.ArgumentParser()
-    parser.add_argument("target_dir", type=Path, nargs="?",
-                        default=REPO_APPS_ROOT / "builds" / "default",
-                        help="The directory to write the catalogs to")
-    parser.add_argument("-j", "--jobs", type=int, default=multiprocessing.cpu_count(), metavar="N",
-                        help="Allow N threads to run in parallel")
-    parser.add_argument("-c", "--update-cache", action=argparse.BooleanOptionalAction, default=True,
-                        help="Update the apps cache")
+    parser.add_argument(
+        "target_dir",
+        type=Path,
+        nargs="?",
+        default=REPO_APPS_ROOT / "builds" / "default",
+        help="The directory to write the catalogs to",
+    )
+    parser.add_argument(
+        "-j",
+        "--jobs",
+        type=int,
+        default=multiprocessing.cpu_count(),
+        metavar="N",
+        help="Allow N threads to run in parallel",
+    )
+    parser.add_argument(
+        "-c",
+        "--update-cache",
+        action=argparse.BooleanOptionalAction,
+        default=True,
+        help="Update the apps cache",
+    )
     args = parser.parse_args()

     appslib.logging_sender.enable()
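With the arguments laid out one per line, the CLI surface is easier to read at a glance. A typical invocation (script name hypothetical) would be:

    python3 build_catalog.py builds/default -j 4 --no-update-cache

where `--no-update-cache` is the negated flag that `argparse.BooleanOptionalAction` generates automatically for `--update-cache`.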
@@ -9,11 +9,7 @@ from glob import glob


 def check_output(cmd):
-    return (
-        subprocess.check_output(cmd, shell=True)
-        .decode("utf-8")
-        .strip()
-    )
+    return subprocess.check_output(cmd, shell=True).decode("utf-8").strip()


 def convert_app_sources(folder):
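Note that Black reflows in both directions: the hand-wrapped method chain above collapses back onto a single line because it fits within Black's default 88-character limit, while calls elsewhere in this commit get exploded for the opposite reason.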
@@ -35,7 +31,13 @@ def convert_app_sources(folder):
             "sha256": D["sum"],
         }

-        if D.get("format", "tar.gz") not in ["zip", "tar.gz", "tar.xz", "tgz", "tar.bz2"]:
+        if D.get("format", "tar.gz") not in [
+            "zip",
+            "tar.gz",
+            "tar.xz",
+            "tgz",
+            "tar.bz2",
+        ]:
             new_D["format"] = D["format"]
         if "filename" in D:
             new_D["rename"] = D["filename"]
@@ -115,12 +117,12 @@ def _convert_v1_manifest_to_v2(app_path):
         "sso": "?",
         "disk": "50M",
         "ram.build": "50M",
-        "ram.runtime": "50M"
+        "ram.runtime": "50M",
     }

     maintainers = manifest.get("maintainer", {})
     if isinstance(maintainers, list):
-        maintainers = [m['name'] for m in maintainers]
+        maintainers = [m["name"] for m in maintainers]
     else:
         maintainers = [maintainers["name"]] if maintainers.get("name") else []
@@ -130,15 +132,30 @@ def _convert_v1_manifest_to_v2(app_path):
     manifest["install"] = {}
     for question in install_questions:
         name = question.pop("name")
-        if "ask" in question and name in ["domain", "path", "admin", "is_public", "password"]:
+        if "ask" in question and name in [
+            "domain",
+            "path",
+            "admin",
+            "is_public",
+            "password",
+        ]:
             question.pop("ask")
-        if question.get("example") and question.get("type") in ["domain", "path", "user", "boolean", "password"]:
+        if question.get("example") and question.get("type") in [
+            "domain",
+            "path",
+            "user",
+            "boolean",
+            "password",
+        ]:
             question.pop("example")

         manifest["install"][name] = question

     # Rename is_public to init_main_permission
-    manifest["install"] = {(k if k != "is_public" else "init_main_permission"): v for k, v in manifest["install"].items()}
+    manifest["install"] = {
+        (k if k != "is_public" else "init_main_permission"): v
+        for k, v in manifest["install"].items()
+    }

     if "init_main_permission" in manifest["install"]:
         manifest["install"]["init_main_permission"]["type"] = "group"
@@ -166,12 +183,16 @@ def _convert_v1_manifest_to_v2(app_path):

     # FIXME: Parse ynh_permission_create --permission="admin" --url="/wp-login.php" --additional_urls="/wp-admin.php" --allowed=$admin_wordpress

-    ports = check_output(f"sed -nr 's/(\\w+)=.*ynh_find_port[^0-9]*([0-9]+)\\)/\\1,\\2/p' '{app_path}/scripts/install'")
+    ports = check_output(
+        f"sed -nr 's/(\\w+)=.*ynh_find_port[^0-9]*([0-9]+)\\)/\\1,\\2/p' '{app_path}/scripts/install'"
+    )
     if ports:
         manifest["resources"]["ports"] = {}
         for port in ports.split("\n"):
             name, default = port.split(",")
-            exposed = check_output(f"sed -nr 's/.*yunohost firewall allow .*(TCP|UDP|Both).*${name}/\\1/p' '{app_path}/scripts/install'")
+            exposed = check_output(
+                f"sed -nr 's/.*yunohost firewall allow .*(TCP|UDP|Both).*${name}/\\1/p' '{app_path}/scripts/install'"
+            )
             if exposed == "Both":
                 exposed = True
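To make the sed one-liner concrete: the first expression rewrites a line such as `port=$(ynh_find_port 8095)` in `scripts/install` into `port,8095` (example line hypothetical), and the loop then splits each such pair on the comma to populate `manifest["resources"]["ports"]`.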
@@ -180,7 +201,9 @@ def _convert_v1_manifest_to_v2(app_path):
                 name = "main"

             if not default.isdigit():
-                print(f"Failed to parse '{default}' as a port number ... Will use 12345 instead")
+                print(
+                    f"Failed to parse '{default}' as a port number ... Will use 12345 instead"
+                )
                 default = 12345

             manifest["resources"]["ports"][f"{name}.default"] = int(default)
@@ -188,35 +211,57 @@ def _convert_v1_manifest_to_v2(app_path):
             manifest["resources"]["ports"][f"{name}.exposed"] = exposed

     maybequote = "[\"'\"'\"']?"
-    apt_dependencies = check_output(f"sed -nr 's/.*_dependencies={maybequote}(.*){maybequote}? *$/\\1/p' '{app_path}/scripts/_common.sh' 2>/dev/null | tr -d '\"' | sed 's@ @\\n@g'")
-    php_version = check_output(f"sed -nr 's/^ *YNH_PHP_VERSION={maybequote}(.*){maybequote}?$/\\1/p' '{app_path}/scripts/_common.sh' 2>/dev/null | tr -d \"\\\"'\"")
+    apt_dependencies = check_output(
+        f"sed -nr 's/.*_dependencies={maybequote}(.*){maybequote}? *$/\\1/p' '{app_path}/scripts/_common.sh' 2>/dev/null | tr -d '\"' | sed 's@ @\\n@g'"
+    )
+    php_version = check_output(
+        f"sed -nr 's/^ *YNH_PHP_VERSION={maybequote}(.*){maybequote}?$/\\1/p' '{app_path}/scripts/_common.sh' 2>/dev/null | tr -d \"\\\"'\""
+    )
     if apt_dependencies.strip():
         if php_version:
-            apt_dependencies = apt_dependencies.replace("${YNH_PHP_VERSION}", php_version)
-        apt_dependencies = ', '.join([d for d in apt_dependencies.split("\n") if d])
+            apt_dependencies = apt_dependencies.replace(
+                "${YNH_PHP_VERSION}", php_version
+            )
+        apt_dependencies = ", ".join([d for d in apt_dependencies.split("\n") if d])
         manifest["resources"]["apt"] = {"packages": apt_dependencies}

-    extra_apt_repos = check_output(r"sed -nr 's/.*_extra_app_dependencies.*repo=\"(.*)\".*package=\"(.*)\".*key=\"(.*)\"/\1,\2,\3/p' %s/scripts/install" % app_path)
+    extra_apt_repos = check_output(
+        r"sed -nr 's/.*_extra_app_dependencies.*repo=\"(.*)\".*package=\"(.*)\".*key=\"(.*)\"/\1,\2,\3/p' %s/scripts/install"
+        % app_path
+    )
     if extra_apt_repos:
         for i, extra_apt_repo in enumerate(extra_apt_repos.split("\n")):
             repo, packages, key = extra_apt_repo.split(",")
-            packages = packages.replace('$', '#FIXME#$')
+            packages = packages.replace("$", "#FIXME#$")
             if "apt" not in manifest["resources"]:
                 manifest["resources"]["apt"] = {}
             if "extras" not in manifest["resources"]["apt"]:
                 manifest["resources"]["apt"]["extras"] = []
-            manifest["resources"]["apt"]["extras"].append({
-                "repo": repo,
-                "key": key,
-                "packages": packages,
-            })
+            manifest["resources"]["apt"]["extras"].append(
+                {
+                    "repo": repo,
+                    "key": key,
+                    "packages": packages,
+                }
+            )

     if os.system(f"grep -q 'ynh_mysql_setup_db' {app_path}/scripts/install") == 0:
         manifest["resources"]["database"] = {"type": "mysql"}
     elif os.system(f"grep -q 'ynh_psql_setup_db' {app_path}/scripts/install") == 0:
         manifest["resources"]["database"] = {"type": "postgresql"}

-    keys_to_keep = ["packaging_format", "id", "name", "description", "version", "maintainers", "upstream", "integration", "install", "resources"]
+    keys_to_keep = [
+        "packaging_format",
+        "id",
+        "name",
+        "description",
+        "version",
+        "maintainers",
+        "upstream",
+        "integration",
+        "install",
+        "resources",
+    ]

     keys_to_del = [key for key in manifest.keys() if key not in keys_to_keep]
     for key in keys_to_del:
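Two Black behaviours are visible in this hunk: `keys_to_keep` is exploded to one element per line simply because the literal overflows 88 characters, while the `extras` dict stays one key per line because it already ended with a trailing comma, and Black's "magic trailing comma" rule treats that as a request to keep the collection multi-line.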
@@ -246,19 +291,35 @@ def _dump_v2_manifest_as_toml(manifest):
     upstream = table()
     for key, value in manifest["upstream"].items():
         upstream[key] = value
-    upstream["cpe"].comment("FIXME: optional but recommended if relevant, this is meant to contain the Common Platform Enumeration, which is sort of a standard id for applications defined by the NIST. In particular, Yunohost may use this is in the future to easily track CVE (=security reports) related to apps. The CPE may be obtained by searching here: https://nvd.nist.gov/products/cpe/search. For example, for Nextcloud, the CPE is 'cpe:2.3:a:nextcloud:nextcloud' (no need to include the version number)")
-    upstream["fund"].comment("FIXME: optional but recommended (or remove if irrelevant / not applicable). This is meant to be an URL where people can financially support this app, especially when its development is based on volunteers and/or financed by its community. YunoHost may later advertise it in the webadmin.")
+    upstream["cpe"].comment(
+        "FIXME: optional but recommended if relevant, this is meant to contain the Common Platform Enumeration, which is sort of a standard id for applications defined by the NIST. In particular, Yunohost may use this is in the future to easily track CVE (=security reports) related to apps. The CPE may be obtained by searching here: https://nvd.nist.gov/products/cpe/search. For example, for Nextcloud, the CPE is 'cpe:2.3:a:nextcloud:nextcloud' (no need to include the version number)"
+    )
+    upstream["fund"].comment(
+        "FIXME: optional but recommended (or remove if irrelevant / not applicable). This is meant to be an URL where people can financially support this app, especially when its development is based on volunteers and/or financed by its community. YunoHost may later advertise it in the webadmin."
+    )
     toml_manifest["upstream"] = upstream

     integration = table()
     for key, value in manifest["integration"].items():
         integration.add(key, value)
-    integration["architectures"].comment('FIXME: can be replaced by a list of supported archs using the dpkg --print-architecture nomenclature (amd64/i386/armhf/arm64), for example: ["amd64", "i386"]')
-    integration["ldap"].comment('FIXME: replace with true, false, or "not_relevant". Not to confuse with the "sso" key : the "ldap" key corresponds to wether or not a user *can* login on the app using its YunoHost credentials.')
-    integration["sso"].comment('FIXME: replace with true, false, or "not_relevant". Not to confuse with the "ldap" key : the "sso" key corresponds to wether or not a user is *automatically logged-in* on the app when logged-in on the YunoHost portal.')
-    integration["disk"].comment('FIXME: replace with an **estimate** minimum disk requirement. e.g. 20M, 400M, 1G, ...')
-    integration["ram.build"].comment('FIXME: replace with an **estimate** minimum ram requirement. e.g. 50M, 400M, 1G, ...')
-    integration["ram.runtime"].comment('FIXME: replace with an **estimate** minimum ram requirement. e.g. 50M, 400M, 1G, ...')
+    integration["architectures"].comment(
+        'FIXME: can be replaced by a list of supported archs using the dpkg --print-architecture nomenclature (amd64/i386/armhf/arm64), for example: ["amd64", "i386"]'
+    )
+    integration["ldap"].comment(
+        'FIXME: replace with true, false, or "not_relevant". Not to confuse with the "sso" key : the "ldap" key corresponds to wether or not a user *can* login on the app using its YunoHost credentials.'
+    )
+    integration["sso"].comment(
+        'FIXME: replace with true, false, or "not_relevant". Not to confuse with the "ldap" key : the "sso" key corresponds to wether or not a user is *automatically logged-in* on the app when logged-in on the YunoHost portal.'
+    )
+    integration["disk"].comment(
+        "FIXME: replace with an **estimate** minimum disk requirement. e.g. 20M, 400M, 1G, ..."
+    )
+    integration["ram.build"].comment(
+        "FIXME: replace with an **estimate** minimum ram requirement. e.g. 50M, 400M, 1G, ..."
+    )
+    integration["ram.runtime"].comment(
+        "FIXME: replace with an **estimate** minimum ram requirement. e.g. 50M, 400M, 1G, ..."
+    )
     toml_manifest["integration"] = integration

     install = table()
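Black never splits string literals by default, so these long FIXME comments still exceed the line limit after the rewrite; wrapping the `.comment(...)` call only moves each string onto its own line. Also visible here: strings that contain double quotes (the `architectures`, `ldap`, and `sso` comments) keep their single quotes, since converting them to Black's preferred double quotes would force escaping.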
@@ -267,7 +328,11 @@ def _dump_v2_manifest_as_toml(manifest):
         install[key].indent(4)

         if key in ["domain", "path", "admin", "is_public", "password"]:
-            install[key].add(comment("this is a generic question - ask strings are automatically handled by Yunohost's core"))
+            install[key].add(
+                comment(
+                    "this is a generic question - ask strings are automatically handled by Yunohost's core"
+                )
+            )

         for lang, value2 in value.get("ask", {}).items():
             install[key].add(f"ask.{lang}", value2)
@@ -305,8 +370,8 @@ def _dump_v2_manifest_as_toml(manifest):

     toml_manifest_dump = dumps(toml_manifest)

-    regex = re.compile(r'\"((description|ask|help)\.[a-z]{2})\"')
-    toml_manifest_dump = regex.sub(r'\1', toml_manifest_dump)
+    regex = re.compile(r"\"((description|ask|help)\.[a-z]{2})\"")
+    toml_manifest_dump = regex.sub(r"\1", toml_manifest_dump)
     toml_manifest_dump = toml_manifest_dump.replace('"ram.build"', "ram.build")
     toml_manifest_dump = toml_manifest_dump.replace('"ram.runtime"', "ram.runtime")
     toml_manifest_dump = toml_manifest_dump.replace('"main.url"', "main.url")
@@ -324,7 +389,9 @@ def _dump_v2_manifest_as_toml(manifest):

     if "ports" in manifest["resources"]:
         for port_thing in manifest["resources"]["ports"].keys():
-            toml_manifest_dump = toml_manifest_dump.replace(f'"{port_thing}"', f"{port_thing}")
+            toml_manifest_dump = toml_manifest_dump.replace(
+                f'"{port_thing}"', f"{port_thing}"
+            )

     return toml_manifest_dump
@@ -395,7 +462,9 @@ def cleanup_scripts_and_conf(folder):
         "^.*ynh_script_progression.*Reloading NGINX web server",
         "^.*ynh_systemd_action --service_name=nginx --action=reload",
     ]
-    patterns_to_remove_in_scripts = [re.compile(f"({p})", re.MULTILINE) for p in patterns_to_remove_in_scripts]
+    patterns_to_remove_in_scripts = [
+        re.compile(f"({p})", re.MULTILINE) for p in patterns_to_remove_in_scripts
+    ]

     replaces = [
         ("path_url", "path"),
@@ -404,13 +473,21 @@ def cleanup_scripts_and_conf(folder):
         ("FINALPATH", "INSTALL_DIR"),
         ("datadir", "data_dir"),
         ("DATADIR", "DATA_DIR"),
-        ('--source_id="$architecture"', ''),
-        ('--source_id="$YNH_ARCH"', ''),
-        ('--source_id=app', ''),
-        ('--source_id="app.$architecture"', ''),
+        ('--source_id="$architecture"', ""),
+        ('--source_id="$YNH_ARCH"', ""),
+        ("--source_id=app", ""),
+        ('--source_id="app.$architecture"', ""),
     ]

-    for s in ["_common.sh", "install", "remove", "upgrade", "backup", "restore", "change_url"]:
+    for s in [
+        "_common.sh",
+        "install",
+        "remove",
+        "upgrade",
+        "backup",
+        "restore",
+        "change_url",
+    ]:

         script = f"{folder}/scripts/{s}"
@@ -420,10 +497,18 @@ def cleanup_scripts_and_conf(folder):
         content = open(script).read()

         for pattern in patterns_to_remove_in_scripts:
-            if "^.*ynh_script_progression.*Reloading NGINX web server" in pattern.pattern and s == "restore":
+            if (
+                "^.*ynh_script_progression.*Reloading NGINX web server"
+                in pattern.pattern
+                and s == "restore"
+            ):
                 # This case is legit
                 continue
-            if "^.*ynh_systemd_action --service_name=nginx --action=reload" in pattern.pattern and s == "restore":
+            if (
+                "^.*ynh_systemd_action --service_name=nginx --action=reload"
+                in pattern.pattern
+                and s == "restore"
+            ):
                 # This case is legit
                 continue
             content = pattern.sub(r"#REMOVEME? \1", content)
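The substitution at the end of the loop keeps every matched line but flags it for human review: because each pattern is wrapped in a capture group, a line like `ynh_systemd_action --service_name=nginx --action=reload` in an install script becomes `#REMOVEME? ynh_systemd_action --service_name=nginx --action=reload`.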
@@ -436,7 +521,9 @@ def cleanup_scripts_and_conf(folder):
             pattern = re.compile("(^.*nginx.*$)", re.MULTILINE)
             content = pattern.sub(r"#REMOVEME? \1", content)

-            pattern = re.compile("(^.*ynh_script_progress.*Updat.* NGINX.*conf.*$)", re.MULTILINE)
+            pattern = re.compile(
+                "(^.*ynh_script_progress.*Updat.* NGINX.*conf.*$)", re.MULTILINE
+            )
             content = pattern.sub(r"\1\n\nynh_change_url_nginx_config", content)

             pattern = re.compile(r"(ynh_clean_check_starting)", re.MULTILINE)
@@ -446,7 +533,6 @@ def cleanup_scripts_and_conf(folder):
             pattern = re.compile(r"(^\s+path=.*$)", re.MULTILINE)
             content = pattern.sub(r"#REMOVEME? \1", content)
-

         open(script, "w").write(content)

     for conf in os.listdir(f"{folder}/conf"):
@@ -470,15 +556,15 @@ if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description="Attempt to automatically convert a v1 YunoHost app to v2 (at least as much as possible) : parse the app scripts to auto-generate the manifest.toml, and remove now-useless lines from the app scripts"
     )
-    parser.add_argument(
-        "app_path", help="Path to the app to convert"
-    )
+    parser.add_argument("app_path", help="Path to the app to convert")

     args = parser.parse_args()

     manifest = _convert_v1_manifest_to_v2(args.app_path)
     with open(args.app_path + "/manifest.toml", "w") as manifest_file:
-        manifest_file.write("#:schema https://raw.githubusercontent.com/YunoHost/apps/master/schemas/manifest.v2.schema.json\n\n")
+        manifest_file.write(
+            "#:schema https://raw.githubusercontent.com/YunoHost/apps/master/schemas/manifest.v2.schema.json\n\n"
+        )
         manifest_file.write(_dump_v2_manifest_as_toml(manifest))

     cleanup_scripts_and_conf(args.app_path)
@@ -17,18 +17,22 @@ def convert_v1_manifest_to_v2_for_catalog(manifest):
     manifest["upstream"]["website"] = manifest["url"]

     manifest["integration"] = {
-        "yunohost": manifest.get("requirements", {}).get("yunohost", "").replace(">", "").replace("=", "").replace(" ", ""),
+        "yunohost": manifest.get("requirements", {})
+        .get("yunohost", "")
+        .replace(">", "")
+        .replace("=", "")
+        .replace(" ", ""),
         "architectures": "all",
         "multi_instance": manifest.get("multi_instance", False),
         "ldap": "?",
         "sso": "?",
         "disk": "50M",
-        "ram": {"build": "50M", "runtime": "10M"}
+        "ram": {"build": "50M", "runtime": "10M"},
     }

     maintainers = manifest.get("maintainer", {})
     if isinstance(maintainers, list):
-        maintainers = [m['name'] for m in maintainers]
+        maintainers = [m["name"] for m in maintainers]
     else:
         maintainers = [maintainers["name"]] if maintainers.get("name") else []
@@ -39,21 +43,39 @@ def convert_v1_manifest_to_v2_for_catalog(manifest):
     manifest["install"] = {}
     for question in install_questions:
         name = question.pop("name")
-        if "ask" in question and name in ["domain", "path", "admin", "is_public", "password"]:
+        if "ask" in question and name in [
+            "domain",
+            "path",
+            "admin",
+            "is_public",
+            "password",
+        ]:
             question.pop("ask")
-        if question.get("example") and question.get("type") in ["domain", "path", "user", "boolean", "password"]:
+        if question.get("example") and question.get("type") in [
+            "domain",
+            "path",
+            "user",
+            "boolean",
+            "password",
+        ]:
             question.pop("example")

         manifest["install"][name] = question

-    manifest["resources"] = {
-        "system_user": {},
-        "install_dir": {
-            "alias": "final_path"
-        }
-    }
+    manifest["resources"] = {"system_user": {}, "install_dir": {"alias": "final_path"}}

-    keys_to_keep = ["packaging_format", "id", "name", "description", "version", "maintainers", "upstream", "integration", "install", "resources"]
+    keys_to_keep = [
+        "packaging_format",
+        "id",
+        "name",
+        "description",
+        "version",
+        "maintainers",
+        "upstream",
+        "integration",
+        "install",
+        "resources",
+    ]

     keys_to_del = [key for key in manifest.keys() if key not in keys_to_keep]
     for key in keys_to_del:
@@ -59,7 +59,9 @@ def generate_READMEs(app_path: Path):
         if README_template.name == "README.md.j2":
             continue

-        if not README_template.name.endswith(".j2") or not README_template.name.startswith("README_"):
+        if not README_template.name.endswith(
+            ".j2"
+        ) or not README_template.name.startswith("README_"):
             continue

         language_code = README_template.name.split("_")[1].split(".")[0]
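This is one of Black's more awkward splits: the only break point it finds is inside `.endswith(`'s argument list, so `".j2"` lands alone on its own line while the second operand stays inline. Extracting the condition to a variable would read better, but Black deliberately never restructures code, only reformats it.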
@@ -13,15 +13,25 @@ def test_running_make_readme():
         name = Path(name)
         DIRECTORY = name / "gotosocial_ynh"

-        subprocess.check_call(["git", "clone", "https://github.com/yunohost-apps/gotosocial_ynh", DIRECTORY, "-q"])
+        subprocess.check_call(
+            [
+                "git",
+                "clone",
+                "https://github.com/yunohost-apps/gotosocial_ynh",
+                DIRECTORY,
+                "-q",
+            ]
+        )
         subprocess.check_call(["git", "checkout", COMMIT_ID, "-q"], cwd=DIRECTORY)

         print(CWD)
         subprocess.check_call([CWD / "../make_readme.py", DIRECTORY])

         assert open(CWD / "README.md").read() == open(DIRECTORY / "README.md").read()
-        assert open(CWD / "README_fr.md").read() == open(DIRECTORY / "README_fr.md").read()
+        assert (
+            open(CWD / "README_fr.md").read() == open(DIRECTORY / "README_fr.md").read()
+        )


-if __name__ == '__main__':
+if __name__ == "__main__":
     test_running_make_readme()
@@ -35,14 +35,21 @@ async def git(cmd, in_folder=None):
     cmd = ["git"] + cmd
     cmd = " ".join(map(shlex.quote, cmd))
     print(cmd)
-    command = await asyncio.create_subprocess_shell(cmd, env=my_env, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT)
+    command = await asyncio.create_subprocess_shell(
+        cmd,
+        env=my_env,
+        stdout=asyncio.subprocess.PIPE,
+        stderr=asyncio.subprocess.STDOUT,
+    )
     data = await command.stdout.read()
     return data.decode().strip()


@app.route("/github", methods=["GET"])
def main_route(request):
-    return text("You aren't supposed to go on this page using a browser, it's for webhooks push instead.")
+    return text(
+        "You aren't supposed to go on this page using a browser, it's for webhooks push instead."
+    )


@app.route("/github", methods=["POST"])
@@ -58,7 +65,9 @@ async def on_push(request):
         return response.json({"error": "Signing algorightm is not sha1 ?!"}, 501)

     # HMAC requires the key to be bytes, but data is string
-    mac = hmac.new(github_webhook_secret.encode(), msg=request.body, digestmod=hashlib.sha1)
+    mac = hmac.new(
+        github_webhook_secret.encode(), msg=request.body, digestmod=hashlib.sha1
+    )

     if not hmac.compare_digest(str(mac.hexdigest()), str(signature)):
         return response.json({"error": "Bad signature ?!"}, 403)
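The wrapped `hmac.new(...)` call is the standard GitHub webhook signature check: GitHub sends an `X-Hub-Signature` header of the form `sha1=<hexdigest>`, computed over the raw request body with the shared secret, and the receiver recomputes the digest and compares in constant time. A minimal self-contained sketch of the same check (function and variable names are illustrative, not from this codebase):

    import hashlib
    import hmac

    def verify_github_signature(secret: str, body: bytes, header: str) -> bool:
        # Header looks like "sha1=0123abcd..."; split off the algorithm prefix.
        algo, _, signature = header.partition("=")
        if algo != "sha1":
            return False
        mac = hmac.new(secret.encode(), msg=body, digestmod=hashlib.sha1)
        # compare_digest runs in constant time, defeating timing attacks on the digest.
        return hmac.compare_digest(mac.hexdigest(), signature)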
@@ -71,19 +80,42 @@ async def on_push(request):
     print(f"{repository} -> branch '{branch}'")

     with tempfile.TemporaryDirectory() as folder:
-        await git(["clone", f"https://{login}:{token}@github.com/{repository}", "--single-branch", "--branch", branch, folder])
+        await git(
+            [
+                "clone",
+                f"https://{login}:{token}@github.com/{repository}",
+                "--single-branch",
+                "--branch",
+                branch,
+                folder,
+            ]
+        )
         generate_READMEs(folder)

         await git(["add", "README*.md"], in_folder=folder)

-        diff_not_empty = await asyncio.create_subprocess_shell(" ".join(["git", "diff", "HEAD", "--compact-summary"]), cwd=folder, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT)
+        diff_not_empty = await asyncio.create_subprocess_shell(
+            " ".join(["git", "diff", "HEAD", "--compact-summary"]),
+            cwd=folder,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.STDOUT,
+        )
         diff_not_empty = await diff_not_empty.stdout.read()
         diff_not_empty = diff_not_empty.decode().strip()
         if not diff_not_empty:
             print("nothing to do")
             return text("nothing to do")

-        await git(["commit", "-a", "-m", "Auto-update README", "--author='yunohost-bot <yunohost@yunohost.org>'"], in_folder=folder)
+        await git(
+            [
+                "commit",
+                "-a",
+                "-m",
+                "Auto-update README",
+                "--author='yunohost-bot <yunohost@yunohost.org>'",
+            ],
+            in_folder=folder,
+        )
         await git(["push", "origin", branch, "--quiet"], in_folder=folder)

         return text("ok")
@@ -107,7 +107,8 @@ def list_changes(catalog, ci_results) -> dict[str, list[tuple[str, int, int]]]:


def pretty_changes(changes: dict[str, list[tuple[str, int, int]]]) -> str:
-    pr_body_template = textwrap.dedent("""
+    pr_body_template = textwrap.dedent(
+        """
         {%- if changes["major_regressions"] %}
         ### Major regressions 😭
         {% for app in changes["major_regressions"] %}
@@ -138,7 +139,8 @@ def pretty_changes(changes: dict[str, list[tuple[str, int, int]]]) -> str:
         - [ ] [{{app}} (See latest job if it exists)](https://ci-apps.yunohost.org/ci/apps/{{app}}/latestjob)
         {%- endfor %}
         {% endif %}
-    """)
+    """
+    )

     return jinja2.Environment().from_string(pr_body_template).render(changes=changes)
@@ -148,24 +150,34 @@ def make_pull_request(pr_body: str) -> None:
         "title": "Update app levels according to CI results",
         "body": pr_body,
         "head": "update_app_levels",
-        "base": "master"
+        "base": "master",
     }

     with requests.Session() as s:
         s.headers.update({"Authorization": f"token {github_token()}"})
-        response = s.post(f"https://api.github.com/repos/{APPS_REPO}/pulls", json=pr_data)
+        response = s.post(
+            f"https://api.github.com/repos/{APPS_REPO}/pulls", json=pr_data
+        )

         if response.status_code == 422:
-            response = s.get(f"https://api.github.com/repos/{APPS_REPO}/pulls", data={"head": "update_app_levels"})
+            response = s.get(
+                f"https://api.github.com/repos/{APPS_REPO}/pulls",
+                data={"head": "update_app_levels"},
+            )
             response.raise_for_status()
             pr_number = response.json()[0]["number"]

             # head can't be updated
             del pr_data["head"]
-            response = s.patch(f"https://api.github.com/repos/{APPS_REPO}/pulls/{pr_number}", json=pr_data)
+            response = s.patch(
+                f"https://api.github.com/repos/{APPS_REPO}/pulls/{pr_number}",
+                json=pr_data,
+            )
             response.raise_for_status()
             existing_url = response.json()["html_url"]
-            logging.warning(f"An existing Pull Request has been updated at {existing_url} !")
+            logging.warning(
+                f"An existing Pull Request has been updated at {existing_url} !"
+            )
         else:
             response.raise_for_status()
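A note on the control flow Black is reformatting here: GitHub's `POST /repos/{owner}/{repo}/pulls` endpoint answers 422 Unprocessable Entity when a pull request from the same head branch already exists, so the code falls back to looking up the open PR for that head and `PATCH`ing it instead; `head` is dropped from the payload first because, as the code's own comment notes, it cannot be changed on an existing pull request.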