
🎨 Format Python code with Black

OniriCorpe 2024-03-11 16:34:33 +00:00 committed by Bram
parent 359de4c5cf
commit d81f11f6ee
15 changed files with 670 additions and 251 deletions
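All hunks below are mechanical reformatting produced by the Black code formatter: long calls and literals are exploded one element per line with a trailing comma, chained calls and long conditions are wrapped in parentheses, and single-quoted strings are normalized to double quotes. As a rough sketch (the exact invocation is not recorded in this diff; Black's default 88-character line length is assumed), a commit like this is typically generated with:

    python -m black .

Black is idempotent, so re-running the same command on the formatted tree produces no further changes, which keeps later diffs free of formatting noise.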

View file

@@ -9,8 +9,11 @@ from typing import Any
import tqdm
from appslib.utils import (REPO_APPS_ROOT, # pylint: disable=import-error
get_catalog, git_repo_age)
from appslib.utils import (
REPO_APPS_ROOT, # pylint: disable=import-error
get_catalog,
git_repo_age,
)
from git import Repo
@@ -31,7 +34,8 @@ def app_cache_clone(app: str, infos: dict[str, str]) -> None:
infos["url"],
to_path=app_cache_folder(app),
depth=git_depths.get(infos["state"], git_depths["default"]),
single_branch=True, branch=infos.get("branch", "master"),
single_branch=True,
branch=infos.get("branch", "master"),
)

View file

@@ -23,10 +23,14 @@ def git(cmd: list[str], cwd: Optional[Path] = None) -> str:
if cwd:
full_cmd.extend(["-C", str(cwd)])
full_cmd.extend(cmd)
return subprocess.check_output(
full_cmd,
# env=my_env,
).strip().decode("utf-8")
return (
subprocess.check_output(
full_cmd,
# env=my_env,
)
.strip()
.decode("utf-8")
)
def git_repo_age(path: Path) -> Union[bool, int]:
@@ -42,7 +46,8 @@ def get_catalog(working_only: bool = False) -> dict[str, dict[str, Any]]:
catalog = toml.load((REPO_APPS_ROOT / "apps.toml").open("r", encoding="utf-8"))
if working_only:
catalog = {
app: infos for app, infos in catalog.items()
app: infos
for app, infos in catalog.items()
if infos.get("state") != "notworking"
}
return catalog

View file

@@ -7,7 +7,9 @@ import sys
import requests
catalog = requests.get("https://raw.githubusercontent.com/YunoHost/apps/master/apps.json").json()
catalog = requests.get(
"https://raw.githubusercontent.com/YunoHost/apps/master/apps.json"
).json()
my_env = os.environ.copy()
my_env["GIT_TERMINAL_PROMPT"] = "0"
@@ -44,15 +46,19 @@ def git(cmd, in_folder=None):
def progressbar(it, prefix="", size=60, file=sys.stdout):
it = list(it)
count = len(it)
def show(j, name=""):
name += " "
x = int(size*j/count)
file.write("%s[%s%s] %i/%i %s\r" % (prefix, "#"*x, "."*(size-x), j, count, name))
x = int(size * j / count)
file.write(
"%s[%s%s] %i/%i %s\r" % (prefix, "#" * x, "." * (size - x), j, count, name)
)
file.flush()
show(0)
for i, item in enumerate(it):
yield item
show(i+1, item["id"])
show(i + 1, item["id"])
file.write("\n")
file.flush()
@@ -63,7 +69,10 @@ def build_cache():
folder = os.path.join(".apps_cache", app["id"])
reponame = app["url"].rsplit("/", 1)[-1]
git(f"clone --quiet --depth 1 --single-branch {app['url']} {folder}")
git(f"remote add fork https://{login}:{token}@github.com/{login}/{reponame}", in_folder=folder)
git(
f"remote add fork https://{login}:{token}@github.com/{login}/{reponame}",
in_folder=folder,
)
def apply(patch):
@@ -81,7 +90,11 @@ def diff():
for app in apps():
folder = os.path.join(".apps_cache", app["id"])
if bool(subprocess.check_output(f"cd {folder} && git diff", shell=True).strip().decode("utf-8")):
if bool(
subprocess.check_output(f"cd {folder} && git diff", shell=True)
.strip()
.decode("utf-8")
):
print("\n\n\n")
print("=================================")
print("Changes in : " + app["id"])
@@ -92,35 +105,50 @@ def diff():
def push(patch):
title = "[autopatch] " + open(os.path.join("patches", patch, "pr_title.md")).read().strip()
title = (
"[autopatch] "
+ open(os.path.join("patches", patch, "pr_title.md")).read().strip()
)
def diff_not_empty(app):
folder = os.path.join(".apps_cache", app["id"])
return bool(subprocess.check_output(f"cd {folder} && git diff", shell=True).strip().decode("utf-8"))
return bool(
subprocess.check_output(f"cd {folder} && git diff", shell=True)
.strip()
.decode("utf-8")
)
def app_is_on_github(app):
return "github.com" in app["url"]
apps_to_push = [app for app in apps() if diff_not_empty(app) and app_is_on_github(app)]
apps_to_push = [
app for app in apps() if diff_not_empty(app) and app_is_on_github(app)
]
with requests.Session() as s:
s.headers.update({"Authorization": f"token {token}"})
for app in progressbar(apps_to_push, "Forking: ", 40):
app["repo"] = app["url"][len("https://github.com/"):].strip("/")
app["repo"] = app["url"][len("https://github.com/") :].strip("/")
fork_if_needed(app["repo"], s)
for app in progressbar(apps_to_push, "Pushing: ", 40):
app["repo"] = app["url"][len("https://github.com/"):].strip("/")
app["repo"] = app["url"][len("https://github.com/") :].strip("/")
app_repo_name = app["url"].rsplit("/", 1)[-1]
folder = os.path.join(".apps_cache", app["id"])
current_branch = git(f"symbolic-ref --short HEAD", in_folder=folder)
git(f"reset origin/{current_branch}", in_folder=folder)
git(["commit", "-a", "-m", title, "--author='Yunohost-Bot <>'"], in_folder=folder)
git(
["commit", "-a", "-m", title, "--author='Yunohost-Bot <>'"],
in_folder=folder,
)
try:
git(f"remote remove fork", in_folder=folder)
except Exception:
pass
git(f"remote add fork https://{login}:{token}@github.com/{login}/{app_repo_name}", in_folder=folder)
git(
f"remote add fork https://{login}:{token}@github.com/{login}/{app_repo_name}",
in_folder=folder,
)
git(f"push fork {current_branch}:{patch} --quiet --force", in_folder=folder)
create_pull_request(app["repo"], patch, current_branch, s)
@@ -141,11 +169,15 @@ def fork_if_needed(repo, s):
def create_pull_request(repo, patch, base_branch, s):
PR = {"title": "[autopatch] " + open(os.path.join("patches", patch, "pr_title.md")).read().strip(),
"body": "This is an automatic PR\n\n" + open(os.path.join("patches", patch, "pr_body.md")).read().strip(),
"head": login + ":" + patch,
"base": base_branch,
"maintainer_can_modify": True}
PR = {
"title": "[autopatch] "
+ open(os.path.join("patches", patch, "pr_title.md")).read().strip(),
"body": "This is an automatic PR\n\n"
+ open(os.path.join("patches", patch, "pr_body.md")).read().strip(),
"head": login + ":" + patch,
"base": base_branch,
"maintainer_can_modify": True,
}
r = s.post(github_api + f"/repos/{repo}/pulls", json.dumps(PR))
@@ -159,7 +191,8 @@ def main():
action = sys.argv[1]
if action == "--help":
print("""
print(
"""
Example usage:
# Init local git clone for all apps
@@ -173,7 +206,8 @@ def main():
# Push and create pull requests on all apps with non-empty diff
./autopatch.py --push explicit-php-version-in-deps
""")
"""
)
elif action == "--build-cache":
build_cache()

View file

@@ -21,10 +21,20 @@ import github
# add apps/tools to sys.path
sys.path.insert(0, str(Path(__file__).parent.parent))
from rest_api import GithubAPI, GitlabAPI, GiteaForgejoAPI, RefType # noqa: E402,E501 pylint: disable=import-error,wrong-import-position
from rest_api import (
GithubAPI,
GitlabAPI,
GiteaForgejoAPI,
RefType,
) # noqa: E402,E501 pylint: disable=import-error,wrong-import-position
import appslib.logging_sender # noqa: E402 pylint: disable=import-error,wrong-import-position
from appslib.utils import REPO_APPS_ROOT, get_catalog # noqa: E402 pylint: disable=import-error,wrong-import-position
from app_caches import app_cache_folder # noqa: E402 pylint: disable=import-error,wrong-import-position
from appslib.utils import (
REPO_APPS_ROOT,
get_catalog,
) # noqa: E402 pylint: disable=import-error,wrong-import-position
from app_caches import (
app_cache_folder,
) # noqa: E402 pylint: disable=import-error,wrong-import-position
STRATEGIES = [
@@ -44,11 +54,30 @@ STRATEGIES = [
@cache
def get_github() -> tuple[Optional[tuple[str, str]], Optional[github.Github], Optional[github.InputGitAuthor]]:
def get_github() -> tuple[
Optional[tuple[str, str]],
Optional[github.Github],
Optional[github.InputGitAuthor],
]:
try:
github_login = (REPO_APPS_ROOT / ".github_login").open("r", encoding="utf-8").read().strip()
github_token = (REPO_APPS_ROOT / ".github_token").open("r", encoding="utf-8").read().strip()
github_email = (REPO_APPS_ROOT / ".github_email").open("r", encoding="utf-8").read().strip()
github_login = (
(REPO_APPS_ROOT / ".github_login")
.open("r", encoding="utf-8")
.read()
.strip()
)
github_token = (
(REPO_APPS_ROOT / ".github_token")
.open("r", encoding="utf-8")
.read()
.strip()
)
github_email = (
(REPO_APPS_ROOT / ".github_email")
.open("r", encoding="utf-8")
.read()
.strip()
)
auth = (github_login, github_token)
github_api = github.Github(github_token)
@@ -96,7 +125,9 @@ class LocalOrRemoteRepo:
if not self.manifest_path.exists():
raise RuntimeError(f"{app.name}: manifest.toml doesnt exists?")
# app is in fact a path
self.manifest_raw = (app / "manifest.toml").open("r", encoding="utf-8").read()
self.manifest_raw = (
(app / "manifest.toml").open("r", encoding="utf-8").read()
)
elif isinstance(app, str):
# It's remote
@@ -187,7 +218,9 @@ class AppAutoUpdater:
self.main_upstream = self.manifest.get("upstream", {}).get("code")
def run(self, edit: bool = False, commit: bool = False, pr: bool = False) -> tuple[State, str, str, str]:
def run(
self, edit: bool = False, commit: bool = False, pr: bool = False
) -> tuple[State, str, str, str]:
state = State.up_to_date
main_version = ""
pr_url = ""
@@ -212,7 +245,11 @@ class AppAutoUpdater:
commit_msg += f"\n{msg}"
self.repo.manifest_raw = self.replace_version_and_asset_in_manifest(
self.repo.manifest_raw, version, assets, infos, is_main=source == "main",
self.repo.manifest_raw,
version,
assets,
infos,
is_main=source == "main",
)
if state == State.up_to_date:
@@ -246,7 +283,9 @@ class AppAutoUpdater:
return (state, self.current_version, main_version, pr_url)
@staticmethod
def relevant_versions(tags: list[str], app_id: str, version_regex: Optional[str]) -> tuple[str, str]:
def relevant_versions(
tags: list[str], app_id: str, version_regex: Optional[str]
) -> tuple[str, str]:
def apply_version_regex(tag: str) -> Optional[str]:
# First preprocessing according to the manifest version_regex…
@@ -255,7 +294,9 @@ class AppAutoUpdater:
if match is None:
return None
# Basically: either groupdict if named capture groups, sorted by names, or groups()
tag = ".".join(dict(sorted(match.groupdict().items())).values() or match.groups())
tag = ".".join(
dict(sorted(match.groupdict().items())).values() or match.groups()
)
# Then remove leading v
tag = tag.lstrip("v")
@@ -264,7 +305,9 @@ class AppAutoUpdater:
def version_numbers(tag: str) -> Optional[tuple[int, ...]]:
filter_keywords = ["start", "rc", "beta", "alpha"]
if any(keyword in tag for keyword in filter_keywords):
logging.debug(f"Tag {tag} contains filtered keyword from {filter_keywords}.")
logging.debug(
f"Tag {tag} contains filtered keyword from {filter_keywords}."
)
return None
t_to_check = tag
@@ -302,7 +345,9 @@ class AppAutoUpdater:
def tag_to_int_tuple(tag: str) -> tuple[int, ...]:
tag = tag.lstrip("v").replace("-", ".").rstrip(".")
int_tuple = tag.split(".")
assert all(i.isdigit() for i in int_tuple), f"Cant convert {tag} to int tuple :/"
assert all(
i.isdigit() for i in int_tuple
), f"Cant convert {tag} to int tuple :/"
return tuple(int(i) for i in int_tuple)
@staticmethod
@@ -317,8 +362,9 @@ class AppAutoUpdater:
except Exception as e:
raise RuntimeError(f"Failed to compute sha256 for {url} : {e}") from e
def get_source_update(self, name: str, infos: dict[str, Any]
) -> Optional[tuple[str, Union[str, dict[str, str]], str]]:
def get_source_update(
self, name: str, infos: dict[str, Any]
) -> Optional[tuple[str, Union[str, dict[str, str]], str]]:
autoupdate = infos.get("autoupdate")
if autoupdate is None:
return None
@@ -327,7 +373,9 @@ class AppAutoUpdater:
asset = autoupdate.get("asset", "tarball")
strategy = autoupdate.get("strategy")
if strategy not in STRATEGIES:
raise ValueError(f"Unknown update strategy '{strategy}' for '{name}', expected one of {STRATEGIES}")
raise ValueError(
f"Unknown update strategy '{strategy}' for '{name}', expected one of {STRATEGIES}"
)
result = self.get_latest_version_and_asset(strategy, asset, autoupdate)
if result is None:
@@ -347,14 +395,22 @@ class AppAutoUpdater:
print("Up to date")
return None
try:
if self.tag_to_int_tuple(self.current_version) > self.tag_to_int_tuple(new_version):
print("Up to date (current version appears more recent than newest version found)")
if self.tag_to_int_tuple(self.current_version) > self.tag_to_int_tuple(
new_version
):
print(
"Up to date (current version appears more recent than newest version found)"
)
return None
except (AssertionError, ValueError):
pass
if isinstance(assets, dict) and isinstance(infos.get("url"), str) or \
isinstance(assets, str) and not isinstance(infos.get("url"), str):
if (
isinstance(assets, dict)
and isinstance(infos.get("url"), str)
or isinstance(assets, str)
and not isinstance(infos.get("url"), str)
):
raise RuntimeError(
"It looks like there's an inconsistency between the old asset list and the new ones... "
"One is arch-specific, the other is not... Did you forget to define arch-specific regexes? "
@@ -364,7 +420,9 @@ class AppAutoUpdater:
if isinstance(assets, str) and infos["url"] == assets:
print(f"URL for asset {name} is up to date")
return None
if isinstance(assets, dict) and assets == {k: infos[k]["url"] for k in assets.keys()}:
if isinstance(assets, dict) and assets == {
k: infos[k]["url"] for k in assets.keys()
}:
print(f"URLs for asset {name} are up to date")
return None
print(f"Update needed for {name}")
@@ -376,21 +434,26 @@ class AppAutoUpdater:
name: url for name, url in assets.items() if re.match(regex, name)
}
if not matching_assets:
raise RuntimeError(f"No assets matching regex '{regex}' in {list(assets.keys())}")
raise RuntimeError(
f"No assets matching regex '{regex}' in {list(assets.keys())}"
)
if len(matching_assets) > 1:
raise RuntimeError(f"Too many assets matching regex '{regex}': {matching_assets}")
raise RuntimeError(
f"Too many assets matching regex '{regex}': {matching_assets}"
)
return next(iter(matching_assets.items()))
def get_latest_version_and_asset(self, strategy: str, asset: Union[str, dict], autoupdate
) -> Optional[tuple[str, Union[str, dict[str, str]], str]]:
def get_latest_version_and_asset(
self, strategy: str, asset: Union[str, dict], autoupdate
) -> Optional[tuple[str, Union[str, dict[str, str]], str]]:
upstream = autoupdate.get("upstream", self.main_upstream).strip("/")
version_re = autoupdate.get("version_regex", None)
_, remote_type, revision_type = strategy.split("_")
api: Union[GithubAPI, GitlabAPI, GiteaForgejoAPI]
if remote_type == "github":
assert (
upstream and upstream.startswith("https://github.com/")
assert upstream and upstream.startswith(
"https://github.com/"
), f"When using strategy {strategy}, having a defined upstream code repo on github.com is required"
api = GithubAPI(upstream, auth=get_github()[0])
if remote_type == "gitlab":
@@ -404,7 +467,9 @@ class AppAutoUpdater:
for release in api.releases()
if not release["draft"] and not release["prerelease"]
}
latest_version_orig, latest_version = self.relevant_versions(list(releases.keys()), self.app_id, version_re)
latest_version_orig, latest_version = self.relevant_versions(
list(releases.keys()), self.app_id, version_re
)
latest_release = releases[latest_version_orig]
latest_assets = {
a["name"]: a["browser_download_url"]
@@ -425,7 +490,9 @@ class AppAutoUpdater:
_, url = self.find_matching_asset(latest_assets, asset)
return latest_version, url, latest_release_html_url
except RuntimeError as e:
raise RuntimeError(f"{e}.\nFull release details on {latest_release_html_url}.") from e
raise RuntimeError(
f"{e}.\nFull release details on {latest_release_html_url}."
) from e
if isinstance(asset, dict):
new_assets = {}
@@ -434,34 +501,50 @@ class AppAutoUpdater:
_, url = self.find_matching_asset(latest_assets, asset_regex)
new_assets[asset_name] = url
except RuntimeError as e:
raise RuntimeError(f"{e}.\nFull release details on {latest_release_html_url}.") from e
raise RuntimeError(
f"{e}.\nFull release details on {latest_release_html_url}."
) from e
return latest_version, new_assets, latest_release_html_url
return None
if revision_type == "tag":
if asset != "tarball":
raise ValueError("For the latest tag strategies, only asset = 'tarball' is supported")
raise ValueError(
"For the latest tag strategies, only asset = 'tarball' is supported"
)
tags = [t["name"] for t in api.tags()]
latest_version_orig, latest_version = self.relevant_versions(tags, self.app_id, version_re)
latest_version_orig, latest_version = self.relevant_versions(
tags, self.app_id, version_re
)
latest_tarball = api.url_for_ref(latest_version_orig, RefType.tags)
return latest_version, latest_tarball, ""
if revision_type == "commit":
if asset != "tarball":
raise ValueError("For the latest commit strategies, only asset = 'tarball' is supported")
raise ValueError(
"For the latest commit strategies, only asset = 'tarball' is supported"
)
commits = api.commits()
latest_commit = commits[0]
latest_tarball = api.url_for_ref(latest_commit["sha"], RefType.commits)
# Let's have the version as something like "2023.01.23"
latest_commit_date = datetime.strptime(latest_commit["commit"]["author"]["date"][:10], "%Y-%m-%d")
latest_commit_date = datetime.strptime(
latest_commit["commit"]["author"]["date"][:10], "%Y-%m-%d"
)
version_format = autoupdate.get("force_version", "%Y.%m.%d")
latest_version = latest_commit_date.strftime(version_format)
return latest_version, latest_tarball, ""
return None
def replace_version_and_asset_in_manifest(self, content: str, new_version: str, new_assets_urls: Union[str, dict],
current_assets: dict, is_main: bool):
def replace_version_and_asset_in_manifest(
self,
content: str,
new_version: str,
new_assets_urls: Union[str, dict],
current_assets: dict,
is_main: bool,
):
replacements = []
if isinstance(new_assets_urls, str):
replacements = [
@@ -471,16 +554,21 @@ class AppAutoUpdater:
if isinstance(new_assets_urls, dict):
replacements = [
repl
for key, url in new_assets_urls.items() for repl in (
for key, url in new_assets_urls.items()
for repl in (
(current_assets[key]["url"], url),
(current_assets[key]["sha256"], self.sha256_of_remote_file(url))
(current_assets[key]["sha256"], self.sha256_of_remote_file(url)),
)
]
if is_main:
def repl(m: re.Match) -> str:
return m.group(1) + new_version + '~ynh1"'
content = re.sub(r"(\s*version\s*=\s*[\"\'])([^~\"\']+)(\~ynh\d+[\"\'])", repl, content)
content = re.sub(
r"(\s*version\s*=\s*[\"\'])([^~\"\']+)(\~ynh\d+[\"\'])", repl, content
)
for old, new in replacements:
content = content.replace(old, new)
@@ -538,22 +626,41 @@ def run_autoupdate_for_multiprocessing(data) -> tuple[str, tuple[State, str, str, str]]:
except Exception:
log_str = stdoutswitch.reset()
import traceback
t = traceback.format_exc()
return (app, (State.failure, log_str, str(t), ""))
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("apps", nargs="*", type=Path,
help="If not passed, the script will run on the catalog. Github keys required.")
parser.add_argument("--edit", action=argparse.BooleanOptionalAction, default=True,
help="Edit the local files")
parser.add_argument("--commit", action=argparse.BooleanOptionalAction, default=False,
help="Create a commit with the changes")
parser.add_argument("--pr", action=argparse.BooleanOptionalAction, default=False,
help="Create a pull request with the changes")
parser.add_argument(
"apps",
nargs="*",
type=Path,
help="If not passed, the script will run on the catalog. Github keys required.",
)
parser.add_argument(
"--edit",
action=argparse.BooleanOptionalAction,
default=True,
help="Edit the local files",
)
parser.add_argument(
"--commit",
action=argparse.BooleanOptionalAction,
default=False,
help="Create a commit with the changes",
)
parser.add_argument(
"--pr",
action=argparse.BooleanOptionalAction,
default=False,
help="Create a pull request with the changes",
)
parser.add_argument("--paste", action="store_true")
parser.add_argument("-j", "--processes", type=int, default=multiprocessing.cpu_count())
parser.add_argument(
"-j", "--processes", type=int, default=multiprocessing.cpu_count()
)
args = parser.parse_args()
appslib.logging_sender.enable()
@@ -572,8 +679,10 @@ def main() -> None:
apps_failed = {}
with multiprocessing.Pool(processes=args.processes) as pool:
tasks = pool.imap(run_autoupdate_for_multiprocessing,
((app, args.edit, args.commit, args.pr) for app in apps))
tasks = pool.imap(
run_autoupdate_for_multiprocessing,
((app, args.edit, args.commit, args.pr) for app in apps),
)
for app, result in tqdm.tqdm(tasks, total=len(apps), ascii=" ·#"):
state, current_version, main_version, pr_url = result
if state == State.up_to_date:
@@ -592,7 +701,9 @@ def main() -> None:
matrix_message += f"\n- {len(apps_already)} pending update PRs"
for app, info in apps_already.items():
paste_message += f"\n- {app}"
paste_message += f" ({info[0]} -> {info[1]})" if info[1] else " (app version did not change)"
paste_message += (
f" ({info[0]} -> {info[1]})" if info[1] else " (app version did not change)"
)
if info[2]:
paste_message += f" see {info[2]}"
@@ -601,7 +712,9 @@ def main() -> None:
matrix_message += f"\n- {len(apps_updated)} new apps PRs"
for app, info in apps_updated.items():
paste_message += f"\n- {app}"
paste_message += f" ({info[0]} -> {info[1]})" if info[1] else " (app version did not change)"
paste_message += (
f" ({info[0]} -> {info[1]})" if info[1] else " (app version did not change)"
)
if info[2]:
paste_message += f" see {info[2]}"

View file

@@ -15,11 +15,10 @@ class RefType(Enum):
class GithubAPI:
def __init__(self, upstream: str, auth: Optional[tuple[str, str]] = None):
self.upstream = upstream
self.upstream_repo = upstream.replace("https://github.com/", "")\
.strip("/")
self.upstream_repo = upstream.replace("https://github.com/", "").strip("/")
assert (
len(self.upstream_repo.split("/")) == 2
), f"'{upstream}' doesn't seem to be a github repository ?"
len(self.upstream_repo.split("/")) == 2
), f"'{upstream}' doesn't seem to be a github repository ?"
self.auth = auth
def internal_api(self, uri: str) -> Any:
@@ -74,7 +73,12 @@ class GitlabAPI:
# Second chance for some buggy gitlab instances...
name = self.project_path.split("/")[-1]
projects = self.internal_api(f"projects?search={name}")
project = next(filter(lambda x: x.get("path_with_namespace") == self.project_path, projects))
project = next(
filter(
lambda x: x.get("path_with_namespace") == self.project_path,
projects,
)
)
assert isinstance(project, dict)
project_id = project.get("id", None)
@@ -95,13 +99,11 @@ class GitlabAPI:
return [
{
"sha": commit["id"],
"commit": {
"author": {
"date": commit["committed_date"]
}
}
"commit": {"author": {"date": commit["committed_date"]}},
}
for commit in self.internal_api(f"projects/{self.project_id}/repository/commits")
for commit in self.internal_api(
f"projects/{self.project_id}/repository/commits"
)
]
def releases(self) -> list[dict[str, Any]]:
@@ -114,16 +116,21 @@ class GitlabAPI:
"prerelease": False,
"draft": False,
"html_url": release["_links"]["self"],
"assets": [{
"name": asset["name"],
"browser_download_url": asset["direct_asset_url"]
} for asset in release["assets"]["links"]],
}
"assets": [
{
"name": asset["name"],
"browser_download_url": asset["direct_asset_url"],
}
for asset in release["assets"]["links"]
],
}
for source in release["assets"]["sources"]:
r["assets"].append({
"name": f"source.{source['format']}",
"browser_download_url": source['url']
})
r["assets"].append(
{
"name": f"source.{source['format']}",
"browser_download_url": source["url"],
}
)
retval.append(r)
return retval

View file

@@ -9,6 +9,7 @@ import urllib.request
import github
from github import Github
# Debug
from rich.traceback import install
@@ -24,23 +25,25 @@ install(width=150, show_locals=True, locals_max_length=None, locals_max_string=None)
g = Github(open(".github_token").read().strip())
# Path to the file to be updated
path=".github/workflows/updater.yml"
path = ".github/workflows/updater.yml"
# Title of the PR
title="[autopatch] Upgrade auto-updater"
title = "[autopatch] Upgrade auto-updater"
# Body of the PR message
body="""
body = """
Auto-updater actions need upgrading to continue working:
- actions/checkout@v3
- peter-evans/create-pull-request@v4
"""
# Author of the commit
author=github.InputGitAuthor(open(".github_login").read().strip(), open(".github_email").read().strip())
author = github.InputGitAuthor(
open(".github_login").read().strip(), open(".github_email").read().strip()
)
# Name of the branch created for the PR
new_branch="upgrade-auto-updater"
new_branch = "upgrade-auto-updater"
#####
#
@@ -48,7 +51,7 @@ new_branch="upgrade-auto-updater"
#
#####
with open('processed.txt') as f:
with open("processed.txt") as f:
processed = f.read().splitlines()
#####
@@ -61,7 +64,7 @@ u = g.get_user("yunohost-bot")
org = g.get_organization("yunohost-apps")
# For each repositories belonging to the bot (user `u`)
i=0
i = 0
for repo in org.get_repos():
if repo.full_name not in processed:
@@ -73,50 +76,64 @@ for repo in org.get_repos():
# Make sure the repository has an auto-updater
try:
repo.get_contents(path, ref="refs/heads/"+base_branch)
repo.get_contents(path, ref="refs/heads/" + base_branch)
except:
with open('processed.txt', 'a') as pfile:
pfile.write(repo.full_name+'\n')
with open("processed.txt", "a") as pfile:
pfile.write(repo.full_name + "\n")
time.sleep(1.5)
continue
# Process the repo
print("Processing "+repo.full_name)
print("Processing " + repo.full_name)
try:
# Get the commit base for the new branch, and create it
commit_sha = repo.get_branch(base_branch).commit.sha
new_branch_ref = repo.create_git_ref(ref="refs/heads/"+new_branch, sha=commit_sha)
new_branch_ref = repo.create_git_ref(
ref="refs/heads/" + new_branch, sha=commit_sha
)
except:
new_branch_ref = repo.get_git_ref(ref="heads/"+new_branch)
new_branch_ref = repo.get_git_ref(ref="heads/" + new_branch)
# Get current file contents
contents = repo.get_contents(path, ref=new_branch_ref.ref)
# Update the file
updater_yml = contents.decoded_content.decode("unicode_escape")
updater_yml = re.sub(r'(?m)uses: actions/checkout@v[\d]+', "uses: actions/checkout@v3", updater_yml)
updater_yml = re.sub(r'(?m)uses: peter-evans/create-pull-request@v[\d]+', "uses: peter-evans/create-pull-request@v4", updater_yml)
updated = repo.update_file(contents.path,
message=title,
content=updater_yml,
sha=contents.sha,
branch=new_branch,
author=author)
updater_yml = re.sub(
r"(?m)uses: actions/checkout@v[\d]+",
"uses: actions/checkout@v3",
updater_yml,
)
updater_yml = re.sub(
r"(?m)uses: peter-evans/create-pull-request@v[\d]+",
"uses: peter-evans/create-pull-request@v4",
updater_yml,
)
updated = repo.update_file(
contents.path,
message=title,
content=updater_yml,
sha=contents.sha,
branch=new_branch,
author=author,
)
# Wait a bit to preserve the API rate limit
time.sleep(1.5)
# Open the PR
pr = repo.create_pull(title="Upgrade auto-updater", body=body, head=new_branch, base=base_branch)
pr = repo.create_pull(
title="Upgrade auto-updater", body=body, head=new_branch, base=base_branch
)
print(repo.full_name+" updated with PR #"+ str(pr.id))
i=i+1
print(repo.full_name + " updated with PR #" + str(pr.id))
i = i + 1
# Wait a bit to preserve the API rate limit
time.sleep(1.5)
with open('processed.txt', 'a') as pfile:
pfile.write(repo.full_name+'\n')
with open("processed.txt", "a") as pfile:
pfile.write(repo.full_name + "\n")
print("Done. "+str(i)+" repos processed")
print("Done. " + str(i) + " repos processed")

View file

@@ -10,17 +10,22 @@ u = g.get_user("yunohost-bot")
# Let's build a minimalistic summary table
print("| Repository ".ljust(22) + " | Decision |")
print("| ".ljust(22, '-') + " | -------- |")
print("| ".ljust(22, "-") + " | -------- |")
# For each repositories belonging to the bot (user `u`)
for repo in u.get_repos():
# Proceed iff the repository is a fork (`parent` key is set) of a repository in our apps organization
if repo.parent.full_name.split('/')[0] != "YunoHost-Apps":
print("| "+repo.name.ljust(20) + " | Skipping |")
if repo.parent.full_name.split("/")[0] != "YunoHost-Apps":
print("| " + repo.name.ljust(20) + " | Skipping |")
else:
# If none of the PRs are opened by the bot, delete the repository
if not any([ (pr.user == u) for pr in list(repo.parent.get_pulls(state='open', sort='created')) ]):
print("| "+repo.name.ljust(20) + " | Deleting |")
if not any(
[
(pr.user == u)
for pr in list(repo.parent.get_pulls(state="open", sort="created"))
]
):
print("| " + repo.name.ljust(20) + " | Deleting |")
repo.delete()
else:
print("| "+repo.name.ljust(20) + " | Keeping |")
print("| " + repo.name.ljust(20) + " | Keeping |")

View file

@@ -6,20 +6,29 @@ from difflib import SequenceMatcher
from typing import Any, Dict, Generator, List, Tuple
import jsonschema
from appslib.utils import (REPO_APPS_ROOT, # pylint: disable=import-error
get_antifeatures, get_catalog, get_categories,
get_graveyard, get_wishlist)
from appslib.utils import (
REPO_APPS_ROOT, # pylint: disable=import-error
get_antifeatures,
get_catalog,
get_categories,
get_graveyard,
get_wishlist,
)
def validate_schema() -> Generator[str, None, None]:
with open(REPO_APPS_ROOT / "schemas" / "apps.toml.schema.json", encoding="utf-8") as file:
with open(
REPO_APPS_ROOT / "schemas" / "apps.toml.schema.json", encoding="utf-8"
) as file:
apps_catalog_schema = json.load(file)
validator = jsonschema.Draft202012Validator(apps_catalog_schema)
for error in validator.iter_errors(get_catalog()):
yield f"at .{'.'.join(error.path)}: {error.message}"
def check_app(app: str, infos: Dict[str, Any]) -> Generator[Tuple[str, bool], None, None]:
def check_app(
app: str, infos: Dict[str, Any]
) -> Generator[Tuple[str, bool], None, None]:
if "state" not in infos:
yield "state is missing", True
return

View file

@@ -21,10 +21,15 @@ from git import Repo
import appslib.logging_sender # pylint: disable=import-error
from app_caches import app_cache_folder # pylint: disable=import-error
from app_caches import apps_cache_update_all # pylint: disable=import-error
from appslib.utils import (REPO_APPS_ROOT, # pylint: disable=import-error
get_antifeatures, get_catalog, get_categories)
from packaging_v2.convert_v1_manifest_to_v2_for_catalog import \
convert_v1_manifest_to_v2_for_catalog # pylint: disable=import-error
from appslib.utils import (
REPO_APPS_ROOT, # pylint: disable=import-error
get_antifeatures,
get_catalog,
get_categories,
)
from packaging_v2.convert_v1_manifest_to_v2_for_catalog import (
convert_v1_manifest_to_v2_for_catalog,
) # pylint: disable=import-error
now = time.time()
@@ -37,7 +42,7 @@ def categories_list():
infos["id"] = category_id
for subtag_id, subtag_infos in infos.get("subtags", {}).items():
subtag_infos["id"] = subtag_id
infos["subtags"] = list(infos.get('subtags', {}).values())
infos["subtags"] = list(infos.get("subtags", {}).values())
return list(new_categories.values())
@@ -54,6 +59,7 @@ def antifeatures_list():
# Actual list build management #
################################
def __build_app_dict(data) -> Optional[tuple[str, dict[str, Any]]]:
name, info = data
try:
@@ -93,13 +99,17 @@ def write_catalog_v2(base_catalog, target_dir: Path) -> None:
target_file = target_dir / "apps.json"
target_file.parent.mkdir(parents=True, exist_ok=True)
target_file.open("w", encoding="utf-8").write(json.dumps(full_catalog, sort_keys=True))
target_file.open("w", encoding="utf-8").write(
json.dumps(full_catalog, sort_keys=True)
)
def write_catalog_v3(base_catalog, target_dir: Path) -> None:
result_dict_with_manifest_v2 = copy.deepcopy(base_catalog)
for app in result_dict_with_manifest_v2.values():
packaging_format = float(str(app["manifest"].get("packaging_format", "")).strip() or "0")
packaging_format = float(
str(app["manifest"].get("packaging_format", "")).strip() or "0"
)
if packaging_format < 2:
app["manifest"] = convert_v1_manifest_to_v2_for_catalog(app["manifest"])
@@ -117,7 +127,12 @@ def write_catalog_v3(base_catalog, target_dir: Path) -> None:
appid = appid.lower()
logo_source = REPO_APPS_ROOT / "logos" / f"{appid}.png"
if logo_source.exists():
logo_hash = subprocess.check_output(["sha256sum", logo_source]).strip().decode("utf-8").split()[0]
logo_hash = (
subprocess.check_output(["sha256sum", logo_source])
.strip()
.decode("utf-8")
.split()[0]
)
shutil.copyfile(logo_source, logos_dir / f"{logo_hash}.png")
# FIXME: implement something to clean up old logo stuff in the builds/.../logos/ folder somehow
else:
@@ -132,7 +147,9 @@ def write_catalog_v3(base_catalog, target_dir: Path) -> None:
target_file = target_dir / "apps.json"
target_file.parent.mkdir(parents=True, exist_ok=True)
target_file.open("w", encoding="utf-8").write(json.dumps(full_catalog, sort_keys=True))
target_file.open("w", encoding="utf-8").write(
json.dumps(full_catalog, sort_keys=True)
)
def write_catalog_doc(base_catalog, target_dir: Path) -> None:
@@ -160,14 +177,13 @@ def write_catalog_doc(base_catalog, target_dir: Path) -> None:
for k, v in base_catalog.items()
if v["state"] == "working"
}
full_catalog = {
"apps": result_dict_doc,
"categories": categories_list()
}
full_catalog = {"apps": result_dict_doc, "categories": categories_list()}
target_file = target_dir / "apps.json"
target_file.parent.mkdir(parents=True, exist_ok=True)
target_file.open("w", encoding="utf-8").write(json.dumps(full_catalog, sort_keys=True))
target_file.open("w", encoding="utf-8").write(
json.dumps(full_catalog, sort_keys=True)
)
def build_app_dict(app, infos):
@@ -177,15 +193,38 @@ def build_app_dict(app, infos):
repo = Repo(this_app_cache)
commits_in_apps_json = Repo(REPO_APPS_ROOT).git.log(
"-S", f"\"{app}\"", "--first-parent", "--reverse", "--date=unix",
"--format=%cd", "--", "apps.json").split("\n")
commits_in_apps_json = (
Repo(REPO_APPS_ROOT)
.git.log(
"-S",
f'"{app}"',
"--first-parent",
"--reverse",
"--date=unix",
"--format=%cd",
"--",
"apps.json",
)
.split("\n")
)
if len(commits_in_apps_json) > 1:
first_commit = commits_in_apps_json[0]
else:
commits_in_apps_toml = Repo(REPO_APPS_ROOT).git.log(
"-S", f"[{app}]", "--first-parent", "--reverse", "--date=unix",
"--format=%cd", "--", "apps.json", "apps.toml").split("\n")
commits_in_apps_toml = (
Repo(REPO_APPS_ROOT)
.git.log(
"-S",
f"[{app}]",
"--first-parent",
"--reverse",
"--date=unix",
"--format=%cd",
"--",
"apps.json",
"apps.toml",
)
.split("\n")
)
first_commit = commits_in_apps_toml[0]
# Assume the first entry we get (= the oldest) is the time the app was added
@@ -204,14 +243,18 @@ def build_app_dict(app, infos):
try:
_ = repo.commit(infos["revision"])
except ValueError as err:
raise RuntimeError(f"Revision ain't in history ? {infos['revision']}") from err
raise RuntimeError(
f"Revision ain't in history ? {infos['revision']}"
) from err
# Find timestamp corresponding to that commit
timestamp = repo.commit(infos["revision"]).committed_date
# Build the dict with all the infos
if (this_app_cache / "manifest.toml").exists():
manifest = toml.load((this_app_cache / "manifest.toml").open("r"), _dict=OrderedDict)
manifest = toml.load(
(this_app_cache / "manifest.toml").open("r"), _dict=OrderedDict
)
else:
manifest = json.load((this_app_cache / "manifest.json").open("r"))
@@ -227,27 +270,45 @@ def build_app_dict(app, infos):
"manifest": manifest,
"state": infos["state"],
"level": infos.get("level", "?"),
"maintained": 'package-not-maintained' not in infos.get('antifeatures', []),
"maintained": "package-not-maintained" not in infos.get("antifeatures", []),
"high_quality": infos.get("high_quality", False),
"featured": infos.get("featured", False),
"category": infos.get("category", None),
"subtags": infos.get("subtags", []),
"potential_alternative_to": infos.get("potential_alternative_to", []),
"antifeatures": list(
set(list(manifest.get("antifeatures", {}).keys()) + infos.get("antifeatures", []))
set(
list(manifest.get("antifeatures", {}).keys())
+ infos.get("antifeatures", [])
)
),
}
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("target_dir", type=Path, nargs="?",
default=REPO_APPS_ROOT / "builds" / "default",
help="The directory to write the catalogs to")
parser.add_argument("-j", "--jobs", type=int, default=multiprocessing.cpu_count(), metavar="N",
help="Allow N threads to run in parallel")
parser.add_argument("-c", "--update-cache", action=argparse.BooleanOptionalAction, default=True,
help="Update the apps cache")
parser.add_argument(
"target_dir",
type=Path,
nargs="?",
default=REPO_APPS_ROOT / "builds" / "default",
help="The directory to write the catalogs to",
)
parser.add_argument(
"-j",
"--jobs",
type=int,
default=multiprocessing.cpu_count(),
metavar="N",
help="Allow N threads to run in parallel",
)
parser.add_argument(
"-c",
"--update-cache",
action=argparse.BooleanOptionalAction,
default=True,
help="Update the apps cache",
)
args = parser.parse_args()
appslib.logging_sender.enable()

View file

@@ -9,11 +9,7 @@ from glob import glob
def check_output(cmd):
return (
subprocess.check_output(cmd, shell=True)
.decode("utf-8")
.strip()
)
return subprocess.check_output(cmd, shell=True).decode("utf-8").strip()
def convert_app_sources(folder):
@@ -35,7 +31,13 @@ def convert_app_sources(folder):
"sha256": D["sum"],
}
if D.get("format", "tar.gz") not in ["zip", "tar.gz", "tar.xz", "tgz", "tar.bz2"]:
if D.get("format", "tar.gz") not in [
"zip",
"tar.gz",
"tar.xz",
"tgz",
"tar.bz2",
]:
new_D["format"] = D["format"]
if "filename" in D:
new_D["rename"] = D["filename"]
@@ -115,12 +117,12 @@ def _convert_v1_manifest_to_v2(app_path):
"sso": "?",
"disk": "50M",
"ram.build": "50M",
"ram.runtime": "50M"
"ram.runtime": "50M",
}
maintainers = manifest.get("maintainer", {})
if isinstance(maintainers, list):
maintainers = [m['name'] for m in maintainers]
maintainers = [m["name"] for m in maintainers]
else:
maintainers = [maintainers["name"]] if maintainers.get("name") else []
@@ -130,15 +132,30 @@ def _convert_v1_manifest_to_v2(app_path):
manifest["install"] = {}
for question in install_questions:
name = question.pop("name")
if "ask" in question and name in ["domain", "path", "admin", "is_public", "password"]:
if "ask" in question and name in [
"domain",
"path",
"admin",
"is_public",
"password",
]:
question.pop("ask")
if question.get("example") and question.get("type") in ["domain", "path", "user", "boolean", "password"]:
if question.get("example") and question.get("type") in [
"domain",
"path",
"user",
"boolean",
"password",
]:
question.pop("example")
manifest["install"][name] = question
# Rename is_public to init_main_permission
manifest["install"] = {(k if k != "is_public" else "init_main_permission"): v for k, v in manifest["install"].items()}
manifest["install"] = {
(k if k != "is_public" else "init_main_permission"): v
for k, v in manifest["install"].items()
}
if "init_main_permission" in manifest["install"]:
manifest["install"]["init_main_permission"]["type"] = "group"
@@ -166,12 +183,16 @@ def _convert_v1_manifest_to_v2(app_path):
# FIXME: Parse ynh_permission_create --permission="admin" --url="/wp-login.php" --additional_urls="/wp-admin.php" --allowed=$admin_wordpress
ports = check_output(f"sed -nr 's/(\\w+)=.*ynh_find_port[^0-9]*([0-9]+)\\)/\\1,\\2/p' '{app_path}/scripts/install'")
ports = check_output(
f"sed -nr 's/(\\w+)=.*ynh_find_port[^0-9]*([0-9]+)\\)/\\1,\\2/p' '{app_path}/scripts/install'"
)
if ports:
manifest["resources"]["ports"] = {}
for port in ports.split("\n"):
name, default = port.split(",")
exposed = check_output(f"sed -nr 's/.*yunohost firewall allow .*(TCP|UDP|Both).*${name}/\\1/p' '{app_path}/scripts/install'")
exposed = check_output(
f"sed -nr 's/.*yunohost firewall allow .*(TCP|UDP|Both).*${name}/\\1/p' '{app_path}/scripts/install'"
)
if exposed == "Both":
exposed = True
@@ -180,7 +201,9 @@ def _convert_v1_manifest_to_v2(app_path):
name = "main"
if not default.isdigit():
print(f"Failed to parse '{default}' as a port number ... Will use 12345 instead")
print(
f"Failed to parse '{default}' as a port number ... Will use 12345 instead"
)
default = 12345
manifest["resources"]["ports"][f"{name}.default"] = int(default)
@@ -188,35 +211,57 @@ def _convert_v1_manifest_to_v2(app_path):
manifest["resources"]["ports"][f"{name}.exposed"] = exposed
maybequote = "[\"'\"'\"']?"
apt_dependencies = check_output(f"sed -nr 's/.*_dependencies={maybequote}(.*){maybequote}? *$/\\1/p' '{app_path}/scripts/_common.sh' 2>/dev/null | tr -d '\"' | sed 's@ @\\n@g'")
php_version = check_output(f"sed -nr 's/^ *YNH_PHP_VERSION={maybequote}(.*){maybequote}?$/\\1/p' '{app_path}/scripts/_common.sh' 2>/dev/null | tr -d \"\\\"'\"")
apt_dependencies = check_output(
f"sed -nr 's/.*_dependencies={maybequote}(.*){maybequote}? *$/\\1/p' '{app_path}/scripts/_common.sh' 2>/dev/null | tr -d '\"' | sed 's@ @\\n@g'"
)
php_version = check_output(
f"sed -nr 's/^ *YNH_PHP_VERSION={maybequote}(.*){maybequote}?$/\\1/p' '{app_path}/scripts/_common.sh' 2>/dev/null | tr -d \"\\\"'\""
)
if apt_dependencies.strip():
if php_version:
apt_dependencies = apt_dependencies.replace("${YNH_PHP_VERSION}", php_version)
apt_dependencies = ', '.join([d for d in apt_dependencies.split("\n") if d])
apt_dependencies = apt_dependencies.replace(
"${YNH_PHP_VERSION}", php_version
)
apt_dependencies = ", ".join([d for d in apt_dependencies.split("\n") if d])
manifest["resources"]["apt"] = {"packages": apt_dependencies}
extra_apt_repos = check_output(r"sed -nr 's/.*_extra_app_dependencies.*repo=\"(.*)\".*package=\"(.*)\".*key=\"(.*)\"/\1,\2,\3/p' %s/scripts/install" % app_path)
extra_apt_repos = check_output(
r"sed -nr 's/.*_extra_app_dependencies.*repo=\"(.*)\".*package=\"(.*)\".*key=\"(.*)\"/\1,\2,\3/p' %s/scripts/install"
% app_path
)
if extra_apt_repos:
for i, extra_apt_repo in enumerate(extra_apt_repos.split("\n")):
repo, packages, key = extra_apt_repo.split(",")
packages = packages.replace('$', '#FIXME#$')
packages = packages.replace("$", "#FIXME#$")
if "apt" not in manifest["resources"]:
manifest["resources"]["apt"] = {}
if "extras" not in manifest["resources"]["apt"]:
manifest["resources"]["apt"]["extras"] = []
manifest["resources"]["apt"]["extras"].append({
"repo": repo,
"key": key,
"packages": packages,
})
manifest["resources"]["apt"]["extras"].append(
{
"repo": repo,
"key": key,
"packages": packages,
}
)
if os.system(f"grep -q 'ynh_mysql_setup_db' {app_path}/scripts/install") == 0:
manifest["resources"]["database"] = {"type": "mysql"}
elif os.system(f"grep -q 'ynh_psql_setup_db' {app_path}/scripts/install") == 0:
manifest["resources"]["database"] = {"type": "postgresql"}
keys_to_keep = ["packaging_format", "id", "name", "description", "version", "maintainers", "upstream", "integration", "install", "resources"]
keys_to_keep = [
"packaging_format",
"id",
"name",
"description",
"version",
"maintainers",
"upstream",
"integration",
"install",
"resources",
]
keys_to_del = [key for key in manifest.keys() if key not in keys_to_keep]
for key in keys_to_del:
@@ -246,19 +291,35 @@ def _dump_v2_manifest_as_toml(manifest):
upstream = table()
for key, value in manifest["upstream"].items():
upstream[key] = value
upstream["cpe"].comment("FIXME: optional but recommended if relevant, this is meant to contain the Common Platform Enumeration, which is sort of a standard id for applications defined by the NIST. In particular, Yunohost may use this is in the future to easily track CVE (=security reports) related to apps. The CPE may be obtained by searching here: https://nvd.nist.gov/products/cpe/search. For example, for Nextcloud, the CPE is 'cpe:2.3:a:nextcloud:nextcloud' (no need to include the version number)")
upstream["fund"].comment("FIXME: optional but recommended (or remove if irrelevant / not applicable). This is meant to be an URL where people can financially support this app, especially when its development is based on volunteers and/or financed by its community. YunoHost may later advertise it in the webadmin.")
upstream["cpe"].comment(
"FIXME: optional but recommended if relevant, this is meant to contain the Common Platform Enumeration, which is sort of a standard id for applications defined by the NIST. In particular, Yunohost may use this is in the future to easily track CVE (=security reports) related to apps. The CPE may be obtained by searching here: https://nvd.nist.gov/products/cpe/search. For example, for Nextcloud, the CPE is 'cpe:2.3:a:nextcloud:nextcloud' (no need to include the version number)"
)
upstream["fund"].comment(
"FIXME: optional but recommended (or remove if irrelevant / not applicable). This is meant to be an URL where people can financially support this app, especially when its development is based on volunteers and/or financed by its community. YunoHost may later advertise it in the webadmin."
)
toml_manifest["upstream"] = upstream
integration = table()
for key, value in manifest["integration"].items():
integration.add(key, value)
integration["architectures"].comment('FIXME: can be replaced by a list of supported archs using the dpkg --print-architecture nomenclature (amd64/i386/armhf/arm64), for example: ["amd64", "i386"]')
integration["ldap"].comment('FIXME: replace with true, false, or "not_relevant". Not to confuse with the "sso" key : the "ldap" key corresponds to wether or not a user *can* login on the app using its YunoHost credentials.')
integration["sso"].comment('FIXME: replace with true, false, or "not_relevant". Not to confuse with the "ldap" key : the "sso" key corresponds to wether or not a user is *automatically logged-in* on the app when logged-in on the YunoHost portal.')
integration["disk"].comment('FIXME: replace with an **estimate** minimum disk requirement. e.g. 20M, 400M, 1G, ...')
integration["ram.build"].comment('FIXME: replace with an **estimate** minimum ram requirement. e.g. 50M, 400M, 1G, ...')
integration["ram.runtime"].comment('FIXME: replace with an **estimate** minimum ram requirement. e.g. 50M, 400M, 1G, ...')
integration["architectures"].comment(
'FIXME: can be replaced by a list of supported archs using the dpkg --print-architecture nomenclature (amd64/i386/armhf/arm64), for example: ["amd64", "i386"]'
)
integration["ldap"].comment(
'FIXME: replace with true, false, or "not_relevant". Not to confuse with the "sso" key : the "ldap" key corresponds to wether or not a user *can* login on the app using its YunoHost credentials.'
)
integration["sso"].comment(
'FIXME: replace with true, false, or "not_relevant". Not to confuse with the "ldap" key : the "sso" key corresponds to wether or not a user is *automatically logged-in* on the app when logged-in on the YunoHost portal.'
)
integration["disk"].comment(
"FIXME: replace with an **estimate** minimum disk requirement. e.g. 20M, 400M, 1G, ..."
)
integration["ram.build"].comment(
"FIXME: replace with an **estimate** minimum ram requirement. e.g. 50M, 400M, 1G, ..."
)
integration["ram.runtime"].comment(
"FIXME: replace with an **estimate** minimum ram requirement. e.g. 50M, 400M, 1G, ..."
)
toml_manifest["integration"] = integration
install = table()
@@ -267,7 +328,11 @@ def _dump_v2_manifest_as_toml(manifest):
install[key].indent(4)
if key in ["domain", "path", "admin", "is_public", "password"]:
install[key].add(comment("this is a generic question - ask strings are automatically handled by Yunohost's core"))
install[key].add(
comment(
"this is a generic question - ask strings are automatically handled by Yunohost's core"
)
)
for lang, value2 in value.get("ask", {}).items():
install[key].add(f"ask.{lang}", value2)
@@ -305,8 +370,8 @@ def _dump_v2_manifest_as_toml(manifest):
toml_manifest_dump = dumps(toml_manifest)
regex = re.compile(r'\"((description|ask|help)\.[a-z]{2})\"')
toml_manifest_dump = regex.sub(r'\1', toml_manifest_dump)
regex = re.compile(r"\"((description|ask|help)\.[a-z]{2})\"")
toml_manifest_dump = regex.sub(r"\1", toml_manifest_dump)
toml_manifest_dump = toml_manifest_dump.replace('"ram.build"', "ram.build")
toml_manifest_dump = toml_manifest_dump.replace('"ram.runtime"', "ram.runtime")
toml_manifest_dump = toml_manifest_dump.replace('"main.url"', "main.url")
@@ -324,7 +389,9 @@ def _dump_v2_manifest_as_toml(manifest):
if "ports" in manifest["resources"]:
for port_thing in manifest["resources"]["ports"].keys():
toml_manifest_dump = toml_manifest_dump.replace(f'"{port_thing}"', f"{port_thing}")
toml_manifest_dump = toml_manifest_dump.replace(
f'"{port_thing}"', f"{port_thing}"
)
return toml_manifest_dump
@@ -395,7 +462,9 @@ def cleanup_scripts_and_conf(folder):
"^.*ynh_script_progression.*Reloading NGINX web server",
"^.*ynh_systemd_action --service_name=nginx --action=reload",
]
patterns_to_remove_in_scripts = [re.compile(f"({p})", re.MULTILINE) for p in patterns_to_remove_in_scripts]
patterns_to_remove_in_scripts = [
re.compile(f"({p})", re.MULTILINE) for p in patterns_to_remove_in_scripts
]
replaces = [
("path_url", "path"),
@@ -404,13 +473,21 @@ def cleanup_scripts_and_conf(folder):
("FINALPATH", "INSTALL_DIR"),
("datadir", "data_dir"),
("DATADIR", "DATA_DIR"),
('--source_id="$architecture"', ''),
('--source_id="$YNH_ARCH"', ''),
('--source_id=app', ''),
('--source_id="app.$architecture"', ''),
('--source_id="$architecture"', ""),
('--source_id="$YNH_ARCH"', ""),
("--source_id=app", ""),
('--source_id="app.$architecture"', ""),
]
for s in ["_common.sh", "install", "remove", "upgrade", "backup", "restore", "change_url"]:
for s in [
"_common.sh",
"install",
"remove",
"upgrade",
"backup",
"restore",
"change_url",
]:
script = f"{folder}/scripts/{s}"
@@ -420,10 +497,18 @@ def cleanup_scripts_and_conf(folder):
content = open(script).read()
for pattern in patterns_to_remove_in_scripts:
if "^.*ynh_script_progression.*Reloading NGINX web server" in pattern.pattern and s == "restore":
if (
"^.*ynh_script_progression.*Reloading NGINX web server"
in pattern.pattern
and s == "restore"
):
# This case is legit
continue
if "^.*ynh_systemd_action --service_name=nginx --action=reload" in pattern.pattern and s == "restore":
if (
"^.*ynh_systemd_action --service_name=nginx --action=reload"
in pattern.pattern
and s == "restore"
):
# This case is legit
continue
content = pattern.sub(r"#REMOVEME? \1", content)
@@ -436,7 +521,9 @@ def cleanup_scripts_and_conf(folder):
pattern = re.compile("(^.*nginx.*$)", re.MULTILINE)
content = pattern.sub(r"#REMOVEME? \1", content)
pattern = re.compile("(^.*ynh_script_progress.*Updat.* NGINX.*conf.*$)", re.MULTILINE)
pattern = re.compile(
"(^.*ynh_script_progress.*Updat.* NGINX.*conf.*$)", re.MULTILINE
)
content = pattern.sub(r"\1\n\nynh_change_url_nginx_config", content)
pattern = re.compile(r"(ynh_clean_check_starting)", re.MULTILINE)
@@ -446,7 +533,6 @@ def cleanup_scripts_and_conf(folder):
pattern = re.compile(r"(^\s+path=.*$)", re.MULTILINE)
content = pattern.sub(r"#REMOVEME? \1", content)
open(script, "w").write(content)
for conf in os.listdir(f"{folder}/conf"):
@@ -470,15 +556,15 @@ if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Attempt to automatically convert a v1 YunoHost app to v2 (at least as much as possible) : parse the app scripts to auto-generate the manifest.toml, and remove now-useless lines from the app scripts"
)
parser.add_argument(
"app_path", help="Path to the app to convert"
)
parser.add_argument("app_path", help="Path to the app to convert")
args = parser.parse_args()
manifest = _convert_v1_manifest_to_v2(args.app_path)
with open(args.app_path + "/manifest.toml", "w") as manifest_file:
manifest_file.write("#:schema https://raw.githubusercontent.com/YunoHost/apps/master/schemas/manifest.v2.schema.json\n\n")
manifest_file.write(
"#:schema https://raw.githubusercontent.com/YunoHost/apps/master/schemas/manifest.v2.schema.json\n\n"
)
manifest_file.write(_dump_v2_manifest_as_toml(manifest))
cleanup_scripts_and_conf(args.app_path)

View file

@@ -17,18 +17,22 @@ def convert_v1_manifest_to_v2_for_catalog(manifest):
manifest["upstream"]["website"] = manifest["url"]
manifest["integration"] = {
"yunohost": manifest.get("requirements", {}).get("yunohost", "").replace(">", "").replace("=", "").replace(" ", ""),
"yunohost": manifest.get("requirements", {})
.get("yunohost", "")
.replace(">", "")
.replace("=", "")
.replace(" ", ""),
"architectures": "all",
"multi_instance": manifest.get("multi_instance", False),
"ldap": "?",
"sso": "?",
"disk": "50M",
"ram": {"build": "50M", "runtime": "10M"}
"ram": {"build": "50M", "runtime": "10M"},
}
maintainers = manifest.get("maintainer", {})
if isinstance(maintainers, list):
maintainers = [m['name'] for m in maintainers]
maintainers = [m["name"] for m in maintainers]
else:
maintainers = [maintainers["name"]] if maintainers.get("name") else []
@@ -39,21 +43,39 @@ def convert_v1_manifest_to_v2_for_catalog(manifest):
manifest["install"] = {}
for question in install_questions:
name = question.pop("name")
if "ask" in question and name in ["domain", "path", "admin", "is_public", "password"]:
if "ask" in question and name in [
"domain",
"path",
"admin",
"is_public",
"password",
]:
question.pop("ask")
if question.get("example") and question.get("type") in ["domain", "path", "user", "boolean", "password"]:
if question.get("example") and question.get("type") in [
"domain",
"path",
"user",
"boolean",
"password",
]:
question.pop("example")
manifest["install"][name] = question
manifest["resources"] = {
"system_user": {},
"install_dir": {
"alias": "final_path"
}
}
manifest["resources"] = {"system_user": {}, "install_dir": {"alias": "final_path"}}
keys_to_keep = ["packaging_format", "id", "name", "description", "version", "maintainers", "upstream", "integration", "install", "resources"]
keys_to_keep = [
"packaging_format",
"id",
"name",
"description",
"version",
"maintainers",
"upstream",
"integration",
"install",
"resources",
]
keys_to_del = [key for key in manifest.keys() if key not in keys_to_keep]
for key in keys_to_del:

View file

@@ -59,7 +59,9 @@ def generate_READMEs(app_path: Path):
if README_template.name == "README.md.j2":
continue
if not README_template.name.endswith(".j2") or not README_template.name.startswith("README_"):
if not README_template.name.endswith(
".j2"
) or not README_template.name.startswith("README_"):
continue
language_code = README_template.name.split("_")[1].split(".")[0]

View file

@@ -13,15 +13,25 @@ def test_running_make_readme():
name = Path(name)
DIRECTORY = name / "gotosocial_ynh"
subprocess.check_call(["git", "clone", "https://github.com/yunohost-apps/gotosocial_ynh", DIRECTORY, "-q"])
subprocess.check_call(
[
"git",
"clone",
"https://github.com/yunohost-apps/gotosocial_ynh",
DIRECTORY,
"-q",
]
)
subprocess.check_call(["git", "checkout", COMMIT_ID, "-q"], cwd=DIRECTORY)
print(CWD)
subprocess.check_call([CWD / "../make_readme.py", DIRECTORY])
assert open(CWD / "README.md").read() == open(DIRECTORY / "README.md").read()
assert open(CWD / "README_fr.md").read() == open(DIRECTORY / "README_fr.md").read()
assert (
open(CWD / "README_fr.md").read() == open(DIRECTORY / "README_fr.md").read()
)
if __name__ == '__main__':
if __name__ == "__main__":
test_running_make_readme()

View file

@@ -35,14 +35,21 @@ async def git(cmd, in_folder=None):
cmd = ["git"] + cmd
cmd = " ".join(map(shlex.quote, cmd))
print(cmd)
command = await asyncio.create_subprocess_shell(cmd, env=my_env, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT)
command = await asyncio.create_subprocess_shell(
cmd,
env=my_env,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.STDOUT,
)
data = await command.stdout.read()
return data.decode().strip()
@app.route("/github", methods=["GET"])
def main_route(request):
return text("You aren't supposed to go on this page using a browser, it's for webhooks push instead.")
return text(
"You aren't supposed to go on this page using a browser, it's for webhooks push instead."
)
@app.route("/github", methods=["POST"])
@@ -58,7 +65,9 @@ async def on_push(request):
return response.json({"error": "Signing algorightm is not sha1 ?!"}, 501)
# HMAC requires the key to be bytes, but data is string
mac = hmac.new(github_webhook_secret.encode(), msg=request.body, digestmod=hashlib.sha1)
mac = hmac.new(
github_webhook_secret.encode(), msg=request.body, digestmod=hashlib.sha1
)
if not hmac.compare_digest(str(mac.hexdigest()), str(signature)):
return response.json({"error": "Bad signature ?!"}, 403)
@@ -71,19 +80,42 @@ async def on_push(request):
print(f"{repository} -> branch '{branch}'")
with tempfile.TemporaryDirectory() as folder:
await git(["clone", f"https://{login}:{token}@github.com/{repository}", "--single-branch", "--branch", branch, folder])
await git(
[
"clone",
f"https://{login}:{token}@github.com/{repository}",
"--single-branch",
"--branch",
branch,
folder,
]
)
generate_READMEs(folder)
await git(["add", "README*.md"], in_folder=folder)
diff_not_empty = await asyncio.create_subprocess_shell(" ".join(["git", "diff", "HEAD", "--compact-summary"]), cwd=folder, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT)
diff_not_empty = await asyncio.create_subprocess_shell(
" ".join(["git", "diff", "HEAD", "--compact-summary"]),
cwd=folder,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.STDOUT,
)
diff_not_empty = await diff_not_empty.stdout.read()
diff_not_empty = diff_not_empty.decode().strip()
if not diff_not_empty:
print("nothing to do")
return text("nothing to do")
await git(["commit", "-a", "-m", "Auto-update README", "--author='yunohost-bot <yunohost@yunohost.org>'"], in_folder=folder)
await git(
[
"commit",
"-a",
"-m",
"Auto-update README",
"--author='yunohost-bot <yunohost@yunohost.org>'",
],
in_folder=folder,
)
await git(["push", "origin", branch, "--quiet"], in_folder=folder)
return text("ok")

View file

@@ -107,7 +107,8 @@ def list_changes(catalog, ci_results) -> dict[str, list[tuple[str, int, int]]]:
def pretty_changes(changes: dict[str, list[tuple[str, int, int]]]) -> str:
pr_body_template = textwrap.dedent("""
pr_body_template = textwrap.dedent(
"""
{%- if changes["major_regressions"] %}
### Major regressions 😭
{% for app in changes["major_regressions"] %}
@@ -138,7 +139,8 @@ def pretty_changes(changes: dict[str, list[tuple[str, int, int]]]) -> str:
- [ ] [{{app}} (See latest job if it exists)](https://ci-apps.yunohost.org/ci/apps/{{app}}/latestjob)
{%- endfor %}
{% endif %}
""")
"""
)
return jinja2.Environment().from_string(pr_body_template).render(changes=changes)
@@ -148,24 +150,34 @@ def make_pull_request(pr_body: str) -> None:
"title": "Update app levels according to CI results",
"body": pr_body,
"head": "update_app_levels",
"base": "master"
"base": "master",
}
with requests.Session() as s:
s.headers.update({"Authorization": f"token {github_token()}"})
response = s.post(f"https://api.github.com/repos/{APPS_REPO}/pulls", json=pr_data)
response = s.post(
f"https://api.github.com/repos/{APPS_REPO}/pulls", json=pr_data
)
if response.status_code == 422:
response = s.get(f"https://api.github.com/repos/{APPS_REPO}/pulls", data={"head": "update_app_levels"})
response = s.get(
f"https://api.github.com/repos/{APPS_REPO}/pulls",
data={"head": "update_app_levels"},
)
response.raise_for_status()
pr_number = response.json()[0]["number"]
# head can't be updated
del pr_data["head"]
response = s.patch(f"https://api.github.com/repos/{APPS_REPO}/pulls/{pr_number}", json=pr_data)
response = s.patch(
f"https://api.github.com/repos/{APPS_REPO}/pulls/{pr_number}",
json=pr_data,
)
response.raise_for_status()
existing_url = response.json()["html_url"]
logging.warning(f"An existing Pull Request has been updated at {existing_url} !")
logging.warning(
f"An existing Pull Request has been updated at {existing_url} !"
)
else:
response.raise_for_status()