#!/usr/bin/python3
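# Builds the app catalog files (builds/default/{v2,v3,doc_catalog}/apps.json)
# from apps.toml, categories.toml and antifeatures.toml.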
import copy
import json
import os
import subprocess
import multiprocessing
from pathlib import Path
import time
import shutil
from collections import OrderedDict

import tqdm
import logging
import toml
from git import Repo

from app_caches import apps_cache_update_all, app_cache_folder  # pylint: disable=import-error
from packaging_v2.convert_v1_manifest_to_v2_for_catalog import \
    convert_v1_manifest_to_v2_for_catalog  # pylint: disable=import-error
from appslib.utils import (REPO_APPS_ROOT,  # pylint: disable=import-error
                           get_catalog, git_repo_age)

# Automatically enables error-to-xmpp
import appslib.xmpplogger  # pylint: disable=import-error

now = time.time()

# Load categories and reformat the structure to have a list with an "id" key
categories = toml.load((REPO_APPS_ROOT / "categories.toml").open("r", encoding="utf-8"))
for category_id, infos in categories.items():
    infos["id"] = category_id
    for subtag_id, subtag_infos in infos.get("subtags", {}).items():
        subtag_infos["id"] = subtag_id
    infos["subtags"] = list(infos.get("subtags", {}).values())

categories = list(categories.values())
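# For illustration (hypothetical ids), the transformation above turns
#   {"office": {"title": ..., "subtags": {"text_editor": {...}}}}
# into
#   [{"id": "office", "title": ..., "subtags": [{"id": "text_editor", ...}]}]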

# (Same for antifeatures)
antifeatures = toml.load((REPO_APPS_ROOT / "antifeatures.toml").open("r", encoding="utf-8"))
for antifeature_id, infos in antifeatures.items():
    infos["id"] = antifeature_id
antifeatures = list(antifeatures.values())

# Load the app catalog and filter out the non-working ones
catalog = toml.load((REPO_APPS_ROOT / "apps.toml").open("r", encoding="utf-8"))
catalog = {
    app: infos for app, infos in catalog.items() if infos.get("state") != "notworking"
}

# Tell git to never prompt for credentials on the terminal
my_env = os.environ.copy()
my_env["GIT_TERMINAL_PROMPT"] = "0"

(REPO_APPS_ROOT / "builds").mkdir(exist_ok=True)


################################
# Actual list build management #
################################
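
# Worker for the multiprocessing pool in build_base_catalog(): builds one app's
# catalog entry and returns (name, entry), or None if the build failed (the error is logged).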
def __build_app_dict(data):
    name, info = data
    try:
        return name, build_app_dict(name, info)
    except Exception as err:
        logging.error("Error while updating %s: %s", name, err)


def build_base_catalog():
    result_dict = {}

    with multiprocessing.Pool(processes=multiprocessing.cpu_count()) as pool:
        tasks = pool.imap(__build_app_dict, catalog.items())

        for result in tqdm.tqdm(tasks, total=len(catalog.keys()), ascii=" ·#"):
            # A None result means __build_app_dict failed for that app (the error was already logged)
            assert result is not None
            name, info = result
            result_dict[name] = info

    return result_dict


def write_catalog_v2(base_catalog, target_dir: Path) -> None:
    result_dict_with_manifest_v1 = copy.deepcopy(base_catalog)
    # Keep only the apps whose manifest still uses the v1 packaging format
    result_dict_with_manifest_v1 = {
        name: infos
        for name, infos in result_dict_with_manifest_v1.items()
        if float(str(infos["manifest"].get("packaging_format", "")).strip() or "0") < 2
    }
    full_catalog = {
        "apps": result_dict_with_manifest_v1,
        "categories": categories,
        "antifeatures": antifeatures,
    }

    target_file = target_dir / "apps.json"
    target_file.parent.mkdir(parents=True, exist_ok=True)
    target_file.open("w", encoding="utf-8").write(json.dumps(full_catalog, sort_keys=True))


def write_catalog_v3(base_catalog, target_dir: Path) -> None:
    result_dict_with_manifest_v2 = copy.deepcopy(base_catalog)
    for app in result_dict_with_manifest_v2.values():
        packaging_format = float(str(app["manifest"].get("packaging_format", "")).strip() or "0")
        if packaging_format < 2:
            app["manifest"] = convert_v1_manifest_to_v2_for_catalog(app["manifest"])

    # We also remove the app install questions and resources, which ain't needed anymore
    # by the webadmin etc. (or at least we think so ;P)
    for app in result_dict_with_manifest_v2.values():
        if "manifest" in app and "install" in app["manifest"]:
            del app["manifest"]["install"]
        if "manifest" in app and "resources" in app["manifest"]:
            del app["manifest"]["resources"]
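
    # Ship each app's logo with the build: copy it under its sha256 so the filename is
    # content-addressed, and record that hash in the app entry as "logo_hash".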
    logos_dir = target_dir / "logos"
    logos_dir.mkdir(parents=True, exist_ok=True)
    for appid, app in result_dict_with_manifest_v2.items():
        appid = appid.lower()
        logo_source = REPO_APPS_ROOT / "logos" / f"{appid}.png"
        if logo_source.exists():
            logo_hash = subprocess.check_output(["sha256sum", logo_source]).strip().decode("utf-8").split()[0]
            shutil.copyfile(logo_source, logos_dir / f"{logo_hash}.png")
            # FIXME: implement something to cleanup old logo stuff in the builds/.../logos/ folder somehow
        else:
            logo_hash = None
        app["logo_hash"] = logo_hash

    full_catalog = {
        "apps": result_dict_with_manifest_v2,
        "categories": categories,
        "antifeatures": antifeatures,
    }

    target_file = target_dir / "apps.json"
    target_file.parent.mkdir(parents=True, exist_ok=True)
    target_file.open("w", encoding="utf-8").write(json.dumps(full_catalog, sort_keys=True))
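

# The "doc" catalog is a trimmed-down view: only apps in "working" state, reduced to the
# summary fields returned by infos_for_doc_catalog() below.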
def write_catalog_doc(base_catalog, target_dir: Path) -> None:
    def infos_for_doc_catalog(infos):
        level = infos.get("level")
        if not isinstance(level, int):
            level = -1
        return {
            "id": infos["id"],
            "category": infos["category"],
            "url": infos["git"]["url"],
            "name": infos["manifest"]["name"],
            "description": infos["manifest"]["description"],
            "state": infos["state"],
            "level": level,
            "broken": level <= 0,
            "good_quality": level >= 8,
            "bad_quality": level <= 5,
            "antifeatures": infos.get("antifeatures"),
            "potential_alternative_to": infos.get("potential_alternative_to", []),
        }

    result_dict_doc = {
        k: infos_for_doc_catalog(v)
        for k, v in base_catalog.items()
        if v["state"] == "working"
    }
    full_catalog = {
        "apps": result_dict_doc,
        "categories": categories,
    }

    target_file = target_dir / "apps.json"
    target_file.parent.mkdir(parents=True, exist_ok=True)
    target_file.open("w", encoding="utf-8").write(json.dumps(full_catalog, sort_keys=True))


def build_app_dict(app, infos):
    # Make sure we have some cache
    this_app_cache = app_cache_folder(app)
    assert this_app_cache.exists(), f"No cache yet for {app}"

    repo = Repo(this_app_cache)

    commits_in_apps_json = Repo(REPO_APPS_ROOT).git.log(
        "-S", f"\"{app}\"", "--first-parent", "--reverse", "--date=unix",
        "--format=%cd", "--", "apps.json").split("\n")
    if len(commits_in_apps_json) > 1:
        first_commit = commits_in_apps_json[0]
    else:
        commits_in_apps_toml = Repo(REPO_APPS_ROOT).git.log(
            "-S", f"[{app}]", "--first-parent", "--reverse", "--date=unix",
            "--format=%cd", "--", "apps.json", "apps.toml").split("\n")
        first_commit = commits_in_apps_toml[0]

    # Assume the first entry we get (= the oldest) is the time the app was added
    infos["added_in_catalog"] = int(first_commit)
    # int(commit_timestamps_for_this_app_in_catalog.split("\n")[0])

    infos["branch"] = infos.get("branch", "master")
    infos["revision"] = infos.get("revision", "HEAD")

    # If using HEAD, find the most recent meaningful commit in logs
    if infos["revision"] == "HEAD":
        relevant_files = [
            "manifest.json",
            "manifest.toml",
            "config_panel.toml",
            "hooks/",
            "scripts/",
            "conf/",
            "sources/",
        ]
        relevant_commits = repo.iter_commits(paths=relevant_files, full_history=True, all=True)
        infos["revision"] = next(relevant_commits).hexsha

    # Otherwise, validate that the commit exists
    else:
        try:
            _ = repo.commit(infos["revision"])
        except ValueError as err:
            raise RuntimeError(f"Revision ain't in history ? {infos['revision']}") from err

    # Find the timestamp corresponding to that commit
    timestamp = repo.commit(infos["revision"]).committed_date

    # Build the dict with all the infos
    if (this_app_cache / "manifest.toml").exists():
        manifest = toml.load((this_app_cache / "manifest.toml").open("r"), _dict=OrderedDict)
    else:
        manifest = json.load((this_app_cache / "manifest.json").open("r"))

    return {
        "id": manifest["id"],
        "git": {
            "branch": infos["branch"],
            "revision": infos["revision"],
            "url": infos["url"],
        },
        "added_in_catalog": infos["added_in_catalog"],
        "lastUpdate": timestamp,
        "manifest": manifest,
        "state": infos["state"],
        "level": infos.get("level", "?"),
        "maintained": "package-not-maintained" not in infos.get("antifeatures", []),
        "high_quality": infos.get("high_quality", False),
        "featured": infos.get("featured", False),
        "category": infos.get("category", None),
        "subtags": infos.get("subtags", []),
        "potential_alternative_to": infos.get("potential_alternative_to", []),
        "antifeatures": list(
            set(list(manifest.get("antifeatures", {}).keys()) + infos.get("antifeatures", []))
        ),
    }
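

# Illustrative shape of one entry produced by build_app_dict() (hypothetical app id and values):
#   "some_app": {"id": "some_app", "git": {"branch": "master", "revision": "<sha>", "url": "..."},
#                "added_in_catalog": 1700000000, "lastUpdate": 1700000000, "manifest": {...},
#                "state": "working", "level": 8, "maintained": True, "antifeatures": [], ...}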


if __name__ == "__main__":
    apps_cache_update_all(get_catalog(), parallel=50)

    catalog = build_base_catalog()
    write_catalog_v2(catalog, REPO_APPS_ROOT / "builds" / "default" / "v2")
    write_catalog_v3(catalog, REPO_APPS_ROOT / "builds" / "default" / "v3")
    write_catalog_doc(catalog, REPO_APPS_ROOT / "builds" / "default" / "doc_catalog")