#!/usr/bin/env python3

import argparse
import copy
import json
import logging
import multiprocessing
import shutil
import subprocess
import time
from collections import OrderedDict
from functools import cache
from pathlib import Path
from typing import Any, Optional

import toml
import tqdm
from tqdm.contrib.logging import logging_redirect_tqdm
from git import Repo

import appslib.logging_sender  # pylint: disable=import-error
from app_caches import app_cache_folder  # pylint: disable=import-error
from app_caches import apps_cache_update_all  # pylint: disable=import-error
from appslib.utils import (
    REPO_APPS_ROOT,  # pylint: disable=import-error
    get_antifeatures,
    get_catalog,
    get_categories,
)

now = time.time()


@cache
def categories_list():
    # Load categories and reformat the structure to have a list with an "id" key
    new_categories = get_categories()
    for category_id, infos in new_categories.items():
        infos["id"] = category_id
        for subtag_id, subtag_infos in infos.get("subtags", {}).items():
            subtag_infos["id"] = subtag_id
        infos["subtags"] = list(infos.get("subtags", {}).values())
    return list(new_categories.values())
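
# A sketch of the reshaping above, using made-up category data: an input of
#     {"office": {"title": "Office", "subtags": {"text_editor": {"title": "Text editor"}}}}
# comes out as
#     [{"id": "office", "title": "Office",
#       "subtags": [{"id": "text_editor", "title": "Text editor"}]}]
# i.e. the dict keys are folded into "id" fields so consumers get plain lists.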


@cache
def antifeatures_list():
    # (Same for antifeatures)
    new_antifeatures = get_antifeatures()
    for antifeature_id, infos in new_antifeatures.items():
        infos["id"] = antifeature_id
    return list(new_antifeatures.values())


################################
# Actual list build management #
################################


def __build_app_dict(data) -> Optional[tuple[str, dict[str, Any]]]:
    name, info = data
    try:
        return name, build_app_dict(name, info)
    except Exception as err:
        logging.error("[List builder] Error while updating %s: %s", name, err)
        return None  # the caller skips None results


def build_base_catalog(nproc: int):
    result_dict = {}
    catalog = get_catalog(working_only=True)

    with multiprocessing.Pool(processes=nproc) as pool:
        with logging_redirect_tqdm():
            tasks = pool.imap(__build_app_dict, catalog.items())

            for result in tqdm.tqdm(tasks, total=len(catalog), ascii=" ·#"):
                if result is not None:
                    name, info = result
                    result_dict[name] = info

    return result_dict
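
# Design note: pool.imap (rather than pool.map) yields results lazily instead of
# collecting them all first, which lets the tqdm bar advance per app instead of
# jumping from 0 to 100% at the end; logging_redirect_tqdm routes log output
# through tqdm so log lines don't garble the progress bar.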


def write_catalog_v3(base_catalog, target_dir: Path) -> None:
    logos_dir = target_dir / "logos"
    logos_dir.mkdir(parents=True, exist_ok=True)

    def infos_for_v3(app_id: str, infos: Any) -> Any:
        # We remove the app install questions and resources parts, which aren't
        # needed anymore by the webadmin etc. (or at least we think ;P)
        if "manifest" in infos and "install" in infos["manifest"]:
            del infos["manifest"]["install"]
        if "manifest" in infos and "resources" in infos["manifest"]:
            del infos["manifest"]["resources"]

        app_id = app_id.lower()
        logo_source = REPO_APPS_ROOT / "logos" / f"{app_id}.png"
        if logo_source.exists():
            logo_hash = (
                subprocess.check_output(["sha256sum", logo_source])
                .strip()
                .decode("utf-8")
                .split()[0]
            )
            shutil.copyfile(logo_source, logos_dir / f"{logo_hash}.png")
            # FIXME: implement something to clean up old logo stuff in the builds/.../logos/ folder somehow
        else:
            logo_hash = None
        infos["logo_hash"] = logo_hash
        return infos

    full_catalog = {
        "apps": {app: infos_for_v3(app, info) for app, info in base_catalog.items()},
        "categories": categories_list(),
        "antifeatures": antifeatures_list(),
    }

    target_file = target_dir / "apps.json"
    target_file.parent.mkdir(parents=True, exist_ok=True)
    target_file.open("w", encoding="utf-8").write(
        json.dumps(full_catalog, sort_keys=True)
    )
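
# Note on the logo hashing above: shelling out to sha256sum assumes a GNU/Linux
# environment; the same digest could be computed in-process with the standard
# library, e.g. (sketch):
#     import hashlib
#     logo_hash = hashlib.sha256(logo_source.read_bytes()).hexdigest()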


def write_catalog_doc(base_catalog, target_dir: Path) -> None:
    def infos_for_doc_catalog(infos):
        level = infos.get("level")
        if not isinstance(level, int):
            level = -1
        return {
            "id": infos["id"],
            "category": infos["category"],
            "url": infos["git"]["url"],
            "name": infos["manifest"]["name"],
            "description": infos["manifest"]["description"],
            "state": infos["state"],
            "level": level,
            "broken": level <= 0,
            "good_quality": level >= 8,
            "bad_quality": level <= 5,
            "antifeatures": infos.get("antifeatures"),
            "potential_alternative_to": infos.get("potential_alternative_to", []),
        }

    result_dict_doc = {
        k: infos_for_doc_catalog(v)
        for k, v in base_catalog.items()
        if v["state"] == "working"
    }
    full_catalog = {"apps": result_dict_doc, "categories": categories_list()}

    target_file = target_dir / "apps.json"
    target_file.parent.mkdir(parents=True, exist_ok=True)
    target_file.open("w", encoding="utf-8").write(
        json.dumps(full_catalog, sort_keys=True)
    )
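
# Shape of one resulting doc-catalog entry, with made-up values for illustration
# (the keys mirror infos_for_doc_catalog above):
#     "nextcloud": {"id": "nextcloud", "category": "office",
#                   "url": "https://github.com/YunoHost-Apps/nextcloud_ynh",
#                   "name": "Nextcloud", "description": {...}, "state": "working",
#                   "level": 8, "broken": False, "good_quality": True,
#                   "bad_quality": False, "antifeatures": [],
#                   "potential_alternative_to": ["Dropbox"]}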


def build_app_dict(app, infos):
    # Make sure we have some cache
    this_app_cache = app_cache_folder(app)
    assert this_app_cache.exists(), f"No cache yet for {app}"

    repo = Repo(this_app_cache)

    # If added_date is not present, we are in the GitHub Action of the PR that adds it... so default to a bad value.
    infos["added_in_catalog"] = infos.get("added_date", 0)

    infos["branch"] = infos.get("branch", "master")
    infos["revision"] = infos.get("revision", "HEAD")

    # If using HEAD, resolve it to the actual current commit
    if infos["revision"] == "HEAD":
        infos["revision"] = repo.head.commit.hexsha
    # Otherwise, validate that the commit exists
    else:
        try:
            _ = repo.commit(infos["revision"])
        except ValueError as err:
            raise RuntimeError(
                f"Revision is not in history? {infos['revision']}"
            ) from err

    # Find the timestamp corresponding to that commit
    timestamp = repo.commit(infos["revision"]).committed_date

    # Build the dict with all the infos
    if (this_app_cache / "manifest.toml").exists():
        manifest = toml.load(
            (this_app_cache / "manifest.toml").open("r"), _dict=OrderedDict
        )
    else:
        manifest = json.load((this_app_cache / "manifest.json").open("r"))

    return {
        "id": manifest["id"],
        "git": {
            "branch": infos["branch"],
            "revision": infos["revision"],
            "url": infos["url"],
        },
        "added_in_catalog": infos["added_in_catalog"],
        "lastUpdate": timestamp,
        "manifest": manifest,
        "state": infos["state"],
        "level": infos.get("level", "?"),
        "maintained": "package-not-maintained" not in infos.get("antifeatures", []),
        "high_quality": infos.get("high_quality", False),
        "featured": infos.get("featured", False),
        "category": infos.get("category", None),
        "subtags": infos.get("subtags", []),
        "potential_alternative_to": infos.get("potential_alternative_to", []),
        "antifeatures": list(
            set(
                list(manifest.get("antifeatures", {}).keys())
                + infos.get("antifeatures", [])
            )
        ),
    }
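
# Note: the "antifeatures" field above is the deduplicated union of the
# antifeatures declared in the app manifest and those set in the catalog; since
# it goes through set(), its order is not guaranteed to be stable between runs
# (json.dumps(sort_keys=True) only sorts dict keys, not list items).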


def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "target_dir",
        type=Path,
        nargs="?",
        default=REPO_APPS_ROOT / "builds" / "default",
        help="The directory to write the catalogs to",
    )
    parser.add_argument(
        "-j",
        "--jobs",
        type=int,
        default=multiprocessing.cpu_count(),
        metavar="N",
        help="Allow up to N worker processes to run in parallel",
    )
    parser.add_argument(
        "-c",
        "--update-cache",
        action=argparse.BooleanOptionalAction,
        default=True,
        help="Update the apps cache",
    )
    args = parser.parse_args()

    appslib.logging_sender.enable()

    if args.update_cache:
        print("Updating the cache of all the apps directories...")
        apps_cache_update_all(get_catalog(), parallel=args.jobs)

    print("Retrieving all apps' information to build the catalog...")
    catalog = build_base_catalog(args.jobs)

    print(f"Writing the catalogs to {args.target_dir}...")
    write_catalog_v3(catalog, args.target_dir / "v3")
    write_catalog_doc(catalog, args.target_dir / "doc_catalog")
    print("Done!")


if __name__ == "__main__":
    main()
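
# Typical invocations (assuming this script is saved as list_builder.py):
#     ./list_builder.py                          # update caches, build into builds/default
#     ./list_builder.py -j4 --no-update-cache /tmp/catalog_build
# (--no-update-cache is generated automatically by argparse.BooleanOptionalAction)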