#!/usr/bin/env python3
|
|
|
|
|
|
|
|
import argparse
|
|
|
|
import shutil
|
|
|
|
import logging
|
2024-05-21 10:59:49 +02:00
|
|
|
from itertools import repeat
|
2024-02-07 19:35:07 +01:00
|
|
|
from multiprocessing import Pool
|
|
|
|
from pathlib import Path
|
|
|
|
from typing import Any
|
|
|
|
|
|
|
|
import tqdm
|
|
|
|
|
2024-03-11 17:34:33 +01:00
|
|
|
from appslib.utils import (
|
|
|
|
REPO_APPS_ROOT, # pylint: disable=import-error
|
|
|
|
get_catalog,
|
|
|
|
git_repo_age,
|
|
|
|
)
|
2024-02-07 19:35:07 +01:00
|
|
|
from git import Repo
|
|
|
|
|
|
|
|
|
2024-03-13 23:13:27 +01:00
|
|
|
# Local clone cache: one sub-directory per app, under the apps repository root.
APPS_CACHE_DIR = REPO_APPS_ROOT / ".apps_cache"
|
|
|
|
|
|
|
|
|
2024-02-07 19:35:07 +01:00
|
|
|
def app_cache_folder(app: str) -> Path:
    """Return the cache directory holding the local clone of *app*."""
    return APPS_CACHE_DIR / app
|
2024-02-07 19:35:07 +01:00
|
|
|
|
|
|
|
|
2024-06-02 21:21:52 +02:00
|
|
|
def app_cache_clone(
    app: str, infos: dict[str, str], all_branches: bool = False
) -> None:
    """Clone the repository of *app* into its cache folder, replacing any previous copy.

    The clone depth depends on the app's "state" in the catalog entry:
    broken or in-progress apps get a shallower history than the default.
    """
    logging.info("Cloning %s...", app)
    depth_by_state = {"notworking": 5, "inprogress": 20, "default": 40}
    target = app_cache_folder(app)
    # Start from a clean slate: drop any stale clone before re-cloning.
    if target.exists():
        shutil.rmtree(target)
    Repo.clone_from(
        infos["url"],
        to_path=target,
        depth=depth_by_state.get(infos["state"], depth_by_state["default"]),
        single_branch=not all_branches,
        branch=infos.get("branch", "master"),
    )
|
|
|
|
|
|
|
|
|
2024-05-21 10:59:49 +02:00
|
|
|
def app_cache_clone_or_update(
    app: str,
    infos: dict[str, str],
    ssh_clone: bool = False,
    fetch_all_branches: bool = False,
) -> None:
    """Ensure the local cache of *app* exists and matches the catalog's branch.

    Clones the repository when no usable cache is present; otherwise refreshes
    the existing clone (fetch + hard reset, or pull when all branches are
    tracked).

    Args:
        app: App name, also used as the cache sub-directory name.
        infos: Catalog entry for the app; reads "url", "state" and
            optionally "branch" (default "master").
        ssh_clone: Rewrite GitHub https URLs to ssh form before cloning.
        fetch_all_branches: Track and fetch every remote branch instead of
            only the catalog branch.
    """
    app_path = app_cache_folder(app)

    # Patch url for ssh clone
    # NOTE(review): this mutates the caller's `infos` dict in place.
    if ssh_clone:
        infos["url"] = infos["url"].replace("https://github.com/", "git@github.com:")

    # Don't refresh if already refreshed during last hour
    # git_repo_age() returning False means there is no usable clone yet.
    age = git_repo_age(app_path)
    if age is False:
        app_cache_clone(app, infos, fetch_all_branches)
        return

    # if age < 3600:
    #     logging.info(f"Skipping {app}, it's been updated recently.")
    #     return

    logging.info("Updating %s...", app)
    repo = Repo(app_path)
    # Re-apply the remote URL in case the catalog (or ssh_clone) changed it.
    repo.remote("origin").set_url(infos["url"])

    branch = infos.get("branch", "master")
    if fetch_all_branches:
        repo.git.remote("set-branches", "origin", "*")
        repo.remote("origin").fetch()
        repo.remote("origin").pull()
    else:
        # Make sure the catalog branch is checked out locally...
        if repo.active_branch != branch:
            all_branches = [str(b) for b in repo.branches]
            if branch in all_branches:
                repo.git.checkout(branch, "--force")
            else:
                # Branch not known locally yet: start tracking it, then fetch it.
                repo.git.remote("set-branches", "--add", "origin", branch)
                repo.remote("origin").fetch(f"{branch}:{branch}")

        # ...then force the working tree to match the remote branch exactly.
        repo.remote("origin").fetch(refspec=branch, force=True)
        repo.git.reset("--hard", f"origin/{branch}")
|
2024-02-07 19:35:07 +01:00
|
|
|
|
|
|
|
|
|
|
|
def __app_cache_clone_or_update_mapped(data):
    """Worker-pool adapter: unpack one task tuple and update that app's cache.

    Exceptions are logged rather than propagated so that one failing app
    does not abort the whole pool.
    """
    app_name, app_info, use_ssh, want_all_branches = data
    try:
        app_cache_clone_or_update(app_name, app_info, use_ssh, want_all_branches)
    except Exception as err:
        logging.error("[App caches] Error while updating %s: %s", app_name, err)
|
2024-02-07 19:35:07 +01:00
|
|
|
|
|
|
|
|
2024-05-21 10:59:49 +02:00
|
|
|
def apps_cache_update_all(
    apps: dict[str, dict[str, Any]],
    parallel: int = 8,
    ssh_clone: bool = False,
    all_branches: bool = False,
) -> None:
    """Clone or refresh the cache of every catalog app in parallel.

    Shows a tqdm progress bar; per-app failures are handled (logged) by the
    worker function and do not stop the run.
    """
    work_items = (
        (name, info, ssh_clone, all_branches) for name, info in apps.items()
    )
    with Pool(processes=parallel) as pool:
        results = pool.imap_unordered(__app_cache_clone_or_update_mapped, work_items)
        # Drain the iterator so tqdm renders progress as tasks complete.
        for _ in tqdm.tqdm(results, total=len(apps), ascii=" ·#"):
            pass
|
|
|
|
|
|
|
|
|
2024-03-13 23:13:27 +01:00
|
|
|
def apps_cache_cleanup(apps: dict[str, dict[str, Any]]) -> None:
    """Remove any entry of the cache directory that is not a known app.

    Directories are deleted recursively; stray files are unlinked.

    Args:
        apps: Catalog mapping of app name -> app infos; only the keys are used.
    """
    for element in APPS_CACHE_DIR.iterdir():
        # Membership test on the dict itself (same as .keys(), more idiomatic).
        if element.name in apps:
            continue
        # Lazy %-formatting for consistency with the other log calls in this file.
        logging.warning("Removing %s...", element)
        if element.is_dir():
            shutil.rmtree(element)
        else:
            element.unlink()
|
|
|
|
|
|
|
|
|
2024-02-07 19:35:07 +01:00
|
|
|
def __run_for_catalog():
    """Command-line entry point: parse arguments and refresh the app cache.

    Flags: -v verbose logging, -j parallel processes, -s/--ssh ssh clones,
    -b/--all-branches fetch all branches, -c/--cleanup prune unknown entries.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-v", "--verbose", action="store_true")
    parser.add_argument("-j", "--processes", type=int, default=8)
    parser.add_argument(
        "-s",
        "--ssh",
        action=argparse.BooleanOptionalAction,
        default=False,
        help="Use ssh clones instead of https",
    )
    parser.add_argument(
        "-b",
        "--all-branches",
        action=argparse.BooleanOptionalAction,
        default=False,
        help="Download all branches from repo",
    )
    parser.add_argument(
        "-c",
        "--cleanup",
        action="store_true",
        default=False,
        help="Remove unknown directories from the app cache",
    )
    args = parser.parse_args()
    if args.verbose:
        logging.getLogger().setLevel(logging.INFO)

    APPS_CACHE_DIR.mkdir(exist_ok=True, parents=True)

    # Load the catalog once instead of re-parsing it for both the cleanup
    # and the update phases (also guarantees both phases see the same catalog).
    catalog = get_catalog()
    if args.cleanup:
        apps_cache_cleanup(catalog)
    apps_cache_update_all(
        catalog,
        parallel=args.processes,
        ssh_clone=args.ssh,
        all_branches=args.all_branches,
    )
|
2024-02-07 19:35:07 +01:00
|
|
|
|
|
|
|
|
|
|
|
# Allow running this module directly as a maintenance script.
if __name__ == "__main__":
    __run_for_catalog()
|