ynh-apps_tools/autoupdate_app_sources/rest_api.py

#!/usr/bin/env python3

import re
from enum import Enum
from typing import Any, Optional
from bs4 import BeautifulSoup
from urllib.parse import urljoin

import requests


class RefType(Enum):
    tags = 1
    commits = 2
    releases = 3


class GithubAPI:
    def __init__(self, upstream: str, auth: Optional[tuple[str, str]] = None):
        self.upstream = upstream.strip("/")
        self.upstream_repo = upstream.replace("https://github.com/", "").strip("/")
        assert (
            len(self.upstream_repo.split("/")) == 2
        ), f"'{upstream}' doesn't seem to be a github repository ?"
        self.auth = auth

    def internal_api(self, uri: str) -> Any:
        url = f"https://api.github.com/{uri}"
        r = requests.get(url, auth=self.auth)
        r.raise_for_status()
        return r.json()

    def tags(self) -> list[dict[str, str]]:
        """Get a list of tags for project."""
        return self.internal_api(f"repos/{self.upstream_repo}/tags")

    def commits(self) -> list[dict[str, Any]]:
        """Get a list of commits for project."""
        return self.internal_api(f"repos/{self.upstream_repo}/commits")

    def releases(self) -> list[dict[str, Any]]:
        """Get a list of releases for project."""
        return self.internal_api(f"repos/{self.upstream_repo}/releases?per_page=100")

    def url_for_ref(self, ref: str, ref_type: RefType) -> str:
        """Get a URL for a ref."""
        if ref_type == RefType.tags or ref_type == RefType.releases:
            return f"{self.upstream}/archive/refs/tags/{ref}.tar.gz"
        elif ref_type == RefType.commits:
            return f"{self.upstream}/archive/{ref}.tar.gz"
        else:
            raise NotImplementedError

    def changelog_for_ref(self, new_ref: str, old_ref: str, ref_type: RefType) -> str:
        """Get a changelog for a ref."""
        if ref_type == RefType.commits:
            return f"{self.upstream}/compare/{old_ref}...{new_ref}"
        else:
            return f"{self.upstream}/releases/tag/{new_ref}"


class GitlabAPI:
    def __init__(self, upstream: str):
        # Find gitlab api root...
        self.forge_root = self.get_forge_root(upstream).rstrip("/")
        self.project_path = upstream.replace(self.forge_root, "").lstrip("/")
        self.project_id = self.find_project_id(self.project_path)

    def get_forge_root(self, project_url: str) -> str:
        """A small heuristic based on the content of the html page..."""
        r = requests.get(project_url)
        r.raise_for_status()
        match = re.search(r"const url = `(.*)/api/graphql`", r.text)
        assert match is not None
        return match.group(1)

    def find_project_id(self, project: str) -> int:
        try:
            project = self.internal_api(f"projects/{project.replace('/', '%2F')}")
        except requests.exceptions.HTTPError as err:
            if err.response.status_code != 404:
                raise
            # Second chance for some buggy gitlab instances...
            name = self.project_path.split("/")[-1]
            projects = self.internal_api(f"projects?search={name}")
            project = next(
                filter(
                    lambda x: x.get("path_with_namespace") == self.project_path,
                    projects,
                )
            )

        assert isinstance(project, dict)
        project_id = project.get("id", None)
        return project_id

    def internal_api(self, uri: str) -> Any:
        url = f"{self.forge_root}/api/v4/{uri}"
        r = requests.get(url)
        r.raise_for_status()
        return r.json()

    def tags(self) -> list[dict[str, str]]:
        """Get a list of tags for project."""
        return self.internal_api(f"projects/{self.project_id}/repository/tags")

    def commits(self) -> list[dict[str, Any]]:
        """Get a list of commits for project."""
        return [
            {
                "sha": commit["id"],
                "commit": {"author": {"date": commit["committed_date"]}},
            }
            for commit in self.internal_api(
                f"projects/{self.project_id}/repository/commits"
            )
        ]

    def releases(self) -> list[dict[str, Any]]:
        """Get a list of releases for project."""
        releases = self.internal_api(f"projects/{self.project_id}/releases")
        retval = []
        for release in releases:
            r = {
                "tag_name": release["tag_name"],
                "prerelease": False,
                "draft": False,
                "html_url": release["_links"]["self"],
                "assets": [
                    {
                        "name": asset["name"],
                        "browser_download_url": asset["direct_asset_url"],
                    }
                    for asset in release["assets"]["links"]
                ],
            }
            for source in release["assets"]["sources"]:
                r["assets"].append(
                    {
                        "name": f"source.{source['format']}",
                        "browser_download_url": source["url"],
                    }
                )
            retval.append(r)
        return retval

    def url_for_ref(self, ref: str, _: RefType) -> str:
        name = self.project_path.split("/")[-1]
        clean_ref = ref.replace("/", "-")
        return f"{self.forge_root}/{self.project_path}/-/archive/{ref}/{name}-{clean_ref}.tar.bz2"

    def changelog_for_ref(self, new_ref: str, old_ref: str, ref_type: RefType) -> str:
        """Get a changelog for a ref."""
        if ref_type == RefType.commits:
            return (
                f"{self.forge_root}/{self.project_path}/-/compare/{old_ref}...{new_ref}"
            )
        elif ref_type == RefType.tags:
            return f"{self.forge_root}/{self.project_path}/-/tags/{new_ref}"
        elif ref_type == RefType.releases:
            return f"{self.forge_root}/{self.project_path}/-/releases/{new_ref}"
        else:
            raise NotImplementedError
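
# Hedged usage sketch (not in the original file): GitlabAPI resolves the forge
# root and project id at construction time, so one instance is enough to query
# tags, releases and archive URLs. The instance URL below is purely illustrative.
#
#   gl = GitlabAPI("https://gitlab.com/example/project")
#   latest = gl.tags()[0]["name"]
#   tarball_url = gl.url_for_ref(latest, RefType.tags)
#   changelog_url = gl.changelog_for_ref(latest, "1.0.0", RefType.tags)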


class GiteaForgejoAPI:
    def __init__(self, upstream: str):
        # Find gitea/forgejo api root...
        self.forge_root = self.get_forge_root(upstream).rstrip("/")
        self.project_path = upstream.replace(self.forge_root, "").lstrip("/")

    def get_forge_root(self, project_url: str) -> str:
        """A small heuristic based on the content of the html page..."""
        r = requests.get(project_url)
        r.raise_for_status()
        match = re.search(r"appUrl: '([^']*)',", r.text)
        assert match is not None
        return match.group(1).replace("\\", "")

    def internal_api(self, uri: str):
        url = f"{self.forge_root}/api/v1/{uri}"
        r = requests.get(url)
        r.raise_for_status()
        return r.json()

    def tags(self) -> list[dict[str, Any]]:
        """Get a list of tags for project."""
        return self.internal_api(f"repos/{self.project_path}/tags")

    def commits(self) -> list[dict[str, Any]]:
        """Get a list of commits for project."""
        return self.internal_api(f"repos/{self.project_path}/commits")

    def releases(self) -> list[dict[str, Any]]:
        """Get a list of releases for project."""
        return self.internal_api(f"repos/{self.project_path}/releases")

    def url_for_ref(self, ref: str, _: RefType) -> str:
        """Get a URL for a ref."""
        return f"{self.forge_root}/{self.project_path}/archive/{ref}.tar.gz"

    def changelog_for_ref(self, new_ref: str, old_ref: str, ref_type: RefType) -> str:
        """Get a changelog for a ref."""
        if ref_type == RefType.commits:
            return (
                f"{self.forge_root}/{self.project_path}/compare/{old_ref}...{new_ref}"
            )
        else:
            return f"{self.forge_root}/{self.project_path}/releases/tag/{new_ref}"


class DownloadPageAPI:
    def __init__(self, upstream: str) -> None:
        self.web_page = upstream

    def get_web_page_links(self) -> dict[str, str]:
        r = requests.get(self.web_page)
        r.raise_for_status()
        soup = BeautifulSoup(r.text, features="lxml")
        return {
            link.string: urljoin(self.web_page, link.get("href"))
            for link in soup.find_all("a")
        }
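

# Hedged, illustrative smoke test (not part of the original module). It picks an
# API wrapper for the upstream URL given on the command line and prints the most
# recent tag; the default URL is only a placeholder for manual testing.
if __name__ == "__main__":
    import sys

    upstream_url = (
        sys.argv[1] if len(sys.argv) > 1 else "https://github.com/YunoHost/yunohost"
    )
    if upstream_url.startswith("https://github.com/"):
        api: Any = GithubAPI(upstream_url)
    else:
        # Assumption: non-GitHub forges are GitLab-like here; a Gitea/Forgejo
        # instance would need GiteaForgejoAPI instead.
        api = GitlabAPI(upstream_url)
    tags = api.tags()
    if tags:
        print("latest tag:", tags[0]["name"])
        print("tarball:", api.url_for_ref(tags[0]["name"], RefType.tags))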