#!/usr/bin/env python3
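"""Thin HTTP wrappers around upstream forges.

GithubAPI, GitlabAPI and GiteaForgejoAPI expose the same small surface
(tags(), commits(), releases(), url_for_ref(), changelog_for_ref());
DownloadPageAPI simply scrapes the links found on a plain download page.
"""
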
import re
from enum import Enum
from typing import Any, Optional
from urllib.parse import urljoin

import requests
from bs4 import BeautifulSoup


class RefType(Enum):
    tags = 1
    commits = 2
    releases = 3
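
# RefType tells url_for_ref() and changelog_for_ref() below what kind of
# reference they were handed, since tagged releases and raw commits need
# different archive and changelog URLs.

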
class GithubAPI:
    def __init__(self, upstream: str, auth: Optional[tuple[str, str]] = None):
        self.upstream = upstream.strip("/")
        self.upstream_repo = upstream.replace("https://github.com/", "").strip("/")
        assert (
            len(self.upstream_repo.split("/")) == 2
        ), f"'{upstream}' doesn't seem to be a GitHub repository"
        self.auth = auth

    def internal_api(self, uri: str) -> Any:
        url = f"https://api.github.com/{uri}"
        r = requests.get(url, auth=self.auth)
        r.raise_for_status()
        return r.json()

    def tags(self) -> list[dict[str, str]]:
        """Get a list of tags for the project."""
        return self.internal_api(f"repos/{self.upstream_repo}/tags")

    def commits(self) -> list[dict[str, Any]]:
        """Get a list of commits for the project."""
        return self.internal_api(f"repos/{self.upstream_repo}/commits")

    def releases(self) -> list[dict[str, Any]]:
        """Get a list of releases for the project."""
        return self.internal_api(f"repos/{self.upstream_repo}/releases?per_page=100")

    def url_for_ref(self, ref: str, ref_type: RefType) -> str:
        """Get a URL for a ref."""
        if ref_type in (RefType.tags, RefType.releases):
            return f"{self.upstream}/archive/refs/tags/{ref}.tar.gz"
        elif ref_type == RefType.commits:
            return f"{self.upstream}/archive/{ref}.tar.gz"
        else:
            raise NotImplementedError

    def changelog_for_ref(self, new_ref: str, old_ref: str, ref_type: RefType) -> str:
        """Get a changelog URL for a ref."""
        if ref_type == RefType.commits:
            return f"{self.upstream}/compare/{old_ref}...{new_ref}"
        else:
            return f"{self.upstream}/releases/tag/{new_ref}"
class GitlabAPI:
    def __init__(self, upstream: str):
        # Find gitlab api root...
        self.forge_root = self.get_forge_root(upstream).rstrip("/")
        self.project_path = upstream.replace(self.forge_root, "").strip("/")
        self.project_id = self.find_project_id(self.project_path)

    def get_forge_root(self, project_url: str) -> str:
        """A small heuristic based on the content of the html page..."""
        r = requests.get(project_url)
        r.raise_for_status()
        match = re.search(r"const url = `(.*)/api/graphql`", r.text)
        assert match is not None
        return match.group(1)

    def find_project_id(self, project: str) -> int:
        try:
            project = self.internal_api(f"projects/{project.replace('/', '%2F')}")
        except requests.exceptions.HTTPError as err:
            if err.response.status_code != 404:
                raise
            # Second chance for some buggy gitlab instances...
            name = self.project_path.split("/")[-1]
            projects = self.internal_api(f"projects?search={name}")
            project = next(
                filter(
                    lambda x: x.get("path_with_namespace") == self.project_path,
                    projects,
                )
            )

        assert isinstance(project, dict)
        project_id = project.get("id", None)
        return project_id

    def internal_api(self, uri: str) -> Any:
        url = f"{self.forge_root}/api/v4/{uri}"
        r = requests.get(url)
        r.raise_for_status()
        return r.json()

    def tags(self) -> list[dict[str, str]]:
        """Get a list of tags for the project."""
        return self.internal_api(f"projects/{self.project_id}/repository/tags")

    def commits(self) -> list[dict[str, Any]]:
        """Get a list of commits for the project."""
        return [
            {
                "sha": commit["id"],
                "commit": {"author": {"date": commit["committed_date"]}},
            }
            for commit in self.internal_api(
                f"projects/{self.project_id}/repository/commits"
            )
        ]

    def releases(self) -> list[dict[str, Any]]:
        """Get a list of releases for the project."""
        releases = self.internal_api(f"projects/{self.project_id}/releases")
        retval = []
        for release in releases:
            # Reshape each release to mirror the GitHub release schema
            r = {
                "tag_name": release["tag_name"],
                "prerelease": False,
                "draft": False,
                "html_url": release["_links"]["self"],
                "assets": [
                    {
                        "name": asset["name"],
                        "browser_download_url": asset["direct_asset_url"],
                    }
                    for asset in release["assets"]["links"]
                ],
            }
            for source in release["assets"]["sources"]:
                r["assets"].append(
                    {
                        "name": f"source.{source['format']}",
                        "browser_download_url": source["url"],
                    }
                )
            retval.append(r)

        return retval

    def url_for_ref(self, ref: str, _: RefType) -> str:
        """Get a URL for a ref."""
        name = self.project_path.split("/")[-1]
        clean_ref = ref.replace("/", "-")
        return f"{self.forge_root}/{self.project_path}/-/archive/{ref}/{name}-{clean_ref}.tar.bz2"

    def changelog_for_ref(self, new_ref: str, old_ref: str, ref_type: RefType) -> str:
        """Get a changelog URL for a ref."""
        if ref_type == RefType.commits:
            return (
                f"{self.forge_root}/{self.project_path}/-/compare/{old_ref}...{new_ref}"
            )
        elif ref_type == RefType.tags:
            return f"{self.forge_root}/{self.project_path}/-/tags/{new_ref}"
        elif ref_type == RefType.releases:
            return f"{self.forge_root}/{self.project_path}/-/releases/{new_ref}"
        else:
            raise NotImplementedError
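
# A minimal usage sketch for GitlabAPI (the project URL below is made up; the
# constructor itself probes the instance to locate the API root and project id):
#
#   api = GitlabAPI("https://gitlab.example.org/some-group/some-project")
#   tags = api.tags()          # GET <instance>/api/v4/projects/<id>/repository/tags
#   releases = api.releases()  # reshaped to mirror the GitHub release schema
#   tarball_url = api.url_for_ref(tags[0]["name"], RefType.tags)

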
class GiteaForgejoAPI:
    def __init__(self, upstream: str):
        # Find gitea/forgejo api root...
        self.forge_root = self.get_forge_root(upstream).rstrip("/")
        self.project_path = upstream.replace(self.forge_root, "").lstrip("/")

    def get_forge_root(self, project_url: str) -> str:
        """A small heuristic based on the content of the html page..."""
        r = requests.get(project_url)
        r.raise_for_status()
        match = re.search(r"appUrl: '([^']*)',", r.text)
        assert match is not None
        return match.group(1).replace("\\", "")

    def internal_api(self, uri: str) -> Any:
        url = f"{self.forge_root}/api/v1/{uri}"
        r = requests.get(url)
        r.raise_for_status()
        return r.json()

    def tags(self) -> list[dict[str, Any]]:
        """Get a list of tags for the project."""
        return self.internal_api(f"repos/{self.project_path}/tags")

    def commits(self) -> list[dict[str, Any]]:
        """Get a list of commits for the project."""
        return self.internal_api(f"repos/{self.project_path}/commits")

    def releases(self) -> list[dict[str, Any]]:
        """Get a list of releases for the project."""
        return self.internal_api(f"repos/{self.project_path}/releases")

    def url_for_ref(self, ref: str, _: RefType) -> str:
        """Get a URL for a ref."""
        return f"{self.forge_root}/{self.project_path}/archive/{ref}.tar.gz"

    def changelog_for_ref(self, new_ref: str, old_ref: str, ref_type: RefType) -> str:
        """Get a changelog URL for a ref."""
        if ref_type == RefType.commits:
            return (
                f"{self.forge_root}/{self.project_path}/compare/{old_ref}...{new_ref}"
            )
        else:
            return f"{self.forge_root}/{self.project_path}/releases/tag/{new_ref}"
class DownloadPageAPI:
    def __init__(self, upstream: str) -> None:
        self.web_page = upstream

    def get_web_page_links(self) -> dict[str, str]:
        """Map the text of each <a> on the page to its absolute URL."""
        r = requests.get(self.web_page)
        r.raise_for_status()
        soup = BeautifulSoup(r.text, features="lxml")

        return {
            link.string: urljoin(self.web_page, link.get("href"))
            for link in soup.find_all("a")
        }
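
# DownloadPageAPI just scrapes every link off a plain download page; a rough
# sketch (hypothetical URL):
#
#   page = DownloadPageAPI("https://example.org/downloads/")
#   for name, url in page.get_web_page_links().items():
#       print(name, url)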