New source autoupdate: actually create the PR using PyGithub
This commit is contained in:
parent d643634be0
commit df817d364a
1 changed file with 103 additions and 51 deletions
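For context: the new run() drives the whole pull-request flow through PyGithub. Stripped of the app-specific logic, the sequence of API calls it relies on is roughly the sketch below (token, repository name, branch name, version and author are placeholders, not values taken from this commit):

    from github import Github, InputGitAuthor

    gh = Github("<github token>")                        # same auth as GITHUB_TOKEN below
    repo = gh.get_repo("Yunohost-Apps/example_ynh")      # placeholder repository
    base_branch = "testing"                              # the real code falls back to repo.default_branch

    # 1. Create a work branch starting from the base branch
    base_sha = repo.get_branch(base_branch).commit.sha
    repo.create_git_ref(ref="refs/heads/ci-auto-update-1.2.3", sha=base_sha)

    # 2. Commit an updated manifest.toml on that branch
    contents = repo.get_contents("manifest.toml", ref=base_branch)
    repo.update_file("manifest.toml",
                     message="Upgrade to v1.2.3",
                     content=contents.decoded_content.decode(),   # the real code rewrites this string first
                     sha=contents.sha,
                     branch="ci-auto-update-1.2.3",
                     author=InputGitAuthor("some-login", "some-email@example.com"))

    # 3. Open the pull request against the base branch
    pr = repo.create_pull(title="Upgrade to v1.2.3", body="Upgrade to v1.2.3",
                          head="ci-auto-update-1.2.3", base=base_branch)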
@@ -1,3 +1,4 @@
+import time
 import hashlib
 import re
 import sys
@@ -5,11 +6,19 @@ import requests
 import toml
 import os
 
+from github import Github, InputGitAuthor
+
+#from rich.traceback import install
+#install(width=150, show_locals=True, locals_max_length=None, locals_max_string=None)
+
 STRATEGIES = ["latest_github_release", "latest_github_tag"]
 
 GITHUB_LOGIN = open(os.path.dirname(__file__) + "/../../.github_login").read().strip()
 GITHUB_TOKEN = open(os.path.dirname(__file__) + "/../../.github_token").read().strip()
+GITHUB_EMAIL = open(os.path.dirname(__file__) + "/../../.github_email").read().strip()
+
+github = Github(GITHUB_TOKEN)
+author = InputGitAuthor(GITHUB_LOGIN, GITHUB_EMAIL)
 
 
 def filter_and_get_latest_tag(tags):
     filter_keywords = ["start", "rc", "beta", "alpha"]
@@ -33,15 +42,38 @@ def tag_to_int_tuple(tag):
     return tuple(int(i) for i in int_tuple)
 
 
+def sha256_of_remote_file(url):
+
+    print(f"Computing sha256sum for {url} ...")
+    try:
+        r = requests.get(url, stream=True)
+        m = hashlib.sha256()
+        for data in r.iter_content(8192):
+            m.update(data)
+        return m.hexdigest()
+    except Exception as e:
+        print(f"Failed to compute sha256 for {url} : {e}")
+        return None
+
+
 class AppAutoUpdater():
 
-    def __init__(self, app_path):
+    def __init__(self, app_id):
 
-        if not os.path.exists(app_path + "/manifest.toml"):
-            raise Exception("manifest.toml doesnt exists?")
+        #if not os.path.exists(app_path + "/manifest.toml"):
+        # raise Exception("manifest.toml doesnt exists?")
 
-        self.app_path = app_path
-
-        manifest = toml.load(open(app_path + "/manifest.toml"))
+        # We actually want to look at the manifest on the "testing" (or default) branch
+        self.repo = github.get_repo(f"Yunohost-Apps/{app_id}_ynh")
+
+        # Determine base branch, either `testing` or default branch
+        try:
+            self.base_branch = self.repo.get_branch("testing").name
+        except:
+            self.base_branch = self.repo.default_branch
+
+        contents = self.repo.get_contents("manifest.toml", ref=self.base_branch)
+        self.manifest_raw = contents.decoded_content.decode()
+        self.manifest_raw_sha = contents.sha
+        manifest = toml.loads(self.manifest_raw)
 
         self.current_version = manifest["version"].split("~")[0]
         self.sources = manifest.get("resources", {}).get("sources")
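Note: sha256_of_remote_file() streams the download in 8 KiB chunks instead of loading the whole asset in memory, and returns None on failure. A hypothetical call, with a placeholder URL:

    checksum = sha256_of_remote_file("https://github.com/example/example/archive/refs/tags/v1.2.3.tar.gz")
    if checksum is None:
        print("download failed, keeping the old sha256")

The constructor now also reads manifest.toml through the GitHub contents API on the testing branch (or the default branch) rather than from a local clone, keeping both the raw text and its blob sha so the file can later be committed back with update_file().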
@@ -53,6 +85,8 @@ class AppAutoUpdater():
 
     def run(self):
 
+        todos = {}
+
         for source, infos in self.sources.items():
 
             if "autoupdate" not in infos:
@@ -66,61 +100,66 @@ class AppAutoUpdater():
 
             print(f"Checking {source} ...")
 
-            version, assets = self.get_latest_version_and_asset(strategy, asset, infos)
+            new_version, new_asset_urls = self.get_latest_version_and_asset(strategy, asset, infos)
 
             print(f"Current version in manifest: {self.current_version}")
-            print(f"Newest version on upstream: {version}")
+            print(f"Newest version on upstream: {new_version}")
 
             if source == "main":
-                if self.current_version == version:
-                    print(f"Version is still {version}, no update required for {source}")
+                if self.current_version == new_version:
+                    print(f"Version is still {new_version}, no update required for {source}")
                     continue
+                else:
+                    print(f"Update needed for {source}")
+                    todos[source] = {"new_asset_urls": new_asset_urls, "old_assets": infos, "new_version": new_version}
             else:
-                if isinstance(assets, str) and infos["url"] == assets:
+                if isinstance(new_asset_urls, str) and infos["url"] == new_asset_urls:
                     print(f"URL is still up to date for asset {source}")
                     continue
-                elif isinstance(assets, dict) and assets == {k: infos[k]["url"] for k in assets.keys()}:
+                elif isinstance(new_asset_urls, dict) and new_asset_urls == {k: infos[k]["url"] for k in new_asset_urls.keys()}:
                     print(f"URLs are still up to date for asset {source}")
                     continue
+                else:
+                    print(f"Update needed for {source}")
+                    todos[source] = {"new_asset_urls": new_asset_urls, "old_assets": infos}
 
-            if isinstance(assets, str):
-                sha256 = self.sha256_of_remote_file(assets)
-            elif isinstance(assets, dict):
-                sha256 = {url: self.sha256_of_remote_file(url) for url in assets.values()}
-
-            # FIXME: should create a tmp dir in which to make those changes
-
-            if source == "main":
-                self.replace_upstream_version_in_manifest(version)
-            if isinstance(assets, str):
-                self.replace_string_in_manifest(infos["url"], assets)
-                self.replace_string_in_manifest(infos["sha256"], sha256)
-            elif isinstance(assets, dict):
-                for key, url in assets.items():
-                    self.replace_string_in_manifest(infos[key]["url"], url)
-                    self.replace_string_in_manifest(infos[key]["sha256"], sha256[url])
-
-    def replace_upstream_version_in_manifest(self, new_version):
-
-        # FIXME : should be done in a tmp git clone ...?
-        manifest_raw = open(self.app_path + "/manifest.toml").read()
-
-        def repl(m):
-            return m.group(1) + new_version + m.group(3)
-        print(re.findall(r"(\s*version\s*=\s*[\"\'])([\d\.]+)(\~ynh\d+[\"\'])", manifest_raw))
-        manifest_new = re.sub(r"(\s*version\s*=\s*[\"\'])([\d\.]+)(\~ynh\d+[\"\'])", repl, manifest_raw)
-
-        open(self.app_path + "/manifest.toml", "w").write(manifest_new)
-
-    def replace_string_in_manifest(self, pattern, replace):
-
-        manifest_raw = open(self.app_path + "/manifest.toml").read()
-
-        manifest_new = manifest_raw.replace(pattern, replace)
-
-        open(self.app_path + "/manifest.toml", "w").write(manifest_new)
+        if not todos:
+            return
+
+        if "main" in todos:
+            new_version = todos["main"]["new_version"]
+            message = f"Upgrade to v{new_version}"
+            new_branch = f"ci-auto-update-{new_version}"
+        else:
+            message = "Upgrade sources"
+            new_branch = "ci-auto-update-sources"
+
+        try:
+            # Get the commit base for the new branch, and create it
+            commit_sha = self.repo.get_branch(self.base_branch).commit.sha
+            self.repo.create_git_ref(ref=f"refs/heads/{new_branch}", sha=commit_sha)
+        except:
+            pass
+
+        manifest_new = self.manifest_raw
+        for source, infos in todos.items():
+            manifest_new = self.replace_version_and_asset_in_manifest(manifest_new, infos.get("new_version"), infos["new_asset_urls"], infos["old_assets"], is_main=source == "main")
+
+        self.repo.update_file("manifest.toml",
+                              message=message,
+                              content=manifest_new,
+                              sha=self.manifest_raw_sha,
+                              branch=new_branch,
+                              author=author)
+
+        # Wait a bit to preserve the API rate limit
+        time.sleep(1.5)
+
+        # Open the PR
+        pr = self.repo.create_pull(title=message, body=message, head=new_branch, base=self.base_branch)
+
+        print("Created PR " + self.repo.full_name + " updated with PR #" + str(pr.id))
 
     def get_latest_version_and_asset(self, strategy, asset, infos):
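For reference, each entry collected in todos above is just the data needed for the later manifest rewrite; with entirely made-up values it would look like:

    todos = {
        "main": {
            "new_version": "1.2.3",        # only set for the "main" source
            "new_asset_urls": "https://github.com/example/example/archive/refs/tags/v1.2.3.tar.gz",
            "old_assets": {                # the current source entry from the manifest
                "url": "https://github.com/example/example/archive/refs/tags/v1.2.2.tar.gz",
                "sha256": "<current checksum>",
                "autoupdate": {"strategy": "latest_github_tag"},
            },
        },
    }

The bare try/except around create_git_ref() swallows any error, presumably so that a ci-auto-update-* branch left over from a previous run does not abort the script.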
@@ -129,6 +168,7 @@ class AppAutoUpdater():
         self.upstream_repo = self.upstream.replace("https://github.com/", "").strip("/")
         assert len(self.upstream_repo.split("/")) == 2, "'{self.upstream}' doesn't seem to be a github repository ?"
 
+
         if strategy == "latest_github_release":
             releases = self.github(f"repos/{self.upstream_repo}/releases")
             tags = [release["tag_name"] for release in releases if not release["draft"] and not release["prerelease"]]
@@ -172,16 +212,28 @@ class AppAutoUpdater():
         assert r.status_code == 200, r
         return r.json()
 
-    def sha256_of_remote_file(self, url):
-        try:
-            r = requests.get(url, stream=True)
-            m = hashlib.sha256()
-            for data in r.iter_content(8192):
-                m.update(data)
-            return m.hexdigest()
-        except Exception as e:
-            print(f"Failed to compute sha256 for {url} : {e}")
-            return None
+    def replace_version_and_asset_in_manifest(self, content, new_version, new_assets_urls, current_assets, is_main):
+
+        if isinstance(new_assets_urls, str):
+            sha256 = sha256_of_remote_file(new_assets_urls)
+        elif isinstance(new_assets_urls, dict):
+            sha256 = {url: sha256_of_remote_file(url) for url in new_assets_urls.values()}
+
+        if is_main:
+            def repl(m):
+                return m.group(1) + new_version + m.group(3)
+            content = re.sub(r"(\s*version\s*=\s*[\"\'])([\d\.]+)(\~ynh\d+[\"\'])", repl, content)
+        if isinstance(new_assets_urls, str):
+            content = content.replace(current_assets["url"], new_assets_urls)
+            content = content.replace(current_assets["sha256"], sha256)
+        elif isinstance(new_assets_urls, dict):
+            for key, url in new_assets_urls.items():
+                content = content.replace(current_assets[key]["url"], url)
+                content = content.replace(current_assets[key]["sha256"], sha256[url])
+
+        return content
 
 
 if __name__ == "__main__":
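The regex in replace_version_and_asset_in_manifest() only rewrites the upstream part of the version and keeps the ~ynhN suffix: for example, with new_version = "1.2.3", a manifest line version = "1.2.2~ynh4" becomes version = "1.2.3~ynh4". The body of the __main__ block is not shown in this diff, but with the new constructor signature a single-app invocation would presumably look like:

    # hypothetical invocation; the app id is a placeholder resolved to Yunohost-Apps/example_ynh
    AppAutoUpdater("example").run()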