# buildbot-nix/buildbot_nix/github_projects.py
import http.client
import json
import urllib.request
from pathlib import Path
from typing import Any
from twisted.python import log
class HttpResponse:
    """Thin convenience wrapper around an ``http.client.HTTPResponse``."""

    def __init__(self, raw: http.client.HTTPResponse) -> None:
        # Underlying response; the body is consumed lazily by json().
        self.raw = raw

    def json(self) -> Any:
        """Decode the response body as JSON."""
        return json.load(self.raw)

    def headers(self) -> http.client.HTTPMessage:
        """Return the response headers."""
        return self.raw.headers
def http_request(
    url: str,
    method: str = "GET",
    headers: dict[str, str] | None = None,
    data: dict[str, Any] | None = None,
) -> HttpResponse:
    """Perform a blocking HTTP request and return the wrapped response.

    Args:
        url: Absolute URL to request.
        method: HTTP verb (default "GET").
        headers: Extra request headers; a ``User-Agent`` is always added.
            (Was a mutable ``{}`` default — replaced with ``None`` sentinel.)
        data: Optional JSON-serializable payload sent as the request body.

    Raises:
        Exception: when the server answers with an HTTP error status; the
            message includes the status code, reason, and response body.
    """
    body = None
    if data:
        body = json.dumps(data).encode("ascii")
    # Copy so the caller's dict is never mutated by the User-Agent insert.
    headers = dict(headers or {})
    headers["User-Agent"] = "buildbot-nix"
    req = urllib.request.Request(url, headers=headers, method=method, data=body)
    try:
        resp = urllib.request.urlopen(req)
    except urllib.request.HTTPError as e:
        # Use a distinct name: the original reused `body` and clobbered the
        # request payload with the error response body.
        error_body = ""
        try:
            error_body = e.fp.read()
        except OSError:
            pass  # best-effort: include the error body only if readable
        raise Exception(
            f"Request for {method} {url} failed with {e.code} {e.reason}: {error_body}"
        ) from e
    return HttpResponse(resp)
def paginated_github_request(url: str, token: str) -> list[dict[str, Any]]:
    """Fetch every page of a GitHub list endpoint and concatenate the results.

    Follows the ``Link`` response header (``rel="next"``) until no further
    page is advertised.

    Args:
        url: First page URL (including any ``per_page`` query parameter).
        token: GitHub bearer token used for authorization.

    Raises:
        Exception: when any page request fails at the network level.
    """
    next_url: str | None = url
    items: list[dict[str, Any]] = []
    while next_url:
        try:
            res = http_request(
                next_url,
                headers={"Authorization": f"Bearer {token}"},
            )
        except OSError as e:
            raise Exception(f"failed to fetch {next_url}: {e}") from e
        next_url = None
        link_header = res.headers()["Link"]
        if link_header is not None:
            # Each entry looks like: <https://...>; rel="next"
            for entry in link_header.split(", "):
                entry_parts = entry.split(";")
                if entry_parts[1].strip() == 'rel="next"':
                    # Strip the surrounding '<' and '>' from the URL.
                    next_url = entry_parts[0][1:-1]
        items += res.json()
    return items
class GithubProject:
    """Typed accessor wrapper around a GitHub REST API repository payload."""

    def __init__(self, data: dict[str, Any]) -> None:
        # Raw repository object as returned by the GitHub API.
        self.data = data

    @property
    def repo(self) -> str:
        """Repository name without the owner prefix."""
        return self.data["name"]

    @property
    def owner(self) -> str:
        """Login of the owning user or organization."""
        return self.data["owner"]["login"]

    @property
    def name(self) -> str:
        """Full ``owner/repo`` name."""
        return self.data["full_name"]

    @property
    def url(self) -> str:
        """Web (HTML) URL of the repository."""
        return self.data["html_url"]

    @property
    def id(self) -> str:
        """Identifier safe for use in buildbot names (``owner-repo``)."""
        return self.data["full_name"].replace("/", "-")

    @property
    def default_branch(self) -> str:
        """Name of the repository's default branch."""
        return self.data["default_branch"]

    @property
    def topics(self) -> list[str]:
        """Topic labels attached to the repository."""
        return self.data["topics"]
def create_project_hook(
    owner: str,
    repo: str,
    token: str,
    webhook_url: str,
    webhook_secret: str,  # was unannotated — every other signature here is typed
) -> None:
    """Ensure a push/pull_request webhook exists for ``owner/repo``.

    Idempotent: if a hook already points at ``webhook_url``, nothing is
    created and a log message is emitted instead.

    Args:
        owner: Repository owner login.
        repo: Repository name.
        token: GitHub bearer token with hook-admin permission.
        webhook_url: Endpoint GitHub should deliver events to.
        webhook_secret: Shared secret used to sign webhook deliveries.
    """
    hooks = paginated_github_request(
        f"https://api.github.com/repos/{owner}/{repo}/hooks?per_page=100", token
    )
    # Check for an existing hook before building the POST payload.
    for hook in hooks:
        if hook["config"]["url"] == webhook_url:
            log.msg(f"hook for {owner}/{repo} already exists")
            return
    config = dict(
        url=webhook_url, content_type="json", insecure_ssl="0", secret=webhook_secret
    )
    data = dict(name="web", active=True, events=["push", "pull_request"], config=config)
    headers = {
        "Authorization": f"Bearer {token}",
        "Accept": "application/vnd.github+json",
        "Content-Type": "application/json",
        "X-GitHub-Api-Version": "2022-11-28",
    }
    http_request(
        f"https://api.github.com/repos/{owner}/{repo}/hooks",
        method="POST",
        headers=headers,
        data=data,
    )
def load_projects(github_token: str, repo_cache_file: Path) -> list[GithubProject]:
    """Return a ``GithubProject`` for every repository visible to the token.

    Results are cached on disk; delete ``repo_cache_file`` to force a
    fresh fetch from the GitHub API.

    Args:
        github_token: GitHub bearer token.
        repo_cache_file: Path of the JSON cache file (read if present,
            written after an API fetch).
    """
    if repo_cache_file.exists():
        log.msg("fetching github repositories from cache")
        cached: list[dict[str, Any]] = json.loads(repo_cache_file.read_text())
        return [GithubProject(entry) for entry in cached]
    log.msg("fetching github repositories from api")
    fetched = paginated_github_request(
        "https://api.github.com/user/repos?per_page=100",
        github_token,
    )
    repo_cache_file.write_text(json.dumps(fetched, indent=2))
    return [GithubProject(entry) for entry in fetched]