import http.client
import json
import os
import urllib.request
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Any

from twisted.python import log

class HttpResponse:
    """Thin wrapper around http.client.HTTPResponse with JSON and header helpers."""

    def __init__(self, raw: http.client.HTTPResponse) -> None:
        self.raw = raw

    def json(self) -> Any:
        return json.load(self.raw)

    def headers(self) -> http.client.HTTPMessage:
        return self.raw.headers


def http_request(
    url: str,
    method: str = "GET",
    headers: dict[str, str] = {},
    data: dict[str, Any] | None = None,
) -> HttpResponse:
    """Perform an HTTP request and return the response, raising on HTTP errors."""
    body = None
    if data:
        body = json.dumps(data).encode("ascii")
    # Copy so neither the caller's dict nor the shared default is mutated.
    headers = headers.copy()
    headers["User-Agent"] = "buildbot-nix"
    req = urllib.request.Request(url, headers=headers, method=method, data=body)
    try:
        resp = urllib.request.urlopen(req)
    except urllib.request.HTTPError as e:
        # Include the error body in the exception message to ease debugging.
        resp_body = ""
        try:
            resp_body = e.fp.read().decode("utf-8", "replace")
        except Exception:
            pass
        raise Exception(
            f"Request for {method} {url} failed with {e.code} {e.reason}: {resp_body}"
        ) from e
    return HttpResponse(resp)
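
# Illustrative use of http_request (URL and token are placeholder values), kept
# as a comment so the module's behaviour is unchanged:
#
#   resp = http_request(
#       "https://api.github.com/user",
#       headers={"Authorization": "Bearer <token>"},
#   )
#   login = resp.json()["login"]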


def paginated_github_request(url: str, token: str) -> list[dict[str, Any]]:
    """Fetch all pages of a GitHub API listing and return the concatenated items."""
    next_url: str | None = url
    items = []
    while next_url:
        try:
            res = http_request(
                next_url,
                headers={"Authorization": f"Bearer {token}"},
            )
        except OSError as e:
            raise Exception(f"failed to fetch {next_url}: {e}") from e
        next_url = None
        # Follow the rel="next" entry of the Link header, if present.
        link = res.headers()["Link"]
        if link is not None:
            links = link.split(", ")
            for link in links:  # pagination
                link_parts = link.split(";")
                if link_parts[1].strip() == 'rel="next"':
                    # Strip the surrounding angle brackets from the URL.
                    next_url = link_parts[0][1:-1]
        items += res.json()
    return items
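
# GitHub's Link header, which drives the pagination above, looks roughly like
# this (illustrative URLs):
#
#   <https://api.github.com/user/repos?per_page=100&page=2>; rel="next",
#   <https://api.github.com/user/repos?per_page=100&page=5>; rel="last"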


def slugify_project_name(name: str) -> str:
    return name.replace(".", "-").replace("/", "-")
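
# For example, slugify_project_name("some-org/some.repo") returns "some-org-some-repo".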


class GithubProject:
    """Convenience accessors over a repository object from the GitHub REST API."""

    def __init__(self, data: dict[str, Any]) -> None:
        self.data = data

    @property
    def repo(self) -> str:
        return self.data["name"]

    @property
    def owner(self) -> str:
        return self.data["owner"]["login"]

    @property
    def name(self) -> str:
        return self.data["full_name"]

    @property
    def url(self) -> str:
        return self.data["html_url"]

    @property
    def id(self) -> str:
        return slugify_project_name(self.data["full_name"])

    @property
    def default_branch(self) -> str:
        return self.data["default_branch"]

    @property
    def topics(self) -> list[str]:
        return self.data["topics"]
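
# The wrapped `data` dict is a repository object as returned by the GitHub REST
# API; the fields accessed above look roughly like this (truncated, values are
# illustrative):
#
#   {
#       "name": "some-repo",
#       "full_name": "some-org/some-repo",
#       "owner": {"login": "some-org"},
#       "html_url": "https://github.com/some-org/some-repo",
#       "default_branch": "main",
#       "topics": ["nix"],
#   }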


def create_project_hook(
    owner: str, repo: str, token: str, webhook_url: str, webhook_secret: str
) -> None:
    """Create a push/pull_request webhook on the repository unless one already exists."""
    hooks = paginated_github_request(
        f"https://api.github.com/repos/{owner}/{repo}/hooks?per_page=100", token
    )
    config = dict(
        url=webhook_url, content_type="json", insecure_ssl="0", secret=webhook_secret
    )
    data = dict(name="web", active=True, events=["push", "pull_request"], config=config)
    headers = {
        "Authorization": f"Bearer {token}",
        "Accept": "application/vnd.github+json",
        "Content-Type": "application/json",
        "X-GitHub-Api-Version": "2022-11-28",
    }
    for hook in hooks:
        if hook["config"]["url"] == webhook_url:
            log.msg(f"hook for {owner}/{repo} already exists")
            return

    http_request(
        f"https://api.github.com/repos/{owner}/{repo}/hooks",
        method="POST",
        headers=headers,
        data=data,
    )
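
# The POST above sends a payload shaped like this (webhook URL and secret are
# illustrative):
#
#   {
#       "name": "web",
#       "active": true,
#       "events": ["push", "pull_request"],
#       "config": {
#           "url": "https://buildbot.example.com/change_hook/github",
#           "content_type": "json",
#           "insecure_ssl": "0",
#           "secret": "..."
#       }
#   }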


def refresh_projects(github_token: str, repo_cache_file: Path) -> None:
    """Fetch all repositories the token can administer and write them to the cache file."""
    repos = []

    for repo in paginated_github_request(
        "https://api.github.com/user/repos?per_page=100",
        github_token,
    ):
        if not repo["permissions"]["admin"]:
            name = repo["full_name"]
            log.msg(
                f"skipping {name} because we do not have admin privileges, needed for hook management"
            )
        else:
            repos.append(repo)

    # Write to a temporary file in the same directory and rename it over the
    # cache file, so readers never observe a partially written cache.
    with NamedTemporaryFile("w", delete=False, dir=repo_cache_file.parent) as f:
        try:
            f.write(json.dumps(repos))
            f.flush()
            os.rename(f.name, repo_cache_file)
        except OSError:
            os.unlink(f.name)
            raise


def load_projects(github_token: str, repo_cache_file: Path) -> list[GithubProject]:
    """Load repositories from the cache file, refreshing the cache if it does not exist."""
    if not repo_cache_file.exists():
        log.msg("fetching github repositories")
        refresh_projects(github_token, repo_cache_file)
    repos: list[dict[str, Any]] = json.loads(repo_cache_file.read_text())
    return [GithubProject(repo) for repo in repos]
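
# Typical use (token and path are placeholders):
#
#   projects = load_projects("ghp_example", Path("/var/lib/buildbot/github-repos.json"))
#   for project in projects:
#       print(project.name, project.default_branch)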