Merge pull request #65 from Mic92/hercules
Hercules CI effects: add CLI
commit 22b4caf6fc

bin/buildbot-effects (new executable file, 9 lines)
@@ -0,0 +1,9 @@
#!/usr/bin/env python
import sys
from pathlib import Path
sys.path.append(str(Path(__file__).parent.parent))

from hercules_effects.cli import main

if __name__ == '__main__':
    main()

buildbot_effects/__init__.py (new file, 243 lines)
@@ -0,0 +1,243 @@
import json
import os
import shlex
import shutil
import subprocess
import sys
from collections.abc import Iterator
from contextlib import contextmanager
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import IO, Any

from .options import EffectsOptions


class BuildbotEffectsError(Exception):
    pass


def run(
    cmd: list[str],
    stdin: int | IO[str] | None = None,
    stdout: int | IO[str] | None = None,
    stderr: int | IO[str] | None = None,
    verbose: bool = True,
) -> subprocess.CompletedProcess[str]:
    if verbose:
        print("$", shlex.join(cmd), file=sys.stderr)
    return subprocess.run(
        cmd,
        check=True,
        text=True,
        stdin=stdin,
        stdout=stdout,
        stderr=stderr,
    )


def git_command(args: list[str], path: Path) -> str:
    cmd = ["git", "-C", str(path), *args]
    proc = run(cmd, stdout=subprocess.PIPE)
    return proc.stdout.strip()


def get_git_rev(path: Path) -> str:
    return git_command(["rev-parse", "--verify", "HEAD"], path)


def get_git_branch(path: Path) -> str:
    return git_command(["rev-parse", "--abbrev-ref", "HEAD"], path)


def get_git_remote_url(path: Path) -> str | None:
    try:
        return git_command(["remote", "get-url", "origin"], path)
    except subprocess.CalledProcessError:
        return None


def git_get_tag(path: Path, rev: str) -> str | None:
    tags = git_command(["tag", "--points-at", rev], path)
    if tags:
        return tags.splitlines()[1]
    return None


def effects_args(opts: EffectsOptions) -> dict[str, Any]:
    rev = opts.rev or get_git_rev(opts.path)
    short_rev = rev[:7]
    branch = opts.branch or get_git_branch(opts.path)
    repo = opts.repo or opts.path.name
    tag = opts.tag or git_get_tag(opts.path, rev)
    url = opts.url or get_git_remote_url(opts.path)
    primary_repo = dict(
        name=repo,
        branch=branch,
        # TODO: support ref
        ref=None,
        tag=tag,
        rev=rev,
        shortRev=short_rev,
        remoteHttpUrl=url,
    )
    return {
        "primaryRepo": primary_repo,
        **primary_repo,
    }


def nix_command(*args: str) -> list[str]:
    return ["nix", "--extra-experimental-features", "nix-command flakes", *args]


def effect_function(opts: EffectsOptions) -> str:
    args = effects_args(opts)
    rev = args["rev"]
    escaped_args = json.dumps(json.dumps(args))
    url = json.dumps(f"git+file://{opts.path}?rev={rev}#")
    return f"""(((builtins.getFlake {url}).outputs.herculesCI (builtins.fromJSON {escaped_args})).onPush.default.outputs.hci-effects)"""


def list_effects(opts: EffectsOptions) -> list[str]:
    cmd = nix_command(
        "eval",
        "--json",
        "--expr",
        f"builtins.attrNames {effect_function(opts)}",
    )
    proc = run(cmd, stdout=subprocess.PIPE)
    return json.loads(proc.stdout)


def instantiate_effects(opts: EffectsOptions) -> str:
    cmd = [
        "nix-instantiate",
        "--expr",
        f"{effect_function(opts)}.deploy.run",
    ]
    proc = run(cmd, stdout=subprocess.PIPE)
    return proc.stdout.rstrip()


def parse_derivation(path: str) -> dict[str, Any]:
    cmd = [
        "nix",
        "--extra-experimental-features",
        "nix-command flakes",
        "derivation",
        "show",
        f"{path}^*",
    ]
    proc = run(cmd, stdout=subprocess.PIPE)
    return json.loads(proc.stdout)


def env_args(env: dict[str, str]) -> list[str]:
    result = []
    for k, v in env.items():
        result.append("--setenv")
        result.append(f"{k}")
        result.append(f"{v}")
    return result


@contextmanager
def pipe() -> Iterator[tuple[IO[str], IO[str]]]:
    r, w = os.pipe()
    r_file = os.fdopen(r, "r")
    w_file = os.fdopen(w, "w")
    try:
        yield r_file, w_file
    finally:
        r_file.close()
        w_file.close()


def run_effects(
    drv_path: str,
    drv: dict[str, Any],
    secrets: dict[str, Any] | None = None,
) -> None:
    if secrets is None:
        secrets = {}
    builder = drv["builder"]
    args = drv["args"]
    sandboxed_cmd = [
        builder,
        *args,
    ]
    env = {}
    env["IN_HERCULES_CI_EFFECT"] = "true"
    env["HERCULES_CI_SECRETS_JSON"] = "/run/secrets.json"
    env["NIX_BUILD_TOP"] = "/build"
    bwrap = shutil.which("bwrap")
    if bwrap is None:
        msg = "bwrap' executable not found"
        raise BuildbotEffectsError(msg)

    bubblewrap_cmd = [
        "nix",
        "develop",
        "-i",
        f"{drv_path}^*",
        "-c",
        bwrap,
        "--unshare-all",
        "--share-net",
        "--new-session",
        "--die-with-parent",
        "--dir",
        "/build",
        "--chdir",
        "/build",
        "--tmpfs",
        "/tmp",  # noqa: S108
        "--tmpfs",
        "/build",
        "--proc",
        "/proc",
        "--dev",
        "/dev",
        "--ro-bind",
        "/etc/resolv.conf",
        "/etc/resolv.conf",
        "--ro-bind",
        "/etc/hosts",
        "/etc/hosts",
        "--ro-bind",
        "/nix/store",
        "/nix/store",
    ]

    with NamedTemporaryFile() as tmp:
        secrets = secrets.copy()
        secrets["hercules-ci"] = {"data": {"token": "dummy"}}
        tmp.write(json.dumps(secrets).encode())
        bubblewrap_cmd.extend(
            [
                "--ro-bind",
                tmp.name,
                "/run/secrets.json",
            ],
        )
        bubblewrap_cmd.extend(env_args(env))
        bubblewrap_cmd.append("--")
        bubblewrap_cmd.extend(sandboxed_cmd)
        with pipe() as (r_file, w_file):
            print("$", shlex.join(bubblewrap_cmd), file=sys.stderr)
            proc = subprocess.Popen(
                bubblewrap_cmd,
                text=True,
                stdin=subprocess.DEVNULL,
                stdout=w_file,
                stderr=w_file,
            )
            w_file.close()
            with proc:
                for line in r_file:
                    print(line, end="")
                proc.wait()
            if proc.returncode != 0:
                msg = f"command failed with exit code {proc.returncode}"
                raise BuildbotEffectsError(msg)
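
Note on the new module: `effect_function` is the heart of it: it splices the checked-out flake and the collected repository metadata into a Nix expression that selects `onPush.default.outputs.hci-effects`. A rough stand-alone sketch of the string it produces, with made-up repository metadata (the real values come from `effects_args` and git):

import json

# Illustrative stand-ins for what effects_args() would collect from git.
args = {"rev": "0123456789abcdef", "branch": "main", "name": "example-repo"}
escaped_args = json.dumps(json.dumps(args))  # JSON text embedded as a Nix string
url = json.dumps("git+file:///home/user/example-repo?rev=0123456789abcdef#")

expr = (
    f"(((builtins.getFlake {url}).outputs.herculesCI "
    f"(builtins.fromJSON {escaped_args})).onPush.default.outputs.hci-effects)"
)
print(expr)  # this is what gets handed to `nix eval --expr` / `nix-instantiate --expr`

`list_effects` wraps this in `builtins.attrNames` for `nix eval --json`, while `instantiate_effects` appends `.deploy.run` and passes it to `nix-instantiate`.
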
buildbot_effects/cli.py (new file, 85 lines)
@@ -0,0 +1,85 @@
import argparse
import json
from collections.abc import Callable
from pathlib import Path

from . import instantiate_effects, list_effects, parse_derivation, run_effects
from .options import EffectsOptions


def list_command(options: EffectsOptions) -> None:
    print(list_effects(options))


def run_command(options: EffectsOptions) -> None:
    drv_path = instantiate_effects(options)
    drvs = parse_derivation(drv_path)
    drv = next(iter(drvs.values()))

    secrets = json.loads(options.secrets.read_text()) if options.secrets else {}
    run_effects(drv_path, drv, secrets=secrets)


def run_all_command(options: EffectsOptions) -> None:
    print("TODO")


def parse_args() -> tuple[Callable[[EffectsOptions], None], EffectsOptions]:
    parser = argparse.ArgumentParser(description="Run effects from a hercules-ci flake")
    parser.add_argument(
        "--secrets",
        type=Path,
        help="Path to a json file with secrets",
    )
    parser.add_argument(
        "--rev",
        type=str,
        help="Git revision to use",
    )
    parser.add_argument(
        "--branch",
        type=str,
        help="Git branch to use",
    )
    parser.add_argument(
        "--repo",
        type=str,
        help="Git repo to prepend to be",
    )
    parser.add_argument(
        "--path",
        type=str,
        help="Path to the repository",
    )
    subparser = parser.add_subparsers(
        dest="command",
        required=True,
        help="Command to run",
    )
    list_parser = subparser.add_parser(
        "list",
        help="List available effects",
    )
    list_parser.set_defaults(command=list_command)
    run_parser = subparser.add_parser(
        "run",
        help="Run an effect",
    )
    run_parser.set_defaults(command=run_command)
    run_parser.add_argument(
        "effect",
        help="Effect to run",
    )
    run_all_parser = subparser.add_parser(
        "run-all",
        help="Run all effects",
    )
    run_all_parser.set_defaults(command=run_all_command)

    args = parser.parse_args()
    return args.command, EffectsOptions(secrets=args.secrets)


def main() -> None:
    command, options = parse_args()
    command(options)
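
For reference, the `run` subcommand is a thin wrapper over three library calls. A minimal sketch of driving them directly, assuming the package from this PR is importable and the working directory is a flake checkout that exposes `herculesCI` outputs:

from pathlib import Path

from buildbot_effects import instantiate_effects, list_effects, parse_derivation, run_effects
from buildbot_effects.options import EffectsOptions

opts = EffectsOptions(path=Path.cwd())
print(list_effects(opts))  # names under onPush.default.outputs.hci-effects

drv_path = instantiate_effects(opts)  # .drv for the `deploy.run` effect
drv = next(iter(parse_derivation(drv_path).values()))
run_effects(drv_path, drv, secrets={})  # runs the builder inside bubblewrap

Note that, as committed, `run` ignores its positional `effect` argument (instantiation is hard-wired to `.deploy.run`) and `run-all` is still a TODO.
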
buildbot_effects/options.py (new file, 13 lines)
@@ -0,0 +1,13 @@
from dataclasses import dataclass, field
from pathlib import Path


@dataclass
class EffectsOptions:
    secrets: Path | None = None
    path: Path = field(default_factory=lambda: Path.cwd())
    repo: str | None = ""
    rev: str | None = None
    branch: str | None = None
    url: str | None = None
    tag: str | None = None
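
These defaults interact with `effects_args` above: any repository-related field left unset (or empty) is filled from the local checkout via git, and explicitly set fields win. A small sketch, with illustrative paths and values, assuming the package is importable:

from pathlib import Path

from buildbot_effects.options import EffectsOptions

# Defaults: rev/branch/tag/url are looked up via `git` in the current directory.
opts = EffectsOptions()

# Overrides: pin the revision and branch instead of asking git.
pinned = EffectsOptions(
    path=Path("/srv/checkouts/example-repo"),  # hypothetical checkout path
    rev="0123456789abcdef0123456789abcdef01234567",
    branch="main",
)
print(pinned.rev)
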
@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-
 import json
 import multiprocessing
 import os
@@ -10,25 +8,28 @@ from collections import defaultdict
 from collections.abc import Generator
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 from buildbot.configurators import ConfiguratorBase
 from buildbot.plugins import reporters, schedulers, secrets, steps, util, worker
 from buildbot.process import buildstep, logobserver, remotecommand
-from buildbot.process.log import Log
 from buildbot.process.project import Project
 from buildbot.process.properties import Interpolate, Properties
 from buildbot.process.results import ALL_RESULTS, statusToString
 from buildbot.steps.trigger import Trigger
 from buildbot.util import asyncSleep
 from buildbot.www.authz.endpointmatchers import EndpointMatcherBase, Match
 
+if TYPE_CHECKING:
+    from buildbot.process.log import Log
+
 from twisted.internet import defer, threads
 from twisted.logger import Logger
 from twisted.python.failure import Failure
 
 from .github_projects import (
     GithubProject,
-    create_project_hook,  # noqa: E402
+    create_project_hook,
     load_projects,
     refresh_projects,
     slugify_project_name,
@@ -39,10 +40,12 @@ SKIPPED_BUILDER_NAME = "skipped-builds"
 log = Logger()
 
 
+class BuildbotNixError(Exception):
+    pass
+
+
 class BuildTrigger(Trigger):
-    """
-    Dynamic trigger that creates a build for every attribute.
-    """
+    """Dynamic trigger that creates a build for every attribute."""
 
     def __init__(
         self,
@@ -122,9 +125,7 @@ class BuildTrigger(Trigger):
         return triggered_schedulers
 
     def getCurrentSummary(self) -> dict[str, str]:  # noqa: N802
-        """
-        The original build trigger will the generic builder name `nix-build` in this case, which is not helpful
-        """
+        """The original build trigger will the generic builder name `nix-build` in this case, which is not helpful"""
         if not self.triggeredNames:
             return {"step": "running"}
         summary = []
@@ -133,14 +134,13 @@ class BuildTrigger(Trigger):
             count = self._result_list.count(status)
             if count:
                 summary.append(
-                    f"{self._result_list.count(status)} {statusToString(status, count)}"
+                    f"{self._result_list.count(status)} {statusToString(status, count)}",
                 )
         return {"step": f"({', '.join(summary)})"}
 
 
 class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
-    """
-    Parses the output of `nix-eval-jobs` and triggers a `nix-build` build for
+    """Parses the output of `nix-eval-jobs` and triggers a `nix-build` build for
     every attribute.
     """
 
@@ -168,7 +168,8 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
             try:
                 job = json.loads(line)
             except json.JSONDecodeError as e:
-                raise Exception(f"Failed to parse line: {line}") from e
+                msg = f"Failed to parse line: {line}"
+                raise BuildbotNixError(msg) from e
             jobs.append(job)
         build_props = self.build.getProperties()
         repo_name = build_props.getProperty(
@@ -179,9 +180,7 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
         filtered_jobs = []
         for job in jobs:
             system = job.get("system")
-            if not system:  # report eval errors
-                filtered_jobs.append(job)
-            elif system in self.supported_systems:
+            if not system or system in self.supported_systems:  # report eval errors
                 filtered_jobs.append(job)
 
         self.build.addStepsAfterCurrentStep(
@@ -191,8 +190,8 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
                     skipped_builds_scheduler=f"{project_id}-nix-skipped-build",
                     name="build flake",
                     jobs=filtered_jobs,
-                )
-            ]
+                ),
+            ],
         )
 
         return result
@@ -203,13 +202,12 @@ class RetryCounter:
     def __init__(self, retries: int) -> None:
         self.builds: dict[uuid.UUID, int] = defaultdict(lambda: retries)
 
-    def retry_build(self, id: uuid.UUID) -> int:
-        retries = self.builds[id]
+    def retry_build(self, build_id: uuid.UUID) -> int:
+        retries = self.builds[build_id]
         if retries > 1:
-            self.builds[id] = retries - 1
+            self.builds[build_id] = retries - 1
             return retries
-        else:
-            return 0
+        return 0
 
 
 # For now we limit this to two. Often this allows us to make the error log
@@ -218,9 +216,7 @@ RETRY_COUNTER = RetryCounter(retries=2)
 
 
 class EvalErrorStep(steps.BuildStep):
-    """
-    Shows the error message of a failed evaluation.
-    """
+    """Shows the error message of a failed evaluation."""
 
     @defer.inlineCallbacks
     def run(self) -> Generator[Any, object, Any]:
@@ -233,9 +229,7 @@ class EvalErrorStep(steps.BuildStep):
 
 
 class NixBuildCommand(buildstep.ShellMixin, steps.BuildStep):
-    """
-    Builds a nix derivation.
-    """
+    """Builds a nix derivation."""
 
     def __init__(self, **kwargs: Any) -> None:
         kwargs = self.setupShellMixin(kwargs)
@@ -256,8 +250,7 @@ class NixBuildCommand(buildstep.ShellMixin, steps.BuildStep):
 
 
 class UpdateBuildOutput(steps.BuildStep):
-    """
-    Updates store paths in a public www directory.
+    """Updates store paths in a public www directory.
 
     This is useful to prefetch updates without having to evaluate
     on the target machine.
     """
@@ -269,11 +262,11 @@ class UpdateBuildOutput(steps.BuildStep):
     def run(self) -> Generator[Any, object, Any]:
         props = self.build.getProperties()
         if props.getProperty("branch") != props.getProperty(
-            "github.repository.default_branch"
+            "github.repository.default_branch",
         ):
             return util.SKIPPED
 
-        attr = os.path.basename(props.getProperty("attr"))
+        attr = Path(props.getProperty("attr")).name
         out_path = props.getProperty("out_path")
         # XXX don't hardcode this
         self.path.mkdir(parents=True, exist_ok=True)
@@ -319,12 +312,12 @@ def reload_github_projects(
     github_token_secret: str,
     project_cache_file: Path,
 ) -> util.BuilderConfig:
-    """
-    Updates the flake an opens a PR for it.
-    """
+    """Updates the flake an opens a PR for it."""
     factory = util.BuildFactory()
     factory.addStep(
-        ReloadGithubProjects(github_token_secret, project_cache_file=project_cache_file)
+        ReloadGithubProjects(
+            github_token_secret, project_cache_file=project_cache_file
+        ),
     )
     return util.BuilderConfig(
         name="reload-github-projects",
@@ -338,20 +331,25 @@ def reload_github_projects(
 class GitWithRetry(steps.Git):
     @defer.inlineCallbacks
     def run_vc(
-        self, branch: str, revision: str, patch: str
+        self,
+        branch: str,
+        revision: str,
+        patch: str,
     ) -> Generator[Any, object, Any]:
         retry_counter = 0
         while True:
             try:
                 res = yield super().run_vc(branch, revision, patch)
-                return res
-            except Exception as e:
+            except Exception as e:  # noqa: BLE001
                 retry_counter += 1
                 if retry_counter == 3:
-                    raise e
+                    msg = "Failed to clone"
+                    raise BuildbotNixError(msg) from e
                 log: Log = yield self.addLog("log")
                 yield log.addStderr(f"Retrying git clone (error: {e})\n")
                 yield asyncSleep(2 << retry_counter)  # 2, 4, 8
+            else:
+                return res
 
 
 def nix_eval_config(
@@ -363,14 +361,13 @@ def nix_eval_config(
     worker_count: int,
     max_memory_size: int,
 ) -> util.BuilderConfig:
-    """
-    Uses nix-eval-jobs to evaluate hydraJobs from flake.nix in parallel.
+    """Uses nix-eval-jobs to evaluate hydraJobs from flake.nix in parallel.
 
     For each evaluated attribute a new build pipeline is started.
     """
     factory = util.BuildFactory()
     # check out the source
     url_with_secret = util.Interpolate(
-        f"https://git:%(secret:{github_token_secret})s@github.com/%(prop:project)s"
+        f"https://git:%(secret:{github_token_secret})s@github.com/%(prop:project)s",
     )
     factory.addStep(
         GitWithRetry(
@@ -378,7 +375,7 @@ def nix_eval_config(
             method="clean",
             submodules=True,
             haltOnFailure=True,
-        )
+        ),
     )
 
     factory.addStep(
@@ -405,7 +402,7 @@ def nix_eval_config(
             ],
             haltOnFailure=True,
             locks=[eval_lock.access("exclusive")],
-        )
+        ),
     )
 
     return util.BuilderConfig(
@@ -438,9 +435,7 @@ def nix_build_config(
     cachix: CachixConfig | None = None,
     outputs_path: Path | None = None,
 ) -> util.BuilderConfig:
-    """
-    Builds one nix flake attribute.
-    """
+    """Builds one nix flake attribute."""
     factory = util.BuildFactory()
     factory.addStep(
         NixBuildCommand(
@@ -466,7 +461,7 @@ def nix_build_config(
             # We increase this over the default since the build output might end up in a different `nix build`.
             timeout=60 * 60 * 3,
             haltOnFailure=True,
-        )
+        ),
     )
     if cachix:
         factory.addStep(
@@ -479,7 +474,7 @@ def nix_build_config(
                     cachix.name,
                     util.Interpolate("result-%(prop:attr)s"),
                 ],
-            )
+            ),
         )
 
     factory.addStep(
@@ -490,27 +485,27 @@ def nix_build_config(
                 "--add-root",
                 # FIXME: cleanup old build attributes
                 util.Interpolate(
-                    "/nix/var/nix/gcroots/per-user/buildbot-worker/%(prop:project)s/%(prop:attr)s"
+                    "/nix/var/nix/gcroots/per-user/buildbot-worker/%(prop:project)s/%(prop:attr)s",
                 ),
                 "-r",
                 util.Property("out_path"),
             ],
             doStepIf=lambda s: s.getProperty("branch")
             == s.getProperty("github.repository.default_branch"),
-        )
+        ),
     )
     factory.addStep(
         steps.ShellCommand(
             name="Delete temporary gcroots",
             command=["rm", "-f", util.Interpolate("result-%(prop:attr)s")],
-        )
+        ),
     )
     if outputs_path is not None:
         factory.addStep(
             UpdateBuildOutput(
                 name="Update build output",
                 path=outputs_path,
-            )
+            ),
         )
     return util.BuilderConfig(
         name=f"{project.name}/nix-build",
@@ -523,18 +518,17 @@ def nix_build_config(
 
 
 def nix_skipped_build_config(
-    project: GithubProject, worker_names: list[str]
+    project: GithubProject,
+    worker_names: list[str],
 ) -> util.BuilderConfig:
-    """
-    Dummy builder that is triggered when a build is skipped.
-    """
+    """Dummy builder that is triggered when a build is skipped."""
    factory = util.BuildFactory()
     factory.addStep(
         EvalErrorStep(
             name="Nix evaluation",
             doStepIf=lambda s: s.getProperty("error"),
             hideStepIf=lambda _, s: not s.getProperty("error"),
-        )
+        ),
     )
 
     # This is just a dummy step showing the cached build
@@ -543,7 +537,7 @@ def nix_skipped_build_config(
             name="Nix build (cached)",
             doStepIf=lambda _: False,
             hideStepIf=lambda _, s: s.getProperty("error"),
-        )
+        ),
     )
     return util.BuilderConfig(
         name=f"{project.name}/nix-skipped-build",
@@ -595,7 +589,7 @@ def config_for_project(
     config["schedulers"].extend(
         [
             schedulers.SingleBranchScheduler(
-                name=f"{project.id}-default-branch",
+                name=f"{project.project_id}-default-branch",
                 change_filter=util.ChangeFilter(
                     repository=project.url,
                     filter_fn=lambda c: c.branch
@@ -606,7 +600,7 @@ def config_for_project(
             ),
             # this is compatible with bors or github's merge queue
             schedulers.SingleBranchScheduler(
-                name=f"{project.id}-merge-queue",
+                name=f"{project.project_id}-merge-queue",
                 change_filter=util.ChangeFilter(
                     repository=project.url,
                     branch_re="(gh-readonly-queue/.*|staging|trying)",
@@ -615,35 +609,36 @@ def config_for_project(
             ),
             # build all pull requests
             schedulers.SingleBranchScheduler(
-                name=f"{project.id}-prs",
+                name=f"{project.project_id}-prs",
                 change_filter=util.ChangeFilter(
-                    repository=project.url, category="pull"
+                    repository=project.url,
+                    category="pull",
                 ),
                 builderNames=[f"{project.name}/nix-eval"],
             ),
             # this is triggered from `nix-eval`
             schedulers.Triggerable(
-                name=f"{project.id}-nix-build",
+                name=f"{project.project_id}-nix-build",
                 builderNames=[f"{project.name}/nix-build"],
             ),
             # this is triggered from `nix-eval` when the build is skipped
             schedulers.Triggerable(
-                name=f"{project.id}-nix-skipped-build",
+                name=f"{project.project_id}-nix-skipped-build",
                 builderNames=[f"{project.name}/nix-skipped-build"],
             ),
             # allow to manually trigger a nix-build
             schedulers.ForceScheduler(
-                name=f"{project.id}-force",
+                name=f"{project.project_id}-force",
                 builderNames=[f"{project.name}/nix-eval"],
                 properties=[
                     util.StringParameter(
                         name="project",
                         label="Name of the GitHub repository.",
                         default=project.name,
-                    )
+                    ),
                 ],
             ),
-        ]
+        ],
     )
     config["builders"].extend(
         [
@@ -665,18 +660,23 @@ def config_for_project(
                 outputs_path=outputs_path,
             ),
             nix_skipped_build_config(project, [SKIPPED_BUILDER_NAME]),
-        ]
+        ],
     )
 
 
 class AnyProjectEndpointMatcher(EndpointMatcherBase):
-    def __init__(self, builders: set[str] = set(), **kwargs: Any) -> None:
+    def __init__(self, builders: set[str] | None = None, **kwargs: Any) -> None:
+        if builders is None:
+            builders = set()
         self.builders = builders
         super().__init__(**kwargs)
 
     @defer.inlineCallbacks
     def check_builder(
-        self, endpoint_object: Any, endpoint_dict: dict[str, Any], object_type: str
+        self,
+        endpoint_object: Any,
+        endpoint_dict: dict[str, Any],
+        object_type: str,
     ) -> Generator[Any, Any, Any]:
         res = yield endpoint_object.get({}, endpoint_dict)
         if res is None:
@@ -684,7 +684,7 @@ class AnyProjectEndpointMatcher(EndpointMatcherBase):
 
         builder = yield self.master.data.get(("builders", res["builderid"]))
         if builder["name"] in self.builders:
-            log.warn(
+            log.warning(
                 "Builder {builder} allowed by {role}: {builders}",
                 builder=builder["name"],
                 role=self.role,
@@ -692,7 +692,7 @@ class AnyProjectEndpointMatcher(EndpointMatcherBase):
             )
             return Match(self.master, **{object_type: res})
         else:
-            log.warn(
+            log.warning(
                 "Builder {builder} not allowed by {role}: {builders}",
                 builder=builder["name"],
                 role=self.role,
@@ -700,17 +700,26 @@ class AnyProjectEndpointMatcher(EndpointMatcherBase):
             )
 
     def match_BuildEndpoint_rebuild(  # noqa: N802
-        self, epobject: Any, epdict: dict[str, Any], options: dict[str, Any]
+        self,
+        epobject: Any,
+        epdict: dict[str, Any],
+        options: dict[str, Any],
     ) -> Generator[Any, Any, Any]:
         return self.check_builder(epobject, epdict, "build")
 
     def match_BuildEndpoint_stop(  # noqa: N802
-        self, epobject: Any, epdict: dict[str, Any], options: dict[str, Any]
+        self,
+        epobject: Any,
+        epdict: dict[str, Any],
+        options: dict[str, Any],
     ) -> Generator[Any, Any, Any]:
         return self.check_builder(epobject, epdict, "build")
 
     def match_BuildRequestEndpoint_stop(  # noqa: N802
-        self, epobject: Any, epdict: dict[str, Any], options: dict[str, Any]
+        self,
+        epobject: Any,
+        epdict: dict[str, Any],
+        options: dict[str, Any],
     ) -> Generator[Any, Any, Any]:
         return self.check_builder(epobject, epdict, "buildrequest")
 
@@ -718,7 +727,7 @@ class AnyProjectEndpointMatcher(EndpointMatcherBase):
 def setup_authz(projects: list[GithubProject], admins: list[str]) -> util.Authz:
     allow_rules = []
     allowed_builders_by_org: defaultdict[str, set[str]] = defaultdict(
-        lambda: {"reload-github-projects"}
+        lambda: {"reload-github-projects"},
     )
 
     for project in projects:
@@ -751,14 +760,13 @@ class NixConfigurator(ConfiguratorBase):
 
     def __init__(
         self,
-        # Shape of this file:
-        # [ { "name": "<worker-name>", "pass": "<worker-password>", "cores": "<cpu-cores>" } ]
+        # Shape of this file: [ { "name": "<worker-name>", "pass": "<worker-password>", "cores": "<cpu-cores>" } ]
         github: GithubConfig,
         url: str,
         nix_supported_systems: list[str],
         nix_eval_worker_count: int | None,
         nix_eval_max_memory_size: int,
-        nix_workers_secret_name: str = "buildbot-nix-workers",
+        nix_workers_secret_name: str = "buildbot-nix-workers",  # noqa: S107
         cachix: CachixConfig | None = None,
         outputs_path: str | None = None,
     ) -> None:
@@ -823,7 +831,7 @@ class NixConfigurator(ConfiguratorBase):
                 [worker_names[0]],
                 self.github.token(),
                 self.github.project_cache_file,
-            )
+            ),
         )
         config["workers"].append(worker.LocalWorker(SKIPPED_BUILDER_NAME))
         config["schedulers"].extend(
@@ -839,7 +847,7 @@ class NixConfigurator(ConfiguratorBase):
                     builderNames=["reload-github-projects"],
                     periodicBuildTimer=12 * 60 * 60,
                 ),
-            ]
+            ],
         )
         config["services"].append(
             reporters.GitHubStatusPush(
@@ -848,11 +856,11 @@ class NixConfigurator(ConfiguratorBase):
                 # we use `virtual_builder_name` in the webinterface
                 # so that we distinguish what has beeing build
                 context=Interpolate("buildbot/%(prop:status_name)s"),
-            )
+            ),
         )
 
         systemd_secrets = secrets.SecretInAFile(
-            dirname=os.environ["CREDENTIALS_DIRECTORY"]
+            dirname=os.environ["CREDENTIALS_DIRECTORY"],
         )
         config["secretsProviders"].append(systemd_secrets)
 
@@ -870,7 +878,7 @@ class NixConfigurator(ConfiguratorBase):
         if "auth" not in config["www"]:
            config["www"].setdefault("avatar_methods", [])
             config["www"]["avatar_methods"].append(
-                util.AvatarGitHub(token=self.github.token())
+                util.AvatarGitHub(token=self.github.token()),
            )
             config["www"]["auth"] = util.GitHubAuth(
                 self.github.oauth_id,
@@ -879,5 +887,6 @@ class NixConfigurator(ConfiguratorBase):
             )
 
         config["www"]["authz"] = setup_authz(
-            admins=self.github.admins, projects=projects
+            admins=self.github.admins,
+            projects=projects,
         )
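
The hunks above are dominated by two mechanical changes: trailing commas and reflowed argument lists on one side, and on the other the new `BuildbotNixError` replacing bare `Exception`, with messages bound to `msg` before raising. The latter is presumably driven by ruff's flake8-errmsg/tryceratops checks once `select = ["ALL"]` is enabled in pyproject.toml (see the last hunk). A minimal stand-alone sketch of that raise pattern:

import json


class BuildbotNixError(Exception):
    """Project-specific error type, mirroring the class added in this diff."""


def parse_line(line: str) -> dict:
    try:
        return json.loads(line)
    except json.JSONDecodeError as e:
        # Build the message first, then raise the project exception with the cause chained.
        msg = f"Failed to parse line: {line}"
        raise BuildbotNixError(msg) from e
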
@@ -1,6 +1,6 @@
+import contextlib
 import http.client
 import json
-import os
 import urllib.request
 from pathlib import Path
 from tempfile import NamedTemporaryFile
@@ -9,6 +9,10 @@ from typing import Any
 from twisted.python import log
 
 
+class GithubError(Exception):
+    pass
+
+
 class HttpResponse:
     def __init__(self, raw: http.client.HTTPResponse) -> None:
         self.raw = raw
@@ -23,26 +27,32 @@ class HttpResponse:
 def http_request(
     url: str,
     method: str = "GET",
-    headers: dict[str, str] = {},
+    headers: dict[str, str] | None = None,
     data: dict[str, Any] | None = None,
 ) -> HttpResponse:
     body = None
     if data:
         body = json.dumps(data).encode("ascii")
+    if headers is None:
+        headers = {}
     headers = headers.copy()
     headers["User-Agent"] = "buildbot-nix"
-    req = urllib.request.Request(url, headers=headers, method=method, data=body)
+
+    if not url.startswith("https:"):
+        msg = "url must be https: {url}"
+        raise GithubError(msg)
+
+    req = urllib.request.Request(  # noqa: S310
+        url, headers=headers, method=method, data=body
+    )
     try:
-        resp = urllib.request.urlopen(req)
+        resp = urllib.request.urlopen(req)  # noqa: S310
     except urllib.request.HTTPError as e:
         resp_body = ""
-        try:
+        with contextlib.suppress(OSError, UnicodeDecodeError):
             resp_body = e.fp.read().decode("utf-8", "replace")
-        except Exception:
-            pass
-        raise Exception(
-            f"Request for {method} {url} failed with {e.code} {e.reason}: {resp_body}"
-        ) from e
+        msg = f"Request for {method} {url} failed with {e.code} {e.reason}: {resp_body}"
+        raise GithubError(msg) from e
     return HttpResponse(resp)
@@ -56,7 +66,8 @@ def paginated_github_request(url: str, token: str) -> list[dict[str, Any]]:
                headers={"Authorization": f"Bearer {token}"},
            )
         except OSError as e:
-            raise Exception(f"failed to fetch {next_url}: {e}") from e
+            msg = f"failed to fetch {next_url}: {e}"
+            raise GithubError(msg) from e
         next_url = None
         link = res.headers()["Link"]
         if link is not None:
@@ -94,7 +105,7 @@ class GithubProject:
         return self.data["html_url"]
 
     @property
-    def id(self) -> str:
+    def project_id(self) -> str:
         return slugify_project_name(self.data["full_name"])
 
     @property
@@ -111,13 +122,21 @@ class GithubProject:
 
 
 def create_project_hook(
-    owner: str, repo: str, token: str, webhook_url: str, webhook_secret: str
+    owner: str,
+    repo: str,
+    token: str,
+    webhook_url: str,
+    webhook_secret: str,
 ) -> None:
     hooks = paginated_github_request(
-        f"https://api.github.com/repos/{owner}/{repo}/hooks?per_page=100", token
+        f"https://api.github.com/repos/{owner}/{repo}/hooks?per_page=100",
+        token,
     )
     config = dict(
-        url=webhook_url, content_type="json", insecure_ssl="0", secret=webhook_secret
+        url=webhook_url,
+        content_type="json",
+        insecure_ssl="0",
+        secret=webhook_secret,
     )
     data = dict(name="web", active=True, events=["push", "pull_request"], config=config)
     headers = {
@@ -149,18 +168,19 @@ def refresh_projects(github_token: str, repo_cache_file: Path) -> None:
         if not repo["permissions"]["admin"]:
             name = repo["full_name"]
             log.msg(
-                f"skipping {name} because we do not have admin privileges, needed for hook management"
+                f"skipping {name} because we do not have admin privileges, needed for hook management",
             )
         else:
             repos.append(repo)
 
     with NamedTemporaryFile("w", delete=False, dir=repo_cache_file.parent) as f:
+        path = Path(f.name)
         try:
             f.write(json.dumps(repos))
             f.flush()
-            os.rename(f.name, repo_cache_file)
+            path.rename(repo_cache_file)
         except OSError:
-            os.unlink(f.name)
+            path.unlink()
             raise
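
In `http_request`, the `try`/`except Exception: pass` around decoding the error body becomes `contextlib.suppress` with a narrowed exception list, and failures are re-raised as the new `GithubError`. A small stand-alone sketch of the suppress pattern (the byte string is illustrative):

import contextlib

raw = b"\xff\xfe partial body"

# Old shape: overly broad and silent.
body = ""
try:
    body = raw.decode("utf-8", "replace")
except Exception:
    pass

# New shape: the intent ("ignore decode problems") is explicit and narrow.
body = ""
with contextlib.suppress(OSError, UnicodeDecodeError):
    body = raw.decode("utf-8", "replace")
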
@@ -1,9 +1,7 @@
-#!/usr/bin/env python3
-
 import multiprocessing
 import os
 import socket
-from dataclasses import dataclass
+from dataclasses import dataclass, field
 from pathlib import Path
 
 from buildbot_worker.bot import Worker
@@ -18,19 +16,27 @@ def require_env(key: str) -> str:
 
 @dataclass
 class WorkerConfig:
-    password: str = Path(require_env("WORKER_PASSWORD_FILE")).read_text().rstrip("\r\n")
-    worker_count: int = int(
-        os.environ.get("WORKER_COUNT", str(multiprocessing.cpu_count()))
+    password: str = field(
+        default_factory=lambda: Path(require_env("WORKER_PASSWORD_FILE"))
+        .read_text()
+        .rstrip("\r\n")
     )
-    buildbot_dir: str = require_env("BUILDBOT_DIR")
-    master_url: str = require_env("MASTER_URL")
+    worker_count: int = int(
+        os.environ.get("WORKER_COUNT", str(multiprocessing.cpu_count())),
+    )
+    buildbot_dir: Path = field(
+        default_factory=lambda: Path(require_env("BUILDBOT_DIR"))
+    )
+    master_url: str = field(default_factory=lambda: require_env("MASTER_URL"))
 
 
 def setup_worker(
-    application: service.Application, id: int, config: WorkerConfig
+    application: service.Application,
+    builder_id: int,
+    config: WorkerConfig,
 ) -> None:
-    basedir = f"{config.buildbot_dir}-{id:03}"
-    os.makedirs(basedir, mode=0o700, exist_ok=True)
+    basedir = config.buildbot_dir.parent / f"{config.buildbot_dir.name}-{builder_id:03}"
+    basedir.mkdir(parents=True, exist_ok=True, mode=0o700)
 
     hostname = socket.gethostname()
     workername = f"{hostname}-{id:03}"
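
The `WorkerConfig` change swaps plain defaults for `field(default_factory=...)`. With a plain default, `require_env(...)` runs while the class body is executed, that is at import time, so the module could not even be imported without the environment set; a factory defers the lookup until the dataclass is instantiated. A sketch under that assumption (`require_env` below is a stand-in with the same shape as the helper named in the hunk header):

import os
from dataclasses import dataclass, field


def require_env(key: str) -> str:
    value = os.environ.get(key)
    if value is None:
        message = f"environment variable {key} is not set"
        raise RuntimeError(message)
    return value


@dataclass
class LazyConfig:
    # With default_factory the lookup runs per instance, when LazyConfig() is built.
    master_url: str = field(default_factory=lambda: require_env("MASTER_URL"))


# Defining LazyConfig needs no environment; only construction does:
os.environ["MASTER_URL"] = "https://buildbot.example.com"  # hypothetical value
print(LazyConfig().master_url)

# A plain default such as
#     master_url: str = require_env("MASTER_URL")
# would instead run require_env() while the class body executes, which is what the
# old WorkerConfig did.
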
@@ -37,6 +37,8 @@
       packages.default = pkgs.mkShell {
         packages = [
           pkgs.bashInteractive
+          pkgs.mypy
+          pkgs.ruff
         ];
       };
       packages.buildbot-nix = pkgs.python3.pkgs.callPackage ./default.nix { };

@@ -21,18 +21,68 @@ classifiers = [
   "Programming Language :: Python"
 ]
 version = "0.0.1"
+scripts = { buildbot-effects = "hercules_effects.cli:main" }
 
 [tool.setuptools]
-packages = ["buildbot_nix"]
+packages = [
+  "buildbot_nix",
+  "buildbot_effects"
+]
 
 [tool.ruff]
 target-version = "py311"
 line-length = 88
-select = ["E", "F", "I", "U", "N"]
+select = ["ALL"]
-ignore = [ "E501" ]
+ignore = [
+  # pydocstyle
+  "D",
+  # todo comments
+  "TD",
+  # fixmes
+  "FIX",
+
+  # Unused function argument
+  "ARG001",
+  "ARG002",
+
+  # Missing type annotation for `self` in method
+  "ANN101",
+  # Dynamically typed expressions (typing.Any)
+  "ANN401",
+  # Trailing comma missing
+  "COM812",
+  # Unnecessary `dict` call (rewrite as a literal)
+  "C408",
+  # Boolean-typed positional argument in function definition
+  "FBT001",
+  # Logging statement uses f-string
+  "G004",
+  # disabled on ruff's recommendation as causes problems with the formatter
+  "ISC001",
+  # Use of `assert` detected
+  "S101",
+  # `subprocess` call: check for execution of untrusted input
+  "S603",
+  # Starting a process with a partial executable path
+  "S607",
+  # Boolean default positional argument in function definition
+  "FBT002",
+
+  # Too many statements
+  "PLR0915",
+  # Too many arguments in function definition
+  "PLR0913",
+  "PLR0912", # Too many branches
+  # $X is too complex
+  "C901",
+
+  "E501", # line too long
+  "T201", # `print` found
+  "PLR2004", # Magic value used in comparison
+]
 
 [tool.mypy]
-python_version = "3.10"
+python_version = "3.11"
 pretty = true
 warn_redundant_casts = true
 disallow_untyped_calls = true