Merge pull request #65 from Mic92/hercules
Hercules CI effects: add CLI
Commit 22b4caf6fc
bin/buildbot-effects (new executable file, +9 lines)
@@ -0,0 +1,9 @@
#!/usr/bin/env python
import sys
from pathlib import Path
sys.path.append(str(Path(__file__).parent.parent))

from buildbot_effects.cli import main

if __name__ == '__main__':
    main()
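For orientation, a hypothetical invocation of the new CLI from a checkout of a flake that exposes a herculesCI output. The subcommand names ("list", "run", "run-all") and the --secrets flag come from buildbot_effects/cli.py below; the effect name "deploy" mirrors the deploy.run attribute that instantiate_effects selects. This sketch is not part of the commit.

# Hypothetical usage sketch, not part of this commit.
import subprocess

# enumerate the effects exposed under onPush.default.outputs.hci-effects
subprocess.run(["./bin/buildbot-effects", "list"], check=True)

# run an effect, passing secrets from a JSON file (file name is an example)
subprocess.run(
    ["./bin/buildbot-effects", "--secrets", "secrets.json", "run", "deploy"],
    check=True,
)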
buildbot_effects/__init__.py (new file, +243 lines)
@@ -0,0 +1,243 @@
import json
import os
import shlex
import shutil
import subprocess
import sys
from collections.abc import Iterator
from contextlib import contextmanager
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import IO, Any

from .options import EffectsOptions


class BuildbotEffectsError(Exception):
    pass


def run(
    cmd: list[str],
    stdin: int | IO[str] | None = None,
    stdout: int | IO[str] | None = None,
    stderr: int | IO[str] | None = None,
    verbose: bool = True,
) -> subprocess.CompletedProcess[str]:
    if verbose:
        print("$", shlex.join(cmd), file=sys.stderr)
    return subprocess.run(
        cmd,
        check=True,
        text=True,
        stdin=stdin,
        stdout=stdout,
        stderr=stderr,
    )


def git_command(args: list[str], path: Path) -> str:
    cmd = ["git", "-C", str(path), *args]
    proc = run(cmd, stdout=subprocess.PIPE)
    return proc.stdout.strip()


def get_git_rev(path: Path) -> str:
    return git_command(["rev-parse", "--verify", "HEAD"], path)


def get_git_branch(path: Path) -> str:
    return git_command(["rev-parse", "--abbrev-ref", "HEAD"], path)


def get_git_remote_url(path: Path) -> str | None:
    try:
        return git_command(["remote", "get-url", "origin"], path)
    except subprocess.CalledProcessError:
        return None


def git_get_tag(path: Path, rev: str) -> str | None:
    tags = git_command(["tag", "--points-at", rev], path)
    if tags:
        return tags.splitlines()[0]
    return None


def effects_args(opts: EffectsOptions) -> dict[str, Any]:
    rev = opts.rev or get_git_rev(opts.path)
    short_rev = rev[:7]
    branch = opts.branch or get_git_branch(opts.path)
    repo = opts.repo or opts.path.name
    tag = opts.tag or git_get_tag(opts.path, rev)
    url = opts.url or get_git_remote_url(opts.path)
    primary_repo = dict(
        name=repo,
        branch=branch,
        # TODO: support ref
        ref=None,
        tag=tag,
        rev=rev,
        shortRev=short_rev,
        remoteHttpUrl=url,
    )
    return {
        "primaryRepo": primary_repo,
        **primary_repo,
    }


def nix_command(*args: str) -> list[str]:
    return ["nix", "--extra-experimental-features", "nix-command flakes", *args]


def effect_function(opts: EffectsOptions) -> str:
    args = effects_args(opts)
    rev = args["rev"]
    escaped_args = json.dumps(json.dumps(args))
    url = json.dumps(f"git+file://{opts.path}?rev={rev}#")
    return f"""(((builtins.getFlake {url}).outputs.herculesCI (builtins.fromJSON {escaped_args})).onPush.default.outputs.hci-effects)"""


def list_effects(opts: EffectsOptions) -> list[str]:
    cmd = nix_command(
        "eval",
        "--json",
        "--expr",
        f"builtins.attrNames {effect_function(opts)}",
    )
    proc = run(cmd, stdout=subprocess.PIPE)
    return json.loads(proc.stdout)


def instantiate_effects(opts: EffectsOptions) -> str:
    cmd = [
        "nix-instantiate",
        "--expr",
        f"{effect_function(opts)}.deploy.run",
    ]
    proc = run(cmd, stdout=subprocess.PIPE)
    return proc.stdout.rstrip()


def parse_derivation(path: str) -> dict[str, Any]:
    cmd = [
        "nix",
        "--extra-experimental-features",
        "nix-command flakes",
        "derivation",
        "show",
        f"{path}^*",
    ]
    proc = run(cmd, stdout=subprocess.PIPE)
    return json.loads(proc.stdout)


def env_args(env: dict[str, str]) -> list[str]:
    result = []
    for k, v in env.items():
        result.append("--setenv")
        result.append(f"{k}")
        result.append(f"{v}")
    return result


@contextmanager
def pipe() -> Iterator[tuple[IO[str], IO[str]]]:
    r, w = os.pipe()
    r_file = os.fdopen(r, "r")
    w_file = os.fdopen(w, "w")
    try:
        yield r_file, w_file
    finally:
        r_file.close()
        w_file.close()


def run_effects(
    drv_path: str,
    drv: dict[str, Any],
    secrets: dict[str, Any] | None = None,
) -> None:
    if secrets is None:
        secrets = {}
    builder = drv["builder"]
    args = drv["args"]
    sandboxed_cmd = [
        builder,
        *args,
    ]
    env = {}
    env["IN_HERCULES_CI_EFFECT"] = "true"
    env["HERCULES_CI_SECRETS_JSON"] = "/run/secrets.json"
    env["NIX_BUILD_TOP"] = "/build"
    bwrap = shutil.which("bwrap")
    if bwrap is None:
        msg = "'bwrap' executable not found"
        raise BuildbotEffectsError(msg)

    bubblewrap_cmd = [
        "nix",
        "develop",
        "-i",
        f"{drv_path}^*",
        "-c",
        bwrap,
        "--unshare-all",
        "--share-net",
        "--new-session",
        "--die-with-parent",
        "--dir",
        "/build",
        "--chdir",
        "/build",
        "--tmpfs",
        "/tmp",  # noqa: S108
        "--tmpfs",
        "/build",
        "--proc",
        "/proc",
        "--dev",
        "/dev",
        "--ro-bind",
        "/etc/resolv.conf",
        "/etc/resolv.conf",
        "--ro-bind",
        "/etc/hosts",
        "/etc/hosts",
        "--ro-bind",
        "/nix/store",
        "/nix/store",
    ]

    with NamedTemporaryFile() as tmp:
        secrets = secrets.copy()
        secrets["hercules-ci"] = {"data": {"token": "dummy"}}
        tmp.write(json.dumps(secrets).encode())
        tmp.flush()  # ensure the secrets are on disk before bwrap bind-mounts the file
        bubblewrap_cmd.extend(
            [
                "--ro-bind",
                tmp.name,
                "/run/secrets.json",
            ],
        )
        bubblewrap_cmd.extend(env_args(env))
        bubblewrap_cmd.append("--")
        bubblewrap_cmd.extend(sandboxed_cmd)
        with pipe() as (r_file, w_file):
            print("$", shlex.join(bubblewrap_cmd), file=sys.stderr)
            proc = subprocess.Popen(
                bubblewrap_cmd,
                text=True,
                stdin=subprocess.DEVNULL,
                stdout=w_file,
                stderr=w_file,
            )
            w_file.close()
            with proc:
                for line in r_file:
                    print(line, end="")
                proc.wait()
                if proc.returncode != 0:
                    msg = f"command failed with exit code {proc.returncode}"
                    raise BuildbotEffectsError(msg)
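Taken together, the module is a small pipeline: derive git metadata, evaluate the flake's herculesCI output, nix-instantiate the selected effect's run derivation, inspect it with `nix derivation show`, and execute the builder under bubblewrap. A minimal sketch of that composition follows; it assumes the current directory is a flake checkout whose herculesCI output defines a deploy effect, mirroring run_command in cli.py below, and is not part of the commit.

# Illustrative composition of the functions above; not part of this commit.
from pathlib import Path

from buildbot_effects import (
    instantiate_effects,
    list_effects,
    parse_derivation,
    run_effects,
)
from buildbot_effects.options import EffectsOptions

opts = EffectsOptions(path=Path.cwd())
print(list_effects(opts))             # attribute names of the available effects

drv_path = instantiate_effects(opts)  # .drv path of the deploy.run effect
drv = next(iter(parse_derivation(drv_path).values()))
run_effects(drv_path, drv, secrets={})  # runs builder+args inside bwrap via `nix develop`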
buildbot_effects/cli.py (new file, +85 lines)
@@ -0,0 +1,85 @@
import argparse
import json
from collections.abc import Callable
from pathlib import Path

from . import instantiate_effects, list_effects, parse_derivation, run_effects
from .options import EffectsOptions


def list_command(options: EffectsOptions) -> None:
    print(list_effects(options))


def run_command(options: EffectsOptions) -> None:
    drv_path = instantiate_effects(options)
    drvs = parse_derivation(drv_path)
    drv = next(iter(drvs.values()))

    secrets = json.loads(options.secrets.read_text()) if options.secrets else {}
    run_effects(drv_path, drv, secrets=secrets)


def run_all_command(options: EffectsOptions) -> None:
    print("TODO")


def parse_args() -> tuple[Callable[[EffectsOptions], None], EffectsOptions]:
    parser = argparse.ArgumentParser(description="Run effects from a hercules-ci flake")
    parser.add_argument(
        "--secrets",
        type=Path,
        help="Path to a json file with secrets",
    )
    parser.add_argument(
        "--rev",
        type=str,
        help="Git revision to use",
    )
    parser.add_argument(
        "--branch",
        type=str,
        help="Git branch to use",
    )
    parser.add_argument(
        "--repo",
        type=str,
        help="Name of the git repository",
    )
    parser.add_argument(
        "--path",
        type=str,
        help="Path to the repository",
    )
    subparser = parser.add_subparsers(
        dest="command",
        required=True,
        help="Command to run",
    )
    list_parser = subparser.add_parser(
        "list",
        help="List available effects",
    )
    list_parser.set_defaults(command=list_command)
    run_parser = subparser.add_parser(
        "run",
        help="Run an effect",
    )
    run_parser.set_defaults(command=run_command)
    run_parser.add_argument(
        "effect",
        help="Effect to run",
    )
    run_all_parser = subparser.add_parser(
        "run-all",
        help="Run all effects",
    )
    run_all_parser.set_defaults(command=run_all_command)

    args = parser.parse_args()
    return args.command, EffectsOptions(secrets=args.secrets)


def main() -> None:
    command, options = parse_args()
    command(options)
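An illustration of what parse_args() hands back for a typical command line. Note that at this stage only --secrets is forwarded into EffectsOptions; --rev, --branch, --repo, and --path are parsed but not yet propagated. The snippet is illustrative only and not part of the commit.

# Illustrative only; not part of this commit.
import sys
from pathlib import Path

from buildbot_effects.cli import parse_args

sys.argv = ["buildbot-effects", "--secrets", "secrets.json", "run", "deploy"]
command, options = parse_args()
assert options.secrets == Path("secrets.json")
command(options)  # dispatches to run_command(options)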
buildbot_effects/options.py (new file, +13 lines)
@@ -0,0 +1,13 @@
from dataclasses import dataclass, field
from pathlib import Path


@dataclass
class EffectsOptions:
    secrets: Path | None = None
    path: Path = field(default_factory=lambda: Path.cwd())
    repo: str | None = ""
    rev: str | None = None
    branch: str | None = None
    url: str | None = None
    tag: str | None = None
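EffectsOptions is pure metadata for effects_args(): every field left at its default is re-derived from the checkout via git. A short sketch of overriding that metadata explicitly; all values below are made-up examples and the snippet is not part of the commit.

# Hypothetical values for illustration only.
from pathlib import Path

from buildbot_effects.options import EffectsOptions

opts = EffectsOptions(
    path=Path("/var/lib/buildbot/checkout"),
    rev="0123456789abcdef0123456789abcdef01234567",
    branch="main",
    repo="buildbot-nix",
)
# Fields left unset (url, tag) are still resolved from `git` by effects_args().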
buildbot_nix/__init__.py (modified)

@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-
 import json
 import multiprocessing
 import os
@@ -10,25 +8,28 @@ from collections import defaultdict
 from collections.abc import Generator
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 from buildbot.configurators import ConfiguratorBase
 from buildbot.plugins import reporters, schedulers, secrets, steps, util, worker
 from buildbot.process import buildstep, logobserver, remotecommand
-from buildbot.process.log import Log
 from buildbot.process.project import Project
 from buildbot.process.properties import Interpolate, Properties
 from buildbot.process.results import ALL_RESULTS, statusToString
 from buildbot.steps.trigger import Trigger
 from buildbot.util import asyncSleep
 from buildbot.www.authz.endpointmatchers import EndpointMatcherBase, Match
 
+if TYPE_CHECKING:
+    from buildbot.process.log import Log
+
 from twisted.internet import defer, threads
 from twisted.logger import Logger
 from twisted.python.failure import Failure
 
 from .github_projects import (
     GithubProject,
-    create_project_hook,  # noqa: E402
+    create_project_hook,
     load_projects,
     refresh_projects,
     slugify_project_name,
@@ -39,10 +40,12 @@ SKIPPED_BUILDER_NAME = "skipped-builds"
 log = Logger()
 
 
+class BuildbotNixError(Exception):
+    pass
+
+
 class BuildTrigger(Trigger):
-    """
-    Dynamic trigger that creates a build for every attribute.
-    """
+    """Dynamic trigger that creates a build for every attribute."""
 
     def __init__(
         self,
@@ -122,9 +125,7 @@ class BuildTrigger(Trigger):
         return triggered_schedulers
 
     def getCurrentSummary(self) -> dict[str, str]:  # noqa: N802
-        """
-        The original build trigger will use the generic builder name `nix-build` in this case, which is not helpful
-        """
+        """The original build trigger will use the generic builder name `nix-build` in this case, which is not helpful"""
         if not self.triggeredNames:
             return {"step": "running"}
         summary = []
@@ -133,14 +134,13 @@ class BuildTrigger(Trigger):
             count = self._result_list.count(status)
             if count:
                 summary.append(
-                    f"{self._result_list.count(status)} {statusToString(status, count)}"
+                    f"{self._result_list.count(status)} {statusToString(status, count)}",
                 )
         return {"step": f"({', '.join(summary)})"}
 
 
 class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
-    """
-    Parses the output of `nix-eval-jobs` and triggers a `nix-build` build for
+    """Parses the output of `nix-eval-jobs` and triggers a `nix-build` build for
     every attribute.
     """
 
@@ -168,7 +168,8 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
             try:
                 job = json.loads(line)
             except json.JSONDecodeError as e:
-                raise Exception(f"Failed to parse line: {line}") from e
+                msg = f"Failed to parse line: {line}"
+                raise BuildbotNixError(msg) from e
             jobs.append(job)
         build_props = self.build.getProperties()
         repo_name = build_props.getProperty(
@@ -179,9 +180,7 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
         filtered_jobs = []
         for job in jobs:
             system = job.get("system")
-            if not system:  # report eval errors
-                filtered_jobs.append(job)
-            elif system in self.supported_systems:
+            if not system or system in self.supported_systems:  # report eval errors
                 filtered_jobs.append(job)
 
         self.build.addStepsAfterCurrentStep(
@@ -191,8 +190,8 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
                     skipped_builds_scheduler=f"{project_id}-nix-skipped-build",
                     name="build flake",
                     jobs=filtered_jobs,
-                )
-            ]
+                ),
+            ],
         )
 
         return result
@@ -203,13 +202,12 @@ class RetryCounter:
     def __init__(self, retries: int) -> None:
         self.builds: dict[uuid.UUID, int] = defaultdict(lambda: retries)
 
-    def retry_build(self, id: uuid.UUID) -> int:
-        retries = self.builds[id]
+    def retry_build(self, build_id: uuid.UUID) -> int:
+        retries = self.builds[build_id]
         if retries > 1:
-            self.builds[id] = retries - 1
+            self.builds[build_id] = retries - 1
             return retries
-        else:
-            return 0
+        return 0
 
 
 # For now we limit this to two. Often this allows us to make the error log
@@ -218,9 +216,7 @@ RETRY_COUNTER = RetryCounter(retries=2)
 
 
 class EvalErrorStep(steps.BuildStep):
-    """
-    Shows the error message of a failed evaluation.
-    """
+    """Shows the error message of a failed evaluation."""
 
     @defer.inlineCallbacks
     def run(self) -> Generator[Any, object, Any]:
@@ -233,9 +229,7 @@ class EvalErrorStep(steps.BuildStep):
 
 
 class NixBuildCommand(buildstep.ShellMixin, steps.BuildStep):
-    """
-    Builds a nix derivation.
-    """
+    """Builds a nix derivation."""
 
     def __init__(self, **kwargs: Any) -> None:
         kwargs = self.setupShellMixin(kwargs)
@@ -256,8 +250,7 @@ class NixBuildCommand(buildstep.ShellMixin, steps.BuildStep):
 
 
 class UpdateBuildOutput(steps.BuildStep):
-    """
-    Updates store paths in a public www directory.
+    """Updates store paths in a public www directory.
     This is useful to prefetch updates without having to evaluate
     on the target machine.
     """
@@ -269,11 +262,11 @@ class UpdateBuildOutput(steps.BuildStep):
     def run(self) -> Generator[Any, object, Any]:
         props = self.build.getProperties()
         if props.getProperty("branch") != props.getProperty(
-            "github.repository.default_branch"
+            "github.repository.default_branch",
        ):
             return util.SKIPPED
 
-        attr = os.path.basename(props.getProperty("attr"))
+        attr = Path(props.getProperty("attr")).name
         out_path = props.getProperty("out_path")
         # XXX don't hardcode this
         self.path.mkdir(parents=True, exist_ok=True)
@@ -319,12 +312,12 @@ def reload_github_projects(
     github_token_secret: str,
     project_cache_file: Path,
 ) -> util.BuilderConfig:
-    """
-    Updates the flake and opens a PR for it.
-    """
+    """Updates the flake and opens a PR for it."""
     factory = util.BuildFactory()
     factory.addStep(
-        ReloadGithubProjects(github_token_secret, project_cache_file=project_cache_file)
+        ReloadGithubProjects(
+            github_token_secret, project_cache_file=project_cache_file
+        ),
     )
     return util.BuilderConfig(
         name="reload-github-projects",
@@ -338,20 +331,25 @@ def reload_github_projects(
 class GitWithRetry(steps.Git):
     @defer.inlineCallbacks
     def run_vc(
-        self, branch: str, revision: str, patch: str
+        self,
+        branch: str,
+        revision: str,
+        patch: str,
     ) -> Generator[Any, object, Any]:
         retry_counter = 0
         while True:
             try:
                 res = yield super().run_vc(branch, revision, patch)
+                return res
-            except Exception as e:
+            except Exception as e:  # noqa: BLE001
                 retry_counter += 1
                 if retry_counter == 3:
-                    raise e
+                    msg = "Failed to clone"
+                    raise BuildbotNixError(msg) from e
                 log: Log = yield self.addLog("log")
                 yield log.addStderr(f"Retrying git clone (error: {e})\n")
                 yield asyncSleep(2 << retry_counter)  # 2, 4, 8
-            else:
-                return res
 
 
 def nix_eval_config(
@@ -363,14 +361,13 @@ def nix_eval_config(
     worker_count: int,
     max_memory_size: int,
 ) -> util.BuilderConfig:
-    """
-    Uses nix-eval-jobs to evaluate hydraJobs from flake.nix in parallel.
+    """Uses nix-eval-jobs to evaluate hydraJobs from flake.nix in parallel.
     For each evaluated attribute a new build pipeline is started.
     """
     factory = util.BuildFactory()
     # check out the source
     url_with_secret = util.Interpolate(
-        f"https://git:%(secret:{github_token_secret})s@github.com/%(prop:project)s"
+        f"https://git:%(secret:{github_token_secret})s@github.com/%(prop:project)s",
     )
     factory.addStep(
         GitWithRetry(
@@ -378,7 +375,7 @@ def nix_eval_config(
             method="clean",
             submodules=True,
             haltOnFailure=True,
-        )
+        ),
     )
 
     factory.addStep(
@@ -405,7 +402,7 @@ def nix_eval_config(
             ],
             haltOnFailure=True,
             locks=[eval_lock.access("exclusive")],
-        )
+        ),
     )
 
     return util.BuilderConfig(
@@ -438,9 +435,7 @@ def nix_build_config(
     cachix: CachixConfig | None = None,
     outputs_path: Path | None = None,
 ) -> util.BuilderConfig:
-    """
-    Builds one nix flake attribute.
-    """
+    """Builds one nix flake attribute."""
     factory = util.BuildFactory()
     factory.addStep(
         NixBuildCommand(
@@ -466,7 +461,7 @@ def nix_build_config(
             # We increase this over the default since the build output might end up in a different `nix build`.
             timeout=60 * 60 * 3,
             haltOnFailure=True,
-        )
+        ),
     )
     if cachix:
         factory.addStep(
@@ -479,7 +474,7 @@ def nix_build_config(
                 cachix.name,
                 util.Interpolate("result-%(prop:attr)s"),
             ],
-        )
+        ),
     )
 
     factory.addStep(
@@ -490,27 +485,27 @@ def nix_build_config(
                 "--add-root",
                 # FIXME: cleanup old build attributes
                 util.Interpolate(
-                    "/nix/var/nix/gcroots/per-user/buildbot-worker/%(prop:project)s/%(prop:attr)s"
+                    "/nix/var/nix/gcroots/per-user/buildbot-worker/%(prop:project)s/%(prop:attr)s",
                 ),
                 "-r",
                 util.Property("out_path"),
             ],
             doStepIf=lambda s: s.getProperty("branch")
             == s.getProperty("github.repository.default_branch"),
-        )
+        ),
     )
     factory.addStep(
         steps.ShellCommand(
             name="Delete temporary gcroots",
             command=["rm", "-f", util.Interpolate("result-%(prop:attr)s")],
-        )
+        ),
     )
     if outputs_path is not None:
         factory.addStep(
             UpdateBuildOutput(
                 name="Update build output",
                 path=outputs_path,
-            )
+            ),
         )
     return util.BuilderConfig(
         name=f"{project.name}/nix-build",
@@ -523,18 +518,17 @@ def nix_build_config(
 
 
 def nix_skipped_build_config(
-    project: GithubProject, worker_names: list[str]
+    project: GithubProject,
+    worker_names: list[str],
 ) -> util.BuilderConfig:
-    """
-    Dummy builder that is triggered when a build is skipped.
-    """
+    """Dummy builder that is triggered when a build is skipped."""
     factory = util.BuildFactory()
     factory.addStep(
         EvalErrorStep(
             name="Nix evaluation",
             doStepIf=lambda s: s.getProperty("error"),
             hideStepIf=lambda _, s: not s.getProperty("error"),
-        )
+        ),
     )
 
     # This is just a dummy step showing the cached build
@@ -543,7 +537,7 @@ def nix_skipped_build_config(
             name="Nix build (cached)",
             doStepIf=lambda _: False,
             hideStepIf=lambda _, s: s.getProperty("error"),
-        )
+        ),
     )
     return util.BuilderConfig(
         name=f"{project.name}/nix-skipped-build",
@@ -595,7 +589,7 @@ def config_for_project(
     config["schedulers"].extend(
         [
             schedulers.SingleBranchScheduler(
-                name=f"{project.id}-default-branch",
+                name=f"{project.project_id}-default-branch",
                 change_filter=util.ChangeFilter(
                     repository=project.url,
                     filter_fn=lambda c: c.branch
@@ -606,7 +600,7 @@ def config_for_project(
             ),
             # this is compatible with bors or github's merge queue
             schedulers.SingleBranchScheduler(
-                name=f"{project.id}-merge-queue",
+                name=f"{project.project_id}-merge-queue",
                 change_filter=util.ChangeFilter(
                     repository=project.url,
                     branch_re="(gh-readonly-queue/.*|staging|trying)",
@@ -615,35 +609,36 @@ def config_for_project(
             ),
             # build all pull requests
             schedulers.SingleBranchScheduler(
-                name=f"{project.id}-prs",
+                name=f"{project.project_id}-prs",
                 change_filter=util.ChangeFilter(
-                    repository=project.url, category="pull"
+                    repository=project.url,
+                    category="pull",
                 ),
                 builderNames=[f"{project.name}/nix-eval"],
             ),
             # this is triggered from `nix-eval`
             schedulers.Triggerable(
-                name=f"{project.id}-nix-build",
+                name=f"{project.project_id}-nix-build",
                 builderNames=[f"{project.name}/nix-build"],
             ),
             # this is triggered from `nix-eval` when the build is skipped
             schedulers.Triggerable(
-                name=f"{project.id}-nix-skipped-build",
+                name=f"{project.project_id}-nix-skipped-build",
                 builderNames=[f"{project.name}/nix-skipped-build"],
             ),
             # allow to manually trigger a nix-build
             schedulers.ForceScheduler(
-                name=f"{project.id}-force",
+                name=f"{project.project_id}-force",
                 builderNames=[f"{project.name}/nix-eval"],
                 properties=[
                     util.StringParameter(
                         name="project",
                         label="Name of the GitHub repository.",
                         default=project.name,
-                    )
+                    ),
                 ],
             ),
-        ]
+        ],
     )
     config["builders"].extend(
         [
@@ -665,18 +660,23 @@ def config_for_project(
                 outputs_path=outputs_path,
             ),
             nix_skipped_build_config(project, [SKIPPED_BUILDER_NAME]),
-        ]
+        ],
    )
 
 
 class AnyProjectEndpointMatcher(EndpointMatcherBase):
-    def __init__(self, builders: set[str] = set(), **kwargs: Any) -> None:
+    def __init__(self, builders: set[str] | None = None, **kwargs: Any) -> None:
+        if builders is None:
+            builders = set()
         self.builders = builders
         super().__init__(**kwargs)
 
     @defer.inlineCallbacks
     def check_builder(
-        self, endpoint_object: Any, endpoint_dict: dict[str, Any], object_type: str
+        self,
+        endpoint_object: Any,
+        endpoint_dict: dict[str, Any],
+        object_type: str,
     ) -> Generator[Any, Any, Any]:
         res = yield endpoint_object.get({}, endpoint_dict)
         if res is None:
@@ -684,7 +684,7 @@ class AnyProjectEndpointMatcher(EndpointMatcherBase):
 
         builder = yield self.master.data.get(("builders", res["builderid"]))
         if builder["name"] in self.builders:
-            log.warn(
+            log.warning(
                 "Builder {builder} allowed by {role}: {builders}",
                 builder=builder["name"],
                 role=self.role,
@@ -692,7 +692,7 @@ class AnyProjectEndpointMatcher(EndpointMatcherBase):
             )
             return Match(self.master, **{object_type: res})
         else:
-            log.warn(
+            log.warning(
                 "Builder {builder} not allowed by {role}: {builders}",
                 builder=builder["name"],
                 role=self.role,
@@ -700,17 +700,26 @@ class AnyProjectEndpointMatcher(EndpointMatcherBase):
             )
 
     def match_BuildEndpoint_rebuild(  # noqa: N802
-        self, epobject: Any, epdict: dict[str, Any], options: dict[str, Any]
+        self,
+        epobject: Any,
+        epdict: dict[str, Any],
+        options: dict[str, Any],
     ) -> Generator[Any, Any, Any]:
         return self.check_builder(epobject, epdict, "build")
 
     def match_BuildEndpoint_stop(  # noqa: N802
-        self, epobject: Any, epdict: dict[str, Any], options: dict[str, Any]
+        self,
+        epobject: Any,
+        epdict: dict[str, Any],
+        options: dict[str, Any],
    ) -> Generator[Any, Any, Any]:
         return self.check_builder(epobject, epdict, "build")
 
     def match_BuildRequestEndpoint_stop(  # noqa: N802
-        self, epobject: Any, epdict: dict[str, Any], options: dict[str, Any]
+        self,
+        epobject: Any,
+        epdict: dict[str, Any],
+        options: dict[str, Any],
     ) -> Generator[Any, Any, Any]:
         return self.check_builder(epobject, epdict, "buildrequest")
 
@@ -718,7 +727,7 @@ class AnyProjectEndpointMatcher(EndpointMatcherBase):
 def setup_authz(projects: list[GithubProject], admins: list[str]) -> util.Authz:
     allow_rules = []
     allowed_builders_by_org: defaultdict[str, set[str]] = defaultdict(
-        lambda: {"reload-github-projects"}
+        lambda: {"reload-github-projects"},
    )
 
     for project in projects:
@@ -751,14 +760,13 @@ class NixConfigurator(ConfiguratorBase):
 
     def __init__(
        self,
-        # Shape of this file:
-        # [ { "name": "<worker-name>", "pass": "<worker-password>", "cores": "<cpu-cores>" } ]
+        # Shape of this file: [ { "name": "<worker-name>", "pass": "<worker-password>", "cores": "<cpu-cores>" } ]
         github: GithubConfig,
         url: str,
         nix_supported_systems: list[str],
         nix_eval_worker_count: int | None,
         nix_eval_max_memory_size: int,
-        nix_workers_secret_name: str = "buildbot-nix-workers",
+        nix_workers_secret_name: str = "buildbot-nix-workers",  # noqa: S107
         cachix: CachixConfig | None = None,
         outputs_path: str | None = None,
     ) -> None:
@@ -823,7 +831,7 @@ class NixConfigurator(ConfiguratorBase):
                 [worker_names[0]],
                 self.github.token(),
                 self.github.project_cache_file,
-            )
+            ),
        )
         config["workers"].append(worker.LocalWorker(SKIPPED_BUILDER_NAME))
         config["schedulers"].extend(
@@ -839,7 +847,7 @@ class NixConfigurator(ConfiguratorBase):
                     builderNames=["reload-github-projects"],
                     periodicBuildTimer=12 * 60 * 60,
                 ),
-            ]
+            ],
         )
         config["services"].append(
             reporters.GitHubStatusPush(
@@ -848,11 +856,11 @@ class NixConfigurator(ConfiguratorBase):
                 # we use `virtual_builder_name` in the webinterface
                # so that we distinguish what has been built
                 context=Interpolate("buildbot/%(prop:status_name)s"),
-            )
+            ),
        )
 
         systemd_secrets = secrets.SecretInAFile(
-            dirname=os.environ["CREDENTIALS_DIRECTORY"]
+            dirname=os.environ["CREDENTIALS_DIRECTORY"],
        )
         config["secretsProviders"].append(systemd_secrets)
 
@@ -870,7 +878,7 @@ class NixConfigurator(ConfiguratorBase):
         if "auth" not in config["www"]:
             config["www"].setdefault("avatar_methods", [])
             config["www"]["avatar_methods"].append(
-                util.AvatarGitHub(token=self.github.token())
+                util.AvatarGitHub(token=self.github.token()),
            )
             config["www"]["auth"] = util.GitHubAuth(
                 self.github.oauth_id,
@@ -879,5 +887,6 @@ class NixConfigurator(ConfiguratorBase):
             )
 
         config["www"]["authz"] = setup_authz(
-            admins=self.github.admins, projects=projects
+            admins=self.github.admins,
+            projects=projects,
        )
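One recurring change in this file (and in github_projects.py below) replaces mutable default arguments such as `builders: set[str] = set()` and `headers: dict[str, str] = {}` with `None` plus an in-body default. A minimal illustration of the pitfall, using a made-up function name; not part of the commit.

# Hypothetical illustration: a mutable default is created once at definition
# time and shared across calls, so state leaks between callers.
def broken(builders: set[str] = set()) -> set[str]:
    builders.add("reload-github-projects")
    return builders

def fixed(builders: set[str] | None = None) -> set[str]:
    if builders is None:
        builders = set()  # fresh set per call
    builders.add("reload-github-projects")
    return builders

assert broken() is broken()    # both calls return the same shared set object
assert fixed() is not fixed()  # each call gets an independent set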
buildbot_nix/github_projects.py (modified)
@@ -1,6 +1,6 @@
+import contextlib
 import http.client
 import json
-import os
 import urllib.request
 from pathlib import Path
 from tempfile import NamedTemporaryFile
@@ -9,6 +9,10 @@ from typing import Any
 from twisted.python import log
 
 
+class GithubError(Exception):
+    pass
+
+
 class HttpResponse:
     def __init__(self, raw: http.client.HTTPResponse) -> None:
         self.raw = raw
@@ -23,26 +27,32 @@ class HttpResponse:
 def http_request(
     url: str,
     method: str = "GET",
-    headers: dict[str, str] = {},
+    headers: dict[str, str] | None = None,
     data: dict[str, Any] | None = None,
 ) -> HttpResponse:
     body = None
     if data:
         body = json.dumps(data).encode("ascii")
+    if headers is None:
+        headers = {}
     headers = headers.copy()
     headers["User-Agent"] = "buildbot-nix"
-    req = urllib.request.Request(url, headers=headers, method=method, data=body)
+
+    if not url.startswith("https:"):
+        msg = f"url must be https: {url}"
+        raise GithubError(msg)
+
+    req = urllib.request.Request(  # noqa: S310
+        url, headers=headers, method=method, data=body
+    )
     try:
-        resp = urllib.request.urlopen(req)
+        resp = urllib.request.urlopen(req)  # noqa: S310
     except urllib.request.HTTPError as e:
         resp_body = ""
-        try:
+        with contextlib.suppress(OSError, UnicodeDecodeError):
             resp_body = e.fp.read().decode("utf-8", "replace")
-        except Exception:
-            pass
-        raise Exception(
-            f"Request for {method} {url} failed with {e.code} {e.reason}: {resp_body}"
-        ) from e
+        msg = f"Request for {method} {url} failed with {e.code} {e.reason}: {resp_body}"
+        raise GithubError(msg) from e
     return HttpResponse(resp)
 
 
@@ -56,7 +66,8 @@ def paginated_github_request(url: str, token: str) -> list[dict[str, Any]]:
                 headers={"Authorization": f"Bearer {token}"},
             )
         except OSError as e:
-            raise Exception(f"failed to fetch {next_url}: {e}") from e
+            msg = f"failed to fetch {next_url}: {e}"
+            raise GithubError(msg) from e
         next_url = None
         link = res.headers()["Link"]
         if link is not None:
@@ -94,7 +105,7 @@ class GithubProject:
         return self.data["html_url"]
 
     @property
-    def id(self) -> str:
+    def project_id(self) -> str:
         return slugify_project_name(self.data["full_name"])
 
     @property
@@ -111,13 +122,21 @@ class GithubProject:
 
 
 def create_project_hook(
-    owner: str, repo: str, token: str, webhook_url: str, webhook_secret: str
+    owner: str,
+    repo: str,
+    token: str,
+    webhook_url: str,
+    webhook_secret: str,
 ) -> None:
     hooks = paginated_github_request(
-        f"https://api.github.com/repos/{owner}/{repo}/hooks?per_page=100", token
+        f"https://api.github.com/repos/{owner}/{repo}/hooks?per_page=100",
+        token,
    )
     config = dict(
-        url=webhook_url, content_type="json", insecure_ssl="0", secret=webhook_secret
+        url=webhook_url,
+        content_type="json",
+        insecure_ssl="0",
+        secret=webhook_secret,
    )
     data = dict(name="web", active=True, events=["push", "pull_request"], config=config)
     headers = {
@@ -149,18 +168,19 @@ def refresh_projects(github_token: str, repo_cache_file: Path) -> None:
         if not repo["permissions"]["admin"]:
             name = repo["full_name"]
             log.msg(
-                f"skipping {name} because we do not have admin privileges, needed for hook management"
+                f"skipping {name} because we do not have admin privileges, needed for hook management",
             )
         else:
             repos.append(repo)
 
     with NamedTemporaryFile("w", delete=False, dir=repo_cache_file.parent) as f:
+        path = Path(f.name)
         try:
             f.write(json.dumps(repos))
             f.flush()
-            os.rename(f.name, repo_cache_file)
+            path.rename(repo_cache_file)
         except OSError:
-            os.unlink(f.name)
+            path.unlink()
             raise
buildbot_nix/worker.py (modified)
@@ -1,9 +1,7 @@
-#!/usr/bin/env python3
-
 import multiprocessing
 import os
 import socket
-from dataclasses import dataclass
+from dataclasses import dataclass, field
 from pathlib import Path
 
 from buildbot_worker.bot import Worker
@@ -18,19 +16,27 @@ def require_env(key: str) -> str:
 
 @dataclass
 class WorkerConfig:
-    password: str = Path(require_env("WORKER_PASSWORD_FILE")).read_text().rstrip("\r\n")
-    worker_count: int = int(
-        os.environ.get("WORKER_COUNT", str(multiprocessing.cpu_count()))
+    password: str = field(
+        default_factory=lambda: Path(require_env("WORKER_PASSWORD_FILE"))
+        .read_text()
+        .rstrip("\r\n")
     )
-    buildbot_dir: str = require_env("BUILDBOT_DIR")
-    master_url: str = require_env("MASTER_URL")
+    worker_count: int = int(
+        os.environ.get("WORKER_COUNT", str(multiprocessing.cpu_count())),
+    )
+    buildbot_dir: Path = field(
+        default_factory=lambda: Path(require_env("BUILDBOT_DIR"))
+    )
+    master_url: str = field(default_factory=lambda: require_env("MASTER_URL"))
 
 
 def setup_worker(
-    application: service.Application, id: int, config: WorkerConfig
+    application: service.Application,
+    builder_id: int,
+    config: WorkerConfig,
 ) -> None:
-    basedir = f"{config.buildbot_dir}-{id:03}"
-    os.makedirs(basedir, mode=0o700, exist_ok=True)
+    basedir = config.buildbot_dir.parent / f"{config.buildbot_dir.name}-{builder_id:03}"
+    basedir.mkdir(parents=True, exist_ok=True, mode=0o700)
 
     hostname = socket.gethostname()
     workername = f"{hostname}-{id:03}"
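The WorkerConfig change above swaps eager class-level defaults for dataclasses.field(default_factory=...). The difference matters because a plain default is evaluated once at import time (so a missing environment variable would crash on import even if the class is never instantiated), while a default_factory runs only when an instance is created. A minimal illustration; the class name is made up, MASTER_URL mirrors the variable used above, and the snippet is not part of the commit.

# Illustration only; not part of this commit.
import os
from dataclasses import dataclass, field

@dataclass
class Example:
    # evaluated lazily, at Example() construction time, not at import time
    master_url: str = field(default_factory=lambda: os.environ["MASTER_URL"])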
flake.nix (modified)
@@ -37,6 +37,8 @@
           packages.default = pkgs.mkShell {
             packages = [
               pkgs.bashInteractive
+              pkgs.mypy
+              pkgs.ruff
             ];
           };
           packages.buildbot-nix = pkgs.python3.pkgs.callPackage ./default.nix { };
pyproject.toml (modified)
@@ -21,18 +21,68 @@ classifiers = [
   "Programming Language :: Python"
 ]
 version = "0.0.1"
+scripts = { buildbot-effects = "buildbot_effects.cli:main" }
 
 [tool.setuptools]
-packages = ["buildbot_nix"]
+packages = [
+  "buildbot_nix",
+  "buildbot_effects"
+]
 
 [tool.ruff]
 target-version = "py311"
 line-length = 88
-select = ["E", "F", "I", "U", "N"]
-ignore = [ "E501" ]
+select = ["ALL"]
+ignore = [
+  # pydocstyle
+  "D",
+  # todo comments
+  "TD",
+  # fixmes
+  "FIX",
+
+  # Unused function argument
+  "ARG001",
+  "ARG002",
+
+  # Missing type annotation for `self` in method
+  "ANN101",
+  # Dynamically typed expressions (typing.Any)
+  "ANN401",
+  # Trailing comma missing
+  "COM812",
+  # Unnecessary `dict` call (rewrite as a literal)
+  "C408",
+  # Boolean-typed positional argument in function definition
+  "FBT001",
+  # Logging statement uses f-string
+  "G004",
+  # disabled on ruff's recommendation as causes problems with the formatter
+  "ISC001",
+  # Use of `assert` detected
+  "S101",
+  # `subprocess` call: check for execution of untrusted input
+  "S603",
+  # Starting a process with a partial executable path
+  "S607",
+  # Boolean default positional argument in function definition
+  "FBT002",
+
+  # Too many statements
+  "PLR0915",
+  # Too many arguments in function definition
+  "PLR0913",
+  "PLR0912", # Too many branches
+  # $X is too complex
+  "C901",
+
+  "E501", # line too long
+  "T201", # `print` found
+  "PLR2004", # Magic value used in comparison
+]
 
 [tool.mypy]
-python_version = "3.10"
+python_version = "3.11"
 pretty = true
 warn_redundant_casts = true
 disallow_untyped_calls = true