buildbot-nix/buildbot_nix/__init__.py

910 lines
30 KiB
Python
Raw Normal View History

2023-09-10 08:11:56 +00:00
import json
import multiprocessing
import os
import re
2023-09-10 08:11:56 +00:00
import uuid
from collections import defaultdict
2023-09-17 20:14:56 +00:00
from collections.abc import Generator
from dataclasses import dataclass
2023-09-10 08:11:56 +00:00
from pathlib import Path
from typing import TYPE_CHECKING, Any
2023-12-26 18:49:57 +00:00
from buildbot.config.builder import BuilderConfig
2023-09-17 20:14:56 +00:00
from buildbot.configurators import ConfiguratorBase
2024-04-06 13:08:21 +00:00
from buildbot.interfaces import WorkerSetupError
from buildbot.locks import MasterLock
from buildbot.plugins import schedulers, steps, util, worker
2023-09-10 08:11:56 +00:00
from buildbot.process import buildstep, logobserver, remotecommand
2023-09-17 20:14:56 +00:00
from buildbot.process.project import Project
from buildbot.process.properties import Properties
2023-09-10 08:11:56 +00:00
from buildbot.process.results import ALL_RESULTS, statusToString
from buildbot.secrets.providers.file import SecretInAFile
2023-09-10 08:11:56 +00:00
from buildbot.steps.trigger import Trigger
from buildbot.www.authz import Authz
from buildbot.www.authz.endpointmatchers import EndpointMatcherBase, Match
2023-10-27 08:49:40 +00:00
2023-12-26 20:56:36 +00:00
if TYPE_CHECKING:
from buildbot.process.log import StreamLog
from buildbot.www.auth import AuthBase
2023-12-26 20:56:36 +00:00
from twisted.internet import defer
2023-12-26 20:56:36 +00:00
from twisted.logger import Logger
from .common import (
slugify_project_name,
)
from .gitea_projects import GiteaBackend, GiteaConfig
2023-12-03 10:48:01 +00:00
from .github_projects import (
GithubBackend,
GithubConfig,
)
from .projects import GitBackend, GitProject
from .secrets import read_secret_file
2023-11-18 07:18:46 +00:00
# Name of the local worker that runs the dummy "skipped build" builders.
SKIPPED_BUILDER_NAME = "skipped-builds"

# Module-level twisted logger.
log = Logger()
2023-09-10 08:11:56 +00:00
2023-12-26 20:56:36 +00:00
class BuildbotNixError(Exception):
    """Raised for errors specific to buildbot-nix (e.g. unparsable eval output)."""
2023-09-10 08:11:56 +00:00
class BuildTrigger(Trigger):
    """Dynamic trigger that creates a build for every attribute."""

    project: GitProject

    def __init__(
        self,
        project: GitProject,
        builds_scheduler: str,
        skipped_builds_scheduler: str,
        jobs: list[dict[str, Any]],
        **kwargs: Any,
    ) -> None:
        """Set up the trigger for `jobs` produced by nix-eval-jobs.

        Args:
            project: Git project the evaluation belongs to.
            builds_scheduler: Scheduler name used for real builds.
            skipped_builds_scheduler: Scheduler name used for cached jobs
                and evaluation errors.
            jobs: One dict per evaluated attribute, as emitted by
                nix-eval-jobs (keys like "attr", "drvPath", "error", ...).
        """
        if "name" not in kwargs:
            kwargs["name"] = "trigger"
        self.project = project
        self.jobs = jobs
        self.config = None
        self.builds_scheduler = builds_scheduler
        self.skipped_builds_scheduler = skipped_builds_scheduler
        Trigger.__init__(
            self,
            waitForFinish=True,
            schedulerNames=[builds_scheduler, skipped_builds_scheduler],
            haltOnFailure=True,
            flunkOnFailure=True,
            sourceStamps=[],
            alwaysUseLatest=False,
            updateSourceStamp=False,
            **kwargs,
        )

    def createTriggerProperties(self, props: Any) -> Any:  # noqa: N802
        # Properties are fully prepared in getSchedulersAndProperties.
        return props

    def getSchedulersAndProperties(self) -> list[tuple[str, Properties]]:  # noqa: N802
        """Map each evaluated job onto a (scheduler, properties) pair."""
        build_props = self.build.getProperties()
        repo_name = self.project.name
        project_id = slugify_project_name(repo_name)
        source = f"nix-eval-{project_id}"

        triggered_schedulers = []
        for job in self.jobs:
            attr = job.get("attr", "eval-error")
            name = attr
            if repo_name is not None:
                name = f"github:{repo_name}#checks.{name}"
            else:
                name = f"checks.{name}"
            error = job.get("error")
            props = Properties()
            props.setProperty("virtual_builder_name", name, source)
            props.setProperty("status_name", f"nix-build .#checks.{attr}", source)
            props.setProperty("virtual_builder_tags", "", source)

            if error is not None:
                # Evaluation failed: route to the dummy builder that only
                # displays the error message.
                props.setProperty("error", error, source)
                triggered_schedulers.append((self.skipped_builds_scheduler, props))
                continue

            if job.get("isCached"):
                # Already present in the binary cache: nothing to build.
                triggered_schedulers.append((self.skipped_builds_scheduler, props))
                continue

            drv_path = job.get("drvPath")
            system = job.get("system")
            out_path = job.get("outputs", {}).get("out")

            build_props.setProperty(f"{attr}-out_path", out_path, source)
            build_props.setProperty(f"{attr}-drv_path", drv_path, source)

            props.setProperty("attr", attr, source)
            props.setProperty("system", system, source)
            props.setProperty("drv_path", drv_path, source)
            props.setProperty("out_path", out_path, source)
            # we use this to identify builds when running a retry
            props.setProperty("build_uuid", str(uuid.uuid4()), source)

            triggered_schedulers.append((self.builds_scheduler, props))
        return triggered_schedulers

    def getCurrentSummary(self) -> dict[str, str]:  # noqa: N802
        """Summarize per-status result counts instead of the generic
        `nix-build` builder name, which is not helpful.
        """
        if not self.triggeredNames:
            return {"step": "running"}
        summary = []
        if self._result_list:
            for status in ALL_RESULTS:
                count = self._result_list.count(status)
                if count:
                    # Reuse the already-computed count instead of scanning
                    # the result list a second time.
                    summary.append(f"{count} {statusToString(status, count)}")
        return {"step": f"({', '.join(summary)})"}
class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
    """Parses the output of `nix-eval-jobs` and triggers a `nix-build` build for
    every attribute.
    """

    project: GitProject

    def __init__(
        self, project: GitProject, supported_systems: list[str], **kwargs: Any
    ) -> None:
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)
        self.project = project
        # Buffers stdout so run() can parse the JSON lines afterwards.
        self.observer = logobserver.BufferLogObserver()
        self.addLogObserver("stdio", self.observer)
        self.supported_systems = supported_systems

    @defer.inlineCallbacks
    def run(self) -> Generator[Any, object, Any]:
        # run nix-eval-jobs --flake .#checks to generate the dict of stages
        cmd: remotecommand.RemoteCommand = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)

        # if the command passes extract the list of stages
        result = cmd.results()
        if result == util.SUCCESS:
            # create a ShellCommand for each stage and add them to the build
            jobs = []
            # nix-eval-jobs emits one JSON object per line of stdout.
            for line in self.observer.getStdout().split("\n"):
                if line != "":
                    try:
                        job = json.loads(line)
                    except json.JSONDecodeError as e:
                        msg = f"Failed to parse line: {line}"
                        raise BuildbotNixError(msg) from e
                    jobs.append(job)
            repo_name = self.project.name
            project_id = slugify_project_name(repo_name)
            filtered_jobs = []
            for job in jobs:
                system = job.get("system")
                # Jobs without a system are evaluation errors; keep them so
                # the failure is reported downstream.
                if not system or system in self.supported_systems:  # report eval errors
                    filtered_jobs.append(job)

            # Dynamically append the trigger step that fans out one build
            # per surviving attribute.
            self.build.addStepsAfterCurrentStep(
                [
                    BuildTrigger(
                        self.project,
                        builds_scheduler=f"{project_id}-nix-build",
                        skipped_builds_scheduler=f"{project_id}-nix-skipped-build",
                        name="build flake",
                        jobs=filtered_jobs,
                    ),
                ],
            )

        return result
# FIXME this leaks memory... but probably not enough that we care
class RetryCounter:
    """Tracks the remaining retry budget for each build, keyed by its uuid."""

    def __init__(self, retries: int) -> None:
        # Builds seen for the first time start with the full budget.
        self.builds: dict[uuid.UUID, int] = defaultdict(lambda: retries)

    def retry_build(self, build_id: uuid.UUID) -> int:
        """Consume one retry for `build_id`; 0 means the budget is exhausted."""
        remaining = self.builds[build_id]
        if remaining <= 1:
            return 0
        self.builds[build_id] = remaining - 1
        return remaining
2023-09-10 08:11:56 +00:00
# For now we limit this to two. Often this allows us to make the error log
# shorter because we won't see the logs for all previous succeeded builds
# Shared module-level counter: retry state persists across builds of this master.
RETRY_COUNTER = RetryCounter(retries=2)
2023-11-18 07:18:46 +00:00
class EvalErrorStep(steps.BuildStep):
    """Shows the error message of a failed evaluation."""

    @defer.inlineCallbacks
    def run(self) -> Generator[Any, object, Any]:
        # Both properties are set by BuildTrigger for eval-error jobs.
        error = self.getProperty("error")
        attr = self.getProperty("attr")
        # show eval error
        error_log: StreamLog = yield self.addLog("nix_error")
        error_log.addStderr(f"{attr} failed to evaluate:\n{error}")
        # Always fail: this step only runs when evaluation produced an error.
        return util.FAILURE
2023-09-10 08:11:56 +00:00
class NixBuildCommand(buildstep.ShellMixin, steps.BuildStep):
    """Builds a nix derivation."""

    def __init__(self, **kwargs: Any) -> None:
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)

    @defer.inlineCallbacks
    def run(self) -> Generator[Any, object, Any]:
        # run `nix build`
        cmd: remotecommand.RemoteCommand = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)

        res = cmd.results()
        if res == util.FAILURE:
            # Consume one retry from this build's budget (identified by the
            # build_uuid property set in BuildTrigger); RETRY makes buildbot
            # schedule the build again.
            retries = RETRY_COUNTER.retry_build(self.getProperty("build_uuid"))
            if retries > 0:
                return util.RETRY
        return res
class UpdateBuildOutput(steps.BuildStep):
    """Updates store paths in a public www directory.

    This is useful to prefetch updates without having to evaluate
    on the target machine.
    """

    project: GitProject

    def __init__(self, project: GitProject, path: Path, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.project = project
        # Directory the out paths are published into.
        self.path = path

    # NOTE(review): annotated as a generator but runs synchronously (no
    # yield); buildbot accepts a plain return value here — confirm.
    def run(self) -> Generator[Any, object, Any]:
        props = self.build.getProperties()
        # Only publish outputs for the default branch.
        if props.getProperty("branch") != self.project.default_branch:
            return util.SKIPPED

        # Path(...).name keeps only the last component, so the attribute
        # cannot escape the output directory.
        attr = Path(props.getProperty("attr")).name
        out_path = props.getProperty("out_path")
        # XXX don't hardcode this
        self.path.mkdir(parents=True, exist_ok=True)
        (self.path / attr).write_text(out_path)
        return util.SUCCESS
2024-04-06 13:08:21 +00:00
# GitHub sometimes fires the PR webhook before it has computed the merge commit.
# This is a workaround to fetch the merge commit and check out the PR branch in CI.
class GitLocalPrMerge(steps.Git):
    """Git checkout step that reproduces a PR's merge commit locally.

    When base/head SHAs are available as properties, the worker checks out
    the PR head and merges the base into it, instead of relying on a
    server-provided merge ref.
    """

    @defer.inlineCallbacks
    def run_vc(
        self,
        branch: str,
        revision: str,
        patch: str,
    ) -> Generator[Any, object, Any]:
        build_props = self.build.getProperties()
        # TODO: abstract this into an interface as well
        # GitHub uses "github.*" properties; the "*_sha" names are the
        # backend-agnostic fallback.
        merge_base = build_props.getProperty(
            "github.base.sha"
        ) or build_props.getProperty("base_sha")
        pr_head = build_props.getProperty("github.head.sha") or build_props.getProperty(
            "head_sha"
        )

        # Not a PR, fallback to default behavior
        if merge_base is None or pr_head is None:
            res = yield super().run_vc(branch, revision, patch)
            return res

        # The code below is a modified version of Git.run_vc
        self.stdio_log: StreamLog = yield self.addLogForRemoteCommands("stdio")
        self.stdio_log.addStdout(f"Merging {merge_base} into {pr_head}\n")

        git_installed = yield self.checkFeatureSupport()
        if not git_installed:
            msg = "git is not installed on worker"
            raise WorkerSetupError(msg)

        # Clone only if the workdir has no repository yet.
        has_git = yield self.pathExists(
            self.build.path_module.join(self.workdir, ".git")
        )
        if not has_git:
            yield self._dovccmd(["clone", "--recurse-submodules", self.repourl, "."])

        # Drop leftovers from a previously applied patch.
        patched = yield self.sourcedirIsPatched()
        if patched:
            yield self._dovccmd(["clean", "-f", "-f", "-d", "-x"])

        yield self._dovccmd(["fetch", "-f", "-t", self.repourl, merge_base, pr_head])
        yield self._dovccmd(["checkout", "--detach", "-f", pr_head])
        # Merge the base into the detached PR head with a fixed identity so
        # the merge commit is reproducible.
        yield self._dovccmd(
            [
                "-c",
                "user.email=buildbot@example.com",
                "-c",
                "user.name=buildbot",
                "merge",
                "--no-ff",
                "-m",
                f"Merge {merge_base} into {pr_head}",
                merge_base,
            ]
        )
        self.updateSourceProperty("got_revision", pr_head)
        res = yield self.parseCommitDescription()
        return res
2023-12-03 10:48:01 +00:00
2023-09-10 08:11:56 +00:00
def nix_eval_config(
    project: GitProject,
    worker_names: list[str],
    git_url: str,
    supported_systems: list[str],
    eval_lock: MasterLock,
    worker_count: int,
    max_memory_size: int,
) -> BuilderConfig:
    """Uses nix-eval-jobs to evaluate the flake's checks in parallel.

    For each evaluated attribute a new build pipeline is started.

    Args:
        project: Project whose flake is evaluated.
        worker_names: Workers eligible to run the evaluation.
        git_url: Clone URL; may contain buildbot property interpolations.
        supported_systems: Systems for which real builds are triggered.
        eval_lock: Master lock so only one evaluation runs at a time.
        worker_count: --workers value passed to nix-eval-jobs.
        max_memory_size: --max-memory-size value for nix-eval-jobs
            (presumably MiB — TODO confirm against nix-eval-jobs docs).
    """
    factory = util.BuildFactory()
    # check out the source
    url_with_secret = util.Interpolate(git_url)
    factory.addStep(
        GitLocalPrMerge(
            repourl=url_with_secret,
            method="clean",
            submodules=True,
            haltOnFailure=True,
        ),
    )
    # GC roots for the evaluated derivations so they survive until built.
    drv_gcroots_dir = util.Interpolate(
        "/nix/var/nix/gcroots/per-user/buildbot-worker/%(prop:project)s/drvs/",
    )
    factory.addStep(
        NixEvalCommand(
            project=project,
            env={},
            name="evaluate flake",
            supported_systems=supported_systems,
            command=[
                "nix-eval-jobs",
                "--workers",
                str(worker_count),
                "--max-memory-size",
                str(max_memory_size),
                "--option",
                "accept-flake-config",
                "true",
                "--gc-roots-dir",
                drv_gcroots_dir,
                "--force-recurse",
                "--check-cache-status",
                "--flake",
                ".#checks",
            ],
            haltOnFailure=True,
            # Evaluation is memory-hungry; serialize it across the master.
            locks=[eval_lock.access("exclusive")],
        ),
    )
    # Remove the gcroots again once the builds have been triggered.
    factory.addStep(
        steps.ShellCommand(
            name="Cleanup drv paths",
            command=[
                "rm",
                "-rf",
                drv_gcroots_dir,
            ],
        ),
    )
    return util.BuilderConfig(
        name=f"{project.name}/nix-eval",
        workernames=worker_names,
        project=project.name,
        factory=factory,
        properties=dict(status_name="nix-eval"),
    )
2023-12-23 18:54:42 +00:00
@dataclass
class CachixConfig:
    """Configuration for pushing build results to a Cachix cache."""

    # Name of the cachix cache to push to.
    name: str
    # Secret names as registered with buildbot's secret providers.
    signing_key_secret_name: str | None = None
    auth_token_secret_name: str | None = None

    def cachix_env(self) -> dict[str, str]:
        """Return the environment variables expected by the `cachix` CLI.

        NOTE(review): the values are `util.Secret` placeholders that
        buildbot renders to strings at runtime; the annotation describes
        the rendered result, not the dict contents here.
        """
        env = {}
        if self.signing_key_secret_name is not None:
            env["CACHIX_SIGNING_KEY"] = util.Secret(self.signing_key_secret_name)
        if self.auth_token_secret_name is not None:
            env["CACHIX_AUTH_TOKEN"] = util.Secret(self.auth_token_secret_name)
        return env
2023-09-10 08:11:56 +00:00
def nix_build_config(
    project: GitProject,
    worker_names: list[str],
    cachix: CachixConfig | None = None,
    outputs_path: Path | None = None,
) -> BuilderConfig:
    """Builds one nix flake attribute.

    Args:
        project: Project the builder belongs to.
        worker_names: Workers eligible to run the build.
        cachix: If set, push the build result to this cachix cache.
        outputs_path: If set, publish the resulting store path to this
            directory (see UpdateBuildOutput).
    """
    factory = util.BuildFactory()
    factory.addStep(
        NixBuildCommand(
            env={},
            name="Build flake attr",
            command=[
                "nix",
                "build",
                "-L",
                "--option",
                "keep-going",
                "true",
                # stop stuck builds after 20 minutes
                # FIX: dropped a stray "--option" that preceded this flag;
                # `--max-silent-time` is a flag of its own, and the extra
                # "--option" made nix parse it as a settings name.
                "--max-silent-time",
                str(60 * 20),
                "--accept-flake-config",
                "--out-link",
                util.Interpolate("result-%(prop:attr)s"),
                util.Interpolate("%(prop:drv_path)s^*"),
            ],
            # 3 hours, defaults to 20 minutes
            # We increase this over the default since the build output might end up in a different `nix build`.
            timeout=60 * 60 * 3,
            haltOnFailure=True,
        ),
    )
    if cachix:
        factory.addStep(
            steps.ShellCommand(
                name="Upload cachix",
                env=cachix.cachix_env(),
                command=[
                    "cachix",
                    "push",
                    cachix.name,
                    util.Interpolate("result-%(prop:attr)s"),
                ],
            ),
        )

    factory.addStep(
        steps.ShellCommand(
            name="Register gcroot",
            command=[
                "nix-store",
                "--add-root",
                # FIXME: cleanup old build attributes
                util.Interpolate(
                    "/nix/var/nix/gcroots/per-user/buildbot-worker/%(prop:project)s/%(prop:attr)s",
                ),
                "-r",
                util.Property("out_path"),
            ],
            # Only keep long-lived gcroots for the default branch.
            doStepIf=lambda s: s.getProperty("branch") == project.default_branch,
        ),
    )
    factory.addStep(
        steps.ShellCommand(
            name="Delete temporary gcroots",
            command=["rm", "-f", util.Interpolate("result-%(prop:attr)s")],
        ),
    )
    if outputs_path is not None:
        factory.addStep(
            UpdateBuildOutput(
                project=project,
                name="Update build output",
                path=outputs_path,
            ),
        )
    return util.BuilderConfig(
        name=f"{project.name}/nix-build",
        project=project.name,
        workernames=worker_names,
        collapseRequests=False,
        env={},
        factory=factory,
    )
2023-09-17 20:14:56 +00:00
2023-11-18 07:18:46 +00:00
def nix_skipped_build_config(
    project: GitProject,
    worker_names: list[str],
) -> BuilderConfig:
    """Dummy builder that is triggered when a build is skipped."""

    def has_error(step: Any) -> Any:
        # Truthy when the evaluation attached an error message.
        return step.getProperty("error")

    factory = util.BuildFactory()
    # Surface the evaluation error, if there is one.
    factory.addStep(
        EvalErrorStep(
            name="Nix evaluation",
            doStepIf=has_error,
            hideStepIf=lambda _, s: not has_error(s),
        ),
    )
    # Dummy step that merely shows the build was served from the cache.
    factory.addStep(
        steps.BuildStep(
            name="Nix build (cached)",
            doStepIf=lambda _: False,
            hideStepIf=lambda _, s: has_error(s),
        ),
    )
    return util.BuilderConfig(
        name=f"{project.name}/nix-skipped-build",
        project=project.name,
        workernames=worker_names,
        collapseRequests=False,
        env={},
        factory=factory,
    )
2023-09-17 20:14:56 +00:00
def config_for_project(
    config: dict[str, Any],
    project: GitProject,
    worker_names: list[str],
    nix_supported_systems: list[str],
    nix_eval_worker_count: int,
    nix_eval_max_memory_size: int,
    eval_lock: MasterLock,
    cachix: CachixConfig | None = None,
    outputs_path: Path | None = None,
) -> None:
    """Register schedulers and builders for one project in `config`.

    Adds the eval/build/skipped-build builder trio plus the schedulers
    feeding them (default branch, merge queue, pull requests, the two
    triggerables used by nix-eval, and a force scheduler).
    """
    config["projects"].append(Project(project.name))
    config["schedulers"].extend(
        [
            schedulers.SingleBranchScheduler(
                name=f"{project.project_id}-default-branch",
                change_filter=util.ChangeFilter(
                    repository=project.url,
                    filter_fn=lambda c: c.branch == project.default_branch,
                ),
                builderNames=[f"{project.name}/nix-eval"],
                treeStableTimer=5,
            ),
            # this is compatible with bors or github's merge queue
            schedulers.SingleBranchScheduler(
                name=f"{project.project_id}-merge-queue",
                change_filter=util.ChangeFilter(
                    repository=project.url,
                    branch_re="(gh-readonly-queue/.*|staging|trying)",
                ),
                builderNames=[f"{project.name}/nix-eval"],
            ),
            # build all pull requests
            schedulers.SingleBranchScheduler(
                name=f"{project.project_id}-prs",
                change_filter=util.ChangeFilter(
                    repository=project.url,
                    category="pull",
                ),
                builderNames=[f"{project.name}/nix-eval"],
            ),
            # this is triggered from `nix-eval`
            schedulers.Triggerable(
                name=f"{project.project_id}-nix-build",
                builderNames=[f"{project.name}/nix-build"],
            ),
            # this is triggered from `nix-eval` when the build is skipped
            schedulers.Triggerable(
                name=f"{project.project_id}-nix-skipped-build",
                builderNames=[f"{project.name}/nix-skipped-build"],
            ),
            # allow to manually trigger a nix-build
            schedulers.ForceScheduler(
                name=f"{project.project_id}-force",
                builderNames=[f"{project.name}/nix-eval"],
                properties=[
                    util.StringParameter(
                        name="project",
                        label=f"Name of the {project.pretty_type} repository.",
                        default=project.name,
                    ),
                ],
            ),
        ],
    )
    config["builders"].extend(
        [
            # Since all workers run on the same machine, we only assign one of them to do the evaluation.
            # This should prevent excessive memory usage.
            nix_eval_config(
                project,
                worker_names,
                git_url=project.get_project_url(),
                supported_systems=nix_supported_systems,
                worker_count=nix_eval_worker_count,
                max_memory_size=nix_eval_max_memory_size,
                eval_lock=eval_lock,
            ),
            nix_build_config(
                project,
                worker_names,
                cachix=cachix,
                outputs_path=outputs_path,
            ),
            # Skipped builds run on the cheap local worker.
            nix_skipped_build_config(project, [SKIPPED_BUILDER_NAME]),
        ],
    )
def normalize_virtual_builder_name(name: str) -> str:
    """Map a virtual builder name back to its backing builder.

    e.g. github:nix-community/srvos#checks.aarch64-linux.example
         -> nix-community/srvos/nix-build
    Names without a "<backend>:" prefix are returned unchanged.
    """
    if re.match(r"^[^:]+:", name) is not None:
        # FIX: the prefix pattern was `[^:]:` (a single character before the
        # colon), so it never matched "github:…" and the rewrite never fired.
        match = re.match(r"[^:]+:(?P<owner>[^/]+)/(?P<repo>[^#]+)#.+", name)
        if match:
            return f"{match['owner']}/{match['repo']}/nix-build"

    return name
class AnyProjectEndpointMatcher(EndpointMatcherBase):
    """Authorizes build/buildrequest actions against a set of builder names."""

    def __init__(self, builders: set[str] | None = None, **kwargs: Any) -> None:
        if builders is None:
            builders = set()
        # Normalized builder names (see normalize_virtual_builder_name)
        # that the configured role may act on.
        self.builders = builders
        super().__init__(**kwargs)

    @defer.inlineCallbacks
    def check_builder(
        self,
        endpoint_object: Any,
        endpoint_dict: dict[str, Any],
        object_type: str,
    ) -> Generator[defer.Deferred[Match], Any, Any]:
        """Resolve the endpoint to its builder and match it against our set.

        Returns a Match when allowed; returns None (no match) otherwise.
        """
        res = yield endpoint_object.get({}, endpoint_dict)
        if res is None:
            return None

        builder = yield self.master.data.get(("builders", res["builderid"]))
        # Virtual builders are rewritten to their backing builder name.
        builder_name = normalize_virtual_builder_name(builder["name"])
        if builder_name in self.builders:
            log.warn(
                "Builder {builder} allowed by {role}: {builders}",
                builder=builder_name,
                role=self.role,
                builders=self.builders,
            )
            return Match(self.master, **{object_type: res})
        else:
            log.warn(
                "Builder {builder} not allowed by {role}: {builders}",
                builder=builder_name,
                role=self.role,
                builders=self.builders,
            )

    def match_BuildEndpoint_rebuild(  # noqa: N802
        self,
        epobject: Any,
        epdict: dict[str, Any],
        options: dict[str, Any],
    ) -> defer.Deferred[Match]:
        return self.check_builder(epobject, epdict, "build")

    def match_BuildEndpoint_stop(  # noqa: N802
        self,
        epobject: Any,
        epdict: dict[str, Any],
        options: dict[str, Any],
    ) -> defer.Deferred[Match]:
        return self.check_builder(epobject, epdict, "build")

    def match_BuildRequestEndpoint_stop(  # noqa: N802
        self,
        epobject: Any,
        epdict: dict[str, Any],
        options: dict[str, Any],
    ) -> defer.Deferred[Match]:
        return self.check_builder(epobject, epdict, "buildrequest")
def setup_authz(
    backends: list[GitBackend], projects: list[GitProject], admins: list[str]
) -> Authz:
    """Build the web-UI authorization rules.

    Admins (matched by username) may do anything; members of a project's
    organization may act on that project's builders and on the backend's
    reload builder.
    """
    allow_rules = []
    # Every org may always use the backends' reload builders.
    allowed_builders_by_org: defaultdict[str, set[str]] = defaultdict(
        lambda: {backend.reload_builder_name for backend in backends},
    )

    for project in projects:
        if project.belongs_to_org:
            for builder in ["nix-build", "nix-skipped-build", "nix-eval"]:
                allowed_builders_by_org[project.owner].add(f"{project.name}/{builder}")

    for org, allowed_builders in allowed_builders_by_org.items():
        allow_rules.append(
            AnyProjectEndpointMatcher(
                builders=allowed_builders,
                role=org,
                defaultDeny=False,
            ),
        )

    allow_rules.append(util.AnyEndpointMatcher(role="admin", defaultDeny=False))
    # FIX: this rule previously required the role "admins", but the
    # RolesFromUsername matcher below only ever grants "admin", so control
    # endpoints were denied even for admins. Use the same role name.
    # (Assumes no deployment relied on a group literally named "admins".)
    allow_rules.append(util.AnyControlEndpointMatcher(role="admin"))
    return util.Authz(
        roleMatchers=[
            util.RolesFromUsername(roles=["admin"], usernames=admins),
            util.RolesFromGroups(groupPrefix=""),  # so we can match on ORG
        ],
        allowRules=allow_rules,
    )
class PeriodicWithStartup(schedulers.Periodic):
    """Periodic scheduler that can additionally fire once at startup."""

    def __init__(self, *args: Any, run_on_startup: bool = False, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # When True, a build is triggered immediately upon activation.
        self.run_on_startup = run_on_startup

    @defer.inlineCallbacks
    def activate(self) -> Generator[Any, object, Any]:
        if self.run_on_startup:
            # Forgetting the persisted last-build time makes Periodic
            # believe no build has ever run, so it schedules one right away.
            yield self.setState("last_build", None)
        yield super().activate()
2023-09-17 20:14:56 +00:00
class NixConfigurator(ConfiguratorBase):
    """Configurator wiring nix eval/build/skipped builders into a master.

    Loads projects from the configured git backends (GitHub/Gitea),
    registers workers from a secret file, and sets up schedulers,
    builders, reporters, change hooks and authorization.
    """

    def __init__(
        self,
        # Shape of this file: [ { "name": "<worker-name>", "pass": "<worker-password>", "cores": "<cpu-cores>" } ]
        admins: list[str],
        auth_backend: str,
        github: GithubConfig | None,
        gitea: GiteaConfig | None,
        url: str,
        nix_supported_systems: list[str],
        nix_eval_worker_count: int | None,
        nix_eval_max_memory_size: int,
        nix_workers_secret_name: str = "buildbot-nix-workers",  # noqa: S107
        cachix: CachixConfig | None = None,
        outputs_path: str | None = None,
    ) -> None:
        """Store the configuration; all wiring happens in configure().

        Args:
            admins: Usernames granted the "admin" role.
            auth_backend: "github", "gitea" or "none".
            github/gitea: Backend configs; a backend is enabled iff set.
            url: Public buildbot URL, used when registering webhooks.
            nix_supported_systems: Systems builds are triggered for.
            nix_eval_worker_count: nix-eval-jobs parallelism; defaults to
                the CPU count when None.
            nix_eval_max_memory_size: Memory limit for nix-eval-jobs.
            nix_workers_secret_name: Secret holding the worker JSON list.
            cachix: Optional cachix push configuration.
            outputs_path: Optional directory to publish out paths into.
        """
        super().__init__()
        self.nix_workers_secret_name = nix_workers_secret_name
        self.nix_eval_max_memory_size = nix_eval_max_memory_size
        self.nix_eval_worker_count = nix_eval_worker_count
        self.nix_supported_systems = nix_supported_systems
        self.auth_backend = auth_backend
        self.admins = admins
        self.github = github
        self.gitea = gitea
        self.url = url
        self.cachix = cachix
        if outputs_path is None:
            self.outputs_path = None
        else:
            self.outputs_path = Path(outputs_path)

    def configure(self, config: dict[str, Any]) -> None:
        backends: dict[str, GitBackend] = {}

        if self.github is not None:
            backends["github"] = GithubBackend(self.github)

        if self.gitea is not None:
            backends["gitea"] = GiteaBackend(self.gitea)

        auth: AuthBase | None = (
            backends[self.auth_backend].create_auth()
            if self.auth_backend != "none"
            else None
        )

        projects: list[GitProject] = []

        for backend in backends.values():
            projects += backend.load_projects()

        worker_config = json.loads(read_secret_file(self.nix_workers_secret_name))
        worker_names = []

        config.setdefault("projects", [])
        config.setdefault("secretsProviders", [])
        config.setdefault("www", {})

        # One buildbot worker per advertised CPU core.
        for item in worker_config:
            cores = item.get("cores", 0)
            for i in range(cores):
                worker_name = f"{item['name']}-{i:03}"
                config["workers"].append(worker.Worker(worker_name, item["pass"]))
                worker_names.append(worker_name)

        eval_lock = util.MasterLock("nix-eval")

        for project in projects:
            project.create_project_hook(project.owner, project.repo, self.url)
            config_for_project(
                config,
                project,
                worker_names,
                self.nix_supported_systems,
                self.nix_eval_worker_count or multiprocessing.cpu_count(),
                self.nix_eval_max_memory_size,
                eval_lock,
                self.cachix,
                self.outputs_path,
            )

        config["workers"].append(worker.LocalWorker(SKIPPED_BUILDER_NAME))

        for backend in backends.values():
            # Reload backend projects
            config["builders"].append(backend.create_reload_builder([worker_names[0]]))
            config["schedulers"].extend(
                [
                    schedulers.ForceScheduler(
                        name=f"reload-{backend.type}-projects",
                        builderNames=[backend.reload_builder_name],
                        buttonName="Update projects",
                    ),
                    # project list twice a day and on startup
                    PeriodicWithStartup(
                        name=f"reload-{backend.type}-projects-bidaily",
                        builderNames=[backend.reload_builder_name],
                        periodicBuildTimer=12 * 60 * 60,
                        run_on_startup=not backend.are_projects_cached(),
                    ),
                ],
            )
            config["services"].append(backend.create_reporter())
            # FIX: this previously used the misspelled key "secretProviders",
            # creating an unused entry; "secretsProviders" is the key buildbot
            # reads and the one extended below.
            config.setdefault("secretsProviders", [])
            config["secretsProviders"].extend(backend.create_secret_providers())

        systemd_secrets = SecretInAFile(
            dirname=os.environ["CREDENTIALS_DIRECTORY"],
        )
        config["secretsProviders"].append(systemd_secrets)

        config["www"].setdefault("plugins", {})
        config["www"]["plugins"].update(dict(base_react={}))

        config["www"].setdefault("change_hook_dialects", {})
        for backend in backends.values():
            config["www"]["change_hook_dialects"][backend.change_hook_name] = (
                backend.create_change_hook()
            )

        config["www"].setdefault("avatar_methods", [])

        for backend in backends.values():
            # FIX: removed a leftover debug print(avatar_method).
            avatar_method = backend.create_avatar_method()
            if avatar_method is not None:
                config["www"]["avatar_methods"].append(avatar_method)

        if "auth" not in config["www"]:
            # TODO one cannot have multiple auth backends...
            if auth is not None:
                config["www"]["auth"] = auth

            config["www"]["authz"] = setup_authz(
                admins=self.admins,
                backends=list(backends.values()),
                projects=projects,
            )