Merge pull request #217 from MagicRB/combined-build-reports-github

Combined build reports GitHub
Jörg Thalheim 2024-07-26 13:47:13 +02:00 committed by GitHub
commit 2620f2cd31
6 changed files with 196 additions and 21 deletions


@@ -59,12 +59,14 @@ class BuildTrigger(Trigger):
         builds_scheduler: str,
         skipped_builds_scheduler: str,
         jobs: list[dict[str, Any]],
+        report_status: bool,
         **kwargs: Any,
     ) -> None:
         if "name" not in kwargs:
             kwargs["name"] = "trigger"
         self.project = project
         self.jobs = jobs
+        self.report_status = report_status
         self.config = None
         self.builds_scheduler = builds_scheduler
         self.skipped_builds_scheduler = skipped_builds_scheduler
@@ -102,6 +104,7 @@ class BuildTrigger(Trigger):
             props.setProperty("virtual_builder_name", name, source)
             props.setProperty("status_name", f"nix-build .#checks.{attr}", source)
             props.setProperty("virtual_builder_tags", "", source)
+            props.setProperty("report_status", self.report_status, source)
 
             drv_path = job.get("drvPath")
             system = job.get("system")
@@ -145,6 +148,15 @@ class BuildTrigger(Trigger):
         return {"step": f"({', '.join(summary)})"}
 
 
+class NixBuildCombined(steps.BuildStep):
+    """Shows the error message of a failed evaluation."""
+
+    name = "nix-build-combined"
+
+    def run(self) -> Generator[Any, object, Any]:
+        return self.build.results
+
+
 class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
     """Parses the output of `nix-eval-jobs` and triggers a `nix-build` build for
     every attribute.
@@ -153,7 +165,11 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
     project: GitProject
 
     def __init__(
-        self, project: GitProject, supported_systems: list[str], **kwargs: Any
+        self,
+        project: GitProject,
+        supported_systems: list[str],
+        job_report_limit: int | None,
+        **kwargs: Any,
     ) -> None:
         kwargs = self.setupShellMixin(kwargs)
         super().__init__(**kwargs)
@@ -161,6 +177,7 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
         self.observer = logobserver.BufferLogObserver()
         self.addLogObserver("stdio", self.observer)
         self.supported_systems = supported_systems
+        self.job_report_limit = job_report_limit
 
     @defer.inlineCallbacks
     def run(self) -> Generator[Any, object, Any]:
@@ -190,6 +207,8 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
             if not system or system in self.supported_systems:  # report eval errors
                 filtered_jobs.append(job)
 
+        self.number_of_jobs = len(filtered_jobs)
+
         self.build.addStepsAfterCurrentStep(
             [
                 BuildTrigger(
@@ -198,8 +217,28 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
                     skipped_builds_scheduler=f"{project_id}-nix-skipped-build",
                     name="build flake",
                     jobs=filtered_jobs,
+                    report_status=(
+                        self.job_report_limit is None
+                        or self.number_of_jobs <= self.job_report_limit
+                    ),
                 ),
-            ],
+            ]
+            + (
+                [
+                    Trigger(
+                        waitForFinish=True,
+                        schedulerNames=[f"{project_id}-nix-build-combined"],
+                        haltOnFailure=True,
+                        flunkOnFailure=True,
+                        sourceStamps=[],
+                        alwaysUseLatest=False,
+                        updateSourceStamp=False,
+                    ),
+                ]
+                if self.job_report_limit is not None
+                and self.number_of_jobs > self.job_report_limit
+                else []
+            ),
         )
 
         return result
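
The expression added above is the core of the change: `report_status` stays true only while the number of evaluated jobs is within `job_report_limit`; once the limit is exceeded, per-job statuses are suppressed and the evaluation additionally triggers the `nix-build-combined` scheduler, which reports a single aggregated status. A minimal sketch of that decision (the helper name is illustrative, not from the codebase):

    # Illustrative only: the threshold rule encoded in the trigger arguments above.
    def should_report_individually(job_report_limit: int | None, number_of_jobs: int) -> bool:
        # True  -> every nix-build job pushes its own commit status (report_status=True)
        # False -> per-job statuses are suppressed; only nix-build-combined reports
        return job_report_limit is None or number_of_jobs <= job_report_limit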
@@ -360,6 +399,7 @@ def nix_eval_config(
     eval_lock: MasterLock,
     worker_count: int,
     max_memory_size: int,
+    job_report_limit: int | None,
 ) -> BuilderConfig:
     """Uses nix-eval-jobs to evaluate hydraJobs from flake.nix in parallel.
     For each evaluated attribute a new build pipeline is started.
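
For orientation, `nix-eval-jobs` emits one JSON object per evaluated attribute, and the fields this module reads (`attr`, `drvPath`, `system`) look roughly like the sketch below (abridged and hypothetical; the exact field set depends on the nix-eval-jobs version):

    # Hypothetical single job record as consumed by NixEvalCommand/BuildTrigger.
    job = {
        "attr": "x86_64-linux.hello",
        "drvPath": "/nix/store/...-hello.drv",
        "system": "x86_64-linux",
    }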
@@ -385,6 +425,7 @@ def nix_eval_config(
             env={},
             name="evaluate flake",
             supported_systems=supported_systems,
+            job_report_limit=job_report_limit,
             command=[
                 "nix-eval-jobs",
                 "--workers",
@@ -492,6 +533,7 @@ def nix_build_config(
             updateSourceStamp=False,
             doStepIf=do_register_gcroot_if,
             copy_properties=["out_path", "attr"],
+            set_properties={"report_status": False},
         ),
     )
     factory.addStep(
@@ -556,6 +598,7 @@ def nix_skipped_build_config(
             updateSourceStamp=False,
             doStepIf=do_register_gcroot_if,
             copy_properties=["out_path", "attr"],
+            set_properties={"report_status": False},
         ),
     )
     return util.BuilderConfig(
@@ -601,6 +644,24 @@ def nix_register_gcroot_config(
     )
 
 
+def nix_build_combined_config(
+    project: GitProject,
+    worker_names: list[str],
+) -> BuilderConfig:
+    factory = util.BuildFactory()
+    factory.addStep(NixBuildCombined())
+
+    return util.BuilderConfig(
+        name=f"{project.name}/nix-build-combined",
+        project=project.name,
+        workernames=worker_names,
+        collapseRequests=False,
+        env={},
+        factory=factory,
+        properties=dict(status_name="nix-build-combined"),
+    )
+
+
 def config_for_project(
     config: dict[str, Any],
     project: GitProject,
@@ -610,6 +671,7 @@ def config_for_project(
     nix_eval_max_memory_size: int,
     eval_lock: MasterLock,
     post_build_steps: list[steps.BuildStep],
+    job_report_limit: int | None,
     outputs_path: Path | None = None,
     build_retries: int = 1,
 ) -> None:
@@ -653,6 +715,11 @@ def config_for_project(
                 name=f"{project.project_id}-nix-skipped-build",
                 builderNames=[f"{project.name}/nix-skipped-build"],
             ),
+            # this is triggered from `nix-eval` when the build contains too many outputs
+            schedulers.Triggerable(
+                name=f"{project.project_id}-nix-build-combined",
+                builderNames=[f"{project.name}/nix-build-combined"],
+            ),
             schedulers.Triggerable(
                 name=f"{project.project_id}-nix-register-gcroot",
                 builderNames=[f"{project.name}/nix-register-gcroot"],
@@ -680,6 +747,7 @@ def config_for_project(
                 worker_names,
                 git_url=project.get_project_url(),
                 supported_systems=nix_supported_systems,
+                job_report_limit=job_report_limit,
                 worker_count=nix_eval_worker_count,
                 max_memory_size=nix_eval_max_memory_size,
                 eval_lock=eval_lock,
@@ -693,6 +761,7 @@ def config_for_project(
             ),
             nix_skipped_build_config(project, [SKIPPED_BUILDER_NAME]),
             nix_register_gcroot_config(project, worker_names),
+            nix_build_combined_config(project, worker_names),
         ],
     )
@@ -842,7 +911,7 @@ class NixConfigurator(ConfiguratorBase):
             backends["github"] = GithubBackend(self.config.github, self.config.url)
 
         if self.config.gitea is not None:
-            backends["gitea"] = GiteaBackend(self.config.gitea)
+            backends["gitea"] = GiteaBackend(self.config.gitea, self.config.url)
 
         auth: AuthBase | None = (
             backends[self.config.auth_backend].create_auth()
@@ -895,6 +964,7 @@ class NixConfigurator(ConfiguratorBase):
                 self.config.eval_max_memory_size,
                 eval_lock,
                 [x.to_buildstep() for x in self.config.post_build_steps],
+                self.config.job_report_limit,
                 self.config.outputs_path,
                 self.config.build_retries,
             )


@@ -172,3 +172,11 @@ def model_validate_project_cache(cls: type[_T], project_cache_file: Path) -> lis
 
 def model_dump_project_cache(repos: list[_T]) -> str:
     return json.dumps([repo.model_dump() for repo in repos])
+
+
+def filter_for_combined_builds(reports: Any) -> Any | None:
+    properties = reports[0]["builds"][0]["properties"]
+
+    if "report_status" in properties and not properties["report_status"][0]:
+        return None
+
+    return reports
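
This helper is handed to the status reporters below as `modifyingFilter`: when the first build of a report carries `report_status == False`, the whole report is dropped and no commit status is pushed. A hedged usage sketch (the payload shape is inferred from the indexing above; Buildbot stores each property as a `(value, source)` pair):

    # Hypothetical reporter payload, reduced to the fields the filter actually reads.
    reports = [
        {
            "builds": [
                {"properties": {"report_status": (False, "BuildTrigger")}},
            ],
        },
    ]

    filter_for_combined_builds(reports)  # -> None, so the status push is skipped
    filter_for_combined_builds([{"builds": [{"properties": {}}]}])  # -> returned unchanged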


@@ -1,5 +1,6 @@
 import os
 import signal
+from collections.abc import Callable
 from pathlib import Path
 from typing import Any
 from urllib.parse import urlparse
@@ -13,12 +14,14 @@ from buildbot.www.avatar import AvatarBase
 from buildbot_gitea.auth import GiteaAuth  # type: ignore[import]
 from buildbot_gitea.reporter import GiteaStatusPush  # type: ignore[import]
 from pydantic import BaseModel
+from twisted.internet import defer
 from twisted.logger import Logger
 from twisted.python import log
 
 from .common import (
     ThreadDeferredBuildStep,
     atomic_write_file,
+    filter_for_combined_builds,
     filter_repos_by_topic,
     http_request,
     model_dump_project_cache,
@@ -105,11 +108,43 @@ class GiteaProject(GitProject):
         return False  # self.data["owner"]["type"] == "Organization"
 
 
+class ModifyingGiteaStatusPush(GiteaStatusPush):
+    def checkConfig(
+        self,
+        modifyingFilter: Callable[[Any], Any | None] = lambda x: x,  # noqa: N803
+        **kwargs: Any,
+    ) -> Any:
+        self.modifyingFilter = modifyingFilter
+
+        return super().checkConfig(**kwargs)
+
+    def reconfigService(
+        self,
+        modifyingFilter: Callable[[Any], Any | None] = lambda x: x,  # noqa: N803
+        **kwargs: Any,
+    ) -> Any:
+        self.modifyingFilter = modifyingFilter
+
+        return super().reconfigService(**kwargs)
+
+    @defer.inlineCallbacks
+    def sendMessage(self, reports: Any) -> Any:
+        reports = self.modifyingFilter(reports)
+        if reports is None:
+            return
+
+        result = yield super().sendMessage(reports)
+        return result
+
+
 class GiteaBackend(GitBackend):
     config: GiteaConfig
+    webhook_secret: str
+    instance_url: str
 
-    def __init__(self, config: GiteaConfig) -> None:
+    def __init__(self, config: GiteaConfig, instance_url: str) -> None:
         self.config = config
+        self.instance_url = instance_url
 
     def create_reload_builder(self, worker_names: list[str]) -> BuilderConfig:
         """Updates the flake and opens a PR for it."""
@@ -118,7 +153,10 @@ class GiteaBackend(GitBackend):
             ReloadGiteaProjects(self.config, self.config.project_cache_file),
         )
         factory.addStep(
-            CreateGiteaProjectHooks(self.config),
+            CreateGiteaProjectHooks(
+                self.config,
+                self.instance_url,
+            ),
         )
         return util.BuilderConfig(
             name=self.reload_builder_name,
@@ -127,11 +165,12 @@ class GiteaBackend(GitBackend):
         )
 
     def create_reporter(self) -> ReporterBase:
-        return GiteaStatusPush(
-            self.config.instance_url,
-            Interpolate(self.config.token),
+        return ModifyingGiteaStatusPush(
+            baseURL=self.config.instance_url,
+            token=Interpolate(self.config.token),
             context=Interpolate("buildbot/%(prop:status_name)s"),
             context_pr=Interpolate("buildbot/%(prop:status_name)s"),
+            modifyingFilter=filter_for_combined_builds,
         )
 
     def create_change_hook(self) -> dict[str, Any]:
@@ -198,14 +237,19 @@ class GiteaBackend(GitBackend):
 
 def create_repo_hook(
-    token: str, webhook_secret: str, owner: str, repo: str, webhook_url: str
+    token: str,
+    webhook_secret: str,
+    owner: str,
+    repo: str,
+    gitea_url: str,
+    instance_url: str,
 ) -> None:
     hooks = paginated_github_request(
-        f"{webhook_url}/api/v1/repos/{owner}/{repo}/hooks?limit=100",
+        f"{gitea_url}/api/v1/repos/{owner}/{repo}/hooks?limit=100",
         token,
     )
     config = dict(
-        url=webhook_url + "change_hook/gitea",
+        url=instance_url + "change_hook/gitea",
         content_type="json",
         insecure_ssl="0",
         secret=webhook_secret,
@@ -223,13 +267,13 @@ def create_repo_hook(
         "Content-Type": "application/json",
     }
     for hook in hooks:
-        if hook["config"]["url"] == webhook_url + "change_hook/gitea":
+        if hook["config"]["url"] == instance_url + "change_hook/gitea":
             log.msg(f"hook for {owner}/{repo} already exists")
             return
 
     log.msg(f"creating hook for {owner}/{repo}")
     http_request(
-        f"{webhook_url}/api/v1/repos/{owner}/{repo}/hooks",
+        f"{gitea_url}/api/v1/repos/{owner}/{repo}/hooks",
         method="POST",
         headers=headers,
         data=data,
@@ -240,13 +284,16 @@ class CreateGiteaProjectHooks(ThreadDeferredBuildStep):
     name = "create_gitea_project_hooks"
 
     config: GiteaConfig
+    instance_url: str
 
     def __init__(
         self,
         config: GiteaConfig,
+        instance_url: str,
         **kwargs: Any,
     ) -> None:
         self.config = config
+        self.instance_url = instance_url
         super().__init__(**kwargs)
 
     def run_deferred(self) -> None:
@@ -254,11 +301,12 @@ class CreateGiteaProjectHooks(ThreadDeferredBuildStep):
 
         for repo in repos:
             create_repo_hook(
-                self.config.token,
-                self.config.webhook_secret,
-                repo.owner.login,
-                repo.name,
-                self.config.instance_url,
+                token=self.config.token,
+                webhook_secret=self.config.webhook_secret,
+                owner=repo.owner.login,
+                repo=repo.name,
+                gitea_url=self.config.instance_url,
+                instance_url=self.instance_url,
             )
 
     def run_post(self) -> Any:


@@ -2,6 +2,7 @@ import json
 import os
 import signal
 from abc import ABC, abstractmethod
+from collections.abc import Callable
 from dataclasses import dataclass
 from itertools import starmap
 from pathlib import Path
@@ -18,12 +19,14 @@ from buildbot.www.auth import AuthBase
 from buildbot.www.avatar import AvatarBase, AvatarGitHub
 from buildbot.www.oauth2 import GitHubAuth
 from pydantic import BaseModel, ConfigDict, Field
+from twisted.internet import defer
 from twisted.logger import Logger
 from twisted.python import log
 
 from .common import (
     ThreadDeferredBuildStep,
     atomic_write_file,
+    filter_for_combined_builds,
     filter_repos_by_topic,
     http_request,
     model_dump_project_cache,
@@ -310,6 +313,35 @@ class GithubAuthBackend(ABC):
         pass
 
 
+class ModifyingGitHubStatusPush(GitHubStatusPush):
+    def checkConfig(
+        self,
+        modifyingFilter: Callable[[Any], Any | None] = lambda x: x,  # noqa: N803
+        **kwargs: Any,
+    ) -> Any:
+        self.modifyingFilter = modifyingFilter
+
+        return super().checkConfig(**kwargs)
+
+    def reconfigService(
+        self,
+        modifyingFilter: Callable[[Any], Any | None] = lambda x: x,  # noqa: N803
+        **kwargs: Any,
+    ) -> Any:
+        self.modifyingFilter = modifyingFilter
+
+        return super().reconfigService(**kwargs)
+
+    @defer.inlineCallbacks
+    def sendMessage(self, reports: Any) -> Any:
+        reports = self.modifyingFilter(reports)
+        if reports is None:
+            return
+
+        result = yield super().sendMessage(reports)
+        return result
+
+
 class GithubLegacyAuthBackend(GithubAuthBackend):
     auth_type: GitHubLegacyConfig
@@ -329,12 +361,13 @@ class GithubLegacyAuthBackend(GithubAuthBackend):
         return [GitHubLegacySecretService(self.token)]
 
     def create_reporter(self) -> ReporterBase:
-        return GitHubStatusPush(
+        return ModifyingGitHubStatusPush(
             token=self.token.get(),
             # Since we dynamically create build steps,
             # we use `virtual_builder_name` in the webinterface
             # so that we distinguish what has been built
             context=Interpolate("buildbot/%(prop:status_name)s"),
+            modifyingFilter=filter_for_combined_builds,
         )
 
     def create_reload_builder_steps(
@@ -416,12 +449,13 @@ class GithubAppAuthBackend(GithubAuthBackend):
                 self.project_id_map[props["projectname"]]
             ].get()
 
-        return GitHubStatusPush(
+        return ModifyingGitHubStatusPush(
             token=WithProperties("%(github_token)s", github_token=get_github_token),
             # Since we dynamically create build steps,
             # we use `virtual_builder_name` in the webinterface
             # so that we distinguish what has been built
             context=Interpolate("buildbot/%(prop:status_name)s"),
+            modifyingFilter=filter_for_combined_builds,
         )
 
     def create_reload_builder_steps(


@@ -179,6 +179,7 @@ class BuildbotNixConfig(BaseModel):
     outputs_path: Path | None
     url: str
     post_build_steps: list[PostBuildStep]
+    job_report_limit: int | None
 
     @property
     def nix_workers_secret(self) -> str:


@@ -339,6 +339,17 @@ in
       default = null;
       example = "/var/www/buildbot/nix-outputs";
     };
+
+    jobReportLimit = lib.mkOption {
+      type = lib.types.nullOr lib.types.ints.unsigned;
+      description = ''
+        The maximum number of build jobs per `nix-eval` that `buildbot-nix` will report individually to backends (GitHub, Gitea, etc.).
+        If set to `null`, everything is reported. If set to `n` (an unsigned integer), builds are reported individually
+        as long as their number is less than or equal to `n`; beyond that, they are reported through a single combined
+        `nix-build-combined` build.
+      '';
+      default = 50;
+    };
   };
 };
config = lib.mkMerge [ config = lib.mkMerge [
@@ -487,6 +498,7 @@ in
             outputs_path = cfg.outputsPath;
             url = config.services.buildbot-nix.master.webhookBaseUrl;
             post_build_steps = cfg.postBuildSteps;
+            job_report_limit = if cfg.jobReportLimit == null then "None" else builtins.toJSON cfg.jobReportLimit;
           }}").read_text()))
         )
       ''
@@ -500,7 +512,9 @@ in
       dbUrl = config.services.buildbot-nix.master.dbUrl;
       package = cfg.buildbotNixpkgs.buildbot.overrideAttrs (old: {
-        patches = old.patches ++ [ ./0001-master-reporters-github-render-token-for-each-reques.patch ];
+        patches = old.patches ++ [
+          ./0001-master-reporters-github-render-token-for-each-reques.patch
+        ];
       });
       pythonPackages =
         let