Merge pull request #11 from Mic92/fixes

remove incorrect skip if path exists
commit 823212d601
Author: Jörg Thalheim
Date:   2023-10-15 11:42:03 +02:00 (committed by GitHub)
2 changed files with 28 additions and 27 deletions

File 1 of 2 (Python module):

@@ -3,6 +3,7 @@
 import json
 import multiprocessing
 import os
+import random
 import signal
 import sys
 import uuid
@@ -209,12 +210,6 @@ class NixBuildCommand(buildstep.ShellMixin, steps.BuildStep):
             log: Log = yield self.addLog("nix_error")
             log.addStderr(f"{attr} failed to evaluate:\n{error}")
             return util.FAILURE
-        path = self.getProperty("out_path")
-
-        # FIXME: actually we should check if it exists in the remote machine
-        if os.path.exists(path):
-            # build already succeeded
-            return util.SKIPPED
 
         # run `nix build`
         cmd: remotecommand.RemoteCommand = yield self.makeRemoteShellCommand()
@@ -438,13 +433,7 @@ def nix_eval_config(
            name="evaluate flake",
            supported_systems=supported_systems,
            command=[
-                "nix",
-                "run",
-                "--option",
-                "accept-flake-config",
-                "true",
-                "github:nix-community/nix-eval-jobs",
-                "--",
+                "nix-eval-jobs",
                "--workers",
                multiprocessing.cpu_count(),
                "--max-memory-size",
@@ -585,6 +574,11 @@ def config_for_project(
     nix_supported_systems: list[str],
     nix_eval_max_memory_size: int,
 ) -> Project:
+    ## get a deterministic jitter for the project
+    #random.seed(project.name)
+    ## don't run all projects at the same time
+    #jitter = random.randint(1, 60) * 60
+
     config["projects"].append(Project(project.name))
     config["schedulers"].extend(
         [
@@ -629,14 +623,12 @@ def config_for_project(
                builderNames=[f"{project.name}/update-flake"],
                buttonName="Update flakes",
            ),
-            # updates flakes once a weeek
-            schedulers.NightlyTriggerable(
-                name=f"{project.id}-update-flake-weekly",
-                builderNames=[f"{project.name}/update-flake"],
-                hour=3,
-                minute=0,
-                dayOfWeek=6,
-            ),
+            # updates flakes once a week
+            #schedulers.Periodic(
+            #    name=f"{project.id}-update-flake-weekly",
+            #    builderNames=[f"{project.name}/update-flake"],
+            #    periodicBuildTimer=24 * 60 * 60 * 7 + jitter,
+            #),
         ]
     )
     has_cachix_auth_token = os.path.isfile(
@@ -739,12 +731,20 @@ class NixConfigurator(ConfiguratorBase):
                    self.github.project_cache_file,
                )
            )
-        config["schedulers"].append(
-            schedulers.ForceScheduler(
-                name="reload-github-projects",
-                builderNames=["reload-github-projects"],
-                buttonName="Update projects",
-            )
+        config["schedulers"].extend(
+            [
+                schedulers.ForceScheduler(
+                    name="reload-github-projects",
+                    builderNames=["reload-github-projects"],
+                    buttonName="Update projects",
+                ),
+                # project list twice a day
+                schedulers.Periodic(
+                    name="reload-github-projects-bidaily",
+                    builderNames=["reload-github-projects"],
+                    periodicBuildTimer=12 * 60 * 60,
+                ),
+            ]
         )
        config["services"] = config.get("services", [])
        config["services"].append(

File 2 of 2 (NixOS module):

@@ -53,6 +53,7 @@ in
        pkgs.openssh
        pkgs.gh
        pkgs.nix
+        pkgs.nix-eval-jobs
      ];
      environment.PYTHONPATH = "${python.withPackages (_: [cfg.package])}/${python.sitePackages}";
      environment.MASTER_URL = ''tcp:host=localhost:port=9989'';
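
The commented-out jitter lines added to config_for_project hint at how the (also commented-out) weekly flake-update scheduler could be staggered so that projects do not all fire at the same time. Purely as an illustration of that arithmetic, with a made-up project name:

# Illustration only: the jitter arithmetic hinted at by the commented-out lines above.
import random

random.seed("example/project")            # deterministic: same project name, same jitter
jitter = random.randint(1, 60) * 60       # 60..3600 seconds (1 to 60 minutes)
weekly_timer = 7 * 24 * 60 * 60 + jitter  # candidate periodicBuildTimer for a weekly scheduler
print(weekly_timer)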