lint: reformat everything

parent 79813f3f0f
commit d9b1d23b5e
@@ -1,6 +1,7 @@
-let requiredVersion = import ./minver.nix; in
+let requiredVersion = import ./minver.nix;

-if ! builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
+in if !builtins ? nixVersion
+|| builtins.compareVersions requiredVersion builtins.nixVersion == 1 then

 abort ''
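A note on the guard above: builtins.compareVersions returns -1, 0, or 1, so the == 1 test fires exactly when requiredVersion is strictly newer than the running Nix. A minimal sketch, with illustrative version strings:

    builtins.compareVersions "2.18" "2.18"  # =>  0  (equal)
    builtins.compareVersions "2.3"  "2.18"  # => -1  (first argument is older)
    builtins.compareVersions "2.18" "2.3"   # =>  1  (first argument is newer)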
flake.nix | 24

@@ -1,22 +1,19 @@
 {
-inputs = {
-auxlib.url = "github:auxolotl/lib";
-};
+inputs = { auxlib.url = "github:auxolotl/lib"; };

-outputs =
-{ self, auxlib, ... }:
+outputs = { self, auxlib, ... }:
 let
 inherit (auxlib) lib;
 forAllSystems = self.lib.genAttrs self.lib.systems.flakeExposed;
-in
-{
+in {
 inherit lib;

 auxPackages = forAllSystems (system:
-(
-let requiredVersion = import ./minver.nix; in
+(let requiredVersion = import ./minver.nix;

-if ! builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
+in if !builtins ? nixVersion
+|| builtins.compareVersions requiredVersion builtins.nixVersion
+== 1 then
 abort ''
 This version of Nixpkgs requires Nix >= ${requiredVersion}, please upgrade:

@@ -37,8 +34,9 @@
 If you need further help, see https://nixos.org/nixos/support.html
 ''
 else
-import ./pkgs/top-level/default.nix { inherit lib; localSystem = system; }
-)
-);
+import ./pkgs/top-level/default.nix {
+inherit lib;
+localSystem = system;
+}));
 };
 }
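For reference, the forAllSystems helper in the hunk above is lib.genAttrs applied to lib.systems.flakeExposed; genAttrs builds an attribute set from a list of names. A minimal sketch, with illustrative system names:

    lib.genAttrs [ "x86_64-linux" "aarch64-darwin" ] (system: "packages for ${system}")
    # => { aarch64-darwin = "packages for aarch64-darwin";
    #      x86_64-linux = "packages for x86_64-linux"; }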
@@ -35,14 +35,8 @@ with lib.maintainers; {
 shortName = "LLVM";
 enableFeatureFreezePing = true;
 };
-lix = {
-members = [];
-};
-python = {
-members = [];
-};
-rust = {
-members = [];
-};
+lix = { members = [ ]; };
+python = { members = [ ]; };
+rust = { members = [ ]; };
 }
@@ -1,28 +1,15 @@
 # Builder for Agda packages.

-{ stdenv, lib, self, Agda, runCommand, makeWrapper, writeText, ghcWithPackages, nixosTests }:
+{ stdenv, lib, self, Agda, runCommand, makeWrapper, writeText, ghcWithPackages
+, nixosTests }:

 let
-inherit (lib)
-attrValues
-elem
-filter
-filterAttrs
-isAttrs
-isList
-platforms
-;
+inherit (lib) attrValues elem filter filterAttrs isAttrs isList platforms;

-inherit (lib.strings)
-concatMapStrings
-concatMapStringsSep
-optionalString
-;
+inherit (lib.strings) concatMapStrings concatMapStringsSep optionalString;

-withPackages' = {
-pkgs,
-ghc ? ghcWithPackages (p: with p; [ ieee754 ])
-}: let
+withPackages' = { pkgs, ghc ? ghcWithPackages (p: with p; [ ieee754 ]) }:
+let
 pkgs' = if isList pkgs then pkgs else pkgs self;
 library-file = writeText "libraries" ''
 ${(concatMapStringsSep "\n" (p: "${p}/${p.libraryFile}") pkgs')}

@@ -37,7 +24,8 @@ let
 inherit withPackages;
 tests = {
 inherit (nixosTests) agda;
-allPackages = withPackages (filter self.lib.isUnbrokenAgdaPackage (attrValues self));
+allPackages = withPackages
+(filter self.lib.isUnbrokenAgdaPackage (attrValues self));
 };
 };
 # Agda is a split package with multiple outputs; do not inherit them here.

@@ -50,7 +38,8 @@ let
 ln -s ${Agda.bin}/bin/agda-mode $out/bin/agda-mode
 '';

-withPackages = arg: if isAttrs arg then withPackages' arg else withPackages' { pkgs = arg; };
+withPackages = arg:
+if isAttrs arg then withPackages' arg else withPackages' { pkgs = arg; };

 extensions = [
 "agda"

@@ -64,40 +53,40 @@ let
 "lagda.typ"
 ];

-defaults =
-{ pname
-, meta
-, buildInputs ? []
-, everythingFile ? "./Everything.agda"
-, includePaths ? []
-, libraryName ? pname
-, libraryFile ? "${libraryName}.agda-lib"
-, buildPhase ? null
-, installPhase ? null
-, extraExtensions ? []
-, ...
-}: let
-agdaWithArgs = withPackages (filter (p: p ? isAgdaDerivation) buildInputs);
-includePathArgs = concatMapStrings (path: "-i" + path + " ") (includePaths ++ [(dirOf everythingFile)]);
-in
-{
+defaults = { pname, meta, buildInputs ? [ ]
+, everythingFile ? "./Everything.agda", includePaths ? [ ]
+, libraryName ? pname, libraryFile ? "${libraryName}.agda-lib"
+, buildPhase ? null, installPhase ? null, extraExtensions ? [ ], ... }:
+let
+agdaWithArgs =
+withPackages (filter (p: p ? isAgdaDerivation) buildInputs);
+includePathArgs = concatMapStrings (path: "-i" + path + " ")
+(includePaths ++ [ (dirOf everythingFile) ]);
+in {
 inherit libraryName libraryFile;

 isAgdaDerivation = true;

 buildInputs = buildInputs ++ [ agdaWithArgs ];

-buildPhase = if buildPhase != null then buildPhase else ''
+buildPhase = if buildPhase != null then
+buildPhase
+else ''
 runHook preBuild
 agda ${includePathArgs} ${everythingFile}
 rm ${everythingFile} ${lib.interfaceFile Agda.version everythingFile}
 runHook postBuild
 '';

-installPhase = if installPhase != null then installPhase else ''
+installPhase = if installPhase != null then
+installPhase
+else ''
 runHook preInstall
 mkdir -p $out
-find \( ${concatMapStringsSep " -or " (p: "-name '*.${p}'") (extensions ++ extraExtensions)} \) -exec cp -p --parents -t "$out" {} +
+find \( ${
+concatMapStringsSep " -or " (p: "-name '*.${p}'")
+(extensions ++ extraExtensions)
+} \) -exec cp -p --parents -t "$out" {} +
 runHook postInstall
 '';

@@ -108,14 +97,17 @@ let
 # set this only on non-darwin.
 LC_ALL = optionalString (!stdenv.isDarwin) "C.UTF-8";

-meta = if meta.broken or false then meta // { hydraPlatforms = platforms.none; } else meta;
+meta = if meta.broken or false then
+meta // { hydraPlatforms = platforms.none; }
+else
+meta;

 # Retrieve all packages from the finished package set that have the current package as a dependency and build them
-passthru.tests =
-filterAttrs (name: pkg: self.lib.isUnbrokenAgdaPackage pkg && elem pname (map (pkg: pkg.pname) pkg.buildInputs)) self;
+passthru.tests = filterAttrs (name: pkg:
+self.lib.isUnbrokenAgdaPackage pkg
+&& elem pname (map (pkg: pkg.pname) pkg.buildInputs)) self;
 };
-in
-{
+in {
 mkDerivation = args: stdenv.mkDerivation (args // defaults args);

 inherit withPackages withPackages';
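As the withPackages wrapper above shows, a plain list argument is rewrapped as { pkgs = arg; }, so both call styles below should be equivalent; a sketch based on that definition (package names illustrative):

    agda.withPackages [ standard-library ]
    agda.withPackages {
      pkgs = [ standard-library ];
      # ghc can also be overridden here, per the withPackages' signature above
    }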
@@ -1,17 +1,18 @@
-{ lib }:
-{
-/* Returns the Agda interface file to a given Agda file.
-*
-* The resulting path may not be normalized.
-*
-* Examples:
-* interfaceFile pkgs.agda.version "./Everything.agda" == "_build/2.6.4.3/agda/./Everything.agdai"
-* interfaceFile pkgs.agda.version "src/Everything.lagda.tex" == "_build/2.6.4.3/agda/src/Everything.agdai"
-*/
-interfaceFile = agdaVersion: agdaFile: "_build/" + agdaVersion + "/agda/" + lib.head (builtins.match ''(.*\.)l?agda(\.(md|org|rst|tex|typ))?'' agdaFile) + "agdai";
+{ lib }: {
+# Returns the Agda interface file to a given Agda file.
+#
+# The resulting path may not be normalized.
+#
+# Examples:
+# interfaceFile pkgs.agda.version "./Everything.agda" == "_build/2.6.4.3/agda/./Everything.agdai"
+# interfaceFile pkgs.agda.version "src/Everything.lagda.tex" == "_build/2.6.4.3/agda/src/Everything.agdai"
+interfaceFile = agdaVersion: agdaFile:
+"_build/" + agdaVersion + "/agda/" + lib.head
+(builtins.match "(.*\\.)l?agda(\\.(md|org|rst|tex|typ))?" agdaFile)
++ "agdai";

-/* Takes an arbitrary derivation and says whether it is an agda library package
-* that is not marked as broken.
-*/
-isUnbrokenAgdaPackage = pkg: pkg.isAgdaDerivation or false && !pkg.meta.broken;
+# Takes an arbitrary derivation and says whether it is an agda library package
+# that is not marked as broken.
+isUnbrokenAgdaPackage = pkg:
+pkg.isAgdaDerivation or false && !pkg.meta.broken;
 }
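The interfaceFile rewrite above only changes string syntax, not the regex: inside a Nix indented string (''...'') a backslash is literal, while inside a double-quoted string it must be doubled. A quick sketch of the equivalence, using the second example from the comment:

    builtins.match ''(.*\.)l?agda(\.(md|org|rst|tex|typ))?'' "src/Everything.lagda.tex"
    builtins.match "(.*\\.)l?agda(\\.(md|org|rst|tex|typ))?" "src/Everything.lagda.tex"
    # both => [ "src/Everything." ".tex" "tex" ]; lib.head then yields "src/Everything."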
@@ -1,44 +1,174 @@
-{ lib, stdenv
-, lapack-reference, openblas
-, isILP64 ? false
+{ lib, stdenv, lapack-reference, openblas, isILP64 ? false
 , blasProvider ? openblas }:

 let
 blasFortranSymbols = [
-"caxpy" "ccopy" "cdotc" "cdotu" "cgbmv" "cgemm" "cgemv" "cgerc" "cgeru"
-"chbmv" "chemm" "chemv" "cher" "cher2" "cher2k" "cherk" "chpmv" "chpr"
-"chpr2" "crotg" "cscal" "csrot" "csscal" "cswap" "csymm" "csyr2k" "csyrk"
-"ctbmv" "ctbsv" "ctpmv" "ctpsv" "ctrmm" "ctrmv" "ctrsm" "ctrsv" "dasum"
-"daxpy" "dcabs1" "dcopy" "ddot" "dgbmv" "dgemm" "dgemv" "dger" "dnrm2"
-"drot" "drotg" "drotm" "drotmg" "dsbmv" "dscal" "dsdot" "dspmv" "dspr"
-"dspr2" "dswap" "dsymm" "dsymv" "dsyr" "dsyr2" "dsyr2k" "dsyrk" "dtbmv"
-"dtbsv" "dtpmv" "dtpsv" "dtrmm" "dtrmv" "dtrsm" "dtrsv" "dzasum" "dznrm2"
-"icamax" "idamax" "isamax" "izamax" "lsame" "sasum" "saxpy" "scabs1"
-"scasum" "scnrm2" "scopy" "sdot" "sdsdot" "sgbmv" "sgemm" "sgemv"
-"sger" "snrm2" "srot" "srotg" "srotm" "srotmg" "ssbmv" "sscal" "sspmv"
-"sspr" "sspr2" "sswap" "ssymm" "ssymv" "ssyr" "ssyr2" "ssyr2k" "ssyrk"
-"stbmv" "stbsv" "stpmv" "stpsv" "strmm" "strmv" "strsm" "strsv" "xerbla"
-"xerbla_array" "zaxpy" "zcopy" "zdotc" "zdotu" "zdrot" "zdscal" "zgbmv"
-"zgemm" "zgemv" "zgerc" "zgeru" "zhbmv" "zhemm" "zhemv" "zher" "zher2"
-"zher2k" "zherk" "zhpmv" "zhpr" "zhpr2" "zrotg" "zscal" "zswap" "zsymm"
-"zsyr2k" "zsyrk" "ztbmv" "ztbsv" "ztpmv" "ztpsv" "ztrmm" "ztrmv" "ztrsm"
+"caxpy"
+"ccopy"
+"cdotc"
+"cdotu"
+"cgbmv"
+"cgemm"
+"cgemv"
+"cgerc"
+"cgeru"
+"chbmv"
+"chemm"
+"chemv"
+"cher"
+"cher2"
+"cher2k"
+"cherk"
+"chpmv"
+"chpr"
+"chpr2"
+"crotg"
+"cscal"
+"csrot"
+"csscal"
+"cswap"
+"csymm"
+"csyr2k"
+"csyrk"
+"ctbmv"
+"ctbsv"
+"ctpmv"
+"ctpsv"
+"ctrmm"
+"ctrmv"
+"ctrsm"
+"ctrsv"
+"dasum"
+"daxpy"
+"dcabs1"
+"dcopy"
+"ddot"
+"dgbmv"
+"dgemm"
+"dgemv"
+"dger"
+"dnrm2"
+"drot"
+"drotg"
+"drotm"
+"drotmg"
+"dsbmv"
+"dscal"
+"dsdot"
+"dspmv"
+"dspr"
+"dspr2"
+"dswap"
+"dsymm"
+"dsymv"
+"dsyr"
+"dsyr2"
+"dsyr2k"
+"dsyrk"
+"dtbmv"
+"dtbsv"
+"dtpmv"
+"dtpsv"
+"dtrmm"
+"dtrmv"
+"dtrsm"
+"dtrsv"
+"dzasum"
+"dznrm2"
+"icamax"
+"idamax"
+"isamax"
+"izamax"
+"lsame"
+"sasum"
+"saxpy"
+"scabs1"
+"scasum"
+"scnrm2"
+"scopy"
+"sdot"
+"sdsdot"
+"sgbmv"
+"sgemm"
+"sgemv"
+"sger"
+"snrm2"
+"srot"
+"srotg"
+"srotm"
+"srotmg"
+"ssbmv"
+"sscal"
+"sspmv"
+"sspr"
+"sspr2"
+"sswap"
+"ssymm"
+"ssymv"
+"ssyr"
+"ssyr2"
+"ssyr2k"
+"ssyrk"
+"stbmv"
+"stbsv"
+"stpmv"
+"stpsv"
+"strmm"
+"strmv"
+"strsm"
+"strsv"
+"xerbla"
+"xerbla_array"
+"zaxpy"
+"zcopy"
+"zdotc"
+"zdotu"
+"zdrot"
+"zdscal"
+"zgbmv"
+"zgemm"
+"zgemv"
+"zgerc"
+"zgeru"
+"zhbmv"
+"zhemm"
+"zhemv"
+"zher"
+"zher2"
+"zher2k"
+"zherk"
+"zhpmv"
+"zhpr"
+"zhpr2"
+"zrotg"
+"zscal"
+"zswap"
+"zsymm"
+"zsyr2k"
+"zsyrk"
+"ztbmv"
+"ztbsv"
+"ztpmv"
+"ztpsv"
+"ztrmm"
+"ztrmv"
+"ztrsm"
+"ztrsv"
 ];

 version = "3";
-canonicalExtension = if stdenv.hostPlatform.isLinux
-then "${stdenv.hostPlatform.extensions.sharedLibrary}.${version}"
-else stdenv.hostPlatform.extensions.sharedLibrary;
-
+canonicalExtension = if stdenv.hostPlatform.isLinux then
+"${stdenv.hostPlatform.extensions.sharedLibrary}.${version}"
+else
+stdenv.hostPlatform.extensions.sharedLibrary;

 blasImplementation = lib.getName blasProvider;
-blasProvider' = if blasImplementation == "mkl"
-then blasProvider
-else blasProvider.override { blas64 = isILP64; };
+blasProvider' = if blasImplementation == "mkl" then
+blasProvider
+else
+blasProvider.override { blas64 = isILP64; };

-in
-
-assert isILP64 -> blasImplementation == "mkl" || blasProvider'.blas64;
+in assert isILP64 -> blasImplementation == "mkl" || blasProvider'.blas64;

 stdenv.mkDerivation {
 pname = "blas";

@@ -47,7 +177,8 @@ stdenv.mkDerivation {
 outputs = [ "out" "dev" ];

 meta = (blasProvider'.meta or { }) // {
-description = "${lib.getName blasProvider} with just the BLAS C and FORTRAN ABI";
+description =
+"${lib.getName blasProvider} with just the BLAS C and FORTRAN ABI";
 };

 passthru = {

@@ -82,8 +213,11 @@ stdenv.mkDerivation {

 '' + (if stdenv.hostPlatform.isElf then ''
 patchelf --set-soname libblas${canonicalExtension} $out/lib/libblas${canonicalExtension}
-patchelf --set-rpath "$(patchelf --print-rpath $out/lib/libblas${canonicalExtension}):${lib.getLib blasProvider'}/lib" $out/lib/libblas${canonicalExtension}
-'' else lib.optionalString (stdenv.hostPlatform.isDarwin) ''
+patchelf --set-rpath "$(patchelf --print-rpath $out/lib/libblas${canonicalExtension}):${
+lib.getLib blasProvider'
+}/lib" $out/lib/libblas${canonicalExtension}
+'' else
+lib.optionalString (stdenv.hostPlatform.isDarwin) ''
 install_name_tool \
 -id $out/lib/libblas${canonicalExtension} \
 -add_rpath ${lib.getLib blasProvider'}/lib \

@@ -114,8 +248,11 @@ EOF

 '' + (if stdenv.hostPlatform.isElf then ''
 patchelf --set-soname libcblas${canonicalExtension} $out/lib/libcblas${canonicalExtension}
-patchelf --set-rpath "$(patchelf --print-rpath $out/lib/libcblas${canonicalExtension}):${lib.getLib blasProvider'}/lib" $out/lib/libcblas${canonicalExtension}
-'' else lib.optionalString stdenv.hostPlatform.isDarwin ''
+patchelf --set-rpath "$(patchelf --print-rpath $out/lib/libcblas${canonicalExtension}):${
+lib.getLib blasProvider'
+}/lib" $out/lib/libcblas${canonicalExtension}
+'' else
+lib.optionalString stdenv.hostPlatform.isDarwin ''
 install_name_tool \
 -id $out/lib/libcblas${canonicalExtension} \
 -add_rpath ${lib.getLib blasProvider'}/lib \

@@ -125,7 +262,9 @@ EOF
 ln -s $out/lib/libcblas${canonicalExtension} "$out/lib/libcblas${stdenv.hostPlatform.extensions.sharedLibrary}"
 fi

-cp ${lib.getDev lapack-reference}/include/cblas{,_mangling}.h $dev/include
+cp ${
+lib.getDev lapack-reference
+}/include/cblas{,_mangling}.h $dev/include

 cat <<EOF > $dev/lib/pkgconfig/cblas.pc
 Name: cblas

@@ -136,7 +275,9 @@ Libs: -L$out/lib -lcblas
 EOF
 '' + lib.optionalString (blasImplementation == "mkl") ''
 mkdir -p $out/nix-support
-echo 'export MKL_INTERFACE_LAYER=${lib.optionalString isILP64 "I"}LP64,GNU' > $out/nix-support/setup-hook
+echo 'export MKL_INTERFACE_LAYER=${
+lib.optionalString isILP64 "I"
+}LP64,GNU' > $out/nix-support/setup-hook
 ln -s $out/lib/libblas${canonicalExtension} $out/lib/libmkl_rt${stdenv.hostPlatform.extensions.sharedLibrary}
 ln -sf ${blasProvider'}/include/* $dev/include
 '');
@@ -1,23 +1,21 @@
-{ lib, stdenv
-, lapack-reference, openblas
-, isILP64 ? false
+{ lib, stdenv, lapack-reference, openblas, isILP64 ? false
 , lapackProvider ? openblas }:

 let

 version = "3";
-canonicalExtension = if stdenv.hostPlatform.isLinux
-then "${stdenv.hostPlatform.extensions.sharedLibrary}.${version}"
-else stdenv.hostPlatform.extensions.sharedLibrary;
+canonicalExtension = if stdenv.hostPlatform.isLinux then
+"${stdenv.hostPlatform.extensions.sharedLibrary}.${version}"
+else
+stdenv.hostPlatform.extensions.sharedLibrary;

 lapackImplementation = lib.getName lapackProvider;
-lapackProvider' = if lapackImplementation == "mkl"
-then lapackProvider
-else lapackProvider.override { blas64 = isILP64; };
+lapackProvider' = if lapackImplementation == "mkl" then
+lapackProvider
+else
+lapackProvider.override { blas64 = isILP64; };

-in
-
-assert isILP64 -> lapackImplementation == "mkl" || lapackProvider'.blas64;
+in assert isILP64 -> lapackImplementation == "mkl" || lapackProvider'.blas64;

 stdenv.mkDerivation {
 pname = "lapack";

@@ -26,7 +24,8 @@ stdenv.mkDerivation {
 outputs = [ "out" "dev" ];

 meta = (lapackProvider'.meta or { }) // {
-description = "${lib.getName lapackProvider'} with just the LAPACK C and FORTRAN ABI";
+description =
+"${lib.getName lapackProvider'} with just the LAPACK C and FORTRAN ABI";
 };

 passthru = {

@@ -66,7 +65,9 @@ stdenv.mkDerivation {
 ln -s $out/lib/liblapack${canonicalExtension} "$out/lib/liblapack${stdenv.hostPlatform.extensions.sharedLibrary}"
 fi

-install -D ${lib.getDev lapack-reference}/include/lapack.h $dev/include/lapack.h
+install -D ${
+lib.getDev lapack-reference
+}/include/lapack.h $dev/include/lapack.h

 cat <<EOF > $dev/lib/pkgconfig/lapack.pc
 Name: lapack

@@ -76,7 +77,9 @@ Cflags: -I$dev/include
 Libs: -L$out/lib -llapack
 EOF

-liblapacke="${lib.getLib lapackProvider'}/lib/liblapacke${canonicalExtension}"
+liblapacke="${
+lib.getLib lapackProvider'
+}/lib/liblapacke${canonicalExtension}"

 if ! [ -e "$liblapacke" ]; then
 echo "$liblapacke does not exist, ${lapackProvider'.name} does not provide liblapacke."

@@ -88,14 +91,18 @@ EOF

 '' + (lib.optionalString stdenv.hostPlatform.isElf ''
 patchelf --set-soname liblapacke${canonicalExtension} $out/lib/liblapacke${canonicalExtension}
-patchelf --set-rpath "$(patchelf --print-rpath $out/lib/liblapacke${canonicalExtension}):${lib.getLib lapackProvider'}/lib" $out/lib/liblapacke${canonicalExtension}
+patchelf --set-rpath "$(patchelf --print-rpath $out/lib/liblapacke${canonicalExtension}):${
+lib.getLib lapackProvider'
+}/lib" $out/lib/liblapacke${canonicalExtension}
 '') + ''

 if [ -f "$out/lib/liblapacke.so.3" ]; then
 ln -s $out/lib/liblapacke.so.3 $out/lib/liblapacke.so
 fi

-cp ${lib.getDev lapack-reference}/include/lapacke{,_mangling,_config,_utils}.h $dev/include
+cp ${
+lib.getDev lapack-reference
+}/include/lapacke{,_mangling,_config,_utils}.h $dev/include

 cat <<EOF > $dev/lib/pkgconfig/lapacke.pc
 Name: lapacke

@@ -106,7 +113,9 @@ Libs: -L$out/lib -llapacke
 EOF
 '' + lib.optionalString (lapackImplementation == "mkl") ''
 mkdir -p $out/nix-support
-echo 'export MKL_INTERFACE_LAYER=${lib.optionalString isILP64 "I"}LP64,GNU' > $out/nix-support/setup-hook
+echo 'export MKL_INTERFACE_LAYER=${
+lib.optionalString isILP64 "I"
+}LP64,GNU' > $out/nix-support/setup-hook
 ln -s $out/lib/liblapack${canonicalExtension} $out/lib/libmkl_rt${stdenv.hostPlatform.extensions.sharedLibrary}
 ln -sf ${lapackProvider'}/include/* $dev/include
 '');
@@ -1,14 +1,5 @@
-{ lib
-, bash
-, binutils-unwrapped
-, coreutils
-, gawk
-, libarchive
-, pv
-, squashfsTools
-, buildFHSEnv
-, pkgs
-}:
+{ lib, bash, binutils-unwrapped, coreutils, gawk, libarchive, pv, squashfsTools
+, buildFHSEnv, pkgs }:

 rec {
 appimage-exec = pkgs.substituteAll {

@@ -26,9 +17,9 @@ rec {
 ];
 };

-extract = args@{ name ? "${args.pname}-${args.version}", postExtract ? "", src, ... }: pkgs.runCommand "${name}-extracted" {
-buildInputs = [ appimage-exec ];
-} ''
+extract =
+args@{ name ? "${args.pname}-${args.version}", postExtract ? "", src, ... }:
+pkgs.runCommand "${name}-extracted" { buildInputs = [ appimage-exec ]; } ''
 appimage-exec.sh -x $out ${src}
 ${postExtract}
 '';

@@ -38,40 +29,38 @@ rec {
 extractType2 = extract;
 wrapType1 = wrapType2;

-wrapAppImage = args@{
-src,
-extraPkgs,
-meta ? {},
-...
-}: buildFHSEnv
-(defaultFhsEnvArgs // {
-targetPkgs = pkgs: [ appimage-exec ]
-++ defaultFhsEnvArgs.targetPkgs pkgs ++ extraPkgs pkgs;
+wrapAppImage = args@{ src, extraPkgs, meta ? { }, ... }:
+buildFHSEnv (defaultFhsEnvArgs // {
+targetPkgs = pkgs:
+[ appimage-exec ] ++ defaultFhsEnvArgs.targetPkgs pkgs
+++ extraPkgs pkgs;

 runScript = "appimage-exec.sh -w ${src} --";

 meta = {
 sourceProvenance = with lib.sourceTypes; [ binaryNativeCode ];
 } // meta;
-} // (removeAttrs args (builtins.attrNames (builtins.functionArgs wrapAppImage))));
+} // (removeAttrs args
+(builtins.attrNames (builtins.functionArgs wrapAppImage))));

-wrapType2 = args@{ src, extraPkgs ? pkgs: [ ], ... }: wrapAppImage
-(args // {
+wrapType2 = args@{ src, extraPkgs ? pkgs: [ ], ... }:
+wrapAppImage (args // {
 inherit extraPkgs;
-src = extract (lib.filterAttrs (key: value: builtins.elem key [ "name" "pname" "version" "src" ]) args);
+src = extract (lib.filterAttrs
+(key: value: builtins.elem key [ "name" "pname" "version" "src" ])
+args);

 # passthru src to make nix-update work
 # hack to keep the origin position (unsafeGetAttrPos)
-passthru = lib.pipe args [
-lib.attrNames
-(lib.remove "src")
-(removeAttrs args)
-] // args.passthru or { };
+passthru =
+lib.pipe args [ lib.attrNames (lib.remove "src") (removeAttrs args) ]
+// args.passthru or { };
 });

 defaultFhsEnvArgs = {
 # Most of the packages were taken from the Steam chroot
-targetPkgs = pkgs: with pkgs; [
+targetPkgs = pkgs:
+with pkgs; [
 gtk3
 bashInteractive
 gnome.zenity

@@ -87,7 +76,8 @@ rec {

 # list of libraries expected in an appimage environment:
 # https://github.com/AppImage/pkg2appimage/blob/master/excludelist
-multiPkgs = pkgs: with pkgs; [
+multiPkgs = pkgs:
+with pkgs; [
 desktop-file-utils
 xorg.libXcomposite
 xorg.libXtst
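The passthru trick in the hunk above leans on lib.pipe, which threads a value through a list of functions from left to right, so lib.pipe x [ f g ] is g (f x). Here it strips every attribute except src from args, keeping the original source position. A standalone sketch with illustrative values:

    let args = { pname = "demo"; src = ./demo.AppImage; };
    in lib.pipe args [
      builtins.attrNames   # => [ "pname" "src" ]
      (lib.remove "src")   # => [ "pname" ]
      (removeAttrs args)   # => { src = ./demo.AppImage; }
    ]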
@@ -6,9 +6,7 @@
 # For example, in the Nixpkgs repo:
 # nix-build -E 'with import ./. {}; mkBinaryCache { rootPaths = [hello]; }'

-{ name ? "binary-cache"
-, rootPaths
-}:
+{ name ? "binary-cache", rootPaths }:

 stdenv.mkDerivation {
 inherit name;
@@ -5,14 +5,9 @@
 # script that sets up the right environment variables so that the
 # compiler and the linker just "work".

-{ name ? ""
-, lib
-, stdenvNoCC
-, runtimeShell
-, bintools ? null, libc ? null, coreutils ? null, gnugrep ? null
-, netbsd ? null, netbsdCross ? null
-, sharedLibraryLoader ?
-if libc == null then
+{ name ? "", lib, stdenvNoCC, runtimeShell, bintools ? null, libc ? null
+, coreutils ? null, gnugrep ? null, netbsd ? null, netbsdCross ? null
+, sharedLibraryLoader ? if libc == null then
 null
 else if stdenvNoCC.targetPlatform.isNetBSD then
 if !(targetPackages ? netbsdCross) then

@@ -22,16 +17,11 @@
 else
 null
 else
-lib.getLib libc
-, nativeTools, noLibc ? false, nativeLibc, nativePrefix ? ""
-, propagateDoc ? bintools != null && bintools ? man
-, extraPackages ? [], extraBuildCommands ? ""
-, isGNU ? bintools.isGNU or false
-, isLLVM ? bintools.isLLVM or false
-, isCCTools ? bintools.isCCTools or false
-, expand-response-params
-, targetPackages ? {}
-, useMacosReexportHack ? false
+lib.getLib libc, nativeTools, noLibc ? false, nativeLibc, nativePrefix ? ""
+, propagateDoc ? bintools != null && bintools ? man, extraPackages ? [ ]
+, extraBuildCommands ? "", isGNU ? bintools.isGNU or false
+, isLLVM ? bintools.isLLVM or false, isCCTools ? bintools.isCCTools or false
+, expand-response-params, targetPackages ? { }, useMacosReexportHack ? false
 , wrapGas ? false

 # Note: the hardening flags are part of the bintools-wrapper, rather than

@@ -53,12 +43,10 @@
 # Except when:
 # - static aarch64, where compilation works, but produces segfaulting dynamically linked binaries.
 # - static armv7l, where compilation fails.
-&& !(targetPlatform.isAarch && targetPlatform.isStatic)
-) "pie"
+&& !(targetPlatform.isAarch && targetPlatform.isStatic)) "pie"

 # Darwin code signing support utilities
-, postLinkSignHook ? null, signingUtils ? null
-}:
+, postLinkSignHook ? null, signingUtils ? null }:

 assert nativeTools -> !propagateDoc && nativePrefix != "";
 assert !nativeTools -> bintools != null && coreutils != null && gnugrep != null;

@@ -67,22 +55,9 @@ assert (noLibc || nativeLibc) == (libc == null);

 let
 inherit (lib)
-attrByPath
-concatStringsSep
-getBin
-getDev
-getLib
-getName
-getVersion
-hasSuffix
-optional
-optionalAttrs
-optionals
-optionalString
-platforms
-removePrefix
-replaceStrings
-;
+attrByPath concatStringsSep getBin getDev getLib getName getVersion
+hasSuffix optional optionalAttrs optionals optionalString platforms
+removePrefix replaceStrings;

 inherit (stdenvNoCC) hostPlatform targetPlatform;

@@ -108,40 +83,57 @@ let

 # The dynamic linker has different names on different platforms. This is a
 # shell glob that ought to match it.
-dynamicLinker =
-/**/ if sharedLibraryLoader == null then ""
-else if targetPlatform.libc == "musl" then "${sharedLibraryLoader}/lib/ld-musl-*"
-else if targetPlatform.libc == "uclibc" then "${sharedLibraryLoader}/lib/ld*-uClibc.so.1"
-else if (targetPlatform.libc == "bionic" && targetPlatform.is32bit) then "/system/bin/linker"
-else if (targetPlatform.libc == "bionic" && targetPlatform.is64bit) then "/system/bin/linker64"
-else if targetPlatform.libc == "nblibc" then "${sharedLibraryLoader}/libexec/ld.elf_so"
-else if targetPlatform.system == "i686-linux" then "${sharedLibraryLoader}/lib/ld-linux.so.2"
-else if targetPlatform.system == "x86_64-linux" then "${sharedLibraryLoader}/lib/ld-linux-x86-64.so.2"
+dynamicLinker = if sharedLibraryLoader == null then
+""
+else if targetPlatform.libc == "musl" then
+"${sharedLibraryLoader}/lib/ld-musl-*"
+else if targetPlatform.libc == "uclibc" then
+"${sharedLibraryLoader}/lib/ld*-uClibc.so.1"
+else if (targetPlatform.libc == "bionic" && targetPlatform.is32bit) then
+"/system/bin/linker"
+else if (targetPlatform.libc == "bionic" && targetPlatform.is64bit) then
+"/system/bin/linker64"
+else if targetPlatform.libc == "nblibc" then
+"${sharedLibraryLoader}/libexec/ld.elf_so"
+else if targetPlatform.system == "i686-linux" then
+"${sharedLibraryLoader}/lib/ld-linux.so.2"
+else if targetPlatform.system == "x86_64-linux" then
+"${sharedLibraryLoader}/lib/ld-linux-x86-64.so.2"
 # ELFv1 (.1) or ELFv2 (.2) ABI
-else if targetPlatform.isPower64 then "${sharedLibraryLoader}/lib/ld64.so.*"
+else if targetPlatform.isPower64 then
+"${sharedLibraryLoader}/lib/ld64.so.*"
 # ARM with a wildcard, which can be "" or "-armhf".
-else if (with targetPlatform; isAarch32 && isLinux) then "${sharedLibraryLoader}/lib/ld-linux*.so.3"
-else if targetPlatform.system == "aarch64-linux" then "${sharedLibraryLoader}/lib/ld-linux-aarch64.so.1"
-else if targetPlatform.system == "powerpc-linux" then "${sharedLibraryLoader}/lib/ld.so.1"
-else if targetPlatform.isMips then "${sharedLibraryLoader}/lib/ld.so.1"
+else if (with targetPlatform; isAarch32 && isLinux) then
+"${sharedLibraryLoader}/lib/ld-linux*.so.3"
+else if targetPlatform.system == "aarch64-linux" then
+"${sharedLibraryLoader}/lib/ld-linux-aarch64.so.1"
+else if targetPlatform.system == "powerpc-linux" then
+"${sharedLibraryLoader}/lib/ld.so.1"
+else if targetPlatform.isMips then
+"${sharedLibraryLoader}/lib/ld.so.1"
 # `ld-linux-riscv{32,64}-<abi>.so.1`
-else if targetPlatform.isRiscV then "${sharedLibraryLoader}/lib/ld-linux-riscv*.so.1"
-else if targetPlatform.isLoongArch64 then "${sharedLibraryLoader}/lib/ld-linux-loongarch*.so.1"
-else if targetPlatform.isDarwin then "/usr/lib/dyld"
-else if targetPlatform.isFreeBSD then "/libexec/ld-elf.so.1"
-else if hasSuffix "pc-gnu" targetPlatform.config then "ld.so.1"
-else "";
+else if targetPlatform.isRiscV then
+"${sharedLibraryLoader}/lib/ld-linux-riscv*.so.1"
+else if targetPlatform.isLoongArch64 then
+"${sharedLibraryLoader}/lib/ld-linux-loongarch*.so.1"
+else if targetPlatform.isDarwin then
+"/usr/lib/dyld"
+else if targetPlatform.isFreeBSD then
+"/libexec/ld-elf.so.1"
+else if hasSuffix "pc-gnu" targetPlatform.config then
+"ld.so.1"
+else
+"";

-in
-
-stdenvNoCC.mkDerivation {
+in stdenvNoCC.mkDerivation {
 pname = targetPrefix
 + (if name != "" then name else "${bintoolsName}-wrapper");
 version = optionalString (bintools != null) bintoolsVersion;

 preferLocalBuild = true;

-outputs = [ "out" ] ++ optionals propagateDoc ([ "man" ] ++ optional (bintools ? info) "info");
+outputs = [ "out" ]
+++ optionals propagateDoc ([ "man" ] ++ optional (bintools ? info) "info");

 passthru = {
 inherit targetPrefix suffixSalt;

@@ -155,7 +147,7 @@ stdenvNoCC.mkDerivation {
 (setenv "NIX_LDFLAGS_${suffixSalt}" (concat (getenv "NIX_LDFLAGS_${suffixSalt}") " -L" arg "/lib")))
 (when (file-directory-p (concat arg "/lib64"))
 (setenv "NIX_LDFLAGS_${suffixSalt}" (concat (getenv "NIX_LDFLAGS_${suffixSalt}") " -L" arg "/lib64"))))
-'(${concatStringsSep " " (map (pkg: "\"${pkg}\"") pkgs)}))
+'(${concatStringsSep " " (map (pkg: ''"${pkg}"'') pkgs)}))
 '';

 inherit defaultHardeningFlags;

@@ -170,8 +162,7 @@ stdenvNoCC.mkDerivation {
 src=$PWD
 '';

-installPhase =
-''
+installPhase = ''
 mkdir -p $out/bin $out/nix-support

 wrap() {

@@ -188,7 +179,8 @@ stdenvNoCC.mkDerivation {
 echo ${nativePrefix} > $out/nix-support/orig-bintools

 ldPath="${nativePrefix}/bin"
-'' else ''
+'' else
+''
 echo $bintools_bin > $out/nix-support/orig-bintools

 ldPath="${bintools_bin}/bin"

@@ -224,11 +216,15 @@ stdenvNoCC.mkDerivation {

 '' + (if !useMacosReexportHack then ''
 if [ -e ''${ld:-$ldPath/${targetPrefix}ld} ]; then
-wrap ${targetPrefix}ld ${./ld-wrapper.sh} ''${ld:-$ldPath/${targetPrefix}ld}
+wrap ${targetPrefix}ld ${
+./ld-wrapper.sh
+} ''${ld:-$ldPath/${targetPrefix}ld}
 fi
 '' else ''
 ldInner="${targetPrefix}ld-reexport-delegate"
-wrap "$ldInner" ${./macos-sierra-reexport-hack.bash} ''${ld:-$ldPath/${targetPrefix}ld}
+wrap "$ldInner" ${
+./macos-sierra-reexport-hack.bash
+} ''${ld:-$ldPath/${targetPrefix}ld}
 wrap "${targetPrefix}ld" ${./ld-wrapper.sh} "$out/bin/$ldInner"
 unset ldInner
 '') + ''

@@ -242,10 +238,7 @@ stdenvNoCC.mkDerivation {
 strictDeps = true;
 depsTargetTargetPropagated = extraPackages;

-setupHooks = [
-../setup-hooks/role.bash
-./setup-hook.sh
-];
+setupHooks = [ ../setup-hooks/role.bash ./setup-hook.sh ];

 postFixup =
 ##

@@ -253,7 +246,9 @@ stdenvNoCC.mkDerivation {
 ##
 optionalString (libc != null) (''
 touch "$out/nix-support/libc-ldflags"
-echo "-L${libc_lib}${libc.libdir or "/lib"}" >> $out/nix-support/libc-ldflags
+echo "-L${libc_lib}${
+libc.libdir or "/lib"
+}" >> $out/nix-support/libc-ldflags

 echo "${libc_lib}" > $out/nix-support/orig-libc
 echo "${libc_dev}" > $out/nix-support/orig-libc-dev

@@ -282,14 +277,17 @@ stdenvNoCC.mkDerivation {
 if [ -n "''${dynamicLinker-}" ]; then
 echo $dynamicLinker > $out/nix-support/dynamic-linker

-${if targetPlatform.isDarwin then ''
+${
+if targetPlatform.isDarwin then ''
 printf "export LD_DYLD_PATH=%q\n" "$dynamicLinker" >> $out/nix-support/setup-hook
-'' else optionalString (sharedLibraryLoader != null) ''
+'' else
+optionalString (sharedLibraryLoader != null) ''
 if [ -e ${sharedLibraryLoader}/lib/32/ld-linux.so.2 ]; then
 echo ${sharedLibraryLoader}/lib/32/ld-linux.so.2 > $out/nix-support/dynamic-linker-m32
 fi
 touch $out/nix-support/ld-set-dynamic-linker
-''}
+''
+}
 fi
 '')

@@ -301,7 +299,9 @@ stdenvNoCC.mkDerivation {
 # install the wrapper, you get tools like objdump (same for any
 # binaries of libc).
 + optionalString (!nativeTools) ''
-printWords ${bintools_bin} ${optionalString (libc != null) libc_bin} > $out/nix-support/propagated-user-env-packages
+printWords ${bintools_bin} ${
+optionalString (libc != null) libc_bin
+} > $out/nix-support/propagated-user-env-packages
 ''

 ##

@@ -376,19 +376,19 @@ stdenvNoCC.mkDerivation {
 ###
 ### Ensure consistent LC_VERSION_MIN_MACOSX
 ###
-+ optionalString targetPlatform.isDarwin (
-let
++ optionalString targetPlatform.isDarwin (let
 inherit (targetPlatform)
-darwinPlatform darwinSdkVersion
-darwinMinVersion darwinMinVersionVariable;
+darwinPlatform darwinSdkVersion darwinMinVersion
+darwinMinVersionVariable;
 in ''
 export darwinPlatform=${darwinPlatform}
 export darwinMinVersion=${darwinMinVersion}
 export darwinSdkVersion=${darwinSdkVersion}
 export darwinMinVersionVariable=${darwinMinVersionVariable}
-substituteAll ${./add-darwin-ldflags-before.sh} $out/nix-support/add-local-ldflags-before.sh
-''
-)
+substituteAll ${
+./add-darwin-ldflags-before.sh
+} $out/nix-support/add-local-ldflags-before.sh
+'')

 ##
 ## Code signing on Apple Silicon

@@ -416,7 +416,8 @@ stdenvNoCC.mkDerivation {
 env = {
 # for substitution in utils.bash
 # TODO(@sternenseemann): invent something cleaner than passing in "" in case of absence
-expandResponseParams = "${expand-response-params}/bin/expand-response-params";
+expandResponseParams =
+"${expand-response-params}/bin/expand-response-params";
 # TODO(@sternenseemann): rename env var via stdenv rebuild
 shell = (getBin runtimeShell + runtimeShell.shellPath or "");
 gnugrep_bin = optionalString (!nativeTools) gnugrep;

@@ -426,14 +427,12 @@ stdenvNoCC.mkDerivation {
 default_hardening_flags_str = builtins.toString defaultHardeningFlags;
 };

-meta =
-let bintools_ = optionalAttrs (bintools != null) bintools; in
-(optionalAttrs (bintools_ ? meta) (removeAttrs bintools.meta ["priority"])) //
-{ description =
+meta = let bintools_ = optionalAttrs (bintools != null) bintools;
+in (optionalAttrs (bintools_ ? meta)
+(removeAttrs bintools.meta [ "priority" ])) // {
+description =
 attrByPath [ "meta" "description" ] "System binary utilities" bintools_
 + " (wrapper script)";
 priority = 10;
-} // optionalAttrs useMacosReexportHack {
-platforms = platforms.darwin;
-};
+} // optionalAttrs useMacosReexportHack { platforms = platforms.darwin; };
 }
@@ -1,23 +1,9 @@
-{ stdenv
-, cacert
-, lib
-, writeCBin
-}:
+{ stdenv, cacert, lib, writeCBin }:

-args@{
-name ? "${args.pname}-${args.version}"
-, bazel
-, bazelFlags ? []
-, bazelBuildFlags ? []
-, bazelTestFlags ? []
-, bazelRunFlags ? []
-, runTargetFlags ? []
-, bazelFetchFlags ? []
-, bazelTargets ? []
-, bazelTestTargets ? []
-, bazelRunTarget ? null
-, buildAttrs
-, fetchAttrs
+args@{ name ? "${args.pname}-${args.version}", bazel, bazelFlags ? [ ]
+, bazelBuildFlags ? [ ], bazelTestFlags ? [ ], bazelRunFlags ? [ ]
+, runTargetFlags ? [ ], bazelFetchFlags ? [ ], bazelTargets ? [ ]
+, bazelTestTargets ? [ ], bazelRunTarget ? null, buildAttrs, fetchAttrs

 # Newer versions of Bazel are moving away from built-in rules_cc and instead
 # allow fetching it as an external dependency in a WORKSPACE file[1]. If

@@ -28,9 +14,7 @@ args@{
 # project depends on it via an external dependency.
 #
 # [1]: https://github.com/bazelbuild/rules_cc
-, removeRulesCC ? true
-, removeLocalConfigCc ? true
-, removeLocal ? true
+, removeRulesCC ? true, removeLocalConfigCc ? true, removeLocal ? true

 # Use build --nobuild instead of fetch. This allows fetching the dependencies
 # required for the build as configured, rather than fetching all the dependencies

@@ -43,25 +27,13 @@ args@{
 # Bazel wants all headers / libraries to come from, like when using
 # CROSSTOOL. Weirdly, we can still get the flags through the wrapped
 # compiler.
-, dontAddBazelOpts ? false
-, ...
-}:
+, dontAddBazelOpts ? false, ... }:

 let
 fArgs = removeAttrs args [ "buildAttrs" "fetchAttrs" "removeRulesCC" ] // {
-inherit
-name
-bazelFlags
-bazelBuildFlags
-bazelTestFlags
-bazelRunFlags
-runTargetFlags
-bazelFetchFlags
-bazelTargets
-bazelTestTargets
-bazelRunTarget
-dontAddBazelOpts
-;
+inherit name bazelFlags bazelBuildFlags bazelTestFlags bazelRunFlags
+runTargetFlags bazelFetchFlags bazelTargets bazelTestTargets
+bazelRunTarget dontAddBazelOpts;
 };
 fBuildAttrs = fArgs // buildAttrs;
 fFetchAttrs = fArgs // removeAttrs fetchAttrs [ "sha256" ];

@@ -83,7 +55,10 @@ let
 $bazelFlags \
 ${lib.strings.concatStringsSep " " additionalFlags} \
 ${lib.strings.concatStringsSep " " targets} \
-${lib.optionalString (targetRunFlags != []) " -- " + lib.strings.concatStringsSep " " targetRunFlags}
+${
+lib.optionalString (targetRunFlags != [ ]) " -- "
++ lib.strings.concatStringsSep " " targetRunFlags
+}
 '';
 # we need this to chmod dangling symlinks on darwin, gnu coreutils refuses to do so:
 # chmod: cannot operate on dangling symlink '$symlink'

@@ -107,13 +82,13 @@ let
 }
 }
 '';
-in
-stdenv.mkDerivation (fBuildAttrs // {
+in stdenv.mkDerivation (fBuildAttrs // {

 deps = stdenv.mkDerivation (fFetchAttrs // {
 name = "${name}-deps.tar.gz";

-impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ fFetchAttrs.impureEnvVars or []
+impureEnvVars = lib.fetchers.proxyImpureEnvVars
+++ fFetchAttrs.impureEnvVars or [ ]

 nativeBuildInputs = fFetchAttrs.nativeBuildInputs or [ ] ++ [ bazel ];

@@ -132,8 +107,7 @@ stdenv.mkDerivation (fBuildAttrs // {
 buildPhase = fFetchAttrs.buildPhase or ''
 runHook preBuild

-${
-bazelCmd {
+${bazelCmd {
 cmd = if fetchConfigured then "build --nobuild" else "fetch";
 additionalFlags = [
 # We disable multithreading for the fetching phase since it can lead to timeouts with many dependencies/threads:

@@ -142,8 +116,7 @@ stdenv.mkDerivation (fBuildAttrs // {
 "$bazelFetchFlags"
 ] ++ (if fetchConfigured then [ "--jobs" "$NIX_BUILD_CORES" ] else [ ]);
 targets = fFetchAttrs.bazelTargets ++ fFetchAttrs.bazelTestTargets;
-}
-}
+}}

 runHook postBuild
 '';

@@ -153,10 +126,13 @@ stdenv.mkDerivation (fBuildAttrs // {

 # Remove all built in external workspaces, Bazel will recreate them when building
 rm -rf $bazelOut/external/{bazel_tools,\@bazel_tools.marker}
-${lib.optionalString removeRulesCC "rm -rf $bazelOut/external/{rules_cc,\\@rules_cc.marker}"}
+${lib.optionalString removeRulesCC
+"rm -rf $bazelOut/external/{rules_cc,\\@rules_cc.marker}"}
 rm -rf $bazelOut/external/{embedded_jdk,\@embedded_jdk.marker}
-${lib.optionalString removeLocalConfigCc "rm -rf $bazelOut/external/{local_config_cc,\\@local_config_cc.marker}"}
-${lib.optionalString removeLocal "rm -rf $bazelOut/external/{local_*,\\@local_*.marker}"}
+${lib.optionalString removeLocalConfigCc
+"rm -rf $bazelOut/external/{local_config_cc,\\@local_config_cc.marker}"}
+${lib.optionalString removeLocal
+"rm -rf $bazelOut/external/{local_*,\\@local_*.marker}"}

 # Clear markers
 find $bazelOut/external -name '@*\.marker' -exec sh -c 'echo > {}' \;

@@ -201,7 +177,8 @@ stdenv.mkDerivation (fBuildAttrs // {
 outputHash = fetchAttrs.sha256;
 });

-nativeBuildInputs = fBuildAttrs.nativeBuildInputs or [] ++ [ (bazel.override { enableNixHacks = true; }) ];
+nativeBuildInputs = fBuildAttrs.nativeBuildInputs or [ ]
+++ [ (bazel.override { enableNixHacks = true; }) ];

 preHook = fBuildAttrs.preHook or "" + ''
 export bazelOut="$NIX_BUILD_TOP/output"

@@ -257,30 +234,27 @@ stdenv.mkDerivation (fBuildAttrs // {
 done
 fi

-${
-bazelCmd {
+${bazelCmd {
 cmd = "test";
-additionalFlags =
-["--test_output=errors"] ++ fBuildAttrs.bazelTestFlags ++ ["--jobs" "$NIX_BUILD_CORES"];
+additionalFlags = [ "--test_output=errors" ] ++ fBuildAttrs.bazelTestFlags
+++ [ "--jobs" "$NIX_BUILD_CORES" ];
 targets = fBuildAttrs.bazelTestTargets;
-}
-}
-${
-bazelCmd {
+}}
+${bazelCmd {
 cmd = "build";
-additionalFlags = fBuildAttrs.bazelBuildFlags ++ ["--jobs" "$NIX_BUILD_CORES"];
+additionalFlags = fBuildAttrs.bazelBuildFlags
+++ [ "--jobs" "$NIX_BUILD_CORES" ];
 targets = fBuildAttrs.bazelTargets;
-}
-}
-${
-bazelCmd {
+}}
+${bazelCmd {
 cmd = "run";
-additionalFlags = fBuildAttrs.bazelRunFlags ++ [ "--jobs" "$NIX_BUILD_CORES" ];
+additionalFlags = fBuildAttrs.bazelRunFlags
+++ [ "--jobs" "$NIX_BUILD_CORES" ];
 # Bazel run only accepts a single target, but `bazelCmd` expects `targets` to be a list.
-targets = lib.optionals (fBuildAttrs.bazelRunTarget != null) [ fBuildAttrs.bazelRunTarget ];
+targets = lib.optionals (fBuildAttrs.bazelRunTarget != null)
+[ fBuildAttrs.bazelRunTarget ];
 targetRunFlags = fBuildAttrs.runTargetFlags;
-}
-}
+}}
 runHook postBuild
 '';
 })
@@ -1,21 +1,10 @@
-{ lib
-, stdenv
-, runCommandLocal
-, buildEnv
-, writeText
-, writeShellScriptBin
-, pkgs
-, pkgsi686Linux
-}:
+{ lib, stdenv, runCommandLocal, buildEnv, writeText, writeShellScriptBin, pkgs
+, pkgsi686Linux }:

-{ profile ? ""
-, targetPkgs ? pkgs: []
-, multiPkgs ? pkgs: []
+{ profile ? "", targetPkgs ? pkgs: [ ], multiPkgs ? pkgs: [ ]
 , multiArch ? false # Whether to include 32bit packages
-, extraBuildCommands ? ""
-, extraBuildCommandsMulti ? ""
-, extraOutputsToInstall ? []
-, ... # for name, or pname+version
+, extraBuildCommands ? "", extraBuildCommandsMulti ? ""
+, extraOutputsToInstall ? [ ], ... # for name, or pname+version
 }@args:

 # HOWTO:

@@ -36,9 +25,10 @@
 let
 inherit (stdenv) is64bit;

-name = if (args ? pname && args ? version)
-then "${args.pname}-${args.version}"
-else args.name;
+name = if (args ? pname && args ? version) then
+"${args.pname}-${args.version}"
+else
+args.name;

 # "use of glibc_multi is only supported on x86_64-linux"
 isMultiBuild = multiArch && stdenv.system == "x86_64-linux";

@@ -46,7 +36,8 @@ let

 # list of packages (usually programs) which match the host's architecture
 # (which includes stuff from multiPkgs)
-targetPaths = targetPkgs pkgs ++ (if multiPkgs == null then [] else multiPkgs pkgs);
+targetPaths = targetPkgs pkgs
+++ (if multiPkgs == null then [ ] else multiPkgs pkgs);

 # list of packages which are for x86 (only multiPkgs, only for x86_64 hosts)
 multiPaths = multiPkgs pkgsi686Linux;

@@ -74,13 +65,13 @@ let
 bzip2
 xz
 ];
-baseMultiPaths = with pkgsi686Linux; [
-(toString gcc.cc.lib)
-];
+baseMultiPaths = with pkgsi686Linux; [ (toString gcc.cc.lib) ];

 ldconfig = writeShellScriptBin "ldconfig" ''
 # due to a glibc bug, 64-bit ldconfig complains about patchelf'd 32-bit libraries, so we use 32-bit ldconfig when we have them
-exec ${if isMultiBuild then pkgsi686Linux.glibc.bin else pkgs.glibc.bin}/bin/ldconfig -f /etc/ld.so.conf -C /etc/ld.so.cache "$@"
+exec ${
+if isMultiBuild then pkgsi686Linux.glibc.bin else pkgs.glibc.bin
+}/bin/ldconfig -f /etc/ld.so.conf -C /etc/ld.so.cache "$@"
 '';

 etcProfile = writeText "profile" ''

@@ -207,9 +198,8 @@ let
 ln -Ls ${staticUsrProfileTarget}/lib/32/ld-linux.so.2 lib/
 '';

-setupLibDirs = if isTargetBuild
-then setupLibDirsTarget
-else setupLibDirsMulti;
+setupLibDirs =
+if isTargetBuild then setupLibDirsTarget else setupLibDirsMulti;

 # the target profile is the actual profile that will be used for the chroot
 setupTargetProfile = ''

@@ -254,7 +244,8 @@ let

 in runCommandLocal "${name}-fhs" {
 passthru = {
-inherit args baseTargetPaths targetPaths baseMultiPaths ldconfig isMultiBuild;
+inherit args baseTargetPaths targetPaths baseMultiPaths ldconfig
+isMultiBuild;
 };
 } ''
 mkdir -p $out
@@ -1,55 +1,45 @@
-{ lib
-, stdenv
-, callPackage
-, runCommandLocal
-, writeShellScript
-, glibc
-, pkgsi686Linux
-, coreutils
-, bubblewrap
-}:
+{ lib, stdenv, callPackage, runCommandLocal, writeShellScript, glibc
+, pkgsi686Linux, coreutils, bubblewrap }:

-{ runScript ? "bash"
-, extraInstallCommands ? ""
-, meta ? {}
-, passthru ? {}
-, extraPreBwrapCmds ? ""
-, extraBwrapArgs ? []
-, unshareUser ? false
-, unshareIpc ? false
-, unsharePid ? false
-, unshareNet ? false
-, unshareUts ? false
-, unshareCgroup ? false
-, privateTmp ? false
-, dieWithParent ? true
-, ...
-} @ args:
+{ runScript ? "bash", extraInstallCommands ? "", meta ? { }, passthru ? { }
+, extraPreBwrapCmds ? "", extraBwrapArgs ? [ ], unshareUser ? false
+, unshareIpc ? false, unsharePid ? false, unshareNet ? false, unshareUts ? false
+, unshareCgroup ? false, privateTmp ? false, dieWithParent ? true, ... }@args:

-assert (!args ? pname || !args ? version) -> (args ? name); # You must provide name if pname or version (preferred) is missing.
+assert (!args ? pname || !args ? version) -> (args
+? name); # You must provide name if pname or version (preferred) is missing.

 let
 inherit (lib)
-concatLines
-concatStringsSep
-escapeShellArgs
-filter
-optionalString
-splitString
-;
+concatLines concatStringsSep escapeShellArgs filter optionalString
+splitString;

 inherit (lib.attrsets) removeAttrs;

 name = args.name or "${args.pname}-${args.version}";
 executableName = args.pname or args.name;
 # we don't know which have been supplied, and want to avoid defaulting missing attrs to null. Passed into runCommandLocal
-nameAttrs = lib.filterAttrs (key: value: builtins.elem key [ "name" "pname" "version" ]) args;
+nameAttrs =
+lib.filterAttrs (key: value: builtins.elem key [ "name" "pname" "version" ])
+args;

 buildFHSEnv = callPackage ./buildFHSEnv.nix { };

 fhsenv = buildFHSEnv (removeAttrs args [
-"runScript" "extraInstallCommands" "meta" "passthru" "extraPreBwrapCmds" "extraBwrapArgs" "dieWithParent"
-"unshareUser" "unshareCgroup" "unshareUts" "unshareNet" "unsharePid" "unshareIpc" "privateTmp"
+"runScript"
+"extraInstallCommands"
+"meta"
+"passthru"
+"extraPreBwrapCmds"
+"extraBwrapArgs"
+"dieWithParent"
+"unshareUser"
+"unshareCgroup"
+"unshareUts"
+"unshareNet"
+"unsharePid"
+"unshareIpc"
+"privateTmp"
 ]);

 etcBindEntries = let

@@ -117,14 +107,18 @@ let
 EOF
 ldconfig &> /dev/null
 '';
-init = run: writeShellScript "${name}-init" ''
+init = run:
+writeShellScript "${name}-init" ''
 source /etc/profile
 ${createLdConfCache}
 exec ${run} "$@"
 '';

-indentLines = str: concatLines (map (s: " " + s) (filter (s: s != "") (splitString "\n" str)));
-bwrapCmd = { initArgs ? "" }: ''
+indentLines = str:
+concatLines
+(map (s: " " + s) (filter (s: s != "") (splitString "\n" str)));
+bwrapCmd = { initArgs ? "" }:
+''
 ${extraPreBwrapCmds}
 ignored=(/nix /dev /proc /etc ${optionalString privateTmp "/tmp"})
 ro_mounts=()

@@ -271,9 +265,7 @@ in runCommandLocal name (nameAttrs // {
 inherit meta;

 passthru = passthru // {
-env = runCommandLocal "${name}-shell-env" {
-shellHook = bwrapCmd {};
-} ''
+env = runCommandLocal "${name}-shell-env" { shellHook = bwrapCmd { }; } ''
 echo >&2 ""
 echo >&2 "*** User chroot 'env' attributes are intended for interactive nix-shell sessions, not for building! ***"
 echo >&2 ""
@ -1,15 +1,23 @@
|
|||
{ lib, callPackage, runCommandLocal, writeScript, stdenv, coreutils }:
|
||||
|
||||
let buildFHSEnv = callPackage ./env.nix { }; in
|
||||
let buildFHSEnv = callPackage ./env.nix { };
|
||||
|
||||
args@{ name, version ? null, runScript ? "bash", extraInstallCommands ? "", meta ? {}, passthru ? {}, ... }:
|
||||
in args@{ name, version ? null, runScript ? "bash", extraInstallCommands ? ""
|
||||
, meta ? { }, passthru ? { }, ... }:
|
||||
|
||||
let
|
||||
env = buildFHSEnv (removeAttrs args [ "version" "runScript" "extraInstallCommands" "meta" "passthru" ]);
|
||||
env = buildFHSEnv (removeAttrs args [
|
||||
"version"
|
||||
"runScript"
|
||||
"extraInstallCommands"
|
||||
"meta"
|
||||
"passthru"
|
||||
]);
|
||||
|
||||
chrootenv = callPackage ./chrootenv { };
|
||||
|
||||
init = run: writeScript "${name}-init" ''
|
||||
init = run:
|
||||
writeScript "${name}-init" ''
|
||||
#! ${stdenv.shell}
|
||||
for i in ${env}/* /host/*; do
|
||||
path="/''${i##*/}"
|
||||
|
|
|
@ -1,13 +1,8 @@
|
|||
{ stdenv, lib, buildEnv, writeText, pkgs, pkgsi686Linux }:
|
||||
|
||||
{ name
|
||||
, profile ? ""
|
||||
, targetPkgs ? pkgs: []
|
||||
, multiPkgs ? pkgs: []
|
||||
, extraBuildCommands ? ""
|
||||
, extraBuildCommandsMulti ? ""
|
||||
, extraOutputsToInstall ? []
|
||||
}:
|
||||
{ name, profile ? "", targetPkgs ? pkgs: [ ], multiPkgs ? pkgs: [ ]
|
||||
, extraBuildCommands ? "", extraBuildCommandsMulti ? ""
|
||||
, extraOutputsToInstall ? [ ] }:
|
||||
|
||||
# HOWTO:
|
||||
# All packages (most likely programs) returned from targetPkgs will only be
|
||||
|
@ -27,12 +22,14 @@
|
|||
let
|
||||
is64Bit = stdenv.hostPlatform.parsed.cpu.bits == 64;
|
||||
# multi-lib glibc is only supported on x86_64
|
||||
isMultiBuild = multiPkgs != null && stdenv.hostPlatform.system == "x86_64-linux";
|
||||
isMultiBuild = multiPkgs != null && stdenv.hostPlatform.system
|
||||
== "x86_64-linux";
|
||||
isTargetBuild = !isMultiBuild;
|
||||
|
||||
# list of packages (usually programs) which are only be installed for the
|
||||
# host's architecture
|
||||
targetPaths = targetPkgs pkgs ++ (if multiPkgs == null then [] else multiPkgs pkgs);
|
||||
targetPaths = targetPkgs pkgs
|
||||
++ (if multiPkgs == null then [ ] else multiPkgs pkgs);
|
||||
|
||||
# list of packages which are installed for both x86 and x86_64 on x86_64
|
||||
# systems
|
||||
|
@ -42,16 +39,26 @@ let
|
|||
# these match the host's architecture, glibc_multi is used for multilib
|
||||
# builds. glibcLocales must be before glibc or glibc_multi as otherwiese
|
||||
# the wrong LOCALE_ARCHIVE will be used where only C.UTF-8 is available.
|
||||
basePkgs = with pkgs;
|
||||
[ glibcLocales
|
||||
basePkgs = with pkgs; [
|
||||
glibcLocales
|
||||
(if isMultiBuild then glibc_multi else glibc)
|
||||
(toString gcc.cc.lib) bashInteractiveFHS coreutils less shadow su
|
||||
gawk diffutils findutils gnused gnugrep
|
||||
gnutar gzip bzip2 xz
|
||||
];
|
||||
baseMultiPkgs = with pkgsi686Linux;
|
||||
[ (toString gcc.cc.lib)
|
||||
(toString gcc.cc.lib)
|
||||
bashInteractiveFHS
|
||||
coreutils
|
||||
less
|
||||
shadow
|
||||
su
|
||||
gawk
|
||||
diffutils
|
||||
findutils
|
||||
gnused
|
||||
gnugrep
|
||||
gnutar
|
||||
gzip
|
||||
bzip2
|
||||
xz
|
||||
];
|
||||
baseMultiPkgs = with pkgsi686Linux; [ (toString gcc.cc.lib) ];
|
||||
|
||||
etcProfile = writeText "profile" ''
|
||||
export PS1='${name}-chrootenv:\u@\h:\w\$ '
|
||||
|
@ -216,8 +223,8 @@ let
|
|||
ln -Ls ${staticUsrProfileTarget}/lib/32/ld-linux.so.2 lib/
|
||||
'';
|
||||
|
||||
setupLibDirs = if isTargetBuild then setupLibDirs_target
|
||||
else setupLibDirs_multi;
|
||||
setupLibDirs =
|
||||
if isTargetBuild then setupLibDirs_target else setupLibDirs_multi;
|
||||
|
||||
# the target profile is the actual profile that will be used for the chroot
|
||||
setupTargetProfile = ''
|
||||
|
|
@@ -1,13 +1,8 @@
-{ lib
-, stdenv
-, glibcLocales
+{ lib, stdenv, glibcLocales
 # The GraalVM derivation to use
-, graalvmDrv
-, removeReferencesTo
-, executable ? args.pname
+, graalvmDrv, removeReferencesTo, executable ? args.pname
 # JAR used as input for GraalVM derivation, defaults to src
-, jar ? args.src
-, dontUnpack ? (jar == args.src)
+, jar ? args.src, dontUnpack ? (jar == args.src)
 # Default native-image arguments. You probably don't want to set this,
 # except in special cases. In most cases, use extraNativeBuildArgs instead
 , nativeImageBuildArgs ? [

@@ -20,11 +15,7 @@
 # Extra arguments to be passed to the native-image
 , extraNativeImageBuildArgs ? [ ]
 # XMX size of GraalVM during build
-, graalvmXmx ? "-J-Xmx6g"
-, meta ? { }
-, LC_ALL ? "en_US.UTF-8"
-, ...
-} @ args:
+, graalvmXmx ? "-J-Xmx6g", meta ? { }, LC_ALL ? "en_US.UTF-8", ... }@args:

 let
 extraArgs = builtins.removeAttrs args [

@@ -40,15 +31,16 @@ let
 "installPhase"
 "postInstall"
 ];
-in
-stdenv.mkDerivation ({
+in stdenv.mkDerivation ({
 inherit dontUnpack jar;

 env = { inherit LC_ALL; };

-nativeBuildInputs = (args.nativeBuildInputs or [ ]) ++ [ graalvmDrv glibcLocales removeReferencesTo ];
+nativeBuildInputs = (args.nativeBuildInputs or [ ])
+++ [ graalvmDrv glibcLocales removeReferencesTo ];

-nativeImageBuildArgs = nativeImageBuildArgs ++ extraNativeImageBuildArgs ++ [ graalvmXmx ];
+nativeImageBuildArgs = nativeImageBuildArgs ++ extraNativeImageBuildArgs
+++ [ graalvmXmx ];

 buildPhase = args.buildPhase or ''
 runHook preBuild
|
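For orientation, a sketch of how this builder is typically invoked; pname, version, and the jar path are placeholders:

```nix
# Hypothetical invocation of the native-image builder above.
buildGraalvmNativeImage {
  pname = "hello-cli";          # placeholder
  version = "1.0.0";            # placeholder
  src = ./hello-cli-1.0.0.jar;  # jar defaults to src, so dontUnpack = true
  # Appended after the default nativeImageBuildArgs:
  extraNativeImageBuildArgs = [ "--no-fallback" ];
  graalvmXmx = "-J-Xmx4g";      # overrides the default build-time heap cap
}
```
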
@ -8,10 +8,8 @@ let

    src = ./builder.pl;
    inherit (builtins) storeDir;
  };
in

lib.makeOverridable
  ({ name
in lib.makeOverridable ({ name

  , # The manifest file (if any). A symlink $out/manifest will be
    # created to it.

@ -46,15 +44,11 @@ lib.makeOverridable

  , nativeBuildInputs ? [ ] # Handy e.g. if using makeWrapper in `postBuild`.
  , buildInputs ? [ ]

  , passthru ? {}
  , meta ? {}
  }:
  , passthru ? { }, meta ? { } }:

  runCommand name
    rec {
      inherit manifest ignoreCollisions checkCollisionContents passthru
        meta pathsToLink extraPrefix postBuild
        nativeBuildInputs buildInputs;
  runCommand name rec {
    inherit manifest ignoreCollisions checkCollisionContents passthru meta
      pathsToLink extraPrefix postBuild nativeBuildInputs buildInputs;
    pkgs = builtins.toJSON (map (drv: {
      paths =
        # First add the usual output(s): respect if user has chosen explicitly,

@ -62,9 +56,10 @@ runCommand name

        # to exist in mkDerivation-created cases. The other cases (e.g. runCommand)
        # aren't expected to have multiple outputs.
        (if (!drv ? outputSpecified || !drv.outputSpecified)
          && drv.meta.outputsToInstall or null != null
        then map (outName: drv.${outName}) drv.meta.outputsToInstall
        else [ drv ])
          && drv.meta.outputsToInstall or null != null then
          map (outName: drv.${outName}) drv.meta.outputsToInstall
        else
          [ drv ])
        # Add any extra outputs specified by the caller of `buildEnv`.
        ++ lib.filter (p: p != null)
          (builtins.map (outName: drv.${outName} or null) extraOutputsToInstall);

@ -73,9 +68,9 @@ runCommand name

    preferLocalBuild = true;
    allowSubstitutes = false;
    # XXX: The size is somewhat arbitrary
    passAsFile = if builtins.stringLength pkgs >= 128*1024 then [ "pkgs" ] else [ ];
  }
  ''
    passAsFile =
      if builtins.stringLength pkgs >= 128 * 1024 then [ "pkgs" ] else [ ];
  } ''
    ${buildPackages.perl}/bin/perl -w ${builder}
    eval "$postBuild"
  '')
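The paths/extraOutputsToInstall handling above corresponds to call sites like this sketch (package names are illustrative):

```nix
# Hypothetical buildEnv call; `paths` feeds the pkgs JSON built above and
# `extraOutputsToInstall` pulls extra outputs such as man pages.
buildEnv {
  name = "example-env";
  paths = [ pkgs.jq pkgs.ripgrep ];
  extraOutputsToInstall = [ "man" ];
  pathsToLink = [ "/bin" "/share" ];
}
```
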
@ -5,56 +5,54 @@

# script that sets up the right environment variables so that the
# compiler and the linker just "work".

{ name ? ""
, lib
, stdenvNoCC
, runtimeShell
, cc ? null, libc ? null, bintools, coreutils ? null
, zlib ? null
, nativeTools, noLibc ? false, nativeLibc, nativePrefix ? ""
, propagateDoc ? cc != null && cc ? man
, extraTools ? [], extraPackages ? [], extraBuildCommands ? ""
, nixSupport ? {}
, isGNU ? false, isClang ? cc.isClang or false, isCcache ? cc.isCcache or false, gnugrep ? null
, expand-response-params
, libcxx ? null
{ name ? "", lib, stdenvNoCC, runtimeShell, cc ? null, libc ? null, bintools
, coreutils ? null, zlib ? null, nativeTools, noLibc ? false, nativeLibc
, nativePrefix ? "", propagateDoc ? cc != null && cc ? man, extraTools ? [ ]
, extraPackages ? [ ], extraBuildCommands ? "", nixSupport ? { }, isGNU ? false
, isClang ? cc.isClang or false, isCcache ? cc.isCcache or false, gnugrep ? null
, expand-response-params, libcxx ? null

# Whether or not to add `-B` and `-L` to `nix-support/cc-{c,ld}flags`
, useCcForLibs ?

  # Always add these flags for Clang, because in order to compile (most
  # software) it needs libraries that are shipped and compiled with gcc.
  if isClang then true
  if isClang then
    true

  # Never add these flags for a build!=host cross-compiler or a host!=target
  # ("cross-built-native") compiler; currently nixpkgs has a special build
  # path for these (`crossStageStatic`). Hopefully at some point that build
  # path will be merged with this one and this conditional will be removed.
  else if (with stdenvNoCC; buildPlatform != hostPlatform || hostPlatform != targetPlatform) then false
  else if (with stdenvNoCC;
    buildPlatform != hostPlatform || hostPlatform != targetPlatform) then
    false

  # Never add these flags when wrapping the bootstrapFiles' compiler; it has a
  # /usr/-like layout with everything smashed into a single outpath, so it has
  # no trouble finding its own libraries.
  else if (cc.passthru.isFromBootstrapFiles or false) then false
  else if (cc.passthru.isFromBootstrapFiles or false) then
    false

  # Add these flags when wrapping `xgcc` (the first compiler that nixpkgs builds)
  else if (cc.passthru.isXgcc or false) then true
  else if (cc.passthru.isXgcc or false) then
    true

  # Add these flags when wrapping `stdenv.cc`
  else if (cc.stdenv.cc.cc.passthru.isXgcc or false) then true
  else if (cc.stdenv.cc.cc.passthru.isXgcc or false) then
    true

  # Do not add these flags in any other situation. This is `false` mainly to
  # prevent these flags from being added when wrapping *old* versions of gcc
  # (e.g. `gcc6Stdenv`), since they will cause the old gcc to get `-B` and
  # `-L` flags pointing at the new gcc's libstdc++ headers. Example failure:
  # https://hydra.nixos.org/build/213125495
  else false
  else
    false

# the derivation at which the `-B` and `-L` flags added by `useCcForLibs` will point
, gccForLibs ? if useCcForLibs then cc else null
, fortify-headers ? null
, includeFortifyHeaders ? null
}:
, gccForLibs ? if useCcForLibs then cc else null, fortify-headers ? null
, includeFortifyHeaders ? null }:

assert nativeTools -> !propagateDoc && nativePrefix != "";
assert !nativeTools -> cc != null && coreutils != null && gnugrep != null;

@ -63,36 +61,22 @@ assert (noLibc || nativeLibc) == (libc == null);

let
  inherit (lib)
    attrByPath
    concatMapStrings
    concatStringsSep
    escapeShellArg
    getBin
    getDev
    getLib
    getName
    getVersion
    mapAttrsToList
    optional
    optionalAttrs
    optionals
    optionalString
    removePrefix
    replaceStrings
    toList
    versionAtLeast
    ;
    attrByPath concatMapStrings concatStringsSep escapeShellArg getBin getDev
    getLib getName getVersion mapAttrsToList optional optionalAttrs optionals
    optionalString removePrefix replaceStrings toList versionAtLeast;

  inherit (stdenvNoCC) hostPlatform targetPlatform;

  includeFortifyHeaders' = if includeFortifyHeaders != null
    then includeFortifyHeaders
    else (targetPlatform.libc == "musl" && isGNU);
  includeFortifyHeaders' = if includeFortifyHeaders != null then
    includeFortifyHeaders
  else
    (targetPlatform.libc == "musl" && isGNU);

  # Prefix for binaries. Customarily ends with a dash separator.
  #
  # TODO(@Ericson2314) Make unconditional, or optional but always true by default.
  targetPrefix = optionalString (targetPlatform != hostPlatform) (targetPlatform.config + "-");
  targetPrefix = optionalString (targetPlatform != hostPlatform)
    (targetPlatform.config + "-");

  ccVersion = getVersion cc;
  ccName = removePrefix targetPrefix (getName cc);

@ -100,8 +84,8 @@ let

  libc_bin = optionalString (libc != null) (getBin libc);
  libc_dev = optionalString (libc != null) (getDev libc);
  libc_lib = optionalString (libc != null) (getLib libc);
  cc_solib = getLib cc
    + optionalString (targetPlatform != hostPlatform) "/${targetPlatform.config}";
  cc_solib = getLib cc + optionalString (targetPlatform != hostPlatform)
    "/${targetPlatform.config}";

  # The wrapper scripts use 'cat' and 'grep', so we may need coreutils.
  coreutils_bin = optionalString (!nativeTools) (getBin coreutils);

@ -113,15 +97,13 @@ let

  # unstable implementation detail, however.
  suffixSalt = replaceStrings [ "-" "." ] [ "_" "_" ] targetPlatform.config;

  useGccForLibs = useCcForLibs
    && libcxx == null
    && !targetPlatform.isDarwin
  useGccForLibs = useCcForLibs && libcxx == null && !targetPlatform.isDarwin
    && !(targetPlatform.useLLVM or false)
    && !(targetPlatform.useAndroidPrebuilt or false)
    && !(targetPlatform.isiOS or false)
    && gccForLibs != null;
    && !(targetPlatform.isiOS or false) && gccForLibs != null;
  gccForLibs_solib = getLib gccForLibs
    + optionalString (targetPlatform != hostPlatform) "/${targetPlatform.config}";
    + optionalString (targetPlatform != hostPlatform)
    "/${targetPlatform.config}";

  # Analogously to cc_solib and gccForLibs_solib
  libcxx_solib = "${getLib libcxx}/lib";

@ -141,7 +123,9 @@ let

  # -march=too-modern-cpu

  isGccArchSupported = arch:
    if targetPlatform.isPower then false else # powerpc does not allow -march=
    if targetPlatform.isPower then
      false
    else # powerpc does not allow -march=
    if isGNU then
      { # Generic
        x86-64-v2 = versionAtLeast ccVersion "11.0";

@ -207,7 +191,8 @@ let

      {
        cortex-a53 = versionAtLeast ccVersion "3.9"; # llvm dfc5d1
      }.${tune} or false
    else false)
    else
      false)
  else if targetPlatform.isPower then
  # powerpc does not support -march
    true

@ -224,15 +209,15 @@ let

  # Note: this function can make use of ccVersion; for example, `if
  # versionOlder ccVersion "12" then ...`
  findBestTuneApproximation = tune:
    let guess = if isClang
        then {
    let
      guess = if isClang then
        {
          # clang does not tune for big.LITTLE chips
          "cortex-a72.cortex-a53" = "cortex-a72";
        }.${tune} or tune
        else tune;
    in if isGccTuneSupported guess
       then guess
       else null;
      else
        tune;
    in if isGccTuneSupported guess then guess else null;

  defaultHardeningFlags = bintools.defaultHardeningFlags or [ ];

@ -241,24 +226,25 @@ let

  # cc.hardeningUnsupportedFlags is completely ignored - the function
  # is responsible for including the constant hardeningUnsupportedFlags
  # list however it sees fit.
  ccHardeningUnsupportedFlags = if cc ? hardeningUnsupportedFlagsByTargetPlatform
    then cc.hardeningUnsupportedFlagsByTargetPlatform targetPlatform
    else (cc.hardeningUnsupportedFlags or []);
  ccHardeningUnsupportedFlags =
    if cc ? hardeningUnsupportedFlagsByTargetPlatform then
      cc.hardeningUnsupportedFlagsByTargetPlatform targetPlatform
    else
      (cc.hardeningUnsupportedFlags or [ ]);

  darwinPlatformForCC = optionalString targetPlatform.isDarwin (
    if (targetPlatform.darwinPlatform == "macos" && isGNU) then "macosx"
    else targetPlatform.darwinPlatform
  );
  darwinPlatformForCC = optionalString targetPlatform.isDarwin
    (if (targetPlatform.darwinPlatform == "macos" && isGNU) then
      "macosx"
    else
      targetPlatform.darwinPlatform);

  darwinMinVersion = optionalString targetPlatform.isDarwin (
    targetPlatform.darwinMinVersion
  );
  darwinMinVersion =
    optionalString targetPlatform.isDarwin (targetPlatform.darwinMinVersion);

  darwinMinVersionVariable = optionalString targetPlatform.isDarwin
    targetPlatform.darwinMinVersionVariable;
in

assert includeFortifyHeaders' -> fortify-headers != null;
in assert includeFortifyHeaders' -> fortify-headers != null;

# Ensure bintools matches
assert libc_bin == bintools.libc_bin;

@ -269,8 +255,7 @@ assert nativeLibc == bintools.nativeLibc;

assert nativePrefix == bintools.nativePrefix;

stdenvNoCC.mkDerivation {
  pname = targetPrefix
    + (if name != "" then name else "${ccName}-wrapper");
  pname = targetPrefix + (if name != "" then name else "${ccName}-wrapper");
  version = optionalString (cc != null) ccVersion;

  preferLocalBuild = true;

@ -292,7 +277,7 @@ stdenvNoCC.mkDerivation {

        (lambda (arg)
          (when (file-directory-p (concat arg "/include"))
            (setenv "NIX_CFLAGS_COMPILE_${suffixSalt}" (concat (getenv "NIX_CFLAGS_COMPILE_${suffixSalt}") " -isystem " arg "/include"))))
        '(${concatStringsSep " " (map (pkg: "\"${pkg}\"") pkgs)}))
        '(${concatStringsSep " " (map (pkg: ''"${pkg}"'') pkgs)}))
    '';

  # Expose expand-response-params we are /actually/ using. In stdenv

@ -315,22 +300,25 @@ stdenvNoCC.mkDerivation {

  wrapper = ./cc-wrapper.sh;

  installPhase =
    ''
  installPhase = ''
    mkdir -p $out/bin $out/nix-support

    wrap() {
      local dst="$1"
      local wrapper="$2"
      export prog="$3"
      export use_response_file_by_default=${if isClang && !isCcache then "1" else "0"}
      export use_response_file_by_default=${
        if isClang && !isCcache then "1" else "0"
      }
      substituteAll "$wrapper" "$out/bin/$dst"
      chmod +x "$out/bin/$dst"
    }
  ''

  + (if nativeTools then ''
    echo ${if targetPlatform.isDarwin then cc else nativePrefix} > $out/nix-support/orig-cc
    echo ${
      if targetPlatform.isDarwin then cc else nativePrefix
    } > $out/nix-support/orig-cc

    ccPath="${if targetPlatform.isDarwin then cc else nativePrefix}/bin"
  '' else ''

@ -384,7 +372,9 @@ stdenvNoCC.mkDerivation {

  # No need to wrap gnat, gnatkr, gnatname or gnatprep; we can just symlink them in
  + optionalString cc.langAda or false ''
    for cmd in gnatbind gnatchop gnatclean gnatlink gnatls gnatmake; do
      wrap ${targetPrefix}$cmd ${./gnat-wrapper.sh} $ccPath/${targetPrefix}$cmd
      wrap ${targetPrefix}$cmd ${
        ./gnat-wrapper.sh
      } $ccPath/${targetPrefix}$cmd
    done

    for cmd in gnat gnatkr gnatname gnatprep; do

@ -418,12 +408,13 @@ stdenvNoCC.mkDerivation {

  '';

  strictDeps = true;
  propagatedBuildInputs = [ bintools ] ++ extraTools ++ optionals cc.langD or cc.langJava or false [ zlib ];
  depsTargetTargetPropagated = optional (libcxx != null) libcxx ++ extraPackages;
  propagatedBuildInputs = [ bintools ] ++ extraTools
    ++ optionals cc.langD or cc.langJava or false [ zlib ];
  depsTargetTargetPropagated = optional (libcxx != null) libcxx
    ++ extraPackages;

  setupHooks = [
    ../setup-hooks/role.bash
  ] ++ optional (cc.langC or true) ./setup-hook.sh
  setupHooks = [ ../setup-hooks/role.bash ]
    ++ optional (cc.langC or true) ./setup-hook.sh
    ++ optional (cc.langFortran or false) ./fortran-hook.sh
    ++ optional (targetPlatform.isWindows) (stdenvNoCC.mkDerivation {
      name = "win-dll-hook.sh";

@ -463,8 +454,7 @@ stdenvNoCC.mkDerivation {

  + optionalString (useGccForLibs && isClang) ''

    echo "-B${gccForLibs}/lib/gcc/${targetPlatform.config}/${gccForLibs.version}" >> $out/nix-support/cc-cflags
  ''
  + optionalString useGccForLibs ''
  '' + optionalString useGccForLibs ''
    echo "-L${gccForLibs}/lib/gcc/${targetPlatform.config}/${gccForLibs.version}" >> $out/nix-support/cc-ldflags
    echo "-L${gccForLibs_solib}/lib" >> $out/nix-support/cc-ldflags
  ''

@ -476,17 +466,18 @@ stdenvNoCC.mkDerivation {

  # vs libstdc++, etc.) since Darwin isn't `useLLVM` on all counts. (See
  # https://clang.llvm.org/docs/Toolchain.html for all the axes one might
  # break `useLLVM` into.)
  + optionalString (isClang
    && targetPlatform.isLinux
  + optionalString (isClang && targetPlatform.isLinux
    && !(targetPlatform.useAndroidPrebuilt or false)
    && !(targetPlatform.useLLVM or false)
    && gccForLibs != null) (''
    && !(targetPlatform.useLLVM or false) && gccForLibs != null) (''
      echo "--gcc-toolchain=${gccForLibs}" >> $out/nix-support/cc-cflags

      # Pull in 'cc.out' target to get 'libstdc++fs.a'. It should be in
      # 'cc.lib'. But it's a gcc package bug.
      # TODO(trofi): remove once gcc is fixed to move libraries to .lib output.
      echo "-L${gccForLibs}/${optionalString (targetPlatform != hostPlatform) "/${targetPlatform.config}"}/lib" >> $out/nix-support/cc-ldflags
      echo "-L${gccForLibs}/${
        optionalString (targetPlatform != hostPlatform)
        "/${targetPlatform.config}"
      }/lib" >> $out/nix-support/cc-ldflags
    ''
    # this ensures that when clang passes -lgcc_s to lld (as it does
    # when building e.g. firefox), lld is able to find libgcc_s.so

@ -512,9 +503,13 @@ stdenvNoCC.mkDerivation {

  + optionalString (libc != null) (''
    touch "$out/nix-support/libc-cflags"
    touch "$out/nix-support/libc-ldflags"
    echo "-B${libc_lib}${libc.libdir or "/lib/"}" >> $out/nix-support/libc-crt1-cflags
    echo "-B${libc_lib}${
      libc.libdir or "/lib/"
    }" >> $out/nix-support/libc-crt1-cflags
  '' + optionalString (!(cc.langD or false)) ''
    echo "-idirafter ${libc_dev}${libc.incdir or "/include"}" >> $out/nix-support/libc-cflags
    echo "-idirafter ${libc_dev}${
      libc.incdir or "/include"
    }" >> $out/nix-support/libc-cflags
  '' + optionalString (isGNU && (!(cc.langD or false))) ''
    for dir in "${cc}"/lib/gcc/*/*/include-fixed; do
      echo '-idirafter' ''${dir} >> $out/nix-support/libc-cflags

@ -541,7 +536,9 @@ stdenvNoCC.mkDerivation {

  # We have a libc++ directly, we have one via "smuggled" GCC, or we have one
  # bundled with the C compiler because it is GCC
  + optionalString (libcxx != null || (useGccForLibs && gccForLibs.langCC or false) || (isGNU && cc.langCC or false)) ''
  + optionalString (libcxx != null
    || (useGccForLibs && gccForLibs.langCC or false)
    || (isGNU && cc.langCC or false)) ''
    touch "$out/nix-support/libcxx-cxxflags"
    touch "$out/nix-support/libcxx-ldflags"
  ''

@ -549,16 +546,18 @@ stdenvNoCC.mkDerivation {

  # already knows how to find its own libstdc++, and adding
  # additional -isystem flags will confuse gfortran (see
  # https://github.com/NixOS/nixpkgs/pull/209870#issuecomment-1500550903)
  + optionalString (libcxx == null && isClang && (useGccForLibs && gccForLibs.langCC or false)) ''
  + optionalString (libcxx == null && isClang
    && (useGccForLibs && gccForLibs.langCC or false)) ''
    for dir in ${gccForLibs}/include/c++/*; do
      echo "-isystem $dir" >> $out/nix-support/libcxx-cxxflags
    done
    for dir in ${gccForLibs}/include/c++/*/${targetPlatform.config}; do
      echo "-isystem $dir" >> $out/nix-support/libcxx-cxxflags
    done
  ''
  + optionalString (libcxx.isLLVM or false) ''
    echo "-isystem ${getDev libcxx}/include/c++/v1" >> $out/nix-support/libcxx-cxxflags
  '' + optionalString (libcxx.isLLVM or false) ''
    echo "-isystem ${
      getDev libcxx
    }/include/c++/v1" >> $out/nix-support/libcxx-cxxflags
    echo "-stdlib=libc++" >> $out/nix-support/libcxx-ldflags
  ''

@ -589,7 +588,8 @@ stdenvNoCC.mkDerivation {

  '' + ''
    echo "$ccLDFlags" >> $out/nix-support/cc-ldflags
    echo "$ccCFlags" >> $out/nix-support/cc-cflags
  '' + optionalString (targetPlatform.isDarwin && (libcxx != null) && (cc.isClang or false)) ''
  '' + optionalString
    (targetPlatform.isDarwin && (libcxx != null) && (cc.isClang or false)) ''
    echo " -L${libcxx_solib}" >> $out/nix-support/cc-ldflags
  ''

@ -607,7 +607,9 @@ stdenvNoCC.mkDerivation {

  ## Hardening support
  ##
  + ''
    export hardening_unsupported_flags="${concatStringsSep " " ccHardeningUnsupportedFlags}"
    export hardening_unsupported_flags="${
      concatStringsSep " " ccHardeningUnsupportedFlags
    }"
  ''

  # Machine flags. These are necessary to support

@ -622,8 +624,9 @@ stdenvNoCC.mkDerivation {

  # For clang, this is handled in add-clang-cc-cflags-before.sh

  # TODO: aarch64-darwin has mcpu incompatible with gcc
  + optionalString ((targetPlatform ? gcc.arch) && !isClang && !(targetPlatform.isDarwin && targetPlatform.isAarch64) &&
    isGccArchSupported targetPlatform.gcc.arch) ''
  + optionalString ((targetPlatform ? gcc.arch) && !isClang
    && !(targetPlatform.isDarwin && targetPlatform.isAarch64)
    && isGccArchSupported targetPlatform.gcc.arch) ''
    echo "-march=${targetPlatform.gcc.arch}" >> $out/nix-support/cc-cflags-before
  ''

@ -631,7 +634,8 @@ stdenvNoCC.mkDerivation {

  # instead of march. On all other platforms you should use mtune
  # and march instead.
  # TODO: aarch64-darwin has mcpu incompatible with gcc
  + optionalString ((targetPlatform ? gcc.cpu) && (isClang || !(targetPlatform.isDarwin && targetPlatform.isAarch64))) ''
  + optionalString ((targetPlatform ? gcc.cpu)
    && (isClang || !(targetPlatform.isDarwin && targetPlatform.isAarch64))) ''
    echo "-mcpu=${targetPlatform.gcc.cpu}" >> $out/nix-support/cc-cflags-before
  ''

@ -640,19 +644,19 @@ stdenvNoCC.mkDerivation {

  # vs. soft floats we use it here.
  + optionalString (targetPlatform ? gcc.float-abi) ''
    echo "-mfloat-abi=${targetPlatform.gcc.float-abi}" >> $out/nix-support/cc-cflags-before
  ''
  + optionalString (targetPlatform ? gcc.fpu) ''
  '' + optionalString (targetPlatform ? gcc.fpu) ''
    echo "-mfpu=${targetPlatform.gcc.fpu}" >> $out/nix-support/cc-cflags-before
  ''
  + optionalString (targetPlatform ? gcc.mode) ''
  '' + optionalString (targetPlatform ? gcc.mode) ''
    echo "-mmode=${targetPlatform.gcc.mode}" >> $out/nix-support/cc-cflags-before
  ''
  + optionalString (targetPlatform ? gcc.thumb) ''
    echo "-m${if targetPlatform.gcc.thumb then "thumb" else "arm"}" >> $out/nix-support/cc-cflags-before
  ''
  + (let tune = if targetPlatform ? gcc.tune
        then findBestTuneApproximation targetPlatform.gcc.tune
        else null;
  '' + optionalString (targetPlatform ? gcc.thumb) ''
    echo "-m${
      if targetPlatform.gcc.thumb then "thumb" else "arm"
    }" >> $out/nix-support/cc-cflags-before
  '' + (let
    tune = if targetPlatform ? gcc.tune then
      findBestTuneApproximation targetPlatform.gcc.tune
    else
      null;
  in optionalString (tune != null) ''
    echo "-mtune=${tune}" >> $out/nix-support/cc-cflags-before
  '')

@ -664,9 +668,11 @@ stdenvNoCC.mkDerivation {

    hardening_unsupported_flags+=" stackprotector fortify"
  '' + optionalString targetPlatform.isAvr ''
    hardening_unsupported_flags+=" stackprotector pic"
  '' + optionalString (targetPlatform.libc == "newlib" || targetPlatform.libc == "newlib-nano") ''
  '' + optionalString
    (targetPlatform.libc == "newlib" || targetPlatform.libc == "newlib-nano") ''
    hardening_unsupported_flags+=" stackprotector fortify pie pic"
  '' + optionalString (targetPlatform.libc == "musl" && targetPlatform.isx86_32) ''
  '' + optionalString
    (targetPlatform.libc == "musl" && targetPlatform.isx86_32) ''
    hardening_unsupported_flags+=" stackprotector"
  '' + optionalString targetPlatform.isNetBSD ''
    hardening_unsupported_flags+=" stackprotector fortify"

@ -684,7 +690,9 @@ stdenvNoCC.mkDerivation {

  + optionalString (libc != null && targetPlatform.isAvr) ''
    for isa in avr5 avr3 avr4 avr6 avr25 avr31 avr35 avr51 avrxmega2 avrxmega4 avrxmega5 avrxmega6 avrxmega7 tiny-stack; do
      echo "-B${getLib libc}/avr/lib/$isa" >> $out/nix-support/libc-crt1-cflags
      echo "-B${
        getLib libc
      }/avr/lib/$isa" >> $out/nix-support/libc-crt1-cflags
    done
  ''

@ -709,7 +717,9 @@ stdenvNoCC.mkDerivation {

  ''

  + optionalString cc.langAda or false ''
    substituteAll ${./add-gnat-extra-flags.sh} $out/nix-support/add-gnat-extra-flags.sh
    substituteAll ${
      ./add-gnat-extra-flags.sh
    } $out/nix-support/add-gnat-extra-flags.sh
  ''

  ##

@ -718,29 +728,30 @@ stdenvNoCC.mkDerivation {

  ##
  + optionalString isClang ''
    # Escape twice: once for this script, once for the one it gets substituted into.
    export march=${escapeShellArg
      (optionalString (targetPlatform ? gcc.arch)
        (escapeShellArg "-march=${targetPlatform.gcc.arch}"))}
    export march=${
      escapeShellArg (optionalString (targetPlatform ? gcc.arch)
        (escapeShellArg "-march=${targetPlatform.gcc.arch}"))
    }
    export defaultTarget=${targetPlatform.config}
    substituteAll ${./add-clang-cc-cflags-before.sh} $out/nix-support/add-local-cc-cflags-before.sh
    substituteAll ${
      ./add-clang-cc-cflags-before.sh
    } $out/nix-support/add-local-cc-cflags-before.sh
  ''

  ##
  ## Extra custom steps
  ##
  + extraBuildCommands
  + concatStringsSep "; "
    (mapAttrsToList
  + extraBuildCommands + concatStringsSep "; " (mapAttrsToList
    (name: value: "echo ${toString value} >> $out/nix-support/${name}")
    nixSupport);


  env = {
    inherit isClang;

    # for substitution in utils.bash
    # TODO(@sternenseemann): invent something cleaner than passing in "" in case of absence
    expandResponseParams = "${expand-response-params}/bin/expand-response-params";
    expandResponseParams =
      "${expand-response-params}/bin/expand-response-params";
    # TODO(@sternenseemann): rename env var via stdenv rebuild
    shell = getBin runtimeShell + runtimeShell.shellPath or "";
    gnugrep_bin = optionalString (!nativeTools) gnugrep;

@ -754,10 +765,10 @@ stdenvNoCC.mkDerivation {

    default_hardening_flags_str = builtins.toString defaultHardeningFlags;
  };

  meta =
    let cc_ = optionalAttrs (cc != null) cc; in
    (optionalAttrs (cc_ ? meta) (removeAttrs cc.meta ["priority"])) //
    { description = attrByPath ["meta" "description"] "System C compiler" cc_ + " (wrapper script)";
  meta = let cc_ = optionalAttrs (cc != null) cc;
  in (optionalAttrs (cc_ ? meta) (removeAttrs cc.meta [ "priority" ])) // {
    description = attrByPath [ "meta" "description" ] "System C compiler" cc_
      + " (wrapper script)";
    priority = 10;
    mainProgram = if name != "" then name else ccName;
  };
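One practical consequence of the nixSupport handling above: every attribute is echoed into $out/nix-support/<name>, where the wrapper scripts read it. A hedged sketch of using it (whether your wrapped compiler exposes .override depends on how it was instantiated):

```nix
# Hypothetical: add a flags file to a wrapped compiler via nixSupport.
# Each attribute lands in $out/nix-support/<name> through the
# mapAttrsToList echo loop in the reformatted code above.
stdenv.cc.override {
  nixSupport = {
    cc-cflags-before = [ "-fno-omit-frame-pointer" ];
  };
}
```
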
@ -1,36 +1,30 @@

{ lib
, buildPackages
}:
{ lib, buildPackages }:

let
  # rudimentary support for cross-compiling
  # see: https://github.com/NixOS/nixpkgs/pull/279487#discussion_r1444449726
  inherit (buildPackages)
    mktemp
    rsync
    ;
in
  inherit (buildPackages) mktemp rsync;

rec {
  /* Prepare a derivation for local builds.
   *
   * This function prepares checkpoint builds by storing
   * the build output and the sources for cross checking.
   * The build output can be used later to allow checkpoint builds
   * by passing the derivation output to the `mkCheckpointBuild` function.
   *
   * To build a project with checkpoints, follow these steps:
   * - run `prepareCheckpointBuild` on the desired derivation, e.g.
   *   checkpointArtifacts = prepareCheckpointBuild virtualbox;
   * - change something you want in the sources of the package,
   *   e.g. using source override:
   *   changedVBox = pkgs.virtualbox.overrideAttrs (old: {
   *     src = path/to/vbox/sources;
   *   });
   * - use `mkCheckpointBuild changedVBox checkpointArtifacts`
   * - enjoy shorter build times
   */
  prepareCheckpointBuild = drv: drv.overrideAttrs (old: {
in rec {
  # Prepare a derivation for local builds.
  #
  # This function prepares checkpoint builds by storing
  # the build output and the sources for cross checking.
  # The build output can be used later to allow checkpoint builds
  # by passing the derivation output to the `mkCheckpointBuild` function.
  #
  # To build a project with checkpoints, follow these steps:
  # - run `prepareCheckpointBuild` on the desired derivation, e.g.
  #   checkpointArtifacts = prepareCheckpointBuild virtualbox;
  # - change something you want in the sources of the package,
  #   e.g. using source override:
  #   changedVBox = pkgs.virtualbox.overrideAttrs (old: {
  #     src = path/to/vbox/sources;
  #   });
  # - use `mkCheckpointBuild changedVBox checkpointArtifacts`
  # - enjoy shorter build times
  prepareCheckpointBuild = drv:
    drv.overrideAttrs (old: {
      outputs = [ "out" ];
      name = drv.name + "-checkpointArtifacts";
      # To determine differences between the state of the build directory

@ -61,15 +55,15 @@ rec {

      doDist = false;
    });

  /* Build a derivation based on the checkpoint output generated by
   * the `prepareCheckpointBuild` function.
   *
   * Usage:
   * let
   *   checkpointArtifacts = prepareCheckpointBuild drv;
   * in mkCheckpointBuild drv checkpointArtifacts
   */
  mkCheckpointBuild = drv: checkpointArtifacts: drv.overrideAttrs (old: {
  # Build a derivation based on the checkpoint output generated by
  # the `prepareCheckpointBuild` function.
  #
  # Usage:
  # let
  #   checkpointArtifacts = prepareCheckpointBuild drv;
  # in mkCheckpointBuild drv checkpointArtifacts
  mkCheckpointBuild = drv: checkpointArtifacts:
    drv.overrideAttrs (old: {
      # The actual checkpoint build phase.
      # We compare the changed sources from a previous build with the current and create a patch.
      # Afterwards we clean the build directory and copy the previous output files (including the sources).
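Assembled from the usage comments above into one expression (the package choice and source path are illustrative):

```nix
# End-to-end checkpoint build, following the steps documented above.
let
  checkpointArtifacts = prepareCheckpointBuild pkgs.virtualbox;
  changedVBox = pkgs.virtualbox.overrideAttrs (old: {
    src = /path/to/vbox/sources;  # placeholder local checkout
  });
in mkCheckpointBuild changedVBox checkpointArtifacts
```
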
@ -23,8 +23,7 @@ stdenv.mkDerivation {

  empty = rootPaths == [ ];

  buildCommand =
    ''
  buildCommand = ''
    out=''${outputs[out]}

    mkdir $out
@ -1,136 +1,137 @@

{ lib, stdenv, coqPackages, coq, which, fetchzip }@args:

let
  lib = import ./extra-lib.nix {
    inherit (args) lib;
  };
  lib = import ./extra-lib.nix { inherit (args) lib; };

  inherit (lib)
    concatStringsSep
    flip
    foldl
    isFunction
    isString
    optional
    optionalAttrs
    optionals
    optionalString
    pred
    remove
    switch
    versions
    ;
    concatStringsSep flip foldl isFunction isString optional optionalAttrs
    optionals optionalString pred remove switch versions;

  inherit (lib.attrsets) removeAttrs;
  inherit (lib.strings) match;

  isGitHubDomain = d: match "^github.*" d != null;
  isGitLabDomain = d: match "^gitlab.*" d != null;
in

{ pname,
  version ? null,
  fetcher ? null,
  owner ? "coq-community",
  domain ? "github.com",
  repo ? pname,
  defaultVersion ? null,
  releaseRev ? (v: v),
  displayVersion ? {},
  release ? {},
  buildInputs ? [],
  nativeBuildInputs ? [],
  extraBuildInputs ? [],
  extraNativeBuildInputs ? [],
  overrideBuildInputs ? [],
  overrideNativeBuildInputs ? [],
  namePrefix ? [ "coq" ],
  enableParallelBuilding ? true,
  extraInstallFlags ? [],
  setCOQBIN ? true,
  mlPlugin ? false,
  useMelquiondRemake ? null,
  dropAttrs ? [],
  keepAttrs ? [],
  dropDerivationAttrs ? [],
  useDuneifVersion ? (x: false),
  useDune ? false,
  opam-name ? (concatStringsSep "-" (namePrefix ++ [ pname ])),
  ...
}@args:
in { pname, version ? null, fetcher ? null, owner ? "coq-community"
, domain ? "github.com", repo ? pname, defaultVersion ? null
, releaseRev ? (v: v), displayVersion ? { }, release ? { }, buildInputs ? [ ]
, nativeBuildInputs ? [ ], extraBuildInputs ? [ ], extraNativeBuildInputs ? [ ]
, overrideBuildInputs ? [ ], overrideNativeBuildInputs ? [ ]
, namePrefix ? [ "coq" ], enableParallelBuilding ? true, extraInstallFlags ? [ ]
, setCOQBIN ? true, mlPlugin ? false, useMelquiondRemake ? null, dropAttrs ? [ ]
, keepAttrs ? [ ], dropDerivationAttrs ? [ ], useDuneifVersion ? (x: false)
, useDune ? false, opam-name ? (concatStringsSep "-" (namePrefix ++ [ pname ]))
, ... }@args:
let
  args-to-remove = foldl (flip remove) ([
    "version" "fetcher" "repo" "owner" "domain" "releaseRev"
    "displayVersion" "defaultVersion" "useMelquiondRemake"
    "version"
    "fetcher"
    "repo"
    "owner"
    "domain"
    "releaseRev"
    "displayVersion"
    "defaultVersion"
    "useMelquiondRemake"
    "release"
    "buildInputs" "nativeBuildInputs"
    "extraBuildInputs" "extraNativeBuildInputs"
    "overrideBuildInputs" "overrideNativeBuildInputs"
    "buildInputs"
    "nativeBuildInputs"
    "extraBuildInputs"
    "extraNativeBuildInputs"
    "overrideBuildInputs"
    "overrideNativeBuildInputs"
    "namePrefix"
    "meta" "useDuneifVersion" "useDune" "opam-name"
    "extraInstallFlags" "setCOQBIN" "mlPlugin"
    "dropAttrs" "dropDerivationAttrs" "keepAttrs" ] ++ dropAttrs) keepAttrs;
  fetch = import ../coq/meta-fetch/default.nix
    { inherit lib stdenv fetchzip; } ({
    "meta"
    "useDuneifVersion"
    "useDune"
    "opam-name"
    "extraInstallFlags"
    "setCOQBIN"
    "mlPlugin"
    "dropAttrs"
    "dropDerivationAttrs"
    "keepAttrs"
  ] ++ dropAttrs) keepAttrs;
  fetch = import ../coq/meta-fetch/default.nix { inherit lib stdenv fetchzip; }
    ({
      inherit release releaseRev;
      location = { inherit domain owner repo; };
    } // optionalAttrs (args ? fetcher) { inherit fetcher; });
  fetched = fetch (if version != null then version else defaultVersion);
  display-pkg = n: sep: v:
    let d = displayVersion.${n} or (if sep == "" then ".." else true); in
    n + optionalString (v != "" && v != null) (switch d [
      { case = true; out = sep + v; }
      { case = "."; out = sep + versions.major v; }
      { case = ".."; out = sep + versions.majorMinor v; }
      { case = "..."; out = sep + versions.majorMinorPatch v; }
      { case = isFunction; out = optionalString (d v != "") (sep + d v); }
      { case = isString; out = optionalString (d != "") (sep + d); }
    let d = displayVersion.${n} or (if sep == "" then ".." else true);
    in n + optionalString (v != "" && v != null) (switch d [
      {
        case = true;
        out = sep + v;
      }
      {
        case = ".";
        out = sep + versions.major v;
      }
      {
        case = "..";
        out = sep + versions.majorMinor v;
      }
      {
        case = "...";
        out = sep + versions.majorMinorPatch v;
      }
      {
        case = isFunction;
        out = optionalString (d v != "") (sep + d v);
      }
      {
        case = isString;
        out = optionalString (d != "") (sep + d);
      }
    ] "") + optionalString (v == null) "-broken";
  append-version = p: n: p + display-pkg n "" coqPackages.${n}.version + "-";
  prefix-name = foldl append-version "" namePrefix;
  useDune = args.useDune or (useDuneifVersion fetched.version);
  coqlib-flags = switch coq.coq-version [
    { case = v: versions.isLe "8.6" v && v != "dev" ;
      out = [ "COQLIB=$(out)/lib/coq/${coq.coq-version}/" ]; }
  ] [ "COQLIBINSTALL=$(out)/lib/coq/${coq.coq-version}/user-contrib"
    "COQPLUGININSTALL=$(OCAMLFIND_DESTDIR)" ];
  docdir-flags = switch coq.coq-version [
    { case = v: versions.isLe "8.6" v && v != "dev";
      out = [ "DOCDIR=$(out)/share/coq/${coq.coq-version}/" ]; }
  ] [ "COQDOCINSTALL=$(out)/share/coq/${coq.coq-version}/user-contrib" ];
in
  coqlib-flags = switch coq.coq-version [{
    case = v: versions.isLe "8.6" v && v != "dev";
    out = [ "COQLIB=$(out)/lib/coq/${coq.coq-version}/" ];
  }] [
    "COQLIBINSTALL=$(out)/lib/coq/${coq.coq-version}/user-contrib"
    "COQPLUGININSTALL=$(OCAMLFIND_DESTDIR)"
  ];
  docdir-flags = switch coq.coq-version [{
    case = v: versions.isLe "8.6" v && v != "dev";
    out = [ "DOCDIR=$(out)/share/coq/${coq.coq-version}/" ];
  }] [ "COQDOCINSTALL=$(out)/share/coq/${coq.coq-version}/user-contrib" ];

stdenv.mkDerivation (removeAttrs ({
in stdenv.mkDerivation (removeAttrs ({

  name = prefix-name + (display-pkg pname "-" fetched.version);

  inherit (fetched) version src;

  nativeBuildInputs = args.overrideNativeBuildInputs
    or ([ which ]
  nativeBuildInputs = args.overrideNativeBuildInputs or ([ which ]
    ++ optional useDune coq.ocamlPackages.dune_3
    ++ optionals (useDune || mlPlugin) [ coq.ocamlPackages.ocaml coq.ocamlPackages.findlib ]
    ++ (args.nativeBuildInputs or []) ++ extraNativeBuildInputs);
  buildInputs = args.overrideBuildInputs
    or ([ coq ] ++ (args.buildInputs or []) ++ extraBuildInputs);
    ++ optionals (useDune || mlPlugin) [
      coq.ocamlPackages.ocaml
      coq.ocamlPackages.findlib
    ] ++ (args.nativeBuildInputs or [ ]) ++ extraNativeBuildInputs);
  buildInputs = args.overrideBuildInputs or ([ coq ]
    ++ (args.buildInputs or [ ]) ++ extraBuildInputs);
  inherit enableParallelBuilding;

  meta = ({ platforms = coq.meta.platforms; } //
    (switch domain [{
  meta = ({
    platforms = coq.meta.platforms;
  } // (switch domain [{
    case = pred.union isGitHubDomain isGitLabDomain;
    out = { homepage = "https://${domain}/${owner}/${repo}"; };
  }] {}) //
    optionalAttrs (fetched.broken or false) { coqFilter = true; broken = true; }) //
    (args.meta or {}) ;
  }] { }) // optionalAttrs (fetched.broken or false) {
    coqFilter = true;
    broken = true;
  }) // (args.meta or { });

}
// (optionalAttrs setCOQBIN { COQBIN = "${coq}/bin/"; })
} // (optionalAttrs setCOQBIN { COQBIN = "${coq}/bin/"; })
// (optionalAttrs (!args ? installPhase && !args ? useMelquiondRemake) {
  installFlags =
    coqlib-flags ++ docdir-flags ++
    extraInstallFlags;
})
// (optionalAttrs useDune {
  installFlags = coqlib-flags ++ docdir-flags ++ extraInstallFlags;
}) // (optionalAttrs useDune {
  buildPhase = ''
    runHook preBuild
    dune build -p ${opam-name} ''${enableParallelBuilding:+-j $NIX_BUILD_CORES}

@ -144,12 +145,11 @@ stdenv.mkDerivation (removeAttrs ({

    mv $out/lib/TEMPORARY $out/lib/coq/${coq.coq-version}
    runHook postInstall
  '';
})
// (optionalAttrs (args?useMelquiondRemake) rec {
}) // (optionalAttrs (args ? useMelquiondRemake) rec {
  COQUSERCONTRIB = "$out/lib/coq/${coq.coq-version}/user-contrib";
  preConfigurePhases = "autoconf";
  configureFlags = [ "--libdir=${COQUSERCONTRIB}/${useMelquiondRemake.logpath or ""}" ];
  configureFlags =
    [ "--libdir=${COQUSERCONTRIB}/${useMelquiondRemake.logpath or ""}" ];
  buildPhase = "./remake -j$NIX_BUILD_CORES";
  installPhase = "./remake install";
})
// (removeAttrs args args-to-remove)) dropDerivationAttrs)
}) // (removeAttrs args args-to-remove)) dropDerivationAttrs)
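A sketch of a package built with this function; the name, version, and hash are placeholders, and `release` maps versions to fetch metadata handled by the meta-fetch dispatcher later in this diff:

```nix
# Hypothetical Coq package using mkCoqDerivation.
mkCoqDerivation {
  pname = "example-lib";                    # placeholder
  owner = "coq-community";                  # the default, shown for clarity
  defaultVersion = "1.0.0";                 # placeholder
  release."1.0.0".sha256 = lib.fakeSha256;  # replace after the first build
}
```
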
@ -2,30 +2,16 @@

let
  inherit (lib)
    all
    concatStringsSep
    findFirst
    flip
    getAttr
    head
    isFunction
    length
    recursiveUpdate
    splitVersion
    tail
    take
    versionAtLeast
    versionOlder
    zipListsWith
    ;
in
recursiveUpdate lib (rec {
    all concatStringsSep findFirst flip getAttr head isFunction length
    recursiveUpdate splitVersion tail take versionAtLeast versionOlder
    zipListsWith;
in recursiveUpdate lib (rec {

  versions =
    let
  versions = let
    truncate = n: v: concatStringsSep "." (take n (splitVersion v));
    opTruncate = op: v0: v: let n = length (splitVersion v0); in
      op (truncate n v) (truncate n v0);
    opTruncate = op: v0: v:
      let n = length (splitVersion v0);
      in op (truncate n v) (truncate n v0);
  in rec {

    /* Get string of the first n parts of a version string.

@ -77,7 +63,6 @@ recursiveUpdate lib (rec {

         => false
       - range "8.10" "8.11+" "8.11+beta1"
         => false

    */
    isGe = opTruncate versionAtLeast;
    isGt = opTruncate (flip versionOlder);

@ -98,25 +83,35 @@ recursiveUpdate lib (rec {

       => [ [ "y" ] "x" [ "z" "t" ] ]
    */
    splitList = pred: l: # put in file lists
      let loop = (vv: v: l: if l == [] then vv ++ [v]
        else let hd = head l; tl = tail l; in
        if pred hd then loop (vv ++ [ v hd ]) [] tl else loop vv (v ++ [hd]) tl);
      let
        loop = (vv: v: l:
          if l == [ ] then
            vv ++ [ v ]
          else
            let
              hd = head l;
              tl = tail l;
            in if pred hd then
              loop (vv ++ [ v hd ]) [ ] tl
            else
              loop vv (v ++ [ hd ]) tl);
      in loop [ ] [ ] l;

  pred = {
    /* Predicate intersection, union, and complement */
    # Predicate intersection, union, and complement
    inter = p: q: x: p x && q x;
    union = p: q: x: p x || q x;
    compl = p: x: !p x;
    true = p: true;
    false = p: false;

    /* predicate "being equal to y" */
    # predicate "being equal to y"
    equal = y: x: x == y;
  };

  /* Emulate a "switch - case" construct,
   instead of relying on `if then else if ...` */
   instead of relying on `if then else if ...`
  */
  /* Usage:
  ```nix
  switch-if [

@ -126,7 +121,8 @@ recursiveUpdate lib (rec {

  ] default-out
  ```
  where an if-clause has the form `{ cond = b; out = r; }`
  the first branch such that `b` is true */
  the first branch such that `b` is true
  */

  switch-if = c: d: (findFirst (getAttr "cond") { } c).out or d;

@ -154,14 +150,22 @@ recursiveUpdate lib (rec {

     if the variables p are not functions,
     they are converted to `equal p`
     if out is missing the default-out is taken */
     if out is missing the default-out is taken
  */

  switch = var: clauses: default: with pred; let
  switch = var: clauses: default:
    with pred;
    let
      compare = f: if isFunction f then f else equal f;
      combine = cl: var:
        if cl?case then compare cl.case var
        else all (equal true) (zipListsWith compare cl.cases var); in
    switch-if (map (cl: { cond = combine cl var; inherit (cl) out; }) clauses) default;
        if cl ? case then
          compare cl.case var
        else
          all (equal true) (zipListsWith compare cl.cases var);
    in switch-if (map (cl: {
      cond = combine cl var;
      inherit (cl) out;
    }) clauses) default;

  /* Override arguments to mkCoqDerivation for a Coq library.

@ -207,7 +211,8 @@ recursiveUpdate lib (rec {

  coqPackages.QuickChick.override { version = "1.4.0"; }
  ```
  */
  overrideCoqDerivation = f: drv: (drv.override (args: {
  overrideCoqDerivation = f: drv:
    (drv.override (args: {
      mkCoqDerivation = drv_: (args.mkCoqDerivation drv_).override f;
    }));
})
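The switch/switch-if helpers documented above behave like this sketch (values illustrative):

```nix
# switch-if picks the first clause whose cond is true:
#   switch-if [ { cond = false; out = 1; } { cond = true; out = 2; } ] 0  =>  2
# switch compares a value against each case; a non-function case means equality:
switch "8.17" [
  { case = versions.isLe "8.6"; out = "old"; }
  { case = "8.17"; out = "this exact version"; }
] "default"
```
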
@ -1,95 +1,118 @@

{ lib, stdenv, fetchzip }@args:

let
  lib = import ../extra-lib.nix {
    inherit (args) lib;
  };
  lib = import ../extra-lib.nix { inherit (args) lib; };

  inherit (lib)
    attrNames
    fakeSha256
    filter
    findFirst
    head
    isAttrs
    isPath
    isString
    last
    length
    optionalAttrs
    pathExists
    pred
    sort
    switch
    switch-if
    versionAtLeast
    versions
    ;
    attrNames fakeSha256 filter findFirst head isAttrs isPath isString last
    length optionalAttrs pathExists pred sort switch switch-if versionAtLeast
    versions;

  inherit (lib.strings) match split;

  default-fetcher = {domain ? "github.com", owner ? "", repo, rev, name ? "source", sha256 ? null, ...}@args:
    let ext = if args?sha256 then "zip" else "tar.gz";
  default-fetcher = { domain ? "github.com", owner ? "", repo, rev
    , name ? "source", sha256 ? null, ... }@args:
    let
      ext = if args ? sha256 then "zip" else "tar.gz";
      fmt = if args ? sha256 then "zip" else "tarball";
      pr = match "^#(.*)$" rev;
      url = switch-if [
        { cond = pr == null && (match "^github.*" domain) != null;
          out = "https://${domain}/${owner}/${repo}/archive/${rev}.${ext}"; }
        { cond = pr != null && (match "^github.*" domain) != null;
          out = "https://api.${domain}/repos/${owner}/${repo}/${fmt}/pull/${head pr}/head"; }
        { cond = pr == null && (match "^gitlab.*" domain) != null;
          out = "https://${domain}/${owner}/${repo}/-/archive/${rev}/${repo}-${rev}.${ext}"; }
        { cond = (match "(www.)?mpi-sws.org" domain) != null;
          out = "https://www.mpi-sws.org/~${owner}/${repo}/download/${repo}-${rev}.${ext}";}
      ] (throw "meta-fetch: no fetcher found for domain ${domain} on ${rev}");
      fetch = x: if args?sha256 then fetchzip (x // { inherit sha256; }) else builtins.fetchTarball x;
    in fetch { inherit url ; };
in
{
  fetcher ? default-fetcher,
  location,
  release ? {},
  releaseRev ? (v: v),
}:
let isVersion = x: isString x && match "^/.*" x == null && release?${x};
    shortVersion = x: if (isString x && match "^/.*" x == null)
      then findFirst (v: versions.majorMinor v == x) null
          cond = pr == null && (match "^github.*" domain) != null;
          out = "https://${domain}/${owner}/${repo}/archive/${rev}.${ext}";
        }
        {
          cond = pr != null && (match "^github.*" domain) != null;
          out = "https://api.${domain}/repos/${owner}/${repo}/${fmt}/pull/${
              head pr
            }/head";
        }
        {
          cond = pr == null && (match "^gitlab.*" domain) != null;
          out =
            "https://${domain}/${owner}/${repo}/-/archive/${rev}/${repo}-${rev}.${ext}";
        }
        {
          cond = (match "(www.)?mpi-sws.org" domain) != null;
          out =
            "https://www.mpi-sws.org/~${owner}/${repo}/download/${repo}-${rev}.${ext}";
        }
      ] (throw "meta-fetch: no fetcher found for domain ${domain} on ${rev}");
      fetch = x:
        if args ? sha256 then
          fetchzip (x // { inherit sha256; })
        else
          builtins.fetchTarball x;
    in fetch { inherit url; };
in { fetcher ? default-fetcher, location, release ? { }, releaseRev ? (v: v), }:
let
  isVersion = x: isString x && match "^/.*" x == null && release ? ${x};
  shortVersion = x:
    if (isString x && match "^/.*" x == null) then
      findFirst (v: versions.majorMinor v == x) null
      (sort versionAtLeast (attrNames release))
    else null;
    else
      null;
  isShortVersion = x: shortVersion x != null;
  isPathString = x: isString x && match "^/.*" x != null && pathExists x; in
arg:
  isPathString = x: isString x && match "^/.*" x != null && pathExists x;
in arg:
switch arg [
  { case = isNull; out = { version = "broken"; src = ""; broken = true; }; }
  { case = isPathString; out = { version = "dev"; src = arg; }; }
  { case = pred.union isVersion isShortVersion;
  {
    case = isNull;
    out = {
      version = "broken";
      src = "";
      broken = true;
    };
  }
  {
    case = isPathString;
    out = {
      version = "dev";
      src = arg;
    };
  }
  {
    case = pred.union isVersion isShortVersion;
    out = let
      v = if isVersion arg then arg else shortVersion arg;
      given-sha256 = release.${v}.sha256 or "";
      sha256 = if given-sha256 == "" then fakeSha256 else given-sha256;
      rv = release.${v} // { inherit sha256; };
    in
    {
    in {
      version = rv.version or v;
      src = rv.src or fetcher (location // { rev = releaseRev v; } // rv);
    };
  }
  { case = isString;
  {
    case = isString;
    out = let
      splitted = filter isString (split ":" arg);
      rev = last splitted;
      has-owner = length splitted > 1;
      version = "dev"; in {
      version = "dev";
    in {
      inherit version;
      src = fetcher (location // { inherit rev; } //
        (optionalAttrs has-owner { owner = head splitted; }));
    }; }
  { case = isAttrs;
      src = fetcher (location // {
        inherit rev;
      } // (optionalAttrs has-owner { owner = head splitted; }));
    };
  }
  {
    case = isAttrs;
    out = {
      version = arg.version or "dev";
      src = (arg.fetcher or fetcher) (location // (arg.location or {})); }; }
  { case = isPath;
      src = (arg.fetcher or fetcher) (location // (arg.location or { }));
    };
  }
  {
    case = isPath;
    out = {
      version = "dev";
      src = builtins.path {path = arg; name = location.name or "source";}; }; }
      src = builtins.path {
        path = arg;
        name = location.name or "source";
      };
    };
  }
] (throw "not a valid source description")
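The dispatcher above accepts several source-description shapes; this list sketches them (values are illustrative):

```nix
# Each element is one accepted `arg` form for the switch above:
[
  "1.2.0"              # version present in `release`
  "1.2"                # short version, resolved against `release`
  "#1234"              # pull request head (github-like domains)
  "someowner:somerev"  # rev, optionally prefixed by an owner
  ./local-checkout     # path: version becomes "dev"
  { version = "dev"; location = { rev = "master"; }; }  # attrset form
]
```
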
@ -1,33 +1,16 @@
|
|||
{ lib
|
||||
, stdenv
|
||||
, callPackage
|
||||
, runCommand
|
||||
, writeText
|
||||
, pub2nix
|
||||
, dartHooks
|
||||
, makeWrapper
|
||||
, dart
|
||||
, nodejs
|
||||
, darwin
|
||||
, jq
|
||||
, yq
|
||||
}:
|
||||
{ lib, stdenv, callPackage, runCommand, writeText, pub2nix, dartHooks
|
||||
, makeWrapper, dart, nodejs, darwin, jq, yq }:
|
||||
|
||||
{ src
|
||||
, sourceRoot ? "source"
|
||||
{ src, sourceRoot ? "source"
|
||||
, packageRoot ? (lib.removePrefix "/" (lib.removePrefix "source" sourceRoot))
|
||||
, gitHashes ? { }
|
||||
, sdkSourceBuilders ? { }
|
||||
, customSourceBuilders ? { }
|
||||
, gitHashes ? { }, sdkSourceBuilders ? { }, customSourceBuilders ? { }
|
||||
|
||||
, sdkSetupScript ? ""
|
||||
, extraPackageConfigSetup ? ""
|
||||
, sdkSetupScript ? "", extraPackageConfigSetup ? ""
|
||||
|
||||
# Output type to produce. Can be any kind supported by dart
|
||||
# https://dart.dev/tools/dart-compile#types-of-output
|
||||
# If using jit, you might want to pass some arguments to `dartJitFlags`
, dartOutputType ? "exe"
, dartCompileCommand ? "dart compile"
, dartOutputType ? "exe", dartCompileCommand ? "dart compile"
, dartCompileFlags ? [ ]
# These come at the end of the command, useful to pass flags to the jit run
, dartJitFlags ? [ ]
@@ -39,31 +22,40 @@
, dartEntryPoints ? null
# Used when wrapping aot, jit, kernel, and js builds.
# Set to null to disable wrapping.
, dartRuntimeCommand ? if dartOutputType == "aot-snapshot" then "${dart}/bin/dartaotruntime"
else if (dartOutputType == "jit-snapshot" || dartOutputType == "kernel") then "${dart}/bin/dart"
else if dartOutputType == "js" then "${nodejs}/bin/node"
else null

, runtimeDependencies ? [ ]
, extraWrapProgramArgs ? ""

, autoPubspecLock ? null
, pubspecLock ? if autoPubspecLock == null then
throw "The pubspecLock argument is required. If import-from-derivation is allowed (it isn't in Nixpkgs), you can set autoPubspecLock to the path to a pubspec.lock instead."
, dartRuntimeCommand ? if dartOutputType == "aot-snapshot" then
"${dart}/bin/dartaotruntime"
else if (dartOutputType == "jit-snapshot" || dartOutputType == "kernel") then
"${dart}/bin/dart"
else if dartOutputType == "js" then
"${nodejs}/bin/node"
else
assert lib.assertMsg (builtins.pathExists autoPubspecLock) "The pubspec.lock file could not be found!";
lib.importJSON (runCommand "${lib.getName args}-pubspec-lock-json" { nativeBuildInputs = [ yq ]; } ''yq . '${autoPubspecLock}' > "$out"'')
, ...
}@args:
null

, runtimeDependencies ? [ ], extraWrapProgramArgs ? ""

, autoPubspecLock ? null, pubspecLock ? if autoPubspecLock == null then
throw
"The pubspecLock argument is required. If import-from-derivation is allowed (it isn't in Nixpkgs), you can set autoPubspecLock to the path to a pubspec.lock instead."
else
assert lib.assertMsg (builtins.pathExists autoPubspecLock)
"The pubspec.lock file could not be found!";
lib.importJSON (runCommand "${lib.getName args}-pubspec-lock-json" {
nativeBuildInputs = [ yq ];
} ''yq . '${autoPubspecLock}' > "$out"''), ... }@args:

let
generators = callPackage ./generators.nix { inherit dart; } { buildDrvArgs = args; };
generators =
callPackage ./generators.nix { inherit dart; } { buildDrvArgs = args; };

pubspecLockFile = builtins.toJSON pubspecLock;
pubspecLockData = pub2nix.readPubspecLock { inherit src packageRoot pubspecLock gitHashes sdkSourceBuilders customSourceBuilders; };
pubspecLockData = pub2nix.readPubspecLock {
inherit src packageRoot pubspecLock gitHashes sdkSourceBuilders
customSourceBuilders;
};
packageConfig = generators.linkPackageConfig {
packageConfig = pub2nix.generatePackageConfig {
pname = if args.pname != null then "${args.pname}-${args.version}" else null;
pname =
if args.pname != null then "${args.pname}-${args.version}" else null;

dependencies =
# Ideally, we'd only include the main dependencies and their transitive
@@ -80,19 +72,25 @@ let
extraSetupCommands = extraPackageConfigSetup;
};

inherit (dartHooks.override { inherit dart; }) dartConfigHook dartBuildHook dartInstallHook dartFixupHook;
inherit (dartHooks.override { inherit dart; })
dartConfigHook dartBuildHook dartInstallHook dartFixupHook;

baseDerivation = stdenv.mkDerivation (finalAttrs: (builtins.removeAttrs args [ "gitHashes" "sdkSourceBuilders" "pubspecLock" "customSourceBuilders" ]) // {
inherit pubspecLockFile packageConfig sdkSetupScript
dartCompileCommand dartOutputType dartRuntimeCommand dartCompileFlags
dartJitFlags;
baseDerivation = stdenv.mkDerivation (finalAttrs:
(builtins.removeAttrs args [
"gitHashes"
"sdkSourceBuilders"
"pubspecLock"
"customSourceBuilders"
]) // {
inherit pubspecLockFile packageConfig sdkSetupScript dartCompileCommand
dartOutputType dartRuntimeCommand dartCompileFlags dartJitFlags;

outputs = [ "out" "pubcache" ] ++ args.outputs or [ ];

dartEntryPoints =
if (dartEntryPoints != null)
then writeText "entrypoints.json" (builtins.toJSON dartEntryPoints)
else null;
dartEntryPoints = if (dartEntryPoints != null) then
writeText "entrypoints.json" (builtins.toJSON dartEntryPoints)
else
null;

runtimeDependencies = map lib.getLib runtimeDependencies;

@@ -104,9 +102,7 @@ let
dartFixupHook
makeWrapper
jq
] ++ lib.optionals stdenv.isDarwin [
darwin.sigtool
] ++
] ++ lib.optionals stdenv.isDarwin [ darwin.sigtool ] ++
# Ensure that we inherit the propagated build inputs from the dependencies.
builtins.attrValues pubspecLockData.dependencySources;

@@ -120,13 +116,12 @@ let

passAsFile = [ "pubspecLockFile" ];

passthru = {
pubspecLock = pubspecLockData;
} // (args.passthru or { });
passthru = { pubspecLock = pubspecLockData; } // (args.passthru or { });

meta = (args.meta or { }) // { platforms = args.meta.platforms or dart.meta.platforms; };
meta = (args.meta or { }) // {
platforms = args.meta.platforms or dart.meta.platforms;
};
});
in
assert !(builtins.isString dartOutputType && dartOutputType != "") ->
throw "dartOutputType must be a non-empty string";
in assert !(builtins.isString dartOutputType && dartOutputType != "")
-> throw "dartOutputType must be a non-empty string";
baseDerivation
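
For orientation, the argument set being reformatted above is consumed by nixpkgs' buildDartApplication. A minimal usage sketch (the pname, version, and src here are hypothetical, and autoPubspecLock only works where import-from-derivation is allowed, as the diff's own error message notes):

  buildDartApplication rec {
    pname = "my-dart-tool";   # hypothetical package
    version = "1.0.0";
    src = ./.;                # hypothetical source
    # Reads pubspec.lock via import-from-derivation; in Nixpkgs itself,
    # pass the pubspecLock attribute set instead.
    autoPubspecLock = src + "/pubspec.lock";
    dartOutputType = "exe";   # the default shown in the diff
  }
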
@@ -1,20 +1,11 @@
{ lib
, stdenvNoCC
, dart
, dartHooks
, jq
, yq
, cacert
}:
{ lib, stdenvNoCC, dart, dartHooks, jq, yq, cacert }:

{
# Arguments used in the derivation that builds the Dart package.
# Passing these is recommended to ensure that the same steps are made to
# prepare the sources in both this derivation and the one that builds the Dart
# package.
buildDrvArgs ? { }
, ...
}@args:
buildDrvArgs ? { }, ... }@args:

# This is a derivation and setup hook that can be used to fetch dependencies for Dart projects.
# It is designed to be placed in the nativeBuildInputs of a derivation that builds a Dart package.
@@ -38,22 +29,25 @@ let
"postPatch"
];

buildDrvInheritArgs = builtins.foldl'
(attrs: arg:
if buildDrvArgs ? ${arg}
then attrs // { ${arg} = buildDrvArgs.${arg}; }
else attrs)
{ }
buildDrvInheritArgNames;
buildDrvInheritArgs = builtins.foldl' (attrs: arg:
if buildDrvArgs ? ${arg} then
attrs // { ${arg} = buildDrvArgs.${arg}; }
else
attrs) { } buildDrvInheritArgNames;

drvArgs = buildDrvInheritArgs // (removeAttrs args [ "buildDrvArgs" ]);
name = (if drvArgs ? name then drvArgs.name else "${drvArgs.pname}-${drvArgs.version}");
name = (if drvArgs ? name then
drvArgs.name
else
"${drvArgs.pname}-${drvArgs.version}");

# Adds the root package to a dependency package_config.json file from pub2nix.
linkPackageConfig = { packageConfig, extraSetupCommands ? "" }: stdenvNoCC.mkDerivation (drvArgs // {
linkPackageConfig = { packageConfig, extraSetupCommands ? "" }:
stdenvNoCC.mkDerivation (drvArgs // {
name = "${name}-package-config-with-root.json";

nativeBuildInputs = drvArgs.nativeBuildInputs or [ ] ++ args.nativeBuildInputs or [ ] ++ [ jq yq ];
nativeBuildInputs = drvArgs.nativeBuildInputs or [ ]
++ args.nativeBuildInputs or [ ] ++ [ jq yq ];

dontBuild = true;

@@ -67,8 +61,4 @@ let
runHook postInstall
'';
});
in
{
inherit
linkPackageConfig;
}
in { inherit linkPackageConfig; }
@@ -11,10 +11,8 @@
substitutions.yq = "${yq}/bin/yq";
substitutions.jq = "${jq}/bin/jq";
} ./dart-build-hook.sh;
dartInstallHook = makeSetupHook {
name = "dart-install-hook";
} ./dart-install-hook.sh;
dartFixupHook = makeSetupHook {
name = "dart-fixup-hook";
} ./dart-fixup-hook.sh;
dartInstallHook =
makeSetupHook { name = "dart-install-hook"; } ./dart-install-hook.sh;
dartFixupHook =
makeSetupHook { name = "dart-fixup-hook"; } ./dart-fixup-hook.sh;
}
@@ -1,8 +1,4 @@
{ lib
, runCommand
, jq
, yq
}:
{ lib, runCommand, jq, yq }:

{ pname ? null

@@ -10,16 +6,15 @@
, dependencies

# An attribute set of package names to sources.
, dependencySources
}:
, dependencySources }:

let
packages = lib.genAttrs dependencies (dependency: rec {
src = dependencySources.${dependency};
inherit (src) packageRoot;
});
in
(runCommand "${lib.optionalString (pname != null) "${pname}-"}package-config.json" {
in (runCommand
"${lib.optionalString (pname != null) "${pname}-"}package-config.json" {
inherit packages;

nativeBuildInputs = [ jq yq ];
@@ -1,9 +1,4 @@
{ lib
, callPackage
, fetchurl
, fetchgit
, runCommand
}:
{ lib, callPackage, fetchurl, fetchgit, runCommand }:

{
# The source directory of the package.
@@ -30,11 +25,11 @@
# source, and source files are given in an attribute set argument.
#
# The passthru of the source derivation should be propagated.
, customSourceBuilders ? { }
}:
, customSourceBuilders ? { } }:

let
dependencyVersions = builtins.mapAttrs (name: details: details.version) pubspecLock.packages;
dependencyVersions =
builtins.mapAttrs (name: details: details.version) pubspecLock.packages;

dependencyTypes = {
"direct main" = "main";
@@ -43,8 +38,11 @@ let
"transitive" = "transitive";
};

dependencies = lib.foldlAttrs
(dependencies: name: details: dependencies // { ${dependencyTypes.${details.dependency}} = dependencies.${dependencyTypes.${details.dependency}} ++ [ name ]; })
dependencies = lib.foldlAttrs (dependencies: name: details:
dependencies // {
${dependencyTypes.${details.dependency}} =
dependencies.${dependencyTypes.${details.dependency}} ++ [ name ];
})
(lib.genAttrs (builtins.attrValues dependencyTypes) (dependencyType: [ ]))
pubspecLock.packages;

@@ -54,47 +52,56 @@ let
let
archive = fetchurl {
name = "pub-${name}-${details.version}.tar.gz";
url = "${details.description.url}/packages/${details.description.name}/versions/${details.version}.tar.gz";
url =
"${details.description.url}/packages/${details.description.name}/versions/${details.version}.tar.gz";
sha256 = details.description.sha256;
};
in
runCommand "pub-${name}-${details.version}" { passthru.packageRoot = "."; } ''
in runCommand "pub-${name}-${details.version}" {
passthru.packageRoot = ".";
} ''
mkdir -p "$out"
tar xf '${archive}' -C "$out"
'';

mkGitDependencySource = name: details: (fetchgit {
mkGitDependencySource = name: details:
(fetchgit {
name = "pub-${name}-${details.version}";
url = details.description.url;
rev = details.description.resolved-ref;
hash = gitHashes.${name} or (throw "A Git hash is required for ${name}! Set to an empty string to obtain it.");
hash = gitHashes.${name} or (throw
"A Git hash is required for ${name}! Set to an empty string to obtain it.");
}).overrideAttrs ({ passthru ? { }, ... }: {
passthru = passthru // {
packageRoot = details.description.path;
};
passthru = passthru // { packageRoot = details.description.path; };
});

mkPathDependencySource = name: details:
assert lib.assertMsg details.description.relative "Only relative paths are supported - ${name} has an absolute path!";
(if lib.isDerivation src then src else (runCommand "pub-${name}-${details.version}" { } ''cp -r '${src}' "$out"'')).overrideAttrs ({ passthru ? { }, ... }: {
assert lib.assertMsg details.description.relative
"Only relative paths are supported - ${name} has an absolute path!";
(if lib.isDerivation src then
src
else
(runCommand "pub-${name}-${details.version}" { }
''cp -r '${src}' "$out"'')).overrideAttrs ({ passthru ? { }, ... }: {
passthru = passthru // {
packageRoot = "${packageRoot}/${details.description.path}";
};
});

mkSdkDependencySource = name: details:
(sdkSourceBuilders.${details.description} or (throw "No SDK source builder has been given for ${details.description}!")) name;
(sdkSourceBuilders.${details.description} or (throw
"No SDK source builder has been given for ${details.description}!")) name;

addDependencySourceUtils = dependencySource: details: dependencySource.overrideAttrs ({ passthru, ... }: {
passthru = passthru // {
inherit (details) version;
};
addDependencySourceUtils = dependencySource: details:
dependencySource.overrideAttrs ({ passthru, ... }: {
passthru = passthru // { inherit (details) version; };
});

sourceBuilders = callPackage ../../../development/compilers/dart/package-source-builders { } // customSourceBuilders;
sourceBuilders =
callPackage ../../../development/compilers/dart/package-source-builders { }
// customSourceBuilders;

dependencySources = lib.filterAttrs (name: src: src != null) (builtins.mapAttrs
(name: details:
dependencySources = lib.filterAttrs (name: src: src != null)
(builtins.mapAttrs (name: details:
(sourceBuilders.${name} or ({ src, ... }: src)) {
inherit (details) version source;
src = ((addDependencySourceUtils (({
@@ -103,10 +110,8 @@ let
"path" = mkPathDependencySource;
"sdk" = mkSdkDependencySource;
}.${details.source} name) details)) details);
})
pubspecLock.packages);
in
{
}) pubspecLock.packages);
in {
inherit
# An attribute set of dependency categories to package name lists.
dependencies
@@ -1,11 +1,5 @@
# expr and script based on our lsb_release
{ stdenv
, lib
, substituteAll
, coreutils
, getopt
, modDirVersion ? ""
}:
{ stdenv, lib, substituteAll, coreutils, getopt, modDirVersion ? "" }:

substituteAll {
name = "uname";
@@ -17,7 +11,10 @@ substituteAll {

inherit coreutils getopt;

uSystem = if stdenv.buildPlatform.uname.system != null then stdenv.buildPlatform.uname.system else "unknown";
uSystem = if stdenv.buildPlatform.uname.system != null then
stdenv.buildPlatform.uname.system
else
"unknown";
inherit (stdenv.buildPlatform.uname) processor;

# uname -o
@@ -25,12 +22,12 @@ substituteAll {
# https://github.com/coreutils/coreutils/blob/7fc84d1c0f6b35231b0b4577b70aaa26bf548a7c/src/uname.c#L373-L374
# https://stackoverflow.com/questions/61711186/where-does-host-operating-system-in-uname-c-comes-from
# https://github.com/coreutils/gnulib/blob/master/m4/host-os.m4
operatingSystem =
if stdenv.buildPlatform.isLinux
then "GNU/Linux"
else if stdenv.buildPlatform.isDarwin
then "Darwin" # darwin isn't in host-os.m4 so where does this come from?
else "unknown";
operatingSystem = if stdenv.buildPlatform.isLinux then
"GNU/Linux"
else if stdenv.buildPlatform.isDarwin then
"Darwin" # darwin isn't in host-os.m4 so where does this come from?
else
"unknown";

# in os-specific/linux module packages
# --replace '$(shell uname -r)' "${kernel.modDirVersion}" \
@@ -38,7 +35,8 @@ substituteAll {
modDirVersion = if modDirVersion != "" then modDirVersion else "unknown";

meta = with lib; {
description = "Print certain system information (hardcoded with lib/system values)";
description =
"Print certain system information (hardcoded with lib/system values)";
mainProgram = "uname";
longDescription = ''
This package provides a replacement for `uname` whose output depends only
@@ -11,15 +11,12 @@
# `dhallDirectoryToNix` utility. It is not possible to use
# `dhallDirectoryToNix` in Nixpkgs, since the Nixpkgs Hydra doesn't allow IFD.

{ src
, # The file to import, relative to the src root directory
file ? "package.dhall"
}@args:
{ src, # The file to import, relative to the src root directory
file ? "package.dhall" }@args:

let
generatedPkg = dhallPackages.generateDhallDirectoryPackage args;

builtPkg = dhallPackages.callPackage generatedPkg { };

in
dhallPackageToNix builtPkg
in dhallPackageToNix builtPkg
@@ -1,4 +1,3 @@

# `dhallPackageToNix` is a utility function to take a Nixpkgs Dhall package
# (created with a function like `dhallPackages.buildDhallDirectoryPackage`)
# and read it in as a Nix expression.
@@ -32,5 +31,4 @@ dhallPackage:
nativeBuildInputs = [ dhall-nix ];
};

in
import drv
in import drv
@@ -32,7 +32,5 @@ let
buildInputs = [ dhall-nix ];
};

in
import drv;
in
dhallToNix
in import drv;
in dhallToNix
@@ -1,12 +1,4 @@
{
lib,
stdenv,
fetchurl,
linkFarm,
dub,
ldc,
removeReferencesTo,
}:
{ lib, stdenv, fetchurl, linkFarm, dub, ldc, removeReferencesTo, }:

# See https://nixos.org/manual/nixpkgs/unstable#dlang for more detailed usage information

@@ -23,18 +15,10 @@
# The flags to pass to `dub test`.
dubTestFlags ? [ ],
# The D compiler to be used by `dub`.
compiler ? ldc,
...
}@args:
compiler ? ldc, ... }@args:

let
makeDubDep =
{
pname,
version,
sha256,
}:
{
makeDubDep = { pname, version, sha256, }: {
inherit pname version;
src = fetchurl {
name = "dub-${pname}-${version}.zip";
@@ -45,37 +29,31 @@ let

lockJson = if lib.isPath dubLock then lib.importJSON dubLock else dubLock;

lockedDeps = lib.mapAttrsToList (
pname: { version, sha256 }: makeDubDep { inherit pname version sha256; }
) lockJson.dependencies;
lockedDeps = lib.mapAttrsToList
(pname: { version, sha256 }: makeDubDep { inherit pname version sha256; })
lockJson.dependencies;

# a directory with multiple single element registries
# one big directory with all .zip files leads to version parsing errors
# when the name of a package is a prefix of the name of another package
dubRegistryBase = linkFarm "dub-registry-base" (
map (dep: {
dubRegistryBase = linkFarm "dub-registry-base" (map (dep: {
name = "${dep.pname}/${dep.pname}-${dep.version}.zip";
path = dep.src;
}) lockedDeps
);
}) lockedDeps);

combinedFlags = "--skip-registry=all --compiler=${lib.getExe compiler} ${toString dubFlags}";
combinedBuildFlags = "${combinedFlags} --build=${dubBuildType} ${toString dubBuildFlags}";
combinedFlags = "--skip-registry=all --compiler=${lib.getExe compiler} ${
toString dubFlags
}";
combinedBuildFlags =
"${combinedFlags} --build=${dubBuildType} ${toString dubBuildFlags}";
combinedTestFlags = "${combinedFlags} ${toString dubTestFlags}";
in
stdenv.mkDerivation (
builtins.removeAttrs args [ "dubLock" ]
// {
in stdenv.mkDerivation (builtins.removeAttrs args [ "dubLock" ] // {
strictDeps = args.strictDeps or true;

nativeBuildInputs = args.nativeBuildInputs or [ ] ++ [
dub
compiler
removeReferencesTo
];
nativeBuildInputs = args.nativeBuildInputs or [ ]
++ [ dub compiler removeReferencesTo ];

configurePhase =
args.configurePhase or ''
configurePhase = args.configurePhase or ''
runHook preConfigure

export DUB_HOME="$NIX_BUILD_TOP/.dub"
@@ -89,8 +67,7 @@ stdenv.mkDerivation (
runHook postConfigure
'';

buildPhase =
args.buildPhase or ''
buildPhase = args.buildPhase or ''
runHook preBuild

dub build ${combinedBuildFlags}
@@ -100,8 +77,7 @@ stdenv.mkDerivation (

doCheck = args.doCheck or false;

checkPhase =
args.checkPhase or ''
checkPhase = args.checkPhase or ''
runHook preCheck

dub test ${combinedTestFlags}
@@ -117,8 +93,5 @@ stdenv.mkDerivation (

disallowedReferences = [ compiler ];

meta = {
platforms = dub.meta.platforms;
} // args.meta or { };
}
)
meta = { platforms = dub.meta.platforms; } // args.meta or { };
})
@@ -1,5 +1,4 @@
{ callPackage }:
{
{ callPackage }: {
buildDubPackage = callPackage ./builddubpackage { };
dub-to-nix = callPackage ./dub-to-nix { };
}
@@ -1,17 +1,9 @@
{
lib,
runCommand,
makeWrapper,
python3,
nix,
}:
{ lib, runCommand, makeWrapper, python3, nix, }:

runCommand "dub-to-nix"
{
runCommand "dub-to-nix" {
nativeBuildInputs = [ makeWrapper ];
buildInputs = [ python3 ];
}
''
} ''
install -Dm755 ${./dub-to-nix.py} "$out/bin/dub-to-nix"
patchShebangs "$out/bin/dub-to-nix"
wrapProgram "$out/bin/dub-to-nix" \
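
For reference, the reformatted builder above is invoked roughly like this. A minimal sketch (pname, version, src, and the lock path are hypothetical; the lock file itself is produced by the dub-to-nix helper packaged in the hunks above):

  buildDubPackage rec {
    pname = "my-d-app";        # hypothetical package
    version = "0.1.0";
    src = ./.;                 # hypothetical source
    dubLock = ./dub-lock.json; # generated beforehand with dub-to-nix
  }
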
@@ -1,59 +1,20 @@
{ bashInteractive
, buildPackages
, cacert
, callPackage
, closureInfo
, coreutils
, e2fsprogs
, proot
, fakeNss
, fakeroot
, file
, go
, jq
, jshon
, lib
, makeWrapper
, moreutils
, nix
, nixosTests
, pigz
, rsync
, runCommand
, runtimeShell
, shadow
, skopeo
, storeDir ? builtins.storeDir
, substituteAll
, symlinkJoin
, tarsum
, util-linux
, vmTools
, writeClosure
, writeScript
, writeShellScriptBin
, writeText
, writeTextDir
, writePython3
, zstd
}:
{ bashInteractive, buildPackages, cacert, callPackage, closureInfo, coreutils
, e2fsprogs, proot, fakeNss, fakeroot, file, go, jq, jshon, lib, makeWrapper
, moreutils, nix, nixosTests, pigz, rsync, runCommand, runtimeShell, shadow
, skopeo, storeDir ? builtins.storeDir, substituteAll, symlinkJoin, tarsum
, util-linux, vmTools, writeClosure, writeScript, writeShellScriptBin, writeText
, writeTextDir, writePython3, zstd }:

let
inherit (lib)
optionals
optionalString
;
inherit (lib) optionals optionalString;

inherit (lib)
escapeShellArgs
toList
;
inherit (lib) escapeShellArgs toList;

mkDbExtraCommand = contents:
let
contentsList = if builtins.isList contents then contents else [ contents ];
in
''
contentsList =
if builtins.isList contents then contents else [ contents ];
in ''
echo "Generating the nix database..."
echo "Warning: only the database of the deepest Nix layer is loaded."
echo " If you want to use nix commands in the container, it would"
@@ -63,7 +24,9 @@ let
# A user is required by nix
# https://github.com/NixOS/nix/blob/9348f9291e5d9e4ba3c4347ea1b235640f54fd79/src/libutil/util.cc#L478
export USER=nobody
${buildPackages.nix}/bin/nix-store --load-db < ${closureInfo {rootPaths = contentsList;}}/registration
${buildPackages.nix}/bin/nix-store --load-db < ${
closureInfo { rootPaths = contentsList; }
}/registration
# Reset registration times to make the image reproducible
${buildPackages.sqlite}/bin/sqlite3 nix/var/nix/db/db.sqlite "UPDATE ValidPaths SET registrationTime = ''${SOURCE_DATE_EPOCH}"

@@ -101,34 +64,32 @@ let
};
};

compressorForImage = compressor: imageName: compressors.${compressor} or
(throw "in docker image ${imageName}: compressor must be one of: [${toString builtins.attrNames compressors}]");
compressorForImage = compressor: imageName:
compressors.${compressor} or (throw
"in docker image ${imageName}: compressor must be one of: [${
toString builtins.attrNames compressors
}]");

in
rec {
in rec {
examples = callPackage ./examples.nix {
inherit buildImage buildLayeredImage fakeNss pullImage shadowSetup buildImageWithNixDb streamNixShellImage;
inherit buildImage buildLayeredImage fakeNss pullImage shadowSetup
buildImageWithNixDb streamNixShellImage;
};

tests = {
inherit (nixosTests)
docker-tools
docker-tools-overlay
docker-tools docker-tools-overlay
# requires remote builder
# docker-tools-cross
;
};

pullImage =
let
fixName = name: builtins.replaceStrings [ "/" ":" ] [ "-" "-" ] name;
in
{ imageName
let fixName = name: builtins.replaceStrings [ "/" ":" ] [ "-" "-" ] name;
in { imageName
# To find the digest of an image, you can use skopeo:
# see doc/functions.xml
, imageDigest
, sha256
, os ? "linux"
, imageDigest, sha256, os ? "linux"
, # Image architecture, defaults to the architecture of the `hostPlatform` when unset
arch ? defaultArchitecture
# This is used to set name to the pulled image
@@ -138,11 +99,9 @@ rec {
# This is used to disable TLS certificate verification, allowing access to http registries on (hopefully) trusted networks
, tlsVerify ? true

, name ? fixName "docker-image-${finalImageName}-${finalImageTag}.tar"
}:
, name ? fixName "docker-image-${finalImageName}-${finalImageTag}.tar" }:

runCommand name
{
runCommand name {
inherit imageDigest;
imageName = finalImageName;
imageTag = finalImageTag;
@@ -173,14 +132,8 @@ rec {
inherit tarsum; # pkgs.dockerTools.tarsum

# buildEnv creates symlinks to dirs, which is hard to edit inside the overlay VM
mergeDrvs =
{ derivations
, onlyDeps ? false
}:
runCommand "merge-drvs"
{
inherit derivations onlyDeps;
} ''
mergeDrvs = { derivations, onlyDeps ? false }:
runCommand "merge-drvs" { inherit derivations onlyDeps; } ''
if [[ -n "$onlyDeps" ]]; then
echo $derivations > $out
exit 0
@@ -228,20 +181,10 @@ rec {
'';

# Run commands in a virtual machine.
runWithOverlay =
{ name
, fromImage ? null
, fromImageName ? null
, fromImageTag ? null
, diskSize ? 1024
, buildVMMemorySize ? 512
, preMount ? ""
, postMount ? ""
, postUmount ? ""
}:
vmTools.runInLinuxVM (
runCommand name
{
runWithOverlay = { name, fromImage ? null, fromImageName ? null
, fromImageTag ? null, diskSize ? 1024, buildVMMemorySize ? 512
, preMount ? "", postMount ? "", postUmount ? "" }:
vmTools.runInLinuxVM (runCommand name {
preVM = vmTools.createEmptyImage {
size = diskSize;
fullName = "docker-run-disk";
@@ -338,7 +281,8 @@ rec {
${postUmount}
'');

exportImage = { name ? fromImage.name, fromImage, fromImageName ? null, fromImageTag ? null, diskSize ? 1024 }:
exportImage = { name ? fromImage.name, fromImage, fromImageName ? null
, fromImageTag ? null, diskSize ? 1024 }:
runWithOverlay {
inherit name fromImage fromImageName fromImageTag diskSize;

@@ -366,36 +310,30 @@ rec {
'';

# Create a "layer" (set of files).
mkPureLayer =
{
mkPureLayer = {
# Name of the layer
name
, # JSON containing configuration and metadata for this layer.
baseJson
, # Files to add to the layer.
name, # JSON containing configuration and metadata for this layer.
baseJson, # Files to add to the layer.
copyToRoot ? null
, # When copying the contents into the image, preserve symlinks to
# directories (see `rsync -K`). Otherwise, transform those symlinks
# into directories.
keepContentsDirlinks ? false
, # Additional commands to run on the layer before it is tar'd up.
extraCommands ? ""
, uid ? 0
, gid ? 0
}:
runCommand "docker-layer-${name}"
{
extraCommands ? "", uid ? 0, gid ? 0 }:
runCommand "docker-layer-${name}" {
inherit baseJson extraCommands;
contents = copyToRoot;
nativeBuildInputs = [ jshon rsync tarsum ];
}
''
} ''
mkdir layer
if [[ -n "$contents" ]]; then
echo "Adding contents..."
for item in $contents; do
echo "Adding $item"
rsync -a${if keepContentsDirlinks then "K" else "k"} --chown=0:0 $item/ layer/
rsync -a${
if keepContentsDirlinks then "K" else "k"
} --chown=0:0 $item/ layer/
done
else
echo "No contents to add to layer."
@@ -410,7 +348,9 @@ rec {
# Tar up the layer and throw it into 'layer.tar'.
echo "Packing layer..."
mkdir $out
tarhash=$(tar -C layer --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" --owner=${toString uid} --group=${toString gid} -cf - . | tee -p $out/layer.tar | tarsum)
tarhash=$(tar -C layer --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" --owner=${
toString uid
} --group=${toString gid} -cf - . | tee -p $out/layer.tar | tarsum)

# Add a 'checksum' field to the JSON, with the value set to the
# checksum of the tarball.
@@ -425,11 +365,9 @@ rec {
# Make a "root" layer; required if we need to execute commands as a
# privileged user on the image. The commands themselves will be
# performed in a virtual machine sandbox.
mkRootLayer =
{
mkRootLayer = {
# Name of the image.
name
, # Script to run as root. Bash.
name, # Script to run as root. Bash.
runAsRoot
, # Files to add to the layer. If null, an empty layer will be created.
# To add packages to /bin, use `buildEnv` or similar.
@@ -439,26 +377,21 @@ rec {
# into directories.
keepContentsDirlinks ? false
, # JSON containing configuration and metadata for this layer.
baseJson
, # Existing image onto which to append the new layer.
fromImage ? null
, # Name of the image we're appending onto.
fromImageName ? null
, # Tag of the image we're appending onto.
baseJson, # Existing image onto which to append the new layer.
fromImage ? null, # Name of the image we're appending onto.
fromImageName ? null, # Tag of the image we're appending onto.
fromImageTag ? null
, # How much disk to allocate for the temporary virtual machine.
diskSize ? 1024
, # How much memory to allocate for the temporary virtual machine.
buildVMMemorySize ? 512
, # Commands (bash) to run on the layer; these do not require sudo.
extraCommands ? ""
}:
extraCommands ? "" }:
# Generate an executable script from the `runAsRoot` text.
let
runAsRootScript = shellScript "run-as-root.sh" runAsRoot;
extraCommandsScript = shellScript "extra-commands.sh" extraCommands;
in
runWithOverlay {
in runWithOverlay {
name = "docker-layer-${name}";

inherit fromImage fromImageName fromImageTag diskSize buildVMMemorySize;
@@ -467,7 +400,9 @@ rec {
echo "Adding contents..."
for item in ${escapeShellArgs (map (c: "${c}") (toList copyToRoot))}; do
echo "Adding $item..."
rsync -a${if keepContentsDirlinks then "K" else "k"} --chown=0:0 $item/ layer/
rsync -a${
if keepContentsDirlinks then "K" else "k"
} --chown=0:0 $item/ layer/
done

chmod ug+w layer
@@ -515,18 +450,20 @@ rec {
'';
};

buildLayeredImage = lib.makeOverridable ({ name, compressor ? "gz", ... }@args:
buildLayeredImage = lib.makeOverridable
({ name, compressor ? "gz", ... }@args:
let
stream = streamLayeredImage (builtins.removeAttrs args ["compressor"]);
stream =
streamLayeredImage (builtins.removeAttrs args [ "compressor" ]);
compress = compressorForImage compressor name;
in
runCommand "${baseNameOf name}.tar${compress.ext}"
{
in runCommand "${baseNameOf name}.tar${compress.ext}" {
inherit (stream) imageName;
passthru = { inherit (stream) imageTag; inherit stream; };
passthru = {
inherit (stream) imageTag;
inherit stream;
};
nativeBuildInputs = compress.nativeInputs;
} "${stream} | ${compress.compress} > $out"
);
} "${stream} | ${compress.compress} > $out");

# 1. extract the base image
# 2. create the layer
@@ -534,13 +471,10 @@ rec {
# 4. compute the layer id
# 5. put the layer in the image
# 6. repack the image
buildImage = lib.makeOverridable (
args@{
buildImage = lib.makeOverridable (args@{
# Image name.
name
, # Image tag, when null then the nix output hash will be used.
tag ? null
, # Parent image, to append to.
name, # Image tag, when null then the nix output hash will be used.
tag ? null, # Parent image, to append to.
fromImage ? null
, # Name of the parent image; will be read from the image otherwise.
fromImageName ? null
@@ -557,65 +491,52 @@ rec {
, # Image architecture, defaults to the architecture of the `hostPlatform` when unset
architecture ? defaultArchitecture
, # Optional bash script to run on the files prior to fixturizing the layer.
extraCommands ? ""
, uid ? 0
, gid ? 0
extraCommands ? "", uid ? 0, gid ? 0
, # Optional bash script to run as root on the image when provisioning.
runAsRoot ? null
, # Size of the virtual machine disk to provision when building the image.
diskSize ? 1024
, # Size of the virtual machine memory to provision when building the image.
buildVMMemorySize ? 512
, # Time of creation of the image.
buildVMMemorySize ? 512, # Time of creation of the image.
created ? "1970-01-01T00:00:01Z"
, # Compressor to use. One of: none, gz, zstd.
compressor ? "gz"
, # Deprecated.
contents ? null
,
}:
compressor ? "gz", # Deprecated.
contents ? null, }:

let
checked =
lib.warnIf (contents != null)
checked = lib.warnIf (contents != null)
"in docker image ${name}: The contents parameter is deprecated. Change to copyToRoot if the contents are designed to be copied to the root filesystem, such as when you use `buildEnv` or similar between contents and your packages. Use copyToRoot = buildEnv { ... }; or similar if you intend to add packages to /bin."
lib.throwIf (contents != null && copyToRoot != null) "in docker image ${name}: You can not specify both contents and copyToRoot."
;
lib.throwIf (contents != null && copyToRoot != null)
"in docker image ${name}: You can not specify both contents and copyToRoot.";

rootContents = if copyToRoot == null then contents else copyToRoot;

baseName = baseNameOf name;

# Create a JSON blob of the configuration. Set the date to unix zero.
baseJson =
let
baseJson = let
pure = writeText "${baseName}-config.json" (builtins.toJSON {
inherit created config architecture;
preferLocalBuild = true;
os = "linux";
});
impure = runCommand "${baseName}-config.json"
{
impure = runCommand "${baseName}-config.json" {
nativeBuildInputs = [ jq ];
preferLocalBuild = true;
}
''
} ''
jq ".created = \"$(TZ=utc date --iso-8601="seconds")\"" ${pure} > $out
'';
in
if created == "now" then impure else pure;
in if created == "now" then impure else pure;

compress = compressorForImage compressor name;

layer =
if runAsRoot == null
then
mkPureLayer
{
layer = if runAsRoot == null then
mkPureLayer {
name = baseName;
inherit baseJson keepContentsDirlinks extraCommands uid gid;
copyToRoot = rootContents;
} else
}
else
mkRootLayer {
name = baseName;
inherit baseJson fromImage fromImageName fromImageTag
@@ -623,8 +544,7 @@ rec {
extraCommands;
copyToRoot = rootContents;
};
result = runCommand "docker-image-${baseName}.tar${compress.ext}"
{
result = runCommand "docker-image-${baseName}.tar${compress.ext}" {
nativeBuildInputs = [ jshon jq moreutils ] ++ compress.nativeInputs;
# Image name must be lowercase
imageName = lib.toLower name;
@@ -633,11 +553,11 @@ rec {
layerClosure = writeClosure [ layer ];
passthru.buildArgs = args;
passthru.layer = layer;
passthru.imageTag =
if tag != null
then tag
passthru.imageTag = if tag != null then
tag
else
lib.head (lib.strings.splitString "-" (baseNameOf (builtins.unsafeDiscardStringContext result.outPath)));
lib.head (lib.strings.splitString "-"
(baseNameOf (builtins.unsafeDiscardStringContext result.outPath)));
} ''
${lib.optionalString (tag == null) ''
outName="$(basename "$out")"
@@ -784,20 +704,16 @@ rec {
echo "Finished."
'';

in
checked result
);
in checked result);

# Merge the tarballs of images built with buildImage into a single
# tarball that contains all images. Running `docker load` on the resulting
# tarball will load the images into the docker daemon.
mergeImages = images: runCommand "merge-docker-images"
{
mergeImages = images:
runCommand "merge-docker-images" {
inherit images;
nativeBuildInputs = [ file jq ]
++ compressors.none.nativeInputs
++ compressors.gz.nativeInputs
++ compressors.zstd.nativeInputs;
nativeBuildInputs = [ file jq ] ++ compressors.none.nativeInputs
++ compressors.gz.nativeInputs ++ compressors.zstd.nativeInputs;
} ''
mkdir image inputs
# Extract images
@@ -842,7 +758,6 @@ rec {
) > $out
'';


# Provide a /etc/passwd and /etc/group that contain root and nobody.
# Useful when packaging binaries that insist on using nss to look up
# username/groups (like nginx).
@@ -879,44 +794,29 @@ rec {
# the container.
# Be careful since this doesn't work well with multilayer.
# TODO: add the dependencies of the config json.
buildImageWithNixDb = args@{ copyToRoot ? contents, contents ? null, extraCommands ? "", ... }: (
buildImage (args // {
buildImageWithNixDb =
args@{ copyToRoot ? contents, contents ? null, extraCommands ? "", ... }:
(buildImage (args // {
extraCommands = (mkDbExtraCommand copyToRoot) + extraCommands;
})
);
}));

# TODO: add the dependencies of the config json.
buildLayeredImageWithNixDb = args@{ contents ? null, extraCommands ? "", ... }: (
buildLayeredImage (args // {
buildLayeredImageWithNixDb =
args@{ contents ? null, extraCommands ? "", ... }:
(buildLayeredImage (args // {
extraCommands = (mkDbExtraCommand contents) + extraCommands;
})
);
}));

# Arguments are documented in ../../../doc/build-helpers/images/dockertools.section.md
streamLayeredImage = lib.makeOverridable (
{
name
, tag ? null
, fromImage ? null
, contents ? [ ]
, config ? { }
, architecture ? defaultArchitecture
, created ? "1970-01-01T00:00:01Z"
, uid ? 0
, gid ? 0
, uname ? "root"
, gname ? "root"
, maxLayers ? 100
, extraCommands ? ""
, fakeRootCommands ? ""
, enableFakechroot ? false
, includeStorePaths ? true
, passthru ? {}
,
}:
assert
(lib.assertMsg (maxLayers > 1)
"the maxLayers argument of dockerTools.buildLayeredImage function must be greater than 1 (current value: ${toString maxLayers})");
streamLayeredImage = lib.makeOverridable ({ name, tag ? null, fromImage ? null
, contents ? [ ], config ? { }, architecture ? defaultArchitecture
, created ? "1970-01-01T00:00:01Z", uid ? 0, gid ? 0, uname ? "root"
, gname ? "root", maxLayers ? 100, extraCommands ? "", fakeRootCommands ? ""
, enableFakechroot ? false, includeStorePaths ? true, passthru ? { }, }:
assert (lib.assertMsg (maxLayers > 1)
"the maxLayers argument of dockerTools.buildLayeredImage function must be greater than 1 (current value: ${
toString maxLayers
})");
let
baseName = baseNameOf name;

@@ -926,8 +826,10 @@ rec {
os = "linux";
});

contentsList = if builtins.isList contents then contents else [ contents ];
bind-paths = builtins.toString (builtins.map (path: "--bind=${path}:${path}!") [
contentsList =
if builtins.isList contents then contents else [ contents ];
bind-paths = builtins.toString
(builtins.map (path: "--bind=${path}:${path}!") [
"/dev/"
"/proc/"
"/sys/"
@@ -942,11 +844,8 @@ rec {
name = "${baseName}-customisation-layer";
paths = contentsList;
inherit extraCommands fakeRootCommands;
nativeBuildInputs = [
fakeroot
] ++ optionals enableFakechroot [
proot
];
nativeBuildInputs = [ fakeroot ]
++ optionals enableFakechroot [ proot ];
postBuild = ''
mv $out old_out
(cd old_out; eval "$extraCommands" )
@@ -983,25 +882,24 @@ rec {
'';
};

closureRoots = lib.optionals includeStorePaths /* normally true */ (
[ baseJson customisationLayer ]
);
overallClosure = writeText "closure" (lib.concatStringsSep " " closureRoots);
closureRoots = lib.optionals includeStorePaths # normally true
([ baseJson customisationLayer ]);
overallClosure =
writeText "closure" (lib.concatStringsSep " " closureRoots);

# These derivations are only created as implementation details of docker-tools,
# so they'll be excluded from the created images.
unnecessaryDrvs = [ baseJson overallClosure customisationLayer ];

conf = runCommand "${baseName}-conf.json"
{
conf = runCommand "${baseName}-conf.json" {
inherit fromImage maxLayers created uid gid uname gname;
imageName = lib.toLower name;
preferLocalBuild = true;
passthru.imageTag =
if tag != null
then tag
passthru.imageTag = if tag != null then
tag
else
lib.head (lib.strings.splitString "-" (baseNameOf (builtins.unsafeDiscardStringContext conf.outPath)));
lib.head (lib.strings.splitString "-"
(baseNameOf (builtins.unsafeDiscardStringContext conf.outPath)));
paths = buildPackages.referencesByPopularity overallClosure;
nativeBuildInputs = [ jq ];
} ''
@@ -1020,9 +918,10 @@ rec {
fi

paths() {
cat $paths ${lib.concatMapStringsSep " "
(path: "| (grep -v ${path} || true)")
unnecessaryDrvs}
cat $paths ${
lib.concatMapStringsSep " " (path: "| (grep -v ${path} || true)")
unnecessaryDrvs
}
}

# Compute the number of layers that are already used by a potential
@@ -1080,7 +979,9 @@ rec {
"gname": $gname
}
' --arg store_dir "${storeDir}" \
--argjson from_image ${if fromImage == null then "null" else "'\"${fromImage}\"'"} \
--argjson from_image ${
if fromImage == null then "null" else "'\"${fromImage}\"'"
} \
--slurpfile store_layers store_layers.json \
--arg customisation_layer ${customisationLayer} \
--arg repo_tag "$imageName:$imageTag" \
@@ -1092,8 +993,7 @@ rec {
tee $out
'';

result = runCommand "stream-${baseName}"
{
result = runCommand "stream-${baseName}" {
inherit (conf) imageName;
preferLocalBuild = true;
passthru = passthru // {
@@ -1107,32 +1007,26 @@ rec {
} ''
makeWrapper ${streamScript} $out --add-flags ${conf}
'';
in
result
);
in result);

# This function streams a docker image that behaves like a nix-shell for a derivation
streamNixShellImage =
{ # The derivation whose environment this docker image should be based on
drv
, # Image Name
drv, # Image Name
name ? drv.name + "-env"
, # Image tag, the Nix's output hash will be used if null
tag ? null
, # User id to run the container as. Defaults to 1000, because many
# binaries don't like to be run as root
uid ? 1000
, # Group id to run the container as, see also uid
gid ? 1000
, # The home directory of the user
uid ? 1000, # Group id to run the container as, see also uid
gid ? 1000, # The home directory of the user
homeDirectory ? "/build"
, # The path to the bash binary to use as the shell. See `NIX_BUILD_SHELL` in `man nix-shell`
shell ? bashInteractive + "/bin/bash"
, # Run this command in the environment of the derivation, in an interactive shell. See `--command` in `man nix-shell`
command ? null
, # Same as `command`, but runs the command in a non-interactive shell instead. See `--run` in `man nix-shell`
run ? null
}:
run ? null }:
assert lib.assertMsg (!(drv.drvAttrs.__structuredAttrs or false))
"streamNixShellImage: Does not work with the derivation ${drv.name} because it uses __structuredAttrs";
assert lib.assertMsg (command == null || run == null)
@@ -1141,7 +1035,9 @@ rec {

# A binary that calls the command to build the derivation
builder = writeShellScriptBin "buildDerivation" ''
exec ${lib.escapeShellArg (stringValue drv.drvAttrs.builder)} ${lib.escapeShellArgs (map stringValue drv.drvAttrs.args)}
exec ${lib.escapeShellArg (stringValue drv.drvAttrs.builder)} ${
lib.escapeShellArgs (map stringValue drv.drvAttrs.args)
}
'';

staticPath = "${dirOf shell}:${lib.makeBinPath [ builder ]}";
@@ -1180,20 +1076,24 @@ rec {
# We can't just use `toString` on all derivation attributes because that
# would not put path literals in the closure. So we explicitly copy
# those into the store here
if builtins.typeOf value == "path" then "${value}"
else if builtins.typeOf value == "list" then toString (map stringValue value)
else toString value;
if builtins.typeOf value == "path" then
"${value}"
else if builtins.typeOf value == "list" then
toString (map stringValue value)
else
toString value;

# https://github.com/NixOS/nix/blob/2.8.0/src/libstore/build/local-derivation-goal.cc#L992-L1004
drvEnv = lib.mapAttrs' (name: value:
let str = stringValue value;
in if lib.elem name (drv.drvAttrs.passAsFile or [])
then lib.nameValuePair "${name}Path" (writeText "pass-as-text-${name}" str)
else lib.nameValuePair name str
) drv.drvAttrs //
in if lib.elem name (drv.drvAttrs.passAsFile or [ ]) then
lib.nameValuePair "${name}Path" (writeText "pass-as-text-${name}" str)
else
lib.nameValuePair name str) drv.drvAttrs //
# A mapping from output name to the nix store path where they should end up
# https://github.com/NixOS/nix/blob/2.8.0/src/libexpr/primops.cc#L1253
lib.genAttrs drv.outputs (output: builtins.unsafeDiscardStringContext drv.${output}.outPath);
lib.genAttrs drv.outputs
(output: builtins.unsafeDiscardStringContext drv.${output}.outPath);

# Environment variables set in the image
envVars = {
@@ -1239,7 +1139,6 @@ rec {
TERM = "xterm-256color";
};


in streamLayeredImage {
inherit name tag;
contents = [
@@ -1251,11 +1150,11 @@ rec {
# Slightly differs however: We use the passed-in homeDirectory instead of sandboxBuildDir.
# We're doing this because it's arguably a bug in Nix that sandboxBuildDir is used here: https://github.com/NixOS/nix/issues/6379
extraPasswdLines = [
"nixbld:x:${toString uid}:${toString gid}:Build user:${homeDirectory}:/noshell"
];
extraGroupLines = [
"nixbld:!:${toString gid}:"
"nixbld:x:${toString uid}:${
toString gid
}:Build user:${homeDirectory}:/noshell"
];
extraGroupLines = [ "nixbld:!:${toString gid}:" ];
})
];

@@ -1277,9 +1176,7 @@ rec {
config.Cmd =
# https://github.com/NixOS/nix/blob/2.8.0/src/nix-build/nix-build.cc#L185-L186
# https://github.com/NixOS/nix/blob/2.8.0/src/nix-build/nix-build.cc#L534-L536
if run == null
then [ shell "--rcfile" rcfile ]
else [ shell rcfile ];
if run == null then [ shell "--rcfile" rcfile ] else [ shell rcfile ];
config.WorkingDir = sandboxBuildDir;
config.Env = lib.mapAttrsToList (name: value: "${name}=${value}") envVars;
};
@@ -1289,9 +1186,7 @@ rec {
let
stream = streamNixShellImage (builtins.removeAttrs args [ "compressor" ]);
compress = compressorForImage compressor drv.name;
in
runCommand "${drv.name}-env.tar${compress.ext}"
{
in runCommand "${drv.name}-env.tar${compress.ext}" {
inherit (stream) imageName;
passthru = { inherit (stream) imageTag; };
nativeBuildInputs = compress.nativeInputs;
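
To keep the dockerTools refactor above concrete: a minimal usage sketch of buildLayeredImage, whose argument set and maxLayers assertion appear in the hunks above (the image name and contents are placeholders; the examples.nix diff that follows exercises the same functions in full):

  pkgs.dockerTools.buildLayeredImage {
    name = "hello";                 # placeholder image name
    tag = "latest";
    contents = [ pkgs.hello ];      # placeholder contents
    config.Cmd = [ "/bin/hello" ];
    maxLayers = 100;                # must be greater than 1, per the assert
  }
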
@@ -7,7 +7,8 @@
# $ nix-build '<nixpkgs>' -A dockerTools.examples.redis
# $ docker load < result

{ pkgs, buildImage, buildLayeredImage, fakeNss, pullImage, shadowSetup, buildImageWithNixDb, pkgsCross, streamNixShellImage }:
{ pkgs, buildImage, buildLayeredImage, fakeNss, pullImage, shadowSetup
, buildImageWithNixDb, pkgsCross, streamNixShellImage }:

let
nixosLib = import ../../../nixos/lib {
@@ -17,9 +18,7 @@ let
};
evalMinimalConfig = module: nixosLib.evalModules { modules = [ module ]; };

in

rec {
in rec {
# 1. basic example
bash = buildImage {
name = "bash";
@@ -53,9 +52,7 @@ rec {
config = {
Cmd = [ "/bin/redis-server" ];
WorkingDir = "/data";
Volumes = {
"/data" = {};
};
Volumes = { "/data" = { }; };
};
};

@@ -82,14 +79,10 @@ rec {
nginxWebRoot = pkgs.writeTextDir "index.html" ''
<html><body><h1>Hello from NGINX</h1></body></html>
'';
in
buildLayeredImage {
in buildLayeredImage {
name = "nginx-container";
tag = "latest";
contents = [
fakeNss
pkgs.nginx
];
contents = [ fakeNss pkgs.nginx ];

extraCommands = ''
mkdir -p tmp/nginx_client_body
@@ -101,16 +94,15 @@ rec {

config = {
Cmd = [ "nginx" "-c" nginxConf ];
ExposedPorts = {
"${nginxPort}/tcp" = {};
};
ExposedPorts = { "${nginxPort}/tcp" = { }; };
};
};

# 4. example of pulling an image. could be used as a base for other images
nixFromDockerHub = pullImage {
imageName = "nixos/nix";
imageDigest = "sha256:85299d86263a3059cf19f419f9d286cc9f06d3c13146a8ebbb21b3437f598357";
imageDigest =
"sha256:85299d86263a3059cf19f419f9d286cc9f06d3c13146a8ebbb21b3437f598357";
sha256 = "19fw0n3wmddahzr20mhdqv6jkjn1kanh6n2mrr08ai53dr8ph5n7";
finalImageTag = "2.2.1";
finalImageName = "nix";
@@ -119,7 +111,8 @@ rec {
# NOTE: Only use this for testing, or you'd be wasting a lot of time, network and space.
testNixFromDockerHub = pkgs.testers.invalidateFetcherByDrvHash pullImage {
imageName = "nixos/nix";
imageDigest = "sha256:85299d86263a3059cf19f419f9d286cc9f06d3c13146a8ebbb21b3437f598357";
imageDigest =
"sha256:85299d86263a3059cf19f419f9d286cc9f06d3c13146a8ebbb21b3437f598357";
sha256 = "19fw0n3wmddahzr20mhdqv6jkjn1kanh6n2mrr08ai53dr8ph5n7";
finalImageTag = "2.2.1";
finalImageName = "nix";
@@ -131,13 +124,7 @@ rec {
copyToRoot = pkgs.buildEnv {
name = "image-root";
pathsToLink = [ "/bin" ];
paths = [
pkgs.coreutils
pkgs.bash
pkgs.emacs
pkgs.vim
pkgs.nano
];
paths = [ pkgs.coreutils pkgs.bash pkgs.emacs pkgs.vim pkgs.nano ];
};
};

@@ -231,9 +218,7 @@ rec {
config = {
Env = [ "PATH=${pkgs.coreutils}/bin/" ];
WorkingDir = "/example-output";
Cmd = [
"${pkgs.bash}/bin/bash" "-c" "echo hello > foo; cat foo"
];
Cmd = [ "${pkgs.bash}/bin/bash" "-c" "echo hello > foo; cat foo" ];
};
};

@@ -249,9 +234,7 @@ rec {
config = {
Env = [ "PATH=${pkgs.coreutils}/bin/" ];
WorkingDir = "/example-output";
Cmd = [
"${pkgs.bash}/bin/bash" "-c" "echo hello > foo; cat foo"
];
Cmd = [ "${pkgs.bash}/bin/bash" "-c" "echo hello > foo; cat foo" ];
};
};

@@ -312,12 +295,7 @@ rec {
environmentVariablesParent = pkgs.dockerTools.buildImage {
name = "parent";
tag = "latest";
config = {
Env = [
"FROM_PARENT=true"
"LAST_LAYER=parent"
];
};
config = { Env = [ "FROM_PARENT=true" "LAST_LAYER=parent" ]; };
};

environmentVariables = pkgs.dockerTools.buildImage {
@@ -329,12 +307,7 @@ rec {
pathsToLink = [ "/bin" ];
paths = [ pkgs.coreutils ];
};
config = {
Env = [
"FROM_CHILD=true"
"LAST_LAYER=child"
];
};
config = { Env = [ "FROM_CHILD=true" "LAST_LAYER=child" ]; };
};

environmentVariablesLayered = pkgs.dockerTools.buildLayeredImage {
@@ -342,12 +315,7 @@ rec {
fromImage = environmentVariablesParent;
tag = "latest";
contents = [ pkgs.coreutils ];
config = {
Env = [
"FROM_CHILD=true"
"LAST_LAYER=child"
];
};
config = { Env = [ "FROM_CHILD=true" "LAST_LAYER=child" ]; };
};

# 16. Create another layered image, for comparing layers with image 10.
@@ -371,9 +339,7 @@ rec {
bulk-layer = pkgs.dockerTools.buildLayeredImage {
name = "bulk-layer";
tag = "latest";
contents = with pkgs; [
coreutils hello
];
contents = with pkgs; [ coreutils hello ];
maxLayers = 2;
};

@@ -383,9 +349,7 @@ rec {
name = "layered-bulk-layer";
tag = "latest";
fromImage = two-layered-image;
contents = with pkgs; [
coreutils hello
];
contents = with pkgs; [ coreutils hello ];
maxLayers = 4;
};

@@ -455,8 +419,7 @@ rec {

# 23. Ensure that layers are unpacked in the correct order before the
# runAsRoot script is executed.
layersUnpackOrder =
let
layersUnpackOrder = let
layerOnTopOf = parent: layerName:
pkgs.dockerTools.buildImage {
name = "layers-unpack-order-${layerName}";
@@ -532,47 +495,42 @@ rec {
};

# buildLayeredImage with non-root user
bashLayeredWithUser =
let
nonRootShadowSetup = { user, uid, gid ? uid }: with pkgs; [
(
writeTextDir "etc/shadow" ''
bashLayeredWithUser = let
nonRootShadowSetup = { user, uid, gid ? uid }:
with pkgs; [
(writeTextDir "etc/shadow" ''
root:!x:::::::
${user}:!:::::::
''
)
(
writeTextDir "etc/passwd" ''
'')
(writeTextDir "etc/passwd" ''
root:x:0:0::/root:${runtimeShell}
${user}:x:${toString uid}:${toString gid}::/home/${user}:
''
)
(
writeTextDir "etc/group" ''
'')
(writeTextDir "etc/group" ''
root:x:0:
${user}:x:${toString gid}:
''
)
(
writeTextDir "etc/gshadow" ''
'')
(writeTextDir "etc/gshadow" ''
root:x::
${user}:x::
''
)
'')
];
in
pkgs.dockerTools.buildLayeredImage {
in pkgs.dockerTools.buildLayeredImage {
name = "bash-layered-with-user";
tag = "latest";
contents = [ pkgs.bash pkgs.coreutils ] ++ nonRootShadowSetup { uid = 999; user = "somebody"; };
contents = [ pkgs.bash pkgs.coreutils ] ++ nonRootShadowSetup {
uid = 999;
user = "somebody";
};
};

# basic example, with cross compilation
cross = let
# Cross compile for x86_64 if on aarch64
crossPkgs =
if pkgs.stdenv.hostPlatform.system == "aarch64-linux" then pkgsCross.gnu64
else pkgsCross.aarch64-multiplatform;
crossPkgs = if pkgs.stdenv.hostPlatform.system == "aarch64-linux" then
pkgsCross.gnu64
else
pkgsCross.aarch64-multiplatform;
in crossPkgs.dockerTools.buildImage {
name = "hello-cross";
tag = "latest";
@@ -584,16 +542,16 @@ rec {
};

# layered image where a store path is itself a symlink
layeredStoreSymlink =
let
layeredStoreSymlink = let
target = pkgs.writeTextDir "dir/target" "Content doesn't matter.";
symlink = pkgs.runCommand "symlink" { } "ln -s ${target} $out";
in
pkgs.dockerTools.buildLayeredImage {
in pkgs.dockerTools.buildLayeredImage {
name = "layeredstoresymlink";
tag = "latest";
contents = [ pkgs.bash symlink ];
} // { passthru = { inherit symlink; }; };
} // {
passthru = { inherit symlink; };
};

# image with registry/ prefix
prefixedImage = pkgs.dockerTools.buildImage {
@@ -613,44 +571,34 @@ rec {
layeredImageWithFakeRootCommands = pkgs.dockerTools.buildLayeredImage {
name = "layered-image-with-fake-root-commands";
tag = "latest";
contents = [
pkgs.pkgsStatic.busybox
];
contents = [ pkgs.pkgsStatic.busybox ];
fakeRootCommands = ''
mkdir -p ./home/alice
chown 1000 ./home/alice
ln -s ${pkgs.hello.overrideAttrs (o: {
ln -s ${
pkgs.hello.overrideAttrs (o: {
# A unique `hello` to make sure that it isn't included via another mechanism by accident.
configureFlags = o.configureFlags or [] ++ [ " --program-prefix=layeredImageWithFakeRootCommands-" ];
configureFlags = o.configureFlags or [ ]
++ [ " --program-prefix=layeredImageWithFakeRootCommands-" ];
doCheck = false;
})} ./hello
})
} ./hello
'';
};

# tarball consisting of both bash and redis images
mergedBashAndRedis = pkgs.dockerTools.mergeImages [
bash
redis
];
mergedBashAndRedis = pkgs.dockerTools.mergeImages [ bash redis ];

# tarball consisting of bash (without tag) and redis images
mergedBashNoTagAndRedis = pkgs.dockerTools.mergeImages [
bashNoTag
redis
];
mergedBashNoTagAndRedis = pkgs.dockerTools.mergeImages [ bashNoTag redis ];

# tarball consisting of bash and layered image with different owner of the
# /home/alice directory
mergedBashFakeRoot = pkgs.dockerTools.mergeImages [
bash
layeredImageWithFakeRootCommands
];
mergedBashFakeRoot =
pkgs.dockerTools.mergeImages [ bash layeredImageWithFakeRootCommands ];

mergeVaryingCompressor = pkgs.dockerTools.mergeImages [
redis
bashUncompressed
bashZstdCompressed
];
mergeVaryingCompressor =
pkgs.dockerTools.mergeImages [ redis bashUncompressed bashZstdCompressed ];

helloOnRoot = pkgs.dockerTools.streamLayeredImage {
name = "hello";
@@ -691,14 +639,10 @@ rec {
enableFakechroot = true;
};

etc =
let
etc = let
inherit (pkgs) lib;
nixosCore = (evalMinimalConfig ({ config, ... }: {
imports = [
pkgs.pkgsModule
../../../nixos/modules/system/etc/etc.nix
];
imports = [ pkgs.pkgsModule ../../../nixos/modules/system/etc/etc.nix ];
environment.etc."some-config-file" = {
text = ''
127.0.0.1 localhost
@@ -774,14 +718,10 @@ rec {

copyToRoot = pkgs.buildEnv {
name = "image-with-certs-root";
paths = [
pkgs.coreutils
pkgs.dockerTools.caCertificates
];
paths = [ pkgs.coreutils pkgs.dockerTools.caCertificates ];
};

config = {
};
config = { };
};

nix-shell-basic = streamNixShellImage {
|
||||
|
@ -804,11 +744,7 @@ rec {
|
|||
nix-shell-inputs = streamNixShellImage {
|
||||
name = "nix-shell-inputs";
|
||||
tag = "latest";
|
||||
drv = pkgs.mkShell {
|
||||
nativeBuildInputs = [
|
||||
pkgs.hello
|
||||
];
|
||||
};
|
||||
drv = pkgs.mkShell { nativeBuildInputs = [ pkgs.hello ]; };
|
||||
command = ''
|
||||
hello
|
||||
'';
|
||||
|
|
|
@@ -17,7 +17,8 @@ stdenv.mkDerivation {
preferLocalBuild = true;

meta = with lib; {
description = "Script used to obtain source hashes for dockerTools.pullImage";
description =
"Script used to obtain source hashes for dockerTools.pullImage";
mainProgram = "nix-prefetch-docker";
maintainers = with maintainers; [ offline ];
platforms = platforms.unix;
@@ -32,11 +32,7 @@ stdenv.mkDerivation {
runHook postInstall
'';

passthru = {
tests = {
dockerTools = nixosTests.docker-tools;
};
};
passthru = { tests = { dockerTools = nixosTests.docker-tools; }; };

meta.platforms = go.meta.platforms;
meta.mainProgram = "tarsum";
@@ -1,7 +1,6 @@
{ buildDotnetModule, emptyDirectory, mkNugetDeps, dotnet-sdk }:

{ pname
, version
{ pname, version
# Name of the nuget package to install, if different from pname
, nugetName ? pname
# Hash of the nuget package to install, will be given on first build

@@ -12,9 +11,7 @@
# a default of `pname` instead of null, to avoid auto-wrapping everything
, executables ? pname
# The dotnet runtime to use, dotnet tools need a full SDK to function
, dotnet-runtime ? dotnet-sdk
, ...
} @ args:
, dotnet-runtime ? dotnet-sdk, ... }@args:

buildDotnetModule (args // {
inherit pname version dotnet-runtime executables;

@@ -23,8 +20,13 @@ buildDotnetModule (args // {

nugetDeps = mkNugetDeps {
name = pname;
nugetDeps = { fetchNuGet }: [
(fetchNuGet { pname = nugetName; inherit version; sha256 = nugetSha256; })
nugetDeps = { fetchNuGet }:
[
(fetchNuGet {
pname = nugetName;
inherit version;
sha256 = nugetSha256;
})
] ++ (nugetDeps fetchNuGet);
};
@@ -1,24 +1,9 @@
{ lib
, stdenvNoCC
, callPackage
, writeShellScript
, srcOnly
, linkFarmFromDrvs
, symlinkJoin
, makeWrapper
, dotnetCorePackages
, mkNugetSource
, mkNugetDeps
, nuget-to-nix
, cacert
, coreutils
, runtimeShellPackage
}:
{ lib, stdenvNoCC, callPackage, writeShellScript, srcOnly, linkFarmFromDrvs
, symlinkJoin, makeWrapper, dotnetCorePackages, mkNugetSource, mkNugetDeps
, nuget-to-nix, cacert, coreutils, runtimeShellPackage }:

{ name ? "${args.pname}-${args.version}"
, pname ? name
, enableParallelBuilding ? true
, doCheck ? false
{ name ? "${args.pname}-${args.version}", pname ? name
, enableParallelBuilding ? true, doCheck ? false
# Flags to pass to `makeWrapper`. This is done to avoid double wrapping.
, makeWrapperArgs ? [ ]

@@ -85,38 +70,41 @@
# The dotnet runtime to use.
, dotnet-runtime ? dotnetCorePackages.runtime_6_0
# The dotnet SDK to run tests against. This can differentiate from the SDK compiled against.
, dotnet-test-sdk ? dotnet-sdk
, ...
} @ args:
, dotnet-test-sdk ? dotnet-sdk, ... }@args:

let
platforms =
if args ? meta.platforms
then lib.intersectLists args.meta.platforms dotnet-sdk.meta.platforms
else dotnet-sdk.meta.platforms;
platforms = if args ? meta.platforms then
lib.intersectLists args.meta.platforms dotnet-sdk.meta.platforms
else
dotnet-sdk.meta.platforms;

inherit (callPackage ./hooks {
inherit dotnet-sdk dotnet-test-sdk disabledTests nuget-source dotnet-runtime runtimeDeps buildType;
runtimeId =
if runtimeId != null
then runtimeId
else dotnetCorePackages.systemToDotnetRid stdenvNoCC.targetPlatform.system;
}) dotnetConfigureHook dotnetBuildHook dotnetCheckHook dotnetInstallHook dotnetFixupHook;
inherit dotnet-sdk dotnet-test-sdk disabledTests nuget-source dotnet-runtime
runtimeDeps buildType;
runtimeId = if runtimeId != null then
runtimeId
else
dotnetCorePackages.systemToDotnetRid stdenvNoCC.targetPlatform.system;
})
dotnetConfigureHook dotnetBuildHook dotnetCheckHook dotnetInstallHook
dotnetFixupHook;

localDeps =
if (projectReferences != [ ])
then linkFarmFromDrvs "${name}-project-references" projectReferences
else null;
localDeps = if (projectReferences != [ ]) then
linkFarmFromDrvs "${name}-project-references" projectReferences
else
null;

_nugetDeps =
if (nugetDeps != null) then
if lib.isDerivation nugetDeps
then nugetDeps
else mkNugetDeps {
_nugetDeps = if (nugetDeps != null) then
if lib.isDerivation nugetDeps then
nugetDeps
else
mkNugetDeps {
inherit name;
sourceFile = nugetDeps;
}
else throw "Defining the `nugetDeps` attribute is required, as to lock the NuGet dependencies. This file can be generated by running the `passthru.fetch-deps` script.";
else
throw
"Defining the `nugetDeps` attribute is required, as to lock the NuGet dependencies. This file can be generated by running the `passthru.fetch-deps` script.";

# contains the actual package dependencies
dependenciesSource = mkNugetSource {

@@ -131,7 +119,8 @@ let
sdkDeps = lib.lists.flatten [ dotnet-sdk.packages ];

sdkSource = let
version = dotnet-sdk.version or (lib.concatStringsSep "-" dotnet-sdk.versions);
version =
dotnet-sdk.version or (lib.concatStringsSep "-" dotnet-sdk.versions);
in mkNugetSource {
name = "dotnet-sdk-${version}-source";
deps = sdkDeps;

@@ -143,8 +132,7 @@ let
};

nugetDepsFile = _nugetDeps.sourceFile;
in
stdenvNoCC.mkDerivation (args // {
in stdenvNoCC.mkDerivation (args // {
nativeBuildInputs = args.nativeBuildInputs or [ ] ++ [
dotnetConfigureHook
dotnetBuildHook

@@ -159,23 +147,29 @@ stdenvNoCC.mkDerivation (args // {

# Parse the version attr into a format acceptable for the Version msbuild property
# The actual version attr is saved in InformationalVersion, which accepts an arbitrary string
versionForDotnet = if !(lib.hasAttr "version" args) || args.version == null
then null else let
versionForDotnet =
if !(lib.hasAttr "version" args) || args.version == null then
null
else
let
components = lib.pipe args.version [
lib.splitVersion
(lib.filter (x: (lib.strings.match "[0-9]+" x) != null))
(lib.filter (x: (lib.toIntBase10 x) < 65535)) # one version component in dotnet has to fit in 16 bits
(lib.filter (x:
(lib.toIntBase10 x)
< 65535)) # one version component in dotnet has to fit in 16 bits
];
in if (lib.length components) == 0
then null
else lib.concatStringsSep "." ((lib.take 4 components)
++ (if (lib.length components) < 4
then lib.replicate (4 - (lib.length components)) "0"
else [ ]));
in if (lib.length components) == 0 then
null
else
lib.concatStringsSep "." ((lib.take 4 components)
++ (if (lib.length components) < 4 then
lib.replicate (4 - (lib.length components)) "0"
else
[ ]));

makeWrapperArgs = args.makeWrapperArgs or [ ] ++ [
"--prefix LD_LIBRARY_PATH : ${dotnet-sdk.icu}/lib"
];
makeWrapperArgs = args.makeWrapperArgs or [ ]
++ [ "--prefix LD_LIBRARY_PATH : ${dotnet-sdk.icu}/lib" ];

# Stripping breaks the executable
dontStrip = args.dontStrip or true;

@@ -188,27 +182,34 @@ stdenvNoCC.mkDerivation (args // {
passthru = {
inherit nuget-source;
} // lib.optionalAttrs (!lib.isDerivation nugetDeps) {
fetch-deps =
let
fetch-deps = let
flags = dotnetFlags ++ dotnetRestoreFlags;
runtimeIds =
if runtimeId != null
then [ runtimeId ]
else map (system: dotnetCorePackages.systemToDotnetRid system) platforms;
runtimeIds = if runtimeId != null then
[ runtimeId ]
else
map (system: dotnetCorePackages.systemToDotnetRid system) platforms;
defaultDepsFile =
# Wire in the nugetDeps file such that running the script with no args
# runs it agains the correct deps file by default.
# Note that toString is necessary here as it results in the path at
# eval time (i.e. to the file in your local Nixpkgs checkout) rather
# than the Nix store path of the path after it's been imported.
if lib.isPath nugetDepsFile && !lib.hasPrefix "${builtins.storeDir}/" (toString nugetDepsFile)
then toString nugetDepsFile
else ''$(mktemp -t "${pname}-deps-XXXXXX.nix")'';
in
writeShellScript "fetch-${pname}-deps" ''
if lib.isPath nugetDepsFile
&& !lib.hasPrefix "${builtins.storeDir}/" (toString nugetDepsFile) then
toString nugetDepsFile
else
''$(mktemp -t "${pname}-deps-XXXXXX.nix")'';
in writeShellScript "fetch-${pname}-deps" ''
set -euo pipefail

export PATH="${lib.makeBinPath [ coreutils runtimeShellPackage dotnet-sdk (nuget-to-nix.override { inherit dotnet-sdk; }) ]}"
export PATH="${
lib.makeBinPath [
coreutils
runtimeShellPackage
dotnet-sdk
(nuget-to-nix.override { inherit dotnet-sdk; })
]
}"

for arg in "$@"; do
case "$arg" in

@@ -264,7 +265,10 @@ stdenvNoCC.mkDerivation (args // {
--runtime "$rid" \
--no-cache \
--force \
${lib.optionalString (!enableParallelBuilding) "--disable-parallel"} \
${
lib.optionalString (!enableParallelBuilding)
"--disable-parallel"
} \
${lib.optionalString (flags != [ ]) (toString flags)}
}

@@ -289,7 +293,7 @@ stdenvNoCC.mkDerivation (args // {
${dotnet-sdk}/bin/dotnet tool restore
cp -r $HOME/.nuget/packages/* $tmp/nuget_pkgs || true

for rid in "${lib.concatStringsSep "\" \"" runtimeIds}"; do
for rid in "${lib.concatStringsSep ''" "'' runtimeIds}"; do
(( ''${#projectFiles[@]} == 0 )) && dotnetRestore "" "$rid"

for project in ''${projectFiles[@]-} ''${testProjectFiles[@]-}; do

@@ -320,4 +324,6 @@ stdenvNoCC.mkDerivation (args // {
# ICU tries to unconditionally load files from /usr/share/icu on Darwin, which makes builds fail
# in the sandbox, so disable ICU on Darwin. This, as far as I know, shouldn't cause any built packages
# to behave differently, just the dotnet build tool.
// lib.optionalAttrs stdenvNoCC.isDarwin { DOTNET_SYSTEM_GLOBALIZATION_INVARIANT = 1; })
// lib.optionalAttrs stdenvNoCC.isDarwin {
DOTNET_SYSTEM_GLOBALIZATION_INVARIANT = 1;
})
@@ -1,27 +1,10 @@
{ lib
, stdenv
, which
, coreutils
, zlib
, openssl
, callPackage
, makeSetupHook
, makeWrapper
, dotnet-sdk
, dotnet-test-sdk
, disabledTests
, nuget-source
, dotnet-runtime
, runtimeDeps
, buildType
, runtimeId
}:
{ lib, stdenv, which, coreutils, zlib, openssl, callPackage, makeSetupHook
, makeWrapper, dotnet-sdk, dotnet-test-sdk, disabledTests, nuget-source
, dotnet-runtime, runtimeDeps, buildType, runtimeId }:
assert (builtins.isString runtimeId);

let
libraryPath = lib.makeLibraryPath runtimeDeps;
in
{
let libraryPath = lib.makeLibraryPath runtimeDeps;
in {
dotnetConfigureHook = callPackage ({ }:
makeSetupHook {
name = "dotnet-configure-hook";

@@ -44,9 +27,7 @@ in
makeSetupHook {
name = "dotnet-build-hook";
propagatedBuildInputs = [ dotnet-sdk ];
substitutions = {
inherit buildType runtimeId;
};
substitutions = { inherit buildType runtimeId; };
} ./dotnet-build-hook.sh) { };

dotnetCheckHook = callPackage ({ }:

@@ -55,12 +36,12 @@ in
propagatedBuildInputs = [ dotnet-test-sdk ];
substitutions = {
inherit buildType runtimeId libraryPath;
disabledTests = lib.optionalString (disabledTests != [])
(let
escapedNames = lib.lists.map (n: lib.replaceStrings [","] ["%2C"] n) disabledTests;
disabledTests = lib.optionalString (disabledTests != [ ]) (let
escapedNames =
lib.lists.map (n: lib.replaceStrings [ "," ] [ "%2C" ] n)
disabledTests;
filters = lib.lists.map (n: "FullyQualifiedName!=${n}") escapedNames;
in
"${lib.concatStringsSep "&" filters}");
in "${lib.concatStringsSep "&" filters}");
};
} ./dotnet-check-hook.sh) { };

@@ -68,9 +49,7 @@ in
makeSetupHook {
name = "dotnet-install-hook";
propagatedBuildInputs = [ dotnet-sdk ];
substitutions = {
inherit buildType runtimeId;
};
substitutions = { inherit buildType runtimeId; };
} ./dotnet-install-hook.sh) { };

dotnetFixupHook = callPackage ({ }:
@@ -1,30 +1,22 @@
{ stdenv, lib, makeWrapper, pkg-config, mono, dotnetbuildhelpers }:

attrsOrig @
{ pname
, version
, nativeBuildInputs ? []
, xBuildFiles ? [ ]
attrsOrig@{ pname, version, nativeBuildInputs ? [ ], xBuildFiles ? [ ]
, xBuildFlags ? [ "/p:Configuration=Release" ]
, outputFiles ? [ "bin/Release/*" ]
, dllFiles ? [ "*.dll" ]
, exeFiles ? [ "*.exe" ]
, outputFiles ? [ "bin/Release/*" ], dllFiles ? [ "*.dll" ], exeFiles ? [
"*.exe"
]
# Additional arguments to pass to the makeWrapper function, which wraps
# generated binaries.
, makeWrapperArgs ? [ ]
, ... }:
, makeWrapperArgs ? [ ], ... }:
let
arrayToShell = (a: toString (map (lib.escape (lib.stringToCharacters "\\ ';$`()|<>\t") ) a));
arrayToShell =
(a: toString (map (lib.escape (lib.stringToCharacters "\\ ';$`()|<> ")) a));

attrs = {
inherit pname version;

nativeBuildInputs = [
pkg-config
makeWrapper
dotnetbuildhelpers
mono
] ++ nativeBuildInputs;
nativeBuildInputs = [ pkg-config makeWrapper dotnetbuildhelpers mono ]
++ nativeBuildInputs;

configurePhase = ''
runHook preConfigure

@@ -112,5 +104,5 @@ attrsOrig @
runHook postInstall
'';
};
in
stdenv.mkDerivation (attrs // (builtins.removeAttrs attrsOrig [ "nativeBuildInputs" ] ))
in stdenv.mkDerivation
(attrs // (builtins.removeAttrs attrsOrig [ "nativeBuildInputs" ]))
@@ -1,12 +1,11 @@
{ runCommand, mono, pkg-config }:
runCommand
"dotnetbuildhelpers"
{ preferLocalBuild = true; }
''
runCommand "dotnetbuildhelpers" { preferLocalBuild = true; } ''
target="$out/bin"
mkdir -p "$target"

for script in ${./create-pkg-config-for-dll.sh} ${./patch-fsharp-targets.sh} ${./remove-duplicated-dlls.sh} ${./placate-nuget.sh} ${./placate-paket.sh}
for script in ${./create-pkg-config-for-dll.sh} ${
./patch-fsharp-targets.sh
} ${./remove-duplicated-dlls.sh} ${./placate-nuget.sh} ${./placate-paket.sh}
do
scriptName="$(basename "$script" | cut -f 2- -d -)"
cp -v "$script" "$target"/"$scriptName"
@@ -1,16 +1,7 @@
{ lib, stdenv, dotnetfx }:
{ name
, src
, baseDir ? "."
, slnFile
, targets ? "ReBuild"
, verbosity ? "detailed"
, options ? "/p:Configuration=Debug;Platform=Win32"
, assemblyInputs ? []
, preBuild ? ""
, modifyPublicMain ? false
, mainClassFile ? null
}:
{ name, src, baseDir ? ".", slnFile, targets ? "ReBuild", verbosity ? "detailed"
, options ? "/p:Configuration=Debug;Platform=Win32", assemblyInputs ? [ ]
, preBuild ? "", modifyPublicMain ? false, mainClassFile ? null }:

assert modifyPublicMain -> mainClassFile != null;

@@ -69,7 +60,9 @@ stdenv.mkDerivation {
fi

mkdir -p $out
MSBuild.exe ${toString slnFile} /nologo /t:${targets} /p:IntermediateOutputPath=$(cygpath --windows $out)\\ /p:OutputPath=$(cygpath --windows $out)\\ /verbosity:${verbosity} ${options}
MSBuild.exe ${
toString slnFile
} /nologo /t:${targets} /p:IntermediateOutputPath=$(cygpath --windows $out)\\ /p:OutputPath=$(cygpath --windows $out)\\ /verbosity:${verbosity} ${options}

# Because .NET assemblies store strings as UTF-16 internally, we cannot detect
# hashes. Therefore a text files containing the proper paths is created
@@ -1,17 +1,15 @@
{ lib, stdenv, dotnetfx }:

let dotnetenv =
{
let
dotnetenv = {
buildSolution = import ./build-solution.nix {
inherit lib stdenv;
dotnetfx = dotnetfx.pkg;
};

buildWrapper = import ./wrapper.nix {
inherit dotnetenv;
};
buildWrapper = import ./wrapper.nix { inherit dotnetenv; };

inherit (dotnetfx) assembly20Path wcfPath referenceAssembly30Path referenceAssembly35Path;
inherit (dotnetfx)
assembly20Path wcfPath referenceAssembly30Path referenceAssembly35Path;
};
in
dotnetenv
in dotnetenv
@@ -1,19 +1,9 @@
{ dotnetenv }:

{ name
, src
, baseDir ? "."
, slnFile
, targets ? "ReBuild"
, verbosity ? "detailed"
, options ? "/p:Configuration=Debug;Platform=Win32"
, assemblyInputs ? []
, preBuild ? ""
, namespace
, mainClassName
, mainClassFile
, modifyPublicMain ? true
}:
{ name, src, baseDir ? ".", slnFile, targets ? "ReBuild", verbosity ? "detailed"
, options ? "/p:Configuration=Debug;Platform=Win32", assemblyInputs ? [ ]
, preBuild ? "", namespace, mainClassName, mainClassFile
, modifyPublicMain ? true }:

let
application = dotnetenv.buildSolution {

@@ -21,8 +11,7 @@ let
inherit options assemblyInputs preBuild;
inherit modifyPublicMain mainClassFile;
};
in
dotnetenv.buildSolution {
in dotnetenv.buildSolution {
name = "${name}-wrapper";
src = ./Wrapper;
slnFile = "Wrapper.sln";
@@ -1,13 +1,8 @@
{ fetchurl, buildDotnetPackage, unzip }:

attrs @
{ pname
, version
, url ? "https://www.nuget.org/api/v2/package/${pname}/${version}"
, sha256 ? ""
, md5 ? ""
, ...
}:
attrs@{ pname, version
, url ? "https://www.nuget.org/api/v2/package/${pname}/${version}", sha256 ? ""
, md5 ? "", ... }:
if md5 != "" then
throw "fetchnuget does not support md5 anymore, please use sha256"
else
@@ -1,10 +1,6 @@
{ lib, python3, stdenvNoCC }:

{ name
, description ? ""
, deps ? []
, ...
}@args:
{ name, description ? "", deps ? [ ], ... }@args:

stdenvNoCC.mkDerivation (lib.recursiveUpdate {
inherit name;
@@ -1,35 +1,17 @@
{ lib
, runCommandLocal
, runtimeShell
, substituteAll
, nix
, coreutils
, jq
, yq
, curl
, gnugrep
, gawk
, dotnet-sdk
}:
{ lib, runCommandLocal, runtimeShell, substituteAll, nix, coreutils, jq, yq
, curl, gnugrep, gawk, dotnet-sdk }:

runCommandLocal "nuget-to-nix" {
script = substituteAll {
src = ./nuget-to-nix.sh;
inherit runtimeShell;

binPath = lib.makeBinPath [
nix
coreutils
jq
yq
curl
gnugrep
gawk
dotnet-sdk
];
binPath =
lib.makeBinPath [ nix coreutils jq yq curl gnugrep gawk dotnet-sdk ];
};

meta.description = "Convert a nuget packages directory to a lockfile for buildDotnetModule";
meta.description =
"Convert a nuget packages directory to a lockfile for buildDotnetModule";
} ''
install -Dm755 $script $out/bin/nuget-to-nix
''
@@ -4,9 +4,11 @@
{ lib, writeText, inherit-local }:

rec {
withPackages = pkgs': let
withPackages = pkgs':
let
pkgs = builtins.filter (x: x != null) pkgs';
extras = map (x: x.emacsBufferSetup pkgs) (builtins.filter (builtins.hasAttr "emacsBufferSetup") pkgs);
extras = map (x: x.emacsBufferSetup pkgs)
(builtins.filter (builtins.hasAttr "emacsBufferSetup") pkgs);
in writeText "dir-locals.el" ''
(require 'inherit-local "${inherit-local}/share/emacs/site-lisp/elpa/inherit-local-${inherit-local.version}/inherit-local.elc")

@@ -42,10 +44,16 @@ rec {
(inherit-local 'process-environment)
; setenv modifies in place, so copy the environment first
(setq process-environment (copy-tree process-environment))
(setenv "PATH" (concat "${lib.makeSearchPath "bin" pkgs}:" (getenv "PATH")))
(inherit-local-permanent exec-path (append '(${builtins.concatStringsSep " " (map (p: "\"${p}/bin\"") pkgs)}) exec-path))
(setenv "PATH" (concat "${
lib.makeSearchPath "bin" pkgs
}:" (getenv "PATH")))
(inherit-local-permanent exec-path (append '(${
builtins.concatStringsSep " " (map (p: ''"${p}/bin"'') pkgs)
}) exec-path))

(inherit-local-permanent eshell-path-env (concat "${lib.makeSearchPath "bin" pkgs}:" (if (boundp 'eshell-path-env) eshell-path-env (getenv "PATH"))))
(inherit-local-permanent eshell-path-env (concat "${
lib.makeSearchPath "bin" pkgs
}:" (if (boundp 'eshell-path-env) eshell-path-env (getenv "PATH"))))

(setq nixpkgs--is-nixpkgs-buffer t)
(inherit-local 'nixpkgs--is-nixpkgs-buffer)

@@ -56,22 +64,21 @@ rec {
# in one directory
haskellMonoRepo = { project-root # The monorepo root
, haskellPackages # The composed haskell packages set that contains all of the packages
}: { root }:
}:
{ root }:
let # The haskell paths.
haskell-paths = lib.filesystem.haskellPathsInDir project-root;
# Find the haskell package that the 'root' is in, if any.
haskell-path-parent =
let filtered = builtins.filter (name:
lib.hasPrefix (toString (project-root + "/${name}")) (toString root)
) (builtins.attrNames haskell-paths);
in
if filtered == [] then null else builtins.head filtered;
haskell-path-parent = let
filtered = builtins.filter (name:
lib.hasPrefix (toString (project-root + "/${name}")) (toString root))
(builtins.attrNames haskell-paths);
in if filtered == [ ] then null else builtins.head filtered;
# We're in the directory of a haskell package
is-haskell-package = haskell-path-parent != null;
haskell-package = haskellPackages.${haskell-path-parent};
# GHC environment with all needed deps for the haskell package
haskell-package-env =
builtins.head haskell-package.env.nativeBuildInputs;
in
lib.optionalAttrs is-haskell-package (withPackages [ haskell-package-env ]);
haskell-package-env = builtins.head haskell-package.env.nativeBuildInputs;
in lib.optionalAttrs is-haskell-package
(withPackages [ haskell-package-env ]);
}
@@ -4,16 +4,10 @@

let
handledArgs = [ "files" "fileSpecs" "meta" ];
genericBuild = import ./generic.nix { inherit lib stdenv emacs texinfo writeText gcc; };
genericBuild =
import ./generic.nix { inherit lib stdenv emacs texinfo writeText gcc; };

in

{ pname
, version
, src
, meta ? {}
, ...
}@args:
in { pname, version, src, meta ? { }, ... }@args:

genericBuild ({

@@ -30,7 +24,8 @@ genericBuild ({
'';

meta = {
homepage = args.src.meta.homepage or "https://elpa.gnu.org/packages/${pname}.html";
homepage =
args.src.meta.homepage or "https://elpa.gnu.org/packages/${pname}.html";
} // meta;
}
@@ -20,17 +20,11 @@ let
fi
'';

in

{ pname
, version
, buildInputs ? []
, packageRequires ? []
, meta ? {}
, ...
in { pname, version, buildInputs ? [ ], packageRequires ? [ ], meta ? { }, ...
}@args:

stdenv.mkDerivation (finalAttrs: ({
stdenv.mkDerivation (finalAttrs:
({
name = "emacs-${pname}-${finalAttrs.version}";

unpackCmd = ''
@@ -4,7 +4,8 @@
{ lib, stdenv, fetchFromGitHub, emacs, texinfo, writeText, gcc }:

let
genericBuild = import ./generic.nix { inherit lib stdenv emacs texinfo writeText gcc; };
genericBuild =
import ./generic.nix { inherit lib stdenv emacs texinfo writeText gcc; };

packageBuild = stdenv.mkDerivation {
name = "package-build";

@@ -20,35 +21,20 @@ let
dontConfigure = true;
dontBuild = true;

installPhase = "
mkdir -p $out
cp -r * $out
";
installPhase = "\n mkdir -p $out\n cp -r * $out\n ";
};

in

{ /*
pname: Nix package name without special symbols and without version or
in {
/* pname: Nix package name without special symbols and without version or
"emacs-" prefix.
*/
pname
/*
ename: Original Emacs package name, possibly containing special symbols.
*/
, ename ? null
, version
, recipe
, meta ? {}
, ...
}@args:
# ename: Original Emacs package name, possibly containing special symbols.
, ename ? null, version, recipe, meta ? { }, ... }@args:

genericBuild ({

ename =
if ename == null
then pname
else ename;
ename = if ename == null then pname else ename;

elpa2nix = ./elpa2nix.el;
melpa2nix = ./melpa2nix.el;
@@ -1,6 +1,4 @@
/*

# Usage
/* # Usage

`emacs.pkgs.withPackages` takes a single argument: a function from a package
set to a list of packages (the packages that will be available in

@@ -29,7 +27,6 @@ let customEmacsPackages =
});
in customEmacsPackages.withPackages (epkgs: [ epkgs.evil epkgs.magit ])
```

*/

{ lib, lndir, makeBinaryWrapper, runCommand, gcc }:

@@ -38,17 +35,11 @@ let
inherit (self) emacs;
withNativeCompilation = emacs.withNativeCompilation or false;
withTreeSitter = emacs.withTreeSitter or false;
in
packagesFun: # packages explicitly requested by the user
in packagesFun: # packages explicitly requested by the user
let
explicitRequires =
if lib.isFunction packagesFun
then packagesFun self
else packagesFun;
in
runCommand
(lib.appendToName "with-packages" emacs).name
{
if lib.isFunction packagesFun then packagesFun self else packagesFun;
in runCommand (lib.appendToName "with-packages" emacs).name {
inherit emacs explicitRequires;
nativeBuildInputs = [ emacs lndir makeBinaryWrapper ];

@@ -57,14 +48,12 @@ runCommand

# Store all paths we want to add to emacs here, so that we only need to add
# one path to the load lists
deps = runCommand "emacs-packages-deps"
({
deps = runCommand "emacs-packages-deps" ({
inherit explicitRequires lndir emacs;
nativeBuildInputs = lib.optional withNativeCompilation gcc;
} // lib.optionalAttrs withNativeCompilation {
inherit (emacs) LIBRARY_PATH;
})
''
}) ''
findInputsOld() {
local pkg="$1"; shift
local var="$1"; shift

@@ -128,12 +117,16 @@ runCommand
linkEmacsPackage() {
linkPath "$1" "bin" "bin"
linkPath "$1" "share/emacs/site-lisp" "share/emacs/site-lisp"
${lib.optionalString withNativeCompilation ''
${
lib.optionalString withNativeCompilation ''
linkPath "$1" "share/emacs/native-lisp" "share/emacs/native-lisp"
''}
${lib.optionalString withTreeSitter ''
''
}
${
lib.optionalString withTreeSitter ''
linkPath "$1" "lib" "lib"
''}
''
}
}

# Iterate over the array of inputs (avoiding nix's own interpolation)

@@ -186,8 +179,7 @@ runCommand
'';

inherit (emacs) meta;
}
''
} ''
mkdir -p "$out/bin"

# Wrap emacs and friends so they find our site-start.el before the original.

@@ -219,7 +211,9 @@ runCommand
$out/Applications/Emacs.app/Contents


substitute ${./wrapper.sh} $out/Applications/Emacs.app/Contents/MacOS/Emacs \
substitute ${
./wrapper.sh
} $out/Applications/Emacs.app/Contents/MacOS/Emacs \
--subst-var-by bash ${emacs.stdenv.shell} \
--subst-var-by wrapperSiteLisp "$deps/share/emacs/site-lisp" \
--subst-var-by wrapperSiteLispNative "$deps/share/emacs/native-lisp" \
@@ -27,7 +27,8 @@ stdenv.mkDerivation {
'';

meta = {
description = "Internal tool used by the nixpkgs wrapper scripts for processing response files";
description =
"Internal tool used by the nixpkgs wrapper scripts for processing response files";
longDescription = ''
expand-response-params is a tool that allows for obtaining a full list of all
arguments passed in a given compiler command line including those passed via
@@ -2,17 +2,22 @@
# Useful when packaging binaries that insist on using nss to look up
# username/groups (like nginx).
# /bin/sh is fine to not exist, and provided by another shim.
{ lib, symlinkJoin, writeTextDir, runCommand, extraPasswdLines ? [], extraGroupLines ? [] }:
{ lib, symlinkJoin, writeTextDir, runCommand, extraPasswdLines ? [ ]
, extraGroupLines ? [ ] }:
symlinkJoin {
name = "fake-nss";
paths = [
(writeTextDir "etc/passwd" ''
root:x:0:0:root user:/var/empty:/bin/sh
${lib.concatStrings (map (line: line + "\n") extraPasswdLines)}nobody:x:65534:65534:nobody:/var/empty:/bin/sh
${
lib.concatStrings (map (line: line + "\n") extraPasswdLines)
}nobody:x:65534:65534:nobody:/var/empty:/bin/sh
'')
(writeTextDir "etc/group" ''
root:x:0:
${lib.concatStrings (map (line: line + "\n") extraGroupLines)}nobody:x:65534:
${
lib.concatStrings (map (line: line + "\n") extraGroupLines)
}nobody:x:65534:
'')
(writeTextDir "etc/nsswitch.conf" ''
hosts: files dns
@@ -1,18 +1,19 @@
{ fetchgit, fetchzip, lib }:

lib.makeOverridable (
{ owner
, repo
, rev
, domain ? "git.9front.org"
, name ? "source"
, leaveDotGit ? false
, deepClone ? false
lib.makeOverridable ({ owner, repo, rev, domain ? "git.9front.org"
, name ? "source", leaveDotGit ? false, deepClone ? false
, ... # For hash agility
}@args:

let
passthruAttrs = removeAttrs args [ "domain" "owner" "repo" "rev" "leaveDotGit" "deepClone" ];
passthruAttrs = removeAttrs args [
"domain"
"owner"
"repo"
"rev"
"leaveDotGit"
"deepClone"
];

useFetchGit = leaveDotGit || deepClone;
fetcher = if useFetchGit then fetchgit else fetchzip;

@@ -26,11 +27,9 @@ lib.makeOverridable (
} else {
url = "https://${domain}/${owner}/${repo}/${rev}/snap.tar.gz";

passthru = {
inherit gitRepoUrl;
passthru = { inherit gitRepoUrl; };
}) // passthruAttrs // {
inherit name;
};
}) // passthruAttrs // { inherit name; };
in

fetcher fetcherArgs // { inherit rev; }
)
in fetcher fetcherArgs // { inherit rev; })
@@ -1,11 +1,11 @@
{ fetchzip, lib }:

lib.makeOverridable (
{ owner, repo, rev, name ? "source"
, ... # For hash agility
}@args: fetchzip ({
lib.makeOverridable ({ owner, repo, rev, name ? "source", ... # For hash agility
}@args:
fetchzip ({
inherit name;
url = "https://bitbucket.org/${owner}/${repo}/get/${rev}.tar.gz";
meta.homepage = "https://bitbucket.org/${owner}/${repo}/";
} // removeAttrs args [ "owner" "repo" "rev" ]) // { inherit rev; }
)
} // removeAttrs args [ "owner" "repo" "rev" ]) // {
inherit rev;
})
@@ -4,12 +4,14 @@ let
let
components = lib.splitString "#" version;
hash = lib.last components;
ver = if builtins.length components == 1 then (cleanName version) else hash;
ver =
if builtins.length components == 1 then (cleanName version) else hash;
in ver;

cleanName = name: lib.replaceStrings [ "/" ":" ] [ "-" "-" ] name;

fetchbower = name: version: target: outputHash: stdenvNoCC.mkDerivation {
fetchbower = name: version: target: outputHash:
stdenvNoCC.mkDerivation {
name = "${cleanName name}-${bowerVersion version}";
buildCommand = ''
fetch-bower --quiet --out=$PWD/out "${name}" "${target}" "${version}"
@@ -5,8 +5,7 @@

{ stdenvNoCC, cvs, openssh, lib }:

lib.makeOverridable (
{cvsRoot, module, tag ? null, date ? null, sha256}:
lib.makeOverridable ({ cvsRoot, module, tag ? null, date ? null, sha256 }:

stdenvNoCC.mkDerivation {
name = "cvs-export";

@@ -18,5 +17,4 @@ stdenvNoCC.mkDerivation {
outputHash = sha256;

inherit cvsRoot module sha256 tag date;
}
)
})
@@ -1,12 +1,7 @@
{ stdenvNoCC, darcs, cacert, lib }:

lib.makeOverridable (
{ url
, rev ? null
, context ? null
, sha256 ? ""
, name ? "fetchdarcs"
}:
lib.makeOverridable
({ url, rev ? null, context ? null, sha256 ? "", name ? "fetchdarcs" }:

stdenvNoCC.mkDerivation {
builder = ./builder.sh;

@@ -17,5 +12,4 @@ stdenvNoCC.mkDerivation {
outputHash = sha256;

inherit url rev context name;
}
)
})
@@ -1,19 +1,16 @@
{ lib, fetchpatch }:

lib.makeOverridable (
{ pname, version, debianRevision ? null, area ? "main",
patch, name ? patch, hash }:
lib.makeOverridable ({ pname, version, debianRevision ? null, area ? "main"
, patch, name ? patch, hash }:
let
inherit (lib.strings) hasPrefix substring;
prefix =
substring 0 (if hasPrefix "lib" pname then 4 else 1) pname;
versionString =
if debianRevision == null then version
else "${version}-${debianRevision}";
prefix = substring 0 (if hasPrefix "lib" pname then 4 else 1) pname;
versionString = if debianRevision == null then
version
else
"${version}-${debianRevision}";
in fetchpatch {
inherit name hash;
url =
"https://sources.debian.org/data/${area}/${prefix}/"
url = "https://sources.debian.org/data/${area}/${prefix}/"
+ "${pname}/${versionString}/debian/patches/${patch}";
}
)
})
@@ -32,8 +32,6 @@
# DOCKER_CREDENTIALS path
let
pathParts =
(builtins.filter
({prefix, path}: "DOCKER_CREDENTIALS" == prefix)
(builtins.filter ({ prefix, path }: "DOCKER_CREDENTIALS" == prefix)
builtins.nixPath);
in
lib.optionalString (pathParts != []) ((builtins.head pathParts).path)
in lib.optionalString (pathParts != [ ]) ((builtins.head pathParts).path)
@@ -1,52 +1,39 @@
{ stdenv, lib, coreutils, bash, gnutar, writeText }:
let
stripScheme =
builtins.replaceStrings [ "https://" "http://" ] [ "" "" ];
stripNixStore =
s: lib.removePrefix "${builtins.storeDir}/" s;
in
{ name
, registry ? "https://registry-1.docker.io/v2/"
, repository ? "library"
, imageName
, tag
, imageLayers
, imageConfig
, image ? "${stripScheme registry}/${repository}/${imageName}:${tag}"
}:
stripScheme = builtins.replaceStrings [ "https://" "http://" ] [ "" "" ];
stripNixStore = s: lib.removePrefix "${builtins.storeDir}/" s;
in { name, registry ? "https://registry-1.docker.io/v2/", repository ? "library"
, imageName, tag, imageLayers, imageConfig
, image ? "${stripScheme registry}/${repository}/${imageName}:${tag}" }:

# Make sure there are *no* slashes in the repository or container
# names since we use these to make the output derivation name for the
# nix-store path.
assert null == lib.findFirst (c: "/"==c) null (lib.stringToCharacters repository);
assert null == lib.findFirst (c: "/"==c) null (lib.stringToCharacters imageName);
assert null
== lib.findFirst (c: "/" == c) null (lib.stringToCharacters repository);
assert null
== lib.findFirst (c: "/" == c) null (lib.stringToCharacters imageName);

let
# Abuse paths to collapse possible double slashes
repoTag0 = builtins.toString (/. + "/${stripScheme registry}/${repository}/${imageName}");
repoTag0 = builtins.toString
(/. + "/${stripScheme registry}/${repository}/${imageName}");
repoTag1 = lib.removePrefix "/" repoTag0;

layers = builtins.map stripNixStore imageLayers;

manifest =
writeText "manifest.json" (builtins.toJSON [
{ Config = stripNixStore imageConfig;
manifest = writeText "manifest.json" (builtins.toJSON [{
Config = stripNixStore imageConfig;
Layers = layers;
RepoTags = [ "${repoTag1}:${tag}" ];
}]);

repositories =
writeText "repositories" (builtins.toJSON {
${repoTag1} = {
${tag} = lib.last layers;
};
});
repositories = writeText "repositories"
(builtins.toJSON { ${repoTag1} = { ${tag} = lib.last layers; }; });

imageFileStorePaths =
writeText "imageFileStorePaths.txt"
imageFileStorePaths = writeText "imageFileStorePaths.txt"
(lib.concatStringsSep "\n" ((lib.unique imageLayers) ++ [ imageConfig ]));
in
stdenv.mkDerivation {
in stdenv.mkDerivation {
builder = ./fetchdocker-builder.sh;
buildInputs = [ coreutils ];
preferLocalBuild = true;

@@ -55,7 +42,5 @@ stdenv.mkDerivation {
inherit bash gnutar manifest repositories;
inherit imageFileStorePaths;

passthru = {
inherit image;
};
passthru = { inherit image; };
}
@@ -1,10 +1,7 @@
pkgargs@{ stdenv, lib, haskellPackages, writeText, gawk }:
let
generic-fetcher =
import ./generic-fetcher.nix pkgargs;
in
let generic-fetcher = import ./generic-fetcher.nix pkgargs;

args@{ repository ? "library", imageName, tag, ... }:
in args@{ repository ? "library", imageName, tag, ... }:

generic-fetcher ({
fetcher = "hocker-config";
@@ -1,10 +1,7 @@
pkgargs@{ stdenv, lib, haskellPackages, writeText, gawk }:
let
generic-fetcher =
import ./generic-fetcher.nix pkgargs;
in
let generic-fetcher = import ./generic-fetcher.nix pkgargs;

args@{ layerDigest, ... }:
in args@{ layerDigest, ... }:

generic-fetcher ({
fetcher = "hocker-layer";
@@ -2,37 +2,30 @@
let
awk = "${gawk}/bin/awk";
dockerCredentialsFile = import ./credentials.nix { inherit lib; };
in
{ fetcher
, name
, registry ? "https://registry-1.docker.io/v2/"
, repository ? "library"
, imageName
, sha256
, tag ? ""
, layerDigest ? ""
}:
in { fetcher, name, registry ? "https://registry-1.docker.io/v2/"
, repository ? "library", imageName, sha256, tag ? "", layerDigest ? "" }:

# There must be no slashes in the repository or container names since
# we use these to make the output derivation name for the nix store
# path
assert null == lib.findFirst (c: "/"==c) null (lib.stringToCharacters repository);
assert null == lib.findFirst (c: "/"==c) null (lib.stringToCharacters imageName);
assert null
== lib.findFirst (c: "/" == c) null (lib.stringToCharacters repository);
assert null
== lib.findFirst (c: "/" == c) null (lib.stringToCharacters imageName);

# Only allow hocker-config and hocker-layer as fetchers for now
assert (builtins.elem fetcher [ "hocker-config" "hocker-layer" ]);

# If layerDigest is non-empty then it must not have a 'sha256:' prefix!
assert
(if layerDigest != ""
then !lib.hasPrefix "sha256:" layerDigest
else true);
assert (if layerDigest != "" then
!lib.hasPrefix "sha256:" layerDigest
else
true);

let
layerDigestFlag =
lib.optionalString (layerDigest != "") "--layer ${layerDigest}";
in
stdenv.mkDerivation {
in stdenv.mkDerivation {
inherit name;
builder = writeText "${fetcher}-builder.sh" ''
source "$stdenv/setup"
@@ -1,36 +1,21 @@
{ stdenv
, fetchurl
, jq
, strip-nondeterminism
, unzip
, writeScript
, zip
}:
{ stdenv, fetchurl, jq, strip-nondeterminism, unzip, writeScript, zip }:

{ name
, url ? null
, sha1 ? ""
, sha256 ? ""
, sha512 ? ""
, fixedExtid ? null
, hash ? ""
, src ? ""
}:
{ name, url ? null, sha1 ? "", sha256 ? "", sha512 ? "", fixedExtid ? null
, hash ? "", src ? "" }:

let
extid = if fixedExtid == null then "nixos@${name}" else fixedExtid;
source = if url == null then src else
source = if url == null then
src
else
fetchurl {
url = url;
inherit sha1 sha256 sha512 hash;
};
in
stdenv.mkDerivation {
in stdenv.mkDerivation {
inherit name;

passthru = {
inherit extid;
};
passthru = { inherit extid; };

builder = writeScript "xpibuilder" ''
source $stdenv/setup

@@ -48,10 +33,5 @@ stdenv.mkDerivation {
rm -r "$out/$UUID"
'';

nativeBuildInputs = [
jq
strip-nondeterminism
unzip
zip
];
nativeBuildInputs = [ jq strip-nondeterminism unzip zip ];
}
@@ -4,17 +4,17 @@
simple = testers.invalidateFetcherByDrvHash fetchFirefoxAddon {
name = "image-search-options";
# Chosen because its only 147KB
url = "https://addons.mozilla.org/firefox/downloads/file/3059971/image_search_options-3.0.12-fx.xpi";
url =
"https://addons.mozilla.org/firefox/downloads/file/3059971/image_search_options-3.0.12-fx.xpi";
sha256 = "sha256-H73YWX/DKxvhEwKpWOo7orAQ7c/rQywpljeyxYxv0Gg=";
};
overridden-source =
let
overridden-source = let
image-search-options = fetchurl {
url = "https://addons.mozilla.org/firefox/downloads/file/3059971/image_search_options-3.0.12-fx.xpi";
url =
"https://addons.mozilla.org/firefox/downloads/file/3059971/image_search_options-3.0.12-fx.xpi";
sha256 = "sha256-H73YWX/DKxvhEwKpWOo7orAQ7c/rQywpljeyxYxv0Gg=";
};
in
testers.invalidateFetcherByDrvHash fetchFirefoxAddon {
in testers.invalidateFetcherByDrvHash fetchFirefoxAddon {
name = "image-search-options";
src = image-search-options;
};
@@ -1,11 +1,6 @@
{ stdenv, lib, fossil, cacert }:

{ name ? null
, url
, rev
, sha256 ? ""
, hash ? ""
}:
{ name ? null, url, rev, sha256 ? "", hash ? "" }:

if hash != "" && sha256 != "" then
throw "Only one of sha256 or hash can be set"
@@ -1,5 +1,7 @@
{lib, stdenvNoCC, git, git-lfs, cacert}: let
urlToName = url: rev: let
{ lib, stdenvNoCC, git, git-lfs, cacert }:
let
urlToName = url: rev:
let
inherit (lib) removeSuffix splitString last;
base = last (splitString ":" (baseNameOf (removeSuffix "/" url)));

@@ -7,29 +9,24 @@

short = builtins.substring 0 7 rev;

appendShort = lib.optionalString ((builtins.match "[a-f0-9]*" rev) != null) "-${short}";
in "${if matched == null then base else builtins.head matched}${appendShort}";
in
lib.makeOverridable (
{ url, rev ? "HEAD", sha256 ? "", hash ? "", leaveDotGit ? deepClone
, fetchSubmodules ? true, deepClone ? false
, branchName ? null
, sparseCheckout ? []
, nonConeMode ? false
appendShort =
lib.optionalString ((builtins.match "[a-f0-9]*" rev) != null)
"-${short}";
in "${
if matched == null then base else builtins.head matched
}${appendShort}";
in lib.makeOverridable ({ url, rev ? "HEAD", sha256 ? "", hash ? ""
, leaveDotGit ? deepClone, fetchSubmodules ? true, deepClone ? false
, branchName ? null, sparseCheckout ? [ ], nonConeMode ? false
, name ? urlToName url rev
, # Shell code executed after the file has been fetched
# successfully. This can do things like check or transform the file.
postFetch ? ""
, preferLocalBuild ? true
, fetchLFS ? false
postFetch ? "", preferLocalBuild ? true, fetchLFS ? false
, # Shell code to build a netrc file for BASIC auth
netrcPhase ? null
, # Impure env vars (https://nixos.org/nix/manual/#sec-advanced-attributes)
# needed for netrcPhase
netrcImpureEnvVars ? []
, meta ? {}
, allowedRequisites ? null
}:
netrcImpureEnvVars ? [ ], meta ? { }, allowedRequisites ? null }:

/* NOTE:
fetchgit has one problem: git fetch only works for refs.

@@ -60,15 +57,15 @@ if hash != "" && sha256 != "" then
throw "Only one of sha256 or hash can be set"
else if builtins.isString sparseCheckout then
# Changed to throw on 2023-06-04
throw "Please provide directories/patterns for sparse checkout as a list of strings. Passing a (multi-line) string is not supported any more."
throw
"Please provide directories/patterns for sparse checkout as a list of strings. Passing a (multi-line) string is not supported any more."
else
stdenvNoCC.mkDerivation {
inherit name;
builder = ./builder.sh;
fetcher = ./nix-prefetch-git;

nativeBuildInputs = [ git ]
++ lib.optionals fetchLFS [ git-lfs ];
nativeBuildInputs = [ git ] ++ lib.optionals fetchLFS [ git-lfs ];

outputHashAlgo = if hash != "" then null else "sha256";
outputHashMode = "recursive";

@@ -84,9 +81,12 @@ stdenvNoCC.mkDerivation {
# > from standard in as a newline-delimited list instead of from the arguments.
sparseCheckout = builtins.concatStringsSep "\n" sparseCheckout;

inherit url rev leaveDotGit fetchLFS fetchSubmodules deepClone branchName nonConeMode postFetch;
inherit url rev leaveDotGit fetchLFS fetchSubmodules deepClone branchName
nonConeMode postFetch;

postHook = if netrcPhase == null then null else ''
postHook = if netrcPhase == null then
null
else ''
${netrcPhase}
# required that git uses the netrc file
mv {,.}netrc

@@ -96,15 +96,10 @@ stdenvNoCC.mkDerivation {

GIT_SSL_CAINFO = "${cacert}/etc/ssl/certs/ca-bundle.crt";

impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ netrcImpureEnvVars ++ [
"GIT_PROXY_COMMAND" "NIX_GIT_SSL_CAINFO" "SOCKS_SERVER"
];

impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ netrcImpureEnvVars
++ [ "GIT_PROXY_COMMAND" "NIX_GIT_SSL_CAINFO" "SOCKS_SERVER" ];

inherit preferLocalBuild meta allowedRequisites;

passthru = {
gitRepoUrl = url;
};
}
)
passthru = { gitRepoUrl = url; };
})
@@ -10,10 +10,7 @@
name = "sparse-checkout-nix-source";
url = "https://github.com/NixOS/nix";
rev = "9d9dbe6ed05854e03811c361a3380e09183f4f4a";
sparseCheckout = [
"src"
"tests"
];
sparseCheckout = [ "src" "tests" ];
sha256 = "sha256-g1PHGTWgAcd/+sXHo1o6AjVWCvC6HiocOfMbMh873LQ=";
};

@@ -21,10 +18,7 @@
name = "sparse-checkout-non-cone-nix-source";
url = "https://github.com/NixOS/nix";
rev = "9d9dbe6ed05854e03811c361a3380e09183f4f4a";
sparseCheckout = [
"src"
"tests"
];
sparseCheckout = [ "src" "tests" ];
nonConeMode = true;
sha256 = "sha256-FknO6C/PSnMPfhUqObD4vsW4PhkwdmPa9blNzcNvJQ4=";
};
@@ -2,8 +2,6 @@

{ lib, fetchFromGitHub }:

lib.makeOverridable (
{ domain, ... }@args:
lib.makeOverridable ({ domain, ... }@args:

fetchFromGitHub ((removeAttrs args [ "domain" ]) // { githubBase = domain; })
)
fetchFromGitHub ((removeAttrs args [ "domain" ]) // { githubBase = domain; }))
@@ -1,21 +1,18 @@
{ lib, fetchgit, fetchzip }:

lib.makeOverridable (
{ owner, repo, rev, name ? "source"
, fetchSubmodules ? false, leaveDotGit ? null
, deepClone ? false, private ? false, forceFetchGit ? false
, sparseCheckout ? []
, githubBase ? "github.com", varPrefix ? null
, meta ? { }
lib.makeOverridable ({ owner, repo, rev, name ? "source"
, fetchSubmodules ? false, leaveDotGit ? null, deepClone ? false
, private ? false, forceFetchGit ? false, sparseCheckout ? [ ]
, githubBase ? "github.com", varPrefix ? null, meta ? { }
, ... # For hash agility
}@args:

let

position = (if args.meta.description or null != null
then builtins.unsafeGetAttrPos "description" args.meta
else builtins.unsafeGetAttrPos "rev" args
);
position = (if args.meta.description or null != null then
builtins.unsafeGetAttrPos "description" args.meta
else
builtins.unsafeGetAttrPos "rev" args);
baseUrl = "https://${githubBase}/${owner}/${repo}";
newMeta = meta // {
homepage = meta.homepage or baseUrl;

@@ -23,16 +20,30 @@ let
# to indicate where derivation originates, similar to make-derivation.nix's mkDerivation
position = "${position.file}:${toString position.line}";
};
passthruAttrs = removeAttrs args [ "owner" "repo" "rev" "fetchSubmodules" "forceFetchGit" "private" "githubBase" "varPrefix" ];
varBase = "NIX${lib.optionalString (varPrefix != null) "_${varPrefix}"}_GITHUB_PRIVATE_";
useFetchGit = fetchSubmodules || (leaveDotGit == true) || deepClone || forceFetchGit || (sparseCheckout != []);
passthruAttrs = removeAttrs args [
"owner"
"repo"
"rev"
"fetchSubmodules"
"forceFetchGit"
"private"
"githubBase"
"varPrefix"
];
varBase = "NIX${
lib.optionalString (varPrefix != null) "_${varPrefix}"
}_GITHUB_PRIVATE_";
useFetchGit = fetchSubmodules || (leaveDotGit == true) || deepClone
|| forceFetchGit || (sparseCheckout != [ ]);
# We prefer fetchzip in cases we don't need submodules as the hash
# is more stable in that case.
fetcher =
if useFetchGit then fetchgit
fetcher = if useFetchGit then
fetchgit
# fetchzip may not be overridable when using external tools, for example nix-prefetch
else if fetchzip ? override then fetchzip.override { withUnzip = false; }
else fetchzip;
else if fetchzip ? override then
fetchzip.override { withUnzip = false; }
else
fetchzip;
privateAttrs = lib.optionalAttrs private {
netrcPhase = ''
if [ -z "''$${varBase}USERNAME" -o -z "''$${varBase}PASSWORD" ]; then

@@ -50,19 +61,20 @@ let

gitRepoUrl = "${baseUrl}.git";

fetcherArgs = (if useFetchGit
then {
inherit rev deepClone fetchSubmodules sparseCheckout; url = gitRepoUrl;
fetcherArgs = (if useFetchGit then
{
inherit rev deepClone fetchSubmodules sparseCheckout;
url = gitRepoUrl;
} // lib.optionalAttrs (leaveDotGit != null) { inherit leaveDotGit; }
else {
url = "${baseUrl}/archive/${rev}.tar.gz";

passthru = {
inherit gitRepoUrl;
passthru = { inherit gitRepoUrl; };
}) // privateAttrs // passthruAttrs // {
inherit name;
};
}
) // privateAttrs // passthruAttrs // { inherit name; };
in

fetcher fetcherArgs // { meta = newMeta; inherit rev owner repo; }
)
in fetcher fetcherArgs // {
meta = newMeta;
inherit rev owner repo;
})

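For reference, a typical call into this fetcher; per the useFetchGit logic above, setting fetchSubmodules, deepClone, leaveDotGit, or sparseCheckout switches the backend from fetchzip to fetchgit. The rev is reused from the sparse-checkout tests earlier; the hash is a placeholder:

  src = fetchFromGitHub {
    owner = "NixOS";
    repo = "nix";
    rev = "9d9dbe6ed05854e03811c361a3380e09183f4f4a";
    fetchSubmodules = true; # forces the fetchgit code path
    hash = lib.fakeHash;    # placeholder hash
  };
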
@@ -1,12 +1,12 @@
{ fetchzip, lib }:

lib.makeOverridable (
{ url, rev, name ? "source", ... } @ args:
lib.makeOverridable ({ url, rev, name ? "source", ... }@args:

fetchzip ({
inherit name;
url = "${url}/+archive/${rev}.tar.gz";
stripRoot = false;
meta.homepage = url;
} // removeAttrs args [ "url" "rev" ]) // { inherit rev; }
)
} // removeAttrs args [ "url" "rev" ]) // {
inherit rev;
})

@@ -2,20 +2,32 @@

lib.makeOverridable (
# gitlab example
{ owner, repo, rev, protocol ? "https", domain ? "gitlab.com", name ? "source", group ? null
, fetchSubmodules ? false, leaveDotGit ? false
, deepClone ? false, forceFetchGit ? false
, sparseCheckout ? []
{ owner, repo, rev, protocol ? "https", domain ? "gitlab.com", name ? "source"
, group ? null, fetchSubmodules ? false, leaveDotGit ? false
, deepClone ? false, forceFetchGit ? false, sparseCheckout ? [ ]
, ... # For hash agility
}@args:

let
slug = lib.concatStringsSep "/" ((lib.optional (group != null) group) ++ [ owner repo ]);
slug = lib.concatStringsSep "/"
((lib.optional (group != null) group) ++ [ owner repo ]);
escapedSlug = lib.replaceStrings [ "." "/" ] [ "%2E" "%2F" ] slug;
escapedRev = lib.replaceStrings [ "+" "%" "/" ] [ "%2B" "%25" "%2F" ] rev;
passthruAttrs = removeAttrs args [ "protocol" "domain" "owner" "group" "repo" "rev" "fetchSubmodules" "forceFetchGit" "leaveDotGit" "deepClone" ];
passthruAttrs = removeAttrs args [
"protocol"
"domain"
"owner"
"group"
"repo"
"rev"
"fetchSubmodules"
"forceFetchGit"
"leaveDotGit"
"deepClone"
];

useFetchGit = fetchSubmodules || leaveDotGit || deepClone || forceFetchGit || (sparseCheckout != []);
useFetchGit = fetchSubmodules || leaveDotGit || deepClone || forceFetchGit
|| (sparseCheckout != [ ]);
fetcher = if useFetchGit then fetchgit else fetchzip;

gitRepoUrl = "${protocol}://${domain}/${slug}.git";

@@ -24,13 +36,15 @@ let
inherit rev deepClone fetchSubmodules sparseCheckout leaveDotGit;
url = gitRepoUrl;
} else {
url = "${protocol}://${domain}/api/v4/projects/${escapedSlug}/repository/archive.tar.gz?sha=${escapedRev}";
url =
"${protocol}://${domain}/api/v4/projects/${escapedSlug}/repository/archive.tar.gz?sha=${escapedRev}";

passthru = {
inherit gitRepoUrl;
passthru = { inherit gitRepoUrl; };
}) // passthruAttrs // {
inherit name;
};
}) // passthruAttrs // { inherit name; };
in

fetcher fetcherArgs // { meta.homepage = "${protocol}://${domain}/${slug}/"; inherit rev owner repo; }
)
in fetcher fetcherArgs // {
meta.homepage = "${protocol}://${domain}/${slug}/";
inherit rev owner repo;
})

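The group argument is prepended to the owner/repo slug, as the slug computation above shows. A sketch with hypothetical coordinates and a placeholder hash:

  src = fetchFromGitLab {
    domain = "gitlab.com";
    group = "somegroup"; # hypothetical subgroup
    owner = "someowner"; # hypothetical
    repo = "somerepo";   # hypothetical
    rev = "v0.1";        # hypothetical tag
    hash = lib.fakeHash; # placeholder hash
  };
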
@@ -1,7 +1,6 @@
{ runCommand, git, lib }:

lib.makeOverridable (
src:
lib.makeOverridable (src:

let
srcStr = toString src;

@@ -40,5 +39,4 @@ let
| tar xf - -C $out
'';

in nixPath
)
in nixPath)

@@ -1,11 +1,6 @@
{ lib, stdenvNoCC, mercurial }:
{ name ? null
, url
, rev ? null
, sha256 ? null
, hash ? null
, fetchSubrepos ? false
, preferLocalBuild ? true }:
{ name ? null, url, rev ? null, sha256 ? null, hash ? null
, fetchSubrepos ? false, preferLocalBuild ? true }:

if hash != null && sha256 != null then
throw "Only one of sha256 or hash can be set"

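A minimal sketch of calling the mercurial fetcher; per the guard above, set sha256 or hash but not both. The URL and changeset are hypothetical:

  src = fetchhg {
    url = "https://hg.example.org/project"; # hypothetical repository
    rev = "1a2b3c4d5e6f";                   # hypothetical changeset id
    hash = lib.fakeHash;                    # placeholder; do not also set sha256
  };
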
@@ -1,48 +1,47 @@
{ stdenv
, curl
}:
{ stdenv, curl }:

{ ipfs
, url ? ""
, curlOpts ? ""
, outputHash ? ""
, outputHashAlgo ? ""
, md5 ? ""
, sha1 ? ""
, sha256 ? ""
, sha512 ? ""
, meta ? {}
, port ? "8080"
, postFetch ? ""
, preferLocalBuild ? true
}:
{ ipfs, url ? "", curlOpts ? "", outputHash ? "", outputHashAlgo ? "", md5 ? ""
, sha1 ? "", sha256 ? "", sha512 ? "", meta ? { }, port ? "8080", postFetch ? ""
, preferLocalBuild ? true }:

let

hasHash = (outputHash != "" && outputHashAlgo != "")
|| md5 != "" || sha1 != "" || sha256 != "" || sha512 != "";
hasHash = (outputHash != "" && outputHashAlgo != "") || md5 != "" || sha1
!= "" || sha256 != "" || sha512 != "";

in

if (!hasHash) then throw "Specify sha for fetchipfs fixed-output derivation" else stdenv.mkDerivation {
in if (!hasHash) then
throw "Specify sha for fetchipfs fixed-output derivation"
else
stdenv.mkDerivation {
name = ipfs;
builder = ./builder.sh;
nativeBuildInputs = [ curl ];

# New-style output content requirements.
outputHashAlgo = if outputHashAlgo != "" then outputHashAlgo else
if sha512 != "" then "sha512" else if sha256 != "" then "sha256" else if sha1 != "" then "sha1" else "md5";
outputHash = if outputHash != "" then outputHash else
if sha512 != "" then sha512 else if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5;
outputHashAlgo = if outputHashAlgo != "" then
outputHashAlgo
else if sha512 != "" then
"sha512"
else if sha256 != "" then
"sha256"
else if sha1 != "" then
"sha1"
else
"md5";
outputHash = if outputHash != "" then
outputHash
else if sha512 != "" then
sha512
else if sha256 != "" then
sha256
else if sha1 != "" then
sha1
else
md5;

outputHashMode = "recursive";

inherit curlOpts
postFetch
ipfs
url
port
meta;
inherit curlOpts postFetch ipfs url port meta;

# Doing the download on a remote machine just duplicates network
# traffic, so don't do that.

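A sketch of a fetchipfs call; any one of the hash arguments satisfies the hasHash guard above. The content identifier here is hypothetical:

  fetchipfs {
    ipfs = "QmYwAPJzv5CZsnAzt8auVZRn1pfejoxBMXqgRHJg5J4ZwB"; # hypothetical CID
    sha256 = lib.fakeSha256;                                 # placeholder hash
  }
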
@@ -8,16 +8,11 @@ let
"https://oss.sonatype.org/content/repositories/public"
"https://repo.typesafe.com/typesafe/releases"
];
in

args@
{ # Example: "org.apache.httpcomponents"
groupId
, # Example: "httpclient"
artifactId
, # Example: "4.3.6"
version
, # Example: "jdk11"
in args@{ # Example: "org.apache.httpcomponents"
groupId, # Example: "httpclient"
artifactId, # Example: "4.3.6"
version, # Example: "jdk11"
classifier ? null
, # List of maven repositories from where to fetch the artifact.
# Example: [ http://oss.sonatype.org/content/repositories/public ].

@@ -25,11 +20,9 @@ args@
# The `url` and `urls` parameters, if specified should point to the JAR
# file and will take precedence over the `repos` parameter. Only one of `url`
# and `urls` can be specified, not both.
, url ? ""
, urls ? []
, url ? "", urls ? [ ]
, # The rest of the arguments are just forwarded to `fetchurl`.
...
}:
... }:

# only one of url and urls can be specified at a time.
assert (url == "") || (urls == [ ]);

@@ -37,7 +30,8 @@ assert (url == "") || (urls == []);
assert (repos != [ ]) || (url != "") || (urls != [ ]);

let
pname = (lib.replaceStrings [ "." ] [ "_" ] groupId) + "_" + (lib.replaceStrings [ "." ] [ "_" ] artifactId);
pname = (lib.replaceStrings [ "." ] [ "_" ] groupId) + "_"
+ (lib.replaceStrings [ "." ] [ "_" ] artifactId);
suffix = lib.optionalString (classifier != null) "-${classifier}";
filename = "${artifactId}-${version}${suffix}.jar";
mkJarUrl = repoUrl:

@@ -48,17 +42,24 @@ let
version
filename
];
urls_ =
if url != "" then [url]
else if urls != [] then urls
else map mkJarUrl repos;
jar =
fetchurl (
builtins.removeAttrs args [ "groupId" "artifactId" "version" "classifier" "repos" "url" ]
// { urls = urls_; name = "${pname}-${version}.jar"; }
);
in
stdenv.mkDerivation {
urls_ = if url != "" then
[ url ]
else if urls != [ ] then
urls
else
map mkJarUrl repos;
jar = fetchurl (builtins.removeAttrs args [
"groupId"
"artifactId"
"version"
"classifier"
"repos"
"url"
] // {
urls = urls_;
name = "${pname}-${version}.jar";
});
in stdenv.mkDerivation {
inherit pname version;
dontUnpack = true;
# By moving the jar to $out/share/java we make it discoverable by java

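Using the example coordinates from the comments above (the hash is a placeholder; remaining arguments are forwarded to fetchurl):

  fetchMavenArtifact {
    groupId = "org.apache.httpcomponents";
    artifactId = "httpclient";
    version = "4.3.6";
    sha256 = lib.fakeSha256; # placeholder hash
  }
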
@@ -1,12 +1,12 @@
# You can specify some extra mirrors and a cache DB via options
{lib, stdenvNoCC, monotone, defaultDBMirrors ? [], cacheDB ? "./mtn-checkout.db"}:
{ lib, stdenvNoCC, monotone, defaultDBMirrors ? [ ]
, cacheDB ? "./mtn-checkout.db" }:
# dbs is a list of strings
# each is an url for sync

# selector is mtn selector, like h:org.example.branch
#
{name ? "mtn-checkout", dbs ? [], sha256
, selector ? "h:" + branch, branch}:
{ name ? "mtn-checkout", dbs ? [ ], sha256, selector ? "h:" + branch, branch }:

stdenvNoCC.mkDerivation {
builder = ./builder.sh;

@@ -1,14 +1,6 @@
{ stdenv, fetchzip, applyPatches, lib, ... }:
{ url
, hash ? ""
, sha256 ? ""
, appName ? null
, appVersion ? null
, license
, patches ? [ ]
, description ? null
, homepage ? null
}:
{ url, hash ? "", sha256 ? "", appName ? null, appVersion ? null, license
, patches ? [ ], description ? null, homepage ? null }:
applyPatches ({
inherit patches;
src = fetchzip {

@@ -27,9 +19,7 @@ applyPatches ({
inherit homepage;
} // lib.optionalAttrs (description != null) {
longDescription = description;
} // lib.optionalAttrs (homepage != null) {
inherit homepage;
};
} // lib.optionalAttrs (homepage != null) { inherit homepage; };
};
} // lib.optionalAttrs (appName != null && appVersion != null) {
name = "nextcloud-app-${appName}-${appVersion}";

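A sketch of fetching a store app; the URL is hypothetical, and the license value is assumed to be a lib.licenses attribute name as used elsewhere in nixpkgs:

  fetchNextcloudApp {
    url = "https://example.org/releases/someapp-1.0.0.tar.gz"; # hypothetical release tarball
    license = "agpl3Plus"; # assumed license key
    hash = lib.fakeHash;   # placeholder hash
  }
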
@@ -6,16 +6,10 @@

{ lib, fetchurl, patchutils }:

{ relative ? null
, stripLen ? 0
{ relative ? null, stripLen ? 0
, decode ? "cat" # custom command to decode patch e.g. base64 -d
, extraPrefix ? null
, excludes ? []
, includes ? []
, revert ? false
, postFetch ? ""
, ...
}@args:
, extraPrefix ? null, excludes ? [ ], includes ? [ ], revert ? false
, postFetch ? "", ... }@args:
let
args' = if relative != null then {
stripLen = 1 + lib.length (lib.splitString "/" relative) + stripLen;

@@ -23,12 +17,9 @@ let
} else {
inherit stripLen extraPrefix;
};
in let
inherit (args') stripLen extraPrefix;
in
lib.throwIfNot (excludes == [] || includes == [])
"fetchpatch: cannot use excludes and includes simultaneously"
fetchurl ({
in let inherit (args') stripLen extraPrefix;
in lib.throwIfNot (excludes == [ ] || includes == [ ])
"fetchpatch: cannot use excludes and includes simultaneously" fetchurl ({
postFetch = ''
tmpfile="$TMPDIR/patch"

@@ -40,7 +31,9 @@ fetchurl ({
set +e
${decode} < "$out" > "$tmpfile"
if [ $? -ne 0 ] || [ ! -s "$tmpfile" ]; then
echo 'Failed to decode patch with command "'${lib.escapeShellArg decode}'"' >&2
echo 'Failed to decode patch with command "'${
lib.escapeShellArg decode
}'"' >&2
echo 'Fetched file was (limited to 128 bytes):' >&2
od -A x -t x1z -v -N 128 "$out" >&2
exit 1

@@ -49,17 +42,22 @@ fetchurl ({
mv "$tmpfile" "$out"

"${patchutils}/bin/lsdiff" \
${lib.optionalString (relative != null) "-p1 -i ${lib.escapeShellArg relative}/'*'"} \
${
lib.optionalString (relative != null)
"-p1 -i ${lib.escapeShellArg relative}/'*'"
} \
"$out" \
| sort -u | sed -e 's/[*?]/\\&/g' \
| xargs -I{} \
"${patchutils}/bin/filterdiff" \
--include={} \
--strip=${toString stripLen} \
${lib.optionalString (extraPrefix != null) ''
${
lib.optionalString (extraPrefix != null) ''
--addoldprefix=a/${lib.escapeShellArg extraPrefix} \
--addnewprefix=b/${lib.escapeShellArg extraPrefix} \
''} \
''
} \
--clean "$out" > "$tmpfile"

if [ ! -s "$tmpfile" ]; then

@@ -72,8 +70,14 @@ fetchurl ({

${patchutils}/bin/filterdiff \
-p1 \
${builtins.toString (builtins.map (x: "-x ${lib.escapeShellArg x}") excludes)} \
${builtins.toString (builtins.map (x: "-i ${lib.escapeShellArg x}") includes)} \
${
builtins.toString
(builtins.map (x: "-x ${lib.escapeShellArg x}") excludes)
} \
${
builtins.toString
(builtins.map (x: "-i ${lib.escapeShellArg x}") includes)
} \
"$tmpfile" > "$out"

if [ ! -s "$out" ]; then

@@ -88,6 +92,12 @@ fetchurl ({
mv "$tmpfile" "$out"
'' + postFetch;
} // builtins.removeAttrs args [
"relative" "stripLen" "decode" "extraPrefix" "excludes" "includes" "revert"
"relative"
"stripLen"
"decode"
"extraPrefix"
"excludes"
"includes"
"revert"
"postFetch"
])

@@ -1,35 +1,49 @@
{ testers, fetchpatch, ... }:

let
isFetchpatch2 = fetchpatch.version == 2;
in
let isFetchpatch2 = fetchpatch.version == 2;

{
in {
simple = testers.invalidateFetcherByDrvHash fetchpatch {
url = "https://github.com/facebook/zstd/pull/2724/commits/e1f85dbca3a0ed5ef06c8396912a0914db8dea6a.patch";
sha256 = if isFetchpatch2 then "sha256-01BrkHLye4KOdqCw3tv7AJzIF6578pl2fl270TJFTmw=" else "sha256-PuYAqnJWAE+L9bsroOnnBGJhERW8LHrGSLtIEkKU9vg=";
url =
"https://github.com/facebook/zstd/pull/2724/commits/e1f85dbca3a0ed5ef06c8396912a0914db8dea6a.patch";
sha256 = if isFetchpatch2 then
"sha256-01BrkHLye4KOdqCw3tv7AJzIF6578pl2fl270TJFTmw="
else
"sha256-PuYAqnJWAE+L9bsroOnnBGJhERW8LHrGSLtIEkKU9vg=";
};

relative = testers.invalidateFetcherByDrvHash fetchpatch {
url = "https://github.com/boostorg/math/commit/7d482f6ebc356e6ec455ccb5f51a23971bf6ce5b.patch";
url =
"https://github.com/boostorg/math/commit/7d482f6ebc356e6ec455ccb5f51a23971bf6ce5b.patch";
relative = "include";
sha256 = if isFetchpatch2 then "sha256-1TtmuKeNIl/Yp+sfzBMR8Ue78tPIgjqGgjasa5IN52o=" else "sha256-KlmIbixcds6GyKYt1fx5BxDIrU7msrgDdYo9Va/KJR4=";
sha256 = if isFetchpatch2 then
"sha256-1TtmuKeNIl/Yp+sfzBMR8Ue78tPIgjqGgjasa5IN52o="
else
"sha256-KlmIbixcds6GyKYt1fx5BxDIrU7msrgDdYo9Va/KJR4=";
};

full = testers.invalidateFetcherByDrvHash fetchpatch {
url = "https://github.com/boostorg/math/commit/7d482f6ebc356e6ec455ccb5f51a23971bf6ce5b.patch";
url =
"https://github.com/boostorg/math/commit/7d482f6ebc356e6ec455ccb5f51a23971bf6ce5b.patch";
relative = "test";
stripLen = 1;
extraPrefix = "foo/bar/";
excludes = [ "foo/bar/bernoulli_no_atomic_mp.cpp" ];
revert = true;
sha256 = if isFetchpatch2 then "sha256-+UKmEbr2rIAweCav/hR/7d4ZrYV84ht/domTrHtm8sM=" else "sha256-+UKmEbr2rIAweCav/hR/7d4ZrYV84ht/domTrHtm8sM=";
sha256 = if isFetchpatch2 then
"sha256-+UKmEbr2rIAweCav/hR/7d4ZrYV84ht/domTrHtm8sM="
else
"sha256-+UKmEbr2rIAweCav/hR/7d4ZrYV84ht/domTrHtm8sM=";
};

decode = testers.invalidateFetcherByDrvHash fetchpatch {
name = "gcc.patch";
url = "https://chromium.googlesource.com/aosp/platform/external/libchrome/+/f37ae3b1a873d74182a2ac31d96742ead9c1f523^!?format=TEXT";
url =
"https://chromium.googlesource.com/aosp/platform/external/libchrome/+/f37ae3b1a873d74182a2ac31d96742ead9c1f523^!?format=TEXT";
decode = "base64 -d";
sha256 = if isFetchpatch2 then "sha256-oMvPlmzE51ArI+EvFxONXkqmNee39106/O1ikG0Bdso=" else "sha256-SJHk8XrutqAyoIdORlhCpBCN626P+uzed7mjKz5eQYY=";
sha256 = if isFetchpatch2 then
"sha256-oMvPlmzE51ArI+EvFxONXkqmNee39106/O1ikG0Bdso="
else
"sha256-SJHk8XrutqAyoIdORlhCpBCN626P+uzed7mjKz5eQYY=";
};
}

@@ -1,12 +1,7 @@
{ lib, stdenvNoCC, pijul, cacert }:

lib.makeOverridable (
{ url
, hash ? ""
, change ? null
, state ? null
, channel ? "main"
, name ? "fetchpijul"
lib.makeOverridable ({ url, hash ? "", change ? null, state ? null
, channel ? "main", name ? "fetchpijul"
, # TODO: Changes in pijul are unordered so there's many ways to end up with the same repository state.
# This makes leaveDotPijul unfeasible to implement until pijul CLI implements
# a way of reordering changes to sort them in a consistent and deterministic manner.

@@ -47,13 +42,9 @@ else

outputHashAlgo = if hash != "" then null else "sha256";
outputHashMode = "recursive";
outputHash = if hash != "" then
hash
else
lib.fakeSha256;
outputHash = if hash != "" then hash else lib.fakeSha256;

inherit url change state channel;

impureEnvVars = lib.fetchers.proxyImpureEnvVars;
}
)
})

@@ -1,28 +1,33 @@
# `fetchPypi` function for fetching artifacts from PyPI.
{ fetchurl
, makeOverridable
}:
{ fetchurl, makeOverridable }:

let
computeUrl = {format ? "setuptools", ... } @attrs: let
computeWheelUrl = {pname, version, dist ? "py2.py3", python ? "py2.py3", abi ? "none", platform ? "any"}:
computeUrl = { format ? "setuptools", ... }@attrs:
let
computeWheelUrl = { pname, version, dist ? "py2.py3", python ? "py2.py3"
, abi ? "none", platform ? "any" }:
# Fetch a wheel. By default we fetch an universal wheel.
# See https://www.python.org/dev/peps/pep-0427/#file-name-convention for details regarding the optional arguments.
"https://files.pythonhosted.org/packages/${dist}/${builtins.substring 0 1 pname}/${pname}/${pname}-${version}-${python}-${abi}-${platform}.whl";
"https://files.pythonhosted.org/packages/${dist}/${
builtins.substring 0 1 pname
}/${pname}/${pname}-${version}-${python}-${abi}-${platform}.whl";

computeSourceUrl = { pname, version, extension ? "tar.gz" }:
# Fetch a source tarball.
"mirror://pypi/${builtins.substring 0 1 pname}/${pname}/${pname}-${version}.${extension}";
"mirror://pypi/${
builtins.substring 0 1 pname
}/${pname}/${pname}-${version}.${extension}";

compute = (if format == "wheel" then computeWheelUrl
else if format == "setuptools" then computeSourceUrl
else throw "Unsupported format ${format}");
compute = (if format == "wheel" then
computeWheelUrl
else if format == "setuptools" then
computeSourceUrl
else
throw "Unsupported format ${format}");

in compute (builtins.removeAttrs attrs [ "format" ]);

in makeOverridable( {format ? "setuptools", sha256 ? "", hash ? "", ... } @attrs:
let
url = computeUrl (builtins.removeAttrs attrs ["sha256" "hash"]) ;
in fetchurl {
inherit url sha256 hash;
})
in makeOverridable
({ format ? "setuptools", sha256 ? "", hash ? "", ... }@attrs:
let url = computeUrl (builtins.removeAttrs attrs [ "sha256" "hash" ]);
in fetchurl { inherit url sha256 hash; })

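Both URL computations above can be exercised from the call site; a sketch for a hypothetical package, once as a source tarball and once as a universal wheel (placeholder hashes):

  src = fetchPypi {
    pname = "example";   # hypothetical package
    version = "1.0.0";   # hypothetical version
    hash = lib.fakeHash; # placeholder; URL resolved via computeSourceUrl
  };

  wheel = fetchPypi {
    pname = "example";
    version = "1.0.0";
    format = "wheel";    # selects computeWheelUrl
    hash = lib.fakeHash; # placeholder hash
  };
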
@@ -1,8 +1,5 @@
# Fetch from PyPi legacy API as documented in https://warehouse.pypa.io/api-reference/legacy.html
{ runCommand
, lib
, python3
}:
{ runCommand, lib, python3 }:
{
# package name
pname,

@@ -15,21 +12,16 @@
# SRI hash
hash,
# allow overriding the derivation name
name ? null,
}:
name ? null, }:
let
urls' = urls ++ lib.optional (url != null) url;

pathParts = lib.filter ({ prefix, path }: "NETRC" == prefix) builtins.nixPath;
netrc_file =
if (pathParts != [ ])
then (lib.head pathParts).path
else "";
netrc_file = if (pathParts != [ ]) then (lib.head pathParts).path else "";

in
# Assert that we have at least one URL
assert urls' != [ ]; runCommand file
({
in assert urls' != [ ];
runCommand file ({
nativeBuildInputs = [ python3 ];
impureEnvVars = lib.fetchers.proxyImpureEnvVars;
outputHashMode = "flat";

@@ -37,9 +29,10 @@ assert urls' != [ ]; runCommand file
outputHashAlgo = if hash == "" then "sha256" else null;
outputHash = hash;
NETRC = netrc_file;
}
// (lib.optionalAttrs (name != null) {inherit name;}))
''
python ${./fetch-legacy.py} ${lib.concatStringsSep " " (map (url: "--url ${lib.escapeShellArg url}") urls')} --pname ${pname} --filename ${file}
} // (lib.optionalAttrs (name != null) { inherit name; })) ''
python ${./fetch-legacy.py} ${
lib.concatStringsSep " "
(map (url: "--url ${lib.escapeShellArg url}") urls')
} --pname ${pname} --filename ${file}
mv ${file} $out
''

@@ -1,10 +1,12 @@
{ fetchzip }:

# gitweb example, snapshot support is optional in gitweb
{ repo, rev, name ? "source"
, ... # For hash agility
}@args: fetchzip ({
{ repo, rev, name ? "source", ... # For hash agility
}@args:
fetchzip ({
inherit name;
url = "https://repo.or.cz/${repo}.git/snapshot/${rev}.tar.gz";
meta.homepage = "https://repo.or.cz/${repo}.git/";
} // removeAttrs args [ "repo" "rev" ]) // { inherit rev; }
} // removeAttrs args [ "repo" "rev" ]) // {
inherit rev;
}

@@ -3,19 +3,13 @@
{ name, manifest, rev ? "HEAD", sha256
# Optional parameters:
, repoRepoURL ? "", repoRepoRev ? "", referenceDir ? "", manifestName ? ""
, localManifests ? [], createMirror ? false, useArchive ? false
}:
, localManifests ? [ ], createMirror ? false, useArchive ? false }:

assert repoRepoRev != "" -> repoRepoURL != "";
assert createMirror -> !useArchive;

let
inherit (lib)
concatMapStringsSep
concatStringsSep
fetchers
optionalString
;
inherit (lib) concatMapStringsSep concatStringsSep fetchers optionalString;

extraRepoInitFlags = [
(optionalString (repoRepoURL != "") "--repo-url=${repoRepoURL}")

@@ -46,9 +40,8 @@ in stdenvNoCC.mkDerivation {
preferLocalBuild = true;
enableParallelBuilding = true;

impureEnvVars = fetchers.proxyImpureEnvVars ++ [
"GIT_PROXY_COMMAND" "SOCKS_SERVER"
];
impureEnvVars = fetchers.proxyImpureEnvVars
++ [ "GIT_PROXY_COMMAND" "SOCKS_SERVER" ];

nativeBuildInputs = [ gitRepo cacert ];

@@ -64,7 +57,9 @@ in stdenvNoCC.mkDerivation {
mkdir .repo
${optionalString (local_manifests != [ ]) ''
mkdir .repo/local_manifests
for local_manifest in ${concatMapStringsSep " " toString local_manifests}; do
for local_manifest in ${
concatMapStringsSep " " toString local_manifests
}; do
cp $local_manifest .repo/local_manifests/$(stripHash $local_manifest)
done
''}

@@ -1,13 +1,8 @@
{ lib, runCommand, awscli }:

{ s3url
, name ? builtins.baseNameOf s3url
, sha256
, region ? "us-east-1"
{ s3url, name ? builtins.baseNameOf s3url, sha256, region ? "us-east-1"
, credentials ? null # Default to looking at local EC2 metadata service
, recursiveHash ? false
, postFetch ? null
}:
, recursiveHash ? false, postFetch ? null }:

let
mkCredentials = { access_key_id, secret_access_key, session_token ? null }: {

@@ -16,7 +11,8 @@ let
AWS_SESSION_TOKEN = session_token;
};

credentialAttrs = lib.optionalAttrs (credentials != null) (mkCredentials credentials);
credentialAttrs =
lib.optionalAttrs (credentials != null) (mkCredentials credentials);
in runCommand name ({
nativeBuildInputs = [ awscli ];

@@ -2,11 +2,13 @@

lib.makeOverridable (
# cgit example, snapshot support is optional in cgit
{ repo, rev, name ? "source"
, ... # For hash agility
}@args: fetchzip ({
{ repo, rev, name ? "source", ... # For hash agility
}@args:
fetchzip ({
inherit name;
url = "https://git.savannah.gnu.org/cgit/${repo}.git/snapshot/${repo}-${rev}.tar.gz";
url =
"https://git.savannah.gnu.org/cgit/${repo}.git/snapshot/${repo}-${rev}.tar.gz";
meta.homepage = "https://git.savannah.gnu.org/cgit/${repo}.git/";
} // removeAttrs args [ "repo" "rev" ]) // { inherit rev; }
)
} // removeAttrs args [ "repo" "rev" ]) // {
inherit rev;
})

@@ -1,21 +1,9 @@
{ fetchgit, fetchhg, fetchzip, lib }:

let
inherit (lib)
assertOneOf
makeOverridable
optionalString
;
in
let inherit (lib) assertOneOf makeOverridable optionalString;

makeOverridable (
{ owner
, repo, rev
, domain ? "sr.ht"
, vc ? "git"
, name ? "source"
, fetchSubmodules ? false
, ... # For hash agility
in makeOverridable ({ owner, repo, rev, domain ? "sr.ht", vc ? "git"
, name ? "source", fetchSubmodules ? false, ... # For hash agility
}@args:

assert (assertOneOf "vc" vc [ "hg" "git" ]);

@@ -26,7 +14,13 @@ let
baseArgs = {
inherit name;
} // removeAttrs args [
"owner" "repo" "rev" "domain" "vc" "name" "fetchSubmodules"
"owner"
"repo"
"rev"
"domain"
"vc"
"name"
"fetchSubmodules"
];
vcArgs = baseArgs // {
inherit rev;

@@ -49,14 +43,11 @@ let
postFetch = optionalString (vc == "hg") ''
rm -f "$out/.hg_archival.txt"
''; # impure file; see #12002
passthru = {
gitRepoUrl = urlFor "git";
};
passthru = { gitRepoUrl = urlFor "git"; };
};
};
};
in cases.${fetcher}.fetch cases.${fetcher}.arguments // {
inherit rev;
meta.homepage = "${baseUrl}";
}
)
})

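The vc argument selects between the git and hg cases above; sourcehut owners carry a leading tilde. A sketch with a hypothetical repository and a placeholder hash:

  src = fetchFromSourcehut {
    owner = "~someuser"; # hypothetical
    repo = "somerepo";   # hypothetical
    rev = "v0.2";        # hypothetical tag
    vc = "git";          # "hg" is the other accepted value
    hash = lib.fakeHash; # placeholder hash
  };
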
@@ -1,11 +1,8 @@
{ lib, stdenvNoCC, buildPackages
, subversion, glibcLocales, sshSupport ? true, openssh ? null
}:
{ lib, stdenvNoCC, buildPackages, subversion, glibcLocales, sshSupport ? true
, openssh ? null }:

{ url, rev ? "HEAD", sha256 ? "", hash ? ""
, ignoreExternals ? false, ignoreKeywords ? false, name ? null
, preferLocalBuild ? true
}:
{ url, rev ? "HEAD", sha256 ? "", hash ? "", ignoreExternals ? false
, ignoreKeywords ? false, name ? null, preferLocalBuild ? true }:

assert sshSupport -> openssh != null;

@@ -19,20 +16,22 @@ let
(p: if head p == "" then tail p else p) # ~ drop final slash if any
(reverseList (splitString "/" url));
path = [ (removeSuffix "/" (head path_)) ] ++ (tail path_);
in
# ../repo/trunk -> repo
if fst path == "trunk" then snd path
in if fst path == "trunk" then
snd path
# ../repo/branches/branch -> repo-branch
else if snd path == "branches" then "${trd path}-${fst path}"
else if snd path == "branches" then
"${trd path}-${fst path}"
# ../repo/tags/tag -> repo-tag
else if snd path == "tags" then "${trd path}-${fst path}"
else if snd path == "tags" then
"${trd path}-${fst path}"
# ../repo (no trunk) -> repo
else fst path;
else
fst path;

name_ = if name == null then "${repoName}-r${toString rev}" else name;
in

if hash != "" && sha256 != "" then
in if hash != "" && sha256 != "" then
throw "Only one of sha256 or hash can be set"
else
stdenvNoCC.mkDerivation {

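The repoName logic above derives the default derivation name from the URL layout. A trunk checkout sketch with a hypothetical repository; per the guard above, sha256 and hash are mutually exclusive:

  src = fetchsvn {
    url = "https://svn.example.org/repos/project/trunk"; # hypothetical; default name becomes project-r1234
    rev = 1234;                                          # hypothetical revision
    sha256 = lib.fakeSha256;                             # placeholder hash
  };
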
@@ -1,9 +1,8 @@
runCommand: subversion: repository:
import (runCommand "head-revision"
{ buildInputs = [ subversion ];
import (runCommand "head-revision" {
buildInputs = [ subversion ];
dummy = builtins.currentTime;
}
''
} ''
rev=$(echo p | svn ls -v --depth empty ${repository} |awk '{ print $1 }')
echo "[ \"$rev\" ]" > $out
echo Latest revision is $rev

@@ -1,7 +1,6 @@
{ stdenvNoCC, subversion, sshSupport ? true, openssh ? null, expect }:
{ username, password, url, rev ? "HEAD", sha256 ? "" }:

stdenvNoCC.mkDerivation {
name = "svn-export-ssh";
builder = ./builder.sh;

@@ -1,21 +1,17 @@
{ lib, runCommand, transmission_noSystemd, rqbit, writeShellScript, formats, cacert, rsync }:
let
urlRegexp = ''.*xt=urn:bt[im]h:([^&]{64}|[^&]{40}).*'';
in
{ url
, name ?
if (builtins.match urlRegexp url) == null then
{ lib, runCommand, transmission_noSystemd, rqbit, writeShellScript, formats
, cacert, rsync }:
let urlRegexp = ".*xt=urn:bt[im]h:([^&]{64}|[^&]{40}).*";
in { url, name ? if (builtins.match urlRegexp url) == null then
"bittorrent"
else
"bittorrent-" + builtins.head (builtins.match urlRegexp url)
, config ? if (backend == "transmission") then { } else throw "json config for configuring fetchFromBitorrent only works with the transmission backend"
, hash
, backend ? "transmission"
, recursiveHash ? true
, postFetch ? ""
, postUnpack ? ""
, meta ? {}
}:
"bittorrent-" + builtins.head (builtins.match urlRegexp url), config ?
if (backend == "transmission") then
{ }
else
throw
"json config for configuring fetchFromBitorrent only works with the transmission backend"
, hash, backend ? "transmission", recursiveHash ? true, postFetch ? ""
, postUnpack ? "", meta ? { } }:
let
afterSuccess = writeShellScript "fetch-bittorrent-done.sh" ''
${postUnpack}

@@ -29,10 +25,15 @@ let
kill $PPID
'';
jsonConfig = (formats.json { }).generate "jsonConfig" config;
in
runCommand name {
in runCommand name {
inherit meta;
nativeBuildInputs = [ cacert ] ++ (if (backend == "transmission" ) then [ transmission_noSystemd ] else if (backend == "rqbit") then [ rqbit ] else throw "rqbit or transmission are the only available backends for fetchtorrent");
nativeBuildInputs = [ cacert ] ++ (if (backend == "transmission") then
[ transmission_noSystemd ]
else if (backend == "rqbit") then
[ rqbit ]
else
throw
"rqbit or transmission are the only available backends for fetchtorrent");
outputHashAlgo = if hash != "" then null else "sha256";
outputHash = hash;
outputHashMode = if recursiveHash then "recursive" else "flat";

@@ -41,8 +42,7 @@ runCommand name {
# by external tools, such as tools that may want to seed fetchtorrent calls
# in nixpkgs
inherit url;
}
(if (backend == "transmission") then ''
} (if (backend == "transmission") then ''
export HOME=$TMP
export downloadedDirectory=$out/downloadedDirectory
mkdir -p $downloadedDirectory

@@ -55,8 +55,7 @@ runCommand name {
}
trap handleChild CHLD
transmission-cli --port $(shuf -n 1 -i 49152-65535) --portmap --finish ${afterSuccess} --download-dir $downloadedDirectory --config-dir "$HOME"/.config/transmission "$url"
'' else
''
'' else ''
export HOME=$TMP
rqbit --disable-dht-persistence --http-api-listen-addr "127.0.0.1:$(shuf -n 1 -i 49152-65535)" download -o $out --exit-on-finish "$url"
'')

@@ -17,9 +17,8 @@ let
free = true; # no use in advertisement
}
];
in

{
in {
http-link = testers.invalidateFetcherByDrvHash fetchtorrent {
url = "https://webtorrent.io/torrents/wired-cd.torrent";
hash = "sha256-OCsC22WuanqoN6lPv5wDT5ZxPcEHDpZ1EgXGvz1SDYo=";

@@ -27,7 +26,8 @@ in
inherit (wired-cd) meta;
};
magnet-link = testers.invalidateFetcherByDrvHash fetchtorrent {
url = "magnet:?xt=urn:btih:a88fda5954e89178c372716a6a78b8180ed4dad3&dn=The+WIRED+CD+-+Rip.+Sample.+Mash.+Share&tr=udp%3A%2F%2Fexplodie.org%3A6969&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969&tr=udp%3A%2F%2Ftracker.empire-js.us%3A1337&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337&tr=wss%3A%2F%2Ftracker.btorrent.xyz&tr=wss%3A%2F%2Ftracker.fastcast.nz&tr=wss%3A%2F%2Ftracker.openwebtorrent.com&ws=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2F&xs=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2Fwired-cd.torrent";
url =
"magnet:?xt=urn:btih:a88fda5954e89178c372716a6a78b8180ed4dad3&dn=The+WIRED+CD+-+Rip.+Sample.+Mash.+Share&tr=udp%3A%2F%2Fexplodie.org%3A6969&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969&tr=udp%3A%2F%2Ftracker.empire-js.us%3A1337&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337&tr=wss%3A%2F%2Ftracker.btorrent.xyz&tr=wss%3A%2F%2Ftracker.fastcast.nz&tr=wss%3A%2F%2Ftracker.openwebtorrent.com&ws=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2F&xs=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2Fwired-cd.torrent";
hash = "sha256-OCsC22WuanqoN6lPv5wDT5ZxPcEHDpZ1EgXGvz1SDYo=";
backend = "transmission";
inherit (wired-cd) meta;

@@ -39,7 +39,8 @@ in
inherit (wired-cd) meta;
};
magnet-link-rqbit = testers.invalidateFetcherByDrvHash fetchtorrent {
url = "magnet:?xt=urn:btih:a88fda5954e89178c372716a6a78b8180ed4dad3&dn=The+WIRED+CD+-+Rip.+Sample.+Mash.+Share&tr=udp%3A%2F%2Fexplodie.org%3A6969&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969&tr=udp%3A%2F%2Ftracker.empire-js.us%3A1337&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337&tr=wss%3A%2F%2Ftracker.btorrent.xyz&tr=wss%3A%2F%2Ftracker.fastcast.nz&tr=wss%3A%2F%2Ftracker.openwebtorrent.com&ws=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2F&xs=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2Fwired-cd.torrent";
url =
"magnet:?xt=urn:btih:a88fda5954e89178c372716a6a78b8180ed4dad3&dn=The+WIRED+CD+-+Rip.+Sample.+Mash.+Share&tr=udp%3A%2F%2Fexplodie.org%3A6969&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969&tr=udp%3A%2F%2Ftracker.empire-js.us%3A1337&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337&tr=wss%3A%2F%2Ftracker.btorrent.xyz&tr=wss%3A%2F%2Ftracker.fastcast.nz&tr=wss%3A%2F%2Ftracker.openwebtorrent.com&ws=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2F&xs=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2Fwired-cd.torrent";
hash = "sha256-OCsC22WuanqoN6lPv5wDT5ZxPcEHDpZ1EgXGvz1SDYo=";
backend = "rqbit";
inherit (wired-cd) meta;

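The tests above can be reused directly as a call template; this sketch repeats their torrent URL and hash verbatim:

  fetchtorrent {
    url = "https://webtorrent.io/torrents/wired-cd.torrent";
    hash = "sha256-OCsC22WuanqoN6lPv5wDT5ZxPcEHDpZ1EgXGvz1SDYo=";
    backend = "transmission"; # "rqbit" is the other supported backend
  }
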
@@ -1,13 +1,9 @@
let mirrors = import ./mirrors.nix; in
let mirrors = import ./mirrors.nix;

{ system }:
in { system }:

{ url ? builtins.head urls
, urls ? []
, sha256 ? ""
, hash ? ""
, name ? baseNameOf (toString url)
}:
{ url ? builtins.head urls, urls ? [ ], sha256 ? "", hash ? ""
, name ? baseNameOf (toString url) }:

# assert exactly one hash is set
assert hash != "" || sha256 != "";

@@ -19,7 +15,9 @@ import <nix/fetchurl.nix> {
url =
# Handle mirror:// URIs. Since <nix/fetchurl.nix> currently
# supports only one URI, use the first listed mirror.
let m = builtins.match "mirror://([a-z]+)/(.*)" url; in
if m == null then url
else builtins.head (mirrors.${builtins.elemAt m 0}) + (builtins.elemAt m 1);
let m = builtins.match "mirror://([a-z]+)/(.*)" url;
in if m == null then
url
else
builtins.head (mirrors.${builtins.elemAt m 0}) + (builtins.elemAt m 1);
}

@@ -11,8 +11,7 @@ let
# fetchurl instantiations via environment variables. This makes the
# resulting store derivations (.drv files) much smaller, which in
# turn makes nix-env/nix-instantiate faster.
mirrorsFile =
buildPackages.stdenvNoCC.mkDerivation ({
mirrorsFile = buildPackages.stdenvNoCC.mkDerivation ({
name = "mirrors-list";
strictDeps = true;
builder = ./write-mirror-list.sh;

@@ -36,9 +35,7 @@ let
"NIX_CONNECT_TIMEOUT"
] ++ (map (site: "NIX_MIRRORS_${site}") sites);

in

{ # URL to fetch.
in { # URL to fetch.
url ? ""

, # Alternatively, a list of URLs specifying alternative download

@@ -58,18 +55,13 @@ in
name ? ""

# for versioned downloads optionally take pname + version.
, pname ? ""
, version ? ""
, pname ? "", version ? ""

, # SRI hash.
hash ? ""

, # Legacy ways of specifying the hash.
outputHash ? ""
, outputHashAlgo ? ""
, sha1 ? ""
, sha256 ? ""
, sha512 ? ""
outputHash ? "", outputHashAlgo ? "", sha1 ? "", sha256 ? "", sha512 ? ""

, recursiveHash ? false

@@ -106,51 +98,67 @@ in
, preferLocalBuild ? true

# Additional packages needed as part of a fetch
, nativeBuildInputs ? [ ]
}:
, nativeBuildInputs ? [ ] }:

let
urls_ =
if urls != [] && url == "" then
(if lib.isList urls then urls
else throw "`urls` is not a list")
urls_ = if urls != [ ] && url == "" then
(if lib.isList urls then urls else throw "`urls` is not a list")
else if urls == [ ] && url != "" then
(if lib.isString url then [url]
else throw "`url` is not a string")
else throw "fetchurl requires either `url` or `urls` to be set";
(if lib.isString url then [ url ] else throw "`url` is not a string")
else
throw "fetchurl requires either `url` or `urls` to be set";

hash_ =
if with lib.lists; length (filter (s: s != "") [ hash outputHash sha1 sha256 sha512 ]) > 1
then throw "multiple hashes passed to fetchurl" else
hash_ = if with lib.lists;
length (filter (s: s != "") [ hash outputHash sha1 sha256 sha512 ]) > 1 then
throw "multiple hashes passed to fetchurl"
else

if hash != "" then { outputHashAlgo = null; outputHash = hash; }
else if outputHash != "" then
if outputHashAlgo != "" then { inherit outputHashAlgo outputHash; }
else throw "fetchurl was passed outputHash without outputHashAlgo"
else if sha512 != "" then { outputHashAlgo = "sha512"; outputHash = sha512; }
else if sha256 != "" then { outputHashAlgo = "sha256"; outputHash = sha256; }
else if sha1 != "" then { outputHashAlgo = "sha1"; outputHash = sha1; }
else if cacert != null then { outputHashAlgo = "sha256"; outputHash = ""; }
else throw "fetchurl requires a hash for fixed-output derivation: ${lib.concatStringsSep ", " urls_}";
in
if hash != "" then {
outputHashAlgo = null;
outputHash = hash;
} else if outputHash != "" then
if outputHashAlgo != "" then {
inherit outputHashAlgo outputHash;
} else
throw "fetchurl was passed outputHash without outputHashAlgo"
else if sha512 != "" then {
outputHashAlgo = "sha512";
outputHash = sha512;
} else if sha256 != "" then {
outputHashAlgo = "sha256";
outputHash = sha256;
} else if sha1 != "" then {
outputHashAlgo = "sha1";
outputHash = sha1;
} else if cacert != null then {
outputHashAlgo = "sha256";
outputHash = "";
} else
throw "fetchurl requires a hash for fixed-output derivation: ${
lib.concatStringsSep ", " urls_
}";

assert (lib.isList curlOpts) -> lib.warn ''
fetchurl for ${toString (builtins.head urls_)}: curlOpts is a list (${lib.generators.toPretty { multiline = false; } curlOpts}), which is not supported anymore.
in assert (lib.isList curlOpts) -> lib.warn ''
fetchurl for ${toString (builtins.head urls_)}: curlOpts is a list (${
lib.generators.toPretty { multiline = false; } curlOpts
}), which is not supported anymore.
- If you wish to get the same effect as before, for elements with spaces (even if escaped) to expand to multiple curl arguments, use a string argument instead:
curlOpts = ${lib.strings.escapeNixString (toString curlOpts)};
- If you wish for each list element to be passed as a separate curl argument, allowing arguments to contain spaces, use curlOptsList instead:
curlOptsList = [ ${lib.concatMapStringsSep " " lib.strings.escapeNixString curlOpts} ];'' true;
curlOptsList = [ ${
lib.concatMapStringsSep " " lib.strings.escapeNixString curlOpts
} ];'' true;

stdenvNoCC.mkDerivation ((
if (pname != "" && version != "") then
{ inherit pname version; }
stdenvNoCC.mkDerivation ((if (pname != "" && version != "") then {
inherit pname version;
} else {
name = if showURLs then
"urls"
else if name != "" then
name
else
{ name =
if showURLs then "urls"
else if name != "" then name
else baseNameOf (toString (builtins.head urls_));
}
) // {
baseNameOf (toString (builtins.head urls_));
}) // {
builder = ./builder.sh;

nativeBuildInputs = [ curl ] ++ nativeBuildInputs;

@@ -164,11 +172,15 @@ stdenvNoCC.mkDerivation ((
# New-style output content requirements.
inherit (hash_) outputHashAlgo outputHash;

SSL_CERT_FILE = if (hash_.outputHash == "" || hash_.outputHash == lib.fakeSha256 || hash_.outputHash == lib.fakeSha512 || hash_.outputHash == lib.fakeHash)
then "${cacert}/etc/ssl/certs/ca-bundle.crt"
else "/no-cert-file.crt";
SSL_CERT_FILE = if (hash_.outputHash == "" || hash_.outputHash
== lib.fakeSha256 || hash_.outputHash == lib.fakeSha512 || hash_.outputHash
== lib.fakeHash) then
"${cacert}/etc/ssl/certs/ca-bundle.crt"
else
"/no-cert-file.crt";

outputHashMode = if (recursiveHash || executable) then "recursive" else "flat";
outputHashMode =
if (recursiveHash || executable) then "recursive" else "flat";

inherit curlOpts;
curlOptsList = lib.escapeShellArgs curlOptsList;

@@ -180,7 +192,9 @@ stdenvNoCC.mkDerivation ((
inherit preferLocalBuild;

postHook = if netrcPhase == null then null else ''
postHook = if netrcPhase == null then
null
else ''
${netrcPhase}
curlOpts="$curlOpts --netrc-file $PWD/netrc"
'';

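The hash_ dispatch above accepts exactly one hash-style argument; a minimal sketch using the SRI form together with a mirror:// URL, which the fetcher expands via the mirrors list (the path here is hypothetical):

  fetchurl {
    url = "mirror://gnu/hello/hello-2.12.tar.gz"; # hypothetical mirror path
    hash = lib.fakeHash;                          # placeholder SRI hash
  }
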