Compare commits

...

2 commits

Author        SHA1        Message                                                 Date
isabel roses  e0d98ae789  chore: add formatting commit to git-blame-ignore-revs  2024-06-30 09:17:54 +01:00
isabel roses  a505937410  style: formating                                        2024-06-30 09:16:52 +01:00
1438 changed files with 178898 additions and 131776 deletions

.git-blame-ignore-revs Normal file

@@ -0,0 +1,2 @@
# formatting
a5059374106b6b1148a3cc6673c27ec1829380ea
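For context on the two added lines above: listing the reformatting commit's full SHA here lets `git blame` skip it, either via `git config blame.ignoreRevsFile .git-blame-ignore-revs` or per invocation with `git blame --ignore-revs-file .git-blame-ignore-revs`, so the mass reformat does not become the last-touched author of every line.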


@@ -1,6 +1,8 @@
let requiredVersion = import ./lib/minver.nix; in
let
requiredVersion = import ./lib/minver.nix;
in
if ! builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
if !builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
abort ''
@@ -25,4 +27,4 @@ if ! builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.
else
import ./pkgs/top-level/impure.nix
import ./pkgs/top-level/impure.nix


@@ -1,16 +1,20 @@
{
outputs = { self, ... }:
outputs =
{ self, ... }:
let
forAllSystems = self.lib.genAttrs self.lib.systems.flakeExposed;
in
{
lib = import ./lib;
auxPackages = forAllSystems (system:
auxPackages = forAllSystems (
system:
(
let requiredVersion = import ./lib/minver.nix; in
let
requiredVersion = import ./lib/minver.nix;
in
if ! builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
if !builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
abort ''
This version of Nixpkgs requires Nix >= ${requiredVersion}, please upgrade:


@@ -1,99 +1,100 @@
{ "\t" = 9;
{
"\t" = 9;
"\n" = 10;
"\r" = 13;
" " = 32;
"!" = 33;
" " = 32;
"!" = 33;
"\"" = 34;
"#" = 35;
"$" = 36;
"%" = 37;
"&" = 38;
"'" = 39;
"(" = 40;
")" = 41;
"*" = 42;
"+" = 43;
"," = 44;
"-" = 45;
"." = 46;
"/" = 47;
"0" = 48;
"1" = 49;
"2" = 50;
"3" = 51;
"4" = 52;
"5" = 53;
"6" = 54;
"7" = 55;
"8" = 56;
"9" = 57;
":" = 58;
";" = 59;
"<" = 60;
"=" = 61;
">" = 62;
"?" = 63;
"@" = 64;
"A" = 65;
"B" = 66;
"C" = 67;
"D" = 68;
"E" = 69;
"F" = 70;
"G" = 71;
"H" = 72;
"I" = 73;
"J" = 74;
"K" = 75;
"L" = 76;
"M" = 77;
"N" = 78;
"O" = 79;
"P" = 80;
"Q" = 81;
"R" = 82;
"S" = 83;
"T" = 84;
"U" = 85;
"V" = 86;
"W" = 87;
"X" = 88;
"Y" = 89;
"Z" = 90;
"[" = 91;
"#" = 35;
"$" = 36;
"%" = 37;
"&" = 38;
"'" = 39;
"(" = 40;
")" = 41;
"*" = 42;
"+" = 43;
"," = 44;
"-" = 45;
"." = 46;
"/" = 47;
"0" = 48;
"1" = 49;
"2" = 50;
"3" = 51;
"4" = 52;
"5" = 53;
"6" = 54;
"7" = 55;
"8" = 56;
"9" = 57;
":" = 58;
";" = 59;
"<" = 60;
"=" = 61;
">" = 62;
"?" = 63;
"@" = 64;
"A" = 65;
"B" = 66;
"C" = 67;
"D" = 68;
"E" = 69;
"F" = 70;
"G" = 71;
"H" = 72;
"I" = 73;
"J" = 74;
"K" = 75;
"L" = 76;
"M" = 77;
"N" = 78;
"O" = 79;
"P" = 80;
"Q" = 81;
"R" = 82;
"S" = 83;
"T" = 84;
"U" = 85;
"V" = 86;
"W" = 87;
"X" = 88;
"Y" = 89;
"Z" = 90;
"[" = 91;
"\\" = 92;
"]" = 93;
"^" = 94;
"_" = 95;
"`" = 96;
"a" = 97;
"b" = 98;
"c" = 99;
"d" = 100;
"e" = 101;
"f" = 102;
"g" = 103;
"h" = 104;
"i" = 105;
"j" = 106;
"k" = 107;
"l" = 108;
"m" = 109;
"n" = 110;
"o" = 111;
"p" = 112;
"q" = 113;
"r" = 114;
"s" = 115;
"t" = 116;
"u" = 117;
"v" = 118;
"w" = 119;
"x" = 120;
"y" = 121;
"z" = 122;
"{" = 123;
"|" = 124;
"}" = 125;
"~" = 126;
"]" = 93;
"^" = 94;
"_" = 95;
"`" = 96;
"a" = 97;
"b" = 98;
"c" = 99;
"d" = 100;
"e" = 101;
"f" = 102;
"g" = 103;
"h" = 104;
"i" = 105;
"j" = 106;
"k" = 107;
"l" = 108;
"m" = 109;
"n" = 110;
"o" = 111;
"p" = 112;
"q" = 113;
"r" = 114;
"s" = 115;
"t" = 116;
"u" = 117;
"v" = 118;
"w" = 119;
"x" = 120;
"y" = 121;
"z" = 122;
"{" = 123;
"|" = 124;
"}" = 125;
"~" = 126;
}
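The table above is a plain attribute set mapping single-character strings to their ASCII codes, so values are looked up by attribute name. A minimal sketch, assuming the file is lib's ascii-table.nix (the path is not shown in this diff):

```nix
let
  # Assumption: the character table above is importable as ./ascii-table.nix.
  asciiTable = import ./ascii-table.nix;
in
{
  tab   = asciiTable."\t"; # => 9
  lower = asciiTable."a";  # => 97
  tilde = asciiTable."~";  # => 126
}
```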


@@ -36,10 +36,7 @@ rec {
:::
*/
# TODO(Profpatsch): add tests that check stderr
assertMsg =
pred:
msg:
pred || builtins.throw msg;
assertMsg = pred: msg: pred || builtins.throw msg;
/**
Specialized `assertMsg` for checking if `val` is one of the elements
@@ -81,14 +78,10 @@ rec {
:::
*/
assertOneOf =
name:
val:
xs:
assertMsg
(lib.elem val xs)
"${name} must be one of ${
lib.generators.toPretty {} xs}, but is: ${
lib.generators.toPretty {} val}";
name: val: xs:
assertMsg (lib.elem val xs) "${name} must be one of ${lib.generators.toPretty { } xs}, but is: ${
lib.generators.toPretty { } val
}";
/**
Specialized `assertMsg` for checking if every one of `vals` is one of the elements
@@ -133,12 +126,9 @@ rec {
:::
*/
assertEachOneOf =
name:
vals:
xs:
assertMsg
(lib.all (val: lib.elem val xs) vals)
"each element in ${name} must be one of ${
lib.generators.toPretty {} xs}, but is: ${
lib.generators.toPretty {} vals}";
name: vals: xs:
assertMsg (lib.all (val: lib.elem val xs) vals)
"each element in ${name} must be one of ${lib.generators.toPretty { } xs}, but is: ${
lib.generators.toPretty { } vals
}";
}
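A minimal usage sketch for the assertion helpers in the diff above, assuming `lib` is this library (e.g. `lib = import <nixpkgs/lib>`):

```nix
let
  lib = import <nixpkgs/lib>; # assumption: the library is on NIX_PATH as <nixpkgs/lib>
  backend = "sqlite";
in
# assertOneOf returns true, or throws a message listing the allowed values
assert lib.asserts.assertOneOf "backend" backend [ "sqlite" "postgres" ];
"using ${backend}" # => "using sqlite"
```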

File diff suppressed because it is too large


@@ -11,7 +11,6 @@ rec {
`toGNUCommandLineShell` returns an escaped shell string.
# Inputs
`options`
@@ -22,7 +21,6 @@ rec {
: 2\. Function argument
# Examples
:::{.example}
## `lib.cli.toGNUCommandLineShell` usage example
@@ -60,45 +58,51 @@ rec {
:::
*/
toGNUCommandLineShell =
options: attrs: lib.escapeShellArgs (toGNUCommandLine options attrs);
toGNUCommandLineShell = options: attrs: lib.escapeShellArgs (toGNUCommandLine options attrs);
toGNUCommandLine = {
# how to string-format the option name;
# by default one character is a short option (`-`),
# more than one characters a long option (`--`).
mkOptionName ?
k: if builtins.stringLength k == 1
then "-${k}"
else "--${k}",
toGNUCommandLine =
{
# how to string-format the option name;
# by default one character is a short option (`-`),
# more than one characters a long option (`--`).
mkOptionName ? k: if builtins.stringLength k == 1 then "-${k}" else "--${k}",
# how to format a boolean value to a command list;
# by default its a flag option
# (only the option name if true, left out completely if false).
mkBool ? k: v: lib.optional v (mkOptionName k),
# how to format a boolean value to a command list;
# by default its a flag option
# (only the option name if true, left out completely if false).
mkBool ? k: v: lib.optional v (mkOptionName k),
# how to format a list value to a command list;
# by default the option name is repeated for each value
# and `mkOption` is applied to the values themselves.
mkList ? k: v: lib.concatMap (mkOption k) v,
# how to format a list value to a command list;
# by default the option name is repeated for each value
# and `mkOption` is applied to the values themselves.
mkList ? k: v: lib.concatMap (mkOption k) v,
# how to format any remaining value to a command list;
# on the toplevel, booleans and lists are handled by `mkBool` and `mkList`,
# though they can still appear as values of a list.
# By default, everything is printed verbatim and complex types
# are forbidden (lists, attrsets, functions). `null` values are omitted.
mkOption ?
k: v: if v == null
then []
else [ (mkOptionName k) (lib.generators.mkValueStringDefault {} v) ]
# how to format any remaining value to a command list;
# on the toplevel, booleans and lists are handled by `mkBool` and `mkList`,
# though they can still appear as values of a list.
# By default, everything is printed verbatim and complex types
# are forbidden (lists, attrsets, functions). `null` values are omitted.
mkOption ?
k: v:
if v == null then
[ ]
else
[
(mkOptionName k)
(lib.generators.mkValueStringDefault { } v)
],
}:
options:
let
render = k: v:
if builtins.isBool v then mkBool k v
else if builtins.isList v then mkList k v
else mkOption k v;
let
render =
k: v:
if builtins.isBool v then
mkBool k v
else if builtins.isList v then
mkList k v
else
mkOption k v;
in
builtins.concatLists (lib.mapAttrsToList render options);
in
builtins.concatLists (lib.mapAttrsToList render options);
}
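To show what the reformatted `toGNUCommandLine` above computes, here is a sketch of typical usage (same `lib = import <nixpkgs/lib>` assumption):

```nix
let
  lib = import <nixpkgs/lib>; # assumption: the library is on NIX_PATH
in
lib.cli.toGNUCommandLine { } {
  data = builtins.toJSON { id = 0; };
  X = "PUT";
  retry = 3;
  url = [ "https://example.com/foo" "https://example.com/bar" ];
  verbose = true;
}
# => [ "-X" "PUT" "--data" "{\"id\":0}" "--retry" "3"
#      "--url" "https://example.com/foo" "--url" "https://example.com/bar" "--verbose" ]
```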


@@ -1,20 +1,42 @@
{ lib }:
let
inherit (builtins)
intersectAttrs;
inherit (builtins) intersectAttrs;
inherit (lib)
functionArgs isFunction mirrorFunctionArgs isAttrs setFunctionArgs
optionalAttrs attrNames filter elemAt concatStringsSep sortOn take length
filterAttrs optionalString flip pathIsDirectory head pipe isDerivation listToAttrs
mapAttrs seq flatten deepSeq warnIf isInOldestRelease extends
functionArgs
isFunction
mirrorFunctionArgs
isAttrs
setFunctionArgs
optionalAttrs
attrNames
filter
elemAt
concatStringsSep
sortOn
take
length
filterAttrs
optionalString
flip
pathIsDirectory
head
pipe
isDerivation
listToAttrs
mapAttrs
seq
flatten
deepSeq
warnIf
isInOldestRelease
extends
;
inherit (lib.strings) levenshtein levenshteinAtMost;
in
rec {
/**
`overrideDerivation drv f` takes a derivation (i.e., the result
of a call to the builtin function `derivation`) and returns a new
@@ -40,7 +62,6 @@ rec {
You should in general prefer `drv.overrideAttrs` over this function;
see the nixpkgs manual for more information on overriding.
# Inputs
`drv`
@@ -74,20 +95,21 @@
:::
*/
overrideDerivation = drv: f:
overrideDerivation =
drv: f:
let
newDrv = derivation (drv.drvAttrs // (f drv));
in flip (extendDerivation (seq drv.drvPath true)) newDrv (
{ meta = drv.meta or {};
passthru = if drv ? passthru then drv.passthru else {};
in
flip (extendDerivation (seq drv.drvPath true)) newDrv (
{
meta = drv.meta or { };
passthru = if drv ? passthru then drv.passthru else { };
}
//
(drv.passthru or {})
//
optionalAttrs (drv ? __spliced) {
__spliced = {} // (mapAttrs (_: sDrv: overrideDerivation sDrv f) drv.__spliced);
});
// (drv.passthru or { })
// optionalAttrs (drv ? __spliced) {
__spliced = { } // (mapAttrs (_: sDrv: overrideDerivation sDrv f) drv.__spliced);
}
);
/**
`makeOverridable` takes a function from attribute set to attribute set and
@@ -97,7 +119,6 @@ rec {
Please refer to documentation on [`<pkg>.overrideDerivation`](#sec-pkg-overrideDerivation) to learn about `overrideDerivation` and caveats
related to its use.
# Inputs
`f`
@@ -128,37 +149,39 @@ rec {
:::
*/
makeOverridable = f:
makeOverridable =
f:
let
# Creates a functor with the same arguments as f
mirrorArgs = mirrorFunctionArgs f;
in
mirrorArgs (origArgs:
let
result = f origArgs;
mirrorArgs (
origArgs:
let
result = f origArgs;
# Changes the original arguments with (potentially a function that returns) a set of new attributes
overrideWith = newArgs: origArgs // (if isFunction newArgs then newArgs origArgs else newArgs);
# Changes the original arguments with (potentially a function that returns) a set of new attributes
overrideWith = newArgs: origArgs // (if isFunction newArgs then newArgs origArgs else newArgs);
# Re-call the function but with different arguments
overrideArgs = mirrorArgs (newArgs: makeOverridable f (overrideWith newArgs));
# Change the result of the function call by applying g to it
overrideResult = g: makeOverridable (mirrorArgs (args: g (f args))) origArgs;
in
# Re-call the function but with different arguments
overrideArgs = mirrorArgs (newArgs: makeOverridable f (overrideWith newArgs));
# Change the result of the function call by applying g to it
overrideResult = g: makeOverridable (mirrorArgs (args: g (f args))) origArgs;
in
if isAttrs result then
result // {
result
// {
override = overrideArgs;
overrideDerivation = fdrv: overrideResult (x: overrideDerivation x fdrv);
${if result ? overrideAttrs then "overrideAttrs" else null} = fdrv:
overrideResult (x: x.overrideAttrs fdrv);
${if result ? overrideAttrs then "overrideAttrs" else null} =
fdrv: overrideResult (x: x.overrideAttrs fdrv);
}
else if isFunction result then
# Transform the result into a functor while propagating its arguments
setFunctionArgs result (functionArgs result) // {
override = overrideArgs;
}
else result);
setFunctionArgs result (functionArgs result) // { override = overrideArgs; }
else
result
);
/**
Call the package function in the file `fn` with the required
@ -188,7 +211,6 @@ rec {
<!-- TODO: Apply "Example:" tag to the examples above -->
# Inputs
`autoArgs`
@@ -209,7 +231,8 @@ rec {
callPackageWith :: AttrSet -> ((AttrSet -> a) | Path) -> AttrSet -> a
```
*/
callPackageWith = autoArgs: fn: args:
callPackageWith =
autoArgs: fn: args:
let
f = if isFunction fn then fn else import fn;
fargs = functionArgs f;
@@ -222,59 +245,72 @@ rec {
# wouldn't be passed to it
missingArgs =
# Filter out arguments that have a default value
(filterAttrs (name: value: ! value)
# Filter out arguments that would be passed
(removeAttrs fargs (attrNames allArgs)));
(
filterAttrs (name: value: !value)
# Filter out arguments that would be passed
(removeAttrs fargs (attrNames allArgs))
);
# Get a list of suggested argument names for a given missing one
getSuggestions = arg: pipe (autoArgs // args) [
attrNames
# Only use ones that are at most 2 edits away. While mork would work,
# levenshteinAtMost is only fast for 2 or less.
(filter (levenshteinAtMost 2 arg))
# Put strings with shorter distance first
(sortOn (levenshtein arg))
# Only take the first couple results
(take 3)
# Quote all entries
(map (x: "\"" + x + "\""))
];
getSuggestions =
arg:
pipe (autoArgs // args) [
attrNames
# Only use ones that are at most 2 edits away. While mork would work,
# levenshteinAtMost is only fast for 2 or less.
(filter (levenshteinAtMost 2 arg))
# Put strings with shorter distance first
(sortOn (levenshtein arg))
# Only take the first couple results
(take 3)
# Quote all entries
(map (x: "\"" + x + "\""))
];
prettySuggestions = suggestions:
if suggestions == [] then ""
else if length suggestions == 1 then ", did you mean ${elemAt suggestions 0}?"
else ", did you mean ${concatStringsSep ", " (lib.init suggestions)} or ${lib.last suggestions}?";
prettySuggestions =
suggestions:
if suggestions == [ ] then
""
else if length suggestions == 1 then
", did you mean ${elemAt suggestions 0}?"
else
", did you mean ${concatStringsSep ", " (lib.init suggestions)} or ${lib.last suggestions}?";
errorForArg = arg:
errorForArg =
arg:
let
loc = builtins.unsafeGetAttrPos arg fargs;
# loc' can be removed once lib/minver.nix is >2.3.4, since that includes
# https://github.com/NixOS/nix/pull/3468 which makes loc be non-null
loc' = if loc != null then loc.file + ":" + toString loc.line
else if ! isFunction fn then
loc' =
if loc != null then
loc.file + ":" + toString loc.line
else if !isFunction fn then
toString fn + optionalString (pathIsDirectory fn) "/default.nix"
else "<unknown location>";
in "Function called without required argument \"${arg}\" at "
else
"<unknown location>";
in
"Function called without required argument \"${arg}\" at "
+ "${loc'}${prettySuggestions (getSuggestions arg)}";
# Only show the error for the first missing argument
error = errorForArg (head (attrNames missingArgs));
in if missingArgs == {}
then makeOverridable f allArgs
# This needs to be an abort so it can't be caught with `builtins.tryEval`,
# which is used by nix-env and ofborg to filter out packages that don't evaluate.
# This way we're forced to fix such errors in Nixpkgs,
# which is especially relevant with allowAliases = false
else abort "lib.customisation.callPackageWith: ${error}";
in
if missingArgs == { } then
makeOverridable f allArgs
# This needs to be an abort so it can't be caught with `builtins.tryEval`,
# which is used by nix-env and ofborg to filter out packages that don't evaluate.
# This way we're forced to fix such errors in Nixpkgs,
# which is especially relevant with allowAliases = false
else
abort "lib.customisation.callPackageWith: ${error}";
/**
Like callPackage, but for a function that returns an attribute
set of derivations. The override function is added to the
individual attributes.
# Inputs
`autoArgs`
@@ -295,7 +331,8 @@ rec {
callPackagesWith :: AttrSet -> ((AttrSet -> AttrSet) | Path) -> AttrSet -> AttrSet
```
*/
callPackagesWith = autoArgs: fn: args:
callPackagesWith =
autoArgs: fn: args:
let
f = if isFunction fn then fn else import fn;
auto = intersectAttrs (functionArgs f) autoArgs;
@@ -304,18 +341,19 @@ rec {
pkgs = f origArgs;
mkAttrOverridable = name: _: makeOverridable (mirrorArgs (newArgs: (f newArgs).${name})) origArgs;
in
if isDerivation pkgs then throw
("function `callPackages` was called on a *single* derivation "
+ ''"${pkgs.name or "<unknown-name>"}";''
+ " did you mean to use `callPackage` instead?")
else mapAttrs mkAttrOverridable pkgs;
if isDerivation pkgs then
throw (
"function `callPackages` was called on a *single* derivation "
+ ''"${pkgs.name or "<unknown-name>"}";''
+ " did you mean to use `callPackage` instead?"
)
else
mapAttrs mkAttrOverridable pkgs;
/**
Add attributes to each output of a derivation without changing
the derivation itself and check a given condition when evaluating.
# Inputs
`condition`
@@ -336,34 +374,48 @@ rec {
extendDerivation :: Bool -> Any -> Derivation -> Derivation
```
*/
extendDerivation = condition: passthru: drv:
extendDerivation =
condition: passthru: drv:
let
outputs = drv.outputs or [ "out" ];
commonAttrs = drv // (listToAttrs outputsList) //
({ all = map (x: x.value) outputsList; }) // passthru;
commonAttrs =
drv // (listToAttrs outputsList) // ({ all = map (x: x.value) outputsList; }) // passthru;
outputToAttrListElement = outputName:
{ name = outputName;
value = commonAttrs // {
outputToAttrListElement = outputName: {
name = outputName;
value =
commonAttrs
// {
inherit (drv.${outputName}) type outputName;
outputSpecified = true;
drvPath = assert condition; drv.${outputName}.drvPath;
outPath = assert condition; drv.${outputName}.outPath;
} //
drvPath =
assert condition;
drv.${outputName}.drvPath;
outPath =
assert condition;
drv.${outputName}.outPath;
}
//
# TODO: give the derivation control over the outputs.
# `overrideAttrs` may not be the only attribute that needs
# updating when switching outputs.
optionalAttrs (passthru?overrideAttrs) {
optionalAttrs (passthru ? overrideAttrs) {
# TODO: also add overrideAttrs when overrideAttrs is not custom, e.g. when not splicing.
overrideAttrs = f: (passthru.overrideAttrs f).${outputName};
};
};
};
outputsList = map outputToAttrListElement outputs;
in commonAttrs // {
drvPath = assert condition; drv.drvPath;
outPath = assert condition; drv.outPath;
in
commonAttrs
// {
drvPath =
assert condition;
drv.drvPath;
outPath =
assert condition;
drv.outPath;
};
/**
@@ -372,7 +424,6 @@ rec {
result to ensure that there are no thunks kept alive to prevent
garbage collection.
# Inputs
`drv`
@@ -385,21 +436,29 @@ rec {
hydraJob :: (Derivation | Null) -> (Derivation | Null)
```
*/
hydraJob = drv:
hydraJob =
drv:
let
outputs = drv.outputs or ["out"];
outputs = drv.outputs or [ "out" ];
commonAttrs =
{ inherit (drv) name system meta; inherit outputs; }
{
inherit (drv) name system meta;
inherit outputs;
}
// optionalAttrs (drv._hydraAggregate or false) {
_hydraAggregate = true;
constituents = map hydraJob (flatten drv.constituents);
}
// (listToAttrs outputsList);
makeOutput = outputName:
let output = drv.${outputName}; in
{ name = outputName;
makeOutput =
outputName:
let
output = drv.${outputName};
in
{
name = outputName;
value = commonAttrs // {
outPath = output.outPath;
drvPath = output.drvPath;
@@ -411,8 +470,8 @@ rec {
outputsList = map makeOutput outputs;
drv' = (head outputsList).value;
in if drv == null then null else
deepSeq drv' drv';
in
if drv == null then null else deepSeq drv' drv';
/**
Make an attribute set (a "scope") from functions that take arguments from that same attribute set.
@@ -538,23 +597,27 @@ rec {
makeScope :: (AttrSet -> ((AttrSet -> a) | Path) -> AttrSet -> a) -> (AttrSet -> AttrSet) -> scope
```
*/
makeScope = newScope: f:
let self = f self // {
newScope = scope: newScope (self // scope);
callPackage = self.newScope {};
overrideScope = g: makeScope newScope (extends g f);
# Remove after 24.11 is released.
overrideScope' = g: warnIf (isInOldestRelease 2311)
makeScope =
newScope: f:
let
self = f self // {
newScope = scope: newScope (self // scope);
callPackage = self.newScope { };
overrideScope = g: makeScope newScope (extends g f);
# Remove after 24.11 is released.
overrideScope' =
g:
warnIf (isInOldestRelease 2311)
"`overrideScope'` (from `lib.makeScope`) has been renamed to `overrideScope`."
(makeScope newScope (extends g f));
packages = f;
};
in self;
packages = f;
};
in
self;
/**
backward compatibility with old uncurried form; deprecated
# Inputs
`splicePackages`
@@ -583,9 +646,14 @@
*/
makeScopeWithSplicing =
splicePackages: newScope: otherSplices: keep: extra: f:
makeScopeWithSplicing'
{ inherit splicePackages newScope; }
{ inherit otherSplices keep extra f; };
makeScopeWithSplicing' { inherit splicePackages newScope; } {
inherit
otherSplices
keep
extra
f
;
};
/**
Like makeScope, but aims to support cross compilation. It's still ugly, but
@@ -612,30 +680,29 @@
```
*/
makeScopeWithSplicing' =
{ splicePackages
, newScope
}:
{ otherSplices
# Attrs from `self` which won't be spliced.
# Avoid using keep, it's only used for a python hook workaround, added in PR #104201.
# ex: `keep = (self: { inherit (self) aAttr; })`
, keep ? (_self: {})
# Additional attrs to add to the sets `callPackage`.
# When the package is from a subset (but not a subset within a package IS #211340)
# within `spliced0` it will be spliced.
# When using an package outside the set but it's available from `pkgs`, use the package from `pkgs.__splicedPackages`.
# If the package is not available within the set or in `pkgs`, such as a package in a let binding, it will not be spliced
# ex:
# ```
# nix-repl> darwin.apple_sdk.frameworks.CoreFoundation
# «derivation ...CoreFoundation-11.0.0.drv»
# nix-repl> darwin.CoreFoundation
# error: attribute 'CoreFoundation' missing
# nix-repl> darwin.callPackage ({ CoreFoundation }: CoreFoundation) { }
# «derivation ...CoreFoundation-11.0.0.drv»
# ```
, extra ? (_spliced0: {})
, f
{ splicePackages, newScope }:
{
otherSplices,
# Attrs from `self` which won't be spliced.
# Avoid using keep, it's only used for a python hook workaround, added in PR #104201.
# ex: `keep = (self: { inherit (self) aAttr; })`
keep ? (_self: { }),
# Additional attrs to add to the sets `callPackage`.
# When the package is from a subset (but not a subset within a package IS #211340)
# within `spliced0` it will be spliced.
# When using an package outside the set but it's available from `pkgs`, use the package from `pkgs.__splicedPackages`.
# If the package is not available within the set or in `pkgs`, such as a package in a let binding, it will not be spliced
# ex:
# ```
# nix-repl> darwin.apple_sdk.frameworks.CoreFoundation
# «derivation ...CoreFoundation-11.0.0.drv»
# nix-repl> darwin.CoreFoundation
# error: attribute 'CoreFoundation' missing
# nix-repl> darwin.callPackage ({ CoreFoundation }: CoreFoundation) { }
# «derivation ...CoreFoundation-11.0.0.drv»
# ```
extra ? (_spliced0: { }),
f,
}:
let
spliced0 = splicePackages {
@@ -652,13 +719,15 @@ rec {
callPackage = newScope spliced; # == self.newScope {};
# N.B. the other stages of the package set spliced in are *not*
# overridden.
overrideScope = g: (makeScopeWithSplicing'
{ inherit splicePackages newScope; }
{ inherit otherSplices keep extra;
overrideScope =
g:
(makeScopeWithSplicing' { inherit splicePackages newScope; } {
inherit otherSplices keep extra;
f = extends g f;
});
packages = f;
};
in self;
in
self;
}
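To make the override plumbing above concrete, a minimal sketch of `makeOverridable` and the `override` attribute it injects (same `lib = import <nixpkgs/lib>` assumption):

```nix
let
  lib = import <nixpkgs/lib>; # assumption: the library is on NIX_PATH
  # makeOverridable only adds `override` when the function's result is an attrset
  mkConf = lib.makeOverridable ({ host, port ? 80 }: { inherit host port; });
  conf = mkConf { host = "example.org"; };
in
{
  original   = conf.port;                             # => 80
  overridden = (conf.override { port = 8080; }).port; # => 8080
}
```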


@@ -1,16 +1,17 @@
/* Collection of functions useful for debugging
broken nix expressions.
/*
Collection of functions useful for debugging
broken nix expressions.
* `trace`-like functions take two values, print
the first to stderr and return the second.
* `traceVal`-like functions take one argument
which both printed and returned.
* `traceSeq`-like functions fully evaluate their
traced value before printing (not just to weak
head normal form like trace does by default).
* Functions that end in `-Fn` take an additional
function as their first argument, which is applied
to the traced value before it is printed.
* `trace`-like functions take two values, print
the first to stderr and return the second.
* `traceVal`-like functions take one argument
which both printed and returned.
* `traceSeq`-like functions fully evaluate their
traced value before printing (not just to weak
head normal form like trace does by default).
* Functions that end in `-Fn` take an additional
function as their first argument, which is applied
to the traced value before it is printed.
*/
{ lib }:
let
@@ -25,21 +26,23 @@ let
generators
id
mapAttrs
trace;
trace
;
in
rec {
# -- TRACING --
/* Conditionally trace the supplied message, based on a predicate.
/*
Conditionally trace the supplied message, based on a predicate.
Type: traceIf :: bool -> string -> a -> a
Type: traceIf :: bool -> string -> a -> a
Example:
traceIf true "hello" 3
trace: hello
=> 3
Example:
traceIf true "hello" 3
trace: hello
=> 3
*/
traceIf =
# Predicate to check
@@ -47,128 +50,155 @@ rec {
# Message that should be traced
msg:
# Value to return
x: if pred then trace msg x else x;
x:
if pred then trace msg x else x;
/* Trace the supplied value after applying a function to it, and
return the original value.
/*
Trace the supplied value after applying a function to it, and
return the original value.
Type: traceValFn :: (a -> b) -> a -> a
Type: traceValFn :: (a -> b) -> a -> a
Example:
traceValFn (v: "mystring ${v}") "foo"
trace: mystring foo
=> "foo"
Example:
traceValFn (v: "mystring ${v}") "foo"
trace: mystring foo
=> "foo"
*/
traceValFn =
# Function to apply
f:
# Value to trace and return
x: trace (f x) x;
x:
trace (f x) x;
/* Trace the supplied value and return it.
/*
Trace the supplied value and return it.
Type: traceVal :: a -> a
Type: traceVal :: a -> a
Example:
traceVal 42
# trace: 42
=> 42
Example:
traceVal 42
# trace: 42
=> 42
*/
traceVal = traceValFn id;
/* `builtins.trace`, but the value is `builtins.deepSeq`ed first.
/*
`builtins.trace`, but the value is `builtins.deepSeq`ed first.
Type: traceSeq :: a -> b -> b
Type: traceSeq :: a -> b -> b
Example:
trace { a.b.c = 3; } null
trace: { a = <CODE>; }
=> null
traceSeq { a.b.c = 3; } null
trace: { a = { b = { c = 3; }; }; }
=> null
Example:
trace { a.b.c = 3; } null
trace: { a = <CODE>; }
=> null
traceSeq { a.b.c = 3; } null
trace: { a = { b = { c = 3; }; }; }
=> null
*/
traceSeq =
# The value to trace
x:
# The value to return
y: trace (builtins.deepSeq x x) y;
y:
trace (builtins.deepSeq x x) y;
/* Like `traceSeq`, but only evaluate down to depth n.
This is very useful because lots of `traceSeq` usages
lead to an infinite recursion.
/*
Like `traceSeq`, but only evaluate down to depth n.
This is very useful because lots of `traceSeq` usages
lead to an infinite recursion.
Example:
traceSeqN 2 { a.b.c = 3; } null
trace: { a = { b = {}; }; }
=> null
Example:
traceSeqN 2 { a.b.c = 3; } null
trace: { a = { b = {}; }; }
=> null
Type: traceSeqN :: Int -> a -> b -> b
*/
traceSeqN = depth: x: y:
let snip = v: if isList v then noQuotes "[]" v
else if isAttrs v then noQuotes "{}" v
else v;
noQuotes = str: v: { __pretty = const str; val = v; };
modify = n: fn: v: if (n == 0) then fn v
else if isList v then map (modify (n - 1) fn) v
else if isAttrs v then mapAttrs
(const (modify (n - 1) fn)) v
else v;
in trace (generators.toPretty { allowPrettyValues = true; }
(modify depth snip x)) y;
Type: traceSeqN :: Int -> a -> b -> b
*/
traceSeqN =
depth: x: y:
let
snip =
v:
if isList v then
noQuotes "[]" v
else if isAttrs v then
noQuotes "{}" v
else
v;
noQuotes = str: v: {
__pretty = const str;
val = v;
};
modify =
n: fn: v:
if (n == 0) then
fn v
else if isList v then
map (modify (n - 1) fn) v
else if isAttrs v then
mapAttrs (const (modify (n - 1) fn)) v
else
v;
in
trace (generators.toPretty { allowPrettyValues = true; } (modify depth snip x)) y;
/* A combination of `traceVal` and `traceSeq` that applies a
provided function to the value to be traced after `deepSeq`ing
it.
/*
A combination of `traceVal` and `traceSeq` that applies a
provided function to the value to be traced after `deepSeq`ing
it.
*/
traceValSeqFn =
# Function to apply
f:
# Value to trace
v: traceValFn f (builtins.deepSeq v v);
v:
traceValFn f (builtins.deepSeq v v);
/* A combination of `traceVal` and `traceSeq`. */
# A combination of `traceVal` and `traceSeq`.
traceValSeq = traceValSeqFn id;
/* A combination of `traceVal` and `traceSeqN` that applies a
provided function to the value to be traced. */
/*
A combination of `traceVal` and `traceSeqN` that applies a
provided function to the value to be traced.
*/
traceValSeqNFn =
# Function to apply
f:
depth:
f: depth:
# Value to trace
v: traceSeqN depth (f v) v;
v:
traceSeqN depth (f v) v;
/* A combination of `traceVal` and `traceSeqN`. */
# A combination of `traceVal` and `traceSeqN`.
traceValSeqN = traceValSeqNFn id;
/* Trace the input and output of a function `f` named `name`,
both down to `depth`.
/*
Trace the input and output of a function `f` named `name`,
both down to `depth`.
This is useful for adding around a function call,
to see the before/after of values as they are transformed.
This is useful for adding around a function call,
to see the before/after of values as they are transformed.
Example:
traceFnSeqN 2 "id" (x: x) { a.b.c = 3; }
trace: { fn = "id"; from = { a.b = {}; }; to = { a.b = {}; }; }
=> { a.b.c = 3; }
Example:
traceFnSeqN 2 "id" (x: x) { a.b.c = 3; }
trace: { fn = "id"; from = { a.b = {}; }; to = { a.b = {}; }; }
=> { a.b.c = 3; }
*/
traceFnSeqN = depth: name: f: v:
let res = f v;
in lib.traceSeqN
(depth + 1)
{
fn = name;
from = v;
to = res;
}
res;
traceFnSeqN =
depth: name: f: v:
let
res = f v;
in
lib.traceSeqN (depth + 1) {
fn = name;
from = v;
to = res;
} res;
# -- TESTING --
/* Evaluates a set of tests.
/*
Evaluates a set of tests.
A test is an attribute set `{expr, expected}`,
denoting an expression and its expected result.
@@ -228,19 +258,41 @@
*/
runTests =
# Tests to run
tests: concatLists (attrValues (mapAttrs (name: test:
let testsToRun = if tests ? tests then tests.tests else [];
in if (substring 0 4 name == "test" || elem name testsToRun)
&& ((testsToRun == []) || elem name tests.tests)
&& (test.expr != test.expected)
tests:
concatLists (
attrValues (
mapAttrs (
name: test:
let
testsToRun = if tests ? tests then tests.tests else [ ];
in
if
(substring 0 4 name == "test" || elem name testsToRun)
&& ((testsToRun == [ ]) || elem name tests.tests)
&& (test.expr != test.expected)
then [ { inherit name; expected = test.expected; result = test.expr; } ]
else [] ) tests));
then
[
{
inherit name;
expected = test.expected;
result = test.expr;
}
]
else
[ ]
) tests
)
);
/* Create a test assuming that list elements are `true`.
/*
Create a test assuming that list elements are `true`.
Example:
{ testX = allTrue [ true ]; }
Example:
{ testX = allTrue [ true ]; }
*/
testAllTrue = expr: { inherit expr; expected = map (x: true) expr; };
testAllTrue = expr: {
inherit expr;
expected = map (x: true) expr;
};
}
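A short sketch of `runTests` from the debug helpers above; only failing cases are returned (same `lib = import <nixpkgs/lib>` assumption):

```nix
let
  lib = import <nixpkgs/lib>; # assumption: the library is on NIX_PATH
in
lib.debug.runTests {
  testAddition = { expr = 1 + 1; expected = 2; }; # passes, so it is not reported
  testBroken   = { expr = 2 + 2; expected = 5; }; # fails, so it is reported
}
# => [ { name = "testBroken"; expected = 5; result = 4; } ]
```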


@@ -1,171 +1,500 @@
/* Library of low-level helper functions for nix expressions.
*
* Please implement (mostly) exhaustive unit tests
* for new functions in `./tests.nix`.
*/
/*
Library of low-level helper functions for nix expressions.
Please implement (mostly) exhaustive unit tests
for new functions in `./tests.nix`.
*/
let
inherit (import ./fixed-points.nix { inherit lib; }) makeExtensible;
lib = makeExtensible (self: let
callLibs = file: import file { lib = self; };
in {
lib = makeExtensible (
self:
let
callLibs = file: import file { lib = self; };
in
{
# often used, or depending on very little
trivial = callLibs ./trivial.nix;
fixedPoints = callLibs ./fixed-points.nix;
# often used, or depending on very little
trivial = callLibs ./trivial.nix;
fixedPoints = callLibs ./fixed-points.nix;
# datatypes
attrsets = callLibs ./attrsets.nix;
lists = callLibs ./lists.nix;
strings = callLibs ./strings.nix;
stringsWithDeps = callLibs ./strings-with-deps.nix;
# datatypes
attrsets = callLibs ./attrsets.nix;
lists = callLibs ./lists.nix;
strings = callLibs ./strings.nix;
stringsWithDeps = callLibs ./strings-with-deps.nix;
# packaging
customisation = callLibs ./customisation.nix;
derivations = callLibs ./derivations.nix;
maintainers = import ../maintainers/maintainer-list.nix;
teams = callLibs ../maintainers/team-list.nix;
meta = callLibs ./meta.nix;
versions = callLibs ./versions.nix;
# packaging
customisation = callLibs ./customisation.nix;
derivations = callLibs ./derivations.nix;
maintainers = import ../maintainers/maintainer-list.nix;
teams = callLibs ../maintainers/team-list.nix;
meta = callLibs ./meta.nix;
versions = callLibs ./versions.nix;
# module system
modules = callLibs ./modules.nix;
options = callLibs ./options.nix;
types = callLibs ./types.nix;
# module system
modules = callLibs ./modules.nix;
options = callLibs ./options.nix;
types = callLibs ./types.nix;
# constants
licenses = callLibs ./licenses.nix;
sourceTypes = callLibs ./source-types.nix;
systems = callLibs ./systems;
# constants
licenses = callLibs ./licenses.nix;
sourceTypes = callLibs ./source-types.nix;
systems = callLibs ./systems;
# serialization
cli = callLibs ./cli.nix;
gvariant = callLibs ./gvariant.nix;
generators = callLibs ./generators.nix;
# serialization
cli = callLibs ./cli.nix;
gvariant = callLibs ./gvariant.nix;
generators = callLibs ./generators.nix;
# misc
asserts = callLibs ./asserts.nix;
debug = callLibs ./debug.nix;
misc = callLibs ./deprecated/misc.nix;
# misc
asserts = callLibs ./asserts.nix;
debug = callLibs ./debug.nix;
misc = callLibs ./deprecated/misc.nix;
# domain-specific
fetchers = callLibs ./fetchers.nix;
# domain-specific
fetchers = callLibs ./fetchers.nix;
# Eval-time filesystem handling
path = callLibs ./path;
filesystem = callLibs ./filesystem.nix;
fileset = callLibs ./fileset;
sources = callLibs ./sources.nix;
# Eval-time filesystem handling
path = callLibs ./path;
filesystem = callLibs ./filesystem.nix;
fileset = callLibs ./fileset;
sources = callLibs ./sources.nix;
# back-compat aliases
platforms = self.systems.doubles;
# back-compat aliases
platforms = self.systems.doubles;
# linux kernel configuration
kernel = callLibs ./kernel.nix;
# linux kernel configuration
kernel = callLibs ./kernel.nix;
inherit (builtins) add addErrorContext attrNames concatLists
deepSeq elem elemAt filter genericClosure genList getAttr
hasAttr head isAttrs isBool isInt isList isPath isString length
lessThan listToAttrs pathExists readFile replaceStrings seq
stringLength sub substring tail trace;
inherit (self.trivial) id const pipe concat or and xor bitAnd bitOr bitXor
bitNot boolToString mergeAttrs flip mapNullable inNixShell isFloat min max
importJSON importTOML warn warnIf warnIfNot throwIf throwIfNot checkListOfEnum
info showWarnings nixpkgsVersion version isInOldestRelease
mod compare splitByAndCompare
functionArgs setFunctionArgs isFunction toFunction mirrorFunctionArgs
toHexString toBaseDigits inPureEvalMode;
inherit (self.fixedPoints) fix fix' converge extends composeExtensions
composeManyExtensions makeExtensible makeExtensibleWithCustomName;
inherit (self.attrsets) attrByPath hasAttrByPath setAttrByPath
getAttrFromPath attrVals attrValues getAttrs catAttrs filterAttrs
filterAttrsRecursive foldlAttrs foldAttrs collect nameValuePair mapAttrs
mapAttrs' mapAttrsToList attrsToList concatMapAttrs mapAttrsRecursive
mapAttrsRecursiveCond genAttrs isDerivation toDerivation optionalAttrs
zipAttrsWithNames zipAttrsWith zipAttrs recursiveUpdateUntil
recursiveUpdate matchAttrs mergeAttrsList overrideExisting showAttrPath getOutput
getBin getLib getDev getMan chooseDevOutputs zipWithNames zip
recurseIntoAttrs dontRecurseIntoAttrs cartesianProduct cartesianProductOfSets
mapCartesianProduct updateManyAttrsByPath;
inherit (self.lists) singleton forEach foldr fold foldl foldl' imap0 imap1
ifilter0 concatMap flatten remove findSingle findFirst any all count
optional optionals toList range replicate partition zipListsWith zipLists
reverseList listDfs toposort sort sortOn naturalSort compareLists take
drop sublist last init crossLists unique allUnique intersectLists
subtractLists mutuallyExclusive groupBy groupBy';
inherit (self.strings) concatStrings concatMapStrings concatImapStrings
intersperse concatStringsSep concatMapStringsSep
concatImapStringsSep concatLines makeSearchPath makeSearchPathOutput
makeLibraryPath makeIncludePath makeBinPath optionalString
hasInfix hasPrefix hasSuffix stringToCharacters stringAsChars escape
escapeShellArg escapeShellArgs
isStorePath isStringLike
isValidPosixName toShellVar toShellVars
escapeRegex escapeURL escapeXML replaceChars lowerChars
upperChars toLower toUpper addContextFrom splitString
removePrefix removeSuffix versionOlder versionAtLeast
getName getVersion
cmakeOptionType cmakeBool cmakeFeature
mesonOption mesonBool mesonEnable
nameFromURL enableFeature enableFeatureAs withFeature
withFeatureAs fixedWidthString fixedWidthNumber
toInt toIntBase10 readPathsFromFile fileContents;
inherit (self.stringsWithDeps) textClosureList textClosureMap
noDepEntry fullDepEntry packEntry stringAfter;
inherit (self.customisation) overrideDerivation makeOverridable
callPackageWith callPackagesWith extendDerivation hydraJob
makeScope makeScopeWithSplicing makeScopeWithSplicing';
inherit (self.derivations) lazyDerivation optionalDrvAttr;
inherit (self.meta) addMetaAttrs dontDistribute setName updateName
appendToName mapDerivationAttrset setPrio lowPrio lowPrioSet hiPrio
hiPrioSet getLicenseFromSpdxId getExe getExe';
inherit (self.filesystem) pathType pathIsDirectory pathIsRegularFile
packagesFromDirectoryRecursive;
inherit (self.sources) cleanSourceFilter
cleanSource sourceByRegex sourceFilesBySuffices
commitIdFromGitRepo cleanSourceWith pathHasContext
canCleanSource pathIsGitRepo;
inherit (self.modules) evalModules setDefaultModuleLocation
unifyModuleSyntax applyModuleArgsIfFunction mergeModules
mergeModules' mergeOptionDecls mergeDefinitions
pushDownProperties dischargeProperties filterOverrides
sortProperties fixupOptionType mkIf mkAssert mkMerge mkOverride
mkOptionDefault mkDefault mkImageMediaOverride mkForce mkVMOverride
mkFixStrictness mkOrder mkBefore mkAfter mkAliasDefinitions
mkAliasAndWrapDefinitions fixMergeModules mkRemovedOptionModule
mkRenamedOptionModule mkRenamedOptionModuleWith
mkMergedOptionModule mkChangedOptionModule
mkAliasOptionModule mkDerivedConfig doRename
mkAliasOptionModuleMD;
evalOptionValue = lib.warn "External use of `lib.evalOptionValue` is deprecated. If your use case isn't covered by non-deprecated functions, we'd like to know more and perhaps support your use case well, instead of providing access to these low level functions. In this case please open an issue in https://github.com/nixos/nixpkgs/issues/." self.modules.evalOptionValue;
inherit (self.options) isOption mkEnableOption mkSinkUndeclaredOptions
mergeDefaultOption mergeOneOption mergeEqualOption mergeUniqueOption
getValues getFiles
optionAttrSetToDocList optionAttrSetToDocList'
scrubOptionValue literalExpression literalExample
showOption showOptionWithDefLocs showFiles
unknownModule mkOption mkPackageOption mkPackageOptionMD
mdDoc literalMD;
inherit (self.types) isType setType defaultTypeMerge defaultFunctor
isOptionType mkOptionType;
inherit (self.asserts)
assertMsg assertOneOf;
inherit (self.debug) traceIf traceVal traceValFn
traceSeq traceSeqN traceValSeq
traceValSeqFn traceValSeqN traceValSeqNFn traceFnSeqN
runTests testAllTrue;
inherit (self.misc) maybeEnv defaultMergeArg defaultMerge foldArgs
maybeAttrNullable maybeAttr ifEnable checkFlag getValue
checkReqs uniqList uniqListExt condConcat lazyGenericClosure
innerModifySumArgs modifySumArgs innerClosePropagation
closePropagation mapAttrsFlatten nvs setAttr setAttrMerge
mergeAttrsWithFunc mergeAttrsConcatenateValues
mergeAttrsNoOverride mergeAttrByFunc mergeAttrsByFuncDefaults
mergeAttrsByFuncDefaultsClean mergeAttrBy
fakeHash fakeSha256 fakeSha512
nixType imap;
inherit (self.versions)
splitVersion;
});
in lib
inherit (builtins)
add
addErrorContext
attrNames
concatLists
deepSeq
elem
elemAt
filter
genericClosure
genList
getAttr
hasAttr
head
isAttrs
isBool
isInt
isList
isPath
isString
length
lessThan
listToAttrs
pathExists
readFile
replaceStrings
seq
stringLength
sub
substring
tail
trace
;
inherit (self.trivial)
id
const
pipe
concat
or
and
xor
bitAnd
bitOr
bitXor
bitNot
boolToString
mergeAttrs
flip
mapNullable
inNixShell
isFloat
min
max
importJSON
importTOML
warn
warnIf
warnIfNot
throwIf
throwIfNot
checkListOfEnum
info
showWarnings
nixpkgsVersion
version
isInOldestRelease
mod
compare
splitByAndCompare
functionArgs
setFunctionArgs
isFunction
toFunction
mirrorFunctionArgs
toHexString
toBaseDigits
inPureEvalMode
;
inherit (self.fixedPoints)
fix
fix'
converge
extends
composeExtensions
composeManyExtensions
makeExtensible
makeExtensibleWithCustomName
;
inherit (self.attrsets)
attrByPath
hasAttrByPath
setAttrByPath
getAttrFromPath
attrVals
attrValues
getAttrs
catAttrs
filterAttrs
filterAttrsRecursive
foldlAttrs
foldAttrs
collect
nameValuePair
mapAttrs
mapAttrs'
mapAttrsToList
attrsToList
concatMapAttrs
mapAttrsRecursive
mapAttrsRecursiveCond
genAttrs
isDerivation
toDerivation
optionalAttrs
zipAttrsWithNames
zipAttrsWith
zipAttrs
recursiveUpdateUntil
recursiveUpdate
matchAttrs
mergeAttrsList
overrideExisting
showAttrPath
getOutput
getBin
getLib
getDev
getMan
chooseDevOutputs
zipWithNames
zip
recurseIntoAttrs
dontRecurseIntoAttrs
cartesianProduct
cartesianProductOfSets
mapCartesianProduct
updateManyAttrsByPath
;
inherit (self.lists)
singleton
forEach
foldr
fold
foldl
foldl'
imap0
imap1
ifilter0
concatMap
flatten
remove
findSingle
findFirst
any
all
count
optional
optionals
toList
range
replicate
partition
zipListsWith
zipLists
reverseList
listDfs
toposort
sort
sortOn
naturalSort
compareLists
take
drop
sublist
last
init
crossLists
unique
allUnique
intersectLists
subtractLists
mutuallyExclusive
groupBy
groupBy'
;
inherit (self.strings)
concatStrings
concatMapStrings
concatImapStrings
intersperse
concatStringsSep
concatMapStringsSep
concatImapStringsSep
concatLines
makeSearchPath
makeSearchPathOutput
makeLibraryPath
makeIncludePath
makeBinPath
optionalString
hasInfix
hasPrefix
hasSuffix
stringToCharacters
stringAsChars
escape
escapeShellArg
escapeShellArgs
isStorePath
isStringLike
isValidPosixName
toShellVar
toShellVars
escapeRegex
escapeURL
escapeXML
replaceChars
lowerChars
upperChars
toLower
toUpper
addContextFrom
splitString
removePrefix
removeSuffix
versionOlder
versionAtLeast
getName
getVersion
cmakeOptionType
cmakeBool
cmakeFeature
mesonOption
mesonBool
mesonEnable
nameFromURL
enableFeature
enableFeatureAs
withFeature
withFeatureAs
fixedWidthString
fixedWidthNumber
toInt
toIntBase10
readPathsFromFile
fileContents
;
inherit (self.stringsWithDeps)
textClosureList
textClosureMap
noDepEntry
fullDepEntry
packEntry
stringAfter
;
inherit (self.customisation)
overrideDerivation
makeOverridable
callPackageWith
callPackagesWith
extendDerivation
hydraJob
makeScope
makeScopeWithSplicing
makeScopeWithSplicing'
;
inherit (self.derivations) lazyDerivation optionalDrvAttr;
inherit (self.meta)
addMetaAttrs
dontDistribute
setName
updateName
appendToName
mapDerivationAttrset
setPrio
lowPrio
lowPrioSet
hiPrio
hiPrioSet
getLicenseFromSpdxId
getExe
getExe'
;
inherit (self.filesystem)
pathType
pathIsDirectory
pathIsRegularFile
packagesFromDirectoryRecursive
;
inherit (self.sources)
cleanSourceFilter
cleanSource
sourceByRegex
sourceFilesBySuffices
commitIdFromGitRepo
cleanSourceWith
pathHasContext
canCleanSource
pathIsGitRepo
;
inherit (self.modules)
evalModules
setDefaultModuleLocation
unifyModuleSyntax
applyModuleArgsIfFunction
mergeModules
mergeModules'
mergeOptionDecls
mergeDefinitions
pushDownProperties
dischargeProperties
filterOverrides
sortProperties
fixupOptionType
mkIf
mkAssert
mkMerge
mkOverride
mkOptionDefault
mkDefault
mkImageMediaOverride
mkForce
mkVMOverride
mkFixStrictness
mkOrder
mkBefore
mkAfter
mkAliasDefinitions
mkAliasAndWrapDefinitions
fixMergeModules
mkRemovedOptionModule
mkRenamedOptionModule
mkRenamedOptionModuleWith
mkMergedOptionModule
mkChangedOptionModule
mkAliasOptionModule
mkDerivedConfig
doRename
mkAliasOptionModuleMD
;
evalOptionValue = lib.warn "External use of `lib.evalOptionValue` is deprecated. If your use case isn't covered by non-deprecated functions, we'd like to know more and perhaps support your use case well, instead of providing access to these low level functions. In this case please open an issue in https://github.com/nixos/nixpkgs/issues/." self.modules.evalOptionValue;
inherit (self.options)
isOption
mkEnableOption
mkSinkUndeclaredOptions
mergeDefaultOption
mergeOneOption
mergeEqualOption
mergeUniqueOption
getValues
getFiles
optionAttrSetToDocList
optionAttrSetToDocList'
scrubOptionValue
literalExpression
literalExample
showOption
showOptionWithDefLocs
showFiles
unknownModule
mkOption
mkPackageOption
mkPackageOptionMD
mdDoc
literalMD
;
inherit (self.types)
isType
setType
defaultTypeMerge
defaultFunctor
isOptionType
mkOptionType
;
inherit (self.asserts) assertMsg assertOneOf;
inherit (self.debug)
traceIf
traceVal
traceValFn
traceSeq
traceSeqN
traceValSeq
traceValSeqFn
traceValSeqN
traceValSeqNFn
traceFnSeqN
runTests
testAllTrue
;
inherit (self.misc)
maybeEnv
defaultMergeArg
defaultMerge
foldArgs
maybeAttrNullable
maybeAttr
ifEnable
checkFlag
getValue
checkReqs
uniqList
uniqListExt
condConcat
lazyGenericClosure
innerModifySumArgs
modifySumArgs
innerClosePropagation
closePropagation
mapAttrsFlatten
nvs
setAttr
setAttrMerge
mergeAttrsWithFunc
mergeAttrsConcatenateValues
mergeAttrsNoOverride
mergeAttrByFunc
mergeAttrsByFuncDefaults
mergeAttrsByFuncDefaultsClean
mergeAttrBy
fakeHash
fakeSha256
fakeSha512
nixType
imap
;
inherit (self.versions) splitVersion;
}
);
in
lib
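Because the set above is built with `makeExtensible`, the resulting `lib` exposes an `extend` function that re-runs the fixed point; a minimal sketch of layering an extra helper on top (same `lib = import <nixpkgs/lib>` assumption, `greet` is a made-up attribute):

```nix
let
  lib = import <nixpkgs/lib>; # assumption: the library is on NIX_PATH
  myLib = lib.extend (final: prev: {
    # hypothetical helper, defined in terms of the existing string functions
    greet = name: "hello ${prev.strings.toUpper name}";
  });
in
myLib.greet "world" # => "hello WORLD"
```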


@@ -34,153 +34,217 @@ let
inherit (lib.attrsets) removeAttrs;
# returns default if env var is not set
maybeEnv = name: default:
let value = builtins.getEnv name; in
maybeEnv =
name: default:
let
value = builtins.getEnv name;
in
if value == "" then default else value;
defaultMergeArg = x : y: if builtins.isAttrs y then
y
else
(y x);
defaultMergeArg = x: y: if builtins.isAttrs y then y else (y x);
defaultMerge = x: y: x // (defaultMergeArg x y);
foldArgs = merger: f: init: x:
let arg = (merger init (defaultMergeArg init x));
# now add the function with composed args already applied to the final attrs
base = (setAttrMerge "passthru" {} (f arg)
( z: z // {
function = foldArgs merger f arg;
args = (attrByPath ["passthru" "args"] {} z) // x;
} ));
withStdOverrides = base // {
override = base.passthru.function;
};
in
withStdOverrides;
foldArgs =
merger: f: init: x:
let
arg = (merger init (defaultMergeArg init x));
# now add the function with composed args already applied to the final attrs
base = (
setAttrMerge "passthru" { } (f arg) (
z:
z
// {
function = foldArgs merger f arg;
args =
(attrByPath [
"passthru"
"args"
] { } z)
// x;
}
)
);
withStdOverrides = base // {
override = base.passthru.function;
};
in
withStdOverrides;
# shortcut for attrByPath ["name"] default attrs
maybeAttrNullable = maybeAttr;
# shortcut for attrByPath ["name"] default attrs
maybeAttr = name: default: attrs: attrs.${name} or default;
maybeAttr =
name: default: attrs:
attrs.${name} or default;
# Return the second argument if the first one is true or the empty version
# of the second argument.
ifEnable = cond: val:
if cond then val
else if builtins.isList val then []
else if builtins.isAttrs val then {}
ifEnable =
cond: val:
if cond then
val
else if builtins.isList val then
[ ]
else if builtins.isAttrs val then
{ }
# else if builtins.isString val then ""
else if val == true || val == false then false
else null;
else if val == true || val == false then
false
else
null;
# Return true only if there is an attribute and it is true.
checkFlag = attrSet: name:
if name == "true" then true else
if name == "false" then false else
if (elem name (attrByPath ["flags"] [] attrSet)) then true else
attrByPath [name] false attrSet ;
checkFlag =
attrSet: name:
if name == "true" then
true
else if name == "false" then
false
else if (elem name (attrByPath [ "flags" ] [ ] attrSet)) then
true
else
attrByPath [ name ] false attrSet;
# Input : attrSet, [ [name default] ... ], name
# Output : its value or default.
getValue = attrSet: argList: name:
( attrByPath [name] (if checkFlag attrSet name then true else
if argList == [] then null else
let x = builtins.head argList; in
if (head x) == name then
(head (tail x))
else (getValue attrSet
(tail argList) name)) attrSet );
getValue =
attrSet: argList: name:
(attrByPath [ name ] (
if checkFlag attrSet name then
true
else if argList == [ ] then
null
else
let
x = builtins.head argList;
in
if (head x) == name then (head (tail x)) else (getValue attrSet (tail argList) name)
) attrSet);
# Input : attrSet, [[name default] ...], [ [flagname reqs..] ... ]
# Output : are reqs satisfied? It's asserted.
checkReqs = attrSet: argList: condList:
(
foldr and true
(map (x: let name = (head x); in
((checkFlag attrSet name) ->
(foldr and true
(map (y: let val=(getValue attrSet argList y); in
(val!=null) && (val!=false))
(tail x))))) condList));
checkReqs =
attrSet: argList: condList:
(foldr and true (
map (
x:
let
name = (head x);
in
(
(checkFlag attrSet name)
-> (foldr and true (
map (
y:
let
val = (getValue attrSet argList y);
in
(val != null) && (val != false)
) (tail x)
))
)
) condList
));
# This function has O(n^2) performance.
uniqList = { inputList, acc ? [] }:
let go = xs: acc:
if xs == []
then []
else let x = head xs;
y = if elem x acc then [] else [x];
in y ++ go (tail xs) (y ++ acc);
in go inputList acc;
uniqListExt = { inputList,
outputList ? [],
getter ? (x: x),
compare ? (x: y: x==y) }:
if inputList == [] then outputList else
let x = head inputList;
isX = y: (compare (getter y) (getter x));
newOutputList = outputList ++
(if any isX outputList then [] else [x]);
in uniqListExt { outputList = newOutputList;
inputList = (tail inputList);
inherit getter compare;
};
condConcat = name: list: checker:
if list == [] then name else
if checker (head list) then
condConcat
(name + (head (tail list)))
(tail (tail list))
checker
else condConcat
name (tail (tail list)) checker;
lazyGenericClosure = {startSet, operator}:
uniqList =
{
inputList,
acc ? [ ],
}:
let
work = list: doneKeys: result:
if list == [] then
go =
xs: acc:
if xs == [ ] then
[ ]
else
let
x = head xs;
y = if elem x acc then [ ] else [ x ];
in
y ++ go (tail xs) (y ++ acc);
in
go inputList acc;
uniqListExt =
{
inputList,
outputList ? [ ],
getter ? (x: x),
compare ? (x: y: x == y),
}:
if inputList == [ ] then
outputList
else
let
x = head inputList;
isX = y: (compare (getter y) (getter x));
newOutputList = outputList ++ (if any isX outputList then [ ] else [ x ]);
in
uniqListExt {
outputList = newOutputList;
inputList = (tail inputList);
inherit getter compare;
};
condConcat =
name: list: checker:
if list == [ ] then
name
else if checker (head list) then
condConcat (name + (head (tail list))) (tail (tail list)) checker
else
condConcat name (tail (tail list)) checker;
lazyGenericClosure =
{ startSet, operator }:
let
work =
list: doneKeys: result:
if list == [ ] then
result
else
let x = head list; key = x.key; in
let
x = head list;
key = x.key;
in
if elem key doneKeys then
work (tail list) doneKeys result
else
work (tail list ++ operator x) ([key] ++ doneKeys) ([x] ++ result);
work (tail list ++ operator x) ([ key ] ++ doneKeys) ([ x ] ++ result);
in
work startSet [] [];
work startSet [ ] [ ];
innerModifySumArgs = f: x: a: b: if b == null then (f a b) // x else
innerModifySumArgs f x (a // b);
modifySumArgs = f: x: innerModifySumArgs f x {};
innerModifySumArgs =
f: x: a: b:
if b == null then (f a b) // x else innerModifySumArgs f x (a // b);
modifySumArgs = f: x: innerModifySumArgs f x { };
innerClosePropagation =
acc: xs:
if xs == [ ] then
acc
else
let
y = head xs;
ys = tail xs;
in
if !isAttrs y then
innerClosePropagation acc ys
else
let
acc' = [ y ] ++ acc;
in
innerClosePropagation acc' (uniqList {
inputList =
(maybeAttrNullable "propagatedBuildInputs" [ ] y)
++ (maybeAttrNullable "propagatedNativeBuildInputs" [ ] y)
++ ys;
acc = acc';
});
innerClosePropagation = acc: xs:
if xs == []
then acc
else let y = head xs;
ys = tail xs;
in if ! isAttrs y
then innerClosePropagation acc ys
else let acc' = [y] ++ acc;
in innerClosePropagation
acc'
(uniqList { inputList = (maybeAttrNullable "propagatedBuildInputs" [] y)
++ (maybeAttrNullable "propagatedNativeBuildInputs" [] y)
++ ys;
acc = acc';
}
);
closePropagationSlow = list: (uniqList {inputList = (innerClosePropagation [] list);});
closePropagationSlow = list: (uniqList { inputList = (innerClosePropagation [ ] list); });
# This is an optimisation of closePropagation which avoids the O(n^2) behavior
# Using a list of derivations, it generates the full closure of the propagatedXXXBuildInputs
@@ -188,28 +252,35 @@ let
# attribute of each derivation.
# On some benchmarks, it performs up to 15 times faster than closePropagation.
# See https://github.com/NixOS/nixpkgs/pull/194391 for details.
closePropagationFast = list:
builtins.map (x: x.val) (builtins.genericClosure {
startSet = builtins.map (x: {
key = x.outPath;
val = x;
}) (builtins.filter (x: x != null) list);
operator = item:
if !builtins.isAttrs item.val then
[ ]
else
builtins.concatMap (x:
if x != null then [{
key = x.outPath;
val = x;
}] else
[ ]) ((item.val.propagatedBuildInputs or [ ])
++ (item.val.propagatedNativeBuildInputs or [ ]));
});
closePropagationFast =
list:
builtins.map (x: x.val) (
builtins.genericClosure {
startSet = builtins.map (x: {
key = x.outPath;
val = x;
}) (builtins.filter (x: x != null) list);
operator =
item:
if !builtins.isAttrs item.val then
[ ]
else
builtins.concatMap (
x:
if x != null then
[
{
key = x.outPath;
val = x;
}
]
else
[ ]
) ((item.val.propagatedBuildInputs or [ ]) ++ (item.val.propagatedNativeBuildInputs or [ ]));
}
);
closePropagation = if builtins ? genericClosure
then closePropagationFast
else closePropagationSlow;
closePropagation = if builtins ? genericClosure then closePropagationFast else closePropagationSlow;
# calls a function (f attr value ) for each record item. returns a list
mapAttrsFlatten = f: r: map (attr: f attr r.${attr}) (attrNames r);
@@ -217,26 +288,29 @@ let
# attribute set containing one attribute
nvs = name: value: listToAttrs [ (nameValuePair name value) ];
# adds / replaces an attribute of an attribute set
setAttr = set: name: v: set // (nvs name v);
setAttr =
set: name: v:
set // (nvs name v);
# setAttrMerge (similar to mergeAttrsWithFunc but only merges the values of a particular name)
# setAttrMerge "a" [] { a = [2];} (x: x ++ [3]) -> { a = [2 3]; }
# setAttrMerge "a" [] { } (x: x ++ [3]) -> { a = [ 3]; }
setAttrMerge = name: default: attrs: f:
setAttrMerge =
name: default: attrs: f:
setAttr attrs name (f (maybeAttr name default attrs));
# Using f = a: b = b the result is similar to //
# merge attributes with custom function handling the case that the attribute
# exists in both sets
mergeAttrsWithFunc = f: set1: set2:
foldr (n: set: if set ? ${n}
then setAttr set n (f set.${n} set2.${n})
else set )
(set2 // set1) (attrNames set2);
mergeAttrsWithFunc =
f: set1: set2:
foldr (n: set: if set ? ${n} then setAttr set n (f set.${n} set2.${n}) else set) (set2 // set1) (
attrNames set2
);
# merging two attribute set concatenating the values of same attribute names
# eg { a = 7; } { a = [ 2 3 ]; } becomes { a = [ 7 2 3 ]; }
mergeAttrsConcatenateValues = mergeAttrsWithFunc ( a: b: (toList a) ++ (toList b) );
mergeAttrsConcatenateValues = mergeAttrsWithFunc (a: b: (toList a) ++ (toList b));
# merges attributes using //, if a name exists in both attributes
# an error will be triggered unless its listed in mergeLists
@@ -245,20 +319,31 @@ let
# merging buildPhase doesn't really make sense. The cases will be rare where appending /prefixing will fit your needs?
# in these cases the first buildPhase will override the second one
# ! deprecated, use mergeAttrByFunc instead
mergeAttrsNoOverride = { mergeLists ? ["buildInputs" "propagatedBuildInputs"],
overrideSnd ? [ "buildPhase" ]
}: attrs1: attrs2:
foldr (n: set:
setAttr set n ( if set ? ${n}
then # merge
if elem n mergeLists # attribute contains list, merge them by concatenating
then attrs2.${n} ++ attrs1.${n}
else if elem n overrideSnd
then attrs1.${n}
else throw "error mergeAttrsNoOverride, attribute ${n} given in both attributes - no merge func defined"
else attrs2.${n} # add attribute not existing in attr1
)) attrs1 (attrNames attrs2);
mergeAttrsNoOverride =
{
mergeLists ? [
"buildInputs"
"propagatedBuildInputs"
],
overrideSnd ? [ "buildPhase" ],
}:
attrs1: attrs2:
foldr (
n: set:
setAttr set n (
if set ? ${n} then # merge
if
elem n mergeLists # attribute contains list, merge them by concatenating
then
attrs2.${n} ++ attrs1.${n}
else if elem n overrideSnd then
attrs1.${n}
else
throw "error mergeAttrsNoOverride, attribute ${n} given in both attributes - no merge func defined"
else
attrs2.${n} # add attribute not existing in attr1
)
) attrs1 (attrNames attrs2);
# example usage:
# mergeAttrByFunc {
@@ -271,48 +356,83 @@ let
# { mergeAttrsBy = [...]; buildInputs = [ a b c d ]; }
# is used by defaultOverridableDelayableArgs and can be used when composing using
# foldArgs, composedArgsAndFun or applyAndFun. Example: composableDerivation in all-packages.nix
mergeAttrByFunc = x: y:
mergeAttrByFunc =
x: y:
let
mergeAttrBy2 = { mergeAttrBy = mergeAttrs; }
// (maybeAttr "mergeAttrBy" {} x)
// (maybeAttr "mergeAttrBy" {} y); in
foldr mergeAttrs {} [
x y
(mapAttrs ( a: v: # merge special names using given functions
if x ? ${a}
then if y ? ${a}
then v x.${a} y.${a} # both have attr, use merge func
else x.${a} # only x has attr
else y.${a} # only y has attr)
) (removeAttrs mergeAttrBy2
# don't merge attrs which are neither in x nor y
(filter (a: ! x ? ${a} && ! y ? ${a})
(attrNames mergeAttrBy2))
)
mergeAttrBy2 = {
mergeAttrBy = mergeAttrs;
} // (maybeAttr "mergeAttrBy" { } x) // (maybeAttr "mergeAttrBy" { } y);
in
foldr mergeAttrs { } [
x
y
(mapAttrs
(
a: v: # merge special names using given functions
if x ? ${a} then
if y ? ${a} then
v x.${a} y.${a} # both have attr, use merge func
else
x.${a} # only x has attr
else
y.${a} # only y has attr)
)
(
removeAttrs mergeAttrBy2
# don't merge attrs which are neither in x nor y
(filter (a: !x ? ${a} && !y ? ${a}) (attrNames mergeAttrBy2))
)
)
];
mergeAttrsByFuncDefaults = foldl mergeAttrByFunc { inherit mergeAttrBy; };
mergeAttrsByFuncDefaultsClean = list: removeAttrs (mergeAttrsByFuncDefaults list) ["mergeAttrBy"];
mergeAttrsByFuncDefaultsClean = list: removeAttrs (mergeAttrsByFuncDefaults list) [ "mergeAttrBy" ];
# sane defaults (same name as attr name so that inherit can be used)
mergeAttrBy = # { buildInputs = concatList; [...]; passthru = mergeAttr; [..]; }
listToAttrs (map (n: nameValuePair n concat)
[ "nativeBuildInputs" "buildInputs" "propagatedBuildInputs" "configureFlags" "prePhases" "postAll" "patches" ])
// listToAttrs (map (n: nameValuePair n mergeAttrs) [ "passthru" "meta" "cfg" "flags" ])
// listToAttrs (map (n: nameValuePair n (a: b: "${a}\n${b}") ) [ "preConfigure" "postInstall" ])
;
listToAttrs (
map (n: nameValuePair n concat) [
"nativeBuildInputs"
"buildInputs"
"propagatedBuildInputs"
"configureFlags"
"prePhases"
"postAll"
"patches"
]
)
// listToAttrs (
map (n: nameValuePair n mergeAttrs) [
"passthru"
"meta"
"cfg"
"flags"
]
)
// listToAttrs (
map (n: nameValuePair n (a: b: "${a}\n${b}")) [
"preConfigure"
"postInstall"
]
);
nixType = x:
if isAttrs x then
if x ? outPath then "derivation"
else "attrs"
else if isFunction x then "function"
else if isList x then "list"
else if x == true then "bool"
else if x == false then "bool"
else if x == null then "null"
else if isInt x then "int"
else "string";
nixType =
x:
if isAttrs x then
if x ? outPath then "derivation" else "attrs"
else if isFunction x then
"function"
else if isList x then
"list"
else if x == true then
"bool"
else if x == false then
"bool"
else if x == null then
"null"
else if isInt x then
"int"
else
"string";
/**
# Deprecated


@ -1,20 +1,18 @@
{ lib }:
let
inherit (lib)
genAttrs
isString
throwIfNot
;
inherit (lib) genAttrs isString throwIfNot;
showMaybeAttrPosPre = prefix: attrName: v:
let pos = builtins.unsafeGetAttrPos attrName v;
in if pos == null then "" else "${prefix}${pos.file}:${toString pos.line}:${toString pos.column}";
showMaybeAttrPosPre =
prefix: attrName: v:
let
pos = builtins.unsafeGetAttrPos attrName v;
in
if pos == null then "" else "${prefix}${pos.file}:${toString pos.line}:${toString pos.column}";
showMaybePackagePosPre = prefix: pkg:
if pkg?meta.position && isString pkg.meta.position
then "${prefix}${pkg.meta.position}"
else "";
showMaybePackagePosPre =
prefix: pkg:
if pkg ? meta.position && isString pkg.meta.position then "${prefix}${pkg.meta.position}" else "";
in
{
/*
@ -61,35 +59,33 @@ in
(lazyDerivation { inherit derivation }).passthru
(lazyDerivation { inherit derivation }).pythonPath
*/
lazyDerivation =
args@{
# The derivation to be wrapped.
derivation
, # Optional meta attribute.
derivation,
# Optional meta attribute.
#
# While this function is primarily about derivations, it can improve
# the `meta` package attribute, which is usually specified through
# `mkDerivation`.
meta ? null
, # Optional extra values to add to the returned attrset.
meta ? null,
# Optional extra values to add to the returned attrset.
#
# This can be used for adding package attributes, such as `tests`.
passthru ? { }
, # Optional list of assumed outputs. Default: ["out"]
passthru ? { },
# Optional list of assumed outputs. Default: ["out"]
#
# This must match the set of outputs that the returned derivation has.
# You must use this when the derivation has multiple outputs.
outputs ? [ "out" ]
outputs ? [ "out" ],
}:
let
# These checks are strict in `drv` and some `drv` attributes, but the
# attrset spine returned by lazyDerivation does not depend on it.
# Instead, the individual derivation attributes do depend on it.
checked =
throwIfNot (derivation.type or null == "derivation")
"lazyDerivation: input must be a derivation."
throwIfNot (derivation.type or null == "derivation") "lazyDerivation: input must be a derivation."
throwIfNot
# NOTE: Technically we could require our outputs to be a subset of the
# actual ones, or even leave them unchecked and fail on a lazy basis.
@ -139,7 +135,13 @@ in
# A fixed set of derivation values, so that `lazyDerivation` can return
# its attrset before evaluating `derivation`.
# This must only list attributes that are available on _all_ derivations.
inherit (checked) outPath outputName drvPath name system;
inherit (checked)
outPath
outputName
drvPath
name
system
;
inherit outputs;
# The meta attribute can either be taken from the derivation, or if the
@ -149,29 +151,31 @@ in
// genAttrs outputs (outputName: checked.${outputName})
// passthru;
/* Conditionally set a derivation attribute.
/*
Conditionally set a derivation attribute.
Because `mkDerivation` sets `__ignoreNulls = true`, a derivation
attribute set to `null` will not impact the derivation output hash.
Thus, this function passes through its `value` argument if the `cond`
is `true`, but returns `null` if not.
Because `mkDerivation` sets `__ignoreNulls = true`, a derivation
attribute set to `null` will not impact the derivation output hash.
Thus, this function passes through its `value` argument if the `cond`
is `true`, but returns `null` if not.
Type: optionalDrvAttr :: Bool -> a -> a | Null
Type: optionalDrvAttr :: Bool -> a -> a | Null
Example:
(stdenv.mkDerivation {
name = "foo";
x = optionalDrvAttr true 1;
y = optionalDrvAttr false 1;
}).drvPath == (stdenv.mkDerivation {
name = "foo";
x = 1;
}).drvPath
=> true
Example:
(stdenv.mkDerivation {
name = "foo";
x = optionalDrvAttr true 1;
y = optionalDrvAttr false 1;
}).drvPath == (stdenv.mkDerivation {
name = "foo";
x = 1;
}).drvPath
=> true
*/
optionalDrvAttr =
# Condition
cond:
# Attribute value
value: if cond then value else null;
value:
if cond then value else null;
}


@ -7,7 +7,11 @@
# easy proxy configuration. This is impure, but a fixed-output
# derivation like fetchurl is allowed to do so since its result is
# by definition pure.
"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
"http_proxy"
"https_proxy"
"ftp_proxy"
"all_proxy"
"no_proxy"
];
}


@ -57,7 +57,6 @@
If you need more file set functions,
see [this issue](https://github.com/NixOS/nixpkgs/issues/266356) to request it.
## Implicit coercion from paths to file sets {#sec-fileset-path-coercion}
All functions accepting file sets as arguments can also accept [paths](https://nixos.org/manual/nix/stable/language/values.html#type-path) as arguments.
@ -127,35 +126,20 @@ let
nixVersion
;
inherit (lib.lists)
elemAt
imap0
;
inherit (lib.lists) elemAt imap0;
inherit (lib.path)
hasPrefix
splitRoot
;
inherit (lib.path) hasPrefix splitRoot;
inherit (lib.strings)
isStringLike
versionOlder
;
inherit (lib.strings) isStringLike versionOlder;
inherit (lib.filesystem)
pathType
;
inherit (lib.filesystem) pathType;
inherit (lib.sources)
cleanSourceWith
;
inherit (lib.sources) cleanSourceWith;
inherit (lib.trivial)
isFunction
pipe
;
inherit (lib.trivial) isFunction pipe;
in {
in
{
/*
Create a file set from a path that may or may not exist:
@ -171,14 +155,12 @@ in {
*/
maybeMissing =
path:
if ! isPath path then
if !isPath path then
if isStringLike path then
throw ''
lib.fileset.maybeMissing: Argument ("${toString path}") is a string-like value, but it should be a path instead.''
throw ''lib.fileset.maybeMissing: Argument ("${toString path}") is a string-like value, but it should be a path instead.''
else
throw ''
lib.fileset.maybeMissing: Argument is of type ${typeOf path}, but it should be a path instead.''
else if ! pathExists path then
throw ''lib.fileset.maybeMissing: Argument is of type ${typeOf path}, but it should be a path instead.''
else if !pathExists path then
_emptyWithoutBase
else
_singleton path;
@ -209,10 +191,10 @@ in {
*/
trace =
/*
The file set to trace.
The file set to trace.
This argument can also be a path,
which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
This argument can also be a path,
which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
*/
fileset:
let
@ -220,9 +202,7 @@ in {
# and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
actualFileset = _coerce "lib.fileset.trace: Argument" fileset;
in
seq
(_printFileset actualFileset)
(x: x);
seq (_printFileset actualFileset) (x: x);
/*
Incrementally evaluate and trace a file set in a pretty way.
@ -256,10 +236,10 @@ in {
*/
traceVal =
/*
The file set to trace and return.
The file set to trace and return.
This argument can also be a path,
which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
This argument can also be a path,
which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
*/
fileset:
let
@ -267,8 +247,7 @@ in {
# and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
actualFileset = _coerce "lib.fileset.traceVal: Argument" fileset;
in
seq
(_printFileset actualFileset)
seq (_printFileset actualFileset)
# We could also return the original fileset argument here,
# but that would then duplicate work for consumers of the fileset, because then they have to coerce it again
actualFileset;
@ -340,31 +319,31 @@ in {
}
=> <error>
*/
toSource = {
/*
(required) The local directory [path](https://nixos.org/manual/nix/stable/language/values.html#type-path) that will correspond to the root of the resulting store path.
Paths in [strings](https://nixos.org/manual/nix/stable/language/values.html#type-string), including Nix store paths, cannot be passed as `root`.
`root` has to be a directory.
toSource =
{
/*
(required) The local directory [path](https://nixos.org/manual/nix/stable/language/values.html#type-path) that will correspond to the root of the resulting store path.
Paths in [strings](https://nixos.org/manual/nix/stable/language/values.html#type-string), including Nix store paths, cannot be passed as `root`.
`root` has to be a directory.
:::{.note}
Changing `root` only affects the directory structure of the resulting store path, it does not change which files are added to the store.
The only way to change which files get added to the store is by changing the `fileset` attribute.
:::
*/
root,
/*
(required) The file set whose files to import into the store.
File sets can be created using other functions in this library.
This argument can also be a path,
which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
:::{.note}
Changing `root` only affects the directory structure of the resulting store path, it does not change which files are added to the store.
The only way to change which files get added to the store is by changing the `fileset` attribute.
:::
*/
root,
/*
(required) The file set whose files to import into the store.
File sets can be created using other functions in this library.
This argument can also be a path,
which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
:::{.note}
If a directory does not recursively contain any file, it is omitted from the store path contents.
:::
*/
fileset,
}:
:::{.note}
If a directory does not recursively contain any file, it is omitted from the store path contents.
:::
*/
fileset,
}:
let
# We cannot rename matched attribute arguments, so let's work around it with an extra `let in` statement
filesetArg = fileset;
@ -375,7 +354,7 @@ in {
filesetFilesystemRoot = (splitRoot fileset._internalBase).root;
sourceFilter = _toSourceFilter fileset;
in
if ! isPath root then
if !isPath root then
if root ? _isLibCleanSourceWith then
throw ''
lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
@ -386,38 +365,34 @@ in {
lib.fileset.toSource: `root` (${toString root}) is a string-like value, but it should be a path instead.
Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
else
throw ''
lib.fileset.toSource: `root` is of type ${typeOf root}, but it should be a path instead.''
throw ''lib.fileset.toSource: `root` is of type ${typeOf root}, but it should be a path instead.''
# Currently all Nix paths have the same filesystem root, but this could change in the future.
# See also ../path/README.md
else if ! fileset._internalIsEmptyWithoutBase && rootFilesystemRoot != filesetFilesystemRoot then
else if !fileset._internalIsEmptyWithoutBase && rootFilesystemRoot != filesetFilesystemRoot then
throw ''
lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` (${toString root}):
`root`: Filesystem root is "${toString rootFilesystemRoot}"
`fileset`: Filesystem root is "${toString filesetFilesystemRoot}"
Different filesystem roots are not supported.''
else if ! pathExists root then
throw ''
lib.fileset.toSource: `root` (${toString root}) is a path that does not exist.''
else if !pathExists root then
throw ''lib.fileset.toSource: `root` (${toString root}) is a path that does not exist.''
else if pathType root != "directory" then
throw ''
lib.fileset.toSource: `root` (${toString root}) is a file, but it should be a directory instead. Potential solutions:
- If you want to import the file into the store _without_ a containing directory, use string interpolation or `builtins.path` instead of this function.
- If you want to import the file into the store _with_ a containing directory, set `root` to the containing directory, such as ${toString (dirOf root)}, and set `fileset` to the file path.''
else if ! fileset._internalIsEmptyWithoutBase && ! hasPrefix root fileset._internalBase then
else if !fileset._internalIsEmptyWithoutBase && !hasPrefix root fileset._internalBase then
throw ''
lib.fileset.toSource: `fileset` could contain files in ${toString fileset._internalBase}, which is not under the `root` (${toString root}). Potential solutions:
- Set `root` to ${toString fileset._internalBase} or any directory higher up. This changes the layout of the resulting store path.
- Set `fileset` to a file set that cannot contain files outside the `root` (${toString root}). This could change the files included in the result.''
else
seq sourceFilter
cleanSourceWith {
seq sourceFilter cleanSourceWith {
name = "source";
src = root;
filter = sourceFilter;
};
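For orientation, the typical call pattern for `toSource`, sketched with made-up paths (it mirrors the usage shown in the `fromSource` examples further down; `./Makefile` and `./src` are assumed to exist next to the expression):

```nix
lib.fileset.toSource {
  root = ./.;
  fileset = lib.fileset.unions [
    ./Makefile
    ./src
  ];
}
```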
/*
The list of file paths contained in the given file set.
@ -446,8 +421,7 @@ in {
# The file set whose file paths to return.
# This argument can also be a path,
# which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
fileset:
_toList (_coerce "lib.fileset.toList: Argument" fileset);
fileset: _toList (_coerce "lib.fileset.toList: Argument" fileset);
/*
The file set containing all files that are in either of two given file sets.
@ -479,8 +453,8 @@ in {
# This argument can also be a path,
# which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
fileset2:
_unionMany
(_coerceMany "lib.fileset.union" [
_unionMany (
_coerceMany "lib.fileset.union" [
{
context = "First argument";
value = fileset1;
@ -489,7 +463,8 @@ in {
context = "Second argument";
value = fileset2;
}
]);
]
);
/*
The file set containing all files that are in any of the given file sets.
@ -527,16 +502,17 @@ in {
# The elements can also be paths,
# which get [implicitly coerced to file sets](#sec-fileset-path-coercion).
filesets:
if ! isList filesets then
throw ''
lib.fileset.unions: Argument is of type ${typeOf filesets}, but it should be a list instead.''
if !isList filesets then
throw ''lib.fileset.unions: Argument is of type ${typeOf filesets}, but it should be a list instead.''
else
pipe filesets [
# Annotate the elements with context, used by _coerceMany for better errors
(imap0 (i: el: {
context = "Element ${toString i}";
value = el;
}))
(imap0 (
i: el: {
context = "Element ${toString i}";
value = el;
}
))
(_coerceMany "lib.fileset.unions")
_unionMany
];
@ -576,9 +552,7 @@ in {
}
];
in
_intersection
(elemAt filesets 0)
(elemAt filesets 1);
_intersection (elemAt filesets 0) (elemAt filesets 1);
/*
The file set containing all files from the first file set that are not in the second file set.
@ -628,9 +602,7 @@ in {
}
];
in
_difference
(elemAt filesets 0)
(elemAt filesets 1);
_difference (elemAt filesets 0) (elemAt filesets 1);
/*
Filter a file set to only contain files matching some predicate.
@ -682,66 +654,64 @@ in {
predicate:
# The path whose files to filter
path:
if ! isFunction predicate then
throw ''
lib.fileset.fileFilter: First argument is of type ${typeOf predicate}, but it should be a function instead.''
else if ! isPath path then
if !isFunction predicate then
throw ''lib.fileset.fileFilter: First argument is of type ${typeOf predicate}, but it should be a function instead.''
else if !isPath path then
if path._type or "" == "fileset" then
throw ''
lib.fileset.fileFilter: Second argument is a file set, but it should be a path instead.
If you need to filter files in a file set, use `intersection fileset (fileFilter pred ./.)` instead.''
else
throw ''
lib.fileset.fileFilter: Second argument is of type ${typeOf path}, but it should be a path instead.''
else if ! pathExists path then
throw ''
lib.fileset.fileFilter: Second argument (${toString path}) is a path that does not exist.''
throw ''lib.fileset.fileFilter: Second argument is of type ${typeOf path}, but it should be a path instead.''
else if !pathExists path then
throw ''lib.fileset.fileFilter: Second argument (${toString path}) is a path that does not exist.''
else
_fileFilter predicate path;
/*
Create a file set with the same files as a `lib.sources`-based value.
This does not import any of the files into the store.
Create a file set with the same files as a `lib.sources`-based value.
This does not import any of the files into the store.
This can be used to gradually migrate from `lib.sources`-based filtering to `lib.fileset`.
This can be used to gradually migrate from `lib.sources`-based filtering to `lib.fileset`.
A file set can be turned back into a source using [`toSource`](#function-library-lib.fileset.toSource).
A file set can be turned back into a source using [`toSource`](#function-library-lib.fileset.toSource).
:::{.note}
File sets cannot represent empty directories.
Turning the result of this function back into a source using `toSource` will therefore not preserve empty directories.
:::
:::{.note}
File sets cannot represent empty directories.
Turning the result of this function back into a source using `toSource` will therefore not preserve empty directories.
:::
Type:
fromSource :: SourceLike -> FileSet
Type:
fromSource :: SourceLike -> FileSet
Example:
# There's no cleanSource-like function for file sets yet,
# but we can just convert cleanSource to a file set and use it that way
toSource {
root = ./.;
fileset = fromSource (lib.sources.cleanSource ./.);
}
Example:
# There's no cleanSource-like function for file sets yet,
# but we can just convert cleanSource to a file set and use it that way
toSource {
root = ./.;
fileset = fromSource (lib.sources.cleanSource ./.);
}
# Keeping a previous sourceByRegex (which could be migrated to `lib.fileset.unions`),
# but removing a subdirectory using file set functions
difference
(fromSource (lib.sources.sourceByRegex ./. [
"^README\.md$"
# This regex includes everything in ./doc
"^doc(/.*)?$"
])
./doc/generated
# Keeping a previous sourceByRegex (which could be migrated to `lib.fileset.unions`),
# but removing a subdirectory using file set functions
difference
(fromSource (lib.sources.sourceByRegex ./. [
"^README\.md$"
# This regex includes everything in ./doc
"^doc(/.*)?$"
])
./doc/generated
# Use cleanSource, but limit it to only include ./Makefile and files under ./src
intersection
(fromSource (lib.sources.cleanSource ./.))
(unions [
./Makefile
./src
]);
# Use cleanSource, but limit it to only include ./Makefile and files under ./src
intersection
(fromSource (lib.sources.cleanSource ./.))
(unions [
./Makefile
./src
]);
*/
fromSource = source:
fromSource =
source:
let
# This function uses `._isLibCleanSourceWith`, `.origSrc` and `.filter`,
# which are technically internal to lib.sources,
@ -751,17 +721,15 @@ in {
path = if isFiltered then source.origSrc else source;
in
# We can only support sources created from paths
if ! isPath path then
if !isPath path then
if isStringLike path then
throw ''
lib.fileset.fromSource: The source origin of the argument is a string-like value ("${toString path}"), but it should be a path instead.
Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.''
else
throw ''
lib.fileset.fromSource: The source origin of the argument is of type ${typeOf path}, but it should be a path instead.''
else if ! pathExists path then
throw ''
lib.fileset.fromSource: The source origin (${toString path}) of the argument is a path that does not exist.''
throw ''lib.fileset.fromSource: The source origin of the argument is of type ${typeOf path}, but it should be a path instead.''
else if !pathExists path then
throw ''lib.fileset.fromSource: The source origin (${toString path}) of the argument is a path that does not exist.''
else if isFiltered then
_fromSourceFilter path source.filter
else
@ -789,12 +757,7 @@ in {
The [path](https://nixos.org/manual/nix/stable/language/values#type-path) to the working directory of a local Git repository.
This directory must contain a `.git` file or subdirectory.
*/
path:
_fromFetchGit
"gitTracked"
"argument"
path
{};
path: _fromFetchGit "gitTracked" "argument" path { };
/*
Create a file set containing all [Git-tracked files](https://git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository) in a repository.
@ -842,19 +805,14 @@ in {
This directory must contain a `.git` file or subdirectory.
*/
path:
if ! isBool recurseSubmodules then
if !isBool recurseSubmodules then
throw "lib.fileset.gitTrackedWith: Expected the attribute `recurseSubmodules` of the first argument to be a boolean, but it's a ${typeOf recurseSubmodules} instead."
else if recurseSubmodules && versionOlder nixVersion _fetchGitSubmodulesMinver then
throw "lib.fileset.gitTrackedWith: Setting the attribute `recurseSubmodules` to `true` is only supported for Nix version ${_fetchGitSubmodulesMinver} and after, but Nix version ${nixVersion} is used."
else
_fromFetchGit
"gitTrackedWith"
"second argument"
path
_fromFetchGit "gitTrackedWith" "second argument" path
# This is the only `fetchGit` parameter that makes sense in this context.
# We can't just pass `submodules = recurseSubmodules` here because
# this would fail for Nix versions that don't support `submodules`.
(lib.optionalAttrs recurseSubmodules {
submodules = true;
});
(lib.optionalAttrs recurseSubmodules { submodules = true; });
}


@ -1,4 +1,6 @@
{ lib ? import ../. }:
{
lib ? import ../.,
}:
let
inherit (builtins)
@ -23,9 +25,7 @@ let
zipAttrsWith
;
inherit (lib.filesystem)
pathType
;
inherit (lib.filesystem) pathType;
inherit (lib.lists)
all
@ -49,10 +49,7 @@ let
splitStorePath
;
inherit (lib.path.subpath)
components
join
;
inherit (lib.path.subpath) components join;
inherit (lib.strings)
isStringLike
@ -63,9 +60,7 @@ let
versionAtLeast
;
inherit (lib.trivial)
inPureEvalMode
;
inherit (lib.trivial) inPureEvalMode;
in
# Rare case of justified usage of rec:
# - This file is internal, so the return value doesn't matter, no need to make things overridable
@ -87,7 +82,8 @@ rec {
let
parts = splitRoot filesetV0._internalBase;
in
filesetV0 // {
filesetV0
// {
_internalVersion = 1;
_internalBaseRoot = parts.root;
_internalBaseComponents = components parts.subpath;
@ -98,15 +94,14 @@ rec {
(
filesetV1:
# This change is backwards compatible (but not forwards compatible, so we still need a new version)
filesetV1 // {
_internalVersion = 2;
}
filesetV1 // { _internalVersion = 2; }
)
# Convert v2 into v3: filesetTree's now have a representation for an empty file set without a base path
(
filesetV2:
filesetV2 // {
filesetV2
// {
# All v1 file sets are not the new empty file set
_internalIsEmptyWithoutBase = false;
_internalVersion = 3;
@ -136,7 +131,8 @@ rec {
# Create a fileset, see ./README.md#fileset
# Type: path -> filesetTree -> fileset
_create = base: tree:
_create =
base: tree:
let
# Decompose the base into its components
# See ../path/README.md for why we're not just using `toString`
@ -162,7 +158,8 @@ rec {
# Coerce a value to a fileset, erroring when the value cannot be coerced.
# The string gives the context for error messages.
# Type: String -> (fileset | Path) -> fileset
_coerce = context: value:
_coerce =
context: value:
if value._type or "" == "fileset" then
if value._internalVersion > _currentVersion then
throw ''
@ -173,12 +170,14 @@ rec {
else if value._internalVersion < _currentVersion then
let
# Get all the migration functions necessary to convert from the old to the current version
migrationsToApply = sublist value._internalVersion (_currentVersion - value._internalVersion) migrations;
migrationsToApply = sublist value._internalVersion (
_currentVersion - value._internalVersion
) migrations;
in
foldl' (value: migration: migration value) value migrationsToApply
else
value
else if ! isPath value then
else if !isPath value then
if value ? _isLibCleanSourceWith then
throw ''
${context} is a `lib.sources`-based value, but it should be a file set or a path instead.
@ -189,9 +188,8 @@ rec {
${context} ("${toString value}") is a string-like value, but it should be a file set or a path instead.
Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
else
throw ''
${context} is of type ${typeOf value}, but it should be a file set or a path instead.''
else if ! pathExists value then
throw ''${context} is of type ${typeOf value}, but it should be a file set or a path instead.''
else if !pathExists value then
throw ''
${context} (${toString value}) is a path that does not exist.
To create a file set from a path that may not exist, use `lib.fileset.maybeMissing`.''
@ -201,22 +199,21 @@ rec {
# Coerce many values to filesets, erroring when any value cannot be coerced,
# or if the filesystem root of the values doesn't match.
# Type: String -> [ { context :: String, value :: fileset | Path } ] -> [ fileset ]
_coerceMany = functionContext: list:
_coerceMany =
functionContext: list:
let
filesets = map ({ context, value }:
_coerce "${functionContext}: ${context}" value
) list;
filesets = map ({ context, value }: _coerce "${functionContext}: ${context}" value) list;
# Find the first value with a base, there may be none!
firstWithBase = findFirst (fileset: ! fileset._internalIsEmptyWithoutBase) null filesets;
firstWithBase = findFirst (fileset: !fileset._internalIsEmptyWithoutBase) null filesets;
# This value is only accessed if first != null
firstBaseRoot = firstWithBase._internalBaseRoot;
# Finds the first element with a filesystem root different than the first element, if any
differentIndex = findFirstIndex (fileset:
differentIndex = findFirstIndex (
fileset:
# The empty value without a base doesn't have a base path
! fileset._internalIsEmptyWithoutBase
&& firstBaseRoot != fileset._internalBaseRoot
!fileset._internalIsEmptyWithoutBase && firstBaseRoot != fileset._internalBaseRoot
) null filesets;
in
# Only evaluates `differentIndex` if there are any elements with a base
@ -231,7 +228,8 @@ rec {
# Create a file set from a path.
# Type: Path -> fileset
_singleton = path:
_singleton =
path:
let
type = pathType path;
in
@ -244,21 +242,18 @@ rec {
# "default.nix" = <type>;
# }
# See ./README.md#single-files
_create (dirOf path)
{
${baseNameOf path} = type;
};
_create (dirOf path) { ${baseNameOf path} = type; };
# Expand a directory representation to an equivalent one in attribute set form.
# All directory entries are included in the result.
# Type: Path -> filesetTree -> { <name> = filesetTree; }
_directoryEntries = path: value:
_directoryEntries =
path: value:
if value == "directory" then
readDir path
else
# Set all entries not present to null
mapAttrs (name: value: null) (readDir path)
// value;
mapAttrs (name: value: null) (readDir path) // value;
/*
A normalisation of a filesetTree suitable for filtering with `builtins.path`:
@ -271,7 +266,8 @@ rec {
Type: Path -> filesetTree -> filesetTree
*/
_normaliseTreeFilter = path: tree:
_normaliseTreeFilter =
path: tree:
if tree == "directory" || isAttrs tree then
let
entries = _directoryEntries path tree;
@ -301,7 +297,8 @@ rec {
Type: Path -> filesetTree -> filesetTree (with "emptyDir"'s)
*/
_normaliseTreeMinimal = path: tree:
_normaliseTreeMinimal =
path: tree:
if tree == "directory" || isAttrs tree then
let
entries = _directoryEntries path tree;
@ -334,9 +331,11 @@ rec {
# Trace a filesetTree in a pretty way when the resulting value is evaluated.
# This can handle both normal filesetTree's, and ones returned from _normaliseTreeMinimal
# Type: Path -> filesetTree (with "emptyDir"'s) -> Null
_printMinimalTree = base: tree:
_printMinimalTree =
base: tree:
let
treeSuffix = tree:
treeSuffix =
tree:
if isAttrs tree then
""
else if tree == "directory" then
@ -349,14 +348,15 @@ rec {
" (${tree})";
# Only for attribute set trees
traceTreeAttrs = prevLine: indent: tree:
foldl' (prevLine: name:
traceTreeAttrs =
prevLine: indent: tree:
foldl' (
prevLine: name:
let
subtree = tree.${name};
# Evaluating this prints the line for this subtree
thisLine =
trace "${indent}- ${name}${treeSuffix subtree}" prevLine;
thisLine = trace "${indent}- ${name}${treeSuffix subtree}" prevLine;
in
if subtree == null || subtree == "emptyDir" then
# Don't print anything at all if this subtree is empty
@ -378,24 +378,24 @@ rec {
else
trace "${toString base}${treeSuffix tree}" null;
in
if isAttrs tree then
traceTreeAttrs firstLine "" tree
else
firstLine;
if isAttrs tree then traceTreeAttrs firstLine "" tree else firstLine;
# Pretty-print a file set in a pretty way when the resulting value is evaluated
# Type: fileset -> Null
_printFileset = fileset:
_printFileset =
fileset:
if fileset._internalIsEmptyWithoutBase then
trace "(empty)" null
else
_printMinimalTree fileset._internalBase
(_normaliseTreeMinimal fileset._internalBase fileset._internalTree);
_printMinimalTree fileset._internalBase (
_normaliseTreeMinimal fileset._internalBase fileset._internalTree
);
# Turn a fileset into a source filter function suitable for `builtins.path`
# Only directories recursively containing at least one files are recursed into
# Type: fileset -> (String -> String -> Bool)
_toSourceFilter = fileset:
_toSourceFilter =
fileset:
let
# Simplify the tree, necessary to make sure all empty directories are null
# which has the effect that they aren't included in the result
@ -403,7 +403,7 @@ rec {
# The base path as a string with a single trailing slash
baseString =
if fileset._internalBaseComponents == [] then
if fileset._internalBaseComponents == [ ] then
# Need to handle the filesystem root specially
"/"
else
@ -414,9 +414,11 @@ rec {
# Check whether a list of path components under the base path exists in the tree.
# This function is called often, so it should be fast.
# Type: [ String ] -> Bool
inTree = components:
inTree =
components:
let
recurse = index: localTree:
recurse =
index: localTree:
if isAttrs localTree then
# We have an attribute set, meaning this is a directory with at least one file
if index >= length components then
@ -431,7 +433,8 @@ rec {
# If it's not an attribute set it can only be either null (in which case it's not included)
# or a string ("directory" or "regular", etc.) in which case it's included
localTree != null;
in recurse 0 tree;
in
recurse 0 tree;
# Filter suited when there's no files
empty = _: _: false;
@ -483,16 +486,14 @@ rec {
# Special case because the code below assumes that the _internalBase is always included in the result
# which shouldn't be done when we have no files at all in the base
# This also forces the tree before returning the filter, leads to earlier error messages
if fileset._internalIsEmptyWithoutBase || tree == null then
empty
else
nonEmpty;
if fileset._internalIsEmptyWithoutBase || tree == null then empty else nonEmpty;
# Turn a builtins.filterSource-based source filter on a root path into a file set
# containing only files included by the filter.
# The filter is lazily called as necessary to determine whether paths are included
# Type: Path -> (String -> String -> Bool) -> fileset
_fromSourceFilter = root: sourceFilter:
_fromSourceFilter =
root: sourceFilter:
let
# During the recursion we need to track both:
# - The path value such that we can safely call `readDir` on it
@ -503,9 +504,10 @@ rec {
# which is a fairly expensive operation
# Create a file set from a directory entry
fromDirEntry = path: pathString: type:
fromDirEntry =
path: pathString: type:
# The filter needs to run on the path as a string
if ! sourceFilter pathString type then
if !sourceFilter pathString type then
null
else if type == "directory" then
fromDir path pathString
@ -513,7 +515,8 @@ rec {
type;
# Create a file set from a directory
fromDir = path: pathString:
fromDir =
path: pathString:
mapAttrs
# This looks a bit funny, but we need both the path-based and the path string-based values
(name: fromDirEntry (path + "/${name}") (pathString + "/${name}"))
@ -536,20 +539,17 @@ rec {
else
# Direct files are always included by builtins.path without calling the filter
# But we need to lift up the base path to its parent to satisfy the base path invariant
_create (dirOf root)
{
${baseNameOf root} = rootPathType;
};
_create (dirOf root) { ${baseNameOf root} = rootPathType; };
# Turns a file set into the list of file paths it includes.
# Type: fileset -> [ Path ]
_toList = fileset:
_toList =
fileset:
let
recurse = path: tree:
recurse =
path: tree:
if isAttrs tree then
concatLists (mapAttrsToList (name: value:
recurse (path + "/${name}") value
) tree)
concatLists (mapAttrsToList (name: value: recurse (path + "/${name}") value) tree)
else if tree == "directory" then
recurse path (readDir path)
else if tree == null then
@ -565,9 +565,11 @@ rec {
# Transforms the filesetTree of a file set to a shorter base path, e.g.
# _shortenTreeBase [ "foo" ] (_create /foo/bar null)
# => { bar = null; }
_shortenTreeBase = targetBaseComponents: fileset:
_shortenTreeBase =
targetBaseComponents: fileset:
let
recurse = index:
recurse =
index:
# If we haven't reached the required depth yet
if index < length fileset._internalBaseComponents then
# Create an attribute set and recurse as the value, this can be lazily evaluated this way
@ -581,9 +583,11 @@ rec {
# Transforms the filesetTree of a file set to a longer base path, e.g.
# _lengthenTreeBase [ "foo" "bar" ] (_create /foo { bar.baz = "regular"; })
# => { baz = "regular"; }
_lengthenTreeBase = targetBaseComponents: fileset:
_lengthenTreeBase =
targetBaseComponents: fileset:
let
recurse = index: tree:
recurse =
index: tree:
# If the filesetTree is an attribute set and we haven't reached the required depth yet
if isAttrs tree && index < length targetBaseComponents then
# Recurse with the tree under the right component (which might not exist)
@ -602,10 +606,11 @@ rec {
# Computes the union of a list of filesets.
# The filesets must already be coerced and validated to be in the same filesystem root
# Type: [ Fileset ] -> Fileset
_unionMany = filesets:
_unionMany =
filesets:
let
# All filesets that have a base, aka not the ones that are the empty value without a base
filesetsWithBase = filter (fileset: ! fileset._internalIsEmptyWithoutBase) filesets;
filesetsWithBase = filter (fileset: !fileset._internalIsEmptyWithoutBase) filesets;
# The first fileset that has a base.
# This value is only accessed if there are any at all.
@ -618,13 +623,13 @@ rec {
# A list of path components common to all base paths.
# Note that commonPrefix can only be fully evaluated,
# so this cannot cause a stack overflow due to a build-up of unevaluated thunks.
commonBaseComponents = foldl'
(components: el: commonPrefix components el._internalBaseComponents)
firstWithBase._internalBaseComponents
# We could also not do the `tail` here to avoid a list allocation,
# but then we'd have to pay for a potentially expensive
# but unnecessary `commonPrefix` call
(tail filesetsWithBase);
commonBaseComponents =
foldl' (components: el: commonPrefix components el._internalBaseComponents)
firstWithBase._internalBaseComponents
# We could also not do the `tail` here to avoid a list allocation,
# but then we'd have to pay for a potentially expensive
# but unnecessary `commonPrefix` call
(tail filesetsWithBase);
# The common base path assembled from a filesystem root and the common components
commonBase = append firstWithBase._internalBaseRoot (join commonBaseComponents);
@ -643,15 +648,13 @@ rec {
resultTree = _unionTrees trees;
in
# If there's no values with a base, we have no files
if filesetsWithBase == [ ] then
_emptyWithoutBase
else
_create commonBase resultTree;
if filesetsWithBase == [ ] then _emptyWithoutBase else _create commonBase resultTree;
# The union of multiple filesetTree's with the same base path.
# Later elements are only evaluated if necessary.
# Type: [ filesetTree ] -> filesetTree
_unionTrees = trees:
_unionTrees =
trees:
let
stringIndex = findFirstIndex isString null trees;
withoutNull = filter (tree: tree != null) trees;
@ -671,18 +674,15 @@ rec {
# Computes the intersection of a list of filesets.
# The filesets must already be coerced and validated to be in the same filesystem root
# Type: Fileset -> Fileset -> Fileset
_intersection = fileset1: fileset2:
_intersection =
fileset1: fileset2:
let
# The common base components prefix, e.g.
# (/foo/bar, /foo/bar/baz) -> /foo/bar
# (/foo/bar, /foo/baz) -> /foo
commonBaseComponentsLength =
# TODO: Have a `lib.lists.commonPrefixLength` function such that we don't need the list allocation from commonPrefix here
length (
commonPrefix
fileset1._internalBaseComponents
fileset2._internalBaseComponents
);
length (commonPrefix fileset1._internalBaseComponents fileset2._internalBaseComponents);
# To be able to intersect filesetTree's together, they need to have the same base path.
# Base paths can be intersected by taking the longest one (if any)
@ -725,12 +725,11 @@ rec {
# The intersection of two filesetTree's with the same base path
# The second element is only evaluated as much as necessary.
# Type: filesetTree -> filesetTree -> filesetTree
_intersectTree = lhs: rhs:
_intersectTree =
lhs: rhs:
if isAttrs lhs && isAttrs rhs then
# Both sides are attribute sets, we can recurse for the attributes existing on both sides
mapAttrs
(name: _intersectTree lhs.${name})
(builtins.intersectAttrs lhs rhs)
mapAttrs (name: _intersectTree lhs.${name}) (builtins.intersectAttrs lhs rhs)
else if lhs == null || isString rhs then
# If the lhs is null, the result should also be null
# And if the rhs is the identity element
@ -743,18 +742,15 @@ rec {
# Compute the set difference between two file sets.
# The filesets must already be coerced and validated to be in the same filesystem root.
# Type: Fileset -> Fileset -> Fileset
_difference = positive: negative:
_difference =
positive: negative:
let
# The common base components prefix, e.g.
# (/foo/bar, /foo/bar/baz) -> /foo/bar
# (/foo/bar, /foo/baz) -> /foo
commonBaseComponentsLength =
# TODO: Have a `lib.lists.commonPrefixLength` function such that we don't need the list allocation from commonPrefix here
length (
commonPrefix
positive._internalBaseComponents
negative._internalBaseComponents
);
length (commonPrefix positive._internalBaseComponents negative._internalBaseComponents);
# We need filesetTree's with the same base to be able to compute the difference between them
# This here is the filesetTree from the negative file set, but for a base path that matches the positive file set.
@ -786,9 +782,7 @@ rec {
null;
resultingTree =
_differenceTree
positive._internalBase
positive._internalTree
_differenceTree positive._internalBase positive._internalTree
negativeTreeWithPositiveBase;
in
# If the first file set is empty, we can never have any files in the result
@ -805,7 +799,8 @@ rec {
# Computes the set difference of two filesetTree's
# Type: Path -> filesetTree -> filesetTree
_differenceTree = path: lhs: rhs:
_differenceTree =
path: lhs: rhs:
# If the lhs doesn't have any files, or the right hand side includes all files
if lhs == null || isString rhs then
# The result will always be empty
@ -816,17 +811,19 @@ rec {
lhs
else
# Otherwise we always have two attribute sets to recurse into
mapAttrs (name: lhsValue:
_differenceTree (path + "/${name}") lhsValue (rhs.${name} or null)
) (_directoryEntries path lhs);
mapAttrs (name: lhsValue: _differenceTree (path + "/${name}") lhsValue (rhs.${name} or null)) (
_directoryEntries path lhs
);
# Filters all files in a path based on a predicate
# Type: ({ name, type, ... } -> Bool) -> Path -> FileSet
_fileFilter = predicate: root:
_fileFilter =
predicate: root:
let
# Check the predicate for a single file
# Type: String -> String -> filesetTree
fromFile = name: type:
fromFile =
name: type:
if
predicate {
inherit name type;
@ -834,7 +831,8 @@ rec {
# To ensure forwards compatibility with more arguments being added in the future,
# adding an attribute which can't be deconstructed :)
"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you're using `{ name, file, hasExt }:`, use `{ name, file, hasExt, ... }:` instead." = null;
"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you're using `{ name, file, hasExt }:`, use `{ name, file, hasExt, ... }:` instead." =
null;
}
then
type
@ -843,12 +841,10 @@ rec {
# Check the predicate for all files in a directory
# Type: Path -> filesetTree
fromDir = path:
mapAttrs (name: type:
if type == "directory" then
fromDir (path + "/${name}")
else
fromFile name type
fromDir =
path:
mapAttrs (
name: type: if type == "directory" then fromDir (path + "/${name}") else fromFile name type
) (readDir path);
rootType = pathType root;
@ -857,10 +853,7 @@ rec {
_create root (fromDir root)
else
# Single files are turned into a directory containing that file or nothing.
_create (dirOf root) {
${baseNameOf root} =
fromFile (baseNameOf root) rootType;
};
_create (dirOf root) { ${baseNameOf root} = fromFile (baseNameOf root) rootType; };
# Support for `builtins.fetchGit` with `submodules = true` was introduced in 2.4
# https://github.com/NixOS/nix/commit/55cefd41d63368d4286568e2956afd535cb44018
@ -876,22 +869,21 @@ rec {
# - The store path must not include files that don't exist in the respective local path.
#
# Type: Path -> String -> FileSet
_mirrorStorePath = localPath: storePath:
_mirrorStorePath =
localPath: storePath:
let
recurse = focusedStorePath:
mapAttrs (name: type:
if type == "directory" then
recurse (focusedStorePath + "/${name}")
else
type
recurse =
focusedStorePath:
mapAttrs (
name: type: if type == "directory" then recurse (focusedStorePath + "/${name}") else type
) (builtins.readDir focusedStorePath);
in
_create localPath
(recurse storePath);
_create localPath (recurse storePath);
# Create a file set from the files included in the result of a fetchGit call
# Type: String -> String -> Path -> Attrs -> FileSet
_fromFetchGit = function: argument: path: extraFetchGitAttrs:
_fromFetchGit =
function: argument: path: extraFetchGitAttrs:
let
# The code path for when isStorePath is true
tryStorePath =
@ -922,31 +914,33 @@ rec {
# With the [lazy trees PR](https://github.com/NixOS/nix/pull/6530),
# the unnecessary import could be avoided.
# However a simpler alternative still would be [a builtins.gitLsFiles](https://github.com/NixOS/nix/issues/2944).
fetchResult = fetchGit ({
url = path;
}
# In older Nix versions, repositories were always assumed to be deep clones, which made `fetchGit` fail for shallow clones
# For newer versions this was fixed, but the `shallow` flag is required.
# The only behavioral difference is that for shallow clones, `fetchGit` doesn't return a `revCount`,
# which we don't need here, so it's fine to always pass it.
fetchResult = fetchGit (
{
url = path;
}
# In older Nix versions, repositories were always assumed to be deep clones, which made `fetchGit` fail for shallow clones
# For newer versions this was fixed, but the `shallow` flag is required.
# The only behavioral difference is that for shallow clones, `fetchGit` doesn't return a `revCount`,
# which we don't need here, so it's fine to always pass it.
# Unfortunately this means older Nix versions get a poor error message for shallow repositories, and there's no good way to improve that.
# Checking for `.git/shallow` doesn't seem worth it, especially since that's more of an implementation detail,
# and would also require more code to handle worktrees where `.git` is a file.
// optionalAttrs (versionAtLeast nixVersion _fetchGitShallowMinver) { shallow = true; }
// extraFetchGitAttrs);
# Unfortunately this means older Nix versions get a poor error message for shallow repositories, and there's no good way to improve that.
# Checking for `.git/shallow` doesn't seem worth it, especially since that's more of an implementation detail,
# and would also require more code to handle worktrees where `.git` is a file.
// optionalAttrs (versionAtLeast nixVersion _fetchGitShallowMinver) { shallow = true; }
// extraFetchGitAttrs
);
in
# We can identify local working directories by checking for .git,
# see https://git-scm.com/docs/gitrepository-layout#_description.
# Note that `builtins.fetchGit` _does_ work for bare repositories (where there's no `.git`),
# even though `git ls-files` wouldn't return any files in that case.
if ! pathExists (path + "/.git") then
if !pathExists (path + "/.git") then
throw "lib.fileset.${function}: Expected the ${argument} (${toString path}) to point to a local working tree of a Git repository, but it's not."
else
_mirrorStorePath path fetchResult.outPath;
in
if ! isPath path then
if !isPath path then
throw "lib.fileset.${function}: Expected the ${argument} to be a path, but it's a ${typeOf path} instead."
else if pathType path != "directory" then
throw "lib.fileset.${function}: Expected the ${argument} (${toString path}) to be a directory, but it's a file instead."


@ -8,18 +8,21 @@
# }
self: super: {
path = super.path // {
splitRoot = path:
splitRoot =
path:
let
parts = super.path.splitRoot path;
components = self.path.subpath.components parts.subpath;
count = self.length components;
rootIndex = count - self.lists.findFirstIndex
(component: component == "mock-root")
(self.length components)
(self.reverseList components);
rootIndex =
count
- self.lists.findFirstIndex (component: component == "mock-root") (self.length components) (
self.reverseList components
);
root = self.path.append parts.root (self.path.subpath.join (self.take rootIndex components));
subpath = self.path.subpath.join (self.drop rootIndex components);
in {
in
{
inherit root subpath;
};
};


@ -6,25 +6,13 @@
# Tested in lib/tests/filesystem.sh
let
inherit (builtins)
readDir
pathExists
toString
;
inherit (builtins) readDir pathExists toString;
inherit (lib.attrsets)
mapAttrs'
filterAttrs
;
inherit (lib.attrsets) mapAttrs' filterAttrs;
inherit (lib.filesystem)
pathType
;
inherit (lib.filesystem) pathType;
inherit (lib.strings)
hasSuffix
removeSuffix
;
inherit (lib.strings) hasSuffix removeSuffix;
in
{
@ -46,17 +34,21 @@ in
pathType =
builtins.readFileType or
# Nix <2.14 compatibility shim
(path:
if ! pathExists path
(
path:
if
!pathExists path
# Fail irrecoverably to mimic the historic behavior of this function and
# the new builtins.readFileType
then abort "lib.filesystem.pathType: Path ${toString path} does not exist."
then
abort "lib.filesystem.pathType: Path ${toString path} does not exist."
# The filesystem root is the only path where `dirOf / == /` and
# `baseNameOf /` is not valid. We can detect this and directly return
# "directory", since we know the filesystem root can't be anything else.
else if dirOf path == path
then "directory"
else (readDir (dirOf path)).${baseNameOf path}
else if dirOf path == path then
"directory"
else
(readDir (dirOf path)).${baseNameOf path}
);
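A small usage sketch of the shim above (hypothetical paths; it assumes a regular `default.nix` file sits next to the expression):

```nix
map lib.filesystem.pathType [ ./. ./default.nix ]
# => [ "directory" "regular" ]
```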
/*
@ -75,8 +67,7 @@ in
pathIsDirectory /some/file.nix
=> false
*/
pathIsDirectory = path:
pathExists path && pathType path == "directory";
pathIsDirectory = path: pathExists path && pathType path == "directory";
/*
Whether a path exists and is a regular file, meaning not a symlink or any other special file type.
@ -94,8 +85,7 @@ in
pathIsRegularFile /some/file.nix
=> true
*/
pathIsRegularFile = path:
pathExists path && pathType path == "regular";
pathIsRegularFile = path: pathExists path && pathType path == "regular";
/*
A map of all haskell packages defined in the given path,
@ -107,19 +97,20 @@ in
haskellPathsInDir =
# The directory within to search
root:
let # Files in the root
root-files = builtins.attrNames (builtins.readDir root);
# Files with their full paths
root-files-with-paths =
map (file:
{ name = file; value = root + "/${file}"; }
) root-files;
# Subdirectories of the root with a cabal file.
cabal-subdirs =
builtins.filter ({ name, value }:
builtins.pathExists (value + "/${name}.cabal")
) root-files-with-paths;
in builtins.listToAttrs cabal-subdirs;
let
# Files in the root
root-files = builtins.attrNames (builtins.readDir root);
# Files with their full paths
root-files-with-paths = map (file: {
name = file;
value = root + "/${file}";
}) root-files;
# Subdirectories of the root with a cabal file.
cabal-subdirs = builtins.filter (
{ name, value }: builtins.pathExists (value + "/${name}.cabal")
) root-files-with-paths;
in
builtins.listToAttrs cabal-subdirs;
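A hypothetical layout to illustrate the filter above: with `./pkgs/foo/foo.cabal`, `./pkgs/bar/bar.cabal` and a stray `./pkgs/notes.md`, only the subdirectories whose name matches their cabal file are kept.

```nix
lib.filesystem.haskellPathsInDir ./pkgs
# => { bar = ./pkgs/bar; foo = ./pkgs/foo; }
```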
/*
Find the first directory containing a file matching 'pattern'
upward from a given 'file'.
@ -132,23 +123,28 @@ in
pattern:
# The file to start searching upward from
file:
let go = path:
let files = builtins.attrNames (builtins.readDir path);
matches = builtins.filter (match: match != null)
(map (builtins.match pattern) files);
in
if builtins.length matches != 0
then { inherit path matches; }
else if path == /.
then null
else go (dirOf path);
parent = dirOf file;
isDir =
let base = baseNameOf file;
type = (builtins.readDir parent).${base} or null;
in file == /. || type == "directory";
in go (if isDir then file else parent);
let
go =
path:
let
files = builtins.attrNames (builtins.readDir path);
matches = builtins.filter (match: match != null) (map (builtins.match pattern) files);
in
if builtins.length matches != 0 then
{ inherit path matches; }
else if path == /. then
null
else
go (dirOf path);
parent = dirOf file;
isDir =
let
base = baseNameOf file;
type = (builtins.readDir parent).${base} or null;
in
file == /. || type == "directory";
in
go (if isDir then file else parent);
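A made-up call to illustrate the upward search: starting from a file deep inside a project that has a `flake.nix` at its root. `matches` holds the `builtins.match` result for each matching file name (here a single match with no capture groups); `null` is returned when no ancestor matches.

```nix
lib.filesystem.locateDominatingFile "flake\\.nix" /home/user/project/src/lib/foo.nix
# => { path = /home/user/project; matches = [ [ ] ]; }
```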
/*
Given a directory, return a flattened list of all files within it recursively.
@ -158,12 +154,15 @@ in
listFilesRecursive =
# The path to recursively list
dir:
lib.flatten (lib.mapAttrsToList (name: type:
if type == "directory" then
lib.filesystem.listFilesRecursive (dir + "/${name}")
else
dir + "/${name}"
) (builtins.readDir dir));
lib.flatten (
lib.mapAttrsToList (
name: type:
if type == "directory" then
lib.filesystem.listFilesRecursive (dir + "/${name}")
else
dir + "/${name}"
) (builtins.readDir dir)
);
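With a made-up tree containing `./data/a.txt` and `./data/sub/b.txt`, the result is a flat list of path values (shown relative here for brevity):

```nix
lib.filesystem.listFilesRecursive ./data
# => [ ./data/a.txt ./data/sub/b.txt ]
```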
/*
Transform a directory tree containing package files suitable for
@ -263,49 +262,44 @@ in
let
# Determine if a directory entry from `readDir` indicates a package or
# directory of packages.
directoryEntryIsPackage = basename: type:
type == "directory" || hasSuffix ".nix" basename;
directoryEntryIsPackage = basename: type: type == "directory" || hasSuffix ".nix" basename;
# List directory entries that indicate packages in the given `path`.
packageDirectoryEntries = path:
filterAttrs directoryEntryIsPackage (readDir path);
packageDirectoryEntries = path: filterAttrs directoryEntryIsPackage (readDir path);
# Transform a directory entry (a `basename` and `type` pair) into a
# package.
directoryEntryToAttrPair = subdirectory: basename: type:
directoryEntryToAttrPair =
subdirectory: basename: type:
let
path = subdirectory + "/${basename}";
in
if type == "regular"
then
{
name = removeSuffix ".nix" basename;
value = callPackage path { };
}
if type == "regular" then
{
name = removeSuffix ".nix" basename;
value = callPackage path { };
}
else if type == "directory" then
{
name = basename;
value = packagesFromDirectory path;
}
else
if type == "directory"
then
{
name = basename;
value = packagesFromDirectory path;
}
else
throw
''
throw ''
lib.filesystem.packagesFromDirectoryRecursive: Unsupported file type ${type} at path ${toString subdirectory}
'';
# Transform a directory into a package (if there's a `package.nix`) or
# set of packages (otherwise).
packagesFromDirectory = path:
packagesFromDirectory =
path:
let
defaultPackagePath = path + "/package.nix";
in
if pathExists defaultPackagePath
then callPackage defaultPackagePath { }
else mapAttrs'
(directoryEntryToAttrPair path)
(packageDirectoryEntries path);
if pathExists defaultPackagePath then
callPackage defaultPackagePath { }
else
mapAttrs' (directoryEntryToAttrPair path) (packageDirectoryEntries path);
in
packagesFromDirectory directory;
}


@ -72,7 +72,12 @@ rec {
fix (self: [ 1 2 (elemAt self 0 + elemAt self 1) ])
=> [ 1 2 3 ]
*/
fix = f: let x = f x; in x;
fix =
f:
let
x = f x;
in
x;
/*
A variant of `fix` that records the original recursive attribute set in the
@ -81,7 +86,14 @@ rec {
This is useful in combination with the `extends` function to
implement deep overriding.
*/
fix' = f: let x = f x // { __unfix__ = f; }; in x;
fix' =
f:
let
x = f x // {
__unfix__ = f;
};
in
x;
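A sketch of what `fix'` adds over `fix`: the result is the ordinary fixpoint, plus the original function stored under `__unfix__`, which is what `makeExtensible` builds on.

```nix
let r = lib.fix' (self: { a = 1; b = self.a + 1; });
in { inherit (r) a b; hasUnfix = r ? __unfix__; }
# => { a = 1; b = 2; hasUnfix = true; }
```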
/*
Return the fixpoint that `f` converges to when called iteratively, starting
@ -94,13 +106,12 @@ rec {
Type: (a -> a) -> a -> a
*/
converge = f: x:
converge =
f: x:
let
x' = f x;
in
if x' == x
then x
else converge f x';
if x' == x then x else converge f x';
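A minimal sketch: integer halving reaches a fixed point at 0, so `converge` stops there.

```nix
lib.converge (x: x / 2) 100
# => 0   (100 -> 50 -> 25 -> 12 -> 6 -> 3 -> 1 -> 0)
```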
/*
Extend a function using an overlay.
@ -109,7 +120,6 @@ rec {
A fixed-point function is a function which is intended to be evaluated by passing the result of itself as the argument.
This is possible due to Nix's lazy evaluation.
A fixed-point function returning an attribute set has the form
```nix
@ -259,9 +269,11 @@ rec {
*/
composeExtensions =
f: g: final: prev:
let fApplied = f final prev;
prev' = prev // fApplied;
in fApplied // g final prev';
let
fApplied = f final prev;
prev' = prev // fApplied;
in
fApplied // g final prev';
/*
Compose several extending functions of the type expected by 'extends' into
@ -273,8 +285,7 @@ rec {
^final ^prev ^overrides ^final ^prev ^overrides
```
*/
composeManyExtensions =
lib.foldr (x: y: composeExtensions x y) (final: prev: {});
composeManyExtensions = lib.foldr (x: y: composeExtensions x y) (final: prev: { });
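A small sketch composing two made-up overlays: the second overlay sees the first one's addition through `prev`, and the composed overlay is applied with `extends`/`fix` as usual (`lib` refers to this library as seen by a consumer).

```nix
let
  overlayA = final: prev: { greeting = "hello"; };
  overlayB = final: prev: { sentence = "${prev.greeting}, world"; };
in
lib.fix (lib.extends (lib.composeManyExtensions [ overlayA overlayB ]) (self: { }))
# => { greeting = "hello"; sentence = "hello, world"; }
```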
/*
Create an overridable, recursive attribute set. For example:
@ -302,8 +313,13 @@ rec {
Same as `makeExtensible` but the name of the extending attribute is
customized.
*/
makeExtensibleWithCustomName = extenderName: rattrs:
fix' (self: (rattrs self) // {
${extenderName} = f: makeExtensibleWithCustomName extenderName (extends f rattrs);
});
makeExtensibleWithCustomName =
extenderName: rattrs:
fix' (
self:
(rattrs self)
// {
${extenderName} = f: makeExtensibleWithCustomName extenderName (extends f rattrs);
}
);
}
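A sketch with a made-up extension-point name: the result behaves like a `makeExtensible` set, but is extended through `overrideLib` instead of `extend`.

```nix
let
  base = lib.makeExtensibleWithCustomName "overrideLib" (self: {
    greeting = "hello";
    sentence = "${self.greeting}, world";
  });
  extended = base.overrideLib (final: prev: { greeting = "hi"; });
in
extended.sentence
# => "hi, world"
```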


@ -13,8 +13,9 @@ finalLib: prevLib: # lib overlay
{
trivial = prevLib.trivial // {
versionSuffix =
".${finalLib.substring 0 8 (self.lastModifiedDate or "19700101")}.${self.shortRev or "dirty"}";
versionSuffix = ".${
finalLib.substring 0 8 (self.lastModifiedDate or "19700101")
}.${self.shortRev or "dirty"}";
revisionWithDefault = default: self.rev or default;
};
}


@ -1,10 +1,12 @@
{
description = "Library of low-level helper functions for nix expressions.";
outputs = { self }:
outputs =
{ self }:
let
lib0 = import ./.;
in {
in
{
lib = lib0.extend (import ./flake-version-info.nix self);
};
}

File diff suppressed because it is too large.

View file

@ -14,7 +14,12 @@
let
inherit (lib)
concatMapStringsSep concatStrings escape head replaceStrings;
concatMapStringsSep
concatStrings
escape
head
replaceStrings
;
mkPrimitive = t: v: {
_type = "gvariant";
@ -41,10 +46,11 @@ let
variant = "v";
};
/* Check if a value is a GVariant value
/*
Check if a value is a GVariant value
Type:
isGVariant :: Any -> Bool
Type:
isGVariant :: Any -> Bool
*/
isGVariant = v: v._type or "" == "gvariant";
@ -53,13 +59,15 @@ rec {
inherit type isGVariant;
/* Returns the GVariant value that most closely matches the given Nix value.
If no GVariant value can be found unambiguously, an error is thrown.
/*
Returns the GVariant value that most closely matches the given Nix value.
If no GVariant value can be found unambiguously, an error is thrown.
Type:
mkValue :: Any -> gvariant
Type:
mkValue :: Any -> gvariant
*/
mkValue = v:
mkValue =
v:
if builtins.isBool v then
mkBoolean v
else if builtins.isFloat v then
@ -73,71 +81,75 @@ rec {
else
throw "The GVariant type of ${v} can't be inferred.";
/* Returns the GVariant array from the given type of the elements and a Nix list.
/*
Returns the GVariant array from the given type of the elements and a Nix list.
Type:
mkArray :: [Any] -> gvariant
Type:
mkArray :: [Any] -> gvariant
Example:
# Creating a string array
lib.gvariant.mkArray [ "a" "b" "c" ]
Example:
# Creating a string array
lib.gvariant.mkArray [ "a" "b" "c" ]
*/
mkArray = elems:
mkArray =
elems:
let
vs = map mkValue (lib.throwIf (elems == [ ]) "Please create empty array with mkEmptyArray." elems);
elemType = lib.throwIfNot (lib.all (t: (head vs).type == t) (map (v: v.type) vs))
"Elements in a list should have same type."
(head vs).type;
elemType = lib.throwIfNot (lib.all (t: (head vs).type == t) (
map (v: v.type) vs
)) "Elements in a list should have same type." (head vs).type;
in
mkPrimitive (type.arrayOf elemType) vs // {
__toString = self:
"@${self.type} [${concatMapStringsSep "," toString self.value}]";
mkPrimitive (type.arrayOf elemType) vs
// {
__toString = self: "@${self.type} [${concatMapStringsSep "," toString self.value}]";
};
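A sketch of what the array constructor renders to as GVariant text (the form consumed by dconf/GSettings); the output comment is the expected rendering, assuming `./lib`.

```nix
let
  lib = import ./lib;
  gv = lib.gvariant;
in
toString (gv.mkArray [ "a" "b" "c" ])
# => "@as ['a','b','c']"
```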
/* Returns the GVariant array from the given empty Nix list.
/*
Returns the GVariant array from the given empty Nix list.
Type:
mkEmptyArray :: gvariant.type -> gvariant
Type:
mkEmptyArray :: gvariant.type -> gvariant
Example:
# Creating an empty string array
lib.gvariant.mkEmptyArray (lib.gvariant.type.string)
Example:
# Creating an empty string array
lib.gvariant.mkEmptyArray (lib.gvariant.type.string)
*/
mkEmptyArray = elemType: mkPrimitive (type.arrayOf elemType) [ ] // {
__toString = self: "@${self.type} []";
};
mkEmptyArray =
elemType: mkPrimitive (type.arrayOf elemType) [ ] // { __toString = self: "@${self.type} []"; };
/*
Returns the GVariant variant from the given Nix value. Variants are containers
of different GVariant type.
/* Returns the GVariant variant from the given Nix value. Variants are containers
of different GVariant type.
Type:
mkVariant :: Any -> gvariant
Type:
mkVariant :: Any -> gvariant
Example:
lib.gvariant.mkArray [
(lib.gvariant.mkVariant "a string")
(lib.gvariant.mkVariant (lib.gvariant.mkInt32 1))
]
Example:
lib.gvariant.mkArray [
(lib.gvariant.mkVariant "a string")
(lib.gvariant.mkVariant (lib.gvariant.mkInt32 1))
]
*/
mkVariant = elem:
let gvarElem = mkValue elem;
in mkPrimitive type.variant gvarElem // {
__toString = self: "<${toString self.value}>";
};
mkVariant =
elem:
let
gvarElem = mkValue elem;
in
mkPrimitive type.variant gvarElem // { __toString = self: "<${toString self.value}>"; };
/* Returns the GVariant dictionary entry from the given key and value.
/*
Returns the GVariant dictionary entry from the given key and value.
Type:
mkDictionaryEntry :: String -> Any -> gvariant
Type:
mkDictionaryEntry :: String -> Any -> gvariant
Example:
# A dictionary describing an Epiphany search provider
[
(lib.gvariant.mkDictionaryEntry "url" (lib.gvariant.mkVariant "https://duckduckgo.com/?q=%s&t=epiphany"))
(lib.gvariant.mkDictionaryEntry "bang" (lib.gvariant.mkVariant "!d"))
(lib.gvariant.mkDictionaryEntry "name" (lib.gvariant.mkVariant "DuckDuckGo"))
]
Example:
# A dictionary describing an Epiphany search provider
[
(lib.gvariant.mkDictionaryEntry "url" (lib.gvariant.mkVariant "https://duckduckgo.com/?q=%s&t=epiphany"))
(lib.gvariant.mkDictionaryEntry "bang" (lib.gvariant.mkVariant "!d"))
(lib.gvariant.mkDictionaryEntry "name" (lib.gvariant.mkVariant "DuckDuckGo"))
]
*/
mkDictionaryEntry =
# The key of the entry
@ -149,143 +161,162 @@ rec {
value' = mkValue value;
dictionaryType = type.dictionaryEntryOf name'.type value'.type;
in
mkPrimitive dictionaryType { inherit name value; } // {
mkPrimitive dictionaryType { inherit name value; }
// {
__toString = self: "@${self.type} {${name'},${value'}}";
};
/* Returns the GVariant maybe from the given element type.
/*
Returns the GVariant maybe from the given element type.
Type:
mkMaybe :: gvariant.type -> Any -> gvariant
Type:
mkMaybe :: gvariant.type -> Any -> gvariant
*/
mkMaybe = elemType: elem:
mkPrimitive (type.maybeOf elemType) elem // {
__toString = self:
if self.value == null then
"@${self.type} nothing"
else
"just ${toString self.value}";
mkMaybe =
elemType: elem:
mkPrimitive (type.maybeOf elemType) elem
// {
__toString =
self: if self.value == null then "@${self.type} nothing" else "just ${toString self.value}";
};
/* Returns the GVariant nothing from the given element type.
/*
Returns the GVariant nothing from the given element type.
Type:
mkNothing :: gvariant.type -> gvariant
Type:
mkNothing :: gvariant.type -> gvariant
*/
mkNothing = elemType: mkMaybe elemType null;
/* Returns the GVariant just from the given Nix value.
/*
Returns the GVariant just from the given Nix value.
Type:
mkJust :: Any -> gvariant
Type:
mkJust :: Any -> gvariant
*/
mkJust = elem: let gvarElem = mkValue elem; in mkMaybe gvarElem.type gvarElem;
mkJust =
elem:
let
gvarElem = mkValue elem;
in
mkMaybe gvarElem.type gvarElem;
/* Returns the GVariant tuple from the given Nix list.
/*
Returns the GVariant tuple from the given Nix list.
Type:
mkTuple :: [Any] -> gvariant
Type:
mkTuple :: [Any] -> gvariant
*/
mkTuple = elems:
mkTuple =
elems:
let
gvarElems = map mkValue elems;
tupleType = type.tupleOf (map (e: e.type) gvarElems);
in
mkPrimitive tupleType gvarElems // {
__toString = self:
"@${self.type} (${concatMapStringsSep "," toString self.value})";
mkPrimitive tupleType gvarElems
// {
__toString = self: "@${self.type} (${concatMapStringsSep "," toString self.value})";
};
/* Returns the GVariant boolean from the given Nix bool value.
/*
Returns the GVariant boolean from the given Nix bool value.
Type:
mkBoolean :: Bool -> gvariant
Type:
mkBoolean :: Bool -> gvariant
*/
mkBoolean = v:
mkPrimitive type.boolean v // {
__toString = self: if self.value then "true" else "false";
};
mkBoolean =
v: mkPrimitive type.boolean v // { __toString = self: if self.value then "true" else "false"; };
/* Returns the GVariant string from the given Nix string value.
/*
Returns the GVariant string from the given Nix string value.
Type:
mkString :: String -> gvariant
Type:
mkString :: String -> gvariant
*/
mkString = v:
let sanitize = s: replaceStrings [ "\n" ] [ "\\n" ] (escape [ "'" "\\" ] s);
in mkPrimitive type.string v // {
__toString = self: "'${sanitize self.value}'";
};
mkString =
v:
let
sanitize =
s:
replaceStrings [ "\n" ] [ "\\n" ] (
escape [
"'"
"\\"
] s
);
in
mkPrimitive type.string v // { __toString = self: "'${sanitize self.value}'"; };
/* Returns the GVariant object path from the given Nix string value.
/*
Returns the GVariant object path from the given Nix string value.
Type:
mkObjectpath :: String -> gvariant
Type:
mkObjectpath :: String -> gvariant
*/
mkObjectpath = v:
mkPrimitive type.string v // {
__toString = self: "objectpath '${escape [ "'" ] self.value}'";
};
mkObjectpath =
v: mkPrimitive type.string v // { __toString = self: "objectpath '${escape [ "'" ] self.value}'"; };
/* Returns the GVariant uchar from the given Nix int value.
/*
Returns the GVariant uchar from the given Nix int value.
Type:
mkUchar :: Int -> gvariant
Type:
mkUchar :: Int -> gvariant
*/
mkUchar = mkPrimitive type.uchar;
/* Returns the GVariant int16 from the given Nix int value.
/*
Returns the GVariant int16 from the given Nix int value.
Type:
mkInt16 :: Int -> gvariant
Type:
mkInt16 :: Int -> gvariant
*/
mkInt16 = mkPrimitive type.int16;
/* Returns the GVariant uint16 from the given Nix int value.
/*
Returns the GVariant uint16 from the given Nix int value.
Type:
mkUint16 :: Int -> gvariant
Type:
mkUint16 :: Int -> gvariant
*/
mkUint16 = mkPrimitive type.uint16;
/* Returns the GVariant int32 from the given Nix int value.
/*
Returns the GVariant int32 from the given Nix int value.
Type:
mkInt32 :: Int -> gvariant
Type:
mkInt32 :: Int -> gvariant
*/
mkInt32 = v:
mkPrimitive type.int32 v // {
__toString = self: toString self.value;
};
mkInt32 = v: mkPrimitive type.int32 v // { __toString = self: toString self.value; };
/* Returns the GVariant uint32 from the given Nix int value.
/*
Returns the GVariant uint32 from the given Nix int value.
Type:
mkUint32 :: Int -> gvariant
Type:
mkUint32 :: Int -> gvariant
*/
mkUint32 = mkPrimitive type.uint32;
/* Returns the GVariant int64 from the given Nix int value.
/*
Returns the GVariant int64 from the given Nix int value.
Type:
mkInt64 :: Int -> gvariant
Type:
mkInt64 :: Int -> gvariant
*/
mkInt64 = mkPrimitive type.int64;
/* Returns the GVariant uint64 from the given Nix int value.
/*
Returns the GVariant uint64 from the given Nix int value.
Type:
mkUint64 :: Int -> gvariant
Type:
mkUint64 :: Int -> gvariant
*/
mkUint64 = mkPrimitive type.uint64;
/* Returns the GVariant double from the given Nix float value.
/*
Returns the GVariant double from the given Nix float value.
Type:
mkDouble :: Float -> gvariant
Type:
mkDouble :: Float -> gvariant
*/
mkDouble = v:
mkPrimitive type.double v // {
__toString = self: toString self.value;
};
mkDouble = v: mkPrimitive type.double v // { __toString = self: toString self.value; };
}
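Putting a few of these constructors together, a sketch in the spirit of the Epiphany example above; the keys and values are illustrative, and `toString` yields the GVariant text form that dconf accepts.

```nix
let
  lib = import ./lib;
  gv = lib.gvariant;
in
toString (gv.mkArray [
  (gv.mkDictionaryEntry "bang" (gv.mkVariant "!d"))
  (gv.mkDictionaryEntry "name" (gv.mkVariant "DuckDuckGo"))
])
```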

View file

@ -5,22 +5,34 @@ let
in
{
# Keeping these around in case we decide to change this horrible implementation :)
option = x:
x // { optional = true; };
yes = { tristate = "y"; optional = false; };
no = { tristate = "n"; optional = false; };
module = { tristate = "m"; optional = false; };
unset = { tristate = null; optional = false; };
freeform = x: { freeform = x; optional = false; };
option = x: x // { optional = true; };
yes = {
tristate = "y";
optional = false;
};
no = {
tristate = "n";
optional = false;
};
module = {
tristate = "m";
optional = false;
};
unset = {
tristate = null;
optional = false;
};
freeform = x: {
freeform = x;
optional = false;
};
# Common patterns/legacy used in common-config/hardened/config.nix
whenHelpers = version: {
whenAtLeast = ver: mkIf (versionAtLeast version ver);
whenOlder = ver: mkIf (versionOlder version ver);
whenOlder = ver: mkIf (versionOlder version ver);
# range is (inclusive, exclusive)
whenBetween = verLow: verHigh: mkIf (versionAtLeast version verLow && versionOlder version verHigh);
};
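A sketch of how these helpers are consumed in structured kernel configuration; the option names and the version strings ("6.1", "5.15", "6.2") are illustrative, not taken from this change.

```nix
let
  lib = import ./lib;
  inherit (lib.kernel) yes module whenHelpers;
  inherit (whenHelpers "6.1") whenAtLeast whenBetween;
in
{
  DRM = yes;
  FB = module;
  WERROR = whenAtLeast "5.15" yes;      # only set on new enough kernels
  RUST = whenBetween "6.1" "6.2" yes;   # range is (inclusive, exclusive)
}
```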

File diff suppressed because it is too large Load diff

View file

@ -4,12 +4,29 @@
{ lib }:
let
inherit (lib.strings) toInt;
inherit (lib.trivial) compare min id warn pipe;
inherit (lib.trivial)
compare
min
id
warn
pipe
;
inherit (lib.attrsets) mapAttrs;
in
rec {
inherit (builtins) head tail length isList elemAt concatLists filter elem genList map;
inherit (builtins)
head
tail
length
isList
elemAt
concatLists
filter
elem
genList
map
;
/**
Create a list consisting of a single element. `singleton x` is
@ -39,7 +56,7 @@ rec {
:::
*/
singleton = x: [x];
singleton = x: [ x ];
/**
Apply the function to each element in the list.
@ -81,7 +98,6 @@ rec {
`list` with `nul` as the starting value, i.e.,
`foldr op nul [x_1 x_2 ... x_n] == op x_1 (op x_2 ... (op x_n nul))`.
# Inputs
`op`
@ -118,14 +134,13 @@ rec {
:::
*/
foldr = op: nul: list:
foldr =
op: nul: list:
let
len = length list;
fold' = n:
if n == len
then nul
else op (elemAt list n) (fold' (n + 1));
in fold' 0;
fold' = n: if n == len then nul else op (elemAt list n) (fold' (n + 1));
in
fold' 0;
/**
`fold` is an alias of `foldr` for historic reasons
@ -133,7 +148,6 @@ rec {
# FIXME(Profpatsch): deprecate?
fold = foldr;
/**
left fold, like `foldr`, but from the left:
@ -175,13 +189,12 @@ rec {
:::
*/
foldl = op: nul: list:
foldl =
op: nul: list:
let
foldl' = n:
if n == -1
then nul
else op (foldl' (n - 1)) (elemAt list n);
in foldl' (length list - 1);
foldl' = n: if n == -1 then nul else op (foldl' (n - 1)) (elemAt list n);
in
foldl' (length list - 1);
/**
Reduce a list by applying a binary operator from left to right,
@ -260,13 +273,11 @@ rec {
:::
*/
foldl' =
op:
acc:
op: acc:
# The builtin `foldl'` is a bit lazier than one might expect.
# See https://github.com/NixOS/nix/pull/7158.
# In particular, the initial accumulator value is not forced before the first iteration starts.
builtins.seq acc
(builtins.foldl' op acc);
builtins.seq acc (builtins.foldl' op acc);
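A side-by-side sketch of the three folds; the string examples make the association order visible, and `foldl'` is the strict variant to reach for on long lists (assumes `./lib`).

```nix
let
  lib = import ./lib;
in
{
  r = lib.foldr (a: b: "(${a} ${b})") "z" [ "x" "y" ];    # => "(x (y z))"
  l = lib.foldl (b: a: "(${b} ${a})") "z" [ "x" "y" ];    # => "((z x) y)"
  sum = lib.foldl' (acc: x: acc + x) 0 (lib.range 1 100); # => 5050, accumulator forced each step
}
```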
/**
Map with index starting from 0
@ -303,7 +314,6 @@ rec {
/**
Map with index starting from 1
# Inputs
`f`
@ -373,12 +383,9 @@ rec {
:::
*/
ifilter0 =
ipred:
input:
ipred: input:
map (idx: elemAt input idx) (
filter (idx: ipred idx (elemAt input idx)) (
genList (x: x) (length input)
)
filter (idx: ipred idx (elemAt input idx)) (genList (x: x) (length input))
);
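A short sketch of `ifilter0`, keeping the elements at even indices (assumes `./lib`).

```nix
let
  lib = import ./lib;
in
lib.lists.ifilter0 (i: v: lib.mod i 2 == 0) [ "a" "b" "c" "d" "e" ]
# => [ "a" "c" "e" ]
```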
/**
@ -407,14 +414,12 @@ rec {
Flatten the argument into a single list; that is, nested lists are
spliced into the top-level lists.
# Inputs
`x`
: 1\. Function argument
# Examples
:::{.example}
## `lib.lists.flatten` usage example
@ -428,15 +433,11 @@ rec {
:::
*/
flatten = x:
if isList x
then concatMap (y: flatten y) x
else [x];
flatten = x: if isList x then concatMap (y: flatten y) x else [ x ];
/**
Remove elements equal to 'e' from a list. Useful for buildInputs.
# Inputs
`e`
@ -464,8 +465,7 @@ rec {
:::
*/
remove =
e: filter (x: x != e);
remove = e: filter (x: x != e);
/**
Find the sole element in the list matching the specified
@ -474,7 +474,6 @@ rec {
Returns `default` if no such element exists, or
`multiple` if there are multiple matching elements.
# Inputs
`pred`
@ -515,14 +514,17 @@ rec {
:::
*/
findSingle =
pred:
default:
multiple:
list:
let found = filter pred list; len = length found;
in if len == 0 then default
else if len != 1 then multiple
else head found;
pred: default: multiple: list:
let
found = filter pred list;
len = length found;
in
if len == 0 then
default
else if len != 1 then
multiple
else
head found;
/**
Find the first index in the list matching the specified
@ -562,9 +564,7 @@ rec {
:::
*/
findFirstIndex =
pred:
default:
list:
pred: default: list:
let
# A naive recursive implementation would be much simpler, but
# would also overflow the evaluator stack. We use `foldl'` as a workaround
@ -579,12 +579,13 @@ rec {
# - if index >= 0 then pred (elemAt list index) and all elements before (elemAt list index) didn't satisfy pred
#
# We start with index -1 and the 0'th element of the list, which satisfies the invariant
resultIndex = foldl' (index: el:
resultIndex = foldl' (
index: el:
if index < 0 then
# No match yet before the current index, we need to check the element
if pred el then
# We have a match! Turn it into the actual index to prevent future iterations from modifying it
- index - 1
-index - 1
else
# Still no match, update the index to the next element (we're counting down, so minus one)
index - 1
@ -593,10 +594,7 @@ rec {
index
) (-1) list;
in
if resultIndex < 0 then
default
else
resultIndex;
if resultIndex < 0 then default else resultIndex;
/**
Find the first element in the list matching the specified
@ -636,16 +634,11 @@ rec {
:::
*/
findFirst =
pred:
default:
list:
pred: default: list:
let
index = findFirstIndex pred null list;
in
if index == null then
default
else
elemAt list index;
if index == null then default else elemAt list index;
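A sketch contrasting `findFirstIndex` and `findFirst` on the same predicate (assumes `./lib`).

```nix
let
  lib = import ./lib;
  xs = [ 3 7 10 15 ];
in
{
  idx = lib.lists.findFirstIndex (x: x >= 10) null xs; # => 2
  val = lib.lists.findFirst (x: x >= 10) 0 xs;         # => 10
}
```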
/**
Return true if function `pred` returns true for at least one
@ -744,8 +737,7 @@ rec {
:::
*/
count =
pred: foldl' (c: x: if pred x then c + 1 else c) 0;
count = pred: foldl' (c: x: if pred x then c + 1 else c) 0;
/**
Return a singleton list or an empty list, depending on a boolean
@ -781,7 +773,7 @@ rec {
:::
*/
optional = cond: elem: if cond then [elem] else [];
optional = cond: elem: if cond then [ elem ] else [ ];
/**
Return a list or an empty list, depending on a boolean value.
@ -815,10 +807,7 @@ rec {
:::
*/
optionals =
cond:
elems: if cond then elems else [];
optionals = cond: elems: if cond then elems else [ ];
/**
If argument is a list, return it; else, wrap it in a singleton
@ -844,7 +833,7 @@ rec {
:::
*/
toList = x: if isList x then x else [x];
toList = x: if isList x then x else [ x ];
/**
Return a list of integers from `first` up to and including `last`.
@ -878,13 +867,7 @@ rec {
:::
*/
range =
first:
last:
if first > last then
[]
else
genList (n: first + n) (last - first + 1);
range = first: last: if first > last then [ ] else genList (n: first + n) (last - first + 1);
/**
Return a list with `n` copies of an element.
@ -976,7 +959,6 @@ rec {
: 4\. Function argument
# Examples
:::{.example}
## `lib.lists.groupBy'` usage example
@ -1001,15 +983,21 @@ rec {
:::
*/
groupBy' = op: nul: pred: lst: mapAttrs (name: foldl op nul) (groupBy pred lst);
groupBy' =
op: nul: pred: lst:
mapAttrs (name: foldl op nul) (groupBy pred lst);
groupBy = builtins.groupBy or (
pred: foldl' (r: e:
let
key = pred e;
in
r // { ${key} = (r.${key} or []) ++ [e]; }
) {});
groupBy =
builtins.groupBy or (
pred:
foldl' (
r: e:
let
key = pred e;
in
r // { ${key} = (r.${key} or [ ]) ++ [ e ]; }
) { }
);
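A sketch of `groupBy` versus `groupBy'`: the primed variant folds each group down to a single value instead of keeping the list (assumes `./lib`).

```nix
let
  lib = import ./lib;
  words = [ "apple" "avocado" "banana" "cherry" ];
  firstLetter = w: lib.substring 0 1 w;
in
{
  byLetter = lib.groupBy firstLetter words;
  # => { a = [ "apple" "avocado" ]; b = [ "banana" ]; c = [ "cherry" ]; }
  counts = lib.groupBy' (acc: _: acc + 1) 0 firstLetter words;
  # => { a = 2; b = 1; c = 1; }
}
```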
/**
Merges two lists of the same size together. If the sizes aren't the same
@ -1048,11 +1036,8 @@ rec {
:::
*/
zipListsWith =
f:
fst:
snd:
genList
(n: f (elemAt fst n) (elemAt snd n)) (min (length fst) (length snd));
f: fst: snd:
genList (n: f (elemAt fst n) (elemAt snd n)) (min (length fst) (length snd));
/**
Merges two lists of the same size together. If the sizes aren't the same
@ -1113,8 +1098,12 @@ rec {
:::
*/
reverseList = xs:
let l = length xs; in genList (n: elemAt xs (l - n - 1)) l;
reverseList =
xs:
let
l = length xs;
in
genList (n: elemAt xs (l - n - 1)) l;
/**
Depth-First Search (DFS) for lists `list != []`.
@ -1122,7 +1111,6 @@ rec {
`before a b == true` means that `b` depends on `a` (there's an
edge from `b` to `a`).
# Inputs
`stopOnCycles`
@ -1137,7 +1125,6 @@ rec {
: 3\. Function argument
# Examples
:::{.example}
## `lib.lists.listDfs` usage example
@ -1158,22 +1145,32 @@ rec {
:::
*/
listDfs = stopOnCycles: before: list:
listDfs =
stopOnCycles: before: list:
let
dfs' = us: visited: rest:
dfs' =
us: visited: rest:
let
c = filter (x: before x us) visited;
b = partition (x: before x us) rest;
in if stopOnCycles && (length c > 0)
then { cycle = us; loops = c; inherit visited rest; }
else if length b.right == 0
then # nothing is before us
{ minimal = us; inherit visited rest; }
else # grab the first one before us and continue
dfs' (head b.right)
([ us ] ++ visited)
(tail b.right ++ b.wrong);
in dfs' (head list) [] (tail list);
in
if stopOnCycles && (length c > 0) then
{
cycle = us;
loops = c;
inherit visited rest;
}
else if length b.right == 0 then
# nothing is before us
{
minimal = us;
inherit visited rest;
}
else
# grab the first one before us and continue
dfs' (head b.right) ([ us ] ++ visited) (tail b.right ++ b.wrong);
in
dfs' (head list) [ ] (tail list);
/**
Sort a list based on a partial ordering using DFS. This
@ -1183,7 +1180,6 @@ rec {
`before a b == true` means that `b` should be after `a`
in the result.
# Inputs
`before`
@ -1194,7 +1190,6 @@ rec {
: 2\. Function argument
# Examples
:::{.example}
## `lib.lists.toposort` usage example
@ -1215,24 +1210,28 @@ rec {
:::
*/
toposort = before: list:
toposort =
before: list:
let
dfsthis = listDfs true before list;
toporest = toposort before (dfsthis.visited ++ dfsthis.rest);
in
if length list < 2
then # finish
{ result = list; }
else if dfsthis ? cycle
then # there's a cycle, starting from the current vertex, return it
{ cycle = reverseList ([ dfsthis.cycle ] ++ dfsthis.visited);
inherit (dfsthis) loops; }
else if toporest ? cycle
then # there's a cycle somewhere else in the graph, return it
toporest
# Slow, but short. Can be made a bit faster with an explicit stack.
else # there are no cycles
{ result = [ dfsthis.minimal ] ++ toporest.result; };
if length list < 2 then
# finish
{ result = list; }
else if dfsthis ? cycle then
# there's a cycle, starting from the current vertex, return it
{
cycle = reverseList ([ dfsthis.cycle ] ++ dfsthis.visited);
inherit (dfsthis) loops;
}
else if toporest ? cycle then
# there's a cycle somewhere else in the graph, return it
toporest
# Slow, but short. Can be made a bit faster with an explicit stack.
else
# there are no cycles
{ result = [ dfsthis.minimal ] ++ toporest.result; };
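A small worked example of `toposort`; `before a b == true` means `b` must come after `a`, so the reversed input is put back into dependency order (assumes `./lib`).

```nix
let
  lib = import ./lib;
  # "a" must come before "b", and "b" before "c":
  before = a: b: (a == "a" && b == "b") || (a == "b" && b == "c");
in
lib.toposort before [ "c" "b" "a" ]
# => { result = [ "a" "b" "c" ]; }
```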
/**
Sort a list based on a comparator function which compares two
@ -1288,7 +1287,6 @@ rec {
sortOn f == sort (p: q: f p < f q)
```
# Inputs
`f`
@ -1316,18 +1314,22 @@ rec {
:::
*/
sortOn = f: list:
sortOn =
f: list:
let
# Heterogenous list as pair may be ugly, but requires minimal allocations.
pairs = map (x: [(f x) x]) list;
pairs = map (x: [
(f x)
x
]) list;
in
map
(x: builtins.elemAt x 1)
(sort
# Compare the first element of the pairs
# Do not factor out the `<`, to avoid calls in hot code; duplicate instead.
(a: b: head a < head b)
pairs);
map (x: builtins.elemAt x 1) (
sort
# Compare the first element of the pairs
# Do not factor out the `<`, to avoid calls in hot code; duplicate instead.
(a: b: head a < head b)
pairs
);
/**
Compare two lists element-by-element.
@ -1346,7 +1348,6 @@ rec {
: 3\. Function argument
# Examples
:::{.example}
## `lib.lists.compareLists` usage example
@ -1364,30 +1365,28 @@ rec {
:::
*/
compareLists = cmp: a: b:
if a == []
then if b == []
then 0
else -1
else if b == []
then 1
else let rel = cmp (head a) (head b); in
if rel == 0
then compareLists cmp (tail a) (tail b)
else rel;
compareLists =
cmp: a: b:
if a == [ ] then
if b == [ ] then 0 else -1
else if b == [ ] then
1
else
let
rel = cmp (head a) (head b);
in
if rel == 0 then compareLists cmp (tail a) (tail b) else rel;
/**
Sort list using "Natural sorting".
Numeric portions of strings are sorted in numeric order.
# Inputs
`lst`
: 1\. Function argument
# Examples
:::{.example}
## `lib.lists.naturalSort` usage example
@ -1403,18 +1402,21 @@ rec {
:::
*/
naturalSort = lst:
naturalSort =
lst:
let
vectorise = s: map (x: if isList x then toInt (head x) else x) (builtins.split "(0|[1-9][0-9]*)" s);
prepared = map (x: [ (vectorise x) x ]) lst; # remember vectorised version for O(n) regex splits
prepared = map (x: [
(vectorise x)
x
]) lst; # remember vectorised version for O(n) regex splits
less = a: b: (compareLists compare (head a) (head b)) < 0;
in
map (x: elemAt x 1) (sort less prepared);
map (x: elemAt x 1) (sort less prepared);
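A sketch of the two sorting helpers above (assumes `./lib`).

```nix
let
  lib = import ./lib;
in
{
  byLength = lib.sortOn lib.stringLength [ "ccc" "a" "bb" ]; # => [ "a" "bb" "ccc" ]
  natural = lib.naturalSort [ "disk10" "disk2" "disk1" ];    # => [ "disk1" "disk2" "disk10" ]
}
```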
/**
Return the first (at most) N elements of a list.
# Inputs
`count`
@ -1444,13 +1446,11 @@ rec {
:::
*/
take =
count: sublist 0 count;
take = count: sublist 0 count;
/**
Remove the first (at most) N elements of a list.
# Inputs
`count`
@ -1480,14 +1480,11 @@ rec {
:::
*/
drop =
count:
list: sublist count (length list) list;
drop = count: list: sublist count (length list) list;
/**
Whether the first list is a prefix of the second list.
# Inputs
`list1`
@ -1517,10 +1514,7 @@ rec {
:::
*/
hasPrefix =
list1:
list2:
take (length list1) list2 == list1;
hasPrefix = list1: list2: take (length list1) list2 == list1;
/**
Remove the first list as a prefix from the second list.
@ -1556,8 +1550,7 @@ rec {
:::
*/
removePrefix =
list1:
list2:
list1: list2:
if hasPrefix list1 list2 then
drop (length list1) list2
else
@ -1601,20 +1594,22 @@ rec {
:::
*/
sublist =
start:
count:
list:
let len = length list; in
genList
(n: elemAt list (n + start))
(if start >= len then 0
else if start + count > len then len - start
else count);
start: count: list:
let
len = length list;
in
genList (n: elemAt list (n + start)) (
if start >= len then
0
else if start + count > len then
len - start
else
count
);
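A quick sketch of `sublist`, which takes `count` elements starting at `start` and clamps at the end of the list (assumes `./lib`).

```nix
let
  lib = import ./lib;
in
{
  mid = lib.sublist 1 3 [ "a" "b" "c" "d" "e" ];      # => [ "b" "c" "d" ]
  clamped = lib.sublist 3 10 [ "a" "b" "c" "d" "e" ]; # => [ "d" "e" ]
}
```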
/**
The common prefix of two lists.
# Inputs
`list1`
@ -1647,8 +1642,7 @@ rec {
:::
*/
commonPrefix =
list1:
list2:
list1: list2:
let
# Zip the lists together into a list of booleans whether each element matches
matchings = zipListsWith (fst: snd: fst != snd) list1 list2;
@ -1665,7 +1659,6 @@ rec {
This function throws an error if the list is empty.
# Inputs
`list`
@ -1689,8 +1682,9 @@ rec {
:::
*/
last = list:
assert lib.assertMsg (list != []) "lists.last: list must not be empty!";
last =
list:
assert lib.assertMsg (list != [ ]) "lists.last: list must not be empty!";
elemAt list (length list - 1);
/**
@ -1698,7 +1692,6 @@ rec {
This function throws an error if the list is empty.
# Inputs
`list`
@ -1722,15 +1715,14 @@ rec {
:::
*/
init = list:
assert lib.assertMsg (list != []) "lists.init: list must not be empty!";
init =
list:
assert lib.assertMsg (list != [ ]) "lists.init: list must not be empty!";
take (length list - 1) list;
/**
Return the image of the cross product of some lists by a function.
# Examples
:::{.example}
## `lib.lists.crossLists` usage example
@ -1748,25 +1740,23 @@ rec {
```
:::
*/
crossLists = warn
''lib.crossLists is deprecated, use lib.mapCartesianProduct instead.
crossLists = warn ''
lib.crossLists is deprecated, use lib.mapCartesianProduct instead.
For example, the following function call:
For example, the following function call:
nix-repl> lib.crossLists (x: y: x+y) [[1 2] [3 4]]
[ 4 5 5 6 ]
nix-repl> lib.crossLists (x: y: x+y) [[1 2] [3 4]]
[ 4 5 5 6 ]
Can now be replaced by the following one:
Can now be replaced by the following one:
nix-repl> lib.mapCartesianProduct ({x,y}: x+y) { x = [1 2]; y = [3 4]; }
[ 4 5 5 6 ]
''
(f: foldl (fs: args: concatMap (f: map f args) fs) [f]);
nix-repl> lib.mapCartesianProduct ({x,y}: x+y) { x = [1 2]; y = [3 4]; }
[ 4 5 5 6 ]
'' (f: foldl (fs: args: concatMap (f: map f args) fs) [ f ]);
/**
Remove duplicate elements from the `list`. O(n^2) complexity.
# Inputs
`list`
@ -1790,12 +1780,11 @@ rec {
:::
*/
unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [];
unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [ ];
/**
Check if list contains only unique elements. O(n^2) complexity.
# Inputs
`list`
@ -1823,7 +1812,6 @@ rec {
*/
allUnique = list: (length (unique list) == length list);
/**
Intersects list 'list1' and another list (`list2`).
@ -1839,7 +1827,6 @@ rec {
: Second list
# Examples
:::{.example}
## `lib.lists.intersectLists` usage example
@ -1868,7 +1855,6 @@ rec {
: Second list
# Examples
:::{.example}
## `lib.lists.subtractLists` usage example

View file

@ -1,184 +1,218 @@
/* Some functions for manipulating meta attributes, as well as the
name attribute. */
/*
Some functions for manipulating meta attributes, as well as the
name attribute.
*/
{ lib }:
let
inherit (lib) matchAttrs any all isDerivation getBin assertMsg;
inherit (lib)
matchAttrs
any
all
isDerivation
getBin
assertMsg
;
inherit (builtins) isString match typeOf;
in
rec {
/*
Add to or override the meta attributes of the given
derivation.
/* Add to or override the meta attributes of the given
derivation.
Example:
addMetaAttrs {description = "Bla blah";} somePkg
Example:
addMetaAttrs {description = "Bla blah";} somePkg
*/
addMetaAttrs = newAttrs: drv:
drv // { meta = (drv.meta or {}) // newAttrs; };
addMetaAttrs = newAttrs: drv: drv // { meta = (drv.meta or { }) // newAttrs; };
# Disable Hydra builds of given derivation.
dontDistribute = drv: addMetaAttrs { hydraPlatforms = [ ]; } drv;
/* Disable Hydra builds of given derivation.
/*
Change the symbolic name of a package for presentation purposes
(i.e., so that nix-env users can tell them apart).
*/
dontDistribute = drv: addMetaAttrs { hydraPlatforms = []; } drv;
setName = name: drv: drv // { inherit name; };
/*
Like `setName`, but takes the previous name as an argument.
/* Change the symbolic name of a package for presentation purposes
(i.e., so that nix-env users can tell them apart).
Example:
updateName (oldName: oldName + "-experimental") somePkg
*/
setName = name: drv: drv // {inherit name;};
updateName = updater: drv: drv // { name = updater (drv.name); };
/* Like `setName`, but takes the previous name as an argument.
Example:
updateName (oldName: oldName + "-experimental") somePkg
/*
Append a suffix to the name of a package (before the version
part).
*/
updateName = updater: drv: drv // {name = updater (drv.name);};
appendToName =
suffix:
updateName (
name:
let
x = builtins.parseDrvName name;
in
"${x.name}-${suffix}-${x.version}"
);
# Apply a function to each derivation and only to derivations in an attrset.
mapDerivationAttrset =
f: set: lib.mapAttrs (name: pkg: if lib.isDerivation pkg then (f pkg) else pkg) set;
/* Append a suffix to the name of a package (before the version
part). */
appendToName = suffix: updateName (name:
let x = builtins.parseDrvName name; in "${x.name}-${suffix}-${x.version}");
/* Apply a function to each derivation and only to derivations in an attrset.
*/
mapDerivationAttrset = f: set: lib.mapAttrs (name: pkg: if lib.isDerivation pkg then (f pkg) else pkg) set;
/* Set the nix-env priority of the package.
*/
# Set the nix-env priority of the package.
setPrio = priority: addMetaAttrs { inherit priority; };
/* Decrease the nix-env priority of the package, i.e., other
versions/variants of the package will be preferred.
/*
Decrease the nix-env priority of the package, i.e., other
versions/variants of the package will be preferred.
*/
lowPrio = setPrio 10;
/* Apply lowPrio to an attrset with derivations
*/
# Apply lowPrio to an attrset with derivations
lowPrioSet = set: mapDerivationAttrset lowPrio set;
/* Increase the nix-env priority of the package, i.e., this
version/variant of the package will be preferred.
/*
Increase the nix-env priority of the package, i.e., this
version/variant of the package will be preferred.
*/
hiPrio = setPrio (-10);
/* Apply hiPrio to an attrset with derivations
*/
# Apply hiPrio to an attrset with derivations
hiPrioSet = set: mapDerivationAttrset hiPrio set;
/*
Check to see if a platform is matched by the given `meta.platforms`
element.
/* Check to see if a platform is matched by the given `meta.platforms`
element.
A `meta.platform` pattern is either
A `meta.platform` pattern is either
1. (legacy) a system string.
1. (legacy) a system string.
2. (modern) a pattern for the entire platform structure (see `lib.systems.inspect.platformPatterns`).
2. (modern) a pattern for the entire platform structure (see `lib.systems.inspect.platformPatterns`).
3. (modern) a pattern for the platform `parsed` field (see `lib.systems.inspect.patterns`).
3. (modern) a pattern for the platform `parsed` field (see `lib.systems.inspect.patterns`).
We can inject these into a pattern for the whole of a structured platform,
and then match that.
We can inject these into a pattern for the whole of a structured platform,
and then match that.
Example:
lib.meta.platformMatch { system = "aarch64-darwin"; } "aarch64-darwin"
=> true
*/
platformMatch =
platform: elem:
(
# Check with simple string comparison if elem was a string.
#
# The majority of comparisons done with this function will be against meta.platforms
# which contains a simple platform string.
#
# Avoiding an attrset allocation results in significant performance gains (~2-30) across the board in OfBorg
# because this is a hot path for nixpkgs.
if isString elem then
platform ? system && elem == platform.system
else
matchAttrs (
# Normalize platform attrset.
if elem ? parsed then elem else { parsed = elem; }
) platform
);
Example:
lib.meta.platformMatch { system = "aarch64-darwin"; } "aarch64-darwin"
/*
Check if a package is available on a given platform.
A package is available on a platform if both
1. One of `meta.platforms` pattern matches the given
platform, or `meta.platforms` is not present.
2. None of `meta.badPlatforms` pattern matches the given platform.
Example:
lib.meta.availableOn { system = "aarch64-darwin"; } pkg.zsh
=> true
*/
platformMatch = platform: elem: (
# Check with simple string comparison if elem was a string.
#
# The majority of comparisons done with this function will be against meta.platforms
# which contains a simple platform string.
#
# Avoiding an attrset allocation results in significant performance gains (~2-30) across the board in OfBorg
# because this is a hot path for nixpkgs.
if isString elem then platform ? system && elem == platform.system
else matchAttrs (
# Normalize platform attrset.
if elem ? parsed then elem
else { parsed = elem; }
) platform
);
availableOn =
platform: pkg:
((!pkg ? meta.platforms) || any (platformMatch platform) pkg.meta.platforms)
&& all (elem: !platformMatch platform elem) (pkg.meta.badPlatforms or [ ]);
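A sketch of the two checks in use; the `<nixpkgs>` import and `pkgs.hello` are assumptions for the example, and the `platformMatch` call mirrors the docstring above.

```nix
let
  pkgs = import <nixpkgs> { }; # assumes a nixpkgs channel is available
  lib = pkgs.lib;
in
{
  matches = lib.meta.platformMatch { system = "aarch64-darwin"; } "aarch64-darwin"; # => true
  helloHere = lib.meta.availableOn pkgs.stdenv.hostPlatform pkgs.hello;
}
```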
/* Check if a package is available on a given platform.
/*
Get the corresponding attribute in lib.licenses
from the SPDX ID.
For SPDX IDs, see
https://spdx.org/licenses
A package is available on a platform if both
Type:
getLicenseFromSpdxId :: str -> AttrSet
1. One of `meta.platforms` pattern matches the given
platform, or `meta.platforms` is not present.
2. None of `meta.badPlatforms` pattern matches the given platform.
Example:
lib.meta.availableOn { system = "aarch64-darwin"; } pkg.zsh
=> true
*/
availableOn = platform: pkg:
((!pkg?meta.platforms) || any (platformMatch platform) pkg.meta.platforms) &&
all (elem: !platformMatch platform elem) (pkg.meta.badPlatforms or []);
/* Get the corresponding attribute in lib.licenses
from the SPDX ID.
For SPDX IDs, see
https://spdx.org/licenses
Type:
getLicenseFromSpdxId :: str -> AttrSet
Example:
lib.getLicenseFromSpdxId "MIT" == lib.licenses.mit
=> true
lib.getLicenseFromSpdxId "mIt" == lib.licenses.mit
=> true
lib.getLicenseFromSpdxId "MY LICENSE"
=> trace: warning: getLicenseFromSpdxId: No license matches the given SPDX ID: MY LICENSE
=> { shortName = "MY LICENSE"; }
Example:
lib.getLicenseFromSpdxId "MIT" == lib.licenses.mit
=> true
lib.getLicenseFromSpdxId "mIt" == lib.licenses.mit
=> true
lib.getLicenseFromSpdxId "MY LICENSE"
=> trace: warning: getLicenseFromSpdxId: No license matches the given SPDX ID: MY LICENSE
=> { shortName = "MY LICENSE"; }
*/
getLicenseFromSpdxId =
let
spdxLicenses = lib.mapAttrs (id: ls: assert lib.length ls == 1; builtins.head ls)
(lib.groupBy (l: lib.toLower l.spdxId) (lib.filter (l: l ? spdxId) (lib.attrValues lib.licenses)));
in licstr:
spdxLicenses.${ lib.toLower licstr } or (
lib.warn "getLicenseFromSpdxId: No license matches the given SPDX ID: ${licstr}"
{ shortName = licstr; }
);
spdxLicenses =
lib.mapAttrs
(
id: ls:
assert lib.length ls == 1;
builtins.head ls
)
(lib.groupBy (l: lib.toLower l.spdxId) (lib.filter (l: l ? spdxId) (lib.attrValues lib.licenses)));
in
licstr:
spdxLicenses.${lib.toLower licstr}
or (lib.warn "getLicenseFromSpdxId: No license matches the given SPDX ID: ${licstr}" {
shortName = licstr;
});
/* Get the path to the main program of a package based on meta.mainProgram
/*
Get the path to the main program of a package based on meta.mainProgram
Type: getExe :: package -> string
Type: getExe :: package -> string
Example:
getExe pkgs.hello
=> "/nix/store/g124820p9hlv4lj8qplzxw1c44dxaw1k-hello-2.12/bin/hello"
getExe pkgs.mustache-go
=> "/nix/store/am9ml4f4ywvivxnkiaqwr0hyxka1xjsf-mustache-go-1.3.0/bin/mustache"
Example:
getExe pkgs.hello
=> "/nix/store/g124820p9hlv4lj8qplzxw1c44dxaw1k-hello-2.12/bin/hello"
getExe pkgs.mustache-go
=> "/nix/store/am9ml4f4ywvivxnkiaqwr0hyxka1xjsf-mustache-go-1.3.0/bin/mustache"
*/
getExe = x: getExe' x (x.meta.mainProgram or (
# This could be turned into an error when 23.05 is at end of life
lib.warn "getExe: Package ${lib.strings.escapeNixIdentifier x.meta.name or x.pname or x.name} does not have the meta.mainProgram attribute. We'll assume that the main program has the same name for now, but this behavior is deprecated, because it leads to surprising errors when the assumption does not hold. If the package has a main program, please set `meta.mainProgram` in its definition to make this warning go away. Otherwise, if the package does not have a main program, or if you don't control its definition, use getExe' to specify the name to the program, such as lib.getExe' foo \"bar\"."
lib.getName
x
));
getExe =
x:
getExe' x (
x.meta.mainProgram or (
# This could be turned into an error when 23.05 is at end of life
lib.warn
"getExe: Package ${
lib.strings.escapeNixIdentifier x.meta.name or x.pname or x.name
} does not have the meta.mainProgram attribute. We'll assume that the main program has the same name for now, but this behavior is deprecated, because it leads to surprising errors when the assumption does not hold. If the package has a main program, please set `meta.mainProgram` in its definition to make this warning go away. Otherwise, if the package does not have a main program, or if you don't control its definition, use getExe' to specify the name to the program, such as lib.getExe' foo \"bar\"."
lib.getName
x
)
);
/* Get the path of a program of a derivation.
/*
Get the path of a program of a derivation.
Type: getExe' :: derivation -> string -> string
Example:
getExe' pkgs.hello "hello"
=> "/nix/store/g124820p9hlv4lj8qplzxw1c44dxaw1k-hello-2.12/bin/hello"
getExe' pkgs.imagemagick "convert"
=> "/nix/store/5rs48jamq7k6sal98ymj9l4k2bnwq515-imagemagick-7.1.1-15/bin/convert"
Type: getExe' :: derivation -> string -> string
Example:
getExe' pkgs.hello "hello"
=> "/nix/store/g124820p9hlv4lj8qplzxw1c44dxaw1k-hello-2.12/bin/hello"
getExe' pkgs.imagemagick "convert"
=> "/nix/store/5rs48jamq7k6sal98ymj9l4k2bnwq515-imagemagick-7.1.1-15/bin/convert"
*/
getExe' = x: y:
getExe' =
x: y:
assert assertMsg (isDerivation x)
"lib.meta.getExe': The first argument is of type ${typeOf x}, but it should be a derivation instead.";
assert assertMsg (isString y)

File diff suppressed because it is too large.

View file

@ -1,4 +1,4 @@
/* Nixpkgs/NixOS option handling. */
# Nixpkgs/NixOS option handling.
{ lib }:
let
@ -25,448 +25,531 @@ let
optionals
take
;
inherit (lib.attrsets)
attrByPath
optionalAttrs
;
inherit (lib.strings)
concatMapStrings
concatStringsSep
;
inherit (lib.types)
mkOptionType
;
inherit (lib.lists)
last
;
inherit (lib.attrsets) attrByPath optionalAttrs;
inherit (lib.strings) concatMapStrings concatStringsSep;
inherit (lib.types) mkOptionType;
inherit (lib.lists) last;
prioritySuggestion = ''
Use `lib.mkForce value` or `lib.mkDefault value` to change the priority on any of these definitions.
Use `lib.mkForce value` or `lib.mkDefault value` to change the priority on any of these definitions.
'';
in
rec {
/* Returns true when the given argument is an option
/*
Returns true when the given argument is an option
Type: isOption :: a -> bool
Type: isOption :: a -> bool
Example:
isOption 1 // => false
isOption (mkOption {}) // => true
Example:
isOption 1 // => false
isOption (mkOption {}) // => true
*/
isOption = lib.isType "option";
/* Creates an Option attribute set. mkOption accepts an attribute set with the following keys:
/*
Creates an Option attribute set. mkOption accepts an attribute set with the following keys:
All keys default to `null` when not given.
All keys default to `null` when not given.
Example:
mkOption { } // => { _type = "option"; }
mkOption { default = "foo"; } // => { _type = "option"; default = "foo"; }
Example:
mkOption { } // => { _type = "option"; }
mkOption { default = "foo"; } // => { _type = "option"; default = "foo"; }
*/
mkOption =
{
# Default value used when no definition is given in the configuration.
default ? null,
# Textual representation of the default, for the manual.
defaultText ? null,
# Example value used in the manual.
example ? null,
# String describing the option.
description ? null,
# Related packages used in the manual (see `genRelatedPackages` in ../nixos/lib/make-options-doc/default.nix).
relatedPackages ? null,
# Option type, providing type-checking and value merging.
type ? null,
# Function that converts the option value to something else.
apply ? null,
# Whether the option is for NixOS developers only.
internal ? null,
# Whether the option shows up in the manual. Default: true. Use false to hide the option and any sub-options from submodules. Use "shallow" to hide only sub-options.
visible ? null,
# Whether the option can be set only once
readOnly ? null,
} @ attrs:
# Default value used when no definition is given in the configuration.
default ? null,
# Textual representation of the default, for the manual.
defaultText ? null,
# Example value used in the manual.
example ? null,
# String describing the option.
description ? null,
# Related packages used in the manual (see `genRelatedPackages` in ../nixos/lib/make-options-doc/default.nix).
relatedPackages ? null,
# Option type, providing type-checking and value merging.
type ? null,
# Function that converts the option value to something else.
apply ? null,
# Whether the option is for NixOS developers only.
internal ? null,
# Whether the option shows up in the manual. Default: true. Use false to hide the option and any sub-options from submodules. Use "shallow" to hide only sub-options.
visible ? null,
# Whether the option can be set only once
readOnly ? null,
}@attrs:
attrs // { _type = "option"; };
/* Creates an Option attribute set for a boolean value option, i.e. an
option to be toggled on or off:
/*
Creates an Option attribute set for a boolean value option, i.e. an
option to be toggled on or off:
Example:
mkEnableOption "foo"
=> { _type = "option"; default = false; description = "Whether to enable foo."; example = true; type = { ... }; }
Example:
mkEnableOption "foo"
=> { _type = "option"; default = false; description = "Whether to enable foo."; example = true; type = { ... }; }
*/
mkEnableOption =
# Name for the created option
name: mkOption {
default = false;
example = true;
description = "Whether to enable ${name}.";
type = lib.types.bool;
};
name:
mkOption {
default = false;
example = true;
description = "Whether to enable ${name}.";
type = lib.types.bool;
};
/* Creates an Option attribute set for an option that specifies the
package a module should use for some purpose.
/*
Creates an Option attribute set for an option that specifies the
package a module should use for some purpose.
The package is specified in the third argument under `default` as a list of strings
representing its attribute path in nixpkgs (or another package set).
Because of this, you need to pass nixpkgs itself (usually `pkgs` in a module;
alternatively to nixpkgs itself, another package set) as the first argument.
The package is specified in the third argument under `default` as a list of strings
representing its attribute path in nixpkgs (or another package set).
Because of this, you need to pass nixpkgs itself (usually `pkgs` in a module;
alternatively to nixpkgs itself, another package set) as the first argument.
If you pass another package set you should set the `pkgsText` option.
This option is used to display the expression for the package set. It is `"pkgs"` by default.
If your expression is complex you should parenthesize it, as the `pkgsText` argument
is usually immediately followed by an attribute lookup (`.`).
If you pass another package set you should set the `pkgsText` option.
This option is used to display the expression for the package set. It is `"pkgs"` by default.
If your expression is complex you should parenthesize it, as the `pkgsText` argument
is usually immediately followed by an attribute lookup (`.`).
The second argument may be either a string or a list of strings.
It provides the display name of the package in the description of the generated option
(using only the last element if the passed value is a list)
and serves as the fallback value for the `default` argument.
The second argument may be either a string or a list of strings.
It provides the display name of the package in the description of the generated option
(using only the last element if the passed value is a list)
and serves as the fallback value for the `default` argument.
To include extra information in the description, pass `extraDescription` to
append arbitrary text to the generated description.
To include extra information in the description, pass `extraDescription` to
append arbitrary text to the generated description.
You can also pass an `example` value, either a literal string or an attribute path.
You can also pass an `example` value, either a literal string or an attribute path.
The `default` argument can be omitted if the provided name is
an attribute of pkgs (if `name` is a string) or a valid attribute path in pkgs (if `name` is a list).
You can also set `default` to just a string in which case it is interpreted as an attribute name
(a singleton attribute path, if you will).
The `default` argument can be omitted if the provided name is
an attribute of pkgs (if `name` is a string) or a valid attribute path in pkgs (if `name` is a list).
You can also set `default` to just a string in which case it is interpreted as an attribute name
(a singleton attribute path, if you will).
If you wish to explicitly provide no default, pass `null` as `default`.
If you wish to explicitly provide no default, pass `null` as `default`.
If you want users to be able to set no package, pass `nullable = true`.
In this mode a `default = null` will not be interpreted as no default and is interpreted literally.
If you want users to be able to set no package, pass `nullable = true`.
In this mode a `default = null` will not be interpreted as no default and is interpreted literally.
Type: mkPackageOption :: pkgs -> (string|[string]) -> { nullable? :: bool, default? :: string|[string], example? :: null|string|[string], extraDescription? :: string, pkgsText? :: string } -> option
Type: mkPackageOption :: pkgs -> (string|[string]) -> { nullable? :: bool, default? :: string|[string], example? :: null|string|[string], extraDescription? :: string, pkgsText? :: string } -> option
Example:
mkPackageOption pkgs "hello" { }
=> { ...; default = pkgs.hello; defaultText = literalExpression "pkgs.hello"; description = "The hello package to use."; type = package; }
Example:
mkPackageOption pkgs "hello" { }
=> { ...; default = pkgs.hello; defaultText = literalExpression "pkgs.hello"; description = "The hello package to use."; type = package; }
Example:
mkPackageOption pkgs "GHC" {
default = [ "ghc" ];
example = "pkgs.haskell.packages.ghc92.ghc.withPackages (hkgs: [ hkgs.primes ])";
}
=> { ...; default = pkgs.ghc; defaultText = literalExpression "pkgs.ghc"; description = "The GHC package to use."; example = literalExpression "pkgs.haskell.packages.ghc92.ghc.withPackages (hkgs: [ hkgs.primes ])"; type = package; }
Example:
mkPackageOption pkgs "GHC" {
default = [ "ghc" ];
example = "pkgs.haskell.packages.ghc92.ghc.withPackages (hkgs: [ hkgs.primes ])";
}
=> { ...; default = pkgs.ghc; defaultText = literalExpression "pkgs.ghc"; description = "The GHC package to use."; example = literalExpression "pkgs.haskell.packages.ghc92.ghc.withPackages (hkgs: [ hkgs.primes ])"; type = package; }
Example:
mkPackageOption pkgs [ "python3Packages" "pytorch" ] {
extraDescription = "This is an example and doesn't actually do anything.";
}
=> { ...; default = pkgs.python3Packages.pytorch; defaultText = literalExpression "pkgs.python3Packages.pytorch"; description = "The pytorch package to use. This is an example and doesn't actually do anything."; type = package; }
Example:
mkPackageOption pkgs [ "python3Packages" "pytorch" ] {
extraDescription = "This is an example and doesn't actually do anything.";
}
=> { ...; default = pkgs.python3Packages.pytorch; defaultText = literalExpression "pkgs.python3Packages.pytorch"; description = "The pytorch package to use. This is an example and doesn't actually do anything."; type = package; }
Example:
mkPackageOption pkgs "nushell" {
nullable = true;
}
=> { ...; default = pkgs.nushell; defaultText = literalExpression "pkgs.nushell"; description = "The nushell package to use."; type = nullOr package; }
Example:
mkPackageOption pkgs "nushell" {
nullable = true;
}
=> { ...; default = pkgs.nushell; defaultText = literalExpression "pkgs.nushell"; description = "The nushell package to use."; type = nullOr package; }
Example:
mkPackageOption pkgs "coreutils" {
default = null;
}
=> { ...; description = "The coreutils package to use."; type = package; }
Example:
mkPackageOption pkgs "coreutils" {
default = null;
}
=> { ...; description = "The coreutils package to use."; type = package; }
Example:
mkPackageOption pkgs "dbus" {
nullable = true;
default = null;
}
=> { ...; default = null; description = "The dbus package to use."; type = nullOr package; }
Example:
mkPackageOption pkgs "dbus" {
nullable = true;
default = null;
}
=> { ...; default = null; description = "The dbus package to use."; type = nullOr package; }
Example:
mkPackageOption pkgs.javaPackages "OpenJFX" {
default = "openjfx20";
pkgsText = "pkgs.javaPackages";
}
=> { ...; default = pkgs.javaPackages.openjfx20; defaultText = literalExpression "pkgs.javaPackages.openjfx20"; description = "The OpenJFX package to use."; type = package; }
Example:
mkPackageOption pkgs.javaPackages "OpenJFX" {
default = "openjfx20";
pkgsText = "pkgs.javaPackages";
}
=> { ...; default = pkgs.javaPackages.openjfx20; defaultText = literalExpression "pkgs.javaPackages.openjfx20"; description = "The OpenJFX package to use."; type = package; }
*/
mkPackageOption =
# Package set (an instantiation of nixpkgs such as pkgs in modules or another package set)
pkgs:
# Name for the package, shown in option description
name:
{
# Whether the package can be null, for example to disable installing a package altogether (defaults to false)
nullable ? false,
# The attribute path where the default package is located (may be omitted, in which case it is copied from `name`)
default ? name,
# A string or an attribute path to use as an example (may be omitted)
example ? null,
# Additional text to include in the option description (may be omitted)
extraDescription ? "",
# Representation of the package set passed as pkgs (defaults to `"pkgs"`)
pkgsText ? "pkgs"
}:
let
name' = if isList name then last name else name;
default' = if isList default then default else [ default ];
defaultText = concatStringsSep "." default';
defaultValue = attrByPath default'
(throw "${defaultText} cannot be found in ${pkgsText}") pkgs;
defaults = if default != null then {
default = defaultValue;
defaultText = literalExpression ("${pkgsText}." + defaultText);
} else optionalAttrs nullable {
default = null;
};
in mkOption (defaults // {
description = "The ${name'} package to use."
+ (if extraDescription == "" then "" else " ") + extraDescription;
# Name for the package, shown in option description
name:
{
# Whether the package can be null, for example to disable installing a package altogether (defaults to false)
nullable ? false,
# The attribute path where the default package is located (may be omitted, in which case it is copied from `name`)
default ? name,
# A string or an attribute path to use as an example (may be omitted)
example ? null,
# Additional text to include in the option description (may be omitted)
extraDescription ? "",
# Representation of the package set passed as pkgs (defaults to `"pkgs"`)
pkgsText ? "pkgs",
}:
let
name' = if isList name then last name else name;
default' = if isList default then default else [ default ];
defaultText = concatStringsSep "." default';
defaultValue = attrByPath default' (throw "${defaultText} cannot be found in ${pkgsText}") pkgs;
defaults =
if default != null then
{
default = defaultValue;
defaultText = literalExpression ("${pkgsText}." + defaultText);
}
else
optionalAttrs nullable { default = null; };
in
mkOption (
defaults
// {
description =
"The ${name'} package to use." + (if extraDescription == "" then "" else " ") + extraDescription;
type = with lib.types; (if nullable then nullOr else lib.id) package;
} // optionalAttrs (example != null) {
example = literalExpression
(if isList example then "${pkgsText}." + concatStringsSep "." example else example);
});
}
// optionalAttrs (example != null) {
example = literalExpression (
if isList example then "${pkgsText}." + concatStringsSep "." example else example
);
}
);
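A sketch of a NixOS-style module using both helpers together; the service name and option layout are illustrative, not part of this change.

```nix
{ lib, pkgs, ... }:
{
  options.services.myservice = {
    enable = lib.mkEnableOption "myservice";
    package = lib.mkPackageOption pkgs "hello" { };
  };
}
```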
/* Alias of mkPackageOption. Previously used to create options with markdown
documentation, which is no longer required.
/*
Alias of mkPackageOption. Previously used to create options with markdown
documentation, which is no longer required.
*/
mkPackageOptionMD = mkPackageOption;
/* This option accepts anything, but it does not produce any result.
/*
This option accepts anything, but it does not produce any result.
This is useful for sharing a module across different module sets
without having to implement similar features as long as the
values of the options are not accessed. */
mkSinkUndeclaredOptions = attrs: mkOption ({
internal = true;
visible = false;
default = false;
description = "Sink for option definitions.";
type = mkOptionType {
name = "sink";
check = x: true;
merge = loc: defs: false;
};
apply = x: throw "Option value is not readable because the option is not declared.";
} // attrs);
This is useful for sharing a module across different module sets
without having to implement similar features as long as the
values of the options are not accessed.
*/
mkSinkUndeclaredOptions =
attrs:
mkOption (
{
internal = true;
visible = false;
default = false;
description = "Sink for option definitions.";
type = mkOptionType {
name = "sink";
check = x: true;
merge = loc: defs: false;
};
apply = x: throw "Option value is not readable because the option is not declared.";
}
// attrs
);
mergeDefaultOption = loc: defs:
let list = getValues defs; in
if length list == 1 then head list
else if all isFunction list then x: mergeDefaultOption loc (map (f: f x) list)
else if all isList list then concatLists list
else if all isAttrs list then foldl' lib.mergeAttrs {} list
else if all isBool list then foldl' lib.or false list
else if all isString list then lib.concatStrings list
else if all isInt list && all (x: x == head list) list then head list
else throw "Cannot merge definitions of `${showOption loc}'. Definition values:${showDefs defs}";
mergeDefaultOption =
loc: defs:
let
list = getValues defs;
in
if length list == 1 then
head list
else if all isFunction list then
x: mergeDefaultOption loc (map (f: f x) list)
else if all isList list then
concatLists list
else if all isAttrs list then
foldl' lib.mergeAttrs { } list
else if all isBool list then
foldl' lib.or false list
else if all isString list then
lib.concatStrings list
else if all isInt list && all (x: x == head list) list then
head list
else
throw "Cannot merge definitions of `${showOption loc}'. Definition values:${showDefs defs}";
/*
Require a single definition.
WARNING: Does not perform nested checks, as this does not run the merge function!
*/
*/
mergeOneOption = mergeUniqueOption { message = ""; };
/*
Require a single definition.
NOTE: When the type is not checked completely by check, pass a merge function for further checking (of sub-attributes, etc).
*/
mergeUniqueOption = args@{
*/
mergeUniqueOption =
args@{
message,
# WARNING: the default merge function assumes that the definition is a valid (option) value. You MUST pass a merge function if the return value needs to be
# - type checked beyond what .check does (which should be very little; only on the value head; not attribute values, etc)
# - if you want attribute values to be checked, or list items
# - if you want coercedTo-like behavior to work
merge ? loc: defs: (head defs).value }:
merge ? loc: defs: (head defs).value,
}:
loc: defs:
if length defs == 1
then merge loc defs
else
assert length defs > 1;
throw "The option `${showOption loc}' is defined multiple times while it's expected to be unique.\n${message}\nDefinition values:${showDefs defs}\n${prioritySuggestion}";
if length defs == 1 then
merge loc defs
else
assert length defs > 1;
throw "The option `${showOption loc}' is defined multiple times while it's expected to be unique.\n${message}\nDefinition values:${showDefs defs}\n${prioritySuggestion}";
/* "Merge" option definitions by checking that they all have the same value. */
mergeEqualOption = loc: defs:
if defs == [] then abort "This case should never happen."
# "Merge" option definitions by checking that they all have the same value.
mergeEqualOption =
loc: defs:
if defs == [ ] then
abort "This case should never happen."
# Return early if we only have one element
# This also makes it work for functions, because the foldl' below would try
# to compare the first element with itself, which is false for functions
else if length defs == 1 then (head defs).value
else (foldl' (first: def:
if def.value != first.value then
throw "The option `${showOption loc}' has conflicting definition values:${showDefs [ first def ]}\n${prioritySuggestion}"
else
first) (head defs) (tail defs)).value;
else if length defs == 1 then
(head defs).value
else
(foldl' (
first: def:
if def.value != first.value then
throw "The option `${showOption loc}' has conflicting definition values:${
showDefs [
first
def
]
}\n${prioritySuggestion}"
else
first
) (head defs) (tail defs)).value;
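Similarly, a hedged sketch of `mergeEqualOption` (the option path and file names are invented):

    let
      lib = import ./lib;
      loc = [ "services" "example" "port" ];
    in
    lib.options.mergeEqualOption loc [
      { file = "a.nix"; value = 80; }
      { file = "b.nix"; value = 80; }
    ]
    # => 80; had the two values differed, forcing the result would throw the
    #    "conflicting definition values" error constructed above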
/* Extracts values of all "value" keys of the given list.
/*
Extracts values of all "value" keys of the given list.
Type: getValues :: [ { value :: a; } ] -> [a]
Type: getValues :: [ { value :: a; } ] -> [a]
Example:
getValues [ { value = 1; } { value = 2; } ] // => [ 1 2 ]
getValues [ ] // => [ ]
Example:
getValues [ { value = 1; } { value = 2; } ] // => [ 1 2 ]
getValues [ ] // => [ ]
*/
getValues = map (x: x.value);
/* Extracts values of all "file" keys of the given list
/*
Extracts values of all "file" keys of the given list
Type: getFiles :: [ { file :: a; } ] -> [a]
Type: getFiles :: [ { file :: a; } ] -> [a]
Example:
getFiles [ { file = "file1"; } { file = "file2"; } ] // => [ "file1" "file2" ]
getFiles [ ] // => [ ]
Example:
getFiles [ { file = "file1"; } { file = "file2"; } ] // => [ "file1" "file2" ]
getFiles [ ] // => [ ]
*/
getFiles = map (x: x.file);
# Generate documentation template from the list of option declaration like
# the set generated with filterOptionSets.
optionAttrSetToDocList = optionAttrSetToDocList' [];
optionAttrSetToDocList = optionAttrSetToDocList' [ ];
optionAttrSetToDocList' = _: options:
concatMap (opt:
optionAttrSetToDocList' =
_: options:
concatMap (
opt:
let
name = showOption opt.loc;
docOption = {
loc = opt.loc;
inherit name;
description = opt.description or null;
declarations = filter (x: x != unknownModule) opt.declarations;
internal = opt.internal or false;
visible =
if (opt?visible && opt.visible == "shallow")
then true
else opt.visible or true;
readOnly = opt.readOnly or false;
type = opt.type.description or "unspecified";
}
// optionalAttrs (opt ? example) {
example =
builtins.addErrorContext "while evaluating the example of option `${name}`" (
docOption =
{
loc = opt.loc;
inherit name;
description = opt.description or null;
declarations = filter (x: x != unknownModule) opt.declarations;
internal = opt.internal or false;
visible = if (opt ? visible && opt.visible == "shallow") then true else opt.visible or true;
readOnly = opt.readOnly or false;
type = opt.type.description or "unspecified";
}
// optionalAttrs (opt ? example) {
example = builtins.addErrorContext "while evaluating the example of option `${name}`" (
renderOptionValue opt.example
);
}
// optionalAttrs (opt ? defaultText || opt ? default) {
default =
builtins.addErrorContext "while evaluating the ${if opt?defaultText then "defaultText" else "default value"} of option `${name}`" (
renderOptionValue (opt.defaultText or opt.default)
);
}
// optionalAttrs (opt ? relatedPackages && opt.relatedPackages != null) { inherit (opt) relatedPackages; };
}
// optionalAttrs (opt ? defaultText || opt ? default) {
default = builtins.addErrorContext "while evaluating the ${
if opt ? defaultText then "defaultText" else "default value"
} of option `${name}`" (renderOptionValue (opt.defaultText or opt.default));
}
// optionalAttrs (opt ? relatedPackages && opt.relatedPackages != null) {
inherit (opt) relatedPackages;
};
subOptions =
let ss = opt.type.getSubOptions opt.loc;
in if ss != {} then optionAttrSetToDocList' opt.loc ss else [];
let
ss = opt.type.getSubOptions opt.loc;
in
if ss != { } then optionAttrSetToDocList' opt.loc ss else [ ];
subOptionsVisible = docOption.visible && opt.visible or null != "shallow";
in
# To find infinite recursion in NixOS option docs:
# builtins.trace opt.loc
[ docOption ] ++ optionals subOptionsVisible subOptions) (collect isOption options);
# To find infinite recursion in NixOS option docs:
# builtins.trace opt.loc
[ docOption ] ++ optionals subOptionsVisible subOptions
) (collect isOption options);
/*
This function recursively removes all derivation attributes from
`x` except for the `name` attribute.
/* This function recursively removes all derivation attributes from
`x` except for the `name` attribute.
This is to make the generation of `options.xml` much more
efficient: the XML representation of derivations is very large
(on the order of megabytes) and is not actually used by the
manual generator.
This is to make the generation of `options.xml` much more
efficient: the XML representation of derivations is very large
(on the order of megabytes) and is not actually used by the
manual generator.
This function was made obsolete by renderOptionValue and is kept for
compatibility with out-of-tree code.
This function was made obsolete by renderOptionValue and is kept for
compatibility with out-of-tree code.
*/
scrubOptionValue = x:
scrubOptionValue =
x:
if isDerivation x then
{ type = "derivation"; drvPath = x.name; outPath = x.name; name = x.name; }
else if isList x then map scrubOptionValue x
else if isAttrs x then mapAttrs (n: v: scrubOptionValue v) (removeAttrs x ["_args"])
else x;
{
type = "derivation";
drvPath = x.name;
outPath = x.name;
name = x.name;
}
else if isList x then
map scrubOptionValue x
else if isAttrs x then
mapAttrs (n: v: scrubOptionValue v) (removeAttrs x [ "_args" ])
else
x;
/* Ensures that the given option value (default or example) is a `_type`d string
by rendering Nix values to `literalExpression`s.
/*
Ensures that the given option value (default or example) is a `_type`d string
by rendering Nix values to `literalExpression`s.
*/
renderOptionValue = v:
if v ? _type && v ? text then v
else literalExpression (lib.generators.toPretty {
multiline = true;
allowPrettyValues = true;
} v);
renderOptionValue =
v:
if v ? _type && v ? text then
v
else
literalExpression (
lib.generators.toPretty {
multiline = true;
allowPrettyValues = true;
} v
);
/* For use in the `defaultText` and `example` option attributes. Causes the
given string to be rendered verbatim in the documentation as Nix code. This
is necessary for complex values, e.g. functions, or values that depend on
other values or packages.
/*
For use in the `defaultText` and `example` option attributes. Causes the
given string to be rendered verbatim in the documentation as Nix code. This
is necessary for complex values, e.g. functions, or values that depend on
other values or packages.
*/
literalExpression = text:
if ! isString text then throw "literalExpression expects a string."
else { _type = "literalExpression"; inherit text; };
literalExpression =
text:
if !isString text then
throw "literalExpression expects a string."
else
{
_type = "literalExpression";
inherit text;
};
literalExample = lib.warn "lib.literalExample is deprecated, use lib.literalExpression instead, or use lib.literalMD for a non-Nix description." literalExpression;
/* Transition marker for documentation that's already migrated to markdown
syntax. Has been a no-op for some while and been removed from nixpkgs.
Kept here to alert downstream users who may not be aware of the migration's
completion that it should be removed from modules.
/*
Transition marker for documentation that's already migrated to markdown
syntax. Has been a no-op for some while and been removed from nixpkgs.
Kept here to alert downstream users who may not be aware of the migration's
completion that it should be removed from modules.
*/
mdDoc = lib.warn "lib.mdDoc will be removed from nixpkgs in 24.11. Option descriptions are now in Markdown by default; you can remove any remaining uses of lib.mdDoc.";
/* For use in the `defaultText` and `example` option attributes. Causes the
given MD text to be inserted verbatim in the documentation, for when
a `literalExpression` would be too hard to read.
/*
For use in the `defaultText` and `example` option attributes. Causes the
given MD text to be inserted verbatim in the documentation, for when
a `literalExpression` would be too hard to read.
*/
literalMD = text:
if ! isString text then throw "literalMD expects a string."
else { _type = "literalMD"; inherit text; };
literalMD =
text:
if !isString text then
throw "literalMD expects a string."
else
{
_type = "literalMD";
inherit text;
};
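As a usage sketch (assuming these helpers stay exposed under `lib.options`): `renderOptionValue` tags a plain Nix value for the manual, while `literalMD` tags prose to be inserted verbatim.

    let
      lib = import ./lib;
    in
    {
      rendered = lib.options.renderOptionValue { enable = true; ports = [ 80 443 ]; };
      # => { _type = "literalExpression"; text = <pretty-printed form of the value>; }
      verbatim = lib.options.literalMD "See the *manual* for details.";
      # => { _type = "literalMD"; text = "See the *manual* for details."; }
    }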
# Helper functions.
/* Convert an option, described as a list of the option parts to a
human-readable version.
/*
Convert an option, described as a list of the option parts to a
human-readable version.
Example:
(showOption ["foo" "bar" "baz"]) == "foo.bar.baz"
(showOption ["foo" "bar.baz" "tux"]) == "foo.\"bar.baz\".tux"
(showOption ["windowManager" "2bwm" "enable"]) == "windowManager.\"2bwm\".enable"
Example:
(showOption ["foo" "bar" "baz"]) == "foo.bar.baz"
(showOption ["foo" "bar.baz" "tux"]) == "foo.\"bar.baz\".tux"
(showOption ["windowManager" "2bwm" "enable"]) == "windowManager.\"2bwm\".enable"
Placeholders will not be quoted as they are not actual values:
(showOption ["foo" "*" "bar"]) == "foo.*.bar"
(showOption ["foo" "<name>" "bar"]) == "foo.<name>.bar"
Placeholders will not be quoted as they are not actual values:
(showOption ["foo" "*" "bar"]) == "foo.*.bar"
(showOption ["foo" "<name>" "bar"]) == "foo.<name>.bar"
*/
showOption = parts: let
escapeOptionPart = part:
let
# We assume that these are "special values" and not real configuration data.
# If it is real configuration data, it is rendered incorrectly.
specialIdentifiers = [
"<name>" # attrsOf (submodule {})
"*" # listOf (submodule {})
"<function body>" # functionTo
];
in if builtins.elem part specialIdentifiers
then part
else lib.strings.escapeNixIdentifier part;
in (concatStringsSep ".") (map escapeOptionPart parts);
showOption =
parts:
let
escapeOptionPart =
part:
let
# We assume that these are "special values" and not real configuration data.
# If it is real configuration data, it is rendered incorrectly.
specialIdentifiers = [
"<name>" # attrsOf (submodule {})
"*" # listOf (submodule {})
"<function body>" # functionTo
];
in
if builtins.elem part specialIdentifiers then part else lib.strings.escapeNixIdentifier part;
in
(concatStringsSep ".") (map escapeOptionPart parts);
showFiles = files: concatStringsSep " and " (map (f: "`${f}'") files);
showDefs = defs: concatMapStrings (def:
let
# Pretty print the value for display, if successful
prettyEval = builtins.tryEval
(lib.generators.toPretty { }
(lib.generators.withRecursion { depthLimit = 10; throwOnDepthLimit = false; } def.value));
# Split it into its lines
lines = filter (v: ! isList v) (builtins.split "\n" prettyEval.value);
# Only display the first 5 lines, and indent them for better visibility
value = concatStringsSep "\n " (take 5 lines ++ optional (length lines > 5) "...");
result =
# Don't print any value if evaluating the value strictly fails
if ! prettyEval.success then ""
# Put it on a new line if it consists of multiple
else if length lines > 1 then ":\n " + value
else ": " + value;
in "\n- In `${def.file}'${result}"
) defs;
showDefs =
defs:
concatMapStrings (
def:
let
# Pretty print the value for display, if successful
prettyEval = builtins.tryEval (
lib.generators.toPretty { } (
lib.generators.withRecursion {
depthLimit = 10;
throwOnDepthLimit = false;
} def.value
)
);
# Split it into its lines
lines = filter (v: !isList v) (builtins.split "\n" prettyEval.value);
# Only display the first 5 lines, and indent them for better visibility
value = concatStringsSep "\n " (take 5 lines ++ optional (length lines > 5) "...");
result =
# Don't print any value if evaluating the value strictly fails
if !prettyEval.success then
""
# Put it on a new line if it consists of multiple
else if length lines > 1 then
":\n " + value
else
": " + value;
in
"\n- In `${def.file}'${result}"
) defs;
showOptionWithDefLocs = opt: ''
${showOption opt.loc}, with values defined in:
${concatMapStringsSep "\n" (defFile: " - ${defFile}") opt.files}
'';
${showOption opt.loc}, with values defined in:
${concatMapStringsSep "\n" (defFile: " - ${defFile}") opt.files}
'';
unknownModule = "<unknown-file>";
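To illustrate the helpers above, a small sketch of `showOptionWithDefLocs`; the option path and file names are invented:

    let
      lib = import ./lib;
    in
    lib.options.showOptionWithDefLocs {
      loc = [ "services" "nginx" "enable" ];
      files = [ "/etc/nixos/configuration.nix" "/etc/nixos/web.nix" ];
    }
    # => "services.nginx.enable, with values defined in:\n  - /etc/nixos/configuration.nix\n  - /etc/nixos/web.nix\n"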

@ -1,4 +1,4 @@
/* Functions for working with path values. */
# Functions for working with path values.
# See ./README.md for internal docs
{ lib }:
let
@ -27,22 +27,16 @@ let
listHasPrefix = lib.lists.hasPrefix;
inherit (lib.strings)
concatStringsSep
substring
;
inherit (lib.strings) concatStringsSep substring;
inherit (lib.asserts)
assertMsg
;
inherit (lib.asserts) assertMsg;
inherit (lib.path.subpath)
isValid
;
inherit (lib.path.subpath) isValid;
# Return the reason why a subpath is invalid, or `null` if it's valid
subpathInvalidReason = value:
if ! isString value then
subpathInvalidReason =
value:
if !isString value then
"The given value is of type ${builtins.typeOf value}, but a string was expected"
else if value == "" then
"The given string is empty"
@ -51,11 +45,13 @@ let
# We don't support ".." components, see ./path.md#parent-directory
else if match "(.*/)?\\.\\.(/.*)?" value != null then
"The given string \"${value}\" contains a `..` component, which is not allowed in subpaths"
else null;
else
null;
# Split and normalise a relative path string into its components.
# Error for ".." components and doesn't include "." components
splitRelPath = path:
splitRelPath =
path:
let
# Split the string into its parts using regex for efficiency. This regex
# matches patterns like "/", "/./", "/././", with arbitrarily many "/"s
@ -86,26 +82,31 @@ let
componentCount = partCount - skipEnd - skipStart;
in
# Special case of a single "." path component. Such a case leaves a
# componentCount of -1 due to the skipStart/skipEnd not verifying that
# they don't refer to the same character
if path == "." then []
# Special case of a single "." path component. Such a case leaves a
# componentCount of -1 due to the skipStart/skipEnd not verifying that
# they don't refer to the same character
if path == "." then
[ ]
# Generate the result list directly. This is more efficient than a
# combination of `filter`, `init` and `tail`, because here we don't
# allocate any intermediate lists
else genList (index:
# Generate the result list directly. This is more efficient than a
# combination of `filter`, `init` and `tail`, because here we don't
# allocate any intermediate lists
else
genList (
index:
# To get to the element we need to add the number of parts we skip and
# multiply by two due to the interleaved layout of `parts`
elemAt parts ((skipStart + index) * 2)
) componentCount;
# Join relative path components together
joinRelPath = components:
joinRelPath =
components:
# Always return relative paths with `./` as a prefix (./path.md#leading-dots-for-relative-paths)
"./" +
# An empty string is not a valid relative path, so we need to return a `.` when we have no components
(if components == [] then "." else concatStringsSep "/" components);
"./"
+
# An empty string is not a valid relative path, so we need to return a `.` when we have no components
(if components == [ ] then "." else concatStringsSep "/" components);
# Type: Path -> { root :: Path, components :: [ String ] }
#
@ -117,11 +118,18 @@ let
# because it can distinguish different filesystem roots
deconstructPath =
let
recurse = components: base:
recurse =
components: base:
# If the parent of a path is the path itself, then it's a filesystem root
if base == dirOf base then { root = base; inherit components; }
else recurse ([ (baseNameOf base) ] ++ components) (dirOf base);
in recurse [];
if base == dirOf base then
{
root = base;
inherit components;
}
else
recurse ([ (baseNameOf base) ] ++ components) (dirOf base);
in
recurse [ ];
# The components of the store directory, typically [ "nix" "store" ]
storeDirComponents = splitRelPath ("./" + storeDir);
@ -132,7 +140,8 @@ let
#
# Whether path components have a store path as a prefix, according to
# https://nixos.org/manual/nix/stable/store/store-path.html#store-path.
componentsHaveStorePathPrefix = components:
componentsHaveStorePathPrefix =
components:
# path starts with the store directory (typically /nix/store)
listHasPrefix storeDirComponents components
# is not the store directory itself, meaning there's at least one extra component
@ -145,7 +154,9 @@ let
# We care more about discerning store path-ness on realistic values. Making it airtight would be fragile and slow.
&& match ".{32}-.+" (elemAt components storeDirLength) != null;
in /* No rec! Add dependencies on this file at the top. */ {
in
# No rec! Add dependencies on this file at the top.
{
/*
Append a subpath string to a path.
@ -194,8 +205,8 @@ in /* No rec! Add dependencies on this file at the top. */ {
path:
# The subpath string to append
subpath:
assert assertMsg (isPath path) ''
lib.path.append: The first argument is of type ${builtins.typeOf path}, but a path was expected'';
assert assertMsg (isPath path)
''lib.path.append: The first argument is of type ${builtins.typeOf path}, but a path was expected'';
assert assertMsg (isValid subpath) ''
lib.path.append: Second argument is not a valid subpath string:
${subpathInvalidReason subpath}'';
@ -225,25 +236,23 @@ in /* No rec! Add dependencies on this file at the top. */ {
*/
hasPrefix =
path1:
assert assertMsg
(isPath path1)
assert assertMsg (isPath path1)
"lib.path.hasPrefix: First argument is of type ${typeOf path1}, but a path was expected";
let
path1Deconstructed = deconstructPath path1;
in
path2:
assert assertMsg
(isPath path2)
"lib.path.hasPrefix: Second argument is of type ${typeOf path2}, but a path was expected";
let
path2Deconstructed = deconstructPath path2;
in
assert assertMsg
(path1Deconstructed.root == path2Deconstructed.root) ''
lib.path.hasPrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
take (length path1Deconstructed.components) path2Deconstructed.components == path1Deconstructed.components;
path2:
assert assertMsg (isPath path2)
"lib.path.hasPrefix: Second argument is of type ${typeOf path2}, but a path was expected";
let
path2Deconstructed = deconstructPath path2;
in
assert assertMsg (path1Deconstructed.root == path2Deconstructed.root) ''
lib.path.hasPrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
take (length path1Deconstructed.components) path2Deconstructed.components
== path1Deconstructed.components;
/*
Remove the first path as a component-wise prefix from the second path.
@ -270,33 +279,29 @@ in /* No rec! Add dependencies on this file at the top. */ {
*/
removePrefix =
path1:
assert assertMsg
(isPath path1)
assert assertMsg (isPath path1)
"lib.path.removePrefix: First argument is of type ${typeOf path1}, but a path was expected.";
let
path1Deconstructed = deconstructPath path1;
path1Length = length path1Deconstructed.components;
in
path2:
assert assertMsg
(isPath path2)
"lib.path.removePrefix: Second argument is of type ${typeOf path2}, but a path was expected.";
let
path2Deconstructed = deconstructPath path2;
success = take path1Length path2Deconstructed.components == path1Deconstructed.components;
components =
if success then
drop path1Length path2Deconstructed.components
else
throw ''
lib.path.removePrefix: The first path argument "${toString path1}" is not a component-wise prefix of the second path argument "${toString path2}".'';
in
assert assertMsg
(path1Deconstructed.root == path2Deconstructed.root) ''
lib.path.removePrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
joinRelPath components;
path2:
assert assertMsg (isPath path2)
"lib.path.removePrefix: Second argument is of type ${typeOf path2}, but a path was expected.";
let
path2Deconstructed = deconstructPath path2;
success = take path1Length path2Deconstructed.components == path1Deconstructed.components;
components =
if success then
drop path1Length path2Deconstructed.components
else
throw ''lib.path.removePrefix: The first path argument "${toString path1}" is not a component-wise prefix of the second path argument "${toString path2}".'';
in
assert assertMsg (path1Deconstructed.root == path2Deconstructed.root) ''
lib.path.removePrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
joinRelPath components;
/*
Split the filesystem root from a [path](https://nixos.org/manual/nix/stable/language/values.html#type-path).
@ -336,12 +341,12 @@ in /* No rec! Add dependencies on this file at the top. */ {
splitRoot =
# The path to split the root off of
path:
assert assertMsg
(isPath path)
assert assertMsg (isPath path)
"lib.path.splitRoot: Argument is of type ${typeOf path}, but a path was expected";
let
deconstructed = deconstructPath path;
in {
in
{
root = deconstructed.root;
subpath = joinRelPath deconstructed.components;
};
@ -387,12 +392,12 @@ in /* No rec! Add dependencies on this file at the top. */ {
hasStorePathPrefix /nix/store/nvl9ic0pj1fpyln3zaqrf4cclbqdfn1j-foo.drv
=> true
*/
hasStorePathPrefix = path:
hasStorePathPrefix =
path:
let
deconstructed = deconstructPath path;
in
assert assertMsg
(isPath path)
assert assertMsg (isPath path)
"lib.path.hasStorePathPrefix: Argument is of type ${typeOf path}, but a path was expected";
assert assertMsg
# This function likely breaks or needs adjustment if used with other filesystem roots, if they ever get implemented.
@ -446,9 +451,7 @@ in /* No rec! Add dependencies on this file at the top. */ {
*/
subpath.isValid =
# The value to check
value:
subpathInvalidReason value == null;
value: subpathInvalidReason value == null;
/*
Join subpath strings together using `/`, returning a normalised subpath string.
@ -511,18 +514,20 @@ in /* No rec! Add dependencies on this file at the top. */ {
# The list of subpaths to join together
subpaths:
# Fast in case all paths are valid
if all isValid subpaths
then joinRelPath (concatMap splitRelPath subpaths)
if all isValid subpaths then
joinRelPath (concatMap splitRelPath subpaths)
else
# Otherwise we take our time to gather more info for a better error message
# Strictly go through each path, throwing on the first invalid one
# Tracks the list index in the fold accumulator
foldl' (i: path:
if isValid path
then i + 1
else throw ''
lib.path.subpath.join: Element at index ${toString i} is not a valid subpath string:
${subpathInvalidReason path}''
foldl' (
i: path:
if isValid path then
i + 1
else
throw ''
lib.path.subpath.join: Element at index ${toString i} is not a valid subpath string:
${subpathInvalidReason path}''
) 0 subpaths;
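A brief usage sketch of the public functions whose definitions are reformatted above (the paths and subpaths are illustrative, and `lib` is assumed to be `import ./lib`):

    let
      lib = import ./lib;
    in
    {
      appended = lib.path.append /var/lib "nixos/secrets";      # => /var/lib/nixos/secrets
      joined = lib.path.subpath.join [ "./foo" "bar//baz/" ];   # => "./foo/bar/baz"
      isUnder = lib.path.hasPrefix /var /var/lib;               # => true
    }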
/*

@ -2,8 +2,8 @@
nixpkgs ? ../../..,
system ? builtins.currentSystem,
pkgs ? import nixpkgs {
config = {};
overlays = [];
config = { };
overlays = [ ];
inherit system;
},
nixVersions ? import ../../tests/nix-for-tests.nix { inherit pkgs; },
@ -12,34 +12,36 @@
seed ? null,
}:
pkgs.runCommand "lib-path-tests" {
nativeBuildInputs = [
nixVersions.stable
] ++ (with pkgs; [
jq
bc
]);
} ''
# Needed to make Nix evaluation work
export TEST_ROOT=$(pwd)/test-tmp
export NIX_BUILD_HOOK=
export NIX_CONF_DIR=$TEST_ROOT/etc
export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
export NIX_STATE_DIR=$TEST_ROOT/var/nix
export NIX_STORE_DIR=$TEST_ROOT/store
export PAGER=cat
pkgs.runCommand "lib-path-tests"
{
nativeBuildInputs =
[ nixVersions.stable ]
++ (with pkgs; [
jq
bc
]);
}
''
# Needed to make Nix evaluation work
export TEST_ROOT=$(pwd)/test-tmp
export NIX_BUILD_HOOK=
export NIX_CONF_DIR=$TEST_ROOT/etc
export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
export NIX_STATE_DIR=$TEST_ROOT/var/nix
export NIX_STORE_DIR=$TEST_ROOT/store
export PAGER=cat
cp -r ${libpath} lib
export TEST_LIB=$PWD/lib
cp -r ${libpath} lib
export TEST_LIB=$PWD/lib
echo "Running unit tests lib/path/tests/unit.nix"
nix-instantiate --eval --show-trace \
--argstr libpath "$TEST_LIB" \
lib/path/tests/unit.nix
echo "Running unit tests lib/path/tests/unit.nix"
nix-instantiate --eval --show-trace \
--argstr libpath "$TEST_LIB" \
lib/path/tests/unit.nix
echo "Running property tests lib/path/tests/prop.sh"
bash lib/path/tests/prop.sh ${toString seed}
echo "Running property tests lib/path/tests/prop.sh"
bash lib/path/tests/prop.sh ${toString seed}
touch $out
''
touch $out
''

@ -16,14 +16,15 @@ let
lib = import libpath;
# read each file into a string
strings = map (name:
builtins.readFile (dir + "/${name}")
) (builtins.attrNames (builtins.readDir dir));
strings = map (name: builtins.readFile (dir + "/${name}")) (
builtins.attrNames (builtins.readDir dir)
);
inherit (lib.path.subpath) normalise isValid;
inherit (lib.asserts) assertMsg;
normaliseAndCheck = str:
normaliseAndCheck =
str:
let
originalValid = isValid str;
@ -33,28 +34,27 @@ let
absConcatOrig = /. + ("/" + str);
absConcatNormalised = /. + ("/" + tryOnce.value);
in
# Check the lib.path.subpath.normalise property to only error on invalid subpaths
assert assertMsg
(originalValid -> tryOnce.success)
"Even though string \"${str}\" is valid as a subpath, the normalisation for it failed";
assert assertMsg
(! originalValid -> ! tryOnce.success)
"Even though string \"${str}\" is invalid as a subpath, the normalisation for it succeeded";
# Check the lib.path.subpath.normalise property to only error on invalid subpaths
assert assertMsg (
originalValid -> tryOnce.success
) "Even though string \"${str}\" is valid as a subpath, the normalisation for it failed";
assert assertMsg (
!originalValid -> !tryOnce.success
) "Even though string \"${str}\" is invalid as a subpath, the normalisation for it succeeded";
# Check normalisation idempotency
assert assertMsg
(originalValid -> tryTwice.success)
"For valid subpath \"${str}\", the normalisation \"${tryOnce.value}\" was not a valid subpath";
assert assertMsg
(originalValid -> tryOnce.value == tryTwice.value)
"For valid subpath \"${str}\", normalising it once gives \"${tryOnce.value}\" but normalising it twice gives a different result: \"${tryTwice.value}\"";
# Check normalisation idempotency
assert assertMsg (
originalValid -> tryTwice.success
) "For valid subpath \"${str}\", the normalisation \"${tryOnce.value}\" was not a valid subpath";
assert assertMsg (originalValid -> tryOnce.value == tryTwice.value)
"For valid subpath \"${str}\", normalising it once gives \"${tryOnce.value}\" but normalising it twice gives a different result: \"${tryTwice.value}\"";
# Check that normalisation doesn't change a string when appended to an absolute Nix path value
assert assertMsg
(originalValid -> absConcatOrig == absConcatNormalised)
"For valid subpath \"${str}\", appending to an absolute Nix path value gives \"${absConcatOrig}\", but appending the normalised result \"${tryOnce.value}\" gives a different value \"${absConcatNormalised}\"";
# Check that normalisation doesn't change a string when appended to an absolute Nix path value
assert assertMsg (originalValid -> absConcatOrig == absConcatNormalised)
"For valid subpath \"${str}\", appending to an absolute Nix path value gives \"${absConcatOrig}\", but appending the normalised result \"${tryOnce.value}\" gives a different value \"${absConcatNormalised}\"";
# Return an empty string when failed
if tryOnce.success then tryOnce.value else "";
# Return an empty string when failed
if tryOnce.success then tryOnce.value else "";
in lib.genAttrs strings normaliseAndCheck
in
lib.genAttrs strings normaliseAndCheck
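The property this test exercises, in a standalone sketch (the input string is chosen arbitrarily):

    let
      lib = import ./lib;
      once = lib.path.subpath.normalise "foo//./bar/";
      twice = lib.path.subpath.normalise once;
    in
    assert once == "./foo/bar";
    assert once == twice;   # normalisation is idempotent
    "ok"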

@ -3,7 +3,14 @@
{ libpath }:
let
lib = import libpath;
inherit (lib.path) hasPrefix removePrefix append splitRoot hasStorePathPrefix subpath;
inherit (lib.path)
hasPrefix
removePrefix
append
splitRoot
hasStorePathPrefix
subpath
;
# This is not allowed generally, but we're in the tests here, so we'll allow ourselves.
storeDirPath = /. + builtins.storeDir;
@ -79,15 +86,24 @@ let
testSplitRootExample1 = {
expr = splitRoot /foo/bar;
expected = { root = /.; subpath = "./foo/bar"; };
expected = {
root = /.;
subpath = "./foo/bar";
};
};
testSplitRootExample2 = {
expr = splitRoot /.;
expected = { root = /.; subpath = "./."; };
expected = {
root = /.;
subpath = "./.";
};
};
testSplitRootExample3 = {
expr = splitRoot /foo/../bar;
expected = { root = /.; subpath = "./bar"; };
expected = {
root = /.;
subpath = "./bar";
};
};
testSplitRootExample4 = {
expr = (builtins.tryEval (splitRoot "/foo/bar")).success;
@ -111,7 +127,9 @@ let
expected = false;
};
testHasStorePathPrefixExample5 = {
expr = hasStorePathPrefix (storeDirPath + "/.links/10gg8k3rmbw8p7gszarbk7qyd9jwxhcfq9i6s5i0qikx8alkk4hq");
expr = hasStorePathPrefix (
storeDirPath + "/.links/10gg8k3rmbw8p7gszarbk7qyd9jwxhcfq9i6s5i0qikx8alkk4hq"
);
expected = false;
};
testHasStorePathPrefixExample6 = {
@ -188,11 +206,18 @@ let
# Test examples from the lib.path.subpath.join documentation
testSubpathJoinExample1 = {
expr = subpath.join [ "foo" "bar/baz" ];
expr = subpath.join [
"foo"
"bar/baz"
];
expected = "./foo/bar/baz";
};
testSubpathJoinExample2 = {
expr = subpath.join [ "./foo" "." "bar//./baz/" ];
expr = subpath.join [
"./foo"
"."
"bar//./baz/"
];
expected = "./foo/bar/baz";
};
testSubpathJoinExample3 = {
@ -273,7 +298,11 @@ let
};
testSubpathComponentsExample2 = {
expr = subpath.components "./foo//bar/./baz/";
expected = [ "foo" "bar" "baz" ];
expected = [
"foo"
"bar"
"baz"
];
};
testSubpathComponentsExample3 = {
expr = (builtins.tryEval (subpath.components "/foo")).success;
@ -281,5 +310,7 @@ let
};
};
in
if cases == [] then "Unit tests successful"
else throw "Path unit tests failed: ${lib.generators.toPretty {} cases}"
if cases == [ ] then
"Unit tests successful"
else
throw "Path unit tests failed: ${lib.generators.toPretty { } cases}"

@ -5,15 +5,16 @@ let
shortName = tname;
isSource = false;
};
in lib.mapAttrs (tname: tset: defaultSourceType tname // tset) {
in
lib.mapAttrs (tname: tset: defaultSourceType tname // tset) {
fromSource = {
isSource = true;
};
binaryNativeCode = {};
binaryNativeCode = { };
binaryBytecode = {};
binaryBytecode = { };
binaryFirmware = {};
binaryFirmware = { };
}
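These source types are typically consumed in a package's `meta`; a fragment of such a definition, assuming the attrset is exposed as `lib.sourceTypes` and `lib` is in scope:

    {
      meta.sourceProvenance = with lib.sourceTypes; [ binaryNativeCode ];
    }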

@ -1,44 +1,50 @@
/* Functions for copying sources to the Nix store. */
# Functions for copying sources to the Nix store.
{ lib }:
# Tested in lib/tests/sources.sh
let
inherit (builtins)
match
split
storeDir
;
inherit (builtins) match split storeDir;
inherit (lib)
boolToString
filter
isString
readFile
;
inherit (lib.filesystem)
pathIsRegularFile
;
inherit (lib.filesystem) pathIsRegularFile;
/*
A basic filter for `cleanSourceWith` that removes
version control system directories, backup files (*~)
and some generated files.
*/
cleanSourceFilter = name: type: let baseName = baseNameOf (toString name); in ! (
# Filter out version control software files/directories
(baseName == ".git" || type == "directory" && (baseName == ".svn" || baseName == "CVS" || baseName == ".hg")) ||
# Filter out editor backup / swap files.
lib.hasSuffix "~" baseName ||
match "^\\.sw[a-z]$" baseName != null ||
match "^\\..*\\.sw[a-z]$" baseName != null ||
cleanSourceFilter =
name: type:
let
baseName = baseNameOf (toString name);
in
!(
# Filter out version control software files/directories
(
baseName == ".git"
|| type == "directory" && (baseName == ".svn" || baseName == "CVS" || baseName == ".hg")
)
||
# Filter out editor backup / swap files.
lib.hasSuffix "~" baseName
|| match "^\\.sw[a-z]$" baseName != null
|| match "^\\..*\\.sw[a-z]$" baseName != null
||
# Filter out generated files.
lib.hasSuffix ".o" baseName ||
lib.hasSuffix ".so" baseName ||
# Filter out nix-build result symlinks
(type == "symlink" && lib.hasPrefix "result" baseName) ||
# Filter out sockets and other types of files we can't have in the store.
(type == "unknown")
);
# Filter out generated files.
lib.hasSuffix ".o" baseName
|| lib.hasSuffix ".so" baseName
||
# Filter out nix-build result symlinks
(type == "symlink" && lib.hasPrefix "result" baseName)
||
# Filter out sockets and other types of files we can't have in the store.
(type == "unknown")
);
/*
Filters a source tree removing version control files and directories using cleanSourceFilter.
@ -46,7 +52,12 @@ let
Example:
cleanSource ./.
*/
cleanSource = src: cleanSourceWith { filter = cleanSourceFilter; inherit src; };
cleanSource =
src:
cleanSourceWith {
filter = cleanSourceFilter;
inherit src;
};
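A composition sketch, as the `cleanSourceWith` documentation that follows describes; the extra Markdown filter is purely illustrative, and `lib` is assumed to be `import ./lib`:

    let
      lib = import ./lib;
    in
    lib.sources.cleanSourceWith {
      # first pass: drop VCS directories, editor backups, *.o/*.so and result symlinks
      src = lib.sources.cleanSource ./.;
      # second pass: additionally drop Markdown files
      filter = path: _type: !(lib.hasSuffix ".md" (baseNameOf path));
    }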
/*
Like `builtins.filterSource`, except it will compose with itself,
@ -65,7 +76,6 @@ let
builtins.filterSource f (builtins.filterSource g ./.)
# Fails!
*/
cleanSourceWith =
{
@ -80,11 +90,12 @@ let
filter ? _path: _type: true,
# Optional name to use as part of the store path.
# This defaults to `src.name` or otherwise `"source"`.
name ? null
name ? null,
}:
let
orig = toSourceAttributes src;
in fromSourceAttributes {
in
fromSourceAttributes {
inherit (orig) origSrc;
filter = path: type: filter path type && orig.filter path type;
name = if name != null then name else orig.name;
@ -101,31 +112,40 @@ let
let
attrs = toSourceAttributes src;
in
fromSourceAttributes (
attrs // {
filter = path: type:
let
r = attrs.filter path type;
in
builtins.trace "${attrs.name}.filter ${path} = ${boolToString r}" r;
}
) // {
satisfiesSubpathInvariant = src ? satisfiesSubpathInvariant && src.satisfiesSubpathInvariant;
};
fromSourceAttributes (
attrs
// {
filter =
path: type:
let
r = attrs.filter path type;
in
builtins.trace "${attrs.name}.filter ${path} = ${boolToString r}" r;
}
)
// {
satisfiesSubpathInvariant = src ? satisfiesSubpathInvariant && src.satisfiesSubpathInvariant;
};
/*
Filter sources by a list of regular expressions.
Example: src = sourceByRegex ./my-subproject [".*\.py$" "^database.sql$"]
*/
sourceByRegex = src: regexes:
sourceByRegex =
src: regexes:
let
isFiltered = src ? _isLibCleanSourceWith;
origSrc = if isFiltered then src.origSrc else src;
in lib.cleanSourceWith {
filter = (path: type:
let relPath = lib.removePrefix (toString origSrc + "/") (toString path);
in lib.any (re: match re relPath != null) regexes);
in
lib.cleanSourceWith {
filter = (
path: type:
let
relPath = lib.removePrefix (toString origSrc + "/") (toString path);
in
lib.any (re: match re relPath != null) regexes
);
inherit src;
};
@ -145,21 +165,29 @@ let
src:
# A list of file suffix strings
exts:
let filter = name: type:
let base = baseNameOf (toString name);
in type == "directory" || lib.any (ext: lib.hasSuffix ext base) exts;
in cleanSourceWith { inherit filter src; };
let
filter =
name: type:
let
base = baseNameOf (toString name);
in
type == "directory" || lib.any (ext: lib.hasSuffix ext base) exts;
in
cleanSourceWith { inherit filter src; };
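Usage sketch (the suffixes are illustrative):

    let
      lib = import ./lib;
    in
    # keeps directories plus any *.py and *.sql files below ./.
    lib.sources.sourceFilesBySuffices ./. [ ".py" ".sql" ]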
pathIsGitRepo = path: (_commitIdFromGitRepoOrError path)?value;
pathIsGitRepo = path: (_commitIdFromGitRepoOrError path) ? value;
/*
Get the commit id of a git repo.
Example: commitIdFromGitRepo <nixpkgs/.git>
*/
commitIdFromGitRepo = path:
let commitIdOrError = _commitIdFromGitRepoOrError path;
in commitIdOrError.value or (throw commitIdOrError.error);
commitIdFromGitRepo =
path:
let
commitIdOrError = _commitIdFromGitRepoOrError path;
in
commitIdOrError.value or (throw commitIdOrError.error);
# Get the commit id of a git repo.
@ -168,55 +196,68 @@ let
# Example: commitIdFromGitRepo <nixpkgs/.git>
# not exported, used for commitIdFromGitRepo
_commitIdFromGitRepoOrError =
let readCommitFromFile = file: path:
let fileName = path + "/${file}";
packedRefsName = path + "/packed-refs";
absolutePath = base: path:
if lib.hasPrefix "/" path
then path
else toString (/. + "${base}/${path}");
in if pathIsRegularFile path
# Resolve git worktrees. See gitrepository-layout(5)
then
let m = match "^gitdir: (.*)$" (lib.fileContents path);
in if m == null
then { error = "File contains no gitdir reference: " + path; }
else
let gitDir = absolutePath (dirOf path) (lib.head m);
commonDir'' = if pathIsRegularFile "${gitDir}/commondir"
then lib.fileContents "${gitDir}/commondir"
else gitDir;
commonDir' = lib.removeSuffix "/" commonDir'';
commonDir = absolutePath gitDir commonDir';
refFile = lib.removePrefix "${commonDir}/" "${gitDir}/${file}";
in readCommitFromFile refFile commonDir
let
readCommitFromFile =
file: path:
let
fileName = path + "/${file}";
packedRefsName = path + "/packed-refs";
absolutePath =
base: path: if lib.hasPrefix "/" path then path else toString (/. + "${base}/${path}");
in
if
pathIsRegularFile path
# Resolve git worktrees. See gitrepository-layout(5)
then
let
m = match "^gitdir: (.*)$" (lib.fileContents path);
in
if m == null then
{ error = "File contains no gitdir reference: " + path; }
else
let
gitDir = absolutePath (dirOf path) (lib.head m);
commonDir'' =
if pathIsRegularFile "${gitDir}/commondir" then lib.fileContents "${gitDir}/commondir" else gitDir;
commonDir' = lib.removeSuffix "/" commonDir'';
commonDir = absolutePath gitDir commonDir';
refFile = lib.removePrefix "${commonDir}/" "${gitDir}/${file}";
in
readCommitFromFile refFile commonDir
else if pathIsRegularFile fileName
# Sometimes git stores the commitId directly in the file but
# sometimes it stores something like: «ref: refs/heads/branch-name»
then
let fileContent = lib.fileContents fileName;
matchRef = match "^ref: (.*)$" fileContent;
in if matchRef == null
then { value = fileContent; }
else readCommitFromFile (lib.head matchRef) path
else if
pathIsRegularFile fileName
# Sometimes git stores the commitId directly in the file but
# sometimes it stores something like: «ref: refs/heads/branch-name»
then
let
fileContent = lib.fileContents fileName;
matchRef = match "^ref: (.*)$" fileContent;
in
if matchRef == null then { value = fileContent; } else readCommitFromFile (lib.head matchRef) path
else if pathIsRegularFile packedRefsName
# Sometimes, the file isn't there at all and has been packed away in the
# packed-refs file, so we have to grep through it:
then
let fileContent = readFile packedRefsName;
matchRef = match "([a-z0-9]+) ${file}";
isRef = s: isString s && (matchRef s) != null;
# there is a bug in libstdc++ leading to stackoverflow for long strings:
# https://github.com/NixOS/nix/issues/2147#issuecomment-659868795
refs = filter isRef (split "\n" fileContent);
in if refs == []
then { error = "Could not find " + file + " in " + packedRefsName; }
else { value = lib.head (matchRef (lib.head refs)); }
else if
pathIsRegularFile packedRefsName
# Sometimes, the file isn't there at all and has been packed away in the
# packed-refs file, so we have to grep through it:
then
let
fileContent = readFile packedRefsName;
matchRef = match "([a-z0-9]+) ${file}";
isRef = s: isString s && (matchRef s) != null;
# there is a bug in libstdc++ leading to stackoverflow for long strings:
# https://github.com/NixOS/nix/issues/2147#issuecomment-659868795
refs = filter isRef (split "\n" fileContent);
in
if refs == [ ] then
{ error = "Could not find " + file + " in " + packedRefsName; }
else
{ value = lib.head (matchRef (lib.head refs)); }
else { error = "Not a .git directory: " + toString path; };
in readCommitFromFile "HEAD";
else
{ error = "Not a .git directory: " + toString path; };
in
readCommitFromFile "HEAD";
pathHasContext = builtins.hasContext or (lib.hasPrefix storeDir);
@ -233,7 +274,8 @@ let
# like class of objects in the wild.
# (Existing ones being: paths, strings, sources and x//{outPath})
# So instead of exposing internals, we build a library of combinator functions.
toSourceAttributes = src:
toSourceAttributes =
src:
let
isFiltered = src ? _isLibCleanSourceWith;
in
@ -247,26 +289,37 @@ let
# fromSourceAttributes : SourceAttrs -> Source
#
# Inverse of toSourceAttributes for Source objects.
fromSourceAttributes = { origSrc, filter, name }:
fromSourceAttributes =
{
origSrc,
filter,
name,
}:
{
_isLibCleanSourceWith = true;
inherit origSrc filter name;
outPath = builtins.path { inherit filter name; path = origSrc; };
outPath = builtins.path {
inherit filter name;
path = origSrc;
};
};
in {
in
{
pathType = lib.warnIf (lib.isInOldestRelease 2305)
"lib.sources.pathType has been moved to lib.filesystem.pathType."
lib.filesystem.pathType;
pathType = lib.warnIf (lib.isInOldestRelease
2305
) "lib.sources.pathType has been moved to lib.filesystem.pathType." lib.filesystem.pathType;
pathIsDirectory = lib.warnIf (lib.isInOldestRelease 2305)
"lib.sources.pathIsDirectory has been moved to lib.filesystem.pathIsDirectory."
lib.filesystem.pathIsDirectory;
pathIsDirectory =
lib.warnIf (lib.isInOldestRelease 2305)
"lib.sources.pathIsDirectory has been moved to lib.filesystem.pathIsDirectory."
lib.filesystem.pathIsDirectory;
pathIsRegularFile = lib.warnIf (lib.isInOldestRelease 2305)
"lib.sources.pathIsRegularFile has been moved to lib.filesystem.pathIsRegularFile."
lib.filesystem.pathIsRegularFile;
pathIsRegularFile =
lib.warnIf (lib.isInOldestRelease 2305)
"lib.sources.pathIsRegularFile has been moved to lib.filesystem.pathIsRegularFile."
lib.filesystem.pathIsRegularFile;
inherit
pathIsGitRepo

@ -1,44 +1,44 @@
{ lib }:
/*
Usage:
Usage:
You define your custom builder script by adding all build steps to a list.
for example:
builder = writeScript "fsg-4.4-builder"
(textClosure [doUnpack addInputs preBuild doMake installPhase doForceShare]);
You define your custom builder script by adding all build steps to a list.
for example:
builder = writeScript "fsg-4.4-builder"
(textClosure [doUnpack addInputs preBuild doMake installPhase doForceShare]);
a step is defined by noDepEntry, fullDepEntry or packEntry.
To ensure that prerequisites are met, those are added before the task itself by
textClosureDupList. Duplicated items are removed again.
a step is defined by noDepEntry, fullDepEntry or packEntry.
To ensure that prerequisites are met, those are added before the task itself by
textClosureDupList. Duplicated items are removed again.
See trace/nixpkgs/trunk/pkgs/top-level/builder-defs.nix for some predefined build steps
See trace/nixpkgs/trunk/pkgs/top-level/builder-defs.nix for some predefined build steps
Attention:
Attention:
let
pkgs = (import <nixpkgs>) {};
in let
inherit (pkgs.stringsWithDeps) fullDepEntry packEntry noDepEntry textClosureMap;
inherit (pkgs.lib) id;
let
pkgs = (import <nixpkgs>) {};
in let
inherit (pkgs.stringsWithDeps) fullDepEntry packEntry noDepEntry textClosureMap;
inherit (pkgs.lib) id;
nameA = noDepEntry "Text a";
nameB = fullDepEntry "Text b" ["nameA"];
nameC = fullDepEntry "Text c" ["nameA"];
nameA = noDepEntry "Text a";
nameB = fullDepEntry "Text b" ["nameA"];
nameC = fullDepEntry "Text c" ["nameA"];
stages = {
nameHeader = noDepEntry "#! /bin/sh \n";
inherit nameA nameB nameC;
};
in
textClosureMap id stages
[ "nameHeader" "nameA" "nameB" "nameC"
nameC # <- added twice. add a dep entry if you know that it will be added once only [1]
"nameB" # <- this will not be added again because the attr name (reference) is used
]
stages = {
nameHeader = noDepEntry "#! /bin/sh \n";
inherit nameA nameB nameC;
};
in
textClosureMap id stages
[ "nameHeader" "nameA" "nameB" "nameC"
nameC # <- added twice. add a dep entry if you know that it will be added once only [1]
"nameB" # <- this will not be added again because the attr name (reference) is used
]
# result: Str("#! /bin/sh \n\nText a\nText b\nText c\nText c",[])
# result: Str("#! /bin/sh \n\nText a\nText b\nText c\nText c",[])
[1] maybe this behaviour should be removed to keep things simple (?)
[1] maybe this behaviour should be removed to keep things simple (?)
*/
let
@ -52,32 +52,63 @@ let
in
rec {
/* !!! The interface of this function is kind of messed up, since
it's way too overloaded and almost but not quite computes a
topological sort of the depstrings. */
/*
!!! The interface of this function is kind of messed up, since
it's way too overloaded and almost but not quite computes a
topological sort of the depstrings.
*/
textClosureList = predefined: arg:
textClosureList =
predefined: arg:
let
f = done: todo:
if todo == [] then {result = []; inherit done;}
f =
done: todo:
if todo == [ ] then
{
result = [ ];
inherit done;
}
else
let entry = head todo; in
let
entry = head todo;
in
if isAttrs entry then
let x = f done entry.deps;
y = f x.done (tail todo);
in { result = x.result ++ [entry.text] ++ y.result;
done = y.done;
}
else if done ? ${entry} then f done (tail todo)
else f (done // listToAttrs [{name = entry; value = 1;}]) ([predefined.${entry}] ++ tail todo);
in (f {} arg).result;
let
x = f done entry.deps;
y = f x.done (tail todo);
in
{
result = x.result ++ [ entry.text ] ++ y.result;
done = y.done;
}
else if done ? ${entry} then
f done (tail todo)
else
f (
done
// listToAttrs [
{
name = entry;
value = 1;
}
]
) ([ predefined.${entry} ] ++ tail todo);
in
(f { } arg).result;
textClosureMap = f: predefined: names:
textClosureMap =
f: predefined: names:
concatStringsSep "\n" (map f (textClosureList predefined names));
noDepEntry = text: {inherit text; deps = [];};
fullDepEntry = text: deps: {inherit text deps;};
packEntry = deps: {inherit deps; text="";};
noDepEntry = text: {
inherit text;
deps = [ ];
};
fullDepEntry = text: deps: { inherit text deps; };
packEntry = deps: {
inherit deps;
text = "";
};
stringAfter = deps: text: { inherit text deps; };

File diff suppressed because it is too large.
@ -5,86 +5,348 @@ rec {
features = {
# x86_64 Generic
# Spec: https://gitlab.com/x86-psABIs/x86-64-ABI/
default = [ ];
x86-64 = [ ];
x86-64-v2 = [ "sse3" "ssse3" "sse4_1" "sse4_2" ];
x86-64-v3 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "avx" "avx2" "fma" ];
x86-64-v4 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "avx" "avx2" "avx512" "fma" ];
default = [ ];
x86-64 = [ ];
x86-64-v2 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
];
x86-64-v3 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"avx"
"avx2"
"fma"
];
x86-64-v4 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"avx"
"avx2"
"avx512"
"fma"
];
# x86_64 Intel
nehalem = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" ];
westmere = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" ];
sandybridge = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" ];
ivybridge = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" ];
haswell = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "fma" ];
broadwell = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "fma" ];
skylake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "fma" ];
skylake-avx512 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
cannonlake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
icelake-client = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
icelake-server = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
cascadelake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
cooperlake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
tigerlake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
alderlake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "fma" ];
nehalem = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
];
westmere = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
];
sandybridge = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
];
ivybridge = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
];
haswell = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
"avx2"
"fma"
];
broadwell = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
"avx2"
"fma"
];
skylake = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
"avx2"
"fma"
];
skylake-avx512 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
"avx2"
"avx512"
"fma"
];
cannonlake = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
"avx2"
"avx512"
"fma"
];
icelake-client = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
"avx2"
"avx512"
"fma"
];
icelake-server = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
"avx2"
"avx512"
"fma"
];
cascadelake = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
"avx2"
"avx512"
"fma"
];
cooperlake = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
"avx2"
"avx512"
"fma"
];
tigerlake = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
"avx2"
"avx512"
"fma"
];
alderlake = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
"avx2"
"fma"
];
# x86_64 AMD
btver1 = [ "sse3" "ssse3" "sse4_1" "sse4_2" ];
btver2 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" ];
bdver1 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "fma" "fma4" ];
bdver2 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "fma" "fma4" ];
bdver3 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "fma" "fma4" ];
bdver4 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "avx2" "fma" "fma4" ];
znver1 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "avx2" "fma" ];
znver2 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "avx2" "fma" ];
znver3 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "avx2" "fma" ];
znver4 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "avx2" "avx512" "fma" ];
btver1 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
];
btver2 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"aes"
"avx"
];
bdver1 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"sse4a"
"aes"
"avx"
"fma"
"fma4"
];
bdver2 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"sse4a"
"aes"
"avx"
"fma"
"fma4"
];
bdver3 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"sse4a"
"aes"
"avx"
"fma"
"fma4"
];
bdver4 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"sse4a"
"aes"
"avx"
"avx2"
"fma"
"fma4"
];
znver1 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"sse4a"
"aes"
"avx"
"avx2"
"fma"
];
znver2 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"sse4a"
"aes"
"avx"
"avx2"
"fma"
];
znver3 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"sse4a"
"aes"
"avx"
"avx2"
"fma"
];
znver4 = [
"sse3"
"ssse3"
"sse4_1"
"sse4_2"
"sse4a"
"aes"
"avx"
"avx2"
"avx512"
"fma"
];
# other
armv5te = [ ];
armv6 = [ ];
armv7-a = [ ];
armv8-a = [ ];
mips32 = [ ];
loongson2f = [ ];
armv5te = [ ];
armv6 = [ ];
armv7-a = [ ];
armv8-a = [ ];
mips32 = [ ];
loongson2f = [ ];
};
# a superior CPU has all the features of an inferior and is able to build and test code for it
inferiors = {
# x86_64 Generic
default = [ ];
x86-64 = [ ];
x86-64-v2 = [ "x86-64" ];
default = [ ];
x86-64 = [ ];
x86-64-v2 = [ "x86-64" ];
x86-64-v3 = [ "x86-64-v2" ] ++ inferiors.x86-64-v2;
x86-64-v4 = [ "x86-64-v3" ] ++ inferiors.x86-64-v3;
# x86_64 Intel
# https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html
nehalem = [ "x86-64-v2" ] ++ inferiors.x86-64-v2;
westmere = [ "nehalem" ] ++ inferiors.nehalem;
sandybridge = [ "westmere" ] ++ inferiors.westmere;
ivybridge = [ "sandybridge" ] ++ inferiors.sandybridge;
nehalem = [ "x86-64-v2" ] ++ inferiors.x86-64-v2;
westmere = [ "nehalem" ] ++ inferiors.nehalem;
sandybridge = [ "westmere" ] ++ inferiors.westmere;
ivybridge = [ "sandybridge" ] ++ inferiors.sandybridge;
haswell = lib.unique ([ "ivybridge" "x86-64-v3" ] ++ inferiors.ivybridge ++ inferiors.x86-64-v3);
broadwell = [ "haswell" ] ++ inferiors.haswell;
skylake = [ "broadwell" ] ++ inferiors.broadwell;
haswell = lib.unique (
[
"ivybridge"
"x86-64-v3"
]
++ inferiors.ivybridge
++ inferiors.x86-64-v3
);
broadwell = [ "haswell" ] ++ inferiors.haswell;
skylake = [ "broadwell" ] ++ inferiors.broadwell;
skylake-avx512 = lib.unique ([ "skylake" "x86-64-v4" ] ++ inferiors.skylake ++ inferiors.x86-64-v4);
cannonlake = [ "skylake-avx512" ] ++ inferiors.skylake-avx512;
icelake-client = [ "cannonlake" ] ++ inferiors.cannonlake;
skylake-avx512 = lib.unique (
[
"skylake"
"x86-64-v4"
]
++ inferiors.skylake
++ inferiors.x86-64-v4
);
cannonlake = [ "skylake-avx512" ] ++ inferiors.skylake-avx512;
icelake-client = [ "cannonlake" ] ++ inferiors.cannonlake;
icelake-server = [ "icelake-client" ] ++ inferiors.icelake-client;
cascadelake = [ "cannonlake" ] ++ inferiors.cannonlake;
cooperlake = [ "cascadelake" ] ++ inferiors.cascadelake;
tigerlake = [ "icelake-server" ] ++ inferiors.icelake-server;
cascadelake = [ "cannonlake" ] ++ inferiors.cannonlake;
cooperlake = [ "cascadelake" ] ++ inferiors.cascadelake;
tigerlake = [ "icelake-server" ] ++ inferiors.icelake-server;
# CX16 does not exist on alderlake, while it does on nearly all other intel CPUs
alderlake = [ ];
alderlake = [ ];
# x86_64 AMD
# TODO: fill this (need testing)
btver1 = [ ];
btver2 = [ ];
bdver1 = [ ];
bdver2 = [ ];
bdver3 = [ ];
bdver4 = [ ];
btver1 = [ ];
btver2 = [ ];
bdver1 = [ ];
bdver2 = [ ];
bdver3 = [ ];
bdver4 = [ ];
# Regarding `skylake` as inferior of `znver1`, there are reports of
# successful usage by Gentoo users and Phoronix benchmarking of different
# `-march` targets.
@ -104,33 +366,42 @@ rec {
# https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html
# https://en.wikichip.org/wiki/amd/microarchitectures/zen
# https://en.wikichip.org/wiki/intel/microarchitectures/skylake
znver1 = [ "skylake" ] ++ inferiors.skylake; # Includes haswell and x86-64-v3
znver2 = [ "znver1" ] ++ inferiors.znver1;
znver3 = [ "znver2" ] ++ inferiors.znver2;
znver4 = lib.unique ([ "znver3" "x86-64-v4" ] ++ inferiors.znver3 ++ inferiors.x86-64-v4);
znver1 = [ "skylake" ] ++ inferiors.skylake; # Includes haswell and x86-64-v3
znver2 = [ "znver1" ] ++ inferiors.znver1;
znver3 = [ "znver2" ] ++ inferiors.znver2;
znver4 = lib.unique (
[
"znver3"
"x86-64-v4"
]
++ inferiors.znver3
++ inferiors.x86-64-v4
);
# other
armv5te = [ ];
armv6 = [ ];
armv7-a = [ ];
armv8-a = [ ];
mips32 = [ ];
loongson2f = [ ];
armv5te = [ ];
armv6 = [ ];
armv7-a = [ ];
armv8-a = [ ];
mips32 = [ ];
loongson2f = [ ];
};
predicates = let
featureSupport = feature: x: builtins.elem feature features.${x} or [];
in {
sse3Support = featureSupport "sse3";
ssse3Support = featureSupport "ssse3";
sse4_1Support = featureSupport "sse4_1";
sse4_2Support = featureSupport "sse4_2";
sse4_aSupport = featureSupport "sse4a";
avxSupport = featureSupport "avx";
avx2Support = featureSupport "avx2";
avx512Support = featureSupport "avx512";
aesSupport = featureSupport "aes";
fmaSupport = featureSupport "fma";
fma4Support = featureSupport "fma4";
};
predicates =
let
featureSupport = feature: x: builtins.elem feature features.${x} or [ ];
in
{
sse3Support = featureSupport "sse3";
ssse3Support = featureSupport "ssse3";
sse4_1Support = featureSupport "sse4_1";
sse4_2Support = featureSupport "sse4_2";
sse4_aSupport = featureSupport "sse4a";
avxSupport = featureSupport "avx";
avx2Support = featureSupport "avx2";
avx512Support = featureSupport "avx512";
aesSupport = featureSupport "aes";
fmaSupport = featureSupport "fma";
fma4Support = featureSupport "fma4";
};
}
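A small sketch of how the feature and inferior tables are consumed, assuming they stay exposed under `lib.systems.architectures`:

    let
      lib = import ./lib;
      inherit (lib.systems.architectures) predicates inferiors;
    in
    {
      haswellHasAvx2 = predicates.avx2Support "haswell";              # => true
      nehalemHasAvx2 = predicates.avx2Support "nehalem";              # => false
      znver1CanRunHaswell = builtins.elem "haswell" inferiors.znver1; # => true
    }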

@ -42,8 +42,10 @@ let
both arguments have been `elaborate`-d.
*/
equals =
let removeFunctions = a: filterAttrs (_: v: !isFunction v) a;
in a: b: removeFunctions a == removeFunctions b;
let
removeFunctions = a: filterAttrs (_: v: !isFunction v) a;
in
a: b: removeFunctions a == removeFunctions b;
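For orientation, a hedged sketch of `elaborate` and `equals` as used from the public `lib.systems` attrset:

    let
      lib = import ./lib;
      host = lib.systems.elaborate "x86_64-linux";
    in
    {
      inherit (host) config libc linuxArch;
      # => config = "x86_64-unknown-linux-gnu"; libc = "glibc"; linuxArch = "x86_64"
      sameSystem = lib.systems.equals host (lib.systems.elaborate { system = "x86_64-linux"; });
      # => true
    }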
/**
List of all Nix system doubles the nixpkgs flake will expose the package set
@ -61,343 +63,444 @@ let
# `parsed` is inferred from args, both because there are two options with one
# clearly preferred, and to prevent cycles. A simpler fixed point where the RHS
# always just used `final.*` would fail on both counts.
elaborate = args': let
args = if isString args' then { system = args'; }
else args';
elaborate =
args':
let
args = if isString args' then { system = args'; } else args';
# TODO: deprecate args.rustc in favour of args.rust after 23.05 is EOL.
rust = args.rust or args.rustc or {};
# TODO: deprecate args.rustc in favour of args.rust after 23.05 is EOL.
rust = args.rust or args.rustc or { };
final = {
# Prefer to parse `config` as it is strictly more informative.
parsed = parse.mkSystemFromString (if args ? config then args.config else args.system);
# Either of these can be losslessly-extracted from `parsed` iff parsing succeeds.
system = parse.doubleFromSystem final.parsed;
config = parse.tripleFromSystem final.parsed;
# Determine whether we can execute binaries built for the provided platform.
canExecute = platform:
final.isAndroid == platform.isAndroid &&
parse.isCompatible final.parsed.cpu platform.parsed.cpu
&& final.parsed.kernel == platform.parsed.kernel;
isCompatible = _: throw "2022-05-23: isCompatible has been removed in favor of canExecute, refer to the 22.11 changelog for details";
# Derived meta-data
libc =
/**/ if final.isDarwin then "libSystem"
else if final.isMinGW then "msvcrt"
else if final.isWasi then "wasilibc"
else if final.isRedox then "relibc"
else if final.isMusl then "musl"
else if final.isUClibc then "uclibc"
else if final.isAndroid then "bionic"
else if final.isLinux /* default */ then "glibc"
else if final.isFreeBSD then "fblibc"
else if final.isNetBSD then "nblibc"
else if final.isAvr then "avrlibc"
else if final.isGhcjs then null
else if final.isNone then "newlib"
# TODO(@Ericson2314) think more about other operating systems
else "native/impure";
# Choose what linker we wish to use by default. Someday we might also
# choose the C compiler, runtime library, C++ standard library, etc. in
# this way, nice and orthogonally, and deprecate `useLLVM`. But due to
# the monolithic GCC build we cannot actually make those choices
# independently, so we are just doing `linker` and keeping `useLLVM` for
# now.
linker =
/**/ if final.useLLVM or false then "lld"
else if final.isDarwin then "cctools"
# "bfd" and "gold" both come from GNU binutils. The existence of Gold
# is why we use the more obscure "bfd" and not "binutils" for this
# choice.
else "bfd";
# The standard lib directory name that non-nixpkgs binaries distributed
# for this platform normally assume.
libDir = if final.isLinux then
if final.isx86_64 || final.isMips64 || final.isPower64
then "lib64"
else "lib"
else null;
extensions = optionalAttrs final.hasSharedLibraries {
sharedLibrary =
if final.isDarwin then ".dylib"
else if final.isWindows then ".dll"
else ".so";
} // {
staticLibrary =
/**/ if final.isWindows then ".lib"
else ".a";
library =
/**/ if final.isStatic then final.extensions.staticLibrary
else final.extensions.sharedLibrary;
executable =
/**/ if final.isWindows then ".exe"
else "";
};
# Misc boolean options
useAndroidPrebuilt = false;
useiOSPrebuilt = false;
# Output from uname
uname = {
# uname -s
system = {
linux = "Linux";
windows = "Windows";
darwin = "Darwin";
netbsd = "NetBSD";
freebsd = "FreeBSD";
openbsd = "OpenBSD";
wasi = "Wasi";
redox = "Redox";
genode = "Genode";
}.${final.parsed.kernel.name} or null;
# uname -m
processor =
if final.isPower64
then "ppc64${optionalString final.isLittleEndian "le"}"
else if final.isPower
then "ppc${optionalString final.isLittleEndian "le"}"
else if final.isMips64
then "mips64" # endianness is *not* included on mips64
else final.parsed.cpu.name;
# uname -r
release = null;
};
# It is important that hasSharedLibraries==false when the platform has no
# dynamic library loader. Various tools (including the gcc build system)
# have knowledge of which platforms are incapable of dynamic linking, and
# will still build on/for those platforms with --enable-shared, but simply
# omit any `.so` build products such as libgcc_s.so. When that happens,
# it causes hard-to-troubleshoot build failures.
hasSharedLibraries = with final;
(isAndroid || isGnu || isMusl # Linux (allows multiple libcs)
|| isDarwin || isSunOS || isOpenBSD || isFreeBSD || isNetBSD # BSDs
|| isCygwin || isMinGW # Windows
) && !isStatic;
# The difference between `isStatic` and `hasSharedLibraries` is mainly the
# addition of the `staticMarker` (see make-derivation.nix). Some
# platforms, like embedded machines without a libc (e.g. arm-none-eabi)
# don't support dynamic linking, but don't get the `staticMarker`.
# `pkgsStatic` sets `isStatic=true`, so `pkgsStatic.hostPlatform` always
# has the `staticMarker`.
isStatic = final.isWasm || final.isRedox;
# Just a guess, based on `system`
inherit
({
linux-kernel = args.linux-kernel or {};
gcc = args.gcc or {};
} // platforms.select final)
linux-kernel gcc;
# TODO: remove after 23.05 is EOL, with an error pointing to the rust.* attrs.
rustc = args.rustc or {};
linuxArch =
if final.isAarch32 then "arm"
else if final.isAarch64 then "arm64"
else if final.isx86_32 then "i386"
else if final.isx86_64 then "x86_64"
# linux kernel does not distinguish microblaze/microblazeel
else if final.isMicroBlaze then "microblaze"
else if final.isMips32 then "mips"
else if final.isMips64 then "mips" # linux kernel does not distinguish mips32/mips64
else if final.isPower then "powerpc"
else if final.isRiscV then "riscv"
else if final.isS390 then "s390"
else if final.isLoongArch64 then "loongarch"
else final.parsed.cpu.name;
# https://source.denx.de/u-boot/u-boot/-/blob/9bfb567e5f1bfe7de8eb41f8c6d00f49d2b9a426/common/image.c#L81-106
ubootArch =
if final.isx86_32 then "x86" # not i386
else if final.isMips64 then "mips64" # uboot *does* distinguish between mips32/mips64
else final.linuxArch; # other cases appear to agree with linuxArch
qemuArch =
if final.isAarch32 then "arm"
else if final.isS390 && !final.isS390x then null
else if final.isx86_64 then "x86_64"
else if final.isx86 then "i386"
else if final.isMips64n32 then "mipsn32${optionalString final.isLittleEndian "el"}"
else if final.isMips64 then "mips64${optionalString final.isLittleEndian "el"}"
else final.uname.processor;
# Name used by UEFI for architectures.
efiArch =
if final.isx86_32 then "ia32"
else if final.isx86_64 then "x64"
else if final.isAarch32 then "arm"
else if final.isAarch64 then "aa64"
else final.parsed.cpu.name;
darwinArch = {
armv7a = "armv7";
aarch64 = "arm64";
}.${final.parsed.cpu.name} or final.parsed.cpu.name;
darwinPlatform =
if final.isMacOS then "macos"
else if final.isiOS then "ios"
else null;
# The canonical name for this attribute is darwinSdkVersion, but some
# platforms define the old name "sdkVer".
darwinSdkVersion = final.sdkVer or (if final.isAarch64 then "11.0" else "10.12");
darwinMinVersion = final.darwinSdkVersion;
darwinMinVersionVariable =
if final.isMacOS then "MACOSX_DEPLOYMENT_TARGET"
else if final.isiOS then "IPHONEOS_DEPLOYMENT_TARGET"
else null;
} // (
let
selectEmulator = pkgs:
let
qemu-user = pkgs.qemu.override {
smartcardSupport = false;
spiceSupport = false;
openGLSupport = false;
virglSupport = false;
vncSupport = false;
gtkSupport = false;
sdlSupport = false;
alsaSupport = false;
pulseSupport = false;
pipewireSupport = false;
jackSupport = false;
smbdSupport = false;
seccompSupport = false;
tpmSupport = false;
capstoneSupport = false;
enableDocs = false;
hostCpuTargets = [ "${final.qemuArch}-linux-user" ];
final =
{
# Prefer to parse `config` as it is strictly more informative.
parsed = parse.mkSystemFromString (if args ? config then args.config else args.system);
# Either of these can be losslessly-extracted from `parsed` iff parsing succeeds.
system = parse.doubleFromSystem final.parsed;
config = parse.tripleFromSystem final.parsed;
# Determine whether we can execute binaries built for the provided platform.
canExecute =
platform:
final.isAndroid == platform.isAndroid
&& parse.isCompatible final.parsed.cpu platform.parsed.cpu
&& final.parsed.kernel == platform.parsed.kernel;
isCompatible =
_:
throw "2022-05-23: isCompatible has been removed in favor of canExecute, refer to the 22.11 changelog for details";
# Derived meta-data
libc =
if final.isDarwin then
"libSystem"
else if final.isMinGW then
"msvcrt"
else if final.isWasi then
"wasilibc"
else if final.isRedox then
"relibc"
else if final.isMusl then
"musl"
else if final.isUClibc then
"uclibc"
else if final.isAndroid then
"bionic"
else if
final.isLinux # default
then
"glibc"
else if final.isFreeBSD then
"fblibc"
else if final.isNetBSD then
"nblibc"
else if final.isAvr then
"avrlibc"
else if final.isGhcjs then
null
else if final.isNone then
"newlib"
# TODO(@Ericson2314) think more about other operating systems
else
"native/impure";
# Choose what linker we wish to use by default. Someday we might also
# choose the C compiler, runtime library, C++ standard library, etc. in
# this way, nice and orthogonally, and deprecate `useLLVM`. But due to
# the monolithic GCC build we cannot actually make those choices
# independently, so we are just doing `linker` and keeping `useLLVM` for
# now.
linker =
if final.useLLVM or false then
"lld"
else if final.isDarwin then
"cctools"
# "bfd" and "gold" both come from GNU binutils. The existence of Gold
# is why we use the more obscure "bfd" and not "binutils" for this
# choice.
else
"bfd";
# The standard lib directory name that non-nixpkgs binaries distributed
# for this platform normally assume.
libDir =
if final.isLinux then
if final.isx86_64 || final.isMips64 || final.isPower64 then "lib64" else "lib"
else
null;
extensions =
optionalAttrs final.hasSharedLibraries {
sharedLibrary =
if final.isDarwin then
".dylib"
else if final.isWindows then
".dll"
else
".so";
}
// {
staticLibrary = if final.isWindows then ".lib" else ".a";
library = if final.isStatic then final.extensions.staticLibrary else final.extensions.sharedLibrary;
executable = if final.isWindows then ".exe" else "";
};
wine = (pkgs.winePackagesFor "wine${toString final.parsed.cpu.bits}").minimal;
in
if pkgs.stdenv.hostPlatform.canExecute final
then "${pkgs.runtimeShell} -c '\"$@\"' --"
else if final.isWindows
then "${wine}/bin/wine${optionalString (final.parsed.cpu.bits == 64) "64"}"
else if final.isLinux && pkgs.stdenv.hostPlatform.isLinux && final.qemuArch != null
then "${qemu-user}/bin/qemu-${final.qemuArch}"
else if final.isWasi
then "${pkgs.wasmtime}/bin/wasmtime"
else if final.isMmix
then "${pkgs.mmixware}/bin/mmix"
else null;
in {
emulatorAvailable = pkgs: (selectEmulator pkgs) != null;
# Misc boolean options
useAndroidPrebuilt = false;
useiOSPrebuilt = false;
emulator = pkgs:
if (final.emulatorAvailable pkgs)
then selectEmulator pkgs
else throw "Don't know how to run ${final.config} executables.";
# Output from uname
uname = {
# uname -s
system =
{
linux = "Linux";
windows = "Windows";
darwin = "Darwin";
netbsd = "NetBSD";
freebsd = "FreeBSD";
openbsd = "OpenBSD";
wasi = "Wasi";
redox = "Redox";
genode = "Genode";
}
.${final.parsed.kernel.name} or null;
}) // mapAttrs (n: v: v final.parsed) inspect.predicates
// mapAttrs (n: v: v final.gcc.arch or "default") architectures.predicates
// args // {
rust = rust // {
# Once args.rustc.platform.target-family is deprecated and
# removed, there will no longer be any need to modify any
# values from args.rust.platform, so we can drop all the
# "args ? rust" etc. checks, and merge args.rust.platform in
# /after/.
platform = rust.platform or {} // {
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_arch
arch =
/**/ if rust ? platform then rust.platform.arch
else if final.isAarch32 then "arm"
else if final.isMips64 then "mips64" # never add "el" suffix
else if final.isPower64 then "powerpc64" # never add "le" suffix
else final.parsed.cpu.name;
# uname -m
processor =
if final.isPower64 then
"ppc64${optionalString final.isLittleEndian "le"}"
else if final.isPower then
"ppc${optionalString final.isLittleEndian "le"}"
else if final.isMips64 then
"mips64" # endianness is *not* included on mips64
else
final.parsed.cpu.name;
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_os
os =
/**/ if rust ? platform then rust.platform.os or "none"
else if final.isDarwin then "macos"
else final.parsed.kernel.name;
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_family
target-family =
/**/ if args ? rust.platform.target-family then args.rust.platform.target-family
else if args ? rustc.platform.target-family
then
(
# Since https://github.com/rust-lang/rust/pull/84072
# `target-family` is a list instead of single value.
let
f = args.rustc.platform.target-family;
in
if isList f then f else [ f ]
)
else optional final.isUnix "unix"
++ optional final.isWindows "windows";
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_vendor
vendor = let
inherit (final.parsed) vendor;
in rust.platform.vendor or {
"w64" = "pc";
}.${vendor.name} or vendor.name;
# uname -r
release = null;
};
# The name of the rust target, even if it is custom. Adjustments are
# because rust has slightly different naming conventions than we do.
rustcTarget = let
inherit (final.parsed) cpu kernel abi;
cpu_ = rust.platform.arch or {
"armv7a" = "armv7";
"armv7l" = "armv7";
"armv6l" = "arm";
"armv5tel" = "armv5te";
"riscv64" = "riscv64gc";
}.${cpu.name} or cpu.name;
vendor_ = final.rust.platform.vendor;
# TODO: deprecate args.rustc in favour of args.rust after 23.05 is EOL.
in args.rust.rustcTarget or args.rustc.config
or "${cpu_}-${vendor_}-${kernel.name}${optionalString (abi.name != "unknown") "-${abi.name}"}";
# It is important that hasSharedLibraries==false when the platform has no
# dynamic library loader. Various tools (including the gcc build system)
# have knowledge of which platforms are incapable of dynamic linking, and
# will still build on/for those platforms with --enable-shared, but simply
# omit any `.so` build products such as libgcc_s.so. When that happens,
# it causes hard-to-troubleshoot build failures.
hasSharedLibraries =
with final;
(
isAndroid
|| isGnu
|| isMusl # Linux (allows multiple libcs)
|| isDarwin
|| isSunOS
|| isOpenBSD
|| isFreeBSD
|| isNetBSD # BSDs
|| isCygwin
|| isMinGW # Windows
)
&& !isStatic;
# The name of the rust target if it is standard, or the json file
# containing the custom target spec.
rustcTargetSpec = rust.rustcTargetSpec or (
/**/ if rust ? platform
then builtins.toFile (final.rust.rustcTarget + ".json") (toJSON rust.platform)
else final.rust.rustcTarget);
# The difference between `isStatic` and `hasSharedLibraries` is mainly the
# addition of the `staticMarker` (see make-derivation.nix). Some
# platforms, like embedded machines without a libc (e.g. arm-none-eabi)
# don't support dynamic linking, but don't get the `staticMarker`.
# `pkgsStatic` sets `isStatic=true`, so `pkgsStatic.hostPlatform` always
# has the `staticMarker`.
isStatic = final.isWasm || final.isRedox;
# The name of the rust target if it is standard, or the
# basename of the file containing the custom target spec,
# without the .json extension.
#
# This is the name used by Cargo for target subdirectories.
cargoShortTarget =
removeSuffix ".json" (baseNameOf "${final.rust.rustcTargetSpec}");
# Just a guess, based on `system`
inherit
(
{
linux-kernel = args.linux-kernel or { };
gcc = args.gcc or { };
}
// platforms.select final
)
linux-kernel
gcc
;
# When used as part of an environment variable name, triples are
# uppercased and have all hyphens replaced by underscores:
#
# https://github.com/rust-lang/cargo/pull/9169
# https://github.com/rust-lang/cargo/issues/8285#issuecomment-634202431
cargoEnvVarTarget =
replaceStrings ["-"] ["_"]
(toUpper final.rust.cargoShortTarget);
# TODO: remove after 23.05 is EOL, with an error pointing to the rust.* attrs.
rustc = args.rustc or { };
# True if the target is no_std
# https://github.com/rust-lang/rust/blob/2e44c17c12cec45b6a682b1e53a04ac5b5fcc9d2/src/bootstrap/config.rs#L415-L421
isNoStdTarget =
any (t: hasInfix t final.rust.rustcTarget) ["-none" "nvptx" "switch" "-uefi"];
linuxArch =
if final.isAarch32 then
"arm"
else if final.isAarch64 then
"arm64"
else if final.isx86_32 then
"i386"
else if final.isx86_64 then
"x86_64"
# linux kernel does not distinguish microblaze/microblazeel
else if final.isMicroBlaze then
"microblaze"
else if final.isMips32 then
"mips"
else if final.isMips64 then
"mips" # linux kernel does not distinguish mips32/mips64
else if final.isPower then
"powerpc"
else if final.isRiscV then
"riscv"
else if final.isS390 then
"s390"
else if final.isLoongArch64 then
"loongarch"
else
final.parsed.cpu.name;
# https://source.denx.de/u-boot/u-boot/-/blob/9bfb567e5f1bfe7de8eb41f8c6d00f49d2b9a426/common/image.c#L81-106
ubootArch =
if final.isx86_32 then
"x86" # not i386
else if final.isMips64 then
"mips64" # uboot *does* distinguish between mips32/mips64
else
final.linuxArch; # other cases appear to agree with linuxArch
qemuArch =
if final.isAarch32 then
"arm"
else if final.isS390 && !final.isS390x then
null
else if final.isx86_64 then
"x86_64"
else if final.isx86 then
"i386"
else if final.isMips64n32 then
"mipsn32${optionalString final.isLittleEndian "el"}"
else if final.isMips64 then
"mips64${optionalString final.isLittleEndian "el"}"
else
final.uname.processor;
# Name used by UEFI for architectures.
efiArch =
if final.isx86_32 then
"ia32"
else if final.isx86_64 then
"x64"
else if final.isAarch32 then
"arm"
else if final.isAarch64 then
"aa64"
else
final.parsed.cpu.name;
darwinArch =
{
armv7a = "armv7";
aarch64 = "arm64";
}
.${final.parsed.cpu.name} or final.parsed.cpu.name;
darwinPlatform =
if final.isMacOS then
"macos"
else if final.isiOS then
"ios"
else
null;
# The canonical name for this attribute is darwinSdkVersion, but some
# platforms define the old name "sdkVer".
darwinSdkVersion = final.sdkVer or (if final.isAarch64 then "11.0" else "10.12");
darwinMinVersion = final.darwinSdkVersion;
darwinMinVersionVariable =
if final.isMacOS then
"MACOSX_DEPLOYMENT_TARGET"
else if final.isiOS then
"IPHONEOS_DEPLOYMENT_TARGET"
else
null;
}
// (
let
selectEmulator =
pkgs:
let
qemu-user = pkgs.qemu.override {
smartcardSupport = false;
spiceSupport = false;
openGLSupport = false;
virglSupport = false;
vncSupport = false;
gtkSupport = false;
sdlSupport = false;
alsaSupport = false;
pulseSupport = false;
pipewireSupport = false;
jackSupport = false;
smbdSupport = false;
seccompSupport = false;
tpmSupport = false;
capstoneSupport = false;
enableDocs = false;
hostCpuTargets = [ "${final.qemuArch}-linux-user" ];
};
wine = (pkgs.winePackagesFor "wine${toString final.parsed.cpu.bits}").minimal;
in
if pkgs.stdenv.hostPlatform.canExecute final then
"${pkgs.runtimeShell} -c '\"$@\"' --"
else if final.isWindows then
"${wine}/bin/wine${optionalString (final.parsed.cpu.bits == 64) "64"}"
else if final.isLinux && pkgs.stdenv.hostPlatform.isLinux && final.qemuArch != null then
"${qemu-user}/bin/qemu-${final.qemuArch}"
else if final.isWasi then
"${pkgs.wasmtime}/bin/wasmtime"
else if final.isMmix then
"${pkgs.mmixware}/bin/mmix"
else
null;
in
{
emulatorAvailable = pkgs: (selectEmulator pkgs) != null;
emulator =
pkgs:
if (final.emulatorAvailable pkgs) then
selectEmulator pkgs
else
throw "Don't know how to run ${final.config} executables.";
}
)
// mapAttrs (n: v: v final.parsed) inspect.predicates
// mapAttrs (n: v: v final.gcc.arch or "default") architectures.predicates
// args
// {
rust = rust // {
# Once args.rustc.platform.target-family is deprecated and
# removed, there will no longer be any need to modify any
# values from args.rust.platform, so we can drop all the
# "args ? rust" etc. checks, and merge args.rust.platform in
# /after/.
platform = rust.platform or { } // {
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_arch
arch =
if rust ? platform then
rust.platform.arch
else if final.isAarch32 then
"arm"
else if final.isMips64 then
"mips64" # never add "el" suffix
else if final.isPower64 then
"powerpc64" # never add "le" suffix
else
final.parsed.cpu.name;
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_os
os =
if rust ? platform then
rust.platform.os or "none"
else if final.isDarwin then
"macos"
else
final.parsed.kernel.name;
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_family
target-family =
if args ? rust.platform.target-family then
args.rust.platform.target-family
else if args ? rustc.platform.target-family then
(
# Since https://github.com/rust-lang/rust/pull/84072
# `target-family` is a list instead of single value.
let
f = args.rustc.platform.target-family;
in
if isList f then f else [ f ]
)
else
optional final.isUnix "unix" ++ optional final.isWindows "windows";
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_vendor
vendor =
let
inherit (final.parsed) vendor;
in
rust.platform.vendor or {
"w64" = "pc";
}
.${vendor.name} or vendor.name;
};
# The name of the rust target, even if it is custom. Adjustments are
# because rust has slightly different naming conventions than we do.
rustcTarget =
let
inherit (final.parsed) cpu kernel abi;
cpu_ =
rust.platform.arch or {
"armv7a" = "armv7";
"armv7l" = "armv7";
"armv6l" = "arm";
"armv5tel" = "armv5te";
"riscv64" = "riscv64gc";
}
.${cpu.name} or cpu.name;
vendor_ = final.rust.platform.vendor;
in
# TODO: deprecate args.rustc in favour of args.rust after 23.05 is EOL.
args.rust.rustcTarget or args.rustc.config
or "${cpu_}-${vendor_}-${kernel.name}${optionalString (abi.name != "unknown") "-${abi.name}"}";
# The name of the rust target if it is standard, or the json file
# containing the custom target spec.
rustcTargetSpec =
rust.rustcTargetSpec or (
if rust ? platform then
builtins.toFile (final.rust.rustcTarget + ".json") (toJSON rust.platform)
else
final.rust.rustcTarget
);
# The name of the rust target if it is standard, or the
# basename of the file containing the custom target spec,
# without the .json extension.
#
# This is the name used by Cargo for target subdirectories.
cargoShortTarget = removeSuffix ".json" (baseNameOf "${final.rust.rustcTargetSpec}");
# When used as part of an environment variable name, triples are
# uppercased and have all hyphens replaced by underscores:
#
# https://github.com/rust-lang/cargo/pull/9169
# https://github.com/rust-lang/cargo/issues/8285#issuecomment-634202431
cargoEnvVarTarget = replaceStrings [ "-" ] [ "_" ] (toUpper final.rust.cargoShortTarget);
# True if the target is no_std
# https://github.com/rust-lang/rust/blob/2e44c17c12cec45b6a682b1e53a04ac5b5fcc9d2/src/bootstrap/config.rs#L415-L421
isNoStdTarget = any (t: hasInfix t final.rust.rustcTarget) [
"-none"
"nvptx"
"switch"
"-uefi"
];
};
};
};
in assert final.useAndroidPrebuilt -> final.isAndroid;
assert foldl
(pass: { assertion, message }:
if assertion final
then pass
else throw message)
true
(final.parsed.abi.assertions or []);
in
assert final.useAndroidPrebuilt -> final.isAndroid;
assert foldl (pass: { assertion, message }: if assertion final then pass else throw message) true (
final.parsed.abi.assertions or [ ]
);
final;
in
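
An aside, not part of the commit: the reformatted `elaborate` above derives a large set of attributes from a bare double. A sketch of the values one would expect for "x86_64-linux", assuming the usual `lib.systems.elaborate` entry point; the `import ./lib` path is an assumption of the sketch:

let
  lib = import ./lib;                        # nixpkgs lib; the path is an assumption of this sketch
  s = lib.systems.elaborate "x86_64-linux";
in
{
  config      = s.config;                    # expected: "x86_64-unknown-linux-gnu"
  libc        = s.libc;                      # expected: "glibc"
  linuxArch   = s.linuxArch;                 # expected: "x86_64"
  rustcTarget = s.rust.rustcTarget;          # expected: "x86_64-unknown-linux-gnu"
  cargoEnvVar = s.rust.cargoEnvVarTarget;    # expected: "X86_64_UNKNOWN_LINUX_GNU"
}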

View file

@ -7,16 +7,23 @@ let
all = [
# Cygwin
"i686-cygwin" "x86_64-cygwin"
"i686-cygwin"
"x86_64-cygwin"
# Darwin
"x86_64-darwin" "i686-darwin" "aarch64-darwin" "armv7a-darwin"
"x86_64-darwin"
"i686-darwin"
"aarch64-darwin"
"armv7a-darwin"
# FreeBSD
"i686-freebsd13" "x86_64-freebsd13"
"i686-freebsd13"
"x86_64-freebsd13"
# Genode
"aarch64-genode" "i686-genode" "x86_64-genode"
"aarch64-genode"
"i686-genode"
"x86_64-genode"
# illumos
"x86_64-solaris"
@ -25,95 +32,176 @@ let
"javascript-ghcjs"
# Linux
"aarch64-linux" "armv5tel-linux" "armv6l-linux" "armv7a-linux"
"armv7l-linux" "i686-linux" "loongarch64-linux" "m68k-linux" "microblaze-linux"
"microblazeel-linux" "mips-linux" "mips64-linux" "mips64el-linux"
"mipsel-linux" "powerpc64-linux" "powerpc64le-linux" "riscv32-linux"
"riscv64-linux" "s390-linux" "s390x-linux" "x86_64-linux"
"aarch64-linux"
"armv5tel-linux"
"armv6l-linux"
"armv7a-linux"
"armv7l-linux"
"i686-linux"
"loongarch64-linux"
"m68k-linux"
"microblaze-linux"
"microblazeel-linux"
"mips-linux"
"mips64-linux"
"mips64el-linux"
"mipsel-linux"
"powerpc64-linux"
"powerpc64le-linux"
"riscv32-linux"
"riscv64-linux"
"s390-linux"
"s390x-linux"
"x86_64-linux"
# MMIXware
"mmix-mmixware"
# NetBSD
"aarch64-netbsd" "armv6l-netbsd" "armv7a-netbsd" "armv7l-netbsd"
"i686-netbsd" "m68k-netbsd" "mipsel-netbsd" "powerpc-netbsd"
"riscv32-netbsd" "riscv64-netbsd" "x86_64-netbsd"
"aarch64-netbsd"
"armv6l-netbsd"
"armv7a-netbsd"
"armv7l-netbsd"
"i686-netbsd"
"m68k-netbsd"
"mipsel-netbsd"
"powerpc-netbsd"
"riscv32-netbsd"
"riscv64-netbsd"
"x86_64-netbsd"
# none
"aarch64_be-none" "aarch64-none" "arm-none" "armv6l-none" "avr-none" "i686-none"
"microblaze-none" "microblazeel-none" "mips-none" "mips64-none" "msp430-none" "or1k-none" "m68k-none"
"powerpc-none" "powerpcle-none" "riscv32-none" "riscv64-none" "rx-none"
"s390-none" "s390x-none" "vc4-none" "x86_64-none"
"aarch64_be-none"
"aarch64-none"
"arm-none"
"armv6l-none"
"avr-none"
"i686-none"
"microblaze-none"
"microblazeel-none"
"mips-none"
"mips64-none"
"msp430-none"
"or1k-none"
"m68k-none"
"powerpc-none"
"powerpcle-none"
"riscv32-none"
"riscv64-none"
"rx-none"
"s390-none"
"s390x-none"
"vc4-none"
"x86_64-none"
# OpenBSD
"i686-openbsd" "x86_64-openbsd"
"i686-openbsd"
"x86_64-openbsd"
# Redox
"x86_64-redox"
# WASI
"wasm64-wasi" "wasm32-wasi"
"wasm64-wasi"
"wasm32-wasi"
# Windows
"x86_64-windows" "i686-windows"
"x86_64-windows"
"i686-windows"
];
allParsed = map parse.mkSystemFromString all;
filterDoubles = f: map parse.doubleFromSystem (lists.filter f allParsed);
in {
in
{
inherit all;
none = [];
none = [ ];
arm = filterDoubles predicates.isAarch32;
armv7 = filterDoubles predicates.isArmv7;
aarch64 = filterDoubles predicates.isAarch64;
x86 = filterDoubles predicates.isx86;
i686 = filterDoubles predicates.isi686;
x86_64 = filterDoubles predicates.isx86_64;
microblaze = filterDoubles predicates.isMicroBlaze;
mips = filterDoubles predicates.isMips;
mmix = filterDoubles predicates.isMmix;
power = filterDoubles predicates.isPower;
riscv = filterDoubles predicates.isRiscV;
riscv32 = filterDoubles predicates.isRiscV32;
riscv64 = filterDoubles predicates.isRiscV64;
rx = filterDoubles predicates.isRx;
vc4 = filterDoubles predicates.isVc4;
or1k = filterDoubles predicates.isOr1k;
m68k = filterDoubles predicates.isM68k;
s390 = filterDoubles predicates.isS390;
s390x = filterDoubles predicates.isS390x;
loongarch64 = filterDoubles predicates.isLoongArch64;
js = filterDoubles predicates.isJavaScript;
arm = filterDoubles predicates.isAarch32;
armv7 = filterDoubles predicates.isArmv7;
aarch64 = filterDoubles predicates.isAarch64;
x86 = filterDoubles predicates.isx86;
i686 = filterDoubles predicates.isi686;
x86_64 = filterDoubles predicates.isx86_64;
microblaze = filterDoubles predicates.isMicroBlaze;
mips = filterDoubles predicates.isMips;
mmix = filterDoubles predicates.isMmix;
power = filterDoubles predicates.isPower;
riscv = filterDoubles predicates.isRiscV;
riscv32 = filterDoubles predicates.isRiscV32;
riscv64 = filterDoubles predicates.isRiscV64;
rx = filterDoubles predicates.isRx;
vc4 = filterDoubles predicates.isVc4;
or1k = filterDoubles predicates.isOr1k;
m68k = filterDoubles predicates.isM68k;
s390 = filterDoubles predicates.isS390;
s390x = filterDoubles predicates.isS390x;
loongarch64 = filterDoubles predicates.isLoongArch64;
js = filterDoubles predicates.isJavaScript;
bigEndian = filterDoubles predicates.isBigEndian;
littleEndian = filterDoubles predicates.isLittleEndian;
bigEndian = filterDoubles predicates.isBigEndian;
littleEndian = filterDoubles predicates.isLittleEndian;
cygwin = filterDoubles predicates.isCygwin;
darwin = filterDoubles predicates.isDarwin;
freebsd = filterDoubles predicates.isFreeBSD;
cygwin = filterDoubles predicates.isCygwin;
darwin = filterDoubles predicates.isDarwin;
freebsd = filterDoubles predicates.isFreeBSD;
# Should be better, but MinGW is unclear.
gnu = filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnu; })
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnueabi; })
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnueabihf; })
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnuabin32; })
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnuabi64; })
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnuabielfv1; })
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnuabielfv2; });
illumos = filterDoubles predicates.isSunOS;
linux = filterDoubles predicates.isLinux;
netbsd = filterDoubles predicates.isNetBSD;
openbsd = filterDoubles predicates.isOpenBSD;
unix = filterDoubles predicates.isUnix;
wasi = filterDoubles predicates.isWasi;
redox = filterDoubles predicates.isRedox;
windows = filterDoubles predicates.isWindows;
genode = filterDoubles predicates.isGenode;
gnu =
filterDoubles (matchAttrs {
kernel = parse.kernels.linux;
abi = parse.abis.gnu;
})
++ filterDoubles (matchAttrs {
kernel = parse.kernels.linux;
abi = parse.abis.gnueabi;
})
++ filterDoubles (matchAttrs {
kernel = parse.kernels.linux;
abi = parse.abis.gnueabihf;
})
++ filterDoubles (matchAttrs {
kernel = parse.kernels.linux;
abi = parse.abis.gnuabin32;
})
++ filterDoubles (matchAttrs {
kernel = parse.kernels.linux;
abi = parse.abis.gnuabi64;
})
++ filterDoubles (matchAttrs {
kernel = parse.kernels.linux;
abi = parse.abis.gnuabielfv1;
})
++ filterDoubles (matchAttrs {
kernel = parse.kernels.linux;
abi = parse.abis.gnuabielfv2;
});
illumos = filterDoubles predicates.isSunOS;
linux = filterDoubles predicates.isLinux;
netbsd = filterDoubles predicates.isNetBSD;
openbsd = filterDoubles predicates.isOpenBSD;
unix = filterDoubles predicates.isUnix;
wasi = filterDoubles predicates.isWasi;
redox = filterDoubles predicates.isRedox;
windows = filterDoubles predicates.isWindows;
genode = filterDoubles predicates.isGenode;
embedded = filterDoubles predicates.isNone;
embedded = filterDoubles predicates.isNone;
mesaPlatforms = ["i686-linux" "x86_64-linux" "x86_64-darwin" "armv5tel-linux" "armv6l-linux" "armv7l-linux" "armv7a-linux" "aarch64-linux" "powerpc64-linux" "powerpc64le-linux" "aarch64-darwin" "riscv64-linux"];
mesaPlatforms = [
"i686-linux"
"x86_64-linux"
"x86_64-darwin"
"armv5tel-linux"
"armv6l-linux"
"armv7l-linux"
"armv7a-linux"
"aarch64-linux"
"powerpc64-linux"
"powerpc64le-linux"
"aarch64-darwin"
"riscv64-linux"
];
}
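
An aside, not part of the commit: `filterDoubles` derives the grouped lists from `all` by matching each parsed double against a predicate pattern, so every group keeps the order of `all`. A sketch of two expected results, assuming the `lib.systems.doubles` entry point and that `./lib` is the nixpkgs lib directory:

let
  lib = import ./lib;  # nixpkgs lib; the path is an assumption of this sketch
  inherit (lib.systems) doubles;
in
{
  darwin = doubles.darwin;  # expected: [ "x86_64-darwin" "i686-darwin" "aarch64-darwin" "armv7a-darwin" ]
  redox  = doubles.redox;   # expected: [ "x86_64-redox" ]
}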

View file

@ -5,9 +5,7 @@
let
platforms = import ./platforms.nix { inherit lib; };
riscv = bits: {
config = "riscv${bits}-unknown-linux-gnu";
};
riscv = bits: { config = "riscv${bits}-unknown-linux-gnu"; };
in
rec {
@ -26,7 +24,9 @@ rec {
};
ppc64-musl = {
config = "powerpc64-unknown-linux-musl";
gcc = { abi = "elfv2"; };
gcc = {
abi = "elfv2";
};
};
sheevaplug = {
@ -95,16 +95,28 @@ rec {
} // platforms.fuloong2f_n32;
# can execute on 32bit chip
mips-linux-gnu = { config = "mips-unknown-linux-gnu"; } // platforms.gcc_mips32r2_o32;
mipsel-linux-gnu = { config = "mipsel-unknown-linux-gnu"; } // platforms.gcc_mips32r2_o32;
mips-linux-gnu = {
config = "mips-unknown-linux-gnu";
} // platforms.gcc_mips32r2_o32;
mipsel-linux-gnu = {
config = "mipsel-unknown-linux-gnu";
} // platforms.gcc_mips32r2_o32;
# require 64bit chip (for more registers, 64-bit floating point, 64-bit "long long") but use 32bit pointers
mips64-linux-gnuabin32 = { config = "mips64-unknown-linux-gnuabin32"; } // platforms.gcc_mips64r2_n32;
mips64el-linux-gnuabin32 = { config = "mips64el-unknown-linux-gnuabin32"; } // platforms.gcc_mips64r2_n32;
mips64-linux-gnuabin32 = {
config = "mips64-unknown-linux-gnuabin32";
} // platforms.gcc_mips64r2_n32;
mips64el-linux-gnuabin32 = {
config = "mips64el-unknown-linux-gnuabin32";
} // platforms.gcc_mips64r2_n32;
# 64bit pointers
mips64-linux-gnuabi64 = { config = "mips64-unknown-linux-gnuabi64"; } // platforms.gcc_mips64r2_64;
mips64el-linux-gnuabi64 = { config = "mips64el-unknown-linux-gnuabi64"; } // platforms.gcc_mips64r2_64;
mips64-linux-gnuabi64 = {
config = "mips64-unknown-linux-gnuabi64";
} // platforms.gcc_mips64r2_64;
mips64el-linux-gnuabi64 = {
config = "mips64el-unknown-linux-gnuabi64";
} // platforms.gcc_mips64r2_64;
muslpi = raspberryPi // {
config = "armv6l-unknown-linux-musleabihf";
@ -114,12 +126,20 @@ rec {
config = "aarch64-unknown-linux-musl";
};
gnu64 = { config = "x86_64-unknown-linux-gnu"; };
gnu64 = {
config = "x86_64-unknown-linux-gnu";
};
gnu64_simplekernel = gnu64 // platforms.pc_simplekernel; # see test/cross/default.nix
gnu32 = { config = "i686-unknown-linux-gnu"; };
gnu32 = {
config = "i686-unknown-linux-gnu";
};
musl64 = { config = "x86_64-unknown-linux-musl"; };
musl32 = { config = "i686-unknown-linux-musl"; };
musl64 = {
config = "x86_64-unknown-linux-musl";
};
musl32 = {
config = "i686-unknown-linux-musl";
};
riscv64 = riscv "64";
riscv32 = riscv "32";
@ -294,13 +314,13 @@ rec {
aarch64-darwin = {
config = "aarch64-apple-darwin";
xcodePlatform = "MacOSX";
platform = {};
platform = { };
};
x86_64-darwin = {
config = "x86_64-apple-darwin";
xcodePlatform = "MacOSX";
platform = {};
platform = { };
};
#

View file

@ -38,123 +38,434 @@ rec {
# `lib.attrsets.matchAttrs`, which requires a match on *all* attributes of
# the product.
isi686 = { cpu = cpuTypes.i686; };
isx86_32 = { cpu = { family = "x86"; bits = 32; }; };
isx86_64 = { cpu = { family = "x86"; bits = 64; }; };
isPower = { cpu = { family = "power"; }; };
isPower64 = { cpu = { family = "power"; bits = 64; }; };
isi686 = {
cpu = cpuTypes.i686;
};
isx86_32 = {
cpu = {
family = "x86";
bits = 32;
};
};
isx86_64 = {
cpu = {
family = "x86";
bits = 64;
};
};
isPower = {
cpu = {
family = "power";
};
};
isPower64 = {
cpu = {
family = "power";
bits = 64;
};
};
# This ABI is the default in NixOS PowerPC64 BE, but not on mainline GCC,
  # so it sometimes causes issues in certain packages that make wrong
  # assumptions about the ABI in use.
isAbiElfv2 = [
{ abi = { abi = "elfv2"; }; }
{ abi = { name = "musl"; }; cpu = { family = "power"; bits = 64; }; }
{
abi = {
abi = "elfv2";
};
}
{
abi = {
name = "musl";
};
cpu = {
family = "power";
bits = 64;
};
}
];
isx86 = { cpu = { family = "x86"; }; };
isAarch32 = { cpu = { family = "arm"; bits = 32; }; };
isArmv7 = map ({ arch, ... }: { cpu = { inherit arch; }; })
(filter (cpu: hasPrefix "armv7" cpu.arch or "")
(attrValues cpuTypes));
isAarch64 = { cpu = { family = "arm"; bits = 64; }; };
isAarch = { cpu = { family = "arm"; }; };
isMicroBlaze = { cpu = { family = "microblaze"; }; };
isMips = { cpu = { family = "mips"; }; };
isMips32 = { cpu = { family = "mips"; bits = 32; }; };
isMips64 = { cpu = { family = "mips"; bits = 64; }; };
isMips64n32 = { cpu = { family = "mips"; bits = 64; }; abi = { abi = "n32"; }; };
isMips64n64 = { cpu = { family = "mips"; bits = 64; }; abi = { abi = "64"; }; };
isMmix = { cpu = { family = "mmix"; }; };
isRiscV = { cpu = { family = "riscv"; }; };
isRiscV32 = { cpu = { family = "riscv"; bits = 32; }; };
isRiscV64 = { cpu = { family = "riscv"; bits = 64; }; };
isRx = { cpu = { family = "rx"; }; };
isSparc = { cpu = { family = "sparc"; }; };
isSparc64 = { cpu = { family = "sparc"; bits = 64; }; };
isWasm = { cpu = { family = "wasm"; }; };
isMsp430 = { cpu = { family = "msp430"; }; };
isVc4 = { cpu = { family = "vc4"; }; };
isAvr = { cpu = { family = "avr"; }; };
isAlpha = { cpu = { family = "alpha"; }; };
isOr1k = { cpu = { family = "or1k"; }; };
isM68k = { cpu = { family = "m68k"; }; };
isS390 = { cpu = { family = "s390"; }; };
isS390x = { cpu = { family = "s390"; bits = 64; }; };
isLoongArch64 = { cpu = { family = "loongarch"; bits = 64; }; };
isJavaScript = { cpu = cpuTypes.javascript; };
isx86 = {
cpu = {
family = "x86";
};
};
isAarch32 = {
cpu = {
family = "arm";
bits = 32;
};
};
isArmv7 = map (
{ arch, ... }:
{
cpu = {
inherit arch;
};
}
) (filter (cpu: hasPrefix "armv7" cpu.arch or "") (attrValues cpuTypes));
isAarch64 = {
cpu = {
family = "arm";
bits = 64;
};
};
isAarch = {
cpu = {
family = "arm";
};
};
isMicroBlaze = {
cpu = {
family = "microblaze";
};
};
isMips = {
cpu = {
family = "mips";
};
};
isMips32 = {
cpu = {
family = "mips";
bits = 32;
};
};
isMips64 = {
cpu = {
family = "mips";
bits = 64;
};
};
isMips64n32 = {
cpu = {
family = "mips";
bits = 64;
};
abi = {
abi = "n32";
};
};
isMips64n64 = {
cpu = {
family = "mips";
bits = 64;
};
abi = {
abi = "64";
};
};
isMmix = {
cpu = {
family = "mmix";
};
};
isRiscV = {
cpu = {
family = "riscv";
};
};
isRiscV32 = {
cpu = {
family = "riscv";
bits = 32;
};
};
isRiscV64 = {
cpu = {
family = "riscv";
bits = 64;
};
};
isRx = {
cpu = {
family = "rx";
};
};
isSparc = {
cpu = {
family = "sparc";
};
};
isSparc64 = {
cpu = {
family = "sparc";
bits = 64;
};
};
isWasm = {
cpu = {
family = "wasm";
};
};
isMsp430 = {
cpu = {
family = "msp430";
};
};
isVc4 = {
cpu = {
family = "vc4";
};
};
isAvr = {
cpu = {
family = "avr";
};
};
isAlpha = {
cpu = {
family = "alpha";
};
};
isOr1k = {
cpu = {
family = "or1k";
};
};
isM68k = {
cpu = {
family = "m68k";
};
};
isS390 = {
cpu = {
family = "s390";
};
};
isS390x = {
cpu = {
family = "s390";
bits = 64;
};
};
isLoongArch64 = {
cpu = {
family = "loongarch";
bits = 64;
};
};
isJavaScript = {
cpu = cpuTypes.javascript;
};
is32bit = { cpu = { bits = 32; }; };
is64bit = { cpu = { bits = 64; }; };
isILP32 = [ { cpu = { family = "wasm"; bits = 32; }; } ] ++
map (a: { abi = { abi = a; }; }) [ "n32" "ilp32" "x32" ];
isBigEndian = { cpu = { significantByte = significantBytes.bigEndian; }; };
isLittleEndian = { cpu = { significantByte = significantBytes.littleEndian; }; };
is32bit = {
cpu = {
bits = 32;
};
};
is64bit = {
cpu = {
bits = 64;
};
};
isILP32 =
[
{
cpu = {
family = "wasm";
bits = 32;
};
}
]
++ map
(a: {
abi = {
abi = a;
};
})
[
"n32"
"ilp32"
"x32"
];
isBigEndian = {
cpu = {
significantByte = significantBytes.bigEndian;
};
};
isLittleEndian = {
cpu = {
significantByte = significantBytes.littleEndian;
};
};
isBSD = { kernel = { families = { inherit (kernelFamilies) bsd; }; }; };
isDarwin = { kernel = { families = { inherit (kernelFamilies) darwin; }; }; };
isUnix = [ isBSD isDarwin isLinux isSunOS isCygwin isRedox ];
isBSD = {
kernel = {
families = {
inherit (kernelFamilies) bsd;
};
};
};
isDarwin = {
kernel = {
families = {
inherit (kernelFamilies) darwin;
};
};
};
isUnix = [
isBSD
isDarwin
isLinux
isSunOS
isCygwin
isRedox
];
isMacOS = { kernel = kernels.macos; };
isiOS = { kernel = kernels.ios; };
isLinux = { kernel = kernels.linux; };
isSunOS = { kernel = kernels.solaris; };
isFreeBSD = { kernel = { name = "freebsd"; }; };
isNetBSD = { kernel = kernels.netbsd; };
isOpenBSD = { kernel = kernels.openbsd; };
isWindows = { kernel = kernels.windows; };
isCygwin = { kernel = kernels.windows; abi = abis.cygnus; };
isMinGW = { kernel = kernels.windows; abi = abis.gnu; };
isWasi = { kernel = kernels.wasi; };
isRedox = { kernel = kernels.redox; };
isGhcjs = { kernel = kernels.ghcjs; };
isGenode = { kernel = kernels.genode; };
isNone = { kernel = kernels.none; };
isMacOS = {
kernel = kernels.macos;
};
isiOS = {
kernel = kernels.ios;
};
isLinux = {
kernel = kernels.linux;
};
isSunOS = {
kernel = kernels.solaris;
};
isFreeBSD = {
kernel = {
name = "freebsd";
};
};
isNetBSD = {
kernel = kernels.netbsd;
};
isOpenBSD = {
kernel = kernels.openbsd;
};
isWindows = {
kernel = kernels.windows;
};
isCygwin = {
kernel = kernels.windows;
abi = abis.cygnus;
};
isMinGW = {
kernel = kernels.windows;
abi = abis.gnu;
};
isWasi = {
kernel = kernels.wasi;
};
isRedox = {
kernel = kernels.redox;
};
isGhcjs = {
kernel = kernels.ghcjs;
};
isGenode = {
kernel = kernels.genode;
};
isNone = {
kernel = kernels.none;
};
isAndroid = [ { abi = abis.android; } { abi = abis.androideabi; } ];
isGnu = with abis; map (a: { abi = a; }) [ gnuabi64 gnuabin32 gnu gnueabi gnueabihf gnuabielfv1 gnuabielfv2 ];
isMusl = with abis; map (a: { abi = a; }) [ musl musleabi musleabihf muslabin32 muslabi64 ];
isUClibc = with abis; map (a: { abi = a; }) [ uclibc uclibceabi uclibceabihf ];
isAndroid = [
{ abi = abis.android; }
{ abi = abis.androideabi; }
];
isGnu =
with abis;
map (a: { abi = a; }) [
gnuabi64
gnuabin32
gnu
gnueabi
gnueabihf
gnuabielfv1
gnuabielfv2
];
isMusl =
with abis;
map (a: { abi = a; }) [
musl
musleabi
musleabihf
muslabin32
muslabi64
];
isUClibc =
with abis;
map (a: { abi = a; }) [
uclibc
uclibceabi
uclibceabihf
];
isEfi = [
{ cpu = { family = "arm"; version = "6"; }; }
{ cpu = { family = "arm"; version = "7"; }; }
{ cpu = { family = "arm"; version = "8"; }; }
{ cpu = { family = "riscv"; }; }
{ cpu = { family = "x86"; }; }
{
cpu = {
family = "arm";
version = "6";
};
}
{
cpu = {
family = "arm";
version = "7";
};
}
{
cpu = {
family = "arm";
version = "8";
};
}
{
cpu = {
family = "riscv";
};
}
{
cpu = {
family = "x86";
};
}
];
isElf = { kernel.execFormat = execFormats.elf; };
isMacho = { kernel.execFormat = execFormats.macho; };
isElf = {
kernel.execFormat = execFormats.elf;
};
isMacho = {
kernel.execFormat = execFormats.macho;
};
};
# given two patterns, return a pattern which is their logical AND.
# Since a pattern is a list-of-disjuncts, this needs to
patternLogicalAnd = pat1_: pat2_:
patternLogicalAnd =
pat1_: pat2_:
let
# patterns can be either a list or a (bare) singleton; turn
# them into singletons for uniform handling
pat1 = toList pat1_;
pat2 = toList pat2_;
in
concatMap (attr1:
map (attr2:
recursiveUpdateUntil
(path: subattr1: subattr2:
if (builtins.intersectAttrs subattr1 subattr2) == {} || subattr1 == subattr2
then true
else throw ''
pattern conflict at path ${toString path}:
${toJSON subattr1}
${toJSON subattr2}
'')
attr1
attr2
)
pat2)
pat1;
concatMap (
attr1:
map (
attr2:
recursiveUpdateUntil (
path: subattr1: subattr2:
if (builtins.intersectAttrs subattr1 subattr2) == { } || subattr1 == subattr2 then
true
else
throw ''
pattern conflict at path ${toString path}:
${toJSON subattr1}
${toJSON subattr2}
''
) attr1 attr2
) pat2
) pat1;
matchAnyAttrs = patterns:
if isList patterns then attrs: any (pattern: matchAttrs pattern attrs) patterns
else matchAttrs patterns;
matchAnyAttrs =
patterns:
if isList patterns then
attrs: any (pattern: matchAttrs pattern attrs) patterns
else
matchAttrs patterns;
predicates = mapAttrs (_: matchAnyAttrs) patterns;
@ -163,7 +474,9 @@ rec {
# that `lib.meta.availableOn` can distinguish them from the patterns which
# apply only to the `parsed` field.
platformPatterns = mapAttrs (_: p: { parsed = {}; } // p) {
isStatic = { isStatic = true; };
platformPatterns = mapAttrs (_: p: { parsed = { }; } // p) {
isStatic = {
isStatic = true;
};
};
}
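
An aside, not part of the commit: each entry of `predicates` is `matchAnyAttrs` applied to the corresponding pattern, so it takes a *parsed* system, and `patternLogicalAnd` combines two patterns into one. A sketch under the assumption that this file is exposed as `lib.systems.inspect` and that `./lib` is the nixpkgs lib directory:

let
  lib = import ./lib;  # nixpkgs lib; the path is an assumption of this sketch
  inherit (lib.systems) parse inspect;
  parsed = parse.mkSystemFromString "aarch64-linux";
in
{
  isLinux  = inspect.predicates.isLinux parsed;    # expected: true
  isx86_64 = inspect.predicates.isx86_64 parsed;   # expected: false
  # A combined "64-bit Linux" pattern built from two existing ones:
  linux64  = inspect.patternLogicalAnd inspect.patterns.isLinux inspect.patterns.is64bit;
}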

File diff suppressed because it is too large

View file

@ -18,9 +18,7 @@ rec {
};
};
pc_simplekernel = lib.recursiveUpdate pc {
linux-kernel.autoModules = false;
};
pc_simplekernel = lib.recursiveUpdate pc { linux-kernel.autoModules = false; };
powernv = {
linux-kernel = {
@ -490,12 +488,42 @@ rec {
};
# can execute on 32bit chip
gcc_mips32r2_o32 = { gcc = { arch = "mips32r2"; abi = "32"; }; };
gcc_mips32r6_o32 = { gcc = { arch = "mips32r6"; abi = "32"; }; };
gcc_mips64r2_n32 = { gcc = { arch = "mips64r2"; abi = "n32"; }; };
gcc_mips64r6_n32 = { gcc = { arch = "mips64r6"; abi = "n32"; }; };
gcc_mips64r2_64 = { gcc = { arch = "mips64r2"; abi = "64"; }; };
gcc_mips64r6_64 = { gcc = { arch = "mips64r6"; abi = "64"; }; };
gcc_mips32r2_o32 = {
gcc = {
arch = "mips32r2";
abi = "32";
};
};
gcc_mips32r6_o32 = {
gcc = {
arch = "mips32r6";
abi = "32";
};
};
gcc_mips64r2_n32 = {
gcc = {
arch = "mips64r2";
abi = "n32";
};
};
gcc_mips64r6_n32 = {
gcc = {
arch = "mips64r6";
abi = "n32";
};
};
gcc_mips64r2_64 = {
gcc = {
arch = "mips64r2";
abi = "64";
};
};
gcc_mips64r6_64 = {
gcc = {
arch = "mips64r6";
abi = "64";
};
};
# based on:
# https://www.mail-archive.com/qemu-discuss@nongnu.org/msg05179.html
@ -546,27 +574,38 @@ rec {
# This function takes a minimally-valid "platform" and returns an
# attrset containing zero or more additional attrs which should be
# included in the platform in order to further elaborate it.
select = platform:
select =
platform:
# x86
/**/ if platform.isx86 then pc
if platform.isx86 then
pc
# ARM
else if platform.isAarch32 then let
version = platform.parsed.cpu.version or null;
in if version == null then pc
else if lib.versionOlder version "6" then sheevaplug
else if lib.versionOlder version "7" then raspberrypi
else armv7l-hf-multiplatform
else if platform.isAarch32 then
let
version = platform.parsed.cpu.version or null;
in
if version == null then
pc
else if lib.versionOlder version "6" then
sheevaplug
else if lib.versionOlder version "7" then
raspberrypi
else
armv7l-hf-multiplatform
else if platform.isAarch64 then
if platform.isDarwin then apple-m1
else aarch64-multiplatform
if platform.isDarwin then apple-m1 else aarch64-multiplatform
else if platform.isRiscV then riscv-multiplatform
else if platform.isRiscV then
riscv-multiplatform
else if platform.parsed.cpu == lib.systems.parse.cpuTypes.mipsel then (import ./examples.nix { inherit lib; }).mipsel-linux-gnu
else if platform.parsed.cpu == lib.systems.parse.cpuTypes.mipsel then
(import ./examples.nix { inherit lib; }).mipsel-linux-gnu
else if platform.parsed.cpu == lib.systems.parse.cpuTypes.powerpc64le then powernv
else if platform.parsed.cpu == lib.systems.parse.cpuTypes.powerpc64le then
powernv
else { };
else
{ };
}
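
An aside, not part of the commit: `select` maps an elaborated platform to one of the default tables defined earlier in this file. A sketch of the two most common branches, assuming `lib.systems.platforms`, `lib.systems.elaborate`, and the `./lib` path; the equalities are expectations, not something asserted by the commit:

let
  lib = import ./lib;  # nixpkgs lib; the path is an assumption of this sketch
  inherit (lib.systems) platforms elaborate;
in
{
  pcForX86 = platforms.select (elaborate "x86_64-linux") == platforms.pc;          # expected: true
  m1ForMac = platforms.select (elaborate "aarch64-darwin") == platforms.apple-m1;  # expected: true
}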

View file

@ -1,7 +1,10 @@
# Throws an error if any of our lib tests fail.
let tests = [ "misc" "systems" ];
all = builtins.concatLists (map (f: import (./. + "/${f}.nix")) tests);
in if all == []
then null
else throw (builtins.toJSON all)
let
tests = [
"misc"
"systems"
];
all = builtins.concatLists (map (f: import (./. + "/${f}.nix")) tests);
in
if all == [ ] then null else throw (builtins.toJSON all)

View file

@ -1,11 +1,10 @@
{ lib, ... }:
let
inherit (lib) types;
in {
in
{
options = {
name = lib.mkOption {
type = types.str;
};
name = lib.mkOption { type = types.str; };
email = lib.mkOption {
type = types.nullOr types.str;
default = null;
@ -23,10 +22,8 @@ in {
default = null;
};
keys = lib.mkOption {
type = types.listOf (types.submodule {
options.fingerprint = lib.mkOption { type = types.str; };
});
default = [];
type = types.listOf (types.submodule { options.fingerprint = lib.mkOption { type = types.str; }; });
default = [ ];
};
};
}
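
An aside, not part of the commit: an entry that this maintainer module would accept might look as follows. All values are hypothetical, and the `github`/`githubId`/`matrix` options referenced by the test below are assumed to be among the options this hunk elides:

{
  # Hypothetical maintainer entry; every value below is made up.
  example-handle = {
    name = "Jane Doe";
    email = "jane@example.org";
    github = "janedoe";      # assumes the github/githubId options not shown in this hunk
    githubId = 1234567;
    keys = [ { fingerprint = "0000 1111 2222 3333 4444  5555 6666 7777 8888 9999"; } ];
  };
}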

View file

@ -1,53 +1,76 @@
# to run these tests (and the others)
# nix-build nixpkgs/lib/tests/release.nix
# These tests should stay in sync with the comment in maintainers/maintainers-list.nix
{ # The pkgs used for dependencies for the testing itself
pkgs ? import ../.. {}
, lib ? pkgs.lib
{
# The pkgs used for dependencies for the testing itself
pkgs ? import ../.. { },
lib ? pkgs.lib,
}:
let
checkMaintainer = handle: uncheckedAttrs:
let
prefix = [ "lib" "maintainers" handle ];
checkedAttrs = (lib.modules.evalModules {
inherit prefix;
modules = [
./maintainer-module.nix
{
_file = toString ../../maintainers/maintainer-list.nix;
config = uncheckedAttrs;
}
];
}).config;
checkMaintainer =
handle: uncheckedAttrs:
let
prefix = [
"lib"
"maintainers"
handle
];
checkedAttrs =
(lib.modules.evalModules {
inherit prefix;
modules = [
./maintainer-module.nix
{
_file = toString ../../maintainers/maintainer-list.nix;
config = uncheckedAttrs;
}
];
}).config;
checks = lib.optional (checkedAttrs.github != null && checkedAttrs.githubId == null) ''
echo ${lib.escapeShellArg (lib.showOption prefix)}': If `github` is specified, `githubId` must be too.'
# Calling this too often would hit non-authenticated API limits, but this
# shouldn't happen since such errors will get fixed rather quickly
info=$(curl -sS https://api.github.com/users/${checkedAttrs.github})
id=$(jq -r '.id' <<< "$info")
echo "The GitHub ID for GitHub user ${checkedAttrs.github} is $id:"
echo -e " githubId = $id;\n"
'' ++ lib.optional (checkedAttrs.email == null && checkedAttrs.github == null && checkedAttrs.matrix == null) ''
echo ${lib.escapeShellArg (lib.showOption prefix)}': At least one of `email`, `github` or `matrix` must be specified, so that users know how to reach you.'
'' ++ lib.optional (checkedAttrs.email != null && lib.hasSuffix "noreply.github.com" checkedAttrs.email) ''
echo ${lib.escapeShellArg (lib.showOption prefix)}': If an email address is given, it should allow people to reach you. If you do not want that, you can just provide `github` or `matrix` instead.'
'';
in lib.deepSeq checkedAttrs checks;
checks =
lib.optional (checkedAttrs.github != null && checkedAttrs.githubId == null) ''
echo ${lib.escapeShellArg (lib.showOption prefix)}': If `github` is specified, `githubId` must be too.'
# Calling this too often would hit non-authenticated API limits, but this
# shouldn't happen since such errors will get fixed rather quickly
info=$(curl -sS https://api.github.com/users/${checkedAttrs.github})
id=$(jq -r '.id' <<< "$info")
echo "The GitHub ID for GitHub user ${checkedAttrs.github} is $id:"
echo -e " githubId = $id;\n"
''
++
lib.optional
(checkedAttrs.email == null && checkedAttrs.github == null && checkedAttrs.matrix == null)
''
echo ${lib.escapeShellArg (lib.showOption prefix)}': At least one of `email`, `github` or `matrix` must be specified, so that users know how to reach you.'
''
++
lib.optional (checkedAttrs.email != null && lib.hasSuffix "noreply.github.com" checkedAttrs.email)
''
echo ${lib.escapeShellArg (lib.showOption prefix)}': If an email address is given, it should allow people to reach you. If you do not want that, you can just provide `github` or `matrix` instead.'
'';
in
lib.deepSeq checkedAttrs checks;
missingGithubIds = lib.concatLists (lib.mapAttrsToList checkMaintainer lib.maintainers);
success = pkgs.runCommand "checked-maintainers-success" {} ">$out";
success = pkgs.runCommand "checked-maintainers-success" { } ">$out";
failure = pkgs.runCommand "checked-maintainers-failure" {
nativeBuildInputs = [ pkgs.curl pkgs.jq ];
outputHash = "sha256:${lib.fakeSha256}";
outputHAlgo = "sha256";
outputHashMode = "flat";
SSL_CERT_FILE = "${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt";
} ''
${lib.concatStringsSep "\n" missingGithubIds}
exit 1
'';
in if missingGithubIds == [] then success else failure
failure =
pkgs.runCommand "checked-maintainers-failure"
{
nativeBuildInputs = [
pkgs.curl
pkgs.jq
];
outputHash = "sha256:${lib.fakeSha256}";
outputHAlgo = "sha256";
outputHashMode = "flat";
SSL_CERT_FILE = "${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt";
}
''
${lib.concatStringsSep "\n" missingGithubIds}
exit 1
'';
in
if missingGithubIds == [ ] then success else failure

File diff suppressed because it is too large

View file

@ -1,5 +1,9 @@
{ lib, ... }: {
options.dummy = lib.mkOption { type = lib.types.anything; default = {}; };
{ lib, ... }:
{
options.dummy = lib.mkOption {
type = lib.types.anything;
default = { };
};
freeformType =
let
a = lib.types.attrsOf (lib.types.submodule { options.bar = lib.mkOption { }; });
@ -7,8 +11,6 @@
# modifying types like this breaks type merging.
# This test makes sure that type merging is not performed when only a single declaration exists.
# Don't modify types in practice!
a // {
merge = loc: defs: { freeformItems = a.merge loc defs; };
};
a // { merge = loc: defs: { freeformItems = a.merge loc defs; }; };
config.foo.bar = "ok";
}

View file

@ -30,7 +30,7 @@ in
# mkAliasOptionModule sets warnings, so this has to be defined.
warnings = mkOption {
internal = true;
default = [];
default = [ ];
type = types.listOf types.str;
example = [ "The `foo' service is deprecated and will go away soon!" ];
description = ''
@ -46,14 +46,16 @@ in
# Disable the aliased option with a high priority so it
# should override the next import.
( { config, lib, ... }:
(
{ config, lib, ... }:
{
enableAlias = mkForce false;
}
)
# Enable the normal (non-aliased) option.
( { config, lib, ... }:
(
{ config, lib, ... }:
{
enable = true;
}

View file

@ -30,7 +30,7 @@ in
# mkAliasOptionModule sets warnings, so this has to be defined.
warnings = mkOption {
internal = true;
default = [];
default = [ ];
type = types.listOf types.str;
example = [ "The `foo' service is deprecated and will go away soon!" ];
description = ''
@ -46,14 +46,16 @@ in
# Disable the aliased option, but with a default (low) priority so it
# should be able to be overridden by the next import.
( { config, lib, ... }:
(
{ config, lib, ... }:
{
enableAlias = mkDefault false;
}
)
# Enable the normal (non-aliased) option.
( { config, lib, ... }:
(
{ config, lib, ... }:
{
enable = true;
}

View file

@ -1,7 +1,6 @@
{ lib, config, ... }: {
options.conditionalWorks = lib.mkOption {
default = ! config.value ? foo;
};
{ lib, config, ... }:
{
options.conditionalWorks = lib.mkOption { default = !config.value ? foo; };
config.value.foo = lib.mkIf false "should not be defined";
}

View file

@ -1,7 +1,6 @@
{ lib, config, ... }: {
options.isLazy = lib.mkOption {
default = ! config.value ? foo;
};
{ lib, config, ... }:
{
options.isLazy = lib.mkOption { default = !config.value ? foo; };
config.value.bar = throw "is not lazy";
}

View file

@ -1,14 +1,24 @@
{ lib, ... }: {
{ lib, ... }:
{
options.value = lib.mkOption {
type = lib.types.lazyAttrsOf lib.types.boolByOr;
};
options.value = lib.mkOption { type = lib.types.lazyAttrsOf lib.types.boolByOr; };
config.value = {
falseFalse = lib.mkMerge [ false false ];
trueFalse = lib.mkMerge [ true false ];
falseTrue = lib.mkMerge [ false true ];
trueTrue = lib.mkMerge [ true true ];
falseFalse = lib.mkMerge [
false
false
];
trueFalse = lib.mkMerge [
true
false
];
falseTrue = lib.mkMerge [
false
true
];
trueTrue = lib.mkMerge [
true
true
];
};
}

View file

@ -1,4 +1,5 @@
{ lib, ... }: {
{ lib, ... }:
{
options = {
sub = {
nixosOk = lib.mkOption {
@ -40,37 +41,37 @@
];
config = {
_module.freeformType = lib.types.anything;
ok =
lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
];
};
ok = lib.evalModules {
class = "nixos";
modules = [ ./module-class-is-nixos.nix ];
};
fail =
lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
./module-class-is-darwin.nix
];
};
fail = lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
./module-class-is-darwin.nix
];
};
fail-anon =
lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
{ _file = "foo.nix#darwinModules.default";
_class = "darwin";
config = {};
imports = [];
}
];
};
fail-anon = lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
{
_file = "foo.nix#darwinModules.default";
_class = "darwin";
config = { };
imports = [ ];
}
];
};
sub.nixosOk = { _class = "nixos"; };
sub.nixosFail = { imports = [ ./module-class-is-darwin.nix ]; };
sub.nixosOk = {
_class = "nixos";
};
sub.nixosFail = {
imports = [ ./module-class-is-darwin.nix ];
};
};
}

View file

@ -1,24 +1,22 @@
{ lib, options, ... }:
let discardPositions = lib.mapAttrs (k: v: v);
let
discardPositions = lib.mapAttrs (k: v: v);
in
# unsafeGetAttrPos is unspecified best-effort behavior, so we only want to consider this test on an evaluator that satisfies some basic assumptions about this function.
assert builtins.unsafeGetAttrPos "a" { a = true; } != null;
assert builtins.unsafeGetAttrPos "a" (discardPositions { a = true; }) == null;
assert
builtins.unsafeGetAttrPos "a" (discardPositions {
a = true;
}) == null;
{
imports = [
{
options.imported.line10 = lib.mkOption {
type = lib.types.int;
};
options.imported.line10 = lib.mkOption { type = lib.types.int; };
# Simulates various patterns of generating modules such as
# programs.firefox.nativeMessagingHosts.ff2mpv. We don't expect to get
# line numbers for these, but we can fall back on knowing the file.
options.generated = discardPositions {
line18 = lib.mkOption {
type = lib.types.int;
};
};
options.generated = discardPositions { line18 = lib.mkOption { type = lib.types.int; }; };
options.submoduleLine34.extraOptLine23 = lib.mkOption {
default = 1;
@ -27,23 +25,25 @@ assert builtins.unsafeGetAttrPos "a" (discardPositions { a = true; }) == null;
}
];
options.nested.nestedLine30 = lib.mkOption {
type = lib.types.int;
};
options.nested.nestedLine30 = lib.mkOption { type = lib.types.int; };
options.submoduleLine34 = lib.mkOption {
default = { };
type = lib.types.submoduleWith {
modules = [
({ options, ... }: {
options.submodDeclLine39 = lib.mkOption { };
})
(
{ options, ... }:
{
options.submodDeclLine39 = lib.mkOption { };
}
)
{ freeformType = with lib.types; lazyAttrsOf (uniq unspecified); }
];
};
};
config = {
submoduleLine34.submodDeclLine39 = (options.submoduleLine34.type.getSubOptions [ ]).submodDeclLine39.declarationPositions;
submoduleLine34.submodDeclLine39 =
(options.submoduleLine34.type.getSubOptions [ ]).submodDeclLine39.declarationPositions;
};
}

View file

@ -1,13 +1,13 @@
{ lib, ... }:
let
deathtrapArgs = lib.mapAttrs
(k: _: throw "The module system is too strict, accessing an unused option's ${k} mkOption-attribute.")
(lib.functionArgs lib.mkOption);
deathtrapArgs = lib.mapAttrs (
k: _: throw "The module system is too strict, accessing an unused option's ${k} mkOption-attribute."
) (lib.functionArgs lib.mkOption);
in
{
options.value = lib.mkOption {
type = lib.types.attrsOf lib.types.str;
default = {};
default = { };
};
options.testing-laziness-so-don't-read-me = lib.mkOption deathtrapArgs;
}

View file

@ -1,25 +1,27 @@
{ lib, ... }:
let
submod = { ... }: {
options = {
enable = lib.mkOption {
default = false;
example = true;
type = lib.types.bool;
description = ''
Some descriptive text
'';
submod =
{ ... }:
{
options = {
enable = lib.mkOption {
default = false;
example = true;
type = lib.types.bool;
description = ''
Some descriptive text
'';
};
};
};
};
in
{
options = {
attrsOfSub = lib.mkOption {
default = {};
example = {};
default = { };
example = { };
type = lib.types.attrsOf (lib.types.submodule [ submod ]);
description = ''
Some descriptive text

View file

@ -8,11 +8,9 @@ in
modules = [ ];
shorthandOnlyDefinesConfig = config.shorthandOnlyDefinesConfig;
};
default = {};
default = { };
};
  # config-dependent options: not recommended, but useful for making this test parameterized
options.shorthandOnlyDefinesConfig = mkOption {
default = false;
};
options.shorthandOnlyDefinesConfig = mkOption { default = false; };
}

View file

@ -1,5 +1,4 @@
{ lib, ... }: {
options.value = lib.mkOption {
type = lib.types.either lib.types.int lib.types.str;
};
{ lib, ... }:
{
options.value = lib.mkOption { type = lib.types.either lib.types.int lib.types.str; };
}

View file

@ -2,8 +2,6 @@
{
options = {
value = lib.mkOption {
type = lib.types.ints.between (-21) 43;
};
value = lib.mkOption { type = lib.types.ints.between (-21) 43; };
};
}

View file

@ -2,8 +2,6 @@
{
options.set = {
value = lib.mkOption {
type = lib.types.ints.positive;
};
value = lib.mkOption { type = lib.types.ints.positive; };
};
}

View file

@ -2,8 +2,6 @@
{
options = {
value = lib.mkOption {
type = lib.types.ints.positive;
};
value = lib.mkOption { type = lib.types.ints.positive; };
};
}

View file

@ -2,8 +2,6 @@
{
options = {
value = lib.mkOption {
type = lib.types.ints.unsigned;
};
value = lib.mkOption { type = lib.types.ints.unsigned; };
};
}

View file

@ -1,6 +1,7 @@
{ lib, ... }: {
{ lib, ... }:
{
options.value = lib.mkOption {
type = lib.types.lazyAttrsOf (lib.types.str // { emptyValue.value = "empty"; });
default = {};
default = { };
};
}

View file

@ -1,19 +1,17 @@
{ lib, ... }: let
{ lib, ... }:
let
pkgs.hello = {
type = "derivation";
pname = "hello";
};
in {
in
{
options = {
package = lib.mkPackageOption pkgs "hello" { };
namedPackage = lib.mkPackageOption pkgs "Hello" {
default = [ "hello" ];
};
namedPackage = lib.mkPackageOption pkgs "Hello" { default = [ "hello" ]; };
namedPackageSingletonDefault = lib.mkPackageOption pkgs "Hello" {
default = "hello";
};
namedPackageSingletonDefault = lib.mkPackageOption pkgs "Hello" { default = "hello"; };
pathPackage = lib.mkPackageOption pkgs [ "hello" ] { };
@ -21,33 +19,31 @@ in {
example = "pkgs.hello.override { stdenv = pkgs.clangStdenv; }";
};
packageWithPathExample = lib.mkPackageOption pkgs "hello" {
example = [ "hello" ];
};
packageWithPathExample = lib.mkPackageOption pkgs "hello" { example = [ "hello" ]; };
packageWithExtraDescription = lib.mkPackageOption pkgs "hello" {
extraDescription = "Example extra description.";
};
undefinedPackage = lib.mkPackageOption pkgs "hello" {
default = null;
};
undefinedPackage = lib.mkPackageOption pkgs "hello" { default = null; };
nullablePackage = lib.mkPackageOption pkgs "hello" {
nullable = true;
default = null;
};
nullablePackageWithDefault = lib.mkPackageOption pkgs "hello" {
nullable = true;
};
nullablePackageWithDefault = lib.mkPackageOption pkgs "hello" { nullable = true; };
packageWithPkgsText = lib.mkPackageOption pkgs "hello" {
pkgsText = "myPkgs";
};
packageWithPkgsText = lib.mkPackageOption pkgs "hello" { pkgsText = "myPkgs"; };
packageFromOtherSet = let myPkgs = {
hello = pkgs.hello // { pname = "hello-other"; };
}; in lib.mkPackageOption myPkgs "hello" { };
packageFromOtherSet =
let
myPkgs = {
hello = pkgs.hello // {
pname = "hello-other";
};
};
in
lib.mkPackageOption myPkgs "hello" { };
};
}

View file

@ -1,4 +1,5 @@
{ lib, ... }: {
{ lib, ... }:
{
options.value = lib.mkOption {
type = lib.types.oneOf [
lib.types.int

View file

@ -3,7 +3,9 @@
{
options.set = lib.mkOption {
default = { };
example = { a = 1; };
example = {
a = 1;
};
type = lib.types.attrsOf lib.types.int;
description = ''
Some descriptive text

View file

@ -1,25 +1,32 @@
{ lib, ... }: {
{ lib, ... }:
{
options.submodule = lib.mkOption {
inherit (lib.evalModules {
modules = [
{
options.inner = lib.mkOption {
type = lib.types.bool;
default = false;
};
}
];
}) type;
default = {};
inherit
(lib.evalModules {
modules = [
{
options.inner = lib.mkOption {
type = lib.types.bool;
default = false;
};
}
];
})
type
;
default = { };
};
config.submodule = lib.mkMerge [
({ lib, ... }: {
options.outer = lib.mkOption {
type = lib.types.bool;
default = false;
};
})
(
{ lib, ... }:
{
options.outer = lib.mkOption {
type = lib.types.bool;
default = false;
};
}
)
{
inner = true;
outer = true;

View file

@ -1,4 +1,5 @@
{ lib, ... }: {
{ lib, ... }:
{
options.submodule = lib.mkOption {
type = lib.types.submoduleWith {
modules = [
@ -10,16 +11,19 @@
}
];
};
default = {};
default = { };
};
config.submodule = lib.mkMerge [
({ lib, ... }: {
options.outer = lib.mkOption {
type = lib.types.bool;
default = false;
};
})
(
{ lib, ... }:
{
options.outer = lib.mkOption {
type = lib.types.bool;
default = false;
};
}
)
{
inner = true;
outer = true;

View file

@ -1,13 +1,13 @@
{ lib, ... }: let
{ lib, ... }:
let
sub.options.config = lib.mkOption {
type = lib.types.bool;
default = false;
};
in {
in
{
options.submodule = lib.mkOption {
type = lib.types.submoduleWith {
modules = [ sub ];
};
default = {};
type = lib.types.submoduleWith { modules = [ sub ]; };
default = { };
};
}

View file

@ -1,11 +1,8 @@
{ lib, ... }: {
{ lib, ... }:
{
options.submodule = lib.mkOption {
type = lib.types.submoduleWith {
modules = [
./declare-enable.nix
];
};
default = {};
type = lib.types.submoduleWith { modules = [ ./declare-enable.nix ]; };
default = { };
};
config.submodule = ./define-enable.nix;

View file

@ -1,14 +1,16 @@
{ lib, ... }: let
{ lib, ... }:
let
sub.options.config = lib.mkOption {
type = lib.types.bool;
default = false;
};
in {
in
{
options.submodule = lib.mkOption {
type = lib.types.submoduleWith {
modules = [ sub ];
shorthandOnlyDefinesConfig = true;
};
default = {};
default = { };
};
}

View file

@ -1,17 +1,19 @@
{ lib, ... }: {
{ lib, ... }:
{
options.submodule = lib.mkOption {
type = lib.types.submoduleWith {
modules = [
({ lib, ... }: {
options.foo = lib.mkOption {
default = lib.foo;
};
})
(
{ lib, ... }:
{
options.foo = lib.mkOption { default = lib.foo; };
}
)
];
specialArgs.lib = lib // {
foo = "foo";
};
};
default = {};
default = { };
};
}

View file

@ -1,9 +1,10 @@
{ lib, moduleType, ... }:
let inherit (lib) mkOption types;
let
inherit (lib) mkOption types;
in
{
options.variants = mkOption {
type = types.lazyAttrsOf moduleType;
default = {};
default = { };
};
}

View file

@ -1,8 +1,15 @@
{ lib ? import ../.., modules ? [] }:
{
lib ? import ../..,
modules ? [ ],
}:
{
inherit (lib.evalModules {
inherit modules;
specialArgs.modulesPath = ./.;
}) config options;
inherit
(lib.evalModules {
inherit modules;
specialArgs.modulesPath = ./.;
})
config
options
;
}

View file

@ -1,19 +1,28 @@
{ config, lib, ... }:
let
inherit (lib) types mkOption setDefaultModuleLocation evalModules;
inherit (types) deferredModule lazyAttrsOf submodule str raw enum;
inherit (lib)
types
mkOption
setDefaultModuleLocation
evalModules
;
inherit (types)
deferredModule
lazyAttrsOf
submodule
str
raw
enum
;
in
{
options = {
deferred = mkOption {
type = deferredModule;
};
result = mkOption {
default = (evalModules { modules = [ config.deferred ]; }).config.result;
};
deferred = mkOption { type = deferredModule; };
result = mkOption { default = (evalModules { modules = [ config.deferred ]; }).config.result; };
};
config = {
deferred = { ... }:
deferred =
{ ... }:
# this should be an attrset, so this fails
true;
};

View file

@ -1,7 +1,14 @@
{ lib, ... }:
let
inherit (lib) types mkOption setDefaultModuleLocation;
inherit (types) deferredModule lazyAttrsOf submodule str raw enum;
inherit (types)
deferredModule
lazyAttrsOf
submodule
str
raw
enum
;
in
{
imports = [
@ -9,27 +16,37 @@ in
# - nodes.<name>
# - default
# where all nodes include the default
({ config, ... }: {
_file = "generic.nix";
options.nodes = mkOption {
type = lazyAttrsOf (submodule { imports = [ config.default ]; });
default = {};
};
options.default = mkOption {
type = deferredModule;
default = { };
description = ''
Module that is included in all nodes.
'';
};
})
(
{ config, ... }:
{
_file = "generic.nix";
options.nodes = mkOption {
type = lazyAttrsOf (submodule {
imports = [ config.default ];
});
default = { };
};
options.default = mkOption {
type = deferredModule;
default = { };
description = ''
Module that is included in all nodes.
'';
};
}
)
{
_file = "default-1.nix";
default = { config, ... }: {
options.settingsDict = lib.mkOption { type = lazyAttrsOf str; default = {}; };
options.bottom = lib.mkOption { type = enum []; };
};
default =
{ config, ... }:
{
options.settingsDict = lib.mkOption {
type = lazyAttrsOf str;
default = { };
};
options.bottom = lib.mkOption { type = enum [ ]; };
};
}
{
@ -49,9 +66,11 @@ in
{
_file = "nodes-foo-c-is-a.nix";
nodes.foo = { config, ... }: {
settingsDict.c = config.settingsDict.a;
};
nodes.foo =
{ config, ... }:
{
settingsDict.c = config.settingsDict.a;
};
}
];

View file

@ -1,3 +1 @@
{
attrsOfSub.bar.enable = true;
}
{ attrsOfSub.bar.enable = true; }

View file

@ -1,3 +1 @@
{
attrsOfSub.bar = {};
}
{ attrsOfSub.bar = { }; }

View file

@ -1,3 +1 @@
{
attrsOfSub.foo.enable = true;
}
{ attrsOfSub.foo.enable = true; }

View file

@ -1,7 +1,5 @@
{ lib, ... }:
{
attrsOfSub.foo = lib.mkForce {
enable = false;
};
attrsOfSub.foo = lib.mkForce { enable = false; };
}

View file

@ -1,7 +1,5 @@
{ config, lib, ... }:
{
attrsOfSub.foo = lib.mkIf config.enable {
enable = true;
};
attrsOfSub.foo = lib.mkIf config.enable { enable = true; };
}

View file

@ -1,3 +1 @@
{
attrsOfSub.foo = {};
}
{ attrsOfSub.foo = { }; }

View file

@ -1,7 +1,5 @@
{ lib, ... }:
{
attrsOfSub = lib.mkForce {
foo.enable = false;
};
attrsOfSub = lib.mkForce { foo.enable = false; };
}

View file

@ -1,7 +1,5 @@
{ config, lib, ... }:
{
attrsOfSub = lib.mkIf config.enable {
foo.enable = true;
};
attrsOfSub = lib.mkIf config.enable { foo.enable = true; };
}

View file

@ -1,3 +1 @@
{
config.enable = abort "oops";
}
{ config.enable = abort "oops"; }

View file

@ -1,3 +1 @@
{
config.enable = throw "oops";
}
{ config.enable = throw "oops"; }

View file

@ -1,3 +1 @@
{
enable = true;
}
{ enable = true; }

View file

@ -1,5 +1,3 @@
{ lib, ... }:
lib.mkForce {
attrsOfSub.foo.enable = false;
}
lib.mkForce { attrsOfSub.foo.enable = false; }

View file

@ -1,5 +1,3 @@
{ lib, ... }:
lib.mkForce {
enable = false;
}
lib.mkForce { enable = false; }

View file

@ -1,15 +1,24 @@
{ config, ... }: {
class = { "just" = "data"; };
{ config, ... }:
{
class = {
"just" = "data";
};
a = "one";
b = "two";
meta = "meta";
_module.args.result =
let r = builtins.removeAttrs config [ "_module" ];
in builtins.trace (builtins.deepSeq r r) (r == {
a = "one";
b = "two";
class = { "just" = "data"; };
meta = "meta";
});
let
r = builtins.removeAttrs config [ "_module" ];
in
builtins.trace (builtins.deepSeq r r) (
r == {
a = "one";
b = "two";
class = {
"just" = "data";
};
meta = "meta";
}
);
}

View file

@ -1,5 +1,3 @@
{ config, lib, ... }:
lib.mkIf config.enable {
attrsOfSub.foo.enable = true;
}
lib.mkIf config.enable { attrsOfSub.foo.enable = true; }

View file

@ -1,3 +1 @@
{
_module.check = false;
}
{ _module.check = false; }

View file

@ -5,12 +5,11 @@
{
# Always defined, but the value depends on the presence of an option.
config.set = {
value = if options ? set.enable then 360 else 7;
}
# Only define if possible.
// lib.optionalAttrs (options ? set.enable) {
enable = true;
};
config.set =
{
value = if options ? set.enable then 360 else 7;
}
# Only define if possible.
// lib.optionalAttrs (options ? set.enable) { enable = true; };
}

View file

@ -5,12 +5,11 @@
{
# Always defined, but the value depends on the presence of an option.
config = {
value = if options ? enable then 360 else 7;
}
# Only define if possible.
// lib.optionalAttrs (options ? enable) {
enable = true;
};
config =
{
value = if options ? enable then 360 else 7;
}
# Only define if possible.
// lib.optionalAttrs (options ? enable) { enable = true; };
}

View file

@ -1,3 +1,4 @@
{ config, ... }: {
{ config, ... }:
{
settingsDict.a = config.settingsDict.b;
}

View file

@ -1,3 +1 @@
{
submodule.config.config = true;
}
{ submodule.config.config = true; }

View file

@ -1,3 +1 @@
{
submodule.config = true;
}
{ submodule.config = true; }

View file

@ -1,3 +1 @@
{
value = -23;
}
{ value = -23; }

View file

@ -1,3 +1 @@
{
value = 42;
}
{ value = 42; }

Some files were not shown because too many files have changed in this diff