Compare commits
2 commits: 17c9487682 ... e0d98ae789

Author | SHA1 | Date
---|---|---
isabel roses | e0d98ae789 |
isabel roses | a505937410 |
.git-blame-ignore-revs (new file, 2 lines)
@@ -0,0 +1,2 @@
+# formatting
+a5059374106b6b1148a3cc6673c27ec1829380ea
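One practical note, not part of the diff itself: the file above records the tree-wide formatting commit so that `git blame` can skip over it, but it only takes effect when blame is pointed at it, for example with `git config blame.ignoreRevsFile .git-blame-ignore-revs` or `git blame --ignore-revs-file .git-blame-ignore-revs`; some forge blame views also pick the file up by name automatically.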
@@ -1,6 +1,8 @@
-let requiredVersion = import ./lib/minver.nix; in
+let
+  requiredVersion = import ./lib/minver.nix;
+in
 
-if ! builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
+if !builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
 
   abort ''
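For readers unfamiliar with the gate being reflowed here: `builtins.compareVersions a b` returns 1 when `a` is newer than `b`, so the condition fires when the running Nix is older than the pinned minimum (or too old to expose `builtins.nixVersion` at all). A minimal sketch, with a made-up version string standing in for the contents of `./lib/minver.nix`:

```nix
let
  # Hypothetical stand-in for `import ./lib/minver.nix`; the real file just
  # evaluates to a version string.
  requiredVersion = "2.3";
in
# compareVersions returns 1 when requiredVersion is newer than the running Nix.
if !builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
  abort "this checkout needs Nix >= ${requiredVersion}"
else
  "the running Nix is new enough"
```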
flake.nix (12 changed lines)
@@ -1,16 +1,20 @@
 {
-  outputs = { self, ... }:
+  outputs =
+    { self, ... }:
     let
       forAllSystems = self.lib.genAttrs self.lib.systems.flakeExposed;
     in
     {
       lib = import ./lib;
 
-      auxPackages = forAllSystems (system:
+      auxPackages = forAllSystems (
+        system:
         (
-          let requiredVersion = import ./lib/minver.nix; in
+          let
+            requiredVersion = import ./lib/minver.nix;
+          in
 
-          if ! builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
+          if !builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
             abort ''
               This version of Nixpkgs requires Nix >= ${requiredVersion}, please upgrade:
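The `forAllSystems` helper touched above is just `lib.genAttrs` applied to the flake-exposed system list; a small sketch of what it produces, using a hypothetical two-entry list in place of `self.lib.systems.flakeExposed`:

```nix
let
  systems = [ "x86_64-linux" "aarch64-darwin" ];  # trimmed, illustrative list
  # Same shape as lib.genAttrs: build an attrset keyed by system name.
  forAllSystems = f: builtins.listToAttrs (map (system: { name = system; value = f system; }) systems);
in
forAllSystems (system: "packages evaluated for ${system}")
# => { aarch64-darwin = "packages evaluated for aarch64-darwin";
#      x86_64-linux = "packages evaluated for x86_64-linux"; }
```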
@@ -1,4 +1,5 @@
-{ "\t" = 9;
+{
+  "\t" = 9;
   "\n" = 10;
   "\r" = 13;
   " " = 32;
@@ -36,10 +36,7 @@ rec {
   :::
   */
   # TODO(Profpatsch): add tests that check stderr
-  assertMsg =
-    pred:
-    msg:
-    pred || builtins.throw msg;
+  assertMsg = pred: msg: pred || builtins.throw msg;
 
   /**
     Specialized `assertMsg` for checking if `val` is one of the elements
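For orientation, a usage sketch of the function being reflowed above (the formatting pass does not change its behaviour); the `import ./lib` path assumes evaluation from the repository root:

```nix
let
  lib = import ./lib;
in
# assertMsg returns true or throws `msg`, so it slots directly into `assert`:
assert lib.assertMsg (1 < 2) "this message only surfaces when the condition is false";
"evaluation got past the assert, so the check held"
```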
@@ -81,14 +78,10 @@
   :::
   */
   assertOneOf =
-    name:
-    val:
-    xs:
-    assertMsg
-      (lib.elem val xs)
-      "${name} must be one of ${
-        lib.generators.toPretty {} xs}, but is: ${
-        lib.generators.toPretty {} val}";
+    name: val: xs:
+    assertMsg (lib.elem val xs) "${name} must be one of ${lib.generators.toPretty { } xs}, but is: ${
+      lib.generators.toPretty { } val
+    }";
 
   /**
     Specialized `assertMsg` for checking if every one of `vals` is one of the elements
@@ -133,12 +126,9 @@
   :::
   */
   assertEachOneOf =
-    name:
-    vals:
-    xs:
-    assertMsg
-      (lib.all (val: lib.elem val xs) vals)
-      "each element in ${name} must be one of ${
-        lib.generators.toPretty {} xs}, but is: ${
-        lib.generators.toPretty {} vals}";
+    name: vals: xs:
+    assertMsg (lib.all (val: lib.elem val xs) vals)
+      "each element in ${name} must be one of ${lib.generators.toPretty { } xs}, but is: ${
+        lib.generators.toPretty { } vals
+      }";
 }
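A quick usage sketch for the two helpers reflowed above, assuming (as in nixpkgs) that they are re-exported at the top level of `lib`:

```nix
let
  lib = import ./lib;  # assumes evaluation from the repository root
in
# Both return true or throw a message listing the allowed values:
assert lib.assertOneOf "channel" "stable" [ "stable" "beta" "nightly" ];
assert lib.assertEachOneOf "channels" [ "stable" "beta" ] [ "stable" "beta" "nightly" ];
"both assertions held"
```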
lib/attrsets.nix (435 changed lines)
|
@ -5,14 +5,40 @@
|
||||||
|
|
||||||
let
|
let
|
||||||
inherit (builtins) head length;
|
inherit (builtins) head length;
|
||||||
inherit (lib.trivial) isInOldestRelease mergeAttrs warn warnIf;
|
inherit (lib.trivial)
|
||||||
inherit (lib.strings) concatStringsSep concatMapStringsSep escapeNixIdentifier sanitizeDerivationName;
|
isInOldestRelease
|
||||||
inherit (lib.lists) foldr foldl' concatMap elemAt all partition groupBy take foldl;
|
mergeAttrs
|
||||||
|
warn
|
||||||
|
warnIf
|
||||||
|
;
|
||||||
|
inherit (lib.strings)
|
||||||
|
concatStringsSep
|
||||||
|
concatMapStringsSep
|
||||||
|
escapeNixIdentifier
|
||||||
|
sanitizeDerivationName
|
||||||
|
;
|
||||||
|
inherit (lib.lists)
|
||||||
|
foldr
|
||||||
|
foldl'
|
||||||
|
concatMap
|
||||||
|
elemAt
|
||||||
|
all
|
||||||
|
partition
|
||||||
|
groupBy
|
||||||
|
take
|
||||||
|
foldl
|
||||||
|
;
|
||||||
in
|
in
|
||||||
|
|
||||||
rec {
|
rec {
|
||||||
inherit (builtins) attrNames listToAttrs hasAttr isAttrs getAttr removeAttrs;
|
inherit (builtins)
|
||||||
|
attrNames
|
||||||
|
listToAttrs
|
||||||
|
hasAttr
|
||||||
|
isAttrs
|
||||||
|
getAttr
|
||||||
|
removeAttrs
|
||||||
|
;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Return an attribute from nested attribute sets.
|
Return an attribute from nested attribute sets.
|
||||||
|
@ -25,7 +51,6 @@ rec {
|
||||||
(x.${f p}."example.com" or 6) == attrByPath [ (f p) "example.com" ] 6 x
|
(x.${f p}."example.com" or 6) == attrByPath [ (f p) "example.com" ] 6 x
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`attrPath`
|
`attrPath`
|
||||||
|
@ -63,19 +88,20 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
attrByPath =
|
attrByPath =
|
||||||
attrPath:
|
attrPath: default: set:
|
||||||
default:
|
|
||||||
set:
|
|
||||||
let
|
let
|
||||||
lenAttrPath = length attrPath;
|
lenAttrPath = length attrPath;
|
||||||
attrByPath' = n: s: (
|
attrByPath' =
|
||||||
if n == lenAttrPath then s
|
n: s:
|
||||||
else (
|
(
|
||||||
|
if n == lenAttrPath then
|
||||||
|
s
|
||||||
|
else
|
||||||
|
(
|
||||||
let
|
let
|
||||||
attr = elemAt attrPath n;
|
attr = elemAt attrPath n;
|
||||||
in
|
in
|
||||||
if s ? ${attr} then attrByPath' (n + 1) s.${attr}
|
if s ? ${attr} then attrByPath' (n + 1) s.${attr} else default
|
||||||
else default
|
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
in
|
in
|
||||||
|
@ -97,7 +123,6 @@ rec {
|
||||||
hasAttrByPath [] x == true
|
hasAttrByPath [] x == true
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`attrPath`
|
`attrPath`
|
||||||
|
@ -131,17 +156,18 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
hasAttrByPath =
|
hasAttrByPath =
|
||||||
attrPath:
|
attrPath: e:
|
||||||
e:
|
|
||||||
let
|
let
|
||||||
lenAttrPath = length attrPath;
|
lenAttrPath = length attrPath;
|
||||||
hasAttrByPath' = n: s: (
|
hasAttrByPath' =
|
||||||
n == lenAttrPath || (
|
n: s:
|
||||||
|
(
|
||||||
|
n == lenAttrPath
|
||||||
|
|| (
|
||||||
let
|
let
|
||||||
attr = elemAt attrPath n;
|
attr = elemAt attrPath n;
|
||||||
in
|
in
|
||||||
if s ? ${attr} then hasAttrByPath' (n + 1) s.${attr}
|
if s ? ${attr} then hasAttrByPath' (n + 1) s.${attr} else false
|
||||||
else false
|
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
in
|
in
|
||||||
|
@ -164,7 +190,6 @@ rec {
|
||||||
hasAttrByPath (attrsets.longestValidPathPrefix p x) x == true
|
hasAttrByPath (attrsets.longestValidPathPrefix p x) x == true
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`attrPath`
|
`attrPath`
|
||||||
|
@ -200,8 +225,7 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
longestValidPathPrefix =
|
longestValidPathPrefix =
|
||||||
attrPath:
|
attrPath: v:
|
||||||
v:
|
|
||||||
let
|
let
|
||||||
lenAttrPath = length attrPath;
|
lenAttrPath = length attrPath;
|
||||||
getPrefixForSetAtIndex =
|
getPrefixForSetAtIndex =
|
||||||
|
@ -221,8 +245,7 @@ rec {
|
||||||
attr = elemAt attrPath remainingPathIndex;
|
attr = elemAt attrPath remainingPathIndex;
|
||||||
in
|
in
|
||||||
if remainingSet ? ${attr} then
|
if remainingSet ? ${attr} then
|
||||||
getPrefixForSetAtIndex
|
getPrefixForSetAtIndex remainingSet.${attr} # advance from the set to the attribute value
|
||||||
remainingSet.${attr} # advance from the set to the attribute value
|
|
||||||
(remainingPathIndex + 1) # advance the path
|
(remainingPathIndex + 1) # advance the path
|
||||||
else
|
else
|
||||||
# The attribute doesn't exist, so we return the prefix up to the
|
# The attribute doesn't exist, so we return the prefix up to the
|
||||||
|
@ -234,7 +257,6 @@ rec {
|
||||||
/**
|
/**
|
||||||
Create a new attribute set with `value` set at the nested attribute location specified in `attrPath`.
|
Create a new attribute set with `value` set at the nested attribute location specified in `attrPath`.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`attrPath`
|
`attrPath`
|
||||||
|
@ -263,15 +285,12 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
setAttrByPath =
|
setAttrByPath =
|
||||||
attrPath:
|
attrPath: value:
|
||||||
value:
|
|
||||||
let
|
let
|
||||||
len = length attrPath;
|
len = length attrPath;
|
||||||
atDepth = n:
|
atDepth = n: if n == len then value else { ${elemAt attrPath n} = atDepth (n + 1); };
|
||||||
if n == len
|
in
|
||||||
then value
|
atDepth 0;
|
||||||
else { ${elemAt attrPath n} = atDepth (n + 1); };
|
|
||||||
in atDepth 0;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Like `attrByPath`, but without a default value. If it doesn't find the
|
Like `attrByPath`, but without a default value. If it doesn't find the
|
||||||
|
@ -285,7 +304,6 @@ rec {
|
||||||
x.${f p}."example.com" == getAttrByPath [ (f p) "example.com" ] x
|
x.${f p}."example.com" == getAttrByPath [ (f p) "example.com" ] x
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`attrPath`
|
`attrPath`
|
||||||
|
@ -317,14 +335,12 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
getAttrFromPath =
|
getAttrFromPath =
|
||||||
attrPath:
|
attrPath: set:
|
||||||
set:
|
|
||||||
attrByPath attrPath (abort ("cannot find attribute `" + concatStringsSep "." attrPath + "'")) set;
|
attrByPath attrPath (abort ("cannot find attribute `" + concatStringsSep "." attrPath + "'")) set;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Map each attribute in the given set and merge them into a new attribute set.
|
Map each attribute in the given set and merge them into a new attribute set.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`f`
|
`f`
|
||||||
|
@ -357,12 +373,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
concatMapAttrs = f: v:
|
concatMapAttrs = f: v: foldl' mergeAttrs { } (attrValues (mapAttrs f v));
|
||||||
foldl' mergeAttrs { }
|
|
||||||
(attrValues
|
|
||||||
(mapAttrs f v)
|
|
||||||
);
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Update or set specific paths of an attribute set.
|
Update or set specific paths of an attribute set.
|
||||||
|
@ -420,13 +431,15 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
updateManyAttrsByPath = let
|
updateManyAttrsByPath =
|
||||||
|
let
|
||||||
# When recursing into attributes, instead of updating the `path` of each
|
# When recursing into attributes, instead of updating the `path` of each
|
||||||
# update using `tail`, which needs to allocate an entirely new list,
|
# update using `tail`, which needs to allocate an entirely new list,
|
||||||
# we just pass a prefix length to use and make sure to only look at the
|
# we just pass a prefix length to use and make sure to only look at the
|
||||||
# path without the prefix length, so that we can reuse the original list
|
# path without the prefix length, so that we can reuse the original list
|
||||||
# entries.
|
# entries.
|
||||||
go = prefixLength: hasValue: value: updates:
|
go =
|
||||||
|
prefixLength: hasValue: value: updates:
|
||||||
let
|
let
|
||||||
# Splits updates into ones on this level (split.right)
|
# Splits updates into ones on this level (split.right)
|
||||||
# And ones on levels further down (split.wrong)
|
# And ones on levels further down (split.wrong)
|
||||||
|
@ -438,51 +451,58 @@ rec {
|
||||||
# Applies only nested modification to the input value
|
# Applies only nested modification to the input value
|
||||||
withNestedMods =
|
withNestedMods =
|
||||||
# Return the value directly if we don't have any nested modifications
|
# Return the value directly if we don't have any nested modifications
|
||||||
if split.wrong == [] then
|
if split.wrong == [ ] then
|
||||||
if hasValue then value
|
if hasValue then
|
||||||
|
value
|
||||||
else
|
else
|
||||||
# Throw an error if there is no value. This `head` call here is
|
# Throw an error if there is no value. This `head` call here is
|
||||||
# safe, but only in this branch since `go` could only be called
|
# safe, but only in this branch since `go` could only be called
|
||||||
# with `hasValue == false` for nested updates, in which case
|
# with `hasValue == false` for nested updates, in which case
|
||||||
# it's also always called with at least one update
|
# it's also always called with at least one update
|
||||||
let updatePath = (head split.right).path; in
|
let
|
||||||
throw
|
updatePath = (head split.right).path;
|
||||||
( "updateManyAttrsByPath: Path '${showAttrPath updatePath}' does "
|
in
|
||||||
|
throw (
|
||||||
|
"updateManyAttrsByPath: Path '${showAttrPath updatePath}' does "
|
||||||
+ "not exist in the given value, but the first update to this "
|
+ "not exist in the given value, but the first update to this "
|
||||||
+ "path tries to access the existing value.")
|
+ "path tries to access the existing value."
|
||||||
|
)
|
||||||
else
|
else
|
||||||
# If there are nested modifications, try to apply them to the value
|
# If there are nested modifications, try to apply them to the value
|
||||||
if ! hasValue then
|
if !hasValue then
|
||||||
# But if we don't have a value, just use an empty attribute set
|
# But if we don't have a value, just use an empty attribute set
|
||||||
# as the value, but simplify the code a bit
|
# as the value, but simplify the code a bit
|
||||||
mapAttrs (name: go (prefixLength + 1) false null) nested
|
mapAttrs (name: go (prefixLength + 1) false null) nested
|
||||||
else if isAttrs value then
|
else if isAttrs value then
|
||||||
# If we do have a value and it's an attribute set, override it
|
# If we do have a value and it's an attribute set, override it
|
||||||
# with the nested modifications
|
# with the nested modifications
|
||||||
value //
|
value // mapAttrs (name: go (prefixLength + 1) (value ? ${name}) value.${name}) nested
|
||||||
mapAttrs (name: go (prefixLength + 1) (value ? ${name}) value.${name}) nested
|
|
||||||
else
|
else
|
||||||
# However if it's not an attribute set, we can't apply the nested
|
# However if it's not an attribute set, we can't apply the nested
|
||||||
# modifications, throw an error
|
# modifications, throw an error
|
||||||
let updatePath = (head split.wrong).path; in
|
let
|
||||||
throw
|
updatePath = (head split.wrong).path;
|
||||||
( "updateManyAttrsByPath: Path '${showAttrPath updatePath}' needs to "
|
in
|
||||||
|
throw (
|
||||||
|
"updateManyAttrsByPath: Path '${showAttrPath updatePath}' needs to "
|
||||||
+ "be updated, but path '${showAttrPath (take prefixLength updatePath)}' "
|
+ "be updated, but path '${showAttrPath (take prefixLength updatePath)}' "
|
||||||
+ "of the given value is not an attribute set, so we can't "
|
+ "of the given value is not an attribute set, so we can't "
|
||||||
+ "update an attribute inside of it.");
|
+ "update an attribute inside of it."
|
||||||
|
);
|
||||||
|
|
||||||
|
in
|
||||||
# We get the final result by applying all the updates on this level
|
# We get the final result by applying all the updates on this level
|
||||||
# after having applied all the nested updates
|
# after having applied all the nested updates
|
||||||
# We use foldl instead of foldl' so that in case of multiple updates,
|
# We use foldl instead of foldl' so that in case of multiple updates,
|
||||||
# intermediate values aren't evaluated if not needed
|
# intermediate values aren't evaluated if not needed
|
||||||
in foldl (acc: el: el.update acc) withNestedMods split.right;
|
foldl (acc: el: el.update acc) withNestedMods split.right;
|
||||||
|
|
||||||
in updates: value: go 0 true value updates;
|
in
|
||||||
|
updates: value: go 0 true value updates;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Return the specified attributes from a set.
|
Return the specified attributes from a set.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`nameList`
|
`nameList`
|
||||||
|
@ -510,10 +530,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
attrVals =
|
attrVals = nameList: set: map (x: set.${x}) nameList;
|
||||||
nameList:
|
|
||||||
set: map (x: set.${x}) nameList;
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Return the values of all attributes in the given set, sorted by
|
Return the values of all attributes in the given set, sorted by
|
||||||
|
@ -538,12 +555,10 @@ rec {
|
||||||
*/
|
*/
|
||||||
attrValues = builtins.attrValues;
|
attrValues = builtins.attrValues;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Given a set of attribute names, return the set of the corresponding
|
Given a set of attribute names, return the set of the corresponding
|
||||||
attributes from the given set.
|
attributes from the given set.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`names`
|
`names`
|
||||||
|
@ -571,9 +586,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
getAttrs =
|
getAttrs = names: attrs: genAttrs names (name: attrs.${name});
|
||||||
names:
|
|
||||||
attrs: genAttrs names (name: attrs.${name});
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Collect each attribute named `attr` from a list of attribute
|
Collect each attribute named `attr` from a list of attribute
|
||||||
|
@ -608,12 +621,10 @@ rec {
|
||||||
*/
|
*/
|
||||||
catAttrs = builtins.catAttrs;
|
catAttrs = builtins.catAttrs;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Filter an attribute set by removing all attributes for which the
|
Filter an attribute set by removing all attributes for which the
|
||||||
given predicate return false.
|
given predicate return false.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`pred`
|
`pred`
|
||||||
|
@ -642,16 +653,21 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
filterAttrs =
|
filterAttrs =
|
||||||
pred:
|
pred: set:
|
||||||
set:
|
listToAttrs (
|
||||||
listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [(nameValuePair name v)] else []) (attrNames set));
|
concatMap (
|
||||||
|
name:
|
||||||
|
let
|
||||||
|
v = set.${name};
|
||||||
|
in
|
||||||
|
if pred name v then [ (nameValuePair name v) ] else [ ]
|
||||||
|
) (attrNames set)
|
||||||
|
);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Filter an attribute set recursively by removing all attributes for
|
Filter an attribute set recursively by removing all attributes for
|
||||||
which the given predicate return false.
|
which the given predicate return false.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`pred`
|
`pred`
|
||||||
|
@ -680,17 +696,17 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
filterAttrsRecursive =
|
filterAttrsRecursive =
|
||||||
pred:
|
pred: set:
|
||||||
set:
|
|
||||||
listToAttrs (
|
listToAttrs (
|
||||||
concatMap (name:
|
concatMap (
|
||||||
let v = set.${name}; in
|
name:
|
||||||
if pred name v then [
|
let
|
||||||
(nameValuePair name (
|
v = set.${name};
|
||||||
if isAttrs v then filterAttrsRecursive pred v
|
in
|
||||||
else v
|
if pred name v then
|
||||||
))
|
[ (nameValuePair name (if isAttrs v then filterAttrsRecursive pred v else v)) ]
|
||||||
] else []
|
else
|
||||||
|
[ ]
|
||||||
) (attrNames set)
|
) (attrNames set)
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -704,7 +720,6 @@ rec {
|
||||||
There is a completely different function `lib.foldAttrs`
|
There is a completely different function `lib.foldAttrs`
|
||||||
which has nothing to do with this function, despite the similar name.
|
which has nothing to do with this function, despite the similar name.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`f`
|
`f`
|
||||||
|
@ -773,16 +788,13 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
foldlAttrs = f: init: set:
|
foldlAttrs =
|
||||||
foldl'
|
f: init: set:
|
||||||
(acc: name: f acc name set.${name})
|
foldl' (acc: name: f acc name set.${name}) init (attrNames set);
|
||||||
init
|
|
||||||
(attrNames set);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Apply fold functions to values grouped by key.
|
Apply fold functions to values grouped by key.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`op`
|
`op`
|
||||||
|
@ -815,22 +827,16 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
foldAttrs =
|
foldAttrs =
|
||||||
op:
|
op: nul: list_of_attrs:
|
||||||
nul:
|
foldr (
|
||||||
list_of_attrs:
|
n: a: foldr (name: o: o // { ${name} = op n.${name} (a.${name} or nul); }) a (attrNames n)
|
||||||
foldr (n: a:
|
) { } list_of_attrs;
|
||||||
foldr (name: o:
|
|
||||||
o // { ${name} = op n.${name} (a.${name} or nul); }
|
|
||||||
) a (attrNames n)
|
|
||||||
) {} list_of_attrs;
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Recursively collect sets that verify a given predicate named `pred`
|
Recursively collect sets that verify a given predicate named `pred`
|
||||||
from the set `attrs`. The recursion is stopped when the predicate is
|
from the set `attrs`. The recursion is stopped when the predicate is
|
||||||
verified.
|
verified.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`pred`
|
`pred`
|
||||||
|
@ -863,19 +869,17 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
collect =
|
collect =
|
||||||
pred:
|
pred: attrs:
|
||||||
attrs:
|
|
||||||
if pred attrs then
|
if pred attrs then
|
||||||
[ attrs ]
|
[ attrs ]
|
||||||
else if isAttrs attrs then
|
else if isAttrs attrs then
|
||||||
concatMap (collect pred) (attrValues attrs)
|
concatMap (collect pred) (attrValues attrs)
|
||||||
else
|
else
|
||||||
[];
|
[ ];
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Return the cartesian product of attribute set value combinations.
|
Return the cartesian product of attribute set value combinations.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`attrsOfLists`
|
`attrsOfLists`
|
||||||
|
@ -906,12 +910,12 @@ rec {
|
||||||
*/
|
*/
|
||||||
cartesianProduct =
|
cartesianProduct =
|
||||||
attrsOfLists:
|
attrsOfLists:
|
||||||
foldl' (listOfAttrs: attrName:
|
foldl' (
|
||||||
concatMap (attrs:
|
listOfAttrs: attrName:
|
||||||
map (listValue: attrs // { ${attrName} = listValue; }) attrsOfLists.${attrName}
|
concatMap (
|
||||||
|
attrs: map (listValue: attrs // { ${attrName} = listValue; }) attrsOfLists.${attrName}
|
||||||
) listOfAttrs
|
) listOfAttrs
|
||||||
) [{}] (attrNames attrsOfLists);
|
) [ { } ] (attrNames attrsOfLists);
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Return the result of function f applied to the cartesian product of attribute set value combinations.
|
Return the result of function f applied to the cartesian product of attribute set value combinations.
|
||||||
|
@ -943,14 +947,12 @@ rec {
|
||||||
```
|
```
|
||||||
|
|
||||||
:::
|
:::
|
||||||
|
|
||||||
*/
|
*/
|
||||||
mapCartesianProduct = f: attrsOfLists: map f (cartesianProduct attrsOfLists);
|
mapCartesianProduct = f: attrsOfLists: map f (cartesianProduct attrsOfLists);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Utility function that creates a `{name, value}` pair as expected by `builtins.listToAttrs`.
|
Utility function that creates a `{name, value}` pair as expected by `builtins.listToAttrs`.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`name`
|
`name`
|
||||||
|
@ -978,11 +980,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
nameValuePair =
|
nameValuePair = name: value: { inherit name value; };
|
||||||
name:
|
|
||||||
value:
|
|
||||||
{ inherit name value; };
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Apply a function to each element in an attribute set, creating a new attribute set.
|
Apply a function to each element in an attribute set, creating a new attribute set.
|
||||||
|
@ -1017,13 +1015,11 @@ rec {
|
||||||
*/
|
*/
|
||||||
mapAttrs = builtins.mapAttrs;
|
mapAttrs = builtins.mapAttrs;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Like `mapAttrs`, but allows the name of each attribute to be
|
Like `mapAttrs`, but allows the name of each attribute to be
|
||||||
changed in addition to the value. The applied function should
|
changed in addition to the value. The applied function should
|
||||||
return both the new name and value as a `nameValuePair`.
|
return both the new name and value as a `nameValuePair`.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`f`
|
`f`
|
||||||
|
@ -1052,11 +1048,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
mapAttrs' =
|
mapAttrs' = f: set: listToAttrs (map (attr: f attr set.${attr}) (attrNames set));
|
||||||
f:
|
|
||||||
set:
|
|
||||||
listToAttrs (map (attr: f attr set.${attr}) (attrNames set));
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Call a function for each attribute in the given set and return
|
Call a function for each attribute in the given set and return
|
||||||
|
@ -1090,10 +1082,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
mapAttrsToList =
|
mapAttrsToList = f: attrs: map (name: f name attrs.${name}) (attrNames attrs);
|
||||||
f:
|
|
||||||
attrs:
|
|
||||||
map (name: f name attrs.${name}) (attrNames attrs);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Deconstruct an attrset to a list of name-value pairs as expected by [`builtins.listToAttrs`](https://nixos.org/manual/nix/stable/language/builtins.html#builtins-listToAttrs).
|
Deconstruct an attrset to a list of name-value pairs as expected by [`builtins.listToAttrs`](https://nixos.org/manual/nix/stable/language/builtins.html#builtins-listToAttrs).
|
||||||
|
@ -1140,7 +1129,6 @@ rec {
|
||||||
*/
|
*/
|
||||||
attrsToList = mapAttrsToList nameValuePair;
|
attrsToList = mapAttrsToList nameValuePair;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Like `mapAttrs`, except that it recursively applies itself to the *leaf* attributes of a potentially-nested attribute set:
|
Like `mapAttrs`, except that it recursively applies itself to the *leaf* attributes of a potentially-nested attribute set:
|
||||||
the second argument of the function will never be an attrset.
|
the second argument of the function will never be an attrset.
|
||||||
|
@ -1166,11 +1154,7 @@ rec {
|
||||||
mapAttrsRecursive :: ([String] -> a -> b) -> AttrSet -> AttrSet
|
mapAttrsRecursive :: ([String] -> a -> b) -> AttrSet -> AttrSet
|
||||||
```
|
```
|
||||||
*/
|
*/
|
||||||
mapAttrsRecursive =
|
mapAttrsRecursive = f: set: mapAttrsRecursiveCond (as: true) f set;
|
||||||
f:
|
|
||||||
set:
|
|
||||||
mapAttrsRecursiveCond (as: true) f set;
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Like `mapAttrsRecursive`, but it takes an additional predicate that tells it whether to recurse into an attribute set.
|
Like `mapAttrsRecursive`, but it takes an additional predicate that tells it whether to recurse into an attribute set.
|
||||||
|
@ -1196,25 +1180,21 @@ rec {
|
||||||
```
|
```
|
||||||
*/
|
*/
|
||||||
mapAttrsRecursiveCond =
|
mapAttrsRecursiveCond =
|
||||||
cond:
|
cond: f: set:
|
||||||
f:
|
|
||||||
set:
|
|
||||||
let
|
let
|
||||||
recurse = path:
|
recurse =
|
||||||
mapAttrs
|
path:
|
||||||
(name: value:
|
mapAttrs (
|
||||||
if isAttrs value && cond value
|
name: value:
|
||||||
then recurse (path ++ [ name ]) value
|
if isAttrs value && cond value then recurse (path ++ [ name ]) value else f (path ++ [ name ]) value
|
||||||
else f (path ++ [ name ]) value);
|
);
|
||||||
in
|
in
|
||||||
recurse [ ] set;
|
recurse [ ] set;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Generate an attribute set by mapping a function over a list of
|
Generate an attribute set by mapping a function over a list of
|
||||||
attribute names.
|
attribute names.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`names`
|
`names`
|
||||||
|
@ -1242,17 +1222,12 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
genAttrs =
|
genAttrs = names: f: listToAttrs (map (n: nameValuePair n (f n)) names);
|
||||||
names:
|
|
||||||
f:
|
|
||||||
listToAttrs (map (n: nameValuePair n (f n)) names);
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Check whether the argument is a derivation. Any set with
|
Check whether the argument is a derivation. Any set with
|
||||||
`{ type = "derivation"; }` counts as a derivation.
|
`{ type = "derivation"; }` counts as a derivation.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`value`
|
`value`
|
||||||
|
@ -1279,13 +1254,11 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
isDerivation =
|
isDerivation = value: value.type or null == "derivation";
|
||||||
value: value.type or null == "derivation";
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Converts a store path to a fake derivation.
|
Converts a store path to a fake derivation.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`path`
|
`path`
|
||||||
|
@ -1302,22 +1275,21 @@ rec {
|
||||||
path:
|
path:
|
||||||
let
|
let
|
||||||
path' = builtins.storePath path;
|
path' = builtins.storePath path;
|
||||||
res =
|
res = {
|
||||||
{ type = "derivation";
|
type = "derivation";
|
||||||
name = sanitizeDerivationName (builtins.substring 33 (-1) (baseNameOf path'));
|
name = sanitizeDerivationName (builtins.substring 33 (-1) (baseNameOf path'));
|
||||||
outPath = path';
|
outPath = path';
|
||||||
outputs = [ "out" ];
|
outputs = [ "out" ];
|
||||||
out = res;
|
out = res;
|
||||||
outputName = "out";
|
outputName = "out";
|
||||||
};
|
};
|
||||||
in res;
|
in
|
||||||
|
res;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
If `cond` is true, return the attribute set `as`,
|
If `cond` is true, return the attribute set `as`,
|
||||||
otherwise an empty attribute set.
|
otherwise an empty attribute set.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`cond`
|
`cond`
|
||||||
|
@ -1347,17 +1319,12 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
optionalAttrs =
|
optionalAttrs = cond: as: if cond then as else { };
|
||||||
cond:
|
|
||||||
as:
|
|
||||||
if cond then as else {};
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Merge sets of attributes and use the function `f` to merge attributes
|
Merge sets of attributes and use the function `f` to merge attributes
|
||||||
values.
|
values.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`names`
|
`names`
|
||||||
|
@ -1390,14 +1357,13 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
zipAttrsWithNames =
|
zipAttrsWithNames =
|
||||||
names:
|
names: f: sets:
|
||||||
f:
|
listToAttrs (
|
||||||
sets:
|
map (name: {
|
||||||
listToAttrs (map (name: {
|
|
||||||
inherit name;
|
inherit name;
|
||||||
value = f name (catAttrs name sets);
|
value = f name (catAttrs name sets);
|
||||||
}) names);
|
}) names
|
||||||
|
);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Merge sets of attributes and use the function f to merge attribute values.
|
Merge sets of attributes and use the function f to merge attribute values.
|
||||||
|
@ -1428,7 +1394,6 @@ rec {
|
||||||
zipAttrsWith =
|
zipAttrsWith =
|
||||||
builtins.zipAttrsWith or (f: sets: zipAttrsWithNames (concatMap attrNames sets) f sets);
|
builtins.zipAttrsWith or (f: sets: zipAttrsWithNames (concatMap attrNames sets) f sets);
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Merge sets of attributes and combine each attribute value in to a list.
|
Merge sets of attributes and combine each attribute value in to a list.
|
||||||
|
|
||||||
|
@ -1459,7 +1424,6 @@ rec {
|
||||||
The result is the same as `foldl mergeAttrs { }`, but the performance is better for large inputs.
|
The result is the same as `foldl mergeAttrs { }`, but the performance is better for large inputs.
|
||||||
For n list elements, each with an attribute set containing m unique attributes, the complexity of this operation is O(nm log n).
|
For n list elements, each with an attribute set containing m unique attributes, the complexity of this operation is O(nm log n).
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`list`
|
`list`
|
||||||
|
@ -1485,17 +1449,18 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
mergeAttrsList = list:
|
mergeAttrsList =
|
||||||
|
list:
|
||||||
let
|
let
|
||||||
# `binaryMerge start end` merges the elements at indices `index` of `list` such that `start <= index < end`
|
# `binaryMerge start end` merges the elements at indices `index` of `list` such that `start <= index < end`
|
||||||
# Type: Int -> Int -> Attrs
|
# Type: Int -> Int -> Attrs
|
||||||
binaryMerge = start: end:
|
binaryMerge =
|
||||||
|
start: end:
|
||||||
# assert start < end; # Invariant
|
# assert start < end; # Invariant
|
||||||
if end - start >= 2 then
|
if end - start >= 2 then
|
||||||
# If there's at least 2 elements, split the range in two, recurse on each part and merge the result
|
# If there's at least 2 elements, split the range in two, recurse on each part and merge the result
|
||||||
# The invariant is satisfied because each half will have at least 1 element
|
# The invariant is satisfied because each half will have at least 1 element
|
||||||
binaryMerge start (start + (end - start) / 2)
|
binaryMerge start (start + (end - start) / 2) // binaryMerge (start + (end - start) / 2) end
|
||||||
// binaryMerge (start + (end - start) / 2) end
|
|
||||||
else
|
else
|
||||||
# Otherwise there will be exactly 1 element due to the invariant, in which case we just return it directly
|
# Otherwise there will be exactly 1 element due to the invariant, in which case we just return it directly
|
||||||
elemAt list start;
|
elemAt list start;
|
||||||
|
@ -1506,7 +1471,6 @@ rec {
|
||||||
else
|
else
|
||||||
binaryMerge 0 (length list);
|
binaryMerge 0 (length list);
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Does the same as the update operator '//' except that attributes are
|
Does the same as the update operator '//' except that attributes are
|
||||||
merged until the given predicate is verified. The predicate should
|
merged until the given predicate is verified. The predicate should
|
||||||
|
@ -1515,7 +1479,6 @@ rec {
|
||||||
the predicate is satisfied, the value of the first attribute set is
|
the predicate is satisfied, the value of the first attribute set is
|
||||||
replaced by the value of the second attribute set.
|
replaced by the value of the second attribute set.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`pred`
|
`pred`
|
||||||
|
@ -1564,20 +1527,25 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
recursiveUpdateUntil =
|
recursiveUpdateUntil =
|
||||||
pred:
|
pred: lhs: rhs:
|
||||||
lhs:
|
let
|
||||||
rhs:
|
f =
|
||||||
let f = attrPath:
|
attrPath:
|
||||||
zipAttrsWith (n: values:
|
zipAttrsWith (
|
||||||
let here = attrPath ++ [n]; in
|
n: values:
|
||||||
if length values == 1
|
let
|
||||||
|| pred here (elemAt values 1) (head values) then
|
here = attrPath ++ [ n ];
|
||||||
|
in
|
||||||
|
if length values == 1 || pred here (elemAt values 1) (head values) then
|
||||||
head values
|
head values
|
||||||
else
|
else
|
||||||
f here values
|
f here values
|
||||||
);
|
);
|
||||||
in f [] [rhs lhs];
|
in
|
||||||
|
f [ ] [
|
||||||
|
rhs
|
||||||
|
lhs
|
||||||
|
];
|
||||||
|
|
||||||
/**
|
/**
|
||||||
A recursive variant of the update operator ‘//’. The recursion
|
A recursive variant of the update operator ‘//’. The recursion
|
||||||
|
@ -1585,7 +1553,6 @@ rec {
|
||||||
in which case the right hand side value takes precedence over the
|
in which case the right hand side value takes precedence over the
|
||||||
left hand side value.
|
left hand side value.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`lhs`
|
`lhs`
|
||||||
|
@ -1623,17 +1590,17 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
recursiveUpdate =
|
recursiveUpdate =
|
||||||
lhs:
|
lhs: rhs:
|
||||||
rhs:
|
recursiveUpdateUntil (
|
||||||
recursiveUpdateUntil (path: lhs: rhs: !(isAttrs lhs && isAttrs rhs)) lhs rhs;
|
path: lhs: rhs:
|
||||||
|
!(isAttrs lhs && isAttrs rhs)
|
||||||
|
) lhs rhs;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Recurse into every attribute set of the first argument and check that:
|
Recurse into every attribute set of the first argument and check that:
|
||||||
- Each attribute path also exists in the second argument.
|
- Each attribute path also exists in the second argument.
|
||||||
- If the attribute's value is not a nested attribute set, it must have the same value in the right argument.
|
- If the attribute's value is not a nested attribute set, it must have the same value in the right argument.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`pattern`
|
`pattern`
|
||||||
|
@ -1662,30 +1629,27 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
matchAttrs =
|
matchAttrs =
|
||||||
pattern:
|
pattern: attrs:
|
||||||
attrs:
|
|
||||||
assert isAttrs pattern;
|
assert isAttrs pattern;
|
||||||
all
|
all (
|
||||||
( # Compare equality between `pattern` & `attrs`.
|
# Compare equality between `pattern` & `attrs`.
|
||||||
attr:
|
attr:
|
||||||
# Missing attr, not equal.
|
# Missing attr, not equal.
|
||||||
attrs ? ${attr} && (
|
attrs ? ${attr}
|
||||||
|
&& (
|
||||||
let
|
let
|
||||||
lhs = pattern.${attr};
|
lhs = pattern.${attr};
|
||||||
rhs = attrs.${attr};
|
rhs = attrs.${attr};
|
||||||
in
|
in
|
||||||
# If attrset check recursively
|
# If attrset check recursively
|
||||||
if isAttrs lhs then isAttrs rhs && matchAttrs lhs rhs
|
if isAttrs lhs then isAttrs rhs && matchAttrs lhs rhs else lhs == rhs
|
||||||
else lhs == rhs
|
|
||||||
)
|
)
|
||||||
)
|
) (attrNames pattern);
|
||||||
(attrNames pattern);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Override only the attributes that are already present in the old set
|
Override only the attributes that are already present in the old set
|
||||||
useful for deep-overriding.
|
useful for deep-overriding.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`old`
|
`old`
|
||||||
|
@ -1717,11 +1681,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
overrideExisting =
|
overrideExisting = old: new: mapAttrs (name: value: new.${name} or value) old;
|
||||||
old:
|
|
||||||
new:
|
|
||||||
mapAttrs (name: value: new.${name} or value) old;
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Turns a list of strings into a human-readable description of those
|
Turns a list of strings into a human-readable description of those
|
||||||
|
@ -1729,7 +1689,6 @@ rec {
|
||||||
not intended to be machine-readable.
|
not intended to be machine-readable.
|
||||||
Create a new attribute set with `value` set at the nested attribute location specified in `attrPath`.
|
Create a new attribute set with `value` set at the nested attribute location specified in `attrPath`.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`path`
|
`path`
|
||||||
|
@ -1757,15 +1716,12 @@ rec {
|
||||||
*/
|
*/
|
||||||
showAttrPath =
|
showAttrPath =
|
||||||
path:
|
path:
|
||||||
if path == [] then "<root attribute path>"
|
if path == [ ] then "<root attribute path>" else concatMapStringsSep "." escapeNixIdentifier path;
|
||||||
else concatMapStringsSep "." escapeNixIdentifier path;
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Get a package output.
|
Get a package output.
|
||||||
If no output is found, fallback to `.out` and then to the default.
|
If no output is found, fallback to `.out` and then to the default.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`output`
|
`output`
|
||||||
|
@ -1793,10 +1749,9 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
getOutput = output: pkg:
|
getOutput =
|
||||||
if ! pkg ? outputSpecified || ! pkg.outputSpecified
|
output: pkg:
|
||||||
then pkg.${output} or pkg.out or pkg
|
if !pkg ? outputSpecified || !pkg.outputSpecified then pkg.${output} or pkg.out or pkg else pkg;
|
||||||
else pkg;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Get a package's `bin` output.
|
Get a package's `bin` output.
|
||||||
|
@ -1827,7 +1782,6 @@ rec {
|
||||||
*/
|
*/
|
||||||
getBin = getOutput "bin";
|
getBin = getOutput "bin";
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Get a package's `lib` output.
|
Get a package's `lib` output.
|
||||||
If the output does not exist, fallback to `.out` and then to the default.
|
If the output does not exist, fallback to `.out` and then to the default.
|
||||||
|
@ -1857,7 +1811,6 @@ rec {
|
||||||
*/
|
*/
|
||||||
getLib = getOutput "lib";
|
getLib = getOutput "lib";
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Get a package's `dev` output.
|
Get a package's `dev` output.
|
||||||
If the output does not exist, fallback to `.out` and then to the default.
|
If the output does not exist, fallback to `.out` and then to the default.
|
||||||
|
@ -1887,7 +1840,6 @@ rec {
|
||||||
*/
|
*/
|
||||||
getDev = getOutput "dev";
|
getDev = getOutput "dev";
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Get a package's `man` output.
|
Get a package's `man` output.
|
||||||
If the output does not exist, fallback to `.out` and then to the default.
|
If the output does not exist, fallback to `.out` and then to the default.
|
||||||
|
@ -1941,7 +1893,6 @@ rec {
|
||||||
This function only affects a single attribute set; it does not
|
This function only affects a single attribute set; it does not
|
||||||
apply itself recursively for nested attribute sets.
|
apply itself recursively for nested attribute sets.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`attrs`
|
`attrs`
|
||||||
|
@ -1969,14 +1920,11 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
recurseIntoAttrs =
|
recurseIntoAttrs = attrs: attrs // { recurseForDerivations = true; };
|
||||||
attrs:
|
|
||||||
attrs // { recurseForDerivations = true; };
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Undo the effect of recurseIntoAttrs.
|
Undo the effect of recurseIntoAttrs.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`attrs`
|
`attrs`
|
||||||
|
@ -1989,9 +1937,7 @@ rec {
|
||||||
dontRecurseIntoAttrs :: AttrSet -> AttrSet
|
dontRecurseIntoAttrs :: AttrSet -> AttrSet
|
||||||
```
|
```
|
||||||
*/
|
*/
|
||||||
dontRecurseIntoAttrs =
|
dontRecurseIntoAttrs = attrs: attrs // { recurseForDerivations = false; };
|
||||||
attrs:
|
|
||||||
attrs // { recurseForDerivations = false; };
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
`unionOfDisjoint x y` is equal to `x // y // z` where the
|
`unionOfDisjoint x y` is equal to `x // y // z` where the
|
||||||
|
@ -1999,7 +1945,6 @@ rec {
|
||||||
`y`, and all values `assert` with an error message. This
|
`y`, and all values `assert` with an error message. This
|
||||||
operator is commutative, unlike (//).
|
operator is commutative, unlike (//).
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`x`
|
`x`
|
||||||
|
@ -2016,25 +1961,25 @@ rec {
|
||||||
unionOfDisjoint :: AttrSet -> AttrSet -> AttrSet
|
unionOfDisjoint :: AttrSet -> AttrSet -> AttrSet
|
||||||
```
|
```
|
||||||
*/
|
*/
|
||||||
unionOfDisjoint = x: y:
|
unionOfDisjoint =
|
||||||
|
x: y:
|
||||||
let
|
let
|
||||||
intersection = builtins.intersectAttrs x y;
|
intersection = builtins.intersectAttrs x y;
|
||||||
collisions = lib.concatStringsSep " " (builtins.attrNames intersection);
|
collisions = lib.concatStringsSep " " (builtins.attrNames intersection);
|
||||||
mask = builtins.mapAttrs (name: value: builtins.throw
|
mask = builtins.mapAttrs (
|
||||||
"unionOfDisjoint: collision on ${name}; complete list: ${collisions}")
|
name: value: builtins.throw "unionOfDisjoint: collision on ${name}; complete list: ${collisions}"
|
||||||
intersection;
|
) intersection;
|
||||||
in
|
in
|
||||||
(x // y) // mask;
|
(x // y) // mask;
|
||||||
|
|
||||||
# DEPRECATED
|
# DEPRECATED
|
||||||
zipWithNames = warn
|
zipWithNames = warn "lib.zipWithNames is a deprecated alias of lib.zipAttrsWithNames." zipAttrsWithNames;
|
||||||
"lib.zipWithNames is a deprecated alias of lib.zipAttrsWithNames." zipAttrsWithNames;
|
|
||||||
|
|
||||||
# DEPRECATED
|
# DEPRECATED
|
||||||
zip = warn
|
zip = warn "lib.zip is a deprecated alias of lib.zipAttrsWith." zipAttrsWith;
|
||||||
"lib.zip is a deprecated alias of lib.zipAttrsWith." zipAttrsWith;
|
|
||||||
|
|
||||||
# DEPRECATED
|
# DEPRECATED
|
||||||
cartesianProductOfSets = warnIf (isInOldestRelease 2405)
|
cartesianProductOfSets = warnIf (isInOldestRelease
|
||||||
"lib.cartesianProductOfSets is a deprecated alias of lib.cartesianProduct." cartesianProduct;
|
2405
|
||||||
|
) "lib.cartesianProductOfSets is a deprecated alias of lib.cartesianProduct." cartesianProduct;
|
||||||
}
|
}
|
||||||
|
|
lib/cli.nix (36 changed lines)
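The lib/cli.nix hunks that follow are formatting-only; as a reminder of what `lib.cli.toGNUCommandLine` does (behaviour unchanged), roughly:

```nix
lib.cli.toGNUCommandLine { } {
  data = builtins.toJSON { id = 0; };
  X = "PUT";        # single-character names become short options
  retry = 3;
  url = [ "https://example.com/foo" "https://example.com/bar" ];  # lists repeat the option
  silent = false;   # false booleans are dropped
  verbose = true;   # true booleans become bare flags
}
# => [ "-X" "PUT" "--data" "{\"id\":0}" "--retry" "3"
#      "--url" "https://example.com/foo" "--url" "https://example.com/bar" "--verbose" ]
```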
|
@ -11,7 +11,6 @@ rec {
|
||||||
|
|
||||||
`toGNUCommandLineShell` returns an escaped shell string.
|
`toGNUCommandLineShell` returns an escaped shell string.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`options`
|
`options`
|
||||||
|
@ -22,7 +21,6 @@ rec {
|
||||||
|
|
||||||
: 2\. Function argument
|
: 2\. Function argument
|
||||||
|
|
||||||
|
|
||||||
# Examples
|
# Examples
|
||||||
:::{.example}
|
:::{.example}
|
||||||
## `lib.cli.toGNUCommandLineShell` usage example
|
## `lib.cli.toGNUCommandLineShell` usage example
|
||||||
|
@ -60,17 +58,14 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
toGNUCommandLineShell =
|
toGNUCommandLineShell = options: attrs: lib.escapeShellArgs (toGNUCommandLine options attrs);
|
||||||
options: attrs: lib.escapeShellArgs (toGNUCommandLine options attrs);
|
|
||||||
|
|
||||||
toGNUCommandLine = {
|
toGNUCommandLine =
|
||||||
|
{
|
||||||
# how to string-format the option name;
|
# how to string-format the option name;
|
||||||
# by default one character is a short option (`-`),
|
# by default one character is a short option (`-`),
|
||||||
# more than one characters a long option (`--`).
|
# more than one characters a long option (`--`).
|
||||||
mkOptionName ?
|
mkOptionName ? k: if builtins.stringLength k == 1 then "-${k}" else "--${k}",
|
||||||
k: if builtins.stringLength k == 1
|
|
||||||
then "-${k}"
|
|
||||||
else "--${k}",
|
|
||||||
|
|
||||||
# how to format a boolean value to a command list;
|
# how to format a boolean value to a command list;
|
||||||
# by default it’s a flag option
|
# by default it’s a flag option
|
||||||
|
@ -88,16 +83,25 @@ rec {
|
||||||
# By default, everything is printed verbatim and complex types
|
# By default, everything is printed verbatim and complex types
|
||||||
# are forbidden (lists, attrsets, functions). `null` values are omitted.
|
# are forbidden (lists, attrsets, functions). `null` values are omitted.
|
||||||
mkOption ?
|
mkOption ?
|
||||||
k: v: if v == null
|
k: v:
|
||||||
then []
|
if v == null then
|
||||||
else [ (mkOptionName k) (lib.generators.mkValueStringDefault {} v) ]
|
[ ]
|
||||||
|
else
|
||||||
|
[
|
||||||
|
(mkOptionName k)
|
||||||
|
(lib.generators.mkValueStringDefault { } v)
|
||||||
|
],
|
||||||
}:
|
}:
|
||||||
options:
|
options:
|
||||||
let
|
let
|
||||||
render = k: v:
|
render =
|
||||||
if builtins.isBool v then mkBool k v
|
k: v:
|
||||||
else if builtins.isList v then mkList k v
|
if builtins.isBool v then
|
||||||
else mkOption k v;
|
mkBool k v
|
||||||
|
else if builtins.isList v then
|
||||||
|
mkList k v
|
||||||
|
else
|
||||||
|
mkOption k v;
|
||||||
|
|
||||||
in
|
in
|
||||||
builtins.concatLists (lib.mapAttrsToList render options);
|
builtins.concatLists (lib.mapAttrsToList render options);
|
||||||
|
|
|
@ -1,20 +1,42 @@
|
||||||
{ lib }:
|
{ lib }:
|
||||||
|
|
||||||
let
|
let
|
||||||
inherit (builtins)
|
inherit (builtins) intersectAttrs;
|
||||||
intersectAttrs;
|
|
||||||
inherit (lib)
|
inherit (lib)
|
||||||
functionArgs isFunction mirrorFunctionArgs isAttrs setFunctionArgs
|
functionArgs
|
||||||
optionalAttrs attrNames filter elemAt concatStringsSep sortOn take length
|
isFunction
|
||||||
filterAttrs optionalString flip pathIsDirectory head pipe isDerivation listToAttrs
|
mirrorFunctionArgs
|
||||||
mapAttrs seq flatten deepSeq warnIf isInOldestRelease extends
|
isAttrs
|
||||||
|
setFunctionArgs
|
||||||
|
optionalAttrs
|
||||||
|
attrNames
|
||||||
|
filter
|
||||||
|
elemAt
|
||||||
|
concatStringsSep
|
||||||
|
sortOn
|
||||||
|
take
|
||||||
|
length
|
||||||
|
filterAttrs
|
||||||
|
optionalString
|
||||||
|
flip
|
||||||
|
pathIsDirectory
|
||||||
|
head
|
||||||
|
pipe
|
||||||
|
isDerivation
|
||||||
|
listToAttrs
|
||||||
|
mapAttrs
|
||||||
|
seq
|
||||||
|
flatten
|
||||||
|
deepSeq
|
||||||
|
warnIf
|
||||||
|
isInOldestRelease
|
||||||
|
extends
|
||||||
;
|
;
|
||||||
inherit (lib.strings) levenshtein levenshteinAtMost;
|
inherit (lib.strings) levenshtein levenshteinAtMost;
|
||||||
|
|
||||||
in
|
in
|
||||||
rec {
|
rec {
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
`overrideDerivation drv f` takes a derivation (i.e., the result
|
`overrideDerivation drv f` takes a derivation (i.e., the result
|
||||||
of a call to the builtin function `derivation`) and returns a new
|
of a call to the builtin function `derivation`) and returns a new
|
||||||
|
@ -40,7 +62,6 @@ rec {
|
||||||
You should in general prefer `drv.overrideAttrs` over this function;
|
You should in general prefer `drv.overrideAttrs` over this function;
|
||||||
see the nixpkgs manual for more information on overriding.
|
see the nixpkgs manual for more information on overriding.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`drv`
|
`drv`
|
||||||
|
@ -74,20 +95,21 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
overrideDerivation = drv: f:
|
overrideDerivation =
|
||||||
|
drv: f:
|
||||||
let
|
let
|
||||||
newDrv = derivation (drv.drvAttrs // (f drv));
|
newDrv = derivation (drv.drvAttrs // (f drv));
|
||||||
in flip (extendDerivation (seq drv.drvPath true)) newDrv (
|
in
|
||||||
{ meta = drv.meta or {};
|
flip (extendDerivation (seq drv.drvPath true)) newDrv (
|
||||||
passthru = if drv ? passthru then drv.passthru else {};
|
{
|
||||||
|
meta = drv.meta or { };
|
||||||
|
passthru = if drv ? passthru then drv.passthru else { };
|
||||||
}
|
}
|
||||||
//
|
// (drv.passthru or { })
|
||||||
(drv.passthru or {})
|
// optionalAttrs (drv ? __spliced) {
|
||||||
//
|
__spliced = { } // (mapAttrs (_: sDrv: overrideDerivation sDrv f) drv.__spliced);
|
||||||
optionalAttrs (drv ? __spliced) {
|
}
|
||||||
__spliced = {} // (mapAttrs (_: sDrv: overrideDerivation sDrv f) drv.__spliced);
|
);
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
`makeOverridable` takes a function from attribute set to attribute set and
|
`makeOverridable` takes a function from attribute set to attribute set and
|
||||||
|
@ -97,7 +119,6 @@ rec {
|
||||||
Please refer to documentation on [`<pkg>.overrideDerivation`](#sec-pkg-overrideDerivation) to learn about `overrideDerivation` and caveats
|
Please refer to documentation on [`<pkg>.overrideDerivation`](#sec-pkg-overrideDerivation) to learn about `overrideDerivation` and caveats
|
||||||
related to its use.
|
related to its use.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`f`
|
`f`
|
||||||
|
@ -128,12 +149,14 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
makeOverridable = f:
|
makeOverridable =
|
||||||
|
f:
|
||||||
let
|
let
|
||||||
# Creates a functor with the same arguments as f
|
# Creates a functor with the same arguments as f
|
||||||
mirrorArgs = mirrorFunctionArgs f;
|
mirrorArgs = mirrorFunctionArgs f;
|
||||||
in
|
in
|
||||||
mirrorArgs (origArgs:
|
mirrorArgs (
|
||||||
|
origArgs:
|
||||||
let
|
let
|
||||||
result = f origArgs;
|
result = f origArgs;
|
||||||
|
|
||||||
|
@ -146,19 +169,19 @@ rec {
|
||||||
overrideResult = g: makeOverridable (mirrorArgs (args: g (f args))) origArgs;
|
overrideResult = g: makeOverridable (mirrorArgs (args: g (f args))) origArgs;
|
||||||
in
|
in
|
||||||
if isAttrs result then
|
if isAttrs result then
|
||||||
result // {
|
result
|
||||||
|
// {
|
||||||
override = overrideArgs;
|
override = overrideArgs;
|
||||||
overrideDerivation = fdrv: overrideResult (x: overrideDerivation x fdrv);
|
overrideDerivation = fdrv: overrideResult (x: overrideDerivation x fdrv);
|
||||||
${if result ? overrideAttrs then "overrideAttrs" else null} = fdrv:
|
${if result ? overrideAttrs then "overrideAttrs" else null} =
|
||||||
overrideResult (x: x.overrideAttrs fdrv);
|
fdrv: overrideResult (x: x.overrideAttrs fdrv);
|
||||||
}
|
}
|
||||||
else if isFunction result then
|
else if isFunction result then
|
||||||
# Transform the result into a functor while propagating its arguments
      # Transform the result into a functor while propagating its arguments
      setFunctionArgs result (functionArgs result) // { override = overrideArgs; }
    else
      result
  );

  /**
    Call the package function in the file `fn` with the required
@@ -188,7 +211,6 @@ rec {
    <!-- TODO: Apply "Example:" tag to the examples above -->

    # Inputs

    `autoArgs`
@@ -209,7 +231,8 @@ rec {
    callPackageWith :: AttrSet -> ((AttrSet -> a) | Path) -> AttrSet -> a
    ```
  */
  callPackageWith =
    autoArgs: fn: args:
    let
      f = if isFunction fn then fn else import fn;
      fargs = functionArgs f;
@@ -222,12 +245,16 @@ rec {
      # wouldn't be passed to it
      missingArgs =
        # Filter out arguments that have a default value
        (
          filterAttrs (name: value: !value)
            # Filter out arguments that would be passed
            (removeAttrs fargs (attrNames allArgs))
        );

      # Get a list of suggested argument names for a given missing one
      getSuggestions =
        arg:
        pipe (autoArgs // args) [
          attrNames
          # Only use ones that are at most 2 edits away. While mork would work,
          # levenshteinAtMost is only fast for 2 or less.
@@ -240,41 +267,50 @@ rec {
          (map (x: "\"" + x + "\""))
        ];

      prettySuggestions =
        suggestions:
        if suggestions == [ ] then
          ""
        else if length suggestions == 1 then
          ", did you mean ${elemAt suggestions 0}?"
        else
          ", did you mean ${concatStringsSep ", " (lib.init suggestions)} or ${lib.last suggestions}?";

      errorForArg =
        arg:
        let
          loc = builtins.unsafeGetAttrPos arg fargs;
          # loc' can be removed once lib/minver.nix is >2.3.4, since that includes
          # https://github.com/NixOS/nix/pull/3468 which makes loc be non-null
          loc' =
            if loc != null then
              loc.file + ":" + toString loc.line
            else if !isFunction fn then
              toString fn + optionalString (pathIsDirectory fn) "/default.nix"
            else
              "<unknown location>";
        in
        "Function called without required argument \"${arg}\" at "
        + "${loc'}${prettySuggestions (getSuggestions arg)}";

      # Only show the error for the first missing argument
      error = errorForArg (head (attrNames missingArgs));

    in
    if missingArgs == { } then
      makeOverridable f allArgs
    # This needs to be an abort so it can't be caught with `builtins.tryEval`,
    # which is used by nix-env and ofborg to filter out packages that don't evaluate.
    # This way we're forced to fix such errors in Nixpkgs,
    # which is especially relevant with allowAliases = false
    else
      abort "lib.customisation.callPackageWith: ${error}";
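A rough sketch of how `callPackageWith` is used; the package function, its arguments, and the resulting values below are made up for illustration and assume `lib` is this repository's library:

let
  lib = import ./lib;
  # hypothetical "package" function; real ones normally live in their own file
  f = { greeting ? "hello", name }: { text = "${greeting}, ${name}!"; };
  callPackage = lib.callPackageWith { name = "world"; };
  pkg = callPackage f { };
in
[ pkg.text (pkg.override { greeting = "hi"; }).text ]
# => [ "hello, world!" "hi, world!" ]

Missing required arguments abort evaluation with the "Function called without required argument" message built above, including a did-you-mean suggestion.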
  /**
    Like callPackage, but for a function that returns an attribute
    set of derivations. The override function is added to the
    individual attributes.


    # Inputs

    `autoArgs`
@@ -295,7 +331,8 @@ rec {
    callPackagesWith :: AttrSet -> ((AttrSet -> AttrSet) | Path) -> AttrSet -> AttrSet
    ```
  */
  callPackagesWith =
    autoArgs: fn: args:
    let
      f = if isFunction fn then fn else import fn;
      auto = intersectAttrs (functionArgs f) autoArgs;
@@ -304,18 +341,19 @@ rec {
      pkgs = f origArgs;
      mkAttrOverridable = name: _: makeOverridable (mirrorArgs (newArgs: (f newArgs).${name})) origArgs;
    in
    if isDerivation pkgs then
      throw (
        "function `callPackages` was called on a *single* derivation "
        + ''"${pkgs.name or "<unknown-name>"}";''
        + " did you mean to use `callPackage` instead?"
      )
    else
      mapAttrs mkAttrOverridable pkgs;
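A minimal sketch of `callPackagesWith`, with invented names, showing that each attribute of the returned set gets its own `override`:

let
  lib = import ./lib;
  multi = { prefix }: {
    foo = { text = "${prefix}-foo"; };
    bar = { text = "${prefix}-bar"; };
  };
  pkgs = lib.callPackagesWith { prefix = "demo"; } multi { };
in
[ pkgs.foo.text (pkgs.bar.override { prefix = "other"; }).text ]
# => [ "demo-foo" "other-bar" ]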
  /**
    Add attributes to each output of a derivation without changing
    the derivation itself and check a given condition when evaluating.


    # Inputs

    `condition`
@@ -336,34 +374,48 @@ rec {
    extendDerivation :: Bool -> Any -> Derivation -> Derivation
    ```
  */
  extendDerivation =
    condition: passthru: drv:
    let
      outputs = drv.outputs or [ "out" ];

      commonAttrs =
        drv // (listToAttrs outputsList) // ({ all = map (x: x.value) outputsList; }) // passthru;

      outputToAttrListElement = outputName: {
        name = outputName;
        value =
          commonAttrs
          // {
            inherit (drv.${outputName}) type outputName;
            outputSpecified = true;
            drvPath =
              assert condition;
              drv.${outputName}.drvPath;
            outPath =
              assert condition;
              drv.${outputName}.outPath;
          }
          //
            # TODO: give the derivation control over the outputs.
            # `overrideAttrs` may not be the only attribute that needs
            # updating when switching outputs.
            optionalAttrs (passthru ? overrideAttrs) {
              # TODO: also add overrideAttrs when overrideAttrs is not custom, e.g. when not splicing.
              overrideAttrs = f: (passthru.overrideAttrs f).${outputName};
            };
      };

      outputsList = map outputToAttrListElement outputs;
    in
    commonAttrs
    // {
      drvPath =
        assert condition;
        drv.drvPath;
      outPath =
        assert condition;
        drv.outPath;
    };
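A short sketch of attaching extra passthru attributes with `extendDerivation`; `pkgs.hello` and `./update.sh` are placeholders, not part of this repository:

let
  lib = import ./lib;
  # keep the derivation itself unchanged, but expose an extra attribute on it
  hello' = lib.extendDerivation true { updateScript = ./update.sh; } pkgs.hello;
in
hello'.updateScript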
  /**
@@ -372,7 +424,6 @@ rec {
    result to ensure that there are no thunks kept alive to prevent
    garbage collection.


    # Inputs

    `drv`
@@ -385,21 +436,29 @@ rec {
    hydraJob :: (Derivation | Null) -> (Derivation | Null)
    ```
  */
  hydraJob =
    drv:
    let
      outputs = drv.outputs or [ "out" ];

      commonAttrs =
        {
          inherit (drv) name system meta;
          inherit outputs;
        }
        // optionalAttrs (drv._hydraAggregate or false) {
          _hydraAggregate = true;
          constituents = map hydraJob (flatten drv.constituents);
        }
        // (listToAttrs outputsList);

      makeOutput =
        outputName:
        let
          output = drv.${outputName};
        in
        {
          name = outputName;
          value = commonAttrs // {
            outPath = output.outPath;
            drvPath = output.drvPath;
@@ -411,8 +470,8 @@ rec {
      outputsList = map makeOutput outputs;

      drv' = (head outputsList).value;
    in
    if drv == null then null else deepSeq drv' drv';
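Typical use is exposing a package as a CI job; a sketch, assuming some package set `pkgs` is in scope:

let
  lib = import ./lib;
in
{
  # strips the derivation down to the attributes Hydra needs and deep-evaluates them
  hydraJobs.hello.x86_64-linux = lib.hydraJob pkgs.hello;
}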
  /**
    Make an attribute set (a "scope") from functions that take arguments from that same attribute set.
@@ -538,23 +597,27 @@ rec {
    makeScope :: (AttrSet -> ((AttrSet -> a) | Path) -> AttrSet -> a) -> (AttrSet -> AttrSet) -> scope
    ```
  */
  makeScope =
    newScope: f:
    let
      self = f self // {
        newScope = scope: newScope (self // scope);
        callPackage = self.newScope { };
        overrideScope = g: makeScope newScope (extends g f);
        # Remove after 24.11 is released.
        overrideScope' =
          g:
          warnIf (isInOldestRelease 2311)
            "`overrideScope'` (from `lib.makeScope`) has been renamed to `overrideScope`."
            (makeScope newScope (extends g f));
        packages = f;
      };
    in
    self;
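A minimal sketch of a scope whose members resolve their arguments from the scope itself; the member names here are made up and `lib` is this repository's library:

let
  lib = import ./lib;
  myScope = lib.makeScope lib.callPackageWith (self: {
    greeting = "hello";
    greeter = self.callPackage ({ greeting }: "${greeting}, scope!") { };
  });
in
myScope.greeter
# => "hello, scope!"

`myScope.overrideScope` can then layer an overlay on top of `f` and rebuild the whole scope.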
  /**
    backward compatibility with old uncurried form; deprecated


    # Inputs

    `splicePackages`
@@ -583,9 +646,14 @@ rec {
  */
  makeScopeWithSplicing =
    splicePackages: newScope: otherSplices: keep: extra: f:
    makeScopeWithSplicing' { inherit splicePackages newScope; } {
      inherit
        otherSplices
        keep
        extra
        f
        ;
    };
  /**
    Like makeScope, but aims to support cross compilation. It's still ugly, but
@@ -612,14 +680,13 @@ rec {
    ```
  */
  makeScopeWithSplicing' =
    { splicePackages, newScope }:
    {
      otherSplices,
      # Attrs from `self` which won't be spliced.
      # Avoid using keep, it's only used for a python hook workaround, added in PR #104201.
      # ex: `keep = (self: { inherit (self) aAttr; })`
      keep ? (_self: { }),
      # Additional attrs to add to the sets `callPackage`.
      # When the package is from a subset (but not a subset within a package IS #211340)
      # within `spliced0` it will be spliced.
@@ -634,8 +701,8 @@ rec {
      # nix-repl> darwin.callPackage ({ CoreFoundation }: CoreFoundation) { }
      # «derivation ...CoreFoundation-11.0.0.drv»
      # ```
      extra ? (_spliced0: { }),
      f,
    }:
    let
      spliced0 = splicePackages {
@@ -652,13 +719,15 @@ rec {
      callPackage = newScope spliced; # == self.newScope {};
      # N.B. the other stages of the package set spliced in are *not*
      # overridden.
      overrideScope =
        g:
        (makeScopeWithSplicing' { inherit splicePackages newScope; } {
          inherit otherSplices keep extra;
          f = extends g f;
        });
      packages = f;
    };
    in
    self;
}
150  lib/debug.nix
@@ -1,4 +1,5 @@
/*
  Collection of functions useful for debugging
  broken nix expressions.

  * `trace`-like functions take two values, print
@@ -25,14 +26,16 @@ let
    generators
    id
    mapAttrs
    trace
    ;
in

rec {

  # -- TRACING --

  /*
    Conditionally trace the supplied message, based on a predicate.

    Type: traceIf :: bool -> string -> a -> a
@@ -47,9 +50,11 @@ rec {
    # Message that should be traced
    msg:
    # Value to return
    x:
    if pred then trace msg x else x;

  /*
    Trace the supplied value after applying a function to it, and
    return the original value.

    Type: traceValFn :: (a -> b) -> a -> a
@@ -63,9 +68,11 @@ rec {
    # Function to apply
    f:
    # Value to trace and return
    x:
    trace (f x) x;

  /*
    Trace the supplied value and return it.

    Type: traceVal :: a -> a
@@ -76,7 +83,8 @@ rec {
  */
  traceVal = traceValFn id;

  /*
    `builtins.trace`, but the value is `builtins.deepSeq`ed first.

    Type: traceSeq :: a -> b -> b
@@ -92,9 +100,11 @@ rec {
    # The value to trace
    x:
    # The value to return
    y:
    trace (builtins.deepSeq x x) y;

  /*
    Like `traceSeq`, but only evaluate down to depth n.
    This is very useful because lots of `traceSeq` usages
    lead to an infinite recursion.
@@ -105,20 +115,36 @@ rec {
    Type: traceSeqN :: Int -> a -> b -> b
  */
  traceSeqN =
    depth: x: y:
    let
      snip =
        v:
        if isList v then
          noQuotes "[…]" v
        else if isAttrs v then
          noQuotes "{…}" v
        else
          v;
      noQuotes = str: v: {
        __pretty = const str;
        val = v;
      };
      modify =
        n: fn: v:
        if (n == 0) then
          fn v
        else if isList v then
          map (modify (n - 1) fn) v
        else if isAttrs v then
          mapAttrs (const (modify (n - 1) fn)) v
        else
          v;
    in
    trace (generators.toPretty { allowPrettyValues = true; } (modify depth snip x)) y;
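A quick sketch of the difference between `traceSeq` and `traceSeqN`; the attribute set is made up and the traced output shown in the comments is approximate:

let
  lib = import ./lib;
  value = { a = { b = { c = 3; }; }; };
in
# lib.traceSeq value "done"    would print the fully evaluated structure,
#   trace: { a = { b = { c = 3; }; }; }
# while limiting the depth snips everything below level one:
#   trace: { a = {…}; }
lib.traceSeqN 1 value "done"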
  /*
    A combination of `traceVal` and `traceSeq` that applies a
    provided function to the value to be traced after `deepSeq`ing
    it.
  */
@@ -126,24 +152,28 @@ rec {
    # Function to apply
    f:
    # Value to trace
    v:
    traceValFn f (builtins.deepSeq v v);

  # A combination of `traceVal` and `traceSeq`.
  traceValSeq = traceValSeqFn id;

  /*
    A combination of `traceVal` and `traceSeqN` that applies a
    provided function to the value to be traced.
  */
  traceValSeqNFn =
    # Function to apply
    f: depth:
    # Value to trace
    v:
    traceSeqN depth (f v) v;

  # A combination of `traceVal` and `traceSeqN`.
  traceValSeqN = traceValSeqNFn id;

  /*
    Trace the input and output of a function `f` named `name`,
    both down to `depth`.

    This is useful for adding around a function call,
@@ -154,21 +184,21 @@ rec {
    trace: { fn = "id"; from = { a.b = {…}; }; to = { a.b = {…}; }; }
    => { a.b.c = 3; }
  */
  traceFnSeqN =
    depth: name: f: v:
    let
      res = f v;
    in
    lib.traceSeqN (depth + 1) {
      fn = name;
      from = v;
      to = res;
    } res;


  # -- TESTING --

  /*
    Evaluates a set of tests.

    A test is an attribute set `{expr, expected}`,
    denoting an expression and its expected result.
@@ -228,19 +258,41 @@ rec {
  */
  runTests =
    # Tests to run
    tests:
    concatLists (
      attrValues (
        mapAttrs (
          name: test:
          let
            testsToRun = if tests ? tests then tests.tests else [ ];
          in
          if
            (substring 0 4 name == "test" || elem name testsToRun)
            && ((testsToRun == [ ]) || elem name tests.tests)
            && (test.expr != test.expected)
          then
            [
              {
                inherit name;
                expected = test.expected;
                result = test.expr;
              }
            ]
          else
            [ ]
        ) tests
      )
    );

  /*
    Create a test assuming that list elements are `true`.

    Example:
    { testX = allTrue [ true ]; }
  */
  testAllTrue = expr: {
    inherit expr;
    expected = map (x: true) expr;
  };
}
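A small sketch of how `runTests` is typically driven; attribute names starting with `test` are picked up automatically, and a passing test contributes nothing to the result list:

let
  lib = import ./lib;
  results = lib.runTests {
    testAddition = {
      expr = 1 + 1;
      expected = 2;
    };
    testUpper = {
      expr = lib.toUpper "nix";
      expected = "NIX";
    };
  };
in
# `results` is [ ] when everything passes; each failure is reported as
# { name, expected, result }.
results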
549  lib/default.nix
@@ -1,15 +1,19 @@
/* Library of low-level helper functions for nix expressions.
 *
 * Please implement (mostly) exhaustive unit tests
 * for new functions in `./tests.nix`.
 */
let

  inherit (import ./fixed-points.nix { inherit lib; }) makeExtensible;

  lib = makeExtensible (self: let
    callLibs = file: import file { lib = self; };
  in {

    # often used, or depending on very little
    trivial = callLibs ./trivial.nix;
@@ -64,108 +68,433 @@ let
    # linux kernel configuration
    kernel = callLibs ./kernel.nix;

    inherit (builtins) add addErrorContext attrNames concatLists
      deepSeq elem elemAt filter genericClosure genList getAttr
      hasAttr head isAttrs isBool isInt isList isPath isString length
      lessThan listToAttrs pathExists readFile replaceStrings seq
      stringLength sub substring tail trace;
    inherit (self.trivial) id const pipe concat or and xor bitAnd bitOr bitXor
      bitNot boolToString mergeAttrs flip mapNullable inNixShell isFloat min max
      importJSON importTOML warn warnIf warnIfNot throwIf throwIfNot checkListOfEnum
      info showWarnings nixpkgsVersion version isInOldestRelease
      mod compare splitByAndCompare
      functionArgs setFunctionArgs isFunction toFunction mirrorFunctionArgs
      toHexString toBaseDigits inPureEvalMode;
    inherit (self.fixedPoints) fix fix' converge extends composeExtensions
      composeManyExtensions makeExtensible makeExtensibleWithCustomName;
    inherit (self.attrsets) attrByPath hasAttrByPath setAttrByPath
      getAttrFromPath attrVals attrValues getAttrs catAttrs filterAttrs
      filterAttrsRecursive foldlAttrs foldAttrs collect nameValuePair mapAttrs
      mapAttrs' mapAttrsToList attrsToList concatMapAttrs mapAttrsRecursive
      mapAttrsRecursiveCond genAttrs isDerivation toDerivation optionalAttrs
      zipAttrsWithNames zipAttrsWith zipAttrs recursiveUpdateUntil
      recursiveUpdate matchAttrs mergeAttrsList overrideExisting showAttrPath getOutput
      getBin getLib getDev getMan chooseDevOutputs zipWithNames zip
      recurseIntoAttrs dontRecurseIntoAttrs cartesianProduct cartesianProductOfSets
      mapCartesianProduct updateManyAttrsByPath;
    inherit (self.lists) singleton forEach foldr fold foldl foldl' imap0 imap1
      ifilter0 concatMap flatten remove findSingle findFirst any all count
      optional optionals toList range replicate partition zipListsWith zipLists
      reverseList listDfs toposort sort sortOn naturalSort compareLists take
      drop sublist last init crossLists unique allUnique intersectLists
      subtractLists mutuallyExclusive groupBy groupBy';
    inherit (self.strings) concatStrings concatMapStrings concatImapStrings
      intersperse concatStringsSep concatMapStringsSep
      concatImapStringsSep concatLines makeSearchPath makeSearchPathOutput
      makeLibraryPath makeIncludePath makeBinPath optionalString
      hasInfix hasPrefix hasSuffix stringToCharacters stringAsChars escape
      escapeShellArg escapeShellArgs
      isStorePath isStringLike
      isValidPosixName toShellVar toShellVars
      escapeRegex escapeURL escapeXML replaceChars lowerChars
      upperChars toLower toUpper addContextFrom splitString
      removePrefix removeSuffix versionOlder versionAtLeast
      getName getVersion
      cmakeOptionType cmakeBool cmakeFeature
      mesonOption mesonBool mesonEnable
      nameFromURL enableFeature enableFeatureAs withFeature
      withFeatureAs fixedWidthString fixedWidthNumber
      toInt toIntBase10 readPathsFromFile fileContents;
    inherit (self.stringsWithDeps) textClosureList textClosureMap
      noDepEntry fullDepEntry packEntry stringAfter;
    inherit (self.customisation) overrideDerivation makeOverridable
      callPackageWith callPackagesWith extendDerivation hydraJob
      makeScope makeScopeWithSplicing makeScopeWithSplicing';
    inherit (self.derivations) lazyDerivation optionalDrvAttr;
    inherit (self.meta) addMetaAttrs dontDistribute setName updateName
      appendToName mapDerivationAttrset setPrio lowPrio lowPrioSet hiPrio
      hiPrioSet getLicenseFromSpdxId getExe getExe';
    inherit (self.filesystem) pathType pathIsDirectory pathIsRegularFile
      packagesFromDirectoryRecursive;
    inherit (self.sources) cleanSourceFilter
      cleanSource sourceByRegex sourceFilesBySuffices
      commitIdFromGitRepo cleanSourceWith pathHasContext
      canCleanSource pathIsGitRepo;
    inherit (self.modules) evalModules setDefaultModuleLocation
      unifyModuleSyntax applyModuleArgsIfFunction mergeModules
      mergeModules' mergeOptionDecls mergeDefinitions
      pushDownProperties dischargeProperties filterOverrides
      sortProperties fixupOptionType mkIf mkAssert mkMerge mkOverride
      mkOptionDefault mkDefault mkImageMediaOverride mkForce mkVMOverride
      mkFixStrictness mkOrder mkBefore mkAfter mkAliasDefinitions
      mkAliasAndWrapDefinitions fixMergeModules mkRemovedOptionModule
      mkRenamedOptionModule mkRenamedOptionModuleWith
      mkMergedOptionModule mkChangedOptionModule
      mkAliasOptionModule mkDerivedConfig doRename
      mkAliasOptionModuleMD;
    evalOptionValue = lib.warn "External use of `lib.evalOptionValue` is deprecated. If your use case isn't covered by non-deprecated functions, we'd like to know more and perhaps support your use case well, instead of providing access to these low level functions. In this case please open an issue in https://github.com/nixos/nixpkgs/issues/." self.modules.evalOptionValue;
    inherit (self.options) isOption mkEnableOption mkSinkUndeclaredOptions
      mergeDefaultOption mergeOneOption mergeEqualOption mergeUniqueOption
      getValues getFiles
      optionAttrSetToDocList optionAttrSetToDocList'
      scrubOptionValue literalExpression literalExample
      showOption showOptionWithDefLocs showFiles
      unknownModule mkOption mkPackageOption mkPackageOptionMD
      mdDoc literalMD;
    inherit (self.types) isType setType defaultTypeMerge defaultFunctor
      isOptionType mkOptionType;
    inherit (self.asserts)
      assertMsg assertOneOf;
    inherit (self.debug) traceIf traceVal traceValFn
      traceSeq traceSeqN traceValSeq
      traceValSeqFn traceValSeqN traceValSeqNFn traceFnSeqN
      runTests testAllTrue;
    inherit (self.misc) maybeEnv defaultMergeArg defaultMerge foldArgs
      maybeAttrNullable maybeAttr ifEnable checkFlag getValue
      checkReqs uniqList uniqListExt condConcat lazyGenericClosure
      innerModifySumArgs modifySumArgs innerClosePropagation
      closePropagation mapAttrsFlatten nvs setAttr setAttrMerge
      mergeAttrsWithFunc mergeAttrsConcatenateValues
      mergeAttrsNoOverride mergeAttrByFunc mergeAttrsByFuncDefaults
      mergeAttrsByFuncDefaultsClean mergeAttrBy
      fakeHash fakeSha256 fakeSha512
      nixType imap;
    inherit (self.versions)
      splitVersion;
  });
in lib
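Because the library above is built with `makeExtensible`, downstream code can layer extra helpers on top of it; a sketch, with the added function name made up:

let
  lib = import ./lib;
  lib' = lib.extend (final: prev: {
    shout = s: prev.toUpper s + "!";
  });
in
lib'.shout "hi"
# => "HI!"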
@@ -34,153 +34,217 @@ let
  inherit (lib.attrsets) removeAttrs;

  # returns default if env var is not set
  maybeEnv =
    name: default:
    let
      value = builtins.getEnv name;
    in
    if value == "" then default else value;

  defaultMergeArg = x: y: if builtins.isAttrs y then y else (y x);

  defaultMerge = x: y: x // (defaultMergeArg x y);
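A small sketch of `maybeEnv`: read an environment variable with a fallback (effective only in impure evaluation, where `builtins.getEnv` can see the environment); the variable name is just an example:

let
  lib = import ./lib;
in
lib.maybeEnv "NIX_BUILD_CORES" "1"
# => the variable's value, or "1" when it is unset or empty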
  foldArgs =
    merger: f: init: x:
    let
      arg = (merger init (defaultMergeArg init x));
      # now add the function with composed args already applied to the final attrs
      base = (
        setAttrMerge "passthru" { } (f arg) (
          z:
          z
          // {
            function = foldArgs merger f arg;
            args =
              (attrByPath [
                "passthru"
                "args"
              ] { } z)
              // x;
          }
        )
      );
      withStdOverrides = base // {
        override = base.passthru.function;
      };
    in
    withStdOverrides;
# shortcut for attrByPath ["name"] default attrs
|
# shortcut for attrByPath ["name"] default attrs
|
||||||
maybeAttrNullable = maybeAttr;
|
maybeAttrNullable = maybeAttr;
|
||||||
|
|
||||||
# shortcut for attrByPath ["name"] default attrs
|
# shortcut for attrByPath ["name"] default attrs
|
||||||
maybeAttr = name: default: attrs: attrs.${name} or default;
|
maybeAttr =
|
||||||
|
name: default: attrs:
|
||||||
|
attrs.${name} or default;
|
||||||
|
|
||||||
# Return the second argument if the first one is true or the empty version
|
# Return the second argument if the first one is true or the empty version
|
||||||
# of the second argument.
|
# of the second argument.
|
||||||
ifEnable = cond: val:
|
ifEnable =
|
||||||
if cond then val
|
cond: val:
|
||||||
else if builtins.isList val then []
|
if cond then
|
||||||
else if builtins.isAttrs val then {}
|
val
|
||||||
|
else if builtins.isList val then
|
||||||
|
[ ]
|
||||||
|
else if builtins.isAttrs val then
|
||||||
|
{ }
|
||||||
# else if builtins.isString val then ""
|
# else if builtins.isString val then ""
|
||||||
else if val == true || val == false then false
|
else if val == true || val == false then
|
||||||
else null;
|
false
|
||||||
|
else
|
||||||
|
null;
|
||||||
|
|
||||||
# Return true only if there is an attribute and it is true.
|
# Return true only if there is an attribute and it is true.
|
||||||
checkFlag = attrSet: name:
|
checkFlag =
|
||||||
if name == "true" then true else
|
attrSet: name:
|
||||||
if name == "false" then false else
|
if name == "true" then
|
||||||
if (elem name (attrByPath ["flags"] [] attrSet)) then true else
|
true
|
||||||
attrByPath [name] false attrSet ;
|
else if name == "false" then
|
||||||
|
false
|
||||||
|
else if (elem name (attrByPath [ "flags" ] [ ] attrSet)) then
|
||||||
|
true
|
||||||
|
else
|
||||||
|
attrByPath [ name ] false attrSet;
|
||||||
|
|
||||||
# Input : attrSet, [ [name default] ... ], name
|
# Input : attrSet, [ [name default] ... ], name
|
||||||
# Output : its value or default.
|
# Output : its value or default.
|
||||||
getValue = attrSet: argList: name:
|
getValue =
|
||||||
( attrByPath [name] (if checkFlag attrSet name then true else
|
attrSet: argList: name:
|
||||||
if argList == [] then null else
|
(attrByPath [ name ] (
|
||||||
let x = builtins.head argList; in
|
if checkFlag attrSet name then
|
||||||
if (head x) == name then
|
true
|
||||||
(head (tail x))
|
else if argList == [ ] then
|
||||||
else (getValue attrSet
|
null
|
||||||
(tail argList) name)) attrSet );
|
else
|
||||||
|
let
|
||||||
|
x = builtins.head argList;
|
||||||
|
in
|
||||||
|
if (head x) == name then (head (tail x)) else (getValue attrSet (tail argList) name)
|
||||||
|
) attrSet);
|
||||||
|
|
||||||
# Input : attrSet, [[name default] ...], [ [flagname reqs..] ... ]
|
# Input : attrSet, [[name default] ...], [ [flagname reqs..] ... ]
|
||||||
# Output : are reqs satisfied? It's asserted.
|
# Output : are reqs satisfied? It's asserted.
|
||||||
checkReqs = attrSet: argList: condList:
|
checkReqs =
|
||||||
|
attrSet: argList: condList:
|
||||||
|
(foldr and true (
|
||||||
|
map (
|
||||||
|
x:
|
||||||
|
let
|
||||||
|
name = (head x);
|
||||||
|
in
|
||||||
|
|
||||||
(
|
(
|
||||||
foldr and true
|
(checkFlag attrSet name)
|
||||||
(map (x: let name = (head x); in
|
-> (foldr and true (
|
||||||
|
map (
|
||||||
((checkFlag attrSet name) ->
|
y:
|
||||||
(foldr and true
|
let
|
||||||
(map (y: let val=(getValue attrSet argList y); in
|
val = (getValue attrSet argList y);
|
||||||
(val!=null) && (val!=false))
|
in
|
||||||
(tail x))))) condList));
|
(val != null) && (val != false)
|
||||||
|
) (tail x)
|
||||||
|
))
|
||||||
|
)
|
||||||
|
) condList
|
||||||
|
));
|
||||||
|
|
||||||
# This function has O(n^2) performance.
|
# This function has O(n^2) performance.
|
||||||
uniqList = { inputList, acc ? [] }:
|
uniqList =
|
||||||
let go = xs: acc:
|
{
|
||||||
if xs == []
|
inputList,
|
||||||
then []
|
acc ? [ ],
|
||||||
else let x = head xs;
|
}:
|
||||||
y = if elem x acc then [] else [x];
|
let
|
||||||
in y ++ go (tail xs) (y ++ acc);
|
go =
|
||||||
in go inputList acc;
|
xs: acc:
|
||||||
|
if xs == [ ] then
|
||||||
|
[ ]
|
||||||
|
else
|
||||||
|
let
|
||||||
|
x = head xs;
|
||||||
|
y = if elem x acc then [ ] else [ x ];
|
||||||
|
in
|
||||||
|
y ++ go (tail xs) (y ++ acc);
|
||||||
|
in
|
||||||
|
go inputList acc;
|
||||||
|
|
||||||
uniqListExt = { inputList,
|
uniqListExt =
|
||||||
outputList ? [],
|
{
|
||||||
|
inputList,
|
||||||
|
outputList ? [ ],
|
||||||
getter ? (x: x),
|
getter ? (x: x),
|
||||||
compare ? (x: y: x==y) }:
|
compare ? (x: y: x == y),
|
||||||
if inputList == [] then outputList else
|
}:
|
||||||
let x = head inputList;
|
if inputList == [ ] then
|
||||||
|
outputList
|
||||||
|
else
|
||||||
|
let
|
||||||
|
x = head inputList;
|
||||||
isX = y: (compare (getter y) (getter x));
|
isX = y: (compare (getter y) (getter x));
|
||||||
newOutputList = outputList ++
|
newOutputList = outputList ++ (if any isX outputList then [ ] else [ x ]);
|
||||||
(if any isX outputList then [] else [x]);
|
in
|
||||||
in uniqListExt { outputList = newOutputList;
|
uniqListExt {
|
||||||
|
outputList = newOutputList;
|
||||||
inputList = (tail inputList);
|
inputList = (tail inputList);
|
||||||
inherit getter compare;
|
inherit getter compare;
|
||||||
};
|
};
|
||||||
|
|
||||||
condConcat = name: list: checker:
|
condConcat =
|
||||||
if list == [] then name else
|
name: list: checker:
|
||||||
if checker (head list) then
|
if list == [ ] then
|
||||||
condConcat
|
name
|
||||||
(name + (head (tail list)))
|
else if checker (head list) then
|
||||||
(tail (tail list))
|
condConcat (name + (head (tail list))) (tail (tail list)) checker
|
||||||
checker
|
else
|
||||||
else condConcat
|
condConcat name (tail (tail list)) checker;
|
||||||
name (tail (tail list)) checker;
|
|
||||||
|
|
||||||
lazyGenericClosure = {startSet, operator}:
|
lazyGenericClosure =
|
||||||
|
{ startSet, operator }:
|
||||||
let
|
let
|
||||||
work = list: doneKeys: result:
|
work =
|
||||||
if list == [] then
|
list: doneKeys: result:
|
||||||
|
if list == [ ] then
|
||||||
result
|
result
|
||||||
else
|
else
|
||||||
let x = head list; key = x.key; in
|
let
|
||||||
|
x = head list;
|
||||||
|
key = x.key;
|
||||||
|
in
|
||||||
if elem key doneKeys then
|
if elem key doneKeys then
|
||||||
work (tail list) doneKeys result
|
work (tail list) doneKeys result
|
||||||
else
|
else
|
||||||
work (tail list ++ operator x) ([key] ++ doneKeys) ([x] ++ result);
|
work (tail list ++ operator x) ([ key ] ++ doneKeys) ([ x ] ++ result);
|
||||||
in
|
in
|
||||||
work startSet [] [];
|
work startSet [ ] [ ];
|
||||||
|
|
||||||
innerModifySumArgs = f: x: a: b: if b == null then (f a b) // x else
|
innerModifySumArgs =
|
||||||
innerModifySumArgs f x (a // b);
|
f: x: a: b:
|
||||||
modifySumArgs = f: x: innerModifySumArgs f x {};
|
if b == null then (f a b) // x else innerModifySumArgs f x (a // b);
|
||||||
|
modifySumArgs = f: x: innerModifySumArgs f x { };
|
||||||
|
|
||||||
|
innerClosePropagation =
|
||||||
innerClosePropagation = acc: xs:
|
acc: xs:
|
||||||
if xs == []
|
if xs == [ ] then
|
||||||
then acc
|
acc
|
||||||
else let y = head xs;
|
else
|
||||||
|
let
|
||||||
|
y = head xs;
|
||||||
ys = tail xs;
|
ys = tail xs;
|
||||||
in if ! isAttrs y
|
in
|
||||||
then innerClosePropagation acc ys
|
if !isAttrs y then
|
||||||
else let acc' = [y] ++ acc;
|
innerClosePropagation acc ys
|
||||||
in innerClosePropagation
|
else
|
||||||
acc'
|
let
|
||||||
(uniqList { inputList = (maybeAttrNullable "propagatedBuildInputs" [] y)
|
acc' = [ y ] ++ acc;
|
||||||
++ (maybeAttrNullable "propagatedNativeBuildInputs" [] y)
|
in
|
||||||
|
innerClosePropagation acc' (uniqList {
|
||||||
|
inputList =
|
||||||
|
(maybeAttrNullable "propagatedBuildInputs" [ ] y)
|
||||||
|
++ (maybeAttrNullable "propagatedNativeBuildInputs" [ ] y)
|
||||||
++ ys;
|
++ ys;
|
||||||
acc = acc';
|
acc = acc';
|
||||||
}
|
});
|
||||||
);
|
|
||||||
|
|
||||||
closePropagationSlow = list: (uniqList {inputList = (innerClosePropagation [] list);});
|
closePropagationSlow = list: (uniqList { inputList = (innerClosePropagation [ ] list); });
|
||||||
|
|
||||||
# This is an optimisation of closePropagation which avoids the O(n^2) behavior
|
# This is an optimisation of closePropagation which avoids the O(n^2) behavior
|
||||||
# Using a list of derivations, it generates the full closure of the propagatedXXXBuildInputs
|
# Using a list of derivations, it generates the full closure of the propagatedXXXBuildInputs
|
||||||
|
@ -188,28 +252,35 @@ let
|
||||||
# attribute of each derivation.
|
# attribute of each derivation.
|
||||||
# On some benchmarks, it performs up to 15 times faster than closePropagation.
|
# On some benchmarks, it performs up to 15 times faster than closePropagation.
|
||||||
# See https://github.com/NixOS/nixpkgs/pull/194391 for details.
|
# See https://github.com/NixOS/nixpkgs/pull/194391 for details.
|
||||||
closePropagationFast = list:
|
closePropagationFast =
|
||||||
builtins.map (x: x.val) (builtins.genericClosure {
|
list:
|
||||||
|
builtins.map (x: x.val) (
|
||||||
|
builtins.genericClosure {
|
||||||
startSet = builtins.map (x: {
|
startSet = builtins.map (x: {
|
||||||
key = x.outPath;
|
key = x.outPath;
|
||||||
val = x;
|
val = x;
|
||||||
}) (builtins.filter (x: x != null) list);
|
}) (builtins.filter (x: x != null) list);
|
||||||
operator = item:
|
operator =
|
||||||
|
item:
|
||||||
if !builtins.isAttrs item.val then
|
if !builtins.isAttrs item.val then
|
||||||
[ ]
|
[ ]
|
||||||
else
|
else
|
||||||
builtins.concatMap (x:
|
builtins.concatMap (
|
||||||
if x != null then [{
|
x:
|
||||||
|
if x != null then
|
||||||
|
[
|
||||||
|
{
|
||||||
key = x.outPath;
|
key = x.outPath;
|
||||||
val = x;
|
val = x;
|
||||||
}] else
|
}
|
||||||
[ ]) ((item.val.propagatedBuildInputs or [ ])
|
]
|
||||||
++ (item.val.propagatedNativeBuildInputs or [ ]));
|
else
|
||||||
});
|
[ ]
|
||||||
|
) ((item.val.propagatedBuildInputs or [ ]) ++ (item.val.propagatedNativeBuildInputs or [ ]));
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
closePropagation = if builtins ? genericClosure
|
closePropagation = if builtins ? genericClosure then closePropagationFast else closePropagationSlow;
|
||||||
then closePropagationFast
|
|
||||||
else closePropagationSlow;
|
|
||||||
|
|
||||||
# calls a function (f attr value ) for each record item. returns a list
|
# calls a function (f attr value ) for each record item. returns a list
|
||||||
mapAttrsFlatten = f: r: map (attr: f attr r.${attr}) (attrNames r);
|
mapAttrsFlatten = f: r: map (attr: f attr r.${attr}) (attrNames r);
|
||||||
|
@ -217,26 +288,29 @@ let
|
||||||
# attribute set containing one attribute
|
# attribute set containing one attribute
|
||||||
nvs = name: value: listToAttrs [ (nameValuePair name value) ];
|
nvs = name: value: listToAttrs [ (nameValuePair name value) ];
|
||||||
# adds / replaces an attribute of an attribute set
|
# adds / replaces an attribute of an attribute set
|
||||||
setAttr = set: name: v: set // (nvs name v);
|
setAttr =
|
||||||
|
set: name: v:
|
||||||
|
set // (nvs name v);
|
||||||
|
|
||||||
# setAttrMerge (similar to mergeAttrsWithFunc but only merges the values of a particular name)
|
# setAttrMerge (similar to mergeAttrsWithFunc but only merges the values of a particular name)
|
||||||
# setAttrMerge "a" [] { a = [2];} (x: x ++ [3]) -> { a = [2 3]; }
|
# setAttrMerge "a" [] { a = [2];} (x: x ++ [3]) -> { a = [2 3]; }
|
||||||
# setAttrMerge "a" [] { } (x: x ++ [3]) -> { a = [ 3]; }
|
  # setAttrMerge "a" [] { } (x: x ++ [3]) -> { a = [ 3]; }
  setAttrMerge =
    name: default: attrs: f:
    setAttr attrs name (f (maybeAttr name default attrs));

  # Using f = a: b = b the result is similar to //
  # merge attributes with custom function handling the case that the attribute
  # exists in both sets
  mergeAttrsWithFunc =
    f: set1: set2:
    foldr (n: set: if set ? ${n} then setAttr set n (f set.${n} set2.${n}) else set) (set2 // set1) (
      attrNames set2
    );

  # merging two attribute set concatenating the values of same attribute names
  # eg { a = 7; } { a = [ 2 3 ]; } becomes { a = [ 7 2 3 ]; }
  mergeAttrsConcatenateValues = mergeAttrsWithFunc (a: b: (toList a) ++ (toList b));

  # merges attributes using //, if a name exists in both attributes
  # an error will be triggered unless its listed in mergeLists
@@ -245,20 +319,31 @@ let
  # merging buildPhase doesn't really make sense. The cases will be rare where appending /prefixing will fit your needs?
  # in these cases the first buildPhase will override the second one
  # ! deprecated, use mergeAttrByFunc instead
  mergeAttrsNoOverride =
    {
      mergeLists ? [
        "buildInputs"
        "propagatedBuildInputs"
      ],
      overrideSnd ? [ "buildPhase" ],
    }:
    attrs1: attrs2:
    foldr (
      n: set:
      setAttr set n (
        if set ? ${n} then # merge
          if
            elem n mergeLists # attribute contains list, merge them by concatenating
          then
            attrs2.${n} ++ attrs1.${n}
          else if elem n overrideSnd then
            attrs1.${n}
          else
            throw "error mergeAttrsNoOverride, attribute ${n} given in both attributes - no merge func defined"
        else
          attrs2.${n} # add attribute not existing in attr1
      )
    ) attrs1 (attrNames attrs2);

  # example usage:
  # mergeAttrByFunc {
@@ -271,48 +356,83 @@ let
  # { mergeAttrsBy = [...]; buildInputs = [ a b c d ]; }
  # is used by defaultOverridableDelayableArgs and can be used when composing using
  # foldArgs, composedArgsAndFun or applyAndFun. Example: composableDerivation in all-packages.nix
  mergeAttrByFunc =
    x: y:
    let
      mergeAttrBy2 = {
        mergeAttrBy = mergeAttrs;
      } // (maybeAttr "mergeAttrBy" { } x) // (maybeAttr "mergeAttrBy" { } y);
    in
    foldr mergeAttrs { } [
      x
      y
      (mapAttrs
        (
          a: v: # merge special names using given functions
          if x ? ${a} then
            if y ? ${a} then
              v x.${a} y.${a} # both have attr, use merge func
            else
              x.${a} # only x has attr
          else
            y.${a} # only y has attr)
        )
        (
          removeAttrs mergeAttrBy2
            # don't merge attrs which are neither in x nor y
            (filter (a: !x ? ${a} && !y ? ${a}) (attrNames mergeAttrBy2))
        )
      )
    ];
  mergeAttrsByFuncDefaults = foldl mergeAttrByFunc { inherit mergeAttrBy; };
  mergeAttrsByFuncDefaultsClean = list: removeAttrs (mergeAttrsByFuncDefaults list) [ "mergeAttrBy" ];

  # sane defaults (same name as attr name so that inherit can be used)
  mergeAttrBy = # { buildInputs = concatList; [...]; passthru = mergeAttr; [..]; }
    listToAttrs (
      map (n: nameValuePair n concat) [
        "nativeBuildInputs"
        "buildInputs"
        "propagatedBuildInputs"
        "configureFlags"
        "prePhases"
        "postAll"
        "patches"
      ]
    )
    // listToAttrs (
      map (n: nameValuePair n mergeAttrs) [
        "passthru"
        "meta"
        "cfg"
        "flags"
      ]
    )
    // listToAttrs (
      map (n: nameValuePair n (a: b: "${a}\n${b}")) [
        "preConfigure"
        "postInstall"
      ]
    );

  nixType =
    x:
    if isAttrs x then
      if x ? outPath then "derivation" else "attrs"
    else if isFunction x then
      "function"
    else if isList x then
      "list"
    else if x == true then
      "bool"
    else if x == false then
      "bool"
    else if x == null then
      "null"
    else if isInt x then
      "int"
    else
      "string";

  /**
    # Deprecated
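For reference, a minimal sketch of how these deprecated helpers behave, with hypothetical input values (not part of this diff):

  # mergeAttrsConcatenateValues { a = 7; meta = "x"; } { a = [ 2 3 ]; }
  # => { a = [ 7 2 3 ]; meta = "x"; }
  #
  # nixType { outPath = "/nix/store/..."; }  => "derivation"
  # nixType (x: x)                           => "function"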
@@ -1,20 +1,18 @@
{ lib }:

let
  inherit (lib) genAttrs isString throwIfNot;

  showMaybeAttrPosPre =
    prefix: attrName: v:
    let
      pos = builtins.unsafeGetAttrPos attrName v;
    in
    if pos == null then "" else "${prefix}${pos.file}:${toString pos.line}:${toString pos.column}";

  showMaybePackagePosPre =
    prefix: pkg:
    if pkg ? meta.position && isString pkg.meta.position then "${prefix}${pkg.meta.position}" else "";
in
{
  /*
@@ -61,35 +59,33 @@ in
    (lazyDerivation { inherit derivation }).passthru

    (lazyDerivation { inherit derivation }).pythonPath

  */
  lazyDerivation =
    args@{
      # The derivation to be wrapped.
      derivation,
      # Optional meta attribute.
      #
      # While this function is primarily about derivations, it can improve
      # the `meta` package attribute, which is usually specified through
      # `mkDerivation`.
      meta ? null,
      # Optional extra values to add to the returned attrset.
      #
      # This can be used for adding package attributes, such as `tests`.
      passthru ? { },
      # Optional list of assumed outputs. Default: ["out"]
      #
      # This must match the set of outputs that the returned derivation has.
      # You must use this when the derivation has multiple outputs.
      outputs ? [ "out" ],
    }:
    let
      # These checks are strict in `drv` and some `drv` attributes, but the
      # attrset spine returned by lazyDerivation does not depend on it.
      # Instead, the individual derivation attributes do depend on it.
      checked =
        throwIfNot (derivation.type or null == "derivation") "lazyDerivation: input must be a derivation."
          throwIfNot
          # NOTE: Technically we could require our outputs to be a subset of the
          # actual ones, or even leave them unchecked and fail on a lazy basis.
@@ -139,7 +135,13 @@ in
      # A fixed set of derivation values, so that `lazyDerivation` can return
      # its attrset before evaluating `derivation`.
      # This must only list attributes that are available on _all_ derivations.
      inherit (checked)
        outPath
        outputName
        drvPath
        name
        system
        ;
      inherit outputs;

      # The meta attribute can either be taken from the derivation, or if the
@@ -149,7 +151,8 @@ in
    // genAttrs outputs (outputName: checked.${outputName})
    // passthru;

  /*
    Conditionally set a derivation attribute.

    Because `mkDerivation` sets `__ignoreNulls = true`, a derivation
    attribute set to `null` will not impact the derivation output hash.

@@ -173,5 +176,6 @@ in
    # Condition
    cond:
    # Attribute value
    value:
    if cond then value else null;
}
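A minimal usage sketch for lazyDerivation as reformatted above; `somePkg` and `someTest` are hypothetical placeholders:

  lib.lazyDerivation {
    # the derivation to wrap; its attributes are only forced when accessed
    derivation = somePkg;
    # extra package attributes attached without forcing `somePkg`
    passthru.tests.basic = someTest;
  }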
@@ -7,7 +7,11 @@
    # easy proxy configuration. This is impure, but a fixed-output
    # derivation like fetchurl is allowed to do so since its result is
    # by definition pure.
    "http_proxy"
    "https_proxy"
    "ftp_proxy"
    "all_proxy"
    "no_proxy"
  ];

}
@@ -57,7 +57,6 @@
  If you need more file set functions,
  see [this issue](https://github.com/NixOS/nixpkgs/issues/266356) to request it.

  ## Implicit coercion from paths to file sets {#sec-fileset-path-coercion}

  All functions accepting file sets as arguments can also accept [paths](https://nixos.org/manual/nix/stable/language/values.html#type-path) as arguments.
@@ -127,35 +126,20 @@ let
    nixVersion
    ;

  inherit (lib.lists) elemAt imap0;

  inherit (lib.path) hasPrefix splitRoot;

  inherit (lib.strings) isStringLike versionOlder;

  inherit (lib.filesystem) pathType;

  inherit (lib.sources) cleanSourceWith;

  inherit (lib.trivial) isFunction pipe;

in
{

  /*
    Create a file set from a path that may or may not exist:
@@ -171,14 +155,12 @@ in {
  */
  maybeMissing =
    path:
    if !isPath path then
      if isStringLike path then
        throw ''lib.fileset.maybeMissing: Argument ("${toString path}") is a string-like value, but it should be a path instead.''
      else
        throw ''lib.fileset.maybeMissing: Argument is of type ${typeOf path}, but it should be a path instead.''
    else if !pathExists path then
      _emptyWithoutBase
    else
      _singleton path;
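A small sketch of how maybeMissing is typically combined with union; the paths are hypothetical:

  lib.fileset.union
    ./default.nix
    # included only when the file exists, otherwise an empty file set
    (lib.fileset.maybeMissing ./local-overrides.nix)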
@@ -220,9 +202,7 @@ in {
      # and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
      actualFileset = _coerce "lib.fileset.trace: Argument" fileset;
    in
    seq (_printFileset actualFileset) (x: x);

  /*
    Incrementally evaluate and trace a file set in a pretty way.
@@ -267,8 +247,7 @@ in {
      # and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
      actualFileset = _coerce "lib.fileset.traceVal: Argument" fileset;
    in
    seq (_printFileset actualFileset)
      # We could also return the original fileset argument here,
      # but that would then duplicate work for consumers of the fileset, because then they have to coerce it again
      actualFileset;
@@ -340,7 +319,8 @@ in {
      }
      => <error>
  */
  toSource =
    {
      /*
        (required) The local directory [path](https://nixos.org/manual/nix/stable/language/values.html#type-path) that will correspond to the root of the resulting store path.
        Paths in [strings](https://nixos.org/manual/nix/stable/language/values.html#type-string), including Nix store paths, cannot be passed as `root`.
@@ -361,7 +341,6 @@ in {
        :::{.note}
        If a directory does not recursively contain any file, it is omitted from the store path contents.
        :::

      */
      fileset,
    }:
@@ -375,7 +354,7 @@ in {
      filesetFilesystemRoot = (splitRoot fileset._internalBase).root;
      sourceFilter = _toSourceFilter fileset;
    in
    if !isPath root then
      if root ? _isLibCleanSourceWith then
        throw ''
          lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
@@ -386,38 +365,34 @@ in {
          lib.fileset.toSource: `root` (${toString root}) is a string-like value, but it should be a path instead.
              Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
      else
        throw ''lib.fileset.toSource: `root` is of type ${typeOf root}, but it should be a path instead.''
    # Currently all Nix paths have the same filesystem root, but this could change in the future.
    # See also ../path/README.md
    else if !fileset._internalIsEmptyWithoutBase && rootFilesystemRoot != filesetFilesystemRoot then
      throw ''
        lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` (${toString root}):
            `root`: Filesystem root is "${toString rootFilesystemRoot}"
            `fileset`: Filesystem root is "${toString filesetFilesystemRoot}"
        Different filesystem roots are not supported.''
    else if !pathExists root then
      throw ''lib.fileset.toSource: `root` (${toString root}) is a path that does not exist.''
    else if pathType root != "directory" then
      throw ''
        lib.fileset.toSource: `root` (${toString root}) is a file, but it should be a directory instead. Potential solutions:
            - If you want to import the file into the store _without_ a containing directory, use string interpolation or `builtins.path` instead of this function.
            - If you want to import the file into the store _with_ a containing directory, set `root` to the containing directory, such as ${toString (dirOf root)}, and set `fileset` to the file path.''
    else if !fileset._internalIsEmptyWithoutBase && !hasPrefix root fileset._internalBase then
      throw ''
        lib.fileset.toSource: `fileset` could contain files in ${toString fileset._internalBase}, which is not under the `root` (${toString root}). Potential solutions:
            - Set `root` to ${toString fileset._internalBase} or any directory higher up. This changes the layout of the resulting store path.
            - Set `fileset` to a file set that cannot contain files outside the `root` (${toString root}). This could change the files included in the result.''
    else
      seq sourceFilter cleanSourceWith {
        name = "source";
        src = root;
        filter = sourceFilter;
      };
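A minimal toSource sketch under an assumed project layout (./default.nix and ./src existing side by side):

  lib.fileset.toSource {
    root = ./.;
    fileset = lib.fileset.unions [
      ./default.nix
      ./src
    ];
  }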
  /*
    The list of file paths contained in the given file set.

@@ -446,8 +421,7 @@ in {
    # The file set whose file paths to return.
    # This argument can also be a path,
    # which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
    fileset: _toList (_coerce "lib.fileset.toList: Argument" fileset);

  /*
    The file set containing all files that are in either of two given file sets.

@@ -479,8 +453,8 @@ in {
    # This argument can also be a path,
    # which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
    fileset2:
    _unionMany (
      _coerceMany "lib.fileset.union" [
        {
          context = "First argument";
          value = fileset1;
@@ -489,7 +463,8 @@ in {
          context = "Second argument";
          value = fileset2;
        }
      ]
    );
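For quick inspection during development, the result of union can be listed with toList (hypothetical paths):

  # evaluates to the list of files included in the combined file set
  lib.fileset.toList (lib.fileset.union ./Makefile ./src)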
  /*
    The file set containing all files that are in any of the given file sets.

@@ -527,16 +502,17 @@ in {
    # The elements can also be paths,
    # which get [implicitly coerced to file sets](#sec-fileset-path-coercion).
    filesets:
    if !isList filesets then
      throw ''lib.fileset.unions: Argument is of type ${typeOf filesets}, but it should be a list instead.''
    else
      pipe filesets [
        # Annotate the elements with context, used by _coerceMany for better errors
        (imap0 (
          i: el: {
            context = "Element ${toString i}";
            value = el;
          }
        ))
        (_coerceMany "lib.fileset.unions")
        _unionMany
      ];
@@ -576,9 +552,7 @@ in {
        }
      ];
    in
    _intersection (elemAt filesets 0) (elemAt filesets 1);
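A sketch of intersection restricting Git-tracked files to one subdirectory (hypothetical repository layout):

  lib.fileset.intersection
    (lib.fileset.gitTracked ./.)
    ./src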
  /*
    The file set containing all files from the first file set that are not in the second file set.

@@ -628,9 +602,7 @@ in {
        }
      ];
    in
    _difference (elemAt filesets 0) (elemAt filesets 1);
  /*
    Filter a file set to only contain files matching some predicate.

@@ -682,20 +654,17 @@ in {
    predicate:
    # The path whose files to filter
    path:
    if !isFunction predicate then
      throw ''lib.fileset.fileFilter: First argument is of type ${typeOf predicate}, but it should be a function instead.''
    else if !isPath path then
      if path._type or "" == "fileset" then
        throw ''
          lib.fileset.fileFilter: Second argument is a file set, but it should be a path instead.
              If you need to filter files in a file set, use `intersection fileset (fileFilter pred ./.)` instead.''
      else
        throw ''lib.fileset.fileFilter: Second argument is of type ${typeOf path}, but it should be a path instead.''
    else if !pathExists path then
      throw ''lib.fileset.fileFilter: Second argument (${toString path}) is a path that does not exist.''
    else
      _fileFilter predicate path;
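A fileFilter sketch keeping only files whose name ends in .nix, relying on the `name` attribute the predicate receives (the path is hypothetical):

  lib.fileset.fileFilter (file: lib.hasSuffix ".nix" file.name) ./.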
@@ -741,7 +710,8 @@ in {
        ./src
      ]);
  */
  fromSource =
    source:
    let
      # This function uses `._isLibCleanSourceWith`, `.origSrc` and `.filter`,
      # which are technically internal to lib.sources,
@@ -751,17 +721,15 @@ in {
      path = if isFiltered then source.origSrc else source;
    in
    # We can only support sources created from paths
    if !isPath path then
      if isStringLike path then
        throw ''
          lib.fileset.fromSource: The source origin of the argument is a string-like value ("${toString path}"), but it should be a path instead.
              Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.''
      else
        throw ''lib.fileset.fromSource: The source origin of the argument is of type ${typeOf path}, but it should be a path instead.''
    else if !pathExists path then
      throw ''lib.fileset.fromSource: The source origin (${toString path}) of the argument is a path that does not exist.''
    else if isFiltered then
      _fromSourceFilter path source.filter
    else
@@ -789,12 +757,7 @@ in {
      The [path](https://nixos.org/manual/nix/stable/language/values#type-path) to the working directory of a local Git repository.
      This directory must contain a `.git` file or subdirectory.
    */
    path: _fromFetchGit "gitTracked" "argument" path { };

  /*
    Create a file set containing all [Git-tracked files](https://git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository) in a repository.

@@ -842,19 +805,14 @@ in {
      This directory must contain a `.git` file or subdirectory.
    */
    path:
    if !isBool recurseSubmodules then
      throw "lib.fileset.gitTrackedWith: Expected the attribute `recurseSubmodules` of the first argument to be a boolean, but it's a ${typeOf recurseSubmodules} instead."
    else if recurseSubmodules && versionOlder nixVersion _fetchGitSubmodulesMinver then
      throw "lib.fileset.gitTrackedWith: Setting the attribute `recurseSubmodules` to `true` is only supported for Nix version ${_fetchGitSubmodulesMinver} and after, but Nix version ${nixVersion} is used."
    else
      _fromFetchGit "gitTrackedWith" "second argument" path
        # This is the only `fetchGit` parameter that makes sense in this context.
        # We can't just pass `submodules = recurseSubmodules` here because
        # this would fail for Nix versions that don't support `submodules`.
        (lib.optionalAttrs recurseSubmodules { submodules = true; });
}
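A gitTrackedWith sketch enabling submodule recursion, assuming the working directory is a Git checkout:

  lib.fileset.gitTrackedWith { recurseSubmodules = true; } ./.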
@@ -1,4 +1,6 @@
{
  lib ? import ../.,
}:
let

  inherit (builtins)
@@ -23,9 +25,7 @@ let
    zipAttrsWith
    ;

  inherit (lib.filesystem) pathType;

  inherit (lib.lists)
    all
@@ -49,10 +49,7 @@ let
    splitStorePath
    ;

  inherit (lib.path.subpath) components join;

  inherit (lib.strings)
    isStringLike
@@ -63,9 +60,7 @@ let
    versionAtLeast
    ;

  inherit (lib.trivial) inPureEvalMode;
in
# Rare case of justified usage of rec:
# - This file is internal, so the return value doesn't matter, no need to make things overridable
@@ -87,7 +82,8 @@ rec {
      let
        parts = splitRoot filesetV0._internalBase;
      in
      filesetV0
      // {
        _internalVersion = 1;
        _internalBaseRoot = parts.root;
        _internalBaseComponents = components parts.subpath;
@@ -98,15 +94,14 @@ rec {
    (
      filesetV1:
      # This change is backwards compatible (but not forwards compatible, so we still need a new version)
      filesetV1 // { _internalVersion = 2; }
    )

    # Convert v2 into v3: filesetTree's now have a representation for an empty file set without a base path
    (
      filesetV2:
      filesetV2
      // {
        # All v1 file sets are not the new empty file set
        _internalIsEmptyWithoutBase = false;
        _internalVersion = 3;
@@ -136,7 +131,8 @@ rec {

  # Create a fileset, see ./README.md#fileset
  # Type: path -> filesetTree -> fileset
  _create =
    base: tree:
    let
      # Decompose the base into its components
      # See ../path/README.md for why we're not just using `toString`
@@ -162,7 +158,8 @@ rec {
  # Coerce a value to a fileset, erroring when the value cannot be coerced.
  # The string gives the context for error messages.
  # Type: String -> (fileset | Path) -> fileset
  _coerce =
    context: value:
    if value._type or "" == "fileset" then
      if value._internalVersion > _currentVersion then
        throw ''
@@ -173,12 +170,14 @@ rec {
    else if value._internalVersion < _currentVersion then
      let
        # Get all the migration functions necessary to convert from the old to the current version
        migrationsToApply = sublist value._internalVersion (
          _currentVersion - value._internalVersion
        ) migrations;
      in
      foldl' (value: migration: migration value) value migrationsToApply
    else
      value
    else if !isPath value then
      if value ? _isLibCleanSourceWith then
        throw ''
          ${context} is a `lib.sources`-based value, but it should be a file set or a path instead.
@@ -189,9 +188,8 @@ rec {
          ${context} ("${toString value}") is a string-like value, but it should be a file set or a path instead.
              Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
      else
        throw ''${context} is of type ${typeOf value}, but it should be a file set or a path instead.''
    else if !pathExists value then
      throw ''
        ${context} (${toString value}) is a path that does not exist.
            To create a file set from a path that may not exist, use `lib.fileset.maybeMissing`.''
@@ -201,22 +199,21 @@ rec {
  # Coerce many values to filesets, erroring when any value cannot be coerced,
  # or if the filesystem root of the values doesn't match.
  # Type: String -> [ { context :: String, value :: fileset | Path } ] -> [ fileset ]
  _coerceMany =
    functionContext: list:
    let
      filesets = map ({ context, value }: _coerce "${functionContext}: ${context}" value) list;

      # Find the first value with a base, there may be none!
      firstWithBase = findFirst (fileset: !fileset._internalIsEmptyWithoutBase) null filesets;
      # This value is only accessed if first != null
      firstBaseRoot = firstWithBase._internalBaseRoot;

      # Finds the first element with a filesystem root different than the first element, if any
      differentIndex = findFirstIndex (
        fileset:
        # The empty value without a base doesn't have a base path
        !fileset._internalIsEmptyWithoutBase && firstBaseRoot != fileset._internalBaseRoot
      ) null filesets;
    in
    # Only evaluates `differentIndex` if there are any elements with a base
@@ -231,7 +228,8 @@ rec {

  # Create a file set from a path.
  # Type: Path -> fileset
  _singleton =
    path:
    let
      type = pathType path;
    in
@@ -244,21 +242,18 @@ rec {
      #   "default.nix" = <type>;
      # }
      # See ./README.md#single-files
      _create (dirOf path) { ${baseNameOf path} = type; };

  # Expand a directory representation to an equivalent one in attribute set form.
  # All directory entries are included in the result.
  # Type: Path -> filesetTree -> { <name> = filesetTree; }
  _directoryEntries =
    path: value:
    if value == "directory" then
      readDir path
    else
      # Set all entries not present to null
      mapAttrs (name: value: null) (readDir path) // value;

  /*
    A normalisation of a filesetTree suitable filtering with `builtins.path`:
@@ -271,7 +266,8 @@ rec {

    Type: Path -> filesetTree -> filesetTree
  */
  _normaliseTreeFilter =
    path: tree:
    if tree == "directory" || isAttrs tree then
      let
        entries = _directoryEntries path tree;
@@ -301,7 +297,8 @@ rec {

    Type: Path -> filesetTree -> filesetTree (with "emptyDir"'s)
  */
  _normaliseTreeMinimal =
    path: tree:
    if tree == "directory" || isAttrs tree then
      let
        entries = _directoryEntries path tree;
@@ -334,9 +331,11 @@ rec {
  # Trace a filesetTree in a pretty way when the resulting value is evaluated.
  # This can handle both normal filesetTree's, and ones returned from _normaliseTreeMinimal
  # Type: Path -> filesetTree (with "emptyDir"'s) -> Null
  _printMinimalTree =
    base: tree:
    let
      treeSuffix =
        tree:
        if isAttrs tree then
          ""
        else if tree == "directory" then
@@ -349,14 +348,15 @@ rec {
          " (${tree})";

      # Only for attribute set trees
      traceTreeAttrs =
        prevLine: indent: tree:
        foldl' (
          prevLine: name:
          let
            subtree = tree.${name};

            # Evaluating this prints the line for this subtree
            thisLine = trace "${indent}- ${name}${treeSuffix subtree}" prevLine;
          in
          if subtree == null || subtree == "emptyDir" then
            # Don't print anything at all if this subtree is empty
@@ -378,24 +378,24 @@ rec {
        else
          trace "${toString base}${treeSuffix tree}" null;
    in
    if isAttrs tree then traceTreeAttrs firstLine "" tree else firstLine;

  # Pretty-print a file set in a pretty way when the resulting value is evaluated
  # Type: fileset -> Null
  _printFileset =
    fileset:
    if fileset._internalIsEmptyWithoutBase then
      trace "(empty)" null
    else
      _printMinimalTree fileset._internalBase (
        _normaliseTreeMinimal fileset._internalBase fileset._internalTree
      );
  # Turn a fileset into a source filter function suitable for `builtins.path`
  # Only directories recursively containing at least one files are recursed into
  # Type: fileset -> (String -> String -> Bool)
  _toSourceFilter =
    fileset:
    let
      # Simplify the tree, necessary to make sure all empty directories are null
      # which has the effect that they aren't included in the result
@@ -403,7 +403,7 @@ rec {

      # The base path as a string with a single trailing slash
      baseString =
        if fileset._internalBaseComponents == [ ] then
          # Need to handle the filesystem root specially
          "/"
        else
@@ -414,9 +414,11 @@ rec {
      # Check whether a list of path components under the base path exists in the tree.
      # This function is called often, so it should be fast.
      # Type: [ String ] -> Bool
      inTree =
        components:
        let
          recurse =
            index: localTree:
            if isAttrs localTree then
              # We have an attribute set, meaning this is a directory with at least one file
              if index >= length components then
@@ -431,7 +433,8 @@ rec {
            # If it's not an attribute set it can only be either null (in which case it's not included)
            # or a string ("directory" or "regular", etc.) in which case it's included
            localTree != null;
        in
        recurse 0 tree;

      # Filter suited when there's no files
      empty = _: _: false;
@@ -483,16 +486,14 @@ rec {
      # Special case because the code below assumes that the _internalBase is always included in the result
      # which shouldn't be done when we have no files at all in the base
      # This also forces the tree before returning the filter, leads to earlier error messages
      if fileset._internalIsEmptyWithoutBase || tree == null then empty else nonEmpty;
  # Turn a builtins.filterSource-based source filter on a root path into a file set
  # containing only files included by the filter.
  # The filter is lazily called as necessary to determine whether paths are included
  # Type: Path -> (String -> String -> Bool) -> fileset
  _fromSourceFilter =
    root: sourceFilter:
    let
      # During the recursion we need to track both:
      # - The path value such that we can safely call `readDir` on it
@@ -503,9 +504,10 @@ rec {
      # which is a fairly expensive operation

      # Create a file set from a directory entry
      fromDirEntry =
        path: pathString: type:
        # The filter needs to run on the path as a string
        if !sourceFilter pathString type then
          null
        else if type == "directory" then
          fromDir path pathString
@@ -513,7 +515,8 @@ rec {
          type;

      # Create a file set from a directory
      fromDir =
        path: pathString:
        mapAttrs
          # This looks a bit funny, but we need both the path-based and the path string-based values
          (name: fromDirEntry (path + "/${name}") (pathString + "/${name}"))
@@ -536,20 +539,17 @@ rec {
      else
        # Direct files are always included by builtins.path without calling the filter
        # But we need to lift up the base path to its parent to satisfy the base path invariant
        _create (dirOf root) { ${baseNameOf root} = rootPathType; };
  # Turns a file set into the list of file paths it includes.
  # Type: fileset -> [ Path ]
  _toList =
    fileset:
    let
      recurse =
        path: tree:
        if isAttrs tree then
          concatLists (mapAttrsToList (name: value: recurse (path + "/${name}") value) tree)
        else if tree == "directory" then
          recurse path (readDir path)
        else if tree == null then
@@ -565,9 +565,11 @@ rec {
  # Transforms the filesetTree of a file set to a shorter base path, e.g.
  # _shortenTreeBase [ "foo" ] (_create /foo/bar null)
  # => { bar = null; }
  _shortenTreeBase =
    targetBaseComponents: fileset:
    let
      recurse =
        index:
        # If we haven't reached the required depth yet
        if index < length fileset._internalBaseComponents then
          # Create an attribute set and recurse as the value, this can be lazily evaluated this way
@@ -581,9 +583,11 @@ rec {
  # Transforms the filesetTree of a file set to a longer base path, e.g.
  # _lengthenTreeBase [ "foo" "bar" ] (_create /foo { bar.baz = "regular"; })
  # => { baz = "regular"; }
  _lengthenTreeBase =
    targetBaseComponents: fileset:
    let
      recurse =
        index: tree:
        # If the filesetTree is an attribute set and we haven't reached the required depth yet
        if isAttrs tree && index < length targetBaseComponents then
          # Recurse with the tree under the right component (which might not exist)
@@ -602,10 +606,11 @@ rec {
  # Computes the union of a list of filesets.
  # The filesets must already be coerced and validated to be in the same filesystem root
  # Type: [ Fileset ] -> Fileset
  _unionMany =
    filesets:
    let
      # All filesets that have a base, aka not the ones that are the empty value without a base
      filesetsWithBase = filter (fileset: !fileset._internalIsEmptyWithoutBase) filesets;

      # The first fileset that has a base.
      # This value is only accessed if there are at all.
@@ -618,8 +623,8 @@ rec {
      # A list of path components common to all base paths.
      # Note that commonPrefix can only be fully evaluated,
      # so this cannot cause a stack overflow due to a build-up of unevaluated thunks.
      commonBaseComponents =
        foldl' (components: el: commonPrefix components el._internalBaseComponents)
          firstWithBase._internalBaseComponents
          # We could also not do the `tail` here to avoid a list allocation,
          # but then we'd have to pay for a potentially expensive
@@ -643,15 +648,13 @@ rec {
      resultTree = _unionTrees trees;
    in
    # If there's no values with a base, we have no files
    if filesetsWithBase == [ ] then _emptyWithoutBase else _create commonBase resultTree;
# The union of multiple filesetTree's with the same base path.
|
# The union of multiple filesetTree's with the same base path.
|
||||||
# Later elements are only evaluated if necessary.
|
# Later elements are only evaluated if necessary.
|
||||||
# Type: [ filesetTree ] -> filesetTree
|
# Type: [ filesetTree ] -> filesetTree
|
||||||
_unionTrees = trees:
|
_unionTrees =
|
||||||
|
trees:
|
||||||
let
|
let
|
||||||
stringIndex = findFirstIndex isString null trees;
|
stringIndex = findFirstIndex isString null trees;
|
||||||
withoutNull = filter (tree: tree != null) trees;
|
withoutNull = filter (tree: tree != null) trees;
|
||||||
|
@ -671,18 +674,15 @@ rec {
|
||||||
# Computes the intersection of a list of filesets.
|
# Computes the intersection of a list of filesets.
|
||||||
# The filesets must already be coerced and validated to be in the same filesystem root
|
# The filesets must already be coerced and validated to be in the same filesystem root
|
||||||
# Type: Fileset -> Fileset -> Fileset
|
# Type: Fileset -> Fileset -> Fileset
|
||||||
_intersection = fileset1: fileset2:
|
_intersection =
|
||||||
|
fileset1: fileset2:
|
||||||
let
|
let
|
||||||
# The common base components prefix, e.g.
|
# The common base components prefix, e.g.
|
||||||
# (/foo/bar, /foo/bar/baz) -> /foo/bar
|
# (/foo/bar, /foo/bar/baz) -> /foo/bar
|
||||||
# (/foo/bar, /foo/baz) -> /foo
|
# (/foo/bar, /foo/baz) -> /foo
|
||||||
commonBaseComponentsLength =
|
commonBaseComponentsLength =
|
||||||
# TODO: Have a `lib.lists.commonPrefixLength` function such that we don't need the list allocation from commonPrefix here
|
# TODO: Have a `lib.lists.commonPrefixLength` function such that we don't need the list allocation from commonPrefix here
|
||||||
length (
|
length (commonPrefix fileset1._internalBaseComponents fileset2._internalBaseComponents);
|
||||||
commonPrefix
|
|
||||||
fileset1._internalBaseComponents
|
|
||||||
fileset2._internalBaseComponents
|
|
||||||
);
|
|
||||||
|
|
||||||
# To be able to intersect filesetTree's together, they need to have the same base path.
|
# To be able to intersect filesetTree's together, they need to have the same base path.
|
||||||
# Base paths can be intersected by taking the longest one (if any)
|
# Base paths can be intersected by taking the longest one (if any)
|
||||||
|
@ -725,12 +725,11 @@ rec {
|
||||||
# The intersection of two filesetTree's with the same base path
|
# The intersection of two filesetTree's with the same base path
|
||||||
# The second element is only evaluated as much as necessary.
|
# The second element is only evaluated as much as necessary.
|
||||||
# Type: filesetTree -> filesetTree -> filesetTree
|
# Type: filesetTree -> filesetTree -> filesetTree
|
||||||
_intersectTree = lhs: rhs:
|
_intersectTree =
|
||||||
|
lhs: rhs:
|
||||||
if isAttrs lhs && isAttrs rhs then
|
if isAttrs lhs && isAttrs rhs then
|
||||||
# Both sides are attribute sets, we can recurse for the attributes existing on both sides
|
# Both sides are attribute sets, we can recurse for the attributes existing on both sides
|
||||||
mapAttrs
|
mapAttrs (name: _intersectTree lhs.${name}) (builtins.intersectAttrs lhs rhs)
|
||||||
(name: _intersectTree lhs.${name})
|
|
||||||
(builtins.intersectAttrs lhs rhs)
|
|
||||||
else if lhs == null || isString rhs then
|
else if lhs == null || isString rhs then
|
||||||
# If the lhs is null, the result should also be null
|
# If the lhs is null, the result should also be null
|
||||||
# And if the rhs is the identity element
|
# And if the rhs is the identity element
|
||||||
|
@ -743,18 +742,15 @@ rec {
|
||||||
# Compute the set difference between two file sets.
|
# Compute the set difference between two file sets.
|
||||||
# The filesets must already be coerced and validated to be in the same filesystem root.
|
# The filesets must already be coerced and validated to be in the same filesystem root.
|
||||||
# Type: Fileset -> Fileset -> Fileset
|
# Type: Fileset -> Fileset -> Fileset
|
||||||
_difference = positive: negative:
|
_difference =
|
||||||
|
positive: negative:
|
||||||
let
|
let
|
||||||
# The common base components prefix, e.g.
|
# The common base components prefix, e.g.
|
||||||
# (/foo/bar, /foo/bar/baz) -> /foo/bar
|
# (/foo/bar, /foo/bar/baz) -> /foo/bar
|
||||||
# (/foo/bar, /foo/baz) -> /foo
|
# (/foo/bar, /foo/baz) -> /foo
|
||||||
commonBaseComponentsLength =
|
commonBaseComponentsLength =
|
||||||
# TODO: Have a `lib.lists.commonPrefixLength` function such that we don't need the list allocation from commonPrefix here
|
# TODO: Have a `lib.lists.commonPrefixLength` function such that we don't need the list allocation from commonPrefix here
|
||||||
length (
|
length (commonPrefix positive._internalBaseComponents negative._internalBaseComponents);
|
||||||
commonPrefix
|
|
||||||
positive._internalBaseComponents
|
|
||||||
negative._internalBaseComponents
|
|
||||||
);
|
|
||||||
|
|
||||||
# We need filesetTree's with the same base to be able to compute the difference between them
|
# We need filesetTree's with the same base to be able to compute the difference between them
|
||||||
# This here is the filesetTree from the negative file set, but for a base path that matches the positive file set.
|
# This here is the filesetTree from the negative file set, but for a base path that matches the positive file set.
|
||||||
|
@ -786,9 +782,7 @@ rec {
|
||||||
null;
|
null;
|
||||||
|
|
||||||
resultingTree =
|
resultingTree =
|
||||||
_differenceTree
|
_differenceTree positive._internalBase positive._internalTree
|
||||||
positive._internalBase
|
|
||||||
positive._internalTree
|
|
||||||
negativeTreeWithPositiveBase;
|
negativeTreeWithPositiveBase;
|
||||||
in
|
in
|
||||||
# If the first file set is empty, we can never have any files in the result
|
# If the first file set is empty, we can never have any files in the result
|
||||||
|
@ -805,7 +799,8 @@ rec {
|
||||||
|
|
||||||
# Computes the set difference of two filesetTree's
|
# Computes the set difference of two filesetTree's
|
||||||
# Type: Path -> filesetTree -> filesetTree
|
# Type: Path -> filesetTree -> filesetTree
|
||||||
_differenceTree = path: lhs: rhs:
|
_differenceTree =
|
||||||
|
path: lhs: rhs:
|
||||||
# If the lhs doesn't have any files, or the right hand side includes all files
|
# If the lhs doesn't have any files, or the right hand side includes all files
|
||||||
if lhs == null || isString rhs then
|
if lhs == null || isString rhs then
|
||||||
# The result will always be empty
|
# The result will always be empty
|
||||||
|
@ -816,17 +811,19 @@ rec {
|
||||||
lhs
|
lhs
|
||||||
else
|
else
|
||||||
# Otherwise we always have two attribute sets to recurse into
|
# Otherwise we always have two attribute sets to recurse into
|
||||||
mapAttrs (name: lhsValue:
|
mapAttrs (name: lhsValue: _differenceTree (path + "/${name}") lhsValue (rhs.${name} or null)) (
|
||||||
_differenceTree (path + "/${name}") lhsValue (rhs.${name} or null)
|
_directoryEntries path lhs
|
||||||
) (_directoryEntries path lhs);
|
);
|
||||||
|
|
||||||
# Filters all files in a path based on a predicate
|
# Filters all files in a path based on a predicate
|
||||||
# Type: ({ name, type, ... } -> Bool) -> Path -> FileSet
|
# Type: ({ name, type, ... } -> Bool) -> Path -> FileSet
|
||||||
_fileFilter = predicate: root:
|
_fileFilter =
|
||||||
|
predicate: root:
|
||||||
let
|
let
|
||||||
# Check the predicate for a single file
|
# Check the predicate for a single file
|
||||||
# Type: String -> String -> filesetTree
|
# Type: String -> String -> filesetTree
|
||||||
fromFile = name: type:
|
fromFile =
|
||||||
|
name: type:
|
||||||
if
|
if
|
||||||
predicate {
|
predicate {
|
||||||
inherit name type;
|
inherit name type;
|
||||||
|
@ -834,7 +831,8 @@ rec {
|
||||||
|
|
||||||
# To ensure forwards compatibility with more arguments being added in the future,
|
# To ensure forwards compatibility with more arguments being added in the future,
|
||||||
# adding an attribute which can't be deconstructed :)
|
# adding an attribute which can't be deconstructed :)
|
||||||
"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you're using `{ name, file, hasExt }:`, use `{ name, file, hasExt, ... }:` instead." = null;
|
"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you're using `{ name, file, hasExt }:`, use `{ name, file, hasExt, ... }:` instead." =
|
||||||
|
null;
|
||||||
}
|
}
|
||||||
then
|
then
|
||||||
type
|
type
|
||||||
|
@ -843,12 +841,10 @@ rec {
|
||||||
|
|
||||||
# Check the predicate for all files in a directory
|
# Check the predicate for all files in a directory
|
||||||
# Type: Path -> filesetTree
|
# Type: Path -> filesetTree
|
||||||
fromDir = path:
|
fromDir =
|
||||||
mapAttrs (name: type:
|
path:
|
||||||
if type == "directory" then
|
mapAttrs (
|
||||||
fromDir (path + "/${name}")
|
name: type: if type == "directory" then fromDir (path + "/${name}") else fromFile name type
|
||||||
else
|
|
||||||
fromFile name type
|
|
||||||
) (readDir path);
|
) (readDir path);
|
||||||
|
|
||||||
rootType = pathType root;
|
rootType = pathType root;
|
||||||
|
@ -857,10 +853,7 @@ rec {
|
||||||
_create root (fromDir root)
|
_create root (fromDir root)
|
||||||
else
|
else
|
||||||
# Single files are turned into a directory containing that file or nothing.
|
# Single files are turned into a directory containing that file or nothing.
|
||||||
_create (dirOf root) {
|
_create (dirOf root) { ${baseNameOf root} = fromFile (baseNameOf root) rootType; };
|
||||||
${baseNameOf root} =
|
|
||||||
fromFile (baseNameOf root) rootType;
|
|
||||||
};
|
|
||||||
|
|
||||||
# Support for `builtins.fetchGit` with `submodules = true` was introduced in 2.4
|
# Support for `builtins.fetchGit` with `submodules = true` was introduced in 2.4
|
||||||
# https://github.com/NixOS/nix/commit/55cefd41d63368d4286568e2956afd535cb44018
|
# https://github.com/NixOS/nix/commit/55cefd41d63368d4286568e2956afd535cb44018
|
||||||
|
@ -876,22 +869,21 @@ rec {
|
||||||
# - The store path must not include files that don't exist in the respective local path.
|
# - The store path must not include files that don't exist in the respective local path.
|
||||||
#
|
#
|
||||||
# Type: Path -> String -> FileSet
|
# Type: Path -> String -> FileSet
|
||||||
_mirrorStorePath = localPath: storePath:
|
_mirrorStorePath =
|
||||||
|
localPath: storePath:
|
||||||
let
|
let
|
||||||
recurse = focusedStorePath:
|
recurse =
|
||||||
mapAttrs (name: type:
|
focusedStorePath:
|
||||||
if type == "directory" then
|
mapAttrs (
|
||||||
recurse (focusedStorePath + "/${name}")
|
name: type: if type == "directory" then recurse (focusedStorePath + "/${name}") else type
|
||||||
else
|
|
||||||
type
|
|
||||||
) (builtins.readDir focusedStorePath);
|
) (builtins.readDir focusedStorePath);
|
||||||
in
|
in
|
||||||
_create localPath
|
_create localPath (recurse storePath);
|
||||||
(recurse storePath);
|
|
||||||
|
|
||||||
# Create a file set from the files included in the result of a fetchGit call
|
# Create a file set from the files included in the result of a fetchGit call
|
||||||
# Type: String -> String -> Path -> Attrs -> FileSet
|
# Type: String -> String -> Path -> Attrs -> FileSet
|
||||||
_fromFetchGit = function: argument: path: extraFetchGitAttrs:
|
_fromFetchGit =
|
||||||
|
function: argument: path: extraFetchGitAttrs:
|
||||||
let
|
let
|
||||||
# The code path for when isStorePath is true
|
# The code path for when isStorePath is true
|
||||||
tryStorePath =
|
tryStorePath =
|
||||||
|
@ -922,7 +914,8 @@ rec {
|
||||||
# With the [lazy trees PR](https://github.com/NixOS/nix/pull/6530),
|
# With the [lazy trees PR](https://github.com/NixOS/nix/pull/6530),
|
||||||
# the unnecessarily import could be avoided.
|
# the unnecessarily import could be avoided.
|
||||||
# However a simpler alternative still would be [a builtins.gitLsFiles](https://github.com/NixOS/nix/issues/2944).
|
# However a simpler alternative still would be [a builtins.gitLsFiles](https://github.com/NixOS/nix/issues/2944).
|
||||||
fetchResult = fetchGit ({
|
fetchResult = fetchGit (
|
||||||
|
{
|
||||||
url = path;
|
url = path;
|
||||||
}
|
}
|
||||||
# In older Nix versions, repositories were always assumed to be deep clones, which made `fetchGit` fail for shallow clones
|
# In older Nix versions, repositories were always assumed to be deep clones, which made `fetchGit` fail for shallow clones
|
||||||
|
@ -934,19 +927,20 @@ rec {
|
||||||
# Checking for `.git/shallow` doesn't seem worth it, especially since that's more of an implementation detail,
|
# Checking for `.git/shallow` doesn't seem worth it, especially since that's more of an implementation detail,
|
||||||
# and would also require more code to handle worktrees where `.git` is a file.
|
# and would also require more code to handle worktrees where `.git` is a file.
|
||||||
// optionalAttrs (versionAtLeast nixVersion _fetchGitShallowMinver) { shallow = true; }
|
// optionalAttrs (versionAtLeast nixVersion _fetchGitShallowMinver) { shallow = true; }
|
||||||
// extraFetchGitAttrs);
|
// extraFetchGitAttrs
|
||||||
|
);
|
||||||
in
|
in
|
||||||
# We can identify local working directories by checking for .git,
|
# We can identify local working directories by checking for .git,
|
||||||
# see https://git-scm.com/docs/gitrepository-layout#_description.
|
# see https://git-scm.com/docs/gitrepository-layout#_description.
|
||||||
# Note that `builtins.fetchGit` _does_ work for bare repositories (where there's no `.git`),
|
# Note that `builtins.fetchGit` _does_ work for bare repositories (where there's no `.git`),
|
||||||
# even though `git ls-files` wouldn't return any files in that case.
|
# even though `git ls-files` wouldn't return any files in that case.
|
||||||
if ! pathExists (path + "/.git") then
|
if !pathExists (path + "/.git") then
|
||||||
throw "lib.fileset.${function}: Expected the ${argument} (${toString path}) to point to a local working tree of a Git repository, but it's not."
|
throw "lib.fileset.${function}: Expected the ${argument} (${toString path}) to point to a local working tree of a Git repository, but it's not."
|
||||||
else
|
else
|
||||||
_mirrorStorePath path fetchResult.outPath;
|
_mirrorStorePath path fetchResult.outPath;
|
||||||
|
|
||||||
in
|
in
|
||||||
if ! isPath path then
|
if !isPath path then
|
||||||
throw "lib.fileset.${function}: Expected the ${argument} to be a path, but it's a ${typeOf path} instead."
|
throw "lib.fileset.${function}: Expected the ${argument} to be a path, but it's a ${typeOf path} instead."
|
||||||
else if pathType path != "directory" then
|
else if pathType path != "directory" then
|
||||||
throw "lib.fileset.${function}: Expected the ${argument} (${toString path}) to be a directory, but it's a file instead."
|
throw "lib.fileset.${function}: Expected the ${argument} (${toString path}) to be a directory, but it's a file instead."
|
||||||
|
|
|
@ -8,18 +8,21 @@
|
||||||
# }
|
# }
|
||||||
self: super: {
|
self: super: {
|
||||||
path = super.path // {
|
path = super.path // {
|
||||||
splitRoot = path:
|
splitRoot =
|
||||||
|
path:
|
||||||
let
|
let
|
||||||
parts = super.path.splitRoot path;
|
parts = super.path.splitRoot path;
|
||||||
components = self.path.subpath.components parts.subpath;
|
components = self.path.subpath.components parts.subpath;
|
||||||
count = self.length components;
|
count = self.length components;
|
||||||
rootIndex = count - self.lists.findFirstIndex
|
rootIndex =
|
||||||
(component: component == "mock-root")
|
count
|
||||||
(self.length components)
|
- self.lists.findFirstIndex (component: component == "mock-root") (self.length components) (
|
||||||
(self.reverseList components);
|
self.reverseList components
|
||||||
|
);
|
||||||
root = self.path.append parts.root (self.path.subpath.join (self.take rootIndex components));
|
root = self.path.append parts.root (self.path.subpath.join (self.take rootIndex components));
|
||||||
subpath = self.path.subpath.join (self.drop rootIndex components);
|
subpath = self.path.subpath.join (self.drop rootIndex components);
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
inherit root subpath;
|
inherit root subpath;
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
|
@ -6,25 +6,13 @@
|
||||||
|
|
||||||
# Tested in lib/tests/filesystem.sh
|
# Tested in lib/tests/filesystem.sh
|
||||||
let
|
let
|
||||||
inherit (builtins)
|
inherit (builtins) readDir pathExists toString;
|
||||||
readDir
|
|
||||||
pathExists
|
|
||||||
toString
|
|
||||||
;
|
|
||||||
|
|
||||||
inherit (lib.attrsets)
|
inherit (lib.attrsets) mapAttrs' filterAttrs;
|
||||||
mapAttrs'
|
|
||||||
filterAttrs
|
|
||||||
;
|
|
||||||
|
|
||||||
inherit (lib.filesystem)
|
inherit (lib.filesystem) pathType;
|
||||||
pathType
|
|
||||||
;
|
|
||||||
|
|
||||||
inherit (lib.strings)
|
inherit (lib.strings) hasSuffix removeSuffix;
|
||||||
hasSuffix
|
|
||||||
removeSuffix
|
|
||||||
;
|
|
||||||
in
|
in
|
||||||
|
|
||||||
{
|
{
|
||||||
|
@ -46,17 +34,21 @@ in
|
||||||
pathType =
|
pathType =
|
||||||
builtins.readFileType or
|
builtins.readFileType or
|
||||||
# Nix <2.14 compatibility shim
|
# Nix <2.14 compatibility shim
|
||||||
(path:
|
(
|
||||||
if ! pathExists path
|
path:
|
||||||
|
if
|
||||||
|
!pathExists path
|
||||||
# Fail irrecoverably to mimic the historic behavior of this function and
|
# Fail irrecoverably to mimic the historic behavior of this function and
|
||||||
# the new builtins.readFileType
|
# the new builtins.readFileType
|
||||||
then abort "lib.filesystem.pathType: Path ${toString path} does not exist."
|
then
|
||||||
|
abort "lib.filesystem.pathType: Path ${toString path} does not exist."
|
||||||
# The filesystem root is the only path where `dirOf / == /` and
|
# The filesystem root is the only path where `dirOf / == /` and
|
||||||
# `baseNameOf /` is not valid. We can detect this and directly return
|
# `baseNameOf /` is not valid. We can detect this and directly return
|
||||||
# "directory", since we know the filesystem root can't be anything else.
|
# "directory", since we know the filesystem root can't be anything else.
|
||||||
else if dirOf path == path
|
else if dirOf path == path then
|
||||||
then "directory"
|
"directory"
|
||||||
else (readDir (dirOf path)).${baseNameOf path}
|
else
|
||||||
|
(readDir (dirOf path)).${baseNameOf path}
|
||||||
);
|
);
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
@ -75,8 +67,7 @@ in
|
||||||
pathIsDirectory /some/file.nix
|
pathIsDirectory /some/file.nix
|
||||||
=> false
|
=> false
|
||||||
*/
|
*/
|
||||||
pathIsDirectory = path:
|
pathIsDirectory = path: pathExists path && pathType path == "directory";
|
||||||
pathExists path && pathType path == "directory";
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Whether a path exists and is a regular file, meaning not a symlink or any other special file type.
|
Whether a path exists and is a regular file, meaning not a symlink or any other special file type.
|
||||||
|
@ -94,8 +85,7 @@ in
|
||||||
pathIsRegularFile /some/file.nix
|
pathIsRegularFile /some/file.nix
|
||||||
=> true
|
=> true
|
||||||
*/
|
*/
|
||||||
pathIsRegularFile = path:
|
pathIsRegularFile = path: pathExists path && pathType path == "regular";
|
||||||
pathExists path && pathType path == "regular";
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
A map of all haskell packages defined in the given path,
|
A map of all haskell packages defined in the given path,
|
||||||
|
@ -107,19 +97,20 @@ in
|
||||||
haskellPathsInDir =
|
haskellPathsInDir =
|
||||||
# The directory within to search
|
# The directory within to search
|
||||||
root:
|
root:
|
||||||
let # Files in the root
|
let
|
||||||
|
# Files in the root
|
||||||
root-files = builtins.attrNames (builtins.readDir root);
|
root-files = builtins.attrNames (builtins.readDir root);
|
||||||
# Files with their full paths
|
# Files with their full paths
|
||||||
root-files-with-paths =
|
root-files-with-paths = map (file: {
|
||||||
map (file:
|
name = file;
|
||||||
{ name = file; value = root + "/${file}"; }
|
value = root + "/${file}";
|
||||||
) root-files;
|
}) root-files;
|
||||||
# Subdirectories of the root with a cabal file.
|
# Subdirectories of the root with a cabal file.
|
||||||
cabal-subdirs =
|
cabal-subdirs = builtins.filter (
|
||||||
builtins.filter ({ name, value }:
|
{ name, value }: builtins.pathExists (value + "/${name}.cabal")
|
||||||
builtins.pathExists (value + "/${name}.cabal")
|
|
||||||
) root-files-with-paths;
|
) root-files-with-paths;
|
||||||
in builtins.listToAttrs cabal-subdirs;
|
in
|
||||||
|
builtins.listToAttrs cabal-subdirs;
|
||||||
/*
|
/*
|
||||||
Find the first directory containing a file matching 'pattern'
|
Find the first directory containing a file matching 'pattern'
|
||||||
upward from a given 'file'.
|
upward from a given 'file'.
|
||||||
|
@ -132,23 +123,28 @@ in
|
||||||
pattern:
|
pattern:
|
||||||
# The file to start searching upward from
|
# The file to start searching upward from
|
||||||
file:
|
file:
|
||||||
let go = path:
|
let
|
||||||
let files = builtins.attrNames (builtins.readDir path);
|
go =
|
||||||
matches = builtins.filter (match: match != null)
|
path:
|
||||||
(map (builtins.match pattern) files);
|
let
|
||||||
|
files = builtins.attrNames (builtins.readDir path);
|
||||||
|
matches = builtins.filter (match: match != null) (map (builtins.match pattern) files);
|
||||||
in
|
in
|
||||||
if builtins.length matches != 0
|
if builtins.length matches != 0 then
|
||||||
then { inherit path matches; }
|
{ inherit path matches; }
|
||||||
else if path == /.
|
else if path == /. then
|
||||||
then null
|
null
|
||||||
else go (dirOf path);
|
else
|
||||||
|
go (dirOf path);
|
||||||
parent = dirOf file;
|
parent = dirOf file;
|
||||||
isDir =
|
isDir =
|
||||||
let base = baseNameOf file;
|
let
|
||||||
|
base = baseNameOf file;
|
||||||
type = (builtins.readDir parent).${base} or null;
|
type = (builtins.readDir parent).${base} or null;
|
||||||
in file == /. || type == "directory";
|
in
|
||||||
in go (if isDir then file else parent);
|
file == /. || type == "directory";
|
||||||
|
in
|
||||||
|
go (if isDir then file else parent);
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Given a directory, return a flattened list of all files within it recursively.
|
Given a directory, return a flattened list of all files within it recursively.
|
||||||
|
@ -158,12 +154,15 @@ in
|
||||||
listFilesRecursive =
|
listFilesRecursive =
|
||||||
# The path to recursively list
|
# The path to recursively list
|
||||||
dir:
|
dir:
|
||||||
lib.flatten (lib.mapAttrsToList (name: type:
|
lib.flatten (
|
||||||
|
lib.mapAttrsToList (
|
||||||
|
name: type:
|
||||||
if type == "directory" then
|
if type == "directory" then
|
||||||
lib.filesystem.listFilesRecursive (dir + "/${name}")
|
lib.filesystem.listFilesRecursive (dir + "/${name}")
|
||||||
else
|
else
|
||||||
dir + "/${name}"
|
dir + "/${name}"
|
||||||
) (builtins.readDir dir));
|
) (builtins.readDir dir)
|
||||||
|
);
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Transform a directory tree containing package files suitable for
|
Transform a directory tree containing package files suitable for
|
||||||
|
@ -263,49 +262,44 @@ in
|
||||||
let
|
let
|
||||||
# Determine if a directory entry from `readDir` indicates a package or
|
# Determine if a directory entry from `readDir` indicates a package or
|
||||||
# directory of packages.
|
# directory of packages.
|
||||||
directoryEntryIsPackage = basename: type:
|
directoryEntryIsPackage = basename: type: type == "directory" || hasSuffix ".nix" basename;
|
||||||
type == "directory" || hasSuffix ".nix" basename;
|
|
||||||
|
|
||||||
# List directory entries that indicate packages in the given `path`.
|
# List directory entries that indicate packages in the given `path`.
|
||||||
packageDirectoryEntries = path:
|
packageDirectoryEntries = path: filterAttrs directoryEntryIsPackage (readDir path);
|
||||||
filterAttrs directoryEntryIsPackage (readDir path);
|
|
||||||
|
|
||||||
# Transform a directory entry (a `basename` and `type` pair) into a
|
# Transform a directory entry (a `basename` and `type` pair) into a
|
||||||
# package.
|
# package.
|
||||||
directoryEntryToAttrPair = subdirectory: basename: type:
|
directoryEntryToAttrPair =
|
||||||
|
subdirectory: basename: type:
|
||||||
let
|
let
|
||||||
path = subdirectory + "/${basename}";
|
path = subdirectory + "/${basename}";
|
||||||
in
|
in
|
||||||
if type == "regular"
|
if type == "regular" then
|
||||||
then
|
|
||||||
{
|
{
|
||||||
name = removeSuffix ".nix" basename;
|
name = removeSuffix ".nix" basename;
|
||||||
value = callPackage path { };
|
value = callPackage path { };
|
||||||
}
|
}
|
||||||
else
|
else if type == "directory" then
|
||||||
if type == "directory"
|
|
||||||
then
|
|
||||||
{
|
{
|
||||||
name = basename;
|
name = basename;
|
||||||
value = packagesFromDirectory path;
|
value = packagesFromDirectory path;
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
throw
|
throw ''
|
||||||
''
|
|
||||||
lib.filesystem.packagesFromDirectoryRecursive: Unsupported file type ${type} at path ${toString subdirectory}
|
lib.filesystem.packagesFromDirectoryRecursive: Unsupported file type ${type} at path ${toString subdirectory}
|
||||||
'';
|
'';
|
||||||
|
|
||||||
# Transform a directory into a package (if there's a `package.nix`) or
|
# Transform a directory into a package (if there's a `package.nix`) or
|
||||||
# set of packages (otherwise).
|
# set of packages (otherwise).
|
||||||
packagesFromDirectory = path:
|
packagesFromDirectory =
|
||||||
|
path:
|
||||||
let
|
let
|
||||||
defaultPackagePath = path + "/package.nix";
|
defaultPackagePath = path + "/package.nix";
|
||||||
in
|
in
|
||||||
if pathExists defaultPackagePath
|
if pathExists defaultPackagePath then
|
||||||
then callPackage defaultPackagePath { }
|
callPackage defaultPackagePath { }
|
||||||
else mapAttrs'
|
else
|
||||||
(directoryEntryToAttrPair path)
|
mapAttrs' (directoryEntryToAttrPair path) (packageDirectoryEntries path);
|
||||||
(packageDirectoryEntries path);
|
|
||||||
in
|
in
|
||||||
packagesFromDirectory directory;
|
packagesFromDirectory directory;
|
||||||
}
|
}
|
||||||
|
|
|
@ -72,7 +72,12 @@ rec {
|
||||||
fix (self: [ 1 2 (elemAt self 0 + elemAt self 1) ])
|
fix (self: [ 1 2 (elemAt self 0 + elemAt self 1) ])
|
||||||
=> [ 1 2 3 ]
|
=> [ 1 2 3 ]
|
||||||
*/
|
*/
|
||||||
fix = f: let x = f x; in x;
|
fix =
|
||||||
|
f:
|
||||||
|
let
|
||||||
|
x = f x;
|
||||||
|
in
|
||||||
|
x;
|
||||||
|
|
||||||
/*
|
/*
|
||||||
A variant of `fix` that records the original recursive attribute set in the
|
A variant of `fix` that records the original recursive attribute set in the
|
||||||
|
@ -81,7 +86,14 @@ rec {
|
||||||
This is useful in combination with the `extends` function to
|
This is useful in combination with the `extends` function to
|
||||||
implement deep overriding.
|
implement deep overriding.
|
||||||
*/
|
*/
|
||||||
fix' = f: let x = f x // { __unfix__ = f; }; in x;
|
fix' =
|
||||||
|
f:
|
||||||
|
let
|
||||||
|
x = f x // {
|
||||||
|
__unfix__ = f;
|
||||||
|
};
|
||||||
|
in
|
||||||
|
x;
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Return the fixpoint that `f` converges to when called iteratively, starting
|
Return the fixpoint that `f` converges to when called iteratively, starting
|
||||||
|
@ -94,13 +106,12 @@ rec {
|
||||||
|
|
||||||
Type: (a -> a) -> a -> a
|
Type: (a -> a) -> a -> a
|
||||||
*/
|
*/
|
||||||
converge = f: x:
|
converge =
|
||||||
|
f: x:
|
||||||
let
|
let
|
||||||
x' = f x;
|
x' = f x;
|
||||||
in
|
in
|
||||||
if x' == x
|
if x' == x then x else converge f x';
|
||||||
then x
|
|
||||||
else converge f x';
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Extend a function using an overlay.
|
Extend a function using an overlay.
|
||||||
|
@ -109,7 +120,6 @@ rec {
|
||||||
A fixed-point function is a function which is intended to be evaluated by passing the result of itself as the argument.
|
A fixed-point function is a function which is intended to be evaluated by passing the result of itself as the argument.
|
||||||
This is possible due to Nix's lazy evaluation.
|
This is possible due to Nix's lazy evaluation.
|
||||||
|
|
||||||
|
|
||||||
A fixed-point function returning an attribute set has the form
|
A fixed-point function returning an attribute set has the form
|
||||||
|
|
||||||
```nix
|
```nix
|
||||||
|
@ -259,9 +269,11 @@ rec {
|
||||||
*/
|
*/
|
||||||
composeExtensions =
|
composeExtensions =
|
||||||
f: g: final: prev:
|
f: g: final: prev:
|
||||||
let fApplied = f final prev;
|
let
|
||||||
|
fApplied = f final prev;
|
||||||
prev' = prev // fApplied;
|
prev' = prev // fApplied;
|
||||||
in fApplied // g final prev';
|
in
|
||||||
|
fApplied // g final prev';
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Compose several extending functions of the type expected by 'extends' into
|
Compose several extending functions of the type expected by 'extends' into
|
||||||
|
@ -273,8 +285,7 @@ rec {
|
||||||
^final ^prev ^overrides ^final ^prev ^overrides
|
^final ^prev ^overrides ^final ^prev ^overrides
|
||||||
```
|
```
|
||||||
*/
|
*/
|
||||||
composeManyExtensions =
|
composeManyExtensions = lib.foldr (x: y: composeExtensions x y) (final: prev: { });
|
||||||
lib.foldr (x: y: composeExtensions x y) (final: prev: {});
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Create an overridable, recursive attribute set. For example:
|
Create an overridable, recursive attribute set. For example:
|
||||||
|
@ -302,8 +313,13 @@ rec {
|
||||||
Same as `makeExtensible` but the name of the extending attribute is
|
Same as `makeExtensible` but the name of the extending attribute is
|
||||||
customized.
|
customized.
|
||||||
*/
|
*/
|
||||||
makeExtensibleWithCustomName = extenderName: rattrs:
|
makeExtensibleWithCustomName =
|
||||||
fix' (self: (rattrs self) // {
|
extenderName: rattrs:
|
||||||
|
fix' (
|
||||||
|
self:
|
||||||
|
(rattrs self)
|
||||||
|
// {
|
||||||
${extenderName} = f: makeExtensibleWithCustomName extenderName (extends f rattrs);
|
${extenderName} = f: makeExtensibleWithCustomName extenderName (extends f rattrs);
|
||||||
});
|
}
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
|
@ -13,8 +13,9 @@ finalLib: prevLib: # lib overlay
|
||||||
|
|
||||||
{
|
{
|
||||||
trivial = prevLib.trivial // {
|
trivial = prevLib.trivial // {
|
||||||
versionSuffix =
|
versionSuffix = ".${
|
||||||
".${finalLib.substring 0 8 (self.lastModifiedDate or "19700101")}.${self.shortRev or "dirty"}";
|
finalLib.substring 0 8 (self.lastModifiedDate or "19700101")
|
||||||
|
}.${self.shortRev or "dirty"}";
|
||||||
revisionWithDefault = default: self.rev or default;
|
revisionWithDefault = default: self.rev or default;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,10 +1,12 @@
|
||||||
{
|
{
|
||||||
description = "Library of low-level helper functions for nix expressions.";
|
description = "Library of low-level helper functions for nix expressions.";
|
||||||
|
|
||||||
outputs = { self }:
|
outputs =
|
||||||
|
{ self }:
|
||||||
let
|
let
|
||||||
lib0 = import ./.;
|
lib0 = import ./.;
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
lib = lib0.extend (import ./flake-version-info.nix self);
|
lib = lib0.extend (import ./flake-version-info.nix self);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,18 +1,19 @@
|
||||||
/* Functions that generate widespread file
|
/*
|
||||||
* formats from nix data structures.
|
Functions that generate widespread file
|
||||||
*
|
formats from nix data structures.
|
||||||
* They all follow a similar interface:
|
|
||||||
* generator { config-attrs } data
|
They all follow a similar interface:
|
||||||
*
|
generator { config-attrs } data
|
||||||
* `config-attrs` are “holes” in the generators
|
|
||||||
* with sensible default implementations that
|
`config-attrs` are “holes” in the generators
|
||||||
* can be overwritten. The default implementations
|
with sensible default implementations that
|
||||||
* are mostly generators themselves, called with
|
can be overwritten. The default implementations
|
||||||
* their respective default values; they can be reused.
|
are mostly generators themselves, called with
|
||||||
*
|
their respective default values; they can be reused.
|
||||||
* Tests can be found in ./tests/misc.nix
|
|
||||||
* Documentation in the manual, #sec-generators
|
Tests can be found in ./tests/misc.nix
|
||||||
*/
|
Documentation in the manual, #sec-generators
|
||||||
|
*/
|
||||||
{ lib }:
|
{ lib }:
|
||||||
|
|
||||||
let
|
let
|
||||||
|
@ -69,212 +70,277 @@ let
|
||||||
|
|
||||||
## -- HELPER FUNCTIONS & DEFAULTS --
|
## -- HELPER FUNCTIONS & DEFAULTS --
|
||||||
|
|
||||||
/* Convert a value to a sensible default string representation.
|
/*
|
||||||
* The builtin `toString` function has some strange defaults,
|
Convert a value to a sensible default string representation.
|
||||||
* suitable for bash scripts but not much else.
|
The builtin `toString` function has some strange defaults,
|
||||||
|
suitable for bash scripts but not much else.
|
||||||
*/
|
*/
|
||||||
mkValueStringDefault = {}: v:
|
mkValueStringDefault =
|
||||||
let err = t: v: abort
|
{ }:
|
||||||
("generators.mkValueStringDefault: " +
|
v:
|
||||||
"${t} not supported: ${toPretty {} v}");
|
let
|
||||||
in if isInt v then toString v
|
err = t: v: abort ("generators.mkValueStringDefault: " + "${t} not supported: ${toPretty { } v}");
|
||||||
|
in
|
||||||
|
if isInt v then
|
||||||
|
toString v
|
||||||
# convert derivations to store paths
|
# convert derivations to store paths
|
||||||
else if isDerivation v then toString v
|
else if isDerivation v then
|
||||||
|
toString v
|
||||||
# we default to not quoting strings
|
# we default to not quoting strings
|
||||||
else if isString v then v
|
else if isString v then
|
||||||
|
v
|
||||||
# isString returns "1", which is not a good default
|
# isString returns "1", which is not a good default
|
||||||
else if true == v then "true"
|
else if true == v then
|
||||||
|
"true"
|
||||||
# here it returns to "", which is even less of a good default
|
# here it returns to "", which is even less of a good default
|
||||||
else if false == v then "false"
|
else if false == v then
|
||||||
else if null == v then "null"
|
"false"
|
||||||
|
else if null == v then
|
||||||
|
"null"
|
||||||
# if you have lists you probably want to replace this
|
# if you have lists you probably want to replace this
|
||||||
else if isList v then err "lists" v
|
else if isList v then
|
||||||
|
err "lists" v
|
||||||
# same as for lists, might want to replace
|
# same as for lists, might want to replace
|
||||||
else if isAttrs v then err "attrsets" v
|
else if isAttrs v then
|
||||||
|
err "attrsets" v
|
||||||
# functions can’t be printed of course
|
# functions can’t be printed of course
|
||||||
else if isFunction v then err "functions" v
|
else if isFunction v then
|
||||||
|
err "functions" v
|
||||||
# Floats currently can't be converted to precise strings,
|
# Floats currently can't be converted to precise strings,
|
||||||
# condition warning on nix version once this isn't a problem anymore
|
# condition warning on nix version once this isn't a problem anymore
|
||||||
# See https://github.com/NixOS/nix/pull/3480
|
# See https://github.com/NixOS/nix/pull/3480
|
||||||
else if isFloat v then floatToString v
|
else if isFloat v then
|
||||||
else err "this value is" (toString v);
|
floatToString v
|
||||||
|
else
|
||||||
|
err "this value is" (toString v);
|
||||||
|
|
||||||
|
/*
|
||||||
|
Generate a line of key k and value v, separated by
|
||||||
|
character sep. If sep appears in k, it is escaped.
|
||||||
|
Helper for synaxes with different separators.
|
||||||
|
|
||||||
/* Generate a line of key k and value v, separated by
|
mkValueString specifies how values should be formatted.
|
||||||
* character sep. If sep appears in k, it is escaped.
|
|
||||||
* Helper for synaxes with different separators.
|
mkKeyValueDefault {} ":" "f:oo" "bar"
|
||||||
*
|
> "f\:oo:bar"
|
||||||
* mkValueString specifies how values should be formatted.
|
|
||||||
*
|
|
||||||
* mkKeyValueDefault {} ":" "f:oo" "bar"
|
|
||||||
* > "f\:oo:bar"
|
|
||||||
*/
|
*/
|
||||||
mkKeyValueDefault = {
|
mkKeyValueDefault =
|
||||||
mkValueString ? mkValueStringDefault {}
|
{
|
||||||
}: sep: k: v:
|
mkValueString ? mkValueStringDefault { },
|
||||||
"${escape [sep] k}${sep}${mkValueString v}";
|
}:
|
||||||
|
sep: k: v:
|
||||||
|
"${escape [ sep ] k}${sep}${mkValueString v}";
|
||||||
|
|
||||||
## -- FILE FORMAT GENERATORS --
|
## -- FILE FORMAT GENERATORS --
|
||||||
|
|
||||||
|
/*
|
||||||
|
Generate a key-value-style config file from an attrset.
|
||||||
|
|
||||||
/* Generate a key-value-style config file from an attrset.
|
mkKeyValue is the same as in toINI.
|
||||||
*
|
|
||||||
* mkKeyValue is the same as in toINI.
|
|
||||||
*/
|
*/
|
||||||
toKeyValue = {
|
toKeyValue =
|
||||||
mkKeyValue ? mkKeyValueDefault {} "=",
|
{
|
||||||
|
mkKeyValue ? mkKeyValueDefault { } "=",
|
||||||
listsAsDuplicateKeys ? false,
|
listsAsDuplicateKeys ? false,
|
||||||
indent ? ""
|
indent ? "",
|
||||||
}:
|
}:
|
||||||
let mkLine = k: v: indent + mkKeyValue k v + "\n";
|
let
|
||||||
mkLines = if listsAsDuplicateKeys
|
mkLine = k: v: indent + mkKeyValue k v + "\n";
|
||||||
then k: v: map (mkLine k) (if isList v then v else [v])
|
mkLines =
|
||||||
else k: v: [ (mkLine k v) ];
|
if listsAsDuplicateKeys then
|
||||||
in attrs: concatStrings (concatLists (mapAttrsToList mkLines attrs));
|
k: v: map (mkLine k) (if isList v then v else [ v ])
|
||||||
|
else
|
||||||
|
k: v: [ (mkLine k v) ];
|
||||||
|
in
|
||||||
|
attrs: concatStrings (concatLists (mapAttrsToList mkLines attrs));
|
||||||
|
|
||||||
|
/*
|
||||||
|
Generate an INI-style config file from an
|
||||||
|
attrset of sections to an attrset of key-value pairs.
|
||||||
|
|
||||||
/* Generate an INI-style config file from an
|
generators.toINI {} {
|
||||||
* attrset of sections to an attrset of key-value pairs.
|
foo = { hi = "${pkgs.hello}"; ciao = "bar"; };
|
||||||
*
|
baz = { "also, integers" = 42; };
|
||||||
* generators.toINI {} {
|
}
|
||||||
* foo = { hi = "${pkgs.hello}"; ciao = "bar"; };
|
|
||||||
* baz = { "also, integers" = 42; };
|
> [baz]
|
||||||
* }
|
> also, integers=42
|
||||||
*
|
>
|
||||||
*> [baz]
|
> [foo]
|
||||||
*> also, integers=42
|
> ciao=bar
|
||||||
*>
|
> hi=/nix/store/y93qql1p5ggfnaqjjqhxcw0vqw95rlz0-hello-2.10
|
||||||
*> [foo]
|
|
||||||
*> ciao=bar
|
The mk* configuration attributes can generically change
|
||||||
*> hi=/nix/store/y93qql1p5ggfnaqjjqhxcw0vqw95rlz0-hello-2.10
|
the way sections and key-value strings are generated.
|
||||||
*
|
|
||||||
* The mk* configuration attributes can generically change
|
For more examples see the test cases in ./tests/misc.nix.
|
||||||
* the way sections and key-value strings are generated.
|
|
||||||
*
|
|
||||||
* For more examples see the test cases in ./tests/misc.nix.
|
|
||||||
*/
|
*/
|
||||||
toINI = {
|
toINI =
|
||||||
|
{
|
||||||
# apply transformations (e.g. escapes) to section names
|
# apply transformations (e.g. escapes) to section names
|
||||||
mkSectionName ? (name: escape [ "[" "]" ] name),
|
mkSectionName ? (
|
||||||
|
name:
|
||||||
|
escape [
|
||||||
|
"["
|
||||||
|
"]"
|
||||||
|
] name
|
||||||
|
),
|
||||||
# format a setting line from key and value
|
# format a setting line from key and value
|
||||||
mkKeyValue ? mkKeyValueDefault {} "=",
|
mkKeyValue ? mkKeyValueDefault { } "=",
|
||||||
# allow lists as values for duplicate keys
|
# allow lists as values for duplicate keys
|
||||||
listsAsDuplicateKeys ? false
|
listsAsDuplicateKeys ? false,
|
||||||
}: attrsOfAttrs:
|
}:
|
||||||
|
attrsOfAttrs:
|
||||||
let
|
let
|
||||||
# map function to string for each key val
|
# map function to string for each key val
|
||||||
mapAttrsToStringsSep = sep: mapFn: attrs:
|
mapAttrsToStringsSep =
|
||||||
concatStringsSep sep
|
sep: mapFn: attrs:
|
||||||
(mapAttrsToList mapFn attrs);
|
concatStringsSep sep (mapAttrsToList mapFn attrs);
|
||||||
mkSection = sectName: sectValues: ''
|
mkSection =
|
||||||
|
sectName: sectValues:
|
||||||
|
''
|
||||||
[${mkSectionName sectName}]
|
[${mkSectionName sectName}]
|
||||||
'' + toKeyValue { inherit mkKeyValue listsAsDuplicateKeys; } sectValues;
|
''
|
||||||
|
+ toKeyValue { inherit mkKeyValue listsAsDuplicateKeys; } sectValues;
|
||||||
in
|
in
|
||||||
# map input to ini sections
|
# map input to ini sections
|
||||||
mapAttrsToStringsSep "\n" mkSection attrsOfAttrs;
|
mapAttrsToStringsSep "\n" mkSection attrsOfAttrs;
|
||||||
|
|
||||||
/* Generate an INI-style config file from an attrset
|
/*
|
||||||
* specifying the global section (no header), and an
|
Generate an INI-style config file from an attrset
|
||||||
* attrset of sections to an attrset of key-value pairs.
|
specifying the global section (no header), and an
|
||||||
*
|
attrset of sections to an attrset of key-value pairs.
|
||||||
* generators.toINIWithGlobalSection {} {
|
|
||||||
* globalSection = {
|
generators.toINIWithGlobalSection {} {
|
||||||
* someGlobalKey = "hi";
|
globalSection = {
|
||||||
* };
|
someGlobalKey = "hi";
|
||||||
* sections = {
|
};
|
||||||
* foo = { hi = "${pkgs.hello}"; ciao = "bar"; };
|
sections = {
|
||||||
* baz = { "also, integers" = 42; };
|
foo = { hi = "${pkgs.hello}"; ciao = "bar"; };
|
||||||
* }
|
baz = { "also, integers" = 42; };
|
||||||
*
|
}
|
||||||
*> someGlobalKey=hi
|
|
||||||
*>
|
> someGlobalKey=hi
|
||||||
*> [baz]
|
>
|
||||||
*> also, integers=42
|
> [baz]
|
||||||
*>
|
> also, integers=42
|
||||||
*> [foo]
|
>
|
||||||
*> ciao=bar
|
> [foo]
|
||||||
*> hi=/nix/store/y93qql1p5ggfnaqjjqhxcw0vqw95rlz0-hello-2.10
|
> ciao=bar
|
||||||
*
|
> hi=/nix/store/y93qql1p5ggfnaqjjqhxcw0vqw95rlz0-hello-2.10
|
||||||
* The mk* configuration attributes can generically change
|
|
||||||
* the way sections and key-value strings are generated.
|
The mk* configuration attributes can generically change
|
||||||
*
|
the way sections and key-value strings are generated.
|
||||||
* For more examples see the test cases in ./tests/misc.nix.
|
|
||||||
*
|
For more examples see the test cases in ./tests/misc.nix.
|
||||||
* If you don’t need a global section, you can also use
|
|
||||||
* `generators.toINI` directly, which only takes
|
If you don’t need a global section, you can also use
|
||||||
* the part in `sections`.
|
`generators.toINI` directly, which only takes
|
||||||
|
the part in `sections`.
|
||||||
*/
|
*/
|
||||||
toINIWithGlobalSection = {
|
toINIWithGlobalSection =
|
||||||
|
{
|
||||||
# apply transformations (e.g. escapes) to section names
|
# apply transformations (e.g. escapes) to section names
|
||||||
mkSectionName ? (name: escape [ "[" "]" ] name),
|
mkSectionName ? (
|
||||||
|
name:
|
||||||
|
escape [
|
||||||
|
"["
|
||||||
|
"]"
|
||||||
|
] name
|
||||||
|
),
|
||||||
# format a setting line from key and value
|
# format a setting line from key and value
|
||||||
mkKeyValue ? mkKeyValueDefault {} "=",
|
mkKeyValue ? mkKeyValueDefault { } "=",
|
||||||
# allow lists as values for duplicate keys
|
# allow lists as values for duplicate keys
|
||||||
listsAsDuplicateKeys ? false
|
listsAsDuplicateKeys ? false,
|
||||||
}: { globalSection, sections ? {} }:
|
}:
|
||||||
( if globalSection == {}
|
{
|
||||||
then ""
|
globalSection,
|
||||||
else (toKeyValue { inherit mkKeyValue listsAsDuplicateKeys; } globalSection)
|
sections ? { },
|
||||||
+ "\n")
|
}:
|
||||||
|
(
|
||||||
|
if globalSection == { } then
|
||||||
|
""
|
||||||
|
else
|
||||||
|
(toKeyValue { inherit mkKeyValue listsAsDuplicateKeys; } globalSection) + "\n"
|
||||||
|
)
|
||||||
+ (toINI { inherit mkSectionName mkKeyValue listsAsDuplicateKeys; } sections);
|
+ (toINI { inherit mkSectionName mkKeyValue listsAsDuplicateKeys; } sections);
|
||||||
|
|
||||||
/* Generate a git-config file from an attrset.
|
/*
|
||||||
*
|
Generate a git-config file from an attrset.
|
||||||
* It has two major differences from the regular INI format:
|
|
||||||
*
|
It has two major differences from the regular INI format:
|
||||||
* 1. values are indented with tabs
|
|
||||||
* 2. sections can have sub-sections
|
1. values are indented with tabs
|
||||||
*
|
2. sections can have sub-sections
|
||||||
* generators.toGitINI {
|
|
||||||
* url."ssh://git@github.com/".insteadOf = "https://github.com";
|
generators.toGitINI {
|
||||||
* user.name = "edolstra";
|
url."ssh://git@github.com/".insteadOf = "https://github.com";
|
||||||
* }
|
user.name = "edolstra";
|
||||||
*
|
}
|
||||||
*> [url "ssh://git@github.com/"]
|
|
||||||
*> insteadOf = "https://github.com"
|
> [url "ssh://git@github.com/"]
|
||||||
*>
|
> insteadOf = "https://github.com"
|
||||||
*> [user]
|
>
|
||||||
*> name = "edolstra"
|
> [user]
|
||||||
|
> name = "edolstra"
|
||||||
*/
|
*/
|
||||||
toGitINI = attrs:
|
toGitINI =
|
||||||
|
attrs:
|
||||||
let
|
let
|
||||||
mkSectionName = name:
|
mkSectionName =
|
||||||
|
name:
|
||||||
let
|
let
|
||||||
containsQuote = hasInfix ''"'' name;
|
containsQuote = hasInfix ''"'' name;
|
||||||
sections = splitString "." name;
|
sections = splitString "." name;
|
||||||
section = head sections;
|
section = head sections;
|
||||||
subsections = tail sections;
|
subsections = tail sections;
|
||||||
subsection = concatStringsSep "." subsections;
|
subsection = concatStringsSep "." subsections;
|
||||||
in if containsQuote || subsections == [ ] then
|
in
|
||||||
name
|
if containsQuote || subsections == [ ] then name else ''${section} "${subsection}"'';
|
||||||
else
|
|
||||||
''${section} "${subsection}"'';
|
|
||||||
|
|
||||||
mkValueString = v:
|
mkValueString =
|
||||||
|
v:
|
||||||
let
|
let
|
||||||
escapedV = ''
|
escapedV = ''"${
|
||||||
"${
|
replaceStrings
|
||||||
replaceStrings [ "\n" " " ''"'' "\\" ] [ "\\n" "\\t" ''\"'' "\\\\" ] v
|
[
|
||||||
|
"\n"
|
||||||
|
" "
|
||||||
|
''"''
|
||||||
|
"\\"
|
||||||
|
]
|
||||||
|
[
|
||||||
|
"\\n"
|
||||||
|
"\\t"
|
||||||
|
''\"''
|
||||||
|
"\\\\"
|
||||||
|
]
|
||||||
|
v
|
||||||
}"'';
|
}"'';
|
||||||
in mkValueStringDefault { } (if isString v then escapedV else v);
|
in
|
||||||
|
mkValueStringDefault { } (if isString v then escapedV else v);
|
||||||
|
|
||||||
# generation for multiple ini values
|
# generation for multiple ini values
|
||||||
mkKeyValue = k: v:
|
mkKeyValue =
|
||||||
let mkKeyValue = mkKeyValueDefault { inherit mkValueString; } " = " k;
|
k: v:
|
||||||
in concatStringsSep "\n" (map (kv: "\t" + mkKeyValue kv) (toList v));
|
let
|
||||||
|
mkKeyValue = mkKeyValueDefault { inherit mkValueString; } " = " k;
|
||||||
|
in
|
||||||
|
concatStringsSep "\n" (map (kv: "\t" + mkKeyValue kv) (toList v));
|
||||||
|
|
||||||
# converts { a.b.c = 5; } to { "a.b".c = 5; } for toINI
|
# converts { a.b.c = 5; } to { "a.b".c = 5; } for toINI
|
||||||
gitFlattenAttrs = let
|
gitFlattenAttrs =
|
||||||
recurse = path: value:
|
let
|
||||||
|
recurse =
|
||||||
|
path: value:
|
||||||
if isAttrs value && !isDerivation value then
|
if isAttrs value && !isDerivation value then
|
||||||
mapAttrsToList (name: value: recurse ([ name ] ++ path) value) value
|
mapAttrsToList (name: value: recurse ([ name ] ++ path) value) value
|
||||||
else if length path > 1 then {
|
else if length path > 1 then
|
||||||
${concatStringsSep "." (reverseList (tail path))}.${head path} = value;
|
{ ${concatStringsSep "." (reverseList (tail path))}.${head path} = value; }
|
||||||
} else {
|
else
|
||||||
${head path} = value;
|
{ ${head path} = value; };
|
||||||
};
|
in
|
||||||
in attrs: foldl recursiveUpdate { } (flatten (recurse [ ] attrs));
|
attrs: foldl recursiveUpdate { } (flatten (recurse [ ] attrs));
|
||||||
|
|
||||||
toINI_ = toINI { inherit mkKeyValue mkSectionName; };
|
toINI_ = toINI { inherit mkKeyValue mkSectionName; };
|
||||||
in
|
in
|
||||||
|
@ -290,10 +356,10 @@ let
|
||||||
|
|
||||||
withRecursion =
|
withRecursion =
|
||||||
{
|
{
|
||||||
/* If this option is not null, the given value will stop evaluating at a certain depth */
|
# If this option is not null, the given value will stop evaluating at a certain depth
|
||||||
depthLimit
|
depthLimit,
|
||||||
/* If this option is true, an error will be thrown, if a certain given depth is exceeded */
|
# If this option is true, an error will be thrown, if a certain given depth is exceeded
|
||||||
, throwOnDepthLimit ? true
|
throwOnDepthLimit ? true,
|
||||||
}:
|
}:
|
||||||
assert isInt depthLimit;
|
assert isInt depthLimit;
|
||||||
let
|
let
|
||||||
|
@ -303,111 +369,168 @@ let
|
||||||
"__toString"
|
"__toString"
|
||||||
"__pretty"
|
"__pretty"
|
||||||
];
|
];
|
||||||
stepIntoAttr = evalNext: name:
|
stepIntoAttr = evalNext: name: if elem name specialAttrs then id else evalNext;
|
||||||
if elem name specialAttrs
|
transform =
|
||||||
then id
|
depth:
|
||||||
else evalNext;
|
|
||||||
transform = depth:
|
|
||||||
if depthLimit != null && depth > depthLimit then
|
if depthLimit != null && depth > depthLimit then
|
||||||
if throwOnDepthLimit
|
if throwOnDepthLimit then
|
||||||
then throw "Exceeded maximum eval-depth limit of ${toString depthLimit} while trying to evaluate with `generators.withRecursion'!"
|
throw "Exceeded maximum eval-depth limit of ${toString depthLimit} while trying to evaluate with `generators.withRecursion'!"
|
||||||
else const "<unevaluated>"
|
else
|
||||||
else id;
|
const "<unevaluated>"
|
||||||
mapAny = depth: v:
|
else
|
||||||
|
id;
|
||||||
|
mapAny =
|
||||||
|
depth: v:
|
||||||
let
|
let
|
||||||
evalNext = x: mapAny (depth + 1) (transform (depth + 1) x);
|
evalNext = x: mapAny (depth + 1) (transform (depth + 1) x);
|
||||||
in
|
in
|
||||||
if isAttrs v then mapAttrs (stepIntoAttr evalNext) v
|
if isAttrs v then
|
||||||
else if isList v then map evalNext v
|
mapAttrs (stepIntoAttr evalNext) v
|
||||||
else transform (depth + 1) v;
|
else if isList v then
|
||||||
|
map evalNext v
|
||||||
|
else
|
||||||
|
transform (depth + 1) v;
|
||||||
in
|
in
|
||||||
mapAny 0;
|
mapAny 0;
|
||||||
|
|
||||||
/* Pretty print a value, akin to `builtins.trace`.
|
/*
|
||||||
* Should probably be a builtin as well.
|
Pretty print a value, akin to `builtins.trace`.
|
||||||
* The pretty-printed string should be suitable for rendering default values
|
Should probably be a builtin as well.
|
||||||
* in the NixOS manual. In particular, it should be as close to a valid Nix expression
|
The pretty-printed string should be suitable for rendering default values
|
||||||
* as possible.
|
in the NixOS manual. In particular, it should be as close to a valid Nix expression
|
||||||
|
as possible.
|
||||||
*/
|
*/
|
||||||
toPretty = {
|
toPretty =
|
||||||
/* If this option is true, attrsets like { __pretty = fn; val = …; }
|
{
|
||||||
|
/*
|
||||||
|
If this option is true, attrsets like { __pretty = fn; val = …; }
|
||||||
will use fn to convert val to a pretty printed representation.
|
will use fn to convert val to a pretty printed representation.
|
||||||
(This means fn is type Val -> String.) */
|
(This means fn is type Val -> String.)
|
||||||
|
*/
|
||||||
allowPrettyValues ? false,
|
allowPrettyValues ? false,
|
||||||
/* If this option is true, the output is indented with newlines for attribute sets and lists */
|
# If this option is true, the output is indented with newlines for attribute sets and lists
|
||||||
multiline ? true,
|
multiline ? true,
|
||||||
/* Initial indentation level */
|
# Initial indentation level
|
||||||
indent ? ""
|
indent ? "",
|
||||||
}:
|
}:
|
||||||
let
|
let
|
||||||
go = indent: v:
|
go =
|
||||||
let introSpace = if multiline then "\n${indent} " else " ";
|
indent: v:
|
||||||
|
let
|
||||||
|
introSpace = if multiline then "\n${indent} " else " ";
|
||||||
outroSpace = if multiline then "\n${indent}" else " ";
|
outroSpace = if multiline then "\n${indent}" else " ";
|
||||||
in if isInt v then toString v
|
in
|
||||||
|
if isInt v then
|
||||||
|
toString v
|
||||||
# toString loses precision on floats, so we use toJSON instead. This isn't perfect
|
# toString loses precision on floats, so we use toJSON instead. This isn't perfect
|
||||||
# as the resulting string may not parse back as a float (e.g. 42, 1e-06), but for
|
# as the resulting string may not parse back as a float (e.g. 42, 1e-06), but for
|
||||||
# pretty-printing purposes this is acceptable.
|
# pretty-printing purposes this is acceptable.
|
||||||
else if isFloat v then builtins.toJSON v
|
else if isFloat v then
|
||||||
|
builtins.toJSON v
|
||||||
else if isString v then
|
else if isString v then
|
||||||
let
|
let
|
||||||
lines = filter (v: ! isList v) (split "\n" v);
|
lines = filter (v: !isList v) (split "\n" v);
|
||||||
escapeSingleline = escape [ "\\" "\"" "\${" ];
|
escapeSingleline = escape [
|
||||||
escapeMultiline = replaceStrings [ "\${" "''" ] [ "''\${" "'''" ];
|
"\\"
|
||||||
|
"\""
|
||||||
|
"\${"
|
||||||
|
];
|
||||||
|
escapeMultiline =
|
||||||
|
replaceStrings
|
||||||
|
[
|
||||||
|
"\${"
|
||||||
|
"''"
|
||||||
|
]
|
||||||
|
[
|
||||||
|
"''\${"
|
||||||
|
"'''"
|
||||||
|
];
|
||||||
singlelineResult = "\"" + concatStringsSep "\\n" (map escapeSingleline lines) + "\"";
|
singlelineResult = "\"" + concatStringsSep "\\n" (map escapeSingleline lines) + "\"";
|
||||||
multilineResult = let
|
multilineResult =
|
||||||
|
let
|
||||||
escapedLines = map escapeMultiline lines;
|
escapedLines = map escapeMultiline lines;
|
||||||
# The last line gets a special treatment: if it's empty, '' is on its own line at the "outer"
|
# The last line gets a special treatment: if it's empty, '' is on its own line at the "outer"
|
||||||
# indentation level. Otherwise, '' is appended to the last line.
|
# indentation level. Otherwise, '' is appended to the last line.
|
||||||
lastLine = last escapedLines;
|
lastLine = last escapedLines;
|
||||||
in "''" + introSpace + concatStringsSep introSpace (init escapedLines)
|
in
|
||||||
+ (if lastLine == "" then outroSpace else introSpace + lastLine) + "''";
|
"''"
|
||||||
|
+ introSpace
|
||||||
|
+ concatStringsSep introSpace (init escapedLines)
|
||||||
|
+ (if lastLine == "" then outroSpace else introSpace + lastLine)
|
||||||
|
+ "''";
|
||||||
in
|
in
|
||||||
if multiline && length lines > 1 then multilineResult else singlelineResult
|
if multiline && length lines > 1 then multilineResult else singlelineResult
|
||||||
else if true == v then "true"
|
else if true == v then
|
||||||
else if false == v then "false"
|
"true"
|
||||||
else if null == v then "null"
|
else if false == v then
|
||||||
else if isPath v then toString v
|
"false"
|
||||||
|
else if null == v then
|
||||||
|
"null"
|
||||||
|
else if isPath v then
|
||||||
|
toString v
|
||||||
else if isList v then
|
else if isList v then
|
||||||
if v == [] then "[ ]"
|
if v == [ ] then
|
||||||
else "[" + introSpace
|
"[ ]"
|
||||||
+ concatMapStringsSep introSpace (go (indent + " ")) v
|
else
|
||||||
+ outroSpace + "]"
|
"[" + introSpace + concatMapStringsSep introSpace (go (indent + " ")) v + outroSpace + "]"
|
||||||
else if isFunction v then
|
else if isFunction v then
|
||||||
let fna = functionArgs v;
|
let
|
||||||
showFnas = concatStringsSep ", " (mapAttrsToList
|
fna = functionArgs v;
|
||||||
(name: hasDefVal: if hasDefVal then name + "?" else name)
|
showFnas = concatStringsSep ", " (
|
||||||
fna);
|
mapAttrsToList (name: hasDefVal: if hasDefVal then name + "?" else name) fna
|
||||||
in if fna == {} then "<function>"
|
);
|
||||||
else "<function, args: {${showFnas}}>"
|
in
|
||||||
|
if fna == { } then "<function>" else "<function, args: {${showFnas}}>"
|
||||||
else if isAttrs v then
|
else if isAttrs v then
|
||||||
# apply pretty values if allowed
|
# apply pretty values if allowed
|
||||||
if allowPrettyValues && v ? __pretty && v ? val
|
if allowPrettyValues && v ? __pretty && v ? val then
|
||||||
then v.__pretty v.val
|
v.__pretty v.val
|
||||||
else if v == {} then "{ }"
|
else if v == { } then
|
||||||
|
"{ }"
|
||||||
else if v ? type && v.type == "derivation" then
|
else if v ? type && v.type == "derivation" then
|
||||||
"<derivation ${v.name or "???"}>"
|
"<derivation ${v.name or "???"}>"
|
||||||
else "{" + introSpace
|
else
|
||||||
+ concatStringsSep introSpace (mapAttrsToList
|
"{"
|
||||||
(name: value:
|
+ introSpace
|
||||||
|
+ concatStringsSep introSpace (
|
||||||
|
mapAttrsToList (
|
||||||
|
name: value:
|
||||||
"${escapeNixIdentifier name} = ${
|
"${escapeNixIdentifier name} = ${
|
||||||
addErrorContext "while evaluating an attribute `${name}`"
|
addErrorContext "while evaluating an attribute `${name}`" (go (indent + " ") value)
|
||||||
(go (indent + " ") value)
|
};"
|
||||||
};") v)
|
) v
|
||||||
+ outroSpace + "}"
|
)
|
||||||
else abort "generators.toPretty: should never happen (v = ${v})";
|
+ outroSpace
|
||||||
in go indent;
|
+ "}"
|
||||||
|
else
|
||||||
|
abort "generators.toPretty: should never happen (v = ${v})";
|
||||||
|
in
|
||||||
|
go indent;
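
Not part of the diff: a hedged sketch of how the string branch above behaves, assuming the default arguments (`multiline = true`, `indent = ""`); results are shown as Nix string values and the exact indentation follows `introSpace`/`outroSpace`.

```nix
lib.generators.toPretty { } "foo\nbar"
# => "''\n  foo\n  bar''"        multi-line strings become indented '' blocks

lib.generators.toPretty { multiline = false; } "foo\nbar"
# => "\"foo\\nbar\""             single-line mode escapes the newline instead
```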
|
||||||
|
|
||||||
# PLIST handling
|
# PLIST handling
|
||||||
toPlist = {}: v: let
|
toPlist =
|
||||||
expr = ind: x:
|
{ }:
|
||||||
if x == null then "" else
|
v:
|
||||||
if isBool x then bool ind x else
|
let
|
||||||
if isInt x then int ind x else
|
expr =
|
||||||
if isString x then str ind x else
|
ind: x:
|
||||||
if isList x then list ind x else
|
if x == null then
|
||||||
if isAttrs x then attrs ind x else
|
""
|
||||||
if isPath x then str ind (toString x) else
|
else if isBool x then
|
||||||
if isFloat x then float ind x else
|
bool ind x
|
||||||
|
else if isInt x then
|
||||||
|
int ind x
|
||||||
|
else if isString x then
|
||||||
|
str ind x
|
||||||
|
else if isList x then
|
||||||
|
list ind x
|
||||||
|
else if isAttrs x then
|
||||||
|
attrs ind x
|
||||||
|
else if isPath x then
|
||||||
|
str ind (toString x)
|
||||||
|
else if isFloat x then
|
||||||
|
float ind x
|
||||||
|
else
|
||||||
abort "generators.toPlist: should never happen (v = ${v})";
|
abort "generators.toPlist: should never happen (v = ${v})";
|
||||||
|
|
||||||
literal = ind: x: ind + x;
|
literal = ind: x: ind + x;
|
||||||
|
@ -422,42 +545,60 @@ let
|
||||||
|
|
||||||
item = ind: concatMapStringsSep "\n" (indent ind);
|
item = ind: concatMapStringsSep "\n" (indent ind);
|
||||||
|
|
||||||
list = ind: x: concatStringsSep "\n" [
|
list =
|
||||||
|
ind: x:
|
||||||
|
concatStringsSep "\n" [
|
||||||
(literal ind "<array>")
|
(literal ind "<array>")
|
||||||
(item ind x)
|
(item ind x)
|
||||||
(literal ind "</array>")
|
(literal ind "</array>")
|
||||||
];
|
];
|
||||||
|
|
||||||
attrs = ind: x: concatStringsSep "\n" [
|
attrs =
|
||||||
|
ind: x:
|
||||||
|
concatStringsSep "\n" [
|
||||||
(literal ind "<dict>")
|
(literal ind "<dict>")
|
||||||
(attr ind x)
|
(attr ind x)
|
||||||
(literal ind "</dict>")
|
(literal ind "</dict>")
|
||||||
];
|
];
|
||||||
|
|
||||||
attr = let attrFilter = name: value: name != "_module" && value != null;
|
attr =
|
||||||
in ind: x: concatStringsSep "\n" (flatten (mapAttrsToList
|
let
|
||||||
(name: value: optionals (attrFilter name value) [
|
attrFilter = name: value: name != "_module" && value != null;
|
||||||
|
in
|
||||||
|
ind: x:
|
||||||
|
concatStringsSep "\n" (
|
||||||
|
flatten (
|
||||||
|
mapAttrsToList (
|
||||||
|
name: value:
|
||||||
|
optionals (attrFilter name value) [
|
||||||
(key "\t${ind}" name)
|
(key "\t${ind}" name)
|
||||||
(expr "\t${ind}" value)
|
(expr "\t${ind}" value)
|
||||||
]) x));
|
]
|
||||||
|
) x
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
in ''<?xml version="1.0" encoding="UTF-8"?>
|
in
|
||||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
''
|
||||||
<plist version="1.0">
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
${expr "" v}
|
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||||
</plist>'';
|
<plist version="1.0">
|
||||||
|
${expr "" v}
|
||||||
|
</plist>'';
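
For orientation (not part of the diff): a rough example of the document `toPlist` builds. The `<key>`/`<true/>` rendering lives in helpers outside the shown hunks, so whitespace and element details below are approximate.

```nix
lib.generators.toPlist { } { foo = true; }
# => roughly:
#   <?xml version="1.0" encoding="UTF-8"?>
#   <!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
#   <plist version="1.0">
#   <dict>
#     <key>foo</key>
#     <true/>
#   </dict>
#   </plist>
```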
|
||||||
|
|
||||||
/* Translate a simple Nix expression to Dhall notation.
|
/*
|
||||||
* Note that integers are translated to Integer and never
|
Translate a simple Nix expression to Dhall notation.
|
||||||
* the Natural type.
|
Note that integers are translated to Integer and never
|
||||||
|
the Natural type.
|
||||||
*/
|
*/
|
||||||
toDhall = { }@args: v:
|
toDhall =
|
||||||
let concatItems = concatStringsSep ", ";
|
{ }@args:
|
||||||
in if isAttrs v then
|
v:
|
||||||
"{ ${
|
let
|
||||||
concatItems (mapAttrsToList
|
concatItems = concatStringsSep ", ";
|
||||||
(key: value: "${key} = ${toDhall args value}") v)
|
in
|
||||||
} }"
|
if isAttrs v then
|
||||||
|
"{ ${concatItems (mapAttrsToList (key: value: "${key} = ${toDhall args value}") v)} }"
|
||||||
else if isList v then
|
else if isList v then
|
||||||
"[ ${concatItems (map (toDhall args) v)} ]"
|
"[ ${concatItems (map (toDhall args) v)} ]"
|
||||||
else if isInt v then
|
else if isInt v then
|
||||||
|
@ -505,14 +646,16 @@ ${expr "" v}
|
||||||
Type:
|
Type:
|
||||||
toLua :: AttrSet -> Any -> String
|
toLua :: AttrSet -> Any -> String
|
||||||
*/
|
*/
|
||||||
toLua = {
|
toLua =
|
||||||
/* If this option is true, the output is indented with newlines for attribute sets and lists */
|
{
|
||||||
|
# If this option is true, the output is indented with newlines for attribute sets and lists
|
||||||
multiline ? true,
|
multiline ? true,
|
||||||
/* Initial indentation level */
|
# Initial indentation level
|
||||||
indent ? "",
|
indent ? "",
|
||||||
/* Interpret as variable bindings */
|
# Interpret as variable bindings
|
||||||
asBindings ? false,
|
asBindings ? false,
|
||||||
}@args: v:
|
}@args:
|
||||||
|
v:
|
||||||
let
|
let
|
||||||
innerIndent = "${indent} ";
|
innerIndent = "${indent} ";
|
||||||
introSpace = if multiline then "\n${innerIndent}" else " ";
|
introSpace = if multiline then "\n${innerIndent}" else " ";
|
||||||
|
@ -522,13 +665,16 @@ ${expr "" v}
|
||||||
asBindings = false;
|
asBindings = false;
|
||||||
};
|
};
|
||||||
concatItems = concatStringsSep ",${introSpace}";
|
concatItems = concatStringsSep ",${introSpace}";
|
||||||
isLuaInline = { _type ? null, ... }: _type == "lua-inline";
|
isLuaInline =
|
||||||
|
{
|
||||||
|
_type ? null,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
_type == "lua-inline";
|
||||||
|
|
||||||
generatedBindings =
|
generatedBindings =
|
||||||
assert assertMsg (badVarNames == []) "Bad Lua var names: ${toPretty {} badVarNames}";
|
assert assertMsg (badVarNames == [ ]) "Bad Lua var names: ${toPretty { } badVarNames}";
|
||||||
concatStrings (
|
concatStrings (mapAttrsToList (key: value: "${indent}${key} = ${toLua innerArgs value}\n") v);
|
||||||
mapAttrsToList (key: value: "${indent}${key} = ${toLua innerArgs value}\n") v
|
|
||||||
);
|
|
||||||
|
|
||||||
# https://en.wikibooks.org/wiki/Lua_Programming/variable#Variable_names
|
# https://en.wikibooks.org/wiki/Lua_Programming/variable#Variable_names
|
||||||
matchVarName = match "[[:alpha:]_][[:alnum:]_]*(\\.[[:alpha:]_][[:alnum:]_]*)*";
|
matchVarName = match "[[:alpha:]_][[:alnum:]_]*(\\.[[:alpha:]_][[:alnum:]_]*)*";
|
||||||
|
@ -541,8 +687,12 @@ ${expr "" v}
|
||||||
else if isInt v || isFloat v || isString v || isBool v then
|
else if isInt v || isFloat v || isString v || isBool v then
|
||||||
toJSON v
|
toJSON v
|
||||||
else if isList v then
|
else if isList v then
|
||||||
(if v == [ ] then "{}" else
|
(
|
||||||
"{${introSpace}${concatItems (map (value: "${toLua innerArgs value}") v)}${outroSpace}}")
|
if v == [ ] then
|
||||||
|
"{}"
|
||||||
|
else
|
||||||
|
"{${introSpace}${concatItems (map (value: "${toLua innerArgs value}") v)}${outroSpace}}"
|
||||||
|
)
|
||||||
else if isAttrs v then
|
else if isAttrs v then
|
||||||
(
|
(
|
||||||
if isLuaInline v then
|
if isLuaInline v then
|
||||||
|
@ -552,9 +702,9 @@ ${expr "" v}
|
||||||
else if isDerivation v then
|
else if isDerivation v then
|
||||||
''"${toString v}"''
|
''"${toString v}"''
|
||||||
else
|
else
|
||||||
"{${introSpace}${concatItems (
|
"{${introSpace}${
|
||||||
mapAttrsToList (key: value: "[${toJSON key}] = ${toLua innerArgs value}") v
|
concatItems (mapAttrsToList (key: value: "[${toJSON key}] = ${toLua innerArgs value}") v)
|
||||||
)}${outroSpace}}"
|
}${outroSpace}}"
|
||||||
)
|
)
|
||||||
else
|
else
|
||||||
abort "generators.toLua: type ${typeOf v} is unsupported";
|
abort "generators.toLua: type ${typeOf v} is unsupported";
|
||||||
|
@ -565,7 +715,10 @@ ${expr "" v}
|
||||||
Type:
|
Type:
|
||||||
mkLuaInline :: String -> AttrSet
|
mkLuaInline :: String -> AttrSet
|
||||||
*/
|
*/
|
||||||
mkLuaInline = expr: { _type = "lua-inline"; inherit expr; };
|
mkLuaInline = expr: {
|
||||||
|
_type = "lua-inline";
|
||||||
|
inherit expr;
|
||||||
|
};
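
A short usage sketch (not part of the diff) combining `toLua` and `mkLuaInline`, assuming the default `multiline = true`; the `isLuaInline` branch that unwraps the inline expression sits in a hunk not shown here, so the output is approximate.

```nix
lib.generators.toLua { } {
  cmd = [ "nvim" ];
  setup = lib.generators.mkLuaInline "require('pkg').setup()";
}
# => roughly:
# {
#   ["cmd"] = {
#     "nvim"
#   },
#   ["setup"] = (require('pkg').setup())
# }
```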
|
||||||
|
|
||||||
in
|
in
|
||||||
|
|
||||||
|
@ -588,15 +741,17 @@ in
|
||||||
withRecursion
|
withRecursion
|
||||||
;
|
;
|
||||||
|
|
||||||
/* Generates JSON from an arbitrary (non-function) value.
|
/*
|
||||||
|
Generates JSON from an arbitrary (non-function) value.
|
||||||
* For more information see the documentation of the builtin.
|
* For more information see the documentation of the builtin.
|
||||||
*/
|
*/
|
||||||
toJSON = {}: toJSON;
|
toJSON = { }: toJSON;
|
||||||
|
|
||||||
/* YAML has been a strict superset of JSON since 1.2, so we
|
/*
|
||||||
|
YAML has been a strict superset of JSON since 1.2, so we
|
||||||
* use toJSON. Before it only had a few differences referring
|
* use toJSON. Before it only had a few differences referring
|
||||||
* to implicit typing rules, so it should work with older
|
* to implicit typing rules, so it should work with older
|
||||||
* parsers as well.
|
* parsers as well.
|
||||||
*/
|
*/
|
||||||
toYAML = {}: toJSON;
|
toYAML = { }: toJSON;
|
||||||
}
|
}
|
||||||
|
|
177 lib/gvariant.nix
|
@ -14,7 +14,12 @@
|
||||||
|
|
||||||
let
|
let
|
||||||
inherit (lib)
|
inherit (lib)
|
||||||
concatMapStringsSep concatStrings escape head replaceStrings;
|
concatMapStringsSep
|
||||||
|
concatStrings
|
||||||
|
escape
|
||||||
|
head
|
||||||
|
replaceStrings
|
||||||
|
;
|
||||||
|
|
||||||
mkPrimitive = t: v: {
|
mkPrimitive = t: v: {
|
||||||
_type = "gvariant";
|
_type = "gvariant";
|
||||||
|
@ -41,7 +46,8 @@ let
|
||||||
variant = "v";
|
variant = "v";
|
||||||
};
|
};
|
||||||
|
|
||||||
/* Check if a value is a GVariant value
|
/*
|
||||||
|
Check if a value is a GVariant value
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
isGVariant :: Any -> Bool
|
isGVariant :: Any -> Bool
|
||||||
|
@ -53,13 +59,15 @@ rec {
|
||||||
|
|
||||||
inherit type isGVariant;
|
inherit type isGVariant;
|
||||||
|
|
||||||
/* Returns the GVariant value that most closely matches the given Nix value.
|
/*
|
||||||
|
Returns the GVariant value that most closely matches the given Nix value.
|
||||||
If no GVariant value can be found unambiguously, an error is thrown.
|
If no GVariant value can be found unambiguously, an error is thrown.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkValue :: Any -> gvariant
|
mkValue :: Any -> gvariant
|
||||||
*/
|
*/
|
||||||
mkValue = v:
|
mkValue =
|
||||||
|
v:
|
||||||
if builtins.isBool v then
|
if builtins.isBool v then
|
||||||
mkBoolean v
|
mkBoolean v
|
||||||
else if builtins.isFloat v then
|
else if builtins.isFloat v then
|
||||||
|
@ -73,7 +81,8 @@ rec {
|
||||||
else
|
else
|
||||||
throw "The GVariant type of ${v} can't be inferred.";
|
throw "The GVariant type of ${v} can't be inferred.";
|
||||||
|
|
||||||
/* Returns the GVariant array from the given type of the elements and a Nix list.
|
/*
|
||||||
|
Returns the GVariant array from the given type of the elements and a Nix list.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkArray :: [Any] -> gvariant
|
mkArray :: [Any] -> gvariant
|
||||||
|
@ -82,19 +91,21 @@ rec {
|
||||||
# Creating a string array
|
# Creating a string array
|
||||||
lib.gvariant.mkArray [ "a" "b" "c" ]
|
lib.gvariant.mkArray [ "a" "b" "c" ]
|
||||||
*/
|
*/
|
||||||
mkArray = elems:
|
mkArray =
|
||||||
|
elems:
|
||||||
let
|
let
|
||||||
vs = map mkValue (lib.throwIf (elems == [ ]) "Please create empty array with mkEmptyArray." elems);
|
vs = map mkValue (lib.throwIf (elems == [ ]) "Please create empty array with mkEmptyArray." elems);
|
||||||
elemType = lib.throwIfNot (lib.all (t: (head vs).type == t) (map (v: v.type) vs))
|
elemType = lib.throwIfNot (lib.all (t: (head vs).type == t) (
|
||||||
"Elements in a list should have same type."
|
map (v: v.type) vs
|
||||||
(head vs).type;
|
)) "Elements in a list should have same type." (head vs).type;
|
||||||
in
|
in
|
||||||
mkPrimitive (type.arrayOf elemType) vs // {
|
mkPrimitive (type.arrayOf elemType) vs
|
||||||
__toString = self:
|
// {
|
||||||
"@${self.type} [${concatMapStringsSep "," toString self.value}]";
|
__toString = self: "@${self.type} [${concatMapStringsSep "," toString self.value}]";
|
||||||
};
|
};
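
A hedged example (not part of the diff) of the array rendering above; string elements infer element type `s`, giving array type `as`.

```nix
toString (lib.gvariant.mkArray [ "a" "b" ])
# => "@as ['a','b']"
```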
|
||||||
|
|
||||||
/* Returns the GVariant array from the given empty Nix list.
|
/*
|
||||||
|
Returns the GVariant array from the given empty Nix list.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkEmptyArray :: gvariant.type -> gvariant
|
mkEmptyArray :: gvariant.type -> gvariant
|
||||||
|
@ -103,12 +114,11 @@ rec {
|
||||||
# Creating an empty string array
|
# Creating an empty string array
|
||||||
lib.gvariant.mkEmptyArray (lib.gvariant.type.string)
|
lib.gvariant.mkEmptyArray (lib.gvariant.type.string)
|
||||||
*/
|
*/
|
||||||
mkEmptyArray = elemType: mkPrimitive (type.arrayOf elemType) [ ] // {
|
mkEmptyArray =
|
||||||
__toString = self: "@${self.type} []";
|
elemType: mkPrimitive (type.arrayOf elemType) [ ] // { __toString = self: "@${self.type} []"; };
|
||||||
};
|
|
||||||
|
|
||||||
|
/*
|
||||||
/* Returns the GVariant variant from the given Nix value. Variants are containers
|
Returns the GVariant variant from the given Nix value. Variants are containers
|
||||||
of different GVariant type.
|
of different GVariant type.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
|
@ -120,13 +130,15 @@ rec {
|
||||||
(lib.gvariant.mkVariant (lib.gvariant.mkInt32 1))
|
(lib.gvariant.mkVariant (lib.gvariant.mkInt32 1))
|
||||||
]
|
]
|
||||||
*/
|
*/
|
||||||
mkVariant = elem:
|
mkVariant =
|
||||||
let gvarElem = mkValue elem;
|
elem:
|
||||||
in mkPrimitive type.variant gvarElem // {
|
let
|
||||||
__toString = self: "<${toString self.value}>";
|
gvarElem = mkValue elem;
|
||||||
};
|
in
|
||||||
|
mkPrimitive type.variant gvarElem // { __toString = self: "<${toString self.value}>"; };
|
||||||
|
|
||||||
/* Returns the GVariant dictionary entry from the given key and value.
|
/*
|
||||||
|
Returns the GVariant dictionary entry from the given key and value.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkDictionaryEntry :: String -> Any -> gvariant
|
mkDictionaryEntry :: String -> Any -> gvariant
|
||||||
|
@ -149,143 +161,162 @@ rec {
|
||||||
value' = mkValue value;
|
value' = mkValue value;
|
||||||
dictionaryType = type.dictionaryEntryOf name'.type value'.type;
|
dictionaryType = type.dictionaryEntryOf name'.type value'.type;
|
||||||
in
|
in
|
||||||
mkPrimitive dictionaryType { inherit name value; } // {
|
mkPrimitive dictionaryType { inherit name value; }
|
||||||
|
// {
|
||||||
__toString = self: "@${self.type} {${name'},${value'}}";
|
__toString = self: "@${self.type} {${name'},${value'}}";
|
||||||
};
|
};
|
||||||
|
|
||||||
/* Returns the GVariant maybe from the given element type.
|
/*
|
||||||
|
Returns the GVariant maybe from the given element type.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkMaybe :: gvariant.type -> Any -> gvariant
|
mkMaybe :: gvariant.type -> Any -> gvariant
|
||||||
*/
|
*/
|
||||||
mkMaybe = elemType: elem:
|
mkMaybe =
|
||||||
mkPrimitive (type.maybeOf elemType) elem // {
|
elemType: elem:
|
||||||
__toString = self:
|
mkPrimitive (type.maybeOf elemType) elem
|
||||||
if self.value == null then
|
// {
|
||||||
"@${self.type} nothing"
|
__toString =
|
||||||
else
|
self: if self.value == null then "@${self.type} nothing" else "just ${toString self.value}";
|
||||||
"just ${toString self.value}";
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/* Returns the GVariant nothing from the given element type.
|
/*
|
||||||
|
Returns the GVariant nothing from the given element type.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkNothing :: gvariant.type -> gvariant
|
mkNothing :: gvariant.type -> gvariant
|
||||||
*/
|
*/
|
||||||
mkNothing = elemType: mkMaybe elemType null;
|
mkNothing = elemType: mkMaybe elemType null;
|
||||||
|
|
||||||
/* Returns the GVariant just from the given Nix value.
|
/*
|
||||||
|
Returns the GVariant just from the given Nix value.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkJust :: Any -> gvariant
|
mkJust :: Any -> gvariant
|
||||||
*/
|
*/
|
||||||
mkJust = elem: let gvarElem = mkValue elem; in mkMaybe gvarElem.type gvarElem;
|
mkJust =
|
||||||
|
elem:
|
||||||
|
let
|
||||||
|
gvarElem = mkValue elem;
|
||||||
|
in
|
||||||
|
mkMaybe gvarElem.type gvarElem;
|
||||||
|
|
||||||
/* Returns the GVariant tuple from the given Nix list.
|
/*
|
||||||
|
Returns the GVariant tuple from the given Nix list.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkTuple :: [Any] -> gvariant
|
mkTuple :: [Any] -> gvariant
|
||||||
*/
|
*/
|
||||||
mkTuple = elems:
|
mkTuple =
|
||||||
|
elems:
|
||||||
let
|
let
|
||||||
gvarElems = map mkValue elems;
|
gvarElems = map mkValue elems;
|
||||||
tupleType = type.tupleOf (map (e: e.type) gvarElems);
|
tupleType = type.tupleOf (map (e: e.type) gvarElems);
|
||||||
in
|
in
|
||||||
mkPrimitive tupleType gvarElems // {
|
mkPrimitive tupleType gvarElems
|
||||||
__toString = self:
|
// {
|
||||||
"@${self.type} (${concatMapStringsSep "," toString self.value})";
|
__toString = self: "@${self.type} (${concatMapStringsSep "," toString self.value})";
|
||||||
};
|
};
|
||||||
|
|
||||||
/* Returns the GVariant boolean from the given Nix bool value.
|
/*
|
||||||
|
Returns the GVariant boolean from the given Nix bool value.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkBoolean :: Bool -> gvariant
|
mkBoolean :: Bool -> gvariant
|
||||||
*/
|
*/
|
||||||
mkBoolean = v:
|
mkBoolean =
|
||||||
mkPrimitive type.boolean v // {
|
v: mkPrimitive type.boolean v // { __toString = self: if self.value then "true" else "false"; };
|
||||||
__toString = self: if self.value then "true" else "false";
|
|
||||||
};
|
|
||||||
|
|
||||||
/* Returns the GVariant string from the given Nix string value.
|
/*
|
||||||
|
Returns the GVariant string from the given Nix string value.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkString :: String -> gvariant
|
mkString :: String -> gvariant
|
||||||
*/
|
*/
|
||||||
mkString = v:
|
mkString =
|
||||||
let sanitize = s: replaceStrings [ "\n" ] [ "\\n" ] (escape [ "'" "\\" ] s);
|
v:
|
||||||
in mkPrimitive type.string v // {
|
let
|
||||||
__toString = self: "'${sanitize self.value}'";
|
sanitize =
|
||||||
};
|
s:
|
||||||
|
replaceStrings [ "\n" ] [ "\\n" ] (
|
||||||
|
escape [
|
||||||
|
"'"
|
||||||
|
"\\"
|
||||||
|
] s
|
||||||
|
);
|
||||||
|
in
|
||||||
|
mkPrimitive type.string v // { __toString = self: "'${sanitize self.value}'"; };
|
||||||
|
|
||||||
/* Returns the GVariant object path from the given Nix string value.
|
/*
|
||||||
|
Returns the GVariant object path from the given Nix string value.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkObjectpath :: String -> gvariant
|
mkObjectpath :: String -> gvariant
|
||||||
*/
|
*/
|
||||||
mkObjectpath = v:
|
mkObjectpath =
|
||||||
mkPrimitive type.string v // {
|
v: mkPrimitive type.string v // { __toString = self: "objectpath '${escape [ "'" ] self.value}'"; };
|
||||||
__toString = self: "objectpath '${escape [ "'" ] self.value}'";
|
|
||||||
};
|
|
||||||
|
|
||||||
/* Returns the GVariant uchar from the given Nix int value.
|
/*
|
||||||
|
Returns the GVariant uchar from the given Nix int value.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkUchar :: Int -> gvariant
|
mkUchar :: Int -> gvariant
|
||||||
*/
|
*/
|
||||||
mkUchar = mkPrimitive type.uchar;
|
mkUchar = mkPrimitive type.uchar;
|
||||||
|
|
||||||
/* Returns the GVariant int16 from the given Nix int value.
|
/*
|
||||||
|
Returns the GVariant int16 from the given Nix int value.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkInt16 :: Int -> gvariant
|
mkInt16 :: Int -> gvariant
|
||||||
*/
|
*/
|
||||||
mkInt16 = mkPrimitive type.int16;
|
mkInt16 = mkPrimitive type.int16;
|
||||||
|
|
||||||
/* Returns the GVariant uint16 from the given Nix int value.
|
/*
|
||||||
|
Returns the GVariant uint16 from the given Nix int value.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkUint16 :: Int -> gvariant
|
mkUint16 :: Int -> gvariant
|
||||||
*/
|
*/
|
||||||
mkUint16 = mkPrimitive type.uint16;
|
mkUint16 = mkPrimitive type.uint16;
|
||||||
|
|
||||||
/* Returns the GVariant int32 from the given Nix int value.
|
/*
|
||||||
|
Returns the GVariant int32 from the given Nix int value.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkInt32 :: Int -> gvariant
|
mkInt32 :: Int -> gvariant
|
||||||
*/
|
*/
|
||||||
mkInt32 = v:
|
mkInt32 = v: mkPrimitive type.int32 v // { __toString = self: toString self.value; };
|
||||||
mkPrimitive type.int32 v // {
|
|
||||||
__toString = self: toString self.value;
|
|
||||||
};
|
|
||||||
|
|
||||||
/* Returns the GVariant uint32 from the given Nix int value.
|
/*
|
||||||
|
Returns the GVariant uint32 from the given Nix int value.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkUint32 :: Int -> gvariant
|
mkUint32 :: Int -> gvariant
|
||||||
*/
|
*/
|
||||||
mkUint32 = mkPrimitive type.uint32;
|
mkUint32 = mkPrimitive type.uint32;
|
||||||
|
|
||||||
/* Returns the GVariant int64 from the given Nix int value.
|
/*
|
||||||
|
Returns the GVariant int64 from the given Nix int value.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkInt64 :: Int -> gvariant
|
mkInt64 :: Int -> gvariant
|
||||||
*/
|
*/
|
||||||
mkInt64 = mkPrimitive type.int64;
|
mkInt64 = mkPrimitive type.int64;
|
||||||
|
|
||||||
/* Returns the GVariant uint64 from the given Nix int value.
|
/*
|
||||||
|
Returns the GVariant uint64 from the given Nix int value.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkUint64 :: Int -> gvariant
|
mkUint64 :: Int -> gvariant
|
||||||
*/
|
*/
|
||||||
mkUint64 = mkPrimitive type.uint64;
|
mkUint64 = mkPrimitive type.uint64;
|
||||||
|
|
||||||
/* Returns the GVariant double from the given Nix float value.
|
/*
|
||||||
|
Returns the GVariant double from the given Nix float value.
|
||||||
|
|
||||||
Type:
|
Type:
|
||||||
mkDouble :: Float -> gvariant
|
mkDouble :: Float -> gvariant
|
||||||
*/
|
*/
|
||||||
mkDouble = v:
|
mkDouble = v: mkPrimitive type.double v // { __toString = self: toString self.value; };
|
||||||
mkPrimitive type.double v // {
|
|
||||||
__toString = self: toString self.value;
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,17 +5,29 @@ let
|
||||||
in
|
in
|
||||||
{
|
{
|
||||||
|
|
||||||
|
|
||||||
# Keeping these around in case we decide to change this horrible implementation :)
|
# Keeping these around in case we decide to change this horrible implementation :)
|
||||||
option = x:
|
option = x: x // { optional = true; };
|
||||||
x // { optional = true; };
|
|
||||||
|
|
||||||
yes = { tristate = "y"; optional = false; };
|
|
||||||
no = { tristate = "n"; optional = false; };
|
|
||||||
module = { tristate = "m"; optional = false; };
|
|
||||||
unset = { tristate = null; optional = false; };
|
|
||||||
freeform = x: { freeform = x; optional = false; };
|
|
||||||
|
|
||||||
|
yes = {
|
||||||
|
tristate = "y";
|
||||||
|
optional = false;
|
||||||
|
};
|
||||||
|
no = {
|
||||||
|
tristate = "n";
|
||||||
|
optional = false;
|
||||||
|
};
|
||||||
|
module = {
|
||||||
|
tristate = "m";
|
||||||
|
optional = false;
|
||||||
|
};
|
||||||
|
unset = {
|
||||||
|
tristate = null;
|
||||||
|
optional = false;
|
||||||
|
};
|
||||||
|
freeform = x: {
|
||||||
|
freeform = x;
|
||||||
|
optional = false;
|
||||||
|
};
|
||||||
|
|
||||||
# Common patterns/legacy used in common-config/hardened/config.nix
|
# Common patterns/legacy used in common-config/hardened/config.nix
|
||||||
whenHelpers = version: {
|
whenHelpers = version: {
|
||||||
|
|
|
@ -1,28 +1,41 @@
|
||||||
{ lib }:
|
{ lib }:
|
||||||
|
|
||||||
lib.mapAttrs (lname: lset: let
|
lib.mapAttrs
|
||||||
|
(
|
||||||
|
lname: lset:
|
||||||
|
let
|
||||||
defaultLicense = {
|
defaultLicense = {
|
||||||
shortName = lname;
|
shortName = lname;
|
||||||
free = true; # Most of our licenses are Free, explicitly declare unfree additions as such!
|
free = true; # Most of our licenses are Free, explicitly declare unfree additions as such!
|
||||||
deprecated = false;
|
deprecated = false;
|
||||||
};
|
};
|
||||||
|
|
||||||
mkLicense = licenseDeclaration: let
|
mkLicense =
|
||||||
|
licenseDeclaration:
|
||||||
|
let
|
||||||
applyDefaults = license: defaultLicense // license;
|
applyDefaults = license: defaultLicense // license;
|
||||||
applySpdx = license:
|
applySpdx =
|
||||||
if license ? spdxId
|
license:
|
||||||
then license // { url = "https://spdx.org/licenses/${license.spdxId}.html"; }
|
if license ? spdxId then
|
||||||
else license;
|
license // { url = "https://spdx.org/licenses/${license.spdxId}.html"; }
|
||||||
|
else
|
||||||
|
license;
|
||||||
applyRedistributable = license: { redistributable = license.free; } // license;
|
applyRedistributable = license: { redistributable = license.free; } // license;
|
||||||
in lib.pipe licenseDeclaration [
|
in
|
||||||
|
lib.pipe licenseDeclaration [
|
||||||
applyDefaults
|
applyDefaults
|
||||||
applySpdx
|
applySpdx
|
||||||
applyRedistributable
|
applyRedistributable
|
||||||
];
|
];
|
||||||
in mkLicense lset) ({
|
in
|
||||||
/* License identifiers from spdx.org where possible.
|
mkLicense lset
|
||||||
* If you cannot find your license here, then look for a similar license or
|
)
|
||||||
* add it to this list. The URL mentioned above is a good source for inspiration.
|
(
|
||||||
|
{
|
||||||
|
/*
|
||||||
|
License identifiers from spdx.org where possible.
|
||||||
|
If you cannot find your license here, then look for a similar license or
|
||||||
|
add it to this list. The URL mentioned above is a good source for inspiration.
|
||||||
*/
|
*/
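
To illustrate what such a declaration expands to via the `mkLicense` pipeline above (not part of the diff; `foo` is a hypothetical attribute name):

```nix
# mkLicense applied to the attribute `foo = { spdxId = "Foo-1.0"; fullName = "Foo License 1.0"; }`
# yields, per applyDefaults / applySpdx / applyRedistributable:
{
  shortName = "foo";                               # the attribute name (lname)
  free = true;                                     # default
  deprecated = false;                              # default
  redistributable = true;                          # derived from `free`
  spdxId = "Foo-1.0";
  fullName = "Foo License 1.0";
  url = "https://spdx.org/licenses/Foo-1.0.html";  # added because spdxId is set
}
```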
|
||||||
|
|
||||||
abstyles = {
|
abstyles = {
|
||||||
|
@ -33,7 +46,7 @@ in mkLicense lset) ({
|
||||||
acsl14 = {
|
acsl14 = {
|
||||||
fullName = "Anti-Capitalist Software License v1.4";
|
fullName = "Anti-Capitalist Software License v1.4";
|
||||||
url = "https://anticapitalist.software/";
|
url = "https://anticapitalist.software/";
|
||||||
/* restrictions on corporations apply for both use and redistribution */
|
# restrictions on corporations apply for both use and redistribution
|
||||||
free = false;
|
free = false;
|
||||||
redistributable = false;
|
redistributable = false;
|
||||||
};
|
};
|
||||||
|
@ -1270,7 +1283,8 @@ in mkLicense lset) ({
|
||||||
spdxId = "XSkat";
|
spdxId = "XSkat";
|
||||||
fullName = "XSkat License";
|
fullName = "XSkat License";
|
||||||
};
|
};
|
||||||
} // {
|
}
|
||||||
|
// {
|
||||||
# TODO: remove legacy aliases
|
# TODO: remove legacy aliases
|
||||||
apsl10 = {
|
apsl10 = {
|
||||||
# deprecated for consistency with `apple-psl20`; use `apple-psl10`
|
# deprecated for consistency with `apple-psl20`; use `apple-psl10`
|
||||||
|
@ -1309,4 +1323,5 @@ in mkLicense lset) ({
|
||||||
fullName = "GNU Lesser General Public License v3.0";
|
fullName = "GNU Lesser General Public License v3.0";
|
||||||
deprecated = true;
|
deprecated = true;
|
||||||
};
|
};
|
||||||
})
|
}
|
||||||
|
)
|
||||||
|
|
350 lib/lists.nix
|
@ -4,12 +4,29 @@
|
||||||
{ lib }:
|
{ lib }:
|
||||||
let
|
let
|
||||||
inherit (lib.strings) toInt;
|
inherit (lib.strings) toInt;
|
||||||
inherit (lib.trivial) compare min id warn pipe;
|
inherit (lib.trivial)
|
||||||
|
compare
|
||||||
|
min
|
||||||
|
id
|
||||||
|
warn
|
||||||
|
pipe
|
||||||
|
;
|
||||||
inherit (lib.attrsets) mapAttrs;
|
inherit (lib.attrsets) mapAttrs;
|
||||||
in
|
in
|
||||||
rec {
|
rec {
|
||||||
|
|
||||||
inherit (builtins) head tail length isList elemAt concatLists filter elem genList map;
|
inherit (builtins)
|
||||||
|
head
|
||||||
|
tail
|
||||||
|
length
|
||||||
|
isList
|
||||||
|
elemAt
|
||||||
|
concatLists
|
||||||
|
filter
|
||||||
|
elem
|
||||||
|
genList
|
||||||
|
map
|
||||||
|
;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Create a list consisting of a single element. `singleton x` is
|
Create a list consisting of a single element. `singleton x` is
|
||||||
|
@ -39,7 +56,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
singleton = x: [x];
|
singleton = x: [ x ];
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Apply the function to each element in the list.
|
Apply the function to each element in the list.
|
||||||
|
@ -81,7 +98,6 @@ rec {
|
||||||
`list` with `nul` as the starting value, i.e.,
|
`list` with `nul` as the starting value, i.e.,
|
||||||
`foldr op nul [x_1 x_2 ... x_n] == op x_1 (op x_2 ... (op x_n nul))`.
|
`foldr op nul [x_1 x_2 ... x_n] == op x_1 (op x_2 ... (op x_n nul))`.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`op`
|
`op`
|
||||||
|
@ -118,14 +134,13 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
foldr = op: nul: list:
|
foldr =
|
||||||
|
op: nul: list:
|
||||||
let
|
let
|
||||||
len = length list;
|
len = length list;
|
||||||
fold' = n:
|
fold' = n: if n == len then nul else op (elemAt list n) (fold' (n + 1));
|
||||||
if n == len
|
in
|
||||||
then nul
|
fold' 0;
|
||||||
else op (elemAt list n) (fold' (n + 1));
|
|
||||||
in fold' 0;
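
A small reminder (not part of the diff) of the fold direction this implements; the operator combines the rightmost element first.

```nix
lib.lists.foldr (x: acc: "(${x} ${acc})") "nil" [ "a" "b" "c" ]
# => "(a (b (c nil)))"
```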
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
`fold` is an alias of `foldr` for historic reasons
|
`fold` is an alias of `foldr` for historic reasons
|
||||||
|
@ -133,7 +148,6 @@ rec {
|
||||||
# FIXME(Profpatsch): deprecate?
|
# FIXME(Profpatsch): deprecate?
|
||||||
fold = foldr;
|
fold = foldr;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
“left fold”, like `foldr`, but from the left:
|
“left fold”, like `foldr`, but from the left:
|
||||||
|
|
||||||
|
@ -175,13 +189,12 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
foldl = op: nul: list:
|
foldl =
|
||||||
|
op: nul: list:
|
||||||
let
|
let
|
||||||
foldl' = n:
|
foldl' = n: if n == -1 then nul else op (foldl' (n - 1)) (elemAt list n);
|
||||||
if n == -1
|
in
|
||||||
then nul
|
foldl' (length list - 1);
|
||||||
else op (foldl' (n - 1)) (elemAt list n);
|
|
||||||
in foldl' (length list - 1);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Reduce a list by applying a binary operator from left to right,
|
Reduce a list by applying a binary operator from left to right,
|
||||||
|
@ -260,13 +273,11 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
foldl' =
|
foldl' =
|
||||||
op:
|
op: acc:
|
||||||
acc:
|
|
||||||
# The builtin `foldl'` is a bit lazier than one might expect.
|
# The builtin `foldl'` is a bit lazier than one might expect.
|
||||||
# See https://github.com/NixOS/nix/pull/7158.
|
# See https://github.com/NixOS/nix/pull/7158.
|
||||||
# In particular, the initial accumulator value is not forced before the first iteration starts.
|
# In particular, the initial accumulator value is not forced before the first iteration starts.
|
||||||
builtins.seq acc
|
builtins.seq acc (builtins.foldl' op acc);
|
||||||
(builtins.foldl' op acc);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Map with index starting from 0
|
Map with index starting from 0
|
||||||
|
@ -303,7 +314,6 @@ rec {
|
||||||
/**
|
/**
|
||||||
Map with index starting from 1
|
Map with index starting from 1
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`f`
|
`f`
|
||||||
|
@ -373,12 +383,9 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
ifilter0 =
|
ifilter0 =
|
||||||
ipred:
|
ipred: input:
|
||||||
input:
|
|
||||||
map (idx: elemAt input idx) (
|
map (idx: elemAt input idx) (
|
||||||
filter (idx: ipred idx (elemAt input idx)) (
|
filter (idx: ipred idx (elemAt input idx)) (genList (x: x) (length input))
|
||||||
genList (x: x) (length input)
|
|
||||||
)
|
|
||||||
);
|
);
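
A quick sketch (not part of the diff) of the index-aware filter above, using 0-based indices.

```nix
lib.lists.ifilter0 (i: v: i != 1) [ "a" "b" "c" ]
# => [ "a" "c" ]   drops the element at index 1 regardless of its value
```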
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -407,14 +414,12 @@ rec {
|
||||||
Flatten the argument into a single list; that is, nested lists are
|
Flatten the argument into a single list; that is, nested lists are
|
||||||
spliced into the top-level list.
|
spliced into the top-level list.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`x`
|
`x`
|
||||||
|
|
||||||
: 1\. Function argument
|
: 1\. Function argument
|
||||||
|
|
||||||
|
|
||||||
# Examples
|
# Examples
|
||||||
:::{.example}
|
:::{.example}
|
||||||
## `lib.lists.flatten` usage example
|
## `lib.lists.flatten` usage example
|
||||||
|
@ -428,15 +433,11 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
flatten = x:
|
flatten = x: if isList x then concatMap (y: flatten y) x else [ x ];
|
||||||
if isList x
|
|
||||||
then concatMap (y: flatten y) x
|
|
||||||
else [x];
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Remove elements equal to 'e' from a list. Useful for buildInputs.
|
Remove elements equal to 'e' from a list. Useful for buildInputs.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`e`
|
`e`
|
||||||
|
@ -464,8 +465,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
remove =
|
remove = e: filter (x: x != e);
|
||||||
e: filter (x: x != e);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Find the sole element in the list matching the specified
|
Find the sole element in the list matching the specified
|
||||||
|
@ -474,7 +474,6 @@ rec {
|
||||||
Returns `default` if no such element exists, or
|
Returns `default` if no such element exists, or
|
||||||
`multiple` if there are multiple matching elements.
|
`multiple` if there are multiple matching elements.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`pred`
|
`pred`
|
||||||
|
@ -515,14 +514,17 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
findSingle =
|
findSingle =
|
||||||
pred:
|
pred: default: multiple: list:
|
||||||
default:
|
let
|
||||||
multiple:
|
found = filter pred list;
|
||||||
list:
|
len = length found;
|
||||||
let found = filter pred list; len = length found;
|
in
|
||||||
in if len == 0 then default
|
if len == 0 then
|
||||||
else if len != 1 then multiple
|
default
|
||||||
else head found;
|
else if len != 1 then
|
||||||
|
multiple
|
||||||
|
else
|
||||||
|
head found;
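
The three possible outcomes of `findSingle`, sketched (not part of the diff):

```nix
lib.lists.findSingle (x: x == 3) "none" "multiple" [ 1 3 ]    # => 3
lib.lists.findSingle (x: x == 3) "none" "multiple" [ 1 2 ]    # => "none"
lib.lists.findSingle (x: x == 3) "none" "multiple" [ 3 3 ]    # => "multiple"
```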
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Find the first index in the list matching the specified
|
Find the first index in the list matching the specified
|
||||||
|
@ -562,9 +564,7 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
findFirstIndex =
|
findFirstIndex =
|
||||||
pred:
|
pred: default: list:
|
||||||
default:
|
|
||||||
list:
|
|
||||||
let
|
let
|
||||||
# A naive recursive implementation would be much simpler, but
|
# A naive recursive implementation would be much simpler, but
|
||||||
# would also overflow the evaluator stack. We use `foldl'` as a workaround
|
# would also overflow the evaluator stack. We use `foldl'` as a workaround
|
||||||
|
@ -579,12 +579,13 @@ rec {
|
||||||
# - if index >= 0 then pred (elemAt list index) and all elements before (elemAt list index) didn't satisfy pred
|
# - if index >= 0 then pred (elemAt list index) and all elements before (elemAt list index) didn't satisfy pred
|
||||||
#
|
#
|
||||||
# We start with index -1 and the 0'th element of the list, which satisfies the invariant
|
# We start with index -1 and the 0'th element of the list, which satisfies the invariant
|
||||||
resultIndex = foldl' (index: el:
|
resultIndex = foldl' (
|
||||||
|
index: el:
|
||||||
if index < 0 then
|
if index < 0 then
|
||||||
# No match yet before the current index, we need to check the element
|
# No match yet before the current index, we need to check the element
|
||||||
if pred el then
|
if pred el then
|
||||||
# We have a match! Turn it into the actual index to prevent future iterations from modifying it
|
# We have a match! Turn it into the actual index to prevent future iterations from modifying it
|
||||||
- index - 1
|
-index - 1
|
||||||
else
|
else
|
||||||
# Still no match, update the index to the next element (we're counting down, so minus one)
|
# Still no match, update the index to the next element (we're counting down, so minus one)
|
||||||
index - 1
|
index - 1
|
||||||
|
@ -593,10 +594,7 @@ rec {
|
||||||
index
|
index
|
||||||
) (-1) list;
|
) (-1) list;
|
||||||
in
|
in
|
||||||
if resultIndex < 0 then
|
if resultIndex < 0 then default else resultIndex;
|
||||||
default
|
|
||||||
else
|
|
||||||
resultIndex;
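
Not part of the diff: while searching, the accumulator counts down through negative numbers; on a match it flips to the actual non-negative index via `-index - 1`, which later iterations leave untouched. Usage sketch:

```nix
lib.lists.findFirstIndex (x: x > 3) null [ 1 6 4 ]   # => 1
lib.lists.findFirstIndex (x: x > 9) null [ 1 6 4 ]   # => null
```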
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Find the first element in the list matching the specified
|
Find the first element in the list matching the specified
|
||||||
|
@ -636,16 +634,11 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
findFirst =
|
findFirst =
|
||||||
pred:
|
pred: default: list:
|
||||||
default:
|
|
||||||
list:
|
|
||||||
let
|
let
|
||||||
index = findFirstIndex pred null list;
|
index = findFirstIndex pred null list;
|
||||||
in
|
in
|
||||||
if index == null then
|
if index == null then default else elemAt list index;
|
||||||
default
|
|
||||||
else
|
|
||||||
elemAt list index;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Return true if function `pred` returns true for at least one
|
Return true if function `pred` returns true for at least one
|
||||||
|
@ -744,8 +737,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
count =
|
count = pred: foldl' (c: x: if pred x then c + 1 else c) 0;
|
||||||
pred: foldl' (c: x: if pred x then c + 1 else c) 0;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Return a singleton list or an empty list, depending on a boolean
|
Return a singleton list or an empty list, depending on a boolean
|
||||||
|
@ -781,7 +773,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
optional = cond: elem: if cond then [elem] else [];
|
optional = cond: elem: if cond then [ elem ] else [ ];
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Return a list or an empty list, depending on a boolean value.
|
Return a list or an empty list, depending on a boolean value.
|
||||||
|
@ -815,10 +807,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
optionals =
|
optionals = cond: elems: if cond then elems else [ ];
|
||||||
cond:
|
|
||||||
elems: if cond then elems else [];
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
If argument is a list, return it; else, wrap it in a singleton
|
If argument is a list, return it; else, wrap it in a singleton
|
||||||
|
@ -844,7 +833,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
toList = x: if isList x then x else [x];
|
toList = x: if isList x then x else [ x ];
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Return a list of integers from `first` up to and including `last`.
|
Return a list of integers from `first` up to and including `last`.
|
||||||
|
@ -878,13 +867,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
range =
|
range = first: last: if first > last then [ ] else genList (n: first + n) (last - first + 1);
|
||||||
first:
|
|
||||||
last:
|
|
||||||
if first > last then
|
|
||||||
[]
|
|
||||||
else
|
|
||||||
genList (n: first + n) (last - first + 1);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Return a list with `n` copies of an element.
|
Return a list with `n` copies of an element.
|
||||||
|
@ -976,7 +959,6 @@ rec {
|
||||||
|
|
||||||
: 4\. Function argument
|
: 4\. Function argument
|
||||||
|
|
||||||
|
|
||||||
# Examples
|
# Examples
|
||||||
:::{.example}
|
:::{.example}
|
||||||
## `lib.lists.groupBy'` usage example
|
## `lib.lists.groupBy'` usage example
|
||||||
|
@ -1001,15 +983,21 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
groupBy' = op: nul: pred: lst: mapAttrs (name: foldl op nul) (groupBy pred lst);
|
groupBy' =
|
||||||
|
op: nul: pred: lst:
|
||||||
|
mapAttrs (name: foldl op nul) (groupBy pred lst);
|
||||||
|
|
||||||
groupBy = builtins.groupBy or (
|
groupBy =
|
||||||
pred: foldl' (r: e:
|
builtins.groupBy or (
|
||||||
|
pred:
|
||||||
|
foldl' (
|
||||||
|
r: e:
|
||||||
let
|
let
|
||||||
key = pred e;
|
key = pred e;
|
||||||
in
|
in
|
||||||
r // { ${key} = (r.${key} or []) ++ [e]; }
|
r // { ${key} = (r.${key} or [ ]) ++ [ e ]; }
|
||||||
) {});
|
) { }
|
||||||
|
);
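
A combined sketch (not part of the diff) of `groupBy` and `groupBy'`; the latter folds each group with `op`/`nul` after grouping.

```nix
lib.lists.groupBy (x: if x > 2 then "big" else "small") [ 1 2 3 4 ]
# => { big = [ 3 4 ]; small = [ 1 2 ]; }

lib.lists.groupBy' builtins.add 0 (x: if x > 2 then "big" else "small") [ 1 2 3 4 ]
# => { big = 7; small = 3; }
```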
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Merges two lists of the same size together. If the sizes aren't the same
|
Merges two lists of the same size together. If the sizes aren't the same
|
||||||
|
@ -1048,11 +1036,8 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
zipListsWith =
|
zipListsWith =
|
||||||
f:
|
f: fst: snd:
|
||||||
fst:
|
genList (n: f (elemAt fst n) (elemAt snd n)) (min (length fst) (length snd));
|
||||||
snd:
|
|
||||||
genList
|
|
||||||
(n: f (elemAt fst n) (elemAt snd n)) (min (length fst) (length snd));
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Merges two lists of the same size together. If the sizes aren't the same
|
Merges two lists of the same size together. If the sizes aren't the same
|
||||||
|
@ -1113,8 +1098,12 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
reverseList = xs:
|
reverseList =
|
||||||
let l = length xs; in genList (n: elemAt xs (l - n - 1)) l;
|
xs:
|
||||||
|
let
|
||||||
|
l = length xs;
|
||||||
|
in
|
||||||
|
genList (n: elemAt xs (l - n - 1)) l;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Depth-First Search (DFS) for lists `list != []`.
|
Depth-First Search (DFS) for lists `list != []`.
|
||||||
|
@ -1122,7 +1111,6 @@ rec {
|
||||||
`before a b == true` means that `b` depends on `a` (there's an
|
`before a b == true` means that `b` depends on `a` (there's an
|
||||||
edge from `b` to `a`).
|
edge from `b` to `a`).
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`stopOnCycles`
|
`stopOnCycles`
|
||||||
|
@ -1137,7 +1125,6 @@ rec {
|
||||||
|
|
||||||
: 3\. Function argument
|
: 3\. Function argument
|
||||||
|
|
||||||
|
|
||||||
# Examples
|
# Examples
|
||||||
:::{.example}
|
:::{.example}
|
||||||
## `lib.lists.listDfs` usage example
|
## `lib.lists.listDfs` usage example
|
||||||
|
@ -1158,22 +1145,32 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
listDfs = stopOnCycles: before: list:
|
listDfs =
|
||||||
|
stopOnCycles: before: list:
|
||||||
let
|
let
|
||||||
dfs' = us: visited: rest:
|
dfs' =
|
||||||
|
us: visited: rest:
|
||||||
let
|
let
|
||||||
c = filter (x: before x us) visited;
|
c = filter (x: before x us) visited;
|
||||||
b = partition (x: before x us) rest;
|
b = partition (x: before x us) rest;
|
||||||
in if stopOnCycles && (length c > 0)
|
in
|
||||||
then { cycle = us; loops = c; inherit visited rest; }
|
if stopOnCycles && (length c > 0) then
|
||||||
else if length b.right == 0
|
{
|
||||||
then # nothing is before us
|
cycle = us;
|
||||||
{ minimal = us; inherit visited rest; }
|
loops = c;
|
||||||
else # grab the first one before us and continue
|
inherit visited rest;
|
||||||
dfs' (head b.right)
|
}
|
||||||
([ us ] ++ visited)
|
else if length b.right == 0 then
|
||||||
(tail b.right ++ b.wrong);
|
# nothing is before us
|
||||||
in dfs' (head list) [] (tail list);
|
{
|
||||||
|
minimal = us;
|
||||||
|
inherit visited rest;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
# grab the first one before us and continue
|
||||||
|
dfs' (head b.right) ([ us ] ++ visited) (tail b.right ++ b.wrong);
|
||||||
|
in
|
||||||
|
dfs' (head list) [ ] (tail list);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Sort a list based on a partial ordering using DFS. This
|
Sort a list based on a partial ordering using DFS. This
|
||||||
|
@ -1183,7 +1180,6 @@ rec {
|
||||||
`before a b == true` means that `b` should be after `a`
|
`before a b == true` means that `b` should be after `a`
|
||||||
in the result.
|
in the result.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`before`
|
`before`
|
||||||
|
@ -1194,7 +1190,6 @@ rec {
|
||||||
|
|
||||||
: 2\. Function argument
|
: 2\. Function argument
|
||||||
|
|
||||||
|
|
||||||
# Examples
|
# Examples
|
||||||
:::{.example}
|
:::{.example}
|
||||||
## `lib.lists.toposort` usage example
|
## `lib.lists.toposort` usage example
|
||||||
|
@ -1215,23 +1210,27 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
toposort = before: list:
|
toposort =
|
||||||
|
before: list:
|
||||||
let
|
let
|
||||||
dfsthis = listDfs true before list;
|
dfsthis = listDfs true before list;
|
||||||
toporest = toposort before (dfsthis.visited ++ dfsthis.rest);
|
toporest = toposort before (dfsthis.visited ++ dfsthis.rest);
|
||||||
in
|
in
|
||||||
if length list < 2
|
if length list < 2 then
|
||||||
then # finish
|
# finish
|
||||||
{ result = list; }
|
{ result = list; }
|
||||||
else if dfsthis ? cycle
|
else if dfsthis ? cycle then
|
||||||
then # there's a cycle, starting from the current vertex, return it
|
# there's a cycle, starting from the current vertex, return it
|
||||||
{ cycle = reverseList ([ dfsthis.cycle ] ++ dfsthis.visited);
|
{
|
||||||
inherit (dfsthis) loops; }
|
cycle = reverseList ([ dfsthis.cycle ] ++ dfsthis.visited);
|
||||||
else if toporest ? cycle
|
inherit (dfsthis) loops;
|
||||||
then # there's a cycle somewhere else in the graph, return it
|
}
|
||||||
|
else if toporest ? cycle then
|
||||||
|
# there's a cycle somewhere else in the graph, return it
|
||||||
toporest
|
toporest
|
||||||
# Slow, but short. Can be made a bit faster with an explicit stack.
|
# Slow, but short. Can be made a bit faster with an explicit stack.
|
||||||
else # there are no cycles
|
else
|
||||||
|
# there are no cycles
|
||||||
{ result = [ dfsthis.minimal ] ++ toporest.result; };
|
{ result = [ dfsthis.minimal ] ++ toporest.result; };
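
A minimal sketch (not part of the diff) of the two result shapes of `toposort`: a successful ordering versus a detected cycle.

```nix
lib.lists.toposort (a: b: a < b) [ 3 2 1 ]
# => { result = [ 1 2 3 ]; }

lib.lists.toposort (a: b: true) [ 1 2 ]
# => { cycle = [ ... ]; loops = [ ... ]; }   a cycle was found; no `result` attribute
```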
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -1288,7 +1287,6 @@ rec {
|
||||||
sortOn f == sort (p: q: f p < f q)
|
sortOn f == sort (p: q: f p < f q)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`f`
|
`f`
|
||||||
|
@ -1316,18 +1314,22 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
sortOn = f: list:
|
sortOn =
|
||||||
|
f: list:
|
||||||
let
|
let
|
||||||
# Heterogeneous list as pair may be ugly, but requires minimal allocations.
|
# Heterogeneous list as pair may be ugly, but requires minimal allocations.
|
||||||
pairs = map (x: [(f x) x]) list;
|
pairs = map (x: [
|
||||||
|
(f x)
|
||||||
|
x
|
||||||
|
]) list;
|
||||||
in
|
in
|
||||||
map
|
map (x: builtins.elemAt x 1) (
|
||||||
(x: builtins.elemAt x 1)
|
sort
|
||||||
(sort
|
|
||||||
# Compare the first element of the pairs
|
# Compare the first element of the pairs
|
||||||
# Do not factor out the `<`, to avoid calls in hot code; duplicate instead.
|
# Do not factor out the `<`, to avoid calls in hot code; duplicate instead.
|
||||||
(a: b: head a < head b)
|
(a: b: head a < head b)
|
||||||
pairs);
|
pairs
|
||||||
|
);
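
A short usage sketch (not part of the diff); thanks to the pair trick above, `f` is applied only once per element.

```nix
lib.lists.sortOn builtins.stringLength [ "aaa" "b" "cc" ]
# => [ "b" "cc" "aaa" ]
```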
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Compare two lists element-by-element.
|
Compare two lists element-by-element.
|
||||||
|
@ -1346,7 +1348,6 @@ rec {
|
||||||
|
|
||||||
: 3\. Function argument
|
: 3\. Function argument
|
||||||
|
|
||||||
|
|
||||||
# Examples
|
# Examples
|
||||||
:::{.example}
|
:::{.example}
|
||||||
## `lib.lists.compareLists` usage example
|
## `lib.lists.compareLists` usage example
|
||||||
|
@ -1364,30 +1365,28 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
compareLists = cmp: a: b:
|
compareLists =
|
||||||
if a == []
|
cmp: a: b:
|
||||||
then if b == []
|
if a == [ ] then
|
||||||
then 0
|
if b == [ ] then 0 else -1
|
||||||
else -1
|
else if b == [ ] then
|
||||||
else if b == []
|
1
|
||||||
then 1
|
else
|
||||||
else let rel = cmp (head a) (head b); in
|
let
|
||||||
if rel == 0
|
rel = cmp (head a) (head b);
|
||||||
then compareLists cmp (tail a) (tail b)
|
in
|
||||||
else rel;
|
if rel == 0 then compareLists cmp (tail a) (tail b) else rel;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Sort list using "Natural sorting".
|
Sort list using "Natural sorting".
|
||||||
Numeric portions of strings are sorted in numeric order.
|
Numeric portions of strings are sorted in numeric order.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`lst`
|
`lst`
|
||||||
|
|
||||||
: 1\. Function argument
|
: 1\. Function argument
|
||||||
|
|
||||||
|
|
||||||
# Examples
|
# Examples
|
||||||
:::{.example}
|
:::{.example}
|
||||||
## `lib.lists.naturalSort` usage example
|
## `lib.lists.naturalSort` usage example
|
||||||
|
@ -1403,10 +1402,14 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
naturalSort = lst:
|
naturalSort =
|
||||||
|
lst:
|
||||||
let
|
let
|
||||||
vectorise = s: map (x: if isList x then toInt (head x) else x) (builtins.split "(0|[1-9][0-9]*)" s);
|
vectorise = s: map (x: if isList x then toInt (head x) else x) (builtins.split "(0|[1-9][0-9]*)" s);
|
||||||
prepared = map (x: [ (vectorise x) x ]) lst; # remember vectorised version for O(n) regex splits
|
prepared = map (x: [
|
||||||
|
(vectorise x)
|
||||||
|
x
|
||||||
|
]) lst; # remember vectorised version for O(n) regex splits
|
||||||
less = a: b: (compareLists compare (head a) (head b)) < 0;
|
less = a: b: (compareLists compare (head a) (head b)) < 0;
|
||||||
in
|
in
|
||||||
map (x: elemAt x 1) (sort less prepared);
|
map (x: elemAt x 1) (sort less prepared);
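
Not part of the diff: the vectorise/compareLists machinery above compares numeric runs as numbers rather than character by character, e.g.:

```nix
lib.lists.naturalSort [ "disk11" "disk2" "disk1" ]
# => [ "disk1" "disk2" "disk11" ]
```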
|
||||||
|
@ -1414,7 +1417,6 @@ rec {
|
||||||
/**
|
/**
|
||||||
Return the first (at most) N elements of a list.
|
Return the first (at most) N elements of a list.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`count`
|
`count`
|
||||||
|
@ -1444,13 +1446,11 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
take =
|
take = count: sublist 0 count;
|
||||||
count: sublist 0 count;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Remove the first (at most) N elements of a list.
|
Remove the first (at most) N elements of a list.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`count`
|
`count`
|
||||||
|
@ -1480,14 +1480,11 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
drop =
|
drop = count: list: sublist count (length list) list;
|
||||||
count:
|
|
||||||
list: sublist count (length list) list;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Whether the first list is a prefix of the second list.
|
Whether the first list is a prefix of the second list.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`list1`
|
`list1`
|
||||||
|
@ -1517,10 +1514,7 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
hasPrefix =
|
hasPrefix = list1: list2: take (length list1) list2 == list1;
|
||||||
list1:
|
|
||||||
list2:
|
|
||||||
take (length list1) list2 == list1;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Remove the first list as a prefix from the second list.
|
Remove the first list as a prefix from the second list.
|
||||||
|
@ -1556,8 +1550,7 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
removePrefix =
|
removePrefix =
|
||||||
list1:
|
list1: list2:
|
||||||
list2:
|
|
||||||
if hasPrefix list1 list2 then
|
if hasPrefix list1 list2 then
|
||||||
drop (length list1) list2
|
drop (length list1) list2
|
||||||
else
|
else
|
||||||
|
@ -1601,20 +1594,22 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
sublist =
|
sublist =
|
||||||
start:
|
start: count: list:
|
||||||
count:
|
let
|
||||||
list:
|
len = length list;
|
||||||
let len = length list; in
|
in
|
||||||
genList
|
genList (n: elemAt list (n + start)) (
|
||||||
(n: elemAt list (n + start))
|
if start >= len then
|
||||||
(if start >= len then 0
|
0
|
||||||
else if start + count > len then len - start
|
else if start + count > len then
|
||||||
else count);
|
len - start
|
||||||
|
else
|
||||||
|
count
|
||||||
|
);
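
The clamping logic above means `sublist` never indexes out of bounds (sketch, not part of the diff):

```nix
lib.lists.sublist 1 2 [ "a" "b" "c" "d" ]   # => [ "b" "c" ]
lib.lists.sublist 3 5 [ "a" "b" "c" "d" ]   # => [ "d" ]   count clamped to len - start
lib.lists.sublist 9 1 [ "a" "b" "c" "d" ]   # => [ ]       start past the end yields an empty list
```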
|
||||||
|
|
||||||
/**
|
/**
|
||||||
The common prefix of two lists.
|
The common prefix of two lists.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`list1`
|
`list1`
|
||||||
|
@ -1647,8 +1642,7 @@ rec {
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
commonPrefix =
|
commonPrefix =
|
||||||
list1:
|
list1: list2:
|
||||||
list2:
|
|
||||||
let
|
let
|
||||||
# Zip the lists together into a list of booleans whether each element matches
|
# Zip the lists together into a list of booleans whether each element matches
|
||||||
matchings = zipListsWith (fst: snd: fst != snd) list1 list2;
|
matchings = zipListsWith (fst: snd: fst != snd) list1 list2;
|
||||||
|
@ -1665,7 +1659,6 @@ rec {
|
||||||
|
|
||||||
This function throws an error if the list is empty.
|
This function throws an error if the list is empty.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`list`
|
`list`
|
||||||
|
@ -1689,8 +1682,9 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
last = list:
|
last =
|
||||||
assert lib.assertMsg (list != []) "lists.last: list must not be empty!";
|
list:
|
||||||
|
assert lib.assertMsg (list != [ ]) "lists.last: list must not be empty!";
|
||||||
elemAt list (length list - 1);
|
elemAt list (length list - 1);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -1698,7 +1692,6 @@ rec {
|
||||||
|
|
||||||
This function throws an error if the list is empty.
|
This function throws an error if the list is empty.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`list`
|
`list`
|
||||||
|
@ -1722,15 +1715,14 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
init = list:
|
init =
|
||||||
assert lib.assertMsg (list != []) "lists.init: list must not be empty!";
|
list:
|
||||||
|
assert lib.assertMsg (list != [ ]) "lists.init: list must not be empty!";
|
||||||
take (length list - 1) list;
|
take (length list - 1) list;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Return the image of the cross product of some lists by a function.
|
Return the image of the cross product of some lists by a function.
|
||||||
|
|
||||||
|
|
||||||
# Examples
|
# Examples
|
||||||
:::{.example}
|
:::{.example}
|
||||||
## `lib.lists.crossLists` usage example
|
## `lib.lists.crossLists` usage example
|
||||||
|
@ -1748,8 +1740,8 @@ rec {
|
||||||
```
|
```
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
crossLists = warn
|
crossLists = warn ''
|
||||||
''lib.crossLists is deprecated, use lib.mapCartesianProduct instead.
|
lib.crossLists is deprecated, use lib.mapCartesianProduct instead.
|
||||||
|
|
||||||
For example, the following function call:
|
For example, the following function call:
|
||||||
|
|
||||||
|
@ -1760,13 +1752,11 @@ rec {
|
||||||
|
|
||||||
nix-repl> lib.mapCartesianProduct ({x,y}: x+y) { x = [1 2]; y = [3 4]; }
|
nix-repl> lib.mapCartesianProduct ({x,y}: x+y) { x = [1 2]; y = [3 4]; }
|
||||||
[ 4 5 5 6 ]
|
[ 4 5 5 6 ]
|
||||||
''
|
'' (f: foldl (fs: args: concatMap (f: map f args) fs) [ f ]);
|
||||||
(f: foldl (fs: args: concatMap (f: map f args) fs) [f]);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Remove duplicate elements from the `list`. O(n^2) complexity.
|
Remove duplicate elements from the `list`. O(n^2) complexity.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`list`
|
`list`
|
||||||
|
@ -1790,12 +1780,11 @@ rec {
|
||||||
|
|
||||||
:::
|
:::
|
||||||
*/
|
*/
|
||||||
unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [];
|
unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [ ];
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Check if list contains only unique elements. O(n^2) complexity.
|
Check if list contains only unique elements. O(n^2) complexity.
|
||||||
|
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
|
|
||||||
`list`
|
`list`
|
||||||
|
@ -1823,7 +1812,6 @@ rec {
|
||||||
*/
|
*/
|
||||||
allUnique = list: (length (unique list) == length list);
|
allUnique = list: (length (unique list) == length list);
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
Intersects list `list1` with another list (`list2`).
|
Intersects list `list1` with another list (`list2`).
|
||||||
|
|
||||||
|
@ -1839,7 +1827,6 @@ rec {
|
||||||
|
|
||||||
: Second list
|
: Second list
|
||||||
|
|
||||||
|
|
||||||
# Examples
|
# Examples
|
||||||
:::{.example}
|
:::{.example}
|
||||||
## `lib.lists.intersectLists` usage example
|
## `lib.lists.intersectLists` usage example
|
||||||
|
@ -1868,7 +1855,6 @@ rec {
|
||||||
|
|
||||||
: Second list
|
: Second list
|
||||||
|
|
||||||
|
|
||||||
# Examples
|
# Examples
|
||||||
:::{.example}
|
:::{.example}
|
||||||
## `lib.lists.subtractLists` usage example
|
## `lib.lists.subtractLists` usage example
|
||||||
|
|
152 lib/meta.nix

|
@ -1,80 +1,91 @@
|
||||||
/* Some functions for manipulating meta attributes, as well as the
|
/*
|
||||||
name attribute. */
|
Some functions for manipulating meta attributes, as well as the
|
||||||
|
name attribute.
|
||||||
|
*/
|
||||||
|
|
||||||
{ lib }:
|
{ lib }:
|
||||||
|
|
||||||
let
|
let
|
||||||
inherit (lib) matchAttrs any all isDerivation getBin assertMsg;
|
inherit (lib)
|
||||||
|
matchAttrs
|
||||||
|
any
|
||||||
|
all
|
||||||
|
isDerivation
|
||||||
|
getBin
|
||||||
|
assertMsg
|
||||||
|
;
|
||||||
inherit (builtins) isString match typeOf;
|
inherit (builtins) isString match typeOf;
|
||||||
|
|
||||||
in
|
in
|
||||||
rec {
|
rec {
|
||||||
|
|
||||||
|
/*
|
||||||
/* Add to or override the meta attributes of the given
|
Add to or override the meta attributes of the given
|
||||||
derivation.
|
derivation.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
addMetaAttrs {description = "Bla blah";} somePkg
|
addMetaAttrs {description = "Bla blah";} somePkg
|
||||||
*/
|
*/
|
||||||
addMetaAttrs = newAttrs: drv:
|
addMetaAttrs = newAttrs: drv: drv // { meta = (drv.meta or { }) // newAttrs; };
|
||||||
drv // { meta = (drv.meta or {}) // newAttrs; };
|
|
||||||
|
|
||||||
|
# Disable Hydra builds of given derivation.
|
||||||
|
dontDistribute = drv: addMetaAttrs { hydraPlatforms = [ ]; } drv;
|
||||||
|
|
||||||
/* Disable Hydra builds of given derivation.
|
/*
|
||||||
*/
|
Change the symbolic name of a package for presentation purposes
|
||||||
dontDistribute = drv: addMetaAttrs { hydraPlatforms = []; } drv;
|
|
||||||
|
|
||||||
|
|
||||||
/* Change the symbolic name of a package for presentation purposes
|
|
||||||
(i.e., so that nix-env users can tell them apart).
|
(i.e., so that nix-env users can tell them apart).
|
||||||
*/
|
*/
|
||||||
setName = name: drv: drv // {inherit name;};
|
setName = name: drv: drv // { inherit name; };
|
||||||
|
|
||||||
|
/*
|
||||||
/* Like `setName`, but takes the previous name as an argument.
|
Like `setName`, but takes the previous name as an argument.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
updateName (oldName: oldName + "-experimental") somePkg
|
updateName (oldName: oldName + "-experimental") somePkg
|
||||||
*/
|
*/
|
||||||
updateName = updater: drv: drv // {name = updater (drv.name);};
|
updateName = updater: drv: drv // { name = updater (drv.name); };
|
||||||
|
|
||||||
|
/*
|
||||||
/* Append a suffix to the name of a package (before the version
|
Append a suffix to the name of a package (before the version
|
||||||
part). */
|
part).
|
||||||
appendToName = suffix: updateName (name:
|
|
||||||
let x = builtins.parseDrvName name; in "${x.name}-${suffix}-${x.version}");
|
|
||||||
|
|
||||||
|
|
||||||
/* Apply a function to each derivation and only to derivations in an attrset.
|
|
||||||
*/
|
*/
|
||||||
mapDerivationAttrset = f: set: lib.mapAttrs (name: pkg: if lib.isDerivation pkg then (f pkg) else pkg) set;
|
appendToName =
|
||||||
|
suffix:
|
||||||
|
updateName (
|
||||||
|
name:
|
||||||
|
let
|
||||||
|
x = builtins.parseDrvName name;
|
||||||
|
in
|
||||||
|
"${x.name}-${suffix}-${x.version}"
|
||||||
|
);
|
||||||
|
|
||||||
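A small sketch of what the reshaped `appendToName` does; the package and its `name` here are hypothetical:

```nix
# somePkg.name == "hello-2.12" (hypothetical package)
lib.appendToName "man" somePkg
# => somePkg // { name = "hello-man-2.12"; }
# builtins.parseDrvName splits "hello-2.12" into name and version,
# and the suffix is inserted between the two.
```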
/* Set the nix-env priority of the package.
|
# Apply a function to each derivation and only to derivations in an attrset.
|
||||||
*/
|
mapDerivationAttrset =
|
||||||
|
f: set: lib.mapAttrs (name: pkg: if lib.isDerivation pkg then (f pkg) else pkg) set;
|
||||||
|
|
||||||
|
# Set the nix-env priority of the package.
|
||||||
setPrio = priority: addMetaAttrs { inherit priority; };
|
setPrio = priority: addMetaAttrs { inherit priority; };
|
||||||
|
|
||||||
/* Decrease the nix-env priority of the package, i.e., other
|
/*
|
||||||
|
Decrease the nix-env priority of the package, i.e., other
|
||||||
versions/variants of the package will be preferred.
|
versions/variants of the package will be preferred.
|
||||||
*/
|
*/
|
||||||
lowPrio = setPrio 10;
|
lowPrio = setPrio 10;
|
||||||
|
|
||||||
/* Apply lowPrio to an attrset with derivations
|
# Apply lowPrio to an attrset with derivations
|
||||||
*/
|
|
||||||
lowPrioSet = set: mapDerivationAttrset lowPrio set;
|
lowPrioSet = set: mapDerivationAttrset lowPrio set;
|
||||||
|
|
||||||
|
/*
|
||||||
/* Increase the nix-env priority of the package, i.e., this
|
Increase the nix-env priority of the package, i.e., this
|
||||||
version/variant of the package will be preferred.
|
version/variant of the package will be preferred.
|
||||||
*/
|
*/
|
||||||
hiPrio = setPrio (-10);
|
hiPrio = setPrio (-10);
|
||||||
|
|
||||||
/* Apply hiPrio to an attrset with derivations
|
# Apply hiPrio to an attrset with derivations
|
||||||
*/
|
|
||||||
hiPrioSet = set: mapDerivationAttrset hiPrio set;
|
hiPrioSet = set: mapDerivationAttrset hiPrio set;
|
||||||
|
|
||||||
|
/*
|
||||||
/* Check to see if a platform is matched by the given `meta.platforms`
|
Check to see if a platform is matched by the given `meta.platforms`
|
||||||
element.
|
element.
|
||||||
|
|
||||||
A `meta.platform` pattern is either
|
A `meta.platform` pattern is either
|
||||||
|
@ -92,7 +103,9 @@ rec {
|
||||||
lib.meta.platformMatch { system = "aarch64-darwin"; } "aarch64-darwin"
|
lib.meta.platformMatch { system = "aarch64-darwin"; } "aarch64-darwin"
|
||||||
=> true
|
=> true
|
||||||
*/
|
*/
|
||||||
platformMatch = platform: elem: (
|
platformMatch =
|
||||||
|
platform: elem:
|
||||||
|
(
|
||||||
# Check with simple string comparison if elem was a string.
|
# Check with simple string comparison if elem was a string.
|
||||||
#
|
#
|
||||||
# The majority of comparisons done with this function will be against meta.platforms
|
# The majority of comparisons done with this function will be against meta.platforms
|
||||||
|
@ -100,15 +113,17 @@ rec {
|
||||||
#
|
#
|
||||||
# Avoiding an attrset allocation results in significant performance gains (~2-30) across the board in OfBorg
|
# Avoiding an attrset allocation results in significant performance gains (~2-30) across the board in OfBorg
|
||||||
# because this is a hot path for nixpkgs.
|
# because this is a hot path for nixpkgs.
|
||||||
if isString elem then platform ? system && elem == platform.system
|
if isString elem then
|
||||||
else matchAttrs (
|
platform ? system && elem == platform.system
|
||||||
|
else
|
||||||
|
matchAttrs (
|
||||||
# Normalize platform attrset.
|
# Normalize platform attrset.
|
||||||
if elem ? parsed then elem
|
if elem ? parsed then elem else { parsed = elem; }
|
||||||
else { parsed = elem; }
|
|
||||||
) platform
|
) platform
|
||||||
);
|
);
|
||||||
|
|
||||||
/* Check if a package is available on a given platform.
|
/*
|
||||||
|
Check if a package is available on a given platform.
|
||||||
|
|
||||||
A package is available on a platform if both
|
A package is available on a platform if both
|
||||||
|
|
||||||
|
@ -121,11 +136,13 @@ rec {
|
||||||
lib.meta.availableOn { system = "aarch64-darwin"; } pkg.zsh
|
lib.meta.availableOn { system = "aarch64-darwin"; } pkg.zsh
|
||||||
=> true
|
=> true
|
||||||
*/
|
*/
|
||||||
availableOn = platform: pkg:
|
availableOn =
|
||||||
((!pkg?meta.platforms) || any (platformMatch platform) pkg.meta.platforms) &&
|
platform: pkg:
|
||||||
all (elem: !platformMatch platform elem) (pkg.meta.badPlatforms or []);
|
((!pkg ? meta.platforms) || any (platformMatch platform) pkg.meta.platforms)
|
||||||
|
&& all (elem: !platformMatch platform elem) (pkg.meta.badPlatforms or [ ]);
|
||||||
|
|
||||||
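To illustrate how `platformMatch` and `availableOn` combine, a sketch with a hypothetical package attribute set:

```nix
let
  pkg = {
    meta.platforms = [ "x86_64-linux" "aarch64-darwin" ];
    meta.badPlatforms = [ ];
  };
in
lib.meta.availableOn { system = "aarch64-darwin"; } pkg
# => true: one meta.platforms entry matches the system string,
#    and no meta.badPlatforms entry matches.
```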
/* Get the corresponding attribute in lib.licenses
|
/*
|
||||||
|
Get the corresponding attribute in lib.licenses
|
||||||
from the SPDX ID.
|
from the SPDX ID.
|
||||||
For SPDX IDs, see
|
For SPDX IDs, see
|
||||||
https://spdx.org/licenses
|
https://spdx.org/licenses
|
||||||
|
@ -144,15 +161,23 @@ rec {
|
||||||
*/
|
*/
|
||||||
getLicenseFromSpdxId =
|
getLicenseFromSpdxId =
|
||||||
let
|
let
|
||||||
spdxLicenses = lib.mapAttrs (id: ls: assert lib.length ls == 1; builtins.head ls)
|
spdxLicenses =
|
||||||
|
lib.mapAttrs
|
||||||
|
(
|
||||||
|
id: ls:
|
||||||
|
assert lib.length ls == 1;
|
||||||
|
builtins.head ls
|
||||||
|
)
|
||||||
(lib.groupBy (l: lib.toLower l.spdxId) (lib.filter (l: l ? spdxId) (lib.attrValues lib.licenses)));
|
(lib.groupBy (l: lib.toLower l.spdxId) (lib.filter (l: l ? spdxId) (lib.attrValues lib.licenses)));
|
||||||
in licstr:
|
in
|
||||||
spdxLicenses.${ lib.toLower licstr } or (
|
licstr:
|
||||||
lib.warn "getLicenseFromSpdxId: No license matches the given SPDX ID: ${licstr}"
|
spdxLicenses.${lib.toLower licstr}
|
||||||
{ shortName = licstr; }
|
or (lib.warn "getLicenseFromSpdxId: No license matches the given SPDX ID: ${licstr}" {
|
||||||
);
|
shortName = licstr;
|
||||||
|
});
|
||||||
|
|
||||||
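A usage sketch for `getLicenseFromSpdxId`; the lookup is case-insensitive over all licenses that define `spdxId`:

```nix
lib.getLicenseFromSpdxId "MIT" == lib.licenses.mit
# => true

lib.getLicenseFromSpdxId "mIt" == lib.licenses.mit
# => true

lib.getLicenseFromSpdxId "MY LICENSE"
# => emits a warning and falls back to { shortName = "MY LICENSE"; }
```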
/* Get the path to the main program of a package based on meta.mainProgram
|
/*
|
||||||
|
Get the path to the main program of a package based on meta.mainProgram
|
||||||
|
|
||||||
Type: getExe :: package -> string
|
Type: getExe :: package -> string
|
||||||
|
|
||||||
|
@ -162,14 +187,22 @@ rec {
|
||||||
getExe pkgs.mustache-go
|
getExe pkgs.mustache-go
|
||||||
=> "/nix/store/am9ml4f4ywvivxnkiaqwr0hyxka1xjsf-mustache-go-1.3.0/bin/mustache"
|
=> "/nix/store/am9ml4f4ywvivxnkiaqwr0hyxka1xjsf-mustache-go-1.3.0/bin/mustache"
|
||||||
*/
|
*/
|
||||||
getExe = x: getExe' x (x.meta.mainProgram or (
|
getExe =
|
||||||
|
x:
|
||||||
|
getExe' x (
|
||||||
|
x.meta.mainProgram or (
|
||||||
# This could be turned into an error when 23.05 is at end of life
|
# This could be turned into an error when 23.05 is at end of life
|
||||||
lib.warn "getExe: Package ${lib.strings.escapeNixIdentifier x.meta.name or x.pname or x.name} does not have the meta.mainProgram attribute. We'll assume that the main program has the same name for now, but this behavior is deprecated, because it leads to surprising errors when the assumption does not hold. If the package has a main program, please set `meta.mainProgram` in its definition to make this warning go away. Otherwise, if the package does not have a main program, or if you don't control its definition, use getExe' to specify the name to the program, such as lib.getExe' foo \"bar\"."
|
lib.warn
|
||||||
|
"getExe: Package ${
|
||||||
|
lib.strings.escapeNixIdentifier x.meta.name or x.pname or x.name
|
||||||
|
} does not have the meta.mainProgram attribute. We'll assume that the main program has the same name for now, but this behavior is deprecated, because it leads to surprising errors when the assumption does not hold. If the package has a main program, please set `meta.mainProgram` in its definition to make this warning go away. Otherwise, if the package does not have a main program, or if you don't control its definition, use getExe' to specify the name to the program, such as lib.getExe' foo \"bar\"."
|
||||||
lib.getName
|
lib.getName
|
||||||
x
|
x
|
||||||
));
|
)
|
||||||
|
);
|
||||||
|
|
||||||
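A usage sketch for `getExe` (store paths elided); the second call shows the explicit form that sidesteps the `meta.mainProgram` fallback described in the warning above:

```nix
lib.getExe pkgs.hello
# => "/nix/store/…-hello-…/bin/hello"
# pkgs.hello sets meta.mainProgram, so no warning is emitted.

# For a package without meta.mainProgram, either set that attribute in the
# package definition or name the program explicitly:
lib.getExe' pkgs.hello "hello"
# => the same path, with the program name given explicitly
```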
/* Get the path of a program of a derivation.
|
/*
|
||||||
|
Get the path of a program of a derivation.
|
||||||
|
|
||||||
Type: getExe' :: derivation -> string -> string
|
Type: getExe' :: derivation -> string -> string
|
||||||
Example:
|
Example:
|
||||||
|
@ -178,7 +211,8 @@ rec {
|
||||||
getExe' pkgs.imagemagick "convert"
|
getExe' pkgs.imagemagick "convert"
|
||||||
=> "/nix/store/5rs48jamq7k6sal98ymj9l4k2bnwq515-imagemagick-7.1.1-15/bin/convert"
|
=> "/nix/store/5rs48jamq7k6sal98ymj9l4k2bnwq515-imagemagick-7.1.1-15/bin/convert"
|
||||||
*/
|
*/
|
||||||
getExe' = x: y:
|
getExe' =
|
||||||
|
x: y:
|
||||||
assert assertMsg (isDerivation x)
|
assert assertMsg (isDerivation x)
|
||||||
"lib.meta.getExe': The first argument is of type ${typeOf x}, but it should be a derivation instead.";
|
"lib.meta.getExe': The first argument is of type ${typeOf x}, but it should be a derivation instead.";
|
||||||
assert assertMsg (isString y)
|
assert assertMsg (isString y)
|
||||||
|
|
1052 lib/modules.nix
File diff suppressed because it is too large
325 lib/options.nix
|
@ -1,4 +1,4 @@
|
||||||
/* Nixpkgs/NixOS option handling. */
|
# Nixpkgs/NixOS option handling.
|
||||||
{ lib }:
|
{ lib }:
|
||||||
|
|
||||||
let
|
let
|
||||||
|
@ -25,27 +25,18 @@ let
|
||||||
optionals
|
optionals
|
||||||
take
|
take
|
||||||
;
|
;
|
||||||
inherit (lib.attrsets)
|
inherit (lib.attrsets) attrByPath optionalAttrs;
|
||||||
attrByPath
|
inherit (lib.strings) concatMapStrings concatStringsSep;
|
||||||
optionalAttrs
|
inherit (lib.types) mkOptionType;
|
||||||
;
|
inherit (lib.lists) last;
|
||||||
inherit (lib.strings)
|
|
||||||
concatMapStrings
|
|
||||||
concatStringsSep
|
|
||||||
;
|
|
||||||
inherit (lib.types)
|
|
||||||
mkOptionType
|
|
||||||
;
|
|
||||||
inherit (lib.lists)
|
|
||||||
last
|
|
||||||
;
|
|
||||||
prioritySuggestion = ''
|
prioritySuggestion = ''
|
||||||
Use `lib.mkForce value` or `lib.mkDefault value` to change the priority on any of these definitions.
|
Use `lib.mkForce value` or `lib.mkDefault value` to change the priority on any of these definitions.
|
||||||
'';
|
'';
|
||||||
in
|
in
|
||||||
rec {
|
rec {
|
||||||
|
|
||||||
/* Returns true when the given argument is an option
|
/*
|
||||||
|
Returns true when the given argument is an option
|
||||||
|
|
||||||
Type: isOption :: a -> bool
|
Type: isOption :: a -> bool
|
||||||
|
|
||||||
|
@ -55,7 +46,8 @@ rec {
|
||||||
*/
|
*/
|
||||||
isOption = lib.isType "option";
|
isOption = lib.isType "option";
|
||||||
|
|
||||||
/* Creates an Option attribute set. mkOption accepts an attribute set with the following keys:
|
/*
|
||||||
|
Creates an Option attribute set. mkOption accepts an attribute set with the following keys:
|
||||||
|
|
||||||
All keys default to `null` when not given.
|
All keys default to `null` when not given.
|
||||||
|
|
||||||
|
@ -85,10 +77,11 @@ rec {
|
||||||
visible ? null,
|
visible ? null,
|
||||||
# Whether the option can be set only once
|
# Whether the option can be set only once
|
||||||
readOnly ? null,
|
readOnly ? null,
|
||||||
} @ attrs:
|
}@attrs:
|
||||||
attrs // { _type = "option"; };
|
attrs // { _type = "option"; };
|
||||||
|
|
||||||
/* Creates an Option attribute set for a boolean value option, i.e. an
|
/*
|
||||||
|
Creates an Option attribute set for a boolean value option, i.e. an
|
||||||
option to be toggled on or off:
|
option to be toggled on or off:
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
@ -97,14 +90,16 @@ rec {
|
||||||
*/
|
*/
|
||||||
mkEnableOption =
|
mkEnableOption =
|
||||||
# Name for the created option
|
# Name for the created option
|
||||||
name: mkOption {
|
name:
|
||||||
|
mkOption {
|
||||||
default = false;
|
default = false;
|
||||||
example = true;
|
example = true;
|
||||||
description = "Whether to enable ${name}.";
|
description = "Whether to enable ${name}.";
|
||||||
type = lib.types.bool;
|
type = lib.types.bool;
|
||||||
};
|
};
|
||||||
|
|
||||||
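As reformatted above, `mkEnableOption name` is shorthand for a full boolean `mkOption`; a sketch in a hypothetical module (`services.myService` is made up):

```nix
{ lib, ... }:
{
  options.services.myService.enable = lib.mkEnableOption "myService";
  # equivalent to:
  #   lib.mkOption {
  #     default = false;
  #     example = true;
  #     description = "Whether to enable myService.";
  #     type = lib.types.bool;
  #   }
}
```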
/* Creates an Option attribute set for an option that specifies the
|
/*
|
||||||
|
Creates an Option attribute set for an option that specifies the
|
||||||
package a module should use for some purpose.
|
package a module should use for some purpose.
|
||||||
|
|
||||||
The package is specified in the third argument under `default` as a list of strings
|
The package is specified in the third argument under `default` as a list of strings
|
||||||
|
@ -197,40 +192,53 @@ rec {
|
||||||
# Additional text to include in the option description (may be omitted)
|
# Additional text to include in the option description (may be omitted)
|
||||||
extraDescription ? "",
|
extraDescription ? "",
|
||||||
# Representation of the package set passed as pkgs (defaults to `"pkgs"`)
|
# Representation of the package set passed as pkgs (defaults to `"pkgs"`)
|
||||||
pkgsText ? "pkgs"
|
pkgsText ? "pkgs",
|
||||||
}:
|
}:
|
||||||
let
|
let
|
||||||
name' = if isList name then last name else name;
|
name' = if isList name then last name else name;
|
||||||
default' = if isList default then default else [ default ];
|
default' = if isList default then default else [ default ];
|
||||||
defaultText = concatStringsSep "." default';
|
defaultText = concatStringsSep "." default';
|
||||||
defaultValue = attrByPath default'
|
defaultValue = attrByPath default' (throw "${defaultText} cannot be found in ${pkgsText}") pkgs;
|
||||||
(throw "${defaultText} cannot be found in ${pkgsText}") pkgs;
|
defaults =
|
||||||
defaults = if default != null then {
|
if default != null then
|
||||||
|
{
|
||||||
default = defaultValue;
|
default = defaultValue;
|
||||||
defaultText = literalExpression ("${pkgsText}." + defaultText);
|
defaultText = literalExpression ("${pkgsText}." + defaultText);
|
||||||
} else optionalAttrs nullable {
|
}
|
||||||
default = null;
|
else
|
||||||
};
|
optionalAttrs nullable { default = null; };
|
||||||
in mkOption (defaults // {
|
in
|
||||||
description = "The ${name'} package to use."
|
mkOption (
|
||||||
+ (if extraDescription == "" then "" else " ") + extraDescription;
|
defaults
|
||||||
|
// {
|
||||||
|
description =
|
||||||
|
"The ${name'} package to use." + (if extraDescription == "" then "" else " ") + extraDescription;
|
||||||
type = with lib.types; (if nullable then nullOr else lib.id) package;
|
type = with lib.types; (if nullable then nullOr else lib.id) package;
|
||||||
} // optionalAttrs (example != null) {
|
}
|
||||||
example = literalExpression
|
// optionalAttrs (example != null) {
|
||||||
(if isList example then "${pkgsText}." + concatStringsSep "." example else example);
|
example = literalExpression (
|
||||||
});
|
if isList example then "${pkgsText}." + concatStringsSep "." example else example
|
||||||
|
);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
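A sketch of how the reformatted `mkPackageOption` is typically called; the option path is hypothetical, and `pkgs` is the usual package set passed to the module:

```nix
{ lib, pkgs, ... }:
{
  options.services.myService.package = lib.mkPackageOption pkgs "hello" { };
  # => an option of type `package` with
  #    default     = pkgs.hello
  #    defaultText = literalExpression "pkgs.hello"
  #    description = "The hello package to use."
}
```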
/* Alias of mkPackageOption. Previously used to create options with markdown
|
/*
|
||||||
|
Alias of mkPackageOption. Previously used to create options with markdown
|
||||||
documentation, which is no longer required.
|
documentation, which is no longer required.
|
||||||
*/
|
*/
|
||||||
mkPackageOptionMD = mkPackageOption;
|
mkPackageOptionMD = mkPackageOption;
|
||||||
|
|
||||||
/* This option accepts anything, but it does not produce any result.
|
/*
|
||||||
|
This option accepts anything, but it does not produce any result.
|
||||||
|
|
||||||
This is useful for sharing a module across different module sets
|
This is useful for sharing a module across different module sets
|
||||||
without having to implement similar features as long as the
|
without having to implement similar features as long as the
|
||||||
values of the options are not accessed. */
|
values of the options are not accessed.
|
||||||
mkSinkUndeclaredOptions = attrs: mkOption ({
|
*/
|
||||||
|
mkSinkUndeclaredOptions =
|
||||||
|
attrs:
|
||||||
|
mkOption (
|
||||||
|
{
|
||||||
internal = true;
|
internal = true;
|
||||||
visible = false;
|
visible = false;
|
||||||
default = false;
|
default = false;
|
||||||
|
@ -241,18 +249,31 @@ rec {
|
||||||
merge = loc: defs: false;
|
merge = loc: defs: false;
|
||||||
};
|
};
|
||||||
apply = x: throw "Option value is not readable because the option is not declared.";
|
apply = x: throw "Option value is not readable because the option is not declared.";
|
||||||
} // attrs);
|
}
|
||||||
|
// attrs
|
||||||
|
);
|
||||||
|
|
||||||
mergeDefaultOption = loc: defs:
|
mergeDefaultOption =
|
||||||
let list = getValues defs; in
|
loc: defs:
|
||||||
if length list == 1 then head list
|
let
|
||||||
else if all isFunction list then x: mergeDefaultOption loc (map (f: f x) list)
|
list = getValues defs;
|
||||||
else if all isList list then concatLists list
|
in
|
||||||
else if all isAttrs list then foldl' lib.mergeAttrs {} list
|
if length list == 1 then
|
||||||
else if all isBool list then foldl' lib.or false list
|
head list
|
||||||
else if all isString list then lib.concatStrings list
|
else if all isFunction list then
|
||||||
else if all isInt list && all (x: x == head list) list then head list
|
x: mergeDefaultOption loc (map (f: f x) list)
|
||||||
else throw "Cannot merge definitions of `${showOption loc}'. Definition values:${showDefs defs}";
|
else if all isList list then
|
||||||
|
concatLists list
|
||||||
|
else if all isAttrs list then
|
||||||
|
foldl' lib.mergeAttrs { } list
|
||||||
|
else if all isBool list then
|
||||||
|
foldl' lib.or false list
|
||||||
|
else if all isString list then
|
||||||
|
lib.concatStrings list
|
||||||
|
else if all isInt list && all (x: x == head list) list then
|
||||||
|
head list
|
||||||
|
else
|
||||||
|
throw "Cannot merge definitions of `${showOption loc}'. Definition values:${showDefs defs}";
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Require a single definition.
|
Require a single definition.
|
||||||
|
@ -266,34 +287,48 @@ rec {
|
||||||
|
|
||||||
NOTE: When the type is not checked completely by check, pass a merge function for further checking (of sub-attributes, etc).
|
NOTE: When the type is not checked completely by check, pass a merge function for further checking (of sub-attributes, etc).
|
||||||
*/
|
*/
|
||||||
mergeUniqueOption = args@{
|
mergeUniqueOption =
|
||||||
|
args@{
|
||||||
message,
|
message,
|
||||||
# WARNING: the default merge function assumes that the definition is a valid (option) value. You MUST pass a merge function if the return value needs to be
|
# WARNING: the default merge function assumes that the definition is a valid (option) value. You MUST pass a merge function if the return value needs to be
|
||||||
# - type checked beyond what .check does (which should be very little; only on the value head; not attribute values, etc)
|
# - type checked beyond what .check does (which should be very little; only on the value head; not attribute values, etc)
|
||||||
# - if you want attribute values to be checked, or list items
|
# - if you want attribute values to be checked, or list items
|
||||||
# - if you want coercedTo-like behavior to work
|
# - if you want coercedTo-like behavior to work
|
||||||
merge ? loc: defs: (head defs).value }:
|
merge ? loc: defs: (head defs).value,
|
||||||
|
}:
|
||||||
loc: defs:
|
loc: defs:
|
||||||
if length defs == 1
|
if length defs == 1 then
|
||||||
then merge loc defs
|
merge loc defs
|
||||||
else
|
else
|
||||||
assert length defs > 1;
|
assert length defs > 1;
|
||||||
throw "The option `${showOption loc}' is defined multiple times while it's expected to be unique.\n${message}\nDefinition values:${showDefs defs}\n${prioritySuggestion}";
|
throw "The option `${showOption loc}' is defined multiple times while it's expected to be unique.\n${message}\nDefinition values:${showDefs defs}\n${prioritySuggestion}";
|
||||||
|
|
||||||
/* "Merge" option definitions by checking that they all have the same value. */
|
# "Merge" option definitions by checking that they all have the same value.
|
||||||
mergeEqualOption = loc: defs:
|
mergeEqualOption =
|
||||||
if defs == [] then abort "This case should never happen."
|
loc: defs:
|
||||||
|
if defs == [ ] then
|
||||||
|
abort "This case should never happen."
|
||||||
# Return early if we only have one element
|
# Return early if we only have one element
|
||||||
# This also makes it work for functions, because the foldl' below would try
|
# This also makes it work for functions, because the foldl' below would try
|
||||||
# to compare the first element with itself, which is false for functions
|
# to compare the first element with itself, which is false for functions
|
||||||
else if length defs == 1 then (head defs).value
|
else if length defs == 1 then
|
||||||
else (foldl' (first: def:
|
(head defs).value
|
||||||
if def.value != first.value then
|
|
||||||
throw "The option `${showOption loc}' has conflicting definition values:${showDefs [ first def ]}\n${prioritySuggestion}"
|
|
||||||
else
|
else
|
||||||
first) (head defs) (tail defs)).value;
|
(foldl' (
|
||||||
|
first: def:
|
||||||
|
if def.value != first.value then
|
||||||
|
throw "The option `${showOption loc}' has conflicting definition values:${
|
||||||
|
showDefs [
|
||||||
|
first
|
||||||
|
def
|
||||||
|
]
|
||||||
|
}\n${prioritySuggestion}"
|
||||||
|
else
|
||||||
|
first
|
||||||
|
) (head defs) (tail defs)).value;
|
||||||
|
|
||||||
/* Extracts values of all "value" keys of the given list.
|
/*
|
||||||
|
Extracts values of all "value" keys of the given list.
|
||||||
|
|
||||||
Type: getValues :: [ { value :: a; } ] -> [a]
|
Type: getValues :: [ { value :: a; } ] -> [a]
|
||||||
|
|
||||||
|
@ -303,7 +338,8 @@ rec {
|
||||||
*/
|
*/
|
||||||
getValues = map (x: x.value);
|
getValues = map (x: x.value);
|
||||||
|
|
||||||
/* Extracts values of all "file" keys of the given list
|
/*
|
||||||
|
Extracts values of all "file" keys of the given list
|
||||||
|
|
||||||
Type: getFiles :: [ { file :: a; } ] -> [a]
|
Type: getFiles :: [ { file :: a; } ] -> [a]
|
||||||
|
|
||||||
|
@ -315,50 +351,53 @@ rec {
|
||||||
|
|
||||||
# Generate documentation template from the list of option declaration like
|
# Generate documentation template from the list of option declaration like
|
||||||
# the set generated with filterOptionSets.
|
# the set generated with filterOptionSets.
|
||||||
optionAttrSetToDocList = optionAttrSetToDocList' [];
|
optionAttrSetToDocList = optionAttrSetToDocList' [ ];
|
||||||
|
|
||||||
optionAttrSetToDocList' = _: options:
|
optionAttrSetToDocList' =
|
||||||
concatMap (opt:
|
_: options:
|
||||||
|
concatMap (
|
||||||
|
opt:
|
||||||
let
|
let
|
||||||
name = showOption opt.loc;
|
name = showOption opt.loc;
|
||||||
docOption = {
|
docOption =
|
||||||
|
{
|
||||||
loc = opt.loc;
|
loc = opt.loc;
|
||||||
inherit name;
|
inherit name;
|
||||||
description = opt.description or null;
|
description = opt.description or null;
|
||||||
declarations = filter (x: x != unknownModule) opt.declarations;
|
declarations = filter (x: x != unknownModule) opt.declarations;
|
||||||
internal = opt.internal or false;
|
internal = opt.internal or false;
|
||||||
visible =
|
visible = if (opt ? visible && opt.visible == "shallow") then true else opt.visible or true;
|
||||||
if (opt?visible && opt.visible == "shallow")
|
|
||||||
then true
|
|
||||||
else opt.visible or true;
|
|
||||||
readOnly = opt.readOnly or false;
|
readOnly = opt.readOnly or false;
|
||||||
type = opt.type.description or "unspecified";
|
type = opt.type.description or "unspecified";
|
||||||
}
|
}
|
||||||
// optionalAttrs (opt ? example) {
|
// optionalAttrs (opt ? example) {
|
||||||
example =
|
example = builtins.addErrorContext "while evaluating the example of option `${name}`" (
|
||||||
builtins.addErrorContext "while evaluating the example of option `${name}`" (
|
|
||||||
renderOptionValue opt.example
|
renderOptionValue opt.example
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
// optionalAttrs (opt ? defaultText || opt ? default) {
|
// optionalAttrs (opt ? defaultText || opt ? default) {
|
||||||
default =
|
default = builtins.addErrorContext "while evaluating the ${
|
||||||
builtins.addErrorContext "while evaluating the ${if opt?defaultText then "defaultText" else "default value"} of option `${name}`" (
|
if opt ? defaultText then "defaultText" else "default value"
|
||||||
renderOptionValue (opt.defaultText or opt.default)
|
} of option `${name}`" (renderOptionValue (opt.defaultText or opt.default));
|
||||||
);
|
|
||||||
}
|
}
|
||||||
// optionalAttrs (opt ? relatedPackages && opt.relatedPackages != null) { inherit (opt) relatedPackages; };
|
// optionalAttrs (opt ? relatedPackages && opt.relatedPackages != null) {
|
||||||
|
inherit (opt) relatedPackages;
|
||||||
|
};
|
||||||
|
|
||||||
subOptions =
|
subOptions =
|
||||||
let ss = opt.type.getSubOptions opt.loc;
|
let
|
||||||
in if ss != {} then optionAttrSetToDocList' opt.loc ss else [];
|
ss = opt.type.getSubOptions opt.loc;
|
||||||
|
in
|
||||||
|
if ss != { } then optionAttrSetToDocList' opt.loc ss else [ ];
|
||||||
subOptionsVisible = docOption.visible && opt.visible or null != "shallow";
|
subOptionsVisible = docOption.visible && opt.visible or null != "shallow";
|
||||||
in
|
in
|
||||||
# To find infinite recursion in NixOS option docs:
|
# To find infinite recursion in NixOS option docs:
|
||||||
# builtins.trace opt.loc
|
# builtins.trace opt.loc
|
||||||
[ docOption ] ++ optionals subOptionsVisible subOptions) (collect isOption options);
|
[ docOption ] ++ optionals subOptionsVisible subOptions
|
||||||
|
) (collect isOption options);
|
||||||
|
|
||||||
|
/*
|
||||||
/* This function recursively removes all derivation attributes from
|
This function recursively removes all derivation attributes from
|
||||||
`x` except for the `name` attribute.
|
`x` except for the `name` attribute.
|
||||||
|
|
||||||
This is to make the generation of `options.xml` much more
|
This is to make the generation of `options.xml` much more
|
||||||
|
@ -369,54 +408,83 @@ rec {
|
||||||
This function was made obsolete by renderOptionValue and is kept for
|
This function was made obsolete by renderOptionValue and is kept for
|
||||||
compatibility with out-of-tree code.
|
compatibility with out-of-tree code.
|
||||||
*/
|
*/
|
||||||
scrubOptionValue = x:
|
scrubOptionValue =
|
||||||
|
x:
|
||||||
if isDerivation x then
|
if isDerivation x then
|
||||||
{ type = "derivation"; drvPath = x.name; outPath = x.name; name = x.name; }
|
{
|
||||||
else if isList x then map scrubOptionValue x
|
type = "derivation";
|
||||||
else if isAttrs x then mapAttrs (n: v: scrubOptionValue v) (removeAttrs x ["_args"])
|
drvPath = x.name;
|
||||||
else x;
|
outPath = x.name;
|
||||||
|
name = x.name;
|
||||||
|
}
|
||||||
|
else if isList x then
|
||||||
|
map scrubOptionValue x
|
||||||
|
else if isAttrs x then
|
||||||
|
mapAttrs (n: v: scrubOptionValue v) (removeAttrs x [ "_args" ])
|
||||||
|
else
|
||||||
|
x;
|
||||||
|
|
||||||
|
/*
|
||||||
/* Ensures that the given option value (default or example) is a `_type`d string
|
Ensures that the given option value (default or example) is a `_type`d string
|
||||||
by rendering Nix values to `literalExpression`s.
|
by rendering Nix values to `literalExpression`s.
|
||||||
*/
|
*/
|
||||||
renderOptionValue = v:
|
renderOptionValue =
|
||||||
if v ? _type && v ? text then v
|
v:
|
||||||
else literalExpression (lib.generators.toPretty {
|
if v ? _type && v ? text then
|
||||||
|
v
|
||||||
|
else
|
||||||
|
literalExpression (
|
||||||
|
lib.generators.toPretty {
|
||||||
multiline = true;
|
multiline = true;
|
||||||
allowPrettyValues = true;
|
allowPrettyValues = true;
|
||||||
} v);
|
} v
|
||||||
|
);
|
||||||
|
|
||||||
|
/*
|
||||||
/* For use in the `defaultText` and `example` option attributes. Causes the
|
For use in the `defaultText` and `example` option attributes. Causes the
|
||||||
given string to be rendered verbatim in the documentation as Nix code. This
|
given string to be rendered verbatim in the documentation as Nix code. This
|
||||||
is necessary for complex values, e.g. functions, or values that depend on
|
is necessary for complex values, e.g. functions, or values that depend on
|
||||||
other values or packages.
|
other values or packages.
|
||||||
*/
|
*/
|
||||||
literalExpression = text:
|
literalExpression =
|
||||||
if ! isString text then throw "literalExpression expects a string."
|
text:
|
||||||
else { _type = "literalExpression"; inherit text; };
|
if !isString text then
|
||||||
|
throw "literalExpression expects a string."
|
||||||
|
else
|
||||||
|
{
|
||||||
|
_type = "literalExpression";
|
||||||
|
inherit text;
|
||||||
|
};
|
||||||
|
|
||||||
literalExample = lib.warn "lib.literalExample is deprecated, use lib.literalExpression instead, or use lib.literalMD for a non-Nix description." literalExpression;
|
literalExample = lib.warn "lib.literalExample is deprecated, use lib.literalExpression instead, or use lib.literalMD for a non-Nix description." literalExpression;
|
||||||
|
|
||||||
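A sketch of the typical use of `literalExpression`: the real default stays evaluable while the manual renders the readable expression instead (the option shown is hypothetical, with `lib` and `pkgs` in scope):

```nix
lib.mkOption {
  type = lib.types.package;
  default = pkgs.hello;
  defaultText = lib.literalExpression "pkgs.hello";
  description = "The package to run.";
}
```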
/* Transition marker for documentation that's already migrated to markdown
|
/*
|
||||||
|
Transition marker for documentation that's already migrated to markdown
|
||||||
syntax. Has been a no-op for some while and been removed from nixpkgs.
|
syntax. Has been a no-op for some while and been removed from nixpkgs.
|
||||||
Kept here to alert downstream users who may not be aware of the migration's
|
Kept here to alert downstream users who may not be aware of the migration's
|
||||||
completion that it should be removed from modules.
|
completion that it should be removed from modules.
|
||||||
*/
|
*/
|
||||||
mdDoc = lib.warn "lib.mdDoc will be removed from nixpkgs in 24.11. Option descriptions are now in Markdown by default; you can remove any remaining uses of lib.mdDoc.";
|
mdDoc = lib.warn "lib.mdDoc will be removed from nixpkgs in 24.11. Option descriptions are now in Markdown by default; you can remove any remaining uses of lib.mdDoc.";
|
||||||
|
|
||||||
/* For use in the `defaultText` and `example` option attributes. Causes the
|
/*
|
||||||
|
For use in the `defaultText` and `example` option attributes. Causes the
|
||||||
given MD text to be inserted verbatim in the documentation, for when
|
given MD text to be inserted verbatim in the documentation, for when
|
||||||
a `literalExpression` would be too hard to read.
|
a `literalExpression` would be too hard to read.
|
||||||
*/
|
*/
|
||||||
literalMD = text:
|
literalMD =
|
||||||
if ! isString text then throw "literalMD expects a string."
|
text:
|
||||||
else { _type = "literalMD"; inherit text; };
|
if !isString text then
|
||||||
|
throw "literalMD expects a string."
|
||||||
|
else
|
||||||
|
{
|
||||||
|
_type = "literalMD";
|
||||||
|
inherit text;
|
||||||
|
};
|
||||||
|
|
||||||
# Helper functions.
|
# Helper functions.
|
||||||
|
|
||||||
/* Convert an option, described as a list of the option parts to a
|
/*
|
||||||
|
Convert an option, described as a list of the option parts to a
|
||||||
human-readable version.
|
human-readable version.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
@ -428,8 +496,11 @@ rec {
|
||||||
(showOption ["foo" "*" "bar"]) == "foo.*.bar"
|
(showOption ["foo" "*" "bar"]) == "foo.*.bar"
|
||||||
(showOption ["foo" "<name>" "bar"]) == "foo.<name>.bar"
|
(showOption ["foo" "<name>" "bar"]) == "foo.<name>.bar"
|
||||||
*/
|
*/
|
||||||
showOption = parts: let
|
showOption =
|
||||||
escapeOptionPart = part:
|
parts:
|
||||||
|
let
|
||||||
|
escapeOptionPart =
|
||||||
|
part:
|
||||||
let
|
let
|
||||||
# We assume that these are "special values" and not real configuration data.
|
# We assume that these are "special values" and not real configuration data.
|
||||||
# If it is real configuration data, it is rendered incorrectly.
|
# If it is real configuration data, it is rendered incorrectly.
|
||||||
|
@ -438,29 +509,41 @@ rec {
|
||||||
"*" # listOf (submodule {})
|
"*" # listOf (submodule {})
|
||||||
"<function body>" # functionTo
|
"<function body>" # functionTo
|
||||||
];
|
];
|
||||||
in if builtins.elem part specialIdentifiers
|
in
|
||||||
then part
|
if builtins.elem part specialIdentifiers then part else lib.strings.escapeNixIdentifier part;
|
||||||
else lib.strings.escapeNixIdentifier part;
|
in
|
||||||
in (concatStringsSep ".") (map escapeOptionPart parts);
|
(concatStringsSep ".") (map escapeOptionPart parts);
|
||||||
showFiles = files: concatStringsSep " and " (map (f: "`${f}'") files);
|
showFiles = files: concatStringsSep " and " (map (f: "`${f}'") files);
|
||||||
|
|
||||||
showDefs = defs: concatMapStrings (def:
|
showDefs =
|
||||||
|
defs:
|
||||||
|
concatMapStrings (
|
||||||
|
def:
|
||||||
let
|
let
|
||||||
# Pretty print the value for display, if successful
|
# Pretty print the value for display, if successful
|
||||||
prettyEval = builtins.tryEval
|
prettyEval = builtins.tryEval (
|
||||||
(lib.generators.toPretty { }
|
lib.generators.toPretty { } (
|
||||||
(lib.generators.withRecursion { depthLimit = 10; throwOnDepthLimit = false; } def.value));
|
lib.generators.withRecursion {
|
||||||
|
depthLimit = 10;
|
||||||
|
throwOnDepthLimit = false;
|
||||||
|
} def.value
|
||||||
|
)
|
||||||
|
);
|
||||||
# Split it into its lines
|
# Split it into its lines
|
||||||
lines = filter (v: ! isList v) (builtins.split "\n" prettyEval.value);
|
lines = filter (v: !isList v) (builtins.split "\n" prettyEval.value);
|
||||||
# Only display the first 5 lines, and indent them for better visibility
|
# Only display the first 5 lines, and indent them for better visibility
|
||||||
value = concatStringsSep "\n " (take 5 lines ++ optional (length lines > 5) "...");
|
value = concatStringsSep "\n " (take 5 lines ++ optional (length lines > 5) "...");
|
||||||
result =
|
result =
|
||||||
# Don't print any value if evaluating the value strictly fails
|
# Don't print any value if evaluating the value strictly fails
|
||||||
if ! prettyEval.success then ""
|
if !prettyEval.success then
|
||||||
|
""
|
||||||
# Put it on a new line if it consists of multiple
|
# Put it on a new line if it consists of multiple
|
||||||
else if length lines > 1 then ":\n " + value
|
else if length lines > 1 then
|
||||||
else ": " + value;
|
":\n " + value
|
||||||
in "\n- In `${def.file}'${result}"
|
else
|
||||||
|
": " + value;
|
||||||
|
in
|
||||||
|
"\n- In `${def.file}'${result}"
|
||||||
) defs;
|
) defs;
|
||||||
|
|
||||||
showOptionWithDefLocs = opt: ''
|
showOptionWithDefLocs = opt: ''
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
/* Functions for working with path values. */
|
# Functions for working with path values.
|
||||||
# See ./README.md for internal docs
|
# See ./README.md for internal docs
|
||||||
{ lib }:
|
{ lib }:
|
||||||
let
|
let
|
||||||
|
@ -27,22 +27,16 @@ let
|
||||||
|
|
||||||
listHasPrefix = lib.lists.hasPrefix;
|
listHasPrefix = lib.lists.hasPrefix;
|
||||||
|
|
||||||
inherit (lib.strings)
|
inherit (lib.strings) concatStringsSep substring;
|
||||||
concatStringsSep
|
|
||||||
substring
|
|
||||||
;
|
|
||||||
|
|
||||||
inherit (lib.asserts)
|
inherit (lib.asserts) assertMsg;
|
||||||
assertMsg
|
|
||||||
;
|
|
||||||
|
|
||||||
inherit (lib.path.subpath)
|
inherit (lib.path.subpath) isValid;
|
||||||
isValid
|
|
||||||
;
|
|
||||||
|
|
||||||
# Return the reason why a subpath is invalid, or `null` if it's valid
|
# Return the reason why a subpath is invalid, or `null` if it's valid
|
||||||
subpathInvalidReason = value:
|
subpathInvalidReason =
|
||||||
if ! isString value then
|
value:
|
||||||
|
if !isString value then
|
||||||
"The given value is of type ${builtins.typeOf value}, but a string was expected"
|
"The given value is of type ${builtins.typeOf value}, but a string was expected"
|
||||||
else if value == "" then
|
else if value == "" then
|
||||||
"The given string is empty"
|
"The given string is empty"
|
||||||
|
@ -51,11 +45,13 @@ let
|
||||||
# We don't support ".." components, see ./path.md#parent-directory
|
# We don't support ".." components, see ./path.md#parent-directory
|
||||||
else if match "(.*/)?\\.\\.(/.*)?" value != null then
|
else if match "(.*/)?\\.\\.(/.*)?" value != null then
|
||||||
"The given string \"${value}\" contains a `..` component, which is not allowed in subpaths"
|
"The given string \"${value}\" contains a `..` component, which is not allowed in subpaths"
|
||||||
else null;
|
else
|
||||||
|
null;
|
||||||
|
|
||||||
# Split and normalise a relative path string into its components.
|
# Split and normalise a relative path string into its components.
|
||||||
# Error for ".." components and doesn't include "." components
|
# Error for ".." components and doesn't include "." components
|
||||||
splitRelPath = path:
|
splitRelPath =
|
||||||
|
path:
|
||||||
let
|
let
|
||||||
# Split the string into its parts using regex for efficiency. This regex
|
# Split the string into its parts using regex for efficiency. This regex
|
||||||
# matches patterns like "/", "/./", "/././", with arbitrarily many "/"s
|
# matches patterns like "/", "/./", "/././", with arbitrarily many "/"s
|
||||||
|
@ -89,23 +85,28 @@ let
|
||||||
# Special case of a single "." path component. Such a case leaves a
|
# Special case of a single "." path component. Such a case leaves a
|
||||||
# componentCount of -1 due to the skipStart/skipEnd not verifying that
|
# componentCount of -1 due to the skipStart/skipEnd not verifying that
|
||||||
# they don't refer to the same character
|
# they don't refer to the same character
|
||||||
if path == "." then []
|
if path == "." then
|
||||||
|
[ ]
|
||||||
|
|
||||||
# Generate the result list directly. This is more efficient than a
|
# Generate the result list directly. This is more efficient than a
|
||||||
# combination of `filter`, `init` and `tail`, because here we don't
|
# combination of `filter`, `init` and `tail`, because here we don't
|
||||||
# allocate any intermediate lists
|
# allocate any intermediate lists
|
||||||
else genList (index:
|
else
|
||||||
|
genList (
|
||||||
|
index:
|
||||||
# To get to the element we need to add the number of parts we skip and
|
# To get to the element we need to add the number of parts we skip and
|
||||||
# multiply by two due to the interleaved layout of `parts`
|
# multiply by two due to the interleaved layout of `parts`
|
||||||
elemAt parts ((skipStart + index) * 2)
|
elemAt parts ((skipStart + index) * 2)
|
||||||
) componentCount;
|
) componentCount;
|
||||||
|
|
||||||
# Join relative path components together
|
# Join relative path components together
|
||||||
joinRelPath = components:
|
joinRelPath =
|
||||||
|
components:
|
||||||
# Always return relative paths with `./` as a prefix (./path.md#leading-dots-for-relative-paths)
|
# Always return relative paths with `./` as a prefix (./path.md#leading-dots-for-relative-paths)
|
||||||
"./" +
|
"./"
|
||||||
|
+
|
||||||
# An empty string is not a valid relative path, so we need to return a `.` when we have no components
|
# An empty string is not a valid relative path, so we need to return a `.` when we have no components
|
||||||
(if components == [] then "." else concatStringsSep "/" components);
|
(if components == [ ] then "." else concatStringsSep "/" components);
|
||||||
|
|
||||||
# Type: Path -> { root :: Path, components :: [ String ] }
|
# Type: Path -> { root :: Path, components :: [ String ] }
|
||||||
#
|
#
|
||||||
|
@ -117,11 +118,18 @@ let
|
||||||
# because it can distinguish different filesystem roots
|
# because it can distinguish different filesystem roots
|
||||||
deconstructPath =
|
deconstructPath =
|
||||||
let
|
let
|
||||||
recurse = components: base:
|
recurse =
|
||||||
|
components: base:
|
||||||
# If the parent of a path is the path itself, then it's a filesystem root
|
# If the parent of a path is the path itself, then it's a filesystem root
|
||||||
if base == dirOf base then { root = base; inherit components; }
|
if base == dirOf base then
|
||||||
else recurse ([ (baseNameOf base) ] ++ components) (dirOf base);
|
{
|
||||||
in recurse [];
|
root = base;
|
||||||
|
inherit components;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
recurse ([ (baseNameOf base) ] ++ components) (dirOf base);
|
||||||
|
in
|
||||||
|
recurse [ ];
|
||||||
|
|
||||||
# The components of the store directory, typically [ "nix" "store" ]
|
# The components of the store directory, typically [ "nix" "store" ]
|
||||||
storeDirComponents = splitRelPath ("./" + storeDir);
|
storeDirComponents = splitRelPath ("./" + storeDir);
|
||||||
|
@ -132,7 +140,8 @@ let
|
||||||
#
|
#
|
||||||
# Whether path components have a store path as a prefix, according to
|
# Whether path components have a store path as a prefix, according to
|
||||||
# https://nixos.org/manual/nix/stable/store/store-path.html#store-path.
|
# https://nixos.org/manual/nix/stable/store/store-path.html#store-path.
|
||||||
componentsHaveStorePathPrefix = components:
|
componentsHaveStorePathPrefix =
|
||||||
|
components:
|
||||||
# path starts with the store directory (typically /nix/store)
|
# path starts with the store directory (typically /nix/store)
|
||||||
listHasPrefix storeDirComponents components
|
listHasPrefix storeDirComponents components
|
||||||
# is not the store directory itself, meaning there's at least one extra component
|
# is not the store directory itself, meaning there's at least one extra component
|
||||||
|
@ -145,7 +154,9 @@ let
|
||||||
# We care more about discerning store path-ness on realistic values. Making it airtight would be fragile and slow.
|
# We care more about discerning store path-ness on realistic values. Making it airtight would be fragile and slow.
|
||||||
&& match ".{32}-.+" (elemAt components storeDirLength) != null;
|
&& match ".{32}-.+" (elemAt components storeDirLength) != null;
|
||||||
|
|
||||||
in /* No rec! Add dependencies on this file at the top. */ {
|
in
|
||||||
|
# No rec! Add dependencies on this file at the top.
|
||||||
|
{
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Append a subpath string to a path.
|
Append a subpath string to a path.
|
||||||
|
@ -194,8 +205,8 @@ in /* No rec! Add dependencies on this file at the top. */ {
|
||||||
path:
|
path:
|
||||||
# The subpath string to append
|
# The subpath string to append
|
||||||
subpath:
|
subpath:
|
||||||
assert assertMsg (isPath path) ''
|
assert assertMsg (isPath path)
|
||||||
lib.path.append: The first argument is of type ${builtins.typeOf path}, but a path was expected'';
|
''lib.path.append: The first argument is of type ${builtins.typeOf path}, but a path was expected'';
|
||||||
assert assertMsg (isValid subpath) ''
|
assert assertMsg (isValid subpath) ''
|
||||||
lib.path.append: Second argument is not a valid subpath string:
|
lib.path.append: Second argument is not a valid subpath string:
|
||||||
${subpathInvalidReason subpath}'';
|
${subpathInvalidReason subpath}'';
|
||||||
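For reference, the behaviour that `append` guards with the assertions above; these results match the function's documented examples:

```nix
lib.path.append /foo "bar/baz"
# => /foo/bar/baz

# Subpaths do not need to be normalised first:
lib.path.append /foo "./bar//baz/./"
# => /foo/bar/baz
```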
|
@ -225,25 +236,23 @@ in /* No rec! Add dependencies on this file at the top. */ {
|
||||||
*/
|
*/
|
||||||
hasPrefix =
|
hasPrefix =
|
||||||
path1:
|
path1:
|
||||||
assert assertMsg
|
assert assertMsg (isPath path1)
|
||||||
(isPath path1)
|
|
||||||
"lib.path.hasPrefix: First argument is of type ${typeOf path1}, but a path was expected";
|
"lib.path.hasPrefix: First argument is of type ${typeOf path1}, but a path was expected";
|
||||||
let
|
let
|
||||||
path1Deconstructed = deconstructPath path1;
|
path1Deconstructed = deconstructPath path1;
|
||||||
in
|
in
|
||||||
path2:
|
path2:
|
||||||
assert assertMsg
|
assert assertMsg (isPath path2)
|
||||||
(isPath path2)
|
|
||||||
"lib.path.hasPrefix: Second argument is of type ${typeOf path2}, but a path was expected";
|
"lib.path.hasPrefix: Second argument is of type ${typeOf path2}, but a path was expected";
|
||||||
let
|
let
|
||||||
path2Deconstructed = deconstructPath path2;
|
path2Deconstructed = deconstructPath path2;
|
||||||
in
|
in
|
||||||
assert assertMsg
|
assert assertMsg (path1Deconstructed.root == path2Deconstructed.root) ''
|
||||||
(path1Deconstructed.root == path2Deconstructed.root) ''
|
|
||||||
lib.path.hasPrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
|
lib.path.hasPrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
|
||||||
first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
|
first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
|
||||||
second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
|
second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
|
||||||
take (length path1Deconstructed.components) path2Deconstructed.components == path1Deconstructed.components;
|
take (length path1Deconstructed.components) path2Deconstructed.components
|
||||||
|
== path1Deconstructed.components;
|
||||||
|
|
||||||
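A sketch of `hasPrefix`'s component-wise semantics; the results follow from the definition above:

```nix
lib.path.hasPrefix /foo /foo/bar
# => true

lib.path.hasPrefix /foo /foo
# => true   (every path is a prefix of itself)

lib.path.hasPrefix /foo/bar /foo
# => false
```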
/*
|
/*
|
||||||
Remove the first path as a component-wise prefix from the second path.
|
Remove the first path as a component-wise prefix from the second path.
|
||||||
|
@ -270,16 +279,14 @@ in /* No rec! Add dependencies on this file at the top. */ {
|
||||||
*/
|
*/
|
||||||
removePrefix =
|
removePrefix =
|
||||||
path1:
|
path1:
|
||||||
assert assertMsg
|
assert assertMsg (isPath path1)
|
||||||
(isPath path1)
|
|
||||||
"lib.path.removePrefix: First argument is of type ${typeOf path1}, but a path was expected.";
|
"lib.path.removePrefix: First argument is of type ${typeOf path1}, but a path was expected.";
|
||||||
let
|
let
|
||||||
path1Deconstructed = deconstructPath path1;
|
path1Deconstructed = deconstructPath path1;
|
||||||
path1Length = length path1Deconstructed.components;
|
path1Length = length path1Deconstructed.components;
|
||||||
in
|
in
|
||||||
path2:
|
path2:
|
||||||
assert assertMsg
|
assert assertMsg (isPath path2)
|
||||||
(isPath path2)
|
|
||||||
"lib.path.removePrefix: Second argument is of type ${typeOf path2}, but a path was expected.";
|
"lib.path.removePrefix: Second argument is of type ${typeOf path2}, but a path was expected.";
|
||||||
let
|
let
|
||||||
path2Deconstructed = deconstructPath path2;
|
path2Deconstructed = deconstructPath path2;
|
||||||
|
@ -288,11 +295,9 @@ in /* No rec! Add dependencies on this file at the top. */ {
|
||||||
if success then
|
if success then
|
||||||
drop path1Length path2Deconstructed.components
|
drop path1Length path2Deconstructed.components
|
||||||
else
|
else
|
||||||
throw ''
|
throw ''lib.path.removePrefix: The first path argument "${toString path1}" is not a component-wise prefix of the second path argument "${toString path2}".'';
|
||||||
lib.path.removePrefix: The first path argument "${toString path1}" is not a component-wise prefix of the second path argument "${toString path2}".'';
|
|
||||||
in
|
in
|
||||||
assert assertMsg
|
assert assertMsg (path1Deconstructed.root == path2Deconstructed.root) ''
|
||||||
(path1Deconstructed.root == path2Deconstructed.root) ''
|
|
||||||
lib.path.removePrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
|
lib.path.removePrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
|
||||||
first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
|
first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
|
||||||
second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
|
second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
|
||||||
|
@ -336,12 +341,12 @@ in /* No rec! Add dependencies on this file at the top. */ {
|
||||||
splitRoot =
|
splitRoot =
|
||||||
# The path to split the root off of
|
# The path to split the root off of
|
||||||
path:
|
path:
|
||||||
assert assertMsg
|
assert assertMsg (isPath path)
|
||||||
(isPath path)
|
|
||||||
"lib.path.splitRoot: Argument is of type ${typeOf path}, but a path was expected";
|
"lib.path.splitRoot: Argument is of type ${typeOf path}, but a path was expected";
|
||||||
let
|
let
|
||||||
deconstructed = deconstructPath path;
|
deconstructed = deconstructPath path;
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
root = deconstructed.root;
|
root = deconstructed.root;
|
||||||
subpath = joinRelPath deconstructed.components;
|
subpath = joinRelPath deconstructed.components;
|
||||||
};
|
};
|
||||||
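A sketch of the result shape `splitRoot` produces, matching its documented behaviour:

```nix
lib.path.splitRoot /foo/bar
# => { root = /.; subpath = "./foo/bar"; }

lib.path.splitRoot /.
# => { root = /.; subpath = "./."; }
```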
|
@ -387,12 +392,12 @@ in /* No rec! Add dependencies on this file at the top. */ {
|
||||||
hasStorePathPrefix /nix/store/nvl9ic0pj1fpyln3zaqrf4cclbqdfn1j-foo.drv
|
hasStorePathPrefix /nix/store/nvl9ic0pj1fpyln3zaqrf4cclbqdfn1j-foo.drv
|
||||||
=> true
|
=> true
|
||||||
*/
|
*/
|
||||||
hasStorePathPrefix = path:
|
hasStorePathPrefix =
|
||||||
|
path:
|
||||||
let
|
let
|
||||||
deconstructed = deconstructPath path;
|
deconstructed = deconstructPath path;
|
||||||
in
|
in
|
||||||
assert assertMsg
|
assert assertMsg (isPath path)
|
||||||
(isPath path)
|
|
||||||
"lib.path.hasStorePathPrefix: Argument is of type ${typeOf path}, but a path was expected";
|
"lib.path.hasStorePathPrefix: Argument is of type ${typeOf path}, but a path was expected";
|
||||||
assert assertMsg
|
assert assertMsg
|
||||||
# This function likely breaks or needs adjustment if used with other filesystem roots, if they ever get implemented.
|
# This function likely breaks or needs adjustment if used with other filesystem roots, if they ever get implemented.
|
||||||
|
@ -446,9 +451,7 @@ in /* No rec! Add dependencies on this file at the top. */ {
|
||||||
*/
|
*/
|
||||||
subpath.isValid =
|
subpath.isValid =
|
||||||
# The value to check
|
# The value to check
|
||||||
value:
|
value: subpathInvalidReason value == null;
|
||||||
subpathInvalidReason value == null;
|
|
||||||
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Join subpath strings together using `/`, returning a normalised subpath string.
|
Join subpath strings together using `/`, returning a normalised subpath string.
|
||||||
|
@ -511,16 +514,18 @@ in /* No rec! Add dependencies on this file at the top. */ {
|
||||||
# The list of subpaths to join together
|
# The list of subpaths to join together
|
||||||
subpaths:
|
subpaths:
|
||||||
# Fast in case all paths are valid
|
# Fast in case all paths are valid
|
||||||
if all isValid subpaths
|
if all isValid subpaths then
|
||||||
then joinRelPath (concatMap splitRelPath subpaths)
|
joinRelPath (concatMap splitRelPath subpaths)
|
||||||
else
|
else
|
||||||
# Otherwise we take our time to gather more info for a better error message
|
# Otherwise we take our time to gather more info for a better error message
|
||||||
# Strictly go through each path, throwing on the first invalid one
|
# Strictly go through each path, throwing on the first invalid one
|
||||||
# Tracks the list index in the fold accumulator
|
# Tracks the list index in the fold accumulator
|
||||||
foldl' (i: path:
|
foldl' (
|
||||||
if isValid path
|
i: path:
|
||||||
then i + 1
|
if isValid path then
|
||||||
else throw ''
|
i + 1
|
||||||
|
else
|
||||||
|
throw ''
|
||||||
lib.path.subpath.join: Element at index ${toString i} is not a valid subpath string:
|
lib.path.subpath.join: Element at index ${toString i} is not a valid subpath string:
|
||||||
${subpathInvalidReason path}''
|
${subpathInvalidReason path}''
|
||||||
) 0 subpaths;
|
) 0 subpaths;
|
||||||
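A sketch of `subpath.join` on the fast path and the normalisation it applies; the results match the documented examples:

```nix
lib.path.subpath.join [ "foo" "bar/baz" ]
# => "./foo/bar/baz"

lib.path.subpath.join [ "./foo" "." "bar//./baz" ]
# => "./foo/bar/baz"

lib.path.subpath.join [ ]
# => "./."
```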
|
|
|
@ -2,8 +2,8 @@
|
||||||
nixpkgs ? ../../..,
|
nixpkgs ? ../../..,
|
||||||
system ? builtins.currentSystem,
|
system ? builtins.currentSystem,
|
||||||
pkgs ? import nixpkgs {
|
pkgs ? import nixpkgs {
|
||||||
config = {};
|
config = { };
|
||||||
overlays = [];
|
overlays = [ ];
|
||||||
inherit system;
|
inherit system;
|
||||||
},
|
},
|
||||||
nixVersions ? import ../../tests/nix-for-tests.nix { inherit pkgs; },
|
nixVersions ? import ../../tests/nix-for-tests.nix { inherit pkgs; },
|
||||||
|
@ -12,14 +12,16 @@
|
||||||
seed ? null,
|
seed ? null,
|
||||||
}:
|
}:
|
||||||
|
|
||||||
pkgs.runCommand "lib-path-tests" {
|
pkgs.runCommand "lib-path-tests"
|
||||||
nativeBuildInputs = [
|
{
|
||||||
nixVersions.stable
|
nativeBuildInputs =
|
||||||
] ++ (with pkgs; [
|
[ nixVersions.stable ]
|
||||||
|
++ (with pkgs; [
|
||||||
jq
|
jq
|
||||||
bc
|
bc
|
||||||
]);
|
]);
|
||||||
} ''
|
}
|
||||||
|
''
|
||||||
# Needed to make Nix evaluation work
|
# Needed to make Nix evaluation work
|
||||||
export TEST_ROOT=$(pwd)/test-tmp
|
export TEST_ROOT=$(pwd)/test-tmp
|
||||||
export NIX_BUILD_HOOK=
|
export NIX_BUILD_HOOK=
|
||||||
|
@ -42,4 +44,4 @@ pkgs.runCommand "lib-path-tests" {
|
||||||
bash lib/path/tests/prop.sh ${toString seed}
|
bash lib/path/tests/prop.sh ${toString seed}
|
||||||
|
|
||||||
touch $out
|
touch $out
|
||||||
''
|
''
|
||||||
|
|
|
@ -16,14 +16,15 @@ let
|
||||||
lib = import libpath;
|
lib = import libpath;
|
||||||
|
|
||||||
# read each file into a string
|
# read each file into a string
|
||||||
strings = map (name:
|
strings = map (name: builtins.readFile (dir + "/${name}")) (
|
||||||
builtins.readFile (dir + "/${name}")
|
builtins.attrNames (builtins.readDir dir)
|
||||||
) (builtins.attrNames (builtins.readDir dir));
|
);
|
||||||
|
|
||||||
inherit (lib.path.subpath) normalise isValid;
|
inherit (lib.path.subpath) normalise isValid;
|
||||||
inherit (lib.asserts) assertMsg;
|
inherit (lib.asserts) assertMsg;
|
||||||
|
|
||||||
normaliseAndCheck = str:
|
normaliseAndCheck =
|
||||||
|
str:
|
||||||
let
|
let
|
||||||
originalValid = isValid str;
|
originalValid = isValid str;
|
||||||
|
|
||||||
|
@ -34,27 +35,26 @@ let
|
||||||
absConcatNormalised = /. + ("/" + tryOnce.value);
|
absConcatNormalised = /. + ("/" + tryOnce.value);
|
||||||
in
|
in
|
||||||
# Check the lib.path.subpath.normalise property to only error on invalid subpaths
|
# Check the lib.path.subpath.normalise property to only error on invalid subpaths
|
||||||
assert assertMsg
|
assert assertMsg (
|
||||||
(originalValid -> tryOnce.success)
|
originalValid -> tryOnce.success
|
||||||
"Even though string \"${str}\" is valid as a subpath, the normalisation for it failed";
|
) "Even though string \"${str}\" is valid as a subpath, the normalisation for it failed";
|
||||||
assert assertMsg
|
assert assertMsg (
|
||||||
(! originalValid -> ! tryOnce.success)
|
!originalValid -> !tryOnce.success
|
||||||
"Even though string \"${str}\" is invalid as a subpath, the normalisation for it succeeded";
|
) "Even though string \"${str}\" is invalid as a subpath, the normalisation for it succeeded";
|
||||||
|
|
||||||
# Check normalisation idempotency
|
# Check normalisation idempotency
|
||||||
assert assertMsg
|
assert assertMsg (
|
||||||
(originalValid -> tryTwice.success)
|
originalValid -> tryTwice.success
|
||||||
"For valid subpath \"${str}\", the normalisation \"${tryOnce.value}\" was not a valid subpath";
|
) "For valid subpath \"${str}\", the normalisation \"${tryOnce.value}\" was not a valid subpath";
|
||||||
assert assertMsg
|
assert assertMsg (originalValid -> tryOnce.value == tryTwice.value)
|
||||||
(originalValid -> tryOnce.value == tryTwice.value)
|
|
||||||
"For valid subpath \"${str}\", normalising it once gives \"${tryOnce.value}\" but normalising it twice gives a different result: \"${tryTwice.value}\"";
|
"For valid subpath \"${str}\", normalising it once gives \"${tryOnce.value}\" but normalising it twice gives a different result: \"${tryTwice.value}\"";
|
||||||
|
|
||||||
# Check that normalisation doesn't change a string when appended to an absolute Nix path value
|
# Check that normalisation doesn't change a string when appended to an absolute Nix path value
|
||||||
assert assertMsg
|
assert assertMsg (originalValid -> absConcatOrig == absConcatNormalised)
|
||||||
(originalValid -> absConcatOrig == absConcatNormalised)
|
|
||||||
"For valid subpath \"${str}\", appending to an absolute Nix path value gives \"${absConcatOrig}\", but appending the normalised result \"${tryOnce.value}\" gives a different value \"${absConcatNormalised}\"";
|
"For valid subpath \"${str}\", appending to an absolute Nix path value gives \"${absConcatOrig}\", but appending the normalised result \"${tryOnce.value}\" gives a different value \"${absConcatNormalised}\"";
|
||||||
|
|
||||||
# Return an empty string when failed
|
# Return an empty string when failed
|
||||||
if tryOnce.success then tryOnce.value else "";
|
if tryOnce.success then tryOnce.value else "";
|
||||||
|
|
||||||
in lib.genAttrs strings normaliseAndCheck
|
in
|
||||||
|
lib.genAttrs strings normaliseAndCheck
|
||||||
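Condensed, the property this prop-test file checks per input string looks roughly like the sketch below (the input string here is made up; `normalise` and `isValid` are the same functions inherited above):

  let
    lib = import ./lib;
    inherit (lib.path.subpath) normalise isValid;
    str   = "foo//bar/./";
    once  = builtins.tryEval (normalise str);
    twice = builtins.tryEval (normalise once.value);
  in
    # Valid inputs must normalise successfully, and normalising twice
    # must give the same result as normalising once.
    isValid str -> (once.success && twice.success && once.value == twice.value)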
|
|
|
@ -3,7 +3,14 @@
|
||||||
{ libpath }:
|
{ libpath }:
|
||||||
let
|
let
|
||||||
lib = import libpath;
|
lib = import libpath;
|
||||||
inherit (lib.path) hasPrefix removePrefix append splitRoot hasStorePathPrefix subpath;
|
inherit (lib.path)
|
||||||
|
hasPrefix
|
||||||
|
removePrefix
|
||||||
|
append
|
||||||
|
splitRoot
|
||||||
|
hasStorePathPrefix
|
||||||
|
subpath
|
||||||
|
;
|
||||||
|
|
||||||
# This is not allowed generally, but we're in the tests here, so we'll allow ourselves.
|
# This is not allowed generally, but we're in the tests here, so we'll allow ourselves.
|
||||||
storeDirPath = /. + builtins.storeDir;
|
storeDirPath = /. + builtins.storeDir;
|
||||||
|
@ -79,15 +86,24 @@ let
|
||||||
|
|
||||||
testSplitRootExample1 = {
|
testSplitRootExample1 = {
|
||||||
expr = splitRoot /foo/bar;
|
expr = splitRoot /foo/bar;
|
||||||
expected = { root = /.; subpath = "./foo/bar"; };
|
expected = {
|
||||||
|
root = /.;
|
||||||
|
subpath = "./foo/bar";
|
||||||
|
};
|
||||||
};
|
};
|
||||||
testSplitRootExample2 = {
|
testSplitRootExample2 = {
|
||||||
expr = splitRoot /.;
|
expr = splitRoot /.;
|
||||||
expected = { root = /.; subpath = "./."; };
|
expected = {
|
||||||
|
root = /.;
|
||||||
|
subpath = "./.";
|
||||||
|
};
|
||||||
};
|
};
|
||||||
testSplitRootExample3 = {
|
testSplitRootExample3 = {
|
||||||
expr = splitRoot /foo/../bar;
|
expr = splitRoot /foo/../bar;
|
||||||
expected = { root = /.; subpath = "./bar"; };
|
expected = {
|
||||||
|
root = /.;
|
||||||
|
subpath = "./bar";
|
||||||
|
};
|
||||||
};
|
};
|
||||||
testSplitRootExample4 = {
|
testSplitRootExample4 = {
|
||||||
expr = (builtins.tryEval (splitRoot "/foo/bar")).success;
|
expr = (builtins.tryEval (splitRoot "/foo/bar")).success;
|
||||||
|
@ -111,7 +127,9 @@ let
|
||||||
expected = false;
|
expected = false;
|
||||||
};
|
};
|
||||||
testHasStorePathPrefixExample5 = {
|
testHasStorePathPrefixExample5 = {
|
||||||
expr = hasStorePathPrefix (storeDirPath + "/.links/10gg8k3rmbw8p7gszarbk7qyd9jwxhcfq9i6s5i0qikx8alkk4hq");
|
expr = hasStorePathPrefix (
|
||||||
|
storeDirPath + "/.links/10gg8k3rmbw8p7gszarbk7qyd9jwxhcfq9i6s5i0qikx8alkk4hq"
|
||||||
|
);
|
||||||
expected = false;
|
expected = false;
|
||||||
};
|
};
|
||||||
testHasStorePathPrefixExample6 = {
|
testHasStorePathPrefixExample6 = {
|
||||||
|
@ -188,11 +206,18 @@ let
|
||||||
|
|
||||||
# Test examples from the lib.path.subpath.join documentation
|
# Test examples from the lib.path.subpath.join documentation
|
||||||
testSubpathJoinExample1 = {
|
testSubpathJoinExample1 = {
|
||||||
expr = subpath.join [ "foo" "bar/baz" ];
|
expr = subpath.join [
|
||||||
|
"foo"
|
||||||
|
"bar/baz"
|
||||||
|
];
|
||||||
expected = "./foo/bar/baz";
|
expected = "./foo/bar/baz";
|
||||||
};
|
};
|
||||||
testSubpathJoinExample2 = {
|
testSubpathJoinExample2 = {
|
||||||
expr = subpath.join [ "./foo" "." "bar//./baz/" ];
|
expr = subpath.join [
|
||||||
|
"./foo"
|
||||||
|
"."
|
||||||
|
"bar//./baz/"
|
||||||
|
];
|
||||||
expected = "./foo/bar/baz";
|
expected = "./foo/bar/baz";
|
||||||
};
|
};
|
||||||
testSubpathJoinExample3 = {
|
testSubpathJoinExample3 = {
|
||||||
|
@ -273,7 +298,11 @@ let
|
||||||
};
|
};
|
||||||
testSubpathComponentsExample2 = {
|
testSubpathComponentsExample2 = {
|
||||||
expr = subpath.components "./foo//bar/./baz/";
|
expr = subpath.components "./foo//bar/./baz/";
|
||||||
expected = [ "foo" "bar" "baz" ];
|
expected = [
|
||||||
|
"foo"
|
||||||
|
"bar"
|
||||||
|
"baz"
|
||||||
|
];
|
||||||
};
|
};
|
||||||
testSubpathComponentsExample3 = {
|
testSubpathComponentsExample3 = {
|
||||||
expr = (builtins.tryEval (subpath.components "/foo")).success;
|
expr = (builtins.tryEval (subpath.components "/foo")).success;
|
||||||
|
@ -281,5 +310,7 @@ let
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
in
|
in
|
||||||
if cases == [] then "Unit tests successful"
|
if cases == [ ] then
|
||||||
else throw "Path unit tests failed: ${lib.generators.toPretty {} cases}"
|
"Unit tests successful"
|
||||||
|
else
|
||||||
|
throw "Path unit tests failed: ${lib.generators.toPretty { } cases}"
|
||||||
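The re-indented expectations above encode the same behaviour as before; condensed, with the expected values taken directly from the tests:

  let lib = import ./lib; inherit (lib.path) splitRoot subpath; in {
    ex1 = splitRoot /foo/bar;                      # { root = /.; subpath = "./foo/bar"; }
    ex2 = splitRoot /foo/../bar;                   # { root = /.; subpath = "./bar"; }
    ex3 = subpath.components "./foo//bar/./baz/";  # [ "foo" "bar" "baz" ]
  }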
|
|
|
@ -5,15 +5,16 @@ let
|
||||||
shortName = tname;
|
shortName = tname;
|
||||||
isSource = false;
|
isSource = false;
|
||||||
};
|
};
|
||||||
in lib.mapAttrs (tname: tset: defaultSourceType tname // tset) {
|
in
|
||||||
|
lib.mapAttrs (tname: tset: defaultSourceType tname // tset) {
|
||||||
|
|
||||||
fromSource = {
|
fromSource = {
|
||||||
isSource = true;
|
isSource = true;
|
||||||
};
|
};
|
||||||
|
|
||||||
binaryNativeCode = {};
|
binaryNativeCode = { };
|
||||||
|
|
||||||
binaryBytecode = {};
|
binaryBytecode = { };
|
||||||
|
|
||||||
binaryFirmware = {};
|
binaryFirmware = { };
|
||||||
}
|
}
|
||||||
|
|
199 lib/sources.nix
|
@ -1,41 +1,47 @@
|
||||||
/* Functions for copying sources to the Nix store. */
|
# Functions for copying sources to the Nix store.
|
||||||
{ lib }:
|
{ lib }:
|
||||||
|
|
||||||
# Tested in lib/tests/sources.sh
|
# Tested in lib/tests/sources.sh
|
||||||
let
|
let
|
||||||
inherit (builtins)
|
inherit (builtins) match split storeDir;
|
||||||
match
|
|
||||||
split
|
|
||||||
storeDir
|
|
||||||
;
|
|
||||||
inherit (lib)
|
inherit (lib)
|
||||||
boolToString
|
boolToString
|
||||||
filter
|
filter
|
||||||
isString
|
isString
|
||||||
readFile
|
readFile
|
||||||
;
|
;
|
||||||
inherit (lib.filesystem)
|
inherit (lib.filesystem) pathIsRegularFile;
|
||||||
pathIsRegularFile
|
|
||||||
;
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
A basic filter for `cleanSourceWith` that removes
|
A basic filter for `cleanSourceWith` that removes
|
||||||
directories of version control system, backup files (*~)
|
directories of version control system, backup files (*~)
|
||||||
and some generated files.
|
and some generated files.
|
||||||
*/
|
*/
|
||||||
cleanSourceFilter = name: type: let baseName = baseNameOf (toString name); in ! (
|
cleanSourceFilter =
|
||||||
|
name: type:
|
||||||
|
let
|
||||||
|
baseName = baseNameOf (toString name);
|
||||||
|
in
|
||||||
|
!(
|
||||||
# Filter out version control software files/directories
|
# Filter out version control software files/directories
|
||||||
(baseName == ".git" || type == "directory" && (baseName == ".svn" || baseName == "CVS" || baseName == ".hg")) ||
|
(
|
||||||
|
baseName == ".git"
|
||||||
|
|| type == "directory" && (baseName == ".svn" || baseName == "CVS" || baseName == ".hg")
|
||||||
|
)
|
||||||
|
||
|
||||||
# Filter out editor backup / swap files.
|
# Filter out editor backup / swap files.
|
||||||
lib.hasSuffix "~" baseName ||
|
lib.hasSuffix "~" baseName
|
||||||
match "^\\.sw[a-z]$" baseName != null ||
|
|| match "^\\.sw[a-z]$" baseName != null
|
||||||
match "^\\..*\\.sw[a-z]$" baseName != null ||
|
|| match "^\\..*\\.sw[a-z]$" baseName != null
|
||||||
|
||
|
||||||
|
|
||||||
# Filter out generated files.
|
# Filter out generated files.
|
||||||
lib.hasSuffix ".o" baseName ||
|
lib.hasSuffix ".o" baseName
|
||||||
lib.hasSuffix ".so" baseName ||
|
|| lib.hasSuffix ".so" baseName
|
||||||
|
||
|
||||||
# Filter out nix-build result symlinks
|
# Filter out nix-build result symlinks
|
||||||
(type == "symlink" && lib.hasPrefix "result" baseName) ||
|
(type == "symlink" && lib.hasPrefix "result" baseName)
|
||||||
|
||
|
||||||
# Filter out sockets and other types of files we can't have in the store.
|
# Filter out sockets and other types of files we can't have in the store.
|
||||||
(type == "unknown")
|
(type == "unknown")
|
||||||
);
|
);
|
||||||
|
@ -46,7 +52,12 @@ let
|
||||||
Example:
|
Example:
|
||||||
cleanSource ./.
|
cleanSource ./.
|
||||||
*/
|
*/
|
||||||
cleanSource = src: cleanSourceWith { filter = cleanSourceFilter; inherit src; };
|
cleanSource =
|
||||||
|
src:
|
||||||
|
cleanSourceWith {
|
||||||
|
filter = cleanSourceFilter;
|
||||||
|
inherit src;
|
||||||
|
};
|
||||||
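A usage sketch for the two cleaners above (the extra `.md` filter is invented for illustration; `lib.cleanSource` and `lib.cleanSourceWith` are the top-level aliases for these functions):

  let lib = import ./lib; in {
    # Current directory minus VCS dirs, editor backups, *.o/*.so files
    # and nix-build `result` symlinks:
    src = lib.cleanSource ./.;

    # Same, with an additional caller-supplied predicate composed on top:
    srcNoDocs = lib.cleanSourceWith {
      src    = lib.cleanSource ./.;
      filter = path: type: !(lib.hasSuffix ".md" path);
    };
  }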
|
|
||||||
/*
|
/*
|
||||||
Like `builtins.filterSource`, except it will compose with itself,
|
Like `builtins.filterSource`, except it will compose with itself,
|
||||||
|
@ -65,7 +76,6 @@ let
|
||||||
|
|
||||||
builtins.filterSource f (builtins.filterSource g ./.)
|
builtins.filterSource f (builtins.filterSource g ./.)
|
||||||
# Fails!
|
# Fails!
|
||||||
|
|
||||||
*/
|
*/
|
||||||
cleanSourceWith =
|
cleanSourceWith =
|
||||||
{
|
{
|
||||||
|
@ -80,11 +90,12 @@ let
|
||||||
filter ? _path: _type: true,
|
filter ? _path: _type: true,
|
||||||
# Optional name to use as part of the store path.
|
# Optional name to use as part of the store path.
|
||||||
# This defaults to `src.name` or otherwise `"source"`.
|
# This defaults to `src.name` or otherwise `"source"`.
|
||||||
name ? null
|
name ? null,
|
||||||
}:
|
}:
|
||||||
let
|
let
|
||||||
orig = toSourceAttributes src;
|
orig = toSourceAttributes src;
|
||||||
in fromSourceAttributes {
|
in
|
||||||
|
fromSourceAttributes {
|
||||||
inherit (orig) origSrc;
|
inherit (orig) origSrc;
|
||||||
filter = path: type: filter path type && orig.filter path type;
|
filter = path: type: filter path type && orig.filter path type;
|
||||||
name = if name != null then name else orig.name;
|
name = if name != null then name else orig.name;
|
||||||
|
@ -102,14 +113,17 @@ let
|
||||||
attrs = toSourceAttributes src;
|
attrs = toSourceAttributes src;
|
||||||
in
|
in
|
||||||
fromSourceAttributes (
|
fromSourceAttributes (
|
||||||
attrs // {
|
attrs
|
||||||
filter = path: type:
|
// {
|
||||||
|
filter =
|
||||||
|
path: type:
|
||||||
let
|
let
|
||||||
r = attrs.filter path type;
|
r = attrs.filter path type;
|
||||||
in
|
in
|
||||||
builtins.trace "${attrs.name}.filter ${path} = ${boolToString r}" r;
|
builtins.trace "${attrs.name}.filter ${path} = ${boolToString r}" r;
|
||||||
}
|
}
|
||||||
) // {
|
)
|
||||||
|
// {
|
||||||
satisfiesSubpathInvariant = src ? satisfiesSubpathInvariant && src.satisfiesSubpathInvariant;
|
satisfiesSubpathInvariant = src ? satisfiesSubpathInvariant && src.satisfiesSubpathInvariant;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -118,14 +132,20 @@ let
|
||||||
|
|
||||||
Example: src = sourceByRegex ./my-subproject [".*\.py$" "^database.sql$"]
|
Example: src = sourceByRegex ./my-subproject [".*\.py$" "^database.sql$"]
|
||||||
*/
|
*/
|
||||||
sourceByRegex = src: regexes:
|
sourceByRegex =
|
||||||
|
src: regexes:
|
||||||
let
|
let
|
||||||
isFiltered = src ? _isLibCleanSourceWith;
|
isFiltered = src ? _isLibCleanSourceWith;
|
||||||
origSrc = if isFiltered then src.origSrc else src;
|
origSrc = if isFiltered then src.origSrc else src;
|
||||||
in lib.cleanSourceWith {
|
in
|
||||||
filter = (path: type:
|
lib.cleanSourceWith {
|
||||||
let relPath = lib.removePrefix (toString origSrc + "/") (toString path);
|
filter = (
|
||||||
in lib.any (re: match re relPath != null) regexes);
|
path: type:
|
||||||
|
let
|
||||||
|
relPath = lib.removePrefix (toString origSrc + "/") (toString path);
|
||||||
|
in
|
||||||
|
lib.any (re: match re relPath != null) regexes
|
||||||
|
);
|
||||||
inherit src;
|
inherit src;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -145,21 +165,29 @@ let
|
||||||
src:
|
src:
|
||||||
# A list of file suffix strings
|
# A list of file suffix strings
|
||||||
exts:
|
exts:
|
||||||
let filter = name: type:
|
let
|
||||||
let base = baseNameOf (toString name);
|
filter =
|
||||||
in type == "directory" || lib.any (ext: lib.hasSuffix ext base) exts;
|
name: type:
|
||||||
in cleanSourceWith { inherit filter src; };
|
let
|
||||||
|
base = baseNameOf (toString name);
|
||||||
|
in
|
||||||
|
type == "directory" || lib.any (ext: lib.hasSuffix ext base) exts;
|
||||||
|
in
|
||||||
|
cleanSourceWith { inherit filter src; };
|
||||||
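For the two filters above, the regex example comes from the docstring itself; the `./my-subproject` path and the `.nix`-only variant are illustrative assumptions:

  let lib = import ./lib; in {
    pythonAndSchema = lib.sourceByRegex ./my-subproject [ ".*\\.py$" "^database.sql$" ];
    nixFilesOnly    = lib.sourceFilesBySuffices ./. [ ".nix" ];
  }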
|
|
||||||
pathIsGitRepo = path: (_commitIdFromGitRepoOrError path)?value;
|
pathIsGitRepo = path: (_commitIdFromGitRepoOrError path) ? value;
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Get the commit id of a git repo.
|
Get the commit id of a git repo.
|
||||||
|
|
||||||
Example: commitIdFromGitRepo <nixpkgs/.git>
|
Example: commitIdFromGitRepo <nixpkgs/.git>
|
||||||
*/
|
*/
|
||||||
commitIdFromGitRepo = path:
|
commitIdFromGitRepo =
|
||||||
let commitIdOrError = _commitIdFromGitRepoOrError path;
|
path:
|
||||||
in commitIdOrError.value or (throw commitIdOrError.error);
|
let
|
||||||
|
commitIdOrError = _commitIdFromGitRepoOrError path;
|
||||||
|
in
|
||||||
|
commitIdOrError.value or (throw commitIdOrError.error);
|
||||||
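Following the docstring's own example, a sketch of calling it against a local checkout (only meaningful when the evaluated tree still contains a `.git` directory):

  let lib = import ./lib; in
    lib.commitIdFromGitRepo ./.git  # e.g. a 40-character commit hash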
|
|
||||||
# Get the commit id of a git repo.
|
# Get the commit id of a git repo.
|
||||||
|
|
||||||
|
@ -168,55 +196,68 @@ let
|
||||||
# Example: commitIdFromGitRepo <nixpkgs/.git>
|
# Example: commitIdFromGitRepo <nixpkgs/.git>
|
||||||
# not exported, used for commitIdFromGitRepo
|
# not exported, used for commitIdFromGitRepo
|
||||||
_commitIdFromGitRepoOrError =
|
_commitIdFromGitRepoOrError =
|
||||||
let readCommitFromFile = file: path:
|
let
|
||||||
let fileName = path + "/${file}";
|
readCommitFromFile =
|
||||||
|
file: path:
|
||||||
|
let
|
||||||
|
fileName = path + "/${file}";
|
||||||
packedRefsName = path + "/packed-refs";
|
packedRefsName = path + "/packed-refs";
|
||||||
absolutePath = base: path:
|
absolutePath =
|
||||||
if lib.hasPrefix "/" path
|
base: path: if lib.hasPrefix "/" path then path else toString (/. + "${base}/${path}");
|
||||||
then path
|
in
|
||||||
else toString (/. + "${base}/${path}");
|
if
|
||||||
in if pathIsRegularFile path
|
pathIsRegularFile path
|
||||||
# Resolve git worktrees. See gitrepository-layout(5)
|
# Resolve git worktrees. See gitrepository-layout(5)
|
||||||
then
|
then
|
||||||
let m = match "^gitdir: (.*)$" (lib.fileContents path);
|
let
|
||||||
in if m == null
|
m = match "^gitdir: (.*)$" (lib.fileContents path);
|
||||||
then { error = "File contains no gitdir reference: " + path; }
|
in
|
||||||
|
if m == null then
|
||||||
|
{ error = "File contains no gitdir reference: " + path; }
|
||||||
else
|
else
|
||||||
let gitDir = absolutePath (dirOf path) (lib.head m);
|
let
|
||||||
commonDir'' = if pathIsRegularFile "${gitDir}/commondir"
|
gitDir = absolutePath (dirOf path) (lib.head m);
|
||||||
then lib.fileContents "${gitDir}/commondir"
|
commonDir'' =
|
||||||
else gitDir;
|
if pathIsRegularFile "${gitDir}/commondir" then lib.fileContents "${gitDir}/commondir" else gitDir;
|
||||||
commonDir' = lib.removeSuffix "/" commonDir'';
|
commonDir' = lib.removeSuffix "/" commonDir'';
|
||||||
commonDir = absolutePath gitDir commonDir';
|
commonDir = absolutePath gitDir commonDir';
|
||||||
refFile = lib.removePrefix "${commonDir}/" "${gitDir}/${file}";
|
refFile = lib.removePrefix "${commonDir}/" "${gitDir}/${file}";
|
||||||
in readCommitFromFile refFile commonDir
|
in
|
||||||
|
readCommitFromFile refFile commonDir
|
||||||
|
|
||||||
else if pathIsRegularFile fileName
|
else if
|
||||||
|
pathIsRegularFile fileName
|
||||||
# Sometimes git stores the commitId directly in the file but
|
# Sometimes git stores the commitId directly in the file but
|
||||||
# sometimes it stores something like: «ref: refs/heads/branch-name»
|
# sometimes it stores something like: «ref: refs/heads/branch-name»
|
||||||
then
|
then
|
||||||
let fileContent = lib.fileContents fileName;
|
let
|
||||||
|
fileContent = lib.fileContents fileName;
|
||||||
matchRef = match "^ref: (.*)$" fileContent;
|
matchRef = match "^ref: (.*)$" fileContent;
|
||||||
in if matchRef == null
|
in
|
||||||
then { value = fileContent; }
|
if matchRef == null then { value = fileContent; } else readCommitFromFile (lib.head matchRef) path
|
||||||
else readCommitFromFile (lib.head matchRef) path
|
|
||||||
|
|
||||||
else if pathIsRegularFile packedRefsName
|
else if
|
||||||
|
pathIsRegularFile packedRefsName
|
||||||
# Sometimes, the file isn't there at all and has been packed away in the
|
# Sometimes, the file isn't there at all and has been packed away in the
|
||||||
# packed-refs file, so we have to grep through it:
|
# packed-refs file, so we have to grep through it:
|
||||||
then
|
then
|
||||||
let fileContent = readFile packedRefsName;
|
let
|
||||||
|
fileContent = readFile packedRefsName;
|
||||||
matchRef = match "([a-z0-9]+) ${file}";
|
matchRef = match "([a-z0-9]+) ${file}";
|
||||||
isRef = s: isString s && (matchRef s) != null;
|
isRef = s: isString s && (matchRef s) != null;
|
||||||
# there is a bug in libstdc++ leading to stackoverflow for long strings:
|
# there is a bug in libstdc++ leading to stackoverflow for long strings:
|
||||||
# https://github.com/NixOS/nix/issues/2147#issuecomment-659868795
|
# https://github.com/NixOS/nix/issues/2147#issuecomment-659868795
|
||||||
refs = filter isRef (split "\n" fileContent);
|
refs = filter isRef (split "\n" fileContent);
|
||||||
in if refs == []
|
in
|
||||||
then { error = "Could not find " + file + " in " + packedRefsName; }
|
if refs == [ ] then
|
||||||
else { value = lib.head (matchRef (lib.head refs)); }
|
{ error = "Could not find " + file + " in " + packedRefsName; }
|
||||||
|
else
|
||||||
|
{ value = lib.head (matchRef (lib.head refs)); }
|
||||||
|
|
||||||
else { error = "Not a .git directory: " + toString path; };
|
else
|
||||||
in readCommitFromFile "HEAD";
|
{ error = "Not a .git directory: " + toString path; };
|
||||||
|
in
|
||||||
|
readCommitFromFile "HEAD";
|
||||||
|
|
||||||
pathHasContext = builtins.hasContext or (lib.hasPrefix storeDir);
|
pathHasContext = builtins.hasContext or (lib.hasPrefix storeDir);
|
||||||
|
|
||||||
|
@ -233,7 +274,8 @@ let
|
||||||
# like class of objects in the wild.
|
# like class of objects in the wild.
|
||||||
# (Existing ones being: paths, strings, sources and x//{outPath})
|
# (Existing ones being: paths, strings, sources and x//{outPath})
|
||||||
# So instead of exposing internals, we build a library of combinator functions.
|
# So instead of exposing internals, we build a library of combinator functions.
|
||||||
toSourceAttributes = src:
|
toSourceAttributes =
|
||||||
|
src:
|
||||||
let
|
let
|
||||||
isFiltered = src ? _isLibCleanSourceWith;
|
isFiltered = src ? _isLibCleanSourceWith;
|
||||||
in
|
in
|
||||||
|
@ -247,24 +289,35 @@ let
|
||||||
# fromSourceAttributes : SourceAttrs -> Source
|
# fromSourceAttributes : SourceAttrs -> Source
|
||||||
#
|
#
|
||||||
# Inverse of toSourceAttributes for Source objects.
|
# Inverse of toSourceAttributes for Source objects.
|
||||||
fromSourceAttributes = { origSrc, filter, name }:
|
fromSourceAttributes =
|
||||||
|
{
|
||||||
|
origSrc,
|
||||||
|
filter,
|
||||||
|
name,
|
||||||
|
}:
|
||||||
{
|
{
|
||||||
_isLibCleanSourceWith = true;
|
_isLibCleanSourceWith = true;
|
||||||
inherit origSrc filter name;
|
inherit origSrc filter name;
|
||||||
outPath = builtins.path { inherit filter name; path = origSrc; };
|
outPath = builtins.path {
|
||||||
|
inherit filter name;
|
||||||
|
path = origSrc;
|
||||||
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
|
|
||||||
pathType = lib.warnIf (lib.isInOldestRelease 2305)
|
pathType = lib.warnIf (lib.isInOldestRelease
|
||||||
"lib.sources.pathType has been moved to lib.filesystem.pathType."
|
2305
|
||||||
lib.filesystem.pathType;
|
) "lib.sources.pathType has been moved to lib.filesystem.pathType." lib.filesystem.pathType;
|
||||||
|
|
||||||
pathIsDirectory = lib.warnIf (lib.isInOldestRelease 2305)
|
pathIsDirectory =
|
||||||
|
lib.warnIf (lib.isInOldestRelease 2305)
|
||||||
"lib.sources.pathIsDirectory has been moved to lib.filesystem.pathIsDirectory."
|
"lib.sources.pathIsDirectory has been moved to lib.filesystem.pathIsDirectory."
|
||||||
lib.filesystem.pathIsDirectory;
|
lib.filesystem.pathIsDirectory;
|
||||||
|
|
||||||
pathIsRegularFile = lib.warnIf (lib.isInOldestRelease 2305)
|
pathIsRegularFile =
|
||||||
|
lib.warnIf (lib.isInOldestRelease 2305)
|
||||||
"lib.sources.pathIsRegularFile has been moved to lib.filesystem.pathIsRegularFile."
|
"lib.sources.pathIsRegularFile has been moved to lib.filesystem.pathIsRegularFile."
|
||||||
lib.filesystem.pathIsRegularFile;
|
lib.filesystem.pathIsRegularFile;
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
{ lib }:
|
{ lib }:
|
||||||
/*
|
/*
|
||||||
Usage:
|
Usage:
|
||||||
|
|
||||||
You define your custom builder script by adding all build steps to a list.
|
You define your custom builder script by adding all build steps to a list.
|
||||||
for example:
|
for example:
|
||||||
|
@ -52,32 +52,63 @@ let
|
||||||
in
|
in
|
||||||
rec {
|
rec {
|
||||||
|
|
||||||
/* !!! The interface of this function is kind of messed up, since
|
/*
|
||||||
|
!!! The interface of this function is kind of messed up, since
|
||||||
it's way too overloaded and almost but not quite computes a
|
it's way too overloaded and almost but not quite computes a
|
||||||
topological sort of the depstrings. */
|
topological sort of the depstrings.
|
||||||
|
*/
|
||||||
|
|
||||||
textClosureList = predefined: arg:
|
textClosureList =
|
||||||
|
predefined: arg:
|
||||||
let
|
let
|
||||||
f = done: todo:
|
f =
|
||||||
if todo == [] then {result = []; inherit done;}
|
done: todo:
|
||||||
|
if todo == [ ] then
|
||||||
|
{
|
||||||
|
result = [ ];
|
||||||
|
inherit done;
|
||||||
|
}
|
||||||
else
|
else
|
||||||
let entry = head todo; in
|
let
|
||||||
|
entry = head todo;
|
||||||
|
in
|
||||||
if isAttrs entry then
|
if isAttrs entry then
|
||||||
let x = f done entry.deps;
|
let
|
||||||
|
x = f done entry.deps;
|
||||||
y = f x.done (tail todo);
|
y = f x.done (tail todo);
|
||||||
in { result = x.result ++ [entry.text] ++ y.result;
|
in
|
||||||
|
{
|
||||||
|
result = x.result ++ [ entry.text ] ++ y.result;
|
||||||
done = y.done;
|
done = y.done;
|
||||||
}
|
}
|
||||||
else if done ? ${entry} then f done (tail todo)
|
else if done ? ${entry} then
|
||||||
else f (done // listToAttrs [{name = entry; value = 1;}]) ([predefined.${entry}] ++ tail todo);
|
f done (tail todo)
|
||||||
in (f {} arg).result;
|
else
|
||||||
|
f (
|
||||||
|
done
|
||||||
|
// listToAttrs [
|
||||||
|
{
|
||||||
|
name = entry;
|
||||||
|
value = 1;
|
||||||
|
}
|
||||||
|
]
|
||||||
|
) ([ predefined.${entry} ] ++ tail todo);
|
||||||
|
in
|
||||||
|
(f { } arg).result;
|
||||||
|
|
||||||
textClosureMap = f: predefined: names:
|
textClosureMap =
|
||||||
|
f: predefined: names:
|
||||||
concatStringsSep "\n" (map f (textClosureList predefined names));
|
concatStringsSep "\n" (map f (textClosureList predefined names));
|
||||||
|
|
||||||
noDepEntry = text: {inherit text; deps = [];};
|
noDepEntry = text: {
|
||||||
fullDepEntry = text: deps: {inherit text deps;};
|
inherit text;
|
||||||
packEntry = deps: {inherit deps; text="";};
|
deps = [ ];
|
||||||
|
};
|
||||||
|
fullDepEntry = text: deps: { inherit text deps; };
|
||||||
|
packEntry = deps: {
|
||||||
|
inherit deps;
|
||||||
|
text = "";
|
||||||
|
};
|
||||||
|
|
||||||
stringAfter = deps: text: { inherit text deps; };
|
stringAfter = deps: text: { inherit text deps; };
|
||||||
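A small sketch of the dependency-ordered text helpers above; the `prepare`/`build` step names are made up for illustration:

  let
    lib = import ./lib;
    steps = {
      prepare = lib.noDepEntry "echo prepare";
      build   = lib.fullDepEntry "echo build" [ "prepare" ];
    };
  in
    # Emits "echo prepare" before "echo build", joined with newlines.
    lib.textClosureMap lib.id steps [ "build" ]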
|
|
||||||
|
|
694 lib/strings.nix
File diff suppressed because it is too large
|
@ -7,36 +7,284 @@ rec {
|
||||||
# Spec: https://gitlab.com/x86-psABIs/x86-64-ABI/
|
# Spec: https://gitlab.com/x86-psABIs/x86-64-ABI/
|
||||||
default = [ ];
|
default = [ ];
|
||||||
x86-64 = [ ];
|
x86-64 = [ ];
|
||||||
x86-64-v2 = [ "sse3" "ssse3" "sse4_1" "sse4_2" ];
|
x86-64-v2 = [
|
||||||
x86-64-v3 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "avx" "avx2" "fma" ];
|
"sse3"
|
||||||
x86-64-v4 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "avx" "avx2" "avx512" "fma" ];
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
];
|
||||||
|
x86-64-v3 = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
x86-64-v4 = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"avx512"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
# x86_64 Intel
|
# x86_64 Intel
|
||||||
nehalem = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" ];
|
nehalem = [
|
||||||
westmere = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" ];
|
"sse3"
|
||||||
sandybridge = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" ];
|
"ssse3"
|
||||||
ivybridge = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" ];
|
"sse4_1"
|
||||||
haswell = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "fma" ];
|
"sse4_2"
|
||||||
broadwell = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "fma" ];
|
"aes"
|
||||||
skylake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "fma" ];
|
];
|
||||||
skylake-avx512 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
|
westmere = [
|
||||||
cannonlake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
|
"sse3"
|
||||||
icelake-client = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
|
"ssse3"
|
||||||
icelake-server = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
|
"sse4_1"
|
||||||
cascadelake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
|
"sse4_2"
|
||||||
cooperlake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
|
"aes"
|
||||||
tigerlake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
|
];
|
||||||
alderlake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "fma" ];
|
sandybridge = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
];
|
||||||
|
ivybridge = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
];
|
||||||
|
haswell = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
broadwell = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
skylake = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
skylake-avx512 = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"avx512"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
cannonlake = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"avx512"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
icelake-client = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"avx512"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
icelake-server = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"avx512"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
cascadelake = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"avx512"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
cooperlake = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"avx512"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
tigerlake = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"avx512"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
alderlake = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
# x86_64 AMD
|
# x86_64 AMD
|
||||||
btver1 = [ "sse3" "ssse3" "sse4_1" "sse4_2" ];
|
btver1 = [
|
||||||
btver2 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" ];
|
"sse3"
|
||||||
bdver1 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "fma" "fma4" ];
|
"ssse3"
|
||||||
bdver2 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "fma" "fma4" ];
|
"sse4_1"
|
||||||
bdver3 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "fma" "fma4" ];
|
"sse4_2"
|
||||||
bdver4 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "avx2" "fma" "fma4" ];
|
];
|
||||||
znver1 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "avx2" "fma" ];
|
btver2 = [
|
||||||
znver2 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "avx2" "fma" ];
|
"sse3"
|
||||||
znver3 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "avx2" "fma" ];
|
"ssse3"
|
||||||
znver4 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "sse4a" "aes" "avx" "avx2" "avx512" "fma" ];
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
];
|
||||||
|
bdver1 = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"sse4a"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"fma"
|
||||||
|
"fma4"
|
||||||
|
];
|
||||||
|
bdver2 = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"sse4a"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"fma"
|
||||||
|
"fma4"
|
||||||
|
];
|
||||||
|
bdver3 = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"sse4a"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"fma"
|
||||||
|
"fma4"
|
||||||
|
];
|
||||||
|
bdver4 = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"sse4a"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"fma"
|
||||||
|
"fma4"
|
||||||
|
];
|
||||||
|
znver1 = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"sse4a"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
znver2 = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"sse4a"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
znver3 = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"sse4a"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
|
znver4 = [
|
||||||
|
"sse3"
|
||||||
|
"ssse3"
|
||||||
|
"sse4_1"
|
||||||
|
"sse4_2"
|
||||||
|
"sse4a"
|
||||||
|
"aes"
|
||||||
|
"avx"
|
||||||
|
"avx2"
|
||||||
|
"avx512"
|
||||||
|
"fma"
|
||||||
|
];
|
||||||
# other
|
# other
|
||||||
armv5te = [ ];
|
armv5te = [ ];
|
||||||
armv6 = [ ];
|
armv6 = [ ];
|
||||||
|
@ -62,11 +310,25 @@ rec {
|
||||||
sandybridge = [ "westmere" ] ++ inferiors.westmere;
|
sandybridge = [ "westmere" ] ++ inferiors.westmere;
|
||||||
ivybridge = [ "sandybridge" ] ++ inferiors.sandybridge;
|
ivybridge = [ "sandybridge" ] ++ inferiors.sandybridge;
|
||||||
|
|
||||||
haswell = lib.unique ([ "ivybridge" "x86-64-v3" ] ++ inferiors.ivybridge ++ inferiors.x86-64-v3);
|
haswell = lib.unique (
|
||||||
|
[
|
||||||
|
"ivybridge"
|
||||||
|
"x86-64-v3"
|
||||||
|
]
|
||||||
|
++ inferiors.ivybridge
|
||||||
|
++ inferiors.x86-64-v3
|
||||||
|
);
|
||||||
broadwell = [ "haswell" ] ++ inferiors.haswell;
|
broadwell = [ "haswell" ] ++ inferiors.haswell;
|
||||||
skylake = [ "broadwell" ] ++ inferiors.broadwell;
|
skylake = [ "broadwell" ] ++ inferiors.broadwell;
|
||||||
|
|
||||||
skylake-avx512 = lib.unique ([ "skylake" "x86-64-v4" ] ++ inferiors.skylake ++ inferiors.x86-64-v4);
|
skylake-avx512 = lib.unique (
|
||||||
|
[
|
||||||
|
"skylake"
|
||||||
|
"x86-64-v4"
|
||||||
|
]
|
||||||
|
++ inferiors.skylake
|
||||||
|
++ inferiors.x86-64-v4
|
||||||
|
);
|
||||||
cannonlake = [ "skylake-avx512" ] ++ inferiors.skylake-avx512;
|
cannonlake = [ "skylake-avx512" ] ++ inferiors.skylake-avx512;
|
||||||
icelake-client = [ "cannonlake" ] ++ inferiors.cannonlake;
|
icelake-client = [ "cannonlake" ] ++ inferiors.cannonlake;
|
||||||
icelake-server = [ "icelake-client" ] ++ inferiors.icelake-client;
|
icelake-server = [ "icelake-client" ] ++ inferiors.icelake-client;
|
||||||
|
@ -107,7 +369,14 @@ rec {
|
||||||
znver1 = [ "skylake" ] ++ inferiors.skylake; # Includes haswell and x86-64-v3
|
znver1 = [ "skylake" ] ++ inferiors.skylake; # Includes haswell and x86-64-v3
|
||||||
znver2 = [ "znver1" ] ++ inferiors.znver1;
|
znver2 = [ "znver1" ] ++ inferiors.znver1;
|
||||||
znver3 = [ "znver2" ] ++ inferiors.znver2;
|
znver3 = [ "znver2" ] ++ inferiors.znver2;
|
||||||
znver4 = lib.unique ([ "znver3" "x86-64-v4" ] ++ inferiors.znver3 ++ inferiors.x86-64-v4);
|
znver4 = lib.unique (
|
||||||
|
[
|
||||||
|
"znver3"
|
||||||
|
"x86-64-v4"
|
||||||
|
]
|
||||||
|
++ inferiors.znver3
|
||||||
|
++ inferiors.x86-64-v4
|
||||||
|
);
|
||||||
|
|
||||||
# other
|
# other
|
||||||
armv5te = [ ];
|
armv5te = [ ];
|
||||||
|
@ -118,9 +387,11 @@ rec {
|
||||||
loongson2f = [ ];
|
loongson2f = [ ];
|
||||||
};
|
};
|
||||||
|
|
||||||
predicates = let
|
predicates =
|
||||||
featureSupport = feature: x: builtins.elem feature features.${x} or [];
|
let
|
||||||
in {
|
featureSupport = feature: x: builtins.elem feature features.${x} or [ ];
|
||||||
|
in
|
||||||
|
{
|
||||||
sse3Support = featureSupport "sse3";
|
sse3Support = featureSupport "sse3";
|
||||||
ssse3Support = featureSupport "ssse3";
|
ssse3Support = featureSupport "ssse3";
|
||||||
sse4_1Support = featureSupport "sse4_1";
|
sse4_1Support = featureSupport "sse4_1";
|
||||||
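A sketch of how the feature tables and predicates above fit together, assuming this file is exposed as `lib.systems.architectures` as in nixpkgs:

  let
    lib  = import ./lib;
    arch = lib.systems.architectures;
  in {
    skylakeHasSse4_1 = arch.predicates.sse4_1Support "skylake";            # true
    haswellImpliesV3 = builtins.elem "x86-64-v3" arch.inferiors.haswell;   # true
  }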
|
|
|
@ -42,8 +42,10 @@ let
|
||||||
both arguments have been `elaborate`-d.
|
both arguments have been `elaborate`-d.
|
||||||
*/
|
*/
|
||||||
equals =
|
equals =
|
||||||
let removeFunctions = a: filterAttrs (_: v: !isFunction v) a;
|
let
|
||||||
in a: b: removeFunctions a == removeFunctions b;
|
removeFunctions = a: filterAttrs (_: v: !isFunction v) a;
|
||||||
|
in
|
||||||
|
a: b: removeFunctions a == removeFunctions b;
|
||||||
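A sketch of what `equals` buys over `==` here: elaborated platforms contain function-valued attributes, which are stripped before comparing (the `lib.systems.equals` export path is an assumption):

  let lib = import ./lib; inherit (lib.systems) elaborate equals; in
    # elaborate accepts either a system string or an attrset, as shown below.
    equals (elaborate "x86_64-linux") (elaborate { system = "x86_64-linux"; })  # true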
|
|
||||||
/**
|
/**
|
||||||
List of all Nix system doubles the nixpkgs flake will expose the package set
|
List of all Nix system doubles the nixpkgs flake will expose the package set
|
||||||
|
@ -61,42 +63,63 @@ let
|
||||||
# `parsed` is inferred from args, both because there are two options with one
|
# `parsed` is inferred from args, both because there are two options with one
|
||||||
# clearly preferred, and to prevent cycles. A simpler fixed point where the RHS
|
# clearly preferred, and to prevent cycles. A simpler fixed point where the RHS
|
||||||
# always just used `final.*` would fail on both counts.
|
# always just used `final.*` would fail on both counts.
|
||||||
elaborate = args': let
|
elaborate =
|
||||||
args = if isString args' then { system = args'; }
|
args':
|
||||||
else args';
|
let
|
||||||
|
args = if isString args' then { system = args'; } else args';
|
||||||
|
|
||||||
# TODO: deprecate args.rustc in favour of args.rust after 23.05 is EOL.
|
# TODO: deprecate args.rustc in favour of args.rust after 23.05 is EOL.
|
||||||
rust = args.rust or args.rustc or {};
|
rust = args.rust or args.rustc or { };
|
||||||
|
|
||||||
final = {
|
final =
|
||||||
|
{
|
||||||
# Prefer to parse `config` as it is strictly more informative.
|
# Prefer to parse `config` as it is strictly more informative.
|
||||||
parsed = parse.mkSystemFromString (if args ? config then args.config else args.system);
|
parsed = parse.mkSystemFromString (if args ? config then args.config else args.system);
|
||||||
# Either of these can be losslessly-extracted from `parsed` iff parsing succeeds.
|
# Either of these can be losslessly-extracted from `parsed` iff parsing succeeds.
|
||||||
system = parse.doubleFromSystem final.parsed;
|
system = parse.doubleFromSystem final.parsed;
|
||||||
config = parse.tripleFromSystem final.parsed;
|
config = parse.tripleFromSystem final.parsed;
|
||||||
# Determine whether we can execute binaries built for the provided platform.
|
# Determine whether we can execute binaries built for the provided platform.
|
||||||
canExecute = platform:
|
canExecute =
|
||||||
final.isAndroid == platform.isAndroid &&
|
platform:
|
||||||
parse.isCompatible final.parsed.cpu platform.parsed.cpu
|
final.isAndroid == platform.isAndroid
|
||||||
|
&& parse.isCompatible final.parsed.cpu platform.parsed.cpu
|
||||||
&& final.parsed.kernel == platform.parsed.kernel;
|
&& final.parsed.kernel == platform.parsed.kernel;
|
||||||
isCompatible = _: throw "2022-05-23: isCompatible has been removed in favor of canExecute, refer to the 22.11 changelog for details";
|
isCompatible =
|
||||||
|
_:
|
||||||
|
throw "2022-05-23: isCompatible has been removed in favor of canExecute, refer to the 22.11 changelog for details";
|
||||||
# Derived meta-data
|
# Derived meta-data
|
||||||
libc =
|
libc =
|
||||||
/**/ if final.isDarwin then "libSystem"
|
if final.isDarwin then
|
||||||
else if final.isMinGW then "msvcrt"
|
"libSystem"
|
||||||
else if final.isWasi then "wasilibc"
|
else if final.isMinGW then
|
||||||
else if final.isRedox then "relibc"
|
"msvcrt"
|
||||||
else if final.isMusl then "musl"
|
else if final.isWasi then
|
||||||
else if final.isUClibc then "uclibc"
|
"wasilibc"
|
||||||
else if final.isAndroid then "bionic"
|
else if final.isRedox then
|
||||||
else if final.isLinux /* default */ then "glibc"
|
"relibc"
|
||||||
else if final.isFreeBSD then "fblibc"
|
else if final.isMusl then
|
||||||
else if final.isNetBSD then "nblibc"
|
"musl"
|
||||||
else if final.isAvr then "avrlibc"
|
else if final.isUClibc then
|
||||||
else if final.isGhcjs then null
|
"uclibc"
|
||||||
else if final.isNone then "newlib"
|
else if final.isAndroid then
|
||||||
|
"bionic"
|
||||||
|
else if
|
||||||
|
final.isLinux # default
|
||||||
|
then
|
||||||
|
"glibc"
|
||||||
|
else if final.isFreeBSD then
|
||||||
|
"fblibc"
|
||||||
|
else if final.isNetBSD then
|
||||||
|
"nblibc"
|
||||||
|
else if final.isAvr then
|
||||||
|
"avrlibc"
|
||||||
|
else if final.isGhcjs then
|
||||||
|
null
|
||||||
|
else if final.isNone then
|
||||||
|
"newlib"
|
||||||
# TODO(@Ericson2314) think more about other operating systems
|
# TODO(@Ericson2314) think more about other operating systems
|
||||||
else "native/impure";
|
else
|
||||||
|
"native/impure";
|
||||||
# Choose what linker we wish to use by default. Someday we might also
|
# Choose what linker we wish to use by default. Someday we might also
|
||||||
# choose the C compiler, runtime library, C++ standard library, etc. in
|
# choose the C compiler, runtime library, C++ standard library, etc. in
|
||||||
# this way, nice and orthogonally, and deprecate `useLLVM`. But due to
|
# this way, nice and orthogonally, and deprecate `useLLVM`. But due to
|
||||||
|
@ -104,34 +127,36 @@ let
|
||||||
# independently, so we are just doing `linker` and keeping `useLLVM` for
|
# independently, so we are just doing `linker` and keeping `useLLVM` for
|
||||||
# now.
|
# now.
|
||||||
linker =
|
linker =
|
||||||
/**/ if final.useLLVM or false then "lld"
|
if final.useLLVM or false then
|
||||||
else if final.isDarwin then "cctools"
|
"lld"
|
||||||
|
else if final.isDarwin then
|
||||||
|
"cctools"
|
||||||
# "bfd" and "gold" both come from GNU binutils. The existence of Gold
|
# "bfd" and "gold" both come from GNU binutils. The existence of Gold
|
||||||
# is why we use the more obscure "bfd" and not "binutils" for this
|
# is why we use the more obscure "bfd" and not "binutils" for this
|
||||||
# choice.
|
# choice.
|
||||||
else "bfd";
|
else
|
||||||
|
"bfd";
|
||||||
# The standard lib directory name that non-nixpkgs binaries distributed
|
# The standard lib directory name that non-nixpkgs binaries distributed
|
||||||
# for this platform normally assume.
|
# for this platform normally assume.
|
||||||
libDir = if final.isLinux then
|
libDir =
|
||||||
if final.isx86_64 || final.isMips64 || final.isPower64
|
if final.isLinux then
|
||||||
then "lib64"
|
if final.isx86_64 || final.isMips64 || final.isPower64 then "lib64" else "lib"
|
||||||
else "lib"
|
else
|
||||||
else null;
|
null;
|
||||||
extensions = optionalAttrs final.hasSharedLibraries {
|
extensions =
|
||||||
|
optionalAttrs final.hasSharedLibraries {
|
||||||
sharedLibrary =
|
sharedLibrary =
|
||||||
if final.isDarwin then ".dylib"
|
if final.isDarwin then
|
||||||
else if final.isWindows then ".dll"
|
".dylib"
|
||||||
else ".so";
|
else if final.isWindows then
|
||||||
} // {
|
".dll"
|
||||||
staticLibrary =
|
else
|
||||||
/**/ if final.isWindows then ".lib"
|
".so";
|
||||||
else ".a";
|
}
|
||||||
library =
|
// {
|
||||||
/**/ if final.isStatic then final.extensions.staticLibrary
|
staticLibrary = if final.isWindows then ".lib" else ".a";
|
||||||
else final.extensions.sharedLibrary;
|
library = if final.isStatic then final.extensions.staticLibrary else final.extensions.sharedLibrary;
|
||||||
executable =
|
executable = if final.isWindows then ".exe" else "";
|
||||||
/**/ if final.isWindows then ".exe"
|
|
||||||
else "";
|
|
||||||
};
|
};
|
||||||
# Misc boolean options
|
# Misc boolean options
|
||||||
useAndroidPrebuilt = false;
|
useAndroidPrebuilt = false;
|
||||||
|
@ -140,7 +165,8 @@ let
|
||||||
# Output from uname
|
# Output from uname
|
||||||
uname = {
|
uname = {
|
||||||
# uname -s
|
# uname -s
|
||||||
system = {
|
system =
|
||||||
|
{
|
||||||
linux = "Linux";
|
linux = "Linux";
|
||||||
windows = "Windows";
|
windows = "Windows";
|
||||||
darwin = "Darwin";
|
darwin = "Darwin";
|
||||||
|
@ -150,17 +176,19 @@ let
|
||||||
wasi = "Wasi";
|
wasi = "Wasi";
|
||||||
redox = "Redox";
|
redox = "Redox";
|
||||||
genode = "Genode";
|
genode = "Genode";
|
||||||
}.${final.parsed.kernel.name} or null;
|
}
|
||||||
|
.${final.parsed.kernel.name} or null;
|
||||||
|
|
||||||
# uname -m
|
# uname -m
|
||||||
processor =
|
processor =
|
||||||
if final.isPower64
|
if final.isPower64 then
|
||||||
then "ppc64${optionalString final.isLittleEndian "le"}"
|
"ppc64${optionalString final.isLittleEndian "le"}"
|
||||||
else if final.isPower
|
else if final.isPower then
|
||||||
then "ppc${optionalString final.isLittleEndian "le"}"
|
"ppc${optionalString final.isLittleEndian "le"}"
|
||||||
else if final.isMips64
|
else if final.isMips64 then
|
||||||
then "mips64" # endianness is *not* included on mips64
|
"mips64" # endianness is *not* included on mips64
|
||||||
else final.parsed.cpu.name;
|
else
|
||||||
|
final.parsed.cpu.name;
|
||||||
|
|
||||||
# uname -r
|
# uname -r
|
||||||
release = null;
|
release = null;
|
||||||
|
@ -172,11 +200,21 @@ let
|
||||||
# will still build on/for those platforms with --enable-shared, but simply
|
# will still build on/for those platforms with --enable-shared, but simply
|
||||||
# omit any `.so` build products such as libgcc_s.so. When that happens,
|
# omit any `.so` build products such as libgcc_s.so. When that happens,
|
||||||
# it causes hard-to-troubleshoot build failures.
|
# it causes hard-to-troubleshoot build failures.
|
||||||
hasSharedLibraries = with final;
|
hasSharedLibraries =
|
||||||
(isAndroid || isGnu || isMusl # Linux (allows multiple libcs)
|
with final;
|
||||||
|| isDarwin || isSunOS || isOpenBSD || isFreeBSD || isNetBSD # BSDs
|
(
|
||||||
|| isCygwin || isMinGW # Windows
|
isAndroid
|
||||||
) && !isStatic;
|
|| isGnu
|
||||||
|
|| isMusl # Linux (allows multiple libcs)
|
||||||
|
|| isDarwin
|
||||||
|
|| isSunOS
|
||||||
|
|| isOpenBSD
|
||||||
|
|| isFreeBSD
|
||||||
|
|| isNetBSD # BSDs
|
||||||
|
|| isCygwin
|
||||||
|
|| isMinGW # Windows
|
||||||
|
)
|
||||||
|
&& !isStatic;
|
||||||
|
|
||||||
# The difference between `isStatic` and `hasSharedLibraries` is mainly the
|
# The difference between `isStatic` and `hasSharedLibraries` is mainly the
|
||||||
# addition of the `staticMarker` (see make-derivation.nix). Some
|
# addition of the `staticMarker` (see make-derivation.nix). Some
|
||||||
|
@ -188,73 +226,115 @@ let
|
||||||
|
|
||||||
# Just a guess, based on `system`
|
# Just a guess, based on `system`
|
||||||
inherit
|
inherit
|
||||||
({
|
(
|
||||||
linux-kernel = args.linux-kernel or {};
|
{
|
||||||
gcc = args.gcc or {};
|
linux-kernel = args.linux-kernel or { };
|
||||||
} // platforms.select final)
|
gcc = args.gcc or { };
|
||||||
linux-kernel gcc;
|
}
|
||||||
|
// platforms.select final
|
||||||
|
)
|
||||||
|
linux-kernel
|
||||||
|
gcc
|
||||||
|
;
|
||||||
|
|
||||||
# TODO: remove after 23.05 is EOL, with an error pointing to the rust.* attrs.
|
# TODO: remove after 23.05 is EOL, with an error pointing to the rust.* attrs.
|
||||||
rustc = args.rustc or {};
|
rustc = args.rustc or { };
|
||||||
|
|
||||||
linuxArch =
|
linuxArch =
|
||||||
if final.isAarch32 then "arm"
|
if final.isAarch32 then
|
||||||
else if final.isAarch64 then "arm64"
|
"arm"
|
||||||
else if final.isx86_32 then "i386"
|
else if final.isAarch64 then
|
||||||
else if final.isx86_64 then "x86_64"
|
"arm64"
|
||||||
|
else if final.isx86_32 then
|
||||||
|
"i386"
|
||||||
|
else if final.isx86_64 then
|
||||||
|
"x86_64"
|
||||||
# linux kernel does not distinguish microblaze/microblazeel
|
# linux kernel does not distinguish microblaze/microblazeel
|
||||||
else if final.isMicroBlaze then "microblaze"
|
else if final.isMicroBlaze then
|
||||||
else if final.isMips32 then "mips"
|
"microblaze"
|
||||||
else if final.isMips64 then "mips" # linux kernel does not distinguish mips32/mips64
|
else if final.isMips32 then
|
||||||
else if final.isPower then "powerpc"
|
"mips"
|
||||||
else if final.isRiscV then "riscv"
|
else if final.isMips64 then
|
||||||
else if final.isS390 then "s390"
|
"mips" # linux kernel does not distinguish mips32/mips64
|
||||||
else if final.isLoongArch64 then "loongarch"
|
else if final.isPower then
|
||||||
else final.parsed.cpu.name;
|
"powerpc"
|
||||||
|
else if final.isRiscV then
|
||||||
|
"riscv"
|
||||||
|
else if final.isS390 then
|
||||||
|
"s390"
|
||||||
|
else if final.isLoongArch64 then
|
||||||
|
"loongarch"
|
||||||
|
else
|
||||||
|
final.parsed.cpu.name;
|
||||||
|
|
||||||
# https://source.denx.de/u-boot/u-boot/-/blob/9bfb567e5f1bfe7de8eb41f8c6d00f49d2b9a426/common/image.c#L81-106
|
# https://source.denx.de/u-boot/u-boot/-/blob/9bfb567e5f1bfe7de8eb41f8c6d00f49d2b9a426/common/image.c#L81-106
|
||||||
ubootArch =
|
ubootArch =
|
||||||
if final.isx86_32 then "x86" # not i386
|
if final.isx86_32 then
|
||||||
else if final.isMips64 then "mips64" # uboot *does* distinguish between mips32/mips64
|
"x86" # not i386
|
||||||
else final.linuxArch; # other cases appear to agree with linuxArch
|
else if final.isMips64 then
|
||||||
|
"mips64" # uboot *does* distinguish between mips32/mips64
|
||||||
|
else
|
||||||
|
final.linuxArch; # other cases appear to agree with linuxArch
|
||||||
|
|
||||||
qemuArch =
|
qemuArch =
|
||||||
if final.isAarch32 then "arm"
|
if final.isAarch32 then
|
||||||
else if final.isS390 && !final.isS390x then null
|
"arm"
|
||||||
else if final.isx86_64 then "x86_64"
|
else if final.isS390 && !final.isS390x then
|
||||||
else if final.isx86 then "i386"
|
null
|
||||||
else if final.isMips64n32 then "mipsn32${optionalString final.isLittleEndian "el"}"
|
else if final.isx86_64 then
|
||||||
else if final.isMips64 then "mips64${optionalString final.isLittleEndian "el"}"
|
"x86_64"
|
||||||
else final.uname.processor;
|
else if final.isx86 then
|
||||||
|
"i386"
|
||||||
|
else if final.isMips64n32 then
|
||||||
|
"mipsn32${optionalString final.isLittleEndian "el"}"
|
||||||
|
else if final.isMips64 then
|
||||||
|
"mips64${optionalString final.isLittleEndian "el"}"
|
||||||
|
else
|
||||||
|
final.uname.processor;
|
||||||
|
|
||||||
# Name used by UEFI for architectures.
|
# Name used by UEFI for architectures.
|
||||||
efiArch =
|
efiArch =
|
||||||
if final.isx86_32 then "ia32"
|
if final.isx86_32 then
|
||||||
else if final.isx86_64 then "x64"
|
"ia32"
|
||||||
else if final.isAarch32 then "arm"
|
else if final.isx86_64 then
|
||||||
else if final.isAarch64 then "aa64"
|
"x64"
|
||||||
else final.parsed.cpu.name;
|
else if final.isAarch32 then
|
||||||
|
"arm"
|
||||||
|
else if final.isAarch64 then
|
||||||
|
"aa64"
|
||||||
|
else
|
||||||
|
final.parsed.cpu.name;
|
||||||
|
|
||||||
darwinArch = {
|
darwinArch =
|
||||||
|
{
|
||||||
armv7a = "armv7";
|
armv7a = "armv7";
|
||||||
aarch64 = "arm64";
|
aarch64 = "arm64";
|
||||||
}.${final.parsed.cpu.name} or final.parsed.cpu.name;
|
}
|
||||||
|
.${final.parsed.cpu.name} or final.parsed.cpu.name;
|
||||||
|
|
||||||
darwinPlatform =
|
darwinPlatform =
|
||||||
if final.isMacOS then "macos"
|
if final.isMacOS then
|
||||||
else if final.isiOS then "ios"
|
"macos"
|
||||||
else null;
|
else if final.isiOS then
|
||||||
|
"ios"
|
||||||
|
else
|
||||||
|
null;
|
||||||
# The canonical name for this attribute is darwinSdkVersion, but some
|
# The canonical name for this attribute is darwinSdkVersion, but some
|
||||||
# platforms define the old name "sdkVer".
|
# platforms define the old name "sdkVer".
|
||||||
darwinSdkVersion = final.sdkVer or (if final.isAarch64 then "11.0" else "10.12");
|
darwinSdkVersion = final.sdkVer or (if final.isAarch64 then "11.0" else "10.12");
|
||||||
darwinMinVersion = final.darwinSdkVersion;
|
darwinMinVersion = final.darwinSdkVersion;
|
||||||
darwinMinVersionVariable =
|
darwinMinVersionVariable =
|
||||||
if final.isMacOS then "MACOSX_DEPLOYMENT_TARGET"
|
if final.isMacOS then
|
||||||
else if final.isiOS then "IPHONEOS_DEPLOYMENT_TARGET"
|
"MACOSX_DEPLOYMENT_TARGET"
|
||||||
else null;
|
else if final.isiOS then
|
||||||
} // (
|
"IPHONEOS_DEPLOYMENT_TARGET"
|
||||||
|
else
|
||||||
|
null;
|
||||||
|
}
|
||||||
|
// (
|
||||||
let
|
let
|
||||||
selectEmulator = pkgs:
|
selectEmulator =
|
||||||
|
pkgs:
|
||||||
let
|
let
|
||||||
qemu-user = pkgs.qemu.override {
|
qemu-user = pkgs.qemu.override {
|
||||||
smartcardSupport = false;
|
smartcardSupport = false;
|
||||||
|
@ -277,54 +357,69 @@ let
|
||||||
};
|
};
|
||||||
wine = (pkgs.winePackagesFor "wine${toString final.parsed.cpu.bits}").minimal;
|
wine = (pkgs.winePackagesFor "wine${toString final.parsed.cpu.bits}").minimal;
|
||||||
in
|
in
|
||||||
if pkgs.stdenv.hostPlatform.canExecute final
|
if pkgs.stdenv.hostPlatform.canExecute final then
|
||||||
then "${pkgs.runtimeShell} -c '\"$@\"' --"
|
"${pkgs.runtimeShell} -c '\"$@\"' --"
|
||||||
else if final.isWindows
|
else if final.isWindows then
|
||||||
then "${wine}/bin/wine${optionalString (final.parsed.cpu.bits == 64) "64"}"
|
"${wine}/bin/wine${optionalString (final.parsed.cpu.bits == 64) "64"}"
|
||||||
else if final.isLinux && pkgs.stdenv.hostPlatform.isLinux && final.qemuArch != null
|
else if final.isLinux && pkgs.stdenv.hostPlatform.isLinux && final.qemuArch != null then
|
||||||
then "${qemu-user}/bin/qemu-${final.qemuArch}"
|
"${qemu-user}/bin/qemu-${final.qemuArch}"
|
||||||
else if final.isWasi
|
else if final.isWasi then
|
||||||
then "${pkgs.wasmtime}/bin/wasmtime"
|
"${pkgs.wasmtime}/bin/wasmtime"
|
||||||
else if final.isMmix
|
else if final.isMmix then
|
||||||
then "${pkgs.mmixware}/bin/mmix"
|
"${pkgs.mmixware}/bin/mmix"
|
||||||
else null;
|
else
|
||||||
in {
|
null;
|
||||||
|
in
|
||||||
|
{
|
||||||
emulatorAvailable = pkgs: (selectEmulator pkgs) != null;
|
emulatorAvailable = pkgs: (selectEmulator pkgs) != null;
|
||||||
|
|
||||||
emulator = pkgs:
|
emulator =
|
||||||
if (final.emulatorAvailable pkgs)
|
pkgs:
|
||||||
then selectEmulator pkgs
|
if (final.emulatorAvailable pkgs) then
|
||||||
else throw "Don't know how to run ${final.config} executables.";
|
selectEmulator pkgs
|
||||||
|
else
|
||||||
|
throw "Don't know how to run ${final.config} executables.";
|
||||||
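Pulling a few of the derived fields above together for one concrete platform (values follow directly from the selection chains shown):

  let
    lib = import ./lib;
    p   = lib.systems.elaborate "x86_64-linux";
  in {
    libc      = p.libc;       # "glibc"
    linuxArch = p.linuxArch;  # "x86_64"
    qemuArch  = p.qemuArch;   # "x86_64"
    libDir    = p.libDir;     # "lib64"
  }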
|
|
||||||
}) // mapAttrs (n: v: v final.parsed) inspect.predicates
|
}
|
||||||
|
)
|
||||||
|
// mapAttrs (n: v: v final.parsed) inspect.predicates
|
||||||
// mapAttrs (n: v: v final.gcc.arch or "default") architectures.predicates
|
// mapAttrs (n: v: v final.gcc.arch or "default") architectures.predicates
|
||||||
// args // {
|
// args
|
||||||
|
// {
|
||||||
rust = rust // {
|
rust = rust // {
|
||||||
# Once args.rustc.platform.target-family is deprecated and
|
# Once args.rustc.platform.target-family is deprecated and
|
||||||
# removed, there will no longer be any need to modify any
|
# removed, there will no longer be any need to modify any
|
||||||
# values from args.rust.platform, so we can drop all the
|
# values from args.rust.platform, so we can drop all the
|
||||||
# "args ? rust" etc. checks, and merge args.rust.platform in
|
# "args ? rust" etc. checks, and merge args.rust.platform in
|
||||||
# /after/.
|
# /after/.
|
||||||
platform = rust.platform or {} // {
|
platform = rust.platform or { } // {
|
||||||
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_arch
|
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_arch
|
||||||
arch =
|
arch =
|
||||||
/**/ if rust ? platform then rust.platform.arch
|
if rust ? platform then
|
||||||
else if final.isAarch32 then "arm"
|
rust.platform.arch
|
||||||
else if final.isMips64 then "mips64" # never add "el" suffix
|
else if final.isAarch32 then
|
||||||
else if final.isPower64 then "powerpc64" # never add "le" suffix
|
"arm"
|
||||||
else final.parsed.cpu.name;
|
else if final.isMips64 then
|
||||||
|
"mips64" # never add "el" suffix
|
||||||
|
else if final.isPower64 then
|
||||||
|
"powerpc64" # never add "le" suffix
|
||||||
|
else
|
||||||
|
final.parsed.cpu.name;
|
||||||
|
|
||||||
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_os
|
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_os
|
||||||
os =
|
os =
|
||||||
/**/ if rust ? platform then rust.platform.os or "none"
|
if rust ? platform then
|
||||||
else if final.isDarwin then "macos"
|
rust.platform.os or "none"
|
||||||
else final.parsed.kernel.name;
|
else if final.isDarwin then
|
||||||
|
"macos"
|
||||||
|
else
|
||||||
|
final.parsed.kernel.name;
|
||||||
|
|
||||||
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_family
|
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_family
|
||||||
target-family =
|
target-family =
|
||||||
/**/ if args ? rust.platform.target-family then args.rust.platform.target-family
|
if args ? rust.platform.target-family then
|
||||||
else if args ? rustc.platform.target-family
|
args.rust.platform.target-family
|
||||||
then
|
else if args ? rustc.platform.target-family then
|
||||||
(
|
(
|
||||||
# Since https://github.com/rust-lang/rust/pull/84072
|
# Since https://github.com/rust-lang/rust/pull/84072
|
||||||
# `target-family` is a list instead of single value.
|
# `target-family` is a list instead of single value.
|
||||||
|
@ -333,71 +428,79 @@ let
|
||||||
in
|
in
|
||||||
if isList f then f else [ f ]
|
if isList f then f else [ f ]
|
||||||
)
|
)
|
||||||
else optional final.isUnix "unix"
|
else
|
||||||
++ optional final.isWindows "windows";
|
optional final.isUnix "unix" ++ optional final.isWindows "windows";
|
||||||
|
|
||||||
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_vendor
|
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_vendor
|
||||||
vendor = let
|
vendor =
|
||||||
|
let
|
||||||
inherit (final.parsed) vendor;
|
inherit (final.parsed) vendor;
|
||||||
in rust.platform.vendor or {
|
in
|
||||||
|
rust.platform.vendor or {
|
||||||
"w64" = "pc";
|
"w64" = "pc";
|
||||||
}.${vendor.name} or vendor.name;
|
}
|
||||||
|
.${vendor.name} or vendor.name;
|
||||||
};
|
};
|
||||||
|
|
||||||
# The name of the rust target, even if it is custom. Adjustments are
|
# The name of the rust target, even if it is custom. Adjustments are
|
||||||
# because rust has slightly different naming conventions than we do.
|
# because rust has slightly different naming conventions than we do.
|
||||||
rustcTarget = let
|
rustcTarget =
|
||||||
|
let
|
||||||
inherit (final.parsed) cpu kernel abi;
|
inherit (final.parsed) cpu kernel abi;
|
||||||
cpu_ = rust.platform.arch or {
|
cpu_ =
|
||||||
|
rust.platform.arch or {
|
||||||
"armv7a" = "armv7";
|
"armv7a" = "armv7";
|
||||||
"armv7l" = "armv7";
|
"armv7l" = "armv7";
|
||||||
"armv6l" = "arm";
|
"armv6l" = "arm";
|
||||||
"armv5tel" = "armv5te";
|
"armv5tel" = "armv5te";
|
||||||
"riscv64" = "riscv64gc";
|
"riscv64" = "riscv64gc";
|
||||||
}.${cpu.name} or cpu.name;
|
}
|
||||||
|
.${cpu.name} or cpu.name;
|
||||||
vendor_ = final.rust.platform.vendor;
|
vendor_ = final.rust.platform.vendor;
|
||||||
|
in
|
||||||
# TODO: deprecate args.rustc in favour of args.rust after 23.05 is EOL.
|
# TODO: deprecate args.rustc in favour of args.rust after 23.05 is EOL.
|
||||||
in args.rust.rustcTarget or args.rustc.config
|
args.rust.rustcTarget or args.rustc.config
|
||||||
or "${cpu_}-${vendor_}-${kernel.name}${optionalString (abi.name != "unknown") "-${abi.name}"}";
|
or "${cpu_}-${vendor_}-${kernel.name}${optionalString (abi.name != "unknown") "-${abi.name}"}";
|
||||||
|
|
||||||
# The name of the rust target if it is standard, or the json file
|
# The name of the rust target if it is standard, or the json file
|
||||||
# containing the custom target spec.
|
# containing the custom target spec.
|
||||||
rustcTargetSpec = rust.rustcTargetSpec or (
|
rustcTargetSpec =
|
||||||
/**/ if rust ? platform
|
rust.rustcTargetSpec or (
|
||||||
then builtins.toFile (final.rust.rustcTarget + ".json") (toJSON rust.platform)
|
if rust ? platform then
|
||||||
else final.rust.rustcTarget);
|
builtins.toFile (final.rust.rustcTarget + ".json") (toJSON rust.platform)
|
||||||
|
else
|
||||||
|
final.rust.rustcTarget
|
||||||
|
);
|
||||||
|
|
||||||
# The name of the rust target if it is standard, or the
# The name of the rust target if it is standard, or the
# basename of the file containing the custom target spec,
# basename of the file containing the custom target spec,
# without the .json extension.
# without the .json extension.
#
#
# This is the name used by Cargo for target subdirectories.
# This is the name used by Cargo for target subdirectories.
cargoShortTarget =
cargoShortTarget = removeSuffix ".json" (baseNameOf "${final.rust.rustcTargetSpec}");
removeSuffix ".json" (baseNameOf "${final.rust.rustcTargetSpec}");
# When used as part of an environment variable name, triples are
# When used as part of an environment variable name, triples are
# uppercased and have all hyphens replaced by underscores:
# uppercased and have all hyphens replaced by underscores:
#
#
# https://github.com/rust-lang/cargo/pull/9169
# https://github.com/rust-lang/cargo/pull/9169
# https://github.com/rust-lang/cargo/issues/8285#issuecomment-634202431
# https://github.com/rust-lang/cargo/issues/8285#issuecomment-634202431
cargoEnvVarTarget =
cargoEnvVarTarget = replaceStrings [ "-" ] [ "_" ] (toUpper final.rust.cargoShortTarget);
replaceStrings ["-"] ["_"]
(toUpper final.rust.cargoShortTarget);
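A concrete illustration (not part of the change) of the uppercase/underscore rule above, for a hypothetical cargoShortTarget of x86_64-unknown-linux-gnu:

  # replaceStrings/toUpper exactly as used in the definition above:
  replaceStrings [ "-" ] [ "_" ] (toUpper "x86_64-unknown-linux-gnu")
  # => "X86_64_UNKNOWN_LINUX_GNU", the form Cargo expects in variables such as
  #    CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER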
# True if the target is no_std
# True if the target is no_std
# https://github.com/rust-lang/rust/blob/2e44c17c12cec45b6a682b1e53a04ac5b5fcc9d2/src/bootstrap/config.rs#L415-L421
# https://github.com/rust-lang/rust/blob/2e44c17c12cec45b6a682b1e53a04ac5b5fcc9d2/src/bootstrap/config.rs#L415-L421
isNoStdTarget =
isNoStdTarget = any (t: hasInfix t final.rust.rustcTarget) [
any (t: hasInfix t final.rust.rustcTarget) ["-none" "nvptx" "switch" "-uefi"];
"-none"
"nvptx"
"switch"
"-uefi"
];
};
};
};
};
in assert final.useAndroidPrebuilt -> final.isAndroid;
in
assert foldl
assert final.useAndroidPrebuilt -> final.isAndroid;
(pass: { assertion, message }:
assert foldl (pass: { assertion, message }: if assertion final then pass else throw message) true (
if assertion final
final.parsed.abi.assertions or [ ]
then pass
);
else throw message)
true
(final.parsed.abi.assertions or []);
final;
final;
|
|
||||||
in
|
in
|
||||||
|
|
|
@ -7,16 +7,23 @@ let
|
||||||
|
|
||||||
all = [
|
all = [
|
||||||
# Cygwin
|
# Cygwin
|
||||||
"i686-cygwin" "x86_64-cygwin"
|
"i686-cygwin"
|
||||||
|
"x86_64-cygwin"
|
||||||
|
|
||||||
# Darwin
|
# Darwin
|
||||||
"x86_64-darwin" "i686-darwin" "aarch64-darwin" "armv7a-darwin"
|
"x86_64-darwin"
|
||||||
|
"i686-darwin"
|
||||||
|
"aarch64-darwin"
|
||||||
|
"armv7a-darwin"
|
||||||
|
|
||||||
# FreeBSD
|
# FreeBSD
|
||||||
"i686-freebsd13" "x86_64-freebsd13"
|
"i686-freebsd13"
|
||||||
|
"x86_64-freebsd13"
|
||||||
|
|
||||||
# Genode
|
# Genode
|
||||||
"aarch64-genode" "i686-genode" "x86_64-genode"
|
"aarch64-genode"
|
||||||
|
"i686-genode"
|
||||||
|
"x86_64-genode"
|
||||||
|
|
||||||
# illumos
|
# illumos
|
||||||
"x86_64-solaris"
|
"x86_64-solaris"
|
||||||
|
@ -25,47 +32,93 @@ let
|
||||||
"javascript-ghcjs"
|
"javascript-ghcjs"
|
||||||
|
|
||||||
# Linux
|
# Linux
|
||||||
"aarch64-linux" "armv5tel-linux" "armv6l-linux" "armv7a-linux"
|
"aarch64-linux"
|
||||||
"armv7l-linux" "i686-linux" "loongarch64-linux" "m68k-linux" "microblaze-linux"
|
"armv5tel-linux"
|
||||||
"microblazeel-linux" "mips-linux" "mips64-linux" "mips64el-linux"
|
"armv6l-linux"
|
||||||
"mipsel-linux" "powerpc64-linux" "powerpc64le-linux" "riscv32-linux"
|
"armv7a-linux"
|
||||||
"riscv64-linux" "s390-linux" "s390x-linux" "x86_64-linux"
|
"armv7l-linux"
|
||||||
|
"i686-linux"
|
||||||
|
"loongarch64-linux"
|
||||||
|
"m68k-linux"
|
||||||
|
"microblaze-linux"
|
||||||
|
"microblazeel-linux"
|
||||||
|
"mips-linux"
|
||||||
|
"mips64-linux"
|
||||||
|
"mips64el-linux"
|
||||||
|
"mipsel-linux"
|
||||||
|
"powerpc64-linux"
|
||||||
|
"powerpc64le-linux"
|
||||||
|
"riscv32-linux"
|
||||||
|
"riscv64-linux"
|
||||||
|
"s390-linux"
|
||||||
|
"s390x-linux"
|
||||||
|
"x86_64-linux"
|
||||||
|
|
||||||
# MMIXware
|
# MMIXware
|
||||||
"mmix-mmixware"
|
"mmix-mmixware"
|
||||||
|
|
||||||
# NetBSD
|
# NetBSD
|
||||||
"aarch64-netbsd" "armv6l-netbsd" "armv7a-netbsd" "armv7l-netbsd"
|
"aarch64-netbsd"
|
||||||
"i686-netbsd" "m68k-netbsd" "mipsel-netbsd" "powerpc-netbsd"
|
"armv6l-netbsd"
|
||||||
"riscv32-netbsd" "riscv64-netbsd" "x86_64-netbsd"
|
"armv7a-netbsd"
|
||||||
|
"armv7l-netbsd"
|
||||||
|
"i686-netbsd"
|
||||||
|
"m68k-netbsd"
|
||||||
|
"mipsel-netbsd"
|
||||||
|
"powerpc-netbsd"
|
||||||
|
"riscv32-netbsd"
|
||||||
|
"riscv64-netbsd"
|
||||||
|
"x86_64-netbsd"
|
||||||
|
|
||||||
# none
|
# none
|
||||||
"aarch64_be-none" "aarch64-none" "arm-none" "armv6l-none" "avr-none" "i686-none"
|
"aarch64_be-none"
|
||||||
"microblaze-none" "microblazeel-none" "mips-none" "mips64-none" "msp430-none" "or1k-none" "m68k-none"
|
"aarch64-none"
|
||||||
"powerpc-none" "powerpcle-none" "riscv32-none" "riscv64-none" "rx-none"
|
"arm-none"
|
||||||
"s390-none" "s390x-none" "vc4-none" "x86_64-none"
|
"armv6l-none"
|
||||||
|
"avr-none"
|
||||||
|
"i686-none"
|
||||||
|
"microblaze-none"
|
||||||
|
"microblazeel-none"
|
||||||
|
"mips-none"
|
||||||
|
"mips64-none"
|
||||||
|
"msp430-none"
|
||||||
|
"or1k-none"
|
||||||
|
"m68k-none"
|
||||||
|
"powerpc-none"
|
||||||
|
"powerpcle-none"
|
||||||
|
"riscv32-none"
|
||||||
|
"riscv64-none"
|
||||||
|
"rx-none"
|
||||||
|
"s390-none"
|
||||||
|
"s390x-none"
|
||||||
|
"vc4-none"
|
||||||
|
"x86_64-none"
|
||||||
|
|
||||||
# OpenBSD
|
# OpenBSD
|
||||||
"i686-openbsd" "x86_64-openbsd"
|
"i686-openbsd"
|
||||||
|
"x86_64-openbsd"
|
||||||
|
|
||||||
# Redox
|
# Redox
|
||||||
"x86_64-redox"
|
"x86_64-redox"
|
||||||
|
|
||||||
# WASI
|
# WASI
|
||||||
"wasm64-wasi" "wasm32-wasi"
|
"wasm64-wasi"
|
||||||
|
"wasm32-wasi"
|
||||||
|
|
||||||
# Windows
|
# Windows
|
||||||
"x86_64-windows" "i686-windows"
|
"x86_64-windows"
|
||||||
|
"i686-windows"
|
||||||
];
|
];
|
||||||
|
|
||||||
allParsed = map parse.mkSystemFromString all;
|
allParsed = map parse.mkSystemFromString all;
|
||||||
|
|
||||||
filterDoubles = f: map parse.doubleFromSystem (lists.filter f allParsed);
|
filterDoubles = f: map parse.doubleFromSystem (lists.filter f allParsed);
|
||||||
|
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
inherit all;
|
inherit all;
|
||||||
|
|
||||||
none = [];
|
none = [ ];
|
||||||
|
|
||||||
arm = filterDoubles predicates.isAarch32;
|
arm = filterDoubles predicates.isAarch32;
|
||||||
armv7 = filterDoubles predicates.isArmv7;
|
armv7 = filterDoubles predicates.isArmv7;
|
||||||
|
@ -96,13 +149,35 @@ in {
|
||||||
darwin = filterDoubles predicates.isDarwin;
|
darwin = filterDoubles predicates.isDarwin;
|
||||||
freebsd = filterDoubles predicates.isFreeBSD;
|
freebsd = filterDoubles predicates.isFreeBSD;
|
||||||
# Should be better, but MinGW is unclear.
|
# Should be better, but MinGW is unclear.
|
||||||
gnu = filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnu; })
|
gnu =
|
||||||
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnueabi; })
|
filterDoubles (matchAttrs {
|
||||||
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnueabihf; })
|
kernel = parse.kernels.linux;
|
||||||
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnuabin32; })
|
abi = parse.abis.gnu;
|
||||||
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnuabi64; })
|
})
|
||||||
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnuabielfv1; })
|
++ filterDoubles (matchAttrs {
|
||||||
++ filterDoubles (matchAttrs { kernel = parse.kernels.linux; abi = parse.abis.gnuabielfv2; });
|
kernel = parse.kernels.linux;
|
||||||
|
abi = parse.abis.gnueabi;
|
||||||
|
})
|
||||||
|
++ filterDoubles (matchAttrs {
|
||||||
|
kernel = parse.kernels.linux;
|
||||||
|
abi = parse.abis.gnueabihf;
|
||||||
|
})
|
||||||
|
++ filterDoubles (matchAttrs {
|
||||||
|
kernel = parse.kernels.linux;
|
||||||
|
abi = parse.abis.gnuabin32;
|
||||||
|
})
|
||||||
|
++ filterDoubles (matchAttrs {
|
||||||
|
kernel = parse.kernels.linux;
|
||||||
|
abi = parse.abis.gnuabi64;
|
||||||
|
})
|
||||||
|
++ filterDoubles (matchAttrs {
|
||||||
|
kernel = parse.kernels.linux;
|
||||||
|
abi = parse.abis.gnuabielfv1;
|
||||||
|
})
|
||||||
|
++ filterDoubles (matchAttrs {
|
||||||
|
kernel = parse.kernels.linux;
|
||||||
|
abi = parse.abis.gnuabielfv2;
|
||||||
|
});
|
||||||
illumos = filterDoubles predicates.isSunOS;
|
illumos = filterDoubles predicates.isSunOS;
|
||||||
linux = filterDoubles predicates.isLinux;
|
linux = filterDoubles predicates.isLinux;
|
||||||
netbsd = filterDoubles predicates.isNetBSD;
|
netbsd = filterDoubles predicates.isNetBSD;
|
||||||
|
@ -115,5 +190,18 @@ in {
|
||||||
|
|
||||||
embedded = filterDoubles predicates.isNone;
|
embedded = filterDoubles predicates.isNone;
|
||||||
|
|
||||||
mesaPlatforms = ["i686-linux" "x86_64-linux" "x86_64-darwin" "armv5tel-linux" "armv6l-linux" "armv7l-linux" "armv7a-linux" "aarch64-linux" "powerpc64-linux" "powerpc64le-linux" "aarch64-darwin" "riscv64-linux"];
|
mesaPlatforms = [
|
||||||
|
"i686-linux"
|
||||||
|
"x86_64-linux"
|
||||||
|
"x86_64-darwin"
|
||||||
|
"armv5tel-linux"
|
||||||
|
"armv6l-linux"
|
||||||
|
"armv7l-linux"
|
||||||
|
"armv7a-linux"
|
||||||
|
"aarch64-linux"
|
||||||
|
"powerpc64-linux"
|
||||||
|
"powerpc64le-linux"
|
||||||
|
"aarch64-darwin"
|
||||||
|
"riscv64-linux"
|
||||||
|
];
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,9 +5,7 @@
|
||||||
let
|
let
|
||||||
platforms = import ./platforms.nix { inherit lib; };
|
platforms = import ./platforms.nix { inherit lib; };
|
||||||
|
|
||||||
riscv = bits: {
|
riscv = bits: { config = "riscv${bits}-unknown-linux-gnu"; };
|
||||||
config = "riscv${bits}-unknown-linux-gnu";
|
|
||||||
};
|
|
||||||
in
|
in
|
||||||
|
|
||||||
rec {
|
rec {
|
||||||
|
@ -26,7 +24,9 @@ rec {
|
||||||
};
|
};
|
||||||
ppc64-musl = {
|
ppc64-musl = {
|
||||||
config = "powerpc64-unknown-linux-musl";
|
config = "powerpc64-unknown-linux-musl";
|
||||||
gcc = { abi = "elfv2"; };
|
gcc = {
|
||||||
|
abi = "elfv2";
|
||||||
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
sheevaplug = {
|
sheevaplug = {
|
||||||
|
@ -95,16 +95,28 @@ rec {
|
||||||
} // platforms.fuloong2f_n32;
|
} // platforms.fuloong2f_n32;
|
||||||
|
|
||||||
# can execute on 32bit chip
|
# can execute on 32bit chip
|
||||||
mips-linux-gnu = { config = "mips-unknown-linux-gnu"; } // platforms.gcc_mips32r2_o32;
|
mips-linux-gnu = {
|
||||||
mipsel-linux-gnu = { config = "mipsel-unknown-linux-gnu"; } // platforms.gcc_mips32r2_o32;
|
config = "mips-unknown-linux-gnu";
|
||||||
|
} // platforms.gcc_mips32r2_o32;
|
||||||
|
mipsel-linux-gnu = {
|
||||||
|
config = "mipsel-unknown-linux-gnu";
|
||||||
|
} // platforms.gcc_mips32r2_o32;
|
||||||
|
|
||||||
# require 64bit chip (for more registers, 64-bit floating point, 64-bit "long long") but use 32bit pointers
|
# require 64bit chip (for more registers, 64-bit floating point, 64-bit "long long") but use 32bit pointers
|
||||||
mips64-linux-gnuabin32 = { config = "mips64-unknown-linux-gnuabin32"; } // platforms.gcc_mips64r2_n32;
|
mips64-linux-gnuabin32 = {
|
||||||
mips64el-linux-gnuabin32 = { config = "mips64el-unknown-linux-gnuabin32"; } // platforms.gcc_mips64r2_n32;
|
config = "mips64-unknown-linux-gnuabin32";
|
||||||
|
} // platforms.gcc_mips64r2_n32;
|
||||||
|
mips64el-linux-gnuabin32 = {
|
||||||
|
config = "mips64el-unknown-linux-gnuabin32";
|
||||||
|
} // platforms.gcc_mips64r2_n32;
|
||||||
|
|
||||||
# 64bit pointers
|
# 64bit pointers
|
||||||
mips64-linux-gnuabi64 = { config = "mips64-unknown-linux-gnuabi64"; } // platforms.gcc_mips64r2_64;
|
mips64-linux-gnuabi64 = {
|
||||||
mips64el-linux-gnuabi64 = { config = "mips64el-unknown-linux-gnuabi64"; } // platforms.gcc_mips64r2_64;
|
config = "mips64-unknown-linux-gnuabi64";
|
||||||
|
} // platforms.gcc_mips64r2_64;
|
||||||
|
mips64el-linux-gnuabi64 = {
|
||||||
|
config = "mips64el-unknown-linux-gnuabi64";
|
||||||
|
} // platforms.gcc_mips64r2_64;
|
||||||
|
|
||||||
muslpi = raspberryPi // {
|
muslpi = raspberryPi // {
|
||||||
config = "armv6l-unknown-linux-musleabihf";
|
config = "armv6l-unknown-linux-musleabihf";
|
||||||
|
@ -114,12 +126,20 @@ rec {
|
||||||
config = "aarch64-unknown-linux-musl";
|
config = "aarch64-unknown-linux-musl";
|
||||||
};
|
};
|
||||||
|
|
||||||
gnu64 = { config = "x86_64-unknown-linux-gnu"; };
|
gnu64 = {
|
||||||
|
config = "x86_64-unknown-linux-gnu";
|
||||||
|
};
|
||||||
gnu64_simplekernel = gnu64 // platforms.pc_simplekernel; # see test/cross/default.nix
|
gnu64_simplekernel = gnu64 // platforms.pc_simplekernel; # see test/cross/default.nix
|
||||||
gnu32 = { config = "i686-unknown-linux-gnu"; };
|
gnu32 = {
|
||||||
|
config = "i686-unknown-linux-gnu";
|
||||||
|
};
|
||||||
|
|
||||||
musl64 = { config = "x86_64-unknown-linux-musl"; };
|
musl64 = {
|
||||||
musl32 = { config = "i686-unknown-linux-musl"; };
|
config = "x86_64-unknown-linux-musl";
|
||||||
|
};
|
||||||
|
musl32 = {
|
||||||
|
config = "i686-unknown-linux-musl";
|
||||||
|
};
|
||||||
|
|
||||||
riscv64 = riscv "64";
|
riscv64 = riscv "64";
|
||||||
riscv32 = riscv "32";
|
riscv32 = riscv "32";
|
||||||
|
@ -294,13 +314,13 @@ rec {
|
||||||
aarch64-darwin = {
|
aarch64-darwin = {
|
||||||
config = "aarch64-apple-darwin";
|
config = "aarch64-apple-darwin";
|
||||||
xcodePlatform = "MacOSX";
|
xcodePlatform = "MacOSX";
|
||||||
platform = {};
|
platform = { };
|
||||||
};
|
};
|
||||||
|
|
||||||
x86_64-darwin = {
|
x86_64-darwin = {
|
||||||
config = "x86_64-apple-darwin";
|
config = "x86_64-apple-darwin";
|
||||||
xcodePlatform = "MacOSX";
|
xcodePlatform = "MacOSX";
|
||||||
platform = {};
|
platform = { };
|
||||||
};
|
};
|
||||||
|
|
||||||
#
|
#
|
||||||
|
|
|
@ -38,123 +38,434 @@ rec {
|
||||||
# `lib.attrsets.matchAttrs`, which requires a match on *all* attributes of
|
# `lib.attrsets.matchAttrs`, which requires a match on *all* attributes of
|
||||||
# the product.
|
# the product.
|
||||||
|
|
||||||
isi686 = { cpu = cpuTypes.i686; };
|
isi686 = {
|
||||||
isx86_32 = { cpu = { family = "x86"; bits = 32; }; };
|
cpu = cpuTypes.i686;
|
||||||
isx86_64 = { cpu = { family = "x86"; bits = 64; }; };
|
};
|
||||||
isPower = { cpu = { family = "power"; }; };
|
isx86_32 = {
|
||||||
isPower64 = { cpu = { family = "power"; bits = 64; }; };
|
cpu = {
|
||||||
|
family = "x86";
|
||||||
|
bits = 32;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isx86_64 = {
|
||||||
|
cpu = {
|
||||||
|
family = "x86";
|
||||||
|
bits = 64;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isPower = {
|
||||||
|
cpu = {
|
||||||
|
family = "power";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isPower64 = {
|
||||||
|
cpu = {
|
||||||
|
family = "power";
|
||||||
|
bits = 64;
|
||||||
|
};
|
||||||
|
};
|
||||||
# This ABI is the default in NixOS PowerPC64 BE, but not on mainline GCC,
|
# This ABI is the default in NixOS PowerPC64 BE, but not on mainline GCC,
|
||||||
# so it sometimes causes issues in certain packages that makes the wrong
|
# so it sometimes causes issues in certain packages that makes the wrong
|
||||||
# assumption on the used ABI.
|
# assumption on the used ABI.
|
||||||
isAbiElfv2 = [
|
isAbiElfv2 = [
|
||||||
{ abi = { abi = "elfv2"; }; }
|
{
|
||||||
{ abi = { name = "musl"; }; cpu = { family = "power"; bits = 64; }; }
|
abi = {
|
||||||
|
abi = "elfv2";
|
||||||
|
};
|
||||||
|
}
|
||||||
|
{
|
||||||
|
abi = {
|
||||||
|
name = "musl";
|
||||||
|
};
|
||||||
|
cpu = {
|
||||||
|
family = "power";
|
||||||
|
bits = 64;
|
||||||
|
};
|
||||||
|
}
|
||||||
];
|
];
|
||||||
isx86 = { cpu = { family = "x86"; }; };
|
isx86 = {
|
||||||
isAarch32 = { cpu = { family = "arm"; bits = 32; }; };
|
cpu = {
|
||||||
isArmv7 = map ({ arch, ... }: { cpu = { inherit arch; }; })
|
family = "x86";
|
||||||
(filter (cpu: hasPrefix "armv7" cpu.arch or "")
|
};
|
||||||
(attrValues cpuTypes));
|
};
|
||||||
isAarch64 = { cpu = { family = "arm"; bits = 64; }; };
|
isAarch32 = {
|
||||||
isAarch = { cpu = { family = "arm"; }; };
|
cpu = {
|
||||||
isMicroBlaze = { cpu = { family = "microblaze"; }; };
|
family = "arm";
|
||||||
isMips = { cpu = { family = "mips"; }; };
|
bits = 32;
|
||||||
isMips32 = { cpu = { family = "mips"; bits = 32; }; };
|
};
|
||||||
isMips64 = { cpu = { family = "mips"; bits = 64; }; };
|
};
|
||||||
isMips64n32 = { cpu = { family = "mips"; bits = 64; }; abi = { abi = "n32"; }; };
|
isArmv7 = map (
|
||||||
isMips64n64 = { cpu = { family = "mips"; bits = 64; }; abi = { abi = "64"; }; };
|
{ arch, ... }:
|
||||||
isMmix = { cpu = { family = "mmix"; }; };
|
{
|
||||||
isRiscV = { cpu = { family = "riscv"; }; };
|
cpu = {
|
||||||
isRiscV32 = { cpu = { family = "riscv"; bits = 32; }; };
|
inherit arch;
|
||||||
isRiscV64 = { cpu = { family = "riscv"; bits = 64; }; };
|
};
|
||||||
isRx = { cpu = { family = "rx"; }; };
|
}
|
||||||
isSparc = { cpu = { family = "sparc"; }; };
|
) (filter (cpu: hasPrefix "armv7" cpu.arch or "") (attrValues cpuTypes));
|
||||||
isSparc64 = { cpu = { family = "sparc"; bits = 64; }; };
|
isAarch64 = {
|
||||||
isWasm = { cpu = { family = "wasm"; }; };
|
cpu = {
|
||||||
isMsp430 = { cpu = { family = "msp430"; }; };
|
family = "arm";
|
||||||
isVc4 = { cpu = { family = "vc4"; }; };
|
bits = 64;
|
||||||
isAvr = { cpu = { family = "avr"; }; };
|
};
|
||||||
isAlpha = { cpu = { family = "alpha"; }; };
|
};
|
||||||
isOr1k = { cpu = { family = "or1k"; }; };
|
isAarch = {
|
||||||
isM68k = { cpu = { family = "m68k"; }; };
|
cpu = {
|
||||||
isS390 = { cpu = { family = "s390"; }; };
|
family = "arm";
|
||||||
isS390x = { cpu = { family = "s390"; bits = 64; }; };
|
};
|
||||||
isLoongArch64 = { cpu = { family = "loongarch"; bits = 64; }; };
|
};
|
||||||
isJavaScript = { cpu = cpuTypes.javascript; };
|
isMicroBlaze = {
|
||||||
|
cpu = {
|
||||||
|
family = "microblaze";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isMips = {
|
||||||
|
cpu = {
|
||||||
|
family = "mips";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isMips32 = {
|
||||||
|
cpu = {
|
||||||
|
family = "mips";
|
||||||
|
bits = 32;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isMips64 = {
|
||||||
|
cpu = {
|
||||||
|
family = "mips";
|
||||||
|
bits = 64;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isMips64n32 = {
|
||||||
|
cpu = {
|
||||||
|
family = "mips";
|
||||||
|
bits = 64;
|
||||||
|
};
|
||||||
|
abi = {
|
||||||
|
abi = "n32";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isMips64n64 = {
|
||||||
|
cpu = {
|
||||||
|
family = "mips";
|
||||||
|
bits = 64;
|
||||||
|
};
|
||||||
|
abi = {
|
||||||
|
abi = "64";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isMmix = {
|
||||||
|
cpu = {
|
||||||
|
family = "mmix";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isRiscV = {
|
||||||
|
cpu = {
|
||||||
|
family = "riscv";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isRiscV32 = {
|
||||||
|
cpu = {
|
||||||
|
family = "riscv";
|
||||||
|
bits = 32;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isRiscV64 = {
|
||||||
|
cpu = {
|
||||||
|
family = "riscv";
|
||||||
|
bits = 64;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isRx = {
|
||||||
|
cpu = {
|
||||||
|
family = "rx";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isSparc = {
|
||||||
|
cpu = {
|
||||||
|
family = "sparc";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isSparc64 = {
|
||||||
|
cpu = {
|
||||||
|
family = "sparc";
|
||||||
|
bits = 64;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isWasm = {
|
||||||
|
cpu = {
|
||||||
|
family = "wasm";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isMsp430 = {
|
||||||
|
cpu = {
|
||||||
|
family = "msp430";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isVc4 = {
|
||||||
|
cpu = {
|
||||||
|
family = "vc4";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isAvr = {
|
||||||
|
cpu = {
|
||||||
|
family = "avr";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isAlpha = {
|
||||||
|
cpu = {
|
||||||
|
family = "alpha";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isOr1k = {
|
||||||
|
cpu = {
|
||||||
|
family = "or1k";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isM68k = {
|
||||||
|
cpu = {
|
||||||
|
family = "m68k";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isS390 = {
|
||||||
|
cpu = {
|
||||||
|
family = "s390";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isS390x = {
|
||||||
|
cpu = {
|
||||||
|
family = "s390";
|
||||||
|
bits = 64;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isLoongArch64 = {
|
||||||
|
cpu = {
|
||||||
|
family = "loongarch";
|
||||||
|
bits = 64;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isJavaScript = {
|
||||||
|
cpu = cpuTypes.javascript;
|
||||||
|
};
|
||||||
|
|
||||||
is32bit = { cpu = { bits = 32; }; };
|
is32bit = {
|
||||||
is64bit = { cpu = { bits = 64; }; };
|
cpu = {
|
||||||
isILP32 = [ { cpu = { family = "wasm"; bits = 32; }; } ] ++
|
bits = 32;
|
||||||
map (a: { abi = { abi = a; }; }) [ "n32" "ilp32" "x32" ];
|
};
|
||||||
isBigEndian = { cpu = { significantByte = significantBytes.bigEndian; }; };
|
};
|
||||||
isLittleEndian = { cpu = { significantByte = significantBytes.littleEndian; }; };
|
is64bit = {
|
||||||
|
cpu = {
|
||||||
|
bits = 64;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isILP32 =
|
||||||
|
[
|
||||||
|
{
|
||||||
|
cpu = {
|
||||||
|
family = "wasm";
|
||||||
|
bits = 32;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
]
|
||||||
|
++ map
|
||||||
|
(a: {
|
||||||
|
abi = {
|
||||||
|
abi = a;
|
||||||
|
};
|
||||||
|
})
|
||||||
|
[
|
||||||
|
"n32"
|
||||||
|
"ilp32"
|
||||||
|
"x32"
|
||||||
|
];
|
||||||
|
isBigEndian = {
|
||||||
|
cpu = {
|
||||||
|
significantByte = significantBytes.bigEndian;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isLittleEndian = {
|
||||||
|
cpu = {
|
||||||
|
significantByte = significantBytes.littleEndian;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
isBSD = { kernel = { families = { inherit (kernelFamilies) bsd; }; }; };
|
isBSD = {
|
||||||
isDarwin = { kernel = { families = { inherit (kernelFamilies) darwin; }; }; };
|
kernel = {
|
||||||
isUnix = [ isBSD isDarwin isLinux isSunOS isCygwin isRedox ];
|
families = {
|
||||||
|
inherit (kernelFamilies) bsd;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isDarwin = {
|
||||||
|
kernel = {
|
||||||
|
families = {
|
||||||
|
inherit (kernelFamilies) darwin;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isUnix = [
|
||||||
|
isBSD
|
||||||
|
isDarwin
|
||||||
|
isLinux
|
||||||
|
isSunOS
|
||||||
|
isCygwin
|
||||||
|
isRedox
|
||||||
|
];
|
||||||
|
|
||||||
isMacOS = { kernel = kernels.macos; };
|
isMacOS = {
|
||||||
isiOS = { kernel = kernels.ios; };
|
kernel = kernels.macos;
|
||||||
isLinux = { kernel = kernels.linux; };
|
};
|
||||||
isSunOS = { kernel = kernels.solaris; };
|
isiOS = {
|
||||||
isFreeBSD = { kernel = { name = "freebsd"; }; };
|
kernel = kernels.ios;
|
||||||
isNetBSD = { kernel = kernels.netbsd; };
|
};
|
||||||
isOpenBSD = { kernel = kernels.openbsd; };
|
isLinux = {
|
||||||
isWindows = { kernel = kernels.windows; };
|
kernel = kernels.linux;
|
||||||
isCygwin = { kernel = kernels.windows; abi = abis.cygnus; };
|
};
|
||||||
isMinGW = { kernel = kernels.windows; abi = abis.gnu; };
|
isSunOS = {
|
||||||
isWasi = { kernel = kernels.wasi; };
|
kernel = kernels.solaris;
|
||||||
isRedox = { kernel = kernels.redox; };
|
};
|
||||||
isGhcjs = { kernel = kernels.ghcjs; };
|
isFreeBSD = {
|
||||||
isGenode = { kernel = kernels.genode; };
|
kernel = {
|
||||||
isNone = { kernel = kernels.none; };
|
name = "freebsd";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
isNetBSD = {
|
||||||
|
kernel = kernels.netbsd;
|
||||||
|
};
|
||||||
|
isOpenBSD = {
|
||||||
|
kernel = kernels.openbsd;
|
||||||
|
};
|
||||||
|
isWindows = {
|
||||||
|
kernel = kernels.windows;
|
||||||
|
};
|
||||||
|
isCygwin = {
|
||||||
|
kernel = kernels.windows;
|
||||||
|
abi = abis.cygnus;
|
||||||
|
};
|
||||||
|
isMinGW = {
|
||||||
|
kernel = kernels.windows;
|
||||||
|
abi = abis.gnu;
|
||||||
|
};
|
||||||
|
isWasi = {
|
||||||
|
kernel = kernels.wasi;
|
||||||
|
};
|
||||||
|
isRedox = {
|
||||||
|
kernel = kernels.redox;
|
||||||
|
};
|
||||||
|
isGhcjs = {
|
||||||
|
kernel = kernels.ghcjs;
|
||||||
|
};
|
||||||
|
isGenode = {
|
||||||
|
kernel = kernels.genode;
|
||||||
|
};
|
||||||
|
isNone = {
|
||||||
|
kernel = kernels.none;
|
||||||
|
};
|
||||||
|
|
||||||
isAndroid = [ { abi = abis.android; } { abi = abis.androideabi; } ];
|
isAndroid = [
|
||||||
isGnu = with abis; map (a: { abi = a; }) [ gnuabi64 gnuabin32 gnu gnueabi gnueabihf gnuabielfv1 gnuabielfv2 ];
|
{ abi = abis.android; }
|
||||||
isMusl = with abis; map (a: { abi = a; }) [ musl musleabi musleabihf muslabin32 muslabi64 ];
|
{ abi = abis.androideabi; }
|
||||||
isUClibc = with abis; map (a: { abi = a; }) [ uclibc uclibceabi uclibceabihf ];
|
];
|
||||||
|
isGnu =
|
||||||
|
with abis;
|
||||||
|
map (a: { abi = a; }) [
|
||||||
|
gnuabi64
|
||||||
|
gnuabin32
|
||||||
|
gnu
|
||||||
|
gnueabi
|
||||||
|
gnueabihf
|
||||||
|
gnuabielfv1
|
||||||
|
gnuabielfv2
|
||||||
|
];
|
||||||
|
isMusl =
|
||||||
|
with abis;
|
||||||
|
map (a: { abi = a; }) [
|
||||||
|
musl
|
||||||
|
musleabi
|
||||||
|
musleabihf
|
||||||
|
muslabin32
|
||||||
|
muslabi64
|
||||||
|
];
|
||||||
|
isUClibc =
|
||||||
|
with abis;
|
||||||
|
map (a: { abi = a; }) [
|
||||||
|
uclibc
|
||||||
|
uclibceabi
|
||||||
|
uclibceabihf
|
||||||
|
];
|
||||||
|
|
||||||
isEfi = [
|
isEfi = [
|
||||||
{ cpu = { family = "arm"; version = "6"; }; }
|
{
|
||||||
{ cpu = { family = "arm"; version = "7"; }; }
|
cpu = {
|
||||||
{ cpu = { family = "arm"; version = "8"; }; }
|
family = "arm";
|
||||||
{ cpu = { family = "riscv"; }; }
|
version = "6";
|
||||||
{ cpu = { family = "x86"; }; }
|
};
|
||||||
|
}
|
||||||
|
{
|
||||||
|
cpu = {
|
||||||
|
family = "arm";
|
||||||
|
version = "7";
|
||||||
|
};
|
||||||
|
}
|
||||||
|
{
|
||||||
|
cpu = {
|
||||||
|
family = "arm";
|
||||||
|
version = "8";
|
||||||
|
};
|
||||||
|
}
|
||||||
|
{
|
||||||
|
cpu = {
|
||||||
|
family = "riscv";
|
||||||
|
};
|
||||||
|
}
|
||||||
|
{
|
||||||
|
cpu = {
|
||||||
|
family = "x86";
|
||||||
|
};
|
||||||
|
}
|
||||||
];
|
];
|
||||||
|
|
||||||
isElf = { kernel.execFormat = execFormats.elf; };
|
isElf = {
|
||||||
isMacho = { kernel.execFormat = execFormats.macho; };
|
kernel.execFormat = execFormats.elf;
|
||||||
|
};
|
||||||
|
isMacho = {
|
||||||
|
kernel.execFormat = execFormats.macho;
|
||||||
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
# given two patterns, return a pattern which is their logical AND.
|
# given two patterns, return a pattern which is their logical AND.
|
||||||
# Since a pattern is a list-of-disjuncts, this needs to
|
# Since a pattern is a list-of-disjuncts, this needs to
|
||||||
patternLogicalAnd = pat1_: pat2_:
|
patternLogicalAnd =
|
||||||
|
pat1_: pat2_:
|
||||||
let
|
let
|
||||||
# patterns can be either a list or a (bare) singleton; turn
|
# patterns can be either a list or a (bare) singleton; turn
|
||||||
# them into singletons for uniform handling
|
# them into singletons for uniform handling
|
||||||
pat1 = toList pat1_;
|
pat1 = toList pat1_;
|
||||||
pat2 = toList pat2_;
|
pat2 = toList pat2_;
|
||||||
in
|
in
|
||||||
concatMap (attr1:
|
concatMap (
|
||||||
map (attr2:
|
attr1:
|
||||||
recursiveUpdateUntil
|
map (
|
||||||
(path: subattr1: subattr2:
|
attr2:
|
||||||
if (builtins.intersectAttrs subattr1 subattr2) == {} || subattr1 == subattr2
|
recursiveUpdateUntil (
|
||||||
then true
|
path: subattr1: subattr2:
|
||||||
else throw ''
|
if (builtins.intersectAttrs subattr1 subattr2) == { } || subattr1 == subattr2 then
|
||||||
|
true
|
||||||
|
else
|
||||||
|
throw ''
|
||||||
pattern conflict at path ${toString path}:
|
pattern conflict at path ${toString path}:
|
||||||
${toJSON subattr1}
|
${toJSON subattr1}
|
||||||
${toJSON subattr2}
|
${toJSON subattr2}
|
||||||
'')
|
''
|
||||||
attr1
|
) attr1 attr2
|
||||||
attr2
|
) pat2
|
||||||
)
|
) pat1;
|
||||||
pat2)
|
|
||||||
pat1;
|
|
||||||
|
|
||||||
matchAnyAttrs = patterns:
matchAnyAttrs =
if isList patterns then attrs: any (pattern: matchAttrs pattern attrs) patterns
patterns:
else matchAttrs patterns;
if isList patterns then
attrs: any (pattern: matchAttrs pattern attrs) patterns
else
matchAttrs patterns;
predicates = mapAttrs (_: matchAnyAttrs) patterns;
predicates = mapAttrs (_: matchAnyAttrs) patterns;
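A small sketch (not from the diff) of how these predicates behave, assuming the usual lib.systems.parse/inspect layout and `lib` in scope:

  let
    parsed = lib.systems.parse.mkSystemFromString "aarch64-unknown-linux-gnu";
  in
  # A bare attrset pattern is matched directly; a list of patterns (e.g. isAndroid)
  # means "any of these patterns matches".
  lib.systems.inspect.predicates.isAarch64 parsed # => true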
|
|
||||||
|
@ -163,7 +474,9 @@ rec {
|
||||||
# that `lib.meta.availableOn` can distinguish them from the patterns which
|
# that `lib.meta.availableOn` can distinguish them from the patterns which
|
||||||
# apply only to the `parsed` field.
|
# apply only to the `parsed` field.
|
||||||
|
|
||||||
platformPatterns = mapAttrs (_: p: { parsed = {}; } // p) {
|
platformPatterns = mapAttrs (_: p: { parsed = { }; } // p) {
|
||||||
isStatic = { isStatic = true; };
|
isStatic = {
|
||||||
|
isStatic = true;
|
||||||
|
};
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
@ -55,19 +55,23 @@ let
|
||||||
types
|
types
|
||||||
;
|
;
|
||||||
|
|
||||||
setTypes = type:
|
setTypes =
|
||||||
mapAttrs (name: value:
|
type:
|
||||||
|
mapAttrs (
|
||||||
|
name: value:
|
||||||
assert type.check value;
|
assert type.check value;
|
||||||
setType type.name ({ inherit name; } // value));
|
setType type.name ({ inherit name; } // value)
|
||||||
|
);
|
||||||
|
|
||||||
# gnu-config will ignore the portion of a triple matching the
# gnu-config will ignore the portion of a triple matching the
# regex `e?abi.*$` when determining the validity of a triple. In
# regex `e?abi.*$` when determining the validity of a triple. In
# other words, `i386-linuxabichickenlips` is a valid triple.
# other words, `i386-linuxabichickenlips` is a valid triple.
removeAbiSuffix = x:
removeAbiSuffix =
let found = match "(.*)e?abi.*" x;
x:
in if found == null
let
then x
found = match "(.*)e?abi.*" x;
else elemAt found 0;
in
if found == null then x else elemAt found 0;
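Worked example (illustration only, matching the comment above):

  removeAbiSuffix "linuxabichickenlips" # => "linux"
  removeAbiSuffix "linux"               # => "linux" (no match, returned unchanged)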
|
|
||||||
in
|
in
|
||||||
|
|
||||||
|
@ -84,14 +88,20 @@ rec {
|
||||||
types.significantByte = enum (attrValues significantBytes);
|
types.significantByte = enum (attrValues significantBytes);
|
||||||
|
|
||||||
significantBytes = setTypes types.openSignificantByte {
|
significantBytes = setTypes types.openSignificantByte {
|
||||||
bigEndian = {};
|
bigEndian = { };
|
||||||
littleEndian = {};
|
littleEndian = { };
|
||||||
};
|
};
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
|
|
||||||
# Reasonable power of 2
|
# Reasonable power of 2
|
||||||
types.bitWidth = enum [ 8 16 32 64 128 ];
|
types.bitWidth = enum [
|
||||||
|
8
|
||||||
|
16
|
||||||
|
32
|
||||||
|
64
|
||||||
|
128
|
||||||
|
];
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
|
|
||||||
|
@ -99,87 +109,307 @@ rec {
|
||||||
name = "cpu-type";
|
name = "cpu-type";
|
||||||
description = "instruction set architecture name and information";
|
description = "instruction set architecture name and information";
|
||||||
merge = mergeOneOption;
|
merge = mergeOneOption;
|
||||||
check = x: types.bitWidth.check x.bits
|
check =
|
||||||
&& (if 8 < x.bits
|
x:
|
||||||
then types.significantByte.check x.significantByte
|
types.bitWidth.check x.bits
|
||||||
else !(x ? significantByte));
|
&& (if 8 < x.bits then types.significantByte.check x.significantByte else !(x ? significantByte));
|
||||||
};
|
};
|
||||||
|
|
||||||
types.cpuType = enum (attrValues cpuTypes);
|
types.cpuType = enum (attrValues cpuTypes);
|
||||||
|
|
||||||
cpuTypes = let inherit (significantBytes) bigEndian littleEndian; in setTypes types.openCpuType {
|
cpuTypes =
|
||||||
arm = { bits = 32; significantByte = littleEndian; family = "arm"; };
|
let
|
||||||
armv5tel = { bits = 32; significantByte = littleEndian; family = "arm"; version = "5"; arch = "armv5t"; };
|
inherit (significantBytes) bigEndian littleEndian;
|
||||||
armv6m = { bits = 32; significantByte = littleEndian; family = "arm"; version = "6"; arch = "armv6-m"; };
|
in
|
||||||
armv6l = { bits = 32; significantByte = littleEndian; family = "arm"; version = "6"; arch = "armv6"; };
|
setTypes types.openCpuType {
|
||||||
armv7a = { bits = 32; significantByte = littleEndian; family = "arm"; version = "7"; arch = "armv7-a"; };
|
arm = {
|
||||||
armv7r = { bits = 32; significantByte = littleEndian; family = "arm"; version = "7"; arch = "armv7-r"; };
|
bits = 32;
|
||||||
armv7m = { bits = 32; significantByte = littleEndian; family = "arm"; version = "7"; arch = "armv7-m"; };
|
significantByte = littleEndian;
|
||||||
armv7l = { bits = 32; significantByte = littleEndian; family = "arm"; version = "7"; arch = "armv7"; };
|
family = "arm";
|
||||||
armv8a = { bits = 32; significantByte = littleEndian; family = "arm"; version = "8"; arch = "armv8-a"; };
|
};
|
||||||
armv8r = { bits = 32; significantByte = littleEndian; family = "arm"; version = "8"; arch = "armv8-a"; };
|
armv5tel = {
|
||||||
armv8m = { bits = 32; significantByte = littleEndian; family = "arm"; version = "8"; arch = "armv8-m"; };
|
bits = 32;
|
||||||
aarch64 = { bits = 64; significantByte = littleEndian; family = "arm"; version = "8"; arch = "armv8-a"; };
|
significantByte = littleEndian;
|
||||||
aarch64_be = { bits = 64; significantByte = bigEndian; family = "arm"; version = "8"; arch = "armv8-a"; };
|
family = "arm";
|
||||||
|
version = "5";
|
||||||
|
arch = "armv5t";
|
||||||
|
};
|
||||||
|
armv6m = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "arm";
|
||||||
|
version = "6";
|
||||||
|
arch = "armv6-m";
|
||||||
|
};
|
||||||
|
armv6l = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "arm";
|
||||||
|
version = "6";
|
||||||
|
arch = "armv6";
|
||||||
|
};
|
||||||
|
armv7a = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "arm";
|
||||||
|
version = "7";
|
||||||
|
arch = "armv7-a";
|
||||||
|
};
|
||||||
|
armv7r = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "arm";
|
||||||
|
version = "7";
|
||||||
|
arch = "armv7-r";
|
||||||
|
};
|
||||||
|
armv7m = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "arm";
|
||||||
|
version = "7";
|
||||||
|
arch = "armv7-m";
|
||||||
|
};
|
||||||
|
armv7l = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "arm";
|
||||||
|
version = "7";
|
||||||
|
arch = "armv7";
|
||||||
|
};
|
||||||
|
armv8a = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "arm";
|
||||||
|
version = "8";
|
||||||
|
arch = "armv8-a";
|
||||||
|
};
|
||||||
|
armv8r = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "arm";
|
||||||
|
version = "8";
|
||||||
|
arch = "armv8-a";
|
||||||
|
};
|
||||||
|
armv8m = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "arm";
|
||||||
|
version = "8";
|
||||||
|
arch = "armv8-m";
|
||||||
|
};
|
||||||
|
aarch64 = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "arm";
|
||||||
|
version = "8";
|
||||||
|
arch = "armv8-a";
|
||||||
|
};
|
||||||
|
aarch64_be = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = bigEndian;
|
||||||
|
family = "arm";
|
||||||
|
version = "8";
|
||||||
|
arch = "armv8-a";
|
||||||
|
};
|
||||||
|
|
||||||
i386 = { bits = 32; significantByte = littleEndian; family = "x86"; arch = "i386"; };
|
i386 = {
|
||||||
i486 = { bits = 32; significantByte = littleEndian; family = "x86"; arch = "i486"; };
|
bits = 32;
|
||||||
i586 = { bits = 32; significantByte = littleEndian; family = "x86"; arch = "i586"; };
|
significantByte = littleEndian;
|
||||||
i686 = { bits = 32; significantByte = littleEndian; family = "x86"; arch = "i686"; };
|
family = "x86";
|
||||||
x86_64 = { bits = 64; significantByte = littleEndian; family = "x86"; arch = "x86-64"; };
|
arch = "i386";
|
||||||
|
};
|
||||||
|
i486 = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "x86";
|
||||||
|
arch = "i486";
|
||||||
|
};
|
||||||
|
i586 = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "x86";
|
||||||
|
arch = "i586";
|
||||||
|
};
|
||||||
|
i686 = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "x86";
|
||||||
|
arch = "i686";
|
||||||
|
};
|
||||||
|
x86_64 = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "x86";
|
||||||
|
arch = "x86-64";
|
||||||
|
};
|
||||||
|
|
||||||
microblaze = { bits = 32; significantByte = bigEndian; family = "microblaze"; };
|
microblaze = {
|
||||||
microblazeel = { bits = 32; significantByte = littleEndian; family = "microblaze"; };
|
bits = 32;
|
||||||
|
significantByte = bigEndian;
|
||||||
|
family = "microblaze";
|
||||||
|
};
|
||||||
|
microblazeel = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "microblaze";
|
||||||
|
};
|
||||||
|
|
||||||
mips = { bits = 32; significantByte = bigEndian; family = "mips"; };
|
mips = {
|
||||||
mipsel = { bits = 32; significantByte = littleEndian; family = "mips"; };
|
bits = 32;
|
||||||
mips64 = { bits = 64; significantByte = bigEndian; family = "mips"; };
|
significantByte = bigEndian;
|
||||||
mips64el = { bits = 64; significantByte = littleEndian; family = "mips"; };
|
family = "mips";
|
||||||
|
};
|
||||||
|
mipsel = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "mips";
|
||||||
|
};
|
||||||
|
mips64 = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = bigEndian;
|
||||||
|
family = "mips";
|
||||||
|
};
|
||||||
|
mips64el = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "mips";
|
||||||
|
};
|
||||||
|
|
||||||
mmix = { bits = 64; significantByte = bigEndian; family = "mmix"; };
|
mmix = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = bigEndian;
|
||||||
|
family = "mmix";
|
||||||
|
};
|
||||||
|
|
||||||
m68k = { bits = 32; significantByte = bigEndian; family = "m68k"; };
|
m68k = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = bigEndian;
|
||||||
|
family = "m68k";
|
||||||
|
};
|
||||||
|
|
||||||
powerpc = { bits = 32; significantByte = bigEndian; family = "power"; };
|
powerpc = {
|
||||||
powerpc64 = { bits = 64; significantByte = bigEndian; family = "power"; };
|
bits = 32;
|
||||||
powerpc64le = { bits = 64; significantByte = littleEndian; family = "power"; };
|
significantByte = bigEndian;
|
||||||
powerpcle = { bits = 32; significantByte = littleEndian; family = "power"; };
|
family = "power";
|
||||||
|
};
|
||||||
|
powerpc64 = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = bigEndian;
|
||||||
|
family = "power";
|
||||||
|
};
|
||||||
|
powerpc64le = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "power";
|
||||||
|
};
|
||||||
|
powerpcle = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "power";
|
||||||
|
};
|
||||||
|
|
||||||
riscv32 = { bits = 32; significantByte = littleEndian; family = "riscv"; };
|
riscv32 = {
|
||||||
riscv64 = { bits = 64; significantByte = littleEndian; family = "riscv"; };
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "riscv";
|
||||||
|
};
|
||||||
|
riscv64 = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "riscv";
|
||||||
|
};
|
||||||
|
|
||||||
s390 = { bits = 32; significantByte = bigEndian; family = "s390"; };
|
s390 = {
|
||||||
s390x = { bits = 64; significantByte = bigEndian; family = "s390"; };
|
bits = 32;
|
||||||
|
significantByte = bigEndian;
|
||||||
|
family = "s390";
|
||||||
|
};
|
||||||
|
s390x = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = bigEndian;
|
||||||
|
family = "s390";
|
||||||
|
};
|
||||||
|
|
||||||
sparc = { bits = 32; significantByte = bigEndian; family = "sparc"; };
|
sparc = {
|
||||||
sparc64 = { bits = 64; significantByte = bigEndian; family = "sparc"; };
|
bits = 32;
|
||||||
|
significantByte = bigEndian;
|
||||||
|
family = "sparc";
|
||||||
|
};
|
||||||
|
sparc64 = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = bigEndian;
|
||||||
|
family = "sparc";
|
||||||
|
};
|
||||||
|
|
||||||
wasm32 = { bits = 32; significantByte = littleEndian; family = "wasm"; };
|
wasm32 = {
|
||||||
wasm64 = { bits = 64; significantByte = littleEndian; family = "wasm"; };
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "wasm";
|
||||||
|
};
|
||||||
|
wasm64 = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "wasm";
|
||||||
|
};
|
||||||
|
|
||||||
alpha = { bits = 64; significantByte = littleEndian; family = "alpha"; };
|
alpha = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "alpha";
|
||||||
|
};
|
||||||
|
|
||||||
rx = { bits = 32; significantByte = littleEndian; family = "rx"; };
|
rx = {
|
||||||
msp430 = { bits = 16; significantByte = littleEndian; family = "msp430"; };
|
bits = 32;
|
||||||
avr = { bits = 8; family = "avr"; };
|
significantByte = littleEndian;
|
||||||
|
family = "rx";
|
||||||
|
};
|
||||||
|
msp430 = {
|
||||||
|
bits = 16;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "msp430";
|
||||||
|
};
|
||||||
|
avr = {
|
||||||
|
bits = 8;
|
||||||
|
family = "avr";
|
||||||
|
};
|
||||||
|
|
||||||
vc4 = { bits = 32; significantByte = littleEndian; family = "vc4"; };
|
vc4 = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "vc4";
|
||||||
|
};
|
||||||
|
|
||||||
or1k = { bits = 32; significantByte = bigEndian; family = "or1k"; };
|
or1k = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = bigEndian;
|
||||||
|
family = "or1k";
|
||||||
|
};
|
||||||
|
|
||||||
loongarch64 = { bits = 64; significantByte = littleEndian; family = "loongarch"; };
|
loongarch64 = {
|
||||||
|
bits = 64;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "loongarch";
|
||||||
|
};
|
||||||
|
|
||||||
javascript = { bits = 32; significantByte = littleEndian; family = "javascript"; };
|
javascript = {
|
||||||
|
bits = 32;
|
||||||
|
significantByte = littleEndian;
|
||||||
|
family = "javascript";
|
||||||
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
# GNU build systems assume that older NetBSD architectures are using a.out.
# GNU build systems assume that older NetBSD architectures are using a.out.
gnuNetBSDDefaultExecFormat = cpu:
gnuNetBSDDefaultExecFormat =
if (cpu.family == "arm" && cpu.bits == 32) ||
cpu:
(cpu.family == "sparc" && cpu.bits == 32) ||
if
(cpu.family == "m68k" && cpu.bits == 32) ||
(cpu.family == "arm" && cpu.bits == 32)
(cpu.family == "x86" && cpu.bits == 32)
|| (cpu.family == "sparc" && cpu.bits == 32)
then execFormats.aout
|| (cpu.family == "m68k" && cpu.bits == 32)
else execFormats.elf;
|| (cpu.family == "x86" && cpu.bits == 32)
then
execFormats.aout
else
execFormats.elf;
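For instance (illustration only, using the cpuTypes and execFormats defined elsewhere in this file):

  gnuNetBSDDefaultExecFormat cpuTypes.i686   # => execFormats.aout (32-bit x86)
  gnuNetBSDDefaultExecFormat cpuTypes.x86_64 # => execFormats.elf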
|
|
||||||
# Determine when two CPUs are compatible with each other. That is,
|
# Determine when two CPUs are compatible with each other. That is,
|
||||||
# can code built for system B run on system A? For that to happen,
|
# can code built for system B run on system A? For that to happen,
|
||||||
|
@ -197,7 +427,10 @@ rec {
|
||||||
# Note: Since 22.11 the archs of a mode switching CPU are no longer considered
|
# Note: Since 22.11 the archs of a mode switching CPU are no longer considered
|
||||||
# pairwise compatible. Mode switching implies that binaries built for A
|
# pairwise compatible. Mode switching implies that binaries built for A
|
||||||
# and B respectively can't be executed at the same time.
|
# and B respectively can't be executed at the same time.
|
||||||
isCompatible = with cpuTypes; a: b: any id [
|
isCompatible =
|
||||||
|
with cpuTypes;
|
||||||
|
a: b:
|
||||||
|
any id [
|
||||||
# x86
|
# x86
|
||||||
(b == i386 && isCompatible a i486)
|
(b == i386 && isCompatible a i486)
|
||||||
(b == i486 && isCompatible a i586)
|
(b == i486 && isCompatible a i586)
|
||||||
|
@ -259,16 +492,16 @@ rec {
|
||||||
types.vendor = enum (attrValues vendors);
|
types.vendor = enum (attrValues vendors);
|
||||||
|
|
||||||
vendors = setTypes types.openVendor {
|
vendors = setTypes types.openVendor {
|
||||||
apple = {};
|
apple = { };
|
||||||
pc = {};
|
pc = { };
|
||||||
knuth = {};
|
knuth = { };
|
||||||
|
|
||||||
# Actually matters, unlocking some MinGW-w64-specific options in GCC. See
|
# Actually matters, unlocking some MinGW-w64-specific options in GCC. See
|
||||||
# bottom of https://sourceforge.net/p/mingw-w64/wiki2/Unicode%20apps/
|
# bottom of https://sourceforge.net/p/mingw-w64/wiki2/Unicode%20apps/
|
||||||
w64 = {};
|
w64 = { };
|
||||||
|
|
||||||
none = {};
|
none = { };
|
||||||
unknown = {};
|
unknown = { };
|
||||||
};
|
};
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
|
@ -282,13 +515,13 @@ rec {
|
||||||
types.execFormat = enum (attrValues execFormats);
|
types.execFormat = enum (attrValues execFormats);
|
||||||
|
|
||||||
execFormats = setTypes types.openExecFormat {
|
execFormats = setTypes types.openExecFormat {
|
||||||
aout = {}; # a.out
|
aout = { }; # a.out
|
||||||
elf = {};
|
elf = { };
|
||||||
macho = {};
|
macho = { };
|
||||||
pe = {};
|
pe = { };
|
||||||
wasm = {};
|
wasm = { };
|
||||||
|
|
||||||
unknown = {};
|
unknown = { };
|
||||||
};
|
};
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
|
@ -302,8 +535,8 @@ rec {
|
||||||
types.kernelFamily = enum (attrValues kernelFamilies);
|
types.kernelFamily = enum (attrValues kernelFamilies);
|
||||||
|
|
||||||
kernelFamilies = setTypes types.openKernelFamily {
|
kernelFamilies = setTypes types.openKernelFamily {
|
||||||
bsd = {};
|
bsd = { };
|
||||||
darwin = {};
|
darwin = { };
|
||||||
};
|
};
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
|
@ -312,37 +545,109 @@ rec {
|
||||||
name = "kernel";
|
name = "kernel";
|
||||||
description = "kernel name and information";
|
description = "kernel name and information";
|
||||||
merge = mergeOneOption;
|
merge = mergeOneOption;
|
||||||
check = x: types.execFormat.check x.execFormat
|
check =
|
||||||
&& all types.kernelFamily.check (attrValues x.families);
|
x: types.execFormat.check x.execFormat && all types.kernelFamily.check (attrValues x.families);
|
||||||
};
|
};
|
||||||
|
|
||||||
types.kernel = enum (attrValues kernels);
|
types.kernel = enum (attrValues kernels);
|
||||||
|
|
||||||
kernels = let
|
kernels =
|
||||||
inherit (execFormats) elf pe wasm unknown macho;
|
let
|
||||||
|
inherit (execFormats)
|
||||||
|
elf
|
||||||
|
pe
|
||||||
|
wasm
|
||||||
|
unknown
|
||||||
|
macho
|
||||||
|
;
|
||||||
inherit (kernelFamilies) bsd darwin;
|
inherit (kernelFamilies) bsd darwin;
|
||||||
in setTypes types.openKernel {
|
in
|
||||||
|
setTypes types.openKernel {
|
||||||
# TODO(@Ericson2314): Don't want to mass-rebuild yet to keeping 'darwin' as
|
# TODO(@Ericson2314): Don't want to mass-rebuild yet to keeping 'darwin' as
|
||||||
# the normalized name for macOS.
|
# the normalized name for macOS.
|
||||||
macos = { execFormat = macho; families = { inherit darwin; }; name = "darwin"; };
|
macos = {
|
||||||
ios = { execFormat = macho; families = { inherit darwin; }; };
|
execFormat = macho;
|
||||||
|
families = {
|
||||||
|
inherit darwin;
|
||||||
|
};
|
||||||
|
name = "darwin";
|
||||||
|
};
|
||||||
|
ios = {
|
||||||
|
execFormat = macho;
|
||||||
|
families = {
|
||||||
|
inherit darwin;
|
||||||
|
};
|
||||||
|
};
|
||||||
# A tricky thing about FreeBSD is that there is no stable ABI across
|
# A tricky thing about FreeBSD is that there is no stable ABI across
|
||||||
# versions. That means that putting in the version as part of the
|
# versions. That means that putting in the version as part of the
|
||||||
# config string is paramount.
|
# config string is paramount.
|
||||||
freebsd12 = { execFormat = elf; families = { inherit bsd; }; name = "freebsd"; version = 12; };
|
freebsd12 = {
|
||||||
freebsd13 = { execFormat = elf; families = { inherit bsd; }; name = "freebsd"; version = 13; };
|
execFormat = elf;
|
||||||
linux = { execFormat = elf; families = { }; };
|
families = {
|
||||||
netbsd = { execFormat = elf; families = { inherit bsd; }; };
|
inherit bsd;
|
||||||
none = { execFormat = unknown; families = { }; };
|
};
|
||||||
openbsd = { execFormat = elf; families = { inherit bsd; }; };
|
name = "freebsd";
|
||||||
solaris = { execFormat = elf; families = { }; };
|
version = 12;
|
||||||
wasi = { execFormat = wasm; families = { }; };
|
};
|
||||||
redox = { execFormat = elf; families = { }; };
|
freebsd13 = {
|
||||||
windows = { execFormat = pe; families = { }; };
|
execFormat = elf;
|
||||||
ghcjs = { execFormat = unknown; families = { }; };
|
families = {
|
||||||
genode = { execFormat = elf; families = { }; };
|
inherit bsd;
|
||||||
mmixware = { execFormat = unknown; families = { }; };
|
};
|
||||||
} // { # aliases
|
name = "freebsd";
|
||||||
|
version = 13;
|
||||||
|
};
|
||||||
|
linux = {
|
||||||
|
execFormat = elf;
|
||||||
|
families = { };
|
||||||
|
};
|
||||||
|
netbsd = {
|
||||||
|
execFormat = elf;
|
||||||
|
families = {
|
||||||
|
inherit bsd;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
none = {
|
||||||
|
execFormat = unknown;
|
||||||
|
families = { };
|
||||||
|
};
|
||||||
|
openbsd = {
|
||||||
|
execFormat = elf;
|
||||||
|
families = {
|
||||||
|
inherit bsd;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
solaris = {
|
||||||
|
execFormat = elf;
|
||||||
|
families = { };
|
||||||
|
};
|
||||||
|
wasi = {
|
||||||
|
execFormat = wasm;
|
||||||
|
families = { };
|
||||||
|
};
|
||||||
|
redox = {
|
||||||
|
execFormat = elf;
|
||||||
|
families = { };
|
||||||
|
};
|
||||||
|
windows = {
|
||||||
|
execFormat = pe;
|
||||||
|
families = { };
|
||||||
|
};
|
||||||
|
ghcjs = {
|
||||||
|
execFormat = unknown;
|
||||||
|
families = { };
|
||||||
|
};
|
||||||
|
genode = {
|
||||||
|
execFormat = elf;
|
||||||
|
families = { };
|
||||||
|
};
|
||||||
|
mmixware = {
|
||||||
|
execFormat = unknown;
|
||||||
|
families = { };
|
||||||
|
};
|
||||||
|
}
|
||||||
|
// {
|
||||||
|
# aliases
|
||||||
# 'darwin' is the kernel for all of them. We choose macOS by default.
|
# 'darwin' is the kernel for all of them. We choose macOS by default.
|
||||||
darwin = kernels.macos;
|
darwin = kernels.macos;
|
||||||
watchos = kernels.ios;
|
watchos = kernels.ios;
|
||||||
|
@ -361,22 +666,27 @@ rec {
  types.abi = enum (attrValues abis);

  abis = setTypes types.openAbi {
    cygnus = { };
    msvc = { };

    # Note: eabi is specific to ARM and PowerPC.
    # On PowerPC, this corresponds to PPCEABI.
    # On ARM, this corresponds to ARMEABI.
    eabi = {
      float = "soft";
    };
    eabihf = {
      float = "hard";
    };

    # Other architectures should use ELF in embedded situations.
    elf = { };

    androideabi = { };
    android = {
      assertions = [
        {
          assertion = platform: !platform.isAarch32;
          message = ''
            The "android" ABI is not for 32-bit ARM. Use "androideabi" instead.
          '';
@ -384,43 +694,69 @@ rec {
      ];
    };

    gnueabi = {
      float = "soft";
    };
    gnueabihf = {
      float = "hard";
    };
    gnu = {
      assertions = [
        {
          assertion = platform: !platform.isAarch32;
          message = ''
            The "gnu" ABI is ambiguous on 32-bit ARM. Use "gnueabi" or "gnueabihf" instead.
          '';
        }
        {
          assertion = platform: !(platform.isPower64 && platform.isBigEndian);
          message = ''
            The "gnu" ABI is ambiguous on big-endian 64-bit PowerPC. Use "gnuabielfv2" or "gnuabielfv1" instead.
          '';
        }
      ];
    };
    gnuabi64 = {
      abi = "64";
    };
    muslabi64 = {
      abi = "64";
    };

    # NOTE: abi=n32 requires a 64-bit MIPS chip! That is not a typo.
    # It is basically the 64-bit abi with 32-bit pointers. Details:
    # https://www.linux-mips.org/pub/linux/mips/doc/ABI/MIPS-N32-ABI-Handbook.pdf
    gnuabin32 = {
      abi = "n32";
    };
    muslabin32 = {
      abi = "n32";
    };

    gnuabielfv2 = {
      abi = "elfv2";
    };
    gnuabielfv1 = {
      abi = "elfv1";
    };

    musleabi = {
      float = "soft";
    };
    musleabihf = {
      float = "hard";
    };
    musl = { };

    uclibceabi = {
      float = "soft";
    };
    uclibceabihf = {
      float = "hard";
    };
    uclibc = { };

    unknown = { };
  };

  ################################################################################
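The reformatted abis table carries the same data as before; a small sketch of reading it back, under the same (import ./lib).systems.parse assumption as above:

let
  abis = (import ./lib).systems.parse.abis;
in
{
  hardFloat = abis.gnueabihf.float; # => "hard"
  softFloat = abis.musleabi.float;  # => "soft"
  n32 = abis.gnuabin32.abi;         # => "n32"
}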
@ -429,7 +765,13 @@
    name = "system";
    description = "fully parsed representation of llvm- or nix-style platform tuple";
    merge = mergeOneOption;
    check =
      {
        cpu,
        vendor,
        kernel,
        abi,
      }:
      types.cpuType.check cpu
      && types.vendor.check vendor
      && types.kernel.check kernel
@ -438,63 +780,120 @@ rec {

  isSystem = isType "system";

  mkSystem =
    components:
    assert types.parsedPlatform.check components;
    setType "system" components;

  mkSkeletonFromList =
    l:
    {
      "1" =
        if elemAt l 0 == "avr" then
          {
            cpu = elemAt l 0;
            kernel = "none";
            abi = "unknown";
          }
        else
          throw "Target specification with 1 components is ambiguous";
      "2" = # We only do 2-part hacks for things Nix already supports
        if elemAt l 1 == "cygwin" then
          {
            cpu = elemAt l 0;
            kernel = "windows";
            abi = "cygnus";
          }
        # MSVC ought to be the default ABI so this case isn't needed. But then it
        # becomes difficult to handle the gnu* variants for Aarch32 correctly for
        # minGW. So it's easier to make gnu* the default for the MinGW, but
        # hack-in MSVC for the non-MinGW case right here.
        else if elemAt l 1 == "windows" then
          {
            cpu = elemAt l 0;
            kernel = "windows";
            abi = "msvc";
          }
        else if (elemAt l 1) == "elf" then
          {
            cpu = elemAt l 0;
            vendor = "unknown";
            kernel = "none";
            abi = elemAt l 1;
          }
        else
          {
            cpu = elemAt l 0;
            kernel = elemAt l 1;
          };
      "3" =
        # cpu-kernel-environment
        if
          elemAt l 1 == "linux"
          || elem (elemAt l 2) [
            "eabi"
            "eabihf"
            "elf"
            "gnu"
          ]
        then
          {
            cpu = elemAt l 0;
            kernel = elemAt l 1;
            abi = elemAt l 2;
            vendor = "unknown";
          }
        # cpu-vendor-os
        else if
          elemAt l 1 == "apple"
          || elem (elemAt l 2) [
            "wasi"
            "redox"
            "mmixware"
            "ghcjs"
            "mingw32"
          ]
          || hasPrefix "freebsd" (elemAt l 2)
          || hasPrefix "netbsd" (elemAt l 2)
          || hasPrefix "genode" (elemAt l 2)
        then
          {
            cpu = elemAt l 0;
            vendor = elemAt l 1;
            kernel =
              if elemAt l 2 == "mingw32" then
                "windows" # autotools breaks on -gnu for window
              else
                elemAt l 2;
          }
        else
          throw "Target specification with 3 components is ambiguous";
      "4" = {
        cpu = elemAt l 0;
        vendor = elemAt l 1;
        kernel = elemAt l 2;
        abi = elemAt l 3;
      };
    }
    .${toString (length l)}
    or (throw "system string has invalid number of hyphen-separated components");

  # This should revert the job done by config.guess from the gcc compiler.
  mkSystemFromSkeleton =
    {
      cpu,
      # Optional, but fallback too complex for here.
      # Inferred below instead.
      vendor ?
        assert false;
        null,
      kernel,
      # Also inferred below
      abi ?
        assert false;
        null,
    }@args:
    let
      getCpu = name: cpuTypes.${name} or (throw "Unknown CPU type: ${name}");
      getVendor = name: vendors.${name} or (throw "Unknown vendor: ${name}");
      getKernel = name: kernels.${name} or (throw "Unknown kernel: ${name}");
@ -503,45 +902,73 @@ rec {
      parsed = {
        cpu = getCpu args.cpu;
        vendor =
          if args ? vendor then
            getVendor args.vendor
          else if isDarwin parsed then
            vendors.apple
          else if isWindows parsed then
            vendors.pc
          else
            vendors.unknown;
        kernel =
          if hasPrefix "darwin" args.kernel then
            getKernel "darwin"
          else if hasPrefix "netbsd" args.kernel then
            getKernel "netbsd"
          else
            getKernel (removeAbiSuffix args.kernel);
        abi =
          if args ? abi then
            getAbi args.abi
          else if isLinux parsed || isWindows parsed then
            if isAarch32 parsed then
              if versionAtLeast (parsed.cpu.version or "0") "6" then abis.gnueabihf else abis.gnueabi
            # Default ppc64 BE to ELFv2
            else if isPower64 parsed && isBigEndian parsed then
              abis.gnuabielfv2
            else
              abis.gnu
          else
            abis.unknown;
      };
    in
    mkSystem parsed;

  mkSystemFromString = s: mkSystemFromSkeleton (mkSkeletonFromList (splitString "-" s));

  kernelName = kernel: kernel.name + toString (kernel.version or "");

  doubleFromSystem =
    {
      cpu,
      kernel,
      abi,
      ...
    }:
    if abi == abis.cygnus then
      "${cpu.name}-cygwin"
    else if kernel.families ? darwin then
      "${cpu.name}-darwin"
    else
      "${cpu.name}-${kernelName kernel}";

  tripleFromSystem =
    {
      cpu,
      vendor,
      kernel,
      abi,
      ...
    }@sys:
    assert isSystem sys;
    let
      optExecFormat = optionalString (
        kernel.name == "netbsd" && gnuNetBSDDefaultExecFormat cpu != kernel.execFormat
      ) kernel.execFormat.name;
      optAbi = optionalString (abi != abis.unknown) "-${abi.name}";
    in
    "${cpu.name}-${vendor.name}-${kernelName kernel}${optExecFormat}${optAbi}";

  ################################################################################
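Taken together, mkSkeletonFromList, mkSystemFromString and doubleFromSystem round-trip a platform tuple; a sketch under the same (import ./lib).systems.parse assumption:

let
  parse = (import ./lib).systems.parse;
in
{
  # the "4" branch of mkSkeletonFromList above
  skeleton = parse.mkSkeletonFromList [ "x86_64" "unknown" "linux" "gnu" ];
  # => { abi = "gnu"; cpu = "x86_64"; kernel = "linux"; vendor = "unknown"; }

  # neither the cygnus nor the darwin special case applies, so this is cpu-kernel
  double = parse.doubleFromSystem (parse.mkSystemFromString "x86_64-unknown-linux-gnu");
  # => "x86_64-linux"
}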
@ -18,9 +18,7 @@ rec {
    };
  };

  pc_simplekernel = lib.recursiveUpdate pc { linux-kernel.autoModules = false; };

  powernv = {
    linux-kernel = {
@ -490,12 +488,42 @@ rec {
  };

  # can execute on 32bit chip
  gcc_mips32r2_o32 = {
    gcc = {
      arch = "mips32r2";
      abi = "32";
    };
  };
  gcc_mips32r6_o32 = {
    gcc = {
      arch = "mips32r6";
      abi = "32";
    };
  };
  gcc_mips64r2_n32 = {
    gcc = {
      arch = "mips64r2";
      abi = "n32";
    };
  };
  gcc_mips64r6_n32 = {
    gcc = {
      arch = "mips64r6";
      abi = "n32";
    };
  };
  gcc_mips64r2_64 = {
    gcc = {
      arch = "mips64r2";
      abi = "64";
    };
  };
  gcc_mips64r6_64 = {
    gcc = {
      arch = "mips64r6";
      abi = "64";
    };
  };

  # based on:
  # https://www.mail-archive.com/qemu-discuss@nongnu.org/msg05179.html
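Each of these presets only carries a gcc attribute, so reading one back returns exactly the pair defined above; a sketch, assuming the file is exposed as (import ./lib).systems.platforms:

(import ./lib).systems.platforms.gcc_mips64r2_n32.gcc
# => { abi = "n32"; arch = "mips64r2"; }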
@ -546,27 +574,38 @@ rec {
  # This function takes a minimally-valid "platform" and returns an
  # attrset containing zero or more additional attrs which should be
  # included in the platform in order to further elaborate it.
  select =
    platform:
    # x86
    if platform.isx86 then
      pc

    # ARM
    else if platform.isAarch32 then
      let
        version = platform.parsed.cpu.version or null;
      in
      if version == null then
        pc
      else if lib.versionOlder version "6" then
        sheevaplug
      else if lib.versionOlder version "7" then
        raspberrypi
      else
        armv7l-hf-multiplatform

    else if platform.isAarch64 then
      if platform.isDarwin then apple-m1 else aarch64-multiplatform

    else if platform.isRiscV then
      riscv-multiplatform

    else if platform.parsed.cpu == lib.systems.parse.cpuTypes.mipsel then
      (import ./examples.nix { inherit lib; }).mipsel-linux-gnu

    else if platform.parsed.cpu == lib.systems.parse.cpuTypes.powerpc64le then
      powernv

    else
      { };
}
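For an ordinary x86 host, select therefore hands back the pc preset unchanged; a sketch that assumes the usual lib.systems.elaborate entry point and the lib.systems.platforms exposure, neither of which is shown in this diff:

let
  lib = import ./lib;
in
lib.systems.platforms.select (lib.systems.elaborate "x86_64-linux") == lib.systems.platforms.pc
# => true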
@ -1,7 +1,10 @@
# Throws an error if any of our lib tests fail.

let
  tests = [
    "misc"
    "systems"
  ];
  all = builtins.concatLists (map (f: import (./. + "/${f}.nix")) tests);
in
if all == [ ] then null else throw (builtins.toJSON all)
@ -1,11 +1,10 @@
{ lib, ... }:
let
  inherit (lib) types;
in
{
  options = {
    name = lib.mkOption { type = types.str; };
    email = lib.mkOption {
      type = types.nullOr types.str;
      default = null;
@ -23,10 +22,8 @@ in
      default = null;
    };
    keys = lib.mkOption {
      type = types.listOf (types.submodule { options.fingerprint = lib.mkOption { type = types.str; }; });
      default = [ ];
    };
  };
}
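A maintainer entry that satisfies this module could look like the following; the handle and every value are invented for illustration, and github/githubId are the extra attributes the checks later in this diff inspect:

jdoe = {
  name = "Jane Doe";
  email = "jdoe@example.org";
  github = "jdoe";
  githubId = 123456;
  keys = [ { fingerprint = "0123 4567 89AB CDEF 0123  4567 89AB CDEF 0123 4567"; } ];
};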
@ -1,16 +1,23 @@
# to run these tests (and the others)
# nix-build nixpkgs/lib/tests/release.nix
# These tests should stay in sync with the comment in maintainers/maintainers-list.nix
{
  # The pkgs used for dependencies for the testing itself
  pkgs ? import ../.. { },
  lib ? pkgs.lib,
}:

let
  checkMaintainer =
    handle: uncheckedAttrs:
    let
      prefix = [
        "lib"
        "maintainers"
        handle
      ];
      checkedAttrs =
        (lib.modules.evalModules {
          inherit prefix;
          modules = [
            ./maintainer-module.nix
@ -21,7 +28,8 @@ let
          ];
        }).config;

      checks =
        lib.optional (checkedAttrs.github != null && checkedAttrs.githubId == null) ''
          echo ${lib.escapeShellArg (lib.showOption prefix)}': If `github` is specified, `githubId` must be too.'
          # Calling this too often would hit non-authenticated API limits, but this
          # shouldn't happen since such errors will get fixed rather quickly
@ -29,25 +37,40 @@ let
          id=$(jq -r '.id' <<< "$info")
          echo "The GitHub ID for GitHub user ${checkedAttrs.github} is $id:"
          echo -e " githubId = $id;\n"
        ''
        ++
          lib.optional
            (checkedAttrs.email == null && checkedAttrs.github == null && checkedAttrs.matrix == null)
            ''
              echo ${lib.escapeShellArg (lib.showOption prefix)}': At least one of `email`, `github` or `matrix` must be specified, so that users know how to reach you.'
            ''
        ++
          lib.optional (checkedAttrs.email != null && lib.hasSuffix "noreply.github.com" checkedAttrs.email)
            ''
              echo ${lib.escapeShellArg (lib.showOption prefix)}': If an email address is given, it should allow people to reach you. If you do not want that, you can just provide `github` or `matrix` instead.'
            '';
    in
    lib.deepSeq checkedAttrs checks;

  missingGithubIds = lib.concatLists (lib.mapAttrsToList checkMaintainer lib.maintainers);

  success = pkgs.runCommand "checked-maintainers-success" { } ">$out";

  failure =
    pkgs.runCommand "checked-maintainers-failure"
      {
        nativeBuildInputs = [
          pkgs.curl
          pkgs.jq
        ];
        outputHash = "sha256:${lib.fakeSha256}";
        outputHashAlgo = "sha256";
        outputHashMode = "flat";
        SSL_CERT_FILE = "${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt";
      }
      ''
        ${lib.concatStringsSep "\n" missingGithubIds}
        exit 1
      '';
in
if missingGithubIds == [ ] then success else failure
lib/tests/misc.nix: 2062 lines changed. File diff suppressed because it is too large.
@ -1,5 +1,9 @@
{ lib, ... }:
{
  options.dummy = lib.mkOption {
    type = lib.types.anything;
    default = { };
  };
  freeformType =
    let
      a = lib.types.attrsOf (lib.types.submodule { options.bar = lib.mkOption { }; });
@ -7,8 +11,6 @@
      # modifying types like this breaks type merging.
      # This test makes sure that type merging is not performed when only a single declaration exists.
      # Don't modify types in practice!
      a // { merge = loc: defs: { freeformItems = a.merge loc defs; }; };
  config.foo.bar = "ok";
}
@ -30,7 +30,7 @@ in
  # mkAliasOptionModule sets warnings, so this has to be defined.
  warnings = mkOption {
    internal = true;
    default = [ ];
    type = types.listOf types.str;
    example = [ "The `foo' service is deprecated and will go away soon!" ];
    description = ''
@ -46,14 +46,16 @@ in

    # Disable the aliased option with a high priority so it
    # should override the next import.
    (
      { config, lib, ... }:
      {
        enableAlias = mkForce false;
      }
    )

    # Enable the normal (non-aliased) option.
    (
      { config, lib, ... }:
      {
        enable = true;
      }
@ -30,7 +30,7 @@ in
  # mkAliasOptionModule sets warnings, so this has to be defined.
  warnings = mkOption {
    internal = true;
    default = [ ];
    type = types.listOf types.str;
    example = [ "The `foo' service is deprecated and will go away soon!" ];
    description = ''
@ -46,14 +46,16 @@ in

    # Disable the aliased option, but with a default (low) priority so it
    # should be able to be overridden by the next import.
    (
      { config, lib, ... }:
      {
        enableAlias = mkDefault false;
      }
    )

    # Enable the normal (non-aliased) option.
    (
      { config, lib, ... }:
      {
        enable = true;
      }
@ -1,7 +1,6 @@
{ lib, config, ... }:
{
  options.conditionalWorks = lib.mkOption { default = !config.value ? foo; };

  config.value.foo = lib.mkIf false "should not be defined";
}
@ -1,7 +1,6 @@
{ lib, config, ... }:
{
  options.isLazy = lib.mkOption { default = !config.value ? foo; };

  config.value.bar = throw "is not lazy";
}
@ -1,14 +1,24 @@
{ lib, ... }:
{

  options.value = lib.mkOption { type = lib.types.lazyAttrsOf lib.types.boolByOr; };

  config.value = {
    falseFalse = lib.mkMerge [
      false
      false
    ];
    trueFalse = lib.mkMerge [
      true
      false
    ];
    falseTrue = lib.mkMerge [
      false
      true
    ];
    trueTrue = lib.mkMerge [
      true
      true
    ];
  };
}
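Since boolByOr merges definitions with logical OR, only the falseFalse case should stay false; a sketch, with the test's filename invented:

let
  lib = import ./lib;
  # hypothetical filename for the module shown above
  eval = lib.evalModules { modules = [ ./declare-lazyAttrsOf-boolByOr.nix ]; };
in
eval.config.value
# => { falseFalse = false; falseTrue = true; trueFalse = true; trueTrue = true; }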
@ -1,4 +1,5 @@
{ lib, ... }:
{
  options = {
    sub = {
      nixosOk = lib.mkOption {
@ -40,16 +41,12 @@
  ];
  config = {
    _module.freeformType = lib.types.anything;
    ok = lib.evalModules {
      class = "nixos";
      modules = [ ./module-class-is-nixos.nix ];
    };

    fail = lib.evalModules {
      class = "nixos";
      modules = [
        ./module-class-is-nixos.nix
@ -57,20 +54,24 @@
      ];
    };

    fail-anon = lib.evalModules {
      class = "nixos";
      modules = [
        ./module-class-is-nixos.nix
        {
          _file = "foo.nix#darwinModules.default";
          _class = "darwin";
          config = { };
          imports = [ ];
        }
      ];
    };

    sub.nixosOk = {
      _class = "nixos";
    };
    sub.nixosFail = {
      imports = [ ./module-class-is-darwin.nix ];
    };
  };
}
@ -1,24 +1,22 @@
{ lib, options, ... }:
let
  discardPositions = lib.mapAttrs (k: v: v);
in
# unsafeGetAttrPos is unspecified best-effort behavior, so we only want to consider this test on an evaluator that satisfies some basic assumptions about this function.
assert builtins.unsafeGetAttrPos "a" { a = true; } != null;
assert
  builtins.unsafeGetAttrPos "a" (discardPositions {
    a = true;
  }) == null;
{
  imports = [
    {
      options.imported.line10 = lib.mkOption { type = lib.types.int; };

      # Simulates various patterns of generating modules such as
      # programs.firefox.nativeMessagingHosts.ff2mpv. We don't expect to get
      # line numbers for these, but we can fall back on knowing the file.
      options.generated = discardPositions { line18 = lib.mkOption { type = lib.types.int; }; };

      options.submoduleLine34.extraOptLine23 = lib.mkOption {
        default = 1;
@ -27,23 +25,25 @@ assert builtins.unsafeGetAttrPos "a" (discardPositions { a = true; }) == null;
    }
  ];

  options.nested.nestedLine30 = lib.mkOption { type = lib.types.int; };

  options.submoduleLine34 = lib.mkOption {
    default = { };
    type = lib.types.submoduleWith {
      modules = [
        (
          { options, ... }:
          {
            options.submodDeclLine39 = lib.mkOption { };
          }
        )
        { freeformType = with lib.types; lazyAttrsOf (uniq unspecified); }
      ];
    };
  };

  config = {
    submoduleLine34.submodDeclLine39 =
      (options.submoduleLine34.type.getSubOptions [ ]).submodDeclLine39.declarationPositions;
  };
}
@ -1,13 +1,13 @@
{ lib, ... }:
let
  deathtrapArgs = lib.mapAttrs (
    k: _: throw "The module system is too strict, accessing an unused option's ${k} mkOption-attribute."
  ) (lib.functionArgs lib.mkOption);
in
{
  options.value = lib.mkOption {
    type = lib.types.attrsOf lib.types.str;
    default = { };
  };
  options.testing-laziness-so-don't-read-me = lib.mkOption deathtrapArgs;
}
@ -1,7 +1,9 @@
{ lib, ... }:

let
  submod =
    { ... }:
    {
      options = {
        enable = lib.mkOption {
          default = false;
@ -18,8 +20,8 @@ in
{
  options = {
    attrsOfSub = lib.mkOption {
      default = { };
      example = { };
      type = lib.types.attrsOf (lib.types.submodule [ submod ]);
      description = ''
        Some descriptive text
@ -8,11 +8,9 @@ in
      modules = [ ];
      shorthandOnlyDefinesConfig = config.shorthandOnlyDefinesConfig;
    };
    default = { };
  };

  # config-dependent options: won't recommend, but useful for making this test parameterized
  options.shorthandOnlyDefinesConfig = mkOption { default = false; };
}
@ -1,5 +1,4 @@
{ lib, ... }:
{
  options.value = lib.mkOption { type = lib.types.either lib.types.int lib.types.str; };
}
@ -2,8 +2,6 @@
{
  options = {
    value = lib.mkOption { type = lib.types.ints.between (-21) 43; };
  };
}
@ -2,8 +2,6 @@
{
  options.set = {
    value = lib.mkOption { type = lib.types.ints.positive; };
  };
}
@ -2,8 +2,6 @@
{
  options = {
    value = lib.mkOption { type = lib.types.ints.positive; };
  };
}
@ -2,8 +2,6 @@
{
  options = {
    value = lib.mkOption { type = lib.types.ints.unsigned; };
  };
}
@ -1,6 +1,7 @@
{ lib, ... }:
{
  options.value = lib.mkOption {
    type = lib.types.lazyAttrsOf (lib.types.str // { emptyValue.value = "empty"; });
    default = { };
  };
}
@ -1,19 +1,17 @@
{ lib, ... }:
let
  pkgs.hello = {
    type = "derivation";
    pname = "hello";
  };
in
{
  options = {
    package = lib.mkPackageOption pkgs "hello" { };

    namedPackage = lib.mkPackageOption pkgs "Hello" { default = [ "hello" ]; };

    namedPackageSingletonDefault = lib.mkPackageOption pkgs "Hello" { default = "hello"; };

    pathPackage = lib.mkPackageOption pkgs [ "hello" ] { };

@ -21,33 +19,31 @@ in {
      example = "pkgs.hello.override { stdenv = pkgs.clangStdenv; }";
    };

    packageWithPathExample = lib.mkPackageOption pkgs "hello" { example = [ "hello" ]; };

    packageWithExtraDescription = lib.mkPackageOption pkgs "hello" {
      extraDescription = "Example extra description.";
    };

    undefinedPackage = lib.mkPackageOption pkgs "hello" { default = null; };

    nullablePackage = lib.mkPackageOption pkgs "hello" {
      nullable = true;
      default = null;
    };

    nullablePackageWithDefault = lib.mkPackageOption pkgs "hello" { nullable = true; };

    packageWithPkgsText = lib.mkPackageOption pkgs "hello" { pkgsText = "myPkgs"; };

    packageFromOtherSet =
      let
        myPkgs = {
          hello = pkgs.hello // {
            pname = "hello-other";
          };
        };
      in
      lib.mkPackageOption myPkgs "hello" { };
  };
}
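For orientation, lib.mkPackageOption pkgs "hello" { } expands to roughly the following option; this is a sketch based on mkPackageOption's documented behaviour, not something this test file spells out:

lib.mkOption {
  type = lib.types.package;
  default = pkgs.hello;
  defaultText = lib.literalExpression "pkgs.hello";
  description = "The hello package to use.";
}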
@ -1,4 +1,5 @@
{ lib, ... }:
{
  options.value = lib.mkOption {
    type = lib.types.oneOf [
      lib.types.int
@ -3,7 +3,9 @@
{
  options.set = lib.mkOption {
    default = { };
    example = {
      a = 1;
    };
    type = lib.types.attrsOf lib.types.int;
    description = ''
      Some descriptive text
@ -1,6 +1,8 @@
{ lib, ... }:
{
  options.submodule = lib.mkOption {
    inherit
      (lib.evalModules {
        modules = [
          {
            options.inner = lib.mkOption {
@ -9,17 +11,22 @@
            };
          }
        ];
      })
      type
      ;
    default = { };
  };

  config.submodule = lib.mkMerge [
    (
      { lib, ... }:
      {
        options.outer = lib.mkOption {
          type = lib.types.bool;
          default = false;
        };
      }
    )
    {
      inner = true;
      outer = true;
@ -1,4 +1,5 @@
{ lib, ... }:
{
  options.submodule = lib.mkOption {
    type = lib.types.submoduleWith {
      modules = [
@ -10,16 +11,19 @@
        }
      ];
    };
    default = { };
  };

  config.submodule = lib.mkMerge [
    (
      { lib, ... }:
      {
        options.outer = lib.mkOption {
          type = lib.types.bool;
          default = false;
        };
      }
    )
    {
      inner = true;
      outer = true;
@ -1,13 +1,13 @@
{ lib, ... }:
let
  sub.options.config = lib.mkOption {
    type = lib.types.bool;
    default = false;
  };
in
{
  options.submodule = lib.mkOption {
    type = lib.types.submoduleWith { modules = [ sub ]; };
    default = { };
  };
}
@ -1,11 +1,8 @@
{ lib, ... }:
{
  options.submodule = lib.mkOption {
    type = lib.types.submoduleWith { modules = [ ./declare-enable.nix ]; };
    default = { };
  };

  config.submodule = ./define-enable.nix;
@ -1,14 +1,16 @@
{ lib, ... }:
let
  sub.options.config = lib.mkOption {
    type = lib.types.bool;
    default = false;
  };
in
{
  options.submodule = lib.mkOption {
    type = lib.types.submoduleWith {
      modules = [ sub ];
      shorthandOnlyDefinesConfig = true;
    };
    default = { };
  };
}
@ -1,17 +1,19 @@
{ lib, ... }:
{
  options.submodule = lib.mkOption {
    type = lib.types.submoduleWith {
      modules = [
        (
          { lib, ... }:
          {
            options.foo = lib.mkOption { default = lib.foo; };
          }
        )
      ];
      specialArgs.lib = lib // {
        foo = "foo";
      };
    };
    default = { };
  };
}
@ -1,9 +1,10 @@
{ lib, moduleType, ... }:
let
  inherit (lib) mkOption types;
in
{
  options.variants = mkOption {
    type = types.lazyAttrsOf moduleType;
    default = { };
  };
}
@ -1,8 +1,15 @@
{
  lib ? import ../..,
  modules ? [ ],
}:

{
  inherit
    (lib.evalModules {
      inherit modules;
      specialArgs.modulesPath = ./.;
    })
    config
    options
    ;
}
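The harness above can be pointed at any pair of the small test modules in this directory; a sketch, assuming this harness is the directory's default.nix and that define-enable.nix sets enable = true, as its name suggests:

(import ./default.nix {
  modules = [
    ./declare-enable.nix
    ./define-enable.nix
  ];
}).config.enable
# => true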
@ -1,19 +1,28 @@
{ config, lib, ... }:
let
  inherit (lib)
    types
    mkOption
    setDefaultModuleLocation
    evalModules
    ;
  inherit (types)
    deferredModule
    lazyAttrsOf
    submodule
    str
    raw
    enum
    ;
in
{
  options = {
    deferred = mkOption { type = deferredModule; };
    result = mkOption { default = (evalModules { modules = [ config.deferred ]; }).config.result; };
  };
  config = {
    deferred =
      { ... }:
      # this should be an attrset, so this fails
      true;
  };
@ -1,7 +1,14 @@
{ lib, ... }:
let
  inherit (lib) types mkOption setDefaultModuleLocation;
  inherit (types)
    deferredModule
    lazyAttrsOf
    submodule
    str
    raw
    enum
    ;
in
{
  imports = [
@ -9,11 +16,15 @@ in
    # - nodes.<name>
    # - default
    # where all nodes include the default
    (
      { config, ... }:
      {
        _file = "generic.nix";
        options.nodes = mkOption {
          type = lazyAttrsOf (submodule {
            imports = [ config.default ];
          });
          default = { };
        };
        options.default = mkOption {
          type = deferredModule;
@ -22,13 +33,19 @@ in
            Module that is included in all nodes.
          '';
        };
      }
    )

    {
      _file = "default-1.nix";
      default =
        { config, ... }:
        {
          options.settingsDict = lib.mkOption {
            type = lazyAttrsOf str;
            default = { };
          };
          options.bottom = lib.mkOption { type = enum [ ]; };
        };
    }

@ -49,7 +66,9 @@ in

    {
      _file = "nodes-foo-c-is-a.nix";
      nodes.foo =
        { config, ... }:
        {
          settingsDict.c = config.settingsDict.a;
        };
    }
@ -1,3 +1 @@
{ attrsOfSub.bar.enable = true; }
@ -1,3 +1 @@
{ attrsOfSub.bar = { }; }
@ -1,3 +1 @@
{ attrsOfSub.foo.enable = true; }
@ -1,7 +1,5 @@
{ lib, ... }:

{
  attrsOfSub.foo = lib.mkForce { enable = false; };
}
@ -1,7 +1,5 @@
{ config, lib, ... }:

{
  attrsOfSub.foo = lib.mkIf config.enable { enable = true; };
}
@ -1,3 +1 @@
{ attrsOfSub.foo = { }; }
@ -1,7 +1,5 @@
{ lib, ... }:

{
  attrsOfSub = lib.mkForce { foo.enable = false; };
}
@ -1,7 +1,5 @@
{ config, lib, ... }:

{
  attrsOfSub = lib.mkIf config.enable { foo.enable = true; };
}
@ -1,3 +1 @@
{ config.enable = abort "oops"; }
@ -1,3 +1 @@
{ config.enable = throw "oops"; }
@ -1,3 +1 @@
{ enable = true; }
@ -1,5 +1,3 @@
{ lib, ... }:

lib.mkForce { attrsOfSub.foo.enable = false; }
@ -1,5 +1,3 @@
{ lib, ... }:

lib.mkForce { enable = false; }
@ -1,15 +1,24 @@
{ config, ... }:
{
  class = {
    "just" = "data";
  };
  a = "one";
  b = "two";
  meta = "meta";

  _module.args.result =
    let
      r = builtins.removeAttrs config [ "_module" ];
    in
    builtins.trace (builtins.deepSeq r r) (
      r == {
        a = "one";
        b = "two";
        class = {
          "just" = "data";
        };
        meta = "meta";
      }
    );
}
@ -1,5 +1,3 @@
{ config, lib, ... }:

lib.mkIf config.enable { attrsOfSub.foo.enable = true; }
@ -1,3 +1 @@
{ _module.check = false; }
@ -5,12 +5,11 @@
{

  # Always defined, but the value depends on the presence of an option.
  config.set =
    {
      value = if options ? set.enable then 360 else 7;
    }
    # Only define if possible.
    // lib.optionalAttrs (options ? set.enable) { enable = true; };

}
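Depending on whether the set.enable option is declared, the expression above yields one of two attrsets:

# when options ? set.enable:
{ enable = true; value = 360; }
# otherwise:
{ value = 7; }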
@ -5,12 +5,11 @@
{

  # Always defined, but the value depends on the presence of an option.
  config =
    {
      value = if options ? enable then 360 else 7;
    }
    # Only define if possible.
    // lib.optionalAttrs (options ? enable) { enable = true; };

}
@ -1,3 +1,4 @@
{ config, ... }:
{
  settingsDict.a = config.settingsDict.b;
}
@ -1,3 +1 @@
{ submodule.config.config = true; }
@ -1,3 +1 @@
{ submodule.config = true; }
@ -1,3 +1 @@
{ value = -23; }
@ -1,3 +1 @@
{ value = 42; }
Some files were not shown because too many files have changed in this diff.