# Compatibility function to allow flakes to be used by
# non-flake-enabled Nix versions. Given a source tree containing a
# 'flake.nix' and 'flake.lock' file, it fetches the flake inputs and
# calls the flake's 'outputs' function. It then returns the flake's
# 'hydraJobs' output. The file as a whole maps that function over the
# pull requests listed in '<prs>', yielding one jobset attribute per PR.
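#
# A rough sketch of the intended use (the file name 'ci.nix' and the exact
# shape of the PR dump are assumptions, not part of this file): point the
# Nix search-path entry '<prs>' at a JSON listing of open pull requests and
# evaluate this file, e.g.
#
#   nix-instantiate --eval ./ci.nix -I prs=./prs.json
#
# The result has one attribute per PR ('pr123', 'pr124', ...), each holding
# that PR's 'hydraJobs'.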
_: let
  inherit (builtins) attrNames listToAttrs;

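  # A local stand-in for nixpkgs' lib.mapAttrs': apply 'f name value' to each
  # attribute and rebuild the set from the {name, value} pairs it returns.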
  mapAttrs' = f: set:
    listToAttrs (map (attr: f attr set.${attr}) (attrNames set));

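  # Evaluate the flake at 'src' without flake support: read its lock file,
  # fetch the locked inputs, call the flake's 'outputs' function, and return
  # the resulting 'hydraJobs' attribute.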
  flake_compat = src: let
    lockFilePath = src + "/flake.lock";

    lockFile = builtins.fromJSON (builtins.readFile lockFilePath);

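    # Poor man's substitute for builtins.fetchTree: fetch a locked input
    # described by a lock-file entry such as
    #   {type = "github"; owner = "NixOS"; repo = "nixpkgs"; rev = "..."; narHash = "...";}
    # and return an attrset with at least 'outPath' plus whatever metadata
    # that input type provides.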
    fetchTree = info:
      if info.type == "github"
      then {
        outPath =
          fetchTarball
          (
            {url = "https://api.${info.host or "github.com"}/repos/${info.owner}/${info.repo}/tarball/${info.rev}";}
            // (
              if info ? narHash
              then {sha256 = info.narHash;}
              else {}
            )
          );
        inherit (info) rev;
        shortRev = builtins.substring 0 7 info.rev;
        inherit (info) lastModified;
        lastModifiedDate = formatSecondsSinceEpoch info.lastModified;
        inherit (info) narHash;
      }
      else if info.type == "git"
      then
        {
          outPath =
            builtins.fetchGit
            (
              {inherit (info) url;}
              // (
                if info ? rev
                then {inherit (info) rev;}
                else {}
              )
              // (
                if info ? ref
                then {inherit (info) ref;}
                else {}
              )
              // (
                if info ? submodules
                then {inherit (info) submodules;}
                else {}
              )
            );
          inherit (info) lastModified;
          lastModifiedDate = formatSecondsSinceEpoch info.lastModified;
          inherit (info) narHash;
        }
        // (
          if info ? rev
          then {
            inherit (info) rev;
            shortRev = builtins.substring 0 7 info.rev;
          }
          else {}
        )
      else if info.type == "path"
      then {
        outPath = builtins.path {inherit (info) path;};
        inherit (info) narHash;
      }
      else if info.type == "tarball"
      then {
        outPath =
          fetchTarball
          (
            {inherit (info) url;}
            // (
              if info ? narHash
              then {sha256 = info.narHash;}
              else {}
            )
          );
      }
      else if info.type == "gitlab"
      then {
        inherit (info) rev narHash lastModified;
        outPath =
          fetchTarball
          (
            {url = "https://${info.host or "gitlab.com"}/api/v4/projects/${info.owner}%2F${info.repo}/repository/archive.tar.gz?sha=${info.rev}";}
            // (
              if info ? narHash
              then {sha256 = info.narHash;}
              else {}
            )
          );
        shortRev = builtins.substring 0 7 info.rev;
      }
      else
        # FIXME: add Mercurial inputs.
        throw "flake input has unsupported input type '${info.type}'";

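    # Evaluate a flake using the legacy version-4 lock-file format, where the
    # inputs are described by a top-level 'inputs' attrset.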
    callFlake4 = flakeSrc: locks: let
      flake = import (flakeSrc + "/flake.nix");

      inputs =
        builtins.mapAttrs
        (n: v:
          if v.flake or true
          then callFlake4 (fetchTree (v.locked // v.info)) v.inputs
          else fetchTree (v.locked // v.info))
        locks;

      outputs = flakeSrc // (flake.outputs (inputs // {self = outputs;}));
    in
      assert flake.edition == 201909; outputs;

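    # Evaluate a flake that has no lock file; this only works if the flake
    # has no inputs other than 'self'.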
    callLocklessFlake = flakeSrc: let
      flake = import (flakeSrc + "/flake.nix");
      outputs = flakeSrc // (flake.outputs {self = outputs;});
    in
      outputs;

    rootSrc = let
      # Try to clean the source tree by using fetchGit, if this source
      # tree is a valid git repository.
      tryFetchGit = src:
        if isGit && !isShallow
        then let
          res = builtins.fetchGit src;
        in
          if res.rev == "0000000000000000000000000000000000000000"
          then removeAttrs res ["rev" "shortRev"]
          else res
        else {outPath = src;};

      # NB git worktrees have a file for .git, so we don't check the type of .git
      isGit = builtins.pathExists (src + "/.git");
      isShallow = builtins.pathExists (src + "/.git/shallow");
    in
      {
        lastModified = 0;
        lastModifiedDate = formatSecondsSinceEpoch 0;
      }
      // (
        if src ? outPath
        then src
        else tryFetchGit src
      );

    # Format a number of seconds since the Unix epoch as %Y%m%d%H%M%S.
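    # For example, 'formatSecondsSinceEpoch 0' yields "19700101000000" and
    # 'formatSecondsSinceEpoch 86399' yields "19700101235959".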
    formatSecondsSinceEpoch = t: let
      rem = x: y: x - x / y * y;
      days = t / 86400;
      secondsInDay = rem t 86400;
      hours = secondsInDay / 3600;
      minutes = (rem secondsInDay 3600) / 60;
      seconds = rem t 60;

      # Courtesy of https://stackoverflow.com/a/32158604.
      z = days + 719468;
      era =
        (
          if z >= 0
          then z
          else z - 146096
        )
        / 146097;
      doe = z - era * 146097;
      yoe = (doe - doe / 1460 + doe / 36524 - doe / 146096) / 365;
      y = yoe + era * 400;
      doy = doe - (365 * yoe + yoe / 4 - yoe / 100);
      mp = (5 * doy + 2) / 153;
      d = doy - (153 * mp + 2) / 5 + 1;
      m =
        mp
        + (
          if mp < 10
          then 3
          else -9
        );
      y' =
        y
        + (
          if m <= 2
          then 1
          else 0
        );

      pad = s:
        if builtins.stringLength s < 2
        then "0" + s
        else s;
    in "${toString y'}${pad (toString m)}${pad (toString d)}${pad (toString hours)}${pad (toString minutes)}${pad (toString seconds)}";

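    # One attribute per node of a version >= 5 lock file: the fetched source
    # tree plus, for flake inputs, the evaluated flake outputs.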
    allNodes =
      builtins.mapAttrs
      (
        key: node: let
          sourceInfo =
            if key == lockFile.root
            then rootSrc
            else fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);

          subdir =
            if key == lockFile.root
            then ""
            else node.locked.dir or "";

          flake = import (sourceInfo
            + (
              if subdir != ""
              then "/"
              else ""
            )
            + subdir
            + "/flake.nix");

          inputs =
            builtins.mapAttrs
            (inputName: inputSpec: allNodes.${resolveInput inputSpec})
            (node.inputs or {});

          # Resolve an input spec into a node name. An input spec is
          # either a node name, or a 'follows' path from the root
          # node.
          resolveInput = inputSpec:
            if builtins.isList inputSpec
            then getInputByPath lockFile.root inputSpec
            else inputSpec;

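          # For example, the spec "nixpkgs" is already a node name, while the
          # spec ["dwarffs" "nixpkgs"] means "whatever the root's 'dwarffs'
          # input resolved its own 'nixpkgs' input to".
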
          # Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
          # root node, returning the final node.
          getInputByPath = nodeName: path:
            if path == []
            then nodeName
            else
              getInputByPath
              # Since this could be a 'follows' input, call resolveInput.
              (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
              (builtins.tail path);

          outputs = flake.outputs (inputs // {self = result;});

          result =
            outputs
            // sourceInfo
            // {
              inherit inputs;
              inherit outputs;
              inherit sourceInfo;
            };
        in
          if node.flake or true
          then assert builtins.isFunction flake.outputs; result
          else sourceInfo
      )
      lockFile.nodes;

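    # Dispatch on the lock-file format: no lock file at all, the legacy
    # version-4 format, or the node-based formats (versions 5 to 7).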
    result =
      if !(builtins.pathExists lockFilePath)
      then callLocklessFlake rootSrc
      else if lockFile.version == 4
      then callFlake4 rootSrc lockFile.inputs
      else if lockFile.version >= 5 && lockFile.version <= 7
      then allNodes.${lockFile.root}
      else throw "lock file '${lockFilePath}' has unsupported version ${toString lockFile.version}";
  in
    result.hydraJobs;

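  # One source per open pull request. '<prs>' is expected to point at a JSON
  # file describing the PRs, presumably in the shape returned by the GitHub
  # API (each entry carrying 'head.repo.clone_url' and 'head.ref').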
  prs = builtins.fromJSON (builtins.readFile <prs>);

  srcs =
    mapAttrs'
    (n: value: {
      name = "pr${n}";
      value =
        builtins.fetchGit
        {
          url = "${value.head.repo.clone_url}";
          ref = "${value.head.ref}";
        };
    })
    prs;
in
  builtins.mapAttrs (_: flake_compat) srcs