Mirror of https://github.com/NixOS/nix.git (synced 2025-11-09 03:56:01 +01:00)

Merge pull request #13786 from NixOS/mergify/bp/2.28-maintenance/pr-13785
flake: nixpkgs: nixos-unstable -> nixos-25.05-small (backport #13785)

Commit 265e535650. 61 changed files with 655 additions and 570 deletions.

@@ -1,2 +1,6 @@
 # bulk initial re-formatting with clang-format
 a5264aa46eadb89c055b4d1442e814edb2d4414e # !autorebase ./maintainers/format.sh --until-stable
+# clang-format 18 -> clang-format 19
+945d9d7264b0dc7d0a8c8edf1cab34f38cd49a7f # !autorebase ./maintainers/format.sh --until-stable
+# nixfmt 1.0.0
+448bbbe0fd1fbe09cb46a238fec25b220f172122 # !autorebase ./maintainers/format.sh --until-stable

docker.nix
@@ -38,60 +38,58 @@ let
   ]
   ++ extraPkgs;

-  users =
-    {
+  users = {
     root = {
       uid = 0;
       shell = "${pkgs.bashInteractive}/bin/bash";
       home = "/root";
       gid = 0;
       groups = [ "root" ];
       description = "System administrator";
     };

     nobody = {
       uid = 65534;
       shell = "${pkgs.shadow}/bin/nologin";
       home = "/var/empty";
       gid = 65534;
       groups = [ "nobody" ];
       description = "Unprivileged account (don't use!)";
     };

   }
   // lib.optionalAttrs (uid != 0) {
     "${uname}" = {
       uid = uid;
       shell = "${pkgs.bashInteractive}/bin/bash";
       home = "/home/${uname}";
       gid = gid;
       groups = [ "${gname}" ];
       description = "Nix user";
     };
   }
   // lib.listToAttrs (
     map (n: {
       name = "nixbld${toString n}";
       value = {
         uid = 30000 + n;
         gid = 30000;
         groups = [ "nixbld" ];
         description = "Nix build user ${toString n}";
       };
     }) (lib.lists.range 1 32)
   );

-  groups =
-    {
+  groups = {
     root.gid = 0;
     nixbld.gid = 30000;
     nobody.gid = 65534;
   }
   // lib.optionalAttrs (gid != 0) {
     "${gname}".gid = gid;
   };

   userToPasswd = (
     k:
     {

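The users and groups attrsets above are built by merging a fixed base set with optional and generated entries. A minimal, self-contained sketch of that pattern using only builtins (the local optionalAttrs and the sample names are illustrative stand-ins, not taken from docker.nix):

  let
    # assumed to behave like lib.optionalAttrs from nixpkgs
    optionalAttrs = cond: attrs: if cond then attrs else { };
    uid = 1000;
    users =
      {
        root = { uid = 0; };
      }
      // optionalAttrs (uid != 0) {
        user = { inherit uid; };
      }
      // builtins.listToAttrs (
        map (n: {
          name = "nixbld${toString n}";
          value = { uid = 30000 + n; };
        }) (builtins.genList (n: n + 1) 3)
      );
  in
  builtins.attrNames users
  # => [ "nixbld1" "nixbld2" "nixbld3" "root" "user" ]
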
flake.lock (generated)
@@ -63,16 +63,16 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1734359947,
-        "narHash": "sha256-1Noao/H+N8nFB4Beoy8fgwrcOQLVm9o4zKW1ODaqK9E=",
+        "lastModified": 1755442223,
+        "narHash": "sha256-VtMQg02B3kt1oejwwrGn50U9Xbjgzfbb5TV5Wtx8dKI=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "48d12d5e70ee91fe8481378e540433a7303dbf6a",
+        "rev": "cd32a774ac52caaa03bcfc9e7591ac8c18617ced",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "release-24.11",
+        "ref": "nixos-25.05-small",
         "repo": "nixpkgs",
         "type": "github"
       }

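The locked revision recorded above can also be fetched directly, which is one way to reproduce exactly what the lock pins (sketch; the GitHub archive URL pattern is standard, no hash is given here, so this only evaluates in impure mode):

  let
    # rev taken from the "nixpkgs" entry in flake.lock above
    nixpkgs = builtins.fetchTarball {
      url = "https://github.com/NixOS/nixpkgs/archive/cd32a774ac52caaa03bcfc9e7591ac8c18617ced.tar.gz";
    };
  in
  import nixpkgs { }
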
flake.nix
@@ -1,7 +1,7 @@
 {
   description = "The purely functional package manager";

-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/release-24.11";
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small";

   inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
   inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446";

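For reference, a downstream flake that wants to track the same channel would declare the input the same way. A minimal sketch (everything except the inputs/outputs structure is illustrative):

  {
    description = "example flake tracking the same nixpkgs branch";
    inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small";
    outputs = { self, nixpkgs }: {
      # nixpkgs.lib and nixpkgs.legacyPackages.<system> now come from nixos-25.05-small
    };
  }
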
@@ -54,12 +54,12 @@ let
       preConfigure =
         prevAttrs.preConfigure or ""
         +
         # Update the repo-global .version file.
         # Symlink ./.version points there, but by default only workDir is writable.
         ''
           chmod u+w ./.version
           echo ${finalAttrs.version} > ./.version
         '';
     };

   localSourceLayer =

@@ -148,7 +148,8 @@ let
   nativeBuildInputs = [
     meson
     ninja
-  ] ++ prevAttrs.nativeBuildInputs or [ ];
+  ]
+  ++ prevAttrs.nativeBuildInputs or [ ];
   mesonCheckFlags = prevAttrs.mesonCheckFlags or [ ] ++ [
     "--print-errorlogs"
   ];

@@ -46,24 +46,24 @@
 }:

 let
-  libs =
-    {
+  libs = {
     inherit
       nix-util
       nix-util-c
       nix-store
       nix-store-c
       nix-fetchers
       nix-expr
       nix-expr-c
       nix-flake
       nix-flake-c
       nix-main
       nix-main-c
       nix-cmd
       ;
   }
-  // lib.optionalAttrs
+  //
+  lib.optionalAttrs
     (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform)
     {
       # Currently fails in static build

@@ -125,20 +125,19 @@ stdenv.mkDerivation (finalAttrs: {
   */
   dontFixup = true;

-  checkInputs =
-    [
+  checkInputs = [
     # Make sure the unit tests have passed
     nix-util-tests.tests.run
     nix-store-tests.tests.run
     nix-expr-tests.tests.run
     nix-fetchers-tests.tests.run
     nix-flake-tests.tests.run

     # Make sure the functional tests have passed
     nix-functional-tests
   ]
-  ++ lib.optionals
-    (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform)
+  ++
+  lib.optionals (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform)
     [
       # Perl currently fails in static build
       # TODO: Split out tests into a separate derivation?

@@ -505,8 +505,9 @@ Installables SourceExprCommand::parseInstallables(ref<Store> store, std::vector<

         for (auto & s : ss) {
             auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(s);
-            result.push_back(make_ref<InstallableAttrPath>(
-                InstallableAttrPath::parse(state, *this, vFile, std::move(prefix), std::move(extendedOutputsSpec))));
+            result.push_back(
+                make_ref<InstallableAttrPath>(InstallableAttrPath::parse(
+                    state, *this, vFile, std::move(prefix), std::move(extendedOutputsSpec))));
         }

     } else {

@@ -521,8 +522,9 @@ Installables SourceExprCommand::parseInstallables(ref<Store> store, std::vector<

             if (prefix.find('/') != std::string::npos) {
                 try {
-                    result.push_back(make_ref<InstallableDerivedPath>(
-                        InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec.raw)));
+                    result.push_back(
+                        make_ref<InstallableDerivedPath>(
+                            InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec.raw)));
                     continue;
                 } catch (BadStorePath &) {
                 } catch (...) {

@@ -534,15 +536,16 @@ Installables SourceExprCommand::parseInstallables(ref<Store> store, std::vector<
                 try {
                     auto [flakeRef, fragment] =
                         parseFlakeRefWithFragment(fetchSettings, std::string{prefix}, absPath(getCommandBaseDir()));
-                    result.push_back(make_ref<InstallableFlake>(
-                        this,
-                        getEvalState(),
-                        std::move(flakeRef),
-                        fragment,
-                        std::move(extendedOutputsSpec),
-                        getDefaultFlakeAttrPaths(),
-                        getDefaultFlakeAttrPathPrefixes(),
-                        lockFlags));
+                    result.push_back(
+                        make_ref<InstallableFlake>(
+                            this,
+                            getEvalState(),
+                            std::move(flakeRef),
+                            fragment,
+                            std::move(extendedOutputsSpec),
+                            getDefaultFlakeAttrPaths(),
+                            getDefaultFlakeAttrPathPrefixes(),
+                            lockFlags));
                     continue;
                 } catch (...) {
                     ex = std::current_exception();

@@ -610,10 +613,11 @@ static void throwBuildErrors(std::vector<KeyedBuildResult> & buildResults, const
     StringSet failedPaths;
     for (; failedResult != failed.end(); failedResult++) {
         if (!failedResult->errorMsg.empty()) {
-            logError(ErrorInfo{
-                .level = lvlError,
-                .msg = failedResult->errorMsg,
-            });
+            logError(
+                ErrorInfo{
+                    .level = lvlError,
+                    .msg = failedResult->errorMsg,
+                });
         }
         failedPaths.insert(failedResult->path.to_string(store));
     }

@@ -53,7 +53,8 @@ mkMesonLibrary (finalAttrs: {

   buildInputs = [
     ({ inherit editline readline; }.${readlineFlavor})
-  ] ++ lib.optional enableMarkdown lowdown;
+  ]
+  ++ lib.optional enableMarkdown lowdown;

   propagatedBuildInputs = [
     nix-util

@@ -650,7 +650,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
                 for (auto & arg : args)
                     arg = "*" + arg + "*";

-                markdown += "**Synopsis:** `builtins." + (std::string)(*doc->name) + "` " + concatStringsSep(" ", args)
+                markdown += "**Synopsis:** `builtins." + (std::string) (*doc->name) + "` " + concatStringsSep(" ", args)
                     + "\n\n";
             }

@@ -220,25 +220,28 @@ INSTANTIATE_TEST_SUITE_P(
 // expanded.
 #define X_EXPAND_IF0(k, v) k "." v
 #define X_EXPAND_IF1(k, v) k " = { " v " };"
 #define X4(w, x, y, z) \
     TEST_F(TrivialExpressionTest, nestedAttrsetMerge##w##x##y##z) \
     { \
-        auto v = eval("{ a.b = { c = 1; d = 2; }; } == { " X_EXPAND_IF##w( \
-            "a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " }"); \
+        auto v = eval( \
+            "{ a.b = { c = 1; d = 2; }; } == { " X_EXPAND_IF##w( \
+                "a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " }"); \
         ASSERT_THAT(v, IsTrue()); \
     }; \
     TEST_F(TrivialExpressionTest, nestedAttrsetMergeDup##w##x##y##z) \
     { \
         ASSERT_THROW( \
-            eval("{ " X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y( \
-                "a", X_EXPAND_IF##z("b", "c = 2;")) " }"), \
+            eval( \
+                "{ " X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y( \
+                    "a", X_EXPAND_IF##z("b", "c = 2;")) " }"), \
             Error); \
     }; \
     TEST_F(TrivialExpressionTest, nestedAttrsetMergeLet##w##x##y##z) \
     { \
-        auto v = eval("{ b = { c = 1; d = 2; }; } == (let " X_EXPAND_IF##w( \
-            "a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " in a)"); \
+        auto v = eval( \
+            "{ b = { c = 1; d = 2; }; } == (let " X_EXPAND_IF##w( \
+                "a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " in a)"); \
         ASSERT_THAT(v, IsTrue()); \
     };
 #define X3(...) X4(__VA_ARGS__, 0) X4(__VA_ARGS__, 1)
 #define X2(...) X3(__VA_ARGS__, 0) X3(__VA_ARGS__, 1)

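The macro-generated tests above all exercise the same language behaviour: nested attribute-path definitions merge into one attrset, and duplicate leaf attributes are an error. A minimal Nix sketch of what is being asserted, independent of the test harness:

  # both elements evaluate to true: attrpath definitions of a.b are merged,
  # in attrset literals and in let bindings alike
  [
    ({ a.b = { c = 1; d = 2; }; } == { a.b.c = 1; a.b.d = 2; })
    ({ b = { c = 1; d = 2; }; } == (let a.b.c = 1; a.b.d = 2; in a))
  ]
  # defining the same leaf twice, e.g. { a.b.c = 1; a.b.c = 2; },
  # is rejected with an "already defined" error instead
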
@@ -123,13 +123,13 @@ struct AttrDb
         return doSQLite([&]() {
             auto state(_state->lock());

-            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::FullAttrs)(0, false).exec();
+            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::FullAttrs) (0, false).exec();

             AttrId rowId = state->db.getLastInsertedRowId();
             assert(rowId);

             for (auto & attr : attrs)
-                state->insertAttribute.use()(rowId)(symbols[attr])(AttrType::Placeholder)(0, false).exec();
+                state->insertAttribute.use()(rowId)(symbols[attr])(AttrType::Placeholder) (0, false).exec();

             return rowId;
         });

@@ -147,10 +147,10 @@ struct AttrDb
                     ctx.push_back(' ');
                     ctx.append(*p);
                 }
-                state->insertAttributeWithContext.use()(key.first)(symbols[key.second])(AttrType::String)(s) (ctx)
+                state->insertAttributeWithContext.use()(key.first)(symbols[key.second])(AttrType::String) (s) (ctx)
                     .exec();
             } else {
-                state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::String)(s).exec();
+                state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::String) (s).exec();
             }

             return state->db.getLastInsertedRowId();

@@ -162,7 +162,7 @@ struct AttrDb
         return doSQLite([&]() {
             auto state(_state->lock());

-            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Bool)(b ? 1 : 0).exec();
+            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Bool) (b ? 1 : 0).exec();

             return state->db.getLastInsertedRowId();
         });

@@ -173,7 +173,7 @@ struct AttrDb
         return doSQLite([&]() {
             auto state(_state->lock());

-            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Int)(n).exec();
+            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Int) (n).exec();

             return state->db.getLastInsertedRowId();
         });

@@ -185,8 +185,8 @@ struct AttrDb
             auto state(_state->lock());

             state->insertAttribute
-                .use()(key.first)(symbols[key.second])(AttrType::ListOfStrings)(
-                    dropEmptyInitThenConcatStringsSep("\t", l))
+                .use()(key.first)(symbols[key.second])(
+                    AttrType::ListOfStrings) (dropEmptyInitThenConcatStringsSep("\t", l))
                 .exec();

             return state->db.getLastInsertedRowId();

@@ -198,7 +198,7 @@ struct AttrDb
         return doSQLite([&]() {
             auto state(_state->lock());

-            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Placeholder)(0, false).exec();
+            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Placeholder) (0, false).exec();

             return state->db.getLastInsertedRowId();
         });

@@ -209,7 +209,7 @@ struct AttrDb
         return doSQLite([&]() {
             auto state(_state->lock());

-            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Missing)(0, false).exec();
+            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Missing) (0, false).exec();

             return state->db.getLastInsertedRowId();
         });

@@ -220,7 +220,7 @@ struct AttrDb
         return doSQLite([&]() {
             auto state(_state->lock());

-            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Misc)(0, false).exec();
+            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Misc) (0, false).exec();

             return state->db.getLastInsertedRowId();
         });

@@ -231,7 +231,7 @@ struct AttrDb
         return doSQLite([&]() {
             auto state(_state->lock());

-            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Failed)(0, false).exec();
+            state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Failed) (0, false).exec();

             return state->db.getLastInsertedRowId();
         });

@@ -44,12 +44,13 @@ EvalErrorBuilder<T> & EvalErrorBuilder<T>::withFrame(const Env & env, const Expr
     // NOTE: This is abusing side-effects.
     // TODO: check compatibility with nested debugger calls.
    // TODO: What side-effects??
-    error.state.debugTraces.push_front(DebugTrace{
-        .pos = expr.getPos(),
-        .expr = expr,
-        .env = env,
-        .hint = HintFmt("Fake frame for debugging purposes"),
-        .isError = true});
+    error.state.debugTraces.push_front(
+        DebugTrace{
+            .pos = expr.getPos(),
+            .expr = expr,
+            .env = env,
+            .hint = HintFmt("Fake frame for debugging purposes"),
+            .isError = true});
     return *this;
 }

@@ -795,8 +795,9 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
         printError("%s\n", error->what());

         if (trylevel > 0 && error->info().level != lvlInfo)
-            printError("This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL
-                " to skip these.\n");
+            printError(
+                "This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL
+                " to skip these.\n");
     }

     auto se = getStaticEnv(expr);

@@ -70,13 +70,15 @@ mkMesonLibrary (finalAttrs: {
     nix-util
     nix-store
     nix-fetchers
-  ] ++ finalAttrs.passthru.externalPropagatedBuildInputs;
+  ]
+  ++ finalAttrs.passthru.externalPropagatedBuildInputs;

   # Hack for sake of the dev shell
   passthru.externalPropagatedBuildInputs = [
     boost
     nlohmann_json
-  ] ++ lib.optional enableGC boehmgc;
+  ]
+  ++ lib.optional enableGC boehmgc;

   mesonFlags = [
     (lib.mesonEnable "gc" enableGC)

@@ -68,10 +68,11 @@ StringMap EvalState::realiseContext(const NixStringContext & context, StorePathS
     std::visit(
         overloaded{
             [&](const NixStringContextElem::Built & b) {
-                drvs.push_back(DerivedPath::Built{
-                    .drvPath = b.drvPath,
-                    .outputs = OutputsSpec::Names{b.output},
-                });
+                drvs.push_back(
+                    DerivedPath::Built{
+                        .drvPath = b.drvPath,
+                        .outputs = OutputsSpec::Names{b.output},
+                    });
                 ensureValid(b.drvPath->getBaseStorePath());
             },
             [&](const NixStringContextElem::Opaque & o) {

@@ -117,10 +118,11 @@ StringMap EvalState::realiseContext(const NixStringContext & context, StorePathS
     /* Get all the output paths corresponding to the placeholders we had */
     if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
         res.insert_or_assign(
-            DownstreamPlaceholder::fromSingleDerivedPathBuilt(SingleDerivedPath::Built{
-                .drvPath = drv.drvPath,
-                .output = outputName,
-            })
+            DownstreamPlaceholder::fromSingleDerivedPathBuilt(
+                SingleDerivedPath::Built{
+                    .drvPath = drv.drvPath,
+                    .output = outputName,
+                })
                 .render(),
             buildStore->printStorePath(outputPath));
     }

@@ -297,10 +299,11 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
     }
 }

-static RegisterPrimOp primop_scopedImport(PrimOp{
-    .name = "scopedImport", .arity = 2, .fun = [](EvalState & state, const PosIdx pos, Value ** args, Value & v) {
-        import(state, pos, *args[1], args[0], v);
-    }});
+static RegisterPrimOp primop_scopedImport(
+    PrimOp{
+        .name = "scopedImport", .arity = 2, .fun = [](EvalState & state, const PosIdx pos, Value ** args, Value & v) {
+            import(state, pos, *args[1], args[0], v);
+        }});

 static RegisterPrimOp primop_import(
     {.name = "import",

@@ -806,11 +809,12 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value ** ar
     v.mkList(list);
 }

-static RegisterPrimOp primop_genericClosure(PrimOp{
-    .name = "__genericClosure",
-    .args = {"attrset"},
-    .arity = 1,
-    .doc = R"(
+static RegisterPrimOp primop_genericClosure(
+    PrimOp{
+        .name = "__genericClosure",
+        .args = {"attrset"},
+        .arity = 1,
+        .doc = R"(
       `builtins.genericClosure` iteratively computes the transitive closure over an arbitrary relation defined by a function.

       It takes *attrset* with two attributes named `startSet` and `operator`, and returns a list of attribute sets:

@@ -860,8 +864,8 @@ static RegisterPrimOp primop_genericClosure(PrimOp{
       > [ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ]
       > ```
     )",
     .fun = prim_genericClosure,
 });

 static RegisterPrimOp primop_break(
     {.name = "break",

@@ -872,11 +876,12 @@ static RegisterPrimOp primop_break(
     )",
     .fun = [](EvalState & state, const PosIdx pos, Value ** args, Value & v) {
         if (state.canDebug()) {
-            auto error = Error(ErrorInfo{
-                .level = lvlInfo,
-                .msg = HintFmt("breakpoint reached"),
-                .pos = state.positions[pos],
-            });
+            auto error = Error(
+                ErrorInfo{
+                    .level = lvlInfo,
+                    .msg = HintFmt("breakpoint reached"),
+                    .pos = state.positions[pos],
+                });

             state.runDebugRepl(&error);
         }

@@ -940,13 +945,14 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value ** a
     }
 }

-static RegisterPrimOp primop_addErrorContext(PrimOp{
-    .name = "__addErrorContext",
-    .arity = 2,
-    // The normal trace item is redundant
-    .addTrace = false,
-    .fun = prim_addErrorContext,
-});
+static RegisterPrimOp primop_addErrorContext(
+    PrimOp{
+        .name = "__addErrorContext",
+        .arity = 2,
+        // The normal trace item is redundant
+        .addTrace = false,
+        .fun = prim_addErrorContext,
+    });

 static void prim_ceil(EvalState & state, const PosIdx pos, Value ** args, Value & v)
 {

@@ -1656,11 +1662,12 @@ static void derivationStrictInternal(EvalState & state, const std::string & drvN
     v.mkAttrs(result);
 }

-static RegisterPrimOp primop_derivationStrict(PrimOp{
-    .name = "derivationStrict",
-    .arity = 1,
-    .fun = prim_derivationStrict,
-});
+static RegisterPrimOp primop_derivationStrict(
+    PrimOp{
+        .name = "derivationStrict",
+        .arity = 1,
+        .fun = prim_derivationStrict,
+    });

 /* Return a placeholder string for the specified output that will be
    substituted by the corresponding output path at build time. For

@@ -1898,9 +1905,10 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value ** args, Va
     }
     NixStringContext context;
     for (auto && p : std::move(refs)) {
-        context.insert(NixStringContextElem::Opaque{
-            .path = std::move((StorePath &&) p),
-        });
+        context.insert(
+            NixStringContextElem::Opaque{
+                .path = std::move((StorePath &&) p),
+            });
     }
     v.mkString(s, context);
 }

@@ -1956,10 +1964,11 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value ** args, Va
                 .debugThrow();
         }

-        lookupPath.elements.emplace_back(LookupPath::Elem{
-            .prefix = LookupPath::Prefix{.s = prefix},
-            .path = LookupPath::Path{.s = path},
-        });
+        lookupPath.elements.emplace_back(
+            LookupPath::Elem{
+                .prefix = LookupPath::Prefix{.s = prefix},
+                .path = LookupPath::Path{.s = path},
+            });
     }

     auto path =

@@ -1968,10 +1977,11 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value ** args, Va
     v.mkPath(state.findFile(lookupPath, path, pos));
 }

-static RegisterPrimOp primop_findFile(PrimOp{
-    .name = "__findFile",
-    .args = {"search-path", "lookup-path"},
-    .doc = R"(
+static RegisterPrimOp primop_findFile(
+    PrimOp{
+        .name = "__findFile",
+        .args = {"search-path", "lookup-path"},
+        .doc = R"(
       Find *lookup-path* in *search-path*.

       [Lookup path](@docroot@/language/constructs/lookup-path.md) expressions are [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](#builtins-nixPath):

@@ -2099,8 +2109,8 @@ static RegisterPrimOp primop_findFile(PrimOp{
       >
       > makes `<nixpkgs>` refer to a particular branch of the `NixOS/nixpkgs` repository on GitHub.
     )",
     .fun = prim_findFile,
 });

 /* Return the cryptographic hash of a file in base-16. */
 static void prim_hashFile(EvalState & state, const PosIdx pos, Value ** args, Value & v)

@@ -2871,11 +2881,12 @@ static void prim_unsafeGetAttrPos(EvalState & state, const PosIdx pos, Value **
     state.mkPos(v, i->pos);
 }

-static RegisterPrimOp primop_unsafeGetAttrPos(PrimOp{
-    .name = "__unsafeGetAttrPos",
-    .arity = 2,
-    .fun = prim_unsafeGetAttrPos,
-});
+static RegisterPrimOp primop_unsafeGetAttrPos(
+    PrimOp{
+        .name = "__unsafeGetAttrPos",
+        .arity = 2,
+        .fun = prim_unsafeGetAttrPos,
+    });

 // access to exact position information (ie, line and colum numbers) is deferred
 // due to the cost associated with calculating that information and how rarely

@@ -276,9 +276,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value ** arg

     if (auto attr = i.value->attrs()->get(sPath)) {
         if (state.forceBool(*attr->value, attr->pos, "while evaluating the `path` attribute of a string context"))
-            context.emplace(NixStringContextElem::Opaque{
-                .path = namePath,
-            });
+            context.emplace(
+                NixStringContextElem::Opaque{
+                    .path = namePath,
+                });
     }

     if (auto attr = i.value->attrs()->get(sAllOutputs)) {

@@ -291,9 +292,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value ** arg
                     .atPos(i.pos)
                     .debugThrow();
             }
-            context.emplace(NixStringContextElem::DrvDeep{
-                .drvPath = namePath,
-            });
+            context.emplace(
+                NixStringContextElem::DrvDeep{
+                    .drvPath = namePath,
+                });
         }
     }

@@ -309,10 +311,11 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value ** arg
         for (auto elem : attr->value->listItems()) {
             auto outputName =
                 state.forceStringNoCtx(*elem, attr->pos, "while evaluating an output name within a string context");
-            context.emplace(NixStringContextElem::Built{
-                .drvPath = makeConstantStorePathRef(namePath),
-                .output = std::string{outputName},
-            });
+            context.emplace(
+                NixStringContextElem::Built{
+                    .drvPath = makeConstantStorePathRef(namePath),
+                    .output = std::string{outputName},
+                });
         }
     }
 }

@@ -44,10 +44,11 @@ static void downloadToSink(

 static std::string getLfsApiToken(const ParsedURL & url)
 {
-    auto [status, output] = runProgram(RunOptions{
-        .program = "ssh",
-        .args = {*url.authority, "git-lfs-authenticate", url.path, "download"},
-    });
+    auto [status, output] = runProgram(
+        RunOptions{
+            .program = "ssh",
+            .args = {*url.authority, "git-lfs-authenticate", url.path, "download"},
+        });

     if (output.empty())
         throw Error(

@@ -389,11 +389,12 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
                 continue;
             std::string key2(key, 0, key.size() - 5);
             auto path = CanonPath(value);
-            result.push_back(Submodule{
-                .path = path,
-                .url = entries[key2 + ".url"],
-                .branch = entries[key2 + ".branch"],
-            });
+            result.push_back(
+                Submodule{
+                    .path = path,
+                    .url = entries[key2 + ".url"],
+                    .branch = entries[key2 + ".branch"],
+                });
         }

         return result;

@@ -533,14 +534,15 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
             append(gitArgs, {"--depth", "1"});
         append(gitArgs, {std::string("--"), url, refspec});

-        runProgram(RunOptions{
-            .program = "git",
-            .lookupPath = true,
-            // FIXME: git stderr messes up our progress indicator, so
-            // we're using --quiet for now. Should process its stderr.
-            .args = gitArgs,
-            .input = {},
-            .isInteractive = true});
+        runProgram(
+            RunOptions{
+                .program = "git",
+                .lookupPath = true,
+                // FIXME: git stderr messes up our progress indicator, so
+                // we're using --quiet for now. Should process its stderr.
+                .args = gitArgs,
+                .input = {},
+                .isInteractive = true});
     }

     void verifyCommit(const Hash & rev, const std::vector<fetchers::PublicKey> & publicKeys) override

@@ -566,17 +568,18 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
         writeFile(allowedSignersFile, allowedSigners);

         // Run verification command
-        auto [status, output] = runProgram(RunOptions{
-            .program = "git",
-            .args =
-                {"-c",
-                 "gpg.ssh.allowedSignersFile=" + allowedSignersFile,
-                 "-C",
-                 path.string(),
-                 "verify-commit",
-                 rev.gitRev()},
-            .mergeStderrToStdout = true,
-        });
+        auto [status, output] = runProgram(
+            RunOptions{
+                .program = "git",
+                .args =
+                    {"-c",
+                     "gpg.ssh.allowedSignersFile=" + allowedSignersFile,
+                     "-C",
+                     path.string(),
+                     "verify-commit",
+                     rev.gitRev()},
+                .mergeStderrToStdout = true,
+            });

         /* Evaluate result through status code and checking if public
            key fingerprints appear on stderr. This is neccessary

@@ -56,12 +56,13 @@ Path getCachePath(std::string_view key, bool shallow)
 // ...
 std::optional<std::string> readHead(const Path & path)
 {
-    auto [status, output] = runProgram(RunOptions{
-        .program = "git",
-        // FIXME: use 'HEAD' to avoid returning all refs
-        .args = {"ls-remote", "--symref", path},
-        .isInteractive = true,
-    });
+    auto [status, output] = runProgram(
+        RunOptions{
+            .program = "git",
+            // FIXME: use 'HEAD' to avoid returning all refs
+            .args = {"ls-remote", "--symref", path},
+            .isInteractive = true,
+        });
     if (status != 0)
         return std::nullopt;

@@ -325,17 +326,18 @@ struct GitInputScheme : InputScheme

     writeFile(*repoPath / path.rel(), contents);

-    auto result = runProgram(RunOptions{
-        .program = "git",
-        .args =
-            {"-C",
-             repoPath->string(),
-             "--git-dir",
-             repoInfo.gitDir,
-             "check-ignore",
-             "--quiet",
-             std::string(path.rel())},
-    });
+    auto result = runProgram(
+        RunOptions{
+            .program = "git",
+            .args =
+                {"-C",
+                 repoPath->string(),
+                 "--git-dir",
+                 repoInfo.gitDir,
+                 "check-ignore",
+                 "--quiet",
+                 std::string(path.rel())},
+        });
     auto exitCode =
 #ifndef WIN32 // TODO abstract over exit status handling on Windows
         WEXITSTATUS(result.first)

@@ -35,11 +35,12 @@ std::shared_ptr<Registry> Registry::read(const Settings & settings, const Path &
                 toAttrs.erase(j);
             }
             auto exact = i.find("exact");
-            registry->entries.push_back(Entry{
-                .from = Input::fromAttrs(settings, jsonToAttrs(i["from"])),
-                .to = Input::fromAttrs(settings, std::move(toAttrs)),
-                .extraAttrs = extraAttrs,
-                .exact = exact != i.end() && exact.value()});
+            registry->entries.push_back(
+                Entry{
+                    .from = Input::fromAttrs(settings, jsonToAttrs(i["from"])),
+                    .to = Input::fromAttrs(settings, std::move(toAttrs)),
+                    .extraAttrs = extraAttrs,
+                    .exact = exact != i.end() && exact.value()});
         }
     }

@@ -14,8 +14,9 @@ Gen<OutputsSpec> Arbitrary<OutputsSpec>::arbitrary()
         return gen::just((OutputsSpec) OutputsSpec::All{});
     case 1:
         return gen::map(
-            gen::nonEmpty(gen::container<StringSet>(
-                gen::map(gen::arbitrary<StorePathName>(), [](StorePathName n) { return n.name; }))),
+            gen::nonEmpty(
+                gen::container<StringSet>(
+                    gen::map(gen::arbitrary<StorePathName>(), [](StorePathName n) { return n.name; }))),
             [](StringSet names) { return (OutputsSpec) OutputsSpec::Names{names}; });
     default:
         assert(false);

@@ -82,15 +82,17 @@ VERSIONED_CHARACTERIZATION_TEST(
             .path = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
         },
         DerivedPath::Built{
-            .drvPath = makeConstantStorePathRef(StorePath{
-                "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
-            }),
+            .drvPath = makeConstantStorePathRef(
+                StorePath{
+                    "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
+                }),
             .outputs = OutputsSpec::All{},
         },
         DerivedPath::Built{
-            .drvPath = makeConstantStorePathRef(StorePath{
-                "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
-            }),
+            .drvPath = makeConstantStorePathRef(
+                StorePath{
+                    "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
+                }),
             .outputs = OutputsSpec::Names{"x", "y"},
         },
     }))

@@ -108,15 +110,17 @@ VERSIONED_CHARACTERIZATION_TEST(
             .path = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"},
         },
         DerivedPath::Built{
-            .drvPath = makeConstantStorePathRef(StorePath{
-                "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
-            }),
+            .drvPath = makeConstantStorePathRef(
+                StorePath{
+                    "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
+                }),
             .outputs = OutputsSpec::All{},
         },
         DerivedPath::Built{
-            .drvPath = makeConstantStorePathRef(StorePath{
-                "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
-            }),
+            .drvPath = makeConstantStorePathRef(
+                StorePath{
+                    "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
+                }),
             .outputs = OutputsSpec::Names{"x", "y"},
         },
     }))

@@ -369,9 +373,10 @@ VERSIONED_CHARACTERIZATION_TEST(WorkerProtoTest, keyedBuildResult_1_29, "keyed-b
             },
             /* .path = */
             DerivedPath::Built{
-                .drvPath = makeConstantStorePathRef(StorePath{
-                    "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
-                }),
+                .drvPath = makeConstantStorePathRef(
+                    StorePath{
+                        "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
+                    }),
                 .outputs = OutputsSpec::Names{"out"},
             },
         },

@@ -1456,10 +1456,11 @@ std::pair<bool, SingleDrvOutputs> DerivationGoal::checkPathValidity()
                 // derivation, and the output path is valid, but we don't have
                 // its realisation stored (probably because it has been built
                 // without the `ca-derivations` experimental flag).
-                worker.store.registerDrvOutput(Realisation{
-                    drvOutput,
-                    info.known->path,
-                });
+                worker.store.registerDrvOutput(
+                    Realisation{
+                        drvOutput,
+                        info.known->path,
+                    });
             }
         }
         if (info.known && info.known->isValid())

@@ -1551,10 +1552,11 @@ void DerivationGoal::waiteeDone(GoalPtr waitee, ExitCode result)
         auto & outputs = nodeP->value;

         for (auto & outputName : outputs) {
-            auto buildResult = dg->getBuildResult(DerivedPath::Built{
-                .drvPath = makeConstantStorePathRef(dg->drvPath),
-                .outputs = OutputsSpec::Names{outputName},
-            });
+            auto buildResult = dg->getBuildResult(
+                DerivedPath::Built{
+                    .drvPath = makeConstantStorePathRef(dg->drvPath),
+                    .outputs = OutputsSpec::Names{outputName},
+                });
             if (buildResult.success()) {
                 auto i = buildResult.builtOutputs.find(outputName);
                 if (i != buildResult.builtOutputs.end())

@@ -68,10 +68,11 @@ std::vector<KeyedBuildResult> Store::buildPathsWithResults(
     results.reserve(state.size());

     for (auto & [req, goalPtr] : state)
-        results.emplace_back(KeyedBuildResult{
-            goalPtr->getBuildResult(req),
-            /* .path = */ req,
-        });
+        results.emplace_back(
+            KeyedBuildResult{
+                goalPtr->getBuildResult(req),
+                /* .path = */ req,
+            });

     return results;
 }

@@ -88,10 +89,11 @@ BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivat

     try {
         worker.run(Goals{goal});
-        return goal->getBuildResult(DerivedPath::Built{
-            .drvPath = makeConstantStorePathRef(drvPath),
-            .outputs = OutputsSpec::All{},
-        });
+        return goal->getBuildResult(
+            DerivedPath::Built{
+                .drvPath = makeConstantStorePathRef(drvPath),
+                .outputs = OutputsSpec::All{},
+            });
     } catch (Error & e) {
         return BuildResult{
             .status = BuildResult::MiscFailure,

@@ -279,10 +279,11 @@ void Worker::run(const Goals & _topGoals)
     for (auto & i : _topGoals) {
         topGoals.insert(i);
         if (auto goal = dynamic_cast<DerivationGoal *>(i.get())) {
-            topPaths.push_back(DerivedPath::Built{
-                .drvPath = makeConstantStorePathRef(goal->drvPath),
-                .outputs = goal->wantedOutputs,
-            });
+            topPaths.push_back(
+                DerivedPath::Built{
+                    .drvPath = makeConstantStorePathRef(goal->drvPath),
+                    .outputs = goal->wantedOutputs,
+                });
         } else if (auto goal = dynamic_cast<PathSubstitutionGoal *>(i.get())) {
             topPaths.push_back(DerivedPath::Opaque{goal->storePath});
         }

@@ -39,10 +39,11 @@ LegacySSHStore::LegacySSHStore(std::string_view scheme, std::string_view host, c
     , CommonSSHStoreConfig(scheme, host, params)
     , LegacySSHStoreConfig(scheme, host, params)
     , Store(params)
-    , connections(make_ref<Pool<Connection>>(
-          std::max(1, (int) maxConnections),
-          [this]() { return openConnection(); },
-          [](const ref<Connection> & r) { return r->good; }))
+    , connections(
+          make_ref<Pool<Connection>>(
+              std::max(1, (int) maxConnections),
+              [this]() { return openConnection(); },
+              [](const ref<Connection> & r) { return r->good; }))
     , master(createSSHMaster(
           // Use SSH master only if using more than 1 connection.
           connections->capacity() > 1,

@@ -15,15 +15,16 @@ Machine::Machine(
     decltype(supportedFeatures) supportedFeatures,
     decltype(mandatoryFeatures) mandatoryFeatures,
     decltype(sshPublicHostKey) sshPublicHostKey)
-    : storeUri(StoreReference::parse(
-          // Backwards compatibility: if the URI is schemeless, is not a path,
-          // and is not one of the special store connection words, prepend
-          // ssh://.
-          storeUri.find("://") != std::string::npos || storeUri.find("/") != std::string::npos || storeUri == "auto"
-                  || storeUri == "daemon" || storeUri == "local" || hasPrefix(storeUri, "auto?")
-                  || hasPrefix(storeUri, "daemon?") || hasPrefix(storeUri, "local?") || hasPrefix(storeUri, "?")
-              ? storeUri
-              : "ssh://" + storeUri))
+    : storeUri(
+          StoreReference::parse(
+              // Backwards compatibility: if the URI is schemeless, is not a path,
+              // and is not one of the special store connection words, prepend
+              // ssh://.
+              storeUri.find("://") != std::string::npos || storeUri.find("/") != std::string::npos || storeUri == "auto"
+                      || storeUri == "daemon" || storeUri == "local" || hasPrefix(storeUri, "auto?")
+                      || hasPrefix(storeUri, "daemon?") || hasPrefix(storeUri, "local?") || hasPrefix(storeUri, "?")
+                  ? storeUri
+                  : "ssh://" + storeUri))
     , systemTypes(systemTypes)
     , sshKey(sshKey)
     , maxJobs(maxJobs)

@@ -171,22 +172,23 @@ static Machine parseBuilderLine(const std::set<std::string> & defaultSystems, co

     // TODO use designated initializers, once C++ supports those with
     // custom constructors.
-    return {// `storeUri`
-            tokens[0],
-            // `systemTypes`
-            isSet(1) ? tokenizeString<std::set<std::string>>(tokens[1], ",") : defaultSystems,
-            // `sshKey`
-            isSet(2) ? tokens[2] : "",
-            // `maxJobs`
-            isSet(3) ? parseUnsignedIntField(3) : 1U,
-            // `speedFactor`
-            isSet(4) ? parseFloatField(4) : 1.0f,
-            // `supportedFeatures`
-            isSet(5) ? tokenizeString<std::set<std::string>>(tokens[5], ",") : std::set<std::string>{},
-            // `mandatoryFeatures`
-            isSet(6) ? tokenizeString<std::set<std::string>>(tokens[6], ",") : std::set<std::string>{},
-            // `sshPublicHostKey`
-            isSet(7) ? ensureBase64(7) : ""};
+    return {
+        // `storeUri`
+        tokens[0],
+        // `systemTypes`
+        isSet(1) ? tokenizeString<std::set<std::string>>(tokens[1], ",") : defaultSystems,
+        // `sshKey`
+        isSet(2) ? tokens[2] : "",
+        // `maxJobs`
+        isSet(3) ? parseUnsignedIntField(3) : 1U,
+        // `speedFactor`
+        isSet(4) ? parseFloatField(4) : 1.0f,
+        // `supportedFeatures`
+        isSet(5) ? tokenizeString<std::set<std::string>>(tokens[5], ",") : std::set<std::string>{},
+        // `mandatoryFeatures`
+        isSet(6) ? tokenizeString<std::set<std::string>>(tokens[6], ",") : std::set<std::string>{},
+        // `sshPublicHostKey`
+        isSet(7) ? ensureBase64(7) : ""};
 }

 static Machines

@@ -58,30 +58,28 @@ mkMesonLibrary (finalAttrs: {

   nativeBuildInputs = lib.optional embeddedSandboxShell unixtools.hexdump;

-  buildInputs =
-    [
+  buildInputs = [
     boost
     curl
     sqlite
   ]
   ++ lib.optional stdenv.hostPlatform.isLinux libseccomp
   # There have been issues building these dependencies
   ++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox
   ++ lib.optional withAWS aws-sdk-cpp;

   propagatedBuildInputs = [
     nix-util
     nlohmann_json
   ];

-  mesonFlags =
-    [
+  mesonFlags = [
     (lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux)
     (lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell)
   ]
   ++ lib.optionals stdenv.hostPlatform.isLinux [
     (lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox")
   ];

   meta = {
     platforms = lib.platforms.unix ++ lib.platforms.windows;

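The buildInputs and mesonFlags lists above use the common pattern of appending conditionally with lib.optional and lib.optionals. A minimal sketch of that pattern with local stand-ins for the two lib functions, so it evaluates without nixpkgs (the package names are placeholders):

  let
    optional = cond: x: if cond then [ x ] else [ ]; # assumed to match lib.optional
    optionals = cond: xs: if cond then xs else [ ]; # assumed to match lib.optionals
    isLinux = true;
  in
  [
    "boost"
    "curl"
    "sqlite"
  ]
  ++ optional isLinux "libseccomp"
  ++ optionals isLinux [
    "busybox-sandbox-shell"
  ]
  # => [ "boost" "curl" "sqlite" "libseccomp" "busybox-sandbox-shell" ]
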
@@ -27,25 +27,26 @@ namespace nix {
 RemoteStore::RemoteStore(const Params & params)
     : RemoteStoreConfig(params)
     , Store(params)
-    , connections(make_ref<Pool<Connection>>(
-          std::max(1, (int) maxConnections),
-          [this]() {
-              auto conn = openConnectionWrapper();
-              try {
-                  initConnection(*conn);
-              } catch (...) {
-                  failed = true;
-                  throw;
-              }
-              return conn;
-          },
-          [this](const ref<Connection> & r) {
-              return r->to.good() && r->from.good()
-                     && std::chrono::duration_cast<std::chrono::seconds>(
-                            std::chrono::steady_clock::now() - r->startTime)
-                            .count()
-                            < maxConnectionAge;
-          }))
+    , connections(
+          make_ref<Pool<Connection>>(
+              std::max(1, (int) maxConnections),
+              [this]() {
+                  auto conn = openConnectionWrapper();
+                  try {
+                      initConnection(*conn);
+                  } catch (...) {
+                      failed = true;
+                      throw;
+                  }
+                  return conn;
+              },
+              [this](const ref<Connection> & r) {
+                  return r->to.good() && r->from.good()
+                         && std::chrono::duration_cast<std::chrono::seconds>(
+                                std::chrono::steady_clock::now() - r->startTime)
+                                .count()
+                                < maxConnectionAge;
+              }))
 {
 }

@@ -655,12 +656,13 @@ std::vector<KeyedBuildResult> RemoteStore::buildPathsWithResults(
     std::visit(
         overloaded{
             [&](const DerivedPath::Opaque & bo) {
-                results.push_back(KeyedBuildResult{
-                    {
-                        .status = BuildResult::Substituted,
-                    },
-                    /* .path = */ bo,
-                });
+                results.push_back(
+                    KeyedBuildResult{
+                        {
+                            .status = BuildResult::Substituted,
+                        },
+                        /* .path = */ bo,
+                    });
             },
             [&](const DerivedPath::Built & bfd) {
                 KeyedBuildResult res{

@@ -93,19 +93,21 @@ static void initAWS()
 S3Helper::S3Helper(
 const std::string & profile, const std::string & region, const std::string & scheme, const std::string & endpoint)
 : config(makeConfig(region, scheme, endpoint))
- , client(make_ref<Aws::S3::S3Client>(
- profile == "" ? std::dynamic_pointer_cast<Aws::Auth::AWSCredentialsProvider>(
- std::make_shared<Aws::Auth::DefaultAWSCredentialsProviderChain>())
- : std::dynamic_pointer_cast<Aws::Auth::AWSCredentialsProvider>(
- std::make_shared<Aws::Auth::ProfileConfigFileAWSCredentialsProvider>(profile.c_str())),
- *config,
+ , client(
+ make_ref<Aws::S3::S3Client>(
+ profile == ""
+ ? std::dynamic_pointer_cast<Aws::Auth::AWSCredentialsProvider>(
+ std::make_shared<Aws::Auth::DefaultAWSCredentialsProviderChain>())
+ : std::dynamic_pointer_cast<Aws::Auth::AWSCredentialsProvider>(
+ std::make_shared<Aws::Auth::ProfileConfigFileAWSCredentialsProvider>(profile.c_str())),
+ *config,
 // FIXME: https://github.com/aws/aws-sdk-cpp/issues/759
 # if AWS_SDK_VERSION_MAJOR == 1 && AWS_SDK_VERSION_MINOR < 3
 false,
 # else
 Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never,
 # endif
 endpoint.empty()))
 {
 }
@@ -88,13 +88,14 @@ void handleDiffHook(
 if (diffHookOpt && settings.runDiffHook) {
 auto & diffHook = *diffHookOpt;
 try {
- auto diffRes = runProgram(RunOptions{
- .program = diffHook,
- .lookupPath = true,
- .args = {tryA, tryB, drvPath, tmpDir},
- .uid = uid,
- .gid = gid,
- .chdir = "/"});
+ auto diffRes = runProgram(
+ RunOptions{
+ .program = diffHook,
+ .lookupPath = true,
+ .args = {tryA, tryB, drvPath, tmpDir},
+ .uid = uid,
+ .gid = gid,
+ .chdir = "/"});
 if (!statusOk(diffRes.first))
 throw ExecError(diffRes.first, "diff-hook program '%1%' %2%", diffHook, statusToString(diffRes.first));
@@ -2715,10 +2716,11 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()

 std::filesystem::rename(tmpOutput, actualPath);

- auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating{
- .method = dof.ca.method,
- .hashAlgo = wanted.algo,
- });
+ auto newInfo0 = newInfoFromCA(
+ DerivationOutput::CAFloating{
+ .method = dof.ca.method,
+ .hashAlgo = wanted.algo,
+ });

 /* Check wanted hash */
 assert(newInfo0.ca);
@@ -2754,10 +2756,11 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
 },

 [&](const DerivationOutput::Impure & doi) {
- return newInfoFromCA(DerivationOutput::CAFloating{
- .method = doi.method,
- .hashAlgo = doi.hashAlgo,
- });
+ return newInfoFromCA(
+ DerivationOutput::CAFloating{
+ .method = doi.method,
+ .hashAlgo = doi.hashAlgo,
+ });
 },

 },
@@ -314,9 +314,10 @@ TEST(Config, applyConfigFailsOnMissingIncludes)
 Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};

 ASSERT_THROW(
- config.applyConfig("name-of-the-setting = value-from-file\n"
- "# name-of-the-setting = foo\n"
- "include /nix/store/does/not/exist.nix"),
+ config.applyConfig(
+ "name-of-the-setting = value-from-file\n"
+ "# name-of-the-setting = foo\n"
+ "include /nix/store/does/not/exist.nix"),
 Error);
 }
@@ -31,10 +31,11 @@ TEST(to_json, vectorOfOptionalInts)

 TEST(to_json, optionalVectorOfInts)
 {
- std::optional<std::vector<int>> val = std::make_optional(std::vector<int>{
- -420,
- 420,
- });
+ std::optional<std::vector<int>> val = std::make_optional(
+ std::vector<int>{
+ -420,
+ 420,
+ });
 ASSERT_EQ(nlohmann::json(val), nlohmann::json::parse("[-420,420]"));
 val = std::nullopt;
 ASSERT_EQ(nlohmann::json(val), nlohmann::json(nullptr));
@@ -50,7 +50,8 @@ mkMesonLibrary (finalAttrs: {
 libblake3
 libsodium
 openssl
- ] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
+ ]
+ ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;

 propagatedBuildInputs = [
 boost
@@ -41,10 +41,11 @@ Suggestions Suggestions::bestMatches(const std::set<std::string> & allMatches, s
 {
 std::set<Suggestion> res;
 for (const auto & possibleMatch : allMatches) {
- res.insert(Suggestion{
- .distance = levenshteinDistance(query, possibleMatch),
- .suggestion = possibleMatch,
- });
+ res.insert(
+ Suggestion{
+ .distance = levenshteinDistance(query, possibleMatch),
+ .suggestion = possibleMatch,
+ });
 }
 return Suggestions{res};
 }
@@ -253,8 +253,13 @@ pid_t startProcess(std::function<void()> fun, const ProcessOptions & options)
 std::string runProgram(
 Path program, bool lookupPath, const Strings & args, const std::optional<std::string> & input, bool isInteractive)
 {
- auto res = runProgram(RunOptions{
- .program = program, .lookupPath = lookupPath, .args = args, .input = input, .isInteractive = isInteractive});
+ auto res = runProgram(
+ RunOptions{
+ .program = program,
+ .lookupPath = lookupPath,
+ .args = args,
+ .input = input,
+ .isInteractive = isInteractive});

 if (!statusOk(res.first))
 throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first));
@@ -84,8 +84,13 @@ int Pid::wait()
 std::string runProgram(
 Path program, bool lookupPath, const Strings & args, const std::optional<std::string> & input, bool isInteractive)
 {
- auto res = runProgram(RunOptions{
- .program = program, .lookupPath = lookupPath, .args = args, .input = input, .isInteractive = isInteractive});
+ auto res = runProgram(
+ RunOptions{
+ .program = program,
+ .lookupPath = lookupPath,
+ .args = args,
+ .input = input,
+ .isInteractive = isInteractive});

 if (!statusOk(res.first))
 throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first));
@@ -480,10 +480,11 @@ static void main_nix_build(int argc, char ** argv)
 throw Error("the 'bashInteractive' attribute in <nixpkgs> did not evaluate to a derivation");

 auto bashDrv = drv->requireDrvPath();
- pathsToBuild.push_back(DerivedPath::Built{
- .drvPath = makeConstantStorePathRef(bashDrv),
- .outputs = OutputsSpec::Names{"out"},
- });
+ pathsToBuild.push_back(
+ DerivedPath::Built{
+ .drvPath = makeConstantStorePathRef(bashDrv),
+ .outputs = OutputsSpec::Names{"out"},
+ });
 pathsToCopy.insert(bashDrv);
 shellDrv = bashDrv;
@@ -499,10 +500,11 @@ static void main_nix_build(int argc, char ** argv)
 accumDerivedPath = [&](ref<SingleDerivedPath> inputDrv,
 const DerivedPathMap<StringSet>::ChildNode & inputNode) {
 if (!inputNode.value.empty())
- pathsToBuild.push_back(DerivedPath::Built{
- .drvPath = inputDrv,
- .outputs = OutputsSpec::Names{inputNode.value},
- });
+ pathsToBuild.push_back(
+ DerivedPath::Built{
+ .drvPath = inputDrv,
+ .outputs = OutputsSpec::Names{inputNode.value},
+ });
 for (const auto & [outputName, childNode] : inputNode.childMap)
 accumDerivedPath(
 make_ref<SingleDerivedPath>(SingleDerivedPath::Built{inputDrv, outputName}), childNode);
@@ -687,10 +689,11 @@ static void main_nix_build(int argc, char ** argv)
 if (outputName == "")
 throw Error("derivation '%s' lacks an 'outputName' attribute", store->printStorePath(drvPath));

- pathsToBuild.push_back(DerivedPath::Built{
- .drvPath = makeConstantStorePathRef(drvPath),
- .outputs = OutputsSpec::Names{outputName},
- });
+ pathsToBuild.push_back(
+ DerivedPath::Built{
+ .drvPath = makeConstantStorePathRef(drvPath),
+ .outputs = OutputsSpec::Names{outputName},
+ });
 pathsToBuildOrdered.push_back({drvPath, {outputName}});
 drvsToCopy.insert(drvPath);
@@ -467,9 +467,10 @@ static void printMissing(EvalState & state, PackageInfos & elems)
 };
 targets.emplace_back(std::move(path));
 } else
- targets.emplace_back(DerivedPath::Opaque{
- .path = i.queryOutPath(),
- });
+ targets.emplace_back(
+ DerivedPath::Opaque{
+ .path = i.queryOutPath(),
+ });

 printMissing(state.store, targets);
 }
@@ -1108,7 +1109,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
 attrs["substitutable"] = hasSubs ? "1" : "0";
 } else
 columns.push_back(
- (std::string)(isInstalled ? "I" : "-") + (isValid ? "P" : "-") + (hasSubs ? "S" : "-"));
+ (std::string) (isInstalled ? "I" : "-") + (isValid ? "P" : "-") + (hasSubs ? "S" : "-"));
 }

 if (xmlOutput)
@@ -70,28 +70,29 @@ UnresolvedApp InstallableValue::toApp(EvalState & state)

 std::vector<DerivedPath> context2;
 for (auto & c : context) {
- context2.emplace_back(std::visit(
- overloaded{
- [&](const NixStringContextElem::DrvDeep & d) -> DerivedPath {
- /* We want all outputs of the drv */
- return DerivedPath::Built{
- .drvPath = makeConstantStorePathRef(d.drvPath),
- .outputs = OutputsSpec::All{},
- };
- },
- [&](const NixStringContextElem::Built & b) -> DerivedPath {
- return DerivedPath::Built{
- .drvPath = b.drvPath,
- .outputs = OutputsSpec::Names{b.output},
- };
- },
- [&](const NixStringContextElem::Opaque & o) -> DerivedPath {
- return DerivedPath::Opaque{
- .path = o.path,
- };
- },
- },
- c.raw));
+ context2.emplace_back(
+ std::visit(
+ overloaded{
+ [&](const NixStringContextElem::DrvDeep & d) -> DerivedPath {
+ /* We want all outputs of the drv */
+ return DerivedPath::Built{
+ .drvPath = makeConstantStorePathRef(d.drvPath),
+ .outputs = OutputsSpec::All{},
+ };
+ },
+ [&](const NixStringContextElem::Built & b) -> DerivedPath {
+ return DerivedPath::Built{
+ .drvPath = b.drvPath,
+ .outputs = OutputsSpec::Names{b.output},
+ };
+ },
+ [&](const NixStringContextElem::Opaque & o) -> DerivedPath {
+ return DerivedPath::Opaque{
+ .path = o.path,
+ };
+ },
+ },
+ c.raw));
 }

 return UnresolvedApp{App{
@@ -402,10 +402,11 @@ void mainWrapped(int argc, char ** argv)
 self-aware. That is, it has to know where it is installed. We
 don't think it's sentient.
 */
- settings.buildHook.setDefault(Strings{
- getNixBin({}).string(),
- "__build-remote",
- });
+ settings.buildHook.setDefault(
+ Strings{
+ getNixBin({}).string(),
+ "__build-remote",
+ });

 #ifdef __linux__
 if (isRootUser()) {
@@ -15,13 +15,14 @@ void runNix(Path program, const Strings & args, const std::optional<std::string>
 auto subprocessEnv = getEnv();
 subprocessEnv["NIX_CONFIG"] = globalConfig.toKeyValue();
 // isInteractive avoid grabling interactive commands
- runProgram2(RunOptions{
- .program = getNixBin(program).string(),
- .args = args,
- .environment = subprocessEnv,
- .input = input,
- .isInteractive = true,
- });
+ runProgram2(
+ RunOptions{
+ .program = getNixBin(program).string(),
+ .args = args,
+ .environment = subprocessEnv,
+ .input = input,
+ .isInteractive = true,
+ });

 return;
 }
@@ -34,13 +34,14 @@ struct CmdSearch : InstallableValueCommand, MixJSON
 CmdSearch()
 {
 expectArgs("regex", &res);
- addFlag(Flag{
- .longName = "exclude",
- .shortName = 'e',
- .description = "Hide packages whose attribute path, name or description contain *regex*.",
- .labels = {"regex"},
- .handler = {[this](std::string s) { excludeRes.push_back(s); }},
- });
+ addFlag(
+ Flag{
+ .longName = "exclude",
+ .shortName = 'e',
+ .description = "Hide packages whose attribute path, name or description contain *regex*.",
+ .labels = {"regex"},
+ .handler = {[this](std::string s) { excludeRes.push_back(s); }},
+ });
 }

 std::string description() override
@@ -45,7 +45,8 @@ perl.pkgs.toPerlModule (

 buildInputs = [
 nix-store
- ] ++ finalAttrs.passthru.externalBuildInputs;
+ ]
+ ++ finalAttrs.passthru.externalBuildInputs;

 # Hack for sake of the dev shell
 passthru.externalBuildInputs = [
@@ -2,16 +2,15 @@ with import ./lib.nix;

 let

- attrs =
- {
- y = "y";
- x = "x";
- foo = "foo";
- }
- // rec {
- x = "newx";
- bar = x;
- };
+ attrs = {
+ y = "y";
+ x = "x";
+ foo = "foo";
+ }
+ // rec {
+ x = "newx";
+ bar = x;
+ };

 names = builtins.attrNames attrs;
@@ -1,15 +1,14 @@
 let {
- as =
- {
- x = 123;
- y = 456;
- }
- // {
- z = 789;
- }
- // {
- z = 987;
- };
+ as = {
+ x = 123;
+ y = 456;
+ }
+ // {
+ z = 789;
+ }
+ // {
+ z = 987;
+ };

 body =
 if as ? a then
@@ -1,15 +1,14 @@
 let {
- as =
- {
- x = 123;
- y = 456;
- }
- // {
- z = 789;
- }
- // {
- z = 987;
- };
+ as = {
+ x = 123;
+ y = 456;
+ }
+ // {
+ z = 789;
+ }
+ // {
+ z = 987;
+ };

 A = "a";
 Z = "z";
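The two language-test hunks above only re-indent the `//` chains; the semantics they exercise are unchanged. As a reminder (a sketch, not taken from the diff), the attribute-update operator `//` is right-biased, which is why `z` ends up as 987 in those tests:

let
  as = { x = 123; y = 456; } // { z = 789; } // { z = 987; };
in
assert as.z == 987; # the right-hand operand of // wins
as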
@@ -6,7 +6,8 @@ let
 scopedImport = attrs: fn: scopedImport (overrides // attrs) fn;

 builtins = builtins // overrides;
- } // import ./lib.nix;
+ }
+ // import ./lib.nix;

 in
 scopedImport overrides ./imported.nix
@@ -6,32 +6,31 @@ mkDerivation {
 name = "nested-sandboxing";
 busybox = builtins.getEnv "busybox";
 EXTRA_SANDBOX = builtins.getEnv "EXTRA_SANDBOX";
- buildCommand =
- ''
- set -x
- set -eu -o pipefail
- ''
- + (
- if altitude == 0 then
- ''
- echo Deep enough! > $out
- ''
- else
- ''
- cp -r ${../common} ./common
- cp ${../common.sh} ./common.sh
- cp ${../config.nix} ./config.nix
- cp -r ${./.} ./nested-sandboxing
+ buildCommand = ''
+ set -x
+ set -eu -o pipefail
+ ''
+ + (
+ if altitude == 0 then
+ ''
+ echo Deep enough! > $out
+ ''
+ else
+ ''
+ cp -r ${../common} ./common
+ cp ${../common.sh} ./common.sh
+ cp ${../config.nix} ./config.nix
+ cp -r ${./.} ./nested-sandboxing

 export PATH=${builtins.getEnv "NIX_BIN_DIR"}:$PATH

 export _NIX_TEST_SOURCE_DIR=$PWD
 export _NIX_TEST_BUILD_DIR=$PWD

 source common.sh
 source ./nested-sandboxing/command.sh

 runNixBuild ${storeFun} ${toString altitude} >> $out
 ''
 );
 }
@@ -46,25 +46,24 @@ mkMesonDerivation (
 ];

 # Hack for sake of the dev shell
- passthru.externalNativeBuildInputs =
- [
- meson
- ninja
- pkg-config
+ passthru.externalNativeBuildInputs = [
+ meson
+ ninja
+ pkg-config

 jq
 git
 mercurial
 ]
 ++ lib.optionals stdenv.hostPlatform.isLinux [
 # For various sandboxing tests that needs a statically-linked shell,
 # etc.
 busybox-sandbox-shell
 # For Overlay FS tests need `mount`, `umount`, and `unshare`.
 # For `script` command (ensuring a TTY)
 # TODO use `unixtools` to be precise over which executables instead?
 util-linux
 ];

 nativeBuildInputs = finalAttrs.passthru.externalNativeBuildInputs ++ [
 nix-cli
@@ -77,7 +77,15 @@ let
 { lib, pkgs, ... }:
 {
 imports = [ checkOverrideNixVersion ];
- nix.package = lib.mkForce pkgs.nixVersions.nix_2_3;
+ nix.package = lib.mkForce (
+ pkgs.nixVersions.nix_2_3.overrideAttrs (o: {
+ meta = o.meta // {
+ # This version shouldn't be used by end-users, but we run tests against
+ # it to ensure we don't break protocol compatibility.
+ knownVulnerabilities = [ ];
+ };
+ })
+ );
 };

 otherNixes.nix_2_13.setNixPackage =
@@ -88,6 +96,8 @@ let
 nixpkgs-23-11.legacyPackages.${pkgs.stdenv.hostPlatform.system}.nixVersions.nix_2_13.overrideAttrs
 (o: {
 meta = o.meta // {
+ # This version shouldn't be used by end-users, but we run tests against
+ # it to ensure we don't break protocol compatibility.
 knownVulnerabilities = [ ];
 };
 })
@@ -98,7 +108,9 @@ let
 { lib, pkgs, ... }:
 {
 imports = [ checkOverrideNixVersion ];
- nix.package = lib.mkForce pkgs.nixVersions.nix_2_18;
+ nix.package =
+ lib.mkForce
+ nixpkgs-23-11.legacyPackages.${pkgs.stdenv.hostPlatform.system}.nixVersions.nix_2_18;
 };

 in
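The version-pinning hunks above wrap the old Nix packages used only for protocol-compatibility testing in `overrideAttrs` and clear `meta.knownVulnerabilities` so evaluation does not refuse them. A minimal, self-contained sketch of that pattern follows; `pkgs.hello` is purely a stand-in package, not something this diff touches:

let
  pkgs = import <nixpkgs> { }; # assumption: <nixpkgs> resolves to a nixpkgs checkout
  patched = pkgs.hello.overrideAttrs (old: {
    meta = (old.meta or { }) // {
      knownVulnerabilities = [ ]; # test-only: silence the vulnerability warning
    };
  });
in
patched.meta.knownVulnerabilities # evaluates to [ ]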
@@ -45,14 +45,14 @@
 client.succeed("chmod 600 /root/.ssh/id_ed25519")

 # Install the SSH key on the builders.
- client.wait_for_unit("network-online.target")
+ client.wait_for_unit("network-addresses-eth1.service")

 remote.succeed("mkdir -p -m 700 /root/.ssh")
 remote.copy_from_host("key.pub", "/root/.ssh/authorized_keys")
 remote.wait_for_unit("sshd")
 remote.wait_for_unit("multi-user.target")
- remote.wait_for_unit("network-online.target")
- client.wait_for_unit("network-online.target")
+ remote.wait_for_unit("network-addresses-eth1.service")
+ client.wait_for_unit("network-addresses-eth1.service")
 client.succeed(f"ssh -o StrictHostKeyChecking=no {remote.name} 'echo hello world'")

 remote.succeed("""
@@ -187,9 +187,9 @@ in
 github.succeed("cat /var/log/httpd/*.log >&2")

 github.wait_for_unit("httpd.service")
- github.wait_for_unit("network-online.target")
+ github.wait_for_unit("network-addresses-eth1.service")

- client.wait_for_unit("network-online.target")
+ client.wait_for_unit("network-addresses-eth1.service")
 client.succeed("curl -v https://github.com/ >&2")
 out = client.succeed("nix registry list")
 print(out)
@@ -70,9 +70,9 @@ in
 server.copy_from_host("key.pub", "/root/.ssh/authorized_keys")
 server.wait_for_unit("sshd")
 server.wait_for_unit("multi-user.target")
- server.wait_for_unit("network-online.target")
+ server.wait_for_unit("network-addresses-eth1.service")

- client.wait_for_unit("network-online.target")
+ client.wait_for_unit("network-addresses-eth1.service")
 client.succeed(f"ssh -o StrictHostKeyChecking=no {server.name} 'echo hello world'")

 # Copy the closure of package A from the client to the server.
@@ -79,9 +79,9 @@ in

 server.wait_for_unit("sshd")
 server.wait_for_unit("multi-user.target")
- server.wait_for_unit("network-online.target")
+ server.wait_for_unit("network-addresses-eth1.service")

- client.wait_for_unit("network-online.target")
+ client.wait_for_unit("network-addresses-eth1.service")
 client.wait_for_unit("getty@tty1.service")
 # Either the prompt: ]#
 # or an OCR misreading of it: 1#
@@ -61,7 +61,7 @@ in
 { nodes }:
 ''
 cache.wait_for_unit("harmonia.service")
- cache.wait_for_unit("network-online.target")
+ cache.wait_for_unit("network-addresses-eth1.service")

 machine.succeed("mkdir -p /etc/containers")
 machine.succeed("""echo '{"default":[{"type":"insecureAcceptAnything"}]}' > /etc/containers/policy.json""")
@@ -145,7 +145,7 @@ in
 testScript =
 { nodes, ... }:
 ''
- http_dns.wait_for_unit("network-online.target")
+ http_dns.wait_for_unit("network-addresses-eth1.service")
 http_dns.wait_for_unit("nginx")
 http_dns.wait_for_open_port(80)
 http_dns.wait_for_unit("unbound")
@@ -153,7 +153,7 @@ in

 client.start()
 client.wait_for_unit('multi-user.target')
- client.wait_for_unit('network-online.target')
+ client.wait_for_unit('network-addresses-eth1.service')

 with subtest("can fetch data from a remote server outside sandbox"):
 client.succeed("nix --version >&2")
@@ -102,12 +102,12 @@ in
 client.succeed("chmod 600 /root/.ssh/id_ed25519")

 # Install the SSH key on the builder.
- client.wait_for_unit("network-online.target")
+ client.wait_for_unit("network-addresses-eth1.service")
 builder.succeed("mkdir -p -m 700 /root/.ssh")
 builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys")
 builder.wait_for_unit("sshd")
 builder.wait_for_unit("multi-user.target")
- builder.wait_for_unit("network-online.target")
+ builder.wait_for_unit("network-addresses-eth1.service")

 client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'")
@@ -123,12 +123,12 @@ in
 client.succeed("chmod 600 /root/.ssh/id_ed25519")

 # Install the SSH key on the builders.
- client.wait_for_unit("network-online.target")
+ client.wait_for_unit("network-addresses-eth1.service")
 for builder in [builder1, builder2]:
 builder.succeed("mkdir -p -m 700 /root/.ssh")
 builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys")
 builder.wait_for_unit("sshd")
- builder.wait_for_unit("network-online.target")
+ builder.wait_for_unit("network-addresses-eth1.service")
 # Make sure the builder can handle our login correctly
 builder.wait_for_unit("multi-user.target")
 # Make sure there's no funny business on the client either
@@ -67,14 +67,14 @@ in

 # Create a binary cache.
 server.wait_for_unit("minio")
- server.wait_for_unit("network-online.target")
+ server.wait_for_unit("network-addresses-eth1.service")

 server.succeed("mc config host add minio http://localhost:9000 ${accessKey} ${secretKey} --api s3v4")
 server.succeed("mc mb minio/my-cache")

 server.succeed("${env} nix copy --to '${storeUrl}' ${pkgA}")

- client.wait_for_unit("network-online.target")
+ client.wait_for_unit("network-addresses-eth1.service")

 # Test fetchurl on s3:// URLs while we're at it.
 client.succeed("${env} nix eval --impure --expr 'builtins.fetchurl { name = \"foo\"; url = \"s3://my-cache/nix-cache-info?endpoint=http://server:9000&region=eu-west-1\"; }'")
@@ -139,8 +139,8 @@ in
 start_all()

 sourcehut.wait_for_unit("httpd.service")
- sourcehut.wait_for_unit("network-online.target")
- client.wait_for_unit("network-online.target")
+ sourcehut.wait_for_unit("network-addresses-eth1.service")
+ client.wait_for_unit("network-addresses-eth1.service")

 client.succeed("curl -v https://git.sr.ht/ >&2")
 client.succeed("nix registry list | grep nixpkgs")