Merge pull request #13786 from NixOS/mergify/bp/2.28-maintenance/pr-13785

flake: nixpkgs: nixos-unstable -> nixos-25.05-small (backport #13785)
John Ericson 2025-08-18 16:57:02 -04:00 committed by GitHub
commit 265e535650
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
61 changed files with 655 additions and 570 deletions

View file

@ -1,2 +1,6 @@
# bulk initial re-formatting with clang-format
a5264aa46eadb89c055b4d1442e814edb2d4414e # !autorebase ./maintainers/format.sh --until-stable
# clang-format 18 -> clang-format 19
945d9d7264b0dc7d0a8c8edf1cab34f38cd49a7f # !autorebase ./maintainers/format.sh --until-stable
# nixfmt 1.0.0
448bbbe0fd1fbe09cb46a238fec25b220f172122 # !autorebase ./maintainers/format.sh --until-stable
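
These ignore-revs entries only take effect when git blame is pointed at the file. A minimal usage sketch, assuming the file keeps its conventional name at the repository root (the source path below is only a placeholder):

    # per invocation
    git blame --ignore-revs-file .git-blame-ignore-revs -- <path/to/source-file>

    # or configure it once for the clone
    git config blame.ignoreRevsFile .git-blame-ignore-revs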

View file

@ -38,60 +38,58 @@ let
]
++ extraPkgs;
users =
{
users = {
root = {
uid = 0;
shell = "${pkgs.bashInteractive}/bin/bash";
home = "/root";
gid = 0;
groups = [ "root" ];
description = "System administrator";
};
nobody = {
uid = 65534;
shell = "${pkgs.shadow}/bin/nologin";
home = "/var/empty";
gid = 65534;
groups = [ "nobody" ];
description = "Unprivileged account (don't use!)";
};
}
// lib.optionalAttrs (uid != 0) {
"${uname}" = {
uid = uid;
shell = "${pkgs.bashInteractive}/bin/bash";
home = "/home/${uname}";
gid = gid;
groups = [ "${gname}" ];
description = "Nix user";
};
}
// lib.listToAttrs (
map (n: {
name = "nixbld${toString n}";
value = {
uid = 30000 + n;
gid = 30000;
groups = [ "nixbld" ];
description = "Nix build user ${toString n}";
};
}) (lib.lists.range 1 32)
);
groups =
{
root.gid = 0;
nixbld.gid = 30000;
nobody.gid = 65534;
}
// lib.optionalAttrs (gid != 0) {
"${gname}".gid = gid;
root = {
uid = 0;
shell = "${pkgs.bashInteractive}/bin/bash";
home = "/root";
gid = 0;
groups = [ "root" ];
description = "System administrator";
};
nobody = {
uid = 65534;
shell = "${pkgs.shadow}/bin/nologin";
home = "/var/empty";
gid = 65534;
groups = [ "nobody" ];
description = "Unprivileged account (don't use!)";
};
}
// lib.optionalAttrs (uid != 0) {
"${uname}" = {
uid = uid;
shell = "${pkgs.bashInteractive}/bin/bash";
home = "/home/${uname}";
gid = gid;
groups = [ "${gname}" ];
description = "Nix user";
};
}
// lib.listToAttrs (
map (n: {
name = "nixbld${toString n}";
value = {
uid = 30000 + n;
gid = 30000;
groups = [ "nixbld" ];
description = "Nix build user ${toString n}";
};
}) (lib.lists.range 1 32)
);
groups = {
root.gid = 0;
nixbld.gid = 30000;
nobody.gid = 65534;
}
// lib.optionalAttrs (gid != 0) {
"${gname}".gid = gid;
};
userToPasswd = (
k:
{

flake.lock generated
View file

@ -63,16 +63,16 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1734359947,
"narHash": "sha256-1Noao/H+N8nFB4Beoy8fgwrcOQLVm9o4zKW1ODaqK9E=",
"lastModified": 1755442223,
"narHash": "sha256-VtMQg02B3kt1oejwwrGn50U9Xbjgzfbb5TV5Wtx8dKI=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "48d12d5e70ee91fe8481378e540433a7303dbf6a",
"rev": "cd32a774ac52caaa03bcfc9e7591ac8c18617ced",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "release-24.11",
"ref": "nixos-25.05-small",
"repo": "nixpkgs",
"type": "github"
}

View file

@ -1,7 +1,7 @@
{
description = "The purely functional package manager";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/release-24.11";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small";
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446";
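
The flake.lock hunk above follows mechanically from this input change. A sketch of how such a lock entry is typically regenerated, not necessarily the exact command used for this backport:

    # newer Nix (2.19+)
    nix flake update nixpkgs

    # older Nix
    nix flake lock --update-input nixpkgs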

View file

@ -54,12 +54,12 @@ let
preConfigure =
prevAttrs.preConfigure or ""
+
# Update the repo-global .version file.
# Symlink ./.version points there, but by default only workDir is writable.
''
chmod u+w ./.version
echo ${finalAttrs.version} > ./.version
'';
# Update the repo-global .version file.
# Symlink ./.version points there, but by default only workDir is writable.
''
chmod u+w ./.version
echo ${finalAttrs.version} > ./.version
'';
};
localSourceLayer =
@ -148,7 +148,8 @@ let
nativeBuildInputs = [
meson
ninja
] ++ prevAttrs.nativeBuildInputs or [ ];
]
++ prevAttrs.nativeBuildInputs or [ ];
mesonCheckFlags = prevAttrs.mesonCheckFlags or [ ] ++ [
"--print-errorlogs"
];

View file

@ -46,24 +46,24 @@
}:
let
libs =
{
inherit
nix-util
nix-util-c
nix-store
nix-store-c
nix-fetchers
nix-expr
nix-expr-c
nix-flake
nix-flake-c
nix-main
nix-main-c
nix-cmd
;
}
// lib.optionalAttrs
libs = {
inherit
nix-util
nix-util-c
nix-store
nix-store-c
nix-fetchers
nix-expr
nix-expr-c
nix-flake
nix-flake-c
nix-main
nix-main-c
nix-cmd
;
}
//
lib.optionalAttrs
(!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform)
{
# Currently fails in static build
@ -125,20 +125,19 @@ stdenv.mkDerivation (finalAttrs: {
*/
dontFixup = true;
checkInputs =
[
# Make sure the unit tests have passed
nix-util-tests.tests.run
nix-store-tests.tests.run
nix-expr-tests.tests.run
nix-fetchers-tests.tests.run
nix-flake-tests.tests.run
checkInputs = [
# Make sure the unit tests have passed
nix-util-tests.tests.run
nix-store-tests.tests.run
nix-expr-tests.tests.run
nix-fetchers-tests.tests.run
nix-flake-tests.tests.run
# Make sure the functional tests have passed
nix-functional-tests
]
++ lib.optionals
(!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform)
# Make sure the functional tests have passed
nix-functional-tests
]
++
lib.optionals (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform)
[
# Perl currently fails in static build
# TODO: Split out tests into a separate derivation?

View file

@ -505,8 +505,9 @@ Installables SourceExprCommand::parseInstallables(ref<Store> store, std::vector<
for (auto & s : ss) {
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(s);
result.push_back(make_ref<InstallableAttrPath>(
InstallableAttrPath::parse(state, *this, vFile, std::move(prefix), std::move(extendedOutputsSpec))));
result.push_back(
make_ref<InstallableAttrPath>(InstallableAttrPath::parse(
state, *this, vFile, std::move(prefix), std::move(extendedOutputsSpec))));
}
} else {
@ -521,8 +522,9 @@ Installables SourceExprCommand::parseInstallables(ref<Store> store, std::vector<
if (prefix.find('/') != std::string::npos) {
try {
result.push_back(make_ref<InstallableDerivedPath>(
InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec.raw)));
result.push_back(
make_ref<InstallableDerivedPath>(
InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec.raw)));
continue;
} catch (BadStorePath &) {
} catch (...) {
@ -534,15 +536,16 @@ Installables SourceExprCommand::parseInstallables(ref<Store> store, std::vector<
try {
auto [flakeRef, fragment] =
parseFlakeRefWithFragment(fetchSettings, std::string{prefix}, absPath(getCommandBaseDir()));
result.push_back(make_ref<InstallableFlake>(
this,
getEvalState(),
std::move(flakeRef),
fragment,
std::move(extendedOutputsSpec),
getDefaultFlakeAttrPaths(),
getDefaultFlakeAttrPathPrefixes(),
lockFlags));
result.push_back(
make_ref<InstallableFlake>(
this,
getEvalState(),
std::move(flakeRef),
fragment,
std::move(extendedOutputsSpec),
getDefaultFlakeAttrPaths(),
getDefaultFlakeAttrPathPrefixes(),
lockFlags));
continue;
} catch (...) {
ex = std::current_exception();
@ -610,10 +613,11 @@ static void throwBuildErrors(std::vector<KeyedBuildResult> & buildResults, const
StringSet failedPaths;
for (; failedResult != failed.end(); failedResult++) {
if (!failedResult->errorMsg.empty()) {
logError(ErrorInfo{
.level = lvlError,
.msg = failedResult->errorMsg,
});
logError(
ErrorInfo{
.level = lvlError,
.msg = failedResult->errorMsg,
});
}
failedPaths.insert(failedResult->path.to_string(store));
}

View file

@ -53,7 +53,8 @@ mkMesonLibrary (finalAttrs: {
buildInputs = [
({ inherit editline readline; }.${readlineFlavor})
] ++ lib.optional enableMarkdown lowdown;
]
++ lib.optional enableMarkdown lowdown;
propagatedBuildInputs = [
nix-util

View file

@ -650,7 +650,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
for (auto & arg : args)
arg = "*" + arg + "*";
markdown += "**Synopsis:** `builtins." + (std::string)(*doc->name) + "` " + concatStringsSep(" ", args)
markdown += "**Synopsis:** `builtins." + (std::string) (*doc->name) + "` " + concatStringsSep(" ", args)
+ "\n\n";
}

View file

@ -220,25 +220,28 @@ INSTANTIATE_TEST_SUITE_P(
// expanded.
#define X_EXPAND_IF0(k, v) k "." v
#define X_EXPAND_IF1(k, v) k " = { " v " };"
#define X4(w, x, y, z) \
TEST_F(TrivialExpressionTest, nestedAttrsetMerge##w##x##y##z) \
{ \
auto v = eval("{ a.b = { c = 1; d = 2; }; } == { " X_EXPAND_IF##w( \
"a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " }"); \
ASSERT_THAT(v, IsTrue()); \
}; \
TEST_F(TrivialExpressionTest, nestedAttrsetMergeDup##w##x##y##z) \
{ \
ASSERT_THROW( \
eval("{ " X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y( \
"a", X_EXPAND_IF##z("b", "c = 2;")) " }"), \
Error); \
}; \
TEST_F(TrivialExpressionTest, nestedAttrsetMergeLet##w##x##y##z) \
{ \
auto v = eval("{ b = { c = 1; d = 2; }; } == (let " X_EXPAND_IF##w( \
"a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " in a)"); \
ASSERT_THAT(v, IsTrue()); \
#define X4(w, x, y, z) \
TEST_F(TrivialExpressionTest, nestedAttrsetMerge##w##x##y##z) \
{ \
auto v = eval( \
"{ a.b = { c = 1; d = 2; }; } == { " X_EXPAND_IF##w( \
"a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " }"); \
ASSERT_THAT(v, IsTrue()); \
}; \
TEST_F(TrivialExpressionTest, nestedAttrsetMergeDup##w##x##y##z) \
{ \
ASSERT_THROW( \
eval( \
"{ " X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y( \
"a", X_EXPAND_IF##z("b", "c = 2;")) " }"), \
Error); \
}; \
TEST_F(TrivialExpressionTest, nestedAttrsetMergeLet##w##x##y##z) \
{ \
auto v = eval( \
"{ b = { c = 1; d = 2; }; } == (let " X_EXPAND_IF##w( \
"a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " in a)"); \
ASSERT_THAT(v, IsTrue()); \
};
#define X3(...) X4(__VA_ARGS__, 0) X4(__VA_ARGS__, 1)
#define X2(...) X3(__VA_ARGS__, 0) X3(__VA_ARGS__, 1)

View file

@ -123,13 +123,13 @@ struct AttrDb
return doSQLite([&]() {
auto state(_state->lock());
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::FullAttrs)(0, false).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::FullAttrs) (0, false).exec();
AttrId rowId = state->db.getLastInsertedRowId();
assert(rowId);
for (auto & attr : attrs)
state->insertAttribute.use()(rowId)(symbols[attr])(AttrType::Placeholder)(0, false).exec();
state->insertAttribute.use()(rowId)(symbols[attr])(AttrType::Placeholder) (0, false).exec();
return rowId;
});
@ -147,10 +147,10 @@ struct AttrDb
ctx.push_back(' ');
ctx.append(*p);
}
state->insertAttributeWithContext.use()(key.first)(symbols[key.second])(AttrType::String)(s) (ctx)
state->insertAttributeWithContext.use()(key.first)(symbols[key.second])(AttrType::String) (s) (ctx)
.exec();
} else {
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::String)(s).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::String) (s).exec();
}
return state->db.getLastInsertedRowId();
@ -162,7 +162,7 @@ struct AttrDb
return doSQLite([&]() {
auto state(_state->lock());
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Bool)(b ? 1 : 0).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Bool) (b ? 1 : 0).exec();
return state->db.getLastInsertedRowId();
});
@ -173,7 +173,7 @@ struct AttrDb
return doSQLite([&]() {
auto state(_state->lock());
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Int)(n).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Int) (n).exec();
return state->db.getLastInsertedRowId();
});
@ -185,8 +185,8 @@ struct AttrDb
auto state(_state->lock());
state->insertAttribute
.use()(key.first)(symbols[key.second])(AttrType::ListOfStrings)(
dropEmptyInitThenConcatStringsSep("\t", l))
.use()(key.first)(symbols[key.second])(
AttrType::ListOfStrings) (dropEmptyInitThenConcatStringsSep("\t", l))
.exec();
return state->db.getLastInsertedRowId();
@ -198,7 +198,7 @@ struct AttrDb
return doSQLite([&]() {
auto state(_state->lock());
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Placeholder)(0, false).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Placeholder) (0, false).exec();
return state->db.getLastInsertedRowId();
});
@ -209,7 +209,7 @@ struct AttrDb
return doSQLite([&]() {
auto state(_state->lock());
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Missing)(0, false).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Missing) (0, false).exec();
return state->db.getLastInsertedRowId();
});
@ -220,7 +220,7 @@ struct AttrDb
return doSQLite([&]() {
auto state(_state->lock());
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Misc)(0, false).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Misc) (0, false).exec();
return state->db.getLastInsertedRowId();
});
@ -231,7 +231,7 @@ struct AttrDb
return doSQLite([&]() {
auto state(_state->lock());
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Failed)(0, false).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Failed) (0, false).exec();
return state->db.getLastInsertedRowId();
});

View file

@ -44,12 +44,13 @@ EvalErrorBuilder<T> & EvalErrorBuilder<T>::withFrame(const Env & env, const Expr
// NOTE: This is abusing side-effects.
// TODO: check compatibility with nested debugger calls.
// TODO: What side-effects??
error.state.debugTraces.push_front(DebugTrace{
.pos = expr.getPos(),
.expr = expr,
.env = env,
.hint = HintFmt("Fake frame for debugging purposes"),
.isError = true});
error.state.debugTraces.push_front(
DebugTrace{
.pos = expr.getPos(),
.expr = expr,
.env = env,
.hint = HintFmt("Fake frame for debugging purposes"),
.isError = true});
return *this;
}

View file

@ -795,8 +795,9 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
printError("%s\n", error->what());
if (trylevel > 0 && error->info().level != lvlInfo)
printError("This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL
" to skip these.\n");
printError(
"This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL
" to skip these.\n");
}
auto se = getStaticEnv(expr);

View file

@ -70,13 +70,15 @@ mkMesonLibrary (finalAttrs: {
nix-util
nix-store
nix-fetchers
] ++ finalAttrs.passthru.externalPropagatedBuildInputs;
]
++ finalAttrs.passthru.externalPropagatedBuildInputs;
# Hack for sake of the dev shell
passthru.externalPropagatedBuildInputs = [
boost
nlohmann_json
] ++ lib.optional enableGC boehmgc;
]
++ lib.optional enableGC boehmgc;
mesonFlags = [
(lib.mesonEnable "gc" enableGC)

View file

@ -68,10 +68,11 @@ StringMap EvalState::realiseContext(const NixStringContext & context, StorePathS
std::visit(
overloaded{
[&](const NixStringContextElem::Built & b) {
drvs.push_back(DerivedPath::Built{
.drvPath = b.drvPath,
.outputs = OutputsSpec::Names{b.output},
});
drvs.push_back(
DerivedPath::Built{
.drvPath = b.drvPath,
.outputs = OutputsSpec::Names{b.output},
});
ensureValid(b.drvPath->getBaseStorePath());
},
[&](const NixStringContextElem::Opaque & o) {
@ -117,10 +118,11 @@ StringMap EvalState::realiseContext(const NixStringContext & context, StorePathS
/* Get all the output paths corresponding to the placeholders we had */
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
res.insert_or_assign(
DownstreamPlaceholder::fromSingleDerivedPathBuilt(SingleDerivedPath::Built{
.drvPath = drv.drvPath,
.output = outputName,
})
DownstreamPlaceholder::fromSingleDerivedPathBuilt(
SingleDerivedPath::Built{
.drvPath = drv.drvPath,
.output = outputName,
})
.render(),
buildStore->printStorePath(outputPath));
}
@ -297,10 +299,11 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
}
}
static RegisterPrimOp primop_scopedImport(PrimOp{
.name = "scopedImport", .arity = 2, .fun = [](EvalState & state, const PosIdx pos, Value ** args, Value & v) {
import(state, pos, *args[1], args[0], v);
}});
static RegisterPrimOp primop_scopedImport(
PrimOp{
.name = "scopedImport", .arity = 2, .fun = [](EvalState & state, const PosIdx pos, Value ** args, Value & v) {
import(state, pos, *args[1], args[0], v);
}});
static RegisterPrimOp primop_import(
{.name = "import",
@ -806,11 +809,12 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value ** ar
v.mkList(list);
}
static RegisterPrimOp primop_genericClosure(PrimOp{
.name = "__genericClosure",
.args = {"attrset"},
.arity = 1,
.doc = R"(
static RegisterPrimOp primop_genericClosure(
PrimOp{
.name = "__genericClosure",
.args = {"attrset"},
.arity = 1,
.doc = R"(
`builtins.genericClosure` iteratively computes the transitive closure over an arbitrary relation defined by a function.
It takes *attrset* with two attributes named `startSet` and `operator`, and returns a list of attribute sets:
@ -860,8 +864,8 @@ static RegisterPrimOp primop_genericClosure(PrimOp{
> [ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ]
> ```
)",
.fun = prim_genericClosure,
});
.fun = prim_genericClosure,
});
static RegisterPrimOp primop_break(
{.name = "break",
@ -872,11 +876,12 @@ static RegisterPrimOp primop_break(
)",
.fun = [](EvalState & state, const PosIdx pos, Value ** args, Value & v) {
if (state.canDebug()) {
auto error = Error(ErrorInfo{
.level = lvlInfo,
.msg = HintFmt("breakpoint reached"),
.pos = state.positions[pos],
});
auto error = Error(
ErrorInfo{
.level = lvlInfo,
.msg = HintFmt("breakpoint reached"),
.pos = state.positions[pos],
});
state.runDebugRepl(&error);
}
@ -940,13 +945,14 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value ** a
}
}
static RegisterPrimOp primop_addErrorContext(PrimOp{
.name = "__addErrorContext",
.arity = 2,
// The normal trace item is redundant
.addTrace = false,
.fun = prim_addErrorContext,
});
static RegisterPrimOp primop_addErrorContext(
PrimOp{
.name = "__addErrorContext",
.arity = 2,
// The normal trace item is redundant
.addTrace = false,
.fun = prim_addErrorContext,
});
static void prim_ceil(EvalState & state, const PosIdx pos, Value ** args, Value & v)
{
@ -1656,11 +1662,12 @@ static void derivationStrictInternal(EvalState & state, const std::string & drvN
v.mkAttrs(result);
}
static RegisterPrimOp primop_derivationStrict(PrimOp{
.name = "derivationStrict",
.arity = 1,
.fun = prim_derivationStrict,
});
static RegisterPrimOp primop_derivationStrict(
PrimOp{
.name = "derivationStrict",
.arity = 1,
.fun = prim_derivationStrict,
});
/* Return a placeholder string for the specified output that will be
substituted by the corresponding output path at build time. For
@ -1898,9 +1905,10 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value ** args, Va
}
NixStringContext context;
for (auto && p : std::move(refs)) {
context.insert(NixStringContextElem::Opaque{
.path = std::move((StorePath &&) p),
});
context.insert(
NixStringContextElem::Opaque{
.path = std::move((StorePath &&) p),
});
}
v.mkString(s, context);
}
@ -1956,10 +1964,11 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value ** args, Va
.debugThrow();
}
lookupPath.elements.emplace_back(LookupPath::Elem{
.prefix = LookupPath::Prefix{.s = prefix},
.path = LookupPath::Path{.s = path},
});
lookupPath.elements.emplace_back(
LookupPath::Elem{
.prefix = LookupPath::Prefix{.s = prefix},
.path = LookupPath::Path{.s = path},
});
}
auto path =
@ -1968,10 +1977,11 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value ** args, Va
v.mkPath(state.findFile(lookupPath, path, pos));
}
static RegisterPrimOp primop_findFile(PrimOp{
.name = "__findFile",
.args = {"search-path", "lookup-path"},
.doc = R"(
static RegisterPrimOp primop_findFile(
PrimOp{
.name = "__findFile",
.args = {"search-path", "lookup-path"},
.doc = R"(
Find *lookup-path* in *search-path*.
[Lookup path](@docroot@/language/constructs/lookup-path.md) expressions are [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](#builtins-nixPath):
@ -2099,8 +2109,8 @@ static RegisterPrimOp primop_findFile(PrimOp{
>
> makes `<nixpkgs>` refer to a particular branch of the `NixOS/nixpkgs` repository on GitHub.
)",
.fun = prim_findFile,
});
.fun = prim_findFile,
});
/* Return the cryptographic hash of a file in base-16. */
static void prim_hashFile(EvalState & state, const PosIdx pos, Value ** args, Value & v)
@ -2871,11 +2881,12 @@ static void prim_unsafeGetAttrPos(EvalState & state, const PosIdx pos, Value **
state.mkPos(v, i->pos);
}
static RegisterPrimOp primop_unsafeGetAttrPos(PrimOp{
.name = "__unsafeGetAttrPos",
.arity = 2,
.fun = prim_unsafeGetAttrPos,
});
static RegisterPrimOp primop_unsafeGetAttrPos(
PrimOp{
.name = "__unsafeGetAttrPos",
.arity = 2,
.fun = prim_unsafeGetAttrPos,
});
// access to exact position information (ie, line and colum numbers) is deferred
// due to the cost associated with calculating that information and how rarely

View file

@ -276,9 +276,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value ** arg
if (auto attr = i.value->attrs()->get(sPath)) {
if (state.forceBool(*attr->value, attr->pos, "while evaluating the `path` attribute of a string context"))
context.emplace(NixStringContextElem::Opaque{
.path = namePath,
});
context.emplace(
NixStringContextElem::Opaque{
.path = namePath,
});
}
if (auto attr = i.value->attrs()->get(sAllOutputs)) {
@ -291,9 +292,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value ** arg
.atPos(i.pos)
.debugThrow();
}
context.emplace(NixStringContextElem::DrvDeep{
.drvPath = namePath,
});
context.emplace(
NixStringContextElem::DrvDeep{
.drvPath = namePath,
});
}
}
@ -309,10 +311,11 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value ** arg
for (auto elem : attr->value->listItems()) {
auto outputName =
state.forceStringNoCtx(*elem, attr->pos, "while evaluating an output name within a string context");
context.emplace(NixStringContextElem::Built{
.drvPath = makeConstantStorePathRef(namePath),
.output = std::string{outputName},
});
context.emplace(
NixStringContextElem::Built{
.drvPath = makeConstantStorePathRef(namePath),
.output = std::string{outputName},
});
}
}
}

View file

@ -44,10 +44,11 @@ static void downloadToSink(
static std::string getLfsApiToken(const ParsedURL & url)
{
auto [status, output] = runProgram(RunOptions{
.program = "ssh",
.args = {*url.authority, "git-lfs-authenticate", url.path, "download"},
});
auto [status, output] = runProgram(
RunOptions{
.program = "ssh",
.args = {*url.authority, "git-lfs-authenticate", url.path, "download"},
});
if (output.empty())
throw Error(

View file

@ -389,11 +389,12 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
continue;
std::string key2(key, 0, key.size() - 5);
auto path = CanonPath(value);
result.push_back(Submodule{
.path = path,
.url = entries[key2 + ".url"],
.branch = entries[key2 + ".branch"],
});
result.push_back(
Submodule{
.path = path,
.url = entries[key2 + ".url"],
.branch = entries[key2 + ".branch"],
});
}
return result;
@ -533,14 +534,15 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
append(gitArgs, {"--depth", "1"});
append(gitArgs, {std::string("--"), url, refspec});
runProgram(RunOptions{
.program = "git",
.lookupPath = true,
// FIXME: git stderr messes up our progress indicator, so
// we're using --quiet for now. Should process its stderr.
.args = gitArgs,
.input = {},
.isInteractive = true});
runProgram(
RunOptions{
.program = "git",
.lookupPath = true,
// FIXME: git stderr messes up our progress indicator, so
// we're using --quiet for now. Should process its stderr.
.args = gitArgs,
.input = {},
.isInteractive = true});
}
void verifyCommit(const Hash & rev, const std::vector<fetchers::PublicKey> & publicKeys) override
@ -566,17 +568,18 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
writeFile(allowedSignersFile, allowedSigners);
// Run verification command
auto [status, output] = runProgram(RunOptions{
.program = "git",
.args =
{"-c",
"gpg.ssh.allowedSignersFile=" + allowedSignersFile,
"-C",
path.string(),
"verify-commit",
rev.gitRev()},
.mergeStderrToStdout = true,
});
auto [status, output] = runProgram(
RunOptions{
.program = "git",
.args =
{"-c",
"gpg.ssh.allowedSignersFile=" + allowedSignersFile,
"-C",
path.string(),
"verify-commit",
rev.gitRev()},
.mergeStderrToStdout = true,
});
/* Evaluate result through status code and checking if public
key fingerprints appear on stderr. This is neccessary

View file

@ -56,12 +56,13 @@ Path getCachePath(std::string_view key, bool shallow)
// ...
std::optional<std::string> readHead(const Path & path)
{
auto [status, output] = runProgram(RunOptions{
.program = "git",
// FIXME: use 'HEAD' to avoid returning all refs
.args = {"ls-remote", "--symref", path},
.isInteractive = true,
});
auto [status, output] = runProgram(
RunOptions{
.program = "git",
// FIXME: use 'HEAD' to avoid returning all refs
.args = {"ls-remote", "--symref", path},
.isInteractive = true,
});
if (status != 0)
return std::nullopt;
@ -325,17 +326,18 @@ struct GitInputScheme : InputScheme
writeFile(*repoPath / path.rel(), contents);
auto result = runProgram(RunOptions{
.program = "git",
.args =
{"-C",
repoPath->string(),
"--git-dir",
repoInfo.gitDir,
"check-ignore",
"--quiet",
std::string(path.rel())},
});
auto result = runProgram(
RunOptions{
.program = "git",
.args =
{"-C",
repoPath->string(),
"--git-dir",
repoInfo.gitDir,
"check-ignore",
"--quiet",
std::string(path.rel())},
});
auto exitCode =
#ifndef WIN32 // TODO abstract over exit status handling on Windows
WEXITSTATUS(result.first)

View file

@ -35,11 +35,12 @@ std::shared_ptr<Registry> Registry::read(const Settings & settings, const Path &
toAttrs.erase(j);
}
auto exact = i.find("exact");
registry->entries.push_back(Entry{
.from = Input::fromAttrs(settings, jsonToAttrs(i["from"])),
.to = Input::fromAttrs(settings, std::move(toAttrs)),
.extraAttrs = extraAttrs,
.exact = exact != i.end() && exact.value()});
registry->entries.push_back(
Entry{
.from = Input::fromAttrs(settings, jsonToAttrs(i["from"])),
.to = Input::fromAttrs(settings, std::move(toAttrs)),
.extraAttrs = extraAttrs,
.exact = exact != i.end() && exact.value()});
}
}

View file

@ -14,8 +14,9 @@ Gen<OutputsSpec> Arbitrary<OutputsSpec>::arbitrary()
return gen::just((OutputsSpec) OutputsSpec::All{});
case 1:
return gen::map(
gen::nonEmpty(gen::container<StringSet>(
gen::map(gen::arbitrary<StorePathName>(), [](StorePathName n) { return n.name; }))),
gen::nonEmpty(
gen::container<StringSet>(
gen::map(gen::arbitrary<StorePathName>(), [](StorePathName n) { return n.name; }))),
[](StringSet names) { return (OutputsSpec) OutputsSpec::Names{names}; });
default:
assert(false);

View file

@ -82,15 +82,17 @@ VERSIONED_CHARACTERIZATION_TEST(
.path = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
},
DerivedPath::Built{
.drvPath = makeConstantStorePathRef(StorePath{
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
}),
.drvPath = makeConstantStorePathRef(
StorePath{
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
}),
.outputs = OutputsSpec::All{},
},
DerivedPath::Built{
.drvPath = makeConstantStorePathRef(StorePath{
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
}),
.drvPath = makeConstantStorePathRef(
StorePath{
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
}),
.outputs = OutputsSpec::Names{"x", "y"},
},
}))
@ -108,15 +110,17 @@ VERSIONED_CHARACTERIZATION_TEST(
.path = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"},
},
DerivedPath::Built{
.drvPath = makeConstantStorePathRef(StorePath{
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
}),
.drvPath = makeConstantStorePathRef(
StorePath{
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
}),
.outputs = OutputsSpec::All{},
},
DerivedPath::Built{
.drvPath = makeConstantStorePathRef(StorePath{
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
}),
.drvPath = makeConstantStorePathRef(
StorePath{
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
}),
.outputs = OutputsSpec::Names{"x", "y"},
},
}))
@ -369,9 +373,10 @@ VERSIONED_CHARACTERIZATION_TEST(WorkerProtoTest, keyedBuildResult_1_29, "keyed-b
},
/* .path = */
DerivedPath::Built{
.drvPath = makeConstantStorePathRef(StorePath{
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
}),
.drvPath = makeConstantStorePathRef(
StorePath{
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
}),
.outputs = OutputsSpec::Names{"out"},
},
},

View file

@ -1456,10 +1456,11 @@ std::pair<bool, SingleDrvOutputs> DerivationGoal::checkPathValidity()
// derivation, and the output path is valid, but we don't have
// its realisation stored (probably because it has been built
// without the `ca-derivations` experimental flag).
worker.store.registerDrvOutput(Realisation{
drvOutput,
info.known->path,
});
worker.store.registerDrvOutput(
Realisation{
drvOutput,
info.known->path,
});
}
}
if (info.known && info.known->isValid())
@ -1551,10 +1552,11 @@ void DerivationGoal::waiteeDone(GoalPtr waitee, ExitCode result)
auto & outputs = nodeP->value;
for (auto & outputName : outputs) {
auto buildResult = dg->getBuildResult(DerivedPath::Built{
.drvPath = makeConstantStorePathRef(dg->drvPath),
.outputs = OutputsSpec::Names{outputName},
});
auto buildResult = dg->getBuildResult(
DerivedPath::Built{
.drvPath = makeConstantStorePathRef(dg->drvPath),
.outputs = OutputsSpec::Names{outputName},
});
if (buildResult.success()) {
auto i = buildResult.builtOutputs.find(outputName);
if (i != buildResult.builtOutputs.end())

View file

@ -68,10 +68,11 @@ std::vector<KeyedBuildResult> Store::buildPathsWithResults(
results.reserve(state.size());
for (auto & [req, goalPtr] : state)
results.emplace_back(KeyedBuildResult{
goalPtr->getBuildResult(req),
/* .path = */ req,
});
results.emplace_back(
KeyedBuildResult{
goalPtr->getBuildResult(req),
/* .path = */ req,
});
return results;
}
@ -88,10 +89,11 @@ BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivat
try {
worker.run(Goals{goal});
return goal->getBuildResult(DerivedPath::Built{
.drvPath = makeConstantStorePathRef(drvPath),
.outputs = OutputsSpec::All{},
});
return goal->getBuildResult(
DerivedPath::Built{
.drvPath = makeConstantStorePathRef(drvPath),
.outputs = OutputsSpec::All{},
});
} catch (Error & e) {
return BuildResult{
.status = BuildResult::MiscFailure,

View file

@ -279,10 +279,11 @@ void Worker::run(const Goals & _topGoals)
for (auto & i : _topGoals) {
topGoals.insert(i);
if (auto goal = dynamic_cast<DerivationGoal *>(i.get())) {
topPaths.push_back(DerivedPath::Built{
.drvPath = makeConstantStorePathRef(goal->drvPath),
.outputs = goal->wantedOutputs,
});
topPaths.push_back(
DerivedPath::Built{
.drvPath = makeConstantStorePathRef(goal->drvPath),
.outputs = goal->wantedOutputs,
});
} else if (auto goal = dynamic_cast<PathSubstitutionGoal *>(i.get())) {
topPaths.push_back(DerivedPath::Opaque{goal->storePath});
}

View file

@ -39,10 +39,11 @@ LegacySSHStore::LegacySSHStore(std::string_view scheme, std::string_view host, c
, CommonSSHStoreConfig(scheme, host, params)
, LegacySSHStoreConfig(scheme, host, params)
, Store(params)
, connections(make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections),
[this]() { return openConnection(); },
[](const ref<Connection> & r) { return r->good; }))
, connections(
make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections),
[this]() { return openConnection(); },
[](const ref<Connection> & r) { return r->good; }))
, master(createSSHMaster(
// Use SSH master only if using more than 1 connection.
connections->capacity() > 1,

View file

@ -15,15 +15,16 @@ Machine::Machine(
decltype(supportedFeatures) supportedFeatures,
decltype(mandatoryFeatures) mandatoryFeatures,
decltype(sshPublicHostKey) sshPublicHostKey)
: storeUri(StoreReference::parse(
// Backwards compatibility: if the URI is schemeless, is not a path,
// and is not one of the special store connection words, prepend
// ssh://.
storeUri.find("://") != std::string::npos || storeUri.find("/") != std::string::npos || storeUri == "auto"
|| storeUri == "daemon" || storeUri == "local" || hasPrefix(storeUri, "auto?")
|| hasPrefix(storeUri, "daemon?") || hasPrefix(storeUri, "local?") || hasPrefix(storeUri, "?")
? storeUri
: "ssh://" + storeUri))
: storeUri(
StoreReference::parse(
// Backwards compatibility: if the URI is schemeless, is not a path,
// and is not one of the special store connection words, prepend
// ssh://.
storeUri.find("://") != std::string::npos || storeUri.find("/") != std::string::npos || storeUri == "auto"
|| storeUri == "daemon" || storeUri == "local" || hasPrefix(storeUri, "auto?")
|| hasPrefix(storeUri, "daemon?") || hasPrefix(storeUri, "local?") || hasPrefix(storeUri, "?")
? storeUri
: "ssh://" + storeUri))
, systemTypes(systemTypes)
, sshKey(sshKey)
, maxJobs(maxJobs)
@ -171,22 +172,23 @@ static Machine parseBuilderLine(const std::set<std::string> & defaultSystems, co
// TODO use designated initializers, once C++ supports those with
// custom constructors.
return {// `storeUri`
tokens[0],
// `systemTypes`
isSet(1) ? tokenizeString<std::set<std::string>>(tokens[1], ",") : defaultSystems,
// `sshKey`
isSet(2) ? tokens[2] : "",
// `maxJobs`
isSet(3) ? parseUnsignedIntField(3) : 1U,
// `speedFactor`
isSet(4) ? parseFloatField(4) : 1.0f,
// `supportedFeatures`
isSet(5) ? tokenizeString<std::set<std::string>>(tokens[5], ",") : std::set<std::string>{},
// `mandatoryFeatures`
isSet(6) ? tokenizeString<std::set<std::string>>(tokens[6], ",") : std::set<std::string>{},
// `sshPublicHostKey`
isSet(7) ? ensureBase64(7) : ""};
return {
// `storeUri`
tokens[0],
// `systemTypes`
isSet(1) ? tokenizeString<std::set<std::string>>(tokens[1], ",") : defaultSystems,
// `sshKey`
isSet(2) ? tokens[2] : "",
// `maxJobs`
isSet(3) ? parseUnsignedIntField(3) : 1U,
// `speedFactor`
isSet(4) ? parseFloatField(4) : 1.0f,
// `supportedFeatures`
isSet(5) ? tokenizeString<std::set<std::string>>(tokens[5], ",") : std::set<std::string>{},
// `mandatoryFeatures`
isSet(6) ? tokenizeString<std::set<std::string>>(tokens[6], ",") : std::set<std::string>{},
// `sshPublicHostKey`
isSet(7) ? ensureBase64(7) : ""};
}
static Machines

View file

@ -58,30 +58,28 @@ mkMesonLibrary (finalAttrs: {
nativeBuildInputs = lib.optional embeddedSandboxShell unixtools.hexdump;
buildInputs =
[
boost
curl
sqlite
]
++ lib.optional stdenv.hostPlatform.isLinux libseccomp
# There have been issues building these dependencies
++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox
++ lib.optional withAWS aws-sdk-cpp;
buildInputs = [
boost
curl
sqlite
]
++ lib.optional stdenv.hostPlatform.isLinux libseccomp
# There have been issues building these dependencies
++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox
++ lib.optional withAWS aws-sdk-cpp;
propagatedBuildInputs = [
nix-util
nlohmann_json
];
mesonFlags =
[
(lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux)
(lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell)
]
++ lib.optionals stdenv.hostPlatform.isLinux [
(lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox")
];
mesonFlags = [
(lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux)
(lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell)
]
++ lib.optionals stdenv.hostPlatform.isLinux [
(lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox")
];
meta = {
platforms = lib.platforms.unix ++ lib.platforms.windows;

View file

@ -27,25 +27,26 @@ namespace nix {
RemoteStore::RemoteStore(const Params & params)
: RemoteStoreConfig(params)
, Store(params)
, connections(make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections),
[this]() {
auto conn = openConnectionWrapper();
try {
initConnection(*conn);
} catch (...) {
failed = true;
throw;
}
return conn;
},
[this](const ref<Connection> & r) {
return r->to.good() && r->from.good()
&& std::chrono::duration_cast<std::chrono::seconds>(
std::chrono::steady_clock::now() - r->startTime)
.count()
< maxConnectionAge;
}))
, connections(
make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections),
[this]() {
auto conn = openConnectionWrapper();
try {
initConnection(*conn);
} catch (...) {
failed = true;
throw;
}
return conn;
},
[this](const ref<Connection> & r) {
return r->to.good() && r->from.good()
&& std::chrono::duration_cast<std::chrono::seconds>(
std::chrono::steady_clock::now() - r->startTime)
.count()
< maxConnectionAge;
}))
{
}
@ -655,12 +656,13 @@ std::vector<KeyedBuildResult> RemoteStore::buildPathsWithResults(
std::visit(
overloaded{
[&](const DerivedPath::Opaque & bo) {
results.push_back(KeyedBuildResult{
{
.status = BuildResult::Substituted,
},
/* .path = */ bo,
});
results.push_back(
KeyedBuildResult{
{
.status = BuildResult::Substituted,
},
/* .path = */ bo,
});
},
[&](const DerivedPath::Built & bfd) {
KeyedBuildResult res{

View file

@ -93,19 +93,21 @@ static void initAWS()
S3Helper::S3Helper(
const std::string & profile, const std::string & region, const std::string & scheme, const std::string & endpoint)
: config(makeConfig(region, scheme, endpoint))
, client(make_ref<Aws::S3::S3Client>(
profile == "" ? std::dynamic_pointer_cast<Aws::Auth::AWSCredentialsProvider>(
std::make_shared<Aws::Auth::DefaultAWSCredentialsProviderChain>())
: std::dynamic_pointer_cast<Aws::Auth::AWSCredentialsProvider>(
std::make_shared<Aws::Auth::ProfileConfigFileAWSCredentialsProvider>(profile.c_str())),
*config,
, client(
make_ref<Aws::S3::S3Client>(
profile == ""
? std::dynamic_pointer_cast<Aws::Auth::AWSCredentialsProvider>(
std::make_shared<Aws::Auth::DefaultAWSCredentialsProviderChain>())
: std::dynamic_pointer_cast<Aws::Auth::AWSCredentialsProvider>(
std::make_shared<Aws::Auth::ProfileConfigFileAWSCredentialsProvider>(profile.c_str())),
*config,
// FIXME: https://github.com/aws/aws-sdk-cpp/issues/759
# if AWS_SDK_VERSION_MAJOR == 1 && AWS_SDK_VERSION_MINOR < 3
false,
false,
# else
Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never,
Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never,
# endif
endpoint.empty()))
endpoint.empty()))
{
}

View file

@ -88,13 +88,14 @@ void handleDiffHook(
if (diffHookOpt && settings.runDiffHook) {
auto & diffHook = *diffHookOpt;
try {
auto diffRes = runProgram(RunOptions{
.program = diffHook,
.lookupPath = true,
.args = {tryA, tryB, drvPath, tmpDir},
.uid = uid,
.gid = gid,
.chdir = "/"});
auto diffRes = runProgram(
RunOptions{
.program = diffHook,
.lookupPath = true,
.args = {tryA, tryB, drvPath, tmpDir},
.uid = uid,
.gid = gid,
.chdir = "/"});
if (!statusOk(diffRes.first))
throw ExecError(diffRes.first, "diff-hook program '%1%' %2%", diffHook, statusToString(diffRes.first));
@ -2715,10 +2716,11 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
std::filesystem::rename(tmpOutput, actualPath);
auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating{
.method = dof.ca.method,
.hashAlgo = wanted.algo,
});
auto newInfo0 = newInfoFromCA(
DerivationOutput::CAFloating{
.method = dof.ca.method,
.hashAlgo = wanted.algo,
});
/* Check wanted hash */
assert(newInfo0.ca);
@ -2754,10 +2756,11 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
},
[&](const DerivationOutput::Impure & doi) {
return newInfoFromCA(DerivationOutput::CAFloating{
.method = doi.method,
.hashAlgo = doi.hashAlgo,
});
return newInfoFromCA(
DerivationOutput::CAFloating{
.method = doi.method,
.hashAlgo = doi.hashAlgo,
});
},
},

View file

@ -314,9 +314,10 @@ TEST(Config, applyConfigFailsOnMissingIncludes)
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
ASSERT_THROW(
config.applyConfig("name-of-the-setting = value-from-file\n"
"# name-of-the-setting = foo\n"
"include /nix/store/does/not/exist.nix"),
config.applyConfig(
"name-of-the-setting = value-from-file\n"
"# name-of-the-setting = foo\n"
"include /nix/store/does/not/exist.nix"),
Error);
}

View file

@ -31,10 +31,11 @@ TEST(to_json, vectorOfOptionalInts)
TEST(to_json, optionalVectorOfInts)
{
std::optional<std::vector<int>> val = std::make_optional(std::vector<int>{
-420,
420,
});
std::optional<std::vector<int>> val = std::make_optional(
std::vector<int>{
-420,
420,
});
ASSERT_EQ(nlohmann::json(val), nlohmann::json::parse("[-420,420]"));
val = std::nullopt;
ASSERT_EQ(nlohmann::json(val), nlohmann::json(nullptr));

View file

@ -50,7 +50,8 @@ mkMesonLibrary (finalAttrs: {
libblake3
libsodium
openssl
] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
]
++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
propagatedBuildInputs = [
boost

View file

@ -41,10 +41,11 @@ Suggestions Suggestions::bestMatches(const std::set<std::string> & allMatches, s
{
std::set<Suggestion> res;
for (const auto & possibleMatch : allMatches) {
res.insert(Suggestion{
.distance = levenshteinDistance(query, possibleMatch),
.suggestion = possibleMatch,
});
res.insert(
Suggestion{
.distance = levenshteinDistance(query, possibleMatch),
.suggestion = possibleMatch,
});
}
return Suggestions{res};
}

View file

@ -253,8 +253,13 @@ pid_t startProcess(std::function<void()> fun, const ProcessOptions & options)
std::string runProgram(
Path program, bool lookupPath, const Strings & args, const std::optional<std::string> & input, bool isInteractive)
{
auto res = runProgram(RunOptions{
.program = program, .lookupPath = lookupPath, .args = args, .input = input, .isInteractive = isInteractive});
auto res = runProgram(
RunOptions{
.program = program,
.lookupPath = lookupPath,
.args = args,
.input = input,
.isInteractive = isInteractive});
if (!statusOk(res.first))
throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first));

View file

@ -84,8 +84,13 @@ int Pid::wait()
std::string runProgram(
Path program, bool lookupPath, const Strings & args, const std::optional<std::string> & input, bool isInteractive)
{
auto res = runProgram(RunOptions{
.program = program, .lookupPath = lookupPath, .args = args, .input = input, .isInteractive = isInteractive});
auto res = runProgram(
RunOptions{
.program = program,
.lookupPath = lookupPath,
.args = args,
.input = input,
.isInteractive = isInteractive});
if (!statusOk(res.first))
throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first));

View file

@ -480,10 +480,11 @@ static void main_nix_build(int argc, char ** argv)
throw Error("the 'bashInteractive' attribute in <nixpkgs> did not evaluate to a derivation");
auto bashDrv = drv->requireDrvPath();
pathsToBuild.push_back(DerivedPath::Built{
.drvPath = makeConstantStorePathRef(bashDrv),
.outputs = OutputsSpec::Names{"out"},
});
pathsToBuild.push_back(
DerivedPath::Built{
.drvPath = makeConstantStorePathRef(bashDrv),
.outputs = OutputsSpec::Names{"out"},
});
pathsToCopy.insert(bashDrv);
shellDrv = bashDrv;
@ -499,10 +500,11 @@ static void main_nix_build(int argc, char ** argv)
accumDerivedPath = [&](ref<SingleDerivedPath> inputDrv,
const DerivedPathMap<StringSet>::ChildNode & inputNode) {
if (!inputNode.value.empty())
pathsToBuild.push_back(DerivedPath::Built{
.drvPath = inputDrv,
.outputs = OutputsSpec::Names{inputNode.value},
});
pathsToBuild.push_back(
DerivedPath::Built{
.drvPath = inputDrv,
.outputs = OutputsSpec::Names{inputNode.value},
});
for (const auto & [outputName, childNode] : inputNode.childMap)
accumDerivedPath(
make_ref<SingleDerivedPath>(SingleDerivedPath::Built{inputDrv, outputName}), childNode);
@ -687,10 +689,11 @@ static void main_nix_build(int argc, char ** argv)
if (outputName == "")
throw Error("derivation '%s' lacks an 'outputName' attribute", store->printStorePath(drvPath));
pathsToBuild.push_back(DerivedPath::Built{
.drvPath = makeConstantStorePathRef(drvPath),
.outputs = OutputsSpec::Names{outputName},
});
pathsToBuild.push_back(
DerivedPath::Built{
.drvPath = makeConstantStorePathRef(drvPath),
.outputs = OutputsSpec::Names{outputName},
});
pathsToBuildOrdered.push_back({drvPath, {outputName}});
drvsToCopy.insert(drvPath);

View file

@ -467,9 +467,10 @@ static void printMissing(EvalState & state, PackageInfos & elems)
};
targets.emplace_back(std::move(path));
} else
targets.emplace_back(DerivedPath::Opaque{
.path = i.queryOutPath(),
});
targets.emplace_back(
DerivedPath::Opaque{
.path = i.queryOutPath(),
});
printMissing(state.store, targets);
}
@ -1108,7 +1109,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
attrs["substitutable"] = hasSubs ? "1" : "0";
} else
columns.push_back(
(std::string)(isInstalled ? "I" : "-") + (isValid ? "P" : "-") + (hasSubs ? "S" : "-"));
(std::string) (isInstalled ? "I" : "-") + (isValid ? "P" : "-") + (hasSubs ? "S" : "-"));
}
if (xmlOutput)

View file

@ -70,28 +70,29 @@ UnresolvedApp InstallableValue::toApp(EvalState & state)
std::vector<DerivedPath> context2;
for (auto & c : context) {
context2.emplace_back(std::visit(
overloaded{
[&](const NixStringContextElem::DrvDeep & d) -> DerivedPath {
/* We want all outputs of the drv */
return DerivedPath::Built{
.drvPath = makeConstantStorePathRef(d.drvPath),
.outputs = OutputsSpec::All{},
};
context2.emplace_back(
std::visit(
overloaded{
[&](const NixStringContextElem::DrvDeep & d) -> DerivedPath {
/* We want all outputs of the drv */
return DerivedPath::Built{
.drvPath = makeConstantStorePathRef(d.drvPath),
.outputs = OutputsSpec::All{},
};
},
[&](const NixStringContextElem::Built & b) -> DerivedPath {
return DerivedPath::Built{
.drvPath = b.drvPath,
.outputs = OutputsSpec::Names{b.output},
};
},
[&](const NixStringContextElem::Opaque & o) -> DerivedPath {
return DerivedPath::Opaque{
.path = o.path,
};
},
},
[&](const NixStringContextElem::Built & b) -> DerivedPath {
return DerivedPath::Built{
.drvPath = b.drvPath,
.outputs = OutputsSpec::Names{b.output},
};
},
[&](const NixStringContextElem::Opaque & o) -> DerivedPath {
return DerivedPath::Opaque{
.path = o.path,
};
},
},
c.raw));
c.raw));
}
return UnresolvedApp{App{

View file

@ -402,10 +402,11 @@ void mainWrapped(int argc, char ** argv)
self-aware. That is, it has to know where it is installed. We
don't think it's sentient.
*/
settings.buildHook.setDefault(Strings{
getNixBin({}).string(),
"__build-remote",
});
settings.buildHook.setDefault(
Strings{
getNixBin({}).string(),
"__build-remote",
});
#ifdef __linux__
if (isRootUser()) {

View file

@ -15,13 +15,14 @@ void runNix(Path program, const Strings & args, const std::optional<std::string>
auto subprocessEnv = getEnv();
subprocessEnv["NIX_CONFIG"] = globalConfig.toKeyValue();
// isInteractive avoid grabling interactive commands
runProgram2(RunOptions{
.program = getNixBin(program).string(),
.args = args,
.environment = subprocessEnv,
.input = input,
.isInteractive = true,
});
runProgram2(
RunOptions{
.program = getNixBin(program).string(),
.args = args,
.environment = subprocessEnv,
.input = input,
.isInteractive = true,
});
return;
}

View file

@ -34,13 +34,14 @@ struct CmdSearch : InstallableValueCommand, MixJSON
CmdSearch()
{
expectArgs("regex", &res);
addFlag(Flag{
.longName = "exclude",
.shortName = 'e',
.description = "Hide packages whose attribute path, name or description contain *regex*.",
.labels = {"regex"},
.handler = {[this](std::string s) { excludeRes.push_back(s); }},
});
addFlag(
Flag{
.longName = "exclude",
.shortName = 'e',
.description = "Hide packages whose attribute path, name or description contain *regex*.",
.labels = {"regex"},
.handler = {[this](std::string s) { excludeRes.push_back(s); }},
});
}
std::string description() override

View file

@ -45,7 +45,8 @@ perl.pkgs.toPerlModule (
buildInputs = [
nix-store
] ++ finalAttrs.passthru.externalBuildInputs;
]
++ finalAttrs.passthru.externalBuildInputs;
# Hack for sake of the dev shell
passthru.externalBuildInputs = [

View file

@ -2,16 +2,15 @@ with import ./lib.nix;
let
attrs =
{
y = "y";
x = "x";
foo = "foo";
}
// rec {
x = "newx";
bar = x;
};
attrs = {
y = "y";
x = "x";
foo = "foo";
}
// rec {
x = "newx";
bar = x;
};
names = builtins.attrNames attrs;

View file

@ -1,15 +1,14 @@
let {
as =
{
x = 123;
y = 456;
}
// {
z = 789;
}
// {
z = 987;
};
as = {
x = 123;
y = 456;
}
// {
z = 789;
}
// {
z = 987;
};
body =
if as ? a then

View file

@ -1,15 +1,14 @@
let {
as =
{
x = 123;
y = 456;
}
// {
z = 789;
}
// {
z = 987;
};
as = {
x = 123;
y = 456;
}
// {
z = 789;
}
// {
z = 987;
};
A = "a";
Z = "z";

View file

@ -6,7 +6,8 @@ let
scopedImport = attrs: fn: scopedImport (overrides // attrs) fn;
builtins = builtins // overrides;
} // import ./lib.nix;
}
// import ./lib.nix;
in
scopedImport overrides ./imported.nix

View file

@ -6,32 +6,31 @@ mkDerivation {
name = "nested-sandboxing";
busybox = builtins.getEnv "busybox";
EXTRA_SANDBOX = builtins.getEnv "EXTRA_SANDBOX";
buildCommand =
''
set -x
set -eu -o pipefail
''
+ (
if altitude == 0 then
''
echo Deep enough! > $out
''
else
''
cp -r ${../common} ./common
cp ${../common.sh} ./common.sh
cp ${../config.nix} ./config.nix
cp -r ${./.} ./nested-sandboxing
buildCommand = ''
set -x
set -eu -o pipefail
''
+ (
if altitude == 0 then
''
echo Deep enough! > $out
''
else
''
cp -r ${../common} ./common
cp ${../common.sh} ./common.sh
cp ${../config.nix} ./config.nix
cp -r ${./.} ./nested-sandboxing
export PATH=${builtins.getEnv "NIX_BIN_DIR"}:$PATH
export PATH=${builtins.getEnv "NIX_BIN_DIR"}:$PATH
export _NIX_TEST_SOURCE_DIR=$PWD
export _NIX_TEST_BUILD_DIR=$PWD
export _NIX_TEST_SOURCE_DIR=$PWD
export _NIX_TEST_BUILD_DIR=$PWD
source common.sh
source ./nested-sandboxing/command.sh
source common.sh
source ./nested-sandboxing/command.sh
runNixBuild ${storeFun} ${toString altitude} >> $out
''
);
runNixBuild ${storeFun} ${toString altitude} >> $out
''
);
}

View file

@ -46,25 +46,24 @@ mkMesonDerivation (
];
# Hack for sake of the dev shell
passthru.externalNativeBuildInputs =
[
meson
ninja
pkg-config
passthru.externalNativeBuildInputs = [
meson
ninja
pkg-config
jq
git
mercurial
]
++ lib.optionals stdenv.hostPlatform.isLinux [
# For various sandboxing tests that needs a statically-linked shell,
# etc.
busybox-sandbox-shell
# For Overlay FS tests need `mount`, `umount`, and `unshare`.
# For `script` command (ensuring a TTY)
# TODO use `unixtools` to be precise over which executables instead?
util-linux
];
jq
git
mercurial
]
++ lib.optionals stdenv.hostPlatform.isLinux [
# For various sandboxing tests that needs a statically-linked shell,
# etc.
busybox-sandbox-shell
# For Overlay FS tests need `mount`, `umount`, and `unshare`.
# For `script` command (ensuring a TTY)
# TODO use `unixtools` to be precise over which executables instead?
util-linux
];
nativeBuildInputs = finalAttrs.passthru.externalNativeBuildInputs ++ [
nix-cli

View file

@ -77,7 +77,15 @@ let
{ lib, pkgs, ... }:
{
imports = [ checkOverrideNixVersion ];
nix.package = lib.mkForce pkgs.nixVersions.nix_2_3;
nix.package = lib.mkForce (
pkgs.nixVersions.nix_2_3.overrideAttrs (o: {
meta = o.meta // {
# This version shouldn't be used by end-users, but we run tests against
# it to ensure we don't break protocol compatibility.
knownVulnerabilities = [ ];
};
})
);
};
otherNixes.nix_2_13.setNixPackage =
@ -88,6 +96,8 @@ let
nixpkgs-23-11.legacyPackages.${pkgs.stdenv.hostPlatform.system}.nixVersions.nix_2_13.overrideAttrs
(o: {
meta = o.meta // {
# This version shouldn't be used by end-users, but we run tests against
# it to ensure we don't break protocol compatibility.
knownVulnerabilities = [ ];
};
})
@ -98,7 +108,9 @@ let
{ lib, pkgs, ... }:
{
imports = [ checkOverrideNixVersion ];
nix.package = lib.mkForce pkgs.nixVersions.nix_2_18;
nix.package =
lib.mkForce
nixpkgs-23-11.legacyPackages.${pkgs.stdenv.hostPlatform.system}.nixVersions.nix_2_18;
};
in

View file

@ -45,14 +45,14 @@
client.succeed("chmod 600 /root/.ssh/id_ed25519")
# Install the SSH key on the builders.
client.wait_for_unit("network-online.target")
client.wait_for_unit("network-addresses-eth1.service")
remote.succeed("mkdir -p -m 700 /root/.ssh")
remote.copy_from_host("key.pub", "/root/.ssh/authorized_keys")
remote.wait_for_unit("sshd")
remote.wait_for_unit("multi-user.target")
remote.wait_for_unit("network-online.target")
client.wait_for_unit("network-online.target")
remote.wait_for_unit("network-addresses-eth1.service")
client.wait_for_unit("network-addresses-eth1.service")
client.succeed(f"ssh -o StrictHostKeyChecking=no {remote.name} 'echo hello world'")
remote.succeed("""

View file

@ -187,9 +187,9 @@ in
github.succeed("cat /var/log/httpd/*.log >&2")
github.wait_for_unit("httpd.service")
github.wait_for_unit("network-online.target")
github.wait_for_unit("network-addresses-eth1.service")
client.wait_for_unit("network-online.target")
client.wait_for_unit("network-addresses-eth1.service")
client.succeed("curl -v https://github.com/ >&2")
out = client.succeed("nix registry list")
print(out)

View file

@ -70,9 +70,9 @@ in
server.copy_from_host("key.pub", "/root/.ssh/authorized_keys")
server.wait_for_unit("sshd")
server.wait_for_unit("multi-user.target")
server.wait_for_unit("network-online.target")
server.wait_for_unit("network-addresses-eth1.service")
client.wait_for_unit("network-online.target")
client.wait_for_unit("network-addresses-eth1.service")
client.succeed(f"ssh -o StrictHostKeyChecking=no {server.name} 'echo hello world'")
# Copy the closure of package A from the client to the server.

View file

@ -79,9 +79,9 @@ in
server.wait_for_unit("sshd")
server.wait_for_unit("multi-user.target")
server.wait_for_unit("network-online.target")
server.wait_for_unit("network-addresses-eth1.service")
client.wait_for_unit("network-online.target")
client.wait_for_unit("network-addresses-eth1.service")
client.wait_for_unit("getty@tty1.service")
# Either the prompt: ]#
# or an OCR misreading of it: 1#

View file

@ -61,7 +61,7 @@ in
{ nodes }:
''
cache.wait_for_unit("harmonia.service")
cache.wait_for_unit("network-online.target")
cache.wait_for_unit("network-addresses-eth1.service")
machine.succeed("mkdir -p /etc/containers")
machine.succeed("""echo '{"default":[{"type":"insecureAcceptAnything"}]}' > /etc/containers/policy.json""")

View file

@ -145,7 +145,7 @@ in
testScript =
{ nodes, ... }:
''
http_dns.wait_for_unit("network-online.target")
http_dns.wait_for_unit("network-addresses-eth1.service")
http_dns.wait_for_unit("nginx")
http_dns.wait_for_open_port(80)
http_dns.wait_for_unit("unbound")
@ -153,7 +153,7 @@ in
client.start()
client.wait_for_unit('multi-user.target')
client.wait_for_unit('network-online.target')
client.wait_for_unit('network-addresses-eth1.service')
with subtest("can fetch data from a remote server outside sandbox"):
client.succeed("nix --version >&2")

View file

@ -102,12 +102,12 @@ in
client.succeed("chmod 600 /root/.ssh/id_ed25519")
# Install the SSH key on the builder.
client.wait_for_unit("network-online.target")
client.wait_for_unit("network-addresses-eth1.service")
builder.succeed("mkdir -p -m 700 /root/.ssh")
builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys")
builder.wait_for_unit("sshd")
builder.wait_for_unit("multi-user.target")
builder.wait_for_unit("network-online.target")
builder.wait_for_unit("network-addresses-eth1.service")
client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'")

View file

@ -123,12 +123,12 @@ in
client.succeed("chmod 600 /root/.ssh/id_ed25519")
# Install the SSH key on the builders.
client.wait_for_unit("network-online.target")
client.wait_for_unit("network-addresses-eth1.service")
for builder in [builder1, builder2]:
builder.succeed("mkdir -p -m 700 /root/.ssh")
builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys")
builder.wait_for_unit("sshd")
builder.wait_for_unit("network-online.target")
builder.wait_for_unit("network-addresses-eth1.service")
# Make sure the builder can handle our login correctly
builder.wait_for_unit("multi-user.target")
# Make sure there's no funny business on the client either

View file

@ -67,14 +67,14 @@ in
# Create a binary cache.
server.wait_for_unit("minio")
server.wait_for_unit("network-online.target")
server.wait_for_unit("network-addresses-eth1.service")
server.succeed("mc config host add minio http://localhost:9000 ${accessKey} ${secretKey} --api s3v4")
server.succeed("mc mb minio/my-cache")
server.succeed("${env} nix copy --to '${storeUrl}' ${pkgA}")
client.wait_for_unit("network-online.target")
client.wait_for_unit("network-addresses-eth1.service")
# Test fetchurl on s3:// URLs while we're at it.
client.succeed("${env} nix eval --impure --expr 'builtins.fetchurl { name = \"foo\"; url = \"s3://my-cache/nix-cache-info?endpoint=http://server:9000&region=eu-west-1\"; }'")

View file

@ -139,8 +139,8 @@ in
start_all()
sourcehut.wait_for_unit("httpd.service")
sourcehut.wait_for_unit("network-online.target")
client.wait_for_unit("network-online.target")
sourcehut.wait_for_unit("network-addresses-eth1.service")
client.wait_for_unit("network-addresses-eth1.service")
client.succeed("curl -v https://git.sr.ht/ >&2")
client.succeed("nix registry list | grep nixpkgs")