Mirror of https://github.com/NixOS/nix.git, synced 2025-12-02 15:11:00 +01:00

Commit 175406c313: Merge remote-tracking branch 'origin/2.30-maintenance' into sync-2.30.0

284 changed files with 9123 additions and 4178 deletions
@@ -33,7 +33,12 @@ EvalSettings evalSettings {
            auto flakeRef = parseFlakeRef(fetchSettings, std::string { rest }, {}, true, false);
            debug("fetching flake search path element '%s''", rest);
            auto [accessor, lockedRef] = flakeRef.resolve(state.store).lazyFetch(state.store);
-           auto storePath = nix::fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy, lockedRef.input.getName());
+           auto storePath = nix::fetchToStore(
+               state.fetchSettings,
+               *state.store,
+               SourcePath(accessor),
+               FetchMode::Copy,
+               lockedRef.input.getName());
            state.allowPath(storePath);
            return state.storePath(storePath);
        },
@@ -176,14 +181,23 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * bas
            state.store,
            state.fetchSettings,
            EvalSettings::resolvePseudoUrl(s));
-       auto storePath = fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy);
+       auto storePath = fetchToStore(
+           state.fetchSettings,
+           *state.store,
+           SourcePath(accessor),
+           FetchMode::Copy);
        return state.storePath(storePath);
    }

    else if (hasPrefix(s, "flake:")) {
        auto flakeRef = parseFlakeRef(fetchSettings, std::string(s.substr(6)), {}, true, false);
        auto [accessor, lockedRef] = flakeRef.resolve(state.store).lazyFetch(state.store);
-       auto storePath = nix::fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy, lockedRef.input.getName());
+       auto storePath = nix::fetchToStore(
+           state.fetchSettings,
+           *state.store,
+           SourcePath(accessor),
+           FetchMode::Copy,
+           lockedRef.input.getName());
        state.allowPath(storePath);
        return state.storePath(storePath);
    }
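The two hunks above show the pattern repeated throughout this commit: fetchToStore() now takes the fetchers::Settings as an explicit first argument instead of consulting a global. A minimal sketch of a migrated call site, assuming an EvalState `state` and an `accessor` obtained as in the code above (the wrapper function itself is illustrative, not part of the diff):

    // Sketch only: thread the eval state's fetch settings through explicitly.
    StorePath copyAccessorToStore(EvalState & state, ref<SourceAccessor> accessor)
    {
        auto storePath = nix::fetchToStore(
            state.fetchSettings,   // new first argument
            *state.store,
            SourcePath(accessor),
            FetchMode::Copy);
        state.allowPath(storePath);
        return storePath;
    }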
@@ -338,7 +338,7 @@ struct MixEnvironment : virtual Args

    StringSet keepVars;
    StringSet unsetVars;
-   std::map<std::string, std::string> setVars;
+   StringMap setVars;
    bool ignoreEnvironment;

    MixEnvironment();
@@ -22,17 +22,17 @@ class Bindings;
namespace flake { struct Settings; }

/**
- * @todo Get rid of global setttings variables
+ * @todo Get rid of global settings variables
 */
extern fetchers::Settings fetchSettings;

/**
- * @todo Get rid of global setttings variables
+ * @todo Get rid of global settings variables
 */
extern EvalSettings evalSettings;

/**
- * @todo Get rid of global setttings variables
+ * @todo Get rid of global settings variables
 */
extern flake::Settings flakeSettings;
@@ -45,7 +45,7 @@ ref<InstallableValue> InstallableValue::require(ref<Installable> installable)
std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
{
    if (v.type() == nPath) {
-       auto storePath = fetchToStore(*state->store, v.path(), FetchMode::Copy);
+       auto storePath = fetchToStore(state->fetchSettings, *state->store, v.path(), FetchMode::Copy);
        return {{
            .path = DerivedPath::Opaque {
                .path = std::move(storePath),
@@ -2,6 +2,8 @@

#include <cstdio>

#include <signal.h>

#if USE_READLINE
#include <readline/history.h>
#include <readline/readline.h>
@@ -69,6 +69,7 @@ struct NixRepl

    const static int envSize = 32768;
    std::shared_ptr<StaticEnv> staticEnv;
+   Value lastLoaded;
    Env * env;
    int displ;
    StringSet varNames;
@@ -95,6 +96,7 @@ struct NixRepl
    void loadFiles();
    void loadFlakes();
    void reloadFilesAndFlakes();
+   void showLastLoaded();
    void addAttrsToScope(Value & attrs);
    void addVarToScope(const Symbol name, Value & v);
    Expr * parseString(std::string s);
@@ -158,6 +160,8 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
    return out;
}

+MakeError(IncompleteReplExpr, ParseError);
+
static bool isFirstRepl = true;

ReplExitStatus NixRepl::mainLoop()
@@ -205,16 +209,8 @@ ReplExitStatus NixRepl::mainLoop()
            default:
                unreachable();
            }
-       } catch (ParseError & e) {
-           if (e.msg().find("unexpected end of file") != std::string::npos) {
-               // For parse errors on incomplete input, we continue waiting for the next line of
-               // input without clearing the input so far.
-               continue;
-           } else {
-               printMsg(lvlError, e.msg());
-           }
-       } catch (EvalError & e) {
-           printMsg(lvlError, e.msg());
+       } catch (IncompleteReplExpr &) {
+           continue;
        } catch (Error & e) {
            printMsg(lvlError, e.msg());
        } catch (Interrupted & e) {
@@ -294,7 +290,7 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
        } catch (BadURL & e) {
            // Quietly ignore BadURL flake-related errors.
        } catch (FileNotFound & e) {
-           // Quietly ignore non-existent file beeing `import`-ed.
+           // Quietly ignore non-existent file being `import`-ed.
        }
    }

@@ -378,6 +374,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
             << " current profile\n"
             << " :l, :load <path> Load Nix expression and add it to scope\n"
             << " :lf, :load-flake <ref> Load Nix flake and add it to scope\n"
+            << " :ll, :last-loaded Show most recently loaded variables added to scope\n"
             << " :p, :print <expr> Evaluate and print expression recursively\n"
             << " Strings are printed directly, without escaping.\n"
             << " :q, :quit Exit nix-repl\n"
@@ -468,6 +465,10 @@ ProcessLineResult NixRepl::processLine(std::string line)
        loadFlake(arg);
    }

+   else if (command == ":ll" || command == ":last-loaded") {
+       showLastLoaded();
+   }
+
    else if (command == ":r" || command == ":reload") {
        state->resetFileCache();
        reloadFilesAndFlakes();
@@ -483,7 +484,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
        auto path = state->coerceToPath(noPos, v, context, "while evaluating the filename to edit");
        return {path, 0};
    } else if (v.isLambda()) {
-       auto pos = state->positions[v.payload.lambda.fun->pos];
+       auto pos = state->positions[v.lambda().fun->pos];
        if (auto path = std::get_if<SourcePath>(&pos.origin))
            return {*path, pos.line};
        else
@@ -760,6 +761,16 @@ void NixRepl::initEnv()
        varNames.emplace(state->symbols[i.first]);
}

+void NixRepl::showLastLoaded()
+{
+   RunPager pager;
+
+   for (auto & i : *lastLoaded.attrs()) {
+       std::string_view name = state->symbols[i.name];
+       logger->cout(name);
+   }
+}
+
void NixRepl::reloadFilesAndFlakes()
{
@@ -813,6 +824,27 @@ void NixRepl::addAttrsToScope(Value & attrs)
    staticEnv->sort();
    staticEnv->deduplicate();
    notice("Added %1% variables.", attrs.attrs()->size());
+
+   lastLoaded = attrs;
+
+   const int max_print = 20;
+   int counter = 0;
+   std::ostringstream loaded;
+   for (auto & i : attrs.attrs()->lexicographicOrder(state->symbols)) {
+       if (counter >= max_print)
+           break;
+
+       if (counter > 0)
+           loaded << ", ";
+
+       printIdentifier(loaded, state->symbols[i->name]);
+       counter += 1;
+   }
+
+   notice("%1%", loaded.str());
+
+   if (attrs.attrs()->size() > max_print)
+       notice("... and %1% more; view with :ll", attrs.attrs()->size() - max_print);
}

@@ -837,7 +869,17 @@ Expr * NixRepl::parseString(std::string s)

void NixRepl::evalString(std::string s, Value & v)
{
-   Expr * e = parseString(s);
+   Expr * e;
+   try {
+       e = parseString(s);
+   } catch (ParseError & e) {
+       if (e.msg().find("unexpected end of file") != std::string::npos)
+           // For parse errors on incomplete input, we continue waiting for the next line of
+           // input without clearing the input so far.
+           throw IncompleteReplExpr(e.msg());
+       else
+           throw;
+   }
    e->eval(*state, *env, v);
    state->forceValue(v, v.determinePos(noPos));
}
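Taken together with the mainLoop() hunk earlier, the REPL change replaces string-matching of "unexpected end of file" at every call site with a dedicated exception type: evalString() rethrows such parse errors as IncompleteReplExpr, and the main loop catches that type to keep accumulating input. A condensed sketch of the resulting control flow (the variable names are illustrative, not from the diff):

    // Sketch of how the REPL loop consumes the new exception type.
    try {
        Value result;
        evalString(accumulatedInput, result);   // may throw IncompleteReplExpr for partial input
        // ... print or bind `result` ...
    } catch (IncompleteReplExpr &) {
        // Keep the partial input buffered and wait for the next line.
    }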
@@ -252,7 +252,7 @@ const char * nix_get_path_string(nix_c_context * context, const nix_value * valu
        // We could use v.path().to_string().c_str(), but I'm concerned this
        // crashes. Looks like .path() allocates a CanonPath with a copy of the
        // string, then it gets the underlying data from that.
-       return v.payload.path.path;
+       return v.pathStr();
    }
    NIXC_CATCH_ERRS_NULL
}
@@ -324,7 +324,7 @@ nix_value * nix_get_list_byidx(nix_c_context * context, const nix_value * value,
    try {
        auto & v = check_value_in(value);
        assert(v.type() == nix::nList);
-       auto * p = v.listElems()[ix];
+       auto * p = v.listView()[ix];
        nix_gc_incref(nullptr, p);
        if (p != nullptr)
            state->state.forceValue(*p, nix::noPos);
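This is one of many hunks in the commit migrating from Value::listElems()/listItems() to Value::listView(). Judging from the usages in this diff, listView() returns a span-like view that supports indexing, size(), data(), and range-for, and enumerate() is applied to a named local rather than a temporary. A small sketch of the new idiom (process() is a placeholder):

    // Iterate a Nix list value via the new view-based accessor.
    auto listView = v.listView();
    for (const auto [i, elem] : enumerate(listView))
        process(i, *elem);                      // elements are still Value *
    assert(listView.size() == v.listSize());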
@@ -1,9 +1,10 @@
# Public headers directory

-include_dirs = [include_directories('../../..')]
+include_dirs = [ include_directories('../../..') ]

headers = files(
  'libexpr.hh',
  'nix_api_expr.hh',
  'value/context.hh',
  # hack for trailing newline
)
@@ -458,7 +458,7 @@ namespace nix {
        HintFmt("expected a function but found %s: %s", "a list", Uncolored("[ ]")),
        HintFmt("while evaluating the first argument passed to builtins.filterSource"));

-   // Usupported by store "dummy"
+   // Unsupported by store "dummy"

    // ASSERT_TRACE2("filterSource (_: 1) ./.",
    //               TypeError,
@@ -636,7 +636,7 @@ namespace nix {
        HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")),
        HintFmt("while evaluating the second argument passed to builtins.mapAttrs"));

-   // XXX: defered
+   // XXX: deferred
    // ASSERT_TRACE2("mapAttrs \"\" { foo.bar = 1; }",
    //               TypeError,
    //               HintFmt("attempt to call something which is not a function but %s", "a string"),
@@ -666,9 +666,9 @@ namespace nix {
        HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)),
        HintFmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith"));

-   // XXX: How to properly tell that the fucntion takes two arguments ?
+   // XXX: How to properly tell that the function takes two arguments ?
    // The same question also applies to sort, and maybe others.
-   // Due to lazyness, we only create a thunk, and it fails later on.
+   // Due to laziness, we only create a thunk, and it fails later on.
    // ASSERT_TRACE2("zipAttrsWith (_: 1) [ { foo = 1; } ]",
    //               TypeError,
    //               HintFmt("attempt to call something which is not a function but %s", "an integer"),
@@ -877,7 +877,7 @@ namespace nix {
        HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)),
        HintFmt("while evaluating the first argument passed to builtins.genList"));

-   // XXX: defered
+   // XXX: deferred
    // ASSERT_TRACE2("genList (x: x + \"foo\") 2 #TODO",
    //               TypeError,
    //               HintFmt("cannot add %s to an integer", "a string"),
@@ -32,8 +32,8 @@ deps_private += rapidcheck
gtest = dependency('gtest')
deps_private += gtest

-gtest = dependency('gmock')
-deps_private += gtest
+gmock = dependency('gmock')
+deps_private += gmock

configdata = configuration_data()
configdata.set_quoted('PACKAGE_VERSION', meson.project_version())
@@ -150,8 +150,8 @@ namespace nix {
    TEST_F(PrimOpTest, attrValues) {
        auto v = eval("builtins.attrValues { x = \"foo\"; a = 1; }");
        ASSERT_THAT(v, IsListOfSize(2));
-       ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
-       ASSERT_THAT(*v.listElems()[1], IsStringEq("foo"));
+       ASSERT_THAT(*v.listView()[0], IsIntEq(1));
+       ASSERT_THAT(*v.listView()[1], IsStringEq("foo"));
    }

    TEST_F(PrimOpTest, getAttr) {
@@ -250,8 +250,8 @@ namespace nix {
    TEST_F(PrimOpTest, catAttrs) {
        auto v = eval("builtins.catAttrs \"a\" [{a = 1;} {b = 0;} {a = 2;}]");
        ASSERT_THAT(v, IsListOfSize(2));
-       ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
-       ASSERT_THAT(*v.listElems()[1], IsIntEq(2));
+       ASSERT_THAT(*v.listView()[0], IsIntEq(1));
+       ASSERT_THAT(*v.listView()[1], IsIntEq(2));
    }

    TEST_F(PrimOpTest, functionArgs) {
@@ -301,6 +301,7 @@ namespace nix {

    TEST_F(PrimOpTest, elemtAtOutOfBounds) {
        ASSERT_THROW(eval("builtins.elemAt [0 1 2 3] 5"), Error);
+       ASSERT_THROW(eval("builtins.elemAt [0] 4294967296"), Error);
    }

    TEST_F(PrimOpTest, head) {
@@ -319,7 +320,8 @@ namespace nix {
    TEST_F(PrimOpTest, tail) {
        auto v = eval("builtins.tail [ 3 2 1 0 ]");
        ASSERT_THAT(v, IsListOfSize(3));
-       for (const auto [n, elem] : enumerate(v.listItems()))
+       auto listView = v.listView();
+       for (const auto [n, elem] : enumerate(listView))
            ASSERT_THAT(*elem, IsIntEq(2 - static_cast<int>(n)));
    }

@@ -330,17 +332,17 @@ namespace nix {
    TEST_F(PrimOpTest, map) {
        auto v = eval("map (x: \"foo\" + x) [ \"bar\" \"bla\" \"abc\" ]");
        ASSERT_THAT(v, IsListOfSize(3));
-       auto elem = v.listElems()[0];
+       auto elem = v.listView()[0];
        ASSERT_THAT(*elem, IsThunk());
        state.forceValue(*elem, noPos);
        ASSERT_THAT(*elem, IsStringEq("foobar"));

-       elem = v.listElems()[1];
+       elem = v.listView()[1];
        ASSERT_THAT(*elem, IsThunk());
        state.forceValue(*elem, noPos);
        ASSERT_THAT(*elem, IsStringEq("foobla"));

-       elem = v.listElems()[2];
+       elem = v.listView()[2];
        ASSERT_THAT(*elem, IsThunk());
        state.forceValue(*elem, noPos);
        ASSERT_THAT(*elem, IsStringEq("fooabc"));
@@ -349,7 +351,7 @@ namespace nix {
    TEST_F(PrimOpTest, filter) {
        auto v = eval("builtins.filter (x: x == 2) [ 3 2 3 2 3 2 ]");
        ASSERT_THAT(v, IsListOfSize(3));
-       for (const auto elem : v.listItems())
+       for (const auto elem : v.listView())
            ASSERT_THAT(*elem, IsIntEq(2));
    }

@@ -366,7 +368,8 @@ namespace nix {
    TEST_F(PrimOpTest, concatLists) {
        auto v = eval("builtins.concatLists [[1 2] [3 4]]");
        ASSERT_THAT(v, IsListOfSize(4));
-       for (const auto [i, elem] : enumerate(v.listItems()))
+       auto listView = v.listView();
+       for (const auto [i, elem] : enumerate(listView))
            ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
    }

@@ -404,7 +407,8 @@ namespace nix {
        auto v = eval("builtins.genList (x: x + 1) 3");
        ASSERT_EQ(v.type(), nList);
        ASSERT_EQ(v.listSize(), 3u);
-       for (const auto [i, elem] : enumerate(v.listItems())) {
+       auto listView = v.listView();
+       for (const auto [i, elem] : enumerate(listView)) {
            ASSERT_THAT(*elem, IsThunk());
            state.forceValue(*elem, noPos);
            ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
@@ -417,7 +421,8 @@ namespace nix {
        ASSERT_EQ(v.listSize(), 6u);

        const std::vector<int> numbers = { 42, 77, 147, 249, 483, 526 };
-       for (const auto [n, elem] : enumerate(v.listItems()))
+       auto listView = v.listView();
+       for (const auto [n, elem] : enumerate(listView))
            ASSERT_THAT(*elem, IsIntEq(numbers[n]));
    }

@@ -428,17 +433,17 @@ namespace nix {
        auto right = v.attrs()->get(createSymbol("right"));
        ASSERT_NE(right, nullptr);
        ASSERT_THAT(*right->value, IsListOfSize(2));
-       ASSERT_THAT(*right->value->listElems()[0], IsIntEq(23));
-       ASSERT_THAT(*right->value->listElems()[1], IsIntEq(42));
+       ASSERT_THAT(*right->value->listView()[0], IsIntEq(23));
+       ASSERT_THAT(*right->value->listView()[1], IsIntEq(42));

        auto wrong = v.attrs()->get(createSymbol("wrong"));
        ASSERT_NE(wrong, nullptr);
        ASSERT_EQ(wrong->value->type(), nList);
-       ASSERT_EQ(wrong->value->listSize(), 3u);
-       ASSERT_THAT(*wrong->value->listElems()[0], IsIntEq(1));
-       ASSERT_THAT(*wrong->value->listElems()[1], IsIntEq(9));
-       ASSERT_THAT(*wrong->value->listElems()[2], IsIntEq(3));
+       ASSERT_THAT(*wrong->value, IsListOfSize(3));
+       ASSERT_THAT(*wrong->value->listView()[0], IsIntEq(1));
+       ASSERT_THAT(*wrong->value->listView()[1], IsIntEq(9));
+       ASSERT_THAT(*wrong->value->listView()[2], IsIntEq(3));
    }

    TEST_F(PrimOpTest, concatMap) {
@@ -447,7 +452,8 @@ namespace nix {
        ASSERT_EQ(v.listSize(), 6u);

        const std::vector<int> numbers = { 1, 2, 0, 3, 4, 0 };
-       for (const auto [n, elem] : enumerate(v.listItems()))
+       auto listView = v.listView();
+       for (const auto [n, elem] : enumerate(listView))
            ASSERT_THAT(*elem, IsIntEq(numbers[n]));
    }

@@ -592,6 +598,16 @@ namespace nix {
        ASSERT_THAT(v, IsStringEq("n"));
    }

+   TEST_F(PrimOpTest, substringHugeStart){
+       auto v = eval("builtins.substring 4294967296 5 \"nixos\"");
+       ASSERT_THAT(v, IsStringEq(""));
+   }
+
+   TEST_F(PrimOpTest, substringHugeLength){
+       auto v = eval("builtins.substring 0 4294967296 \"nixos\"");
+       ASSERT_THAT(v, IsStringEq("nixos"));
+   }
+
    TEST_F(PrimOpTest, substringEmptyString){
        auto v = eval("builtins.substring 1 3 \"\"");
        ASSERT_THAT(v, IsStringEq(""));
@@ -656,8 +672,8 @@ namespace nix {
        auto v = eval("derivation");
        ASSERT_EQ(v.type(), nFunction);
        ASSERT_TRUE(v.isLambda());
-       ASSERT_NE(v.payload.lambda.fun, nullptr);
-       ASSERT_TRUE(v.payload.lambda.fun->hasFormals());
+       ASSERT_NE(v.lambda().fun, nullptr);
+       ASSERT_TRUE(v.lambda().fun->hasFormals());
    }

    TEST_F(PrimOpTest, currentTime) {
@@ -671,7 +687,8 @@ namespace nix {
        ASSERT_THAT(v, IsListOfSize(4));

        const std::vector<std::string_view> strings = { "1", "2", "3", "git" };
-       for (const auto [n, p] : enumerate(v.listItems()))
+       auto listView = v.listView();
+       for (const auto [n, p] : enumerate(listView))
            ASSERT_THAT(*p, IsStringEq(strings[n]));
    }

@@ -761,12 +778,12 @@ namespace nix {
        auto v = eval("builtins.split \"(a)b\" \"abc\"");
        ASSERT_THAT(v, IsListOfSize(3));

-       ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
+       ASSERT_THAT(*v.listView()[0], IsStringEq(""));

-       ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
-       ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
+       ASSERT_THAT(*v.listView()[1], IsListOfSize(1));
+       ASSERT_THAT(*v.listView()[1]->listView()[0], IsStringEq("a"));

-       ASSERT_THAT(*v.listElems()[2], IsStringEq("c"));
+       ASSERT_THAT(*v.listView()[2], IsStringEq("c"));
    }

    TEST_F(PrimOpTest, split2) {
@@ -774,17 +791,17 @@ namespace nix {
        auto v = eval("builtins.split \"([ac])\" \"abc\"");
        ASSERT_THAT(v, IsListOfSize(5));

-       ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
+       ASSERT_THAT(*v.listView()[0], IsStringEq(""));

-       ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
-       ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
+       ASSERT_THAT(*v.listView()[1], IsListOfSize(1));
+       ASSERT_THAT(*v.listView()[1]->listView()[0], IsStringEq("a"));

-       ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));
+       ASSERT_THAT(*v.listView()[2], IsStringEq("b"));

-       ASSERT_THAT(*v.listElems()[3], IsListOfSize(1));
-       ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsStringEq("c"));
+       ASSERT_THAT(*v.listView()[3], IsListOfSize(1));
+       ASSERT_THAT(*v.listView()[3]->listView()[0], IsStringEq("c"));

-       ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
+       ASSERT_THAT(*v.listView()[4], IsStringEq(""));
    }

    TEST_F(PrimOpTest, split3) {
@@ -792,36 +809,36 @@ namespace nix {
        ASSERT_THAT(v, IsListOfSize(5));

        // First list element
-       ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
+       ASSERT_THAT(*v.listView()[0], IsStringEq(""));

        // 2nd list element is a list [ "" null ]
-       ASSERT_THAT(*v.listElems()[1], IsListOfSize(2));
-       ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
-       ASSERT_THAT(*v.listElems()[1]->listElems()[1], IsNull());
+       ASSERT_THAT(*v.listView()[1], IsListOfSize(2));
+       ASSERT_THAT(*v.listView()[1]->listView()[0], IsStringEq("a"));
+       ASSERT_THAT(*v.listView()[1]->listView()[1], IsNull());

        // 3rd element
-       ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));
+       ASSERT_THAT(*v.listView()[2], IsStringEq("b"));

        // 4th element is a list: [ null "c" ]
-       ASSERT_THAT(*v.listElems()[3], IsListOfSize(2));
-       ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsNull());
-       ASSERT_THAT(*v.listElems()[3]->listElems()[1], IsStringEq("c"));
+       ASSERT_THAT(*v.listView()[3], IsListOfSize(2));
+       ASSERT_THAT(*v.listView()[3]->listView()[0], IsNull());
+       ASSERT_THAT(*v.listView()[3]->listView()[1], IsStringEq("c"));

        // 5th element is the empty string
-       ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
+       ASSERT_THAT(*v.listView()[4], IsStringEq(""));
    }

    TEST_F(PrimOpTest, split4) {
        auto v = eval("builtins.split \"([[:upper:]]+)\" \" FOO \"");
        ASSERT_THAT(v, IsListOfSize(3));
-       auto first = v.listElems()[0];
-       auto second = v.listElems()[1];
-       auto third = v.listElems()[2];
+       auto first = v.listView()[0];
+       auto second = v.listView()[1];
+       auto third = v.listView()[2];

        ASSERT_THAT(*first, IsStringEq(" "));

        ASSERT_THAT(*second, IsListOfSize(1));
-       ASSERT_THAT(*second->listElems()[0], IsStringEq("FOO"));
+       ASSERT_THAT(*second->listView()[0], IsStringEq("FOO"));

        ASSERT_THAT(*third, IsStringEq(" "));
    }
@@ -839,14 +856,14 @@ namespace nix {
    TEST_F(PrimOpTest, match3) {
        auto v = eval("builtins.match \"a(b)(c)\" \"abc\"");
        ASSERT_THAT(v, IsListOfSize(2));
-       ASSERT_THAT(*v.listElems()[0], IsStringEq("b"));
-       ASSERT_THAT(*v.listElems()[1], IsStringEq("c"));
+       ASSERT_THAT(*v.listView()[0], IsStringEq("b"));
+       ASSERT_THAT(*v.listView()[1], IsStringEq("c"));
    }

    TEST_F(PrimOpTest, match4) {
        auto v = eval("builtins.match \"[[:space:]]+([[:upper:]]+)[[:space:]]+\" \" FOO \"");
        ASSERT_THAT(v, IsListOfSize(1));
-       ASSERT_THAT(*v.listElems()[0], IsStringEq("FOO"));
+       ASSERT_THAT(*v.listView()[0], IsStringEq("FOO"));
    }

    TEST_F(PrimOpTest, match5) {
@@ -863,7 +880,8 @@ namespace nix {

        // ensure that the list is sorted
        const std::vector<std::string_view> expected { "a", "x", "y", "z" };
-       for (const auto [n, elem] : enumerate(v.listItems()))
+       auto listView = v.listView();
+       for (const auto [n, elem] : enumerate(listView))
            ASSERT_THAT(*elem, IsStringEq(expected[n]));
    }

@@ -143,7 +143,7 @@ namespace nix {
        // Usually Nix rejects duplicate keys in an attrset but it does allow
        // so if it is an attribute set that contains disjoint sets of keys.
        // The below is equivalent to `{a.b = 1; a.c = 2; }`.
-       // The attribute set `a` will be a Thunk at first as the attribuets
+       // The attribute set `a` will be a Thunk at first as the attributes
        // have to be merged (or otherwise computed) and that is done in a lazy
        // manner.

@@ -95,7 +95,7 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
        if (*attrIndex >= v->listSize())
            throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath);

-       v = v->listElems()[*attrIndex];
+       v = v->listView()[*attrIndex];
        pos = noPos;
    }

@@ -724,7 +724,7 @@ std::vector<std::string> AttrCursor::getListOfStrings()

    std::vector<std::string> res;

-   for (auto & elem : v.listItems())
+   for (auto elem : v.listView())
        res.push_back(std::string(root->state.forceStringNoCtx(*elem, noPos, "while evaluating an attribute for caching")));

    if (root->db)
@@ -4,6 +4,7 @@
#include "nix/util/config-global.hh"
#include "nix/util/serialise.hh"
#include "nix/expr/eval-gc.hh"
+#include "nix/expr/value.hh"

#include "expr-config-private.hh"

@@ -52,6 +53,13 @@ static inline void initGCReal()

    GC_INIT();

+   /* Register valid displacements in case we are using alignment niches
+      for storing the type information. This way tagged pointers are considered
+      to be valid, even when they are not aligned. */
+   if constexpr (detail::useBitPackedValueStorage<sizeof(void *)>)
+       for (std::size_t i = 1; i < sizeof(std::uintptr_t); ++i)
+           GC_register_displacement(i);
+
    GC_set_oom_fn(oomHandler);

    /* Set the initial heap size to something fairly big (25% of
src/libexpr/eval-profiler-settings.cc (new file)

@@ -0,0 +1,49 @@
#include "nix/expr/eval-profiler-settings.hh"
#include "nix/util/configuration.hh"
#include "nix/util/logging.hh" /* Needs to be included before config-impl.hh */
#include "nix/util/config-impl.hh"
#include "nix/util/abstract-setting-to-json.hh"

#include <nlohmann/json.hpp>

namespace nix {

template<>
EvalProfilerMode BaseSetting<EvalProfilerMode>::parse(const std::string & str) const
{
    if (str == "disabled")
        return EvalProfilerMode::disabled;
    else if (str == "flamegraph")
        return EvalProfilerMode::flamegraph;
    else
        throw UsageError("option '%s' has invalid value '%s'", name, str);
}

template<>
struct BaseSetting<EvalProfilerMode>::trait
{
    static constexpr bool appendable = false;
};

template<>
std::string BaseSetting<EvalProfilerMode>::to_string() const
{
    if (value == EvalProfilerMode::disabled)
        return "disabled";
    else if (value == EvalProfilerMode::flamegraph)
        return "flamegraph";
    else
        unreachable();
}

NLOHMANN_JSON_SERIALIZE_ENUM(
    EvalProfilerMode,
    {
        {EvalProfilerMode::disabled, "disabled"},
        {EvalProfilerMode::flamegraph, "flamegraph"},
    });

/* Explicit instantiation of templates */
template class BaseSetting<EvalProfilerMode>;

}
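The NLOHMANN_JSON_SERIALIZE_ENUM mapping above gives EvalProfilerMode automatic JSON conversions in both directions. A tiny illustrative sketch of what that mapping provides (the function is not code from the repository):

    #include <cassert>
    #include <nlohmann/json.hpp>

    void evalProfilerModeJsonRoundTrip()
    {
        nlohmann::json j = EvalProfilerMode::flamegraph;   // serializes to "flamegraph"
        auto mode = j.get<EvalProfilerMode>();             // parses back to the enum
        assert(mode == EvalProfilerMode::flamegraph);
    }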
src/libexpr/eval-profiler.cc (new file)

@@ -0,0 +1,355 @@
#include "nix/expr/eval-profiler.hh"
#include "nix/expr/nixexpr.hh"
#include "nix/expr/eval.hh"
#include "nix/util/lru-cache.hh"

namespace nix {

void EvalProfiler::preFunctionCallHook(EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos) {}

void EvalProfiler::postFunctionCallHook(EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos)
{
}

void MultiEvalProfiler::preFunctionCallHook(
    EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos)
{
    for (auto & profiler : profilers) {
        if (profiler->getNeededHooks().test(Hook::preFunctionCall))
            profiler->preFunctionCallHook(state, v, args, pos);
    }
}

void MultiEvalProfiler::postFunctionCallHook(
    EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos)
{
    for (auto & profiler : profilers) {
        if (profiler->getNeededHooks().test(Hook::postFunctionCall))
            profiler->postFunctionCallHook(state, v, args, pos);
    }
}

EvalProfiler::Hooks MultiEvalProfiler::getNeededHooksImpl() const
{
    Hooks hooks;
    for (auto & p : profilers)
        hooks |= p->getNeededHooks();
    return hooks;
}

void MultiEvalProfiler::addProfiler(ref<EvalProfiler> profiler)
{
    profilers.push_back(profiler);
    invalidateNeededHooks();
}

namespace {

class PosCache : private LRUCache<PosIdx, Pos>
{
    const EvalState & state;

public:
    PosCache(const EvalState & state)
        : LRUCache(524288) /* ~40MiB */
        , state(state)
    {
    }

    Pos lookup(PosIdx posIdx)
    {
        auto posOrNone = LRUCache::get(posIdx);
        if (posOrNone)
            return *posOrNone;

        auto pos = state.positions[posIdx];
        upsert(posIdx, pos);
        return pos;
    }
};

struct LambdaFrameInfo
{
    ExprLambda * expr;
    /** Position where the lambda has been called. */
    PosIdx callPos = noPos;
    std::ostream & symbolize(const EvalState & state, std::ostream & os, PosCache & posCache) const;
    auto operator<=>(const LambdaFrameInfo & rhs) const = default;
};

/** Primop call. */
struct PrimOpFrameInfo
{
    const PrimOp * expr;
    /** Position where the primop has been called. */
    PosIdx callPos = noPos;
    std::ostream & symbolize(const EvalState & state, std::ostream & os, PosCache & posCache) const;
    auto operator<=>(const PrimOpFrameInfo & rhs) const = default;
};

/** Used for functor calls (attrset with __functor attr). */
struct FunctorFrameInfo
{
    PosIdx pos;
    std::ostream & symbolize(const EvalState & state, std::ostream & os, PosCache & posCache) const;
    auto operator<=>(const FunctorFrameInfo & rhs) const = default;
};

struct DerivationStrictFrameInfo
{
    PosIdx callPos = noPos;
    std::string drvName;
    std::ostream & symbolize(const EvalState & state, std::ostream & os, PosCache & posCache) const;
    auto operator<=>(const DerivationStrictFrameInfo & rhs) const = default;
};

/** Fallback frame info. */
struct GenericFrameInfo
{
    PosIdx pos;
    std::ostream & symbolize(const EvalState & state, std::ostream & os, PosCache & posCache) const;
    auto operator<=>(const GenericFrameInfo & rhs) const = default;
};

using FrameInfo =
    std::variant<LambdaFrameInfo, PrimOpFrameInfo, FunctorFrameInfo, DerivationStrictFrameInfo, GenericFrameInfo>;
using FrameStack = std::vector<FrameInfo>;

/**
 * Stack sampling profiler.
 */
class SampleStack : public EvalProfiler
{
    /* How often stack profiles should be flushed to file. This avoids the need
       to persist stack samples across the whole evaluation at the cost
       of periodically flushing data to disk. */
    static constexpr std::chrono::microseconds profileDumpInterval = std::chrono::milliseconds(2000);

    Hooks getNeededHooksImpl() const override
    {
        return Hooks().set(preFunctionCall).set(postFunctionCall);
    }

    FrameInfo getPrimOpFrameInfo(const PrimOp & primOp, std::span<Value *> args, PosIdx pos);

public:
    SampleStack(EvalState & state, std::filesystem::path profileFile, std::chrono::nanoseconds period)
        : state(state)
        , sampleInterval(period)
        , profileFd([&]() {
            AutoCloseFD fd = toDescriptor(open(profileFile.string().c_str(), O_WRONLY | O_CREAT | O_TRUNC, 0660));
            if (!fd)
                throw SysError("opening file %s", profileFile);
            return fd;
        }())
        , posCache(state)
    {
    }

    [[gnu::noinline]] void
    preFunctionCallHook(EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos) override;
    [[gnu::noinline]] void
    postFunctionCallHook(EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos) override;

    void maybeSaveProfile(std::chrono::time_point<std::chrono::high_resolution_clock> now);
    void saveProfile();
    FrameInfo getFrameInfoFromValueAndPos(const Value & v, std::span<Value *> args, PosIdx pos);

    SampleStack(SampleStack &&) = default;
    SampleStack & operator=(SampleStack &&) = delete;
    SampleStack(const SampleStack &) = delete;
    SampleStack & operator=(const SampleStack &) = delete;
    ~SampleStack();

private:
    /** Hold on to an instance of EvalState for symbolizing positions. */
    EvalState & state;
    std::chrono::nanoseconds sampleInterval;
    AutoCloseFD profileFd;
    FrameStack stack;
    std::map<FrameStack, uint32_t> callCount;
    std::chrono::time_point<std::chrono::high_resolution_clock> lastStackSample =
        std::chrono::high_resolution_clock::now();
    std::chrono::time_point<std::chrono::high_resolution_clock> lastDump = std::chrono::high_resolution_clock::now();
    PosCache posCache;
};

FrameInfo SampleStack::getPrimOpFrameInfo(const PrimOp & primOp, std::span<Value *> args, PosIdx pos)
{
    auto derivationInfo = [&]() -> std::optional<FrameInfo> {
        /* Here we rely a bit on the implementation details of libexpr/primops/derivation.nix
           and derivationStrict primop. This is not ideal, but is necessary for
           the usefulness of the profiler. This might actually affect the evaluation,
           but the cost shouldn't be that high as to make the traces entirely inaccurate. */
        if (primOp.name == "derivationStrict") {
            try {
                /* Error context strings don't actually matter, since we ignore all eval errors. */
                state.forceAttrs(*args[0], pos, "");
                auto attrs = args[0]->attrs();
                auto nameAttr = state.getAttr(state.sName, attrs, "");
                auto drvName = std::string(state.forceStringNoCtx(*nameAttr->value, pos, ""));
                return DerivationStrictFrameInfo{.callPos = pos, .drvName = std::move(drvName)};
            } catch (...) {
                /* Ignore all errors, since those will be diagnosed by the evaluator itself. */
            }
        }

        return std::nullopt;
    }();

    return derivationInfo.value_or(PrimOpFrameInfo{.expr = &primOp, .callPos = pos});
}

FrameInfo SampleStack::getFrameInfoFromValueAndPos(const Value & v, std::span<Value *> args, PosIdx pos)
{
    /* NOTE: No actual references to garbage collected values are not held in
       the profiler. */
    if (v.isLambda())
        return LambdaFrameInfo{.expr = v.lambda().fun, .callPos = pos};
    else if (v.isPrimOp()) {
        return getPrimOpFrameInfo(*v.primOp(), args, pos);
    } else if (v.isPrimOpApp())
        /* Resolve primOp eagerly. Must not hold on to a reference to a Value. */
        return PrimOpFrameInfo{.expr = v.primOpAppPrimOp(), .callPos = pos};
    else if (state.isFunctor(v)) {
        const auto functor = v.attrs()->get(state.sFunctor);
        if (auto pos_ = posCache.lookup(pos); std::holds_alternative<std::monostate>(pos_.origin))
            /* HACK: In case callsite position is unresolved. */
            return FunctorFrameInfo{.pos = functor->pos};
        return FunctorFrameInfo{.pos = pos};
    } else
        /* NOTE: Add a stack frame even for invalid cases (e.g. when calling a non-function). This is what
         * trace-function-calls does. */
        return GenericFrameInfo{.pos = pos};
}

[[gnu::noinline]] void
SampleStack::preFunctionCallHook(EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos)
{
    stack.push_back(getFrameInfoFromValueAndPos(v, args, pos));

    auto now = std::chrono::high_resolution_clock::now();

    if (now - lastStackSample > sampleInterval) {
        callCount[stack] += 1;
        lastStackSample = now;
    }

    /* Do this in preFunctionCallHook because we might throw an exception, but
       callFunction uses Finally, which doesn't play well with exceptions. */
    maybeSaveProfile(now);
}

[[gnu::noinline]] void
SampleStack::postFunctionCallHook(EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos)
{
    if (!stack.empty())
        stack.pop_back();
}

std::ostream & LambdaFrameInfo::symbolize(const EvalState & state, std::ostream & os, PosCache & posCache) const
{
    if (auto pos = posCache.lookup(callPos); std::holds_alternative<std::monostate>(pos.origin))
        /* HACK: To avoid dubious «none»:0 in the generated profile if the origin can't be resolved
           resort to printing the lambda location instead of the callsite position. */
        os << posCache.lookup(expr->getPos());
    else
        os << pos;
    if (expr->name)
        os << ":" << state.symbols[expr->name];
    return os;
}

std::ostream & GenericFrameInfo::symbolize(const EvalState & state, std::ostream & os, PosCache & posCache) const
{
    os << posCache.lookup(pos);
    return os;
}

std::ostream & FunctorFrameInfo::symbolize(const EvalState & state, std::ostream & os, PosCache & posCache) const
{
    os << posCache.lookup(pos) << ":functor";
    return os;
}

std::ostream & PrimOpFrameInfo::symbolize(const EvalState & state, std::ostream & os, PosCache & posCache) const
{
    /* Sometimes callsite position can have an unresolved origin, which
       leads to confusing «none»:0 locations in the profile. */
    auto pos = posCache.lookup(callPos);
    if (!std::holds_alternative<std::monostate>(pos.origin))
        os << posCache.lookup(callPos) << ":";
    os << *expr;
    return os;
}

std::ostream &
DerivationStrictFrameInfo::symbolize(const EvalState & state, std::ostream & os, PosCache & posCache) const
{
    /* Sometimes callsite position can have an unresolved origin, which
       leads to confusing «none»:0 locations in the profile. */
    auto pos = posCache.lookup(callPos);
    if (!std::holds_alternative<std::monostate>(pos.origin))
        os << posCache.lookup(callPos) << ":";
    os << "primop derivationStrict:" << drvName;
    return os;
}

void SampleStack::maybeSaveProfile(std::chrono::time_point<std::chrono::high_resolution_clock> now)
{
    if (now - lastDump >= profileDumpInterval)
        saveProfile();
    else
        return;

    /* Save the last dump timepoint. Do this after actually saving data to file
       to not account for the time doing the flushing to disk. */
    lastDump = std::chrono::high_resolution_clock::now();

    /* Free up memory used for stack sampling. This might be very significant for
       long-running evaluations, so we shouldn't hog too much memory. */
    callCount.clear();
}

void SampleStack::saveProfile()
{
    auto os = std::ostringstream{};
    for (auto & [stack, count] : callCount) {
        auto first = true;
        for (auto & pos : stack) {
            if (first)
                first = false;
            else
                os << ";";

            std::visit([&](auto && info) { info.symbolize(state, os, posCache); }, pos);
        }
        os << " " << count;
        writeLine(profileFd.get(), std::move(os).str());
        /* Clear ostringstream. */
        os.str("");
        os.clear();
    }
}

SampleStack::~SampleStack()
{
    /* Guard against cases when we are already unwinding the stack. */
    try {
        saveProfile();
    } catch (...) {
        ignoreExceptionInDestructor();
    }
}

} // namespace

ref<EvalProfiler> makeSampleStackProfiler(EvalState & state, std::filesystem::path profileFile, uint64_t frequency)
{
    /* 0 is a special value for sampling stack after each call. */
    std::chrono::nanoseconds period = frequency == 0
        ? std::chrono::nanoseconds{0}
        : std::chrono::nanoseconds{std::nano::den / frequency / std::nano::num};
    return make_ref<SampleStack>(state, profileFile, period);
}

}
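The profiler interface above is hook-based: a profiler declares which hooks it needs via getNeededHooksImpl(), and MultiEvalProfiler fans each hook out to every registered profiler. A hypothetical sketch of a minimal custom profiler that merely counts function calls, using only the interface visible in this file (class name and member are made up; the exact visibility of getNeededHooksImpl() in the base class is assumed):

    // Hypothetical example of plugging into the hooks defined above.
    class CallCounter : public EvalProfiler
    {
        uint64_t calls = 0;

        Hooks getNeededHooksImpl() const override
        {
            return Hooks().set(preFunctionCall);   // only the pre-call hook is needed
        }

    public:
        void preFunctionCallHook(
            EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos) override
        {
            ++calls;                               // one frame per call, mirroring SampleStack
        }
    };

    // Registration would mirror the eval.cc hunk later in this commit:
    //     profiler.addProfiler(make_ref<CallCounter>());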
@@ -2,6 +2,7 @@
#include "nix/expr/eval-settings.hh"
#include "nix/expr/primops.hh"
#include "nix/expr/print-options.hh"
#include "nix/expr/symbol-table.hh"
#include "nix/util/exit.hh"
#include "nix/util/types.hh"
#include "nix/util/util.hh"
@@ -90,20 +91,16 @@ std::string printValue(EvalState & state, Value & v)
    return out.str();
}

-Value * Value::toPtr(SymbolStr str) noexcept
-{
-    return const_cast<Value *>(str.valuePtr());
-}
-
void Value::print(EvalState & state, std::ostream & str, PrintOptions options)
{
    printValue(state, str, *this, options);
}

const Value * getPrimOp(const Value &v) {
    const Value * primOp = &v;
    while (primOp->isPrimOpApp()) {
        primOp = primOp->payload.primOpApp.left;
    }
    assert(primOp->isPrimOp());
    return primOp;
}

std::string_view showType(ValueType type, bool withArticle)
{
#define WA(a, w) withArticle ? a " " w : w
@@ -129,12 +126,12 @@ std::string showType(const Value & v)
    // Allow selecting a subset of enum values
    #pragma GCC diagnostic push
    #pragma GCC diagnostic ignored "-Wswitch-enum"
-   switch (v.internalType) {
-       case tString: return v.payload.string.context ? "a string with context" : "a string";
+   switch (v.getInternalType()) {
+       case tString: return v.context() ? "a string with context" : "a string";
        case tPrimOp:
-           return fmt("the built-in function '%s'", std::string(v.payload.primOp->name));
+           return fmt("the built-in function '%s'", std::string(v.primOp()->name));
        case tPrimOpApp:
-           return fmt("the partially applied built-in function '%s'", std::string(getPrimOp(v)->payload.primOp->name));
+           return fmt("the partially applied built-in function '%s'", v.primOpAppPrimOp()->name);
        case tExternal: return v.external()->showType();
        case tThunk: return v.isBlackhole() ? "a black hole" : "a thunk";
        case tApp: return "a function application";
@@ -149,12 +146,10 @@ PosIdx Value::determinePos(const PosIdx pos) const
    // Allow selecting a subset of enum values
    #pragma GCC diagnostic push
    #pragma GCC diagnostic ignored "-Wswitch-enum"
-   if (this->pos != 0)
-       return PosIdx(this->pos);
-   switch (internalType) {
+   switch (getInternalType()) {
        case tAttrs: return attrs()->pos;
-       case tLambda: return payload.lambda.fun->pos;
-       case tApp: return payload.app.left->determinePos(pos);
+       case tLambda: return lambda().fun->pos;
+       case tApp: return app().left->determinePos(pos);
        default: return pos;
    }
    #pragma GCC diagnostic pop
@@ -163,13 +158,12 @@ PosIdx Value::determinePos(const PosIdx pos) const
bool Value::isTrivial() const
{
    return
-       internalType != tApp
-       && internalType != tPrimOpApp
-       && (internalType != tThunk
-           || (dynamic_cast<ExprAttrs *>(payload.thunk.expr)
-               && ((ExprAttrs *) payload.thunk.expr)->dynamicAttrs.empty())
-           || dynamic_cast<ExprLambda *>(payload.thunk.expr)
-           || dynamic_cast<ExprList *>(payload.thunk.expr));
+       !isa<tApp, tPrimOpApp>()
+       && (!isa<tThunk>()
+           || (dynamic_cast<ExprAttrs *>(thunk().expr)
+               && ((ExprAttrs *) thunk().expr)->dynamicAttrs.empty())
+           || dynamic_cast<ExprLambda *>(thunk().expr)
+           || dynamic_cast<ExprList *>(thunk().expr));
}

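The Value hunks in this file all follow one refactor: direct access to the payload union and to internalType is replaced by typed accessors. A short side-by-side summary of the spellings as they actually appear in this diff (nothing here is new API beyond what the hunks show):

    // Old (direct payload access)        // New (accessor style)
    v.payload.lambda.fun                  v.lambda().fun
    v.payload.lambda.env                  v.lambda().env
    v.payload.thunk.expr                  v.thunk().expr
    v.payload.primOpApp.left              v.primOpApp().left
    v.payload.path.path                   v.pathStr()
    v.payload.path.accessor               v.pathAccessor()
    v.payload.string.context              v.context()
    v.internalType                        v.getInternalType()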
@@ -215,6 +209,7 @@ EvalState::EvalState(
    , sRight(symbols.create("right"))
    , sWrong(symbols.create("wrong"))
    , sStructuredAttrs(symbols.create("__structuredAttrs"))
+   , sJson(symbols.create("__json"))
    , sAllowedReferences(symbols.create("allowedReferences"))
    , sAllowedRequisites(symbols.create("allowedRequisites"))
    , sDisallowedReferences(symbols.create("disallowedReferences"))
@@ -372,8 +367,20 @@ EvalState::EvalState(
    );

    createBaseEnv(settings);
+
+   /* Register function call tracer. */
+   if (settings.traceFunctionCalls)
+       profiler.addProfiler(make_ref<FunctionCallTrace>());
+
+   switch (settings.evalProfilerMode) {
+   case EvalProfilerMode::flamegraph:
+       profiler.addProfiler(makeSampleStackProfiler(
+           *this, settings.evalProfileFile.get(), settings.evalProfilerFrequency));
+       break;
+   case EvalProfilerMode::disabled:
+       break;
+   }
}

EvalState::~EvalState()
{
@@ -493,7 +500,7 @@ void EvalState::addConstant(const std::string & name, Value * v, Constant info)
        /* Install value the base environment. */
        staticBaseEnv->vars.emplace_back(symbols.create(name), baseEnvDispl);
        baseEnv.values[baseEnvDispl++] = v;
-       getBuiltins().payload.attrs->push_back(Attr(symbols.create(name2), v));
+       const_cast<Bindings *>(getBuiltins().attrs())->push_back(Attr(symbols.create(name2), v));
    }
}

@@ -515,13 +522,15 @@ std::ostream & operator<<(std::ostream & output, const PrimOp & primOp)

const PrimOp * Value::primOpAppPrimOp() const
{
-   Value * left = payload.primOpApp.left;
+   Value * left = primOpApp().left;
    while (left && !left->isPrimOp()) {
-       left = left->payload.primOpApp.left;
+       left = left->primOpApp().left;
    }

    if (!left)
        return nullptr;

    assert(left->isPrimOp());
    return left->primOp();
}

@@ -529,7 +538,7 @@ const PrimOp * Value::primOpAppPrimOp() const
void Value::mkPrimOp(PrimOp * p)
{
    p->check();
-   finishValue(tPrimOp, { .primOp = p });
+   setStorage(p);
}

@@ -561,7 +570,7 @@ Value * EvalState::addPrimOp(PrimOp && primOp)
    else {
        staticBaseEnv->vars.emplace_back(envName, baseEnvDispl);
        baseEnv.values[baseEnvDispl++] = v;
-       getBuiltins().payload.attrs->push_back(Attr(symbols.create(primOp.name), v));
+       const_cast<Bindings *>(getBuiltins().attrs())->push_back(Attr(symbols.create(primOp.name), v));
    }

    return v;
@@ -598,7 +607,7 @@ std::optional<EvalState::Doc> EvalState::getDoc(Value & v)
        };
    }
    if (v.isLambda()) {
-       auto exprLambda = v.payload.lambda.fun;
+       auto exprLambda = v.lambda().fun;

        std::ostringstream s;
        std::string name;
@@ -645,7 +654,7 @@ std::optional<EvalState::Doc> EvalState::getDoc(Value & v)
        Value & functor = *v.attrs()->find(sFunctor)->value;
        Value * vp[] = {&v};
        Value partiallyApplied;
-       // The first paramater is not user-provided, and may be
+       // The first parameter is not user-provided, and may be
        // handled by code that is opaque to the user, like lib.const = x: y: y;
        // So preferably we show docs that are relevant to the
        // "partially applied" function returned by e.g. `const`.
@@ -908,7 +917,7 @@ void Value::mkStringMove(const char * s, const NixStringContext & context)

void Value::mkPath(const SourcePath & path)
{
-   mkPath(&*path.accessor, makeImmutableString(path.path.abs()), noPos.get());
+   mkPath(&*path.accessor, makeImmutableString(path.path.abs()));
}

@@ -1535,9 +1544,14 @@ void EvalState::callFunction(Value & fun, std::span<Value *> args, Value & vRes,
{
    auto _level = addCallDepth(pos);

-   auto trace = settings.traceFunctionCalls
-       ? std::make_unique<FunctionCallTrace>(positions[pos])
-       : nullptr;
+   auto neededHooks = profiler.getNeededHooks();
+   if (neededHooks.test(EvalProfiler::preFunctionCall)) [[unlikely]]
+       profiler.preFunctionCallHook(*this, fun, args, pos);
+
+   Finally traceExit_{[&](){
+       if (profiler.getNeededHooks().test(EvalProfiler::postFunctionCall)) [[unlikely]]
+           profiler.postFunctionCallHook(*this, fun, args, pos);
+   }};

    forceValue(fun, pos);

@@ -1559,13 +1573,13 @@ void EvalState::callFunction(Value & fun, std::span<Value *> args, Value & vRes,

    if (vCur.isLambda()) {

-       ExprLambda & lambda(*vCur.payload.lambda.fun);
+       ExprLambda & lambda(*vCur.lambda().fun);

        auto size =
            (!lambda.arg ? 0 : 1) +
            (lambda.hasFormals() ? lambda.formals->formals.size() : 0);
        Env & env2(allocEnv(size));
-       env2.up = vCur.payload.lambda.env;
+       env2.up = vCur.lambda().env;

        Displacement displ = 0;

@@ -1595,7 +1609,7 @@ void EvalState::callFunction(Value & fun, std::span<Value *> args, Value & vRes,
                        symbols[i.name])
                        .atPos(lambda.pos)
                        .withTrace(pos, "from call site")
-                       .withFrame(*fun.payload.lambda.env, lambda)
+                       .withFrame(*fun.lambda().env, lambda)
                        .debugThrow();
                }
                env2.values[displ++] = i.def->maybeThunk(*this, env2);
@@ -1622,7 +1636,7 @@ void EvalState::callFunction(Value & fun, std::span<Value *> args, Value & vRes,
                    .atPos(lambda.pos)
                    .withTrace(pos, "from call site")
                    .withSuggestions(suggestions)
-                   .withFrame(*fun.payload.lambda.env, lambda)
+                   .withFrame(*fun.lambda().env, lambda)
                    .debugThrow();
            }
            unreachable();
@@ -1694,7 +1708,7 @@ void EvalState::callFunction(Value & fun, std::span<Value *> args, Value & vRes,
            Value * primOp = &vCur;
            while (primOp->isPrimOpApp()) {
                argsDone++;
-               primOp = primOp->payload.primOpApp.left;
+               primOp = primOp->primOpApp().left;
            }
            assert(primOp->isPrimOp());
            auto arity = primOp->primOp()->arity;
@@ -1710,8 +1724,8 @@ void EvalState::callFunction(Value & fun, std::span<Value *> args, Value & vRes,

            Value * vArgs[maxPrimOpArity];
            auto n = argsDone;
-           for (Value * arg = &vCur; arg->isPrimOpApp(); arg = arg->payload.primOpApp.left)
-               vArgs[--n] = arg->payload.primOpApp.right;
+           for (Value * arg = &vCur; arg->isPrimOpApp(); arg = arg->primOpApp().left)
+               vArgs[--n] = arg->primOpApp().right;

            for (size_t i = 0; i < argsLeft; ++i)
                vArgs[argsDone + i] = args[i];
@@ -1817,14 +1831,14 @@ void EvalState::autoCallFunction(const Bindings & args, Value & fun, Value & res
        }
    }

-   if (!fun.isLambda() || !fun.payload.lambda.fun->hasFormals()) {
+   if (!fun.isLambda() || !fun.lambda().fun->hasFormals()) {
        res = fun;
        return;
    }

-   auto attrs = buildBindings(std::max(static_cast<uint32_t>(fun.payload.lambda.fun->formals->formals.size()), args.size()));
+   auto attrs = buildBindings(std::max(static_cast<uint32_t>(fun.lambda().fun->formals->formals.size()), args.size()));

-   if (fun.payload.lambda.fun->formals->ellipsis) {
+   if (fun.lambda().fun->formals->ellipsis) {
        // If the formals have an ellipsis (eg the function accepts extra args) pass
        // all available automatic arguments (which includes arguments specified on
        // the command line via --arg/--argstr)
@@ -1832,7 +1846,7 @@ void EvalState::autoCallFunction(const Bindings & args, Value & fun, Value & res
            attrs.insert(v);
    } else {
        // Otherwise, only pass the arguments that the function accepts
-       for (auto & i : fun.payload.lambda.fun->formals->formals) {
+       for (auto & i : fun.lambda().fun->formals->formals) {
            auto j = args.get(i.name);
            if (j) {
                attrs.insert(*j);
@@ -1842,7 +1856,7 @@ Nix attempted to evaluate a function as a top level expression; in
this case it must have its arguments supplied either by default
values, or passed explicitly with '--arg' or '--argstr'. See
https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbols[i.name])
-           .atPos(i.pos).withFrame(*fun.payload.lambda.env, *fun.payload.lambda.fun).debugThrow();
+           .atPos(i.pos).withFrame(*fun.lambda().env, *fun.lambda().fun).debugThrow();
        }
    }
}
@@ -2000,9 +2014,10 @@ void EvalState::concatLists(Value & v, size_t nrLists, Value * const * lists, co
    auto list = buildList(len);
    auto out = list.elems;
    for (size_t n = 0, pos = 0; n < nrLists; ++n) {
-       auto l = lists[n]->listSize();
+       auto listView = lists[n]->listView();
+       auto l = listView.size();
        if (l)
-           memcpy(out + pos, lists[n]->listElems(), l * sizeof(Value *));
+           memcpy(out + pos, listView.data(), l * sizeof(Value *));
        pos += l;
    }
    v.mkList(list);
@@ -2155,7 +2170,7 @@ void EvalState::forceValueDeep(Value & v)
            try {
                // If the value is a thunk, we're evaling. Otherwise no trace necessary.
                auto dts = debugRepl && i.value->isThunk()
-                   ? makeDebugTraceStacker(*this, *i.value->payload.thunk.expr, *i.value->payload.thunk.env, i.pos,
+                   ? makeDebugTraceStacker(*this, *i.value->thunk().expr, *i.value->thunk().env, i.pos,
                        "while evaluating the attribute '%1%'", symbols[i.name])
                    : nullptr;

@@ -2167,7 +2182,7 @@ void EvalState::forceValueDeep(Value & v)
        }

        else if (v.isList()) {
-           for (auto v2 : v.listItems())
+           for (auto v2 : v.listView())
                recurse(*v2);
        }
    };
@@ -2235,8 +2250,18 @@ bool EvalState::forceBool(Value & v, const PosIdx pos, std::string_view errorCtx
    return v.boolean();
}

+Bindings::const_iterator EvalState::getAttr(Symbol attrSym, const Bindings * attrSet, std::string_view errorCtx)
+{
+   auto value = attrSet->find(attrSym);
+   if (value == attrSet->end()) {
+       error<TypeError>("attribute '%s' missing", symbols[attrSym])
+           .withTrace(noPos, errorCtx)
+           .debugThrow();
+   }
+   return value;
+}
+
-bool EvalState::isFunctor(Value & fun)
+bool EvalState::isFunctor(const Value & fun) const
{
    return fun.type() == nAttrs && fun.attrs()->find(sFunctor) != fun.attrs()->end();
}
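The new EvalState::getAttr() helper centralizes the "attribute '%s' missing" error with an attached trace. A usage sketch mirroring how the sampling profiler added earlier in this commit calls it (the surrounding context is illustrative):

    // Look up the mandatory "name" attribute of a derivationStrict call.
    state.forceAttrs(*args[0], pos, "");
    auto nameAttr = state.getAttr(state.sName, args[0]->attrs(), "");
    auto drvName = std::string(state.forceStringNoCtx(*nameAttr->value, pos, ""));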
@@ -2279,8 +2304,8 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string

void copyContext(const Value & v, NixStringContext & context, const ExperimentalFeatureSettings & xpSettings)
{
-   if (v.payload.string.context)
-       for (const char * * p = v.payload.string.context; *p; ++p)
+   if (v.context())
+       for (const char * * p = v.context(); *p; ++p)
            context.insert(NixStringContextElem::parse(*p, xpSettings));
}

@@ -2356,7 +2381,7 @@ BackedStringView EvalState::coerceToString(
            !canonicalizePath && !copyToStore
            ? // FIXME: hack to preserve path literals that end in a
              // slash, as in /foo/${x}.
-             v.payload.path.path
+             v.pathStr()
            : copyToStore
            ? store->printStorePath(copyPathToStore(context, v.path(), v.determinePos(pos)))
            : ({
@ -2409,7 +2434,8 @@ BackedStringView EvalState::coerceToString(
|
|||
|
||||
if (v.isList()) {
|
||||
std::string result;
|
||||
for (auto [n, v2] : enumerate(v.listItems())) {
|
||||
auto listView = v.listView();
|
||||
for (auto [n, v2] : enumerate(listView)) {
|
||||
try {
|
||||
result += *coerceToString(pos, *v2, context,
|
||||
"while evaluating one element of the list",
|
||||
|
|
@ -2447,6 +2473,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat
|
|||
? *dstPathCached
|
||||
: [&]() {
|
||||
auto dstPath = fetchToStore(
|
||||
fetchSettings,
|
||||
*store,
|
||||
path.resolveSymlinks(SymlinkResolution::Ancestors),
|
||||
settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy,
|
||||
|
|
@ -2491,7 +2518,7 @@ SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext
|
|||
}
|
||||
}
|
||||
|
||||
/* Any other value should be coercable to a string, interpreted
|
||||
/* Any other value should be coercible to a string, interpreted
|
||||
relative to the root filesystem. */
|
||||
auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
|
||||
if (path == "" || path[0] != '/')
|
||||
|
|
@ -2637,14 +2664,14 @@ void EvalState::assertEqValues(Value & v1, Value & v2, const PosIdx pos, std::st
|
|||
return;
|
||||
|
||||
case nPath:
|
||||
if (v1.payload.path.accessor != v2.payload.path.accessor) {
|
||||
if (v1.pathAccessor() != v2.pathAccessor()) {
|
||||
error<AssertionError>(
|
||||
"path '%s' is not equal to path '%s' because their accessors are different",
|
||||
ValuePrinter(*this, v1, errorPrintOptions),
|
||||
ValuePrinter(*this, v2, errorPrintOptions))
|
||||
.debugThrow();
|
||||
}
|
||||
if (strcmp(v1.payload.path.path, v2.payload.path.path) != 0) {
|
||||
if (strcmp(v1.pathStr(), v2.pathStr()) != 0) {
|
||||
error<AssertionError>(
|
||||
"path '%s' is not equal to path '%s'",
|
||||
ValuePrinter(*this, v1, errorPrintOptions),
|
||||
|
|
@ -2668,7 +2695,7 @@ void EvalState::assertEqValues(Value & v1, Value & v2, const PosIdx pos, std::st
|
|||
}
|
||||
for (size_t n = 0; n < v1.listSize(); ++n) {
|
||||
try {
|
||||
assertEqValues(*v1.listElems()[n], *v2.listElems()[n], pos, errorCtx);
|
||||
assertEqValues(*v1.listView()[n], *v2.listView()[n], pos, errorCtx);
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], "while comparing list element %d", n);
|
||||
throw;
|
||||
|
|
@ -2811,8 +2838,8 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
|
|||
case nPath:
|
||||
return
|
||||
// FIXME: compare accessors by their fingerprint.
|
||||
v1.payload.path.accessor == v2.payload.path.accessor
|
||||
&& strcmp(v1.payload.path.path, v2.payload.path.path) == 0;
|
||||
v1.pathAccessor() == v2.pathAccessor()
|
||||
&& strcmp(v1.pathStr(), v2.pathStr()) == 0;
|
||||
|
||||
case nNull:
|
||||
return true;
|
||||
|
|
@ -2820,7 +2847,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
|
|||
case nList:
|
||||
if (v1.listSize() != v2.listSize()) return false;
|
||||
for (size_t n = 0; n < v1.listSize(); ++n)
|
||||
if (!eqValues(*v1.listElems()[n], *v2.listElems()[n], pos, errorCtx)) return false;
|
||||
if (!eqValues(*v1.listView()[n], *v2.listView()[n], pos, errorCtx)) return false;
|
||||
return true;
|
||||
|
||||
case nAttrs: {
|
||||
|
|
@ -2867,7 +2894,7 @@ bool EvalState::fullGC() {
|
|||
GC_gcollect();
|
||||
// Check that it ran. We might replace this with a version that uses more
|
||||
// of the boehm API to get this reliably, at a maintenance cost.
|
||||
// We use a 1K margin because technically this has a race condtion, but we
|
||||
// We use a 1K margin because technically this has a race condition, but we
|
||||
// probably won't encounter it in practice, because the CLI isn't concurrent
|
||||
// like that.
|
||||
return GC_get_bytes_since_gc() < 1024;
|
||||
|
|
@ -3020,7 +3047,7 @@ void EvalState::printStatistics()
|
|||
// XXX: overrides earlier assignment
|
||||
topObj["symbols"] = json::array();
|
||||
auto &list = topObj["symbols"];
|
||||
symbols.dump([&](const std::string & s) { list.emplace_back(s); });
|
||||
symbols.dump([&](std::string_view s) { list.emplace_back(s); });
|
||||
}
|
||||
if (outPath == "-") {
|
||||
std::cerr << topObj.dump(2) << std::endl;
|
||||
|
|
@ -3159,7 +3186,7 @@ std::optional<SourcePath> EvalState::resolveLookupPathPath(const LookupPath::Pat
|
|||
store,
|
||||
fetchSettings,
|
||||
EvalSettings::resolvePseudoUrl(value));
|
||||
auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy);
|
||||
auto storePath = fetchToStore(fetchSettings, *store, SourcePath(accessor), FetchMode::Copy);
|
||||
return finish(this->storePath(storePath));
|
||||
} catch (Error & e) {
|
||||
logWarning({
|
||||
|
|
|
|||
|
|
@ -3,16 +3,20 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
FunctionCallTrace::FunctionCallTrace(const Pos & pos) : pos(pos) {
|
||||
void FunctionCallTrace::preFunctionCallHook(
|
||||
EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos)
|
||||
{
|
||||
auto duration = std::chrono::high_resolution_clock::now().time_since_epoch();
|
||||
auto ns = std::chrono::duration_cast<std::chrono::nanoseconds>(duration);
|
||||
printMsg(lvlInfo, "function-trace entered %1% at %2%", pos, ns.count());
|
||||
printMsg(lvlInfo, "function-trace entered %1% at %2%", state.positions[pos], ns.count());
|
||||
}
|
||||
|
||||
FunctionCallTrace::~FunctionCallTrace() {
|
||||
void FunctionCallTrace::postFunctionCallHook(
|
||||
EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos)
|
||||
{
|
||||
auto duration = std::chrono::high_resolution_clock::now().time_since_epoch();
|
||||
auto ns = std::chrono::duration_cast<std::chrono::nanoseconds>(duration);
|
||||
printMsg(lvlInfo, "function-trace exited %1% at %2%", pos, ns.count());
|
||||
printMsg(lvlInfo, "function-trace exited %1% at %2%", state.positions[pos], ns.count());
|
||||
}
|
||||
|
||||
}
|
||||
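Editorial note: as rewritten above, function tracing is now just a profiler that prints a nanosecond timestamp in its pre- and post-call hooks, leaving it to downstream tooling to pair the lines up. A standalone sketch of that shape, with the evaluator reduced to a single wrapper that invokes the hooks around a call (class and function names are illustrative only):

#include <chrono>
#include <cstdio>
#include <string>

struct CallProfiler {
    virtual void preCall(const std::string & where) = 0;
    virtual void postCall(const std::string & where) = 0;
    virtual ~CallProfiler() = default;
};

// Mirrors the FunctionCallTrace hooks: emit a timestamp on entry and exit so
// per-call durations can be reconstructed from the log.
struct TraceProfiler : CallProfiler {
    static long long nowNs() {
        auto d = std::chrono::high_resolution_clock::now().time_since_epoch();
        return std::chrono::duration_cast<std::chrono::nanoseconds>(d).count();
    }
    void preCall(const std::string & where) override {
        std::printf("function-trace entered %s at %lld\n", where.c_str(), nowNs());
    }
    void postCall(const std::string & where) override {
        std::printf("function-trace exited %s at %lld\n", where.c_str(), nowNs());
    }
};

// Stand-in for the evaluator's call site: wrap the actual call in the hooks.
template<typename F>
auto profiledCall(CallProfiler & p, const std::string & where, F && f) {
    p.preCall(where);
    auto r = f();
    p.postCall(where);
    return r;
}

int main() {
    TraceProfiler tracer;
    int x = profiledCall(tracer, "example.nix:1:1", [] { return 6 * 7; });
    std::printf("result: %d\n", x);
}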
|
|
|
|||
|
|
@ -117,7 +117,7 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT
|
|||
state->forceList(*i->value, i->pos, "while evaluating the 'outputs' attribute of a derivation");
|
||||
|
||||
/* For each output... */
|
||||
for (auto elem : i->value->listItems()) {
|
||||
for (auto elem : i->value->listView()) {
|
||||
std::string output(state->forceStringNoCtx(*elem, i->pos, "while evaluating the name of an output of a derivation"));
|
||||
|
||||
if (withPaths) {
|
||||
|
|
@ -159,7 +159,7 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT
|
|||
/* ^ this shows during `nix-env -i` right under the bad derivation */
|
||||
if (!outTI->isList()) throw errMsg;
|
||||
Outputs result;
|
||||
for (auto elem : outTI->listItems()) {
|
||||
for (auto elem : outTI->listView()) {
|
||||
if (elem->type() != nString) throw errMsg;
|
||||
auto out = outputs.find(elem->c_str());
|
||||
if (out == outputs.end()) throw errMsg;
|
||||
|
|
@ -206,7 +206,7 @@ bool PackageInfo::checkMeta(Value & v)
|
|||
{
|
||||
state->forceValue(v, v.determinePos(noPos));
|
||||
if (v.type() == nList) {
|
||||
for (auto elem : v.listItems())
|
||||
for (auto elem : v.listView())
|
||||
if (!checkMeta(*elem)) return false;
|
||||
return true;
|
||||
}
|
||||
|
|
@ -400,7 +400,8 @@ static void getDerivations(EvalState & state, Value & vIn,
|
|||
}
|
||||
|
||||
else if (v.type() == nList) {
|
||||
for (auto [n, elem] : enumerate(v.listItems())) {
|
||||
auto listView = v.listView();
|
||||
for (auto [n, elem] : enumerate(listView)) {
|
||||
std::string pathPrefix2 = addToPath(pathPrefix, fmt("%d", n));
|
||||
if (getDerivation(state, *elem, pathPrefix2, drvs, done, ignoreAssertionFailures))
|
||||
getDerivations(state, *elem, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
|
||||
|
|
|
|||
|
|
@ -89,9 +89,9 @@ Env & EvalState::allocEnv(size_t size)
|
|||
void EvalState::forceValue(Value & v, const PosIdx pos)
|
||||
{
|
||||
if (v.isThunk()) {
|
||||
Env * env = v.payload.thunk.env;
|
||||
Env * env = v.thunk().env;
|
||||
assert(env || v.isBlackhole());
|
||||
Expr * expr = v.payload.thunk.expr;
|
||||
Expr * expr = v.thunk().expr;
|
||||
try {
|
||||
v.mkBlackhole();
|
||||
//checkInterrupt();
|
||||
|
|
@ -106,7 +106,7 @@ void EvalState::forceValue(Value & v, const PosIdx pos)
|
|||
}
|
||||
}
|
||||
else if (v.isApp())
|
||||
callFunction(*v.payload.app.left, *v.payload.app.right, v, pos);
|
||||
callFunction(*v.app().left, *v.app().right, v, pos);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
16
src/libexpr/include/nix/expr/eval-profiler-settings.hh
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "nix/util/configuration.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
enum struct EvalProfilerMode { disabled, flamegraph };
|
||||
|
||||
template<>
|
||||
EvalProfilerMode BaseSetting<EvalProfilerMode>::parse(const std::string & str) const;
|
||||
|
||||
template<>
|
||||
std::string BaseSetting<EvalProfilerMode>::to_string() const;
|
||||
|
||||
}
|
||||
114
src/libexpr/include/nix/expr/eval-profiler.hh
Normal file
|
|
@ -0,0 +1,114 @@
|
|||
#pragma once
|
||||
/**
|
||||
* @file
|
||||
*
|
||||
* Evaluation profiler interface definitions and builtin implementations.
|
||||
*/
|
||||
|
||||
#include "nix/util/ref.hh"
|
||||
|
||||
#include <vector>
|
||||
#include <span>
|
||||
#include <bitset>
|
||||
#include <optional>
|
||||
#include <filesystem>
|
||||
|
||||
namespace nix {
|
||||
|
||||
class EvalState;
|
||||
class PosIdx;
|
||||
struct Value;
|
||||
|
||||
class EvalProfiler
|
||||
{
|
||||
public:
|
||||
enum Hook {
|
||||
preFunctionCall,
|
||||
postFunctionCall,
|
||||
};
|
||||
|
||||
static constexpr std::size_t numHooks = Hook::postFunctionCall + 1;
|
||||
using Hooks = std::bitset<numHooks>;
|
||||
|
||||
private:
|
||||
std::optional<Hooks> neededHooks;
|
||||
|
||||
protected:
|
||||
/** Invalidate the cached neededHooks. */
|
||||
void invalidateNeededHooks()
|
||||
{
|
||||
neededHooks = std::nullopt;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get which hooks need to be called.
|
||||
*
|
||||
* This is the actual implementation which has to be defined by subclasses.
|
||||
* Public API goes through the needsHooks, which is a
|
||||
* non-virtual interface (NVI) which caches the return value.
|
||||
*/
|
||||
virtual Hooks getNeededHooksImpl() const
|
||||
{
|
||||
return Hooks{};
|
||||
}
|
||||
|
||||
public:
|
||||
/**
|
||||
* Hook called in the EvalState::callFunction preamble.
|
||||
* Gets called only if (getNeededHooks().test(Hook::preFunctionCall)) is true.
|
||||
*
|
||||
* @param state Evaluator state.
|
||||
* @param v Function being invoked.
|
||||
* @param args Function arguments.
|
||||
* @param pos Function position.
|
||||
*/
|
||||
virtual void preFunctionCallHook(EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos);
|
||||
|
||||
/**
|
||||
* Hook called on EvalState::callFunction exit.
|
||||
* Gets called only if (getNeededHooks().test(Hook::postFunctionCall)) is true.
|
||||
*
|
||||
* @param state Evaluator state.
|
||||
* @param v Function being invoked.
|
||||
* @param args Function arguments.
|
||||
* @param pos Function position.
|
||||
*/
|
||||
virtual void postFunctionCallHook(EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos);
|
||||
|
||||
virtual ~EvalProfiler() = default;
|
||||
|
||||
/**
|
||||
* Get which hooks need to be invoked for this EvalProfiler instance.
|
||||
*/
|
||||
Hooks getNeededHooks()
|
||||
{
|
||||
if (neededHooks.has_value())
|
||||
return *neededHooks;
|
||||
return *(neededHooks = getNeededHooksImpl());
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Profiler that invokes multiple profilers at once.
|
||||
*/
|
||||
class MultiEvalProfiler : public EvalProfiler
|
||||
{
|
||||
std::vector<ref<EvalProfiler>> profilers;
|
||||
|
||||
[[gnu::noinline]] Hooks getNeededHooksImpl() const override;
|
||||
|
||||
public:
|
||||
MultiEvalProfiler() = default;
|
||||
|
||||
/** Register a profiler instance. */
|
||||
void addProfiler(ref<EvalProfiler> profiler);
|
||||
|
||||
[[gnu::noinline]] void
|
||||
preFunctionCallHook(EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos) override;
|
||||
[[gnu::noinline]] void
|
||||
postFunctionCallHook(EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos) override;
|
||||
};
|
||||
|
||||
ref<EvalProfiler> makeSampleStackProfiler(EvalState & state, std::filesystem::path profileFile, uint64_t frequency);
|
||||
|
||||
}
|
||||
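Editorial note: the header above keeps hook dispatch cheap by making getNeededHooks() a non-virtual wrapper that caches the bitset returned by the virtual getNeededHooksImpl(), and by letting MultiEvalProfiler expose the union of its children's hooks. A compact standalone sketch of that caching-plus-aggregation scheme (the classes below are illustrative, not the Nix types):

#include <bitset>
#include <cstdio>
#include <memory>
#include <optional>
#include <vector>

class Profiler {
public:
    enum Hook { preCall, postCall };
    static constexpr std::size_t numHooks = postCall + 1;
    using Hooks = std::bitset<numHooks>;

    // Non-virtual interface: the virtual call is made once and cached.
    Hooks neededHooks() {
        if (!cached) cached = neededHooksImpl();
        return *cached;
    }
    virtual void onPreCall() {}
    virtual void onPostCall() {}
    virtual ~Profiler() = default;

protected:
    virtual Hooks neededHooksImpl() const { return Hooks{}; }
    void invalidate() { cached.reset(); }

private:
    std::optional<Hooks> cached;
};

// Aggregates several profilers; it needs the union of their hook sets.
class MultiProfiler : public Profiler {
    std::vector<std::shared_ptr<Profiler>> ps;
protected:
    Hooks neededHooksImpl() const override {
        Hooks h;
        for (auto & p : ps) h |= p->neededHooks();
        return h;
    }
public:
    void add(std::shared_ptr<Profiler> p) { ps.push_back(std::move(p)); invalidate(); }
    void onPreCall() override { for (auto & p : ps) if (p->neededHooks().test(preCall)) p->onPreCall(); }
    void onPostCall() override { for (auto & p : ps) if (p->neededHooks().test(postCall)) p->onPostCall(); }
};

struct CountingProfiler : Profiler {
    int calls = 0;
    Hooks neededHooksImpl() const override { return Hooks{}.set(preCall); }
    void onPreCall() override { ++calls; }
};

int main() {
    MultiProfiler multi;
    auto counter = std::make_shared<CountingProfiler>();
    multi.add(counter);
    // The evaluator's hot path: test the cached bitset before dispatching.
    for (int i = 0; i < 3; ++i)
        if (multi.neededHooks().test(Profiler::preCall)) multi.onPreCall();
    std::printf("pre-call hooks seen: %d\n", counter->calls);
}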
|
|
@ -1,6 +1,7 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "nix/expr/eval-profiler-settings.hh"
|
||||
#include "nix/util/configuration.hh"
|
||||
#include "nix/util/source-path.hh"
|
||||
|
||||
|
|
@ -12,7 +13,7 @@ struct PrimOp;
|
|||
struct EvalSettings : Config
|
||||
{
|
||||
/**
|
||||
* Function used to interpet look path entries of a given scheme.
|
||||
* Function used to interpret look path entries of a given scheme.
|
||||
*
|
||||
* The argument is the non-scheme part of the lookup path entry (see
|
||||
* `LookupPathHooks` below).
|
||||
|
|
@ -203,6 +204,29 @@ struct EvalSettings : Config
|
|||
`flamegraph.pl`.
|
||||
)"};
|
||||
|
||||
Setting<EvalProfilerMode> evalProfilerMode{this, EvalProfilerMode::disabled, "eval-profiler",
|
||||
R"(
|
||||
Enables evaluation profiling. The following modes are supported:
|
||||
|
||||
* `flamegraph` stack sampling profiler. Outputs folded format, one line per stack (suitable for `flamegraph.pl` and compatible tools).
|
||||
|
||||
Use [`eval-profile-file`](#conf-eval-profile-file) to specify where the profile is saved.
|
||||
|
||||
See [Using the `eval-profiler`](@docroot@/advanced-topics/eval-profiler.md).
|
||||
)"};
|
||||
|
||||
Setting<Path> evalProfileFile{this, "nix.profile", "eval-profile-file",
|
||||
R"(
|
||||
Specifies the file where [evaluation profile](#conf-eval-profiler) is saved.
|
||||
)"};
|
||||
|
||||
Setting<uint32_t> evalProfilerFrequency{this, 99, "eval-profiler-frequency",
|
||||
R"(
|
||||
Specifies the sampling rate in hertz for sampling evaluation profilers.
|
||||
Use `0` to sample the stack after each function call.
|
||||
See [`eval-profiler`](#conf-eval-profiler).
|
||||
)"};
|
||||
|
||||
Setting<bool> useEvalCache{this, true, "eval-cache",
|
||||
R"(
|
||||
Whether to use the flake evaluation cache.
|
||||
|
|
@ -212,7 +236,7 @@ struct EvalSettings : Config
|
|||
|
||||
Setting<bool> ignoreExceptionsDuringTry{this, false, "ignore-try",
|
||||
R"(
|
||||
If set to true, ignore exceptions inside 'tryEval' calls when evaluating nix expressions in
|
||||
If set to true, ignore exceptions inside 'tryEval' calls when evaluating Nix expressions in
|
||||
debug mode (using the --debugger flag). By default, the debugger pauses on all exceptions.
|
||||
)"};
|
||||
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@
|
|||
|
||||
#include "nix/expr/attr-set.hh"
|
||||
#include "nix/expr/eval-error.hh"
|
||||
#include "nix/expr/eval-profiler.hh"
|
||||
#include "nix/util/types.hh"
|
||||
#include "nix/expr/value.hh"
|
||||
#include "nix/expr/nixexpr.hh"
|
||||
|
|
@ -214,7 +215,7 @@ public:
|
|||
const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue,
|
||||
sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls,
|
||||
sFile, sLine, sColumn, sFunctor, sToString,
|
||||
sRight, sWrong, sStructuredAttrs,
|
||||
sRight, sWrong, sStructuredAttrs, sJson,
|
||||
sAllowedReferences, sAllowedRequisites, sDisallowedReferences, sDisallowedRequisites,
|
||||
sMaxSize, sMaxClosureSize,
|
||||
sBuilder, sArgs,
|
||||
|
|
@ -552,6 +553,11 @@ public:
|
|||
std::string_view forceString(Value & v, NixStringContext & context, const PosIdx pos, std::string_view errorCtx, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
std::string_view forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx);
|
||||
|
||||
/**
|
||||
* Get attribute from an attribute set and throw an error if it doesn't exist.
|
||||
*/
|
||||
Bindings::const_iterator getAttr(Symbol attrSym, const Bindings * attrSet, std::string_view errorCtx);
|
||||
|
||||
template<typename... Args>
|
||||
[[gnu::noinline]]
|
||||
void addErrorTrace(Error & e, const Args & ... formatArgs) const;
|
||||
|
|
@ -766,7 +772,7 @@ public:
|
|||
*/
|
||||
void assertEqValues(Value & v1, Value & v2, const PosIdx pos, std::string_view errorCtx);
|
||||
|
||||
bool isFunctor(Value & fun);
|
||||
bool isFunctor(const Value & fun) const;
|
||||
|
||||
void callFunction(Value & fun, std::span<Value *> args, Value & vRes, const PosIdx pos);
|
||||
|
||||
|
|
@ -939,6 +945,9 @@ private:
|
|||
typedef std::map<ExprLambda *, size_t> FunctionCalls;
|
||||
FunctionCalls functionCalls;
|
||||
|
||||
/** Evaluation/call profiler. */
|
||||
MultiEvalProfiler profiler;
|
||||
|
||||
void incrFunctionCall(ExprLambda * fun);
|
||||
|
||||
typedef std::map<PosIdx, size_t> AttrSelects;
|
||||
|
|
|
|||
|
|
@ -2,15 +2,24 @@
|
|||
///@file
|
||||
|
||||
#include "nix/expr/eval.hh"
|
||||
|
||||
#include <chrono>
|
||||
#include "nix/expr/eval-profiler.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct FunctionCallTrace
|
||||
class FunctionCallTrace : public EvalProfiler
|
||||
{
|
||||
const Pos pos;
|
||||
FunctionCallTrace(const Pos & pos);
|
||||
~FunctionCallTrace();
|
||||
Hooks getNeededHooksImpl() const override
|
||||
{
|
||||
return Hooks().set(preFunctionCall).set(postFunctionCall);
|
||||
}
|
||||
|
||||
public:
|
||||
FunctionCallTrace() = default;
|
||||
|
||||
[[gnu::noinline]] void
|
||||
preFunctionCallHook(EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos) override;
|
||||
[[gnu::noinline]] void
|
||||
postFunctionCallHook(EvalState & state, const Value & v, std::span<Value *> args, const PosIdx pos) override;
|
||||
};
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,6 +14,8 @@ headers = [config_pub_h] + files(
|
|||
'eval-error.hh',
|
||||
'eval-gc.hh',
|
||||
'eval-inline.hh',
|
||||
'eval-profiler-settings.hh',
|
||||
'eval-profiler.hh',
|
||||
'eval-settings.hh',
|
||||
'eval.hh',
|
||||
'function-trace.hh',
|
||||
|
|
|
|||
|
|
@ -138,9 +138,9 @@ struct ExprPath : Expr
|
|||
ref<SourceAccessor> accessor;
|
||||
std::string s;
|
||||
Value v;
|
||||
ExprPath(ref<SourceAccessor> accessor, std::string s, PosIdx pos) : accessor(accessor), s(std::move(s))
|
||||
ExprPath(ref<SourceAccessor> accessor, std::string s) : accessor(accessor), s(std::move(s))
|
||||
{
|
||||
v.mkPath(&*accessor, this->s.c_str(), pos.get());
|
||||
v.mkPath(&*accessor, this->s.c_str());
|
||||
}
|
||||
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||
COMMON_METHODS
|
||||
|
|
@ -306,6 +306,9 @@ struct Formal
|
|||
struct Formals
|
||||
{
|
||||
typedef std::vector<Formal> Formals_;
|
||||
/**
|
||||
* @pre Sorted according to predicate (std::tie(a.name, a.pos) < std::tie(b.name, b.pos)).
|
||||
*/
|
||||
Formals_ formals;
|
||||
bool ellipsis;
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
#pragma once
|
||||
|
||||
#include "nix/expr/value.hh"
|
||||
#include "nix/expr/symbol-table.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
|
|||
|
|
@ -1,51 +1,35 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <list>
|
||||
#include <map>
|
||||
#include <unordered_map>
|
||||
|
||||
#include "nix/util/types.hh"
|
||||
#include <memory_resource>
|
||||
#include "nix/expr/value.hh"
|
||||
#include "nix/util/chunked-vector.hh"
|
||||
#include "nix/util/error.hh"
|
||||
|
||||
#include <boost/version.hpp>
|
||||
#define USE_FLAT_SYMBOL_SET (BOOST_VERSION >= 108100)
|
||||
#if USE_FLAT_SYMBOL_SET
|
||||
# include <boost/unordered/unordered_flat_set.hpp>
|
||||
#else
|
||||
# include <boost/unordered/unordered_set.hpp>
|
||||
#endif
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* This class mainly exists to give us an operator<< for ostreams. We could also
|
||||
* return plain strings from SymbolTable, but then we'd have to wrap every
|
||||
* instance of a symbol that is fmt()ed, which is inconvenient and error-prone.
|
||||
*/
|
||||
class SymbolStr
|
||||
class SymbolValue : protected Value
|
||||
{
|
||||
friend class SymbolStr;
|
||||
friend class SymbolTable;
|
||||
|
||||
private:
|
||||
const std::string * s;
|
||||
uint32_t size_;
|
||||
uint32_t idx;
|
||||
|
||||
explicit SymbolStr(const std::string & symbol): s(&symbol) {}
|
||||
SymbolValue() = default;
|
||||
|
||||
public:
|
||||
bool operator == (std::string_view s2) const
|
||||
operator std::string_view() const noexcept
|
||||
{
|
||||
return *s == s2;
|
||||
}
|
||||
|
||||
const char * c_str() const
|
||||
{
|
||||
return s->c_str();
|
||||
}
|
||||
|
||||
operator const std::string_view () const
|
||||
{
|
||||
return *s;
|
||||
}
|
||||
|
||||
friend std::ostream & operator <<(std::ostream & os, const SymbolStr & symbol);
|
||||
|
||||
bool empty() const
|
||||
{
|
||||
return s->empty();
|
||||
return {c_str(), size_};
|
||||
}
|
||||
};
|
||||
|
||||
|
|
@ -56,24 +40,161 @@ public:
|
|||
*/
|
||||
class Symbol
|
||||
{
|
||||
friend class SymbolStr;
|
||||
friend class SymbolTable;
|
||||
|
||||
private:
|
||||
uint32_t id;
|
||||
|
||||
explicit Symbol(uint32_t id): id(id) {}
|
||||
explicit Symbol(uint32_t id) noexcept : id(id) {}
|
||||
|
||||
public:
|
||||
Symbol() : id(0) {}
|
||||
Symbol() noexcept : id(0) {}
|
||||
|
||||
explicit operator bool() const { return id > 0; }
|
||||
[[gnu::always_inline]]
|
||||
explicit operator bool() const noexcept { return id > 0; }
|
||||
|
||||
auto operator<=>(const Symbol other) const { return id <=> other.id; }
|
||||
bool operator==(const Symbol other) const { return id == other.id; }
|
||||
auto operator<=>(const Symbol other) const noexcept { return id <=> other.id; }
|
||||
bool operator==(const Symbol other) const noexcept { return id == other.id; }
|
||||
|
||||
friend class std::hash<Symbol>;
|
||||
};
|
||||
|
||||
/**
|
||||
* This class mainly exists to give us an operator<< for ostreams. We could also
|
||||
* return plain strings from SymbolTable, but then we'd have to wrap every
|
||||
* instance of a symbol that is fmt()ed, which is inconvenient and error-prone.
|
||||
*/
|
||||
class SymbolStr
|
||||
{
|
||||
friend class SymbolTable;
|
||||
|
||||
constexpr static size_t chunkSize{8192};
|
||||
using SymbolValueStore = ChunkedVector<SymbolValue, chunkSize>;
|
||||
|
||||
const SymbolValue * s;
|
||||
|
||||
struct Key
|
||||
{
|
||||
using HashType = boost::hash<std::string_view>;
|
||||
|
||||
SymbolValueStore & store;
|
||||
std::string_view s;
|
||||
std::size_t hash;
|
||||
std::pmr::polymorphic_allocator<char> & alloc;
|
||||
|
||||
Key(SymbolValueStore & store, std::string_view s, std::pmr::polymorphic_allocator<char> & stringAlloc)
|
||||
: store(store)
|
||||
, s(s)
|
||||
, hash(HashType{}(s))
|
||||
, alloc(stringAlloc) {}
|
||||
};
|
||||
|
||||
public:
|
||||
SymbolStr(const SymbolValue & s) noexcept : s(&s) {}
|
||||
|
||||
SymbolStr(const Key & key)
|
||||
{
|
||||
auto size = key.s.size();
|
||||
if (size >= std::numeric_limits<uint32_t>::max()) {
|
||||
throw Error("Size of symbol exceeds 4GiB and cannot be stored");
|
||||
}
|
||||
// for multi-threaded implementations: lock store and allocator here
|
||||
const auto & [v, idx] = key.store.add(SymbolValue{});
|
||||
if (size == 0) {
|
||||
v.mkString("", nullptr);
|
||||
} else {
|
||||
auto s = key.alloc.allocate(size + 1);
|
||||
memcpy(s, key.s.data(), size);
|
||||
s[size] = '\0';
|
||||
v.mkString(s, nullptr);
|
||||
}
|
||||
v.size_ = size;
|
||||
v.idx = idx;
|
||||
this->s = &v;
|
||||
}
|
||||
|
||||
bool operator == (std::string_view s2) const noexcept
|
||||
{
|
||||
return *s == s2;
|
||||
}
|
||||
|
||||
[[gnu::always_inline]]
|
||||
const char * c_str() const noexcept
|
||||
{
|
||||
return s->c_str();
|
||||
}
|
||||
|
||||
[[gnu::always_inline]]
|
||||
operator std::string_view () const noexcept
|
||||
{
|
||||
return *s;
|
||||
}
|
||||
|
||||
friend std::ostream & operator <<(std::ostream & os, const SymbolStr & symbol);
|
||||
|
||||
[[gnu::always_inline]]
|
||||
bool empty() const noexcept
|
||||
{
|
||||
return s->size_ == 0;
|
||||
}
|
||||
|
||||
[[gnu::always_inline]]
|
||||
size_t size() const noexcept
|
||||
{
|
||||
return s->size_;
|
||||
}
|
||||
|
||||
[[gnu::always_inline]]
|
||||
const Value * valuePtr() const noexcept
|
||||
{
|
||||
return s;
|
||||
}
|
||||
|
||||
explicit operator Symbol() const noexcept
|
||||
{
|
||||
return Symbol{s->idx + 1};
|
||||
}
|
||||
|
||||
struct Hash
|
||||
{
|
||||
using is_transparent = void;
|
||||
using is_avalanching = std::true_type;
|
||||
|
||||
std::size_t operator()(SymbolStr str) const
|
||||
{
|
||||
return Key::HashType{}(*str.s);
|
||||
}
|
||||
|
||||
std::size_t operator()(const Key & key) const noexcept
|
||||
{
|
||||
return key.hash;
|
||||
}
|
||||
};
|
||||
|
||||
struct Equal
|
||||
{
|
||||
using is_transparent = void;
|
||||
|
||||
bool operator()(SymbolStr a, SymbolStr b) const noexcept
|
||||
{
|
||||
// strings are unique, so that a pointer comparison is OK
|
||||
return a.s == b.s;
|
||||
}
|
||||
|
||||
bool operator()(SymbolStr a, const Key & b) const noexcept
|
||||
{
|
||||
return a == b.s;
|
||||
}
|
||||
|
||||
[[gnu::always_inline]]
|
||||
bool operator()(const Key & a, SymbolStr b) const noexcept
|
||||
{
|
||||
return operator()(b, a);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Symbol table used by the parser and evaluator to represent and look
|
||||
* up identifiers and attributes efficiently.
|
||||
|
|
@ -82,29 +203,46 @@ class SymbolTable
|
|||
{
|
||||
private:
|
||||
/**
|
||||
* Map from string view (backed by ChunkedVector) -> offset into the store.
|
||||
* SymbolTable is an append only data structure.
|
||||
* During its lifetime the monotonic buffer holds all strings and nodes, if the symbol set is node based.
|
||||
*/
|
||||
std::pmr::monotonic_buffer_resource buffer;
|
||||
std::pmr::polymorphic_allocator<char> stringAlloc{&buffer};
|
||||
SymbolStr::SymbolValueStore store{16};
|
||||
|
||||
/**
|
||||
* Transparent lookup of string view for a pointer to a ChunkedVector entry -> return offset into the store.
|
||||
* ChunkedVector references are never invalidated.
|
||||
*/
|
||||
std::unordered_map<std::string_view, uint32_t> symbols;
|
||||
ChunkedVector<std::string, 8192> store{16};
|
||||
#if USE_FLAT_SYMBOL_SET
|
||||
boost::unordered_flat_set<SymbolStr, SymbolStr::Hash, SymbolStr::Equal> symbols{SymbolStr::chunkSize};
|
||||
#else
|
||||
using SymbolValueAlloc = std::pmr::polymorphic_allocator<SymbolStr>;
|
||||
boost::unordered_set<SymbolStr, SymbolStr::Hash, SymbolStr::Equal, SymbolValueAlloc> symbols{SymbolStr::chunkSize, {&buffer}};
|
||||
#endif
|
||||
|
||||
public:
|
||||
|
||||
/**
|
||||
* Converts a string into a symbol.
|
||||
*/
|
||||
Symbol create(std::string_view s)
|
||||
{
|
||||
Symbol create(std::string_view s) {
|
||||
// Most symbols are looked up more than once, so we trade off insertion performance
|
||||
// for lookup performance.
|
||||
// FIXME: make this thread-safe.
|
||||
auto it = symbols.find(s);
|
||||
if (it != symbols.end())
|
||||
return Symbol(it->second + 1);
|
||||
return [&]<typename T>(T && key) -> Symbol {
|
||||
if constexpr (requires { symbols.insert<T>(key); }) {
|
||||
auto [it, _] = symbols.insert<T>(key);
|
||||
return Symbol(*it);
|
||||
} else {
|
||||
auto it = symbols.find<T>(key);
|
||||
if (it != symbols.end())
|
||||
return Symbol(*it);
|
||||
|
||||
const auto & [rawSym, idx] = store.add(s);
|
||||
symbols.emplace(rawSym, idx);
|
||||
return Symbol(idx + 1);
|
||||
it = symbols.emplace(key).first;
|
||||
return Symbol(*it);
|
||||
}
|
||||
}(SymbolStr::Key{store, s, stringAlloc});
|
||||
}
|
||||
|
||||
std::vector<SymbolStr> resolve(const std::vector<Symbol> & symbols) const
|
||||
|
|
@ -118,12 +256,14 @@ public:
|
|||
|
||||
SymbolStr operator[](Symbol s) const
|
||||
{
|
||||
if (s.id == 0 || s.id > store.size())
|
||||
uint32_t idx = s.id - uint32_t(1);
|
||||
if (idx >= store.size())
|
||||
unreachable();
|
||||
return SymbolStr(store[s.id - 1]);
|
||||
return store[idx];
|
||||
}
|
||||
|
||||
size_t size() const
|
||||
[[gnu::always_inline]]
|
||||
size_t size() const noexcept
|
||||
{
|
||||
return store.size();
|
||||
}
|
||||
|
|
@ -147,3 +287,5 @@ struct std::hash<nix::Symbol>
|
|||
return std::hash<decltype(s.id)>{}(s.id);
|
||||
}
|
||||
};
|
||||
|
||||
#undef USE_FLAT_SYMBOL_SET
|
||||
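Editorial note: the SymbolTable rewrite above stores Value-compatible SymbolValue entries in a ChunkedVector and puts a transparently keyed boost flat set on top, so a Symbol is just a 1-based index into the store. The standalone sketch below keeps only the core interning idea, a stable backing store plus a lookup structure from string contents to index, using std::deque and std::unordered_map in place of the chunked store and flat set (illustrative names, not the Nix types):

#include <cassert>
#include <cstdint>
#include <deque>
#include <iostream>
#include <string>
#include <string_view>
#include <unordered_map>

class Interner {
    // std::deque never relocates existing elements, so string_views into it stay valid.
    std::deque<std::string> store;
    std::unordered_map<std::string_view, uint32_t> index;

public:
    // Returns a 1-based id; 0 is reserved for "no symbol".
    uint32_t create(std::string_view s) {
        if (auto it = index.find(s); it != index.end())
            return it->second + 1;
        store.emplace_back(s);
        uint32_t idx = uint32_t(store.size() - 1);
        index.emplace(store.back(), idx);
        return idx + 1;
    }

    std::string_view operator[](uint32_t id) const {
        assert(id > 0 && id <= store.size());
        return store[id - 1];
    }

    size_t size() const { return store.size(); }
};

int main() {
    Interner symbols;
    uint32_t a = symbols.create("outPath");
    uint32_t b = symbols.create("outPath");   // interned: the same id comes back
    uint32_t c = symbols.create("drvPath");
    std::cout << a << " " << b << " " << c << " -> "
              << symbols[a] << ", " << symbols[c]
              << " (" << symbols.size() << " unique)\n";
}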
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
#include <cstddef>
|
||||
|
||||
// inluding the generated headers twice leads to errors
|
||||
// including the generated headers twice leads to errors
|
||||
#ifndef BISON_HEADER
|
||||
# include "lexer-tab.hh"
|
||||
# include "parser-tab.hh"
|
||||
|
|
|
|||
|
|
@ -140,6 +140,8 @@ sources = files(
|
|||
'eval-cache.cc',
|
||||
'eval-error.cc',
|
||||
'eval-gc.cc',
|
||||
'eval-profiler-settings.cc',
|
||||
'eval-profiler.cc',
|
||||
'eval-settings.cc',
|
||||
'eval.cc',
|
||||
'function-trace.cc',
|
||||
|
|
|
|||
|
|
@ -606,7 +606,7 @@ void ExprLambda::setDocComment(DocComment docComment) {
|
|||
size_t SymbolTable::totalSize() const
|
||||
{
|
||||
size_t n = 0;
|
||||
dump([&] (const std::string & s) { n += s.size(); });
|
||||
dump([&] (SymbolStr s) { n += s.size(); });
|
||||
return n;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -374,8 +374,8 @@ path_start
|
|||
root filesystem accessor, rather than the accessor of the
|
||||
current Nix expression. */
|
||||
literal.front() == '/'
|
||||
? new ExprPath(state->rootFS, std::move(path), CUR_POS)
|
||||
: new ExprPath(state->basePath.accessor, std::move(path), CUR_POS);
|
||||
? new ExprPath(state->rootFS, std::move(path))
|
||||
: new ExprPath(state->basePath.accessor, std::move(path));
|
||||
}
|
||||
| HPATH {
|
||||
if (state->settings.pureEval) {
|
||||
|
|
@ -385,7 +385,7 @@ path_start
|
|||
);
|
||||
}
|
||||
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
|
||||
$$ = new ExprPath(ref<SourceAccessor>(state->rootFS), std::move(path), CUR_POS);
|
||||
$$ = new ExprPath(ref<SourceAccessor>(state->rootFS), std::move(path));
|
||||
}
|
||||
;
|
||||
|
||||
|
|
|
|||
|
|
@ -24,7 +24,11 @@ StorePath EvalState::devirtualize(const StorePath & path, StringMap * rewrites)
|
|||
{
|
||||
if (auto mount = storeFS->getMount(CanonPath(store->printStorePath(path)))) {
|
||||
auto storePath = fetchToStore(
|
||||
*store, SourcePath{ref(mount)}, settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy, path.name());
|
||||
fetchSettings,
|
||||
*store,
|
||||
SourcePath{ref(mount)},
|
||||
settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy,
|
||||
path.name());
|
||||
assert(storePath.name() == path.name());
|
||||
if (rewrites)
|
||||
rewrites->emplace(path.hashPart(), storePath.hashPart());
|
||||
|
|
@ -57,13 +61,12 @@ std::string EvalState::computeBaseName(const SourcePath & path, PosIdx pos)
|
|||
if (path.accessor == rootFS) {
|
||||
if (auto storePath = store->maybeParseStorePath(path.path.abs())) {
|
||||
warn(
|
||||
"Copying '%s' to the store again\n"
|
||||
"Copying '%s' to the store again.\n"
|
||||
"You can make Nix evaluate faster and copy fewer files by replacing `./.` with the `self` flake input, "
|
||||
"or `builtins.path { path = ./.; name = \"source\"; }`\n\n"
|
||||
"Location: %s\n",
|
||||
path,
|
||||
positions[pos]);
|
||||
return std::string(fetchToStore(*store, path, FetchMode::DryRun, storePath->name()).to_string());
|
||||
"or `builtins.path { path = ./.; name = \"source\"; }`.\n",
|
||||
path);
|
||||
return std::string(
|
||||
fetchToStore(fetchSettings, *store, path, FetchMode::DryRun, storePath->name()).to_string());
|
||||
}
|
||||
}
|
||||
return std::string(path.baseName());
|
||||
|
|
@ -72,8 +75,9 @@ std::string EvalState::computeBaseName(const SourcePath & path, PosIdx pos)
|
|||
StorePath EvalState::mountInput(
|
||||
fetchers::Input & input, const fetchers::Input & originalInput, ref<SourceAccessor> accessor, bool requireLockable)
|
||||
{
|
||||
auto storePath = settings.lazyTrees ? StorePath::random(input.getName())
|
||||
: fetchToStore(*store, accessor, FetchMode::Copy, input.getName());
|
||||
auto storePath = settings.lazyTrees
|
||||
? StorePath::random(input.getName())
|
||||
: fetchToStore(fetchSettings, *store, accessor, FetchMode::Copy, input.getName());
|
||||
|
||||
allowPath(storePath); // FIXME: should just whitelist the entire virtual store
|
||||
|
||||
|
|
@ -84,7 +88,7 @@ StorePath EvalState::mountInput(
|
|||
if (store->isValidPath(storePath))
|
||||
_narHash = store->queryPathInfo(storePath)->narHash;
|
||||
else
|
||||
_narHash = fetchToStore2(*store, accessor, FetchMode::DryRun, input.getName()).second;
|
||||
_narHash = fetchToStore2(fetchSettings, *store, accessor, FetchMode::DryRun, input.getName()).second;
|
||||
}
|
||||
return _narHash;
|
||||
};
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@
|
|||
#include "nix/expr/value-to-xml.hh"
|
||||
#include "nix/expr/primops.hh"
|
||||
#include "nix/fetchers/fetch-to-store.hh"
|
||||
#include "nix/util/sort.hh"
|
||||
#include "nix/util/mounted-source-accessor.hh"
|
||||
|
||||
#include <boost/container/small_vector.hpp>
|
||||
|
|
@ -421,7 +422,7 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu
|
|||
void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.exec");
|
||||
auto elems = args[0]->listElems();
|
||||
auto elems = args[0]->listView();
|
||||
auto count = args[0]->listSize();
|
||||
if (count == 0)
|
||||
state.error<EvalError>("at least one argument to 'exec' required").atPos(pos).debugThrow();
|
||||
|
|
@ -430,7 +431,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
|||
"while evaluating the first element of the argument passed to builtins.exec",
|
||||
false, false).toOwned();
|
||||
Strings commandArgs;
|
||||
for (unsigned int i = 1; i < args[0]->listSize(); ++i) {
|
||||
for (size_t i = 1; i < count; ++i) {
|
||||
commandArgs.push_back(
|
||||
state.coerceToString(pos, *elems[i], context,
|
||||
"while evaluating an element of the argument passed to builtins.exec",
|
||||
|
|
@ -658,7 +659,7 @@ struct CompareValues
|
|||
// Note: we don't take the accessor into account
|
||||
// since it's not obvious how to compare them in a
|
||||
// reproducible way.
|
||||
return strcmp(v1->payload.path.path, v2->payload.path.path) < 0;
|
||||
return strcmp(v1->pathStr(), v2->pathStr()) < 0;
|
||||
case nList:
|
||||
// Lexicographic comparison
|
||||
for (size_t i = 0;; i++) {
|
||||
|
|
@ -666,8 +667,8 @@ struct CompareValues
|
|||
return false;
|
||||
} else if (i == v1->listSize()) {
|
||||
return true;
|
||||
} else if (!state.eqValues(*v1->listElems()[i], *v2->listElems()[i], pos, errorCtx)) {
|
||||
return (*this)(v1->listElems()[i], v2->listElems()[i], "while comparing two list elements");
|
||||
} else if (!state.eqValues(*v1->listView()[i], *v2->listView()[i], pos, errorCtx)) {
|
||||
return (*this)(v1->listView()[i], v2->listView()[i], "while comparing two list elements");
|
||||
}
|
||||
}
|
||||
default:
|
||||
|
|
@ -685,31 +686,17 @@ struct CompareValues
|
|||
|
||||
typedef std::list<Value *, gc_allocator<Value *>> ValueList;
|
||||
|
||||
|
||||
static Bindings::const_iterator getAttr(
|
||||
EvalState & state,
|
||||
Symbol attrSym,
|
||||
const Bindings * attrSet,
|
||||
std::string_view errorCtx)
|
||||
{
|
||||
auto value = attrSet->find(attrSym);
|
||||
if (value == attrSet->end()) {
|
||||
state.error<TypeError>("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow();
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
static void prim_genericClosure(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
state.forceAttrs(*args[0], noPos, "while evaluating the first argument passed to builtins.genericClosure");
|
||||
|
||||
/* Get the start set. */
|
||||
auto startSet = getAttr(state, state.sStartSet, args[0]->attrs(), "in the attrset passed as argument to builtins.genericClosure");
|
||||
auto startSet = state.getAttr(state.sStartSet, args[0]->attrs(), "in the attrset passed as argument to builtins.genericClosure");
|
||||
|
||||
state.forceList(*startSet->value, noPos, "while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure");
|
||||
|
||||
ValueList workSet;
|
||||
for (auto elem : startSet->value->listItems())
|
||||
for (auto elem : startSet->value->listView())
|
||||
workSet.push_back(elem);
|
||||
|
||||
if (startSet->value->listSize() == 0) {
|
||||
|
|
@ -718,7 +705,7 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value * * a
|
|||
}
|
||||
|
||||
/* Get the operator. */
|
||||
auto op = getAttr(state, state.sOperator, args[0]->attrs(), "in the attrset passed as argument to builtins.genericClosure");
|
||||
auto op = state.getAttr(state.sOperator, args[0]->attrs(), "in the attrset passed as argument to builtins.genericClosure");
|
||||
state.forceFunction(*op->value, noPos, "while evaluating the 'operator' attribute passed as argument to builtins.genericClosure");
|
||||
|
||||
/* Construct the closure by applying the operator to elements of
|
||||
|
|
@ -735,7 +722,7 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value * * a
|
|||
|
||||
state.forceAttrs(*e, noPos, "while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure");
|
||||
|
||||
auto key = getAttr(state, state.sKey, e->attrs(), "in one of the attrsets generated by (or initially passed to) builtins.genericClosure");
|
||||
auto key = state.getAttr(state.sKey, e->attrs(), "in one of the attrsets generated by (or initially passed to) builtins.genericClosure");
|
||||
state.forceValue(*key->value, noPos);
|
||||
|
||||
if (!doneKeys.insert(key->value).second) continue;
|
||||
|
|
@ -747,7 +734,7 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value * * a
|
|||
state.forceList(newElements, noPos, "while evaluating the return value of the `operator` passed to builtins.genericClosure");
|
||||
|
||||
/* Add the values returned by the operator to the work set. */
|
||||
for (auto elem : newElements.listItems()) {
|
||||
for (auto elem : newElements.listView()) {
|
||||
state.forceValue(*elem, noPos); // "while evaluating one one of the elements returned by the `operator` passed to builtins.genericClosure");
|
||||
workSet.push_back(elem);
|
||||
}
|
||||
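Editorial note: builtins.genericClosure, touched above, repeatedly pulls elements off a work set, records each element's key in a "done" set, and feeds whatever the operator returns back into the work set until it drains. A standalone sketch of that fixpoint loop over plain integers (the operator below only illustrates the shape; it is not the Nix builtin):

#include <functional>
#include <iostream>
#include <list>
#include <set>
#include <vector>

// genericClosure over int keys: start from `startSet`, apply `op` to every
// newly seen element, and collect each element exactly once.
static std::vector<int> genericClosure(
    std::vector<int> startSet,
    const std::function<std::vector<int>(int)> & op)
{
    std::list<int> workSet(startSet.begin(), startSet.end());
    std::set<int> doneKeys;
    std::vector<int> res;

    while (!workSet.empty()) {
        int e = workSet.front();
        workSet.pop_front();
        if (!doneKeys.insert(e).second) continue;   // key already processed
        res.push_back(e);
        for (int n : op(e))                         // operator may yield duplicates
            workSet.push_back(n);
    }
    return res;
}

int main() {
    // Closure of {1} under "successor up to 10, plus integer half".
    auto out = genericClosure({1}, [](int e) {
        std::vector<int> next;
        if (e + 1 <= 10) next.push_back(e + 1);
        if (e / 2 > 0) next.push_back(e / 2);
        return next;
    });
    for (int e : out) std::cout << e << " ";
    std::cout << "\n";
}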
|
|
@ -919,7 +906,7 @@ static void prim_ceil(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
auto arg = args[0]->integer();
|
||||
auto res = v.integer();
|
||||
if (arg != res) {
|
||||
state.error<EvalError>("Due to a bug (see https://github.com/NixOS/nix/issues/12899) a loss of precision occured in previous Nix versions because the NixInt argument %1% was rounded to %2%.\n\tFuture Nix versions might implement the correct behavior.", arg, res).atPos(pos).debugThrow();
|
||||
state.error<EvalError>("Due to a bug (see https://github.com/NixOS/nix/issues/12899) a loss of precision occurred in previous Nix versions because the NixInt argument %1% was rounded to %2%.\n\tFuture Nix versions might implement the correct behavior.", arg, res).atPos(pos).debugThrow();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -960,7 +947,7 @@ static void prim_floor(EvalState & state, const PosIdx pos, Value * * args, Valu
|
|||
auto arg = args[0]->integer();
|
||||
auto res = v.integer();
|
||||
if (arg != res) {
|
||||
state.error<EvalError>("Due to a bug (see https://github.com/NixOS/nix/issues/12899) a loss of precision occured in previous Nix versions because the NixInt argument %1% was rounded to %2%.\n\tFuture Nix versions might implement the correct behavior.", arg, res).atPos(pos).debugThrow();
|
||||
state.error<EvalError>("Due to a bug (see https://github.com/NixOS/nix/issues/12899) a loss of precision occurred in previous Nix versions because the NixInt argument %1% was rounded to %2%.\n\tFuture Nix versions might implement the correct behavior.", arg, res).atPos(pos).debugThrow();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -994,7 +981,7 @@ static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Va
|
|||
ReplExitStatus (* savedDebugRepl)(ref<EvalState> es, const ValMap & extraEnv) = nullptr;
|
||||
if (state.debugRepl && state.settings.ignoreExceptionsDuringTry)
|
||||
{
|
||||
/* to prevent starting the repl from exceptions withing a tryEval, null it. */
|
||||
/* to prevent starting the repl from exceptions within a tryEval, null it. */
|
||||
savedDebugRepl = state.debugRepl;
|
||||
state.debugRepl = nullptr;
|
||||
}
|
||||
|
|
@ -1200,7 +1187,7 @@ static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Val
|
|||
|
||||
static void derivationStrictInternal(
|
||||
EvalState & state,
|
||||
const std::string & name,
|
||||
std::string_view name,
|
||||
const Bindings * attrs,
|
||||
Value & v);
|
||||
|
||||
|
|
@ -1218,9 +1205,9 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
auto attrs = args[0]->attrs();
|
||||
|
||||
/* Figure out the name first (for stack backtraces). */
|
||||
auto nameAttr = getAttr(state, state.sName, attrs, "in the attrset passed as argument to builtins.derivationStrict");
|
||||
auto nameAttr = state.getAttr(state.sName, attrs, "in the attrset passed as argument to builtins.derivationStrict");
|
||||
|
||||
std::string drvName;
|
||||
std::string_view drvName;
|
||||
try {
|
||||
drvName = state.forceStringNoCtx(*nameAttr->value, pos, "while evaluating the `name` attribute passed to builtins.derivationStrict");
|
||||
} catch (Error & e) {
|
||||
|
|
@ -1279,7 +1266,7 @@ static void checkDerivationName(EvalState & state, std::string_view drvName)
|
|||
|
||||
static void derivationStrictInternal(
|
||||
EvalState & state,
|
||||
const std::string & drvName,
|
||||
std::string_view drvName,
|
||||
const Bindings * attrs,
|
||||
Value & v)
|
||||
{
|
||||
|
|
@ -1387,7 +1374,7 @@ static void derivationStrictInternal(
|
|||
command-line arguments to the builder. */
|
||||
else if (i->name == state.sArgs) {
|
||||
state.forceList(*i->value, pos, context_below);
|
||||
for (auto elem : i->value->listItems()) {
|
||||
for (auto elem : i->value->listView()) {
|
||||
auto s = state.coerceToString(pos, *elem, context,
|
||||
"while evaluating an element of the argument list",
|
||||
true).toOwned();
|
||||
|
|
@ -1419,7 +1406,7 @@ static void derivationStrictInternal(
|
|||
/* Require ‘outputs’ to be a list of strings. */
|
||||
state.forceList(*i->value, pos, context_below);
|
||||
Strings ss;
|
||||
for (auto elem : i->value->listItems())
|
||||
for (auto elem : i->value->listView())
|
||||
ss.emplace_back(state.forceStringNoCtx(*elem, pos, context_below));
|
||||
handleOutputs(ss);
|
||||
}
|
||||
|
|
@ -1448,6 +1435,8 @@ static void derivationStrictInternal(
|
|||
else if (i->name == state.sOutputHashMode) handleHashMode(s);
|
||||
else if (i->name == state.sOutputs)
|
||||
handleOutputs(tokenizeString<Strings>(s));
|
||||
else if (i->name == state.sJson)
|
||||
warn("In derivation '%s': setting structured attributes via '__json' is deprecated, and may be disallowed in future versions of Nix. Set '__structuredAttrs = true' instead.", drvName);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1917,7 +1906,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
|
||||
LookupPath lookupPath;
|
||||
|
||||
for (auto v2 : args[0]->listItems()) {
|
||||
for (auto v2 : args[0]->listView()) {
|
||||
state.forceAttrs(*v2, pos, "while evaluating an element of the list passed to builtins.findFile");
|
||||
|
||||
std::string prefix;
|
||||
|
|
@ -1925,7 +1914,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
if (i != v2->attrs()->end())
|
||||
prefix = state.forceStringNoCtx(*i->value, pos, "while evaluating the `prefix` attribute of an element of the list passed to builtins.findFile");
|
||||
|
||||
i = getAttr(state, state.sPath, v2->attrs(), "in an element of the __nixPath");
|
||||
i = state.getAttr(state.sPath, v2->attrs(), "in an element of the __nixPath");
|
||||
|
||||
NixStringContext context;
|
||||
auto path = state.coerceToString(pos, *i->value, context,
|
||||
|
|
@ -1934,7 +1923,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
|
||||
try {
|
||||
auto rewrites = state.realiseContext(context);
|
||||
path = rewriteStrings(path, rewrites);
|
||||
path = rewriteStrings(std::move(path), rewrites);
|
||||
} catch (InvalidPathError & e) {
|
||||
state.error<EvalError>(
|
||||
"cannot find '%1%', since path '%2%' is not valid",
|
||||
|
|
@ -1944,8 +1933,8 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
}
|
||||
|
||||
lookupPath.elements.emplace_back(LookupPath::Elem {
|
||||
.prefix = LookupPath::Prefix { .s = prefix },
|
||||
.path = LookupPath::Path { .s = path },
|
||||
.prefix = LookupPath::Prefix { .s = std::move(prefix) },
|
||||
.path = LookupPath::Path { .s = std::move(path) },
|
||||
});
|
||||
}
|
||||
|
||||
|
|
@ -2218,7 +2207,7 @@ static RegisterPrimOp primop_outputOf({
|
|||
[input placeholder string](@docroot@/store/derivation/index.md#input-placeholder)
|
||||
if needed.
|
||||
|
||||
If the derivation has a statically-known output path (i.e. the derivation output is input-addressed, or fixed content-addresed), the output path is returned.
|
||||
If the derivation has a statically-known output path (i.e. the derivation output is input-addressed, or fixed content-addressed), the output path is returned.
|
||||
But if the derivation is content-addressed or if the derivation is itself not-statically produced (i.e. is the output of another derivation), an input placeholder is returned instead.
|
||||
|
||||
*`derivation reference`* must be a string that may contain a regular store path to a derivation, or may be an input placeholder reference.
|
||||
|
|
@ -2410,7 +2399,7 @@ static RegisterPrimOp primop_fromJSON({
|
|||
static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
NixStringContext context;
|
||||
std::string name(state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.toFile"));
|
||||
auto name = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.toFile");
|
||||
std::string contents(state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.toFile"));
|
||||
|
||||
StorePathSet refs;
|
||||
|
|
@ -2591,6 +2580,7 @@ static void addPath(
|
|||
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
|
||||
// FIXME: make this lazy?
|
||||
auto dstPath = fetchToStore(
|
||||
state.fetchSettings,
|
||||
*state.store,
|
||||
path.resolveSymlinks(),
|
||||
settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy,
|
||||
|
|
@ -2636,7 +2626,7 @@ static RegisterPrimOp primop_filterSource({
|
|||
> the name of the input directory. Since `<hash>` depends on the
|
||||
> unfiltered directory, the name of the output directory
|
||||
> indirectly depends on files that are filtered out by the
|
||||
> function. This triggers a rebuild even when a filtered-out
|
||||
> function. This triggers a rebuild even when a filtered out
|
||||
> file is changed. Use `builtins.path` instead, which allows
|
||||
> specifying the name of the output directory.
|
||||
|
||||
|
|
@ -2681,7 +2671,7 @@ static RegisterPrimOp primop_filterSource({
|
|||
static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
std::optional<SourcePath> path;
|
||||
std::string name;
|
||||
std::string_view name;
|
||||
Value * filterFun = nullptr;
|
||||
auto method = ContentAddressMethod::Raw::NixArchive;
|
||||
std::optional<Hash> expectedHash;
|
||||
|
|
@ -2769,7 +2759,7 @@ static void prim_attrNames(EvalState & state, const PosIdx pos, Value * * args,
|
|||
auto list = state.buildList(args[0]->attrs()->size());
|
||||
|
||||
for (const auto & [n, i] : enumerate(*args[0]->attrs()))
|
||||
(list[n] = state.allocValue())->mkString(state.symbols[i.name]);
|
||||
list[n] = Value::toPtr(state.symbols[i.name]);
|
||||
|
||||
std::sort(list.begin(), list.end(),
|
||||
[](Value * v1, Value * v2) { return strcmp(v1->c_str(), v2->c_str()) < 0; });
|
||||
|
|
@ -2827,8 +2817,7 @@ void prim_getAttr(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
|||
{
|
||||
auto attr = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.getAttr");
|
||||
state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.getAttr");
|
||||
auto i = getAttr(
|
||||
state,
|
||||
auto i = state.getAttr(
|
||||
state.symbols.create(attr),
|
||||
args[1]->attrs(),
|
||||
"in the attribute set under consideration"
|
||||
|
|
@ -2875,7 +2864,7 @@ static RegisterPrimOp primop_unsafeGetAttrPos(PrimOp {
|
|||
.fun = prim_unsafeGetAttrPos,
|
||||
});
|
||||
|
||||
// access to exact position information (ie, line and colum numbers) is deferred
|
||||
// access to exact position information (ie, line and column numbers) is deferred
|
||||
// due to the cost associated with calculating that information and how rarely
|
||||
// it is used in practice. this is achieved by creating thunks to otherwise
|
||||
// inaccessible primops that are not exposed as __op or under builtins to turn
|
||||
|
|
@ -2887,7 +2876,7 @@ static RegisterPrimOp primop_unsafeGetAttrPos(PrimOp {
|
|||
// but each type of thunk has an associated runtime cost in the current evaluator.
|
||||
// as with black holes this cost is too high to justify another thunk type to check
|
||||
// for in the very hot path that is forceValue.
|
||||
static struct LazyPosAcessors {
|
||||
static struct LazyPosAccessors {
|
||||
PrimOp primop_lineOfPos{
|
||||
.arity = 1,
|
||||
.fun = [] (EvalState & state, PosIdx pos, Value * * args, Value & v) {
|
||||
|
|
@ -2903,7 +2892,7 @@ static struct LazyPosAcessors {
|
|||
|
||||
Value lineOfPos, columnOfPos;
|
||||
|
||||
LazyPosAcessors()
|
||||
LazyPosAccessors()
|
||||
{
|
||||
lineOfPos.mkPrimOp(&primop_lineOfPos);
|
||||
columnOfPos.mkPrimOp(&primop_columnOfPos);
|
||||
|
|
@ -2969,7 +2958,7 @@ static void prim_removeAttrs(EvalState & state, const PosIdx pos, Value * * args
|
|||
// 64: large enough to fit the attributes of a derivation
|
||||
boost::container::small_vector<Attr, 64> names;
|
||||
names.reserve(args[1]->listSize());
|
||||
for (auto elem : args[1]->listItems()) {
|
||||
for (auto elem : args[1]->listView()) {
|
||||
state.forceStringNoCtx(*elem, pos, "while evaluating the values of the second argument passed to builtins.removeAttrs");
|
||||
names.emplace_back(state.symbols.create(elem->string_view()), nullptr);
|
||||
}
|
||||
|
|
@ -3011,25 +3000,48 @@ static void prim_listToAttrs(EvalState & state, const PosIdx pos, Value * * args
|
|||
{
|
||||
state.forceList(*args[0], pos, "while evaluating the argument passed to builtins.listToAttrs");
|
||||
|
||||
auto attrs = state.buildBindings(args[0]->listSize());
|
||||
// Step 1. Sort the name-value attrsets in place using the memory we allocate for the result
|
||||
auto listView = args[0]->listView();
|
||||
size_t listSize = listView.size();
|
||||
auto & bindings = *state.allocBindings(listSize);
|
||||
using ElemPtr = decltype(&bindings[0].value);
|
||||
|
||||
std::set<Symbol> seen;
|
||||
|
||||
for (auto v2 : args[0]->listItems()) {
|
||||
for (const auto & [n, v2] : enumerate(listView)) {
|
||||
state.forceAttrs(*v2, pos, "while evaluating an element of the list passed to builtins.listToAttrs");
|
||||
|
||||
auto j = getAttr(state, state.sName, v2->attrs(), "in a {name=...; value=...;} pair");
|
||||
auto j = state.getAttr(state.sName, v2->attrs(), "in a {name=...; value=...;} pair");
|
||||
|
||||
auto name = state.forceStringNoCtx(*j->value, j->pos, "while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs");
|
||||
|
||||
auto sym = state.symbols.create(name);
|
||||
if (seen.insert(sym).second) {
|
||||
auto j2 = getAttr(state, state.sValue, v2->attrs(), "in a {name=...; value=...;} pair");
|
||||
attrs.insert(sym, j2->value, j2->pos);
|
||||
}
|
||||
|
||||
// (ab)use Attr to store a Value * * instead of a Value *, so that we can stabilize the sort using the Value * *
|
||||
bindings[n] = Attr(sym, std::bit_cast<Value *>(&v2));
|
||||
}
|
||||
|
||||
v.mkAttrs(attrs);
|
||||
std::sort(&bindings[0], &bindings[listSize], [](const Attr & a, const Attr & b) {
|
||||
// Note that .value is actually a Value * * that corresponds to the position in the list
|
||||
return a < b || (!(a > b) && std::bit_cast<ElemPtr>(a.value) < std::bit_cast<ElemPtr>(b.value));
|
||||
});
|
||||
|
||||
// Step 2. Unpack the bindings in place and skip name-value pairs with duplicate names
|
||||
Symbol prev;
|
||||
for (size_t n = 0; n < listSize; n++) {
|
||||
auto attr = bindings[n];
|
||||
if (prev == attr.name) {
|
||||
continue;
|
||||
}
|
||||
// Note that .value is actually a Value * *; see earlier comments
|
||||
Value * v2 = *std::bit_cast<ElemPtr>(attr.value);
|
||||
|
||||
auto j = state.getAttr(state.sValue, v2->attrs(), "in a {name=...; value=...;} pair");
|
||||
prev = attr.name;
|
||||
bindings.push_back({prev, j->value, j->pos});
|
||||
}
|
||||
// help GC and clear end of allocated array
|
||||
for (size_t n = bindings.size(); n < listSize; n++) {
|
||||
bindings[n] = Attr{};
|
||||
}
|
||||
v.mkAttrs(&bindings);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_listToAttrs({
|
||||
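Editorial note: the new listToAttrs above sorts the name/value attrsets directly in the memory allocated for the result, using the element's original list position as a tiebreaker so that the first occurrence of a duplicate name survives the dedup pass. The same sort-then-skip-duplicates idea in standalone form (std::pair and std::string stand in for Attr and Symbol; this is a sketch, not the primop):

#include <algorithm>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Sort by name, break ties by original position, then keep the first
// occurrence of each name: the semantics of builtins.listToAttrs.
static std::vector<std::pair<std::string, int>>
listToAttrs(const std::vector<std::pair<std::string, int>> & pairs)
{
    // Sort an index array so the original position can serve as the
    // tiebreaker, much like the Value** trick in the hunk above.
    std::vector<size_t> order(pairs.size());
    for (size_t i = 0; i < order.size(); ++i) order[i] = i;

    std::sort(order.begin(), order.end(), [&](size_t a, size_t b) {
        return pairs[a].first != pairs[b].first
            ? pairs[a].first < pairs[b].first
            : a < b;
    });

    std::vector<std::pair<std::string, int>> out;
    const std::string * prev = nullptr;
    for (size_t i : order) {
        if (prev && *prev == pairs[i].first) continue;   // duplicate name: skip
        out.push_back(pairs[i]);
        prev = &pairs[i].first;
    }
    return out;
}

int main() {
    auto attrs = listToAttrs({{"b", 1}, {"a", 2}, {"b", 3}, {"a", 4}});
    for (auto & [name, value] : attrs)
        std::cout << name << " = " << value << "\n";   // a = 2, b = 1
}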
|
|
@ -3149,14 +3161,14 @@ static void prim_catAttrs(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
SmallValueVector<nonRecursiveStackReservation> res(args[1]->listSize());
|
||||
size_t found = 0;
|
||||
|
||||
for (auto v2 : args[1]->listItems()) {
|
||||
for (auto v2 : args[1]->listView()) {
|
||||
state.forceAttrs(*v2, pos, "while evaluating an element in the list passed as second argument to builtins.catAttrs");
|
||||
if (auto i = v2->attrs()->get(attrName))
|
||||
res[found++] = i->value;
|
||||
}
|
||||
|
||||
auto list = state.buildList(found);
|
||||
for (unsigned int n = 0; n < found; ++n)
|
||||
for (size_t n = 0; n < found; ++n)
|
||||
list[n] = res[n];
|
||||
v.mkList(list);
|
||||
}
|
||||
|
|
@ -3188,15 +3200,21 @@ static void prim_functionArgs(EvalState & state, const PosIdx pos, Value * * arg
|
|||
if (!args[0]->isLambda())
|
||||
state.error<TypeError>("'functionArgs' requires a function").atPos(pos).debugThrow();
|
||||
|
||||
if (!args[0]->payload.lambda.fun->hasFormals()) {
|
||||
if (!args[0]->lambda().fun->hasFormals()) {
|
||||
v.mkAttrs(&state.emptyBindings);
|
||||
return;
|
||||
}
|
||||
|
||||
auto attrs = state.buildBindings(args[0]->payload.lambda.fun->formals->formals.size());
|
||||
for (auto & i : args[0]->payload.lambda.fun->formals->formals)
|
||||
const auto &formals = args[0]->lambda().fun->formals->formals;
|
||||
auto attrs = state.buildBindings(formals.size());
|
||||
for (auto & i : formals)
|
||||
attrs.insert(i.name, state.getBool(i.def), i.pos);
|
||||
v.mkAttrs(attrs);
|
||||
/* Optimization: avoid sorting bindings. `formals` must already be sorted according to
|
||||
(std::tie(a.name, a.pos) < std::tie(b.name, b.pos)) predicate, so the following assertion
|
||||
always holds:
|
||||
assert(std::is_sorted(attrs.alreadySorted()->begin(), attrs.alreadySorted()->end()));
|
||||
.*/
|
||||
v.mkAttrs(attrs.alreadySorted());
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_functionArgs({
|
||||
|
|
@ -3224,9 +3242,8 @@ static void prim_mapAttrs(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
auto attrs = state.buildBindings(args[1]->attrs()->size());
|
||||
|
||||
for (auto & i : *args[1]->attrs()) {
|
||||
Value * vName = state.allocValue();
|
||||
Value * vName = Value::toPtr(state.symbols[i.name]);
|
||||
Value * vFun2 = state.allocValue();
|
||||
vName->mkString(state.symbols[i.name]);
|
||||
vFun2->mkApp(args[0], vName);
|
||||
attrs.alloc(i.name).mkApp(vFun2, i.value);
|
||||
}
|
||||
|
|
@ -3269,7 +3286,7 @@ static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value * * arg
|
|||
|
||||
state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.zipAttrsWith");
|
||||
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.zipAttrsWith");
|
||||
const auto listItems = args[1]->listItems();
|
||||
const auto listItems = args[1]->listView();
|
||||
|
||||
for (auto & vElem : listItems) {
|
||||
state.forceAttrs(*vElem, noPos, "while evaluating a value of the list passed as second argument to builtins.zipAttrsWith");
|
||||
|
|
@ -3290,8 +3307,7 @@ static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value * * arg
|
|||
auto attrs = state.buildBindings(attrsSeen.size());
|
||||
|
||||
for (auto & [sym, elem] : attrsSeen) {
|
||||
auto name = state.allocValue();
|
||||
name->mkString(state.symbols[sym]);
|
||||
auto name = Value::toPtr(state.symbols[sym]);
|
||||
auto call1 = state.allocValue();
|
||||
call1->mkApp(args[0], name);
|
||||
auto call2 = state.allocValue();
|
||||
|
|
@ -3363,14 +3379,14 @@ static void prim_elemAt(EvalState & state, const PosIdx pos, Value * * args, Val
|
|||
{
|
||||
NixInt::Inner n = state.forceInt(*args[1], pos, "while evaluating the second argument passed to 'builtins.elemAt'").value;
|
||||
state.forceList(*args[0], pos, "while evaluating the first argument passed to 'builtins.elemAt'");
|
||||
if (n < 0 || (unsigned int) n >= args[0]->listSize())
|
||||
if (n < 0 || std::make_unsigned_t<NixInt::Inner>(n) >= args[0]->listSize())
|
||||
state.error<EvalError>(
|
||||
"'builtins.elemAt' called with index %d on a list of size %d",
|
||||
n,
|
||||
args[0]->listSize()
|
||||
).atPos(pos).debugThrow();
|
||||
state.forceValue(*args[0]->listElems()[n], pos);
|
||||
v = *args[0]->listElems()[n];
|
||||
state.forceValue(*args[0]->listView()[n], pos);
|
||||
v = *args[0]->listView()[n];
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_elemAt({
|
||||
|
|
@ -3391,8 +3407,8 @@ static void prim_head(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
state.error<EvalError>(
|
||||
"'builtins.head' called on an empty list"
|
||||
).atPos(pos).debugThrow();
|
||||
state.forceValue(*args[0]->listElems()[0], pos);
|
||||
v = *args[0]->listElems()[0];
|
||||
state.forceValue(*args[0]->listView()[0], pos);
|
||||
v = *args[0]->listView()[0];
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_head({
|
||||
|
|
@ -3417,7 +3433,7 @@ static void prim_tail(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
|
||||
auto list = state.buildList(args[0]->listSize() - 1);
|
||||
for (const auto & [n, v] : enumerate(list))
|
||||
v = args[0]->listElems()[n + 1];
|
||||
v = args[0]->listView()[n + 1];
|
||||
v.mkList(list);
|
||||
}
|
||||
|
||||
|
|
@ -3452,7 +3468,7 @@ static void prim_map(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
auto list = state.buildList(args[1]->listSize());
|
||||
for (const auto & [n, v] : enumerate(list))
|
||||
(v = state.allocValue())->mkApp(
|
||||
args[0], args[1]->listElems()[n]);
|
||||
args[0], args[1]->listView()[n]);
|
||||
v.mkList(list);
|
||||
}
|
||||
|
||||
|
|
@ -3486,15 +3502,16 @@ static void prim_filter(EvalState & state, const PosIdx pos, Value * * args, Val
|
|||
|
||||
state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filter");
|
||||
|
||||
SmallValueVector<nonRecursiveStackReservation> vs(args[1]->listSize());
|
||||
auto len = args[1]->listSize();
|
||||
SmallValueVector<nonRecursiveStackReservation> vs(len);
|
||||
size_t k = 0;
|
||||
|
||||
bool same = true;
|
||||
for (unsigned int n = 0; n < args[1]->listSize(); ++n) {
|
||||
for (size_t n = 0; n < len; ++n) {
|
||||
Value res;
|
||||
state.callFunction(*args[0], *args[1]->listElems()[n], res, noPos);
|
||||
state.callFunction(*args[0], *args[1]->listView()[n], res, noPos);
|
||||
if (state.forceBool(res, pos, "while evaluating the return value of the filtering function passed to builtins.filter"))
|
||||
vs[k++] = args[1]->listElems()[n];
|
||||
vs[k++] = args[1]->listView()[n];
|
||||
else
|
||||
same = false;
|
||||
}
|
||||
|
|
@ -3523,7 +3540,7 @@ static void prim_elem(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
{
|
||||
bool res = false;
|
||||
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.elem");
|
||||
for (auto elem : args[1]->listItems())
|
||||
for (auto elem : args[1]->listView())
|
||||
if (state.eqValues(*args[0], *elem, pos, "while searching for the presence of the given element in the list")) {
|
||||
res = true;
|
||||
break;
|
||||
|
|
@ -3545,7 +3562,8 @@ static RegisterPrimOp primop_elem({
|
|||
static void prim_concatLists(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.concatLists");
|
||||
state.concatLists(v, args[0]->listSize(), args[0]->listElems(), pos, "while evaluating a value of the list passed to builtins.concatLists");
|
||||
auto listView = args[0]->listView();
|
||||
state.concatLists(v, args[0]->listSize(), listView.data(), pos, "while evaluating a value of the list passed to builtins.concatLists");
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_concatLists({
|
||||
|
|
@ -3583,7 +3601,8 @@ static void prim_foldlStrict(EvalState & state, const PosIdx pos, Value * * args
|
|||
if (args[2]->listSize()) {
|
||||
Value * vCur = args[1];
|
||||
|
||||
for (auto [n, elem] : enumerate(args[2]->listItems())) {
|
||||
auto listView = args[2]->listView();
|
||||
for (auto [n, elem] : enumerate(listView)) {
|
||||
Value * vs []{vCur, elem};
|
||||
vCur = n == args[2]->listSize() - 1 ? &v : state.allocValue();
|
||||
state.callFunction(*args[0], vs, *vCur, pos);
|
||||
|
|
@ -3625,7 +3644,7 @@ static void anyOrAll(bool any, EvalState & state, const PosIdx pos, Value * * ar
|
|||
: "while evaluating the return value of the function passed to builtins.all";
|
||||
|
||||
Value vTmp;
|
||||
for (auto elem : args[1]->listItems()) {
|
||||
for (auto elem : args[1]->listView()) {
|
||||
state.callFunction(*args[0], *elem, vTmp, pos);
|
||||
bool res = state.forceBool(vTmp, pos, errorCtx);
|
||||
if (res == any) {
|
||||
|
|
@ -3672,12 +3691,12 @@ static void prim_genList(EvalState & state, const PosIdx pos, Value * * args, Va
|
|||
{
|
||||
auto len_ = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList").value;
|
||||
|
||||
if (len_ < 0)
|
||||
if (len_ < 0 || std::make_unsigned_t<NixInt::Inner>(len_) > std::numeric_limits<size_t>::max())
|
||||
state.error<EvalError>("cannot create list of size %1%", len_).atPos(pos).debugThrow();
|
||||
|
||||
size_t len = size_t(len_);
|
||||
|
||||
// More strict than striclty (!) necessary, but acceptable
|
||||
// More strict than strictly (!) necessary, but acceptable
|
||||
// as evaluating map without accessing any values makes little sense.
|
||||
state.forceFunction(*args[0], noPos, "while evaluating the first argument passed to builtins.genList");
|
||||
|
||||
|
|
@ -3723,7 +3742,7 @@ static void prim_sort(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
|
||||
auto list = state.buildList(len);
|
||||
for (const auto & [n, v] : enumerate(list))
|
||||
state.forceValue(*(v = args[1]->listElems()[n]), pos);
|
||||
state.forceValue(*(v = args[1]->listView()[n]), pos);
|
||||
|
||||
auto comparator = [&](Value * a, Value * b) {
|
||||
/* Optimization: if the comparator is lessThan, bypass
|
||||
|
|
@ -3740,10 +3759,14 @@ static void prim_sort(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
return state.forceBool(vBool, pos, "while evaluating the return value of the sorting function passed to builtins.sort");
|
||||
};
|
||||
|
||||
/* FIXME: std::sort can segfault if the comparator is not a strict
|
||||
weak ordering. What to do? std::stable_sort() seems more
|
||||
resilient, but no guarantees... */
|
||||
std::stable_sort(list.begin(), list.end(), comparator);
|
||||
/* NOTE: Using custom implementation because std::sort and std::stable_sort
are not resilient to comparators that violate strict weak ordering. Diagnosing
incorrect implementations is an O(n^3) problem, so doing the checks is much more
expensive than doing the sorting. For this reason we choose to use sorting algorithms
that can't be broken by invalid comparators. peeksort (mergesort)
doesn't misbehave when any of the strict weak order properties is
violated - output is always a reordering of the input. */
|
||||
peeksort(list.begin(), list.end(), comparator);
|
||||
|
||||
v.mkList(list);
|
||||
}
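
The switch from std::stable_sort to peeksort is motivated by robustness against comparators that are not strict weak orderings. peeksort itself isn't reproduced here; the sketch below uses an ordinary index-based top-down merge sort to show the general property being relied on: a merge-based sort only moves elements around, so even a nonsensical comparator yields some permutation of the input rather than out-of-bounds reads or a crash. Everything here (the bogusLess comparator in particular) is illustrative only.

```cpp
#include <algorithm>
#include <cstdlib>
#include <functional>
#include <iostream>
#include <vector>

// Index-based top-down merge sort. All reads and writes stay within [lo, hi),
// no matter what the comparator returns.
static void mergeSort(std::vector<int> & v, size_t lo, size_t hi,
                      const std::function<bool(int, int)> & less)
{
    if (hi - lo < 2) return;
    size_t mid = lo + (hi - lo) / 2;
    mergeSort(v, lo, mid, less);
    mergeSort(v, mid, hi, less);
    std::vector<int> tmp;
    tmp.reserve(hi - lo);
    size_t i = lo, j = mid;
    while (i < mid && j < hi)
        tmp.push_back(less(v[j], v[i]) ? v[j++] : v[i++]);
    while (i < mid) tmp.push_back(v[i++]);
    while (j < hi) tmp.push_back(v[j++]);
    std::copy(tmp.begin(), tmp.end(), v.begin() + lo);
}

int main()
{
    std::vector<int> v{3, 1, 4, 1, 5, 9, 2, 6};

    // Deliberately invalid "comparator": random answers violate every
    // strict-weak-ordering property. std::sort may misbehave badly with such a
    // predicate; the merge sort above merely produces some reordering.
    auto bogusLess = [](int, int) { return std::rand() % 2 == 0; };

    mergeSort(v, 0, v.size(), bogusLess);

    for (int x : v) std::cout << x << ' '; // some permutation of the input
    std::cout << '\n';
}
```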
|
||||
|
|
@ -3765,6 +3788,32 @@ static RegisterPrimOp primop_sort({
|
|||
|
||||
This is a stable sort: it preserves the relative order of elements
deemed equal by the comparator.

*comparator* must impose a strict weak ordering on the set of values
in the *list*. This means that for any elements *a*, *b* and *c* from the
*list*, *comparator* must satisfy the following relations:

1. Transitivity

   ```nix
   comparator a b && comparator b c -> comparator a c
   ```

1. Irreflexivity

   ```nix
   comparator a a == false
   ```

1. Transitivity of equivalence

   ```nix
   let equiv = a: b: (!comparator a b && !comparator b a); in
   equiv a b && equiv b c -> equiv a c
   ```

If the *comparator* violates any of these properties, then `builtins.sort`
reorders elements in an unspecified manner.
)",
.fun = prim_sort,
});
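
The comment in prim_sort notes that diagnosing a broken comparator is an O(n^3) problem; for small inputs that brute-force check is still handy in tests. A rough sketch of such a validator for the three properties documented above (the function name and structure are made up for illustration, not part of Nix):

```cpp
#include <functional>
#include <iostream>
#include <vector>

// Brute-force check of the strict-weak-ordering properties documented for
// builtins.sort: irreflexivity, transitivity, and transitivity of equivalence.
// O(n^3), so only suitable for small test inputs.
static bool isStrictWeakOrdering(const std::vector<int> & xs,
                                 const std::function<bool(int, int)> & less)
{
    auto equiv = [&](int a, int b) { return !less(a, b) && !less(b, a); };

    for (int a : xs)
        if (less(a, a)) return false; // irreflexivity

    for (int a : xs)
        for (int b : xs)
            for (int c : xs) {
                if (less(a, b) && less(b, c) && !less(a, c)) return false;    // transitivity
                if (equiv(a, b) && equiv(b, c) && !equiv(a, c)) return false; // transitivity of equivalence
            }
    return true;
}

int main()
{
    std::vector<int> xs{1, 2, 3, 4};
    std::cout << isStrictWeakOrdering(xs, [](int a, int b) { return a < b; }) << '\n';  // 1
    std::cout << isStrictWeakOrdering(xs, [](int a, int b) { return a <= b; }) << '\n'; // 0: a <= a is true
}
```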
|
||||
|
|
@ -3778,8 +3827,8 @@ static void prim_partition(EvalState & state, const PosIdx pos, Value * * args,
|
|||
|
||||
ValueVector right, wrong;
|
||||
|
||||
for (unsigned int n = 0; n < len; ++n) {
|
||||
auto vElem = args[1]->listElems()[n];
|
||||
for (size_t n = 0; n < len; ++n) {
|
||||
auto vElem = args[1]->listView()[n];
|
||||
state.forceValue(*vElem, pos);
|
||||
Value res;
|
||||
state.callFunction(*args[0], *vElem, res, pos);
|
||||
|
|
@ -3836,7 +3885,7 @@ static void prim_groupBy(EvalState & state, const PosIdx pos, Value * * args, Va
|
|||
|
||||
ValueVectorMap attrs;
|
||||
|
||||
for (auto vElem : args[1]->listItems()) {
|
||||
for (auto vElem : args[1]->listView()) {
|
||||
Value res;
|
||||
state.callFunction(*args[0], *vElem, res, pos);
|
||||
auto name = state.forceStringNoCtx(res, pos, "while evaluating the return value of the grouping function passed to builtins.groupBy");
|
||||
|
|
@ -3891,8 +3940,8 @@ static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args,
|
|||
SmallTemporaryValueVector<conservativeStackReservation> lists(nrLists);
|
||||
size_t len = 0;
|
||||
|
||||
for (unsigned int n = 0; n < nrLists; ++n) {
|
||||
Value * vElem = args[1]->listElems()[n];
|
||||
for (size_t n = 0; n < nrLists; ++n) {
|
||||
Value * vElem = args[1]->listView()[n];
|
||||
state.callFunction(*args[0], *vElem, lists[n], pos);
|
||||
state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to builtins.concatMap");
|
||||
len += lists[n].listSize();
|
||||
|
|
@ -3900,10 +3949,11 @@ static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args,
|
|||
|
||||
auto list = state.buildList(len);
|
||||
auto out = list.elems;
|
||||
for (unsigned int n = 0, pos = 0; n < nrLists; ++n) {
|
||||
auto l = lists[n].listSize();
|
||||
for (size_t n = 0, pos = 0; n < nrLists; ++n) {
|
||||
auto listView = lists[n].listView();
|
||||
auto l = listView.size();
|
||||
if (l)
|
||||
memcpy(out + pos, lists[n].listElems(), l * sizeof(Value *));
|
||||
memcpy(out + pos, listView.data(), l * sizeof(Value *));
|
||||
pos += l;
|
||||
}
|
||||
v.mkList(list);
|
||||
|
|
@ -4165,22 +4215,20 @@ static RegisterPrimOp primop_toString({
|
|||
non-negative. */
|
||||
static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
using NixUInt = std::make_unsigned_t<NixInt::Inner>;
|
||||
NixInt::Inner start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring").value;
|
||||
|
||||
if (start < 0)
|
||||
state.error<EvalError>("negative start position in 'substring'").atPos(pos).debugThrow();
|
||||
|
||||
|
||||
NixInt::Inner len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring").value;
|
||||
|
||||
// Negative length may be idiomatically passed to builtins.substring to get
|
||||
// the tail of the string.
|
||||
if (len < 0) {
|
||||
len = std::numeric_limits<NixInt::Inner>::max();
|
||||
}
|
||||
auto _len = std::numeric_limits<std::string::size_type>::max();
|
||||
|
||||
// Special-case on empty substring to avoid O(n) strlen
|
||||
// This allows for the use of empty substrings to efficently capture string context
|
||||
// This allows for the use of empty substrings to efficiently capture string context
|
||||
if (len == 0) {
|
||||
state.forceValue(*args[2], pos);
|
||||
if (args[2]->type() == nString) {
|
||||
|
|
@ -4189,10 +4237,14 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args,
|
|||
}
|
||||
}
|
||||
|
||||
if (len >= 0 && NixUInt(len) < _len) {
|
||||
_len = len;
|
||||
}
|
||||
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring");
|
||||
|
||||
v.mkString((unsigned int) start >= s->size() ? "" : s->substr(start, len), context);
|
||||
v.mkString(NixUInt(start) >= s->size() ? "" : s->substr(start, _len), context);
|
||||
}
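
The substring change above is mostly about getting the signed-to-unsigned conversions right: a negative length means "take the rest of the string", and the start offset is compared against the size as an unsigned value so an out-of-range start simply yields the empty string. A standalone sketch of the same clamping logic on plain int64_t/std::string (the helper name is invented):

```cpp
#include <cstdint>
#include <iostream>
#include <stdexcept>
#include <string>
#include <type_traits>

// Clamp a (start, len) pair the way builtins.substring treats it:
// negative len means "to the end", an out-of-range start yields "".
static std::string safeSubstr(const std::string & s, int64_t start, int64_t len)
{
    if (start < 0)
        throw std::out_of_range("negative start position");

    // Compare as unsigned to avoid sign-conversion surprises; a start past
    // the end is fine and just produces the empty string.
    if (static_cast<std::make_unsigned_t<int64_t>>(start) >= s.size())
        return "";

    auto n = std::string::npos; // "rest of the string" by default
    if (len >= 0 && static_cast<std::make_unsigned_t<int64_t>>(len) < n)
        n = static_cast<std::string::size_type>(len);

    return s.substr(static_cast<std::string::size_type>(start), n);
}

int main()
{
    std::cout << safeSubstr("nixpkgs", 3, -1) << '\n'; // "pkgs"
    std::cout << safeSubstr("nixpkgs", 3, 2) << '\n';  // "pk"
    std::cout << safeSubstr("nixpkgs", 99, 5) << '\n'; // ""
}
```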
|
||||
|
||||
static RegisterPrimOp primop_substring({
|
||||
|
|
@ -4263,7 +4315,7 @@ static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args
|
|||
state.forceAttrs(*args[0], pos, "while evaluating the first argument passed to builtins.convertHash");
|
||||
auto inputAttrs = args[0]->attrs();
|
||||
|
||||
auto iteratorHash = getAttr(state, state.symbols.create("hash"), inputAttrs, "while locating the attribute 'hash'");
|
||||
auto iteratorHash = state.getAttr(state.symbols.create("hash"), inputAttrs, "while locating the attribute 'hash'");
|
||||
auto hash = state.forceStringNoCtx(*iteratorHash->value, pos, "while evaluating the attribute 'hash'");
|
||||
|
||||
auto iteratorHashAlgo = inputAttrs->get(state.symbols.create("hashAlgo"));
|
||||
|
|
@ -4271,7 +4323,7 @@ static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args
|
|||
if (iteratorHashAlgo)
|
||||
ha = parseHashAlgo(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
|
||||
|
||||
auto iteratorToHashFormat = getAttr(state, state.symbols.create("toHashFormat"), args[0]->attrs(), "while locating the attribute 'toHashFormat'");
|
||||
auto iteratorToHashFormat = state.getAttr(state.symbols.create("toHashFormat"), args[0]->attrs(), "while locating the attribute 'toHashFormat'");
|
||||
HashFormat hf = parseHashFormat(state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'"));
|
||||
|
||||
v.mkString(Hash::parseAny(hash, ha).to_string(hf, hf == HashFormat::SRI));
|
||||
|
|
@ -4496,7 +4548,7 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
|||
// Add a list for matched substrings.
|
||||
const size_t slen = match.size() - 1;
|
||||
|
||||
// Start at 1, beacause the first match is the whole string.
|
||||
// Start at 1, because the first match is the whole string.
|
||||
auto list2 = state.buildList(slen);
|
||||
for (const auto & [si, v2] : enumerate(list2)) {
|
||||
if (!match[si + 1].matched)
|
||||
|
|
@ -4577,7 +4629,7 @@ static void prim_concatStringsSep(EvalState & state, const PosIdx pos, Value * *
|
|||
res.reserve((args[1]->listSize() + 32) * sep.size());
|
||||
bool first = true;
|
||||
|
||||
for (auto elem : args[1]->listItems()) {
|
||||
for (auto elem : args[1]->listView()) {
|
||||
if (first) first = false; else res += sep;
|
||||
res += *state.coerceToString(pos, *elem, context, "while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep");
|
||||
}
|
||||
|
|
@ -4605,13 +4657,13 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a
|
|||
"'from' and 'to' arguments passed to builtins.replaceStrings have different lengths"
|
||||
).atPos(pos).debugThrow();
|
||||
|
||||
std::vector<std::string> from;
|
||||
std::vector<std::string_view> from;
|
||||
from.reserve(args[0]->listSize());
|
||||
for (auto elem : args[0]->listItems())
|
||||
for (auto elem : args[0]->listView())
|
||||
from.emplace_back(state.forceString(*elem, pos, "while evaluating one of the strings to replace passed to builtins.replaceStrings"));
|
||||
|
||||
std::unordered_map<size_t, std::string> cache;
|
||||
auto to = args[1]->listItems();
|
||||
std::unordered_map<size_t, std::string_view> cache;
|
||||
auto to = args[1]->listView();
|
||||
|
||||
NixStringContext context;
|
||||
auto s = state.forceString(*args[2], context, pos, "while evaluating the third argument passed to builtins.replaceStrings");
|
||||
|
|
@ -4864,7 +4916,7 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings)
|
|||
1683705525
|
||||
```
|
||||
|
||||
The [store path](@docroot@/store/store-path.md) of a derivation depending on `currentTime` differs for each evaluation unless both evaluate `builtins.currentTime` in the same second.
|
||||
The [store path](@docroot@/store/store-path.md) of a derivation depending on `currentTime` differs for each evaluation, unless both evaluate `builtins.currentTime` in the same second.
|
||||
)",
|
||||
.impureOnly = true,
|
||||
});
|
||||
|
|
@ -5040,7 +5092,7 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings)
|
|||
|
||||
/* Now that we've added all primops, sort the `builtins' set,
|
||||
because attribute lookups expect it to be sorted. */
|
||||
getBuiltins().payload.attrs->sort();
|
||||
const_cast<Bindings *>(getBuiltins().attrs())->sort();
|
||||
|
||||
staticBaseEnv->sort();
|
||||
|
||||
|
|
|
|||
|
|
@ -332,7 +332,7 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
|
|||
name
|
||||
).atPos(i.pos).debugThrow();
|
||||
}
|
||||
for (auto elem : attr->value->listItems()) {
|
||||
for (auto elem : attr->value->listView()) {
|
||||
auto outputName = state.forceStringNoCtx(*elem, attr->pos, "while evaluating an output name within a string context");
|
||||
context.emplace(NixStringContextElem::Built {
|
||||
.drvPath = makeConstantStorePathRef(namePath),
|
||||
|
|
|
|||
|
|
@ -129,7 +129,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
|
|||
|
||||
if (attrName == "fromPath") {
|
||||
NixStringContext context;
|
||||
fromPath = state.coerceToStorePath(attr.pos, *attr.value, context, attrHint());
|
||||
fromPath = state.coerceToStorePath(attr.pos, *attr.value, context, attrHint()); // FIXME: overflow
|
||||
}
|
||||
|
||||
else if (attrName == "toPath") {
|
||||
|
|
|
|||
|
|
@ -303,7 +303,7 @@ static RegisterPrimOp primop_fetchTree({
|
|||
- `"tarball"`
|
||||
|
||||
Download a tar archive and extract it into the Nix store.
|
||||
This has the same underyling implementation as [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball)
|
||||
This has the same underlying implementation as [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball)
|
||||
|
||||
- `url` (String, required)
|
||||
|
||||
|
|
@ -533,11 +533,12 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
|||
auto storePath =
|
||||
unpack
|
||||
? fetchToStore(
|
||||
state.fetchSettings,
|
||||
*state.store,
|
||||
fetchers::downloadTarball(state.store, state.fetchSettings, *url),
|
||||
FetchMode::Copy,
|
||||
name)
|
||||
: fetchers::downloadFile(state.store, *url, name).storePath;
|
||||
: fetchers::downloadFile(state.store, state.fetchSettings, *url, name).storePath;
|
||||
|
||||
if (expectedHash) {
|
||||
auto hash = unpack
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
generated_headers += gen_header.process(
|
||||
'derivation.nix',
|
||||
preserve_path_from: meson.project_source_root(),
|
||||
preserve_path_from : meson.project_source_root(),
|
||||
)
|
||||
|
||||
sources += files(
|
||||
|
|
|
|||
|
|
@ -54,11 +54,13 @@ void printAmbiguous(
|
|||
break;
|
||||
}
|
||||
case nList:
|
||||
if (seen && v.listSize() && !seen->insert(v.listElems()).second)
|
||||
/* Use a pointer to the Value itself instead of a pointer to its elements, because
the latter would require explicitly handling the SmallList case. */
|
||||
if (seen && v.listSize() && !seen->insert(&v).second)
|
||||
str << "«repeated»";
|
||||
else {
|
||||
str << "[ ";
|
||||
for (auto v2 : v.listItems()) {
|
||||
for (auto v2 : v.listView()) {
|
||||
if (v2)
|
||||
printAmbiguous(state, *v2, str, seen, depth - 1);
|
||||
else
|
||||
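
The printAmbiguous change switches the "seen" set from element-pointer keys to the Value pointer itself, so small inline lists need no special handling. The underlying pattern is ordinary cycle detection while printing a possibly self-referencing structure; a toy version with invented types, purely for illustration:

```cpp
#include <iostream>
#include <unordered_set>
#include <vector>

// A node that can contain pointers back into the same graph.
struct Node
{
    int id;
    std::vector<Node *> children;
};

// Print the graph, marking nodes we've already visited as «repeated»
// instead of recursing into them forever.
static void print(const Node & n, std::unordered_set<const Node *> & seen)
{
    if (!seen.insert(&n).second) {
        std::cout << "«repeated»";
        return;
    }
    std::cout << n.id << " [ ";
    for (auto * c : n.children)
        print(*c, seen);
    std::cout << "] ";
}

int main()
{
    Node a{1, {}}, b{2, {}};
    a.children = {&b};
    b.children = {&a}; // cycle: a -> b -> a

    std::unordered_set<const Node *> seen;
    print(a, seen);
    std::cout << '\n'; // 1 [ 2 [ «repeated»] ]
}
```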
|
|
|
|||
|
|
@ -419,8 +419,8 @@ private:
|
|||
if (depth < options.maxDepth) {
|
||||
increaseIndent();
|
||||
output << "[";
|
||||
auto listItems = v.listItems();
|
||||
auto prettyPrint = shouldPrettyPrintList(listItems);
|
||||
auto listItems = v.listView();
|
||||
auto prettyPrint = shouldPrettyPrintList(listItems.span());
|
||||
|
||||
size_t currentListItemsPrinted = 0;
|
||||
|
||||
|
|
@ -457,13 +457,13 @@ private:
|
|||
|
||||
if (v.isLambda()) {
|
||||
output << "lambda";
|
||||
if (v.payload.lambda.fun) {
|
||||
if (v.payload.lambda.fun->name) {
|
||||
output << " " << state.symbols[v.payload.lambda.fun->name];
|
||||
if (v.lambda().fun) {
|
||||
if (v.lambda().fun->name) {
|
||||
output << " " << state.symbols[v.lambda().fun->name];
|
||||
}
|
||||
|
||||
std::ostringstream s;
|
||||
s << state.positions[v.payload.lambda.fun->pos];
|
||||
s << state.positions[v.lambda().fun->pos];
|
||||
output << " @ " << filterANSIEscapes(toView(s));
|
||||
}
|
||||
} else if (v.isPrimOp()) {
|
||||
|
|
|
|||
|
|
@ -74,7 +74,7 @@ json printValueAsJSON(EvalState & state, bool strict,
|
|||
case nList: {
|
||||
out = json::array();
|
||||
int i = 0;
|
||||
for (auto elem : v.listItems()) {
|
||||
for (auto elem : v.listView()) {
|
||||
try {
|
||||
out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore));
|
||||
} catch (Error & e) {
|
||||
|
|
|
|||
|
|
@ -114,7 +114,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
|
||||
case nList: {
|
||||
XMLOpenElement _(doc, "list");
|
||||
for (auto v2 : v.listItems())
|
||||
for (auto v2 : v.listView())
|
||||
printValueAsXML(state, strict, location, *v2, doc, context, drvsSeen, pos);
|
||||
break;
|
||||
}
|
||||
|
|
@ -126,18 +126,18 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
break;
|
||||
}
|
||||
XMLAttrs xmlAttrs;
|
||||
if (location) posToXML(state, xmlAttrs, state.positions[v.payload.lambda.fun->pos]);
|
||||
if (location) posToXML(state, xmlAttrs, state.positions[v.lambda().fun->pos]);
|
||||
XMLOpenElement _(doc, "function", xmlAttrs);
|
||||
|
||||
if (v.payload.lambda.fun->hasFormals()) {
|
||||
if (v.lambda().fun->hasFormals()) {
|
||||
XMLAttrs attrs;
|
||||
if (v.payload.lambda.fun->arg) attrs["name"] = state.symbols[v.payload.lambda.fun->arg];
|
||||
if (v.payload.lambda.fun->formals->ellipsis) attrs["ellipsis"] = "1";
|
||||
if (v.lambda().fun->arg) attrs["name"] = state.symbols[v.lambda().fun->arg];
|
||||
if (v.lambda().fun->formals->ellipsis) attrs["ellipsis"] = "1";
|
||||
XMLOpenElement _(doc, "attrspat", attrs);
|
||||
for (auto & i : v.payload.lambda.fun->formals->lexicographicOrder(state.symbols))
|
||||
for (auto & i : v.lambda().fun->formals->lexicographicOrder(state.symbols))
|
||||
doc.writeEmptyElement("attr", singletonAttrs("name", state.symbols[i.name]));
|
||||
} else
|
||||
doc.writeEmptyElement("varpat", singletonAttrs("name", state.symbols[v.payload.lambda.fun->arg]));
|
||||
doc.writeEmptyElement("varpat", singletonAttrs("name", state.symbols[v.lambda().fun->arg]));
|
||||
|
||||
break;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -89,9 +89,9 @@ bool getBoolAttr(const Attrs & attrs, const std::string & name)
|
|||
return *s;
|
||||
}
|
||||
|
||||
std::map<std::string, std::string> attrsToQuery(const Attrs & attrs)
|
||||
StringMap attrsToQuery(const Attrs & attrs)
|
||||
{
|
||||
std::map<std::string, std::string> query;
|
||||
StringMap query;
|
||||
for (auto & attr : attrs) {
|
||||
if (auto v = std::get_if<uint64_t>(&attr.second)) {
|
||||
query.insert_or_assign(attr.first, fmt("%d", *v));
|
||||
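
attrsToQuery walks the fetcher attrs, whose values are a small variant, and renders each one to a string for a URL query. A self-contained approximation using std::variant and std::get_if; the Attrs alias and the set of alternatives here are simplified stand-ins for the real fetchers::Attrs type:

```cpp
#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <variant>

// Simplified stand-in for fetchers::Attrs: name -> (int | string | bool).
using Attr = std::variant<uint64_t, std::string, bool>;
using Attrs = std::map<std::string, Attr>;

// Render every attribute as a string, like attrsToQuery does for URL queries.
static std::map<std::string, std::string> attrsToQuery(const Attrs & attrs)
{
    std::map<std::string, std::string> query;
    for (auto & [name, value] : attrs) {
        if (auto v = std::get_if<uint64_t>(&value))
            query.insert_or_assign(name, std::to_string(*v));
        else if (auto s = std::get_if<std::string>(&value))
            query.insert_or_assign(name, *s);
        else if (auto b = std::get_if<bool>(&value))
            query.insert_or_assign(name, *b ? "1" : "0");
    }
    return query;
}

int main()
{
    Attrs attrs{{"rev", std::string("abc123")}, {"lastModified", uint64_t(1683705525)}, {"shallow", true}};
    for (auto & [k, v] : attrsToQuery(attrs))
        std::cout << k << '=' << v << '\n';
}
```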
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
#include "nix/fetchers/cache.hh"
|
||||
#include "nix/fetchers/fetch-settings.hh"
|
||||
#include "nix/util/users.hh"
|
||||
#include "nix/store/sqlite.hh"
|
||||
#include "nix/util/sync.hh"
|
||||
|
|
@ -163,10 +164,12 @@ struct CacheImpl : Cache
|
|||
}
|
||||
};
|
||||
|
||||
ref<Cache> getCache()
|
||||
ref<Cache> Settings::getCache() const
|
||||
{
|
||||
static auto cache = std::make_shared<CacheImpl>();
|
||||
return ref<Cache>(cache);
|
||||
auto cache(_cache.lock());
|
||||
if (!*cache)
|
||||
*cache = std::make_shared<CacheImpl>();
|
||||
return ref<Cache>(*cache);
|
||||
}
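
Settings::getCache() now creates the cache lazily and guards the shared_ptr with Nix's Sync wrapper instead of relying on a function-local static. A rough equivalent using a plain std::mutex; CacheImpl here is a dummy placeholder and Nix's Sync/ref types are only approximated:

```cpp
#include <iostream>
#include <memory>
#include <mutex>

struct Cache
{
    virtual ~Cache() = default;
};

struct CacheImpl : Cache
{
    CacheImpl() { std::cout << "cache created\n"; } // happens at most once
};

struct Settings
{
    std::shared_ptr<Cache> getCache() const
    {
        // Take the lock, create the cache on first use, then hand out the
        // same instance on every later call.
        std::lock_guard<std::mutex> lock(cacheMutex);
        if (!cache)
            cache = std::make_shared<CacheImpl>();
        return cache;
    }

private:
    mutable std::mutex cacheMutex;
    mutable std::shared_ptr<Cache> cache;
};

int main()
{
    Settings settings;
    auto a = settings.getCache();
    auto b = settings.getCache();
    std::cout << (a == b) << '\n'; // 1: same cache object
}
```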
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
#include "nix/fetchers/fetch-to-store.hh"
|
||||
#include "nix/fetchers/fetchers.hh"
|
||||
#include "nix/fetchers/fetch-settings.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
@ -16,6 +17,7 @@ fetchers::Cache::Key makeSourcePathToHashCacheKey(
|
|||
}
|
||||
|
||||
StorePath fetchToStore(
|
||||
const fetchers::Settings & settings,
|
||||
Store & store,
|
||||
const SourcePath & path,
|
||||
FetchMode mode,
|
||||
|
|
@ -24,10 +26,11 @@ StorePath fetchToStore(
|
|||
PathFilter * filter,
|
||||
RepairFlag repair)
|
||||
{
|
||||
return fetchToStore2(store, path, mode, name, method, filter, repair).first;
|
||||
return fetchToStore2(settings, store, path, mode, name, method, filter, repair).first;
|
||||
}
|
||||
|
||||
std::pair<StorePath, Hash> fetchToStore2(
|
||||
const fetchers::Settings & settings,
|
||||
Store & store,
|
||||
const SourcePath & path,
|
||||
FetchMode mode,
|
||||
|
|
@ -45,7 +48,7 @@ std::pair<StorePath, Hash> fetchToStore2(
|
|||
|
||||
if (fingerprint) {
|
||||
cacheKey = makeSourcePathToHashCacheKey(*fingerprint, method, subpath.abs());
|
||||
if (auto res = fetchers::getCache()->lookup(*cacheKey)) {
|
||||
if (auto res = settings.getCache()->lookup(*cacheKey)) {
|
||||
auto hash = Hash::parseSRI(fetchers::getStrAttr(*res, "hash"));
|
||||
auto storePath = store.makeFixedOutputPathFromCA(name,
|
||||
ContentAddressWithReferences::fromParts(method, hash, {}));
|
||||
|
|
@ -96,7 +99,7 @@ std::pair<StorePath, Hash> fetchToStore2(
|
|||
});
|
||||
|
||||
if (cacheKey)
|
||||
fetchers::getCache()->upsert(*cacheKey, {{"hash", hash.to_string(HashFormat::SRI, true)}});
|
||||
settings.getCache()->upsert(*cacheKey, {{"hash", hash.to_string(HashFormat::SRI, true)}});
|
||||
|
||||
return {storePath, hash};
|
||||
}
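
fetchToStore2 avoids re-copying a source tree whose fingerprint it has already seen: it looks up a (fingerprint, method, subpath) key in the fetcher cache and records the resulting content hash after a real copy. A toy in-memory version of that memoisation; the key and hash types are simplified and the real cache is SQLite-backed, so this is only a sketch of the control flow:

```cpp
#include <functional>
#include <iostream>
#include <map>
#include <string>
#include <tuple>

using CacheKey = std::tuple<std::string /*fingerprint*/, std::string /*method*/, std::string /*subpath*/>;

// In-memory stand-in for the fetcher cache: key -> SRI hash of the result.
static std::map<CacheKey, std::string> cache;

// "Copy to the store" only when the fingerprint hasn't been seen before.
static std::string fetchToStoreCached(const CacheKey & key,
                                      const std::function<std::string()> & expensiveCopy)
{
    if (auto it = cache.find(key); it != cache.end()) {
        std::cout << "cache hit\n";
        return it->second;
    }
    auto hash = expensiveCopy(); // the costly part: hashing/copying the tree
    cache.emplace(key, hash);    // remember it for next time
    return hash;
}

int main()
{
    CacheKey key{"git:abcd1234", "nar", "/"};
    auto copy = [] { std::cout << "copying to store...\n"; return std::string("sha256-example"); };

    fetchToStoreCached(key, copy); // copies
    fetchToStoreCached(key, copy); // cache hit, no copy
}
```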
|
||||
|
|
|
|||
|
|
@ -135,7 +135,7 @@ ParsedURL Input::toURL() const
|
|||
return scheme->toURL(*this);
|
||||
}
|
||||
|
||||
std::string Input::toURLString(const std::map<std::string, std::string> & extraQuery) const
|
||||
std::string Input::toURLString(const StringMap & extraQuery) const
|
||||
{
|
||||
auto url = toURL();
|
||||
for (auto & attr : extraQuery)
|
||||
|
|
@ -198,7 +198,7 @@ std::tuple<StorePath, ref<SourceAccessor>, Input> Input::fetchToStore(ref<Store>
|
|||
try {
|
||||
auto [accessor, result] = getAccessorUnchecked(store);
|
||||
|
||||
auto storePath = nix::fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, result.getName());
|
||||
auto storePath = nix::fetchToStore(*settings, *store, SourcePath(accessor), FetchMode::Copy, result.getName());
|
||||
|
||||
auto narHash = store->queryPathInfo(storePath)->narHash;
|
||||
result.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
#include "nix/fetchers/git-utils.hh"
|
||||
#include "nix/fetchers/git-lfs-fetch.hh"
|
||||
#include "nix/fetchers/cache.hh"
|
||||
#include "nix/fetchers/fetch-settings.hh"
|
||||
#include "nix/util/finally.hh"
|
||||
#include "nix/util/processes.hh"
|
||||
#include "nix/util/signals.hh"
|
||||
|
|
@ -321,8 +322,17 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
|
||||
for (size_t n = 0; n < git_commit_parentcount(commit->get()); ++n) {
|
||||
git_commit * parent;
|
||||
if (git_commit_parent(&parent, commit->get(), n))
|
||||
throw Error("getting parent of Git commit '%s': %s", *git_commit_id(commit->get()), git_error_last()->message);
|
||||
if (git_commit_parent(&parent, commit->get(), n)) {
|
||||
throw Error(
"Failed to retrieve the parent of Git commit '%s': %s. "
"This may be due to an incomplete repository history. "
"To resolve this, either enable the shallow parameter in your flake URL (?shallow=1) "
"or set the shallow parameter to true in builtins.fetchGit, "
"or fetch the complete history for this branch.",
*git_commit_id(commit->get()),
git_error_last()->message
);
|
||||
}
|
||||
todo.push(Commit(parent));
|
||||
}
|
||||
}
|
||||
|
|
@ -367,7 +377,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
if (git_config_iterator_glob_new(Setter(it), config.get(), "^submodule\\..*\\.(path|url|branch)$"))
|
||||
throw Error("iterating over .gitmodules: %s", git_error_last()->message);
|
||||
|
||||
std::map<std::string, std::string> entries;
|
||||
StringMap entries;
|
||||
|
||||
while (true) {
|
||||
git_config_entry * entry = nullptr;
|
||||
|
|
@ -586,7 +596,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
});
|
||||
|
||||
/* Evaluate result through status code and checking if public
|
||||
key fingerprints appear on stderr. This is neccessary
|
||||
key fingerprints appear on stderr. This is necessary
|
||||
because the git command might also succeed due to the
|
||||
commit being signed by gpg keys that are present in the
|
||||
users key agent. */
|
||||
|
|
@ -610,18 +620,18 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
throw Error("Commit signature verification on commit %s failed: %s", rev.gitRev(), output);
|
||||
}
|
||||
|
||||
Hash treeHashToNarHash(const Hash & treeHash) override
|
||||
Hash treeHashToNarHash(const fetchers::Settings & settings, const Hash & treeHash) override
|
||||
{
|
||||
auto accessor = getAccessor(treeHash, false, "");
|
||||
|
||||
fetchers::Cache::Key cacheKey{"treeHashToNarHash", {{"treeHash", treeHash.gitRev()}}};
|
||||
|
||||
if (auto res = fetchers::getCache()->lookup(cacheKey))
|
||||
if (auto res = settings.getCache()->lookup(cacheKey))
|
||||
return Hash::parseAny(fetchers::getStrAttr(*res, "narHash"), HashAlgorithm::SHA256);
|
||||
|
||||
auto narHash = accessor->hashPath(CanonPath::root);
|
||||
|
||||
fetchers::getCache()->upsert(cacheKey, fetchers::Attrs({{"narHash", narHash.to_string(HashFormat::SRI, true)}}));
|
||||
settings.getCache()->upsert(cacheKey, fetchers::Attrs({{"narHash", narHash.to_string(HashFormat::SRI, true)}}));
|
||||
|
||||
return narHash;
|
||||
}
|
||||
|
|
@ -655,28 +665,40 @@ ref<GitRepo> GitRepo::openRepo(const std::filesystem::path & path, bool create,
|
|||
|
||||
struct GitSourceAccessor : SourceAccessor
|
||||
{
|
||||
ref<GitRepoImpl> repo;
|
||||
Object root;
|
||||
std::optional<lfs::Fetch> lfsFetch = std::nullopt;
|
||||
struct State
|
||||
{
|
||||
ref<GitRepoImpl> repo;
|
||||
Object root;
|
||||
std::optional<lfs::Fetch> lfsFetch = std::nullopt;
|
||||
};
|
||||
|
||||
Sync<State> state_;
|
||||
|
||||
GitSourceAccessor(ref<GitRepoImpl> repo_, const Hash & rev, bool smudgeLfs)
|
||||
: repo(repo_)
|
||||
, root(peelToTreeOrBlob(lookupObject(*repo, hashToOID(rev)).get()))
|
||||
: state_{
|
||||
State {
|
||||
.repo = repo_,
|
||||
.root = peelToTreeOrBlob(lookupObject(*repo_, hashToOID(rev)).get()),
|
||||
.lfsFetch = smudgeLfs ? std::make_optional(lfs::Fetch(*repo_, hashToOID(rev))) : std::nullopt,
|
||||
}
|
||||
}
|
||||
{
|
||||
if (smudgeLfs)
|
||||
lfsFetch = std::make_optional(lfs::Fetch(*repo, hashToOID(rev)));
|
||||
}
|
||||
|
||||
std::string readBlob(const CanonPath & path, bool symlink)
|
||||
{
|
||||
const auto blob = getBlob(path, symlink);
|
||||
auto state(state_.lock());
|
||||
|
||||
if (lfsFetch) {
|
||||
if (lfsFetch->shouldFetch(path)) {
|
||||
const auto blob = getBlob(*state, path, symlink);
|
||||
|
||||
if (state->lfsFetch) {
|
||||
if (state->lfsFetch->shouldFetch(path)) {
|
||||
StringSink s;
|
||||
try {
|
||||
// FIXME: do we need to hold the state lock while
|
||||
// doing this?
|
||||
auto contents = std::string((const char *) git_blob_rawcontent(blob.get()), git_blob_rawsize(blob.get()));
|
||||
lfsFetch->fetch(contents, path, s, [&s](uint64_t size){ s.s.reserve(size); });
|
||||
state->lfsFetch->fetch(contents, path, s, [&s](uint64_t size){ s.s.reserve(size); });
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while smudging git-lfs file '%s'", path);
|
||||
throw;
|
||||
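
GitSourceAccessor's fields move into a State struct behind Sync, and the private helpers take a State & so the lock is acquired once per public call rather than once per helper. A skeletal version of that pattern with std::mutex standing in for Sync; the class and method names are invented for the example:

```cpp
#include <iostream>
#include <mutex>
#include <string>
#include <vector>

class Accessor
{
    // Everything the helpers touch lives in State, so whoever holds the lock
    // can pass it down without re-locking.
    struct State
    {
        std::vector<std::string> entries;
    };

    std::mutex mutex;
    State state;

    // Private helper: expects the caller to already hold the lock.
    bool lookup(State & s, const std::string & name)
    {
        for (auto & e : s.entries)
            if (e == name) return true;
        return false;
    }

public:
    Accessor() { state.entries = {"flake.nix", "src"}; }

    // Public entry point: lock once, then call helpers with State &.
    bool pathExists(const std::string & name)
    {
        std::lock_guard<std::mutex> lock(mutex);
        return lookup(state, name);
    }
};

int main()
{
    Accessor a;
    std::cout << a.pathExists("flake.nix") << ' ' << a.pathExists("nope") << '\n'; // 1 0
}
```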
|
|
@ -695,15 +717,18 @@ struct GitSourceAccessor : SourceAccessor
|
|||
|
||||
bool pathExists(const CanonPath & path) override
|
||||
{
|
||||
return path.isRoot() ? true : (bool) lookup(path);
|
||||
auto state(state_.lock());
|
||||
return path.isRoot() ? true : (bool) lookup(*state, path);
|
||||
}
|
||||
|
||||
std::optional<Stat> maybeLstat(const CanonPath & path) override
|
||||
{
|
||||
if (path.isRoot())
|
||||
return Stat { .type = git_object_type(root.get()) == GIT_OBJECT_TREE ? tDirectory : tRegular };
|
||||
auto state(state_.lock());
|
||||
|
||||
auto entry = lookup(path);
|
||||
if (path.isRoot())
|
||||
return Stat { .type = git_object_type(state->root.get()) == GIT_OBJECT_TREE ? tDirectory : tRegular };
|
||||
|
||||
auto entry = lookup(*state, path);
|
||||
if (!entry)
|
||||
return std::nullopt;
|
||||
|
||||
|
|
@ -731,6 +756,8 @@ struct GitSourceAccessor : SourceAccessor
|
|||
|
||||
DirEntries readDirectory(const CanonPath & path) override
|
||||
{
|
||||
auto state(state_.lock());
|
||||
|
||||
return std::visit(overloaded {
|
||||
[&](Tree tree) {
|
||||
DirEntries res;
|
||||
|
|
@ -748,7 +775,7 @@ struct GitSourceAccessor : SourceAccessor
|
|||
[&](Submodule) {
|
||||
return DirEntries();
|
||||
}
|
||||
}, getTree(path));
|
||||
}, getTree(*state, path));
|
||||
}
|
||||
|
||||
std::string readLink(const CanonPath & path) override
|
||||
|
|
@ -762,7 +789,9 @@ struct GitSourceAccessor : SourceAccessor
|
|||
*/
|
||||
std::optional<Hash> getSubmoduleRev(const CanonPath & path)
|
||||
{
|
||||
auto entry = lookup(path);
|
||||
auto state(state_.lock());
|
||||
|
||||
auto entry = lookup(*state, path);
|
||||
|
||||
if (!entry || git_tree_entry_type(entry) != GIT_OBJECT_COMMIT)
|
||||
return std::nullopt;
|
||||
|
|
@ -773,7 +802,7 @@ struct GitSourceAccessor : SourceAccessor
|
|||
std::unordered_map<CanonPath, TreeEntry> lookupCache;
|
||||
|
||||
/* Recursively look up 'path' relative to the root. */
|
||||
git_tree_entry * lookup(const CanonPath & path)
|
||||
git_tree_entry * lookup(State & state, const CanonPath & path)
|
||||
{
|
||||
auto i = lookupCache.find(path);
|
||||
if (i != lookupCache.end()) return i->second.get();
|
||||
|
|
@ -783,7 +812,7 @@ struct GitSourceAccessor : SourceAccessor
|
|||
|
||||
auto name = path.baseName().value();
|
||||
|
||||
auto parentTree = lookupTree(*parent);
|
||||
auto parentTree = lookupTree(state, *parent);
|
||||
if (!parentTree) return nullptr;
|
||||
|
||||
auto count = git_tree_entrycount(parentTree->get());
|
||||
|
|
@ -812,29 +841,29 @@ struct GitSourceAccessor : SourceAccessor
|
|||
return res;
|
||||
}
|
||||
|
||||
std::optional<Tree> lookupTree(const CanonPath & path)
|
||||
std::optional<Tree> lookupTree(State & state, const CanonPath & path)
|
||||
{
|
||||
if (path.isRoot()) {
|
||||
if (git_object_type(root.get()) == GIT_OBJECT_TREE)
|
||||
return dupObject<Tree>((git_tree *) &*root);
|
||||
if (git_object_type(state.root.get()) == GIT_OBJECT_TREE)
|
||||
return dupObject<Tree>((git_tree *) &*state.root);
|
||||
else
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
auto entry = lookup(path);
|
||||
auto entry = lookup(state, path);
|
||||
if (!entry || git_tree_entry_type(entry) != GIT_OBJECT_TREE)
|
||||
return std::nullopt;
|
||||
|
||||
Tree tree;
|
||||
if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *repo, entry))
|
||||
if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *state.repo, entry))
|
||||
throw Error("looking up directory '%s': %s", showPath(path), git_error_last()->message);
|
||||
|
||||
return tree;
|
||||
}
|
||||
|
||||
git_tree_entry * need(const CanonPath & path)
|
||||
git_tree_entry * need(State & state, const CanonPath & path)
|
||||
{
|
||||
auto entry = lookup(path);
|
||||
auto entry = lookup(state, path);
|
||||
if (!entry)
|
||||
throw Error("'%s' does not exist", showPath(path));
|
||||
return entry;
|
||||
|
|
@ -842,16 +871,16 @@ struct GitSourceAccessor : SourceAccessor
|
|||
|
||||
struct Submodule { };
|
||||
|
||||
std::variant<Tree, Submodule> getTree(const CanonPath & path)
|
||||
std::variant<Tree, Submodule> getTree(State & state, const CanonPath & path)
|
||||
{
|
||||
if (path.isRoot()) {
|
||||
if (git_object_type(root.get()) == GIT_OBJECT_TREE)
|
||||
return dupObject<Tree>((git_tree *) &*root);
|
||||
if (git_object_type(state.root.get()) == GIT_OBJECT_TREE)
|
||||
return dupObject<Tree>((git_tree *) &*state.root);
|
||||
else
|
||||
throw Error("Git root object '%s' is not a directory", *git_object_id(root.get()));
|
||||
throw Error("Git root object '%s' is not a directory", *git_object_id(state.root.get()));
|
||||
}
|
||||
|
||||
auto entry = need(path);
|
||||
auto entry = need(state, path);
|
||||
|
||||
if (git_tree_entry_type(entry) == GIT_OBJECT_COMMIT)
|
||||
return Submodule();
|
||||
|
|
@ -860,16 +889,16 @@ struct GitSourceAccessor : SourceAccessor
|
|||
throw Error("'%s' is not a directory", showPath(path));
|
||||
|
||||
Tree tree;
|
||||
if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *repo, entry))
|
||||
if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *state.repo, entry))
|
||||
throw Error("looking up directory '%s': %s", showPath(path), git_error_last()->message);
|
||||
|
||||
return tree;
|
||||
}
|
||||
|
||||
Blob getBlob(const CanonPath & path, bool expectSymlink)
|
||||
Blob getBlob(State & state, const CanonPath & path, bool expectSymlink)
|
||||
{
|
||||
if (!expectSymlink && git_object_type(root.get()) == GIT_OBJECT_BLOB)
|
||||
return dupObject<Blob>((git_blob *) &*root);
|
||||
if (!expectSymlink && git_object_type(state.root.get()) == GIT_OBJECT_BLOB)
|
||||
return dupObject<Blob>((git_blob *) &*state.root);
|
||||
|
||||
auto notExpected = [&]()
|
||||
{
|
||||
|
|
@ -882,7 +911,7 @@ struct GitSourceAccessor : SourceAccessor
|
|||
|
||||
if (path.isRoot()) notExpected();
|
||||
|
||||
auto entry = need(path);
|
||||
auto entry = need(state, path);
|
||||
|
||||
if (git_tree_entry_type(entry) != GIT_OBJECT_BLOB)
|
||||
notExpected();
|
||||
|
|
@ -897,7 +926,7 @@ struct GitSourceAccessor : SourceAccessor
|
|||
}
|
||||
|
||||
Blob blob;
|
||||
if (git_tree_entry_to_object((git_object * *) (git_blob * *) Setter(blob), *repo, entry))
|
||||
if (git_tree_entry_to_object((git_object * *) (git_blob * *) Setter(blob), *state.repo, entry))
|
||||
throw Error("looking up file '%s': %s", showPath(path), git_error_last()->message);
|
||||
|
||||
return blob;
|
||||
|
|
|
|||
|
|
@ -481,11 +481,11 @@ struct GitInputScheme : InputScheme
|
|||
return repoInfo;
|
||||
}
|
||||
|
||||
uint64_t getLastModified(const RepoInfo & repoInfo, const std::filesystem::path & repoDir, const Hash & rev) const
|
||||
uint64_t getLastModified(const Settings & settings, const RepoInfo & repoInfo, const std::filesystem::path & repoDir, const Hash & rev) const
|
||||
{
|
||||
Cache::Key key{"gitLastModified", {{"rev", rev.gitRev()}}};
|
||||
|
||||
auto cache = getCache();
|
||||
auto cache = settings.getCache();
|
||||
|
||||
if (auto res = cache->lookup(key))
|
||||
return getIntAttr(*res, "lastModified");
|
||||
|
|
@ -497,11 +497,11 @@ struct GitInputScheme : InputScheme
|
|||
return lastModified;
|
||||
}
|
||||
|
||||
uint64_t getRevCount(const RepoInfo & repoInfo, const std::filesystem::path & repoDir, const Hash & rev) const
|
||||
uint64_t getRevCount(const Settings & settings, const RepoInfo & repoInfo, const std::filesystem::path & repoDir, const Hash & rev) const
|
||||
{
|
||||
Cache::Key key{"gitRevCount", {{"rev", rev.gitRev()}}};
|
||||
|
||||
auto cache = getCache();
|
||||
auto cache = settings.getCache();
|
||||
|
||||
if (auto revCountAttrs = cache->lookup(key))
|
||||
return getIntAttr(*revCountAttrs, "revCount");
|
||||
|
|
@ -679,12 +679,12 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
Attrs infoAttrs({
|
||||
{"rev", rev.gitRev()},
|
||||
{"lastModified", getLastModified(repoInfo, repoDir, rev)},
|
||||
{"lastModified", getLastModified(*input.settings, repoInfo, repoDir, rev)},
|
||||
});
|
||||
|
||||
if (!getShallowAttr(input))
|
||||
infoAttrs.insert_or_assign("revCount",
|
||||
getRevCount(repoInfo, repoDir, rev));
|
||||
getRevCount(*input.settings, repoInfo, repoDir, rev));
|
||||
|
||||
printTalkative("using revision %s of repo '%s'", rev.gitRev(), repoInfo.locationToArg());
|
||||
|
||||
|
|
@ -799,8 +799,10 @@ struct GitInputScheme : InputScheme
|
|||
auto rev = repoInfo.workdirInfo.headRev.value_or(nullRev);
|
||||
|
||||
input.attrs.insert_or_assign("rev", rev.gitRev());
|
||||
input.attrs.insert_or_assign("revCount",
|
||||
rev == nullRev ? 0 : getRevCount(repoInfo, repoPath, rev));
|
||||
if (!getShallowAttr(input)) {
|
||||
input.attrs.insert_or_assign("revCount",
|
||||
rev == nullRev ? 0 : getRevCount(*input.settings, repoInfo, repoPath, rev));
|
||||
}
|
||||
|
||||
verifyCommit(input, repo);
|
||||
} else {
|
||||
|
|
@ -819,7 +821,7 @@ struct GitInputScheme : InputScheme
|
|||
input.attrs.insert_or_assign(
|
||||
"lastModified",
|
||||
repoInfo.workdirInfo.headRev
|
||||
? getLastModified(repoInfo, repoPath, *repoInfo.workdirInfo.headRev)
|
||||
? getLastModified(*input.settings, repoInfo, repoPath, *repoInfo.workdirInfo.headRev)
|
||||
: 0);
|
||||
|
||||
return {accessor, std::move(input)};
|
||||
|
|
|
|||
|
|
@ -175,7 +175,7 @@ struct GitArchiveInputScheme : InputScheme
|
|||
return input;
|
||||
}
|
||||
|
||||
// Search for the longest possible match starting from the begining and ending at either the end or a path segment.
|
||||
// Search for the longest possible match starting from the beginning and ending at either the end or a path segment.
|
||||
std::optional<std::string> getAccessToken(const fetchers::Settings & settings, const std::string & host, const std::string & url) const override
|
||||
{
|
||||
auto tokens = settings.accessTokens.get();
|
||||
|
|
@ -265,7 +265,7 @@ struct GitArchiveInputScheme : InputScheme
|
|||
input.attrs.erase("ref");
|
||||
input.attrs.insert_or_assign("rev", rev->gitRev());
|
||||
|
||||
auto cache = getCache();
|
||||
auto cache = input.settings->getCache();
|
||||
|
||||
Cache::Key treeHashKey{"gitRevToTreeHash", {{"rev", rev->gitRev()}}};
|
||||
Cache::Key lastModifiedKey{"gitRevToLastModified", {{"rev", rev->gitRev()}}};
|
||||
|
|
@ -409,7 +409,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
|
|||
auto json = nlohmann::json::parse(
|
||||
readFile(
|
||||
store->toRealPath(
|
||||
downloadFile(store, url, "source", headers).storePath)));
|
||||
downloadFile(store, *input.settings, url, "source", headers).storePath)));
|
||||
|
||||
return RefInfo {
|
||||
.rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1),
|
||||
|
|
@ -483,7 +483,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
|
|||
auto json = nlohmann::json::parse(
|
||||
readFile(
|
||||
store->toRealPath(
|
||||
downloadFile(store, url, "source", headers).storePath)));
|
||||
downloadFile(store, *input.settings, url, "source", headers).storePath)));
|
||||
|
||||
if (json.is_array() && json.size() >= 1 && json[0]["id"] != nullptr) {
|
||||
return RefInfo {
|
||||
|
|
@ -553,7 +553,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
|
|||
std::string refUri;
|
||||
if (ref == "HEAD") {
|
||||
auto file = store->toRealPath(
|
||||
downloadFile(store, fmt("%s/HEAD", base_url), "source", headers).storePath);
|
||||
downloadFile(store, *input.settings, fmt("%s/HEAD", base_url), "source", headers).storePath);
|
||||
std::ifstream is(file);
|
||||
std::string line;
|
||||
getline(is, line);
|
||||
|
|
@ -569,7 +569,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
|
|||
std::regex refRegex(refUri);
|
||||
|
||||
auto file = store->toRealPath(
|
||||
downloadFile(store, fmt("%s/info/refs", base_url), "source", headers).storePath);
|
||||
downloadFile(store, *input.settings, fmt("%s/info/refs", base_url), "source", headers).storePath);
|
||||
std::ifstream is(file);
|
||||
|
||||
std::string line;
|
||||
|
|
|
|||
|
|
@ -37,7 +37,7 @@ std::optional<bool> maybeGetBoolAttr(const Attrs & attrs, const std::string & na
|
|||
|
||||
bool getBoolAttr(const Attrs & attrs, const std::string & name);
|
||||
|
||||
std::map<std::string, std::string> attrsToQuery(const Attrs & attrs);
|
||||
StringMap attrsToQuery(const Attrs & attrs);
|
||||
|
||||
Hash getRevAttr(const Attrs & attrs, const std::string & name);
|
||||
|
||||
|
|
|
|||
|
|
@ -92,6 +92,4 @@ struct Cache
|
|||
Store & store) = 0;
|
||||
};
|
||||
|
||||
ref<Cache> getCache();
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,6 +3,8 @@
|
|||
|
||||
#include "nix/util/types.hh"
|
||||
#include "nix/util/configuration.hh"
|
||||
#include "nix/util/ref.hh"
|
||||
#include "nix/util/sync.hh"
|
||||
|
||||
#include <map>
|
||||
#include <limits>
|
||||
|
|
@ -11,6 +13,8 @@
|
|||
|
||||
namespace nix::fetchers {
|
||||
|
||||
struct Cache;
|
||||
|
||||
struct Settings : public Config
|
||||
{
|
||||
Settings();
|
||||
|
|
@ -106,6 +110,11 @@ struct Settings : public Config
|
|||
|
||||
When empty, disables the global flake registry.
|
||||
)"};
|
||||
|
||||
ref<Cache> getCache() const;
|
||||
|
||||
private:
|
||||
mutable Sync<std::shared_ptr<Cache>> _cache;
|
||||
};
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@ enum struct FetchMode { DryRun, Copy };
|
|||
* Copy the `path` to the Nix store.
|
||||
*/
|
||||
StorePath fetchToStore(
|
||||
const fetchers::Settings & settings,
|
||||
Store & store,
|
||||
const SourcePath & path,
|
||||
FetchMode mode,
|
||||
|
|
@ -24,6 +25,7 @@ StorePath fetchToStore(
|
|||
RepairFlag repair = NoRepair);
|
||||
|
||||
std::pair<StorePath, Hash> fetchToStore2(
|
||||
const fetchers::Settings & settings,
|
||||
Store & store,
|
||||
const SourcePath & path,
|
||||
FetchMode mode,
|
||||
|
|
|
|||
|
|
@ -71,7 +71,7 @@ public:
|
|||
|
||||
ParsedURL toURL() const;
|
||||
|
||||
std::string toURLString(const std::map<std::string, std::string> & extraQuery = {}) const;
|
||||
std::string toURLString(const StringMap & extraQuery = {}) const;
|
||||
|
||||
std::string to_string() const;
|
||||
|
||||
|
|
|
|||
|
|
@ -1,3 +1,6 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "nix/util/canon-path.hh"
|
||||
#include "nix/util/serialise.hh"
|
||||
#include "nix/util/url.hh"
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
namespace fetchers { struct PublicKey; }
|
||||
namespace fetchers { struct PublicKey; struct Settings; }
|
||||
|
||||
/**
|
||||
* A sink that writes into a Git repository. Note that nothing may be written
|
||||
|
|
@ -115,7 +115,7 @@ struct GitRepo
|
|||
* Given a Git tree hash, compute the hash of its NAR
|
||||
* serialisation. This is memoised on-disk.
|
||||
*/
|
||||
virtual Hash treeHashToNarHash(const Hash & treeHash) = 0;
|
||||
virtual Hash treeHashToNarHash(const fetchers::Settings & settings, const Hash & treeHash) = 0;
|
||||
|
||||
/**
|
||||
* If the specified Git object is a directory with a single entry
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@ struct DownloadFileResult
|
|||
|
||||
DownloadFileResult downloadFile(
|
||||
ref<Store> store,
|
||||
const Settings & settings,
|
||||
const std::string & url,
|
||||
const std::string & name,
|
||||
const Headers & headers = {});
|
||||
|
|
|
|||
|
|
@ -253,13 +253,13 @@ struct MercurialInputScheme : InputScheme
|
|||
}};
|
||||
|
||||
if (!input.getRev()) {
|
||||
if (auto res = getCache()->lookupWithTTL(refToRevKey))
|
||||
if (auto res = input.settings->getCache()->lookupWithTTL(refToRevKey))
|
||||
input.attrs.insert_or_assign("rev", getRevAttr(*res, "rev").gitRev());
|
||||
}
|
||||
|
||||
/* If we have a rev, check if we have a cached store path. */
|
||||
if (auto rev = input.getRev()) {
|
||||
if (auto res = getCache()->lookupStorePath(revInfoKey(*rev), *store))
|
||||
if (auto res = input.settings->getCache()->lookupStorePath(revInfoKey(*rev), *store))
|
||||
return makeResult(res->value, res->storePath);
|
||||
}
|
||||
|
||||
|
|
@ -309,7 +309,7 @@ struct MercurialInputScheme : InputScheme
|
|||
|
||||
/* Now that we have the rev, check the cache again for a
|
||||
cached store path. */
|
||||
if (auto res = getCache()->lookupStorePath(revInfoKey(rev), *store))
|
||||
if (auto res = input.settings->getCache()->lookupStorePath(revInfoKey(rev), *store))
|
||||
return makeResult(res->value, res->storePath);
|
||||
|
||||
Path tmpDir = createTempDir();
|
||||
|
|
@ -326,9 +326,9 @@ struct MercurialInputScheme : InputScheme
|
|||
});
|
||||
|
||||
if (!origRev)
|
||||
getCache()->upsert(refToRevKey, {{"rev", rev.gitRev()}});
|
||||
input.settings->getCache()->upsert(refToRevKey, {{"rev", rev.gitRev()}});
|
||||
|
||||
getCache()->upsert(revInfoKey(rev), *store, infoAttrs, storePath);
|
||||
input.settings->getCache()->upsert(revInfoKey(rev), *store, infoAttrs, storePath);
|
||||
|
||||
return makeResult(infoAttrs, std::move(storePath));
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@
|
|||
#include "nix/fetchers/store-path-accessor.hh"
|
||||
#include "nix/fetchers/cache.hh"
|
||||
#include "nix/fetchers/fetch-to-store.hh"
|
||||
#include "nix/fetchers/fetch-settings.hh"
|
||||
|
||||
namespace nix::fetchers {
|
||||
|
||||
|
|
@ -149,7 +150,7 @@ struct PathInputScheme : InputScheme
|
|||
// store, pre-create an entry in the fetcher cache.
|
||||
auto info = store->queryPathInfo(*storePath);
|
||||
accessor->fingerprint = fmt("path:%s", store->queryPathInfo(*storePath)->narHash.to_string(HashFormat::SRI, true));
|
||||
fetchers::getCache()->upsert(
|
||||
input.settings->getCache()->upsert(
|
||||
makeSourcePathToHashCacheKey(*accessor->fingerprint, ContentAddressMethod::Raw::NixArchive, "/"),
|
||||
{{"hash", info->narHash.to_string(HashFormat::SRI, true)}});
|
||||
|
||||
|
|
|
|||
|
|
@ -156,7 +156,7 @@ static std::shared_ptr<Registry> getGlobalRegistry(const Settings & settings, re
|
|||
}
|
||||
|
||||
if (!isAbsolute(path)) {
|
||||
auto storePath = downloadFile(store, path, "flake-registry.json").storePath;
|
||||
auto storePath = downloadFile(store, settings, path, "flake-registry.json").storePath;
|
||||
if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
|
||||
store2->addPermRoot(storePath, getCacheDir() + "/flake-registry.json");
|
||||
path = store->toRealPath(storePath);
|
||||
|
|
|
|||
|
|
@ -9,11 +9,13 @@
|
|||
#include "nix/fetchers/store-path-accessor.hh"
|
||||
#include "nix/store/store-api.hh"
|
||||
#include "nix/fetchers/git-utils.hh"
|
||||
#include "nix/fetchers/fetch-settings.hh"
|
||||
|
||||
namespace nix::fetchers {
|
||||
|
||||
DownloadFileResult downloadFile(
|
||||
ref<Store> store,
|
||||
const Settings & settings,
|
||||
const std::string & url,
|
||||
const std::string & name,
|
||||
const Headers & headers)
|
||||
|
|
@ -25,7 +27,7 @@ DownloadFileResult downloadFile(
|
|||
{"name", name},
|
||||
}}};
|
||||
|
||||
auto cached = getCache()->lookupStorePath(key, *store);
|
||||
auto cached = settings.getCache()->lookupStorePath(key, *store);
|
||||
|
||||
auto useCached = [&]() -> DownloadFileResult
|
||||
{
|
||||
|
|
@ -92,7 +94,7 @@ DownloadFileResult downloadFile(
|
|||
key.second.insert_or_assign("url", url);
|
||||
assert(!res.urls.empty());
|
||||
infoAttrs.insert_or_assign("url", *res.urls.rbegin());
|
||||
getCache()->upsert(key, *store, infoAttrs, *storePath);
|
||||
settings.getCache()->upsert(key, *store, infoAttrs, *storePath);
|
||||
}
|
||||
|
||||
return {
|
||||
|
|
@ -104,13 +106,14 @@ DownloadFileResult downloadFile(
|
|||
}
|
||||
|
||||
static DownloadTarballResult downloadTarball_(
|
||||
const Settings & settings,
|
||||
const std::string & url,
|
||||
const Headers & headers,
|
||||
const std::string & displayPrefix)
|
||||
{
|
||||
Cache::Key cacheKey{"tarball", {{"url", url}}};
|
||||
|
||||
auto cached = getCache()->lookupExpired(cacheKey);
|
||||
auto cached = settings.getCache()->lookupExpired(cacheKey);
|
||||
|
||||
auto attrsToResult = [&](const Attrs & infoAttrs)
|
||||
{
|
||||
|
|
@ -196,7 +199,7 @@ static DownloadTarballResult downloadTarball_(
|
|||
/* Insert a cache entry for every URL in the redirect chain. */
|
||||
for (auto & url : res->urls) {
|
||||
cacheKey.second.insert_or_assign("url", url);
|
||||
getCache()->upsert(cacheKey, infoAttrs);
|
||||
settings.getCache()->upsert(cacheKey, infoAttrs);
|
||||
}
|
||||
|
||||
// FIXME: add a cache entry for immutableUrl? That could allow
|
||||
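
downloadFile records the result under every URL in the redirect chain, so a later request for either the original or the final URL is a cache hit. A minimal illustration of that bookkeeping; the cache and the "download" are fakes and the names are invented:

```cpp
#include <iostream>
#include <map>
#include <string>
#include <vector>

// Fake cache: URL -> store path.
static std::map<std::string, std::string> cache;

struct DownloadResult
{
    std::vector<std::string> urls; // every URL seen while following redirects
    std::string storePath;
};

static std::string download(const std::string & url)
{
    if (auto it = cache.find(url); it != cache.end()) {
        std::cout << "hit: " << url << '\n';
        return it->second;
    }

    // Pretend the server redirected us once before serving the file.
    DownloadResult res{{url, url + "/redirected"}, "/nix/store/example-source"};

    // Insert a cache entry for every URL in the redirect chain.
    for (auto & u : res.urls)
        cache.insert_or_assign(u, res.storePath);

    return res.storePath;
}

int main()
{
    download("https://example.org/tarball");            // miss, caches both URLs
    download("https://example.org/tarball/redirected"); // hit via the redirect target
}
```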
|
|
@ -341,7 +344,7 @@ struct FileInputScheme : CurlInputScheme
|
|||
the Nix store directly, since there is little deduplication
|
||||
benefit in using the Git cache for single big files like
|
||||
tarballs. */
|
||||
auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName());
|
||||
auto file = downloadFile(store, *input.settings, getStrAttr(input.attrs, "url"), input.getName());
|
||||
|
||||
auto narHash = store->queryPathInfo(file.storePath)->narHash;
|
||||
input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
|
||||
|
|
@ -373,6 +376,7 @@ struct TarballInputScheme : CurlInputScheme
|
|||
auto input(_input);
|
||||
|
||||
auto result = downloadTarball_(
|
||||
*input.settings,
|
||||
getStrAttr(input.attrs, "url"),
|
||||
{},
|
||||
"«" + input.to_string() + "»");
|
||||
|
|
@ -390,7 +394,7 @@ struct TarballInputScheme : CurlInputScheme
|
|||
input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified));
|
||||
|
||||
input.attrs.insert_or_assign("narHash",
|
||||
getTarballCache()->treeHashToNarHash(result.treeHash).to_string(HashFormat::SRI, true));
|
||||
getTarballCache()->treeHashToNarHash(*input.settings, result.treeHash).to_string(HashFormat::SRI, true));
|
||||
|
||||
return {result.accessor, input};
|
||||
}
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ extern "C" {
|
|||
typedef struct nix_flake_settings nix_flake_settings;
|
||||
|
||||
/**
|
||||
* @brief Context and paramaters for parsing a flake reference
|
||||
* @brief Context and parameters for parsing a flake reference
|
||||
* @see nix_flake_reference_parse_flags_free
|
||||
* @see nix_flake_reference_parse_string
|
||||
*/
|
||||
|
|
|
|||
|
|
@ -39,24 +39,16 @@ let
allNodes = mapAttrs (
key: node:
let
hasOverride = overrides ? ${key};
isRelative = node.locked.type or null == "path" && builtins.substring 0 1 node.locked.path != "/";

parentNode = allNodes.${getInputByPath lockFile.root node.parent};

flakeDir =
let
dir = overrides.${key}.dir or node.locked.path or "";
parentDir = parentNode.flakeDir;
in
if node ? parent then parentDir + ("/" + dir) else dir;

sourceInfo =
if overrides ? ${key} then
if hasOverride then
overrides.${key}.sourceInfo
else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/" then
else if isRelative then
parentNode.sourceInfo
// {
outPath = parentNode.sourceInfo.outPath + ("/" + flakeDir);
}
else
# FIXME: remove obsolete node.info.
# Note: lock file entries are always final.

@ -64,7 +56,11 @@ let

subdir = overrides.${key}.dir or node.locked.dir or "";

outPath = sourceInfo + ((if subdir == "" then "" else "/") + subdir);
outPath =
if !hasOverride && isRelative then
parentNode.outPath + (if node.locked.path == "" then "" else "/" + node.locked.path)
else
sourceInfo.outPath + (if subdir == "" then "" else "/" + subdir);

flake = import (outPath + "/flake.nix");

@ -99,9 +95,9 @@ let
assert builtins.isFunction flake.outputs;
result
else
sourceInfo;
sourceInfo // { inherit sourceInfo outPath; };

inherit flakeDir sourceInfo;
inherit outPath sourceInfo;
}
) lockFile.nodes;

@ -234,8 +234,8 @@ static Flake readFlake(
if (auto outputs = vInfo.attrs()->get(sOutputs)) {
expectType(state, nFunction, *outputs->value, outputs->pos);

if (outputs->value->isLambda() && outputs->value->payload.lambda.fun->hasFormals()) {
for (auto & formal : outputs->value->payload.lambda.fun->formals->formals) {
if (outputs->value->isLambda() && outputs->value->lambda().fun->hasFormals()) {
for (auto & formal : outputs->value->lambda().fun->formals->formals) {
if (formal.name != state.sSelf)
flake.inputs.emplace(state.symbols[formal.name], FlakeInput {
.ref = parseFlakeRef(state.fetchSettings, std::string(state.symbols[formal.name]))

@ -258,7 +258,7 @@ static Flake readFlake(
state.symbols[setting.name],
std::string(state.forceStringNoCtx(*setting.value, setting.pos, "")));
else if (setting.value->type() == nPath) {
auto storePath = fetchToStore(*state.store, setting.value->path(), FetchMode::Copy);
auto storePath = fetchToStore(state.fetchSettings, *state.store, setting.value->path(), FetchMode::Copy);
flake.config.settings.emplace(
state.symbols[setting.name],
state.store->printStorePath(storePath));

@ -273,7 +273,7 @@ static Flake readFlake(
Explicit<bool> { state.forceBool(*setting.value, setting.pos, "") });
else if (setting.value->type() == nList) {
std::vector<std::string> ss;
for (auto elem : setting.value->listItems()) {
for (auto elem : setting.value->listView()) {
if (elem->type() != nString)
state.error<TypeError>("list element in flake configuration setting '%s' is %s while a string is expected",
state.symbols[setting.name], showType(*setting.value)).debugThrow();

@ -522,7 +522,7 @@ LockedFlake lockFlake(

/* Resolve relative 'path:' inputs relative to
the source path of the overrider. */
auto overridenSourcePath = hasOverride ? i->second.sourcePath : sourcePath;
auto overriddenSourcePath = hasOverride ? i->second.sourcePath : sourcePath;

/* Respect the "flakeness" of the input even if we
override it. */

@ -544,7 +544,7 @@ LockedFlake lockFlake(
if (!input.ref)
input.ref = FlakeRef::fromAttrs(state.fetchSettings, {{"type", "indirect"}, {"id", std::string(id)}});

auto overridenParentPath =
auto overriddenParentPath =
input.ref->input.isRelative()
? std::optional<InputAttrPath>(hasOverride ? i->second.parentInputAttrPath : inputAttrPathPrefix)
: std::nullopt;

@ -553,8 +553,8 @@ LockedFlake lockFlake(
{
if (auto relativePath = input.ref->input.isRelative()) {
return SourcePath {
overridenSourcePath.accessor,
CanonPath(*relativePath, overridenSourcePath.path.parent().value())
overriddenSourcePath.accessor,
CanonPath(*relativePath, overriddenSourcePath.path.parent().value())
};
} else
return std::nullopt;

@ -589,7 +589,7 @@ LockedFlake lockFlake(

if (oldLock
&& oldLock->originalRef.canonicalize() == input.ref->canonicalize()
&& oldLock->parentInputAttrPath == overridenParentPath
&& oldLock->parentInputAttrPath == overriddenParentPath
&& !hasCliOverride)
{
debug("keeping existing input '%s'", inputAttrPathS);

@ -711,7 +711,7 @@ LockedFlake lockFlake(
inputFlake.lockedRef,
ref,
true,
overridenParentPath);
overriddenParentPath);

node->inputs.insert_or_assign(id, childNode);

@ -760,7 +760,7 @@ LockedFlake lockFlake(
}
}();

auto childNode = make_ref<LockedNode>(lockedRef, ref, false, overridenParentPath);
auto childNode = make_ref<LockedNode>(lockedRef, ref, false, overriddenParentPath);

nodePaths.emplace(childNode, path);

@ -815,7 +815,7 @@ LockedFlake lockFlake(
"Not writing lock file of flake '%s' because it has an unlocked input ('%s'). "
"Use '--allow-dirty-locks' to allow this anyway.", topRef, *unlockedInput);
if (state.fetchSettings.warnDirty)
warn("Not writing lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
warn("not writing lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
} else {
if (!lockFlags.updateLockFile)
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);

@ -15,7 +15,7 @@ const static std::string subDirRegex = subDirElemRegex + "(?:/" + subDirElemRege

std::string FlakeRef::to_string() const
{
std::map<std::string, std::string> extraQuery;
StringMap extraQuery;
if (subdir != "")
extraQuery.insert_or_assign("dir", subdir);
return input.toURLString(extraQuery);

@ -57,18 +57,6 @@ FlakeRef parseFlakeRef(
return flakeRef;
}

std::optional<FlakeRef> maybeParseFlakeRef(
const fetchers::Settings & fetchSettings,
const std::string & url,
const std::optional<Path> & baseDir)
{
try {
return parseFlakeRef(fetchSettings, url, baseDir);
} catch (Error &) {
return {};
}
}

static std::pair<FlakeRef, std::string> fromParsedURL(
const fetchers::Settings & fetchSettings,
ParsedURL && parsedURL,

@ -261,17 +249,6 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
}
}

std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
const fetchers::Settings & fetchSettings,
const std::string & url, const std::optional<Path> & baseDir)
{
try {
return parseFlakeRefWithFragment(fetchSettings, url, baseDir);
} catch (Error & e) {
return {};
}
}

FlakeRef FlakeRef::fromAttrs(
const fetchers::Settings & fetchSettings,
const fetchers::Attrs & attrs)

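The two optional-returning wrappers above are removed by these hunks; a caller that still wants a non-throwing parse can inline the same try/catch at the call site. A minimal caller-side sketch (the helper name is illustrative and not part of this commit):

// Hypothetical helper, equivalent to the removed maybeParseFlakeRef().
static std::optional<FlakeRef> tryParseFlakeRef(
    const fetchers::Settings & fetchSettings,
    const std::string & url,
    const std::optional<Path> & baseDir = {})
{
    try {
        return parseFlakeRef(fetchSettings, url, baseDir);
    } catch (Error &) {
        return std::nullopt;
    }
}
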
@ -133,7 +133,7 @@ struct LockedFlake

/**
* Source tree accessors for nodes that have been fetched in
* lockFlake(); in particular, the root node and the overriden
* lockFlake(); in particular, the root node and the overridden
* inputs.
*/
std::map<ref<Node>, SourcePath> nodePaths;

@ -93,14 +93,6 @@ FlakeRef parseFlakeRef(
bool isFlake = true,
bool preserveRelativePaths = false);

/**
* @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory)
*/
std::optional<FlakeRef> maybeParseFlake(
const fetchers::Settings & fetchSettings,
const std::string & url,
const std::optional<Path> & baseDir = {});

/**
* @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory)
*/

@ -112,14 +104,6 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
bool isFlake = true,
bool preserveRelativePaths = false);

/**
* @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory)
*/
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
const fetchers::Settings & fetchSettings,
const std::string & url,
const std::optional<Path> & baseDir = {});

/**
* @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory)
*/

@ -35,15 +35,17 @@ void printVersion(const std::string & programName);
void printGCWarning();

class Store;
struct MissingPaths;

void printMissing(
ref<Store> store,
const std::vector<DerivedPath> & paths,
Verbosity lvl = lvlInfo);

void printMissing(ref<Store> store, const StorePathSet & willBuild,
const StorePathSet & willSubstitute, const StorePathSet & unknown,
uint64_t downloadSize, uint64_t narSize, Verbosity lvl = lvlInfo);
void printMissing(
ref<Store> store,
const MissingPaths & missing,
Verbosity lvl = lvlInfo);

std::string getArg(const std::string & opt,
Strings::iterator & i, const Strings::iterator & end);

@ -259,7 +259,7 @@ public:
update(*state);
}

/* Check whether an activity has an ancestore with the specified
/* Check whether an activity has an ancestor with the specified
type. */
bool hasAncestor(State & state, ActivityType type, ActivityId act)
{

@ -382,7 +382,7 @@ public:
/**
* Redraw, if the output has changed.
*
* Excessive redrawing is noticable on slow terminals, and it interferes
* Excessive redrawing is noticeable on slow terminals, and it interferes
* with text selection in some terminals, including libvte-based terminal
* emulators.
*/

@ -46,43 +46,41 @@ void printGCWarning()

void printMissing(ref<Store> store, const std::vector<DerivedPath> & paths, Verbosity lvl)
{
uint64_t downloadSize, narSize;
StorePathSet willBuild, willSubstitute, unknown;
store->queryMissing(paths, willBuild, willSubstitute, unknown, downloadSize, narSize);
printMissing(store, willBuild, willSubstitute, unknown, downloadSize, narSize, lvl);
printMissing(store, store->queryMissing(paths), lvl);
}

void printMissing(ref<Store> store, const StorePathSet & willBuild,
const StorePathSet & willSubstitute, const StorePathSet & unknown,
uint64_t downloadSize, uint64_t narSize, Verbosity lvl)
void printMissing(
ref<Store> store,
const MissingPaths & missing,
Verbosity lvl)
{
if (!willBuild.empty()) {
if (willBuild.size() == 1)
if (!missing.willBuild.empty()) {
if (missing.willBuild.size() == 1)
printMsg(lvl, "this derivation will be built:");
else
printMsg(lvl, "these %d derivations will be built:", willBuild.size());
auto sorted = store->topoSortPaths(willBuild);
printMsg(lvl, "these %d derivations will be built:", missing.willBuild.size());
auto sorted = store->topoSortPaths(missing.willBuild);
reverse(sorted.begin(), sorted.end());
for (auto & i : sorted)
printMsg(lvl, " %s", store->printStorePath(i));
}

if (!willSubstitute.empty()) {
const float downloadSizeMiB = downloadSize / (1024.f * 1024.f);
const float narSizeMiB = narSize / (1024.f * 1024.f);
if (willSubstitute.size() == 1) {
if (!missing.willSubstitute.empty()) {
const float downloadSizeMiB = missing.downloadSize / (1024.f * 1024.f);
const float narSizeMiB = missing.narSize / (1024.f * 1024.f);
if (missing.willSubstitute.size() == 1) {
printMsg(lvl, "this path will be fetched (%.2f MiB download, %.2f MiB unpacked):",
downloadSizeMiB,
narSizeMiB);
} else {
printMsg(lvl, "these %d paths will be fetched (%.2f MiB download, %.2f MiB unpacked):",
willSubstitute.size(),
missing.willSubstitute.size(),
downloadSizeMiB,
narSizeMiB);
}
std::vector<const StorePath *> willSubstituteSorted = {};
std::for_each(willSubstitute.begin(), willSubstitute.end(),
std::for_each(missing.willSubstitute.begin(), missing.willSubstitute.end(),
[&](const StorePath &p) { willSubstituteSorted.push_back(&p); });
std::sort(willSubstituteSorted.begin(), willSubstituteSorted.end(),
[](const StorePath *lhs, const StorePath *rhs) {

@ -95,10 +93,10 @@ void printMissing(ref<Store> store, const StorePathSet & willBuild,
printMsg(lvl, " %s", store->printStorePath(*p));
}

if (!unknown.empty()) {
if (!missing.unknown.empty()) {
printMsg(lvl, "don't know how to build these paths%s:",
(settings.readOnlyMode ? " (may be caused by read-only store access)" : ""));
for (auto & i : unknown)
for (auto & i : missing.unknown)
printMsg(lvl, " %s", store->printStorePath(i));
}
}

@ -176,16 +174,6 @@ void initNix(bool loadConfig)
now. In particular, store objects should be readable by
everybody. */
umask(0022);

/* Initialise the PRNG. */
struct timeval tv;
gettimeofday(&tv, 0);
#ifndef _WIN32
srandom(tv.tv_usec);
#endif
srand(tv.tv_usec);

}

@ -327,29 +315,34 @@ int handleExceptions(const std::string & programName, std::function<void()> fun)
std::string error = ANSI_RED "error:" ANSI_NORMAL " ";
try {
try {
fun();
} catch (...) {
/* Subtle: we have to make sure that any `interrupted'
condition is discharged before we reach printMsg()
below, since otherwise it will throw an (uncaught)
exception. */
setInterruptThrown();
throw;
try {
fun();
} catch (...) {
/* Subtle: we have to make sure that any `interrupted'
condition is discharged before we reach printMsg()
below, since otherwise it will throw an (uncaught)
exception. */
setInterruptThrown();
throw;
}
} catch (Exit & e) {
return e.status;
} catch (UsageError & e) {
logError(e.info());
printError("Try '%1% --help' for more information.", programName);
return 1;
} catch (BaseError & e) {
logError(e.info());
return e.info().status;
} catch (std::bad_alloc & e) {
printError(error + "out of memory");
return 1;
} catch (std::exception & e) {
printError(error + e.what());
return 1;
}
} catch (Exit & e) {
return e.status;
} catch (UsageError & e) {
logError(e.info());
printError("Try '%1% --help' for more information.", programName);
return 1;
} catch (BaseError & e) {
logError(e.info());
return e.info().status;
} catch (std::bad_alloc & e) {
printError(error + "out of memory");
return 1;
} catch (std::exception & e) {
printError(error + e.what());
} catch (...) {
/* In case logger also throws just give up. */
return 1;
}

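The new printMissing() overload above consumes an aggregate returned by Store::queryMissing() instead of filling five out-parameters. From the field accesses at these call sites (missing.willBuild, missing.willSubstitute, missing.unknown, missing.downloadSize, missing.narSize) the returned type has roughly this shape; this is a sketch inferred from usage, not the authoritative definition, which lives in the store headers:

struct MissingPaths
{
    StorePathSet willBuild;
    StorePathSet willSubstitute;
    StorePathSet unknown;
    uint64_t downloadSize = 0;
    uint64_t narSize = 0;
};
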
@ -34,6 +34,8 @@ public:
Store * store;
std::string nixDir;
std::string nixStoreDir;
std::string nixStateDir;
std::string nixLogDir;

protected:
void init_local_store()

@ -53,11 +55,13 @@ protected:
#endif

nixStoreDir = nixDir + "/my_nix_store";
nixStateDir = nixDir + "/my_state";
nixLogDir = nixDir + "/my_log";

// Options documented in `nix help-stores`
const char * p1[] = {"store", nixStoreDir.c_str()};
const char * p2[] = {"state", (new std::string(nixDir + "/my_state"))->c_str()};
const char * p3[] = {"log", (new std::string(nixDir + "/my_log"))->c_str()};
const char * p2[] = {"state", nixStateDir.c_str()};
const char * p3[] = {"log", nixLogDir.c_str()};

const char ** params[] = {p1, p2, p3, nullptr};

@ -5,7 +5,6 @@
],
"builder": "/bin/bash",
"env": {
"__json": "{\"builder\":\"/bin/bash\",\"name\":\"advanced-attributes-structured-attrs-defaults\",\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"dev\"],\"system\":\"my-system\"}",
"dev": "/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz",
"out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"
},

@ -22,5 +21,16 @@
"method": "nar"
}
},
"structuredAttrs": {
"builder": "/bin/bash",
"name": "advanced-attributes-structured-attrs-defaults",
"outputHashAlgo": "sha256",
"outputHashMode": "recursive",
"outputs": [
"out",
"dev"
],
"system": "my-system"
},
"system": "my-system"
}

@ -5,7 +5,6 @@
],
"builder": "/bin/bash",
"env": {
"__json": "{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"refs2\":[\"/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g\"],\"disallowedRequisites\":[\"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"allowedRequisites\":[\"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z\"]}},\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}",
"bin": "/04f3da1kmbr67m3gzxikmsl4vjz5zf777sv6m14ahv22r65aac9m",
"dev": "/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz",
"out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"

@ -44,5 +43,62 @@
"method": "nar"
}
},
"structuredAttrs": {
"__darwinAllowLocalNetworking": true,
"__impureHostDeps": [
"/usr/bin/ditto"
],
"__noChroot": true,
"__sandboxProfile": "sandcastle",
"allowSubstitutes": false,
"builder": "/bin/bash",
"exportReferencesGraph": {
"refs1": [
"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"
],
"refs2": [
"/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"
]
},
"impureEnvVars": [
"UNICORN"
],
"name": "advanced-attributes-structured-attrs",
"outputChecks": {
"bin": {
"disallowedReferences": [
"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g"
],
"disallowedRequisites": [
"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"
]
},
"dev": {
"maxClosureSize": 5909,
"maxSize": 789
},
"out": {
"allowedReferences": [
"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"
],
"allowedRequisites": [
"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z"
]
}
},
"outputHashAlgo": "sha256",
"outputHashMode": "recursive",
"outputs": [
"out",
"bin",
"dev"
],
"preferLocalBuild": true,
"requiredSystemFeatures": [
"rainbow",
"uid-range"
],
"system": "my-system"
},
"system": "my-system"
}

@ -5,7 +5,6 @@
],
"builder": "/bin/bash",
"env": {
"__json": "{\"builder\":\"/bin/bash\",\"name\":\"advanced-attributes-structured-attrs-defaults\",\"outputs\":[\"out\",\"dev\"],\"system\":\"my-system\"}",
"dev": "/nix/store/8bazivnbipbyi569623skw5zm91z6kc2-advanced-attributes-structured-attrs-defaults-dev",
"out": "/nix/store/f8f8nvnx32bxvyxyx2ff7akbvwhwd9dw-advanced-attributes-structured-attrs-defaults"
},

@ -20,5 +19,14 @@
"path": "/nix/store/f8f8nvnx32bxvyxyx2ff7akbvwhwd9dw-advanced-attributes-structured-attrs-defaults"
}
},
"structuredAttrs": {
"builder": "/bin/bash",
"name": "advanced-attributes-structured-attrs-defaults",
"outputs": [
"out",
"dev"
],
"system": "my-system"
},
"system": "my-system"
}

@ -5,7 +5,6 @@
],
"builder": "/bin/bash",
"env": {
"__json": "{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"refs2\":[\"/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar\"],\"disallowedRequisites\":[\"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"allowedRequisites\":[\"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev\"]}},\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}",
"bin": "/nix/store/33qms3h55wlaspzba3brlzlrm8m2239g-advanced-attributes-structured-attrs-bin",
"dev": "/nix/store/wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev",
"out": "/nix/store/7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs"

@ -41,5 +40,60 @@
"path": "/nix/store/7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs"
}
},
"structuredAttrs": {
"__darwinAllowLocalNetworking": true,
"__impureHostDeps": [
"/usr/bin/ditto"
],
"__noChroot": true,
"__sandboxProfile": "sandcastle",
"allowSubstitutes": false,
"builder": "/bin/bash",
"exportReferencesGraph": {
"refs1": [
"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"
],
"refs2": [
"/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"
]
},
"impureEnvVars": [
"UNICORN"
],
"name": "advanced-attributes-structured-attrs",
"outputChecks": {
"bin": {
"disallowedReferences": [
"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar"
],
"disallowedRequisites": [
"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"
]
},
"dev": {
"maxClosureSize": 5909,
"maxSize": 789
},
"out": {
"allowedReferences": [
"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"
],
"allowedRequisites": [
"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev"
]
}
},
"outputs": [
"out",
"bin",
"dev"
],
"preferLocalBuild": true,
"requiredSystemFeatures": [
"rainbow",
"uid-range"
],
"system": "my-system"
},
"system": "my-system"
}

@ -87,7 +87,7 @@ TEST(machines, getMachinesWithCommentsAndSemicolonSeparator) {

TEST(machines, getMachinesWithFunnyWhitespace) {
auto actual = Machine::parseConfig({},
" # commment ; comment\n"
" # comment ; comment\n"
" nix@scratchy.labs.cs.uu.nl ; nix@itchy.labs.cs.uu.nl \n"
"\n \n"
"\n ;;; \n"

@ -67,17 +67,21 @@ TEST_F(nix_api_store_test, ReturnsValidStorePath)
ASSERT_NE(result, nullptr);
ASSERT_STREQ("name", result->path.name().data());
ASSERT_STREQ(PATH_SUFFIX.substr(1).c_str(), result->path.to_string().data());
nix_store_path_free(result);
}

TEST_F(nix_api_store_test, SetsLastErrCodeToNixOk)
{
nix_store_parse_path(ctx, store, (nixStoreDir + PATH_SUFFIX).c_str());
StorePath * path = nix_store_parse_path(ctx, store, (nixStoreDir + PATH_SUFFIX).c_str());
ASSERT_EQ(ctx->last_err_code, NIX_OK);
nix_store_path_free(path);
}

TEST_F(nix_api_store_test, DoesNotCrashWhenContextIsNull)
{
ASSERT_NO_THROW(nix_store_parse_path(ctx, store, (nixStoreDir + PATH_SUFFIX).c_str()));
StorePath * path = nullptr;
ASSERT_NO_THROW(path = nix_store_parse_path(ctx, store, (nixStoreDir + PATH_SUFFIX).c_str()));
nix_store_path_free(path);
}

TEST_F(nix_api_store_test, get_version)

@ -115,6 +119,7 @@ TEST_F(nix_api_store_test, nix_store_is_valid_path_not_in_store)
{
StorePath * path = nix_store_parse_path(ctx, store, (nixStoreDir + PATH_SUFFIX).c_str());
ASSERT_EQ(false, nix_store_is_valid_path(ctx, store, path));
nix_store_path_free(path);
}

TEST_F(nix_api_store_test, nix_store_real_path)

@ -46,7 +46,7 @@ TEST(OutputsSpec, names_underscore) {
ASSERT_EQ(expected.to_string(), str);
}

TEST(OutputsSpec, names_numberic) {
TEST(OutputsSpec, names_numeric) {
std::string_view str = "01";
OutputsSpec expected = OutputsSpec::Names { "01" };
ASSERT_EQ(OutputsSpec::parse(str), expected);

@ -126,7 +126,7 @@ TEST_DONT_PARSE(star_second, "^foo,*")

#undef TEST_DONT_PARSE

TEST(ExtendedOutputsSpec, defeault) {
TEST(ExtendedOutputsSpec, default) {
std::string_view str = "foo";
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str);
ASSERT_EQ(prefix, "foo");

src/libstore/build/derivation-building-goal.cc (new file, 1257 lines)
File diff suppressed because it is too large

@ -12,7 +12,7 @@ DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(
Worker & worker,
RepairFlag repair,
std::optional<ContentAddress> ca)
: Goal(worker)
: Goal(worker, init())
, id(id)
{
name = fmt("substitution of '%s'", id.to_string());

@ -139,7 +139,7 @@ Goal::Co DrvOutputSubstitutionGoal::realisationFetched(Goals waitees, std::share

if (nrFailed > 0) {
debug("The output path of the derivation output '%s' could not be substituted", id.to_string());
co_return amDone(nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed);
co_return amDone(nrNoSubstituters > 0 ? ecNoSubstituters : ecFailed);
}

worker.store.registerDrvOutput(*outputInfo);

@ -30,7 +30,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
if (i->exitCode != Goal::ecSuccess) {
#ifndef _WIN32 // TODO Enable building on Windows
if (auto i2 = dynamic_cast<DerivationGoal *>(i.get()))
failed.insert(printStorePath(i2->drvPath));
failed.insert(i2->drvReq->to_string(*this));
else
#endif
if (auto i2 = dynamic_cast<PathSubstitutionGoal *>(i.get()))

@ -151,11 +151,11 @@ Goal::Done Goal::amDone(ExitCode result, std::optional<Error> ex)
trace("done");
assert(top_co);
assert(exitCode == ecBusy);
assert(result == ecSuccess || result == ecFailed || result == ecNoSubstituters || result == ecIncompleteClosure);
assert(result == ecSuccess || result == ecFailed || result == ecNoSubstituters);
exitCode = result;

if (ex) {
if (!waiters.empty())
if (!preserveException && !waiters.empty())
logError(ex->info());
else
this->ex = std::move(*ex);

@ -170,12 +170,10 @@ Goal::Done Goal::amDone(ExitCode result, std::optional<Error> ex)

goal->trace(fmt("waitee '%s' done; %d left", name, goal->waitees.size()));

if (result == ecFailed || result == ecNoSubstituters || result == ecIncompleteClosure) ++goal->nrFailed;
if (result == ecFailed || result == ecNoSubstituters) ++goal->nrFailed;

if (result == ecNoSubstituters) ++goal->nrNoSubstituters;

if (result == ecIncompleteClosure) ++goal->nrIncompleteClosure;

if (goal->waitees.empty()) {
worker.wakeUp(goal);
} else if (result == ecFailed && !settings.keepGoing) {

@ -12,7 +12,7 @@
namespace nix {

PathSubstitutionGoal::PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional<ContentAddress> ca)
: Goal(worker)
: Goal(worker, init())
, storePath(storePath)
, repair(repair)
, ca(ca)

@ -181,7 +181,7 @@ Goal::Co PathSubstitutionGoal::tryToRun(StorePath subPath, nix::ref<Store> sub,

if (nrFailed > 0) {
co_return done(
nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed,
nrNoSubstituters > 0 ? ecNoSubstituters : ecFailed,
BuildResult::DependencyFailed,
fmt("some references of path '%s' could not be realised", worker.store.printStorePath(storePath)));
}

@ -4,6 +4,7 @@
#include "nix/store/build/substitution-goal.hh"
#include "nix/store/build/drv-output-substitution-goal.hh"
#include "nix/store/build/derivation-goal.hh"
#include "nix/store/build/derivation-building-goal.hh"
#ifndef _WIN32 // TODO Enable building on Windows
# include "nix/store/build/hook-instance.hh"
#endif

@ -41,13 +42,23 @@ Worker::~Worker()
assert(expectedNarSize == 0);
}

template<class G, typename... Args>
std::shared_ptr<G> Worker::initGoalIfNeeded(std::weak_ptr<G> & goal_weak, Args && ...args)
{
if (auto goal = goal_weak.lock()) return goal;

auto goal = std::make_shared<G>(args...);
goal_weak = goal;
wakeUp(goal);
return goal;
}

std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
const StorePath & drvPath,
ref<const SingleDerivedPath> drvReq,
const OutputsSpec & wantedOutputs,
std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal)
{
std::weak_ptr<DerivationGoal> & goal_weak = derivationGoals[drvPath];
std::weak_ptr<DerivationGoal> & goal_weak = derivationGoals.ensureSlot(*drvReq).value;
std::shared_ptr<DerivationGoal> goal = goal_weak.lock();
if (!goal) {
goal = mkDrvGoal();

@ -60,29 +71,30 @@ std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
}

std::shared_ptr<DerivationGoal> Worker::makeDerivationGoal(const StorePath & drvPath,
std::shared_ptr<DerivationGoal> Worker::makeDerivationGoal(ref<const SingleDerivedPath> drvReq,
const OutputsSpec & wantedOutputs, BuildMode buildMode)
{
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
return std::make_shared<DerivationGoal>(drvPath, wantedOutputs, *this, buildMode);
return makeDerivationGoalCommon(drvReq, wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
return std::make_shared<DerivationGoal>(drvReq, wantedOutputs, *this, buildMode);
});
}

std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath & drvPath,
const BasicDerivation & drv, const OutputsSpec & wantedOutputs, BuildMode buildMode)
{
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
return makeDerivationGoalCommon(makeConstantStorePathRef(drvPath), wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
return std::make_shared<DerivationGoal>(drvPath, drv, wantedOutputs, *this, buildMode);
});
}

std::shared_ptr<PathSubstitutionGoal> Worker::makePathSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional<ContentAddress> ca)
std::shared_ptr<DerivationBuildingGoal> Worker::makeDerivationBuildingGoal(const StorePath & drvPath,
const Derivation & drv, BuildMode buildMode)
{
std::weak_ptr<PathSubstitutionGoal> & goal_weak = substitutionGoals[path];
std::weak_ptr<DerivationBuildingGoal> & goal_weak = derivationBuildingGoals[drvPath];
auto goal = goal_weak.lock(); // FIXME
if (!goal) {
goal = std::make_shared<PathSubstitutionGoal>(path, *this, repair, ca);
goal = std::make_shared<DerivationBuildingGoal>(drvPath, drv, *this, buildMode);
goal_weak = goal;
wakeUp(goal);
}

@ -90,16 +102,15 @@ std::shared_ptr<PathSubstitutionGoal> Worker::makePathSubstitutionGoal(const Sto
}

std::shared_ptr<PathSubstitutionGoal> Worker::makePathSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional<ContentAddress> ca)
{
return initGoalIfNeeded(substitutionGoals[path], path, *this, repair, ca);
}

std::shared_ptr<DrvOutputSubstitutionGoal> Worker::makeDrvOutputSubstitutionGoal(const DrvOutput& id, RepairFlag repair, std::optional<ContentAddress> ca)
{
std::weak_ptr<DrvOutputSubstitutionGoal> & goal_weak = drvOutputSubstitutionGoals[id];
auto goal = goal_weak.lock(); // FIXME
if (!goal) {
goal = std::make_shared<DrvOutputSubstitutionGoal>(id, *this, repair, ca);
goal_weak = goal;
wakeUp(goal);
}
return goal;
return initGoalIfNeeded(drvOutputSubstitutionGoals[id], id, *this, repair, ca);
}

@ -107,10 +118,7 @@ GoalPtr Worker::makeGoal(const DerivedPath & req, BuildMode buildMode)
{
return std::visit(overloaded {
[&](const DerivedPath::Built & bfd) -> GoalPtr {
if (auto bop = std::get_if<DerivedPath::Opaque>(&*bfd.drvPath))
return makeDerivationGoal(bop->path, bfd.outputs, buildMode);
else
throw UnimplementedError("Building dynamic derivations in one shot is not yet implemented.");
return makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode);
},
[&](const DerivedPath::Opaque & bo) -> GoalPtr {
return makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair);

@ -119,27 +127,48 @@ GoalPtr Worker::makeGoal(const DerivedPath & req, BuildMode buildMode)
}

template<typename K, typename V, typename F>
static void cullMap(std::map<K, V> & goalMap, F f)
{
for (auto i = goalMap.begin(); i != goalMap.end();)
if (!f(i->second))
i = goalMap.erase(i);
else ++i;
}

template<typename K, typename G>
static void removeGoal(std::shared_ptr<G> goal, std::map<K, std::weak_ptr<G>> & goalMap)
{
/* !!! inefficient */
for (auto i = goalMap.begin();
i != goalMap.end(); )
if (i->second.lock() == goal) {
auto j = i; ++j;
goalMap.erase(i);
i = j;
}
else ++i;
cullMap(goalMap, [&](const std::weak_ptr<G> & gp) -> bool {
return gp.lock() != goal;
});
}

template<typename K>
static void removeGoal(std::shared_ptr<DerivationGoal> goal, std::map<K, DerivedPathMap<std::weak_ptr<DerivationGoal>>::ChildNode> & goalMap);

template<typename K>
static void removeGoal(std::shared_ptr<DerivationGoal> goal, std::map<K, DerivedPathMap<std::weak_ptr<DerivationGoal>>::ChildNode> & goalMap)
{
/* !!! inefficient */
cullMap(goalMap, [&](DerivedPathMap<std::weak_ptr<DerivationGoal>>::ChildNode & node) -> bool {
if (node.value.lock() == goal)
node.value.reset();
removeGoal(goal, node.childMap);
return !node.value.expired() || !node.childMap.empty();
});
}

void Worker::removeGoal(GoalPtr goal)
{
if (auto drvGoal = std::dynamic_pointer_cast<DerivationGoal>(goal))
nix::removeGoal(drvGoal, derivationGoals);
else
if (auto subGoal = std::dynamic_pointer_cast<PathSubstitutionGoal>(goal))
nix::removeGoal(drvGoal, derivationGoals.map);
else if (auto drvBuildingGoal = std::dynamic_pointer_cast<DerivationBuildingGoal>(goal))
nix::removeGoal(drvBuildingGoal, derivationBuildingGoals);
else if (auto subGoal = std::dynamic_pointer_cast<PathSubstitutionGoal>(goal))
nix::removeGoal(subGoal, substitutionGoals);
else if (auto subGoal = std::dynamic_pointer_cast<DrvOutputSubstitutionGoal>(goal))
nix::removeGoal(subGoal, drvOutputSubstitutionGoals);

@ -202,6 +231,9 @@ void Worker::childStarted(GoalPtr goal, const std::set<MuxablePipePollState::Com
case JobCategory::Build:
nrLocalBuilds++;
break;
case JobCategory::Administration:
/* Intentionally not limited, see docs */
break;
default:
unreachable();
}

@ -225,6 +257,9 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers)
assert(nrLocalBuilds > 0);
nrLocalBuilds--;
break;
case JobCategory::Administration:
/* Intentionally not limited, see docs */
break;
default:
unreachable();
}

@ -279,7 +314,7 @@ void Worker::run(const Goals & _topGoals)
topGoals.insert(i);
if (auto goal = dynamic_cast<DerivationGoal *>(i.get())) {
topPaths.push_back(DerivedPath::Built {
.drvPath = makeConstantStorePathRef(goal->drvPath),
.drvPath = goal->drvReq,
.outputs = goal->wantedOutputs,
});
} else

@ -289,9 +324,7 @@ void Worker::run(const Goals & _topGoals)
}

/* Call queryMissing() to efficiently query substitutes. */
StorePathSet willBuild, willSubstitute, unknown;
uint64_t downloadSize, narSize;
store.queryMissing(topPaths, willBuild, willSubstitute, unknown, downloadSize, narSize);
store.queryMissing(topPaths);

debug("entered goal loop");

@ -327,23 +360,14 @@ void Worker::run(const Goals & _topGoals)
else if (awake.empty() && 0U == settings.maxBuildJobs) {
if (getMachines().empty())
throw Error(
R"(
Unable to start any build;
either increase '--max-jobs' or enable remote builds.

For more information run 'man nix.conf' and search for '/machines'.
)"
);
"Unable to start any build; either increase '--max-jobs' or enable remote builds.\n"
"\n"
"For more information run 'man nix.conf' and search for '/machines'.");
else
throw Error(
R"(
Unable to start any build;
remote machines may not have all required system features.

For more information run 'man nix.conf' and search for '/machines'.
)"
);

"Unable to start any build; remote machines may not have all required system features.\n"
"\n"
"For more information run 'man nix.conf' and search for '/machines'.");
} else assert(!awake.empty());
}

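Two small helpers introduced above factor out repeated patterns: initGoalIfNeeded() is the look-up-or-create-and-wake step shared by the make*Goal() methods, and cullMap() is the erase-while-iterating loop used by removeGoal(). As defined above, cullMap() keeps exactly the entries whose value satisfies the predicate; a tiny standalone illustration (not part of the commit):

// Illustration only: after this call, m contains just {1, "keep"}.
std::map<int, std::string> m{{1, "keep"}, {2, "drop"}};
cullMap(m, [](const std::string & v) { return v == "keep"; });
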
@ -949,14 +949,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case WorkerProto::Op::QueryMissing: {
auto targets = WorkerProto::Serialise<DerivedPaths>::read(*store, rconn);
logger->startWork();
StorePathSet willBuild, willSubstitute, unknown;
uint64_t downloadSize, narSize;
store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize);
auto missing = store->queryMissing(targets);
logger->stopWork();
WorkerProto::write(*store, wconn, willBuild);
WorkerProto::write(*store, wconn, willSubstitute);
WorkerProto::write(*store, wconn, unknown);
conn.to << downloadSize << narSize;
WorkerProto::write(*store, wconn, missing.willBuild);
WorkerProto::write(*store, wconn, missing.willSubstitute);
WorkerProto::write(*store, wconn, missing.unknown);
conn.to << missing.downloadSize << missing.narSize;
break;
}

@ -412,7 +412,7 @@ Derivation parseDerivation(
expect(str, "rvWithVersion(");
auto versionS = parseString(str);
if (*versionS == "xp-dyn-drv") {
// Only verison we have so far
// Only version we have so far
version = DerivationATermVersion::DynamicDerivations;
xpSettings.require(Xp::DynamicDerivations);
} else {

@ -553,7 +553,7 @@ static void unparseDerivedPathMapNode(const StoreDirConfig & store, std::string
* derivation?
*
* In other words, does it on the output of derivation that is itself an
* ouput of a derivation? This corresponds to a dependency that is an
* output of a derivation? This corresponds to a dependency that is an
* inductive derived path with more than one layer of
* `DerivedPath::Built`.
*/

@ -1333,6 +1333,11 @@ nlohmann::json Derivation::toJSON(const StoreDirConfig & store) const
res["args"] = args;
res["env"] = env;

if (auto it = env.find("__json"); it != env.end()) {
res["env"].erase("__json");
res["structuredAttrs"] = nlohmann::json::parse(it->second);
}

return res;
}

@ -1396,7 +1401,17 @@ Derivation Derivation::fromJSON(
res.platform = getString(valueAt(json, "system"));
res.builder = getString(valueAt(json, "builder"));
res.args = getStringList(valueAt(json, "args"));
res.env = getStringMap(valueAt(json, "env"));

auto envJson = valueAt(json, "env");
try {
res.env = getStringMap(envJson);
} catch (Error & e) {
e.addTrace({}, "while reading key 'env'");
throw;
}

if (auto structuredAttrs = get(json, "structuredAttrs"))
res.env.insert_or_assign("__json", structuredAttrs->dump());

return res;
}

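The toJSON()/fromJSON() hunks above make structured attributes round-trip through the JSON representation: serialising lifts env["__json"] out into a top-level "structuredAttrs" object, and parsing folds a "structuredAttrs" object back into env["__json"]. A condensed, self-contained sketch of that round trip (illustrative only; it mirrors the logic above but uses plain standard containers instead of the surrounding Nix types):

#include <nlohmann/json.hpp>
#include <map>
#include <string>

int main()
{
    std::map<std::string, std::string> env{{"__json", R"({"name":"example"})"}};

    // Serialise: surface env["__json"] as a structured object.
    nlohmann::json res;
    res["env"] = env;
    if (auto it = env.find("__json"); it != env.end()) {
        res["env"].erase("__json");
        res["structuredAttrs"] = nlohmann::json::parse(it->second);
    }

    // Parse: fold structuredAttrs back into the environment map.
    std::map<std::string, std::string> env2 = res["env"];
    if (res.contains("structuredAttrs"))
        env2.insert_or_assign("__json", res["structuredAttrs"].dump());

    return 0;
}
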
@ -52,6 +52,7 @@ typename DerivedPathMap<V>::ChildNode * DerivedPathMap<V>::findSlot(const Single

// instantiations

#include "nix/store/build/derivation-goal.hh"
namespace nix {

template<>

@ -68,4 +69,7 @@ std::strong_ordering DerivedPathMap<StringSet>::ChildNode::operator <=> (
template struct DerivedPathMap<StringSet>::ChildNode;
template struct DerivedPathMap<StringSet>;

template struct DerivedPathMap<std::weak_ptr<DerivationGoal>>;

};

|
|||
#endif
|
||||
|
||||
#ifdef __linux__
|
||||
# include "nix/util/namespaces.hh"
|
||||
# include "nix/util/linux-namespaces.hh"
|
||||
#endif
|
||||
|
||||
#include <unistd.h>
|
||||
|
|
|
|||
|
|
@ -790,7 +790,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
|
|||
deleteFromStore(path.to_string());
|
||||
referrersCache.erase(path);
|
||||
} catch (PathInUse &e) {
|
||||
// If we end up here, it's likely a new occurence
|
||||
// If we end up here, it's likely a new occurrence
|
||||
// of https://github.com/NixOS/nix/issues/11923
|
||||
printError("BUG: %s", e.what());
|
||||
}
|
||||
|
|
|
|||
|
|
@ -85,7 +85,7 @@ Settings::Settings()
|
|||
builders = concatStringsSep("\n", ss);
|
||||
}
|
||||
|
||||
#if defined(__linux__) && defined(SANDBOX_SHELL)
|
||||
#if (defined(__linux__) || defined(__FreeBSD__)) && defined(SANDBOX_SHELL)
|
||||
sandboxPaths = tokenizeString<StringSet>("/bin/sh=" SANDBOX_SHELL);
|
||||
#endif
|
||||
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ struct BuildResult
{
/**
* @note This is directly used in the nix-store --serve protocol.
* That means we need to worry about compatability across versions.
* That means we need to worry about compatibility across versions.
* Therefore, don't remove status codes, and only add new status
* codes at the end of the list.
*/

src/libstore/include/nix/store/build/derivation-building-goal.hh (new file, 194 lines)
@ -0,0 +1,194 @@
#pragma once
///@file

#include "nix/store/parsed-derivations.hh"
#include "nix/store/derivations.hh"
#include "nix/store/derivation-options.hh"
#include "nix/store/build/derivation-building-misc.hh"
#include "nix/store/outputs-spec.hh"
#include "nix/store/store-api.hh"
#include "nix/store/pathlocks.hh"
#include "nix/store/build/goal.hh"

namespace nix {

using std::map;

#ifndef _WIN32 // TODO enable build hook on Windows
struct HookInstance;
struct DerivationBuilder;
#endif

typedef enum {rpAccept, rpDecline, rpPostpone} HookReply;

/** Used internally */
void runPostBuildHook(
Store & store,
Logger & logger,
const StorePath & drvPath,
const StorePathSet & outputPaths);

/**
* A goal for building some or all of the outputs of a derivation.
*/
struct DerivationBuildingGoal : public Goal
{
/** The path of the derivation. */
StorePath drvPath;

/**
* The derivation stored at drvPath.
*/
std::unique_ptr<Derivation> drv;

std::unique_ptr<StructuredAttrs> parsedDrv;
std::unique_ptr<DerivationOptions> drvOptions;

/**
* The remainder is state held during the build.
*/

/**
* Locks on (fixed) output paths.
*/
PathLocks outputLocks;

/**
* All input paths (that is, the union of FS closures of the
* immediate input paths).
*/
StorePathSet inputPaths;

std::map<std::string, InitialOutput> initialOutputs;

/**
* File descriptor for the log file.
*/
AutoCloseFD fdLogFile;
std::shared_ptr<BufferedSink> logFileSink, logSink;

/**
* Number of bytes received from the builder's stdout/stderr.
*/
unsigned long logSize;

/**
* The most recent log lines.
*/
std::list<std::string> logTail;

std::string currentLogLine;
size_t currentLogLinePos = 0; // to handle carriage return

std::string currentHookLine;

#ifndef _WIN32 // TODO enable build hook on Windows
/**
* The build hook.
*/
std::unique_ptr<HookInstance> hook;

std::unique_ptr<DerivationBuilder> builder;
#endif

BuildMode buildMode;

std::unique_ptr<MaintainCount<uint64_t>> mcRunningBuilds;

std::unique_ptr<Activity> act;

/**
* Activity that denotes waiting for a lock.
*/
std::unique_ptr<Activity> actLock;

std::map<ActivityId, Activity> builderActivities;

/**
* The remote machine on which we're building.
*/
std::string machineName;

DerivationBuildingGoal(const StorePath & drvPath, const Derivation & drv,
Worker & worker,
BuildMode buildMode = bmNormal);
~DerivationBuildingGoal();

void timedOut(Error && ex) override;

std::string key() override;

/**
* The states.
*/
Co gaveUpOnSubstitution();
Co tryToBuild();
Co hookDone();

/**
* Is the build hook willing to perform the build?
*/
HookReply tryBuildHook();

/**
* Open a log file and a pipe to it.
*/
Path openLogFile();

/**
* Close the log file.
*/
void closeLogFile();

bool isReadDesc(Descriptor fd);

/**
* Callback used by the worker to write to the log.
*/
void handleChildOutput(Descriptor fd, std::string_view data) override;
void handleEOF(Descriptor fd) override;
void flushLine();

/**
* Wrappers around the corresponding Store methods that first consult the
* derivation. This is currently needed because when there is no drv file
* there also is no DB entry.
*/
std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap();

/**
* Update 'initialOutputs' to determine the current status of the
* outputs of the derivation. Also returns a Boolean denoting
* whether all outputs are valid and non-corrupt, and a
* 'SingleDrvOutputs' structure containing the valid outputs.
*/
std::pair<bool, SingleDrvOutputs> checkPathValidity();

/**
* Aborts if any output is not valid or corrupt, and otherwise
* returns a 'SingleDrvOutputs' structure containing all outputs.
*/
SingleDrvOutputs assertPathValidity();

/**
* Forcibly kill the child process, if any.
*/
void killChild();

void started();

Done done(
BuildResult::Status status,
SingleDrvOutputs builtOutputs = {},
std::optional<Error> ex = {});

void appendLogTailErrorMsg(std::string & msg);

StorePathSet exportReferences(const StorePathSet & storePaths);

JobCategory jobCategory() const override {
return JobCategory::Build;
};
};

}

@ -1,6 +1,6 @@
#pragma once
/**
* @file Misc type defitions for both local building and remote (RPC building)
* @file Misc type definitions for both local building and remote (RPC building)
*/

#include "nix/util/hash.hh"

@ -14,13 +14,6 @@ namespace nix {
|
|||
|
||||
using std::map;
|
||||
|
||||
#ifndef _WIN32 // TODO enable build hook on Windows
|
||||
struct HookInstance;
|
||||
struct DerivationBuilder;
|
||||
#endif
|
||||
|
||||
typedef enum {rpAccept, rpDecline, rpPostpone} HookReply;
|
||||
|
||||
/** Used internally */
|
||||
void runPostBuildHook(
|
||||
Store & store,
|
||||
|
|
@ -33,13 +26,8 @@ void runPostBuildHook(
|
|||
*/
|
||||
struct DerivationGoal : public Goal
|
||||
{
|
||||
/**
|
||||
* Whether to use an on-disk .drv file.
|
||||
*/
|
||||
bool useDerivation;
|
||||
|
||||
/** The path of the derivation. */
|
||||
StorePath drvPath;
|
||||
ref<const SingleDerivedPath> drvReq;
|
||||
|
||||
/**
|
||||
* The specific outputs that we need to build.
|
||||
|
|
@ -54,7 +42,7 @@ struct DerivationGoal : public Goal
|
|||
* The goal state machine is progressing based on the current value of
|
||||
* `wantedOutputs. No actions are needed.
|
||||
*/
|
||||
OutputsUnmodifedDontNeed,
|
||||
OutputsUnmodifiedDontNeed,
|
||||
/**
|
||||
* `wantedOutputs` has been extended, but the state machine is
|
||||
* proceeding according to its old value, so we need to restart.
|
||||
|
|
@ -71,116 +59,32 @@ struct DerivationGoal : public Goal
|
|||
/**
|
||||
* Whether additional wanted outputs have been added.
|
||||
*/
|
||||
NeedRestartForMoreOutputs needRestart = NeedRestartForMoreOutputs::OutputsUnmodifedDontNeed;
|
||||
NeedRestartForMoreOutputs needRestart = NeedRestartForMoreOutputs::OutputsUnmodifiedDontNeed;
|
||||
|
||||
/**
|
||||
* See `retrySubstitution`; just for that field.
|
||||
*/
|
||||
enum RetrySubstitution {
|
||||
/**
|
||||
* No issues have yet arose, no need to restart.
|
||||
*/
|
||||
NoNeed,
|
||||
/**
|
||||
* Something failed and there is an incomplete closure. Let's retry
|
||||
* substituting.
|
||||
*/
|
||||
YesNeed,
|
||||
/**
|
||||
* We are current or have already retried substitution, and whether or
|
||||
* not something goes wrong we will not retry again.
|
||||
*/
|
||||
AlreadyRetried,
|
||||
};
|
||||
|
||||
/**
|
||||
* Whether to retry substituting the outputs after building the
|
||||
* inputs. This is done in case of an incomplete closure.
|
||||
*/
|
||||
RetrySubstitution retrySubstitution = RetrySubstitution::NoNeed;
|
||||
|
||||
/**
|
||||
* The derivation stored at drvPath.
|
||||
* The derivation stored at `drvReq`.
|
||||
*/
|
||||
std::unique_ptr<Derivation> drv;
|
||||
|
||||
std::unique_ptr<StructuredAttrs> parsedDrv;
|
||||
std::unique_ptr<DerivationOptions> drvOptions;
|
||||
|
||||
/**
|
||||
* The remainder is state held during the build.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Locks on (fixed) output paths.
|
||||
*/
|
||||
PathLocks outputLocks;
|
||||
|
||||
/**
|
||||
* All input paths (that is, the union of FS closures of the
|
||||
* immediate input paths).
|
||||
*/
|
||||
StorePathSet inputPaths;
|
||||
|
||||
std::map<std::string, InitialOutput> initialOutputs;
|
||||
|
||||
/**
|
||||
* File descriptor for the log file.
|
||||
*/
|
||||
AutoCloseFD fdLogFile;
|
||||
std::shared_ptr<BufferedSink> logFileSink, logSink;
|
||||
|
||||
/**
|
||||
* Number of bytes received from the builder's stdout/stderr.
|
||||
*/
|
||||
unsigned long logSize;
|
||||
|
||||
/**
|
||||
* The most recent log lines.
|
||||
*/
|
||||
std::list<std::string> logTail;
|
||||
|
||||
std::string currentLogLine;
|
||||
size_t currentLogLinePos = 0; // to handle carriage return
|
||||
|
||||
std::string currentHookLine;
|
||||
|
||||
#ifndef _WIN32 // TODO enable build hook on Windows
|
||||
/**
|
||||
* The build hook.
|
||||
*/
|
||||
std::unique_ptr<HookInstance> hook;
|
||||
|
||||
std::unique_ptr<DerivationBuilder> builder;
|
||||
#endif
|
||||
|
||||
BuildMode buildMode;
|
||||
|
||||
std::unique_ptr<MaintainCount<uint64_t>> mcExpectedBuilds, mcRunningBuilds;
|
||||
std::unique_ptr<MaintainCount<uint64_t>> mcExpectedBuilds;
|
||||
|
||||
std::unique_ptr<Activity> act;
|
||||
|
||||
/**
|
||||
* Activity that denotes waiting for a lock.
|
||||
*/
|
||||
std::unique_ptr<Activity> actLock;
|
||||
|
||||
std::map<ActivityId, Activity> builderActivities;
|
||||
|
||||
/**
|
||||
* The remote machine on which we're building.
|
||||
*/
|
||||
std::string machineName;
|
||||
|
||||
DerivationGoal(const StorePath & drvPath,
|
||||
DerivationGoal(ref<const SingleDerivedPath> drvReq,
|
||||
const OutputsSpec & wantedOutputs, Worker & worker,
|
||||
BuildMode buildMode = bmNormal);
|
||||
DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
|
||||
const OutputsSpec & wantedOutputs, Worker & worker,
|
||||
BuildMode buildMode = bmNormal);
|
||||
~DerivationGoal();
|
||||
~DerivationGoal() = default;
|
||||
|
||||
void timedOut(Error && ex) override;
|
||||
void timedOut(Error && ex) override { unreachable(); };
|
||||
|
||||
std::string key() override;
|
||||
|
||||
|
|
@ -192,43 +96,16 @@ struct DerivationGoal : public Goal
    /**
     * The states.
     */
    Co init() override;
    Co haveDerivation();
    Co gaveUpOnSubstitution();
    Co tryToBuild();
    Co hookDone();

    /**
     * Is the build hook willing to perform the build?
     */
    HookReply tryBuildHook();

    /**
     * Open a log file and a pipe to it.
     */
    Path openLogFile();

    /**
     * Close the log file.
     */
    void closeLogFile();

    bool isReadDesc(Descriptor fd);

    /**
     * Callback used by the worker to write to the log.
     */
    void handleChildOutput(Descriptor fd, std::string_view data) override;
    void handleEOF(Descriptor fd) override;
    void flushLine();
    Co loadDerivation();
    Co haveDerivation(StorePath drvPath);

    /**
     * Wrappers around the corresponding Store methods that first consult the
     * derivation. This is currently needed because when there is no drv file
     * there also is no DB entry.
     */
    std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap();
    OutputPathMap queryDerivationOutputMap();
    std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & drvPath);
    OutputPathMap queryDerivationOutputMap(const StorePath & drvPath);

    /**
     * Update 'initialOutputs' to determine the current status of the
@ -236,34 +113,24 @@ struct DerivationGoal : public Goal
     * whether all outputs are valid and non-corrupt, and a
     * 'SingleDrvOutputs' structure containing the valid outputs.
     */
    std::pair<bool, SingleDrvOutputs> checkPathValidity();
    std::pair<bool, SingleDrvOutputs> checkPathValidity(const StorePath & drvPath);

    /**
     * Aborts if any output is not valid or corrupt, and otherwise
     * returns a 'SingleDrvOutputs' structure containing all outputs.
     */
    SingleDrvOutputs assertPathValidity();
    SingleDrvOutputs assertPathValidity(const StorePath & drvPath);

    /**
     * Forcibly kill the child process, if any.
     */
    void killChild();

    Co repairClosure();

    void started();
    Co repairClosure(StorePath drvPath);

    Done done(
        const StorePath & drvPath,
        BuildResult::Status status,
        SingleDrvOutputs builtOutputs = {},
        std::optional<Error> ex = {});

    void appendLogTailErrorMsg(std::string & msg);

    StorePathSet exportReferences(const StorePathSet & storePaths);

    JobCategory jobCategory() const override {
        return JobCategory::Build;
        return JobCategory::Administration;
    };
};
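
The `currentLogLinePos` member above is commented "to handle carriage return". For illustration only, here is a minimal, self-contained sketch of that technique; it is not the actual `handleChildOutput`/`flushLine` implementation, and the helper name and signature below are hypothetical:

#include <cassert>
#include <string>
#include <string_view>

// Hypothetical helper: append builder output to the current log line,
// letting '\r' rewind the write position so progress indicators
// overwrite the line instead of accumulating.
void appendToLogLine(std::string & line, size_t & pos, std::string_view data)
{
    for (char c : data) {
        if (c == '\r') {
            pos = 0;                    // next characters overwrite the line
        } else if (c == '\n') {
            line.clear();               // a real implementation would flush the line here
            pos = 0;
        } else {
            if (pos < line.size()) line[pos] = c;
            else line += c;
            ++pos;
        }
    }
}

int main()
{
    std::string line;
    size_t pos = 0;
    appendToLogLine(line, pos, "building 50%\rbuilding done");
    assert(line == "building done");
    return 0;
}
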
@ -33,7 +33,7 @@ public:
    typedef void (DrvOutputSubstitutionGoal::*GoalState)();
    GoalState state;

    Co init() override;
    Co init();
    Co realisationFetched(Goals waitees, std::shared_ptr<const Realisation> outputInfo, nix::ref<nix::Store> sub);

    void timedOut(Error && ex) override { unreachable(); };
@ -50,6 +50,16 @@ enum struct JobCategory {
     * A substitution of an arbitrary store object; it will use network resources.
     */
    Substitution,
    /**
     * A goal that does no "real" work by itself, and just exists to depend on
     * other goals which *do* do real work. These goals therefore are not
     * limited.
     *
     * These goals cannot infinitely create themselves, so there is no risk of
     * a "fork bomb" type situation (which would be a problem even though the
     * goals do no real work) either.
     */
    Administration,
};
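
For illustration only, a minimal, self-contained sketch of the scheduling property the new `Administration` category describes: goals tagged this way merely coordinate other goals, so they are exempt from the parallel build-job limit. This is not Nix's actual Worker code, and the helper below is hypothetical:

#include <cassert>

enum struct JobCategory { Build, Substitution, Administration };

// Hypothetical helper: only goals that do "real" work count against
// the parallel-job limit; Administration goals just coordinate other
// goals and are therefore not limited.
bool countsTowardsJobLimit(JobCategory c)
{
    return c != JobCategory::Administration;
}

int main()
{
    assert(countsTowardsJobLimit(JobCategory::Build));
    assert(countsTowardsJobLimit(JobCategory::Substitution));
    assert(!countsTowardsJobLimit(JobCategory::Administration));
    return 0;
}

This matches the change earlier in the diff, where DerivationGoal::jobCategory() now returns JobCategory::Administration instead of JobCategory::Build.
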

struct Goal : public std::enable_shared_from_this<Goal>
@ -61,7 +71,7 @@ private:
    Goals waitees;

public:
    typedef enum {ecBusy, ecSuccess, ecFailed, ecNoSubstituters, ecIncompleteClosure} ExitCode;
    typedef enum {ecBusy, ecSuccess, ecFailed, ecNoSubstituters} ExitCode;

    /**
     * Backlink to the worker.
@ -85,12 +95,6 @@ public:
     */
    size_t nrNoSubstituters = 0;

    /**
     * Number of substitution goals we are/were waiting for that
     * failed because they had unsubstitutable references.
     */
    size_t nrIncompleteClosure = 0;

    /**
     * Name of this goal for debugging purposes.
     */
@ -344,17 +348,6 @@ protected:
     */
    std::optional<Co> top_co;

    /**
     * The entry point for the goal
     */
    virtual Co init() = 0;

    /**
     * Wrapper around @ref init since virtual functions
     * can't be used in constructors.
     */
    inline Co init_wrapper();

    /**
     * Signals that the goal is done.
     * `co_return` the result. If you're not inside a coroutine, you can ignore
@ -377,13 +370,24 @@ public:
     */
    BuildResult getBuildResult(const DerivedPath &) const;

    /**
     * Hack to say that this goal should not log `ex`, but instead keep
     * it around. Set by a waitee which sees itself as the designated
     * continuation of this goal, responsible for reporting its
     * successes or failures.
     *
     * @todo this is yet another not-nice hack in the goal system that
     * we ought to get rid of. See #11927
     */
    bool preserveException = false;

    /**
     * Exception containing an error message, if any.
     */
    std::optional<Error> ex;

    Goal(Worker & worker)
        : worker(worker), top_co(init_wrapper())
    Goal(Worker & worker, Co init)
        : worker(worker), top_co(std::move(init))
    {
        // top_co shouldn't have a goal already, should be nullptr.
        assert(!top_co->handle.promise().goal);
@ -446,7 +450,3 @@ template<typename... ArgTypes>
struct std::coroutine_traits<nix::Goal::Co, ArgTypes...> {
    using promise_type = nix::Goal::promise_type;
};

nix::Goal::Co nix::Goal::init_wrapper() {
    co_return init();
}
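
The two hunks above replace the virtual init() / init_wrapper() indirection with a Goal constructor that takes the entry coroutine directly. For illustration only, a minimal, self-contained sketch of why handing a coroutine such as init() to the base constructor is safe: with an initial_suspend of std::suspend_always the coroutine body does not run until it is first resumed. None of this is Nix's actual Goal/Co machinery; all names below are simplified stand-ins:

#include <coroutine>
#include <exception>
#include <iostream>
#include <optional>
#include <utility>

// Simplified stand-in for Goal::Co: a lazily started coroutine handle.
struct Co {
    struct promise_type {
        Co get_return_object() { return Co{std::coroutine_handle<promise_type>::from_promise(*this)}; }
        std::suspend_always initial_suspend() noexcept { return {}; } // body runs only when resumed
        std::suspend_always final_suspend() noexcept { return {}; }
        void return_void() {}
        void unhandled_exception() { std::terminate(); }
    };

    std::coroutine_handle<promise_type> handle;

    explicit Co(std::coroutine_handle<promise_type> h) : handle(h) {}
    Co(Co && other) noexcept : handle(std::exchange(other.handle, {})) {}
    ~Co() { if (handle) handle.destroy(); }
};

// Simplified stand-in for Goal: the subclass hands its entry coroutine
// to the base constructor, as in the new Goal(Worker &, Co init) above.
struct GoalBase {
    std::optional<Co> top_co;
    explicit GoalBase(Co init) : top_co(std::move(init)) {}
    void work() { top_co->handle.resume(); }
};

struct MyGoal : GoalBase {
    // Creating init() here is fine: the coroutine frame is allocated,
    // but its body does not execute until work() resumes it, by which
    // time the object is fully constructed.
    MyGoal() : GoalBase(init()) {}
    Co init() { std::cout << "goal body runs lazily\n"; co_return; }
};

int main()
{
    MyGoal g;
    g.work(); // only now does the coroutine body execute
    return 0;
}

Since every subclass now passes its own entry coroutine into this constructor, the pure-virtual init() and the init_wrapper() shim shown above are no longer needed.
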
Some files were not shown because too many files have changed in this diff.