Mirror of https://github.com/NixOS/nix.git (synced 2025-11-08 19:46:02 +01:00)
Cleanup: Use C++23 "explicit this" for recursive lambdas
Try to pass by reference where possible.

Co-authored-by: Sergei Zimmerman <sergei@zimmerman.foo>
parent 3b2186e1c8
commit 1507843f6c
15 changed files with 60 additions and 112 deletions
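
The pattern removed throughout this commit is a std::function (or a generic lambda passed to itself) whose only purpose is to let a local lambda recurse; C++23's explicit object parameter ("deducing this") lets the lambda name itself directly, and most call sites now invoke the lambda immediately. A minimal standalone sketch of the before/after, assuming a C++23 compiler; Tree, sumOld, and sumNew are illustrative names, not Nix code:

    // Standalone sketch (not from this commit) contrasting the two idioms.
    #include <functional>
    #include <iostream>
    #include <vector>

    struct Tree
    {
        int value;
        std::vector<Tree> children;
    };

    // Old idiom: a std::function so the lambda can name itself. This
    // type-erases the closure and usually heap-allocates it.
    int sumOld(const Tree & t)
    {
        std::function<int(const Tree &)> go;
        go = [&](const Tree & node) {
            int total = node.value;
            for (auto & child : node.children)
                total += go(child);
            return total;
        };
        return go(t);
    }

    // New idiom: C++23 explicit object parameter ("deducing this"). The
    // lambda receives itself as `self`, so no std::function is needed, and
    // it can be invoked immediately, as most call sites in this commit do.
    int sumNew(const Tree & t)
    {
        return [](this const auto & self, const Tree & node) -> int {
            int total = node.value;
            for (auto & child : node.children)
                total += self(child);
            return total;
        }(t);
    }

    int main()
    {
        Tree t{1, {{2, {}}, {3, {{4, {}}}}}};
        std::cout << sumOld(t) << " " << sumNew(t) << "\n"; // prints "10 10"
    }
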
@@ -2160,30 +2160,28 @@ void EvalState::forceValueDeep(Value & v)
 {
     std::set<const Value *> seen;
 
-    std::function<void(Value & v)> recurse;
-
-    recurse = [&](Value & v) {
+    [&, &state(*this)](this const auto & recurse, Value & v) {
         if (!seen.insert(&v).second)
             return;
 
-        forceValue(v, v.determinePos(noPos));
+        state.forceValue(v, v.determinePos(noPos));
 
         if (v.type() == nAttrs) {
             for (auto & i : *v.attrs())
                 try {
                     // If the value is a thunk, we're evaling. Otherwise no trace necessary.
-                    auto dts = debugRepl && i.value->isThunk() ? makeDebugTraceStacker(
-                        *this,
+                    auto dts = state.debugRepl && i.value->isThunk() ? makeDebugTraceStacker(
+                        state,
                         *i.value->thunk().expr,
                         *i.value->thunk().env,
                         i.pos,
                         "while evaluating the attribute '%1%'",
-                        symbols[i.name])
+                        state.symbols[i.name])
                         : nullptr;
 
                     recurse(*i.value);
                 } catch (Error & e) {
-                    addErrorTrace(e, i.pos, "while evaluating the attribute '%1%'", symbols[i.name]);
+                    state.addErrorTrace(e, i.pos, "while evaluating the attribute '%1%'", state.symbols[i.name]);
                     throw;
                 }
         }
@@ -2192,9 +2190,7 @@ void EvalState::forceValueDeep(Value & v)
             for (auto v2 : v.listView())
                 recurse(*v2);
         }
-    };
-
-    recurse(v);
+    }(v);
 }
 
 NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCtx)
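The forceValueDeep hunk above also aliases the enclosing object: the init-capture `&state(*this)` captures the EvalState by reference under the name `state`, so members that were previously reached implicitly (forceValue, debugRepl, symbols, addErrorTrace) are now qualified as `state....`; the SourceAccessor::dumpPath hunk later in the diff does the same with `this_`. A minimal sketch of that capture shape, with illustrative names (Printer, Node, me, line are not from the Nix sources):

    #include <iostream>
    #include <string>
    #include <vector>

    struct Node
    {
        std::string name;
        std::vector<Node> children;
    };

    struct Printer
    {
        std::string indentUnit = "  ";

        void line(int depth, const std::string & s) const
        {
            for (int i = 0; i < depth; ++i)
                std::cout << indentUnit;
            std::cout << s << "\n";
        }

        void print(const Node & root) const
        {
            // Same shape as the forceValueDeep change: `recurse` is the lambda
            // itself, `me` aliases *this, members are reached as `me.line(...)`.
            [&, &me(*this)](this const auto & recurse, const Node & n, int depth) -> void {
                me.line(depth, n.name);
                for (auto & child : n.children)
                    recurse(child, depth + 1);
            }(root, 0);
        }
    };

    int main()
    {
        Printer{}.print({"root", {{"a", {}}, {"b", {{"c", {}}}}}});
    }
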
@@ -92,7 +92,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va
 
     std::istringstream tomlStream(std::string{toml});
 
-    auto visit = [&](auto & self, Value & v, toml::value t) -> void {
+    auto visit = [&](this auto & self, Value & v, toml::value t) -> void {
         switch (t.type()) {
         case toml::value_t::table: {
             auto table = toml::get<toml::table>(t);
@@ -100,7 +100,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va
 
             for (auto & elem : table) {
                 forceNoNullByte(elem.first);
-                self(self, attrs.alloc(elem.first), elem.second);
+                self(attrs.alloc(elem.first), elem.second);
             }
 
             v.mkAttrs(attrs);
@@ -110,7 +110,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va
 
             auto list = state.buildList(array.size());
             for (const auto & [n, v] : enumerate(list))
-                self(self, *(v = state.allocValue()), array[n]);
+                self(*(v = state.allocValue()), array[n]);
             v.mkList(list);
         } break;
         case toml::value_t::boolean:
@@ -155,7 +155,6 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va
 
     try {
         visit(
-            visit,
             val,
             toml::parse(
                 tomlStream,
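prim_fromTOML used the pre-C++23 workaround of passing the generic lambda to itself as `self`; with an explicit object parameter the extra argument disappears at every call site, including the outer `visit(...)` call. A small standalone sketch of that call-site change (countdownOld and countdownNew are illustrative names, not Nix code):

    #include <iostream>

    int main()
    {
        // Pre-C++23 workaround: the generic lambda takes itself as `self`.
        auto countdownOld = [](auto & self, int n) -> void {
            if (n == 0)
                return;
            std::cout << n << " ";
            self(self, n - 1); // must forward itself on every call
        };
        countdownOld(countdownOld, 3);

        // C++23: the compiler passes the closure as the explicit object parameter.
        auto countdownNew = [](this auto & self, int n) -> void {
            if (n == 0)
                return;
            std::cout << n << " ";
            self(n - 1);
        };
        countdownNew(3);

        std::cout << "\n";
    }
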
@@ -9,8 +9,7 @@ NixStringContextElem NixStringContextElem::parse(std::string_view s0, const Expe
 {
     std::string_view s = s0;
 
-    std::function<SingleDerivedPath()> parseRest;
-    parseRest = [&]() -> SingleDerivedPath {
+    auto parseRest = [&](this auto & parseRest) -> SingleDerivedPath {
         // Case on whether there is a '!'
         size_t index = s.find("!");
         if (index == std::string_view::npos) {
@@ -502,8 +502,8 @@ lockFlake(const Settings & settings, EvalState & state, const FlakeRef & topRef,
 
     /* Get the overrides (i.e. attributes of the form
        'inputs.nixops.inputs.nixpkgs.url = ...'). */
-    std::function<void(const FlakeInput & input, const InputAttrPath & prefix)> addOverrides;
-    addOverrides = [&](const FlakeInput & input, const InputAttrPath & prefix) {
+    auto addOverrides =
+        [&](this const auto & addOverrides, const FlakeInput & input, const InputAttrPath & prefix) -> void {
         for (auto & [idOverride, inputOverride] : input.overrides) {
             auto inputAttrPath(prefix);
             inputAttrPath.push_back(idOverride);
@@ -147,11 +147,10 @@ LockFile::LockFile(const fetchers::Settings & fetchSettings, std::string_view co
     if (version < 5 || version > 7)
         throw Error("lock file '%s' has unsupported version %d", path, version);
 
-    std::map<std::string, ref<Node>> nodeMap;
-
-    std::function<void(Node & node, const nlohmann::json & jsonNode)> getInputs;
-
-    getInputs = [&](Node & node, const nlohmann::json & jsonNode) {
+    std::string rootKey = json["root"];
+    std::map<std::string, ref<Node>> nodeMap{{rootKey, root}};
+
+    [&](this const auto & getInputs, Node & node, const nlohmann::json & jsonNode) {
         if (jsonNode.find("inputs") == jsonNode.end())
             return;
         for (auto & i : jsonNode["inputs"].items()) {
@@ -179,11 +178,7 @@ LockFile::LockFile(const fetchers::Settings & fetchSettings, std::string_view co
                 throw Error("lock file contains cycle to root node");
             }
         }
-    };
-
-    std::string rootKey = json["root"];
-    nodeMap.insert_or_assign(rootKey, root);
-    getInputs(*root, json["nodes"][rootKey]);
+    }(*root, json["nodes"][rootKey]);
 
     // FIXME: check that there are no cycles in version >= 7. Cycles
     // between inputs are only possible using 'follows' indirections.
@@ -197,9 +192,7 @@ std::pair<nlohmann::json, LockFile::KeyMap> LockFile::toJSON() const
     KeyMap nodeKeys;
     boost::unordered_flat_set<std::string> keys;
 
-    std::function<std::string(const std::string & key, ref<const Node> node)> dumpNode;
-
-    dumpNode = [&](std::string key, ref<const Node> node) -> std::string {
+    auto dumpNode = [&](this auto & dumpNode, std::string key, ref<const Node> node) -> std::string {
         auto k = nodeKeys.find(node);
         if (k != nodeKeys.end())
             return k->second;
@@ -276,17 +269,13 @@ std::optional<FlakeRef> LockFile::isUnlocked(const fetchers::Settings & fetchSet
 {
     std::set<ref<const Node>> nodes;
 
-    std::function<void(ref<const Node> node)> visit;
-
-    visit = [&](ref<const Node> node) {
+    [&](this const auto & visit, ref<const Node> node) {
         if (!nodes.insert(node).second)
             return;
         for (auto & i : node->inputs)
             if (auto child = std::get_if<0>(&i.second))
                 visit(*child);
-    };
-
-    visit(root);
+    }(root);
 
     /* Return whether the input is either locked, or, if
        `allow-dirty-locks` is enabled, it has a NAR hash. In the
@@ -332,9 +321,7 @@ std::map<InputAttrPath, Node::Edge> LockFile::getAllInputs() const
     std::set<ref<Node>> done;
     std::map<InputAttrPath, Node::Edge> res;
 
-    std::function<void(const InputAttrPath & prefix, ref<Node> node)> recurse;
-
-    recurse = [&](const InputAttrPath & prefix, ref<Node> node) {
+    [&](this const auto & recurse, const InputAttrPath & prefix, ref<Node> node) {
         if (!done.insert(node).second)
             return;
 
@@ -345,9 +332,7 @@ std::map<InputAttrPath, Node::Edge> LockFile::getAllInputs() const
             if (auto child = std::get_if<0>(&input))
                 recurse(inputAttrPath, *child);
         }
-    };
-
-    recurse({}, root);
+    }({}, root);
 
     return res;
 }
@@ -1391,8 +1391,7 @@ void adl_serializer<Derivation>::to_json(json & res, const Derivation & d)
     }
 
     {
-        std::function<nlohmann::json(const DerivedPathMap<StringSet>::ChildNode &)> doInput;
-        doInput = [&](const auto & inputNode) {
+        auto doInput = [&](this const auto & doInput, const auto & inputNode) -> nlohmann::json {
             auto value = nlohmann::json::object();
             value["outputs"] = inputNode.value;
             {
@@ -1454,8 +1453,7 @@ Derivation adl_serializer<Derivation>::from_json(const json & _json, const Exper
     }
 
     try {
-        std::function<DerivedPathMap<StringSet>::ChildNode(const nlohmann::json &)> doInput;
-        doInput = [&](const auto & _json) {
+        auto doInput = [&](this const auto & doInput, const auto & _json) -> DerivedPathMap<StringSet>::ChildNode {
            auto & json = getObject(_json);
            DerivedPathMap<StringSet>::ChildNode node;
            node.value = getStringSet(valueAt(json, "outputs"));
@@ -126,13 +126,13 @@ MissingPaths Store::queryMissing(const std::vector<DerivedPath> & targets)
 
     std::function<void(DerivedPath)> doPath;
 
-    std::function<void(ref<SingleDerivedPath>, const DerivedPathMap<StringSet>::ChildNode &)> enqueueDerivedPaths;
-
-    enqueueDerivedPaths = [&](ref<SingleDerivedPath> inputDrv, const DerivedPathMap<StringSet>::ChildNode & inputNode) {
+    auto enqueueDerivedPaths = [&](this auto self,
+                                   ref<SingleDerivedPath> inputDrv,
+                                   const DerivedPathMap<StringSet>::ChildNode & inputNode) -> void {
         if (!inputNode.value.empty())
             pool.enqueue(std::bind(doPath, DerivedPath::Built{inputDrv, inputNode.value}));
         for (const auto & [outputName, childNode] : inputNode.childMap)
-            enqueueDerivedPaths(make_ref<SingleDerivedPath>(SingleDerivedPath::Built{inputDrv, outputName}), childNode);
+            self(make_ref<SingleDerivedPath>(SingleDerivedPath::Built{inputDrv, outputName}), childNode);
     };
 
     auto mustBuildDrv = [&](const StorePath & drvPath, const Derivation & drv) {
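The hunk above takes the object parameter by value (`this auto self`), so each recursive call copies the closure, unlike the other hunks in this commit, which take `auto &` or `const auto &`; that difference is what the "try to pass by reference where possible" in the commit message refers to. A rough standalone sketch of the two forms (byValue and byRef are illustrative names):

    #include <cstdio>

    int main()
    {
        int calls = 0;

        // By value: each recursive call receives its own copy of the closure.
        // With reference captures the copy is small, but it is still a copy.
        auto byValue = [&](this auto self, int n) -> void {
            ++calls;
            if (n > 0)
                self(n - 1);
        };
        byValue(3);

        // By reference: every call sees the one closure object.
        auto byRef = [&](this auto & self, int n) -> void {
            ++calls;
            if (n > 0)
                self(n - 1);
        };
        byRef(3);

        std::printf("%d calls\n", calls); // prints "8 calls"
    }
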
@@ -350,9 +350,9 @@ drvOutputReferences(Store & store, const Derivation & drv, const StorePath & out
 
     std::set<Realisation> inputRealisations;
 
-    std::function<void(const StorePath &, const DerivedPathMap<StringSet>::ChildNode &)> accumRealisations;
-
-    accumRealisations = [&](const StorePath & inputDrv, const DerivedPathMap<StringSet>::ChildNode & inputNode) {
+    auto accumRealisations = [&](this auto & self,
+                                 const StorePath & inputDrv,
+                                 const DerivedPathMap<StringSet>::ChildNode & inputNode) -> void {
         if (!inputNode.value.empty()) {
             auto outputHashes = staticOutputHashes(evalStore, evalStore.readDerivation(inputDrv));
             for (const auto & outputName : inputNode.value) {
@@ -372,7 +372,7 @@ drvOutputReferences(Store & store, const Derivation & drv, const StorePath & out
             auto d = makeConstantStorePathRef(inputDrv);
             for (const auto & [outputName, childNode] : inputNode.childMap) {
                 SingleDerivedPath next = SingleDerivedPath::Built{d, outputName};
-                accumRealisations(
+                self(
                     // TODO deep resolutions for dynamic derivations, issue #8947, would go here.
                     resolveDerivedPath(store, next, evalStore_),
                     childNode);
@@ -144,11 +144,7 @@ struct NarAccessor : public SourceAccessor
     NarAccessor(const nlohmann::json & listing, GetNarBytes getNarBytes)
         : getNarBytes(getNarBytes)
     {
-        using json = nlohmann::json;
-
-        std::function<void(NarMember &, const json &)> recurse;
-
-        recurse = [&](NarMember & member, const json & v) {
+        [&](this const auto & recurse, NarMember & member, const nlohmann::json & v) -> void {
             std::string type = v["type"];
 
             if (type == "directory") {
@@ -167,9 +163,7 @@ struct NarAccessor : public SourceAccessor
                 member.target = v.value("target", "");
             } else
                 return;
-        };
-
-        recurse(root, listing);
+        }(root, listing);
     }
 
     NarMember * find(const CanonPath & path)
@@ -47,12 +47,12 @@ void SourceAccessor::dumpPath(const CanonPath & path, Sink & sink, PathFilter &
         writePadding(*size, sink);
     };
 
-    std::function<void(const CanonPath & path)> dump;
+    sink << narVersionMagic1;
 
-    dump = [&](const CanonPath & path) {
+    [&, &this_(*this)](this const auto & dump, const CanonPath & path) -> void {
         checkInterrupt();
 
-        auto st = lstat(path);
+        auto st = this_.lstat(path);
 
         sink << "(";
 
@@ -69,7 +69,7 @@ void SourceAccessor::dumpPath(const CanonPath & path, Sink & sink, PathFilter &
         /* If we're on a case-insensitive system like macOS, undo
           the case hack applied by restorePath(). */
         StringMap unhacked;
-        for (auto & i : readDirectory(path))
+        for (auto & i : this_.readDirectory(path))
             if (archiveSettings.useCaseHack) {
                 std::string name(i.first);
                 size_t pos = i.first.find(caseHackSuffix);
@@ -92,16 +92,13 @@ void SourceAccessor::dumpPath(const CanonPath & path, Sink & sink, PathFilter &
         }
 
         else if (st.type == tSymlink)
-            sink << "type" << "symlink" << "target" << readLink(path);
+            sink << "type" << "symlink" << "target" << this_.readLink(path);
 
         else
            throw Error("file '%s' has an unsupported type", path);
 
         sink << ")";
-    };
-
-    sink << narVersionMagic1;
-    dump(path);
+    }(path);
 }
 
 time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter)
@@ -24,11 +24,9 @@ void computeClosure(const set<T> startElts, set<T> & res, GetEdgesAsync<T> getEd
 
     Sync<State> state_(State{0, res, 0});
 
-    std::function<void(const T &)> enqueue;
-
     std::condition_variable done;
 
-    enqueue = [&](const T & current) -> void {
+    auto enqueue = [&](this auto & enqueue, const T & current) -> void {
        {
            auto state(state_.lock());
            if (state->exc)
@@ -14,9 +14,7 @@ std::vector<T> topoSort(
     std::vector<T> sorted;
     decltype(items) visited, parents;
 
-    std::function<void(const T & path, const T * parent)> dfsVisit;
-
-    dfsVisit = [&](const T & path, const T * parent) {
+    auto dfsVisit = [&](this auto & dfsVisit, const T & path, const T * parent) {
         if (parents.count(path))
             throw makeCycleError(path, *parent);
 
@@ -85,9 +85,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption
             if (pathExists(*writeTo))
                 throw Error("path '%s' already exists", writeTo->string());
 
-            std::function<void(Value & v, const PosIdx pos, const std::filesystem::path & path)> recurse;
-
-            recurse = [&](Value & v, const PosIdx pos, const std::filesystem::path & path) {
+            [&](this const auto & recurse, Value & v, const PosIdx pos, const std::filesystem::path & path) -> void {
                 state->forceValue(v, pos);
                 if (v.type() == nString)
                     // FIXME: disallow strings with contexts?
@@ -111,9 +109,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption
                 } else
                     state->error<TypeError>("value at '%s' is not a string or an attribute set", state->positions[pos])
                         .debugThrow();
-            };
-
-            recurse(*v, pos, *writeTo);
+            }(*v, pos, *writeTo);
         }
 
         else if (raw) {
@@ -38,8 +38,7 @@ struct CmdFlakePrefetchInputs : FlakeCommand
 
         std::atomic<size_t> nrFailed{0};
 
-        std::function<void(const Node & node)> visit;
-        visit = [&](const Node & node) {
+        auto visit = [&](this const auto & visit, const Node & node) {
             if (!state_.lock()->done.insert(&node).second)
                 return;
 
@@ -267,11 +267,9 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
             if (!lockedFlake.lockFile.root->inputs.empty())
                 logger->cout(ANSI_BOLD "Inputs:" ANSI_NORMAL);
 
-            std::set<ref<Node>> visited;
+            std::set<ref<Node>> visited{lockedFlake.lockFile.root};
 
-            std::function<void(const Node & node, const std::string & prefix)> recurse;
-
-            recurse = [&](const Node & node, const std::string & prefix) {
+            [&](this const auto & recurse, const Node & node, const std::string & prefix) -> void {
                 for (const auto & [i, input] : enumerate(node.inputs)) {
                     bool last = i + 1 == node.inputs.size();
 
@@ -298,10 +296,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
                             printInputAttrPath(*follows));
                     }
                 }
-            };
-
-            visited.insert(lockedFlake.lockFile.root);
-            recurse(*lockedFlake.lockFile.root, "");
+            }(*lockedFlake.lockFile.root, "");
         }
     }
 };
@@ -884,8 +879,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
         std::vector<std::filesystem::path> changedFiles;
         std::vector<std::filesystem::path> conflictedFiles;
 
-        std::function<void(const SourcePath & from, const std::filesystem::path & to)> copyDir;
-        copyDir = [&](const SourcePath & from, const std::filesystem::path & to) {
+        [&](this const auto & copyDir, const SourcePath & from, const std::filesystem::path & to) -> void {
             createDirs(to);
 
             for (auto & [name, entry] : from.readDirectory()) {
@@ -935,9 +929,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
                 changedFiles.push_back(to2);
                 notice("wrote: %s", to2);
             }
-        };
-
-        copyDir(templateDir, flakeDir);
+        }(templateDir, flakeDir);
 
         if (!changedFiles.empty() && std::filesystem::exists(std::filesystem::path{flakeDir} / ".git")) {
             Strings args = {"-C", flakeDir, "add", "--intent-to-add", "--force", "--"};
@@ -410,8 +410,7 @@ static void main_nix_build(int argc, char ** argv)
         Value vRoot;
         state->eval(e, vRoot);
 
-        std::function<bool(const Value & v)> takesNixShellAttr;
-        takesNixShellAttr = [&](const Value & v) {
+        auto takesNixShellAttr = [&](const Value & v) {
             if (!isNixShell) {
                 return false;
             }
@@ -490,10 +489,9 @@ static void main_nix_build(int argc, char ** argv)
             }
         }
 
-        std::function<void(ref<SingleDerivedPath>, const DerivedPathMap<StringSet>::ChildNode &)> accumDerivedPath;
-
-        accumDerivedPath = [&](ref<SingleDerivedPath> inputDrv,
-                               const DerivedPathMap<StringSet>::ChildNode & inputNode) {
+        auto accumDerivedPath = [&](this auto & self,
+                                    ref<SingleDerivedPath> inputDrv,
+                                    const DerivedPathMap<StringSet>::ChildNode & inputNode) -> void {
             if (!inputNode.value.empty())
                 pathsToBuild.push_back(
                     DerivedPath::Built{
@@ -501,8 +499,7 @@ static void main_nix_build(int argc, char ** argv)
                         .outputs = OutputsSpec::Names{inputNode.value},
                     });
             for (const auto & [outputName, childNode] : inputNode.childMap)
-                accumDerivedPath(
-                    make_ref<SingleDerivedPath>(SingleDerivedPath::Built{inputDrv, outputName}), childNode);
+                self(make_ref<SingleDerivedPath>(SingleDerivedPath::Built{inputDrv, outputName}), childNode);
         };
 
         // Build or fetch all dependencies of the derivation.