
Merge branch '2.19-maintenance' into ifd-buildStore-2.19

Shea Levy 2024-01-11 07:21:51 -05:00
commit 2e4239f9e3
GPG key ID: 5C0BD6957D86FE27
1101 changed files with 16483 additions and 8593 deletions


@ -1,3 +1,6 @@
#pragma once
///@file
#include "derived-path.hh"
#include "realisation.hh"


@ -98,7 +98,7 @@ EvalCommand::EvalCommand()
EvalCommand::~EvalCommand()
{
if (evalState)
evalState->printStats();
evalState->maybePrintStats();
}
ref<Store> EvalCommand::getEvalStore()
@ -175,7 +175,7 @@ void BuiltPathsCommand::run(ref<Store> store, Installables && installables)
throw UsageError("'--all' does not expect arguments");
// XXX: Only uses opaque paths, ignores all the realisations
for (auto & p : store->queryAllValidPaths())
paths.push_back(BuiltPath::Opaque{p});
paths.emplace_back(BuiltPath::Opaque{p});
} else {
paths = Installable::toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables);
if (recursive) {
@ -188,7 +188,7 @@ void BuiltPathsCommand::run(ref<Store> store, Installables && installables)
}
store->computeFSClosure(pathsRoots, pathsClosure);
for (auto & path : pathsClosure)
paths.push_back(BuiltPath::Opaque{path});
paths.emplace_back(BuiltPath::Opaque{path});
}
}


@ -34,21 +34,28 @@ struct NixMultiCommand : virtual MultiCommand, virtual Command
// For the overloaded run methods
#pragma GCC diagnostic ignored "-Woverloaded-virtual"
/* A command that requires a Nix store. */
/**
* A command that requires a \ref Store "Nix store".
*/
struct StoreCommand : virtual Command
{
StoreCommand();
void run() override;
ref<Store> getStore();
virtual ref<Store> createStore();
/**
* Main entry point, with a `Store` provided
*/
virtual void run(ref<Store>) = 0;
private:
std::shared_ptr<Store> _store;
};
/* A command that copies something between `--from` and `--to`
stores. */
/**
* A command that copies something between `--from` and `--to` \ref
* Store stores.
*/
struct CopyCommand : virtual StoreCommand
{
std::string srcUri, dstUri;
@ -60,6 +67,9 @@ struct CopyCommand : virtual StoreCommand
ref<Store> getDstStore();
};
/**
* A command that needs to evaluate Nix language expressions.
*/
struct EvalCommand : virtual StoreCommand, MixEvalArgs
{
bool startReplOnEvalErrors = false;
@ -79,20 +89,26 @@ private:
std::shared_ptr<EvalState> evalState;
};
/**
* A mixin class for commands that process flakes, adding a few standard
* flake-related options/flags.
*/
struct MixFlakeOptions : virtual Args, EvalCommand
{
flake::LockFlags lockFlags;
std::optional<std::string> needsFlakeInputCompletion = {};
MixFlakeOptions();
virtual std::vector<std::string> getFlakesForCompletion()
/**
* The completion for some of these flags depends on the flake(s) in
* question.
*
* This method should be implemented to gather all flakerefs the
* command is operating with (presumably specified via some other
* arguments) so that the completions for these flags can use them.
*/
virtual std::vector<FlakeRef> getFlakeRefsForCompletion()
{ return {}; }
void completeFlakeInput(std::string_view prefix);
void completionHook() override;
};
struct SourceExprCommand : virtual Args, MixFlakeOptions
@ -112,15 +128,35 @@ struct SourceExprCommand : virtual Args, MixFlakeOptions
virtual Strings getDefaultFlakeAttrPathPrefixes();
void completeInstallable(std::string_view prefix);
/**
* Complete an installable from the given prefix.
*/
void completeInstallable(AddCompletions & completions, std::string_view prefix);
/**
* Convenience wrapper around the underlying function to make setting the
* callback easier.
*/
CompleterClosure getCompleteInstallable();
};
/**
* A mixin class for commands that need a read-only flag.
*
* What exactly is "read-only" is unspecified, but it will usually be
* the \ref Store "Nix store".
*/
struct MixReadOnlyOption : virtual Args
{
MixReadOnlyOption();
};
/* Like InstallablesCommand but the installables are not loaded */
/**
* Like InstallablesCommand but the installables are not loaded.
*
* This is needed by `CmdRepl` which wants to load (and reload) the
* installables itself.
*/
struct RawInstallablesCommand : virtual Args, SourceExprCommand
{
RawInstallablesCommand();
@ -129,19 +165,22 @@ struct RawInstallablesCommand : virtual Args, SourceExprCommand
void run(ref<Store> store) override;
// FIXME make const after CmdRepl's override is fixed up
// FIXME make const after `CmdRepl`'s override is fixed up
virtual void applyDefaultInstallables(std::vector<std::string> & rawInstallables);
bool readFromStdIn = false;
std::vector<std::string> getFlakesForCompletion() override;
std::vector<FlakeRef> getFlakeRefsForCompletion() override;
private:
std::vector<std::string> rawInstallables;
};
/* A command that operates on a list of "installables", which can be
store paths, attribute paths, Nix expressions, etc. */
/**
* A command that operates on a list of "installables", which can be
* store paths, attribute paths, Nix expressions, etc.
*/
struct InstallablesCommand : RawInstallablesCommand
{
virtual void run(ref<Store> store, Installables && installables) = 0;
@ -149,7 +188,9 @@ struct InstallablesCommand : RawInstallablesCommand
void run(ref<Store> store, std::vector<std::string> && rawInstallables) override;
};
/* A command that operates on exactly one "installable" */
/**
* A command that operates on exactly one "installable".
*/
struct InstallableCommand : virtual Args, SourceExprCommand
{
InstallableCommand();
@ -158,10 +199,7 @@ struct InstallableCommand : virtual Args, SourceExprCommand
void run(ref<Store> store) override;
std::vector<std::string> getFlakesForCompletion() override
{
return {_installable};
}
std::vector<FlakeRef> getFlakeRefsForCompletion() override;
private:
@ -175,7 +213,12 @@ struct MixOperateOnOptions : virtual Args
MixOperateOnOptions();
};
/* A command that operates on zero or more store paths. */
/**
* A command that operates on zero or more extant store paths.
*
* If the argument the user passes is some sort of recipe for a path
* not yet built, it must be built first.
*/
struct BuiltPathsCommand : InstallablesCommand, virtual MixOperateOnOptions
{
private:
@ -207,7 +250,9 @@ struct StorePathsCommand : public BuiltPathsCommand
void run(ref<Store> store, BuiltPaths && paths) override;
};
/* A command that operates on exactly one store path. */
/**
* A command that operates on exactly one store path.
*/
struct StorePathCommand : public StorePathsCommand
{
virtual void run(ref<Store> store, const StorePath & storePath) = 0;
@ -215,7 +260,9 @@ struct StorePathCommand : public StorePathsCommand
void run(ref<Store> store, StorePaths && storePaths) override;
};
/* A helper class for registering commands globally. */
/**
* A helper class for registering \ref Command commands globally.
*/
struct RegisterCommand
{
typedef std::map<std::vector<std::string>, std::function<ref<Command>()>> Commands;
@ -271,13 +318,24 @@ struct MixEnvironment : virtual Args {
MixEnvironment();
/* Modify global environ based on ignoreEnvironment, keep, and unset. It's expected that exec will be called before this class goes out of scope, otherwise environ will become invalid. */
/**
* Modify global environ based on `ignoreEnvironment`, `keep`, and
* `unset`. It's expected that exec will be called before this class
* goes out of scope, otherwise `environ` will become invalid.
*/
void setEnviron();
};
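A hypothetical caller (not part of this commit) showing the contract described in the comment above: `setEnviron()` rewrites the global environment according to the `--ignore-environment`, `--keep`, and `--unset` flags registered by `MixEnvironment`, and the `exec` call must happen before the mixin goes out of scope.

#include <unistd.h>

// Hypothetical sketch; `cmd` is any command that mixes in MixEnvironment.
void execWithPreparedEnv(MixEnvironment & cmd, const char * program, char * const argv[])
{
    cmd.setEnviron();       // applies the flags above to the global ::environ
    execvp(program, argv);  // expected to run before `cmd` is destroyed
    throw SysError("unable to execute '%s'", program);
}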
void completeFlakeRef(ref<Store> store, std::string_view prefix);
void completeFlakeInputPath(
AddCompletions & completions,
ref<EvalState> evalState,
const std::vector<FlakeRef> & flakeRefs,
std::string_view prefix);
void completeFlakeRef(AddCompletions & completions, ref<Store> store, std::string_view prefix);
void completeFlakeRefWithFragment(
AddCompletions & completions,
ref<EvalState> evalState,
flake::LockFlags lockFlags,
Strings attrPathPrefixes,


@ -2,13 +2,13 @@
#include "common-eval-args.hh"
#include "shared.hh"
#include "filetransfer.hh"
#include "util.hh"
#include "eval.hh"
#include "fetchers.hh"
#include "registry.hh"
#include "flake/flakeref.hh"
#include "store-api.hh"
#include "command.hh"
#include "tarball.hh"
namespace nix {
@ -132,8 +132,8 @@ MixEvalArgs::MixEvalArgs()
if (to.subdir != "") extraAttrs["dir"] = to.subdir;
fetchers::overrideRegistry(from.input, to.input, extraAttrs);
}},
.completer = {[&](size_t, std::string_view prefix) {
completeFlakeRef(openStore(), prefix);
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeRef(completions, openStore(), prefix);
}}
});
@ -164,18 +164,18 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
return res.finish();
}
SourcePath lookupFileArg(EvalState & state, std::string_view s)
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir)
{
if (EvalSettings::isPseudoUrl(s)) {
auto storePath = fetchers::downloadTarball(
state.store, EvalSettings::resolvePseudoUrl(s), "source", false).tree.storePath;
state.store, EvalSettings::resolvePseudoUrl(s), "source", false).storePath;
return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
}
else if (hasPrefix(s, "flake:")) {
experimentalFeatureSettings.require(Xp::Flakes);
auto flakeRef = parseFlakeRef(std::string(s.substr(6)), {}, true, false);
auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first.storePath;
auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first;
return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
}
@ -185,7 +185,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s)
}
else
return state.rootPath(CanonPath::fromCwd(s));
return state.rootPath(CanonPath(s, baseDir));
}
}


@ -2,6 +2,7 @@
///@file
#include "args.hh"
#include "canon-path.hh"
#include "common-args.hh"
#include "search-path.hh"
@ -28,6 +29,6 @@ private:
std::map<std::string, std::string> autoArgs;
};
SourcePath lookupFileArg(EvalState & state, std::string_view s);
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir = CanonPath::fromCwd());
}
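A hypothetical call (not part of this commit) illustrating the new `baseDir` parameter: a relative file argument can now be resolved against a caller-chosen base directory, as installables.cc does later in this diff with `getCommandBaseDir()`, instead of always falling back to the process's current working directory.

// Hypothetical helper; `baseDir` might be a command's base directory.
SourcePath resolveFileArg(EvalState & state, std::string_view arg, const Path & baseDir)
{
    return lookupFileArg(state, arg, CanonPath::fromCwd(baseDir));
}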


@ -1,5 +1,5 @@
#include "util.hh"
#include "editor-for.hh"
#include "environment-variables.hh"
namespace nix {


@ -4,7 +4,6 @@
#include "globals.hh"
#include "installable-value.hh"
#include "outputs-spec.hh"
#include "util.hh"
#include "command.hh"
#include "attr-path.hh"
#include "common-eval-args.hh"


@ -28,6 +28,11 @@ namespace nix {
std::vector<std::string> InstallableFlake::getActualAttrPaths()
{
std::vector<std::string> res;
if (attrPaths.size() == 1 && attrPaths.front().starts_with(".")){
attrPaths.front().erase(0,1);
res.push_back(attrPaths.front());
return res;
}
for (auto & prefix : prefixes)
res.push_back(prefix + *attrPaths.begin());


@ -4,6 +4,7 @@
#include "installable-attr-path.hh"
#include "installable-flake.hh"
#include "outputs-spec.hh"
#include "users.hh"
#include "util.hh"
#include "command.hh"
#include "attr-path.hh"
@ -28,15 +29,38 @@
namespace nix {
void completeFlakeInputPath(
AddCompletions & completions,
ref<EvalState> evalState,
const std::vector<FlakeRef> & flakeRefs,
std::string_view prefix)
{
for (auto & flakeRef : flakeRefs) {
auto flake = flake::getFlake(*evalState, flakeRef, true);
for (auto & input : flake.inputs)
if (hasPrefix(input.first, prefix))
completions.add(input.first);
}
}
MixFlakeOptions::MixFlakeOptions()
{
auto category = "Common flake-related options";
addFlag({
.longName = "recreate-lock-file",
.description = "Recreate the flake's lock file from scratch.",
.description = R"(
Recreate the flake's lock file from scratch.
> **DEPRECATED**
>
> Use [`nix flake update`](@docroot@/command-ref/new-cli/nix3-flake-update.md) instead.
)",
.category = category,
.handler = {&lockFlags.recreateLockFile, true}
.handler = {[&]() {
lockFlags.recreateLockFile = true;
warn("'--recreate-lock-file' is deprecated and will be removed in a future version; use 'nix flake update' instead.");
}}
});
addFlag({
@ -55,8 +79,13 @@ MixFlakeOptions::MixFlakeOptions()
addFlag({
.longName = "no-registries",
.description =
"Don't allow lookups in the flake registries. This option is deprecated; use `--no-use-registries`.",
.description = R"(
Don't allow lookups in the flake registries.
> **DEPRECATED**
>
> Use [`--no-use-registries`](#opt-no-use-registries) instead.
)",
.category = category,
.handler = {[&]() {
lockFlags.useRegistries = false;
@ -73,14 +102,21 @@ MixFlakeOptions::MixFlakeOptions()
addFlag({
.longName = "update-input",
.description = "Update a specific flake input (ignoring its previous entry in the lock file).",
.description = R"(
Update a specific flake input (ignoring its previous entry in the lock file).
> **DEPRECATED**
>
> Use [`nix flake update`](@docroot@/command-ref/new-cli/nix3-flake-update.md) instead.
)",
.category = category,
.labels = {"input-path"},
.handler = {[&](std::string s) {
warn("'--update-input' is a deprecated alias for 'flake update' and will be removed in a future version.");
lockFlags.inputUpdates.insert(flake::parseInputPath(s));
}},
.completer = {[&](size_t, std::string_view prefix) {
needsFlakeInputCompletion = {std::string(prefix)};
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
}}
});
@ -93,13 +129,14 @@ MixFlakeOptions::MixFlakeOptions()
lockFlags.writeLockFile = false;
lockFlags.inputOverrides.insert_or_assign(
flake::parseInputPath(inputPath),
parseFlakeRef(flakeRef, absPath("."), true));
parseFlakeRef(flakeRef, absPath(getCommandBaseDir()), true));
}},
.completer = {[&](size_t n, std::string_view prefix) {
if (n == 0)
needsFlakeInputCompletion = {std::string(prefix)};
else if (n == 1)
completeFlakeRef(getEvalState()->store, prefix);
.completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) {
if (n == 0) {
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
} else if (n == 1) {
completeFlakeRef(completions, getEvalState()->store, prefix);
}
}}
});
@ -134,7 +171,7 @@ MixFlakeOptions::MixFlakeOptions()
auto evalState = getEvalState();
auto flake = flake::lockFlake(
*evalState,
parseFlakeRef(flakeRef, absPath(".")),
parseFlakeRef(flakeRef, absPath(getCommandBaseDir())),
{ .writeLockFile = false });
for (auto & [inputName, input] : flake.lockFile.root->inputs) {
auto input2 = flake.lockFile.findInput({inputName}); // resolve 'follows' nodes
@ -146,30 +183,12 @@ MixFlakeOptions::MixFlakeOptions()
}
}
}},
.completer = {[&](size_t, std::string_view prefix) {
completeFlakeRef(getEvalState()->store, prefix);
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeRef(completions, getEvalState()->store, prefix);
}}
});
}
void MixFlakeOptions::completeFlakeInput(std::string_view prefix)
{
auto evalState = getEvalState();
for (auto & flakeRefS : getFlakesForCompletion()) {
auto flakeRef = parseFlakeRefWithFragment(expandTilde(flakeRefS), absPath(".")).first;
auto flake = flake::getFlake(*evalState, flakeRef, true);
for (auto & input : flake.inputs)
if (hasPrefix(input.first, prefix))
completions->add(input.first);
}
}
void MixFlakeOptions::completionHook()
{
if (auto & prefix = needsFlakeInputCompletion)
completeFlakeInput(*prefix);
}
SourceExprCommand::SourceExprCommand()
{
addFlag({
@ -226,11 +245,18 @@ Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
};
}
void SourceExprCommand::completeInstallable(std::string_view prefix)
Args::CompleterClosure SourceExprCommand::getCompleteInstallable()
{
return [this](AddCompletions & completions, size_t, std::string_view prefix) {
completeInstallable(completions, prefix);
};
}
void SourceExprCommand::completeInstallable(AddCompletions & completions, std::string_view prefix)
{
try {
if (file) {
completionType = ctAttrs;
completions.setType(AddCompletions::Type::Attrs);
evalSettings.pureEval = false;
auto state = getEvalState();
@ -265,14 +291,15 @@ void SourceExprCommand::completeInstallable(std::string_view prefix)
std::string name = state->symbols[i.name];
if (name.find(searchWord) == 0) {
if (prefix_ == "")
completions->add(name);
completions.add(name);
else
completions->add(prefix_ + "." + name);
completions.add(prefix_ + "." + name);
}
}
}
} else {
completeFlakeRefWithFragment(
completions,
getEvalState(),
lockFlags,
getDefaultFlakeAttrPathPrefixes(),
@ -285,6 +312,7 @@ void SourceExprCommand::completeInstallable(std::string_view prefix)
}
void completeFlakeRefWithFragment(
AddCompletions & completions,
ref<EvalState> evalState,
flake::LockFlags lockFlags,
Strings attrPathPrefixes,
@ -296,12 +324,19 @@ void completeFlakeRefWithFragment(
try {
auto hash = prefix.find('#');
if (hash == std::string::npos) {
completeFlakeRef(evalState->store, prefix);
completeFlakeRef(completions, evalState->store, prefix);
} else {
completionType = ctAttrs;
completions.setType(AddCompletions::Type::Attrs);
auto fragment = prefix.substr(hash + 1);
std::string prefixRoot = "";
if (fragment.starts_with(".")){
fragment = fragment.substr(1);
prefixRoot = ".";
}
auto flakeRefS = std::string(prefix.substr(0, hash));
// TODO: ideally this would use the command base directory instead of assuming ".".
auto flakeRef = parseFlakeRef(expandTilde(flakeRefS), absPath("."));
auto evalCache = openEvalCache(*evalState,
@ -309,6 +344,9 @@ void completeFlakeRefWithFragment(
auto root = evalCache->getRoot();
if (prefixRoot == "."){
attrPathPrefixes.clear();
}
/* Complete 'fragment' relative to all the
attrpath prefixes as well as the root of the
flake. */
@ -333,7 +371,7 @@ void completeFlakeRefWithFragment(
auto attrPath2 = (*attr)->getAttrPath(attr2);
/* Strip the attrpath prefix. */
attrPath2.erase(attrPath2.begin(), attrPath2.begin() + attrPathPrefix.size());
completions->add(flakeRefS + "#" + concatStringsSep(".", evalState->symbols.resolve(attrPath2)));
completions.add(flakeRefS + "#" + prefixRoot + concatStringsSep(".", evalState->symbols.resolve(attrPath2)));
}
}
}
@ -344,7 +382,7 @@ void completeFlakeRefWithFragment(
for (auto & attrPath : defaultFlakeAttrPaths) {
auto attr = root->findAlongAttrPath(parseAttrPath(*evalState, attrPath));
if (!attr) continue;
completions->add(flakeRefS + "#");
completions.add(flakeRefS + "#" + prefixRoot);
}
}
}
@ -353,15 +391,15 @@ void completeFlakeRefWithFragment(
}
}
void completeFlakeRef(ref<Store> store, std::string_view prefix)
void completeFlakeRef(AddCompletions & completions, ref<Store> store, std::string_view prefix)
{
if (!experimentalFeatureSettings.isEnabled(Xp::Flakes))
return;
if (prefix == "")
completions->add(".");
completions.add(".");
completeDir(0, prefix);
Args::completeDir(completions, 0, prefix);
/* Look for registry entries that match the prefix. */
for (auto & registry : fetchers::getRegistries(store)) {
@ -370,10 +408,10 @@ void completeFlakeRef(ref<Store> store, std::string_view prefix)
if (!hasPrefix(prefix, "flake:") && hasPrefix(from, "flake:")) {
std::string from2(from, 6);
if (hasPrefix(from2, prefix))
completions->add(from2);
completions.add(from2);
} else {
if (hasPrefix(from, prefix))
completions->add(from);
completions.add(from);
}
}
}
@ -447,10 +485,12 @@ Installables SourceExprCommand::parseInstallables(
auto e = state->parseStdin();
state->eval(e, *vFile);
}
else if (file)
state->evalFile(lookupFileArg(*state, *file), *vFile);
else if (file) {
state->evalFile(lookupFileArg(*state, *file, CanonPath::fromCwd(getCommandBaseDir())), *vFile);
}
else {
auto e = state->parseExprFromString(*expr, state->rootPath(CanonPath::fromCwd()));
CanonPath dir(CanonPath::fromCwd(getCommandBaseDir()));
auto e = state->parseExprFromString(*expr, state->rootPath(dir));
state->eval(e, *vFile);
}
@ -485,7 +525,7 @@ Installables SourceExprCommand::parseInstallables(
}
try {
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, absPath("."));
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, absPath(getCommandBaseDir()));
result.push_back(make_ref<InstallableFlake>(
this,
getEvalState(),
@ -669,7 +709,7 @@ BuiltPaths Installable::toBuiltPaths(
BuiltPaths res;
for (auto & drvPath : Installable::toDerivations(store, installables, true))
res.push_back(BuiltPath::Opaque{drvPath});
res.emplace_back(BuiltPath::Opaque{drvPath});
return res;
}
}
@ -739,9 +779,7 @@ RawInstallablesCommand::RawInstallablesCommand()
expectArgs({
.label = "installables",
.handler = {&rawInstallables},
.completer = {[&](size_t, std::string_view prefix) {
completeInstallable(prefix);
}}
.completer = getCompleteInstallable(),
});
}
@ -754,6 +792,17 @@ void RawInstallablesCommand::applyDefaultInstallables(std::vector<std::string> &
}
}
std::vector<FlakeRef> RawInstallablesCommand::getFlakeRefsForCompletion()
{
applyDefaultInstallables(rawInstallables);
std::vector<FlakeRef> res;
for (auto i : rawInstallables)
res.push_back(parseFlakeRefWithFragment(
expandTilde(i),
absPath(getCommandBaseDir())).first);
return res;
}
void RawInstallablesCommand::run(ref<Store> store)
{
if (readFromStdIn && !isatty(STDIN_FILENO)) {
@ -767,10 +816,13 @@ void RawInstallablesCommand::run(ref<Store> store)
run(store, std::move(rawInstallables));
}
std::vector<std::string> RawInstallablesCommand::getFlakesForCompletion()
std::vector<FlakeRef> InstallableCommand::getFlakeRefsForCompletion()
{
applyDefaultInstallables(rawInstallables);
return rawInstallables;
return {
parseFlakeRefWithFragment(
expandTilde(_installable),
absPath(getCommandBaseDir())).first
};
}
void InstallablesCommand::run(ref<Store> store, std::vector<std::string> && rawInstallables)
@ -786,9 +838,7 @@ InstallableCommand::InstallableCommand()
.label = "installable",
.optional = true,
.handler = {&_installable},
.completer = {[&](size_t, std::string_view prefix) {
completeInstallable(prefix);
}}
.completer = getCompleteInstallable(),
});
}


@ -1,7 +1,6 @@
#pragma once
///@file
#include "util.hh"
#include "path.hh"
#include "outputs-spec.hh"
#include "derived-path.hh"


@ -1,6 +1,7 @@
#include "markdown.hh"
#include "util.hh"
#include "finally.hh"
#include "terminal.hh"
#include <sys/queue.h>
#include <lowdown.h>


@ -22,6 +22,7 @@ extern "C" {
#include "repl.hh"
#include "ansicolor.hh"
#include "signals.hh"
#include "shared.hh"
#include "eval.hh"
#include "eval-cache.hh"
@ -36,6 +37,8 @@ extern "C" {
#include "globals.hh"
#include "flake/flake.hh"
#include "flake/lockfile.hh"
#include "users.hh"
#include "terminal.hh"
#include "editor-for.hh"
#include "finally.hh"
#include "markdown.hh"
@ -922,7 +925,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
case nString:
str << ANSI_WARNING;
printLiteralString(str, v.string.s);
printLiteralString(str, v.string_view());
str << ANSI_NORMAL;
break;


@ -1,6 +1,5 @@
#include "attr-path.hh"
#include "eval-inline.hh"
#include "util.hh"
namespace nix {
@ -132,7 +131,7 @@ std::pair<SourcePath, uint32_t> findPackageFilename(EvalState & state, Value & v
if (colon == std::string::npos) fail();
std::string filename(fn, 0, colon);
auto lineno = std::stoi(std::string(fn, colon + 1, std::string::npos));
return {CanonPath(fn.substr(0, colon)), lineno};
return {SourcePath{path.accessor, CanonPath(fn.substr(0, colon))}, lineno};
} catch (std::invalid_argument & e) {
fail();
abort();


@ -1,3 +1,4 @@
#include "users.hh"
#include "eval-cache.hh"
#include "sqlite.hh"
#include "eval.hh"
@ -50,7 +51,7 @@ struct AttrDb
Path cacheDir = getCacheDir() + "/nix/eval-cache-v5";
createDirs(cacheDir);
Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite";
Path dbPath = cacheDir + "/" + fingerprint.to_string(HashFormat::Base16, false) + ".sqlite";
state->db = SQLite(dbPath);
state->db.isCache();
@ -440,8 +441,8 @@ Value & AttrCursor::forceValue()
if (root->db && (!cachedValue || std::get_if<placeholder_t>(&cachedValue->second))) {
if (v.type() == nString)
cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context),
string_t{v.string.s, {}}};
cachedValue = {root->db->setString(getKey(), v.c_str(), v.context()),
string_t{v.c_str(), {}}};
else if (v.type() == nPath) {
auto path = v.path().path;
cachedValue = {root->db->setString(getKey(), path.abs()), string_t{path.abs(), {}}};
@ -582,7 +583,7 @@ std::string AttrCursor::getString()
if (v.type() != nString && v.type() != nPath)
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
return v.type() == nString ? v.string.s : v.path().to_string();
return v.type() == nString ? v.c_str() : v.path().to_string();
}
string_t AttrCursor::getStringWithContext()
@ -624,7 +625,7 @@ string_t AttrCursor::getStringWithContext()
if (v.type() == nString) {
NixStringContext context;
copyContext(v, context);
return {v.string.s, std::move(context)};
return {v.c_str(), std::move(context)};
}
else if (v.type() == nPath)
return {v.path().to_string(), {}};


@ -1,3 +1,4 @@
#include "users.hh"
#include "globals.hh"
#include "profiles.hh"
#include "eval.hh"


@ -1,4 +1,6 @@
#pragma once
///@file
#include "config.hh"
namespace nix {
@ -29,10 +31,12 @@ struct EvalSettings : Config
this, false, "restrict-eval",
R"(
If set to `true`, the Nix evaluator will not allow access to any
files outside of the Nix search path (as set via the `NIX_PATH`
environment variable or the `-I` option), or to URIs outside of
[`allowed-uris`](../command-ref/conf-file.md#conf-allowed-uris).
The default is `false`.
files outside of
[`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath),
or to URIs outside of
[`allowed-uris`](@docroot@/command-ref/conf-file.md#conf-allowed-uris).
Also the default value for [`nix-path`](#conf-nix-path) is ignored, such that only explicitly set search path entries are taken into account.
)"};
Setting<bool> pureEval{this, false, "pure-eval",
@ -40,18 +44,22 @@ struct EvalSettings : Config
Pure evaluation mode ensures that the result of Nix expressions is fully determined by explicitly declared inputs, and not influenced by external state:
- Restrict file system and network access to files specified by cryptographic hash
- Disable [`bultins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) and [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime)
- Disable impure constants:
- [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem)
- [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime)
- [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath)
)"
};
Setting<bool> enableImportFromDerivation{
this, true, "allow-import-from-derivation",
R"(
By default, Nix allows you to `import` from a derivation, allowing
building at evaluation time. With this option set to false, Nix will
throw an error when evaluating an expression that uses this feature,
allowing users to ensure their evaluation will not require any
builds to take place.
By default, Nix allows [Import from Derivation](@docroot@/language/import-from-derivation.md).
With this option set to `false`, Nix will throw an error when evaluating an expression that uses this feature,
even when the required store object is readily available.
This ensures that evaluation will not require any builds to take place,
regardless of the state of the store.
)"};
Setting<Strings> allowedUris{this, {}, "allowed-uris",
@ -60,6 +68,11 @@ struct EvalSettings : Config
evaluation mode. For example, when set to
`https://github.com/NixOS`, builtin functions such as `fetchGit` are
allowed to access `https://github.com/NixOS/patchelf.git`.
Access is granted when
- the URI is equal to the prefix,
- or the URI is a subpath of the prefix,
- or the prefix is a URI scheme ended by a colon `:` and the URI has the same scheme.
)"};
Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",


@ -1,6 +1,7 @@
#include "eval.hh"
#include "eval-settings.hh"
#include "hash.hh"
#include "primops.hh"
#include "types.hh"
#include "util.hh"
#include "store-api.hh"
@ -12,6 +13,10 @@
#include "function-trace.hh"
#include "profiles.hh"
#include "print.hh"
#include "fs-input-accessor.hh"
#include "memory-input-accessor.hh"
#include "signals.hh"
#include "url.hh"
#include <algorithm>
#include <chrono>
@ -114,7 +119,7 @@ void Value::print(const SymbolTable &symbols, std::ostream &str,
printLiteralBool(str, boolean);
break;
case tString:
printLiteralString(str, string.s);
printLiteralString(str, string_view());
break;
case tPath:
str << path().to_string(); // !!! escaping?
@ -339,7 +344,7 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env)
Value nameValue;
name.expr->eval(state, env, nameValue);
state.forceStringNoCtx(nameValue, noPos, "while evaluating an attribute name");
return state.symbols.create(nameValue.string.s);
return state.symbols.create(nameValue.string_view());
}
}
@ -503,7 +508,17 @@ EvalState::EvalState(
, sOutputSpecified(symbols.create("outputSpecified"))
, repair(NoRepair)
, emptyBindings(0)
, derivationInternal(rootPath(CanonPath("/builtin/derivation.nix")))
, rootFS(makeFSInputAccessor(CanonPath::root))
, corepkgsFS(makeMemoryInputAccessor())
, internalFS(makeMemoryInputAccessor())
, derivationInternal{corepkgsFS->addFile(
CanonPath("derivation-internal.nix"),
#include "primops/derivation.nix.gen.hh"
)}
, callFlakeInternal{internalFS->addFile(
CanonPath("call-flake.nix"),
#include "flake/call-flake.nix.gen.hh"
)}
, store(store)
, buildStore(buildStore ? buildStore : store)
, debugRepl(nullptr)
@ -539,7 +554,7 @@ EvalState::EvalState(
auto r = resolveSearchPathPath(i.path);
if (!r) continue;
auto path = *std::move(r);
auto path = std::move(*r);
if (store->isInStore(path)) {
try {
@ -555,6 +570,11 @@ EvalState::EvalState(
}
}
corepkgsFS->addFile(
CanonPath("fetchurl.nix"),
#include "fetchurl.nix.gen.hh"
);
createBaseEnv();
}
@ -583,8 +603,20 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value &
mkStorePathString(storePath, v);
}
inline static bool isJustSchemePrefix(std::string_view prefix)
{
return
!prefix.empty()
&& prefix[prefix.size() - 1] == ':'
&& isValidSchemeName(prefix.substr(0, prefix.size() - 1));
}
SourcePath EvalState::checkSourcePath(const SourcePath & path_)
{
// Don't check non-rootFS accessors, they're in a different namespace.
if (path_.accessor != ref<InputAccessor>(rootFS)) return path_;
if (!allowedPaths) return path_;
auto i = resolvedPaths.find(path_.path.abs());
@ -599,8 +631,6 @@ SourcePath EvalState::checkSourcePath(const SourcePath & path_)
*/
Path abspath = canonPath(path_.path.abs());
if (hasPrefix(abspath, corepkgsPrefix)) return CanonPath(abspath);
for (auto & i : *allowedPaths) {
if (isDirOrInDir(abspath, i)) {
found = true;
@ -617,7 +647,7 @@ SourcePath EvalState::checkSourcePath(const SourcePath & path_)
/* Resolve symlinks. */
debug("checking access to '%s'", abspath);
SourcePath path = CanonPath(canonPath(abspath, true));
SourcePath path = rootPath(CanonPath(canonPath(abspath, true)));
for (auto & i : *allowedPaths) {
if (isDirOrInDir(path.path.abs(), i)) {
@ -630,31 +660,47 @@ SourcePath EvalState::checkSourcePath(const SourcePath & path_)
}
bool isAllowedURI(std::string_view uri, const Strings & allowedUris)
{
/* 'uri' should be equal to a prefix, or in a subdirectory of a
prefix. Thus, the prefix https://github.co does not permit
access to https://github.com. */
for (auto & prefix : allowedUris) {
if (uri == prefix
// Allow access to subdirectories of the prefix.
|| (uri.size() > prefix.size()
&& prefix.size() > 0
&& hasPrefix(uri, prefix)
&& (
// Allow access to subdirectories of the prefix.
prefix[prefix.size() - 1] == '/'
|| uri[prefix.size()] == '/'
// Allow access to whole schemes
|| isJustSchemePrefix(prefix)
)
))
return true;
}
return false;
}
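A hypothetical test sketch (not part of this commit) exercising the rules implemented above, using the `isAllowedURI` declaration that this commit exposes in eval.hh:

#include <cassert>
#include "eval.hh"

// Access is granted when the URI equals a prefix, is a '/'-separated subpath
// of a prefix, or the prefix is a bare scheme such as "ssh:".
void testAllowedUris()
{
    nix::Strings allowed = { "https://github.com/NixOS", "ssh:" };

    assert(nix::isAllowedURI("https://github.com/NixOS", allowed));              // equal to a prefix
    assert(nix::isAllowedURI("https://github.com/NixOS/patchelf.git", allowed)); // subpath of a prefix
    assert(!nix::isAllowedURI("https://github.com/NixOS-foo", allowed));         // prefix match, but not on a '/' boundary
    assert(nix::isAllowedURI("ssh://git@example.org/repo.git", allowed));        // whole-scheme prefix
}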
void EvalState::checkURI(const std::string & uri)
{
if (!evalSettings.restrictEval) return;
/* 'uri' should be equal to a prefix, or in a subdirectory of a
prefix. Thus, the prefix https://github.co does not permit
access to https://github.com. Note: this allows 'http://' and
'https://' as prefixes for any http/https URI. */
for (auto & prefix : evalSettings.allowedUris.get())
if (uri == prefix ||
(uri.size() > prefix.size()
&& prefix.size() > 0
&& hasPrefix(uri, prefix)
&& (prefix[prefix.size() - 1] == '/' || uri[prefix.size()] == '/')))
return;
if (isAllowedURI(uri, evalSettings.allowedUris.get())) return;
/* If the URI is a path, then check it against allowedPaths as
well. */
if (hasPrefix(uri, "/")) {
checkSourcePath(CanonPath(uri));
checkSourcePath(rootPath(CanonPath(uri)));
return;
}
if (hasPrefix(uri, "file://")) {
checkSourcePath(CanonPath(std::string(uri, 7)));
checkSourcePath(rootPath(CanonPath(std::string(uri, 7))));
return;
}
@ -703,6 +749,23 @@ void EvalState::addConstant(const std::string & name, Value * v, Constant info)
}
void PrimOp::check()
{
if (arity > maxPrimOpArity) {
throw Error("primop arity must not exceed %1%", maxPrimOpArity);
}
}
void Value::mkPrimOp(PrimOp * p)
{
p->check();
clearValue();
internalType = tPrimOp;
primOp = p;
}
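A hypothetical primop registration (not part of this commit) that the new `PrimOp::check()` would accept, assuming the usual `RegisterPrimOp` pattern from primops.cc; declaring an arity above `maxPrimOpArity` (8, added to eval.hh further down) now throws at registration time instead of overflowing the fixed-size argument arrays used by callers.

static RegisterPrimOp primop_exampleAdd({
    .name = "__exampleAdd",
    .args = {"a", "b"},
    .arity = 2,   // must not exceed maxPrimOpArity, or check() throws
    .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) {
        auto a = state.forceInt(*args[0], pos, "while evaluating the first argument of exampleAdd");
        auto b = state.forceInt(*args[1], pos, "while evaluating the second argument of exampleAdd");
        v.mkInt(a + b);
    },
});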
Value * EvalState::addPrimOp(PrimOp && primOp)
{
/* Hack to make constants lazy: turn them into a application of
@ -950,7 +1013,7 @@ void Value::mkStringMove(const char * s, const NixStringContext & context)
void Value::mkPath(const SourcePath & path)
{
mkPath(makeImmutableString(path.path.abs()));
mkPath(&*path.accessor, makeImmutableString(path.path.abs()));
}
@ -1035,7 +1098,7 @@ std::string EvalState::mkOutputStringRaw(
/* In practice, this is testing for the case of CA derivations, or
dynamic derivations. */
return optStaticOutputPath
? store->printStorePath(*std::move(optStaticOutputPath))
? store->printStorePath(std::move(*optStaticOutputPath))
/* Downstream we would substitute this for an actual path once
we build the floating CA derivation */
: DownstreamPlaceholder::fromSingleDerivedPathBuilt(b, xpSettings).render();
@ -1165,24 +1228,6 @@ void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial
if (!e)
e = parseExprFromFile(checkSourcePath(resolvedPath));
cacheFile(path, resolvedPath, e, v, mustBeTrivial);
}
void EvalState::resetFileCache()
{
fileEvalCache.clear();
fileParseCache.clear();
}
void EvalState::cacheFile(
const SourcePath & path,
const SourcePath & resolvedPath,
Expr * e,
Value & v,
bool mustBeTrivial)
{
fileParseCache[resolvedPath] = e;
try {
@ -1211,6 +1256,13 @@ void EvalState::cacheFile(
}
void EvalState::resetFileCache()
{
fileEvalCache.clear();
fileParseCache.clear();
}
void EvalState::eval(Expr * e, Value & v)
{
e->eval(*this, baseEnv, v);
@ -1343,7 +1395,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
if (nameVal.type() == nNull)
continue;
state.forceStringNoCtx(nameVal, i.pos, "while evaluating the name of a dynamic attribute");
auto nameSym = state.symbols.create(nameVal.string.s);
auto nameSym = state.symbols.create(nameVal.string_view());
Bindings::iterator j = v.attrs->find(nameSym);
if (j != v.attrs->end())
state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow<EvalError>();
@ -1740,6 +1792,12 @@ void ExprCall::eval(EvalState & state, Env & env, Value & v)
Value vFun;
fun->eval(state, env, vFun);
// Empirical arity of Nixpkgs lambdas by regex e.g. ([a-zA-Z]+:(\s|(/\*.*\/)|(#.*\n))*){5}
// 2: over 4000
// 3: about 300
// 4: about 60
// 5: under 10
// This excluded attrset lambdas (`{...}:`). Contributions of mixed lambdas appear insignificant at ~150 total.
Value * vArgs[args.size()];
for (size_t i = 0; i < args.size(); ++i)
vArgs[i] = args[i]->maybeThunk(state, env);
@ -2037,7 +2095,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
else if (firstType == nPath) {
if (!context.empty())
state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow<EvalError>();
v.mkPath(CanonPath(canonPath(str())));
v.mkPath(state.rootPath(CanonPath(canonPath(str()))));
} else
v.mkStringMove(c_str(), context);
}
@ -2155,7 +2213,7 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string
forceValue(v, pos);
if (v.type() != nString)
error("value is %1% while a string was expected", showType(v)).debugThrow<TypeError>();
return v.string.s;
return v.string_view();
} catch (Error & e) {
e.addTrace(positions[pos], errorCtx);
throw;
@ -2182,8 +2240,8 @@ std::string_view EvalState::forceString(Value & v, NixStringContext & context, c
std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx)
{
auto s = forceString(v, pos, errorCtx);
if (v.string.context) {
error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string.s, v.string.context[0]).withTrace(pos, errorCtx).debugThrow<EvalError>();
if (v.context()) {
error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow<EvalError>();
}
return s;
}
@ -2196,7 +2254,7 @@ bool EvalState::isDerivation(Value & v)
if (i == v.attrs->end()) return false;
forceValue(*i->value, i->pos);
if (i->value->type() != nString) return false;
return strcmp(i->value->string.s, "derivation") == 0;
return i->value->string_view().compare("derivation") == 0;
}
@ -2228,7 +2286,7 @@ BackedStringView EvalState::coerceToString(
if (v.type() == nString) {
copyContext(v, context);
return std::string_view(v.string.s);
return v.string_view();
}
if (v.type() == nPath) {
@ -2236,7 +2294,7 @@ BackedStringView EvalState::coerceToString(
!canonicalizePath && !copyToStore
? // FIXME: hack to preserve path literals that end in a
// slash, as in /foo/${x}.
v._path
v._path.path
: copyToStore
? store->printStorePath(copyPathToStore(context, v.path()))
: std::string(v.path().path.abs());
@ -2290,7 +2348,7 @@ BackedStringView EvalState::coerceToString(
&& (!v2->isList() || v2->listSize() != 0))
result += " ";
}
return std::move(result);
return result;
}
}
@ -2310,7 +2368,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat
auto dstPath = i != srcToStore.end()
? i->second
: [&]() {
auto dstPath = path.fetchToStore(store, path.baseName(), nullptr, repair);
auto dstPath = path.fetchToStore(store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
allowPath(dstPath);
srcToStore.insert_or_assign(path, dstPath);
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
@ -2326,10 +2384,34 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat
SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx)
{
try {
forceValue(v, pos);
} catch (Error & e) {
e.addTrace(positions[pos], errorCtx);
throw;
}
/* Handle path values directly, without coercing to a string. */
if (v.type() == nPath)
return v.path();
/* Similarly, handle __toString where the result may be a path
value. */
if (v.type() == nAttrs) {
auto i = v.attrs->find(sToString);
if (i != v.attrs->end()) {
Value v1;
callFunction(*i->value, v, v1, pos);
return coerceToPath(pos, v1, context, errorCtx);
}
}
/* Any other value should be coercable to a string, interpreted
relative to the root filesystem. */
auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
if (path == "" || path[0] != '/')
error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow<EvalError>();
return CanonPath(path);
return rootPath(CanonPath(path));
}
@ -2426,10 +2508,13 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
return v1.boolean == v2.boolean;
case nString:
return strcmp(v1.string.s, v2.string.s) == 0;
return v1.string_view().compare(v2.string_view()) == 0;
case nPath:
return strcmp(v1._path, v2._path) == 0;
return
// FIXME: compare accessors by their fingerprint.
v1._path.accessor == v2._path.accessor
&& strcmp(v1._path.path, v2._path.path) == 0;
case nNull:
return true;
@ -2477,10 +2562,37 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
}
}
void EvalState::printStats()
bool EvalState::fullGC() {
#if HAVE_BOEHMGC
GC_gcollect();
// Check that it ran. We might replace this with a version that uses more
// of the boehm API to get this reliably, at a maintenance cost.
// We use a 1K margin because technically this has a race condition, but we
// probably won't encounter it in practice, because the CLI isn't concurrent
// like that.
return GC_get_bytes_since_gc() < 1024;
#else
return false;
#endif
}
void EvalState::maybePrintStats()
{
bool showStats = getEnv("NIX_SHOW_STATS").value_or("0") != "0";
if (showStats) {
// Make the final heap size more deterministic.
#if HAVE_BOEHMGC
if (!fullGC()) {
warn("failed to perform a full GC before reporting stats");
}
#endif
printStatistics();
}
}
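A minimal sketch (not part of this commit) of how an embedding application or test might force the statistics output that `maybePrintStats()` gates on `NIX_SHOW_STATS`; setting `NIX_SHOW_STATS_PATH` to `-` sends the JSON report to stderr, as handled in the code below.

#include <cstdlib>

// Hypothetical helper: dump evaluator statistics on demand.
void dumpEvalStats(nix::EvalState & state)
{
    setenv("NIX_SHOW_STATS", "1", 1);       // read by maybePrintStats() at call time
    setenv("NIX_SHOW_STATS_PATH", "-", 1);  // "-" writes the JSON report to stderr
    state.maybePrintStats();                // full GC first (when built with Boehm), then printStatistics()
}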
void EvalState::printStatistics()
{
struct rusage buf;
getrusage(RUSAGE_SELF, &buf);
float cpuTime = buf.ru_utime.tv_sec + ((float) buf.ru_utime.tv_usec / 1000000);
@ -2494,105 +2606,105 @@ void EvalState::printStats()
GC_word heapSize, totalBytes;
GC_get_heap_usage_safe(&heapSize, 0, 0, 0, &totalBytes);
#endif
if (showStats) {
auto outPath = getEnv("NIX_SHOW_STATS_PATH").value_or("-");
std::fstream fs;
if (outPath != "-")
fs.open(outPath, std::fstream::out);
json topObj = json::object();
topObj["cpuTime"] = cpuTime;
topObj["envs"] = {
{"number", nrEnvs},
{"elements", nrValuesInEnvs},
{"bytes", bEnvs},
};
topObj["list"] = {
{"elements", nrListElems},
{"bytes", bLists},
{"concats", nrListConcats},
};
topObj["values"] = {
{"number", nrValues},
{"bytes", bValues},
};
topObj["symbols"] = {
{"number", symbols.size()},
{"bytes", symbols.totalSize()},
};
topObj["sets"] = {
{"number", nrAttrsets},
{"bytes", bAttrsets},
{"elements", nrAttrsInAttrsets},
};
topObj["sizes"] = {
{"Env", sizeof(Env)},
{"Value", sizeof(Value)},
{"Bindings", sizeof(Bindings)},
{"Attr", sizeof(Attr)},
};
topObj["nrOpUpdates"] = nrOpUpdates;
topObj["nrOpUpdateValuesCopied"] = nrOpUpdateValuesCopied;
topObj["nrThunks"] = nrThunks;
topObj["nrAvoided"] = nrAvoided;
topObj["nrLookups"] = nrLookups;
topObj["nrPrimOpCalls"] = nrPrimOpCalls;
topObj["nrFunctionCalls"] = nrFunctionCalls;
auto outPath = getEnv("NIX_SHOW_STATS_PATH").value_or("-");
std::fstream fs;
if (outPath != "-")
fs.open(outPath, std::fstream::out);
json topObj = json::object();
topObj["cpuTime"] = cpuTime;
topObj["envs"] = {
{"number", nrEnvs},
{"elements", nrValuesInEnvs},
{"bytes", bEnvs},
};
topObj["nrExprs"] = Expr::nrExprs;
topObj["list"] = {
{"elements", nrListElems},
{"bytes", bLists},
{"concats", nrListConcats},
};
topObj["values"] = {
{"number", nrValues},
{"bytes", bValues},
};
topObj["symbols"] = {
{"number", symbols.size()},
{"bytes", symbols.totalSize()},
};
topObj["sets"] = {
{"number", nrAttrsets},
{"bytes", bAttrsets},
{"elements", nrAttrsInAttrsets},
};
topObj["sizes"] = {
{"Env", sizeof(Env)},
{"Value", sizeof(Value)},
{"Bindings", sizeof(Bindings)},
{"Attr", sizeof(Attr)},
};
topObj["nrOpUpdates"] = nrOpUpdates;
topObj["nrOpUpdateValuesCopied"] = nrOpUpdateValuesCopied;
topObj["nrThunks"] = nrThunks;
topObj["nrAvoided"] = nrAvoided;
topObj["nrLookups"] = nrLookups;
topObj["nrPrimOpCalls"] = nrPrimOpCalls;
topObj["nrFunctionCalls"] = nrFunctionCalls;
#if HAVE_BOEHMGC
topObj["gc"] = {
{"heapSize", heapSize},
{"totalBytes", totalBytes},
};
topObj["gc"] = {
{"heapSize", heapSize},
{"totalBytes", totalBytes},
};
#endif
if (countCalls) {
topObj["primops"] = primOpCalls;
{
auto& list = topObj["functions"];
list = json::array();
for (auto & [fun, count] : functionCalls) {
json obj = json::object();
if (fun->name)
obj["name"] = (std::string_view) symbols[fun->name];
else
obj["name"] = nullptr;
if (auto pos = positions[fun->pos]) {
if (auto path = std::get_if<SourcePath>(&pos.origin))
obj["file"] = path->to_string();
obj["line"] = pos.line;
obj["column"] = pos.column;
}
obj["count"] = count;
list.push_back(obj);
}
}
{
auto list = topObj["attributes"];
list = json::array();
for (auto & i : attrSelects) {
json obj = json::object();
if (auto pos = positions[i.first]) {
if (auto path = std::get_if<SourcePath>(&pos.origin))
obj["file"] = path->to_string();
obj["line"] = pos.line;
obj["column"] = pos.column;
}
obj["count"] = i.second;
list.push_back(obj);
if (countCalls) {
topObj["primops"] = primOpCalls;
{
auto& list = topObj["functions"];
list = json::array();
for (auto & [fun, count] : functionCalls) {
json obj = json::object();
if (fun->name)
obj["name"] = (std::string_view) symbols[fun->name];
else
obj["name"] = nullptr;
if (auto pos = positions[fun->pos]) {
if (auto path = std::get_if<SourcePath>(&pos.origin))
obj["file"] = path->to_string();
obj["line"] = pos.line;
obj["column"] = pos.column;
}
obj["count"] = count;
list.push_back(obj);
}
}
{
auto list = topObj["attributes"];
list = json::array();
for (auto & i : attrSelects) {
json obj = json::object();
if (auto pos = positions[i.first]) {
if (auto path = std::get_if<SourcePath>(&pos.origin))
obj["file"] = path->to_string();
obj["line"] = pos.line;
obj["column"] = pos.column;
}
obj["count"] = i.second;
list.push_back(obj);
}
}
}
if (getEnv("NIX_SHOW_SYMBOLS").value_or("0") != "0") {
// XXX: overrides earlier assignment
topObj["symbols"] = json::array();
auto &list = topObj["symbols"];
symbols.dump([&](const std::string & s) { list.emplace_back(s); });
}
if (outPath == "-") {
std::cerr << topObj.dump(2) << std::endl;
} else {
fs << topObj.dump(2) << std::endl;
}
if (getEnv("NIX_SHOW_SYMBOLS").value_or("0") != "0") {
// XXX: overrides earlier assignment
topObj["symbols"] = json::array();
auto &list = topObj["symbols"];
symbols.dump([&](const std::string & s) { list.emplace_back(s); });
}
if (outPath == "-") {
std::cerr << topObj.dump(2) << std::endl;
} else {
fs << topObj.dump(2) << std::endl;
}
}


@ -18,12 +18,20 @@
namespace nix {
/**
* We put a limit on primop arity because it lets us use a fixed size array on
* the stack. 8 is already an impractical number of arguments. Use an attrset
* argument for such overly complicated functions.
*/
constexpr size_t maxPrimOpArity = 8;
class Store;
class EvalState;
class StorePath;
struct SingleDerivedPath;
enum RepairFlag : bool;
struct FSInputAccessor;
struct MemoryInputAccessor;
/**
@ -69,6 +77,12 @@ struct PrimOp
* Optional experimental for this to be gated on.
*/
std::optional<ExperimentalFeature> experimentalFeature;
/**
* Validity check to be performed by functions that introduce primops,
* such as RegisterPrimOp() and Value::mkPrimOp().
*/
void check();
};
/**
@ -211,8 +225,26 @@ public:
Bindings emptyBindings;
/**
* The accessor for the root filesystem.
*/
const ref<FSInputAccessor> rootFS;
/**
* The in-memory filesystem for <nix/...> paths.
*/
const ref<MemoryInputAccessor> corepkgsFS;
/**
* In-memory filesystem for internal, non-user-callable Nix
* expressions like call-flake.nix.
*/
const ref<MemoryInputAccessor> internalFS;
const SourcePath derivationInternal;
const SourcePath callFlakeInternal;
/**
* Store used to materialise .drv files.
*/
@ -223,7 +255,6 @@ public:
*/
const ref<Store> buildStore;
RootValue vCallFlake = nullptr;
RootValue vImportedDrvToDerivation = nullptr;
/**
@ -405,16 +436,6 @@ public:
*/
void evalFile(const SourcePath & path, Value & v, bool mustBeTrivial = false);
/**
* Like `evalFile`, but with an already parsed expression.
*/
void cacheFile(
const SourcePath & path,
const SourcePath & resolvedPath,
Expr * e,
Value & v,
bool mustBeTrivial = false);
void resetFileCache();
/**
@ -424,7 +445,7 @@ public:
SourcePath findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);
/**
* Try to resolve a search path value (not the optinal key part)
* Try to resolve a search path value (not the optional key part)
*
* If the specified search path element is a URI, download it.
*
@ -709,9 +730,25 @@ public:
void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos, std::string_view errorCtx);
/**
* Print statistics.
* Print statistics, if enabled.
*
* Performs a full memory GC before printing the statistics, so that the
* GC statistics are more accurate.
*/
void printStats();
void maybePrintStats();
/**
* Print statistics, unconditionally, cheaply, without performing a GC first.
*/
void printStatistics();
/**
* Perform a full memory garbage collection - not incremental.
*
* @return true if Nix was built with GC and a GC was performed, false if not.
* The return value is currently not thread safe - just the return value.
*/
bool fullGC();
/**
* Realise the given context, and return a mapping from the placeholders
@ -802,7 +839,12 @@ std::string showType(const Value & v);
/**
* If `path` refers to a directory, then append "/default.nix".
*/
SourcePath resolveExprPath(const SourcePath & path);
SourcePath resolveExprPath(SourcePath path);
/**
* Whether a URI is allowed, assuming restrictEval is enabled
*/
bool isAllowedURI(std::string_view uri, const Strings & allowedPaths);
struct InvalidPathError : EvalError
{
@ -813,8 +855,6 @@ struct InvalidPathError : EvalError
#endif
};
static const std::string corepkgsPrefix{"/__corepkgs__/"};
template<class ErrorType>
void ErrorBuilder::debugThrow()
{


@ -1,6 +1,7 @@
#include "flake.hh"
#include "users.hh"
#include "globals.hh"
#include "fetch-settings.hh"
#include "flake.hh"
#include <nlohmann/json.hpp>


@ -1,3 +1,4 @@
#include "terminal.hh"
#include "flake.hh"
#include "eval.hh"
#include "eval-settings.hh"
@ -8,6 +9,7 @@
#include "fetchers.hh"
#include "finally.hh"
#include "fetch-settings.hh"
#include "value-to-json.hh"
namespace nix {
@ -15,7 +17,7 @@ using namespace flake;
namespace flake {
typedef std::pair<fetchers::Tree, FlakeRef> FetchedFlake;
typedef std::pair<StorePath, FlakeRef> FetchedFlake;
typedef std::vector<std::pair<FlakeRef, FetchedFlake>> FlakeCache;
static std::optional<FetchedFlake> lookupInFlakeCache(
@ -34,7 +36,7 @@ static std::optional<FetchedFlake> lookupInFlakeCache(
return std::nullopt;
}
static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
static std::tuple<StorePath, FlakeRef, FlakeRef> fetchOrSubstituteTree(
EvalState & state,
const FlakeRef & originalRef,
bool allowLookup,
@ -61,16 +63,16 @@ static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
flakeCache.push_back({originalRef, *fetched});
}
auto [tree, lockedRef] = *fetched;
auto [storePath, lockedRef] = *fetched;
debug("got tree '%s' from '%s'",
state.store->printStorePath(tree.storePath), lockedRef);
state.store->printStorePath(storePath), lockedRef);
state.allowPath(tree.storePath);
state.allowPath(storePath);
assert(!originalRef.input.getNarHash() || tree.storePath == originalRef.input.computeStorePath(*state.store));
assert(!originalRef.input.getNarHash() || storePath == originalRef.input.computeStorePath(*state.store));
return {std::move(tree), resolvedRef, lockedRef};
return {std::move(storePath), resolvedRef, lockedRef};
}
static void forceTrivialValue(EvalState & state, Value & value, const PosIdx pos)
@ -113,7 +115,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
try {
if (attr.name == sUrl) {
expectType(state, nString, *attr.value, attr.pos);
url = attr.value->string.s;
url = attr.value->string_view();
attrs.emplace("url", *url);
} else if (attr.name == sFlake) {
expectType(state, nBool, *attr.value, attr.pos);
@ -122,7 +124,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, baseDir, lockRootPath);
} else if (attr.name == sFollows) {
expectType(state, nString, *attr.value, attr.pos);
auto follows(parseInputPath(attr.value->string.s));
auto follows(parseInputPath(attr.value->c_str()));
follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end());
input.follows = follows;
} else {
@ -131,7 +133,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
#pragma GCC diagnostic ignored "-Wswitch-enum"
switch (attr.value->type()) {
case nString:
attrs.emplace(state.symbols[attr.name], attr.value->string.s);
attrs.emplace(state.symbols[attr.name], attr.value->c_str());
break;
case nBool:
attrs.emplace(state.symbols[attr.name], Explicit<bool> { attr.value->boolean });
@ -140,8 +142,13 @@ static FlakeInput parseFlakeInput(EvalState & state,
attrs.emplace(state.symbols[attr.name], (long unsigned int)attr.value->integer);
break;
default:
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
state.symbols[attr.name], showType(*attr.value));
if (attr.name == state.symbols.create("publicKeys")) {
experimentalFeatureSettings.require(Xp::VerifiedFetches);
NixStringContext emptyContext = {};
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, emptyContext).dump());
} else
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
state.symbols[attr.name], showType(*attr.value));
}
#pragma GCC diagnostic pop
}
@ -202,34 +209,34 @@ static Flake getFlake(
FlakeCache & flakeCache,
InputPath lockRootPath)
{
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, originalRef, allowLookup, flakeCache);
// Guard against symlink attacks.
auto flakeDir = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir, true);
auto flakeDir = canonPath(state.store->toRealPath(storePath) + "/" + lockedRef.subdir, true);
auto flakeFile = canonPath(flakeDir + "/flake.nix", true);
if (!isInDir(flakeFile, sourceInfo.actualPath))
if (!isInDir(flakeFile, state.store->toRealPath(storePath)))
throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
lockedRef, state.store->printStorePath(sourceInfo.storePath));
lockedRef, state.store->printStorePath(storePath));
Flake flake {
.originalRef = originalRef,
.resolvedRef = resolvedRef,
.lockedRef = lockedRef,
.sourceInfo = std::make_shared<fetchers::Tree>(std::move(sourceInfo))
.storePath = storePath,
};
if (!pathExists(flakeFile))
throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir);
Value vInfo;
state.evalFile(CanonPath(flakeFile), vInfo, true); // FIXME: symlink attack
state.evalFile(state.rootPath(CanonPath(flakeFile)), vInfo, true); // FIXME: symlink attack
expectType(state, nAttrs, vInfo, state.positions.add({CanonPath(flakeFile)}, 1, 1));
expectType(state, nAttrs, vInfo, state.positions.add({state.rootPath(CanonPath(flakeFile))}, 1, 1));
if (auto description = vInfo.attrs->get(state.sDescription)) {
expectType(state, nString, *description->value, description->pos);
flake.description = description->value->string.s;
flake.description = description->value->c_str();
}
auto sInputs = state.symbols.create("inputs");
@ -346,7 +353,7 @@ LockedFlake lockFlake(
// FIXME: symlink attack
auto oldLockFile = LockFile::read(
lockFlags.referenceLockFilePath.value_or(
flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir + "/flake.lock"));
state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir + "/flake.lock"));
debug("old lock file: %s", oldLockFile);
@ -447,8 +454,8 @@ LockedFlake lockFlake(
assert(input.ref);
/* Do we have an entry in the existing lock file? And we
don't have a --update-input flag for this input? */
/* Do we have an entry in the existing lock file?
And the input is not in updateInputs? */
std::shared_ptr<LockedNode> oldLock;
updatesUsed.insert(inputPath);
@ -472,9 +479,8 @@ LockedFlake lockFlake(
node->inputs.insert_or_assign(id, childNode);
/* If we have an --update-input flag for an input
of this input, then we must fetch the flake to
update it. */
/* If we have this input in updateInputs, then we
must fetch the flake to update it. */
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
auto mustRefetch =
@ -574,7 +580,7 @@ LockedFlake lockFlake(
oldLock
? std::dynamic_pointer_cast<const Node>(oldLock)
: LockFile::read(
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
state.store->toRealPath(inputFlake.storePath) + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
oldLock ? lockRootPath : inputPath,
localPath,
false);
@ -598,7 +604,7 @@ LockedFlake lockFlake(
};
// Bring in the current ref for relative path resolution if we have it
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true);
auto parentPath = canonPath(state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir, true);
computeLocks(
flake.inputs,
@ -616,19 +622,14 @@ LockedFlake lockFlake(
for (auto & i : lockFlags.inputUpdates)
if (!updatesUsed.count(i))
warn("the flag '--update-input %s' does not match any input", printInputPath(i));
warn("'%s' does not match any input of this flake", printInputPath(i));
/* Check 'follows' inputs. */
newLockFile.check();
debug("new lock file: %s", newLockFile);
auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
auto sourcePath = topRef.input.getSourcePath();
auto outputLockFilePath = sourcePath ? std::optional{*sourcePath + "/" + relPath} : std::nullopt;
if (lockFlags.outputLockFilePath) {
outputLockFilePath = lockFlags.outputLockFilePath;
}
/* Check whether we need to / can write the new lock file. */
if (newLockFile != oldLockFile || lockFlags.outputLockFilePath) {
@ -636,7 +637,7 @@ LockedFlake lockFlake(
auto diff = LockFile::diff(oldLockFile, newLockFile);
if (lockFlags.writeLockFile) {
if (outputLockFilePath) {
if (sourcePath || lockFlags.outputLockFilePath) {
if (auto unlockedInput = newLockFile.isUnlocked()) {
if (fetchSettings.warnDirty)
warn("will not write lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
@ -644,41 +645,48 @@ LockedFlake lockFlake(
if (!lockFlags.updateLockFile)
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
bool lockFileExists = pathExists(*outputLockFilePath);
auto newLockFileS = fmt("%s\n", newLockFile);
if (lockFlags.outputLockFilePath) {
if (lockFlags.commitLockFile)
throw Error("'--commit-lock-file' and '--output-lock-file' are incompatible");
writeFile(*lockFlags.outputLockFilePath, newLockFileS);
} else {
auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
auto outputLockFilePath = *sourcePath + "/" + relPath;
bool lockFileExists = pathExists(outputLockFilePath);
if (lockFileExists) {
auto s = chomp(diff);
if (s.empty())
warn("updating lock file '%s'", *outputLockFilePath);
else
warn("updating lock file '%s':\n%s", *outputLockFilePath, s);
} else
warn("creating lock file '%s'", *outputLockFilePath);
if (lockFileExists) {
if (s.empty())
warn("updating lock file '%s'", outputLockFilePath);
else
warn("updating lock file '%s':\n%s", outputLockFilePath, s);
} else
warn("creating lock file '%s': \n%s", outputLockFilePath, s);
newLockFile.write(*outputLockFilePath);
std::optional<std::string> commitMessage = std::nullopt;
std::optional<std::string> commitMessage = std::nullopt;
if (lockFlags.commitLockFile) {
if (lockFlags.outputLockFilePath) {
throw Error("--commit-lock-file and --output-lock-file are currently incompatible");
}
std::string cm;
if (lockFlags.commitLockFile) {
std::string cm;
cm = fetchSettings.commitLockFileSummary.get();
cm = fetchSettings.commitLockFileSummary.get();
if (cm == "") {
cm = fmt("%s: %s", relPath, lockFileExists ? "Update" : "Add");
if (cm == "") {
cm = fmt("%s: %s", relPath, lockFileExists ? "Update" : "Add");
}
cm += "\n\nFlake lock file updates:\n\n";
cm += filterANSIEscapes(diff, true);
commitMessage = cm;
}
cm += "\n\nFlake lock file updates:\n\n";
cm += filterANSIEscapes(diff, true);
commitMessage = cm;
topRef.input.putFile(
CanonPath((topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock"),
newLockFileS, commitMessage);
}
topRef.input.markChangedFile(
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
commitMessage);
/* Rewriting the lockfile changed the top-level
repo, so we should re-read it. FIXME: we could
also just clear the 'rev' field... */
@ -729,7 +737,7 @@ void callFlake(EvalState & state,
emitTreeAttrs(
state,
*lockedFlake.flake.sourceInfo,
lockedFlake.flake.storePath,
lockedFlake.flake.lockedRef.input,
*vRootSrc,
false,
@ -737,14 +745,10 @@ void callFlake(EvalState & state,
vRootSubdir->mkString(lockedFlake.flake.lockedRef.subdir);
if (!state.vCallFlake) {
state.vCallFlake = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "call-flake.nix.gen.hh"
, CanonPath::root), **state.vCallFlake);
}
auto vCallFlake = state.allocValue();
state.evalFile(state.callFlakeInternal, *vCallFlake);
state.callFunction(**state.vCallFlake, *vLocks, *vTmp1, noPos);
state.callFunction(*vCallFlake, *vLocks, *vTmp1, noPos);
state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos);
state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos);
}
@ -850,7 +854,7 @@ static void prim_flakeRefToString(
Explicit<bool> { attr.value->boolean });
} else if (t == nString) {
attrs.emplace(state.symbols[attr.name],
std::string(attr.value->str()));
std::string(attr.value->string_view()));
} else {
state.error(
"flake reference attribute sets may only contain integers, Booleans, "
@ -893,7 +897,7 @@ Fingerprint LockedFlake::getFingerprint() const
// flake.sourceInfo.storePath for the fingerprint.
return hashString(htSHA256,
fmt("%s;%s;%d;%d;%s",
flake.sourceInfo->storePath.to_string(),
flake.storePath.to_string(),
flake.lockedRef.subdir,
flake.lockedRef.input.getRevCount().value_or(0),
flake.lockedRef.input.getLastModified().value_or(0),

View file

@ -10,8 +10,6 @@ namespace nix {
class EvalState;
namespace fetchers { struct Tree; }
namespace flake {
struct FlakeInput;
@ -84,7 +82,7 @@ struct Flake
*/
bool forceDirty = false;
std::optional<std::string> description;
std::shared_ptr<const fetchers::Tree> sourceInfo;
StorePath storePath;
FlakeInputs inputs;
/**
* 'nixConfig' attribute
@ -193,7 +191,7 @@ void callFlake(
void emitTreeAttrs(
EvalState & state,
const fetchers::Tree & tree,
const StorePath & storePath,
const fetchers::Input & input,
Value & v,
bool emptyRevFallback = false,

View file

@ -69,32 +69,130 @@ std::optional<FlakeRef> maybeParseFlakeRef(
}
}
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
const std::string & url,
const std::optional<Path> & baseDir,
bool allowMissing,
bool isFlake)
{
using namespace fetchers;
std::string path = url;
std::string fragment = "";
std::map<std::string, std::string> query;
auto pathEnd = url.find_first_of("#?");
auto fragmentStart = pathEnd;
if (pathEnd != std::string::npos && url[pathEnd] == '?') {
fragmentStart = url.find("#");
}
if (pathEnd != std::string::npos) {
path = url.substr(0, pathEnd);
}
if (fragmentStart != std::string::npos) {
fragment = percentDecode(url.substr(fragmentStart+1));
}
if (pathEnd != std::string::npos && fragmentStart != std::string::npos) {
query = decodeQuery(url.substr(pathEnd+1, fragmentStart-pathEnd-1));
}
static std::string fnRegex = "[0-9a-zA-Z-._~!$&'\"()*+,;=]+";
if (baseDir) {
/* Check if 'url' is a path (either absolute or relative
to 'baseDir'). If so, search upward to the root of the
repo (i.e. the directory containing .git). */
static std::regex pathUrlRegex(
"(/?" + fnRegex + "(?:/" + fnRegex + ")*/?)"
+ "(?:\\?(" + queryRegex + "))?"
+ "(?:#(" + queryRegex + "))?",
std::regex::ECMAScript);
path = absPath(path, baseDir);
if (isFlake) {
if (!allowMissing && !pathExists(path + "/flake.nix")){
notice("path '%s' does not contain a 'flake.nix', searching up",path);
// Save device to detect filesystem boundary
dev_t device = lstat(path).st_dev;
bool found = false;
while (path != "/") {
if (pathExists(path + "/flake.nix")) {
found = true;
break;
} else if (pathExists(path + "/.git"))
throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path);
else {
if (lstat(path).st_dev != device)
throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path);
}
path = dirOf(path);
}
if (!found)
throw BadURL("could not find a flake.nix file");
}
if (!S_ISDIR(lstat(path).st_mode))
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
if (!allowMissing && !pathExists(path + "/flake.nix"))
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
auto flakeRoot = path;
std::string subdir;
while (flakeRoot != "/") {
if (pathExists(flakeRoot + "/.git")) {
auto base = std::string("git+file://") + flakeRoot;
auto parsedURL = ParsedURL{
.url = base, // FIXME
.base = base,
.scheme = "git+file",
.authority = "",
.path = flakeRoot,
.query = query,
};
if (subdir != "") {
if (parsedURL.query.count("dir"))
throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
parsedURL.query.insert_or_assign("dir", subdir);
}
if (pathExists(flakeRoot + "/.git/shallow"))
parsedURL.query.insert_or_assign("shallow", "1");
return std::make_pair(
FlakeRef(fetchers::Input::fromURL(parsedURL), getOr(parsedURL.query, "dir", "")),
fragment);
}
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
flakeRoot = dirOf(flakeRoot);
}
}
} else {
if (!hasPrefix(path, "/"))
throw BadURL("flake reference '%s' is not an absolute path", url);
path = canonPath(path + "/" + getOr(query, "dir", ""));
}
fetchers::Attrs attrs;
attrs.insert_or_assign("type", "path");
attrs.insert_or_assign("path", path);
return std::make_pair(FlakeRef(fetchers::Input::fromAttrs(std::move(attrs)), ""), fragment);
};
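/* Example: with baseDir set and url = "./foo#hello", the code above splits
   the string into path = "./foo" and fragment = "hello", makes the path
   absolute relative to baseDir, and then searches upward for a 'flake.nix'
   (or a '.git' directory). */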
/* Check if 'url' is a flake ID. This is an abbreviated syntax for
'flake:<flake-id>?ref=<ref>&rev=<rev>'. */
std::optional<std::pair<FlakeRef, std::string>> parseFlakeIdRef(
const std::string & url,
bool isFlake
)
{
std::smatch match;
static std::regex flakeRegex(
"((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)"
+ "(?:#(" + queryRegex + "))?",
std::regex::ECMAScript);
std::smatch match;
/* Check if 'url' is a flake ID. This is an abbreviated syntax for
'flake:<flake-id>?ref=<ref>&rev=<rev>'. */
if (std::regex_match(url, match, flakeRegex)) {
auto parsedURL = ParsedURL{
.url = url,
@ -105,111 +203,53 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
};
return std::make_pair(
FlakeRef(Input::fromURL(parsedURL, isFlake), ""),
FlakeRef(fetchers::Input::fromURL(parsedURL, isFlake), ""),
percentDecode(match.str(6)));
}
else if (std::regex_match(url, match, pathUrlRegex)) {
std::string path = match[1];
std::string fragment = percentDecode(match.str(3));
return {};
}
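/* Example: "nixpkgs/nixos-23.05#hello" matches the flake-ID form above and is
   parsed as the flake ID "nixpkgs" with ref "nixos-23.05" and fragment
   "hello". */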
if (baseDir) {
/* Check if 'url' is a path (either absolute or relative
to 'baseDir'). If so, search upward to the root of the
repo (i.e. the directory containing .git). */
path = absPath(path, baseDir);
if (isFlake) {
if (!allowMissing && !pathExists(path + "/flake.nix")){
notice("path '%s' does not contain a 'flake.nix', searching up",path);
// Save device to detect filesystem boundary
dev_t device = lstat(path).st_dev;
bool found = false;
while (path != "/") {
if (pathExists(path + "/flake.nix")) {
found = true;
break;
} else if (pathExists(path + "/.git"))
throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path);
else {
if (lstat(path).st_dev != device)
throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path);
}
path = dirOf(path);
}
if (!found)
throw BadURL("could not find a flake.nix file");
}
if (!S_ISDIR(lstat(path).st_mode))
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
if (!allowMissing && !pathExists(path + "/flake.nix"))
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
auto flakeRoot = path;
std::string subdir;
while (flakeRoot != "/") {
if (pathExists(flakeRoot + "/.git")) {
auto base = std::string("git+file://") + flakeRoot;
auto parsedURL = ParsedURL{
.url = base, // FIXME
.base = base,
.scheme = "git+file",
.authority = "",
.path = flakeRoot,
.query = decodeQuery(match[2]),
};
if (subdir != "") {
if (parsedURL.query.count("dir"))
throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
parsedURL.query.insert_or_assign("dir", subdir);
}
if (pathExists(flakeRoot + "/.git/shallow"))
parsedURL.query.insert_or_assign("shallow", "1");
return std::make_pair(
FlakeRef(Input::fromURL(parsedURL, isFlake), getOr(parsedURL.query, "dir", "")),
fragment);
}
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
flakeRoot = dirOf(flakeRoot);
}
}
} else {
if (!hasPrefix(path, "/"))
throw BadURL("flake reference '%s' is not an absolute path", url);
auto query = decodeQuery(match[2]);
path = canonPath(path + "/" + getOr(query, "dir", ""));
}
fetchers::Attrs attrs;
attrs.insert_or_assign("type", "path");
attrs.insert_or_assign("path", path);
return std::make_pair(FlakeRef(Input::fromAttrs(std::move(attrs)), ""), fragment);
std::optional<std::pair<FlakeRef, std::string>> parseURLFlakeRef(
const std::string & url,
const std::optional<Path> & baseDir,
bool isFlake
)
{
ParsedURL parsedURL;
try {
parsedURL = parseURL(url);
} catch (BadURL &) {
return std::nullopt;
}
else {
auto parsedURL = parseURL(url);
std::string fragment;
std::swap(fragment, parsedURL.fragment);
std::string fragment;
std::swap(fragment, parsedURL.fragment);
auto input = Input::fromURL(parsedURL, isFlake);
input.parent = baseDir;
auto input = fetchers::Input::fromURL(parsedURL, isFlake);
input.parent = baseDir;
return std::make_pair(
FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")),
fragment);
return std::make_pair(
FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")),
fragment);
}
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
const std::string & url,
const std::optional<Path> & baseDir,
bool allowMissing,
bool isFlake)
{
using namespace fetchers;
std::smatch match;
if (auto res = parseFlakeIdRef(url, isFlake)) {
return *res;
} else if (auto res = parseURLFlakeRef(url, baseDir, isFlake)) {
return *res;
} else {
return parsePathFlakeRefWithFragment(url, baseDir, allowMissing, isFlake);
}
}
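/* Example of the dispatch order: "nixpkgs#hello" is handled by
   parseFlakeIdRef, "github:NixOS/nix" by parseURLFlakeRef, and "./." by
   parsePathFlakeRefWithFragment. */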
@ -232,10 +272,10 @@ FlakeRef FlakeRef::fromAttrs(const fetchers::Attrs & attrs)
fetchers::maybeGetStrAttr(attrs, "dir").value_or(""));
}
std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
std::pair<StorePath, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
{
auto [tree, lockedInput] = input.fetch(store);
return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)};
auto [storePath, lockedInput] = input.fetch(store);
return {std::move(storePath), FlakeRef(std::move(lockedInput), subdir)};
}
std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragmentAndExtendedOutputsSpec(
@ -249,4 +289,6 @@ std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragment
return {std::move(flakeRef), fragment, std::move(extendedOutputsSpec)};
}
std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript);
}

View file

@ -6,6 +6,7 @@
#include "fetchers.hh"
#include "outputs-spec.hh"
#include <regex>
#include <variant>
namespace nix {
@ -62,7 +63,7 @@ struct FlakeRef
static FlakeRef fromAttrs(const fetchers::Attrs & attrs);
std::pair<fetchers::Tree, FlakeRef> fetchTree(ref<Store> store) const;
std::pair<StorePath, FlakeRef> fetchTree(ref<Store> store) const;
};
std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef);
@ -91,5 +92,7 @@ std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragment
bool allowMissing = false,
bool isFlake = true);
const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
extern std::regex flakeIdRegex;
}

View file

@ -2,8 +2,10 @@
#include "store-api.hh"
#include "url-parts.hh"
#include <algorithm>
#include <iomanip>
#include <iterator>
#include <nlohmann/json.hpp>
namespace nix::flake {
@ -45,16 +47,26 @@ StorePath LockedNode::computeStorePath(Store & store) const
return lockedRef.input.computeStorePath(store);
}
std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
{
static std::shared_ptr<Node> doFind(const ref<Node>& root, const InputPath & path, std::vector<InputPath>& visited) {
auto pos = root;
auto found = std::find(visited.cbegin(), visited.cend(), path);
if(found != visited.end()) {
std::vector<std::string> cycle;
std::transform(found, visited.cend(), std::back_inserter(cycle), printInputPath);
cycle.push_back(printInputPath(path));
throw Error("follow cycle detected: [%s]", concatStringsSep(" -> ", cycle));
}
visited.push_back(path);
for (auto & elem : path) {
if (auto i = get(pos->inputs, elem)) {
if (auto node = std::get_if<0>(&*i))
pos = *node;
else if (auto follows = std::get_if<1>(&*i)) {
if (auto p = findInput(*follows))
if (auto p = doFind(root, *follows, visited))
pos = ref(p);
else
return {};
@ -66,6 +78,12 @@ std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
return pos;
}
std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
{
std::vector<InputPath> visited;
return doFind(root, path, visited);
}
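/* Example: mutually recursive 'follows' entries in a flake.nix, e.g.
     inputs.a.follows = "b";
     inputs.b.follows = "a";
   are now detected here and reported as a "follow cycle detected" error. */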
LockFile::LockFile(const nlohmann::json & json, const Path & path)
{
auto version = json.value("version", 0);
@ -196,12 +214,6 @@ std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
return stream;
}
void LockFile::write(const Path & path) const
{
createDirs(dirOf(path));
writeFile(path, fmt("%s\n", *this));
}
std::optional<FlakeRef> LockFile::isUnlocked() const
{
std::set<ref<const Node>> nodes;

View file

@ -65,8 +65,6 @@ struct LockFile
static LockFile read(const Path & path);
void write(const Path & path) const;
/**
* Check whether this lock file has any unlocked inputs.
*/

View file

@ -1,5 +1,4 @@
#include "get-drvs.hh"
#include "util.hh"
#include "eval-inline.hh"
#include "derivations.hh"
#include "store-api.hh"
@ -156,7 +155,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
Outputs result;
for (auto elem : outTI->listItems()) {
if (elem->type() != nString) throw errMsg;
auto out = outputs.find(elem->string.s);
auto out = outputs.find(elem->c_str());
if (out == outputs.end()) throw errMsg;
result.insert(*out);
}
@ -230,7 +229,7 @@ std::string DrvInfo::queryMetaString(const std::string & name)
{
Value * v = queryMeta(name);
if (!v || v->type() != nString) return "";
return v->string.s;
return v->c_str();
}
@ -242,7 +241,7 @@ NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def)
if (v->type() == nString) {
/* Backwards compatibility with before we had support for
integer meta fields. */
if (auto n = string2Int<NixInt>(v->string.s))
if (auto n = string2Int<NixInt>(v->c_str()))
return *n;
}
return def;
@ -256,7 +255,7 @@ NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def)
if (v->type() == nString) {
/* Backwards compatibility with before we had support for
float meta fields. */
if (auto n = string2Float<NixFloat>(v->string.s))
if (auto n = string2Float<NixFloat>(v->c_str()))
return *n;
}
return def;
@ -271,8 +270,8 @@ bool DrvInfo::queryMetaBool(const std::string & name, bool def)
if (v->type() == nString) {
/* Backwards compatibility with before we had support for
Boolean meta fields. */
if (strcmp(v->string.s, "true") == 0) return true;
if (strcmp(v->string.s, "false") == 0) return false;
if (v->string_view() == "true") return true;
if (v->string_view() == "false") return false;
}
return def;
}

View file

@ -43,8 +43,8 @@ $(foreach i, $(wildcard src/libexpr/value/*.hh), \
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh
$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh
$(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh
$(d)/eval.cc: $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh $(d)/flake/call-flake.nix.gen.hh
src/libexpr/primops/fromTOML.o: ERROR_SWITCH_ENUM =

View file

@ -76,12 +76,12 @@ void Expr::show(const SymbolTable & symbols, std::ostream & str) const
void ExprInt::show(const SymbolTable & symbols, std::ostream & str) const
{
str << n;
str << v.integer;
}
void ExprFloat::show(const SymbolTable & symbols, std::ostream & str) const
{
str << nf;
str << v.fpoint;
}
void ExprString::show(const SymbolTable & symbols, std::ostream & str) const

View file

@ -20,7 +20,6 @@ MakeError(Abort, EvalError);
MakeError(TypeError, EvalError);
MakeError(UndefinedVarError, Error);
MakeError(MissingArgumentError, EvalError);
MakeError(RestrictedPathError, Error);
/**
* Position objects.
@ -155,6 +154,10 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
struct Expr
{
static unsigned long nrExprs;
Expr() {
nrExprs++;
}
virtual ~Expr() { };
virtual void show(const SymbolTable & symbols, std::ostream & str) const;
virtual void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env);
@ -171,18 +174,16 @@ struct Expr
struct ExprInt : Expr
{
NixInt n;
Value v;
ExprInt(NixInt n) : n(n) { v.mkInt(n); };
ExprInt(NixInt n) { v.mkInt(n); };
Value * maybeThunk(EvalState & state, Env & env) override;
COMMON_METHODS
};
struct ExprFloat : Expr
{
NixFloat nf;
Value v;
ExprFloat(NixFloat nf) : nf(nf) { v.mkFloat(nf); };
ExprFloat(NixFloat nf) { v.mkFloat(nf); };
Value * maybeThunk(EvalState & state, Env & env) override;
COMMON_METHODS
};
@ -198,9 +199,13 @@ struct ExprString : Expr
struct ExprPath : Expr
{
ref<InputAccessor> accessor;
std::string s;
Value v;
ExprPath(std::string s) : s(std::move(s)) { v.mkPath(this->s.c_str()); };
ExprPath(ref<InputAccessor> accessor, std::string s) : accessor(accessor), s(std::move(s))
{
v.mkPath(&*accessor, this->s.c_str());
}
Value * maybeThunk(EvalState & state, Env & env) override;
COMMON_METHODS
};
@ -238,7 +243,7 @@ struct ExprSelect : Expr
PosIdx pos;
Expr * e, * def;
AttrPath attrPath;
ExprSelect(const PosIdx & pos, Expr * e, const AttrPath && attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(std::move(attrPath)) { };
ExprSelect(const PosIdx & pos, Expr * e, AttrPath attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(std::move(attrPath)) { };
ExprSelect(const PosIdx & pos, Expr * e, Symbol name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); };
PosIdx getPos() const override { return pos; }
COMMON_METHODS
@ -248,7 +253,7 @@ struct ExprOpHasAttr : Expr
{
Expr * e;
AttrPath attrPath;
ExprOpHasAttr(Expr * e, const AttrPath && attrPath) : e(e), attrPath(std::move(attrPath)) { };
ExprOpHasAttr(Expr * e, AttrPath attrPath) : e(e), attrPath(std::move(attrPath)) { };
PosIdx getPos() const override { return e->getPos(); }
COMMON_METHODS
};

View file

@ -19,6 +19,7 @@
#include <variant>
#include "util.hh"
#include "users.hh"
#include "nixexpr.hh"
#include "eval.hh"
@ -520,7 +521,7 @@ path_start
/* add back in the trailing '/' to the first segment */
if ($1.p[$1.l-1] == '/' && $1.l > 1)
path += "/";
$$ = new ExprPath(path);
$$ = new ExprPath(ref<InputAccessor>(data->state.rootFS), std::move(path));
}
| HPATH {
if (evalSettings.pureEval) {
@ -530,7 +531,7 @@ path_start
);
}
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
$$ = new ExprPath(path);
$$ = new ExprPath(ref<InputAccessor>(data->state.rootFS), std::move(path));
}
;
@ -646,13 +647,16 @@ formal
#include "eval.hh"
#include "filetransfer.hh"
#include "fetchers.hh"
#include "tarball.hh"
#include "store-api.hh"
#include "flake/flake.hh"
#include "fs-input-accessor.hh"
#include "memory-input-accessor.hh"
namespace nix {
unsigned long Expr::nrExprs = 0;
Expr * EvalState::parse(
char * text,
@ -682,17 +686,25 @@ Expr * EvalState::parse(
}
SourcePath resolveExprPath(const SourcePath & path)
SourcePath resolveExprPath(SourcePath path)
{
unsigned int followCount = 0, maxFollow = 1024;
/* If `path' is a symlink, follow it. This is so that relative
path references work. */
auto path2 = path.resolveSymlinks();
while (true) {
// Basic cycle/depth limit to avoid infinite loops.
if (++followCount >= maxFollow)
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
if (path.lstat().type != InputAccessor::tSymlink) break;
path = {path.accessor, CanonPath(path.readLink(), path.path.parent().value_or(CanonPath::root))};
}
/* If `path' refers to a directory, append `/default.nix'. */
if (path2.lstat().type == InputAccessor::tDirectory)
return path2 + "default.nix";
if (path.lstat().type == InputAccessor::tDirectory)
return path + "default.nix";
return path2;
return path;
}
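/* Example: resolveExprPath on a directory containing a 'default.nix' yields
   that file; overly long symlink chains are now rejected with a "too many
   symbolic links encountered" error. */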
@ -755,11 +767,11 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_
auto r = *rOpt;
Path res = suffix == "" ? r : concatStrings(r, "/", suffix);
if (pathExists(res)) return CanonPath(canonPath(res));
if (pathExists(res)) return rootPath(CanonPath(canonPath(res)));
}
if (hasPrefix(path, "nix/"))
return CanonPath(concatStrings(corepkgsPrefix, path.substr(4)));
return {corepkgsFS, CanonPath(path.substr(3))};
debugThrow(ThrownError({
.msg = hintfmt(evalSettings.pureEval
@ -782,7 +794,7 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
if (EvalSettings::isPseudoUrl(value)) {
try {
auto storePath = fetchers::downloadTarball(
store, EvalSettings::resolvePseudoUrl(value), "source", false).tree.storePath;
store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath;
res = { store->toRealPath(storePath) };
} catch (FileTransferError & e) {
logWarning({
@ -796,7 +808,7 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
experimentalFeatureSettings.require(Xp::Flakes);
auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false);
debug("fetching flake search path element '%s''", value);
auto storePath = flakeRef.resolve(store).fetchTree(store).first.storePath;
auto storePath = flakeRef.resolve(store).fetchTree(store).first;
res = { store->toRealPath(storePath) };
}

View file

@ -1,10 +1,11 @@
#include "eval.hh"
#include "fs-input-accessor.hh"
namespace nix {
SourcePath EvalState::rootPath(CanonPath path)
{
return std::move(path);
return {rootFS, std::move(path)};
}
}

View file

@ -10,6 +10,7 @@
#include "path-references.hh"
#include "store-api.hh"
#include "util.hh"
#include "processes.hh"
#include "value-to-json.hh"
#include "value-to-xml.hh"
#include "primops.hh"
@ -28,7 +29,6 @@
#include <cmath>
namespace nix {
@ -129,13 +129,15 @@ static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, co
auto path = state.coerceToPath(noPos, v, context, "while realising the context of a path");
try {
StringMap rewrites = state.realiseContext(context);
auto realPath = state.rootPath(CanonPath(state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context)));
if (!context.empty()) {
auto rewrites = state.realiseContext(context);
auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context);
return {path.accessor, CanonPath(realPath)};
}
return flags.checkForPureEval
? state.checkSourcePath(realPath)
: realPath;
? state.checkSourcePath(path)
: path;
} catch (Error & e) {
e.addTrace(state.positions[pos], "while realising the context of path '%s'", path);
throw;
@ -210,7 +212,7 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
state.vImportedDrvToDerivation = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "imported-drv-to-derivation.nix.gen.hh"
, CanonPath::root), **state.vImportedDrvToDerivation);
, state.rootPath(CanonPath::root)), **state.vImportedDrvToDerivation);
}
state.forceFunction(**state.vImportedDrvToDerivation, pos, "while evaluating imported-drv-to-derivation.nix.gen.hh");
@ -218,12 +220,6 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
state.forceAttrs(v, pos, "while calling imported-drv-to-derivation.nix.gen.hh");
}
else if (path2 == corepkgsPrefix + "fetchurl.nix") {
state.eval(state.parseExprFromString(
#include "fetchurl.nix.gen.hh"
, CanonPath::root), v);
}
else {
if (!vScope)
state.evalFile(path, v);
@ -266,64 +262,71 @@ static RegisterPrimOp primop_import({
.args = {"path"},
// TODO turn "normal path values" into link below
.doc = R"(
Load, parse and return the Nix expression in the file *path*.
The value *path* can be a path, a string, or an attribute set with an
`__toString` attribute or a `outPath` attribute (as derivations or flake
inputs typically have).
If *path* is a directory, the file `default.nix` in that directory
is loaded.
Evaluation aborts if the file doesnt exist or contains
an incorrect Nix expression. `import` implements Nixs module
system: you can put any Nix expression (such as a set or a
function) in a separate file, and use it from Nix expressions in
other files.
Load, parse, and return the Nix expression in the file *path*.
> **Note**
>
> Unlike some languages, `import` is a regular function in Nix.
> Paths using the angle bracket syntax (e.g., `import` *\<foo\>*)
> are normal [path values](@docroot@/language/values.md#type-path).
A Nix expression loaded by `import` must not contain any *free
variables* (identifiers that are not defined in the Nix expression
itself and are not built-in). Therefore, it cannot refer to
variables that are in scope at the call site. For instance, if you
have a calling expression
The *path* argument must meet the same criteria as an [interpolated expression](@docroot@/language/string-interpolation.md#interpolated-expression).
```nix
rec {
x = 123;
y = import ./foo.nix;
}
```
If *path* is a directory, the file `default.nix` in that directory is used if it exists.
then the following `foo.nix` will give an error:
> **Example**
>
> ```console
> $ echo 123 > default.nix
> ```
>
> Import `default.nix` from the current directory.
>
> ```nix
> import ./.
> ```
>
> 123
```nix
x + 456
```
Evaluation aborts if the file doesnt exist or contains an invalid Nix expression.
since `x` is not in scope in `foo.nix`. If you want `x` to be
available in `foo.nix`, you should pass it as a function argument:
A Nix expression loaded by `import` must not contain any *free variables*, that is, identifiers that are not defined in the Nix expression itself and are not built-in.
Therefore, it cannot refer to variables that are in scope at the call site.
```nix
rec {
x = 123;
y = import ./foo.nix x;
}
```
and
```nix
x: x + 456
```
(The function argument doesnt have to be called `x` in `foo.nix`;
any name would work.)
> **Example**
>
> If you have a calling expression
>
> ```nix
> rec {
> x = 123;
> y = import ./foo.nix;
> }
> ```
>
> then the following `foo.nix` will give an error:
>
> ```nix
> # foo.nix
> x + 456
> ```
>
> since `x` is not in scope in `foo.nix`.
> If you want `x` to be available in `foo.nix`, pass it as a function argument:
>
> ```nix
> rec {
> x = 123;
> y = import ./foo.nix x;
> }
> ```
>
> and
>
> ```nix
> # foo.nix
> x: x + 456
> ```
>
> The function argument doesnt have to be called `x` in `foo.nix`; any name would work.
)",
.fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
@ -598,9 +601,12 @@ struct CompareValues
case nFloat:
return v1->fpoint < v2->fpoint;
case nString:
return strcmp(v1->string.s, v2->string.s) < 0;
return v1->string_view().compare(v2->string_view()) < 0;
case nPath:
return strcmp(v1->_path, v2->_path) < 0;
// Note: we don't take the accessor into account
// since it's not obvious how to compare them in a
// reproducible way.
return strcmp(v1->_path.path, v2->_path.path) < 0;
case nList:
// Lexicographic comparison
for (size_t i = 0;; i++) {
@ -735,6 +741,14 @@ static RegisterPrimOp primop_genericClosure(PrimOp {
```
[ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ]
```
`key` can be one of the following types:
- [Number](@docroot@/language/values.md#type-number)
- [Boolean](@docroot@/language/values.md#type-boolean)
- [String](@docroot@/language/values.md#type-string)
- [Path](@docroot@/language/values.md#type-path)
- [List](@docroot@/language/values.md#list)
)",
.fun = prim_genericClosure,
});
@ -818,7 +832,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
auto message = state.coerceToString(pos, *args[0], context,
"while evaluating the error message passed to builtins.addErrorContext",
false, false).toOwned();
e.addTrace(nullptr, message, true);
e.addTrace(nullptr, hintfmt(message), true);
throw;
}
}
@ -990,7 +1004,7 @@ static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Valu
{
state.forceValue(*args[0], pos);
if (args[0]->type() == nString)
printError("trace: %1%", args[0]->string.s);
printError("trace: %1%", args[0]->string_view());
else
printError("trace: %1%", printValue(state, *args[0]));
state.forceValue(*args[1], pos);
@ -1486,7 +1500,7 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args,
}));
NixStringContext context;
auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.storePath")).path;
auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'")).path;
/* Resolve symlinks in path, unless path itself is a symlink
directly in the store. The latter condition is necessary so
e.g. nix-push does the right thing. */
@ -1536,14 +1550,14 @@ static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args,
auto path = realisePath(state, pos, arg, { .checkForPureEval = false });
/* SourcePath doesn't know about trailing slash. */
auto mustBeDir = arg.type() == nString && arg.str().ends_with("/");
auto mustBeDir = arg.type() == nString
&& (arg.string_view().ends_with("/")
|| arg.string_view().ends_with("/."));
try {
auto checked = state.checkSourcePath(path);
auto exists = checked.pathExists();
if (exists && mustBeDir) {
exists = checked.lstat().type == InputAccessor::tDirectory;
}
auto st = checked.maybeLstat();
auto exists = st && (!mustBeDir || st->type == SourceAccessor::tDirectory);
v.mkBool(exists);
} catch (SysError & e) {
/* Don't give away info from errors while canonicalising
@ -1697,13 +1711,14 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
static RegisterPrimOp primop_findFile(PrimOp {
.name = "__findFile",
.args = {"search path", "lookup path"},
.args = {"search-path", "lookup-path"},
.doc = R"(
Look up the given path with the given search path.
Find *lookup-path* in *search-path*.
A search path is represented list of [attribute sets](./values.md#attribute-set) with two attributes, `prefix`, and `path`.
`prefix` is a relative path.
`path` denotes a file system location; the exact syntax depends on the command line interface.
A search path is represented as a list of [attribute sets](./values.md#attribute-set) with two attributes:
- `prefix` is a relative path.
- `path` denotes a file system location
The exact syntax depends on the command line interface.
Examples of search path attribute sets:
@ -1721,15 +1736,14 @@ static RegisterPrimOp primop_findFile(PrimOp {
}
```
The lookup algorithm checks each entry until a match is found, returning a [path value](@docroot@/language/values.html#type-path) of the match.
The lookup algorithm checks each entry until a match is found, returning a [path value](@docroot@/language/values.html#type-path) of the match:
This is the process for each entry:
If the lookup path matches `prefix`, then the remainder of the lookup path (the "suffix") is searched for within the directory denoted by `patch`.
Note that the `path` may need to be downloaded at this point to look inside.
If the suffix is found inside that directory, then the entry is a match;
the combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
- If *lookup-path* matches `prefix`, then the remainder of *lookup-path* (the "suffix") is searched for within the directory denoted by `path`.
Note that the `path` may need to be downloaded at this point to look inside.
- If the suffix is found inside that directory, then the entry is a match.
The combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
The syntax
[Lookup path](@docroot@/language/constructs/lookup-path.md) expressions can be [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath):
```nix
<nixpkgs>
@ -1757,7 +1771,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V
auto path = realisePath(state, pos, *args[1]);
v.mkString(hashString(*ht, path.readFile()).to_string(Base16, false));
v.mkString(hashString(*ht, path.readFile()).to_string(HashFormat::Base16, false));
}
static RegisterPrimOp primop_hashFile({
@ -2202,7 +2216,7 @@ static void addPath(
path = evalSettings.pureEval && expectedHash
? path
: state.checkSourcePath(CanonPath(path)).path.abs();
: state.checkSourcePath(state.rootPath(CanonPath(path))).path.abs();
PathFilter filter = filterFun ? ([&](const Path & path) {
auto st = lstat(path);
@ -2235,9 +2249,7 @@ static void addPath(
});
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
StorePath dstPath = settings.readOnlyMode
? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first
: state.store->addToStore(name, path, method, htSHA256, filter, state.repair, refs);
auto dstPath = state.rootPath(CanonPath(path)).fetchToStore(state.store, name, method, &filter, state.repair);
if (expectedHash && expectedStorePath != dstPath)
state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
state.allowAndSetStorePathString(dstPath, v);
@ -2254,7 +2266,7 @@ static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * arg
{
NixStringContext context;
auto path = state.coerceToPath(pos, *args[1], context,
"while evaluating the second argument (the path to filter) passed to builtins.filterSource");
"while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'");
state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource");
addPath(state, pos, path.baseName(), path.path.abs(), args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
}
@ -2370,7 +2382,7 @@ static RegisterPrimOp primop_path({
like `@`.
- filter\
A function of the type expected by `builtins.filterSource`,
A function of the type expected by [`builtins.filterSource`](#builtins-filterSource),
with the same semantics.
- recursive\
@ -2408,7 +2420,7 @@ static void prim_attrNames(EvalState & state, const PosIdx pos, Value * * args,
(v.listElems()[n++] = state.allocValue())->mkString(state.symbols[i.name]);
std::sort(v.listElems(), v.listElems() + n,
[](Value * v1, Value * v2) { return strcmp(v1->string.s, v2->string.s) < 0; });
[](Value * v1, Value * v2) { return v1->string_view().compare(v2->string_view()) < 0; });
}
static RegisterPrimOp primop_attrNames({
@ -2545,11 +2557,12 @@ static void prim_removeAttrs(EvalState & state, const PosIdx pos, Value * * args
/* Get the attribute names to be removed.
We keep them as Attrs instead of Symbols so std::set_difference
can be used to remove them from attrs[0]. */
// 64: large enough to fit the attributes of a derivation
boost::container::small_vector<Attr, 64> names;
names.reserve(args[1]->listSize());
for (auto elem : args[1]->listItems()) {
state.forceStringNoCtx(*elem, pos, "while evaluating the values of the second argument passed to builtins.removeAttrs");
names.emplace_back(state.symbols.create(elem->string.s), nullptr);
names.emplace_back(state.symbols.create(elem->string_view()), nullptr);
}
std::sort(names.begin(), names.end());
@ -2725,7 +2738,7 @@ static void prim_catAttrs(EvalState & state, const PosIdx pos, Value * * args, V
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.catAttrs");
Value * res[args[1]->listSize()];
unsigned int found = 0;
size_t found = 0;
for (auto v2 : args[1]->listItems()) {
state.forceAttrs(*v2, pos, "while evaluating an element in the list passed as second argument to builtins.catAttrs");
@ -2999,7 +3012,7 @@ static RegisterPrimOp primop_tail({
.name = "__tail",
.args = {"list"},
.doc = R"(
Return the second to last elements of a list; abort evaluation if
Return the list without its first item; abort evaluation if
the argument isnt a list or is an empty list.
> **Warning**
@ -3061,7 +3074,7 @@ static void prim_filter(EvalState & state, const PosIdx pos, Value * * args, Val
// FIXME: putting this on the stack is risky.
Value * vs[args[1]->listSize()];
unsigned int k = 0;
size_t k = 0;
bool same = true;
for (unsigned int n = 0; n < args[1]->listSize(); ++n) {
@ -3186,10 +3199,14 @@ static void anyOrAll(bool any, EvalState & state, const PosIdx pos, Value * * ar
state.forceFunction(*args[0], pos, std::string("while evaluating the first argument passed to builtins.") + (any ? "any" : "all"));
state.forceList(*args[1], pos, std::string("while evaluating the second argument passed to builtins.") + (any ? "any" : "all"));
std::string_view errorCtx = any
? "while evaluating the return value of the function passed to builtins.any"
: "while evaluating the return value of the function passed to builtins.all";
Value vTmp;
for (auto elem : args[1]->listItems()) {
state.callFunction(*args[0], *elem, vTmp, pos);
bool res = state.forceBool(vTmp, pos, std::string("while evaluating the return value of the function passed to builtins.") + (any ? "any" : "all"));
bool res = state.forceBool(vTmp, pos, errorCtx);
if (res == any) {
v.mkBool(any);
return;
@ -3451,7 +3468,7 @@ static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args,
for (unsigned int n = 0; n < nrLists; ++n) {
Value * vElem = args[1]->listElems()[n];
state.callFunction(*args[0], *vElem, lists[n], pos);
state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to buitlins.concatMap");
state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to builtins.concatMap");
len += lists[n].listSize();
}
@ -3714,10 +3731,11 @@ static RegisterPrimOp primop_substring({
.doc = R"(
Return the substring of *s* from character position *start*
(zero-based) up to but not including *start + len*. If *start* is
greater than the length of the string, an empty string is returned,
and if *start + len* lies beyond the end of the string, only the
substring up to the end of the string is returned. *start* must be
non-negative. For example,
greater than the length of the string, an empty string is returned.
If *start + len* lies beyond the end of the string or *len* is `-1`,
only the substring up to the end of the string is returned.
*start* must be non-negative.
For example,
```nix
builtins.substring 0 3 "nixos"
@ -3759,7 +3777,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args,
NixStringContext context; // discarded
auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");
v.mkString(hashString(*ht, s).to_string(Base16, false));
v.mkString(hashString(*ht, s).to_string(HashFormat::Base16, false));
}
static RegisterPrimOp primop_hashString({
@ -3773,6 +3791,101 @@ static RegisterPrimOp primop_hashString({
.fun = prim_hashString,
});
static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
state.forceAttrs(*args[0], pos, "while evaluating the first argument passed to builtins.convertHash");
auto &inputAttrs = args[0]->attrs;
Bindings::iterator iteratorHash = getAttr(state, state.symbols.create("hash"), inputAttrs, "while locating the attribute 'hash'");
auto hash = state.forceStringNoCtx(*iteratorHash->value, pos, "while evaluating the attribute 'hash'");
Bindings::iterator iteratorHashAlgo = inputAttrs->find(state.symbols.create("hashAlgo"));
std::optional<HashType> ht = std::nullopt;
if (iteratorHashAlgo != inputAttrs->end()) {
ht = parseHashType(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
}
Bindings::iterator iteratorToHashFormat = getAttr(state, state.symbols.create("toHashFormat"), args[0]->attrs, "while locating the attribute 'toHashFormat'");
HashFormat hf = parseHashFormat(state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'"));
v.mkString(Hash::parseAny(hash, ht).to_string(hf, hf == HashFormat::SRI));
}
static RegisterPrimOp primop_convertHash({
.name = "__convertHash",
.args = {"args"},
.doc = R"(
Return the specified representation of a hash string, based on the attributes presented in *args*:
- `hash`
The hash to be converted.
The hash format is detected automatically.
- `hashAlgo`
The algorithm used to create the hash. Must be one of
- `"md5"`
- `"sha1"`
- `"sha256"`
- `"sha512"`
The attribute may be omitted when `hash` is an [SRI hash](https://www.w3.org/TR/SRI/#the-integrity-attribute) or when the hash is prefixed with the hash algorithm name followed by a colon.
That `<hashAlgo>:<hashBody>` syntax is supported for backwards compatibility with existing tooling.
- `toHashFormat`
The format of the resulting hash. Must be one of
- `"base16"`
- `"base32"`
- `"base64"`
- `"sri"`
The result hash is the *toHashFormat* representation of the hash *hash*.
> **Example**
>
> Convert a SHA256 hash in Base16 to SRI:
>
> ```nix
> builtins.convertHash {
> hash = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
> toHashFormat = "sri";
> hashAlgo = "sha256";
> }
> ```
>
> "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="
> **Example**
>
> Convert a SHA256 hash in SRI to Base16:
>
> ```nix
> builtins.convertHash {
> hash = "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=";
> toHashFormat = "base16";
> }
> ```
>
> "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
> **Example**
>
> Convert a hash in the form `<hashAlgo>:<hashBody>` in Base16 to SRI:
>
> ```nix
> builtins.convertHash {
> hash = "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
> toHashFormat = "sri";
> }
> ```
>
> "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="
)",
.fun = prim_convertHash,
});
struct RegexCache
{
// TODO use C++20 transparent comparison when available
@ -4395,9 +4508,9 @@ void EvalState::createBaseEnv()
addConstant("__nixPath", v, {
.type = nList,
.doc = R"(
The search path used to resolve angle bracket path lookups.
List of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md).
Angle bracket expressions can be
Lookup path expressions can be
[desugared](https://en.wikipedia.org/wiki/Syntactic_sugar)
using this and
[`builtins.findFile`](./builtins.html#builtins-findFile):
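For instance, the lookup path expression `<nixpkgs>` is (roughly) equivalent to:
```nix
builtins.findFile builtins.nixPath "nixpkgs"
```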
@ -4441,12 +4554,7 @@ void EvalState::createBaseEnv()
/* Note: we have to initialize the 'derivation' constant *after*
building baseEnv/staticBaseEnv because it uses 'builtins'. */
char code[] =
#include "primops/derivation.nix.gen.hh"
// the parser needs two NUL bytes as terminators; one of them
// is implied by being a C string.
"\0";
eval(parse(code, sizeof(code), derivationInternal, {CanonPath::root}, staticBaseEnv), *vDerivation);
evalFile(derivationInternal, *vDerivation);
}

View file

@ -8,6 +8,22 @@
namespace nix {
/**
* For functions where we do not expect deep recursion, we can use a sizable
* part of the stack as free allocation space.
*
* Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes.
*/
constexpr size_t nonRecursiveStackReservation = 128;
/**
* Functions that may be applied to self-similar inputs, such as concatMap on a
* tree, should reserve a smaller part of the stack for allocation.
*
* Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes.
*/
constexpr size_t conservativeStackReservation = 16;
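/* Sketch of the intended use (assuming a stack-backed small-vector type, e.g.
   boost::container::small_vector):
     boost::container::small_vector<Value, nonRecursiveStackReservation> vals;
   Small inputs then avoid heap allocation entirely; larger ones spill over to
   the heap. */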
struct RegisterPrimOp
{
typedef std::vector<PrimOp> PrimOps;

View file

@ -30,20 +30,27 @@ static RegisterPrimOp primop_hasContext({
.name = "__hasContext",
.args = {"s"},
.doc = R"(
Return `true` if string *s* has a non-empty context. The
context can be obtained with
Return `true` if string *s* has a non-empty context.
The context can be obtained with
[`getContext`](#builtins-getContext).
> **Example**
>
> Many operations require a string context to be empty because they are intended only to work with "regular" strings, and also to help users avoid unintentionally losing track of string context elements.
> `builtins.hasContext` can help create better domain-specific errors in those cases.
>
> ```nix
> name: meta:
>
> if builtins.hasContext name
> then throw "package name cannot contain string context"
> else { ${name} = meta; }
> ```
)",
.fun = prim_hasContext
});
/* Sometimes we want to pass a derivation path (i.e. pkg.drvPath) to a
builder without causing the derivation to be built (for instance,
in the derivation that builds NARs in nix-push, when doing
source-only deployment). This primop marks the string context so
that builtins.derivation adds the path to drv.inputSrcs rather than
drv.inputDrvs. */
static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
NixStringContext context;
@ -66,11 +73,83 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx p
static RegisterPrimOp primop_unsafeDiscardOutputDependency({
.name = "__unsafeDiscardOutputDependency",
.arity = 1,
.args = {"s"},
.doc = R"(
Create a copy of the given string where every "derivation deep" string context element is turned into a constant string context element.
This is the opposite of [`builtins.addDrvOutputDependencies`](#builtins-addDrvOutputDependencies).
This is unsafe because it allows us to "forget" store objects we would have otherwise referred to with the string context,
whereas Nix normally tracks all dependencies consistently.
Safe operations "grow" but never "shrink" string contexts.
[`builtins.addDrvOutputDependencies`] in contrast is safe because a "derivation deep" string context element always refers to the underlying derivation (among many more things).
Replacing a constant string context element with a "derivation deep" element is a safe operation that just enlarges the string context without forgetting anything.
[`builtins.addDrvOutputDependencies`]: #builtins-addDrvOutputDependencies
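> **Example**
>
> A minimal sketch (the names are illustrative): pass a `.drv` path to a builder as a plain source input, so that the referenced derivation's outputs do not have to be built:
>
> ```nix
> pkg: derivation {
>   name = "print-drv-path";
>   system = builtins.currentSystem;
>   builder = "/bin/sh";
>   args = [ "-c" "echo ${builtins.unsafeDiscardOutputDependency pkg.drvPath} > $out" ];
> }
> ```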
)",
.fun = prim_unsafeDiscardOutputDependency
});
static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
NixStringContext context;
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.addDrvOutputDependencies");
auto contextSize = context.size();
if (contextSize != 1) {
throw EvalError({
.msg = hintfmt("context of string '%s' must have exactly one element, but has %d", *s, contextSize),
.errPos = state.positions[pos]
});
}
NixStringContext context2 {
(NixStringContextElem { std::visit(overloaded {
[&](const NixStringContextElem::Opaque & c) -> NixStringContextElem::DrvDeep {
if (!c.path.isDerivation()) {
throw EvalError({
.msg = hintfmt("path '%s' is not a derivation",
state.store->printStorePath(c.path)),
.errPos = state.positions[pos],
});
}
return NixStringContextElem::DrvDeep {
.drvPath = c.path,
};
},
[&](const NixStringContextElem::Built & c) -> NixStringContextElem::DrvDeep {
throw EvalError({
.msg = hintfmt("`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'", c.output),
.errPos = state.positions[pos],
});
},
[&](const NixStringContextElem::DrvDeep & c) -> NixStringContextElem::DrvDeep {
/* Reuse original item because we want this to be idempotent. */
return std::move(c);
},
}, context.begin()->raw) }),
};
v.mkString(*s, context2);
}
static RegisterPrimOp primop_addDrvOutputDependencies({
.name = "__addDrvOutputDependencies",
.args = {"s"},
.doc = R"(
Create a copy of the given string where a single constant string context element is turned into a "derivation deep" string context element.
The store path that is the constant string context element should point to a valid derivation, and end in `.drv`.
The original string context must not be empty or have more than one element, and it must not have any type of element other than a constant or derivation deep element.
The latter is supported so this function is idempotent.
This is the opposite of [`builtins.unsafeDiscardOutputDependency`](#builtins-unsafeDiscardOutputDependency).
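> **Example**
>
> A minimal sketch: round-trip a `.drv` path's string context through both functions (assuming `pkg` is some derivation):
>
> ```nix
> pkg:
> builtins.addDrvOutputDependencies
>   (builtins.unsafeDiscardOutputDependency pkg.drvPath)
> ```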
)",
.fun = prim_addDrvOutputDependencies
});
/* Extract the context of a string as a structured Nix value.
The context is represented as an attribute set whose keys are the

View file

@ -133,7 +133,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
else if (attrName == "toPath") {
state.forceValue(*attr.value, attr.pos);
bool isEmptyString = attr.value->type() == nString && attr.value->string.s == std::string("");
bool isEmptyString = attr.value->type() == nString && attr.value->string_view() == "";
if (isEmptyString) {
toPath = StorePathOrGap {};
}

View file

@ -71,10 +71,10 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
auto input = fetchers::Input::fromAttrs(std::move(attrs));
// FIXME: use name
auto [tree, input2] = input.fetch(state.store);
auto [storePath, input2] = input.fetch(state.store);
auto attrs2 = state.buildBindings(8);
state.mkStorePathString(tree.storePath, attrs2.alloc(state.sOutPath));
state.mkStorePathString(storePath, attrs2.alloc(state.sOutPath));
if (input2.getRef())
attrs2.alloc("branch").mkString(*input2.getRef());
// Backward compatibility: set 'rev' to
@ -86,7 +86,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
attrs2.alloc("revCount").mkInt(*revCount);
v.mkAttrs(attrs2);
state.allowPath(tree.storePath);
state.allowPath(storePath);
}
static RegisterPrimOp r_fetchMercurial({

View file

@ -5,7 +5,9 @@
#include "fetchers.hh"
#include "filetransfer.hh"
#include "registry.hh"
#include "tarball.hh"
#include "url.hh"
#include "value-to-json.hh"
#include <ctime>
#include <iomanip>
@ -15,7 +17,7 @@ namespace nix {
void emitTreeAttrs(
EvalState & state,
const fetchers::Tree & tree,
const StorePath & storePath,
const fetchers::Input & input,
Value & v,
bool emptyRevFallback,
@ -25,14 +27,13 @@ void emitTreeAttrs(
auto attrs = state.buildBindings(10);
state.mkStorePathString(tree.storePath, attrs.alloc(state.sOutPath));
state.mkStorePathString(storePath, attrs.alloc(state.sOutPath));
// FIXME: support arbitrary input attributes.
auto narHash = input.getNarHash();
assert(narHash);
attrs.alloc("narHash").mkString(narHash->to_string(SRI, true));
attrs.alloc("narHash").mkString(narHash->to_string(HashFormat::SRI, true));
if (input.getType() == "git")
attrs.alloc("submodules").mkBool(
@ -71,36 +72,10 @@ void emitTreeAttrs(
v.mkAttrs(attrs);
}
std::string fixURI(std::string uri, EvalState & state, const std::string & defaultScheme = "file")
{
state.checkURI(uri);
if (uri.find("://") == std::string::npos) {
const auto p = ParsedURL {
.scheme = defaultScheme,
.authority = "",
.path = uri
};
return p.to_string();
} else {
return uri;
}
}
std::string fixURIForGit(std::string uri, EvalState & state)
{
/* Detects scp-style uris (e.g. git@github.com:NixOS/nix) and fixes
* them by removing the `:` and assuming a scheme of `ssh://`
* */
static std::regex scp_uri("([^/]*)@(.*):(.*)");
if (uri[0] != '/' && std::regex_match(uri, scp_uri))
return fixURI(std::regex_replace(uri, scp_uri, "$1@$2/$3"), state, "ssh");
else
return fixURI(uri, state);
}
struct FetchTreeParams {
bool emptyRevFallback = false;
bool allowNameArgument = false;
bool isFetchGit = false;
};
static void fetchTree(
@ -108,11 +83,12 @@ static void fetchTree(
const PosIdx pos,
Value * * args,
Value & v,
std::optional<std::string> type,
const FetchTreeParams & params = FetchTreeParams{}
) {
fetchers::Input input;
NixStringContext context;
std::optional<std::string> type;
if (params.isFetchGit) type = "git";
state.forceValue(*args[0], pos);
@ -142,16 +118,18 @@ static void fetchTree(
if (attr.value->type() == nPath || attr.value->type() == nString) {
auto s = state.coerceToString(attr.pos, *attr.value, context, "", false, false).toOwned();
attrs.emplace(state.symbols[attr.name],
state.symbols[attr.name] == "url"
? type == "git"
? fixURIForGit(s, state)
: fixURI(s, state)
params.isFetchGit && state.symbols[attr.name] == "url"
? fixGitURL(s)
: s);
}
else if (attr.value->type() == nBool)
attrs.emplace(state.symbols[attr.name], Explicit<bool>{attr.value->boolean});
else if (attr.value->type() == nInt)
attrs.emplace(state.symbols[attr.name], uint64_t(attr.value->integer));
else if (state.symbols[attr.name] == "publicKeys") {
experimentalFeatureSettings.require(Xp::VerifiedFetches);
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump());
}
else
state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
state.symbols[attr.name], showType(*attr.value)));
@ -170,40 +148,87 @@ static void fetchTree(
"while evaluating the first argument passed to the fetcher",
false, false).toOwned();
if (type == "git") {
if (params.isFetchGit) {
fetchers::Attrs attrs;
attrs.emplace("type", "git");
attrs.emplace("url", fixURIForGit(url, state));
attrs.emplace("url", fixGitURL(url));
input = fetchers::Input::fromAttrs(std::move(attrs));
} else {
input = fetchers::Input::fromURL(fixURI(url, state));
if (!experimentalFeatureSettings.isEnabled(Xp::Flakes))
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("passing a string argument to 'fetchTree' requires the 'flakes' experimental feature"),
.errPos = state.positions[pos]
}));
input = fetchers::Input::fromURL(url);
}
}
if (!evalSettings.pureEval && !input.isDirect())
if (!evalSettings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes))
input = lookupInRegistries(state.store, input).first;
if (evalSettings.pureEval && !input.isLocked())
state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos]));
auto [tree, input2] = input.fetch(state.store);
state.checkURI(input.toURLString());
state.allowPath(tree.storePath);
auto [storePath, input2] = input.fetch(state.store);
emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback, false);
state.allowPath(storePath);
emitTreeAttrs(state, storePath, input2, v, params.emptyRevFallback, false);
}
static void prim_fetchTree(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
experimentalFeatureSettings.require(Xp::Flakes);
fetchTree(state, pos, args, v, std::nullopt, FetchTreeParams { .allowNameArgument = false });
fetchTree(state, pos, args, v, { });
}
// FIXME: document
static RegisterPrimOp primop_fetchTree({
.name = "fetchTree",
.arity = 1,
.fun = prim_fetchTree
.args = {"input"},
.doc = R"(
Fetch a source tree or a plain file using one of the supported backends.
*input* must be a [flake reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references), either in attribute set representation or in the URL-like syntax.
The input should be "locked", that is, it should contain a commit hash or content hash unless impure evaluation (`--impure`) is enabled.
> **Note**
>
> The URL-like syntax requires the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled.
Here are some examples of how to use `fetchTree`:
- Fetch a GitHub repository using the attribute set representation:
```nix
builtins.fetchTree {
type = "github";
owner = "NixOS";
repo = "nixpkgs";
rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
}
```
This evaluates to the following attribute set:
```
{
lastModified = 1686503798;
lastModifiedDate = "20230611171638";
narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc=";
outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source";
rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
shortRev = "ae2e6b3";
}
```
- Fetch the same GitHub repository using the URL-like syntax:
```
builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd"
```
)",
.fun = prim_fetchTree,
.experimentalFeature = Xp::FetchTree,
});
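The attribute-handling loop in `fetchTree` above now also forwards a `publicKeys` attribute (serialized to JSON behind the `verified-fetches` experimental feature) alongside strings, Booleans and integers. A minimal sketch of an attribute-set call exercising that path — the URL, revision and key below are placeholders, not values taken from this change:

```nix
builtins.fetchTree {
  type = "git";
  url = "https://example.org/repo.git";             # placeholder URL
  rev = "0000000000000000000000000000000000000000"; # placeholder revision
  verifyCommit = true;
  publicKeys = [
    { type = "ssh-ed25519"; key = "<public key>"; } # placeholder key
  ];
}
```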
static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v,
@ -270,7 +295,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
// https://github.com/NixOS/nix/issues/4313
auto storePath =
unpack
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).tree.storePath
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).storePath
: fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
if (expectedHash) {
@ -279,7 +304,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
: hashFile(htSHA256, state.store->toRealPath(storePath));
if (hash != *expectedHash)
state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true)));
*url, expectedHash->to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true)));
}
state.allowAndSetStorePathString(storePath, v);
@ -353,7 +378,12 @@ static RegisterPrimOp primop_fetchTarball({
static void prim_fetchGit(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
fetchTree(state, pos, args, v, "git", FetchTreeParams { .emptyRevFallback = true, .allowNameArgument = true });
fetchTree(state, pos, args, v,
FetchTreeParams {
.emptyRevFallback = true,
.allowNameArgument = true,
.isFetchGit = true
});
}
static RegisterPrimOp primop_fetchGit({
@ -368,7 +398,7 @@ static RegisterPrimOp primop_fetchGit({
The URL of the repo.
- `name` (default: *basename of the URL*)
- `name` (default: `source`)
The name of the directory the repo should be exported to in the store.
@ -395,7 +425,8 @@ static RegisterPrimOp primop_fetchGit({
- `shallow` (default: `false`)
A Boolean parameter that specifies whether fetching a shallow clone is allowed.
A Boolean parameter that specifies whether fetching from a shallow remote repository is allowed.
This still performs a full clone of what is available on the remote.
- `allRefs`
@ -403,6 +434,42 @@ static RegisterPrimOp primop_fetchGit({
With this argument being true, it's possible to load a `rev` from *any* `ref`
(by default only `rev`s from the specified `ref` are supported).
- `verifyCommit` (default: `true` if `publicKey` or `publicKeys` are provided, otherwise `false`)
Whether to check `rev` for a signature matching `publicKey` or `publicKeys`.
If `verifyCommit` is enabled, then `fetchGit` cannot use a local repository with uncommitted changes.
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
- `publicKey`
The public key against which `rev` is verified if `verifyCommit` is enabled.
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
- `keytype` (default: `"ssh-ed25519"`)
The key type of `publicKey`.
Possible values:
- `"ssh-dsa"`
- `"ssh-ecdsa"`
- `"ssh-ecdsa-sk"`
- `"ssh-ed25519"`
- `"ssh-ed25519-sk"`
- `"ssh-rsa"`
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
- `publicKeys`
The public keys against which `rev` is verified if `verifyCommit` is enabled.
Must be given as a list of attribute sets with the following form:
```nix
{
key = "<public key>";
type = "<key type>"; # optional, default: "ssh-ed25519"
}
```
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
Here are some examples of how to use `fetchGit`.
- To fetch a private repository over SSH:
@ -477,6 +544,21 @@ static RegisterPrimOp primop_fetchGit({
}
```
- To verify the commit signature:
```nix
builtins.fetchGit {
url = "ssh://git@github.com/nixos/nix.git";
verifyCommit = true;
publicKeys = [
{
type = "ssh-ed25519";
key = "AAAAC3NzaC1lZDI1NTE5AAAAIArPKULJOid8eS6XETwUjO48/HKBWl7FTCK0Z//fplDi";
}
];
}
```
Nix will refetch the branch according to the [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl) setting.
This behavior is disabled in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval).
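A minimal sketch of the `allRefs` parameter described above, using a placeholder URL and revision:

```nix
builtins.fetchGit {
  url = "https://example.org/repo.git";             # placeholder URL
  rev = "0000000000000000000000000000000000000000"; # placeholder revision on a non-default ref
  allRefs = true;
}
```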

View file

@ -1,5 +1,4 @@
#include "search-path.hh"
#include "util.hh"
namespace nix {

View file

@ -1,68 +0,0 @@
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>
#include <rapidcheck/gtest.h>
#include "tests/derived-path.hh"
#include "tests/libexpr.hh"
namespace nix {
// Testing of trivial expressions
class DerivedPathExpressionTest : public LibExprTest {};
// FIXME: `RC_GTEST_FIXTURE_PROP` isn't calling `SetUpTestSuite` because it is
// not a real fixture.
//
// See https://github.com/emil-e/rapidcheck/blob/master/doc/gtest.md#rc_gtest_fixture_propfixture-name-args
TEST_F(DerivedPathExpressionTest, force_init)
{
}
#ifndef COVERAGE
RC_GTEST_FIXTURE_PROP(
DerivedPathExpressionTest,
prop_opaque_path_round_trip,
(const SingleDerivedPath::Opaque & o))
{
auto * v = state.allocValue();
state.mkStorePathString(o.path, *v);
auto d = state.coerceToSingleDerivedPath(noPos, *v, "");
RC_ASSERT(SingleDerivedPath { o } == d);
}
// TODO use DerivedPath::Built for parameter once it supports a single output
// path only.
RC_GTEST_FIXTURE_PROP(
DerivedPathExpressionTest,
prop_derived_path_built_placeholder_round_trip,
(const SingleDerivedPath::Built & b))
{
/**
* We set these in tests rather than the regular globals so we don't have
* to worry about race conditions if the tests run concurrently.
*/
ExperimentalFeatureSettings mockXpSettings;
mockXpSettings.set("experimental-features", "ca-derivations");
auto * v = state.allocValue();
state.mkOutputString(*v, b, std::nullopt, mockXpSettings);
auto [d, _] = state.coerceToSingleDerivedPathUnchecked(noPos, *v, "");
RC_ASSERT(SingleDerivedPath { b } == d);
}
RC_GTEST_FIXTURE_PROP(
DerivedPathExpressionTest,
prop_derived_path_built_out_path_round_trip,
(const SingleDerivedPath::Built & b, const StorePath & outPath))
{
auto * v = state.allocValue();
state.mkOutputString(*v, b, outPath);
auto [d, _] = state.coerceToSingleDerivedPathUnchecked(noPos, *v, "");
RC_ASSERT(SingleDerivedPath { b } == d);
}
#endif
} /* namespace nix */

File diff suppressed because it is too large

View file

@ -1,22 +0,0 @@
#include <gtest/gtest.h>
#include "flake/flakeref.hh"
namespace nix {
/* ----------- tests for flake/flakeref.hh --------------------------------------------------*/
/* ----------------------------------------------------------------------------
* to_string
* --------------------------------------------------------------------------*/
TEST(to_string, doesntReencodeUrl) {
auto s = "http://localhost:8181/test/+3d.tar.gz";
auto flakeref = parseFlakeRef(s);
auto parsed = flakeref.to_string();
auto expected = "http://localhost:8181/test/%2B3d.tar.gz";
ASSERT_EQ(parsed, expected);
}
}

View file

@ -1,68 +0,0 @@
#include "tests/libexpr.hh"
#include "value-to-json.hh"
namespace nix {
// Testing the conversion to JSON
class JSONValueTest : public LibExprTest {
protected:
std::string getJSONValue(Value& value) {
std::stringstream ss;
NixStringContext ps;
printValueAsJSON(state, true, value, noPos, ss, ps);
return ss.str();
}
};
TEST_F(JSONValueTest, null) {
Value v;
v.mkNull();
ASSERT_EQ(getJSONValue(v), "null");
}
TEST_F(JSONValueTest, BoolFalse) {
Value v;
v.mkBool(false);
ASSERT_EQ(getJSONValue(v),"false");
}
TEST_F(JSONValueTest, BoolTrue) {
Value v;
v.mkBool(true);
ASSERT_EQ(getJSONValue(v), "true");
}
TEST_F(JSONValueTest, IntPositive) {
Value v;
v.mkInt(100);
ASSERT_EQ(getJSONValue(v), "100");
}
TEST_F(JSONValueTest, IntNegative) {
Value v;
v.mkInt(-100);
ASSERT_EQ(getJSONValue(v), "-100");
}
TEST_F(JSONValueTest, String) {
Value v;
v.mkString("test");
ASSERT_EQ(getJSONValue(v), "\"test\"");
}
TEST_F(JSONValueTest, StringQuotes) {
Value v;
v.mkString("test\"");
ASSERT_EQ(getJSONValue(v), "\"test\\\"\"");
}
// The dummy store doesn't support writing files. Fails with this exception message:
// C++ exception with description "error: operation 'addToStoreFromDump' is
// not supported by store 'dummy'" thrown in the test body.
TEST_F(JSONValueTest, DISABLED_Path) {
Value v;
v.mkPath("test");
ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\"");
}
} /* namespace nix */

View file

@ -1,140 +0,0 @@
#pragma once
///@file
#include <gtest/gtest.h>
#include <gmock/gmock.h>
#include "value.hh"
#include "nixexpr.hh"
#include "eval.hh"
#include "eval-inline.hh"
#include "store-api.hh"
#include "tests/libstore.hh"
namespace nix {
class LibExprTest : public LibStoreTest {
public:
static void SetUpTestSuite() {
LibStoreTest::SetUpTestSuite();
initGC();
}
protected:
LibExprTest()
: LibStoreTest()
, state({}, store)
{
}
Value eval(std::string input, bool forceValue = true) {
Value v;
Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root));
assert(e);
state.eval(e, v);
if (forceValue)
state.forceValue(v, noPos);
return v;
}
Symbol createSymbol(const char * value) {
return state.symbols.create(value);
}
EvalState state;
};
MATCHER(IsListType, "") {
return arg != nList;
}
MATCHER(IsList, "") {
return arg.type() == nList;
}
MATCHER(IsString, "") {
return arg.type() == nString;
}
MATCHER(IsNull, "") {
return arg.type() == nNull;
}
MATCHER(IsThunk, "") {
return arg.type() == nThunk;
}
MATCHER(IsAttrs, "") {
return arg.type() == nAttrs;
}
MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s)) {
if (arg.type() != nString) {
return false;
}
return std::string_view(arg.string.s) == s;
}
MATCHER_P(IsIntEq, v, fmt("The integer is equal to \"%1%\"", v)) {
if (arg.type() != nInt) {
return false;
}
return arg.integer == v;
}
MATCHER_P(IsFloatEq, v, fmt("The float is equal to \"%1%\"", v)) {
if (arg.type() != nFloat) {
return false;
}
return arg.fpoint == v;
}
MATCHER(IsTrue, "") {
if (arg.type() != nBool) {
return false;
}
return arg.boolean == true;
}
MATCHER(IsFalse, "") {
if (arg.type() != nBool) {
return false;
}
return arg.boolean == false;
}
MATCHER_P(IsPathEq, p, fmt("Is a path equal to \"%1%\"", p)) {
if (arg.type() != nPath) {
*result_listener << "Expected a path got " << arg.type();
return false;
} else if (std::string_view(arg.string.s) != p) {
*result_listener << "Expected a path that equals \"" << p << "\" but got: " << arg.string.s;
return false;
}
return true;
}
MATCHER_P(IsListOfSize, n, fmt("Is a list of size [%1%]", n)) {
if (arg.type() != nList) {
*result_listener << "Expected list got " << arg.type();
return false;
} else if (arg.listSize() != (size_t)n) {
*result_listener << "Expected as list of size " << n << " got " << arg.listSize();
return false;
}
return true;
}
MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n)) {
if (arg.type() != nAttrs) {
*result_listener << "Expected set got " << arg.type();
return false;
} else if (arg.attrs->size() != (size_t)n) {
*result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs->size();
return false;
}
return true;
}
} /* namespace nix */

View file

@ -1,19 +0,0 @@
check: libexpr-tests_RUN
programs += libexpr-tests
libexpr-tests_NAME := libnixexpr-tests
libexpr-tests_DIR := $(d)
libexpr-tests_INSTALL_DIR :=
libexpr-tests_SOURCES := \
$(wildcard $(d)/*.cc) \
$(wildcard $(d)/value/*.cc)
libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests -I src/libfetchers
libexpr-tests_LIBS = libstore-tests libutils-tests libexpr libutil libstore libfetchers
libexpr-tests_LDFLAGS := $(GTEST_LIBS) -lgmock

View file

@ -1,832 +0,0 @@
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include "tests/libexpr.hh"
namespace nix {
class CaptureLogger : public Logger
{
std::ostringstream oss;
public:
CaptureLogger() {}
std::string get() const {
return oss.str();
}
void log(Verbosity lvl, std::string_view s) override {
oss << s << std::endl;
}
void logEI(const ErrorInfo & ei) override {
showErrorInfo(oss, ei, loggerSettings.showTrace.get());
}
};
class CaptureLogging {
Logger * oldLogger;
std::unique_ptr<CaptureLogger> tempLogger;
public:
CaptureLogging() : tempLogger(std::make_unique<CaptureLogger>()) {
oldLogger = logger;
logger = tempLogger.get();
}
~CaptureLogging() {
logger = oldLogger;
}
std::string get() const {
return tempLogger->get();
}
};
// Testing eval of PrimOp's
class PrimOpTest : public LibExprTest {};
TEST_F(PrimOpTest, throw) {
ASSERT_THROW(eval("throw \"foo\""), ThrownError);
}
TEST_F(PrimOpTest, abort) {
ASSERT_THROW(eval("abort \"abort\""), Abort);
}
TEST_F(PrimOpTest, ceil) {
auto v = eval("builtins.ceil 1.9");
ASSERT_THAT(v, IsIntEq(2));
}
TEST_F(PrimOpTest, floor) {
auto v = eval("builtins.floor 1.9");
ASSERT_THAT(v, IsIntEq(1));
}
TEST_F(PrimOpTest, tryEvalFailure) {
auto v = eval("builtins.tryEval (throw \"\")");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto s = createSymbol("success");
auto p = v.attrs->get(s);
ASSERT_NE(p, nullptr);
ASSERT_THAT(*p->value, IsFalse());
}
TEST_F(PrimOpTest, tryEvalSuccess) {
auto v = eval("builtins.tryEval 123");
ASSERT_THAT(v, IsAttrs());
auto s = createSymbol("success");
auto p = v.attrs->get(s);
ASSERT_NE(p, nullptr);
ASSERT_THAT(*p->value, IsTrue());
s = createSymbol("value");
p = v.attrs->get(s);
ASSERT_NE(p, nullptr);
ASSERT_THAT(*p->value, IsIntEq(123));
}
TEST_F(PrimOpTest, getEnv) {
setenv("_NIX_UNIT_TEST_ENV_VALUE", "test value", 1);
auto v = eval("builtins.getEnv \"_NIX_UNIT_TEST_ENV_VALUE\"");
ASSERT_THAT(v, IsStringEq("test value"));
}
TEST_F(PrimOpTest, seq) {
ASSERT_THROW(eval("let x = throw \"test\"; in builtins.seq x { }"), ThrownError);
}
TEST_F(PrimOpTest, seqNotDeep) {
auto v = eval("let x = { z = throw \"test\"; }; in builtins.seq x { }");
ASSERT_THAT(v, IsAttrs());
}
TEST_F(PrimOpTest, deepSeq) {
ASSERT_THROW(eval("let x = { z = throw \"test\"; }; in builtins.deepSeq x { }"), ThrownError);
}
TEST_F(PrimOpTest, trace) {
CaptureLogging l;
auto v = eval("builtins.trace \"test string 123\" 123");
ASSERT_THAT(v, IsIntEq(123));
auto text = l.get();
ASSERT_NE(text.find("test string 123"), std::string::npos);
}
TEST_F(PrimOpTest, placeholder) {
auto v = eval("builtins.placeholder \"out\"");
ASSERT_THAT(v, IsStringEq("/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"));
}
TEST_F(PrimOpTest, baseNameOf) {
auto v = eval("builtins.baseNameOf /some/path");
ASSERT_THAT(v, IsStringEq("path"));
}
TEST_F(PrimOpTest, dirOf) {
auto v = eval("builtins.dirOf /some/path");
ASSERT_THAT(v, IsPathEq("/some"));
}
TEST_F(PrimOpTest, attrValues) {
auto v = eval("builtins.attrValues { x = \"foo\"; a = 1; }");
ASSERT_THAT(v, IsListOfSize(2));
ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
ASSERT_THAT(*v.listElems()[1], IsStringEq("foo"));
}
TEST_F(PrimOpTest, getAttr) {
auto v = eval("builtins.getAttr \"x\" { x = \"foo\"; }");
ASSERT_THAT(v, IsStringEq("foo"));
}
TEST_F(PrimOpTest, getAttrNotFound) {
// FIXME: TypeError is really bad here; also, the error wording is worse
// than on Nix <=2.3
ASSERT_THROW(eval("builtins.getAttr \"y\" { }"), TypeError);
}
TEST_F(PrimOpTest, unsafeGetAttrPos) {
// The `y` attribute is at position
const char* expr = "builtins.unsafeGetAttrPos \"y\" { y = \"x\"; }";
auto v = eval(expr);
ASSERT_THAT(v, IsNull());
}
TEST_F(PrimOpTest, hasAttr) {
auto v = eval("builtins.hasAttr \"x\" { x = 1; }");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, hasAttrNotFound) {
auto v = eval("builtins.hasAttr \"x\" { }");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, isAttrs) {
auto v = eval("builtins.isAttrs {}");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, isAttrsFalse) {
auto v = eval("builtins.isAttrs null");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, removeAttrs) {
auto v = eval("builtins.removeAttrs { x = 1; } [\"x\"]");
ASSERT_THAT(v, IsAttrsOfSize(0));
}
TEST_F(PrimOpTest, removeAttrsRetains) {
auto v = eval("builtins.removeAttrs { x = 1; y = 2; } [\"x\"]");
ASSERT_THAT(v, IsAttrsOfSize(1));
ASSERT_NE(v.attrs->find(createSymbol("y")), nullptr);
}
TEST_F(PrimOpTest, listToAttrsEmptyList) {
auto v = eval("builtins.listToAttrs []");
ASSERT_THAT(v, IsAttrsOfSize(0));
ASSERT_EQ(v.type(), nAttrs);
ASSERT_EQ(v.attrs->size(), 0);
}
TEST_F(PrimOpTest, listToAttrsNotFieldName) {
ASSERT_THROW(eval("builtins.listToAttrs [{}]"), Error);
}
TEST_F(PrimOpTest, listToAttrs) {
auto v = eval("builtins.listToAttrs [ { name = \"key\"; value = 123; } ]");
ASSERT_THAT(v, IsAttrsOfSize(1));
auto key = v.attrs->find(createSymbol("key"));
ASSERT_NE(key, nullptr);
ASSERT_THAT(*key->value, IsIntEq(123));
}
TEST_F(PrimOpTest, intersectAttrs) {
auto v = eval("builtins.intersectAttrs { a = 1; b = 2; } { b = 3; c = 4; }");
ASSERT_THAT(v, IsAttrsOfSize(1));
auto b = v.attrs->find(createSymbol("b"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsIntEq(3));
}
TEST_F(PrimOpTest, catAttrs) {
auto v = eval("builtins.catAttrs \"a\" [{a = 1;} {b = 0;} {a = 2;}]");
ASSERT_THAT(v, IsListOfSize(2));
ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
ASSERT_THAT(*v.listElems()[1], IsIntEq(2));
}
TEST_F(PrimOpTest, functionArgs) {
auto v = eval("builtins.functionArgs ({ x, y ? 123}: 1)");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto x = v.attrs->find(createSymbol("x"));
ASSERT_NE(x, nullptr);
ASSERT_THAT(*x->value, IsFalse());
auto y = v.attrs->find(createSymbol("y"));
ASSERT_NE(y, nullptr);
ASSERT_THAT(*y->value, IsTrue());
}
TEST_F(PrimOpTest, mapAttrs) {
auto v = eval("builtins.mapAttrs (name: value: value * 10) { a = 1; b = 2; }");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto a = v.attrs->find(createSymbol("a"));
ASSERT_NE(a, nullptr);
ASSERT_THAT(*a->value, IsThunk());
state.forceValue(*a->value, noPos);
ASSERT_THAT(*a->value, IsIntEq(10));
auto b = v.attrs->find(createSymbol("b"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsThunk());
state.forceValue(*b->value, noPos);
ASSERT_THAT(*b->value, IsIntEq(20));
}
TEST_F(PrimOpTest, isList) {
auto v = eval("builtins.isList []");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, isListFalse) {
auto v = eval("builtins.isList null");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, elemtAt) {
auto v = eval("builtins.elemAt [0 1 2 3] 3");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, elemtAtOutOfBounds) {
ASSERT_THROW(eval("builtins.elemAt [0 1 2 3] 5"), Error);
}
TEST_F(PrimOpTest, head) {
auto v = eval("builtins.head [ 3 2 1 0 ]");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, headEmpty) {
ASSERT_THROW(eval("builtins.head [ ]"), Error);
}
TEST_F(PrimOpTest, headWrongType) {
ASSERT_THROW(eval("builtins.head { }"), Error);
}
TEST_F(PrimOpTest, tail) {
auto v = eval("builtins.tail [ 3 2 1 0 ]");
ASSERT_THAT(v, IsListOfSize(3));
for (const auto [n, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsIntEq(2 - static_cast<int>(n)));
}
TEST_F(PrimOpTest, tailEmpty) {
ASSERT_THROW(eval("builtins.tail []"), Error);
}
TEST_F(PrimOpTest, map) {
auto v = eval("map (x: \"foo\" + x) [ \"bar\" \"bla\" \"abc\" ]");
ASSERT_THAT(v, IsListOfSize(3));
auto elem = v.listElems()[0];
ASSERT_THAT(*elem, IsThunk());
state.forceValue(*elem, noPos);
ASSERT_THAT(*elem, IsStringEq("foobar"));
elem = v.listElems()[1];
ASSERT_THAT(*elem, IsThunk());
state.forceValue(*elem, noPos);
ASSERT_THAT(*elem, IsStringEq("foobla"));
elem = v.listElems()[2];
ASSERT_THAT(*elem, IsThunk());
state.forceValue(*elem, noPos);
ASSERT_THAT(*elem, IsStringEq("fooabc"));
}
TEST_F(PrimOpTest, filter) {
auto v = eval("builtins.filter (x: x == 2) [ 3 2 3 2 3 2 ]");
ASSERT_THAT(v, IsListOfSize(3));
for (const auto elem : v.listItems())
ASSERT_THAT(*elem, IsIntEq(2));
}
TEST_F(PrimOpTest, elemTrue) {
auto v = eval("builtins.elem 3 [ 1 2 3 4 5 ]");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, elemFalse) {
auto v = eval("builtins.elem 6 [ 1 2 3 4 5 ]");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, concatLists) {
auto v = eval("builtins.concatLists [[1 2] [3 4]]");
ASSERT_THAT(v, IsListOfSize(4));
for (const auto [i, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
}
TEST_F(PrimOpTest, length) {
auto v = eval("builtins.length [ 1 2 3 ]");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, foldStrict) {
auto v = eval("builtins.foldl' (a: b: a + b) 0 [1 2 3]");
ASSERT_THAT(v, IsIntEq(6));
}
TEST_F(PrimOpTest, anyTrue) {
auto v = eval("builtins.any (x: x == 2) [ 1 2 3 ]");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, anyFalse) {
auto v = eval("builtins.any (x: x == 5) [ 1 2 3 ]");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, allTrue) {
auto v = eval("builtins.all (x: x > 0) [ 1 2 3 ]");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, allFalse) {
auto v = eval("builtins.all (x: x <= 0) [ 1 2 3 ]");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, genList) {
auto v = eval("builtins.genList (x: x + 1) 3");
ASSERT_EQ(v.type(), nList);
ASSERT_EQ(v.listSize(), 3);
for (const auto [i, elem] : enumerate(v.listItems())) {
ASSERT_THAT(*elem, IsThunk());
state.forceValue(*elem, noPos);
ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
}
}
TEST_F(PrimOpTest, sortLessThan) {
auto v = eval("builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ]");
ASSERT_EQ(v.type(), nList);
ASSERT_EQ(v.listSize(), 6);
const std::vector<int> numbers = { 42, 77, 147, 249, 483, 526 };
for (const auto [n, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsIntEq(numbers[n]));
}
TEST_F(PrimOpTest, partition) {
auto v = eval("builtins.partition (x: x > 10) [1 23 9 3 42]");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto right = v.attrs->get(createSymbol("right"));
ASSERT_NE(right, nullptr);
ASSERT_THAT(*right->value, IsListOfSize(2));
ASSERT_THAT(*right->value->listElems()[0], IsIntEq(23));
ASSERT_THAT(*right->value->listElems()[1], IsIntEq(42));
auto wrong = v.attrs->get(createSymbol("wrong"));
ASSERT_NE(wrong, nullptr);
ASSERT_EQ(wrong->value->type(), nList);
ASSERT_EQ(wrong->value->listSize(), 3);
ASSERT_THAT(*wrong->value, IsListOfSize(3));
ASSERT_THAT(*wrong->value->listElems()[0], IsIntEq(1));
ASSERT_THAT(*wrong->value->listElems()[1], IsIntEq(9));
ASSERT_THAT(*wrong->value->listElems()[2], IsIntEq(3));
}
TEST_F(PrimOpTest, concatMap) {
auto v = eval("builtins.concatMap (x: x ++ [0]) [ [1 2] [3 4] ]");
ASSERT_EQ(v.type(), nList);
ASSERT_EQ(v.listSize(), 6);
const std::vector<int> numbers = { 1, 2, 0, 3, 4, 0 };
for (const auto [n, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsIntEq(numbers[n]));
}
TEST_F(PrimOpTest, addInt) {
auto v = eval("builtins.add 3 5");
ASSERT_THAT(v, IsIntEq(8));
}
TEST_F(PrimOpTest, addFloat) {
auto v = eval("builtins.add 3.0 5.0");
ASSERT_THAT(v, IsFloatEq(8.0));
}
TEST_F(PrimOpTest, addFloatToInt) {
auto v = eval("builtins.add 3.0 5");
ASSERT_THAT(v, IsFloatEq(8.0));
v = eval("builtins.add 3 5.0");
ASSERT_THAT(v, IsFloatEq(8.0));
}
TEST_F(PrimOpTest, subInt) {
auto v = eval("builtins.sub 5 2");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, subFloat) {
auto v = eval("builtins.sub 5.0 2.0");
ASSERT_THAT(v, IsFloatEq(3.0));
}
TEST_F(PrimOpTest, subFloatFromInt) {
auto v = eval("builtins.sub 5.0 2");
ASSERT_THAT(v, IsFloatEq(3.0));
v = eval("builtins.sub 4 2.0");
ASSERT_THAT(v, IsFloatEq(2.0));
}
TEST_F(PrimOpTest, mulInt) {
auto v = eval("builtins.mul 3 5");
ASSERT_THAT(v, IsIntEq(15));
}
TEST_F(PrimOpTest, mulFloat) {
auto v = eval("builtins.mul 3.0 5.0");
ASSERT_THAT(v, IsFloatEq(15.0));
}
TEST_F(PrimOpTest, mulFloatMixed) {
auto v = eval("builtins.mul 3 5.0");
ASSERT_THAT(v, IsFloatEq(15.0));
v = eval("builtins.mul 2.0 5");
ASSERT_THAT(v, IsFloatEq(10.0));
}
TEST_F(PrimOpTest, divInt) {
auto v = eval("builtins.div 5 (-1)");
ASSERT_THAT(v, IsIntEq(-5));
}
TEST_F(PrimOpTest, divIntZero) {
ASSERT_THROW(eval("builtins.div 5 0"), EvalError);
}
TEST_F(PrimOpTest, divFloat) {
auto v = eval("builtins.div 5.0 (-1)");
ASSERT_THAT(v, IsFloatEq(-5.0));
}
TEST_F(PrimOpTest, divFloatZero) {
ASSERT_THROW(eval("builtins.div 5.0 0.0"), EvalError);
}
TEST_F(PrimOpTest, bitOr) {
auto v = eval("builtins.bitOr 1 2");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, bitXor) {
auto v = eval("builtins.bitXor 3 2");
ASSERT_THAT(v, IsIntEq(1));
}
TEST_F(PrimOpTest, lessThanFalse) {
auto v = eval("builtins.lessThan 3 1");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, lessThanTrue) {
auto v = eval("builtins.lessThan 1 3");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, toStringAttrsThrows) {
ASSERT_THROW(eval("builtins.toString {}"), EvalError);
}
TEST_F(PrimOpTest, toStringLambdaThrows) {
ASSERT_THROW(eval("builtins.toString (x: x)"), EvalError);
}
class ToStringPrimOpTest :
public PrimOpTest,
public testing::WithParamInterface<std::tuple<std::string, std::string_view>>
{};
TEST_P(ToStringPrimOpTest, toString) {
const auto [input, output] = GetParam();
auto v = eval(input);
ASSERT_THAT(v, IsStringEq(output));
}
#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " input), std::string_view(output)))
INSTANTIATE_TEST_SUITE_P(
toString,
ToStringPrimOpTest,
testing::Values(
CASE(R"("foo")", "foo"),
CASE(R"(1)", "1"),
CASE(R"([1 2 3])", "1 2 3"),
CASE(R"(.123)", "0.123000"),
CASE(R"(true)", "1"),
CASE(R"(false)", ""),
CASE(R"(null)", ""),
CASE(R"({ v = "bar"; __toString = self: self.v; })", "bar"),
CASE(R"({ v = "bar"; __toString = self: self.v; outPath = "foo"; })", "bar"),
CASE(R"({ outPath = "foo"; })", "foo"),
CASE(R"(./test)", "/test")
)
);
#undef CASE
TEST_F(PrimOpTest, substring){
auto v = eval("builtins.substring 0 3 \"nixos\"");
ASSERT_THAT(v, IsStringEq("nix"));
}
TEST_F(PrimOpTest, substringSmallerString){
auto v = eval("builtins.substring 0 3 \"n\"");
ASSERT_THAT(v, IsStringEq("n"));
}
TEST_F(PrimOpTest, substringEmptyString){
auto v = eval("builtins.substring 1 3 \"\"");
ASSERT_THAT(v, IsStringEq(""));
}
TEST_F(PrimOpTest, stringLength) {
auto v = eval("builtins.stringLength \"123\"");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, hashStringMd5) {
auto v = eval("builtins.hashString \"md5\" \"asdf\"");
ASSERT_THAT(v, IsStringEq("912ec803b2ce49e4a541068d495ab570"));
}
TEST_F(PrimOpTest, hashStringSha1) {
auto v = eval("builtins.hashString \"sha1\" \"asdf\"");
ASSERT_THAT(v, IsStringEq("3da541559918a808c2402bba5012f6c60b27661c"));
}
TEST_F(PrimOpTest, hashStringSha256) {
auto v = eval("builtins.hashString \"sha256\" \"asdf\"");
ASSERT_THAT(v, IsStringEq("f0e4c2f76c58916ec258f246851bea091d14d4247a2fc3e18694461b1816e13b"));
}
TEST_F(PrimOpTest, hashStringSha512) {
auto v = eval("builtins.hashString \"sha512\" \"asdf\"");
ASSERT_THAT(v, IsStringEq("401b09eab3c013d4ca54922bb802bec8fd5318192b0a75f201d8b3727429080fb337591abd3e44453b954555b7a0812e1081c39b740293f765eae731f5a65ed1"));
}
TEST_F(PrimOpTest, hashStringInvalidHashType) {
ASSERT_THROW(eval("builtins.hashString \"foobar\" \"asdf\""), Error);
}
TEST_F(PrimOpTest, nixPath) {
auto v = eval("builtins.nixPath");
ASSERT_EQ(v.type(), nList);
// We can't test much more as currently the EvalSettings are a global
// that we can't easily swap / replace
}
TEST_F(PrimOpTest, langVersion) {
auto v = eval("builtins.langVersion");
ASSERT_EQ(v.type(), nInt);
}
TEST_F(PrimOpTest, storeDir) {
auto v = eval("builtins.storeDir");
ASSERT_THAT(v, IsStringEq(settings.nixStore));
}
TEST_F(PrimOpTest, nixVersion) {
auto v = eval("builtins.nixVersion");
ASSERT_THAT(v, IsStringEq(nixVersion));
}
TEST_F(PrimOpTest, currentSystem) {
auto v = eval("builtins.currentSystem");
ASSERT_THAT(v, IsStringEq(settings.thisSystem.get()));
}
TEST_F(PrimOpTest, derivation) {
auto v = eval("derivation");
ASSERT_EQ(v.type(), nFunction);
ASSERT_TRUE(v.isLambda());
ASSERT_NE(v.lambda.fun, nullptr);
ASSERT_TRUE(v.lambda.fun->hasFormals());
}
TEST_F(PrimOpTest, currentTime) {
auto v = eval("builtins.currentTime");
ASSERT_EQ(v.type(), nInt);
ASSERT_TRUE(v.integer > 0);
}
TEST_F(PrimOpTest, splitVersion) {
auto v = eval("builtins.splitVersion \"1.2.3git\"");
ASSERT_THAT(v, IsListOfSize(4));
const std::vector<std::string_view> strings = { "1", "2", "3", "git" };
for (const auto [n, p] : enumerate(v.listItems()))
ASSERT_THAT(*p, IsStringEq(strings[n]));
}
class CompareVersionsPrimOpTest :
public PrimOpTest,
public testing::WithParamInterface<std::tuple<std::string, const int>>
{};
TEST_P(CompareVersionsPrimOpTest, compareVersions) {
auto [expression, expectation] = GetParam();
auto v = eval(expression);
ASSERT_THAT(v, IsIntEq(expectation));
}
#define CASE(a, b, expected) (std::make_tuple("builtins.compareVersions \"" #a "\" \"" #b "\"", expected))
INSTANTIATE_TEST_SUITE_P(
compareVersions,
CompareVersionsPrimOpTest,
testing::Values(
// The first two are weird cases. Intuition says they should
// be the same, but they aren't.
CASE(1.0, 1.0.0, -1),
CASE(1.0.0, 1.0, 1),
// the following are from the nix-env manual:
CASE(1.0, 2.3, -1),
CASE(2.1, 2.3, -1),
CASE(2.3, 2.3, 0),
CASE(2.5, 2.3, 1),
CASE(3.1, 2.3, 1),
CASE(2.3.1, 2.3, 1),
CASE(2.3.1, 2.3a, 1),
CASE(2.3pre1, 2.3, -1),
CASE(2.3pre3, 2.3pre12, -1),
CASE(2.3a, 2.3c, -1),
CASE(2.3pre1, 2.3c, -1),
CASE(2.3pre1, 2.3q, -1)
)
);
#undef CASE
class ParseDrvNamePrimOpTest :
public PrimOpTest,
public testing::WithParamInterface<std::tuple<std::string, std::string_view, std::string_view>>
{};
TEST_P(ParseDrvNamePrimOpTest, parseDrvName) {
auto [input, expectedName, expectedVersion] = GetParam();
const auto expr = fmt("builtins.parseDrvName \"%1%\"", input);
auto v = eval(expr);
ASSERT_THAT(v, IsAttrsOfSize(2));
auto name = v.attrs->find(createSymbol("name"));
ASSERT_TRUE(name);
ASSERT_THAT(*name->value, IsStringEq(expectedName));
auto version = v.attrs->find(createSymbol("version"));
ASSERT_TRUE(version);
ASSERT_THAT(*version->value, IsStringEq(expectedVersion));
}
INSTANTIATE_TEST_SUITE_P(
parseDrvName,
ParseDrvNamePrimOpTest,
testing::Values(
std::make_tuple("nix-0.12pre12876", "nix", "0.12pre12876"),
std::make_tuple("a-b-c-1234pre5+git", "a-b-c", "1234pre5+git")
)
);
TEST_F(PrimOpTest, replaceStrings) {
// FIXME: add a test that verifies the string context is as expected
auto v = eval("builtins.replaceStrings [\"oo\" \"a\"] [\"a\" \"i\"] \"foobar\"");
ASSERT_EQ(v.type(), nString);
ASSERT_EQ(v.string.s, std::string_view("fabir"));
}
TEST_F(PrimOpTest, concatStringsSep) {
// FIXME: add a test that verifies the string context is as expected
auto v = eval("builtins.concatStringsSep \"%\" [\"foo\" \"bar\" \"baz\"]");
ASSERT_EQ(v.type(), nString);
ASSERT_EQ(std::string_view(v.string.s), "foo%bar%baz");
}
TEST_F(PrimOpTest, split1) {
// v = [ "" [ "a" ] "c" ]
auto v = eval("builtins.split \"(a)b\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(3));
ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
ASSERT_THAT(*v.listElems()[2], IsStringEq("c"));
}
TEST_F(PrimOpTest, split2) {
// v is expected to be a list [ "" [ "a" ] "b" [ "c"] "" ]
auto v = eval("builtins.split \"([ac])\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(5));
ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));
ASSERT_THAT(*v.listElems()[3], IsListOfSize(1));
ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsStringEq("c"));
ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
}
TEST_F(PrimOpTest, split3) {
auto v = eval("builtins.split \"(a)|(c)\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(5));
// First list element
ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
// 2nd list element is a list [ "" null ]
ASSERT_THAT(*v.listElems()[1], IsListOfSize(2));
ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
ASSERT_THAT(*v.listElems()[1]->listElems()[1], IsNull());
// 3rd element
ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));
// 4th element is a list: [ null "c" ]
ASSERT_THAT(*v.listElems()[3], IsListOfSize(2));
ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsNull());
ASSERT_THAT(*v.listElems()[3]->listElems()[1], IsStringEq("c"));
// 5th element is the empty string
ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
}
TEST_F(PrimOpTest, split4) {
auto v = eval("builtins.split \"([[:upper:]]+)\" \" FOO \"");
ASSERT_THAT(v, IsListOfSize(3));
auto first = v.listElems()[0];
auto second = v.listElems()[1];
auto third = v.listElems()[2];
ASSERT_THAT(*first, IsStringEq(" "));
ASSERT_THAT(*second, IsListOfSize(1));
ASSERT_THAT(*second->listElems()[0], IsStringEq("FOO"));
ASSERT_THAT(*third, IsStringEq(" "));
}
TEST_F(PrimOpTest, match1) {
auto v = eval("builtins.match \"ab\" \"abc\"");
ASSERT_THAT(v, IsNull());
}
TEST_F(PrimOpTest, match2) {
auto v = eval("builtins.match \"abc\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(0));
}
TEST_F(PrimOpTest, match3) {
auto v = eval("builtins.match \"a(b)(c)\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(2));
ASSERT_THAT(*v.listElems()[0], IsStringEq("b"));
ASSERT_THAT(*v.listElems()[1], IsStringEq("c"));
}
TEST_F(PrimOpTest, match4) {
auto v = eval("builtins.match \"[[:space:]]+([[:upper:]]+)[[:space:]]+\" \" FOO \"");
ASSERT_THAT(v, IsListOfSize(1));
ASSERT_THAT(*v.listElems()[0], IsStringEq("FOO"));
}
TEST_F(PrimOpTest, attrNames) {
auto v = eval("builtins.attrNames { x = 1; y = 2; z = 3; a = 2; }");
ASSERT_THAT(v, IsListOfSize(4));
// ensure that the list is sorted
const std::vector<std::string_view> expected { "a", "x", "y", "z" };
for (const auto [n, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsStringEq(expected[n]));
}
TEST_F(PrimOpTest, genericClosure_not_strict) {
// Operator should not be used when startSet is empty
auto v = eval("builtins.genericClosure { startSet = []; }");
ASSERT_THAT(v, IsListOfSize(0));
}
} /* namespace nix */

View file

@ -1,90 +0,0 @@
#include <gtest/gtest.h>
#include <gmock/gmock.h>
#include "search-path.hh"
namespace nix {
TEST(SearchPathElem, parse_justPath) {
ASSERT_EQ(
SearchPath::Elem::parse("foo"),
(SearchPath::Elem {
.prefix = SearchPath::Prefix { .s = "" },
.path = SearchPath::Path { .s = "foo" },
}));
}
TEST(SearchPathElem, parse_emptyPrefix) {
ASSERT_EQ(
SearchPath::Elem::parse("=foo"),
(SearchPath::Elem {
.prefix = SearchPath::Prefix { .s = "" },
.path = SearchPath::Path { .s = "foo" },
}));
}
TEST(SearchPathElem, parse_oneEq) {
ASSERT_EQ(
SearchPath::Elem::parse("foo=bar"),
(SearchPath::Elem {
.prefix = SearchPath::Prefix { .s = "foo" },
.path = SearchPath::Path { .s = "bar" },
}));
}
TEST(SearchPathElem, parse_twoEqs) {
ASSERT_EQ(
SearchPath::Elem::parse("foo=bar=baz"),
(SearchPath::Elem {
.prefix = SearchPath::Prefix { .s = "foo" },
.path = SearchPath::Path { .s = "bar=baz" },
}));
}
TEST(SearchPathElem, suffixIfPotentialMatch_justPath) {
SearchPath::Prefix prefix { .s = "" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("any/thing"), std::optional { "any/thing" });
}
TEST(SearchPathElem, suffixIfPotentialMatch_misleadingPrefix1) {
SearchPath::Prefix prefix { .s = "foo" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX"), std::nullopt);
}
TEST(SearchPathElem, suffixIfPotentialMatch_misleadingPrefix2) {
SearchPath::Prefix prefix { .s = "foo" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX/bar"), std::nullopt);
}
TEST(SearchPathElem, suffixIfPotentialMatch_partialPrefix) {
SearchPath::Prefix prefix { .s = "fooX" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::nullopt);
}
TEST(SearchPathElem, suffixIfPotentialMatch_exactPrefix) {
SearchPath::Prefix prefix { .s = "foo" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::optional { "" });
}
TEST(SearchPathElem, suffixIfPotentialMatch_multiKey) {
SearchPath::Prefix prefix { .s = "foo/bar" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "baz" });
}
TEST(SearchPathElem, suffixIfPotentialMatch_trailingSlash) {
SearchPath::Prefix prefix { .s = "foo" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/"), std::optional { "" });
}
TEST(SearchPathElem, suffixIfPotentialMatch_trailingDoubleSlash) {
SearchPath::Prefix prefix { .s = "foo" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo//"), std::optional { "/" });
}
TEST(SearchPathElem, suffixIfPotentialMatch_trailingPath) {
SearchPath::Prefix prefix { .s = "foo" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "bar/baz" });
}
}

View file

@ -1,196 +0,0 @@
#include "tests/libexpr.hh"
namespace nix {
// Testing of trivial expressions
class TrivialExpressionTest : public LibExprTest {};
TEST_F(TrivialExpressionTest, true) {
auto v = eval("true");
ASSERT_THAT(v, IsTrue());
}
TEST_F(TrivialExpressionTest, false) {
auto v = eval("false");
ASSERT_THAT(v, IsFalse());
}
TEST_F(TrivialExpressionTest, null) {
auto v = eval("null");
ASSERT_THAT(v, IsNull());
}
TEST_F(TrivialExpressionTest, 1) {
auto v = eval("1");
ASSERT_THAT(v, IsIntEq(1));
}
TEST_F(TrivialExpressionTest, 1plus1) {
auto v = eval("1+1");
ASSERT_THAT(v, IsIntEq(2));
}
TEST_F(TrivialExpressionTest, minus1) {
auto v = eval("-1");
ASSERT_THAT(v, IsIntEq(-1));
}
TEST_F(TrivialExpressionTest, 1minus1) {
auto v = eval("1-1");
ASSERT_THAT(v, IsIntEq(0));
}
TEST_F(TrivialExpressionTest, lambdaAdd) {
auto v = eval("let add = a: b: a + b; in add 1 2");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(TrivialExpressionTest, list) {
auto v = eval("[]");
ASSERT_THAT(v, IsListOfSize(0));
}
TEST_F(TrivialExpressionTest, attrs) {
auto v = eval("{}");
ASSERT_THAT(v, IsAttrsOfSize(0));
}
TEST_F(TrivialExpressionTest, float) {
auto v = eval("1.234");
ASSERT_THAT(v, IsFloatEq(1.234));
}
TEST_F(TrivialExpressionTest, updateAttrs) {
auto v = eval("{ a = 1; } // { b = 2; a = 3; }");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto a = v.attrs->find(createSymbol("a"));
ASSERT_NE(a, nullptr);
ASSERT_THAT(*a->value, IsIntEq(3));
auto b = v.attrs->find(createSymbol("b"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsIntEq(2));
}
TEST_F(TrivialExpressionTest, hasAttrOpFalse) {
auto v = eval("{} ? a");
ASSERT_THAT(v, IsFalse());
}
TEST_F(TrivialExpressionTest, hasAttrOpTrue) {
auto v = eval("{ a = 123; } ? a");
ASSERT_THAT(v, IsTrue());
}
TEST_F(TrivialExpressionTest, withFound) {
auto v = eval("with { a = 23; }; a");
ASSERT_THAT(v, IsIntEq(23));
}
TEST_F(TrivialExpressionTest, withNotFound) {
ASSERT_THROW(eval("with {}; a"), Error);
}
TEST_F(TrivialExpressionTest, withOverride) {
auto v = eval("with { a = 23; }; with { a = 42; }; a");
ASSERT_THAT(v, IsIntEq(42));
}
TEST_F(TrivialExpressionTest, letOverWith) {
auto v = eval("let a = 23; in with { a = 1; }; a");
ASSERT_THAT(v, IsIntEq(23));
}
TEST_F(TrivialExpressionTest, multipleLet) {
auto v = eval("let a = 23; in let a = 42; in a");
ASSERT_THAT(v, IsIntEq(42));
}
TEST_F(TrivialExpressionTest, defaultFunctionArgs) {
auto v = eval("({ a ? 123 }: a) {}");
ASSERT_THAT(v, IsIntEq(123));
}
TEST_F(TrivialExpressionTest, defaultFunctionArgsOverride) {
auto v = eval("({ a ? 123 }: a) { a = 5; }");
ASSERT_THAT(v, IsIntEq(5));
}
TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureBack) {
auto v = eval("({ a ? 123 }@args: args) {}");
ASSERT_THAT(v, IsAttrsOfSize(0));
}
TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureFront) {
auto v = eval("(args@{ a ? 123 }: args) {}");
ASSERT_THAT(v, IsAttrsOfSize(0));
}
TEST_F(TrivialExpressionTest, assertThrows) {
ASSERT_THROW(eval("let x = arg: assert arg == 1; 123; in x 2"), Error);
}
TEST_F(TrivialExpressionTest, assertPassed) {
auto v = eval("let x = arg: assert arg == 1; 123; in x 1");
ASSERT_THAT(v, IsIntEq(123));
}
class AttrSetMergeTrvialExpressionTest :
public TrivialExpressionTest,
public testing::WithParamInterface<const char*>
{};
TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) {
// Usually Nix rejects duplicate keys in an attrset, but it does allow
// them if the attribute sets contain disjoint sets of keys.
// The below is equivalent to `{ a.b = 1; a.c = 2; }`.
// The attribute set `a` will be a Thunk at first, as the attributes
// have to be merged (or otherwise computed), and that is done in a lazy
// manner.
auto expr = GetParam();
auto v = eval(expr);
ASSERT_THAT(v, IsAttrsOfSize(1));
auto a = v.attrs->find(createSymbol("a"));
ASSERT_NE(a, nullptr);
ASSERT_THAT(*a->value, IsThunk());
state.forceValue(*a->value, noPos);
ASSERT_THAT(*a->value, IsAttrsOfSize(2));
auto b = a->value->attrs->find(createSymbol("b"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsIntEq(1));
auto c = a->value->attrs->find(createSymbol("c"));
ASSERT_NE(c, nullptr);
ASSERT_THAT(*c->value, IsIntEq(2));
}
INSTANTIATE_TEST_SUITE_P(
attrsetMergeLazy,
AttrSetMergeTrvialExpressionTest,
testing::Values(
"{ a.b = 1; a.c = 2; }",
"{ a = { b = 1; }; a = { c = 2; }; }"
)
);
TEST_F(TrivialExpressionTest, functor) {
auto v = eval("{ __functor = self: arg: self.v + arg; v = 10; } 5");
ASSERT_THAT(v, IsIntEq(15));
}
TEST_F(TrivialExpressionTest, bindOr) {
auto v = eval("{ or = 1; }");
ASSERT_THAT(v, IsAttrsOfSize(1));
auto b = v.attrs->find(createSymbol("or"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsIntEq(1));
}
TEST_F(TrivialExpressionTest, orCantBeUsed) {
ASSERT_THROW(eval("let or = 1; in or"), Error);
}
} /* namespace nix */

View file

@ -1,162 +0,0 @@
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>
#include <rapidcheck/gtest.h>
#include "tests/path.hh"
#include "tests/libexpr.hh"
#include "tests/value/context.hh"
namespace nix {
// Test a few cases of invalid string context elements.
TEST(NixStringContextElemTest, empty_invalid) {
EXPECT_THROW(
NixStringContextElem::parse(""),
BadNixStringContextElem);
}
TEST(NixStringContextElemTest, single_bang_invalid) {
EXPECT_THROW(
NixStringContextElem::parse("!"),
BadNixStringContextElem);
}
TEST(NixStringContextElemTest, double_bang_invalid) {
EXPECT_THROW(
NixStringContextElem::parse("!!/"),
BadStorePath);
}
TEST(NixStringContextElemTest, eq_slash_invalid) {
EXPECT_THROW(
NixStringContextElem::parse("=/"),
BadStorePath);
}
TEST(NixStringContextElemTest, slash_invalid) {
EXPECT_THROW(
NixStringContextElem::parse("/"),
BadStorePath);
}
/**
* Round trip (string <-> data structure) test for
* `NixStringContextElem::Opaque`.
*/
TEST(NixStringContextElemTest, opaque) {
std::string_view opaque = "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x";
auto elem = NixStringContextElem::parse(opaque);
auto * p = std::get_if<NixStringContextElem::Opaque>(&elem.raw);
ASSERT_TRUE(p);
ASSERT_EQ(p->path, StorePath { opaque });
ASSERT_EQ(elem.to_string(), opaque);
}
/**
* Round trip (string <-> data structure) test for
* `NixStringContextElem::DrvDeep`.
*/
TEST(NixStringContextElemTest, drvDeep) {
std::string_view drvDeep = "=g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
auto elem = NixStringContextElem::parse(drvDeep);
auto * p = std::get_if<NixStringContextElem::DrvDeep>(&elem.raw);
ASSERT_TRUE(p);
ASSERT_EQ(p->drvPath, StorePath { drvDeep.substr(1) });
ASSERT_EQ(elem.to_string(), drvDeep);
}
/**
* Round trip (string <-> data structure) test for a simpler
* `NixStringContextElem::Built`.
*/
TEST(NixStringContextElemTest, built_opaque) {
std::string_view built = "!foo!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
auto elem = NixStringContextElem::parse(built);
auto * p = std::get_if<NixStringContextElem::Built>(&elem.raw);
ASSERT_TRUE(p);
ASSERT_EQ(p->output, "foo");
ASSERT_EQ(*p->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque {
.path = StorePath { built.substr(5) },
}));
ASSERT_EQ(elem.to_string(), built);
}
/**
* Round trip (string <-> data structure) test for a more complex,
* inductive `NixStringContextElem::Built`.
*/
TEST(NixStringContextElemTest, built_built) {
/**
* We set these in tests rather than the regular globals so we don't have
* to worry about race conditions if the tests run concurrently.
*/
ExperimentalFeatureSettings mockXpSettings;
mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations");
std::string_view built = "!foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
auto elem = NixStringContextElem::parse(built, mockXpSettings);
auto * p = std::get_if<NixStringContextElem::Built>(&elem.raw);
ASSERT_TRUE(p);
ASSERT_EQ(p->output, "foo");
auto * drvPath = std::get_if<SingleDerivedPath::Built>(&*p->drvPath);
ASSERT_TRUE(drvPath);
ASSERT_EQ(drvPath->output, "bar");
ASSERT_EQ(*drvPath->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque {
.path = StorePath { built.substr(9) },
}));
ASSERT_EQ(elem.to_string(), built);
}
/**
* Without the right experimental features enabled, we cannot parse a
* complex inductive string context element.
*/
TEST(NixStringContextElemTest, built_built_xp) {
ASSERT_THROW(
NixStringContextElem::parse("!foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"), MissingExperimentalFeature);
}
}
namespace rc {
using namespace nix;
Gen<NixStringContextElem::DrvDeep> Arbitrary<NixStringContextElem::DrvDeep>::arbitrary()
{
return gen::just(NixStringContextElem::DrvDeep {
.drvPath = *gen::arbitrary<StorePath>(),
});
}
Gen<NixStringContextElem> Arbitrary<NixStringContextElem>::arbitrary()
{
switch (*gen::inRange<uint8_t>(0, std::variant_size_v<NixStringContextElem::Raw>)) {
case 0:
return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::Opaque>());
case 1:
return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::DrvDeep>());
case 2:
return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::Built>());
default:
assert(false);
}
}
}
namespace nix {
#ifndef COVERAGE
RC_GTEST_PROP(
NixStringContextElemTest,
prop_round_rip,
(const NixStringContextElem & o))
{
RC_ASSERT(o == NixStringContextElem::parse(o.to_string()));
}
#endif
}

View file

@ -1,31 +0,0 @@
#pragma once
///@file
#include <rapidcheck/gen/Arbitrary.h>
#include <value/context.hh>
namespace rc {
using namespace nix;
template<>
struct Arbitrary<NixStringContextElem::Opaque> {
static Gen<NixStringContextElem::Opaque> arbitrary();
};
template<>
struct Arbitrary<NixStringContextElem::Built> {
static Gen<NixStringContextElem::Built> arbitrary();
};
template<>
struct Arbitrary<NixStringContextElem::DrvDeep> {
static Gen<NixStringContextElem::DrvDeep> arbitrary();
};
template<>
struct Arbitrary<NixStringContextElem> {
static Gen<NixStringContextElem> arbitrary();
};
}

View file

@ -1,236 +0,0 @@
#include "tests/libexpr.hh"
#include "value.hh"
namespace nix {
using namespace testing;
struct ValuePrintingTests : LibExprTest
{
template<class... A>
void test(Value v, std::string_view expected, A... args)
{
std::stringstream out;
v.print(state.symbols, out, args...);
ASSERT_EQ(out.str(), expected);
}
};
TEST_F(ValuePrintingTests, tInt)
{
Value vInt;
vInt.mkInt(10);
test(vInt, "10");
}
TEST_F(ValuePrintingTests, tBool)
{
Value vBool;
vBool.mkBool(true);
test(vBool, "true");
}
TEST_F(ValuePrintingTests, tString)
{
Value vString;
vString.mkString("some-string");
test(vString, "\"some-string\"");
}
TEST_F(ValuePrintingTests, tPath)
{
Value vPath;
vPath.mkString("/foo");
test(vPath, "\"/foo\"");
}
TEST_F(ValuePrintingTests, tNull)
{
Value vNull;
vNull.mkNull();
test(vNull, "null");
}
TEST_F(ValuePrintingTests, tAttrs)
{
Value vOne;
vOne.mkInt(1);
Value vTwo;
vTwo.mkInt(2);
BindingsBuilder builder(state, state.allocBindings(10));
builder.insert(state.symbols.create("one"), &vOne);
builder.insert(state.symbols.create("two"), &vTwo);
Value vAttrs;
vAttrs.mkAttrs(builder.finish());
test(vAttrs, "{ one = 1; two = 2; }");
}
TEST_F(ValuePrintingTests, tList)
{
Value vOne;
vOne.mkInt(1);
Value vTwo;
vTwo.mkInt(2);
Value vList;
state.mkList(vList, 5);
vList.bigList.elems[0] = &vOne;
vList.bigList.elems[1] = &vTwo;
vList.bigList.size = 3;
test(vList, "[ 1 2 (nullptr) ]");
}
TEST_F(ValuePrintingTests, vThunk)
{
Value vThunk;
vThunk.mkThunk(nullptr, nullptr);
test(vThunk, "<CODE>");
}
TEST_F(ValuePrintingTests, vApp)
{
Value vApp;
vApp.mkApp(nullptr, nullptr);
test(vApp, "<CODE>");
}
TEST_F(ValuePrintingTests, vLambda)
{
Value vLambda;
vLambda.mkLambda(nullptr, nullptr);
test(vLambda, "<LAMBDA>");
}
TEST_F(ValuePrintingTests, vPrimOp)
{
Value vPrimOp;
vPrimOp.mkPrimOp(nullptr);
test(vPrimOp, "<PRIMOP>");
}
TEST_F(ValuePrintingTests, vPrimOpApp)
{
Value vPrimOpApp;
vPrimOpApp.mkPrimOpApp(nullptr, nullptr);
test(vPrimOpApp, "<PRIMOP-APP>");
}
TEST_F(ValuePrintingTests, vExternal)
{
struct MyExternal : ExternalValueBase
{
public:
std::string showType() const override
{
return "";
}
std::string typeOf() const override
{
return "";
}
virtual std::ostream & print(std::ostream & str) const override
{
str << "testing-external!";
return str;
}
} myExternal;
Value vExternal;
vExternal.mkExternal(&myExternal);
test(vExternal, "testing-external!");
}
TEST_F(ValuePrintingTests, vFloat)
{
Value vFloat;
vFloat.mkFloat(2.0);
test(vFloat, "2");
}
TEST_F(ValuePrintingTests, vBlackhole)
{
Value vBlackhole;
vBlackhole.mkBlackhole();
test(vBlackhole, "«potential infinite recursion»");
}
TEST_F(ValuePrintingTests, depthAttrs)
{
Value vOne;
vOne.mkInt(1);
Value vTwo;
vTwo.mkInt(2);
BindingsBuilder builder(state, state.allocBindings(10));
builder.insert(state.symbols.create("one"), &vOne);
builder.insert(state.symbols.create("two"), &vTwo);
Value vAttrs;
vAttrs.mkAttrs(builder.finish());
BindingsBuilder builder2(state, state.allocBindings(10));
builder2.insert(state.symbols.create("one"), &vOne);
builder2.insert(state.symbols.create("two"), &vTwo);
builder2.insert(state.symbols.create("nested"), &vAttrs);
Value vNested;
vNested.mkAttrs(builder2.finish());
test(vNested, "{ nested = «too deep»; one = «too deep»; two = «too deep»; }", false, 1);
test(vNested, "{ nested = { one = «too deep»; two = «too deep»; }; one = 1; two = 2; }", false, 2);
test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 3);
test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 4);
}
TEST_F(ValuePrintingTests, depthList)
{
Value vOne;
vOne.mkInt(1);
Value vTwo;
vTwo.mkInt(2);
BindingsBuilder builder(state, state.allocBindings(10));
builder.insert(state.symbols.create("one"), &vOne);
builder.insert(state.symbols.create("two"), &vTwo);
Value vAttrs;
vAttrs.mkAttrs(builder.finish());
BindingsBuilder builder2(state, state.allocBindings(10));
builder2.insert(state.symbols.create("one"), &vOne);
builder2.insert(state.symbols.create("two"), &vTwo);
builder2.insert(state.symbols.create("nested"), &vAttrs);
Value vNested;
vNested.mkAttrs(builder2.finish());
Value vList;
state.mkList(vList, 5);
vList.bigList.elems[0] = &vOne;
vList.bigList.elems[1] = &vTwo;
vList.bigList.elems[2] = &vNested;
vList.bigList.size = 3;
test(vList, "[ «too deep» «too deep» «too deep» ]", false, 1);
test(vList, "[ 1 2 { nested = «too deep»; one = «too deep»; two = «too deep»; } ]", false, 2);
test(vList, "[ 1 2 { nested = { one = «too deep»; two = «too deep»; }; one = 1; two = 2; } ]", false, 3);
test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 4);
test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 5);
}
} // namespace nix

View file

@ -1,7 +1,7 @@
#include "value-to-json.hh"
#include "eval-inline.hh"
#include "util.hh"
#include "store-api.hh"
#include "signals.hh"
#include <cstdlib>
#include <iomanip>
@ -31,7 +31,7 @@ json printValueAsJSON(EvalState & state, bool strict,
case nString:
copyContext(v, context);
out = v.string.s;
out = v.c_str();
break;
case nPath:

View file

@ -1,7 +1,7 @@
#include "value-to-xml.hh"
#include "xml-writer.hh"
#include "eval-inline.hh"
#include "util.hh"
#include "signals.hh"
#include <cstdlib>
@ -74,7 +74,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
case nString:
/* !!! show the context? */
copyContext(v, context);
doc.writeEmptyElement("string", singletonAttrs("value", v.string.s));
doc.writeEmptyElement("string", singletonAttrs("value", v.c_str()));
break;
case nPath:
@ -96,14 +96,14 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
if (a != v.attrs->end()) {
if (strict) state.forceValue(*a->value, a->pos);
if (a->value->type() == nString)
xmlAttrs["drvPath"] = drvPath = a->value->string.s;
xmlAttrs["drvPath"] = drvPath = a->value->c_str();
}
a = v.attrs->find(state.sOutPath);
if (a != v.attrs->end()) {
if (strict) state.forceValue(*a->value, a->pos);
if (a->value->type() == nString)
xmlAttrs["outPath"] = a->value->string.s;
xmlAttrs["outPath"] = a->value->c_str();
}
XMLOpenElement _(doc, "derivation", xmlAttrs);

View file

@ -3,6 +3,7 @@
#include <cassert>
#include <climits>
#include <span>
#include "symbol-table.hh"
#include "value/context.hh"
@ -158,60 +159,72 @@ public:
inline bool isPrimOp() const { return internalType == tPrimOp; };
inline bool isPrimOpApp() const { return internalType == tPrimOpApp; };
/**
* Strings in the evaluator carry a so-called `context` which
* is a list of strings representing store paths. This is to
* allow users to write things like
*
* "--with-freetype2-library=" + freetype + "/lib"
*
* where `freetype` is a derivation (or a source to be copied
* to the store). If we just concatenated the strings without
* keeping track of the referenced store paths, then if the
* string is used as a derivation attribute, the derivation
* will not have the correct dependencies in its inputDrvs and
* inputSrcs.
* The semantics of the context is as follows: when a string
* with context C is used as a derivation attribute, then the
* derivations in C will be added to the inputDrvs of the
* derivation, and the other store paths in C will be added to
* the inputSrcs of the derivations.
* For canonicity, the store paths should be in sorted order.
*/
struct StringWithContext {
const char * c_str;
const char * * context; // must be in sorted order
};
struct Path {
InputAccessor * accessor;
const char * path;
};
struct ClosureThunk {
Env * env;
Expr * expr;
};
struct FunctionApplicationThunk {
Value * left, * right;
};
struct Lambda {
Env * env;
ExprLambda * fun;
};
union
{
NixInt integer;
bool boolean;
/**
* Strings in the evaluator carry a so-called `context` which
* is a list of strings representing store paths. This is to
* allow users to write things like
StringWithContext string;
* "--with-freetype2-library=" + freetype + "/lib"
Path _path;
* where `freetype` is a derivation (or a source to be copied
* to the store). If we just concatenated the strings without
* keeping track of the referenced store paths, then if the
* string is used as a derivation attribute, the derivation
* will not have the correct dependencies in its inputDrvs and
* inputSrcs.
* The semantics of the context is as follows: when a string
* with context C is used as a derivation attribute, then the
* derivations in C will be added to the inputDrvs of the
* derivation, and the other store paths in C will be added to
* the inputSrcs of the derivations.
* For canonicity, the store paths should be in sorted order.
*/
struct {
const char * s;
const char * * context; // must be in sorted order
} string;
const char * _path;
Bindings * attrs;
struct {
size_t size;
Value * * elems;
} bigList;
Value * smallList[2];
struct {
Env * env;
Expr * expr;
} thunk;
struct {
Value * left, * right;
} app;
struct {
Env * env;
ExprLambda * fun;
} lambda;
ClosureThunk thunk;
FunctionApplicationThunk app;
Lambda lambda;
PrimOp * primOp;
struct {
Value * left, * right;
} primOpApp;
FunctionApplicationThunk primOpApp;
ExternalValueBase * external;
NixFloat fpoint;
};
@ -270,7 +283,7 @@ public:
inline void mkString(const char * s, const char * * context = 0)
{
internalType = tString;
string.s = s;
string.c_str = s;
string.context = context;
}
@ -287,11 +300,12 @@ public:
void mkPath(const SourcePath & path);
inline void mkPath(const char * path)
inline void mkPath(InputAccessor * accessor, const char * path)
{
clearValue();
internalType = tPath;
_path = path;
_path.accessor = accessor;
_path.path = path;
}
inline void mkNull()
@ -349,13 +363,7 @@ public:
// Value will be overridden anyways
}
inline void mkPrimOp(PrimOp * p)
{
clearValue();
internalType = tPrimOp;
primOp = p;
}
void mkPrimOp(PrimOp * p);
inline void mkPrimOpApp(Value * l, Value * r)
{
@ -388,7 +396,13 @@ public:
return internalType == tList1 || internalType == tList2 ? smallList : bigList.elems;
}
const Value * const * listElems() const
std::span<Value * const> listItems() const
{
assert(isList());
return std::span<Value * const>(listElems(), listSize());
}
Value * const * listElems() const
{
return internalType == tList1 || internalType == tList2 ? smallList : bigList.elems;
}
@ -407,44 +421,30 @@ public:
*/
bool isTrivial() const;
auto listItems()
{
struct ListIterable
{
typedef Value * const * iterator;
iterator _begin, _end;
iterator begin() const { return _begin; }
iterator end() const { return _end; }
};
assert(isList());
auto begin = listElems();
return ListIterable { begin, begin + listSize() };
}
auto listItems() const
{
struct ConstListIterable
{
typedef const Value * const * iterator;
iterator _begin, _end;
iterator begin() const { return _begin; }
iterator end() const { return _end; }
};
assert(isList());
auto begin = listElems();
return ConstListIterable { begin, begin + listSize() };
}
SourcePath path() const
{
assert(internalType == tPath);
return SourcePath{CanonPath(_path)};
return SourcePath {
.accessor = ref(_path.accessor->shared_from_this()),
.path = CanonPath(CanonPath::unchecked_t(), _path.path)
};
}
std::string_view str() const
std::string_view string_view() const
{
assert(internalType == tString);
return std::string_view(string.s);
return std::string_view(string.c_str);
}
const char * const c_str() const
{
assert(internalType == tString);
return string.c_str;
}
const char * * context() const
{
return string.context;
}
};
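As a concrete (if contrived) illustration of the invariant described in that comment, here is a sketch of building such a string by hand; the store path is a placeholder, and a real evaluator would allocate the value and its context through the GC rather than on the stack:
#include "value.hh"

#include <cassert>

// Sketch only: one hypothetical .drv path as context, null-terminated and
// (trivially) sorted, attached to a concatenated flag string.
static const char * exampleContext[] = {
    "/nix/store/0000000000000000000000000000000a-freetype.drv", // placeholder
    nullptr
};

void stringContextSketch()
{
    nix::Value v;
    v.mkString("--with-freetype2-library=/nix/store/...-freetype/lib", exampleContext);
    assert(v.type() == nix::nString);
    assert(v.context() == exampleContext); // the sorted array is stored as-is, not copied
}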

View file

@ -1,3 +1,4 @@
#include "util.hh"
#include "value/context.hh"
#include <optional>

View file

@ -1,7 +1,6 @@
#pragma once
///@file
#include "util.hh"
#include "comparator.hh"
#include "derived-path.hh"
#include "variant-wrapper.hh"

View file

@ -13,6 +13,12 @@
namespace nix::fetchers {
typedef std::variant<std::string, uint64_t, Explicit<bool>> Attr;
/**
* An `Attrs` can be thought of as a JSON object restricted or simplified

* to be "flat", not containing any subcontainers (arrays or objects)
* and also not containing any `null`s.
*/
typedef std::map<std::string, Attr> Attrs;
Attrs jsonToAttrs(const nlohmann::json & json);
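For illustration, a flat attribute set of this shape can be spelled out directly in C++ (the values are placeholders; `Explicit<bool>` comes from `types.hh`):
#include "attrs.hh"

// Sketch: one string, one integer and one explicit boolean — no nesting, no nulls.
nix::fetchers::Attrs exampleAttrs {
    { "type", std::string("git") },
    { "revCount", uint64_t(42) },
    { "shallow", nix::Explicit<bool>{ true } },
};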

View file

@ -1,4 +1,5 @@
#include "cache.hh"
#include "users.hh"
#include "sqlite.hh"
#include "sync.hh"
#include "store-api.hh"

View file

@ -2,6 +2,7 @@
///@file
#include "fetchers.hh"
#include "path.hh"
namespace nix::fetchers {

View file

@ -3,7 +3,6 @@
#include "types.hh"
#include "config.hh"
#include "util.hh"
#include <map>
#include <limits>

View file

@ -5,12 +5,31 @@
namespace nix::fetchers {
std::unique_ptr<std::vector<std::shared_ptr<InputScheme>>> inputSchemes = nullptr;
using InputSchemeMap = std::map<std::string_view, std::shared_ptr<InputScheme>>;
std::unique_ptr<InputSchemeMap> inputSchemes = nullptr;
void registerInputScheme(std::shared_ptr<InputScheme> && inputScheme)
{
if (!inputSchemes) inputSchemes = std::make_unique<std::vector<std::shared_ptr<InputScheme>>>();
inputSchemes->push_back(std::move(inputScheme));
if (!inputSchemes)
inputSchemes = std::make_unique<InputSchemeMap>();
auto schemeName = inputScheme->schemeName();
if (inputSchemes->count(schemeName) > 0)
throw Error("Input scheme with name %s already registered", schemeName);
inputSchemes->insert_or_assign(schemeName, std::move(inputScheme));
}
nlohmann::json dumpRegisterInputSchemeInfo() {
using nlohmann::json;
auto res = json::object();
for (auto & [name, scheme] : *inputSchemes) {
auto & r = res[name] = json::object();
r["allowedAttrs"] = scheme->allowedAttrs();
}
return res;
}
Input Input::fromURL(const std::string & url, bool requireTree)
@ -33,9 +52,10 @@ static void fixupInput(Input & input)
Input Input::fromURL(const ParsedURL & url, bool requireTree)
{
for (auto & inputScheme : *inputSchemes) {
for (auto & [_, inputScheme] : *inputSchemes) {
auto res = inputScheme->inputFromURL(url, requireTree);
if (res) {
experimentalFeatureSettings.require(inputScheme->experimentalFeature());
res->scheme = inputScheme;
fixupInput(*res);
return std::move(*res);
@ -47,19 +67,44 @@ Input Input::fromURL(const ParsedURL & url, bool requireTree)
Input Input::fromAttrs(Attrs && attrs)
{
for (auto & inputScheme : *inputSchemes) {
auto res = inputScheme->inputFromAttrs(attrs);
if (res) {
res->scheme = inputScheme;
fixupInput(*res);
return std::move(*res);
}
}
auto schemeName = ({
auto schemeNameOpt = maybeGetStrAttr(attrs, "type");
if (!schemeNameOpt)
throw Error("'type' attribute to specify input scheme is required but not provided");
*std::move(schemeNameOpt);
});
Input input;
input.attrs = attrs;
fixupInput(input);
return input;
auto raw = [&]() {
// Return an input without a scheme; most operations will fail,
// but not all of them. This is done to support those other
// operations which are supposed to be robust on
// unknown/uninterpretable inputs.
Input input;
input.attrs = attrs;
fixupInput(input);
return input;
};
std::shared_ptr<InputScheme> inputScheme = ({
auto i = inputSchemes->find(schemeName);
i == inputSchemes->end() ? nullptr : i->second;
});
if (!inputScheme) return raw();
experimentalFeatureSettings.require(inputScheme->experimentalFeature());
auto allowedAttrs = inputScheme->allowedAttrs();
for (auto & [name, _] : attrs)
if (name != "type" && allowedAttrs.count(name) == 0)
throw Error("input attribute '%s' not supported by scheme '%s'", name, schemeName);
auto res = inputScheme->inputFromAttrs(attrs);
if (!res) return raw();
res->scheme = inputScheme;
fixupInput(*res);
return std::move(*res);
}
ParsedURL Input::toURL() const
@ -82,16 +127,16 @@ std::string Input::to_string() const
return toURL().to_string();
}
bool Input::isDirect() const
{
return !scheme || scheme->isDirect(*this);
}
Attrs Input::toAttrs() const
{
return attrs;
}
bool Input::hasAllInfo() const
{
return getNarHash() && scheme && scheme->hasAllInfo(*this);
}
bool Input::operator ==(const Input & other) const
{
return attrs == other.attrs;
@ -107,7 +152,7 @@ bool Input::contains(const Input & other) const
return false;
}
std::pair<Tree, Input> Input::fetch(ref<Store> store) const
std::pair<StorePath, Input> Input::fetch(ref<Store> store) const
{
if (!scheme)
throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));
@ -115,7 +160,7 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
/* The tree may already be in the Nix store, or it could be
substituted (which is often faster than fetching from the
original source). So check that. */
if (hasAllInfo()) {
if (getNarHash()) {
try {
auto storePath = computeStorePath(*store);
@ -124,7 +169,7 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
debug("using substituted/cached input '%s' in '%s'",
to_string(), store->printStorePath(storePath));
return {Tree { .actualPath = store->toRealPath(storePath), .storePath = std::move(storePath) }, *this};
return {std::move(storePath), *this};
} catch (Error & e) {
debug("substitution of input '%s' failed: %s", to_string(), e.what());
}
@ -139,18 +184,16 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
}
}();
Tree tree {
.actualPath = store->toRealPath(storePath),
.storePath = storePath,
};
auto narHash = store->queryPathInfo(tree.storePath)->narHash;
input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
auto narHash = store->queryPathInfo(storePath)->narHash;
input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
if (auto prevNarHash = getNarHash()) {
if (narHash != *prevNarHash)
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
to_string(),
store->printStorePath(storePath),
prevNarHash->to_string(HashFormat::SRI, true),
narHash.to_string(HashFormat::SRI, true));
}
if (auto prevLastModified = getLastModified()) {
@ -173,9 +216,7 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
input.locked = true;
assert(input.hasAllInfo());
return {std::move(tree), input};
return {std::move(storePath), input};
}
Input Input::applyOverrides(
@ -198,12 +239,13 @@ std::optional<Path> Input::getSourcePath() const
return scheme->getSourcePath(*this);
}
void Input::markChangedFile(
std::string_view file,
void Input::putFile(
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const
{
assert(scheme);
return scheme->markChangedFile(*this, file, commitMsg);
return scheme->putFile(*this, path, contents, commitMsg);
}
std::string Input::getName() const
@ -254,7 +296,8 @@ std::optional<Hash> Input::getRev() const
try {
hash = Hash::parseAnyPrefixed(*s);
} catch (BadHash &e) {
// Default to sha1 for backwards compatibility with existing flakes
// Default to sha1 for backwards compatibility with existing
// usages (e.g. `builtins.fetchTree` calls or flake inputs).
hash = Hash::parseAny(*s, htSHA1);
}
}
@ -293,14 +336,18 @@ Input InputScheme::applyOverrides(
return input;
}
std::optional<Path> InputScheme::getSourcePath(const Input & input)
std::optional<Path> InputScheme::getSourcePath(const Input & input) const
{
return {};
}
void InputScheme::markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg)
void InputScheme::putFile(
const Input & input,
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const
{
assert(false);
throw Error("input '%s' does not support modifying file '%s'", input.to_string(), path);
}
void InputScheme::clone(const Input & input, const Path & destDir) const
@ -308,4 +355,14 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
throw Error("do not know how to clone input '%s'", input.to_string());
}
std::optional<ExperimentalFeature> InputScheme::experimentalFeature() const
{
return {};
}
std::string publicKeys_to_string(const std::vector<PublicKey>& publicKeys)
{
return ((nlohmann::json) publicKeys).dump();
}
}
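A rough sketch of how the new attribute-driven dispatch looks from the caller's side (the URL and attribute values below are invented for the example):
#include "fetchers.hh"

using namespace nix;
using namespace nix::fetchers;

void dispatchSketch()
{
    // A known "type" selects a registered scheme; every other attribute is
    // checked against that scheme's allowedAttrs() before inputFromAttrs().
    auto git = Input::fromAttrs(Attrs {
        { "type", std::string("git") },
        { "url", std::string("https://example.org/repo.git") }, // placeholder URL
    });

    // An unknown "type" no longer throws here: it yields a schemeless "raw"
    // input, so operations that must tolerate uninterpretable inputs keep
    // working (actually fetching such an input still fails).
    auto raw = Input::fromAttrs(Attrs {
        { "type", std::string("no-such-scheme") },
    });
}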

View file

@ -3,31 +3,25 @@
#include "types.hh"
#include "hash.hh"
#include "path.hh"
#include "canon-path.hh"
#include "attrs.hh"
#include "url.hh"
#include <memory>
#include <nlohmann/json_fwd.hpp>
namespace nix { class Store; }
namespace nix { class Store; class StorePath; }
namespace nix::fetchers {
struct Tree
{
Path actualPath;
StorePath storePath;
};
struct InputScheme;
/**
* The Input object is generated by a specific fetcher, based on the
* user-supplied input attribute in the flake.nix file, and contains
* The `Input` object is generated by a specific fetcher, based on
* user-supplied information, and contains
* the information that the specific fetcher needs to perform the
* actual fetch. The Input object is most commonly created via the
* "fromURL()" or "fromAttrs()" static functions which are provided
* the url or attrset specified in the flake file.
* `fromURL()` or `fromAttrs()` static functions.
*/
struct Input
{
@ -36,7 +30,6 @@ struct Input
std::shared_ptr<InputScheme> scheme; // note: can be null
Attrs attrs;
bool locked = false;
bool direct = true;
/**
* path of the parent of this input, used for relative path resolution
@ -44,10 +37,20 @@ struct Input
std::optional<Path> parent;
public:
/**
* Create an `Input` from a URL.
*
* The URL indicates which sort of fetcher to use, and provides information to that fetcher.
*/
static Input fromURL(const std::string & url, bool requireTree = true);
static Input fromURL(const ParsedURL & url, bool requireTree = true);
/**
* Create an `Input` from an `Attrs`.
*
* The attributes indicate which sort of fetcher to use, and provide information to that fetcher.
*/
static Input fromAttrs(Attrs && attrs);
ParsedURL toURL() const;
@ -62,7 +65,7 @@ public:
* Check whether this is a "direct" input, that is, not
* one that goes through a registry.
*/
bool isDirect() const { return direct; }
bool isDirect() const;
/**
* Check whether this is a "locked" input, that is,
@ -70,24 +73,15 @@ public:
*/
bool isLocked() const { return locked; }
/**
* Check whether the input carries all necessary info required
* for cache insertion and substitution.
* These fields are used to uniquely identify cached trees
* within the "tarball TTL" window without necessarily
* indicating that the input's origin is unchanged.
*/
bool hasAllInfo() const;
bool operator ==(const Input & other) const;
bool contains(const Input & other) const;
/**
* Fetch the input into the Nix store, returning the location in
* the Nix store and the locked input.
* Fetch the entire input into the Nix store, returning the
* location in the Nix store and the locked input.
*/
std::pair<Tree, Input> fetch(ref<Store> store) const;
std::pair<StorePath, Input> fetch(ref<Store> store) const;
Input applyOverrides(
std::optional<std::string> ref,
@ -97,8 +91,13 @@ public:
std::optional<Path> getSourcePath() const;
void markChangedFile(
std::string_view file,
/**
* Write a file to this input, for input types that support
* writing. Optionally commit the change (for e.g. Git inputs).
*/
void putFile(
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const;
std::string getName() const;
@ -116,13 +115,13 @@ public:
/**
* The InputScheme represents a type of fetcher. Each fetcher
* registers with nix at startup time. When processing an input for a
* flake, each scheme is given an opportunity to "recognize" that
* input from the url or attributes in the flake file's specification
* and return an Input object to represent the input if it is
* recognized. The Input object contains the information the fetcher
* needs to actually perform the "fetch()" when called.
* The `InputScheme` represents a type of fetcher. Each fetcher
* registers with nix at startup time. When processing an `Input`,
* each scheme is given an opportunity to "recognize" that
* input from the user-provided url or attributes
* and return an `Input` object to represent the input if it is
* recognized. The `Input` object contains the information the fetcher
* needs to actually perform the `fetch()` when called.
*/
struct InputScheme
{
@ -133,9 +132,25 @@ struct InputScheme
virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) const = 0;
virtual ParsedURL toURL(const Input & input) const;
/**
* What is the name of the scheme?
*
* The `type` attribute is used to select which input scheme is
* used, and then the other fields are forwarded to that input
* scheme.
*/
virtual std::string_view schemeName() const = 0;
virtual bool hasAllInfo(const Input & input) const = 0;
/**
* Allowed attributes in an attribute set that is converted to an
* input.
*
* `type` is not included in this set, because the `type` field is
* parsed first to choose which scheme; `type` is always required.
*/
virtual StringSet allowedAttrs() const = 0;
virtual ParsedURL toURL(const Input & input) const;
virtual Input applyOverrides(
const Input & input,
@ -144,42 +159,36 @@ struct InputScheme
virtual void clone(const Input & input, const Path & destDir) const;
virtual std::optional<Path> getSourcePath(const Input & input);
virtual std::optional<Path> getSourcePath(const Input & input) const;
virtual void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg);
virtual void putFile(
const Input & input,
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const;
virtual std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) = 0;
/**
* Is this `InputScheme` part of an experimental feature?
*/
virtual std::optional<ExperimentalFeature> experimentalFeature() const;
virtual bool isDirect(const Input & input) const
{ return true; }
};
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
struct DownloadFileResult
nlohmann::json dumpRegisterInputSchemeInfo();
struct PublicKey
{
StorePath storePath;
std::string etag;
std::string effectiveUrl;
std::optional<std::string> immutableUrl;
std::string type = "ssh-ed25519";
std::string key;
};
NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE_WITH_DEFAULT(PublicKey, type, key)
DownloadFileResult downloadFile(
ref<Store> store,
const std::string & url,
const std::string & name,
bool locked,
const Headers & headers = {});
struct DownloadTarballResult
{
Tree tree;
time_t lastModified;
std::optional<std::string> immutableUrl;
};
DownloadTarballResult downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,
bool locked,
const Headers & headers = {});
std::string publicKeys_to_string(const std::vector<PublicKey>&);
}
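To make the new surface easier to follow, here is the overall shape of a scheme under this interface, as a hedged skeleton rather than a real fetcher (the scheme name and attribute names are made up, and the error behaviour is only indicative):
#include "fetchers.hh"

using namespace nix;
using namespace nix::fetchers;

struct ExampleInputScheme : InputScheme
{
    std::string_view schemeName() const override
    { return "example"; }

    StringSet allowedAttrs() const override
    { return { "url", "narHash" }; }

    std::optional<Input> inputFromURL(const ParsedURL & url, bool requireTree) const override
    {
        if (url.scheme != schemeName()) return {};
        Input input;
        input.attrs.insert_or_assign("type", std::string { schemeName() });
        input.attrs.insert_or_assign("url", url.to_string());
        return input;
    }

    std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
    {
        // Unsupported attributes were already rejected by Input::fromAttrs().
        Input input;
        input.attrs = attrs;
        return input;
    }

    std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
    {
        throw Error("the example scheme does not know how to fetch '%s'", input.to_string());
    }
};

// Registered once at startup, like the built-in schemes:
// static auto rExampleInputScheme = OnStartup([] { registerInputScheme(std::make_unique<ExampleInputScheme>()); });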

View file

@ -0,0 +1,130 @@
#include "fs-input-accessor.hh"
#include "posix-source-accessor.hh"
#include "store-api.hh"
namespace nix {
struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
{
CanonPath root;
std::optional<std::set<CanonPath>> allowedPaths;
MakeNotAllowedError makeNotAllowedError;
FSInputAccessorImpl(
const CanonPath & root,
std::optional<std::set<CanonPath>> && allowedPaths,
MakeNotAllowedError && makeNotAllowedError)
: root(root)
, allowedPaths(std::move(allowedPaths))
, makeNotAllowedError(std::move(makeNotAllowedError))
{
}
void readFile(
const CanonPath & path,
Sink & sink,
std::function<void(uint64_t)> sizeCallback) override
{
auto absPath = makeAbsPath(path);
checkAllowed(absPath);
PosixSourceAccessor::readFile(absPath, sink, sizeCallback);
}
bool pathExists(const CanonPath & path) override
{
auto absPath = makeAbsPath(path);
return isAllowed(absPath) && PosixSourceAccessor::pathExists(absPath);
}
std::optional<Stat> maybeLstat(const CanonPath & path) override
{
auto absPath = makeAbsPath(path);
checkAllowed(absPath);
return PosixSourceAccessor::maybeLstat(absPath);
}
DirEntries readDirectory(const CanonPath & path) override
{
auto absPath = makeAbsPath(path);
checkAllowed(absPath);
DirEntries res;
for (auto & entry : PosixSourceAccessor::readDirectory(absPath))
if (isAllowed(absPath + entry.first))
res.emplace(entry);
return res;
}
std::string readLink(const CanonPath & path) override
{
auto absPath = makeAbsPath(path);
checkAllowed(absPath);
return PosixSourceAccessor::readLink(absPath);
}
CanonPath makeAbsPath(const CanonPath & path)
{
return root + path;
}
void checkAllowed(const CanonPath & absPath) override
{
if (!isAllowed(absPath))
throw makeNotAllowedError
? makeNotAllowedError(absPath)
: RestrictedPathError("access to path '%s' is forbidden", absPath);
}
bool isAllowed(const CanonPath & absPath)
{
if (!absPath.isWithin(root))
return false;
if (allowedPaths) {
auto p = absPath.removePrefix(root);
if (!p.isAllowed(*allowedPaths))
return false;
}
return true;
}
void allowPath(CanonPath path) override
{
if (allowedPaths)
allowedPaths->insert(std::move(path));
}
bool hasAccessControl() override
{
return (bool) allowedPaths;
}
std::optional<CanonPath> getPhysicalPath(const CanonPath & path) override
{
return makeAbsPath(path);
}
};
ref<FSInputAccessor> makeFSInputAccessor(
const CanonPath & root,
std::optional<std::set<CanonPath>> && allowedPaths,
MakeNotAllowedError && makeNotAllowedError)
{
return make_ref<FSInputAccessorImpl>(root, std::move(allowedPaths), std::move(makeNotAllowedError));
}
ref<FSInputAccessor> makeStorePathAccessor(
ref<Store> store,
const StorePath & storePath,
MakeNotAllowedError && makeNotAllowedError)
{
return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)), {}, std::move(makeNotAllowedError));
}
SourcePath getUnfilteredRootPath(CanonPath path)
{
static auto rootFS = makeFSInputAccessor(CanonPath::root);
return {rootFS, path};
}
}

View file

@ -0,0 +1,33 @@
#pragma once
#include "input-accessor.hh"
namespace nix {
class StorePath;
class Store;
struct FSInputAccessor : InputAccessor
{
virtual void checkAllowed(const CanonPath & absPath) = 0;
virtual void allowPath(CanonPath path) = 0;
virtual bool hasAccessControl() = 0;
};
typedef std::function<RestrictedPathError(const CanonPath & path)> MakeNotAllowedError;
ref<FSInputAccessor> makeFSInputAccessor(
const CanonPath & root,
std::optional<std::set<CanonPath>> && allowedPaths = {},
MakeNotAllowedError && makeNotAllowedError = {});
ref<FSInputAccessor> makeStorePathAccessor(
ref<Store> store,
const StorePath & storePath,
MakeNotAllowedError && makeNotAllowedError = {});
SourcePath getUnfilteredRootPath(CanonPath path);
}
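A usage sketch of the accessor factory above, with an invented root directory and allow-list (the error wording is illustrative):
#include "fs-input-accessor.hh"

using namespace nix;

void accessorSketch()
{
    // Only flake.nix under /some/checkout is visible at first; anything else
    // triggers the custom RestrictedPathError.
    auto accessor = makeFSInputAccessor(
        CanonPath("/some/checkout"),                  // placeholder root
        std::set<CanonPath> { CanonPath("flake.nix") },
        [](const CanonPath & path) {
            return RestrictedPathError("access to '%s' is filtered out", path);
        });

    accessor->allowPath(CanonPath("src"));            // widen the allow-list later
}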

View file

@ -1,11 +1,12 @@
#include "fetchers.hh"
#include "users.hh"
#include "cache.hh"
#include "globals.hh"
#include "tarfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
#include "pathlocks.hh"
#include "util.hh"
#include "processes.hh"
#include "git.hh"
#include "fetch-settings.hh"
@ -46,7 +47,7 @@ bool touchCacheFile(const Path & path, time_t touch_time)
Path getCachePath(std::string_view key)
{
return getCacheDir() + "/nix/gitv3/" +
hashString(htSHA256, key).to_string(Base32, false);
hashString(htSHA256, key).to_string(HashFormat::Base32, false);
}
// Returns the name of the HEAD branch.
@ -143,6 +144,69 @@ struct WorkdirInfo
bool hasHead = false;
};
std::vector<PublicKey> getPublicKeys(const Attrs & attrs) {
std::vector<PublicKey> publicKeys;
if (attrs.contains("publicKeys")) {
nlohmann::json publicKeysJson = nlohmann::json::parse(getStrAttr(attrs, "publicKeys"));
ensureType(publicKeysJson, nlohmann::json::value_t::array);
publicKeys = publicKeysJson.get<std::vector<PublicKey>>();
}
else {
publicKeys = {};
}
if (attrs.contains("publicKey"))
publicKeys.push_back(PublicKey{maybeGetStrAttr(attrs, "keytype").value_or("ssh-ed25519"),getStrAttr(attrs, "publicKey")});
return publicKeys;
}
void doCommitVerification(const Path repoDir, const Path gitDir, const std::string rev, const std::vector<PublicKey>& publicKeys) {
// Create ad-hoc allowedSignersFile and populate it with publicKeys
auto allowedSignersFile = createTempFile().second;
std::string allowedSigners;
for (const PublicKey& k : publicKeys) {
if (k.type != "ssh-dsa"
&& k.type != "ssh-ecdsa"
&& k.type != "ssh-ecdsa-sk"
&& k.type != "ssh-ed25519"
&& k.type != "ssh-ed25519-sk"
&& k.type != "ssh-rsa")
warn("Unknown keytype: %s\n"
"Please use one of\n"
"- ssh-dsa\n"
"- ssh-ecdsa\n"
"- ssh-ecdsa-sk\n"
"- ssh-ed25519\n"
"- ssh-ed25519-sk\n"
"- ssh-rsa", k.type);
allowedSigners += "* " + k.type + " " + k.key + "\n";
}
writeFile(allowedSignersFile, allowedSigners);
// Run verification command
auto [status, output] = runProgram(RunOptions {
.program = "git",
.args = {"-c", "gpg.ssh.allowedSignersFile=" + allowedSignersFile, "-C", repoDir,
"--git-dir", gitDir, "verify-commit", rev},
.mergeStderrToStdout = true,
});
/* Evaluate the result via the status code and by checking whether the public key fingerprints appear on stderr.
* This is necessary because the git command might also succeed due to the commit being signed by gpg keys
* that are present in the user's key agent. */
std::string re = R"(Good "git" signature for \* with .* key SHA256:[)";
for (const PublicKey& k : publicKeys){
// Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally
auto fingerprint = trim(hashString(htSHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" );
re += "(" + escaped_fingerprint + ")";
}
re += "]";
if (status == 0 && std::regex_search(output, std::regex(re)))
printTalkative("Signature verification on commit %s succeeded", rev);
else
throw Error("Commit signature verification on commit %s failed: \n%s", rev, output);
}
// Returns whether a git workdir is clean and has commits.
WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir)
{
@ -272,9 +336,9 @@ struct GitInputScheme : InputScheme
attrs.emplace("type", "git");
for (auto & [name, value] : url.query) {
if (name == "rev" || name == "ref")
if (name == "rev" || name == "ref" || name == "keytype" || name == "publicKey" || name == "publicKeys")
attrs.emplace(name, value);
else if (name == "shallow" || name == "submodules" || name == "allRefs")
else if (name == "shallow" || name == "submodules" || name == "allRefs" || name == "verifyCommit")
attrs.emplace(name, Explicit<bool> { value == "1" });
else
url2.query.emplace(name, value);
@ -285,18 +349,47 @@ struct GitInputScheme : InputScheme
return inputFromAttrs(attrs);
}
std::string_view schemeName() const override
{
return "git";
}
StringSet allowedAttrs() const override
{
return {
"url",
"ref",
"rev",
"shallow",
"submodules",
"lastModified",
"revCount",
"narHash",
"allRefs",
"name",
"dirtyRev",
"dirtyShortRev",
"verifyCommit",
"keytype",
"publicKey",
"publicKeys",
};
}
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != "git") return {};
for (auto & [name, _] : attrs)
if (name == "verifyCommit"
|| name == "keytype"
|| name == "publicKey"
|| name == "publicKeys")
experimentalFeatureSettings.require(Xp::VerifiedFetches);
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs" && name != "name" && name != "dirtyRev" && name != "dirtyShortRev")
throw Error("unsupported Git input attribute '%s'", name);
parseURL(getStrAttr(attrs, "url"));
maybeGetBoolAttr(attrs, "shallow");
maybeGetBoolAttr(attrs, "submodules");
maybeGetBoolAttr(attrs, "allRefs");
maybeGetBoolAttr(attrs, "verifyCommit");
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
if (std::regex_search(*ref, badGitRefRegex))
@ -305,6 +398,9 @@ struct GitInputScheme : InputScheme
Input input;
input.attrs = attrs;
auto url = fixGitURL(getStrAttr(attrs, "url"));
parseURL(url);
input.attrs["url"] = url;
return input;
}
@ -316,18 +412,18 @@ struct GitInputScheme : InputScheme
if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref);
if (maybeGetBoolAttr(input.attrs, "shallow").value_or(false))
url.query.insert_or_assign("shallow", "1");
if (maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(false))
url.query.insert_or_assign("verifyCommit", "1");
auto publicKeys = getPublicKeys(input.attrs);
if (publicKeys.size() == 1) {
url.query.insert_or_assign("keytype", publicKeys.at(0).type);
url.query.insert_or_assign("publicKey", publicKeys.at(0).key);
}
else if (publicKeys.size() > 1)
url.query.insert_or_assign("publicKeys", publicKeys_to_string(publicKeys));
return url;
}
bool hasAllInfo(const Input & input) const override
{
bool maybeDirty = !input.getRef();
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
return
maybeGetIntAttr(input.attrs, "lastModified")
&& (shallow || maybeDirty || maybeGetIntAttr(input.attrs, "revCount"));
}
Input applyOverrides(
const Input & input,
std::optional<std::string> ref,
@ -361,7 +457,7 @@ struct GitInputScheme : InputScheme
runProgram("git", true, args, {}, true);
}
std::optional<Path> getSourcePath(const Input & input) override
std::optional<Path> getSourcePath(const Input & input) const override
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
if (url.scheme == "file" && !input.getRef() && !input.getRev())
@ -369,18 +465,26 @@ struct GitInputScheme : InputScheme
return {};
}
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
void putFile(
const Input & input,
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const override
{
auto sourcePath = getSourcePath(input);
assert(sourcePath);
auto root = getSourcePath(input);
if (!root)
throw Error("cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string());
writeFile((CanonPath(*root) + path).abs(), contents);
auto gitDir = ".git";
runProgram("git", true,
{ "-C", *sourcePath, "--git-dir", gitDir, "add", "--intent-to-add", "--", std::string(file) });
{ "-C", *root, "--git-dir", gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) });
if (commitMsg)
runProgram("git", true,
{ "-C", *sourcePath, "--git-dir", gitDir, "commit", std::string(file), "-m", *commitMsg });
{ "-C", *root, "--git-dir", gitDir, "commit", std::string(path.rel()), "-m", *commitMsg });
}
std::pair<bool, std::string> getActualUrl(const Input & input) const
@ -406,6 +510,8 @@ struct GitInputScheme : InputScheme
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
bool allRefs = maybeGetBoolAttr(input.attrs, "allRefs").value_or(false);
std::vector<PublicKey> publicKeys = getPublicKeys(input.attrs);
bool verifyCommit = maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(!publicKeys.empty());
std::string cacheType = "git";
if (shallow) cacheType += "-shallow";
@ -415,7 +521,7 @@ struct GitInputScheme : InputScheme
auto checkHashType = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(Base16, true));
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
};
auto getLockedAttrs = [&]()
@ -426,6 +532,8 @@ struct GitInputScheme : InputScheme
{"type", cacheType},
{"name", name},
{"rev", input.getRev()->gitRev()},
{"verifyCommit", verifyCommit},
{"publicKeys", publicKeys_to_string(publicKeys)},
});
};
@ -448,12 +556,15 @@ struct GitInputScheme : InputScheme
auto [isLocal, actualUrl_] = getActualUrl(input);
auto actualUrl = actualUrl_; // work around clang bug
/* If this is a local directory and no ref or revision is given,
/* If this is a local directory, no ref or revision is given and no signature verification is needed,
allow fetching directly from a dirty workdir. */
if (!input.getRef() && !input.getRev() && isLocal) {
auto workdirInfo = getWorkdirInfo(input, actualUrl);
if (!workdirInfo.clean) {
return fetchFromWorkdir(store, input, actualUrl, workdirInfo);
if (verifyCommit)
throw Error("Can't fetch from a dirty workdir with commit signature verification enabled.");
else
return fetchFromWorkdir(store, input, actualUrl, workdirInfo);
}
}
@ -461,6 +572,8 @@ struct GitInputScheme : InputScheme
{"type", cacheType},
{"name", name},
{"url", actualUrl},
{"verifyCommit", verifyCommit},
{"publicKeys", publicKeys_to_string(publicKeys)},
});
Path repoDir;
@ -618,6 +731,9 @@ struct GitInputScheme : InputScheme
);
}
if (verifyCommit)
doCommitVerification(repoDir, gitDir, input.getRev()->gitRev(), publicKeys);
if (submodules) {
Path tmpGitDir = createTempDir();
AutoDelete delTmpGitDir(tmpGitDir, true);
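For a rough idea of how these verification attributes fit together from the caller's side (the URL and key material are placeholders, and the `verified-fetches` experimental feature must be enabled for them to be accepted):
#include "fetchers.hh"

using namespace nix;
using namespace nix::fetchers;

void verifiedGitSketch()
{
    auto input = Input::fromAttrs(Attrs {
        { "type", std::string("git") },
        { "url", std::string("https://example.org/repo.git") },      // placeholder
        { "ref", std::string("main") },
        { "verifyCommit", Explicit<bool>{ true } },
        { "keytype", std::string("ssh-ed25519") },
        { "publicKey", std::string("AAAAC3NzaC1lZDI1NTE5AAAAI...") }, // placeholder key
    });
    // fetch() refuses the resolved commit unless a signature by this key verifies.
}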

View file

@ -7,6 +7,7 @@
#include "git.hh"
#include "fetchers.hh"
#include "fetch-settings.hh"
#include "tarball.hh"
#include <optional>
#include <nlohmann/json.hpp>
@ -26,13 +27,11 @@ std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
struct GitArchiveInputScheme : InputScheme
{
virtual std::string type() const = 0;
virtual std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const = 0;
std::optional<Input> inputFromURL(const ParsedURL & url, bool requireTree) const override
{
if (url.scheme != type()) return {};
if (url.scheme != schemeName()) return {};
auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
@ -90,7 +89,7 @@ struct GitArchiveInputScheme : InputScheme
throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url.url, *ref, rev->gitRev());
Input input;
input.attrs.insert_or_assign("type", type());
input.attrs.insert_or_assign("type", std::string { schemeName() });
input.attrs.insert_or_assign("owner", path[0]);
input.attrs.insert_or_assign("repo", path[1]);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
@ -100,14 +99,21 @@ struct GitArchiveInputScheme : InputScheme
return input;
}
StringSet allowedAttrs() const override
{
return {
"owner",
"repo",
"ref",
"rev",
"narHash",
"lastModified",
"host",
};
}
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != type()) return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified" && name != "host")
throw Error("unsupported input attribute '%s'", name);
getStrAttr(attrs, "owner");
getStrAttr(attrs, "repo");
@ -125,18 +131,13 @@ struct GitArchiveInputScheme : InputScheme
auto path = owner + "/" + repo;
assert(!(ref && rev));
if (ref) path += "/" + *ref;
if (rev) path += "/" + rev->to_string(Base16, false);
if (rev) path += "/" + rev->to_string(HashFormat::Base16, false);
return ParsedURL {
.scheme = type(),
.scheme = std::string { schemeName() },
.path = path,
};
}
bool hasAllInfo(const Input & input) const override
{
return input.getRev() && maybeGetIntAttr(input.attrs, "lastModified");
}
Input applyOverrides(
const Input & _input,
std::optional<std::string> ref,
@ -218,16 +219,21 @@ struct GitArchiveInputScheme : InputScheme
{"rev", rev->gitRev()},
{"lastModified", uint64_t(result.lastModified)}
},
result.tree.storePath,
result.storePath,
true);
return {result.tree.storePath, input};
return {result.storePath, input};
}
std::optional<ExperimentalFeature> experimentalFeature() const override
{
return Xp::Flakes;
}
};
struct GitHubInputScheme : GitArchiveInputScheme
{
std::string type() const override { return "github"; }
std::string_view schemeName() const override { return "github"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{
@ -291,7 +297,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
: "https://api.%s/repos/%s/%s/tarball/%s";
const auto url = fmt(urlFmt, host, getOwner(input), getRepo(input),
input.getRev()->to_string(Base16, false));
input.getRev()->to_string(HashFormat::Base16, false));
return DownloadUrl { url, headers };
}
@ -308,7 +314,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
struct GitLabInputScheme : GitArchiveInputScheme
{
std::string type() const override { return "gitlab"; }
std::string_view schemeName() const override { return "gitlab"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{
@ -357,7 +363,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
input.getRev()->to_string(HashFormat::Base16, false));
Headers headers = makeHeadersWithAuthTokens(host);
return DownloadUrl { url, headers };
@ -376,7 +382,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
struct SourceHutInputScheme : GitArchiveInputScheme
{
std::string type() const override { return "sourcehut"; }
std::string_view schemeName() const override { return "sourcehut"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{
@ -444,7 +450,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
auto url = fmt("https://%s/%s/%s/archive/%s.tar.gz",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
input.getRev()->to_string(HashFormat::Base16, false));
Headers headers = makeHeadersWithAuthTokens(host);
return DownloadUrl { url, headers };

View file

@ -1,5 +1,6 @@
#include "fetchers.hh"
#include "url-parts.hh"
#include "path.hh"
namespace nix::fetchers {
@ -41,7 +42,6 @@ struct IndirectInputScheme : InputScheme
// FIXME: forbid query params?
Input input;
input.direct = false;
input.attrs.insert_or_assign("type", "indirect");
input.attrs.insert_or_assign("id", id);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
@ -50,20 +50,28 @@ struct IndirectInputScheme : InputScheme
return input;
}
std::string_view schemeName() const override
{
return "indirect";
}
StringSet allowedAttrs() const override
{
return {
"id",
"ref",
"rev",
"narHash",
};
}
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != "indirect") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "id" && name != "ref" && name != "rev" && name != "narHash")
throw Error("unsupported indirect input attribute '%s'", name);
auto id = getStrAttr(attrs, "id");
if (!std::regex_match(id, flakeRegex))
throw BadURL("'%s' is not a valid flake ID", id);
Input input;
input.direct = false;
input.attrs = attrs;
return input;
}
@ -78,11 +86,6 @@ struct IndirectInputScheme : InputScheme
return url;
}
bool hasAllInfo(const Input & input) const override
{
return false;
}
Input applyOverrides(
const Input & _input,
std::optional<std::string> ref,
@ -98,6 +101,14 @@ struct IndirectInputScheme : InputScheme
{
throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
}
std::optional<ExperimentalFeature> experimentalFeature() const override
{
return Xp::Flakes;
}
bool isDirect(const Input & input) const override
{ return false; }
};
static auto rIndirectInputScheme = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); });

View file

@ -3,12 +3,52 @@
namespace nix {
StorePath InputAccessor::fetchToStore(
ref<Store> store,
const CanonPath & path,
std::string_view name,
FileIngestionMethod method,
PathFilter * filter,
RepairFlag repair)
{
Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", showPath(path)));
auto source = sinkToSource([&](Sink & sink) {
if (method == FileIngestionMethod::Recursive)
dumpPath(path, sink, filter ? *filter : defaultPathFilter);
else
readFile(path, sink);
});
auto storePath =
settings.readOnlyMode
? store->computeStorePathFromDump(*source, name, method, htSHA256).first
: store->addToStoreFromDump(*source, name, method, htSHA256, repair);
return storePath;
}
SourcePath InputAccessor::root()
{
return {ref(shared_from_this()), CanonPath::root};
}
std::ostream & operator << (std::ostream & str, const SourcePath & path)
{
str << path.to_string();
return str;
}
StorePath SourcePath::fetchToStore(
ref<Store> store,
std::string_view name,
FileIngestionMethod method,
PathFilter * filter,
RepairFlag repair) const
{
return accessor->fetchToStore(store, path, name, method, filter, repair);
}
std::string_view SourcePath::baseName() const
{
return path.baseName().value_or("source");
@ -18,60 +58,12 @@ SourcePath SourcePath::parent() const
{
auto p = path.parent();
assert(p);
return std::move(*p);
}
InputAccessor::Stat SourcePath::lstat() const
{
auto st = nix::lstat(path.abs());
return InputAccessor::Stat {
.type =
S_ISREG(st.st_mode) ? InputAccessor::tRegular :
S_ISDIR(st.st_mode) ? InputAccessor::tDirectory :
S_ISLNK(st.st_mode) ? InputAccessor::tSymlink :
InputAccessor::tMisc,
.isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR
};
}
std::optional<InputAccessor::Stat> SourcePath::maybeLstat() const
{
// FIXME: merge these into one operation.
if (!pathExists())
return {};
return lstat();
}
InputAccessor::DirEntries SourcePath::readDirectory() const
{
InputAccessor::DirEntries res;
for (auto & entry : nix::readDirectory(path.abs())) {
std::optional<InputAccessor::Type> type;
switch (entry.type) {
case DT_REG: type = InputAccessor::Type::tRegular; break;
case DT_LNK: type = InputAccessor::Type::tSymlink; break;
case DT_DIR: type = InputAccessor::Type::tDirectory; break;
}
res.emplace(entry.name, type);
}
return res;
}
StorePath SourcePath::fetchToStore(
ref<Store> store,
std::string_view name,
PathFilter * filter,
RepairFlag repair) const
{
return
settings.readOnlyMode
? store->computeStorePathForPath(name, path.abs(), FileIngestionMethod::Recursive, htSHA256, filter ? *filter : defaultPathFilter).first
: store->addToStore(name, path.abs(), FileIngestionMethod::Recursive, htSHA256, filter ? *filter : defaultPathFilter, repair);
return {accessor, std::move(*p)};
}
SourcePath SourcePath::resolveSymlinks() const
{
SourcePath res(CanonPath::root);
auto res = accessor->root();
int linksAllowed = 1024;

View file

@ -1,40 +1,41 @@
#pragma once
///@file
#include "source-accessor.hh"
#include "ref.hh"
#include "types.hh"
#include "archive.hh"
#include "canon-path.hh"
#include "file-system.hh"
#include "repair-flag.hh"
#include "content-address.hh"
namespace nix {
MakeError(RestrictedPathError, Error);
struct SourcePath;
class StorePath;
class Store;
struct InputAccessor
struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<InputAccessor>
{
enum Type {
tRegular, tSymlink, tDirectory,
/**
Any other node types that may be encountered on the file system, such as device nodes, sockets, named pipe, and possibly even more exotic things.
Responsible for `"unknown"` from `builtins.readFileType "/dev/null"`.
Unlike `DT_UNKNOWN`, this must not be used for deferring the lookup of types.
*/
tMisc
};
struct Stat
/**
* Return the maximum last-modified time of the files in this
* tree, if available.
*/
virtual std::optional<time_t> getLastModified()
{
Type type = tMisc;
//uint64_t fileSize = 0; // regular files only
bool isExecutable = false; // regular files only
};
return std::nullopt;
}
typedef std::optional<Type> DirEntry;
StorePath fetchToStore(
ref<Store> store,
const CanonPath & path,
std::string_view name = "source",
FileIngestionMethod method = FileIngestionMethod::Recursive,
PathFilter * filter = nullptr,
RepairFlag repair = NoRepair);
typedef std::map<std::string, DirEntry> DirEntries;
SourcePath root();
};
/**
@ -45,12 +46,9 @@ struct InputAccessor
*/
struct SourcePath
{
ref<InputAccessor> accessor;
CanonPath path;
SourcePath(CanonPath path)
: path(std::move(path))
{ }
std::string_view baseName() const;
/**
@ -64,39 +62,42 @@ struct SourcePath
* return its contents; otherwise throw an error.
*/
std::string readFile() const
{ return nix::readFile(path.abs()); }
{ return accessor->readFile(path); }
/**
* Return whether this `SourcePath` denotes a file (of any type)
* that exists
*/
bool pathExists() const
{ return nix::pathExists(path.abs()); }
{ return accessor->pathExists(path); }
/**
* Return stats about this `SourcePath`, or throw an exception if
* it doesn't exist.
*/
InputAccessor::Stat lstat() const;
InputAccessor::Stat lstat() const
{ return accessor->lstat(path); }
/**
* Return stats about this `SourcePath`, or std::nullopt if it
* doesn't exist.
*/
std::optional<InputAccessor::Stat> maybeLstat() const;
std::optional<InputAccessor::Stat> maybeLstat() const
{ return accessor->maybeLstat(path); }
/**
* If this `SourcePath` denotes a directory (not a symlink),
* return its directory entries; otherwise throw an error.
*/
InputAccessor::DirEntries readDirectory() const;
InputAccessor::DirEntries readDirectory() const
{ return accessor->readDirectory(path); }
/**
* If this `SourcePath` denotes a symlink, return its target;
* otherwise throw an error.
*/
std::string readLink() const
{ return nix::readLink(path.abs()); }
{ return accessor->readLink(path); }
/**
* Dump this `SourcePath` to `sink` as a NAR archive.
@ -104,7 +105,7 @@ struct SourcePath
void dumpPath(
Sink & sink,
PathFilter & filter = defaultPathFilter) const
{ return nix::dumpPath(path.abs(), sink, filter); }
{ return accessor->dumpPath(path, sink, filter); }
/**
* Copy this `SourcePath` to the Nix store.
@ -112,6 +113,7 @@ struct SourcePath
StorePath fetchToStore(
ref<Store> store,
std::string_view name = "source",
FileIngestionMethod method = FileIngestionMethod::Recursive,
PathFilter * filter = nullptr,
RepairFlag repair = NoRepair) const;
@ -120,7 +122,7 @@ struct SourcePath
* it has a physical location.
*/
std::optional<CanonPath> getPhysicalPath() const
{ return path; }
{ return accessor->getPhysicalPath(path); }
std::string to_string() const
{ return path.abs(); }
@ -129,7 +131,7 @@ struct SourcePath
* Append a `CanonPath` to this path.
*/
SourcePath operator + (const CanonPath & x) const
{ return {path + x}; }
{ return {accessor, path + x}; }
/**
* Append a single component `c` to this path. `c` must not
@ -137,21 +139,21 @@ struct SourcePath
* and `c`.
*/
SourcePath operator + (std::string_view c) const
{ return {path + c}; }
{ return {accessor, path + c}; }
bool operator == (const SourcePath & x) const
{
return path == x.path;
return std::tie(accessor, path) == std::tie(x.accessor, x.path);
}
bool operator != (const SourcePath & x) const
{
return path != x.path;
return std::tie(accessor, path) != std::tie(x.accessor, x.path);
}
bool operator < (const SourcePath & x) const
{
return path < x.path;
return std::tie(accessor, path) < std::tie(x.accessor, x.path);
}
/**

View file

@ -0,0 +1,22 @@
#include "memory-input-accessor.hh"
#include "memory-source-accessor.hh"
namespace nix {
struct MemoryInputAccessorImpl : MemoryInputAccessor, MemorySourceAccessor
{
SourcePath addFile(CanonPath path, std::string && contents) override
{
return {
ref(shared_from_this()),
MemorySourceAccessor::addFile(path, std::move(contents))
};
}
};
ref<MemoryInputAccessor> makeMemoryInputAccessor()
{
return make_ref<MemoryInputAccessorImpl>();
}
}

View file

@ -0,0 +1,15 @@
#include "input-accessor.hh"
namespace nix {
/**
* An input accessor for an in-memory file system.
*/
struct MemoryInputAccessor : InputAccessor
{
virtual SourcePath addFile(CanonPath path, std::string && contents) = 0;
};
ref<MemoryInputAccessor> makeMemoryInputAccessor();
}
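A small usage sketch of the in-memory accessor (the file path and contents are invented):
#include "memory-input-accessor.hh"

#include <cassert>

using namespace nix;

void memoryAccessorSketch()
{
    auto fs = makeMemoryInputAccessor();

    // Files live purely in memory; addFile() hands back a SourcePath rooted
    // in this accessor.
    SourcePath p = fs->addFile(CanonPath("flake.nix"), "{ outputs = _: { }; }");

    assert(p.pathExists());
    assert(p.readFile() == "{ outputs = _: { }; }");
}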

View file

@ -1,4 +1,6 @@
#include "fetchers.hh"
#include "processes.hh"
#include "users.hh"
#include "cache.hh"
#include "globals.hh"
#include "tarfile.hh"
@ -69,14 +71,25 @@ struct MercurialInputScheme : InputScheme
return inputFromAttrs(attrs);
}
std::string_view schemeName() const override
{
return "hg";
}
StringSet allowedAttrs() const override
{
return {
"url",
"ref",
"rev",
"revCount",
"narHash",
"name",
};
}
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != "hg") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "revCount" && name != "narHash" && name != "name")
throw Error("unsupported Mercurial input attribute '%s'", name);
parseURL(getStrAttr(attrs, "url"));
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
@ -98,13 +111,6 @@ struct MercurialInputScheme : InputScheme
return url;
}
bool hasAllInfo(const Input & input) const override
{
// FIXME: ugly, need to distinguish between dirty and clean
// default trees.
return input.getRef() == "default" || maybeGetIntAttr(input.attrs, "revCount");
}
Input applyOverrides(
const Input & input,
std::optional<std::string> ref,
@ -116,7 +122,7 @@ struct MercurialInputScheme : InputScheme
return res;
}
std::optional<Path> getSourcePath(const Input & input) override
std::optional<Path> getSourcePath(const Input & input) const override
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
if (url.scheme == "file" && !input.getRef() && !input.getRev())
@ -124,18 +130,27 @@ struct MercurialInputScheme : InputScheme
return {};
}
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
void putFile(
const Input & input,
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const override
{
auto sourcePath = getSourcePath(input);
assert(sourcePath);
auto [isLocal, repoPath] = getActualUrl(input);
if (!isLocal)
throw Error("cannot commit '%s' to Mercurial repository '%s' because it's not a working tree", path, input.to_string());
auto absPath = CanonPath(repoPath) + path;
writeFile(absPath.abs(), contents);
// FIXME: shut up if file is already tracked.
runHg(
{ "add", *sourcePath + "/" + std::string(file) });
{ "add", absPath.abs() });
if (commitMsg)
runHg(
{ "commit", *sourcePath + "/" + std::string(file), "-m", *commitMsg });
{ "commit", absPath.abs(), "-m", *commitMsg });
}
std::pair<bool, std::string> getActualUrl(const Input & input) const
@ -206,7 +221,7 @@ struct MercurialInputScheme : InputScheme
auto checkHashType = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && hash->type != htSHA1)
throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(Base16, true));
throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
};
@ -252,7 +267,7 @@ struct MercurialInputScheme : InputScheme
}
}
Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false));
Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(HashFormat::Base32, false));
/* If this is a commit hash that we already have, we don't
have to pull again. */

View file

@ -32,23 +32,30 @@ struct PathInputScheme : InputScheme
return input;
}
std::string_view schemeName() const override
{
return "path";
}
StringSet allowedAttrs() const override
{
return {
"path",
/* Allow the user to pass in "fake" tree info
attributes. This is useful for making a pinned tree work
the same as the repository from which it is exported (e.g.
path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...).
*/
"rev",
"revCount",
"lastModified",
"narHash",
};
}
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != "path") return {};
getStrAttr(attrs, "path");
for (auto & [name, value] : attrs)
/* Allow the user to pass in "fake" tree info
attributes. This is useful for making a pinned tree
work the same as the repository from which is exported
(e.g. path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...). */
if (name == "type" || name == "rev" || name == "revCount" || name == "lastModified" || name == "narHash" || name == "path")
// checked in Input::fromAttrs
;
else
throw Error("unsupported path input attribute '%s'", name);
Input input;
input.attrs = attrs;
return input;
@ -66,19 +73,28 @@ struct PathInputScheme : InputScheme
};
}
bool hasAllInfo(const Input & input) const override
{
return true;
}
std::optional<Path> getSourcePath(const Input & input) override
std::optional<Path> getSourcePath(const Input & input) const override
{
return getStrAttr(input.attrs, "path");
}
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
void putFile(
const Input & input,
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const override
{
// nothing to do
writeFile((CanonPath(getAbsPath(input)) + path).abs(), contents);
}
CanonPath getAbsPath(const Input & input) const
{
auto path = getStrAttr(input.attrs, "path");
if (path[0] == '/')
return CanonPath(path);
throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
}
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
@ -125,6 +141,11 @@ struct PathInputScheme : InputScheme
return {std::move(*storePath), input};
}
std::optional<ExperimentalFeature> experimentalFeature() const override
{
return Xp::Flakes;
}
};
static auto rPathInputScheme = OnStartup([] { registerInputScheme(std::make_unique<PathInputScheme>()); });

View file

@ -1,6 +1,6 @@
#include "registry.hh"
#include "fetchers.hh"
#include "util.hh"
#include "tarball.hh"
#include "users.hh"
#include "globals.hh"
#include "store-api.hh"
#include "local-fs-store.hh"

View file

@ -1,3 +1,4 @@
#include "tarball.hh"
#include "fetchers.hh"
#include "cache.hh"
#include "filetransfer.hh"
@ -133,7 +134,7 @@ DownloadTarballResult downloadTarball(
if (cached && !cached->expired)
return {
.tree = Tree { .actualPath = store->toRealPath(cached->storePath), .storePath = std::move(cached->storePath) },
.storePath = std::move(cached->storePath),
.lastModified = (time_t) getIntAttr(cached->infoAttrs, "lastModified"),
.immutableUrl = maybeGetStrAttr(cached->infoAttrs, "immutableUrl"),
};
@ -174,7 +175,7 @@ DownloadTarballResult downloadTarball(
locked);
return {
.tree = Tree { .actualPath = store->toRealPath(*unpackedStorePath), .storePath = std::move(*unpackedStorePath) },
.storePath = std::move(*unpackedStorePath),
.lastModified = lastModified,
.immutableUrl = res.immutableUrl,
};
@ -183,7 +184,6 @@ DownloadTarballResult downloadTarball(
// An input scheme corresponding to a curl-downloadable resource.
struct CurlInputScheme : InputScheme
{
virtual const std::string inputType() const = 0;
const std::set<std::string> transportUrlSchemes = {"file", "http", "https"};
const bool hasTarballExtension(std::string_view path) const
@ -221,22 +221,27 @@ struct CurlInputScheme : InputScheme
url.query.erase("rev");
url.query.erase("revCount");
input.attrs.insert_or_assign("type", inputType());
input.attrs.insert_or_assign("type", std::string { schemeName() });
input.attrs.insert_or_assign("url", url.to_string());
return input;
}
StringSet allowedAttrs() const override
{
return {
"type",
"url",
"narHash",
"name",
"unpack",
"rev",
"revCount",
"lastModified",
};
}
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
auto type = maybeGetStrAttr(attrs, "type");
if (type != inputType()) return {};
// FIXME: some of these only apply to TarballInputScheme.
std::set<std::string> allowedNames = {"type", "url", "narHash", "name", "unpack", "rev", "revCount", "lastModified"};
for (auto & [name, value] : attrs)
if (!allowedNames.count(name))
throw Error("unsupported %s input attribute '%s'", *type, name);
Input input;
input.attrs = attrs;
@ -250,27 +255,21 @@ struct CurlInputScheme : InputScheme
// NAR hashes are preferred over file hashes since tar/zip
// files don't have a canonical representation.
if (auto narHash = input.getNarHash())
url.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
url.query.insert_or_assign("narHash", narHash->to_string(HashFormat::SRI, true));
return url;
}
bool hasAllInfo(const Input & input) const override
{
return true;
}
};
struct FileInputScheme : CurlInputScheme
{
const std::string inputType() const override { return "file"; }
std::string_view schemeName() const override { return "file"; }
bool isValidURL(const ParsedURL & url, bool requireTree) const override
{
auto parsedUrlScheme = parseUrlScheme(url.scheme);
return transportUrlSchemes.count(std::string(parsedUrlScheme.transport))
&& (parsedUrlScheme.application
? parsedUrlScheme.application.value() == inputType()
? parsedUrlScheme.application.value() == schemeName()
: (!requireTree && !hasTarballExtension(url.path)));
}
@ -283,7 +282,7 @@ struct FileInputScheme : CurlInputScheme
struct TarballInputScheme : CurlInputScheme
{
const std::string inputType() const override { return "tarball"; }
std::string_view schemeName() const override { return "tarball"; }
bool isValidURL(const ParsedURL & url, bool requireTree) const override
{
@ -291,7 +290,7 @@ struct TarballInputScheme : CurlInputScheme
return transportUrlSchemes.count(std::string(parsedUrlScheme.transport))
&& (parsedUrlScheme.application
? parsedUrlScheme.application.value() == inputType()
? parsedUrlScheme.application.value() == schemeName()
: (requireTree || hasTarballExtension(url.path)));
}
@ -313,7 +312,7 @@ struct TarballInputScheme : CurlInputScheme
if (result.lastModified && !input.attrs.contains("lastModified"))
input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified));
return {result.tree.storePath, std::move(input)};
return {result.storePath, std::move(input)};
}
};
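For orientation, a worked example of the dispatch above (URLs invented for illustration): with the schemeName() refactor, 'file+https://example.org/foo' is still claimed by FileInputScheme and 'tarball+https://example.org/foo' by TarballInputScheme through the explicit application prefix; a plain 'https://example.org/foo.tar.gz' goes to TarballInputScheme because of its tarball extension, while an extension-less 'https://example.org/foo' goes to FileInputScheme unless a tree is explicitly required.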

View file

@ -0,0 +1,43 @@
#pragma once
#include "types.hh"
#include "path.hh"
#include <optional>
namespace nix {
class Store;
}
namespace nix::fetchers {
struct DownloadFileResult
{
StorePath storePath;
std::string etag;
std::string effectiveUrl;
std::optional<std::string> immutableUrl;
};
DownloadFileResult downloadFile(
ref<Store> store,
const std::string & url,
const std::string & name,
bool locked,
const Headers & headers = {});
struct DownloadTarballResult
{
StorePath storePath;
time_t lastModified;
std::optional<std::string> immutableUrl;
};
DownloadTarballResult downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,
bool locked,
const Headers & headers = {});
}
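A minimal usage sketch of the new header (not part of the diff; the URL and the way the store handle is obtained are assumptions for illustration):

#include "tarball.hh"
#include "store-api.hh"

void fetchExample(nix::ref<nix::Store> store)
{
    // Download and unpack a tarball into the store; `locked` is false because
    // the example URL carries no integrity information of its own.
    auto res = nix::fetchers::downloadTarball(
        store, "https://example.org/src.tar.gz", "source", /* locked */ false);

    // res.storePath is the unpacked tree; res.lastModified and
    // res.immutableUrl are metadata reported by the transfer.
    auto where = store->printStorePath(res.storePath); // e.g. /nix/store/...-source
}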

View file

@ -1,6 +1,9 @@
#include "common-args.hh"
#include "args/root.hh"
#include "globals.hh"
#include "logging.hh"
#include "loggers.hh"
#include "util.hh"
namespace nix {
@ -34,21 +37,21 @@ MixCommonArgs::MixCommonArgs(const std::string & programName)
.description = "Set the Nix configuration setting *name* to *value* (overriding `nix.conf`).",
.category = miscCategory,
.labels = {"name", "value"},
.handler = {[](std::string name, std::string value) {
.handler = {[this](std::string name, std::string value) {
try {
globalConfig.set(name, value);
} catch (UsageError & e) {
if (!completions)
if (!getRoot().completions)
warn(e.what());
}
}},
.completer = [](size_t index, std::string_view prefix) {
.completer = [](AddCompletions & completions, size_t index, std::string_view prefix) {
if (index == 0) {
std::map<std::string, Config::SettingInfo> settings;
globalConfig.getSettings(settings);
for (auto & s : settings)
if (hasPrefix(s.first, prefix))
completions->add(s.first, fmt("Set the `%s` setting.", s.first));
completions.add(s.first, fmt("Set the `%s` setting.", s.first));
}
}
});

View file

@ -1,6 +1,6 @@
#include "loggers.hh"
#include "environment-variables.hh"
#include "progress-bar.hh"
#include "util.hh"
namespace nix {

View file

@ -1,5 +1,5 @@
#include "progress-bar.hh"
#include "util.hh"
#include "terminal.hh"
#include "sync.hh"
#include "store-api.hh"
#include "names.hh"

View file

@ -1,10 +1,11 @@
#include "globals.hh"
#include "current-process.hh"
#include "shared.hh"
#include "store-api.hh"
#include "gc-store.hh"
#include "util.hh"
#include "loggers.hh"
#include "progress-bar.hh"
#include "signals.hh"
#include <algorithm>
#include <cctype>
@ -379,9 +380,9 @@ RunPager::RunPager()
});
pid.setKillSignal(SIGINT);
stdout = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0);
std_out = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0);
if (dup2(toPager.writeSide.get(), STDOUT_FILENO) == -1)
throw SysError("dupping stdout");
throw SysError("dupping standard output");
}
@ -390,7 +391,7 @@ RunPager::~RunPager()
try {
if (pid != -1) {
std::cout.flush();
dup2(stdout, STDOUT_FILENO);
dup2(std_out, STDOUT_FILENO);
pid.wait();
}
} catch (...) {

View file

@ -1,8 +1,9 @@
#pragma once
///@file
#include "util.hh"
#include "processes.hh"
#include "args.hh"
#include "args/root.hh"
#include "common-args.hh"
#include "path.hh"
#include "derived-path.hh"
@ -66,7 +67,7 @@ template<class N> N getIntArg(const std::string & opt,
}
struct LegacyArgs : public MixCommonArgs
struct LegacyArgs : public MixCommonArgs, public RootArgs
{
std::function<bool(Strings::iterator & arg, const Strings::iterator & end)> parseArg;
@ -85,8 +86,9 @@ struct LegacyArgs : public MixCommonArgs
void showManPage(const std::string & name);
/**
* The constructor of this class starts a pager if stdout is a
* terminal and $PAGER is set. Stdout is redirected to the pager.
* The constructor of this class starts a pager if standard output is a
* terminal and $PAGER is set. Standard output is redirected to the
* pager.
*/
class RunPager
{
@ -96,7 +98,7 @@ public:
private:
Pid pid;
int stdout;
int std_out;
};
extern volatile ::sig_atomic_t blockInt;
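The stdout -> std_out rename above presumably avoids clashing with the stdout macro from <cstdio>. As a standalone sketch, the save-and-redirect pattern RunPager relies on looks roughly like this (any writable descriptor stands in for the pager's pipe):

#include <fcntl.h>
#include <unistd.h>

// Keep a duplicate of the current standard output, point STDOUT_FILENO at
// pagerFd, and hand back the saved descriptor so the caller can restore it
// later with dup2(saved, STDOUT_FILENO) once the pager has exited.
int redirectStdoutTo(int pagerFd)
{
    int saved = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0);
    if (saved == -1 || dup2(pagerFd, STDOUT_FILENO) == -1)
        return -1;
    return saved;
}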

View file

@ -2,7 +2,7 @@
#include "binary-cache-store.hh"
#include "compression.hh"
#include "derivations.hh"
#include "fs-accessor.hh"
#include "source-accessor.hh"
#include "globals.hh"
#include "nar-info.hh"
#include "sync.hh"
@ -11,6 +11,7 @@
#include "nar-accessor.hh"
#include "thread-pool.hh"
#include "callback.hh"
#include "signals.hh"
#include <chrono>
#include <future>
@ -143,7 +144,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
write the compressed NAR to disk), into a HashSink (to get the
NAR hash), and into a NarAccessor (to get the NAR listing). */
HashSink fileHashSink { htSHA256 };
std::shared_ptr<FSAccessor> narAccessor;
std::shared_ptr<SourceAccessor> narAccessor;
HashSink narHashSink { htSHA256 };
{
FdSink fileSink(fdTemp.get());
@ -164,7 +165,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
auto [fileHash, fileSize] = fileHashSink.finish();
narInfo->fileHash = fileHash;
narInfo->fileSize = fileSize;
narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar"
narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Base32, false) + ".nar"
+ (compression == "xz" ? ".xz" :
compression == "bzip2" ? ".bz2" :
compression == "zstd" ? ".zst" :
@ -195,7 +196,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
if (writeNARListing) {
nlohmann::json j = {
{"version", 1},
{"root", listNar(ref<FSAccessor>(narAccessor), "", true)},
{"root", listNar(ref<SourceAccessor>(narAccessor), CanonPath::root, true)},
};
upsertFile(std::string(info.path.hashPart()) + ".ls", j.dump(), "application/json");
@ -206,9 +207,9 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
specify the NAR file and member containing the debug info. */
if (writeDebugInfo) {
std::string buildIdDir = "/lib/debug/.build-id";
CanonPath buildIdDir("lib/debug/.build-id");
if (narAccessor->stat(buildIdDir).type == FSAccessor::tDirectory) {
if (auto st = narAccessor->maybeLstat(buildIdDir); st && st->type == SourceAccessor::tDirectory) {
ThreadPool threadPool(25);
@ -231,17 +232,17 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
std::regex regex1("^[0-9a-f]{2}$");
std::regex regex2("^[0-9a-f]{38}\\.debug$");
for (auto & s1 : narAccessor->readDirectory(buildIdDir)) {
auto dir = buildIdDir + "/" + s1;
for (auto & [s1, _type] : narAccessor->readDirectory(buildIdDir)) {
auto dir = buildIdDir + s1;
if (narAccessor->stat(dir).type != FSAccessor::tDirectory
if (narAccessor->lstat(dir).type != SourceAccessor::tDirectory
|| !std::regex_match(s1, regex1))
continue;
for (auto & s2 : narAccessor->readDirectory(dir)) {
auto debugPath = dir + "/" + s2;
for (auto & [s2, _type] : narAccessor->readDirectory(dir)) {
auto debugPath = dir + s2;
if (narAccessor->stat(debugPath).type != FSAccessor::tRegular
if (narAccessor->lstat(debugPath).type != SourceAccessor::tRegular
|| !std::regex_match(s2, regex2))
continue;
@ -250,7 +251,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
std::string key = "debuginfo/" + buildId;
std::string target = "../" + narInfo->url;
threadPool.enqueue(std::bind(doFile, std::string(debugPath, 1), key, target));
threadPool.enqueue(std::bind(doFile, std::string(debugPath.rel()), key, target));
}
}
@ -503,9 +504,9 @@ void BinaryCacheStore::registerDrvOutput(const Realisation& info) {
upsertFile(filePath, info.toJSON().dump(), "application/json");
}
ref<FSAccessor> BinaryCacheStore::getFSAccessor()
ref<SourceAccessor> BinaryCacheStore::getFSAccessor(bool requireValidPath)
{
return make_ref<RemoteFSAccessor>(ref<Store>(shared_from_this()), localNarCache);
return make_ref<RemoteFSAccessor>(ref<Store>(shared_from_this()), requireValidPath, localNarCache);
}
void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSet & sigs)

View file

@ -17,28 +17,28 @@ struct BinaryCacheStoreConfig : virtual StoreConfig
{
using StoreConfig::StoreConfig;
const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression",
const Setting<std::string> compression{this, "xz", "compression",
"NAR compression method (`xz`, `bzip2`, `gzip`, `zstd`, or `none`)."};
const Setting<bool> writeNARListing{(StoreConfig*) this, false, "write-nar-listing",
const Setting<bool> writeNARListing{this, false, "write-nar-listing",
"Whether to write a JSON file that lists the files in each NAR."};
const Setting<bool> writeDebugInfo{(StoreConfig*) this, false, "index-debug-info",
const Setting<bool> writeDebugInfo{this, false, "index-debug-info",
R"(
Whether to index DWARF debug info files by build ID. This allows [`dwarffs`](https://github.com/edolstra/dwarffs) to
fetch debug info on demand
)"};
const Setting<Path> secretKeyFile{(StoreConfig*) this, "", "secret-key",
const Setting<Path> secretKeyFile{this, "", "secret-key",
"Path to the secret key used to sign the binary cache."};
const Setting<Path> localNarCache{(StoreConfig*) this, "", "local-nar-cache",
const Setting<Path> localNarCache{this, "", "local-nar-cache",
"Path to a local cache of NARs fetched from this binary cache, used by commands such as `nix store cat`."};
const Setting<bool> parallelCompression{(StoreConfig*) this, false, "parallel-compression",
const Setting<bool> parallelCompression{this, false, "parallel-compression",
"Enable multi-threaded compression of NARs. This is currently only available for `xz` and `zstd`."};
const Setting<int> compressionLevel{(StoreConfig*) this, -1, "compression-level",
const Setting<int> compressionLevel{this, -1, "compression-level",
R"(
The *preset level* to be used when compressing NARs.
The meaning and accepted values depend on the compression method selected.
@ -148,7 +148,7 @@ public:
void narFromPath(const StorePath & path, Sink & sink) override;
ref<FSAccessor> getFSAccessor() override;
ref<SourceAccessor> getFSAccessor(bool requireValidPath) override;
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;

View file

@ -0,0 +1,18 @@
#include "build-result.hh"
namespace nix {
GENERATE_CMP_EXT(
,
BuildResult,
me->status,
me->errorMsg,
me->timesBuilt,
me->isNonDeterministic,
me->builtOutputs,
me->startTime,
me->stopTime,
me->cpuUser,
me->cpuSystem);
}

View file

@ -3,6 +3,7 @@
#include "realisation.hh"
#include "derived-path.hh"
#include "comparator.hh"
#include <string>
#include <chrono>
@ -100,6 +101,8 @@ struct BuildResult
*/
std::optional<std::chrono::microseconds> cpuUser, cpuSystem;
DECLARE_CMP(BuildResult);
bool success()
{
return status == Built || status == Substituted || status == AlreadyValid || status == ResolvesToAlreadyValid;

View file

@ -0,0 +1,37 @@
#include "child.hh"
#include "current-process.hh"
#include "logging.hh"
#include <fcntl.h>
#include <unistd.h>
namespace nix {
void commonChildInit()
{
logger = makeSimpleLogger();
const static std::string pathNullDevice = "/dev/null";
restoreProcessContext(false);
/* Put the child in a separate session (and thus a separate
process group) so that it has no controlling terminal (meaning
that e.g. ssh cannot open /dev/tty) and it doesn't receive
terminal signals. */
if (setsid() == -1)
throw SysError("creating a new session");
/* Dup stderr to stdout. */
if (dup2(STDERR_FILENO, STDOUT_FILENO) == -1)
throw SysError("cannot dup stderr into stdout");
/* Reroute stdin to /dev/null. */
int fdDevNull = open(pathNullDevice.c_str(), O_RDWR);
if (fdDevNull == -1)
throw SysError("cannot open '%1%'", pathNullDevice);
if (dup2(fdDevNull, STDIN_FILENO) == -1)
throw SysError("cannot dup null device into stdin");
close(fdDevNull);
}
}

View file

@ -0,0 +1,11 @@
#pragma once
///@file
namespace nix {
/**
* Common initialisation performed in child processes.
*/
void commonChildInit();
}

View file

@ -1,157 +0,0 @@
#include "create-derivation-and-realise-goal.hh"
#include "worker.hh"
namespace nix {
CreateDerivationAndRealiseGoal::CreateDerivationAndRealiseGoal(ref<SingleDerivedPath> drvReq,
const OutputsSpec & wantedOutputs, Worker & worker, BuildMode buildMode)
: Goal(worker, DerivedPath::Built { .drvPath = drvReq, .outputs = wantedOutputs })
, drvReq(drvReq)
, wantedOutputs(wantedOutputs)
, buildMode(buildMode)
{
state = &CreateDerivationAndRealiseGoal::getDerivation;
name = fmt(
"outer obtaining drv from '%s' and then building outputs %s",
drvReq->to_string(worker.store),
std::visit(overloaded {
[&](const OutputsSpec::All) -> std::string {
return "* (all of them)";
},
[&](const OutputsSpec::Names os) {
return concatStringsSep(", ", quoteStrings(os));
},
}, wantedOutputs.raw));
trace("created outer");
worker.updateProgress();
}
CreateDerivationAndRealiseGoal::~CreateDerivationAndRealiseGoal()
{
}
static StorePath pathPartOfReq(const SingleDerivedPath & req)
{
return std::visit(overloaded {
[&](const SingleDerivedPath::Opaque & bo) {
return bo.path;
},
[&](const SingleDerivedPath::Built & bfd) {
return pathPartOfReq(*bfd.drvPath);
},
}, req.raw());
}
std::string CreateDerivationAndRealiseGoal::key()
{
/* Ensure that derivations get built in order of their name,
i.e. a derivation named "aardvark" always comes before "baboon". And
substitution goals and inner derivation goals always happen before
derivation goals (due to "b$"). */
return "c$" + std::string(pathPartOfReq(*drvReq).name()) + "$" + drvReq->to_string(worker.store);
}
void CreateDerivationAndRealiseGoal::timedOut(Error && ex)
{
}
void CreateDerivationAndRealiseGoal::work()
{
(this->*state)();
}
void CreateDerivationAndRealiseGoal::addWantedOutputs(const OutputsSpec & outputs)
{
/* If we already want all outputs, there is nothing to do. */
auto newWanted = wantedOutputs.union_(outputs);
bool needRestart = !newWanted.isSubsetOf(wantedOutputs);
wantedOutputs = newWanted;
if (!needRestart) return;
if (!optDrvPath)
// haven't started steps where the outputs matter yet
return;
worker.makeDerivationGoal(*optDrvPath, outputs, buildMode);
}
void CreateDerivationAndRealiseGoal::getDerivation()
{
trace("outer init");
/* The first thing to do is to make sure that the derivation
exists. If it doesn't, it may be created through a
substitute. */
if (auto optDrvPath = [this]() -> std::optional<StorePath> {
if (buildMode != bmNormal) return std::nullopt;
auto drvPath = StorePath::dummy;
try {
drvPath = resolveDerivedPath(worker.store, *drvReq);
} catch (MissingRealisation &) {
return std::nullopt;
}
return worker.evalStore.isValidPath(drvPath) || worker.store.isValidPath(drvPath)
? std::optional { drvPath }
: std::nullopt;
}()) {
trace(fmt("already have drv '%s' for '%s', can go straight to building",
worker.store.printStorePath(*optDrvPath),
drvReq->to_string(worker.store)));
loadAndBuildDerivation();
} else {
trace("need to obtain drv we want to build");
addWaitee(worker.makeGoal(DerivedPath::fromSingle(*drvReq)));
state = &CreateDerivationAndRealiseGoal::loadAndBuildDerivation;
if (waitees.empty()) work();
}
}
void CreateDerivationAndRealiseGoal::loadAndBuildDerivation()
{
trace("outer load and build derivation");
if (nrFailed != 0) {
amDone(ecFailed, Error("cannot build missing derivation '%s'", drvReq->to_string(worker.store)));
return;
}
StorePath drvPath = resolveDerivedPath(worker.store, *drvReq);
/* Build this step! */
concreteDrvGoal = worker.makeDerivationGoal(drvPath, wantedOutputs, buildMode);
addWaitee(upcast_goal(concreteDrvGoal));
state = &CreateDerivationAndRealiseGoal::buildDone;
optDrvPath = std::move(drvPath);
if (waitees.empty()) work();
}
void CreateDerivationAndRealiseGoal::buildDone()
{
trace("outer build done");
buildResult = upcast_goal(concreteDrvGoal)->getBuildResult(DerivedPath::Built {
.drvPath = drvReq,
.outputs = wantedOutputs,
});
if (buildResult.success())
amDone(ecSuccess);
else
amDone(ecFailed, Error("building '%s' failed", drvReq->to_string(worker.store)));
}
}

View file

@ -1,96 +0,0 @@
#pragma once
#include "parsed-derivations.hh"
#include "lock.hh"
#include "store-api.hh"
#include "pathlocks.hh"
#include "goal.hh"
namespace nix {
struct DerivationGoal;
/**
* This goal type is essentially the serial composition (like function
* composition) of a goal for getting a derivation, and then a
* `DerivationGoal` using the newly-obtained derivation.
*
* In the (currently experimental) general inductive case of derivations
* that are themselves build outputs, that first goal will be *another*
* `CreateDerivationAndRealiseGoal`. In the (much more common) base-case
* where the derivation has no provenance and is just referred to by
* (content-addressed) store path, that first goal is a
* `SubstitutionGoal`.
*
* If we already have the derivation (e.g. if the evaluator has created
* the derivation locally and then instructed the store to build it),
* we can skip the first goal entirely as a small optimization.
*/
struct CreateDerivationAndRealiseGoal : public Goal
{
/**
* How to obtain a store path of the derivation to build.
*/
ref<SingleDerivedPath> drvReq;
/**
* The path of the derivation, once obtained.
**/
std::optional<StorePath> optDrvPath;
/**
* The goal for the corresponding concrete derivation.
**/
std::shared_ptr<DerivationGoal> concreteDrvGoal;
/**
* The specific outputs that we need to build.
*/
OutputsSpec wantedOutputs;
typedef void (CreateDerivationAndRealiseGoal::*GoalState)();
GoalState state;
/**
* The final output paths of the build.
*
* - For input-addressed derivations, always the precomputed paths
*
* - For content-addressed derivations, calculated from whatever the
* hash ends up being. (Note that fixed outputs derivations that
* produce the "wrong" output still install that data under its
* true content-address.)
*/
OutputPathMap finalOutputs;
BuildMode buildMode;
CreateDerivationAndRealiseGoal(ref<SingleDerivedPath> drvReq,
const OutputsSpec & wantedOutputs, Worker & worker,
BuildMode buildMode = bmNormal);
virtual ~CreateDerivationAndRealiseGoal();
void timedOut(Error && ex) override;
std::string key() override;
void work() override;
/**
* Add wanted outputs to an already existing derivation goal.
*/
void addWantedOutputs(const OutputsSpec & outputs);
/**
* The states.
*/
void getDerivation();
void loadAndBuildDerivation();
void buildDone();
JobCategory jobCategory() const override {
return JobCategory::Administration;
};
};
}

View file

@ -8,8 +8,8 @@
#include "util.hh"
#include "archive.hh"
#include "compression.hh"
#include "worker-protocol.hh"
#include "worker-protocol-impl.hh"
#include "common-protocol.hh"
#include "common-protocol-impl.hh"
#include "topo-sort.hh"
#include "callback.hh"
#include "local-store.hh" // TODO remove, along with remaining downcasts
@ -71,7 +71,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath,
, wantedOutputs(wantedOutputs)
, buildMode(buildMode)
{
state = &DerivationGoal::loadDerivation;
state = &DerivationGoal::getDerivation;
name = fmt(
"building of '%s' from .drv file",
DerivedPath::Built { makeConstantStorePathRef(drvPath), wantedOutputs }.to_string(worker.store));
@ -164,6 +164,24 @@ void DerivationGoal::addWantedOutputs(const OutputsSpec & outputs)
}
void DerivationGoal::getDerivation()
{
trace("init");
/* The first thing to do is to make sure that the derivation
exists. If it doesn't, it may be created through a
substitute. */
if (buildMode == bmNormal && worker.evalStore.isValidPath(drvPath)) {
loadDerivation();
return;
}
addWaitee(upcast_goal(worker.makePathSubstitutionGoal(drvPath)));
state = &DerivationGoal::loadDerivation;
}
void DerivationGoal::loadDerivation()
{
trace("loading derivation");
@ -543,7 +561,7 @@ void DerivationGoal::inputsRealised()
attempt = fullDrv.tryResolve(worker.store);
}
assert(attempt);
Derivation drvResolved { *std::move(attempt) };
Derivation drvResolved { std::move(*attempt) };
auto pathResolved = writeDerivation(worker.store, drvResolved);
@ -1167,11 +1185,11 @@ HookReply DerivationGoal::tryBuildHook()
throw;
}
WorkerProto::WriteConn conn { hook->sink };
CommonProto::WriteConn conn { hook->sink };
/* Tell the hook all the inputs that have to be copied to the
remote system. */
WorkerProto::write(worker.store, conn, inputPaths);
CommonProto::write(worker.store, conn, inputPaths);
/* Tell the hooks the missing outputs that have to be copied back
from the remote system. */
@ -1182,7 +1200,7 @@ HookReply DerivationGoal::tryBuildHook()
if (buildMode != bmCheck && status.known && status.known->isValid()) continue;
missingOutputs.insert(outputName);
}
WorkerProto::write(worker.store, conn, missingOutputs);
CommonProto::write(worker.store, conn, missingOutputs);
}
hook->sink = FdSink();
@ -1299,9 +1317,26 @@ void DerivationGoal::handleChildOutput(int fd, std::string_view data)
auto s = handleJSONLogMessage(*json, worker.act, hook->activities, true);
// ensure that logs from a builder using `ssh-ng://` as protocol
// are also available to `nix log`.
if (s && !isWrittenToLog && logSink && (*json)["type"] == resBuildLogLine) {
auto f = (*json)["fields"];
(*logSink)((f.size() > 0 ? f.at(0).get<std::string>() : "") + "\n");
if (s && !isWrittenToLog && logSink) {
const auto type = (*json)["type"];
const auto fields = (*json)["fields"];
if (type == resBuildLogLine) {
(*logSink)((fields.size() > 0 ? fields[0].get<std::string>() : "") + "\n");
} else if (type == resSetPhase && ! fields.is_null()) {
const auto phase = fields[0];
if (! phase.is_null()) {
// nixpkgs' stdenv produces lines in the log to signal
// phase changes.
// We want to get the same lines in case of remote builds.
// The format is:
// @nix { "action": "setPhase", "phase": "$curPhase" }
const auto logLine = nlohmann::json::object({
{"action", "setPhase"},
{"phase", phase}
});
(*logSink)("@nix " + logLine.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace) + "\n");
}
}
}
}
currentHookLine.clear();
@ -1456,6 +1491,7 @@ void DerivationGoal::done(
SingleDrvOutputs builtOutputs,
std::optional<Error> ex)
{
outputLocks.unlock();
buildResult.status = status;
if (ex)
buildResult.errorMsg = fmt("%s", normaltxt(ex->info().msg));
@ -1498,24 +1534,23 @@ void DerivationGoal::waiteeDone(GoalPtr waitee, ExitCode result)
if (!useDerivation) return;
auto & fullDrv = *dynamic_cast<Derivation *>(drv.get());
std::optional info = tryGetConcreteDrvGoal(waitee);
if (!info) return;
const auto & [dg, drvReq] = *info;
auto * dg = dynamic_cast<DerivationGoal *>(&*waitee);
if (!dg) return;
auto * nodeP = fullDrv.inputDrvs.findSlot(drvReq.get());
auto * nodeP = fullDrv.inputDrvs.findSlot(DerivedPath::Opaque { .path = dg->drvPath });
if (!nodeP) return;
auto & outputs = nodeP->value;
for (auto & outputName : outputs) {
auto buildResult = dg.get().getBuildResult(DerivedPath::Built {
.drvPath = makeConstantStorePathRef(dg.get().drvPath),
auto buildResult = dg->getBuildResult(DerivedPath::Built {
.drvPath = makeConstantStorePathRef(dg->drvPath),
.outputs = OutputsSpec::Names { outputName },
});
if (buildResult.success()) {
auto i = buildResult.builtOutputs.find(outputName);
if (i != buildResult.builtOutputs.end())
inputDrvOutputs.insert_or_assign(
{ dg.get().drvPath, outputName },
{ dg->drvPath, outputName },
i->second.outPath);
}
}

View file

@ -52,10 +52,6 @@ struct InitialOutput {
/**
* A goal for building some or all of the outputs of a derivation.
*
* The derivation must already be present, either in the store in a drv
* or in memory. If the derivation itself needs to be gotten first, a
* `CreateDerivationAndRealiseGoal` goal must be used instead.
*/
struct DerivationGoal : public Goal
{
@ -235,6 +231,7 @@ struct DerivationGoal : public Goal
/**
* The states.
*/
void getDerivation();
void loadDerivation();
void haveDerivation();
void outputsSubstitutionTried();

View file

@ -1,6 +1,5 @@
#include "worker.hh"
#include "substitution-goal.hh"
#include "create-derivation-and-realise-goal.hh"
#include "derivation-goal.hh"
#include "local-store.hh"
@ -16,7 +15,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
worker.run(goals);
StringSet failed;
StorePathSet failed;
std::optional<Error> ex;
for (auto & i : goals) {
if (i->ex) {
@ -26,10 +25,10 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
ex = std::move(i->ex);
}
if (i->exitCode != Goal::ecSuccess) {
if (auto i2 = dynamic_cast<CreateDerivationAndRealiseGoal *>(i.get()))
failed.insert(i2->drvReq->to_string(*this));
if (auto i2 = dynamic_cast<DerivationGoal *>(i.get()))
failed.insert(i2->drvPath);
else if (auto i2 = dynamic_cast<PathSubstitutionGoal *>(i.get()))
failed.insert(printStorePath(i2->storePath));
failed.insert(i2->storePath);
}
}
@ -38,7 +37,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
throw std::move(*ex);
} else if (!failed.empty()) {
if (ex) logError(ex->info());
throw Error(worker.failingExitStatus(), "build of %s failed", concatStringsSep(", ", quoteStrings(failed)));
throw Error(worker.failingExitStatus(), "build of %s failed", showPaths(failed));
}
}

View file

@ -49,16 +49,6 @@ enum struct JobCategory {
* A substitution of an arbitrary store object; it will use network resources.
*/
Substitution,
/**
* A goal that does no "real" work by itself, and just exists to depend on
* other goals which *do* do real work. These goals therefore are not
* limited.
*
* These goals cannot infinitely create themselves, so there is no risk of
* a "fork bomb" type situation (which would be a problem even though the
* goals do no real work) either.
*/
Administration,
};
struct Goal : public std::enable_shared_from_this<Goal>

View file

@ -1,5 +1,7 @@
#include "globals.hh"
#include "hook-instance.hh"
#include "file-system.hh"
#include "child.hh"
namespace nix {

View file

@ -3,6 +3,7 @@
#include "logging.hh"
#include "serialise.hh"
#include "processes.hh"
namespace nix {

View file

@ -15,7 +15,10 @@
#include "json-utils.hh"
#include "cgroup.hh"
#include "personality.hh"
#include "current-process.hh"
#include "namespaces.hh"
#include "child.hh"
#include "unix-domain-socket.hh"
#include <regex>
#include <queue>
@ -227,7 +230,7 @@ void LocalDerivationGoal::tryLocalBuild()
if (!buildUser) {
if (!actLock)
actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
fmt("waiting for UID to build '%s'", yellowtxt(worker.store.printStorePath(drvPath))));
fmt("waiting for a free build user ID for '%s'", yellowtxt(worker.store.printStorePath(drvPath))));
worker.waitForAWhile(shared_from_this());
return;
}
@ -386,27 +389,27 @@ void LocalDerivationGoal::cleanupPostOutputsRegisteredModeNonCheck()
cleanupPostOutputsRegisteredModeCheck();
}
#if __linux__
static void linkOrCopy(const Path & from, const Path & to)
{
if (link(from.c_str(), to.c_str()) == -1) {
/* Hard-linking fails if we exceed the maximum link count on a
file (e.g. 32000 of ext3), which is quite possible after a
'nix-store --optimise'. FIXME: actually, why don't we just
bind-mount in this case?
It can also fail with EPERM in BeegFS v7 and earlier versions
or fail with EXDEV in OpenAFS
which don't allow hard-links to other directories */
if (errno != EMLINK && errno != EPERM && errno != EXDEV)
throw SysError("linking '%s' to '%s'", to, from);
copyPath(from, to);
static void doBind(const Path & source, const Path & target, bool optional = false) {
debug("bind mounting '%1%' to '%2%'", source, target);
struct stat st;
if (stat(source.c_str(), &st) == -1) {
if (optional && errno == ENOENT)
return;
else
throw SysError("getting attributes of path '%1%'", source);
}
}
if (S_ISDIR(st.st_mode))
createDirs(target);
else {
createDirs(dirOf(target));
writeFile(target, "");
}
if (mount(source.c_str(), target.c_str(), "", MS_BIND | MS_REC, 0) == -1)
throw SysError("bind mount from '%1%' to '%2%' failed", source, target);
};
#endif
void LocalDerivationGoal::startBuilder()
{
if ((buildUser && buildUser->getUIDCount() != 1)
@ -581,7 +584,7 @@ void LocalDerivationGoal::startBuilder()
/* Allow a user-configurable set of directories from the
host file system. */
dirsInChroot.clear();
pathsInChroot.clear();
for (auto i : settings.sandboxPaths.get()) {
if (i.empty()) continue;
@ -592,19 +595,19 @@ void LocalDerivationGoal::startBuilder()
}
size_t p = i.find('=');
if (p == std::string::npos)
dirsInChroot[i] = {i, optional};
pathsInChroot[i] = {i, optional};
else
dirsInChroot[i.substr(0, p)] = {i.substr(p + 1), optional};
pathsInChroot[i.substr(0, p)] = {i.substr(p + 1), optional};
}
if (hasPrefix(worker.store.storeDir, tmpDirInSandbox))
{
throw Error("`sandbox-build-dir` must not contain the storeDir");
}
dirsInChroot[tmpDirInSandbox] = tmpDir;
pathsInChroot[tmpDirInSandbox] = tmpDir;
/* Add the closure of store paths to the chroot. */
StorePathSet closure;
for (auto & i : dirsInChroot)
for (auto & i : pathsInChroot)
try {
if (worker.store.isInStore(i.second.source))
worker.store.computeFSClosure(worker.store.toStorePath(i.second.source).first, closure);
@ -615,7 +618,7 @@ void LocalDerivationGoal::startBuilder()
}
for (auto & i : closure) {
auto p = worker.store.printStorePath(i);
dirsInChroot.insert_or_assign(p, p);
pathsInChroot.insert_or_assign(p, p);
}
PathSet allowedPaths = settings.allowedImpureHostPrefixes;
@ -643,14 +646,14 @@ void LocalDerivationGoal::startBuilder()
/* Allow files in __impureHostDeps to be missing; e.g.
macOS 11+ has no /usr/lib/libSystem*.dylib */
dirsInChroot[i] = {i, true};
pathsInChroot[i] = {i, true};
}
#if __linux__
/* Create a temporary directory in which we set up the chroot
environment using bind-mounts. We put it in the Nix store
to ensure that we can create hard-links to non-directory
inputs in the fake Nix store in the chroot (see below). */
so that the build outputs can be moved efficiently from the
chroot to their final location. */
chrootRootDir = worker.store.Store::toRealPath(drvPath) + ".chroot";
deletePath(chrootRootDir);
@ -711,15 +714,12 @@ void LocalDerivationGoal::startBuilder()
for (auto & i : inputPaths) {
auto p = worker.store.printStorePath(i);
Path r = worker.store.toRealPath(p);
if (S_ISDIR(lstat(r).st_mode))
dirsInChroot.insert_or_assign(p, r);
else
linkOrCopy(r, chrootRootDir + p);
pathsInChroot.insert_or_assign(p, r);
}
/* If we're repairing, checking or rebuilding part of a
multiple-outputs derivation, it's possible that we're
rebuilding a path that is in settings.dirsInChroot
rebuilding a path that is in settings.sandbox-paths
(typically the dependencies of /bin/sh). Throw them
out. */
for (auto & i : drv->outputsAndOptPaths(worker.store)) {
@ -729,7 +729,7 @@ void LocalDerivationGoal::startBuilder()
is already in the sandbox, so we don't need to worry about
removing it. */
if (i.second.second)
dirsInChroot.erase(worker.store.printStorePath(*i.second.second));
pathsInChroot.erase(worker.store.printStorePath(*i.second.second));
}
if (cgroup) {
@ -787,9 +787,9 @@ void LocalDerivationGoal::startBuilder()
} else {
auto p = line.find('=');
if (p == std::string::npos)
dirsInChroot[line] = line;
pathsInChroot[line] = line;
else
dirsInChroot[line.substr(0, p)] = line.substr(p + 1);
pathsInChroot[line.substr(0, p)] = line.substr(p + 1);
}
}
}
@ -1066,7 +1066,7 @@ void LocalDerivationGoal::initTmpDir() {
env[i.first] = i.second;
} else {
auto hash = hashString(htSHA256, i.first);
std::string fn = ".attr-" + hash.to_string(Base32, false);
std::string fn = ".attr-" + hash.to_string(HashFormat::Base32, false);
Path p = tmpDir + "/" + fn;
writeFile(p, rewriteStrings(i.second, inputRewrites));
chownToBuilder(p);
@ -1135,8 +1135,18 @@ void LocalDerivationGoal::initEnv()
fixed-output derivations is by definition pure (since we
already know the cryptographic hash of the output). */
if (!derivationType->isSandboxed()) {
for (auto & i : parsedDrv->getStringsAttr("impureEnvVars").value_or(Strings()))
env[i] = getEnv(i).value_or("");
auto & impureEnv = settings.impureEnv.get();
if (!impureEnv.empty())
experimentalFeatureSettings.require(Xp::ConfigurableImpureEnv);
for (auto & i : parsedDrv->getStringsAttr("impureEnvVars").value_or(Strings())) {
auto envVar = impureEnv.find(i);
if (envVar != impureEnv.end()) {
env[i] = envVar->second;
} else {
env[i] = getEnv(i).value_or("");
}
}
}
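To make the new lookup order concrete (setting and feature names below are spelled from memory and should be treated as an assumption): with the configurable-impure-env experimental feature enabled and something like impure-env = NIX_SSL_CERT_FILE=/etc/ssl/ca.crt in nix.conf, a non-sandboxed (e.g. fixed-output) derivation that lists NIX_SSL_CERT_FILE in its impureEnvVars now receives the configured value, and only falls back to the daemon's own environment for variables that the setting does not mention.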
/* Currently structured log messages piggyback on stderr, but we
@ -1555,41 +1565,33 @@ void LocalDerivationGoal::addDependency(const StorePath & path)
Path source = worker.store.Store::toRealPath(path);
Path target = chrootRootDir + worker.store.printStorePath(path);
debug("bind-mounting %s -> %s", target, source);
if (pathExists(target))
if (pathExists(target)) {
// There is a similar debug message in doBind, so only run it in this block to not have double messages.
debug("bind-mounting %s -> %s", target, source);
throw Error("store path '%s' already exists in the sandbox", worker.store.printStorePath(path));
}
auto st = lstat(source);
/* Bind-mount the path into the sandbox. This requires
entering its mount namespace, which is not possible
in multithreaded programs. So we do this in a
child process.*/
Pid child(startProcess([&]() {
if (S_ISDIR(st.st_mode)) {
if (usingUserNamespace && (setns(sandboxUserNamespace.get(), 0) == -1))
throw SysError("entering sandbox user namespace");
/* Bind-mount the path into the sandbox. This requires
entering its mount namespace, which is not possible
in multithreaded programs. So we do this in a
child process.*/
Pid child(startProcess([&]() {
if (setns(sandboxMountNamespace.get(), 0) == -1)
throw SysError("entering sandbox mount namespace");
if (usingUserNamespace && (setns(sandboxUserNamespace.get(), 0) == -1))
throw SysError("entering sandbox user namespace");
doBind(source, target);
if (setns(sandboxMountNamespace.get(), 0) == -1)
throw SysError("entering sandbox mount namespace");
_exit(0);
}));
createDirs(target);
if (mount(source.c_str(), target.c_str(), "", MS_BIND, 0) == -1)
throw SysError("bind mount from '%s' to '%s' failed", source, target);
_exit(0);
}));
int status = child.wait();
if (status != 0)
throw Error("could not add path '%s' to sandbox", worker.store.printStorePath(path));
} else
linkOrCopy(source, target);
int status = child.wait();
if (status != 0)
throw Error("could not add path '%s' to sandbox", worker.store.printStorePath(path));
#else
throw Error("don't know how to make path '%s' (produced by a recursive Nix call) appear in the sandbox",
@ -1621,6 +1623,8 @@ void setupSeccomp()
seccomp_release(ctx);
});
constexpr std::string_view nativeSystem = SYSTEM;
if (nativeSystem == "x86_64-linux" &&
seccomp_arch_add(ctx, SCMP_ARCH_X86) != 0)
throw SysError("unable to add 32-bit seccomp architecture");
@ -1779,7 +1783,7 @@ void LocalDerivationGoal::runChild()
/* Set up a nearly empty /dev, unless the user asked to
bind-mount the host /dev. */
Strings ss;
if (dirsInChroot.find("/dev") == dirsInChroot.end()) {
if (pathsInChroot.find("/dev") == pathsInChroot.end()) {
createDirs(chrootRootDir + "/dev/shm");
createDirs(chrootRootDir + "/dev/pts");
ss.push_back("/dev/full");
@ -1814,34 +1818,15 @@ void LocalDerivationGoal::runChild()
ss.push_back(path);
if (settings.caFile != "")
dirsInChroot.try_emplace("/etc/ssl/certs/ca-certificates.crt", settings.caFile, true);
pathsInChroot.try_emplace("/etc/ssl/certs/ca-certificates.crt", settings.caFile, true);
}
for (auto & i : ss) dirsInChroot.emplace(i, i);
for (auto & i : ss) pathsInChroot.emplace(i, i);
/* Bind-mount all the directories from the "host"
filesystem that we want in the chroot
environment. */
auto doBind = [&](const Path & source, const Path & target, bool optional = false) {
debug("bind mounting '%1%' to '%2%'", source, target);
struct stat st;
if (stat(source.c_str(), &st) == -1) {
if (optional && errno == ENOENT)
return;
else
throw SysError("getting attributes of path '%1%'", source);
}
if (S_ISDIR(st.st_mode))
createDirs(target);
else {
createDirs(dirOf(target));
writeFile(target, "");
}
if (mount(source.c_str(), target.c_str(), "", MS_BIND | MS_REC, 0) == -1)
throw SysError("bind mount from '%1%' to '%2%' failed", source, target);
};
for (auto & i : dirsInChroot) {
for (auto & i : pathsInChroot) {
if (i.second.source == "/proc") continue; // backwards compatibility
#if HAVE_EMBEDDED_SANDBOX_SHELL
@ -1882,7 +1867,7 @@ void LocalDerivationGoal::runChild()
if /dev/pts/ptmx exists). */
if (pathExists("/dev/pts/ptmx") &&
!pathExists(chrootRootDir + "/dev/ptmx")
&& !dirsInChroot.count("/dev/pts"))
&& !pathsInChroot.count("/dev/pts"))
{
if (mount("none", (chrootRootDir + "/dev/pts").c_str(), "devpts", 0, "newinstance,mode=0620") == 0)
{
@ -2017,7 +2002,7 @@ void LocalDerivationGoal::runChild()
/* We build the ancestry before adding all inputPaths to the store because we know they'll
all have the same parents (the store), and there might be lots of inputs. This isn't
particularly efficient... I doubt it'll be a bottleneck in practice */
for (auto & i : dirsInChroot) {
for (auto & i : pathsInChroot) {
Path cur = i.first;
while (cur.compare("/") != 0) {
cur = dirOf(cur);
@ -2025,7 +2010,7 @@ void LocalDerivationGoal::runChild()
}
}
/* And we want the store in there regardless of how empty dirsInChroot. We include the innermost
/* And we want the store in there regardless of how empty pathsInChroot is. We include the innermost
path component this time, since it's typically /nix/store and we care about that. */
Path cur = worker.store.storeDir;
while (cur.compare("/") != 0) {
@ -2036,7 +2021,7 @@ void LocalDerivationGoal::runChild()
/* Add all our input paths to the chroot */
for (auto & i : inputPaths) {
auto p = worker.store.printStorePath(i);
dirsInChroot[p] = p;
pathsInChroot[p] = p;
}
/* Violations will go to the syslog if you set this. Unfortunately the destination does not appear to be configurable */
@ -2067,7 +2052,7 @@ void LocalDerivationGoal::runChild()
without file-write* allowed, access() incorrectly returns EPERM
*/
sandboxProfile += "(allow file-read* file-write* process-exec\n";
for (auto & i : dirsInChroot) {
for (auto & i : pathsInChroot) {
if (i.first != i.second.source)
throw Error(
"can't map '%1%' to '%2%': mismatched impure paths not supported on Darwin",
@ -2511,7 +2496,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
ValidPathInfo newInfo0 {
worker.store,
outputPathName(drv->name, outputName),
*std::move(optCA),
std::move(*optCA),
Hash::dummy,
};
if (*scratchPath != newInfo0.path) {
@ -2573,8 +2558,8 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
delayedException = std::make_exception_ptr(
BuildError("hash mismatch in fixed-output derivation '%s':\n specified: %s\n got: %s",
worker.store.printStorePath(drvPath),
wanted.to_string(SRI, true),
got.to_string(SRI, true)));
wanted.to_string(HashFormat::SRI, true),
got.to_string(HashFormat::SRI, true)));
}
if (!newInfo0.references.empty())
delayedException = std::make_exception_ptr(

View file

@ -3,6 +3,7 @@
#include "derivation-goal.hh"
#include "local-store.hh"
#include "processes.hh"
namespace nix {
@ -86,8 +87,8 @@ struct LocalDerivationGoal : public DerivationGoal
: source(source), optional(optional)
{ }
};
typedef map<Path, ChrootPath> DirsInChroot; // maps target path to source path
DirsInChroot dirsInChroot;
typedef map<Path, ChrootPath> PathsInChroot; // maps target path to source path
PathsInChroot pathsInChroot;
typedef map<std::string, std::string> Environment;
Environment env;
@ -120,14 +121,6 @@ struct LocalDerivationGoal : public DerivationGoal
*/
OutputPathMap scratchOutputs;
/**
* Path registration info from the previous round, if we're
* building multiple times. Since this contains the hash, it
* allows us to compare whether two rounds produced the same
* result.
*/
std::map<Path, ValidPathInfo> prevInfos;
uid_t sandboxUid() { return usingUserNamespace ? (!buildUser || buildUser->getUIDCount() == 1 ? 1000 : 0) : buildUser->getUID(); }
gid_t sandboxGid() { return usingUserNamespace ? (!buildUser || buildUser->getUIDCount() == 1 ? 100 : 0) : buildUser->getGID(); }

View file

@ -2,6 +2,7 @@
#include "substitution-goal.hh"
#include "nar-info.hh"
#include "finally.hh"
#include "signals.hh"
namespace nix {
@ -217,6 +218,8 @@ void PathSubstitutionGoal::tryToRun()
thr = std::thread([this]() {
try {
ReceiveInterrupts receiveInterrupts;
/* Wake up the worker loop when we're done. */
Finally updateStats([this]() { outPipe.writeSide.close(); });

View file

@ -2,9 +2,9 @@
#include "worker.hh"
#include "substitution-goal.hh"
#include "drv-output-substitution-goal.hh"
#include "create-derivation-and-realise-goal.hh"
#include "local-derivation-goal.hh"
#include "hook-instance.hh"
#include "signals.hh"
#include <poll.h>
@ -42,24 +42,6 @@ Worker::~Worker()
}
std::shared_ptr<CreateDerivationAndRealiseGoal> Worker::makeCreateDerivationAndRealiseGoal(
ref<SingleDerivedPath> drvReq,
const OutputsSpec & wantedOutputs,
BuildMode buildMode)
{
std::weak_ptr<CreateDerivationAndRealiseGoal> & goal_weak = outerDerivationGoals.ensureSlot(*drvReq).value;
std::shared_ptr<CreateDerivationAndRealiseGoal> goal = goal_weak.lock();
if (!goal) {
goal = std::make_shared<CreateDerivationAndRealiseGoal>(drvReq, wantedOutputs, *this, buildMode);
goal_weak = goal;
wakeUp(goal);
} else {
goal->addWantedOutputs(wantedOutputs);
}
return goal;
}
std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
const StorePath & drvPath,
const OutputsSpec & wantedOutputs,
@ -130,7 +112,10 @@ GoalPtr Worker::makeGoal(const DerivedPath & req, BuildMode buildMode)
{
return std::visit(overloaded {
[&](const DerivedPath::Built & bfd) -> GoalPtr {
return makeCreateDerivationAndRealiseGoal(bfd.drvPath, bfd.outputs, buildMode);
if (auto bop = std::get_if<DerivedPath::Opaque>(&*bfd.drvPath))
return makeDerivationGoal(bop->path, bfd.outputs, buildMode);
else
throw UnimplementedError("Building dynamic derivations in one shot is not yet implemented.");
},
[&](const DerivedPath::Opaque & bo) -> GoalPtr {
return makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair);
@ -139,46 +124,24 @@ GoalPtr Worker::makeGoal(const DerivedPath & req, BuildMode buildMode)
}
template<typename K, typename V, typename F>
static void cullMap(std::map<K, V> & goalMap, F f)
{
for (auto i = goalMap.begin(); i != goalMap.end();)
if (!f(i->second))
i = goalMap.erase(i);
else ++i;
}
template<typename K, typename G>
static void removeGoal(std::shared_ptr<G> goal, std::map<K, std::weak_ptr<G>> & goalMap)
{
/* !!! inefficient */
cullMap(goalMap, [&](const std::weak_ptr<G> & gp) -> bool {
return gp.lock() != goal;
});
}
template<typename K>
static void removeGoal(std::shared_ptr<CreateDerivationAndRealiseGoal> goal, std::map<K, DerivedPathMap<std::weak_ptr<CreateDerivationAndRealiseGoal>>::ChildNode> & goalMap);
template<typename K>
static void removeGoal(std::shared_ptr<CreateDerivationAndRealiseGoal> goal, std::map<K, DerivedPathMap<std::weak_ptr<CreateDerivationAndRealiseGoal>>::ChildNode> & goalMap)
{
/* !!! inefficient */
cullMap(goalMap, [&](DerivedPathMap<std::weak_ptr<CreateDerivationAndRealiseGoal>>::ChildNode & node) -> bool {
if (node.value.lock() == goal)
node.value.reset();
removeGoal(goal, node.childMap);
return !node.value.expired() || !node.childMap.empty();
});
for (auto i = goalMap.begin();
i != goalMap.end(); )
if (i->second.lock() == goal) {
auto j = i; ++j;
goalMap.erase(i);
i = j;
}
else ++i;
}
void Worker::removeGoal(GoalPtr goal)
{
if (auto drvGoal = std::dynamic_pointer_cast<CreateDerivationAndRealiseGoal>(goal))
nix::removeGoal(drvGoal, outerDerivationGoals.map);
else if (auto drvGoal = std::dynamic_pointer_cast<DerivationGoal>(goal))
if (auto drvGoal = std::dynamic_pointer_cast<DerivationGoal>(goal))
nix::removeGoal(drvGoal, derivationGoals);
else if (auto subGoal = std::dynamic_pointer_cast<PathSubstitutionGoal>(goal))
nix::removeGoal(subGoal, substitutionGoals);
@ -236,19 +199,8 @@ void Worker::childStarted(GoalPtr goal, const std::set<int> & fds,
child.respectTimeouts = respectTimeouts;
children.emplace_back(child);
if (inBuildSlot) {
switch (goal->jobCategory()) {
case JobCategory::Substitution:
nrSubstitutions++;
break;
case JobCategory::Build:
nrLocalBuilds++;
break;
case JobCategory::Administration:
/* Intentionally not limited, see docs */
break;
default:
abort();
}
if (goal->jobCategory() == JobCategory::Substitution) nrSubstitutions++;
else nrLocalBuilds++;
}
}
@ -260,20 +212,12 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers)
if (i == children.end()) return;
if (i->inBuildSlot) {
switch (goal->jobCategory()) {
case JobCategory::Substitution:
if (goal->jobCategory() == JobCategory::Substitution) {
assert(nrSubstitutions > 0);
nrSubstitutions--;
break;
case JobCategory::Build:
} else {
assert(nrLocalBuilds > 0);
nrLocalBuilds--;
break;
case JobCategory::Administration:
/* Intentionally not limited, see docs */
break;
default:
abort();
}
}
@ -324,9 +268,9 @@ void Worker::run(const Goals & _topGoals)
for (auto & i : _topGoals) {
topGoals.insert(i);
if (auto goal = dynamic_cast<CreateDerivationAndRealiseGoal *>(i.get())) {
if (auto goal = dynamic_cast<DerivationGoal *>(i.get())) {
topPaths.push_back(DerivedPath::Built {
.drvPath = goal->drvReq,
.drvPath = makeConstantStorePathRef(goal->drvPath),
.outputs = goal->wantedOutputs,
});
} else if (auto goal = dynamic_cast<PathSubstitutionGoal *>(i.get())) {
@ -589,19 +533,4 @@ GoalPtr upcast_goal(std::shared_ptr<DrvOutputSubstitutionGoal> subGoal)
return subGoal;
}
GoalPtr upcast_goal(std::shared_ptr<DerivationGoal> subGoal)
{
return subGoal;
}
std::optional<std::pair<std::reference_wrapper<const DerivationGoal>, std::reference_wrapper<const SingleDerivedPath>>> tryGetConcreteDrvGoal(GoalPtr waitee)
{
auto * odg = dynamic_cast<CreateDerivationAndRealiseGoal *>(&*waitee);
if (!odg) return std::nullopt;
return {{
std::cref(*odg->concreteDrvGoal),
std::cref(*odg->drvReq),
}};
}
}

View file

@ -4,7 +4,6 @@
#include "types.hh"
#include "lock.hh"
#include "store-api.hh"
#include "derived-path-map.hh"
#include "goal.hh"
#include "realisation.hh"
@ -14,7 +13,6 @@
namespace nix {
/* Forward definition. */
struct CreateDerivationAndRealiseGoal;
struct DerivationGoal;
struct PathSubstitutionGoal;
class DrvOutputSubstitutionGoal;
@ -33,25 +31,9 @@ class DrvOutputSubstitutionGoal;
*/
GoalPtr upcast_goal(std::shared_ptr<PathSubstitutionGoal> subGoal);
GoalPtr upcast_goal(std::shared_ptr<DrvOutputSubstitutionGoal> subGoal);
GoalPtr upcast_goal(std::shared_ptr<DerivationGoal> subGoal);
typedef std::chrono::time_point<std::chrono::steady_clock> steady_time_point;
/**
* The current implementation of impure derivations has
* `DerivationGoal`s accumulate realisations from their waitees.
* Unfortunately, `DerivationGoal`s don't directly depend on other
* goals, but instead depend on `CreateDerivationAndRealiseGoal`s.
*
* We try not to share any of the details of any goal type with any
* other, for sake of modularity and quicker rebuilds. This means we
* cannot "just" downcast and fish out the field. So as an escape hatch,
* we have made the function, written in `worker.cc` where all the goal
* types are visible, and use it instead.
*/
std::optional<std::pair<std::reference_wrapper<const DerivationGoal>, std::reference_wrapper<const SingleDerivedPath>>> tryGetConcreteDrvGoal(GoalPtr waitee);
/**
* A mapping used to remember for each child process to what goal it
* belongs, and file descriptors for receiving log data and output
@ -119,9 +101,6 @@ private:
* Maps used to prevent multiple instantiations of a goal for the
* same derivation / path.
*/
DerivedPathMap<std::weak_ptr<CreateDerivationAndRealiseGoal>> outerDerivationGoals;
std::map<StorePath, std::weak_ptr<DerivationGoal>> derivationGoals;
std::map<StorePath, std::weak_ptr<PathSubstitutionGoal>> substitutionGoals;
std::map<DrvOutput, std::weak_ptr<DrvOutputSubstitutionGoal>> drvOutputSubstitutionGoals;
@ -209,9 +188,6 @@ public:
* @ref DerivationGoal "derivation goal"
*/
private:
std::shared_ptr<CreateDerivationAndRealiseGoal> makeCreateDerivationAndRealiseGoal(
ref<SingleDerivedPath> drvPath,
const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal);
std::shared_ptr<DerivationGoal> makeDerivationGoalCommon(
const StorePath & drvPath, const OutputsSpec & wantedOutputs,
std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal);

View file

@ -174,15 +174,19 @@ void builtinBuildenv(const BasicDerivation & drv)
/* Convert the stuff we get from the environment back into a
* coherent data type. */
Packages pkgs;
auto derivations = tokenizeString<Strings>(getAttr("derivations"));
while (!derivations.empty()) {
/* !!! We're trusting the caller to structure derivations env var correctly */
auto active = derivations.front(); derivations.pop_front();
auto priority = stoi(derivations.front()); derivations.pop_front();
auto outputs = stoi(derivations.front()); derivations.pop_front();
for (auto n = 0; n < outputs; n++) {
auto path = derivations.front(); derivations.pop_front();
pkgs.emplace_back(path, active != "false", priority);
{
auto derivations = tokenizeString<Strings>(getAttr("derivations"));
auto itemIt = derivations.begin();
while (itemIt != derivations.end()) {
/* !!! We're trusting the caller to structure derivations env var correctly */
const bool active = "false" != *itemIt++;
const int priority = stoi(*itemIt++);
const size_t outputs = stoul(*itemIt++);
for (size_t n {0}; n < outputs; n++) {
pkgs.emplace_back(std::move(*itemIt++), active, priority);
}
}
}
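A standalone sketch of the token stream this loop consumes (store paths invented for illustration): each package contributes an active flag, a priority, an output count, and then that many store paths.

#include <iostream>
#include <list>
#include <string>

int main()
{
    // Same shape as the "derivations" environment variable parsed above.
    std::list<std::string> tokens {
        "true", "5", "2", "/nix/store/aaaa-hello", "/nix/store/aaaa-hello-man",
        "false", "10", "1", "/nix/store/bbbb-cowsay",
    };
    std::cout << std::boolalpha;
    for (auto it = tokens.begin(); it != tokens.end(); ) {
        const bool active = "false" != *it++;
        const int priority = std::stoi(*it++);
        const std::size_t outputs = std::stoul(*it++);
        for (std::size_t n = 0; n < outputs; n++)
            std::cout << *it++ << " active=" << active << " priority=" << priority << "\n";
    }
    // Two "hello" outputs at priority 5 (active), one "cowsay" output at
    // priority 10 (inactive).
}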

Some files were not shown because too many files have changed in this diff.