Mirror of https://github.com/NixOS/nix.git

Merge remote-tracking branch 'origin/master' into progress-bar

Author: Eelco Dolstra, 2021-11-03 14:01:55 +01:00
commit c4f0508ef5
417 changed files with 42697 additions and 9071 deletions

View file

@ -17,7 +17,8 @@
#include "store-api.hh"
#include "derivations.hh"
#include "local-store.hh"
#include "../nix/legacy.hh"
#include "legacy.hh"
#include "experimental-features.hh"
using namespace nix;
using std::cin;
@ -53,6 +54,9 @@ static int main_build_remote(int argc, char * * argv)
unsetenv("DISPLAY");
unsetenv("SSH_ASKPASS");
/* If we ever use the common args framework, make sure to
remove initPlugins below and initialize settings first.
*/
if (argc != 2)
throw UsageError("called without required arguments");
@ -71,11 +75,15 @@ static int main_build_remote(int argc, char * * argv)
initPlugins();
auto store = openStore().cast<LocalStore>();
auto store = openStore();
/* It would be more appropriate to use $XDG_RUNTIME_DIR, since
that gets cleared on reboot, but it wouldn't work on macOS. */
currentLoad = store->stateDir + "/current-load";
auto currentLoadName = "/current-load";
if (auto localStore = store.dynamic_pointer_cast<LocalFSStore>())
currentLoad = std::string { localStore->stateDir } + currentLoadName;
else
currentLoad = settings.nixStateDir + currentLoadName;
std::shared_ptr<Store> sshStore;
AutoCloseFD bestSlotLock;
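The hunk above drops the hard requirement for a LocalStore and instead probes at run time whether the opened store is a local filesystem store, falling back to the global state directory otherwise. A minimal standalone sketch of that downcast-with-fallback pattern, using stand-in Store and LocalFSStore types rather than the real Nix classes:

// Sketch only: pick the state directory based on the concrete store type.
#include <iostream>
#include <memory>
#include <string>

struct Store { virtual ~Store() = default; };
struct LocalFSStore : Store { std::string stateDir = "/nix/var/nix"; };

std::string currentLoadDir(const std::shared_ptr<Store> & store, const std::string & defaultStateDir)
{
    const std::string name = "/current-load";
    if (auto local = std::dynamic_pointer_cast<LocalFSStore>(store))
        return local->stateDir + name;   // local store: use its own state directory
    return defaultStateDir + name;       // e.g. a remote store: use the configured default
}

int main()
{
    std::shared_ptr<Store> store = std::make_shared<LocalFSStore>();
    std::cout << currentLoadDir(store, "/nix/var/nix") << "\n";
}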
@ -123,11 +131,14 @@ static int main_build_remote(int argc, char * * argv)
for (auto & m : machines) {
debug("considering building on remote machine '%s'", m.storeUri);
if (m.enabled && std::find(m.systemTypes.begin(),
m.systemTypes.end(),
neededSystem) != m.systemTypes.end() &&
if (m.enabled
&& (neededSystem == "builtin"
|| std::find(m.systemTypes.begin(),
m.systemTypes.end(),
neededSystem) != m.systemTypes.end()) &&
m.allSupported(requiredFeatures) &&
m.mandatoryMet(requiredFeatures)) {
m.mandatoryMet(requiredFeatures))
{
rightType = true;
AutoCloseFD free;
uint64_t load = 0;
@ -172,13 +183,14 @@ static int main_build_remote(int argc, char * * argv)
else
{
// build the hint template.
string hintstring = "derivation: %s\nrequired (system, features): (%s, %s)";
hintstring += "\n%s available machines:";
hintstring += "\n(systems, maxjobs, supportedFeatures, mandatoryFeatures)";
string errorText =
"Failed to find a machine for remote build!\n"
"derivation: %s\nrequired (system, features): (%s, %s)";
errorText += "\n%s available machines:";
errorText += "\n(systems, maxjobs, supportedFeatures, mandatoryFeatures)";
for (unsigned int i = 0; i < machines.size(); ++i) {
hintstring += "\n(%s, %s, %s, %s)";
}
for (unsigned int i = 0; i < machines.size(); ++i)
errorText += "\n(%s, %s, %s, %s)";
// add the template values.
string drvstr;
@ -187,25 +199,21 @@ static int main_build_remote(int argc, char * * argv)
else
drvstr = "<unknown>";
auto hint = hintformat(hintstring);
hint
% drvstr
% neededSystem
% concatStringsSep<StringSet>(", ", requiredFeatures)
% machines.size();
auto error = hintformat(errorText);
error
% drvstr
% neededSystem
% concatStringsSep<StringSet>(", ", requiredFeatures)
% machines.size();
for (auto & m : machines) {
hint % concatStringsSep<vector<string>>(", ", m.systemTypes)
% m.maxJobs
% concatStringsSep<StringSet>(", ", m.supportedFeatures)
% concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
}
for (auto & m : machines)
error
% concatStringsSep<vector<string>>(", ", m.systemTypes)
% m.maxJobs
% concatStringsSep<StringSet>(", ", m.supportedFeatures)
% concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
logErrorInfo(canBuildLocally ? lvlChatty : lvlWarn, {
.name = "Remote build",
.description = "Failed to find a machine for remote build!",
.hint = hint
});
printMsg(canBuildLocally ? lvlChatty : lvlWarn, error);
std::cerr << "# decline\n";
}
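The error-reporting rewrite above builds a format template whose placeholder count depends on how many machines are configured, then binds the values one at a time. Below is a small self-contained sketch of that pattern written directly against boost::format; hintformat's exact interface is not assumed here, and the machine list is made up.

// Sketch only: variable-length template plus incremental '%' binding.
#include <boost/format.hpp>
#include <iostream>
#include <string>
#include <vector>

int main()
{
    std::vector<std::string> machines = {"ssh://builder1 x86_64-linux", "ssh://builder2 aarch64-linux"};

    std::string templ =
        "Failed to find a machine for remote build!\n"
        "%s available machines:";
    for (size_t i = 0; i < machines.size(); ++i)
        templ += "\n(%s)";                    // one placeholder per machine

    boost::format msg(templ);
    msg % machines.size();                    // fill the count first
    for (auto & m : machines)
        msg % m;                              // then one value per machine

    std::cerr << msg.str() << "\n";
    return 0;
}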
@ -230,12 +238,9 @@ static int main_build_remote(int argc, char * * argv)
} catch (std::exception & e) {
auto msg = chomp(drainFD(5, false));
logError({
.name = "Remote build",
.hint = hintfmt("cannot build on '%s': %s%s",
bestMachine->storeUri, e.what(),
(msg.empty() ? "" : ": " + msg))
});
printError("cannot build on '%s': %s%s",
bestMachine->storeUri, e.what(),
msg.empty() ? "" : ": " + msg);
bestMachine->enabled = false;
continue;
}
@ -250,7 +255,7 @@ connected:
std::cerr << "# accept\n" << storeUri << "\n";
auto inputs = readStrings<PathSet>(source);
auto outputs = readStrings<PathSet>(source);
auto wantedOutputs = readStrings<StringSet>(source);
AutoCloseFD uploadLock = openLockFile(currentLoad + "/" + escapeUri(storeUri) + ".upload-lock", true);
@ -269,28 +274,65 @@ connected:
{
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying dependencies to '%s'", storeUri));
copyPaths(store, ref<Store>(sshStore), store->parseStorePathSet(inputs), NoRepair, NoCheckSigs, substitute);
copyPaths(*store, *sshStore, store->parseStorePathSet(inputs), NoRepair, NoCheckSigs, substitute);
}
uploadLock = -1;
auto drv = store->readDerivation(*drvPath);
drv.inputSrcs = store->parseStorePathSet(inputs);
auto outputHashes = staticOutputHashes(*store, drv);
// Hijack the inputs paths of the derivation to include all the paths
// that come from the `inputDrvs` set.
// We don't do that for derivations whose `inputDrvs` is empty
// because
// 1. It's not needed
// 2. Changing the `inputSrcs` set changes the associated output ids,
// which breaks CA derivations
if (!drv.inputDrvs.empty())
drv.inputSrcs = store->parseStorePathSet(inputs);
auto result = sshStore->buildDerivation(*drvPath, drv);
if (!result.success())
throw Error("build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg);
StorePathSet missing;
for (auto & path : outputs)
if (!store->isValidPath(store->parseStorePath(path))) missing.insert(store->parseStorePath(path));
std::set<Realisation> missingRealisations;
StorePathSet missingPaths;
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && !derivationHasKnownOutputPaths(drv.type())) {
for (auto & outputName : wantedOutputs) {
auto thisOutputHash = outputHashes.at(outputName);
auto thisOutputId = DrvOutput{ thisOutputHash, outputName };
if (!store->queryRealisation(thisOutputId)) {
debug("missing output %s", outputName);
assert(result.builtOutputs.count(thisOutputId));
auto newRealisation = result.builtOutputs.at(thisOutputId);
missingRealisations.insert(newRealisation);
missingPaths.insert(newRealisation.outPath);
}
}
} else {
auto outputPaths = drv.outputsAndOptPaths(*store);
for (auto & [outputName, hopefullyOutputPath] : outputPaths) {
assert(hopefullyOutputPath.second);
if (!store->isValidPath(*hopefullyOutputPath.second))
missingPaths.insert(*hopefullyOutputPath.second);
}
}
if (!missing.empty()) {
if (!missingPaths.empty()) {
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying outputs from '%s'", storeUri));
for (auto & i : missing)
store->locksHeld.insert(store->printStorePath(i)); /* FIXME: ugly */
copyPaths(ref<Store>(sshStore), store, missing, NoRepair, NoCheckSigs, NoSubstitute);
if (auto localStore = store.dynamic_pointer_cast<LocalStore>())
for (auto & path : missingPaths)
localStore->locksHeld.insert(store->printStorePath(path)); /* FIXME: ugly */
copyPaths(*sshStore, *store, missingPaths, NoRepair, NoCheckSigs, NoSubstitute);
}
// XXX: Should be done as part of `copyPaths`
for (auto & realisation : missingRealisations) {
// Should hold, because if the feature isn't enabled the set
// of missing realisations should be empty
settings.requireExperimentalFeature(Xp::CaDerivations);
store->registerDrvOutput(realisation);
}
return 0;

View file

@ -27,11 +27,6 @@ nix::Commands RegisterCommand::getCommandsFor(const std::vector<std::string> & p
return res;
}
void NixMultiCommand::printHelp(const string & programName, std::ostream & out)
{
MultiCommand::printHelp(programName, out);
}
nlohmann::json NixMultiCommand::toJSON()
{
// FIXME: use Command::toJSON() as well.
@ -59,57 +54,105 @@ void StoreCommand::run()
run(getStore());
}
StorePathsCommand::StorePathsCommand(bool recursive)
EvalCommand::EvalCommand()
{
}
EvalCommand::~EvalCommand()
{
if (evalState)
evalState->printStats();
}
ref<Store> EvalCommand::getEvalStore()
{
if (!evalStore)
evalStore = evalStoreUrl ? openStore(*evalStoreUrl) : getStore();
return ref<Store>(evalStore);
}
ref<EvalState> EvalCommand::getEvalState()
{
if (!evalState)
evalState = std::make_shared<EvalState>(searchPath, getEvalStore(), getStore());
return ref<EvalState>(evalState);
}
BuiltPathsCommand::BuiltPathsCommand(bool recursive)
: recursive(recursive)
{
if (recursive)
addFlag({
.longName = "no-recursive",
.description = "apply operation to specified paths only",
.description = "Apply operation to specified paths only.",
.category = installablesCategory,
.handler = {&this->recursive, false},
});
else
addFlag({
.longName = "recursive",
.shortName = 'r',
.description = "apply operation to closure of the specified paths",
.description = "Apply operation to closure of the specified paths.",
.category = installablesCategory,
.handler = {&this->recursive, true},
});
mkFlag(0, "all", "apply operation to the entire store", &all);
addFlag({
.longName = "all",
.description = "Apply the operation to every store path.",
.category = installablesCategory,
.handler = {&all, true},
});
}
void StorePathsCommand::run(ref<Store> store)
void BuiltPathsCommand::run(ref<Store> store)
{
StorePaths storePaths;
BuiltPaths paths;
if (all) {
if (installables.size())
throw UsageError("'--all' does not expect arguments");
// XXX: Only uses opaque paths, ignores all the realisations
for (auto & p : store->queryAllValidPaths())
storePaths.push_back(p);
}
else {
for (auto & p : toStorePaths(store, realiseMode, operateOn, installables))
storePaths.push_back(p);
paths.push_back(BuiltPath::Opaque{p});
} else {
paths = toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables);
if (recursive) {
StorePathSet closure;
store->computeFSClosure(StorePathSet(storePaths.begin(), storePaths.end()), closure, false, false);
storePaths.clear();
for (auto & p : closure)
storePaths.push_back(p);
// XXX: This only computes the store path closure, ignoring
// intermediate realisations
StorePathSet pathsRoots, pathsClosure;
for (auto & root : paths) {
auto rootFromThis = root.outPaths();
pathsRoots.insert(rootFromThis.begin(), rootFromThis.end());
}
store->computeFSClosure(pathsRoots, pathsClosure);
for (auto & path : pathsClosure)
paths.push_back(BuiltPath::Opaque{path});
}
}
run(store, std::move(storePaths));
run(store, std::move(paths));
}
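BuiltPathsCommand::run now expands the root output paths into their full closure with Store::computeFSClosure before handing them on. As a rough illustration of what such a closure computation does, here is a tiny worklist traversal over an in-memory reference graph; the map stands in for the store's references database.

// Sketch only: closure = all paths reachable from the roots via references.
#include <iostream>
#include <map>
#include <set>
#include <string>
#include <vector>

std::set<std::string> closure(
    const std::map<std::string, std::vector<std::string>> & refs,
    const std::set<std::string> & roots)
{
    std::set<std::string> seen;
    std::vector<std::string> todo(roots.begin(), roots.end());
    while (!todo.empty()) {
        auto p = todo.back();
        todo.pop_back();
        if (!seen.insert(p).second) continue;          // already visited
        if (auto i = refs.find(p); i != refs.end())
            for (auto & r : i->second) todo.push_back(r);
    }
    return seen;
}

int main()
{
    std::map<std::string, std::vector<std::string>> refs = {
        {"app", {"libA", "libB"}}, {"libA", {"libC"}}, {"libB", {}}, {"libC", {}}};
    for (auto & p : closure(refs, {"app"}))
        std::cout << p << "\n";
}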
void StorePathCommand::run(ref<Store> store)
StorePathsCommand::StorePathsCommand(bool recursive)
: BuiltPathsCommand(recursive)
{
auto storePaths = toStorePaths(store, Realise::Nothing, operateOn, installables);
}
void StorePathsCommand::run(ref<Store> store, BuiltPaths && paths)
{
StorePathSet storePaths;
for (auto & builtPath : paths)
for (auto & p : builtPath.outPaths())
storePaths.insert(p);
auto sorted = store->topoSortPaths(storePaths);
std::reverse(sorted.begin(), sorted.end());
run(store, std::move(sorted));
}
void StorePathCommand::run(ref<Store> store, std::vector<StorePath> && storePaths)
{
if (storePaths.size() != 1)
throw UsageError("this command requires exactly one store path");
@ -133,7 +176,7 @@ MixProfile::MixProfile()
{
addFlag({
.longName = "profile",
.description = "profile to update",
.description = "The profile to update.",
.labels = {"path"},
.handler = {&profile},
.completer = completePath
@ -152,7 +195,7 @@ void MixProfile::updateProfile(const StorePath & storePath)
profile2, storePath));
}
void MixProfile::updateProfile(const Buildables & buildables)
void MixProfile::updateProfile(const BuiltPaths & buildables)
{
if (!profile) return;
@ -160,22 +203,19 @@ void MixProfile::updateProfile(const Buildables & buildables)
for (auto & buildable : buildables) {
std::visit(overloaded {
[&](BuildableOpaque bo) {
[&](const BuiltPath::Opaque & bo) {
result.push_back(bo.path);
},
[&](BuildableFromDrv bfd) {
[&](const BuiltPath::Built & bfd) {
for (auto & output : bfd.outputs) {
/* Output path should be known because we just tried to
build it. */
assert(output.second);
result.push_back(*output.second);
result.push_back(output.second);
}
},
}, buildable);
}, buildable.raw());
}
if (result.size() != 1)
throw Error("'--profile' requires that the arguments produce a single store path, but there are %d", result.size());
throw UsageError("'--profile' requires that the arguments produce a single store path, but there are %d", result.size());
updateProfile(result[0]);
}
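updateProfile, like most of the code this PR touches, dispatches on the BuiltPath variant with std::visit and the 'overloaded' helper. A self-contained sketch of that idiom, using stand-in Opaque and Built types instead of the real BuiltPath:

// Sketch only: one lambda per variant alternative, combined by 'overloaded'.
#include <iostream>
#include <string>
#include <variant>
#include <vector>

template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;   // deduction guide (pre-C++20)

struct Opaque { std::string path; };
struct Built  { std::string drvPath; std::vector<std::string> outPaths; };
using BuiltPathLike = std::variant<Opaque, Built>;

int main()
{
    std::vector<BuiltPathLike> paths = {
        Opaque{"/nix/store/aaaa-hello"},
        Built{"/nix/store/bbbb-hello.drv", {"/nix/store/cccc-hello-out"}}};

    std::vector<std::string> result;
    for (auto & p : paths)
        std::visit(overloaded {
            [&](const Opaque & bo) { result.push_back(bo.path); },
            [&](const Built & bfd) {
                for (auto & out : bfd.outPaths) result.push_back(out);
            },
        }, p);

    for (auto & r : result) std::cout << r << "\n";
}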
@ -190,14 +230,14 @@ MixEnvironment::MixEnvironment() : ignoreEnvironment(false)
addFlag({
.longName = "ignore-environment",
.shortName = 'i',
.description = "clear the entire environment (except those specified with --keep)",
.description = "Clear the entire environment (except those specified with `--keep`).",
.handler = {&ignoreEnvironment, true},
});
addFlag({
.longName = "keep",
.shortName = 'k',
.description = "keep specified environment variable",
.description = "Keep the environment variable *name*.",
.labels = {"name"},
.handler = {[&](std::string s) { keep.insert(s); }},
});
@ -205,7 +245,7 @@ MixEnvironment::MixEnvironment() : ignoreEnvironment(false)
addFlag({
.longName = "unset",
.shortName = 'u',
.description = "unset specified environment variable",
.description = "Unset the environment variable *name*.",
.labels = {"name"},
.handler = {[&](std::string s) { unset.insert(s); }},
});

View file

@ -13,6 +13,8 @@ namespace nix {
extern std::string programPath;
extern char * * savedArgv;
class EvalState;
struct Pos;
class Store;
@ -21,10 +23,10 @@ static constexpr Command::Category catSecondary = 100;
static constexpr Command::Category catUtility = 101;
static constexpr Command::Category catNixInstallation = 102;
static constexpr auto installablesCategory = "Options that change the interpretation of installables";
struct NixMultiCommand : virtual MultiCommand, virtual Command
{
void printHelp(const string & programName, std::ostream & out) override;
nlohmann::json toJSON() override;
};
@ -43,8 +45,17 @@ private:
struct EvalCommand : virtual StoreCommand, MixEvalArgs
{
EvalCommand();
~EvalCommand();
ref<Store> getEvalStore();
ref<EvalState> getEvalState();
private:
std::shared_ptr<Store> evalStore;
std::shared_ptr<EvalState> evalState;
};
@ -97,6 +108,8 @@ enum class Realise {
exists. */
Derivation,
/* Evaluate in dry-run mode. Postcondition: nothing. */
// FIXME: currently unused, but could be revived if we can
// evaluate derivations in-memory.
Nothing
};
@ -139,7 +152,7 @@ private:
};
/* A command that operates on zero or more store paths. */
struct StorePathsCommand : public InstallablesCommand
struct BuiltPathsCommand : public InstallablesCommand
{
private:
@ -152,25 +165,36 @@ protected:
public:
StorePathsCommand(bool recursive = false);
BuiltPathsCommand(bool recursive = false);
using StoreCommand::run;
virtual void run(ref<Store> store, std::vector<StorePath> storePaths) = 0;
virtual void run(ref<Store> store, BuiltPaths && paths) = 0;
void run(ref<Store> store) override;
bool useDefaultInstallables() override { return !all; }
};
/* A command that operates on exactly one store path. */
struct StorePathCommand : public InstallablesCommand
struct StorePathsCommand : public BuiltPathsCommand
{
using StoreCommand::run;
StorePathsCommand(bool recursive = false);
using BuiltPathsCommand::run;
virtual void run(ref<Store> store, std::vector<StorePath> && storePaths) = 0;
void run(ref<Store> store, BuiltPaths && paths) override;
};
/* A command that operates on exactly one store path. */
struct StorePathCommand : public StorePathsCommand
{
using StorePathsCommand::run;
virtual void run(ref<Store> store, const StorePath & storePath) = 0;
void run(ref<Store> store) override;
void run(ref<Store> store, std::vector<StorePath> && storePaths) override;
};
/* A helper class for registering commands globally. */
@ -201,21 +225,38 @@ static RegisterCommand registerCommand2(std::vector<std::string> && name)
return RegisterCommand(std::move(name), [](){ return make_ref<T>(); });
}
Buildables build(ref<Store> store, Realise mode,
std::vector<std::shared_ptr<Installable>> installables, BuildMode bMode = bmNormal);
BuiltPaths build(
ref<Store> evalStore,
ref<Store> store, Realise mode,
const std::vector<std::shared_ptr<Installable>> & installables,
BuildMode bMode = bmNormal);
std::set<StorePath> toStorePaths(ref<Store> store,
Realise mode, OperateOn operateOn,
std::vector<std::shared_ptr<Installable>> installables);
std::set<StorePath> toStorePaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
const std::vector<std::shared_ptr<Installable>> & installables);
StorePath toStorePath(ref<Store> store,
Realise mode, OperateOn operateOn,
StorePath toStorePath(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
std::shared_ptr<Installable> installable);
std::set<StorePath> toDerivations(ref<Store> store,
std::vector<std::shared_ptr<Installable>> installables,
std::set<StorePath> toDerivations(
ref<Store> store,
const std::vector<std::shared_ptr<Installable>> & installables,
bool useDeriver = false);
BuiltPaths toBuiltPaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
const std::vector<std::shared_ptr<Installable>> & installables);
/* Helper function to generate args that invoke $EDITOR on
filename:lineno. */
Strings editorFor(const Pos & pos);
@ -231,7 +272,7 @@ struct MixProfile : virtual StoreCommand
/* If 'profile' is set, make it point at the store path produced
by 'buildables'. */
void updateProfile(const Buildables & buildables);
void updateProfile(const BuiltPaths & buildables);
};
struct MixDefaultProfile : MixProfile
@ -261,6 +302,8 @@ void completeFlakeRefWithFragment(
const Strings & defaultFlakeAttrPaths,
std::string_view prefix);
std::string showVersions(const std::set<std::string> & versions);
void printClosureDiff(
ref<Store> store,
const StorePath & beforePath,

View file

@ -20,31 +20,6 @@
namespace nix {
nlohmann::json BuildableOpaque::toJSON(ref<Store> store) const {
nlohmann::json res;
res["path"] = store->printStorePath(path);
return res;
}
nlohmann::json BuildableFromDrv::toJSON(ref<Store> store) const {
nlohmann::json res;
res["drvPath"] = store->printStorePath(drvPath);
for (const auto& [output, path] : outputs) {
res["outputs"][output] = path ? store->printStorePath(*path) : "";
}
return res;
}
nlohmann::json buildablesToJSON(const Buildables & buildables, ref<Store> store) {
auto res = nlohmann::json::array();
for (const Buildable & buildable : buildables) {
std::visit([&res, store](const auto & buildable) {
res.push_back(buildable.toJSON(store));
}, buildable);
}
return res;
}
void completeFlakeInputPath(
ref<EvalState> evalState,
const FlakeRef & flakeRef,
@ -58,39 +33,51 @@ void completeFlakeInputPath(
MixFlakeOptions::MixFlakeOptions()
{
auto category = "Common flake-related options";
addFlag({
.longName = "recreate-lock-file",
.description = "recreate lock file from scratch",
.description = "Recreate the flake's lock file from scratch.",
.category = category,
.handler = {&lockFlags.recreateLockFile, true}
});
addFlag({
.longName = "no-update-lock-file",
.description = "do not allow any updates to the lock file",
.description = "Do not allow any updates to the flake's lock file.",
.category = category,
.handler = {&lockFlags.updateLockFile, false}
});
addFlag({
.longName = "no-write-lock-file",
.description = "do not write the newly generated lock file",
.description = "Do not write the flake's newly generated lock file.",
.category = category,
.handler = {&lockFlags.writeLockFile, false}
});
addFlag({
.longName = "no-registries",
.description = "don't use flake registries",
.handler = {&lockFlags.useRegistries, false}
.description =
"Don't allow lookups in the flake registries. This option is deprecated; use `--no-use-registries`.",
.category = category,
.handler = {[&]() {
lockFlags.useRegistries = false;
warn("'--no-registries' is deprecated; use '--no-use-registries'");
}}
});
addFlag({
.longName = "commit-lock-file",
.description = "commit changes to the lock file",
.description = "Commit changes to the flake's lock file.",
.category = category,
.handler = {&lockFlags.commitLockFile, true}
});
addFlag({
.longName = "update-input",
.description = "update a specific flake input",
.description = "Update a specific flake input (ignoring its previous entry in the lock file).",
.category = category,
.labels = {"input-path"},
.handler = {[&](std::string s) {
lockFlags.inputUpdates.insert(flake::parseInputPath(s));
@ -103,9 +90,11 @@ MixFlakeOptions::MixFlakeOptions()
addFlag({
.longName = "override-input",
.description = "override a specific flake input (e.g. `dwarffs/nixpkgs`)",
.description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`). This implies `--no-write-lock-file`.",
.category = category,
.labels = {"input-path", "flake-url"},
.handler = {[&](std::string inputPath, std::string flakeRef) {
lockFlags.writeLockFile = false;
lockFlags.inputOverrides.insert_or_assign(
flake::parseInputPath(inputPath),
parseFlakeRef(flakeRef, absPath(".")));
@ -114,7 +103,8 @@ MixFlakeOptions::MixFlakeOptions()
addFlag({
.longName = "inputs-from",
.description = "use the inputs of the specified flake as registry entries",
.description = "Use the inputs of the specified flake as registry entries.",
.category = category,
.labels = {"flake-url"},
.handler = {[&](std::string flakeRef) {
auto evalState = getEvalState();
@ -143,22 +133,25 @@ SourceExprCommand::SourceExprCommand()
addFlag({
.longName = "file",
.shortName = 'f',
.description = "evaluate *file* rather than the default",
.description = "Interpret installables as attribute paths relative to the Nix expression stored in *file*.",
.category = installablesCategory,
.labels = {"file"},
.handler = {&file},
.completer = completePath
});
addFlag({
.longName ="expr",
.description = "evaluate attributes from *expr*",
.longName = "expr",
.description = "Interpret installables as attribute paths relative to the Nix expression *expr*.",
.category = installablesCategory,
.labels = {"expr"},
.handler = {&expr}
});
addFlag({
.longName ="derivation",
.description = "operate on the store derivation rather than its outputs",
.longName = "derivation",
.description = "Operate on the store derivation rather than its outputs.",
.category = installablesCategory,
.handler = {&operateOn, OperateOn::Derivation},
});
}
@ -182,14 +175,50 @@ Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
void SourceExprCommand::completeInstallable(std::string_view prefix)
{
if (file) return; // FIXME
if (file) {
evalSettings.pureEval = false;
auto state = getEvalState();
Expr *e = state->parseExprFromFile(
resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
);
completeFlakeRefWithFragment(
getEvalState(),
lockFlags,
getDefaultFlakeAttrPathPrefixes(),
getDefaultFlakeAttrPaths(),
prefix);
Value root;
state->eval(e, root);
auto autoArgs = getAutoArgs(*state);
std::string prefix_ = std::string(prefix);
auto sep = prefix_.rfind('.');
std::string searchWord;
if (sep != std::string::npos) {
searchWord = prefix_.substr(sep, std::string::npos);
prefix_ = prefix_.substr(0, sep);
} else {
searchWord = prefix_;
prefix_ = "";
}
Value &v1(*findAlongAttrPath(*state, prefix_, *autoArgs, root).first);
state->forceValue(v1);
Value v2;
state->autoCallFunction(*autoArgs, v1, v2);
if (v2.type() == nAttrs) {
for (auto & i : *v2.attrs) {
std::string name = i.name;
if (name.find(searchWord) == 0) {
completions->add(i.name);
}
}
}
} else {
completeFlakeRefWithFragment(
getEvalState(),
lockFlags,
getDefaultFlakeAttrPathPrefixes(),
getDefaultFlakeAttrPaths(),
prefix);
}
}
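The new completion code splits the user's prefix on the last '.', evaluates the attribute set named by the left part, and offers every attribute whose name starts with the remainder. A simplified standalone sketch of that splitting and filtering, with a plain map standing in for the evaluated attribute set:

// Sketch only: prefix splitting and starts-with filtering for completion.
#include <iostream>
#include <map>
#include <string>
#include <vector>

int main()
{
    std::map<std::string, std::vector<std::string>> attrs = {
        {"",     {"pkgs", "lib"}},
        {"pkgs", {"hello", "helloWorld", "git"}},
    };

    std::string prefix = "pkgs.hel";
    auto sep = prefix.rfind('.');
    std::string attrPath   = sep == std::string::npos ? ""     : prefix.substr(0, sep);
    std::string searchWord = sep == std::string::npos ? prefix : prefix.substr(sep + 1);

    for (auto & name : attrs[attrPath])
        if (name.rfind(searchWord, 0) == 0)                     // starts-with check
            std::cout << (attrPath.empty() ? name : attrPath + "." + name) << "\n";
}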
void completeFlakeRefWithFragment(
@ -260,13 +289,6 @@ void completeFlakeRefWithFragment(
completeFlakeRef(evalState->store, prefix);
}
ref<EvalState> EvalCommand::getEvalState()
{
if (!evalState)
evalState = std::make_shared<EvalState>(searchPath, getStore());
return ref<EvalState>(evalState);
}
void completeFlakeRef(ref<Store> store, std::string_view prefix)
{
if (prefix == "")
@ -290,9 +312,9 @@ void completeFlakeRef(ref<Store> store, std::string_view prefix)
}
}
Buildable Installable::toBuildable()
DerivedPath Installable::toDerivedPath()
{
auto buildables = toBuildables();
auto buildables = toDerivedPaths();
if (buildables.size() != 1)
throw Error("installable '%s' evaluates to %d derivations, where only one is expected", what(), buildables.size());
return std::move(buildables[0]);
@ -326,22 +348,19 @@ struct InstallableStorePath : Installable
std::string what() override { return store->printStorePath(storePath); }
Buildables toBuildables() override
DerivedPaths toDerivedPaths() override
{
if (storePath.isDerivation()) {
std::map<std::string, std::optional<StorePath>> outputs;
auto drv = store->readDerivation(storePath);
for (auto & [name, output] : drv.outputsAndOptPaths(*store))
outputs.emplace(name, output.second);
return {
BuildableFromDrv {
DerivedPath::Built {
.drvPath = storePath,
.outputs = std::move(outputs)
.outputs = drv.outputNames(),
}
};
} else {
return {
BuildableOpaque {
DerivedPath::Opaque {
.path = storePath,
}
};
@ -354,22 +373,24 @@ struct InstallableStorePath : Installable
}
};
Buildables InstallableValue::toBuildables()
DerivedPaths InstallableValue::toDerivedPaths()
{
Buildables res;
DerivedPaths res;
std::map<StorePath, std::map<std::string, std::optional<StorePath>>> drvsToOutputs;
std::map<StorePath, std::set<std::string>> drvsToOutputs;
RealisedPath::Set drvsToCopy;
// Group by derivation, helps with .all in particular
for (auto & drv : toDerivations()) {
auto outputName = drv.outputName;
if (outputName == "")
throw Error("derivation '%s' lacks an 'outputName' attribute", state->store->printStorePath(drv.drvPath));
drvsToOutputs[drv.drvPath].insert_or_assign(outputName, drv.outPath);
drvsToOutputs[drv.drvPath].insert(outputName);
drvsToCopy.insert(drv.drvPath);
}
for (auto & i : drvsToOutputs)
res.push_back(BuildableFromDrv { i.first, i.second });
res.push_back(DerivedPath::Built { i.first, i.second });
return res;
}
@ -483,6 +504,23 @@ static std::string showAttrPaths(const std::vector<std::string> & paths)
return s;
}
InstallableFlake::InstallableFlake(
SourceExprCommand * cmd,
ref<EvalState> state,
FlakeRef && flakeRef,
Strings && attrPaths,
Strings && prefixes,
const flake::LockFlags & lockFlags)
: InstallableValue(state),
flakeRef(flakeRef),
attrPaths(attrPaths),
prefixes(prefixes),
lockFlags(lockFlags)
{
if (cmd && cmd->getAutoArgs(*state)->size())
throw UsageError("'--arg' and '--argstr' are incompatible with flakes");
}
std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation()
{
auto lockedFlake = getLockedFlake();
@ -493,7 +531,11 @@ std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableF
auto root = cache->getRoot();
for (auto & attrPath : getActualAttrPaths()) {
auto attr = root->findAlongAttrPath(parseAttrPath(*state, attrPath));
auto attr = root->findAlongAttrPath(
parseAttrPath(*state, attrPath),
true
);
if (!attr) continue;
if (!attr->isDerivation())
@ -503,7 +545,7 @@ std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableF
auto drvInfo = DerivationInfo{
std::move(drvPath),
state->store->parseStorePath(attr->getAttr(state->sOutPath)->getString()),
state->store->maybeParseStorePath(attr->getAttr(state->sOutPath)->getString()),
attr->getAttr(state->sOutputName)->getString()
};
@ -562,10 +604,10 @@ InstallableFlake::getCursors(EvalState & state)
std::shared_ptr<flake::LockedFlake> InstallableFlake::getLockedFlake() const
{
flake::LockFlags lockFlagsApplyConfig = lockFlags;
lockFlagsApplyConfig.applyNixConfig = true;
if (!_lockedFlake) {
_lockedFlake = std::make_shared<flake::LockedFlake>(lockFlake(*state, flakeRef, lockFlags));
_lockedFlake->flake.config.apply();
// FIXME: send new config to the daemon.
_lockedFlake = std::make_shared<flake::LockedFlake>(lockFlake(*state, flakeRef, lockFlagsApplyConfig));
}
return _lockedFlake;
}
@ -614,17 +656,6 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
for (auto & s : ss) {
std::exception_ptr ex;
try {
auto [flakeRef, fragment] = parseFlakeRefWithFragment(s, absPath("."));
result.push_back(std::make_shared<InstallableFlake>(
getEvalState(), std::move(flakeRef),
fragment == "" ? getDefaultFlakeAttrPaths() : Strings{fragment},
getDefaultFlakeAttrPathPrefixes(), lockFlags));
continue;
} catch (...) {
ex = std::current_exception();
}
if (s.find('/') != std::string::npos) {
try {
result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
@ -636,14 +667,21 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
}
}
std::rethrow_exception(ex);
try {
auto [flakeRef, fragment] = parseFlakeRefWithFragment(s, absPath("."));
result.push_back(std::make_shared<InstallableFlake>(
this,
getEvalState(),
std::move(flakeRef),
fragment == "" ? getDefaultFlakeAttrPaths() : Strings{fragment},
getDefaultFlakeAttrPathPrefixes(),
lockFlags));
continue;
} catch (...) {
ex = std::current_exception();
}
/*
throw Error(
pathExists(s)
? "path '%s' is not a flake or a store path"
: "don't know how to handle argument '%s'", s);
*/
std::rethrow_exception(ex);
}
}
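parseInstallables now tries the store-path interpretation first (only for arguments containing a '/') and falls back to parsing the argument as a flake reference, remembering an exception so it can be rethrown if every interpretation fails. A minimal sketch of that std::exception_ptr fallback pattern, with dummy parsers standing in for followLinksToStorePath and parseFlakeRefWithFragment:

// Sketch only: try alternatives, keep an error, rethrow if nothing matched.
#include <exception>
#include <iostream>
#include <stdexcept>
#include <string>

std::string parseAsStorePath(const std::string & s)
{
    if (s.rfind("/nix/store/", 0) != 0)
        throw std::runtime_error("'" + s + "' is not a store path");
    return "store path " + s;
}

std::string parseAsFlakeRef(const std::string & s)
{
    if (s.find('#') == std::string::npos)
        throw std::runtime_error("cannot parse '" + s + "' as a flake reference");
    return "flake output " + s;
}

std::string parseInstallable(const std::string & s)
{
    std::exception_ptr ex;

    if (s.find('/') != std::string::npos) {      // only worth trying for path-like arguments
        try { return parseAsStorePath(s); }
        catch (...) { /* fall through to the flake parser */ }
    }

    try { return parseAsFlakeRef(s); }
    catch (...) { ex = std::current_exception(); }

    std::rethrow_exception(ex);                  // every interpretation failed
}

int main()
{
    std::cout << parseInstallable("nixpkgs#hello") << "\n";
    try { parseInstallable("nonsense"); }
    catch (std::exception & e) { std::cout << "error: " << e.what() << "\n"; }
}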
@ -658,79 +696,121 @@ std::shared_ptr<Installable> SourceExprCommand::parseInstallable(
return installables.front();
}
Buildables build(ref<Store> store, Realise mode,
std::vector<std::shared_ptr<Installable>> installables, BuildMode bMode)
BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPaths & hopefullyBuiltPaths)
{
BuiltPaths res;
for (const auto & b : hopefullyBuiltPaths)
std::visit(
overloaded{
[&](const DerivedPath::Opaque & bo) {
res.push_back(BuiltPath::Opaque{bo.path});
},
[&](const DerivedPath::Built & bfd) {
OutputPathMap outputs;
auto drv = evalStore->readDerivation(bfd.drvPath);
auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
auto drvOutputs = drv.outputsAndOptPaths(*store);
for (auto & output : bfd.outputs) {
if (!outputHashes.count(output))
throw Error(
"the derivation '%s' doesn't have an output named '%s'",
store->printStorePath(bfd.drvPath), output);
if (settings.isExperimentalFeatureEnabled(
Xp::CaDerivations)) {
auto outputId =
DrvOutput{outputHashes.at(output), output};
auto realisation =
store->queryRealisation(outputId);
if (!realisation)
throw Error(
"cannot operate on an output of unbuilt "
"content-addressed derivation '%s'",
outputId.to_string());
outputs.insert_or_assign(
output, realisation->outPath);
} else {
// If ca-derivations isn't enabled, assume that
// the output path is statically known.
assert(drvOutputs.count(output));
assert(drvOutputs.at(output).second);
outputs.insert_or_assign(
output, *drvOutputs.at(output).second);
}
}
res.push_back(BuiltPath::Built{bfd.drvPath, outputs});
},
},
b.raw());
return res;
}
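getBuiltPaths resolves each requested output either through a realisation lookup (when the ca-derivations experimental feature is enabled) or from the statically known output path. A hedged stand-in sketch of that decision, with plain maps replacing the store's realisation and derivation queries:

// Sketch only: the types and lookups are stand-ins, not the Nix store API.
#include <iostream>
#include <map>
#include <optional>
#include <stdexcept>
#include <string>

std::string resolveOutput(
    bool caDerivations,
    const std::string & output,
    const std::map<std::string, std::string> & realisations,                 // output name -> realised path
    const std::map<std::string, std::optional<std::string>> & staticPaths)   // output name -> optional path
{
    if (caDerivations) {
        if (auto i = realisations.find(output); i != realisations.end())
            return i->second;
        throw std::runtime_error("output '" + output + "' of a content-addressed derivation is not built yet");
    }
    auto & path = staticPaths.at(output);
    if (!path)
        throw std::runtime_error("output path of '" + output + "' should be statically known");
    return *path;
}

int main()
{
    std::cout << resolveOutput(false, "out", {}, {{"out", std::string("/nix/store/dddd-hello")}}) << "\n";
}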
BuiltPaths build(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
const std::vector<std::shared_ptr<Installable>> & installables,
BuildMode bMode)
{
if (mode == Realise::Nothing)
settings.readOnlyMode = true;
Buildables buildables;
std::vector<StorePathWithOutputs> pathsToBuild;
std::vector<DerivedPath> pathsToBuild;
for (auto & i : installables) {
for (auto & b : i->toBuildables()) {
std::visit(overloaded {
[&](BuildableOpaque bo) {
pathsToBuild.push_back({bo.path});
},
[&](BuildableFromDrv bfd) {
StringSet outputNames;
for (auto & output : bfd.outputs)
outputNames.insert(output.first);
pathsToBuild.push_back({bfd.drvPath, outputNames});
},
}, b);
buildables.push_back(std::move(b));
}
auto b = i->toDerivedPaths();
pathsToBuild.insert(pathsToBuild.end(), b.begin(), b.end());
}
if (mode == Realise::Nothing)
if (mode == Realise::Nothing || mode == Realise::Derivation)
printMissing(store, pathsToBuild, lvlError);
else if (mode == Realise::Outputs)
store->buildPaths(pathsToBuild, bMode);
store->buildPaths(pathsToBuild, bMode, evalStore);
return buildables;
return getBuiltPaths(evalStore, store, pathsToBuild);
}
StorePathSet toStorePaths(ref<Store> store,
Realise mode, OperateOn operateOn,
std::vector<std::shared_ptr<Installable>> installables)
BuiltPaths toBuiltPaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
const std::vector<std::shared_ptr<Installable>> & installables)
{
StorePathSet outPaths;
if (operateOn == OperateOn::Output) {
for (auto & b : build(store, mode, installables))
std::visit(overloaded {
[&](BuildableOpaque bo) {
outPaths.insert(bo.path);
},
[&](BuildableFromDrv bfd) {
for (auto & output : bfd.outputs) {
if (!output.second)
throw Error("Cannot operate on output of unbuilt CA drv");
outPaths.insert(*output.second);
}
},
}, b);
} else {
if (operateOn == OperateOn::Output)
return build(evalStore, store, mode, installables);
else {
if (mode == Realise::Nothing)
settings.readOnlyMode = true;
for (auto & i : installables)
for (auto & b : i->toBuildables())
if (auto bfd = std::get_if<BuildableFromDrv>(&b))
outPaths.insert(bfd->drvPath);
BuiltPaths res;
for (auto & drvPath : toDerivations(store, installables, true))
res.push_back(BuiltPath::Opaque{drvPath});
return res;
}
}
StorePathSet toStorePaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode, OperateOn operateOn,
const std::vector<std::shared_ptr<Installable>> & installables)
{
StorePathSet outPaths;
for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) {
auto thisOutPaths = path.outPaths();
outPaths.insert(thisOutPaths.begin(), thisOutPaths.end());
}
return outPaths;
}
StorePath toStorePath(ref<Store> store,
StorePath toStorePath(
ref<Store> evalStore,
ref<Store> store,
Realise mode, OperateOn operateOn,
std::shared_ptr<Installable> installable)
{
auto paths = toStorePaths(store, mode, operateOn, {installable});
auto paths = toStorePaths(evalStore, store, mode, operateOn, {installable});
if (paths.size() != 1)
throw Error("argument '%s' should evaluate to one store path", installable->what());
@ -738,15 +818,17 @@ StorePath toStorePath(ref<Store> store,
return *paths.begin();
}
StorePathSet toDerivations(ref<Store> store,
std::vector<std::shared_ptr<Installable>> installables, bool useDeriver)
StorePathSet toDerivations(
ref<Store> store,
const std::vector<std::shared_ptr<Installable>> & installables,
bool useDeriver)
{
StorePathSet drvPaths;
for (auto & i : installables)
for (auto & b : i->toBuildables())
for (const auto & i : installables)
for (const auto & b : i->toDerivedPaths())
std::visit(overloaded {
[&](BuildableOpaque bo) {
[&](const DerivedPath::Opaque & bo) {
if (!useDeriver)
throw Error("argument '%s' did not evaluate to a derivation", i->what());
auto derivers = store->queryValidDerivers(bo.path);
@ -755,10 +837,10 @@ StorePathSet toDerivations(ref<Store> store,
// FIXME: use all derivers?
drvPaths.insert(*derivers.begin());
},
[&](BuildableFromDrv bfd) {
[&](const DerivedPath::Built & bfd) {
drvPaths.insert(bfd.drvPath);
},
}, b);
}, b.raw());
return drvPaths;
}

View file

@ -2,13 +2,13 @@
#include "util.hh"
#include "path.hh"
#include "path-with-outputs.hh"
#include "derived-path.hh"
#include "eval.hh"
#include "flake/flake.hh"
#include <optional>
#include <nlohmann/json_fwd.hpp>
namespace nix {
struct DrvInfo;
@ -16,25 +16,6 @@ struct SourceExprCommand;
namespace eval_cache { class EvalCache; class AttrCursor; }
struct BuildableOpaque {
StorePath path;
nlohmann::json toJSON(ref<Store> store) const;
};
struct BuildableFromDrv {
StorePath drvPath;
std::map<std::string, std::optional<StorePath>> outputs;
nlohmann::json toJSON(ref<Store> store) const;
};
typedef std::variant<
BuildableOpaque,
BuildableFromDrv
> Buildable;
typedef std::vector<Buildable> Buildables;
nlohmann::json buildablesToJSON(const Buildables & buildables, ref<Store> store);
struct App
{
std::vector<StorePathWithOutputs> context;
@ -42,17 +23,23 @@ struct App
// FIXME: add args, sandbox settings, metadata, ...
};
struct UnresolvedApp
{
App unresolved;
App resolve(ref<Store> evalStore, ref<Store> store);
};
struct Installable
{
virtual ~Installable() { }
virtual std::string what() = 0;
virtual Buildables toBuildables() = 0;
virtual DerivedPaths toDerivedPaths() = 0;
Buildable toBuildable();
DerivedPath toDerivedPath();
App toApp(EvalState & state);
UnresolvedApp toApp(EvalState & state);
virtual std::pair<Value *, Pos> toValue(EvalState & state)
{
@ -93,7 +80,7 @@ struct InstallableValue : Installable
virtual std::vector<DerivationInfo> toDerivations() = 0;
Buildables toBuildables() override;
DerivedPaths toDerivedPaths() override;
};
struct InstallableFlake : InstallableValue
@ -104,11 +91,13 @@ struct InstallableFlake : InstallableValue
const flake::LockFlags & lockFlags;
mutable std::shared_ptr<flake::LockedFlake> _lockedFlake;
InstallableFlake(ref<EvalState> state, FlakeRef && flakeRef,
Strings && attrPaths, Strings && prefixes, const flake::LockFlags & lockFlags)
: InstallableValue(state), flakeRef(flakeRef), attrPaths(attrPaths),
prefixes(prefixes), lockFlags(lockFlags)
{ }
InstallableFlake(
SourceExprCommand * cmd,
ref<EvalState> state,
FlakeRef && flakeRef,
Strings && attrPaths,
Strings && prefixes,
const flake::LockFlags & lockFlags);
std::string what() override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }

src/libcmd/local.mk (new file, 15 lines)
View file

@ -0,0 +1,15 @@
libraries += libcmd
libcmd_NAME = libnixcmd
libcmd_DIR := $(d)
libcmd_SOURCES := $(wildcard $(d)/*.cc)
libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers
libcmd_LDFLAGS += -llowdown -pthread
libcmd_LIBS = libstore libutil libexpr libmain libfetchers
$(eval $(call install-file-in, $(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644))

View file

@ -3,9 +3,7 @@
#include "finally.hh"
#include <sys/queue.h>
extern "C" {
#include <lowdown.h>
}
namespace nix {
@ -14,7 +12,7 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
struct lowdown_opts opts {
.type = LOWDOWN_TERM,
.maxdepth = 20,
.cols = std::min(getWindowSize().second, (unsigned short) 80),
.cols = std::max(getWindowSize().second, (unsigned short) 80),
.hmargin = 0,
.vmargin = 0,
.feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES,
@ -27,7 +25,7 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
Finally freeDoc([&]() { lowdown_doc_free(doc); });
size_t maxn = 0;
auto node = lowdown_doc_parse(doc, &maxn, markdown.data(), markdown.size());
auto node = lowdown_doc_parse(doc, &maxn, markdown.data(), markdown.size(), nullptr);
if (!node)
throw Error("cannot parse Markdown document");
Finally freeNode([&]() { lowdown_node_free(node); });
@ -42,9 +40,11 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
throw Error("cannot allocate Markdown output buffer");
Finally freeBuffer([&]() { lowdown_buf_free(buf); });
lowdown_term_rndr(buf, nullptr, renderer, node);
int rndr_res = lowdown_term_rndr(buf, renderer, node);
if (!rndr_res)
throw Error("allocation error while rendering Markdown");
return std::string(buf->data, buf->size);
return filterANSIEscapes(std::string(buf->data, buf->size), !shouldANSI());
}
}

src/libcmd/nix-cmd.pc.in (new file, 9 lines)
View file

@ -0,0 +1,9 @@
prefix=@prefix@
libdir=@libdir@
includedir=@includedir@
Name: Nix
Description: Nix Package Manager
Version: @PACKAGE_VERSION@
Libs: -L${libdir} -lnixcmd
Cflags: -I${includedir}/nix -std=c++17

View file

@ -19,7 +19,7 @@ static Strings parseAttrPath(std::string_view s)
++i;
while (1) {
if (i == s.end())
throw Error("missing closing quote in selection path '%1%'", s);
throw ParseError("missing closing quote in selection path '%1%'", s);
if (*i == '"') break;
cur.push_back(*i++);
}
@ -52,9 +52,7 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
for (auto & attr : tokens) {
/* Is i an index (integer) or a normal attribute name? */
enum { apAttr, apIndex } apType = apAttr;
unsigned int attrIndex;
if (string2Int(attr, attrIndex)) apType = apIndex;
auto attrIndex = string2Int<unsigned int>(attr);
/* Evaluate the expression. */
Value * vNew = state.allocValue();
@ -65,7 +63,7 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
/* It should evaluate to either a set or an expression,
according to what is specified in the attrPath. */
if (apType == apAttr) {
if (!attrIndex) {
if (v->type() != nAttrs)
throw TypeError(
@ -82,17 +80,17 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
pos = *a->pos;
}
else if (apType == apIndex) {
else {
if (!v->isList())
throw TypeError(
"the expression selected by the selection path '%1%' should be a list but is %2%",
attrPath,
showType(*v));
if (attrIndex >= v->listSize())
throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", attrIndex, attrPath);
if (*attrIndex >= v->listSize())
throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath);
v = v->listElems()[attrIndex];
v = v->listElems()[*attrIndex];
pos = noPos;
}
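The attr-path change switches string2Int to an optional return value, so "is this an index?" and "what is the index?" collapse into a single check. A self-contained sketch of such an optional-returning parser built on std::from_chars; this is an illustration, not Nix's actual implementation:

// Sketch only: parse an entire string as an integer, or return nullopt.
#include <charconv>
#include <iostream>
#include <optional>
#include <string_view>

template<typename N>
std::optional<N> string2IntSketch(std::string_view s)
{
    N n{};
    auto [p, ec] = std::from_chars(s.data(), s.data() + s.size(), n);
    if (ec != std::errc() || p != s.data() + s.size())
        return std::nullopt;                 // not a (complete) number
    return n;
}

int main()
{
    for (std::string_view attr : {"3", "outPath"}) {
        if (auto index = string2IntSketch<unsigned int>(attr))
            std::cout << attr << ": list index " << *index << "\n";   // apIndex case
        else
            std::cout << attr << ": attribute name\n";                // apAttr case
    }
}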
@ -102,7 +100,7 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
}
Pos findDerivationFilename(EvalState & state, Value & v, std::string what)
Pos findPackageFilename(EvalState & state, Value & v, std::string what)
{
Value * v2;
try {
@ -118,14 +116,14 @@ Pos findDerivationFilename(EvalState & state, Value & v, std::string what)
auto colon = pos.rfind(':');
if (colon == std::string::npos)
throw Error("cannot parse meta.position attribute '%s'", pos);
throw ParseError("cannot parse meta.position attribute '%s'", pos);
std::string filename(pos, 0, colon);
unsigned int lineno;
try {
lineno = std::stoi(std::string(pos, colon + 1));
} catch (std::invalid_argument & e) {
throw Error("cannot parse line number '%s'", pos);
throw ParseError("cannot parse line number '%s'", pos);
}
Symbol file = state.symbols.create(filename);

View file

@ -14,7 +14,7 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
Bindings & autoArgs, Value & vIn);
/* Heuristic to find the filename and lineno of a Nix value. */
Pos findDerivationFilename(EvalState & state, Value & v, std::string what);
Pos findPackageFilename(EvalState & state, Value & v, std::string what);
std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s);

View file

@ -17,8 +17,8 @@ struct Attr
{
Symbol name;
Value * value;
Pos * pos;
Attr(Symbol name, Value * value, Pos * pos = &noPos)
ptr<Pos> pos;
Attr(Symbol name, Value * value, ptr<Pos> pos = ptr(&noPos))
: name(name), value(value), pos(pos) { };
Attr() : pos(&noPos) { };
bool operator < (const Attr & a) const
@ -35,12 +35,13 @@ class Bindings
{
public:
typedef uint32_t size_t;
ptr<Pos> pos;
private:
size_t size_, capacity_;
Attr attrs[0];
Bindings(size_t capacity) : size_(0), capacity_(capacity) { }
Bindings(size_t capacity) : pos(&noPos), size_(0), capacity_(capacity) { }
Bindings(const Bindings & bindings) = delete;
public:
@ -77,7 +78,7 @@ public:
auto a = get(name);
if (!a)
throw Error({
.hint = hintfmt("attribute '%s' missing", name),
.msg = hintfmt("attribute '%s' missing", name),
.errPos = pos
});

View file

@ -12,16 +12,20 @@ namespace nix {
MixEvalArgs::MixEvalArgs()
{
auto category = "Common evaluation options";
addFlag({
.longName = "arg",
.description = "argument to be passed to Nix functions",
.description = "Pass the value *expr* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "expr"},
.handler = {[&](std::string name, std::string expr) { autoArgs[name] = 'E' + expr; }}
});
addFlag({
.longName = "argstr",
.description = "string-valued argument to be passed to Nix functions",
.description = "Pass the string *string* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "string"},
.handler = {[&](std::string name, std::string s) { autoArgs[name] = 'S' + s; }},
});
@ -29,14 +33,16 @@ MixEvalArgs::MixEvalArgs()
addFlag({
.longName = "include",
.shortName = 'I',
.description = "add a path to the list of locations used to look up `<...>` file names",
.description = "Add *path* to the list of locations used to look up `<...>` file names.",
.category = category,
.labels = {"path"},
.handler = {[&](std::string s) { searchPath.push_back(s); }}
});
addFlag({
.longName = "impure",
.description = "allow access to mutable paths and repositories",
.description = "Allow access to mutable paths and repositories.",
.category = category,
.handler = {[&]() {
evalSettings.pureEval = false;
}},
@ -44,7 +50,8 @@ MixEvalArgs::MixEvalArgs()
addFlag({
.longName = "override-flake",
.description = "override a flake registry value",
.description = "Override the flake registries, redirecting *original-ref* to *resolved-ref*.",
.category = category,
.labels = {"original-ref", "resolved-ref"},
.handler = {[&](std::string _from, std::string _to) {
auto from = parseFlakeRef(_from, absPath("."));
@ -54,6 +61,14 @@ MixEvalArgs::MixEvalArgs()
fetchers::overrideRegistry(from.input, to.input, extraAttrs);
}}
});
addFlag({
.longName = "eval-store",
.description = "The Nix store to use for evaluations.",
.category = category,
.labels = {"store-url"},
.handler = {&evalStoreUrl},
});
}
Bindings * MixEvalArgs::getAutoArgs(EvalState & state)

View file

@ -16,8 +16,9 @@ struct MixEvalArgs : virtual Args
Strings searchPath;
private:
std::optional<std::string> evalStoreUrl;
private:
std::map<std::string, std::string> autoArgs;
};

View file

@ -486,11 +486,11 @@ std::shared_ptr<AttrCursor> AttrCursor::getAttr(std::string_view name)
return getAttr(root->state.symbols.create(name));
}
std::shared_ptr<AttrCursor> AttrCursor::findAlongAttrPath(const std::vector<Symbol> & attrPath)
std::shared_ptr<AttrCursor> AttrCursor::findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force)
{
auto res = shared_from_this();
for (auto & attr : attrPath) {
res = res->maybeGetAttr(attr);
res = res->maybeGetAttr(attr, force);
if (!res) return {};
}
return res;

View file

@ -102,7 +102,7 @@ public:
std::shared_ptr<AttrCursor> getAttr(std::string_view name);
std::shared_ptr<AttrCursor> findAlongAttrPath(const std::vector<Symbol> & attrPath);
std::shared_ptr<AttrCursor> findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force = false);
std::string getString();

View file

@ -10,7 +10,7 @@ namespace nix {
LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s))
{
throw EvalError({
.hint = hintfmt(s),
.msg = hintfmt(s),
.errPos = pos
});
}
@ -24,7 +24,7 @@ LocalNoInlineNoReturn(void throwTypeError(const char * s, const Value & v))
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const Value & v))
{
throw TypeError({
.hint = hintfmt(s, showType(v)),
.msg = hintfmt(s, showType(v)),
.errPos = pos
});
}

View file

@ -64,7 +64,11 @@ static char * dupStringWithLen(const char * s, size_t size)
RootValue allocRootValue(Value * v)
{
#if HAVE_BOEHMGC
return std::allocate_shared<Value *>(traceable_allocator<Value *>(), v);
#else
return std::make_shared<Value *>(v);
#endif
}
@ -201,6 +205,15 @@ string showType(const Value & v)
}
}
Pos Value::determinePos(const Pos &pos) const
{
switch (internalType) {
case tAttrs: return *attrs->pos;
case tLambda: return lambda.fun->pos;
case tApp: return app.left->determinePos(pos);
default: return pos;
}
}
bool Value::isTrivial() const
{
@ -224,22 +237,34 @@ static void * oomHandler(size_t requested)
}
class BoehmGCStackAllocator : public StackAllocator {
boost::coroutines2::protected_fixedsize_stack stack {
// We allocate 8 MB, the default max stack size on NixOS.
// A smaller stack might be quicker to allocate but reduces the stack
// depth available for source filter expressions etc.
std::max(boost::context::stack_traits::default_size(), static_cast<std::size_t>(8 * 1024 * 1024))
boost::coroutines2::protected_fixedsize_stack stack {
// We allocate 8 MB, the default max stack size on NixOS.
// A smaller stack might be quicker to allocate but reduces the stack
// depth available for source filter expressions etc.
std::max(boost::context::stack_traits::default_size(), static_cast<std::size_t>(8 * 1024 * 1024))
};
// This is specific to boost::coroutines2::protected_fixedsize_stack.
// The stack protection page is included in sctx.size, so we have to
// subtract one page size from the stack size.
std::size_t pfss_usable_stack_size(boost::context::stack_context &sctx) {
return sctx.size - boost::context::stack_traits::page_size();
}
public:
boost::context::stack_context allocate() override {
auto sctx = stack.allocate();
GC_add_roots(static_cast<char *>(sctx.sp) - sctx.size, sctx.sp);
// Stacks generally start at a high address and grow to lower addresses.
// Architectures that do the opposite are rare; in fact so rare that
// boost_routine does not implement it.
// So we subtract the stack size.
GC_add_roots(static_cast<char *>(sctx.sp) - pfss_usable_stack_size(sctx), sctx.sp);
return sctx;
}
void deallocate(boost::context::stack_context sctx) override {
GC_remove_roots(static_cast<char *>(sctx.sp) - sctx.size, sctx.sp);
GC_remove_roots(static_cast<char *>(sctx.sp) - pfss_usable_stack_size(sctx), sctx.sp);
stack.deallocate(sctx);
}
@ -353,7 +378,10 @@ static Strings parseNixPath(const string & s)
}
EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
EvalState::EvalState(
const Strings & _searchPath,
ref<Store> store,
std::shared_ptr<Store> buildStore)
: sWith(symbols.create("<with>"))
, sOutPath(symbols.create("outPath"))
, sDrvPath(symbols.create("drvPath"))
@ -386,6 +414,7 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
, sEpsilon(symbols.create(""))
, repair(NoRepair)
, store(store)
, buildStore(buildStore ? buildStore : store)
, regexCache(makeRegexCache())
, baseEnv(allocEnv(128))
, staticBaseEnv(false, 0)
@ -416,12 +445,12 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
StorePathSet closure;
store->computeFSClosure(store->toStorePath(r.second).first, closure);
for (auto & path : closure)
allowedPaths->insert(store->printStorePath(path));
allowPath(path);
} catch (InvalidPath &) {
allowedPaths->insert(r.second);
allowPath(r.second);
}
} else
allowedPaths->insert(r.second);
allowPath(r.second);
}
}
@ -436,6 +465,35 @@ EvalState::~EvalState()
}
void EvalState::requireExperimentalFeatureOnEvaluation(
const ExperimentalFeature & feature,
const std::string_view fName,
const Pos & pos)
{
if (!settings.isExperimentalFeatureEnabled(feature)) {
throw EvalError({
.msg = hintfmt(
"Cannot call '%2%' because experimental Nix feature '%1%' is disabled. You can enable it via '--extra-experimental-features %1%'.",
feature,
fName
),
.errPos = pos
});
}
}
void EvalState::allowPath(const Path & path)
{
if (allowedPaths)
allowedPaths->insert(path);
}
void EvalState::allowPath(const StorePath & storePath)
{
if (allowedPaths)
allowedPaths->insert(store->toRealPath(storePath));
}
Path EvalState::checkSourcePath(const Path & path_)
{
if (!allowedPaths) return path_;
@ -592,10 +650,8 @@ Value & EvalState::getBuiltin(const string & name)
std::optional<EvalState::Doc> EvalState::getDoc(Value & v)
{
if (v.isPrimOp() || v.isPrimOpApp()) {
if (v.isPrimOp()) {
auto v2 = &v;
while (v2->isPrimOpApp())
v2 = v2->primOpApp.left;
if (v2->primOp->doc)
return Doc {
.pos = noPos,
@ -622,7 +678,7 @@ LocalNoInlineNoReturn(void throwEvalError(const char * s, const string & s2))
LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const string & s2))
{
throw EvalError({
.hint = hintfmt(s, s2),
.msg = hintfmt(s, s2),
.errPos = pos
});
}
@ -635,7 +691,7 @@ LocalNoInlineNoReturn(void throwEvalError(const char * s, const string & s2, con
LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const string & s2, const string & s3))
{
throw EvalError({
.hint = hintfmt(s, s2, s3),
.msg = hintfmt(s, s2, s3),
.errPos = pos
});
}
@ -644,7 +700,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & p1, const char * s, const
{
// p1 is where the error occurred; p2 is a position mentioned in the message.
throw EvalError({
.hint = hintfmt(s, sym, p2),
.msg = hintfmt(s, sym, p2),
.errPos = p1
});
}
@ -652,20 +708,15 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & p1, const char * s, const
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s))
{
throw TypeError({
.hint = hintfmt(s),
.msg = hintfmt(s),
.errPos = pos
});
}
LocalNoInlineNoReturn(void throwTypeError(const char * s, const string & s1))
{
throw TypeError(s, s1);
}
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const ExprLambda & fun, const Symbol & s2))
{
throw TypeError({
.hint = hintfmt(s, fun.showNamePos(), s2),
.msg = hintfmt(s, fun.showNamePos(), s2),
.errPos = pos
});
}
@ -673,7 +724,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
LocalNoInlineNoReturn(void throwAssertionError(const Pos & pos, const char * s, const string & s1))
{
throw AssertionError({
.hint = hintfmt(s, s1),
.msg = hintfmt(s, s1),
.errPos = pos
});
}
@ -681,7 +732,15 @@ LocalNoInlineNoReturn(void throwAssertionError(const Pos & pos, const char * s,
LocalNoInlineNoReturn(void throwUndefinedVarError(const Pos & pos, const char * s, const string & s1))
{
throw UndefinedVarError({
.hint = hintfmt(s, s1),
.msg = hintfmt(s, s1),
.errPos = pos
});
}
LocalNoInlineNoReturn(void throwMissingArgumentError(const Pos & pos, const char * s, const string & s1))
{
throw MissingArgumentError({
.msg = hintfmt(s, s1),
.errPos = pos
});
}
@ -740,7 +799,7 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
}
Bindings::iterator j = env->values[0]->attrs->find(var.name);
if (j != env->values[0]->attrs->end()) {
if (countCalls && j->pos) attrSelects[*j->pos]++;
if (countCalls) attrSelects[*j->pos]++;
return j->value;
}
if (!env->prevWith)
@ -750,18 +809,10 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
}
std::atomic<uint64_t> nrValuesFreed{0};
void finalizeValue(void * obj, void * data)
{
nrValuesFreed++;
}
Value * EvalState::allocValue()
{
nrValues++;
auto v = (Value *) allocBytes(sizeof(Value));
//GC_register_finalizer_no_order(v, finalizeValue, nullptr, nullptr, nullptr);
return v;
}
@ -803,9 +854,9 @@ void EvalState::mkThunk_(Value & v, Expr * expr)
}
void EvalState::mkPos(Value & v, Pos * pos)
void EvalState::mkPos(Value & v, ptr<Pos> pos)
{
if (pos && pos->file.set()) {
if (pos->file.set()) {
mkAttrs(v, 3);
mkString(*allocAttr(v, sFile), pos->file);
mkInt(*allocAttr(v, sLine), pos->line);
@ -828,39 +879,37 @@ Value * Expr::maybeThunk(EvalState & state, Env & env)
}
unsigned long nrAvoided = 0;
Value * ExprVar::maybeThunk(EvalState & state, Env & env)
{
Value * v = state.lookupVar(&env, *this, true);
/* The value might not be initialised in the environment yet.
In that case, ignore it. */
if (v) { nrAvoided++; return v; }
if (v) { state.nrAvoided++; return v; }
return Expr::maybeThunk(state, env);
}
Value * ExprString::maybeThunk(EvalState & state, Env & env)
{
nrAvoided++;
state.nrAvoided++;
return &v;
}
Value * ExprInt::maybeThunk(EvalState & state, Env & env)
{
nrAvoided++;
state.nrAvoided++;
return &v;
}
Value * ExprFloat::maybeThunk(EvalState & state, Env & env)
{
nrAvoided++;
state.nrAvoided++;
return &v;
}
Value * ExprPath::maybeThunk(EvalState & state, Env & env)
{
nrAvoided++;
state.nrAvoided++;
return &v;
}
@ -875,38 +924,23 @@ void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial)
return;
}
Path path2 = resolveExprPath(path);
if ((i = fileEvalCache.find(path2)) != fileEvalCache.end()) {
Path resolvedPath = resolveExprPath(path);
if ((i = fileEvalCache.find(resolvedPath)) != fileEvalCache.end()) {
v = i->second;
return;
}
printTalkative("evaluating file '%1%'", path2);
printTalkative("evaluating file '%1%'", resolvedPath);
Expr * e = nullptr;
auto j = fileParseCache.find(path2);
auto j = fileParseCache.find(resolvedPath);
if (j != fileParseCache.end())
e = j->second;
if (!e)
e = parseExprFromFile(checkSourcePath(path2));
e = parseExprFromFile(checkSourcePath(resolvedPath));
fileParseCache[path2] = e;
try {
// Enforce that 'flake.nix' is a direct attrset, not a
// computation.
if (mustBeTrivial &&
!(dynamic_cast<ExprAttrs *>(e)))
throw Error("file '%s' must be an attribute set", path);
eval(e, v);
} catch (Error & e) {
addErrorTrace(e, "while evaluating the file '%1%':", path2);
throw;
}
fileEvalCache[path2] = v;
if (path != path2) fileEvalCache[path] = v;
cacheFile(path, resolvedPath, e, v, mustBeTrivial);
}
@ -917,6 +951,32 @@ void EvalState::resetFileCache()
}
void EvalState::cacheFile(
const Path & path,
const Path & resolvedPath,
Expr * e,
Value & v,
bool mustBeTrivial)
{
fileParseCache[resolvedPath] = e;
try {
// Enforce that 'flake.nix' is a direct attrset, not a
// computation.
if (mustBeTrivial &&
!(dynamic_cast<ExprAttrs *>(e)))
throw EvalError("file '%s' must be an attribute set", path);
eval(e, v);
} catch (Error & e) {
addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath);
throw;
}
fileEvalCache[resolvedPath] = v;
if (path != resolvedPath) fileEvalCache[path] = v;
}
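cacheFile centralises the two-level caching that evalFile previously did inline: parsed expressions are memoised per resolved path, evaluated values are memoised per resolved path, and the unresolved path is added as an alias so later lookups hit the cache either way. A rough standalone sketch of the same shape, where parseFile, evalAst and resolve are stand-ins rather than the Nix functions:

#include <cstdio>
#include <map>
#include <string>

// Stand-ins for parsing and evaluating a file.
static std::string parseFile(const std::string & path) { return "ast(" + path + ")"; }
static int evalAst(const std::string & ast) { return (int) ast.size(); }

// Pretend resolution of symlinks / directory default.nix handling.
static std::string resolve(const std::string & path) { return path + "/default.nix"; }

static std::map<std::string, std::string> fileParseCache; // resolved path -> AST
static std::map<std::string, int> fileEvalCache;          // path (resolved or alias) -> value

int evalFile(const std::string & path)
{
    auto resolved = resolve(path);
    if (auto i = fileEvalCache.find(resolved); i != fileEvalCache.end())
        return i->second;

    // Reuse a previous parse if we have one, otherwise parse and remember it.
    std::string ast;
    if (auto j = fileParseCache.find(resolved); j != fileParseCache.end())
        ast = j->second;
    else
        fileParseCache[resolved] = ast = parseFile(resolved);

    int value = evalAst(ast);
    fileEvalCache[resolved] = value;
    if (path != resolved)
        fileEvalCache[path] = value;   // alias the unresolved path too
    return value;
}

int main()
{
    std::printf("%d\n", evalFile("flake"));
    std::printf("%d\n", evalFile("flake")); // second call is served from the cache
}

Keying both caches on the resolved path means a file reached through different unresolved spellings is still parsed and evaluated only once.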
void EvalState::eval(Expr * e, Value & v)
{
e->eval(*this, baseEnv, v);
@ -1007,7 +1067,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
} else
vAttr = i.second.e->maybeThunk(state, i.second.inherited ? env : env2);
env2.values[displ++] = vAttr;
v.attrs->push_back(Attr(i.first, vAttr, &i.second.pos));
v.attrs->push_back(Attr(i.first, vAttr, ptr(&i.second.pos)));
}
/* If the rec contains an attribute called `__overrides', then
@ -1039,7 +1099,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
else
for (auto & i : attrs)
v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), &i.second.pos));
v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), ptr(&i.second.pos)));
/* Dynamic attrs apply *after* rec and __overrides. */
for (auto & i : dynamicAttrs) {
@ -1056,9 +1116,11 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
i.valueExpr->setName(nameSym);
/* Keep sorted order so find can catch duplicates */
v.attrs->push_back(Attr(nameSym, i.valueExpr->maybeThunk(state, *dynamicEnv), &i.pos));
v.attrs->push_back(Attr(nameSym, i.valueExpr->maybeThunk(state, *dynamicEnv), ptr(&i.pos)));
v.attrs->sort(); // FIXME: inefficient
}
v.attrs->pos = ptr(&pos);
}
@ -1113,12 +1175,10 @@ static string showAttrPath(EvalState & state, Env & env, const AttrPath & attrPa
}
unsigned long nrLookups = 0;
void ExprSelect::eval(EvalState & state, Env & env, Value & v)
{
Value vTmp;
Pos * pos2 = 0;
ptr<Pos> pos2(&noPos);
Value * vAttrs = &vTmp;
e->eval(state, env, vTmp);
@ -1126,7 +1186,7 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
try {
for (auto & i : attrPath) {
nrLookups++;
state.nrLookups++;
Bindings::iterator j;
Symbol name = getName(i, state, env);
if (def) {
@ -1144,13 +1204,13 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
}
vAttrs = j->value;
pos2 = j->pos;
if (state.countCalls && pos2) state.attrSelects[*pos2]++;
if (state.countCalls) state.attrSelects[*pos2]++;
}
state.forceValue(*vAttrs, ( pos2 != NULL ? *pos2 : this->pos ) );
state.forceValue(*vAttrs, (*pos2 != noPos ? *pos2 : this->pos ) );
} catch (Error & e) {
if (pos2 && pos2->file != state.sDerivationNix)
if (*pos2 != noPos && pos2->file != state.sDerivationNix)
addErrorTrace(e, *pos2, "while evaluating the attribute '%1%'",
showAttrPath(state, env, attrPath));
throw;
@ -1268,13 +1328,13 @@ void EvalState::callFunction(Value & fun, Value & arg, Value & v, const Pos & po
auto size =
(lambda.arg.empty() ? 0 : 1) +
(lambda.matchAttrs ? lambda.formals->formals.size() : 0);
(lambda.hasFormals() ? lambda.formals->formals.size() : 0);
Env & env2(allocEnv(size));
env2.up = fun.lambda.env;
size_t displ = 0;
if (!lambda.matchAttrs)
if (!lambda.hasFormals())
env2.values[displ++] = &arg;
else {
@ -1354,7 +1414,7 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
}
}
if (!fun.isLambda() || !fun.lambda.fun->matchAttrs) {
if (!fun.isLambda() || !fun.lambda.fun->hasFormals()) {
res = fun;
return;
}
@ -1376,7 +1436,13 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
if (j != args.end()) {
actualArgs->attrs->push_back(*j);
} else if (!i.def) {
throwTypeError("cannot auto-call a function that has an argument without a default value ('%1%')", i.name);
throwMissingArgumentError(i.pos, R"(cannot evaluate a function that has an argument without a value ('%1%')
Nix attempted to evaluate a function as a top level expression; in
this case it must have its arguments supplied either by default
values, or passed explicitly with '--arg' or '--argstr'. See
https://nixos.org/manual/nix/stable/#ss-functions.)", i.name);
}
}
}
@ -1550,7 +1616,6 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
and none of the strings are allowed to have contexts. */
if (first) {
firstType = vTmp.type();
first = false;
}
if (firstType == nInt) {
@ -1571,7 +1636,12 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
} else
throwEvalError(pos, "cannot add %1% to a float", showType(vTmp));
} else
s << state.coerceToString(pos, vTmp, context, false, firstType == nString);
/* Skip canonicalization of the first path, which would only be
non-canonical in the first place if it comes from a ./${foo}-style
interpolated path. */
s << state.coerceToString(pos, vTmp, context, false, firstType == nString, !first);
first = false;
}
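The extra argument passed to coerceToString here exists because canonicalising the first segment of an interpolated path such as ./foo/${bar} would strip its trailing slash and glue the interpolated text onto the wrong component. A small illustration of the failure mode being avoided, with toyCanon standing in for canonPath:

#include <cstdio>
#include <string>

// Very rough stand-in for canonPath: just drops a trailing slash.
static std::string toyCanon(std::string p)
{
    while (p.size() > 1 && p.back() == '/') p.pop_back();
    return p;
}

static std::string concatPath(const std::string & first, const std::string & rest,
                              bool canonicaliseFirst)
{
    return (canonicaliseFirst ? toyCanon(first) : first) + rest;
}

int main()
{
    // ./foo/${bar} where bar = "baz", with the flake at /src
    std::printf("%s\n", concatPath("/src/foo/", "baz", true));  // /src/foobaz  (wrong)
    std::printf("%s\n", concatPath("/src/foo/", "baz", false)); // /src/foo/baz (intended)
}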
if (firstType == nInt)
@ -1590,7 +1660,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
void ExprPos::eval(EvalState & state, Env & env, Value & v)
{
state.mkPos(v, &pos);
state.mkPos(v, ptr(&pos));
}
@ -1760,7 +1830,7 @@ std::optional<string> EvalState::tryAttrsToString(const Pos & pos, Value & v,
}
string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
bool coerceMore, bool copyToStore)
bool coerceMore, bool copyToStore, bool canonicalizePath)
{
forceValue(v, pos);
@ -1772,7 +1842,7 @@ string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
}
if (v.type() == nPath) {
Path path(canonPath(v.path));
Path path(canonicalizePath ? canonPath(v.path) : v.path);
return copyToStore ? copyPathToStore(context, path) : path;
}
@ -1831,6 +1901,7 @@ string EvalState::copyPathToStore(PathSet & context, const Path & path)
? store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first
: store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, repair);
dstPath = store->printStorePath(p);
allowPath(p);
srcToStore.insert_or_assign(path, std::move(p));
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, dstPath);
}
@ -2048,7 +2119,7 @@ void EvalState::printStats()
string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const
{
throw TypeError({
.hint = hintfmt("cannot coerce %1% to a string", showType()),
.msg = hintfmt("cannot coerce %1% to a string", showType()),
.errPos = pos
});
}
@ -2084,9 +2155,12 @@ Strings EvalSettings::getDefaultNixPath()
}
};
add(getHome() + "/.nix-defexpr/channels");
add(settings.nixStateDir + "/profiles/per-user/root/channels/nixpkgs", "nixpkgs");
add(settings.nixStateDir + "/profiles/per-user/root/channels");
if (!evalSettings.restrictEval && !evalSettings.pureEval) {
add(getHome() + "/.nix-defexpr/channels");
add(settings.nixStateDir + "/profiles/per-user/root/channels/nixpkgs", "nixpkgs");
add(settings.nixStateDir + "/profiles/per-user/root/channels");
}
return res;
}


@ -5,6 +5,7 @@
#include "nixexpr.hh"
#include "symbol-table.hh"
#include "config.hh"
#include "experimental-features.hh"
#include <map>
#include <optional>
@ -94,8 +95,14 @@ public:
Value vEmptySet;
/* Store used to materialise .drv files. */
const ref<Store> store;
/* Store used to build stuff. */
const ref<Store> buildStore;
RootValue vCallFlake = nullptr;
RootValue vImportedDrvToDerivation = nullptr;
private:
SrcToStore srcToStore;
@ -128,13 +135,31 @@ private:
public:
EvalState(const Strings & _searchPath, ref<Store> store);
EvalState(
const Strings & _searchPath,
ref<Store> store,
std::shared_ptr<Store> buildStore = nullptr);
~EvalState();
void requireExperimentalFeatureOnEvaluation(
const ExperimentalFeature &,
const std::string_view fName,
const Pos & pos
);
void addToSearchPath(const string & s);
SearchPath getSearchPath() { return searchPath; }
/* Allow access to a path. */
void allowPath(const Path & path);
/* Allow access to a store path. Note that this gets remapped to
the real store path if `store` is a chroot store. */
void allowPath(const StorePath & storePath);
/* Check whether access to a path is allowed and throw an error if
not. Otherwise return the canonicalised path. */
Path checkSourcePath(const Path & path);
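allowPath and checkSourcePath amount to an allow-list for restricted and pure evaluation: reads are only permitted under prefixes that were explicitly allowed, with store paths remapped when a chroot store is in use. A minimal sketch of that check, assuming a plain prefix match is enough:

#include <set>
#include <stdexcept>
#include <string>

static std::set<std::string> allowedPaths;

void allowPath(const std::string & path) { allowedPaths.insert(path); }

// Return the path if it lies under an allowed prefix, otherwise refuse.
std::string checkSourcePath(const std::string & path)
{
    for (auto & prefix : allowedPaths)
        if (path == prefix || path.compare(0, prefix.size() + 1, prefix + "/") == 0)
            return path;
    throw std::runtime_error("access to path '" + path + "' is forbidden");
}

int main()
{
    allowPath("/nix/store/abc-source");
    checkSourcePath("/nix/store/abc-source/default.nix"); // ok
    try { checkSourcePath("/etc/passwd"); }                // throws
    catch (const std::exception &) { }
}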
void checkURI(const std::string & uri);
@ -163,6 +188,14 @@ public:
trivial (i.e. doesn't require arbitrary computation). */
void evalFile(const Path & path, Value & v, bool mustBeTrivial = false);
/* Like `cacheFile`, but with an already parsed expression. */
void cacheFile(
const Path & path,
const Path & resolvedPath,
Expr * e,
Value & v,
bool mustBeTrivial = false);
void resetFileCache();
/* Look up a file in the search path. */
@ -217,7 +250,8 @@ public:
booleans and lists to a string. If `copyToStore' is set,
referenced paths are copied to the Nix store as a side effect. */
string coerceToString(const Pos & pos, Value & v, PathSet & context,
bool coerceMore = false, bool copyToStore = true);
bool coerceMore = false, bool copyToStore = true,
bool canonicalizePath = true);
string copyPathToStore(PathSet & context, const Path & path);
@ -301,7 +335,7 @@ public:
void mkList(Value & v, size_t length);
void mkAttrs(Value & v, size_t capacity);
void mkThunk_(Value & v, Expr * expr);
void mkPos(Value & v, Pos * pos);
void mkPos(Value & v, ptr<Pos> pos);
void concatLists(Value & v, size_t nrLists, Value * * lists, const Pos & pos);
@ -316,8 +350,10 @@ private:
unsigned long nrValuesInEnvs = 0;
unsigned long nrValues = 0;
unsigned long nrListElems = 0;
unsigned long nrLookups = 0;
unsigned long nrAttrsets = 0;
unsigned long nrAttrsInAttrsets = 0;
unsigned long nrAvoided = 0;
unsigned long nrOpUpdates = 0;
unsigned long nrOpUpdateValuesCopied = 0;
unsigned long nrListConcats = 0;
@ -339,6 +375,11 @@ private:
friend struct ExprOpUpdate;
friend struct ExprOpConcatLists;
friend struct ExprVar;
friend struct ExprString;
friend struct ExprInt;
friend struct ExprFloat;
friend struct ExprPath;
friend struct ExprSelect;
friend void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v);
friend void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v);


@ -22,12 +22,14 @@ static TrustedList readTrustedList()
static void writeTrustedList(const TrustedList & trustedList)
{
writeFile(trustedListPath(), nlohmann::json(trustedList).dump());
auto path = trustedListPath();
createDirs(dirOf(path));
writeFile(path, nlohmann::json(trustedList).dump());
}
void ConfigFile::apply()
{
std::set<std::string> whitelist{"bash-prompt", "bash-prompt-suffix"};
std::set<std::string> whitelist{"bash-prompt", "bash-prompt-suffix", "flake-registry"};
for (auto & [name, value] : settings) {


@ -1,4 +1,5 @@
#include "flake.hh"
#include "eval.hh"
#include "lockfile.hh"
#include "primops.hh"
#include "eval-inline.hh"
@ -63,8 +64,7 @@ static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
debug("got tree '%s' from '%s'",
state.store->printStorePath(tree.storePath), lockedRef);
if (state.allowedPaths)
state.allowedPaths->insert(tree.actualPath);
state.allowPath(tree.storePath);
assert(!originalRef.input.getNarHash() || tree.storePath == originalRef.input.computeStorePath(*state.store));
@ -88,10 +88,12 @@ static void expectType(EvalState & state, ValueType type,
}
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
EvalState & state, Value * value, const Pos & pos);
EvalState & state, Value * value, const Pos & pos,
const std::optional<Path> & baseDir);
static FlakeInput parseFlakeInput(EvalState & state,
const std::string & inputName, Value * value, const Pos & pos)
const std::string & inputName, Value * value, const Pos & pos,
const std::optional<Path> & baseDir)
{
expectType(state, nAttrs, *value, pos);
@ -115,16 +117,25 @@ static FlakeInput parseFlakeInput(EvalState & state,
expectType(state, nBool, *attr.value, *attr.pos);
input.isFlake = attr.value->boolean;
} else if (attr.name == sInputs) {
input.overrides = parseFlakeInputs(state, attr.value, *attr.pos);
input.overrides = parseFlakeInputs(state, attr.value, *attr.pos, baseDir);
} else if (attr.name == sFollows) {
expectType(state, nString, *attr.value, *attr.pos);
input.follows = parseInputPath(attr.value->string.s);
} else {
if (attr.value->type() == nString)
attrs.emplace(attr.name, attr.value->string.s);
else
throw TypeError("flake input attribute '%s' is %s while a string is expected",
attr.name, showType(*attr.value));
switch (attr.value->type()) {
case nString:
attrs.emplace(attr.name, attr.value->string.s);
break;
case nBool:
attrs.emplace(attr.name, Explicit<bool> { attr.value->boolean });
break;
case nInt:
attrs.emplace(attr.name, (long unsigned int)attr.value->integer);
break;
default:
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
attr.name, showType(*attr.value));
}
}
} catch (Error & e) {
e.addTrace(*attr.pos, hintfmt("in flake attribute '%s'", attr.name));
@ -144,7 +155,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
if (!attrs.empty())
throw Error("unexpected flake input attribute '%s', at %s", attrs.begin()->first, pos);
if (url)
input.ref = parseFlakeRef(*url, {}, true);
input.ref = parseFlakeRef(*url, baseDir, true);
}
if (!input.follows && !input.ref)
@ -154,7 +165,8 @@ static FlakeInput parseFlakeInput(EvalState & state,
}
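Besides taking a baseDir so relative flake references resolve against the declaring flake, parseFlakeInput now accepts Boolean and integer input attributes in addition to strings, forwarding them to the fetcher as typed values. A rough sketch of holding such mixed attributes with std::variant (the attribute names and URL are only examples):

#include <cstdint>
#include <map>
#include <string>
#include <variant>

// Explicit<bool> in Nix distinguishes "really a Boolean" from an integer 0/1;
// a plain variant is enough for this sketch.
using AttrValue = std::variant<std::string, bool, uint64_t>;
using Attrs = std::map<std::string, AttrValue>;

int main()
{
    Attrs attrs;
    attrs.emplace("url", std::string("git+https://example.org/repo"));
    attrs.emplace("submodules", true);       // Boolean attribute, previously rejected
    attrs.emplace("revCount", uint64_t{42}); // integer attribute, previously rejected
    return attrs.size() == 3 ? 0 : 1;
}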
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
EvalState & state, Value * value, const Pos & pos)
EvalState & state, Value * value, const Pos & pos,
const std::optional<Path> & baseDir)
{
std::map<FlakeId, FlakeInput> inputs;
@ -165,7 +177,8 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
parseFlakeInput(state,
inputAttr.name,
inputAttr.value,
*inputAttr.pos));
*inputAttr.pos,
baseDir));
}
return inputs;
@ -181,7 +194,8 @@ static Flake getFlake(
state, originalRef, allowLookup, flakeCache);
// Guard against symlink attacks.
auto flakeFile = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir + "/flake.nix");
auto flakeDir = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir);
auto flakeFile = canonPath(flakeDir + "/flake.nix");
if (!isInDir(flakeFile, sourceInfo.actualPath))
throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
lockedRef, state.store->printStorePath(sourceInfo.storePath));
@ -209,14 +223,14 @@ static Flake getFlake(
auto sInputs = state.symbols.create("inputs");
if (auto inputs = vInfo.attrs->get(sInputs))
flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos);
flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos, flakeDir);
auto sOutputs = state.symbols.create("outputs");
if (auto outputs = vInfo.attrs->get(sOutputs)) {
expectType(state, nFunction, *outputs->value, *outputs->pos);
if (outputs->value->isLambda() && outputs->value->lambda.fun->matchAttrs) {
if (outputs->value->isLambda() && outputs->value->lambda.fun->hasFormals()) {
for (auto & formal : outputs->value->lambda.fun->formals->formals) {
if (formal.name != state.sSelf)
flake.inputs.emplace(formal.name, FlakeInput {
@ -283,292 +297,360 @@ LockedFlake lockFlake(
const FlakeRef & topRef,
const LockFlags & lockFlags)
{
settings.requireExperimentalFeature("flakes");
settings.requireExperimentalFeature(Xp::Flakes);
Activity act(*logger, lvlTalkative, actLockFlake);
FlakeCache flakeCache;
auto flake = getFlake(state, topRef, lockFlags.useRegistries, flakeCache);
auto useRegistries = lockFlags.useRegistries.value_or(settings.useRegistries);
// FIXME: symlink attack
auto oldLockFile = LockFile::read(
flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir + "/flake.lock");
auto flake = getFlake(state, topRef, useRegistries, flakeCache);
debug("old lock file: %s", oldLockFile);
if (lockFlags.applyNixConfig) {
flake.config.apply();
// FIXME: send new config to the daemon.
}
// FIXME: check whether all overrides are used.
std::map<InputPath, FlakeInput> overrides;
std::set<InputPath> overridesUsed, updatesUsed;
try {
for (auto & i : lockFlags.inputOverrides)
overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
// FIXME: symlink attack
auto oldLockFile = LockFile::read(
flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir + "/flake.lock");
LockFile newLockFile;
debug("old lock file: %s", oldLockFile);
std::vector<FlakeRef> parents;
// FIXME: check whether all overrides are used.
std::map<InputPath, FlakeInput> overrides;
std::set<InputPath> overridesUsed, updatesUsed;
std::function<void(
const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node,
const InputPath & inputPathPrefix,
std::shared_ptr<const Node> oldNode)>
computeLocks;
for (auto & i : lockFlags.inputOverrides)
overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
computeLocks = [&](
const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node,
const InputPath & inputPathPrefix,
std::shared_ptr<const Node> oldNode)
{
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
LockFile newLockFile;
/* Get the overrides (i.e. attributes of the form
'inputs.nixops.inputs.nixpkgs.url = ...'). */
// FIXME: check this
for (auto & [id, input] : flake.inputs) {
for (auto & [idOverride, inputOverride] : input.overrides) {
std::vector<FlakeRef> parents;
struct LockParent {
/* The path to this parent. */
InputPath path;
/* Whether we are currently inside a top-level lockfile
(inputs absolute) or subordinate lockfile (inputs
relative). */
bool absolute;
};
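A 'follows' path is kept as written when it comes from a top-level lock file and is not overridden, and is otherwise re-rooted, either at the overriding flake or at the declaring flake, which is what LockParent records. A simplified standalone sketch of the branching used further down, with InputPath as a plain vector of names (a sketch of this commit's rule, not a general specification):

#include <string>
#include <vector>

using InputPath = std::vector<std::string>;

// parentPath:       where the declaring flake sits in the lock graph.
// inputPathPrefix:  path of the node whose inputs are being processed.
// absolute:         true when the declaration comes from a top-level lock file.
// Assumes well-formed inputs (e.g. a non-empty prefix when hasOverride is set).
InputPath resolveFollows(const InputPath & follows,
                         const InputPath & parentPath,
                         const InputPath & inputPathPrefix,
                         bool absolute, bool hasOverride)
{
    if (absolute && !hasOverride)
        return follows;                  // already rooted at the top level
    InputPath target;
    if (hasOverride) {
        target = inputPathPrefix;
        target.pop_back();               // overrides are rooted one level up
    } else
        target = parentPath;             // relative to the declaring flake
    for (auto & seg : follows) target.push_back(seg);
    return target;
}

int main()
{
    // 'nixpkgs' declared as a follows inside the 'nixops' input, relative form:
    auto t = resolveFollows({"nixpkgs"}, {"nixops"}, {}, /*absolute=*/false, /*hasOverride=*/false);
    return t.size() == 2 ? 0 : 1;        // t is {"nixops", "nixpkgs"}
}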
std::function<void(
const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node,
const InputPath & inputPathPrefix,
std::shared_ptr<const Node> oldNode,
const LockParent & parent,
const Path & parentPath)>
computeLocks;
computeLocks = [&](
const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node,
const InputPath & inputPathPrefix,
std::shared_ptr<const Node> oldNode,
const LockParent & parent,
const Path & parentPath)
{
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
/* Get the overrides (i.e. attributes of the form
'inputs.nixops.inputs.nixpkgs.url = ...'). */
for (auto & [id, input] : flakeInputs) {
for (auto & [idOverride, inputOverride] : input.overrides) {
auto inputPath(inputPathPrefix);
inputPath.push_back(id);
inputPath.push_back(idOverride);
overrides.insert_or_assign(inputPath, inputOverride);
}
}
/* Go over the flake inputs, resolve/fetch them if
necessary (i.e. if they're new or the flakeref changed
from what's in the lock file). */
for (auto & [id, input2] : flakeInputs) {
auto inputPath(inputPathPrefix);
inputPath.push_back(id);
inputPath.push_back(idOverride);
overrides.insert_or_assign(inputPath, inputOverride);
}
}
auto inputPathS = printInputPath(inputPath);
debug("computing input '%s'", inputPathS);
/* Go over the flake inputs, resolve/fetch them if
necessary (i.e. if they're new or the flakeref changed
from what's in the lock file). */
for (auto & [id, input2] : flakeInputs) {
auto inputPath(inputPathPrefix);
inputPath.push_back(id);
auto inputPathS = printInputPath(inputPath);
debug("computing input '%s'", inputPathS);
try {
/* Do we have an override for this input from one of the
ancestors? */
auto i = overrides.find(inputPath);
bool hasOverride = i != overrides.end();
if (hasOverride) overridesUsed.insert(inputPath);
auto & input = hasOverride ? i->second : input2;
/* Do we have an override for this input from one of the
ancestors? */
auto i = overrides.find(inputPath);
bool hasOverride = i != overrides.end();
if (hasOverride) {
overridesUsed.insert(inputPath);
// Respect the “flakeness” of the input even if we
// override it
i->second.isFlake = input2.isFlake;
}
auto & input = hasOverride ? i->second : input2;
/* Resolve 'follows' later (since it may refer to an input
path we haven't processed yet). */
if (input.follows) {
InputPath target;
if (hasOverride || input.absolute)
/* 'follows' from an override is relative to the
root of the graph. */
target = *input.follows;
else {
/* Otherwise, it's relative to the current flake. */
target = inputPathPrefix;
for (auto & i : *input.follows) target.push_back(i);
}
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
node->inputs.insert_or_assign(id, target);
continue;
}
/* Resolve 'follows' later (since it may refer to an input
path we haven't processed yet). */
if (input.follows) {
InputPath target;
assert(input.ref);
if (parent.absolute && !hasOverride) {
target = *input.follows;
} else {
if (hasOverride) {
target = inputPathPrefix;
target.pop_back();
} else
target = parent.path;
/* Do we have an entry in the existing lock file? And we
don't have a --update-input flag for this input? */
std::shared_ptr<LockedNode> oldLock;
for (auto & i : *input.follows) target.push_back(i);
}
updatesUsed.insert(inputPath);
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
node->inputs.insert_or_assign(id, target);
continue;
}
if (oldNode && !lockFlags.inputUpdates.count(inputPath))
if (auto oldLock2 = get(oldNode->inputs, id))
if (auto oldLock3 = std::get_if<0>(&*oldLock2))
oldLock = *oldLock3;
assert(input.ref);
if (oldLock
&& oldLock->originalRef == *input.ref
&& !hasOverride)
{
debug("keeping existing input '%s'", inputPathS);
/* Do we have an entry in the existing lock file? And we
don't have a --update-input flag for this input? */
std::shared_ptr<LockedNode> oldLock;
/* Copy the input from the old lock since its flakeref
didn't change and there is no override from a
higher level flake. */
auto childNode = std::make_shared<LockedNode>(
oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake);
updatesUsed.insert(inputPath);
node->inputs.insert_or_assign(id, childNode);
if (oldNode && !lockFlags.inputUpdates.count(inputPath))
if (auto oldLock2 = get(oldNode->inputs, id))
if (auto oldLock3 = std::get_if<0>(&*oldLock2))
oldLock = *oldLock3;
/* If we have an --update-input flag for an input
of this input, then we must fetch the flake to
update it. */
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
if (oldLock
&& oldLock->originalRef == *input.ref
&& !hasOverride)
{
debug("keeping existing input '%s'", inputPathS);
auto hasChildUpdate =
lb != lockFlags.inputUpdates.end()
&& lb->size() > inputPath.size()
&& std::equal(inputPath.begin(), inputPath.end(), lb->begin());
/* Copy the input from the old lock since its flakeref
didn't change and there is no override from a
higher level flake. */
auto childNode = std::make_shared<LockedNode>(
oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake);
if (hasChildUpdate) {
auto inputFlake = getFlake(
state, oldLock->lockedRef, false, flakeCache);
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock);
} else {
/* No need to fetch this flake, we can be
lazy. However there may be new overrides on the
inputs of this flake, so we need to check
those. */
FlakeInputs fakeInputs;
node->inputs.insert_or_assign(id, childNode);
for (auto & i : oldLock->inputs) {
if (auto lockedNode = std::get_if<0>(&i.second)) {
fakeInputs.emplace(i.first, FlakeInput {
.ref = (*lockedNode)->originalRef,
.isFlake = (*lockedNode)->isFlake,
});
} else if (auto follows = std::get_if<1>(&i.second)) {
fakeInputs.emplace(i.first, FlakeInput {
.follows = *follows,
.absolute = true
});
/* If we have an --update-input flag for an input
of this input, then we must fetch the flake to
update it. */
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
auto hasChildUpdate =
lb != lockFlags.inputUpdates.end()
&& lb->size() > inputPath.size()
&& std::equal(inputPath.begin(), inputPath.end(), lb->begin());
if (hasChildUpdate) {
auto inputFlake = getFlake(
state, oldLock->lockedRef, false, flakeCache);
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, parent, parentPath);
} else {
/* No need to fetch this flake, we can be
lazy. However there may be new overrides on the
inputs of this flake, so we need to check
those. */
FlakeInputs fakeInputs;
for (auto & i : oldLock->inputs) {
if (auto lockedNode = std::get_if<0>(&i.second)) {
fakeInputs.emplace(i.first, FlakeInput {
.ref = (*lockedNode)->originalRef,
.isFlake = (*lockedNode)->isFlake,
});
} else if (auto follows = std::get_if<1>(&i.second)) {
fakeInputs.emplace(i.first, FlakeInput {
.follows = *follows,
});
}
}
computeLocks(fakeInputs, childNode, inputPath, oldLock, parent, parentPath);
}
} else {
/* We need to create a new lock file entry. So fetch
this input. */
debug("creating new input '%s'", inputPathS);
if (!lockFlags.allowMutable && !input.ref->input.isImmutable())
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
if (input.isFlake) {
Path localPath = parentPath;
FlakeRef localRef = *input.ref;
// If this input is a path, recurse it down.
// This allows us to resolve path inputs relative to the current flake.
if (localRef.input.getType() == "path")
localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache);
/* Note: in case of an --override-input, we use
the *original* ref (input2.ref) for the
"original" field, rather than the
override. This ensures that the override isn't
nuked the next time we update the lock
file. That is, overrides are sticky unless you
use --no-write-lock-file. */
auto childNode = std::make_shared<LockedNode>(
inputFlake.lockedRef, input2.ref ? *input2.ref : *input.ref);
node->inputs.insert_or_assign(id, childNode);
/* Guard against circular flake imports. */
for (auto & parent : parents)
if (parent == *input.ref)
throw Error("found circular import of flake '%s'", parent);
parents.push_back(*input.ref);
Finally cleanup([&]() { parents.pop_back(); });
// Follows paths from existing inputs in the top-level lockfile are absolute,
// whereas paths in subordinate lockfiles are relative to those lockfiles.
LockParent newParent {
.path = inputPath,
.absolute = oldLock ? true : false
};
/* Recursively process the inputs of this
flake. Also, unless we already have this flake
in the top-level lock file, use this flake's
own lock file. */
computeLocks(
inputFlake.inputs, childNode, inputPath,
oldLock
? std::dynamic_pointer_cast<const Node>(oldLock)
: LockFile::read(
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root,
newParent, localPath);
}
else {
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, *input.ref, useRegistries, flakeCache);
node->inputs.insert_or_assign(id,
std::make_shared<LockedNode>(lockedRef, *input.ref, false));
}
}
computeLocks(fakeInputs, childNode, inputPath, oldLock);
}
} else {
/* We need to create a new lock file entry. So fetch
this input. */
debug("creating new input '%s'", inputPathS);
if (!lockFlags.allowMutable && !input.ref->input.isImmutable())
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
if (input.isFlake) {
auto inputFlake = getFlake(state, *input.ref, lockFlags.useRegistries, flakeCache);
/* Note: in case of an --override-input, we use
the *original* ref (input2.ref) for the
"original" field, rather than the
override. This ensures that the override isn't
nuked the next time we update the lock
file. That is, overrides are sticky unless you
use --no-write-lock-file. */
auto childNode = std::make_shared<LockedNode>(
inputFlake.lockedRef, input2.ref ? *input2.ref : *input.ref);
node->inputs.insert_or_assign(id, childNode);
/* Guard against circular flake imports. */
for (auto & parent : parents)
if (parent == *input.ref)
throw Error("found circular import of flake '%s'", parent);
parents.push_back(*input.ref);
Finally cleanup([&]() { parents.pop_back(); });
/* Recursively process the inputs of this
flake. Also, unless we already have this flake
in the top-level lock file, use this flake's
own lock file. */
computeLocks(
inputFlake.inputs, childNode, inputPath,
oldLock
? std::dynamic_pointer_cast<const Node>(oldLock)
: LockFile::read(
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root);
}
else {
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, *input.ref, lockFlags.useRegistries, flakeCache);
node->inputs.insert_or_assign(id,
std::make_shared<LockedNode>(lockedRef, *input.ref, false));
} catch (Error & e) {
e.addTrace({}, "while updating the flake input '%s'", inputPathS);
throw;
}
}
};
LockParent parent {
.path = {},
.absolute = true
};
// Bring in the current ref for relative path resolution if we have it
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir);
computeLocks(
flake.inputs, newLockFile.root, {},
lockFlags.recreateLockFile ? nullptr : oldLockFile.root, parent, parentPath);
for (auto & i : lockFlags.inputOverrides)
if (!overridesUsed.count(i.first))
warn("the flag '--override-input %s %s' does not match any input",
printInputPath(i.first), i.second);
for (auto & i : lockFlags.inputUpdates)
if (!updatesUsed.count(i))
warn("the flag '--update-input %s' does not match any input", printInputPath(i));
/* Check 'follows' inputs. */
newLockFile.check();
debug("new lock file: %s", newLockFile);
/* Check whether we need to / can write the new lock file. */
if (!(newLockFile == oldLockFile)) {
auto diff = LockFile::diff(oldLockFile, newLockFile);
if (lockFlags.writeLockFile) {
if (auto sourcePath = topRef.input.getSourcePath()) {
if (!newLockFile.isImmutable()) {
if (settings.warnDirty)
warn("will not write lock file of flake '%s' because it has a mutable input", topRef);
} else {
if (!lockFlags.updateLockFile)
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
auto path = *sourcePath + "/" + relPath;
bool lockFileExists = pathExists(path);
if (lockFileExists) {
auto s = chomp(diff);
if (s.empty())
warn("updating lock file '%s'", path);
else
warn("updating lock file '%s':\n%s", path, s);
} else
warn("creating lock file '%s'", path);
newLockFile.write(path);
topRef.input.markChangedFile(
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
lockFlags.commitLockFile
? std::optional<std::string>(fmt("%s: %s\n\nFlake lock file changes:\n\n%s",
relPath, lockFileExists ? "Update" : "Add", filterANSIEscapes(diff, true)))
: std::nullopt);
/* Rewriting the lockfile changed the top-level
repo, so we should re-read it. FIXME: we could
also just clear the 'rev' field... */
auto prevLockedRef = flake.lockedRef;
FlakeCache dummyCache;
flake = getFlake(state, topRef, useRegistries, dummyCache);
if (lockFlags.commitLockFile &&
flake.lockedRef.input.getRev() &&
prevLockedRef.input.getRev() != flake.lockedRef.input.getRev())
warn("committed new revision '%s'", flake.lockedRef.input.getRev()->gitRev());
/* Make sure that we picked up the change,
i.e. the tree should usually be dirty
now. Corner case: we could have reverted from a
dirty to a clean tree! */
if (flake.lockedRef.input == prevLockedRef.input
&& !flake.lockedRef.input.isImmutable())
throw Error("'%s' did not change after I updated its 'flake.lock' file; is 'flake.lock' under version control?", flake.originalRef);
}
} else
throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
} else {
warn("not writing modified lock file of flake '%s':\n%s", topRef, chomp(diff));
flake.forceDirty = true;
}
}
};
computeLocks(
flake.inputs, newLockFile.root, {},
lockFlags.recreateLockFile ? nullptr : oldLockFile.root);
return LockedFlake { .flake = std::move(flake), .lockFile = std::move(newLockFile) };
for (auto & i : lockFlags.inputOverrides)
if (!overridesUsed.count(i.first))
warn("the flag '--override-input %s %s' does not match any input",
printInputPath(i.first), i.second);
for (auto & i : lockFlags.inputUpdates)
if (!updatesUsed.count(i))
warn("the flag '--update-input %s' does not match any input", printInputPath(i));
/* Check 'follows' inputs. */
newLockFile.check();
debug("new lock file: %s", newLockFile);
/* Check whether we need to / can write the new lock file. */
if (!(newLockFile == oldLockFile)) {
auto diff = LockFile::diff(oldLockFile, newLockFile);
if (lockFlags.writeLockFile) {
if (auto sourcePath = topRef.input.getSourcePath()) {
if (!newLockFile.isImmutable()) {
if (settings.warnDirty)
warn("will not write lock file of flake '%s' because it has a mutable input", topRef);
} else {
if (!lockFlags.updateLockFile)
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
auto path = *sourcePath + "/" + relPath;
bool lockFileExists = pathExists(path);
if (lockFileExists) {
auto s = chomp(diff);
if (s.empty())
warn("updating lock file '%s'", path);
else
warn("updating lock file '%s':\n%s", path, s);
} else
warn("creating lock file '%s'", path);
newLockFile.write(path);
topRef.input.markChangedFile(
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
lockFlags.commitLockFile
? std::optional<std::string>(fmt("%s: %s\n\nFlake input changes:\n\n%s",
relPath, lockFileExists ? "Update" : "Add", diff))
: std::nullopt);
/* Rewriting the lockfile changed the top-level
repo, so we should re-read it. FIXME: we could
also just clear the 'rev' field... */
auto prevLockedRef = flake.lockedRef;
FlakeCache dummyCache;
flake = getFlake(state, topRef, lockFlags.useRegistries, dummyCache);
if (lockFlags.commitLockFile &&
flake.lockedRef.input.getRev() &&
prevLockedRef.input.getRev() != flake.lockedRef.input.getRev())
warn("committed new revision '%s'", flake.lockedRef.input.getRev()->gitRev());
/* Make sure that we picked up the change,
i.e. the tree should usually be dirty
now. Corner case: we could have reverted from a
dirty to a clean tree! */
if (flake.lockedRef.input == prevLockedRef.input
&& !flake.lockedRef.input.isImmutable())
throw Error("'%s' did not change after I updated its 'flake.lock' file; is 'flake.lock' under version control?", flake.originalRef);
}
} else
throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
} else
warn("not writing modified lock file of flake '%s':\n%s", topRef, chomp(diff));
} catch (Error & e) {
e.addTrace({}, "while updating the lock file of flake '%s'", flake.lockedRef.to_string());
throw;
}
return LockedFlake { .flake = std::move(flake), .lockFile = std::move(newLockFile) };
}
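At its core, computeLocks reuses a node from the old lock file whenever the input's original reference is unchanged and no override applies, and fetches and locks the input afresh otherwise. A much-reduced standalone sketch of that reuse decision over a toy input graph (fetchAndLock fakes the fetch; the flake references are only examples):

#include <map>
#include <memory>
#include <string>

struct Node {
    std::string originalRef, lockedRef;
    std::map<std::string, std::shared_ptr<Node>> inputs;
};

// Pretend to fetch a flake reference and pin it to a revision.
static std::shared_ptr<Node> fetchAndLock(const std::string & ref)
{
    auto n = std::make_shared<Node>();
    n->originalRef = ref;
    n->lockedRef = ref + "?rev=deadbeef";
    return n;
}

// inputs: what the flake declares; oldNode: the matching node of the old lock file.
static void computeLocks(const std::map<std::string, std::string> & inputs,
                         const std::shared_ptr<Node> & node,
                         const std::shared_ptr<const Node> & oldNode)
{
    for (auto & [id, ref] : inputs) {
        std::shared_ptr<const Node> oldLock;
        if (oldNode) {
            auto i = oldNode->inputs.find(id);
            if (i != oldNode->inputs.end()) oldLock = i->second;
        }
        if (oldLock && oldLock->originalRef == ref) {
            // Unchanged: copy the locked node instead of fetching again.
            node->inputs[id] = std::make_shared<Node>(*oldLock);
        } else {
            // New or changed: fetch it and lock it afresh.
            node->inputs[id] = fetchAndLock(ref);
        }
    }
}

int main()
{
    auto oldRoot = std::make_shared<Node>();
    oldRoot->inputs["nixpkgs"] = fetchAndLock("github:NixOS/nixpkgs");

    auto newRoot = std::make_shared<Node>();
    computeLocks({{"nixpkgs", "github:NixOS/nixpkgs"},
                  {"flake-utils", "github:numtide/flake-utils"}},
                 newRoot, oldRoot);
    return newRoot->inputs.size() == 2 ? 0 : 1;
}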
void callFlake(EvalState & state,
@ -583,26 +665,32 @@ void callFlake(EvalState & state,
mkString(*vLocks, lockedFlake.lockFile.to_string());
emitTreeAttrs(state, *lockedFlake.flake.sourceInfo, lockedFlake.flake.lockedRef.input, *vRootSrc);
emitTreeAttrs(
state,
*lockedFlake.flake.sourceInfo,
lockedFlake.flake.lockedRef.input,
*vRootSrc,
false,
lockedFlake.flake.forceDirty);
mkString(*vRootSubdir, lockedFlake.flake.lockedRef.subdir);
static RootValue vCallFlake = nullptr;
if (!vCallFlake) {
vCallFlake = allocRootValue(state.allocValue());
if (!state.vCallFlake) {
state.vCallFlake = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "call-flake.nix.gen.hh"
, "/"), **vCallFlake);
, "/"), **state.vCallFlake);
}
state.callFunction(**vCallFlake, *vLocks, *vTmp1, noPos);
state.callFunction(**state.vCallFlake, *vLocks, *vTmp1, noPos);
state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos);
state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos);
}
static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.requireExperimentalFeatureOnEvaluation(Xp::Flakes, "builtins.getFlake", pos);
auto flakeRefS = state.forceStringNoCtx(*args[0], pos);
auto flakeRef = parseFlakeRef(flakeRefS, {}, true);
if (evalSettings.pureEval && !flakeRef.input.isImmutable())
@ -612,13 +700,13 @@ static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Va
lockFlake(state, flakeRef,
LockFlags {
.updateLockFile = false,
.useRegistries = !evalSettings.pureEval,
.useRegistries = !evalSettings.pureEval && settings.useRegistries,
.allowMutable = !evalSettings.pureEval,
}),
v);
}
static RegisterPrimOp r2("__getFlake", 1, prim_getFlake, "flakes");
static RegisterPrimOp r2("__getFlake", 1, prim_getFlake);
}
@ -628,8 +716,9 @@ Fingerprint LockedFlake::getFingerprint() const
// and we haven't changed it, then it's sufficient to use
// flake.sourceInfo.storePath for the fingerprint.
return hashString(htSHA256,
fmt("%s;%d;%d;%s",
fmt("%s;%s;%d;%d;%s",
flake.sourceInfo->storePath.to_string(),
flake.lockedRef.subdir,
flake.lockedRef.input.getRevCount().value_or(0),
flake.lockedRef.input.getLastModified().value_or(0),
lockFile));


@ -43,7 +43,6 @@ struct FlakeInput
std::optional<FlakeRef> ref;
bool isFlake = true; // true = process flake to get outputs, false = (fetched) static source path
std::optional<InputPath> follows;
bool absolute = false; // whether 'follows' is relative to the flake root
FlakeInputs overrides;
};
@ -59,9 +58,10 @@ struct ConfigFile
/* The contents of a flake.nix file. */
struct Flake
{
FlakeRef originalRef; // the original flake specification (by the user)
FlakeRef resolvedRef; // registry references and caching resolved to the specific underlying flake
FlakeRef lockedRef; // the specific local store result of invoking the fetcher
FlakeRef originalRef; // the original flake specification (by the user)
FlakeRef resolvedRef; // registry references and caching resolved to the specific underlying flake
FlakeRef lockedRef; // the specific local store result of invoking the fetcher
bool forceDirty = false; // pretend that 'lockedRef' is dirty
std::optional<std::string> description;
std::shared_ptr<const fetchers::Tree> sourceInfo;
FlakeInputs inputs;
@ -102,7 +102,11 @@ struct LockFlags
/* Whether to use the registries to lookup indirect flake
references like 'nixpkgs'. */
bool useRegistries = true;
std::optional<bool> useRegistries = std::nullopt;
/* Whether to apply flake's nixConfig attribute to the configuration */
bool applyNixConfig = false;
/* Whether mutable flake references (i.e. those without a Git
revision or similar) without a corresponding lock are
@ -113,7 +117,7 @@ struct LockFlags
/* Whether to commit changes to flake.lock. */
bool commitLockFile = false;
/* Flake inputs to be overriden. */
/* Flake inputs to be overridden. */
std::map<InputPath, FlakeRef> inputOverrides;
/* Flake inputs to be updated. This means that any existing lock
@ -137,6 +141,8 @@ void emitTreeAttrs(
EvalState & state,
const fetchers::Tree & tree,
const fetchers::Input & input,
Value & v, bool emptyRevFallback = false);
Value & v,
bool emptyRevFallback = false,
bool forceDirty = false);
}


@ -172,8 +172,12 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
auto parsedURL = parseURL(url);
std::string fragment;
std::swap(fragment, parsedURL.fragment);
auto input = Input::fromURL(parsedURL);
input.parent = baseDir;
return std::make_pair(
FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
FlakeRef(std::move(input), get(parsedURL.query, "dir").value_or("")),
fragment);
}
}


@ -2,6 +2,8 @@
#include "store-api.hh"
#include "url-parts.hh"
#include <iomanip>
#include <nlohmann/json.hpp>
namespace nix::flake {
@ -268,10 +270,20 @@ std::map<InputPath, Node::Edge> LockFile::getAllInputs() const
return res;
}
static std::string describe(const FlakeRef & flakeRef)
{
auto s = fmt("'%s'", flakeRef.to_string());
if (auto lastModified = flakeRef.input.getLastModified())
s += fmt(" (%s)", std::put_time(std::gmtime(&*lastModified), "%Y-%m-%d"));
return s;
}
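describe() appends the input's last-modified date to the flake reference; the std::gmtime plus std::put_time idiom it uses can be seen in isolation in this small standalone example:

#include <ctime>
#include <iomanip>
#include <iostream>

int main()
{
    std::time_t lastModified = 1635942115;   // some Unix timestamp
    // Format the UTC date of the timestamp; prints 2021-11-03.
    std::cout << std::put_time(std::gmtime(&lastModified), "%Y-%m-%d") << "\n";
}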
std::ostream & operator <<(std::ostream & stream, const Node::Edge & edge)
{
if (auto node = std::get_if<0>(&edge))
stream << "'" << (*node)->lockedRef << "'";
stream << describe((*node)->lockedRef);
else if (auto follows = std::get_if<1>(&edge))
stream << fmt("follows '%s'", printInputPath(*follows));
return stream;
@ -299,14 +311,15 @@ std::string LockFile::diff(const LockFile & oldLocks, const LockFile & newLocks)
while (i != oldFlat.end() || j != newFlat.end()) {
if (j != newFlat.end() && (i == oldFlat.end() || i->first > j->first)) {
res += fmt("* Added '%s': %s\n", printInputPath(j->first), j->second);
res += fmt("" ANSI_GREEN "Added input '%s':" ANSI_NORMAL "\n %s\n",
printInputPath(j->first), j->second);
++j;
} else if (i != oldFlat.end() && (j == newFlat.end() || i->first < j->first)) {
res += fmt("* Removed '%s'\n", printInputPath(i->first));
res += fmt("" ANSI_RED "Removed input '%s'" ANSI_NORMAL "\n", printInputPath(i->first));
++i;
} else {
if (!equals(i->second, j->second)) {
res += fmt("* Updated '%s': %s -> %s\n",
res += fmt("" ANSI_BOLD "Updated input '%s':" ANSI_NORMAL "\n %s\n %s\n",
printInputPath(i->first),
i->second,
j->second);


@ -2,6 +2,7 @@
#include "util.hh"
#include "eval-inline.hh"
#include "store-api.hh"
#include "path-with-outputs.hh"
#include <cstring>
#include <regex>
@ -19,7 +20,7 @@ DrvInfo::DrvInfo(EvalState & state, const string & attrPath, Bindings * attrs)
DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs)
: state(&state), attrs(nullptr), attrPath("")
{
auto [drvPath, selectedOutputs] = store->parsePathWithOutputs(drvPathWithOutputs);
auto [drvPath, selectedOutputs] = parsePathWithOutputs(*store, drvPathWithOutputs);
this->drvPath = store->printStorePath(drvPath);
@ -214,8 +215,8 @@ NixInt DrvInfo::queryMetaInt(const string & name, NixInt def)
if (v->type() == nString) {
/* Backwards compatibility with before we had support for
integer meta fields. */
NixInt n;
if (string2Int(v->string.s, n)) return n;
if (auto n = string2Int<NixInt>(v->string.s))
return *n;
}
return def;
}
@ -228,8 +229,8 @@ NixFloat DrvInfo::queryMetaFloat(const string & name, NixFloat def)
if (v->type() == nString) {
/* Backwards compatibility with before we had support for
float meta fields. */
NixFloat n;
if (string2Float(v->string.s, n)) return n;
if (auto n = string2Float<NixFloat>(v->string.s))
return *n;
}
return def;
}
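queryMetaInt and queryMetaFloat now use the std::optional-returning string2Int/string2Float overloads instead of the old out-parameter versions, which makes the "parse or fall back to a default" call sites shorter. A standalone sketch of such a parser and the call-site pattern (string2Number is an illustrative name, not the Nix helper):

#include <cstdio>
#include <optional>
#include <sstream>
#include <string>

// Parse a whole string as T; nullopt if it isn't a valid literal.
template<typename T>
std::optional<T> string2Number(const std::string & s)
{
    std::istringstream in(s);
    T n;
    if (!(in >> n) || !in.eof()) return std::nullopt;
    return n;
}

int queryMetaInt(const std::string & s, int def)
{
    if (auto n = string2Number<int>(s))
        return *n;          // the string was a number after all
    return def;
}

int main()
{
    std::printf("%d %d\n", queryMetaInt("42", 0), queryMetaInt("not-a-number", 7)); // 42 7
}

Returning std::optional keeps the failure case explicit at the call site instead of relying on an untouched out-parameter.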


@ -9,6 +9,9 @@
%s DEFAULT
%x STRING
%x IND_STRING
%x INPATH
%x INPATH_SLASH
%x PATH_START
%{
@ -25,6 +28,8 @@ using namespace nix;
namespace nix {
// backup to recover from yyless(0)
YYLTYPE prev_yylloc;
static void initLoc(YYLTYPE * loc)
{
@ -35,14 +40,18 @@ static void initLoc(YYLTYPE * loc)
static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
{
prev_yylloc = *loc;
loc->first_line = loc->last_line;
loc->first_column = loc->last_column;
while (len--) {
for (size_t i = 0; i < len; i++) {
switch (*s++) {
case '\r':
if (*s == '\n') /* cr/lf */
if (*s == '\n') { /* cr/lf */
i++;
s++;
}
/* fall through */
case '\n':
++loc->last_line;
@ -95,9 +104,12 @@ ANY .|\n
ID [a-zA-Z\_][a-zA-Z0-9\_\'\-]*
INT [0-9]+
FLOAT (([1-9][0-9]*\.[0-9]*)|(0?\.[0-9]+))([Ee][+-]?[0-9]+)?
PATH [a-zA-Z0-9\.\_\-\+]*(\/[a-zA-Z0-9\.\_\-\+]+)+\/?
HPATH \~(\/[a-zA-Z0-9\.\_\-\+]+)+\/?
SPATH \<[a-zA-Z0-9\.\_\-\+]+(\/[a-zA-Z0-9\.\_\-\+]+)*\>
PATH_CHAR [a-zA-Z0-9\.\_\-\+]
PATH {PATH_CHAR}*(\/{PATH_CHAR}+)+\/?
PATH_SEG {PATH_CHAR}*\/
HPATH \~(\/{PATH_CHAR}+)+\/?
HPATH_START \~\/
SPATH \<{PATH_CHAR}+(\/{PATH_CHAR}+)*\>
URI [a-zA-Z][a-zA-Z0-9\+\-\.]*\:[a-zA-Z0-9\%\/\?\:\@\&\=\+\$\,\-\_\.\!\~\*\']+
@ -198,17 +210,75 @@ or { return OR_KW; }
return IND_STR;
}
{PATH_SEG}\$\{ |
{HPATH_START}\$\{ {
PUSH_STATE(PATH_START);
yyless(0);
*yylloc = prev_yylloc;
}
<PATH_START>{PATH_SEG} {
POP_STATE();
PUSH_STATE(INPATH_SLASH);
yylval->path = strdup(yytext);
return PATH;
}
<PATH_START>{HPATH_START} {
POP_STATE();
PUSH_STATE(INPATH_SLASH);
yylval->path = strdup(yytext);
return HPATH;
}
{PATH} {
if (yytext[yyleng-1] == '/')
PUSH_STATE(INPATH_SLASH);
else
PUSH_STATE(INPATH);
yylval->path = strdup(yytext);
return PATH;
}
{HPATH} {
if (yytext[yyleng-1] == '/')
PUSH_STATE(INPATH_SLASH);
else
PUSH_STATE(INPATH);
yylval->path = strdup(yytext);
return HPATH;
}
<INPATH,INPATH_SLASH>\$\{ {
POP_STATE();
PUSH_STATE(INPATH);
PUSH_STATE(DEFAULT);
return DOLLAR_CURLY;
}
<INPATH,INPATH_SLASH>{PATH}|{PATH_SEG}|{PATH_CHAR}+ {
POP_STATE();
if (yytext[yyleng-1] == '/')
PUSH_STATE(INPATH_SLASH);
else
PUSH_STATE(INPATH);
yylval->e = new ExprString(data->symbols.create(string(yytext)));
return STR;
}
<INPATH>{ANY} |
<INPATH><<EOF>> {
/* If we encounter a non-path character, we inform the parser that the path
has ended by emitting a PATH_END token, and re-parse this character in the
default context (it may be ')', ';', or something of that sort). */
POP_STATE();
yyless(0);
*yylloc = prev_yylloc;
return PATH_END;
}
<INPATH_SLASH>{ANY} |
<INPATH_SLASH><<EOF>> {
throw ParseError("path has a trailing slash");
}
{PATH} { if (yytext[yyleng-1] == '/')
throw ParseError("path '%s' has a trailing slash", yytext);
yylval->path = strdup(yytext);
return PATH;
}
{HPATH} { if (yytext[yyleng-1] == '/')
throw ParseError("path '%s' has a trailing slash", yytext);
yylval->path = strdup(yytext);
return HPATH;
}
{SPATH} { yylval->path = strdup(yytext); return SPATH; }
{URI} { yylval->uri = strdup(yytext); return URI; }
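The new PATH_START, INPATH and INPATH_SLASH start conditions let ${...} antiquotations appear inside path literals: ./foo/${bar}/baz is lexed as a leading PATH segment, the usual DOLLAR_CURLY machinery for the interpolation, plain string pieces for the rest, and a synthetic PATH_END once a non-path character is seen, after which the parser rebuilds the whole thing as a string concatenation. A toy standalone splitter showing just the segmentation (not the flex rules themselves, and assuming well-formed input):

#include <cstdio>
#include <string>
#include <utility>
#include <vector>

// Split a path literal into literal pieces and ${...} interpolation pieces.
std::vector<std::pair<bool, std::string>> splitPathLiteral(const std::string & s)
{
    std::vector<std::pair<bool, std::string>> parts;   // {isInterpolation, text}
    size_t i = 0;
    while (i < s.size()) {
        auto dollar = s.find("${", i);
        if (dollar == std::string::npos) { parts.push_back({false, s.substr(i)}); break; }
        if (dollar > i) parts.push_back({false, s.substr(i, dollar - i)});
        auto close = s.find('}', dollar);              // assumed to exist
        parts.push_back({true, s.substr(dollar + 2, close - dollar - 2)});
        i = close + 1;
    }
    return parts;
}

int main()
{
    for (auto & [interp, text] : splitPathLiteral("./foo/${bar}/baz"))
        std::printf("%s %s\n", interp ? "interpolate" : "literal    ", text.c_str());
    // literal     ./foo/
    // interpolate bar
    // literal     /baz
}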


@ -15,8 +15,8 @@ libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/lib
libexpr_LIBS = libutil libstore libfetchers
libexpr_LDFLAGS = -lboost_context
ifneq ($(OS), FreeBSD)
libexpr_LDFLAGS += -lboost_context -pthread
ifdef HOST_LINUX
libexpr_LDFLAGS += -ldl
endif
@ -35,7 +35,7 @@ $(d)/lexer-tab.cc $(d)/lexer-tab.hh: $(d)/lexer.l
clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh
$(eval $(call install-file-in, $(d)/nix-expr.pc, $(prefix)/lib/pkgconfig, 0644))
$(eval $(call install-file-in, $(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))


@ -124,7 +124,7 @@ void ExprList::show(std::ostream & str) const
void ExprLambda::show(std::ostream & str) const
{
str << "(";
if (matchAttrs) {
if (hasFormals()) {
str << "{ ";
bool first = true;
for (auto & i : formals->formals) {
@ -284,7 +284,7 @@ void ExprVar::bindVars(const StaticEnv & env)
"undefined variable" error now. */
if (withLevel == -1)
throw UndefinedVarError({
.hint = hintfmt("undefined variable '%1%'", name),
.msg = hintfmt("undefined variable '%1%'", name),
.errPos = pos
});
fromWith = true;
@ -348,7 +348,7 @@ void ExprLambda::bindVars(const StaticEnv & env)
if (!arg.empty()) newEnv.vars[arg] = displ++;
if (matchAttrs) {
if (hasFormals()) {
for (auto & i : formals->formals)
newEnv.vars[i.name] = displ++;


@ -17,6 +17,7 @@ MakeError(ThrownError, AssertionError);
MakeError(Abort, EvalError);
MakeError(TypeError, EvalError);
MakeError(UndefinedVarError, Error);
MakeError(MissingArgumentError, EvalError);
MakeError(RestrictedPathError, Error);
@ -179,6 +180,7 @@ struct ExprOpHasAttr : Expr
struct ExprAttrs : Expr
{
bool recursive;
Pos pos;
struct AttrDef {
bool inherited;
Expr * e;
@ -198,7 +200,8 @@ struct ExprAttrs : Expr
};
typedef std::vector<DynamicAttrDef> DynamicAttrDefs;
DynamicAttrDefs dynamicAttrs;
ExprAttrs() : recursive(false) { };
ExprAttrs(const Pos &pos) : recursive(false), pos(pos) { };
ExprAttrs() : recursive(false), pos(noPos) { };
COMMON_METHODS
};
@ -230,20 +233,20 @@ struct ExprLambda : Expr
Pos pos;
Symbol name;
Symbol arg;
bool matchAttrs;
Formals * formals;
Expr * body;
ExprLambda(const Pos & pos, const Symbol & arg, bool matchAttrs, Formals * formals, Expr * body)
: pos(pos), arg(arg), matchAttrs(matchAttrs), formals(formals), body(body)
ExprLambda(const Pos & pos, const Symbol & arg, Formals * formals, Expr * body)
: pos(pos), arg(arg), formals(formals), body(body)
{
if (!arg.empty() && formals && formals->argNames.find(arg) != formals->argNames.end())
throw ParseError({
.hint = hintfmt("duplicate formal function argument '%1%'", arg),
.msg = hintfmt("duplicate formal function argument '%1%'", arg),
.errPos = pos
});
};
void setName(Symbol & name);
string showNamePos() const;
inline bool hasFormals() const { return formals != nullptr; }
COMMON_METHODS
};


@ -32,7 +32,7 @@ namespace nix {
Path basePath;
Symbol file;
FileOrigin origin;
ErrorInfo error;
std::optional<ErrorInfo> error;
Symbol sLetBody;
ParseData(EvalState & state)
: state(state)
@ -66,8 +66,8 @@ namespace nix {
static void dupAttr(const AttrPath & attrPath, const Pos & pos, const Pos & prevPos)
{
throw ParseError({
.hint = hintfmt("attribute '%1%' already defined at %2%",
showAttrPath(attrPath), prevPos),
.msg = hintfmt("attribute '%1%' already defined at %2%",
showAttrPath(attrPath), prevPos),
.errPos = pos
});
}
@ -75,7 +75,7 @@ static void dupAttr(const AttrPath & attrPath, const Pos & pos, const Pos & prev
static void dupAttr(Symbol attr, const Pos & pos, const Pos & prevPos)
{
throw ParseError({
.hint = hintfmt("attribute '%1%' already defined at %2%", attr, prevPos),
.msg = hintfmt("attribute '%1%' already defined at %2%", attr, prevPos),
.errPos = pos
});
}
@ -146,7 +146,7 @@ static void addFormal(const Pos & pos, Formals * formals, const Formal & formal)
{
if (!formals->argNames.insert(formal.name).second)
throw ParseError({
.hint = hintfmt("duplicate formal function argument '%1%'",
.msg = hintfmt("duplicate formal function argument '%1%'",
formal.name),
.errPos = pos
});
@ -258,7 +258,7 @@ static inline Pos makeCurPos(const YYLTYPE & loc, ParseData * data)
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * error)
{
data->error = {
.hint = hintfmt(error),
.msg = hintfmt(error),
.errPos = makeCurPos(*loc, data)
};
}
@ -290,13 +290,13 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
%type <formal> formal
%type <attrNames> attrs attrpath
%type <string_parts> string_parts_interpolated ind_string_parts
%type <e> string_parts string_attr
%type <e> path_start string_parts string_attr
%type <id> attr
%token <id> ID ATTRPATH
%token <e> STR IND_STR
%token <n> INT
%token <nf> FLOAT
%token <path> PATH HPATH SPATH
%token <path> PATH HPATH SPATH PATH_END
%token <uri> URI
%token IF THEN ELSE ASSERT WITH LET IN REC INHERIT EQ NEQ AND OR IMPL OR_KW
%token DOLLAR_CURLY /* == ${ */
@ -324,13 +324,13 @@ expr: expr_function;
expr_function
: ID ':' expr_function
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), false, 0, $3); }
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); }
| '{' formals '}' ':' expr_function
{ $$ = new ExprLambda(CUR_POS, data->symbols.create(""), true, $2, $5); }
{ $$ = new ExprLambda(CUR_POS, data->symbols.create(""), $2, $5); }
| '{' formals '}' '@' ID ':' expr_function
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($5), true, $2, $7); }
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($5), $2, $7); }
| ID '@' '{' formals '}' ':' expr_function
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), true, $4, $7); }
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), $4, $7); }
| ASSERT expr ';' expr_function
{ $$ = new ExprAssert(CUR_POS, $2, $4); }
| WITH expr ';' expr_function
@ -338,7 +338,7 @@ expr_function
| LET binds IN expr_function
{ if (!$2->dynamicAttrs.empty())
throw ParseError({
.hint = hintfmt("dynamic attributes not allowed in let"),
.msg = hintfmt("dynamic attributes not allowed in let"),
.errPos = CUR_POS
});
$$ = new ExprLet($2, $4);
@ -405,8 +405,11 @@ expr_simple
| IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE {
$$ = stripIndentation(CUR_POS, data->symbols, *$2);
}
| PATH { $$ = new ExprPath(absPath($1, data->basePath)); }
| HPATH { $$ = new ExprPath(getHome() + string{$1 + 1}); }
| path_start PATH_END { $$ = $1; }
| path_start string_parts_interpolated PATH_END {
$2->insert($2->begin(), $1);
$$ = new ExprConcatStrings(CUR_POS, false, $2);
}
| SPATH {
string path($1 + 1, strlen($1) - 2);
$$ = new ExprApp(CUR_POS,
@ -415,10 +418,10 @@ expr_simple
new ExprString(data->symbols.create(path)));
}
| URI {
static bool noURLLiterals = settings.isExperimentalFeatureEnabled("no-url-literals");
static bool noURLLiterals = settings.isExperimentalFeatureEnabled(Xp::NoUrlLiterals);
if (noURLLiterals)
throw ParseError({
.hint = hintfmt("URL literals are disabled"),
.msg = hintfmt("URL literals are disabled"),
.errPos = CUR_POS
});
$$ = new ExprString(data->symbols.create($1));
@ -452,6 +455,20 @@ string_parts_interpolated
}
;
path_start
: PATH {
Path path(absPath($1, data->basePath));
/* add back in the trailing '/' to the first segment */
if ($1[strlen($1)-1] == '/' && strlen($1) > 1)
path += "/";
$$ = new ExprPath(path);
}
| HPATH {
Path path(getHome() + string($1 + 1));
$$ = new ExprPath(path);
}
;
ind_string_parts
: ind_string_parts IND_STR { $$ = $1; $1->push_back($2); }
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->push_back($3); }
@ -478,7 +495,7 @@ binds
$$->attrs[i.symbol] = ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data));
}
}
| { $$ = new ExprAttrs; }
| { $$ = new ExprAttrs(makeCurPos(@0, data)); }
;
attrs
@ -491,7 +508,7 @@ attrs
delete str;
} else
throw ParseError({
.hint = hintfmt("dynamic attributes not allowed in inherit"),
.msg = hintfmt("dynamic attributes not allowed in inherit"),
.errPos = makeCurPos(@2, data)
});
}
@ -576,7 +593,7 @@ Expr * EvalState::parse(const char * text, FileOrigin origin,
ParseData data(*this);
data.origin = origin;
switch (origin) {
case foFile:
case foFile:
data.file = data.symbols.create(path);
break;
case foStdin:
@ -593,7 +610,7 @@ Expr * EvalState::parse(const char * text, FileOrigin origin,
int res = yyparse(scanner, &data);
yylex_destroy(scanner);
if (res) throw ParseError(data.error);
if (res) throw ParseError(data.error.value());
data.result->bindVars(staticEnv);
@ -703,7 +720,7 @@ Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos
return corepkgsPrefix + path.substr(4);
throw ThrownError({
.hint = hintfmt(evalSettings.pureEval
.msg = hintfmt(evalSettings.pureEval
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
path),
@ -725,8 +742,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
store, resolveUri(elem.second), "source", false).first.storePath) };
} catch (FileTransferError & e) {
logWarning({
.name = "Entry download",
.hint = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
.msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
});
res = { false, "" };
}
@ -736,8 +752,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
res = { true, path };
else {
logWarning({
.name = "Entry not found",
.hint = hintfmt("warning: Nix search path entry '%1%' does not exist, ignoring", elem.second)
.msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", elem.second)
});
res = { false, "" };
}


@ -21,6 +21,8 @@
#include <regex>
#include <dlfcn.h>
#include <cmath>
namespace nix {
@ -35,7 +37,7 @@ InvalidPathError::InvalidPathError(const Path & path) :
void EvalState::realiseContext(const PathSet & context)
{
std::vector<StorePathWithOutputs> drvs;
std::vector<DerivedPath::Built> drvs;
for (auto & i : context) {
auto [ctxS, outputName] = decodeContext(i);
@ -43,22 +45,21 @@ void EvalState::realiseContext(const PathSet & context)
if (!store->isValidPath(ctx))
throw InvalidPathError(store->printStorePath(ctx));
if (!outputName.empty() && ctx.isDerivation()) {
drvs.push_back(StorePathWithOutputs{ctx, {outputName}});
drvs.push_back({ctx, {outputName}});
}
}
if (drvs.empty()) return;
if (!evalSettings.enableImportFromDerivation)
throw EvalError("attempted to realize '%1%' during evaluation but 'allow-import-from-derivation' is false",
store->printStorePath(drvs.begin()->path));
throw Error(
"cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled",
store->printStorePath(drvs.begin()->drvPath));
/* For performance, prefetch all substitute info. */
StorePathSet willBuild, willSubstitute, unknown;
uint64_t downloadSize, narSize;
store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize);
store->buildPaths(drvs);
/* Build/substitute the context. */
std::vector<DerivedPath> buildReqs;
for (auto & d : drvs) buildReqs.emplace_back(DerivedPath { d });
store->buildPaths(buildReqs);
/* Add the outputs of these derivations to the allowed
paths. */
@ -115,9 +116,12 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot import '%1%', since path '%2%' is not valid", path, e.path),
.msg = hintfmt("cannot import '%1%', since path '%2%' is not valid", path, e.path),
.errPos = pos
});
} catch (Error & e) {
e.addTrace(pos, "while importing '%s'", path);
throw;
}
Path realPath = state.checkSourcePath(state.toRealPath(path, context));
@ -153,16 +157,15 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS
}
w.attrs->sort();
static RootValue fun;
if (!fun) {
fun = allocRootValue(state.allocValue());
if (!state.vImportedDrvToDerivation) {
state.vImportedDrvToDerivation = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "imported-drv-to-derivation.nix.gen.hh"
, "/"), **fun);
, "/"), **state.vImportedDrvToDerivation);
}
state.forceFunction(**fun, pos);
mkApp(v, **fun, w);
state.forceFunction(**state.vImportedDrvToDerivation, pos);
mkApp(v, **state.vImportedDrvToDerivation, w);
state.forceAttrs(v, pos);
}
@ -282,7 +285,7 @@ void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt(
.msg = hintfmt(
"cannot import '%1%', since path '%2%' is not valid",
path, e.path),
.errPos = pos
@ -322,7 +325,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
auto count = args[0]->listSize();
if (count == 0) {
throw EvalError({
.hint = hintfmt("at least one argument to 'exec' required"),
.msg = hintfmt("at least one argument to 'exec' required"),
.errPos = pos
});
}
@ -336,7 +339,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
.msg = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
program, e.path),
.errPos = pos
});
@ -407,7 +410,7 @@ static RegisterPrimOp primop_isNull({
Return `true` if *e* evaluates to `null`, and `false` otherwise.
> **Warning**
>
>
> This function is *deprecated*; just write `e == null` instead.
)",
.fun = prim_isNull,
@ -542,18 +545,56 @@ typedef list<Value *> ValueList;
#endif
static Bindings::iterator getAttr(
EvalState & state,
string funcName,
string attrName,
Bindings * attrSet,
const Pos & pos)
{
Bindings::iterator value = attrSet->find(state.symbols.create(attrName));
if (value == attrSet->end()) {
hintformat errorMsg = hintfmt(
"attribute '%s' missing for call to '%s'",
attrName,
funcName
);
Pos aPos = *attrSet->pos;
if (aPos == noPos) {
throw TypeError({
.msg = errorMsg,
.errPos = pos,
});
} else {
auto e = TypeError({
.msg = errorMsg,
.errPos = aPos,
});
// Adding another trace for the function name to make it clear
// which call received wrong arguments.
e.addTrace(pos, hintfmt("while invoking '%s'", funcName));
throw e;
}
}
return value;
}
static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceAttrs(*args[0], pos);
/* Get the start set. */
Bindings::iterator startSet =
args[0]->attrs->find(state.symbols.create("startSet"));
if (startSet == args[0]->attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'startSet' required"),
.errPos = pos
});
Bindings::iterator startSet = getAttr(
state,
"genericClosure",
"startSet",
args[0]->attrs,
pos
);
state.forceList(*startSet->value, pos);
ValueList workSet;
@ -561,13 +602,14 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
workSet.push_back(startSet->value->listElems()[n]);
/* Get the operator. */
Bindings::iterator op =
args[0]->attrs->find(state.symbols.create("operator"));
if (op == args[0]->attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'operator' required"),
.errPos = pos
});
Bindings::iterator op = getAttr(
state,
"genericClosure",
"operator",
args[0]->attrs,
pos
);
state.forceValue(*op->value, pos);
/* Construct the closure by applying the operator to element of
@ -587,7 +629,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
e->attrs->find(state.symbols.create("key"));
if (key == e->attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'key' required"),
.msg = hintfmt("attribute 'key' required"),
.errPos = pos
});
state.forceValue(*key->value, pos);
@ -670,6 +712,44 @@ static RegisterPrimOp primop_addErrorContext(RegisterPrimOp::Info {
.fun = prim_addErrorContext,
});
static void prim_ceil(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
auto value = state.forceFloat(*args[0], args[0]->determinePos(pos));
mkInt(v, ceil(value));
}
static RegisterPrimOp primop_ceil({
.name = "__ceil",
.args = {"double"},
.doc = R"(
Converts an IEEE-754 double-precision floating-point number (*double*) to
the next higher integer.
If the datatype is neither an integer nor a "float", an evaluation error will be
thrown.
)",
.fun = prim_ceil,
});
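As a quick illustration of the behaviour documented above (the values are arbitrary), `builtins.ceil` rounds towards positive infinity and returns an integer:

```nix
builtins.ceil 3.2     # => 4
builtins.ceil (-3.2)  # => -3
```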
static void prim_floor(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
auto value = state.forceFloat(*args[0], args[0]->determinePos(pos));
mkInt(v, floor(value));
}
static RegisterPrimOp primop_floor({
.name = "__floor",
.args = {"double"},
.doc = R"(
Converts an IEEE-754 double-precision floating-point number (*double*) to
the next lower integer.
If the datatype is neither an integer nor a "float", an evaluation error will be
thrown.
)",
.fun = prim_floor,
});
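Likewise, again with arbitrary values, `builtins.floor` rounds towards negative infinity:

```nix
builtins.floor 3.8     # => 3
builtins.floor (-3.8)  # => -4
```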
/* Try evaluating the argument. Success => {success=true; value=something;},
* else => {success=false; value=false;} */
static void prim_tryEval(EvalState & state, const Pos & pos, Value * * args, Value & v)
@ -693,10 +773,14 @@ static RegisterPrimOp primop_tryEval({
Try to shallowly evaluate *e*. Return a set containing the
attributes `success` (`true` if *e* evaluated successfully,
`false` if an error was thrown) and `value`, equalling *e* if
successful and `false` otherwise. Note that this doesn't evaluate
*e* deeply, so ` let e = { x = throw ""; }; in (builtins.tryEval
e).success ` will be `true`. Using ` builtins.deepSeq ` one can
get the expected result: `let e = { x = throw ""; }; in
successful and `false` otherwise. `tryEval` will only prevent
errors created by `throw` or `assert` from being thrown.
Errors `tryEval` will not catch are for example those created
by `abort` and type errors generated by builtins. Also note that
this doesn't evaluate *e* deeply, so `let e = { x = throw ""; };
in (builtins.tryEval e).success` will be `true`. Using
`builtins.deepSeq` one can get the expected result:
`let e = { x = throw ""; }; in
(builtins.tryEval (builtins.deepSeq e e)).success` will be
`false`.
)",
@ -807,12 +891,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
state.forceAttrs(*args[0], pos);
/* Figure out the name first (for stack backtraces). */
Bindings::iterator attr = args[0]->attrs->find(state.sName);
if (attr == args[0]->attrs->end())
throw EvalError({
.hint = hintfmt("required attribute 'name' missing"),
.errPos = pos
});
Bindings::iterator attr = getAttr(
state,
"derivationStrict",
state.sName,
args[0]->attrs,
pos
);
string drvName;
Pos & posDrvName(*attr->pos);
try {
@ -859,7 +945,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
else if (s == "flat") ingestionMethod = FileIngestionMethod::Flat;
else
throw EvalError({
.hint = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
.msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
.errPos = posDrvName
});
};
@ -869,7 +955,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
for (auto & j : ss) {
if (outputs.find(j) != outputs.end())
throw EvalError({
.hint = hintfmt("duplicate derivation output '%1%'", j),
.msg = hintfmt("duplicate derivation output '%1%'", j),
.errPos = posDrvName
});
/* !!! Check whether j is a valid attribute
@ -879,14 +965,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
the resulting set. */
if (j == "drv")
throw EvalError({
.hint = hintfmt("invalid derivation output name 'drv'" ),
.msg = hintfmt("invalid derivation output name 'drv'" ),
.errPos = posDrvName
});
outputs.insert(j);
}
if (outputs.empty())
throw EvalError({
.hint = hintfmt("derivation cannot have an empty set of outputs"),
.msg = hintfmt("derivation cannot have an empty set of outputs"),
.errPos = posDrvName
});
};
@ -899,7 +985,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
}
if (i->name == state.sContentAddressed) {
settings.requireExperimentalFeature("ca-derivations");
settings.requireExperimentalFeature(Xp::CaDerivations);
contentAddressed = state.forceBool(*i->value, pos);
}
@ -944,7 +1030,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
}
} else {
auto s = state.coerceToString(posDrvName, *i->value, context, true);
auto s = state.coerceToString(*i->pos, *i->value, context, true);
drv.env.emplace(key, s);
if (i->name == state.sBuilder) drv.builder = s;
else if (i->name == state.sSystem) drv.platform = s;
@ -1007,20 +1093,20 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
/* Do we have all required attributes? */
if (drv.builder == "")
throw EvalError({
.hint = hintfmt("required attribute 'builder' missing"),
.msg = hintfmt("required attribute 'builder' missing"),
.errPos = posDrvName
});
if (drv.platform == "")
throw EvalError({
.hint = hintfmt("required attribute 'system' missing"),
.msg = hintfmt("required attribute 'system' missing"),
.errPos = posDrvName
});
/* Check whether the derivation name is valid. */
if (isDerivation(drvName))
throw EvalError({
.hint = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
.msg = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
.errPos = posDrvName
});
@ -1031,7 +1117,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
already content addressed. */
if (outputs.size() != 1 || *(outputs.begin()) != "out")
throw Error({
.hint = hintfmt("multiple outputs are not supported in fixed-output derivations"),
.msg = hintfmt("multiple outputs are not supported in fixed-output derivations"),
.errPos = posDrvName
});
@ -1084,7 +1170,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
// hash per output.
auto hashModulo = hashDerivationModulo(*state.store, Derivation(drv), true);
std::visit(overloaded {
[&](Hash h) {
[&](Hash & h) {
for (auto & i : outputs) {
auto outPath = state.store->makeOutputPath(i, h, drvName);
drv.env[i] = state.store->printStorePath(outPath);
@ -1096,11 +1182,11 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
});
}
},
[&](CaOutputHashes) {
[&](CaOutputHashes &) {
// Shouldn't happen as the toplevel derivation is not CA.
assert(false);
},
[&](DeferredHash _) {
[&](DeferredHash &) {
for (auto & i : outputs) {
drv.outputs.insert_or_assign(i,
DerivationOutput {
@ -1201,7 +1287,10 @@ static RegisterPrimOp primop_toPath({
static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
if (evalSettings.pureEval)
throw EvalError("builtins.storePath' is not allowed in pure evaluation mode");
throw EvalError({
.msg = hintfmt("'%s' is not allowed in pure evaluation mode", "builtins.storePath"),
.errPos = pos
});
PathSet context;
Path path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context));
@ -1211,7 +1300,7 @@ static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, V
if (!state.store->isStorePath(path)) path = canonPath(path, true);
if (!state.store->isInStore(path))
throw EvalError({
.hint = hintfmt("path '%1%' is not in the Nix store", path),
.msg = hintfmt("path '%1%' is not in the Nix store", path),
.errPos = pos
});
auto path2 = state.store->toStorePath(path).first;
@ -1247,7 +1336,7 @@ static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args,
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt(
.msg = hintfmt(
"cannot check the existence of '%1%', since path '%2%' is not valid",
path, e.path),
.errPos = pos
@ -1324,7 +1413,7 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
.msg = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
.errPos = pos
});
}
@ -1360,12 +1449,13 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
if (i != v2.attrs->end())
prefix = state.forceStringNoCtx(*i->value, pos);
i = v2.attrs->find(state.symbols.create("path"));
if (i == v2.attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'path' missing"),
.errPos = pos
});
i = getAttr(
state,
"findFile",
"path",
v2.attrs,
pos
);
PathSet context;
string path = state.coerceToString(pos, *i->value, context, false, false);
@ -1374,7 +1464,7 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
.msg = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
.errPos = pos
});
}
@ -1399,15 +1489,20 @@ static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Va
string type = state.forceStringNoCtx(*args[0], pos);
std::optional<HashType> ht = parseHashType(type);
if (!ht)
throw Error({
.hint = hintfmt("unknown hash type '%1%'", type),
.errPos = pos
});
throw Error({
.msg = hintfmt("unknown hash type '%1%'", type),
.errPos = pos
});
PathSet context; // discarded
Path p = state.coerceToPath(pos, *args[1], context);
PathSet context;
Path path = state.coerceToPath(pos, *args[1], context);
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError("cannot read '%s' since path '%s' is not valid, at %s", path, e.path, pos);
}
mkString(v, hashFile(*ht, state.checkSourcePath(p)).to_string(Base16, false), context);
mkString(v, hashFile(*ht, state.checkSourcePath(state.toRealPath(path, context))).to_string(Base16, false));
}
static RegisterPrimOp primop_hashFile({
@ -1430,7 +1525,7 @@ static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Val
state.realiseContext(ctx);
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
.msg = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
.errPos = pos
});
}
@ -1618,7 +1713,7 @@ static void prim_fromJSON(EvalState & state, const Pos & pos, Value * * args, Va
parseJSON(state, s, v);
} catch (JSONParseError &e) {
e.addTrace(pos, "while decoding a JSON string");
throw e;
throw;
}
}
@ -1650,7 +1745,7 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
for (auto path : context) {
if (path.at(0) != '/')
throw EvalError( {
.hint = hintfmt(
.msg = hintfmt(
"in 'toFile': the file named '%1%' must not contain a reference "
"to a derivation but contains (%2%)",
name, path),
@ -1748,50 +1843,81 @@ static RegisterPrimOp primop_toFile({
.fun = prim_toFile,
});
static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,
Value * filterFun, FileIngestionMethod method, const std::optional<Hash> expectedHash, Value & v)
static void addPath(
EvalState & state,
const Pos & pos,
const string & name,
Path path,
Value * filterFun,
FileIngestionMethod method,
const std::optional<Hash> expectedHash,
Value & v,
const PathSet & context)
{
const auto path = evalSettings.pureEval && expectedHash ?
path_ :
state.checkSourcePath(path_);
PathFilter filter = filterFun ? ([&](const Path & path) {
auto st = lstat(path);
try {
// FIXME: handle CA derivation outputs (where path needs to
// be rewritten to the actual output).
state.realiseContext(context);
/* Call the filter function. The first argument is the path,
the second is a string indicating the type of the file. */
Value arg1;
mkString(arg1, path);
if (state.store->isInStore(path)) {
auto [storePath, subPath] = state.store->toStorePath(path);
auto info = state.store->queryPathInfo(storePath);
if (!info->references.empty())
throw EvalError("store path '%s' is not allowed to have references",
state.store->printStorePath(storePath));
path = state.store->toRealPath(storePath) + subPath;
}
Value fun2;
state.callFunction(*filterFun, arg1, fun2, noPos);
path = evalSettings.pureEval && expectedHash
? path
: state.checkSourcePath(path);
Value arg2;
mkString(arg2,
S_ISREG(st.st_mode) ? "regular" :
S_ISDIR(st.st_mode) ? "directory" :
S_ISLNK(st.st_mode) ? "symlink" :
"unknown" /* not supported, will fail! */);
PathFilter filter = filterFun ? ([&](const Path & path) {
auto st = lstat(path);
Value res;
state.callFunction(fun2, arg2, res, noPos);
/* Call the filter function. The first argument is the path,
the second is a string indicating the type of the file. */
Value arg1;
mkString(arg1, path);
return state.forceBool(res, pos);
}) : defaultPathFilter;
Value fun2;
state.callFunction(*filterFun, arg1, fun2, noPos);
std::optional<StorePath> expectedStorePath;
if (expectedHash)
expectedStorePath = state.store->makeFixedOutputPath(method, *expectedHash, name);
Path dstPath;
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
dstPath = state.store->printStorePath(settings.readOnlyMode
? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first
: state.store->addToStore(name, path, method, htSHA256, filter, state.repair));
if (expectedHash && expectedStorePath != state.store->parseStorePath(dstPath))
throw Error("store path mismatch in (possibly filtered) path added from '%s'", path);
} else
dstPath = state.store->printStorePath(*expectedStorePath);
Value arg2;
mkString(arg2,
S_ISREG(st.st_mode) ? "regular" :
S_ISDIR(st.st_mode) ? "directory" :
S_ISLNK(st.st_mode) ? "symlink" :
"unknown" /* not supported, will fail! */);
mkString(v, dstPath, {dstPath});
Value res;
state.callFunction(fun2, arg2, res, noPos);
return state.forceBool(res, pos);
}) : defaultPathFilter;
std::optional<StorePath> expectedStorePath;
if (expectedHash)
expectedStorePath = state.store->makeFixedOutputPath(method, *expectedHash, name);
Path dstPath;
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
dstPath = state.store->printStorePath(settings.readOnlyMode
? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first
: state.store->addToStore(name, path, method, htSHA256, filter, state.repair));
if (expectedHash && expectedStorePath != state.store->parseStorePath(dstPath))
throw Error("store path mismatch in (possibly filtered) path added from '%s'", path);
} else
dstPath = state.store->printStorePath(*expectedStorePath);
mkString(v, dstPath, {dstPath});
state.allowPath(dstPath);
} catch (Error & e) {
e.addTrace(pos, "while adding path '%s'", path);
throw;
}
}
@ -1799,28 +1925,36 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
{
PathSet context;
Path path = state.coerceToPath(pos, *args[1], context);
if (!context.empty())
throw EvalError({
.hint = hintfmt("string '%1%' cannot refer to other paths", path),
.errPos = pos
});
state.forceValue(*args[0], pos);
if (args[0]->type() != nFunction)
throw TypeError({
.hint = hintfmt(
.msg = hintfmt(
"first argument in call to 'filterSource' is not a function but %1%",
showType(*args[0])),
.errPos = pos
});
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v);
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
}
static RegisterPrimOp primop_filterSource({
.name = "__filterSource",
.args = {"e1", "e2"},
.doc = R"(
> **Warning**
>
> `filterSource` should not be used to filter store paths. Since
> `filterSource` uses the name of the input directory while naming
> the output directory, doing so will produce a directory name in
> the form of `<hash2>-<hash>-<name>`, where `<hash>-<name>` is
> the name of the input directory. Since `<hash>` depends on the
> unfiltered directory, the name of the output directory will
> indirectly depend on files that are filtered out by the
> function. This will trigger a rebuild even when a filtered out
> file is changed. Use `builtins.path` instead, which allows
> specifying the name of the output directory.
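A minimal sketch of the recommended alternative; the directory, name and filter below are only examples:

```nix
builtins.path {
  path = ./source-dir;
  name = "source";  # fixed name, independent of the input directory's name
  filter = path: type: baseNameOf path != ".git";
}
```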
This function allows you to copy sources into the Nix store while
filtering certain files. For instance, suppose that you want to use
the directory `source-dir` as an input to a Nix expression, e.g.
@ -1867,18 +2001,13 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
Value * filterFun = nullptr;
auto method = FileIngestionMethod::Recursive;
std::optional<Hash> expectedHash;
PathSet context;
for (auto & attr : *args[0]->attrs) {
const string & n(attr.name);
if (n == "path") {
PathSet context;
if (n == "path")
path = state.coerceToPath(*attr.pos, *attr.value, context);
if (!context.empty())
throw EvalError({
.hint = hintfmt("string '%1%' cannot refer to other paths", path),
.errPos = *attr.pos
});
} else if (attr.name == state.sName)
else if (attr.name == state.sName)
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else if (n == "filter") {
state.forceValue(*attr.value, pos);
@ -1889,19 +2018,19 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
else
throw EvalError({
.hint = hintfmt("unsupported argument '%1%' to 'addPath'", attr.name),
.msg = hintfmt("unsupported argument '%1%' to 'addPath'", attr.name),
.errPos = *attr.pos
});
}
if (path.empty())
throw EvalError({
.hint = hintfmt("'path' required"),
.msg = hintfmt("'path' required"),
.errPos = pos
});
if (name.empty())
name = baseNameOf(path);
addPath(state, pos, name, path, filterFun, method, expectedHash, v);
addPath(state, pos, name, path, filterFun, method, expectedHash, v, context);
}
static RegisterPrimOp primop_path({
@ -1911,26 +2040,26 @@ static RegisterPrimOp primop_path({
An enrichment of the built-in path type, based on the attributes
present in *args*. All are optional except `path`:
- path
- path\
The underlying path.
- name
- name\
The name of the path when added to the store. This can be used to
reference paths that have nix-illegal characters in their names,
like `@`.
- filter
- filter\
A function of the type expected by `builtins.filterSource`,
with the same semantics.
- recursive
- recursive\
When `false`, when `path` is added to the store it is with a
flat hash, rather than a hash of the NAR serialization of the
file. Thus, `path` must refer to a regular file, not a
directory. This allows similar behavior to `fetchurl`. Defaults
to `true`.
- sha256
- sha256\
When provided, this is the expected hash of the file at the
path. Evaluation will fail if the hash is incorrect, and
providing a hash allows `builtins.path` to be used even when the
@ -2007,14 +2136,15 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
string attr = state.forceStringNoCtx(*args[0], pos);
state.forceAttrs(*args[1], pos);
// !!! Should we create a symbol here or just do a lookup?
Bindings::iterator i = args[1]->attrs->find(state.symbols.create(attr));
if (i == args[1]->attrs->end())
throw EvalError({
.hint = hintfmt("attribute '%1%' missing", attr),
.errPos = pos
});
Bindings::iterator i = getAttr(
state,
"getAttr",
attr,
args[1]->attrs,
pos
);
// !!! add to stack trace?
if (state.countCalls && i->pos) state.attrSelects[*i->pos]++;
if (state.countCalls && *i->pos != noPos) state.attrSelects[*i->pos]++;
state.forceValue(*i->value, pos);
v = *i->value;
}
@ -2139,22 +2269,25 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
Value & v2(*args[0]->listElems()[i]);
state.forceAttrs(v2, pos);
Bindings::iterator j = v2.attrs->find(state.sName);
if (j == v2.attrs->end())
throw TypeError({
.hint = hintfmt("'name' attribute missing in a call to 'listToAttrs'"),
.errPos = pos
});
string name = state.forceStringNoCtx(*j->value, pos);
Bindings::iterator j = getAttr(
state,
"listToAttrs",
state.sName,
v2.attrs,
pos
);
string name = state.forceStringNoCtx(*j->value, *j->pos);
Symbol sym = state.symbols.create(name);
if (seen.insert(sym).second) {
Bindings::iterator j2 = v2.attrs->find(state.symbols.create(state.sValue));
if (j2 == v2.attrs->end())
throw TypeError({
.hint = hintfmt("'value' attribute missing in a call to 'listToAttrs'"),
.errPos = pos
});
Bindings::iterator j2 = getAttr(
state,
"listToAttrs",
state.sValue,
v2.attrs,
pos
);
v.attrs->push_back(Attr(sym, j2->value, j2->pos));
}
}
@ -2258,11 +2391,11 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
}
if (!args[0]->isLambda())
throw TypeError({
.hint = hintfmt("'functionArgs' requires a function"),
.msg = hintfmt("'functionArgs' requires a function"),
.errPos = pos
});
if (!args[0]->lambda.fun->matchAttrs) {
if (!args[0]->lambda.fun->hasFormals()) {
state.mkAttrs(v, 0);
return;
}
@ -2271,7 +2404,7 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
for (auto & i : args[0]->lambda.fun->formals->formals) {
// !!! should optimise booleans (allocate only once)
Value * value = state.allocValue();
v.attrs->push_back(Attr(i.name, value, &i.pos));
v.attrs->push_back(Attr(i.name, value, ptr(&i.pos)));
mkBool(*value, i.def);
}
v.attrs->sort();
@ -2352,7 +2485,7 @@ static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Valu
state.forceList(list, pos);
if (n < 0 || (unsigned int) n >= list.listSize())
throw Error({
.hint = hintfmt("list index %1% is out of bounds", n),
.msg = hintfmt("list index %1% is out of bounds", n),
.errPos = pos
});
state.forceValue(*list.listElems()[n], pos);
@ -2400,7 +2533,7 @@ static void prim_tail(EvalState & state, const Pos & pos, Value * * args, Value
state.forceList(*args[0], pos);
if (args[0]->listSize() == 0)
throw Error({
.hint = hintfmt("'tail' called on an empty list"),
.msg = hintfmt("'tail' called on an empty list"),
.errPos = pos
});
@ -2417,7 +2550,7 @@ static RegisterPrimOp primop_tail({
the argument isn't a list or is an empty list.
> **Warning**
>
>
> This function should generally be avoided since it's inefficient:
> unlike Haskell's `tail`, it takes O(n) time, so recursing over a
> list by repeatedly calling `tail` takes O(n^2) time.
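For instance, a sketch of the quadratic pattern this warning describes (the summation function is purely illustrative):

```nix
let
  # O(n^2): every recursive call allocates a copy of the remaining list
  sum = l: if l == [] then 0 else builtins.head l + sum (builtins.tail l);
in
  sum [ 1 2 3 4 ]  # => 10; prefer builtins.foldl' (a: b: a + b) 0 for long lists
```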
@ -2639,7 +2772,7 @@ static void prim_genList(EvalState & state, const Pos & pos, Value * * args, Val
if (len < 0)
throw EvalError({
.hint = hintfmt("cannot create list of size %1%", len),
.msg = hintfmt("cannot create list of size %1%", len),
.errPos = pos
});
@ -2795,7 +2928,12 @@ static void prim_concatMap(EvalState & state, const Pos & pos, Value * * args, V
for (unsigned int n = 0; n < nrLists; ++n) {
Value * vElem = args[1]->listElems()[n];
state.callFunction(*args[0], *vElem, lists[n], pos);
state.forceList(lists[n], pos);
try {
state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)));
} catch (TypeError &e) {
e.addTrace(pos, hintfmt("while invoking '%s'", "concatMap"));
throw;
}
len += lists[n].listSize();
}
@ -2890,7 +3028,7 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
NixFloat f2 = state.forceFloat(*args[1], pos);
if (f2 == 0)
throw EvalError({
.hint = hintfmt("division by zero"),
.msg = hintfmt("division by zero"),
.errPos = pos
});
@ -2902,7 +3040,7 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
/* Avoid division overflow as it might raise SIGFPE. */
if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
throw EvalError({
.hint = hintfmt("overflow in integer division"),
.msg = hintfmt("overflow in integer division"),
.errPos = pos
});
@ -3006,7 +3144,7 @@ static RegisterPrimOp primop_toString({
- A path (e.g., `toString /foo/bar` yields `"/foo/bar"`.
- A set containing `{ __toString = self: ...; }`.
- A set containing `{ __toString = self: ...; }` or `{ outPath = ...; }`.
- An integer.
@ -3033,7 +3171,7 @@ static void prim_substring(EvalState & state, const Pos & pos, Value * * args, V
if (start < 0)
throw EvalError({
.hint = hintfmt("negative start position in 'substring'"),
.msg = hintfmt("negative start position in 'substring'"),
.errPos = pos
});
@ -3084,14 +3222,14 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args,
std::optional<HashType> ht = parseHashType(type);
if (!ht)
throw Error({
.hint = hintfmt("unknown hash type '%1%'", type),
.msg = hintfmt("unknown hash type '%1%'", type),
.errPos = pos
});
PathSet context; // discarded
string s = state.forceString(*args[1], context, pos);
mkString(v, hashString(*ht, s).to_string(Base16, false), context);
mkString(v, hashString(*ht, s).to_string(Base16, false));
}
static RegisterPrimOp primop_hashString({
@ -3148,12 +3286,12 @@ void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError({
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
.msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
.errPos = pos
});
} else {
throw EvalError({
.hint = hintfmt("invalid regular expression '%s'", re),
.msg = hintfmt("invalid regular expression '%s'", re),
.errPos = pos
});
}
@ -3256,12 +3394,12 @@ static void prim_split(EvalState & state, const Pos & pos, Value * * args, Value
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError({
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
.msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
.errPos = pos
});
} else {
throw EvalError({
.hint = hintfmt("invalid regular expression '%s'", re),
.msg = hintfmt("invalid regular expression '%s'", re),
.errPos = pos
});
}
@ -3341,7 +3479,7 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar
state.forceList(*args[1], pos);
if (args[0]->listSize() != args[1]->listSize())
throw EvalError({
.hint = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
.msg = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
.errPos = pos
});
@ -3498,15 +3636,13 @@ static RegisterPrimOp primop_splitVersion({
RegisterPrimOp::PrimOps * RegisterPrimOp::primOps;
RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun,
std::optional<std::string> requiredFeature)
RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun)
{
if (!primOps) primOps = new PrimOps;
primOps->push_back({
.name = name,
.args = {},
.arity = arity,
.requiredFeature = std::move(requiredFeature),
.fun = fun
});
}
@ -3542,9 +3678,7 @@ void EvalState::createBaseEnv()
if (!evalSettings.pureEval) {
mkInt(v, time(0));
addConstant("__currentTime", v);
}
if (!evalSettings.pureEval) {
mkString(v, settings.thisSystem.get());
addConstant("__currentSystem", v);
}
@ -3582,14 +3716,13 @@ void EvalState::createBaseEnv()
if (RegisterPrimOp::primOps)
for (auto & primOp : *RegisterPrimOp::primOps)
if (!primOp.requiredFeature || settings.isExperimentalFeatureEnabled(*primOp.requiredFeature))
addPrimOp({
.fun = primOp.fun,
.arity = std::max(primOp.args.size(), primOp.arity),
.name = symbols.create(primOp.name),
.args = std::move(primOp.args),
.doc = primOp.doc,
});
addPrimOp({
.fun = primOp.fun,
.arity = std::max(primOp.args.size(), primOp.arity),
.name = symbols.create(primOp.name),
.args = std::move(primOp.args),
.doc = primOp.doc,
});
/* Add a wrapper around the derivation primop that computes the
`drvPath' and `outPath' attributes lazily. */

View file

@ -15,7 +15,6 @@ struct RegisterPrimOp
std::vector<std::string> args;
size_t arity = 0;
const char * doc;
std::optional<std::string> requiredFeature;
PrimOpFun fun;
};
@ -28,8 +27,7 @@ struct RegisterPrimOp
RegisterPrimOp(
std::string name,
size_t arity,
PrimOpFun fun,
std::optional<std::string> requiredFeature = {});
PrimOpFun fun);
RegisterPrimOp(Info && info);
};

View file

@ -147,7 +147,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
for (auto & i : *args[1]->attrs) {
if (!state.store->isStorePath(i.name))
throw EvalError({
.hint = hintfmt("Context key '%s' is not a store path", i.name),
.msg = hintfmt("Context key '%s' is not a store path", i.name),
.errPos = *i.pos
});
if (!settings.readOnlyMode)
@ -164,7 +164,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (state.forceBool(*iter->value, *iter->pos)) {
if (!isDerivation(i.name)) {
throw EvalError({
.hint = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", i.name),
.msg = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", i.name),
.errPos = *i.pos
});
}
@ -177,7 +177,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
state.forceList(*iter->value, *iter->pos);
if (iter->value->listSize() && !isDerivation(i.name)) {
throw EvalError({
.hint = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", i.name),
.msg = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", i.name),
.errPos = *i.pos
});
}

View file

@ -15,7 +15,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
std::string name = "source";
PathSet context;
state.forceValue(*args[0]);
state.forceValue(*args[0], pos);
if (args[0]->type() == nAttrs) {
@ -38,14 +38,14 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to 'fetchMercurial'", attr.name),
.msg = hintfmt("unsupported argument '%s' to 'fetchMercurial'", attr.name),
.errPos = *attr.pos
});
}
if (url.empty())
throw EvalError({
.hint = hintfmt("'url' argument required"),
.msg = hintfmt("'url' argument required"),
.errPos = pos
});
@ -62,6 +62,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
fetchers::Attrs attrs;
attrs.insert_or_assign("type", "hg");
attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
attrs.insert_or_assign("name", name);
if (ref) attrs.insert_or_assign("ref", *ref);
if (rev) attrs.insert_or_assign("rev", rev->gitRev());
auto input = fetchers::Input::fromAttrs(std::move(attrs));
@ -83,8 +84,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
v.attrs->sort();
if (state.allowedPaths)
state.allowedPaths->insert(tree.actualPath);
state.allowPath(tree.storePath);
}
static RegisterPrimOp r_fetchMercurial("fetchMercurial", 1, prim_fetchMercurial);

View file

@ -7,6 +7,7 @@
#include <ctime>
#include <iomanip>
#include <regex>
namespace nix {
@ -15,7 +16,8 @@ void emitTreeAttrs(
const fetchers::Tree & tree,
const fetchers::Input & input,
Value & v,
bool emptyRevFallback)
bool emptyRevFallback,
bool forceDirty)
{
assert(input.isImmutable());
@ -32,24 +34,28 @@ void emitTreeAttrs(
mkString(*state.allocAttr(v, state.symbols.create("narHash")),
narHash->to_string(SRI, true));
if (auto rev = input.getRev()) {
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev->gitRev());
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev->gitShortRev());
} else if (emptyRevFallback) {
// Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
auto emptyHash = Hash(htSHA1);
mkString(*state.allocAttr(v, state.symbols.create("rev")), emptyHash.gitRev());
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), emptyHash.gitShortRev());
}
if (input.getType() == "git")
mkBool(*state.allocAttr(v, state.symbols.create("submodules")),
fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false));
if (auto revCount = input.getRevCount())
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
else if (emptyRevFallback)
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), 0);
if (!forceDirty) {
if (auto rev = input.getRev()) {
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev->gitRev());
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev->gitShortRev());
} else if (emptyRevFallback) {
// Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
auto emptyHash = Hash(htSHA1);
mkString(*state.allocAttr(v, state.symbols.create("rev")), emptyHash.gitRev());
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), emptyHash.gitShortRev());
}
if (auto revCount = input.getRevCount())
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
else if (emptyRevFallback)
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), 0);
}
if (auto lastModified = input.getLastModified()) {
mkInt(*state.allocAttr(v, state.symbols.create("lastModified")), *lastModified);
@ -60,47 +66,71 @@ void emitTreeAttrs(
v.attrs->sort();
}
std::string fixURI(std::string uri, EvalState &state)
std::string fixURI(std::string uri, EvalState & state, const std::string & defaultScheme = "file")
{
state.checkURI(uri);
return uri.find("://") != std::string::npos ? uri : "file://" + uri;
return uri.find("://") != std::string::npos ? uri : defaultScheme + "://" + uri;
}
void addURI(EvalState &state, fetchers::Attrs &attrs, Symbol name, std::string v)
std::string fixURIForGit(std::string uri, EvalState & state)
{
string n(name);
attrs.emplace(name, n == "url" ? fixURI(v, state) : v);
static std::regex scp_uri("([^/].*)@(.*):(.*)");
if (uri[0] != '/' && std::regex_match(uri, scp_uri))
return fixURI(std::regex_replace(uri, scp_uri, "$1@$2/$3"), state, "ssh");
else
return fixURI(uri, state);
}
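Roughly speaking, this helper lets scp-style Git URLs be used where a URL is expected; the repository below is only an example:

```nix
builtins.fetchGit "git@github.com:NixOS/nix.git"
# treated as if the URL were ssh://git@github.com/NixOS/nix.git
```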
struct FetchTreeParams {
bool emptyRevFallback = false;
bool allowNameArgument = false;
};
static void fetchTree(
EvalState &state,
const Pos &pos,
Value **args,
Value &v,
const std::optional<std::string> type,
bool emptyRevFallback = false
EvalState & state,
const Pos & pos,
Value * * args,
Value & v,
std::optional<std::string> type,
const FetchTreeParams & params = FetchTreeParams{}
) {
fetchers::Input input;
PathSet context;
state.forceValue(*args[0]);
state.forceValue(*args[0], pos);
if (args[0]->type() == nAttrs) {
state.forceAttrs(*args[0], pos);
fetchers::Attrs attrs;
if (auto aType = args[0]->attrs->get(state.sType)) {
if (type)
throw Error({
.msg = hintfmt("unexpected attribute 'type'"),
.errPos = pos
});
type = state.forceStringNoCtx(*aType->value, *aType->pos);
} else if (!type)
throw Error({
.msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
.errPos = pos
});
attrs.emplace("type", type.value());
for (auto & attr : *args[0]->attrs) {
state.forceValue(*attr.value);
if (attr.value->type() == nPath || attr.value->type() == nString)
addURI(
state,
attrs,
attr.name,
state.coerceToString(*attr.pos, *attr.value, context, false, false)
);
else if (attr.value->type() == nString)
addURI(state, attrs, attr.name, attr.value->string.s);
if (attr.name == state.sType) continue;
state.forceValue(*attr.value, *attr.pos);
if (attr.value->type() == nPath || attr.value->type() == nString) {
auto s = state.coerceToString(*attr.pos, *attr.value, context, false, false);
attrs.emplace(attr.name,
attr.name == "url"
? type == "git"
? fixURIForGit(s, state)
: fixURI(s, state)
: s);
}
else if (attr.value->type() == nBool)
attrs.emplace(attr.name, Explicit<bool>{attr.value->boolean});
else if (attr.value->type() == nInt)
@ -110,26 +140,24 @@ static void fetchTree(
attr.name, showType(*attr.value));
}
if (type)
attrs.emplace("type", type.value());
if (!attrs.count("type"))
throw Error({
.hint = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
.errPos = pos
});
if (!params.allowNameArgument)
if (auto nameIter = attrs.find("name"); nameIter != attrs.end())
throw Error({
.msg = hintfmt("attribute 'name' isnt supported in call to 'fetchTree'"),
.errPos = pos
});
input = fetchers::Input::fromAttrs(std::move(attrs));
} else {
auto url = fixURI(state.coerceToString(pos, *args[0], context, false, false), state);
auto url = state.coerceToString(pos, *args[0], context, false, false);
if (type == "git") {
fetchers::Attrs attrs;
attrs.emplace("type", "git");
attrs.emplace("url", url);
attrs.emplace("url", fixURIForGit(url, state));
input = fetchers::Input::fromAttrs(std::move(attrs));
} else {
input = fetchers::Input::fromURL(url);
input = fetchers::Input::fromURL(fixURI(url, state));
}
}
@ -141,18 +169,18 @@ static void fetchTree(
auto [tree, input2] = input.fetch(state.store);
if (state.allowedPaths)
state.allowedPaths->insert(tree.actualPath);
state.allowPath(tree.storePath);
emitTreeAttrs(state, tree, input2, v, emptyRevFallback);
emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback, false);
}
static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
settings.requireExperimentalFeature("flakes");
fetchTree(state, pos, args, v, std::nullopt);
settings.requireExperimentalFeature(Xp::Flakes);
fetchTree(state, pos, args, v, std::nullopt, FetchTreeParams { .allowNameArgument = false });
}
// FIXME: document
static RegisterPrimOp primop_fetchTree("fetchTree", 1, prim_fetchTree);
static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
@ -161,7 +189,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
std::optional<std::string> url;
std::optional<Hash> expectedHash;
state.forceValue(*args[0]);
state.forceValue(*args[0], pos);
if (args[0]->type() == nAttrs) {
@ -177,14 +205,14 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to '%s'", attr.name, who),
.msg = hintfmt("unsupported argument '%s' to '%s'", attr.name, who),
.errPos = *attr.pos
});
}
if (!url)
throw EvalError({
.hint = hintfmt("'url' argument required"),
.msg = hintfmt("'url' argument required"),
.errPos = pos
});
} else
@ -205,20 +233,18 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath
: fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
auto path = state.store->toRealPath(storePath);
if (expectedHash) {
auto hash = unpack
? state.store->queryPathInfo(storePath)->narHash
: hashFile(htSHA256, path);
: hashFile(htSHA256, state.store->toRealPath(storePath));
if (hash != *expectedHash)
throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
}
if (state.allowedPaths)
state.allowedPaths->insert(path);
state.allowPath(storePath);
auto path = state.store->printStorePath(storePath);
mkString(v, path, PathSet({path}));
}
@ -261,13 +287,13 @@ static RegisterPrimOp primop_fetchTarball({
stdenv.mkDerivation { }
```
The fetched tarball is cached for a certain amount of time (1 hour
by default) in `~/.cache/nix/tarballs/`. You can change the cache
timeout either on the command line with `--option tarball-ttl number
of seconds` or in the Nix configuration file with this option: `
number of seconds to cache `.
The fetched tarball is cached for a certain amount of time (1
hour by default) in `~/.cache/nix/tarballs/`. You can change the
cache timeout either on the command line with `--tarball-ttl`
*number-of-seconds* or in the Nix configuration file by adding
the line `tarball-ttl = ` *number-of-seconds*.
Note that when obtaining the hash with ` nix-prefetch-url ` the
Note that when obtaining the hash with `nix-prefetch-url` the
option `--unpack` is required.
This function can also verify the contents against a hash. In that
@ -291,7 +317,7 @@ static RegisterPrimOp primop_fetchTarball({
static void prim_fetchGit(EvalState &state, const Pos &pos, Value **args, Value &v)
{
fetchTree(state, pos, args, v, "git", true);
fetchTree(state, pos, args, v, "git", FetchTreeParams { .emptyRevFallback = true, .allowNameArgument = true });
}
static RegisterPrimOp primop_fetchGit({
@ -302,17 +328,17 @@ static RegisterPrimOp primop_fetchGit({
of the repo at that URL is fetched. Otherwise, it can be an
attribute with the following attributes (all except `url` optional):
- url
- url\
The URL of the repo.
- name
- name\
The name of the directory the repo should be exported to in the
store. Defaults to the basename of the URL.
- rev
- rev\
The git revision to fetch. Defaults to the tip of `ref`.
- ref
- ref\
The git ref to look for the requested revision under. This is
often a branch or tag name. Defaults to `HEAD`.
@ -320,11 +346,11 @@ static RegisterPrimOp primop_fetchGit({
of Nix 2.3.0 Nix will not prefix `refs/heads/` if `ref` starts
with `refs/`.
- submodules
- submodules\
A Boolean parameter that specifies whether submodules should be
checked out. Defaults to `false`.
- allRefs
- allRefs\
Whether to fetch all refs of the repository. With this argument being
true, it's possible to load a `rev` from *any* `ref` (by default only
`rev`s from the specified `ref` are supported).
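Putting the attributes above together, a minimal invocation might look like this; the URL and branch are placeholders:

```nix
builtins.fetchGit {
  url = "https://example.org/project.git";
  ref = "main";
  submodules = true;
}
```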
@ -367,7 +393,7 @@ static RegisterPrimOp primop_fetchGit({
```
> **Note**
>
>
> It is nice to always specify the branch which a revision
> belongs to. Without the branch being specified, the fetcher
> might fail if the default branch changes. Additionally, it can
@ -404,12 +430,12 @@ static RegisterPrimOp primop_fetchGit({
```
> **Note**
>
>
> Nix will refetch the branch in accordance with
> the option `tarball-ttl`.
> **Note**
>
>
> This behavior is disabled in *Pure evaluation mode*.
)",
.fun = prim_fetchGit,

View file

@ -82,7 +82,7 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
visit(v, parser(tomlStream).parse());
} catch (std::runtime_error & e) {
throw EvalError({
.hint = hintfmt("while parsing a TOML string: %s", e.what()),
.msg = hintfmt("while parsing a TOML string: %s", e.what()),
.errPos = pos
});
}

View file

@ -42,7 +42,7 @@ static void showAttrs(EvalState & state, bool strict, bool location,
XMLAttrs xmlAttrs;
xmlAttrs["name"] = i;
if (location && a.pos != &noPos) posToXML(xmlAttrs, *a.pos);
if (location && a.pos != ptr(&noPos)) posToXML(xmlAttrs, *a.pos);
XMLOpenElement _(doc, "attr", xmlAttrs);
printValueAsXML(state, strict, location,
@ -135,7 +135,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
if (location) posToXML(xmlAttrs, v.lambda.fun->pos);
XMLOpenElement _(doc, "function", xmlAttrs);
if (v.lambda.fun->matchAttrs) {
if (v.lambda.fun->hasFormals()) {
XMLAttrs attrs;
if (!v.lambda.fun->arg.empty()) attrs["name"] = v.lambda.fun->arg;
if (v.lambda.fun->formals->ellipsis) attrs["ellipsis"] = "1";

View file

@ -341,6 +341,8 @@ public:
return internalType == tList1 ? 1 : internalType == tList2 ? 2 : bigList.size;
}
Pos determinePos(const Pos &pos) const;
/* Check whether forcing this value requires a trivial amount of
computation. In particular, function applications are
non-trivial. */

View file

@ -6,6 +6,8 @@
#include <nlohmann/json_fwd.hpp>
#include <optional>
namespace nix::fetchers {
typedef std::variant<std::string, uint64_t, Explicit<bool>> Attr;

View file

@ -132,7 +132,14 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
}
}
auto [tree, input] = scheme->fetch(store, *this);
auto [tree, input] = [&]() -> std::pair<Tree, Input> {
try {
return scheme->fetch(store, *this);
} catch (Error & e) {
e.addTrace({}, "while fetching the input '%s'", to_string());
throw;
}
}();
if (tree.actualPath == "")
tree.actualPath = store->toRealPath(tree.storePath);
@ -193,12 +200,17 @@ void Input::markChangedFile(
return scheme->markChangedFile(*this, file, commitMsg);
}
std::string Input::getName() const
{
return maybeGetStrAttr(attrs, "name").value_or("source");
}
StorePath Input::computeStorePath(Store & store) const
{
auto narHash = getNarHash();
if (!narHash)
throw Error("cannot compute store path for mutable input '%s'", to_string());
return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, "source");
return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, getName());
}
std::string Input::getType() const

View file

@ -38,6 +38,9 @@ struct Input
bool immutable = false;
bool direct = true;
/* path of the parent of this input, used for relative path resolution */
std::optional<Path> parent;
public:
static Input fromURL(const std::string & url);
@ -81,6 +84,8 @@ public:
std::string_view file,
std::optional<std::string> commitMsg) const;
std::string getName() const;
StorePath computeStorePath(Store & store) const;
// Convenience functions for common attributes.

View file

@ -4,13 +4,21 @@
#include "tarfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
#include "pathlocks.hh"
#include <sys/time.h>
#include <sys/wait.h>
using namespace std::string_literals;
namespace nix::fetchers {
// Explicit initial branch of our bare repo to suppress warnings from new version of git.
// The value itself does not matter, since we always fetch a specific revision or branch.
// It is set with `-c init.defaultBranch=` instead of `--initial-branch=` to stay compatible with
// old version of git, which will ignore unrecognized `-c` options.
const std::string gitInitialBranch = "__nix_dummy_branch";
static std::string readHead(const Path & path)
{
return chomp(runProgram("git", true, { "-C", path, "rev-parse", "--abbrev-ref", "HEAD" }));
@ -59,7 +67,7 @@ struct GitInputScheme : InputScheme
if (maybeGetStrAttr(attrs, "type") != "git") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs")
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs" && name != "name")
throw Error("unsupported Git input attribute '%s'", name);
parseURL(getStrAttr(attrs, "url"));
@ -153,21 +161,23 @@ struct GitInputScheme : InputScheme
std::pair<bool, std::string> getActualUrl(const Input & input) const
{
// Don't clone file:// URIs (but otherwise treat them the
// same as remote URIs, i.e. don't use the working tree or
// HEAD).
// file:// URIs are normally not cloned (but otherwise treated the
// same as remote URIs, i.e. we don't use the working tree or
// HEAD). Exception: If _NIX_FORCE_HTTP is set, or the repo is a bare git
// repo, treat as a remote URI to force a clone.
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
auto url = parseURL(getStrAttr(input.attrs, "url"));
bool isLocal = url.scheme == "file" && !forceHttp;
bool isBareRepository = url.scheme == "file" && !pathExists(url.path + "/.git");
bool isLocal = url.scheme == "file" && !forceHttp && !isBareRepository;
return {isLocal, isLocal ? url.path : url.base};
}
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
{
auto name = "source";
Input input(_input);
std::string name = input.getName();
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
bool allRefs = maybeGetBoolAttr(input.attrs, "allRefs").value_or(false);
@ -267,7 +277,7 @@ struct GitInputScheme : InputScheme
return files.count(file);
};
auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
auto storePath = store->addToStore(input.getName(), actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
// FIXME: maybe we should use the timestamp of the last
// modified dirty file?
@ -314,11 +324,17 @@ struct GitInputScheme : InputScheme
Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
repoDir = cacheDir;
Path cacheDirLock = cacheDir + ".lock";
createDirs(dirOf(cacheDir));
AutoCloseFD lock = openLockFile(cacheDirLock, true);
lockFile(lock.get(), ltWrite, true);
if (!pathExists(cacheDir)) {
createDirs(dirOf(cacheDir));
runProgram("git", true, { "init", "--bare", repoDir });
runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", "--bare", repoDir });
}
deleteLockFile(cacheDirLock, lock.get());
Path localRefFile =
input.getRef()->compare(0, 5, "refs/") == 0
? cacheDir + "/" + *input.getRef()
@ -363,7 +379,9 @@ struct GitInputScheme : InputScheme
? "refs/*"
: ref->compare(0, 5, "refs/") == 0
? *ref
: "refs/heads/" + *ref;
: ref == "HEAD"
? *ref
: "refs/heads/" + *ref;
runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) });
} catch (Error & e) {
if (!pathExists(localRefFile)) throw;
@ -401,17 +419,14 @@ struct GitInputScheme : InputScheme
AutoDelete delTmpDir(tmpDir, true);
PathFilter filter = defaultPathFilter;
RunOptions checkCommitOpts(
"git",
{ "-C", repoDir, "cat-file", "commit", input.getRev()->gitRev() }
);
checkCommitOpts.searchPath = true;
checkCommitOpts.mergeStderrToStdout = true;
auto result = runProgram(checkCommitOpts);
auto result = runProgram(RunOptions {
.program = "git",
.args = { "-C", repoDir, "cat-file", "commit", input.getRev()->gitRev() },
.mergeStderrToStdout = true
});
if (WEXITSTATUS(result.first) == 128
&& result.second.find("bad file") != std::string::npos
) {
&& result.second.find("bad file") != std::string::npos)
{
throw Error(
"Cannot find Git revision '%s' in ref '%s' of repository '%s'! "
"Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the "
@ -427,7 +442,7 @@ struct GitInputScheme : InputScheme
Path tmpGitDir = createTempDir();
AutoDelete delTmpGitDir(tmpGitDir, true);
runProgram("git", true, { "init", tmpDir, "--separate-git-dir", tmpGitDir });
runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", tmpDir, "--separate-git-dir", tmpGitDir });
// TODO: repoDir might lack the ref (it only checks if rev
// exists, see FIXME above) so use a big hammer and fetch
// everything to ensure we get the rev.
@ -443,9 +458,11 @@ struct GitInputScheme : InputScheme
// FIXME: should pipe this, or find some better way to extract a
// revision.
auto source = sinkToSource([&](Sink & sink) {
RunOptions gitOptions("git", { "-C", repoDir, "archive", input.getRev()->gitRev() });
gitOptions.standardOut = &sink;
runProgram2(gitOptions);
runProgram2({
.program = "git",
.args = { "-C", repoDir, "archive", input.getRev()->gitRev() },
.standardOut = &sink
});
});
unpackTarfile(*source, tmpDir);

View file

@ -37,15 +37,29 @@ struct GitArchiveInputScheme : InputScheme
std::optional<std::string> ref;
std::optional<std::string> host_url;
if (path.size() == 2) {
} else if (path.size() == 3) {
auto size = path.size();
if (size == 3) {
if (std::regex_match(path[2], revRegex))
rev = Hash::parseAny(path[2], htSHA1);
else if (std::regex_match(path[2], refRegex))
ref = path[2];
else
throw BadURL("in URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[2]);
} else
} else if (size > 3) {
std::string rs;
for (auto i = std::next(path.begin(), 2); i != path.end(); i++) {
rs += *i;
if (std::next(i) != path.end()) {
rs += "/";
}
}
if (std::regex_match(rs, refRegex)) {
ref = rs;
} else {
throw BadURL("in URL '%s', '%s' is not a branch/tag name", url.url, rs);
}
} else if (size < 2)
throw BadURL("URL '%s' is invalid", url.url);
for (auto &[name, value] : url.query) {
@ -193,7 +207,7 @@ struct GitArchiveInputScheme : InputScheme
auto url = getDownloadUrl(input);
auto [tree, lastModified] = downloadTarball(store, url.url, "source", true, url.headers);
auto [tree, lastModified] = downloadTarball(store, url.url, input.getName(), true, url.headers);
input.attrs.insert_or_assign("lastModified", uint64_t(lastModified));
@ -259,9 +273,9 @@ struct GitHubInputScheme : GitArchiveInputScheme
void clone(const Input & input, const Path & destDir) override
{
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
Input::fromURL(fmt("git+https://%s/%s/%s.git",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
.applyOverrides(input.getRef(), input.getRev())
.clone(destDir);
}
};
@ -327,9 +341,9 @@ struct GitLabInputScheme : GitArchiveInputScheme
{
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
// FIXME: get username somewhere
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
Input::fromURL(fmt("git+https://%s/%s/%s.git",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
.applyOverrides(input.getRef(), input.getRev())
.clone(destDir);
}
};
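For illustration, a minimal standalone sketch of the path re-joining that the new GitArchiveInputScheme branch performs: segments after owner/repo are glued back together with "/" so multi-segment refs parse as a single branch or tag name. The owner, repo and ref values below are made up for the example.

#include <iostream>
#include <string>
#include <vector>

// Re-join URL path segments after "owner/repo", mirroring the loop above.
static std::string joinRef(const std::vector<std::string> & path)
{
    std::string rs;
    for (size_t i = 2; i < path.size(); ++i) {
        rs += path[i];
        if (i + 1 < path.size()) rs += "/";
    }
    return rs;
}

int main()
{
    std::vector<std::string> path{"NixOS", "nix", "release", "1.0"};
    std::cout << joinRef(path) << "\n"; // prints "release/1.0"
}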

View file

@ -8,4 +8,6 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc)
libfetchers_CXXFLAGS += -I src/libutil -I src/libstore
libfetchers_LDFLAGS += -pthread
libfetchers_LIBS = libutil libstore

View file

@ -11,34 +11,32 @@ using namespace std::string_literals;
namespace nix::fetchers {
namespace {
static RunOptions hgOptions(const Strings & args)
{
auto env = getEnv();
// Set HGPLAIN: this means we get consistent output from hg and avoids leakage from a user or system .hgrc.
env["HGPLAIN"] = "";
RunOptions hgOptions(const Strings & args) {
RunOptions opts("hg", args);
opts.searchPath = true;
auto env = getEnv();
// Set HGPLAIN: this means we get consistent output from hg and avoids leakage from a user or system .hgrc.
env["HGPLAIN"] = "";
opts.environment = env;
return opts;
return {
.program = "hg",
.searchPath = true,
.args = args,
.environment = env
};
}
// runProgram wrapper that uses hgOptions instead of stock RunOptions.
string runHg(const Strings & args, const std::optional<std::string> & input = {})
static string runHg(const Strings & args, const std::optional<std::string> & input = {})
{
RunOptions opts = hgOptions(args);
opts.input = input;
RunOptions opts = hgOptions(args);
opts.input = input;
auto res = runProgram(opts);
auto res = runProgram(std::move(opts));
if (!statusOk(res.first))
throw ExecError(res.first, fmt("hg %1%", statusToString(res.first)));
return res.second;
}
if (!statusOk(res.first))
throw ExecError(res.first, fmt("hg %1%", statusToString(res.first)));
return res.second;
}
struct MercurialInputScheme : InputScheme
@ -74,7 +72,7 @@ struct MercurialInputScheme : InputScheme
if (maybeGetStrAttr(attrs, "type") != "hg") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "revCount" && name != "narHash")
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "revCount" && name != "narHash" && name != "name")
throw Error("unsupported Mercurial input attribute '%s'", name);
parseURL(getStrAttr(attrs, "url"));
@ -147,10 +145,10 @@ struct MercurialInputScheme : InputScheme
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
{
auto name = "source";
Input input(_input);
auto name = input.getName();
auto [isLocal, actualUrl_] = getActualUrl(input);
auto actualUrl = actualUrl_; // work around clang bug
@ -193,7 +191,7 @@ struct MercurialInputScheme : InputScheme
return files.count(file);
};
auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
auto storePath = store->addToStore(input.getName(), actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
return {
Tree(store->toRealPath(storePath), std::move(storePath)),
@ -253,9 +251,7 @@ struct MercurialInputScheme : InputScheme
have to pull again. */
if (!(input.getRev()
&& pathExists(cacheDir)
&& runProgram(
hgOptions({ "log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1" })
.killStderr(true)).second == "1"))
&& runProgram(hgOptions({ "log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1" })).second == "1"))
{
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl));
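A standalone sketch of the HGPLAIN idea used by hgOptions above, assuming a POSIX system with Mercurial installed; it does not use Nix's RunOptions, only the environment trick: setting HGPLAIN (even to an empty value) makes hg ignore user and system .hgrc customisations so its output stays machine-parseable.

#include <cstdlib>

int main()
{
    setenv("HGPLAIN", "", 1);   // empty value is enough to enable plain mode
    std::system("hg version");  // any hg invocation now produces stable output
    return 0;
}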

View file

@ -20,10 +20,10 @@ struct PathInputScheme : InputScheme
if (name == "rev" || name == "narHash")
input.attrs.insert_or_assign(name, value);
else if (name == "revCount" || name == "lastModified") {
uint64_t n;
if (!string2Int(value, n))
if (auto n = string2Int<uint64_t>(value))
input.attrs.insert_or_assign(name, *n);
else
throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
input.attrs.insert_or_assign(name, n);
}
else
throw Error("path URL '%s' has unsupported parameter '%s'", url.to_string(), name);
@ -82,18 +82,38 @@ struct PathInputScheme : InputScheme
std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
{
std::string absPath;
auto path = getStrAttr(input.attrs, "path");
// FIXME: check whether access to 'path' is allowed.
if (path[0] != '/') {
if (!input.parent)
throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
auto storePath = store->maybeParseStorePath(path);
auto parent = canonPath(*input.parent);
// the path isn't relative, prefix it
absPath = nix::absPath(path, parent);
// for security, ensure that if the parent is a store path, it's inside it
if (store->isInStore(parent)) {
auto storePath = store->printStorePath(store->toStorePath(parent).first);
if (!isInDir(absPath, storePath))
throw BadStorePath("relative path '%s' points outside of its parent's store path '%s'", path, storePath);
}
} else
absPath = path;
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying '%s'", absPath));
// FIXME: check whether access to 'path' is allowed.
auto storePath = store->maybeParseStorePath(absPath);
if (storePath)
store->addTempRoot(*storePath);
if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath))
// FIXME: try to substitute storePath.
storePath = store->addToStore("source", path);
storePath = store->addToStore("source", absPath);
return {
Tree(store->toRealPath(*storePath), std::move(*storePath)),
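A sketch of the containment rule enforced above for relative path inputs: once the path is resolved against its parent, it must stay inside the parent's store path. The helper below is a local stand-in written for the example, not Nix's own isInDir.

#include <cassert>
#include <string>

// True if 'path' lies strictly inside directory 'dir'.
static bool isInDir(const std::string & path, const std::string & dir)
{
    return path.size() > dir.size()
        && path.compare(0, dir.size(), dir) == 0
        && path[dir.size()] == '/';
}

int main()
{
    std::string storePath = "/nix/store/aaaa-source";
    assert(isInDir("/nix/store/aaaa-source/flake.nix", storePath));   // allowed
    assert(!isInDir("/nix/store/bbbb-other/secret", storePath));      // rejected
}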

View file

@ -114,7 +114,7 @@ static std::shared_ptr<Registry> getSystemRegistry()
Path getUserRegistryPath()
{
return getHome() + "/.config/nix/registry.json";
return getConfigDir() + "/nix/registry.json";
}
std::shared_ptr<Registry> getUserRegistry()
@ -124,6 +124,13 @@ std::shared_ptr<Registry> getUserRegistry()
return userRegistry;
}
std::shared_ptr<Registry> getCustomRegistry(const Path & p)
{
static auto customRegistry =
Registry::read(p, Registry::Custom);
return customRegistry;
}
static std::shared_ptr<Registry> flagRegistry =
std::make_shared<Registry>(Registry::Flag);

View file

@ -14,6 +14,7 @@ struct Registry
User = 1,
System = 2,
Global = 3,
Custom = 4,
};
RegistryType type;
@ -48,6 +49,8 @@ typedef std::vector<std::shared_ptr<Registry>> Registries;
std::shared_ptr<Registry> getUserRegistry();
std::shared_ptr<Registry> getCustomRegistry(const Path & p);
Path getUserRegistryPath();
Registries getRegistries(ref<Store> store);
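A small sketch of the memoization pattern getCustomRegistry (and getUserRegistry) relies on: the function-local static means the registry file is parsed only on the first call, and later calls reuse that object even if a different path is passed. The Registry struct here is a stand-in for illustration.

#include <iostream>
#include <memory>
#include <string>

struct Registry { std::string path; };

std::shared_ptr<Registry> getCustomRegistry(const std::string & p)
{
    // Initialised exactly once, on the first call.
    static auto customRegistry = std::make_shared<Registry>(Registry{p});
    return customRegistry;
}

int main()
{
    std::cout << getCustomRegistry("/etc/registry.json")->path << "\n"; // parsed once
    std::cout << getCustomRegistry("/tmp/other.json")->path << "\n";    // still the first path
}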

View file

@ -64,7 +64,6 @@ DownloadFileResult downloadFile(
if (res.cached) {
assert(cached);
assert(request.expectedETag == res.etag);
storePath = std::move(cached->storePath);
} else {
StringSink sink;
@ -179,7 +178,8 @@ struct TarballInputScheme : InputScheme
&& !hasSuffix(url.path, ".tar")
&& !hasSuffix(url.path, ".tar.gz")
&& !hasSuffix(url.path, ".tar.xz")
&& !hasSuffix(url.path, ".tar.bz2"))
&& !hasSuffix(url.path, ".tar.bz2")
&& !hasSuffix(url.path, ".tar.zst"))
return {};
Input input;
@ -196,7 +196,7 @@ struct TarballInputScheme : InputScheme
if (maybeGetStrAttr(attrs, "type") != "tarball") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && /* name != "hash" && */ name != "narHash")
if (name != "type" && name != "url" && /* name != "hash" && */ name != "narHash" && name != "name")
throw Error("unsupported tarball input attribute '%s'", name);
Input input;
@ -226,7 +226,7 @@ struct TarballInputScheme : InputScheme
std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
{
auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), "source", false).first;
auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), input.getName(), false).first;
return {std::move(tree), input};
}
};

View file

@ -10,25 +10,28 @@ MixCommonArgs::MixCommonArgs(const string & programName)
addFlag({
.longName = "verbose",
.shortName = 'v',
.description = "increase verbosity level",
.description = "Increase the logging verbosity level.",
.category = loggingCategory,
.handler = {[]() { verbosity = (Verbosity) (verbosity + 1); }},
});
addFlag({
.longName = "quiet",
.description = "decrease verbosity level",
.description = "Decrease the logging verbosity level.",
.category = loggingCategory,
.handler = {[]() { verbosity = verbosity > lvlError ? (Verbosity) (verbosity - 1) : lvlError; }},
});
addFlag({
.longName = "debug",
.description = "enable debug output",
.description = "Set the logging verbosity level to 'debug'.",
.category = loggingCategory,
.handler = {[]() { verbosity = lvlDebug; }},
});
addFlag({
.longName = "option",
.description = "set a Nix configuration option (overriding `nix.conf`)",
.description = "Set the Nix configuration setting *name* to *value* (overriding `nix.conf`).",
.labels = {"name", "value"},
.handler = {[](std::string name, std::string value) {
try {
@ -51,8 +54,8 @@ MixCommonArgs::MixCommonArgs(const string & programName)
addFlag({
.longName = "log-format",
.description = "format of log output; `raw`, `internal-json`, `bar` "
"or `bar-with-logs`",
.description = "Set the format of log output; one of `raw`, `internal-json`, `bar` or `bar-with-logs`.",
.category = loggingCategory,
.labels = {"format"},
.handler = {[](std::string format) { setLogFormat(format); }},
});
@ -60,14 +63,14 @@ MixCommonArgs::MixCommonArgs(const string & programName)
addFlag({
.longName = "max-jobs",
.shortName = 'j',
.description = "maximum number of parallel builds",
.description = "The maximum number of parallel builds.",
.labels = Strings{"jobs"},
.handler = {[=](std::string s) {
settings.set("max-jobs", s);
}}
});
std::string cat = "config";
std::string cat = "Options to override configuration settings";
globalConfig.convertToArgs(*this, cat);
// Backward compatibility hack: nix-env already had a --system flag.
@ -76,4 +79,11 @@ MixCommonArgs::MixCommonArgs(const string & programName)
hiddenCategories.insert(cat);
}
void MixCommonArgs::initialFlagsProcessed()
{
initPlugins();
pluginsInited();
}
}
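For context, a standalone sketch of the designated-initializer flag style these hunks convert to; the Flag struct below is a simplified stand-in, not Nix's actual Args API, and it needs C++20 designated initializers (GCC and Clang also accept them in earlier modes as an extension).

#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct Flag {
    std::string longName;
    char shortName = 0;
    std::string description;
    std::vector<std::string> labels;
    std::function<void(std::string)> handler;
};

int main()
{
    Flag maxJobs {
        .longName = "max-jobs",
        .shortName = 'j',
        .description = "The maximum number of parallel builds.",
        .labels = {"jobs"},
        .handler = [](std::string s) { std::cout << "max-jobs = " << s << "\n"; },
    };
    maxJobs.handler("8");
}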

View file

@ -4,10 +4,17 @@
namespace nix {
struct MixCommonArgs : virtual Args
//static constexpr auto commonArgsCategory = "Miscellaneous common options";
static constexpr auto loggingCategory = "Logging-related options";
class MixCommonArgs : public virtual Args
{
void initialFlagsProcessed() override;
public:
string programName;
MixCommonArgs(const string & programName);
protected:
virtual void pluginsInited() {}
};
struct MixDryRun : virtual Args
@ -16,7 +23,12 @@ struct MixDryRun : virtual Args
MixDryRun()
{
mkFlag(0, "dry-run", "show what this command would do without doing it", &dryRun);
addFlag({
.longName = "dry-run",
.description = "Show what this command would do without doing it.",
//.category = commonArgsCategory,
.handler = {&dryRun, true},
});
}
};
@ -26,7 +38,12 @@ struct MixJSON : virtual Args
MixJSON()
{
mkFlag(0, "json", "produce JSON output", &json);
addFlag({
.longName = "json",
.description = "Produce output in JSON format, suitable for consumption by another program.",
//.category = commonArgsCategory,
.handler = {&json, true},
});
}
};

View file

@ -8,10 +8,10 @@ libmain_SOURCES := $(wildcard $(d)/*.cc)
libmain_CXXFLAGS += -I src/libutil -I src/libstore
libmain_LDFLAGS = $(OPENSSL_LIBS)
libmain_LDFLAGS += $(OPENSSL_LIBS)
libmain_LIBS = libstore libutil
libmain_ALLOW_UNDEFINED = 1
$(eval $(call install-file-in, $(d)/nix-main.pc, $(prefix)/lib/pkgconfig, 0644))
$(eval $(call install-file-in, $(d)/nix-main.pc, $(libdir)/pkgconfig, 0644))

View file

@ -362,6 +362,7 @@ public:
void log(Verbosity lvl, const FormatOrString & fs) override
{
if (lvl > verbosity) return;
auto state(state_.lock());
log(*state, lvl, fs.s);
}
@ -673,7 +674,7 @@ public:
return
ANSI_RED + repeat("", chars1) +
ANSI_GREEN + repeat("", chars2 - chars1) +
ANSI_YELLOW + repeat("", chars3 - chars2) +
ANSI_WARNING + repeat("", chars3 - chars2) +
ANSI_NORMAL + repeat("", barLength - chars3);
};
@ -862,12 +863,7 @@ public:
Logger * makeProgressBar()
{
return new ProgressBar(
isatty(STDIN_FILENO)
&& isatty(STDOUT_FILENO)
&& isatty(STDERR_FILENO)
&& getEnv("TERM").value_or("dumb") != "dumb"
);
return new ProgressBar(shouldANSI() && isatty(STDIN_FILENO));
}
void stopProgressBar()

View file

@ -15,9 +15,14 @@
#include <sys/stat.h>
#include <unistd.h>
#include <signal.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netdb.h>
#include <openssl/crypto.h>
#include <sodium.h>
namespace nix {
@ -34,7 +39,7 @@ void printGCWarning()
}
void printMissing(ref<Store> store, const std::vector<StorePathWithOutputs> & paths, Verbosity lvl)
void printMissing(ref<Store> store, const std::vector<DerivedPath> & paths, Verbosity lvl)
{
uint64_t downloadSize, narSize;
StorePathSet willBuild, willSubstitute, unknown;
@ -108,6 +113,31 @@ static void opensslLockCallback(int mode, int type, const char * file, int line)
}
#endif
static std::once_flag dns_resolve_flag;
static void preloadNSS() {
/* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of
one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already
been loaded in the parent. So we force a lookup of an invalid domain to force the NSS machinery to
load its lookup libraries in the parent before any child gets a chance to. */
std::call_once(dns_resolve_flag, []() {
struct addrinfo *res = NULL;
/* nss will only force the "local" (not through nscd) dns resolution if it's on the LOCALDOMAIN.
We need the resolution to be done locally, as nscd socket will not be accessible in the
sandbox. */
char * previous_env = getenv("LOCALDOMAIN");
setenv("LOCALDOMAIN", "invalid", 1);
if (getaddrinfo("this.pre-initializes.the.dns.resolvers.invalid.", "http", NULL, &res) == 0) {
if (res) freeaddrinfo(res);
}
if (previous_env) {
setenv("LOCALDOMAIN", previous_env, 1);
} else {
unsetenv("LOCALDOMAIN");
}
});
}
static void sigHandler(int signo) { }
@ -126,6 +156,9 @@ void initNix()
CRYPTO_set_locking_callback(opensslLockCallback);
#endif
if (sodium_init() == -1)
throw Error("could not initialise libsodium");
loadConfFile();
startSignalHandlerThread();
@ -171,6 +204,8 @@ void initNix()
if (hasPrefix(getEnv("TMPDIR").value_or("/tmp"), "/var/folders/"))
unsetenv("TMPDIR");
#endif
preloadNSS();
}
@ -181,50 +216,64 @@ LegacyArgs::LegacyArgs(const std::string & programName,
addFlag({
.longName = "no-build-output",
.shortName = 'Q',
.description = "do not show build output",
.description = "Do not show build output.",
.handler = {[&]() {setLogFormat(LogFormat::raw); }},
});
addFlag({
.longName = "keep-failed",
.shortName ='K',
.description = "keep temporary directories of failed builds",
.description = "Keep temporary directories of failed builds.",
.handler = {&(bool&) settings.keepFailed, true},
});
addFlag({
.longName = "keep-going",
.shortName ='k',
.description = "keep going after a build fails",
.description = "Keep going after a build fails.",
.handler = {&(bool&) settings.keepGoing, true},
});
addFlag({
.longName = "fallback",
.description = "build from source if substitution fails",
.description = "Build from source if substitution fails.",
.handler = {&(bool&) settings.tryFallback, true},
});
auto intSettingAlias = [&](char shortName, const std::string & longName,
const std::string & description, const std::string & dest) {
mkFlag<unsigned int>(shortName, longName, description, [=](unsigned int n) {
settings.set(dest, std::to_string(n));
const std::string & description, const std::string & dest)
{
addFlag({
.longName = longName,
.shortName = shortName,
.description = description,
.labels = {"n"},
.handler = {[=](std::string s) {
auto n = string2IntWithUnitPrefix<uint64_t>(s);
settings.set(dest, std::to_string(n));
}}
});
};
intSettingAlias(0, "cores", "maximum number of CPU cores to use inside a build", "cores");
intSettingAlias(0, "max-silent-time", "number of seconds of silence before a build is killed", "max-silent-time");
intSettingAlias(0, "timeout", "number of seconds before a build is killed", "timeout");
intSettingAlias(0, "cores", "Maximum number of CPU cores to use inside a build.", "cores");
intSettingAlias(0, "max-silent-time", "Number of seconds of silence before a build is killed.", "max-silent-time");
intSettingAlias(0, "timeout", "Number of seconds before a build is killed.", "timeout");
mkFlag(0, "readonly-mode", "do not write to the Nix store",
&settings.readOnlyMode);
addFlag({
.longName = "readonly-mode",
.description = "Do not write to the Nix store.",
.handler = {&settings.readOnlyMode, true},
});
mkFlag(0, "no-gc-warning", "disable warning about not using '--add-root'",
&gcWarning, false);
addFlag({
.longName = "no-gc-warning",
.description = "Disable warnings about not using `--add-root`.",
.handler = {&gcWarning, false},
});
addFlag({
.longName = "store",
.description = "URI of the Nix store to use",
.description = "The URL of the Nix store to use.",
.labels = {"store-uri"},
.handler = {&(std::string&) settings.storeUri},
});
@ -274,9 +323,7 @@ void printVersion(const string & programName)
#if HAVE_BOEHMGC
cfg.push_back("gc");
#endif
#if HAVE_SODIUM
cfg.push_back("signed-caches");
#endif
std::cout << "System type: " << settings.thisSystem << "\n";
std::cout << "Additional system types: " << concatStringsSep(", ", settings.extraPlatforms.get()) << "\n";
std::cout << "Features: " << concatStringsSep(", ", cfg) << "\n";
@ -293,7 +340,7 @@ void printVersion(const string & programName)
void showManPage(const string & name)
{
restoreSignals();
restoreProcessContext();
setenv("MANPATH", settings.nixManDir.c_str(), 1);
execlp("man", "man", name.c_str(), nullptr);
throw SysError("command 'man %1%' failed", name.c_str());
@ -356,7 +403,7 @@ RunPager::RunPager()
throw SysError("dupping stdin");
if (!getenv("LESS"))
setenv("LESS", "FRSXMK", 1);
restoreSignals();
restoreProcessContext();
if (pager)
execl("/bin/sh", "sh", "-c", pager, nullptr);
execlp("pager", "pager", nullptr);

View file

@ -4,6 +4,7 @@
#include "args.hh"
#include "common-args.hh"
#include "path.hh"
#include "derived-path.hh"
#include <signal.h>
@ -42,7 +43,7 @@ struct StorePathWithOutputs;
void printMissing(
ref<Store> store,
const std::vector<StorePathWithOutputs> & paths,
const std::vector<DerivedPath> & paths,
Verbosity lvl = lvlInfo);
void printMissing(ref<Store> store, const StorePathSet & willBuild,
@ -57,23 +58,7 @@ template<class N> N getIntArg(const string & opt,
{
++i;
if (i == end) throw UsageError("'%1%' requires an argument", opt);
string s = *i;
N multiplier = 1;
if (allowUnit && !s.empty()) {
char u = std::toupper(*s.rbegin());
if (std::isalpha(u)) {
if (u == 'K') multiplier = 1ULL << 10;
else if (u == 'M') multiplier = 1ULL << 20;
else if (u == 'G') multiplier = 1ULL << 30;
else if (u == 'T') multiplier = 1ULL << 40;
else throw UsageError("invalid unit specifier '%1%'", u);
s.resize(s.size() - 1);
}
}
N n;
if (!string2Int(s, n))
throw UsageError("'%1%' requires an integer argument", opt);
return n * multiplier;
return string2IntWithUnitPrefix<N>(*i);
}
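A sketch of the unit-prefix parsing that getIntArg previously inlined and now delegates to string2IntWithUnitPrefix; the helper below reproduces the removed logic (a trailing K/M/G/T scales the value by a binary multiplier) and is only assumed to match the new function's behaviour.

#include <cctype>
#include <cstdint>
#include <iostream>
#include <stdexcept>
#include <string>

static uint64_t parseWithUnitPrefix(std::string s)
{
    uint64_t multiplier = 1;
    if (!s.empty() && std::isalpha((unsigned char) s.back())) {
        switch (std::toupper((unsigned char) s.back())) {
            case 'K': multiplier = 1ULL << 10; break;
            case 'M': multiplier = 1ULL << 20; break;
            case 'G': multiplier = 1ULL << 30; break;
            case 'T': multiplier = 1ULL << 40; break;
            default: throw std::runtime_error("invalid unit specifier");
        }
        s.pop_back();
    }
    return std::stoull(s) * multiplier;
}

int main()
{
    std::cout << parseWithUnitPrefix("64M") << "\n"; // 67108864
}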

View file

@ -52,9 +52,9 @@ void BinaryCacheStore::init()
throw Error("binary cache '%s' is for Nix stores with prefix '%s', not '%s'",
getUri(), value, storeDir);
} else if (name == "WantMassQuery") {
wantMassQuery.setDefault(value == "1" ? "true" : "false");
wantMassQuery.setDefault(value == "1");
} else if (name == "Priority") {
priority.setDefault(fmt("%d", std::stoi(value)));
priority.setDefault(std::stoi(value));
}
}
}
@ -111,15 +111,15 @@ void BinaryCacheStore::writeNarInfo(ref<NarInfo> narInfo)
upsertFile(narInfoFile, narInfo->to_string(*this), "text/x-nix-narinfo");
std::string hashPart(narInfo->path.hashPart());
{
auto state_(state.lock());
state_->pathInfoCache.upsert(hashPart, PathInfoCacheValue { .value = std::shared_ptr<NarInfo>(narInfo) });
state_->pathInfoCache.upsert(
std::string(narInfo->path.to_string()),
PathInfoCacheValue { .value = std::shared_ptr<NarInfo>(narInfo) });
}
if (diskCache)
diskCache->upsertNarInfo(getUri(), hashPart, std::shared_ptr<NarInfo>(narInfo));
diskCache->upsertNarInfo(getUri(), std::string(narInfo->path.hashPart()), std::shared_ptr<NarInfo>(narInfo));
}
AutoCloseFD openFile(const Path & path)
@ -130,17 +130,6 @@ AutoCloseFD openFile(const Path & path)
return fd;
}
struct FileSource : FdSource
{
AutoCloseFD fd2;
FileSource(const Path & path)
: fd2(openFile(path))
{
fd = fd2.get();
}
};
ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs,
std::function<ValidPathInfo(HashResult)> mkInfo)
@ -160,7 +149,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
{
FdSink fileSink(fdTemp.get());
TeeSink teeSinkCompressed { fileSink, fileHashSink };
auto compressionSink = makeCompressionSink(compression, teeSinkCompressed);
auto compressionSink = makeCompressionSink(compression, teeSinkCompressed, parallelCompression, compressionLevel);
TeeSink teeSinkUncompressed { *compressionSink, narHashSink };
TeeSource teeSource { narSource, teeSinkUncompressed };
narAccessor = makeNarAccessor(teeSource);
@ -179,6 +168,9 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar"
+ (compression == "xz" ? ".xz" :
compression == "bzip2" ? ".bz2" :
compression == "zstd" ? ".zst" :
compression == "lzip" ? ".lzip" :
compression == "lz4" ? ".lz4" :
compression == "br" ? ".br" :
"");
@ -447,18 +439,43 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
std::optional<const Realisation> BinaryCacheStore::queryRealisation(const DrvOutput & id)
{
if (diskCache) {
auto [cacheOutcome, maybeCachedRealisation] =
diskCache->lookupRealisation(getUri(), id);
switch (cacheOutcome) {
case NarInfoDiskCache::oValid:
debug("Returning a cached realisation for %s", id.to_string());
return *maybeCachedRealisation;
case NarInfoDiskCache::oInvalid:
debug("Returning a cached missing realisation for %s", id.to_string());
return {};
case NarInfoDiskCache::oUnknown:
break;
}
}
auto outputInfoFilePath = realisationsPrefix + "/" + id.to_string() + ".doi";
auto rawOutputInfo = getFile(outputInfoFilePath);
if (rawOutputInfo) {
return {Realisation::fromJSON(
nlohmann::json::parse(*rawOutputInfo), outputInfoFilePath)};
auto realisation = Realisation::fromJSON(
nlohmann::json::parse(*rawOutputInfo), outputInfoFilePath);
if (diskCache)
diskCache->upsertRealisation(
getUri(), realisation);
return {realisation};
} else {
if (diskCache)
diskCache->upsertAbsentRealisation(getUri(), id);
return std::nullopt;
}
}
void BinaryCacheStore::registerDrvOutput(const Realisation& info) {
if (diskCache)
diskCache->upsertRealisation(getUri(), info);
auto filePath = realisationsPrefix + "/" + info.id.to_string() + ".doi";
upsertFile(filePath, info.toJSON().dump(), "application/json");
}
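A compact sketch of the three-way disk-cache outcome that queryRealisation now distinguishes: a cached hit is returned directly, a cached "known absent" entry short-circuits to "not found", and an unknown entry falls through to the remote lookup (whose result is then upserted into the cache). Types and strings below are illustrative stand-ins.

#include <iostream>
#include <optional>
#include <string>

enum class Outcome { Valid, Invalid, Unknown };

std::optional<std::string> lookup(Outcome cacheOutcome)
{
    switch (cacheOutcome) {
        case Outcome::Valid:   return "cached realisation"; // use the cache
        case Outcome::Invalid: return std::nullopt;         // cached "does not exist"
        case Outcome::Unknown: break;                       // fall through to remote query
    }
    return "fetched from binary cache";                     // then written back to the cache
}

int main()
{
    std::cout << lookup(Outcome::Unknown).value() << "\n";
}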

View file

@ -15,13 +15,17 @@ struct BinaryCacheStoreConfig : virtual StoreConfig
{
using StoreConfig::StoreConfig;
const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression", "NAR compression method ('xz', 'bzip2', or 'none')"};
const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression", "NAR compression method ('xz', 'bzip2', 'gzip', 'zstd', or 'none')"};
const Setting<bool> writeNARListing{(StoreConfig*) this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"};
const Setting<bool> writeDebugInfo{(StoreConfig*) this, false, "index-debug-info", "whether to index DWARF debug info files by build ID"};
const Setting<Path> secretKeyFile{(StoreConfig*) this, "", "secret-key", "path to secret key used to sign the binary cache"};
const Setting<Path> localNarCache{(StoreConfig*) this, "", "local-nar-cache", "path to a local cache of NARs"};
const Setting<bool> parallelCompression{(StoreConfig*) this, false, "parallel-compression",
"enable multi-threading compression, available for xz only currently"};
"enable multi-threading compression for NARs, available for xz and zstd only currently"};
const Setting<int> compressionLevel{(StoreConfig*) this, -1, "compression-level",
"specify 'preset level' of compression to be used with NARs: "
"meaning and accepted range of values depends on compression method selected, "
"other than -1 which we reserve to indicate Nix defaults should be used"};
};
class BinaryCacheStore : public virtual BinaryCacheStoreConfig, public virtual Store
@ -34,7 +38,7 @@ private:
protected:
// The prefix under which realisation infos will be stored
const std::string realisationsPrefix = "/realisations";
const std::string realisationsPrefix = "realisations";
BinaryCacheStore(const Params & params);
@ -108,13 +112,6 @@ public:
void narFromPath(const StorePath & path, Sink & sink) override;
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode) override
{ unsupported("buildDerivation"); }
void ensurePath(const StorePath & path) override
{ unsupported("ensurePath"); }
ref<FSAccessor> getFSAccessor() override;
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;

File diff suppressed because it is too large

View file

@ -2,7 +2,8 @@
#include "parsed-derivations.hh"
#include "lock.hh"
#include "local-store.hh"
#include "store-api.hh"
#include "pathlocks.hh"
#include "goal.hh"
namespace nix {
@ -37,6 +38,7 @@ struct InitialOutputStatus {
struct InitialOutput {
bool wanted;
Hash outputHash;
std::optional<InitialOutputStatus> known;
};
@ -48,6 +50,9 @@ struct DerivationGoal : public Goal
/* The path of the derivation. */
StorePath drvPath;
/* The path of the corresponding resolved derivation */
std::optional<BasicDerivation> resolvedDrv;
/* The specific outputs that we need to build. Empty means all of
them. */
StringSet wantedOutputs;
@ -60,7 +65,7 @@ struct DerivationGoal : public Goal
bool retrySubstitution;
/* The derivation stored at drvPath. */
std::unique_ptr<BasicDerivation> drv;
std::unique_ptr<Derivation> drv;
std::unique_ptr<ParsedDerivation> parsedDrv;
@ -75,18 +80,6 @@ struct DerivationGoal : public Goal
std::map<std::string, InitialOutput> initialOutputs;
/* User selected for running the builder. */
std::unique_ptr<UserLock> buildUser;
/* The process ID of the builder. */
Pid pid;
/* The temporary directory. */
Path tmpDir;
/* The path of the temporary directory in the sandbox. */
Path tmpDirInSandbox;
/* File descriptor for the log file. */
AutoCloseFD fdLogFile;
std::shared_ptr<BufferedSink> logFileSink, logSink;
@ -102,79 +95,15 @@ struct DerivationGoal : public Goal
std::string currentHookLine;
/* Pipe for the builder's standard output/error. */
Pipe builderOut;
/* Pipe for synchronising updates to the builder namespaces. */
Pipe userNamespaceSync;
/* The mount namespace of the builder, used to add additional
paths to the sandbox as a result of recursive Nix calls. */
AutoCloseFD sandboxMountNamespace;
/* On Linux, whether we're doing the build in its own user
namespace. */
bool usingUserNamespace = true;
/* The build hook. */
std::unique_ptr<HookInstance> hook;
/* Whether we're currently doing a chroot build. */
bool useChroot = false;
Path chrootRootDir;
/* RAII object to delete the chroot directory. */
std::shared_ptr<AutoDelete> autoDelChroot;
/* The sort of derivation we are building. */
DerivationType derivationType;
/* Whether to run the build in a private network namespace. */
bool privateNetwork = false;
typedef void (DerivationGoal::*GoalState)();
GoalState state;
/* Stuff we need to pass to initChild(). */
struct ChrootPath {
Path source;
bool optional;
ChrootPath(Path source = "", bool optional = false)
: source(source), optional(optional)
{ }
};
typedef map<Path, ChrootPath> DirsInChroot; // maps target path to source path
DirsInChroot dirsInChroot;
typedef map<string, string> Environment;
Environment env;
#if __APPLE__
typedef string SandboxProfile;
SandboxProfile additionalSandboxProfile;
#endif
/* Hash rewriting. */
StringMap inputRewrites, outputRewrites;
typedef map<StorePath, StorePath> RedirectedOutputs;
RedirectedOutputs redirectedOutputs;
/* The outputs paths used during the build.
- Input-addressed derivations or fixed content-addressed outputs are
sometimes built when some of their outputs already exist, and can not
be hidden via sandboxing. We use temporary locations instead and
rewrite after the build. Otherwise the regular predetermined paths are
put here.
- Floating content-addressed derivations do not know their final build
output paths until the outputs are hashed, so random locations are
used, and then renamed. The randomness helps guard against hidden
self-references.
*/
OutputPathMap scratchOutputs;
/* The final output paths of the build.
- For input-addressed derivations, always the precomputed paths
@ -187,11 +116,6 @@ struct DerivationGoal : public Goal
BuildMode buildMode;
/* If we're repairing without a chroot, there may be outputs that
are valid but corrupt. So we redirect these outputs to
temporary paths. */
StorePathSet redirectedBadOutputs;
BuildResult result;
/* Time the build started. 'result' also has a 'startTime' field,
@ -204,17 +128,6 @@ struct DerivationGoal : public Goal
size_t nrRounds;
/* Path registration info from the previous round, if we're
building multiple times. Since this contains the hash, it
allows us to compare whether two rounds produced the same
result. */
std::map<Path, ValidPathInfo> prevInfos;
uid_t sandboxUid() { return usingUserNamespace ? 1000 : buildUser->getUID(); }
gid_t sandboxGid() { return usingUserNamespace ? 100 : buildUser->getGID(); }
const static Path homeDir;
std::unique_ptr<MaintainCount<uint64_t>> mcExpectedBuilds, mcRunningBuilds;
std::unique_ptr<Activity> act;
@ -227,39 +140,13 @@ struct DerivationGoal : public Goal
/* The remote machine on which we're building. */
std::string machineName;
/* The recursive Nix daemon socket. */
AutoCloseFD daemonSocket;
/* The daemon main thread. */
std::thread daemonThread;
/* The daemon worker threads. */
std::vector<std::thread> daemonWorkerThreads;
/* Paths that were added via recursive Nix calls. */
StorePathSet addedPaths;
/* Recursive Nix calls are only allowed to build or realize paths
in the original input closure or added via a recursive Nix call
(so e.g. you can't do 'nix-store -r /nix/store/<bla>' where
/nix/store/<bla> is some arbitrary path in a binary cache). */
bool isAllowed(const StorePath & path)
{
return inputPaths.count(path) || addedPaths.count(path);
}
friend struct RestrictedStore;
DerivationGoal(const StorePath & drvPath,
const StringSet & wantedOutputs, Worker & worker,
BuildMode buildMode = bmNormal);
DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
const StringSet & wantedOutputs, Worker & worker,
BuildMode buildMode = bmNormal);
~DerivationGoal();
/* Whether we need to perform hash rewriting if there are valid output paths. */
bool needsHashRewrite();
virtual ~DerivationGoal();
void timedOut(Error && ex) override;
@ -281,7 +168,7 @@ struct DerivationGoal : public Goal
void closureRepaired();
void inputsRealised();
void tryToBuild();
void tryLocalBuild();
virtual void tryLocalBuild();
void buildDone();
void resolvedFinished();
@ -289,49 +176,33 @@ struct DerivationGoal : public Goal
/* Is the build hook willing to perform the build? */
HookReply tryBuildHook();
/* Start building a derivation. */
void startBuilder();
/* Fill in the environment for the builder. */
void initEnv();
/* Setup tmp dir location. */
void initTmpDir();
/* Write a JSON file containing the derivation attributes. */
void writeStructuredAttrs();
void startDaemon();
void stopDaemon();
/* Add 'path' to the set of paths that may be referenced by the
outputs, and make it appear in the sandbox. */
void addDependency(const StorePath & path);
/* Make a file owned by the builder. */
void chownToBuilder(const Path & path);
/* Run the builder's process. */
void runChild();
virtual int getChildStatus();
/* Check that the derivation outputs all exist and register them
as valid. */
void registerOutputs();
/* Check that an output meets the requirements specified by the
'outputChecks' attribute (or the legacy
'{allowed,disallowed}{References,Requisites}' attributes). */
void checkOutputs(const std::map<std::string, ValidPathInfo> & outputs);
virtual void registerOutputs();
/* Open a log file and a pipe to it. */
Path openLogFile();
/* Sign the newly built realisation if the store allows it */
virtual void signRealisation(Realisation&) {}
/* Close the log file. */
void closeLogFile();
/* Delete the temporary directory, if we have one. */
void deleteTmpDir(bool force);
/* Close the read side of the logger pipe. */
virtual void closeReadPipes();
/* Cleanup hooks for buildDone() */
virtual void cleanupHookFinally();
virtual void cleanupPreChildKill();
virtual void cleanupPostChildKill();
virtual bool cleanupDecideWhetherDiskFull();
virtual void cleanupPostOutputsRegisteredModeCheck();
virtual void cleanupPostOutputsRegisteredModeNonCheck();
virtual bool isReadDesc(int fd);
/* Callback used by the worker to write to the log. */
void handleChildOutput(int fd, const string & data) override;
@ -348,17 +219,7 @@ struct DerivationGoal : public Goal
void checkPathValidity();
/* Forcibly kill the child process, if any. */
void killChild();
/* Create alternative path calculated from but distinct from the
input, so we can avoid overwriting outputs (or other store paths)
that already exist. */
StorePath makeFallbackPath(const StorePath & path);
/* Make a path to another based on the output name along with the
derivation hash. */
/* FIXME add option to randomize, so we can audit whether our
rewrites caught everything */
StorePath makeFallbackPath(std::string_view outputName);
virtual void killChild();
void repairClosure();
@ -371,4 +232,6 @@ struct DerivationGoal : public Goal
StorePathSet exportReferences(const StorePathSet & storePaths);
};
MakeError(NotDeterministic, BuildError);
}

View file

@ -0,0 +1,122 @@
#include "drv-output-substitution-goal.hh"
#include "worker.hh"
#include "substitution-goal.hh"
namespace nix {
DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(const DrvOutput& id, Worker & worker, RepairFlag repair, std::optional<ContentAddress> ca)
: Goal(worker)
, id(id)
{
state = &DrvOutputSubstitutionGoal::init;
name = fmt("substitution of '%s'", id.to_string());
trace("created");
}
void DrvOutputSubstitutionGoal::init()
{
trace("init");
/* If the derivation already exists, we're done */
if (worker.store.queryRealisation(id)) {
amDone(ecSuccess);
return;
}
subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list<ref<Store>>();
tryNext();
}
void DrvOutputSubstitutionGoal::tryNext()
{
trace("Trying next substituter");
if (subs.size() == 0) {
/* None left. Terminate this goal and let someone else deal
with it. */
debug("drv output '%s' is required, but there is no substituter that can provide it", id.to_string());
/* Hack: don't indicate failure if there were no substituters.
In that case the calling derivation should just do a
build. */
amDone(substituterFailed ? ecFailed : ecNoSubstituters);
if (substituterFailed) {
worker.failedSubstitutions++;
worker.updateProgress();
}
return;
}
auto sub = subs.front();
subs.pop_front();
// FIXME: Make async
outputInfo = sub->queryRealisation(id);
if (!outputInfo) {
tryNext();
return;
}
for (const auto & [depId, depPath] : outputInfo->dependentRealisations) {
if (depId != id) {
if (auto localOutputInfo = worker.store.queryRealisation(depId);
localOutputInfo && localOutputInfo->outPath != depPath) {
warn(
"substituter '%s' has an incompatible realisation for '%s', ignoring.\n"
"Local: %s\n"
"Remote: %s",
sub->getUri(),
depId.to_string(),
worker.store.printStorePath(localOutputInfo->outPath),
worker.store.printStorePath(depPath)
);
tryNext();
return;
}
addWaitee(worker.makeDrvOutputSubstitutionGoal(depId));
}
}
addWaitee(worker.makePathSubstitutionGoal(outputInfo->outPath));
if (waitees.empty()) outPathValid();
else state = &DrvOutputSubstitutionGoal::outPathValid;
}
void DrvOutputSubstitutionGoal::outPathValid()
{
assert(outputInfo);
trace("Output path substituted");
if (nrFailed > 0) {
debug("The output path of the derivation output '%s' could not be substituted", id.to_string());
amDone(nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed);
return;
}
worker.store.registerDrvOutput(*outputInfo);
finished();
}
void DrvOutputSubstitutionGoal::finished()
{
trace("finished");
amDone(ecSuccess);
}
string DrvOutputSubstitutionGoal::key()
{
/* "a$" ensures substitution goals happen before derivation
goals. */
return "a$" + std::string(id.to_string());
}
void DrvOutputSubstitutionGoal::work()
{
(this->*state)();
}
}

View file

@ -0,0 +1,50 @@
#pragma once
#include "store-api.hh"
#include "goal.hh"
#include "realisation.hh"
namespace nix {
class Worker;
// Substitution of a derivation output.
// This is done in three steps:
// 1. Fetch the output info from a substituter
// 2. Substitute the corresponding output path
// 3. Register the output info
class DrvOutputSubstitutionGoal : public Goal {
private:
// The drv output we're trying to substitute
DrvOutput id;
// The realisation corresponding to the given output id.
// Will be filled once we can get it.
std::optional<Realisation> outputInfo;
/* The remaining substituters. */
std::list<ref<Store>> subs;
/* Whether a substituter failed. */
bool substituterFailed = false;
public:
DrvOutputSubstitutionGoal(const DrvOutput& id, Worker & worker, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
typedef void (DrvOutputSubstitutionGoal::*GoalState)();
GoalState state;
void init();
void tryNext();
void outPathValid();
void finished();
void timedOut(Error && ex) override { abort(); };
string key() override;
void work() override;
};
}
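A standalone sketch of the member-function-pointer state machine these goal classes use (GoalState state; then (this->*state)() in work()); the Goal struct below is a toy version, not the Nix class.

#include <iostream>

struct Goal {
    typedef void (Goal::*State)();
    State state = &Goal::init;

    void init()     { std::cout << "init\n";     state = &Goal::tryNext; }
    void tryNext()  { std::cout << "tryNext\n";  state = &Goal::finished; }
    void finished() { std::cout << "finished\n"; }

    void work() { (this->*state)(); } // the worker calls this each time the goal is woken
};

int main()
{
    Goal g;
    g.work(); // init
    g.work(); // tryNext
    g.work(); // finished
}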

View file

@ -1,20 +1,24 @@
#include "machines.hh"
#include "worker.hh"
#include "substitution-goal.hh"
#include "derivation-goal.hh"
#include "local-store.hh"
namespace nix {
void LocalStore::buildPaths(const std::vector<StorePathWithOutputs> & drvPaths, BuildMode buildMode)
void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMode, std::shared_ptr<Store> evalStore)
{
Worker worker(*this);
Worker worker(*this, evalStore ? *evalStore : *this);
Goals goals;
for (auto & path : drvPaths) {
if (path.path.isDerivation())
goals.insert(worker.makeDerivationGoal(path.path, path.outputs, buildMode));
else
goals.insert(worker.makeSubstitutionGoal(path.path, buildMode == bmRepair ? Repair : NoRepair));
for (const auto & br : reqs) {
std::visit(overloaded {
[&](const DerivedPath::Built & bfd) {
goals.insert(worker.makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode));
},
[&](const DerivedPath::Opaque & bo) {
goals.insert(worker.makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair));
},
}, br.raw());
}
worker.run(goals);
@ -30,7 +34,7 @@ void LocalStore::buildPaths(const std::vector<StorePathWithOutputs> & drvPaths,
}
if (i->exitCode != Goal::ecSuccess) {
if (auto i2 = dynamic_cast<DerivationGoal *>(i.get())) failed.insert(i2->drvPath);
else if (auto i2 = dynamic_cast<SubstitutionGoal *>(i.get())) failed.insert(i2->storePath);
else if (auto i2 = dynamic_cast<PathSubstitutionGoal *>(i.get())) failed.insert(i2->storePath);
}
}
@ -43,10 +47,10 @@ void LocalStore::buildPaths(const std::vector<StorePathWithOutputs> & drvPaths,
}
}
BuildResult LocalStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode)
{
Worker worker(*this);
Worker worker(*this, *this);
auto goal = worker.makeBasicDerivationGoal(drvPath, drv, {}, buildMode);
BuildResult result;
@ -58,18 +62,38 @@ BuildResult LocalStore::buildDerivation(const StorePath & drvPath, const BasicDe
result.status = BuildResult::MiscFailure;
result.errorMsg = e.msg();
}
// XXX: Should use `goal->queryPartialDerivationOutputMap()` once it's
// extended to return the full realisation for each output
auto staticDrvOutputs = drv.outputsAndOptPaths(*this);
auto outputHashes = staticOutputHashes(*this, drv);
for (auto & [outputName, staticOutput] : staticDrvOutputs) {
auto outputId = DrvOutput{outputHashes.at(outputName), outputName};
if (staticOutput.second)
result.builtOutputs.insert_or_assign(
outputId,
Realisation{ outputId, *staticOutput.second}
);
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && !derivationHasKnownOutputPaths(drv.type())) {
auto realisation = this->queryRealisation(outputId);
if (realisation)
result.builtOutputs.insert_or_assign(
outputId,
*realisation
);
}
}
return result;
}
void LocalStore::ensurePath(const StorePath & path)
void Store::ensurePath(const StorePath & path)
{
/* If the path is already valid, we're done. */
if (isValidPath(path)) return;
Worker worker(*this);
GoalPtr goal = worker.makeSubstitutionGoal(path);
Worker worker(*this, *this);
GoalPtr goal = worker.makePathSubstitutionGoal(path);
Goals goals = {goal};
worker.run(goals);
@ -86,8 +110,8 @@ void LocalStore::ensurePath(const StorePath & path)
void LocalStore::repairPath(const StorePath & path)
{
Worker worker(*this);
GoalPtr goal = worker.makeSubstitutionGoal(path, Repair);
Worker worker(*this, *this);
GoalPtr goal = worker.makePathSubstitutionGoal(path, Repair);
Goals goals = {goal};
worker.run(goals);
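For illustration, a standalone sketch of the "overloaded" visitor idiom that buildPaths uses to dispatch on the DerivedPath variant (Built vs. Opaque); the types and paths here are stand-ins, not Nix's actual definitions.

#include <iostream>
#include <string>
#include <variant>
#include <vector>

template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

struct Built  { std::string drvPath; };
struct Opaque { std::string path; };
using DerivedPath = std::variant<Built, Opaque>;

int main()
{
    std::vector<DerivedPath> reqs{Built{"/nix/store/aaaa-foo.drv"}, Opaque{"/nix/store/bbbb-bar"}};
    for (auto & req : reqs)
        std::visit(overloaded {
            [](const Built & b)  { std::cout << "build "      << b.drvPath << "\n"; },
            [](const Opaque & o) { std::cout << "substitute " << o.path    << "\n"; },
        }, req);
}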

View file

@ -13,11 +13,9 @@ bool CompareGoalPtrs::operator() (const GoalPtr & a, const GoalPtr & b) const {
void addToWeakGoals(WeakGoals & goals, GoalPtr p)
{
// FIXME: necessary?
// FIXME: O(n)
for (auto & i : goals)
if (i.lock() == p) return;
goals.push_back(p);
if (goals.find(p) != goals.end())
return;
goals.insert(p);
}
@ -46,10 +44,7 @@ void Goal::waiteeDone(GoalPtr waitee, ExitCode result)
/* If we failed and keepGoing is not set, we remove all
remaining waitees. */
for (auto & goal : waitees) {
WeakGoals waiters2;
for (auto & j : goal->waiters)
if (j.lock() != shared_from_this()) waiters2.push_back(j);
goal->waiters = waiters2;
goal->waiters.extract(shared_from_this());
}
waitees.clear();
@ -78,6 +73,8 @@ void Goal::amDone(ExitCode result, std::optional<Error> ex)
}
waiters.clear();
worker.removeGoal(shared_from_this());
cleanup();
}
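A short sketch of why std::owner_less is needed now that WeakGoals is a std::set: weak_ptr has no operator<, so the set orders entries by their owning control block, which is what makes the find() in addToWeakGoals and the extract() in waiteeDone work. The int payload below is just a placeholder.

#include <iostream>
#include <memory>
#include <set>

int main()
{
    using WeakGoals = std::set<std::weak_ptr<int>, std::owner_less<std::weak_ptr<int>>>;

    auto goal = std::make_shared<int>(42);
    WeakGoals goals;
    goals.insert(goal);
    goals.insert(goal);                       // duplicate: ignored, the set keeps one entry
    std::cout << goals.size() << "\n";        // 1
    goals.extract(std::weak_ptr<int>(goal));  // removal keyed on the owner, as in waiteeDone()
    std::cout << goals.size() << "\n";        // 0
}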

View file

@ -19,7 +19,7 @@ struct CompareGoalPtrs {
/* Set of goals. */
typedef set<GoalPtr, CompareGoalPtrs> Goals;
typedef list<WeakGoalPtr> WeakGoals;
typedef set<WeakGoalPtr, std::owner_less<WeakGoalPtr>> WeakGoals;
/* A map of paths to goals (and the other way around). */
typedef std::map<StorePath, WeakGoalPtr> WeakGoalMap;
@ -100,6 +100,8 @@ struct Goal : public std::enable_shared_from_this<Goal>
virtual string key() = 0;
void amDone(ExitCode result, std::optional<Error> ex = {});
virtual void cleanup() { }
};
void addToWeakGoals(WeakGoals & goals, GoalPtr p);

File diff suppressed because it is too large

View file

@ -0,0 +1,210 @@
#pragma once
#include "derivation-goal.hh"
#include "local-store.hh"
namespace nix {
struct LocalDerivationGoal : public DerivationGoal
{
LocalStore & getLocalStore();
/* User selected for running the builder. */
std::unique_ptr<UserLock> buildUser;
/* The process ID of the builder. */
Pid pid;
/* The temporary directory. */
Path tmpDir;
/* The path of the temporary directory in the sandbox. */
Path tmpDirInSandbox;
/* Pipe for the builder's standard output/error. */
Pipe builderOut;
/* Pipe for synchronising updates to the builder namespaces. */
Pipe userNamespaceSync;
/* The mount namespace of the builder, used to add additional
paths to the sandbox as a result of recursive Nix calls. */
AutoCloseFD sandboxMountNamespace;
/* On Linux, whether we're doing the build in its own user
namespace. */
bool usingUserNamespace = true;
/* Whether we're currently doing a chroot build. */
bool useChroot = false;
Path chrootRootDir;
/* RAII object to delete the chroot directory. */
std::shared_ptr<AutoDelete> autoDelChroot;
/* Whether to run the build in a private network namespace. */
bool privateNetwork = false;
/* Stuff we need to pass to initChild(). */
struct ChrootPath {
Path source;
bool optional;
ChrootPath(Path source = "", bool optional = false)
: source(source), optional(optional)
{ }
};
typedef map<Path, ChrootPath> DirsInChroot; // maps target path to source path
DirsInChroot dirsInChroot;
typedef map<string, string> Environment;
Environment env;
#if __APPLE__
typedef string SandboxProfile;
SandboxProfile additionalSandboxProfile;
#endif
/* Hash rewriting. */
StringMap inputRewrites, outputRewrites;
typedef map<StorePath, StorePath> RedirectedOutputs;
RedirectedOutputs redirectedOutputs;
/* The outputs paths used during the build.
- Input-addressed derivations or fixed content-addressed outputs are
sometimes built when some of their outputs already exist, and can not
be hidden via sandboxing. We use temporary locations instead and
rewrite after the build. Otherwise the regular predetermined paths are
put here.
- Floating content-addressed derivations do not know their final build
output paths until the outputs are hashed, so random locations are
used, and then renamed. The randomness helps guard against hidden
self-references.
*/
OutputPathMap scratchOutputs;
/* Path registration info from the previous round, if we're
building multiple times. Since this contains the hash, it
allows us to compare whether two rounds produced the same
result. */
std::map<Path, ValidPathInfo> prevInfos;
uid_t sandboxUid() { return usingUserNamespace ? 1000 : buildUser->getUID(); }
gid_t sandboxGid() { return usingUserNamespace ? 100 : buildUser->getGID(); }
const static Path homeDir;
/* The recursive Nix daemon socket. */
AutoCloseFD daemonSocket;
/* The daemon main thread. */
std::thread daemonThread;
/* The daemon worker threads. */
std::vector<std::thread> daemonWorkerThreads;
/* Paths that were added via recursive Nix calls. */
StorePathSet addedPaths;
/* Realisations that were added via recursive Nix calls. */
std::set<DrvOutput> addedDrvOutputs;
/* Recursive Nix calls are only allowed to build or realize paths
in the original input closure or added via a recursive Nix call
(so e.g. you can't do 'nix-store -r /nix/store/<bla>' where
/nix/store/<bla> is some arbitrary path in a binary cache). */
bool isAllowed(const StorePath & path)
{
return inputPaths.count(path) || addedPaths.count(path);
}
bool isAllowed(const DrvOutput & id)
{
return addedDrvOutputs.count(id);
}
bool isAllowed(const DerivedPath & req);
friend struct RestrictedStore;
using DerivationGoal::DerivationGoal;
virtual ~LocalDerivationGoal() override;
/* Whether we need to perform hash rewriting if there are valid output paths. */
bool needsHashRewrite();
/* The additional states. */
void tryLocalBuild() override;
/* Start building a derivation. */
void startBuilder();
/* Fill in the environment for the builder. */
void initEnv();
/* Setup tmp dir location. */
void initTmpDir();
/* Write a JSON file containing the derivation attributes. */
void writeStructuredAttrs();
void startDaemon();
void stopDaemon();
/* Add 'path' to the set of paths that may be referenced by the
outputs, and make it appear in the sandbox. */
void addDependency(const StorePath & path);
/* Make a file owned by the builder. */
void chownToBuilder(const Path & path);
int getChildStatus() override;
/* Run the builder's process. */
void runChild();
/* Check that the derivation outputs all exist and register them
as valid. */
void registerOutputs() override;
void signRealisation(Realisation &) override;
/* Check that an output meets the requirements specified by the
'outputChecks' attribute (or the legacy
'{allowed,disallowed}{References,Requisites}' attributes). */
void checkOutputs(const std::map<std::string, ValidPathInfo> & outputs);
/* Close the read side of the logger pipe. */
void closeReadPipes() override;
/* Cleanup hooks for buildDone() */
void cleanupHookFinally() override;
void cleanupPreChildKill() override;
void cleanupPostChildKill() override;
bool cleanupDecideWhetherDiskFull() override;
void cleanupPostOutputsRegisteredModeCheck() override;
void cleanupPostOutputsRegisteredModeNonCheck() override;
bool isReadDesc(int fd) override;
/* Delete the temporary directory, if we have one. */
void deleteTmpDir(bool force);
/* Forcibly kill the child process, if any. */
void killChild() override;
/* Create alternative path calculated from but distinct from the
input, so we can avoid overwriting outputs (or other store paths)
that already exist. */
StorePath makeFallbackPath(const StorePath & path);
/* Make a path to another based on the output name along with the
derivation hash. */
/* FIXME add option to randomize, so we can audit whether our
rewrites caught everything */
StorePath makeFallbackPath(std::string_view outputName);
};
}

View file

@ -5,40 +5,32 @@
namespace nix {
SubstitutionGoal::SubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional<ContentAddress> ca)
PathSubstitutionGoal::PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional<ContentAddress> ca)
: Goal(worker)
, storePath(storePath)
, repair(repair)
, ca(ca)
{
state = &SubstitutionGoal::init;
state = &PathSubstitutionGoal::init;
name = fmt("substitution of '%s'", worker.store.printStorePath(this->storePath));
trace("created");
maintainExpectedSubstitutions = std::make_unique<MaintainCount<uint64_t>>(worker.expectedSubstitutions);
}
SubstitutionGoal::~SubstitutionGoal()
PathSubstitutionGoal::~PathSubstitutionGoal()
{
try {
if (thr.joinable()) {
// FIXME: signal worker thread to quit.
thr.join();
worker.childTerminated(this);
}
} catch (...) {
ignoreException();
}
cleanup();
}
void SubstitutionGoal::work()
void PathSubstitutionGoal::work()
{
(this->*state)();
}
void SubstitutionGoal::init()
void PathSubstitutionGoal::init()
{
trace("init");
@ -59,10 +51,12 @@ void SubstitutionGoal::init()
}
void SubstitutionGoal::tryNext()
void PathSubstitutionGoal::tryNext()
{
trace("trying next substituter");
cleanup();
if (subs.size() == 0) {
/* None left. Terminate this goal and let someone else deal
with it. */
@ -142,15 +136,10 @@ void SubstitutionGoal::tryNext()
/* Bail out early if this substituter lacks a valid
signature. LocalStore::addToStore() also checks for this, but
only after we've downloaded the path. */
if (worker.store.requireSigs
&& !sub->isTrusted
&& !info->checkSignatures(worker.store, worker.store.getPublicKeys()))
if (!sub->isTrusted && worker.store.pathInfoIsUntrusted(*info))
{
logWarning({
.name = "Invalid path signature",
.hint = hintfmt("substituter '%s' does not have a valid signature for path '%s'",
sub->getUri(), worker.store.printStorePath(storePath))
});
warn("substituter '%s' does not have a valid signature for path '%s'",
sub->getUri(), worker.store.printStorePath(storePath));
tryNext();
return;
}
@ -159,16 +148,16 @@ void SubstitutionGoal::tryNext()
paths referenced by this one. */
for (auto & i : info->references)
if (i != storePath) /* ignore self-references */
addWaitee(worker.makeSubstitutionGoal(i));
addWaitee(worker.makePathSubstitutionGoal(i));
if (waitees.empty()) /* to prevent hang (no wake-up event) */
referencesValid();
else
state = &SubstitutionGoal::referencesValid;
state = &PathSubstitutionGoal::referencesValid;
}
void SubstitutionGoal::referencesValid()
void PathSubstitutionGoal::referencesValid()
{
trace("all references realised");
@ -182,12 +171,12 @@ void SubstitutionGoal::referencesValid()
if (i != storePath) /* ignore self-references */
assert(worker.store.isValidPath(i));
state = &SubstitutionGoal::tryToRun;
state = &PathSubstitutionGoal::tryToRun;
worker.wakeUp(shared_from_this());
}
void SubstitutionGoal::tryToRun()
void PathSubstitutionGoal::tryToRun()
{
trace("trying to run");
@ -212,12 +201,12 @@ void SubstitutionGoal::tryToRun()
thr = std::thread([this]() {
try {
/* Wake up the worker loop when we're done. */
Finally updateStats([this]() { outPipe.writeSide = -1; });
Finally updateStats([this]() { outPipe.writeSide.close(); });
Activity act(*logger, actSubstitute, Logger::Fields{worker.store.printStorePath(storePath), sub->getUri()});
PushActivity pact(act.id);
copyStorePath(ref<Store>(sub), ref<Store>(worker.store.shared_from_this()),
copyStorePath(*sub, worker.store,
subPath ? *subPath : storePath, repair, sub->isTrusted ? NoCheckSigs : CheckSigs);
promise.set_value();
@ -228,11 +217,11 @@ void SubstitutionGoal::tryToRun()
worker.childStarted(shared_from_this(), {outPipe.readSide.get()}, true, false);
state = &SubstitutionGoal::finished;
state = &PathSubstitutionGoal::finished;
}
void SubstitutionGoal::finished()
void PathSubstitutionGoal::finished()
{
trace("substitute finished");
@ -256,7 +245,7 @@ void SubstitutionGoal::finished()
}
/* Try the next substitute. */
state = &SubstitutionGoal::tryNext;
state = &PathSubstitutionGoal::tryNext;
worker.wakeUp(shared_from_this());
return;
}
@ -295,14 +284,31 @@ void SubstitutionGoal::finished()
}
void SubstitutionGoal::handleChildOutput(int fd, const string & data)
void PathSubstitutionGoal::handleChildOutput(int fd, const string & data)
{
}
void SubstitutionGoal::handleEOF(int fd)
void PathSubstitutionGoal::handleEOF(int fd)
{
if (fd == outPipe.readSide.get()) worker.wakeUp(shared_from_this());
}
void PathSubstitutionGoal::cleanup()
{
try {
if (thr.joinable()) {
// FIXME: signal worker thread to quit.
thr.join();
worker.childTerminated(this);
}
outPipe.close();
} catch (...) {
ignoreException();
}
}
}

View file

@ -8,13 +8,13 @@ namespace nix {
class Worker;
struct SubstitutionGoal : public Goal
struct PathSubstitutionGoal : public Goal
{
/* The store path that should be realised through a substitute. */
StorePath storePath;
/* The path the substituter refers to the path as. This will be
* different when the stores have different names. */
different when the stores have different names. */
std::optional<StorePath> subPath;
/* The remaining substituters. */
@ -47,7 +47,7 @@ struct SubstitutionGoal : public Goal
std::unique_ptr<MaintainCount<uint64_t>> maintainExpectedSubstitutions,
maintainRunningSubstitutions, maintainExpectedNar, maintainExpectedDownload;
typedef void (SubstitutionGoal::*GoalState)();
typedef void (PathSubstitutionGoal::*GoalState)();
GoalState state;
/* Content address for recomputing store path */
@ -56,8 +56,9 @@ struct SubstitutionGoal : public Goal
/* Time substitution started. */
std::chrono::time_point<std::chrono::steady_clock> startTime;
SubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
~SubstitutionGoal();
public:
PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
~PathSubstitutionGoal();
void timedOut(Error && ex) override { abort(); };
@ -81,6 +82,8 @@ struct SubstitutionGoal : public Goal
/* Callback used by the worker to write to the log. */
void handleChildOutput(int fd, const string & data) override;
void handleEOF(int fd) override;
void cleanup() override;
};
}

View file

@ -1,18 +1,20 @@
#include "machines.hh"
#include "worker.hh"
#include "substitution-goal.hh"
#include "derivation-goal.hh"
#include "drv-output-substitution-goal.hh"
#include "local-derivation-goal.hh"
#include "hook-instance.hh"
#include <poll.h>
namespace nix {
Worker::Worker(LocalStore & store)
Worker::Worker(Store & store, Store & evalStore)
: act(*logger, actRealise)
, actDerivations(*logger, actBuilds)
, actSubstitutions(*logger, actCopyPaths)
, store(store)
, evalStore(evalStore)
{
/* Debugging: prevent recursive workers. */
nrLocalBuilds = 0;
@ -59,8 +61,10 @@ std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
std::shared_ptr<DerivationGoal> Worker::makeDerivationGoal(const StorePath & drvPath,
const StringSet & wantedOutputs, BuildMode buildMode)
{
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() {
return std::make_shared<DerivationGoal>(drvPath, wantedOutputs, *this, buildMode);
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
return !dynamic_cast<LocalStore *>(&store)
? std::make_shared</* */DerivationGoal>(drvPath, wantedOutputs, *this, buildMode)
: std::make_shared<LocalDerivationGoal>(drvPath, wantedOutputs, *this, buildMode);
});
}
@ -68,26 +72,40 @@ std::shared_ptr<DerivationGoal> Worker::makeDerivationGoal(const StorePath & drv
std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath & drvPath,
const BasicDerivation & drv, const StringSet & wantedOutputs, BuildMode buildMode)
{
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() {
return std::make_shared<DerivationGoal>(drvPath, drv, wantedOutputs, *this, buildMode);
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
return !dynamic_cast<LocalStore *>(&store)
? std::make_shared</* */DerivationGoal>(drvPath, drv, wantedOutputs, *this, buildMode)
: std::make_shared<LocalDerivationGoal>(drvPath, drv, wantedOutputs, *this, buildMode);
});
}
std::shared_ptr<SubstitutionGoal> Worker::makeSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional<ContentAddress> ca)
std::shared_ptr<PathSubstitutionGoal> Worker::makePathSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional<ContentAddress> ca)
{
std::weak_ptr<SubstitutionGoal> & goal_weak = substitutionGoals[path];
std::weak_ptr<PathSubstitutionGoal> & goal_weak = substitutionGoals[path];
auto goal = goal_weak.lock(); // FIXME
if (!goal) {
goal = std::make_shared<SubstitutionGoal>(path, *this, repair, ca);
goal = std::make_shared<PathSubstitutionGoal>(path, *this, repair, ca);
goal_weak = goal;
wakeUp(goal);
}
return goal;
}
template<typename G>
static void removeGoal(std::shared_ptr<G> goal, std::map<StorePath, std::weak_ptr<G>> & goalMap)
std::shared_ptr<DrvOutputSubstitutionGoal> Worker::makeDrvOutputSubstitutionGoal(const DrvOutput& id, RepairFlag repair, std::optional<ContentAddress> ca)
{
std::weak_ptr<DrvOutputSubstitutionGoal> & goal_weak = drvOutputSubstitutionGoals[id];
auto goal = goal_weak.lock(); // FIXME
if (!goal) {
goal = std::make_shared<DrvOutputSubstitutionGoal>(id, *this, repair, ca);
goal_weak = goal;
wakeUp(goal);
}
return goal;
}
template<typename K, typename G>
static void removeGoal(std::shared_ptr<G> goal, std::map<K, std::weak_ptr<G>> & goalMap)
{
/* !!! inefficient */
for (auto i = goalMap.begin();
@ -107,10 +125,13 @@ void Worker::removeGoal(GoalPtr goal)
act.result(resUnexpectBuild, store.printStorePath(drvGoal->drvPath));
nix::removeGoal(drvGoal, derivationGoals);
}
else if (auto subGoal = std::dynamic_pointer_cast<SubstitutionGoal>(goal)) {
else if (auto subGoal = std::dynamic_pointer_cast<PathSubstitutionGoal>(goal)) {
act.result(resUnexpectSubstitution, store.printStorePath(subGoal->storePath));
nix::removeGoal(subGoal, substitutionGoals);
} else
}
else if (auto subGoal = std::dynamic_pointer_cast<DrvOutputSubstitutionGoal>(goal))
nix::removeGoal(subGoal, drvOutputSubstitutionGoals);
else
assert(false);
if (topGoals.find(goal) != topGoals.end()) {
@ -211,18 +232,18 @@ void Worker::waitForAWhile(GoalPtr goal)
void Worker::run(const Goals & _topGoals)
{
std::vector<nix::StorePathWithOutputs> topPaths;
std::vector<nix::DerivedPath> topPaths;
for (auto & i : _topGoals) {
topGoals.insert(i);
if (auto goal = dynamic_cast<DerivationGoal *>(i.get())) {
topPaths.push_back({goal->drvPath, goal->wantedOutputs});
} else if (auto goal = dynamic_cast<SubstitutionGoal *>(i.get())) {
topPaths.push_back({goal->storePath});
topPaths.push_back(DerivedPath::Built{goal->drvPath, goal->wantedOutputs});
} else if (auto goal = dynamic_cast<PathSubstitutionGoal *>(i.get())) {
topPaths.push_back(DerivedPath::Opaque{goal->storePath});
}
}
/* Call queryMissing() efficiently query substitutes. */
/* Call queryMissing() to efficiently query substitutes. */
StorePathSet willBuild, willSubstitute, unknown;
uint64_t downloadSize, narSize;
store.queryMissing(topPaths, willBuild, willSubstitute, unknown, downloadSize, narSize);
@ -239,7 +260,9 @@ void Worker::run(const Goals & _topGoals)
checkInterrupt();
store.autoGC(false);
// TODO GC interface?
if (auto localStore = dynamic_cast<LocalStore *>(&store))
localStore->autoGC(false);
/* Call every wake goal (in the ordering established by
CompareGoalPtrs). */
@ -464,10 +487,7 @@ bool Worker::pathContentsGood(const StorePath & path)
}
pathContentsGoodCache.insert_or_assign(path, res);
if (!res)
logError({
.name = "Corrupted path",
.hint = hintfmt("path '%s' is corrupted or missing!", store.printStorePath(path))
});
printError("path '%s' is corrupted or missing!", store.printStorePath(path));
return res;
}
@ -478,7 +498,10 @@ void Worker::markContentsGood(const StorePath & path)
}
GoalPtr upcast_goal(std::shared_ptr<SubstitutionGoal> subGoal) {
GoalPtr upcast_goal(std::shared_ptr<PathSubstitutionGoal> subGoal) {
return subGoal;
}
GoalPtr upcast_goal(std::shared_ptr<DrvOutputSubstitutionGoal> subGoal) {
return subGoal;
}

View file

@ -2,25 +2,31 @@
#include "types.hh"
#include "lock.hh"
#include "local-store.hh"
#include "store-api.hh"
#include "goal.hh"
#include "realisation.hh"
#include <future>
#include <thread>
namespace nix {
/* Forward definition. */
struct DerivationGoal;
struct SubstitutionGoal;
struct PathSubstitutionGoal;
class DrvOutputSubstitutionGoal;
/* Workaround for not being able to declare something like
class SubstitutionGoal : public Goal;
class PathSubstitutionGoal : public Goal;
even when Goal is a complete type.
This is still a static cast. The purpose of exporting it is to define it in
a place where `SubstitutionGoal` is concrete, and use it in a place where it
a place where `PathSubstitutionGoal` is concrete, and use it in a place where it
is opaque. */
GoalPtr upcast_goal(std::shared_ptr<SubstitutionGoal> subGoal);
GoalPtr upcast_goal(std::shared_ptr<PathSubstitutionGoal> subGoal);
GoalPtr upcast_goal(std::shared_ptr<DrvOutputSubstitutionGoal> subGoal);
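The comment above explains the reason for `upcast_goal`: a forward declaration cannot state a base class, so the implicit derived-to-base pointer conversion is unavailable in translation units that only see the forward declaration. A tiny sketch of the pattern, with hypothetical names rather than the real goal classes:
#include <memory>
struct Base { virtual ~Base() = default; };
using BasePtr = std::shared_ptr<Base>;
struct Derived;                                // forward declaration; base class unknown here
BasePtr upcast(std::shared_ptr<Derived> p);    // usable where Derived is still opaque
struct Derived : Base { };                     // complete type
BasePtr upcast(std::shared_ptr<Derived> p) { return p; }   // defined where Derived is concrete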
typedef std::chrono::time_point<std::chrono::steady_clock> steady_time_point;
@ -69,7 +75,8 @@ private:
/* Maps used to prevent multiple instantiations of a goal for the
same derivation / path. */
std::map<StorePath, std::weak_ptr<DerivationGoal>> derivationGoals;
std::map<StorePath, std::weak_ptr<SubstitutionGoal>> substitutionGoals;
std::map<StorePath, std::weak_ptr<PathSubstitutionGoal>> substitutionGoals;
std::map<DrvOutput, std::weak_ptr<DrvOutputSubstitutionGoal>> drvOutputSubstitutionGoals;
/* Goals waiting for busy paths to be unlocked. */
WeakGoals waitingForAnyGoal;
@ -102,7 +109,8 @@ public:
/* Set if at least one derivation is not deterministic in check mode. */
bool checkMismatch;
LocalStore & store;
Store & store;
Store & evalStore;
std::unique_ptr<HookInstance> hook;
@ -124,7 +132,7 @@ public:
it answers with "decline-permanently", we don't try again. */
bool tryBuildHook = true;
Worker(LocalStore & store);
Worker(Store & store, Store & evalStore);
~Worker();
/* Make a goal (with caching). */
@ -143,7 +151,8 @@ public:
const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
/* substitution goal */
std::shared_ptr<SubstitutionGoal> makeSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
std::shared_ptr<PathSubstitutionGoal> makePathSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
std::shared_ptr<DrvOutputSubstitutionGoal> makeDrvOutputSubstitutionGoal(const DrvOutput & id, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
/* Remove a dead goal. */
void removeGoal(GoalPtr goal);

View file

@ -22,10 +22,7 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir,
srcFiles = readDirectory(srcDir);
} catch (SysError & e) {
if (e.errNo == ENOTDIR) {
logWarning({
.name = "Create links - directory",
.hint = hintfmt("not including '%s' in the user environment because it's not a directory", srcDir)
});
warn("not including '%s' in the user environment because it's not a directory", srcDir);
return;
}
throw;
@ -44,10 +41,7 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir,
throw SysError("getting status of '%1%'", srcFile);
} catch (SysError & e) {
if (e.errNo == ENOENT || e.errNo == ENOTDIR) {
logWarning({
.name = "Create links - skipping symlink",
.hint = hintfmt("skipping dangling symlink '%s'", dstFile)
});
warn("skipping dangling symlink '%s'", dstFile);
continue;
}
throw;

View file

@ -3,9 +3,19 @@
-- is enabled
create table if not exists Realisations (
id integer primary key autoincrement not null,
drvPath text not null,
outputName text not null, -- symbolic output id, usually "out"
outputPath integer not null,
primary key (drvPath, outputName),
signatures text, -- space-separated list
foreign key (outputPath) references ValidPaths(id) on delete cascade
);
create index if not exists IndexRealisations on Realisations(drvPath, outputName);
create table if not exists RealisationsRefs (
referrer integer not null,
realisationReference integer,
foreign key (referrer) references Realisations(id) on delete cascade,
foreign key (realisationReference) references Realisations(id) on delete restrict
);

View file

@ -31,10 +31,10 @@ std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
std::string renderContentAddress(ContentAddress ca)
{
return std::visit(overloaded {
[](TextHash th) {
[](TextHash & th) {
return "text:" + th.hash.to_string(Base32, true);
},
[](FixedOutputHash fsh) {
[](FixedOutputHash & fsh) {
return makeFixedOutputCA(fsh.method, fsh.hash);
}
}, ca);
@ -43,10 +43,10 @@ std::string renderContentAddress(ContentAddress ca)
std::string renderContentAddressMethod(ContentAddressMethod cam)
{
return std::visit(overloaded {
[](TextHashMethod &th) {
[](TextHashMethod & th) {
return std::string{"text:"} + printHashType(htSHA256);
},
[](FixedOutputHashMethod &fshm) {
[](FixedOutputHashMethod & fshm) {
return "fixed:" + makeFileIngestionPrefix(fshm.fileIngestionMethod) + printHashType(fshm.hashType);
}
}, cam);
@ -104,12 +104,12 @@ ContentAddress parseContentAddress(std::string_view rawCa) {
return std::visit(
overloaded {
[&](TextHashMethod thm) {
[&](TextHashMethod & thm) {
return ContentAddress(TextHash {
.hash = Hash::parseNonSRIUnprefixed(rest, htSHA256)
});
},
[&](FixedOutputHashMethod fohMethod) {
[&](FixedOutputHashMethod & fohMethod) {
return ContentAddress(FixedOutputHash {
.method = fohMethod.fileIngestionMethod,
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(fohMethod.hashType)),
@ -137,10 +137,10 @@ std::string renderContentAddress(std::optional<ContentAddress> ca)
Hash getContentAddressHash(const ContentAddress & ca)
{
return std::visit(overloaded {
[](TextHash th) {
[](const TextHash & th) {
return th.hash;
},
[](FixedOutputHash fsh) {
[](const FixedOutputHash & fsh) {
return fsh.hash;
}
}, ca);

View file

@ -2,21 +2,19 @@
#include "util.hh"
#include "globals.hh"
#if HAVE_SODIUM
#include <sodium.h>
#endif
namespace nix {
static std::pair<std::string, std::string> split(const string & s)
static std::pair<std::string_view, std::string_view> split(std::string_view s)
{
size_t colon = s.find(':');
if (colon == std::string::npos || colon == 0)
return {"", ""};
return {std::string(s, 0, colon), std::string(s, colon + 1)};
return {s.substr(0, colon), s.substr(colon + 1)};
}
Key::Key(const string & s)
Key::Key(std::string_view s)
{
auto ss = split(s);
@ -29,62 +27,57 @@ Key::Key(const string & s)
key = base64Decode(key);
}
SecretKey::SecretKey(const string & s)
std::string Key::to_string() const
{
return name + ":" + base64Encode(key);
}
SecretKey::SecretKey(std::string_view s)
: Key(s)
{
#if HAVE_SODIUM
if (key.size() != crypto_sign_SECRETKEYBYTES)
throw Error("secret key is not valid");
#endif
}
#if !HAVE_SODIUM
[[noreturn]] static void noSodium()
std::string SecretKey::signDetached(std::string_view data) const
{
throw Error("Nix was not compiled with libsodium, required for signed binary cache support");
}
#endif
std::string SecretKey::signDetached(const std::string & data) const
{
#if HAVE_SODIUM
unsigned char sig[crypto_sign_BYTES];
unsigned long long sigLen;
crypto_sign_detached(sig, &sigLen, (unsigned char *) data.data(), data.size(),
(unsigned char *) key.data());
return name + ":" + base64Encode(std::string((char *) sig, sigLen));
#else
noSodium();
#endif
}
PublicKey SecretKey::toPublicKey() const
{
#if HAVE_SODIUM
unsigned char pk[crypto_sign_PUBLICKEYBYTES];
crypto_sign_ed25519_sk_to_pk(pk, (unsigned char *) key.data());
return PublicKey(name, std::string((char *) pk, crypto_sign_PUBLICKEYBYTES));
#else
noSodium();
#endif
}
PublicKey::PublicKey(const string & s)
SecretKey SecretKey::generate(std::string_view name)
{
unsigned char pk[crypto_sign_PUBLICKEYBYTES];
unsigned char sk[crypto_sign_SECRETKEYBYTES];
if (crypto_sign_keypair(pk, sk) != 0)
throw Error("key generation failed");
return SecretKey(name, std::string((char *) sk, crypto_sign_SECRETKEYBYTES));
}
PublicKey::PublicKey(std::string_view s)
: Key(s)
{
#if HAVE_SODIUM
if (key.size() != crypto_sign_PUBLICKEYBYTES)
throw Error("public key is not valid");
#endif
}
bool verifyDetached(const std::string & data, const std::string & sig,
const PublicKeys & publicKeys)
{
#if HAVE_SODIUM
auto ss = split(sig);
auto key = publicKeys.find(ss.first);
auto key = publicKeys.find(std::string(ss.first));
if (key == publicKeys.end()) return false;
auto sig2 = base64Decode(ss.second);
@ -94,9 +87,6 @@ bool verifyDetached(const std::string & data, const std::string & sig,
return crypto_sign_verify_detached((unsigned char *) sig2.data(),
(unsigned char *) data.data(), data.size(),
(unsigned char *) key->second.key.data()) == 0;
#else
noSodium();
#endif
}
PublicKeys getDefaultPublicKeys()

View file

@ -13,32 +13,40 @@ struct Key
/* Construct Key from a string in the format
<name>:<key-in-base64>. */
Key(const std::string & s);
Key(std::string_view s);
std::string to_string() const;
protected:
Key(const std::string & name, const std::string & key)
: name(name), key(key) { }
Key(std::string_view name, std::string && key)
: name(name), key(std::move(key)) { }
};
struct PublicKey;
struct SecretKey : Key
{
SecretKey(const std::string & s);
SecretKey(std::string_view s);
/* Return a detached signature of the given string. */
std::string signDetached(const std::string & s) const;
std::string signDetached(std::string_view s) const;
PublicKey toPublicKey() const;
static SecretKey generate(std::string_view name);
private:
SecretKey(std::string_view name, std::string && key)
: Key(name, std::move(key)) { }
};
struct PublicKey : Key
{
PublicKey(const std::string & data);
PublicKey(std::string_view data);
private:
PublicKey(const std::string & name, const std::string & key)
: Key(name, key) { }
PublicKey(std::string_view name, std::string && key)
: Key(name, std::move(key)) { }
friend struct SecretKey;
};
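As a rough usage sketch of the interface above (assuming a build with libsodium and the `PublicKeys` map typedef declared elsewhere in this header; not part of the change itself):
#include "crypto.hh"
#include <cassert>
using namespace nix;
void signingDemo()
{
    // Keys are serialised as "<name>:<key-in-base64>".
    SecretKey sk = SecretKey::generate("cache.example.org-1");
    PublicKey pk = sk.toPublicKey();
    std::string sig = sk.signDetached("hello world");
    PublicKeys trusted;
    trusted.emplace(pk.name, pk);
    assert(verifyDetached("hello world", sig, trusted));
}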

View file

@ -2,6 +2,7 @@
#include "monitor-fd.hh"
#include "worker-protocol.hh"
#include "store-api.hh"
#include "path-with-outputs.hh"
#include "finally.hh"
#include "affinity.hh"
#include "archive.hh"
@ -226,8 +227,15 @@ struct ClientSettings
try {
if (name == "ssh-auth-sock") // obsolete
;
else if (name == settings.experimentalFeatures.name) {
// We don't want to forward the experimental features to
// the daemon, as that could cause some pretty weird stuff
if (parseFeatures(tokenizeString<StringSet>(value)) != settings.experimentalFeatures.get())
debug("Ignoring the client-specified experimental features");
}
else if (trusted
|| name == settings.buildTimeout.name
|| name == settings.buildRepeat.name
|| name == "connect-timeout"
|| (name == "builders" && value == ""))
settings.set(name, value);
@ -242,21 +250,16 @@ struct ClientSettings
}
};
static void writeValidPathInfo(
ref<Store> store,
unsigned int clientVersion,
Sink & to,
std::shared_ptr<const ValidPathInfo> info)
static std::vector<DerivedPath> readDerivedPaths(Store & store, unsigned int clientVersion, Source & from)
{
to << (info->deriver ? store->printStorePath(*info->deriver) : "")
<< info->narHash.to_string(Base16, false);
worker_proto::write(*store, to, info->references);
to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
to << info->ultimate
<< info->sigs
<< renderContentAddress(info->ca);
std::vector<DerivedPath> reqs;
if (GET_PROTOCOL_MINOR(clientVersion) >= 30) {
reqs = worker_proto::read(store, from, Phantom<std::vector<DerivedPath>> {});
} else {
for (auto & s : readStrings<Strings>(from))
reqs.push_back(parsePathWithOutputs(store, s).toDerivedPath());
}
return reqs;
}
static void performOp(TunnelLogger * logger, ref<Store> store,
@ -393,13 +396,13 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
FramedSource source(from);
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
return std::visit(overloaded {
[&](TextHashMethod &_) {
[&](TextHashMethod &) {
// We could stream this by changing Store
std::string contents = source.drain();
auto path = store->addTextToStore(name, contents, refs, repair);
return store->queryPathInfo(path);
},
[&](FixedOutputHashMethod &fohm) {
[&](FixedOutputHashMethod & fohm) {
if (!refs.empty())
throw UnimplementedError("cannot yet have refs with flat or nar-hashed data");
auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair);
@ -409,9 +412,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}();
logger->stopWork();
to << store->printStorePath(pathInfo->path);
writeValidPathInfo(store, clientVersion, to, pathInfo);
pathInfo->write(to, *store, GET_PROTOCOL_MINOR(clientVersion));
} else {
HashType hashAlgo;
std::string baseName;
@ -458,6 +459,21 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
case wopAddMultipleToStore: {
bool repair, dontCheckSigs;
from >> repair >> dontCheckSigs;
if (!trusted && dontCheckSigs)
dontCheckSigs = false;
logger->startWork();
FramedSource source(from);
store->addMultipleToStore(source,
RepairFlag{repair},
dontCheckSigs ? NoCheckSigs : CheckSigs);
logger->stopWork();
break;
}
case wopAddTextToStore: {
string suffix = readString(from);
string s = readString(from);
@ -493,9 +509,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}
case wopBuildPaths: {
std::vector<StorePathWithOutputs> drvs;
for (auto & s : readStrings<Strings>(from))
drvs.push_back(store->parsePathWithOutputs(s));
auto drvs = readDerivedPaths(*store, clientVersion, from);
BuildMode mode = bmNormal;
if (GET_PROTOCOL_MINOR(clientVersion) >= 15) {
mode = (BuildMode) readInt(from);
@ -575,6 +589,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto res = store->buildDerivation(drvPath, drv, buildMode);
logger->stopWork();
to << res.status << res.errorMsg;
if (GET_PROTOCOL_MINOR(clientVersion) >= 29) {
to << res.timesBuilt << res.isNonDeterministic << res.startTime << res.stopTime;
}
if (GET_PROTOCOL_MINOR(clientVersion) >= 28) {
worker_proto::write(*store, to, res.builtOutputs);
}
break;
}
@ -605,9 +625,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
// Obsolete.
case wopSyncWithGC: {
logger->startWork();
store->syncWithGC();
logger->stopWork();
to << 1;
break;
@ -753,7 +773,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (info) {
if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
to << 1;
writeValidPathInfo(store, clientVersion, to, info);
info->write(to, *store, GET_PROTOCOL_MINOR(clientVersion), false);
} else {
assert(GET_PROTOCOL_MINOR(clientVersion) >= 17);
to << 0;
@ -853,9 +873,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}
case wopQueryMissing: {
std::vector<StorePathWithOutputs> targets;
for (auto & s : readStrings<Strings>(from))
targets.push_back(store->parsePathWithOutputs(s));
auto targets = readDerivedPaths(*store, clientVersion, from);
logger->startWork();
StorePathSet willBuild, willSubstitute, unknown;
uint64_t downloadSize, narSize;
@ -870,11 +888,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case wopRegisterDrvOutput: {
logger->startWork();
auto outputId = DrvOutput::parse(readString(from));
auto outputPath = StorePath(readString(from));
auto resolvedDrv = StorePath(readString(from));
store->registerDrvOutput(Realisation{
.id = outputId, .outPath = outputPath});
if (GET_PROTOCOL_MINOR(clientVersion) < 31) {
auto outputId = DrvOutput::parse(readString(from));
auto outputPath = StorePath(readString(from));
store->registerDrvOutput(Realisation{
.id = outputId, .outPath = outputPath});
} else {
auto realisation = worker_proto::read(*store, from, Phantom<Realisation>());
store->registerDrvOutput(realisation);
}
logger->stopWork();
break;
}
@ -884,9 +906,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto outputId = DrvOutput::parse(readString(from));
auto info = store->queryRealisation(outputId);
logger->stopWork();
std::set<StorePath> outPaths;
if (info) outPaths.insert(info->outPath);
worker_proto::write(*store, to, outPaths);
if (GET_PROTOCOL_MINOR(clientVersion) < 31) {
std::set<StorePath> outPaths;
if (info) outPaths.insert(info->outPath);
worker_proto::write(*store, to, outPaths);
} else {
std::set<Realisation> realisations;
if (info) realisations.insert(*info);
worker_proto::write(*store, to, realisations);
}
break;
}

View file

@ -10,18 +10,18 @@ namespace nix {
std::optional<StorePath> DerivationOutput::path(const Store & store, std::string_view drvName, std::string_view outputName) const
{
return std::visit(overloaded {
[](DerivationOutputInputAddressed doi) -> std::optional<StorePath> {
[](const DerivationOutputInputAddressed & doi) -> std::optional<StorePath> {
return { doi.path };
},
[&](DerivationOutputCAFixed dof) -> std::optional<StorePath> {
[&](const DerivationOutputCAFixed & dof) -> std::optional<StorePath> {
return {
dof.path(store, drvName, outputName)
};
},
[](DerivationOutputCAFloating dof) -> std::optional<StorePath> {
[](const DerivationOutputCAFloating & dof) -> std::optional<StorePath> {
return std::nullopt;
},
[](DerivationOutputDeferred) -> std::optional<StorePath> {
[](const DerivationOutputDeferred &) -> std::optional<StorePath> {
return std::nullopt;
},
}, output);
@ -57,6 +57,17 @@ bool derivationIsFixed(DerivationType dt) {
assert(false);
}
bool derivationHasKnownOutputPaths(DerivationType dt) {
switch (dt) {
case DerivationType::InputAddressed: return true;
case DerivationType::CAFixed: return true;
case DerivationType::CAFloating: return false;
case DerivationType::DeferredInputAddressed: return false;
};
assert(false);
}
bool derivationIsImpure(DerivationType dt) {
switch (dt) {
case DerivationType::InputAddressed: return false;
@ -176,7 +187,7 @@ static DerivationOutput parseDerivationOutput(const Store & store,
},
};
} else {
settings.requireExperimentalFeature("ca-derivations");
settings.requireExperimentalFeature(Xp::CaDerivations);
assert(pathS == "");
return DerivationOutput {
.output = DerivationOutputCAFloating {
@ -321,22 +332,22 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
if (first) first = false; else s += ',';
s += '('; printUnquotedString(s, i.first);
std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
[&](const DerivationOutputInputAddressed & doi) {
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(doi.path));
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, "");
},
[&](DerivationOutputCAFixed dof) {
[&](const DerivationOutputCAFixed & dof) {
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first)));
s += ','; printUnquotedString(s, dof.hash.printMethodAlgo());
s += ','; printUnquotedString(s, dof.hash.hash.to_string(Base16, false));
},
[&](DerivationOutputCAFloating dof) {
[&](const DerivationOutputCAFloating & dof) {
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
s += ','; printUnquotedString(s, "");
},
[&](DerivationOutputDeferred) {
[&](const DerivationOutputDeferred &) {
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, "");
@ -409,13 +420,13 @@ DerivationType BasicDerivation::type() const
std::optional<HashType> floatingHashType;
for (auto & i : outputs) {
std::visit(overloaded {
[&](DerivationOutputInputAddressed _) {
[&](const DerivationOutputInputAddressed &) {
inputAddressedOutputs.insert(i.first);
},
[&](DerivationOutputCAFixed _) {
[&](const DerivationOutputCAFixed &) {
fixedCAOutputs.insert(i.first);
},
[&](DerivationOutputCAFloating dof) {
[&](const DerivationOutputCAFloating & dof) {
floatingCAOutputs.insert(i.first);
if (!floatingHashType) {
floatingHashType = dof.hashType;
@ -424,7 +435,7 @@ DerivationType BasicDerivation::type() const
throw Error("All floating outputs must use the same hash type");
}
},
[&](DerivationOutputDeferred _) {
[&](const DerivationOutputDeferred &) {
deferredIAOutputs.insert(i.first);
},
}, i.second.output);
@ -527,15 +538,15 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
const auto & res = pathDerivationModulo(store, i.first);
std::visit(overloaded {
// Regular non-CA derivation, replace derivation
[&](Hash drvHash) {
[&](const Hash & drvHash) {
inputs2.insert_or_assign(drvHash.to_string(Base16, false), i.second);
},
[&](DeferredHash deferredHash) {
[&](const DeferredHash & deferredHash) {
isDeferred = true;
inputs2.insert_or_assign(deferredHash.hash.to_string(Base16, false), i.second);
},
// CA derivation's output hashes
[&](CaOutputHashes outputHashes) {
[&](const CaOutputHashes & outputHashes) {
std::set<std::string> justOut = { "out" };
for (auto & output : i.second) {
/* Put each one in with a single "out" output.. */
@ -557,21 +568,21 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
}
std::map<std::string, Hash> staticOutputHashes(Store& store, const Derivation& drv)
std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation & drv)
{
std::map<std::string, Hash> res;
std::visit(overloaded {
[&](Hash drvHash) {
[&](const Hash & drvHash) {
for (auto & outputName : drv.outputNames()) {
res.insert({outputName, drvHash});
}
},
[&](DeferredHash deferredHash) {
[&](const DeferredHash & deferredHash) {
for (auto & outputName : drv.outputNames()) {
res.insert({outputName, deferredHash.hash});
}
},
[&](CaOutputHashes outputHashes) {
[&](const CaOutputHashes & outputHashes) {
res = outputHashes;
},
}, hashDerivationModulo(store, drv, true));
@ -579,14 +590,6 @@ std::map<std::string, Hash> staticOutputHashes(Store& store, const Derivation& d
}
std::string StorePathWithOutputs::to_string(const Store & store) const
{
return outputs.empty()
? store.printStorePath(path)
: store.printStorePath(path) + "!" + concatStringsSep(",", outputs);
}
bool wantOutput(const string & output, const std::set<string> & wanted)
{
return wanted.empty() || wanted.find(output) != wanted.end();
@ -663,22 +666,22 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
for (auto & i : drv.outputs) {
out << i.first;
std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
[&](const DerivationOutputInputAddressed & doi) {
out << store.printStorePath(doi.path)
<< ""
<< "";
},
[&](DerivationOutputCAFixed dof) {
[&](const DerivationOutputCAFixed & dof) {
out << store.printStorePath(dof.path(store, drv.name, i.first))
<< dof.hash.printMethodAlgo()
<< dof.hash.hash.to_string(Base16, false);
},
[&](DerivationOutputCAFloating dof) {
[&](const DerivationOutputCAFloating & dof) {
out << ""
<< (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
<< "";
},
[&](DerivationOutputDeferred) {
[&](const DerivationOutputDeferred &) {
out << ""
<< ""
<< "";
@ -745,7 +748,7 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String
}
std::optional<BasicDerivation> Derivation::tryResolveUncached(Store & store) {
std::optional<BasicDerivation> Derivation::tryResolve(Store & store) {
BasicDerivation resolved { *this };
// Input paths that we'll want to rewrite in the derivation
@ -756,8 +759,13 @@ std::optional<BasicDerivation> Derivation::tryResolveUncached(Store & store) {
StringSet newOutputNames;
for (auto & outputName : input.second) {
auto actualPathOpt = inputDrvOutputs.at(outputName);
if (!actualPathOpt)
if (!actualPathOpt) {
warn("output %s of input %s missing, aborting the resolving",
outputName,
store.printStorePath(input.first)
);
return std::nullopt;
}
auto actualPath = *actualPathOpt;
inputRewrites.emplace(
downstreamPlaceholder(store, input.first, outputName),
@ -771,34 +779,4 @@ std::optional<BasicDerivation> Derivation::tryResolveUncached(Store & store) {
return resolved;
}
std::optional<BasicDerivation> Derivation::tryResolve(Store& store)
{
auto drvPath = writeDerivation(store, *this, NoRepair, false);
return Derivation::tryResolve(store, drvPath);
}
std::optional<BasicDerivation> Derivation::tryResolve(Store& store, const StorePath& drvPath)
{
// This is quite dirty and leaky, but will disappear once #4340 is merged
static Sync<std::map<StorePath, std::optional<Derivation>>> resolutionsCache;
{
auto resolutions = resolutionsCache.lock();
auto resolvedDrvIter = resolutions->find(drvPath);
if (resolvedDrvIter != resolutions->end()) {
auto & [_, resolvedDrv] = *resolvedDrvIter;
return *resolvedDrv;
}
}
/* Try resolve drv and use that path instead. */
auto drv = store.readDerivation(drvPath);
auto attempt = drv.tryResolveUncached(store);
if (!attempt)
return std::nullopt;
/* Store in memo table. */
resolutionsCache.lock()->insert_or_assign(drvPath, *attempt);
return *attempt;
}
}

View file

@ -52,7 +52,7 @@ struct DerivationOutput
DerivationOutputCAFloating,
DerivationOutputDeferred
> output;
std::optional<HashType> hashAlgoOpt(const Store & store) const;
/* Note, when you use this function you should make sure that you're passing
the right derivation name. When in doubt, you should use the safer
interface provided by BasicDerivation::outputsAndOptPaths */
@ -94,6 +94,11 @@ bool derivationIsFixed(DerivationType);
derivation is controlled separately. Never true for non-CA derivations. */
bool derivationIsImpure(DerivationType);
/* Does the derivation know its own output paths?
* Only true when there's no floating-ca derivation involved in the closure.
*/
bool derivationHasKnownOutputPaths(DerivationType);
struct BasicDerivation
{
DerivationOutputs outputs; /* keyed on symbolic IDs */
@ -133,19 +138,15 @@ struct Derivation : BasicDerivation
/* Return the underlying basic derivation but with these changes:
1. Input drvs are emptied, but the outputs of them that were used are
added directly to input sources.
1. Input drvs are emptied, but the outputs of them that were used are
added directly to input sources.
2. Input placeholders are replaced with realized input store paths. */
std::optional<BasicDerivation> tryResolve(Store & store);
static std::optional<BasicDerivation> tryResolve(Store & store, const StorePath & drvPath);
Derivation() = default;
Derivation(const BasicDerivation & bd) : BasicDerivation(bd) { }
Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { }
private:
std::optional<BasicDerivation> tryResolveUncached(Store & store);
};

View file

@ -0,0 +1,118 @@
#include "derived-path.hh"
#include "store-api.hh"
#include <nlohmann/json.hpp>
namespace nix {
nlohmann::json DerivedPath::Opaque::toJSON(ref<Store> store) const {
nlohmann::json res;
res["path"] = store->printStorePath(path);
return res;
}
nlohmann::json BuiltPath::Built::toJSON(ref<Store> store) const {
nlohmann::json res;
res["drvPath"] = store->printStorePath(drvPath);
for (const auto& [output, path] : outputs) {
res["outputs"][output] = store->printStorePath(path);
}
return res;
}
StorePathSet BuiltPath::outPaths() const
{
return std::visit(
overloaded{
[](const BuiltPath::Opaque & p) { return StorePathSet{p.path}; },
[](const BuiltPath::Built & b) {
StorePathSet res;
for (auto & [_, path] : b.outputs)
res.insert(path);
return res;
},
}, raw()
);
}
nlohmann::json derivedPathsWithHintsToJSON(const BuiltPaths & buildables, ref<Store> store) {
auto res = nlohmann::json::array();
for (const BuiltPath & buildable : buildables) {
std::visit([&res, store](const auto & buildable) {
res.push_back(buildable.toJSON(store));
}, buildable.raw());
}
return res;
}
std::string DerivedPath::Opaque::to_string(const Store & store) const {
return store.printStorePath(path);
}
std::string DerivedPath::Built::to_string(const Store & store) const {
return store.printStorePath(drvPath)
+ "!"
+ (outputs.empty() ? std::string { "*" } : concatStringsSep(",", outputs));
}
std::string DerivedPath::to_string(const Store & store) const
{
return std::visit(
[&](const auto & req) { return req.to_string(store); },
this->raw());
}
DerivedPath::Opaque DerivedPath::Opaque::parse(const Store & store, std::string_view s)
{
return {store.parseStorePath(s)};
}
DerivedPath::Built DerivedPath::Built::parse(const Store & store, std::string_view s)
{
size_t n = s.find("!");
assert(n != s.npos);
auto drvPath = store.parseStorePath(s.substr(0, n));
auto outputsS = s.substr(n + 1);
std::set<string> outputs;
if (outputsS != "*")
outputs = tokenizeString<std::set<string>>(outputsS, ",");
return {drvPath, outputs};
}
DerivedPath DerivedPath::parse(const Store & store, std::string_view s)
{
size_t n = s.find("!");
return n == s.npos
? (DerivedPath) DerivedPath::Opaque::parse(store, s)
: (DerivedPath) DerivedPath::Built::parse(store, s);
}
RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
{
RealisedPath::Set res;
std::visit(
overloaded{
[&](const BuiltPath::Opaque & p) { res.insert(p.path); },
[&](const BuiltPath::Built & p) {
auto drvHashes =
staticOutputHashes(store, store.readDerivation(p.drvPath));
for (auto& [outputName, outputPath] : p.outputs) {
if (settings.isExperimentalFeatureEnabled(
Xp::CaDerivations)) {
auto thisRealisation = store.queryRealisation(
DrvOutput{drvHashes.at(outputName), outputName});
assert(thisRealisation); // We've built it, so we must have the realisation
res.insert(*thisRealisation);
} else {
res.insert(outputPath);
}
}
},
},
raw());
return res;
}
}

View file

@ -0,0 +1,123 @@
#pragma once
#include "util.hh"
#include "path.hh"
#include "realisation.hh"
#include <optional>
#include <nlohmann/json_fwd.hpp>
namespace nix {
class Store;
/**
* An opaque derived path.
*
* Opaque derived paths are just store paths, and fully evaluated. They
* cannot be simplified further. Since they are opaque, they cannot be
* built, but they can be fetched.
*/
struct DerivedPathOpaque {
StorePath path;
nlohmann::json toJSON(ref<Store> store) const;
std::string to_string(const Store & store) const;
static DerivedPathOpaque parse(const Store & store, std::string_view);
};
/**
* A derived path that is built from a derivation
*
* Built derived paths are a pair of a derivation and some output names.
* They are evaluated by building the derivation, and then replacing the
* output names with the resulting outputs.
*
* Note that this does mean a derived store path evaluates to multiple
* opaque paths, which is sort of icky as expressions are supposed to
* evaluate to single values. Perhaps this should have just a single
* output name.
*/
struct DerivedPathBuilt {
StorePath drvPath;
std::set<std::string> outputs;
std::string to_string(const Store & store) const;
static DerivedPathBuilt parse(const Store & store, std::string_view);
};
using _DerivedPathRaw = std::variant<
DerivedPathOpaque,
DerivedPathBuilt
>;
/**
* A "derived path" is a very simple sort of expression that evaluates
* to a (concrete) store path. It is either:
*
* - opaque, in which case it is just a concrete store path with
* possibly no known derivation
*
* - built, in which case it is a pair of a derivation path and an
* output name.
*/
struct DerivedPath : _DerivedPathRaw {
using Raw = _DerivedPathRaw;
using Raw::Raw;
using Opaque = DerivedPathOpaque;
using Built = DerivedPathBuilt;
inline const Raw & raw() const {
return static_cast<const Raw &>(*this);
}
std::string to_string(const Store & store) const;
static DerivedPath parse(const Store & store, std::string_view);
};
/**
* A built derived path with hints in the form of optional concrete output paths.
*
* See 'BuiltPath' for an explanation.
*/
struct BuiltPathBuilt {
StorePath drvPath;
std::map<std::string, StorePath> outputs;
nlohmann::json toJSON(ref<Store> store) const;
static BuiltPathBuilt parse(const Store & store, std::string_view);
};
using _BuiltPathRaw = std::variant<
DerivedPath::Opaque,
BuiltPathBuilt
>;
/**
* A built path. Similar to a `DerivedPath`, but enriched with the corresponding
* output path(s).
*/
struct BuiltPath : _BuiltPathRaw {
using Raw = _BuiltPathRaw;
using Raw::Raw;
using Opaque = DerivedPathOpaque;
using Built = BuiltPathBuilt;
inline const Raw & raw() const {
return static_cast<const Raw &>(*this);
}
StorePathSet outPaths() const;
RealisedPath::Set toRealisedPaths(Store & store) const;
};
typedef std::vector<DerivedPath> DerivedPaths;
typedef std::vector<BuiltPath> BuiltPaths;
nlohmann::json derivedPathsWithHintsToJSON(const BuiltPaths & buildables, ref<Store> store);
}
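As a small sketch of the textual syntax handled by `DerivedPath::parse` and `to_string` above, assuming a `Store` reference and an existing derivation path (the concrete paths are placeholders, not from the diff):
#include "derived-path.hh"
#include "store-api.hh"
#include <cassert>
using namespace nix;
void derivedPathDemo(const Store & store, const StorePath & drvPath)
{
    // ".../foo.drv!out" parses to Built { foo.drv, {"out"} }.
    auto built = DerivedPath::parse(store, store.printStorePath(drvPath) + "!out");
    // ".../foo.drv!*" means "all outputs", represented as an empty output set.
    auto all = DerivedPath::parse(store, store.printStorePath(drvPath) + "!*");
    // A bare store path parses to an Opaque derived path.
    auto opaque = DerivedPath::parse(store, store.printStorePath(drvPath));
    // The textual form round-trips.
    assert(built.to_string(store) == store.printStorePath(drvPath) + "!out");
    (void) all; (void) opaque;
}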

View file

@ -43,11 +43,6 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
RepairFlag repair, CheckSigsFlag checkSigs) override
{ unsupported("addToStore"); }
StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo,
PathFilter & filter, RepairFlag repair) override
{ unsupported("addToStore"); }
StorePath addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) override
{ unsupported("addTextToStore"); }
@ -55,13 +50,6 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
void narFromPath(const StorePath & path, Sink & sink) override
{ unsupported("narFromPath"); }
void ensurePath(const StorePath & path) override
{ unsupported("ensurePath"); }
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode) override
{ unsupported("buildDerivation"); }
std::optional<const Realisation> queryRealisation(const DrvOutput&) override
{ unsupported("queryRealisation"); }
};

View file

@ -7,7 +7,7 @@
#include "finally.hh"
#include "callback.hh"
#ifdef ENABLE_S3
#if ENABLE_S3
#include <aws/core/client/ClientConfiguration.h>
#endif
@ -148,7 +148,7 @@ struct curlFileTransfer : public FileTransfer
}
LambdaSink finalSink;
std::shared_ptr<CompressionSink> decompressionSink;
std::shared_ptr<FinishSink> decompressionSink;
std::optional<StringSink> errorSink;
std::exception_ptr writeException;
@ -375,6 +375,13 @@ struct curlFileTransfer : public FileTransfer
else if (code == CURLE_OK && successfulStatuses.count(httpStatus))
{
result.cached = httpStatus == 304;
// In 2021, GitHub responds to If-None-Match with 304,
// but omits ETag. We just use the If-None-Match etag
// since 304 implies they are the same.
if (httpStatus == 304 && result.etag == "")
result.etag = request.expectedETag;
act.progress(result.bodySize, result.bodySize);
done = true;
callback(std::move(result));
@ -632,11 +639,7 @@ struct curlFileTransfer : public FileTransfer
workerThreadMain();
} catch (nix::Interrupted & e) {
} catch (std::exception & e) {
logError({
.name = "File transfer",
.hint = hintfmt("unexpected error in download thread: %s",
e.what())
});
printError("unexpected error in download thread: %s", e.what());
}
{
@ -662,7 +665,7 @@ struct curlFileTransfer : public FileTransfer
writeFull(wakeupPipe.writeSide.get(), " ");
}
#ifdef ENABLE_S3
#if ENABLE_S3
std::tuple<std::string, std::string, Store::Params> parseS3Uri(std::string uri)
{
auto [path, params] = splitUriAndParams(uri);
@ -685,7 +688,7 @@ struct curlFileTransfer : public FileTransfer
if (hasPrefix(request.uri, "s3://")) {
// FIXME: do this on a worker thread
try {
#ifdef ENABLE_S3
#if ENABLE_S3
auto [bucketName, key, params] = parseS3Uri(request.uri);
std::string profile = get(params, "profile").value_or("");
@ -713,15 +716,24 @@ struct curlFileTransfer : public FileTransfer
}
};
ref<curlFileTransfer> makeCurlFileTransfer()
{
return make_ref<curlFileTransfer>();
}
ref<FileTransfer> getFileTransfer()
{
static ref<FileTransfer> fileTransfer = makeFileTransfer();
static ref<curlFileTransfer> fileTransfer = makeCurlFileTransfer();
if (fileTransfer->state_.lock()->quit)
fileTransfer = makeCurlFileTransfer();
return fileTransfer;
}
ref<FileTransfer> makeFileTransfer()
{
return make_ref<curlFileTransfer>();
return makeCurlFileTransfer();
}
std::future<FileTransferResult> FileTransfer::enqueueFileTransfer(const FileTransferRequest & request)
@ -852,11 +864,10 @@ FileTransferError::FileTransferError(FileTransfer::Error error, std::shared_ptr<
// FIXME: Due to https://github.com/NixOS/nix/issues/3841 we don't know how
// to print different messages for different verbosity levels. For now
// we add some heuristics for detecting when we want to show the response.
if (response && (response->size() < 1024 || response->find("<html>") != string::npos)) {
err.hint = hintfmt("%1%\n\nresponse body:\n\n%2%", normaltxt(hf.str()), *response);
} else {
err.hint = hf;
}
if (response && (response->size() < 1024 || response->find("<html>") != string::npos))
err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", normaltxt(hf.str()), chomp(*response));
else
err.msg = hf;
}
bool isUri(const string & s)

View file

@ -63,7 +63,7 @@ struct FileTransferRequest
std::string mimeType;
std::function<void(std::string_view data)> dataCallback;
FileTransferRequest(const std::string & uri)
FileTransferRequest(std::string_view uri)
: uri(uri), parentAct(getCurActivity()) { }
std::string verb()

View file

@ -10,48 +10,22 @@
#include <regex>
#include <random>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/statvfs.h>
#include <climits>
#include <errno.h>
#include <fcntl.h>
#include <poll.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/statvfs.h>
#include <sys/types.h>
#include <sys/un.h>
#include <unistd.h>
#include <climits>
namespace nix {
static string gcLockName = "gc.lock";
static string gcRootsDir = "gcroots";
/* Acquire the global GC lock. This is used to prevent new Nix
processes from starting after the temporary root files have been
read. To be precise: when they try to create a new temporary root
file, they will block until the garbage collector has finished /
yielded the GC lock. */
AutoCloseFD LocalStore::openGCLock(LockType lockType)
{
Path fnGCLock = (format("%1%/%2%")
% stateDir % gcLockName).str();
debug(format("acquiring global GC lock '%1%'") % fnGCLock);
AutoCloseFD fdGCLock = open(fnGCLock.c_str(), O_RDWR | O_CREAT | O_CLOEXEC, 0600);
if (!fdGCLock)
throw SysError("opening global GC lock '%1%'", fnGCLock);
if (!lockFile(fdGCLock.get(), lockType, false)) {
printInfo("waiting for the big garbage collector lock...");
lockFile(fdGCLock.get(), lockType, true);
}
/* !!! Restrict read permission on the GC root. Otherwise any
process that can open the file for reading can DoS the
collector. */
return fdGCLock;
}
static std::string gcSocketPath = "/gc-socket/socket";
static std::string gcRootsDir = "gcroots";
static void makeSymlink(const Path & link, const Path & target)
@ -71,12 +45,6 @@ static void makeSymlink(const Path & link, const Path & target)
}
void LocalStore::syncWithGC()
{
AutoCloseFD fdGCLock = openGCLock(ltRead);
}
void LocalStore::addIndirectRoot(const Path & path)
{
string hash = hashString(htSHA1, path).to_string(Base32, false);
@ -95,6 +63,12 @@ Path LocalFSStore::addPermRoot(const StorePath & storePath, const Path & _gcRoot
"creating a garbage collector root (%1%) in the Nix store is forbidden "
"(are you running nix-build inside the store?)", gcRoot);
/* Register this root with the garbage collector, if it's
running. This should be superfluous since the caller should
have registered this root already, but let's be on the safe
side. */
addTempRoot(storePath);
/* Don't clobber the link if it already exists and doesn't
point to the Nix store. */
if (pathExists(gcRoot) && (!isLink(gcRoot) || !isInStore(readLink(gcRoot))))
@ -102,11 +76,6 @@ Path LocalFSStore::addPermRoot(const StorePath & storePath, const Path & _gcRoot
makeSymlink(gcRoot, printStorePath(storePath));
addIndirectRoot(gcRoot);
/* Grab the global GC root, causing us to block while a GC is in
progress. This prevents the set of permanent roots from
increasing while a GC is in progress. */
syncWithGC();
return gcRoot;
}
@ -119,8 +88,6 @@ void LocalStore::addTempRoot(const StorePath & path)
if (!state->fdTempRoots) {
while (1) {
AutoCloseFD fdGCLock = openGCLock(ltRead);
if (pathExists(fnTempRoots))
/* It *must* be stale, since there can be no two
processes with the same pid. */
@ -128,10 +95,8 @@ void LocalStore::addTempRoot(const StorePath & path)
state->fdTempRoots = openLockFile(fnTempRoots, true);
fdGCLock = -1;
debug(format("acquiring read lock on '%1%'") % fnTempRoots);
lockFile(state->fdTempRoots.get(), ltRead, true);
debug("acquiring write lock on '%s'", fnTempRoots);
lockFile(state->fdTempRoots.get(), ltWrite, true);
/* Check whether the garbage collector didn't get in our
way. */
@ -147,24 +112,55 @@ void LocalStore::addTempRoot(const StorePath & path)
}
/* Upgrade the lock to a write lock. This will cause us to block
if the garbage collector is holding our lock. */
debug(format("acquiring write lock on '%1%'") % fnTempRoots);
lockFile(state->fdTempRoots.get(), ltWrite, true);
if (!state->fdGCLock)
state->fdGCLock = openGCLock();
restart:
FdLock gcLock(state->fdGCLock.get(), ltRead, false, "");
if (!gcLock.acquired) {
/* We couldn't get a shared global GC lock, so the garbage
collector is running. So we have to connect to the garbage
collector and inform it about our root. */
if (!state->fdRootsSocket) {
auto socketPath = stateDir.get() + gcSocketPath;
debug("connecting to '%s'", socketPath);
state->fdRootsSocket = createUnixDomainSocket();
nix::connect(state->fdRootsSocket.get(), socketPath);
}
try {
debug("sending GC root '%s'", printStorePath(path));
writeFull(state->fdRootsSocket.get(), printStorePath(path) + "\n", false);
char c;
readFull(state->fdRootsSocket.get(), &c, 1);
assert(c == '1');
debug("got ack for GC root '%s'", printStorePath(path));
} catch (SysError & e) {
/* The garbage collector may have exited, so we need to
restart. */
if (e.errNo == EPIPE) {
debug("GC socket disconnected");
state->fdRootsSocket.close();
goto restart;
}
} catch (EndOfFile & e) {
debug("GC socket disconnected");
state->fdRootsSocket.close();
goto restart;
}
}
/* Append the store path to the temporary roots file. */
string s = printStorePath(path) + '\0';
writeFull(state->fdTempRoots.get(), s);
/* Downgrade to a read lock. */
debug(format("downgrading to read lock on '%1%'") % fnTempRoots);
lockFile(state->fdTempRoots.get(), ltRead, true);
}
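Restating the root-registration handshake that addTempRoot performs above when a collection is running, as a client-side sketch (it relies on the same util.hh helpers the code above already uses; the EPIPE/EOF retry and error handling are left out):
// Sketch only: one printed store path per line out, one '1' byte back as the ack.
static void registerRootWithRunningCollector(const Path & socketPath, const std::string & printedStorePath)
{
    AutoCloseFD fd = createUnixDomainSocket();
    nix::connect(fd.get(), socketPath);
    writeFull(fd.get(), printedStorePath + "\n", false);
    char c;
    readFull(fd.get(), &c, 1);
    assert(c == '1');   // the collector now tracks the path as a temporary root
}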
static std::string censored = "{censored}";
void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor)
void LocalStore::findTempRoots(Roots & tempRoots, bool censor)
{
/* Read the `temproots' directory for per-process temporary root
files. */
@ -179,35 +175,25 @@ void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor)
pid_t pid = std::stoi(i.name);
debug(format("reading temporary root file '%1%'") % path);
FDPtr fd(new AutoCloseFD(open(path.c_str(), O_CLOEXEC | O_RDWR, 0666)));
if (!*fd) {
AutoCloseFD fd(open(path.c_str(), O_CLOEXEC | O_RDWR, 0666));
if (!fd) {
/* It's okay if the file has disappeared. */
if (errno == ENOENT) continue;
throw SysError("opening temporary roots file '%1%'", path);
}
/* This should work, but doesn't, for some reason. */
//FDPtr fd(new AutoCloseFD(openLockFile(path, false)));
//if (*fd == -1) continue;
/* Try to acquire a write lock without blocking. This can
only succeed if the owning process has died. In that case
we don't care about its temporary roots. */
if (lockFile(fd->get(), ltWrite, false)) {
if (lockFile(fd.get(), ltWrite, false)) {
printInfo("removing stale temporary roots file '%1%'", path);
unlink(path.c_str());
writeFull(fd->get(), "d");
writeFull(fd.get(), "d");
continue;
}
/* Acquire a read lock. This will prevent the owning process
from upgrading to a write lock, therefore it will block in
addTempRoot(). */
debug(format("waiting for read lock on '%1%'") % path);
lockFile(fd->get(), ltRead, true);
/* Read the entire file. */
string contents = readFile(fd->get());
string contents = readFile(fd.get());
/* Extract the roots. */
string::size_type pos = 0, end;
@ -218,8 +204,6 @@ void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor)
tempRoots[parseStorePath(root)].emplace(censor ? censored : fmt("{temp:%d}", pid));
pos = end + 1;
}
fds.push_back(fd); /* keep open */
}
}
@ -304,8 +288,7 @@ Roots LocalStore::findRoots(bool censor)
Roots roots;
findRootsNoTemp(roots, censor);
FDs fds;
findTempRoots(fds, roots, censor);
findTempRoots(roots, censor);
return roots;
}
@ -455,265 +438,139 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
struct GCLimitReached { };
struct LocalStore::GCState
{
const GCOptions & options;
GCResults & results;
StorePathSet roots;
StorePathSet tempRoots;
StorePathSet dead;
StorePathSet alive;
bool gcKeepOutputs;
bool gcKeepDerivations;
uint64_t bytesInvalidated;
bool moveToTrash = true;
bool shouldDelete;
GCState(const GCOptions & options, GCResults & results)
: options(options), results(results), bytesInvalidated(0) { }
};
bool LocalStore::isActiveTempFile(const GCState & state,
const Path & path, const string & suffix)
{
return hasSuffix(path, suffix)
&& state.tempRoots.count(parseStorePath(string(path, 0, path.size() - suffix.size())));
}
void LocalStore::deleteGarbage(GCState & state, const Path & path)
{
uint64_t bytesFreed;
deletePath(path, bytesFreed);
state.results.bytesFreed += bytesFreed;
}
void LocalStore::deletePathRecursive(GCState & state, const Path & path)
{
checkInterrupt();
uint64_t size = 0;
auto storePath = maybeParseStorePath(path);
if (storePath && isValidPath(*storePath)) {
StorePathSet referrers;
queryReferrers(*storePath, referrers);
for (auto & i : referrers)
if (printStorePath(i) != path) deletePathRecursive(state, printStorePath(i));
size = queryPathInfo(*storePath)->narSize;
invalidatePathChecked(*storePath);
}
Path realPath = realStoreDir + "/" + std::string(baseNameOf(path));
struct stat st;
if (lstat(realPath.c_str(), &st)) {
if (errno == ENOENT) return;
throw SysError("getting status of %1%", realPath);
}
printInfo(format("deleting '%1%'") % path);
state.results.paths.insert(path);
/* If the path is not a regular file or symlink, move it to the
trash directory. The move is to ensure that later (when we're
not holding the global GC lock) we can delete the path without
being afraid that the path has become alive again. Otherwise
delete it right away. */
if (state.moveToTrash && S_ISDIR(st.st_mode)) {
// Estimate the amount freed using the narSize field. FIXME:
// if the path was not valid, need to determine the actual
// size.
try {
if (chmod(realPath.c_str(), st.st_mode | S_IWUSR) == -1)
throw SysError("making '%1%' writable", realPath);
Path tmp = trashDir + "/" + std::string(baseNameOf(path));
if (rename(realPath.c_str(), tmp.c_str()))
throw SysError("unable to rename '%1%' to '%2%'", realPath, tmp);
state.bytesInvalidated += size;
} catch (SysError & e) {
if (e.errNo == ENOSPC) {
printInfo(format("note: can't create move '%1%': %2%") % realPath % e.msg());
deleteGarbage(state, realPath);
}
}
} else
deleteGarbage(state, realPath);
if (state.results.bytesFreed + state.bytesInvalidated > state.options.maxFreed) {
printInfo(format("deleted or invalidated more than %1% bytes; stopping") % state.options.maxFreed);
throw GCLimitReached();
}
}
bool LocalStore::canReachRoot(GCState & state, StorePathSet & visited, const StorePath & path)
{
if (visited.count(path)) return false;
if (state.alive.count(path)) return true;
if (state.dead.count(path)) return false;
if (state.roots.count(path)) {
debug("cannot delete '%1%' because it's a root", printStorePath(path));
state.alive.insert(path);
return true;
}
visited.insert(path);
if (!isValidPath(path)) return false;
StorePathSet incoming;
/* Don't delete this path if any of its referrers are alive. */
queryReferrers(path, incoming);
/* If keep-derivations is set and this is a derivation, then
don't delete the derivation if any of the outputs are alive. */
if (state.gcKeepDerivations && path.isDerivation()) {
for (auto & [name, maybeOutPath] : queryPartialDerivationOutputMap(path))
if (maybeOutPath &&
isValidPath(*maybeOutPath) &&
queryPathInfo(*maybeOutPath)->deriver == path
)
incoming.insert(*maybeOutPath);
}
/* If keep-outputs is set, then don't delete this path if there
are derivers of this path that are not garbage. */
if (state.gcKeepOutputs) {
auto derivers = queryValidDerivers(path);
for (auto & i : derivers)
incoming.insert(i);
}
for (auto & i : incoming)
if (i != path)
if (canReachRoot(state, visited, i)) {
state.alive.insert(path);
return true;
}
return false;
}
void LocalStore::tryToDelete(GCState & state, const Path & path)
{
checkInterrupt();
auto realPath = realStoreDir + "/" + std::string(baseNameOf(path));
if (realPath == linksDir || realPath == trashDir) return;
//Activity act(*logger, lvlDebug, format("considering whether to delete '%1%'") % path);
auto storePath = maybeParseStorePath(path);
if (!storePath || !isValidPath(*storePath)) {
/* A lock file belonging to a path that we're building right
now isn't garbage. */
if (isActiveTempFile(state, path, ".lock")) return;
/* Don't delete .chroot directories for derivations that are
currently being built. */
if (isActiveTempFile(state, path, ".chroot")) return;
/* Don't delete .check directories for derivations that are
currently being built, because we may need to run
diff-hook. */
if (isActiveTempFile(state, path, ".check")) return;
}
StorePathSet visited;
if (storePath && canReachRoot(state, visited, *storePath)) {
debug("cannot delete '%s' because it's still reachable", path);
} else {
/* No path we visited was a root, so everything is garbage.
But we only delete path and its referrers here so that
nix-store --delete doesn't have the unexpected effect of
recursing into derivations and outputs. */
for (auto & i : visited)
state.dead.insert(i);
if (state.shouldDelete)
deletePathRecursive(state, path);
}
}
/* Unlink all files in /nix/store/.links that have a link count of 1,
which indicates that there are no other links and so they can be
safely deleted. FIXME: race condition with optimisePath(): we
might see a link count of 1 just before optimisePath() increases
the link count. */
void LocalStore::removeUnusedLinks(const GCState & state)
{
AutoCloseDir dir(opendir(linksDir.c_str()));
if (!dir) throw SysError("opening directory '%1%'", linksDir);
int64_t actualSize = 0, unsharedSize = 0;
struct dirent * dirent;
while (errno = 0, dirent = readdir(dir.get())) {
checkInterrupt();
string name = dirent->d_name;
if (name == "." || name == "..") continue;
Path path = linksDir + "/" + name;
auto st = lstat(path);
if (st.st_nlink != 1) {
actualSize += st.st_size;
unsharedSize += (st.st_nlink - 1) * st.st_size;
continue;
}
printMsg(lvlTalkative, format("deleting unused link '%1%'") % path);
if (unlink(path.c_str()) == -1)
throw SysError("deleting '%1%'", path);
state.results.bytesFreed += st.st_size;
}
struct stat st;
if (stat(linksDir.c_str(), &st) == -1)
throw SysError("statting '%1%'", linksDir);
int64_t overhead = st.st_blocks * 512ULL;
printInfo("note: currently hard linking saves %.2f MiB",
((unsharedSize - actualSize - overhead) / (1024.0 * 1024.0)));
}
void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
{
GCState state(options, results);
state.gcKeepOutputs = settings.gcKeepOutputs;
state.gcKeepDerivations = settings.gcKeepDerivations;
bool shouldDelete = options.action == GCOptions::gcDeleteDead || options.action == GCOptions::gcDeleteSpecific;
bool gcKeepOutputs = settings.gcKeepOutputs;
bool gcKeepDerivations = settings.gcKeepDerivations;
StorePathSet roots, dead, alive;
struct Shared
{
// The temp roots only store the hash part to make it easier to
// ignore suffixes like '.lock', '.chroot' and '.check'.
std::unordered_set<std::string> tempRoots;
// Hash part of the store path currently being deleted, if
// any.
std::optional<std::string> pending;
};
Sync<Shared> _shared;
std::condition_variable wakeup;
/* Using `--ignore-liveness' with `--delete' can have unintended
consequences if `keep-outputs' or `keep-derivations' are true
(the garbage collector will recurse into deleting the outputs
or derivers, respectively). So disable them. */
if (options.action == GCOptions::gcDeleteSpecific && options.ignoreLiveness) {
state.gcKeepOutputs = false;
state.gcKeepDerivations = false;
gcKeepOutputs = false;
gcKeepDerivations = false;
}
state.shouldDelete = options.action == GCOptions::gcDeleteDead || options.action == GCOptions::gcDeleteSpecific;
if (state.shouldDelete)
if (shouldDelete)
deletePath(reservedPath);
/* Acquire the global GC root. This prevents
a) New roots from being added.
b) Processes from creating new temporary root files. */
AutoCloseFD fdGCLock = openGCLock(ltWrite);
/* Acquire the global GC root. Note: we don't use fdGCLock
here because then in auto-gc mode, another thread could
downgrade our exclusive lock. */
auto fdGCLock = openGCLock();
FdLock gcLock(fdGCLock.get(), ltWrite, true, "waiting for the big garbage collector lock...");
/* Start the server for receiving new roots. */
auto socketPath = stateDir.get() + gcSocketPath;
createDirs(dirOf(socketPath));
auto fdServer = createUnixDomainSocket(socketPath, 0666);
if (fcntl(fdServer.get(), F_SETFL, fcntl(fdServer.get(), F_GETFL) | O_NONBLOCK) == -1)
throw SysError("making socket '%1%' non-blocking", socketPath);
Pipe shutdownPipe;
shutdownPipe.create();
std::thread serverThread([&]() {
Sync<std::map<int, std::thread>> connections;
Finally cleanup([&]() {
debug("GC roots server shutting down");
while (true) {
auto item = remove_begin(*connections.lock());
if (!item) break;
auto & [fd, thread] = *item;
shutdown(fd, SHUT_RDWR);
thread.join();
}
});
while (true) {
std::vector<struct pollfd> fds;
fds.push_back({.fd = shutdownPipe.readSide.get(), .events = POLLIN});
fds.push_back({.fd = fdServer.get(), .events = POLLIN});
auto count = poll(fds.data(), fds.size(), -1);
assert(count != -1);
if (fds[0].revents)
/* Parent is asking us to quit. */
break;
if (fds[1].revents) {
/* Accept a new connection. */
assert(fds[1].revents & POLLIN);
AutoCloseFD fdClient = accept(fdServer.get(), nullptr, nullptr);
if (!fdClient) continue;
/* Process the connection in a separate thread. */
auto fdClient_ = fdClient.get();
std::thread clientThread([&, fdClient = std::move(fdClient)]() {
Finally cleanup([&]() {
auto conn(connections.lock());
auto i = conn->find(fdClient.get());
if (i != conn->end()) {
i->second.detach();
conn->erase(i);
}
});
while (true) {
try {
auto path = readLine(fdClient.get());
auto storePath = maybeParseStorePath(path);
if (storePath) {
debug("got new GC root '%s'", path);
auto hashPart = std::string(storePath->hashPart());
auto shared(_shared.lock());
shared->tempRoots.insert(hashPart);
/* If this path is currently being
deleted, then we have to wait until
deletion is finished to ensure that
the client doesn't start
re-creating it before we're
done. FIXME: ideally we would use a
FD for this so we don't block the
poll loop. */
while (shared->pending == hashPart) {
debug("synchronising with deletion of path '%s'", path);
shared.wait(wakeup);
}
} else
printError("received garbage instead of a root from client");
writeFull(fdClient.get(), "1", false);
} catch (Error &) { break; }
}
});
connections.lock()->insert({fdClient_, std::move(clientThread)});
}
}
});
Finally stopServer([&]() {
writeFull(shutdownPipe.writeSide.get(), "x", false);
wakeup.notify_all();
if (serverThread.joinable()) serverThread.join();
});
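To make the root-registration protocol served by the thread above concrete, here is a minimal client-side sketch. It assumes only what the server loop shows: one store path per line over the Unix domain socket, answered by a single "1" byte once the root has been recorded (and any pending deletion of that path has finished). The function name and error handling are illustrative, not part of this change.

#include <sys/socket.h>
#include <sys/un.h>
#include <unistd.h>
#include <cstring>
#include <stdexcept>
#include <string>

/* Hypothetical client for the GC roots socket: send one store path per
   line, then wait for the one-byte acknowledgement. */
void registerTempRoot(const std::string & socketPath, const std::string & storePath)
{
    int fd = socket(AF_UNIX, SOCK_STREAM, 0);
    if (fd == -1) throw std::runtime_error("cannot create Unix domain socket");

    sockaddr_un addr{};
    addr.sun_family = AF_UNIX;
    std::strncpy(addr.sun_path, socketPath.c_str(), sizeof(addr.sun_path) - 1);
    if (connect(fd, reinterpret_cast<sockaddr *>(&addr), sizeof(addr)) == -1) {
        close(fd);
        throw std::runtime_error("cannot connect to " + socketPath);
    }

    std::string line = storePath + "\n";
    if (write(fd, line.data(), line.size()) != static_cast<ssize_t>(line.size())) {
        close(fd);
        throw std::runtime_error("cannot send root to the garbage collector");
    }

    char ack = 0;
    if (read(fd, &ack, 1) != 1 || ack != '1') {
        close(fd);
        throw std::runtime_error("garbage collector did not acknowledge the root");
    }
    close(fd);
}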
/* Find the roots. Since we've grabbed the GC lock, the set of
permanent roots cannot increase now. */
@ -722,124 +579,256 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
if (!options.ignoreLiveness)
findRootsNoTemp(rootMap, true);
for (auto & i : rootMap) state.roots.insert(i.first);
for (auto & i : rootMap) roots.insert(i.first);
/* Read the temporary roots. This acquires read locks on all
per-process temporary root files. So after this point no paths
can be added to the set of temporary roots. */
FDs fds;
/* Read the temporary roots created before we acquired the global
GC root. Any new roots will be sent to our socket. */
Roots tempRoots;
findTempRoots(fds, tempRoots, true);
findTempRoots(tempRoots, true);
for (auto & root : tempRoots) {
state.tempRoots.insert(root.first);
state.roots.insert(root.first);
_shared.lock()->tempRoots.insert(std::string(root.first.hashPart()));
roots.insert(root.first);
}
/* After this point the set of roots or temporary roots cannot
increase, since we hold locks on everything. So everything
that is not reachable from `roots' is garbage. */
/* Helper function that deletes a path from the store and throws
GCLimitReached if we've deleted enough garbage. */
auto deleteFromStore = [&](std::string_view baseName)
{
Path path = storeDir + "/" + std::string(baseName);
Path realPath = realStoreDir + "/" + std::string(baseName);
if (state.shouldDelete) {
if (pathExists(trashDir)) deleteGarbage(state, trashDir);
try {
createDirs(trashDir);
} catch (SysError & e) {
if (e.errNo == ENOSPC) {
printInfo("note: can't create trash directory: %s", e.msg());
state.moveToTrash = false;
printInfo("deleting '%1%'", path);
results.paths.insert(path);
uint64_t bytesFreed;
deletePath(realPath, bytesFreed);
results.bytesFreed += bytesFreed;
if (results.bytesFreed > options.maxFreed) {
printInfo("deleted more than %d bytes; stopping", options.maxFreed);
throw GCLimitReached();
}
};
std::map<StorePath, StorePathSet> referrersCache;
/* Helper function that visits all paths reachable from `start`
via the referrers edges and optionally derivers and derivation
output edges. If none of those paths are roots, then all
visited paths are garbage and are deleted. */
auto deleteReferrersClosure = [&](const StorePath & start) {
StorePathSet visited;
std::queue<StorePath> todo;
/* Wake up any GC client waiting for deletion of the paths in
'visited' to finish. */
Finally releasePending([&]() {
auto shared(_shared.lock());
shared->pending.reset();
wakeup.notify_all();
});
auto enqueue = [&](const StorePath & path) {
if (visited.insert(path).second)
todo.push(path);
};
enqueue(start);
while (auto path = pop(todo)) {
checkInterrupt();
/* Bail out if we've previously discovered that this path
is alive. */
if (alive.count(*path)) {
alive.insert(start);
return;
}
/* If we've previously deleted this path, we don't have to
handle it again. */
if (dead.count(*path)) continue;
auto markAlive = [&]()
{
alive.insert(*path);
alive.insert(start);
try {
StorePathSet closure;
computeFSClosure(*path, closure);
for (auto & p : closure)
alive.insert(p);
} catch (InvalidPath &) { }
};
/* If this is a root, bail out. */
if (roots.count(*path)) {
debug("cannot delete '%s' because it's a root", printStorePath(*path));
return markAlive();
}
if (options.action == GCOptions::gcDeleteSpecific
&& !options.pathsToDelete.count(*path))
return;
{
auto hashPart = std::string(path->hashPart());
auto shared(_shared.lock());
if (shared->tempRoots.count(hashPart)) {
debug("cannot delete '%s' because it's a temporary root", printStorePath(*path));
return markAlive();
}
shared->pending = hashPart;
}
if (isValidPath(*path)) {
/* Visit the referrers of this path. */
auto i = referrersCache.find(*path);
if (i == referrersCache.end()) {
StorePathSet referrers;
queryReferrers(*path, referrers);
referrersCache.emplace(*path, std::move(referrers));
i = referrersCache.find(*path);
}
for (auto & p : i->second)
enqueue(p);
/* If keep-derivations is set and this is a
derivation, then visit the derivation outputs. */
if (gcKeepDerivations && path->isDerivation()) {
for (auto & [name, maybeOutPath] : queryPartialDerivationOutputMap(*path))
if (maybeOutPath &&
isValidPath(*maybeOutPath) &&
queryPathInfo(*maybeOutPath)->deriver == *path)
enqueue(*maybeOutPath);
}
/* If keep-outputs is set, then visit the derivers. */
if (gcKeepOutputs) {
auto derivers = queryValidDerivers(*path);
for (auto & i : derivers)
enqueue(i);
}
}
}
}
/* Now either delete all garbage paths, or just the specified
for (auto & path : topoSortPaths(visited)) {
if (!dead.insert(path).second) continue;
if (shouldDelete) {
invalidatePathChecked(path);
deleteFromStore(path.to_string());
referrersCache.erase(path);
}
}
};
/* Synchronisation point for testing, see tests/gc-concurrent.sh. */
if (auto p = getEnv("_NIX_TEST_GC_SYNC"))
readFile(*p);
/* Either delete all garbage paths, or just the specified
paths (for gcDeleteSpecific). */
if (options.action == GCOptions::gcDeleteSpecific) {
for (auto & i : options.pathsToDelete) {
tryToDelete(state, printStorePath(i));
if (state.dead.find(i) == state.dead.end())
deleteReferrersClosure(i);
if (!dead.count(i))
throw Error(
"cannot delete path '%1%' since it is still alive. "
"To find out why use: "
"Cannot delete path '%1%' since it is still alive. "
"To find out why, use: "
"nix-store --query --roots",
printStorePath(i));
}
} else if (options.maxFreed > 0) {
if (state.shouldDelete)
if (shouldDelete)
printInfo("deleting garbage...");
else
printInfo("determining live/dead paths...");
try {
AutoCloseDir dir(opendir(realStoreDir.c_str()));
AutoCloseDir dir(opendir(realStoreDir.get().c_str()));
if (!dir) throw SysError("opening directory '%1%'", realStoreDir);
/* Read the store and immediately delete all paths that
aren't valid. When using --max-freed etc., deleting
invalid paths is preferred over deleting unreachable
paths, since unreachable paths could become reachable
again. We don't use readDirectory() here so that GCing
can start faster. */
/* Read the store and delete all paths that are invalid or
unreachable. We don't use readDirectory() here so that
GCing can start faster. */
auto linksName = baseNameOf(linksDir);
Paths entries;
struct dirent * dirent;
while (errno = 0, dirent = readdir(dir.get())) {
checkInterrupt();
string name = dirent->d_name;
if (name == "." || name == "..") continue;
Path path = storeDir + "/" + name;
auto storePath = maybeParseStorePath(path);
if (storePath && isValidPath(*storePath))
entries.push_back(path);
if (name == "." || name == ".." || name == linksName) continue;
if (auto storePath = maybeParseStorePath(storeDir + "/" + name))
deleteReferrersClosure(*storePath);
else
tryToDelete(state, path);
deleteFromStore(name);
}
dir.reset();
/* Now delete the unreachable valid paths. Randomise the
order in which we delete entries to make the collector
less biased towards deleting paths that come
alphabetically first (e.g. /nix/store/000...). This
matters when using --max-freed etc. */
vector<Path> entries_(entries.begin(), entries.end());
std::mt19937 gen(1);
std::shuffle(entries_.begin(), entries_.end(), gen);
for (auto & i : entries_)
tryToDelete(state, i);
} catch (GCLimitReached & e) {
}
}
if (state.options.action == GCOptions::gcReturnLive) {
for (auto & i : state.alive)
state.results.paths.insert(printStorePath(i));
if (options.action == GCOptions::gcReturnLive) {
for (auto & i : alive)
results.paths.insert(printStorePath(i));
return;
}
if (state.options.action == GCOptions::gcReturnDead) {
for (auto & i : state.dead)
state.results.paths.insert(printStorePath(i));
if (options.action == GCOptions::gcReturnDead) {
for (auto & i : dead)
results.paths.insert(printStorePath(i));
return;
}
/* Allow other processes to add to the store from here on. */
fdGCLock = -1;
fds.clear();
/* Delete the trash directory. */
printInfo(format("deleting '%1%'") % trashDir);
deleteGarbage(state, trashDir);
/* Clean up the links directory. */
/* Unlink all files in /nix/store/.links that have a link count of 1,
which indicates that there are no other links and so they can be
safely deleted. FIXME: race condition with optimisePath(): we
might see a link count of 1 just before optimisePath() increases
the link count. */
if (options.action == GCOptions::gcDeleteDead || options.action == GCOptions::gcDeleteSpecific) {
printInfo("deleting unused links...");
removeUnusedLinks(state);
AutoCloseDir dir(opendir(linksDir.c_str()));
if (!dir) throw SysError("opening directory '%1%'", linksDir);
int64_t actualSize = 0, unsharedSize = 0;
struct dirent * dirent;
while (errno = 0, dirent = readdir(dir.get())) {
checkInterrupt();
string name = dirent->d_name;
if (name == "." || name == "..") continue;
Path path = linksDir + "/" + name;
auto st = lstat(path);
if (st.st_nlink != 1) {
actualSize += st.st_size;
unsharedSize += (st.st_nlink - 1) * st.st_size;
continue;
}
printMsg(lvlTalkative, format("deleting unused link '%1%'") % path);
if (unlink(path.c_str()) == -1)
throw SysError("deleting '%1%'", path);
results.bytesFreed += st.st_size;
}
struct stat st;
if (stat(linksDir.c_str(), &st) == -1)
throw SysError("statting '%1%'", linksDir);
int64_t overhead = st.st_blocks * 512ULL;
printInfo("note: currently hard linking saves %.2f MiB",
((unsharedSize - actualSize - overhead) / (1024.0 * 1024.0)));
}
/* While we're at it, vacuum the database. */
@ -856,7 +845,7 @@ void LocalStore::autoGC(bool sync)
return std::stoll(readFile(*fakeFreeSpaceFile));
struct statvfs st;
if (statvfs(realStoreDir.c_str(), &st))
if (statvfs(realStoreDir.get().c_str(), &st))
throw SysError("getting filesystem info about '%s'", realStoreDir);
return (uint64_t) st.f_bavail * st.f_frsize;


@ -3,6 +3,7 @@
#include "archive.hh"
#include "args.hh"
#include "abstract-setting-to-json.hh"
#include "compute-levels.hh"
#include <algorithm>
#include <map>
@ -80,7 +81,7 @@ void loadConfFile()
/* We only want to send overrides to the daemon, i.e. stuff from
~/.nix/nix.conf or the command line. */
globalConfig.resetOverriden();
globalConfig.resetOverridden();
auto files = settings.nixUserConfFiles;
for (auto file = files.rbegin(); file != files.rend(); file++) {
@ -131,16 +132,44 @@ StringSet Settings::getDefaultSystemFeatures()
return features;
}
bool Settings::isExperimentalFeatureEnabled(const std::string & name)
StringSet Settings::getDefaultExtraPlatforms()
{
auto & f = experimentalFeatures.get();
return std::find(f.begin(), f.end(), name) != f.end();
StringSet extraPlatforms;
if (std::string{SYSTEM} == "x86_64-linux" && !isWSL1())
extraPlatforms.insert("i686-linux");
#if __linux__
StringSet levels = computeLevels();
for (auto iter = levels.begin(); iter != levels.end(); ++iter)
extraPlatforms.insert(*iter + "-linux");
#elif __APPLE__
// Rosetta 2 emulation layer can run x86_64 binaries on aarch64
// machines. Note that we can't stop processes from executing
// x86_64 in aarch64 environments or vice versa since they can
// always exec with their own binary preferences.
if (pathExists("/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist") ||
pathExists("/System/Library/LaunchDaemons/com.apple.oahd.plist")) {
if (std::string{SYSTEM} == "x86_64-darwin")
extraPlatforms.insert("aarch64-darwin");
else if (std::string{SYSTEM} == "aarch64-darwin")
extraPlatforms.insert("x86_64-darwin");
}
#endif
return extraPlatforms;
}
void Settings::requireExperimentalFeature(const std::string & name)
bool Settings::isExperimentalFeatureEnabled(const ExperimentalFeature & feature)
{
if (!isExperimentalFeatureEnabled(name))
throw Error("experimental Nix feature '%1%' is disabled; use '--experimental-features %1%' to override", name);
auto & f = experimentalFeatures.get();
return std::find(f.begin(), f.end(), feature) != f.end();
}
void Settings::requireExperimentalFeature(const ExperimentalFeature & feature)
{
if (!isExperimentalFeatureEnabled(feature))
throw MissingExperimentalFeature(feature);
}
bool Settings::isWSL1()
@ -206,13 +235,26 @@ template<> void BaseSetting<SandboxMode>::convertToArg(Args & args, const std::s
void MaxBuildJobsSetting::set(const std::string & str, bool append)
{
if (str == "auto") value = std::max(1U, std::thread::hardware_concurrency());
else if (!string2Int(str, value))
throw UsageError("configuration setting '%s' should be 'auto' or an integer", name);
else {
if (auto n = string2Int<decltype(value)>(str))
value = *n;
else
throw UsageError("configuration setting '%s' should be 'auto' or an integer", name);
}
}
void PluginFilesSetting::set(const std::string & str, bool append)
{
if (pluginsLoaded)
throw UsageError("plugin-files set after plugins were loaded; you may need to move the flag before the subcommand");
BaseSetting<Paths>::set(str, append);
}
void initPlugins()
{
assert(!settings.pluginFiles.pluginsLoaded);
for (const auto & pluginFile : settings.pluginFiles.get()) {
Paths pluginFiles;
try {
@ -238,6 +280,9 @@ void initPlugins()
unknown settings. */
globalConfig.reapplyUnknownSettings();
globalConfig.warnUnknownSettings();
/* Tell the user if they try to set plugin-files after we've already loaded */
settings.pluginFiles.pluginsLoaded = true;
}
}


@ -3,6 +3,7 @@
#include "types.hh"
#include "config.hh"
#include "util.hh"
#include "experimental-features.hh"
#include <map>
#include <limits>
@ -28,12 +29,31 @@ struct MaxBuildJobsSetting : public BaseSetting<unsigned int>
void set(const std::string & str, bool append = false) override;
};
struct PluginFilesSetting : public BaseSetting<Paths>
{
bool pluginsLoaded = false;
PluginFilesSetting(Config * options,
const Paths & def,
const std::string & name,
const std::string & description,
const std::set<std::string> & aliases = {})
: BaseSetting<Paths>(def, name, description, aliases)
{
options->addSetting(this);
}
void set(const std::string & str, bool append = false) override;
};
class Settings : public Config {
unsigned int getDefaultCores();
StringSet getDefaultSystemFeatures();
StringSet getDefaultExtraPlatforms();
bool isWSL1();
public:
@ -178,7 +198,10 @@ public:
Setting<std::string> builders{
this, "@" + nixConfDir + "/machines", "builders",
"A semicolon-separated list of build machines, in the format of `nix.machines`."};
R"(
A semicolon-separated list of build machines.
For the exact format and examples, see [the manual chapter on remote builds](../advanced-topics/distributed-builds.md)
)"};
Setting<bool> buildersUseSubstitutes{
this, false, "builders-use-substitutes",
@ -545,7 +568,7 @@ public:
Setting<StringSet> extraPlatforms{
this,
std::string{SYSTEM} == "x86_64-linux" && !isWSL1() ? StringSet{"i686-linux"} : StringSet{},
getDefaultExtraPlatforms(),
"extra-platforms",
R"(
Platforms other than the native one which this machine is capable of
@ -586,8 +609,10 @@ public:
Strings{"https://cache.nixos.org/"},
"substituters",
R"(
A list of URLs of substituters, separated by whitespace. The default
is `https://cache.nixos.org`.
A list of URLs of substituters, separated by whitespace. Substituters
are tried based on their Priority value, which each substituter can set
independently. Lower value means higher priority.
The default is `https://cache.nixos.org`, with a Priority of 40.
)",
{"binary-caches"}};
@ -613,7 +638,7 @@ public:
is `root`.
> **Warning**
>
>
> Adding a user to `trusted-users` is essentially equivalent to
> giving that user root access to the system. For example, the user
> can set `sandbox-paths` and thereby obtain read access to
@ -670,7 +695,7 @@ public:
send a series of commands to modify various settings to stdout. The
currently recognized commands are:
- `extra-sandbox-paths`
- `extra-sandbox-paths`\
Pass a list of files and directories to be included in the
sandbox for this build. One entry per line, terminated by an
empty line. Entries have the same format as `sandbox-paths`.
@ -703,13 +728,13 @@ public:
The program executes with no arguments. The program's environment
contains the following environment variables:
- `DRV_PATH`
- `DRV_PATH`
The derivation for the built paths.
Example:
`/nix/store/5nihn1a7pa8b25l9zafqaqibznlvvp3f-bash-4.4-p23.drv`
- `OUT_PATHS`
- `OUT_PATHS`
Output paths of the built derivation, separated by a space
character.
@ -740,7 +765,7 @@ public:
documentation](https://ec.haxx.se/usingcurl-netrc.html).
> **Note**
>
>
> This must be an absolute path, and `~` is not resolved. For
> example, `~/.netrc` won't resolve to your home directory's
> `.netrc`.
@ -817,7 +842,7 @@ public:
Setting<uint64_t> minFreeCheckInterval{this, 5, "min-free-check-interval",
"Number of seconds between checking free disk space."};
Setting<Paths> pluginFiles{
PluginFilesSetting pluginFiles{
this, {}, "plugin-files",
R"(
A list of plugin files to be loaded by Nix. Each of these files will
@ -829,6 +854,9 @@ public:
command, and RegisterSetting to add new nix config settings. See the
constructors for those types for more details.
Warning! These APIs are inherently unstable and may change from
release to release.
Since these files are loaded into the same address space as Nix
itself, they must be DSOs compatible with the instance of Nix
running at the time (i.e. compiled against the same headers, not
@ -889,12 +917,12 @@ public:
value.
)"};
Setting<Strings> experimentalFeatures{this, {}, "experimental-features",
Setting<std::set<ExperimentalFeature>> experimentalFeatures{this, {}, "experimental-features",
"Experimental Nix features to enable."};
bool isExperimentalFeatureEnabled(const std::string & name);
bool isExperimentalFeatureEnabled(const ExperimentalFeature &);
void requireExperimentalFeature(const std::string & name);
void requireExperimentalFeature(const ExperimentalFeature &);
Setting<bool> allowDirty{this, true, "allow-dirty",
"Whether to allow dirty Git/Mercurial trees."};
@ -920,6 +948,9 @@ public:
resolves to a different location from that of the build machine. You
can enable this setting if you are sure you're not going to do that.
)"};
Setting<bool> useRegistries{this, true, "use-registries",
"Whether to use flake registries to resolve flake references."};
};


@ -57,8 +57,8 @@ public:
{
// FIXME: do this lazily?
if (auto cacheInfo = diskCache->cacheExists(cacheUri)) {
wantMassQuery.setDefault(cacheInfo->wantMassQuery ? "true" : "false");
priority.setDefault(fmt("%d", cacheInfo->priority));
wantMassQuery.setDefault(cacheInfo->wantMassQuery);
priority.setDefault(cacheInfo->priority);
} else {
try {
BinaryCacheStore::init();


@ -3,6 +3,7 @@
#include "remote-store.hh"
#include "serve-protocol.hh"
#include "store-api.hh"
#include "path-with-outputs.hh"
#include "worker-protocol.hh"
#include "ssh.hh"
#include "derivations.hh"
@ -15,6 +16,7 @@ struct LegacySSHStoreConfig : virtual StoreConfig
using StoreConfig::StoreConfig;
const Setting<int> maxConnections{(StoreConfig*) this, 1, "max-connections", "maximum number of concurrent SSH connections"};
const Setting<Path> sshKey{(StoreConfig*) this, "", "ssh-key", "path to an SSH private key"};
const Setting<std::string> sshPublicHostKey{(StoreConfig*) this, "", "base64-ssh-public-host-key", "The public half of the host's SSH key"};
const Setting<bool> compress{(StoreConfig*) this, false, "compress", "whether to compress the connection"};
const Setting<Path> remoteProgram{(StoreConfig*) this, "nix-store", "remote-program", "path to the nix-store executable on the remote system"};
const Setting<std::string> remoteStore{(StoreConfig*) this, "", "remote-store", "URI of the store on the remote system"};
@ -59,6 +61,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
, master(
host,
sshKey,
sshPublicHostKey,
// Use SSH master only if using more than 1 connection.
connections->capacity() > 1,
compress,
@ -79,9 +82,20 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
conn->to.flush();
unsigned int magic = readInt(conn->from);
if (magic != SERVE_MAGIC_2)
throw Error("protocol mismatch with 'nix-store --serve' on '%s'", host);
StringSink saved;
try {
TeeSource tee(conn->from, saved);
unsigned int magic = readInt(tee);
if (magic != SERVE_MAGIC_2)
throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
} catch (SerialisationError & e) {
/* In case the other side is waiting for our input,
close it. */
conn->sshConn->in.close();
auto msg = conn->from.drain();
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
host, chomp(*saved.s + msg));
}
conn->remoteVersion = readInt(conn->from);
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);
@ -234,6 +248,10 @@ private:
conn.to
<< settings.buildRepeat
<< settings.enforceDeterminism;
if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 7) {
conn.to << ((int) settings.keepFailed);
}
}
public:
@ -258,18 +276,32 @@ public:
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3)
conn->from >> status.timesBuilt >> status.isNonDeterministic >> status.startTime >> status.stopTime;
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 6) {
status.builtOutputs = worker_proto::read(*this, conn->from, Phantom<DrvOutputs> {});
}
return status;
}
void buildPaths(const std::vector<StorePathWithOutputs> & drvPaths, BuildMode buildMode) override
void buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override
{
if (evalStore && evalStore.get() != this)
throw Error("building on an SSH store is incompatible with '--eval-store'");
auto conn(connections->get());
conn->to << cmdBuildPaths;
Strings ss;
for (auto & p : drvPaths)
ss.push_back(p.to_string(*this));
for (auto & p : drvPaths) {
auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p);
std::visit(overloaded {
[&](const StorePathWithOutputs & s) {
ss.push_back(s.to_string(*this));
},
[&](const StorePath & drvPath) {
throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath));
},
}, sOrDrvPath);
}
conn->to << ss;
putBuildSettings(*conn);


@ -2,6 +2,8 @@
#include "globals.hh"
#include "nar-info-disk-cache.hh"
#include <atomic>
namespace nix {
struct LocalBinaryCacheStoreConfig : virtual BinaryCacheStoreConfig
@ -50,7 +52,8 @@ protected:
const std::string & mimeType) override
{
auto path2 = binaryCacheDir + "/" + path;
Path tmp = path2 + ".tmp." + std::to_string(getpid());
static std::atomic<int> counter{0};
Path tmp = fmt("%s.tmp.%d.%d", path2, getpid(), ++counter);
AutoDelete del(tmp, false);
StreamToSourceAdapter source(istream);
writeFile(tmp, source);
@ -90,7 +93,7 @@ protected:
void LocalBinaryCacheStore::init()
{
createDirs(binaryCacheDir + "/nar");
createDirs(binaryCacheDir + realisationsPrefix);
createDirs(binaryCacheDir + "/" + realisationsPrefix);
if (writeDebugInfo)
createDirs(binaryCacheDir + "/debuginfo");
BinaryCacheStore::init();


@ -18,6 +18,9 @@ struct LocalFSStoreConfig : virtual StoreConfig
const PathSetting logDir{(StoreConfig*) this, false,
rootDir != "" ? rootDir + "/nix/var/log/nix" : settings.nixLogDir,
"log", "directory where Nix will store state"};
const PathSetting realStoreDir{(StoreConfig*) this, false,
rootDir != "" ? rootDir + "/nix/store" : storeDir, "real",
"physical path to the Nix store"};
};
class LocalFSStore : public virtual LocalFSStoreConfig, public virtual Store
@ -34,7 +37,7 @@ public:
/* Register a permanent GC root. */
Path addPermRoot(const StorePath & storePath, const Path & gcRoot);
virtual Path getRealStoreDir() { return storeDir; }
virtual Path getRealStoreDir() { return realStoreDir; }
Path toRealPath(const Path & storePath) override
{


@ -53,12 +53,15 @@ struct LocalStore::State::Stmts {
SQLiteStmt InvalidatePath;
SQLiteStmt AddDerivationOutput;
SQLiteStmt RegisterRealisedOutput;
SQLiteStmt UpdateRealisedOutput;
SQLiteStmt QueryValidDerivers;
SQLiteStmt QueryDerivationOutputs;
SQLiteStmt QueryRealisedOutput;
SQLiteStmt QueryAllRealisedOutputs;
SQLiteStmt QueryPathFromHashPart;
SQLiteStmt QueryValidPaths;
SQLiteStmt QueryRealisationReferences;
SQLiteStmt AddRealisationReference;
};
int getSchema(Path schemaPath)
@ -66,15 +69,17 @@ int getSchema(Path schemaPath)
int curSchema = 0;
if (pathExists(schemaPath)) {
string s = readFile(schemaPath);
if (!string2Int(s, curSchema))
auto n = string2Int<int>(s);
if (!n)
throw Error("'%1%' is corrupt", schemaPath);
curSchema = *n;
}
return curSchema;
}
void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd)
{
const int nixCASchemaVersion = 1;
const int nixCASchemaVersion = 2;
int curCASchema = getSchema(schemaPath);
if (curCASchema != nixCASchemaVersion) {
if (curCASchema > nixCASchemaVersion) {
@ -92,7 +97,39 @@ void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd)
#include "ca-specific-schema.sql.gen.hh"
;
db.exec(schema);
curCASchema = nixCASchemaVersion;
}
if (curCASchema < 2) {
SQLiteTxn txn(db);
// Ugly little sql dance to add a new `id` column and make it the primary key
db.exec(R"(
create table Realisations2 (
id integer primary key autoincrement not null,
drvPath text not null,
outputName text not null, -- symbolic output id, usually "out"
outputPath integer not null,
signatures text, -- space-separated list
foreign key (outputPath) references ValidPaths(id) on delete cascade
);
insert into Realisations2 (drvPath, outputName, outputPath, signatures)
select drvPath, outputName, outputPath, signatures from Realisations;
drop table Realisations;
alter table Realisations2 rename to Realisations;
)");
db.exec(R"(
create index if not exists IndexRealisations on Realisations(drvPath, outputName);
create table if not exists RealisationsRefs (
referrer integer not null,
realisationReference integer,
foreign key (referrer) references Realisations(id) on delete cascade,
foreign key (realisationReference) references Realisations(id) on delete restrict
);
)");
txn.commit();
}
writeFile(schemaPath, fmt("%d", nixCASchemaVersion));
lockFile(lockFd.get(), ltRead, true);
}
@ -104,14 +141,10 @@ LocalStore::LocalStore(const Params & params)
, LocalStoreConfig(params)
, Store(params)
, LocalFSStore(params)
, realStoreDir_{this, false, rootDir != "" ? rootDir + "/nix/store" : storeDir, "real",
"physical path to the Nix store"}
, realStoreDir(realStoreDir_)
, dbDir(stateDir + "/db")
, linksDir(realStoreDir + "/.links")
, reservedPath(dbDir + "/reserved")
, schemaPath(dbDir + "/schema")
, trashDir(realStoreDir + "/trash")
, tempRootsDir(stateDir + "/temproots")
, fnTempRoots(fmt("%s/%d", tempRootsDir, getpid()))
, locksHeld(tokenizeString<PathSet>(getEnv("NIX_HELD_LOCKS").value_or("")))
@ -148,21 +181,16 @@ LocalStore::LocalStore(const Params & params)
struct group * gr = getgrnam(settings.buildUsersGroup.get().c_str());
if (!gr)
logError({
.name = "'build-users-group' not found",
.hint = hintfmt(
"warning: the group '%1%' specified in 'build-users-group' does not exist",
settings.buildUsersGroup)
});
printError("warning: the group '%1%' specified in 'build-users-group' does not exist", settings.buildUsersGroup);
else {
struct stat st;
if (stat(realStoreDir.c_str(), &st))
if (stat(realStoreDir.get().c_str(), &st))
throw SysError("getting attributes of path '%1%'", realStoreDir);
if (st.st_uid != 0 || st.st_gid != gr->gr_gid || (st.st_mode & ~S_IFMT) != perm) {
if (chown(realStoreDir.c_str(), 0, gr->gr_gid) == -1)
if (chown(realStoreDir.get().c_str(), 0, gr->gr_gid) == -1)
throw SysError("changing ownership of path '%1%'", realStoreDir);
if (chmod(realStoreDir.c_str(), perm) == -1)
if (chmod(realStoreDir.get().c_str(), perm) == -1)
throw SysError("changing permissions on path '%1%'", realStoreDir);
}
}
@ -280,7 +308,7 @@ LocalStore::LocalStore(const Params & params)
else openDB(*state, false);
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
migrateCASchema(state->db, dbDir + "/ca-schema", globalLock);
}
@ -310,16 +338,25 @@ LocalStore::LocalStore(const Params & params)
state->stmts->QueryPathFromHashPart.create(state->db,
"select path from ValidPaths where path >= ? limit 1;");
state->stmts->QueryValidPaths.create(state->db, "select path from ValidPaths");
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
state->stmts->RegisterRealisedOutput.create(state->db,
R"(
insert or replace into Realisations (drvPath, outputName, outputPath)
values (?, ?, (select id from ValidPaths where path = ?))
insert into Realisations (drvPath, outputName, outputPath, signatures)
values (?, ?, (select id from ValidPaths where path = ?), ?)
;
)");
state->stmts->UpdateRealisedOutput.create(state->db,
R"(
update Realisations
set signatures = ?
where
drvPath = ? and
outputName = ?
;
)");
state->stmts->QueryRealisedOutput.create(state->db,
R"(
select Output.path from Realisations
select Realisations.id, Output.path, Realisations.signatures from Realisations
inner join ValidPaths as Output on Output.id = Realisations.outputPath
where drvPath = ? and outputName = ?
;
@ -331,10 +368,33 @@ LocalStore::LocalStore(const Params & params)
where drvPath = ?
;
)");
state->stmts->QueryRealisationReferences.create(state->db,
R"(
select drvPath, outputName from Realisations
join RealisationsRefs on realisationReference = Realisations.id
where referrer = ?;
)");
state->stmts->AddRealisationReference.create(state->db,
R"(
insert or replace into RealisationsRefs (referrer, realisationReference)
values (
(select id from Realisations where drvPath = ? and outputName = ?),
(select id from Realisations where drvPath = ? and outputName = ?));
)");
}
}
AutoCloseFD LocalStore::openGCLock()
{
Path fnGCLock = stateDir + "/gc.lock";
auto fdGCLock = open(fnGCLock.c_str(), O_RDWR | O_CREAT | O_CLOEXEC, 0600);
if (!fdGCLock)
throw SysError("opening global GC lock '%1%'", fnGCLock);
return fdGCLock;
}
LocalStore::~LocalStore()
{
std::shared_future<void> future;
@ -440,14 +500,14 @@ void LocalStore::makeStoreWritable()
if (getuid() != 0) return;
/* Check if /nix/store is on a read-only mount. */
struct statvfs stat;
if (statvfs(realStoreDir.c_str(), &stat) != 0)
if (statvfs(realStoreDir.get().c_str(), &stat) != 0)
throw SysError("getting info about the Nix store mount point");
if (stat.f_flag & ST_RDONLY) {
if (unshare(CLONE_NEWNS) == -1)
throw SysError("setting up a private mount namespace");
if (mount(0, realStoreDir.c_str(), "none", MS_REMOUNT | MS_BIND, 0) == -1)
if (mount(0, realStoreDir.get().c_str(), "none", MS_REMOUNT | MS_BIND, 0) == -1)
throw SysError("remounting %1% writable", realStoreDir);
}
#endif
@ -630,7 +690,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
std::optional<Hash> h;
for (auto & i : drv.outputs) {
std::visit(overloaded {
[&](DerivationOutputInputAddressed doia) {
[&](const DerivationOutputInputAddressed & doia) {
if (!h) {
// somewhat expensive so we do lazily
auto temp = hashDerivationModulo(*this, drv, true);
@ -642,29 +702,79 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
printStorePath(drvPath), printStorePath(doia.path), printStorePath(recomputed));
envHasRightPath(doia.path, i.first);
},
[&](DerivationOutputCAFixed dof) {
[&](const DerivationOutputCAFixed & dof) {
StorePath path = makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName);
envHasRightPath(path, i.first);
},
[&](DerivationOutputCAFloating _) {
[&](const DerivationOutputCAFloating &) {
/* Nothing to check */
},
[&](DerivationOutputDeferred) {
[&](const DerivationOutputDeferred &) {
},
}, i.second.output);
}
}
void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs)
{
settings.requireExperimentalFeature(Xp::CaDerivations);
if (checkSigs == NoCheckSigs || !realisationIsUntrusted(info))
registerDrvOutput(info);
else
throw Error("cannot register realisation '%s' because it lacks a valid signature", info.outPath.to_string());
}
void LocalStore::registerDrvOutput(const Realisation & info)
{
auto state(_state.lock());
settings.requireExperimentalFeature(Xp::CaDerivations);
retrySQLite<void>([&]() {
state->stmts->RegisterRealisedOutput.use()
(info.id.strHash())
(info.id.outputName)
(printStorePath(info.outPath))
.exec();
auto state(_state.lock());
if (auto oldR = queryRealisation_(*state, info.id)) {
if (info.isCompatibleWith(*oldR)) {
auto combinedSignatures = oldR->signatures;
combinedSignatures.insert(info.signatures.begin(),
info.signatures.end());
state->stmts->UpdateRealisedOutput.use()
(concatStringsSep(" ", combinedSignatures))
(info.id.strHash())
(info.id.outputName)
.exec();
} else {
throw Error("Trying to register a realisation of '%s', but we already "
"have another one locally.\n"
"Local: %s\n"
"Remote: %s",
info.id.to_string(),
printStorePath(oldR->outPath),
printStorePath(info.outPath)
);
}
} else {
state->stmts->RegisterRealisedOutput.use()
(info.id.strHash())
(info.id.outputName)
(printStorePath(info.outPath))
(concatStringsSep(" ", info.signatures))
.exec();
}
for (auto & [outputId, depPath] : info.dependentRealisations) {
auto localRealisation = queryRealisationCore_(*state, outputId);
if (!localRealisation)
throw Error("unable to register the derivation '%s' as it "
"depends on the non-existent '%s'",
info.id.to_string(), outputId.to_string());
if (localRealisation->second.outPath != depPath)
throw Error("unable to register the derivation '%s' as it "
"depends on a realisation of '%s' that doesn't "
"match what we have locally",
info.id.to_string(), outputId.to_string());
state->stmts->AddRealisationReference.use()
(info.id.strHash())
(info.id.outputName)
(outputId.strHash())
(outputId.outputName)
.exec();
}
});
}
@ -724,7 +834,7 @@ uint64_t LocalStore::addValidPath(State & state,
{
auto state_(Store::state.lock());
state_->pathInfoCache.upsert(std::string(info.path.hashPart()),
state_->pathInfoCache.upsert(std::string(info.path.to_string()),
PathInfoCacheValue{ .value = std::make_shared<const ValidPathInfo>(info) });
}
@ -886,7 +996,7 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
std::map<std::string, std::optional<StorePath>>
LocalStore::queryDerivationOutputMapNoResolve(const StorePath& path_)
LocalStore::queryPartialDerivationOutputMap(const StorePath & path_)
{
auto path = path_;
auto outputs = retrySQLite<std::map<std::string, std::optional<StorePath>>>([&]() {
@ -902,7 +1012,7 @@ LocalStore::queryDerivationOutputMapNoResolve(const StorePath& path_)
return outputs;
});
if (!settings.isExperimentalFeatureEnabled("ca-derivations"))
if (!settings.isExperimentalFeatureEnabled(Xp::CaDerivations))
return outputs;
auto drv = readInvalidDerivation(path);
@ -912,7 +1022,7 @@ LocalStore::queryDerivationOutputMapNoResolve(const StorePath& path_)
if (realisation)
outputs.insert_or_assign(outputName, realisation->outPath);
else
outputs.insert_or_assign(outputName, std::nullopt);
outputs.insert({outputName, std::nullopt});
}
return outputs;
@ -970,14 +1080,19 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths)
}
// FIXME: move this, it's not specific to LocalStore.
void LocalStore::querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos)
{
if (!settings.useSubstitutes) return;
for (auto & sub : getDefaultSubstituters()) {
for (auto & path : paths) {
if (infos.count(path.first))
// Choose first succeeding substituter.
continue;
auto subPath(path.first);
// recompute store path so that we can use a different store root
// Recompute store path so that we can use a different store root.
if (path.second) {
subPath = makeFixedOutputPathFromCA(path.first.name(), *path.second);
if (sub->storeDir == storeDir)
@ -1092,11 +1207,10 @@ void LocalStore::invalidatePath(State & state, const StorePath & path)
{
auto state_(Store::state.lock());
state_->pathInfoCache.erase(std::string(path.hashPart()));
state_->pathInfoCache.erase(std::string(path.to_string()));
}
}
const PublicKeys & LocalStore::getPublicKeys()
{
auto state(_state.lock());
@ -1105,11 +1219,20 @@ const PublicKeys & LocalStore::getPublicKeys()
return *state->publicKeys;
}
bool LocalStore::pathInfoIsUntrusted(const ValidPathInfo & info)
{
return requireSigs && !info.checkSignatures(*this, getPublicKeys());
}
bool LocalStore::realisationIsUntrusted(const Realisation & realisation)
{
return requireSigs && !realisation.checkSignatures(getPublicKeys());
}
void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs)
{
if (requireSigs && checkSigs && !info.checkSignatures(*this, getPublicKeys()))
if (checkSigs && pathInfoIsUntrusted(info))
throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path));
addTempRoot(info.path);
@ -1130,24 +1253,15 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
deletePath(realPath);
// text hashing has long been allowed to have non-self-references because it is used for drv files.
bool refersToSelf = info.references.count(info.path) > 0;
if (info.ca.has_value() && !info.references.empty() && !(std::holds_alternative<TextHash>(*info.ca) && !refersToSelf))
settings.requireExperimentalFeature("ca-references");
/* While restoring the path from the NAR, compute the hash
of the NAR. */
std::unique_ptr<AbstractHashSink> hashSink;
if (!info.ca.has_value() || !info.references.count(info.path))
hashSink = std::make_unique<HashSink>(htSHA256);
else
hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
HashSink hashSink(htSHA256);
TeeSource wrapperSource { source, *hashSink };
TeeSource wrapperSource { source, hashSink };
restorePath(realPath, wrapperSource);
auto hashResult = hashSink->finish();
auto hashResult = hashSink.finish();
if (hashResult.first != info.narHash)
throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s",
@ -1157,6 +1271,31 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path), info.narSize, hashResult.second);
if (info.ca) {
if (auto foHash = std::get_if<FixedOutputHash>(&*info.ca)) {
auto actualFoHash = hashCAPath(
foHash->method,
foHash->hash.type,
info.path
);
if (foHash->hash != actualFoHash.hash) {
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path),
foHash->hash.to_string(Base32, true),
actualFoHash.hash.to_string(Base32, true));
}
}
if (auto textHash = std::get_if<TextHash>(&*info.ca)) {
auto actualTextHash = hashString(htSHA256, readFile(realPath));
if (textHash->hash != actualTextHash) {
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path),
textHash->hash.to_string(Base32, true),
actualTextHash.to_string(Base32, true));
}
}
}
autoGC();
canonicalisePathMetaData(realPath, -1);
@ -1375,7 +1514,8 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
/* Acquire the global GC lock to get a consistent snapshot of
existing and valid paths. */
AutoCloseFD fdGCLock = openGCLock(ltWrite);
auto fdGCLock = openGCLock();
FdLock gcLock(fdGCLock.get(), ltRead, true, "waiting for the big garbage collector lock...");
StringSet store;
for (auto & i : readDirectory(realStoreDir)) store.insert(i.name);
@ -1386,8 +1526,6 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
StorePathSet validPaths;
PathSet done;
fdGCLock = -1;
for (auto & i : queryAllValidPaths())
verifyPath(printStorePath(i), store, done, validPaths, repair, errors);
@ -1401,12 +1539,8 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
Path linkPath = linksDir + "/" + link.name;
string hash = hashPath(htSHA256, linkPath).first.to_string(Base32, false);
if (hash != link.name) {
logError({
.name = "Invalid hash",
.hint = hintfmt(
"link '%s' was modified! expected hash '%s', got '%s'",
linkPath, link.name, hash)
});
printError("link '%s' was modified! expected hash '%s', got '%s'",
linkPath, link.name, hash);
if (repair) {
if (unlink(linkPath.c_str()) == 0)
printInfo("removed link '%s'", linkPath);
@ -1429,21 +1563,14 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
/* Check the content hash (optionally - slow). */
printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));
std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca || !info->references.count(info->path))
hashSink = std::make_unique<HashSink>(info->narHash.type);
else
hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
auto hashSink = HashSink(info->narHash.type);
dumpPath(Store::toRealPath(i), *hashSink);
auto current = hashSink->finish();
dumpPath(Store::toRealPath(i), hashSink);
auto current = hashSink.finish();
if (info->narHash != nullHash && info->narHash != current.first) {
logError({
.name = "Invalid hash - path modified",
.hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
});
printError("path '%s' was modified! expected hash '%s', got '%s'",
printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true));
if (repair) repairPath(i); else errors = true;
} else {
@ -1494,10 +1621,7 @@ void LocalStore::verifyPath(const Path & pathS, const StringSet & store,
if (!done.insert(pathS).second) return;
if (!isStorePath(pathS)) {
logError({
.name = "Nix path not found",
.hint = hintfmt("path '%s' is not in the Nix store", pathS)
});
printError("path '%s' is not in the Nix store", pathS);
return;
}
@ -1520,10 +1644,7 @@ void LocalStore::verifyPath(const Path & pathS, const StringSet & store,
auto state(_state.lock());
invalidatePath(*state, path);
} else {
logError({
.name = "Missing path with referrers",
.hint = hintfmt("path '%s' disappeared, but it still has valid referrers!", pathS)
});
printError("path '%s' disappeared, but it still has valid referrers!", pathS);
if (repair)
try {
repairPath(path);
@ -1624,6 +1745,18 @@ void LocalStore::addSignatures(const StorePath & storePath, const StringSet & si
}
void LocalStore::signRealisation(Realisation & realisation)
{
// FIXME: keep secret keys in memory.
auto secretKeyFiles = settings.secretKeyFiles;
for (auto & secretKeyFile : secretKeyFiles.get()) {
SecretKey secretKey(readFile(secretKeyFile));
realisation.sign(secretKey);
}
}
void LocalStore::signPathInfo(ValidPathInfo & info)
{
// FIXME: keep secret keys in memory.
@ -1651,18 +1784,97 @@ void LocalStore::createUser(const std::string & userName, uid_t userId)
}
}
std::optional<const Realisation> LocalStore::queryRealisation(
const DrvOutput& id) {
typedef std::optional<const Realisation> Ret;
return retrySQLite<Ret>([&]() -> Ret {
std::optional<std::pair<int64_t, Realisation>> LocalStore::queryRealisationCore_(
LocalStore::State & state,
const DrvOutput & id)
{
auto useQueryRealisedOutput(
state.stmts->QueryRealisedOutput.use()
(id.strHash())
(id.outputName));
if (!useQueryRealisedOutput.next())
return std::nullopt;
auto realisationDbId = useQueryRealisedOutput.getInt(0);
auto outputPath = parseStorePath(useQueryRealisedOutput.getStr(1));
auto signatures =
tokenizeString<StringSet>(useQueryRealisedOutput.getStr(2));
return {{
realisationDbId,
Realisation{
.id = id,
.outPath = outputPath,
.signatures = signatures,
}
}};
}
std::optional<const Realisation> LocalStore::queryRealisation_(
LocalStore::State & state,
const DrvOutput & id)
{
auto maybeCore = queryRealisationCore_(state, id);
if (!maybeCore)
return std::nullopt;
auto [realisationDbId, res] = *maybeCore;
std::map<DrvOutput, StorePath> dependentRealisations;
auto useRealisationRefs(
state.stmts->QueryRealisationReferences.use()
(realisationDbId));
while (useRealisationRefs.next()) {
auto depId = DrvOutput {
Hash::parseAnyPrefixed(useRealisationRefs.getStr(0)),
useRealisationRefs.getStr(1),
};
auto dependentRealisation = queryRealisationCore_(state, depId);
assert(dependentRealisation); // Enforced by the db schema
auto outputPath = dependentRealisation->second.outPath;
dependentRealisations.insert({depId, outputPath});
}
res.dependentRealisations = dependentRealisations;
return { res };
}
std::optional<const Realisation>
LocalStore::queryRealisation(const DrvOutput & id)
{
return retrySQLite<std::optional<const Realisation>>([&]() {
auto state(_state.lock());
auto use(state->stmts->QueryRealisedOutput.use()(id.strHash())(
id.outputName));
if (!use.next())
return std::nullopt;
auto outputPath = parseStorePath(use.getStr(0));
return Ret{
Realisation{.id = id, .outPath = outputPath}};
return queryRealisation_(*state, id);
});
}
FixedOutputHash LocalStore::hashCAPath(
const FileIngestionMethod & method, const HashType & hashType,
const StorePath & path)
{
return hashCAPath(method, hashType, Store::toRealPath(path), path.hashPart());
}
FixedOutputHash LocalStore::hashCAPath(
const FileIngestionMethod & method,
const HashType & hashType,
const Path & path,
const std::string_view pathHash
)
{
HashModuloSink caSink ( hashType, std::string(pathHash) );
switch (method) {
case FileIngestionMethod::Recursive:
dumpPath(path, caSink);
break;
case FileIngestionMethod::Flat:
readFile(path, caSink);
break;
}
auto hash = caSink.finish().first;
return FixedOutputHash{
.method = method,
.hash = hash,
};
}
} // namespace nix


@ -58,9 +58,15 @@ private:
struct Stmts;
std::unique_ptr<Stmts> stmts;
/* The global GC lock */
AutoCloseFD fdGCLock;
/* The file to which we write our temporary roots. */
AutoCloseFD fdTempRoots;
/* Connection to the garbage collector. */
AutoCloseFD fdRootsSocket;
/* The last time we checked whether to do an auto-GC, or an
auto-GC finished. */
std::chrono::time_point<std::chrono::steady_clock> lastGCCheck;
@ -83,14 +89,10 @@ private:
public:
PathSetting realStoreDir_;
const Path realStoreDir;
const Path dbDir;
const Path linksDir;
const Path reservedPath;
const Path schemaPath;
const Path trashDir;
const Path tempRootsDir;
const Path fnTempRoots;
@ -127,7 +129,7 @@ public:
StorePathSet queryValidDerivers(const StorePath & path) override;
std::map<std::string, std::optional<StorePath>> queryDerivationOutputMapNoResolve(const StorePath & path) override;
std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
@ -136,6 +138,9 @@ public:
void querySubstitutablePathInfos(const StorePathCAMap & paths,
SubstitutablePathInfos & infos) override;
bool pathInfoIsUntrusted(const ValidPathInfo &) override;
bool realisationIsUntrusted(const Realisation & ) override;
void addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs) override;
@ -145,27 +150,15 @@ public:
StorePath addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) override;
void buildPaths(
const std::vector<StorePathWithOutputs> & paths,
BuildMode buildMode) override;
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode) override;
void ensurePath(const StorePath & path) override;
void addTempRoot(const StorePath & path) override;
void addIndirectRoot(const Path & path) override;
void syncWithGC() override;
private:
typedef std::shared_ptr<AutoCloseFD> FDPtr;
typedef list<FDPtr> FDs;
void findTempRoots(Roots & roots, bool censor);
void findTempRoots(FDs & fds, Roots & roots, bool censor);
AutoCloseFD openGCLock();
public:
@ -198,9 +191,7 @@ public:
void vacuumDB();
/* Repair the contents of the given path by redownloading it using
a substituter (if available). */
void repairPath(const StorePath & path);
void repairPath(const StorePath & path) override;
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
@ -211,8 +202,11 @@ public:
/* Register the store path 'output' as the output named 'outputName' of
derivation 'deriver'. */
void registerDrvOutput(const Realisation & info) override;
void registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs) override;
void cacheDrvOutputMapping(State & state, const uint64_t deriver, const string & outputName, const StorePath & output);
std::optional<const Realisation> queryRealisation_(State & state, const DrvOutput & id);
std::optional<std::pair<int64_t, Realisation>> queryRealisationCore_(State & state, const DrvOutput & id);
std::optional<const Realisation> queryRealisation(const DrvOutput&) override;
private:
@ -244,29 +238,12 @@ private:
PathSet queryValidPathsOld();
ValidPathInfo queryPathInfoOld(const Path & path);
struct GCState;
void deleteGarbage(GCState & state, const Path & path);
void tryToDelete(GCState & state, const Path & path);
bool canReachRoot(GCState & state, StorePathSet & visited, const StorePath & path);
void deletePathRecursive(GCState & state, const Path & path);
bool isActiveTempFile(const GCState & state,
const Path & path, const string & suffix);
AutoCloseFD openGCLock(LockType lockType);
void findRoots(const Path & path, unsigned char type, Roots & roots);
void findRootsNoTemp(Roots & roots, bool censor);
void findRuntimeRoots(Roots & roots, bool censor);
void removeUnusedLinks(const GCState & state);
Path createTempDirInStore();
void checkDerivationOutputs(const StorePath & drvPath, const Derivation & drv);
@ -281,16 +258,30 @@ private:
bool isValidPath_(State & state, const StorePath & path);
void queryReferrers(State & state, const StorePath & path, StorePathSet & referrers);
/* Add signatures to a ValidPathInfo using the secret keys
/* Add signatures to a ValidPathInfo or Realisation using the secret keys
specified by the secret-key-files option. */
void signPathInfo(ValidPathInfo & info);
Path getRealStoreDir() override { return realStoreDir; }
void signRealisation(Realisation &);
void createUser(const std::string & userName, uid_t userId) override;
friend struct DerivationGoal;
// XXX: Make a generic `Store` method
FixedOutputHash hashCAPath(
const FileIngestionMethod & method,
const HashType & hashType,
const StorePath & path);
FixedOutputHash hashCAPath(
const FileIngestionMethod & method,
const HashType & hashType,
const Path & path,
const std::string_view pathHash
);
friend struct LocalDerivationGoal;
friend struct PathSubstitutionGoal;
friend struct SubstitutionGoal;
friend struct DerivationGoal;
};


@ -8,12 +8,12 @@ libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc)
libstore_LIBS = libutil
libstore_LDFLAGS = $(SQLITE3_LIBS) -lbz2 $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
ifneq ($(OS), FreeBSD)
libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
ifdef HOST_LINUX
libstore_LDFLAGS += -ldl
endif
ifeq ($(OS), Darwin)
ifdef HOST_DARWIN
libstore_FILES = sandbox-defaults.sb sandbox-minimal.sb sandbox-network.sb
endif
@ -23,12 +23,12 @@ ifeq ($(ENABLE_S3), 1)
libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core
endif
ifeq ($(OS), SunOS)
ifdef HOST_SOLARIS
libstore_LDFLAGS += -lsocket
endif
ifeq ($(HAVE_SECCOMP), 1)
libstore_LDFLAGS += -lseccomp
libstore_LDFLAGS += $(LIBSECCOMP_LIBS)
endif
libstore_CXXFLAGS += \
@ -60,7 +60,7 @@ $(d)/build.cc:
clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
$(eval $(call install-file-in, $(d)/nix-store.pc, $(prefix)/lib/pkgconfig, 0644))
$(eval $(call install-file-in, $(d)/nix-store.pc, $(libdir)/pkgconfig, 0644))
$(foreach i, $(wildcard src/libstore/builtins/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644)))


@ -16,13 +16,18 @@ Machine::Machine(decltype(storeUri) storeUri,
decltype(mandatoryFeatures) mandatoryFeatures,
decltype(sshPublicHostKey) sshPublicHostKey) :
storeUri(
// Backwards compatibility: if the URI is a hostname,
// prepend ssh://.
// Backwards compatibility: if the URI is schemeless, is not a path,
// and is not one of the special store connection words, prepend
// ssh://.
storeUri.find("://") != std::string::npos
|| hasPrefix(storeUri, "local")
|| hasPrefix(storeUri, "remote")
|| hasPrefix(storeUri, "auto")
|| hasPrefix(storeUri, "/")
|| storeUri.find("/") != std::string::npos
|| storeUri == "auto"
|| storeUri == "daemon"
|| storeUri == "local"
|| hasPrefix(storeUri, "auto?")
|| hasPrefix(storeUri, "daemon?")
|| hasPrefix(storeUri, "local?")
|| hasPrefix(storeUri, "?")
? storeUri
: "ssh://" + storeUri),
systemTypes(systemTypes),
@ -34,7 +39,8 @@ Machine::Machine(decltype(storeUri) storeUri,
sshPublicHostKey(sshPublicHostKey)
{}
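A few illustrative inputs for the backwards-compatibility rule above (the hostnames are hypothetical): only a schemeless, slash-free name that is not one of the special connection words gets `ssh://` prepended.

/* Illustrative examples only:
 *
 *   "mac1"                    -> "ssh://mac1"        (bare hostname)
 *   "alice@mac1"              -> "ssh://alice@mac1"
 *   "ssh-ng://mac1"           -> unchanged           (already has a scheme)
 *   "daemon"                  -> unchanged           (special connection word)
 *   "local?root=/tmp/chroot"  -> unchanged
 *   "/home/alice/test-store"  -> unchanged           (contains a '/')
 */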
bool Machine::allSupported(const std::set<string> & features) const {
bool Machine::allSupported(const std::set<string> & features) const
{
return std::all_of(features.begin(), features.end(),
[&](const string & feature) {
return supportedFeatures.count(feature) ||
@ -42,21 +48,29 @@ bool Machine::allSupported(const std::set<string> & features) const {
});
}
bool Machine::mandatoryMet(const std::set<string> & features) const {
bool Machine::mandatoryMet(const std::set<string> & features) const
{
return std::all_of(mandatoryFeatures.begin(), mandatoryFeatures.end(),
[&](const string & feature) {
return features.count(feature);
});
}
ref<Store> Machine::openStore() const {
ref<Store> Machine::openStore() const
{
Store::Params storeParams;
if (hasPrefix(storeUri, "ssh://")) {
storeParams["max-connections"] = "1";
storeParams["log-fd"] = "4";
}
if (hasPrefix(storeUri, "ssh://") || hasPrefix(storeUri, "ssh-ng://")) {
if (sshKey != "")
storeParams["ssh-key"] = sshKey;
if (sshPublicHostKey != "")
storeParams["base64-ssh-public-host-key"] = sshPublicHostKey;
}
{
auto & fs = storeParams["system-features"];
auto append = [&](auto feats) {
@ -72,53 +86,86 @@ ref<Store> Machine::openStore() const {
return nix::openStore(storeUri, storeParams);
}
void parseMachines(const std::string & s, Machines & machines)
static std::vector<std::string> expandBuilderLines(const std::string & builders)
{
for (auto line : tokenizeString<std::vector<string>>(s, "\n;")) {
std::vector<std::string> result;
for (auto line : tokenizeString<std::vector<string>>(builders, "\n;")) {
trim(line);
line.erase(std::find(line.begin(), line.end(), '#'), line.end());
if (line.empty()) continue;
if (line[0] == '@') {
auto file = trim(std::string(line, 1));
const std::string path = trim(std::string(line, 1));
std::string text;
try {
parseMachines(readFile(file), machines);
text = readFile(path);
} catch (const SysError & e) {
if (e.errNo != ENOENT)
throw;
debug("cannot find machines file '%s'", file);
debug("cannot find machines file '%s'", path);
}
const auto lines = expandBuilderLines(text);
result.insert(end(result), begin(lines), end(lines));
continue;
}
auto tokens = tokenizeString<std::vector<string>>(line);
auto sz = tokens.size();
if (sz < 1)
throw FormatError("bad machine specification '%s'", line);
auto isSet = [&](size_t n) {
return tokens.size() > n && tokens[n] != "" && tokens[n] != "-";
};
machines.emplace_back(tokens[0],
isSet(1) ? tokenizeString<std::vector<string>>(tokens[1], ",") : std::vector<string>{settings.thisSystem},
isSet(2) ? tokens[2] : "",
isSet(3) ? std::stoull(tokens[3]) : 1LL,
isSet(4) ? std::stoull(tokens[4]) : 1LL,
isSet(5) ? tokenizeString<std::set<string>>(tokens[5], ",") : std::set<string>{},
isSet(6) ? tokenizeString<std::set<string>>(tokens[6], ",") : std::set<string>{},
isSet(7) ? tokens[7] : "");
result.emplace_back(line);
}
return result;
}
static Machine parseBuilderLine(const std::string & line)
{
const auto tokens = tokenizeString<std::vector<string>>(line);
auto isSet = [&](size_t fieldIndex) {
return tokens.size() > fieldIndex && tokens[fieldIndex] != "" && tokens[fieldIndex] != "-";
};
auto parseUnsignedIntField = [&](size_t fieldIndex) {
const auto result = string2Int<unsigned int>(tokens[fieldIndex]);
if (!result) {
throw FormatError("bad machine specification: failed to convert column #%lu in a row: '%s' to 'unsigned int'", fieldIndex, line);
}
return result.value();
};
auto ensureBase64 = [&](size_t fieldIndex) {
const auto & str = tokens[fieldIndex];
try {
base64Decode(str);
} catch (const Error & e) {
throw FormatError("bad machine specification: a column #%lu in a row: '%s' is not valid base64 string: %s", fieldIndex, line, e.what());
}
return str;
};
if (!isSet(0))
throw FormatError("bad machine specification: store URL was not found at the first column of a row: '%s'", line);
return {
tokens[0],
isSet(1) ? tokenizeString<std::vector<string>>(tokens[1], ",") : std::vector<string>{settings.thisSystem},
isSet(2) ? tokens[2] : "",
isSet(3) ? parseUnsignedIntField(3) : 1U,
isSet(4) ? parseUnsignedIntField(4) : 1U,
isSet(5) ? tokenizeString<std::set<string>>(tokens[5], ",") : std::set<string>{},
isSet(6) ? tokenizeString<std::set<string>>(tokens[6], ",") : std::set<string>{},
isSet(7) ? ensureBase64(7) : ""
};
}
static Machines parseBuilderLines(const std::vector<std::string>& builders) {
Machines result;
std::transform(builders.begin(), builders.end(), std::back_inserter(result), parseBuilderLine);
return result;
}
Machines getMachines()
{
static auto machines = [&]() {
Machines machines;
parseMachines(settings.builders, machines);
return machines;
}();
return machines;
const auto builderLines = expandBuilderLines(settings.builders);
return parseBuilderLines(builderLines);
}
}
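A rough sketch of how the eight whitespace-separated columns consumed by parseBuilderLine above map onto a machine entry. The concrete builders line and the field labels beyond what the code shows (e.g. "speed factor") are assumptions based on the usual builders configuration; the tokenizer is a simplified stand-in for tokenizeString.

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

int main()
{
    // Hypothetical builders entry; "-" means "use the default for this column".
    std::string line =
        "ssh://mac x86_64-darwin,aarch64-darwin /home/alice/.ssh/id_ed25519 4 2 big-parallel - -";

    std::vector<std::string> tokens;
    std::istringstream in(line);
    for (std::string t; in >> t; ) tokens.push_back(t);   // simplified tokenizeString

    const char * fields[] = {
        "store URI", "system types", "SSH identity file", "max jobs",
        "speed factor", "supported features", "mandatory features", "SSH host key (base64)"};

    for (size_t i = 0; i < tokens.size() && i < 8; ++i)
        std::cout << fields[i] << ": "
                  << (tokens[i] == "-" ? "<default>" : tokens[i]) << "\n";
}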

View file

@ -6,100 +6,73 @@
#include "thread-pool.hh"
#include "topo-sort.hh"
#include "callback.hh"
#include "closure.hh"
namespace nix {
void Store::computeFSClosure(const StorePathSet & startPaths,
StorePathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers)
{
struct State
{
size_t pending;
StorePathSet & paths;
std::exception_ptr exc;
};
std::function<std::set<StorePath>(const StorePath & path, std::future<ref<const ValidPathInfo>> &)> queryDeps;
if (flipDirection)
queryDeps = [&](const StorePath& path,
std::future<ref<const ValidPathInfo>> & fut) {
StorePathSet res;
StorePathSet referrers;
queryReferrers(path, referrers);
for (auto& ref : referrers)
if (ref != path)
res.insert(ref);
Sync<State> state_(State{0, paths_, 0});
if (includeOutputs)
for (auto& i : queryValidDerivers(path))
res.insert(i);
std::function<void(const Path &)> enqueue;
if (includeDerivers && path.isDerivation())
for (auto& [_, maybeOutPath] : queryPartialDerivationOutputMap(path))
if (maybeOutPath && isValidPath(*maybeOutPath))
res.insert(*maybeOutPath);
return res;
};
else
queryDeps = [&](const StorePath& path,
std::future<ref<const ValidPathInfo>> & fut) {
StorePathSet res;
auto info = fut.get();
for (auto& ref : info->references)
if (ref != path)
res.insert(ref);
std::condition_variable done;
if (includeOutputs && path.isDerivation())
for (auto& [_, maybeOutPath] : queryPartialDerivationOutputMap(path))
if (maybeOutPath && isValidPath(*maybeOutPath))
res.insert(*maybeOutPath);
enqueue = [&](const Path & path) -> void {
{
auto state(state_.lock());
if (state->exc) return;
if (!state->paths.insert(parseStorePath(path)).second) return;
state->pending++;
}
if (includeDerivers && info->deriver && isValidPath(*info->deriver))
res.insert(*info->deriver);
return res;
};
queryPathInfo(parseStorePath(path), {[&, pathS(path)](std::future<ref<const ValidPathInfo>> fut) {
// FIXME: calls to isValidPath() should be async
try {
auto info = fut.get();
auto path = parseStorePath(pathS);
if (flipDirection) {
StorePathSet referrers;
queryReferrers(path, referrers);
for (auto & ref : referrers)
if (ref != path)
enqueue(printStorePath(ref));
if (includeOutputs)
for (auto & i : queryValidDerivers(path))
enqueue(printStorePath(i));
if (includeDerivers && path.isDerivation())
for (auto & i : queryDerivationOutputs(path))
if (isValidPath(i) && queryPathInfo(i)->deriver == path)
enqueue(printStorePath(i));
} else {
for (auto & ref : info->references)
if (ref != path)
enqueue(printStorePath(ref));
if (includeOutputs && path.isDerivation())
for (auto & i : queryDerivationOutputs(path))
if (isValidPath(i)) enqueue(printStorePath(i));
if (includeDerivers && info->deriver && isValidPath(*info->deriver))
enqueue(printStorePath(*info->deriver));
}
{
auto state(state_.lock());
assert(state->pending);
if (!--state->pending) done.notify_one();
}
} catch (...) {
auto state(state_.lock());
if (!state->exc) state->exc = std::current_exception();
assert(state->pending);
if (!--state->pending) done.notify_one();
};
}});
};
for (auto & startPath : startPaths)
enqueue(printStorePath(startPath));
{
auto state(state_.lock());
while (state->pending) state.wait(done);
if (state->exc) std::rethrow_exception(state->exc);
}
computeClosure<StorePath>(
startPaths, paths_,
[&](const StorePath& path,
std::function<void(std::promise<std::set<StorePath>>&)>
processEdges) {
std::promise<std::set<StorePath>> promise;
std::function<void(std::future<ref<const ValidPathInfo>>)>
getDependencies =
[&](std::future<ref<const ValidPathInfo>> fut) {
try {
promise.set_value(queryDeps(path, fut));
} catch (...) {
promise.set_exception(std::current_exception());
}
};
queryPathInfo(path, getDependencies);
processEdges(promise);
});
}
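The rewrite above delegates the traversal to the generic computeClosure helper from closure.hh. The following is a self-contained toy, not the real template, showing the same call shape: a start set, an accumulator, and a callback that resolves each node's edges through a promise.

#include <functional>
#include <future>
#include <iostream>
#include <map>
#include <set>
#include <string>

// Toy stand-in for computeClosure<T>(): breadth-first expansion where the
// caller delivers edges asynchronously via a promise, mirroring the call above.
static void toyComputeClosure(
    const std::set<std::string> & start,
    std::set<std::string> & out,
    std::function<void(const std::string &,
                       std::function<void(std::promise<std::set<std::string>> &)>)> getEdges)
{
    std::set<std::string> frontier = start;
    while (!frontier.empty()) {
        std::set<std::string> next;
        for (auto & node : frontier) {
            if (!out.insert(node).second) continue;
            getEdges(node, [&](std::promise<std::set<std::string>> & promise) {
                for (auto & edge : promise.get_future().get())
                    next.insert(edge);
            });
        }
        frontier = std::move(next);
    }
}

int main()
{
    // Hypothetical reference graph: a -> b -> c, a -> c.
    std::map<std::string, std::set<std::string>> refs{
        {"a", {"b", "c"}}, {"b", {"c"}}, {"c", {}}};

    std::set<std::string> closure;
    toyComputeClosure({"a"}, closure,
        [&](const std::string & node, auto processEdges) {
            std::promise<std::set<std::string>> promise;
            promise.set_value(refs[node]);   // in the store this happens inside queryPathInfo()
            processEdges(promise);
        });

    for (auto & p : closure) std::cout << p << "\n";   // prints a, b, c
}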
void Store::computeFSClosure(const StorePath & startPath,
StorePathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers)
{
@ -119,7 +92,7 @@ std::optional<ContentAddress> getDerivationCA(const BasicDerivation & drv)
return std::nullopt;
}
void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
void Store::queryMissing(const std::vector<DerivedPath> & targets,
StorePathSet & willBuild_, StorePathSet & willSubstitute_, StorePathSet & unknown_,
uint64_t & downloadSize_, uint64_t & narSize_)
{
@ -147,7 +120,7 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
Sync<State> state_(State{{}, unknown_, willSubstitute_, willBuild_, downloadSize_, narSize_});
std::function<void(StorePathWithOutputs)> doPath;
std::function<void(DerivedPath)> doPath;
auto mustBuildDrv = [&](const StorePath & drvPath, const Derivation & drv) {
{
@ -156,17 +129,14 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
}
for (auto & i : drv.inputDrvs)
pool.enqueue(std::bind(doPath, StorePathWithOutputs { i.first, i.second }));
pool.enqueue(std::bind(doPath, DerivedPath::Built { i.first, i.second }));
};
auto checkOutput = [&](
const Path & drvPathS, ref<Derivation> drv, const Path & outPathS, ref<Sync<DrvState>> drvState_)
const StorePath & drvPath, ref<Derivation> drv, const StorePath & outPath, ref<Sync<DrvState>> drvState_)
{
if (drvState_->lock()->done) return;
auto drvPath = parseStorePath(drvPathS);
auto outPath = parseStorePath(outPathS);
SubstitutablePathInfos infos;
querySubstitutablePathInfos({{outPath, getDerivationCA(*drv)}}, infos);
@ -182,77 +152,80 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
drvState->outPaths.insert(outPath);
if (!drvState->left) {
for (auto & path : drvState->outPaths)
pool.enqueue(std::bind(doPath, StorePathWithOutputs { path } ));
pool.enqueue(std::bind(doPath, DerivedPath::Opaque { path } ));
}
}
}
};
doPath = [&](const StorePathWithOutputs & path) {
doPath = [&](const DerivedPath & req) {
{
auto state(state_.lock());
if (!state->done.insert(path.to_string(*this)).second) return;
if (!state->done.insert(req.to_string(*this)).second) return;
}
if (path.path.isDerivation()) {
if (!isValidPath(path.path)) {
std::visit(overloaded {
[&](const DerivedPath::Built & bfd) {
if (!isValidPath(bfd.drvPath)) {
// FIXME: we could try to substitute the derivation.
auto state(state_.lock());
state->unknown.insert(path.path);
state->unknown.insert(bfd.drvPath);
return;
}
PathSet invalid;
StorePathSet invalid;
/* true for regular derivations, and CA derivations for which we
have a trust mapping for all wanted outputs. */
auto knownOutputPaths = true;
for (auto & [outputName, pathOpt] : queryPartialDerivationOutputMap(path.path)) {
for (auto & [outputName, pathOpt] : queryPartialDerivationOutputMap(bfd.drvPath)) {
if (!pathOpt) {
knownOutputPaths = false;
break;
}
if (wantOutput(outputName, path.outputs) && !isValidPath(*pathOpt))
invalid.insert(printStorePath(*pathOpt));
if (wantOutput(outputName, bfd.outputs) && !isValidPath(*pathOpt))
invalid.insert(*pathOpt);
}
if (knownOutputPaths && invalid.empty()) return;
auto drv = make_ref<Derivation>(derivationFromPath(path.path));
ParsedDerivation parsedDrv(StorePath(path.path), *drv);
auto drv = make_ref<Derivation>(derivationFromPath(bfd.drvPath));
ParsedDerivation parsedDrv(StorePath(bfd.drvPath), *drv);
if (knownOutputPaths && settings.useSubstitutes && parsedDrv.substitutesAllowed()) {
auto drvState = make_ref<Sync<DrvState>>(DrvState(invalid.size()));
for (auto & output : invalid)
pool.enqueue(std::bind(checkOutput, printStorePath(path.path), drv, output, drvState));
pool.enqueue(std::bind(checkOutput, bfd.drvPath, drv, output, drvState));
} else
mustBuildDrv(path.path, *drv);
mustBuildDrv(bfd.drvPath, *drv);
} else {
},
[&](const DerivedPath::Opaque & bo) {
if (isValidPath(path.path)) return;
if (isValidPath(bo.path)) return;
SubstitutablePathInfos infos;
querySubstitutablePathInfos({{path.path, std::nullopt}}, infos);
querySubstitutablePathInfos({{bo.path, std::nullopt}}, infos);
if (infos.empty()) {
auto state(state_.lock());
state->unknown.insert(path.path);
state->unknown.insert(bo.path);
return;
}
auto info = infos.find(path.path);
auto info = infos.find(bo.path);
assert(info != infos.end());
{
auto state(state_.lock());
state->willSubstitute.insert(path.path);
state->willSubstitute.insert(bo.path);
state->downloadSize += info->second.downloadSize;
state->narSize += info->second.narSize;
}
for (auto & ref : info->second.references)
pool.enqueue(std::bind(doPath, StorePathWithOutputs { ref }));
}
pool.enqueue(std::bind(doPath, DerivedPath::Opaque { ref }));
},
}, req.raw());
};
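queryMissing now dispatches on the two DerivedPath alternatives. Below is a compact, self-contained imitation of that dispatch, with toy structs standing in for DerivedPath::Built, DerivedPath::Opaque and the overloaded helper, purely to illustrate the std::visit pattern used above.

#include <iostream>
#include <set>
#include <string>
#include <variant>

// Toy stand-ins for DerivedPath::Built / DerivedPath::Opaque.
struct Built  { std::string drvPath; std::set<std::string> outputs; };
struct Opaque { std::string path; };
using Req = std::variant<Built, Opaque>;

// Same helper shape as the `overloaded` struct used with std::visit above.
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

static void doPath(const Req & req)
{
    std::visit(overloaded {
        [](const Built & bfd) {
            std::cout << "build " << bfd.drvPath << " (" << bfd.outputs.size() << " outputs)\n";
        },
        [](const Opaque & bo) {
            std::cout << "substitute or fetch " << bo.path << "\n";
        },
    }, req);
}

int main()
{
    // Hypothetical requests.
    doPath(Built{"/nix/store/...-hello.drv", {"out"}});
    doPath(Opaque{"/nix/store/...-hello"});
}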
for (auto & path : targets)
@ -266,12 +239,11 @@ StorePaths Store::topoSortPaths(const StorePathSet & paths)
{
return topoSort(paths,
{[&](const StorePath & path) {
StorePathSet references;
try {
references = queryPathInfo(path)->references;
return queryPathInfo(path)->references;
} catch (InvalidPath &) {
return StorePathSet();
}
return references;
}},
{[&](const StorePath & path, const StorePath & parent) {
return BuildError(
@ -281,5 +253,44 @@ StorePaths Store::topoSortPaths(const StorePathSet & paths)
}});
}
std::map<DrvOutput, StorePath> drvOutputReferences(
const std::set<Realisation> & inputRealisations,
const StorePathSet & pathReferences)
{
std::map<DrvOutput, StorePath> res;
for (const auto & input : inputRealisations) {
if (pathReferences.count(input.outPath)) {
res.insert({input.id, input.outPath});
}
}
return res;
}
std::map<DrvOutput, StorePath> drvOutputReferences(
Store & store,
const Derivation & drv,
const StorePath & outputPath)
{
std::set<Realisation> inputRealisations;
for (const auto& [inputDrv, outputNames] : drv.inputDrvs) {
auto outputHashes =
staticOutputHashes(store, store.readDerivation(inputDrv));
for (const auto& outputName : outputNames) {
auto thisRealisation = store.queryRealisation(
DrvOutput{outputHashes.at(outputName), outputName});
if (!thisRealisation)
throw Error(
"output '%s' of derivation '%s' isnt built", outputName,
store.printStorePath(inputDrv));
inputRealisations.insert(*thisRealisation);
}
}
auto info = store.queryPathInfo(outputPath);
return drvOutputReferences(Realisation::closure(store, inputRealisations), info->references);
}
}
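A rough, self-contained rendering of what the first drvOutputReferences overload above computes: intersect the realised input outputs with the reference set of the output path in question. Plain strings stand in here for DrvOutput and StorePath, and the sample values are invented.

#include <iostream>
#include <map>
#include <set>
#include <string>
#include <vector>

// Toy stand-in for a Realisation: which derivation output produced which store path.
struct ToyRealisation { std::string id; std::string outPath; };

// Keep only the realised inputs whose output path actually appears in the
// references of the output we are interested in (same filtering as above).
static std::map<std::string, std::string> toyDrvOutputReferences(
    const std::vector<ToyRealisation> & inputRealisations,
    const std::set<std::string> & pathReferences)
{
    std::map<std::string, std::string> res;
    for (auto & input : inputRealisations)
        if (pathReferences.count(input.outPath))
            res.insert({input.id, input.outPath});
    return res;
}

int main()
{
    std::vector<ToyRealisation> inputs{
        {"sha256:aaaa!out", "/nix/store/...-libfoo"},
        {"sha256:bbbb!out", "/nix/store/...-unused"}};
    std::set<std::string> refs{"/nix/store/...-libfoo", "/nix/store/...-glibc"};

    for (auto & [id, path] : toyDrvOutputReferences(inputs, refs))
        std::cout << id << " -> " << path << "\n";   // only the libfoo realisation survives
}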

View file

@ -80,16 +80,16 @@ string nextComponent(string::const_iterator & p,
static bool componentsLT(const string & c1, const string & c2)
{
int n1, n2;
bool c1Num = string2Int(c1, n1), c2Num = string2Int(c2, n2);
auto n1 = string2Int<int>(c1);
auto n2 = string2Int<int>(c2);
if (c1Num && c2Num) return n1 < n2;
else if (c1 == "" && c2Num) return true;
if (n1 && n2) return *n1 < *n2;
else if (c1 == "" && n2) return true;
else if (c1 == "pre" && c2 != "pre") return true;
else if (c2 == "pre") return false;
/* Assume that `2.3a' < `2.3.1'. */
else if (c2Num) return true;
else if (c1Num) return false;
else if (n2) return true;
else if (n1) return false;
else return c1 < c2;
}
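The comparison above now relies on string2Int<int> returning std::optional. The snippet below replays the same ordering rules with a local stand-in parser (which, unlike the real helper, only accepts non-negative decimal strings).

#include <iostream>
#include <optional>
#include <string>

// Local stand-in for string2Int<int>(): parse fully-numeric strings only.
static std::optional<int> parseInt(const std::string & s)
{
    if (s.empty()) return std::nullopt;
    int n = 0;
    for (char c : s) {
        if (c < '0' || c > '9') return std::nullopt;
        n = n * 10 + (c - '0');
    }
    return n;
}

// Same ordering rules as componentsLT above, expressed with std::optional.
static bool toyComponentsLT(const std::string & c1, const std::string & c2)
{
    auto n1 = parseInt(c1);
    auto n2 = parseInt(c2);

    if (n1 && n2) return *n1 < *n2;
    else if (c1 == "" && n2) return true;
    else if (c1 == "pre" && c2 != "pre") return true;
    else if (c2 == "pre") return false;
    /* Assume that `2.3a' < `2.3.1'. */
    else if (n2) return true;
    else if (n1) return false;
    else return c1 < c2;
}

int main()
{
    std::cout << toyComponentsLT("2", "10") << "\n";   // 1: numeric comparison, not lexicographic
    std::cout << toyComponentsLT("pre", "1") << "\n";  // 1: "pre" sorts before anything else
    std::cout << toyComponentsLT("a", "1") << "\n";    // 1: letters sort before digits (2.3a < 2.3.1)
    std::cout << toyComponentsLT("1", "a") << "\n";    // 0
}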

View file

@ -4,6 +4,7 @@
#include "globals.hh"
#include <sqlite3.h>
#include <nlohmann/json.hpp>
namespace nix {
@ -38,6 +39,15 @@ create table if not exists NARs (
foreign key (cache) references BinaryCaches(id) on delete cascade
);
create table if not exists Realisations (
cache integer not null,
outputId text not null,
content blob, -- Json serialisation of the realisation, or null if the realisation is absent
timestamp integer not null,
primary key (cache, outputId),
foreign key (cache) references BinaryCaches(id) on delete cascade
);
create table if not exists LastPurge (
dummy text primary key,
value integer
@ -63,7 +73,9 @@ public:
struct State
{
SQLite db;
SQLiteStmt insertCache, queryCache, insertNAR, insertMissingNAR, queryNAR, purgeCache;
SQLiteStmt insertCache, queryCache, insertNAR, insertMissingNAR,
queryNAR, insertRealisation, insertMissingRealisation,
queryRealisation, purgeCache;
std::map<std::string, Cache> caches;
};
@ -98,6 +110,26 @@ public:
state->queryNAR.create(state->db,
"select present, namePart, url, compression, fileHash, fileSize, narHash, narSize, refs, deriver, sigs, ca from NARs where cache = ? and hashPart = ? and ((present = 0 and timestamp > ?) or (present = 1 and timestamp > ?))");
state->insertRealisation.create(state->db,
R"(
insert or replace into Realisations(cache, outputId, content, timestamp)
values (?, ?, ?, ?)
)");
state->insertMissingRealisation.create(state->db,
R"(
insert or replace into Realisations(cache, outputId, timestamp)
values (?, ?, ?)
)");
state->queryRealisation.create(state->db,
R"(
select content from Realisations
where cache = ? and outputId = ? and
((content is null and timestamp > ?) or
(content is not null and timestamp > ?))
)");
/* Periodically purge expired entries from the database. */
retrySQLite<void>([&]() {
auto now = time(0);
@ -109,8 +141,10 @@ public:
SQLiteStmt(state->db,
"delete from NARs where ((present = 0 and timestamp < ?) or (present = 1 and timestamp < ?))")
.use()
(now - settings.ttlNegativeNarInfoCache)
(now - settings.ttlPositiveNarInfoCache)
// Use a minimum TTL to prevent --refresh from
// nuking the entire disk cache.
(now - std::max(settings.ttlNegativeNarInfoCache.get(), 3600U))
(now - std::max(settings.ttlPositiveNarInfoCache.get(), 30 * 24 * 3600U))
.exec();
debug("deleted %d entries from the NAR info disk cache", sqlite3_changes(state->db));
@ -210,6 +244,38 @@ public:
});
}
std::pair<Outcome, std::shared_ptr<Realisation>> lookupRealisation(
const std::string & uri, const DrvOutput & id) override
{
return retrySQLite<std::pair<Outcome, std::shared_ptr<Realisation>>>(
[&]() -> std::pair<Outcome, std::shared_ptr<Realisation>> {
auto state(_state.lock());
auto & cache(getCache(*state, uri));
auto now = time(0);
auto queryRealisation(state->queryRealisation.use()
(cache.id)
(id.to_string())
(now - settings.ttlNegativeNarInfoCache)
(now - settings.ttlPositiveNarInfoCache));
if (!queryRealisation.next())
return {oUnknown, 0};
if (queryRealisation.isNull(0))
return {oInvalid, 0};
auto realisation =
std::make_shared<Realisation>(Realisation::fromJSON(
nlohmann::json::parse(queryRealisation.getStr(0)),
"Local disk cache"));
return {oValid, realisation};
});
}
void upsertNarInfo(
const std::string & uri, const std::string & hashPart,
std::shared_ptr<const ValidPathInfo> info) override
@ -249,6 +315,39 @@ public:
}
});
}
void upsertRealisation(
const std::string & uri,
const Realisation & realisation) override
{
retrySQLite<void>([&]() {
auto state(_state.lock());
auto & cache(getCache(*state, uri));
state->insertRealisation.use()
(cache.id)
(realisation.id.to_string())
(realisation.toJSON().dump())
(time(0)).exec();
});
}
virtual void upsertAbsentRealisation(
const std::string & uri,
const DrvOutput & id) override
{
retrySQLite<void>([&]() {
auto state(_state.lock());
auto & cache(getCache(*state, uri));
state->insertMissingRealisation.use()
(cache.id)
(id.to_string())
(time(0)).exec();
});
}
};
ref<NarInfoDiskCache> getNarInfoDiskCache()
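A self-contained toy of the three-valued lookup protocol the new realisation methods follow (fresh positive entry, fresh negative entry, or no usable entry). The types and store paths below are stand-ins, not the NarInfoDiskCache API itself, and the real cache additionally tracks timestamps per entry.

#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <utility>

// oValid = fresh positive entry, oInvalid = fresh negative entry, oUnknown = no usable entry.
enum Outcome { oValid, oInvalid, oUnknown };

struct ToyRealisationCache {
    // outputId -> (present?, serialised realisation).
    std::map<std::string, std::pair<bool, std::string>> entries;

    std::pair<Outcome, std::shared_ptr<std::string>> lookup(const std::string & outputId) const {
        auto it = entries.find(outputId);
        if (it == entries.end()) return {oUnknown, nullptr};
        if (!it->second.first) return {oInvalid, nullptr};
        return {oValid, std::make_shared<std::string>(it->second.second)};
    }

    void upsert(const std::string & outputId, const std::string & json) { entries[outputId] = {true, json}; }
    void upsertAbsent(const std::string & outputId) { entries[outputId] = {false, ""}; }
};

int main()
{
    ToyRealisationCache cache;
    cache.upsert("sha256:aaaa!out", "{\"outPath\": \"/nix/store/...-foo\"}");
    cache.upsertAbsent("sha256:bbbb!out");

    for (auto id : {"sha256:aaaa!out", "sha256:bbbb!out", "sha256:cccc!out"}) {
        auto [outcome, value] = cache.lookup(id);
        if (outcome == oValid)        std::cout << id << ": cached -> " << *value << "\n";
        else if (outcome == oInvalid) std::cout << id << ": cached as absent\n";
        else                          std::cout << id << ": not in cache, must ask the remote store\n";
    }
}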

View file

@ -2,6 +2,7 @@
#include "ref.hh"
#include "nar-info.hh"
#include "realisation.hh"
namespace nix {
@ -29,6 +30,15 @@ public:
virtual void upsertNarInfo(
const std::string & uri, const std::string & hashPart,
std::shared_ptr<const ValidPathInfo> info) = 0;
virtual void upsertRealisation(
const std::string & uri,
const Realisation & realisation) = 0;
virtual void upsertAbsentRealisation(
const std::string & uri,
const DrvOutput & id) = 0;
virtual std::pair<Outcome, std::shared_ptr<Realisation>> lookupRealisation(
const std::string & uri, const DrvOutput & id) = 0;
};
/* Return a singleton cache object that can be used concurrently by

Some files were not shown because too many files have changed in this diff.