Mirror of https://github.com/NixOS/nix.git
Merge remote-tracking branch 'upstream/master' into path-info

Commit f4f3203aa7
303 changed files with 32784 additions and 3308 deletions

@@ -52,9 +52,9 @@ void BinaryCacheStore::init()
                     throw Error("binary cache '%s' is for Nix stores with prefix '%s', not '%s'",
                         getUri(), value, storeDir);
             } else if (name == "WantMassQuery") {
-                wantMassQuery.setDefault(value == "1" ? "true" : "false");
+                wantMassQuery.setDefault(value == "1");
             } else if (name == "Priority") {
-                priority.setDefault(fmt("%d", std::stoi(value)));
+                priority.setDefault(std::stoi(value));
            }
        }
    }
@@ -130,17 +130,6 @@ AutoCloseFD openFile(const Path & path)
     return fd;
 }
 
-struct FileSource : FdSource
-{
-    AutoCloseFD fd2;
-
-    FileSource(const Path & path)
-        : fd2(openFile(path))
-    {
-        fd = fd2.get();
-    }
-};
-
 ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
     Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs,
     std::function<ValidPathInfo(HashResult)> mkInfo)
@@ -179,6 +168,9 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
     narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar"
         + (compression == "xz" ? ".xz" :
            compression == "bzip2" ? ".bz2" :
+           compression == "zstd" ? ".zst" :
+           compression == "lzip" ? ".lzip" :
+           compression == "lz4" ? ".lz4" :
            compression == "br" ? ".br" :
            "");
 
@@ -471,18 +463,43 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
 
 std::optional<const Realisation> BinaryCacheStore::queryRealisation(const DrvOutput & id)
 {
+    if (diskCache) {
+        auto [cacheOutcome, maybeCachedRealisation] =
+            diskCache->lookupRealisation(getUri(), id);
+        switch (cacheOutcome) {
+        case NarInfoDiskCache::oValid:
+            debug("Returning a cached realisation for %s", id.to_string());
+            return *maybeCachedRealisation;
+        case NarInfoDiskCache::oInvalid:
+            debug("Returning a cached missing realisation for %s", id.to_string());
+            return {};
+        case NarInfoDiskCache::oUnknown:
+            break;
+        }
+    }
+
     auto outputInfoFilePath = realisationsPrefix + "/" + id.to_string() + ".doi";
     auto rawOutputInfo = getFile(outputInfoFilePath);
 
     if (rawOutputInfo) {
-        return {Realisation::fromJSON(
-            nlohmann::json::parse(*rawOutputInfo), outputInfoFilePath)};
+        auto realisation = Realisation::fromJSON(
+            nlohmann::json::parse(*rawOutputInfo), outputInfoFilePath);
+
+        if (diskCache)
+            diskCache->upsertRealisation(
+                getUri(), realisation);
+
+        return {realisation};
     } else {
+        if (diskCache)
+            diskCache->upsertAbsentRealisation(getUri(), id);
         return std::nullopt;
     }
 }
 
 void BinaryCacheStore::registerDrvOutput(const Realisation& info) {
+    if (diskCache)
+        diskCache->upsertRealisation(getUri(), info);
     auto filePath = realisationsPrefix + "/" + info.id.to_string() + ".doi";
     upsertFile(filePath, info.toJSON().dump(), "application/json");
 }

@@ -34,7 +34,7 @@ private:
 protected:
 
     // The prefix under which realisation infos will be stored
-    const std::string realisationsPrefix = "/realisations";
+    const std::string realisationsPrefix = "realisations";
 
     BinaryCacheStore(const Params & params);
 

@ -20,6 +20,7 @@
|
|||
#include <sys/types.h>
|
||||
#include <sys/socket.h>
|
||||
#include <sys/un.h>
|
||||
#include <sys/wait.h>
|
||||
#include <netdb.h>
|
||||
#include <fcntl.h>
|
||||
#include <termios.h>
|
||||
|
|
@ -142,7 +143,6 @@ void DerivationGoal::work()
|
|||
(this->*state)();
|
||||
}
|
||||
|
||||
|
||||
void DerivationGoal::addWantedOutputs(const StringSet & outputs)
|
||||
{
|
||||
/* If we already want all outputs, there is nothing to do. */
|
||||
|
|
@ -165,7 +165,7 @@ void DerivationGoal::getDerivation()
|
|||
/* The first thing to do is to make sure that the derivation
|
||||
exists. If it doesn't, it may be created through a
|
||||
substitute. */
|
||||
if (buildMode == bmNormal && worker.store.isValidPath(drvPath)) {
|
||||
if (buildMode == bmNormal && worker.evalStore.isValidPath(drvPath)) {
|
||||
loadDerivation();
|
||||
return;
|
||||
}
|
||||
|
|
@ -188,12 +188,12 @@ void DerivationGoal::loadDerivation()
|
|||
/* `drvPath' should already be a root, but let's be on the safe
|
||||
side: if the user forgot to make it a root, we wouldn't want
|
||||
things being garbage collected while we're busy. */
|
||||
worker.store.addTempRoot(drvPath);
|
||||
worker.evalStore.addTempRoot(drvPath);
|
||||
|
||||
assert(worker.store.isValidPath(drvPath));
|
||||
assert(worker.evalStore.isValidPath(drvPath));
|
||||
|
||||
/* Get the derivation. */
|
||||
drv = std::make_unique<Derivation>(worker.store.derivationFromPath(drvPath));
|
||||
drv = std::make_unique<Derivation>(worker.evalStore.derivationFromPath(drvPath));
|
||||
|
||||
haveDerivation();
|
||||
}
|
||||
|
|
@ -212,8 +212,8 @@ void DerivationGoal::haveDerivation()
|
|||
if (i.second.second)
|
||||
worker.store.addTempRoot(*i.second.second);
|
||||
|
||||
auto outputHashes = staticOutputHashes(worker.store, *drv);
|
||||
for (auto &[outputName, outputHash] : outputHashes)
|
||||
auto outputHashes = staticOutputHashes(worker.evalStore, *drv);
|
||||
for (auto & [outputName, outputHash] : outputHashes)
|
||||
initialOutputs.insert({
|
||||
outputName,
|
||||
InitialOutput{
|
||||
|
|
@ -337,6 +337,15 @@ void DerivationGoal::gaveUpOnSubstitution()
|
|||
for (auto & i : dynamic_cast<Derivation *>(drv.get())->inputDrvs)
|
||||
addWaitee(worker.makeDerivationGoal(i.first, i.second, buildMode == bmRepair ? bmRepair : bmNormal));
|
||||
|
||||
/* Copy the input sources from the eval store to the build
|
||||
store. */
|
||||
if (&worker.evalStore != &worker.store) {
|
||||
RealisedPath::Set inputSrcs;
|
||||
for (auto & i : drv->inputSrcs)
|
||||
inputSrcs.insert(i);
|
||||
copyClosure(worker.evalStore, worker.store, inputSrcs);
|
||||
}
|
||||
|
||||
for (auto & i : drv->inputSrcs) {
|
||||
if (worker.store.isValidPath(i)) continue;
|
||||
if (!settings.useSubstitutes)
|
||||
|
|
@ -478,8 +487,8 @@ void DerivationGoal::inputsRealised()
|
|||
/* Add the relevant output closures of the input derivation
|
||||
`i' as input paths. Only add the closures of output paths
|
||||
that are specified as inputs. */
|
||||
assert(worker.store.isValidPath(drvPath));
|
||||
auto outputs = worker.store.queryPartialDerivationOutputMap(depDrvPath);
|
||||
assert(worker.evalStore.isValidPath(drvPath));
|
||||
auto outputs = worker.evalStore.queryPartialDerivationOutputMap(depDrvPath);
|
||||
for (auto & j : wantedDepOutputs) {
|
||||
if (outputs.count(j) > 0) {
|
||||
auto optRealizedInput = outputs.at(j);
|
||||
|
|
@ -544,7 +553,7 @@ void DerivationGoal::tryToBuild()
|
|||
PathSet lockFiles;
|
||||
/* FIXME: Should lock something like the drv itself so we don't build same
|
||||
CA drv concurrently */
|
||||
if (dynamic_cast<LocalStore *>(&worker.store))
|
||||
if (dynamic_cast<LocalStore *>(&worker.store)) {
|
||||
/* If we aren't a local store, we might need to use the local store as
|
||||
a build remote, but that would cause a deadlock. */
|
||||
/* FIXME: Make it so we can use ourselves as a build remote even if we
|
||||
|
|
@ -552,9 +561,15 @@ void DerivationGoal::tryToBuild()
|
|||
/* FIXME: find some way to lock for scheduling for the other stores so
|
||||
a forking daemon with --store still won't farm out redundant builds.
|
||||
*/
|
||||
for (auto & i : drv->outputsAndOptPaths(worker.store))
|
||||
for (auto & i : drv->outputsAndOptPaths(worker.store)) {
|
||||
if (i.second.second)
|
||||
lockFiles.insert(worker.store.Store::toRealPath(*i.second.second));
|
||||
else
|
||||
lockFiles.insert(
|
||||
worker.store.Store::toRealPath(drvPath) + "." + i.first
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (!outputLocks.lockPaths(lockFiles, "", false)) {
|
||||
if (!actLock)
|
||||
|
|
@ -738,6 +753,64 @@ void DerivationGoal::cleanupPostOutputsRegisteredModeNonCheck()
|
|||
{
|
||||
}
|
||||
|
||||
void runPostBuildHook(
|
||||
Store & store,
|
||||
Logger & logger,
|
||||
const StorePath & drvPath,
|
||||
StorePathSet outputPaths
|
||||
)
|
||||
{
|
||||
auto hook = settings.postBuildHook;
|
||||
if (hook == "")
|
||||
return;
|
||||
|
||||
Activity act(logger, lvlInfo, actPostBuildHook,
|
||||
fmt("running post-build-hook '%s'", settings.postBuildHook),
|
||||
Logger::Fields{store.printStorePath(drvPath)});
|
||||
PushActivity pact(act.id);
|
||||
std::map<std::string, std::string> hookEnvironment = getEnv();
|
||||
|
||||
hookEnvironment.emplace("DRV_PATH", store.printStorePath(drvPath));
|
||||
hookEnvironment.emplace("OUT_PATHS", chomp(concatStringsSep(" ", store.printStorePathSet(outputPaths))));
|
||||
hookEnvironment.emplace("NIX_CONFIG", globalConfig.toKeyValue());
|
||||
|
||||
struct LogSink : Sink {
|
||||
Activity & act;
|
||||
std::string currentLine;
|
||||
|
||||
LogSink(Activity & act) : act(act) { }
|
||||
|
||||
void operator() (std::string_view data) override {
|
||||
for (auto c : data) {
|
||||
if (c == '\n') {
|
||||
flushLine();
|
||||
} else {
|
||||
currentLine += c;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void flushLine() {
|
||||
act.result(resPostBuildLogLine, currentLine);
|
||||
currentLine.clear();
|
||||
}
|
||||
|
||||
~LogSink() {
|
||||
if (currentLine != "") {
|
||||
currentLine += '\n';
|
||||
flushLine();
|
||||
}
|
||||
}
|
||||
};
|
||||
LogSink sink(act);
|
||||
|
||||
runProgram2({
|
||||
.program = settings.postBuildHook,
|
||||
.environment = hookEnvironment,
|
||||
.standardOut = &sink,
|
||||
.mergeStderrToStdout = true,
|
||||
});
|
||||
}
|
||||
|
||||
void DerivationGoal::buildDone()
|
||||
{
|
||||
|
|
@ -803,57 +876,15 @@ void DerivationGoal::buildDone()
|
|||
being valid. */
|
||||
registerOutputs();
|
||||
|
||||
if (settings.postBuildHook != "") {
|
||||
Activity act(*logger, lvlInfo, actPostBuildHook,
|
||||
fmt("running post-build-hook '%s'", settings.postBuildHook),
|
||||
Logger::Fields{worker.store.printStorePath(drvPath)});
|
||||
PushActivity pact(act.id);
|
||||
StorePathSet outputPaths;
|
||||
for (auto i : drv->outputs) {
|
||||
outputPaths.insert(finalOutputs.at(i.first));
|
||||
}
|
||||
std::map<std::string, std::string> hookEnvironment = getEnv();
|
||||
|
||||
hookEnvironment.emplace("DRV_PATH", worker.store.printStorePath(drvPath));
|
||||
hookEnvironment.emplace("OUT_PATHS", chomp(concatStringsSep(" ", worker.store.printStorePathSet(outputPaths))));
|
||||
|
||||
RunOptions opts(settings.postBuildHook, {});
|
||||
opts.environment = hookEnvironment;
|
||||
|
||||
struct LogSink : Sink {
|
||||
Activity & act;
|
||||
std::string currentLine;
|
||||
|
||||
LogSink(Activity & act) : act(act) { }
|
||||
|
||||
void operator() (std::string_view data) override {
|
||||
for (auto c : data) {
|
||||
if (c == '\n') {
|
||||
flushLine();
|
||||
} else {
|
||||
currentLine += c;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void flushLine() {
|
||||
act.result(resPostBuildLogLine, currentLine);
|
||||
currentLine.clear();
|
||||
}
|
||||
|
||||
~LogSink() {
|
||||
if (currentLine != "") {
|
||||
currentLine += '\n';
|
||||
flushLine();
|
||||
}
|
||||
}
|
||||
};
|
||||
LogSink sink(act);
|
||||
|
||||
opts.standardOut = &sink;
|
||||
opts.mergeStderrToStdout = true;
|
||||
runProgram2(opts);
|
||||
}
|
||||
StorePathSet outputPaths;
|
||||
for (auto & [_, path] : finalOutputs)
|
||||
outputPaths.insert(path);
|
||||
runPostBuildHook(
|
||||
worker.store,
|
||||
*logger,
|
||||
drvPath,
|
||||
outputPaths
|
||||
);
|
||||
|
||||
if (buildMode == bmCheck) {
|
||||
cleanupPostOutputsRegisteredModeCheck();
|
||||
|
|
@ -909,6 +940,8 @@ void DerivationGoal::resolvedFinished() {
|
|||
|
||||
auto resolvedHashes = staticOutputHashes(worker.store, *resolvedDrv);
|
||||
|
||||
StorePathSet outputPaths;
|
||||
|
||||
// `wantedOutputs` might be empty, which means “all the outputs”
|
||||
auto realWantedOutputs = wantedOutputs;
|
||||
if (realWantedOutputs.empty())
|
||||
|
|
@ -926,8 +959,10 @@ void DerivationGoal::resolvedFinished() {
|
|||
auto newRealisation = *realisation;
|
||||
newRealisation.id = DrvOutput{initialOutputs.at(wantedOutput).outputHash, wantedOutput};
|
||||
newRealisation.signatures.clear();
|
||||
newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation->outPath);
|
||||
signRealisation(newRealisation);
|
||||
worker.store.registerDrvOutput(newRealisation);
|
||||
outputPaths.insert(realisation->outPath);
|
||||
} else {
|
||||
// If we don't have a realisation, then it must mean that something
|
||||
// failed when building the resolved drv
|
||||
|
|
@ -935,6 +970,13 @@ void DerivationGoal::resolvedFinished() {
|
|||
}
|
||||
}
|
||||
|
||||
runPostBuildHook(
|
||||
worker.store,
|
||||
*logger,
|
||||
drvPath,
|
||||
outputPaths
|
||||
);
|
||||
|
||||
// This is potentially a bit fishy in terms of error reporting. Not sure
|
||||
// how to do it in a cleaner way
|
||||
amDone(nrFailed == 0 ? ecSuccess : ecFailed, ex);
|
||||
|
|
@ -967,7 +1009,7 @@ HookReply DerivationGoal::tryBuildHook()
|
|||
return readLine(worker.hook->fromHook.readSide.get());
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while reading the response from the build hook");
|
||||
throw e;
|
||||
throw;
|
||||
}
|
||||
}();
|
||||
if (handleJSONLogMessage(s, worker.act, worker.hook->activities, true))
|
||||
|
|
@ -1013,7 +1055,7 @@ HookReply DerivationGoal::tryBuildHook()
|
|||
machineName = readLine(hook->fromHook.readSide.get());
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while reading the machine name from the build hook");
|
||||
throw e;
|
||||
throw;
|
||||
}
|
||||
|
||||
/* Tell the hook all the inputs that have to be copied to the
|
||||
|
|
@ -1047,42 +1089,6 @@ HookReply DerivationGoal::tryBuildHook()
|
|||
}
|
||||
|
||||
|
||||
StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths)
|
||||
{
|
||||
StorePathSet paths;
|
||||
|
||||
for (auto & storePath : storePaths) {
|
||||
if (!inputPaths.count(storePath))
|
||||
throw BuildError("cannot export references of path '%s' because it is not in the input closure of the derivation", worker.store.printStorePath(storePath));
|
||||
|
||||
worker.store.computeFSClosure({storePath}, paths);
|
||||
}
|
||||
|
||||
/* If there are derivations in the graph, then include their
|
||||
outputs as well. This is useful if you want to do things
|
||||
like passing all build-time dependencies of some path to a
|
||||
derivation that builds a NixOS DVD image. */
|
||||
auto paths2 = paths;
|
||||
|
||||
for (auto & j : paths2) {
|
||||
if (j.isDerivation()) {
|
||||
Derivation drv = worker.store.derivationFromPath(j);
|
||||
for (auto & k : drv.outputsAndOptPaths(worker.store)) {
|
||||
if (!k.second.second)
|
||||
/* FIXME: I am confused why we are calling
|
||||
`computeFSClosure` on the output path, rather than
|
||||
derivation itself. That doesn't seem right to me, so I
|
||||
won't try to implemented this for CA derivations. */
|
||||
throw UnimplementedError("exportReferences on CA derivations is not yet implemented");
|
||||
worker.store.computeFSClosure(*k.second.second, paths);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return paths;
|
||||
}
|
||||
|
||||
|
||||
void DerivationGoal::registerOutputs()
|
||||
{
|
||||
/* When using a build hook, the build hook can register the output
|
||||
|
|
@ -1268,12 +1274,23 @@ void DerivationGoal::checkPathValidity()
|
|||
};
|
||||
}
|
||||
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
|
||||
if (auto real = worker.store.queryRealisation(
|
||||
DrvOutput{initialOutputs.at(i.first).outputHash, i.first})) {
|
||||
auto drvOutput = DrvOutput{initialOutputs.at(i.first).outputHash, i.first};
|
||||
if (auto real = worker.store.queryRealisation(drvOutput)) {
|
||||
info.known = {
|
||||
.path = real->outPath,
|
||||
.status = PathStatus::Valid,
|
||||
};
|
||||
} else if (info.known && info.known->status == PathStatus::Valid) {
|
||||
// We know the output because it's a static output of the
|
||||
// derivation, and the output path is valid, but we don't have
|
||||
// its realisation stored (probably because it has been built
|
||||
// without the `ca-derivations` experimental flag)
|
||||
worker.store.registerDrvOutput(
|
||||
Realisation{
|
||||
drvOutput,
|
||||
info.known->path,
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,6 +17,13 @@ DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(const DrvOutput& id, Worker
|
|||
void DrvOutputSubstitutionGoal::init()
|
||||
{
|
||||
trace("init");
|
||||
|
||||
/* If the derivation already exists, we’re done */
|
||||
if (worker.store.queryRealisation(id)) {
|
||||
amDone(ecSuccess);
|
||||
return;
|
||||
}
|
||||
|
||||
subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list<ref<Store>>();
|
||||
tryNext();
|
||||
}
|
||||
|
|
@ -53,6 +60,26 @@ void DrvOutputSubstitutionGoal::tryNext()
|
|||
return;
|
||||
}
|
||||
|
||||
for (const auto & [depId, depPath] : outputInfo->dependentRealisations) {
|
||||
if (depId != id) {
|
||||
if (auto localOutputInfo = worker.store.queryRealisation(depId);
|
||||
localOutputInfo && localOutputInfo->outPath != depPath) {
|
||||
warn(
|
||||
"substituter '%s' has an incompatible realisation for '%s', ignoring.\n"
|
||||
"Local: %s\n"
|
||||
"Remote: %s",
|
||||
sub->getUri(),
|
||||
depId.to_string(),
|
||||
worker.store.printStorePath(localOutputInfo->outPath),
|
||||
worker.store.printStorePath(depPath)
|
||||
);
|
||||
tryNext();
|
||||
return;
|
||||
}
|
||||
addWaitee(worker.makeDrvOutputSubstitutionGoal(depId));
|
||||
}
|
||||
}
|
||||
|
||||
addWaitee(worker.makePathSubstitutionGoal(outputInfo->outPath));
|
||||
|
||||
if (waitees.empty()) outPathValid();
|
||||
|
|
|
|||
|
|
@ -6,9 +6,9 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMode)
|
||||
void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMode, std::shared_ptr<Store> evalStore)
|
||||
{
|
||||
Worker worker(*this);
|
||||
Worker worker(*this, evalStore ? *evalStore : *this);
|
||||
|
||||
Goals goals;
|
||||
for (auto & br : reqs) {
|
||||
|
|
@ -51,7 +51,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
|||
BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
|
||||
BuildMode buildMode)
|
||||
{
|
||||
Worker worker(*this);
|
||||
Worker worker(*this, *this);
|
||||
auto goal = worker.makeBasicDerivationGoal(drvPath, drv, {}, buildMode);
|
||||
|
||||
BuildResult result;
|
||||
|
|
@ -93,7 +93,7 @@ void Store::ensurePath(const StorePath & path)
|
|||
/* If the path is already valid, we're done. */
|
||||
if (isValidPath(path)) return;
|
||||
|
||||
Worker worker(*this);
|
||||
Worker worker(*this, *this);
|
||||
GoalPtr goal = worker.makePathSubstitutionGoal(path);
|
||||
Goals goals = {goal};
|
||||
|
||||
|
|
@ -111,7 +111,7 @@ void Store::ensurePath(const StorePath & path)
|
|||
|
||||
void LocalStore::repairPath(const StorePath & path)
|
||||
{
|
||||
Worker worker(*this);
|
||||
Worker worker(*this, *this);
|
||||
GoalPtr goal = worker.makePathSubstitutionGoal(path, Repair);
|
||||
Goals goals = {goal};
|
||||
|
||||
|
|
|
|||
|
|
@ -13,11 +13,9 @@ bool CompareGoalPtrs::operator() (const GoalPtr & a, const GoalPtr & b) const {
|
|||
|
||||
void addToWeakGoals(WeakGoals & goals, GoalPtr p)
|
||||
{
|
||||
// FIXME: necessary?
|
||||
// FIXME: O(n)
|
||||
for (auto & i : goals)
|
||||
if (i.lock() == p) return;
|
||||
goals.push_back(p);
|
||||
if (goals.find(p) != goals.end())
|
||||
return;
|
||||
goals.insert(p);
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -46,10 +44,7 @@ void Goal::waiteeDone(GoalPtr waitee, ExitCode result)
|
|||
/* If we failed and keepGoing is not set, we remove all
|
||||
remaining waitees. */
|
||||
for (auto & goal : waitees) {
|
||||
WeakGoals waiters2;
|
||||
for (auto & j : goal->waiters)
|
||||
if (j.lock() != shared_from_this()) waiters2.push_back(j);
|
||||
goal->waiters = waiters2;
|
||||
goal->waiters.extract(shared_from_this());
|
||||
}
|
||||
waitees.clear();
|
||||
|
||||
|
|
@ -78,6 +73,8 @@ void Goal::amDone(ExitCode result, std::optional<Error> ex)
|
|||
}
|
||||
waiters.clear();
|
||||
worker.removeGoal(shared_from_this());
|
||||
|
||||
cleanup();
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ struct CompareGoalPtrs {
|
|||
|
||||
/* Set of goals. */
|
||||
typedef set<GoalPtr, CompareGoalPtrs> Goals;
|
||||
typedef list<WeakGoalPtr> WeakGoals;
|
||||
typedef set<WeakGoalPtr, std::owner_less<WeakGoalPtr>> WeakGoals;
|
||||
|
||||
/* A map of paths to goals (and the other way around). */
|
||||
typedef std::map<StorePath, WeakGoalPtr> WeakGoalMap;
|
||||
|
|
@ -100,6 +100,8 @@ struct Goal : public std::enable_shared_from_this<Goal>
|
|||
virtual string key() = 0;
|
||||
|
||||
void amDone(ExitCode result, std::optional<Error> ex = {});
|
||||
|
||||
virtual void cleanup() { }
|
||||
};
|
||||
|
||||
void addToWeakGoals(WeakGoals & goals, GoalPtr p);
|
||||
|
|
|
|||
|
|
@ -17,16 +17,14 @@
|
|||
#include <regex>
|
||||
#include <queue>
|
||||
|
||||
#include <sys/types.h>
|
||||
#include <sys/socket.h>
|
||||
#include <sys/un.h>
|
||||
#include <netdb.h>
|
||||
#include <fcntl.h>
|
||||
#include <termios.h>
|
||||
#include <unistd.h>
|
||||
#include <sys/mman.h>
|
||||
#include <sys/utsname.h>
|
||||
#include <sys/resource.h>
|
||||
#include <sys/socket.h>
|
||||
|
||||
#if HAVE_STATVFS
|
||||
#include <sys/statvfs.h>
|
||||
|
|
@ -34,7 +32,6 @@
|
|||
|
||||
/* Includes required for chroot support. */
|
||||
#if __linux__
|
||||
#include <sys/socket.h>
|
||||
#include <sys/ioctl.h>
|
||||
#include <net/if.h>
|
||||
#include <netinet/ip.h>
|
||||
|
|
@ -70,12 +67,14 @@ void handleDiffHook(
|
|||
auto diffHook = settings.diffHook;
|
||||
if (diffHook != "" && settings.runDiffHook) {
|
||||
try {
|
||||
RunOptions diffHookOptions(diffHook,{tryA, tryB, drvPath, tmpDir});
|
||||
diffHookOptions.searchPath = true;
|
||||
diffHookOptions.uid = uid;
|
||||
diffHookOptions.gid = gid;
|
||||
diffHookOptions.chdir = "/";
|
||||
auto diffRes = runProgram(diffHookOptions);
|
||||
auto diffRes = runProgram(RunOptions {
|
||||
.program = diffHook,
|
||||
.searchPath = true,
|
||||
.args = {tryA, tryB, drvPath, tmpDir},
|
||||
.uid = uid,
|
||||
.gid = gid,
|
||||
.chdir = "/"
|
||||
});
|
||||
if (!statusOk(diffRes.first))
|
||||
throw ExecError(diffRes.first,
|
||||
"diff-hook program '%1%' %2%",
|
||||
|
|
@ -153,6 +152,7 @@ void LocalDerivationGoal::killChild()
|
|||
void LocalDerivationGoal::tryLocalBuild() {
|
||||
unsigned int curBuilds = worker.getNrLocalBuilds();
|
||||
if (curBuilds >= settings.maxBuildJobs) {
|
||||
state = &DerivationGoal::tryToBuild;
|
||||
worker.waitForBuildSlot(shared_from_this());
|
||||
outputLocks.unlock();
|
||||
return;
|
||||
|
|
@ -291,7 +291,7 @@ bool LocalDerivationGoal::cleanupDecideWhetherDiskFull()
|
|||
auto & localStore = getLocalStore();
|
||||
uint64_t required = 8ULL * 1024 * 1024; // FIXME: make configurable
|
||||
struct statvfs st;
|
||||
if (statvfs(localStore.realStoreDir.c_str(), &st) == 0 &&
|
||||
if (statvfs(localStore.realStoreDir.get().c_str(), &st) == 0 &&
|
||||
(uint64_t) st.f_bavail * st.f_bsize < required)
|
||||
diskFull = true;
|
||||
if (statvfs(tmpDir.c_str(), &st) == 0 &&
|
||||
|
|
@ -343,23 +343,6 @@ int childEntry(void * arg)
|
|||
}
|
||||
|
||||
|
||||
static std::once_flag dns_resolve_flag;
|
||||
|
||||
static void preloadNSS() {
|
||||
/* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of
|
||||
one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already
|
||||
been loaded in the parent. So we force a lookup of an invalid domain to force the NSS machinery to
|
||||
load its lookup libraries in the parent before any child gets a chance to. */
|
||||
std::call_once(dns_resolve_flag, []() {
|
||||
struct addrinfo *res = NULL;
|
||||
|
||||
if (getaddrinfo("this.pre-initializes.the.dns.resolvers.invalid.", "http", NULL, &res) != 0) {
|
||||
if (res) freeaddrinfo(res);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
static void linkOrCopy(const Path & from, const Path & to)
|
||||
{
|
||||
if (link(from.c_str(), to.c_str()) == -1) {
|
||||
|
|
@ -388,9 +371,6 @@ void LocalDerivationGoal::startBuilder()
|
|||
settings.thisSystem,
|
||||
concatStringsSep<StringSet>(", ", worker.store.systemFeatures));
|
||||
|
||||
if (drv->isBuiltin())
|
||||
preloadNSS();
|
||||
|
||||
#if __APPLE__
|
||||
additionalSandboxProfile = parsedDrv->getStringAttr("__sandboxProfile").value_or("");
|
||||
#endif
|
||||
|
|
@ -416,7 +396,7 @@ void LocalDerivationGoal::startBuilder()
|
|||
}
|
||||
|
||||
auto & localStore = getLocalStore();
|
||||
if (localStore.storeDir != localStore.realStoreDir) {
|
||||
if (localStore.storeDir != localStore.realStoreDir.get()) {
|
||||
#if __linux__
|
||||
useChroot = true;
|
||||
#else
|
||||
|
|
@ -517,7 +497,7 @@ void LocalDerivationGoal::startBuilder()
|
|||
/* Write closure info to <fileName>. */
|
||||
writeFile(tmpDir + "/" + fileName,
|
||||
worker.store.makeValidityRegistration(
|
||||
exportReferences({storePath}), false, false));
|
||||
worker.store.exportReferences({storePath}, inputPaths), false, false));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -581,7 +561,9 @@ void LocalDerivationGoal::startBuilder()
|
|||
throw Error("derivation '%s' requested impure path '%s', but it was not in allowed-impure-host-deps",
|
||||
worker.store.printStorePath(drvPath), i);
|
||||
|
||||
dirsInChroot[i] = i;
|
||||
/* Allow files in __impureHostDeps to be missing; e.g.
|
||||
macOS 11+ has no /usr/lib/libSystem*.dylib */
|
||||
dirsInChroot[i] = {i, true};
|
||||
}
|
||||
|
||||
#if __linux__
|
||||
|
|
@ -956,9 +938,12 @@ void LocalDerivationGoal::startBuilder()
|
|||
try {
|
||||
return readLine(builderOut.readSide.get());
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while waiting for the build environment to initialize (previous messages: %s)",
|
||||
auto status = pid.wait();
|
||||
e.addTrace({}, "while waiting for the build environment for '%s' to initialize (%s, previous messages: %s)",
|
||||
worker.store.printStorePath(drvPath),
|
||||
statusToString(status),
|
||||
concatStringsSep("|", msgs));
|
||||
throw e;
|
||||
throw;
|
||||
}
|
||||
}();
|
||||
if (string(msg, 0, 1) == "\2") break;
|
||||
|
|
@ -966,7 +951,7 @@ void LocalDerivationGoal::startBuilder()
|
|||
FdSource source(builderOut.readSide.get());
|
||||
auto ex = readError(source);
|
||||
ex.addTrace({}, "while setting up the build environment");
|
||||
throw ex;
|
||||
throw;
|
||||
}
|
||||
debug("sandbox setup: " + msg);
|
||||
msgs.push_back(std::move(msg));
|
||||
|
|
@ -1081,113 +1066,28 @@ void LocalDerivationGoal::initEnv()
|
|||
}
|
||||
|
||||
|
||||
static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");
|
||||
|
||||
|
||||
void LocalDerivationGoal::writeStructuredAttrs()
|
||||
{
|
||||
auto structuredAttrs = parsedDrv->getStructuredAttrs();
|
||||
if (!structuredAttrs) return;
|
||||
if (auto structAttrsJson = parsedDrv->prepareStructuredAttrs(worker.store, inputPaths)) {
|
||||
auto json = structAttrsJson.value();
|
||||
nlohmann::json rewritten;
|
||||
for (auto & [i, v] : json["outputs"].get<nlohmann::json::object_t>()) {
|
||||
/* The placeholder must have a rewrite, so we use it to cover both the
|
||||
cases where we know or don't know the output path ahead of time. */
|
||||
rewritten[i] = rewriteStrings(v, inputRewrites);
|
||||
}
|
||||
|
||||
auto json = *structuredAttrs;
|
||||
json["outputs"] = rewritten;
|
||||
|
||||
/* Add an "outputs" object containing the output paths. */
|
||||
nlohmann::json outputs;
|
||||
for (auto & i : drv->outputs) {
|
||||
/* The placeholder must have a rewrite, so we use it to cover both the
|
||||
cases where we know or don't know the output path ahead of time. */
|
||||
outputs[i.first] = rewriteStrings(hashPlaceholder(i.first), inputRewrites);
|
||||
auto jsonSh = writeStructuredAttrsShell(json);
|
||||
|
||||
writeFile(tmpDir + "/.attrs.sh", rewriteStrings(jsonSh, inputRewrites));
|
||||
chownToBuilder(tmpDir + "/.attrs.sh");
|
||||
env["NIX_ATTRS_SH_FILE"] = tmpDir + "/.attrs.sh";
|
||||
writeFile(tmpDir + "/.attrs.json", rewriteStrings(json.dump(), inputRewrites));
|
||||
chownToBuilder(tmpDir + "/.attrs.json");
|
||||
env["NIX_ATTRS_JSON_FILE"] = tmpDir + "/.attrs.json";
|
||||
}
|
||||
json["outputs"] = outputs;
|
||||
|
||||
/* Handle exportReferencesGraph. */
|
||||
auto e = json.find("exportReferencesGraph");
|
||||
if (e != json.end() && e->is_object()) {
|
||||
for (auto i = e->begin(); i != e->end(); ++i) {
|
||||
std::ostringstream str;
|
||||
{
|
||||
JSONPlaceholder jsonRoot(str, true);
|
||||
StorePathSet storePaths;
|
||||
for (auto & p : *i)
|
||||
storePaths.insert(worker.store.parseStorePath(p.get<std::string>()));
|
||||
worker.store.pathInfoToJSON(jsonRoot,
|
||||
exportReferences(storePaths), false, true);
|
||||
}
|
||||
json[i.key()] = nlohmann::json::parse(str.str()); // urgh
|
||||
}
|
||||
}
|
||||
|
||||
writeFile(tmpDir + "/.attrs.json", rewriteStrings(json.dump(), inputRewrites));
|
||||
chownToBuilder(tmpDir + "/.attrs.json");
|
||||
|
||||
/* As a convenience to bash scripts, write a shell file that
|
||||
maps all attributes that are representable in bash -
|
||||
namely, strings, integers, nulls, Booleans, and arrays and
|
||||
objects consisting entirely of those values. (So nested
|
||||
arrays or objects are not supported.) */
|
||||
|
||||
auto handleSimpleType = [](const nlohmann::json & value) -> std::optional<std::string> {
|
||||
if (value.is_string())
|
||||
return shellEscape(value);
|
||||
|
||||
if (value.is_number()) {
|
||||
auto f = value.get<float>();
|
||||
if (std::ceil(f) == f)
|
||||
return std::to_string(value.get<int>());
|
||||
}
|
||||
|
||||
if (value.is_null())
|
||||
return std::string("''");
|
||||
|
||||
if (value.is_boolean())
|
||||
return value.get<bool>() ? std::string("1") : std::string("");
|
||||
|
||||
return {};
|
||||
};
|
||||
|
||||
std::string jsonSh;
|
||||
|
||||
for (auto i = json.begin(); i != json.end(); ++i) {
|
||||
|
||||
if (!std::regex_match(i.key(), shVarName)) continue;
|
||||
|
||||
auto & value = i.value();
|
||||
|
||||
auto s = handleSimpleType(value);
|
||||
if (s)
|
||||
jsonSh += fmt("declare %s=%s\n", i.key(), *s);
|
||||
|
||||
else if (value.is_array()) {
|
||||
std::string s2;
|
||||
bool good = true;
|
||||
|
||||
for (auto i = value.begin(); i != value.end(); ++i) {
|
||||
auto s3 = handleSimpleType(i.value());
|
||||
if (!s3) { good = false; break; }
|
||||
s2 += *s3; s2 += ' ';
|
||||
}
|
||||
|
||||
if (good)
|
||||
jsonSh += fmt("declare -a %s=(%s)\n", i.key(), s2);
|
||||
}
|
||||
|
||||
else if (value.is_object()) {
|
||||
std::string s2;
|
||||
bool good = true;
|
||||
|
||||
for (auto i = value.begin(); i != value.end(); ++i) {
|
||||
auto s3 = handleSimpleType(i.value());
|
||||
if (!s3) { good = false; break; }
|
||||
s2 += fmt("[%s]=%s ", shellEscape(i.key()), *s3);
|
||||
}
|
||||
|
||||
if (good)
|
||||
jsonSh += fmt("declare -A %s=(%s)\n", i.key(), s2);
|
||||
}
|
||||
}
|
||||
|
||||
writeFile(tmpDir + "/.attrs.sh", rewriteStrings(jsonSh, inputRewrites));
|
||||
chownToBuilder(tmpDir + "/.attrs.sh");
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -1330,13 +1230,20 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
std::optional<const Realisation> queryRealisation(const DrvOutput & id) override
|
||||
// XXX: This should probably be allowed if the realisation corresponds to
|
||||
// an allowed derivation
|
||||
{ throw Error("queryRealisation"); }
|
||||
|
||||
void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode) override
|
||||
{
|
||||
if (!goal.isAllowed(id))
|
||||
throw InvalidPath("cannot query an unknown output id '%s' in recursive Nix", id.to_string());
|
||||
return next->queryRealisation(id);
|
||||
}
|
||||
|
||||
void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override
|
||||
{
|
||||
assert(!evalStore);
|
||||
|
||||
if (buildMode != bmNormal) throw Error("unsupported build mode");
|
||||
|
||||
StorePathSet newPaths;
|
||||
std::set<Realisation> newRealisations;
|
||||
|
||||
for (auto & req : paths) {
|
||||
if (!goal.isAllowed(req))
|
||||
|
|
@ -1349,16 +1256,28 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
auto p = std::get_if<DerivedPath::Built>(&path);
|
||||
if (!p) continue;
|
||||
auto & bfd = *p;
|
||||
auto drv = readDerivation(bfd.drvPath);
|
||||
auto drvHashes = staticOutputHashes(*this, drv);
|
||||
auto outputs = next->queryDerivationOutputMap(bfd.drvPath);
|
||||
for (auto & [outputName, outputPath] : outputs)
|
||||
if (wantOutput(outputName, bfd.outputs))
|
||||
if (wantOutput(outputName, bfd.outputs)) {
|
||||
newPaths.insert(outputPath);
|
||||
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
|
||||
auto thisRealisation = next->queryRealisation(
|
||||
DrvOutput{drvHashes.at(outputName), outputName}
|
||||
);
|
||||
assert(thisRealisation);
|
||||
newRealisations.insert(*thisRealisation);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
StorePathSet closure;
|
||||
next->computeFSClosure(newPaths, closure);
|
||||
for (auto & path : closure)
|
||||
goal.addDependency(path);
|
||||
for (auto & real : Realisation::closure(*next, newRealisations))
|
||||
goal.addedDrvOutputs.insert(real.id);
|
||||
}
|
||||
|
||||
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
|
||||
|
|
@ -1734,7 +1653,7 @@ void LocalDerivationGoal::runChild()
|
|||
/* N.B. it is realistic that these paths might not exist. It
|
||||
happens when testing Nix building fixed-output derivations
|
||||
within a pure derivation. */
|
||||
for (auto & path : { "/etc/resolv.conf", "/etc/services", "/etc/hosts", "/var/run/nscd/socket" })
|
||||
for (auto & path : { "/etc/resolv.conf", "/etc/services", "/etc/hosts" })
|
||||
if (pathExists(path))
|
||||
ss.push_back(path);
|
||||
}
|
||||
|
|
@ -1916,7 +1835,7 @@ void LocalDerivationGoal::runChild()
|
|||
/* Fill in the arguments. */
|
||||
Strings args;
|
||||
|
||||
const char *builder = "invalid";
|
||||
std::string builder = "invalid";
|
||||
|
||||
if (drv->isBuiltin()) {
|
||||
;
|
||||
|
|
@ -2042,13 +1961,13 @@ void LocalDerivationGoal::runChild()
|
|||
}
|
||||
args.push_back(drv->builder);
|
||||
} else {
|
||||
builder = drv->builder.c_str();
|
||||
builder = drv->builder;
|
||||
args.push_back(std::string(baseNameOf(drv->builder)));
|
||||
}
|
||||
}
|
||||
#else
|
||||
else {
|
||||
builder = drv->builder.c_str();
|
||||
builder = drv->builder;
|
||||
args.push_back(std::string(baseNameOf(drv->builder)));
|
||||
}
|
||||
#endif
|
||||
|
|
@ -2104,9 +2023,9 @@ void LocalDerivationGoal::runChild()
|
|||
posix_spawnattr_setbinpref_np(&attrp, 1, &cpu, NULL);
|
||||
}
|
||||
|
||||
posix_spawn(NULL, builder, NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
|
||||
posix_spawn(NULL, builder.c_str(), NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
|
||||
#else
|
||||
execve(builder, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
|
||||
execve(builder.c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
|
||||
#endif
|
||||
|
||||
throw SysError("executing '%1%'", drv->builder);
|
||||
|
|
@ -2298,10 +2217,6 @@ void LocalDerivationGoal::registerOutputs()
|
|||
sink.s = make_ref<std::string>(rewriteStrings(*sink.s, outputRewrites));
|
||||
StringSource source(*sink.s);
|
||||
restorePath(actualPath, source);
|
||||
|
||||
/* FIXME: set proper permissions in restorePath() so
|
||||
we don't have to do another traversal. */
|
||||
canonicalisePathMetaData(actualPath, -1, inodesSeen);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
@ -2353,9 +2268,6 @@ void LocalDerivationGoal::registerOutputs()
|
|||
break;
|
||||
}
|
||||
auto got = caSink.finish().first;
|
||||
HashModuloSink narSink { htSHA256, oldHashPart };
|
||||
dumpPath(actualPath, narSink);
|
||||
auto narHashAndSize = narSink.finish();
|
||||
ValidPathInfo newInfo0 {
|
||||
worker.store,
|
||||
{
|
||||
|
|
@ -2368,9 +2280,8 @@ void LocalDerivationGoal::registerOutputs()
|
|||
rewriteRefs(),
|
||||
},
|
||||
},
|
||||
narHashAndSize.first,
|
||||
Hash::dummy,
|
||||
};
|
||||
newInfo0.narSize = narHashAndSize.second;
|
||||
if (scratchPath != newInfo0.path) {
|
||||
// Also rewrite the output path
|
||||
auto source = sinkToSource([&](Sink & nextSink) {
|
||||
|
|
@ -2386,6 +2297,10 @@ void LocalDerivationGoal::registerOutputs()
|
|||
movePath(tmpPath, actualPath);
|
||||
}
|
||||
|
||||
HashResult narHashAndSize = hashPath(htSHA256, actualPath);
|
||||
newInfo0.narHash = narHashAndSize.first;
|
||||
newInfo0.narSize = narHashAndSize.second;
|
||||
|
||||
assert(newInfo0.ca);
|
||||
return newInfo0;
|
||||
};
|
||||
|
|
@ -2443,6 +2358,10 @@ void LocalDerivationGoal::registerOutputs()
|
|||
},
|
||||
}, output.output);
|
||||
|
||||
/* FIXME: set proper permissions in restorePath() so
|
||||
we don't have to do another traversal. */
|
||||
canonicalisePathMetaData(actualPath, -1, inodesSeen);
|
||||
|
||||
/* Calculate where we'll move the output files. In the checking case we
|
||||
will leave them where they are, for now, rather than move to
|
||||
their usual "final destination" */
|
||||
|
|
@ -2452,6 +2371,7 @@ void LocalDerivationGoal::registerOutputs()
|
|||
floating CA derivations and hash-mismatching fixed-output
|
||||
derivations. */
|
||||
PathLocks dynamicOutputLock;
|
||||
dynamicOutputLock.setDeletion(true);
|
||||
auto optFixedPath = output.path(worker.store, drv->name, outputName);
|
||||
if (!optFixedPath ||
|
||||
worker.store.printStorePath(*optFixedPath) != finalDestPath)
|
||||
|
|
@ -2475,6 +2395,7 @@ void LocalDerivationGoal::registerOutputs()
|
|||
assert(newInfo.ca);
|
||||
} else {
|
||||
auto destPath = worker.store.toRealPath(finalDestPath);
|
||||
deletePath(destPath);
|
||||
movePath(actualPath, destPath);
|
||||
actualPath = destPath;
|
||||
}
|
||||
|
|
@ -2544,7 +2465,13 @@ void LocalDerivationGoal::registerOutputs()
|
|||
infos.emplace(outputName, std::move(newInfo));
|
||||
}
|
||||
|
||||
if (buildMode == bmCheck) return;
|
||||
if (buildMode == bmCheck) {
|
||||
// In case of FOD mismatches on `--check` an error must be thrown as this is also
|
||||
// a source for non-determinism.
|
||||
if (delayedException)
|
||||
std::rethrow_exception(delayedException);
|
||||
return;
|
||||
}
|
||||
|
||||
/* Apply output checks. */
|
||||
checkOutputs(infos);
|
||||
|
|
|
|||
|
|
@ -108,6 +108,9 @@ struct LocalDerivationGoal : public DerivationGoal
|
|||
/* Paths that were added via recursive Nix calls. */
|
||||
StorePathSet addedPaths;
|
||||
|
||||
/* Realisations that were added via recursive Nix calls. */
|
||||
std::set<DrvOutput> addedDrvOutputs;
|
||||
|
||||
/* Recursive Nix calls are only allowed to build or realize paths
|
||||
in the original input closure or added via a recursive Nix call
|
||||
(so e.g. you can't do 'nix-store -r /nix/store/<bla>' where
|
||||
|
|
@ -116,6 +119,11 @@ struct LocalDerivationGoal : public DerivationGoal
|
|||
{
|
||||
return inputPaths.count(path) || addedPaths.count(path);
|
||||
}
|
||||
bool isAllowed(const DrvOutput & id)
|
||||
{
|
||||
return addedDrvOutputs.count(id);
|
||||
}
|
||||
|
||||
bool isAllowed(const DerivedPath & req);
|
||||
|
||||
friend struct RestrictedStore;
|
||||
|
|
|
|||
|
|
@ -20,15 +20,7 @@ PathSubstitutionGoal::PathSubstitutionGoal(const StorePath & storePath, Worker &
|
|||
|
||||
PathSubstitutionGoal::~PathSubstitutionGoal()
|
||||
{
|
||||
try {
|
||||
if (thr.joinable()) {
|
||||
// FIXME: signal worker thread to quit.
|
||||
thr.join();
|
||||
worker.childTerminated(this);
|
||||
}
|
||||
} catch (...) {
|
||||
ignoreException();
|
||||
}
|
||||
cleanup();
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -63,6 +55,8 @@ void PathSubstitutionGoal::tryNext()
|
|||
{
|
||||
trace("trying next substituter");
|
||||
|
||||
cleanup();
|
||||
|
||||
if (subs.size() == 0) {
|
||||
/* None left. Terminate this goal and let someone else deal
|
||||
with it. */
|
||||
|
|
@ -206,12 +200,12 @@ void PathSubstitutionGoal::tryToRun()
|
|||
thr = std::thread([this]() {
|
||||
try {
|
||||
/* Wake up the worker loop when we're done. */
|
||||
Finally updateStats([this]() { outPipe.writeSide = -1; });
|
||||
Finally updateStats([this]() { outPipe.writeSide.close(); });
|
||||
|
||||
Activity act(*logger, actSubstitute, Logger::Fields{worker.store.printStorePath(storePath), sub->getUri()});
|
||||
PushActivity pact(act.id);
|
||||
|
||||
copyStorePath(ref<Store>(sub), ref<Store>(worker.store.shared_from_this()),
|
||||
copyStorePath(*sub, worker.store,
|
||||
subPath ? *subPath : storePath, repair, sub->isTrusted ? NoCheckSigs : CheckSigs);
|
||||
|
||||
promise.set_value();
|
||||
|
|
@ -289,4 +283,21 @@ void PathSubstitutionGoal::handleEOF(int fd)
|
|||
if (fd == outPipe.readSide.get()) worker.wakeUp(shared_from_this());
|
||||
}
|
||||
|
||||
|
||||
void PathSubstitutionGoal::cleanup()
|
||||
{
|
||||
try {
|
||||
if (thr.joinable()) {
|
||||
// FIXME: signal worker thread to quit.
|
||||
thr.join();
|
||||
worker.childTerminated(this);
|
||||
}
|
||||
|
||||
outPipe.close();
|
||||
} catch (...) {
|
||||
ignoreException();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ struct PathSubstitutionGoal : public Goal
|
|||
StorePath storePath;
|
||||
|
||||
/* The path the substituter refers to the path as. This will be
|
||||
* different when the stores have different names. */
|
||||
different when the stores have different names. */
|
||||
std::optional<StorePath> subPath;
|
||||
|
||||
/* The remaining substituters. */
|
||||
|
|
@ -79,6 +79,8 @@ public:
|
|||
/* Callback used by the worker to write to the log. */
|
||||
void handleChildOutput(int fd, const string & data) override;
|
||||
void handleEOF(int fd) override;
|
||||
|
||||
void cleanup() override;
|
||||
};
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -9,11 +9,12 @@
 
 namespace nix {
 
-Worker::Worker(Store & store)
+Worker::Worker(Store & store, Store & evalStore)
     : act(*logger, actRealise)
     , actDerivations(*logger, actBuilds)
     , actSubstitutions(*logger, actCopyPaths)
     , store(store)
+    , evalStore(evalStore)
 {
     /* Debugging: prevent recursive workers. */
     nrLocalBuilds = 0;
@ -128,6 +129,7 @@ void Worker::removeGoal(GoalPtr goal)
|
|||
nix::removeGoal(subGoal, drvOutputSubstitutionGoals);
|
||||
else
|
||||
assert(false);
|
||||
|
||||
if (topGoals.find(goal) != topGoals.end()) {
|
||||
topGoals.erase(goal);
|
||||
/* If a top-level goal failed, then kill all other goals
|
||||
|
|
|
|||
|
|
@@ -110,6 +110,7 @@ public:
     bool checkMismatch;
 
     Store & store;
+    Store & evalStore;
 
     std::unique_ptr<HookInstance> hook;
 
@@ -131,7 +132,7 @@ public:
       it answers with "decline-permanently", we don't try again. */
    bool tryBuildHook = true;
 
-    Worker(Store & store);
+    Worker(Store & store, Store & evalStore);
    ~Worker();
 
    /* Make a goal (with caching). */

@@ -3,10 +3,19 @@
 -- is enabled
 
 create table if not exists Realisations (
+    id integer primary key autoincrement not null,
     drvPath text not null,
     outputName text not null, -- symbolic output id, usually "out"
     outputPath integer not null,
+    signatures text, -- space-separated list
-    primary key (drvPath, outputName),
     foreign key (outputPath) references ValidPaths(id) on delete cascade
 );
 
+create index if not exists IndexRealisations on Realisations(drvPath, outputName);
+
+create table if not exists RealisationsRefs (
+    referrer integer not null,
+    realisationReference integer,
+    foreign key (referrer) references Realisations(id) on delete cascade,
+    foreign key (realisationReference) references Realisations(id) on delete restrict
+);

@ -227,6 +227,12 @@ struct ClientSettings
|
|||
try {
|
||||
if (name == "ssh-auth-sock") // obsolete
|
||||
;
|
||||
else if (name == settings.experimentalFeatures.name) {
|
||||
// We don’t want to forward the experimental features to
|
||||
// the daemon, as that could cause some pretty weird stuff
|
||||
if (tokenizeString<Strings>(value) != settings.experimentalFeatures.get())
|
||||
debug("Ignoring the client-specified experimental features");
|
||||
}
|
||||
else if (trusted
|
||||
|| name == settings.buildTimeout.name
|
||||
|| name == "connect-timeout"
|
||||
|
|
@ -243,27 +249,10 @@ struct ClientSettings
|
|||
}
|
||||
};
|
||||
|
||||
static void writeValidPathInfo(
|
||||
ref<Store> store,
|
||||
unsigned int clientVersion,
|
||||
Sink & to,
|
||||
std::shared_ptr<const ValidPathInfo> info)
|
||||
{
|
||||
to << (info->deriver ? store->printStorePath(*info->deriver) : "")
|
||||
<< info->narHash.to_string(Base16, false);
|
||||
worker_proto::write(*store, to, info->referencesPossiblyToSelf());
|
||||
to << info->registrationTime << info->narSize;
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
|
||||
to << info->ultimate
|
||||
<< info->sigs
|
||||
<< renderContentAddress(info->ca);
|
||||
}
|
||||
}
|
||||
|
||||
static std::vector<DerivedPath> readDerivedPaths(Store & store, unsigned int clientVersion, Source & from)
|
||||
{
|
||||
std::vector<DerivedPath> reqs;
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 29) {
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 30) {
|
||||
reqs = worker_proto::read(store, from, Phantom<std::vector<DerivedPath>> {});
|
||||
} else {
|
||||
for (auto & s : readStrings<Strings>(from))
|
||||
|
|
@ -422,9 +411,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
}();
|
||||
logger->stopWork();
|
||||
|
||||
to << store->printStorePath(pathInfo->path);
|
||||
writeValidPathInfo(store, clientVersion, to, pathInfo);
|
||||
|
||||
pathInfo->write(to, *store, GET_PROTOCOL_MINOR(clientVersion));
|
||||
} else {
|
||||
HashType hashAlgo;
|
||||
std::string baseName;
|
||||
|
|
@ -471,6 +458,21 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
break;
|
||||
}
|
||||
|
||||
case wopAddMultipleToStore: {
|
||||
bool repair, dontCheckSigs;
|
||||
from >> repair >> dontCheckSigs;
|
||||
if (!trusted && dontCheckSigs)
|
||||
dontCheckSigs = false;
|
||||
|
||||
logger->startWork();
|
||||
FramedSource source(from);
|
||||
store->addMultipleToStore(source,
|
||||
RepairFlag{repair},
|
||||
dontCheckSigs ? NoCheckSigs : CheckSigs);
|
||||
logger->stopWork();
|
||||
break;
|
||||
}
|
||||
|
||||
case wopAddTextToStore: {
|
||||
string suffix = readString(from);
|
||||
string s = readString(from);
|
||||
|
|
@ -770,7 +772,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
if (info) {
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
|
||||
to << 1;
|
||||
writeValidPathInfo(store, clientVersion, to, info);
|
||||
info->write(to, *store, GET_PROTOCOL_MINOR(clientVersion), false);
|
||||
} else {
|
||||
assert(GET_PROTOCOL_MINOR(clientVersion) >= 17);
|
||||
to << 0;
|
||||
|
|
@ -885,11 +887,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
|
||||
case wopRegisterDrvOutput: {
|
||||
logger->startWork();
|
||||
auto outputId = DrvOutput::parse(readString(from));
|
||||
auto outputPath = StorePath(readString(from));
|
||||
auto resolvedDrv = StorePath(readString(from));
|
||||
store->registerDrvOutput(Realisation{
|
||||
.id = outputId, .outPath = outputPath});
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) < 31) {
|
||||
auto outputId = DrvOutput::parse(readString(from));
|
||||
auto outputPath = StorePath(readString(from));
|
||||
store->registerDrvOutput(Realisation{
|
||||
.id = outputId, .outPath = outputPath});
|
||||
} else {
|
||||
auto realisation = worker_proto::read(*store, from, Phantom<Realisation>());
|
||||
store->registerDrvOutput(realisation);
|
||||
}
|
||||
logger->stopWork();
|
||||
break;
|
||||
}
|
||||
|
|
@ -899,9 +905,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
auto outputId = DrvOutput::parse(readString(from));
|
||||
auto info = store->queryRealisation(outputId);
|
||||
logger->stopWork();
|
||||
std::set<StorePath> outPaths;
|
||||
if (info) outPaths.insert(info->outPath);
|
||||
worker_proto::write(*store, to, outPaths);
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) < 31) {
|
||||
std::set<StorePath> outPaths;
|
||||
if (info) outPaths.insert(info->outPath);
|
||||
worker_proto::write(*store, to, outPaths);
|
||||
} else {
|
||||
std::set<Realisation> realisations;
|
||||
if (info) realisations.insert(*info);
|
||||
worker_proto::write(*store, to, realisations);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@@ -568,7 +568,7 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
 }
 
 
-std::map<std::string, Hash> staticOutputHashes(Store& store, const Derivation& drv)
+std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation & drv)
 {
     std::map<std::string, Hash> res;
     std::visit(overloaded {

@ -11,18 +11,33 @@ nlohmann::json DerivedPath::Opaque::toJSON(ref<Store> store) const {
|
|||
return res;
|
||||
}
|
||||
|
||||
nlohmann::json DerivedPathWithHints::Built::toJSON(ref<Store> store) const {
|
||||
nlohmann::json BuiltPath::Built::toJSON(ref<Store> store) const {
|
||||
nlohmann::json res;
|
||||
res["drvPath"] = store->printStorePath(drvPath);
|
||||
for (const auto& [output, path] : outputs) {
|
||||
res["outputs"][output] = path ? store->printStorePath(*path) : "";
|
||||
res["outputs"][output] = store->printStorePath(path);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
nlohmann::json derivedPathsWithHintsToJSON(const DerivedPathsWithHints & buildables, ref<Store> store) {
|
||||
StorePathSet BuiltPath::outPaths() const
|
||||
{
|
||||
return std::visit(
|
||||
overloaded{
|
||||
[](BuiltPath::Opaque p) { return StorePathSet{p.path}; },
|
||||
[](BuiltPath::Built b) {
|
||||
StorePathSet res;
|
||||
for (auto & [_, path] : b.outputs)
|
||||
res.insert(path);
|
||||
return res;
|
||||
},
|
||||
}, raw()
|
||||
);
|
||||
}
|
||||
|
||||
nlohmann::json derivedPathsWithHintsToJSON(const BuiltPaths & buildables, ref<Store> store) {
|
||||
auto res = nlohmann::json::array();
|
||||
for (const DerivedPathWithHints & buildable : buildables) {
|
||||
for (const BuiltPath & buildable : buildables) {
|
||||
std::visit([&res, store](const auto & buildable) {
|
||||
res.push_back(buildable.toJSON(store));
|
||||
}, buildable.raw());
|
||||
|
|
@ -62,7 +77,7 @@ DerivedPath::Built DerivedPath::Built::parse(const Store & store, std::string_vi
|
|||
auto outputsS = s.substr(n + 1);
|
||||
std::set<string> outputs;
|
||||
if (outputsS != "*")
|
||||
outputs = tokenizeString<std::set<string>>(outputsS);
|
||||
outputs = tokenizeString<std::set<string>>(outputsS, ",");
|
||||
return {drvPath, outputs};
|
||||
}
|
||||
|
||||
|
|
@ -74,4 +89,30 @@ DerivedPath DerivedPath::parse(const Store & store, std::string_view s)
|
|||
: (DerivedPath) DerivedPath::Built::parse(store, s);
|
||||
}
|
||||
|
||||
RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
|
||||
{
|
||||
RealisedPath::Set res;
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](BuiltPath::Opaque p) { res.insert(p.path); },
|
||||
[&](BuiltPath::Built p) {
|
||||
auto drvHashes =
|
||||
staticOutputHashes(store, store.readDerivation(p.drvPath));
|
||||
for (auto& [outputName, outputPath] : p.outputs) {
|
||||
if (settings.isExperimentalFeatureEnabled(
|
||||
"ca-derivations")) {
|
||||
auto thisRealisation = store.queryRealisation(
|
||||
DrvOutput{drvHashes.at(outputName), outputName});
|
||||
assert(thisRealisation); // We’ve built it, so we must have
                         // the realisation
|
||||
res.insert(*thisRealisation);
|
||||
} else {
|
||||
res.insert(outputPath);
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
raw());
|
||||
return res;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
#include "util.hh"
|
||||
#include "path.hh"
|
||||
#include "realisation.hh"
|
||||
|
||||
#include <optional>
|
||||
#include <variant>
|
||||
|
|
@ -80,51 +81,44 @@ struct DerivedPath : _DerivedPathRaw {
|
|||
/**
|
||||
* A built derived path with hints in the form of optional concrete output paths.
|
||||
*
|
||||
* See 'DerivedPathWithHints' for more an explanation.
|
||||
* See 'BuiltPath' for more of an explanation.
|
||||
*/
|
||||
struct DerivedPathWithHintsBuilt {
|
||||
struct BuiltPathBuilt {
|
||||
StorePath drvPath;
|
||||
std::map<std::string, std::optional<StorePath>> outputs;
|
||||
std::map<std::string, StorePath> outputs;
|
||||
|
||||
nlohmann::json toJSON(ref<Store> store) const;
|
||||
static DerivedPathWithHintsBuilt parse(const Store & store, std::string_view);
|
||||
static BuiltPathBuilt parse(const Store & store, std::string_view);
|
||||
};
|
||||
|
||||
using _DerivedPathWithHintsRaw = std::variant<
|
||||
using _BuiltPathRaw = std::variant<
|
||||
DerivedPath::Opaque,
|
||||
DerivedPathWithHintsBuilt
|
||||
BuiltPathBuilt
|
||||
>;
|
||||
|
||||
/**
|
||||
* A derived path with hints in the form of optional concrete output paths in the built case.
|
||||
*
|
||||
* This type is currently just used by the CLI. The paths are filled in
|
||||
* during evaluation for derivations that know what paths they will
|
||||
* produce in advanced, i.e. input-addressed or fixed-output content
|
||||
* addressed derivations.
|
||||
*
|
||||
* That isn't very good, because it puts floating content-addressed
|
||||
* derivations "at a disadvantage". It would be better to never rely on
|
||||
* the output path of unbuilt derivations, and exclusively use the
|
||||
* realizations types to work with built derivations' concrete output
|
||||
* paths.
|
||||
* A built path. Similar to a `DerivedPath`, but enriched with the corresponding
|
||||
* output path(s).
|
||||
*/
|
||||
// FIXME Stop using and delete this, or if that is not possible move out of libstore to libcmd.
|
||||
struct DerivedPathWithHints : _DerivedPathWithHintsRaw {
|
||||
using Raw = _DerivedPathWithHintsRaw;
|
||||
struct BuiltPath : _BuiltPathRaw {
|
||||
using Raw = _BuiltPathRaw;
|
||||
using Raw::Raw;
|
||||
|
||||
using Opaque = DerivedPathOpaque;
|
||||
using Built = DerivedPathWithHintsBuilt;
|
||||
using Built = BuiltPathBuilt;
|
||||
|
||||
inline const Raw & raw() const {
|
||||
return static_cast<const Raw &>(*this);
|
||||
}
|
||||
|
||||
StorePathSet outPaths() const;
|
||||
RealisedPath::Set toRealisedPaths(Store & store) const;
|
||||
|
||||
};
|
||||
|
||||
typedef std::vector<DerivedPathWithHints> DerivedPathsWithHints;
|
||||
typedef std::vector<DerivedPath> DerivedPaths;
|
||||
typedef std::vector<BuiltPath> BuiltPaths;
|
||||
|
||||
nlohmann::json derivedPathsWithHintsToJSON(const DerivedPathsWithHints & buildables, ref<Store> store);
|
||||
nlohmann::json derivedPathsWithHintsToJSON(const BuiltPaths & buildables, ref<Store> store);
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -43,11 +43,6 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
|
|||
RepairFlag repair, CheckSigsFlag checkSigs) override
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
StorePath addToStore(const string & name, const Path & srcPath,
|
||||
FileIngestionMethod method, HashType hashAlgo,
|
||||
PathFilter & filter, RepairFlag repair) override
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
StorePath addTextToStore(const string & name, const string & s,
|
||||
const StorePathSet & references, RepairFlag repair) override
|
||||
{ unsupported("addTextToStore"); }
|
||||
|
|
|
|||
|
|
@@ -7,7 +7,7 @@
#include "finally.hh"
#include "callback.hh"

#ifdef ENABLE_S3
#if ENABLE_S3
#include <aws/core/client/ClientConfiguration.h>
#endif

@@ -148,7 +148,7 @@ struct curlFileTransfer : public FileTransfer
}

LambdaSink finalSink;
std::shared_ptr<CompressionSink> decompressionSink;
std::shared_ptr<FinishSink> decompressionSink;
std::optional<StringSink> errorSink;

std::exception_ptr writeException;

@@ -665,7 +665,7 @@ struct curlFileTransfer : public FileTransfer
writeFull(wakeupPipe.writeSide.get(), " ");
}

#ifdef ENABLE_S3
#if ENABLE_S3
std::tuple<std::string, std::string, Store::Params> parseS3Uri(std::string uri)
{
auto [path, params] = splitUriAndParams(uri);

@@ -688,7 +688,7 @@ struct curlFileTransfer : public FileTransfer
if (hasPrefix(request.uri, "s3://")) {
// FIXME: do this on a worker thread
try {
#ifdef ENABLE_S3
#if ENABLE_S3
auto [bucketName, key, params] = parseS3Uri(request.uri);

std::string profile = get(params, "profile").value_or("");

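The #ifdef → #if change matters when the build system defines ENABLE_S3 unconditionally (to 1 or 0). A standalone illustration of the difference, not taken from the Nix tree:

// Standalone illustration. Suppose the compiler is invoked with either
// -DENABLE_S3=1 or -DENABLE_S3=0:
#include <cstdio>

#ifndef ENABLE_S3
#define ENABLE_S3 0 // pretend the build passed -DENABLE_S3=0
#endif

int main()
{
#ifdef ENABLE_S3
    // Taken even when ENABLE_S3 is 0: #ifdef only checks that the macro is defined.
    std::printf("#ifdef ENABLE_S3: branch taken\n");
#endif
#if ENABLE_S3
    // Taken only when ENABLE_S3 expands to a non-zero value.
    std::printf("#if ENABLE_S3: branch taken\n");
#endif
}
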
@@ -775,7 +775,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)

try {

AutoCloseDir dir(opendir(realStoreDir.c_str()));
AutoCloseDir dir(opendir(realStoreDir.get().c_str()));
if (!dir) throw SysError("opening directory '%1%'", realStoreDir);

/* Read the store and immediately delete all paths that

@@ -856,7 +856,7 @@ void LocalStore::autoGC(bool sync)
return std::stoll(readFile(*fakeFreeSpaceFile));

struct statvfs st;
if (statvfs(realStoreDir.c_str(), &st))
if (statvfs(realStoreDir.get().c_str(), &st))
throw SysError("getting filesystem info about '%s'", realStoreDir);

return (uint64_t) st.f_bavail * st.f_frsize;

@@ -166,7 +166,7 @@ bool Settings::isExperimentalFeatureEnabled(const std::string & name)
}

MissingExperimentalFeature::MissingExperimentalFeature(std::string feature)
: Error("experimental Nix feature '%1%' is disabled; use '--experimental-features %1%' to override", feature)
: Error("experimental Nix feature '%1%' is disabled; use '--extra-experimental-features %1%' to override", feature)
, missingFeature(feature)
{}


@@ -617,8 +617,10 @@ public:
Strings{"https://cache.nixos.org/"},
"substituters",
R"(
A list of URLs of substituters, separated by whitespace. The default
is `https://cache.nixos.org`.
A list of URLs of substituters, separated by whitespace. Substituters
are tried based on their Priority value, which each substituter can set
independently. Lower value means higher priority.
The default is `https://cache.nixos.org`, with a Priority of 40.
)",
{"binary-caches"}};

@@ -701,7 +703,7 @@ public:
send a series of commands to modify various settings to stdout. The
currently recognized commands are:

- `extra-sandbox-paths`
- `extra-sandbox-paths`\
Pass a list of files and directories to be included in the
sandbox for this build. One entry per line, terminated by an
empty line. Entries have the same format as `sandbox-paths`.

@@ -954,6 +956,9 @@ public:
resolves to a different location from that of the build machine. You
can enable this setting if you are sure you're not going to do that.
)"};

Setting<bool> useRegistries{this, true, "use-registries",
"Whether to use flake registries to resolve flake references."};
};


@@ -57,8 +57,8 @@ public:
{
// FIXME: do this lazily?
if (auto cacheInfo = diskCache->cacheExists(cacheUri)) {
wantMassQuery.setDefault(cacheInfo->wantMassQuery ? "true" : "false");
priority.setDefault(fmt("%d", cacheInfo->priority));
wantMassQuery.setDefault(cacheInfo->wantMassQuery);
priority.setDefault(cacheInfo->priority);
} else {
try {
BinaryCacheStore::init();

@ -82,9 +82,20 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
|||
conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
|
||||
conn->to.flush();
|
||||
|
||||
unsigned int magic = readInt(conn->from);
|
||||
if (magic != SERVE_MAGIC_2)
|
||||
throw Error("protocol mismatch with 'nix-store --serve' on '%s'", host);
|
||||
StringSink saved;
|
||||
try {
|
||||
TeeSource tee(conn->from, saved);
|
||||
unsigned int magic = readInt(tee);
|
||||
if (magic != SERVE_MAGIC_2)
|
||||
throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
|
||||
} catch (SerialisationError & e) {
|
||||
/* In case the other side is waiting for our input,
|
||||
close it. */
|
||||
conn->sshConn->in.close();
|
||||
auto msg = conn->from.drain();
|
||||
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
|
||||
host, chomp(*saved.s + msg));
|
||||
}
|
||||
conn->remoteVersion = readInt(conn->from);
|
||||
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
|
||||
throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);
|
||||
|
|
@ -266,8 +277,11 @@ public:
|
|||
return status;
|
||||
}
|
||||
|
||||
void buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode) override
|
||||
void buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override
|
||||
{
|
||||
if (evalStore && evalStore.get() != this)
|
||||
throw Error("building on an SSH store is incompatible with '--eval-store'");
|
||||
|
||||
auto conn(connections->get());
|
||||
|
||||
conn->to << cmdBuildPaths;
|
||||
|
|
|
|||
|
|
@ -93,7 +93,7 @@ protected:
|
|||
void LocalBinaryCacheStore::init()
|
||||
{
|
||||
createDirs(binaryCacheDir + "/nar");
|
||||
createDirs(binaryCacheDir + realisationsPrefix);
|
||||
createDirs(binaryCacheDir + "/" + realisationsPrefix);
|
||||
if (writeDebugInfo)
|
||||
createDirs(binaryCacheDir + "/debuginfo");
|
||||
BinaryCacheStore::init();
|
||||
|
|
|
|||
|
|
@ -18,6 +18,9 @@ struct LocalFSStoreConfig : virtual StoreConfig
|
|||
const PathSetting logDir{(StoreConfig*) this, false,
|
||||
rootDir != "" ? rootDir + "/nix/var/log/nix" : settings.nixLogDir,
|
||||
"log", "directory where Nix will store state"};
|
||||
const PathSetting realStoreDir{(StoreConfig*) this, false,
|
||||
rootDir != "" ? rootDir + "/nix/store" : storeDir, "real",
|
||||
"physical path to the Nix store"};
|
||||
};
|
||||
|
||||
class LocalFSStore : public virtual LocalFSStoreConfig, public virtual Store
|
||||
|
|
@ -34,7 +37,7 @@ public:
|
|||
/* Register a permanent GC root. */
|
||||
Path addPermRoot(const StorePath & storePath, const Path & gcRoot);
|
||||
|
||||
virtual Path getRealStoreDir() { return storeDir; }
|
||||
virtual Path getRealStoreDir() { return realStoreDir; }
|
||||
|
||||
Path toRealPath(const Path & storePath) override
|
||||
{
|
||||
|
|
|
|||
|
|
@ -53,12 +53,15 @@ struct LocalStore::State::Stmts {
|
|||
SQLiteStmt InvalidatePath;
|
||||
SQLiteStmt AddDerivationOutput;
|
||||
SQLiteStmt RegisterRealisedOutput;
|
||||
SQLiteStmt UpdateRealisedOutput;
|
||||
SQLiteStmt QueryValidDerivers;
|
||||
SQLiteStmt QueryDerivationOutputs;
|
||||
SQLiteStmt QueryRealisedOutput;
|
||||
SQLiteStmt QueryAllRealisedOutputs;
|
||||
SQLiteStmt QueryPathFromHashPart;
|
||||
SQLiteStmt QueryValidPaths;
|
||||
SQLiteStmt QueryRealisationReferences;
|
||||
SQLiteStmt AddRealisationReference;
|
||||
};
|
||||
|
||||
int getSchema(Path schemaPath)
|
||||
|
|
@ -76,7 +79,7 @@ int getSchema(Path schemaPath)
|
|||
|
||||
void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd)
|
||||
{
|
||||
const int nixCASchemaVersion = 1;
|
||||
const int nixCASchemaVersion = 2;
|
||||
int curCASchema = getSchema(schemaPath);
|
||||
if (curCASchema != nixCASchemaVersion) {
|
||||
if (curCASchema > nixCASchemaVersion) {
|
||||
|
|
@ -94,7 +97,39 @@ void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd)
|
|||
#include "ca-specific-schema.sql.gen.hh"
|
||||
;
|
||||
db.exec(schema);
|
||||
curCASchema = nixCASchemaVersion;
|
||||
}
|
||||
|
||||
if (curCASchema < 2) {
|
||||
SQLiteTxn txn(db);
|
||||
// Ugly little sql dance to add a new `id` column and make it the primary key
|
||||
db.exec(R"(
|
||||
create table Realisations2 (
|
||||
id integer primary key autoincrement not null,
|
||||
drvPath text not null,
|
||||
outputName text not null, -- symbolic output id, usually "out"
|
||||
outputPath integer not null,
|
||||
signatures text, -- space-separated list
|
||||
foreign key (outputPath) references ValidPaths(id) on delete cascade
|
||||
);
|
||||
insert into Realisations2 (drvPath, outputName, outputPath, signatures)
|
||||
select drvPath, outputName, outputPath, signatures from Realisations;
|
||||
drop table Realisations;
|
||||
alter table Realisations2 rename to Realisations;
|
||||
)");
|
||||
db.exec(R"(
|
||||
create index if not exists IndexRealisations on Realisations(drvPath, outputName);
|
||||
|
||||
create table if not exists RealisationsRefs (
|
||||
referrer integer not null,
|
||||
realisationReference integer,
|
||||
foreign key (referrer) references Realisations(id) on delete cascade,
|
||||
foreign key (realisationReference) references Realisations(id) on delete restrict
|
||||
);
|
||||
)");
|
||||
txn.commit();
|
||||
}
|
||||
|
||||
writeFile(schemaPath, fmt("%d", nixCASchemaVersion));
|
||||
lockFile(lockFd.get(), ltRead, true);
|
||||
}
|
||||
|
|
@ -106,9 +141,6 @@ LocalStore::LocalStore(const Params & params)
|
|||
, LocalStoreConfig(params)
|
||||
, Store(params)
|
||||
, LocalFSStore(params)
|
||||
, realStoreDir_{this, false, rootDir != "" ? rootDir + "/nix/store" : storeDir, "real",
|
||||
"physical path to the Nix store"}
|
||||
, realStoreDir(realStoreDir_)
|
||||
, dbDir(stateDir + "/db")
|
||||
, linksDir(realStoreDir + "/.links")
|
||||
, reservedPath(dbDir + "/reserved")
|
||||
|
|
@ -153,13 +185,13 @@ LocalStore::LocalStore(const Params & params)
|
|||
printError("warning: the group '%1%' specified in 'build-users-group' does not exist", settings.buildUsersGroup);
|
||||
else {
|
||||
struct stat st;
|
||||
if (stat(realStoreDir.c_str(), &st))
|
||||
if (stat(realStoreDir.get().c_str(), &st))
|
||||
throw SysError("getting attributes of path '%1%'", realStoreDir);
|
||||
|
||||
if (st.st_uid != 0 || st.st_gid != gr->gr_gid || (st.st_mode & ~S_IFMT) != perm) {
|
||||
if (chown(realStoreDir.c_str(), 0, gr->gr_gid) == -1)
|
||||
if (chown(realStoreDir.get().c_str(), 0, gr->gr_gid) == -1)
|
||||
throw SysError("changing ownership of path '%1%'", realStoreDir);
|
||||
if (chmod(realStoreDir.c_str(), perm) == -1)
|
||||
if (chmod(realStoreDir.get().c_str(), perm) == -1)
|
||||
throw SysError("changing permissions on path '%1%'", realStoreDir);
|
||||
}
|
||||
}
|
||||
|
|
@ -310,13 +342,22 @@ LocalStore::LocalStore(const Params & params)
|
|||
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
|
||||
state->stmts->RegisterRealisedOutput.create(state->db,
|
||||
R"(
|
||||
insert or replace into Realisations (drvPath, outputName, outputPath, signatures)
|
||||
insert into Realisations (drvPath, outputName, outputPath, signatures)
|
||||
values (?, ?, (select id from ValidPaths where path = ?), ?)
|
||||
;
|
||||
)");
|
||||
state->stmts->UpdateRealisedOutput.create(state->db,
|
||||
R"(
|
||||
update Realisations
|
||||
set signatures = ?
|
||||
where
|
||||
drvPath = ? and
|
||||
outputName = ?
|
||||
;
|
||||
)");
|
||||
state->stmts->QueryRealisedOutput.create(state->db,
|
||||
R"(
|
||||
select Output.path, Realisations.signatures from Realisations
|
||||
select Realisations.id, Output.path, Realisations.signatures from Realisations
|
||||
inner join ValidPaths as Output on Output.id = Realisations.outputPath
|
||||
where drvPath = ? and outputName = ?
|
||||
;
|
||||
|
|
@ -328,6 +369,19 @@ LocalStore::LocalStore(const Params & params)
|
|||
where drvPath = ?
|
||||
;
|
||||
)");
|
||||
state->stmts->QueryRealisationReferences.create(state->db,
|
||||
R"(
|
||||
select drvPath, outputName from Realisations
|
||||
join RealisationsRefs on realisationReference = Realisations.id
|
||||
where referrer = ?;
|
||||
)");
|
||||
state->stmts->AddRealisationReference.create(state->db,
|
||||
R"(
|
||||
insert or replace into RealisationsRefs (referrer, realisationReference)
|
||||
values (
|
||||
(select id from Realisations where drvPath = ? and outputName = ?),
|
||||
(select id from Realisations where drvPath = ? and outputName = ?));
|
||||
)");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -437,14 +491,14 @@ void LocalStore::makeStoreWritable()
|
|||
if (getuid() != 0) return;
|
||||
/* Check if /nix/store is on a read-only mount. */
|
||||
struct statvfs stat;
|
||||
if (statvfs(realStoreDir.c_str(), &stat) != 0)
|
||||
if (statvfs(realStoreDir.get().c_str(), &stat) != 0)
|
||||
throw SysError("getting info about the Nix store mount point");
|
||||
|
||||
if (stat.f_flag & ST_RDONLY) {
|
||||
if (unshare(CLONE_NEWNS) == -1)
|
||||
throw SysError("setting up a private mount namespace");
|
||||
|
||||
if (mount(0, realStoreDir.c_str(), "none", MS_REMOUNT | MS_BIND, 0) == -1)
|
||||
if (mount(0, realStoreDir.get().c_str(), "none", MS_REMOUNT | MS_BIND, 0) == -1)
|
||||
throw SysError("remounting %1% writable", realStoreDir);
|
||||
}
|
||||
#endif
|
||||
|
|
@ -664,14 +718,54 @@ void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag check
|
|||
void LocalStore::registerDrvOutput(const Realisation & info)
|
||||
{
|
||||
settings.requireExperimentalFeature("ca-derivations");
|
||||
auto state(_state.lock());
|
||||
retrySQLite<void>([&]() {
|
||||
state->stmts->RegisterRealisedOutput.use()
|
||||
(info.id.strHash())
|
||||
(info.id.outputName)
|
||||
(printStorePath(info.outPath))
|
||||
(concatStringsSep(" ", info.signatures))
|
||||
.exec();
|
||||
auto state(_state.lock());
|
||||
if (auto oldR = queryRealisation_(*state, info.id)) {
|
||||
if (info.isCompatibleWith(*oldR)) {
|
||||
auto combinedSignatures = oldR->signatures;
|
||||
combinedSignatures.insert(info.signatures.begin(),
|
||||
info.signatures.end());
|
||||
state->stmts->UpdateRealisedOutput.use()
|
||||
(concatStringsSep(" ", combinedSignatures))
|
||||
(info.id.strHash())
|
||||
(info.id.outputName)
|
||||
.exec();
|
||||
} else {
|
||||
throw Error("Trying to register a realisation of '%s', but we already "
|
||||
"have another one locally.\n"
|
||||
"Local: %s\n"
|
||||
"Remote: %s",
|
||||
info.id.to_string(),
|
||||
printStorePath(oldR->outPath),
|
||||
printStorePath(info.outPath)
|
||||
);
|
||||
}
|
||||
} else {
|
||||
state->stmts->RegisterRealisedOutput.use()
|
||||
(info.id.strHash())
|
||||
(info.id.outputName)
|
||||
(printStorePath(info.outPath))
|
||||
(concatStringsSep(" ", info.signatures))
|
||||
.exec();
|
||||
}
|
||||
for (auto & [outputId, depPath] : info.dependentRealisations) {
|
||||
auto localRealisation = queryRealisationCore_(*state, outputId);
|
||||
if (!localRealisation)
|
||||
throw Error("unable to register the derivation '%s' as it "
|
||||
"depends on the non existent '%s'",
|
||||
info.id.to_string(), outputId.to_string());
|
||||
if (localRealisation->second.outPath != depPath)
|
||||
throw Error("unable to register the derivation '%s' as it "
|
||||
"depends on a realisation of '%s' that doesn’t"
|
||||
"match what we have locally",
|
||||
info.id.to_string(), outputId.to_string());
|
||||
state->stmts->AddRealisationReference.use()
|
||||
(info.id.strHash())
|
||||
(info.id.outputName)
|
||||
(outputId.strHash())
|
||||
(outputId.outputName)
|
||||
.exec();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
|
@ -978,14 +1072,19 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths)
|
|||
}
|
||||
|
||||
|
||||
// FIXME: move this, it's not specific to LocalStore.
|
||||
void LocalStore::querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos)
|
||||
{
|
||||
if (!settings.useSubstitutes) return;
|
||||
for (auto & sub : getDefaultSubstituters()) {
|
||||
for (auto & path : paths) {
|
||||
if (infos.count(path.first))
|
||||
// Choose first succeeding substituter.
|
||||
continue;
|
||||
|
||||
auto subPath(path.first);
|
||||
|
||||
// recompute store path so that we can use a different store root
|
||||
// Recompute store path so that we can use a different store root.
|
||||
if (path.second) {
|
||||
subPath = makeFixedOutputPathFromCA({
|
||||
.name = std::string { path.first.name() },
|
||||
|
|
@ -1151,23 +1250,15 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
|
|||
|
||||
deletePath(realPath);
|
||||
|
||||
// text hashing has long been allowed to have non-self-references because it is used for drv files.
|
||||
if (info.ca.has_value() && !info.references.empty() && !(std::holds_alternative<TextHash>(*info.ca) && !info.hasSelfReference))
|
||||
settings.requireExperimentalFeature("ca-references");
|
||||
|
||||
/* While restoring the path from the NAR, compute the hash
|
||||
of the NAR. */
|
||||
std::unique_ptr<AbstractHashSink> hashSink;
|
||||
if (!info.ca.has_value() || !info.hasSelfReference)
|
||||
hashSink = std::make_unique<HashSink>(htSHA256);
|
||||
else
|
||||
hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
|
||||
HashSink hashSink(htSHA256);
|
||||
|
||||
TeeSource wrapperSource { source, *hashSink };
|
||||
TeeSource wrapperSource { source, hashSink };
|
||||
|
||||
restorePath(realPath, wrapperSource);
|
||||
|
||||
auto hashResult = hashSink->finish();
|
||||
auto hashResult = hashSink.finish();
|
||||
|
||||
if (hashResult.first != info.narHash)
|
||||
throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s",
|
||||
|
|
@ -1177,6 +1268,31 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
|
|||
throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s",
|
||||
printStorePath(info.path), info.narSize, hashResult.second);
|
||||
|
||||
if (info.ca) {
|
||||
if (auto foHash = std::get_if<FixedOutputHash>(&*info.ca)) {
|
||||
auto actualFoHash = hashCAPath(
|
||||
foHash->method,
|
||||
foHash->hash.type,
|
||||
info.path
|
||||
);
|
||||
if (foHash->hash != actualFoHash.hash) {
|
||||
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
|
||||
printStorePath(info.path),
|
||||
foHash->hash.to_string(Base32, true),
|
||||
actualFoHash.hash.to_string(Base32, true));
|
||||
}
|
||||
}
|
||||
if (auto textHash = std::get_if<TextHash>(&*info.ca)) {
|
||||
auto actualTextHash = hashString(htSHA256, readFile(realPath));
|
||||
if (textHash->hash != actualTextHash) {
|
||||
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
|
||||
printStorePath(info.path),
|
||||
textHash->hash.to_string(Base32, true),
|
||||
actualTextHash.to_string(Base32, true));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
autoGC();
|
||||
|
||||
canonicalisePathMetaData(realPath, -1);
|
||||
|
|
@ -1458,14 +1574,10 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
|
|||
/* Check the content hash (optionally - slow). */
|
||||
printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));
|
||||
|
||||
std::unique_ptr<AbstractHashSink> hashSink;
|
||||
if (!info->ca || !info->hasSelfReference)
|
||||
hashSink = std::make_unique<HashSink>(info->narHash.type);
|
||||
else
|
||||
hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
|
||||
auto hashSink = HashSink(info->narHash.type);
|
||||
|
||||
dumpPath(Store::toRealPath(i), *hashSink);
|
||||
auto current = hashSink->finish();
|
||||
dumpPath(Store::toRealPath(i), hashSink);
|
||||
auto current = hashSink.finish();
|
||||
|
||||
if (info->narHash != nullHash && info->narHash != current.first) {
|
||||
printError("path '%s' was modified! expected hash '%s', got '%s'",
|
||||
|
|
@ -1683,19 +1795,97 @@ void LocalStore::createUser(const std::string & userName, uid_t userId)
|
|||
}
|
||||
}
|
||||
|
||||
std::optional<const Realisation> LocalStore::queryRealisation(
|
||||
const DrvOutput& id) {
|
||||
typedef std::optional<const Realisation> Ret;
|
||||
return retrySQLite<Ret>([&]() -> Ret {
|
||||
std::optional<std::pair<int64_t, Realisation>> LocalStore::queryRealisationCore_(
|
||||
LocalStore::State & state,
|
||||
const DrvOutput & id)
|
||||
{
|
||||
auto useQueryRealisedOutput(
|
||||
state.stmts->QueryRealisedOutput.use()
|
||||
(id.strHash())
|
||||
(id.outputName));
|
||||
if (!useQueryRealisedOutput.next())
|
||||
return std::nullopt;
|
||||
auto realisationDbId = useQueryRealisedOutput.getInt(0);
|
||||
auto outputPath = parseStorePath(useQueryRealisedOutput.getStr(1));
|
||||
auto signatures =
|
||||
tokenizeString<StringSet>(useQueryRealisedOutput.getStr(2));
|
||||
|
||||
return {{
|
||||
realisationDbId,
|
||||
Realisation{
|
||||
.id = id,
|
||||
.outPath = outputPath,
|
||||
.signatures = signatures,
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
std::optional<const Realisation> LocalStore::queryRealisation_(
|
||||
LocalStore::State & state,
|
||||
const DrvOutput & id)
|
||||
{
|
||||
auto maybeCore = queryRealisationCore_(state, id);
|
||||
if (!maybeCore)
|
||||
return std::nullopt;
|
||||
auto [realisationDbId, res] = *maybeCore;
|
||||
|
||||
std::map<DrvOutput, StorePath> dependentRealisations;
|
||||
auto useRealisationRefs(
|
||||
state.stmts->QueryRealisationReferences.use()
|
||||
(realisationDbId));
|
||||
while (useRealisationRefs.next()) {
|
||||
auto depId = DrvOutput {
|
||||
Hash::parseAnyPrefixed(useRealisationRefs.getStr(0)),
|
||||
useRealisationRefs.getStr(1),
|
||||
};
|
||||
auto dependentRealisation = queryRealisationCore_(state, depId);
|
||||
assert(dependentRealisation); // Enforced by the db schema
|
||||
auto outputPath = dependentRealisation->second.outPath;
|
||||
dependentRealisations.insert({depId, outputPath});
|
||||
}
|
||||
|
||||
res.dependentRealisations = dependentRealisations;
|
||||
|
||||
return { res };
|
||||
}
|
||||
|
||||
std::optional<const Realisation>
|
||||
LocalStore::queryRealisation(const DrvOutput & id)
|
||||
{
|
||||
return retrySQLite<std::optional<const Realisation>>([&]() {
|
||||
auto state(_state.lock());
|
||||
auto use(state->stmts->QueryRealisedOutput.use()(id.strHash())(
|
||||
id.outputName));
|
||||
if (!use.next())
|
||||
return std::nullopt;
|
||||
auto outputPath = parseStorePath(use.getStr(0));
|
||||
auto signatures = tokenizeString<StringSet>(use.getStr(1));
|
||||
return Ret{Realisation{
|
||||
.id = id, .outPath = outputPath, .signatures = signatures}};
|
||||
return queryRealisation_(*state, id);
|
||||
});
|
||||
}
|
||||
|
||||
FixedOutputHash LocalStore::hashCAPath(
|
||||
const FileIngestionMethod & method, const HashType & hashType,
|
||||
const StorePath & path)
|
||||
{
|
||||
return hashCAPath(method, hashType, Store::toRealPath(path), path.hashPart());
|
||||
}
|
||||
|
||||
FixedOutputHash LocalStore::hashCAPath(
|
||||
const FileIngestionMethod & method,
|
||||
const HashType & hashType,
|
||||
const Path & path,
|
||||
const std::string_view pathHash
|
||||
)
|
||||
{
|
||||
HashModuloSink caSink ( hashType, std::string(pathHash) );
|
||||
switch (method) {
|
||||
case FileIngestionMethod::Recursive:
|
||||
dumpPath(path, caSink);
|
||||
break;
|
||||
case FileIngestionMethod::Flat:
|
||||
readFile(path, caSink);
|
||||
break;
|
||||
}
|
||||
auto hash = caSink.finish().first;
|
||||
return FixedOutputHash{
|
||||
.method = method,
|
||||
.hash = hash,
|
||||
};
|
||||
}
|
||||
|
||||
} // namespace nix
|
||||
|
|
|
|||
|
|
@ -83,9 +83,6 @@ private:
|
|||
|
||||
public:
|
||||
|
||||
PathSetting realStoreDir_;
|
||||
|
||||
const Path realStoreDir;
|
||||
const Path dbDir;
|
||||
const Path linksDir;
|
||||
const Path reservedPath;
|
||||
|
|
@ -206,6 +203,8 @@ public:
|
|||
void registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs) override;
|
||||
void cacheDrvOutputMapping(State & state, const uint64_t deriver, const string & outputName, const StorePath & output);
|
||||
|
||||
std::optional<const Realisation> queryRealisation_(State & state, const DrvOutput & id);
|
||||
std::optional<std::pair<int64_t, Realisation>> queryRealisationCore_(State & state, const DrvOutput & id);
|
||||
std::optional<const Realisation> queryRealisation(const DrvOutput&) override;
|
||||
|
||||
private:
|
||||
|
|
@ -279,10 +278,21 @@ private:
|
|||
void signPathInfo(ValidPathInfo & info);
|
||||
void signRealisation(Realisation &);
|
||||
|
||||
Path getRealStoreDir() override { return realStoreDir; }
|
||||
|
||||
void createUser(const std::string & userName, uid_t userId) override;
|
||||
|
||||
// XXX: Make a generic `Store` method
|
||||
FixedOutputHash hashCAPath(
|
||||
const FileIngestionMethod & method,
|
||||
const HashType & hashType,
|
||||
const StorePath & path);
|
||||
|
||||
FixedOutputHash hashCAPath(
|
||||
const FileIngestionMethod & method,
|
||||
const HashType & hashType,
|
||||
const Path & path,
|
||||
const std::string_view pathHash
|
||||
);
|
||||
|
||||
friend struct LocalDerivationGoal;
|
||||
friend struct PathSubstitutionGoal;
|
||||
friend struct SubstitutionGoal;
|
||||
|
|
|
|||
|
|
@ -8,12 +8,12 @@ libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc)
|
|||
|
||||
libstore_LIBS = libutil
|
||||
|
||||
libstore_LDFLAGS = $(SQLITE3_LIBS) -lbz2 $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
|
||||
ifneq ($(OS), FreeBSD)
|
||||
libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
|
||||
ifdef HOST_LINUX
|
||||
libstore_LDFLAGS += -ldl
|
||||
endif
|
||||
|
||||
ifeq ($(OS), Darwin)
|
||||
ifdef HOST_DARWIN
|
||||
libstore_FILES = sandbox-defaults.sb sandbox-minimal.sb sandbox-network.sb
|
||||
endif
|
||||
|
||||
|
|
@ -23,7 +23,7 @@ ifeq ($(ENABLE_S3), 1)
|
|||
libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core
|
||||
endif
|
||||
|
||||
ifeq ($(OS), SunOS)
|
||||
ifdef HOST_SOLARIS
|
||||
libstore_LDFLAGS += -lsocket
|
||||
endif
|
||||
|
||||
|
|
@ -60,7 +60,7 @@ $(d)/build.cc:
|
|||
|
||||
clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
|
||||
|
||||
$(eval $(call install-file-in, $(d)/nix-store.pc, $(prefix)/lib/pkgconfig, 0644))
|
||||
$(eval $(call install-file-in, $(d)/nix-store.pc, $(libdir)/pkgconfig, 0644))
|
||||
|
||||
$(foreach i, $(wildcard src/libstore/builtins/*.hh), \
|
||||
$(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644)))
|
||||
|
|
|
|||
|
|
@@ -16,13 +16,18 @@ Machine::Machine(decltype(storeUri) storeUri,
decltype(mandatoryFeatures) mandatoryFeatures,
decltype(sshPublicHostKey) sshPublicHostKey) :
storeUri(
// Backwards compatibility: if the URI is a hostname,
// prepend ssh://.
// Backwards compatibility: if the URI is schemeless, is not a path,
// and is not one of the special store connection words, prepend
// ssh://.
storeUri.find("://") != std::string::npos
|| hasPrefix(storeUri, "local")
|| hasPrefix(storeUri, "remote")
|| hasPrefix(storeUri, "auto")
|| hasPrefix(storeUri, "/")
|| storeUri.find("/") != std::string::npos
|| storeUri == "auto"
|| storeUri == "daemon"
|| storeUri == "local"
|| hasPrefix(storeUri, "auto?")
|| hasPrefix(storeUri, "daemon?")
|| hasPrefix(storeUri, "local?")
|| hasPrefix(storeUri, "?")
? storeUri
: "ssh://" + storeUri),
systemTypes(systemTypes),

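For clarity, a standalone restatement of the new normalisation rule; `normaliseStoreUri` is a hypothetical helper that mirrors the condition above rather than calling the real Machine constructor:

#include <cassert>
#include <string>

// Mirrors the condition above: URIs with a scheme, paths, and the special
// words auto/daemon/local (optionally followed by ?params) are left alone;
// anything else is treated as an SSH host name.
static std::string normaliseStoreUri(const std::string & s)
{
    auto hasPrefix = [](const std::string & str, const std::string & p) {
        return str.compare(0, p.size(), p) == 0;
    };
    bool keep =
        s.find("://") != std::string::npos
        || s.find("/") != std::string::npos
        || s == "auto" || s == "daemon" || s == "local"
        || hasPrefix(s, "auto?") || hasPrefix(s, "daemon?")
        || hasPrefix(s, "local?") || hasPrefix(s, "?");
    return keep ? s : "ssh://" + s;
}

int main()
{
    assert(normaliseStoreUri("builder.example.org") == "ssh://builder.example.org");
    assert(normaliseStoreUri("ssh-ng://builder.example.org") == "ssh-ng://builder.example.org");
    assert(normaliseStoreUri("local?root=/tmp/nix") == "local?root=/tmp/nix");
    assert(normaliseStoreUri("/nix/store") == "/nix/store");
}
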
@ -6,97 +6,72 @@
|
|||
#include "thread-pool.hh"
|
||||
#include "topo-sort.hh"
|
||||
#include "callback.hh"
|
||||
#include "closure.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
||||
void Store::computeFSClosure(const StorePathSet & startPaths,
|
||||
StorePathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers)
|
||||
{
|
||||
struct State
|
||||
{
|
||||
size_t pending;
|
||||
StorePathSet & paths;
|
||||
std::exception_ptr exc;
|
||||
};
|
||||
std::function<std::set<StorePath>(const StorePath & path, std::future<ref<const ValidPathInfo>> &)> queryDeps;
|
||||
if (flipDirection)
|
||||
queryDeps = [&](const StorePath& path,
|
||||
std::future<ref<const ValidPathInfo>> & fut) {
|
||||
StorePathSet res;
|
||||
StorePathSet referrers;
|
||||
queryReferrers(path, referrers);
|
||||
for (auto& ref : referrers)
|
||||
if (ref != path)
|
||||
res.insert(ref);
|
||||
|
||||
Sync<State> state_(State{0, paths_, 0});
|
||||
if (includeOutputs)
|
||||
for (auto& i : queryValidDerivers(path))
|
||||
res.insert(i);
|
||||
|
||||
std::function<void(const StorePath &)> enqueue;
|
||||
if (includeDerivers && path.isDerivation())
|
||||
for (auto& [_, maybeOutPath] : queryPartialDerivationOutputMap(path))
|
||||
if (maybeOutPath && isValidPath(*maybeOutPath))
|
||||
res.insert(*maybeOutPath);
|
||||
return res;
|
||||
};
|
||||
else
|
||||
queryDeps = [&](const StorePath& path,
|
||||
std::future<ref<const ValidPathInfo>> & fut) {
|
||||
StorePathSet res;
|
||||
auto info = fut.get();
|
||||
for (auto& ref : info->references)
|
||||
res.insert(ref);
|
||||
|
||||
std::condition_variable done;
|
||||
if (includeOutputs && path.isDerivation())
|
||||
for (auto& [_, maybeOutPath] : queryPartialDerivationOutputMap(path))
|
||||
if (maybeOutPath && isValidPath(*maybeOutPath))
|
||||
res.insert(*maybeOutPath);
|
||||
|
||||
enqueue = [&](const StorePath & path) -> void {
|
||||
{
|
||||
auto state(state_.lock());
|
||||
if (state->exc) return;
|
||||
if (!state->paths.insert(path).second) return;
|
||||
state->pending++;
|
||||
}
|
||||
if (includeDerivers && info->deriver && isValidPath(*info->deriver))
|
||||
res.insert(*info->deriver);
|
||||
return res;
|
||||
};
|
||||
|
||||
queryPathInfo(path, {[&](std::future<ref<const ValidPathInfo>> fut) {
|
||||
// FIXME: calls to isValidPath() should be async
|
||||
|
||||
try {
|
||||
auto info = fut.get();
|
||||
|
||||
if (flipDirection) {
|
||||
|
||||
StorePathSet referrers;
|
||||
queryReferrers(path, referrers);
|
||||
for (auto & ref : referrers)
|
||||
if (ref != path)
|
||||
enqueue(ref);
|
||||
|
||||
if (includeOutputs)
|
||||
for (auto & i : queryValidDerivers(path))
|
||||
enqueue(i);
|
||||
|
||||
if (includeDerivers && path.isDerivation())
|
||||
for (auto & i : queryDerivationOutputs(path))
|
||||
if (isValidPath(i) && queryPathInfo(i)->deriver == path)
|
||||
enqueue(i);
|
||||
|
||||
} else {
|
||||
|
||||
for (auto & ref : info->references)
|
||||
enqueue(ref);
|
||||
|
||||
if (includeOutputs && path.isDerivation())
|
||||
for (auto & i : queryDerivationOutputs(path))
|
||||
if (isValidPath(i)) enqueue(i);
|
||||
|
||||
if (includeDerivers && info->deriver && isValidPath(*info->deriver))
|
||||
enqueue(*info->deriver);
|
||||
|
||||
}
|
||||
|
||||
{
|
||||
auto state(state_.lock());
|
||||
assert(state->pending);
|
||||
if (!--state->pending) done.notify_one();
|
||||
}
|
||||
|
||||
} catch (...) {
|
||||
auto state(state_.lock());
|
||||
if (!state->exc) state->exc = std::current_exception();
|
||||
assert(state->pending);
|
||||
if (!--state->pending) done.notify_one();
|
||||
};
|
||||
}});
|
||||
};
|
||||
|
||||
for (auto & startPath : startPaths)
|
||||
enqueue(startPath);
|
||||
|
||||
{
|
||||
auto state(state_.lock());
|
||||
while (state->pending) state.wait(done);
|
||||
if (state->exc) std::rethrow_exception(state->exc);
|
||||
}
|
||||
computeClosure<StorePath>(
|
||||
startPaths, paths_,
|
||||
[&](const StorePath& path,
|
||||
std::function<void(std::promise<std::set<StorePath>>&)>
|
||||
processEdges) {
|
||||
std::promise<std::set<StorePath>> promise;
|
||||
std::function<void(std::future<ref<const ValidPathInfo>>)>
|
||||
getDependencies =
|
||||
[&](std::future<ref<const ValidPathInfo>> fut) {
|
||||
try {
|
||||
promise.set_value(queryDeps(path, fut));
|
||||
} catch (...) {
|
||||
promise.set_exception(std::current_exception());
|
||||
}
|
||||
};
|
||||
queryPathInfo(path, getDependencies);
|
||||
processEdges(promise);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
void Store::computeFSClosure(const StorePath & startPath,
|
||||
StorePathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers)
|
||||
{
|
||||
|
|
@ -278,5 +253,44 @@ StorePaths Store::topoSortPaths(const StorePathSet & paths)
|
|||
}});
|
||||
}
|
||||
|
||||
std::map<DrvOutput, StorePath> drvOutputReferences(
|
||||
const std::set<Realisation> & inputRealisations,
|
||||
const StorePathSet & pathReferences)
|
||||
{
|
||||
std::map<DrvOutput, StorePath> res;
|
||||
|
||||
for (const auto & input : inputRealisations) {
|
||||
if (pathReferences.count(input.outPath)) {
|
||||
res.insert({input.id, input.outPath});
|
||||
}
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
std::map<DrvOutput, StorePath> drvOutputReferences(
|
||||
Store & store,
|
||||
const Derivation & drv,
|
||||
const StorePath & outputPath)
|
||||
{
|
||||
std::set<Realisation> inputRealisations;
|
||||
|
||||
for (const auto& [inputDrv, outputNames] : drv.inputDrvs) {
|
||||
auto outputHashes =
|
||||
staticOutputHashes(store, store.readDerivation(inputDrv));
|
||||
for (const auto& outputName : outputNames) {
|
||||
auto thisRealisation = store.queryRealisation(
|
||||
DrvOutput{outputHashes.at(outputName), outputName});
|
||||
if (!thisRealisation)
|
||||
throw Error(
|
||||
"output '%s' of derivation '%s' isn’t built", outputName,
|
||||
store.printStorePath(inputDrv));
|
||||
inputRealisations.insert(*thisRealisation);
|
||||
}
|
||||
}
|
||||
|
||||
auto info = store.queryPathInfo(outputPath);
|
||||
|
||||
return drvOutputReferences(Realisation::closure(store, inputRealisations), info->references);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@
|
|||
#include "globals.hh"
|
||||
|
||||
#include <sqlite3.h>
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
@ -38,6 +39,15 @@ create table if not exists NARs (
|
|||
foreign key (cache) references BinaryCaches(id) on delete cascade
|
||||
);
|
||||
|
||||
create table if not exists Realisations (
|
||||
cache integer not null,
|
||||
outputId text not null,
|
||||
content blob, -- Json serialisation of the realisation, or null if the realisation is absent
|
||||
timestamp integer not null,
|
||||
primary key (cache, outputId),
|
||||
foreign key (cache) references BinaryCaches(id) on delete cascade
|
||||
);
|
||||
|
||||
create table if not exists LastPurge (
|
||||
dummy text primary key,
|
||||
value integer
|
||||
|
|
@ -63,7 +73,9 @@ public:
|
|||
struct State
|
||||
{
|
||||
SQLite db;
|
||||
SQLiteStmt insertCache, queryCache, insertNAR, insertMissingNAR, queryNAR, purgeCache;
|
||||
SQLiteStmt insertCache, queryCache, insertNAR, insertMissingNAR,
|
||||
queryNAR, insertRealisation, insertMissingRealisation,
|
||||
queryRealisation, purgeCache;
|
||||
std::map<std::string, Cache> caches;
|
||||
};
|
||||
|
||||
|
|
@ -98,6 +110,26 @@ public:
|
|||
state->queryNAR.create(state->db,
|
||||
"select present, namePart, url, compression, fileHash, fileSize, narHash, narSize, refs, deriver, sigs, ca from NARs where cache = ? and hashPart = ? and ((present = 0 and timestamp > ?) or (present = 1 and timestamp > ?))");
|
||||
|
||||
state->insertRealisation.create(state->db,
|
||||
R"(
|
||||
insert or replace into Realisations(cache, outputId, content, timestamp)
|
||||
values (?, ?, ?, ?)
|
||||
)");
|
||||
|
||||
state->insertMissingRealisation.create(state->db,
|
||||
R"(
|
||||
insert or replace into Realisations(cache, outputId, timestamp)
|
||||
values (?, ?, ?)
|
||||
)");
|
||||
|
||||
state->queryRealisation.create(state->db,
|
||||
R"(
|
||||
select content from Realisations
|
||||
where cache = ? and outputId = ? and
|
||||
((content is null and timestamp > ?) or
|
||||
(content is not null and timestamp > ?))
|
||||
)");
|
||||
|
||||
/* Periodically purge expired entries from the database. */
|
||||
retrySQLite<void>([&]() {
|
||||
auto now = time(0);
|
||||
|
|
@ -212,6 +244,38 @@ public:
|
|||
});
|
||||
}
|
||||
|
||||
std::pair<Outcome, std::shared_ptr<Realisation>> lookupRealisation(
|
||||
const std::string & uri, const DrvOutput & id) override
|
||||
{
|
||||
return retrySQLite<std::pair<Outcome, std::shared_ptr<Realisation>>>(
|
||||
[&]() -> std::pair<Outcome, std::shared_ptr<Realisation>> {
|
||||
auto state(_state.lock());
|
||||
|
||||
auto & cache(getCache(*state, uri));
|
||||
|
||||
auto now = time(0);
|
||||
|
||||
auto queryRealisation(state->queryRealisation.use()
|
||||
(cache.id)
|
||||
(id.to_string())
|
||||
(now - settings.ttlNegativeNarInfoCache)
|
||||
(now - settings.ttlPositiveNarInfoCache));
|
||||
|
||||
if (!queryRealisation.next())
|
||||
return {oUnknown, 0};
|
||||
|
||||
if (queryRealisation.isNull(0))
|
||||
return {oInvalid, 0};
|
||||
|
||||
auto realisation =
|
||||
std::make_shared<Realisation>(Realisation::fromJSON(
|
||||
nlohmann::json::parse(queryRealisation.getStr(0)),
|
||||
"Local disk cache"));
|
||||
|
||||
return {oValid, realisation};
|
||||
});
|
||||
}
|
||||
|
||||
void upsertNarInfo(
|
||||
const std::string & uri, const std::string & hashPart,
|
||||
std::shared_ptr<const ValidPathInfo> info) override
|
||||
|
|
@ -251,6 +315,39 @@ public:
|
|||
}
|
||||
});
|
||||
}
|
||||
|
||||
void upsertRealisation(
|
||||
const std::string & uri,
|
||||
const Realisation & realisation) override
|
||||
{
|
||||
retrySQLite<void>([&]() {
|
||||
auto state(_state.lock());
|
||||
|
||||
auto & cache(getCache(*state, uri));
|
||||
|
||||
state->insertRealisation.use()
|
||||
(cache.id)
|
||||
(realisation.id.to_string())
|
||||
(realisation.toJSON().dump())
|
||||
(time(0)).exec();
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
virtual void upsertAbsentRealisation(
|
||||
const std::string & uri,
|
||||
const DrvOutput & id) override
|
||||
{
|
||||
retrySQLite<void>([&]() {
|
||||
auto state(_state.lock());
|
||||
|
||||
auto & cache(getCache(*state, uri));
|
||||
state->insertMissingRealisation.use()
|
||||
(cache.id)
|
||||
(id.to_string())
|
||||
(time(0)).exec();
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
ref<NarInfoDiskCache> getNarInfoDiskCache()
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
#include "ref.hh"
|
||||
#include "nar-info.hh"
|
||||
#include "realisation.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
@ -29,6 +30,15 @@ public:
|
|||
virtual void upsertNarInfo(
|
||||
const std::string & uri, const std::string & hashPart,
|
||||
std::shared_ptr<const ValidPathInfo> info) = 0;
|
||||
|
||||
virtual void upsertRealisation(
|
||||
const std::string & uri,
|
||||
const Realisation & realisation) = 0;
|
||||
virtual void upsertAbsentRealisation(
|
||||
const std::string & uri,
|
||||
const DrvOutput & id) = 0;
|
||||
virtual std::pair<Outcome, std::shared_ptr<Realisation>> lookupRealisation(
|
||||
const std::string & uri, const DrvOutput & id) = 0;
|
||||
};
|
||||
|
||||
/* Return a singleton cache object that can be used concurrently by
|
||||
|
|
|
|||
|
|
@ -198,7 +198,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
|
|||
/* Make the containing directory writable, but only if it's not
|
||||
the store itself (we don't want or need to mess with its
|
||||
permissions). */
|
||||
bool mustToggle = dirOf(path) != realStoreDir;
|
||||
bool mustToggle = dirOf(path) != realStoreDir.get();
|
||||
if (mustToggle) makeWritable(dirOf(path));
|
||||
|
||||
/* When we're done, make the directory read-only again and reset
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
#include "parsed-derivations.hh"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
#include <regex>
|
||||
#include "json.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
@ -91,6 +93,8 @@ StringSet ParsedDerivation::getRequiredSystemFeatures() const
|
|||
StringSet res;
|
||||
for (auto & i : getStringsAttr("requiredSystemFeatures").value_or(Strings()))
|
||||
res.insert(i);
|
||||
if (!derivationHasKnownOutputPaths(drv.type()))
|
||||
res.insert("ca-derivations");
|
||||
return res;
|
||||
}
|
||||
|
||||
|
|
@ -121,4 +125,107 @@ bool ParsedDerivation::substitutesAllowed() const
|
|||
return getBoolAttr("allowSubstitutes", true);
|
||||
}
|
||||
|
||||
static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");
|
||||
|
||||
std::optional<nlohmann::json> ParsedDerivation::prepareStructuredAttrs(Store & store, const StorePathSet & inputPaths)
|
||||
{
|
||||
auto structuredAttrs = getStructuredAttrs();
|
||||
if (!structuredAttrs) return std::nullopt;
|
||||
|
||||
auto json = *structuredAttrs;
|
||||
|
||||
/* Add an "outputs" object containing the output paths. */
|
||||
nlohmann::json outputs;
|
||||
for (auto & i : drv.outputs)
|
||||
outputs[i.first] = hashPlaceholder(i.first);
|
||||
json["outputs"] = outputs;
|
||||
|
||||
/* Handle exportReferencesGraph. */
|
||||
auto e = json.find("exportReferencesGraph");
|
||||
if (e != json.end() && e->is_object()) {
|
||||
for (auto i = e->begin(); i != e->end(); ++i) {
|
||||
std::ostringstream str;
|
||||
{
|
||||
JSONPlaceholder jsonRoot(str, true);
|
||||
StorePathSet storePaths;
|
||||
for (auto & p : *i)
|
||||
storePaths.insert(store.parseStorePath(p.get<std::string>()));
|
||||
store.pathInfoToJSON(jsonRoot,
|
||||
store.exportReferences(storePaths, inputPaths), false, true);
|
||||
}
|
||||
json[i.key()] = nlohmann::json::parse(str.str()); // urgh
|
||||
}
|
||||
}
|
||||
|
||||
return json;
|
||||
}
|
||||
|
||||
/* As a convenience to bash scripts, write a shell file that
|
||||
maps all attributes that are representable in bash -
|
||||
namely, strings, integers, nulls, Booleans, and arrays and
|
||||
objects consisting entirely of those values. (So nested
|
||||
arrays or objects are not supported.) */
|
||||
std::string writeStructuredAttrsShell(const nlohmann::json & json)
|
||||
{
|
||||
|
||||
auto handleSimpleType = [](const nlohmann::json & value) -> std::optional<std::string> {
|
||||
if (value.is_string())
|
||||
return shellEscape(value);
|
||||
|
||||
if (value.is_number()) {
|
||||
auto f = value.get<float>();
|
||||
if (std::ceil(f) == f)
|
||||
return std::to_string(value.get<int>());
|
||||
}
|
||||
|
||||
if (value.is_null())
|
||||
return std::string("''");
|
||||
|
||||
if (value.is_boolean())
|
||||
return value.get<bool>() ? std::string("1") : std::string("");
|
||||
|
||||
return {};
|
||||
};
|
||||
|
||||
std::string jsonSh;
|
||||
|
||||
for (auto & [key, value] : json.items()) {
|
||||
|
||||
if (!std::regex_match(key, shVarName)) continue;
|
||||
|
||||
auto s = handleSimpleType(value);
|
||||
if (s)
|
||||
jsonSh += fmt("declare %s=%s\n", key, *s);
|
||||
|
||||
else if (value.is_array()) {
|
||||
std::string s2;
|
||||
bool good = true;
|
||||
|
||||
for (auto & value2 : value) {
|
||||
auto s3 = handleSimpleType(value2);
|
||||
if (!s3) { good = false; break; }
|
||||
s2 += *s3; s2 += ' ';
|
||||
}
|
||||
|
||||
if (good)
|
||||
jsonSh += fmt("declare -a %s=(%s)\n", key, s2);
|
||||
}
|
||||
|
||||
else if (value.is_object()) {
|
||||
std::string s2;
|
||||
bool good = true;
|
||||
|
||||
for (auto & [key2, value2] : value.items()) {
|
||||
auto s3 = handleSimpleType(value2);
|
||||
if (!s3) { good = false; break; }
|
||||
s2 += fmt("[%s]=%s ", shellEscape(key2), *s3);
|
||||
}
|
||||
|
||||
if (good)
|
||||
jsonSh += fmt("declare -A %s=(%s)\n", key, s2);
|
||||
}
|
||||
}
|
||||
|
||||
return jsonSh;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -36,6 +36,10 @@ public:
|
|||
bool willBuildLocally(Store & localStore) const;
|
||||
|
||||
bool substitutesAllowed() const;
|
||||
|
||||
std::optional<nlohmann::json> prepareStructuredAttrs(Store & store, const StorePathSet & inputPaths);
|
||||
};
|
||||
|
||||
std::string writeStructuredAttrsShell(const nlohmann::json & json);
|
||||
|
||||
}
|
||||
|
|
|
|||
46
src/libstore/path-info.cc
Normal file
46
src/libstore/path-info.cc
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
#include "path-info.hh"
|
||||
#include "worker-protocol.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
ValidPathInfo ValidPathInfo::read(Source & source, const Store & store, unsigned int format)
|
||||
{
|
||||
return read(source, store, format, store.parseStorePath(readString(source)));
|
||||
}
|
||||
|
||||
ValidPathInfo ValidPathInfo::read(Source & source, const Store & store, unsigned int format, StorePath && path)
|
||||
{
|
||||
auto deriver = readString(source);
|
||||
auto narHash = Hash::parseAny(readString(source), htSHA256);
|
||||
ValidPathInfo info(path, narHash);
|
||||
if (deriver != "") info.deriver = store.parseStorePath(deriver);
|
||||
info.setReferencesPossiblyToSelf(worker_proto::read(store, source, Phantom<StorePathSet> {}));
|
||||
source >> info.registrationTime >> info.narSize;
|
||||
if (format >= 16) {
|
||||
source >> info.ultimate;
|
||||
info.sigs = readStrings<StringSet>(source);
|
||||
info.ca = parseContentAddressOpt(readString(source));
|
||||
}
|
||||
return info;
|
||||
}
|
||||
|
||||
void ValidPathInfo::write(
|
||||
Sink & sink,
|
||||
const Store & store,
|
||||
unsigned int format,
|
||||
bool includePath) const
|
||||
{
|
||||
if (includePath)
|
||||
sink << store.printStorePath(path);
|
||||
sink << (deriver ? store.printStorePath(*deriver) : "")
|
||||
<< narHash.to_string(Base16, false);
|
||||
worker_proto::write(store, sink, referencesPossiblyToSelf());
|
||||
sink << registrationTime << narSize;
|
||||
if (format >= 16) {
|
||||
sink << ultimate
|
||||
<< sigs
|
||||
<< renderContentAddress(ca);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -107,6 +107,11 @@ struct ValidPathInfo : PathReferences<StorePath>
|
|||
StorePathDescriptor && ca, Hash narHash);
|
||||
|
||||
virtual ~ValidPathInfo() { }
|
||||
|
||||
static ValidPathInfo read(Source & source, const Store & store, unsigned int format);
|
||||
static ValidPathInfo read(Source & source, const Store & store, unsigned int format, StorePath && path);
|
||||
|
||||
void write(Sink & sink, const Store & store, unsigned int format, bool includePath = true) const;
|
||||
};
|
||||
|
||||
typedef std::map<StorePath, ValidPathInfo> ValidPathInfos;
|
||||
|
|
|
|||
|
|
@@ -126,9 +126,9 @@ void deleteGeneration(const Path & profile, GenerationNumber gen)
static void deleteGeneration2(const Path & profile, GenerationNumber gen, bool dryRun)
{
if (dryRun)
printInfo(format("would remove generation %1%") % gen);
notice("would remove profile version %1%", gen);
else {
printInfo(format("removing generation %1%") % gen);
notice("removing profile version %1%", gen);
deleteGeneration(profile, gen);
}
}

@@ -142,7 +142,7 @@ void deleteGenerations(const Path & profile, const std::set<GenerationNumber> &
auto [gens, curGen] = findGenerations(profile);

if (gensToDelete.count(*curGen))
throw Error("cannot delete current generation of profile %1%'", profile);
throw Error("cannot delete current version of profile %1%'", profile);

for (auto & i : gens) {
if (!gensToDelete.count(i.number)) continue;

@@ -211,12 +211,15 @@ void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun)

void deleteGenerationsOlderThan(const Path & profile, const string & timeSpec, bool dryRun)
{
if (timeSpec.empty() || timeSpec[timeSpec.size() - 1] != 'd')
throw UsageError("invalid number of days specifier '%1%', expected something like '14d'", timeSpec);

time_t curTime = time(0);
string strDays = string(timeSpec, 0, timeSpec.size() - 1);
auto days = string2Int<int>(strDays);

if (!days || *days < 1)
throw Error("invalid number of days specifier '%1%'", timeSpec);
throw UsageError("invalid number of days specifier '%1%'", timeSpec);

time_t oldTime = curTime - *days * 24 * 3600;

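For reference, a standalone sketch of how a spec such as '14d' resolves to a cut-off timestamp, mirroring the code above (Nix's string2Int is replaced by plain atoi here):

#include <cassert>
#include <cstdlib>
#include <ctime>
#include <string>

// Mirrors deleteGenerationsOlderThan(profile, "14d", dryRun): strip the
// trailing 'd', parse the day count, and compute the oldest time to keep.
static time_t cutoffFor(const std::string & timeSpec, time_t now)
{
    assert(!timeSpec.empty() && timeSpec.back() == 'd');
    int days = std::atoi(timeSpec.substr(0, timeSpec.size() - 1).c_str());
    assert(days >= 1);
    return now - days * 24 * 3600;
}

int main()
{
    time_t now = std::time(nullptr);
    // "14d" keeps every profile version from the last fourteen days.
    assert(cutoffFor("14d", now) == now - 14 * 24 * 3600);
}
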
@@ -233,6 +236,37 @@ void switchLink(Path link, Path target)
}


void switchGeneration(
const Path & profile,
std::optional<GenerationNumber> dstGen,
bool dryRun)
{
PathLocks lock;
lockProfile(lock, profile);

auto [gens, curGen] = findGenerations(profile);

std::optional<Generation> dst;
for (auto & i : gens)
if ((!dstGen && i.number < curGen) ||
(dstGen && i.number == *dstGen))
dst = i;

if (!dst) {
if (dstGen)
throw Error("profile version %1% does not exist", *dstGen);
else
throw Error("no profile version older than the current (%1%) exists", curGen.value_or(0));
}

notice("switching profile from version %d to %d", curGen.value_or(0), dst->number);

if (dryRun) return;

switchLink(profile, dst->path);
}


void lockProfile(PathLocks & lock, const Path & profile)
{
lock.lockPaths({profile}, (format("waiting for lock on profile '%1%'") % profile).str());

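A minimal usage sketch for the new helper, assuming the Nix headers above; the profile path is a placeholder:

#include "profiles.hh"

using namespace nix;

// Roll a profile back by one version, or jump to an explicit version.
void rollbackExamples()
{
    Path profile = "/nix/var/nix/profiles/default"; // placeholder

    // No target version: switch to the newest version older than the current one.
    switchGeneration(profile, std::nullopt, /* dryRun */ false);

    // Explicit target version; dry run only reports what would happen.
    switchGeneration(profile, GenerationNumber{42}, /* dryRun */ true);
}
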
@@ -11,7 +11,7 @@ namespace nix {
class StorePath;


typedef unsigned int GenerationNumber;
typedef uint64_t GenerationNumber;

struct Generation
{

@@ -46,6 +46,13 @@ void deleteGenerationsOlderThan(const Path & profile, const string & timeSpec, b

void switchLink(Path link, Path target);

/* Roll back a profile to the specified generation, or to the most
recent one older than the current. */
void switchGeneration(
const Path & profile,
std::optional<GenerationNumber> dstGen,
bool dryRun);

/* Ensure exclusive access to a profile. Any command that modifies
the profile first acquires this lock. */
void lockProfile(PathLocks & lock, const Path & profile);

@ -1,5 +1,6 @@
|
|||
#include "realisation.hh"
|
||||
#include "store-api.hh"
|
||||
#include "closure.hh"
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
namespace nix {
|
||||
|
|
@ -21,11 +22,52 @@ std::string DrvOutput::to_string() const {
|
|||
return strHash() + "!" + outputName;
|
||||
}
|
||||
|
||||
std::set<Realisation> Realisation::closure(Store & store, const std::set<Realisation> & startOutputs)
|
||||
{
|
||||
std::set<Realisation> res;
|
||||
Realisation::closure(store, startOutputs, res);
|
||||
return res;
|
||||
}
|
||||
|
||||
void Realisation::closure(Store & store, const std::set<Realisation> & startOutputs, std::set<Realisation> & res)
|
||||
{
|
||||
auto getDeps = [&](const Realisation& current) -> std::set<Realisation> {
|
||||
std::set<Realisation> res;
|
||||
for (auto& [currentDep, _] : current.dependentRealisations) {
|
||||
if (auto currentRealisation = store.queryRealisation(currentDep))
|
||||
res.insert(*currentRealisation);
|
||||
else
|
||||
throw Error(
|
||||
"Unrealised derivation '%s'", currentDep.to_string());
|
||||
}
|
||||
return res;
|
||||
};
|
||||
|
||||
computeClosure<Realisation>(
|
||||
startOutputs, res,
|
||||
[&](const Realisation& current,
|
||||
std::function<void(std::promise<std::set<Realisation>>&)>
|
||||
processEdges) {
|
||||
std::promise<std::set<Realisation>> promise;
|
||||
try {
|
||||
auto res = getDeps(current);
|
||||
promise.set_value(res);
|
||||
} catch (...) {
|
||||
promise.set_exception(std::current_exception());
|
||||
}
|
||||
return processEdges(promise);
|
||||
});
|
||||
}
|
||||
|
||||
nlohmann::json Realisation::toJSON() const {
|
||||
auto jsonDependentRealisations = nlohmann::json::object();
|
||||
for (auto & [depId, depOutPath] : dependentRealisations)
|
||||
jsonDependentRealisations.emplace(depId.to_string(), depOutPath.to_string());
|
||||
return nlohmann::json{
|
||||
{"id", id.to_string()},
|
||||
{"outPath", outPath.to_string()},
|
||||
{"signatures", signatures},
|
||||
{"dependentRealisations", jsonDependentRealisations},
|
||||
};
|
||||
}
|
||||
|
||||
|
|
@ -51,10 +93,16 @@ Realisation Realisation::fromJSON(
|
|||
if (auto signaturesIterator = json.find("signatures"); signaturesIterator != json.end())
|
||||
signatures.insert(signaturesIterator->begin(), signaturesIterator->end());
|
||||
|
||||
std::map <DrvOutput, StorePath> dependentRealisations;
|
||||
if (auto jsonDependencies = json.find("dependentRealisations"); jsonDependencies != json.end())
|
||||
for (auto & [jsonDepId, jsonDepOutPath] : jsonDependencies->get<std::map<std::string, std::string>>())
|
||||
dependentRealisations.insert({DrvOutput::parse(jsonDepId), StorePath(jsonDepOutPath)});
|
||||
|
||||
return Realisation{
|
||||
.id = DrvOutput::parse(getField("id")),
|
||||
.outPath = StorePath(getField("outPath")),
|
||||
.signatures = signatures,
|
||||
.dependentRealisations = dependentRealisations,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
@ -92,6 +140,24 @@ StorePath RealisedPath::path() const {
|
|||
return std::visit([](auto && arg) { return arg.getPath(); }, raw);
|
||||
}
|
||||
|
||||
bool Realisation::isCompatibleWith(const Realisation & other) const
|
||||
{
|
||||
assert (id == other.id);
|
||||
if (outPath == other.outPath) {
|
||||
if (dependentRealisations.empty() != other.dependentRealisations.empty()) {
|
||||
warn(
|
||||
"Encountered a realisation for '%s' with an empty set of "
|
||||
"dependencies. This is likely an artifact from an older Nix. "
|
||||
"I’ll try to fix the realisation if I can",
|
||||
id.to_string());
|
||||
return true;
|
||||
} else if (dependentRealisations == other.dependentRealisations) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void RealisedPath::closure(
|
||||
Store& store,
|
||||
const RealisedPath::Set& startPaths,
|
||||
|
|
|
|||
|
|
@@ -31,6 +31,14 @@ struct Realisation {

StringSet signatures;

/**
* The realisations that are required for the current one to be valid.
*
* When importing this realisation, the store will first check that all its
* dependencies exist, and map to the correct output path
*/
std::map<DrvOutput, StorePath> dependentRealisations;

nlohmann::json toJSON() const;
static Realisation fromJSON(const nlohmann::json& json, const std::string& whence);

@@ -39,6 +47,11 @@ struct Realisation {
bool checkSignature(const PublicKeys & publicKeys, const std::string & sig) const;
size_t checkSignatures(const PublicKeys & publicKeys) const;

static std::set<Realisation> closure(Store &, const std::set<Realisation> &);
static void closure(Store &, const std::set<Realisation> &, std::set<Realisation> & res);

bool isCompatibleWith(const Realisation & other) const;

StorePath getPath() const { return outPath; }

GENERATE_CMP(Realisation, me->id, me->outPath);

@@ -5,6 +5,7 @@

#include <map>
#include <cstdlib>
#include <mutex>


namespace nix {

@@ -16,14 +17,13 @@ static unsigned int refLength = 32; /* characters */
static void search(const unsigned char * s, size_t len,
StringSet & hashes, StringSet & seen)
{
static bool initialised = false;
static std::once_flag initialised;
static bool isBase32[256];
if (!initialised) {
std::call_once(initialised, [](){
for (unsigned int i = 0; i < 256; ++i) isBase32[i] = false;
for (unsigned int i = 0; i < base32Chars.size(); ++i)
isBase32[(unsigned char) base32Chars[i]] = true;
initialised = true;
}
});

for (size_t i = 0; i + refLength <= len; ) {
int j;

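The std::once_flag/std::call_once pattern above replaces a racy `static bool` guard; a standalone sketch of the same idiom:

#include <iostream>
#include <mutex>
#include <string>

// One-time initialisation of a lookup table, safe even if scan() is called
// from several threads at once.
static bool isDigitTable[256];

static void scan(const std::string & s)
{
    static std::once_flag initialised;
    std::call_once(initialised, [] {
        for (unsigned int i = 0; i < 256; ++i)
            isDigitTable[i] = (i >= '0' && i <= '9');
    });

    for (unsigned char c : s)
        if (isDigitTable[c]) std::cout << c << '\n';
}

int main()
{
    scan("nix-2.4");
}
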
@@ -162,8 +162,19 @@ void RemoteStore::initConnection(Connection & conn)
    try {
        conn.to << WORKER_MAGIC_1;
        conn.to.flush();
        unsigned int magic = readInt(conn.from);
        if (magic != WORKER_MAGIC_2) throw Error("protocol mismatch");
        StringSink saved;
        try {
            TeeSource tee(conn.from, saved);
            unsigned int magic = readInt(tee);
            if (magic != WORKER_MAGIC_2)
                throw Error("protocol mismatch");
        } catch (SerialisationError & e) {
            /* In case the other side is waiting for our input, close
               it. */
            conn.closeWrite();
            auto msg = conn.from.drain();
            throw Error("protocol mismatch, got '%s'", chomp(*saved.s + msg));
        }

        conn.from >> conn.daemonVersion;
        if (GET_PROTOCOL_MAJOR(conn.daemonVersion) != GET_PROTOCOL_MAJOR(PROTOCOL_VERSION))
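The rewritten handshake above reads the magic number through a TeeSource so that, on a mismatch, the error can show the bytes that were actually received rather than a bare "protocol mismatch". A standalone sketch of that tee-and-report idea using plain iostreams (the magic constant mirrors WORKER_MAGIC_2 from this diff; the integer encoding and stream types are only illustrative):

    // Standalone sketch: read a magic number while keeping a copy of the raw
    // bytes, so a mismatch can report what was actually on the wire.
    #include <cstdint>
    #include <iostream>
    #include <sstream>
    #include <stdexcept>
    #include <string>

    constexpr uint64_t kMagic = 0x6478696f; // same value as WORKER_MAGIC_2 in this diff

    uint64_t readLE64(std::istream & in, std::string & saved)
    {
        char buf[8];
        in.read(buf, sizeof buf);
        if (!in) throw std::runtime_error("unexpected end of stream");
        saved.append(buf, sizeof buf);               // tee: remember what we consumed
        uint64_t n = 0;
        for (int i = 7; i >= 0; --i) n = (n << 8) | (unsigned char) buf[i];
        return n;
    }

    int main()
    {
        std::istringstream in("hi nix!?");           // eight bytes that are not the magic
        std::string saved;
        try {
            if (readLE64(in, saved) != kMagic)
                throw std::runtime_error("protocol mismatch, got '" + saved + "'");
        } catch (std::exception & e) {
            std::cerr << e.what() << "\n";
        }
    }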
@@ -222,6 +233,7 @@ void RemoteStore::setOptions(Connection & conn)
    overrides.erase(settings.buildCores.name);
    overrides.erase(settings.useSubstitutes.name);
    overrides.erase(loggerSettings.showTrace.name);
    overrides.erase(settings.experimentalFeatures.name);
    conn.to << overrides.size();
    for (auto & i : overrides)
        conn.to << i.first << i.second.value;

@@ -387,23 +399,6 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
}


ref<const ValidPathInfo> RemoteStore::readValidPathInfo(ConnectionHandle & conn, const StorePath & path)
{
    auto deriver = readString(conn->from);
    auto narHash = Hash::parseAny(readString(conn->from), htSHA256);
    auto info = make_ref<ValidPathInfo>(path, narHash);
    if (deriver != "") info->deriver = parseStorePath(deriver);
    info->setReferencesPossiblyToSelf(worker_proto::read(*this, conn->from, Phantom<StorePathSet> {}));
    conn->from >> info->registrationTime >> info->narSize;
    if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
        conn->from >> info->ultimate;
        info->sigs = readStrings<StringSet>(conn->from);
        info->ca = parseContentAddressOpt(readString(conn->from));
    }
    return info;
}


void RemoteStore::queryPathInfoUncached(const StorePath & path,
    Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{

@@ -424,7 +419,8 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
            bool valid; conn->from >> valid;
            if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
        }
        info = readValidPathInfo(conn, path);
        info = std::make_shared<ValidPathInfo>(
            ValidPathInfo::read(conn->from, *this, GET_PROTOCOL_MINOR(conn->daemonVersion), StorePath{path}));
    }
    callback(std::move(info));
} catch (...) { callback.rethrow(); }

@@ -526,8 +522,8 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
            });
        }

        auto path = parseStorePath(readString(conn->from));
        return readValidPathInfo(conn, path);
        return make_ref<ValidPathInfo>(
            ValidPathInfo::read(conn->from, *this, GET_PROTOCOL_MINOR(conn->daemonVersion)));
    }
    else {
        if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25");

@@ -643,6 +639,25 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
}


void RemoteStore::addMultipleToStore(
    Source & source,
    RepairFlag repair,
    CheckSigsFlag checkSigs)
{
    if (GET_PROTOCOL_MINOR(getConnection()->daemonVersion) >= 32) {
        auto conn(getConnection());
        conn->to
            << wopAddMultipleToStore
            << repair
            << !checkSigs;
        conn.withFramedSink([&](Sink & sink) {
            source.drainInto(sink);
        });
    } else
        Store::addMultipleToStore(source, repair, checkSigs);
}


StorePath RemoteStore::addTextToStore(const string & name, const string & s,
    const StorePathSet & references, RepairFlag repair)
{

@@ -654,8 +669,12 @@ void RemoteStore::registerDrvOutput(const Realisation & info)
{
    auto conn(getConnection());
    conn->to << wopRegisterDrvOutput;
    conn->to << info.id.to_string();
    conn->to << std::string(info.outPath.to_string());
    if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) {
        conn->to << info.id.to_string();
        conn->to << std::string(info.outPath.to_string());
    } else {
        worker_proto::write(*this, conn->to, info);
    }
    conn.processStderr();
}

@@ -665,15 +684,22 @@ std::optional<const Realisation> RemoteStore::queryRealisation(const DrvOutput &
    conn->to << wopQueryRealisation;
    conn->to << id.to_string();
    conn.processStderr();
    auto outPaths = worker_proto::read(*this, conn->from, Phantom<std::set<StorePath>>{});
    if (outPaths.empty())
        return std::nullopt;
    return {Realisation{.id = id, .outPath = *outPaths.begin()}};
    if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) {
        auto outPaths = worker_proto::read(*this, conn->from, Phantom<std::set<StorePath>>{});
        if (outPaths.empty())
            return std::nullopt;
        return {Realisation{.id = id, .outPath = *outPaths.begin()}};
    } else {
        auto realisations = worker_proto::read(*this, conn->from, Phantom<std::set<Realisation>>{});
        if (realisations.empty())
            return std::nullopt;
        return *realisations.begin();
    }
}

static void writeDerivedPaths(RemoteStore & store, ConnectionHandle & conn, const std::vector<DerivedPath> & reqs)
{
    if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 29) {
        if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 30) {
            worker_proto::write(store, conn->to, reqs);
        } else {
            Strings ss;
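Several of the hunks above gate behaviour on GET_PROTOCOL_MINOR(conn->daemonVersion): daemons older than protocol minor 31 get a bare id/output-path pair, newer ones get a full realisation record. A standalone sketch of that negotiate-then-branch pattern (the version numbers mirror this diff, but the payload messages are only illustrative):

    // Standalone sketch of feature gating on a negotiated protocol version:
    // pack major/minor into one word and branch on the minor, as PROTOCOL_VERSION
    // and GET_PROTOCOL_MINOR do in this diff.
    #include <algorithm>
    #include <cstdio>

    constexpr unsigned int makeVersion(unsigned int major, unsigned int minor)
    {
        return (major << 8) | minor;
    }
    constexpr unsigned int getMajor(unsigned int v) { return v & 0xff00; }
    constexpr unsigned int getMinor(unsigned int v) { return v & 0x00ff; }

    int main()
    {
        unsigned int ours = makeVersion(1, 32);
        unsigned int theirs = makeVersion(1, 29);   // an older peer
        // Both sides speak the lower of the two versions.
        unsigned int negotiated = std::min(ours, theirs);

        if (getMajor(negotiated) != getMajor(ours)) {
            std::puts("incompatible major version");
            return 1;
        }
        if (getMinor(negotiated) >= 31)
            std::puts("send full realisation records");
        else
            std::puts("fall back to sending bare output paths");
    }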
@@ -695,8 +721,18 @@ static void writeDerivedPaths(RemoteStore & store, ConnectionHandle & conn, cons
    }
}

void RemoteStore::buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode)
void RemoteStore::buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore)
{
    if (evalStore && evalStore.get() != this) {
        /* The remote doesn't have a way to access evalStore, so copy
           the .drvs. */
        RealisedPath::Set drvPaths2;
        for (auto & i : drvPaths)
            if (auto p = std::get_if<DerivedPath::Built>(&i))
                drvPaths2.insert(p->drvPath);
        copyClosure(*evalStore, *this, drvPaths2);
    }

    auto conn(getConnection());
    conn->to << wopBuildPaths;
    assert(GET_PROTOCOL_MINOR(conn->daemonVersion) >= 13);

@@ -991,14 +1027,14 @@ std::exception_ptr RemoteStore::Connection::processStderr(Sink * sink, Source *
    return nullptr;
}

void ConnectionHandle::withFramedSink(std::function<void(Sink &sink)> fun)
void ConnectionHandle::withFramedSink(std::function<void(Sink & sink)> fun)
{
    (*this)->to.flush();

    std::exception_ptr ex;

    /* Handle log messages / exceptions from the remote on a
       separate thread. */
    /* Handle log messages / exceptions from the remote on a separate
       thread. */
    std::thread stderrThread([&]()
    {
        try {

@@ -1031,7 +1067,6 @@ void ConnectionHandle::withFramedSink(std::function<void(Sink &sink)> fun)
    stderrThread.join();
    if (ex)
        std::rethrow_exception(ex);

}

}
@@ -78,6 +78,11 @@ public:
    void addToStore(const ValidPathInfo & info, Source & nar,
        RepairFlag repair, CheckSigsFlag checkSigs) override;

    void addMultipleToStore(
        Source & source,
        RepairFlag repair,
        CheckSigsFlag checkSigs) override;

    StorePath addTextToStore(const string & name, const string & s,
        const StorePathSet & references, RepairFlag repair) override;

@@ -85,7 +90,7 @@ public:
    std::optional<const Realisation> queryRealisation(const DrvOutput &) override;

    void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode) override;
    void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override;

    BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
        BuildMode buildMode) override;

@@ -120,7 +125,6 @@ public:
    struct Connection
    {
        AutoCloseFD fd;
        FdSink to;
        FdSource from;
        unsigned int daemonVersion;

@@ -128,6 +132,8 @@ public:
        virtual ~Connection();

        virtual void closeWrite() = 0;

        std::exception_ptr processStderr(Sink * sink = 0, Source * source = 0, bool flush = true);
    };

@@ -151,8 +157,6 @@ protected:
    virtual void narFromPath(const StorePath & path, Sink & sink) override;

    ref<const ValidPathInfo> readValidPathInfo(ConnectionHandle & conn, const StorePath & path);

private:

    std::atomic_bool failed{false};
@@ -209,7 +209,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual
    S3Helper s3Helper;

    S3BinaryCacheStoreImpl(
        const std::string & scheme,
        const std::string & uriScheme,
        const std::string & bucketName,
        const Params & params)
        : StoreConfig(params)

@@ -232,8 +232,8 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual
    void init() override
    {
        if (auto cacheInfo = diskCache->cacheExists(getUri())) {
            wantMassQuery.setDefault(cacheInfo->wantMassQuery ? "true" : "false");
            priority.setDefault(fmt("%d", cacheInfo->priority));
            wantMassQuery.setDefault(cacheInfo->wantMassQuery);
            priority.setDefault(cacheInfo->priority);
        } else {
            BinaryCacheStore::init();
            diskCache->createCache(getUri(), storeDir, wantMassQuery, priority);
@@ -32,7 +32,9 @@
 (literal "/tmp") (subpath TMPDIR))

; Some packages like to read the system version.
(allow file-read* (literal "/System/Library/CoreServices/SystemVersion.plist"))
(allow file-read*
       (literal "/System/Library/CoreServices/SystemVersion.plist")
       (literal "/System/Library/CoreServices/SystemVersionCompat.plist"))

; Without this line clang cannot write to /dev/null, breaking some configure tests.
(allow file-read-metadata (literal "/dev"))

@@ -95,3 +97,7 @@
; This is used by /bin/sh on macOS 10.15 and later.
(allow file*
       (literal "/private/var/select/sh"))

; Allow Rosetta 2 to run x86_64 binaries on aarch64-darwin.
(allow file-read*
       (subpath "/Library/Apple/usr/libexec/oah"))
@@ -57,6 +57,11 @@ private:
    struct Connection : RemoteStore::Connection
    {
        std::unique_ptr<SSHMaster::Connection> sshConn;

        void closeWrite() override
        {
            sshConn->in.close();
        }
    };

    ref<RemoteStore::Connection> openConnection() override;
@@ -50,7 +50,7 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
    options.dieWithParent = false;

    conn->sshPid = startProcess([&]() {
        restoreSignals();
        restoreProcessContext();

        close(in.writeSide.get());
        close(out.readSide.get());

@@ -110,7 +110,7 @@ Path SSHMaster::startMaster()
    options.dieWithParent = false;

    state->sshMaster = startProcess([&]() {
        restoreSignals();
        restoreProcessContext();

        close(out.readSide.get());
@@ -10,6 +10,7 @@
#include "references.hh"
#include "archive.hh"
#include "callback.hh"
#include "remote-store.hh"

#include <regex>
@@ -258,6 +259,20 @@ StorePath Store::addToStore(const string & name, const Path & _srcPath,
}


void Store::addMultipleToStore(
    Source & source,
    RepairFlag repair,
    CheckSigsFlag checkSigs)
{
    auto expected = readNum<uint64_t>(source);
    for (uint64_t i = 0; i < expected; ++i) {
        auto info = ValidPathInfo::read(source, *this, 16);
        info.ultimate = false;
        addToStore(info, source, repair, checkSigs);
    }
}


/*
The aim of this function is to compute in one pass the correct ValidPathInfo for
the files that we are trying to add to the store. To accomplish that in one
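Store::addMultipleToStore above expects a simple framing: a count, then that many (path info, NAR) entries, each consumed in order. A standalone sketch of producing and consuming that kind of length-prefixed batch (the serialisation below is deliberately simplified and is not Nix's wire format):

    // Standalone sketch of a count-prefixed batch stream: write N, then N
    // (metadata, payload) entries; the reader mirrors the loop in the hunk above.
    #include <cstdint>
    #include <iostream>
    #include <sstream>
    #include <string>
    #include <utility>
    #include <vector>

    static void writeString(std::ostream & out, const std::string & s)
    {
        uint64_t n = s.size();
        out.write(reinterpret_cast<const char *>(&n), sizeof n);
        out.write(s.data(), s.size());
    }

    static std::string readString(std::istream & in)
    {
        uint64_t n = 0;
        in.read(reinterpret_cast<char *>(&n), sizeof n);
        std::string s(n, '\0');
        in.read(s.data(), n);
        return s;
    }

    int main()
    {
        std::vector<std::pair<std::string, std::string>> entries = {
            {"info-for-path-a", "nar-bytes-a"},
            {"info-for-path-b", "nar-bytes-b"},
        };

        // Producer side: count first, then each entry.
        std::stringstream wire;
        uint64_t count = entries.size();
        wire.write(reinterpret_cast<const char *>(&count), sizeof count);
        for (auto & [info, nar] : entries) {
            writeString(wire, info);
            writeString(wire, nar);
        }

        // Consumer side: read the count, then exactly that many entries.
        uint64_t expected = 0;
        wire.read(reinterpret_cast<char *>(&expected), sizeof expected);
        for (uint64_t i = 0; i < expected; ++i) {
            auto info = readString(wire);
            auto nar = readString(wire);
            std::cout << info << " / " << nar << "\n";
        }
    }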
@@ -355,6 +370,13 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
    return info;
}

StringSet StoreConfig::getDefaultSystemFeatures()
{
    auto res = settings.systemFeatures.get();
    if (settings.isExperimentalFeatureEnabled("ca-derivations"))
        res.insert("ca-derivations");
    return res;
}

Store::Store(const Params & params)
    : StoreConfig(params)

@@ -643,6 +665,42 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
}


StorePathSet Store::exportReferences(const StorePathSet & storePaths, const StorePathSet & inputPaths)
{
    StorePathSet paths;

    for (auto & storePath : storePaths) {
        if (!inputPaths.count(storePath))
            throw BuildError("cannot export references of path '%s' because it is not in the input closure of the derivation", printStorePath(storePath));

        computeFSClosure({storePath}, paths);
    }

    /* If there are derivations in the graph, then include their
       outputs as well. This is useful if you want to do things
       like passing all build-time dependencies of some path to a
       derivation that builds a NixOS DVD image. */
    auto paths2 = paths;

    for (auto & j : paths2) {
        if (j.isDerivation()) {
            Derivation drv = derivationFromPath(j);
            for (auto & k : drv.outputsAndOptPaths(*this)) {
                if (!k.second.second)
                    /* FIXME: I am confused why we are calling
                       `computeFSClosure` on the output path, rather than
                       derivation itself. That doesn't seem right to me, so I
                       won't try to implemented this for CA derivations. */
                    throw UnimplementedError("exportReferences on CA derivations is not yet implemented");
                computeFSClosure(*k.second.second, paths);
            }
        }
    }

    return paths;
}


void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & storePaths,
    bool includeImpureInfo, bool showClosureSize,
    Base hashBase,
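exportReferences above first takes the ordinary closure of the requested paths and then, in a second pass, folds in the closures of the outputs of any derivations it finds. A standalone toy version of that two-pass idea (the graph, node names, and Node type are invented for illustration; this is not Nix's store model):

    // Standalone sketch: closure of a toy reference graph, extended with the
    // closures of the outputs of any "derivation" nodes, as the hunk above does.
    #include <iostream>
    #include <map>
    #include <set>
    #include <string>
    #include <vector>

    struct Node {
        std::set<std::string> references;   // edges to other nodes
        std::vector<std::string> outputs;   // non-empty only for derivation nodes
    };

    using Graph = std::map<std::string, Node>;

    void computeClosure(const Graph & g, const std::string & start, std::set<std::string> & out)
    {
        if (!out.insert(start).second) return;
        for (auto & ref : g.at(start).references)
            computeClosure(g, ref, out);
    }

    int main()
    {
        Graph g = {
            {"app.drv", {{"lib.drv"}, {"app-out"}}},
            {"lib.drv", {{}, {"lib-out"}}},
            {"app-out", {{"lib-out"}, {}}},
            {"lib-out", {{}, {}}},
        };

        std::set<std::string> paths;
        computeClosure(g, "app.drv", paths);

        // Second pass: also pull in the outputs of any derivation nodes we found.
        auto copy = paths;
        for (auto & p : copy)
            for (auto & o : g.at(p).outputs)
                computeClosure(g, o, paths);

        for (auto & p : paths) std::cout << p << "\n";
    }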
@@ -743,31 +801,44 @@ const Store::Stats & Store::getStats()
}


void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
    const StorePath & storePath, RepairFlag repair, CheckSigsFlag checkSigs)
static std::string makeCopyPathMessage(
    std::string_view srcUri,
    std::string_view dstUri,
    std::string_view storePath)
{
    auto srcUri = srcStore->getUri();
    auto dstUri = dstStore->getUri();
    return srcUri == "local" || srcUri == "daemon"
        ? fmt("copying path '%s' to '%s'", storePath, dstUri)
        : dstUri == "local" || dstUri == "daemon"
        ? fmt("copying path '%s' from '%s'", storePath, srcUri)
        : fmt("copying path '%s' from '%s' to '%s'", storePath, srcUri, dstUri);
}


void copyStorePath(
    Store & srcStore,
    Store & dstStore,
    const StorePath & storePath,
    RepairFlag repair,
    CheckSigsFlag checkSigs)
{
    auto srcUri = srcStore.getUri();
    auto dstUri = dstStore.getUri();
    auto storePathS = srcStore.printStorePath(storePath);
    Activity act(*logger, lvlInfo, actCopyPath,
        srcUri == "local" || srcUri == "daemon"
        ? fmt("copying path '%s' to '%s'", srcStore->printStorePath(storePath), dstUri)
        : dstUri == "local" || dstUri == "daemon"
        ? fmt("copying path '%s' from '%s'", srcStore->printStorePath(storePath), srcUri)
        : fmt("copying path '%s' from '%s' to '%s'", srcStore->printStorePath(storePath), srcUri, dstUri),
        {srcStore->printStorePath(storePath), srcUri, dstUri});
        makeCopyPathMessage(srcUri, dstUri, storePathS),
        {storePathS, srcUri, dstUri});
    PushActivity pact(act.id);

    auto info = srcStore->queryPathInfo(storePath);
    auto info = srcStore.queryPathInfo(storePath);

    uint64_t total = 0;

    // recompute store path on the chance dstStore does it differently
    if (info->ca && info->references.empty()) {
        auto info2 = make_ref<ValidPathInfo>(*info);
        info2->path = dstStore->makeFixedOutputPathFromCA(
        info2->path = dstStore.makeFixedOutputPathFromCA(
            info->fullStorePathDescriptorOpt().value());
        if (dstStore->storeDir == srcStore->storeDir)
        if (dstStore.storeDir == srcStore.storeDir)
            assert(info->path == info2->path);
        info = info2;
    }
@@ -784,32 +855,56 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
            act.progress(total, info->narSize);
        });
        TeeSink tee { sink, progressSink };
        srcStore->narFromPath(storePath, tee);
        srcStore.narFromPath(storePath, tee);
    }, [&]() {
        throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", srcStore->printStorePath(storePath), srcStore->getUri());
        throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", srcStore.printStorePath(storePath), srcStore.getUri());
    });

    dstStore->addToStore(*info, *source, repair, checkSigs);
    dstStore.addToStore(*info, *source, repair, checkSigs);
}


std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStore, const RealisedPath::Set & paths,
    RepairFlag repair, CheckSigsFlag checkSigs, SubstituteFlag substitute)
std::map<StorePath, StorePath> copyPaths(
    Store & srcStore,
    Store & dstStore,
    const RealisedPath::Set & paths,
    RepairFlag repair,
    CheckSigsFlag checkSigs,
    SubstituteFlag substitute)
{
    StorePathSet storePaths;
    std::set<Realisation> realisations;
    std::set<Realisation> toplevelRealisations;
    for (auto & path : paths) {
        storePaths.insert(path.path());
        if (auto realisation = std::get_if<Realisation>(&path.raw)) {
            settings.requireExperimentalFeature("ca-derivations");
            realisations.insert(*realisation);
            toplevelRealisations.insert(*realisation);
        }
    }
    auto pathsMap = copyPaths(srcStore, dstStore, storePaths, repair, checkSigs, substitute);

    ThreadPool pool;

    try {
        for (auto & realisation : realisations) {
            dstStore->registerDrvOutput(realisation, checkSigs);
        }
        // Copy the realisation closure
        processGraph<Realisation>(
            pool, Realisation::closure(srcStore, toplevelRealisations),
            [&](const Realisation & current) -> std::set<Realisation> {
                std::set<Realisation> children;
                for (const auto & [drvOutput, _] : current.dependentRealisations) {
                    auto currentChild = srcStore.queryRealisation(drvOutput);
                    if (!currentChild)
                        throw Error(
                            "incomplete realisation closure: '%s' is a "
                            "dependency of '%s' but isn't registered",
                            drvOutput.to_string(), current.id.to_string());
                    children.insert(*currentChild);
                }
                return children;
            },
            [&](const Realisation& current) -> void {
                dstStore.registerDrvOutput(current, checkSigs);
            });
    } catch (MissingExperimentalFeature & e) {
        // Don't fail if the remote doesn't support CA derivations is it might
        // not be within our control to change that, and we might still want
@@ -823,10 +918,15 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
    return pathsMap;
}

std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & storePaths,
    RepairFlag repair, CheckSigsFlag checkSigs, SubstituteFlag substitute)
std::map<StorePath, StorePath> copyPaths(
    Store & srcStore,
    Store & dstStore,
    const StorePathSet & storePaths,
    RepairFlag repair,
    CheckSigsFlag checkSigs,
    SubstituteFlag substitute)
{
    auto valid = dstStore->queryValidPaths(storePaths, substitute);
    auto valid = dstStore.queryValidPaths(storePaths, substitute);

    StorePathSet missing;
    for (auto & path : storePaths)

@@ -836,9 +936,31 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
    for (auto & path : storePaths)
        pathsMap.insert_or_assign(path, path);

    Activity act(*logger, lvlInfo, actCopyPaths, fmt("copying %d paths", missing.size()));

    auto sorted = srcStore.topoSortPaths(missing);
    std::reverse(sorted.begin(), sorted.end());

    auto source = sinkToSource([&](Sink & sink) {
        sink << sorted.size();
        for (auto & storePath : sorted) {
            auto srcUri = srcStore.getUri();
            auto dstUri = dstStore.getUri();
            auto storePathS = srcStore.printStorePath(storePath);
            Activity act(*logger, lvlInfo, actCopyPath,
                makeCopyPathMessage(srcUri, dstUri, storePathS),
                {storePathS, srcUri, dstUri});
            PushActivity pact(act.id);

            auto info = srcStore.queryPathInfo(storePath);
            info->write(sink, srcStore, 16);
            srcStore.narFromPath(storePath, sink);
        }
    });

    dstStore.addMultipleToStore(*source, repair, checkSigs);

#if 0
    std::atomic<size_t> nrDone{0};
    std::atomic<size_t> nrFailed{0};
    std::atomic<uint64_t> bytesExpected{0};
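The hunk above topologically sorts the missing paths and reverses the result before streaming everything through a single source into addMultipleToStore, so that each path is sent only after the paths it references. A standalone sketch of that ordering on a toy graph (node names and graph shape are invented; this is not Nix's store):

    // Standalone sketch: order a dependency graph so every node comes after its
    // references. A depth-first post-order yields that order directly, matching
    // what the hunk above obtains via topoSortPaths followed by std::reverse.
    #include <iostream>
    #include <map>
    #include <set>
    #include <string>
    #include <vector>

    using Graph = std::map<std::string, std::set<std::string>>; // node -> references

    static void visit(const Graph & g, const std::string & n,
        std::set<std::string> & seen, std::vector<std::string> & sorted)
    {
        if (!seen.insert(n).second) return;
        for (auto & ref : g.at(n))
            visit(g, ref, seen, sorted);
        sorted.push_back(n);   // post-order: references were pushed first
    }

    int main()
    {
        Graph g = {
            {"app", {"lib", "runtime"}},
            {"lib", {"runtime"}},
            {"runtime", {}},
        };

        std::set<std::string> seen;
        std::vector<std::string> sorted;
        for (auto & [node, refs] : g)
            visit(g, node, seen, sorted);

        // Safe streaming order: each entry only refers to entries sent before it.
        for (auto & n : sorted) std::cout << n << "\n";  // runtime, lib, app
    }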
@@ -854,19 +976,22 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
        StorePathSet(missing.begin(), missing.end()),

        [&](const StorePath & storePath) {
            auto info = srcStore->queryPathInfo(storePath);
            auto info = srcStore.queryPathInfo(storePath);
            auto storePathForDst = storePath;
            if (info->ca && info->references.empty()) {
                storePathForDst = dstStore->makeFixedOutputPathFromCA(
                storePathForDst = dstStore.makeFixedOutputPathFromCA(
                    info->fullStorePathDescriptorOpt().value());
                if (dstStore->storeDir == srcStore->storeDir)
                if (dstStore.storeDir == srcStore.storeDir)
                    assert(storePathForDst == storePath);
                if (storePathForDst != storePath)
                    debug("replaced path '%s' to '%s' for substituter '%s'", srcStore->printStorePath(storePath), dstStore->printStorePath(storePathForDst), dstStore->getUri());
                    debug("replaced path '%s' to '%s' for substituter '%s'",
                        srcStore.printStorePath(storePath),
                        dstStore.printStorePath(storePathForDst),
                        dstStore.getUri());
            }
            pathsMap.insert_or_assign(storePath, storePathForDst);

            if (dstStore->isValidPath(storePath)) {
            if (dstStore.isValidPath(storePath)) {
                nrDone++;
                showProgress();
                return StorePathSet();

@@ -881,20 +1006,23 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
        [&](const StorePath & storePath) {
            checkInterrupt();

            auto info = srcStore->queryPathInfo(storePath);
            auto info = srcStore.queryPathInfo(storePath);

            auto storePathForDst = storePath;
            if (info->ca && info->references.empty()) {
                storePathForDst = dstStore->makeFixedOutputPathFromCA(
                storePathForDst = dstStore.makeFixedOutputPathFromCA(
                    info->fullStorePathDescriptorOpt().value());
                if (dstStore->storeDir == srcStore->storeDir)
                    assert(storePathForDst == storePath);
                if (storePathForDst != storePath)
                    debug("replaced path '%s' to '%s' for substituter '%s'", srcStore->printStorePath(storePath), dstStore->printStorePath(storePathForDst), dstStore->getUri());
                    debug("replaced path '%s' to '%s' for substituter '%s'",
                        srcStore.printStorePath(storePath),
                        dstStore.printStorePath(storePathForDst),
                        dstStore.getUri());
            }
            pathsMap.insert_or_assign(storePath, storePathForDst);

            if (!dstStore->isValidPath(storePathForDst)) {
            if (!dstStore.isValidPath(storePathForDst)) {
                MaintainCount<decltype(nrRunning)> mc(nrRunning);
                showProgress();
                try {
@@ -903,7 +1031,7 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
                    nrFailed++;
                    if (!settings.keepGoing)
                        throw e;
                    logger->log(lvlError, fmt("could not copy %s: %s", dstStore->printStorePath(storePath), e.what()));
                    logger->log(lvlError, fmt("could not copy %s: %s", dstStore.printStorePath(storePath), e.what()));
                    showProgress();
                    return;
                }

@@ -912,9 +1040,27 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
            nrDone++;
            showProgress();
        });
#endif

    return pathsMap;
}

void copyClosure(
    Store & srcStore,
    Store & dstStore,
    const RealisedPath::Set & paths,
    RepairFlag repair,
    CheckSigsFlag checkSigs,
    SubstituteFlag substitute)
{
    if (&srcStore == &dstStore) return;

    RealisedPath::Set closure;
    RealisedPath::closure(srcStore, paths, closure);

    copyPaths(srcStore, dstStore, closure, repair, checkSigs, substitute);
}

std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, std::optional<HashResult> hashGiven)
{
    std::string path;
@@ -182,6 +182,8 @@ struct StoreConfig : public Config
    StoreConfig() = delete;

    StringSet getDefaultSystemFeatures();

    virtual ~StoreConfig() { }

    virtual const std::string name() = 0;

@@ -198,7 +200,7 @@ struct StoreConfig : public Config
    Setting<bool> wantMassQuery{this, false, "want-mass-query", "whether this substituter can be queried efficiently for path validity"};

    Setting<StringSet> systemFeatures{this, settings.systemFeatures,
    Setting<StringSet> systemFeatures{this, getDefaultSystemFeatures(),
        "system-features",
        "Optional features that the system this store builds on implements (like \"kvm\")."};

@@ -424,9 +426,10 @@ public:
    virtual StorePathSet querySubstitutablePaths(const StorePathSet & paths) { return {}; };

    /* Query substitute info (i.e. references, derivers and download
       sizes) of a map of paths to their optional ca values. If a path
       does not have substitute info, it's omitted from the resulting
       ‘infos’ map. */
       sizes) of a map of paths to their optional ca values. The info
       of the first succeeding substituter for each path will be
       returned. If a path does not have substitute info, it's omitted
       from the resulting ‘infos’ map. */
    virtual void querySubstitutablePathInfos(const StorePathCAMap & paths,
        SubstitutablePathInfos & infos) { return; };

@@ -434,6 +437,12 @@ public:
    virtual void addToStore(const ValidPathInfo & info, Source & narSource,
        RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs) = 0;

    /* Import multiple paths into the store. */
    virtual void addMultipleToStore(
        Source & source,
        RepairFlag repair = NoRepair,
        CheckSigsFlag checkSigs = CheckSigs);

    /* Copy the contents of a path to the store and register the
       validity the resulting path. The resulting path is returned.
       The function object `filter' can be used to exclude files (see
@@ -491,7 +500,8 @@ public:
       not derivations, substitute them. */
    virtual void buildPaths(
        const std::vector<DerivedPath> & paths,
        BuildMode buildMode = bmNormal);
        BuildMode buildMode = bmNormal,
        std::shared_ptr<Store> evalStore = nullptr);

    /* Build a single non-materialized derivation (i.e. not from an
       on-disk .drv file).

@@ -537,7 +547,7 @@ public:
    /* Add a store path as a temporary root of the garbage collector.
       The root disappears as soon as we exit. */
    virtual void addTempRoot(const StorePath & path)
    { warn("not creating temp root, store doesn't support GC"); }
    { debug("not creating temporary root, store doesn't support GC"); }

    /* Add an indirect root, which is merely a symlink to `path' from
       /nix/var/nix/gcroots/auto/<hash of `path'>. `path' is supposed

@@ -691,6 +701,11 @@ public:
    const Stats & getStats();

    /* Computes the full closure of of a set of store-paths for e.g.
       derivations that need this information for `exportReferencesGraph`.
     */
    StorePathSet exportReferences(const StorePathSet & storePaths, const StorePathSet & inputPaths);

    /* Return the build log of the specified store path, if available,
       or null otherwise. */
    virtual std::shared_ptr<std::string> getBuildLog(const StorePath & path)

@@ -740,8 +755,12 @@ protected:


/* Copy a path from one store to another. */
void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
    const StorePath & storePath, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs);
void copyStorePath(
    Store & srcStore,
    Store & dstStore,
    const StorePath & storePath,
    RepairFlag repair = NoRepair,
    CheckSigsFlag checkSigs = CheckSigs);


/* Copy store paths from one store to another. The paths may be copied
@@ -750,17 +769,27 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
   of store paths is not automatically closed; use copyClosure() for
   that. Returns a map of what each path was copied to the dstStore
   as. */
std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStore,
std::map<StorePath, StorePath> copyPaths(
    Store & srcStore, Store & dstStore,
    const RealisedPath::Set &,
    RepairFlag repair = NoRepair,
    CheckSigsFlag checkSigs = CheckSigs,
    SubstituteFlag substitute = NoSubstitute);
std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStore,
    const StorePathSet& paths,

std::map<StorePath, StorePath> copyPaths(
    Store & srcStore, Store & dstStore,
    const StorePathSet & paths,
    RepairFlag repair = NoRepair,
    CheckSigsFlag checkSigs = CheckSigs,
    SubstituteFlag substitute = NoSubstitute);

/* Copy the closure of `paths` from `srcStore` to `dstStore`. */
void copyClosure(
    Store & srcStore, Store & dstStore,
    const RealisedPath::Set & paths,
    RepairFlag repair = NoRepair,
    CheckSigsFlag checkSigs = CheckSigs,
    SubstituteFlag substitute = NoSubstitute);

/* Remove the temporary roots file for this process. Any temporary
   root becomes garbage after this point unless it has been registered

@@ -860,4 +889,9 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri)
std::optional<ContentAddress> getDerivationCA(const BasicDerivation & drv);

std::map<DrvOutput, StorePath> drvOutputReferences(
    Store & store,
    const Derivation & drv,
    const StorePath & outputPath);

}
@@ -45,6 +45,12 @@ std::string UDSRemoteStore::getUri()
}


void UDSRemoteStore::Connection::closeWrite()
{
    shutdown(fd.get(), SHUT_WR);
}


ref<RemoteStore::Connection> UDSRemoteStore::openConnection()
{
    auto conn = make_ref<Connection>();
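closeWrite() above half-closes the daemon socket with shutdown(fd, SHUT_WR): the peer sees end-of-file on its read side while this side can still read whatever the peer has left to say, which is how the handshake error path earlier in this diff drains the daemon's reply. A standalone POSIX sketch of that half-close behaviour (socketpair stands in for the daemon connection; error handling is minimal):

    // Standalone sketch: shutdown(fd, SHUT_WR) closes only the write side, so
    // the peer sees EOF but can still send, and we can still receive.
    #include <sys/socket.h>
    #include <unistd.h>
    #include <cstdio>

    int main()
    {
        int fds[2];
        if (socketpair(AF_UNIX, SOCK_STREAM, 0, fds) != 0) { perror("socketpair"); return 1; }

        // "Client" stops writing but keeps reading.
        shutdown(fds[0], SHUT_WR);

        // "Server" notices EOF on its read, then sends a final reply.
        char buf[16];
        ssize_t n = read(fds[1], buf, sizeof buf);   // returns 0: client half-closed
        std::printf("server saw EOF (read returned %zd)\n", n);

        const char reply[] = "bye";
        n = write(fds[1], reply, sizeof reply);      // server can still answer

        n = read(fds[0], buf, sizeof buf);           // client can still receive
        std::printf("client got %zd bytes: '%s'\n", n, buf);

        close(fds[0]);
        close(fds[1]);
        return 0;
    }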
@@ -40,6 +40,12 @@ public:

private:

    struct Connection : RemoteStore::Connection
    {
        AutoCloseFD fd;
        void closeWrite() override;
    };

    ref<RemoteStore::Connection> openConnection() override;
    std::optional<std::string> path;
};
@@ -9,7 +9,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f

#define PROTOCOL_VERSION (1 << 8 | 29)
#define PROTOCOL_VERSION (1 << 8 | 32)
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)

@@ -55,6 +55,7 @@ typedef enum {
    wopQueryDerivationOutputMap = 41,
    wopRegisterDrvOutput = 42,
    wopQueryRealisation = 43,
    wopAddMultipleToStore = 44,
} WorkerOp;