NixOS/nix (mirror of https://github.com/NixOS/nix.git)
Commit 465daa9396: Merge remote-tracking branch 'upstream/master' into add-body-to-network-errors
90 changed files with 1375 additions and 623 deletions
@@ -1950,8 +1950,11 @@ void linkOrCopy(const Path & from, const Path & to)
         /* Hard-linking fails if we exceed the maximum link count on a
            file (e.g. 32000 of ext3), which is quite possible after a
            'nix-store --optimise'. FIXME: actually, why don't we just
-           bind-mount in this case? */
-        if (errno != EMLINK)
+           bind-mount in this case?
+
+           It can also fail with EPERM in BeegFS v7 and earlier versions
+           which don't allow hard-links to other directories */
+        if (errno != EMLINK && errno != EPERM)
            throw SysError("linking '%s' to '%s'", to, from);
        copyPath(from, to);
    }

@@ -2750,8 +2753,8 @@ struct RestrictedStore : public LocalFSStore
    void queryReferrers(const StorePath & path, StorePathSet & referrers) override
    { }

-    StorePathSet queryDerivationOutputs(const StorePath & path) override
-    { throw Error("queryDerivationOutputs"); }
+    OutputPathMap queryDerivationOutputMap(const StorePath & path) override
+    { throw Error("queryDerivationOutputMap"); }

    std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
    { throw Error("queryPathFromHashPart"); }

@@ -3714,14 +3717,11 @@ void DerivationGoal::registerOutputs()
        /* Check that fixed-output derivations produced the right
           outputs (i.e., the content hash should match the specified
           hash). */
-        std::string ca;
+        std::optional<ContentAddress> ca;

        if (fixedOutput) {

-            FileIngestionMethod outputHashMode; Hash h;
-            i.second.parseHashInfo(outputHashMode, h);
-
-            if (outputHashMode == FileIngestionMethod::Flat) {
+            if (i.second.hash->method == FileIngestionMethod::Flat) {
                /* The output path should be a regular file without execute permission. */
                if (!S_ISREG(st.st_mode) || (st.st_mode & S_IXUSR) != 0)
                    throw BuildError(

@@ -3732,20 +3732,22 @@ void DerivationGoal::registerOutputs()

            /* Check the hash. In hash mode, move the path produced by
               the derivation to its content-addressed location. */
-            Hash h2 = outputHashMode == FileIngestionMethod::Recursive
-                ? hashPath(h.type, actualPath).first
-                : hashFile(h.type, actualPath);
+            Hash h2 = i.second.hash->method == FileIngestionMethod::Recursive
+                ? hashPath(*i.second.hash->hash.type, actualPath).first
+                : hashFile(*i.second.hash->hash.type, actualPath);

-            auto dest = worker.store.makeFixedOutputPath(outputHashMode, h2, i.second.path.name());
+            auto dest = worker.store.makeFixedOutputPath(i.second.hash->method, h2, i.second.path.name());

-            if (h != h2) {
+            if (i.second.hash->hash != h2) {

                /* Throw an error after registering the path as
                   valid. */
                worker.hashMismatch = true;
                delayedException = std::make_exception_ptr(
                    BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s",
-                        worker.store.printStorePath(dest), h.to_string(SRI, true), h2.to_string(SRI, true)));
+                        worker.store.printStorePath(dest),
+                        i.second.hash->hash.to_string(SRI, true),
+                        h2.to_string(SRI, true)));

                Path actualDest = worker.store.Store::toRealPath(dest);

@@ -3765,7 +3767,10 @@ void DerivationGoal::registerOutputs()
                else
                    assert(worker.store.parseStorePath(path) == dest);

-                ca = makeFixedOutputCA(outputHashMode, h2);
+                ca = FixedOutputHash {
+                    .method = i.second.hash->method,
+                    .hash = h2,
+                };
            }

            /* Get rid of all weird permissions. This also checks that

@@ -3838,7 +3843,10 @@ void DerivationGoal::registerOutputs()
        info.ca = ca;
        worker.store.signPathInfo(info);

-        if (!info.references.empty()) info.ca.clear();
+        if (!info.references.empty()) {
+            // FIXME don't we have an experimental feature for fixed output with references?
+            info.ca = {};
+        }

        infos.emplace(i.first, std::move(info));
    }

@@ -4998,7 +5006,7 @@ bool Worker::pathContentsGood(const StorePath & path)
    if (!pathExists(store.printStorePath(path)))
        res = false;
    else {
-        HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
+        HashResult current = hashPath(*info->narHash.type, store.printStorePath(path));
        Hash nullHash(htSHA256);
        res = info->narHash == nullHash || info->narHash == current.first;
    }
@@ -58,14 +58,17 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
        }
    };

+    /* We always have one output, and if it's a fixed-output derivation (as
+       checked below) it must be the only output */
+    auto & output = drv.outputs.begin()->second;
+
    /* Try the hashed mirrors first. */
-    if (getAttr("outputHashMode") == "flat")
+    if (output.hash && output.hash->method == FileIngestionMethod::Flat)
        for (auto hashedMirror : settings.hashedMirrors.get())
            try {
                if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
-                auto ht = parseHashType(getAttr("outputHashAlgo"));
-                auto h = Hash(getAttr("outputHash"), ht);
-                fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
+                auto & h = output.hash->hash;
+                fetch(hashedMirror + printHashType(*h.type) + "/" + h.to_string(Base16, false));
                return;
            } catch (Error & e) {
                debug(e.what());
src/libstore/content-address.cc (new file, 85 lines)
@@ -0,0 +1,85 @@
+#include "content-address.hh"
+
+namespace nix {
+
+std::string FixedOutputHash::printMethodAlgo() const {
+    return makeFileIngestionPrefix(method) + printHashType(*hash.type);
+}
+
+std::string makeFileIngestionPrefix(const FileIngestionMethod m) {
+    switch (m) {
+    case FileIngestionMethod::Flat:
+        return "";
+    case FileIngestionMethod::Recursive:
+        return "r:";
+    default:
+        throw Error("impossible, caught both cases");
+    }
+}
+
+std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
+{
+    return "fixed:"
+        + makeFileIngestionPrefix(method)
+        + hash.to_string(Base32, true);
+}
+
+// FIXME Put this somewhere?
+template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
+template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
+
+std::string renderContentAddress(ContentAddress ca) {
+    return std::visit(overloaded {
+        [](TextHash th) {
+            return "text:" + th.hash.to_string(Base32, true);
+        },
+        [](FixedOutputHash fsh) {
+            return makeFixedOutputCA(fsh.method, fsh.hash);
+        }
+    }, ca);
+}
+
+ContentAddress parseContentAddress(std::string_view rawCa) {
+    auto prefixSeparator = rawCa.find(':');
+    if (prefixSeparator != string::npos) {
+        auto prefix = string(rawCa, 0, prefixSeparator);
+        if (prefix == "text") {
+            auto hashTypeAndHash = rawCa.substr(prefixSeparator+1, string::npos);
+            Hash hash = Hash(string(hashTypeAndHash));
+            if (*hash.type != htSHA256) {
+                throw Error("parseContentAddress: the text hash should have type SHA256");
+            }
+            return TextHash { hash };
+        } else if (prefix == "fixed") {
+            // This has to be an inverse of makeFixedOutputCA
+            auto methodAndHash = rawCa.substr(prefixSeparator+1, string::npos);
+            if (methodAndHash.substr(0,2) == "r:") {
+                std::string_view hashRaw = methodAndHash.substr(2,string::npos);
+                return FixedOutputHash {
+                    .method = FileIngestionMethod::Recursive,
+                    .hash = Hash(string(hashRaw)),
+                };
+            } else {
+                std::string_view hashRaw = methodAndHash;
+                return FixedOutputHash {
+                    .method = FileIngestionMethod::Flat,
+                    .hash = Hash(string(hashRaw)),
+                };
+            }
+        } else {
+            throw Error("parseContentAddress: format not recognized; has to be text or fixed");
+        }
+    } else {
+        throw Error("Not a content address because it lacks an appropriate prefix");
+    }
+};
+
+std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt) {
+    return rawCaOpt == "" ? std::optional<ContentAddress> {} : parseContentAddress(rawCaOpt);
+};
+
+std::string renderContentAddress(std::optional<ContentAddress> ca) {
+    return ca ? renderContentAddress(*ca) : "";
+}
+
+}
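As a side note on the 'overloaded' helper introduced in the new file above: it is the standard C++17 trick for assembling a std::variant visitor out of lambdas, which renderContentAddress uses with std::visit to pick the right textual form per alternative. A minimal standalone sketch of the same idiom follows; the TextLike/FixedLike types are made up for illustration and are not the Nix types.

    #include <iostream>
    #include <string>
    #include <variant>

    // Same idiom as the 'overloaded' helper in content-address.cc:
    // inherit from every lambda and pull in all their operator()s.
    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

    struct TextLike { std::string hash; };                   // stand-in for TextHash
    struct FixedLike { bool recursive; std::string hash; };  // stand-in for FixedOutputHash
    using Address = std::variant<TextLike, FixedLike>;

    int main()
    {
        Address a = FixedLike{true, "sha256:abc..."};
        auto rendered = std::visit(overloaded {
            [](const TextLike & t) { return "text:" + t.hash; },
            [](const FixedLike & f) {
                return std::string(f.recursive ? "fixed:r:" : "fixed:") + f.hash;
            },
        }, a);
        std::cout << rendered << "\n";   // prints "fixed:r:sha256:abc..."
    }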
src/libstore/content-address.hh (new file, 56 lines)
@@ -0,0 +1,56 @@
+#pragma once
+
+#include <variant>
+#include "hash.hh"
+
+namespace nix {
+
+enum struct FileIngestionMethod : uint8_t {
+    Flat = false,
+    Recursive = true
+};
+
+struct TextHash {
+    Hash hash;
+};
+
+/// Pair of a hash, and how the file system was ingested
+struct FixedOutputHash {
+    FileIngestionMethod method;
+    Hash hash;
+    std::string printMethodAlgo() const;
+};
+
+/*
+  We've accumulated several types of content-addressed paths over the years;
+  fixed-output derivations support multiple hash algorithms and serialisation
+  methods (flat file vs NAR). Thus, ‘ca’ has one of the following forms:
+
+  * ‘text:sha256:<sha256 hash of file contents>’: For paths
+    computed by makeTextPath() / addTextToStore().
+
+  * ‘fixed:<r?>:<ht>:<h>’: For paths computed by
+    makeFixedOutputPath() / addToStore().
+*/
+typedef std::variant<
+    TextHash, // for paths computed by makeTextPath() / addTextToStore
+    FixedOutputHash // for path computed by makeFixedOutputPath
+> ContentAddress;
+
+/* Compute the prefix to the hash algorithm which indicates how the files were
+   ingested. */
+std::string makeFileIngestionPrefix(const FileIngestionMethod m);
+
+/* Compute the content-addressability assertion (ValidPathInfo::ca)
+   for paths created by makeFixedOutputPath() / addToStore(). */
+std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
+
+std::string renderContentAddress(ContentAddress ca);
+
+std::string renderContentAddress(std::optional<ContentAddress> ca);
+
+ContentAddress parseContentAddress(std::string_view rawCa);
+
+std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt);
+
+}
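A small usage sketch for the API declared in this new header, assuming it is compiled inside the Nix tree and linked against libstore; the hash value below is only a placeholder. parseContentAddress turns the textual form back into the variant, and renderContentAddress is its inverse.

    #include "content-address.hh"

    #include <cassert>
    #include <iostream>

    int main()
    {
        using namespace nix;

        // "fixed:r:..." means recursive (NAR) ingestion; "fixed:..." alone means a flat file.
        std::string s = "fixed:r:sha256:1b4sb93wp679q4zx9k1ignby1yna3z7c4c2ri3wphylbc6dwdpc3";

        ContentAddress ca = parseContentAddress(s);
        assert(std::holds_alternative<FixedOutputHash>(ca));

        // Round-trips back to the original string.
        std::cout << renderContentAddress(ca) << "\n";
    }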
@@ -78,10 +78,10 @@ struct TunnelLogger : public Logger
        if (ei.level > verbosity) return;

        std::stringstream oss;
-        oss << ei;
+        showErrorInfo(oss, ei, false);

        StringSink buf;
-        buf << STDERR_NEXT << oss.str() << "\n"; // (fs.s + "\n");
+        buf << STDERR_NEXT << oss.str() << "\n";
        enqueueMsg(*buf.s);
    }

@@ -347,6 +347,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
        break;
    }

+    case wopQueryDerivationOutputMap: {
+        auto path = store->parseStorePath(readString(from));
+        logger->startWork();
+        OutputPathMap outputs = store->queryDerivationOutputMap(path);
+        logger->stopWork();
+        writeOutputPathMap(*store, to, outputs);
+        break;
+    }
+
    case wopQueryDeriver: {
        auto path = store->parseStorePath(readString(from));
        logger->startWork();

@@ -652,7 +661,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
            if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
                to << info->ultimate
                   << info->sigs
-                   << info->ca;
+                   << renderContentAddress(info->ca);
            }
        } else {
            assert(GET_PROTOCOL_MINOR(clientVersion) >= 17);

@@ -710,7 +719,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
        info.references = readStorePaths<StorePathSet>(*store, from);
        from >> info.registrationTime >> info.narSize >> info.ultimate;
        info.sigs = readStrings<StringSet>(from);
-        from >> info.ca >> repair >> dontCheckSigs;
+        info.ca = parseContentAddressOpt(readString(from));
+        from >> repair >> dontCheckSigs;
        if (!trusted && dontCheckSigs)
            dontCheckSigs = false;
        if (!trusted)
@@ -8,25 +8,6 @@

 namespace nix {

-
-void DerivationOutput::parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const
-{
-    recursive = FileIngestionMethod::Flat;
-    string algo = hashAlgo;
-
-    if (string(algo, 0, 2) == "r:") {
-        recursive = FileIngestionMethod::Recursive;
-        algo = string(algo, 2);
-    }
-
-    HashType hashType = parseHashType(algo);
-    if (hashType == htUnknown)
-        throw Error("unknown hash algorithm '%s'", algo);
-
-    hash = Hash(this->hash, hashType);
-}
-
-
 const StorePath & BasicDerivation::findOutput(const string & id) const
 {
    auto i = outputs.find(id);

@@ -120,6 +101,34 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
 }


+static DerivationOutput parseDerivationOutput(const Store & store, istringstream_nocopy & str)
+{
+    expect(str, ","); auto path = store.parseStorePath(parsePath(str));
+    expect(str, ","); auto hashAlgo = parseString(str);
+    expect(str, ","); const auto hash = parseString(str);
+    expect(str, ")");
+
+    std::optional<FixedOutputHash> fsh;
+    if (hashAlgo != "") {
+        auto method = FileIngestionMethod::Flat;
+        if (string(hashAlgo, 0, 2) == "r:") {
+            method = FileIngestionMethod::Recursive;
+            hashAlgo = string(hashAlgo, 2);
+        }
+        const HashType hashType = parseHashType(hashAlgo);
+        fsh = FixedOutputHash {
+            .method = std::move(method),
+            .hash = Hash(hash, hashType),
+        };
+    }
+
+    return DerivationOutput {
+        .path = std::move(path),
+        .hash = std::move(fsh),
+    };
+}
+
+
 static Derivation parseDerivation(const Store & store, const string & s)
 {
    Derivation drv;
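parseDerivationOutput above (and readDerivationOutput further down) decode the serialised hash-algorithm field with the same convention: an "r:" prefix marks recursive (NAR) ingestion and the rest names the hash algorithm, so "r:sha256" means a NAR hash with SHA-256 while a bare "sha256" means a flat-file hash. A standalone sketch of that split, with illustrative names that are not part of the Nix API:

    #include <iostream>
    #include <string>
    #include <utility>

    // Split a field such as "r:sha256" into (recursive?, bare algorithm name).
    static std::pair<bool, std::string> splitMethodAlgo(std::string hashAlgo)
    {
        bool recursive = false;
        if (hashAlgo.compare(0, 2, "r:") == 0) {
            recursive = true;
            hashAlgo = hashAlgo.substr(2);
        }
        return {recursive, hashAlgo};
    }

    int main()
    {
        auto [recursive, algo] = splitMethodAlgo("r:sha256");
        std::cout << (recursive ? "recursive (NAR) ingestion" : "flat ingestion")
                  << ", algorithm: " << algo << "\n";   // recursive (NAR) ingestion, algorithm: sha256
    }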
@@ -129,15 +138,8 @@ static Derivation parseDerivation(const Store & store, const string & s)
    /* Parse the list of outputs. */
    while (!endOfList(str)) {
        expect(str, "("); std::string id = parseString(str);
-        expect(str, ","); auto path = store.parseStorePath(parsePath(str));
-        expect(str, ","); auto hashAlgo = parseString(str);
-        expect(str, ","); auto hash = parseString(str);
-        expect(str, ")");
-        drv.outputs.emplace(id, DerivationOutput {
-            .path = std::move(path),
-            .hashAlgo = std::move(hashAlgo),
-            .hash = std::move(hash)
-        });
+        auto output = parseDerivationOutput(store, str);
+        drv.outputs.emplace(std::move(id), std::move(output));
    }

    /* Parse the list of input derivations. */

@@ -263,8 +265,9 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
        if (first) first = false; else s += ',';
        s += '('; printUnquotedString(s, i.first);
        s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(i.second.path));
-        s += ','; printUnquotedString(s, i.second.hashAlgo);
-        s += ','; printUnquotedString(s, i.second.hash);
+        s += ','; printUnquotedString(s, i.second.hash ? i.second.hash->printMethodAlgo() : "");
+        s += ','; printUnquotedString(s,
+            i.second.hash ? i.second.hash->hash.to_string(Base16, false) : "");
        s += ')';
    }

@@ -320,7 +323,7 @@ bool BasicDerivation::isFixedOutput() const
 {
    return outputs.size() == 1 &&
        outputs.begin()->first == "out" &&
-        outputs.begin()->second.hash != "";
+        outputs.begin()->second.hash;
 }

@@ -353,8 +356,8 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput
    if (drv.isFixedOutput()) {
        DerivationOutputs::const_iterator i = drv.outputs.begin();
        return hashString(htSHA256, "fixed:out:"
-            + i->second.hashAlgo + ":"
-            + i->second.hash + ":"
+            + i->second.hash->printMethodAlgo() + ":"
+            + i->second.hash->hash.to_string(Base16, false) + ":"
            + store.printStorePath(i->second.path));
    }

@@ -397,6 +400,31 @@ StorePathSet BasicDerivation::outputPaths() const
    return paths;
 }

+static DerivationOutput readDerivationOutput(Source & in, const Store & store)
+{
+    auto path = store.parseStorePath(readString(in));
+    auto hashAlgo = readString(in);
+    auto hash = readString(in);
+
+    std::optional<FixedOutputHash> fsh;
+    if (hashAlgo != "") {
+        auto method = FileIngestionMethod::Flat;
+        if (string(hashAlgo, 0, 2) == "r:") {
+            method = FileIngestionMethod::Recursive;
+            hashAlgo = string(hashAlgo, 2);
+        }
+        auto hashType = parseHashType(hashAlgo);
+        fsh = FixedOutputHash {
+            .method = std::move(method),
+            .hash = Hash(hash, hashType),
+        };
+    }
+
+    return DerivationOutput {
+        .path = std::move(path),
+        .hash = std::move(fsh),
+    };
+}
+
 StringSet BasicDerivation::outputNames() const
 {

@@ -413,14 +441,8 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
    auto nr = readNum<size_t>(in);
    for (size_t n = 0; n < nr; n++) {
        auto name = readString(in);
-        auto path = store.parseStorePath(readString(in));
-        auto hashAlgo = readString(in);
-        auto hash = readString(in);
-        drv.outputs.emplace(name, DerivationOutput {
-            .path = std::move(path),
-            .hashAlgo = std::move(hashAlgo),
-            .hash = std::move(hash)
-        });
+        auto output = readDerivationOutput(in, store);
+        drv.outputs.emplace(std::move(name), std::move(output));
    }

    drv.inputSrcs = readStorePaths<StorePathSet>(store, in);

@@ -441,8 +463,16 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
 void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
 {
    out << drv.outputs.size();
-    for (auto & i : drv.outputs)
-        out << i.first << store.printStorePath(i.second.path) << i.second.hashAlgo << i.second.hash;
+    for (auto & i : drv.outputs) {
+        out << i.first
+            << store.printStorePath(i.second.path);
+        if (i.second.hash) {
+            out << i.second.hash->printMethodAlgo()
+                << i.second.hash->hash.to_string(Base16, false);
+        } else {
+            out << "" << "";
+        }
+    }
    writeStorePaths(store, out, drv.inputSrcs);
    out << drv.platform << drv.builder << drv.args;
    out << drv.env.size();
@@ -1,8 +1,9 @@
 #pragma once

+#include "path.hh"
 #include "types.hh"
 #include "hash.hh"
-#include "store-api.hh"
+#include "content-address.hh"

 #include <map>

@@ -15,9 +16,7 @@ namespace nix {
 struct DerivationOutput
 {
    StorePath path;
-    std::string hashAlgo; /* hash used for expected hash computation */
-    std::string hash; /* expected hash, may be null */
-    void parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const;
+    std::optional<FixedOutputHash> hash; /* hash used for expected hash computation */
 };

 typedef std::map<string, DerivationOutput> DerivationOutputs;

@@ -70,6 +69,7 @@ struct Derivation : BasicDerivation

 class Store;

+enum RepairFlag : bool { NoRepair = false, Repair = true };

 /* Write a derivation to the Nix store, and return its path. */
 StorePath writeDerivation(ref<Store> store,
@@ -55,7 +55,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
       filesystem corruption from spreading to other machines.
       Don't complain if the stored hash is zero (unknown). */
    Hash hash = hashAndWriteSink.currentHash();
-    if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
+    if (hash != info->narHash && info->narHash != Hash(*info->narHash.type))
        throw Error("hash of path '%s' has changed from '%s' to '%s'!",
            printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
@@ -35,7 +35,7 @@ Settings::Settings()
    , nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR)))
    , nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
    , nixManDir(canonPath(NIX_MAN_DIR))
-    , nixDaemonSocketFile(canonPath(nixStateDir + DEFAULT_SOCKET_PATH))
+    , nixDaemonSocketFile(canonPath(getEnv("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))
 {
    buildUsersGroup = getuid() == 0 ? "nixbld" : "";
    lockCPU = getEnv("NIX_AFFINITY_HACK") == "1";

@@ -196,10 +196,6 @@ public:
    /* Whether to lock the Nix client and worker to the same CPU. */
    bool lockCPU;

-    /* Whether to show a stack trace if Nix evaluation fails. */
-    Setting<bool> showTrace{this, false, "show-trace",
-        "Whether to show a stack trace on evaluation errors."};
-
    Setting<SandboxMode> sandboxMode{this,
        #if __linux__
        smEnabled
@@ -114,7 +114,7 @@ struct LegacySSHStore : public Store
            if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
                auto s = readString(conn->from);
                info->narHash = s.empty() ? Hash() : Hash(s);
-                conn->from >> info->ca;
+                info->ca = parseContentAddressOpt(readString(conn->from));
                info->sigs = readStrings<StringSet>(conn->from);
            }

@@ -146,7 +146,7 @@ struct LegacySSHStore : public Store
                << info.narSize
                << info.ultimate
                << info.sigs
-                << info.ca;
+                << renderContentAddress(info.ca);
            try {
                copyNAR(source, conn->to);
            } catch (...) {
@@ -561,10 +561,12 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
        if (out == drv.outputs.end())
            throw Error("derivation '%s' does not have an output named 'out'", printStorePath(drvPath));

-        FileIngestionMethod method; Hash h;
-        out->second.parseHashInfo(method, h);
-
-        check(makeFixedOutputPath(method, h, drvName), out->second.path, "out");
+        check(
+            makeFixedOutputPath(
+                out->second.hash->method,
+                out->second.hash->hash,
+                drvName),
+            out->second.path, "out");
    }

    else {

@@ -578,7 +580,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
 uint64_t LocalStore::addValidPath(State & state,
    const ValidPathInfo & info, bool checkOutputs)
 {
-    if (info.ca != "" && !info.isContentAddressed(*this))
+    if (info.ca.has_value() && !info.isContentAddressed(*this))
        throw Error("cannot add path '%s' to the Nix store because it claims to be content-addressed but isn't",
            printStorePath(info.path));

@@ -590,7 +592,7 @@ uint64_t LocalStore::addValidPath(State & state,
        (info.narSize, info.narSize != 0)
        (info.ultimate ? 1 : 0, info.ultimate)
        (concatStringsSep(" ", info.sigs), !info.sigs.empty())
-        (info.ca, !info.ca.empty())
+        (renderContentAddress(info.ca), (bool) info.ca)
        .exec();
    uint64_t id = sqlite3_last_insert_rowid(state.db);

@@ -664,7 +666,7 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
        if (s) info->sigs = tokenizeString<StringSet>(s, " ");

        s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 7);
-        if (s) info->ca = s;
+        if (s) info->ca = parseContentAddressOpt(s);

        /* Get the references. */
        auto useQueryReferences(state->stmtQueryReferences.use()(info->id));

@@ -687,7 +689,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
        (info.narHash.to_string(Base16, true))
        (info.ultimate ? 1 : 0, info.ultimate)
        (concatStringsSep(" ", info.sigs), !info.sigs.empty())
-        (info.ca, !info.ca.empty())
+        (renderContentAddress(info.ca), (bool) info.ca)
        (printStorePath(info.path))
        .exec();
 }

@@ -772,17 +774,20 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
 }


-StorePathSet LocalStore::queryDerivationOutputs(const StorePath & path)
+OutputPathMap LocalStore::queryDerivationOutputMap(const StorePath & path)
 {
-    return retrySQLite<StorePathSet>([&]() {
+    return retrySQLite<OutputPathMap>([&]() {
        auto state(_state.lock());

        auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use()
            (queryValidPathId(*state, path)));

-        StorePathSet outputs;
+        OutputPathMap outputs;
        while (useQueryDerivationOutputs.next())
-            outputs.insert(parseStorePath(useQueryDerivationOutputs.getStr(1)));
+            outputs.emplace(
+                useQueryDerivationOutputs.getStr(0),
+                parseStorePath(useQueryDerivationOutputs.getStr(1))
+            );

        return outputs;
    });

@@ -983,15 +988,15 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,

        deletePath(realPath);

-        if (info.ca != "" &&
-            !((hasPrefix(info.ca, "text:") && !info.references.count(info.path))
-                || info.references.empty()))
+        // text hashing has long been allowed to have non-self-references because it is used for drv files.
+        bool refersToSelf = info.references.count(info.path) > 0;
+        if (info.ca.has_value() && !info.references.empty() && !(std::holds_alternative<TextHash>(*info.ca) && !refersToSelf))
            settings.requireExperimentalFeature("ca-references");

        /* While restoring the path from the NAR, compute the hash
           of the NAR. */
        std::unique_ptr<AbstractHashSink> hashSink;
-        if (info.ca == "" || !info.references.count(info.path))
+        if (!info.ca.has_value() || !info.references.count(info.path))
            hashSink = std::make_unique<HashSink>(htSHA256);
        else
            hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));

@@ -1077,7 +1082,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
        ValidPathInfo info(dstPath);
        info.narHash = hash.first;
        info.narSize = hash.second;
-        info.ca = makeFixedOutputCA(method, h);
+        info.ca = FixedOutputHash { .method = method, .hash = h };
        registerValidPath(info);
    }

@@ -1141,7 +1146,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
        info.narHash = narHash;
        info.narSize = sink.s->size();
        info.references = references;
-        info.ca = "text:" + hash.to_string(Base32, true);
+        info.ca = TextHash { .hash = hash };
        registerValidPath(info);
    }

@@ -1252,10 +1257,10 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
                printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));

                std::unique_ptr<AbstractHashSink> hashSink;
-                if (info->ca == "" || !info->references.count(info->path))
-                    hashSink = std::make_unique<HashSink>(info->narHash.type);
+                if (!info->ca || !info->references.count(info->path))
+                    hashSink = std::make_unique<HashSink>(*info->narHash.type);
                else
-                    hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
+                    hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));

                dumpPath(Store::toRealPath(i), *hashSink);
                auto current = hashSink->finish();
@@ -133,7 +133,7 @@ public:

    StorePathSet queryValidDerivers(const StorePath & path) override;

-    StorePathSet queryDerivationOutputs(const StorePath & path) override;
+    OutputPathMap queryDerivationOutputMap(const StorePath & path) override;

    std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
@@ -203,7 +203,7 @@ public:
                narInfo->deriver = StorePath(queryNAR.getStr(9));
            for (auto & sig : tokenizeString<Strings>(queryNAR.getStr(10), " "))
                narInfo->sigs.insert(sig);
-            narInfo->ca = queryNAR.getStr(11);
+            narInfo->ca = parseContentAddressOpt(queryNAR.getStr(11));

            return {oValid, narInfo};
        });

@@ -237,7 +237,7 @@ public:
                (concatStringsSep(" ", info->shortRefs()))
                (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
                (concatStringsSep(" ", info->sigs))
-                (info->ca)
+                (renderContentAddress(info->ca))
                (time(0)).exec();

            } else {
@@ -67,8 +67,9 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
        else if (name == "Sig")
            sigs.insert(value);
        else if (name == "CA") {
-            if (!ca.empty()) corrupt();
-            ca = value;
+            if (ca) corrupt();
+            // FIXME: allow blank ca or require skipping field?
+            ca = parseContentAddressOpt(value);
        }

        pos = eol + 1;

@@ -104,8 +105,8 @@ std::string NarInfo::to_string(const Store & store) const
    for (auto sig : sigs)
        res += "Sig: " + sig + "\n";

-    if (!ca.empty())
-        res += "CA: " + ca + "\n";
+    if (ca)
+        res += "CA: " + renderContentAddress(*ca) + "\n";

    return res;
 }
@@ -1,4 +1,4 @@
-#include "derivations.hh"
+#include "store-api.hh"

 #include <nlohmann/json_fwd.hpp>
@@ -1,5 +1,6 @@
 #pragma once

+#include "content-address.hh"
 #include "types.hh"

 namespace nix {

@@ -61,15 +62,11 @@ public:

 typedef std::set<StorePath> StorePathSet;
 typedef std::vector<StorePath> StorePaths;
+typedef std::map<string, StorePath> OutputPathMap;

 /* Extension of derivations in the Nix store. */
 const std::string drvExtension = ".drv";

-enum struct FileIngestionMethod : uint8_t {
-    Flat = false,
-    Recursive = true
-};
-
 struct StorePathWithOutputs
 {
    StorePath path;
@@ -8,6 +8,7 @@
 #include "derivations.hh"
 #include "pool.hh"
 #include "finally.hh"
+#include "logging.hh"

 #include <sys/types.h>
 #include <sys/stat.h>

@@ -38,6 +39,29 @@ void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths
        out << store.printStorePath(i);
 }

+std::map<string, StorePath> readOutputPathMap(const Store & store, Source & from)
+{
+    std::map<string, StorePath> pathMap;
+    auto rawInput = readStrings<Strings>(from);
+    if (rawInput.size() % 2)
+        throw Error("got an odd number of elements from the daemon when trying to read a output path map");
+    auto curInput = rawInput.begin();
+    while (curInput != rawInput.end()) {
+        auto thisKey = *curInput++;
+        auto thisValue = *curInput++;
+        pathMap.emplace(thisKey, store.parseStorePath(thisValue));
+    }
+    return pathMap;
+}
+
+void writeOutputPathMap(const Store & store, Sink & out, const std::map<string, StorePath> & pathMap)
+{
+    out << 2*pathMap.size();
+    for (auto & i : pathMap) {
+        out << i.first;
+        out << store.printStorePath(i.second);
+    }
+}
+
 /* TODO: Separate these store impls into different files, give them better names */
 RemoteStore::RemoteStore(const Params & params)
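The pair readOutputPathMap/writeOutputPathMap added above frames the map on the wire as an ordinary string list of alternating names and paths, hence the 2*pathMap.size() length prefix on the writing side and the odd-count error on the reading side. The following standalone sketch illustrates the same flattening, with plain std::string values standing in for store paths and std::vector standing in for the wire Source/Sink:

    #include <iostream>
    #include <map>
    #include <stdexcept>
    #include <string>
    #include <vector>

    static std::vector<std::string> flatten(const std::map<std::string, std::string> & m)
    {
        std::vector<std::string> out;
        for (auto & [name, path] : m) {
            out.push_back(name);   // output name, e.g. "out"
            out.push_back(path);   // corresponding store path
        }
        return out;
    }

    static std::map<std::string, std::string> unflatten(const std::vector<std::string> & raw)
    {
        if (raw.size() % 2)
            throw std::runtime_error("odd number of elements in output path map");
        std::map<std::string, std::string> m;
        for (size_t i = 0; i < raw.size(); i += 2)
            m.emplace(raw[i], raw[i + 1]);
        return m;
    }

    int main()
    {
        std::map<std::string, std::string> outputs = {
            {"out", "/nix/store/...-hello"},
            {"dev", "/nix/store/...-hello-dev"},
        };
        auto roundTripped = unflatten(flatten(outputs));
        std::cout << (roundTripped == outputs ? "round trip ok" : "mismatch") << "\n";
    }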
@@ -197,7 +221,7 @@ void RemoteStore::setOptions(Connection & conn)
        overrides.erase(settings.maxSilentTime.name);
        overrides.erase(settings.buildCores.name);
        overrides.erase(settings.useSubstitutes.name);
-        overrides.erase(settings.showTrace.name);
+        overrides.erase(loggerSettings.showTrace.name);
        conn.to << overrides.size();
        for (auto & i : overrides)
            conn.to << i.first << i.second.value;

@@ -381,7 +405,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
            if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
                conn->from >> info->ultimate;
                info->sigs = readStrings<StringSet>(conn->from);
-                conn->from >> info->ca;
+                info->ca = parseContentAddressOpt(readString(conn->from));
            }
        }
        callback(std::move(info));

@@ -412,12 +436,24 @@ StorePathSet RemoteStore::queryValidDerivers(const StorePath & path)
 StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path)
 {
    auto conn(getConnection());
+    if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 0x16) {
+        return Store::queryDerivationOutputs(path);
+    }
    conn->to << wopQueryDerivationOutputs << printStorePath(path);
    conn.processStderr();
    return readStorePaths<StorePathSet>(*this, conn->from);
 }


+OutputPathMap RemoteStore::queryDerivationOutputMap(const StorePath & path)
+{
+    auto conn(getConnection());
+    conn->to << wopQueryDerivationOutputMap << printStorePath(path);
+    conn.processStderr();
+    return readOutputPathMap(*this, conn->from);
+}
+
 std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string & hashPart)
 {
    auto conn(getConnection());

@@ -465,7 +501,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
            << info.narHash.to_string(Base16, false);
        writeStorePaths(*this, conn->to, info.references);
        conn->to << info.registrationTime << info.narSize
-                 << info.ultimate << info.sigs << info.ca
+                 << info.ultimate << info.sigs << renderContentAddress(info.ca)
                 << repair << !checkSigs;
        bool tunnel = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21;
        if (!tunnel) copyNAR(source, conn->to);
@@ -51,6 +51,7 @@ public:

    StorePathSet queryDerivationOutputs(const StorePath & path) override;

+    OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
    std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;

    StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;
@@ -173,20 +173,20 @@ static std::string makeType(


 StorePath Store::makeFixedOutputPath(
-    FileIngestionMethod recursive,
+    FileIngestionMethod method,
    const Hash & hash,
    std::string_view name,
    const StorePathSet & references,
    bool hasSelfReference) const
 {
-    if (hash.type == htSHA256 && recursive == FileIngestionMethod::Recursive) {
+    if (hash.type == htSHA256 && method == FileIngestionMethod::Recursive) {
        return makeStorePath(makeType(*this, "source", references, hasSelfReference), hash, name);
    } else {
        assert(references.empty());
        return makeStorePath("output:out",
            hashString(htSHA256,
                "fixed:out:"
-                + (recursive == FileIngestionMethod::Recursive ? (string) "r:" : "")
+                + makeFileIngestionPrefix(method)
                + hash.to_string(Base16, true) + ":"),
            name);
    }

@@ -242,6 +242,16 @@ bool Store::PathInfoCacheValue::isKnownNow()
    return std::chrono::steady_clock::now() < time_point + ttl;
 }

+StorePathSet Store::queryDerivationOutputs(const StorePath & path)
+{
+    auto outputMap = this->queryDerivationOutputMap(path);
+    StorePathSet outputPaths;
+    for (auto & i: outputMap) {
+        outputPaths.emplace(std::move(i.second));
+    }
+    return outputPaths;
+}
+
 bool Store::isValidPath(const StorePath & storePath)
 {
    std::string hashPart(storePath.hashPart());

@@ -471,8 +481,8 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
                jsonRefs.elem(printStorePath(ref));
        }

-        if (info->ca != "")
-            jsonPath.attr("ca", info->ca);
+        if (info->ca)
+            jsonPath.attr("ca", renderContentAddress(info->ca));

        std::pair<uint64_t, uint64_t> closureSizes;

@@ -757,41 +767,35 @@ void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey)
    sigs.insert(secretKey.signDetached(fingerprint(store)));
 }

+// FIXME Put this somewhere?
+template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
+template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
+
 bool ValidPathInfo::isContentAddressed(const Store & store) const
 {
-    auto warn = [&]() {
-        logWarning(
-            ErrorInfo{
-                .name = "Path not content-addressed",
-                .hint = hintfmt("path '%s' claims to be content-addressed but isn't", store.printStorePath(path))
-            });
-    };
-
-    if (hasPrefix(ca, "text:")) {
-        Hash hash(ca.substr(5));
-        if (store.makeTextPath(path.name(), hash, references) == path)
-            return true;
-        else
-            warn();
-    }
-
-    else if (hasPrefix(ca, "fixed:")) {
-        FileIngestionMethod recursive { ca.compare(6, 2, "r:") == 0 };
-        Hash hash(ca.substr(recursive == FileIngestionMethod::Recursive ? 8 : 6));
-        auto refs = references;
-        bool hasSelfReference = false;
-        if (refs.count(path)) {
-            hasSelfReference = true;
-            refs.erase(path);
-        }
-        if (store.makeFixedOutputPath(recursive, hash, path.name(), refs, hasSelfReference) == path)
-            return true;
-        else
-            warn();
-    }
-
-    return false;
+    if (! ca) return false;
+
+    auto caPath = std::visit(overloaded {
+        [&](TextHash th) {
+            return store.makeTextPath(path.name(), th.hash, references);
+        },
+        [&](FixedOutputHash fsh) {
+            auto refs = references;
+            bool hasSelfReference = false;
+            if (refs.count(path)) {
+                hasSelfReference = true;
+                refs.erase(path);
+            }
+            return store.makeFixedOutputPath(fsh.method, fsh.hash, path.name(), refs, hasSelfReference);
+        }
+    }, *ca);
+
+    bool res = caPath == path;
+
+    if (!res)
+        printError("warning: path '%s' claims to be content-addressed but isn't", store.printStorePath(path));
+
+    return res;
 }

@@ -822,14 +826,6 @@ Strings ValidPathInfo::shortRefs() const
 }


-std::string makeFixedOutputCA(FileIngestionMethod recursive, const Hash & hash)
-{
-    return "fixed:"
-        + (recursive == FileIngestionMethod::Recursive ? (std::string) "r:" : "")
-        + hash.to_string(Base32, true);
-}
-
-
 }
@@ -2,12 +2,14 @@

 #include "path.hh"
 #include "hash.hh"
+#include "content-address.hh"
 #include "serialise.hh"
 #include "crypto.hh"
 #include "lru-cache.hh"
 #include "sync.hh"
 #include "globals.hh"
 #include "config.hh"
+#include "derivations.hh"

 #include <atomic>
 #include <limits>

@@ -17,6 +19,7 @@
 #include <memory>
 #include <string>
 #include <chrono>
+#include <variant>


 namespace nix {

@@ -31,15 +34,12 @@ MakeError(SubstituterDisabled, Error);
 MakeError(NotInStore, Error);


-struct BasicDerivation;
-struct Derivation;
 class FSAccessor;
 class NarInfoDiskCache;
 class Store;
 class JSONPlaceholder;


-enum RepairFlag : bool { NoRepair = false, Repair = true };
 enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true };
 enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true };
 enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true };

@@ -111,7 +111,6 @@ struct SubstitutablePathInfo

 typedef std::map<StorePath, SubstitutablePathInfo> SubstitutablePathInfos;

-
 struct ValidPathInfo
 {
    StorePath path;

@@ -140,21 +139,11 @@ struct ValidPathInfo
       that a particular output path was produced by a derivation; the
       path then implies the contents.)

-       Ideally, the content-addressability assertion would just be a
-       Boolean, and the store path would be computed from
-       the name component, ‘narHash’ and ‘references’. However,
-       1) we've accumulated several types of content-addressed paths
-       over the years; and 2) fixed-output derivations support
-       multiple hash algorithms and serialisation methods (flat file
-       vs NAR). Thus, ‘ca’ has one of the following forms:
-
-       * ‘text:sha256:<sha256 hash of file contents>’: For paths
-         computed by makeTextPath() / addTextToStore().
-
-       * ‘fixed:<r?>:<ht>:<h>’: For paths computed by
-         makeFixedOutputPath() / addToStore().
+       Ideally, the content-addressability assertion would just be a Boolean,
+       and the store path would be computed from the name component, ‘narHash’
+       and ‘references’. However, we support many types of content addresses.
    */
-    std::string ca;
+    std::optional<ContentAddress> ca;

    bool operator == (const ValidPathInfo & i) const
    {

@@ -189,9 +178,10 @@ struct ValidPathInfo

    Strings shortRefs() const;

-    ValidPathInfo(const StorePath & path) : path(path) { }
+    ValidPathInfo(const ValidPathInfo & other) = default;

-    ValidPathInfo(StorePath && path) : path(std::move(path)) { }
+    ValidPathInfo(StorePath && path) : path(std::move(path)) { };
+    ValidPathInfo(const StorePath & path) : path(path) { };

    virtual ~ValidPathInfo() { }
 };

@@ -428,8 +418,11 @@ public:
    virtual StorePathSet queryValidDerivers(const StorePath & path) { return {}; };

    /* Query the outputs of the derivation denoted by `path'. */
-    virtual StorePathSet queryDerivationOutputs(const StorePath & path)
-    { unsupported("queryDerivationOutputs"); }
+    virtual StorePathSet queryDerivationOutputs(const StorePath & path);
+
+    /* Query the mapping outputName=>outputPath for the given derivation */
+    virtual OutputPathMap queryDerivationOutputMap(const StorePath & path)
+    { unsupported("queryDerivationOutputMap"); }

    /* Query the full store path given the hash part of a valid store
       path, or empty if the path doesn't exist. */

@@ -838,12 +831,6 @@ std::optional<ValidPathInfo> decodeValidPathInfo(
    std::istream & str,
    bool hashGiven = false);

-
-/* Compute the content-addressability assertion (ValidPathInfo::ca)
-   for paths created by makeFixedOutputPath() / addToStore(). */
-std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
-
-
 /* Split URI into protocol+hierarchy part and its parameter set. */
 std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);
@@ -6,7 +6,7 @@ namespace nix {
 #define WORKER_MAGIC_1 0x6e697863
 #define WORKER_MAGIC_2 0x6478696f

-#define PROTOCOL_VERSION 0x115
+#define PROTOCOL_VERSION 0x116
 #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
 #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)


@@ -30,7 +30,7 @@ typedef enum {
    wopSetOptions = 19,
    wopCollectGarbage = 20,
    wopQuerySubstitutablePathInfo = 21,
-    wopQueryDerivationOutputs = 22,
+    wopQueryDerivationOutputs = 22, // obsolete
    wopQueryAllValidPaths = 23,
    wopQueryFailedPaths = 24,
    wopClearFailedPaths = 25,

@@ -49,6 +49,7 @@ typedef enum {
    wopNarFromPath = 38,
    wopAddToStoreNar = 39,
    wopQueryMissing = 40,
+    wopQueryDerivationOutputMap = 41,
 } WorkerOp;


@@ -69,5 +70,6 @@ template<class T> T readStorePaths(const Store & store, Source & from);

 void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths);

+void writeOutputPathMap(const Store & store, Sink & out, const OutputPathMap & paths);

 }
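A quick check of the version arithmetic behind the bump above: with PROTOCOL_VERSION at 0x116, GET_PROTOCOL_MINOR yields 0x16 (22), which is exactly the threshold RemoteStore::queryDerivationOutputs tests before relying on the new wopQueryDerivationOutputMap operation. A tiny standalone sketch:

    #include <cstdio>

    #define PROTOCOL_VERSION 0x116
    #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
    #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)

    int main()
    {
        std::printf("major = 0x%x, minor = %d\n",
            GET_PROTOCOL_MAJOR(PROTOCOL_VERSION),    // 0x100
            GET_PROTOCOL_MINOR(PROTOCOL_VERSION));   // 22, i.e. >= 0x16, so the new op is usable
        return 0;
    }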