Merge branch 'master' into fix-sandbox-escape
This commit is contained in: commit 8a420162ab
274 changed files with 3295 additions and 900 deletions
@@ -322,7 +322,7 @@ StorePath BinaryCacheStore::addToStoreFromDump(
    if (static_cast<FileIngestionMethod>(dumpMethod) == hashMethod.getFileIngestionMethod())
        caHash = hashString(HashAlgorithm::SHA256, dump2.s);
    switch (dumpMethod) {
-   case FileSerialisationMethod::Recursive:
+   case FileSerialisationMethod::NixArchive:
        // The dump is already NAR in this case, just use it.
        nar = dump2.s;
        break;
@@ -339,7 +339,7 @@ StorePath BinaryCacheStore::addToStoreFromDump(
    } else {
        // Otherwise, we have to do th same hashing as NAR so our single
        // hash will suffice for both purposes.
-       if (dumpMethod != FileSerialisationMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
+       if (dumpMethod != FileSerialisationMethod::NixArchive || hashAlgo != HashAlgorithm::SHA256)
            unsupported("addToStoreFromDump");
    }
    StringSource narDump { nar };
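
The check at the top of this hunk decides whether the incoming dump can be reused directly as the NAR the binary cache needs. A rough stand-alone sketch of that decision; the enums and helpers below are simplified stand-ins, not the declarations from the Nix tree:

// Sketch only; simplified stand-ins for the Nix enums.
enum class FileSerialisationMethod { Flat, NixArchive };
enum class FileIngestionMethod { Flat, NixArchive, Git };

// The streamed hash of the dump can double as the content-address hash only
// when both sides agree on the serialisation (flat bytes vs. NAR)...
bool methodsMatch(FileSerialisationMethod dumpMethod, FileIngestionMethod hashMethod)
{
    return static_cast<FileIngestionMethod>(dumpMethod) == hashMethod;
}

// ...and the dump bytes can be forwarded as the NAR only when they already are a NAR.
bool dumpIsReusableAsNar(FileSerialisationMethod dumpMethod)
{
    return dumpMethod == FileSerialisationMethod::NixArchive;
}
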
@@ -3,6 +3,7 @@
# include "hook-instance.hh"
#endif
#include "processes.hh"
+#include "config-global.hh"
#include "worker.hh"
#include "builtins.hh"
#include "builtins/buildenv.hh"
@@ -19,7 +19,6 @@ Worker::Worker(Store & store, Store & evalStore)
    , store(store)
    , evalStore(evalStore)
{
    /* Debugging: prevent recursive workers. */
    nrLocalBuilds = 0;
    nrSubstitutions = 0;
    lastWokenUp = steady_time_point::min();
@@ -530,7 +529,7 @@ bool Worker::pathContentsGood(const StorePath & path)
    else {
        auto current = hashPath(
            {store.getFSAccessor(), CanonPath(store.printStorePath(path))},
-           FileIngestionMethod::Recursive, info->narHash.algo).first;
+           FileIngestionMethod::NixArchive, info->narHash.algo).first;
        Hash nullHash(HashAlgorithm::SHA256);
        res = info->narHash == nullHash || info->narHash == current;
    }
@@ -59,7 +59,7 @@ struct HookInstance;
#endif

/**
 * The worker class.
 * Coordinates one or more realisations and their interdependencies.
 */
class Worker
{
@@ -8,98 +8,136 @@ std::string_view makeFileIngestionPrefix(FileIngestionMethod m)
{
    switch (m) {
    case FileIngestionMethod::Flat:
        // Not prefixed for back compat
        return "";
-   case FileIngestionMethod::Recursive:
+   case FileIngestionMethod::NixArchive:
        return "r:";
    case FileIngestionMethod::Git:
        experimentalFeatureSettings.require(Xp::GitHashing);
        return "git:";
    default:
-       throw Error("impossible, caught both cases");
+       assert(false);
    }
}

std::string_view ContentAddressMethod::render() const
{
-   return std::visit(overloaded {
-       [](TextIngestionMethod) -> std::string_view { return "text"; },
-       [](FileIngestionMethod m2) {
-           /* Not prefixed for back compat with things that couldn't produce text before. */
-           return renderFileIngestionMethod(m2);
-       },
-   }, raw);
+   switch (raw) {
+   case ContentAddressMethod::Raw::Text:
+       return "text";
+   case ContentAddressMethod::Raw::Flat:
+   case ContentAddressMethod::Raw::NixArchive:
+   case ContentAddressMethod::Raw::Git:
+       return renderFileIngestionMethod(getFileIngestionMethod());
+   default:
+       assert(false);
+   }
}

+/**
+ * **Not surjective**
+ *
+ * This is not exposed because `FileIngestionMethod::Flat` maps to
+ * `ContentAddressMethod::Raw::Flat` and
+ * `ContentAddressMethod::Raw::Text` alike. We can thus only safely use
+ * this when the latter is ruled out (e.g. because it is already
+ * handled).
+ */
+static ContentAddressMethod fileIngestionMethodToContentAddressMethod(FileIngestionMethod m)
+{
+   switch (m) {
+   case FileIngestionMethod::Flat:
+       return ContentAddressMethod::Raw::Flat;
+   case FileIngestionMethod::NixArchive:
+       return ContentAddressMethod::Raw::NixArchive;
+   case FileIngestionMethod::Git:
+       return ContentAddressMethod::Raw::Git;
+   default:
+       assert(false);
+   }
+}

ContentAddressMethod ContentAddressMethod::parse(std::string_view m)
{
    if (m == "text")
-       return TextIngestionMethod {};
+       return ContentAddressMethod::Raw::Text;
    else
-       return parseFileIngestionMethod(m);
+       return fileIngestionMethodToContentAddressMethod(
+           parseFileIngestionMethod(m));
}

std::string_view ContentAddressMethod::renderPrefix() const
{
-   return std::visit(overloaded {
-       [](TextIngestionMethod) -> std::string_view { return "text:"; },
-       [](FileIngestionMethod m2) {
-           /* Not prefixed for back compat with things that couldn't produce text before. */
-           return makeFileIngestionPrefix(m2);
-       },
-   }, raw);
+   switch (raw) {
+   case ContentAddressMethod::Raw::Text:
+       return "text:";
+   case ContentAddressMethod::Raw::Flat:
+   case ContentAddressMethod::Raw::NixArchive:
+   case ContentAddressMethod::Raw::Git:
+       return makeFileIngestionPrefix(getFileIngestionMethod());
+   default:
+       assert(false);
+   }
}

ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m)
{
    if (splitPrefix(m, "r:")) {
-       return FileIngestionMethod::Recursive;
+       return ContentAddressMethod::Raw::NixArchive;
    }
    else if (splitPrefix(m, "git:")) {
        experimentalFeatureSettings.require(Xp::GitHashing);
-       return FileIngestionMethod::Git;
+       return ContentAddressMethod::Raw::Git;
    }
    else if (splitPrefix(m, "text:")) {
-       return TextIngestionMethod {};
+       return ContentAddressMethod::Raw::Text;
    }
-   return FileIngestionMethod::Flat;
+   return ContentAddressMethod::Raw::Flat;
}

+/**
+ * This is slightly more mindful of forward compat in that it uses `fixed:`
+ * rather than just doing a raw empty prefix or `r:`, which doesn't "save room"
+ * for future changes very well.
+ */
+static std::string renderPrefixModern(const ContentAddressMethod & ca)
+{
+   switch (ca.raw) {
+   case ContentAddressMethod::Raw::Text:
+       return "text:";
+   case ContentAddressMethod::Raw::Flat:
+   case ContentAddressMethod::Raw::NixArchive:
+   case ContentAddressMethod::Raw::Git:
+       return "fixed:" + makeFileIngestionPrefix(ca.getFileIngestionMethod());
+   default:
+       assert(false);
+   }
+}

std::string ContentAddressMethod::renderWithAlgo(HashAlgorithm ha) const
{
-   return std::visit(overloaded {
-       [&](const TextIngestionMethod & th) {
-           return std::string{"text:"} + printHashAlgo(ha);
-       },
-       [&](const FileIngestionMethod & fim) {
-           return "fixed:" + makeFileIngestionPrefix(fim) + printHashAlgo(ha);
-       }
-   }, raw);
+   return renderPrefixModern(*this) + printHashAlgo(ha);
}

FileIngestionMethod ContentAddressMethod::getFileIngestionMethod() const
{
-   return std::visit(overloaded {
-       [&](const TextIngestionMethod & th) {
-           return FileIngestionMethod::Flat;
-       },
-       [&](const FileIngestionMethod & fim) {
-           return fim;
-       }
-   }, raw);
+   switch (raw) {
+   case ContentAddressMethod::Raw::Flat:
+       return FileIngestionMethod::Flat;
+   case ContentAddressMethod::Raw::NixArchive:
+       return FileIngestionMethod::NixArchive;
+   case ContentAddressMethod::Raw::Git:
+       return FileIngestionMethod::Git;
+   case ContentAddressMethod::Raw::Text:
+       return FileIngestionMethod::Flat;
+   default:
+       assert(false);
+   }
}

std::string ContentAddress::render() const
{
-   return std::visit(overloaded {
-       [](const TextIngestionMethod &) -> std::string {
-           return "text:";
-       },
-       [](const FileIngestionMethod & method) {
-           return "fixed:"
-               + makeFileIngestionPrefix(method);
-       },
-   }, method.raw)
-       + this->hash.to_string(HashFormat::Nix32, true);
+   return renderPrefixModern(method) + this->hash.to_string(HashFormat::Nix32, true);
}

/**
@@ -130,17 +168,17 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
        // No parsing of the ingestion method, "text" only support flat.
        HashAlgorithm hashAlgo = parseHashAlgorithm_();
        return {
-           TextIngestionMethod {},
+           ContentAddressMethod::Raw::Text,
            std::move(hashAlgo),
        };
    } else if (prefix == "fixed") {
        // Parse method
-       auto method = FileIngestionMethod::Flat;
+       auto method = ContentAddressMethod::Raw::Flat;
        if (splitPrefix(rest, "r:"))
-           method = FileIngestionMethod::Recursive;
+           method = ContentAddressMethod::Raw::NixArchive;
        else if (splitPrefix(rest, "git:")) {
            experimentalFeatureSettings.require(Xp::GitHashing);
-           method = FileIngestionMethod::Git;
+           method = ContentAddressMethod::Raw::Git;
        }
        HashAlgorithm hashAlgo = parseHashAlgorithm_();
        return {
@@ -201,57 +239,58 @@ size_t StoreReferences::size() const

ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const ContentAddress & ca) noexcept
{
-   return std::visit(overloaded {
-       [&](const TextIngestionMethod &) -> ContentAddressWithReferences {
-           return TextInfo {
-               .hash = ca.hash,
-               .references = {},
-           };
-       },
-       [&](const FileIngestionMethod & method) -> ContentAddressWithReferences {
-           return FixedOutputInfo {
-               .method = method,
-               .hash = ca.hash,
-               .references = {},
-           };
-       },
-   }, ca.method.raw);
+   switch (ca.method.raw) {
+   case ContentAddressMethod::Raw::Text:
+       return TextInfo {
+           .hash = ca.hash,
+           .references = {},
+       };
+   case ContentAddressMethod::Raw::Flat:
+   case ContentAddressMethod::Raw::NixArchive:
+   case ContentAddressMethod::Raw::Git:
+       return FixedOutputInfo {
+           .method = ca.method.getFileIngestionMethod(),
+           .hash = ca.hash,
+           .references = {},
+       };
+   default:
+       assert(false);
+   }
}

ContentAddressWithReferences ContentAddressWithReferences::fromParts(
    ContentAddressMethod method, Hash hash, StoreReferences refs)
{
-   return std::visit(overloaded {
-       [&](TextIngestionMethod _) -> ContentAddressWithReferences {
-           if (refs.self)
-               throw Error("self-reference not allowed with text hashing");
-           return ContentAddressWithReferences {
-               TextInfo {
-                   .hash = std::move(hash),
-                   .references = std::move(refs.others),
-               }
-           };
-       },
-       [&](FileIngestionMethod m2) -> ContentAddressWithReferences {
-           return ContentAddressWithReferences {
-               FixedOutputInfo {
-                   .method = m2,
-                   .hash = std::move(hash),
-                   .references = std::move(refs),
-               }
-           };
-       },
-   }, method.raw);
+   switch (method.raw) {
+   case ContentAddressMethod::Raw::Text:
+       if (refs.self)
+           throw Error("self-reference not allowed with text hashing");
+       return TextInfo {
+           .hash = std::move(hash),
+           .references = std::move(refs.others),
+       };
+   case ContentAddressMethod::Raw::Flat:
+   case ContentAddressMethod::Raw::NixArchive:
+   case ContentAddressMethod::Raw::Git:
+       return FixedOutputInfo {
+           .method = method.getFileIngestionMethod(),
+           .hash = std::move(hash),
+           .references = std::move(refs),
+       };
+   default:
+       assert(false);
+   }
}

ContentAddressMethod ContentAddressWithReferences::getMethod() const
{
    return std::visit(overloaded {
        [](const TextInfo & th) -> ContentAddressMethod {
-           return TextIngestionMethod {};
+           return ContentAddressMethod::Raw::Text;
        },
        [](const FixedOutputInfo & fsh) -> ContentAddressMethod {
-           return fsh.method;
+           return fileIngestionMethodToContentAddressMethod(
+               fsh.method);
        },
    }, raw);
}
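
The pattern repeated throughout this file, replacing std::visit over a std::variant with a switch over the new Raw enum, can be shown in isolation. A minimal sketch; the enum and function below are illustrative stand-ins, not the classes from the Nix tree:

#include <cassert>
#include <string_view>

enum class Method { Flat, NixArchive, Git, Text };

std::string_view renderPrefix(Method m)
{
    // One switch replaces the lambda-per-alternative dispatch of std::visit.
    switch (m) {
    case Method::Text:
        return "text:";
    case Method::Flat:
        return "";      // no prefix, kept for backwards compatibility
    case Method::NixArchive:
        return "r:";
    case Method::Git:
        return "git:";
    }
    assert(false);      // unreachable: every enumerator is handled above
    return "";
}
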
@@ -5,7 +5,6 @@
#include "hash.hh"
#include "path.hh"
#include "file-content-address.hh"
#include "comparator.hh"
#include "variant-wrapper.hh"

namespace nix {

@@ -14,24 +13,6 @@ namespace nix {
 * Content addressing method
 */

-/* We only have one way to hash text with references, so this is a single-value
-   type, mainly useful with std::variant.
-*/
-
-/**
- * The single way we can serialize "text" file system objects.
- *
- * Somewhat obscure, used by \ref Derivation derivations and
- * `builtins.toFile` currently.
- *
- * TextIngestionMethod is identical to FileIngestionMethod::Fixed except that
- * the former may not have self-references and is tagged `text:${algo}:${hash}`
- * rather than `fixed:${algo}:${hash}`. The contents of the store path are
- * ingested and hashed identically, aside from the slightly different tag and
- * restriction on self-references.
- */
-struct TextIngestionMethod : std::monostate { };

/**
 * Compute the prefix to the hash algorithm which indicates how the
 * files were ingested.

@@ -48,14 +29,51 @@ std::string_view makeFileIngestionPrefix(FileIngestionMethod m);
 */
struct ContentAddressMethod
{
-   typedef std::variant<
-       TextIngestionMethod,
-       FileIngestionMethod
-   > Raw;
+   enum struct Raw {
+       /**
+        * Calculate a store path using the `FileIngestionMethod::Flat`
+        * hash of the file system objects, and references.
+        *
+        * See `store-object/content-address.md#method-flat` in the
+        * manual.
+        */
+       Flat,
+
+       /**
+        * Calculate a store path using the
+        * `FileIngestionMethod::NixArchive` hash of the file system
+        * objects, and references.
+        *
+        * See `store-object/content-address.md#method-flat` in the
+        * manual.
+        */
+       NixArchive,
+
+       /**
+        * Calculate a store path using the `FileIngestionMethod::Git`
+        * hash of the file system objects, and references.
+        *
+        * Part of `ExperimentalFeature::GitHashing`.
+        *
+        * See `store-object/content-address.md#method-git` in the
+        * manual.
+        */
+       Git,
+
+       /**
+        * Calculate a store path using the `FileIngestionMethod::Flat`
+        * hash of the file system objects, and references, but in a
+        * different way than `ContentAddressMethod::Raw::Flat`.
+        *
+        * See `store-object/content-address.md#method-text` in the
+        * manual.
+        */
+       Text,
+   };

    Raw raw;

-   GENERATE_CMP(ContentAddressMethod, me->raw);
+   auto operator <=>(const ContentAddressMethod &) const = default;

    MAKE_WRAPPER_CONSTRUCTOR(ContentAddressMethod);

@@ -141,7 +159,7 @@ struct ContentAddress
     */
    Hash hash;

-   GENERATE_CMP(ContentAddress, me->method, me->hash);
+   auto operator <=>(const ContentAddress &) const = default;

    /**
     * Compute the content-addressability assertion

@@ -200,7 +218,7 @@ struct StoreReferences
     */
    size_t size() const;

-   GENERATE_CMP(StoreReferences, me->self, me->others);
+   auto operator <=>(const StoreReferences &) const = default;
};

// This matches the additional info that we need for makeTextPath

@@ -217,7 +235,7 @@ struct TextInfo
     */
    StorePathSet references;

-   GENERATE_CMP(TextInfo, me->hash, me->references);
+   auto operator <=>(const TextInfo &) const = default;
};

struct FixedOutputInfo

@@ -237,7 +255,7 @@ struct FixedOutputInfo
     */
    StoreReferences references;

-   GENERATE_CMP(FixedOutputInfo, me->hash, me->references);
+   auto operator <=>(const FixedOutputInfo &) const = default;
};

/**

@@ -254,7 +272,7 @@ struct ContentAddressWithReferences

    Raw raw;

-   GENERATE_CMP(ContentAddressWithReferences, me->raw);
+   auto operator <=>(const ContentAddressWithReferences &) const = default;

    MAKE_WRAPPER_CONSTRUCTOR(ContentAddressWithReferences);

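
Each GENERATE_CMP line above is replaced by a defaulted three-way comparison. A self-contained sketch of what that buys, using a hypothetical struct rather than the real ContentAddress types:

#include <cassert>
#include <compare>

struct Sample
{
    int method;
    int hash;

    // Derives ==, !=, <, <=, >, >= member-wise, so no comparator macro is needed.
    auto operator <=>(const Sample &) const = default;
};

int main()
{
    assert((Sample{1, 2} == Sample{1, 2}));
    assert((Sample{1, 2} < Sample{1, 3}));   // lexicographic over the members
}
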
@@ -415,12 +415,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
        case FileIngestionMethod::Flat:
            dumpMethod = FileSerialisationMethod::Flat;
            break;
-       case FileIngestionMethod::Recursive:
-           dumpMethod = FileSerialisationMethod::Recursive;
+       case FileIngestionMethod::NixArchive:
+           dumpMethod = FileSerialisationMethod::NixArchive;
            break;
        case FileIngestionMethod::Git:
            // Use NAR; Git is not a serialization method
-           dumpMethod = FileSerialisationMethod::Recursive;
+           dumpMethod = FileSerialisationMethod::NixArchive;
            break;
        default:
            assert(false);
@@ -435,19 +435,21 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
    } else {
        HashAlgorithm hashAlgo;
        std::string baseName;
-       FileIngestionMethod method;
+       ContentAddressMethod method;
        {
            bool fixed;
            uint8_t recursive;
            std::string hashAlgoRaw;
            from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw;
-           if (recursive > (uint8_t) FileIngestionMethod::Recursive)
+           if (recursive > true)
                throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive);
-           method = FileIngestionMethod { recursive };
+           method = recursive
+               ? ContentAddressMethod::Raw::NixArchive
+               : ContentAddressMethod::Raw::Flat;
            /* Compatibility hack. */
            if (!fixed) {
                hashAlgoRaw = "sha256";
-               method = FileIngestionMethod::Recursive;
+               method = ContentAddressMethod::Raw::NixArchive;
            }
            hashAlgo = parseHashAlgo(hashAlgoRaw);
        }
@@ -468,7 +470,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
        });
        logger->startWork();
        auto path = store->addToStoreFromDump(
-           *dumpSource, baseName, FileSerialisationMethod::Recursive, method, hashAlgo);
+           *dumpSource, baseName, FileSerialisationMethod::NixArchive, method, hashAlgo);
        logger->stopWork();

        to << store->printStorePath(path);
@@ -500,7 +502,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
        logger->startWork();
        auto path = ({
            StringSource source { s };
-           store->addToStoreFromDump(source, suffix, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, NoRepair);
+           store->addToStoreFromDump(source, suffix, FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, refs, NoRepair);
        });
        logger->stopWork();
        to << store->printStorePath(path);
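
The second hunk above folds the two legacy wire fields ("fixed", now obsolete, and "recursive") into a single content-address method. A stand-alone sketch of that decoding, with simplified stand-in types rather than the daemon's real ones:

#include <cstdint>
#include <stdexcept>
#include <string>

enum class Method { Flat, NixArchive };

struct Decoded { Method method; std::string hashAlgo; };

Decoded decodeLegacyAddToStore(bool fixed, uint8_t recursive, std::string hashAlgo)
{
    if (recursive > 1)
        throw std::runtime_error("unsupported ingestion method " + std::to_string(recursive));
    Decoded d { recursive ? Method::NixArchive : Method::Flat, std::move(hashAlgo) };
    if (!fixed) {
        // Compatibility hack, as in the hunk: non-fixed inputs were always
        // recursive (NAR) SHA-256.
        d.hashAlgo = "sha256";
        d.method = Method::NixArchive;
    }
    return d;
}
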
@@ -150,7 +150,7 @@ StorePath writeDerivation(Store & store,
        })
        : ({
            StringSource s { contents };
-           store.addToStoreFromDump(s, suffix, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair);
+           store.addToStoreFromDump(s, suffix, FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, references, repair);
        });
}

@@ -274,7 +274,7 @@ static DerivationOutput parseDerivationOutput(
{
    if (hashAlgoStr != "") {
        ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgoStr);
-       if (method == TextIngestionMethod {})
+       if (method == ContentAddressMethod::Raw::Text)
            xpSettings.require(Xp::DynamicDerivations);
        const auto hashAlgo = parseHashAlgo(hashAlgoStr);
        if (hashS == "impure") {
@@ -1249,7 +1249,7 @@ DerivationOutput DerivationOutput::fromJSON(
    auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashAlgorithm> {
        auto & method_ = getString(valueAt(json, "method"));
        ContentAddressMethod method = ContentAddressMethod::parse(method_);
-       if (method == TextIngestionMethod {})
+       if (method == ContentAddressMethod::Raw::Text)
            xpSettings.require(Xp::DynamicDerivations);

        auto & hashAlgo_ = getString(valueAt(json, "hashAlgo"));
@@ -64,8 +64,8 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
    virtual StorePath addToStoreFromDump(
        Source & dump,
        std::string_view name,
-       FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive,
-       ContentAddressMethod hashMethod = FileIngestionMethod::Recursive,
+       FileSerialisationMethod dumpMethod = FileSerialisationMethod::NixArchive,
+       ContentAddressMethod hashMethod = FileIngestionMethod::NixArchive,
        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        const StorePathSet & references = StorePathSet(),
        RepairFlag repair = NoRepair) override
@@ -1,5 +1,6 @@
#include "filetransfer.hh"
#include "globals.hh"
+#include "config-global.hh"
#include "store-api.hh"
#include "s3.hh"
#include "compression.hh"
@@ -1,4 +1,5 @@
#include "globals.hh"
+#include "config-global.hh"
#include "current-process.hh"
#include "archive.hh"
#include "args.hh"
@@ -123,12 +124,12 @@ Settings::Settings()
    };
}

-void loadConfFile()
+void loadConfFile(AbstractConfig & config)
{
    auto applyConfigFile = [&](const Path & path) {
        try {
            std::string contents = readFile(path);
-           globalConfig.applyConfig(contents, path);
+           config.applyConfig(contents, path);
        } catch (SystemError &) { }
    };

@@ -136,7 +137,7 @@ void loadConfFile()

    /* We only want to send overrides to the daemon, i.e. stuff from
       ~/.nix/nix.conf or the command line. */
-   globalConfig.resetOverridden();
+   config.resetOverridden();

    auto files = settings.nixUserConfFiles;
    for (auto file = files.rbegin(); file != files.rend(); file++) {
@@ -145,7 +146,7 @@ void loadConfFile()

    auto nixConfEnv = getEnv("NIX_CONFIG");
    if (nixConfEnv.has_value()) {
-       globalConfig.applyConfig(nixConfEnv.value(), "NIX_CONFIG");
+       config.applyConfig(nixConfEnv.value(), "NIX_CONFIG");
    }

}
@@ -437,7 +438,7 @@ void initLibStore(bool loadConfig) {
    initLibUtil();

    if (loadConfig)
-       loadConfFile();
+       loadConfFile(globalConfig);

    preloadNSS();

@@ -1284,7 +1284,13 @@ extern Settings settings;
 */
void initPlugins();

-void loadConfFile();
+/**
+ * Load the configuration (from `nix.conf`, `NIX_CONFIG`, etc.) into the
+ * given configuration object.
+ *
+ * Usually called with `globalConfig`.
+ */
+void loadConfFile(AbstractConfig & config);

// Used by the Settings constructor
std::vector<Path> getUserConfigFiles();
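
The signature change documented here turns the loader from "write into the global configuration" into "write into whatever configuration object the caller passes" (usually globalConfig). A minimal sketch of the same shape, with a hypothetical interface instead of the real AbstractConfig:

#include <iostream>
#include <string>

struct ConfigSink
{
    virtual void applyConfig(const std::string & contents, const std::string & source) = 0;
    virtual ~ConfigSink() = default;
};

// The loader no longer names a global; the caller chooses where settings land.
void loadInto(ConfigSink & config, const std::string & contents, const std::string & source)
{
    config.applyConfig(contents, source);
}

struct PrintingConfig : ConfigSink
{
    void applyConfig(const std::string & contents, const std::string & source) override
    {
        std::cout << source << ": " << contents << "\n";
    }
};

// Usage sketch: PrintingConfig c; loadInto(c, "sandbox = true", "NIX_CONFIG");
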
@@ -76,8 +76,8 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
    virtual StorePath addToStoreFromDump(
        Source & dump,
        std::string_view name,
-       FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive,
-       ContentAddressMethod hashMethod = FileIngestionMethod::Recursive,
+       FileSerialisationMethod dumpMethod = FileSerialisationMethod::NixArchive,
+       ContentAddressMethod hashMethod = FileIngestionMethod::NixArchive,
        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        const StorePathSet & references = StorePathSet(),
        RepairFlag repair = NoRepair) override
@@ -1155,7 +1155,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
        auto fim = specified.method.getFileIngestionMethod();
        switch (fim) {
        case FileIngestionMethod::Flat:
-       case FileIngestionMethod::Recursive:
+       case FileIngestionMethod::NixArchive:
        {
            HashModuloSink caSink {
                specified.hash.algo,
@@ -1253,7 +1253,7 @@ StorePath LocalStore::addToStoreFromDump(
    std::filesystem::path tempDir;
    AutoCloseFD tempDirFd;

-   bool methodsMatch = ContentAddressMethod(FileIngestionMethod(dumpMethod)) == hashMethod;
+   bool methodsMatch = static_cast<FileIngestionMethod>(dumpMethod) == hashMethod.getFileIngestionMethod();

    /* If the methods don't match, our streaming hash of the dump is the
       wrong sort, and we need to rehash. */
@@ -1314,7 +1314,7 @@ StorePath LocalStore::addToStoreFromDump(
        auto fim = hashMethod.getFileIngestionMethod();
        switch (fim) {
        case FileIngestionMethod::Flat:
-       case FileIngestionMethod::Recursive:
+       case FileIngestionMethod::NixArchive:
            restorePath(realPath, dumpSource, (FileSerialisationMethod) fim);
            break;
        case FileIngestionMethod::Git:
@@ -1330,7 +1330,7 @@ StorePath LocalStore::addToStoreFromDump(
        /* For computing the nar hash. In recursive SHA-256 mode, this
           is the same as the store hash, so no need to do it again. */
        auto narHash = std::pair { dumpHash, size };
-       if (dumpMethod != FileSerialisationMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) {
+       if (dumpMethod != FileSerialisationMethod::NixArchive || hashAlgo != HashAlgorithm::SHA256) {
            HashSink narSink { HashAlgorithm::SHA256 };
            dumpPath(realPath, narSink);
            narHash = narSink.finish();
@@ -1423,7 +1423,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
                PosixSourceAccessor accessor;
                std::string hash = hashPath(
                    PosixSourceAccessor::createAtRoot(link.path()),
-                   FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false);
+                   FileIngestionMethod::NixArchive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false);
                if (hash != name.string()) {
                    printError("link '%s' was modified! expected hash '%s', got '%s'",
                        link.path(), name, hash);
@@ -52,7 +52,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
            dstStore,
            path.name(),
            FixedOutputInfo {
-               .method = FileIngestionMethod::Recursive,
+               .method = FileIngestionMethod::NixArchive,
                .hash = narModuloHash,
                .references = std::move(refs),
            },
@@ -151,7 +151,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
    Hash hash = ({
        hashPath(
            {make_ref<PosixSourceAccessor>(), CanonPath(path)},
-           FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first;
+           FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256).first;
    });
    debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));

@@ -165,7 +165,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
        || (repair && hash != ({
            hashPath(
                PosixSourceAccessor::createAtRoot(linkPath),
-               FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first;
+               FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256).first;
        })))
    {
        // XXX: Consider overwriting linkPath with our valid version.
@@ -1,7 +1,6 @@
{ lib
, stdenv
, releaseTools
-, fileset

, meson
, ninja
@@ -13,7 +12,6 @@
, aws-sdk-cpp
, libseccomp
, nlohmann_json
, man
, sqlite

, busybox-sandbox-shell ? null
@@ -21,7 +19,6 @@
# Configuration Options

, versionSuffix ? ""
, officialRelease ? false

# Check test coverage of Nix. Probably want to use with at least
# one of `doCheck` or `doInstallCheck` enabled.
@@ -32,6 +29,8 @@
}:

let
+  inherit (lib) fileset;

  version = lib.fileContents ./.version + versionSuffix;

  mkDerivation =
@@ -48,15 +48,21 @@ std::optional<ContentAddressWithReferences> ValidPathInfo::contentAddressWithRef
    if (! ca)
        return std::nullopt;

-   return std::visit(overloaded {
-       [&](const TextIngestionMethod &) -> ContentAddressWithReferences {
+   switch (ca->method.raw) {
+   case ContentAddressMethod::Raw::Text:
+   {
        assert(references.count(path) == 0);
        return TextInfo {
            .hash = ca->hash,
            .references = references,
        };
-       },
-       [&](const FileIngestionMethod & m2) -> ContentAddressWithReferences {
+   }
+
+   case ContentAddressMethod::Raw::Flat:
+   case ContentAddressMethod::Raw::NixArchive:
+   case ContentAddressMethod::Raw::Git:
+   default:
+   {
        auto refs = references;
        bool hasSelfReference = false;
        if (refs.count(path)) {
@@ -64,15 +70,15 @@ std::optional<ContentAddressWithReferences> ValidPathInfo::contentAddressWithRef
            refs.erase(path);
        }
        return FixedOutputInfo {
-           .method = m2,
+           .method = ca->method.getFileIngestionMethod(),
            .hash = ca->hash,
            .references = {
                .others = std::move(refs),
                .self = hasSelfReference,
            },
        };
-       },
-   }, ca->method.raw);
+   }
+   }
}

bool ValidPathInfo::isContentAddressed(const Store & store) const
@@ -127,22 +133,18 @@ ValidPathInfo::ValidPathInfo(
    : UnkeyedValidPathInfo(narHash)
    , path(store.makeFixedOutputPathFromCA(name, ca))
{
+   this->ca = ContentAddress {
+       .method = ca.getMethod(),
+       .hash = ca.getHash(),
+   };
    std::visit(overloaded {
        [this](TextInfo && ti) {
            this->references = std::move(ti.references);
-           this->ca = ContentAddress {
-               .method = TextIngestionMethod {},
-               .hash = std::move(ti.hash),
-           };
        },
        [this](FixedOutputInfo && foi) {
            this->references = std::move(foi.references.others);
            if (foi.references.self)
                this->references.insert(path);
-           this->ca = ContentAddress {
-               .method = std::move(foi.method),
-               .hash = std::move(foi.hash),
-           };
        },
    }, std::move(ca).raw);
}
@@ -2,25 +2,24 @@

namespace nix {

-static void checkName(std::string_view path, std::string_view name)
+void checkName(std::string_view name)
{
    if (name.empty())
-       throw BadStorePath("store path '%s' has an empty name", path);
+       throw BadStorePathName("name must not be empty");
    if (name.size() > StorePath::MaxPathLen)
-       throw BadStorePath("store path '%s' has a name longer than %d characters",
-           path, StorePath::MaxPathLen);
+       throw BadStorePathName("name '%s' must be no longer than %d characters", name, StorePath::MaxPathLen);
    // See nameRegexStr for the definition
    if (name[0] == '.') {
        // check against "." and "..", followed by end or dash
        if (name.size() == 1)
-           throw BadStorePath("store path '%s' has invalid name '%s'", path, name);
+           throw BadStorePathName("name '%s' is not valid", name);
        if (name[1] == '-')
-           throw BadStorePath("store path '%s' has invalid name '%s': first dash-separated component must not be '%s'", path, name, ".");
+           throw BadStorePathName("name '%s' is not valid: first dash-separated component must not be '%s'", name, ".");
        if (name[1] == '.') {
            if (name.size() == 2)
-               throw BadStorePath("store path '%s' has invalid name '%s'", path, name);
+               throw BadStorePathName("name '%s' is not valid", name);
            if (name[2] == '-')
-               throw BadStorePath("store path '%s' has invalid name '%s': first dash-separated component must not be '%s'", path, name, "..");
+               throw BadStorePathName("name '%s' is not valid: first dash-separated component must not be '%s'", name, "..");
        }
    }
    for (auto c : name)
@@ -28,7 +27,16 @@ static void checkName(std::string_view path, std::string_view name)
        || (c >= 'a' && c <= 'z')
        || (c >= 'A' && c <= 'Z')
        || c == '+' || c == '-' || c == '.' || c == '_' || c == '?' || c == '='))
-       throw BadStorePath("store path '%s' contains illegal character '%s'", path, c);
+       throw BadStorePathName("name '%s' contains illegal character '%s'", name, c);
}

+static void checkPathName(std::string_view path, std::string_view name)
+{
+   try {
+       checkName(name);
+   } catch (BadStorePathName & e) {
+       throw BadStorePath("path '%s' is not a valid store path: %s", path, Uncolored(e.message()));
+   }
+}

StorePath::StorePath(std::string_view _baseName)
@@ -40,13 +48,13 @@ StorePath::StorePath(std::string_view _baseName)
        if (c == 'e' || c == 'o' || c == 'u' || c == 't'
            || !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'z')))
            throw BadStorePath("store path '%s' contains illegal base-32 character '%s'", baseName, c);
-   checkName(baseName, name());
+   checkPathName(baseName, name());
}

StorePath::StorePath(const Hash & hash, std::string_view _name)
    : baseName((hash.to_string(HashFormat::Nix32, false) + "-").append(std::string(_name)))
{
-   checkName(baseName, name());
+   checkPathName(baseName, name());
}

bool StorePath::isDerivation() const noexcept
@@ -9,6 +9,13 @@ namespace nix {

struct Hash;

+/**
+ * Check whether a name is a valid store path name.
+ *
+ * @throws BadStorePathName if the name is invalid. The message is of the format "name %s is not valid, for this specific reason".
+ */
+void checkName(std::string_view name);

/**
 * \ref StorePath "Store path" is the fundamental reference type of Nix.
 * A store paths refers to a Store object.
@@ -31,8 +38,10 @@ public:

    StorePath() = delete;

+   /** @throws BadStorePath */
    StorePath(std::string_view baseName);

+   /** @throws BadStorePath */
    StorePath(const Hash & hash, std::string_view name);

    std::string_view to_string() const noexcept
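
The new declarations above split validation into a name-only check that throws a name-scoped error and path-level constructors that report the whole path. A self-contained sketch of that layering, using local stand-in exception types rather than BadStorePathName/BadStorePath:

#include <stdexcept>
#include <string>

struct BadName : std::runtime_error { using std::runtime_error::runtime_error; };
struct BadPath : std::runtime_error { using std::runtime_error::runtime_error; };

void checkNameSketch(const std::string & name)
{
    if (name.empty())
        throw BadName("name must not be empty");
}

void checkPathNameSketch(const std::string & path, const std::string & name)
{
    try {
        checkNameSketch(name);
    } catch (BadName & e) {
        // Re-throw with the full path as context, mirroring checkPathName above.
        throw BadPath("path '" + path + "' is not a valid store path: " + e.what());
    }
}
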
@@ -33,19 +33,9 @@ static void canonicaliseTimestampAndPermissions(const Path & path, const struct

#ifndef _WIN32 // TODO implement
    if (st.st_mtime != mtimeStore) {
-       struct timeval times[2];
-       times[0].tv_sec = st.st_atime;
-       times[0].tv_usec = 0;
-       times[1].tv_sec = mtimeStore;
-       times[1].tv_usec = 0;
-#if HAVE_LUTIMES
-       if (lutimes(path.c_str(), times) == -1)
-           if (errno != ENOSYS ||
-               (!S_ISLNK(st.st_mode) && utimes(path.c_str(), times) == -1))
-#else
-       if (!S_ISLNK(st.st_mode) && utimes(path.c_str(), times) == -1)
-#endif
-           throw SysError("changing modification time of '%1%'", path);
+       struct stat st2 = st;
+       st2.st_mtime = mtimeStore,
+       setWriteTime(path, st2);
    }
#endif
}
@@ -392,8 +392,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
    else {
        if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25");

-       std::visit(overloaded {
-           [&](const TextIngestionMethod & thm) -> void {
+       switch (caMethod.raw) {
+       case ContentAddressMethod::Raw::Text:
+       {
            if (hashAlgo != HashAlgorithm::SHA256)
                throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
                    name, printHashAlgo(hashAlgo));
@@ -401,13 +402,19 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
            conn->to << WorkerProto::Op::AddTextToStore << name << s;
            WorkerProto::write(*this, *conn, references);
            conn.processStderr();
-           },
-           [&](const FileIngestionMethod & fim) -> void {
+           break;
+       }
+       case ContentAddressMethod::Raw::Flat:
+       case ContentAddressMethod::Raw::NixArchive:
+       case ContentAddressMethod::Raw::Git:
+       default:
+       {
+           auto fim = caMethod.getFileIngestionMethod();
            conn->to
                << WorkerProto::Op::AddToStore
                << name
-               << ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
-               << (fim == FileIngestionMethod::Recursive ? 1 : 0)
+               << ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::NixArchive) ? 0 : 1) /* backwards compatibility hack */
+               << (fim == FileIngestionMethod::NixArchive ? 1 : 0)
                << printHashAlgo(hashAlgo);

            try {
@@ -415,7 +422,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
            connections->incCapacity();
            {
                Finally cleanup([&]() { connections->decCapacity(); });
-               if (fim == FileIngestionMethod::Recursive) {
+               if (fim == FileIngestionMethod::NixArchive) {
                    dump.drainInto(conn->to);
                } else {
                    std::string contents = dump.drain();
@@ -432,9 +439,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
                } catch (EndOfFile & e) { }
                throw;
            }

+           break;
        }
-       }, caMethod.raw);
+       }
        auto path = parseStorePath(readString(conn->from));
        // Release our connection to prevent a deadlock in queryPathInfo().
        conn_.reset();
@@ -457,12 +464,12 @@ StorePath RemoteStore::addToStoreFromDump(
    case FileIngestionMethod::Flat:
        fsm = FileSerialisationMethod::Flat;
        break;
-   case FileIngestionMethod::Recursive:
-       fsm = FileSerialisationMethod::Recursive;
+   case FileIngestionMethod::NixArchive:
+       fsm = FileSerialisationMethod::NixArchive;
        break;
    case FileIngestionMethod::Git:
        // Use NAR; Git is not a serialization method
-       fsm = FileSerialisationMethod::Recursive;
+       fsm = FileSerialisationMethod::NixArchive;
        break;
    default:
        assert(false);
@@ -87,8 +87,8 @@ public:
    StorePath addToStoreFromDump(
        Source & dump,
        std::string_view name,
-       FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive,
-       ContentAddressMethod hashMethod = FileIngestionMethod::Recursive,
+       FileSerialisationMethod dumpMethod = FileSerialisationMethod::NixArchive,
+       ContentAddressMethod hashMethod = FileIngestionMethod::NixArchive,
        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        const StorePathSet & references = StorePathSet(),
        RepairFlag repair = NoRepair) override;
@@ -19,6 +19,7 @@
#include "signals.hh"
#include "users.hh"

#include <filesystem>
#include <nlohmann/json.hpp>

using json = nlohmann::json;
@@ -121,7 +122,7 @@ StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const Fixed
    if (info.method == FileIngestionMethod::Git && info.hash.algo != HashAlgorithm::SHA1)
        throw Error("Git file ingestion must use SHA-1 hash");

-   if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::Recursive) {
+   if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::NixArchive) {
        return makeStorePath(makeType(*this, "source", info.references), info.hash, name);
    } else {
        if (!info.references.empty()) {
@@ -199,12 +200,12 @@ StorePath Store::addToStore(
    case FileIngestionMethod::Flat:
        fsm = FileSerialisationMethod::Flat;
        break;
-   case FileIngestionMethod::Recursive:
-       fsm = FileSerialisationMethod::Recursive;
+   case FileIngestionMethod::NixArchive:
+       fsm = FileSerialisationMethod::NixArchive;
        break;
    case FileIngestionMethod::Git:
        // Use NAR; Git is not a serialization method
-       fsm = FileSerialisationMethod::Recursive;
+       fsm = FileSerialisationMethod::NixArchive;
        break;
    }
    auto source = sinkToSource([&](Sink & sink) {
@@ -355,7 +356,7 @@ ValidPathInfo Store::addToStoreSlow(
    RegularFileSink fileSink { caHashSink };
    TeeSink unusualHashTee { narHashSink, caHashSink };

-   auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != HashAlgorithm::SHA256
+   auto & narSink = method == ContentAddressMethod::Raw::NixArchive && hashAlgo != HashAlgorithm::SHA256
        ? static_cast<Sink &>(unusualHashTee)
        : narHashSink;

@@ -383,9 +384,9 @@ ValidPathInfo Store::addToStoreSlow(
       finish. */
    auto [narHash, narSize] = narHashSink.finish();

-   auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashAlgorithm::SHA256
+   auto hash = method == ContentAddressMethod::Raw::NixArchive && hashAlgo == HashAlgorithm::SHA256
        ? narHash
-       : method == FileIngestionMethod::Git
+       : method == ContentAddressMethod::Raw::Git
        ? git::dumpHash(hashAlgo, srcPath).hash
        : caHashSink.finish().first;

@@ -1303,7 +1304,7 @@ ref<Store> openStore(StoreReference && storeURI)
    if (!pathExists(chrootStore)) {
        try {
            createDirs(chrootStore);
-       } catch (Error & e) {
+       } catch (SystemError & e) {
            return std::make_shared<LocalStore>(params);
        }
        warn("'%s' does not exist, so Nix will use '%s' as a chroot store", stateDir, chrootStore);
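
Several hunks above repeat the same mapping when streaming a path to a store: flat content stays flat, NAR-ingested content is sent as a NAR, and Git ingestion also falls back to NAR because Git is not a wire serialisation. A compact sketch with simplified stand-in enums:

#include <cassert>

enum class Ingestion { Flat, NixArchive, Git };
enum class Serialisation { Flat, NixArchive };

Serialisation pickSerialisation(Ingestion method)
{
    switch (method) {
    case Ingestion::Flat:
        return Serialisation::Flat;
    case Ingestion::NixArchive:
        return Serialisation::NixArchive;
    case Ingestion::Git:
        // Git trees are not a serialisation format; ship a NAR instead.
        return Serialisation::NixArchive;
    }
    assert(false);
    return Serialisation::NixArchive;
}
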
@@ -441,7 +441,7 @@ public:
    virtual StorePath addToStore(
        std::string_view name,
        const SourcePath & path,
-       ContentAddressMethod method = FileIngestionMethod::Recursive,
+       ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive,
        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        const StorePathSet & references = StorePathSet(),
        PathFilter & filter = defaultPathFilter,
@@ -455,7 +455,7 @@ public:
    ValidPathInfo addToStoreSlow(
        std::string_view name,
        const SourcePath & path,
-       ContentAddressMethod method = FileIngestionMethod::Recursive,
+       ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive,
        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        const StorePathSet & references = StorePathSet(),
        std::optional<Hash> expectedCAHash = {});
@@ -470,7 +470,7 @@ public:
     *
     * @param dumpMethod What serialisation format is `dump`, i.e. how
     * to deserialize it. Must either match hashMethod or be
-    * `FileSerialisationMethod::Recursive`.
+    * `FileSerialisationMethod::NixArchive`.
     *
     * @param hashMethod How content addressing? Need not match be the
     * same as `dumpMethod`.
@@ -480,8 +480,8 @@ public:
    virtual StorePath addToStoreFromDump(
        Source & dump,
        std::string_view name,
-       FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive,
-       ContentAddressMethod hashMethod = FileIngestionMethod::Recursive,
+       FileSerialisationMethod dumpMethod = FileSerialisationMethod::NixArchive,
+       ContentAddressMethod hashMethod = ContentAddressMethod::Raw::NixArchive,
        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        const StorePathSet & references = StorePathSet(),
        RepairFlag repair = NoRepair) = 0;
@@ -16,6 +16,7 @@ namespace nix {
struct SourcePath;

MakeError(BadStorePath, Error);
+MakeError(BadStorePathName, BadStorePath);

struct StoreDirConfig : public Config
{
@@ -97,7 +98,7 @@ struct StoreDirConfig : public Config
    std::pair<StorePath, Hash> computeStorePath(
        std::string_view name,
        const SourcePath & path,
-       ContentAddressMethod method = FileIngestionMethod::Recursive,
+       ContentAddressMethod method = FileIngestionMethod::NixArchive,
        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        const StorePathSet & references = {},
        PathFilter & filter = defaultPathFilter) const;
@@ -1,4 +1,5 @@
#include "globals.hh"
+#include "config-global.hh"
#include "hook-instance.hh"
#include "file-system.hh"
#include "child.hh"
@@ -2499,7 +2499,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
        auto fim = outputHash.method.getFileIngestionMethod();
        switch (fim) {
        case FileIngestionMethod::Flat:
-       case FileIngestionMethod::Recursive:
+       case FileIngestionMethod::NixArchive:
        {
            HashModuloSink caSink { outputHash.hashAlgo, oldHashPart };
            auto fim = outputHash.method.getFileIngestionMethod();
@@ -2541,7 +2541,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
        {
            HashResult narHashAndSize = hashPath(
                {getFSSourceAccessor(), CanonPath(actualPath)},
-               FileSerialisationMethod::Recursive, HashAlgorithm::SHA256);
+               FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256);
            newInfo0.narHash = narHashAndSize.first;
            newInfo0.narSize = narHashAndSize.second;
        }
@@ -2564,7 +2564,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
        rewriteOutput(outputRewrites);
        HashResult narHashAndSize = hashPath(
            {getFSSourceAccessor(), CanonPath(actualPath)},
-           FileSerialisationMethod::Recursive, HashAlgorithm::SHA256);
+           FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256);
        ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
        newInfo0.narSize = narHashAndSize.second;
        auto refs = rewriteRefs();
@@ -2914,6 +2914,24 @@ void LocalDerivationGoal::checkOutputs(const std::map<std::string, ValidPathInfo
    };

    if (auto structuredAttrs = parsedDrv->getStructuredAttrs()) {
+       if (get(*structuredAttrs, "allowedReferences")){
+           warn("'structuredAttrs' disables the effect of the top-level attribute 'allowedReferences'; use 'outputChecks' instead");
+       }
+       if (get(*structuredAttrs, "allowedRequisites")){
+           warn("'structuredAttrs' disables the effect of the top-level attribute 'allowedRequisites'; use 'outputChecks' instead");
+       }
+       if (get(*structuredAttrs, "disallowedRequisites")){
+           warn("'structuredAttrs' disables the effect of the top-level attribute 'disallowedRequisites'; use 'outputChecks' instead");
+       }
+       if (get(*structuredAttrs, "disallowedReferences")){
+           warn("'structuredAttrs' disables the effect of the top-level attribute 'disallowedReferences'; use 'outputChecks' instead");
+       }
+       if (get(*structuredAttrs, "maxSize")){
+           warn("'structuredAttrs' disables the effect of the top-level attribute 'maxSize'; use 'outputChecks' instead");
+       }
+       if (get(*structuredAttrs, "maxClosureSize")){
+           warn("'structuredAttrs' disables the effect of the top-level attribute 'maxClosureSize'; use 'outputChecks' instead");
+       }
        if (auto outputChecks = get(*structuredAttrs, "outputChecks")) {
            if (auto output = get(*outputChecks, outputName)) {
                Checks checks;
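
The added block warns once per ignored top-level attribute; the same intent can also be expressed table-driven. This is purely an illustrative alternative, not what the commit does, and it uses a plain set of attribute names instead of the real structured-attributes JSON:

#include <initializer_list>
#include <iostream>
#include <set>
#include <string>

void warnIgnoredTopLevelAttrs(const std::set<std::string> & structuredAttrs)
{
    for (const char * attr : {"allowedReferences", "allowedRequisites",
                              "disallowedRequisites", "disallowedReferences",
                              "maxSize", "maxClosureSize"})
        if (structuredAttrs.count(attr))
            std::cerr << "warning: 'structuredAttrs' disables the effect of the top-level attribute '"
                      << attr << "'; use 'outputChecks' instead\n";
}
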