Mirror of https://github.com/NixOS/nix.git (synced 2025-11-14 22:42:41 +01:00)
libutil: Make HashResult a proper struct
This resolves an existing TODO and makes the code slightly more readable.
commit 143bd60136
parent 241420a788
18 changed files with 56 additions and 50 deletions
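For orientation, here is a minimal sketch of what the change amounts to. The field names are taken from the call sites in the hunks below; the exact definition and its location in libutil may differ.

    // Illustrative sketch only -- not the verbatim libutil definition.
    //
    // Before: HashResult was a plain pair, accessed via .first / .second:
    //     using HashResult = std::pair<Hash, uint64_t>;
    //
    // After: a named struct, accessed via .hash / .numBytesDigested:
    struct HashResult
    {
        Hash hash;                 // digest produced by the sink
        uint64_t numBytesDigested; // number of bytes fed into the hash
    };
    //
    // Call sites migrate mechanically, for example:
    //     auto res = hashSink.finish();
    //     info.narHash = res.hash;             // was res.first
    //     info.narSize = res.numBytesDigested; // was res.second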
@@ -368,16 +368,16 @@ StorePath BinaryCacheStore::addToStoreFromDump(
             name,
             ContentAddressWithReferences::fromParts(
                 hashMethod,
-                caHash ? *caHash : nar.first,
+                caHash ? *caHash : nar.hash,
                 {
                     .others = references,
                     // caller is not capable of creating a self-reference, because this is content-addressed
                     // without modulus
                     .self = false,
                 }),
-            nar.first,
+            nar.hash,
         };
-        info.narSize = nar.second;
+        info.narSize = nar.numBytesDigested;
         return info;
     })
         ->path;
@@ -493,9 +493,9 @@ StorePath BinaryCacheStore::addToStore(
                     // without modulus
                     .self = false,
                 }),
-            nar.first,
+            nar.hash,
         };
-        info.narSize = nar.second;
+        info.narSize = nar.numBytesDigested;
         return info;
     })
         ->path;
@@ -33,7 +33,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
     /* Refuse to export paths that have changed. This prevents
        filesystem corruption from spreading to other machines.
        Don't complain if the stored hash is zero (unknown). */
-    Hash hash = hashSink.currentHash().first;
+    Hash hash = hashSink.currentHash().hash;
     if (hash != info->narHash && info->narHash != Hash(info->narHash.algo))
         throw Error(
             "hash of path '%s' has changed from '%s' to '%s'!",
@@ -1072,19 +1072,19 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, RepairF

         auto hashResult = hashSink.finish();

-        if (hashResult.first != info.narHash)
+        if (hashResult.hash != info.narHash)
             throw Error(
                 "hash mismatch importing path '%s';\n specified: %s\n got: %s",
                 printStorePath(info.path),
                 info.narHash.to_string(HashFormat::Nix32, true),
-                hashResult.first.to_string(HashFormat::Nix32, true));
+                hashResult.hash.to_string(HashFormat::Nix32, true));

-        if (hashResult.second != info.narSize)
+        if (hashResult.numBytesDigested != info.narSize)
             throw Error(
                 "size mismatch importing path '%s';\n specified: %s\n got: %s",
                 printStorePath(info.path),
                 info.narSize,
-                hashResult.second);
+                hashResult.numBytesDigested);

         if (info.ca) {
             auto & specified = *info.ca;
@@ -1101,7 +1101,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, RepairF
                         std::string{info.path.hashPart()},
                     };
                     dumpPath({accessor, path}, caSink, (FileSerialisationMethod) fim);
-                    h = caSink.finish().first;
+                    h = caSink.finish().hash;
                     break;
                 }
                 case FileIngestionMethod::Git:
@@ -1279,7 +1279,7 @@ StorePath LocalStore::addToStoreFromDump(

     /* For computing the nar hash. In recursive SHA-256 mode, this
        is the same as the store hash, so no need to do it again. */
-    auto narHash = std::pair{dumpHash, size};
+    HashResult narHash = {dumpHash, size};
     if (dumpMethod != FileSerialisationMethod::NixArchive || hashAlgo != HashAlgorithm::SHA256) {
         HashSink narSink{HashAlgorithm::SHA256};
         dumpPath(realPath, narSink);
@@ -1295,8 +1295,8 @@ StorePath LocalStore::addToStoreFromDump(
             syncParent(realPath);
         }

-        ValidPathInfo info{*this, name, std::move(desc), narHash.first};
-        info.narSize = narHash.second;
+        ValidPathInfo info{*this, name, std::move(desc), narHash.hash};
+        info.narSize = narHash.numBytesDigested;
         registerValidPath(info);
     }

@@ -1402,12 +1402,12 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
                 dumpPath(Store::toRealPath(i), hashSink);
                 auto current = hashSink.finish();

-                if (info->narHash != nullHash && info->narHash != current.first) {
+                if (info->narHash != nullHash && info->narHash != current.hash) {
                     printError(
                         "path '%s' was modified! expected hash '%s', got '%s'",
                         printStorePath(i),
                         info->narHash.to_string(HashFormat::Nix32, true),
-                        current.first.to_string(HashFormat::Nix32, true));
+                        current.hash.to_string(HashFormat::Nix32, true));
                     if (repair)
                         repairPath(i);
                     else
@@ -1419,14 +1419,14 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
                     /* Fill in missing hashes. */
                     if (info->narHash == nullHash) {
                         printInfo("fixing missing hash on '%s'", printStorePath(i));
-                        info->narHash = current.first;
+                        info->narHash = current.hash;
                         update = true;
                     }

                     /* Fill in missing narSize fields (from old stores). */
                     if (info->narSize == 0) {
-                        printInfo("updating size field on '%s' to %s", printStorePath(i), current.second);
-                        info->narSize = current.second;
+                        printInfo("updating size field on '%s' to %s", printStorePath(i), current.numBytesDigested);
+                        info->narSize = current.numBytesDigested;
                         update = true;
                     }

@@ -43,7 +43,7 @@ std::map<StorePath, StorePath> makeContentAddressed(Store & srcStore, Store & ds
         HashModuloSink hashModuloSink(HashAlgorithm::SHA256, oldHashPart);
         hashModuloSink(sink.s);

-        auto narModuloHash = hashModuloSink.finish().first;
+        auto narModuloHash = hashModuloSink.finish().hash;

         ValidPathInfo info{
             dstStore,
@@ -160,7 +160,7 @@ void LocalStore::optimisePath_(
             {make_ref<PosixSourceAccessor>(), CanonPath(path)},
             FileSerialisationMethod::NixArchive,
             HashAlgorithm::SHA256)
-            .first;
+            .hash;
     });

     debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));
@@ -175,7 +175,7 @@ void LocalStore::optimisePath_(
             PosixSourceAccessor::createAtRoot(linkPath),
             FileSerialisationMethod::NixArchive,
             HashAlgorithm::SHA256)
-            .first;
+            .hash;
     }))) {
         // XXX: Consider overwriting linkPath with our valid version.
         warn("removing corrupted link %s", linkPath);
@@ -254,7 +254,7 @@ ValidPathInfo Store::addToStoreSlow(

    auto hash = method == ContentAddressMethod::Raw::NixArchive && hashAlgo == HashAlgorithm::SHA256 ? narHash
                : method == ContentAddressMethod::Raw::Git ? git::dumpHash(hashAlgo, srcPath).hash
-               : caHashSink.finish().first;
+               : caHashSink.finish().hash;

    if (expectedCAHash && expectedCAHash != hash)
        throw Error("hash mismatch for '%s'", srcPath);
@@ -1035,8 +1035,8 @@ decodeValidPathInfo(const Store & store, std::istream & str, std::optional<HashR
             throw Error("number expected");
         hashGiven = {narHash, *narSize};
     }
-    ValidPathInfo info(store.parseStorePath(path), hashGiven->first);
-    info.narSize = hashGiven->second;
+    ValidPathInfo info(store.parseStorePath(path), hashGiven->hash);
+    info.narSize = hashGiven->numBytesDigested;
     std::string deriver;
     getline(str, deriver);
     if (deriver != "")
@@ -1676,7 +1676,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs()
                 HashModuloSink caSink{outputHash.hashAlgo, oldHashPart};
                 auto fim = outputHash.method.getFileIngestionMethod();
                 dumpPath({getFSSourceAccessor(), CanonPath(actualPath)}, caSink, (FileSerialisationMethod) fim);
-                return caSink.finish().first;
+                return caSink.finish().hash;
             }
             case FileIngestionMethod::Git: {
                 return git::dumpHash(outputHash.hashAlgo, {getFSSourceAccessor(), CanonPath(actualPath)}).hash;
@@ -1705,8 +1705,8 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs()
                 {getFSSourceAccessor(), CanonPath(actualPath)},
                 FileSerialisationMethod::NixArchive,
                 HashAlgorithm::SHA256);
-            newInfo0.narHash = narHashAndSize.first;
-            newInfo0.narSize = narHashAndSize.second;
+            newInfo0.narHash = narHashAndSize.hash;
+            newInfo0.narSize = narHashAndSize.numBytesDigested;
         }

         assert(newInfo0.ca);
@@ -1729,8 +1729,8 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs()
             {getFSSourceAccessor(), CanonPath(actualPath)},
             FileSerialisationMethod::NixArchive,
             HashAlgorithm::SHA256);
-        ValidPathInfo newInfo0{requiredFinalPath, narHashAndSize.first};
-        newInfo0.narSize = narHashAndSize.second;
+        ValidPathInfo newInfo0{requiredFinalPath, narHashAndSize.hash};
+        newInfo0.narSize = narHashAndSize.numBytesDigested;
         auto refs = rewriteRefs();
         newInfo0.references = std::move(refs.others);
         if (refs.self)