
Further cleans up store object info JSON v2

Since we haven't released v2 yet (2.32 has v1), we can just update it in place and avoid version churn.

Note that, as a nice side effect of using the standard `Hash` JSON impl, we no longer need the `hashFormat` parameter.
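To illustrate why having JSON support attached to the `Hash` type itself makes an explicit `hashFormat` parameter redundant, here is a minimal, self-contained sketch using nlohmann's ADL hooks. The `Hash` struct, its fields, and the serializer below are stand-ins chosen to mirror the JSON shown in this diff, not the real Nix classes:

```cpp
// Minimal sketch (not the actual Nix sources): once a type has nlohmann
// to_json/from_json overloads, callers can assign it to a JSON field
// directly, so no format argument needs to be threaded through.
#include <nlohmann/json.hpp>
#include <iostream>
#include <string>

struct Hash
{
    std::string algorithm; // e.g. "sha256" (stand-in field names)
    std::string base64;    // digest, base64-encoded
};

// ADL serializer: emits the structured object used by the v2 format.
void to_json(nlohmann::json & j, const Hash & h)
{
    j = {{"algorithm", h.algorithm}, {"format", "base64"}, {"hash", h.base64}};
}

void from_json(const nlohmann::json & j, Hash & h)
{
    j.at("algorithm").get_to(h.algorithm);
    j.at("hash").get_to(h.base64);
}

int main()
{
    nlohmann::json obj;
    Hash narHash{"sha256", "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="};
    obj["narHash"] = narHash;               // no hashFormat argument needed
    Hash back = obj["narHash"].get<Hash>(); // round-trips via from_json
    std::cout << obj.dump(2) << "\n" << back.algorithm << "\n";
}
```

With the serializer owned by the type, callers such as `toJSON` no longer have to pass a hash format through every call site, which is exactly the parameter this commit drops.
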
This commit is contained in:
John Ericson 2025-11-06 17:07:15 -05:00
parent 9c04c629e5
commit 4f1c8f62c3
16 changed files with 91 additions and 48 deletions

View file

@@ -22,6 +22,14 @@ The store path info JSON format has been updated from version 1 to version 2:
- New: `"ca": {"method": "nar", "hash": {"algorithm": "sha256", "format": "base64", "hash": "EMIJ+giQ..."}}`
- Still `null` values for input-addressed store objects
- **Structured hash fields**:
Hash values (`narHash` and `downloadHash`) are now structured JSON objects instead of strings:
- Old: `"narHash": "sha256:FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="`
- New: `"narHash": {"algorithm": "sha256", "format": "base64", "hash": "FePFYIlM..."}`
- Same structure applies to `downloadHash` in NAR info contexts
Nix currently only produces, and doesn't consume, this format.
**Affected command**: `nix path-info --json`
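
Since Nix only produces this format for now, external consumers have to handle the structured hash objects themselves. The following is a rough sketch of such a consumer (a hypothetical reader, not part of Nix), hard-coding a v2 document shaped like the examples in this commit:

```cpp
// Sketch of a consumer of v2 `nix path-info --json` output: hash fields
// such as narHash are now objects, not strings.
#include <nlohmann/json.hpp>
#include <iostream>
#include <stdexcept>
#include <string>

int main()
{
    // Shape taken from the examples in this commit.
    auto info = nlohmann::json::parse(R"({
        "version": 2,
        "narSize": 34878,
        "narHash": {
            "algorithm": "sha256",
            "format": "base64",
            "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
        }
    })");

    if (info.at("version").get<int>() != 2)
        throw std::runtime_error("expected path info JSON version 2");

    // In v2 this is an object; in v1 it was a single "sha256:..." string.
    const auto & narHash = info.at("narHash");
    std::cout << narHash.at("algorithm").get<std::string>() << ":"
              << narHash.at("hash").get<std::string>() << "\n";
}
```

A consumer that still needs to accept version 1 output would have to branch on whether `narHash` is a string or an object.
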

View file

@@ -71,7 +71,7 @@ $defs:
Note: This field may not be present in all contexts, such as when the path is used as the key and the store object info as the value in a map.
narHash:
type: string
"$ref": "./hash-v1.yaml"
title: NAR Hash
description: |
Hash of the [file system object](@docroot@/store/file-system-object.md) part of the store object when serialized as a [Nix Archive](@docroot@/store/file-system-object/content-address.md#serial-nix-archive).
@@ -229,7 +229,7 @@ $defs:
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
downloadHash:
type: string
"$ref": "./hash-v1.yaml"
title: Download Hash
description: |
A digest for the compressed archive itself, as opposed to the data contained within.

View file

@@ -9,9 +9,17 @@
},
"compression": "xz",
"deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
"downloadHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"downloadHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
},
"downloadSize": 4029176,
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
},
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",

View file

@@ -7,7 +7,11 @@
},
"method": "nar"
},
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
},
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",

View file

@@ -1,7 +1,11 @@
{
"ca": null,
"deriver": null,
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
},
"narSize": 0,
"references": [],
"registrationTime": null,

View file

@@ -1,6 +1,10 @@
{
"ca": null,
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
},
"narSize": 0,
"references": [],
"version": 2

View file

@@ -8,7 +8,11 @@
"method": "nar"
},
"deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
},
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",

View file

@@ -7,7 +7,11 @@
},
"method": "nar"
},
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
},
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",

View file

@@ -59,24 +59,24 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo)
return info;
}
#define JSON_TEST(STEM, PURE) \
TEST_F(NarInfoTest, NarInfo_##STEM##_from_json) \
{ \
readTest(#STEM, [&](const auto & encoded_) { \
auto encoded = json::parse(encoded_); \
auto expected = makeNarInfo(*store, PURE); \
NarInfo got = NarInfo::fromJSON(*store, expected.path, encoded); \
ASSERT_EQ(got, expected); \
}); \
} \
\
TEST_F(NarInfoTest, NarInfo_##STEM##_to_json) \
{ \
writeTest( \
#STEM, \
[&]() -> json { return makeNarInfo(*store, PURE).toJSON(*store, PURE, HashFormat::SRI); }, \
[](const auto & file) { return json::parse(readFile(file)); }, \
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
#define JSON_TEST(STEM, PURE) \
TEST_F(NarInfoTest, NarInfo_##STEM##_from_json) \
{ \
readTest(#STEM, [&](const auto & encoded_) { \
auto encoded = json::parse(encoded_); \
auto expected = makeNarInfo(*store, PURE); \
NarInfo got = NarInfo::fromJSON(*store, expected.path, encoded); \
ASSERT_EQ(got, expected); \
}); \
} \
\
TEST_F(NarInfoTest, NarInfo_##STEM##_to_json) \
{ \
writeTest( \
#STEM, \
[&]() -> json { return makeNarInfo(*store, PURE).toJSON(*store, PURE); }, \
[](const auto & file) { return json::parse(readFile(file)); }, \
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
}
JSON_TEST(pure, false)

View file

@@ -80,7 +80,7 @@ static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo
{ \
writeTest( \
#STEM, \
[&]() -> json { return OBJ.toJSON(*store, PURE, HashFormat::SRI); }, \
[&]() -> json { return OBJ.toJSON(*store, PURE); }, \
[](const auto & file) { return json::parse(readFile(file)); }, \
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
}

View file

@@ -42,7 +42,7 @@ struct NarInfo : ValidPathInfo
std::string to_string(const StoreDirConfig & store) const;
nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const override;
nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo) const override;
static NarInfo fromJSON(const StoreDirConfig & store, const StorePath & path, const nlohmann::json & json);
};

View file

@@ -117,7 +117,7 @@ struct UnkeyedValidPathInfo
* @param includeImpureInfo If true, variable elements such as the
* registration time are included.
*/
virtual nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const;
virtual nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo) const;
static UnkeyedValidPathInfo fromJSON(const StoreDirConfig & store, const nlohmann::json & json);
};

View file

@@ -130,11 +130,11 @@ std::string NarInfo::to_string(const StoreDirConfig & store) const
return res;
}
nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const
nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo) const
{
using nlohmann::json;
auto jsonObject = ValidPathInfo::toJSON(store, includeImpureInfo, hashFormat);
auto jsonObject = ValidPathInfo::toJSON(store, includeImpureInfo);
if (includeImpureInfo) {
if (!url.empty())
@@ -142,7 +142,7 @@ nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureI
if (!compression.empty())
jsonObject["compression"] = compression;
if (fileHash)
jsonObject["downloadHash"] = fileHash->to_string(hashFormat, true);
jsonObject["downloadHash"] = *fileHash;
if (fileSize)
jsonObject["downloadSize"] = fileSize;
}
@@ -161,17 +161,17 @@ NarInfo NarInfo::fromJSON(const StoreDirConfig & store, const StorePath & path,
auto & obj = getObject(json);
if (json.contains("url"))
res.url = getString(valueAt(obj, "url"));
if (auto * url = get(obj, "url"))
res.url = getString(*url);
if (json.contains("compression"))
res.compression = getString(valueAt(obj, "compression"));
if (auto * compression = get(obj, "compression"))
res.compression = getString(*compression);
if (json.contains("downloadHash"))
res.fileHash = Hash::parseAny(getString(valueAt(obj, "downloadHash")), std::nullopt);
if (auto * downloadHash = get(obj, "downloadHash"))
res.fileHash = *downloadHash;
if (json.contains("downloadSize"))
res.fileSize = getUnsigned(valueAt(obj, "downloadSize"));
if (auto * downloadSize = get(obj, "downloadSize"))
res.fileSize = getUnsigned(*downloadSize);
return res;
}

View file

@@ -149,8 +149,7 @@ ValidPathInfo ValidPathInfo::makeFromCA(
return res;
}
nlohmann::json
UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const
nlohmann::json UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo) const
{
using nlohmann::json;
@@ -158,7 +157,7 @@ UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInf
jsonObject["version"] = 2;
jsonObject["narHash"] = narHash.to_string(hashFormat, true);
jsonObject["narHash"] = narHash;
jsonObject["narSize"] = narSize;
{
@@ -198,7 +197,7 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store
throw Error("Unsupported path info JSON format version %d, only version 2 is currently supported", version);
}
res.narHash = Hash::parseAny(getString(valueAt(json, "narHash")), std::nullopt);
res.narHash = valueAt(json, "narHash");
res.narSize = getUnsigned(valueAt(json, "narSize"));
try {

View file

@@ -51,7 +51,7 @@ static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool
// know the name yet until we've read the NAR info.
printedStorePath = store.printStorePath(info->path);
jsonObject = info->toJSON(store, true, HashFormat::SRI);
jsonObject = info->toJSON(store, true);
if (showClosureSize) {
StorePathSet closure;

View file

@@ -17,8 +17,16 @@ diff --unified --color=always \
jq --sort-keys 'map_values(.narHash)') \
<(jq --sort-keys <<-EOF
{
"$foo": "sha256-QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA=",
"$bar": "sha256-9fhYGu9fqxcQC2Kc81qh2RMo1QcLBUBo8U+pPn+jthQ=",
"$foo": {
"algorithm": "sha256",
"format": "base64",
"hash": "QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA="
},
"$bar": {
"algorithm": "sha256",
"format": "base64",
"hash": "9fhYGu9fqxcQC2Kc81qh2RMo1QcLBUBo8U+pPn+jthQ="
},
"$baz": null
}
EOF