
Merge pull request #14704 from NixOS/version-output

Introduce `--json-format` for `nix path-info`
Commit 9246dca541 by John Ericson, 2025-12-04 03:48:49 +00:00 (committed by GitHub)
36 changed files with 464 additions and 132 deletions

View file

@ -6,13 +6,29 @@ issues: []
JSON formats for store path info and derivations have been updated with new versions and structured fields.

## Store Path Info JSON

`nix path-info --json` now requires a `--json-format` flag to specify the output format version.
Using `--json` without `--json-format` is deprecated and will become an error in a future release.
For now, it defaults to version 1 with a warning, for a smoother migration.

### Version 1 (`--json-format 1`)

This is the legacy format, preserved for backwards compatibility:

- String-based hash values (e.g., `"narHash": "sha256:FePFYIlM..."`)
- String-based content addresses (e.g., `"ca": "fixed:r:sha256:1abc..."`)
- Full store paths in references (e.g., `"/nix/store/abc...-foo"`)
- Now includes `"storeDir"` field at the top level

### Version 2 (`--json-format 2`)

The new structured format with the following changes:

- **Store path base names in references**:
  References use store path base names (e.g., `"abc...-foo"`) instead of full paths.
  Combined with `storeDir`, the full path can be reconstructed (see the shell sketch below).
- **Structured `ca` field**:
@ -33,7 +49,16 @@ The store path info JSON format has been updated from version 1 to version 2:
Nix currently only produces, and doesn't consume this format.

**Affected command**: `nix path-info --json`

Additionally, the following fields are added to both formats.
(The `version` tracks breaking changes, and adding fields to outputted JSON is not a breaking change.)

- **`version` field**:
  All store path info JSON now includes `"version": <1|2>`.
- **`storeDir` field**:
  Top-level `"storeDir"` field contains the store directory path (e.g., `"/nix/store"`).
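For illustration only (not part of this change), a minimal shell sketch of reconstructing full store paths from a version 2 result; it assumes `jq` is installed and `$path` names a valid store path:

```bash
# Hedged sketch: rebuild full store paths from `--json-format 2` output.
# Each per-path object carries "storeDir" plus base-name "references".
nix path-info --json --json-format 2 "$path" \
  | jq -r '.[] | .storeDir as $d | .references[] | "\($d)/\(.)"'
```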
## Derivation JSON (Version 4)

View file

@ -1 +0,0 @@
../../../../../../src/libstore-tests/data/nar-info

View file

@ -0,0 +1 @@
../../../../../../src/libstore-tests/data/nar-info/json-2

View file

@ -1 +1 @@
../../../../../../src/libstore-tests/data/path-info
../../../../../../src/libstore-tests/data/path-info/json-2

View file

@ -63,7 +63,7 @@ $defs:
- Version 2: Use structured JSON type for `ca`
path:
type: string
"$ref": "./store-path-v1.yaml"
title: Store Path
description: |
[Store path](@docroot@/store/store-path.md) to the given store object.
@ -89,7 +89,7 @@ $defs:
description: |
An array of [store paths](@docroot@/store/store-path.md), possibly including this one.
items:
type: string
"$ref": "./store-path-v1.yaml"
ca:
oneOf:
@ -128,7 +128,9 @@ $defs:
references: { $ref: "#/$defs/base/properties/references" }
ca: { $ref: "#/$defs/base/properties/ca" }
deriver:
type: ["string", "null"]
oneOf:
- "$ref": "./store-path-v1.yaml"
- type: "null"
title: Deriver
description: |
If known, the path to the [store derivation](@docroot@/glossary.md#gloss-store-derivation) from which this store object was produced.

View file

@ -29,13 +29,13 @@
### NAR info (minimal)
```json
{{#include schema/nar-info-v1/pure.json}}
{{#include schema/nar-info-v2/pure.json}}
```
### NAR info (with binary cache fields)
```json
{{#include schema/nar-info-v1/impure.json}}
{{#include schema/nar-info-v2/impure.json}}
```
<!-- need to convert YAML to JSON first

View file

@ -154,18 +154,18 @@ schemas += [
'stem' : 'store-object-info',
'schema' : schema_dir / 'store-object-info-v2.yaml',
'files' : [
'pure.json',
'impure.json',
'empty_pure.json',
'empty_impure.json',
'json-2' / 'pure.json',
'json-2' / 'impure.json',
'json-2' / 'empty_pure.json',
'json-2' / 'empty_impure.json',
],
},
{
'stem' : 'nar-info',
'schema' : schema_dir / 'store-object-info-v2.yaml',
'files' : [
'pure.json',
'impure.json',
'json-2' / 'pure.json',
'json-2' / 'impure.json',
],
},
{
@ -182,30 +182,30 @@ schemas += [
'stem' : 'store-object-info',
'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/base',
'files' : [
'pure.json',
'empty_pure.json',
'json-2' / 'pure.json',
'json-2' / 'empty_pure.json',
],
},
{
'stem' : 'store-object-info',
'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/impure',
'files' : [
'impure.json',
'empty_impure.json',
'json-2' / 'impure.json',
'json-2' / 'empty_impure.json',
],
},
{
'stem' : 'nar-info',
'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/base',
'files' : [
'pure.json',
'json-2' / 'pure.json',
],
},
{
'stem' : 'nar-info',
'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/narInfo',
'files' : [
'impure.json',
'json-2' / 'impure.json',
],
},
]

View file

@ -0,0 +1,21 @@
{
"ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
"compression": "xz",
"deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
"downloadHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"downloadSize": 4029176,
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
],
"registrationTime": 23423,
"signatures": [
"asdf",
"qwer"
],
"ultimate": true,
"url": "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz",
"version": 1
}

View file

@ -0,0 +1,10 @@
{
"ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
],
"version": 1
}

View file

@ -0,0 +1,9 @@
{
"ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
]
}

View file

@ -8,7 +8,7 @@
"method": "nar"
},
"compression": "xz",
"deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
"deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
"downloadHash": {
"algorithm": "sha256",
"format": "base16",
@ -22,8 +22,8 @@
},
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
],
"registrationTime": 23423,
"signatures": [

View file

@ -14,8 +14,8 @@
},
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
],
"version": 2
}

View file

@ -0,0 +1,11 @@
{
"ca": null,
"deriver": null,
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narSize": 0,
"references": [],
"registrationTime": null,
"signatures": [],
"ultimate": false,
"version": 1
}

View file

@ -0,0 +1,7 @@
{
"ca": null,
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narSize": 0,
"references": [],
"version": 1
}

View file

@ -0,0 +1,17 @@
{
"ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
"deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
],
"registrationTime": 23423,
"signatures": [
"asdf",
"qwer"
],
"ultimate": true,
"version": 1
}

View file

@ -0,0 +1,10 @@
{
"ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
],
"version": 1
}

View file

@ -0,0 +1,9 @@
{
"ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
]
}

View file

@ -7,7 +7,7 @@
},
"method": "nar"
},
"deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
"deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
"narHash": {
"algorithm": "sha256",
"format": "base16",
@ -15,8 +15,8 @@
},
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
],
"registrationTime": 23423,
"signatures": [

View file

@ -14,8 +14,8 @@
},
"narSize": 34878,
"references": [
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
"n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
],
"version": 2
}

View file

@ -11,9 +11,19 @@ namespace nix {
using nlohmann::json;
class NarInfoTest : public CharacterizationTest, public LibStoreTest
class NarInfoTestV1 : public CharacterizationTest, public LibStoreTest
{
std::filesystem::path unitTestData = getUnitTestData() / "nar-info";
std::filesystem::path unitTestData = getUnitTestData() / "nar-info" / "json-1";
std::filesystem::path goldenMaster(PathView testStem) const override
{
return unitTestData / (testStem + ".json");
}
};
class NarInfoTestV2 : public CharacterizationTest, public LibStoreTest
{
std::filesystem::path unitTestData = getUnitTestData() / "nar-info" / "json-2";
std::filesystem::path goldenMaster(PathView testStem) const override
{
@ -59,27 +69,63 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo)
return info;
}
#define JSON_TEST(STEM, PURE) \
TEST_F(NarInfoTest, NarInfo_##STEM##_from_json) \
{ \
readTest(#STEM, [&](const auto & encoded_) { \
auto encoded = json::parse(encoded_); \
auto expected = makeNarInfo(*store, PURE); \
auto got = UnkeyedNarInfo::fromJSON(&*store, encoded); \
ASSERT_EQ(got, expected); \
}); \
} \
\
TEST_F(NarInfoTest, NarInfo_##STEM##_to_json) \
{ \
writeTest( \
#STEM, \
[&]() -> json { return makeNarInfo(*store, PURE).toJSON(&*store, PURE); }, \
[](const auto & file) { return json::parse(readFile(file)); }, \
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
#define JSON_READ_TEST_V1(STEM, PURE) \
TEST_F(NarInfoTestV1, NarInfo_##STEM##_from_json) \
{ \
readTest(#STEM, [&](const auto & encoded_) { \
auto encoded = json::parse(encoded_); \
auto expected = makeNarInfo(*store, PURE); \
auto got = UnkeyedNarInfo::fromJSON(&*store, encoded); \
ASSERT_EQ(got, expected); \
}); \
}
JSON_TEST(pure, false)
JSON_TEST(impure, true)
#define JSON_WRITE_TEST_V1(STEM, PURE) \
TEST_F(NarInfoTestV1, NarInfo_##STEM##_to_json) \
{ \
writeTest( \
#STEM, \
[&]() -> json { return makeNarInfo(*store, PURE).toJSON(&*store, PURE, PathInfoJsonFormat::V1); }, \
[](const auto & file) { return json::parse(readFile(file)); }, \
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
}
#define JSON_TEST_V1(STEM, PURE) \
JSON_READ_TEST_V1(STEM, PURE) \
JSON_WRITE_TEST_V1(STEM, PURE)
#define JSON_READ_TEST_V2(STEM, PURE) \
TEST_F(NarInfoTestV2, NarInfo_##STEM##_from_json) \
{ \
readTest(#STEM, [&](const auto & encoded_) { \
auto encoded = json::parse(encoded_); \
auto expected = makeNarInfo(*store, PURE); \
auto got = UnkeyedNarInfo::fromJSON(nullptr, encoded); \
ASSERT_EQ(got, expected); \
}); \
}
#define JSON_WRITE_TEST_V2(STEM, PURE) \
TEST_F(NarInfoTestV2, NarInfo_##STEM##_to_json) \
{ \
writeTest( \
#STEM, \
[&]() -> json { return makeNarInfo(*store, PURE).toJSON(nullptr, PURE, PathInfoJsonFormat::V2); }, \
[](const auto & file) { return json::parse(readFile(file)); }, \
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
}
#define JSON_TEST_V2(STEM, PURE) \
JSON_READ_TEST_V2(STEM, PURE) \
JSON_WRITE_TEST_V2(STEM, PURE)
JSON_TEST_V1(pure, false)
JSON_TEST_V1(impure, true)
// Test that JSON without explicit version field parses as V1
JSON_READ_TEST_V1(pure_noversion, false)
JSON_TEST_V2(pure, false)
JSON_TEST_V2(impure, true)
} // namespace nix

View file

@ -10,9 +10,19 @@ namespace nix {
using nlohmann::json;
class PathInfoTest : public CharacterizationTest, public LibStoreTest
class PathInfoTestV1 : public CharacterizationTest, public LibStoreTest
{
std::filesystem::path unitTestData = getUnitTestData() / "path-info";
std::filesystem::path unitTestData = getUnitTestData() / "path-info" / "json-1";
std::filesystem::path goldenMaster(PathView testStem) const override
{
return unitTestData / (testStem + ".json");
}
};
class PathInfoTestV2 : public CharacterizationTest, public LibStoreTest
{
std::filesystem::path unitTestData = getUnitTestData() / "path-info" / "json-2";
std::filesystem::path goldenMaster(PathView testStem) const override
{
@ -65,33 +75,70 @@ static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo
return makeFullKeyed(store, includeImpureInfo);
}
#define JSON_TEST(STEM, OBJ, PURE) \
TEST_F(PathInfoTest, PathInfo_##STEM##_from_json) \
{ \
readTest(#STEM, [&](const auto & encoded_) { \
auto encoded = json::parse(encoded_); \
UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(&*store, encoded); \
auto expected = OBJ; \
ASSERT_EQ(got, expected); \
}); \
} \
\
TEST_F(PathInfoTest, PathInfo_##STEM##_to_json) \
#define JSON_READ_TEST_V1(STEM, OBJ) \
TEST_F(PathInfoTestV1, PathInfo_##STEM##_from_json) \
{ \
readTest(#STEM, [&](const auto & encoded_) { \
auto encoded = json::parse(encoded_); \
UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(&*store, encoded); \
auto expected = OBJ; \
ASSERT_EQ(got, expected); \
}); \
}
#define JSON_WRITE_TEST_V1(STEM, OBJ, PURE) \
TEST_F(PathInfoTestV1, PathInfo_##STEM##_to_json) \
{ \
writeTest( \
#STEM, \
[&]() -> json { return OBJ.toJSON(&*store, PURE); }, \
[&]() -> json { return OBJ.toJSON(&*store, PURE, PathInfoJsonFormat::V1); }, \
[](const auto & file) { return json::parse(readFile(file)); }, \
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
}
JSON_TEST(empty_pure, makeEmpty(), false)
JSON_TEST(empty_impure, makeEmpty(), true)
#define JSON_TEST_V1(STEM, OBJ, PURE) \
JSON_READ_TEST_V1(STEM, OBJ) \
JSON_WRITE_TEST_V1(STEM, OBJ, PURE)
JSON_TEST(pure, makeFull(*store, false), false)
JSON_TEST(impure, makeFull(*store, true), true)
#define JSON_READ_TEST_V2(STEM, OBJ) \
TEST_F(PathInfoTestV2, PathInfo_##STEM##_from_json) \
{ \
readTest(#STEM, [&](const auto & encoded_) { \
auto encoded = json::parse(encoded_); \
UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(nullptr, encoded); \
auto expected = OBJ; \
ASSERT_EQ(got, expected); \
}); \
}
TEST_F(PathInfoTest, PathInfo_full_shortRefs)
#define JSON_WRITE_TEST_V2(STEM, OBJ, PURE) \
TEST_F(PathInfoTestV2, PathInfo_##STEM##_to_json) \
{ \
writeTest( \
#STEM, \
[&]() -> json { return OBJ.toJSON(nullptr, PURE, PathInfoJsonFormat::V2); }, \
[](const auto & file) { return json::parse(readFile(file)); }, \
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
}
#define JSON_TEST_V2(STEM, OBJ, PURE) \
JSON_READ_TEST_V2(STEM, OBJ) \
JSON_WRITE_TEST_V2(STEM, OBJ, PURE)
JSON_TEST_V1(empty_pure, makeEmpty(), false)
JSON_TEST_V1(empty_impure, makeEmpty(), true)
JSON_TEST_V1(pure, makeFull(*store, false), false)
JSON_TEST_V1(impure, makeFull(*store, true), true)
// Test that JSON without explicit version field parses as V1
JSON_READ_TEST_V1(pure_noversion, makeFull(*store, false))
JSON_TEST_V2(empty_pure, makeEmpty(), false)
JSON_TEST_V2(empty_impure, makeEmpty(), true)
JSON_TEST_V2(pure, makeFull(*store, false), false)
JSON_TEST_V2(impure, makeFull(*store, true), true)
TEST_F(PathInfoTestV2, PathInfo_full_shortRefs)
{
ValidPathInfo it = makeFullKeyed(*store, true);
// it.references = unkeyed.references;

View file

@ -25,7 +25,8 @@ struct UnkeyedNarInfo : virtual UnkeyedValidPathInfo
// TODO libc++ 16 (used by darwin) missing `std::optional::operator <=>`, can't do yet
// auto operator <=>(const NarInfo &) const = default;
nlohmann::json toJSON(const StoreDirConfig * store, bool includeImpureInfo) const override;
nlohmann::json
toJSON(const StoreDirConfig * store, bool includeImpureInfo, PathInfoJsonFormat format) const override;
static UnkeyedNarInfo fromJSON(const StoreDirConfig * store, const nlohmann::json & json);
};

View file

@ -14,6 +14,22 @@ namespace nix {
class Store;
struct StoreDirConfig;
/**
* JSON format version for path info output.
*/
enum class PathInfoJsonFormat {
/// Legacy format with string hashes and full store paths
V1 = 1,
/// New format with structured hashes and store path base names
V2 = 2,
};
/**
* Convert an integer version number to PathInfoJsonFormat.
* Throws Error if the version is not supported.
*/
PathInfoJsonFormat parsePathInfoJsonFormat(uint64_t version);
struct SubstitutablePathInfo
{
std::optional<StorePath> deriver;
@ -114,10 +130,16 @@ struct UnkeyedValidPathInfo
virtual ~UnkeyedValidPathInfo() {}
/**
* @param store If non-null, store paths are rendered as full paths.
* If null, store paths are rendered as base names.
* @param includeImpureInfo If true, variable elements such as the
* registration time are included.
* registration time are included.
* @param format JSON format version. Version 1 uses string hashes and
* string content addresses. Version 2 uses structured
* hashes and structured content addresses.
*/
virtual nlohmann::json toJSON(const StoreDirConfig * store, bool includeImpureInfo) const;
virtual nlohmann::json
toJSON(const StoreDirConfig * store, bool includeImpureInfo, PathInfoJsonFormat format) const;
static UnkeyedValidPathInfo fromJSON(const StoreDirConfig * store, const nlohmann::json & json);
};
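The `format` parameter here is what the new `nix path-info --json-format` flag ultimately selects. A hedged shell sketch of the observable difference for `narHash` (example values follow the golden test data above; `$p` is assumed to be a valid store path):

```bash
nix path-info --json --json-format 1 "$p" | jq '.[].narHash'
# => "sha256-FePFYIlM..."                                        (SRI string)
nix path-info --json --json-format 2 "$p" | jq '.[].narHash'
# => {"algorithm": "sha256", "format": "base16", "hash": "..."}  (structured object)
```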
@ -196,5 +218,6 @@ using ValidPathInfos = std::map<StorePath, ValidPathInfo>;
} // namespace nix
JSON_IMPL(nix::PathInfoJsonFormat)
JSON_IMPL(nix::UnkeyedValidPathInfo)
JSON_IMPL(nix::ValidPathInfo)

View file

@ -132,19 +132,24 @@ std::string NarInfo::to_string(const StoreDirConfig & store) const
return res;
}
nlohmann::json UnkeyedNarInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo) const
nlohmann::json
UnkeyedNarInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo, PathInfoJsonFormat format) const
{
using nlohmann::json;
auto jsonObject = UnkeyedValidPathInfo::toJSON(store, includeImpureInfo);
auto jsonObject = UnkeyedValidPathInfo::toJSON(store, includeImpureInfo, format);
if (includeImpureInfo) {
if (!url.empty())
jsonObject["url"] = url;
if (!compression.empty())
jsonObject["compression"] = compression;
if (fileHash)
jsonObject["downloadHash"] = *fileHash;
if (fileHash) {
if (format == PathInfoJsonFormat::V1)
jsonObject["downloadHash"] = fileHash->to_string(HashFormat::SRI, true);
else
jsonObject["downloadHash"] = *fileHash;
}
if (fileSize)
jsonObject["downloadSize"] = fileSize;
}
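Consumers that want a single hash representation can normalize `downloadHash` across both versions. A hedged sketch, assuming `$info` holds one store object's JSON (for example, one element of the `nix path-info --json` output from a binary cache store):

```bash
# Hedged sketch: normalize downloadHash to an SRI string for either format version.
if [[ $(jq -r '.version' <<< "$info") -ge 2 ]]; then
    algo=$(jq -r '.downloadHash.algorithm' <<< "$info")
    hash=$(jq -r '.downloadHash.hash' <<< "$info")
    nix hash convert --to sri "$algo:$hash"
else
    jq -r '.downloadHash' <<< "$info"   # already an SRI string in format 1
fi
```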
@ -154,20 +159,26 @@ nlohmann::json UnkeyedNarInfo::toJSON(const StoreDirConfig * store, bool include
UnkeyedNarInfo UnkeyedNarInfo::fromJSON(const StoreDirConfig * store, const nlohmann::json & json)
{
using nlohmann::detail::value_t;
UnkeyedNarInfo res{UnkeyedValidPathInfo::fromJSON(store, json)};
auto & obj = getObject(json);
PathInfoJsonFormat format = PathInfoJsonFormat::V1;
if (auto * version = optionalValueAt(obj, "version"))
format = *version;
if (auto * url = get(obj, "url"))
res.url = getString(*url);
if (auto * compression = get(obj, "compression"))
res.compression = getString(*compression);
if (auto * downloadHash = get(obj, "downloadHash"))
res.fileHash = *downloadHash;
if (auto * downloadHash = get(obj, "downloadHash")) {
if (format == PathInfoJsonFormat::V1)
res.fileHash = Hash::parseSRI(getString(*downloadHash));
else
res.fileHash = *downloadHash;
}
if (auto * downloadSize = get(obj, "downloadSize"))
res.fileSize = getUnsigned(*downloadSize);
@ -188,7 +199,7 @@ UnkeyedNarInfo adl_serializer<UnkeyedNarInfo>::from_json(const json & json)
void adl_serializer<UnkeyedNarInfo>::to_json(json & json, const UnkeyedNarInfo & c)
{
json = c.toJSON(nullptr, true);
json = c.toJSON(nullptr, true, PathInfoJsonFormat::V2);
}
} // namespace nlohmann

View file

@ -8,6 +8,18 @@
namespace nix {
PathInfoJsonFormat parsePathInfoJsonFormat(uint64_t version)
{
switch (version) {
case 1:
return PathInfoJsonFormat::V1;
case 2:
return PathInfoJsonFormat::V2;
default:
throw Error("unsupported path info JSON format version %d; supported versions are 1 and 2", version);
}
}
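Since the CLI handler feeds the parsed integer straight into this function, an out-of-range version should be rejected before any paths are queried. A hedged sketch of the expected behaviour:

```bash
# Hedged sketch: version 3 is not supported and should fail fast.
nix path-info --json --json-format 3 "$p" 2>&1 \
  | grep -q 'unsupported path info JSON format version 3' && echo "rejected as expected"
```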
GENERATE_CMP_EXT(
,
std::weak_ordering,
@ -149,31 +161,45 @@ ValidPathInfo ValidPathInfo::makeFromCA(
return res;
}
nlohmann::json UnkeyedValidPathInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo) const
nlohmann::json
UnkeyedValidPathInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo, PathInfoJsonFormat format) const
{
using nlohmann::json;
if (format == PathInfoJsonFormat::V1)
assert(store);
auto jsonObject = json::object();
jsonObject["version"] = 2;
jsonObject["version"] = format;
jsonObject["narHash"] = format == PathInfoJsonFormat::V1
? static_cast<json>(narHash.to_string(HashFormat::SRI, true))
: static_cast<json>(narHash);
jsonObject["narHash"] = narHash;
jsonObject["narSize"] = narSize;
{
auto & jsonRefs = jsonObject["references"] = json::array();
for (auto & ref : references)
jsonRefs.emplace_back(store ? static_cast<json>(store->printStorePath(ref)) : static_cast<json>(ref));
jsonRefs.emplace_back(
format == PathInfoJsonFormat::V1 ? static_cast<json>(store->printStorePath(ref))
: static_cast<json>(ref));
}
jsonObject["ca"] = ca;
if (format == PathInfoJsonFormat::V1)
jsonObject["ca"] = ca ? static_cast<json>(renderContentAddress(*ca)) : static_cast<json>(nullptr);
else
jsonObject["ca"] = ca;
if (includeImpureInfo) {
jsonObject["deriver"] = deriver ? (store ? static_cast<json>(std::optional{store->printStorePath(*deriver)})
: static_cast<json>(std::optional{*deriver}))
: static_cast<json>(std::optional<StorePath>{});
jsonObject["registrationTime"] = registrationTime ? (std::optional{registrationTime}) : std::nullopt;
if (format == PathInfoJsonFormat::V1) {
jsonObject["deriver"] =
deriver ? static_cast<json>(store->printStorePath(*deriver)) : static_cast<json>(nullptr);
} else {
jsonObject["deriver"] = deriver;
}
jsonObject["registrationTime"] = registrationTime ? std::optional{registrationTime} : std::nullopt;
jsonObject["ultimate"] = ultimate;
@ -193,34 +219,51 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig * store
auto & json = getObject(_json);
{
auto version = getUnsigned(valueAt(json, "version"));
if (version != 2)
throw Error("Unsupported path info JSON format version %d, only version 2 is currently supported", version);
}
PathInfoJsonFormat format = PathInfoJsonFormat::V1;
if (auto * version = optionalValueAt(json, "version"))
format = *version;
if (format == PathInfoJsonFormat::V1)
assert(store);
if (format == PathInfoJsonFormat::V1)
res.narHash = Hash::parseSRI(getString(valueAt(json, "narHash")));
else
res.narHash = valueAt(json, "narHash");
res.narHash = valueAt(json, "narHash");
res.narSize = getUnsigned(valueAt(json, "narSize"));
try {
auto references = getStringList(valueAt(json, "references"));
auto & references = getArray(valueAt(json, "references"));
for (auto & input : references)
res.references.insert(store ? store->parseStorePath(getString(input)) : static_cast<StorePath>(input));
res.references.insert(
format == PathInfoJsonFormat::V1 ? store->parseStorePath(getString(input))
: static_cast<StorePath>(input));
} catch (Error & e) {
e.addTrace({}, "while reading key 'references'");
throw;
}
try {
res.ca = ptrToOwned<ContentAddress>(getNullable(valueAt(json, "ca")));
if (format == PathInfoJsonFormat::V1) {
if (auto * rawCa = getNullable(valueAt(json, "ca")))
res.ca = ContentAddress::parse(getString(*rawCa));
} else {
res.ca = ptrToOwned<ContentAddress>(getNullable(valueAt(json, "ca")));
}
} catch (Error & e) {
e.addTrace({}, "while reading key 'ca'");
throw;
}
if (auto * rawDeriver0 = optionalValueAt(json, "deriver"))
if (auto * rawDeriver = getNullable(*rawDeriver0))
res.deriver = store ? store->parseStorePath(getString(*rawDeriver)) : static_cast<StorePath>(*rawDeriver);
if (auto * rawDeriver0 = optionalValueAt(json, "deriver")) {
if (format == PathInfoJsonFormat::V1) {
if (auto * rawDeriver = getNullable(*rawDeriver0))
res.deriver = store->parseStorePath(getString(*rawDeriver));
} else {
res.deriver = ptrToOwned<StorePath>(getNullable(*rawDeriver0));
}
}
if (auto * rawRegistrationTime0 = optionalValueAt(json, "registrationTime"))
if (auto * rawRegistrationTime = getNullable(*rawRegistrationTime0))
@ -241,6 +284,16 @@ namespace nlohmann {
using namespace nix;
PathInfoJsonFormat adl_serializer<PathInfoJsonFormat>::from_json(const json & json)
{
return parsePathInfoJsonFormat(getUnsigned(json));
}
void adl_serializer<PathInfoJsonFormat>::to_json(json & json, const PathInfoJsonFormat & format)
{
json = static_cast<int>(format);
}
UnkeyedValidPathInfo adl_serializer<UnkeyedValidPathInfo>::from_json(const json & json)
{
return UnkeyedValidPathInfo::fromJSON(nullptr, json);
@ -248,7 +301,7 @@ UnkeyedValidPathInfo adl_serializer<UnkeyedValidPathInfo>::from_json(const json
void adl_serializer<UnkeyedValidPathInfo>::to_json(json & json, const UnkeyedValidPathInfo & c)
{
json = c.toJSON(nullptr, true);
json = c.toJSON(nullptr, true, PathInfoJsonFormat::V2);
}
ValidPathInfo adl_serializer<ValidPathInfo>::from_json(const json & json0)

View file

@ -34,14 +34,17 @@ static uint64_t getStoreObjectsTotalSize(Store & store, const StorePathSet & clo
*
* @param showClosureSize If true, the closure size of each path is
* included.
* @param format The JSON format version to use.
*/
static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool showClosureSize)
static json
pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool showClosureSize, PathInfoJsonFormat format)
{
json::object_t jsonAllObjects = json::object();
for (auto & storePath : storePaths) {
json jsonObject;
auto printedStorePath = store.printStorePath(storePath);
std::string key = store.printStorePath(storePath);
try {
auto info = store.queryPathInfo(storePath);
@ -49,9 +52,13 @@ static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool
// `storePath` has the representation `<hash>-x` rather than
// `<hash>-<name>` in case of binary-cache stores & `--all` because we don't
// know the name yet until we've read the NAR info.
printedStorePath = store.printStorePath(info->path);
key = store.printStorePath(info->path);
jsonObject = info->toJSON(&store, true);
jsonObject = info->toJSON(format == PathInfoJsonFormat::V1 ? &store : nullptr, true, format);
/* Hack in the store dir for now. TODO update the data type
instead. */
jsonObject["storeDir"] = store.storeDir;
if (showClosureSize) {
StorePathSet closure;
@ -74,12 +81,11 @@ static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool
jsonObject["closureDownloadSize"] = totalDownloadSize;
}
}
} catch (InvalidPath &) {
jsonObject = nullptr;
}
jsonAllObjects[printedStorePath] = std::move(jsonObject);
jsonAllObjects[key] = std::move(jsonObject);
}
return jsonAllObjects;
}
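For reference (not part of this diff), the closure-size fields can also be inspected from the CLI; a hedged sketch, assuming `--closure-size` is the `nix path-info` flag that sets `showClosureSize` and `$p` is a valid store path:

```bash
# Hedged sketch: per-path and closure sizes in the version 2 JSON output.
nix path-info --json --json-format 2 --closure-size "$p" \
  | jq '.[] | {narSize, closureSize}'
```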
@ -90,6 +96,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
bool showClosureSize = false;
bool humanReadable = false;
bool showSigs = false;
std::optional<PathInfoJsonFormat> jsonFormat;
CmdPathInfo()
{
@ -119,6 +126,16 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
.description = "Show signatures.",
.handler = {&showSigs, true},
});
addFlag({
.longName = "json-format",
.description =
"JSON format version to use (1 or 2). Version 1 uses string hashes and full store paths. Version 2 uses structured hashes and store path base names. This flag will be required in a future release.",
.labels = {"version"},
.handler = {[this](std::string s) {
jsonFormat = parsePathInfoJsonFormat(string2IntWithUnitPrefix<uint64_t>(s));
}},
});
}
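For illustration (a hedged sketch, not part of the diff), the three ways the new flag can be combined with `--json`; `$p` is assumed to be a valid store path:

```bash
nix path-info --json --json-format 1 "$p"   # legacy format, explicitly requested
nix path-info --json --json-format 2 "$p"   # new structured format
nix path-info --json "$p"                   # deprecated: warns, falls back to format 1
```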
std::string description() override
@ -157,7 +174,14 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
*store,
// FIXME: preserve order?
StorePathSet(storePaths.begin(), storePaths.end()),
showClosureSize));
showClosureSize,
jsonFormat
.or_else([&]() {
warn(
"'--json' without '--json-format' is deprecated; please specify '--json-format 1' or '--json-format 2'. This will become an error in a future release.");
return std::optional{PathInfoJsonFormat::V1};
})
.value()));
}
else {

View file

@ -18,7 +18,7 @@ outPath=$(nix-build dependencies.nix --no-out-link)
nix copy --to "file://$cacheDir" "$outPath"
readarray -t paths < <(nix path-info --all --json --store "file://$cacheDir" | jq 'keys|sort|.[]' -r)
readarray -t paths < <(nix path-info --all --json --json-format 2 --store "file://$cacheDir" | jq 'keys|sort|.[]' -r)
[[ "${#paths[@]}" -eq 3 ]]
for path in "${paths[@]}"; do
[[ "$path" =~ -dependencies-input-0$ ]] \

View file

@ -14,7 +14,7 @@ nix-build fixed.nix -A bad --no-out-link && fail "should fail"
# Building with the bad hash should produce the "good" output path as
# a side-effect.
[[ -e $path ]]
nix path-info --json "$path" | jq -e \
nix path-info --json --json-format 2 "$path" | jq -e \
'.[].ca == {
method: "flat",
hash: {

View file

@ -47,7 +47,7 @@ try2 () {
hashFromGit=$(git -C "$repo" rev-parse "HEAD:$hashPath")
[[ "$hashFromGit" == "$expected" ]]
nix path-info --json "$path" | jq -e \
nix path-info --json --json-format 2 "$path" | jq -e \
--arg algo "$hashAlgo" \
--arg hash "$hashFromGit" \
'.[].ca == {

View file

@ -30,7 +30,7 @@ path1_stuff=$(echo "$json" | jq -r .[].outputs.stuff)
[[ $(< "$path1"/n) = 0 ]]
[[ $(< "$path1_stuff"/bla) = 0 ]]
nix path-info --json "$path1" | jq -e '.[].ca | .method == "nar" and .hash.algorithm == "sha256"'
nix path-info --json --json-format 2 "$path1" | jq -e '.[].ca | .method == "nar" and .hash.algorithm == "sha256"'
path2=$(nix build -L --no-link --json --file ./impure-derivations.nix impure | jq -r .[].outputs.out)
[[ $(< "$path2"/n) = 1 ]]

View file

@ -166,7 +166,7 @@ printf 4.0 > "$flake1Dir"/version
printf Utrecht > "$flake1Dir"/who
nix profile add "$flake1Dir"
[[ $("$TEST_HOME"/.nix-profile/bin/hello) = "Hello Utrecht" ]]
nix path-info --json "$(realpath "$TEST_HOME"/.nix-profile/bin/hello)" | jq -e '.[].ca | .method == "nar" and .hash.algorithm == "sha256"'
nix path-info --json --json-format 2 "$(realpath "$TEST_HOME"/.nix-profile/bin/hello)" | jq -e '.[].ca | .method == "nar" and .hash.algorithm == "sha256"'
# Override the outputs.
nix profile remove simple flake1

View file

@ -13,7 +13,7 @@ baz=$(nix store add-file "$TEST_ROOT"/baz)
nix-store --delete "$baz"
diff --unified --color=always \
<(nix path-info --json "$foo" "$bar" "$baz" |
<(nix path-info --json --json-format 2 "$foo" "$bar" "$baz" |
jq --sort-keys 'map_values(.narHash)') \
<(jq --sort-keys <<-EOF
{
@ -31,3 +31,9 @@ diff --unified --color=always \
}
EOF
)
# Test that storeDir is returned in the JSON output
nix path-info --json --json-format 2 "$foo" | jq -e \
--arg foo "$foo" \
--arg storeDir "${NIX_STORE_DIR:-/nix/store}" \
'.[$foo].storeDir == $storeDir'

View file

@ -14,7 +14,7 @@ pk2=$(cat "$TEST_ROOT"/pk2)
outPath=$(nix-build dependencies.nix --no-out-link --secret-key-files "$TEST_ROOT/sk1 $TEST_ROOT/sk2")
# Verify that the path got signed.
info=$(nix path-info --json "$outPath")
info=$(nix path-info --json --json-format 2 "$outPath")
echo "$info" | jq -e '.[] | .ultimate == true'
TODO_NixOS # looks like an actual bug? Following line fails on NixOS:
echo "$info" | jq -e '.[] | .signatures.[] | select(startswith("cache1.example.org"))'
@ -39,7 +39,7 @@ outPath2=$(nix-build simple.nix --no-out-link)
nix store verify -r "$outPath"
# Verify that the path did not get signed but does have the ultimate bit.
info=$(nix path-info --json "$outPath2")
info=$(nix path-info --json --json-format 2 "$outPath2")
echo "$info" | jq -e '.[] | .ultimate == true'
echo "$info" | jq -e '.[] | .signatures == []'
@ -58,7 +58,7 @@ nix store verify -r "$outPath2" --sigs-needed 1 --trusted-public-keys "$pk1"
# Build something content-addressed.
outPathCA=$(IMPURE_VAR1=foo IMPURE_VAR2=bar nix-build ./fixed.nix -A good.0 --no-out-link)
nix path-info --json "$outPathCA" | jq -e '.[].ca | .method == "flat" and .hash.algorithm == "md5"'
nix path-info --json --json-format 2 "$outPathCA" | jq -e '.[].ca | .method == "flat" and .hash.algorithm == "md5"'
# Content-addressed paths don't need signatures, so they verify
# regardless of --sigs-needed.
@ -73,14 +73,14 @@ nix store verify -r "$outPathCA" --sigs-needed 1000 --trusted-public-keys "$pk1"
nix copy --to file://"$cacheDir" "$outPath2"
# Verify that signatures got copied.
info=$(nix path-info --store file://"$cacheDir" --json "$outPath2")
info=$(nix path-info --store file://"$cacheDir" --json --json-format 2 "$outPath2")
echo "$info" | jq -e '.[] | .ultimate == false'
echo "$info" | jq -e '.[] | .signatures.[] | select(startswith("cache1.example.org"))'
echo "$info" | expect 4 jq -e '.[] | .signatures.[] | select(startswith("cache2.example.org"))'
# Verify that adding a signature to a path in a binary cache works.
nix store sign --store file://"$cacheDir" --key-file "$TEST_ROOT"/sk2 "$outPath2"
info=$(nix path-info --store file://"$cacheDir" --json "$outPath2")
info=$(nix path-info --store file://"$cacheDir" --json --json-format 2 "$outPath2")
echo "$info" | jq -e '.[] | .signatures.[] | select(startswith("cache1.example.org"))'
echo "$info" | jq -e '.[] | .signatures.[] | select(startswith("cache2.example.org"))'

View file

@ -117,12 +117,12 @@
tarball_store_path = json.loads(tarball_store_path_json)
# Get the NAR hash of the unpacked tarball in SRI format
path_info_json = substituter.succeed(f"nix path-info --json {tarball_store_path}").strip()
path_info_json = substituter.succeed(f"nix path-info --json-format 2 --json {tarball_store_path}").strip()
path_info_dict = json.loads(path_info_json)
# nix path-info returns a dict with store paths as keys
narHash_obj = path_info_dict[tarball_store_path]["narHash"]
# Convert from structured format {"algorithm": "sha256", "format": "base64", "hash": "..."} to SRI string
tarball_hash_sri = f"{narHash_obj['algorithm']}-{narHash_obj['hash']}"
# Convert from structured format {"algorithm": "sha256", "format": "base16", "hash": "..."} to SRI string
tarball_hash_sri = substituter.succeed(f"nix hash convert --to sri {narHash_obj['algorithm']}:{narHash_obj['hash']}").strip()
print(f"Tarball NAR hash (SRI): {tarball_hash_sri}")
# Also get the old format hash for fetchTarball (which uses sha256 parameter)