Introduce --json-format for nix path-info
As discussed today at great length in the Nix meeting, we don't want to break the existing format, but we also don't want to impede the improvement of the JSON output. The solution is to add a new flag that controls the output format. Note that prior to the release we may want to replace `--json --json-format N` with `--json=N`, but this is left for a separate PR, as we don't yet have `=` support for CLI flags.
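To make the difference concrete, here is a small self-contained sketch (not part of the change itself): it borrows the `PathInfoJsonFormat` name and the reference values from the diff below, and everything else is invented for illustration. Format 1 keeps references as full store paths, format 2 keeps only the base names.

```cpp
// Standalone illustration only: assumes nothing from Nix except the enum
// name and the sample store paths taken from the golden test data below.
#include <nlohmann/json.hpp>
#include <iostream>
#include <string>
#include <vector>

enum class PathInfoJsonFormat { V1 = 1, V2 = 2 };

int main()
{
    const std::string storeDir = "/nix/store";
    const std::vector<std::string> refs{
        "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
        "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo",
    };

    for (auto format : {PathInfoJsonFormat::V1, PathInfoJsonFormat::V2}) {
        nlohmann::json obj;
        obj["version"] = static_cast<int>(format);
        auto & jsonRefs = obj["references"] = nlohmann::json::array();
        for (auto & ref : refs)
            // Format 1: full store path; format 2: base name only.
            jsonRefs.push_back(format == PathInfoJsonFormat::V1 ? storeDir + "/" + ref : ref);
        std::cout << obj.dump(2) << "\n";
    }
}
```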
This commit is contained in:
parent
69920f9557
commit
1ad13a1423
36 changed files with 464 additions and 132 deletions
@@ -154,18 +154,18 @@ schemas += [
    'stem' : 'store-object-info',
    'schema' : schema_dir / 'store-object-info-v2.yaml',
    'files' : [
      'pure.json',
      'impure.json',
      'empty_pure.json',
      'empty_impure.json',
      'json-2' / 'pure.json',
      'json-2' / 'impure.json',
      'json-2' / 'empty_pure.json',
      'json-2' / 'empty_impure.json',
    ],
  },
  {
    'stem' : 'nar-info',
    'schema' : schema_dir / 'store-object-info-v2.yaml',
    'files' : [
      'pure.json',
      'impure.json',
      'json-2' / 'pure.json',
      'json-2' / 'impure.json',
    ],
  },
  {

@@ -182,30 +182,30 @@ schemas += [
    'stem' : 'store-object-info',
    'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/base',
    'files' : [
      'pure.json',
      'empty_pure.json',
      'json-2' / 'pure.json',
      'json-2' / 'empty_pure.json',
    ],
  },
  {
    'stem' : 'store-object-info',
    'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/impure',
    'files' : [
      'impure.json',
      'empty_impure.json',
      'json-2' / 'impure.json',
      'json-2' / 'empty_impure.json',
    ],
  },
  {
    'stem' : 'nar-info',
    'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/base',
    'files' : [
      'pure.json',
      'json-2' / 'pure.json',
    ],
  },
  {
    'stem' : 'nar-info',
    'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/narInfo',
    'files' : [
      'impure.json',
      'json-2' / 'impure.json',
    ],
  },
]
src/libstore-tests/data/nar-info/json-1/impure.json (new file, 21 lines)
@@ -0,0 +1,21 @@
{
  "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
  "compression": "xz",
  "deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
  "downloadHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "downloadSize": 4029176,
  "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "narSize": 34878,
  "references": [
    "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
    "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
  ],
  "registrationTime": 23423,
  "signatures": [
    "asdf",
    "qwer"
  ],
  "ultimate": true,
  "url": "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz",
  "version": 1
}

src/libstore-tests/data/nar-info/json-1/pure.json (new file, 10 lines)
@@ -0,0 +1,10 @@
{
  "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
  "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "narSize": 34878,
  "references": [
    "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
    "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
  ],
  "version": 1
}

@@ -0,0 +1,9 @@
{
  "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
  "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "narSize": 34878,
  "references": [
    "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
    "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
  ]
}
@@ -8,7 +8,7 @@
       "method": "nar"
     },
     "compression": "xz",
-    "deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
+    "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
     "downloadHash": {
       "algorithm": "sha256",
       "format": "base16",

@@ -22,8 +22,8 @@
     },
     "narSize": 34878,
     "references": [
-      "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
-      "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
+      "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
+      "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
     ],
     "registrationTime": 23423,
     "signatures": [

@@ -14,8 +14,8 @@
     },
     "narSize": 34878,
     "references": [
-      "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
-      "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
+      "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
+      "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
     ],
     "version": 2
 }
src/libstore-tests/data/path-info/json-1/empty_impure.json (new file, 11 lines)
@@ -0,0 +1,11 @@
{
  "ca": null,
  "deriver": null,
  "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "narSize": 0,
  "references": [],
  "registrationTime": null,
  "signatures": [],
  "ultimate": false,
  "version": 1
}

src/libstore-tests/data/path-info/json-1/empty_pure.json (new file, 7 lines)
@@ -0,0 +1,7 @@
{
  "ca": null,
  "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "narSize": 0,
  "references": [],
  "version": 1
}

src/libstore-tests/data/path-info/json-1/impure.json (new file, 17 lines)
@@ -0,0 +1,17 @@
{
  "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
  "deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
  "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "narSize": 34878,
  "references": [
    "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
    "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
  ],
  "registrationTime": 23423,
  "signatures": [
    "asdf",
    "qwer"
  ],
  "ultimate": true,
  "version": 1
}

src/libstore-tests/data/path-info/json-1/pure.json (new file, 10 lines)
@@ -0,0 +1,10 @@
{
  "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
  "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "narSize": 34878,
  "references": [
    "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
    "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
  ],
  "version": 1
}

@@ -0,0 +1,9 @@
{
  "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
  "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "narSize": 34878,
  "references": [
    "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
    "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
  ]
}
@@ -7,7 +7,7 @@
       },
       "method": "nar"
     },
-    "deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
+    "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
     "narHash": {
       "algorithm": "sha256",
       "format": "base16",

@@ -15,8 +15,8 @@
     },
     "narSize": 34878,
     "references": [
-      "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
-      "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
+      "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
+      "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
     ],
     "registrationTime": 23423,
     "signatures": [

@@ -14,8 +14,8 @@
     },
     "narSize": 34878,
     "references": [
-      "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
-      "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
+      "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
+      "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
     ],
     "version": 2
 }
@@ -11,9 +11,19 @@ namespace nix {

using nlohmann::json;

class NarInfoTest : public CharacterizationTest, public LibStoreTest
class NarInfoTestV1 : public CharacterizationTest, public LibStoreTest
{
    std::filesystem::path unitTestData = getUnitTestData() / "nar-info";
    std::filesystem::path unitTestData = getUnitTestData() / "nar-info" / "json-1";

    std::filesystem::path goldenMaster(PathView testStem) const override
    {
        return unitTestData / (testStem + ".json");
    }
};

class NarInfoTestV2 : public CharacterizationTest, public LibStoreTest
{
    std::filesystem::path unitTestData = getUnitTestData() / "nar-info" / "json-2";

    std::filesystem::path goldenMaster(PathView testStem) const override
    {

@@ -59,27 +69,63 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo)
    return info;
}

#define JSON_TEST(STEM, PURE) \
    TEST_F(NarInfoTest, NarInfo_##STEM##_from_json) \
    { \
        readTest(#STEM, [&](const auto & encoded_) { \
            auto encoded = json::parse(encoded_); \
            auto expected = makeNarInfo(*store, PURE); \
            auto got = UnkeyedNarInfo::fromJSON(&*store, encoded); \
            ASSERT_EQ(got, expected); \
        }); \
    } \
    \
    TEST_F(NarInfoTest, NarInfo_##STEM##_to_json) \
    { \
        writeTest( \
            #STEM, \
            [&]() -> json { return makeNarInfo(*store, PURE).toJSON(&*store, PURE); }, \
            [](const auto & file) { return json::parse(readFile(file)); }, \
            [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
#define JSON_READ_TEST_V1(STEM, PURE) \
    TEST_F(NarInfoTestV1, NarInfo_##STEM##_from_json) \
    { \
        readTest(#STEM, [&](const auto & encoded_) { \
            auto encoded = json::parse(encoded_); \
            auto expected = makeNarInfo(*store, PURE); \
            auto got = UnkeyedNarInfo::fromJSON(&*store, encoded); \
            ASSERT_EQ(got, expected); \
        }); \
    }

JSON_TEST(pure, false)
JSON_TEST(impure, true)
#define JSON_WRITE_TEST_V1(STEM, PURE) \
    TEST_F(NarInfoTestV1, NarInfo_##STEM##_to_json) \
    { \
        writeTest( \
            #STEM, \
            [&]() -> json { return makeNarInfo(*store, PURE).toJSON(&*store, PURE, PathInfoJsonFormat::V1); }, \
            [](const auto & file) { return json::parse(readFile(file)); }, \
            [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
    }

#define JSON_TEST_V1(STEM, PURE) \
    JSON_READ_TEST_V1(STEM, PURE) \
    JSON_WRITE_TEST_V1(STEM, PURE)

#define JSON_READ_TEST_V2(STEM, PURE) \
    TEST_F(NarInfoTestV2, NarInfo_##STEM##_from_json) \
    { \
        readTest(#STEM, [&](const auto & encoded_) { \
            auto encoded = json::parse(encoded_); \
            auto expected = makeNarInfo(*store, PURE); \
            auto got = UnkeyedNarInfo::fromJSON(nullptr, encoded); \
            ASSERT_EQ(got, expected); \
        }); \
    }

#define JSON_WRITE_TEST_V2(STEM, PURE) \
    TEST_F(NarInfoTestV2, NarInfo_##STEM##_to_json) \
    { \
        writeTest( \
            #STEM, \
            [&]() -> json { return makeNarInfo(*store, PURE).toJSON(nullptr, PURE, PathInfoJsonFormat::V2); }, \
            [](const auto & file) { return json::parse(readFile(file)); }, \
            [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
    }

#define JSON_TEST_V2(STEM, PURE) \
    JSON_READ_TEST_V2(STEM, PURE) \
    JSON_WRITE_TEST_V2(STEM, PURE)

JSON_TEST_V1(pure, false)
JSON_TEST_V1(impure, true)

// Test that JSON without explicit version field parses as V1
JSON_READ_TEST_V1(pure_noversion, false)

JSON_TEST_V2(pure, false)
JSON_TEST_V2(impure, true)

} // namespace nix
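The JSON_READ_TEST_* / JSON_WRITE_TEST_* macros above each expand to a golden-master pair: the read test parses the checked-in JSON and compares it with an object built in memory, and the write test re-serialises that object and diffs it against the file. A stripped-down sketch of that pattern with plain GoogleTest and nlohmann::json (fixture, file path, and expected object here are placeholders, not the real CharacterizationTest harness):

```cpp
// Simplified golden-master sketch; "golden.json" and expected() are
// placeholders standing in for the data/nar-info golden files and
// makeNarInfo().
#include <gtest/gtest.h>
#include <nlohmann/json.hpp>
#include <fstream>
#include <sstream>

static nlohmann::json readGolden(const std::string & path)
{
    std::ifstream f(path);
    std::stringstream ss;
    ss << f.rdbuf();
    return nlohmann::json::parse(ss.str());
}

// The in-memory object the golden file is expected to encode.
static nlohmann::json expected()
{
    return {{"narSize", 34878}, {"version", 1}};
}

TEST(GoldenSketch, from_json)
{
    // Read direction: decode the golden file and compare with the object.
    EXPECT_EQ(readGolden("golden.json"), expected());
}

TEST(GoldenSketch, to_json)
{
    // Write direction: re-encode the object and compare with the file text.
    EXPECT_EQ(expected().dump(2) + "\n", readGolden("golden.json").dump(2) + "\n");
}
```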
@@ -10,9 +10,19 @@ namespace nix {

using nlohmann::json;

class PathInfoTest : public CharacterizationTest, public LibStoreTest
class PathInfoTestV1 : public CharacterizationTest, public LibStoreTest
{
    std::filesystem::path unitTestData = getUnitTestData() / "path-info";
    std::filesystem::path unitTestData = getUnitTestData() / "path-info" / "json-1";

    std::filesystem::path goldenMaster(PathView testStem) const override
    {
        return unitTestData / (testStem + ".json");
    }
};

class PathInfoTestV2 : public CharacterizationTest, public LibStoreTest
{
    std::filesystem::path unitTestData = getUnitTestData() / "path-info" / "json-2";

    std::filesystem::path goldenMaster(PathView testStem) const override
    {

@@ -65,33 +75,70 @@ static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo
    return makeFullKeyed(store, includeImpureInfo);
}

#define JSON_TEST(STEM, OBJ, PURE) \
    TEST_F(PathInfoTest, PathInfo_##STEM##_from_json) \
    { \
        readTest(#STEM, [&](const auto & encoded_) { \
            auto encoded = json::parse(encoded_); \
            UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(&*store, encoded); \
            auto expected = OBJ; \
            ASSERT_EQ(got, expected); \
        }); \
    } \
    \
    TEST_F(PathInfoTest, PathInfo_##STEM##_to_json) \
#define JSON_READ_TEST_V1(STEM, OBJ) \
    TEST_F(PathInfoTestV1, PathInfo_##STEM##_from_json) \
    { \
        readTest(#STEM, [&](const auto & encoded_) { \
            auto encoded = json::parse(encoded_); \
            UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(&*store, encoded); \
            auto expected = OBJ; \
            ASSERT_EQ(got, expected); \
        }); \
    }

#define JSON_WRITE_TEST_V1(STEM, OBJ, PURE) \
    TEST_F(PathInfoTestV1, PathInfo_##STEM##_to_json) \
    { \
        writeTest( \
            #STEM, \
            [&]() -> json { return OBJ.toJSON(&*store, PURE); }, \
            [&]() -> json { return OBJ.toJSON(&*store, PURE, PathInfoJsonFormat::V1); }, \
            [](const auto & file) { return json::parse(readFile(file)); }, \
            [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
    }

JSON_TEST(empty_pure, makeEmpty(), false)
JSON_TEST(empty_impure, makeEmpty(), true)
#define JSON_TEST_V1(STEM, OBJ, PURE) \
    JSON_READ_TEST_V1(STEM, OBJ) \
    JSON_WRITE_TEST_V1(STEM, OBJ, PURE)

JSON_TEST(pure, makeFull(*store, false), false)
JSON_TEST(impure, makeFull(*store, true), true)
#define JSON_READ_TEST_V2(STEM, OBJ) \
    TEST_F(PathInfoTestV2, PathInfo_##STEM##_from_json) \
    { \
        readTest(#STEM, [&](const auto & encoded_) { \
            auto encoded = json::parse(encoded_); \
            UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(nullptr, encoded); \
            auto expected = OBJ; \
            ASSERT_EQ(got, expected); \
        }); \
    }

TEST_F(PathInfoTest, PathInfo_full_shortRefs)
#define JSON_WRITE_TEST_V2(STEM, OBJ, PURE) \
    TEST_F(PathInfoTestV2, PathInfo_##STEM##_to_json) \
    { \
        writeTest( \
            #STEM, \
            [&]() -> json { return OBJ.toJSON(nullptr, PURE, PathInfoJsonFormat::V2); }, \
            [](const auto & file) { return json::parse(readFile(file)); }, \
            [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
    }

#define JSON_TEST_V2(STEM, OBJ, PURE) \
    JSON_READ_TEST_V2(STEM, OBJ) \
    JSON_WRITE_TEST_V2(STEM, OBJ, PURE)

JSON_TEST_V1(empty_pure, makeEmpty(), false)
JSON_TEST_V1(empty_impure, makeEmpty(), true)
JSON_TEST_V1(pure, makeFull(*store, false), false)
JSON_TEST_V1(impure, makeFull(*store, true), true)

// Test that JSON without explicit version field parses as V1
JSON_READ_TEST_V1(pure_noversion, makeFull(*store, false))

JSON_TEST_V2(empty_pure, makeEmpty(), false)
JSON_TEST_V2(empty_impure, makeEmpty(), true)
JSON_TEST_V2(pure, makeFull(*store, false), false)
JSON_TEST_V2(impure, makeFull(*store, true), true)

TEST_F(PathInfoTestV2, PathInfo_full_shortRefs)
{
    ValidPathInfo it = makeFullKeyed(*store, true);
    // it.references = unkeyed.references;
@@ -25,7 +25,8 @@ struct UnkeyedNarInfo : virtual UnkeyedValidPathInfo
    // TODO libc++ 16 (used by darwin) missing `std::optional::operator <=>`, can't do yet
    // auto operator <=>(const NarInfo &) const = default;

    nlohmann::json toJSON(const StoreDirConfig * store, bool includeImpureInfo) const override;
    nlohmann::json
    toJSON(const StoreDirConfig * store, bool includeImpureInfo, PathInfoJsonFormat format) const override;
    static UnkeyedNarInfo fromJSON(const StoreDirConfig * store, const nlohmann::json & json);
};
@@ -14,6 +14,22 @@ namespace nix {
class Store;
struct StoreDirConfig;

/**
 * JSON format version for path info output.
 */
enum class PathInfoJsonFormat {
    /// Legacy format with string hashes and full store paths
    V1 = 1,
    /// New format with structured hashes and store path base names
    V2 = 2,
};

/**
 * Convert an integer version number to PathInfoJsonFormat.
 * Throws Error if the version is not supported.
 */
PathInfoJsonFormat parsePathInfoJsonFormat(uint64_t version);

struct SubstitutablePathInfo
{
    std::optional<StorePath> deriver;
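parsePathInfoJsonFormat is defined later in this commit (in path-info.cc); a standalone sketch of the same mapping, with std::runtime_error standing in for Nix's Error class:

```cpp
// Sketch of the integer-to-enum mapping; std::runtime_error replaces Nix's
// Error type so the snippet compiles on its own.
#include <cstdint>
#include <stdexcept>
#include <string>

enum class PathInfoJsonFormat { V1 = 1, V2 = 2 };

PathInfoJsonFormat parsePathInfoJsonFormat(uint64_t version)
{
    switch (version) {
    case 1:
        return PathInfoJsonFormat::V1;
    case 2:
        return PathInfoJsonFormat::V2;
    default:
        throw std::runtime_error(
            "unsupported path info JSON format version " + std::to_string(version)
            + "; supported versions are 1 and 2");
    }
}
```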
@@ -114,10 +130,16 @@ struct UnkeyedValidPathInfo
    virtual ~UnkeyedValidPathInfo() {}

    /**
     * @param store If non-null, store paths are rendered as full paths.
     *              If null, store paths are rendered as base names.
     * @param includeImpureInfo If true, variable elements such as the
     *                          registration time are included.
     * @param format JSON format version. Version 1 uses string hashes and
     *               string content addresses. Version 2 uses structured
     *               hashes and structured content addresses.
     */
    virtual nlohmann::json toJSON(const StoreDirConfig * store, bool includeImpureInfo) const;
    virtual nlohmann::json
    toJSON(const StoreDirConfig * store, bool includeImpureInfo, PathInfoJsonFormat format) const;
    static UnkeyedValidPathInfo fromJSON(const StoreDirConfig * store, const nlohmann::json & json);
};
@@ -196,5 +218,6 @@ using ValidPathInfos = std::map<StorePath, ValidPathInfo>;

} // namespace nix

JSON_IMPL(nix::PathInfoJsonFormat)
JSON_IMPL(nix::UnkeyedValidPathInfo)
JSON_IMPL(nix::ValidPathInfo)
@@ -132,19 +132,24 @@ std::string NarInfo::to_string(const StoreDirConfig & store) const
    return res;
}

nlohmann::json UnkeyedNarInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo) const
nlohmann::json
UnkeyedNarInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo, PathInfoJsonFormat format) const
{
    using nlohmann::json;

    auto jsonObject = UnkeyedValidPathInfo::toJSON(store, includeImpureInfo);
    auto jsonObject = UnkeyedValidPathInfo::toJSON(store, includeImpureInfo, format);

    if (includeImpureInfo) {
        if (!url.empty())
            jsonObject["url"] = url;
        if (!compression.empty())
            jsonObject["compression"] = compression;
        if (fileHash)
            jsonObject["downloadHash"] = *fileHash;
        if (fileHash) {
            if (format == PathInfoJsonFormat::V1)
                jsonObject["downloadHash"] = fileHash->to_string(HashFormat::SRI, true);
            else
                jsonObject["downloadHash"] = *fileHash;
        }
        if (fileSize)
            jsonObject["downloadSize"] = fileSize;
    }
@@ -154,20 +159,26 @@ nlohmann::json UnkeyedNarInfo::toJSON(const StoreDirConfig * store, bool include

UnkeyedNarInfo UnkeyedNarInfo::fromJSON(const StoreDirConfig * store, const nlohmann::json & json)
{
    using nlohmann::detail::value_t;

    UnkeyedNarInfo res{UnkeyedValidPathInfo::fromJSON(store, json)};

    auto & obj = getObject(json);

    PathInfoJsonFormat format = PathInfoJsonFormat::V1;
    if (auto * version = optionalValueAt(obj, "version"))
        format = *version;

    if (auto * url = get(obj, "url"))
        res.url = getString(*url);

    if (auto * compression = get(obj, "compression"))
        res.compression = getString(*compression);

    if (auto * downloadHash = get(obj, "downloadHash"))
        res.fileHash = *downloadHash;
    if (auto * downloadHash = get(obj, "downloadHash")) {
        if (format == PathInfoJsonFormat::V1)
            res.fileHash = Hash::parseSRI(getString(*downloadHash));
        else
            res.fileHash = *downloadHash;
    }

    if (auto * downloadSize = get(obj, "downloadSize"))
        res.fileSize = getUnsigned(*downloadSize);
@@ -188,7 +199,7 @@ UnkeyedNarInfo adl_serializer<UnkeyedNarInfo>::from_json(const json & json)

void adl_serializer<UnkeyedNarInfo>::to_json(json & json, const UnkeyedNarInfo & c)
{
    json = c.toJSON(nullptr, true);
    json = c.toJSON(nullptr, true, PathInfoJsonFormat::V2);
}

} // namespace nlohmann
@@ -8,6 +8,18 @@

namespace nix {

PathInfoJsonFormat parsePathInfoJsonFormat(uint64_t version)
{
    switch (version) {
    case 1:
        return PathInfoJsonFormat::V1;
    case 2:
        return PathInfoJsonFormat::V2;
    default:
        throw Error("unsupported path info JSON format version %d; supported versions are 1 and 2", version);
    }
}

GENERATE_CMP_EXT(
    ,
    std::weak_ordering,
@@ -149,31 +161,45 @@ ValidPathInfo ValidPathInfo::makeFromCA(
    return res;
}

nlohmann::json UnkeyedValidPathInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo) const
nlohmann::json
UnkeyedValidPathInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo, PathInfoJsonFormat format) const
{
    using nlohmann::json;

    if (format == PathInfoJsonFormat::V1)
        assert(store);

    auto jsonObject = json::object();

    jsonObject["version"] = 2;
    jsonObject["version"] = format;

    jsonObject["narHash"] = format == PathInfoJsonFormat::V1
        ? static_cast<json>(narHash.to_string(HashFormat::SRI, true))
        : static_cast<json>(narHash);

    jsonObject["narHash"] = narHash;
    jsonObject["narSize"] = narSize;

    {
        auto & jsonRefs = jsonObject["references"] = json::array();
        for (auto & ref : references)
            jsonRefs.emplace_back(store ? static_cast<json>(store->printStorePath(ref)) : static_cast<json>(ref));
            jsonRefs.emplace_back(
                format == PathInfoJsonFormat::V1 ? static_cast<json>(store->printStorePath(ref))
                                                 : static_cast<json>(ref));
    }

    jsonObject["ca"] = ca;
    if (format == PathInfoJsonFormat::V1)
        jsonObject["ca"] = ca ? static_cast<json>(renderContentAddress(*ca)) : static_cast<json>(nullptr);
    else
        jsonObject["ca"] = ca;

    if (includeImpureInfo) {
        jsonObject["deriver"] = deriver ? (store ? static_cast<json>(std::optional{store->printStorePath(*deriver)})
                                                 : static_cast<json>(std::optional{*deriver}))
                                        : static_cast<json>(std::optional<StorePath>{});

        jsonObject["registrationTime"] = registrationTime ? (std::optional{registrationTime}) : std::nullopt;
        if (format == PathInfoJsonFormat::V1) {
            jsonObject["deriver"] =
                deriver ? static_cast<json>(store->printStorePath(*deriver)) : static_cast<json>(nullptr);
        } else {
            jsonObject["deriver"] = deriver;
        }
        jsonObject["registrationTime"] = registrationTime ? std::optional{registrationTime} : std::nullopt;

        jsonObject["ultimate"] = ultimate;
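The V1 branch serialises narHash as an SRI string, while V2 hands the hash to the structured serializer (the json-2 golden data shows an object with at least `algorithm` and `format` fields). A rough standalone sketch of that dispatch; the V2 object below is deliberately abbreviated and not the exact shape the real Hash serializer produces:

```cpp
// Rough sketch of the narHash dispatch.  The SRI string is taken from the
// golden V1 data; the V2 object is abbreviated (the real serializer also
// includes the digest itself).
#include <nlohmann/json.hpp>
#include <iostream>

enum class PathInfoJsonFormat { V1 = 1, V2 = 2 };

nlohmann::json renderNarHash(PathInfoJsonFormat format)
{
    if (format == PathInfoJsonFormat::V1)
        return "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=";
    return {{"algorithm", "sha256"}, {"format", "base16"}};
}

int main()
{
    std::cout << renderNarHash(PathInfoJsonFormat::V1).dump() << "\n";
    std::cout << renderNarHash(PathInfoJsonFormat::V2).dump() << "\n";
}
```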
@@ -193,34 +219,51 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig * store

    auto & json = getObject(_json);

    {
        auto version = getUnsigned(valueAt(json, "version"));
        if (version != 2)
            throw Error("Unsupported path info JSON format version %d, only version 2 is currently supported", version);
    }
    PathInfoJsonFormat format = PathInfoJsonFormat::V1;
    if (auto * version = optionalValueAt(json, "version"))
        format = *version;

    if (format == PathInfoJsonFormat::V1)
        assert(store);

    if (format == PathInfoJsonFormat::V1)
        res.narHash = Hash::parseSRI(getString(valueAt(json, "narHash")));
    else
        res.narHash = valueAt(json, "narHash");

    res.narHash = valueAt(json, "narHash");
    res.narSize = getUnsigned(valueAt(json, "narSize"));

    try {
        auto references = getStringList(valueAt(json, "references"));
        auto & references = getArray(valueAt(json, "references"));
        for (auto & input : references)
            res.references.insert(store ? store->parseStorePath(getString(input)) : static_cast<StorePath>(input));
            res.references.insert(
                format == PathInfoJsonFormat::V1 ? store->parseStorePath(getString(input))
                                                 : static_cast<StorePath>(input));
    } catch (Error & e) {
        e.addTrace({}, "while reading key 'references'");
        throw;
    }

    try {
        res.ca = ptrToOwned<ContentAddress>(getNullable(valueAt(json, "ca")));
        if (format == PathInfoJsonFormat::V1) {
            if (auto * rawCa = getNullable(valueAt(json, "ca")))
                res.ca = ContentAddress::parse(getString(*rawCa));
        } else {
            res.ca = ptrToOwned<ContentAddress>(getNullable(valueAt(json, "ca")));
        }
    } catch (Error & e) {
        e.addTrace({}, "while reading key 'ca'");
        throw;
    }

    if (auto * rawDeriver0 = optionalValueAt(json, "deriver"))
        if (auto * rawDeriver = getNullable(*rawDeriver0))
            res.deriver = store ? store->parseStorePath(getString(*rawDeriver)) : static_cast<StorePath>(*rawDeriver);
    if (auto * rawDeriver0 = optionalValueAt(json, "deriver")) {
        if (format == PathInfoJsonFormat::V1) {
            if (auto * rawDeriver = getNullable(*rawDeriver0))
                res.deriver = store->parseStorePath(getString(*rawDeriver));
        } else {
            res.deriver = ptrToOwned<StorePath>(getNullable(*rawDeriver0));
        }
    }

    if (auto * rawRegistrationTime0 = optionalValueAt(json, "registrationTime"))
        if (auto * rawRegistrationTime = getNullable(*rawRegistrationTime0))
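The reader now defaults to format 1 when the `version` field is absent, which is what the `pure_noversion` golden files exercise. A standalone sketch of just that detection step:

```cpp
// Sketch of the version detection in fromJSON: a missing "version" key means
// format 1; otherwise only versions 1 and 2 are accepted.
#include <nlohmann/json.hpp>
#include <cstdint>
#include <iostream>
#include <stdexcept>

enum class PathInfoJsonFormat { V1 = 1, V2 = 2 };

PathInfoJsonFormat detectFormat(const nlohmann::json & obj)
{
    if (!obj.contains("version"))
        return PathInfoJsonFormat::V1;
    switch (obj.at("version").get<uint64_t>()) {
    case 1:
        return PathInfoJsonFormat::V1;
    case 2:
        return PathInfoJsonFormat::V2;
    default:
        throw std::runtime_error("unsupported path info JSON format version");
    }
}

int main()
{
    std::cout << static_cast<int>(detectFormat(nlohmann::json::parse(R"({"narSize": 0})"))) << "\n"; // 1
    std::cout << static_cast<int>(detectFormat(nlohmann::json::parse(R"({"version": 2})"))) << "\n"; // 2
}
```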
@@ -241,6 +284,16 @@ namespace nlohmann {

using namespace nix;

PathInfoJsonFormat adl_serializer<PathInfoJsonFormat>::from_json(const json & json)
{
    return parsePathInfoJsonFormat(getUnsigned(json));
}

void adl_serializer<PathInfoJsonFormat>::to_json(json & json, const PathInfoJsonFormat & format)
{
    json = static_cast<int>(format);
}

UnkeyedValidPathInfo adl_serializer<UnkeyedValidPathInfo>::from_json(const json & json)
{
    return UnkeyedValidPathInfo::fromJSON(nullptr, json);
@@ -248,7 +301,7 @@ UnkeyedValidPathInfo adl_serializer<UnkeyedValidPathInfo>::from_json(const json

void adl_serializer<UnkeyedValidPathInfo>::to_json(json & json, const UnkeyedValidPathInfo & c)
{
    json = c.toJSON(nullptr, true);
    json = c.toJSON(nullptr, true, PathInfoJsonFormat::V2);
}

ValidPathInfo adl_serializer<ValidPathInfo>::from_json(const json & json0)
@@ -34,14 +34,17 @@ static uint64_t getStoreObjectsTotalSize(Store & store, const StorePathSet & clo
 *
 * @param showClosureSize If true, the closure size of each path is
 * included.
 * @param format The JSON format version to use.
 */
static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool showClosureSize)
static json
pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool showClosureSize, PathInfoJsonFormat format)
{
    json::object_t jsonAllObjects = json::object();

    for (auto & storePath : storePaths) {
        json jsonObject;
        auto printedStorePath = store.printStorePath(storePath);

        std::string key = store.printStorePath(storePath);

        try {
            auto info = store.queryPathInfo(storePath);
@@ -49,9 +52,13 @@ static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool
            // `storePath` has the representation `<hash>-x` rather than
            // `<hash>-<name>` in case of binary-cache stores & `--all` because we don't
            // know the name yet until we've read the NAR info.
            printedStorePath = store.printStorePath(info->path);
            key = store.printStorePath(info->path);

            jsonObject = info->toJSON(&store, true);
            jsonObject = info->toJSON(format == PathInfoJsonFormat::V1 ? &store : nullptr, true, format);

            /* Hack in the store dir for now. TODO update the data type
               instead. */
            jsonObject["storeDir"] = store.storeDir;

            if (showClosureSize) {
                StorePathSet closure;
@@ -74,12 +81,11 @@ static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool
                jsonObject["closureDownloadSize"] = totalDownloadSize;
            }
        }

        } catch (InvalidPath &) {
            jsonObject = nullptr;
        }

        jsonAllObjects[printedStorePath] = std::move(jsonObject);
        jsonAllObjects[key] = std::move(jsonObject);
    }
    return jsonAllObjects;
}
@@ -90,6 +96,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
    bool showClosureSize = false;
    bool humanReadable = false;
    bool showSigs = false;
    std::optional<PathInfoJsonFormat> jsonFormat;

    CmdPathInfo()
    {
@@ -119,6 +126,16 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
            .description = "Show signatures.",
            .handler = {&showSigs, true},
        });

        addFlag({
            .longName = "json-format",
            .description =
                "JSON format version to use (1 or 2). Version 1 uses string hashes and full store paths. Version 2 uses structured hashes and store path base names. This flag will be required in a future release.",
            .labels = {"version"},
            .handler = {[this](std::string s) {
                jsonFormat = parsePathInfoJsonFormat(string2IntWithUnitPrefix<uint64_t>(s));
            }},
        });
    }

    std::string description() override
@@ -157,7 +174,14 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
                *store,
                // FIXME: preserve order?
                StorePathSet(storePaths.begin(), storePaths.end()),
                showClosureSize));
                showClosureSize,
                jsonFormat
                    .or_else([&]() {
                        warn(
                            "'--json' without '--json-format' is deprecated; please specify '--json-format 1' or '--json-format 2'. This will become an error in a future release.");
                        return std::optional{PathInfoJsonFormat::V1};
                    })
                    .value()));
        }

        else {
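The defaulting in run() relies on C++23's std::optional::or_else. A standalone sketch of the same fallback, with std::cerr standing in for warn():

```cpp
// Sketch of the --json-format fallback: if the user passed --json but no
// --json-format, warn and default to format 1.  Requires C++23 for
// std::optional::or_else; std::cerr replaces Nix's warn().
#include <iostream>
#include <optional>

enum class PathInfoJsonFormat { V1 = 1, V2 = 2 };

PathInfoJsonFormat resolveFormat(std::optional<PathInfoJsonFormat> jsonFormat)
{
    return jsonFormat
        .or_else([]() {
            std::cerr << "warning: '--json' without '--json-format' is deprecated\n";
            return std::optional{PathInfoJsonFormat::V1};
        })
        .value();
}

int main()
{
    std::cout << static_cast<int>(resolveFormat(std::nullopt)) << "\n";           // 1, with a warning
    std::cout << static_cast<int>(resolveFormat(PathInfoJsonFormat::V2)) << "\n"; // 2
}
```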