
JSON for Hash now has to be Base16

Fix #14532.

As discussed on the call today:

1. We'll stick with `format = "base16"` and `hash = "<hash>"`, rather than
   `base16 = "<hash>"`, in order to stay forward compatible with supporting
   more formats in future versions (see the sketch after this list).

   The motivation we discussed for someday *possibly* doing this is
   making it easier to write very slap-dash lang2nix tools that create
   (not consume) derivations with dynamic derivations.

2. We will remove support for non-base16 formats in the JSON serialization of
   `Hash` (and make base16 the output format, rather than base64), so this is
   strictly a forward-compatibility contingency, *not* yet something we
   support. (And also not something we have concrete plans to start
   supporting.)
John Ericson 2025-11-10 16:07:28 -05:00
parent 750306234d
commit 13b44742a4
22 changed files with 79 additions and 101 deletions

View file

@@ -2,28 +2,16 @@
## Examples
### SHA-256 with Base64 encoding
```json
{{#include schema/hash-v1/sha256-base64.json}}
```
### SHA-256 with Base16 (hexadecimal) encoding
### SHA-256
```json
{{#include schema/hash-v1/sha256-base16.json}}
```
### SHA-256 with Nix32 encoding
### BLAKE3
```json
{{#include schema/hash-v1/sha256-nix32.json}}
```
### BLAKE3 with Base64 encoding
```json
{{#include schema/hash-v1/blake3-base64.json}}
{{#include schema/hash-v1/blake3-base16.json}}
```
<!-- need to convert YAML to JSON first

View file

@@ -12,18 +12,14 @@ properties:
format:
type: string
enum:
- base64
- nix32
- base16
- sri
title: Hash format
description: |
The encoding format of the hash value.
- `base64` uses standard Base64 encoding [RFC 4648, section 4](https://datatracker.ietf.org/doc/html/rfc4648#section-4)
- `nix32` is Nix-specific base-32 encoding
- `base16` is lowercase hexadecimal
- `sri` is the [Subresource Integrity format](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
`base16` (lowercase hexadecimal) is the only format that is currently supported for JSON serialization.
This field exists primarily to reduce ambiguity about what the hash means.
It would also help us support other formats in the future, but there are no concrete plans to do so at this time.
hash:
type: string
title: Hash

View file

@@ -24,10 +24,8 @@ schemas = [
'stem' : 'hash',
'schema' : schema_dir / 'hash-v1.yaml',
'files' : [
'sha256-base64.json',
'sha256-base16.json',
'sha256-nix32.json',
'blake3-base64.json',
'blake3-base16.json',
],
},
{

View file

@@ -1,8 +1,8 @@
{
"hash": {
"algorithm": "sha256",
"format": "base64",
"hash": "9vLqj0XYoFfJVmoz+ZR02i5camYE1zYSFlDicwxvsKM="
"format": "base16",
"hash": "f6f2ea8f45d8a057c9566a33f99474da2e5c6a6604d736121650e2730c6fb0a3"
},
"method": "nar"
}

View file

@@ -1,8 +1,8 @@
{
"hash": {
"algorithm": "sha256",
"format": "base64",
"hash": "8OTC92xYkW7CWPJGhRvqCR0U1CR6L8PhhpRGGxgW4Ts="
"format": "base16",
"hash": "f0e4c2f76c58916ec258f246851bea091d14d4247a2fc3e18694461b1816e13b"
},
"method": "text"
}

View file

@@ -1,8 +1,8 @@
{
"hash": {
"algorithm": "sha256",
"format": "base64",
"hash": "iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="
"format": "base16",
"hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f"
},
"method": "flat"
}

View file

@@ -1,8 +1,8 @@
{
"hash": {
"algorithm": "sha256",
"format": "base64",
"hash": "iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="
"format": "base16",
"hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f"
},
"method": "nar"
}

View file

@@ -1,8 +1,8 @@
{
"hash": {
"algorithm": "sha256",
"format": "base64",
"hash": "iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="
"format": "base16",
"hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f"
},
"method": "text"
}

View file

@@ -2,8 +2,8 @@
"ca": {
"hash": {
"algorithm": "sha256",
"format": "base64",
"hash": "EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM="
"format": "base16",
"hash": "10c209fa0890fe02c85a8c663ca8e7a37cc766bc5b1b29a0cc61b266f64121d3"
},
"method": "nar"
},
@@ -11,14 +11,14 @@
"deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
"downloadHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
"format": "base16",
"hash": "15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527"
},
"downloadSize": 4029176,
"narHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
"format": "base16",
"hash": "15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527"
},
"narSize": 34878,
"references": [

View file

@@ -2,15 +2,15 @@
"ca": {
"hash": {
"algorithm": "sha256",
"format": "base64",
"hash": "EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM="
"format": "base16",
"hash": "10c209fa0890fe02c85a8c663ca8e7a37cc766bc5b1b29a0cc61b266f64121d3"
},
"method": "nar"
},
"narHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
"format": "base16",
"hash": "15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527"
},
"narSize": 34878,
"references": [

View file

@@ -3,8 +3,8 @@
"deriver": null,
"narHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
"format": "base16",
"hash": "15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527"
},
"narSize": 0,
"references": [],

View file

@@ -2,8 +2,8 @@
"ca": null,
"narHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
"format": "base16",
"hash": "15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527"
},
"narSize": 0,
"references": [],

View file

@@ -2,16 +2,16 @@
"ca": {
"hash": {
"algorithm": "sha256",
"format": "base64",
"hash": "EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM="
"format": "base16",
"hash": "10c209fa0890fe02c85a8c663ca8e7a37cc766bc5b1b29a0cc61b266f64121d3"
},
"method": "nar"
},
"deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
"narHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
"format": "base16",
"hash": "15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527"
},
"narSize": 34878,
"references": [

View file

@@ -2,15 +2,15 @@
"ca": {
"hash": {
"algorithm": "sha256",
"format": "base64",
"hash": "EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM="
"format": "base16",
"hash": "10c209fa0890fe02c85a8c663ca8e7a37cc766bc5b1b29a0cc61b266f64121d3"
},
"method": "nar"
},
"narHash": {
"algorithm": "sha256",
"format": "base64",
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
"format": "base16",
"hash": "15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527"
},
"narSize": 34878,
"references": [

View file

@@ -0,0 +1,5 @@
{
"algorithm": "blake3",
"format": "base16",
"hash": "9e70ee1449965fb62d049040a1ed06ec377430da6ec13173e7c4fffcd28be980"
}

View file

@@ -1,5 +0,0 @@
{
"algorithm": "blake3",
"format": "base64",
"hash": "nnDuFEmWX7YtBJBAoe0G7Dd0MNpuwTFz58T//NKL6YA="
}

View file

@@ -1,5 +1,5 @@
{
"algorithm": "sha256",
"format": "base64",
"hash": "8OTC92xYkW7CWPJGhRvqCR0U1CR6L8PhhpRGGxgW4Ts="
"format": "base16",
"hash": "f0e4c2f76c58916ec258f246851bea091d14d4247a2fc3e18694461b1816e13b"
}

View file

@@ -215,9 +215,7 @@ struct HashJsonTest : virtual HashTest,
::testing::WithParamInterface<std::pair<std::string_view, Hash>>
{};
struct HashJsonParseOnlyTest : virtual HashTest,
JsonCharacterizationTest<Hash>,
::testing::WithParamInterface<std::pair<std::string_view, Hash>>
struct HashJsonParseFailureTest : virtual HashTest, ::testing::WithParamInterface<std::string_view>
{};
struct BLAKE3HashJsonTest : virtual HashTest,
@@ -238,10 +236,12 @@ TEST_P(HashJsonTest, to_json)
writeJsonTest(name, value);
}
TEST_P(HashJsonParseOnlyTest, from_json)
TEST_P(HashJsonParseFailureTest, from_json)
{
auto & [name, expected] = GetParam();
readJsonTest(name, expected);
auto & name = GetParam();
auto path = goldenMaster(Path{name} + ".json");
auto encoded = json::parse(readFile(path));
ASSERT_THROW(nlohmann::adl_serializer<Hash>::from_json(encoded), Error);
}
TEST_P(BLAKE3HashJsonTest, from_json)
@@ -256,8 +256,8 @@ TEST_P(BLAKE3HashJsonTest, to_json)
writeJsonTest(name, expected);
}
// Round-trip tests (from_json + to_json) for base64 format only
// (to_json always outputs base64)
// Round-trip tests (from_json + to_json) for base16 format only
// (to_json always outputs base16)
INSTANTIATE_TEST_SUITE_P(
HashJSON,
HashJsonTest,
@@ -266,32 +266,22 @@ INSTANTIATE_TEST_SUITE_P(
"simple",
hashString(HashAlgorithm::SHA256, "asdf"),
},
std::pair{
"sha256-base64",
hashString(HashAlgorithm::SHA256, "asdf"),
}));
// Parse-only tests for non-base64 formats
// These verify C++ can deserialize other formats correctly
INSTANTIATE_TEST_SUITE_P(
HashJSONParseOnly,
HashJsonParseOnlyTest,
::testing::Values(
std::pair{
"sha256-base16",
hashString(HashAlgorithm::SHA256, "asdf"),
},
std::pair{
"sha256-nix32",
hashString(HashAlgorithm::SHA256, "asdf"),
}));
INSTANTIATE_TEST_SUITE_P(BLAKE3HashJSONParseOnly, BLAKE3HashJsonTest, ([] {
// Failure tests for unsupported formats (base64, nix32, sri)
// These verify that non-base16 formats are rejected
INSTANTIATE_TEST_SUITE_P(
HashJSONParseFailure, HashJsonParseFailureTest, ::testing::Values("sha256-base64", "sha256-nix32"));
INSTANTIATE_TEST_SUITE_P(BLAKE3HashJSON, BLAKE3HashJsonTest, ([] {
ExperimentalFeatureSettings mockXpSettings;
mockXpSettings.set("experimental-features", "blake3-hashes");
return ::testing::Values(
std::pair{
"blake3-base64",
"blake3-base16",
hashString(HashAlgorithm::BLAKE3, "asdf", mockXpSettings),
});
}()));

View file

@@ -508,7 +508,14 @@ Hash adl_serializer<Hash>::from_json(const json & json, const ExperimentalFeatur
{
auto & obj = getObject(json);
auto algo = parseHashAlgo(getString(valueAt(obj, "algorithm")), xpSettings);
auto format = parseHashFormat(getString(valueAt(obj, "format")));
auto formatStr = getString(valueAt(obj, "format"));
auto format = parseHashFormat(formatStr);
// Only base16 format is supported for JSON serialization
if (format != HashFormat::Base16) {
throw Error("hash format '%s' is not supported in JSON; only 'base16' is currently supported", formatStr);
}
auto & hashS = getString(valueAt(obj, "hash"));
return Hash::parseExplicitFormatUnprefixed(hashS, algo, format, xpSettings);
}
@@ -516,9 +523,9 @@ Hash adl_serializer<Hash>::from_json(const json & json, const ExperimentalFeatur
void adl_serializer<Hash>::to_json(json & json, const Hash & hash)
{
json = {
{"format", printHashFormat(HashFormat::Base64)},
{"format", printHashFormat(HashFormat::Base16)},
{"algorithm", printHashAlgo(hash.algo)},
{"hash", hash.to_string(HashFormat::Base64, false)},
{"hash", hash.to_string(HashFormat::Base16, false)},
};
}

View file

@@ -15,13 +15,12 @@ nix-build fixed.nix -A bad --no-out-link && fail "should fail"
# a side-effect.
[[ -e $path ]]
nix path-info --json "$path" | jq -e \
--arg hash "$(nix hash convert --to base64 "md5:8ddd8be4b179a529afa5f2ffae4b9858")" \
'.[].ca == {
method: "flat",
hash: {
algorithm: "md5",
format: "base64",
hash: $hash
format: "base16",
hash: "8ddd8be4b179a529afa5f2ffae4b9858"
},
}'

View file

@@ -49,12 +49,12 @@ try2 () {
nix path-info --json "$path" | jq -e \
--arg algo "$hashAlgo" \
--arg hash "$(nix hash convert --to base64 "$hashAlgo:$hashFromGit")" \
--arg hash "$hashFromGit" \
'.[].ca == {
method: "git",
hash: {
algorithm: $algo,
format: "base64",
format: "base16",
hash: $hash
},
}'

View file

@@ -19,13 +19,13 @@ diff --unified --color=always \
{
"$foo": {
"algorithm": "sha256",
"format": "base64",
"hash": "QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA="
"format": "base16",
"hash": "42fb4031b525feebe2f8b08e6e6a8e86f34e6a91dd036ada888e311b9cc8e690"
},
"$bar": {
"algorithm": "sha256",
"format": "base64",
"hash": "9fhYGu9fqxcQC2Kc81qh2RMo1QcLBUBo8U+pPn+jthQ="
"format": "base16",
"hash": "f5f8581aef5fab17100b629cf35aa1d91328d5070b054068f14fa93e7fa3b614"
},
"$baz": null
}