mirror of
https://github.com/NixOS/nix.git
synced 2025-11-08 19:46:02 +01:00
Merge a1c39edd4c into 479b6b73a9
This commit is contained in:
commit
895643f1b5
38 changed files with 647 additions and 149 deletions
|
|
@ -37,6 +37,7 @@ mkMesonDerivation (finalAttrs: {
|
|||
(fileset.unions [
|
||||
../../.version
|
||||
# For example JSON
|
||||
../../src/libutil-tests/data/memory-source-accessor
|
||||
../../src/libutil-tests/data/hash
|
||||
../../src/libstore-tests/data/content-address
|
||||
../../src/libstore-tests/data/store-path
|
||||
|
|
|
|||
|
|
@ -22,7 +22,15 @@ The store path info JSON format has been updated from version 1 to version 2:
|
|||
- New: `"ca": {"method": "nar", "hash": {"algorithm": "sha256", "format": "base64", "hash": "EMIJ+giQ..."}}`
|
||||
- Still `null` values for input-addressed store objects
|
||||
|
||||
Version 1 format is still accepted when reading for backward compatibility.
|
||||
- **Structured hash fields**:
|
||||
|
||||
Hash values (`narHash` and `downloadHash`) are now structured JSON objects instead of strings:
|
||||
|
||||
- Old: `"narHash": "sha256:FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="`
|
||||
- New: `"narHash": {"algorithm": "sha256", "format": "base64", "hash": "FePFYIlM..."}`
|
||||
- Same structure applies to `downloadHash` in NAR info contexts
|
||||
|
||||
Nix currently only produces, and doesn't consume this format.
|
||||
|
||||
**Affected command**: `nix path-info --json`
|
||||
|
||||
|
|
|
|||
|
|
@ -120,6 +120,7 @@
|
|||
- [Architecture and Design](architecture/architecture.md)
|
||||
- [Formats and Protocols](protocols/index.md)
|
||||
- [JSON Formats](protocols/json/index.md)
|
||||
- [File System Object](protocols/json/file-system-object.md)
|
||||
- [Hash](protocols/json/hash.md)
|
||||
- [Content Address](protocols/json/content-address.md)
|
||||
- [Store Path](protocols/json/store-path.md)
|
||||
|
|
|
|||
21
doc/manual/source/protocols/json/file-system-object.md
Normal file
21
doc/manual/source/protocols/json/file-system-object.md
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
{{#include file-system-object-v1-fixed.md}}
|
||||
|
||||
## Examples
|
||||
|
||||
### Simple
|
||||
|
||||
```json
|
||||
{{#include schema/file-system-object-v1/simple.json}}
|
||||
```
|
||||
|
||||
### Complex
|
||||
|
||||
```json
|
||||
{{#include schema/file-system-object-v1/complex.json}}
|
||||
```
|
||||
|
||||
<!-- need to convert YAML to JSON first
|
||||
## Raw Schema
|
||||
|
||||
[JSON Schema for File System Object v1](schema/file-system-object-v1.json)
|
||||
-->
|
||||
|
|
@ -11,6 +11,7 @@ s/\\`/`/g
|
|||
#
|
||||
# As we have more such relative links, more replacements of this nature
|
||||
# should appear below.
|
||||
s^#/\$defs/\(regular\|symlink\|directory\)^In this schema^g
|
||||
s^\(./hash-v1.yaml\)\?#/$defs/algorithm^[JSON format for `Hash`](./hash.html#algorithm)^g
|
||||
s^\(./hash-v1.yaml\)^[JSON format for `Hash`](./hash.html)^g
|
||||
s^\(./content-address-v1.yaml\)\?#/$defs/method^[JSON format for `ContentAddress`](./content-address.html#method)^g
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ json_schema_for_humans = find_program('generate-schema-doc', required : false)
|
|||
json_schema_config = files('json-schema-for-humans-config.yaml')
|
||||
|
||||
schemas = [
|
||||
'file-system-object-v1',
|
||||
'hash-v1',
|
||||
'content-address-v1',
|
||||
'store-path-v1',
|
||||
|
|
|
|||
1
doc/manual/source/protocols/json/schema/file-system-object-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/file-system-object-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libutil-tests/data/memory-source-accessor
|
||||
|
|
@ -0,0 +1,65 @@
|
|||
"$schema": http://json-schema.org/draft-04/schema#
|
||||
"$id": https://nix.dev/manual/nix/latest/protocols/json/schema/file-system-object-v1.json
|
||||
title: File System Object
|
||||
description: |
|
||||
This schema describes the JSON representation of Nix's [File System Object](@docroot@/store/file-system-object.md).
|
||||
|
||||
The schema is recursive because file system objects contain other file system objects.
|
||||
type: object
|
||||
required: ["type"]
|
||||
properties:
|
||||
type:
|
||||
type: string
|
||||
enum: ["regular", "symlink", "directory"]
|
||||
|
||||
# Enforce conditional structure based on `type`
|
||||
anyOf:
|
||||
- $ref: "#/$defs/regular"
|
||||
required: ["type", "contents"]
|
||||
|
||||
- $ref: "#/$defs/symlink"
|
||||
required: ["type", "target"]
|
||||
|
||||
- $ref: "#/$defs/directory"
|
||||
required: ["type", "contents"]
|
||||
|
||||
"$defs":
|
||||
regular:
|
||||
title: Regular File
|
||||
required: ["contents"]
|
||||
properties:
|
||||
type:
|
||||
const: "regular"
|
||||
contents:
|
||||
type: string
|
||||
description: Base64-encoded file contents
|
||||
executable:
|
||||
type: boolean
|
||||
description: Whether the file is executable.
|
||||
default: false
|
||||
additionalProperties: false
|
||||
|
||||
symlink:
|
||||
title: Symbolic Link
|
||||
required: ["target"]
|
||||
properties:
|
||||
type:
|
||||
const: "symlink"
|
||||
target:
|
||||
type: string
|
||||
description: Target path of the symlink.
|
||||
additionalProperties: false
|
||||
|
||||
directory:
|
||||
title: Directory
|
||||
required: ["contents"]
|
||||
properties:
|
||||
type:
|
||||
const: "directory"
|
||||
contents:
|
||||
type: object
|
||||
description: |
|
||||
Map of names to nested file system objects (for type=directory)
|
||||
additionalProperties:
|
||||
$ref: "#"
|
||||
additionalProperties: false
|
||||
|
|
@ -71,7 +71,7 @@ $defs:
|
|||
Note: This field may not be present in all contexts, such as when the path is used as the key and the the store object info the value in map.
|
||||
|
||||
narHash:
|
||||
type: string
|
||||
"$ref": "./hash-v1.yaml"
|
||||
title: NAR Hash
|
||||
description: |
|
||||
Hash of the [file system object](@docroot@/store/file-system-object.md) part of the store object when serialized as a [Nix Archive](@docroot@/store/file-system-object/content-address.md#serial-nix-archive).
|
||||
|
|
@ -229,7 +229,7 @@ $defs:
|
|||
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||
|
||||
downloadHash:
|
||||
type: string
|
||||
"$ref": "./hash-v1.yaml"
|
||||
title: Download Hash
|
||||
description: |
|
||||
A digest for the compressed archive itself, as opposed to the data contained within.
|
||||
|
|
|
|||
1
src/json-schema-checks/file-system-object
Symbolic link
1
src/json-schema-checks/file-system-object
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libutil-tests/data/memory-source-accessor
|
||||
|
|
@ -20,6 +20,14 @@ schema_dir = meson.current_source_dir() / 'schema'
|
|||
|
||||
# Get all example files
|
||||
schemas = [
|
||||
{
|
||||
'stem' : 'file-system-object',
|
||||
'schema' : schema_dir / 'file-system-object-v1.yaml',
|
||||
'files' : [
|
||||
'simple.json',
|
||||
'complex.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'hash',
|
||||
'schema' : schema_dir / 'hash-v1.yaml',
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ mkMesonDerivation (finalAttrs: {
|
|||
fileset = lib.fileset.unions [
|
||||
../../.version
|
||||
../../doc/manual/source/protocols/json/schema
|
||||
../../src/libutil-tests/data/memory-source-accessor
|
||||
../../src/libutil-tests/data/hash
|
||||
../../src/libstore-tests/data/content-address
|
||||
../../src/libstore-tests/data/store-path
|
||||
|
|
|
|||
|
|
@ -9,9 +9,17 @@
|
|||
},
|
||||
"compression": "xz",
|
||||
"deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
|
||||
"downloadHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
|
||||
"downloadHash": {
|
||||
"algorithm": "sha256",
|
||||
"format": "base64",
|
||||
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
|
||||
},
|
||||
"downloadSize": 4029176,
|
||||
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
|
||||
"narHash": {
|
||||
"algorithm": "sha256",
|
||||
"format": "base64",
|
||||
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
|
||||
},
|
||||
"narSize": 34878,
|
||||
"references": [
|
||||
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
|
||||
|
|
|
|||
|
|
@ -7,7 +7,11 @@
|
|||
},
|
||||
"method": "nar"
|
||||
},
|
||||
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
|
||||
"narHash": {
|
||||
"algorithm": "sha256",
|
||||
"format": "base64",
|
||||
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
|
||||
},
|
||||
"narSize": 34878,
|
||||
"references": [
|
||||
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
|
||||
|
|
|
|||
|
|
@ -1,7 +1,11 @@
|
|||
{
|
||||
"ca": null,
|
||||
"deriver": null,
|
||||
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
|
||||
"narHash": {
|
||||
"algorithm": "sha256",
|
||||
"format": "base64",
|
||||
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
|
||||
},
|
||||
"narSize": 0,
|
||||
"references": [],
|
||||
"registrationTime": null,
|
||||
|
|
|
|||
|
|
@ -1,6 +1,10 @@
|
|||
{
|
||||
"ca": null,
|
||||
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
|
||||
"narHash": {
|
||||
"algorithm": "sha256",
|
||||
"format": "base64",
|
||||
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
|
||||
},
|
||||
"narSize": 0,
|
||||
"references": [],
|
||||
"version": 2
|
||||
|
|
|
|||
|
|
@ -8,7 +8,11 @@
|
|||
"method": "nar"
|
||||
},
|
||||
"deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
|
||||
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
|
||||
"narHash": {
|
||||
"algorithm": "sha256",
|
||||
"format": "base64",
|
||||
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
|
||||
},
|
||||
"narSize": 34878,
|
||||
"references": [
|
||||
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
|
||||
|
|
|
|||
|
|
@ -7,7 +7,11 @@
|
|||
},
|
||||
"method": "nar"
|
||||
},
|
||||
"narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
|
||||
"narHash": {
|
||||
"algorithm": "sha256",
|
||||
"format": "base64",
|
||||
"hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="
|
||||
},
|
||||
"narSize": 34878,
|
||||
"references": [
|
||||
"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
|
||||
|
|
|
|||
|
|
@ -59,24 +59,24 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo)
|
|||
return info;
|
||||
}
|
||||
|
||||
#define JSON_TEST(STEM, PURE) \
|
||||
TEST_F(NarInfoTest, NarInfo_##STEM##_from_json) \
|
||||
{ \
|
||||
readTest(#STEM, [&](const auto & encoded_) { \
|
||||
auto encoded = json::parse(encoded_); \
|
||||
auto expected = makeNarInfo(*store, PURE); \
|
||||
NarInfo got = NarInfo::fromJSON(*store, expected.path, encoded); \
|
||||
ASSERT_EQ(got, expected); \
|
||||
}); \
|
||||
} \
|
||||
\
|
||||
TEST_F(NarInfoTest, NarInfo_##STEM##_to_json) \
|
||||
{ \
|
||||
writeTest( \
|
||||
#STEM, \
|
||||
[&]() -> json { return makeNarInfo(*store, PURE).toJSON(*store, PURE, HashFormat::SRI); }, \
|
||||
[](const auto & file) { return json::parse(readFile(file)); }, \
|
||||
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
|
||||
#define JSON_TEST(STEM, PURE) \
|
||||
TEST_F(NarInfoTest, NarInfo_##STEM##_from_json) \
|
||||
{ \
|
||||
readTest(#STEM, [&](const auto & encoded_) { \
|
||||
auto encoded = json::parse(encoded_); \
|
||||
auto expected = makeNarInfo(*store, PURE); \
|
||||
auto got = UnkeyedNarInfo::fromJSON(&*store, encoded); \
|
||||
ASSERT_EQ(got, expected); \
|
||||
}); \
|
||||
} \
|
||||
\
|
||||
TEST_F(NarInfoTest, NarInfo_##STEM##_to_json) \
|
||||
{ \
|
||||
writeTest( \
|
||||
#STEM, \
|
||||
[&]() -> json { return makeNarInfo(*store, PURE).toJSON(&*store, PURE); }, \
|
||||
[](const auto & file) { return json::parse(readFile(file)); }, \
|
||||
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
|
||||
}
|
||||
|
||||
JSON_TEST(pure, false)
|
||||
|
|
|
|||
|
|
@ -70,7 +70,7 @@ static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo
|
|||
{ \
|
||||
readTest(#STEM, [&](const auto & encoded_) { \
|
||||
auto encoded = json::parse(encoded_); \
|
||||
UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(*store, encoded); \
|
||||
UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(&*store, encoded); \
|
||||
auto expected = OBJ; \
|
||||
ASSERT_EQ(got, expected); \
|
||||
}); \
|
||||
|
|
@ -80,7 +80,7 @@ static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo
|
|||
{ \
|
||||
writeTest( \
|
||||
#STEM, \
|
||||
[&]() -> json { return OBJ.toJSON(*store, PURE, HashFormat::SRI); }, \
|
||||
[&]() -> json { return OBJ.toJSON(&*store, PURE); }, \
|
||||
[](const auto & file) { return json::parse(readFile(file)); }, \
|
||||
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
#include "nix/store/references.hh"
|
||||
#include "nix/store/path-references.hh"
|
||||
#include "nix/util/bytes.hh"
|
||||
#include "nix/util/memory-source-accessor.hh"
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
|
@ -104,21 +105,21 @@ TEST(references, scanForReferencesDeep)
|
|||
// file1.txt: contains hash1
|
||||
"file1.txt",
|
||||
File::Regular{
|
||||
.contents = "This file references " + hash1 + " in its content",
|
||||
.contents = to_owned(as_bytes("This file references " + hash1 + " in its content")),
|
||||
},
|
||||
},
|
||||
{
|
||||
// file2.txt: contains hash2 and hash3
|
||||
"file2.txt",
|
||||
File::Regular{
|
||||
.contents = "Multiple refs: " + hash2 + " and also " + hash3,
|
||||
.contents = to_owned(as_bytes("Multiple refs: " + hash2 + " and also " + hash3)),
|
||||
},
|
||||
},
|
||||
{
|
||||
// file3.txt: contains no references
|
||||
"file3.txt",
|
||||
File::Regular{
|
||||
.contents = "This file has no store path references at all",
|
||||
.contents = to_owned(as_bytes("This file has no store path references at all")),
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -130,7 +131,7 @@ TEST(references, scanForReferencesDeep)
|
|||
// subdir/file4.txt: contains hash1 again
|
||||
"file4.txt",
|
||||
File::Regular{
|
||||
.contents = "Subdirectory file with " + hash1,
|
||||
.contents = to_owned(as_bytes("Subdirectory file with " + hash1)),
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
@ -140,7 +141,7 @@ TEST(references, scanForReferencesDeep)
|
|||
// link1: a symlink that contains a reference in its target
|
||||
"link1",
|
||||
File::Symlink{
|
||||
.target = hash2 + "-target",
|
||||
.target = to_owned(as_bytes(hash2 + "-target")),
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
|
|||
|
|
@ -423,15 +423,6 @@ void adl_serializer<DerivationOptions>::to_json(json & json, const DerivationOpt
|
|||
json["allowSubstitutes"] = o.allowSubstitutes;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
static inline std::optional<T> ptrToOwned(const json * ptr)
|
||||
{
|
||||
if (ptr)
|
||||
return std::optional{*ptr};
|
||||
else
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
DerivationOptions::OutputChecks adl_serializer<DerivationOptions::OutputChecks>::from_json(const json & json_)
|
||||
{
|
||||
auto & json = getObject(json_);
|
||||
|
|
|
|||
|
|
@ -9,17 +9,38 @@ namespace nix {
|
|||
|
||||
struct StoreDirConfig;
|
||||
|
||||
struct NarInfo : ValidPathInfo
|
||||
struct UnkeyedNarInfo : virtual UnkeyedValidPathInfo
|
||||
{
|
||||
std::string url;
|
||||
std::string compression;
|
||||
std::optional<Hash> fileHash;
|
||||
uint64_t fileSize = 0;
|
||||
|
||||
UnkeyedNarInfo(UnkeyedValidPathInfo info)
|
||||
: UnkeyedValidPathInfo(std::move(info))
|
||||
{
|
||||
}
|
||||
|
||||
bool operator==(const UnkeyedNarInfo &) const = default;
|
||||
// TODO libc++ 16 (used by darwin) missing `std::optional::operator <=>`, can't do yet
|
||||
// auto operator <=>(const NarInfo &) const = default;
|
||||
|
||||
nlohmann::json toJSON(const StoreDirConfig * store, bool includeImpureInfo) const override;
|
||||
static UnkeyedNarInfo fromJSON(const StoreDirConfig * store, const nlohmann::json & json);
|
||||
};
|
||||
|
||||
/**
|
||||
* Key and the extra NAR fields
|
||||
*/
|
||||
struct NarInfo : ValidPathInfo, UnkeyedNarInfo
|
||||
{
|
||||
NarInfo() = delete;
|
||||
|
||||
NarInfo(ValidPathInfo info)
|
||||
: ValidPathInfo{std::move(info)}
|
||||
: UnkeyedValidPathInfo(std::move(static_cast<UnkeyedValidPathInfo &&>(info)))
|
||||
// later moves will be partially ignored
|
||||
, ValidPathInfo(std::move(info))
|
||||
, UnkeyedNarInfo(std::move(info))
|
||||
{
|
||||
}
|
||||
|
||||
|
|
@ -37,13 +58,10 @@ struct NarInfo : ValidPathInfo
|
|||
NarInfo(const StoreDirConfig & store, const std::string & s, const std::string & whence);
|
||||
|
||||
bool operator==(const NarInfo &) const = default;
|
||||
// TODO libc++ 16 (used by darwin) missing `std::optional::operator <=>`, can't do yet
|
||||
// auto operator <=>(const NarInfo &) const = default;
|
||||
|
||||
std::string to_string(const StoreDirConfig & store) const;
|
||||
|
||||
nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const override;
|
||||
static NarInfo fromJSON(const StoreDirConfig & store, const StorePath & path, const nlohmann::json & json);
|
||||
};
|
||||
|
||||
} // namespace nix
|
||||
|
||||
JSON_IMPL(nix::UnkeyedNarInfo)
|
||||
|
|
|
|||
|
|
@ -117,11 +117,11 @@ struct UnkeyedValidPathInfo
|
|||
* @param includeImpureInfo If true, variable elements such as the
|
||||
* registration time are included.
|
||||
*/
|
||||
virtual nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const;
|
||||
static UnkeyedValidPathInfo fromJSON(const StoreDirConfig & store, const nlohmann::json & json);
|
||||
virtual nlohmann::json toJSON(const StoreDirConfig * store, bool includeImpureInfo) const;
|
||||
static UnkeyedValidPathInfo fromJSON(const StoreDirConfig * store, const nlohmann::json & json);
|
||||
};
|
||||
|
||||
struct ValidPathInfo : UnkeyedValidPathInfo
|
||||
struct ValidPathInfo : virtual UnkeyedValidPathInfo
|
||||
{
|
||||
StorePath path;
|
||||
|
||||
|
|
@ -174,10 +174,14 @@ struct ValidPathInfo : UnkeyedValidPathInfo
|
|||
|
||||
ValidPathInfo(StorePath && path, UnkeyedValidPathInfo info)
|
||||
: UnkeyedValidPathInfo(info)
|
||||
, path(std::move(path)) {};
|
||||
, path(std::move(path))
|
||||
{
|
||||
}
|
||||
|
||||
ValidPathInfo(const StorePath & path, UnkeyedValidPathInfo info)
|
||||
: UnkeyedValidPathInfo(info)
|
||||
, path(path) {};
|
||||
: ValidPathInfo(StorePath{path}, std::move(info))
|
||||
{
|
||||
}
|
||||
|
||||
static ValidPathInfo
|
||||
makeFromCA(const StoreDirConfig & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash);
|
||||
|
|
@ -191,3 +195,5 @@ static_assert(std::is_move_constructible_v<ValidPathInfo>);
|
|||
using ValidPathInfos = std::map<StorePath, ValidPathInfo>;
|
||||
|
||||
} // namespace nix
|
||||
|
||||
JSON_IMPL(nix::UnkeyedValidPathInfo)
|
||||
|
|
|
|||
|
|
@ -7,7 +7,9 @@
|
|||
namespace nix {
|
||||
|
||||
NarInfo::NarInfo(const StoreDirConfig & store, const std::string & s, const std::string & whence)
|
||||
: ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack
|
||||
: UnkeyedValidPathInfo(Hash::dummy) // FIXME: hack
|
||||
, ValidPathInfo(StorePath::dummy, static_cast<const UnkeyedValidPathInfo &>(*this)) // FIXME: hack
|
||||
, UnkeyedNarInfo(static_cast<const UnkeyedValidPathInfo &>(*this))
|
||||
{
|
||||
unsigned line = 1;
|
||||
|
||||
|
|
@ -130,11 +132,11 @@ std::string NarInfo::to_string(const StoreDirConfig & store) const
|
|||
return res;
|
||||
}
|
||||
|
||||
nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const
|
||||
nlohmann::json UnkeyedNarInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo) const
|
||||
{
|
||||
using nlohmann::json;
|
||||
|
||||
auto jsonObject = ValidPathInfo::toJSON(store, includeImpureInfo, hashFormat);
|
||||
auto jsonObject = UnkeyedValidPathInfo::toJSON(store, includeImpureInfo);
|
||||
|
||||
if (includeImpureInfo) {
|
||||
if (!url.empty())
|
||||
|
|
@ -142,7 +144,7 @@ nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureI
|
|||
if (!compression.empty())
|
||||
jsonObject["compression"] = compression;
|
||||
if (fileHash)
|
||||
jsonObject["downloadHash"] = fileHash->to_string(hashFormat, true);
|
||||
jsonObject["downloadHash"] = *fileHash;
|
||||
if (fileSize)
|
||||
jsonObject["downloadSize"] = fileSize;
|
||||
}
|
||||
|
|
@ -150,30 +152,43 @@ nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureI
|
|||
return jsonObject;
|
||||
}
|
||||
|
||||
NarInfo NarInfo::fromJSON(const StoreDirConfig & store, const StorePath & path, const nlohmann::json & json)
|
||||
UnkeyedNarInfo UnkeyedNarInfo::fromJSON(const StoreDirConfig * store, const nlohmann::json & json)
|
||||
{
|
||||
using nlohmann::detail::value_t;
|
||||
|
||||
NarInfo res{ValidPathInfo{
|
||||
path,
|
||||
UnkeyedValidPathInfo::fromJSON(store, json),
|
||||
}};
|
||||
UnkeyedNarInfo res{UnkeyedValidPathInfo::fromJSON(store, json)};
|
||||
|
||||
auto & obj = getObject(json);
|
||||
|
||||
if (json.contains("url"))
|
||||
res.url = getString(valueAt(obj, "url"));
|
||||
if (auto * url = get(obj, "url"))
|
||||
res.url = getString(*url);
|
||||
|
||||
if (json.contains("compression"))
|
||||
res.compression = getString(valueAt(obj, "compression"));
|
||||
if (auto * compression = get(obj, "compression"))
|
||||
res.compression = getString(*compression);
|
||||
|
||||
if (json.contains("downloadHash"))
|
||||
res.fileHash = Hash::parseAny(getString(valueAt(obj, "downloadHash")), std::nullopt);
|
||||
if (auto * downloadHash = get(obj, "downloadHash"))
|
||||
res.fileHash = *downloadHash;
|
||||
|
||||
if (json.contains("downloadSize"))
|
||||
res.fileSize = getUnsigned(valueAt(obj, "downloadSize"));
|
||||
if (auto * downloadSize = get(obj, "downloadSize"))
|
||||
res.fileSize = getUnsigned(*downloadSize);
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
} // namespace nix
|
||||
|
||||
namespace nlohmann {
|
||||
|
||||
using namespace nix;
|
||||
|
||||
UnkeyedNarInfo adl_serializer<UnkeyedNarInfo>::from_json(const json & json)
|
||||
{
|
||||
return UnkeyedNarInfo::fromJSON(nullptr, json);
|
||||
}
|
||||
|
||||
void adl_serializer<UnkeyedNarInfo>::to_json(json & json, const UnkeyedNarInfo & c)
|
||||
{
|
||||
json = c.toJSON(nullptr, true);
|
||||
}
|
||||
|
||||
} // namespace nlohmann
|
||||
|
|
|
|||
|
|
@ -149,8 +149,7 @@ ValidPathInfo ValidPathInfo::makeFromCA(
|
|||
return res;
|
||||
}
|
||||
|
||||
nlohmann::json
|
||||
UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const
|
||||
nlohmann::json UnkeyedValidPathInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo) const
|
||||
{
|
||||
using nlohmann::json;
|
||||
|
||||
|
|
@ -158,19 +157,21 @@ UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInf
|
|||
|
||||
jsonObject["version"] = 2;
|
||||
|
||||
jsonObject["narHash"] = narHash.to_string(hashFormat, true);
|
||||
jsonObject["narHash"] = narHash;
|
||||
jsonObject["narSize"] = narSize;
|
||||
|
||||
{
|
||||
auto & jsonRefs = jsonObject["references"] = json::array();
|
||||
for (auto & ref : references)
|
||||
jsonRefs.emplace_back(store.printStorePath(ref));
|
||||
jsonRefs.emplace_back(store ? static_cast<json>(store->printStorePath(ref)) : static_cast<json>(ref));
|
||||
}
|
||||
|
||||
jsonObject["ca"] = ca;
|
||||
|
||||
if (includeImpureInfo) {
|
||||
jsonObject["deriver"] = deriver ? (std::optional{store.printStorePath(*deriver)}) : std::nullopt;
|
||||
jsonObject["deriver"] = deriver ? (store ? static_cast<json>(std::optional{store->printStorePath(*deriver)})
|
||||
: static_cast<json>(std::optional{*deriver}))
|
||||
: static_cast<json>(std::optional<StorePath>{});
|
||||
|
||||
jsonObject["registrationTime"] = registrationTime ? (std::optional{registrationTime}) : std::nullopt;
|
||||
|
||||
|
|
@ -184,7 +185,7 @@ UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInf
|
|||
return jsonObject;
|
||||
}
|
||||
|
||||
UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store, const nlohmann::json & _json)
|
||||
UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig * store, const nlohmann::json & _json)
|
||||
{
|
||||
UnkeyedValidPathInfo res{
|
||||
Hash(Hash::dummy),
|
||||
|
|
@ -192,44 +193,34 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store
|
|||
|
||||
auto & json = getObject(_json);
|
||||
|
||||
// Check version (optional for backward compatibility)
|
||||
nlohmann::json::number_unsigned_t version = 1;
|
||||
if (json.contains("version")) {
|
||||
version = getUnsigned(valueAt(json, "version"));
|
||||
if (version != 1 && version != 2) {
|
||||
throw Error("Unsupported path info JSON format version %d, expected 1 through 2", version);
|
||||
}
|
||||
{
|
||||
auto version = getUnsigned(valueAt(json, "version"));
|
||||
if (version != 2)
|
||||
throw Error("Unsupported path info JSON format version %d, only version 2 is currently supported", version);
|
||||
}
|
||||
|
||||
res.narHash = Hash::parseAny(getString(valueAt(json, "narHash")), std::nullopt);
|
||||
res.narHash = valueAt(json, "narHash");
|
||||
res.narSize = getUnsigned(valueAt(json, "narSize"));
|
||||
|
||||
try {
|
||||
auto references = getStringList(valueAt(json, "references"));
|
||||
for (auto & input : references)
|
||||
res.references.insert(store.parseStorePath(static_cast<const std::string &>(input)));
|
||||
res.references.insert(store ? store->parseStorePath(getString(input)) : static_cast<StorePath>(input));
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while reading key 'references'");
|
||||
throw;
|
||||
}
|
||||
|
||||
// New format as this as nullable but mandatory field; handling
|
||||
// missing is for back-compat.
|
||||
if (auto * rawCa0 = optionalValueAt(json, "ca"))
|
||||
if (auto * rawCa = getNullable(*rawCa0))
|
||||
switch (version) {
|
||||
case 1:
|
||||
// old string format also used in SQLite DB and .narinfo
|
||||
res.ca = ContentAddress::parse(getString(*rawCa));
|
||||
break;
|
||||
case 2 ... std::numeric_limits<decltype(version)>::max():
|
||||
res.ca = *rawCa;
|
||||
break;
|
||||
}
|
||||
try {
|
||||
res.ca = ptrToOwned<ContentAddress>(getNullable(valueAt(json, "ca")));
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while reading key 'ca'");
|
||||
throw;
|
||||
}
|
||||
|
||||
if (auto * rawDeriver0 = optionalValueAt(json, "deriver"))
|
||||
if (auto * rawDeriver = getNullable(*rawDeriver0))
|
||||
res.deriver = store.parseStorePath(getString(*rawDeriver));
|
||||
res.deriver = store ? store->parseStorePath(getString(*rawDeriver)) : static_cast<StorePath>(*rawDeriver);
|
||||
|
||||
if (auto * rawRegistrationTime0 = optionalValueAt(json, "registrationTime"))
|
||||
if (auto * rawRegistrationTime = getNullable(*rawRegistrationTime0))
|
||||
|
|
@ -245,3 +236,19 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store
|
|||
}
|
||||
|
||||
} // namespace nix
|
||||
|
||||
namespace nlohmann {
|
||||
|
||||
using namespace nix;
|
||||
|
||||
UnkeyedValidPathInfo adl_serializer<UnkeyedValidPathInfo>::from_json(const json & json)
|
||||
{
|
||||
return UnkeyedValidPathInfo::fromJSON(nullptr, json);
|
||||
}
|
||||
|
||||
void adl_serializer<UnkeyedValidPathInfo>::to_json(json & json, const UnkeyedValidPathInfo & c)
|
||||
{
|
||||
json = c.toJSON(nullptr, true);
|
||||
}
|
||||
|
||||
} // namespace nlohmann
|
||||
|
|
|
|||
24
src/libutil-tests/data/memory-source-accessor/complex.json
Normal file
24
src/libutil-tests/data/memory-source-accessor/complex.json
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
{
|
||||
"contents": {
|
||||
"bar": {
|
||||
"contents": {
|
||||
"baz": {
|
||||
"contents": "Z29vZCBkYXksCg==",
|
||||
"executable": true,
|
||||
"type": "regular"
|
||||
},
|
||||
"quux": {
|
||||
"target": "L292ZXIvdGhlcmU=",
|
||||
"type": "symlink"
|
||||
}
|
||||
},
|
||||
"type": "directory"
|
||||
},
|
||||
"foo": {
|
||||
"contents": "aGVsbG8K",
|
||||
"executable": false,
|
||||
"type": "regular"
|
||||
}
|
||||
},
|
||||
"type": "directory"
|
||||
}
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
{
|
||||
"contents": "YXNkZg==",
|
||||
"executable": false,
|
||||
"type": "regular"
|
||||
}
|
||||
|
|
@ -224,42 +224,15 @@ TEST_F(GitTest, tree_sha256_write)
|
|||
});
|
||||
}
|
||||
|
||||
namespace memory_source_accessor {
|
||||
|
||||
extern ref<MemorySourceAccessor> exampleComplex();
|
||||
|
||||
}
|
||||
|
||||
TEST_F(GitTest, both_roundrip)
|
||||
{
|
||||
using File = MemorySourceAccessor::File;
|
||||
|
||||
auto files = make_ref<MemorySourceAccessor>();
|
||||
files->root = File::Directory{
|
||||
.contents{
|
||||
{
|
||||
"foo",
|
||||
File::Regular{
|
||||
.contents = "hello\n\0\n\tworld!",
|
||||
},
|
||||
},
|
||||
{
|
||||
"bar",
|
||||
File::Directory{
|
||||
.contents =
|
||||
{
|
||||
{
|
||||
"baz",
|
||||
File::Regular{
|
||||
.executable = true,
|
||||
.contents = "good day,\n\0\n\tworld!",
|
||||
},
|
||||
},
|
||||
{
|
||||
"quux",
|
||||
File::Symlink{
|
||||
.target = "/over/there",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
auto files = memory_source_accessor::exampleComplex();
|
||||
|
||||
for (const auto hashAlgo : {HashAlgorithm::SHA1, HashAlgorithm::SHA256}) {
|
||||
std::map<Hash, std::string> cas;
|
||||
|
|
|
|||
116
src/libutil-tests/memory-source-accessor.cc
Normal file
116
src/libutil-tests/memory-source-accessor.cc
Normal file
|
|
@ -0,0 +1,116 @@
|
|||
#include <string_view>
|
||||
|
||||
#include "nix/util/bytes.hh"
|
||||
#include "nix/util/memory-source-accessor.hh"
|
||||
#include "nix/util/tests/json-characterization.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
namespace memory_source_accessor {
|
||||
|
||||
using File = MemorySourceAccessor::File;
|
||||
|
||||
ref<MemorySourceAccessor> exampleSimple()
|
||||
{
|
||||
auto sc = make_ref<MemorySourceAccessor>();
|
||||
sc->root = File{File::Regular{
|
||||
.executable = false,
|
||||
.contents = to_owned(as_bytes("asdf")),
|
||||
}};
|
||||
return sc;
|
||||
}
|
||||
|
||||
ref<MemorySourceAccessor> exampleComplex()
|
||||
{
|
||||
auto files = make_ref<MemorySourceAccessor>();
|
||||
files->root = File::Directory{
|
||||
.contents{
|
||||
{
|
||||
"foo",
|
||||
File::Regular{
|
||||
.contents = to_owned(as_bytes("hello\n\0\n\tworld!")),
|
||||
},
|
||||
},
|
||||
{
|
||||
"bar",
|
||||
File::Directory{
|
||||
.contents =
|
||||
{
|
||||
{
|
||||
"baz",
|
||||
File::Regular{
|
||||
.executable = true,
|
||||
.contents = to_owned(as_bytes("good day,\n\0\n\tworld!")),
|
||||
},
|
||||
},
|
||||
{
|
||||
"quux",
|
||||
File::Symlink{
|
||||
.target = to_owned(as_bytes("/over/there")),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
return files;
|
||||
}
|
||||
|
||||
} // namespace memory_source_accessor
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* JSON
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
class MemorySourceAccessorTest : public virtual CharacterizationTest
|
||||
{
|
||||
std::filesystem::path unitTestData = getUnitTestData() / "memory-source-accessor";
|
||||
|
||||
public:
|
||||
|
||||
std::filesystem::path goldenMaster(std::string_view testStem) const override
|
||||
{
|
||||
return unitTestData / testStem;
|
||||
}
|
||||
};
|
||||
|
||||
using nlohmann::json;
|
||||
|
||||
struct MemorySourceAccessorJsonTest : MemorySourceAccessorTest,
|
||||
JsonCharacterizationTest<MemorySourceAccessor>,
|
||||
::testing::WithParamInterface<std::pair<std::string_view, MemorySourceAccessor>>
|
||||
{};
|
||||
|
||||
// Deserialization: parse the golden JSON and check the decoded file tree
// matches the expected accessor's `root`.
TEST_P(MemorySourceAccessorJsonTest, from_json)
{
    auto & [name, expected] = GetParam();
    /* Cannot use `readJsonTest` because need to compare `root` field of
       the source accessors for equality. */
    readTest(Path{name} + ".json", [&](const auto & encodedRaw) {
        auto encoded = json::parse(encodedRaw);
        // Convert via the `adl_serializer` conversion operator.
        auto decoded = static_cast<MemorySourceAccessor>(encoded);
        ASSERT_EQ(decoded.root, expected.root);
    });
}
|
||||
|
||||
// Serialization: the example accessor must serialize to exactly the
// golden JSON file.
TEST_P(MemorySourceAccessorJsonTest, to_json)
{
    auto & [name, value] = GetParam();
    writeJsonTest(name, value);
}
|
||||
|
||||
// Register the two golden-master cases. The stems "simple" and "complex"
// must match files in the `memory-source-accessor` test data directory.
INSTANTIATE_TEST_SUITE_P(
    MemorySourceAccessorJSON,
    MemorySourceAccessorJsonTest,
    ::testing::Values(
        std::pair{
            "simple",
            *memory_source_accessor::exampleSimple(),
        },
        std::pair{
            "complex",
            *memory_source_accessor::exampleComplex(),
        }));
|
||||
|
||||
} // namespace nix
|
||||
|
|
@ -63,6 +63,7 @@ sources = files(
|
|||
'json-utils.cc',
|
||||
'logging.cc',
|
||||
'lru-cache.cc',
|
||||
'memory-source-accessor.cc',
|
||||
'monitorfdhup.cc',
|
||||
'nix_api_util.cc',
|
||||
'nix_api_util_internal.cc',
|
||||
|
|
|
|||
41
src/libutil/include/nix/util/bytes.hh
Normal file
41
src/libutil/include/nix/util/bytes.hh
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <string_view>
|
||||
#include <span>
|
||||
#include <vector>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
 * View the characters of a string view as a span of `std::byte`.
 *
 * No copy is made; the returned span is valid only while the data
 * underlying `sv` stays alive.
 */
static inline std::span<const std::byte> as_bytes(std::string_view sv) noexcept
{
    auto * first = reinterpret_cast<const std::byte *>(sv.data());
    return std::span<const std::byte>{first, sv.size()};
}
|
||||
|
||||
static inline std::vector<std::byte> to_owned(std::span<const std::byte> bytes)
|
||||
{
|
||||
return std::vector<std::byte>{
|
||||
bytes.begin(),
|
||||
bytes.end(),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @note this should be avoided, as arbitrary binary data in strings
|
||||
* views, while allowed, is not really proper. Generally this should
|
||||
* only be used as a stop-gap with other definitions that themselves
|
||||
* should be converted to accept `std::span<const std::byte>` or
|
||||
* similar, directly.
|
||||
*/
|
||||
static inline std::string_view as_str(std::span<const std::byte> sp)
|
||||
{
|
||||
return std::string_view{
|
||||
reinterpret_cast<const char *>(sp.data()),
|
||||
sp.size(),
|
||||
};
|
||||
}
|
||||
|
||||
} // namespace nix
|
||||
|
|
@ -114,4 +114,13 @@ struct adl_serializer<std::optional<T>>
|
|||
}
|
||||
};
|
||||
|
||||
template<typename T>
|
||||
static inline std::optional<T> ptrToOwned(const json * ptr)
|
||||
{
|
||||
if (ptr)
|
||||
return std::optional{*ptr};
|
||||
else
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
} // namespace nlohmann
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@
|
|||
#include "nix/util/source-path.hh"
|
||||
#include "nix/util/fs-sink.hh"
|
||||
#include "nix/util/variant-wrapper.hh"
|
||||
#include "nix/util/json-impls.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
@ -25,7 +26,7 @@ struct MemorySourceAccessor : virtual SourceAccessor
|
|||
struct Regular
|
||||
{
|
||||
bool executable = false;
|
||||
std::string contents;
|
||||
std::vector<std::byte> contents;
|
||||
|
||||
bool operator==(const Regular &) const = default;
|
||||
auto operator<=>(const Regular &) const = default;
|
||||
|
|
@ -44,7 +45,7 @@ struct MemorySourceAccessor : virtual SourceAccessor
|
|||
|
||||
struct Symlink
|
||||
{
|
||||
std::string target;
|
||||
std::vector<std::byte> target;
|
||||
|
||||
bool operator==(const Symlink &) const = default;
|
||||
auto operator<=>(const Symlink &) const = default;
|
||||
|
|
@ -86,7 +87,13 @@ struct MemorySourceAccessor : virtual SourceAccessor
|
|||
*/
|
||||
File * open(const CanonPath & path, std::optional<File> create);
|
||||
|
||||
SourcePath addFile(CanonPath path, std::string && contents);
|
||||
SourcePath addFile(CanonPath path, std::vector<std::byte> && contents);
|
||||
|
||||
/**
|
||||
* Small wrapper of the other `addFile`, purely for convenience when
|
||||
* the file in question to be added is a string.
|
||||
*/
|
||||
SourcePath addFile(CanonPath path, std::string_view contents);
|
||||
};
|
||||
|
||||
inline bool MemorySourceAccessor::File::Directory::operator==(
|
||||
|
|
@ -121,4 +128,30 @@ struct MemorySink : FileSystemObjectSink
|
|||
void createSymlink(const CanonPath & path, const std::string & target) override;
|
||||
};
|
||||
|
||||
template<>
|
||||
struct json_avoids_null<MemorySourceAccessor::File::Regular> : std::true_type
|
||||
{};
|
||||
|
||||
template<>
|
||||
struct json_avoids_null<MemorySourceAccessor::File::Directory> : std::true_type
|
||||
{};
|
||||
|
||||
template<>
|
||||
struct json_avoids_null<MemorySourceAccessor::File::Symlink> : std::true_type
|
||||
{};
|
||||
|
||||
template<>
|
||||
struct json_avoids_null<MemorySourceAccessor::File> : std::true_type
|
||||
{};
|
||||
|
||||
template<>
|
||||
struct json_avoids_null<MemorySourceAccessor> : std::true_type
|
||||
{};
|
||||
|
||||
} // namespace nix
|
||||
|
||||
JSON_IMPL(MemorySourceAccessor::File::Regular)
|
||||
JSON_IMPL(MemorySourceAccessor::File::Directory)
|
||||
JSON_IMPL(MemorySourceAccessor::File::Symlink)
|
||||
JSON_IMPL(MemorySourceAccessor::File)
|
||||
JSON_IMPL(MemorySourceAccessor)
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ headers = files(
|
|||
'array-from-string-literal.hh',
|
||||
'base-n.hh',
|
||||
'base-nix-32.hh',
|
||||
'bytes.hh',
|
||||
'callback.hh',
|
||||
'canon-path.hh',
|
||||
'checked-arithmetic.hh',
|
||||
|
|
|
|||
|
|
@ -1,4 +1,7 @@
|
|||
#include "nix/util/memory-source-accessor.hh"
|
||||
#include "nix/util/base-n.hh"
|
||||
#include "nix/util/bytes.hh"
|
||||
#include "nix/util/json-utils.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
@ -58,7 +61,7 @@ std::string MemorySourceAccessor::readFile(const CanonPath & path)
|
|||
if (!f)
|
||||
throw Error("file '%s' does not exist", path);
|
||||
if (auto * r = std::get_if<File::Regular>(&f->raw))
|
||||
return r->contents;
|
||||
return std::string{as_str(r->contents)};
|
||||
else
|
||||
throw Error("file '%s' is not a regular file", path);
|
||||
}
|
||||
|
|
@ -120,12 +123,12 @@ std::string MemorySourceAccessor::readLink(const CanonPath & path)
|
|||
if (!f)
|
||||
throw Error("file '%s' does not exist", path);
|
||||
if (auto * s = std::get_if<File::Symlink>(&f->raw))
|
||||
return s->target;
|
||||
return std::string{as_str(s->target)};
|
||||
else
|
||||
throw Error("file '%s' is not a symbolic link", path);
|
||||
}
|
||||
|
||||
SourcePath MemorySourceAccessor::addFile(CanonPath path, std::string && contents)
|
||||
SourcePath MemorySourceAccessor::addFile(CanonPath path, std::vector<std::byte> && contents)
|
||||
{
|
||||
// Create root directory automatically if necessary as a convenience.
|
||||
if (!root && !path.isRoot())
|
||||
|
|
@ -142,6 +145,11 @@ SourcePath MemorySourceAccessor::addFile(CanonPath path, std::string && contents
|
|||
return SourcePath{ref(shared_from_this()), path};
|
||||
}
|
||||
|
||||
SourcePath MemorySourceAccessor::addFile(CanonPath path, std::string_view contents)
|
||||
{
|
||||
return addFile(path, to_owned(as_bytes(contents)));
|
||||
}
|
||||
|
||||
using File = MemorySourceAccessor::File;
|
||||
|
||||
void MemorySink::createDirectory(const CanonPath & path)
|
||||
|
|
@ -190,9 +198,10 @@ void CreateMemoryRegularFile::preallocateContents(uint64_t len)
|
|||
regularFile.contents.reserve(len);
|
||||
}
|
||||
|
||||
void CreateMemoryRegularFile::operator()(std::string_view data)
|
||||
void CreateMemoryRegularFile::operator()(std::string_view data_)
|
||||
{
|
||||
regularFile.contents += data;
|
||||
auto data = as_bytes(data_);
|
||||
regularFile.contents.insert(regularFile.contents.end(), data.begin(), data.end());
|
||||
}
|
||||
|
||||
void MemorySink::createSymlink(const CanonPath & path, const std::string & target)
|
||||
|
|
@ -201,7 +210,7 @@ void MemorySink::createSymlink(const CanonPath & path, const std::string & targe
|
|||
if (!f)
|
||||
throw Error("file '%s' cannot be made because some parent file is not a directory", path);
|
||||
if (auto * s = std::get_if<File::Symlink>(&f->raw))
|
||||
s->target = target;
|
||||
s->target = to_owned(as_bytes(target));
|
||||
else
|
||||
throw Error("file '%s' is not a symbolic link", path);
|
||||
}
|
||||
|
|
@ -222,3 +231,106 @@ ref<SourceAccessor> makeEmptySourceAccessor()
|
|||
}
|
||||
|
||||
} // namespace nix
|
||||
|
||||
namespace nlohmann {
|
||||
|
||||
using namespace nix;
|
||||
|
||||
/**
 * Parse a regular file from JSON: an `executable` boolean plus
 * base64-encoded `contents`.
 */
MemorySourceAccessor::File::Regular adl_serializer<MemorySourceAccessor::File::Regular>::from_json(const json & json)
{
    auto & obj = getObject(json);
    return MemorySourceAccessor::File::Regular{
        .executable = getBoolean(valueAt(obj, "executable")),
        // Contents are base64 in JSON so arbitrary binary data survives the text format.
        .contents = to_owned(as_bytes(base64::decode(getString(valueAt(obj, "contents"))))),
    };
}
|
||||
|
||||
/**
 * Serialize a regular file to JSON: `executable` flag and
 * base64-encoded `contents`.
 */
void adl_serializer<MemorySourceAccessor::File::Regular>::to_json(
    json & json, const MemorySourceAccessor::File::Regular & val)
{
    json = {
        {"executable", val.executable},
        {"contents", base64::encode(val.contents)},
    };
}
|
||||
|
||||
/**
 * Parse a directory from JSON; `contents` maps entry names to nested
 * `File` objects (converted recursively by nlohmann).
 */
MemorySourceAccessor::File::Directory
adl_serializer<MemorySourceAccessor::File::Directory>::from_json(const json & json)
{
    auto & obj = getObject(json);
    return MemorySourceAccessor::File::Directory{
        .contents = valueAt(obj, "contents"),
    };
}
|
||||
|
||||
/**
 * Serialize a directory to JSON as an object with a `contents` map of
 * entry name -> nested file.
 */
void adl_serializer<MemorySourceAccessor::File::Directory>::to_json(
    json & json, const MemorySourceAccessor::File::Directory & val)
{
    json = {
        {"contents", val.contents},
    };
}
|
||||
|
||||
/**
 * Parse a symlink from JSON; the `target` is base64-encoded, since
 * symlink targets are byte strings rather than guaranteed-valid text.
 */
MemorySourceAccessor::File::Symlink adl_serializer<MemorySourceAccessor::File::Symlink>::from_json(const json & json)
{
    auto & obj = getObject(json);
    return MemorySourceAccessor::File::Symlink{
        .target = to_owned(as_bytes(base64::decode(getString(valueAt(obj, "target"))))),
    };
}
|
||||
|
||||
/**
 * Serialize a symlink to JSON with a base64-encoded `target`.
 */
void adl_serializer<MemorySourceAccessor::File::Symlink>::to_json(
    json & json, const MemorySourceAccessor::File::Symlink & val)
{
    json = {
        {"target", base64::encode(val.target)},
    };
}
|
||||
|
||||
/**
 * Parse a `File` variant from JSON, dispatching on the `type` tag
 * ("regular", "directory", or "symlink") to the matching per-variant
 * deserializer.
 *
 * @throws Error if `type` holds any other value.
 */
MemorySourceAccessor::File adl_serializer<MemorySourceAccessor::File>::from_json(const json & json)
{
    auto & obj = getObject(json);
    auto type = getString(valueAt(obj, "type"));
    if (type == "regular")
        return static_cast<MemorySourceAccessor::File::Regular>(json);
    else if (type == "directory")
        return static_cast<MemorySourceAccessor::File::Directory>(json);
    else if (type == "symlink")
        return static_cast<MemorySourceAccessor::File::Symlink>(json);
    else
        throw Error("unknown type of file '%s'", type);
}
|
||||
|
||||
/**
 * Serialize a `File` variant to JSON: first serialize the active
 * alternative's payload, then add the discriminating `type` tag on top.
 */
void adl_serializer<MemorySourceAccessor::File>::to_json(json & json, const MemorySourceAccessor::File & val)
{
    std::visit(
        overloaded{
            [&](const MemorySourceAccessor::File::Regular & r) {
                // Payload first; tag second so it is added to the payload object.
                json = r;
                json["type"] = "regular";
            },
            [&](const MemorySourceAccessor::File::Directory & d) {
                json = d;
                json["type"] = "directory";
            },
            [&](const MemorySourceAccessor::File::Symlink & s) {
                json = s;
                json["type"] = "symlink";
            },
        },
        val.raw);
}
|
||||
|
||||
/**
 * Deserialize an accessor: the whole JSON document is the root `File`
 * (converted via the `File` serializer above).
 */
MemorySourceAccessor adl_serializer<MemorySourceAccessor>::from_json(const json & json)
{
    MemorySourceAccessor res;
    res.root = json;
    return res;
}
|
||||
|
||||
/**
 * Serialize an accessor as just its root `File` tree.
 */
void adl_serializer<MemorySourceAccessor>::to_json(json & json, const MemorySourceAccessor & val)
{
    json = val.root;
}
|
||||
|
||||
} // namespace nlohmann
|
||||
|
|
|
|||
|
|
@ -51,7 +51,7 @@ static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool
|
|||
// know the name yet until we've read the NAR info.
|
||||
printedStorePath = store.printStorePath(info->path);
|
||||
|
||||
jsonObject = info->toJSON(store, true, HashFormat::SRI);
|
||||
jsonObject = info->toJSON(&store, true);
|
||||
|
||||
if (showClosureSize) {
|
||||
StorePathSet closure;
|
||||
|
|
|
|||
|
|
@ -17,8 +17,16 @@ diff --unified --color=always \
|
|||
jq --sort-keys 'map_values(.narHash)') \
|
||||
<(jq --sort-keys <<-EOF
|
||||
{
|
||||
"$foo": "sha256-QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA=",
|
||||
"$bar": "sha256-9fhYGu9fqxcQC2Kc81qh2RMo1QcLBUBo8U+pPn+jthQ=",
|
||||
"$foo": {
|
||||
"algorithm": "sha256",
|
||||
"format": "base64",
|
||||
"hash": "QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA="
|
||||
},
|
||||
"$bar": {
|
||||
"algorithm": "sha256",
|
||||
"format": "base64",
|
||||
"hash": "9fhYGu9fqxcQC2Kc81qh2RMo1QcLBUBo8U+pPn+jthQ="
|
||||
},
|
||||
"$baz": null
|
||||
}
|
||||
EOF
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue