Mirror of https://github.com/NixOS/nix.git (synced 2025-11-08 19:46:02 +01:00)

Compare commits: 17 commits (c893454926 ... e59b4c69f1)

Commits: e59b4c69f1, 04edfa34fe, acd9dbfc55, f446cb290c, 436e3e779a, d7464ecd96, 82a736bcbb, e79b1f5384, 044b21fd31, 043eed85da, 15f3abb35e, f53c8b8c90, 0bec9d0716, 34de68d260, 2d4c7d3d03, 9afb7e5082, 2a194aa29e

120 changed files with 2230 additions and 1192 deletions
@@ -34,7 +34,9 @@ mkMesonDerivation (finalAttrs: {
      (fileset.unions [
        ../../.version
        # For example JSON
        ../../src/libutil-tests/data/memory-source-accessor
        ../../src/libutil-tests/data/hash
        ../../src/libstore-tests/data/content-address
        # Too many different types of files to filter for now
        ../../doc/manual
        ./.
@@ -117,7 +117,9 @@
- [Architecture and Design](architecture/architecture.md)
- [Formats and Protocols](protocols/index.md)
  - [JSON Formats](protocols/json/index.md)
    - [File System Object](protocols/json/file-system-object.md)
    - [Hash](protocols/json/hash.md)
    - [Pseudo Content Address](protocols/json/content-address.md)
    - [Store Object Info](protocols/json/store-object-info.md)
    - [Derivation](protocols/json/derivation.md)
  - [Serving Tarball Flakes](protocols/tarball-fetcher.md)
doc/manual/source/protocols/json/content-address.md (new file, 21 lines)
@@ -0,0 +1,21 @@
{{#include content-address-v1-fixed.md}}

## Examples

### [Text](@docroot@/store/store-object/content-address.md#method-text) method

```json
{{#include schema/content-address-v1/text.json}}
```

### [Nix Archive](@docroot@/store/store-object/content-address.md#method-nix-archive) method

```json
{{#include schema/content-address-v1/nar.json}}
```

<!-- need to convert YAML to JSON first
## Raw Schema

[JSON Schema for Content Address v1](schema/content-address-v1.json)
-->
@@ -1,7 +1,7 @@
- {{#include derivation-v3-fixed.md}}
+ {{#include derivation-v4-fixed.md}}

<!--
## Raw Schema

- [JSON Schema for Derivation v3](schema/derivation-v3.json)
+ [JSON Schema for Derivation v4](schema/derivation-v4.json)
-->
doc/manual/source/protocols/json/file-system-object.md (new file, 21 lines)
@@ -0,0 +1,21 @@
{{#include file-system-object-v1-fixed.md}}

## Examples

### Simple

```json
{{#include schema/file-system-object-v1/simple.json}}
```

### Complex

```json
{{#include schema/file-system-object-v1/complex.json}}
```

<!--
## Raw Schema

[JSON Schema for File System Object v1](schema/file-system-object-v1.json)
-->
@@ -11,4 +11,8 @@ s/\\`/`/g
#
# As we have more such relative links, more replacements of this nature
# should appear below.
s^#/\$defs/\(regular\|symlink\|directory\)^In this schema^g
s^\(./hash-v1.yaml\)\?#/$defs/algorithm^[JSON format for `Hash`](./hash.html#algorithm)^g
s^\(./hash-v1.yaml\)^[JSON format for `Hash`](./hash.html)^g
s^\(./content-address-v1.yaml\)\?#/$defs/method^[JSON format for `ContentAddress`](./content-address.html#method)^g
s^\(./content-address-v1.yaml\)^[JSON format for `ContentAddress`](./content-address.html)^g
@@ -9,8 +9,10 @@ json_schema_for_humans = find_program('generate-schema-doc', required : false)
json_schema_config = files('json-schema-for-humans-config.yaml')

schemas = [
  'file-system-object-v1',
  'hash-v1',
  'derivation-v3',
  'content-address-v1',
  'derivation-v4',
]

schema_files = files()
doc/manual/source/protocols/json/schema/content-address-v1 (new symbolic link)
@@ -0,0 +1 @@
../../../../../../src/libstore-tests/data/content-address
@@ -0,0 +1,51 @@
"$schema": http://json-schema.org/draft-04/schema#
"$id": https://nix.dev/manual/nix/latest/protocols/json/schema/content-address-v1.json
title: Pseudo Content Address
description: |
  This schema describes the JSON representation of Nix's `ContentAddress` type.

  This data type is not as simple as it looks, and is therefore called a *pseudo* content address.
  See the description of the `hash` field for why this is.

  When creating the store path of a content-addressed store object, the `hash` from this data type is combined with the references of the store object, in order to create a content address that is properly sensitive to the entirety of the store object: file system objects and references alike.
  It is therefore that store path which is arguably the *true* content address of a content-addressed store path, not this data type.

  The only problem is that store paths are truncated in various ways from the underlying hashes, so their cryptographic strength can reasonably be doubted.
  Given that uncertain cryptographic strength, something else is needed.
  Currently, this data type in conjunction with the reference list captures all the content securely, and more concisely.
  (Though the reference list is still arbitrarily sized, in practice it is presumably far smaller than the file system object data.)

  > **Note**
  >
  > A hypothetical hash over both of those which is *not* so truncated would be even better, as it would be secure and fixed-size.
  > A hypothetical new method of content-addressing store objects, where the store path is computed just from such a hash, so that we don't need to "dereference" the hash to get the underlying reference list and rehash it, would be better still.
type: object
properties:
  method:
    "$ref": "#/$defs/method"
  hash:
    title: Content Address
    description: |
      This would be the content address itself.

      For all current methods, this is just a content address of the file system object of the store object, [as described in the store chapter](@docroot@/store/store-object/content-address.md), and not of the store object as a whole.
      In particular, the references of the store object are *not* taken into account by this hash (with the currently supported methods).
    "$ref": "./hash-v1.yaml"
required:
  - method
  - hash
additionalProperties: false
"$defs":
  method:
    type: string
    enum: [flat, nar, text, git]
    title: Content-Addressing Method
    description: |
      A string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen.

      Valid method strings are:

      - [`flat`](@docroot@/store/store-object/content-address.md#method-flat) (provided the contents are a single file)
      - [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive)
      - [`text`](@docroot@/store/store-object/content-address.md#method-text)
      - [`git`](@docroot@/store/store-object/content-address.md#method-git)
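As a quick orientation, an instance of this schema for the `flat` method would look like the following sketch. The hash value is a placeholder copied from the `text` example elsewhere in this change, not a real flat hash of anything.

```json
{
  "hash": {
    "algorithm": "sha256",
    "format": "base64",
    "hash": "8OTC92xYkW7CWPJGhRvqCR0U1CR6L8PhhpRGGxgW4Ts="
  },
  "method": "flat"
}
```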
@@ -1,5 +1,5 @@
"$schema": http://json-schema.org/draft-04/schema#
- "$id": https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-v3.json
+ "$id": https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-v4.json
title: Derivation
description: |
  Experimental JSON representation of a Nix derivation (version 3).
@@ -32,10 +32,10 @@ properties:
      Used when calculating store paths for the derivation’s outputs.

  version:
-   const: 3
+   const: 4
    title: Format version (must be 4)
    description: |
-     Must be `3`.
+     Must be `4`.
      This is a guard that allows us to continue evolving this format.
      The choice of `3` is fairly arbitrary, but corresponds to this informal version:
@@ -47,6 +47,8 @@ properties:

      - Version 3: Drop store dir from store paths, just include base name.

+     - Version 4: Use canonical content address JSON format for floating content addressed derivation outputs.

      Note that while this format is experimental, the maintenance of versions is best-effort, and not promised to identify every change.

  outputs:
@@ -154,25 +156,18 @@ properties:
          The output path, if known in advance.

      method:
        type: string
        title: Content addressing method
        enum: [flat, nar, text, git]
+       "$ref": "./content-address-v1.yaml#/$defs/method"
        description: |
          For an output which will be [content addressed](@docroot@/store/derivation/outputs/content-address.md), a string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen.

          Valid method strings are:

          - [`flat`](@docroot@/store/store-object/content-address.md#method-flat)
          - [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive)
          - [`text`](@docroot@/store/store-object/content-address.md#method-text)
          - [`git`](@docroot@/store/store-object/content-address.md#method-git)

          See the linked original definition for further details.
      hashAlgo:
        title: Hash algorithm
        "$ref": "./hash-v1.yaml#/$defs/algorithm"
        description: |
          For an output which will be [content addressed], but the content address is not specified up front, the name of the hash algorithm used. When the content address is fixed, use `hash.hashAlgo` instead.

      hash:
        type: string
        title: Expected hash value
        description: |
-         For fixed-output derivations, the expected content hash in base-16.
+         For fixed-output derivations, the expected content hash.
+       "$ref": "./hash-v1.yaml"
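For a concrete picture of what this means for version 4, the sketch below mirrors the `output-caFixedNAR.json` test data updated later in this change: a fixed content-addressed output now embeds the canonical content-address object (a nested `hash` plus `method`) instead of flat `hash`/`hashAlgo` string fields.

```json
{
  "hash": {
    "algorithm": "sha256",
    "format": "base64",
    "hash": "iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="
  },
  "method": "nar"
}
```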
doc/manual/source/protocols/json/schema/file-system-object-v1 (new symbolic link)
@@ -0,0 +1 @@
../../../../../../src/libutil-tests/data/memory-source-accessor
@@ -0,0 +1,65 @@
"$schema": http://json-schema.org/draft-04/schema#
"$id": https://nix.dev/manual/nix/latest/protocols/json/schema/file-system-object-v1.json
title: File System Object
description: |
  This schema describes the JSON representation of Nix's [File System Object](@docroot@/store/file-system-object.md).

  The schema is recursive because file system objects contain other file system objects.
type: object
required: ["type"]
properties:
  type:
    type: string
    enum: ["regular", "symlink", "directory"]

# Enforce conditional structure based on `type`
anyOf:
  - $ref: "#/$defs/regular"
    required: ["type", "contents"]

  - $ref: "#/$defs/symlink"
    required: ["type", "target"]

  - $ref: "#/$defs/directory"
    required: ["type", "contents"]

"$defs":
  regular:
    title: Regular File
    required: ["contents"]
    properties:
      type:
        const: "regular"
      contents:
        type: string
        description: Base64-encoded file contents
      executable:
        type: boolean
        description: Whether the file is executable.
        default: false
    additionalProperties: false

  symlink:
    title: Symbolic Link
    required: ["target"]
    properties:
      type:
        const: "symlink"
      target:
        type: string
        description: Target path of the symlink.
    additionalProperties: false

  directory:
    title: Directory
    required: ["contents"]
    properties:
      type:
        const: "directory"
      contents:
        type: object
        description: |
          Map of names to nested file system objects (for type=directory)
        additionalProperties:
          $ref: "#"
    additionalProperties: false
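To make the recursion concrete, here is a small hand-written sketch of an instance of this schema (not one of the committed `simple.json`/`complex.json` examples): a directory containing one regular file and one symlink, with the file contents base64-encoded as the schema describes.

```json
{
  "type": "directory",
  "contents": {
    "hello.txt": {
      "type": "regular",
      "executable": false,
      "contents": "aGVsbG8K"
    },
    "hello-link": {
      "type": "symlink",
      "target": "hello.txt"
    }
  }
}
```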
src/json-schema-checks/content-address (new symbolic link)
@@ -0,0 +1 @@
../../src/libstore-tests/data/content-address

src/json-schema-checks/file-system-object (new symbolic link)
@@ -0,0 +1 @@
../../src/libutil-tests/data/memory-source-accessor
@@ -20,6 +20,14 @@ schema_dir = meson.current_source_dir() / 'schema'

# Get all example files
schemas = [
  {
    'stem' : 'file-system-object',
    'schema' : schema_dir / 'file-system-object-v1.yaml',
    'files' : [
      'simple.json',
      'complex.json',
    ],
  },
  {
    'stem' : 'hash',
    'schema' : schema_dir / 'hash-v1.yaml',

@@ -30,9 +38,17 @@ schemas = [
      'blake3-base64.json',
    ],
  },
  {
    'stem' : 'content-address',
    'schema' : schema_dir / 'content-address-v1.yaml',
    'files' : [
      'text.json',
      'nar.json',
    ],
  },
  {
    'stem' : 'derivation',
-   'schema' : schema_dir / 'derivation-v3.yaml',
+   'schema' : schema_dir / 'derivation-v4.yaml',
    'files' : [
      'dyn-dep-derivation.json',
      'simple-derivation.json',

@@ -41,7 +57,7 @@ schemas = [
# # Not sure how to make subschema work
# {
#     'stem': 'derivation',
-#     'schema': schema_dir / 'derivation-v3.yaml#output',
+#     'schema': schema_dir / 'derivation-v4.yaml#output',
#     'files' : [
#         'output-caFixedFlat.json',
#         'output-caFixedNAR.json',

@@ -64,8 +80,6 @@ foreach schema : schemas
    stem + '-schema-valid',
    jv,
    args : [
      '--map',
      './hash-v1.yaml=' + schema_dir / 'hash-v1.yaml',
      'http://json-schema.org/draft-04/schema',
      schema_file,
    ],
@@ -20,7 +20,9 @@ mkMesonDerivation (finalAttrs: {
  fileset = lib.fileset.unions [
    ../../.version
    ../../doc/manual/source/protocols/json/schema
    ../../src/libutil-tests/data/memory-source-accessor
    ../../src/libutil-tests/data/hash
    ../../src/libstore-tests/data/content-address
    ../../src/libstore-tests/data/derivation
    ./.
  ];
@@ -108,20 +108,16 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
        overloaded{
            [&](const BuiltPath::Opaque & p) { res.insert(p.path); },
            [&](const BuiltPath::Built & p) {
-               auto drvHashes = staticOutputHashes(store, store.readDerivation(p.drvPath->outPath()));
                for (auto & [outputName, outputPath] : p.outputs) {
                    if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
-                       auto drvOutput = get(drvHashes, outputName);
-                       if (!drvOutput)
-                           throw Error(
-                               "the derivation '%s' has unrealised output '%s' (derived-path.cc/toRealisedPaths)",
-                               store.printStorePath(p.drvPath->outPath()),
-                               outputName);
-                       DrvOutput key{*drvOutput, outputName};
+                       DrvOutput key{
+                           .drvPath = p.drvPath->outPath(),
+                           .outputName = outputName,
+                       };
                        auto thisRealisation = store.queryRealisation(key);
-                       assert(thisRealisation); // We’ve built it, so we must
-                                                // have the realisation
-                       res.insert(Realisation{*thisRealisation, std::move(key)});
+                       // We’ve built it, so we must have the realisation.
+                       assert(thisRealisation);
+                       res.insert(Realisation{*thisRealisation, key});
                    } else {
                        res.insert(outputPath);
                    }
@ -1718,28 +1718,7 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName
|
|||
drv.outputs.insert_or_assign(i, DerivationOutput::Deferred{});
|
||||
}
|
||||
|
||||
auto hashModulo = hashDerivationModulo(*state.store, Derivation(drv), true);
|
||||
switch (hashModulo.kind) {
|
||||
case DrvHash::Kind::Regular:
|
||||
for (auto & i : outputs) {
|
||||
auto h = get(hashModulo.hashes, i);
|
||||
if (!h)
|
||||
state.error<AssertionError>("derivation produced no hash for output '%s'", i).atPos(v).debugThrow();
|
||||
auto outPath = state.store->makeOutputPath(i, *h, drvName);
|
||||
drv.env[i] = state.store->printStorePath(outPath);
|
||||
drv.outputs.insert_or_assign(
|
||||
i,
|
||||
DerivationOutput::InputAddressed{
|
||||
.path = std::move(outPath),
|
||||
});
|
||||
}
|
||||
break;
|
||||
;
|
||||
case DrvHash::Kind::Deferred:
|
||||
for (auto & i : outputs) {
|
||||
drv.outputs.insert_or_assign(i, DerivationOutput::Deferred{});
|
||||
}
|
||||
}
|
||||
resolveInputAddressed(*state.store, drv);
|
||||
}
|
||||
|
||||
/* Write the resulting term into the Nix store directory. */
|
||||
|
|
|
|||
|
|
@ -69,14 +69,20 @@ public:
|
|||
}
|
||||
};
|
||||
|
||||
#define VERSIONED_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE) \
|
||||
TEST_F(FIXTURE, NAME##_read) \
|
||||
{ \
|
||||
readProtoTest(STEM, VERSION, VALUE); \
|
||||
} \
|
||||
TEST_F(FIXTURE, NAME##_write) \
|
||||
{ \
|
||||
writeProtoTest(STEM, VERSION, VALUE); \
|
||||
#define VERSIONED_READ_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE) \
|
||||
TEST_F(FIXTURE, NAME##_read) \
|
||||
{ \
|
||||
readProtoTest(STEM, VERSION, VALUE); \
|
||||
}
|
||||
|
||||
#define VERSIONED_WRITE_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE) \
|
||||
TEST_F(FIXTURE, NAME##_write) \
|
||||
{ \
|
||||
writeProtoTest(STEM, VERSION, VALUE); \
|
||||
}
|
||||
|
||||
#define VERSIONED_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE) \
|
||||
VERSIONED_READ_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE) \
|
||||
VERSIONED_WRITE_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE)
|
||||
|
||||
} // namespace nix
|
||||
|
|
|
|||
|
|
@ -46,16 +46,22 @@ public:
|
|||
}
|
||||
};
|
||||
|
||||
#define CHARACTERIZATION_TEST(NAME, STEM, VALUE) \
|
||||
TEST_F(CommonProtoTest, NAME##_read) \
|
||||
{ \
|
||||
readProtoTest(STEM, VALUE); \
|
||||
} \
|
||||
TEST_F(CommonProtoTest, NAME##_write) \
|
||||
{ \
|
||||
writeProtoTest(STEM, VALUE); \
|
||||
#define READ_CHARACTERIZATION_TEST(NAME, STEM, VALUE) \
|
||||
TEST_F(CommonProtoTest, NAME##_read) \
|
||||
{ \
|
||||
readProtoTest(STEM, VALUE); \
|
||||
}
|
||||
|
||||
#define WRITE_CHARACTERIZATION_TEST(NAME, STEM, VALUE) \
|
||||
TEST_F(CommonProtoTest, NAME##_write) \
|
||||
{ \
|
||||
writeProtoTest(STEM, VALUE); \
|
||||
}
|
||||
|
||||
#define CHARACTERIZATION_TEST(NAME, STEM, VALUE) \
|
||||
READ_CHARACTERIZATION_TEST(NAME, STEM, VALUE) \
|
||||
WRITE_CHARACTERIZATION_TEST(NAME, STEM, VALUE)
|
||||
|
||||
CHARACTERIZATION_TEST(
|
||||
string,
|
||||
"string",
|
||||
|
|
@ -93,56 +99,6 @@ CHARACTERIZATION_TEST(
|
|||
},
|
||||
}))
|
||||
|
||||
CHARACTERIZATION_TEST(
|
||||
drvOutput,
|
||||
"drv-output",
|
||||
(std::tuple<DrvOutput, DrvOutput>{
|
||||
{
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "quux",
|
||||
},
|
||||
}))
|
||||
|
||||
CHARACTERIZATION_TEST(
|
||||
realisation,
|
||||
"realisation",
|
||||
(std::tuple<Realisation, Realisation>{
|
||||
Realisation{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
.signatures = {"asdf", "qwer"},
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
},
|
||||
Realisation{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
.signatures = {"asdf", "qwer"},
|
||||
.dependentRealisations =
|
||||
{
|
||||
{
|
||||
DrvOutput{
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "quux",
|
||||
},
|
||||
StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
},
|
||||
}))
|
||||
|
||||
CHARACTERIZATION_TEST(
|
||||
vector,
|
||||
"vector",
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
#include <gtest/gtest.h>
|
||||
|
||||
#include "nix/store/content-address.hh"
|
||||
#include "nix/util/tests/json-characterization.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
@ -8,33 +9,93 @@ namespace nix {
|
|||
* ContentAddressMethod::parse, ContentAddressMethod::render
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(ContentAddressMethod, testRoundTripPrintParse_1)
|
||||
static auto methods = ::testing::Values(
|
||||
std::pair{ContentAddressMethod::Raw::Text, "text"},
|
||||
std::pair{ContentAddressMethod::Raw::Flat, "flat"},
|
||||
std::pair{ContentAddressMethod::Raw::NixArchive, "nar"},
|
||||
std::pair{ContentAddressMethod::Raw::Git, "git"});
|
||||
|
||||
struct ContentAddressMethodTest : ::testing::Test,
|
||||
::testing::WithParamInterface<std::pair<ContentAddressMethod, std::string_view>>
|
||||
{};
|
||||
|
||||
TEST_P(ContentAddressMethodTest, testRoundTripPrintParse_1)
|
||||
{
|
||||
for (ContentAddressMethod cam : {
|
||||
ContentAddressMethod::Raw::Text,
|
||||
ContentAddressMethod::Raw::Flat,
|
||||
ContentAddressMethod::Raw::NixArchive,
|
||||
ContentAddressMethod::Raw::Git,
|
||||
}) {
|
||||
EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam);
|
||||
}
|
||||
auto & [cam, _] = GetParam();
|
||||
EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam);
|
||||
}
|
||||
|
||||
TEST(ContentAddressMethod, testRoundTripPrintParse_2)
|
||||
TEST_P(ContentAddressMethodTest, testRoundTripPrintParse_2)
|
||||
{
|
||||
for (const std::string_view camS : {
|
||||
"text",
|
||||
"flat",
|
||||
"nar",
|
||||
"git",
|
||||
}) {
|
||||
EXPECT_EQ(ContentAddressMethod::parse(camS).render(), camS);
|
||||
}
|
||||
auto & [cam, camS] = GetParam();
|
||||
EXPECT_EQ(ContentAddressMethod::parse(camS).render(), camS);
|
||||
}
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(ContentAddressMethod, ContentAddressMethodTest, methods);
|
||||
|
||||
TEST(ContentAddressMethod, testParseContentAddressMethodOptException)
|
||||
{
|
||||
EXPECT_THROW(ContentAddressMethod::parse("narwhal"), UsageError);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* JSON
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
class ContentAddressTest : public virtual CharacterizationTest
|
||||
{
|
||||
std::filesystem::path unitTestData = getUnitTestData() / "content-address";
|
||||
|
||||
public:
|
||||
|
||||
/**
|
||||
* We set these in tests rather than the regular globals so we don't have
|
||||
* to worry about race conditions if the tests run concurrently.
|
||||
*/
|
||||
ExperimentalFeatureSettings mockXpSettings;
|
||||
|
||||
std::filesystem::path goldenMaster(std::string_view testStem) const override
|
||||
{
|
||||
return unitTestData / testStem;
|
||||
}
|
||||
};
|
||||
|
||||
using nlohmann::json;
|
||||
|
||||
struct ContentAddressJsonTest : ContentAddressTest,
|
||||
JsonCharacterizationTest<ContentAddress>,
|
||||
::testing::WithParamInterface<std::pair<std::string_view, ContentAddress>>
|
||||
{};
|
||||
|
||||
TEST_P(ContentAddressJsonTest, from_json)
|
||||
{
|
||||
auto & [name, expected] = GetParam();
|
||||
readJsonTest(name, expected);
|
||||
}
|
||||
|
||||
TEST_P(ContentAddressJsonTest, to_json)
|
||||
{
|
||||
auto & [name, value] = GetParam();
|
||||
writeJsonTest(name, value);
|
||||
}
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
ContentAddressJSON,
|
||||
ContentAddressJsonTest,
|
||||
::testing::Values(
|
||||
std::pair{
|
||||
"text",
|
||||
ContentAddress{
|
||||
.method = ContentAddressMethod::Raw::Text,
|
||||
.hash = hashString(HashAlgorithm::SHA256, "asdf"),
|
||||
},
|
||||
},
|
||||
std::pair{
|
||||
"nar",
|
||||
ContentAddress{
|
||||
.method = ContentAddressMethod::Raw::NixArchive,
|
||||
.hash = hashString(HashAlgorithm::SHA256, "qwer"),
|
||||
},
|
||||
}));
|
||||
|
||||
} // namespace nix
|
||||
|
|
|
|||
Binary files not shown.

src/libstore-tests/data/content-address/nar.json (new file, 8 lines)
@@ -0,0 +1,8 @@
{
  "hash": {
    "algorithm": "sha256",
    "format": "base64",
    "hash": "9vLqj0XYoFfJVmoz+ZR02i5camYE1zYSFlDicwxvsKM="
  },
  "method": "nar"
}

src/libstore-tests/data/content-address/text.json (new file, 8 lines)
@@ -0,0 +1,8 @@
{
  "hash": {
    "algorithm": "sha256",
    "format": "base64",
    "hash": "8OTC92xYkW7CWPJGhRvqCR0U1CR6L8PhhpRGGxgW4Ts="
  },
  "method": "text"
}
@@ -22,5 +22,5 @@
    }
  },
  "system": "my-system",
- "version": 3
+ "version": 4
}

@@ -33,5 +33,5 @@
    "system": "my-system"
  },
  "system": "my-system",
- "version": 3
+ "version": 4
}

@@ -101,5 +101,5 @@
    "system": "my-system"
  },
  "system": "my-system",
- "version": 3
+ "version": 4
}

@@ -52,5 +52,5 @@
    }
  },
  "system": "my-system",
- "version": 3
+ "version": 4
}

@@ -20,5 +20,5 @@
    }
  },
  "system": "x86_64-linux",
- "version": 3
+ "version": 4
}

@@ -35,5 +35,5 @@
  "name": "dyn-dep-derivation",
  "outputs": {},
  "system": "wasm-sel4",
- "version": 3
+ "version": 4
}

@@ -19,5 +19,5 @@
    }
  },
  "system": "my-system",
- "version": 3
+ "version": 4
}

@@ -29,5 +29,5 @@
    "system": "my-system"
  },
  "system": "my-system",
- "version": 3
+ "version": 4
}

@@ -96,5 +96,5 @@
    "system": "my-system"
  },
  "system": "my-system",
- "version": 3
+ "version": 4
}

@@ -49,5 +49,5 @@
    }
  },
  "system": "my-system",
- "version": 3
+ "version": 4
}
@@ -1,5 +1,8 @@
{
- "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f",
- "hashAlgo": "sha256",
+ "hash": {
+   "algorithm": "sha256",
+   "format": "base64",
+   "hash": "iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="
+ },
  "method": "flat"
}

@@ -1,5 +1,8 @@
{
- "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f",
- "hashAlgo": "sha256",
+ "hash": {
+   "algorithm": "sha256",
+   "format": "base64",
+   "hash": "iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="
+ },
  "method": "nar"
}

@@ -1,5 +1,8 @@
{
- "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f",
- "hashAlgo": "sha256",
+ "hash": {
+   "algorithm": "sha256",
+   "format": "base64",
+   "hash": "iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="
+ },
  "method": "text"
}

@@ -22,5 +22,5 @@
  "name": "simple-derivation",
  "outputs": {},
  "system": "wasm-sel4",
- "version": 3
+ "version": 4
}
src/libstore-tests/data/dummy-store/empty.json (new file, 6 lines)
@@ -0,0 +1,6 @@
{
  "build-trace": {},
  "contents": {},
  "derivations": {},
  "store-dir": "/nix/store"
}

src/libstore-tests/data/dummy-store/one-derivation.json (new file, 18 lines)
@@ -0,0 +1,18 @@
{
  "build-trace": {},
  "contents": {},
  "derivations": {
    "rlqjbbb65ggcx9hy577hvnn929wz1aj0-foo.drv": {
      "args": [],
      "builder": "",
      "env": {},
      "inputDrvs": {},
      "inputSrcs": [],
      "name": "foo",
      "outputs": {},
      "system": "",
      "version": 4
    }
  },
  "store-dir": "/nix/store"
}

src/libstore-tests/data/dummy-store/one-flat-file.json (new file, 35 lines)
@@ -0,0 +1,35 @@
{
  "build-trace": {},
  "contents": {
    "5hizn7xyyrhxr0k2magvxl5ccvk0ci9n-my-file": {
      "contents": {
        "contents": "asdf",
        "executable": false,
        "type": "regular"
      },
      "info": {
        "ca": {
          "hash": {
            "algorithm": "sha256",
            "format": "base64",
            "hash": "f1eduuSIYC1BofXA1tycF79Ai2NSMJQtUErx5DxLYSU="
          },
          "method": "nar"
        },
        "deriver": null,
        "narHash": {
          "algorithm": "sha256",
          "format": "base64",
          "hash": "f1eduuSIYC1BofXA1tycF79Ai2NSMJQtUErx5DxLYSU="
        },
        "narSize": 120,
        "references": [],
        "registrationTime": null,
        "signatures": [],
        "ultimate": false
      }
    }
  },
  "derivations": {},
  "store-dir": "/nix/store"
}
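For context on the `build-trace` field left empty in the fixtures above: judging from the dummy-store and realisation changes later in this diff, a non-empty build trace would presumably be keyed first by derivation store path and then by output name, with each value shaped like an unkeyed realisation (`outPath` plus `signatures`). The sketch below illustrates that assumption only; it is not a committed fixture and the exact nesting is a guess.

```json
{
  "build-trace": {
    "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv": {
      "foo": {
        "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo",
        "signatures": []
      }
    }
  },
  "contents": {},
  "derivations": {},
  "store-dir": "/nix/store"
}
```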
@@ -1,6 +1,10 @@
{
- "dependentRealisations": {},
- "id": "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad!foo",
- "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv",
- "signatures": []
+ "key": {
+   "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
+   "outputName": "foo"
+ },
+ "value": {
+   "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo",
+   "signatures": []
+ }
}

@@ -1,8 +1,12 @@
{
- "dependentRealisations": {},
- "id": "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad!foo",
- "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv",
- "signatures": [
-   "asdfasdfasdf"
- ]
+ "key": {
+   "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
+   "outputName": "foo"
+ },
+ "value": {
+   "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo",
+   "signatures": [
+     "asdfasdfasdf"
+   ]
+ }
}
New binary files (contents not shown):
- src/libstore-tests/data/serve-protocol/build-result-2.8.bin
- src/libstore-tests/data/serve-protocol/drv-output-2.8.bin
- src/libstore-tests/data/serve-protocol/realisation-2.8.bin
- src/libstore-tests/data/serve-protocol/realisation-with-deps.bin
- src/libstore-tests/data/worker-protocol/build-result-1.39.bin
- src/libstore-tests/data/worker-protocol/drv-output-1.39.bin
- src/libstore-tests/data/worker-protocol/realisation-1.39.bin

Several other binary files changed (not shown).
@ -1,11 +1,33 @@
|
|||
#include <gtest/gtest.h>
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
#include "nix/util/bytes.hh"
|
||||
#include "nix/util/memory-source-accessor.hh"
|
||||
#include "nix/store/dummy-store-impl.hh"
|
||||
#include "nix/store/globals.hh"
|
||||
#include "nix/store/realisation.hh"
|
||||
|
||||
#include "nix/util/tests/json-characterization.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
class DummyStoreTest : public virtual CharacterizationTest
|
||||
{
|
||||
std::filesystem::path unitTestData = getUnitTestData() / "dummy-store";
|
||||
|
||||
public:
|
||||
|
||||
std::filesystem::path goldenMaster(std::string_view testStem) const override
|
||||
{
|
||||
return unitTestData / testStem;
|
||||
}
|
||||
|
||||
static void SetUpTestSuite()
|
||||
{
|
||||
initLibStore(false);
|
||||
}
|
||||
};
|
||||
|
||||
TEST(DummyStore, realisation_read)
|
||||
{
|
||||
initLibStore(/*loadConfig=*/false);
|
||||
|
|
@ -16,23 +38,94 @@ TEST(DummyStore, realisation_read)
|
|||
return cfg->openDummyStore();
|
||||
}();
|
||||
|
||||
auto drvHash = Hash::parseExplicitFormatUnprefixed(
|
||||
"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", HashAlgorithm::SHA256, HashFormat::Base16);
|
||||
StorePath drvPath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv"};
|
||||
|
||||
auto outputName = "foo";
|
||||
|
||||
EXPECT_EQ(store->queryRealisation({drvHash, outputName}), nullptr);
|
||||
EXPECT_EQ(store->queryRealisation({drvPath, outputName}), nullptr);
|
||||
|
||||
UnkeyedRealisation value{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"},
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
};
|
||||
|
||||
store->buildTrace.insert({drvHash, {{outputName, make_ref<UnkeyedRealisation>(value)}}});
|
||||
store->buildTrace.insert({drvPath, {{outputName, make_ref<UnkeyedRealisation>(value)}}});
|
||||
|
||||
auto value2 = store->queryRealisation({drvHash, outputName});
|
||||
auto value2 = store->queryRealisation({drvPath, outputName});
|
||||
|
||||
ASSERT_TRUE(value2);
|
||||
EXPECT_EQ(*value2, value);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* JSON
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
using nlohmann::json;
|
||||
|
||||
struct DummyStoreJsonTest : DummyStoreTest,
|
||||
JsonCharacterizationTest<ref<DummyStore>>,
|
||||
::testing::WithParamInterface<std::pair<std::string_view, ref<DummyStore>>>
|
||||
{};
|
||||
|
||||
TEST_P(DummyStoreJsonTest, from_json)
|
||||
{
|
||||
auto & [name, expected] = GetParam();
|
||||
using namespace nlohmann;
|
||||
/* Cannot use `readJsonTest` because need to dereference the stores
|
||||
for equality. */
|
||||
readTest(Path{name} + ".json", [&](const auto & encodedRaw) {
|
||||
auto encoded = json::parse(encodedRaw);
|
||||
ref<DummyStore> decoded = adl_serializer<ref<DummyStore>>::from_json(encoded);
|
||||
ASSERT_EQ(*decoded, *expected);
|
||||
});
|
||||
}
|
||||
|
||||
TEST_P(DummyStoreJsonTest, to_json)
|
||||
{
|
||||
auto & [name, value] = GetParam();
|
||||
writeJsonTest(name, value);
|
||||
}
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(DummyStoreJSON, DummyStoreJsonTest, [] {
|
||||
initLibStore(false);
|
||||
auto writeCfg = make_ref<DummyStore::Config>(DummyStore::Config::Params{});
|
||||
writeCfg->readOnly = false;
|
||||
return ::testing::Values(
|
||||
std::pair{
|
||||
"empty",
|
||||
make_ref<DummyStore::Config>(DummyStore::Config::Params{})->openDummyStore(),
|
||||
},
|
||||
std::pair{
|
||||
"one-flat-file",
|
||||
[&] {
|
||||
auto store = writeCfg->openDummyStore();
|
||||
store->addToStore(
|
||||
"my-file",
|
||||
SourcePath{
|
||||
[] {
|
||||
auto sc = make_ref<MemorySourceAccessor>();
|
||||
sc->root = MemorySourceAccessor::File{MemorySourceAccessor::File::Regular{
|
||||
.executable = false,
|
||||
.contents = to_owned(as_bytes("asdf")),
|
||||
}};
|
||||
return sc;
|
||||
}(),
|
||||
},
|
||||
ContentAddressMethod::Raw::NixArchive,
|
||||
HashAlgorithm::SHA256);
|
||||
return store;
|
||||
}(),
|
||||
},
|
||||
std::pair{
|
||||
"one-derivation",
|
||||
[&] {
|
||||
auto store = writeCfg->openDummyStore();
|
||||
Derivation drv;
|
||||
drv.name = "foo";
|
||||
store->writeDerivation(drv);
|
||||
return store;
|
||||
}(),
|
||||
});
|
||||
}());
|
||||
|
||||
} // namespace nix
|
||||
|
|
|
|||
|
|
@ -59,24 +59,24 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo)
|
|||
return info;
|
||||
}
|
||||
|
||||
#define JSON_TEST(STEM, PURE) \
|
||||
TEST_F(NarInfoTest, NarInfo_##STEM##_from_json) \
|
||||
{ \
|
||||
readTest(#STEM, [&](const auto & encoded_) { \
|
||||
auto encoded = json::parse(encoded_); \
|
||||
auto expected = makeNarInfo(*store, PURE); \
|
||||
NarInfo got = NarInfo::fromJSON(*store, expected.path, encoded); \
|
||||
ASSERT_EQ(got, expected); \
|
||||
}); \
|
||||
} \
|
||||
\
|
||||
TEST_F(NarInfoTest, NarInfo_##STEM##_to_json) \
|
||||
{ \
|
||||
writeTest( \
|
||||
#STEM, \
|
||||
[&]() -> json { return makeNarInfo(*store, PURE).toJSON(*store, PURE, HashFormat::SRI); }, \
|
||||
[](const auto & file) { return json::parse(readFile(file)); }, \
|
||||
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
|
||||
#define JSON_TEST(STEM, PURE) \
|
||||
TEST_F(NarInfoTest, NarInfo_##STEM##_from_json) \
|
||||
{ \
|
||||
readTest(#STEM, [&](const auto & encoded_) { \
|
||||
auto encoded = json::parse(encoded_); \
|
||||
auto expected = makeNarInfo(*store, PURE); \
|
||||
auto got = UnkeyedNarInfo::fromJSON(&*store, encoded); \
|
||||
ASSERT_EQ(got, expected); \
|
||||
}); \
|
||||
} \
|
||||
\
|
||||
TEST_F(NarInfoTest, NarInfo_##STEM##_to_json) \
|
||||
{ \
|
||||
writeTest( \
|
||||
#STEM, \
|
||||
[&]() -> json { return makeNarInfo(*store, PURE).toJSON(&*store, PURE); }, \
|
||||
[](const auto & file) { return json::parse(readFile(file)); }, \
|
||||
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
|
||||
}
|
||||
|
||||
JSON_TEST(pure, false)
|
||||
|
|
|
|||
|
|
@ -70,7 +70,7 @@ static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo
|
|||
{ \
|
||||
readTest(#STEM, [&](const auto & encoded_) { \
|
||||
auto encoded = json::parse(encoded_); \
|
||||
UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(*store, encoded); \
|
||||
UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(&*store, encoded); \
|
||||
auto expected = OBJ; \
|
||||
ASSERT_EQ(got, expected); \
|
||||
}); \
|
||||
|
|
@ -80,7 +80,7 @@ static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo
|
|||
{ \
|
||||
writeTest( \
|
||||
#STEM, \
|
||||
[&]() -> json { return OBJ.toJSON(*store, PURE, HashFormat::SRI); }, \
|
||||
[&]() -> json { return OBJ.toJSON(&*store, PURE); }, \
|
||||
[](const auto & file) { return json::parse(readFile(file)); }, \
|
||||
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
|
||||
}
|
||||
|
|
|
|||
|
|
@ -44,54 +44,30 @@ TEST_P(RealisationJsonTest, to_json)
|
|||
writeJsonTest(name, value);
|
||||
}
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
RealisationJSON,
|
||||
RealisationJsonTest,
|
||||
([] {
|
||||
Realisation simple{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"},
|
||||
},
|
||||
{
|
||||
.drvHash = Hash::parseExplicitFormatUnprefixed(
|
||||
"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad",
|
||||
HashAlgorithm::SHA256,
|
||||
HashFormat::Base16),
|
||||
.outputName = "foo",
|
||||
},
|
||||
};
|
||||
return ::testing::Values(
|
||||
std::pair{
|
||||
"simple",
|
||||
simple,
|
||||
},
|
||||
std::pair{
|
||||
"with-signature",
|
||||
[&] {
|
||||
auto r = simple;
|
||||
// FIXME actually sign properly
|
||||
r.signatures = {"asdfasdfasdf"};
|
||||
return r;
|
||||
}()},
|
||||
std::pair{
|
||||
"with-dependent-realisations",
|
||||
[&] {
|
||||
auto r = simple;
|
||||
r.dependentRealisations = {{
|
||||
{
|
||||
.drvHash = Hash::parseExplicitFormatUnprefixed(
|
||||
"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad",
|
||||
HashAlgorithm::SHA256,
|
||||
HashFormat::Base16),
|
||||
.outputName = "foo",
|
||||
},
|
||||
StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"},
|
||||
}};
|
||||
return r;
|
||||
}(),
|
||||
});
|
||||
}
|
||||
|
||||
()));
|
||||
INSTANTIATE_TEST_SUITE_P(RealisationJSON, RealisationJsonTest, ([] {
|
||||
Realisation simple{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
{
|
||||
.drvPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv"},
|
||||
.outputName = "foo",
|
||||
},
|
||||
};
|
||||
return ::testing::Values(
|
||||
std::pair{
|
||||
"simple",
|
||||
simple,
|
||||
},
|
||||
std::pair{
|
||||
"with-signature",
|
||||
[&] {
|
||||
auto r = simple;
|
||||
// FIXME actually sign properly
|
||||
r.signatures = {"asdfasdfasdf"};
|
||||
return r;
|
||||
}(),
|
||||
});
|
||||
}()));
|
||||
|
||||
} // namespace nix
|
||||
|
|
|
|||
|
|
@ -72,16 +72,16 @@ VERSIONED_CHARACTERIZATION_TEST(
|
|||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
drvOutput,
|
||||
"drv-output",
|
||||
defaultVersion,
|
||||
drvOutput_2_8,
|
||||
"drv-output-2.8",
|
||||
2 << 8 | 8,
|
||||
(std::tuple<DrvOutput, DrvOutput>{
|
||||
{
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.drvPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"},
|
||||
.outputName = "baz",
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.drvPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"},
|
||||
.outputName = "quux",
|
||||
},
|
||||
}))
|
||||
|
|
@ -90,39 +90,27 @@ VERSIONED_CHARACTERIZATION_TEST(
|
|||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
realisation,
|
||||
"realisation",
|
||||
defaultVersion,
|
||||
(std::tuple<Realisation, Realisation>{
|
||||
Realisation{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
.signatures = {"asdf", "qwer"},
|
||||
},
|
||||
{
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
unkeyedRealisation_2_8,
|
||||
"unkeyed-realisation-2.8",
|
||||
2 << 8 | 8,
|
||||
(UnkeyedRealisation{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
.signatures = {"asdf", "qwer"},
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
realisation_2_8,
|
||||
"realisation-2.8",
|
||||
2 << 8 | 8,
|
||||
(Realisation{
|
||||
UnkeyedRealisation{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
.signatures = {"asdf", "qwer"},
|
||||
},
|
||||
Realisation{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
.signatures = {"asdf", "qwer"},
|
||||
.dependentRealisations =
|
||||
{
|
||||
{
|
||||
DrvOutput{
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "quux",
|
||||
},
|
||||
StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
{
|
||||
.drvPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"},
|
||||
.outputName = "baz",
|
||||
},
|
||||
}))
|
||||
|
||||
|
|
@ -172,7 +160,10 @@ VERSIONED_CHARACTERIZATION_TEST(ServeProtoTest, buildResult_2_3, "build-result-2
|
|||
t;
|
||||
}))
|
||||
|
||||
- VERSIONED_CHARACTERIZATION_TEST(
+ /* We now do a lossy read, which does not allow us to faithfully write
+    back, since we changed the data type. We still, however, want to test
+    that this read works, and so for that we have a one-way test. */
+ VERSIONED_READ_CHARACTERIZATION_TEST(
    ServeProtoTest, buildResult_2_6, "build-result-2.6", 2 << 8 | 6, ({
using namespace std::literals::chrono_literals;
|
||||
std::tuple<BuildResult, BuildResult, BuildResult> t{
|
||||
|
|
@ -198,27 +189,65 @@ VERSIONED_CHARACTERIZATION_TEST(
|
|||
{
|
||||
"foo",
|
||||
{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash =
|
||||
Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "foo",
|
||||
},
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"bar",
|
||||
{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"},
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash =
|
||||
Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "bar",
|
||||
},
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}},
|
||||
.timesBuilt = 1,
|
||||
.startTime = 30,
|
||||
.stopTime = 50,
|
||||
#if 0
|
||||
// These fields are not yet serialized.
|
||||
// FIXME Include in next version of protocol or document
|
||||
// why they are skipped.
|
||||
.cpuUser = std::chrono::milliseconds(500s),
|
||||
.cpuSystem = std::chrono::milliseconds(604s),
|
||||
#endif
|
||||
},
|
||||
};
|
||||
t;
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest, buildResult_2_8, "build-result-2.8", 2 << 8 | 8, ({
|
||||
using namespace std::literals::chrono_literals;
|
||||
std::tuple<BuildResult, BuildResult, BuildResult> t{
|
||||
BuildResult{.inner{BuildResult::Failure{
|
||||
.status = BuildResult::Failure::OutputRejected,
|
||||
.errorMsg = "no idea why",
|
||||
}}},
|
||||
BuildResult{
|
||||
.inner{BuildResult::Failure{
|
||||
.status = BuildResult::Failure::NotDeterministic,
|
||||
.errorMsg = "no idea why",
|
||||
.isNonDeterministic = true,
|
||||
}},
|
||||
.timesBuilt = 3,
|
||||
.startTime = 30,
|
||||
.stopTime = 50,
|
||||
},
|
||||
BuildResult{
|
||||
.inner{BuildResult::Success{
|
||||
.status = BuildResult::Success::Built,
|
||||
.builtOutputs =
|
||||
{
|
||||
{
|
||||
"foo",
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"bar",
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
|
|||
|
|
@ -128,54 +128,42 @@ VERSIONED_CHARACTERIZATION_TEST(
|
|||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
drvOutput,
|
||||
"drv-output",
|
||||
defaultVersion,
|
||||
"drv-output-1.39",
|
||||
1 << 8 | 39,
|
||||
(std::tuple<DrvOutput, DrvOutput>{
|
||||
{
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.drvPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"},
|
||||
.outputName = "baz",
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.drvPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"},
|
||||
.outputName = "quux",
|
||||
},
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
realisation,
|
||||
"realisation",
|
||||
defaultVersion,
|
||||
(std::tuple<Realisation, Realisation>{
|
||||
Realisation{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
.signatures = {"asdf", "qwer"},
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
unkeyedRealisation_1_39,
|
||||
"unkeyed-realisation-1.39",
|
||||
1 << 8 | 39,
|
||||
(UnkeyedRealisation{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
.signatures = {"asdf", "qwer"},
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
realisation_1_39,
|
||||
"realisation-1.39",
|
||||
1 << 8 | 39,
|
||||
(Realisation{
|
||||
UnkeyedRealisation{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
.signatures = {"asdf", "qwer"},
|
||||
},
|
||||
Realisation{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
.signatures = {"asdf", "qwer"},
|
||||
.dependentRealisations =
|
||||
{
|
||||
{
|
||||
DrvOutput{
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "quux",
|
||||
},
|
||||
StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
},
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
{
|
||||
.drvPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"},
|
||||
.outputName = "baz",
|
||||
},
|
||||
}))
|
||||
|
||||
|
|
@ -197,7 +185,10 @@ VERSIONED_CHARACTERIZATION_TEST(WorkerProtoTest, buildResult_1_27, "build-result
|
|||
t;
|
||||
}))
|
||||
|
||||
- VERSIONED_CHARACTERIZATION_TEST(
+ /* We now do a lossy read, which does not allow us to faithfully write
+    back, since we changed the data type. We still, however, want to test
+    that this read works, and so for that we have a one-way test. */
+ VERSIONED_READ_CHARACTERIZATION_TEST(
    WorkerProtoTest, buildResult_1_28, "build-result-1.28", 1 << 8 | 28, ({
using namespace std::literals::chrono_literals;
|
||||
std::tuple<BuildResult, BuildResult, BuildResult> t{
|
||||
|
|
@ -216,25 +207,13 @@ VERSIONED_CHARACTERIZATION_TEST(
|
|||
{
|
||||
"foo",
|
||||
{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "foo",
|
||||
},
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"bar",
|
||||
{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"},
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "bar",
|
||||
},
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
@ -243,7 +222,8 @@ VERSIONED_CHARACTERIZATION_TEST(
|
|||
t;
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
// See above note
|
||||
VERSIONED_READ_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest, buildResult_1_29, "build-result-1.29", 1 << 8 | 29, ({
|
||||
using namespace std::literals::chrono_literals;
|
||||
std::tuple<BuildResult, BuildResult, BuildResult> t{
|
||||
|
|
@ -269,27 +249,13 @@ VERSIONED_CHARACTERIZATION_TEST(
|
|||
{
|
||||
"foo",
|
||||
{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash =
|
||||
Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "foo",
|
||||
},
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"bar",
|
||||
{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"},
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash =
|
||||
Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "bar",
|
||||
},
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
@ -302,7 +268,8 @@ VERSIONED_CHARACTERIZATION_TEST(
|
|||
t;
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
// See above note
|
||||
VERSIONED_READ_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest, buildResult_1_37, "build-result-1.37", 1 << 8 | 37, ({
|
||||
using namespace std::literals::chrono_literals;
|
||||
std::tuple<BuildResult, BuildResult, BuildResult> t{
|
||||
|
|
@ -328,27 +295,60 @@ VERSIONED_CHARACTERIZATION_TEST(
|
|||
{
|
||||
"foo",
|
||||
{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash =
|
||||
Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "foo",
|
||||
},
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"bar",
|
||||
{
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"},
|
||||
},
|
||||
DrvOutput{
|
||||
.drvHash =
|
||||
Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "bar",
|
||||
},
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}},
|
||||
.timesBuilt = 1,
|
||||
.startTime = 30,
|
||||
.stopTime = 50,
|
||||
.cpuUser = std::chrono::microseconds(500s),
|
||||
.cpuSystem = std::chrono::microseconds(604s),
|
||||
},
|
||||
};
|
||||
t;
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest, buildResult_1_39, "build-result-1.39", 1 << 8 | 39, ({
|
||||
using namespace std::literals::chrono_literals;
|
||||
std::tuple<BuildResult, BuildResult, BuildResult> t{
|
||||
BuildResult{.inner{BuildResult::Failure{
|
||||
.status = BuildResult::Failure::OutputRejected,
|
||||
.errorMsg = "no idea why",
|
||||
}}},
|
||||
BuildResult{
|
||||
.inner{BuildResult::Failure{
|
||||
.status = BuildResult::Failure::NotDeterministic,
|
||||
.errorMsg = "no idea why",
|
||||
.isNonDeterministic = true,
|
||||
}},
|
||||
.timesBuilt = 3,
|
||||
.startTime = 30,
|
||||
.stopTime = 50,
|
||||
},
|
||||
BuildResult{
|
||||
.inner{BuildResult::Success{
|
||||
.status = BuildResult::Success::Built,
|
||||
.builtOutputs =
|
||||
{
|
||||
{
|
||||
"foo",
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"bar",
|
||||
{
|
||||
.outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
|
|||
|
|
@ -50,8 +50,8 @@ TEST_F(WriteDerivationTest, addToStoreFromDumpCalledOnce)
|
|||
EXPECT_EQ(path1, path2);
|
||||
EXPECT_THAT(
|
||||
[&] { writeDerivation(*store, drv, Repair); },
|
||||
::testing::ThrowsMessage<Error>(testing::HasSubstrIgnoreANSIMatcher(
|
||||
"operation 'addToStoreFromDump' is not supported by store 'dummy://'")));
|
||||
::testing::ThrowsMessage<Error>(
|
||||
testing::HasSubstrIgnoreANSIMatcher("operation 'writeDerivation' is not supported by store 'dummy://'")));
|
||||
}
|
||||
|
||||
} // namespace nix
|
||||
|
|
|
|||
|
|
@ -504,7 +504,7 @@ StorePath BinaryCacheStore::addToStore(
|
|||
|
||||
std::string BinaryCacheStore::makeRealisationPath(const DrvOutput & id)
|
||||
{
|
||||
return realisationsPrefix + "/" + id.to_string() + ".doi";
|
||||
return realisationsPrefix + "/" + id.drvPath.to_string() + "/" + id.outputName + ".doi";
|
||||
}
|
||||
|
||||
void BinaryCacheStore::queryRealisationUncached(
|
||||
|
|
@ -525,7 +525,10 @@ void BinaryCacheStore::queryRealisationUncached(
|
|||
realisation = std::make_shared<const UnkeyedRealisation>(nlohmann::json::parse(*data));
|
||||
} catch (Error & e) {
|
||||
e.addTrace(
|
||||
{}, "while parsing file '%s' as a realisation for key '%s'", outputInfoFilePath, id.to_string());
|
||||
{},
|
||||
"while parsing file '%s' as a build trace value for key '%s'",
|
||||
outputInfoFilePath,
|
||||
id.to_string());
|
||||
throw;
|
||||
}
|
||||
return (*callbackPtr)(std::move(realisation));
|
||||
|
|
@ -541,7 +544,10 @@ void BinaryCacheStore::registerDrvOutput(const Realisation & info)
|
|||
{
|
||||
if (diskCache)
|
||||
diskCache->upsertRealisation(config.getReference().render(/*FIXME withParams=*/false), info);
|
||||
upsertFile(makeRealisationPath(info.id), static_cast<nlohmann::json>(info).dump(), "application/json");
|
||||
upsertFile(
|
||||
makeRealisationPath(info.id),
|
||||
static_cast<nlohmann::json>(static_cast<const UnkeyedRealisation &>(info)).dump(),
|
||||
"application/json");
|
||||
}
|
||||
|
||||
ref<RemoteFSAccessor> BinaryCacheStore::getRemoteFSAccessor(bool requireValidPath)
|
||||
|
|
|
|||
|
|
@@ -212,9 +212,8 @@ Goal::Co DerivationBuildingGoal::tryToBuild()
       given this information by the downstream goal, that cannot happen
       anymore if the downstream goal only cares about one output, but
       we care about all outputs. */
    auto outputHashes = staticOutputHashes(worker.evalStore, *drv);
    for (auto & [outputName, outputHash] : outputHashes) {
        InitialOutput v{.outputHash = outputHash};
    for (auto & [outputName, _] : drv->outputs) {
        InitialOutput v;

        /* TODO we might want to also allow randomizing the paths
           for regular CA derivations, e.g. for sake of checking

@@ -1096,7 +1095,7 @@ DerivationBuildingGoal::checkPathValidity(std::map<std::string, InitialOutput> &
            : PathStatus::Corrupt,
        };
    }
    auto drvOutput = DrvOutput{info.outputHash, i.first};
    auto drvOutput = DrvOutput{drvPath, i.first};
    if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
        if (auto real = worker.store.queryRealisation(drvOutput)) {
            info.known = {
@@ -36,12 +36,6 @@ DerivationGoal::DerivationGoal(
    , drvPath(drvPath)
    , wantedOutput(wantedOutput)
    , drv{std::make_unique<Derivation>(drv)}
    , outputHash{[&] {
        auto outputHashes = staticOutputHashes(worker.evalStore, drv);
        if (auto * mOutputHash = get(outputHashes, wantedOutput))
            return *mOutputHash;
        throw Error("derivation '%s' does not have output '%s'", worker.store.printStorePath(drvPath), wantedOutput);
    }()}
    , buildMode(buildMode)
{

@@ -100,7 +94,7 @@ Goal::Co DerivationGoal::haveDerivation(bool storeDerivation)
       them. */
    if (settings.useSubstitutes && drvOptions.substitutesAllowed()) {
        if (!checkResult)
            waitees.insert(upcast_goal(worker.makeDrvOutputSubstitutionGoal(DrvOutput{outputHash, wantedOutput})));
            waitees.insert(upcast_goal(worker.makeDrvOutputSubstitutionGoal(DrvOutput{drvPath, wantedOutput})));
        else {
            auto * cap = getDerivationCA(*drv);
            waitees.insert(upcast_goal(worker.makePathSubstitutionGoal(
@ -167,12 +161,7 @@ Goal::Co DerivationGoal::haveDerivation(bool storeDerivation)
|
|||
// No `std::visit` for coroutines yet
|
||||
if (auto * successP = resolvedResult.tryGetSuccess()) {
|
||||
auto & success = *successP;
|
||||
auto outputHashes = staticOutputHashes(worker.evalStore, *drv);
|
||||
auto resolvedHashes = staticOutputHashes(worker.store, drvResolved);
|
||||
|
||||
auto outputHash = get(outputHashes, wantedOutput);
|
||||
auto resolvedHash = get(resolvedHashes, wantedOutput);
|
||||
if ((!outputHash) || (!resolvedHash))
|
||||
if (!drv->outputs.contains(wantedOutput))
|
||||
throw Error(
|
||||
"derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolve)",
|
||||
worker.store.printStorePath(drvPath),
|
||||
|
|
@ -181,7 +170,7 @@ Goal::Co DerivationGoal::haveDerivation(bool storeDerivation)
|
|||
auto realisation = [&] {
|
||||
auto take1 = get(success.builtOutputs, wantedOutput);
|
||||
if (take1)
|
||||
return static_cast<UnkeyedRealisation>(*take1);
|
||||
return *take1;
|
||||
|
||||
/* The above `get` should work. But stateful tracking of
|
||||
outputs in resolvedResult, this can get out of sync with the
|
||||
|
|
@ -189,7 +178,7 @@ Goal::Co DerivationGoal::haveDerivation(bool storeDerivation)
|
|||
check the store directly if it fails. */
|
||||
auto take2 = worker.evalStore.queryRealisation(
|
||||
DrvOutput{
|
||||
.drvHash = *resolvedHash,
|
||||
.drvPath = pathResolved,
|
||||
.outputName = wantedOutput,
|
||||
});
|
||||
if (take2)
|
||||
|
|
@ -205,15 +194,10 @@ Goal::Co DerivationGoal::haveDerivation(bool storeDerivation)
|
|||
Realisation newRealisation{
|
||||
realisation,
|
||||
{
|
||||
.drvHash = *outputHash,
|
||||
.drvPath = drvPath,
|
||||
.outputName = wantedOutput,
|
||||
}};
|
||||
newRealisation.signatures.clear();
|
||||
if (!drv->type().isFixed()) {
|
||||
auto & drvStore = worker.evalStore.isValidPath(drvPath) ? worker.evalStore : worker.store;
|
||||
newRealisation.dependentRealisations =
|
||||
drvOutputReferences(worker.store, *drv, realisation.outPath, &drvStore);
|
||||
}
|
||||
worker.store.signRealisation(newRealisation);
|
||||
worker.store.registerDrvOutput(newRealisation);
|
||||
}
|
||||
|
|
@ -256,16 +240,7 @@ Goal::Co DerivationGoal::haveDerivation(bool storeDerivation)
|
|||
/* In checking mode, the builder will not register any outputs.
|
||||
So we want to make sure the ones that we wanted to check are
|
||||
properly there. */
|
||||
success.builtOutputs = {{
|
||||
wantedOutput,
|
||||
{
|
||||
assertPathValidity(),
|
||||
{
|
||||
.drvHash = outputHash,
|
||||
.outputName = wantedOutput,
|
||||
},
|
||||
},
|
||||
}};
|
||||
success.builtOutputs = {{wantedOutput, assertPathValidity()}};
|
||||
} else {
|
||||
/* Otherwise the builder will give us info for out output, but
|
||||
also for other outputs. Filter down to just our output so as
|
||||
|
|
@ -374,7 +349,7 @@ std::optional<std::pair<UnkeyedRealisation, PathStatus>> DerivationGoal::checkPa
|
|||
if (drv->type().isImpure())
|
||||
return std::nullopt;
|
||||
|
||||
auto drvOutput = DrvOutput{outputHash, wantedOutput};
|
||||
auto drvOutput = DrvOutput{drvPath, wantedOutput};
|
||||
|
||||
std::optional<UnkeyedRealisation> mRealisation;
|
||||
|
||||
|
|
@ -414,7 +389,7 @@ std::optional<std::pair<UnkeyedRealisation, PathStatus>> DerivationGoal::checkPa
|
|||
Realisation{
|
||||
*mRealisation,
|
||||
{
|
||||
.drvHash = outputHash,
|
||||
.drvPath = drvPath,
|
||||
.outputName = wantedOutput,
|
||||
},
|
||||
});
|
||||
|
|
@ -437,16 +412,7 @@ Goal::Done DerivationGoal::doneSuccess(BuildResult::Success::Status status, Unke
|
|||
{
|
||||
buildResult.inner = BuildResult::Success{
|
||||
.status = status,
|
||||
.builtOutputs = {{
|
||||
wantedOutput,
|
||||
{
|
||||
std::move(builtOutput),
|
||||
DrvOutput{
|
||||
.drvHash = outputHash,
|
||||
.outputName = wantedOutput,
|
||||
},
|
||||
},
|
||||
}},
|
||||
.builtOutputs = {{wantedOutput, std::move(builtOutput)}},
|
||||
};
|
||||
|
||||
mcExpectedBuilds.reset();
|
||||
|
|
|
|||
|
|
@@ -12,7 +12,7 @@ DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(const DrvOutput & id, Worke
    : Goal(worker, init())
    , id(id)
{
    name = fmt("substitution of '%s'", id.to_string());
    name = fmt("substitution of '%s'", id.render(worker.store));
    trace("created");
}

@@ -86,32 +86,8 @@ Goal::Co DrvOutputSubstitutionGoal::init()
        if (!outputInfo)
            continue;

        bool failed = false;

        Goals waitees;

        for (const auto & [depId, depPath] : outputInfo->dependentRealisations) {
            if (depId != id) {
                if (auto localOutputInfo = worker.store.queryRealisation(depId);
                    localOutputInfo && localOutputInfo->outPath != depPath) {
                    warn(
                        "substituter '%s' has an incompatible realisation for '%s', ignoring.\n"
                        "Local: %s\n"
                        "Remote: %s",
                        sub->config.getHumanReadableURI(),
                        depId.to_string(),
                        worker.store.printStorePath(localOutputInfo->outPath),
                        worker.store.printStorePath(depPath));
                    failed = true;
                    break;
                }
                waitees.insert(worker.makeDrvOutputSubstitutionGoal(depId));
            }
        }

        if (failed)
            continue;

        waitees.insert(worker.makePathSubstitutionGoal(outputInfo->outPath));

        co_await await(std::move(waitees));

@@ -131,7 +107,8 @@ Goal::Co DrvOutputSubstitutionGoal::init()

    /* None left. Terminate this goal and let someone else deal
       with it. */
    debug("derivation output '%s' is required, but there is no substituter that can provide it", id.to_string());
    debug(
        "derivation output '%s' is required, but there is no substituter that can provide it", id.render(worker.store));

    if (substituterFailed) {
        worker.failedSubstitutions++;

@@ -146,7 +123,7 @@ Goal::Co DrvOutputSubstitutionGoal::init()

std::string DrvOutputSubstitutionGoal::key()
{
    return "a$" + std::string(id.to_string());
    return "a$" + std::string(id.render(worker.store));
}

void DrvOutputSubstitutionGoal::handleEOF(Descriptor fd)
@@ -12,30 +12,3 @@ create table if not exists Realisations (
);

create index if not exists IndexRealisations on Realisations(drvPath, outputName);

-- We can end-up in a weird edge-case where a path depends on itself because
-- it’s an output of a CA derivation, that happens to be the same as one of its
-- dependencies.
-- In that case we have a dependency loop (path -> realisation1 -> realisation2
-- -> path) that we need to break by removing the dependencies between the
-- realisations
create trigger if not exists DeleteSelfRefsViaRealisations before delete on ValidPaths
begin
    delete from RealisationsRefs where realisationReference in (
        select id from Realisations where outputPath = old.id
    );
end;

create table if not exists RealisationsRefs (
    referrer integer not null,
    realisationReference integer,
    foreign key (referrer) references Realisations(id) on delete cascade,
    foreign key (realisationReference) references Realisations(id) on delete restrict
);
-- used by deletion trigger
create index if not exists IndexRealisationsRefsRealisationReference on RealisationsRefs(realisationReference);

-- used by QueryRealisationReferences
create index if not exists IndexRealisationsRefs on RealisationsRefs(referrer);
-- used by cascade deletion when ValidPaths is deleted
create index if not exists IndexRealisationsRefsOnOutputPath on Realisations(outputPath);
@@ -46,34 +46,6 @@ void CommonProto::Serialise<ContentAddress>::write(
    conn.to << renderContentAddress(ca);
}

Realisation CommonProto::Serialise<Realisation>::read(const StoreDirConfig & store, CommonProto::ReadConn conn)
{
    std::string rawInput = readString(conn.from);
    try {
        return nlohmann::json::parse(rawInput);
    } catch (Error & e) {
        e.addTrace({}, "while parsing a realisation object in the remote protocol");
        throw;
    }
}

void CommonProto::Serialise<Realisation>::write(
    const StoreDirConfig & store, CommonProto::WriteConn conn, const Realisation & realisation)
{
    conn.to << static_cast<nlohmann::json>(realisation).dump();
}

DrvOutput CommonProto::Serialise<DrvOutput>::read(const StoreDirConfig & store, CommonProto::ReadConn conn)
{
    return DrvOutput::parse(readString(conn.from));
}

void CommonProto::Serialise<DrvOutput>::write(
    const StoreDirConfig & store, CommonProto::WriteConn conn, const DrvOutput & drvOutput)
{
    conn.to << drvOutput.to_string();
}

std::optional<StorePath>
CommonProto::Serialise<std::optional<StorePath>>::read(const StoreDirConfig & store, CommonProto::ReadConn conn)
{

@@ -1,6 +1,7 @@
#include "nix/util/args.hh"
#include "nix/store/content-address.hh"
#include "nix/util/split.hh"
#include "nix/util/json-utils.hh"

namespace nix {

@@ -300,3 +301,36 @@ Hash ContentAddressWithReferences::getHash() const
}

} // namespace nix

namespace nlohmann {

using namespace nix;

ContentAddressMethod adl_serializer<ContentAddressMethod>::from_json(const json & json)
{
    return ContentAddressMethod::parse(getString(json));
}

void adl_serializer<ContentAddressMethod>::to_json(json & json, const ContentAddressMethod & m)
{
    json = m.render();
}

ContentAddress adl_serializer<ContentAddress>::from_json(const json & json)
{
    auto obj = getObject(json);
    return {
        .method = adl_serializer<ContentAddressMethod>::from_json(valueAt(obj, "method")),
        .hash = valueAt(obj, "hash"),
    };
}

void adl_serializer<ContentAddress>::to_json(json & json, const ContentAddress & ca)
{
    json = {
        {"method", ca.method},
        {"hash", ca.hash},
    };
}

} // namespace nlohmann
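The serializers above make `ContentAddress` and `ContentAddressMethod` directly convertible to and from `nlohmann::json`. A minimal round-trip sketch under that assumption; the include paths and the concrete values are assumptions for illustration, not taken from the diff:

// Hypothetical usage sketch, not part of the diff.
#include <cassert>
#include <nlohmann/json.hpp>

#include "nix/store/content-address.hh"

void contentAddressJsonRoundTrip()
{
    using namespace nix;

    ContentAddress ca{
        .method = ContentAddressMethod::Raw::Text,
        .hash = hashString(HashAlgorithm::SHA256, "example"),
    };

    // to_json produces an object of the shape {"method": ..., "hash": ...}.
    nlohmann::json j = ca;

    // from_json reads the same two fields back.
    auto ca2 = j.get<ContentAddress>();
    assert(ca == ca2);
}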
@@ -963,33 +963,31 @@ static void performOp(

    case WorkerProto::Op::RegisterDrvOutput: {
        logger->startWork();
        if (GET_PROTOCOL_MINOR(conn.protoVersion) < 31) {
            auto outputId = DrvOutput::parse(readString(conn.from));
            auto outputPath = StorePath(readString(conn.from));
            store->registerDrvOutput(Realisation{{.outPath = outputPath}, outputId});
        } else {
            auto realisation = WorkerProto::Serialise<Realisation>::read(*store, rconn);
            store->registerDrvOutput(realisation);
        }
        // TODO move to WorkerProto::Serialise<DrvOutput> and friends
        // if (GET_PROTOCOL_MINOR(conn.protoVersion) < 39) {
        // throw Error("old-style build traces no longer supported");
        //}
        auto realisation = WorkerProto::Serialise<Realisation>::read(*store, rconn);
        store->registerDrvOutput(realisation);
        logger->stopWork();
        break;
    }

    case WorkerProto::Op::QueryRealisation: {
        logger->startWork();
        auto outputId = DrvOutput::parse(readString(conn.from));
        auto info = store->queryRealisation(outputId);
        auto outputId = WorkerProto::Serialise<DrvOutput>::read(*store, rconn);
        std::optional<UnkeyedRealisation> info = *store->queryRealisation(outputId);
        logger->stopWork();
        if (GET_PROTOCOL_MINOR(conn.protoVersion) < 31) {
            std::set<StorePath> outPaths;
            if (info)
                outPaths.insert(info->outPath);
            WorkerProto::write(*store, wconn, outPaths);
        } else if (GET_PROTOCOL_MINOR(conn.protoVersion) < 39) {
            // No longer support this format
            WorkerProto::write(*store, wconn, StringSet{});
        } else {
            std::set<Realisation> realisations;
            if (info)
                realisations.insert({*info, outputId});
            WorkerProto::write(*store, wconn, realisations);
            WorkerProto::write(*store, wconn, info);
        }
        break;
    }

@@ -105,7 +105,7 @@ bool BasicDerivation::isBuiltin() const
    return builder.substr(0, 8) == "builtin:";
}

StorePath writeDerivation(Store & store, const Derivation & drv, RepairFlag repair, bool readOnly)
static auto infoForDerivation(Store & store, const Derivation & drv)
{
    auto references = drv.inputSrcs;
    for (auto & i : drv.inputDrvs.map)

@@ -117,13 +117,32 @@ StorePath writeDerivation(Store & store, const Derivation & drv, RepairFlag repa
    auto contents = drv.unparse(store, false);
    auto hash = hashString(HashAlgorithm::SHA256, contents);
    auto ca = TextInfo{.hash = hash, .references = references};
    auto path = store.makeFixedOutputPathFromCA(suffix, ca);
    return std::tuple{
        suffix,
        contents,
        references,
        store.makeFixedOutputPathFromCA(suffix, ca),
    };
}

    if (readOnly || settings.readOnlyMode || (store.isValidPath(path) && !repair))
StorePath writeDerivation(Store & store, const Derivation & drv, RepairFlag repair, bool readOnly)
{
    if (readOnly || settings.readOnlyMode) {
        auto [_x, _y, _z, path] = infoForDerivation(store, drv);
        return path;
    } else
        return store.writeDerivation(drv, repair);
}

StorePath Store::writeDerivation(const Derivation & drv, RepairFlag repair)
{
    auto [suffix, contents, references, path] = infoForDerivation(*this, drv);

    if (isValidPath(path) && !repair)
        return path;

    StringSource s{contents};
    auto path2 = store.addToStoreFromDump(
    auto path2 = addToStoreFromDump(
        s,
        suffix,
        FileSerialisationMethod::Flat,
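After this split, the free `writeDerivation` helper only computes the would-be path in the read-only case and otherwise defers to the new `Store::writeDerivation` virtual, which individual stores (such as the dummy store below) can override. A hedged sketch of the resulting call pattern; the surrounding function is hypothetical:

// Hypothetical usage sketch, not part of the diff.
#include <cassert>

#include "nix/store/derivations.hh"
#include "nix/store/store-api.hh"

void writeDerivationExample(nix::Store & store, const nix::Derivation & drv)
{
    using namespace nix;

    // Dry run: compute the text-addressed path without writing anything.
    StorePath planned = writeDerivation(store, drv, NoRepair, /*readOnly=*/true);

    // Real write: dispatches to Store::writeDerivation, which a store
    // implementation may override (e.g. to keep derivations in memory).
    StorePath written = writeDerivation(store, drv, NoRepair, /*readOnly=*/false);

    assert(planned == written);
}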
@ -847,13 +866,14 @@ DrvHashes drvHashes;
|
|||
/* Look up the derivation by value and memoize the
|
||||
`hashDerivationModulo` call.
|
||||
*/
|
||||
static const DrvHash pathDerivationModulo(Store & store, const StorePath & drvPath)
|
||||
static DrvHashModulo pathDerivationModulo(Store & store, const StorePath & drvPath)
|
||||
{
|
||||
std::optional<DrvHash> hash;
|
||||
std::optional<DrvHashModulo> hash;
|
||||
if (drvHashes.cvisit(drvPath, [&hash](const auto & kv) { hash.emplace(kv.second); })) {
|
||||
return *hash;
|
||||
}
|
||||
auto h = hashDerivationModulo(store, store.readInvalidDerivation(drvPath), false);
|
||||
|
||||
// Cache it
|
||||
drvHashes.insert_or_assign(drvPath, h);
|
||||
return h;
|
||||
|
|
@ -876,12 +896,10 @@ static const DrvHash pathDerivationModulo(Store & store, const StorePath & drvPa
|
|||
don't leak the provenance of fixed outputs, reducing pointless cache
|
||||
misses as the build itself won't know this.
|
||||
*/
|
||||
DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs)
|
||||
DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs)
|
||||
{
|
||||
auto type = drv.type();
|
||||
|
||||
/* Return a fixed hash for fixed-output derivations. */
|
||||
if (type.isFixed()) {
|
||||
if (drv.type().isFixed()) {
|
||||
std::map<std::string, Hash> outputHashes;
|
||||
for (const auto & i : drv.outputs) {
|
||||
auto & dof = std::get<DerivationOutput::CAFixed>(i.second.raw);
|
||||
|
|
@ -891,54 +909,66 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
|
|||
+ store.printStorePath(dof.path(store, drv.name, i.first)));
|
||||
outputHashes.insert_or_assign(i.first, std::move(hash));
|
||||
}
|
||||
return DrvHash{
|
||||
.hashes = outputHashes,
|
||||
.kind = DrvHash::Kind::Regular,
|
||||
};
|
||||
return outputHashes;
|
||||
}
|
||||
|
||||
auto kind = std::visit(
|
||||
overloaded{
|
||||
[](const DerivationType::InputAddressed & ia) {
|
||||
/* This might be a "pesimistically" deferred output, so we don't
|
||||
"taint" the kind yet. */
|
||||
return DrvHash::Kind::Regular;
|
||||
},
|
||||
[](const DerivationType::ContentAddressed & ca) {
|
||||
return ca.fixed ? DrvHash::Kind::Regular : DrvHash::Kind::Deferred;
|
||||
},
|
||||
[](const DerivationType::Impure &) -> DrvHash::Kind { return DrvHash::Kind::Deferred; }},
|
||||
drv.type().raw);
|
||||
if (std::visit(
|
||||
overloaded{
|
||||
[](const DerivationType::InputAddressed & ia) {
|
||||
/* This might be a "pesimistically" deferred output, so we don't
|
||||
"taint" the kind yet. */
|
||||
return false;
|
||||
},
|
||||
[](const DerivationType::ContentAddressed & ca) {
|
||||
// Already covered
|
||||
assert(!ca.fixed);
|
||||
return true;
|
||||
},
|
||||
[](const DerivationType::Impure &) { return true; }},
|
||||
drv.type().raw)) {
|
||||
return DrvHashModulo::DeferredDrv{};
|
||||
}
|
||||
|
||||
/* For other derivations, replace the inputs paths with recursive
|
||||
calls to this function. */
|
||||
DerivedPathMap<StringSet>::ChildNode::Map inputs2;
|
||||
for (auto & [drvPath, node] : drv.inputDrvs.map) {
|
||||
/* Need to build and resolve dynamic derivations first */
|
||||
if (!node.childMap.empty()) {
|
||||
return DrvHashModulo::DeferredDrv{};
|
||||
}
|
||||
|
||||
const auto & res = pathDerivationModulo(store, drvPath);
|
||||
if (res.kind == DrvHash::Kind::Deferred)
|
||||
kind = DrvHash::Kind::Deferred;
|
||||
for (auto & outputName : node.value) {
|
||||
const auto h = get(res.hashes, outputName);
|
||||
if (!h)
|
||||
throw Error("no hash for output '%s' of derivation '%s'", outputName, drv.name);
|
||||
inputs2[h->to_string(HashFormat::Base16, false)].value.insert(outputName);
|
||||
if (std::visit(
|
||||
overloaded{
|
||||
[&](const DrvHashModulo::DeferredDrv &) { return true; },
|
||||
// Regular non-CA derivation, replace derivation
|
||||
[&](const DrvHashModulo::DrvHash & drvHash) {
|
||||
inputs2.insert_or_assign(drvHash.to_string(HashFormat::Base16, false), node);
|
||||
return false;
|
||||
},
|
||||
// CA derivation's output hashes
|
||||
[&](const DrvHashModulo::CaOutputHashes & outputHashes) {
|
||||
for (auto & outputName : node.value) {
|
||||
/* Put each one in with a single "out" output.. */
|
||||
const auto h = get(outputHashes, outputName);
|
||||
if (!h)
|
||||
throw Error("no hash for output '%s' of derivation '%s'", outputName, drv.name);
|
||||
inputs2.insert_or_assign(
|
||||
h->to_string(HashFormat::Base16, false),
|
||||
DerivedPathMap<StringSet>::ChildNode{
|
||||
.value = {"out"},
|
||||
});
|
||||
}
|
||||
return false;
|
||||
},
|
||||
},
|
||||
res.raw)) {
|
||||
return DrvHashModulo::DeferredDrv{};
|
||||
}
|
||||
}
|
||||
|
||||
auto hash = hashString(HashAlgorithm::SHA256, drv.unparse(store, maskOutputs, &inputs2));
|
||||
|
||||
std::map<std::string, Hash> outputHashes;
|
||||
for (const auto & [outputName, _] : drv.outputs) {
|
||||
outputHashes.insert_or_assign(outputName, hash);
|
||||
}
|
||||
|
||||
return DrvHash{
|
||||
.hashes = outputHashes,
|
||||
.kind = kind,
|
||||
};
|
||||
}
|
||||
|
||||
std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation & drv)
|
||||
{
|
||||
return hashDerivationModulo(store, drv, true).hashes;
|
||||
return hashString(HashAlgorithm::SHA256, drv.unparse(store, maskOutputs, &inputs2));
|
||||
}
|
||||
|
||||
static DerivationOutput readDerivationOutput(Source & in, const StoreDirConfig & store)
|
||||
|
|
@ -1088,22 +1118,39 @@ void BasicDerivation::applyRewrites(const StringMap & rewrites)
|
|||
}
|
||||
}
|
||||
|
||||
static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites)
|
||||
void resolveInputAddressed(Store & store, Derivation & drv)
|
||||
{
|
||||
drv.applyRewrites(rewrites);
|
||||
std::optional<DrvHashModulo> hashModulo_;
|
||||
|
||||
auto hashModulo = [&]() -> const auto & {
|
||||
if (!hashModulo_) {
|
||||
// somewhat expensive so we do lazily
|
||||
hashModulo_ = hashDerivationModulo(store, drv, true);
|
||||
}
|
||||
return *hashModulo_;
|
||||
};
|
||||
|
||||
auto hashModulo = hashDerivationModulo(store, Derivation(drv), true);
|
||||
for (auto & [outputName, output] : drv.outputs) {
|
||||
if (std::holds_alternative<DerivationOutput::Deferred>(output.raw)) {
|
||||
auto h = get(hashModulo.hashes, outputName);
|
||||
if (!h)
|
||||
throw Error(
|
||||
"derivation '%s' output '%s' has no hash (derivations.cc/rewriteDerivation)", drv.name, outputName);
|
||||
auto outPath = store.makeOutputPath(outputName, *h, drv.name);
|
||||
drv.env[outputName] = store.printStorePath(outPath);
|
||||
output = DerivationOutput::InputAddressed{
|
||||
.path = std::move(outPath),
|
||||
};
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](const DrvHashModulo::DrvHash & drvHash) {
|
||||
auto outPath = store.makeOutputPath(outputName, drvHash, drv.name);
|
||||
drv.env.insert_or_assign(outputName, store.printStorePath(outPath));
|
||||
output = DerivationOutput::InputAddressed{
|
||||
.path = std::move(outPath),
|
||||
};
|
||||
},
|
||||
[&](const DrvHashModulo::CaOutputHashes &) {
|
||||
/* Shouldn't happen as the original output is
|
||||
deferred (waiting to be input-addressed). */
|
||||
assert(false);
|
||||
},
|
||||
[&](const DrvHashModulo::DeferredDrv &) {
|
||||
// Nothing to do, already deferred
|
||||
},
|
||||
},
|
||||
hashModulo().raw);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1186,9 +1233,13 @@ std::optional<BasicDerivation> Derivation::tryResolve(
|
|||
queryResolutionChain))
|
||||
return std::nullopt;
|
||||
|
||||
rewriteDerivation(store, resolved, inputRewrites);
|
||||
resolved.applyRewrites(inputRewrites);
|
||||
|
||||
return resolved;
|
||||
Derivation resolved2{std::move(resolved)};
|
||||
|
||||
resolveInputAddressed(store, resolved2);
|
||||
|
||||
return resolved2;
|
||||
}
|
||||
|
||||
void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const
|
||||
|
|
@ -1216,46 +1267,79 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const
|
|||
// combinations that are currently prohibited.
|
||||
type();
|
||||
|
||||
std::optional<DrvHash> hashesModulo;
|
||||
for (auto & i : outputs) {
|
||||
std::optional<DrvHashModulo> hashModulo_;
|
||||
|
||||
auto hashModulo = [&]() -> const auto & {
|
||||
if (!hashModulo_) {
|
||||
// somewhat expensive so we do lazily
|
||||
hashModulo_ = hashDerivationModulo(store, *this, true);
|
||||
}
|
||||
return *hashModulo_;
|
||||
};
|
||||
|
||||
for (auto & [outputName, output] : outputs) {
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](const DerivationOutput::InputAddressed & doia) {
|
||||
if (!hashesModulo) {
|
||||
// somewhat expensive so we do lazily
|
||||
hashesModulo = hashDerivationModulo(store, *this, true);
|
||||
}
|
||||
auto currentOutputHash = get(hashesModulo->hashes, i.first);
|
||||
if (!currentOutputHash)
|
||||
throw Error(
|
||||
"derivation '%s' has unexpected output '%s' (local-store / hashesModulo) named '%s'",
|
||||
store.printStorePath(drvPath),
|
||||
store.printStorePath(doia.path),
|
||||
i.first);
|
||||
StorePath recomputed = store.makeOutputPath(i.first, *currentOutputHash, drvName);
|
||||
if (doia.path != recomputed)
|
||||
throw Error(
|
||||
"derivation '%s' has incorrect output '%s', should be '%s'",
|
||||
store.printStorePath(drvPath),
|
||||
store.printStorePath(doia.path),
|
||||
store.printStorePath(recomputed));
|
||||
envHasRightPath(doia.path, i.first);
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](const DrvHashModulo::DrvHash & drvHash) {
|
||||
StorePath recomputed = store.makeOutputPath(outputName, drvHash, drvName);
|
||||
if (doia.path != recomputed)
|
||||
throw Error(
|
||||
"derivation '%s' has incorrect output '%s', should be '%s'",
|
||||
store.printStorePath(drvPath),
|
||||
store.printStorePath(doia.path),
|
||||
store.printStorePath(recomputed));
|
||||
},
|
||||
[&](const DrvHashModulo::CaOutputHashes &) {
|
||||
/* Shouldn't happen as the original output is
|
||||
input-addressed. */
|
||||
assert(false);
|
||||
},
|
||||
[&](const DrvHashModulo::DeferredDrv &) {
|
||||
throw Error(
|
||||
"derivation '%s' has output '%s', but derivation is not yet ready to be input-addressed",
|
||||
store.printStorePath(drvPath),
|
||||
store.printStorePath(doia.path));
|
||||
},
|
||||
},
|
||||
hashModulo().raw);
|
||||
envHasRightPath(doia.path, outputName);
|
||||
},
|
||||
[&](const DerivationOutput::CAFixed & dof) {
|
||||
auto path = dof.path(store, drvName, i.first);
|
||||
envHasRightPath(path, i.first);
|
||||
auto path = dof.path(store, drvName, outputName);
|
||||
envHasRightPath(path, outputName);
|
||||
},
|
||||
[&](const DerivationOutput::CAFloating &) {
|
||||
/* Nothing to check */
|
||||
},
|
||||
[&](const DerivationOutput::Deferred &) {
|
||||
/* Nothing to check */
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](const DrvHashModulo::DrvHash & drvHash) {
|
||||
throw Error(
|
||||
"derivation '%s' has deferred output '%s', yet is ready to be input-addressed",
|
||||
store.printStorePath(drvPath),
|
||||
outputName);
|
||||
},
|
||||
[&](const DrvHashModulo::CaOutputHashes &) {
|
||||
/* Shouldn't happen as the original output is
|
||||
input-addressed. */
|
||||
assert(false);
|
||||
},
|
||||
[&](const DrvHashModulo::DeferredDrv &) {
|
||||
/* Nothing to check */
|
||||
},
|
||||
},
|
||||
hashModulo().raw);
|
||||
},
|
||||
[&](const DerivationOutput::Impure &) {
|
||||
/* Nothing to check */
|
||||
},
|
||||
},
|
||||
i.second.raw);
|
||||
output.raw);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@@ -1274,15 +1358,13 @@ void adl_serializer<DerivationOutput>::to_json(json & res, const DerivationOutpu
        overloaded{
            [&](const DerivationOutput::InputAddressed & doi) { res["path"] = doi.path; },
            [&](const DerivationOutput::CAFixed & dof) {
                /* it would be nice to output the path for user convenience, but
                   this would require us to know the store dir. */
                res = dof.ca;
                // FIXME print refs?
                /* it would be nice to output the path for user convenience, but
                   this would require us to know the store dir. */
#if 0
                res["path"] = dof.path(store, drvName, outputName);
#endif
                res["method"] = std::string{dof.ca.method.render()};
                res["hashAlgo"] = printHashAlgo(dof.ca.hash.algo);
                res["hash"] = dof.ca.hash.to_string(HashFormat::Base16, false);
                // FIXME print refs?
            },
            [&](const DerivationOutput::CAFloating & dof) {
                res["method"] = std::string{dof.method.render()};

@@ -1322,15 +1404,12 @@ adl_serializer<DerivationOutput>::from_json(const json & _json, const Experiment
    };
}

    else if (keys == (std::set<std::string_view>{"method", "hashAlgo", "hash"})) {
        auto [method, hashAlgo] = methodAlgo();
    else if (keys == (std::set<std::string_view>{"method", "hash"})) {
        auto dof = DerivationOutput::CAFixed{
            .ca =
                ContentAddress{
                    .method = std::move(method),
                    .hash = Hash::parseNonSRIUnprefixed(getString(valueAt(json, "hash")), hashAlgo),
                },
            .ca = static_cast<ContentAddress>(_json),
        };
        if (dof.ca.method == ContentAddressMethod::Raw::Text)
            xpSettings.require(Xp::DynamicDerivations, "text-hashed derivation output in JSON");
        /* We no longer produce this (denormalized) field (for the
           reasons described above), so we don't need to check it. */
#if 0

@@ -1373,7 +1452,7 @@ void adl_serializer<Derivation>::to_json(json & res, const Derivation & d)

    res["name"] = d.name;

    res["version"] = 3;
    res["version"] = 4;

    {
        nlohmann::json & outputsObj = res["outputs"];

@@ -1431,8 +1510,8 @@ Derivation adl_serializer<Derivation>::from_json(const json & _json, const Exper

    res.name = getString(valueAt(json, "name"));

    if (valueAt(json, "version") != 3)
        throw Error("Only derivation format version 3 is currently supported.");
    if (valueAt(json, "version") != 4)
        throw Error("Only derivation format version 4 is currently supported.");

    try {
        auto outputs = getObject(valueAt(json, "outputs"));
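Since `from_json` now rejects anything but derivation JSON format version 4, external consumers of that format need the same guard before parsing. A small hedged sketch of such a check; the helper name is hypothetical and not part of the diff:

// Hypothetical helper, not part of the diff.
#include <nlohmann/json.hpp>
#include <stdexcept>

void requireDerivationJsonV4(const nlohmann::json & drvJson)
{
    // Mirrors the version check in adl_serializer<Derivation>::from_json above.
    if (!drvJson.is_object() || drvJson.value("version", 0) != 4)
        throw std::runtime_error("only derivation JSON format version 4 is supported");
}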
@ -1,7 +1,9 @@
|
|||
#include "nix/store/store-registration.hh"
|
||||
#include "nix/util/archive.hh"
|
||||
#include "nix/util/bytes.hh"
|
||||
#include "nix/util/callback.hh"
|
||||
#include "nix/util/memory-source-accessor.hh"
|
||||
#include "nix/util/json-utils.hh"
|
||||
#include "nix/store/dummy-store-impl.hh"
|
||||
#include "nix/store/realisation.hh"
|
||||
|
||||
|
|
@ -16,6 +18,16 @@ std::string DummyStoreConfig::doc()
|
|||
;
|
||||
}
|
||||
|
||||
bool DummyStore::PathInfoAndContents::operator==(const PathInfoAndContents & other) const
|
||||
{
|
||||
return info == other.info && contents->root == other.contents->root;
|
||||
}
|
||||
|
||||
bool DummyStore::operator==(const DummyStore & other) const
|
||||
{
|
||||
return contents == other.contents && buildTrace == other.buildTrace;
|
||||
}
|
||||
|
||||
namespace {
|
||||
|
||||
class WholeStoreViewAccessor : public SourceAccessor
|
||||
|
|
@ -137,12 +149,44 @@ struct DummyStoreImpl : DummyStore
|
|||
void queryPathInfoUncached(
|
||||
const StorePath & path, Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override
|
||||
{
|
||||
bool visited = contents.cvisit(path, [&](const auto & kv) {
|
||||
callback(std::make_shared<ValidPathInfo>(StorePath{kv.first}, kv.second.info));
|
||||
});
|
||||
if (path.isDerivation()) {
|
||||
if (derivations.cvisit(path, [&](const auto & kv) {
|
||||
/* compute path info on demand */
|
||||
auto accessor = make_ref<MemorySourceAccessor>();
|
||||
accessor->root = MemorySourceAccessor::File::Regular{
|
||||
.contents = to_owned(as_bytes(kv.second.unparse(*this, false))),
|
||||
};
|
||||
auto narHash = hashPath(
|
||||
{accessor, CanonPath::root}, FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256);
|
||||
auto info =
|
||||
std::make_shared<ValidPathInfo>(StorePath{kv.first}, UnkeyedValidPathInfo{narHash.hash});
|
||||
info->narSize = narHash.numBytesDigested;
|
||||
info->ca = ContentAddress{
|
||||
.method = ContentAddressMethod::Raw::Text,
|
||||
.hash = hashString(
|
||||
HashAlgorithm::SHA256,
|
||||
to_str(std::get<MemorySourceAccessor::File::Regular>(accessor->root->raw).contents)),
|
||||
};
|
||||
callback(std::move(info));
|
||||
}))
|
||||
return;
|
||||
} else {
|
||||
if (contents.cvisit(path, [&](const auto & kv) {
|
||||
callback(std::make_shared<ValidPathInfo>(StorePath{kv.first}, kv.second.info));
|
||||
}))
|
||||
return;
|
||||
}
|
||||
|
||||
if (!visited)
|
||||
callback(nullptr);
|
||||
callback(nullptr);
|
||||
}
|
||||
|
||||
/**
|
||||
* Do this to avoid `queryPathInfoUncached` computing `PathInfo`
|
||||
* that we don't need just to return a `bool`.
|
||||
*/
|
||||
bool isValidPathUncached(const StorePath & path) override
|
||||
{
|
||||
return path.isDerivation() ? derivations.contains(path) : Store::isValidPathUncached(path);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -169,18 +213,25 @@ struct DummyStoreImpl : DummyStore
|
|||
if (checkSigs)
|
||||
throw Error("checking signatures is not supported for '%s' store", config->getHumanReadableURI());
|
||||
|
||||
auto temp = make_ref<MemorySourceAccessor>();
|
||||
MemorySink tempSink{*temp};
|
||||
auto accessor = make_ref<MemorySourceAccessor>();
|
||||
MemorySink tempSink{*accessor};
|
||||
parseDump(tempSink, source);
|
||||
auto path = info.path;
|
||||
|
||||
auto accessor = make_ref<MemorySourceAccessor>(std::move(*temp));
|
||||
contents.insert(
|
||||
{path,
|
||||
PathInfoAndContents{
|
||||
std::move(info),
|
||||
accessor,
|
||||
}});
|
||||
if (info.path.isDerivation()) {
|
||||
warn("back compat supporting `addToStore` for inserting derivations in dummy store");
|
||||
writeDerivation(
|
||||
parseDerivation(*this, accessor->readFile(CanonPath::root), Derivation::nameFromPath(info.path)));
|
||||
return;
|
||||
}
|
||||
|
||||
contents.insert({
|
||||
path,
|
||||
PathInfoAndContents{
|
||||
std::move(info),
|
||||
accessor,
|
||||
},
|
||||
});
|
||||
wholeStoreView->addObject(path.to_string(), accessor);
|
||||
}
|
||||
|
||||
|
|
@ -193,6 +244,9 @@ struct DummyStoreImpl : DummyStore
|
|||
const StorePathSet & references = StorePathSet(),
|
||||
RepairFlag repair = NoRepair) override
|
||||
{
|
||||
if (isDerivation(name))
|
||||
throw Error("Do not insert derivation into dummy store with `addToStoreFromDump`");
|
||||
|
||||
if (config->readOnly)
|
||||
unsupported("addToStoreFromDump");
|
||||
|
||||
|
|
@ -239,21 +293,51 @@ struct DummyStoreImpl : DummyStore
|
|||
|
||||
auto path = info.path;
|
||||
auto accessor = make_ref<MemorySourceAccessor>(std::move(*temp));
|
||||
contents.insert(
|
||||
{path,
|
||||
PathInfoAndContents{
|
||||
std::move(info),
|
||||
accessor,
|
||||
}});
|
||||
contents.insert({
|
||||
path,
|
||||
PathInfoAndContents{
|
||||
std::move(info),
|
||||
accessor,
|
||||
},
|
||||
});
|
||||
wholeStoreView->addObject(path.to_string(), accessor);
|
||||
|
||||
return path;
|
||||
}
|
||||
|
||||
StorePath writeDerivation(const Derivation & drv, RepairFlag repair = NoRepair) override
|
||||
{
|
||||
auto drvPath = ::nix::writeDerivation(*this, drv, repair, /*readonly=*/true);
|
||||
|
||||
if (!derivations.contains(drvPath) || repair) {
|
||||
if (config->readOnly)
|
||||
unsupported("writeDerivation");
|
||||
derivations.insert({drvPath, drv});
|
||||
}
|
||||
|
||||
return drvPath;
|
||||
}
|
||||
|
||||
Derivation readDerivation(const StorePath & drvPath) override
|
||||
{
|
||||
if (std::optional res = getConcurrent(derivations, drvPath))
|
||||
return *res;
|
||||
else
|
||||
throw Error("derivation '%s' is not valid", printStorePath(drvPath));
|
||||
}
|
||||
|
||||
/**
|
||||
* No such thing as an "invalid derivation" with the dummy store
|
||||
*/
|
||||
Derivation readInvalidDerivation(const StorePath & drvPath) override
|
||||
{
|
||||
return readDerivation(drvPath);
|
||||
}
|
||||
|
||||
void registerDrvOutput(const Realisation & output) override
|
||||
{
|
||||
auto ref = make_ref<UnkeyedRealisation>(output);
|
||||
buildTrace.insert_or_visit({output.id.drvHash, {{output.id.outputName, ref}}}, [&](auto & kv) {
|
||||
buildTrace.insert_or_visit({output.id.drvPath, {{output.id.outputName, ref}}}, [&](auto & kv) {
|
||||
kv.second.insert_or_assign(output.id.outputName, make_ref<UnkeyedRealisation>(output));
|
||||
});
|
||||
}
|
||||
|
|
@ -274,7 +358,7 @@ struct DummyStoreImpl : DummyStore
|
|||
const DrvOutput & drvOutput, Callback<std::shared_ptr<const UnkeyedRealisation>> callback) noexcept override
|
||||
{
|
||||
bool visited = false;
|
||||
buildTrace.cvisit(drvOutput.drvHash, [&](const auto & kv) {
|
||||
buildTrace.cvisit(drvOutput.drvPath, [&](const auto & kv) {
|
||||
if (auto it = kv.second.find(drvOutput.outputName); it != kv.second.end()) {
|
||||
visited = true;
|
||||
callback(it->second.get_ptr());
|
||||
|
|
@ -306,3 +390,89 @@ ref<DummyStore> DummyStore::Config::openDummyStore() const
|
|||
static RegisterStoreImplementation<DummyStore::Config> regDummyStore;
|
||||
|
||||
} // namespace nix
|
||||
|
||||
namespace nlohmann {
|
||||
|
||||
using namespace nix;
|
||||
|
||||
DummyStore::PathInfoAndContents adl_serializer<DummyStore::PathInfoAndContents>::from_json(const json & json)
|
||||
{
|
||||
auto & obj = getObject(json);
|
||||
return DummyStore::PathInfoAndContents{
|
||||
.info = valueAt(obj, "info"),
|
||||
.contents = make_ref<MemorySourceAccessor>(valueAt(obj, "contents")),
|
||||
};
|
||||
}
|
||||
|
||||
void adl_serializer<DummyStore::PathInfoAndContents>::to_json(json & json, const DummyStore::PathInfoAndContents & val)
|
||||
{
|
||||
json = {
|
||||
{"info", val.info},
|
||||
{"contents", *val.contents},
|
||||
};
|
||||
}
|
||||
|
||||
ref<DummyStore> adl_serializer<ref<DummyStore>>::from_json(const json & json)
|
||||
{
|
||||
auto & obj = getObject(json);
|
||||
ref<DummyStore> res = [&] {
|
||||
auto cfg = make_ref<DummyStore::Config>(DummyStore::Config::Params{});
|
||||
const_cast<PathSetting &>(cfg->storeDir_).set(getString(valueAt(obj, "store-dir")));
|
||||
cfg->readOnly = true;
|
||||
return cfg->openDummyStore();
|
||||
}();
|
||||
for (auto & [k, v] : getObject(valueAt(obj, "contents")))
|
||||
res->contents.insert({StorePath{k}, v});
|
||||
for (auto & [k, v] : getObject(valueAt(obj, "derivations")))
|
||||
res->derivations.insert({StorePath{k}, v});
|
||||
for (auto & [k0, v] : getObject(valueAt(obj, "build-trace"))) {
|
||||
for (auto & [k1, v2] : getObject(v)) {
|
||||
auto vref = make_ref<UnkeyedRealisation>(v2);
|
||||
res->buildTrace.insert_or_visit(
|
||||
{
|
||||
StorePath{k0},
|
||||
{{k1, vref}},
|
||||
},
|
||||
[&](auto & kv) { kv.second.insert_or_assign(k1, vref); });
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
void adl_serializer<ref<DummyStore>>::to_json(json & json, const ref<DummyStore> & val)
|
||||
{
|
||||
json = {
|
||||
{"store-dir", val->storeDir},
|
||||
{"contents",
|
||||
[&] {
|
||||
auto obj = json::object();
|
||||
val->contents.cvisit_all([&](const auto & kv) {
|
||||
auto & [k, v] = kv;
|
||||
obj[k.to_string()] = v;
|
||||
});
|
||||
return obj;
|
||||
}()},
|
||||
{"derivations",
|
||||
[&] {
|
||||
auto obj = json::object();
|
||||
val->derivations.cvisit_all([&](const auto & kv) {
|
||||
auto & [k, v] = kv;
|
||||
obj[k.to_string()] = v;
|
||||
});
|
||||
return obj;
|
||||
}()},
|
||||
{"build-trace",
|
||||
[&] {
|
||||
auto obj = json::object();
|
||||
val->buildTrace.cvisit_all([&](const auto & kv) {
|
||||
auto & [k, v] = kv;
|
||||
auto & obj2 = obj[k.to_string()] = json::object();
|
||||
for (auto & [k2, v2] : kv.second)
|
||||
obj2[k2] = *v2;
|
||||
});
|
||||
return obj;
|
||||
}()},
|
||||
};
|
||||
}
|
||||
|
||||
} // namespace nlohmann
|
||||
|
|
|
|||
|
|
@@ -82,8 +82,13 @@ protected:

    /**
     * The prefix under which realisation infos will be stored
     *
     * @note The previous (still experimental, though) hash-keyed
     * realisations were under "realisations". "build trace" is a better
     * name anyways (issue #11895), and this serves as some light
     * versioning.
     */
    constexpr const static std::string realisationsPrefix = "realisations";
    constexpr const static std::string realisationsPrefix = "build-trace";

    constexpr const static std::string cacheInfoFile = "nix-cache-info";

@@ -92,7 +97,7 @@ protected:
    /**
     * Compute the path to the given realisation
     *
     * It's `${realisationsPrefix}/${drvOutput}.doi`.
     * It's `${realisationsPrefix}/${drvPath}/${outputName}`.
     */
    std::string makeRealisationPath(const DrvOutput & id);

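The comment above pins down the new layout: one entry per derivation output, grouped by derivation store path, under the `build-trace` prefix. A hedged sketch of how such a key could be composed; the helper below is hypothetical, and it omits the store-dir handling and the `.doi` suffix used by the binary cache implementation earlier in this diff:

// Hypothetical illustration, not part of the diff.
#include <string>

std::string exampleBuildTraceKey(const std::string & drvPathBaseName, const std::string & outputName)
{
    // e.g. "build-trace/<hash>-hello.drv/out"
    return std::string("build-trace/") + drvPathBaseName + "/" + outputName;
}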
@ -45,7 +45,6 @@ struct InitialOutputStatus
|
|||
|
||||
struct InitialOutput
|
||||
{
|
||||
Hash outputHash;
|
||||
std::optional<InitialOutputStatus> known;
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -71,8 +71,6 @@ private:
|
|||
*/
|
||||
std::unique_ptr<Derivation> drv;
|
||||
|
||||
const Hash outputHash;
|
||||
|
||||
const BuildMode buildMode;
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -26,12 +26,13 @@ namespace nix {
|
|||
LengthPrefixedProtoHelper<CommonProto, T>::write(store, conn, t); \
|
||||
}
|
||||
|
||||
#define COMMA_ ,
|
||||
COMMON_USE_LENGTH_PREFIX_SERIALISER(template<typename T>, std::vector<T>)
|
||||
#define COMMA_ ,
|
||||
COMMON_USE_LENGTH_PREFIX_SERIALISER(template<typename T COMMA_ typename Compare>, std::set<T COMMA_ Compare>)
|
||||
COMMON_USE_LENGTH_PREFIX_SERIALISER(template<typename... Ts>, std::tuple<Ts...>)
|
||||
|
||||
COMMON_USE_LENGTH_PREFIX_SERIALISER(template<typename K COMMA_ typename V>, std::map<K COMMA_ V>)
|
||||
COMMON_USE_LENGTH_PREFIX_SERIALISER(
|
||||
template<typename K COMMA_ typename V COMMA_ typename Compare>, std::map<K COMMA_ V COMMA_ Compare>)
|
||||
#undef COMMA_
|
||||
|
||||
/* protocol-specific templates */
|
||||
|
|
|
|||
|
|
@ -12,7 +12,6 @@ struct Source;
|
|||
class StorePath;
|
||||
struct ContentAddress;
|
||||
struct DrvOutput;
|
||||
struct Realisation;
|
||||
|
||||
/**
|
||||
* Shared serializers between the worker protocol, serve protocol, and a
|
||||
|
|
@ -70,8 +69,6 @@ template<>
|
|||
DECLARE_COMMON_SERIALISER(ContentAddress);
|
||||
template<>
|
||||
DECLARE_COMMON_SERIALISER(DrvOutput);
|
||||
template<>
|
||||
DECLARE_COMMON_SERIALISER(Realisation);
|
||||
|
||||
#define COMMA_ ,
|
||||
template<typename T>
|
||||
|
|
@ -81,8 +78,8 @@ DECLARE_COMMON_SERIALISER(std::set<T COMMA_ Compare>);
|
|||
template<typename... Ts>
|
||||
DECLARE_COMMON_SERIALISER(std::tuple<Ts...>);
|
||||
|
||||
template<typename K, typename V>
|
||||
DECLARE_COMMON_SERIALISER(std::map<K COMMA_ V>);
|
||||
template<typename K, typename V, typename Compare>
|
||||
DECLARE_COMMON_SERIALISER(std::map<K COMMA_ V COMMA_ Compare>);
|
||||
#undef COMMA_
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@
|
|||
#include "nix/store/path.hh"
|
||||
#include "nix/util/file-content-address.hh"
|
||||
#include "nix/util/variant-wrapper.hh"
|
||||
#include "nix/util/json-impls.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
@ -308,4 +309,15 @@ struct ContentAddressWithReferences
|
|||
Hash getHash() const;
|
||||
};
|
||||
|
||||
template<>
|
||||
struct json_avoids_null<ContentAddressMethod> : std::true_type
|
||||
{};
|
||||
|
||||
template<>
|
||||
struct json_avoids_null<ContentAddress> : std::true_type
|
||||
{};
|
||||
|
||||
} // namespace nix
|
||||
|
||||
JSON_IMPL(nix::ContentAddressMethod)
|
||||
JSON_IMPL(nix::ContentAddress)
|
||||
|
|
|
|||
|
|
@@ -427,34 +427,39 @@ std::string outputPathName(std::string_view drvName, OutputNameView outputName);
 * derivations (fixed-output or not) will have a different hash for each
 * output.
 */
struct DrvHash
struct DrvHashModulo
{
    /**
     * Map from output names to hashes
     * Single hash for the derivation
     *
     * This is for an input-addressed derivation that doesn't
     * transitively depend on any floating-CA derivations.
     */
    std::map<std::string, Hash> hashes;

    enum struct Kind : bool {
        /**
         * Statically determined derivations.
         * This hash will be directly used to compute the output paths
         */
        Regular,

        /**
         * Floating-output derivations (and their reverse dependencies).
         */
        Deferred,
    };
    using DrvHash = Hash;

    /**
     * The kind of derivation this is, simplified for just "derivation hash
     * modulo" purposes.
     * Known CA drv's output hashes, for fixed-output derivations whose
     * output hashes are always known since they are fixed up-front.
     */
    Kind kind;
};
    using CaOutputHashes = std::map<std::string, Hash>;

void operator|=(DrvHash::Kind & self, const DrvHash::Kind & other) noexcept;
    /**
     * This derivation doesn't yet have known output hashes.
     *
     * Either because itself is floating CA, or it (transtively) depends
     * on a floating CA derivation.
     */
    using DeferredDrv = std::monostate;

    using Raw = std::variant<DrvHash, CaOutputHashes, DeferredDrv>;

    Raw raw;

    bool operator==(const DrvHashModulo &) const = default;
    // auto operator <=> (const DrvHashModulo &) const = default;

    MAKE_WRAPPER_CONSTRUCTOR(DrvHashModulo);
};

/**
 * Returns hashes with the details of fixed-output subderivations

@@ -480,15 +485,17 @@ void operator|=(DrvHash::Kind & self, const DrvHash::Kind & other) noexcept;
 * ATerm, after subderivations have been likewise expunged from that
 * derivation.
 */
DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs);
DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs);

/**
 * Return a map associating each output to a hash that uniquely identifies its
 * derivation (modulo the self-references).
 * If a derivation is input addressed and doesn't yet have its input
 * addressed (is deferred) try using `hashDerivationModulo`.
 *
 * \todo What is the Hash in this map?
 * Does nothing if not deferred input-addressed, or
 * `hashDerivationModulo` indicates it is missing inputs' output paths
 * and is not yet ready (and must stay deferred).
 */
std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation & drv);
void resolveInputAddressed(Store & store, Derivation & drv);

struct DrvHashFct
{

@@ -503,7 +510,7 @@ struct DrvHashFct
/**
 * Memoisation of hashDerivationModulo().
 */
typedef boost::concurrent_flat_map<StorePath, DrvHash, DrvHashFct> DrvHashes;
typedef boost::concurrent_flat_map<StorePath, DrvHashModulo, DrvHashFct> DrvHashes;

// FIXME: global, though at least thread-safe.
extern DrvHashes drvHashes;
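Callers that previously looked at `DrvHash::kind` now branch on which alternative of `DrvHashModulo::Raw` is present. A hedged sketch of that dispatch, using only the alternatives declared above; the function itself is hypothetical:

// Hypothetical usage sketch, not part of the diff.
#include <optional>
#include <string>
#include <variant>

#include "nix/store/derivations.hh"

std::optional<nix::Hash> exampleOutputHash(const nix::DrvHashModulo & h, const std::string & outputName)
{
    using namespace nix;

    if (auto * drvHash = std::get_if<DrvHashModulo::DrvHash>(&h.raw))
        // Input-addressed: one hash covers every output.
        return *drvHash;

    if (auto * outputHashes = std::get_if<DrvHashModulo::CaOutputHashes>(&h.raw)) {
        // Fixed-output: per-output hashes, known up front.
        if (auto it = outputHashes->find(outputName); it != outputHashes->end())
            return it->second;
        return std::nullopt;
    }

    // DeferredDrv: not enough information yet (a floating-CA derivation is in the closure).
    return std::nullopt;
}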
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
///@file
|
||||
|
||||
#include "nix/store/dummy-store.hh"
|
||||
#include "nix/store/derivations.hh"
|
||||
|
||||
#include <boost/unordered/concurrent_flat_map.hpp>
|
||||
|
||||
|
|
@ -22,14 +23,22 @@ struct DummyStore : virtual Store
|
|||
{
|
||||
UnkeyedValidPathInfo info;
|
||||
ref<MemorySourceAccessor> contents;
|
||||
|
||||
bool operator==(const PathInfoAndContents &) const;
|
||||
};
|
||||
|
||||
/**
|
||||
* This is map conceptually owns the file system objects for each
|
||||
* This map conceptually owns the file system objects for each
|
||||
* store object.
|
||||
*/
|
||||
boost::concurrent_flat_map<StorePath, PathInfoAndContents> contents;
|
||||
|
||||
/**
|
||||
* This map conceptually owns every derivation, allowing us to
|
||||
* avoid "on-disk drv format" serialization round-trips.
|
||||
*/
|
||||
boost::concurrent_flat_map<StorePath, Derivation> derivations;
|
||||
|
||||
/**
|
||||
* The build trace maps the pair of a content-addressing (fixed or
|
||||
* floating) derivations an one of its output to a
|
||||
|
|
@ -40,13 +49,21 @@ struct DummyStore : virtual Store
|
|||
* outer map for the derivation, and inner maps for the outputs of a
|
||||
* given derivation.
|
||||
*/
|
||||
boost::concurrent_flat_map<Hash, std::map<std::string, ref<UnkeyedRealisation>>> buildTrace;
|
||||
boost::concurrent_flat_map<StorePath, std::map<std::string, ref<UnkeyedRealisation>>> buildTrace;
|
||||
|
||||
DummyStore(ref<const Config> config)
|
||||
: Store{*config}
|
||||
, config(config)
|
||||
{
|
||||
}
|
||||
|
||||
bool operator==(const DummyStore &) const;
|
||||
};
|
||||
|
||||
template<>
|
||||
struct json_avoids_null<DummyStore::PathInfoAndContents> : std::true_type
|
||||
{};
|
||||
|
||||
} // namespace nix
|
||||
|
||||
JSON_IMPL(nix::DummyStore::PathInfoAndContents)
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
///@file
|
||||
|
||||
#include "nix/store/store-api.hh"
|
||||
#include "nix/util/json-impls.hh"
|
||||
|
||||
#include <boost/unordered/concurrent_flat_map.hpp>
|
||||
|
||||
|
|
@ -65,4 +66,10 @@ struct DummyStoreConfig : public std::enable_shared_from_this<DummyStoreConfig>,
|
|||
}
|
||||
};
|
||||
|
||||
template<>
|
||||
struct json_avoids_null<ref<nix::DummyStore>> : std::true_type
|
||||
{};
|
||||
|
||||
} // namespace nix
|
||||
|
||||
JSON_IMPL(nix::ref<nix::DummyStore>)
|
||||
|
|
|
|||
|
|
@ -56,14 +56,14 @@ LENGTH_PREFIXED_PROTO_HELPER(Inner, std::vector<T>);
|
|||
#define COMMA_ ,
|
||||
template<class Inner, typename T, typename Compare>
|
||||
LENGTH_PREFIXED_PROTO_HELPER(Inner, std::set<T COMMA_ Compare>);
|
||||
#undef COMMA_
|
||||
|
||||
template<class Inner, typename... Ts>
|
||||
LENGTH_PREFIXED_PROTO_HELPER(Inner, std::tuple<Ts...>);
|
||||
|
||||
template<class Inner, typename K, typename V>
|
||||
#define LENGTH_PREFIXED_PROTO_HELPER_X std::map<K, V>
|
||||
template<class Inner, typename K, typename V, typename Compare>
|
||||
#define LENGTH_PREFIXED_PROTO_HELPER_X std::map<K, V, Compare>
|
||||
LENGTH_PREFIXED_PROTO_HELPER(Inner, LENGTH_PREFIXED_PROTO_HELPER_X);
|
||||
#undef COMMA_
|
||||
|
||||
template<class Inner, typename T>
|
||||
std::vector<T>
|
||||
|
|
@ -109,11 +109,11 @@ void LengthPrefixedProtoHelper<Inner, std::set<T, Compare>>::write(
|
|||
}
|
||||
}
|
||||
|
||||
template<class Inner, typename K, typename V>
|
||||
std::map<K, V>
|
||||
LengthPrefixedProtoHelper<Inner, std::map<K, V>>::read(const StoreDirConfig & store, typename Inner::ReadConn conn)
|
||||
template<class Inner, typename K, typename V, typename Compare>
|
||||
std::map<K, V, Compare> LengthPrefixedProtoHelper<Inner, std::map<K, V, Compare>>::read(
|
||||
const StoreDirConfig & store, typename Inner::ReadConn conn)
|
||||
{
|
||||
std::map<K, V> resMap;
|
||||
std::map<K, V, Compare> resMap;
|
||||
auto size = readNum<size_t>(conn.from);
|
||||
while (size--) {
|
||||
auto k = S<K>::read(store, conn);
|
||||
|
|
@ -123,9 +123,9 @@ LengthPrefixedProtoHelper<Inner, std::map<K, V>>::read(const StoreDirConfig & st
|
|||
return resMap;
|
||||
}
|
||||
|
||||
template<class Inner, typename K, typename V>
|
||||
void LengthPrefixedProtoHelper<Inner, std::map<K, V>>::write(
|
||||
const StoreDirConfig & store, typename Inner::WriteConn conn, const std::map<K, V> & resMap)
|
||||
template<class Inner, typename K, typename V, typename Compare>
|
||||
void LengthPrefixedProtoHelper<Inner, std::map<K, V, Compare>>::write(
|
||||
const StoreDirConfig & store, typename Inner::WriteConn conn, const std::map<K, V, Compare> & resMap)
|
||||
{
|
||||
conn.to << resMap.size();
|
||||
for (auto & i : resMap) {
|
||||
|
|
|
|||
|
|
@ -9,17 +9,38 @@ namespace nix {
|
|||
|
||||
struct StoreDirConfig;
|
||||
|
||||
struct NarInfo : ValidPathInfo
|
||||
struct UnkeyedNarInfo : virtual UnkeyedValidPathInfo
|
||||
{
|
||||
std::string url;
|
||||
std::string compression;
|
||||
std::optional<Hash> fileHash;
|
||||
uint64_t fileSize = 0;
|
||||
|
||||
UnkeyedNarInfo(UnkeyedValidPathInfo info)
|
||||
: UnkeyedValidPathInfo(std::move(info))
|
||||
{
|
||||
}
|
||||
|
||||
bool operator==(const UnkeyedNarInfo &) const = default;
|
||||
// TODO libc++ 16 (used by darwin) missing `std::optional::operator <=>`, can't do yet
|
||||
// auto operator <=>(const NarInfo &) const = default;
|
||||
|
||||
nlohmann::json toJSON(const StoreDirConfig * store, bool includeImpureInfo) const override;
|
||||
static UnkeyedNarInfo fromJSON(const StoreDirConfig * store, const nlohmann::json & json);
|
||||
};
|
||||
|
||||
/**
|
||||
* Key and the extra NAR fields
|
||||
*/
|
||||
struct NarInfo : ValidPathInfo, UnkeyedNarInfo
|
||||
{
|
||||
NarInfo() = delete;
|
||||
|
||||
NarInfo(ValidPathInfo info)
|
||||
: ValidPathInfo{std::move(info)}
|
||||
: UnkeyedValidPathInfo(std::move(static_cast<UnkeyedValidPathInfo &&>(info)))
|
||||
// later moves will be partially ignored
|
||||
, ValidPathInfo(std::move(info))
|
||||
, UnkeyedNarInfo(std::move(info))
|
||||
{
|
||||
}
|
||||
|
||||
|
|
@ -37,13 +58,10 @@ struct NarInfo : ValidPathInfo
|
|||
NarInfo(const StoreDirConfig & store, const std::string & s, const std::string & whence);
|
||||
|
||||
bool operator==(const NarInfo &) const = default;
|
||||
// TODO libc++ 16 (used by darwin) missing `std::optional::operator <=>`, can't do yet
|
||||
// auto operator <=>(const NarInfo &) const = default;
|
||||
|
||||
std::string to_string(const StoreDirConfig & store) const;
|
||||
|
||||
nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const override;
|
||||
static NarInfo fromJSON(const StoreDirConfig & store, const StorePath & path, const nlohmann::json & json);
|
||||
};
|
||||
|
||||
} // namespace nix
|
||||
|
||||
JSON_IMPL(nix::UnkeyedNarInfo)
|
||||
|
|
|
|||
|
|
@ -117,11 +117,11 @@ struct UnkeyedValidPathInfo
|
|||
* @param includeImpureInfo If true, variable elements such as the
|
||||
* registration time are included.
|
||||
*/
|
||||
virtual nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const;
|
||||
static UnkeyedValidPathInfo fromJSON(const StoreDirConfig & store, const nlohmann::json & json);
|
||||
virtual nlohmann::json toJSON(const StoreDirConfig * store, bool includeImpureInfo) const;
|
||||
static UnkeyedValidPathInfo fromJSON(const StoreDirConfig * store, const nlohmann::json & json);
|
||||
};
|
||||
|
||||
struct ValidPathInfo : UnkeyedValidPathInfo
|
||||
struct ValidPathInfo : virtual UnkeyedValidPathInfo
|
||||
{
|
||||
StorePath path;
|
||||
|
||||
|
|
@ -174,10 +174,14 @@ struct ValidPathInfo : UnkeyedValidPathInfo
|
|||
|
||||
ValidPathInfo(StorePath && path, UnkeyedValidPathInfo info)
|
||||
: UnkeyedValidPathInfo(info)
|
||||
, path(std::move(path)) {};
|
||||
, path(std::move(path))
|
||||
{
|
||||
}
|
||||
|
||||
ValidPathInfo(const StorePath & path, UnkeyedValidPathInfo info)
|
||||
: UnkeyedValidPathInfo(info)
|
||||
, path(path) {};
|
||||
: ValidPathInfo(StorePath{path}, std::move(info))
|
||||
{
|
||||
}
|
||||
|
||||
static ValidPathInfo
|
||||
makeFromCA(const StoreDirConfig & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash);
|
||||
|
|
@ -191,3 +195,5 @@ static_assert(std::is_move_constructible_v<ValidPathInfo>);
|
|||
using ValidPathInfos = std::map<StorePath, ValidPathInfo>;
|
||||
|
||||
} // namespace nix
|
||||
|
||||
JSON_IMPL(nix::UnkeyedValidPathInfo)
|
||||
|
|
|
|||
|
|
@@ -18,33 +18,40 @@ struct OutputsSpec;
/**
 * A general `Realisation` key.
 *
 * This is similar to a `DerivedPath::Opaque`, but the derivation is
 * identified by its "hash modulo" instead of by its store path.
 * This is similar to a `DerivedPath::Built`, except it is only a single
 * step: `drvPath` is a `StorePath` rather than a `DerivedPath`.
 */
struct DrvOutput
{
    /**
     * The hash modulo of the derivation.
     *
     * Computed from the derivation itself for most types of
     * derivations, but computed from the (fixed) content address of the
     * output for fixed-output derivations.
     * The store path to the derivation
     */
    Hash drvHash;
    StorePath drvPath;

    /**
     * The name of the output.
     */
    OutputName outputName;

    /**
     * Skips the store dir on the `drvPath`
     */
    std::string to_string() const;

    std::string strHash() const
    {
        return drvHash.to_string(HashFormat::Base16, true);
    }
    /**
     * Skips the store dir on the `drvPath`
     */
    static DrvOutput from_string(std::string_view);

    static DrvOutput parse(const std::string &);
    /**
     * Includes the store dir on `drvPath`
     */
    std::string render(const StoreDirConfig & store) const;

    /**
     * Includes the store dir on `drvPath`
     */
    static DrvOutput parse(const StoreDirConfig & store, std::string_view);

    bool operator==(const DrvOutput &) const = default;
    auto operator<=>(const DrvOutput &) const = default;
@ -56,14 +63,6 @@ struct UnkeyedRealisation
|
|||
|
||||
StringSet signatures;
|
||||
|
||||
/**
|
||||
* The realisations that are required for the current one to be valid.
|
||||
*
|
||||
* When importing this realisation, the store will first check that all its
|
||||
* dependencies exist, and map to the correct output path
|
||||
*/
|
||||
std::map<DrvOutput, StorePath> dependentRealisations;
|
||||
|
||||
std::string fingerprint(const DrvOutput & key) const;
|
||||
|
||||
void sign(const DrvOutput & key, const Signer &);
|
||||
|
|
@ -72,6 +71,16 @@ struct UnkeyedRealisation
|
|||
|
||||
size_t checkSignatures(const DrvOutput & key, const PublicKeys & publicKeys) const;
|
||||
|
||||
/**
|
||||
* Just check the `outPath`. Signatures don't matter for this.
|
||||
* Callers must ensure that the corresponding key is the same for
|
||||
* most use-cases.
|
||||
*/
|
||||
bool isCompatibleWith(const UnkeyedRealisation & other) const
|
||||
{
|
||||
return outPath == other.outPath;
|
||||
}
|
||||
|
||||
const StorePath & getPath() const
|
||||
{
|
||||
return outPath;
|
||||
|
|
@ -85,12 +94,6 @@ struct Realisation : UnkeyedRealisation
|
|||
{
|
||||
DrvOutput id;
|
||||
|
||||
bool isCompatibleWith(const UnkeyedRealisation & other) const;
|
||||
|
||||
static std::set<Realisation> closure(Store &, const std::set<Realisation> &);
|
||||
|
||||
static void closure(Store &, const std::set<Realisation> &, std::set<Realisation> & res);
|
||||
|
||||
bool operator==(const Realisation &) const = default;
|
||||
auto operator<=>(const Realisation &) const = default;
|
||||
};
|
||||
|
|
@ -101,16 +104,7 @@ struct Realisation : UnkeyedRealisation
|
|||
* Since these are the outputs of a single derivation, we know the
|
||||
* output names are unique so we can use them as the map key.
|
||||
*/
|
||||
typedef std::map<OutputName, Realisation> SingleDrvOutputs;
|
||||
|
||||
/**
|
||||
* Collection type for multiple derivations' outputs' `Realisation`s.
|
||||
*
|
||||
* `DrvOutput` is used because in general the derivations are not all
|
||||
* the same, so we need to identify firstly which derivation, and
|
||||
* secondly which output of that derivation.
|
||||
*/
|
||||
typedef std::map<DrvOutput, Realisation> DrvOutputs;
|
||||
typedef std::map<OutputName, UnkeyedRealisation> SingleDrvOutputs;
|
||||
|
||||
struct OpaquePath
|
||||
{
|
||||
|
|
@ -154,10 +148,6 @@ struct RealisedPath
|
|||
*/
|
||||
const StorePath & path() const &;
|
||||
|
||||
void closure(Store & store, Set & ret) const;
|
||||
static void closure(Store & store, const Set & startPaths, Set & ret);
|
||||
Set closure(Store & store) const;
|
||||
|
||||
bool operator==(const RealisedPath &) const = default;
|
||||
auto operator<=>(const RealisedPath &) const = default;
|
||||
};
|
||||
|
|
@ -165,22 +155,21 @@ struct RealisedPath
|
|||
class MissingRealisation : public Error
|
||||
{
|
||||
public:
|
||||
MissingRealisation(DrvOutput & outputId)
|
||||
: MissingRealisation(outputId.outputName, outputId.strHash())
|
||||
MissingRealisation(const StoreDirConfig & store, DrvOutput & outputId)
|
||||
: MissingRealisation(store, outputId.drvPath, outputId.outputName)
|
||||
{
|
||||
}
|
||||
|
||||
MissingRealisation(std::string_view drv, OutputName outputName)
|
||||
: Error(
|
||||
"cannot operate on output '%s' of the "
|
||||
"unbuilt derivation '%s'",
|
||||
outputName,
|
||||
drv)
|
||||
{
|
||||
}
|
||||
MissingRealisation(const StoreDirConfig & store, const StorePath & drvPath, const OutputName & outputName);
|
||||
MissingRealisation(
|
||||
const StoreDirConfig & store,
|
||||
const SingleDerivedPath & drvPath,
|
||||
const StorePath & drvPathResolved,
|
||||
const OutputName & outputName);
|
||||
};
|
||||
|
||||
} // namespace nix
|
||||
|
||||
JSON_IMPL(nix::DrvOutput)
|
||||
JSON_IMPL(nix::UnkeyedRealisation)
|
||||
JSON_IMPL(nix::Realisation)
|
||||
|
|
|
|||
|
|
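The hunks above replace the hash-keyed `DrvOutput` (rendered `<drvHash>!<outputName>`) with a store-path-keyed one rendered as `<drvPath>^<outputName>`. A minimal standalone sketch of that rendering/parsing logic, using plain strings in place of the real `StorePath`/`StoreDirConfig` types and a made-up store path purely for illustration:

```cpp
// Standalone sketch of the new DrvOutput key format; plain strings stand in
// for StorePath and StoreDirConfig, and the store path below is hypothetical.
#include <cassert>
#include <stdexcept>
#include <string>
#include <utility>

// Render "<drvPath>^<outputName>", mirroring DrvOutput::render above.
std::string renderDrvOutput(const std::string & drvPath, const std::string & outputName)
{
    return drvPath + "^" + outputName;
}

// Parse it back, mirroring the new DrvOutput::parse (split on the last '^').
std::pair<std::string, std::string> parseDrvOutput(const std::string & s)
{
    auto n = s.rfind('^');
    if (n == std::string::npos)
        throw std::invalid_argument("invalid derivation output id '" + s + "': missing '^'");
    return {s.substr(0, n), s.substr(n + 1)};
}

int main()
{
    auto rendered = renderDrvOutput("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-hello.drv", "out");
    auto [drvPath, outputName] = parseDrvOutput(rendered);
    assert(outputName == "out");
    assert(renderDrvOutput(drvPath, outputName) == rendered);
}
```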
@ -34,8 +34,10 @@ SERVE_USE_LENGTH_PREFIX_SERIALISER(template<typename... Ts>, std::tuple<Ts...>)

#define SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA ,
SERVE_USE_LENGTH_PREFIX_SERIALISER(
template<typename K SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA typename V>,
std::map<K SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA V>)
template<typename K SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA typename V SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA
typename Compare>
,
std::map<K SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA V SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA Compare>)

/**
* Use `CommonProto` where possible.

@ -8,7 +8,7 @@ namespace nix {
#define SERVE_MAGIC_1 0x390c9deb
#define SERVE_MAGIC_2 0x5452eecb

#define SERVE_PROTOCOL_VERSION (2 << 8 | 7)
#define SERVE_PROTOCOL_VERSION (2 << 8 | 8)
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)

@ -18,6 +18,9 @@ struct Source;
// items being serialised
struct BuildResult;
struct UnkeyedValidPathInfo;
struct DrvOutput;
struct UnkeyedRealisation;
struct Realisation;

/**
* The "serve protocol", used by ssh:// stores.

@ -178,6 +181,12 @@ inline std::ostream & operator<<(std::ostream & s, ServeProto::Command op)
template<>
DECLARE_SERVE_SERIALISER(BuildResult);
template<>
DECLARE_SERVE_SERIALISER(DrvOutput);
template<>
DECLARE_SERVE_SERIALISER(UnkeyedRealisation);
template<>
DECLARE_SERVE_SERIALISER(Realisation);
template<>
DECLARE_SERVE_SERIALISER(UnkeyedValidPathInfo);
template<>
DECLARE_SERVE_SERIALISER(ServeProto::BuildOptions);

@ -190,8 +199,8 @@ DECLARE_SERVE_SERIALISER(std::set<T COMMA_ Compare>);
template<typename... Ts>
DECLARE_SERVE_SERIALISER(std::tuple<Ts...>);

template<typename K, typename V>
DECLARE_SERVE_SERIALISER(std::map<K COMMA_ V>);
template<typename K, typename V, typename Compare>
DECLARE_SERVE_SERIALISER(std::map<K COMMA_ V COMMA_ Compare>);
#undef COMMA_

} // namespace nix
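The serve protocol minor version is bumped from 7 to 8 here (and the worker protocol from 38 to 39 further down). A quick standalone check of the major/minor packing these macros rely on, restating the definitions from the hunk above:

```cpp
// Restates the version macros from the hunk above and checks the arithmetic.
#define SERVE_PROTOCOL_VERSION (2 << 8 | 8)
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)

static_assert(GET_PROTOCOL_MAJOR(SERVE_PROTOCOL_VERSION) == (2 << 8)); // major 2
static_assert(GET_PROTOCOL_MINOR(SERVE_PROTOCOL_VERSION) == 8);        // minor 8

int main() {}
```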
@ -778,15 +778,20 @@ public:
*/
Derivation derivationFromPath(const StorePath & drvPath);

/**
* Write a derivation to the Nix store, and return its path.
*/
virtual StorePath writeDerivation(const Derivation & drv, RepairFlag repair = NoRepair);

/**
* Read a derivation (which must already be valid).
*/
Derivation readDerivation(const StorePath & drvPath);
virtual Derivation readDerivation(const StorePath & drvPath);

/**
* Read a derivation from a potentially invalid path.
*/
Derivation readInvalidDerivation(const StorePath & drvPath);
virtual Derivation readInvalidDerivation(const StorePath & drvPath);

/**
* @param [out] out Place in here the set of all store paths in the

@ -1006,7 +1011,4 @@ decodeValidPathInfo(const Store & store, std::istream & str, std::optional<HashR

const ContentAddress * getDerivationCA(const BasicDerivation & drv);

std::map<DrvOutput, StorePath>
drvOutputReferences(Store & store, const Derivation & drv, const StorePath & outputPath, Store * evalStore = nullptr);

} // namespace nix

@ -34,8 +34,10 @@ WORKER_USE_LENGTH_PREFIX_SERIALISER(template<typename... Ts>, std::tuple<Ts...>)

#define WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA ,
WORKER_USE_LENGTH_PREFIX_SERIALISER(
template<typename K WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA typename V>,
std::map<K WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA V>)
template<typename K WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA typename V WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA
typename Compare>
,
std::map<K WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA V WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA Compare>)

/**
* Use `CommonProto` where possible.

@ -12,7 +12,7 @@ namespace nix {

/* Note: you generally shouldn't change the protocol version. Define a
new `WorkerProto::Feature` instead. */
#define PROTOCOL_VERSION (1 << 8 | 38)
#define PROTOCOL_VERSION (1 << 8 | 39)
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)

@ -34,6 +34,9 @@ struct BuildResult;
struct KeyedBuildResult;
struct ValidPathInfo;
struct UnkeyedValidPathInfo;
struct DrvOutput;
struct UnkeyedRealisation;
struct Realisation;
enum BuildMode : uint8_t;
enum TrustedFlag : bool;

@ -258,6 +261,14 @@ DECLARE_WORKER_SERIALISER(ValidPathInfo);
template<>
DECLARE_WORKER_SERIALISER(UnkeyedValidPathInfo);
template<>
DECLARE_WORKER_SERIALISER(DrvOutput);
template<>
DECLARE_WORKER_SERIALISER(UnkeyedRealisation);
template<>
DECLARE_WORKER_SERIALISER(Realisation);
template<>
DECLARE_WORKER_SERIALISER(std::optional<UnkeyedRealisation>);
template<>
DECLARE_WORKER_SERIALISER(BuildMode);
template<>
DECLARE_WORKER_SERIALISER(std::optional<TrustedFlag>);

@ -274,8 +285,8 @@ DECLARE_WORKER_SERIALISER(std::set<T COMMA_ Compare>);
template<typename... Ts>
DECLARE_WORKER_SERIALISER(std::tuple<Ts...>);

template<typename K, typename V>
DECLARE_WORKER_SERIALISER(std::map<K COMMA_ V>);
template<typename K, typename V, typename Compare>
DECLARE_WORKER_SERIALISER(std::map<K COMMA_ V COMMA_ Compare>);
#undef COMMA_

} // namespace nix

@ -58,9 +58,10 @@ protected:
std::shared_ptr<std::basic_iostream<char>> istream,
const std::string & mimeType) override
{
auto path2 = config->binaryCacheDir + "/" + path;
auto path2 = std::filesystem::path{config->binaryCacheDir} / path;
static std::atomic<int> counter{0};
Path tmp = fmt("%s.tmp.%d.%d", path2, getpid(), ++counter);
createDirs(path2.parent_path());
auto tmp = path2 + fmt(".tmp.%d.%d", getpid(), ++counter);
AutoDelete del(tmp, false);
StreamToSourceAdapter source(istream);
writeFile(tmp, source);
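The local-binary-cache hunk above swaps plain string concatenation for `std::filesystem::path`. A small standalone illustration of the standard operations it leans on (`/` composition, `parent_path()`, and appending a suffix without a separator); the directory and file names here are made up for the example:

```cpp
// Standalone illustration of the std::filesystem usage adopted above.
#include <cassert>
#include <filesystem>

namespace fs = std::filesystem;

int main()
{
    fs::path cacheDir = "/tmp/example-binary-cache";  // hypothetical binaryCacheDir
    fs::path path2 = cacheDir / "nar/abc123.nar.xz";  // '/' joins path components

    // parent_path() is what the new code hands to createDirs().
    assert(path2.parent_path() == cacheDir / "nar");

    // Appending a raw suffix (no separator) mirrors building the ".tmp.<pid>.<n>" name.
    fs::path tmp = path2;
    tmp += ".tmp.123.1";
    assert(tmp.filename() == "abc123.nar.xz.tmp.123.1");
}
```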
@ -110,8 +110,6 @@ struct LocalStore::State::Stmts
SQLiteStmt QueryAllRealisedOutputs;
SQLiteStmt QueryPathFromHashPart;
SQLiteStmt QueryValidPaths;
SQLiteStmt QueryRealisationReferences;
SQLiteStmt AddRealisationReference;
};

LocalStore::LocalStore(ref<const Config> config)

@ -390,21 +388,6 @@ LocalStore::LocalStore(ref<const Config> config)
where drvPath = ?
;
)");
state->stmts->QueryRealisationReferences.create(
state->db,
R"(
select drvPath, outputName from Realisations
join RealisationsRefs on realisationReference = Realisations.id
where referrer = ?;
)");
state->stmts->AddRealisationReference.create(
state->db,
R"(
insert or replace into RealisationsRefs (referrer, realisationReference)
values (
(select id from Realisations where drvPath = ? and outputName = ?),
(select id from Realisations where drvPath = ? and outputName = ?));
)");
}
}

@ -636,7 +619,7 @@ void LocalStore::registerDrvOutput(const Realisation & info)
auto combinedSignatures = oldR->signatures;
combinedSignatures.insert(info.signatures.begin(), info.signatures.end());
state->stmts->UpdateRealisedOutput
.use()(concatStringsSep(" ", combinedSignatures))(info.id.strHash())(info.id.outputName)
.use()(concatStringsSep(" ", combinedSignatures))(info.id.drvPath.to_string())(info.id.outputName)
.exec();
} else {
throw Error(

@ -650,29 +633,10 @@ void LocalStore::registerDrvOutput(const Realisation & info)
}
} else {
state->stmts->RegisterRealisedOutput
.use()(info.id.strHash())(info.id.outputName)(printStorePath(info.outPath))(
.use()(info.id.drvPath.to_string())(info.id.outputName)(printStorePath(info.outPath))(
concatStringsSep(" ", info.signatures))
.exec();
}
for (auto & [outputId, depPath] : info.dependentRealisations) {
auto localRealisation = queryRealisationCore_(*state, outputId);
if (!localRealisation)
throw Error(
"unable to register the derivation '%s' as it "
"depends on the non existent '%s'",
info.id.to_string(),
outputId.to_string());
if (localRealisation->second.outPath != depPath)
throw Error(
"unable to register the derivation '%s' as it "
"depends on a realisation of '%s' that doesn’t"
"match what we have locally",
info.id.to_string(),
outputId.to_string());
state->stmts->AddRealisationReference
.use()(info.id.strHash())(info.id.outputName)(outputId.strHash())(outputId.outputName)
.exec();
}
});
}

@ -1589,7 +1553,7 @@ void LocalStore::addSignatures(const StorePath & storePath, const StringSet & si
std::optional<std::pair<int64_t, UnkeyedRealisation>>
LocalStore::queryRealisationCore_(LocalStore::State & state, const DrvOutput & id)
{
auto useQueryRealisedOutput(state.stmts->QueryRealisedOutput.use()(id.strHash())(id.outputName));
auto useQueryRealisedOutput(state.stmts->QueryRealisedOutput.use()(id.drvPath.to_string())(id.outputName));
if (!useQueryRealisedOutput.next())
return std::nullopt;
auto realisationDbId = useQueryRealisedOutput.getInt(0);

@ -1611,21 +1575,6 @@ std::optional<const UnkeyedRealisation> LocalStore::queryRealisation_(LocalStore
return std::nullopt;
auto [realisationDbId, res] = *maybeCore;

std::map<DrvOutput, StorePath> dependentRealisations;
auto useRealisationRefs(state.stmts->QueryRealisationReferences.use()(realisationDbId));
while (useRealisationRefs.next()) {
auto depId = DrvOutput{
Hash::parseAnyPrefixed(useRealisationRefs.getStr(0)),
useRealisationRefs.getStr(1),
};
auto dependentRealisation = queryRealisationCore_(state, depId);
assert(dependentRealisation); // Enforced by the db schema
auto outputPath = dependentRealisation->second.outPath;
dependentRealisations.insert({depId, outputPath});
}

res.dependentRealisations = dependentRealisations;

return {res};
}

@ -239,16 +239,15 @@ MissingPaths Store::queryMissing(const std::vector<DerivedPath> & targets)

// If there are unknown output paths, attempt to find if the
// paths are known to substituters through a realisation.
auto outputHashes = staticOutputHashes(*this, *drv);
knownOutputPaths = true;

for (auto [outputName, hash] : outputHashes) {
for (auto & [outputName, _] : drv->outputs) {
if (!bfd.outputs.contains(outputName))
continue;

bool found = false;
for (auto & sub : getDefaultSubstituters()) {
auto realisation = sub->queryRealisation({hash, outputName});
auto realisation = sub->queryRealisation({drvPath, outputName});
if (!realisation)
continue;
found = true;

@ -329,65 +328,6 @@ StorePaths Store::topoSortPaths(const StorePathSet & paths)
}});
}

std::map<DrvOutput, StorePath>
drvOutputReferences(const std::set<Realisation> & inputRealisations, const StorePathSet & pathReferences)
{
std::map<DrvOutput, StorePath> res;

for (const auto & input : inputRealisations) {
if (pathReferences.count(input.outPath)) {
res.insert({input.id, input.outPath});
}
}

return res;
}

std::map<DrvOutput, StorePath>
drvOutputReferences(Store & store, const Derivation & drv, const StorePath & outputPath, Store * evalStore_)
{
auto & evalStore = evalStore_ ? *evalStore_ : store;

std::set<Realisation> inputRealisations;

std::function<void(const StorePath &, const DerivedPathMap<StringSet>::ChildNode &)> accumRealisations;

accumRealisations = [&](const StorePath & inputDrv, const DerivedPathMap<StringSet>::ChildNode & inputNode) {
if (!inputNode.value.empty()) {
auto outputHashes = staticOutputHashes(evalStore, evalStore.readDerivation(inputDrv));
for (const auto & outputName : inputNode.value) {
auto outputHash = get(outputHashes, outputName);
if (!outputHash)
throw Error(
"output '%s' of derivation '%s' isn't realised", outputName, store.printStorePath(inputDrv));
DrvOutput key{*outputHash, outputName};
auto thisRealisation = store.queryRealisation(key);
if (!thisRealisation)
throw Error(
"output '%s' of derivation '%s' isn’t built", outputName, store.printStorePath(inputDrv));
inputRealisations.insert({*thisRealisation, std::move(key)});
}
}
if (!inputNode.value.empty()) {
auto d = makeConstantStorePathRef(inputDrv);
for (const auto & [outputName, childNode] : inputNode.childMap) {
SingleDerivedPath next = SingleDerivedPath::Built{d, outputName};
accumRealisations(
// TODO deep resolutions for dynamic derivations, issue #8947, would go here.
resolveDerivedPath(store, next, evalStore_),
childNode);
}
}
};

for (const auto & [inputDrv, inputNode] : drv.inputDrvs.map)
accumRealisations(inputDrv, inputNode);

auto info = store.queryPathInfo(outputPath);

return drvOutputReferences(Realisation::closure(store, inputRealisations), info->references);
}

OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd, Store * evalStore_)
{
auto drvPath = resolveDerivedPath(store, *bfd.drvPath, evalStore_);

@ -420,7 +360,7 @@ OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd,
OutputPathMap outputs;
for (auto & [outputName, outputPathOpt] : outputsOpt) {
if (!outputPathOpt)
throw MissingRealisation(bfd.drvPath->to_string(store), outputName);
throw MissingRealisation(store, *bfd.drvPath, drvPath, outputName);
auto & outputPath = *outputPathOpt;
outputs.insert_or_assign(outputName, outputPath);
}

@ -444,7 +384,7 @@ StorePath resolveDerivedPath(Store & store, const SingleDerivedPath & req, Store
bfd.output);
auto & optPath = outputPaths.at(bfd.output);
if (!optPath)
throw MissingRealisation(bfd.drvPath->to_string(store), bfd.output);
throw MissingRealisation(store, *bfd.drvPath, drvPath, bfd.output);
return *optPath;
},
},

@ -44,10 +44,16 @@ create table if not exists NARs (

create table if not exists Realisations (
cache integer not null,
outputId text not null,
content blob, -- Json serialisation of the realisation, or null if the realisation is absent

drvPath text not null,
outputName text not null,

-- The following are null if the realisation is absent
outputPath text,
sigs text,

timestamp integer not null,
primary key (cache, outputId),
primary key (cache, drvPath, outputName),
foreign key (cache) references BinaryCaches(id) on delete cascade
);

@ -121,24 +127,24 @@ public:
state->insertRealisation.create(
state->db,
R"(
insert or replace into Realisations(cache, outputId, content, timestamp)
values (?, ?, ?, ?)
insert or replace into Realisations(cache, drvPath, outputName, outputPath, sigs, timestamp)
values (?, ?, ?, ?, ?, ?)
)");

state->insertMissingRealisation.create(
state->db,
R"(
insert or replace into Realisations(cache, outputId, timestamp)
values (?, ?, ?)
insert or replace into Realisations(cache, drvPath, outputName, timestamp)
values (?, ?, ?, ?)
)");

state->queryRealisation.create(
state->db,
R"(
select content from Realisations
where cache = ? and outputId = ? and
((content is null and timestamp > ?) or
(content is not null and timestamp > ?))
select outputPath, sigs from Realisations
where cache = ? and drvPath = ? and outputName = ? and
((outputPath is null and timestamp > ?) or
(outputPath is not null and timestamp > ?))
)");

/* Periodically purge expired entries from the database. */

@ -295,22 +301,27 @@ public:

auto now = time(0);

auto queryRealisation(state->queryRealisation.use()(cache.id)(id.to_string())(
auto queryRealisation(state->queryRealisation.use()(cache.id)(id.drvPath.to_string())(id.outputName)(
now - settings.ttlNegativeNarInfoCache)(now - settings.ttlPositiveNarInfoCache));

if (!queryRealisation.next())
return {oUnknown, 0};
return {oUnknown, nullptr};

if (queryRealisation.isNull(0))
return {oInvalid, 0};
return {oInvalid, nullptr};

try {
return {
oValid,
std::make_shared<Realisation>(nlohmann::json::parse(queryRealisation.getStr(0))),
std::make_shared<Realisation>(
UnkeyedRealisation{
.outPath = StorePath{queryRealisation.getStr(0)},
.signatures = nlohmann::json::parse(queryRealisation.getStr(1)),
},
id),
};
} catch (Error & e) {
e.addTrace({}, "while parsing the local disk cache");
e.addTrace({}, "reading build trace key-value from the local disk cache");
throw;
}
});

@ -355,7 +366,9 @@ public:
auto & cache(getCache(*state, uri));

state->insertRealisation
.use()(cache.id)(realisation.id.to_string())(static_cast<nlohmann::json>(realisation).dump())(time(0))
.use()(cache.id)(realisation.id.drvPath.to_string())(realisation.id.outputName)(
realisation.outPath.to_string())(static_cast<nlohmann::json>(realisation.signatures).dump())(
time(0))
.exec();
});
}

@ -366,7 +379,7 @@ public:
auto state(_state.lock());

auto & cache(getCache(*state, uri));
state->insertMissingRealisation.use()(cache.id)(id.to_string())(time(0)).exec();
state->insertMissingRealisation.use()(cache.id)(id.drvPath.to_string())(id.outputName)(time(0)).exec();
});
}
};

@ -7,7 +7,9 @@
namespace nix {

NarInfo::NarInfo(const StoreDirConfig & store, const std::string & s, const std::string & whence)
: ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack
: UnkeyedValidPathInfo(Hash::dummy) // FIXME: hack
, ValidPathInfo(StorePath::dummy, static_cast<const UnkeyedValidPathInfo &>(*this)) // FIXME: hack
, UnkeyedNarInfo(static_cast<const UnkeyedValidPathInfo &>(*this))
{
unsigned line = 1;

@ -130,19 +132,23 @@ std::string NarInfo::to_string(const StoreDirConfig & store) const
return res;
}

nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const
nlohmann::json UnkeyedNarInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo) const
{
using nlohmann::json;

auto jsonObject = ValidPathInfo::toJSON(store, includeImpureInfo, hashFormat);
auto jsonObject = UnkeyedValidPathInfo::toJSON(store, includeImpureInfo);

if (includeImpureInfo) {
if (!url.empty())
jsonObject["url"] = url;
if (!compression.empty())
jsonObject["compression"] = compression;
if (fileHash)
jsonObject["downloadHash"] = fileHash->to_string(hashFormat, true);
if (fileHash) {
/* Back compat hack, see comment for "narHash" in
`UnkeyedValidPathInfo::toJSON` for details. */
jsonObject["downloadHash"] =
store ? static_cast<json>(fileHash->to_string(HashFormat::SRI, true)) : static_cast<json>(*fileHash);
}
if (fileSize)
jsonObject["downloadSize"] = fileSize;
}

@ -150,14 +156,11 @@ nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureI
return jsonObject;
}

NarInfo NarInfo::fromJSON(const StoreDirConfig & store, const StorePath & path, const nlohmann::json & json)
UnkeyedNarInfo UnkeyedNarInfo::fromJSON(const StoreDirConfig * store, const nlohmann::json & json)
{
using nlohmann::detail::value_t;

NarInfo res{ValidPathInfo{
path,
UnkeyedValidPathInfo::fromJSON(store, json),
}};
UnkeyedNarInfo res{UnkeyedValidPathInfo::fromJSON(store, json)};

if (json.contains("url"))
res.url = getString(valueAt(json, "url"));

@ -175,3 +178,19 @@ NarInfo NarInfo::fromJSON(const StoreDirConfig & store, const StorePath & path,
}

} // namespace nix

namespace nlohmann {

using namespace nix;

UnkeyedNarInfo adl_serializer<UnkeyedNarInfo>::from_json(const json & json)
{
return UnkeyedNarInfo::fromJSON(nullptr, json);
}

void adl_serializer<UnkeyedNarInfo>::to_json(json & json, const UnkeyedNarInfo & c)
{
json = c.toJSON(nullptr, true);
}

} // namespace nlohmann
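Several hunks in this diff route JSON conversion through `nlohmann::adl_serializer` specializations (declared, presumably, by the `JSON_IMPL` macro in the headers) so that plain `nlohmann::json` casts work even for types without a default constructor. A toy standalone sketch of that pattern; `Point` is a made-up stand-in, not one of the Nix types above:

```cpp
// Toy illustration of the nlohmann::adl_serializer specialization pattern
// used above for types like UnkeyedNarInfo.
#include <nlohmann/json.hpp>
#include <iostream>

struct Point
{
    int x, y;
    Point(int x, int y) : x(x), y(y) {} // no default constructor
};

namespace nlohmann {

template<>
struct adl_serializer<Point>
{
    // For non-default-constructible types, from_json returns by value.
    static Point from_json(const json & j)
    {
        return Point(j.at("x").get<int>(), j.at("y").get<int>());
    }

    static void to_json(json & j, const Point & p)
    {
        j = json{{"x", p.x}, {"y", p.y}};
    }
};

} // namespace nlohmann

int main()
{
    Point p{3, 4};
    nlohmann::json j = p;    // uses to_json
    auto q = j.get<Point>(); // uses from_json
    std::cout << j.dump() << " " << q.x << "\n";
}
```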
@ -149,26 +149,32 @@ ValidPathInfo ValidPathInfo::makeFromCA(
return res;
}

nlohmann::json
UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const
nlohmann::json UnkeyedValidPathInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo) const
{
using nlohmann::json;

auto jsonObject = json::object();

jsonObject["narHash"] = narHash.to_string(hashFormat, true);
/* Back-compat hack, if we are passing a `StoreDirConfig`, do SRI,
which `nix path-info` has always down. Otherwise, use the new
cannonical JSON serialization for `Hash`. */
jsonObject["narHash"] =
store ? static_cast<json>(narHash.to_string(HashFormat::SRI, true)) : static_cast<json>(narHash);
jsonObject["narSize"] = narSize;

{
auto & jsonRefs = jsonObject["references"] = json::array();
for (auto & ref : references)
jsonRefs.emplace_back(store.printStorePath(ref));
jsonRefs.emplace_back(store ? static_cast<json>(store->printStorePath(ref)) : static_cast<json>(ref));
}

jsonObject["ca"] = ca ? (std::optional{renderContentAddress(*ca)}) : std::nullopt;
jsonObject["ca"] = ca ? (store ? static_cast<json>(renderContentAddress(*ca)) : static_cast<json>(*ca))
: static_cast<json>(nullptr);

if (includeImpureInfo) {
jsonObject["deriver"] = deriver ? (std::optional{store.printStorePath(*deriver)}) : std::nullopt;
jsonObject["deriver"] = deriver ? (store ? static_cast<json>(std::optional{store->printStorePath(*deriver)})
: static_cast<json>(std::optional{*deriver}))
: static_cast<json>(std::optional<StorePath>{});

jsonObject["registrationTime"] = registrationTime ? (std::optional{registrationTime}) : std::nullopt;

@ -182,20 +188,23 @@ UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInf
return jsonObject;
}

UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store, const nlohmann::json & _json)
UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig * store, const nlohmann::json & _json)
{
UnkeyedValidPathInfo res{
Hash(Hash::dummy),
};

auto & json = getObject(_json);
res.narHash = Hash::parseAny(getString(valueAt(json, "narHash")), std::nullopt);
res.narHash = [&] {
auto & j = valueAt(json, "narHash");
return store ? Hash::parseAny(getString(j), std::nullopt) : static_cast<Hash>(j);
}();
res.narSize = getUnsigned(valueAt(json, "narSize"));

try {
auto references = getStringList(valueAt(json, "references"));
for (auto & input : references)
res.references.insert(store.parseStorePath(static_cast<const std::string &>(input)));
res.references.insert(store ? store->parseStorePath(getString(input)) : static_cast<StorePath>(input));
} catch (Error & e) {
e.addTrace({}, "while reading key 'references'");
throw;

@ -205,11 +214,11 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store
// missing is for back-compat.
if (json.contains("ca"))
if (auto * rawCa = getNullable(valueAt(json, "ca")))
res.ca = ContentAddress::parse(getString(*rawCa));
res.ca = store ? ContentAddress::parse(getString(*rawCa)) : static_cast<ContentAddress>(*rawCa);

if (json.contains("deriver"))
if (auto * rawDeriver = getNullable(valueAt(json, "deriver")))
res.deriver = store.parseStorePath(getString(*rawDeriver));
res.deriver = store ? store->parseStorePath(getString(*rawDeriver)) : static_cast<StorePath>(*rawDeriver);

if (json.contains("registrationTime"))
if (auto * rawRegistrationTime = getNullable(valueAt(json, "registrationTime")))

@ -225,3 +234,19 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store
}

} // namespace nix

namespace nlohmann {

using namespace nix;

UnkeyedValidPathInfo adl_serializer<UnkeyedValidPathInfo>::from_json(const json & json)
{
return UnkeyedValidPathInfo::fromJSON(nullptr, json);
}

void adl_serializer<UnkeyedValidPathInfo>::to_json(json & json, const UnkeyedValidPathInfo & c)
{
json = c.toJSON(nullptr, true);
}

} // namespace nlohmann

@ -1,6 +1,5 @@
#include "nix/store/realisation.hh"
#include "nix/store/store-api.hh"
#include "nix/util/closure.hh"
#include "nix/util/signature/local-keys.hh"
#include "nix/util/json-utils.hh"
#include <nlohmann/json.hpp>

@ -9,63 +8,34 @@ namespace nix {

MakeError(InvalidDerivationOutputId, Error);

DrvOutput DrvOutput::parse(const std::string & strRep)
DrvOutput DrvOutput::parse(const StoreDirConfig & store, std::string_view s)
{
size_t n = strRep.find("!");
if (n == strRep.npos)
throw InvalidDerivationOutputId("Invalid derivation output id %s", strRep);

size_t n = s.rfind('^');
if (n == s.npos)
throw InvalidDerivationOutputId("Invalid derivation output id '%s': missing '^'", s);
return DrvOutput{
.drvHash = Hash::parseAnyPrefixed(strRep.substr(0, n)),
.outputName = strRep.substr(n + 1),
.drvPath = store.parseStorePath(s.substr(0, n)),
.outputName = OutputName{s.substr(n + 1)},
};
}

std::string DrvOutput::render(const StoreDirConfig & store) const
{
return std::string(store.printStorePath(drvPath)) + "^" + outputName;
}

std::string DrvOutput::to_string() const
{
return strHash() + "!" + outputName;
}

std::set<Realisation> Realisation::closure(Store & store, const std::set<Realisation> & startOutputs)
{
std::set<Realisation> res;
Realisation::closure(store, startOutputs, res);
return res;
}

void Realisation::closure(Store & store, const std::set<Realisation> & startOutputs, std::set<Realisation> & res)
{
auto getDeps = [&](const Realisation & current) -> std::set<Realisation> {
std::set<Realisation> res;
for (auto & [currentDep, _] : current.dependentRealisations) {
if (auto currentRealisation = store.queryRealisation(currentDep))
res.insert({*currentRealisation, currentDep});
else
throw Error("Unrealised derivation '%s'", currentDep.to_string());
}
return res;
};

computeClosure<Realisation>(
startOutputs,
res,
[&](const Realisation & current, std::function<void(std::promise<std::set<Realisation>> &)> processEdges) {
std::promise<std::set<Realisation>> promise;
try {
auto res = getDeps(current);
promise.set_value(res);
} catch (...) {
promise.set_exception(std::current_exception());
}
return processEdges(promise);
});
return std::string(drvPath.to_string()) + "^" + outputName;
}

std::string UnkeyedRealisation::fingerprint(const DrvOutput & key) const
{
nlohmann::json serialized = Realisation{*this, key};
serialized.erase("signatures");
return serialized.dump();
auto serialised = static_cast<nlohmann::json>(Realisation{*this, key});
auto value = serialised.find("value");
assert(value != serialised.end());
value->erase("signatures");
return serialised.dump();
}

void UnkeyedRealisation::sign(const DrvOutput & key, const Signer & signer)

@ -97,45 +67,20 @@ const StorePath & RealisedPath::path() const &
return std::visit([](auto & arg) -> auto & { return arg.getPath(); }, raw);
}

bool Realisation::isCompatibleWith(const UnkeyedRealisation & other) const
MissingRealisation::MissingRealisation(
const StoreDirConfig & store, const StorePath & drvPath, const OutputName & outputName)
: Error("cannot operate on output '%s' of the unbuilt derivation '%s'", outputName, store.printStorePath(drvPath))
{
if (outPath == other.outPath) {
if (dependentRealisations.empty() != other.dependentRealisations.empty()) {
warn(
"Encountered a realisation for '%s' with an empty set of "
"dependencies. This is likely an artifact from an older Nix. "
"I’ll try to fix the realisation if I can",
id.to_string());
return true;
} else if (dependentRealisations == other.dependentRealisations) {
return true;
}
}
return false;
}

void RealisedPath::closure(Store & store, const RealisedPath::Set & startPaths, RealisedPath::Set & ret)
MissingRealisation::MissingRealisation(
const StoreDirConfig & store,
const SingleDerivedPath & drvPath,
const StorePath & drvPathResolved,
const OutputName & outputName)
: MissingRealisation{store, drvPathResolved, outputName}
{
// FIXME: This only builds the store-path closure, not the real realisation
// closure
StorePathSet initialStorePaths, pathsClosure;
for (auto & path : startPaths)
initialStorePaths.insert(path.path());
store.computeFSClosure(initialStorePaths, pathsClosure);
ret.insert(startPaths.begin(), startPaths.end());
ret.insert(pathsClosure.begin(), pathsClosure.end());
}

void RealisedPath::closure(Store & store, RealisedPath::Set & ret) const
{
RealisedPath::closure(store, {*this}, ret);
}

RealisedPath::Set RealisedPath::closure(Store & store) const
{
RealisedPath::Set ret;
closure(store, ret);
return ret;
addTrace({}, "looking up realisation for derivation '%s'", drvPath.to_string(store));
}

} // namespace nix

@ -144,52 +89,62 @@ namespace nlohmann {

using namespace nix;

UnkeyedRealisation adl_serializer<UnkeyedRealisation>::from_json(const json & json0)
DrvOutput adl_serializer<DrvOutput>::from_json(const json & json)
{
auto json = getObject(json0);
auto obj = getObject(json);

return {
.drvPath = valueAt(obj, "drvPath"),
.outputName = getString(valueAt(obj, "outputName")),
};
}

void adl_serializer<DrvOutput>::to_json(json & json, const DrvOutput & drvOutput)
{
json = {
{"drvPath", drvOutput.drvPath},
{"outputName", drvOutput.outputName},
};
}

UnkeyedRealisation adl_serializer<UnkeyedRealisation>::from_json(const json & json)
{
auto obj = getObject(json);

StringSet signatures;
if (auto signaturesOpt = optionalValueAt(json, "signatures"))
signatures = *signaturesOpt;
if (auto * signaturesJson = get(obj, "signatures"))
signatures = getStringSet(*signaturesJson);

std::map<DrvOutput, StorePath> dependentRealisations;
if (auto jsonDependencies = optionalValueAt(json, "dependentRealisations"))
for (auto & [jsonDepId, jsonDepOutPath] : getObject(*jsonDependencies))
dependentRealisations.insert({DrvOutput::parse(jsonDepId), jsonDepOutPath});

return UnkeyedRealisation{
.outPath = valueAt(json, "outPath"),
return {
.outPath = valueAt(obj, "outPath"),
.signatures = signatures,
.dependentRealisations = dependentRealisations,
};
}

void adl_serializer<UnkeyedRealisation>::to_json(json & json, const UnkeyedRealisation & r)
{
auto jsonDependentRealisations = nlohmann::json::object();
for (auto & [depId, depOutPath] : r.dependentRealisations)
jsonDependentRealisations.emplace(depId.to_string(), depOutPath);
json = {
{"outPath", r.outPath},
{"signatures", r.signatures},
{"dependentRealisations", jsonDependentRealisations},
};
}

Realisation adl_serializer<Realisation>::from_json(const json & json0)
Realisation adl_serializer<Realisation>::from_json(const nlohmann::json & json)
{
auto json = getObject(json0);
auto obj = getObject(json);

return Realisation{
static_cast<UnkeyedRealisation>(json0),
DrvOutput::parse(valueAt(json, "id")),
return {
static_cast<UnkeyedRealisation>(valueAt(obj, "value")),
static_cast<DrvOutput>(valueAt(obj, "key")),
};
}

void adl_serializer<Realisation>::to_json(json & json, const Realisation & r)
{
json = static_cast<const UnkeyedRealisation &>(r);
json["id"] = r.id.to_string();
json = {
{"key", r.id},
{"value", static_cast<const UnkeyedRealisation &>(r)},
};
}

} // namespace nlohmann
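Per the serializers above, a keyed `Realisation` now round-trips as a `{"key", "value"}` object, where the key is the `{drvPath, outputName}` pair and the value carries at least `outPath` and `signatures` (the fingerprint is the same document with `value.signatures` erased). A standalone sketch of that shape using `nlohmann::json` directly; the store path and signature strings are placeholders, and the exact field set should be taken from the code above rather than from this sketch:

```cpp
// Builds the {"key", "value"} shape used by adl_serializer<Realisation> above
// and mimics the fingerprint step of erasing value["signatures"].
// The store paths and signature are made-up placeholders.
#include <nlohmann/json.hpp>
#include <cassert>
#include <iostream>

int main()
{
    nlohmann::json realisation = {
        {"key",
         {
             {"drvPath", "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-hello.drv"},
             {"outputName", "out"},
         }},
        {"value",
         {
             {"outPath", "/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-hello"},
             {"signatures", nlohmann::json::array({"cache.example.org-1:placeholder"})},
         }},
    };

    // Fingerprinting drops the signatures from the value, as in
    // UnkeyedRealisation::fingerprint above.
    auto fingerprint = realisation;
    fingerprint.at("value").erase("signatures");
    assert(!fingerprint.at("value").contains("signatures"));

    std::cout << fingerprint.dump() << "\n";
}
```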
@ -507,7 +507,7 @@ void RemoteStore::queryRealisationUncached(
try {
auto conn(getConnection());

if (GET_PROTOCOL_MINOR(conn->protoVersion) < 27) {
if (GET_PROTOCOL_MINOR(conn->protoVersion) < 39) {
warn("the daemon is too old to support content-addressing derivations, please upgrade it to 2.4");
return callback(nullptr);
}

@ -516,21 +516,12 @@ void RemoteStore::queryRealisationUncached(
conn->to << id.to_string();
conn.processStderr();

auto real = [&]() -> std::shared_ptr<const UnkeyedRealisation> {
if (GET_PROTOCOL_MINOR(conn->protoVersion) < 31) {
auto outPaths = WorkerProto::Serialise<std::set<StorePath>>::read(*this, *conn);
if (outPaths.empty())
return nullptr;
return std::make_shared<const UnkeyedRealisation>(UnkeyedRealisation{.outPath = *outPaths.begin()});
} else {
auto realisations = WorkerProto::Serialise<std::set<Realisation>>::read(*this, *conn);
if (realisations.empty())
return nullptr;
return std::make_shared<const UnkeyedRealisation>(*realisations.begin());
}
}();

callback(std::shared_ptr<const UnkeyedRealisation>(real));
callback([&]() -> std::shared_ptr<const UnkeyedRealisation> {
auto realisation = WorkerProto::Serialise<std::optional<UnkeyedRealisation>>::read(*this, *conn);
if (!realisation)
return nullptr;
return std::make_shared<const UnkeyedRealisation>(*realisation);
}());
} catch (...) {
return callback.rethrow();
}

@ -612,30 +603,19 @@ std::vector<KeyedBuildResult> RemoteStore::buildPathsWithResults(

OutputPathMap outputs;
auto drvPath = resolveDerivedPath(*evalStore, *bfd.drvPath);
auto drv = evalStore->readDerivation(drvPath);
const auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
auto built = resolveDerivedPath(*this, bfd, &*evalStore);
for (auto & [output, outputPath] : built) {
auto outputHash = get(outputHashes, output);
if (!outputHash)
throw Error(
"the derivation '%s' doesn't have an output named '%s'",
printStorePath(drvPath),
output);
auto outputId = DrvOutput{*outputHash, output};
auto outputId = DrvOutput{drvPath, output};
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
auto realisation = queryRealisation(outputId);
if (!realisation)
throw MissingRealisation(outputId);
success.builtOutputs.emplace(output, Realisation{*realisation, outputId});
throw MissingRealisation(*this, outputId);
success.builtOutputs.emplace(output, *realisation);
} else {
success.builtOutputs.emplace(
output,
Realisation{
UnkeyedRealisation{
.outPath = outputPath,
},
outputId,
UnkeyedRealisation{
.outPath = outputPath,
});
}
}

@ -281,9 +281,18 @@ std::vector<KeyedBuildResult> RestrictedStore::buildPathsWithResults(

for (auto & result : results) {
if (auto * successP = result.tryGetSuccess()) {
for (auto & [outputName, output] : successP->builtOutputs) {
newPaths.insert(output.outPath);
newRealisations.insert(output);
if (auto * pathBuilt = std::get_if<DerivedPathBuilt>(&result.path)) {
// TODO ugly extra IO
auto drvPath = resolveDerivedPath(*next, *pathBuilt->drvPath);
for (auto & [outputName, output] : successP->builtOutputs) {
newPaths.insert(output.outPath);
newRealisations.insert(
{output,
{
.drvPath = drvPath,
.outputName = outputName,
}});
}
}
}
}

@ -292,7 +301,7 @@ std::vector<KeyedBuildResult> RestrictedStore::buildPathsWithResults(
next->computeFSClosure(newPaths, closure);
for (auto & path : closure)
goal.addDependency(path);
for (auto & real : Realisation::closure(*next, newRealisations))
for (auto & real : newRealisations)
goal.addedDrvOutputs.insert(real.id);

return results;
Some files were not shown because too many files have changed in this diff.