Mirror of https://github.com/NixOS/nix.git, synced 2025-11-08 11:36:03 +01:00

Compare commits: 27 commits, 6f13c0bc18 ... d70f72412d
Commits in this range:

d70f72412d, daace78239, d596b9754e, 3f18cad5f1, 41b62aa979, af41eccb31, 6bd92d47e5,
b5302fc111, 724086005a, 038d74edf7, b177354c35, 2039235f6e, 0fd3b6fee6, b2f0472fe2,
91af29f37a, 099af7578f, 7e84ce3904, a828cf777a, 0507674a13, 4f85cfe824, 7d5567a8d7,
9f322398b4, e07510e504, ae15d4eaf3, 469123eda1, 144c66215b, d8cec03fce
61 changed files with 545 additions and 143 deletions
@@ -107,12 +107,29 @@ rec {
     };
   };
 
+  disable =
+    let
+      inherit (pkgs.stdenv) hostPlatform;
+    in
+    args@{
+      pkgName,
+      testName,
+      test,
+    }:
+    lib.any (b: b) [
+      # FIXME: Nix manual is impure and does not produce all settings on darwin
+      (hostPlatform.isDarwin && pkgName == "nix-manual" && testName == "linkcheck")
+    ];
+
   componentTests =
     (lib.concatMapAttrs (
       pkgName: pkg:
-      lib.concatMapAttrs (testName: test: {
+      lib.concatMapAttrs (
+        testName: test:
+        lib.optionalAttrs (!disable { inherit pkgName testName test; }) {
         "${componentTestsPrefix}${pkgName}-${testName}" = test;
-      }) (pkg.tests or { })
+        }
+      ) (pkg.tests or { })
     ) nixComponentsInstrumented)
     // lib.optionalAttrs (pkgs.stdenv.hostPlatform == pkgs.stdenv.buildPlatform) {
       "${componentTestsPrefix}nix-functional-tests" = nixComponentsInstrumented.nix-functional-tests;
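To make the shape of the new `disable` predicate concrete, here is a brief sketch of how it is applied to one test. The call below is illustrative only and not part of the diff; the `test` argument is unused by the current clause.

```nix
# Illustrative sketch, not part of the diff: `disable` receives one descriptor
# per (package, test) pair and returns true when that test should be skipped.
let
  skipped = disable {
    pkgName = "nix-manual";
    testName = "linkcheck";
    test = null; # the test derivation itself; ignored by the current clause
  };
in
# On Darwin this evaluates to { }, dropping the linkcheck test from the job set.
lib.optionalAttrs (!skipped) {
  "${componentTestsPrefix}nix-manual-linkcheck" = nixComponentsInstrumented.nix-manual.tests.linkcheck;
}
```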
@@ -3,7 +3,7 @@
 def transform_anchors_html:
-  . | gsub($empty_anchor_regex; "<a name=\"" + .anchor + "\"></a>")
+  . | gsub($empty_anchor_regex; "<a id=\"" + .anchor + "\"></a>")
   | gsub($anchor_regex; "<a href=\"#" + .anchor + "\" id=\"" + .anchor + "\">" + .text + "</a>");
@@ -18,6 +18,9 @@
 # Configuration Options
 
 version,
+
+# `tests` attribute
+testers,
 }:
 
 let
@@ -37,6 +40,7 @@ mkMesonDerivation (finalAttrs: {
   ../../src/libutil-tests/data/hash
   ../../src/libstore-tests/data/content-address
   ../../src/libstore-tests/data/store-path
+  ../../src/libstore-tests/data/realisation
   ../../src/libstore-tests/data/derived-path
   ../../src/libstore-tests/data/path-info
   ../../src/libstore-tests/data/nar-info
@@ -87,6 +91,29 @@ mkMesonDerivation (finalAttrs: {
     echo "doc manual ''$out/share/doc/nix/manual" >> ''$out/nix-support/hydra-build-products
   '';
 
+  /**
+    The root of the HTML manual.
+    E.g. "${nix-manual.site}/index.html" exists.
+  */
+  passthru.site = finalAttrs.finalPackage + "/share/doc/nix/manual";
+
+  passthru.tests = {
+    # https://nixos.org/manual/nixpkgs/stable/index.html#tester-lycheeLinkCheck
+    linkcheck = testers.lycheeLinkCheck {
+      inherit (finalAttrs.finalPackage) site;
+      extraConfig = {
+        exclude = [
+          # Exclude auto-generated JSON schema documentation which has
+          # auto-generated fragment IDs that don't match the link references
+          ".*/protocols/json/.*\\.html"
+          # Exclude undocumented builtins
+          ".*/language/builtins\\.html#builtins-addErrorContext"
+          ".*/language/builtins\\.html#builtins-appendContext"
+        ];
+      };
+    };
+  };
+
   meta = {
     platforms = lib.platforms.all;
   };
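For orientation, a small sketch (not part of the diff) of what the two new `passthru` attributes expose to downstream consumers of the `nix-manual` package:

```nix
# Illustrative only; the attribute names `site` and `tests.linkcheck` come from
# the hunk above, while the surrounding binding is a hypothetical consumer.
{
  manualRoot = nix-manual.site;            # ".../share/doc/nix/manual"
  linkCheck  = nix-manual.tests.linkcheck; # derivation built by testers.lycheeLinkCheck
}
```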
@@ -126,6 +126,7 @@
   - [Store Object Info](protocols/json/store-object-info.md)
   - [Derivation](protocols/json/derivation.md)
   - [Deriving Path](protocols/json/deriving-path.md)
+  - [Build Trace Entry](protocols/json/build-trace-entry.md)
 - [Serving Tarball Flakes](protocols/tarball-fetcher.md)
 - [Store Path Specification](protocols/store-path.md)
 - [Nix Archive (NAR) Format](protocols/nix-archive/index.md)
@@ -14,7 +14,7 @@ The moving parts of channels are:
 - The official channels listed at <https://nixos.org/channels>
 - The user-specific list of [subscribed channels](#subscribed-channels)
 - The [downloaded channel contents](#channels)
-- The [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path), set with the [`-I` option](#opt-i) or the [`NIX_PATH` environment variable](#env-NIX_PATH)
+- The [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path), set with the [`-I` option](#opt-I) or the [`NIX_PATH` environment variable](#env-NIX_PATH)
 
 > **Note**
 >
@@ -22,7 +22,7 @@ left untouched; this is not an error. It is also not an error if an
 element of *args* matches no installed derivations.
 
 For a description of how *args* is mapped to a set of store paths, see
-[`--install`](#operation---install). If *args* describes multiple
+[`--install`](./install.md). If *args* describes multiple
 store paths with the same symbolic name, only the one with the highest
 version is installed.
@@ -66,7 +66,7 @@ You can also build Nix for one of the [supported platforms](#platforms).
 This section assumes you are using Nix with the [`flakes`] and [`nix-command`] experimental features enabled.
 
 [`flakes`]: @docroot@/development/experimental-features.md#xp-feature-flakes
-[`nix-command`]: @docroot@/development/experimental-features.md#xp-nix-command
+[`nix-command`]: @docroot@/development/experimental-features.md#xp-feature-nix-command
 
 To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:
@@ -256,7 +256,7 @@ You can use any of the other supported environments in place of `nix-cli-ccacheS
 ## Editor integration
 
 The `clangd` LSP server is installed by default on the `clang`-based `devShell`s.
-See [supported compilation environments](#compilation-environments) and instructions how to set up a shell [with flakes](#nix-with-flakes) or in [classic Nix](#classic-nix).
+See [supported compilation environments](#compilation-environments) and instructions how to set up a shell [with flakes](#building-nix-with-flakes) or in [classic Nix](#building-nix).
 
 To use the LSP with your editor, you will want a `compile_commands.json` file telling `clangd` how we are compiling the code.
 Meson's configure always produces this inside the build directory.
@@ -119,7 +119,7 @@ This will:
 
 3. Stop the program when the test fails, allowing the user to then issue arbitrary commands to GDB.
 
-### Characterisation testing { #characaterisation-testing-unit }
+### Characterisation testing { #characterisation-testing-unit }
 
 See [functional characterisation testing](#characterisation-testing-functional) for a broader discussion of characterisation testing.
@@ -208,7 +208,7 @@
 
 - [impure derivation]{#gloss-impure-derivation}
 
-  [An experimental feature](#@docroot@/development/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure,
+  [An experimental feature](@docroot@/development/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure,
   so that they are always rebuilt, and their outputs not reused by subsequent calls to realise them.
 
 - [Nix database]{#gloss-nix-database}
@@ -279,7 +279,7 @@
 
   See [References](@docroot@/store/store-object.md#references) for details.
 
-- [referrer]{#gloss-reference}
+- [referrer]{#gloss-referrer}
 
   A reversed edge from one [store object] to another.
@@ -367,8 +367,8 @@
 
   Nix represents files as [file system objects][file system object], and how they belong together is encoded as [references][reference] between [store objects][store object] that contain these file system objects.
 
-  The [Nix language] allows denoting packages in terms of [attribute sets](@docroot@/language/types.md#attribute-set) containing:
-  - attributes that refer to the files of a package, typically in the form of [derivation outputs](#output),
+  The [Nix language] allows denoting packages in terms of [attribute sets](@docroot@/language/types.md#type-attrs) containing:
+  - attributes that refer to the files of a package, typically in the form of [derivation outputs](#gloss-output),
   - attributes with metadata, such as information about how the package is supposed to be used.
 
   The exact shape of these attribute sets is up to convention.
@@ -383,7 +383,7 @@
 
   [string]: ./language/types.md#type-string
   [path]: ./language/types.md#type-path
-  [attribute name]: ./language/types.md#attribute-set
+  [attribute name]: ./language/types.md#type-attrs
 
 - [base directory]{#gloss-base-directory}
@@ -333,7 +333,7 @@ Here is more information on the `output*` attributes, and what values they may b
 
   `outputHashAlgo` can only be `null` when `outputHash` follows the SRI format, because in that case the choice of hash algorithm is determined by `outputHash`.
 
-- [`outputHash`]{#adv-attr-outputHashAlgo}; [`outputHash`]{#adv-attr-outputHashMode}
+- [`outputHash`]{#adv-attr-outputHash}
 
   This will specify the output hash of the single output of a [fixed-output derivation].
@@ -16,7 +16,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect
 - [`name`]{#attr-name} ([String](@docroot@/language/types.md#type-string))
 
   A symbolic name for the derivation.
-  See [derivation outputs](@docroot@/store/derivation/index.md#outputs) for what this is affects.
+  See [derivation outputs](@docroot@/store/derivation/outputs/index.md#outputs) for what this is affects.
 
 [store path]: @docroot@/store/store-path.md
@@ -16,7 +16,7 @@ An *identifier* is an [ASCII](https://en.wikipedia.org/wiki/ASCII) character seq
 
 # Names
 
-A *name* can be written as an [identifier](#identifier) or a [string literal](./string-literals.md).
+A *name* can be written as an [identifier](#identifiers) or a [string literal](./string-literals.md).
 
 > **Syntax**
 >
@@ -137,7 +137,7 @@ This is an incomplete overview of language features, by example.
 </td>
 <td>
 
-  [Booleans](@docroot@/language/types.md#type-boolean)
+  [Booleans](@docroot@/language/types.md#type-bool)
 
 </td>
 </tr>
@@ -245,7 +245,7 @@ This is an incomplete overview of language features, by example.
 </td>
 <td>
 
-  An [attribute set](@docroot@/language/types.md#attribute-set) with attributes named `x` and `y`
+  An [attribute set](@docroot@/language/types.md#type-attrs) with attributes named `x` and `y`
 
 </td>
 </tr>
@@ -285,7 +285,7 @@ This is an incomplete overview of language features, by example.
 </td>
 <td>
 
-  [Lists](@docroot@/language/types.md#list) with three elements.
+  [Lists](@docroot@/language/types.md#type-list) with three elements.
 
 </td>
 </tr>
@@ -369,7 +369,7 @@ This is an incomplete overview of language features, by example.
 </td>
 <td>
 
-  [Attribute selection](@docroot@/language/types.md#attribute-set) (evaluates to `1`)
+  [Attribute selection](@docroot@/language/types.md#type-attrs) (evaluates to `1`)
 
 </td>
 </tr>
@@ -381,7 +381,7 @@ This is an incomplete overview of language features, by example.
 </td>
 <td>
 
-  [Attribute selection](@docroot@/language/types.md#attribute-set) with default (evaluates to `3`)
+  [Attribute selection](@docroot@/language/types.md#type-attrs) with default (evaluates to `3`)
 
 </td>
 </tr>
@@ -111,7 +111,7 @@ It creates an [attribute set] representing the string context, which can be insp
 
 [`builtins.hasContext`]: ./builtins.md#builtins-hasContext
 [`builtins.getContext`]: ./builtins.md#builtins-getContext
-[attribute set]: ./types.md#attribute-set
+[attribute set]: ./types.md#type-attrs
 
 ## Clearing string contexts
@@ -6,7 +6,7 @@ Such a construct is called *interpolated string*, and the expression inside is a
 
 [string]: ./types.md#type-string
 [path]: ./types.md#type-path
-[attribute set]: ./types.md#attribute-set
+[attribute set]: ./types.md#type-attrs
 
 > **Syntax**
 >
@@ -51,7 +51,7 @@ See [String literals](string-literals.md).
 
 Path literals can also include [string interpolation], besides being [interpolated into other expressions].
 
-[interpolated into other expressions]: ./string-interpolation.md#interpolated-expressions
+[interpolated into other expressions]: ./string-interpolation.md#interpolated-expression
 
 At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path.
@@ -235,7 +235,7 @@ of object-oriented programming, for example.
 
 ## Recursive sets
 
-Recursive sets are like normal [attribute sets](./types.md#attribute-set), but the attributes can refer to each other.
+Recursive sets are like normal [attribute sets](./types.md#type-attrs), but the attributes can refer to each other.
 
 > *rec-attrset* = `rec {` [ *name* `=` *expr* `;` `]`... `}`
@@ -287,7 +287,7 @@ This evaluates to `"foobar"`.
 
 ## Inheriting attributes
 
-When defining an [attribute set](./types.md#attribute-set) or in a [let-expression](#let-expressions) it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes).
+When defining an [attribute set](./types.md#type-attrs) or in a [let-expression](#let-expressions) it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes).
 This can be shortened using the `inherit` keyword.
 
 Example:
doc/manual/source/protocols/json/build-trace-entry.md (new file, 27 lines)
@@ -0,0 +1,27 @@
+{{#include build-trace-entry-v1-fixed.md}}
+
+## Examples
+
+### Simple build trace entry
+
+```json
+{{#include schema/build-trace-entry-v1/simple.json}}
+```
+
+### Build trace entry with dependencies
+
+```json
+{{#include schema/build-trace-entry-v1/with-dependent-realisations.json}}
+```
+
+### Build trace entry with signature
+
+```json
+{{#include schema/build-trace-entry-v1/with-signature.json}}
+```
+
+<!--
+## Raw Schema
+
+[JSON Schema for Build Trace Entry v1](schema/build-trace-entry-v1.json)
+-->
@@ -15,6 +15,7 @@ schemas = [
   'store-object-info-v1',
   'derivation-v3',
   'deriving-path-v1',
+  'build-trace-entry-v1',
 ]
 
 schema_files = files()
doc/manual/source/protocols/json/schema/build-trace-entry-v1 (new symbolic link)
@@ -0,0 +1 @@
+../../../../../../src/libstore-tests/data/realisation
@@ -0,0 +1,74 @@
+"$schema": "http://json-schema.org/draft-04/schema"
+"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/build-trace-entry-v1.json"
+title: Build Trace Entry
+description: |
+  A record of a successful build outcome for a specific derivation output.
+
+  This schema describes the JSON representation of a [build trace entry](@docroot@/store/build-trace.md) entry.
+
+  > **Warning**
+  >
+  > This JSON format is currently
+  > [**experimental**](@docroot@/development/experimental-features.md#xp-feature-ca-derivations)
+  > and subject to change.
+
+type: object
+required:
+  - id
+  - outPath
+  - dependentRealisations
+  - signatures
+properties:
+  id:
+    type: string
+    title: Derivation Output ID
+    pattern: "^sha256:[0-9a-f]{64}![a-zA-Z_][a-zA-Z0-9_-]*$"
+    description: |
+      Unique identifier for the derivation output that was built.
+
+      Format: `{hash-quotient-drv}!{output-name}`
+
+      - **hash-quotient-drv**: SHA-256 [hash of the quotient derivation](@docroot@/store/derivation/outputs/input-address.md#hash-quotient-drv).
+        Begins with `sha256:`.
+
+      - **output-name**: Name of the specific output (e.g., "out", "dev", "doc")
+
+      Example: `"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad!foo"`
+
+  outPath:
+    "$ref": "store-path-v1.yaml"
+    title: Output Store Path
+    description: |
+      The path to the store object that resulted from building this derivation for the given output name.
+
+  dependentRealisations:
+    type: object
+    title: Underlying Base Build Trace
+    description: |
+      This is for [*derived*](@docroot@/store/build-trace.md#derived) build trace entries to ensure coherence.
+
+      Keys are derivation output IDs (same format as the main `id` field).
+      Values are the store paths that those dependencies resolved to.
+
+      As described in the linked section on derived build trace traces, derived build trace entries must be kept in addition and not instead of the underlying base build entries.
+      This is the set of base build trace entries that this derived build trace is derived from.
+      (The set is also a map since this miniature base build trace must be coherent, mapping each key to a single value.)
+
+    patternProperties:
+      "^sha256:[0-9a-f]{64}![a-zA-Z_][a-zA-Z0-9_-]*$":
+        $ref: "store-path-v1.yaml"
+        title: Dependent Store Path
+        description: Store path that this dependency resolved to during the build
+    additionalProperties: false
+
+  signatures:
+    type: array
+    title: Build Signatures
+    description: |
+      A set of cryptographic signatures attesting to the authenticity of this build trace entry.
+    items:
+      type: string
+      title: Signature
+      description: A single cryptographic signature
+
+additionalProperties: false
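To show what an object matching this schema looks like, here is a hypothetical instance with invented values, written as a Nix attribute set for illustration; the real examples live in the `simple.json` and related test files referenced above.

```nix
# Hypothetical instance; all values are invented placeholders, not taken from the diff.
{
  id = "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad!out";
  outPath = "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-example-1.0";
  dependentRealisations = { }; # empty for a base (non-derived) entry
  signatures = [ "cache.example.org-1:signature-goes-here" ];
}
```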
@@ -51,4 +51,4 @@ additionalProperties: false
     description: |
       The hash algorithm used to compute the hash value.
 
-      `blake3` is currently experimental and requires the [`blake-hashing`](@docroot@/development/experimental-features.md#xp-feature-blake-hashing) experimental feature.
+      `blake3` is currently experimental and requires the [`blake-hashing`](@docroot@/development/experimental-features.md#xp-feature-blake3-hashes) experimental feature.
@@ -4,7 +4,7 @@ This is the complete specification of the [Nix Archive] format.
 The Nix Archive format closely follows the abstract specification of a [file system object] tree,
 because it is designed to serialize exactly that data structure.
 
-[Nix Archive]: @docroot@/store/file-system-object/content-address.md#nix-archive
+[Nix Archive]: @docroot@/store/file-system-object/content-address.md#serial-nix-archive
 [file system object]: @docroot@/store/file-system-object.md
 
 The format of this specification is close to [Extended Backus–Naur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), with the exception of the `str(..)` function / parameterized rule, which length-prefixes and pads strings.
@@ -13,7 +13,7 @@
 
 - The `discard-references` feature has been stabilized.
   This means that the
-  [unsafeDiscardReferences](@docroot@/development/experimental-features.md#xp-feature-discard-references)
+  [unsafeDiscardReferences](@docroot@/language/advanced-attributes.md#adv-attr-unsafeDiscardReferences)
   attribute is no longer guarded by an experimental flag and can be used
   freely.
@@ -17,8 +17,8 @@
 
 - `nix-shell` shebang lines now support single-quoted arguments.
 
-- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/development/experimental-features.md#xp-fetch-tree).
-  This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/development/experimental-features.md#xp-fetch-tree).
+- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/development/experimental-features.md#xp-feature-fetch-tree).
+  This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/development/experimental-features.md#xp-feature-flakes).
 
 - The interface for creating and updating lock files has been overhauled:
@@ -14,7 +14,7 @@
 
 - Modify `nix derivation {add,show}` JSON format [#9866](https://github.com/NixOS/nix/issues/9866) [#10722](https://github.com/NixOS/nix/pull/10722)
 
-  The JSON format for derivations has been slightly revised to better conform to our [JSON guidelines](@docroot@/development/cli-guideline.md#returning-future-proof-json).
+  The JSON format for derivations has been slightly revised to better conform to our [JSON guidelines](@docroot@/development/json-guideline.md).
   In particular, the hash algorithm and content addressing method of content-addressed derivation outputs are now separated into two fields `hashAlgo` and `method`,
   rather than one field with an arcane `:`-separated format.
@@ -93,7 +93,7 @@
 
 - Support unit prefixes in configuration settings [#10668](https://github.com/NixOS/nix/pull/10668)
 
-  Configuration settings in Nix now support unit prefixes, allowing for more intuitive and readable configurations. For example, you can now specify [`--min-free 1G`](@docroot@/command-ref/opt-common.md#opt-min-free) to set the minimum free space to 1 gigabyte.
+  Configuration settings in Nix now support unit prefixes, allowing for more intuitive and readable configurations. For example, you can now specify [`--min-free 1G`](@docroot@/command-ref/conf-file.md#conf-min-free) to set the minimum free space to 1 gigabyte.
 
   This enhancement was extracted from [#7851](https://github.com/NixOS/nix/pull/7851) and is also useful for PR [#10661](https://github.com/NixOS/nix/pull/10661).
@@ -29,7 +29,7 @@ And even in that case, a different result doesn't mean the original entry was a
 As such, the decision of whether to trust a counterparty's build trace is a fundamentally subject policy choice.
 Build trace entries are typically *signed* in order to enable arbitrary public-key-based trust polices.
 
-## Derived build traces
+## Derived build traces {#derived}
 
 Implementations that wish to memoize the above may also keep additional *derived* build trace entries that do map unresolved derivations.
 But if they do so, they *must* also keep the underlying base entries with resolved derivation keys around.
@@ -40,13 +40,13 @@ Unlike with base build traces, incoherence with derived build traces is possible
 The key ingredient is that derivation resolution is only deterministic with respect to a fixed base build trace.
 Without fixing the base build trace, it inherits the subjectivity of base build traces themselves.
 
-Concretely, suppose there are three derivations \\(a\\), \\(b\\), and \((c\\).
-Let \\(a\\) be a resolved derivation, but let \\(b\\) and \((c\\) be unresolved and both take as an input an output of \\(a\\).
-Now suppose that derived entries are made for \\(b\\) and \((c\\) based on two different entries of \\(a\\).
+Concretely, suppose there are three derivations \\(a\\), \\(b\\), and \\(c\\).
+Let \\(a\\) be a resolved derivation, but let \\(b\\) and \\(c\\) be unresolved and both take as an input an output of \\(a\\).
+Now suppose that derived entries are made for \\(b\\) and \\(c\\) based on two different entries of \\(a\\).
 (This could happen if \\(a\\) is non-deterministic, \\(a\\) and \\(b\\) are built in one store, \\(a\\) and \\(c\\) are built in another store, and then a third store substitutes from both of the first two stores.)
 
-If trusting the derived build trace entries for \\(b\\) and \((c\\) requires that each's underlying entry for \\(a\\) be also trusted, the two different mappings for \\(a\\) will be caught.
-However, if \\(b\\) and \((c\\)'s entries can be combined in isolation, there will be nothing to catch the contradiction in their hidden assumptions about \\(a\\)'s output.
+If trusting the derived build trace entries for \\(b\\) and \\(c\\) requires that each's underlying entry for \\(a\\) be also trusted, the two different mappings for \\(a\\) will be caught.
+However, if \\(b\\) and \\(c\\)'s entries can be combined in isolation, there will be nothing to catch the contradiction in their hidden assumptions about \\(a\\)'s output.
 
 [derivation]: ./derivation/index.md
 [output]: ./derivation/outputs/index.md
@@ -8,7 +8,7 @@
 
 - Once this is done, the derivation is *normalized*, replacing each input deriving path with its store path, which we now know from realising the input.
 
-## Builder Execution
+## Builder Execution {#builder-execution}
 
 The [`builder`](./derivation/index.md#builder) is executed as follows:
@@ -102,7 +102,7 @@ But rather than somehow scanning all the other fields for inputs, Nix requires t
 
 ### System {#system}
 
-The system type on which the [`builder`](#attr-builder) executable is meant to be run.
+The system type on which the [`builder`](#builder) executable is meant to be run.
 
 A necessary condition for Nix to schedule a given derivation on some [Nix instance] is for the "system" of that derivation to match that instance's [`system` configuration option] or [`extra-platforms` configuration option].
@@ -43,7 +43,119 @@ In particular, the specification decides:
 
 - if the content is content-addressed, how is it content addressed
 
-- if the content is content-addressed, [what is its content address](./content-address.md#fixed-content-addressing) (and thus what is its [store path])
+- if the content is content-addressed, [what is its content address](./content-address.md#fixed) (and thus what is its [store path])
+
+## Output Checks
+
+Additional checks for each output can also be mandated by the derivation,
+supplementing the core required output specification above with additional properties that must hold on the produced outputs for the derivation build to be considered successful.
+
+**TODO No nix lang**
+
+### Reference checks
+
+The main checks assert properties about the [references][reference] of an output.
+These checks vary on two different axes, yielding 4 possible checks.
+The first axis is *direct* (references proper) vs *transitive* ([requisites]).
+The second axis is *allowal* vs *disallowal*.
+
+[reference]: @docroot@/glossary.md#gloss-reference
+
+[requisites]: @docroot@/store/store-object.md#requisites
+
+- [*allowed references*]{#allowed-references}: Set (store path or output name)
+
+  The outputs references must be a subset of this set.
+  Not every store path in the set must be a reference of the output,
+  but every reference of the output must be in this set.
+
+  For example, the empty set enforces that the output of a derivation cannot have any runtime dependencies on its inputs.
+
+  > **Usage note**
+  >
+  > This is used in NixOS to check that generated files such as initial ramdisks for booting Linux don’t have accidental dependencies on other paths in the Nix store.
+
+- [`allowedRequisites`]{#adv-attr-allowedRequisites}: Set (store paths or outputs name)
+
+  This attribute is similar to `allowedReferences`, but it specifies
+  the legal requisites of the whole closure, so all the dependencies
+  recursively. For example,
+
+  ```nix
+  allowedRequisites = [ foobar ];
+  ```
+
+  enforces that the output of a derivation cannot have any other
+  runtime dependency than `foobar`, and in addition it enforces that
+  `foobar` itself doesn't introduce any other dependency itself.
+
+- [`disallowedReferences`]{#adv-attr-disallowedReferences}\
+  The optional attribute `disallowedReferences` specifies a list of
+  illegal references (dependencies) of the output of the builder. For
+  example,
+
+  ```nix
+  disallowedReferences = [ foo ];
+  ```
+
+  enforces that the output of a derivation cannot have a direct
+  runtime dependency on the derivation `foo`.
+
+  https://en.wikipedia.org/wiki/Blacklist_(computing)
+
+- [`disallowedRequisites`]{#adv-attr-disallowedRequisites}\
+  This attribute is similar to `disallowedReferences`, but it
+  specifies illegal requisites for the whole closure, so all the
+  dependencies recursively. For example,
+
+  ```nix
+  disallowedRequisites = [ foobar ];
+  ```
+
+  enforces that the output of a derivation cannot have any runtime
+  dependency on `foobar` or any other derivation depending recursively
+  on `foobar`.
+
+The final references of the store object are always store paths.
+However, if all elements of the sets above had to be store paths, it would be hard-to-impossible to write down the reference from outputs *to other outputs*, because in general we don't know outputs' store paths until they are built.
+
+For this reason, it is also acceptable to use an output specification name (of the current derivation) instead of a store path.
+To allow an output to have a runtime dependency on itself, use `"out"` as a list item.
+
+- [`outputChecks`]{#adv-attr-outputChecks}\
+  When using [structured attributes](#adv-attr-structuredAttrs), the `outputChecks`
+  attribute allows defining checks per-output.
+
+  In addition to
+  [`allowedReferences`](#adv-attr-allowedReferences), [`allowedRequisites`](#adv-attr-allowedRequisites),
+  [`disallowedReferences`](#adv-attr-disallowedReferences) and [`disallowedRequisites`](#adv-attr-disallowedRequisites),
+  the following attributes are available:
+
+  - `maxSize` defines the maximum size of the resulting [store object](@docroot@/store/store-object.md).
+  - `maxClosureSize` defines the maximum size of the output's closure.
+  - `ignoreSelfRefs` controls whether self-references should be considered when
+    checking for allowed references/requisites.
+
+  Example:
+
+  ```nix
+  __structuredAttrs = true;
+
+  outputChecks.out = {
+    # The closure of 'out' must not be larger than 256 MiB.
+    maxClosureSize = 256 * 1024 * 1024;
+
+    # It must not refer to the C compiler or to the 'dev' output.
+    disallowedRequisites = [ stdenv.cc "dev" ];
+  };
+
+  outputChecks.dev = {
+    # The 'dev' output must not be larger than 128 KiB.
+    maxSize = 128 * 1024;
+  };
+  ```
+
 ## Types of derivations
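The first bullet in the added section above describes the allowed-references check only in prose; a minimal sketch of the case it mentions (the empty set), using the attribute spelling found elsewhere in the manual:

```nix
# Minimal sketch: an empty allow-list means the output may not retain any
# runtime reference to its inputs or other store paths.
allowedReferences = [ ];
```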
@@ -1,7 +1,7 @@
 # Content-Addressing Store Objects
 
 Just [like][fso-ca] [File System Objects][File System Object],
-[Store Objects][Store Object] can also be [content-addressed](@docroot@/glossary.md#gloss-content-addressed),
+[Store Objects][Store Object] can also be [content-addressed](@docroot@/glossary.md#gloss-content-address),
 unless they are [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object).
 
 For store objects, the content address we produce will take the form of a [Store Path] rather than regular hash.
@@ -107,7 +107,7 @@ References (to other store objects and self-references alike) are supported so l
 >
 > This method is part of the [`git-hashing`][xp-feature-git-hashing] experimental feature.
 
-This uses the corresponding [Git](../file-system-object/content-address.md#serial-git) method of file system object content addressing.
+This uses the corresponding [Git](../file-system-object/content-address.md#git) method of file system object content addressing.
 
 References are not supported.
@@ -6,7 +6,7 @@
 >
 > A rendered store path
 
-Nix implements references to [store objects](./index.md#store-object) as *store paths*.
+Nix implements references to [store objects](./store-object.md) as *store paths*.
 
 Think of a store path as an [opaque], [unique identifier]:
 The only way to obtain store path is by adding or building store objects.
@@ -485,10 +485,10 @@
         open-manual = {
           type = "app";
           program = "${pkgs.writeShellScript "open-nix-manual" ''
-            manual_path="${self.packages.${system}.nix-manual}/share/doc/nix/manual/index.html"
-            if ! ${opener} "$manual_path"; then
+            path="${self.packages.${system}.nix-manual.site}/index.html"
+            if ! ${opener} "$path"; then
               echo "Failed to open manual with ${opener}. Manual is located at:"
-              echo "$manual_path"
+              echo "$path"
             fi
           ''}";
           meta.description = "Open the Nix manual in your browser";
@@ -61,4 +61,3 @@ if get_option('unit-tests')
 endif
 
 subproject('nix-functional-tests')
 subproject('json-schema-checks')
-subproject('kaitai-struct-checks')
@@ -109,7 +109,6 @@ pkgs.nixComponents2.nix-util.overrideAttrs (
   ++ pkgs.nixComponents2.nix-external-api-docs.nativeBuildInputs
   ++ pkgs.nixComponents2.nix-functional-tests.externalNativeBuildInputs
   ++ pkgs.nixComponents2.nix-json-schema-checks.externalNativeBuildInputs
-  ++ pkgs.nixComponents2.nix-kaitai-struct-checks.externalNativeBuildInputs
   ++ lib.optional (
     !buildCanExecuteHost
     # Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479
@@ -149,7 +148,6 @@ pkgs.nixComponents2.nix-util.overrideAttrs (
   ++ pkgs.nixComponents2.nix-expr.externalPropagatedBuildInputs
   ++ pkgs.nixComponents2.nix-cmd.buildInputs
   ++ lib.optionals havePerl pkgs.nixComponents2.nix-perl-bindings.externalBuildInputs
-  ++ lib.optional havePerl pkgs.perl
-  ++ pkgs.nixComponents2.nix-kaitai-struct-checks.externalBuildInputs;
+  ++ lib.optional havePerl pkgs.perl;
 }
 )
src/json-schema-checks/build-trace-entry (new symbolic link)
@@ -0,0 +1 @@
+../../src/libstore-tests/data/realisation
@@ -54,6 +54,15 @@ schemas = [
       'single_built_built.json',
     ],
   },
+  {
+    'stem' : 'build-trace-entry',
+    'schema' : schema_dir / 'build-trace-entry-v1.yaml',
+    'files' : [
+      'simple.json',
+      'with-dependent-realisations.json',
+      'with-signature.json',
+    ],
+  },
 ]
 
 # Derivation and Derivation output
@@ -23,6 +23,7 @@ mkMesonDerivation (finalAttrs: {
   ../../src/libutil-tests/data/hash
   ../../src/libstore-tests/data/content-address
   ../../src/libstore-tests/data/store-path
+  ../../src/libstore-tests/data/realisation
   ../../src/libstore-tests/data/derivation
   ../../src/libstore-tests/data/derived-path
   ../../src/libstore-tests/data/path-info
@@ -1,4 +1,5 @@
 # Run with: nix build .#nix-kaitai-struct-checks
+# or: `nix develop .#nix-kaitai-struct-checks` to enter a dev shell
 {
   lib,
   mkMesonDerivation,
@@ -813,7 +813,7 @@ public:
     [[gnu::always_inline]]
     C * add(auto &&... args)
     {
-        return new C(std::forward<decltype(args)>(args)...);
+        return alloc.new_object<C>(std::forward<decltype(args)>(args)...);
     }
 
     // we define some calls to add explicitly so that the argument can be passed in as initializer lists
@@ -822,7 +822,7 @@ public:
     C * add(const PosIdx & pos, Expr * fun, std::vector<Expr *> && args)
         requires(std::same_as<C, ExprCall>)
     {
-        return new C(pos, fun, std::move(args));
+        return alloc.new_object<C>(pos, fun, std::move(args));
     }
 
     template<class C>
@@ -830,7 +830,7 @@ public:
     C * add(const PosIdx & pos, Expr * fun, std::vector<Expr *> && args, PosIdx && cursedOrEndPos)
         requires(std::same_as<C, ExprCall>)
     {
-        return new C(pos, fun, std::move(args), std::move(cursedOrEndPos));
+        return alloc.new_object<C>(pos, fun, std::move(args), std::move(cursedOrEndPos));
     }
 
     template<class C>
@@ -825,10 +825,10 @@ static RegisterPrimOp primop_genericClosure(
 
       - [Int](@docroot@/language/types.md#type-int)
       - [Float](@docroot@/language/types.md#type-float)
-      - [Boolean](@docroot@/language/types.md#type-boolean)
+      - [Boolean](@docroot@/language/types.md#type-bool)
       - [String](@docroot@/language/types.md#type-string)
      - [Path](@docroot@/language/types.md#type-path)
-      - [List](@docroot@/language/types.md#list)
+      - [List](@docroot@/language/types.md#type-list)
 
       The result is produced by calling the `operator` on each `item` that has not been called yet, including newly added items, until no new items are added.
       Items are compared by their `key` attribute.
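As a concrete illustration of the `genericClosure` behaviour described in the context lines above (example values chosen here, not taken from the diff):

```nix
# Repeatedly applies `operator` to newly added items, deduplicating by `key`;
# evaluates to [ { key = 0; } { key = 1; } { key = 2; } { key = 3; } ].
builtins.genericClosure {
  startSet = [ { key = 0; } ];
  operator = item: if item.key < 3 then [ { key = item.key + 1; } ] else [ ];
}
```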
@@ -2103,7 +2103,7 @@ static RegisterPrimOp primop_findFile(
       builtins.findFile builtins.nixPath "nixpkgs"
       ```
 
-      A search path is represented as a list of [attribute sets](./types.md#attribute-set) with two attributes:
+      A search path is represented as a list of [attribute sets](./types.md#type-attrs) with two attributes:
       - `prefix` is a relative path.
       - `path` denotes a file system location
@@ -2395,7 +2395,7 @@ static RegisterPrimOp primop_outputOf({
 
       returns an input placeholder for the output of the output of `myDrv`.
 
-      This primop corresponds to the `^` sigil for [deriving paths](@docroot@/glossary.md#gloss-deriving-paths), e.g. as part of installable syntax on the command line.
+      This primop corresponds to the `^` sigil for [deriving paths](@docroot@/glossary.md#gloss-deriving-path), e.g. as part of installable syntax on the command line.
   )",
   .fun = prim_outputOf,
   .experimentalFeature = Xp::DynamicDerivations,
@@ -4966,7 +4966,7 @@ static RegisterPrimOp primop_compareVersions({
       version *s1* is older than version *s2*, `0` if they are the same,
       and `1` if *s1* is newer than *s2*. The version comparison
       algorithm is the same as the one used by [`nix-env
-      -u`](../command-ref/nix-env.md#operation---upgrade).
+      -u`](../command-ref/nix-env/upgrade.md).
   )",
   .fun = prim_compareVersions,
 });
@@ -4995,7 +4995,7 @@ static RegisterPrimOp primop_splitVersion({
   .doc = R"(
       Split a string representing a version into its components, by the
       same version splitting logic underlying the version comparison in
-      [`nix-env -u`](../command-ref/nix-env.md#operation---upgrade).
+      [`nix-env -u`](../command-ref/nix-env/upgrade.md).
   )",
   .fun = prim_splitVersion,
 });
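For reference, a small illustration of the two builtins whose documentation links are corrected above:

```nix
# compareVersions returns -1, 0 or 1; splitVersion splits on the same
# component boundaries used by that comparison.
{
  older = builtins.compareVersions "2.3" "2.4"; # -1
  same  = builtins.compareVersions "2.4" "2.4"; #  0
  parts = builtins.splitVersion "1.2.3";        # [ "1" "2" "3" ]
}
```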
@@ -5045,9 +5045,9 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings)
       Primitive value.
 
       It can be returned by
-      [comparison operators](@docroot@/language/operators.md#Comparison)
+      [comparison operators](@docroot@/language/operators.md#comparison)
       and used in
-      [conditional expressions](@docroot@/language/syntax.md#Conditionals).
+      [conditional expressions](@docroot@/language/syntax.md#conditionals).
 
       The name `true` is not special, and can be shadowed:
@@ -5068,9 +5068,9 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings)
       Primitive value.
 
       It can be returned by
-      [comparison operators](@docroot@/language/operators.md#Comparison)
+      [comparison operators](@docroot@/language/operators.md#comparison)
       and used in
-      [conditional expressions](@docroot@/language/syntax.md#Conditionals).
+      [conditional expressions](@docroot@/language/syntax.md#conditionals).
 
       The name `false` is not special, and can be shadowed:
@@ -79,7 +79,7 @@ static RegisterPrimOp primop_unsafeDiscardOutputDependency(
       Create a copy of the given string where every
      [derivation deep](@docroot@/language/string-context.md#string-context-element-derivation-deep)
       string context element is turned into a
-      [constant](@docroot@/language/string-context.md#string-context-element-constant)
+      [constant](@docroot@/language/string-context.md#string-context-constant)
       string context element.
 
       This is the opposite of [`builtins.addDrvOutputDependencies`](#builtins-addDrvOutputDependencies).
@@ -145,7 +145,7 @@ static RegisterPrimOp primop_addDrvOutputDependencies(
   .args = {"s"},
   .doc = R"(
       Create a copy of the given string where a single
-      [constant](@docroot@/language/string-context.md#string-context-element-constant)
+      [constant](@docroot@/language/string-context.md#string-context-constant)
       string context element is turned into a
       [derivation deep](@docroot@/language/string-context.md#string-context-element-derivation-deep)
       string context element.
@@ -582,25 +582,15 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
      // then use code that was removed in this commit (see blame)

      auto dir = this->path;
-     Strings gitArgs{"-C", dir.string(), "--git-dir", ".", "fetch", "--quiet", "--force"};
+     Strings gitArgs{"-C", dir.string(), "--git-dir", ".", "fetch", "--progress", "--force"};
      if (shallow)
          append(gitArgs, {"--depth", "1"});
      append(gitArgs, {std::string("--"), url, refspec});

-     auto [status, output] = runProgram(
-         RunOptions{
-             .program = "git",
-             .lookupPath = true,
-             // FIXME: git stderr messes up our progress indicator, so
-             // we're using --quiet for now. Should process its stderr.
-             .args = gitArgs,
-             .input = {},
-             .mergeStderrToStdout = true,
-             .isInteractive = true});
+     auto status = runProgram(RunOptions{.program = "git", .args = gitArgs, .isInteractive = true}).first;

-     if (status > 0) {
-         throw Error("Failed to fetch git repository %s : %s", url, output);
-     }
+     if (status > 0)
+         throw Error("Failed to fetch git repository '%s'", url);
  }

  void verifyCommit(const Hash & rev, const std::vector<fetchers::PublicKey> & publicKeys) override
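A minimal sketch of the `runProgram(RunOptions)` pattern that the hunk above collapses into a single line, using only fields that appear in the removed block; the program and arguments here are illustrative, not part of the change:

    // Illustrative invocation only; the real call site passes the gitArgs built above.
    auto [status, output] = runProgram(RunOptions{
        .program = "git",
        .lookupPath = true,
        .args = {"--version"},
        .mergeStderrToStdout = true,
    });
    if (status != 0)
        throw Error("git failed: %s", output);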
@@ -2,6 +2,7 @@
  ///@file

  #include "nix/util/types.hh"
+ #include "nix/util/source-path.hh"
  #include "nix/fetchers/fetchers.hh"

  namespace nix {
@@ -39,7 +40,7 @@ struct Registry
  {
  }

- static std::shared_ptr<Registry> read(const Settings & settings, const Path & path, RegistryType type);
+ static std::shared_ptr<Registry> read(const Settings & settings, const SourcePath & path, RegistryType type);

  void write(const Path & path);
@@ -10,18 +10,18 @@

  namespace nix::fetchers {

- std::shared_ptr<Registry> Registry::read(const Settings & settings, const Path & path, RegistryType type)
+ std::shared_ptr<Registry> Registry::read(const Settings & settings, const SourcePath & path, RegistryType type)
  {
      debug("reading registry '%s'", path);

      auto registry = std::make_shared<Registry>(settings, type);

-     if (!pathExists(path))
+     if (!path.pathExists())
          return std::make_shared<Registry>(settings, type);

      try {

-         auto json = nlohmann::json::parse(readFile(path));
+         auto json = nlohmann::json::parse(path.readFile());

          auto version = json.value("version", 0);

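A minimal sketch of the `SourcePath` access pattern this hunk switches to, built from the same helpers used in the surrounding changes (`getFSSourceAccessor`, `CanonPath`); the file location is illustrative:

    // Hypothetical location, for illustration only.
    SourcePath regPath{getFSSourceAccessor(), CanonPath{"/tmp/registry.json"}};
    if (regPath.pathExists()) {
        // Reads go through the source accessor rather than the raw filesystem.
        auto json = nlohmann::json::parse(regPath.readFile());
    }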
@@ -97,7 +97,10 @@ static Path getSystemRegistryPath()

  static std::shared_ptr<Registry> getSystemRegistry(const Settings & settings)
  {
-     static auto systemRegistry = Registry::read(settings, getSystemRegistryPath(), Registry::System);
+     static auto systemRegistry = Registry::read(
+         settings,
+         SourcePath{getFSSourceAccessor(), CanonPath{getSystemRegistryPath()}}.resolveSymlinks(),
+         Registry::System);
      return systemRegistry;
  }
@@ -108,13 +111,17 @@ Path getUserRegistryPath()

  std::shared_ptr<Registry> getUserRegistry(const Settings & settings)
  {
-     static auto userRegistry = Registry::read(settings, getUserRegistryPath(), Registry::User);
+     static auto userRegistry = Registry::read(
+         settings,
+         SourcePath{getFSSourceAccessor(), CanonPath{getUserRegistryPath()}}.resolveSymlinks(),
+         Registry::User);
      return userRegistry;
  }

  std::shared_ptr<Registry> getCustomRegistry(const Settings & settings, const Path & p)
  {
-     static auto customRegistry = Registry::read(settings, p, Registry::Custom);
+     static auto customRegistry =
+         Registry::read(settings, SourcePath{getFSSourceAccessor(), CanonPath{p}}.resolveSymlinks(), Registry::Custom);
      return customRegistry;
  }
@@ -137,14 +144,19 @@ static std::shared_ptr<Registry> getGlobalRegistry(const Settings & settings, re
          return std::make_shared<Registry>(settings, Registry::Global); // empty registry
      }

+     return Registry::read(
+         settings,
+         [&] -> SourcePath {
          if (!isAbsolute(path)) {
              auto storePath = downloadFile(store, settings, path, "flake-registry.json").storePath;
              if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
                  store2->addPermRoot(storePath, getCacheDir() + "/flake-registry.json");
-             path = store->toRealPath(storePath);
+             return {store->requireStoreObjectAccessor(storePath)};
+         } else {
+             return SourcePath{getFSSourceAccessor(), CanonPath{path}}.resolveSymlinks();
          }
+         }(),
-         return Registry::read(settings, path, Registry::Global);
+         Registry::Global);
  }();

  return reg;
@@ -7,6 +7,7 @@
  #include "nix/store/store-api.hh"
  #include "nix/store/store-open.hh"
  #include "nix/store/build-result.hh"
+ #include "nix/store/local-fs-store.hh"

  #include "nix/store/globals.hh"
@@ -109,7 +110,8 @@ nix_err nix_store_real_path(
  if (context)
      context->last_err_code = NIX_OK;
  try {
-     auto res = store->ptr->toRealPath(path->path);
+     auto store2 = store->ptr.dynamic_pointer_cast<nix::LocalFSStore>();
+     auto res = store2 ? store2->toRealPath(path->path) : store->ptr->printStorePath(path->path);
      return call_nix_get_string_callback(res, callback, user_data);
  }
  NIXC_CATCH_ERRS
@@ -286,7 +286,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild()
  PathSet lockFiles;
  /* FIXME: Should lock something like the drv itself so we don't build same
     CA drv concurrently */
- if (dynamic_cast<LocalStore *>(&worker.store)) {
+ if (auto * localStore = dynamic_cast<LocalStore *>(&worker.store)) {
      /* If we aren't a local store, we might need to use the local store as
         a build remote, but that would cause a deadlock. */
      /* FIXME: Make it so we can use ourselves as a build remote even if we
@@ -296,9 +296,9 @@ Goal::Co DerivationBuildingGoal::tryToBuild()
      */
      for (auto & i : drv->outputsAndOptPaths(worker.store)) {
          if (i.second.second)
-             lockFiles.insert(worker.store.Store::toRealPath(*i.second.second));
+             lockFiles.insert(localStore->toRealPath(*i.second.second));
          else
-             lockFiles.insert(worker.store.Store::toRealPath(drvPath) + "." + i.first);
+             lockFiles.insert(localStore->toRealPath(drvPath) + "." + i.first);
      }
  }

@@ -331,12 +331,14 @@ Goal::Co DerivationBuildingGoal::tryToBuild()

  /* If any of the outputs already exist but are not valid, delete
     them. */
+ if (auto * localStore = dynamic_cast<LocalFSStore *>(&worker.store)) {
      for (auto & [_, status] : initialOutputs) {
          if (!status.known || status.known->isValid())
              continue;
          auto storePath = status.known->path;
          debug("removing invalid path '%s'", worker.store.printStorePath(status.known->path));
-         deletePath(worker.store.Store::toRealPath(storePath));
+         deletePath(localStore->toRealPath(storePath));
      }
+ }

  /* Don't do a remote build if the derivation has the attribute
@@ -896,7 +896,7 @@ static void performOp(
  auto path = WorkerProto::Serialise<StorePath>::read(*store, rconn);
  logger->startWork();
  logger->stopWork();
- dumpPath(store->toRealPath(path), conn.to);
+ store->narFromPath(path, conn.to);
  break;
  }

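The point of moving from `dumpPath(store->toRealPath(...))` to `narFromPath()` is that the latter does not require a store backed by a local filesystem; a hedged sketch of the same call against an in-memory sink (`StringSink` and its `s` member are assumed here purely for illustration):

    StringSink sink;
    store->narFromPath(path, sink); // serialise the store object as a NAR, whatever the store backend
    // sink.s now holds the NAR bytes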
@@ -189,7 +189,7 @@ public:
  0,
  "cores",
  R"(
-     Sets the value of the `NIX_BUILD_CORES` environment variable in the [invocation of the `builder` executable](@docroot@/language/derivations.md#builder-execution) of a derivation.
+     Sets the value of the `NIX_BUILD_CORES` environment variable in the [invocation of the `builder` executable](@docroot@/store/building.md#builder-execution) of a derivation.
      The `builder` executable can use this variable to control its own maximum amount of parallelism.

      <!--
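As a hedged illustration of what the setting feeds into, a builder can read the variable like any other environment variable; the fallback below is an assumption made for the sketch, and by convention a value of `0` tells the builder to use all available cores:

    #include <cstdlib>

    // Illustrative helper, not part of the change above.
    int buildCores()
    {
        const char * s = std::getenv("NIX_BUILD_CORES");
        return s ? std::atoi(s) : 1; // assumed fallback when the variable is unset
    }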
@@ -102,7 +102,12 @@ struct LocalFSStore : virtual Store, virtual GcStore, virtual LogStore
      return config.realStoreDir;
  }

- Path toRealPath(const Path & storePath) override
+ Path toRealPath(const StorePath & storePath)
+ {
+     return toRealPath(printStorePath(storePath));
+ }
+
+ Path toRealPath(const Path & storePath)
  {
      assert(isInStore(storePath));
      return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1);
@@ -54,7 +54,7 @@ private:

  This is also the location where [`--keep-failed`](@docroot@/command-ref/opt-common.md#opt-keep-failed) leaves its files.

- If Nix runs without sandbox, or if the platform does not support sandboxing with bind mounts (e.g. macOS), then the [`builder`](@docroot@/language/derivations.md#attr-builder)'s environment will contain this directory, instead of the virtual location [`sandbox-build-dir`](#conf-sandbox-build-dir).
+ If Nix runs without sandbox, or if the platform does not support sandboxing with bind mounts (e.g. macOS), then the [`builder`](@docroot@/language/derivations.md#attr-builder)'s environment will contain this directory, instead of the virtual location [`sandbox-build-dir`](@docroot@/command-ref/conf-file.md#conf-sandbox-build-dir).

  > **Warning**
  >
@@ -895,16 +895,6 @@ public:
   */
  virtual std::optional<TrustedFlag> isTrustedClient() = 0;

- virtual Path toRealPath(const Path & storePath)
- {
-     return storePath;
- }
-
- Path toRealPath(const StorePath & storePath)
- {
-     return toRealPath(printStorePath(storePath));
- }

  /**
   * Synchronises the options of the client with those of the daemon
   * (a no-op when there’s no daemon)
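With the generic fallback removed from `Store`, callers that used to call `toRealPath()` unconditionally now have to branch on whether the store is filesystem-backed, as the C API change earlier in this set already does; a minimal sketch of that pattern (the function and variable names are illustrative):

    std::string shownPath(ref<Store> store, const StorePath & storePath)
    {
        if (auto localStore = store.dynamic_pointer_cast<LocalFSStore>())
            return localStore->toRealPath(storePath); // physical location on disk
        return store->printStorePath(storePath);      // logical store path only
    }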
@@ -246,7 +246,7 @@ void LocalOverlayStore::optimiseStore()
  if (lowerStore->isValidPath(path)) {
      uint64_t bytesFreed = 0;
      // Deduplicate store path
-     deleteStorePath(Store::toRealPath(path), bytesFreed);
+     deleteStorePath(toRealPath(path), bytesFreed);
  }
  done++;
  act.progress(done, paths.size());
@@ -1063,7 +1063,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, RepairF

  PathLocks outputLock;

- auto realPath = Store::toRealPath(info.path);
+ auto realPath = toRealPath(info.path);

  /* Lock the output path. But don't lock if we're being called
     from a build hook (whose parent process already acquired a
@@ -1262,7 +1262,7 @@ StorePath LocalStore::addToStoreFromDump(
  /* The first check above is an optimisation to prevent
     unnecessary lock acquisition. */

- auto realPath = Store::toRealPath(dstPath);
+ auto realPath = toRealPath(dstPath);

  PathLocks outputLock({realPath});

@@ -1413,7 +1413,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)

  auto hashSink = HashSink(info->narHash.algo);

- dumpPath(Store::toRealPath(i), hashSink);
+ dumpPath(toRealPath(i), hashSink);
  auto current = hashSink.finish();

  if (info->narHash != nullHash && info->narHash != current.hash) {
@@ -58,7 +58,7 @@ struct ChrootDerivationBuilder : virtual DerivationBuilderImpl
     environment using bind-mounts. We put it in the Nix store
     so that the build outputs can be moved efficiently from the
     chroot to their final location. */
- auto chrootParentDir = store.Store::toRealPath(drvPath) + ".chroot";
+ auto chrootParentDir = store.toRealPath(drvPath) + ".chroot";
  deletePath(chrootParentDir);

  /* Clean up the chroot directory automatically. */
@@ -171,7 +171,7 @@ struct ChrootDerivationBuilder : virtual DerivationBuilderImpl
          continue;
      if (buildMode != bmCheck && status.known->isValid())
          continue;
-     auto p = store.Store::toRealPath(status.known->path);
+     auto p = store.toRealPath(status.known->path);
      if (pathExists(chrootRootDir + p))
          std::filesystem::rename((chrootRootDir + p), p);
  }
@@ -185,7 +185,7 @@ struct ChrootDerivationBuilder : virtual DerivationBuilderImpl

  debug("materialising '%s' in the sandbox", store.printStorePath(path));

- Path source = store.Store::toRealPath(path);
+ Path source = store.toRealPath(path);
  Path target = chrootRootDir + store.printStorePath(path);

  if (pathExists(target)) {
@@ -3,11 +3,33 @@
  # include <spawn.h>
  # include <sys/sysctl.h>
  # include <sandbox.h>
+ # include <sys/ipc.h>
+ # include <sys/shm.h>
+ # include <sys/msg.h>
+ # include <sys/sem.h>

  /* This definition is undocumented but depended upon by all major browsers. */
  extern "C" int
  sandbox_init_with_parameters(const char * profile, uint64_t flags, const char * const parameters[], char ** errorbuf);

+ /* Darwin IPC structures and constants */
+ # define IPCS_MAGIC 0x00000001
+ # define IPCS_SHM_ITER 0x00000002
+ # define IPCS_SEM_ITER 0x00000020
+ # define IPCS_MSG_ITER 0x00000200
+ # define IPCS_SHM_SYSCTL "kern.sysv.ipcs.shm"
+ # define IPCS_MSG_SYSCTL "kern.sysv.ipcs.msg"
+ # define IPCS_SEM_SYSCTL "kern.sysv.ipcs.sem"
+
+ struct IpcsCommand
+ {
+     uint32_t ipcs_magic;
+     uint32_t ipcs_op;
+     uint32_t ipcs_cursor;
+     uint32_t ipcs_datalen;
+     void * ipcs_data;
+ };

  namespace nix {

  struct DarwinDerivationBuilder : DerivationBuilderImpl
@@ -204,6 +226,119 @@ struct DarwinDerivationBuilder : DerivationBuilderImpl
      posix_spawn(
          NULL, drv.builder.c_str(), NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
  }

+ /**
+  * Cleans up all System V IPC objects owned by the specified user.
+  *
+  * On Darwin, IPC objects (shared memory segments, message queues, and semaphores)
+  * can persist after the build user's processes are killed, since there are no IPC namespaces
+  * like on Linux. This can exhaust kernel IPC limits over time.
+  *
+  * Uses sysctl to enumerate and remove all IPC objects owned by the given UID.
+  */
+ void cleanupSysVIPCForUser(uid_t uid)
+ {
+     struct IpcsCommand ic;
+     size_t ic_size = sizeof(ic);
+     // IPC ids to cleanup
+     std::vector<int> shm_ids, msg_ids, sem_ids;
+
+     {
+         struct shmid_ds shm_ds;
+         ic.ipcs_magic = IPCS_MAGIC;
+         ic.ipcs_op = IPCS_SHM_ITER;
+         ic.ipcs_cursor = 0;
+         ic.ipcs_data = &shm_ds;
+         ic.ipcs_datalen = sizeof(shm_ds);
+
+         while (true) {
+             memset(&shm_ds, 0, sizeof(shm_ds));
+
+             if (sysctlbyname(IPCS_SHM_SYSCTL, &ic, &ic_size, &ic, ic_size) != 0) {
+                 break;
+             }
+
+             if (shm_ds.shm_perm.uid == uid) {
+                 int shmid = shmget(shm_ds.shm_perm._key, 0, 0);
+                 if (shmid != -1) {
+                     shm_ids.push_back(shmid);
+                 }
+             }
+         }
+     }
+
+     for (auto id : shm_ids) {
+         if (shmctl(id, IPC_RMID, NULL) == 0)
+             debug("removed shared memory segment with shmid %d", id);
+     }
+
+     {
+         struct msqid_ds msg_ds;
+         ic.ipcs_magic = IPCS_MAGIC;
+         ic.ipcs_op = IPCS_MSG_ITER;
+         ic.ipcs_cursor = 0;
+         ic.ipcs_data = &msg_ds;
+         ic.ipcs_datalen = sizeof(msg_ds);
+
+         while (true) {
+             memset(&msg_ds, 0, sizeof(msg_ds));
+
+             if (sysctlbyname(IPCS_MSG_SYSCTL, &ic, &ic_size, &ic, ic_size) != 0) {
+                 break;
+             }
+
+             if (msg_ds.msg_perm.uid == uid) {
+                 int msgid = msgget(msg_ds.msg_perm._key, 0);
+                 if (msgid != -1) {
+                     msg_ids.push_back(msgid);
+                 }
+             }
+         }
+     }
+
+     for (auto id : msg_ids) {
+         if (msgctl(id, IPC_RMID, NULL) == 0)
+             debug("removed message queue with msgid %d", id);
+     }
+
+     {
+         struct semid_ds sem_ds;
+         ic.ipcs_magic = IPCS_MAGIC;
+         ic.ipcs_op = IPCS_SEM_ITER;
+         ic.ipcs_cursor = 0;
+         ic.ipcs_data = &sem_ds;
+         ic.ipcs_datalen = sizeof(sem_ds);
+
+         while (true) {
+             memset(&sem_ds, 0, sizeof(sem_ds));
+
+             if (sysctlbyname(IPCS_SEM_SYSCTL, &ic, &ic_size, &ic, ic_size) != 0) {
+                 break;
+             }
+
+             if (sem_ds.sem_perm.uid == uid) {
+                 int semid = semget(sem_ds.sem_perm._key, 0, 0);
+                 if (semid != -1) {
+                     sem_ids.push_back(semid);
+                 }
+             }
+         }
+     }
+
+     for (auto id : sem_ids) {
+         if (semctl(id, 0, IPC_RMID) == 0)
+             debug("removed semaphore with semid %d", id);
+     }
+ }
+
+ void killSandbox(bool getStats) override
+ {
+     DerivationBuilderImpl::killSandbox(getStats);
+     if (buildUser) {
+         auto uid = buildUser->getUID();
+         cleanupSysVIPCForUser(uid);
+     }
+ }
  };

  } // namespace nix
@@ -1887,7 +1887,7 @@ void DerivationBuilderImpl::cleanupBuild(bool force)
  if (force) {
      /* Delete unused redirected outputs (when doing hash rewriting). */
      for (auto & i : redirectedOutputs)
-         deletePath(store.Store::toRealPath(i.second));
+         deletePath(store.toRealPath(i.second));
  }

  if (topTmpDir != "") {
@@ -269,7 +269,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
  .tag = Xp::LocalOverlayStore,
  .name = "local-overlay-store",
  .description = R"(
-     Allow the use of [local overlay store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-overlay-store).
+     Allow the use of [local overlay store](@docroot@/command-ref/new-cli/nix3-help-stores.md#experimental-local-overlay-store).
  )",
  .trackingUrl = "https://github.com/NixOS/nix/milestone/50",
  },
@@ -793,8 +793,6 @@ struct CmdFlakeCheck : FlakeCommand
  // via substitution, as `nix flake check` only needs to verify buildability,
  // not actually produce the outputs.
  auto missing = store->queryMissing(drvPaths);
- // Only occurs if `drvPaths` contains a `DerivedPath::Opaque`, which should never happen
- assert(missing.unknown.empty());

  std::vector<DerivedPath> toBuild;
  for (auto & path : missing.willBuild) {
@@ -811,6 +809,8 @@ struct CmdFlakeCheck : FlakeCommand
  if (hasErrors)
      throw Error("some errors were encountered during the evaluation");

+ logger->log(lvlInfo, ANSI_GREEN "all checks passed!" ANSI_NORMAL);
+
  if (!omittedSystems.empty()) {
      // TODO: empty system is not visible; render all as nix strings?
      warn(
@@ -187,7 +187,7 @@ Currently the `type` attribute can be one of the following:
  * `nixpkgs/nixos-unstable/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293`
  * `sub/dir` (if a flake named `sub` is in the registry)

- * <a name="path-fetcher"></a>`path`: arbitrary local directories. The required attribute `path`
+ * <a id="path-fetcher"></a>`path`: arbitrary local directories. The required attribute `path`
    specifies the path of the flake. The URL form is

    ```
@@ -122,37 +122,33 @@ static void update(const StringSet & channelNames)
  // got redirected in the process, so that we can grab the various parts of a nix channel
  // definition from a consistent location if the redirect changes mid-download.
  auto result = fetchers::downloadFile(store, fetchSettings, url, std::string(baseNameOf(url)));
- auto filename = store->toRealPath(result.storePath);
  url = result.effectiveUrl;

  bool unpacked = false;
- if (std::regex_search(filename, std::regex("\\.tar\\.(gz|bz2|xz)$"))) {
+ if (std::regex_search(std::string{result.storePath.to_string()}, std::regex("\\.tar\\.(gz|bz2|xz)$"))) {
      runProgram(
          getNixBin("nix-build").string(),
          false,
          {"--no-out-link",
           "--expr",
           "import " + unpackChannelPath + "{ name = \"" + cname + "\"; channelName = \"" + name
-              + "\"; src = builtins.storePath \"" + filename + "\"; }"});
+              + "\"; src = builtins.storePath \"" + store->printStorePath(result.storePath) + "\"; }"});
      unpacked = true;
  }

  if (!unpacked) {
      // Download the channel tarball.
      try {
-         filename = store->toRealPath(
-             fetchers::downloadFile(store, fetchSettings, url + "/nixexprs.tar.xz", "nixexprs.tar.xz")
-                 .storePath);
+         result = fetchers::downloadFile(store, fetchSettings, url + "/nixexprs.tar.xz", "nixexprs.tar.xz");
      } catch (FileTransferError & e) {
-         filename = store->toRealPath(
-             fetchers::downloadFile(store, fetchSettings, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2")
-                 .storePath);
+         result =
+             fetchers::downloadFile(store, fetchSettings, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2");
      }
  }
  // Regardless of where it came from, add the expression representing this channel to accumulated expression
  exprs.push_back(
      "f: f { name = \"" + cname + "\"; channelName = \"" + name + "\"; src = builtins.storePath \""
-         + filename + "\"; " + extraAttrs + " }");
+         + store->printStorePath(result.storePath) + "\"; " + extraAttrs + " }");
  }
  }
