1
1
Fork 0
mirror of https://github.com/NixOS/nix.git synced 2025-11-18 08:19:35 +01:00

Merge branch 'master' into flake_show_attr

This commit is contained in:
Matthieu Coudron 2025-11-15 23:30:42 +01:00 committed by GitHub
commit 653d701300
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
466 changed files with 11446 additions and 3038 deletions

18
.coderabbit.yaml Normal file
View file

@ -0,0 +1,18 @@
# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json
# Disable CodeRabbit auto-review to prevent verbose comments on PRs.
# When enabled: false, CodeRabbit won't attempt reviews and won't post
# "Review skipped" or other automated comments.
reviews:
auto_review:
enabled: false
review_status: false
high_level_summary: false
poem: false
sequence_diagrams: false
changed_files_summary: false
tools:
github-checks:
enabled: false
chat:
art: false
auto_reply: false

View file

@ -16,13 +16,17 @@ inputs:
install_url: install_url:
description: "URL of the Nix installer" description: "URL of the Nix installer"
required: false required: false
default: "https://releases.nixos.org/nix/nix-2.30.2/install" default: "https://releases.nixos.org/nix/nix-2.32.1/install"
tarball_url: tarball_url:
description: "URL of the Nix tarball to use with the experimental installer" description: "URL of the Nix tarball to use with the experimental installer"
required: false required: false
github_token: github_token:
description: "Github token" description: "Github token"
required: true required: true
use_cache:
description: "Whether to setup magic-nix-cache"
default: true
required: false
runs: runs:
using: "composite" using: "composite"
steps: steps:
@ -118,3 +122,10 @@ runs:
source-url: ${{ inputs.experimental-installer-version != 'latest' && 'https://artifacts.nixos.org/experimental-installer/tag/${{ inputs.experimental-installer-version }}/${{ env.EXPERIMENTAL_INSTALLER_ARTIFACT }}' || '' }} source-url: ${{ inputs.experimental-installer-version != 'latest' && 'https://artifacts.nixos.org/experimental-installer/tag/${{ inputs.experimental-installer-version }}/${{ env.EXPERIMENTAL_INSTALLER_ARTIFACT }}' || '' }}
nix-package-url: ${{ inputs.dogfood == 'true' && steps.download-nix-installer.outputs.tarball-path || (inputs.tarball_url || '') }} nix-package-url: ${{ inputs.dogfood == 'true' && steps.download-nix-installer.outputs.tarball-path || (inputs.tarball_url || '') }}
extra-conf: ${{ inputs.extra_nix_config }} extra-conf: ${{ inputs.extra_nix_config }}
- uses: DeterminateSystems/magic-nix-cache-action@565684385bcd71bad329742eefe8d12f2e765b39 # v13
if: ${{ inputs.use_cache == 'true' }}
with:
diagnostic-endpoint: ''
use-flakehub: false
use-gha-cache: true
source-revision: 92d9581367be2233c2d5714a2640e1339f4087d8 # main

View file

@ -14,6 +14,10 @@ on:
default: true default: true
type: boolean type: boolean
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
permissions: read-all permissions: read-all
jobs: jobs:
@ -29,6 +33,7 @@ jobs:
extra_nix_config: extra_nix_config:
experimental-features = nix-command flakes experimental-features = nix-command flakes
github_token: ${{ secrets.GITHUB_TOKEN }} github_token: ${{ secrets.GITHUB_TOKEN }}
use_cache: false
- run: nix flake show --all-systems --json - run: nix flake show --all-systems --json
pre-commit-checks: pre-commit-checks:
@ -41,7 +46,6 @@ jobs:
dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }} dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }}
extra_nix_config: experimental-features = nix-command flakes extra_nix_config: experimental-features = nix-command flakes
github_token: ${{ secrets.GITHUB_TOKEN }} github_token: ${{ secrets.GITHUB_TOKEN }}
- uses: DeterminateSystems/magic-nix-cache-action@main
- run: ./ci/gha/tests/pre-commit-checks - run: ./ci/gha/tests/pre-commit-checks
basic-checks: basic-checks:
@ -92,7 +96,6 @@ jobs:
dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }} dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }}
# The sandbox would otherwise be disabled by default on Darwin # The sandbox would otherwise be disabled by default on Darwin
extra_nix_config: "sandbox = true" extra_nix_config: "sandbox = true"
- uses: DeterminateSystems/magic-nix-cache-action@main
# Since ubuntu 22.30, unprivileged usernamespaces are no longer allowed to map to the root user: # Since ubuntu 22.30, unprivileged usernamespaces are no longer allowed to map to the root user:
# https://ubuntu.com/blog/ubuntu-23-10-restricted-unprivileged-user-namespaces # https://ubuntu.com/blog/ubuntu-23-10-restricted-unprivileged-user-namespaces
- run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0 - run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0
@ -122,13 +125,13 @@ jobs:
cat coverage-reports/index.txt >> $GITHUB_STEP_SUMMARY cat coverage-reports/index.txt >> $GITHUB_STEP_SUMMARY
if: ${{ matrix.instrumented }} if: ${{ matrix.instrumented }}
- name: Upload coverage reports - name: Upload coverage reports
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v5
with: with:
name: coverage-reports name: coverage-reports
path: coverage-reports/ path: coverage-reports/
if: ${{ matrix.instrumented }} if: ${{ matrix.instrumented }}
- name: Upload installer tarball - name: Upload installer tarball
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v5
with: with:
name: installer-${{matrix.os}} name: installer-${{matrix.os}}
path: out/* path: out/*
@ -161,7 +164,7 @@ jobs:
steps: steps:
- uses: actions/checkout@v5 - uses: actions/checkout@v5
- name: Download installer tarball - name: Download installer tarball
uses: actions/download-artifact@v5 uses: actions/download-artifact@v6
with: with:
name: installer-${{matrix.os}} name: installer-${{matrix.os}}
path: out path: out
@ -171,7 +174,7 @@ jobs:
echo "installer-url=file://$GITHUB_WORKSPACE/out" >> "$GITHUB_OUTPUT" echo "installer-url=file://$GITHUB_WORKSPACE/out" >> "$GITHUB_OUTPUT"
TARBALL_PATH="$(find "$GITHUB_WORKSPACE/out" -name 'nix*.tar.xz' -print | head -n 1)" TARBALL_PATH="$(find "$GITHUB_WORKSPACE/out" -name 'nix*.tar.xz' -print | head -n 1)"
echo "tarball-path=file://$TARBALL_PATH" >> "$GITHUB_OUTPUT" echo "tarball-path=file://$TARBALL_PATH" >> "$GITHUB_OUTPUT"
- uses: cachix/install-nix-action@v31 - uses: cachix/install-nix-action@7ec16f2c061ab07b235a7245e06ed46fe9a1cab6 # v31.8.3
if: ${{ !matrix.experimental-installer }} if: ${{ !matrix.experimental-installer }}
with: with:
install_url: ${{ format('{0}/install', steps.installer-tarball-url.outputs.installer-url) }} install_url: ${{ format('{0}/install', steps.installer-tarball-url.outputs.installer-url) }}
@ -227,12 +230,13 @@ jobs:
- uses: actions/checkout@v5 - uses: actions/checkout@v5
with: with:
fetch-depth: 0 fetch-depth: 0
- uses: cachix/install-nix-action@v31 - uses: ./.github/actions/install-nix-action
with: with:
install_url: https://releases.nixos.org/nix/nix-2.20.3/install dogfood: false
- uses: DeterminateSystems/magic-nix-cache-action@main extra_nix_config: |
- run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#nix.version | tr -d \")" >> $GITHUB_ENV experimental-features = flakes nix-command
- run: nix --experimental-features 'nix-command flakes' build .#dockerImage -L - run: echo NIX_VERSION="$(nix eval .\#nix.version | tr -d \")" >> $GITHUB_ENV
- run: nix build .#dockerImage -L
- run: docker load -i ./result/image.tar.gz - run: docker load -i ./result/image.tar.gz
- run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:$NIX_VERSION - run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:$NIX_VERSION
- run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:master - run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:master
@ -289,7 +293,6 @@ jobs:
extra_nix_config: extra_nix_config:
experimental-features = nix-command flakes experimental-features = nix-command flakes
github_token: ${{ secrets.GITHUB_TOKEN }} github_token: ${{ secrets.GITHUB_TOKEN }}
- uses: DeterminateSystems/magic-nix-cache-action@main
- run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh
profile_build: profile_build:
@ -310,7 +313,6 @@ jobs:
extra_nix_config: | extra_nix_config: |
experimental-features = flakes nix-command ca-derivations impure-derivations experimental-features = flakes nix-command ca-derivations impure-derivations
max-jobs = 1 max-jobs = 1
- uses: DeterminateSystems/magic-nix-cache-action@main
- run: | - run: |
nix build -L --file ./ci/gha/profile-build buildTimeReport --out-link build-time-report.md nix build -L --file ./ci/gha/profile-build buildTimeReport --out-link build-time-report.md
cat build-time-report.md >> $GITHUB_STEP_SUMMARY cat build-time-report.md >> $GITHUB_STEP_SUMMARY

View file

@ -107,12 +107,29 @@ rec {
}; };
}; };
disable =
let
inherit (pkgs.stdenv) hostPlatform;
in
args@{
pkgName,
testName,
test,
}:
lib.any (b: b) [
# FIXME: Nix manual is impure and does not produce all settings on darwin
(hostPlatform.isDarwin && pkgName == "nix-manual" && testName == "linkcheck")
];
componentTests = componentTests =
(lib.concatMapAttrs ( (lib.concatMapAttrs (
pkgName: pkg: pkgName: pkg:
lib.concatMapAttrs (testName: test: { lib.concatMapAttrs (
"${componentTestsPrefix}${pkgName}-${testName}" = test; testName: test:
}) (pkg.tests or { }) lib.optionalAttrs (!disable { inherit pkgName testName test; }) {
"${componentTestsPrefix}${pkgName}-${testName}" = test;
}
) (pkg.tests or { })
) nixComponentsInstrumented) ) nixComponentsInstrumented)
// lib.optionalAttrs (pkgs.stdenv.hostPlatform == pkgs.stdenv.buildPlatform) { // lib.optionalAttrs (pkgs.stdenv.hostPlatform == pkgs.stdenv.buildPlatform) {
"${componentTestsPrefix}nix-functional-tests" = nixComponentsInstrumented.nix-functional-tests; "${componentTestsPrefix}nix-functional-tests" = nixComponentsInstrumented.nix-functional-tests;

View file

@ -3,7 +3,7 @@
def transform_anchors_html: def transform_anchors_html:
. | gsub($empty_anchor_regex; "<a name=\"" + .anchor + "\"></a>") . | gsub($empty_anchor_regex; "<a id=\"" + .anchor + "\"></a>")
| gsub($anchor_regex; "<a href=\"#" + .anchor + "\" id=\"" + .anchor + "\">" + .text + "</a>"); | gsub($anchor_regex; "<a href=\"#" + .anchor + "\" id=\"" + .anchor + "\">" + .text + "</a>");

View file

@ -7,6 +7,7 @@ additional-css = ["custom.css"]
additional-js = ["redirects.js"] additional-js = ["redirects.js"]
edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}" edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}"
git-repository-url = "https://github.com/NixOS/nix" git-repository-url = "https://github.com/NixOS/nix"
mathjax-support = true
# Handles replacing @docroot@ with a path to ./source relative to that markdown file, # Handles replacing @docroot@ with a path to ./source relative to that markdown file,
# {{#include handlebars}}, and the @generated@ syntax used within these. it mostly # {{#include handlebars}}, and the @generated@ syntax used within these. it mostly

View file

@ -24,9 +24,9 @@ let
in in
concatStringsSep "\n" (map showEntry storesList); concatStringsSep "\n" (map showEntry storesList);
"index.md" = "index.md" = replaceStrings [ "@store-types@" ] [ index ] (
replaceStrings [ "@store-types@" ] [ index ] readFile ./source/store/types/index.md.in
(readFile ./source/store/types/index.md.in); );
tableOfContents = tableOfContents =
let let

View file

@ -92,6 +92,8 @@ manual = custom_target(
(cd @2@; RUST_LOG=warn @1@ build -d @2@ 3>&2 2>&1 1>&3) | { grep -Fv "because fragment resolution isn't implemented" || :; } 3>&2 2>&1 1>&3 (cd @2@; RUST_LOG=warn @1@ build -d @2@ 3>&2 2>&1 1>&3) | { grep -Fv "because fragment resolution isn't implemented" || :; } 3>&2 2>&1 1>&3
rm -rf @2@/manual rm -rf @2@/manual
mv @2@/html @2@/manual mv @2@/html @2@/manual
# Remove Mathjax 2.7, because we will actually use MathJax 3.x
find @2@/manual | grep .html | xargs sed -i -e '/2.7.1.MathJax.js/d'
find @2@/manual -iname meson.build -delete find @2@/manual -iname meson.build -delete
'''.format( '''.format(
python.full_path(), python.full_path(),

View file

@ -18,6 +18,9 @@
# Configuration Options # Configuration Options
version, version,
# `tests` attribute
testers,
}: }:
let let
@ -35,6 +38,13 @@ mkMesonDerivation (finalAttrs: {
../../.version ../../.version
# For example JSON # For example JSON
../../src/libutil-tests/data/hash ../../src/libutil-tests/data/hash
../../src/libstore-tests/data/content-address
../../src/libstore-tests/data/store-path
../../src/libstore-tests/data/realisation
../../src/libstore-tests/data/derived-path
../../src/libstore-tests/data/path-info
../../src/libstore-tests/data/nar-info
../../src/libstore-tests/data/build-result
# Too many different types of files to filter for now # Too many different types of files to filter for now
../../doc/manual ../../doc/manual
./. ./.
@ -48,8 +58,8 @@ mkMesonDerivation (finalAttrs: {
"man" "man"
]; ];
# Hack for sake of the dev shell nativeBuildInputs = [
passthru.externalNativeBuildInputs = [ nix-cli
meson meson
ninja ninja
(lib.getBin lowdown-unsandboxed) (lib.getBin lowdown-unsandboxed)
@ -68,10 +78,6 @@ mkMesonDerivation (finalAttrs: {
changelog-d changelog-d
]; ];
nativeBuildInputs = finalAttrs.passthru.externalNativeBuildInputs ++ [
nix-cli
];
preConfigure = '' preConfigure = ''
chmod u+w ./.version chmod u+w ./.version
echo ${finalAttrs.version} > ./.version echo ${finalAttrs.version} > ./.version
@ -82,6 +88,29 @@ mkMesonDerivation (finalAttrs: {
echo "doc manual ''$out/share/doc/nix/manual" >> ''$out/nix-support/hydra-build-products echo "doc manual ''$out/share/doc/nix/manual" >> ''$out/nix-support/hydra-build-products
''; '';
/**
The root of the HTML manual.
E.g. "${nix-manual.site}/index.html" exists.
*/
passthru.site = finalAttrs.finalPackage + "/share/doc/nix/manual";
passthru.tests = {
# https://nixos.org/manual/nixpkgs/stable/index.html#tester-lycheeLinkCheck
linkcheck = testers.lycheeLinkCheck {
inherit (finalAttrs.finalPackage) site;
extraConfig = {
exclude = [
# Exclude auto-generated JSON schema documentation which has
# auto-generated fragment IDs that don't match the link references
".*/protocols/json/.*\\.html"
# Exclude undocumented builtins
".*/language/builtins\\.html#builtins-addErrorContext"
".*/language/builtins\\.html#builtins-appendContext"
];
};
};
};
meta = { meta = {
platforms = lib.platforms.all; platforms = lib.platforms.all;
}; };

View file

@ -0,0 +1,9 @@
---
synopsis: Channel URLs migrated to channels.nixos.org subdomain
prs: [14518]
issues: [14517]
---
Channel URLs have been updated from `https://nixos.org/channels/` to `https://channels.nixos.org/` throughout Nix.
The subdomain provides better reliability with IPv6 support and improved CDN distribution. The old domain apex (`nixos.org/channels/`) currently redirects to the new location but may be deprecated in the future.

View file

@ -0,0 +1,55 @@
---
synopsis: "JSON format changes for store path info and derivations"
prs: []
issues: []
---
JSON formats for store path info and derivations have been updated with new versions and structured fields.
## Store Path Info JSON (Version 2)
The store path info JSON format has been updated from version 1 to version 2:
- **Added `version` field**:
All store path info JSON now includes `"version": 2`.
- **Structured `ca` field**:
Content address is now a structured JSON object instead of a string:
- Old: `"ca": "fixed:r:sha256:1abc..."`
- New: `"ca": {"method": "nar", "hash": {"algorithm": "sha256", "format": "base64", "hash": "EMIJ+giQ..."}}`
- Still `null` values for input-addressed store objects
- **Structured hash fields**:
Hash values (`narHash` and `downloadHash`) are now structured JSON objects instead of strings:
- Old: `"narHash": "sha256:FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="`
- New: `"narHash": {"algorithm": "sha256", "format": "base64", "hash": "FePFYIlM..."}`
- Same structure applies to `downloadHash` in NAR info contexts
Nix currently only produces, and doesn't consume this format.
**Affected command**: `nix path-info --json`
## Derivation JSON (Version 4)
The derivation JSON format has been updated from version 3 to version 4:
- **Restructured inputs**:
Inputs are now nested under an `inputs` object:
- Old: `"inputSrcs": [...], "inputDrvs": {...}`
- New: `"inputs": {"srcs": [...], "drvs": {...}}`
- **Consistent content addresses**:
Floating content-addressed outputs now use structured JSON format.
This is the same format as `ca` in in store path info (after the new version).
Version 3 and earlier formats are *not* accepted when reading.
**Affected command**: `nix derivation`, namely it's `show` and `add` sub-commands.

View file

@ -1,6 +1,6 @@
--- ---
synopsis: "Improved S3 binary cache support via HTTP" synopsis: "Improved S3 binary cache support via HTTP"
prs: [13823, 14026, 14120, 14131, 14135, 14144, 14170, 14190, 14198, 14206, 14209, 14222, 14223, 13752] prs: [13752, 13823, 14026, 14120, 14131, 14135, 14144, 14170, 14190, 14198, 14206, 14209, 14222, 14223, 14330, 14333, 14335, 14336, 14337, 14350, 14356, 14357, 14374, 14375, 14376, 14377, 14391, 14393, 14420, 14421]
issues: [13084, 12671, 11748, 12403] issues: [13084, 12671, 11748, 12403]
--- ---
@ -18,9 +18,23 @@ improvements:
The new implementation requires curl >= 7.75.0 and `aws-crt-cpp` for credential The new implementation requires curl >= 7.75.0 and `aws-crt-cpp` for credential
management. management.
All existing S3 URL formats and parameters remain supported, with the notable All existing S3 URL formats and parameters remain supported, however the store
exception of multi-part uploads, which are no longer supported. settings for configuring multipart uploads have changed:
- **`multipart-upload`** (default: `false`): Enable multipart uploads for large
files. When enabled, files exceeding the multipart threshold will be uploaded
in multiple parts.
- **`multipart-threshold`** (default: `100 MiB`): Minimum file size for using
multipart uploads. Files smaller than this will use regular PUT requests.
Only takes effect when `multipart-upload` is enabled.
- **`multipart-chunk-size`** (default: `5 MiB`): Size of each part in multipart
uploads. Must be at least 5 MiB (AWS S3 requirement). Larger chunk sizes
reduce the number of requests but use more memory.
- **`buffer-size`**: Has been replaced by `multipart-chunk-size` and is now an alias to it.
Note that this change also means Nix now supports S3 binary cache stores even Note that this change also means Nix now supports S3 binary cache stores even
if build without `aws-crt-cpp`, but only for public buckets which do not if built without `aws-crt-cpp`, but only for public buckets which do not
require auth. require authentication.

View file

@ -0,0 +1,14 @@
---
synopsis: "S3 URLs now support object versioning via versionId parameter"
prs: [14274]
issues: [13955]
---
S3 URLs now support a `versionId` query parameter to fetch specific versions
of objects from S3 buckets with versioning enabled. This allows pinning to
exact object versions for reproducibility and protection against unexpected
changes:
```
s3://bucket/key?region=us-east-1&versionId=abc123def456
```

View file

@ -0,0 +1,21 @@
---
synopsis: "S3 binary cache stores now support storage class configuration"
prs: [14464]
issues: [7015]
---
S3 binary cache stores now support configuring the storage class for uploaded objects via the `storage-class` parameter. This allows users to optimize costs by selecting appropriate storage tiers based on access patterns.
Example usage:
```bash
# Use Glacier storage for long-term archival
nix copy --to 's3://my-bucket?storage-class=GLACIER' /nix/store/...
# Use Intelligent Tiering for automatic cost optimization
nix copy --to 's3://my-bucket?storage-class=INTELLIGENT_TIERING' /nix/store/...
```
The storage class applies to both regular uploads and multipart uploads. When not specified, objects use the bucket's default storage class.
See the [S3 storage classes documentation](https://docs.aws.amazon.com/AmazonS3/latest/userguide/storage-class-intro.html) for available storage classes and their characteristics.

View file

@ -26,9 +26,13 @@
- [Derivation Outputs and Types of Derivations](store/derivation/outputs/index.md) - [Derivation Outputs and Types of Derivations](store/derivation/outputs/index.md)
- [Content-addressing derivation outputs](store/derivation/outputs/content-address.md) - [Content-addressing derivation outputs](store/derivation/outputs/content-address.md)
- [Input-addressing derivation outputs](store/derivation/outputs/input-address.md) - [Input-addressing derivation outputs](store/derivation/outputs/input-address.md)
- [Build Trace](store/build-trace.md)
- [Derivation Resolution](store/resolution.md)
- [Building](store/building.md) - [Building](store/building.md)
- [Secrets](store/secrets.md)
- [Store Types](store/types/index.md) - [Store Types](store/types/index.md)
{{#include ./store/types/SUMMARY.md}} {{#include ./store/types/SUMMARY.md}}
- [Appendix: Math notation](store/math-notation.md)
- [Nix Language](language/index.md) - [Nix Language](language/index.md)
- [Data Types](language/types.md) - [Data Types](language/types.md)
- [String context](language/string-context.md) - [String context](language/string-context.md)
@ -118,11 +122,16 @@
- [Formats and Protocols](protocols/index.md) - [Formats and Protocols](protocols/index.md)
- [JSON Formats](protocols/json/index.md) - [JSON Formats](protocols/json/index.md)
- [Hash](protocols/json/hash.md) - [Hash](protocols/json/hash.md)
- [Content Address](protocols/json/content-address.md)
- [Store Path](protocols/json/store-path.md)
- [Store Object Info](protocols/json/store-object-info.md) - [Store Object Info](protocols/json/store-object-info.md)
- [Derivation](protocols/json/derivation.md) - [Derivation](protocols/json/derivation.md)
- [Deriving Path](protocols/json/deriving-path.md)
- [Build Trace Entry](protocols/json/build-trace-entry.md)
- [Build Result](protocols/json/build-result.md)
- [Serving Tarball Flakes](protocols/tarball-fetcher.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md)
- [Store Path Specification](protocols/store-path.md) - [Store Path Specification](protocols/store-path.md)
- [Nix Archive (NAR) Format](protocols/nix-archive.md) - [Nix Archive (NAR) Format](protocols/nix-archive/index.md)
- [Derivation "ATerm" file format](protocols/derivation-aterm.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md)
- [C API](c-api.md) - [C API](c-api.md)
- [Glossary](glossary.md) - [Glossary](glossary.md)

View file

@ -11,10 +11,10 @@
Channels are a mechanism for referencing remote Nix expressions and conveniently retrieving their latest version. Channels are a mechanism for referencing remote Nix expressions and conveniently retrieving their latest version.
The moving parts of channels are: The moving parts of channels are:
- The official channels listed at <https://nixos.org/channels> - The official channels listed at <https://channels.nixos.org>
- The user-specific list of [subscribed channels](#subscribed-channels) - The user-specific list of [subscribed channels](#subscribed-channels)
- The [downloaded channel contents](#channels) - The [downloaded channel contents](#channels)
- The [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path), set with the [`-I` option](#opt-i) or the [`NIX_PATH` environment variable](#env-NIX_PATH) - The [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path), set with the [`-I` option](#opt-I) or the [`NIX_PATH` environment variable](#env-NIX_PATH)
> **Note** > **Note**
> >
@ -88,9 +88,9 @@ This command has the following operations:
Subscribe to the Nixpkgs channel and run `hello` from the GNU Hello package: Subscribe to the Nixpkgs channel and run `hello` from the GNU Hello package:
```console ```console
$ nix-channel --add https://nixos.org/channels/nixpkgs-unstable $ nix-channel --add https://channels.nixos.org/nixpkgs-unstable
$ nix-channel --list $ nix-channel --list
nixpkgs https://nixos.org/channels/nixpkgs nixpkgs https://channels.nixos.org/nixpkgs
$ nix-channel --update $ nix-channel --update
$ nix-shell -p hello --run hello $ nix-shell -p hello --run hello
hello hello

View file

@ -22,7 +22,7 @@ left untouched; this is not an error. It is also not an error if an
element of *args* matches no installed derivations. element of *args* matches no installed derivations.
For a description of how *args* is mapped to a set of store paths, see For a description of how *args* is mapped to a set of store paths, see
[`--install`](#operation---install). If *args* describes multiple [`--install`](./install.md). If *args* describes multiple
store paths with the same symbolic name, only the one with the highest store paths with the same symbolic name, only the one with the highest
version is installed. version is installed.

View file

@ -19,7 +19,7 @@
This man page describes the command `nix-shell`, which is distinct from `nix This man page describes the command `nix-shell`, which is distinct from `nix
shell`. For documentation on the latter, run `nix shell --help` or see `man shell`. For documentation on the latter, run `nix shell --help` or see `man
nix3-shell`. nix3-env-shell`.
# Description # Description

View file

@ -48,8 +48,7 @@ The behaviour of the collector is also influenced by the
configuration file. configuration file.
By default, the collector prints the total number of freed bytes when it By default, the collector prints the total number of freed bytes when it
finishes (or when it is interrupted). With `--print-dead`, it prints the finishes (or when it is interrupted).
number of bytes that would be freed.
{{#include ./opt-common.md}} {{#include ./opt-common.md}}

View file

@ -66,7 +66,7 @@ You can also build Nix for one of the [supported platforms](#platforms).
This section assumes you are using Nix with the [`flakes`] and [`nix-command`] experimental features enabled. This section assumes you are using Nix with the [`flakes`] and [`nix-command`] experimental features enabled.
[`flakes`]: @docroot@/development/experimental-features.md#xp-feature-flakes [`flakes`]: @docroot@/development/experimental-features.md#xp-feature-flakes
[`nix-command`]: @docroot@/development/experimental-features.md#xp-nix-command [`nix-command`]: @docroot@/development/experimental-features.md#xp-feature-nix-command
To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found: To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:
@ -256,7 +256,7 @@ You can use any of the other supported environments in place of `nix-cli-ccacheS
## Editor integration ## Editor integration
The `clangd` LSP server is installed by default on the `clang`-based `devShell`s. The `clangd` LSP server is installed by default on the `clang`-based `devShell`s.
See [supported compilation environments](#compilation-environments) and instructions how to set up a shell [with flakes](#nix-with-flakes) or in [classic Nix](#classic-nix). See [supported compilation environments](#compilation-environments) and instructions how to set up a shell [with flakes](#building-nix-with-flakes) or in [classic Nix](#building-nix).
To use the LSP with your editor, you will want a `compile_commands.json` file telling `clangd` how we are compiling the code. To use the LSP with your editor, you will want a `compile_commands.json` file telling `clangd` how we are compiling the code.
Meson's configure always produces this inside the build directory. Meson's configure always produces this inside the build directory.

View file

@ -240,3 +240,9 @@ $ configurePhase
$ ninja src/external-api-docs/html $ ninja src/external-api-docs/html
$ xdg-open src/external-api-docs/html/index.html $ xdg-open src/external-api-docs/html/index.html
``` ```
If you use direnv, or otherwise want to run `configurePhase` in a transient shell, use:
```bash
nix-shell -A devShells.x86_64-linux.native-clangStdenv --command 'appendToVar mesonFlags "-Ddoc-gen=true"; mesonConfigurePhase'
```

View file

@ -119,7 +119,7 @@ This will:
3. Stop the program when the test fails, allowing the user to then issue arbitrary commands to GDB. 3. Stop the program when the test fails, allowing the user to then issue arbitrary commands to GDB.
### Characterisation testing { #characaterisation-testing-unit } ### Characterisation testing { #characterisation-testing-unit }
See [functional characterisation testing](#characterisation-testing-functional) for a broader discussion of characterisation testing. See [functional characterisation testing](#characterisation-testing-functional) for a broader discussion of characterisation testing.

View file

@ -208,7 +208,7 @@
- [impure derivation]{#gloss-impure-derivation} - [impure derivation]{#gloss-impure-derivation}
[An experimental feature](#@docroot@/development/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure, [An experimental feature](@docroot@/development/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure,
so that they are always rebuilt, and their outputs not reused by subsequent calls to realise them. so that they are always rebuilt, and their outputs not reused by subsequent calls to realise them.
- [Nix database]{#gloss-nix-database} - [Nix database]{#gloss-nix-database}
@ -279,7 +279,7 @@
See [References](@docroot@/store/store-object.md#references) for details. See [References](@docroot@/store/store-object.md#references) for details.
- [referrer]{#gloss-reference} - [referrer]{#gloss-referrer}
A reversed edge from one [store object] to another. A reversed edge from one [store object] to another.
@ -367,8 +367,8 @@
Nix represents files as [file system objects][file system object], and how they belong together is encoded as [references][reference] between [store objects][store object] that contain these file system objects. Nix represents files as [file system objects][file system object], and how they belong together is encoded as [references][reference] between [store objects][store object] that contain these file system objects.
The [Nix language] allows denoting packages in terms of [attribute sets](@docroot@/language/types.md#attribute-set) containing: The [Nix language] allows denoting packages in terms of [attribute sets](@docroot@/language/types.md#type-attrs) containing:
- attributes that refer to the files of a package, typically in the form of [derivation outputs](#output), - attributes that refer to the files of a package, typically in the form of [derivation outputs](#gloss-output),
- attributes with metadata, such as information about how the package is supposed to be used. - attributes with metadata, such as information about how the package is supposed to be used.
The exact shape of these attribute sets is up to convention. The exact shape of these attribute sets is up to convention.
@ -383,7 +383,7 @@
[string]: ./language/types.md#type-string [string]: ./language/types.md#type-string
[path]: ./language/types.md#type-path [path]: ./language/types.md#type-path
[attribute name]: ./language/types.md#attribute-set [attribute name]: ./language/types.md#type-attrs
- [base directory]{#gloss-base-directory} - [base directory]{#gloss-base-directory}

View file

@ -3,19 +3,21 @@
To run the latest stable release of Nix with Docker run the following command: To run the latest stable release of Nix with Docker run the following command:
```console ```console
$ docker run -ti ghcr.io/nixos/nix $ docker run -ti docker.io/nixos/nix
Unable to find image 'ghcr.io/nixos/nix:latest' locally Unable to find image 'docker.io/nixos/nix:latest' locally
latest: Pulling from ghcr.io/nixos/nix latest: Pulling from docker.io/nixos/nix
5843afab3874: Pull complete 5843afab3874: Pull complete
b52bf13f109c: Pull complete b52bf13f109c: Pull complete
1e2415612aa3: Pull complete 1e2415612aa3: Pull complete
Digest: sha256:27f6e7f60227e959ee7ece361f75d4844a40e1cc6878b6868fe30140420031ff Digest: sha256:27f6e7f60227e959ee7ece361f75d4844a40e1cc6878b6868fe30140420031ff
Status: Downloaded newer image for ghcr.io/nixos/nix:latest Status: Downloaded newer image for docker.io/nixos/nix:latest
35ca4ada6e96:/# nix --version 35ca4ada6e96:/# nix --version
nix (Nix) 2.3.12 nix (Nix) 2.3.12
35ca4ada6e96:/# exit 35ca4ada6e96:/# exit
``` ```
> If you want the latest pre-release you can use ghcr.io/nixos/nix and view them at https://github.com/nixos/nix/pkgs/container/nix
# What is included in Nix's Docker image? # What is included in Nix's Docker image?
The official Docker image is created using `pkgs.dockerTools.buildLayeredImage` The official Docker image is created using `pkgs.dockerTools.buildLayeredImage`

View file

@ -333,7 +333,7 @@ Here is more information on the `output*` attributes, and what values they may b
`outputHashAlgo` can only be `null` when `outputHash` follows the SRI format, because in that case the choice of hash algorithm is determined by `outputHash`. `outputHashAlgo` can only be `null` when `outputHash` follows the SRI format, because in that case the choice of hash algorithm is determined by `outputHash`.
- [`outputHash`]{#adv-attr-outputHashAlgo}; [`outputHash`]{#adv-attr-outputHashMode} - [`outputHash`]{#adv-attr-outputHash}
This will specify the output hash of the single output of a [fixed-output derivation]. This will specify the output hash of the single output of a [fixed-output derivation].

View file

@ -16,7 +16,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect
- [`name`]{#attr-name} ([String](@docroot@/language/types.md#type-string)) - [`name`]{#attr-name} ([String](@docroot@/language/types.md#type-string))
A symbolic name for the derivation. A symbolic name for the derivation.
See [derivation outputs](@docroot@/store/derivation/index.md#outputs) for what this is affects. See [derivation outputs](@docroot@/store/derivation/outputs/index.md#outputs) for what this is affects.
[store path]: @docroot@/store/store-path.md [store path]: @docroot@/store/store-path.md

View file

@ -16,7 +16,7 @@ An *identifier* is an [ASCII](https://en.wikipedia.org/wiki/ASCII) character seq
# Names # Names
A *name* can be written as an [identifier](#identifier) or a [string literal](./string-literals.md). A *name* can be written as an [identifier](#identifiers) or a [string literal](./string-literals.md).
> **Syntax** > **Syntax**
> >

View file

@ -137,7 +137,7 @@ This is an incomplete overview of language features, by example.
</td> </td>
<td> <td>
[Booleans](@docroot@/language/types.md#type-boolean) [Booleans](@docroot@/language/types.md#type-bool)
</td> </td>
</tr> </tr>
@ -245,7 +245,7 @@ This is an incomplete overview of language features, by example.
</td> </td>
<td> <td>
An [attribute set](@docroot@/language/types.md#attribute-set) with attributes named `x` and `y` An [attribute set](@docroot@/language/types.md#type-attrs) with attributes named `x` and `y`
</td> </td>
</tr> </tr>
@ -285,7 +285,7 @@ This is an incomplete overview of language features, by example.
</td> </td>
<td> <td>
[Lists](@docroot@/language/types.md#list) with three elements. [Lists](@docroot@/language/types.md#type-list) with three elements.
</td> </td>
</tr> </tr>
@ -369,7 +369,7 @@ This is an incomplete overview of language features, by example.
</td> </td>
<td> <td>
[Attribute selection](@docroot@/language/types.md#attribute-set) (evaluates to `1`) [Attribute selection](@docroot@/language/types.md#type-attrs) (evaluates to `1`)
</td> </td>
</tr> </tr>
@ -381,7 +381,7 @@ This is an incomplete overview of language features, by example.
</td> </td>
<td> <td>
[Attribute selection](@docroot@/language/types.md#attribute-set) with default (evaluates to `3`) [Attribute selection](@docroot@/language/types.md#type-attrs) with default (evaluates to `3`)
</td> </td>
</tr> </tr>

View file

@ -111,7 +111,7 @@ It creates an [attribute set] representing the string context, which can be insp
[`builtins.hasContext`]: ./builtins.md#builtins-hasContext [`builtins.hasContext`]: ./builtins.md#builtins-hasContext
[`builtins.getContext`]: ./builtins.md#builtins-getContext [`builtins.getContext`]: ./builtins.md#builtins-getContext
[attribute set]: ./types.md#attribute-set [attribute set]: ./types.md#type-attrs
## Clearing string contexts ## Clearing string contexts

View file

@ -6,7 +6,7 @@ Such a construct is called *interpolated string*, and the expression inside is a
[string]: ./types.md#type-string [string]: ./types.md#type-string
[path]: ./types.md#type-path [path]: ./types.md#type-path
[attribute set]: ./types.md#attribute-set [attribute set]: ./types.md#type-attrs
> **Syntax** > **Syntax**
> >

View file

@ -51,7 +51,7 @@ See [String literals](string-literals.md).
Path literals can also include [string interpolation], besides being [interpolated into other expressions]. Path literals can also include [string interpolation], besides being [interpolated into other expressions].
[interpolated into other expressions]: ./string-interpolation.md#interpolated-expressions [interpolated into other expressions]: ./string-interpolation.md#interpolated-expression
At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path. At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path.
@ -235,7 +235,7 @@ of object-oriented programming, for example.
## Recursive sets ## Recursive sets
Recursive sets are like normal [attribute sets](./types.md#attribute-set), but the attributes can refer to each other. Recursive sets are like normal [attribute sets](./types.md#type-attrs), but the attributes can refer to each other.
> *rec-attrset* = `rec {` [ *name* `=` *expr* `;` `]`... `}` > *rec-attrset* = `rec {` [ *name* `=` *expr* `;` `]`... `}`
@ -287,7 +287,7 @@ This evaluates to `"foobar"`.
## Inheriting attributes ## Inheriting attributes
When defining an [attribute set](./types.md#attribute-set) or in a [let-expression](#let-expressions) it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes). When defining an [attribute set](./types.md#type-attrs) or in a [let-expression](#let-expressions) it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes).
This can be shortened using the `inherit` keyword. This can be shortened using the `inherit` keyword.
Example: Example:

View file

@ -1,6 +1,8 @@
# Derivation "ATerm" file format # Derivation "ATerm" file format
For historical reasons, [store derivations][store derivation] are stored on-disk in [ATerm](https://homepages.cwi.nl/~daybuild/daily-books/technology/aterm-guide/aterm-guide.html) format. For historical reasons, [store derivations][store derivation] are stored on-disk in "Annotated Term" (ATerm) format
([guide](https://homepages.cwi.nl/~daybuild/daily-books/technology/aterm-guide/aterm-guide.html),
[paper](https://doi.org/10.1002/(SICI)1097-024X(200003)30:3%3C259::AID-SPE298%3E3.0.CO;2-Y)).
## The ATerm format used ## The ATerm format used

View file

@ -0,0 +1,21 @@
{{#include build-result-v1-fixed.md}}
## Examples
### Successful build
```json
{{#include schema/build-result-v1/success.json}}
```
### Failed build (output rejected)
```json
{{#include schema/build-result-v1/output-rejected.json}}
```
### Failed build (non-deterministic)
```json
{{#include schema/build-result-v1/not-deterministic.json}}
```

View file

@ -0,0 +1,27 @@
{{#include build-trace-entry-v1-fixed.md}}
## Examples
### Simple build trace entry
```json
{{#include schema/build-trace-entry-v1/simple.json}}
```
### Build trace entry with dependencies
```json
{{#include schema/build-trace-entry-v1/with-dependent-realisations.json}}
```
### Build trace entry with signature
```json
{{#include schema/build-trace-entry-v1/with-signature.json}}
```
<!--
## Raw Schema
[JSON Schema for Build Trace Entry v1](schema/build-trace-entry-v1.json)
-->

View file

@ -0,0 +1,21 @@
{{#include content-address-v1-fixed.md}}
## Examples
### [Text](@docroot@/store/store-object/content-address.html#method-text) method
```json
{{#include schema/content-address-v1/text.json}}
```
### [Nix Archive](@docroot@/store/store-object/content-address.html#method-nix-archive) method
```json
{{#include schema/content-address-v1/nar.json}}
```
<!-- need to convert YAML to JSON first
## Raw Schema
[JSON Schema for Hash v1](schema/content-address-v1.json)
-->

View file

@ -1,7 +1,7 @@
{{#include derivation-v3-fixed.md}} {{#include derivation-v4-fixed.md}}
<!-- <!-- need to convert YAML to JSON first
## Raw Schema ## Raw Schema
[JSON Schema for Derivation v3](schema/derivation-v3.json) [JSON Schema for Derivation v3](schema/derivation-v4.json)
--> -->

View file

@ -0,0 +1,21 @@
{{#include deriving-path-v1-fixed.md}}
## Examples
### Constant
```json
{{#include schema/deriving-path-v1/single_opaque.json}}
```
### Output of static derivation
```json
{{#include schema/deriving-path-v1/single_built.json}}
```
### Output of dynamic derivation
```json
{{#include schema/deriving-path-v1/single_built_built.json}}
```

View file

@ -12,3 +12,6 @@ s/\\`/`/g
# As we have more such relative links, more replacements of this nature # As we have more such relative links, more replacements of this nature
# should appear below. # should appear below.
s^\(./hash-v1.yaml\)\?#/$defs/algorithm^[JSON format for `Hash`](./hash.html#algorithm)^g s^\(./hash-v1.yaml\)\?#/$defs/algorithm^[JSON format for `Hash`](./hash.html#algorithm)^g
s^\(./hash-v1.yaml\)^[JSON format for `Hash`](./hash.html)^g
s^\(./content-address-v1.yaml\)\?#/$defs/method^[JSON format for `ContentAddress`](./content-address.html#method)^g
s^\(./content-address-v1.yaml\)^[JSON format for `ContentAddress`](./content-address.html)^g

View file

@ -26,7 +26,7 @@
{{#include schema/hash-v1/blake3-base64.json}} {{#include schema/hash-v1/blake3-base64.json}}
``` ```
<!-- <!-- need to convert YAML to JSON first
## Raw Schema ## Raw Schema
[JSON Schema for Hash v1](schema/hash-v1.json) [JSON Schema for Hash v1](schema/hash-v1.json)

View file

@ -10,7 +10,13 @@ json_schema_config = files('json-schema-for-humans-config.yaml')
schemas = [ schemas = [
'hash-v1', 'hash-v1',
'derivation-v3', 'content-address-v1',
'store-path-v1',
'store-object-info-v2',
'derivation-v4',
'deriving-path-v1',
'build-trace-entry-v1',
'build-result-v1',
] ]
schema_files = files() schema_files = files()

View file

@ -0,0 +1 @@
../../../../../../src/libstore-tests/data/build-result

View file

@ -0,0 +1,136 @@
"$schema": "http://json-schema.org/draft-04/schema"
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/build-result-v1.json"
title: Build Result
description: |
This schema describes the JSON representation of Nix's `BuildResult` type, which represents the result of building a derivation or substituting store paths.
Build results can represent either successful builds (with built outputs) or various types of failures.
oneOf:
- "$ref": "#/$defs/success"
- "$ref": "#/$defs/failure"
type: object
required:
- success
- status
properties:
timesBuilt:
type: integer
minimum: 0
title: Times built
description: |
How many times this build was performed.
startTime:
type: integer
minimum: 0
title: Start time
description: |
The start time of the build (or one of the rounds, if it was repeated), as a Unix timestamp.
stopTime:
type: integer
minimum: 0
title: Stop time
description: |
The stop time of the build (or one of the rounds, if it was repeated), as a Unix timestamp.
cpuUser:
type: integer
minimum: 0
title: User CPU time
description: |
User CPU time the build took, in microseconds.
cpuSystem:
type: integer
minimum: 0
title: System CPU time
description: |
System CPU time the build took, in microseconds.
"$defs":
success:
type: object
title: Successful Build Result
description: |
Represents a successful build with built outputs.
required:
- success
- status
- builtOutputs
properties:
success:
const: true
title: Success indicator
description: |
Always true for successful build results.
status:
type: string
title: Success status
description: |
Status string for successful builds.
enum:
- "Built"
- "Substituted"
- "AlreadyValid"
- "ResolvesToAlreadyValid"
builtOutputs:
type: object
title: Built outputs
description: |
A mapping from output names to their build trace entries.
additionalProperties:
"$ref": "build-trace-entry-v1.yaml"
failure:
type: object
title: Failed Build Result
description: |
Represents a failed build with error information.
required:
- success
- status
- errorMsg
properties:
success:
const: false
title: Success indicator
description: |
Always false for failed build results.
status:
type: string
title: Failure status
description: |
Status string for failed builds.
enum:
- "PermanentFailure"
- "InputRejected"
- "OutputRejected"
- "TransientFailure"
- "CachedFailure"
- "TimedOut"
- "MiscFailure"
- "DependencyFailed"
- "LogLimitExceeded"
- "NotDeterministic"
- "NoSubstituters"
- "HashMismatch"
errorMsg:
type: string
title: Error message
description: |
Information about the error if the build failed.
isNonDeterministic:
type: boolean
title: Non-deterministic flag
description: |
If timesBuilt > 1, whether some builds did not produce the same result.
Note that 'isNonDeterministic = false' does not mean the build is deterministic,
just that we don't have evidence of non-determinism.

View file

@ -0,0 +1 @@
../../../../../../src/libstore-tests/data/realisation

View file

@ -0,0 +1,74 @@
"$schema": "http://json-schema.org/draft-04/schema"
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/build-trace-entry-v1.json"
title: Build Trace Entry
description: |
A record of a successful build outcome for a specific derivation output.
This schema describes the JSON representation of a [build trace entry](@docroot@/store/build-trace.md) entry.
> **Warning**
>
> This JSON format is currently
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-ca-derivations)
> and subject to change.
type: object
required:
- id
- outPath
- dependentRealisations
- signatures
properties:
id:
type: string
title: Derivation Output ID
pattern: "^sha256:[0-9a-f]{64}![a-zA-Z_][a-zA-Z0-9_-]*$"
description: |
Unique identifier for the derivation output that was built.
Format: `{hash-quotient-drv}!{output-name}`
- **hash-quotient-drv**: SHA-256 [hash of the quotient derivation](@docroot@/store/derivation/outputs/input-address.md#hash-quotient-drv).
Begins with `sha256:`.
- **output-name**: Name of the specific output (e.g., "out", "dev", "doc")
Example: `"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad!foo"`
outPath:
"$ref": "store-path-v1.yaml"
title: Output Store Path
description: |
The path to the store object that resulted from building this derivation for the given output name.
dependentRealisations:
type: object
title: Underlying Base Build Trace
description: |
This is for [*derived*](@docroot@/store/build-trace.md#derived) build trace entries to ensure coherence.
Keys are derivation output IDs (same format as the main `id` field).
Values are the store paths that those dependencies resolved to.
As described in the linked section on derived build trace traces, derived build trace entries must be kept in addition and not instead of the underlying base build entries.
This is the set of base build trace entries that this derived build trace is derived from.
(The set is also a map since this miniature base build trace must be coherent, mapping each key to a single value.)
patternProperties:
"^sha256:[0-9a-f]{64}![a-zA-Z_][a-zA-Z0-9_-]*$":
$ref: "store-path-v1.yaml"
title: Dependent Store Path
description: Store path that this dependency resolved to during the build
additionalProperties: false
signatures:
type: array
title: Build Signatures
description: |
A set of cryptographic signatures attesting to the authenticity of this build trace entry.
items:
type: string
title: Signature
description: A single cryptographic signature
additionalProperties: false

View file

@ -0,0 +1 @@
../../../../../../src/libstore-tests/data/content-address

View file

@ -0,0 +1,55 @@
"$schema": "http://json-schema.org/draft-04/schema"
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/content-address-v1.json"
title: Content Address
description: |
This schema describes the JSON representation of Nix's `ContentAddress` type, which conveys information about [content-addressing store objects](@docroot@/store/store-object/content-address.md).
> **Note**
>
> For current methods of content addressing, this data type is a bit suspicious, because it is neither simply a content address of a file system object (the `method` is richer), nor simply a content address of a store object (the `hash` doesn't account for the references).
> It should thus only be used in contexts where the references are also known / otherwise made tamper-resistant.
<!--
TODO currently `ContentAddress` is used in both of these, and so same rationale applies, but actually in both cases the JSON is currently ad-hoc.
That will be fixed, and as each is fixed, the example (along with a more precise link to the field in question) should be become part of the above note, so what is is saying is more clear.
> For example:
> - Fixed outputs of derivations are not allowed to have any references, so an empty reference set is statically known by assumption.
> - [Store object info](./store-object-info.md) includes the set of references along side the (optional) content address.
> This data type is thus safely used in both of these contexts.
-->
type: object
properties:
method:
"$ref": "#/$defs/method"
hash:
title: Content Address
description: |
This would be the content-address itself.
For all current methods, this is just a content address of the file system object of the store object, [as described in the store chapter](@docroot@/store/file-system-object/content-address.md), and not of the store object as a whole.
In particular, the references of the store object are *not* taken into account with this hash (and currently-supported methods).
"$ref": "./hash-v1.yaml"
required:
- method
- hash
additionalProperties: false
"$defs":
method:
type: string
enum: [flat, nar, text, git]
title: Content-Addressing Method
description: |
A string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen.
Valid method strings are:
- [`flat`](@docroot@/store/store-object/content-address.md#method-flat) (provided the contents are a single file)
- [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive)
- [`text`](@docroot@/store/store-object/content-address.md#method-text)
- [`git`](@docroot@/store/store-object/content-address.md#method-git)

View file

@ -1,178 +0,0 @@
"$schema": http://json-schema.org/draft-04/schema#
"$id": https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-v3.json
title: Derivation
description: |
Experimental JSON representation of a Nix derivation (version 3).
This schema describes the JSON representation of Nix's `Derivation` type.
> **Warning**
>
> This JSON format is currently
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
> and subject to change.
type: object
required:
- name
- version
- outputs
- inputSrcs
- inputDrvs
- system
- builder
- args
- env
properties:
name:
type: string
title: Derivation name
description: |
The name of the derivation.
Used when calculating store paths for the derivations outputs.
version:
const: 3
title: Format version (must be 3)
description: |
Must be `3`.
This is a guard that allows us to continue evolving this format.
The choice of `3` is fairly arbitrary, but corresponds to this informal version:
- Version 0: A-Term format
- Version 1: Original JSON format, with ugly `"r:sha256"` inherited from A-Term format.
- Version 2: Separate `method` and `hashAlgo` fields in output specs
- Version 3: Drop store dir from store paths, just include base name.
Note that while this format is experimental, the maintenance of versions is best-effort, and not promised to identify every change.
outputs:
type: object
title: Output specifications
description: |
Information about the output paths of the derivation.
This is a JSON object with one member per output, where the key is the output name and the value is a JSON object as described.
> **Example**
>
> ```json
> "outputs": {
> "out": {
> "method": "nar",
> "hashAlgo": "sha256",
> "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62"
> }
> }
> ```
additionalProperties:
"$ref": "#/$defs/output"
inputSrcs:
type: array
title: Input source paths
description: |
List of store paths on which this derivation depends.
> **Example**
>
> ```json
> "inputSrcs": [
> "47y241wqdhac3jm5l7nv0x4975mb1975-separate-debug-info.sh",
> "56d0w71pjj9bdr363ym3wj1zkwyqq97j-fix-pop-var-context-error.patch"
> ]
> ```
items:
type: string
inputDrvs:
type: object
title: Input derivations
description: |
Mapping of derivation paths to lists of output names they provide.
> **Example**
>
> ```json
> "inputDrvs": {
> "6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"],
> "fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"]
> }
> ```
>
> specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`.
system:
type: string
title: Build system type
description: |
The system type on which this derivation is to be built
(e.g. `x86_64-linux`).
builder:
type: string
title: Build program path
description: |
Absolute path of the program used to perform the build.
Typically this is the `bash` shell
(e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`).
args:
type: array
title: Builder arguments
description: |
Command-line arguments passed to the `builder`.
items:
type: string
env:
type: object
title: Environment variables
description: |
Environment variables passed to the `builder`.
additionalProperties:
type: string
structuredAttrs:
title: Structured attributes
description: |
[Structured Attributes](@docroot@/store/derivation/index.md#structured-attrs), only defined if the derivation contains them.
Structured attributes are JSON, and thus embedded as-is.
type: object
additionalProperties: true
"$defs":
output:
type: object
properties:
path:
type: string
title: Output path
description: |
The output path, if known in advance.
method:
type: string
title: Content addressing method
enum: [flat, nar, text, git]
description: |
For an output which will be [content addressed](@docroot@/store/derivation/outputs/content-address.md), a string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen.
Valid method strings are:
- [`flat`](@docroot@/store/store-object/content-address.md#method-flat)
- [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive)
- [`text`](@docroot@/store/store-object/content-address.md#method-text)
- [`git`](@docroot@/store/store-object/content-address.md#method-git)
hashAlgo:
title: Hash algorithm
"$ref": "./hash-v1.yaml#/$defs/algorithm"
hash:
type: string
title: Expected hash value
description: |
For fixed-output derivations, the expected content hash in base-16.

View file

@ -0,0 +1,299 @@
"$schema": "http://json-schema.org/draft-04/schema"
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-v4.json"
title: Derivation
description: |
Experimental JSON representation of a Nix derivation (version 4).
This schema describes the JSON representation of Nix's `Derivation` type.
> **Warning**
>
> This JSON format is currently
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
> and subject to change.
type: object
required:
- name
- version
- outputs
- inputs
- system
- builder
- args
- env
properties:
name:
type: string
title: Derivation name
description: |
The name of the derivation.
Used when calculating store paths for the derivations outputs.
version:
const: 4
title: Format version (must be 4)
description: |
Must be `4`.
This is a guard that allows us to continue evolving this format.
The choice of `3` is fairly arbitrary, but corresponds to this informal version:
- Version 0: ATerm format
- Version 1: Original JSON format, with ugly `"r:sha256"` inherited from ATerm format.
- Version 2: Separate `method` and `hashAlgo` fields in output specs
- Version 3: Drop store dir from store paths, just include base name.
- Version 4: Two cleanups, batched together to lesson churn:
- Reorganize inputs into nested structure (`inputs.srcs` and `inputs.drvs`)
- Use canonical content address JSON format for floating content addressed derivation outputs.
Note that while this format is experimental, the maintenance of versions is best-effort, and not promised to identify every change.
outputs:
type: object
title: Output specifications
description: |
Information about the output paths of the derivation.
This is a JSON object with one member per output, where the key is the output name and the value is a JSON object as described.
> **Example**
>
> ```json
> "outputs": {
> "out": {
> "method": "nar",
> "hashAlgo": "sha256",
> "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62"
> }
> }
> ```
additionalProperties:
"$ref": "#/$defs/output/overall"
inputs:
type: object
title: Derivation inputs
description: |
Input dependencies for the derivation, organized into source paths and derivation dependencies.
required:
- srcs
- drvs
properties:
srcs:
type: array
title: Input source paths
description: |
List of store paths on which this derivation depends.
> **Example**
>
> ```json
> "srcs": [
> "47y241wqdhac3jm5l7nv0x4975mb1975-separate-debug-info.sh",
> "56d0w71pjj9bdr363ym3wj1zkwyqq97j-fix-pop-var-context-error.patch"
> ]
> ```
items:
$ref: "store-path-v1.yaml"
drvs:
type: object
title: Input derivations
description: |
Mapping of derivation paths to lists of output names they provide.
> **Example**
>
> ```json
> "drvs": {
> "6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"],
> "fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"]
> }
> ```
>
> specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`.
patternProperties:
"^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+\\.drv$":
title: Store Path
description: |
A store path to a derivation, mapped to the outputs of that derivation.
oneOf:
- "$ref": "#/$defs/outputNames"
- "$ref": "#/$defs/dynamicOutputs"
additionalProperties: false
additionalProperties: false
system:
type: string
title: Build system type
description: |
The system type on which this derivation is to be built
(e.g. `x86_64-linux`).
builder:
type: string
title: Build program path
description: |
Absolute path of the program used to perform the build.
Typically this is the `bash` shell
(e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`).
args:
type: array
title: Builder arguments
description: |
Command-line arguments passed to the `builder`.
items:
type: string
env:
type: object
title: Environment variables
description: |
Environment variables passed to the `builder`.
additionalProperties:
type: string
structuredAttrs:
title: Structured attributes
description: |
[Structured Attributes](@docroot@/store/derivation/index.md#structured-attrs), only defined if the derivation contains them.
Structured attributes are JSON, and thus embedded as-is.
type: object
additionalProperties: true
"$defs":
output:
overall:
title: Derivation Output
description: |
A single output of a derivation, with different variants for different output types.
oneOf:
- "$ref": "#/$defs/output/inputAddressed"
- "$ref": "#/$defs/output/caFixed"
- "$ref": "#/$defs/output/caFloating"
- "$ref": "#/$defs/output/deferred"
- "$ref": "#/$defs/output/impure"
inputAddressed:
title: Input-Addressed Output
description: |
The traditional non-fixed-output derivation type.
The output path is determined from the derivation itself.
See [Input-addressing derivation outputs](@docroot@/store/derivation/outputs/input-address.md) for more details.
type: object
required:
- path
properties:
path:
$ref: "store-path-v1.yaml"
title: Output path
description: |
The output path determined from the derivation itself.
additionalProperties: false
caFixed:
title: Fixed Content-Addressed Output
description: |
The output is content-addressed, and the content-address is fixed in advance.
See [Fixed-output content-addressing](@docroot@/store/derivation/outputs/content-address.md#fixed) for more details.
"$ref": "./content-address-v1.yaml"
required:
- method
- hash
properties:
method:
description: |
Method of content addressing used for this output.
hash:
title: Expected hash value
description: |
The expected content hash.
additionalProperties: false
caFloating:
title: Floating Content-Addressed Output
description: |
Floating-output derivations, whose outputs are content
addressed, but not fixed, and so the output paths are dynamically calculated from
whatever the output ends up being.
See [Floating Content-Addressing](@docroot@/store/derivation/outputs/content-address.md#floating) for more details.
type: object
required:
- method
- hashAlgo
properties:
method:
"$ref": "./content-address-v1.yaml#/$defs/method"
description: |
Method of content addressing used for this output.
hashAlgo:
title: Hash algorithm
"$ref": "./hash-v1.yaml#/$defs/algorithm"
description: |
What hash algorithm to use for the given method of content-addressing.
additionalProperties: false
deferred:
title: Deferred Output
description: |
Input-addressed output which depends on a (CA) derivation whose outputs (and thus their content-address
are not yet known.
type: object
properties: {}
additionalProperties: false
impure:
title: Impure Output
description: |
Impure output which is just like a floating content-addressed output, but this derivation runs without sandboxing.
As such, we don't record it in the build trace, under the assumption that if we need it again, we should rebuild it, as it might produce something different.
required:
- impure
- method
- hashAlgo
properties:
impure:
const: true
method:
"$ref": "./content-address-v1.yaml#/$defs/method"
description: |
How the file system objects will be serialized for hashing.
hashAlgo:
title: Hash algorithm
"$ref": "./hash-v1.yaml#/$defs/algorithm"
description: |
How the serialization will be hashed.
additionalProperties: false
outputName:
type: string
title: Output name
description: Name of the derivation output to depend on
outputNames:
type: array
title: Output Names
description: Set of names of derivation outputs to depend on
items:
"$ref": "#/$defs/outputName"
dynamicOutputs:
type: object
title: Dynamic Outputs
description: |
**Experimental feature**: [`dynamic-derivations`](@docroot@/development/experimental-features.md#xp-feature-dynamic-derivations)
This recursive data type allows for depending on outputs of outputs.
properties:
outputs:
"$ref": "#/$defs/outputNames"
dynamicOutputs:
"$ref": "#/$defs/dynamicOutputs"

View file

@ -0,0 +1 @@
../../../../../../src/libstore-tests/data/derived-path

View file

@ -0,0 +1,27 @@
"$schema": "http://json-schema.org/draft-04/schema"
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/deriving-path-v1.json"
title: Deriving Path
description: |
This schema describes the JSON representation of Nix's [Deriving Path](@docroot@/store/derivation/index.md#deriving-path).
oneOf:
- title: Constant
description: |
See [Constant](@docroot@/store/derivation/index.md#deriving-path-constant) deriving path.
$ref: "store-path-v1.yaml"
- title: Output
description: |
See [Output](@docroot@/store/derivation/index.md#deriving-path-output) deriving path.
type: object
properties:
drvPath:
"$ref": "#"
description: |
A deriving path to a [Derivation](@docroot@/store/derivation/index.md#store-derivation), whose output is being referred to.
output:
type: string
description: |
The name of an output produced by that derivation (e.g. "out", "doc", etc.).
required:
- drvPath
- output
additionalProperties: false

View file

@ -1,5 +1,5 @@
"$schema": http://json-schema.org/draft-04/schema# "$schema": "http://json-schema.org/draft-04/schema"
"$id": https://nix.dev/manual/nix/latest/protocols/json/schema/hash-v1.json "$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/hash-v1.json"
title: Hash title: Hash
description: | description: |
A cryptographic hash value used throughout Nix for content addressing and integrity verification. A cryptographic hash value used throughout Nix for content addressing and integrity verification.
@ -51,4 +51,4 @@ additionalProperties: false
description: | description: |
The hash algorithm used to compute the hash value. The hash algorithm used to compute the hash value.
`blake3` is currently experimental and requires the [`blake-hashing`](@docroot@/development/experimental-features.md#xp-feature-blake-hashing) experimental feature. `blake3` is currently experimental and requires the [`blake-hashing`](@docroot@/development/experimental-features.md#xp-feature-blake3-hashes) experimental feature.

View file

@ -0,0 +1 @@
../../../../../../src/libstore-tests/data/nar-info

View file

@ -0,0 +1 @@
../../../../../../src/libstore-tests/data/path-info

View file

@ -0,0 +1,258 @@
"$schema": "http://json-schema.org/draft-04/schema"
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-object-info-v2.json"
title: Store Object Info v2
description: |
Information about a [store object](@docroot@/store/store-object.md).
This schema describes the JSON representation of store object metadata as returned by commands like [`nix path-info --json`](@docroot@/command-ref/new-cli/nix3-path-info.md).
> **Warning**
>
> This JSON format is currently
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
> and subject to change.
### Field Categories
Store object information can come in a few different variations.
Firstly, "impure" fields, which contain non-intrinsic information about the store object, may or may not be included.
Second, binary cache stores have extra non-intrinsic information about the store objects they contain.
Thirdly, [`nix path-info --json --closure-size`](@docroot@/command-ref/new-cli/nix3-path-info.html#opt-closure-size) can compute some extra information about not just the single store object in question, but the store object and its [closure](@docroot@/glossary.md#gloss-closure).
The impure and NAR fields are grouped into separate variants below.
See their descriptions for additional information.
The closure fields, however, are just included as optional fields, to avoid a combinatorial explosion of variants.
oneOf:
- $ref: "#/$defs/base"
- $ref: "#/$defs/impure"
- $ref: "#/$defs/narInfo"
$defs:
base:
title: Store Object Info
description: |
Basic store object metadata containing only intrinsic properties.
This is the minimal set of fields that describe what a store object contains.
type: object
required:
- version
- narHash
- narSize
- references
- ca
properties:
version:
type: integer
const: 2
title: Format version (must be 2)
description: |
Must be `2`.
This is a guard that allows us to continue evolving this format.
Here is the rough version history:
- Version 0: `.narinfo` line-oriented format
- Version 1: Original JSON format, with ugly `"r:sha256"` inherited from `.narinfo` format.
- Version 2: Use structured JSON type for `ca`
path:
type: string
title: Store Path
description: |
[Store path](@docroot@/store/store-path.md) to the given store object.
Note: This field may not be present in all contexts, such as when the path is used as the key and the store object info the value in a map.
narHash:
"$ref": "./hash-v1.yaml"
title: NAR Hash
description: |
Hash of the [file system object](@docroot@/store/file-system-object.md) part of the store object when serialized as a [Nix Archive](@docroot@/store/file-system-object/content-address.md#serial-nix-archive).
narSize:
type: integer
minimum: 0
title: NAR Size
description: |
Size of the [file system object](@docroot@/store/file-system-object.md) part of the store object when serialized as a [Nix Archive](@docroot@/store/file-system-object/content-address.md#serial-nix-archive).
references:
type: array
title: References
description: |
An array of [store paths](@docroot@/store/store-path.md), possibly including this one.
items:
type: string
ca:
oneOf:
- type: "null"
const: null
- "$ref": "./content-address-v1.yaml"
title: Content Address
description: |
If the store object is [content-addressed](@docroot@/store/store-object/content-address.md),
this is the content address of this store object's file system object, used to compute its store path.
Otherwise (i.e. if it is [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object)), this is `null`.
additionalProperties: false
impure:
title: Store Object Info with Impure Fields
description: |
Store object metadata including impure fields that are not *intrinsic* properties.
In other words, the same store object in different stores could have different values for these impure fields.
type: object
required:
- version
- narHash
- narSize
- references
- ca
# impure
- deriver
- registrationTime
- ultimate
- signatures
properties:
version: { $ref: "#/$defs/base/properties/version" }
path: { $ref: "#/$defs/base/properties/path" }
narHash: { $ref: "#/$defs/base/properties/narHash" }
narSize: { $ref: "#/$defs/base/properties/narSize" }
references: { $ref: "#/$defs/base/properties/references" }
ca: { $ref: "#/$defs/base/properties/ca" }
deriver:
type: ["string", "null"]
title: Deriver
description: |
If known, the path to the [store derivation](@docroot@/glossary.md#gloss-store-derivation) from which this store object was produced.
Otherwise `null`.
> This is an "impure" field that may not be included in certain contexts.
registrationTime:
type: ["integer", "null"]
title: Registration Time
description: |
If known, when this derivation was added to the store (Unix timestamp).
Otherwise `null`.
> This is an "impure" field that may not be included in certain contexts.
ultimate:
type: boolean
title: Ultimate
description: |
Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere.
> This is an "impure" field that may not be included in certain contexts.
signatures:
type: array
title: Signatures
description: |
Signatures claiming that this store object is what it claims to be.
Not relevant for [content-addressed](@docroot@/store/store-object/content-address.md) store objects,
but useful for [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object) store objects.
> This is an "impure" field that may not be included in certain contexts.
items:
type: string
# Computed closure fields
closureSize:
type: integer
minimum: 0
title: Closure Size
description: |
The total size of this store object and every other object in its [closure](@docroot@/glossary.md#gloss-closure).
> This field is not stored at all, but computed by traversing the other fields across all the store objects in a closure.
additionalProperties: false
narInfo:
title: Store Object Info with Impure fields and NAR Info
description: |
The store object info in the "binary cache" family of Nix store type contain extra information pertaining to *downloads* of the store object in question.
(This store info is called "NAR info", since the downloads take the form of [Nix Archives](@docroot@/store/file-system-object/content-address.md#serial-nix-archive, and the metadata is served in a file with a `.narinfo` extension.)
This download information, being specific to how the store object happens to be stored and transferred, is also considered to be non-intrinsic / impure.
type: object
required:
- version
- narHash
- narSize
- references
- ca
# impure
- deriver
- registrationTime
- ultimate
- signatures
# nar
- url
- compression
- downloadHash
- downloadSize
properties:
version: { $ref: "#/$defs/base/properties/version" }
path: { $ref: "#/$defs/base/properties/path" }
narHash: { $ref: "#/$defs/base/properties/narHash" }
narSize: { $ref: "#/$defs/base/properties/narSize" }
references: { $ref: "#/$defs/base/properties/references" }
ca: { $ref: "#/$defs/base/properties/ca" }
deriver: { $ref: "#/$defs/impure/properties/deriver" }
registrationTime: { $ref: "#/$defs/impure/properties/registrationTime" }
ultimate: { $ref: "#/$defs/impure/properties/ultimate" }
signatures: { $ref: "#/$defs/impure/properties/signatures" }
closureSize: { $ref: "#/$defs/impure/properties/closureSize" }
url:
type: string
title: URL
description: |
Where to download a compressed archive of the file system objects of this store object.
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
compression:
type: string
title: Compression
description: |
The compression format that the archive is in.
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
downloadHash:
"$ref": "./hash-v1.yaml"
title: Download Hash
description: |
A digest for the compressed archive itself, as opposed to the data contained within.
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
downloadSize:
type: integer
minimum: 0
title: Download Size
description: |
The size of the compressed archive itself.
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
closureDownloadSize:
type: integer
minimum: 0
title: Closure Download Size
description: |
The total size of the compressed archive itself for this object, and the compressed archive of every object in this object's [closure](@docroot@/glossary.md#gloss-closure).
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
> This field is not stored at all, but computed by traversing the other fields across all the store objects in a closure.
additionalProperties: false

View file

@ -0,0 +1 @@
../../../../../../src/libstore-tests/data/store-path

View file

@ -0,0 +1,32 @@
"$schema": "http://json-schema.org/draft-07/schema"
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-path-v1.json"
title: Store Path
description: |
A [store path](@docroot@/store/store-path.md) identifying a store object.
This schema describes the JSON representation of store paths as used in various Nix JSON APIs.
> **Warning**
>
> This JSON format is currently
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
> and subject to change.
## Format
Store paths in JSON are represented as strings containing just the hash and name portion, without the store directory prefix.
For example: `"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"`
(If the store dir is `/nix/store`, then this corresponds to the path `/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv`.)
## Structure
The format follows this pattern: `${digest}-${name}`
- **hash**: Digest rendered in a custom variant of [Base32](https://en.wikipedia.org/wiki/Base32) (20 arbitrary bytes become 32 ASCII characters)
- **name**: The package name and optional version/suffix information
type: string
pattern: "^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+$"
minLength: 34

View file

@ -1,102 +1,45 @@
# Store object info JSON format {{#include store-object-info-v2-fixed.md}}
> **Warning** ## Examples
>
> This JSON format is currently
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
> and subject to change.
Info about a [store object]. ### Minimal store object (content-addressed)
* `path`: ```json
{{#include schema/store-object-info-v2/pure.json}}
```
[Store path][store path] to the given store object. ### Store object with impure fields
* `narHash`: ```json
{{#include schema/store-object-info-v2/impure.json}}
```
Hash of the [file system object] part of the store object when serialized as a [Nix Archive]. ### Minimal store object (empty)
* `narSize`: ```json
{{#include schema/store-object-info-v2/empty_pure.json}}
```
Size of the [file system object] part of the store object when serialized as a [Nix Archive]. ### Store object with all impure fields
* `references`: ```json
{{#include schema/store-object-info-v2/empty_impure.json}}
```
An array of [store paths][store path], possibly including this one. ### NAR info (minimal)
* `ca`: ```json
{{#include schema/nar-info-v1/pure.json}}
```
If the store object is [content-addressed], ### NAR info (with binary cache fields)
this is the content address of this store object's file system object, used to compute its store path.
Otherwise (i.e. if it is [input-addressed]), this is `null`.
[store path]: @docroot@/store/store-path.md ```json
[file system object]: @docroot@/store/file-system-object.md {{#include schema/nar-info-v1/impure.json}}
[Nix Archive]: @docroot@/store/file-system-object/content-address.md#serial-nix-archive ```
## Impure fields <!-- need to convert YAML to JSON first
## Raw Schema
These are not intrinsic properties of the store object. [JSON Schema for Store Object Info v1](schema/store-object-info-v2.json)
In other words, the same store object residing in different store could have different values for these properties. -->
* `deriver`:
If known, the path to the [store derivation] from which this store object was produced.
Otherwise `null`.
[store derivation]: @docroot@/glossary.md#gloss-store-derivation
* `registrationTime` (optional):
If known, when this derivation was added to the store.
Otherwise `null`.
* `ultimate`:
Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere.
* `signatures`:
Signatures claiming that this store object is what it claims to be.
Not relevant for [content-addressed] store objects,
but useful for [input-addressed] store objects.
[content-addressed]: @docroot@/store/store-object/content-address.md
[input-addressed]: @docroot@/glossary.md#gloss-input-addressed-store-object
### `.narinfo` extra fields
This meta data is specific to the "binary cache" family of Nix store types.
This information is not intrinsic to the store object, but about how it is stored.
* `url`:
Where to download a compressed archive of the file system objects of this store object.
* `compression`:
The compression format that the archive is in.
* `fileHash`:
A digest for the compressed archive itself, as opposed to the data contained within.
* `fileSize`:
The size of the compressed archive itself.
## Computed closure fields
These fields are not stored at all, but computed by traversing the other fields across all the store objects in a [closure].
* `closureSize`:
The total size of the compressed archive itself for this object, and the compressed archive of every object in this object's [closure].
### `.narinfo` extra fields
* `closureSize`:
The total size of this store object and every other object in its [closure].
[closure]: @docroot@/glossary.md#gloss-closure

View file

@ -0,0 +1,15 @@
{{#include store-path-v1-fixed.md}}
## Examples
### Simple store path
```json
{{#include schema/store-path-v1/simple.json}}
```
<!-- need to convert YAML to JSON first
## Raw Schema
[JSON Schema for Store Path v1](schema/store-path-v1.json)
-->

View file

@ -4,7 +4,7 @@ This is the complete specification of the [Nix Archive] format.
The Nix Archive format closely follows the abstract specification of a [file system object] tree, The Nix Archive format closely follows the abstract specification of a [file system object] tree,
because it is designed to serialize exactly that data structure. because it is designed to serialize exactly that data structure.
[Nix Archive]: @docroot@/store/file-system-object/content-address.md#nix-archive [Nix Archive]: @docroot@/store/file-system-object/content-address.md#serial-nix-archive
[file system object]: @docroot@/store/file-system-object.md [file system object]: @docroot@/store/file-system-object.md
The format of this specification is close to [Extended BackusNaur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), with the exception of the `str(..)` function / parameterized rule, which length-prefixes and pads strings. The format of this specification is close to [Extended BackusNaur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), with the exception of the `str(..)` function / parameterized rule, which length-prefixes and pads strings.
@ -24,7 +24,7 @@ nar-obj-inner
| str("type"), str("directory") directory | str("type"), str("directory") directory
; ;
regular = [ str("executable") ], str("contents"), str(contents); regular = [ str("executable"), str("") ], str("contents"), str(contents);
symlink = str("target"), str(target); symlink = str("target"), str(target);
@ -41,3 +41,15 @@ The `str` function / parameterized rule is defined as follows:
- `int(n)` = the 64-bit little endian representation of the number `n` - `int(n)` = the 64-bit little endian representation of the number `n`
- `pad(s)` = the byte sequence `s`, padded with 0s to a multiple of 8 byte - `pad(s)` = the byte sequence `s`, padded with 0s to a multiple of 8 byte
## Kaitai Struct Specification
The Nix Archive (NAR) format is also formally described using [Kaitai Struct](https://kaitai.io/), an Interface Description Language (IDL) for defining binary data structures.
> Kaitai Struct provides a language-agnostic, machine-readable specification that can be compiled into parsers for various programming languages (e.g., C++, Python, Java, Rust).
```yaml
{{#include nar.ksy}}
```
The source of the spec can be found [here](https://github.com/nixos/nix/blob/master/src/nix-manual/source/protocols/nix-archive/nar.ksy). Contributions and improvements to the spec are welcomed.

View file

@ -0,0 +1,169 @@
meta:
  id: nix_nar
  title: Nix Archive (NAR)
  file-extension: nar
  endian: le
doc: |
  Nix Archive (NAR) format. A simple, reproducible binary archive
  format used by the Nix package manager to serialize file system objects.
doc-ref: 'https://nixos.org/manual/nix/stable/command-ref/nix-store.html#nar-format'
seq:
  - id: magic
    type: padded_str
    doc: "Magic string, must be 'nix-archive-1'."
    valid:
      expr: _.body == 'nix-archive-1'
  - id: root_node
    type: node
    doc: "The root of the archive, which is always a single node."
types:
  padded_str:
    doc: |
      A string, prefixed with its length (u8le) and
      padded with null bytes to the next 8-byte boundary.
    seq:
      - id: len_str
        type: u8
      - id: body
        type: str
        size: len_str
        encoding: 'ASCII'
      - id: padding
        size: (8 - (len_str % 8)) % 8
  node:
    doc: "A single filesystem node (file, directory, or symlink)."
    seq:
      - id: open_paren
        type: padded_str
        doc: "Must be '(', a token starting the node definition."
        valid:
          expr: _.body == '('
      - id: type_key
        type: padded_str
        doc: "Must be 'type'."
        valid:
          expr: _.body == 'type'
      - id: type_val
        type: padded_str
        doc: "The type of the node: 'regular', 'directory', or 'symlink'."
      - id: body
        type:
          switch-on: type_val.body
          cases:
            "'directory'": type_directory
            "'regular'": type_regular
            "'symlink'": type_symlink
      - id: close_paren
        type: padded_str
        valid:
          expr: _.body == ')'
        # A directory consumes its own closing ')' as the entry-list terminator,
        # so only non-directory nodes read it here.
        if: "type_val.body != 'directory'"
        doc: "Must be ')', a token ending the node definition."
  type_directory:
    doc: "A directory node, containing a list of entries. Entries must be ordered by their names."
    seq:
      - id: entries
        type: dir_entry
        repeat: until
        repeat-until: _.kind.body == ')'
    types:
      dir_entry:
        doc: "A single entry within a directory, or a terminator."
        seq:
          - id: kind
            type: padded_str
            valid:
              expr: _.body == 'entry' or _.body == ')'
            doc: "Must be 'entry' (for a child node) or ')' (for the terminator)."
          - id: open_paren
            type: padded_str
            valid:
              expr: _.body == '('
            if: 'kind.body == "entry"'
          - id: name_key
            type: padded_str
            valid:
              expr: _.body == 'name'
            if: 'kind.body == "entry"'
          - id: name
            type: padded_str
            if: 'kind.body == "entry"'
          - id: node_key
            type: padded_str
            valid:
              expr: _.body == 'node'
            if: 'kind.body == "entry"'
          - id: node
            type: node
            if: 'kind.body == "entry"'
            doc: "The child node, present only if kind is 'entry'."
          - id: close_paren
            type: padded_str
            valid:
              expr: _.body == ')'
            if: 'kind.body == "entry"'
        instances:
          is_terminator:
            value: kind.body == ')'
  type_regular:
    doc: "A regular file node."
    seq:
      # Read attributes (like 'executable') until we hit 'contents'.
      - id: attributes
        type: reg_attribute
        repeat: until
        repeat-until: _.key.body == "contents"
      # After the 'contents' token, read the file data.
      - id: file_data
        type: file_content
    instances:
      is_executable:
        value: 'attributes[0].key.body == "executable"'
        doc: "True if the file has the 'executable' attribute."
    types:
      reg_attribute:
        doc: "An attribute of the file, e.g., 'executable' or 'contents'."
        seq:
          - id: key
            type: padded_str
            doc: "Attribute key, e.g., 'executable' or 'contents'."
            valid:
              expr: _.body == 'executable' or _.body == 'contents'
          - id: value
            type: padded_str
            if: 'key.body == "executable"'
            valid:
              expr: _.body == ''
            doc: "Must be '' if key is 'executable'."
      file_content:
        doc: "The raw data of the file, prefixed by length."
        seq:
          - id: len_contents
            type: u8
          # This relies on the property of instances that they are lazily evaluated and cached.
          - size: 0
            if: nar_offset < 0
          - id: contents
            size: len_contents
          - id: padding
            size: (8 - (len_contents % 8)) % 8
        instances:
          nar_offset:
            value: _io.pos
  type_symlink:
    doc: "A symbolic link node."
    seq:
      - id: target_key
        type: padded_str
        doc: "Must be 'target'."
        valid:
          expr: _.body == 'target'
      - id: target_val
        type: padded_str
        doc: "The destination path of the symlink."

View file

@ -358,7 +358,7 @@ This release has the following new features:
they are needed for evaluation. they are needed for evaluation.
- You can now use `channel:` as a short-hand for - You can now use `channel:` as a short-hand for
<https://nixos.org/channels//nixexprs.tar.xz>. For example, <https://nixos.org/channels//nixexprs.tar.xz> [now <https://channels.nixos.org//nixexprs.tar.xz>]. For example,
`nix-build channel:nixos-15.09 -A hello` will build the GNU Hello `nix-build channel:nixos-15.09 -A hello` will build the GNU Hello
package from the `nixos-15.09` channel. In the future, this may package from the `nixos-15.09` channel. In the future, this may
use Git to fetch updates more efficiently. use Git to fetch updates more efficiently.

View file

@ -13,7 +13,7 @@
- The `discard-references` feature has been stabilized. - The `discard-references` feature has been stabilized.
This means that the This means that the
[unsafeDiscardReferences](@docroot@/development/experimental-features.md#xp-feature-discard-references) [unsafeDiscardReferences](@docroot@/language/advanced-attributes.md#adv-attr-unsafeDiscardReferences)
attribute is no longer guarded by an experimental flag and can be used attribute is no longer guarded by an experimental flag and can be used
freely. freely.

View file

@ -17,8 +17,8 @@
- `nix-shell` shebang lines now support single-quoted arguments. - `nix-shell` shebang lines now support single-quoted arguments.
- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/development/experimental-features.md#xp-fetch-tree). - `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/development/experimental-features.md#xp-feature-fetch-tree).
This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/development/experimental-features.md#xp-fetch-tree). This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/development/experimental-features.md#xp-feature-flakes).
- The interface for creating and updating lock files has been overhauled: - The interface for creating and updating lock files has been overhauled:

View file

@ -14,7 +14,7 @@
- Modify `nix derivation {add,show}` JSON format [#9866](https://github.com/NixOS/nix/issues/9866) [#10722](https://github.com/NixOS/nix/pull/10722) - Modify `nix derivation {add,show}` JSON format [#9866](https://github.com/NixOS/nix/issues/9866) [#10722](https://github.com/NixOS/nix/pull/10722)
The JSON format for derivations has been slightly revised to better conform to our [JSON guidelines](@docroot@/development/cli-guideline.md#returning-future-proof-json). The JSON format for derivations has been slightly revised to better conform to our [JSON guidelines](@docroot@/development/json-guideline.md).
In particular, the hash algorithm and content addressing method of content-addressed derivation outputs are now separated into two fields `hashAlgo` and `method`, In particular, the hash algorithm and content addressing method of content-addressed derivation outputs are now separated into two fields `hashAlgo` and `method`,
rather than one field with an arcane `:`-separated format. rather than one field with an arcane `:`-separated format.

View file

@ -93,7 +93,7 @@
- Support unit prefixes in configuration settings [#10668](https://github.com/NixOS/nix/pull/10668) - Support unit prefixes in configuration settings [#10668](https://github.com/NixOS/nix/pull/10668)
Configuration settings in Nix now support unit prefixes, allowing for more intuitive and readable configurations. For example, you can now specify [`--min-free 1G`](@docroot@/command-ref/opt-common.md#opt-min-free) to set the minimum free space to 1 gigabyte. Configuration settings in Nix now support unit prefixes, allowing for more intuitive and readable configurations. For example, you can now specify [`--min-free 1G`](@docroot@/command-ref/conf-file.md#conf-min-free) to set the minimum free space to 1 gigabyte.
This enhancement was extracted from [#7851](https://github.com/NixOS/nix/pull/7851) and is also useful for PR [#10661](https://github.com/NixOS/nix/pull/10661). This enhancement was extracted from [#7851](https://github.com/NixOS/nix/pull/7851) and is also useful for PR [#10661](https://github.com/NixOS/nix/pull/10661).

View file

@ -0,0 +1,53 @@
# Build Trace
> **Warning**
>
> This entire concept is currently
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-ca-derivations)
> and subject to change.
The *build trace* is a [memoization table](https://en.wikipedia.org/wiki/Memoization) for builds.
It maps the inputs of builds to the outputs of builds.
Concretely, that means it maps [derivations][derivation] to maps of [output] names to [store objects][store object].
In general the derivations used as a key should be [*resolved*](./resolution.md).
A build trace with all-resolved-derivation keys is also called a *base build trace* for extra clarity.
If all the resolved inputs of a derivation are content-addressed, that means the inputs will be fully determined, leaving no ambiguity for what build was performed.
(Input-addressed inputs however are still ambiguous. They too should be locked down, but this is left as future work.)
Accordingly, to look up an unresolved derivation, one must first resolve it to get a resolved derivation.
Resolving itself involves looking up entries in the build trace, so this is a mutually recursive process that will end up inspecting possibly many entries.
Except for the issue with input-addressed paths called out above, base build traces are trivially *coherent* -- incoherence is not possible.
That means that the claims that each key-value base build trace entry makes are independent, and no mapping invalidates another mapping.
Whether the mappings are *true*, i.e. the faithful recording of actual builds performed, is another matter.
Coherence is about the multiple claims of the build trace being mutually consistent, not about whether the claims are individually true or false.
In general, there is no way to audit a build trace entry except for by performing the build again from scratch.
And even in that case, a different result doesn't mean the original entry was a "lie", because the derivation being built may be non-deterministic.
As such, the decision of whether to trust a counterparty's build trace is a fundamentally subjective policy choice.
Build trace entries are typically *signed* in order to enable arbitrary public-key-based trust polices.
## Derived build traces {#derived}
Implementations that wish to memoize the above may also keep additional *derived* build trace entries that do map unresolved derivations.
But if they do so, they *must* also keep the underlying base entries with resolved derivation keys around.
Firstly, this ensures that the derived entries are merely cache, which could be recomputed from scratch.
Secondly, this ensures the coherence of the derived build trace.
Unlike with base build traces, incoherence with derived build traces is possible.
The key ingredient is that derivation resolution is only deterministic with respect to a fixed base build trace.
Without fixing the base build trace, it inherits the subjectivity of base build traces themselves.
Concretely, suppose there are three derivations \\(a\\), \\(b\\), and \\(c\\).
Let \\(a\\) be a resolved derivation, but let \\(b\\) and \\(c\\) be unresolved and both take as an input an output of \\(a\\).
Now suppose that derived entries are made for \\(b\\) and \\(c\\) based on two different entries of \\(a\\).
(This could happen if \\(a\\) is non-deterministic, \\(a\\) and \\(b\\) are built in one store, \\(a\\) and \\(c\\) are built in another store, and then a third store substitutes from both of the first two stores.)
If trusting the derived build trace entries for \\(b\\) and \\(c\\) requires that each's underlying entry for \\(a\\) be also trusted, the two different mappings for \\(a\\) will be caught.
However, if \\(b\\) and \\(c\\)'s entries can be combined in isolation, there will be nothing to catch the contradiction in their hidden assumptions about \\(a\\)'s output.
[derivation]: ./derivation/index.md
[output]: ./derivation/outputs/index.md
[store object]: @docroot@/store/store-object.md

View file

@ -8,7 +8,7 @@
- Once this is done, the derivation is *normalized*, replacing each input deriving path with its store path, which we now know from realising the input. - Once this is done, the derivation is *normalized*, replacing each input deriving path with its store path, which we now know from realising the input.
## Builder Execution ## Builder Execution {#builder-execution}
The [`builder`](./derivation/index.md#builder) is executed as follows: The [`builder`](./derivation/index.md#builder) is executed as follows:

View file

@ -102,7 +102,7 @@ But rather than somehow scanning all the other fields for inputs, Nix requires t
### System {#system} ### System {#system}
The system type on which the [`builder`](#attr-builder) executable is meant to be run. The system type on which the [`builder`](#builder) executable is meant to be run.
A necessary condition for Nix to schedule a given derivation on some [Nix instance] is for the "system" of that derivation to match that instance's [`system` configuration option] or [`extra-platforms` configuration option]. A necessary condition for Nix to schedule a given derivation on some [Nix instance] is for the "system" of that derivation to match that instance's [`system` configuration option] or [`extra-platforms` configuration option].
@ -245,7 +245,7 @@ If those other derivations *also* abide by this common case (and likewise for tr
> note the ".drv" > note the ".drv"
> ``` > ```
## Extending the model to be higher-order ## Extending the model to be higher-order {#dynamic}
**Experimental feature**: [`dynamic-derivations`](@docroot@/development/experimental-features.md#xp-feature-dynamic-derivations) **Experimental feature**: [`dynamic-derivations`](@docroot@/development/experimental-features.md#xp-feature-dynamic-derivations)

View file

@ -167,10 +167,10 @@ It is only in the potential for that check to fail that they are different.
> >
> In a future world where floating content-addressing is also stable, we in principle no longer need separate [fixed](#fixed) content-addressing. > In a future world where floating content-addressing is also stable, we in principle no longer need separate [fixed](#fixed) content-addressing.
> Instead, we could always use floating content-addressing, and separately assert the precise value content address of a given store object to be used as an input (of another derivation). > Instead, we could always use floating content-addressing, and separately assert the precise value content address of a given store object to be used as an input (of another derivation).
> A stand-alone assertion object of this sort is not yet implemented, but its possible creation is tracked in [Issue #11955](https://github.com/NixOS/nix/issues/11955). > A stand-alone assertion object of this sort is not yet implemented, but its possible creation is tracked in [issue #11955](https://github.com/NixOS/nix/issues/11955).
> >
> In the current version of Nix, fixed outputs which fail their hash check are still registered as valid store objects, just not registered as outputs of the derivation which produced them. > In the current version of Nix, fixed outputs which fail their hash check are still registered as valid store objects, just not registered as outputs of the derivation which produced them.
> This is an optimization that means if the wrong output hash is specified in a derivation, and then the derivation is recreated with the right output hash, derivation does not need to be rebuilt --- avoiding downloading potentially large amounts of data twice. > This is an optimization that means if the wrong output hash is specified in a derivation, and then the derivation is recreated with the right output hash, derivation does not need to be rebuilt &mdash; avoiding downloading potentially large amounts of data twice.
> This optimisation prefigures the design above: > This optimisation prefigures the design above:
> If the output hash assertion was removed outside the derivation itself, Nix could additionally not only register that outputted store object like today, but could also make note that derivation did in fact successfully download some data. > If the output hash assertion was removed outside the derivation itself, Nix could additionally not only register that outputted store object like today, but could also make note that derivation did in fact successfully download some data.
For example, for the "fetch URL" example above, making such a note is tantamount to recording what data is available at the time of download at the given URL. For example, for the "fetch URL" example above, making such a note is tantamount to recording what data is available at the time of download at the given URL.

View file

@ -43,7 +43,7 @@ In particular, the specification decides:
- if the content is content-addressed, how is it content addressed - if the content is content-addressed, how is it content addressed
- if the content is content-addressed, [what is its content address](./content-address.md#fixed-content-addressing) (and thus what is its [store path]) - if the content is content-addressed, [what is its content address](./content-address.md#fixed) (and thus what is its [store path])
## Types of derivations ## Types of derivations

View file

@ -6,26 +6,221 @@
That is to say, an input-addressed output's store path is a function not of the output itself, but of the derivation that produced it. That is to say, an input-addressed output's store path is a function not of the output itself, but of the derivation that produced it.
Even if two store paths have the same contents, if they are produced in different ways, and one is input-addressed, then they will have different store paths, and thus guaranteed to not be the same store object. Even if two store paths have the same contents, if they are produced in different ways, and one is input-addressed, then they will have different store paths, and thus guaranteed to not be the same store object.
<!--- ## Modulo content addressed derivation outputs {#hash-quotient-drv}
### Modulo fixed-output derivations A naive implementation of an output hash computation for input-addressed outputs would be to hash the derivation hash and output together.
This clearly has the uniqueness properties we want for input-addressed outputs, but suffers from an inefficiency.
Specifically, new builds would be required whenever a change is made to a fixed-output derivation, despite having provably no differences in the inputs to the new derivation compared to what it used to be.
Concretely, this would cause a "mass rebuild" whenever any fetching detail changes, including mirror lists, certificate authority certificates, etc.
**TODO hash derivation modulo.** To solve this problem, we compute output hashes differently, so that certain output hashes become identical.
We call this concept quotient hashing, in reference to quotient types or sets.
So how do we compute the hash part of the output path of a derivation? So how do we compute the hash part of the output paths of an input-addressed derivation?
This is done by the function `hashDrv`, shown in Figure 5.10. This is done by the function `hashQuotientDerivation`, shown below.
It distinguishes between two cases.
If the derivation is a fixed-output derivation, then it computes a hash over just the `outputHash` attributes.
If the derivation is not a fixed-output derivation, we replace each element in the derivations inputDrvs with the result of a call to `hashDrv` for that element. First, a word on inputs.
(The derivation at each store path in `inputDrvs` is converted from its on-disk ATerm representation back to a `StoreDrv` by the function `parseDrv`.) In essence, `hashDrv` partitions store derivations into equivalence classes, and for hashing purpose it replaces each store path in a derivation graph with its equivalence class. `hashQuotientDerivation` is only defined on derivations whose [inputs](@docroot@/store/derivation/index.md#inputs) take the first-order form:
```typescript
type ConstantPath = {
path: StorePath;
};
The recursion in Figure 5.10 is inefficient: type FirstOrderOutputPath = {
it will call itself once for each path by which a subderivation can be reached, i.e., `O(V k)` times for a derivation graph with `V` derivations and with out-degree of at most `k`. drvPath: StorePath;
In the actual implementation, memoisation is used to reduce this to `O(V + E)` complexity for a graph with E edges. output: OutputName;
};
--> type FirstOrderDerivingPath = ConstantPath | FirstOrderOutputPath;
type Inputs = Set<FirstOrderDerivingPath>;
```
For the algorithm below, we adopt a representation of derivations in which the two types of (first-order) deriving paths are partitioned into two sets, as follows:
```typescript
type Derivation = {
// inputs: Set<FirstOrderDerivingPath>; // replaced
inputSrcs: Set<ConstantPath>; // new instead
inputDrvOutputs: Set<FirstOrderOutputPath>; // new instead
// ...other fields...
};
```
In the [currently-experimental][xp-feature-dynamic-derivations] higher-order case where outputs of outputs are allowed as [deriving paths][deriving-path] and thus derivation inputs, derivations using that generalization are not valid arguments to this function.
Those derivations must be (partially) [resolved](@docroot@/store/resolution.md) enough first, to the point where no such higher-order inputs remain.
Then, and only then, can input addresses be assigned.
```
function hashQuotientDerivation(drv) -> Hash:
assert(drv.outputs are input-addressed)
drv ← drv with {
inputDrvOutputs = (
assert(drvPath is store path)
case hashOutputsOrQuotientDerivation(readDrv(drvPath)) of
drvHash : Hash →
(drvHash.toBase16(), output)
outputHashes : Map[String, Hash] →
(outputHashes[output].toBase16(), "out")
| (drvPath, output) ∈ drv.inputDrvOutputs
)
}
return hashSHA256(printDrv(drv))
function hashOutputsOrQuotientDerivation(drv) -> Map[String, Hash] | Hash:
if drv.outputs are content-addressed:
return {
outputName ↦ hashSHA256(
"fixed:out:" + ca.printMethodAlgo() +
":" + ca.hash.toBase16() +
":" + ca.makeFixedOutputPath(drv.name, outputName))
| (outputName ↦ output) ∈ drv.outputs
, ca = output.contentAddress // or get from build trace if floating
}
else: // drv.outputs are input-addressed
return hashQuotientDerivation(drv)
```
### `hashQuotientDerivation`
We replace each element in the derivation's `inputDrvOutputs` using data from a call to `hashOutputsOrQuotientDerivation` on the `drvPath` of that element.
When `hashOutputsOrQuotientDerivation` returns a single drv hash (because the input derivation in question is input-addressing), we simply swap out the `drvPath` for that hash, and keep the same output name.
When `hashOutputsOrQuotientDerivation` returns a map of content addresses per-output, we look up the output in question, and pair it with the output name `out`.
The resulting pseudo-derivation (with hashes instead of store paths in `inputDrvs`) is then printed (in the ["ATerm" format](@docroot@/protocols/derivation-aterm.md)) and hashed, and this becomes the hash of the "quotient derivation".
When calculating output hashes, `hashQuotientDerivation` is called on an almost-complete input-addressing derivation, which is just missing its input-addressed output paths.
The derivation hash is then used to calculate output paths for each output.
<!-- TODO describe how this is done. -->
Those output paths can then be substituted into the almost-complete input-addressed derivation to complete it.
> **Note**
>
> There may be an unintentional deviation from specification currently implemented in the `(outputHashes[output].toBase16(), "out")` case.
> This is not fatal because the deviation would only apply for content-addressing derivations with more than one output, and that only occurs in the floating case, which is [experimental][xp-feature-ca-derivations].
> Once this bug is fixed, this note will be removed.
### `hashOutputsOrQuotientDerivation`
How does `hashOutputsOrQuotientDerivation` in turn work?
It consists of two main cases, based on whether the outputs of the derivation are to be input-addressed or content-addressed.
#### Input-addressed outputs case
In the input-addressed case, it just calls `hashQuotientDerivation`, and returns that derivation hash.
This makes `hashQuotientDerivation` and `hashOutputsOrQuotientDerivation` mutually-recursive.
> **Note**
>
> In this case, `hashQuotientDerivation` is being called on a *complete* input-addressing derivation that already has its output paths calculated.
> The `inputDrvs` substitution takes place anyway.
#### Content-addressed outputs case
If the outputs are [content-addressed](./content-address.md), then it computes a hash for each output derived from the content-address of that output.
> **Note**
>
> In the [fixed](./content-address.md#fixed) content-addressing case, the outputs' content addresses are statically specified in advance, so this always just works.
> (The fixed case is what the pseudo-code shows.)
>
> In the [floating](./content-address.md#floating) case, the content addresses are not specified in advance.
> This is what the "or get from [build trace](@docroot@/store/build-trace.md) if floating" comment refers to.
> In this case, the algorithm is *stuck* until the input in question is built, and we know what the actual contents of the output in question is.
>
> That is OK however, because there is no problem with delaying the assigning of input addresses (which, remember, is what `hashQuotientDerivation` is ultimately for) until all inputs are known.
### Performance
The recursion in the algorithm is potentially inefficient:
it could call itself once for each path by which a subderivation can be reached, i.e., `O(V^k)` times for a derivation graph with `V` derivations and with out-degree of at most `k`.
In the actual implementation, [memoisation](https://en.wikipedia.org/wiki/Memoization) is used to reduce this cost to be proportional to the total number of `inputDrvOutputs` encountered.
### Semantic properties
*See [this chapter's appendix](@docroot@/store/math-notation.md) on grammar and metavariable conventions.*
In essence, `hashQuotientDerivation` partitions input-addressing derivations into equivalence classes: every derivation in that equivalence class is mapped to the same derivation hash.
We can characterize this equivalence relation directly, by working bottom up.
We start by defining an equivalence relation on first-order output deriving paths that refer content-addressed derivation outputs. Two such paths are equivalent if they refer to the same store object:
\\[
\\begin{prooftree}
\\AxiomC{$d\_1$ is content-addressing}
\\AxiomC{$d\_2$ is content-addressing}
\\AxiomC{$
{}^\*(\text{path}(d\_1), o\_1)
\=
{}^\*(\text{path}(d\_2), o\_2)
$}
\\TrinaryInfC{$(\text{path}(d\_1), o\_1) \\,\\sim_{\\mathrm{CA}}\\, (d\_2, o\_2)$}
\\end{prooftree}
\\]
where \\({}^*(s, o)\\) denotes the store object that the output deriving path refers to.
We will also need the following construction to lift any equivalence relation on \\(X\\) to an equivalence relation on (finite) sets of \\(X\\) (in short, \\(\\mathcal{P}(X)\\)):
\\[
\\begin{prooftree}
\\AxiomC{$\\forall a \\in A. \\exists b \\in B. a \\,\\sim\_X\\, b$}
\\AxiomC{$\\forall b \\in B. \\exists a \\in A. b \\,\\sim\_X\\, a$}
\\BinaryInfC{$A \\,\\sim_{\\mathcal{P}(X)}\\, B$}
\\end{prooftree}
\\]
Now we can define the equivalence relation \\(\\sim_\\mathrm{IA}\\) on input-addressed derivation outputs. Two input-addressed outputs are equivalent if their derivations are equivalent (via the yet-to-be-defined \\(\\sim_{\\mathrm{IADrv}}\\) relation) and their output names are the same:
\\[
\\begin{prooftree}
\\AxiomC{$d\_1$ is input-addressing}
\\AxiomC{$d\_2$ is input-addressing}
\\AxiomC{$d\_1 \\,\\sim_{\\mathrm{IADrv}}\\, d\_2$}
\\AxiomC{$o\_1 = o\_2$}
\\QuaternaryInfC{$(\text{path}(d\_1), o\_1) \\,\\sim_{\\mathrm{IA}}\\, (\text{path}(d\_2), o\_2)$}
\\end{prooftree}
\\]
And now we can define \\(\\sim_{\\mathrm{IADrv}}\\).
Two input-addressed derivations are equivalent if their content-addressed inputs are equivalent, their input-addressed inputs are also equivalent, and they are otherwise equal:
<!-- cheating a bit with the semantics to get a good layout that fits on the page -->
\\[
\\begin{prooftree}
\\alwaysNoLine
\\AxiomC{$
\\mathrm{caInputs}(d\_1)
\\,\\sim_{\\mathcal{P}(\\mathrm{CA})}\\,
\\mathrm{caInputs}(d\_2)
$}
\\AxiomC{$
\\mathrm{iaInputs}(d\_1)
\\,\\sim_{\\mathcal{P}(\\mathrm{IA})}\\,
\\mathrm{iaInputs}(d\_2)
$}
\\BinaryInfC{$
d\_1\left[\\mathrm{inputDrvOutputs} := \\{\\}\right]
\=
d\_2\left[\\mathrm{inputDrvOutputs} := \\{\\}\right]
$}
\\alwaysSingleLine
\\UnaryInfC{$d\_1 \\,\\sim_{\\mathrm{IADrv}}\\, d\_2$}
\\end{prooftree}
\\]
where \\(\\mathrm{caInputs}(d)\\) returns the content-addressed inputs of \\(d\\) and \\(\\mathrm{iaInputs}(d)\\) returns the input-addressed inputs.
> **Note**
>
> An astute reader might notice that nowhere does `inputSrcs` enter into these definitions.
> That means that replacing an input derivation with its outputs directly added to `inputSrcs` always results in a derivation in a different equivalence class, despite the resulting input closure (as would be mounted in the store at build time) being the same.
> [Issue #9259](https://github.com/NixOS/nix/issues/9259) is about creating a coarser equivalence relation to address this.
>
> \\(\\sim_\mathrm{Drv}\\) from [derivation resolution](@docroot@/store/resolution.md) is such an equivalence relation.
> It is coarser than this one: any two derivations which are "'hash quotient derivation'-equivalent" (\\(\\sim_\mathrm{IADrv}\\)) are also "resolution-equivalent" (\\(\\sim_\mathrm{Drv}\\)).
> It also relates derivations whose `inputDrvOutputs` have been rewritten into `inputSrcs`.
[deriving-path]: @docroot@/store/derivation/index.md#deriving-path
[xp-feature-dynamic-derivations]: @docroot@/development/experimental-features.md#xp-feature-dynamic-derivations
[xp-feature-ca-derivations]: @docroot@/development/experimental-features.md#xp-feature-ca-derivations [xp-feature-ca-derivations]: @docroot@/development/experimental-features.md#xp-feature-ca-derivations
[xp-feature-git-hashing]: @docroot@/development/experimental-features.md#xp-feature-git-hashing
[xp-feature-impure-derivations]: @docroot@/development/experimental-features.md#xp-feature-impure-derivations

View file

@ -46,7 +46,7 @@ be many different serialisations.
For these reasons, Nix has its very own archive format—the Nix Archive (NAR) format, For these reasons, Nix has its very own archive format—the Nix Archive (NAR) format,
which is carefully designed to avoid the problems described above. which is carefully designed to avoid the problems described above.
The exact specification of the Nix Archive format is in [specified here](../../protocols/nix-archive.md). The exact specification of the Nix Archive format is in [specified here](../../protocols/nix-archive/index.md).
## Content addressing File System Objects beyond a single serialisation pass ## Content addressing File System Objects beyond a single serialisation pass

View file

@ -0,0 +1,16 @@
# Appendix: Math notation
A few times in this manual, formal "proof trees" are used for [natural deduction](https://en.wikipedia.org/wiki/Natural_deduction)-style definition of various [relations](https://en.wikipedia.org/wiki/Relation_(mathematics)).
The following grammar and assignment of metavariables to syntactic categories is used in these sections.
\\begin{align}
s, t &\in \text{store-path} \\\\
o &\in \text{output-name} \\\\
i, p &\in \text{deriving-path} \\\\
d &\in \text{derivation}
\\end{align}
\\begin{align}
\text{deriving-path} \quad p &::= s \mid (p, o)
\\end{align}

View file

@ -0,0 +1,219 @@
# Derivation Resolution
*See [this chapter's appendix](@docroot@/store/math-notation.md) on grammar and metavariable conventions.*
To *resolve* a derivation is to replace its [inputs] with the simplest inputs &mdash; plain store paths &mdash; that denote the same store objects.
Derivations that only have store paths as inputs are likewise called *resolved derivations*.
(They are called that whether they are in fact the output of derivation resolution, or just made that way without non-store-path inputs to begin with.)
## Input Content Equivalence of Derivations
[Deriving paths][deriving-path] intentionally make it possible to refer to the same [store object] in multiple ways.
This is a consequence of content-addressing, since different derivations can produce the same outputs, and the same data can also be manually added to the store.
This is also a consequence even of input-addressing, as an output can be referred to by derivation and output name, or directly by its [computed](./derivation/outputs/input-address.md) store path.
Since dereferencing deriving paths is thus not injective, it induces an equivalence relation on deriving paths.
Let's call this equivalence relation \\(\\sim\\), where \\(p_1 \\sim p_2\\) means that deriving paths \\(p_1\\) and \\(p_2\\) refer to the same store object.
**Content Equivalence**: Two deriving paths are equivalent if they refer to the same store object:
\\[
\\begin{prooftree}
\\AxiomC{${}^*p_1 = {}^*p_2$}
\\UnaryInfC{$p_1 \\,\\sim_\\mathrm{DP}\\, p_2$}
\\end{prooftree}
\\]
where \\({}^\*p\\) denotes the store object that deriving path \\(p\\) refers to.
This also induces an equivalence relation on sets of deriving paths:
\\[
\\begin{prooftree}
\\AxiomC{$\\{ {}^*p | p \\in P_1 \\} = \\{ {}^*p | p \\in P_2 \\}$}
\\UnaryInfC{$P_1 \\,\\sim_{\\mathcal{P}(\\mathrm{DP})}\\, P_2$}
\\end{prooftree}
\\]
**Input Content Equivalence**: This, in turn, induces an equivalence relation on derivations: two derivations are equivalent if their inputs are equivalent, and they are otherwise equal:
\\[
\\begin{prooftree}
\\AxiomC{$\\mathrm{inputs}(d_1) \\,\\sim_{\\mathcal{P}(\\mathrm{DP})}\\, \\mathrm{inputs}(d_2)$}
\\AxiomC{$
d\_1\left[\\mathrm{inputs} := \\{\\}\right]
\=
d\_2\left[\\mathrm{inputs} := \\{\\}\right]
$}
\\BinaryInfC{$d_1 \\,\\sim_\\mathrm{Drv}\\, d_2$}
\\end{prooftree}
\\]
Derivation resolution always maps derivations to input-content-equivalent derivations.
## Resolution relation
Dereferencing a deriving path &mdash; \\({}^\*p\\) above &mdash; was just introduced as a black box.
But actually it is a multi-step process of looking up build results in the [build trace] that itself depends on resolving the lookup keys.
Resolution is thus a recursive multi-step process that is worth diagramming formally.
We can do this with a small-step binary transition relation; let's call it \\(\rightsquigarrow\\).
We can then conclude dereferenced equality like this:
\\[
\\begin{prooftree}
\\AxiomC{$p\_1 \\rightsquigarrow^* p$}
\\AxiomC{$p\_2 \\rightsquigarrow^* p$}
\\BinaryInfC{${}^*p\_1 = {}^*p\_2$}
\\end{prooftree}
\\]
I.e. by showing that both original items resolve (over 0 or more small steps, hence the \\({}^*\\)) to the same exact item.
With this motivation, let's now formalize a [small-step](https://en.wikipedia.org/wiki/Operational_semantics#Small-step_semantics) system of reduction rules for resolution.
### Formal rules
### \\(\text{resolved}\\) unary relation
\\[
\\begin{prooftree}
\\AxiomC{$s \in \text{store-path}$}
\\UnaryInfC{$s$ resolved}
\\end{prooftree}
\\]
\\[
\\begin{prooftree}
\\AxiomC{$\forall i \in \mathrm{inputs}(d). i \text{ resolved}$}
\\UnaryInfC{$d$ resolved}
\\end{prooftree}
\\]
### \\(\rightsquigarrow\\) binary relation
> **Remark**
>
> Actually, to be completely formal we would need to keep track of the build trace we are choosing to resolve against.
>
> We could do that by making \\(\rightsquigarrow\\) a ternary relation, which would pass the build trace to itself until it finally uses it in that one rule.
> This would add clutter more than insight, so we didn't bother to write it.
>
> There are other options too, like saying the whole reduction rule system is parameterized on the build trace, essentially [currying](https://en.wikipedia.org/wiki/Currying) the ternary \\(\rightsquigarrow\\) into a function from build traces to the binary relation written above.
#### Core build trace lookup rule
\\[
\\begin{prooftree}
\\AxiomC{$s \in \text{store-path}$}
\\AxiomC{${}^*s \in \text{derivation}$}
\\AxiomC{${}^*s$ resolved}
\\AxiomC{$\mathrm{build\text{-}trace}[s][o] = t$}
\\QuaternaryInfC{$(s, o) \rightsquigarrow t$}
\\RightLabel{\\scriptsize output path resolution}
\\end{prooftree}
\\]
#### Inductive rules
\\[
\\begin{prooftree}
\\AxiomC{$i \\rightsquigarrow i'$}
\\AxiomC{$i \\in \\mathrm{inputs}(d)$}
\\BinaryInfC{$d \\rightsquigarrow d[i \\mapsto i']$}
\\end{prooftree}
\\]
\\[
\\begin{prooftree}
\\AxiomC{$d \\rightsquigarrow d'$}
\\UnaryInfC{$(\\mathrm{path}(d), o) \\rightsquigarrow (\\mathrm{path}(d'), o)$}
\\end{prooftree}
\\]
\\[
\\begin{prooftree}
\\AxiomC{$p \\rightsquigarrow p'$}
\\UnaryInfC{$(p, o) \\rightsquigarrow (p', o)$}
\\end{prooftree}
\\]
### Properties
Like all well-behaved evaluation relations, partial resolution is [*confluent*](https://en.wikipedia.org/wiki/Confluence_(abstract_rewriting)).
Also, if we take the symmetric closure of \\(\\rightsquigarrow^\*\\), we end up with the equivalence relations of the previous section.
Resolution respects content equivalence for deriving paths, and input content equivalence for derivations.
> **Remark**
>
> We chose to define from scratch a "resolved" unary relation explicitly above.
> But it can also be defined as the normal forms of the \\(\\rightsquigarrow^\*\\) relation:
>
> \\[ a \text{ resolved} \Leftrightarrow \forall b. b \rightsquigarrow^* a \Rightarrow b = a\\]
>
> In prose, resolved terms are terms which \\(\\rightsquigarrow^\*\\) only relates on the left side to the same term on the right side; they are the terms which can be resolved no further.
## Partial versus Complete Resolution
Similar to evaluation, we can also speak of *partial* versus *complete* derivation resolution.
Partial derivation resolution is what we've actually formalized above with \\(\\rightsquigarrow^\*\\).
Complete resolution is resolution ending in a resolved term (deriving path or derivation).
(Which is a normal form of the relation, per the remark above.)
With partial resolution, a derivation is related to equivalent derivations with the same or simpler inputs, but not all those inputs will be plain store paths.
This is useful when the input refers to a floating content addressed output we have not yet built &mdash; we don't know what (content-address) store path will be used for that derivation, so we are "stuck" trying to resolve the deriving path in question.
(In the above formalization, this happens when the build trace is missing the keys we wish to look up in it.)
Complete resolution is a *functional* relation, i.e. values on the left are uniquely related with values on the right.
It is not however, a *total* relation (in general, assuming arbitrary build traces).
This is discussed in the next section.
## Termination
For static derivations graphs, complete resolution is indeed total, because it always terminates for all inputs.
(A relation that is both total and functional is a function.)
For [dynamic][xp-feature-dynamic-derivations] derivation graphs, however, this is not the case &mdash; resolution is not guaranteed to terminate.
The issue isn't rewriting deriving paths themselves:
a single rewrite to normalize an output deriving path to a constant one always exists, and always proceeds in one step.
The issue is that dynamic derivations (i.e. those that are filled-in the graph by a previous resolution) may have more transitive dependencies than the original derivation.
> **Example**
>
> Suppose we have this deriving path
> ```json
> {
> "drvPath": {
> "drvPath": "...-foo.drv",
> "output": "bar.drv"
> },
> "output": "baz"
> }
> ```
> and derivation `foo` is already resolved.
> When we resolve this deriving path, we'll end up with something like:
> ```json
> {
> "drvPath": "...-foo-bar.drv",
> "output": "baz"
> }
> ```
> So far this is just a single atomic rewrite, with no termination issues.
> But the derivation `foo-bar` may have its *own* dynamic derivation inputs.
> Resolution must resolve that derivation first before the above deriving path can finally be normalized to a plain `...-foo-bar-baz` store path.
The important thing to notice is that while "build trace" *keys* must be resolved,
the *values* those keys are mapped to have no such constraints.
An arbitrary store object has no notion of being resolved or not.
But, an arbitrary store object can be read back as a derivation (as will in fact be done in the case of dynamic derivations / nested output deriving paths).
And those derivations need *not* be resolved.
It is those dynamic non-resolved derivations which are the source of non-termination.
By the same token, they are also the reason why dynamic derivations offer greater expressive power.
[store object]: @docroot@/store/store-object.md
[inputs]: @docroot@/store/derivation/index.md#inputs
[build trace]: @docroot@/store/build-trace.md
[deriving-path]: @docroot@/store/derivation/index.md#deriving-path
[xp-feature-dynamic-derivations]: @docroot@/development/experimental-features.md#xp-feature-dynamic-derivations

View file

@ -0,0 +1,20 @@
# Secrets
The store is readable to all users on the system. For this reason, it
is generally discouraged to allow secrets to make it into the store.
Even on a single-user system, separate system users isolate services
from each other and having secrets that all local users can read
weakens that isolation. When using external store caches the secrets
may end up there, and on multi-user systems the secrets will be
available to all those users.
Organize your derivations so that secrets are read from the filesystem
(with appropriate access controls) at run time. Place the secrets on
the filesystem manually or use a scheme that includes the secret in
the store in encrypted form, and decrypts it, adding the relevant
access control on system activation.
Several such schemes for NixOS can be found in the
[comparison of secret managing schemes] on the wiki.
[comparison of secret managing schemes]: https://wiki.nixos.org/wiki/Comparison_of_secret_managing_schemes

View file

@ -1,7 +1,7 @@
# Content-Addressing Store Objects # Content-Addressing Store Objects
Just [like][fso-ca] [File System Objects][File System Object], Just [like][fso-ca] [File System Objects][File System Object],
[Store Objects][Store Object] can also be [content-addressed](@docroot@/glossary.md#gloss-content-addressed), [Store Objects][Store Object] can also be [content-addressed](@docroot@/glossary.md#gloss-content-address),
unless they are [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object). unless they are [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object).
For store objects, the content address we produce will take the form of a [Store Path] rather than regular hash. For store objects, the content address we produce will take the form of a [Store Path] rather than regular hash.
@ -107,7 +107,7 @@ References (to other store objects and self-references alike) are supported so l
> >
> This method is part of the [`git-hashing`][xp-feature-git-hashing] experimental feature. > This method is part of the [`git-hashing`][xp-feature-git-hashing] experimental feature.
This uses the corresponding [Git](../file-system-object/content-address.md#serial-git) method of file system object content addressing. This uses the corresponding [Git](../file-system-object/content-address.md#git) method of file system object content addressing.
References are not supported. References are not supported.

View file

@ -6,7 +6,7 @@
> >
> A rendered store path > A rendered store path
Nix implements references to [store objects](./index.md#store-object) as *store paths*. Nix implements references to [store objects](./store-object.md) as *store paths*.
Think of a store path as an [opaque], [unique identifier]: Think of a store path as an [opaque], [unique identifier]:
The only way to obtain store path is by adding or building store objects. The only way to obtain store path is by adding or building store objects.

15
doc/manual/theme/head.hbs Normal file
View file

@ -0,0 +1,15 @@
<script>
MathJax = {
loader: {load: ['[tex]/bussproofs']},
tex: {
packages: {'[+]': ['bussproofs']},
// Doesn't seem to work in mathjax 3
//formatError: function(jax, error) {
// console.log(`TeX error in "${jax.latex}": ${error.message}`);
// return jax.formatError(error);
//}
}
};
</script>
<!-- Load a newer version of MathJax than mdbook does by default, and which in particular has working relative paths for the "bussproofs" extension. -->
<script async src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/3.0.1/es5/tex-mml-chtml.js"></script>

View file

@ -10,7 +10,7 @@
tag ? "latest", tag ? "latest",
bundleNixpkgs ? true, bundleNixpkgs ? true,
channelName ? "nixpkgs", channelName ? "nixpkgs",
channelURL ? "https://nixos.org/channels/nixpkgs-unstable", channelURL ? "https://channels.nixos.org/nixpkgs-unstable",
extraPkgs ? [ ], extraPkgs ? [ ],
maxLayers ? 70, maxLayers ? 70,
nixConf ? { }, nixConf ? { },

8
flake.lock generated
View file

@ -63,16 +63,16 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1756178832, "lastModified": 1761597516,
"narHash": "sha256-O2CIn7HjZwEGqBrwu9EU76zlmA5dbmna7jL1XUmAId8=", "narHash": "sha256-wxX7u6D2rpkJLWkZ2E932SIvDJW8+ON/0Yy8+a5vsDU=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "d98ce345cdab58477ca61855540999c86577d19d", "rev": "daf6dc47aa4b44791372d6139ab7b25269184d55",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "NixOS", "owner": "NixOS",
"ref": "nixos-25.05-small", "ref": "nixos-25.05",
"repo": "nixpkgs", "repo": "nixpkgs",
"type": "github" "type": "github"
} }

View file

@ -1,7 +1,7 @@
{ {
description = "The purely functional package manager"; description = "The purely functional package manager";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small"; inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446"; inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446";
@ -417,6 +417,10 @@
supportsCross = false; supportsCross = false;
}; };
"nix-kaitai-struct-checks" = {
supportsCross = false;
};
"nix-perl-bindings" = { "nix-perl-bindings" = {
supportsCross = false; supportsCross = false;
}; };
@ -471,6 +475,27 @@
} }
); );
apps = forAllSystems (
system:
let
pkgs = nixpkgsFor.${system}.native;
opener = if pkgs.stdenv.isDarwin then "open" else "xdg-open";
in
{
open-manual = {
type = "app";
program = "${pkgs.writeShellScript "open-nix-manual" ''
path="${self.packages.${system}.nix-manual.site}/index.html"
if ! ${opener} "$path"; then
echo "Failed to open manual with ${opener}. Manual is located at:"
echo "$path"
fi
''}";
meta.description = "Open the Nix manual in your browser";
};
}
);
devShells = devShells =
let let
makeShell = import ./packaging/dev-shell.nix { inherit lib devFlake; }; makeShell = import ./packaging/dev-shell.nix { inherit lib devFlake; };

View file

@ -60,4 +60,9 @@ if get_option('unit-tests')
subproject('libflake-tests') subproject('libflake-tests')
endif endif
subproject('nix-functional-tests') subproject('nix-functional-tests')
subproject('json-schema-checks') if get_option('json-schema-checks')
subproject('json-schema-checks')
endif
if get_option('kaitai-struct-checks')
subproject('kaitai-struct-checks')
endif

View file

@ -27,3 +27,17 @@ option(
value : false, value : false,
description : 'Build benchmarks (requires gbenchmark)', description : 'Build benchmarks (requires gbenchmark)',
) )
option(
'kaitai-struct-checks',
type : 'boolean',
value : true,
description : 'Check the Kaitai Struct specifications (requires Kaitai Struct)',
)
option(
'json-schema-checks',
type : 'boolean',
value : true,
description : 'Check JSON schema validity of schemas and examples (requires jv)',
)

View file

@ -1,5 +1,5 @@
# shellcheck disable=all
#compdef nix #compdef nix
# shellcheck disable=all
function _nix() { function _nix() {
local ifs_bk="$IFS" local ifs_bk="$IFS"

View file

@ -42,8 +42,28 @@ if cxx.get_id() == 'clang'
add_project_arguments('-fpch-instantiate-templates', language : 'cpp') add_project_arguments('-fpch-instantiate-templates', language : 'cpp')
endif endif
# Darwin ld doesn't like "X.Y.Zpre" # Detect if we're using libstdc++ (GCC's standard library)
nix_soversion = meson.project_version().split('pre')[0] # libstdc++ uses Intel TBB as backend for C++17 parallel algorithms when <execution> is included.
# boost::concurrent_flat_map includes <execution>, which would require linking against TBB.
# Since we don't actually use parallel algorithms, disable the TBB backend to avoid the dependency.
# TBB is a dependency of blake3 and leaking into our build environment.
is_using_libstdcxx = cxx.compiles(
'''
#include <ciso646>
#ifndef __GLIBCXX__
#error "not libstdc++"
#endif
int main() { return 0; }
''',
name : 'using libstdc++',
)
if is_using_libstdcxx
add_project_arguments('-D_GLIBCXX_USE_TBB_PAR_BACKEND=0', language : 'cpp')
endif
# Darwin ld doesn't like "X.Y.ZpreABCD+W"
nix_soversion = meson.project_version().split('+')[0].split('pre')[0]
subdir('assert-fail') subdir('assert-fail')
subdir('asan-options') subdir('asan-options')

View file

@ -443,6 +443,11 @@ in
*/ */
nix-json-schema-checks = callPackage ../src/json-schema-checks/package.nix { }; nix-json-schema-checks = callPackage ../src/json-schema-checks/package.nix { };
/**
Kaitai struct schema validation checks
*/
nix-kaitai-struct-checks = callPackage ../src/kaitai-struct-checks/package.nix { };
nix-perl-bindings = callPackage ../src/perl/package.nix { }; nix-perl-bindings = callPackage ../src/perl/package.nix { };
/** /**

View file

@ -3,10 +3,118 @@
devFlake, devFlake,
}: }:
let
# Some helper functions
/**
Compute a filtered closure of build inputs.
Specifically, `buildInputsClosure cond startSet` computes the closure formed
by recursive application of `p: filter cond p.buildInputs ++ filter cond p.propagatedBuildInputs`
to `startSet`.
Example:
```nix
builtInputsClosure isInternal [ pkg1 pkg2 ]
=> [ pkg1 pkg3 pkg2 pkg10 ]
```
Note: order tbd
Note: `startSet` is *NOT* filtered.
*/
buildInputsClosureCond =
cond: startSet:
let
closure = builtins.genericClosure {
startSet = map (d: {
key = d.drvPath;
value = d;
}) startSet;
operator =
d:
let
r =
map
(d': {
key = d'.drvPath;
value = d';
})
(
lib.filter cond d.value.buildInputs or [ ] ++ lib.filter cond d.value.propagatedBuildInputs or [ ]
);
in
r;
};
in
map (item: item.value) closure;
/**
`[ pkg1 pkg2 ]` -> `{ "...-pkg2.drv" = null; "...-pkg1.drv" = null }`
Note: fairly arbitrary order (hash based). Use for efficient set membership test only.
*/
byDrvPath =
l:
lib.listToAttrs (
map (c: {
name =
# Just a lookup key
builtins.unsafeDiscardStringContext c.drvPath;
value = null;
}) l
);
/**
Stable dedup.
Unlike `listToAttrs` -> `attrValues`, this preserves the input ordering,
which is more predictable ("deterministic") than e.g. sorting store paths,
whose hashes affect the ordering on every change.
*/
# TODO: add to Nixpkgs lib, refer from uniqueStrings
dedupByString =
key: l:
let
r =
lib.foldl'
(
a@{ list, set }:
elem:
let
k = builtins.unsafeDiscardStringContext (key elem);
in
if set ? ${k} then
a
else
let
# Note: O(n²) copying. Use linkedLists to concat them in one go at the end.
# https://github.com/NixOS/nixpkgs/pull/452088
newList = [ elem ] ++ list;
newSet = set // {
${k} = null;
};
in
builtins.seq newList builtins.seq newSet {
list = newList;
set = newSet;
}
)
{
list = [ ];
set = { };
}
l;
in
r.list;
in
{ pkgs }: { pkgs }:
# TODO: don't use nix-util for this?
pkgs.nixComponents2.nix-util.overrideAttrs ( pkgs.nixComponents2.nix-util.overrideAttrs (
attrs: finalAttrs: prevAttrs:
let let
stdenv = pkgs.nixDependencies2.stdenv; stdenv = pkgs.nixDependencies2.stdenv;
@ -21,13 +129,89 @@ pkgs.nixComponents2.nix-util.overrideAttrs (
"-D${prefix}:${rest}"; "-D${prefix}:${rest}";
havePerl = stdenv.buildPlatform == stdenv.hostPlatform && stdenv.hostPlatform.isUnix; havePerl = stdenv.buildPlatform == stdenv.hostPlatform && stdenv.hostPlatform.isUnix;
ignoreCrossFile = flags: builtins.filter (flag: !(lib.strings.hasInfix "cross-file" flag)) flags; ignoreCrossFile = flags: builtins.filter (flag: !(lib.strings.hasInfix "cross-file" flag)) flags;
activeComponents = buildInputsClosureCond isInternal (
lib.attrValues (finalAttrs.passthru.config.getComponents allComponents)
);
allComponents = lib.filterAttrs (k: v: lib.isDerivation v) pkgs.nixComponents2;
internalDrvs = byDrvPath (
# Drop the attr names (not present in buildInputs anyway)
lib.attrValues allComponents
++ lib.concatMap (c: lib.attrValues c.tests or { }) (lib.attrValues allComponents)
);
isInternal =
dep: internalDrvs ? ${builtins.unsafeDiscardStringContext dep.drvPath or "_non-existent_"};
in in
{ {
pname = "shell-for-" + attrs.pname; pname = "shell-for-nix";
passthru = {
inherit activeComponents;
# We use this attribute to store non-derivation values like functions and
# perhaps other things that are primarily for overriding and not the shell.
config = {
# Default getComponents
getComponents =
c:
builtins.removeAttrs c (
lib.optionals (!havePerl) [ "nix-perl-bindings" ]
++ lib.optionals (!buildCanExecuteHost) [ "nix-manual" ]
);
};
/**
Produce a devShell for a given set of nix components
Example:
```nix
shell.withActiveComponents (c: {
inherit (c) nix-util;
})
```
*/
withActiveComponents =
f2:
finalAttrs.finalPackage.overrideAttrs (
finalAttrs: prevAttrs: {
passthru = prevAttrs.passthru // {
config = prevAttrs.passthru.config // {
getComponents = f2;
};
};
}
);
small =
(finalAttrs.finalPackage.withActiveComponents (c: {
inherit (c)
nix-cli
nix-util-tests
nix-store-tests
nix-expr-tests
nix-fetchers-tests
nix-flake-tests
nix-functional-tests
# Currently required
nix-perl-bindings
;
})).overrideAttrs
(o: {
mesonFlags = o.mesonFlags ++ [
# TODO: infer from activeComponents or vice versa
"-Dkaitai-struct-checks=false"
"-Djson-schema-checks=false"
];
});
};
# Remove the version suffix to avoid unnecessary attempts to substitute in nix develop # Remove the version suffix to avoid unnecessary attempts to substitute in nix develop
version = lib.fileContents ../.version; version = lib.fileContents ../.version;
name = attrs.pname; name = finalAttrs.pname;
installFlags = "sysconfdir=$(out)/etc"; installFlags = "sysconfdir=$(out)/etc";
shellHook = '' shellHook = ''
@ -98,17 +282,9 @@ pkgs.nixComponents2.nix-util.overrideAttrs (
nativeBuildInputs = nativeBuildInputs =
let let
inputs = inputs =
attrs.nativeBuildInputs or [ ] dedupByString (v: "${v}") (
++ pkgs.nixComponents2.nix-util.nativeBuildInputs lib.filter (x: !isInternal x) (lib.lists.concatMap (c: c.nativeBuildInputs) activeComponents)
++ pkgs.nixComponents2.nix-store.nativeBuildInputs )
++ pkgs.nixComponents2.nix-fetchers.nativeBuildInputs
++ pkgs.nixComponents2.nix-expr.nativeBuildInputs
++ lib.optionals havePerl pkgs.nixComponents2.nix-perl-bindings.nativeBuildInputs
++ lib.optionals buildCanExecuteHost pkgs.nixComponents2.nix-manual.externalNativeBuildInputs
++ pkgs.nixComponents2.nix-internal-api-docs.nativeBuildInputs
++ pkgs.nixComponents2.nix-external-api-docs.nativeBuildInputs
++ pkgs.nixComponents2.nix-functional-tests.externalNativeBuildInputs
++ pkgs.nixComponents2.nix-json-schema-checks.externalNativeBuildInputs
++ lib.optional ( ++ lib.optional (
!buildCanExecuteHost !buildCanExecuteHost
# Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479 # Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479
@ -117,9 +293,7 @@ pkgs.nixComponents2.nix-util.overrideAttrs (
&& lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages) && lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages)
) pkgs.buildPackages.mesonEmulatorHook ) pkgs.buildPackages.mesonEmulatorHook
++ [ ++ [
pkgs.buildPackages.cmake
pkgs.buildPackages.gnused pkgs.buildPackages.gnused
pkgs.buildPackages.changelog-d
modular.pre-commit.settings.package modular.pre-commit.settings.package
(pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.installationScript) (pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.installationScript)
pkgs.buildPackages.nixfmt-rfc-style pkgs.buildPackages.nixfmt-rfc-style
@ -136,18 +310,22 @@ pkgs.nixComponents2.nix-util.overrideAttrs (
# from making its way into NIX_CFLAGS_COMPILE. # from making its way into NIX_CFLAGS_COMPILE.
lib.filter (p: !lib.hasInfix "separate-debug-info" p) inputs; lib.filter (p: !lib.hasInfix "separate-debug-info" p) inputs;
propagatedNativeBuildInputs = dedupByString (v: "${v}") (
lib.filter (x: !isInternal x) (
lib.lists.concatMap (c: c.propagatedNativeBuildInputs) activeComponents
)
);
buildInputs = [ buildInputs = [
pkgs.gbenchmark pkgs.gbenchmark
] ]
++ attrs.buildInputs or [ ] ++ dedupByString (v: "${v}") (
++ pkgs.nixComponents2.nix-util.buildInputs lib.filter (x: !isInternal x) (lib.lists.concatMap (c: c.buildInputs) activeComponents)
++ pkgs.nixComponents2.nix-store.buildInputs )
++ pkgs.nixComponents2.nix-store-tests.externalBuildInputs
++ pkgs.nixComponents2.nix-fetchers.buildInputs
++ pkgs.nixComponents2.nix-expr.buildInputs
++ pkgs.nixComponents2.nix-expr.externalPropagatedBuildInputs
++ pkgs.nixComponents2.nix-cmd.buildInputs
++ lib.optionals havePerl pkgs.nixComponents2.nix-perl-bindings.externalBuildInputs
++ lib.optional havePerl pkgs.perl; ++ lib.optional havePerl pkgs.perl;
propagatedBuildInputs = dedupByString (v: "${v}") (
lib.filter (x: !isInternal x) (lib.lists.concatMap (c: c.propagatedBuildInputs) activeComponents)
);
} }
) )

View file

@ -63,6 +63,7 @@ let
"nix-cli" "nix-cli"
"nix-functional-tests" "nix-functional-tests"
"nix-json-schema-checks" "nix-json-schema-checks"
"nix-kaitai-struct-checks"
] ]
++ lib.optionals enableBindings [ ++ lib.optionals enableBindings [
"nix-perl-bindings" "nix-perl-bindings"

View file

@ -714,7 +714,7 @@ EOF
place_channel_configuration() { place_channel_configuration() {
if [ -z "${NIX_INSTALLER_NO_CHANNEL_ADD:-}" ]; then if [ -z "${NIX_INSTALLER_NO_CHANNEL_ADD:-}" ]; then
echo "https://nixos.org/channels/nixpkgs-unstable nixpkgs" > "$SCRATCH/.nix-channels" echo "https://channels.nixos.org/nixpkgs-unstable nixpkgs" > "$SCRATCH/.nix-channels"
_sudo "to set up the default system channel (part 1)" \ _sudo "to set up the default system channel (part 1)" \
install -m 0644 "$SCRATCH/.nix-channels" "$ROOT_HOME/.nix-channels" install -m 0644 "$SCRATCH/.nix-channels" "$ROOT_HOME/.nix-channels"
fi fi

View file

@ -213,7 +213,7 @@ fi
# Subscribe the user to the Nixpkgs channel and fetch it. # Subscribe the user to the Nixpkgs channel and fetch it.
if [ -z "$NIX_INSTALLER_NO_CHANNEL_ADD" ]; then if [ -z "$NIX_INSTALLER_NO_CHANNEL_ADD" ]; then
if ! "$nix/bin/nix-channel" --list | grep -q "^nixpkgs "; then if ! "$nix/bin/nix-channel" --list | grep -q "^nixpkgs "; then
"$nix/bin/nix-channel" --add https://nixos.org/channels/nixpkgs-unstable "$nix/bin/nix-channel" --add https://channels.nixos.org/nixpkgs-unstable
fi fi
if [ -z "$_NIX_INSTALLER_TEST" ]; then if [ -z "$_NIX_INSTALLER_TEST" ]; then
if ! "$nix/bin/nix-channel" --update nixpkgs; then if ! "$nix/bin/nix-channel" --update nixpkgs; then

View file

@ -15,7 +15,7 @@ programmatically:
1. Embedding the evaluator 1. Embedding the evaluator
2. Writing language plug-ins 2. Writing language plug-ins
Embedding means you link the Nix C libraries in your program and use them from Embedding means you link the Nix C API libraries in your program and use them from
there. Adding a plug-in means you make a library that gets loaded by the Nix there. Adding a plug-in means you make a library that gets loaded by the Nix
language evaluator, specified through a configuration option. language evaluator, specified through a configuration option.

View file

@ -0,0 +1 @@
../../src/libstore-tests/data/build-result

View file

@ -0,0 +1 @@
../../src/libstore-tests/data/realisation

View file

@ -0,0 +1 @@
../../src/libstore-tests/data/content-address

View file

@ -0,0 +1 @@
../../src/libstore-tests/data/derived-path

View file

@ -30,28 +30,166 @@ schemas = [
'blake3-base64.json', 'blake3-base64.json',
], ],
}, },
{
'stem' : 'content-address',
'schema' : schema_dir / 'content-address-v1.yaml',
'files' : [
'text.json',
'nar.json',
],
},
{
'stem' : 'store-path',
'schema' : schema_dir / 'store-path-v1.yaml',
'files' : [
'simple.json',
],
},
{
'stem' : 'deriving-path',
'schema' : schema_dir / 'deriving-path-v1.yaml',
'files' : [
'single_opaque.json',
'single_built.json',
'single_built_built.json',
],
},
{
'stem' : 'build-trace-entry',
'schema' : schema_dir / 'build-trace-entry-v1.yaml',
'files' : [
'simple.json',
'with-dependent-realisations.json',
'with-signature.json',
],
},
]
# Derivation and Derivation output
schemas += [
# Match overall
{ {
'stem' : 'derivation', 'stem' : 'derivation',
'schema' : schema_dir / 'derivation-v3.yaml', 'schema' : schema_dir / 'derivation-v4.yaml',
'files' : [ 'files' : [
'dyn-dep-derivation.json', 'dyn-dep-derivation.json',
'simple-derivation.json', 'simple-derivation.json',
], ],
}, },
# # Not sure how to make subschema work {
# { 'stem' : 'derivation',
# 'stem': 'derivation', 'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/overall',
# 'schema': schema_dir / 'derivation-v3.yaml#output', 'files' : [
# 'files' : [ 'output-caFixedFlat.json',
# 'output-caFixedFlat.json', 'output-caFixedNAR.json',
# 'output-caFixedNAR.json', 'output-caFixedText.json',
# 'output-caFixedText.json', 'output-caFloating.json',
# 'output-caFloating.json', 'output-deferred.json',
# 'output-deferred.json', 'output-impure.json',
# 'output-impure.json', 'output-inputAddressed.json',
# 'output-inputAddressed.json', ],
# ], },
# }, # Match exact variant
{
'stem' : 'derivation',
'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/inputAddressed',
'files' : [
'output-inputAddressed.json',
],
},
{
'stem' : 'derivation',
'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/caFixed',
'files' : [
'output-caFixedFlat.json',
'output-caFixedNAR.json',
'output-caFixedText.json',
],
},
{
'stem' : 'derivation',
'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/caFloating',
'files' : [
'output-caFloating.json',
],
},
{
'stem' : 'derivation',
'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/deferred',
'files' : [
'output-deferred.json',
],
},
{
'stem' : 'derivation',
'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/impure',
'files' : [
'output-impure.json',
],
},
]
# Store object info
schemas += [
# Match overall
{
'stem' : 'store-object-info',
'schema' : schema_dir / 'store-object-info-v2.yaml',
'files' : [
'pure.json',
'impure.json',
'empty_pure.json',
'empty_impure.json',
],
},
{
'stem' : 'nar-info',
'schema' : schema_dir / 'store-object-info-v2.yaml',
'files' : [
'pure.json',
'impure.json',
],
},
{
'stem' : 'build-result',
'schema' : schema_dir / 'build-result-v1.yaml',
'files' : [
'success.json',
'output-rejected.json',
'not-deterministic.json',
],
},
# Match exact variant
{
'stem' : 'store-object-info',
'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/base',
'files' : [
'pure.json',
'empty_pure.json',
],
},
{
'stem' : 'store-object-info',
'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/impure',
'files' : [
'impure.json',
'empty_impure.json',
],
},
{
'stem' : 'nar-info',
'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/base',
'files' : [
'pure.json',
],
},
{
'stem' : 'nar-info',
'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/narInfo',
'files' : [
'impure.json',
],
},
] ]
# Validate each example against the schema # Validate each example against the schema
@ -64,8 +202,6 @@ foreach schema : schemas
stem + '-schema-valid', stem + '-schema-valid',
jv, jv,
args : [ args : [
'--map',
'./hash-v1.yaml=' + schema_dir / 'hash-v1.yaml',
'http://json-schema.org/draft-04/schema', 'http://json-schema.org/draft-04/schema',
schema_file, schema_file,
], ],

View file

@ -0,0 +1 @@
../../src/libstore-tests/data/nar-info

View file

@ -21,21 +21,24 @@ mkMesonDerivation (finalAttrs: {
../../.version ../../.version
../../doc/manual/source/protocols/json/schema ../../doc/manual/source/protocols/json/schema
../../src/libutil-tests/data/hash ../../src/libutil-tests/data/hash
../../src/libstore-tests/data/content-address
../../src/libstore-tests/data/store-path
../../src/libstore-tests/data/realisation
../../src/libstore-tests/data/derivation ../../src/libstore-tests/data/derivation
../../src/libstore-tests/data/derived-path
../../src/libstore-tests/data/path-info
../../src/libstore-tests/data/nar-info
../../src/libstore-tests/data/build-result
./. ./.
]; ];
outputs = [ "out" ]; outputs = [ "out" ];
passthru.externalNativeBuildInputs = [
jsonschema
];
nativeBuildInputs = [ nativeBuildInputs = [
meson meson
ninja ninja
] jsonschema
++ finalAttrs.passthru.externalNativeBuildInputs; ];
doCheck = true; doCheck = true;

View file

@ -0,0 +1 @@
../../src/libstore-tests/data/path-info

View file

@ -0,0 +1 @@
../../src/libstore-tests/data/store-path

Some files were not shown because too many files have changed in this diff Show more