mirror of
https://github.com/NixOS/nix.git
synced 2025-11-08 11:36:03 +01:00
Compare commits
259 commits
e59b4c69f1
...
c893454926
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c893454926 | ||
|
|
b41ce0d263 | ||
|
|
f392663533 | ||
|
|
f7dcfd1072 | ||
|
|
12760756bb | ||
|
|
8026dd1ba4 | ||
|
|
8dfab7c2dd | ||
|
|
147e183c68 | ||
|
|
52b2909fd2 | ||
|
|
34c77ffe38 | ||
|
|
af8e44821e | ||
|
|
70fbd1cdf4 | ||
|
|
daace78239 | ||
|
|
d596b9754e | ||
|
|
c1317017e9 | ||
|
|
3f18cad5f1 | ||
|
|
41b62aa979 | ||
|
|
af41eccb31 | ||
|
|
e7b274f85a | ||
|
|
6bd92d47e5 | ||
|
|
b5302fc111 | ||
|
|
724086005a | ||
|
|
038d74edf7 | ||
|
|
b177354c35 | ||
|
|
2039235f6e | ||
|
|
0fd3b6fee6 | ||
|
|
b2f0472fe2 | ||
|
|
91af29f37a | ||
|
|
099af7578f | ||
|
|
948c89b367 | ||
|
|
7e84ce3904 | ||
|
|
a828cf777a | ||
|
|
687dd38998 | ||
|
|
62729ff472 | ||
|
|
0507674a13 | ||
|
|
4f85cfe824 | ||
|
|
7d5567a8d7 | ||
|
|
3ed42cd354 | ||
|
|
4a888b4138 | ||
|
|
f2436a47bb | ||
|
|
83ddfaebf4 | ||
|
|
2b382b171c | ||
|
|
b7553378a4 | ||
|
|
d40f66109b | ||
|
|
9657feaf8c | ||
|
|
d05e85e5be | ||
|
|
9daef9cca2 | ||
|
|
341c42f321 | ||
|
|
631fb6c9ad | ||
|
|
11e19ee690 | ||
|
|
9f322398b4 | ||
|
|
e07510e504 | ||
|
|
ae15d4eaf3 | ||
|
|
469123eda1 | ||
|
|
389bcba97a | ||
|
|
3ef22a521d | ||
|
|
c8e24491c0 | ||
|
|
c3d4c5f69d | ||
|
|
43ce9da6ad | ||
|
|
144c66215b | ||
|
|
0d7b16da4d | ||
|
|
72d0f7b619 | ||
|
|
34ac1792f9 | ||
|
|
0586370e58 | ||
|
|
f63bb5b338 | ||
|
|
53b4ea6c85 | ||
|
|
7c85ac23e2 | ||
|
|
0539b58253 | ||
|
|
beace42e7a | ||
|
|
4a0ccc89d9 | ||
|
|
89fa8c09a9 | ||
|
|
5e025ce940 | ||
|
|
2f6c865e25 | ||
|
|
bd42092873 | ||
|
|
81a2809a52 | ||
|
|
3448d4fa4c | ||
|
|
965d6be7c1 | ||
|
|
040d1aae41 | ||
|
|
bf947bfc26 | ||
|
|
2d83bc6b83 | ||
|
|
e0debd61d5 | ||
|
|
233bd250d1 | ||
|
|
4ea32d0b03 | ||
|
|
892eba4944 | ||
|
|
e4e4063f16 | ||
|
|
d8cec03fce | ||
|
|
b67c2f1572 | ||
|
|
ca9fde1b88 | ||
|
|
0ba1aa34dc | ||
|
|
6fa7510055 | ||
|
|
8151afb345 | ||
|
|
134613e885 | ||
|
|
9d1907fff7 | ||
|
|
c29411ada9 | ||
|
|
8dbc2475f7 | ||
|
|
9e79e83cb5 | ||
|
|
937a6df809 | ||
|
|
1ca6e9ef54 | ||
|
|
ade3d5d746 | ||
|
|
d035d8ba8d | ||
|
|
67be2df174 | ||
|
|
34f780d747 | ||
|
|
e43888890f | ||
|
|
4a80c92a4d | ||
|
|
3a3c062982 | ||
|
|
4a2fb18ba0 | ||
|
|
9eecee3d4e | ||
|
|
089a222111 | ||
|
|
c2609df08c | ||
|
|
37c1ef52e6 | ||
|
|
e776a10db3 | ||
|
|
1507843f6c | ||
|
|
e636888a09 | ||
|
|
3b2186e1c8 | ||
|
|
7e2d2db8ef | ||
|
|
2cc53201eb | ||
|
|
720f693627 | ||
|
|
49084a7e9e | ||
|
|
6d87184a52 | ||
|
|
6985e9f2c3 | ||
|
|
e6f0dd8df5 | ||
|
|
d857a4be50 | ||
|
|
93fe3354b5 | ||
|
|
8b3af40006 | ||
|
|
bffbdcfddc | ||
|
|
495d1b8435 | ||
|
|
66d7b8fe1b | ||
|
|
cf75079bd8 | ||
|
|
b8d7f551e4 | ||
|
|
e947c895ec | ||
|
|
f301669adc | ||
|
|
e3c41407f9 | ||
|
|
00f4a860e7 | ||
|
|
560a596de7 | ||
|
|
da637a05da | ||
|
|
956fffdd6f | ||
|
|
bac41d6989 | ||
|
|
de192794c9 | ||
|
|
246dbe1c05 | ||
|
|
6280905638 | ||
|
|
194c21fc82 | ||
|
|
e08853a67c | ||
|
|
ae49074548 | ||
|
|
f1d4fab1e5 | ||
|
|
c874e7071b | ||
|
|
c67966418f | ||
|
|
be2572ed8d | ||
|
|
be99a1c6bb | ||
|
|
fe8cdbc3e4 | ||
|
|
70176ed317 | ||
|
|
84a5bee424 | ||
|
|
e3246301a6 | ||
|
|
d4c69c7b8f | ||
|
|
f5aafbd6ed | ||
|
|
943788754f | ||
|
|
883860c7ff | ||
|
|
5fc0c4f102 | ||
|
|
1a4ad0706b | ||
|
|
972915cabd | ||
|
|
94965a3a3e | ||
|
|
c77317b1a9 | ||
|
|
dd0d006517 | ||
|
|
ccc06451df | ||
|
|
3775a2a226 | ||
|
|
1d3f0ca22e | ||
|
|
1c41e07b46 | ||
|
|
c592090fff | ||
|
|
4b6d07d642 | ||
|
|
e177f42536 | ||
|
|
ac8b1efcf9 | ||
|
|
ad664ce64e | ||
|
|
18941a2421 | ||
|
|
136825b4a2 | ||
|
|
28b73cabcc | ||
|
|
aa4106fd68 | ||
|
|
7f1d92793e | ||
|
|
234f029940 | ||
|
|
dd716dc9be | ||
|
|
ea17cc1b57 | ||
|
|
0c1be3aabe | ||
|
|
6ca3434cac | ||
|
|
6129aee988 | ||
|
|
5e220271e2 | ||
|
|
8e6b69de54 | ||
|
|
3915b3a111 | ||
|
|
c5515bb22e | ||
|
|
91b69e9e70 | ||
|
|
9e9dfe36df | ||
|
|
50e8d17f3c | ||
|
|
ef8dd58d9b | ||
|
|
91ed3701fe | ||
|
|
b8e5d1f290 | ||
|
|
d44b33562f | ||
|
|
d46504a136 | ||
|
|
126f30deb2 | ||
|
|
5dcfa86910 | ||
|
|
6b6ceddf72 | ||
|
|
60f9489b83 | ||
|
|
584a8e8a00 | ||
|
|
f234633e27 | ||
|
|
6417863ce9 | ||
|
|
91cd42511e | ||
|
|
1af5a98955 | ||
|
|
17777e3b70 | ||
|
|
9321669353 | ||
|
|
3742ae061e | ||
|
|
a91115bf22 | ||
|
|
8c8b706f6b | ||
|
|
fb26285458 | ||
|
|
bbfaaf3a20 | ||
|
|
f9b73185e4 | ||
|
|
27e3d28ed8 | ||
|
|
3994e5627f | ||
|
|
ec2fd2dc23 | ||
|
|
fdc5600fa7 | ||
|
|
1f6ac88efc | ||
|
|
9d4d10954a | ||
|
|
7e53afd8b9 | ||
|
|
bef3c37cb2 | ||
|
|
0f0d9255c6 | ||
|
|
f0b95b6d5b | ||
|
|
e38128b90d | ||
|
|
78e98691d6 | ||
|
|
e213fd64b6 | ||
|
|
1cd8458c28 | ||
|
|
ecaf9470b9 | ||
|
|
8b7e03f0f9 | ||
|
|
04606d50d1 | ||
|
|
476c21d5ef | ||
|
|
1a9ba0d6fe | ||
|
|
648714cd44 | ||
|
|
6b7223b6b7 | ||
|
|
afe5ed879f | ||
|
|
d924374bf2 | ||
|
|
f1968ea38e | ||
|
|
8d338c9234 | ||
|
|
9a695f9067 | ||
|
|
925c0fa4a2 | ||
|
|
7308fde0bc | ||
|
|
324bfd82dc | ||
|
|
8e01e4ad5c | ||
|
|
4c4eb5d07f | ||
|
|
b5ae3e10c2 | ||
|
|
4f5af471fb | ||
|
|
b9af19cedf | ||
|
|
d6f1e2de21 | ||
|
|
5d365cd61f | ||
|
|
c87f29a0b6 | ||
|
|
f594a8e11e | ||
|
|
0a74b4905c | ||
|
|
d74177dccc | ||
|
|
953929f899 | ||
|
|
3c83856494 | ||
|
|
f3d8d1f719 | ||
|
|
c8a15bf70d | ||
|
|
ad5c6a53b9 | ||
|
|
459f9e0185 | ||
|
|
3d147c04a5 | ||
|
|
61fbef42a6 |
375 changed files with 9320 additions and 2854 deletions
18
.coderabbit.yaml
Normal file
18
.coderabbit.yaml
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json
|
||||
# Disable CodeRabbit auto-review to prevent verbose comments on PRs.
|
||||
# When enabled: false, CodeRabbit won't attempt reviews and won't post
|
||||
# "Review skipped" or other automated comments.
|
||||
reviews:
|
||||
auto_review:
|
||||
enabled: false
|
||||
review_status: false
|
||||
high_level_summary: false
|
||||
poem: false
|
||||
sequence_diagrams: false
|
||||
changed_files_summary: false
|
||||
tools:
|
||||
github-checks:
|
||||
enabled: false
|
||||
chat:
|
||||
art: false
|
||||
auto_reply: false
|
||||
13
.github/actions/install-nix-action/action.yaml
vendored
13
.github/actions/install-nix-action/action.yaml
vendored
|
|
@ -16,13 +16,17 @@ inputs:
|
|||
install_url:
|
||||
description: "URL of the Nix installer"
|
||||
required: false
|
||||
default: "https://releases.nixos.org/nix/nix-2.30.2/install"
|
||||
default: "https://releases.nixos.org/nix/nix-2.32.1/install"
|
||||
tarball_url:
|
||||
description: "URL of the Nix tarball to use with the experimental installer"
|
||||
required: false
|
||||
github_token:
|
||||
description: "Github token"
|
||||
required: true
|
||||
use_cache:
|
||||
description: "Whether to setup magic-nix-cache"
|
||||
default: true
|
||||
required: false
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
|
|
@ -118,3 +122,10 @@ runs:
|
|||
source-url: ${{ inputs.experimental-installer-version != 'latest' && 'https://artifacts.nixos.org/experimental-installer/tag/${{ inputs.experimental-installer-version }}/${{ env.EXPERIMENTAL_INSTALLER_ARTIFACT }}' || '' }}
|
||||
nix-package-url: ${{ inputs.dogfood == 'true' && steps.download-nix-installer.outputs.tarball-path || (inputs.tarball_url || '') }}
|
||||
extra-conf: ${{ inputs.extra_nix_config }}
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@565684385bcd71bad329742eefe8d12f2e765b39 # v13
|
||||
if: ${{ inputs.use_cache == 'true' }}
|
||||
with:
|
||||
diagnostic-endpoint: ''
|
||||
use-flakehub: false
|
||||
use-gha-cache: true
|
||||
source-revision: 92d9581367be2233c2d5714a2640e1339f4087d8 # main
|
||||
|
|
|
|||
28
.github/workflows/ci.yml
vendored
28
.github/workflows/ci.yml
vendored
|
|
@ -14,6 +14,10 @@ on:
|
|||
default: true
|
||||
type: boolean
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: read-all
|
||||
|
||||
jobs:
|
||||
|
|
@ -29,6 +33,7 @@ jobs:
|
|||
extra_nix_config:
|
||||
experimental-features = nix-command flakes
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
use_cache: false
|
||||
- run: nix flake show --all-systems --json
|
||||
|
||||
pre-commit-checks:
|
||||
|
|
@ -41,7 +46,6 @@ jobs:
|
|||
dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }}
|
||||
extra_nix_config: experimental-features = nix-command flakes
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- run: ./ci/gha/tests/pre-commit-checks
|
||||
|
||||
basic-checks:
|
||||
|
|
@ -92,7 +96,6 @@ jobs:
|
|||
dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }}
|
||||
# The sandbox would otherwise be disabled by default on Darwin
|
||||
extra_nix_config: "sandbox = true"
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
# Since ubuntu 22.30, unprivileged usernamespaces are no longer allowed to map to the root user:
|
||||
# https://ubuntu.com/blog/ubuntu-23-10-restricted-unprivileged-user-namespaces
|
||||
- run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0
|
||||
|
|
@ -122,13 +125,13 @@ jobs:
|
|||
cat coverage-reports/index.txt >> $GITHUB_STEP_SUMMARY
|
||||
if: ${{ matrix.instrumented }}
|
||||
- name: Upload coverage reports
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: coverage-reports
|
||||
path: coverage-reports/
|
||||
if: ${{ matrix.instrumented }}
|
||||
- name: Upload installer tarball
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: installer-${{matrix.os}}
|
||||
path: out/*
|
||||
|
|
@ -161,7 +164,7 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- name: Download installer tarball
|
||||
uses: actions/download-artifact@v5
|
||||
uses: actions/download-artifact@v6
|
||||
with:
|
||||
name: installer-${{matrix.os}}
|
||||
path: out
|
||||
|
|
@ -171,7 +174,7 @@ jobs:
|
|||
echo "installer-url=file://$GITHUB_WORKSPACE/out" >> "$GITHUB_OUTPUT"
|
||||
TARBALL_PATH="$(find "$GITHUB_WORKSPACE/out" -name 'nix*.tar.xz' -print | head -n 1)"
|
||||
echo "tarball-path=file://$TARBALL_PATH" >> "$GITHUB_OUTPUT"
|
||||
- uses: cachix/install-nix-action@v31
|
||||
- uses: cachix/install-nix-action@456688f15bc354bef6d396e4a35f4f89d40bf2b7 # v31.8.2
|
||||
if: ${{ !matrix.experimental-installer }}
|
||||
with:
|
||||
install_url: ${{ format('{0}/install', steps.installer-tarball-url.outputs.installer-url) }}
|
||||
|
|
@ -227,12 +230,13 @@ jobs:
|
|||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: cachix/install-nix-action@v31
|
||||
- uses: ./.github/actions/install-nix-action
|
||||
with:
|
||||
install_url: https://releases.nixos.org/nix/nix-2.20.3/install
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#nix.version | tr -d \")" >> $GITHUB_ENV
|
||||
- run: nix --experimental-features 'nix-command flakes' build .#dockerImage -L
|
||||
dogfood: false
|
||||
extra_nix_config: |
|
||||
experimental-features = flakes nix-command
|
||||
- run: echo NIX_VERSION="$(nix eval .\#nix.version | tr -d \")" >> $GITHUB_ENV
|
||||
- run: nix build .#dockerImage -L
|
||||
- run: docker load -i ./result/image.tar.gz
|
||||
- run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:$NIX_VERSION
|
||||
- run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:master
|
||||
|
|
@ -289,7 +293,6 @@ jobs:
|
|||
extra_nix_config:
|
||||
experimental-features = nix-command flakes
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh
|
||||
|
||||
profile_build:
|
||||
|
|
@ -310,7 +313,6 @@ jobs:
|
|||
extra_nix_config: |
|
||||
experimental-features = flakes nix-command ca-derivations impure-derivations
|
||||
max-jobs = 1
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- run: |
|
||||
nix build -L --file ./ci/gha/profile-build buildTimeReport --out-link build-time-report.md
|
||||
cat build-time-report.md >> $GITHUB_STEP_SUMMARY
|
||||
|
|
|
|||
|
|
@ -107,12 +107,29 @@ rec {
|
|||
};
|
||||
};
|
||||
|
||||
disable =
|
||||
let
|
||||
inherit (pkgs.stdenv) hostPlatform;
|
||||
in
|
||||
args@{
|
||||
pkgName,
|
||||
testName,
|
||||
test,
|
||||
}:
|
||||
lib.any (b: b) [
|
||||
# FIXME: Nix manual is impure and does not produce all settings on darwin
|
||||
(hostPlatform.isDarwin && pkgName == "nix-manual" && testName == "linkcheck")
|
||||
];
|
||||
|
||||
componentTests =
|
||||
(lib.concatMapAttrs (
|
||||
pkgName: pkg:
|
||||
lib.concatMapAttrs (testName: test: {
|
||||
"${componentTestsPrefix}${pkgName}-${testName}" = test;
|
||||
}) (pkg.tests or { })
|
||||
lib.concatMapAttrs (
|
||||
testName: test:
|
||||
lib.optionalAttrs (!disable { inherit pkgName testName test; }) {
|
||||
"${componentTestsPrefix}${pkgName}-${testName}" = test;
|
||||
}
|
||||
) (pkg.tests or { })
|
||||
) nixComponentsInstrumented)
|
||||
// lib.optionalAttrs (pkgs.stdenv.hostPlatform == pkgs.stdenv.buildPlatform) {
|
||||
"${componentTestsPrefix}nix-functional-tests" = nixComponentsInstrumented.nix-functional-tests;
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
|
||||
|
||||
def transform_anchors_html:
|
||||
. | gsub($empty_anchor_regex; "<a name=\"" + .anchor + "\"></a>")
|
||||
. | gsub($empty_anchor_regex; "<a id=\"" + .anchor + "\"></a>")
|
||||
| gsub($anchor_regex; "<a href=\"#" + .anchor + "\" id=\"" + .anchor + "\">" + .text + "</a>");
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ additional-css = ["custom.css"]
|
|||
additional-js = ["redirects.js"]
|
||||
edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}"
|
||||
git-repository-url = "https://github.com/NixOS/nix"
|
||||
mathjax-support = true
|
||||
|
||||
# Handles replacing @docroot@ with a path to ./source relative to that markdown file,
|
||||
# {{#include handlebars}}, and the @generated@ syntax used within these. it mostly
|
||||
|
|
|
|||
|
|
@ -24,9 +24,9 @@ let
|
|||
in
|
||||
concatStringsSep "\n" (map showEntry storesList);
|
||||
|
||||
"index.md" =
|
||||
replaceStrings [ "@store-types@" ] [ index ]
|
||||
(readFile ./source/store/types/index.md.in);
|
||||
"index.md" = replaceStrings [ "@store-types@" ] [ index ] (
|
||||
readFile ./source/store/types/index.md.in
|
||||
);
|
||||
|
||||
tableOfContents =
|
||||
let
|
||||
|
|
|
|||
|
|
@ -92,6 +92,8 @@ manual = custom_target(
|
|||
(cd @2@; RUST_LOG=warn @1@ build -d @2@ 3>&2 2>&1 1>&3) | { grep -Fv "because fragment resolution isn't implemented" || :; } 3>&2 2>&1 1>&3
|
||||
rm -rf @2@/manual
|
||||
mv @2@/html @2@/manual
|
||||
# Remove Mathjax 2.7, because we will actually use MathJax 3.x
|
||||
find @2@/manual | grep .html | xargs sed -i -e '/2.7.1.MathJax.js/d'
|
||||
find @2@/manual -iname meson.build -delete
|
||||
'''.format(
|
||||
python.full_path(),
|
||||
|
|
|
|||
|
|
@ -18,6 +18,9 @@
|
|||
# Configuration Options
|
||||
|
||||
version,
|
||||
|
||||
# `tests` attribute
|
||||
testers,
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
@ -34,7 +37,15 @@ mkMesonDerivation (finalAttrs: {
|
|||
(fileset.unions [
|
||||
../../.version
|
||||
# For example JSON
|
||||
../../src/libutil-tests/data/memory-source-accessor
|
||||
../../src/libutil-tests/data/hash
|
||||
../../src/libstore-tests/data/content-address
|
||||
../../src/libstore-tests/data/store-path
|
||||
../../src/libstore-tests/data/realisation
|
||||
../../src/libstore-tests/data/derived-path
|
||||
../../src/libstore-tests/data/path-info
|
||||
../../src/libstore-tests/data/nar-info
|
||||
../../src/libstore-tests/data/build-result
|
||||
# Too many different types of files to filter for now
|
||||
../../doc/manual
|
||||
./.
|
||||
|
|
@ -82,6 +93,29 @@ mkMesonDerivation (finalAttrs: {
|
|||
echo "doc manual ''$out/share/doc/nix/manual" >> ''$out/nix-support/hydra-build-products
|
||||
'';
|
||||
|
||||
/**
|
||||
The root of the HTML manual.
|
||||
E.g. "${nix-manual.site}/index.html" exists.
|
||||
*/
|
||||
passthru.site = finalAttrs.finalPackage + "/share/doc/nix/manual";
|
||||
|
||||
passthru.tests = {
|
||||
# https://nixos.org/manual/nixpkgs/stable/index.html#tester-lycheeLinkCheck
|
||||
linkcheck = testers.lycheeLinkCheck {
|
||||
inherit (finalAttrs.finalPackage) site;
|
||||
extraConfig = {
|
||||
exclude = [
|
||||
# Exclude auto-generated JSON schema documentation which has
|
||||
# auto-generated fragment IDs that don't match the link references
|
||||
".*/protocols/json/.*\\.html"
|
||||
# Exclude undocumented builtins
|
||||
".*/language/builtins\\.html#builtins-addErrorContext"
|
||||
".*/language/builtins\\.html#builtins-appendContext"
|
||||
];
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
meta = {
|
||||
platforms = lib.platforms.all;
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
---
|
||||
synopsis: "Improved S3 binary cache support via HTTP"
|
||||
prs: [13823, 14026, 14120, 14131, 14135, 14144, 14170, 14190, 14198, 14206, 14209, 14222, 14223, 13752]
|
||||
prs: [13752, 13823, 14026, 14120, 14131, 14135, 14144, 14170, 14190, 14198, 14206, 14209, 14222, 14223, 14330, 14333, 14335, 14336, 14337, 14350, 14356, 14357, 14374, 14375, 14376, 14377, 14391, 14393, 14420, 14421]
|
||||
issues: [13084, 12671, 11748, 12403]
|
||||
---
|
||||
|
||||
|
|
@ -18,9 +18,23 @@ improvements:
|
|||
The new implementation requires curl >= 7.75.0 and `aws-crt-cpp` for credential
|
||||
management.
|
||||
|
||||
All existing S3 URL formats and parameters remain supported, with the notable
|
||||
exception of multi-part uploads, which are no longer supported.
|
||||
All existing S3 URL formats and parameters remain supported, however the store
|
||||
settings for configuring multipart uploads have changed:
|
||||
|
||||
- **`multipart-upload`** (default: `false`): Enable multipart uploads for large
|
||||
files. When enabled, files exceeding the multipart threshold will be uploaded
|
||||
in multiple parts.
|
||||
|
||||
- **`multipart-threshold`** (default: `100 MiB`): Minimum file size for using
|
||||
multipart uploads. Files smaller than this will use regular PUT requests.
|
||||
Only takes effect when `multipart-upload` is enabled.
|
||||
|
||||
- **`multipart-chunk-size`** (default: `5 MiB`): Size of each part in multipart
|
||||
uploads. Must be at least 5 MiB (AWS S3 requirement). Larger chunk sizes
|
||||
reduce the number of requests but use more memory.
|
||||
|
||||
- **`buffer-size`**: Has been replaced by `multipart-chunk-size` and is now an alias to it.
|
||||
|
||||
Note that this change also means Nix now supports S3 binary cache stores even
|
||||
if build without `aws-crt-cpp`, but only for public buckets which do not
|
||||
require auth.
|
||||
if built without `aws-crt-cpp`, but only for public buckets which do not
|
||||
require authentication.
|
||||
|
|
|
|||
14
doc/manual/rl-next/s3-object-versioning.md
Normal file
14
doc/manual/rl-next/s3-object-versioning.md
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
---
|
||||
synopsis: "S3 URLs now support object versioning via versionId parameter"
|
||||
prs: [14274]
|
||||
issues: [13955]
|
||||
---
|
||||
|
||||
S3 URLs now support a `versionId` query parameter to fetch specific versions
|
||||
of objects from S3 buckets with versioning enabled. This allows pinning to
|
||||
exact object versions for reproducibility and protection against unexpected
|
||||
changes:
|
||||
|
||||
```
|
||||
s3://bucket/key?region=us-east-1&versionId=abc123def456
|
||||
```
|
||||
|
|
@ -26,9 +26,12 @@
|
|||
- [Derivation Outputs and Types of Derivations](store/derivation/outputs/index.md)
|
||||
- [Content-addressing derivation outputs](store/derivation/outputs/content-address.md)
|
||||
- [Input-addressing derivation outputs](store/derivation/outputs/input-address.md)
|
||||
- [Build Trace](store/build-trace.md)
|
||||
- [Derivation Resolution](store/resolution.md)
|
||||
- [Building](store/building.md)
|
||||
- [Store Types](store/types/index.md)
|
||||
{{#include ./store/types/SUMMARY.md}}
|
||||
- [Appendix: Math notation](store/math-notation.md)
|
||||
- [Nix Language](language/index.md)
|
||||
- [Data Types](language/types.md)
|
||||
- [String context](language/string-context.md)
|
||||
|
|
@ -117,12 +120,18 @@
|
|||
- [Architecture and Design](architecture/architecture.md)
|
||||
- [Formats and Protocols](protocols/index.md)
|
||||
- [JSON Formats](protocols/json/index.md)
|
||||
- [File System Object](protocols/json/file-system-object.md)
|
||||
- [Hash](protocols/json/hash.md)
|
||||
- [Content Address](protocols/json/content-address.md)
|
||||
- [Store Path](protocols/json/store-path.md)
|
||||
- [Store Object Info](protocols/json/store-object-info.md)
|
||||
- [Derivation](protocols/json/derivation.md)
|
||||
- [Deriving Path](protocols/json/deriving-path.md)
|
||||
- [Build Trace Entry](protocols/json/build-trace-entry.md)
|
||||
- [Build Result](protocols/json/build-result.md)
|
||||
- [Serving Tarball Flakes](protocols/tarball-fetcher.md)
|
||||
- [Store Path Specification](protocols/store-path.md)
|
||||
- [Nix Archive (NAR) Format](protocols/nix-archive.md)
|
||||
- [Nix Archive (NAR) Format](protocols/nix-archive/index.md)
|
||||
- [Derivation "ATerm" file format](protocols/derivation-aterm.md)
|
||||
- [C API](c-api.md)
|
||||
- [Glossary](glossary.md)
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ The moving parts of channels are:
|
|||
- The official channels listed at <https://nixos.org/channels>
|
||||
- The user-specific list of [subscribed channels](#subscribed-channels)
|
||||
- The [downloaded channel contents](#channels)
|
||||
- The [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path), set with the [`-I` option](#opt-i) or the [`NIX_PATH` environment variable](#env-NIX_PATH)
|
||||
- The [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path), set with the [`-I` option](#opt-I) or the [`NIX_PATH` environment variable](#env-NIX_PATH)
|
||||
|
||||
> **Note**
|
||||
>
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ left untouched; this is not an error. It is also not an error if an
|
|||
element of *args* matches no installed derivations.
|
||||
|
||||
For a description of how *args* is mapped to a set of store paths, see
|
||||
[`--install`](#operation---install). If *args* describes multiple
|
||||
[`--install`](./install.md). If *args* describes multiple
|
||||
store paths with the same symbolic name, only the one with the highest
|
||||
version is installed.
|
||||
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
This man page describes the command `nix-shell`, which is distinct from `nix
|
||||
shell`. For documentation on the latter, run `nix shell --help` or see `man
|
||||
nix3-shell`.
|
||||
nix3-env-shell`.
|
||||
|
||||
# Description
|
||||
|
||||
|
|
|
|||
|
|
@ -48,8 +48,7 @@ The behaviour of the collector is also influenced by the
|
|||
configuration file.
|
||||
|
||||
By default, the collector prints the total number of freed bytes when it
|
||||
finishes (or when it is interrupted). With `--print-dead`, it prints the
|
||||
number of bytes that would be freed.
|
||||
finishes (or when it is interrupted).
|
||||
|
||||
{{#include ./opt-common.md}}
|
||||
|
||||
|
|
|
|||
|
|
@ -66,7 +66,7 @@ You can also build Nix for one of the [supported platforms](#platforms).
|
|||
This section assumes you are using Nix with the [`flakes`] and [`nix-command`] experimental features enabled.
|
||||
|
||||
[`flakes`]: @docroot@/development/experimental-features.md#xp-feature-flakes
|
||||
[`nix-command`]: @docroot@/development/experimental-features.md#xp-nix-command
|
||||
[`nix-command`]: @docroot@/development/experimental-features.md#xp-feature-nix-command
|
||||
|
||||
To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:
|
||||
|
||||
|
|
@ -256,7 +256,7 @@ You can use any of the other supported environments in place of `nix-cli-ccacheS
|
|||
## Editor integration
|
||||
|
||||
The `clangd` LSP server is installed by default on the `clang`-based `devShell`s.
|
||||
See [supported compilation environments](#compilation-environments) and instructions how to set up a shell [with flakes](#nix-with-flakes) or in [classic Nix](#classic-nix).
|
||||
See [supported compilation environments](#compilation-environments) and instructions how to set up a shell [with flakes](#building-nix-with-flakes) or in [classic Nix](#building-nix).
|
||||
|
||||
To use the LSP with your editor, you will want a `compile_commands.json` file telling `clangd` how we are compiling the code.
|
||||
Meson's configure always produces this inside the build directory.
|
||||
|
|
|
|||
|
|
@ -240,3 +240,9 @@ $ configurePhase
|
|||
$ ninja src/external-api-docs/html
|
||||
$ xdg-open src/external-api-docs/html/index.html
|
||||
```
|
||||
|
||||
If you use direnv, or otherwise want to run `configurePhase` in a transient shell, use:
|
||||
|
||||
```bash
|
||||
nix-shell -A devShells.x86_64-linux.native-clangStdenv --command 'appendToVar mesonFlags "-Ddoc-gen=true"; mesonConfigurePhase'
|
||||
```
|
||||
|
|
|
|||
|
|
@ -119,7 +119,7 @@ This will:
|
|||
|
||||
3. Stop the program when the test fails, allowing the user to then issue arbitrary commands to GDB.
|
||||
|
||||
### Characterisation testing { #characaterisation-testing-unit }
|
||||
### Characterisation testing { #characterisation-testing-unit }
|
||||
|
||||
See [functional characterisation testing](#characterisation-testing-functional) for a broader discussion of characterisation testing.
|
||||
|
||||
|
|
|
|||
|
|
@ -208,7 +208,7 @@
|
|||
|
||||
- [impure derivation]{#gloss-impure-derivation}
|
||||
|
||||
[An experimental feature](#@docroot@/development/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure,
|
||||
[An experimental feature](@docroot@/development/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure,
|
||||
so that they are always rebuilt, and their outputs not reused by subsequent calls to realise them.
|
||||
|
||||
- [Nix database]{#gloss-nix-database}
|
||||
|
|
@ -279,7 +279,7 @@
|
|||
|
||||
See [References](@docroot@/store/store-object.md#references) for details.
|
||||
|
||||
- [referrer]{#gloss-reference}
|
||||
- [referrer]{#gloss-referrer}
|
||||
|
||||
A reversed edge from one [store object] to another.
|
||||
|
||||
|
|
@ -367,8 +367,8 @@
|
|||
|
||||
Nix represents files as [file system objects][file system object], and how they belong together is encoded as [references][reference] between [store objects][store object] that contain these file system objects.
|
||||
|
||||
The [Nix language] allows denoting packages in terms of [attribute sets](@docroot@/language/types.md#attribute-set) containing:
|
||||
- attributes that refer to the files of a package, typically in the form of [derivation outputs](#output),
|
||||
The [Nix language] allows denoting packages in terms of [attribute sets](@docroot@/language/types.md#type-attrs) containing:
|
||||
- attributes that refer to the files of a package, typically in the form of [derivation outputs](#gloss-output),
|
||||
- attributes with metadata, such as information about how the package is supposed to be used.
|
||||
|
||||
The exact shape of these attribute sets is up to convention.
|
||||
|
|
@ -383,7 +383,7 @@
|
|||
|
||||
[string]: ./language/types.md#type-string
|
||||
[path]: ./language/types.md#type-path
|
||||
[attribute name]: ./language/types.md#attribute-set
|
||||
[attribute name]: ./language/types.md#type-attrs
|
||||
|
||||
- [base directory]{#gloss-base-directory}
|
||||
|
||||
|
|
|
|||
|
|
@ -3,19 +3,21 @@
|
|||
To run the latest stable release of Nix with Docker run the following command:
|
||||
|
||||
```console
|
||||
$ docker run -ti ghcr.io/nixos/nix
|
||||
Unable to find image 'ghcr.io/nixos/nix:latest' locally
|
||||
latest: Pulling from ghcr.io/nixos/nix
|
||||
$ docker run -ti docker.io/nixos/nix
|
||||
Unable to find image 'docker.io/nixos/nix:latest' locally
|
||||
latest: Pulling from docker.io/nixos/nix
|
||||
5843afab3874: Pull complete
|
||||
b52bf13f109c: Pull complete
|
||||
1e2415612aa3: Pull complete
|
||||
Digest: sha256:27f6e7f60227e959ee7ece361f75d4844a40e1cc6878b6868fe30140420031ff
|
||||
Status: Downloaded newer image for ghcr.io/nixos/nix:latest
|
||||
Status: Downloaded newer image for docker.io/nixos/nix:latest
|
||||
35ca4ada6e96:/# nix --version
|
||||
nix (Nix) 2.3.12
|
||||
35ca4ada6e96:/# exit
|
||||
```
|
||||
|
||||
> If you want the latest pre-release you can use ghcr.io/nixos/nix and view them at https://github.com/nixos/nix/pkgs/container/nix
|
||||
|
||||
# What is included in Nix's Docker image?
|
||||
|
||||
The official Docker image is created using `pkgs.dockerTools.buildLayeredImage`
|
||||
|
|
|
|||
|
|
@ -333,7 +333,7 @@ Here is more information on the `output*` attributes, and what values they may b
|
|||
|
||||
`outputHashAlgo` can only be `null` when `outputHash` follows the SRI format, because in that case the choice of hash algorithm is determined by `outputHash`.
|
||||
|
||||
- [`outputHash`]{#adv-attr-outputHashAlgo}; [`outputHash`]{#adv-attr-outputHashMode}
|
||||
- [`outputHash`]{#adv-attr-outputHash}
|
||||
|
||||
This will specify the output hash of the single output of a [fixed-output derivation].
|
||||
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect
|
|||
- [`name`]{#attr-name} ([String](@docroot@/language/types.md#type-string))
|
||||
|
||||
A symbolic name for the derivation.
|
||||
See [derivation outputs](@docroot@/store/derivation/index.md#outputs) for what this is affects.
|
||||
See [derivation outputs](@docroot@/store/derivation/outputs/index.md#outputs) for what this is affects.
|
||||
|
||||
[store path]: @docroot@/store/store-path.md
|
||||
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ An *identifier* is an [ASCII](https://en.wikipedia.org/wiki/ASCII) character seq
|
|||
|
||||
# Names
|
||||
|
||||
A *name* can be written as an [identifier](#identifier) or a [string literal](./string-literals.md).
|
||||
A *name* can be written as an [identifier](#identifiers) or a [string literal](./string-literals.md).
|
||||
|
||||
> **Syntax**
|
||||
>
|
||||
|
|
|
|||
|
|
@ -137,7 +137,7 @@ This is an incomplete overview of language features, by example.
|
|||
</td>
|
||||
<td>
|
||||
|
||||
[Booleans](@docroot@/language/types.md#type-boolean)
|
||||
[Booleans](@docroot@/language/types.md#type-bool)
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
|
@ -245,7 +245,7 @@ This is an incomplete overview of language features, by example.
|
|||
</td>
|
||||
<td>
|
||||
|
||||
An [attribute set](@docroot@/language/types.md#attribute-set) with attributes named `x` and `y`
|
||||
An [attribute set](@docroot@/language/types.md#type-attrs) with attributes named `x` and `y`
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
|
@ -285,7 +285,7 @@ This is an incomplete overview of language features, by example.
|
|||
</td>
|
||||
<td>
|
||||
|
||||
[Lists](@docroot@/language/types.md#list) with three elements.
|
||||
[Lists](@docroot@/language/types.md#type-list) with three elements.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
|
@ -369,7 +369,7 @@ This is an incomplete overview of language features, by example.
|
|||
</td>
|
||||
<td>
|
||||
|
||||
[Attribute selection](@docroot@/language/types.md#attribute-set) (evaluates to `1`)
|
||||
[Attribute selection](@docroot@/language/types.md#type-attrs) (evaluates to `1`)
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
|
@ -381,7 +381,7 @@ This is an incomplete overview of language features, by example.
|
|||
</td>
|
||||
<td>
|
||||
|
||||
[Attribute selection](@docroot@/language/types.md#attribute-set) with default (evaluates to `3`)
|
||||
[Attribute selection](@docroot@/language/types.md#type-attrs) with default (evaluates to `3`)
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
|
|
|||
|
|
@ -111,7 +111,7 @@ It creates an [attribute set] representing the string context, which can be insp
|
|||
|
||||
[`builtins.hasContext`]: ./builtins.md#builtins-hasContext
|
||||
[`builtins.getContext`]: ./builtins.md#builtins-getContext
|
||||
[attribute set]: ./types.md#attribute-set
|
||||
[attribute set]: ./types.md#type-attrs
|
||||
|
||||
## Clearing string contexts
|
||||
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ Such a construct is called *interpolated string*, and the expression inside is a
|
|||
|
||||
[string]: ./types.md#type-string
|
||||
[path]: ./types.md#type-path
|
||||
[attribute set]: ./types.md#attribute-set
|
||||
[attribute set]: ./types.md#type-attrs
|
||||
|
||||
> **Syntax**
|
||||
>
|
||||
|
|
|
|||
|
|
@ -51,7 +51,7 @@ See [String literals](string-literals.md).
|
|||
|
||||
Path literals can also include [string interpolation], besides being [interpolated into other expressions].
|
||||
|
||||
[interpolated into other expressions]: ./string-interpolation.md#interpolated-expressions
|
||||
[interpolated into other expressions]: ./string-interpolation.md#interpolated-expression
|
||||
|
||||
At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path.
|
||||
|
||||
|
|
@ -235,7 +235,7 @@ of object-oriented programming, for example.
|
|||
|
||||
## Recursive sets
|
||||
|
||||
Recursive sets are like normal [attribute sets](./types.md#attribute-set), but the attributes can refer to each other.
|
||||
Recursive sets are like normal [attribute sets](./types.md#type-attrs), but the attributes can refer to each other.
|
||||
|
||||
> *rec-attrset* = `rec {` [ *name* `=` *expr* `;` `]`... `}`
|
||||
|
||||
|
|
@ -287,7 +287,7 @@ This evaluates to `"foobar"`.
|
|||
|
||||
## Inheriting attributes
|
||||
|
||||
When defining an [attribute set](./types.md#attribute-set) or in a [let-expression](#let-expressions) it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes).
|
||||
When defining an [attribute set](./types.md#type-attrs) or in a [let-expression](#let-expressions) it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes).
|
||||
This can be shortened using the `inherit` keyword.
|
||||
|
||||
Example:
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
# Derivation "ATerm" file format
|
||||
|
||||
For historical reasons, [store derivations][store derivation] are stored on-disk in [ATerm](https://homepages.cwi.nl/~daybuild/daily-books/technology/aterm-guide/aterm-guide.html) format.
|
||||
For historical reasons, [store derivations][store derivation] are stored on-disk in "Annotated Term" (ATerm) format
|
||||
([guide](https://homepages.cwi.nl/~daybuild/daily-books/technology/aterm-guide/aterm-guide.html),
|
||||
[paper](https://doi.org/10.1002/(SICI)1097-024X(200003)30:3%3C259::AID-SPE298%3E3.0.CO;2-Y)).
|
||||
|
||||
## The ATerm format used
|
||||
|
||||
|
|
|
|||
21
doc/manual/source/protocols/json/build-result.md
Normal file
21
doc/manual/source/protocols/json/build-result.md
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
{{#include build-result-v1-fixed.md}}
|
||||
|
||||
## Examples
|
||||
|
||||
### Successful build
|
||||
|
||||
```json
|
||||
{{#include schema/build-result-v1/success.json}}
|
||||
```
|
||||
|
||||
### Failed build (output rejected)
|
||||
|
||||
```json
|
||||
{{#include schema/build-result-v1/output-rejected.json}}
|
||||
```
|
||||
|
||||
### Failed build (non-deterministic)
|
||||
|
||||
```json
|
||||
{{#include schema/build-result-v1/not-deterministic.json}}
|
||||
```
|
||||
27
doc/manual/source/protocols/json/build-trace-entry.md
Normal file
27
doc/manual/source/protocols/json/build-trace-entry.md
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
{{#include build-trace-entry-v1-fixed.md}}
|
||||
|
||||
## Examples
|
||||
|
||||
### Simple build trace entry
|
||||
|
||||
```json
|
||||
{{#include schema/build-trace-entry-v1/simple.json}}
|
||||
```
|
||||
|
||||
### Build trace entry with dependencies
|
||||
|
||||
```json
|
||||
{{#include schema/build-trace-entry-v1/with-dependent-realisations.json}}
|
||||
```
|
||||
|
||||
### Build trace entry with signature
|
||||
|
||||
```json
|
||||
{{#include schema/build-trace-entry-v1/with-signature.json}}
|
||||
```
|
||||
|
||||
<!--
|
||||
## Raw Schema
|
||||
|
||||
[JSON Schema for Build Trace Entry v1](schema/build-trace-entry-v1.json)
|
||||
-->
|
||||
21
doc/manual/source/protocols/json/content-address.md
Normal file
21
doc/manual/source/protocols/json/content-address.md
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
{{#include content-address-v1-fixed.md}}
|
||||
|
||||
## Examples
|
||||
|
||||
### [Text](@docroot@/store/store-object/content-address.html#method-text) method
|
||||
|
||||
```json
|
||||
{{#include schema/content-address-v1/text.json}}
|
||||
```
|
||||
|
||||
### [Nix Archive](@docroot@/store/store-object/content-address.html#method-nix-archive) method
|
||||
|
||||
```json
|
||||
{{#include schema/content-address-v1/nar.json}}
|
||||
```
|
||||
|
||||
<!-- need to convert YAML to JSON first
|
||||
## Raw Schema
|
||||
|
||||
[JSON Schema for Hash v1](schema/content-address-v1.json)
|
||||
-->
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
{{#include derivation-v3-fixed.md}}
|
||||
|
||||
<!--
|
||||
<!-- need to convert YAML to JSON first
|
||||
## Raw Schema
|
||||
|
||||
[JSON Schema for Derivation v3](schema/derivation-v3.json)
|
||||
|
|
|
|||
21
doc/manual/source/protocols/json/deriving-path.md
Normal file
21
doc/manual/source/protocols/json/deriving-path.md
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
{{#include deriving-path-v1-fixed.md}}
|
||||
|
||||
## Examples
|
||||
|
||||
### Constant
|
||||
|
||||
```json
|
||||
{{#include schema/deriving-path-v1/single_opaque.json}}
|
||||
```
|
||||
|
||||
### Output of static derivation
|
||||
|
||||
```json
|
||||
{{#include schema/deriving-path-v1/single_built.json}}
|
||||
```
|
||||
|
||||
### Output of dynamic derivation
|
||||
|
||||
```json
|
||||
{{#include schema/deriving-path-v1/single_built_built.json}}
|
||||
```
|
||||
21
doc/manual/source/protocols/json/file-system-object.md
Normal file
21
doc/manual/source/protocols/json/file-system-object.md
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
{{#include file-system-object-v1-fixed.md}}
|
||||
|
||||
## Examples
|
||||
|
||||
### Simple
|
||||
|
||||
```json
|
||||
{{#include schema/file-system-object-v1/simple.json}}
|
||||
```
|
||||
|
||||
### Complex
|
||||
|
||||
```json
|
||||
{{#include schema/file-system-object-v1/complex.json}}
|
||||
```
|
||||
|
||||
<!-- need to convert YAML to JSON first
|
||||
## Raw Schema
|
||||
|
||||
[JSON Schema for File System Object v1](schema/file-system-object-v1.json)
|
||||
-->
|
||||
|
|
@ -11,4 +11,8 @@ s/\\`/`/g
|
|||
#
|
||||
# As we have more such relative links, more replacements of this nature
|
||||
# should appear below.
|
||||
s^#/\$defs/\(regular\|symlink\|directory\)^In this schema^g
|
||||
s^\(./hash-v1.yaml\)\?#/$defs/algorithm^[JSON format for `Hash`](./hash.html#algorithm)^g
|
||||
s^\(./hash-v1.yaml\)^[JSON format for `Hash`](./hash.html)^g
|
||||
s^\(./content-address-v1.yaml\)\?#/$defs/method^[JSON format for `ContentAddress`](./content-address.html#method)^g
|
||||
s^\(./content-address-v1.yaml\)^[JSON format for `ContentAddress`](./content-address.html)^g
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@
|
|||
{{#include schema/hash-v1/blake3-base64.json}}
|
||||
```
|
||||
|
||||
<!--
|
||||
<!-- need to convert YAML to JSON first
|
||||
## Raw Schema
|
||||
|
||||
[JSON Schema for Hash v1](schema/hash-v1.json)
|
||||
|
|
|
|||
|
|
@ -9,8 +9,15 @@ json_schema_for_humans = find_program('generate-schema-doc', required : false)
|
|||
json_schema_config = files('json-schema-for-humans-config.yaml')
|
||||
|
||||
schemas = [
|
||||
'file-system-object-v1',
|
||||
'hash-v1',
|
||||
'content-address-v1',
|
||||
'store-path-v1',
|
||||
'store-object-info-v1',
|
||||
'derivation-v3',
|
||||
'deriving-path-v1',
|
||||
'build-trace-entry-v1',
|
||||
'build-result-v1',
|
||||
]
|
||||
|
||||
schema_files = files()
|
||||
|
|
|
|||
1
doc/manual/source/protocols/json/schema/build-result-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/build-result-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libstore-tests/data/build-result
|
||||
136
doc/manual/source/protocols/json/schema/build-result-v1.yaml
Normal file
136
doc/manual/source/protocols/json/schema/build-result-v1.yaml
Normal file
|
|
@ -0,0 +1,136 @@
|
|||
"$schema": "http://json-schema.org/draft-04/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/build-result-v1.json"
|
||||
title: Build Result
|
||||
description: |
|
||||
This schema describes the JSON representation of Nix's `BuildResult` type, which represents the result of building a derivation or substituting store paths.
|
||||
|
||||
Build results can represent either successful builds (with built outputs) or various types of failures.
|
||||
|
||||
oneOf:
|
||||
- "$ref": "#/$defs/success"
|
||||
- "$ref": "#/$defs/failure"
|
||||
type: object
|
||||
required:
|
||||
- success
|
||||
- status
|
||||
properties:
|
||||
timesBuilt:
|
||||
type: integer
|
||||
minimum: 0
|
||||
title: Times built
|
||||
description: |
|
||||
How many times this build was performed.
|
||||
|
||||
startTime:
|
||||
type: integer
|
||||
minimum: 0
|
||||
title: Start time
|
||||
description: |
|
||||
The start time of the build (or one of the rounds, if it was repeated), as a Unix timestamp.
|
||||
|
||||
stopTime:
|
||||
type: integer
|
||||
minimum: 0
|
||||
title: Stop time
|
||||
description: |
|
||||
The stop time of the build (or one of the rounds, if it was repeated), as a Unix timestamp.
|
||||
|
||||
cpuUser:
|
||||
type: integer
|
||||
minimum: 0
|
||||
title: User CPU time
|
||||
description: |
|
||||
User CPU time the build took, in microseconds.
|
||||
|
||||
cpuSystem:
|
||||
type: integer
|
||||
minimum: 0
|
||||
title: System CPU time
|
||||
description: |
|
||||
System CPU time the build took, in microseconds.
|
||||
|
||||
"$defs":
|
||||
success:
|
||||
type: object
|
||||
title: Successful Build Result
|
||||
description: |
|
||||
Represents a successful build with built outputs.
|
||||
required:
|
||||
- success
|
||||
- status
|
||||
- builtOutputs
|
||||
properties:
|
||||
success:
|
||||
const: true
|
||||
title: Success indicator
|
||||
description: |
|
||||
Always true for successful build results.
|
||||
|
||||
status:
|
||||
type: string
|
||||
title: Success status
|
||||
description: |
|
||||
Status string for successful builds.
|
||||
enum:
|
||||
- "Built"
|
||||
- "Substituted"
|
||||
- "AlreadyValid"
|
||||
- "ResolvesToAlreadyValid"
|
||||
|
||||
builtOutputs:
|
||||
type: object
|
||||
title: Built outputs
|
||||
description: |
|
||||
A mapping from output names to their build trace entries.
|
||||
additionalProperties:
|
||||
"$ref": "build-trace-entry-v1.yaml"
|
||||
|
||||
failure:
|
||||
type: object
|
||||
title: Failed Build Result
|
||||
description: |
|
||||
Represents a failed build with error information.
|
||||
required:
|
||||
- success
|
||||
- status
|
||||
- errorMsg
|
||||
properties:
|
||||
success:
|
||||
const: false
|
||||
title: Success indicator
|
||||
description: |
|
||||
Always false for failed build results.
|
||||
|
||||
status:
|
||||
type: string
|
||||
title: Failure status
|
||||
description: |
|
||||
Status string for failed builds.
|
||||
enum:
|
||||
- "PermanentFailure"
|
||||
- "InputRejected"
|
||||
- "OutputRejected"
|
||||
- "TransientFailure"
|
||||
- "CachedFailure"
|
||||
- "TimedOut"
|
||||
- "MiscFailure"
|
||||
- "DependencyFailed"
|
||||
- "LogLimitExceeded"
|
||||
- "NotDeterministic"
|
||||
- "NoSubstituters"
|
||||
- "HashMismatch"
|
||||
|
||||
errorMsg:
|
||||
type: string
|
||||
title: Error message
|
||||
description: |
|
||||
Information about the error if the build failed.
|
||||
|
||||
isNonDeterministic:
|
||||
type: boolean
|
||||
title: Non-deterministic flag
|
||||
description: |
|
||||
If timesBuilt > 1, whether some builds did not produce the same result.
|
||||
|
||||
Note that 'isNonDeterministic = false' does not mean the build is deterministic,
|
||||
just that we don't have evidence of non-determinism.
|
||||
1
doc/manual/source/protocols/json/schema/build-trace-entry-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/build-trace-entry-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libstore-tests/data/realisation
|
||||
|
|
@ -0,0 +1,74 @@
|
|||
"$schema": "http://json-schema.org/draft-04/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/build-trace-entry-v1.json"
|
||||
title: Build Trace Entry
|
||||
description: |
|
||||
A record of a successful build outcome for a specific derivation output.
|
||||
|
||||
This schema describes the JSON representation of a [build trace entry](@docroot@/store/build-trace.md) entry.
|
||||
|
||||
> **Warning**
|
||||
>
|
||||
> This JSON format is currently
|
||||
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-ca-derivations)
|
||||
> and subject to change.
|
||||
|
||||
type: object
|
||||
required:
|
||||
- id
|
||||
- outPath
|
||||
- dependentRealisations
|
||||
- signatures
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
title: Derivation Output ID
|
||||
pattern: "^sha256:[0-9a-f]{64}![a-zA-Z_][a-zA-Z0-9_-]*$"
|
||||
description: |
|
||||
Unique identifier for the derivation output that was built.
|
||||
|
||||
Format: `{hash-quotient-drv}!{output-name}`
|
||||
|
||||
- **hash-quotient-drv**: SHA-256 [hash of the quotient derivation](@docroot@/store/derivation/outputs/input-address.md#hash-quotient-drv).
|
||||
Begins with `sha256:`.
|
||||
|
||||
- **output-name**: Name of the specific output (e.g., "out", "dev", "doc")
|
||||
|
||||
Example: `"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad!foo"`
|
||||
|
||||
outPath:
|
||||
"$ref": "store-path-v1.yaml"
|
||||
title: Output Store Path
|
||||
description: |
|
||||
The path to the store object that resulted from building this derivation for the given output name.
|
||||
|
||||
dependentRealisations:
|
||||
type: object
|
||||
title: Underlying Base Build Trace
|
||||
description: |
|
||||
This is for [*derived*](@docroot@/store/build-trace.md#derived) build trace entries to ensure coherence.
|
||||
|
||||
Keys are derivation output IDs (same format as the main `id` field).
|
||||
Values are the store paths that those dependencies resolved to.
|
||||
|
||||
As described in the linked section on derived build trace traces, derived build trace entries must be kept in addition and not instead of the underlying base build entries.
|
||||
This is the set of base build trace entries that this derived build trace is derived from.
|
||||
(The set is also a map since this miniature base build trace must be coherent, mapping each key to a single value.)
|
||||
|
||||
patternProperties:
|
||||
"^sha256:[0-9a-f]{64}![a-zA-Z_][a-zA-Z0-9_-]*$":
|
||||
$ref: "store-path-v1.yaml"
|
||||
title: Dependent Store Path
|
||||
description: Store path that this dependency resolved to during the build
|
||||
additionalProperties: false
|
||||
|
||||
signatures:
|
||||
type: array
|
||||
title: Build Signatures
|
||||
description: |
|
||||
A set of cryptographic signatures attesting to the authenticity of this build trace entry.
|
||||
items:
|
||||
type: string
|
||||
title: Signature
|
||||
description: A single cryptographic signature
|
||||
|
||||
additionalProperties: false
|
||||
1
doc/manual/source/protocols/json/schema/content-address-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/content-address-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libstore-tests/data/content-address
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
"$schema": "http://json-schema.org/draft-04/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/content-address-v1.json"
|
||||
title: Content Address
|
||||
description: |
|
||||
This schema describes the JSON representation of Nix's `ContentAddress` type, which conveys information about [content-addressing store objects](@docroot@/store/store-object/content-address.md).
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> For current methods of content addressing, this data type is a bit suspicious, because it is neither simply a content address of a file system object (the `method` is richer), nor simply a content address of a store object (the `hash` doesn't account for the references).
|
||||
> It should thus only be used in contexts where the references are also known / otherwise made tamper-resistant.
|
||||
|
||||
<!--
|
||||
TODO currently `ContentAddress` is used in both of these, and so same rationale applies, but actually in both cases the JSON is currently ad-hoc.
|
||||
That will be fixed, and as each is fixed, the example (along with a more precise link to the field in question) should be become part of the above note, so what is is saying is more clear.
|
||||
|
||||
> For example:
|
||||
|
||||
> - Fixed outputs of derivations are not allowed to have any references, so an empty reference set is statically known by assumption.
|
||||
|
||||
> - [Store object info](./store-object-info.md) includes the set of references along side the (optional) content address.
|
||||
|
||||
> This data type is thus safely used in both of these contexts.
|
||||
|
||||
-->
|
||||
|
||||
type: object
|
||||
properties:
|
||||
method:
|
||||
"$ref": "#/$defs/method"
|
||||
hash:
|
||||
title: Content Address
|
||||
description: |
|
||||
This would be the content-address itself.
|
||||
|
||||
For all current methods, this is just a content address of the file system object of the store object, [as described in the store chapter](@docroot@/store/file-system-object/content-address.md), and not of the store object as a whole.
|
||||
In particular, the references of the store object are *not* taken into account with this hash (and currently-supported methods).
|
||||
"$ref": "./hash-v1.yaml"
|
||||
required:
|
||||
- method
|
||||
- hash
|
||||
additionalProperties: false
|
||||
"$defs":
|
||||
method:
|
||||
type: string
|
||||
enum: [flat, nar, text, git]
|
||||
title: Content-Addressing Method
|
||||
description: |
|
||||
A string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen.
|
||||
|
||||
Valid method strings are:
|
||||
|
||||
- [`flat`](@docroot@/store/store-object/content-address.md#method-flat) (provided the contents are a single file)
|
||||
- [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive)
|
||||
- [`text`](@docroot@/store/store-object/content-address.md#method-text)
|
||||
- [`git`](@docroot@/store/store-object/content-address.md#method-git)
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
"$schema": http://json-schema.org/draft-04/schema#
|
||||
"$id": https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-v3.json
|
||||
"$schema": "http://json-schema.org/draft-04/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-v3.json"
|
||||
title: Derivation
|
||||
description: |
|
||||
Experimental JSON representation of a Nix derivation (version 3).
|
||||
|
|
@ -39,9 +39,9 @@ properties:
|
|||
This is a guard that allows us to continue evolving this format.
|
||||
The choice of `3` is fairly arbitrary, but corresponds to this informal version:
|
||||
|
||||
- Version 0: A-Term format
|
||||
- Version 0: ATerm format
|
||||
|
||||
- Version 1: Original JSON format, with ugly `"r:sha256"` inherited from A-Term format.
|
||||
- Version 1: Original JSON format, with ugly `"r:sha256"` inherited from ATerm format.
|
||||
|
||||
- Version 2: Separate `method` and `hashAlgo` fields in output specs
|
||||
|
||||
|
|
@ -68,7 +68,7 @@ properties:
|
|||
> }
|
||||
> ```
|
||||
additionalProperties:
|
||||
"$ref": "#/$defs/output"
|
||||
"$ref": "#/$defs/output/overall"
|
||||
|
||||
inputSrcs:
|
||||
type: array
|
||||
|
|
@ -85,7 +85,7 @@ properties:
|
|||
> ]
|
||||
> ```
|
||||
items:
|
||||
type: string
|
||||
$ref: "store-path-v1.yaml"
|
||||
|
||||
inputDrvs:
|
||||
type: object
|
||||
|
|
@ -103,6 +103,15 @@ properties:
|
|||
> ```
|
||||
>
|
||||
> specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`.
|
||||
patternProperties:
|
||||
"^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+\\.drv$":
|
||||
title: Store Path
|
||||
description: |
|
||||
A store path to a derivation, mapped to the outputs of that derivation.
|
||||
oneOf:
|
||||
- "$ref": "#/$defs/outputNames"
|
||||
- "$ref": "#/$defs/dynamicOutputs"
|
||||
additionalProperties: false
|
||||
|
||||
system:
|
||||
type: string
|
||||
|
|
@ -145,34 +154,138 @@ properties:
|
|||
|
||||
"$defs":
|
||||
output:
|
||||
overall:
|
||||
title: Derivation Output
|
||||
description: |
|
||||
A single output of a derivation, with different variants for different output types.
|
||||
oneOf:
|
||||
- "$ref": "#/$defs/output/inputAddressed"
|
||||
- "$ref": "#/$defs/output/caFixed"
|
||||
- "$ref": "#/$defs/output/caFloating"
|
||||
- "$ref": "#/$defs/output/deferred"
|
||||
- "$ref": "#/$defs/output/impure"
|
||||
|
||||
inputAddressed:
|
||||
title: Input-Addressed Output
|
||||
description: |
|
||||
The traditional non-fixed-output derivation type.
|
||||
The output path is determined from the derivation itself.
|
||||
|
||||
See [Input-addressing derivation outputs](@docroot@/store/derivation/outputs/input-address.md) for more details.
|
||||
type: object
|
||||
required:
|
||||
- path
|
||||
properties:
|
||||
path:
|
||||
$ref: "store-path-v1.yaml"
|
||||
title: Output path
|
||||
description: |
|
||||
The output path determined from the derivation itself.
|
||||
additionalProperties: false
|
||||
|
||||
caFixed:
|
||||
title: Fixed Content-Addressed Output
|
||||
description: |
|
||||
The output is content-addressed, and the content-address is fixed in advance.
|
||||
|
||||
See [Fixed-output content-addressing](@docroot@/store/derivation/outputs/content-address.md#fixed) for more details.
|
||||
type: object
|
||||
required:
|
||||
- method
|
||||
- hashAlgo
|
||||
- hash
|
||||
properties:
|
||||
method:
|
||||
"$ref": "./content-address-v1.yaml#/$defs/method"
|
||||
description: |
|
||||
Method of content addressing used for this output.
|
||||
hashAlgo:
|
||||
title: Hash algorithm
|
||||
"$ref": "./hash-v1.yaml#/$defs/algorithm"
|
||||
hash:
|
||||
type: string
|
||||
title: Expected hash value
|
||||
description: |
|
||||
The expected content hash in base-16.
|
||||
additionalProperties: false
|
||||
|
||||
caFloating:
|
||||
title: Floating Content-Addressed Output
|
||||
description: |
|
||||
Floating-output derivations, whose outputs are content
|
||||
addressed, but not fixed, and so the output paths are dynamically calculated from
|
||||
whatever the output ends up being.
|
||||
|
||||
See [Floating Content-Addressing](@docroot@/store/derivation/outputs/content-address.md#floating) for more details.
|
||||
type: object
|
||||
required:
|
||||
- method
|
||||
- hashAlgo
|
||||
properties:
|
||||
method:
|
||||
"$ref": "./content-address-v1.yaml#/$defs/method"
|
||||
description: |
|
||||
Method of content addressing used for this output.
|
||||
hashAlgo:
|
||||
title: Hash algorithm
|
||||
"$ref": "./hash-v1.yaml#/$defs/algorithm"
|
||||
description: |
|
||||
What hash algorithm to use for the given method of content-addressing.
|
||||
additionalProperties: false
|
||||
|
||||
deferred:
|
||||
title: Deferred Output
|
||||
description: |
|
||||
Input-addressed output which depends on a (CA) derivation whose outputs (and thus their content-address
|
||||
are not yet known.
|
||||
type: object
|
||||
properties: {}
|
||||
additionalProperties: false
|
||||
|
||||
impure:
|
||||
title: Impure Output
|
||||
description: |
|
||||
Impure output which is just like a floating content-addressed output, but this derivation runs without sandboxing.
|
||||
As such, we don't record it in the build trace, under the assumption that if we need it again, we should rebuild it, as it might produce something different.
|
||||
required:
|
||||
- impure
|
||||
- method
|
||||
- hashAlgo
|
||||
properties:
|
||||
impure:
|
||||
const: true
|
||||
method:
|
||||
"$ref": "./content-address-v1.yaml#/$defs/method"
|
||||
description: |
|
||||
How the file system objects will be serialized for hashing.
|
||||
hashAlgo:
|
||||
title: Hash algorithm
|
||||
"$ref": "./hash-v1.yaml#/$defs/algorithm"
|
||||
description: |
|
||||
How the serialization will be hashed.
|
||||
additionalProperties: false
|
||||
|
||||
outputName:
|
||||
type: string
|
||||
title: Output name
|
||||
description: Name of the derivation output to depend on
|
||||
|
||||
outputNames:
|
||||
type: array
|
||||
title: Output Names
|
||||
description: Set of names of derivation outputs to depend on
|
||||
items:
|
||||
"$ref": "#/$defs/outputName"
|
||||
|
||||
dynamicOutputs:
|
||||
type: object
|
||||
title: Dynamic Outputs
|
||||
description: |
|
||||
**Experimental feature**: [`dynamic-derivations`](@docroot@/development/experimental-features.md#xp-feature-dynamic-derivations)
|
||||
|
||||
This recursive data type allows for depending on outputs of outputs.
|
||||
properties:
|
||||
path:
|
||||
type: string
|
||||
title: Output path
|
||||
description: |
|
||||
The output path, if known in advance.
|
||||
|
||||
method:
|
||||
type: string
|
||||
title: Content addressing method
|
||||
enum: [flat, nar, text, git]
|
||||
description: |
|
||||
For an output which will be [content addressed](@docroot@/store/derivation/outputs/content-address.md), a string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen.
|
||||
|
||||
Valid method strings are:
|
||||
|
||||
- [`flat`](@docroot@/store/store-object/content-address.md#method-flat)
|
||||
- [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive)
|
||||
- [`text`](@docroot@/store/store-object/content-address.md#method-text)
|
||||
- [`git`](@docroot@/store/store-object/content-address.md#method-git)
|
||||
|
||||
hashAlgo:
|
||||
title: Hash algorithm
|
||||
"$ref": "./hash-v1.yaml#/$defs/algorithm"
|
||||
|
||||
hash:
|
||||
type: string
|
||||
title: Expected hash value
|
||||
description: |
|
||||
For fixed-output derivations, the expected content hash in base-16.
|
||||
outputs:
|
||||
"$ref": "#/$defs/outputNames"
|
||||
dynamicOutputs:
|
||||
"$ref": "#/$defs/dynamicOutputs"
|
||||
|
|
|
|||
1
doc/manual/source/protocols/json/schema/deriving-path-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/deriving-path-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libstore-tests/data/derived-path
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
"$schema": "http://json-schema.org/draft-04/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/deriving-path-v1.json"
|
||||
title: Deriving Path
|
||||
description: |
|
||||
This schema describes the JSON representation of Nix's [Deriving Path](@docroot@/store/derivation/index.md#deriving-path).
|
||||
oneOf:
|
||||
- title: Constant
|
||||
description: |
|
||||
See [Constant](@docroot@/store/derivation/index.md#deriving-path-constant) deriving path.
|
||||
$ref: "store-path-v1.yaml"
|
||||
- title: Output
|
||||
description: |
|
||||
See [Output](@docroot@/store/derivation/index.md#deriving-path-output) deriving path.
|
||||
type: object
|
||||
properties:
|
||||
drvPath:
|
||||
"$ref": "#"
|
||||
description: |
|
||||
A deriving path to a [Derivation](@docroot@/store/derivation/index.md#store-derivation), whose output is being referred to.
|
||||
output:
|
||||
type: string
|
||||
description: |
|
||||
The name of an output produced by that derivation (e.g. "out", "doc", etc.).
|
||||
required:
|
||||
- drvPath
|
||||
- output
|
||||
additionalProperties: false
|
||||
1
doc/manual/source/protocols/json/schema/file-system-object-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/file-system-object-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libutil-tests/data/memory-source-accessor
|
||||
|
|
@ -0,0 +1,65 @@
|
|||
"$schema": http://json-schema.org/draft-04/schema#
|
||||
"$id": https://nix.dev/manual/nix/latest/protocols/json/schema/file-system-object-v1.json
|
||||
title: File System Object
|
||||
description: |
|
||||
This schema describes the JSON representation of Nix's [File System Object](@docroot@/store/file-system-object.md).
|
||||
|
||||
The schema is recursive because file system objects contain other file system objects.
|
||||
type: object
|
||||
required: ["type"]
|
||||
properties:
|
||||
type:
|
||||
type: string
|
||||
enum: ["regular", "symlink", "directory"]
|
||||
|
||||
# Enforce conditional structure based on `type`
|
||||
anyOf:
|
||||
- $ref: "#/$defs/regular"
|
||||
required: ["type", "contents"]
|
||||
|
||||
- $ref: "#/$defs/symlink"
|
||||
required: ["type", "target"]
|
||||
|
||||
- $ref: "#/$defs/directory"
|
||||
required: ["type", "contents"]
|
||||
|
||||
"$defs":
|
||||
regular:
|
||||
title: Regular File
|
||||
required: ["contents"]
|
||||
properties:
|
||||
type:
|
||||
const: "regular"
|
||||
contents:
|
||||
type: string
|
||||
description: Base64-encoded file contents
|
||||
executable:
|
||||
type: boolean
|
||||
description: Whether the file is executable.
|
||||
default: false
|
||||
additionalProperties: false
|
||||
|
||||
symlink:
|
||||
title: Symbolic Link
|
||||
required: ["target"]
|
||||
properties:
|
||||
type:
|
||||
const: "symlink"
|
||||
target:
|
||||
type: string
|
||||
description: Target path of the symlink.
|
||||
additionalProperties: false
|
||||
|
||||
directory:
|
||||
title: Directory
|
||||
required: ["contents"]
|
||||
properties:
|
||||
type:
|
||||
const: "directory"
|
||||
contents:
|
||||
type: object
|
||||
description: |
|
||||
Map of names to nested file system objects (for type=directory)
|
||||
additionalProperties:
|
||||
$ref: "#"
|
||||
additionalProperties: false
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
"$schema": http://json-schema.org/draft-04/schema#
|
||||
"$id": https://nix.dev/manual/nix/latest/protocols/json/schema/hash-v1.json
|
||||
"$schema": "http://json-schema.org/draft-04/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/hash-v1.json"
|
||||
title: Hash
|
||||
description: |
|
||||
A cryptographic hash value used throughout Nix for content addressing and integrity verification.
|
||||
|
|
@ -51,4 +51,4 @@ additionalProperties: false
|
|||
description: |
|
||||
The hash algorithm used to compute the hash value.
|
||||
|
||||
`blake3` is currently experimental and requires the [`blake-hashing`](@docroot@/development/experimental-features.md#xp-feature-blake-hashing) experimental feature.
|
||||
`blake3` is currently experimental and requires the [`blake-hashing`](@docroot@/development/experimental-features.md#xp-feature-blake3-hashes) experimental feature.
|
||||
|
|
|
|||
1
doc/manual/source/protocols/json/schema/nar-info-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/nar-info-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libstore-tests/data/nar-info
|
||||
1
doc/manual/source/protocols/json/schema/store-object-info-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/store-object-info-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libstore-tests/data/path-info
|
||||
|
|
@ -0,0 +1,235 @@
|
|||
"$schema": "http://json-schema.org/draft-07/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-object-info-v1.json"
|
||||
title: Store Object Info
|
||||
description: |
|
||||
Information about a [store object](@docroot@/store/store-object.md).
|
||||
|
||||
This schema describes the JSON representation of store object metadata as returned by commands like [`nix path-info --json`](@docroot@/command-ref/new-cli/nix3-path-info.md).
|
||||
|
||||
> **Warning**
|
||||
>
|
||||
> This JSON format is currently
|
||||
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
|
||||
> and subject to change.
|
||||
|
||||
### Field Categories
|
||||
|
||||
Store object information can come in a few different variations.
|
||||
|
||||
Firstly, "impure" fields, which contain non-intrinsic information about the store object, may or may not be included.
|
||||
|
||||
Second, binary cache stores have extra non-intrinsic information about the store objects they contain.
|
||||
|
||||
Thirdly, [`nix path-info --json --closure-size`](@docroot@/command-ref/new-cli/nix3-path-info.html#opt-closure-size) can compute some extra information about not just the single store object in question, but the store object and its [closure](@docroot@/glossary.md#gloss-closure).
|
||||
|
||||
The impure and NAR fields are grouped into separate variants below.
|
||||
See their descriptions for additional information.
|
||||
The closure fields however are just included as optional fields, to avoid a combinatorial explosion of variants.
|
||||
|
||||
oneOf:
|
||||
- $ref: "#/$defs/base"
|
||||
|
||||
- $ref: "#/$defs/impure"
|
||||
|
||||
- $ref: "#/$defs/narInfo"
|
||||
|
||||
$defs:
|
||||
base:
|
||||
title: Store Object Info
|
||||
description: |
|
||||
Basic store object metadata containing only intrinsic properties.
|
||||
This is the minimal set of fields that describe what a store object contains.
|
||||
type: object
|
||||
required:
|
||||
- narHash
|
||||
- narSize
|
||||
- references
|
||||
- ca
|
||||
properties:
|
||||
path:
|
||||
type: string
|
||||
title: Store Path
|
||||
description: |
|
||||
[Store path](@docroot@/store/store-path.md) to the given store object.
|
||||
|
||||
Note: This field may not be present in all contexts, such as when the path is used as the key and the store object info is the value in a map.
|
||||
|
||||
narHash:
|
||||
type: string
|
||||
title: NAR Hash
|
||||
description: |
|
||||
Hash of the [file system object](@docroot@/store/file-system-object.md) part of the store object when serialized as a [Nix Archive](@docroot@/store/file-system-object/content-address.md#serial-nix-archive).
|
||||
|
||||
narSize:
|
||||
type: integer
|
||||
minimum: 0
|
||||
title: NAR Size
|
||||
description: |
|
||||
Size of the [file system object](@docroot@/store/file-system-object.md) part of the store object when serialized as a [Nix Archive](@docroot@/store/file-system-object/content-address.md#serial-nix-archive).
|
||||
|
||||
references:
|
||||
type: array
|
||||
title: References
|
||||
description: |
|
||||
An array of [store paths](@docroot@/store/store-path.md), possibly including this one.
|
||||
items:
|
||||
type: string
|
||||
|
||||
ca:
|
||||
type: ["string", "null"]
|
||||
title: Content Address
|
||||
description: |
|
||||
If the store object is [content-addressed](@docroot@/store/store-object/content-address.md),
|
||||
this is the content address of this store object's file system object, used to compute its store path.
|
||||
Otherwise (i.e. if it is [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object)), this is `null`.
|
||||
additionalProperties: false
|
||||
|
||||
impure:
|
||||
title: Store Object Info with Impure Fields
|
||||
description: |
|
||||
Store object metadata including impure fields that are not *intrinsic* properties.
|
||||
In other words, the same store object in different stores could have different values for these impure fields.
|
||||
type: object
|
||||
required:
|
||||
- narHash
|
||||
- narSize
|
||||
- references
|
||||
- ca
|
||||
# impure
|
||||
- deriver
|
||||
- registrationTime
|
||||
- ultimate
|
||||
- signatures
|
||||
properties:
|
||||
path: { $ref: "#/$defs/base/properties/path" }
|
||||
narHash: { $ref: "#/$defs/base/properties/narHash" }
|
||||
narSize: { $ref: "#/$defs/base/properties/narSize" }
|
||||
references: { $ref: "#/$defs/base/properties/references" }
|
||||
ca: { $ref: "#/$defs/base/properties/ca" }
|
||||
deriver:
|
||||
type: ["string", "null"]
|
||||
title: Deriver
|
||||
description: |
|
||||
If known, the path to the [store derivation](@docroot@/glossary.md#gloss-store-derivation) from which this store object was produced.
|
||||
Otherwise `null`.
|
||||
|
||||
> This is an "impure" field that may not be included in certain contexts.
|
||||
|
||||
registrationTime:
|
||||
type: ["integer", "null"]
|
||||
title: Registration Time
|
||||
description: |
|
||||
If known, when this derivation was added to the store (Unix timestamp).
|
||||
Otherwise `null`.
|
||||
|
||||
> This is an "impure" field that may not be included in certain contexts.
|
||||
|
||||
ultimate:
|
||||
type: boolean
|
||||
title: Ultimate
|
||||
description: |
|
||||
Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere.
|
||||
|
||||
> This is an "impure" field that may not be included in certain contexts.
|
||||
|
||||
signatures:
|
||||
type: array
|
||||
title: Signatures
|
||||
description: |
|
||||
Signatures claiming that this store object is what it claims to be.
|
||||
Not relevant for [content-addressed](@docroot@/store/store-object/content-address.md) store objects,
|
||||
but useful for [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object) store objects.
|
||||
|
||||
> This is an "impure" field that may not be included in certain contexts.
|
||||
items:
|
||||
type: string
|
||||
|
||||
# Computed closure fields
|
||||
closureSize:
|
||||
type: integer
|
||||
minimum: 0
|
||||
title: Closure Size
|
||||
description: |
|
||||
The total size of this store object and every other object in its [closure](@docroot@/glossary.md#gloss-closure).
|
||||
|
||||
> This field is not stored at all, but computed by traversing the other fields across all the store objects in a closure.
|
||||
additionalProperties: false
|
||||
|
||||
narInfo:
|
||||
title: Store Object Info with Impure fields and NAR Info
|
||||
description: |
|
||||
The store object info in the "binary cache" family of Nix store types contains extra information pertaining to *downloads* of the store object in question.
|
||||
(This store info is called "NAR info", since the downloads take the form of [Nix Archives](@docroot@/store/file-system-object/content-address.md#serial-nix-archive), and the metadata is served in a file with a `.narinfo` extension.)
|
||||
|
||||
This download information, being specific to how the store object happens to be stored and transferred, is also considered to be non-intrinsic / impure.
|
||||
type: object
|
||||
required:
|
||||
- narHash
|
||||
- narSize
|
||||
- references
|
||||
- ca
|
||||
# impure
|
||||
- deriver
|
||||
- registrationTime
|
||||
- ultimate
|
||||
- signatures
|
||||
# nar
|
||||
- url
|
||||
- compression
|
||||
- downloadHash
|
||||
- downloadSize
|
||||
properties:
|
||||
path: { $ref: "#/$defs/base/properties/path" }
|
||||
narHash: { $ref: "#/$defs/base/properties/narHash" }
|
||||
narSize: { $ref: "#/$defs/base/properties/narSize" }
|
||||
references: { $ref: "#/$defs/base/properties/references" }
|
||||
ca: { $ref: "#/$defs/base/properties/ca" }
|
||||
deriver: { $ref: "#/$defs/impure/properties/deriver" }
|
||||
registrationTime: { $ref: "#/$defs/impure/properties/registrationTime" }
|
||||
ultimate: { $ref: "#/$defs/impure/properties/ultimate" }
|
||||
signatures: { $ref: "#/$defs/impure/properties/signatures" }
|
||||
closureSize: { $ref: "#/$defs/impure/properties/closureSize" }
|
||||
url:
|
||||
type: string
|
||||
title: URL
|
||||
description: |
|
||||
Where to download a compressed archive of the file system objects of this store object.
|
||||
|
||||
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||
|
||||
compression:
|
||||
type: string
|
||||
title: Compression
|
||||
description: |
|
||||
The compression format that the archive is in.
|
||||
|
||||
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||
|
||||
downloadHash:
|
||||
type: string
|
||||
title: Download Hash
|
||||
description: |
|
||||
A digest for the compressed archive itself, as opposed to the data contained within.
|
||||
|
||||
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||
|
||||
downloadSize:
|
||||
type: integer
|
||||
minimum: 0
|
||||
title: Download Size
|
||||
description: |
|
||||
The size of the compressed archive itself.
|
||||
|
||||
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||
|
||||
closureDownloadSize:
|
||||
type: integer
|
||||
minimum: 0
|
||||
title: Closure Download Size
|
||||
description: |
|
||||
The total size of the compressed archive itself for this object, and the compressed archive of every object in this object's [closure](@docroot@/glossary.md#gloss-closure).
|
||||
|
||||
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||
|
||||
> This field is not stored at all, but computed by traversing the other fields across all the store objects in a closure.
|
||||
additionalProperties: false
|
||||
1
doc/manual/source/protocols/json/schema/store-path-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/store-path-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libstore-tests/data/store-path
|
||||
32
doc/manual/source/protocols/json/schema/store-path-v1.yaml
Normal file
32
doc/manual/source/protocols/json/schema/store-path-v1.yaml
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
"$schema": "http://json-schema.org/draft-07/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-path-v1.json"
|
||||
title: Store Path
|
||||
description: |
|
||||
A [store path](@docroot@/store/store-path.md) identifying a store object.
|
||||
|
||||
This schema describes the JSON representation of store paths as used in various Nix JSON APIs.
|
||||
|
||||
> **Warning**
|
||||
>
|
||||
> This JSON format is currently
|
||||
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
|
||||
> and subject to change.
|
||||
|
||||
## Format
|
||||
|
||||
Store paths in JSON are represented as strings containing just the hash and name portion, without the store directory prefix.
|
||||
|
||||
For example: `"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"`
|
||||
|
||||
(If the store dir is `/nix/store`, then this corresponds to the path `/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv`.)
|
||||
|
||||
## Structure
|
||||
|
||||
The format follows this pattern: `${digest}-${name}`
|
||||
|
||||
- **digest**: Digest rendered in a custom variant of [Base32](https://en.wikipedia.org/wiki/Base32) (20 arbitrary bytes become 32 ASCII characters)
|
||||
- **name**: The package name and optional version/suffix information
|
||||
|
||||
type: string
|
||||
pattern: "^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+$"
|
||||
minLength: 34
|
||||
|
|
@ -1,102 +1,45 @@
|
|||
# Store object info JSON format
|
||||
{{#include store-object-info-v1-fixed.md}}
|
||||
|
||||
> **Warning**
|
||||
>
|
||||
> This JSON format is currently
|
||||
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
|
||||
> and subject to change.
|
||||
## Examples
|
||||
|
||||
Info about a [store object].
|
||||
### Minimal store object (content-addressed)
|
||||
|
||||
* `path`:
|
||||
```json
|
||||
{{#include schema/store-object-info-v1/pure.json}}
|
||||
```
|
||||
|
||||
[Store path][store path] to the given store object.
|
||||
### Store object with impure fields
|
||||
|
||||
* `narHash`:
|
||||
```json
|
||||
{{#include schema/store-object-info-v1/impure.json}}
|
||||
```
|
||||
|
||||
Hash of the [file system object] part of the store object when serialized as a [Nix Archive].
|
||||
### Minimal store object (empty)
|
||||
|
||||
* `narSize`:
|
||||
```json
|
||||
{{#include schema/store-object-info-v1/empty_pure.json}}
|
||||
```
|
||||
|
||||
Size of the [file system object] part of the store object when serialized as a [Nix Archive].
|
||||
### Store object with all impure fields
|
||||
|
||||
* `references`:
|
||||
```json
|
||||
{{#include schema/store-object-info-v1/empty_impure.json}}
|
||||
```
|
||||
|
||||
An array of [store paths][store path], possibly including this one.
|
||||
### NAR info (minimal)
|
||||
|
||||
* `ca`:
|
||||
```json
|
||||
{{#include schema/nar-info-v1/pure.json}}
|
||||
```
|
||||
|
||||
If the store object is [content-addressed],
|
||||
this is the content address of this store object's file system object, used to compute its store path.
|
||||
Otherwise (i.e. if it is [input-addressed]), this is `null`.
|
||||
### NAR info (with binary cache fields)
|
||||
|
||||
[store path]: @docroot@/store/store-path.md
|
||||
[file system object]: @docroot@/store/file-system-object.md
|
||||
[Nix Archive]: @docroot@/store/file-system-object/content-address.md#serial-nix-archive
|
||||
```json
|
||||
{{#include schema/nar-info-v1/impure.json}}
|
||||
```
|
||||
|
||||
## Impure fields
|
||||
<!-- need to convert YAML to JSON first
|
||||
## Raw Schema
|
||||
|
||||
These are not intrinsic properties of the store object.
|
||||
In other words, the same store object residing in different stores could have different values for these properties.
|
||||
|
||||
* `deriver`:
|
||||
|
||||
If known, the path to the [store derivation] from which this store object was produced.
|
||||
Otherwise `null`.
|
||||
|
||||
[store derivation]: @docroot@/glossary.md#gloss-store-derivation
|
||||
|
||||
* `registrationTime` (optional):
|
||||
|
||||
If known, when this derivation was added to the store.
|
||||
Otherwise `null`.
|
||||
|
||||
* `ultimate`:
|
||||
|
||||
Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere.
|
||||
|
||||
* `signatures`:
|
||||
|
||||
Signatures claiming that this store object is what it claims to be.
|
||||
Not relevant for [content-addressed] store objects,
|
||||
but useful for [input-addressed] store objects.
|
||||
|
||||
[content-addressed]: @docroot@/store/store-object/content-address.md
|
||||
[input-addressed]: @docroot@/glossary.md#gloss-input-addressed-store-object
|
||||
|
||||
### `.narinfo` extra fields
|
||||
|
||||
This meta data is specific to the "binary cache" family of Nix store types.
|
||||
This information is not intrinsic to the store object, but about how it is stored.
|
||||
|
||||
* `url`:
|
||||
|
||||
Where to download a compressed archive of the file system objects of this store object.
|
||||
|
||||
* `compression`:
|
||||
|
||||
The compression format that the archive is in.
|
||||
|
||||
* `fileHash`:
|
||||
|
||||
A digest for the compressed archive itself, as opposed to the data contained within.
|
||||
|
||||
* `fileSize`:
|
||||
|
||||
The size of the compressed archive itself.
|
||||
|
||||
## Computed closure fields
|
||||
|
||||
These fields are not stored at all, but computed by traversing the other fields across all the store objects in a [closure].
|
||||
|
||||
* `closureSize`:
|
||||
|
||||
The total size of the compressed archive itself for this object, and the compressed archive of every object in this object's [closure].
|
||||
|
||||
### `.narinfo` extra fields
|
||||
|
||||
* `closureSize`:
|
||||
|
||||
The total size of this store object and every other object in its [closure].
|
||||
|
||||
[closure]: @docroot@/glossary.md#gloss-closure
|
||||
[JSON Schema for Store Object Info v1](schema/store-object-info-v1.json)
|
||||
-->
|
||||
|
|
|
|||
15
doc/manual/source/protocols/json/store-path.md
Normal file
15
doc/manual/source/protocols/json/store-path.md
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
{{#include store-path-v1-fixed.md}}
|
||||
|
||||
## Examples
|
||||
|
||||
### Simple store path
|
||||
|
||||
```json
|
||||
{{#include schema/store-path-v1/simple.json}}
|
||||
```
|
||||
|
||||
<!-- need to convert YAML to JSON first
|
||||
## Raw Schema
|
||||
|
||||
[JSON Schema for Store Path v1](schema/store-path-v1.json)
|
||||
-->
|
||||
|
|
@ -4,7 +4,7 @@ This is the complete specification of the [Nix Archive] format.
|
|||
The Nix Archive format closely follows the abstract specification of a [file system object] tree,
|
||||
because it is designed to serialize exactly that data structure.
|
||||
|
||||
[Nix Archive]: @docroot@/store/file-system-object/content-address.md#nix-archive
|
||||
[Nix Archive]: @docroot@/store/file-system-object/content-address.md#serial-nix-archive
|
||||
[file system object]: @docroot@/store/file-system-object.md
|
||||
|
||||
The format of this specification is close to [Extended Backus–Naur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), with the exception of the `str(..)` function / parameterized rule, which length-prefixes and pads strings.
|
||||
|
|
@ -41,3 +41,15 @@ The `str` function / parameterized rule is defined as follows:
|
|||
- `int(n)` = the 64-bit little endian representation of the number `n`
|
||||
|
||||
- `pad(s)` = the byte sequence `s`, padded with 0s to a multiple of 8 byte
|
||||
|
||||
## Kaitai Struct Specification
|
||||
|
||||
The Nix Archive (NAR) format is also formally described using [Kaitai Struct](https://kaitai.io/), an Interface Description Language (IDL) for defining binary data structures.
|
||||
|
||||
> Kaitai Struct provides a language-agnostic, machine-readable specification that can be compiled into parsers for various programming languages (e.g., C++, Python, Java, Rust).
|
||||
|
||||
```yaml
|
||||
{{#include nar.ksy}}
|
||||
```
|
||||
|
||||
The source of the spec can be found [here](https://github.com/nixos/nix/blob/master/src/nix-manual/source/protocols/nix-archive/nar.ksy). Contributions and improvements to the spec are welcomed.
|
||||
169
doc/manual/source/protocols/nix-archive/nar.ksy
Normal file
169
doc/manual/source/protocols/nix-archive/nar.ksy
Normal file
|
|
@ -0,0 +1,169 @@
|
|||
meta:
|
||||
id: nix_nar
|
||||
title: Nix Archive (NAR)
|
||||
file-extension: nar
|
||||
endian: le
|
||||
doc: |
|
||||
Nix Archive (NAR) format. A simple, reproducible binary archive
|
||||
format used by the Nix package manager to serialize file system objects.
|
||||
doc-ref: 'https://nixos.org/manual/nix/stable/command-ref/nix-store.html#nar-format'
|
||||
|
||||
seq:
|
||||
- id: magic
|
||||
type: padded_str
|
||||
doc: "Magic string, must be 'nix-archive-1'."
|
||||
valid:
|
||||
expr: _.body == 'nix-archive-1'
|
||||
- id: root_node
|
||||
type: node
|
||||
doc: "The root of the archive, which is always a single node."
|
||||
|
||||
types:
|
||||
padded_str:
|
||||
doc: |
|
||||
A string, prefixed with its length (u8le) and
|
||||
padded with null bytes to the next 8-byte boundary.
|
||||
seq:
|
||||
- id: len_str
|
||||
type: u8
|
||||
- id: body
|
||||
type: str
|
||||
size: len_str
|
||||
encoding: 'ASCII'
|
||||
- id: padding
|
||||
size: (8 - (len_str % 8)) % 8
|
||||
|
||||
node:
|
||||
doc: "A single filesystem node (file, directory, or symlink)."
|
||||
seq:
|
||||
- id: open_paren
|
||||
type: padded_str
|
||||
doc: "Must be '(', a token starting the node definition."
|
||||
valid:
|
||||
expr: _.body == '('
|
||||
- id: type_key
|
||||
type: padded_str
|
||||
doc: "Must be 'type'."
|
||||
valid:
|
||||
expr: _.body == 'type'
|
||||
- id: type_val
|
||||
type: padded_str
|
||||
doc: "The type of the node: 'regular', 'directory', or 'symlink'."
|
||||
- id: body
|
||||
type:
|
||||
switch-on: type_val.body
|
||||
cases:
|
||||
"'directory'": type_directory
|
||||
"'regular'": type_regular
|
||||
"'symlink'": type_symlink
|
||||
- id: close_paren
|
||||
type: padded_str
|
||||
valid:
|
||||
expr: _.body == ')'
|
||||
if: "type_val.body != 'directory'"
|
||||
doc: "Must be ')', a token ending the node definition."
|
||||
|
||||
type_directory:
|
||||
doc: "A directory node, containing a list of entries. Entries must be ordered by their names."
|
||||
seq:
|
||||
- id: entries
|
||||
type: dir_entry
|
||||
repeat: until
|
||||
repeat-until: _.kind.body == ')'
|
||||
types:
|
||||
dir_entry:
|
||||
doc: "A single entry within a directory, or a terminator."
|
||||
seq:
|
||||
- id: kind
|
||||
type: padded_str
|
||||
valid:
|
||||
expr: _.body == 'entry' or _.body == ')'
|
||||
doc: "Must be 'entry' (for a child node) or ')' (for terminator)."
|
||||
- id: open_paren
|
||||
type: padded_str
|
||||
valid:
|
||||
expr: _.body == '('
|
||||
if: 'kind.body == "entry"'
|
||||
- id: name_key
|
||||
type: padded_str
|
||||
valid:
|
||||
expr: _.body == 'name'
|
||||
if: 'kind.body == "entry"'
|
||||
- id: name
|
||||
type: padded_str
|
||||
if: 'kind.body == "entry"'
|
||||
- id: node_key
|
||||
type: padded_str
|
||||
valid:
|
||||
expr: _.body == 'node'
|
||||
if: 'kind.body == "entry"'
|
||||
- id: node
|
||||
type: node
|
||||
if: 'kind.body == "entry"'
|
||||
doc: "The child node, present only if kind is 'entry'."
|
||||
- id: close_paren
|
||||
type: padded_str
|
||||
valid:
|
||||
expr: _.body == ')'
|
||||
if: 'kind.body == "entry"'
|
||||
instances:
|
||||
is_terminator:
|
||||
value: kind.body == ')'
|
||||
|
||||
type_regular:
|
||||
doc: "A regular file node."
|
||||
seq:
|
||||
# Read attributes (like 'executable') until we hit 'contents'
|
||||
- id: attributes
|
||||
type: reg_attribute
|
||||
repeat: until
|
||||
repeat-until: _.key.body == "contents"
|
||||
# After the 'contents' token, read the file data
|
||||
- id: file_data
|
||||
type: file_content
|
||||
instances:
|
||||
is_executable:
|
||||
value: 'attributes[0].key.body == "executable"'
|
||||
doc: "True if the file has the 'executable' attribute."
|
||||
types:
|
||||
reg_attribute:
|
||||
doc: "An attribute of the file, e.g., 'executable' or 'contents'."
|
||||
seq:
|
||||
- id: key
|
||||
type: padded_str
|
||||
doc: "Attribute key, e.g., 'executable' or 'contents'."
|
||||
valid:
|
||||
expr: _.body == 'executable' or _.body == 'contents'
|
||||
- id: value
|
||||
type: padded_str
|
||||
if: 'key.body == "executable"'
|
||||
valid:
|
||||
expr: _.body == ''
|
||||
doc: "Must be '' if key is 'executable'."
|
||||
file_content:
|
||||
doc: "The raw data of the file, prefixed by length."
|
||||
seq:
|
||||
- id: len_contents
|
||||
type: u8
|
||||
# # This relies on the property of instances that they are lazily evaluated and cached.
|
||||
- size: 0
|
||||
if: nar_offset < 0
|
||||
- id: contents
|
||||
size: len_contents
|
||||
- id: padding
|
||||
size: (8 - (len_contents % 8)) % 8
|
||||
instances:
|
||||
nar_offset:
|
||||
value: _io.pos
|
||||
|
||||
type_symlink:
|
||||
doc: "A symbolic link node."
|
||||
seq:
|
||||
- id: target_key
|
||||
type: padded_str
|
||||
doc: "Must be 'target'."
|
||||
valid:
|
||||
expr: _.body == 'target'
|
||||
- id: target_val
|
||||
type: padded_str
|
||||
doc: "The destination path of the symlink."
|
||||
|
|
@ -13,7 +13,7 @@
|
|||
|
||||
- The `discard-references` feature has been stabilized.
|
||||
This means that the
|
||||
[unsafeDiscardReferences](@docroot@/development/experimental-features.md#xp-feature-discard-references)
|
||||
[unsafeDiscardReferences](@docroot@/language/advanced-attributes.md#adv-attr-unsafeDiscardReferences)
|
||||
attribute is no longer guarded by an experimental flag and can be used
|
||||
freely.
|
||||
|
||||
|
|
|
|||
|
|
@ -17,8 +17,8 @@
|
|||
|
||||
- `nix-shell` shebang lines now support single-quoted arguments.
|
||||
|
||||
- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/development/experimental-features.md#xp-fetch-tree).
|
||||
This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/development/experimental-features.md#xp-fetch-tree).
|
||||
- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/development/experimental-features.md#xp-feature-fetch-tree).
|
||||
This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/development/experimental-features.md#xp-feature-flakes).
|
||||
|
||||
- The interface for creating and updating lock files has been overhauled:
|
||||
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@
|
|||
|
||||
- Modify `nix derivation {add,show}` JSON format [#9866](https://github.com/NixOS/nix/issues/9866) [#10722](https://github.com/NixOS/nix/pull/10722)
|
||||
|
||||
The JSON format for derivations has been slightly revised to better conform to our [JSON guidelines](@docroot@/development/cli-guideline.md#returning-future-proof-json).
|
||||
The JSON format for derivations has been slightly revised to better conform to our [JSON guidelines](@docroot@/development/json-guideline.md).
|
||||
In particular, the hash algorithm and content addressing method of content-addressed derivation outputs are now separated into two fields `hashAlgo` and `method`,
|
||||
rather than one field with an arcane `:`-separated format.
|
||||
|
||||
|
|
|
|||
|
|
@ -93,7 +93,7 @@
|
|||
|
||||
- Support unit prefixes in configuration settings [#10668](https://github.com/NixOS/nix/pull/10668)
|
||||
|
||||
Configuration settings in Nix now support unit prefixes, allowing for more intuitive and readable configurations. For example, you can now specify [`--min-free 1G`](@docroot@/command-ref/opt-common.md#opt-min-free) to set the minimum free space to 1 gigabyte.
|
||||
Configuration settings in Nix now support unit prefixes, allowing for more intuitive and readable configurations. For example, you can now specify [`--min-free 1G`](@docroot@/command-ref/conf-file.md#conf-min-free) to set the minimum free space to 1 gigabyte.
|
||||
|
||||
This enhancement was extracted from [#7851](https://github.com/NixOS/nix/pull/7851) and is also useful for PR [#10661](https://github.com/NixOS/nix/pull/10661).
|
||||
|
||||
|
|
|
|||
53
doc/manual/source/store/build-trace.md
Normal file
53
doc/manual/source/store/build-trace.md
Normal file
|
|
@ -0,0 +1,53 @@
|
|||
# Build Trace
|
||||
|
||||
> **Warning**
|
||||
>
|
||||
> This entire concept is currently
|
||||
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-ca-derivations)
|
||||
> and subject to change.
|
||||
|
||||
The *build trace* is a [memoization table](https://en.wikipedia.org/wiki/Memoization) for builds.
|
||||
It maps the inputs of builds to the outputs of builds.
|
||||
Concretely, that means it maps [derivations][derivation] to maps of [output] names to [store objects][store object].
|
||||
|
||||
In general the derivations used as a key should be [*resolved*](./resolution.md).
|
||||
A build trace with all-resolved-derivation keys is also called a *base build trace* for extra clarity.
|
||||
If all the resolved inputs of a derivation are content-addressed, that means the inputs will be fully determined, leaving no ambiguity for what build was performed.
|
||||
(Input-addressed inputs however are still ambiguous. They too should be locked down, but this is left as future work.)
|
||||
|
||||
Accordingly, to look up an unresolved derivation, one must first resolve it to get a resolved derivation.
|
||||
Resolving itself involves looking up entries in the build trace, so this is a mutually recursive process that will end up inspecting possibly many entries.
|
||||
|
||||
Except for the issue with input-addressed paths called out above, base build traces are trivially *coherent* -- incoherence is not possible.
|
||||
That means that the claims that each key-value base build trace entry makes are independent, and no mapping invalidates another mapping.
|
||||
|
||||
Whether the mappings are *true*, i.e. the faithful recording of actual builds performed, is another matter.
|
||||
Coherence is about the multiple claims of the build trace being mutually consistent, not about whether the claims are individually true or false.
|
||||
|
||||
In general, there is no way to audit a build trace entry except for by performing the build again from scratch.
|
||||
And even in that case, a different result doesn't mean the original entry was a "lie", because the derivation being built may be non-deterministic.
|
||||
As such, the decision of whether to trust a counterparty's build trace is a fundamentally subjective policy choice.
|
||||
Build trace entries are typically *signed* in order to enable arbitrary public-key-based trust polices.
|
||||
|
||||
## Derived build traces {#derived}
|
||||
|
||||
Implementations that wish to memoize the above may also keep additional *derived* build trace entries that do map unresolved derivations.
|
||||
But if they do so, they *must* also keep the underlying base entries with resolved derivation keys around.
|
||||
Firstly, this ensures that the derived entries are merely cache, which could be recomputed from scratch.
|
||||
Secondly, this ensures the coherence of the derived build trace.
|
||||
|
||||
Unlike with base build traces, incoherence with derived build traces is possible.
|
||||
The key ingredient is that derivation resolution is only deterministic with respect to a fixed base build trace.
|
||||
Without fixing the base build trace, it inherits the subjectivity of base build traces themselves.
|
||||
|
||||
Concretely, suppose there are three derivations \\(a\\), \\(b\\), and \\(c\\).
|
||||
Let \\(a\\) be a resolved derivation, but let \\(b\\) and \\(c\\) be unresolved and both take as an input an output of \\(a\\).
|
||||
Now suppose that derived entries are made for \\(b\\) and \\(c\\) based on two different entries of \\(a\\).
|
||||
(This could happen if \\(a\\) is non-deterministic, \\(a\\) and \\(b\\) are built in one store, \\(a\\) and \\(c\\) are built in another store, and then a third store substitutes from both of the first two stores.)
|
||||
|
||||
If trusting the derived build trace entries for \\(b\\) and \\(c\\) requires that each's underlying entry for \\(a\\) be also trusted, the two different mappings for \\(a\\) will be caught.
|
||||
However, if \\(b\\) and \\(c\\)'s entries can be combined in isolation, there will be nothing to catch the contradiction in their hidden assumptions about \\(a\\)'s output.
|
||||
|
||||
[derivation]: ./derivation/index.md
|
||||
[output]: ./derivation/outputs/index.md
|
||||
[store object]: @docroot@/store/store-object.md
|
||||
|
|
@ -8,7 +8,7 @@
|
|||
|
||||
- Once this is done, the derivation is *normalized*, replacing each input deriving path with its store path, which we now know from realising the input.
|
||||
|
||||
## Builder Execution
|
||||
## Builder Execution {#builder-execution}
|
||||
|
||||
The [`builder`](./derivation/index.md#builder) is executed as follows:
|
||||
|
||||
|
|
|
|||
|
|
@ -102,7 +102,7 @@ But rather than somehow scanning all the other fields for inputs, Nix requires t
|
|||
|
||||
### System {#system}
|
||||
|
||||
The system type on which the [`builder`](#attr-builder) executable is meant to be run.
|
||||
The system type on which the [`builder`](#builder) executable is meant to be run.
|
||||
|
||||
A necessary condition for Nix to schedule a given derivation on some [Nix instance] is for the "system" of that derivation to match that instance's [`system` configuration option] or [`extra-platforms` configuration option].
|
||||
|
||||
|
|
@ -245,7 +245,7 @@ If those other derivations *also* abide by this common case (and likewise for tr
|
|||
> note the ".drv"
|
||||
> ```
|
||||
|
||||
## Extending the model to be higher-order
|
||||
## Extending the model to be higher-order {#dynamic}
|
||||
|
||||
**Experimental feature**: [`dynamic-derivations`](@docroot@/development/experimental-features.md#xp-feature-dynamic-derivations)
|
||||
|
||||
|
|
|
|||
|
|
@ -167,10 +167,10 @@ It is only in the potential for that check to fail that they are different.
|
|||
>
|
||||
> In a future world where floating content-addressing is also stable, we in principle no longer need separate [fixed](#fixed) content-addressing.
|
||||
> Instead, we could always use floating content-addressing, and separately assert the precise value content address of a given store object to be used as an input (of another derivation).
|
||||
> A stand-alone assertion object of this sort is not yet implemented, but its possible creation is tracked in [Issue #11955](https://github.com/NixOS/nix/issues/11955).
|
||||
> A stand-alone assertion object of this sort is not yet implemented, but its possible creation is tracked in [issue #11955](https://github.com/NixOS/nix/issues/11955).
|
||||
>
|
||||
> In the current version of Nix, fixed outputs which fail their hash check are still registered as valid store objects, just not registered as outputs of the derivation which produced them.
|
||||
> This is an optimization that means if the wrong output hash is specified in a derivation, and then the derivation is recreated with the right output hash, derivation does not need to be rebuilt --- avoiding downloading potentially large amounts of data twice.
|
||||
> This is an optimization that means if the wrong output hash is specified in a derivation, and then the derivation is recreated with the right output hash, the derivation does not need to be rebuilt — avoiding downloading potentially large amounts of data twice.
|
||||
> This optimisation prefigures the design above:
|
||||
> If the output hash assertion was moved outside the derivation itself, Nix could additionally not only register that outputted store object like today, but could also make note that the derivation did in fact successfully download some data.
|
||||
For example, for the "fetch URL" example above, making such a note is tantamount to recording what data is available at the time of download at the given URL.
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ In particular, the specification decides:
|
|||
|
||||
- if the content is content-addressed, how is it content addressed
|
||||
|
||||
- if the content is content-addressed, [what is its content address](./content-address.md#fixed-content-addressing) (and thus what is its [store path])
|
||||
- if the content is content-addressed, [what is its content address](./content-address.md#fixed) (and thus what is its [store path])
|
||||
|
||||
## Types of derivations
|
||||
|
||||
|
|
|
|||
|
|
@ -6,26 +6,221 @@
|
|||
That is to say, an input-addressed output's store path is a function not of the output itself, but of the derivation that produced it.
|
||||
Even if two store objects have the same contents, if they are produced in different ways, and one is input-addressed, then they will have different store paths, and are thus guaranteed not to be the same store object.
|
||||
|
||||
<!---
|
||||
## Modulo content addressed derivation outputs {#hash-quotient-drv}
|
||||
|
||||
### Modulo fixed-output derivations
|
||||
A naive implementation of an output hash computation for input-addressed outputs would be to hash the derivation hash and output together.
|
||||
This clearly has the uniqueness properties we want for input-addressed outputs, but suffers from an inefficiency.
|
||||
Specifically, new builds would be required whenever a change is made to a fixed-output derivation, despite having provably no differences in the inputs to the new derivation compared to what it used to be.
|
||||
Concretely, this would cause a "mass rebuild" whenever any fetching detail changes, including mirror lists, certificate authority certificates, etc.
|
||||
|
||||
**TODO hash derivation modulo.**
|
||||
To solve this problem, we compute output hashes differently, so that certain output hashes become identical.
|
||||
We call this concept quotient hashing, in reference to quotient types or sets.
|
||||
|
||||
So how do we compute the hash part of the output path of a derivation?
|
||||
This is done by the function `hashDrv`, shown in Figure 5.10.
|
||||
It distinguishes between two cases.
|
||||
If the derivation is a fixed-output derivation, then it computes a hash over just the `outputHash` attributes.
|
||||
So how do we compute the hash part of the output paths of an input-addressed derivation?
|
||||
This is done by the function `hashQuotientDerivation`, shown below.
|
||||
|
||||
If the derivation is not a fixed-output derivation, we replace each element in the derivation’s inputDrvs with the result of a call to `hashDrv` for that element.
|
||||
(The derivation at each store path in `inputDrvs` is converted from its on-disk ATerm representation back to a `StoreDrv` by the function `parseDrv`.) In essence, `hashDrv` partitions store derivations into equivalence classes, and for hashing purpose it replaces each store path in a derivation graph with its equivalence class.
|
||||
First, a word on inputs.
|
||||
`hashQuotientDerivation` is only defined on derivations whose [inputs](@docroot@/store/derivation/index.md#inputs) take the first-order form:
|
||||
```typescript
|
||||
type ConstantPath = {
|
||||
path: StorePath;
|
||||
};
|
||||
|
||||
The recursion in Figure 5.10 is inefficient:
|
||||
it will call itself once for each path by which a subderivation can be reached, i.e., `O(V k)` times for a derivation graph with `V` derivations and with out-degree of at most `k`.
|
||||
In the actual implementation, memoisation is used to reduce this to `O(V + E)` complexity for a graph with E edges.
|
||||
type FirstOrderOutputPath = {
|
||||
drvPath: StorePath;
|
||||
output: OutputName;
|
||||
};
|
||||
|
||||
-->
|
||||
type FirstOrderDerivingPath = ConstantPath | FirstOrderOutputPath;
|
||||
|
||||
type Inputs = Set<FirstOrderDerivingPath>;
|
||||
```
|
||||
|
||||
For the algorithm below, we adopt a derivation representation in which the two types of (first-order) derived paths are partitioned into two sets, as follows:
|
||||
```typescript
|
||||
type Derivation = {
|
||||
// inputs: Set<FirstOrderDerivingPath>; // replaced
|
||||
inputSrcs: Set<ConstantPath>; // new instead
|
||||
inputDrvOutputs: Set<FirstOrderOutputPath>; // new instead
|
||||
// ...other fields...
|
||||
};
|
||||
```
|
||||
|
||||
In the [currently-experimental][xp-feature-dynamic-derivations] higher-order case where outputs of outputs are allowed as [deriving paths][deriving-path] and thus derivation inputs, derivations using that generalization are not valid arguments to this function.
|
||||
Those derivations must be (partially) [resolved](@docroot@/store/resolution.md) enough first, to the point where no such higher-order inputs remain.
|
||||
Then, and only then, can input addresses be assigned.
|
||||
|
||||
```
|
||||
function hashQuotientDerivation(drv) -> Hash:
|
||||
assert(drv.outputs are input-addressed)
|
||||
drv′ ← drv with {
|
||||
inputDrvOutputs = ⋃(
|
||||
assert(drvPath is store path)
|
||||
case hashOutputsOrQuotientDerivation(readDrv(drvPath)) of
|
||||
drvHash : Hash →
|
||||
(drvHash.toBase16(), output)
|
||||
outputHashes : Map[String, Hash] →
|
||||
(outputHashes[output].toBase16(), "out")
|
||||
| (drvPath, output) ∈ drv.inputDrvOutputs
|
||||
)
|
||||
}
|
||||
return hashSHA256(printDrv(drv′))
|
||||
|
||||
function hashOutputsOrQuotientDerivation(drv) -> Map[String, Hash] | Hash:
|
||||
if drv.outputs are content-addressed:
|
||||
return {
|
||||
outputName ↦ hashSHA256(
|
||||
"fixed:out:" + ca.printMethodAlgo() +
|
||||
":" + ca.hash.toBase16() +
|
||||
":" + ca.makeFixedOutputPath(drv.name, outputName))
|
||||
| (outputName ↦ output) ∈ drv.outputs
|
||||
, ca = output.contentAddress // or get from build trace if floating
|
||||
}
|
||||
else: // drv.outputs are input-addressed
|
||||
return hashQuotientDerivation(drv)
|
||||
```
|
||||
|
||||
### `hashQuotientDerivation`
|
||||
|
||||
We replace each element in the derivation's `inputDrvOutputs` using data from a call to `hashOutputsOrQuotientDerivation` on the `drvPath` of that element.
|
||||
When `hashOutputsOrQuotientDerivation` returns a single drv hash (because the input derivation in question is input-addressing), we simply swap out the `drvPath` for that hash, and keep the same output name.
|
||||
When `hashOutputsOrQuotientDerivation` returns a map of content addresses per-output, we look up the output in question, and pair it with the output name `out`.
|
||||
|
||||
The resulting pseudo-derivation (with hashes instead of store paths in `inputDrvs`) is then printed (in the ["ATerm" format](@docroot@/protocols/derivation-aterm.md)) and hashed, and this becomes the hash of the "quotient derivation".
|
||||
|
||||
When calculating output hashes, `hashQuotientDerivation` is called on an almost-complete input-addressing derivation, which is just missing its input-addressed output paths.
|
||||
The derivation hash is then used to calculate output paths for each output.
|
||||
<!-- TODO describe how this is done. -->
|
||||
Those output paths can then be substituted into the almost-complete input-addressed derivation to complete it.
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> There may be an unintentional deviation from the specification currently implemented in the `(outputHashes[output].toBase16(), "out")` case.
|
||||
> This is not fatal because the deviation would only apply for content-addressing derivations with more than one output, and that only occurs in the floating case, which is [experimental][xp-feature-ca-derivations].
|
||||
> Once this bug is fixed, this note will be removed.
|
||||
|
||||
### `hashOutputsOrQuotientDerivation`
|
||||
|
||||
How does `hashOutputsOrQuotientDerivation` in turn work?
|
||||
It consists of two main cases, based on whether the outputs of the derivation are to be input-addressed or content-addressed.
|
||||
|
||||
#### Input-addressed outputs case
|
||||
|
||||
In the input-addressed case, it just calls `hashQuotientDerivation`, and returns that derivation hash.
|
||||
This makes `hashQuotientDerivation` and `hashOutputsOrQuotientDerivation` mutually-recursive.
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> In this case, `hashQuotientDerivation` is being called on a *complete* input-addressing derivation that already has its output paths calculated.
|
||||
> The `inputDrvs` substitution takes place anyways.
|
||||
|
||||
#### Content-addressed outputs case
|
||||
|
||||
If the outputs are [content-addressed](./content-address.md), then it computes a hash for each output derived from the content-address of that output.
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> In the [fixed](./content-address.md#fixed) content-addressing case, the outputs' content addresses are statically specified in advance, so this always just works.
|
||||
> (The fixed case is what the pseudo-code shows.)
|
||||
>
|
||||
> In the [floating](./content-address.md#floating) case, the content addresses are not specified in advance.
|
||||
> This is what the "or get from [build trace](@docroot@/store/build-trace.md) if floating" comment refers to.
|
||||
> In this case, the algorithm is *stuck* until the input in question is built, and we know what the actual contents of the output in question is.
|
||||
>
|
||||
> That is OK however, because there is no problem with delaying the assigning of input addresses (which, remember, is what `hashQuotientDerivation` is ultimately for) until all inputs are known.
|
||||
|
||||
### Performance
|
||||
|
||||
The recursion in the algorithm is potentially inefficient:
|
||||
it could call itself once for each path by which a subderivation can be reached, i.e., `O(V^k)` times for a derivation graph with `V` derivations and with out-degree of at most `k`.
|
||||
In the actual implementation, [memoisation](https://en.wikipedia.org/wiki/Memoization) is used to reduce this cost to be proportional to the total number of `inputDrvOutputs` encountered.
|
||||
|
||||
### Semantic properties
|
||||
|
||||
*See [this chapter's appendix](@docroot@/store/math-notation.md) on grammar and metavariable conventions.*
|
||||
|
||||
In essence, `hashQuotientDerivation` partitions input-addressing derivations into equivalence classes: every derivation in that equivalence class is mapped to the same derivation hash.
|
||||
We can characterize this equivalence relation directly, by working bottom up.
|
||||
|
||||
We start by defining an equivalence relation on first-order output deriving paths that refer to content-addressed derivation outputs. Two such paths are equivalent if they refer to the same store object:
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\AxiomC{$d\_1$ is content-addressing}
|
||||
\\AxiomC{$d\_2$ is content-addressing}
|
||||
\\AxiomC{$
|
||||
{}^\*(\text{path}(d\_1), o\_1)
|
||||
\=
|
||||
{}^\*(\text{path}(d\_2), o\_2)
|
||||
$}
|
||||
\\TrinaryInfC{$(\text{path}(d\_1), o\_1) \\,\\sim_{\\mathrm{CA}}\\, (d\_2, o\_2)$}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
where \\({}^*(s, o)\\) denotes the store object that the output deriving path refers to.
|
||||
|
||||
We will also need the following construction to lift any equivalence relation on \\(X\\) to an equivalence relation on (finite) sets of \\(X\\) (in short, \\(\\mathcal{P}(X)\\)):
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\AxiomC{$\\forall a \\in A. \\exists b \\in B. a \\,\\sim\_X\\, b$}
|
||||
\\AxiomC{$\\forall b \\in B. \\exists a \\in A. b \\,\\sim\_X\\, a$}
|
||||
\\BinaryInfC{$A \\,\\sim_{\\mathcal{P}(X)}\\, B$}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
Now we can define the equivalence relation \\(\\sim_\\mathrm{IA}\\) on input-addressed derivation outputs. Two input-addressed outputs are equivalent if their derivations are equivalent (via the yet-to-be-defined \\(\\sim_{\\mathrm{IADrv}}\\) relation) and their output names are the same:
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\AxiomC{$d\_1$ is input-addressing}
|
||||
\\AxiomC{$d\_2$ is input-addressing}
|
||||
\\AxiomC{$d\_1 \\,\\sim_{\\mathrm{IADrv}}\\, d\_2$}
|
||||
\\AxiomC{$o\_1 = o\_2$}
|
||||
\\QuaternaryInfC{$(\text{path}(d\_1), o\_1) \\,\\sim_{\\mathrm{IA}}\\, (\text{path}(d\_2), o\_2)$}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
And now we can define \\(\\sim_{\\mathrm{IADrv}}\\).
|
||||
Two input-addressed derivations are equivalent if their content-addressed inputs are equivalent, their input-addressed inputs are also equivalent, and they are otherwise equal:
|
||||
|
||||
<!-- cheating a bit with the semantics to get a good layout that fits on the page -->
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\alwaysNoLine
|
||||
\\AxiomC{$
|
||||
\\mathrm{caInputs}(d\_1)
|
||||
\\,\\sim_{\\mathcal{P}(\\mathrm{CA})}\\,
|
||||
\\mathrm{caInputs}(d\_2)
|
||||
$}
|
||||
\\AxiomC{$
|
||||
\\mathrm{iaInputs}(d\_1)
|
||||
\\,\\sim_{\\mathcal{P}(\\mathrm{IA})}\\,
|
||||
\\mathrm{iaInputs}(d\_2)
|
||||
$}
|
||||
\\BinaryInfC{$
|
||||
d\_1\left[\\mathrm{inputDrvOutputs} := \\{\\}\right]
|
||||
\=
|
||||
d\_2\left[\\mathrm{inputDrvOutputs} := \\{\\}\right]
|
||||
$}
|
||||
\\alwaysSingleLine
|
||||
\\UnaryInfC{$d\_1 \\,\\sim_{\\mathrm{IADrv}}\\, d\_2$}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
where \\(\\mathrm{caInputs}(d)\\) returns the content-addressed inputs of \\(d\\) and \\(\\mathrm{iaInputs}(d)\\) returns the input-addressed inputs.
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> An astute reader might notice that nowhere does `inputSrcs` enter into these definitions.
|
||||
> That means that replacing an input derivation with its outputs directly added to `inputSrcs` always results in a derivation in a different equivalence class, despite the resulting input closure (as would be mounted in the store at build time) being the same.
|
||||
> [Issue #9259](https://github.com/NixOS/nix/issues/9259) is about creating a coarser equivalence relation to address this.
|
||||
>
|
||||
> \\(\\sim_\mathrm{Drv}\\) from [derivation resolution](@docroot@/store/resolution.md) is such an equivalence relation.
|
||||
> It is coarser than this one: any two derivations which are "'hash quotient derivation'-equivalent" (\\(\\sim_\mathrm{IADrv}\\)) are also "resolution-equivalent" (\\(\\sim_\mathrm{Drv}\\)).
|
||||
> It also relates derivations whose `inputDrvOutputs` have been rewritten into `inputSrcs`.
|
||||
|
||||
[deriving-path]: @docroot@/store/derivation/index.md#deriving-path
|
||||
[xp-feature-dynamic-derivations]: @docroot@/development/experimental-features.md#xp-feature-dynamic-derivations
|
||||
[xp-feature-ca-derivations]: @docroot@/development/experimental-features.md#xp-feature-ca-derivations
|
||||
[xp-feature-git-hashing]: @docroot@/development/experimental-features.md#xp-feature-git-hashing
|
||||
[xp-feature-impure-derivations]: @docroot@/development/experimental-features.md#xp-feature-impure-derivations
|
||||
|
|
|
|||
|
|
@ -46,7 +46,7 @@ be many different serialisations.
|
|||
For these reasons, Nix has its very own archive format—the Nix Archive (NAR) format,
|
||||
which is carefully designed to avoid the problems described above.
|
||||
|
||||
The exact specification of the Nix Archive format is in [specified here](../../protocols/nix-archive.md).
|
||||
The exact specification of the Nix Archive format is [specified here](../../protocols/nix-archive/index.md).
|
||||
|
||||
## Content addressing File System Objects beyond a single serialisation pass
|
||||
|
||||
|
|
|
|||
16
doc/manual/source/store/math-notation.md
Normal file
16
doc/manual/source/store/math-notation.md
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
# Appendix: Math notation
|
||||
|
||||
A few times in this manual, formal "proof trees" are used for [natural deduction](https://en.wikipedia.org/wiki/Natural_deduction)-style definition of various [relations](https://en.wikipedia.org/wiki/Relation_(mathematics)).
|
||||
|
||||
The following grammar and assignment of metavariables to syntactic categories is used in these sections.
|
||||
|
||||
\\begin{align}
|
||||
s, t &\in \text{store-path} \\\\
|
||||
o &\in \text{output-name} \\\\
|
||||
i, p &\in \text{deriving-path} \\\\
|
||||
d &\in \text{derivation}
|
||||
\\end{align}
|
||||
|
||||
\\begin{align}
|
||||
\text{deriving-path} \quad p &::= s \mid (p, o)
|
||||
\\end{align}
|
||||
219
doc/manual/source/store/resolution.md
Normal file
219
doc/manual/source/store/resolution.md
Normal file
|
|
@ -0,0 +1,219 @@
|
|||
# Derivation Resolution
|
||||
|
||||
*See [this chapter's appendix](@docroot@/store/math-notation.md) on grammar and metavariable conventions.*
|
||||
|
||||
To *resolve* a derivation is to replace its [inputs] with the simplest inputs — plain store paths — that denote the same store objects.
|
||||
|
||||
Derivations that only have store paths as inputs are likewise called *resolved derivations*.
|
||||
(They are called that whether they are in fact the output of derivation resolution, or just made that way without non-store-path inputs to begin with.)
|
||||
|
||||
## Input Content Equivalence of Derivations
|
||||
|
||||
[Deriving paths][deriving-path] intentionally make it possible to refer to the same [store object] in multiple ways.
|
||||
This is a consequence of content-addressing, since different derivations can produce the same outputs, and the same data can also be manually added to the store.
|
||||
This is also a consequence even of input-addressing, as an output can be referred to by derivation and output name, or directly by its [computed](./derivation/outputs/input-address.md) store path.
|
||||
Since dereferencing deriving paths is thus not injective, it induces an equivalence relation on deriving paths.
|
||||
|
||||
Let's call this equivalence relation \\(\\sim\\), where \\(p_1 \\sim p_2\\) means that deriving paths \\(p_1\\) and \\(p_2\\) refer to the same store object.
|
||||
|
||||
**Content Equivalence**: Two deriving paths are equivalent if they refer to the same store object:
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\AxiomC{${}^*p_1 = {}^*p_2$}
|
||||
\\UnaryInfC{$p_1 \\,\\sim_\\mathrm{DP}\\, p_2$}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
where \\({}^\*p\\) denotes the store object that deriving path \\(p\\) refers to.
|
||||
|
||||
This also induces an equivalence relation on sets of deriving paths:
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\AxiomC{$\\{ {}^*p | p \\in P_1 \\} = \\{ {}^*p | p \\in P_2 \\}$}
|
||||
\\UnaryInfC{$P_1 \\,\\sim_{\\mathcal{P}(\\mathrm{DP})}\\, P_2$}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
**Input Content Equivalence**: This, in turn, induces an equivalence relation on derivations: two derivations are equivalent if their inputs are equivalent, and they are otherwise equal:
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\AxiomC{$\\mathrm{inputs}(d_1) \\,\\sim_{\\mathcal{P}(\\mathrm{DP})}\\, \\mathrm{inputs}(d_2)$}
|
||||
\\AxiomC{$
|
||||
d\_1\left[\\mathrm{inputs} := \\{\\}\right]
|
||||
\=
|
||||
d\_2\left[\\mathrm{inputs} := \\{\\}\right]
|
||||
$}
|
||||
\\BinaryInfC{$d_1 \\,\\sim_\\mathrm{Drv}\\, d_2$}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
Derivation resolution always maps derivations to input-content-equivalent derivations.
|
||||
|
||||
## Resolution relation
|
||||
|
||||
Dereferencing a derived path — \\({}^\*p\\) above — was just introduced as a black box.
|
||||
But actually it is a multi-step process of looking up build results in the [build trace] that itself depends on resolving the lookup keys.
|
||||
Resolution is thus a recursive multi-step process that is worth diagramming formally.
|
||||
|
||||
We can do this with a small-step binary transition relation; let's call it \\(\rightsquigarrow\\).
|
||||
We can then conclude dereferenced equality like this:
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\AxiomC{$p\_1 \\rightsquigarrow^* p$}
|
||||
\\AxiomC{$p\_2 \\rightsquigarrow^* p$}
|
||||
\\BinaryInfC{${}^*p\_1 = {}^*p\_2$}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
I.e. by showing that both original items resolve (over 0 or more small steps, hence the \\({}^*\\)) to the same exact item.
|
||||
|
||||
With this motivation, let's now formalize a [small-step](https://en.wikipedia.org/wiki/Operational_semantics#Small-step_semantics) system of reduction rules for resolution.
|
||||
|
||||
### Formal rules
|
||||
|
||||
### \\(\text{resolved}\\) unary relation
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\AxiomC{$s \in \text{store-path}$}
|
||||
\\UnaryInfC{$s$ resolved}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\AxiomC{$\forall i \in \mathrm{inputs}(d). i \text{ resolved}$}
|
||||
\\UnaryInfC{$d$ resolved}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
### \\(\rightsquigarrow\\) binary relation
|
||||
|
||||
> **Remark**
|
||||
>
|
||||
> Actually, to be completely formal we would need to keep track of the build trace we are choosing to resolve against.
|
||||
>
|
||||
> We could do that by making \\(\rightsquigarrow\\) a ternary relation, which would pass the build trace to itself until it finally uses it in that one rule.
|
||||
> This would add clutter more than insight, so we didn't bother to write it.
|
||||
>
|
||||
> There are other options too, like saying the whole reduction rule system is parameterized on the build trace, essentially [currying](https://en.wikipedia.org/wiki/Currying) the ternary \\(\rightsquigarrow\\) into a function from build traces to the binary relation written above.
|
||||
|
||||
#### Core build trace lookup rule
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\AxiomC{$s \in \text{store-path}$}
|
||||
\\AxiomC{${}^*s \in \text{derivation}$}
|
||||
\\AxiomC{${}^*s$ resolved}
|
||||
\\AxiomC{$\mathrm{build\text{-}trace}[s][o] = t$}
|
||||
\\QuaternaryInfC{$(s, o) \rightsquigarrow t$}
|
||||
\\RightLabel{\\scriptsize output path resolution}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
#### Inductive rules
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\AxiomC{$i \\rightsquigarrow i'$}
|
||||
\\AxiomC{$i \\in \\mathrm{inputs}(d)$}
|
||||
\\BinaryInfC{$d \\rightsquigarrow d[i \\mapsto i']$}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\AxiomC{$d \\rightsquigarrow d'$}
|
||||
\\UnaryInfC{$(\\mathrm{path}(d), o) \\rightsquigarrow (\\mathrm{path}(d'), o)$}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
\\[
|
||||
\\begin{prooftree}
|
||||
\\AxiomC{$p \\rightsquigarrow p'$}
|
||||
\\UnaryInfC{$(p, o) \\rightsquigarrow (p', o)$}
|
||||
\\end{prooftree}
|
||||
\\]
|
||||
|
||||
### Properties
|
||||
|
||||
Like all well-behaved evaluation relations, partial resolution is [*confluent*](https://en.wikipedia.org/wiki/Confluence_(abstract_rewriting)).
|
||||
Also, if we take the symmetric closure of \\(\\rightsquigarrow^\*\\), we end up with the equivalence relations of the previous section.
|
||||
Resolution respects content equivalence for deriving paths, and input content equivalence for derivations.
|
||||
|
||||
> **Remark**
|
||||
>
|
||||
> We chose to explicitly define a "resolved" unary relation from scratch above.
|
||||
> But it can also be defined as the normal forms of the \\(\\rightsquigarrow^\*\\) relation:
|
||||
>
|
||||
> \\[ a \text{ resolved} \Leftrightarrow \forall b. b \rightsquigarrow^* a \Rightarrow b = a\\]
|
||||
>
|
||||
> In prose, resolved terms are terms which \\(\\rightsquigarrow^\*\\) only relates on the left side to the same term on the right side; they are the terms which can be resolved no further.
|
||||
|
||||
## Partial versus Complete Resolution
|
||||
|
||||
Similar to evaluation, we can also speak of *partial* versus *complete* derivation resolution.
|
||||
Partial derivation resolution is what we've actually formalized above with \\(\\rightsquigarrow^\*\\).
|
||||
Complete resolution is resolution ending in a resolved term (deriving path or derivation).
|
||||
(Which is a normal form of the relation, per the remark above.)
|
||||
|
||||
With partial resolution, a derivation is related to equivalent derivations with the same or simpler inputs, but not all those inputs will be plain store paths.
|
||||
This is useful when the input refers to a floating content-addressed output we have not yet built — we don't know what (content-addressed) store path will be used for that derivation, so we are "stuck" trying to resolve the deriving path in question.
|
||||
(In the above formalization, this happens when the build trace is missing the keys we wish to look up in it.)
|
||||
|
||||
Complete resolution is a *functional* relation, i.e. values on the left are uniquely related with values on the right.
|
||||
It is not, however, a *total* relation (in general, assuming arbitrary build traces).
|
||||
This is discussed in the next section.
|
||||
|
||||
## Termination
|
||||
|
||||
For static derivation graphs, complete resolution is indeed total, because it always terminates for all inputs.
|
||||
(A relation that is both total and functional is a function.)
|
||||
|
||||
For [dynamic][xp-feature-dynamic-derivations] derivation graphs, however, this is not the case — resolution is not guaranteed to terminate.
|
||||
The issue isn't rewriting deriving paths themselves:
|
||||
a single rewrite to normalize an output deriving path to a constant one always exists, and always proceeds in one step.
|
||||
The issue is that dynamic derivations (i.e. those that are filled into the graph by a previous resolution) may have more transitive dependencies than the original derivation.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> Suppose we have this deriving path
|
||||
> ```json
|
||||
> {
|
||||
> "drvPath": {
|
||||
> "drvPath": "...-foo.drv",
|
||||
> "output": "bar.drv"
|
||||
> },
|
||||
> "output": "baz"
|
||||
> }
|
||||
> ```
|
||||
> and derivation `foo` is already resolved.
|
||||
> When we resolve this deriving path, we'll end up with something like:
|
||||
> ```json
|
||||
> {
|
||||
> "drvPath": "...-foo-bar.drv",
|
||||
> "output": "baz"
|
||||
> }
|
||||
> ```
|
||||
> So far this is just a single atomic rewrite, with no termination issues.
|
||||
> But the derivation `foo-bar` may have its *own* dynamic derivation inputs.
|
||||
> Resolution must resolve that derivation first before the above deriving path can finally be normalized to a plain `...-foo-bar-baz` store path.
|
||||
|
||||
The important thing to notice is that "build trace" *keys* must be resolved.
|
||||
The *values* those keys are mapped to have no such constraints.
|
||||
An arbitrary store object has no notion of being resolved or not.
|
||||
But, an arbitrary store object can be read back as a derivation (as will in fact be done in the case of dynamic derivations / nested output deriving paths).
|
||||
And those derivations need *not* be resolved.
|
||||
|
||||
It is those dynamic non-resolved derivations which are the source of non-termination.
|
||||
By the same token, they are also the reason why dynamic derivations offer greater expressive power.
|
||||
|
||||
[store object]: @docroot@/store/store-object.md
|
||||
[inputs]: @docroot@/store/derivation/index.md#inputs
|
||||
[build trace]: @docroot@/store/build-trace.md
|
||||
[deriving-path]: @docroot@/store/derivation/index.md#deriving-path
|
||||
[xp-feature-dynamic-derivations]: @docroot@/development/experimental-features.md#xp-feature-dynamic-derivations
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
# Content-Addressing Store Objects
|
||||
|
||||
Just [like][fso-ca] [File System Objects][File System Object],
|
||||
[Store Objects][Store Object] can also be [content-addressed](@docroot@/glossary.md#gloss-content-addressed),
|
||||
[Store Objects][Store Object] can also be [content-addressed](@docroot@/glossary.md#gloss-content-address),
|
||||
unless they are [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object).
|
||||
|
||||
For store objects, the content address we produce will take the form of a [Store Path] rather than regular hash.
|
||||
|
|
@ -107,7 +107,7 @@ References (to other store objects and self-references alike) are supported so l
|
|||
>
|
||||
> This method is part of the [`git-hashing`][xp-feature-git-hashing] experimental feature.
|
||||
|
||||
This uses the corresponding [Git](../file-system-object/content-address.md#serial-git) method of file system object content addressing.
|
||||
This uses the corresponding [Git](../file-system-object/content-address.md#git) method of file system object content addressing.
|
||||
|
||||
References are not supported.
|
||||
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@
|
|||
>
|
||||
> A rendered store path
|
||||
|
||||
Nix implements references to [store objects](./index.md#store-object) as *store paths*.
|
||||
Nix implements references to [store objects](./store-object.md) as *store paths*.
|
||||
|
||||
Think of a store path as an [opaque], [unique identifier]:
|
||||
The only way to obtain store path is by adding or building store objects.
|
||||
|
|
|
|||
15
doc/manual/theme/head.hbs
Normal file
15
doc/manual/theme/head.hbs
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
<script>
|
||||
MathJax = {
|
||||
loader: {load: ['[tex]/bussproofs']},
|
||||
tex: {
|
||||
packages: {'[+]': ['bussproofs']},
|
||||
// Doesn't seem to work in mathjax 3
|
||||
//formatError: function(jax, error) {
|
||||
// console.log(`TeX error in "${jax.latex}": ${error.message}`);
|
||||
// return jax.formatError(error);
|
||||
//}
|
||||
}
|
||||
};
|
||||
</script>
|
||||
<!-- Load a newer version of MathJax than mdbook does by default, and which in particular has working relative paths for the "bussproofs" extension. -->
|
||||
<script async src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/3.0.1/es5/tex-mml-chtml.js"></script>
|
||||
8
flake.lock
generated
8
flake.lock
generated
|
|
@ -63,16 +63,16 @@
|
|||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1756178832,
|
||||
"narHash": "sha256-O2CIn7HjZwEGqBrwu9EU76zlmA5dbmna7jL1XUmAId8=",
|
||||
"lastModified": 1761597516,
|
||||
"narHash": "sha256-wxX7u6D2rpkJLWkZ2E932SIvDJW8+ON/0Yy8+a5vsDU=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "d98ce345cdab58477ca61855540999c86577d19d",
|
||||
"rev": "daf6dc47aa4b44791372d6139ab7b25269184d55",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-25.05-small",
|
||||
"ref": "nixos-25.05",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
|
|
|
|||
12
flake.nix
12
flake.nix
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
description = "The purely functional package manager";
|
||||
|
||||
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small";
|
||||
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
|
||||
|
||||
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
||||
inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446";
|
||||
|
|
@ -417,6 +417,10 @@
|
|||
supportsCross = false;
|
||||
};
|
||||
|
||||
"nix-kaitai-struct-checks" = {
|
||||
supportsCross = false;
|
||||
};
|
||||
|
||||
"nix-perl-bindings" = {
|
||||
supportsCross = false;
|
||||
};
|
||||
|
|
@ -481,10 +485,10 @@
|
|||
open-manual = {
|
||||
type = "app";
|
||||
program = "${pkgs.writeShellScript "open-nix-manual" ''
|
||||
manual_path="${self.packages.${system}.nix-manual}/share/doc/nix/manual/index.html"
|
||||
if ! ${opener} "$manual_path"; then
|
||||
path="${self.packages.${system}.nix-manual.site}/index.html"
|
||||
if ! ${opener} "$path"; then
|
||||
echo "Failed to open manual with ${opener}. Manual is located at:"
|
||||
echo "$manual_path"
|
||||
echo "$path"
|
||||
fi
|
||||
''}";
|
||||
meta.description = "Open the Nix manual in your browser";
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
# shellcheck disable=all
|
||||
#compdef nix
|
||||
# shellcheck disable=all
|
||||
|
||||
function _nix() {
|
||||
local ifs_bk="$IFS"
|
||||
|
|
|
|||
|
|
@ -42,8 +42,8 @@ if cxx.get_id() == 'clang'
|
|||
add_project_arguments('-fpch-instantiate-templates', language : 'cpp')
|
||||
endif
|
||||
|
||||
# Darwin ld doesn't like "X.Y.Zpre"
|
||||
nix_soversion = meson.project_version().split('pre')[0]
|
||||
# Darwin ld doesn't like "X.Y.ZpreABCD+W"
|
||||
nix_soversion = meson.project_version().split('+')[0].split('pre')[0]
|
||||
|
||||
subdir('assert-fail')
|
||||
subdir('asan-options')
|
||||
|
|
|
|||
|
|
@ -443,6 +443,11 @@ in
|
|||
*/
|
||||
nix-json-schema-checks = callPackage ../src/json-schema-checks/package.nix { };
|
||||
|
||||
/**
|
||||
Kaitai struct schema validation checks
|
||||
*/
|
||||
nix-kaitai-struct-checks = callPackage ../src/kaitai-struct-checks/package.nix { };
|
||||
|
||||
nix-perl-bindings = callPackage ../src/perl/package.nix { };
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -63,6 +63,7 @@ let
|
|||
"nix-cli"
|
||||
"nix-functional-tests"
|
||||
"nix-json-schema-checks"
|
||||
"nix-kaitai-struct-checks"
|
||||
]
|
||||
++ lib.optionals enableBindings [
|
||||
"nix-perl-bindings"
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ programmatically:
|
|||
1. Embedding the evaluator
|
||||
2. Writing language plug-ins
|
||||
|
||||
Embedding means you link the Nix C libraries in your program and use them from
|
||||
Embedding means you link the Nix C API libraries in your program and use them from
|
||||
there. Adding a plug-in means you make a library that gets loaded by the Nix
|
||||
language evaluator, specified through a configuration option.
|
||||
|
||||
|
|
|
|||
1
src/json-schema-checks/build-result
Symbolic link
1
src/json-schema-checks/build-result
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libstore-tests/data/build-result
|
||||
1
src/json-schema-checks/build-trace-entry
Symbolic link
1
src/json-schema-checks/build-trace-entry
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libstore-tests/data/realisation
|
||||
1
src/json-schema-checks/content-address
Symbolic link
1
src/json-schema-checks/content-address
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libstore-tests/data/content-address
|
||||
1
src/json-schema-checks/deriving-path
Symbolic link
1
src/json-schema-checks/deriving-path
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libstore-tests/data/derived-path
|
||||
1
src/json-schema-checks/file-system-object
Symbolic link
1
src/json-schema-checks/file-system-object
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libutil-tests/data/memory-source-accessor
|
||||
|
|
@ -20,6 +20,14 @@ schema_dir = meson.current_source_dir() / 'schema'
|
|||
|
||||
# Get all example files
|
||||
schemas = [
|
||||
{
|
||||
'stem' : 'file-system-object',
|
||||
'schema' : schema_dir / 'file-system-object-v1.yaml',
|
||||
'files' : [
|
||||
'simple.json',
|
||||
'complex.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'hash',
|
||||
'schema' : schema_dir / 'hash-v1.yaml',
|
||||
|
|
@ -30,6 +38,44 @@ schemas = [
|
|||
'blake3-base64.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'content-address',
|
||||
'schema' : schema_dir / 'content-address-v1.yaml',
|
||||
'files' : [
|
||||
'text.json',
|
||||
'nar.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'store-path',
|
||||
'schema' : schema_dir / 'store-path-v1.yaml',
|
||||
'files' : [
|
||||
'simple.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'deriving-path',
|
||||
'schema' : schema_dir / 'deriving-path-v1.yaml',
|
||||
'files' : [
|
||||
'single_opaque.json',
|
||||
'single_built.json',
|
||||
'single_built_built.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'build-trace-entry',
|
||||
'schema' : schema_dir / 'build-trace-entry-v1.yaml',
|
||||
'files' : [
|
||||
'simple.json',
|
||||
'with-dependent-realisations.json',
|
||||
'with-signature.json',
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
# Derivation and Derivation output
|
||||
schemas += [
|
||||
# Match overall
|
||||
{
|
||||
'stem' : 'derivation',
|
||||
'schema' : schema_dir / 'derivation-v3.yaml',
|
||||
|
|
@ -38,20 +84,120 @@ schemas = [
|
|||
'simple-derivation.json',
|
||||
],
|
||||
},
|
||||
# # Not sure how to make subschema work
|
||||
# {
|
||||
# 'stem': 'derivation',
|
||||
# 'schema': schema_dir / 'derivation-v3.yaml#output',
|
||||
# 'files' : [
|
||||
# 'output-caFixedFlat.json',
|
||||
# 'output-caFixedNAR.json',
|
||||
# 'output-caFixedText.json',
|
||||
# 'output-caFloating.json',
|
||||
# 'output-deferred.json',
|
||||
# 'output-impure.json',
|
||||
# 'output-inputAddressed.json',
|
||||
# ],
|
||||
# },
|
||||
{
|
||||
'stem' : 'derivation',
|
||||
'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/overall',
|
||||
'files' : [
|
||||
'output-caFixedFlat.json',
|
||||
'output-caFixedNAR.json',
|
||||
'output-caFixedText.json',
|
||||
'output-caFloating.json',
|
||||
'output-deferred.json',
|
||||
'output-impure.json',
|
||||
'output-inputAddressed.json',
|
||||
],
|
||||
},
|
||||
# Match exact variant
|
||||
{
|
||||
'stem' : 'derivation',
|
||||
'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/inputAddressed',
|
||||
'files' : [
|
||||
'output-inputAddressed.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'derivation',
|
||||
'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/caFixed',
|
||||
'files' : [
|
||||
'output-caFixedFlat.json',
|
||||
'output-caFixedNAR.json',
|
||||
'output-caFixedText.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'derivation',
|
||||
'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/caFloating',
|
||||
'files' : [
|
||||
'output-caFloating.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'derivation',
|
||||
'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/deferred',
|
||||
'files' : [
|
||||
'output-deferred.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'derivation',
|
||||
'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/impure',
|
||||
'files' : [
|
||||
'output-impure.json',
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
# Store object info
|
||||
schemas += [
|
||||
# Match overall
|
||||
{
|
||||
'stem' : 'store-object-info',
|
||||
'schema' : schema_dir / 'store-object-info-v1.yaml',
|
||||
'files' : [
|
||||
'pure.json',
|
||||
'impure.json',
|
||||
'empty_pure.json',
|
||||
'empty_impure.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'nar-info',
|
||||
'schema' : schema_dir / 'store-object-info-v1.yaml',
|
||||
'files' : [
|
||||
'pure.json',
|
||||
'impure.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'build-result',
|
||||
'schema' : schema_dir / 'build-result-v1.yaml',
|
||||
'files' : [
|
||||
'success.json',
|
||||
'output-rejected.json',
|
||||
'not-deterministic.json',
|
||||
],
|
||||
},
|
||||
# Match exact variant
|
||||
{
|
||||
'stem' : 'store-object-info',
|
||||
'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/base',
|
||||
'files' : [
|
||||
'pure.json',
|
||||
'empty_pure.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'store-object-info',
|
||||
'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/impure',
|
||||
'files' : [
|
||||
'impure.json',
|
||||
'empty_impure.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'nar-info',
|
||||
'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/base',
|
||||
'files' : [
|
||||
'pure.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'nar-info',
|
||||
'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/narInfo',
|
||||
'files' : [
|
||||
'impure.json',
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
# Validate each example against the schema
|
||||
|
|
@ -64,8 +210,6 @@ foreach schema : schemas
|
|||
stem + '-schema-valid',
|
||||
jv,
|
||||
args : [
|
||||
'--map',
|
||||
'./hash-v1.yaml=' + schema_dir / 'hash-v1.yaml',
|
||||
'http://json-schema.org/draft-04/schema',
|
||||
schema_file,
|
||||
],
|
||||
|
|
|
|||
1
src/json-schema-checks/nar-info
Symbolic link
1
src/json-schema-checks/nar-info
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libstore-tests/data/nar-info
|
||||
|
|
@ -20,8 +20,16 @@ mkMesonDerivation (finalAttrs: {
|
|||
fileset = lib.fileset.unions [
|
||||
../../.version
|
||||
../../doc/manual/source/protocols/json/schema
|
||||
../../src/libutil-tests/data/memory-source-accessor
|
||||
../../src/libutil-tests/data/hash
|
||||
../../src/libstore-tests/data/content-address
|
||||
../../src/libstore-tests/data/store-path
|
||||
../../src/libstore-tests/data/realisation
|
||||
../../src/libstore-tests/data/derivation
|
||||
../../src/libstore-tests/data/derived-path
|
||||
../../src/libstore-tests/data/path-info
|
||||
../../src/libstore-tests/data/nar-info
|
||||
../../src/libstore-tests/data/build-result
|
||||
./.
|
||||
];
|
||||
|
||||
|
|
|
|||
1
src/json-schema-checks/store-object-info
Symbolic link
1
src/json-schema-checks/store-object-info
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libstore-tests/data/path-info
|
||||
1
src/json-schema-checks/store-path
Symbolic link
1
src/json-schema-checks/store-path
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libstore-tests/data/store-path
|
||||
1
src/kaitai-struct-checks/.version
Symbolic link
1
src/kaitai-struct-checks/.version
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../.version
|
||||
77
src/kaitai-struct-checks/meson.build
Normal file
77
src/kaitai-struct-checks/meson.build
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
# Run with:
|
||||
# meson test --suite kaitai-struct
|
||||
# Run with: (without shell / configure)
|
||||
# nix build .#nix-kaitai-struct-checks
|
||||
|
||||
project(
|
||||
'nix-kaitai-struct-checks',
|
||||
'cpp',
|
||||
version : files('.version'),
|
||||
default_options : [
|
||||
'cpp_std=c++23',
|
||||
# TODO(Qyriad): increase the warning level
|
||||
'warning_level=1',
|
||||
'errorlogs=true', # Please print logs for tests that fail
|
||||
],
|
||||
meson_version : '>= 1.1',
|
||||
license : 'LGPL-2.1-or-later',
|
||||
)
|
||||
|
||||
kaitai_runtime_dep = dependency('kaitai-struct-cpp-stl-runtime', required : true)
|
||||
gtest_dep = dependency('gtest')
|
||||
gtest_main_dep = dependency('gtest_main', required : true)
|
||||
|
||||
# Find the Kaitai Struct compiler
|
||||
ksc = find_program('ksc', required : true)
|
||||
|
||||
kaitai_generated_srcs = custom_target(
|
||||
'kaitai-generated-sources',
|
||||
input : [ 'nar.ksy' ],
|
||||
output : [ 'nix_nar.cpp', 'nix_nar.h' ],
|
||||
command : [
|
||||
ksc,
|
||||
'@INPUT@',
|
||||
'--target', 'cpp_stl',
|
||||
'--outdir',
|
||||
meson.current_build_dir(),
|
||||
],
|
||||
)
|
||||
|
||||
nar_kaitai_lib = library(
|
||||
'nix-nar-kaitai-lib',
|
||||
kaitai_generated_srcs,
|
||||
dependencies : [ kaitai_runtime_dep ],
|
||||
install : true,
|
||||
)
|
||||
|
||||
nar_kaitai_dep = declare_dependency(
|
||||
link_with : nar_kaitai_lib,
|
||||
sources : kaitai_generated_srcs[1],
|
||||
)
|
||||
|
||||
# The nar directory is a committed symlink to the actual nars location
|
||||
nars_dir = meson.current_source_dir() / 'nars'
|
||||
|
||||
# Get all example files
|
||||
nars = [
|
||||
'dot.nar',
|
||||
]
|
||||
|
||||
test_deps = [
|
||||
nar_kaitai_dep,
|
||||
kaitai_runtime_dep,
|
||||
gtest_main_dep,
|
||||
]
|
||||
|
||||
this_exe = executable(
|
||||
meson.project_name(),
|
||||
'test-parse-nar.cc',
|
||||
dependencies : test_deps,
|
||||
)
|
||||
|
||||
test(
|
||||
meson.project_name(),
|
||||
this_exe,
|
||||
env : [ 'NIX_NARS_DIR=' + nars_dir ],
|
||||
protocol : 'gtest',
|
||||
)
|
||||
1
src/kaitai-struct-checks/nar.ksy
Symbolic link
1
src/kaitai-struct-checks/nar.ksy
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../doc/manual/source/protocols/nix-archive/nar.ksy
|
||||
1
src/kaitai-struct-checks/nars
Symbolic link
1
src/kaitai-struct-checks/nars
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../libutil-tests/data/nars
|
||||
1
src/kaitai-struct-checks/nix-meson-build-support
Symbolic link
1
src/kaitai-struct-checks/nix-meson-build-support
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../nix-meson-build-support
|
||||
76
src/kaitai-struct-checks/package.nix
Normal file
76
src/kaitai-struct-checks/package.nix
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
# Run with: nix build .#nix-kaitai-struct-checks
|
||||
# or: `nix develop .#nix-kaitai-struct-checks` to enter a dev shell
|
||||
{
|
||||
lib,
|
||||
mkMesonDerivation,
|
||||
gtest,
|
||||
meson,
|
||||
ninja,
|
||||
pkg-config,
|
||||
kaitai-struct-compiler,
|
||||
fetchzip,
|
||||
kaitai-struct-cpp-stl-runtime,
|
||||
# Configuration Options
|
||||
version,
|
||||
}:
|
||||
let
|
||||
inherit (lib) fileset;
|
||||
in
|
||||
mkMesonDerivation (finalAttrs: {
|
||||
pname = "nix-kaitai-struct-checks";
|
||||
inherit version;
|
||||
|
||||
workDir = ./.;
|
||||
fileset = lib.fileset.unions [
|
||||
../../nix-meson-build-support
|
||||
./nix-meson-build-support
|
||||
./.version
|
||||
../../.version
|
||||
../../doc/manual/source/protocols/nix-archive/nar.ksy
|
||||
./nars
|
||||
../../src/libutil-tests/data
|
||||
./meson.build
|
||||
./nar.ksy
|
||||
(fileset.fileFilter (file: file.hasExt "cc") ./.)
|
||||
(fileset.fileFilter (file: file.hasExt "hh") ./.)
|
||||
];
|
||||
|
||||
outputs = [ "out" ];
|
||||
|
||||
passthru.externalNativeBuildInputs = [
|
||||
# This can go away when we bump up to 25.11
|
||||
(kaitai-struct-compiler.overrideAttrs (finalAttrs: {
|
||||
version = "0.11";
|
||||
src = fetchzip {
|
||||
url = "https://github.com/kaitai-io/kaitai_struct_compiler/releases/download/${version}/kaitai-struct-compiler-${version}.zip";
|
||||
sha256 = "sha256-j9TEilijqgIiD0GbJfGKkU1FLio9aTopIi1v8QT1b+A=";
|
||||
};
|
||||
}))
|
||||
];
|
||||
|
||||
passthru.externalBuildInputs = [
|
||||
gtest
|
||||
kaitai-struct-cpp-stl-runtime
|
||||
];
|
||||
|
||||
buildInputs = finalAttrs.passthru.externalBuildInputs;
|
||||
|
||||
nativeBuildInputs = [
|
||||
meson
|
||||
ninja
|
||||
pkg-config
|
||||
]
|
||||
++ finalAttrs.passthru.externalNativeBuildInputs;
|
||||
|
||||
doCheck = true;
|
||||
|
||||
mesonCheckFlags = [ "--print-errorlogs" ];
|
||||
|
||||
postInstall = ''
|
||||
touch $out
|
||||
'';
|
||||
|
||||
meta = {
|
||||
platforms = lib.platforms.all;
|
||||
};
|
||||
})
|
||||
48
src/kaitai-struct-checks/test-parse-nar.cc
Normal file
48
src/kaitai-struct-checks/test-parse-nar.cc
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
#include <gtest/gtest.h>

#include <cstdlib>
#include <filesystem>
#include <fstream>
#include <string>
#include <vector>

#include <kaitai/kaitaistream.h>

#include "nix_nar.h"

// NAR fixture files (resolved relative to NIX_NARS_DIR) that the
// Kaitai-generated parser is exercised against, one per test instance.
static const std::vector<std::string> NarFiles = {
    "empty.nar",
    "dot.nar",
    "dotdot.nar",
    "executable-after-contents.nar",
    "invalid-tag-instead-of-contents.nar",
    "name-after-node.nar",
    "nul-character.nar",
    "slash.nar",
};

// Parameterized over the fixture file name.
class NarParseTest : public ::testing::TestWithParam<std::string>
{};

TEST_P(NarParseTest, ParseSucceeds)
{
    // The NAR file under test for this parameterization.
    const auto nar_file = GetParam();

    // The test harness (meson.build) passes the fixture directory in
    // the environment; fail loudly rather than guessing a location.
    const char * nars_dir_env = std::getenv("NIX_NARS_DIR");
    if (nars_dir_env == nullptr) {
        FAIL() << "NIX_NARS_DIR environment variable not set.";
    }

    // Bug fix: this previously hardcoded "dot.nar", so every
    // parameterized instance parsed the same file and the rest of
    // NarFiles was never exercised. Use the test parameter instead.
    // NOTE(review): some fixtures are deliberately malformed NARs
    // (e.g. invalid-tag-instead-of-contents.nar); if the generated
    // parser rejects them, they should move to an EXPECT_THROW case.
    const std::filesystem::path nar_file_path = std::filesystem::path(nars_dir_env) / nar_file;
    ASSERT_TRUE(std::filesystem::exists(nar_file_path)) << "Missing test file: " << nar_file_path;

    std::ifstream ifs(nar_file_path, std::ifstream::binary);
    ASSERT_TRUE(ifs.is_open()) << "Failed to open file: " << nar_file;
    kaitai::kstream ks(&ifs);
    nix_nar_t nar(&ks);
    ASSERT_TRUE(nar.root_node() != nullptr) << "Failed to parse NAR file: " << nar_file;
}

INSTANTIATE_TEST_SUITE_P(AllNarFiles, NarParseTest, ::testing::ValuesIn(NarFiles));
|
||||
|
|
@ -108,20 +108,16 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
|
|||
overloaded{
|
||||
[&](const BuiltPath::Opaque & p) { res.insert(p.path); },
|
||||
[&](const BuiltPath::Built & p) {
|
||||
auto drvHashes = staticOutputHashes(store, store.readDerivation(p.drvPath->outPath()));
|
||||
for (auto & [outputName, outputPath] : p.outputs) {
|
||||
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
|
||||
auto drvOutput = get(drvHashes, outputName);
|
||||
if (!drvOutput)
|
||||
throw Error(
|
||||
"the derivation '%s' has unrealised output '%s' (derived-path.cc/toRealisedPaths)",
|
||||
store.printStorePath(p.drvPath->outPath()),
|
||||
outputName);
|
||||
DrvOutput key{*drvOutput, outputName};
|
||||
DrvOutput key{
|
||||
.drvPath = p.drvPath->outPath(),
|
||||
.outputName = outputName,
|
||||
};
|
||||
auto thisRealisation = store.queryRealisation(key);
|
||||
assert(thisRealisation); // We’ve built it, so we must
|
||||
// have the realisation
|
||||
res.insert(Realisation{*thisRealisation, std::move(key)});
|
||||
// We’ve built it, so we must have the realisation.
|
||||
assert(thisRealisation);
|
||||
res.insert(Realisation{*thisRealisation, key});
|
||||
} else {
|
||||
res.insert(outputPath);
|
||||
}
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue