mirror of
https://github.com/NixOS/nix.git
synced 2025-11-08 11:36:03 +01:00
Compare commits
328 commits
35b08b71a4
...
33f18c6492
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
33f18c6492 | ||
|
|
050ff4c6a3 | ||
|
|
d596b9754e | ||
|
|
3f18cad5f1 | ||
|
|
41b62aa979 | ||
|
|
af41eccb31 | ||
|
|
6bd92d47e5 | ||
|
|
b5302fc111 | ||
|
|
724086005a | ||
|
|
038d74edf7 | ||
|
|
b177354c35 | ||
|
|
2039235f6e | ||
|
|
0fd3b6fee6 | ||
|
|
b2f0472fe2 | ||
|
|
91af29f37a | ||
|
|
099af7578f | ||
|
|
948c89b367 | ||
|
|
7e84ce3904 | ||
|
|
a828cf777a | ||
|
|
687dd38998 | ||
|
|
62729ff472 | ||
|
|
0507674a13 | ||
|
|
4f85cfe824 | ||
|
|
7d5567a8d7 | ||
|
|
3ed42cd354 | ||
|
|
4a888b4138 | ||
|
|
f2436a47bb | ||
|
|
83ddfaebf4 | ||
|
|
2b382b171c | ||
|
|
b7553378a4 | ||
|
|
d40f66109b | ||
|
|
9657feaf8c | ||
|
|
341c42f321 | ||
|
|
631fb6c9ad | ||
|
|
11e19ee690 | ||
|
|
9f322398b4 | ||
|
|
e07510e504 | ||
|
|
ae15d4eaf3 | ||
|
|
469123eda1 | ||
|
|
3ef22a521d | ||
|
|
c8e24491c0 | ||
|
|
c3d4c5f69d | ||
|
|
43ce9da6ad | ||
|
|
72d0f7b619 | ||
|
|
34ac1792f9 | ||
|
|
0586370e58 | ||
|
|
f63bb5b338 | ||
|
|
53b4ea6c85 | ||
|
|
7c85ac23e2 | ||
|
|
0539b58253 | ||
|
|
beace42e7a | ||
|
|
4a0ccc89d9 | ||
|
|
89fa8c09a9 | ||
|
|
5e025ce940 | ||
|
|
2f6c865e25 | ||
|
|
bd42092873 | ||
|
|
81a2809a52 | ||
|
|
3448d4fa4c | ||
|
|
965d6be7c1 | ||
|
|
040d1aae41 | ||
|
|
bf947bfc26 | ||
|
|
2d83bc6b83 | ||
|
|
e0debd61d5 | ||
|
|
233bd250d1 | ||
|
|
4ea32d0b03 | ||
|
|
892eba4944 | ||
|
|
e4e4063f16 | ||
|
|
d8cec03fce | ||
|
|
b67c2f1572 | ||
|
|
ca9fde1b88 | ||
|
|
0ba1aa34dc | ||
|
|
6fa7510055 | ||
|
|
8151afb345 | ||
|
|
134613e885 | ||
|
|
9d1907fff7 | ||
|
|
c29411ada9 | ||
|
|
8dbc2475f7 | ||
|
|
9e79e83cb5 | ||
|
|
937a6df809 | ||
|
|
1ca6e9ef54 | ||
|
|
ade3d5d746 | ||
|
|
d035d8ba8d | ||
|
|
67be2df174 | ||
|
|
34f780d747 | ||
|
|
e43888890f | ||
|
|
4a80c92a4d | ||
|
|
3a3c062982 | ||
|
|
4a2fb18ba0 | ||
|
|
9eecee3d4e | ||
|
|
089a222111 | ||
|
|
c2609df08c | ||
|
|
37c1ef52e6 | ||
|
|
e776a10db3 | ||
|
|
1507843f6c | ||
|
|
e636888a09 | ||
|
|
3b2186e1c8 | ||
|
|
7e2d2db8ef | ||
|
|
2cc53201eb | ||
|
|
720f693627 | ||
|
|
49084a7e9e | ||
|
|
6d87184a52 | ||
|
|
6985e9f2c3 | ||
|
|
e6f0dd8df5 | ||
|
|
d857a4be50 | ||
|
|
93fe3354b5 | ||
|
|
8b3af40006 | ||
|
|
bffbdcfddc | ||
|
|
495d1b8435 | ||
|
|
66d7b8fe1b | ||
|
|
cf75079bd8 | ||
|
|
b8d7f551e4 | ||
|
|
e947c895ec | ||
|
|
f301669adc | ||
|
|
e3c41407f9 | ||
|
|
00f4a860e7 | ||
|
|
560a596de7 | ||
|
|
da637a05da | ||
|
|
956fffdd6f | ||
|
|
bac41d6989 | ||
|
|
de192794c9 | ||
|
|
246dbe1c05 | ||
|
|
6280905638 | ||
|
|
194c21fc82 | ||
|
|
e08853a67c | ||
|
|
ae49074548 | ||
|
|
f1d4fab1e5 | ||
|
|
c874e7071b | ||
|
|
c67966418f | ||
|
|
be2572ed8d | ||
|
|
be99a1c6bb | ||
|
|
fe8cdbc3e4 | ||
|
|
70176ed317 | ||
|
|
84a5bee424 | ||
|
|
e3246301a6 | ||
|
|
d4c69c7b8f | ||
|
|
f5aafbd6ed | ||
|
|
943788754f | ||
|
|
883860c7ff | ||
|
|
5fc0c4f102 | ||
|
|
1a4ad0706b | ||
|
|
972915cabd | ||
|
|
94965a3a3e | ||
|
|
c77317b1a9 | ||
|
|
dd0d006517 | ||
|
|
ccc06451df | ||
|
|
3775a2a226 | ||
|
|
1d3f0ca22e | ||
|
|
1c41e07b46 | ||
|
|
c592090fff | ||
|
|
4b6d07d642 | ||
|
|
e177f42536 | ||
|
|
ac8b1efcf9 | ||
|
|
ad664ce64e | ||
|
|
18941a2421 | ||
|
|
136825b4a2 | ||
|
|
28b73cabcc | ||
|
|
aa4106fd68 | ||
|
|
7f1d92793e | ||
|
|
234f029940 | ||
|
|
dd716dc9be | ||
|
|
ea17cc1b57 | ||
|
|
0c1be3aabe | ||
|
|
6ca3434cac | ||
|
|
6129aee988 | ||
|
|
5e220271e2 | ||
|
|
8e6b69de54 | ||
|
|
3915b3a111 | ||
|
|
c5515bb22e | ||
|
|
91b69e9e70 | ||
|
|
9e9dfe36df | ||
|
|
50e8d17f3c | ||
|
|
ef8dd58d9b | ||
|
|
91ed3701fe | ||
|
|
b8e5d1f290 | ||
|
|
d44b33562f | ||
|
|
d46504a136 | ||
|
|
126f30deb2 | ||
|
|
5dcfa86910 | ||
|
|
6b6ceddf72 | ||
|
|
60f9489b83 | ||
|
|
584a8e8a00 | ||
|
|
f234633e27 | ||
|
|
6417863ce9 | ||
|
|
91cd42511e | ||
|
|
1af5a98955 | ||
|
|
17777e3b70 | ||
|
|
9321669353 | ||
|
|
3742ae061e | ||
|
|
a91115bf22 | ||
|
|
8c8b706f6b | ||
|
|
fb26285458 | ||
|
|
bbfaaf3a20 | ||
|
|
f9b73185e4 | ||
|
|
27e3d28ed8 | ||
|
|
3994e5627f | ||
|
|
ec2fd2dc23 | ||
|
|
fdc5600fa7 | ||
|
|
1f6ac88efc | ||
|
|
9d4d10954a | ||
|
|
7e53afd8b9 | ||
|
|
bef3c37cb2 | ||
|
|
0f0d9255c6 | ||
|
|
f0b95b6d5b | ||
|
|
e38128b90d | ||
|
|
78e98691d6 | ||
|
|
e213fd64b6 | ||
|
|
1cd8458c28 | ||
|
|
ecaf9470b9 | ||
|
|
8b7e03f0f9 | ||
|
|
04606d50d1 | ||
|
|
476c21d5ef | ||
|
|
1a9ba0d6fe | ||
|
|
648714cd44 | ||
|
|
6b7223b6b7 | ||
|
|
afe5ed879f | ||
|
|
d924374bf2 | ||
|
|
f1968ea38e | ||
|
|
8d338c9234 | ||
|
|
9a695f9067 | ||
|
|
925c0fa4a2 | ||
|
|
7308fde0bc | ||
|
|
324bfd82dc | ||
|
|
8e01e4ad5c | ||
|
|
4c4eb5d07f | ||
|
|
b5ae3e10c2 | ||
|
|
4f5af471fb | ||
|
|
b9af19cedf | ||
|
|
d6f1e2de21 | ||
|
|
5d365cd61f | ||
|
|
c87f29a0b6 | ||
|
|
f594a8e11e | ||
|
|
0a74b4905c | ||
|
|
d74177dccc | ||
|
|
36ee38efd1 | ||
|
|
5d7912eb18 | ||
|
|
78888ec8a8 | ||
|
|
b047cecf5c | ||
|
|
d0217ec180 | ||
|
|
953929f899 | ||
|
|
3c83856494 | ||
|
|
f3d8d1f719 | ||
|
|
c8a15bf70d | ||
|
|
ad5c6a53b9 | ||
|
|
350d602832 | ||
|
|
115dea10b2 | ||
|
|
ddb8830c97 | ||
|
|
459f9e0185 | ||
|
|
5390bba920 | ||
|
|
387eceff45 | ||
|
|
96c8cc550f | ||
|
|
32b286e5d6 | ||
|
|
b558dac7a9 | ||
|
|
7e8db2eb59 | ||
|
|
6ca2efc7d4 | ||
|
|
62247af363 | ||
|
|
a4a49a9dae | ||
|
|
606c258c6f | ||
|
|
ef8218f2e3 | ||
|
|
ada008a795 | ||
|
|
2a2bb8330d | ||
|
|
645794b458 | ||
|
|
1121f0d8ec | ||
|
|
6420879728 | ||
|
|
67f5cb97a3 | ||
|
|
5e7ee808de | ||
|
|
270f20a505 | ||
|
|
1b1d7e3047 | ||
|
|
0f28c76a44 | ||
|
|
e3b3f05e5d | ||
|
|
f05d240222 | ||
|
|
22c73868c3 | ||
|
|
a91b787524 | ||
|
|
ddf7de0a76 | ||
|
|
1fabed18b6 | ||
|
|
6c9083db2c | ||
|
|
c663f7ec79 | ||
|
|
d0fb03c35d | ||
|
|
c847cd87f1 | ||
|
|
dbbdae926b | ||
|
|
3c03050cd6 | ||
|
|
e33cd5aa38 | ||
|
|
d9c808f8a7 | ||
|
|
55ea3d3476 | ||
|
|
7d0c06f921 | ||
|
|
5b4bd5bcb8 | ||
|
|
4ae6c65bc5 | ||
|
|
4f19e63a8f | ||
|
|
f88c3055f8 | ||
|
|
9058d90ab2 | ||
|
|
c1a15d1a26 | ||
|
|
22f4cccc71 | ||
|
|
b56e456b0d | ||
|
|
3d147c04a5 | ||
|
|
61fbef42a6 | ||
|
|
c92ba4b9b7 | ||
|
|
67bffa19a5 | ||
|
|
daa7e0d2e9 | ||
|
|
109f6449cc | ||
|
|
ad2360c59f | ||
|
|
20c7c551bf | ||
|
|
e78e6ca4f4 | ||
|
|
e34063cf21 | ||
|
|
e457ea7688 | ||
|
|
64c55961eb | ||
|
|
ffbc33fec6 | ||
|
|
a80fc252e8 | ||
|
|
bcd5a9d05c | ||
|
|
01b001d5ba | ||
|
|
27767a6094 | ||
|
|
1177d65094 | ||
|
|
a2c6f38e1f | ||
|
|
1c02dd5b9c | ||
|
|
d87a06af7a | ||
|
|
2dc9f2a2b7 | ||
|
|
a7991d55cc | ||
|
|
e7047fde25 | ||
|
|
33e94fe19f | ||
|
|
dc03c6a812 | ||
|
|
b1d067c9bb | ||
|
|
d782c5e586 | ||
|
|
a48a737517 | ||
|
|
6036aaf798 | ||
|
|
a9d9b50b72 | ||
|
|
6fa03765ed | ||
|
|
12293a8b11 | ||
|
|
3fb943d130 | ||
|
|
aace1fb5d6 | ||
|
|
9abcc68ad1 |
301 changed files with 7550 additions and 2201 deletions
18
.coderabbit.yaml
Normal file
18
.coderabbit.yaml
Normal file
|
|
@ -0,0 +1,18 @@
|
||||||
|
# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json
|
||||||
|
# Disable CodeRabbit auto-review to prevent verbose comments on PRs.
|
||||||
|
# When enabled: false, CodeRabbit won't attempt reviews and won't post
|
||||||
|
# "Review skipped" or other automated comments.
|
||||||
|
reviews:
|
||||||
|
auto_review:
|
||||||
|
enabled: false
|
||||||
|
review_status: false
|
||||||
|
high_level_summary: false
|
||||||
|
poem: false
|
||||||
|
sequence_diagrams: false
|
||||||
|
changed_files_summary: false
|
||||||
|
tools:
|
||||||
|
github-checks:
|
||||||
|
enabled: false
|
||||||
|
chat:
|
||||||
|
art: false
|
||||||
|
auto_reply: false
|
||||||
13
.github/actions/install-nix-action/action.yaml
vendored
13
.github/actions/install-nix-action/action.yaml
vendored
|
|
@ -16,13 +16,17 @@ inputs:
|
||||||
install_url:
|
install_url:
|
||||||
description: "URL of the Nix installer"
|
description: "URL of the Nix installer"
|
||||||
required: false
|
required: false
|
||||||
default: "https://releases.nixos.org/nix/nix-2.30.2/install"
|
default: "https://releases.nixos.org/nix/nix-2.32.1/install"
|
||||||
tarball_url:
|
tarball_url:
|
||||||
description: "URL of the Nix tarball to use with the experimental installer"
|
description: "URL of the Nix tarball to use with the experimental installer"
|
||||||
required: false
|
required: false
|
||||||
github_token:
|
github_token:
|
||||||
description: "Github token"
|
description: "Github token"
|
||||||
required: true
|
required: true
|
||||||
|
use_cache:
|
||||||
|
description: "Whether to setup magic-nix-cache"
|
||||||
|
default: true
|
||||||
|
required: false
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
steps:
|
steps:
|
||||||
|
|
@ -118,3 +122,10 @@ runs:
|
||||||
source-url: ${{ inputs.experimental-installer-version != 'latest' && 'https://artifacts.nixos.org/experimental-installer/tag/${{ inputs.experimental-installer-version }}/${{ env.EXPERIMENTAL_INSTALLER_ARTIFACT }}' || '' }}
|
source-url: ${{ inputs.experimental-installer-version != 'latest' && 'https://artifacts.nixos.org/experimental-installer/tag/${{ inputs.experimental-installer-version }}/${{ env.EXPERIMENTAL_INSTALLER_ARTIFACT }}' || '' }}
|
||||||
nix-package-url: ${{ inputs.dogfood == 'true' && steps.download-nix-installer.outputs.tarball-path || (inputs.tarball_url || '') }}
|
nix-package-url: ${{ inputs.dogfood == 'true' && steps.download-nix-installer.outputs.tarball-path || (inputs.tarball_url || '') }}
|
||||||
extra-conf: ${{ inputs.extra_nix_config }}
|
extra-conf: ${{ inputs.extra_nix_config }}
|
||||||
|
- uses: DeterminateSystems/magic-nix-cache-action@565684385bcd71bad329742eefe8d12f2e765b39 # v13
|
||||||
|
if: ${{ inputs.use_cache == 'true' }}
|
||||||
|
with:
|
||||||
|
diagnostic-endpoint: ''
|
||||||
|
use-flakehub: false
|
||||||
|
use-gha-cache: true
|
||||||
|
source-revision: 92d9581367be2233c2d5714a2640e1339f4087d8 # main
|
||||||
|
|
|
||||||
28
.github/workflows/ci.yml
vendored
28
.github/workflows/ci.yml
vendored
|
|
@ -14,6 +14,10 @@ on:
|
||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
permissions: read-all
|
permissions: read-all
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
|
@ -29,6 +33,7 @@ jobs:
|
||||||
extra_nix_config:
|
extra_nix_config:
|
||||||
experimental-features = nix-command flakes
|
experimental-features = nix-command flakes
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
use_cache: false
|
||||||
- run: nix flake show --all-systems --json
|
- run: nix flake show --all-systems --json
|
||||||
|
|
||||||
pre-commit-checks:
|
pre-commit-checks:
|
||||||
|
|
@ -41,7 +46,6 @@ jobs:
|
||||||
dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }}
|
dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }}
|
||||||
extra_nix_config: experimental-features = nix-command flakes
|
extra_nix_config: experimental-features = nix-command flakes
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
|
||||||
- run: ./ci/gha/tests/pre-commit-checks
|
- run: ./ci/gha/tests/pre-commit-checks
|
||||||
|
|
||||||
basic-checks:
|
basic-checks:
|
||||||
|
|
@ -92,7 +96,6 @@ jobs:
|
||||||
dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }}
|
dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }}
|
||||||
# The sandbox would otherwise be disabled by default on Darwin
|
# The sandbox would otherwise be disabled by default on Darwin
|
||||||
extra_nix_config: "sandbox = true"
|
extra_nix_config: "sandbox = true"
|
||||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
|
||||||
# Since ubuntu 22.30, unprivileged usernamespaces are no longer allowed to map to the root user:
|
# Since ubuntu 22.30, unprivileged usernamespaces are no longer allowed to map to the root user:
|
||||||
# https://ubuntu.com/blog/ubuntu-23-10-restricted-unprivileged-user-namespaces
|
# https://ubuntu.com/blog/ubuntu-23-10-restricted-unprivileged-user-namespaces
|
||||||
- run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0
|
- run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0
|
||||||
|
|
@ -122,13 +125,13 @@ jobs:
|
||||||
cat coverage-reports/index.txt >> $GITHUB_STEP_SUMMARY
|
cat coverage-reports/index.txt >> $GITHUB_STEP_SUMMARY
|
||||||
if: ${{ matrix.instrumented }}
|
if: ${{ matrix.instrumented }}
|
||||||
- name: Upload coverage reports
|
- name: Upload coverage reports
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v5
|
||||||
with:
|
with:
|
||||||
name: coverage-reports
|
name: coverage-reports
|
||||||
path: coverage-reports/
|
path: coverage-reports/
|
||||||
if: ${{ matrix.instrumented }}
|
if: ${{ matrix.instrumented }}
|
||||||
- name: Upload installer tarball
|
- name: Upload installer tarball
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v5
|
||||||
with:
|
with:
|
||||||
name: installer-${{matrix.os}}
|
name: installer-${{matrix.os}}
|
||||||
path: out/*
|
path: out/*
|
||||||
|
|
@ -161,7 +164,7 @@ jobs:
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v5
|
- uses: actions/checkout@v5
|
||||||
- name: Download installer tarball
|
- name: Download installer tarball
|
||||||
uses: actions/download-artifact@v5
|
uses: actions/download-artifact@v6
|
||||||
with:
|
with:
|
||||||
name: installer-${{matrix.os}}
|
name: installer-${{matrix.os}}
|
||||||
path: out
|
path: out
|
||||||
|
|
@ -171,7 +174,7 @@ jobs:
|
||||||
echo "installer-url=file://$GITHUB_WORKSPACE/out" >> "$GITHUB_OUTPUT"
|
echo "installer-url=file://$GITHUB_WORKSPACE/out" >> "$GITHUB_OUTPUT"
|
||||||
TARBALL_PATH="$(find "$GITHUB_WORKSPACE/out" -name 'nix*.tar.xz' -print | head -n 1)"
|
TARBALL_PATH="$(find "$GITHUB_WORKSPACE/out" -name 'nix*.tar.xz' -print | head -n 1)"
|
||||||
echo "tarball-path=file://$TARBALL_PATH" >> "$GITHUB_OUTPUT"
|
echo "tarball-path=file://$TARBALL_PATH" >> "$GITHUB_OUTPUT"
|
||||||
- uses: cachix/install-nix-action@v31
|
- uses: cachix/install-nix-action@456688f15bc354bef6d396e4a35f4f89d40bf2b7 # v31.8.2
|
||||||
if: ${{ !matrix.experimental-installer }}
|
if: ${{ !matrix.experimental-installer }}
|
||||||
with:
|
with:
|
||||||
install_url: ${{ format('{0}/install', steps.installer-tarball-url.outputs.installer-url) }}
|
install_url: ${{ format('{0}/install', steps.installer-tarball-url.outputs.installer-url) }}
|
||||||
|
|
@ -227,12 +230,13 @@ jobs:
|
||||||
- uses: actions/checkout@v5
|
- uses: actions/checkout@v5
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: cachix/install-nix-action@v31
|
- uses: ./.github/actions/install-nix-action
|
||||||
with:
|
with:
|
||||||
install_url: https://releases.nixos.org/nix/nix-2.20.3/install
|
dogfood: false
|
||||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
extra_nix_config: |
|
||||||
- run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#nix.version | tr -d \")" >> $GITHUB_ENV
|
experimental-features = flakes nix-command
|
||||||
- run: nix --experimental-features 'nix-command flakes' build .#dockerImage -L
|
- run: echo NIX_VERSION="$(nix eval .\#nix.version | tr -d \")" >> $GITHUB_ENV
|
||||||
|
- run: nix build .#dockerImage -L
|
||||||
- run: docker load -i ./result/image.tar.gz
|
- run: docker load -i ./result/image.tar.gz
|
||||||
- run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:$NIX_VERSION
|
- run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:$NIX_VERSION
|
||||||
- run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:master
|
- run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:master
|
||||||
|
|
@ -289,7 +293,6 @@ jobs:
|
||||||
extra_nix_config:
|
extra_nix_config:
|
||||||
experimental-features = nix-command flakes
|
experimental-features = nix-command flakes
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
|
||||||
- run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh
|
- run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh
|
||||||
|
|
||||||
profile_build:
|
profile_build:
|
||||||
|
|
@ -310,7 +313,6 @@ jobs:
|
||||||
extra_nix_config: |
|
extra_nix_config: |
|
||||||
experimental-features = flakes nix-command ca-derivations impure-derivations
|
experimental-features = flakes nix-command ca-derivations impure-derivations
|
||||||
max-jobs = 1
|
max-jobs = 1
|
||||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
|
||||||
- run: |
|
- run: |
|
||||||
nix build -L --file ./ci/gha/profile-build buildTimeReport --out-link build-time-report.md
|
nix build -L --file ./ci/gha/profile-build buildTimeReport --out-link build-time-report.md
|
||||||
cat build-time-report.md >> $GITHUB_STEP_SUMMARY
|
cat build-time-report.md >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
|
||||||
|
|
@ -107,15 +107,33 @@ rec {
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
disable =
|
||||||
|
let
|
||||||
|
inherit (pkgs.stdenv) hostPlatform;
|
||||||
|
in
|
||||||
|
args@{
|
||||||
|
pkgName,
|
||||||
|
testName,
|
||||||
|
test,
|
||||||
|
}:
|
||||||
|
lib.any (b: b) [
|
||||||
|
# FIXME: Nix manual is impure and does not produce all settings on darwin
|
||||||
|
(hostPlatform.isDarwin && pkgName == "nix-manual" && testName == "linkcheck")
|
||||||
|
];
|
||||||
|
|
||||||
componentTests =
|
componentTests =
|
||||||
(lib.concatMapAttrs (
|
(lib.concatMapAttrs (
|
||||||
pkgName: pkg:
|
pkgName: pkg:
|
||||||
lib.concatMapAttrs (testName: test: {
|
lib.concatMapAttrs (
|
||||||
"${componentTestsPrefix}${pkgName}-${testName}" = test;
|
testName: test:
|
||||||
}) (pkg.tests or { })
|
lib.optionalAttrs (!disable { inherit pkgName testName test; }) {
|
||||||
|
"${componentTestsPrefix}${pkgName}-${testName}" = test;
|
||||||
|
}
|
||||||
|
) (pkg.tests or { })
|
||||||
) nixComponentsInstrumented)
|
) nixComponentsInstrumented)
|
||||||
// lib.optionalAttrs (pkgs.stdenv.hostPlatform == pkgs.stdenv.buildPlatform) {
|
// lib.optionalAttrs (pkgs.stdenv.hostPlatform == pkgs.stdenv.buildPlatform) {
|
||||||
"${componentTestsPrefix}nix-functional-tests" = nixComponentsInstrumented.nix-functional-tests;
|
"${componentTestsPrefix}nix-functional-tests" = nixComponentsInstrumented.nix-functional-tests;
|
||||||
|
"${componentTestsPrefix}nix-json-schema-checks" = nixComponentsInstrumented.nix-json-schema-checks;
|
||||||
};
|
};
|
||||||
|
|
||||||
codeCoverage =
|
codeCoverage =
|
||||||
|
|
@ -222,7 +240,7 @@ rec {
|
||||||
};
|
};
|
||||||
|
|
||||||
vmTests = {
|
vmTests = {
|
||||||
inherit (nixosTests) curl-s3-binary-cache-store;
|
inherit (nixosTests) s3-binary-cache-store;
|
||||||
}
|
}
|
||||||
// lib.optionalAttrs (!withSanitizers && !withCoverage) {
|
// lib.optionalAttrs (!withSanitizers && !withCoverage) {
|
||||||
# evalNixpkgs uses non-instrumented components from hydraJobs, so only run it
|
# evalNixpkgs uses non-instrumented components from hydraJobs, so only run it
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@
|
||||||
|
|
||||||
|
|
||||||
def transform_anchors_html:
|
def transform_anchors_html:
|
||||||
. | gsub($empty_anchor_regex; "<a name=\"" + .anchor + "\"></a>")
|
. | gsub($empty_anchor_regex; "<a id=\"" + .anchor + "\"></a>")
|
||||||
| gsub($anchor_regex; "<a href=\"#" + .anchor + "\" id=\"" + .anchor + "\">" + .text + "</a>");
|
| gsub($anchor_regex; "<a href=\"#" + .anchor + "\" id=\"" + .anchor + "\">" + .text + "</a>");
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -7,6 +7,7 @@ additional-css = ["custom.css"]
|
||||||
additional-js = ["redirects.js"]
|
additional-js = ["redirects.js"]
|
||||||
edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}"
|
edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}"
|
||||||
git-repository-url = "https://github.com/NixOS/nix"
|
git-repository-url = "https://github.com/NixOS/nix"
|
||||||
|
mathjax-support = true
|
||||||
|
|
||||||
# Handles replacing @docroot@ with a path to ./source relative to that markdown file,
|
# Handles replacing @docroot@ with a path to ./source relative to that markdown file,
|
||||||
# {{#include handlebars}}, and the @generated@ syntax used within these. it mostly
|
# {{#include handlebars}}, and the @generated@ syntax used within these. it mostly
|
||||||
|
|
|
||||||
|
|
@ -24,9 +24,9 @@ let
|
||||||
in
|
in
|
||||||
concatStringsSep "\n" (map showEntry storesList);
|
concatStringsSep "\n" (map showEntry storesList);
|
||||||
|
|
||||||
"index.md" =
|
"index.md" = replaceStrings [ "@store-types@" ] [ index ] (
|
||||||
replaceStrings [ "@store-types@" ] [ index ]
|
readFile ./source/store/types/index.md.in
|
||||||
(readFile ./source/store/types/index.md.in);
|
);
|
||||||
|
|
||||||
tableOfContents =
|
tableOfContents =
|
||||||
let
|
let
|
||||||
|
|
|
||||||
|
|
@ -88,10 +88,12 @@ manual = custom_target(
|
||||||
@0@ @INPUT0@ @CURRENT_SOURCE_DIR@ > @DEPFILE@
|
@0@ @INPUT0@ @CURRENT_SOURCE_DIR@ > @DEPFILE@
|
||||||
@0@ @INPUT1@ summary @2@ < @CURRENT_SOURCE_DIR@/source/SUMMARY.md.in > @2@/source/SUMMARY.md
|
@0@ @INPUT1@ summary @2@ < @CURRENT_SOURCE_DIR@/source/SUMMARY.md.in > @2@/source/SUMMARY.md
|
||||||
sed -e 's|@version@|@3@|g' < @INPUT2@ > @2@/book.toml
|
sed -e 's|@version@|@3@|g' < @INPUT2@ > @2@/book.toml
|
||||||
@4@ -r --include='*.md' @CURRENT_SOURCE_DIR@/ @2@/
|
@4@ -r -L --include='*.md' @CURRENT_SOURCE_DIR@/ @2@/
|
||||||
(cd @2@; RUST_LOG=warn @1@ build -d @2@ 3>&2 2>&1 1>&3) | { grep -Fv "because fragment resolution isn't implemented" || :; } 3>&2 2>&1 1>&3
|
(cd @2@; RUST_LOG=warn @1@ build -d @2@ 3>&2 2>&1 1>&3) | { grep -Fv "because fragment resolution isn't implemented" || :; } 3>&2 2>&1 1>&3
|
||||||
rm -rf @2@/manual
|
rm -rf @2@/manual
|
||||||
mv @2@/html @2@/manual
|
mv @2@/html @2@/manual
|
||||||
|
# Remove Mathjax 2.7, because we will actually use MathJax 3.x
|
||||||
|
find @2@/manual | grep .html | xargs sed -i -e '/2.7.1.MathJax.js/d'
|
||||||
find @2@/manual -iname meson.build -delete
|
find @2@/manual -iname meson.build -delete
|
||||||
'''.format(
|
'''.format(
|
||||||
python.full_path(),
|
python.full_path(),
|
||||||
|
|
@ -115,6 +117,7 @@ manual = custom_target(
|
||||||
builtins_md,
|
builtins_md,
|
||||||
rl_next_generated,
|
rl_next_generated,
|
||||||
summary_rl_next,
|
summary_rl_next,
|
||||||
|
json_schema_generated_files,
|
||||||
nix_input,
|
nix_input,
|
||||||
],
|
],
|
||||||
output : [
|
output : [
|
||||||
|
|
|
||||||
|
|
@ -12,11 +12,15 @@
|
||||||
rsync,
|
rsync,
|
||||||
nix-cli,
|
nix-cli,
|
||||||
changelog-d,
|
changelog-d,
|
||||||
|
json-schema-for-humans,
|
||||||
officialRelease,
|
officialRelease,
|
||||||
|
|
||||||
# Configuration Options
|
# Configuration Options
|
||||||
|
|
||||||
version,
|
version,
|
||||||
|
|
||||||
|
# `tests` attribute
|
||||||
|
testers,
|
||||||
}:
|
}:
|
||||||
|
|
||||||
let
|
let
|
||||||
|
|
@ -32,6 +36,13 @@ mkMesonDerivation (finalAttrs: {
|
||||||
fileset.difference
|
fileset.difference
|
||||||
(fileset.unions [
|
(fileset.unions [
|
||||||
../../.version
|
../../.version
|
||||||
|
# For example JSON
|
||||||
|
../../src/libutil-tests/data/hash
|
||||||
|
../../src/libstore-tests/data/content-address
|
||||||
|
../../src/libstore-tests/data/store-path
|
||||||
|
../../src/libstore-tests/data/derived-path
|
||||||
|
../../src/libstore-tests/data/path-info
|
||||||
|
../../src/libstore-tests/data/nar-info
|
||||||
# Too many different types of files to filter for now
|
# Too many different types of files to filter for now
|
||||||
../../doc/manual
|
../../doc/manual
|
||||||
./.
|
./.
|
||||||
|
|
@ -55,6 +66,7 @@ mkMesonDerivation (finalAttrs: {
|
||||||
jq
|
jq
|
||||||
python3
|
python3
|
||||||
rsync
|
rsync
|
||||||
|
json-schema-for-humans
|
||||||
changelog-d
|
changelog-d
|
||||||
]
|
]
|
||||||
++ lib.optionals (!officialRelease) [
|
++ lib.optionals (!officialRelease) [
|
||||||
|
|
@ -78,6 +90,29 @@ mkMesonDerivation (finalAttrs: {
|
||||||
echo "doc manual ''$out/share/doc/nix/manual" >> ''$out/nix-support/hydra-build-products
|
echo "doc manual ''$out/share/doc/nix/manual" >> ''$out/nix-support/hydra-build-products
|
||||||
'';
|
'';
|
||||||
|
|
||||||
|
/**
|
||||||
|
The root of the HTML manual.
|
||||||
|
E.g. "${nix-manual.site}/index.html" exists.
|
||||||
|
*/
|
||||||
|
passthru.site = finalAttrs.finalPackage + "/share/doc/nix/manual";
|
||||||
|
|
||||||
|
passthru.tests = {
|
||||||
|
# https://nixos.org/manual/nixpkgs/stable/index.html#tester-lycheeLinkCheck
|
||||||
|
linkcheck = testers.lycheeLinkCheck {
|
||||||
|
inherit (finalAttrs.finalPackage) site;
|
||||||
|
extraConfig = {
|
||||||
|
exclude = [
|
||||||
|
# Exclude auto-generated JSON schema documentation which has
|
||||||
|
# auto-generated fragment IDs that don't match the link references
|
||||||
|
".*/protocols/json/.*\\.html"
|
||||||
|
# Exclude undocumented builtins
|
||||||
|
".*/language/builtins\\.html#builtins-addErrorContext"
|
||||||
|
".*/language/builtins\\.html#builtins-appendContext"
|
||||||
|
];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
platforms = lib.platforms.all;
|
platforms = lib.platforms.all;
|
||||||
};
|
};
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
---
|
---
|
||||||
synopsis: "Improved S3 binary cache support via HTTP"
|
synopsis: "Improved S3 binary cache support via HTTP"
|
||||||
prs: [13823, 14026, 14120, 14131, 14135, 14144, 14170, 14190, 14198, 14206, 14209, 14222, 14223, 13752]
|
prs: [13752, 13823, 14026, 14120, 14131, 14135, 14144, 14170, 14190, 14198, 14206, 14209, 14222, 14223, 14330, 14333, 14335, 14336, 14337, 14350, 14356, 14357, 14374, 14375, 14376, 14377, 14391, 14393, 14420, 14421]
|
||||||
issues: [13084, 12671, 11748, 12403]
|
issues: [13084, 12671, 11748, 12403]
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|
@ -18,9 +18,23 @@ improvements:
|
||||||
The new implementation requires curl >= 7.75.0 and `aws-crt-cpp` for credential
|
The new implementation requires curl >= 7.75.0 and `aws-crt-cpp` for credential
|
||||||
management.
|
management.
|
||||||
|
|
||||||
All existing S3 URL formats and parameters remain supported, with the notable
|
All existing S3 URL formats and parameters remain supported, however the store
|
||||||
exception of multi-part uploads, which are no longer supported.
|
settings for configuring multipart uploads have changed:
|
||||||
|
|
||||||
|
- **`multipart-upload`** (default: `false`): Enable multipart uploads for large
|
||||||
|
files. When enabled, files exceeding the multipart threshold will be uploaded
|
||||||
|
in multiple parts.
|
||||||
|
|
||||||
|
- **`multipart-threshold`** (default: `100 MiB`): Minimum file size for using
|
||||||
|
multipart uploads. Files smaller than this will use regular PUT requests.
|
||||||
|
Only takes effect when `multipart-upload` is enabled.
|
||||||
|
|
||||||
|
- **`multipart-chunk-size`** (default: `5 MiB`): Size of each part in multipart
|
||||||
|
uploads. Must be at least 5 MiB (AWS S3 requirement). Larger chunk sizes
|
||||||
|
reduce the number of requests but use more memory.
|
||||||
|
|
||||||
|
- **`buffer-size`**: Has been replaced by `multipart-chunk-size` and is now an alias to it.
|
||||||
|
|
||||||
Note that this change also means Nix now supports S3 binary cache stores even
|
Note that this change also means Nix now supports S3 binary cache stores even
|
||||||
if build without `aws-crt-cpp`, but only for public buckets which do not
|
if built without `aws-crt-cpp`, but only for public buckets which do not
|
||||||
require auth.
|
require authentication.
|
||||||
|
|
|
||||||
14
doc/manual/rl-next/s3-object-versioning.md
Normal file
14
doc/manual/rl-next/s3-object-versioning.md
Normal file
|
|
@ -0,0 +1,14 @@
|
||||||
|
---
|
||||||
|
synopsis: "S3 URLs now support object versioning via versionId parameter"
|
||||||
|
prs: [14274]
|
||||||
|
issues: [13955]
|
||||||
|
---
|
||||||
|
|
||||||
|
S3 URLs now support a `versionId` query parameter to fetch specific versions
|
||||||
|
of objects from S3 buckets with versioning enabled. This allows pinning to
|
||||||
|
exact object versions for reproducibility and protection against unexpected
|
||||||
|
changes:
|
||||||
|
|
||||||
|
```
|
||||||
|
s3://bucket/key?region=us-east-1&versionId=abc123def456
|
||||||
|
```
|
||||||
|
|
@ -26,9 +26,12 @@
|
||||||
- [Derivation Outputs and Types of Derivations](store/derivation/outputs/index.md)
|
- [Derivation Outputs and Types of Derivations](store/derivation/outputs/index.md)
|
||||||
- [Content-addressing derivation outputs](store/derivation/outputs/content-address.md)
|
- [Content-addressing derivation outputs](store/derivation/outputs/content-address.md)
|
||||||
- [Input-addressing derivation outputs](store/derivation/outputs/input-address.md)
|
- [Input-addressing derivation outputs](store/derivation/outputs/input-address.md)
|
||||||
|
- [Build Trace](store/build-trace.md)
|
||||||
|
- [Derivation Resolution](store/resolution.md)
|
||||||
- [Building](store/building.md)
|
- [Building](store/building.md)
|
||||||
- [Store Types](store/types/index.md)
|
- [Store Types](store/types/index.md)
|
||||||
{{#include ./store/types/SUMMARY.md}}
|
{{#include ./store/types/SUMMARY.md}}
|
||||||
|
- [Appendix: Math notation](store/math-notation.md)
|
||||||
- [Nix Language](language/index.md)
|
- [Nix Language](language/index.md)
|
||||||
- [Data Types](language/types.md)
|
- [Data Types](language/types.md)
|
||||||
- [String context](language/string-context.md)
|
- [String context](language/string-context.md)
|
||||||
|
|
@ -117,11 +120,15 @@
|
||||||
- [Architecture and Design](architecture/architecture.md)
|
- [Architecture and Design](architecture/architecture.md)
|
||||||
- [Formats and Protocols](protocols/index.md)
|
- [Formats and Protocols](protocols/index.md)
|
||||||
- [JSON Formats](protocols/json/index.md)
|
- [JSON Formats](protocols/json/index.md)
|
||||||
|
- [Hash](protocols/json/hash.md)
|
||||||
|
- [Content Address](protocols/json/content-address.md)
|
||||||
|
- [Store Path](protocols/json/store-path.md)
|
||||||
- [Store Object Info](protocols/json/store-object-info.md)
|
- [Store Object Info](protocols/json/store-object-info.md)
|
||||||
- [Derivation](protocols/json/derivation.md)
|
- [Derivation](protocols/json/derivation.md)
|
||||||
|
- [Deriving Path](protocols/json/deriving-path.md)
|
||||||
- [Serving Tarball Flakes](protocols/tarball-fetcher.md)
|
- [Serving Tarball Flakes](protocols/tarball-fetcher.md)
|
||||||
- [Store Path Specification](protocols/store-path.md)
|
- [Store Path Specification](protocols/store-path.md)
|
||||||
- [Nix Archive (NAR) Format](protocols/nix-archive.md)
|
- [Nix Archive (NAR) Format](protocols/nix-archive/index.md)
|
||||||
- [Derivation "ATerm" file format](protocols/derivation-aterm.md)
|
- [Derivation "ATerm" file format](protocols/derivation-aterm.md)
|
||||||
- [C API](c-api.md)
|
- [C API](c-api.md)
|
||||||
- [Glossary](glossary.md)
|
- [Glossary](glossary.md)
|
||||||
|
|
|
||||||
|
|
@ -14,7 +14,7 @@ The moving parts of channels are:
|
||||||
- The official channels listed at <https://nixos.org/channels>
|
- The official channels listed at <https://nixos.org/channels>
|
||||||
- The user-specific list of [subscribed channels](#subscribed-channels)
|
- The user-specific list of [subscribed channels](#subscribed-channels)
|
||||||
- The [downloaded channel contents](#channels)
|
- The [downloaded channel contents](#channels)
|
||||||
- The [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path), set with the [`-I` option](#opt-i) or the [`NIX_PATH` environment variable](#env-NIX_PATH)
|
- The [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path), set with the [`-I` option](#opt-I) or the [`NIX_PATH` environment variable](#env-NIX_PATH)
|
||||||
|
|
||||||
> **Note**
|
> **Note**
|
||||||
>
|
>
|
||||||
|
|
|
||||||
|
|
@ -22,7 +22,7 @@ left untouched; this is not an error. It is also not an error if an
|
||||||
element of *args* matches no installed derivations.
|
element of *args* matches no installed derivations.
|
||||||
|
|
||||||
For a description of how *args* is mapped to a set of store paths, see
|
For a description of how *args* is mapped to a set of store paths, see
|
||||||
[`--install`](#operation---install). If *args* describes multiple
|
[`--install`](./install.md). If *args* describes multiple
|
||||||
store paths with the same symbolic name, only the one with the highest
|
store paths with the same symbolic name, only the one with the highest
|
||||||
version is installed.
|
version is installed.
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -19,7 +19,7 @@
|
||||||
|
|
||||||
This man page describes the command `nix-shell`, which is distinct from `nix
|
This man page describes the command `nix-shell`, which is distinct from `nix
|
||||||
shell`. For documentation on the latter, run `nix shell --help` or see `man
|
shell`. For documentation on the latter, run `nix shell --help` or see `man
|
||||||
nix3-shell`.
|
nix3-env-shell`.
|
||||||
|
|
||||||
# Description
|
# Description
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -48,8 +48,7 @@ The behaviour of the collector is also influenced by the
|
||||||
configuration file.
|
configuration file.
|
||||||
|
|
||||||
By default, the collector prints the total number of freed bytes when it
|
By default, the collector prints the total number of freed bytes when it
|
||||||
finishes (or when it is interrupted). With `--print-dead`, it prints the
|
finishes (or when it is interrupted).
|
||||||
number of bytes that would be freed.
|
|
||||||
|
|
||||||
{{#include ./opt-common.md}}
|
{{#include ./opt-common.md}}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -66,7 +66,7 @@ You can also build Nix for one of the [supported platforms](#platforms).
|
||||||
This section assumes you are using Nix with the [`flakes`] and [`nix-command`] experimental features enabled.
|
This section assumes you are using Nix with the [`flakes`] and [`nix-command`] experimental features enabled.
|
||||||
|
|
||||||
[`flakes`]: @docroot@/development/experimental-features.md#xp-feature-flakes
|
[`flakes`]: @docroot@/development/experimental-features.md#xp-feature-flakes
|
||||||
[`nix-command`]: @docroot@/development/experimental-features.md#xp-nix-command
|
[`nix-command`]: @docroot@/development/experimental-features.md#xp-feature-nix-command
|
||||||
|
|
||||||
To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:
|
To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:
|
||||||
|
|
||||||
|
|
@ -256,7 +256,7 @@ You can use any of the other supported environments in place of `nix-cli-ccacheS
|
||||||
## Editor integration
|
## Editor integration
|
||||||
|
|
||||||
The `clangd` LSP server is installed by default on the `clang`-based `devShell`s.
|
The `clangd` LSP server is installed by default on the `clang`-based `devShell`s.
|
||||||
See [supported compilation environments](#compilation-environments) and instructions how to set up a shell [with flakes](#nix-with-flakes) or in [classic Nix](#classic-nix).
|
See [supported compilation environments](#compilation-environments) and instructions how to set up a shell [with flakes](#building-nix-with-flakes) or in [classic Nix](#building-nix).
|
||||||
|
|
||||||
To use the LSP with your editor, you will want a `compile_commands.json` file telling `clangd` how we are compiling the code.
|
To use the LSP with your editor, you will want a `compile_commands.json` file telling `clangd` how we are compiling the code.
|
||||||
Meson's configure always produces this inside the build directory.
|
Meson's configure always produces this inside the build directory.
|
||||||
|
|
|
||||||
|
|
@ -240,3 +240,9 @@ $ configurePhase
|
||||||
$ ninja src/external-api-docs/html
|
$ ninja src/external-api-docs/html
|
||||||
$ xdg-open src/external-api-docs/html/index.html
|
$ xdg-open src/external-api-docs/html/index.html
|
||||||
```
|
```
|
||||||
|
|
||||||
|
If you use direnv, or otherwise want to run `configurePhase` in a transient shell, use:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
nix-shell -A devShells.x86_64-linux.native-clangStdenv --command 'appendToVar mesonFlags "-Ddoc-gen=true"; mesonConfigurePhase'
|
||||||
|
```
|
||||||
|
|
|
||||||
|
|
@ -119,7 +119,7 @@ This will:
|
||||||
|
|
||||||
3. Stop the program when the test fails, allowing the user to then issue arbitrary commands to GDB.
|
3. Stop the program when the test fails, allowing the user to then issue arbitrary commands to GDB.
|
||||||
|
|
||||||
### Characterisation testing { #characaterisation-testing-unit }
|
### Characterisation testing { #characterisation-testing-unit }
|
||||||
|
|
||||||
See [functional characterisation testing](#characterisation-testing-functional) for a broader discussion of characterisation testing.
|
See [functional characterisation testing](#characterisation-testing-functional) for a broader discussion of characterisation testing.
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -208,7 +208,7 @@
|
||||||
|
|
||||||
- [impure derivation]{#gloss-impure-derivation}
|
- [impure derivation]{#gloss-impure-derivation}
|
||||||
|
|
||||||
[An experimental feature](#@docroot@/development/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure,
|
[An experimental feature](@docroot@/development/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure,
|
||||||
so that they are always rebuilt, and their outputs not reused by subsequent calls to realise them.
|
so that they are always rebuilt, and their outputs not reused by subsequent calls to realise them.
|
||||||
|
|
||||||
- [Nix database]{#gloss-nix-database}
|
- [Nix database]{#gloss-nix-database}
|
||||||
|
|
@ -279,7 +279,7 @@
|
||||||
|
|
||||||
See [References](@docroot@/store/store-object.md#references) for details.
|
See [References](@docroot@/store/store-object.md#references) for details.
|
||||||
|
|
||||||
- [referrer]{#gloss-reference}
|
- [referrer]{#gloss-referrer}
|
||||||
|
|
||||||
A reversed edge from one [store object] to another.
|
A reversed edge from one [store object] to another.
|
||||||
|
|
||||||
|
|
@ -367,8 +367,8 @@
|
||||||
|
|
||||||
Nix represents files as [file system objects][file system object], and how they belong together is encoded as [references][reference] between [store objects][store object] that contain these file system objects.
|
Nix represents files as [file system objects][file system object], and how they belong together is encoded as [references][reference] between [store objects][store object] that contain these file system objects.
|
||||||
|
|
||||||
The [Nix language] allows denoting packages in terms of [attribute sets](@docroot@/language/types.md#attribute-set) containing:
|
The [Nix language] allows denoting packages in terms of [attribute sets](@docroot@/language/types.md#type-attrs) containing:
|
||||||
- attributes that refer to the files of a package, typically in the form of [derivation outputs](#output),
|
- attributes that refer to the files of a package, typically in the form of [derivation outputs](#gloss-output),
|
||||||
- attributes with metadata, such as information about how the package is supposed to be used.
|
- attributes with metadata, such as information about how the package is supposed to be used.
|
||||||
|
|
||||||
The exact shape of these attribute sets is up to convention.
|
The exact shape of these attribute sets is up to convention.
|
||||||
|
|
@ -383,7 +383,7 @@
|
||||||
|
|
||||||
[string]: ./language/types.md#type-string
|
[string]: ./language/types.md#type-string
|
||||||
[path]: ./language/types.md#type-path
|
[path]: ./language/types.md#type-path
|
||||||
[attribute name]: ./language/types.md#attribute-set
|
[attribute name]: ./language/types.md#type-attrs
|
||||||
|
|
||||||
- [base directory]{#gloss-base-directory}
|
- [base directory]{#gloss-base-directory}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -3,19 +3,21 @@
|
||||||
To run the latest stable release of Nix with Docker run the following command:
|
To run the latest stable release of Nix with Docker run the following command:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ docker run -ti ghcr.io/nixos/nix
|
$ docker run -ti docker.io/nixos/nix
|
||||||
Unable to find image 'ghcr.io/nixos/nix:latest' locally
|
Unable to find image 'docker.io/nixos/nix:latest' locally
|
||||||
latest: Pulling from ghcr.io/nixos/nix
|
latest: Pulling from docker.io/nixos/nix
|
||||||
5843afab3874: Pull complete
|
5843afab3874: Pull complete
|
||||||
b52bf13f109c: Pull complete
|
b52bf13f109c: Pull complete
|
||||||
1e2415612aa3: Pull complete
|
1e2415612aa3: Pull complete
|
||||||
Digest: sha256:27f6e7f60227e959ee7ece361f75d4844a40e1cc6878b6868fe30140420031ff
|
Digest: sha256:27f6e7f60227e959ee7ece361f75d4844a40e1cc6878b6868fe30140420031ff
|
||||||
Status: Downloaded newer image for ghcr.io/nixos/nix:latest
|
Status: Downloaded newer image for docker.io/nixos/nix:latest
|
||||||
35ca4ada6e96:/# nix --version
|
35ca4ada6e96:/# nix --version
|
||||||
nix (Nix) 2.3.12
|
nix (Nix) 2.3.12
|
||||||
35ca4ada6e96:/# exit
|
35ca4ada6e96:/# exit
|
||||||
```
|
```
|
||||||
|
|
||||||
|
> If you want the latest pre-release you can use ghcr.io/nixos/nix and view them at https://github.com/nixos/nix/pkgs/container/nix
|
||||||
|
|
||||||
# What is included in Nix's Docker image?
|
# What is included in Nix's Docker image?
|
||||||
|
|
||||||
The official Docker image is created using `pkgs.dockerTools.buildLayeredImage`
|
The official Docker image is created using `pkgs.dockerTools.buildLayeredImage`
|
||||||
|
|
|
||||||
|
|
@ -333,7 +333,7 @@ Here is more information on the `output*` attributes, and what values they may b
|
||||||
|
|
||||||
`outputHashAlgo` can only be `null` when `outputHash` follows the SRI format, because in that case the choice of hash algorithm is determined by `outputHash`.
|
`outputHashAlgo` can only be `null` when `outputHash` follows the SRI format, because in that case the choice of hash algorithm is determined by `outputHash`.
|
||||||
|
|
||||||
- [`outputHash`]{#adv-attr-outputHashAlgo}; [`outputHash`]{#adv-attr-outputHashMode}
|
- [`outputHash`]{#adv-attr-outputHash}
|
||||||
|
|
||||||
This will specify the output hash of the single output of a [fixed-output derivation].
|
This will specify the output hash of the single output of a [fixed-output derivation].
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -16,7 +16,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect
|
||||||
- [`name`]{#attr-name} ([String](@docroot@/language/types.md#type-string))
|
- [`name`]{#attr-name} ([String](@docroot@/language/types.md#type-string))
|
||||||
|
|
||||||
A symbolic name for the derivation.
|
A symbolic name for the derivation.
|
||||||
See [derivation outputs](@docroot@/store/derivation/index.md#outputs) for what this is affects.
|
See [derivation outputs](@docroot@/store/derivation/outputs/index.md#outputs) for what this is affects.
|
||||||
|
|
||||||
[store path]: @docroot@/store/store-path.md
|
[store path]: @docroot@/store/store-path.md
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -16,7 +16,7 @@ An *identifier* is an [ASCII](https://en.wikipedia.org/wiki/ASCII) character seq
|
||||||
|
|
||||||
# Names
|
# Names
|
||||||
|
|
||||||
A *name* can be written as an [identifier](#identifier) or a [string literal](./string-literals.md).
|
A *name* can be written as an [identifier](#identifiers) or a [string literal](./string-literals.md).
|
||||||
|
|
||||||
> **Syntax**
|
> **Syntax**
|
||||||
>
|
>
|
||||||
|
|
|
||||||
|
|
@ -137,7 +137,7 @@ This is an incomplete overview of language features, by example.
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
|
|
||||||
[Booleans](@docroot@/language/types.md#type-boolean)
|
[Booleans](@docroot@/language/types.md#type-bool)
|
||||||
|
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
|
@ -245,7 +245,7 @@ This is an incomplete overview of language features, by example.
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
|
|
||||||
An [attribute set](@docroot@/language/types.md#attribute-set) with attributes named `x` and `y`
|
An [attribute set](@docroot@/language/types.md#type-attrs) with attributes named `x` and `y`
|
||||||
|
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
|
@ -285,7 +285,7 @@ This is an incomplete overview of language features, by example.
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
|
|
||||||
[Lists](@docroot@/language/types.md#list) with three elements.
|
[Lists](@docroot@/language/types.md#type-list) with three elements.
|
||||||
|
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
|
@ -369,7 +369,7 @@ This is an incomplete overview of language features, by example.
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
|
|
||||||
[Attribute selection](@docroot@/language/types.md#attribute-set) (evaluates to `1`)
|
[Attribute selection](@docroot@/language/types.md#type-attrs) (evaluates to `1`)
|
||||||
|
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
|
@ -381,7 +381,7 @@ This is an incomplete overview of language features, by example.
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
|
|
||||||
[Attribute selection](@docroot@/language/types.md#attribute-set) with default (evaluates to `3`)
|
[Attribute selection](@docroot@/language/types.md#type-attrs) with default (evaluates to `3`)
|
||||||
|
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
|
|
||||||
|
|
@ -111,7 +111,7 @@ It creates an [attribute set] representing the string context, which can be insp
|
||||||
|
|
||||||
[`builtins.hasContext`]: ./builtins.md#builtins-hasContext
|
[`builtins.hasContext`]: ./builtins.md#builtins-hasContext
|
||||||
[`builtins.getContext`]: ./builtins.md#builtins-getContext
|
[`builtins.getContext`]: ./builtins.md#builtins-getContext
|
||||||
[attribute set]: ./types.md#attribute-set
|
[attribute set]: ./types.md#type-attrs
|
||||||
|
|
||||||
## Clearing string contexts
|
## Clearing string contexts
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -6,7 +6,7 @@ Such a construct is called *interpolated string*, and the expression inside is a
|
||||||
|
|
||||||
[string]: ./types.md#type-string
|
[string]: ./types.md#type-string
|
||||||
[path]: ./types.md#type-path
|
[path]: ./types.md#type-path
|
||||||
[attribute set]: ./types.md#attribute-set
|
[attribute set]: ./types.md#type-attrs
|
||||||
|
|
||||||
> **Syntax**
|
> **Syntax**
|
||||||
>
|
>
|
||||||
|
|
|
||||||
|
|
@ -51,7 +51,7 @@ See [String literals](string-literals.md).
|
||||||
|
|
||||||
Path literals can also include [string interpolation], besides being [interpolated into other expressions].
|
Path literals can also include [string interpolation], besides being [interpolated into other expressions].
|
||||||
|
|
||||||
[interpolated into other expressions]: ./string-interpolation.md#interpolated-expressions
|
[interpolated into other expressions]: ./string-interpolation.md#interpolated-expression
|
||||||
|
|
||||||
At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path.
|
At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path.
|
||||||
|
|
||||||
|
|
@ -235,7 +235,7 @@ of object-oriented programming, for example.
|
||||||
|
|
||||||
## Recursive sets
|
## Recursive sets
|
||||||
|
|
||||||
Recursive sets are like normal [attribute sets](./types.md#attribute-set), but the attributes can refer to each other.
|
Recursive sets are like normal [attribute sets](./types.md#type-attrs), but the attributes can refer to each other.
|
||||||
|
|
||||||
> *rec-attrset* = `rec {` [ *name* `=` *expr* `;` `]`... `}`
|
> *rec-attrset* = `rec {` [ *name* `=` *expr* `;` `]`... `}`
|
||||||
|
|
||||||
|
|
@ -287,7 +287,7 @@ This evaluates to `"foobar"`.
|
||||||
|
|
||||||
## Inheriting attributes
|
## Inheriting attributes
|
||||||
|
|
||||||
When defining an [attribute set](./types.md#attribute-set) or in a [let-expression](#let-expressions) it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes).
|
When defining an [attribute set](./types.md#type-attrs) or in a [let-expression](#let-expressions) it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes).
|
||||||
This can be shortened using the `inherit` keyword.
|
This can be shortened using the `inherit` keyword.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,6 @@
|
||||||
|
# Process JSON schema documentation
|
||||||
|
subdir('protocols')
|
||||||
|
|
||||||
summary_rl_next = custom_target(
|
summary_rl_next = custom_target(
|
||||||
command : [
|
command : [
|
||||||
bash,
|
bash,
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
# Derivation "ATerm" file format
|
# Derivation "ATerm" file format
|
||||||
|
|
||||||
For historical reasons, [store derivations][store derivation] are stored on-disk in [ATerm](https://homepages.cwi.nl/~daybuild/daily-books/technology/aterm-guide/aterm-guide.html) format.
|
For historical reasons, [store derivations][store derivation] are stored on-disk in "Annotated Term" (ATerm) format
|
||||||
|
([guide](https://homepages.cwi.nl/~daybuild/daily-books/technology/aterm-guide/aterm-guide.html),
|
||||||
|
[paper](https://doi.org/10.1002/(SICI)1097-024X(200003)30:3%3C259::AID-SPE298%3E3.0.CO;2-Y)).
|
||||||
|
|
||||||
## The ATerm format used
|
## The ATerm format used
|
||||||
|
|
||||||
|
|
|
||||||
21
doc/manual/source/protocols/json/content-address.md
Normal file
21
doc/manual/source/protocols/json/content-address.md
Normal file
|
|
@ -0,0 +1,21 @@
|
||||||
|
{{#include content-address-v1-fixed.md}}
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
### [Text](@docroot@/store/store-object/content-address.html#method-text) method
|
||||||
|
|
||||||
|
```json
|
||||||
|
{{#include schema/content-address-v1/text.json}}
|
||||||
|
```
|
||||||
|
|
||||||
|
### [Nix Archive](@docroot@/store/store-object/content-address.html#method-nix-archive) method
|
||||||
|
|
||||||
|
```json
|
||||||
|
{{#include schema/content-address-v1/nar.json}}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- need to convert YAML to JSON first
|
||||||
|
## Raw Schema
|
||||||
|
|
||||||
|
[JSON Schema for Hash v1](schema/content-address-v1.json)
|
||||||
|
-->
|
||||||
|
|
@ -1,120 +1,7 @@
|
||||||
# Derivation JSON Format
|
{{#include derivation-v3-fixed.md}}
|
||||||
|
|
||||||
> **Warning**
|
<!-- need to convert YAML to JSON first
|
||||||
>
|
## Raw Schema
|
||||||
> This JSON format is currently
|
|
||||||
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
|
|
||||||
> and subject to change.
|
|
||||||
|
|
||||||
The JSON serialization of a
|
[JSON Schema for Derivation v3](schema/derivation-v3.json)
|
||||||
[derivations](@docroot@/glossary.md#gloss-store-derivation)
|
-->
|
||||||
is a JSON object with the following fields:
|
|
||||||
|
|
||||||
* `name`:
|
|
||||||
The name of the derivation.
|
|
||||||
This is used when calculating the store paths of the derivation's outputs.
|
|
||||||
|
|
||||||
* `version`:
|
|
||||||
Must be `3`.
|
|
||||||
This is a guard that allows us to continue evolving this format.
|
|
||||||
The choice of `3` is fairly arbitrary, but corresponds to this informal version:
|
|
||||||
|
|
||||||
- Version 0: A-Term format
|
|
||||||
|
|
||||||
- Version 1: Original JSON format, with ugly `"r:sha256"` inherited from A-Term format.
|
|
||||||
|
|
||||||
- Version 2: Separate `method` and `hashAlgo` fields in output specs
|
|
||||||
|
|
||||||
- Version 3: Drop store dir from store paths, just include base name.
|
|
||||||
|
|
||||||
Note that while this format is experimental, the maintenance of versions is best-effort, and not promised to identify every change.
|
|
||||||
|
|
||||||
* `outputs`:
|
|
||||||
Information about the output paths of the derivation.
|
|
||||||
This is a JSON object with one member per output, where the key is the output name and the value is a JSON object with these fields:
|
|
||||||
|
|
||||||
* `path`:
|
|
||||||
The output path, if it is known in advanced.
|
|
||||||
Otherwise, `null`.
|
|
||||||
|
|
||||||
|
|
||||||
* `method`:
|
|
||||||
For an output which will be [content addressed], a string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen.
|
|
||||||
Valid method strings are:
|
|
||||||
|
|
||||||
- [`flat`](@docroot@/store/store-object/content-address.md#method-flat)
|
|
||||||
- [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive)
|
|
||||||
- [`text`](@docroot@/store/store-object/content-address.md#method-text)
|
|
||||||
- [`git`](@docroot@/store/store-object/content-address.md#method-git)
|
|
||||||
|
|
||||||
Otherwise, `null`.
|
|
||||||
|
|
||||||
* `hashAlgo`:
|
|
||||||
For an output which will be [content addressed], the name of the hash algorithm used.
|
|
||||||
Valid algorithm strings are:
|
|
||||||
|
|
||||||
- `blake3`
|
|
||||||
- `md5`
|
|
||||||
- `sha1`
|
|
||||||
- `sha256`
|
|
||||||
- `sha512`
|
|
||||||
|
|
||||||
* `hash`:
|
|
||||||
For fixed-output derivations, the expected content hash in base-16.
|
|
||||||
|
|
||||||
> **Example**
|
|
||||||
>
|
|
||||||
> ```json
|
|
||||||
> "outputs": {
|
|
||||||
> "out": {
|
|
||||||
> "method": "nar",
|
|
||||||
> "hashAlgo": "sha256",
|
|
||||||
> "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62"
|
|
||||||
> }
|
|
||||||
> }
|
|
||||||
> ```
|
|
||||||
|
|
||||||
* `inputSrcs`:
|
|
||||||
A list of store paths on which this derivation depends.
|
|
||||||
|
|
||||||
> **Example**
|
|
||||||
>
|
|
||||||
> ```json
|
|
||||||
> "inputSrcs": [
|
|
||||||
> "47y241wqdhac3jm5l7nv0x4975mb1975-separate-debug-info.sh",
|
|
||||||
> "56d0w71pjj9bdr363ym3wj1zkwyqq97j-fix-pop-var-context-error.patch"
|
|
||||||
> ]
|
|
||||||
> ```
|
|
||||||
|
|
||||||
* `inputDrvs`:
|
|
||||||
A JSON object specifying the derivations on which this derivation depends, and what outputs of those derivations.
|
|
||||||
|
|
||||||
> **Example**
|
|
||||||
>
|
|
||||||
> ```json
|
|
||||||
> "inputDrvs": {
|
|
||||||
> "6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"],
|
|
||||||
> "fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"]
|
|
||||||
> }
|
|
||||||
> ```
|
|
||||||
|
|
||||||
specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`.
|
|
||||||
|
|
||||||
* `system`:
|
|
||||||
The system type on which this derivation is to be built
|
|
||||||
(e.g. `x86_64-linux`).
|
|
||||||
|
|
||||||
* `builder`:
|
|
||||||
The absolute path of the program to be executed to run the build.
|
|
||||||
Typically this is the `bash` shell
|
|
||||||
(e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`).
|
|
||||||
|
|
||||||
* `args`:
|
|
||||||
The command-line arguments passed to the `builder`.
|
|
||||||
|
|
||||||
* `env`:
|
|
||||||
The environment passed to the `builder`.
|
|
||||||
|
|
||||||
* `structuredAttrs`:
|
|
||||||
[Structured Attributes](@docroot@/store/derivation/index.md#structured-attrs), only defined if the derivation contains them.
|
|
||||||
Structured attributes are JSON, and thus embedded as-is.
|
|
||||||
|
|
|
||||||
21
doc/manual/source/protocols/json/deriving-path.md
Normal file
21
doc/manual/source/protocols/json/deriving-path.md
Normal file
|
|
@ -0,0 +1,21 @@
|
||||||
|
{{#include deriving-path-v1-fixed.md}}
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
### Constant
|
||||||
|
|
||||||
|
```json
|
||||||
|
{{#include schema/deriving-path-v1/single_opaque.json}}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Output of static derivation
|
||||||
|
|
||||||
|
```json
|
||||||
|
{{#include schema/deriving-path-v1/single_built.json}}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Output of dynamic derivation
|
||||||
|
|
||||||
|
```json
|
||||||
|
{{#include schema/deriving-path-v1/single_built_built.json}}
|
||||||
|
```
|
||||||
|
|
@ -0,0 +1,17 @@
|
||||||
|
# For some reason, backticks in the JSON schema are being escaped rather
|
||||||
|
# than being kept as intentional code spans. This removes all backtick
|
||||||
|
# escaping, which is an ugly solution, but one that is fine, because we
|
||||||
|
# are not using backticks for any other purpose.
|
||||||
|
s/\\`/`/g
|
||||||
|
|
||||||
|
# The way that semi-external references are rendered (i.e. ones to
|
||||||
|
# sibling schema files, as opposed to separate website ones, is not nice
|
||||||
|
# for humans. Replace it with a nice relative link within the manual
|
||||||
|
# instead.
|
||||||
|
#
|
||||||
|
# As we have more such relative links, more replacements of this nature
|
||||||
|
# should appear below.
|
||||||
|
s^\(./hash-v1.yaml\)\?#/$defs/algorithm^[JSON format for `Hash`](./hash.html#algorithm)^g
|
||||||
|
s^\(./hash-v1.yaml\)^[JSON format for `Hash`](./hash.html)^g
|
||||||
|
s^\(./content-address-v1.yaml\)\?#/$defs/method^[JSON format for `ContentAddress`](./content-address.html#method)^g
|
||||||
|
s^\(./content-address-v1.yaml\)^[JSON format for `ContentAddress`](./content-address.html)^g
|
||||||
33
doc/manual/source/protocols/json/hash.md
Normal file
33
doc/manual/source/protocols/json/hash.md
Normal file
|
|
@ -0,0 +1,33 @@
|
||||||
|
{{#include hash-v1-fixed.md}}
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
### SHA-256 with Base64 encoding
|
||||||
|
|
||||||
|
```json
|
||||||
|
{{#include schema/hash-v1/sha256-base64.json}}
|
||||||
|
```
|
||||||
|
|
||||||
|
### SHA-256 with Base16 (hexadecimal) encoding
|
||||||
|
|
||||||
|
```json
|
||||||
|
{{#include schema/hash-v1/sha256-base16.json}}
|
||||||
|
```
|
||||||
|
|
||||||
|
### SHA-256 with Nix32 encoding
|
||||||
|
|
||||||
|
```json
|
||||||
|
{{#include schema/hash-v1/sha256-nix32.json}}
|
||||||
|
```
|
||||||
|
|
||||||
|
### BLAKE3 with Base64 encoding
|
||||||
|
|
||||||
|
```json
|
||||||
|
{{#include schema/hash-v1/blake3-base64.json}}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- need to convert YAML to JSON first
|
||||||
|
## Raw Schema
|
||||||
|
|
||||||
|
[JSON Schema for Hash v1](schema/hash-v1.json)
|
||||||
|
-->
|
||||||
|
|
@ -0,0 +1,17 @@
|
||||||
|
# Configuration file for json-schema-for-humans
|
||||||
|
#
|
||||||
|
# https://github.com/coveooss/json-schema-for-humans/blob/main/docs/examples/examples_md_default/Configuration.md
|
||||||
|
|
||||||
|
template_name: md
|
||||||
|
show_toc: true
|
||||||
|
# impure timestamp and distracting
|
||||||
|
with_footer: false
|
||||||
|
recursive_detection_depth: 3
|
||||||
|
show_breadcrumbs: false
|
||||||
|
description_is_markdown: true
|
||||||
|
template_md_options:
|
||||||
|
properties_table_columns:
|
||||||
|
- Property
|
||||||
|
- Type
|
||||||
|
- Pattern
|
||||||
|
- Title/Description
|
||||||
78
doc/manual/source/protocols/json/meson.build
Normal file
78
doc/manual/source/protocols/json/meson.build
Normal file
|
|
@ -0,0 +1,78 @@
|
||||||
|
# Tests in: ../../../../src/json-schema-checks
|
||||||
|
|
||||||
|
fs = import('fs')
|
||||||
|
|
||||||
|
# Find json-schema-for-humans if available
|
||||||
|
json_schema_for_humans = find_program('generate-schema-doc', required : false)
|
||||||
|
|
||||||
|
# Configuration for json-schema-for-humans
|
||||||
|
json_schema_config = files('json-schema-for-humans-config.yaml')
|
||||||
|
|
||||||
|
schemas = [
|
||||||
|
'hash-v1',
|
||||||
|
'content-address-v1',
|
||||||
|
'store-path-v1',
|
||||||
|
'store-object-info-v1',
|
||||||
|
'derivation-v3',
|
||||||
|
'deriving-path-v1',
|
||||||
|
]
|
||||||
|
|
||||||
|
schema_files = files()
|
||||||
|
foreach schema_name : schemas
|
||||||
|
schema_files += files('schema' / schema_name + '.yaml')
|
||||||
|
endforeach
|
||||||
|
|
||||||
|
|
||||||
|
schema_outputs = []
|
||||||
|
foreach schema_name : schemas
|
||||||
|
schema_outputs += schema_name + '.md'
|
||||||
|
endforeach
|
||||||
|
|
||||||
|
json_schema_generated_files = []
|
||||||
|
|
||||||
|
# Generate markdown documentation from JSON schema
|
||||||
|
# Note: output must be just a filename, not a path
|
||||||
|
gen_file = custom_target(
|
||||||
|
schema_name + '-schema-docs.tmp',
|
||||||
|
command : [
|
||||||
|
json_schema_for_humans,
|
||||||
|
'--config-file',
|
||||||
|
json_schema_config,
|
||||||
|
meson.current_source_dir() / 'schema',
|
||||||
|
meson.current_build_dir(),
|
||||||
|
],
|
||||||
|
input : schema_files + [
|
||||||
|
json_schema_config,
|
||||||
|
],
|
||||||
|
output : schema_outputs,
|
||||||
|
capture : false,
|
||||||
|
build_by_default : true,
|
||||||
|
)
|
||||||
|
|
||||||
|
idx = 0
|
||||||
|
if json_schema_for_humans.found()
|
||||||
|
foreach schema_name : schemas
|
||||||
|
#schema_file = 'schema' / schema_name + '.yaml'
|
||||||
|
|
||||||
|
# There is one so-so hack, and one horrible hack being done here.
|
||||||
|
sedded_file = custom_target(
|
||||||
|
schema_name + '-schema-docs',
|
||||||
|
command : [
|
||||||
|
'sed',
|
||||||
|
'-f',
|
||||||
|
# Out of line to avoid https://github.com/mesonbuild/meson/issues/1564
|
||||||
|
files('fixup-json-schema-generated-doc.sed'),
|
||||||
|
'@INPUT@',
|
||||||
|
],
|
||||||
|
capture : true,
|
||||||
|
input : gen_file[idx],
|
||||||
|
output : schema_name + '-fixed.md',
|
||||||
|
)
|
||||||
|
idx += 1
|
||||||
|
json_schema_generated_files += [ sedded_file ]
|
||||||
|
endforeach
|
||||||
|
else
|
||||||
|
warning(
|
||||||
|
'json-schema-for-humans not found, skipping JSON schema documentation generation',
|
||||||
|
)
|
||||||
|
endif
|
||||||
1
doc/manual/source/protocols/json/schema/content-address-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/content-address-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../../../../../src/libstore-tests/data/content-address
|
||||||
|
|
@ -0,0 +1,55 @@
|
||||||
|
"$schema": "http://json-schema.org/draft-04/schema"
|
||||||
|
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/content-address-v1.json"
|
||||||
|
title: Content Address
|
||||||
|
description: |
|
||||||
|
This schema describes the JSON representation of Nix's `ContentAddress` type, which conveys information about [content-addressing store objects](@docroot@/store/store-object/content-address.md).
|
||||||
|
|
||||||
|
> **Note**
|
||||||
|
>
|
||||||
|
> For current methods of content addressing, this data type is a bit suspicious, because it is neither simply a content address of a file system object (the `method` is richer), nor simply a content address of a store object (the `hash` doesn't account for the references).
|
||||||
|
> It should thus only be used in contexts where the references are also known / otherwise made tamper-resistant.
|
||||||
|
|
||||||
|
<!--
|
||||||
|
TODO currently `ContentAddress` is used in both of these, and so same rationale applies, but actually in both cases the JSON is currently ad-hoc.
|
||||||
|
That will be fixed, and as each is fixed, the example (along with a more precise link to the field in question) should be become part of the above note, so what is is saying is more clear.
|
||||||
|
|
||||||
|
> For example:
|
||||||
|
|
||||||
|
> - Fixed outputs of derivations are not allowed to have any references, so an empty reference set is statically known by assumption.
|
||||||
|
|
||||||
|
> - [Store object info](./store-object-info.md) includes the set of references along side the (optional) content address.
|
||||||
|
|
||||||
|
> This data type is thus safely used in both of these contexts.
|
||||||
|
|
||||||
|
-->
|
||||||
|
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
method:
|
||||||
|
"$ref": "#/$defs/method"
|
||||||
|
hash:
|
||||||
|
title: Content Address
|
||||||
|
description: |
|
||||||
|
This would be the content-address itself.
|
||||||
|
|
||||||
|
For all current methods, this is just a content address of the file system object of the store object, [as described in the store chapter](@docroot@/store/file-system-object/content-address.md), and not of the store object as a whole.
|
||||||
|
In particular, the references of the store object are *not* taken into account with this hash (and currently-supported methods).
|
||||||
|
"$ref": "./hash-v1.yaml"
|
||||||
|
required:
|
||||||
|
- method
|
||||||
|
- hash
|
||||||
|
additionalProperties: false
|
||||||
|
"$defs":
|
||||||
|
method:
|
||||||
|
type: string
|
||||||
|
enum: [flat, nar, text, git]
|
||||||
|
title: Content-Addressing Method
|
||||||
|
description: |
|
||||||
|
A string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen.
|
||||||
|
|
||||||
|
Valid method strings are:
|
||||||
|
|
||||||
|
- [`flat`](@docroot@/store/store-object/content-address.md#method-flat) (provided the contents are a single file)
|
||||||
|
- [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive)
|
||||||
|
- [`text`](@docroot@/store/store-object/content-address.md#method-text)
|
||||||
|
- [`git`](@docroot@/store/store-object/content-address.md#method-git)
|
||||||
291
doc/manual/source/protocols/json/schema/derivation-v3.yaml
Normal file
291
doc/manual/source/protocols/json/schema/derivation-v3.yaml
Normal file
|
|
@ -0,0 +1,291 @@
|
||||||
|
"$schema": "http://json-schema.org/draft-04/schema"
|
||||||
|
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-v3.json"
|
||||||
|
title: Derivation
|
||||||
|
description: |
|
||||||
|
Experimental JSON representation of a Nix derivation (version 3).
|
||||||
|
|
||||||
|
This schema describes the JSON representation of Nix's `Derivation` type.
|
||||||
|
|
||||||
|
> **Warning**
|
||||||
|
>
|
||||||
|
> This JSON format is currently
|
||||||
|
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
|
||||||
|
> and subject to change.
|
||||||
|
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- name
|
||||||
|
- version
|
||||||
|
- outputs
|
||||||
|
- inputSrcs
|
||||||
|
- inputDrvs
|
||||||
|
- system
|
||||||
|
- builder
|
||||||
|
- args
|
||||||
|
- env
|
||||||
|
properties:
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
title: Derivation name
|
||||||
|
description: |
|
||||||
|
The name of the derivation.
|
||||||
|
Used when calculating store paths for the derivation’s outputs.
|
||||||
|
|
||||||
|
version:
|
||||||
|
const: 3
|
||||||
|
title: Format version (must be 3)
|
||||||
|
description: |
|
||||||
|
Must be `3`.
|
||||||
|
This is a guard that allows us to continue evolving this format.
|
||||||
|
The choice of `3` is fairly arbitrary, but corresponds to this informal version:
|
||||||
|
|
||||||
|
- Version 0: ATerm format
|
||||||
|
|
||||||
|
- Version 1: Original JSON format, with ugly `"r:sha256"` inherited from ATerm format.
|
||||||
|
|
||||||
|
- Version 2: Separate `method` and `hashAlgo` fields in output specs
|
||||||
|
|
||||||
|
- Version 3: Drop store dir from store paths, just include base name.
|
||||||
|
|
||||||
|
Note that while this format is experimental, the maintenance of versions is best-effort, and not promised to identify every change.
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
type: object
|
||||||
|
title: Output specifications
|
||||||
|
description: |
|
||||||
|
Information about the output paths of the derivation.
|
||||||
|
This is a JSON object with one member per output, where the key is the output name and the value is a JSON object as described.
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```json
|
||||||
|
> "outputs": {
|
||||||
|
> "out": {
|
||||||
|
> "method": "nar",
|
||||||
|
> "hashAlgo": "sha256",
|
||||||
|
> "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62"
|
||||||
|
> }
|
||||||
|
> }
|
||||||
|
> ```
|
||||||
|
additionalProperties:
|
||||||
|
"$ref": "#/$defs/output/overall"
|
||||||
|
|
||||||
|
inputSrcs:
|
||||||
|
type: array
|
||||||
|
title: Input source paths
|
||||||
|
description: |
|
||||||
|
List of store paths on which this derivation depends.
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```json
|
||||||
|
> "inputSrcs": [
|
||||||
|
> "47y241wqdhac3jm5l7nv0x4975mb1975-separate-debug-info.sh",
|
||||||
|
> "56d0w71pjj9bdr363ym3wj1zkwyqq97j-fix-pop-var-context-error.patch"
|
||||||
|
> ]
|
||||||
|
> ```
|
||||||
|
items:
|
||||||
|
$ref: "store-path-v1.yaml"
|
||||||
|
|
||||||
|
inputDrvs:
|
||||||
|
type: object
|
||||||
|
title: Input derivations
|
||||||
|
description: |
|
||||||
|
Mapping of derivation paths to lists of output names they provide.
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```json
|
||||||
|
> "inputDrvs": {
|
||||||
|
> "6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"],
|
||||||
|
> "fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"]
|
||||||
|
> }
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`.
|
||||||
|
patternProperties:
|
||||||
|
"^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+\\.drv$":
|
||||||
|
title: Store Path
|
||||||
|
description: |
|
||||||
|
A store path to a derivation, mapped to the outputs of that derivation.
|
||||||
|
oneOf:
|
||||||
|
- "$ref": "#/$defs/outputNames"
|
||||||
|
- "$ref": "#/$defs/dynamicOutputs"
|
||||||
|
additionalProperties: false
|
||||||
|
|
||||||
|
system:
|
||||||
|
type: string
|
||||||
|
title: Build system type
|
||||||
|
description: |
|
||||||
|
The system type on which this derivation is to be built
|
||||||
|
(e.g. `x86_64-linux`).
|
||||||
|
|
||||||
|
builder:
|
||||||
|
type: string
|
||||||
|
title: Build program path
|
||||||
|
description: |
|
||||||
|
Absolute path of the program used to perform the build.
|
||||||
|
Typically this is the `bash` shell
|
||||||
|
(e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`).
|
||||||
|
|
||||||
|
args:
|
||||||
|
type: array
|
||||||
|
title: Builder arguments
|
||||||
|
description: |
|
||||||
|
Command-line arguments passed to the `builder`.
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
|
||||||
|
env:
|
||||||
|
type: object
|
||||||
|
title: Environment variables
|
||||||
|
description: |
|
||||||
|
Environment variables passed to the `builder`.
|
||||||
|
additionalProperties:
|
||||||
|
type: string
|
||||||
|
|
||||||
|
structuredAttrs:
|
||||||
|
title: Structured attributes
|
||||||
|
description: |
|
||||||
|
[Structured Attributes](@docroot@/store/derivation/index.md#structured-attrs), only defined if the derivation contains them.
|
||||||
|
Structured attributes are JSON, and thus embedded as-is.
|
||||||
|
type: object
|
||||||
|
additionalProperties: true
|
||||||
|
|
||||||
|
"$defs":
|
||||||
|
output:
|
||||||
|
overall:
|
||||||
|
title: Derivation Output
|
||||||
|
description: |
|
||||||
|
A single output of a derivation, with different variants for different output types.
|
||||||
|
oneOf:
|
||||||
|
- "$ref": "#/$defs/output/inputAddressed"
|
||||||
|
- "$ref": "#/$defs/output/caFixed"
|
||||||
|
- "$ref": "#/$defs/output/caFloating"
|
||||||
|
- "$ref": "#/$defs/output/deferred"
|
||||||
|
- "$ref": "#/$defs/output/impure"
|
||||||
|
|
||||||
|
inputAddressed:
|
||||||
|
title: Input-Addressed Output
|
||||||
|
description: |
|
||||||
|
The traditional non-fixed-output derivation type.
|
||||||
|
The output path is determined from the derivation itself.
|
||||||
|
|
||||||
|
See [Input-addressing derivation outputs](@docroot@/store/derivation/outputs/input-address.md) for more details.
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- path
|
||||||
|
properties:
|
||||||
|
path:
|
||||||
|
$ref: "store-path-v1.yaml"
|
||||||
|
title: Output path
|
||||||
|
description: |
|
||||||
|
The output path determined from the derivation itself.
|
||||||
|
additionalProperties: false
|
||||||
|
|
||||||
|
caFixed:
|
||||||
|
title: Fixed Content-Addressed Output
|
||||||
|
description: |
|
||||||
|
The output is content-addressed, and the content-address is fixed in advance.
|
||||||
|
|
||||||
|
See [Fixed-output content-addressing](@docroot@/store/derivation/outputs/content-address.md#fixed) for more details.
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- method
|
||||||
|
- hashAlgo
|
||||||
|
- hash
|
||||||
|
properties:
|
||||||
|
method:
|
||||||
|
"$ref": "./content-address-v1.yaml#/$defs/method"
|
||||||
|
description: |
|
||||||
|
Method of content addressing used for this output.
|
||||||
|
hashAlgo:
|
||||||
|
title: Hash algorithm
|
||||||
|
"$ref": "./hash-v1.yaml#/$defs/algorithm"
|
||||||
|
hash:
|
||||||
|
type: string
|
||||||
|
title: Expected hash value
|
||||||
|
description: |
|
||||||
|
The expected content hash in base-16.
|
||||||
|
additionalProperties: false
|
||||||
|
|
||||||
|
caFloating:
|
||||||
|
title: Floating Content-Addressed Output
|
||||||
|
description: |
|
||||||
|
Floating-output derivations, whose outputs are content
|
||||||
|
addressed, but not fixed, and so the output paths are dynamically calculated from
|
||||||
|
whatever the output ends up being.
|
||||||
|
|
||||||
|
See [Floating Content-Addressing](@docroot@/store/derivation/outputs/content-address.md#floating) for more details.
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- method
|
||||||
|
- hashAlgo
|
||||||
|
properties:
|
||||||
|
method:
|
||||||
|
"$ref": "./content-address-v1.yaml#/$defs/method"
|
||||||
|
description: |
|
||||||
|
Method of content addressing used for this output.
|
||||||
|
hashAlgo:
|
||||||
|
title: Hash algorithm
|
||||||
|
"$ref": "./hash-v1.yaml#/$defs/algorithm"
|
||||||
|
description: |
|
||||||
|
What hash algorithm to use for the given method of content-addressing.
|
||||||
|
additionalProperties: false
|
||||||
|
|
||||||
|
deferred:
|
||||||
|
title: Deferred Output
|
||||||
|
description: |
|
||||||
|
Input-addressed output which depends on a (CA) derivation whose outputs (and thus their content-address
|
||||||
|
are not yet known.
|
||||||
|
type: object
|
||||||
|
properties: {}
|
||||||
|
additionalProperties: false
|
||||||
|
|
||||||
|
impure:
|
||||||
|
title: Impure Output
|
||||||
|
description: |
|
||||||
|
Impure output which is just like a floating content-addressed output, but this derivation runs without sandboxing.
|
||||||
|
As such, we don't record it in the build trace, under the assumption that if we need it again, we should rebuild it, as it might produce something different.
|
||||||
|
required:
|
||||||
|
- impure
|
||||||
|
- method
|
||||||
|
- hashAlgo
|
||||||
|
properties:
|
||||||
|
impure:
|
||||||
|
const: true
|
||||||
|
method:
|
||||||
|
"$ref": "./content-address-v1.yaml#/$defs/method"
|
||||||
|
description: |
|
||||||
|
How the file system objects will be serialized for hashing.
|
||||||
|
hashAlgo:
|
||||||
|
title: Hash algorithm
|
||||||
|
"$ref": "./hash-v1.yaml#/$defs/algorithm"
|
||||||
|
description: |
|
||||||
|
How the serialization will be hashed.
|
||||||
|
additionalProperties: false
|
||||||
|
|
||||||
|
outputName:
|
||||||
|
type: string
|
||||||
|
title: Output name
|
||||||
|
description: Name of the derivation output to depend on
|
||||||
|
|
||||||
|
outputNames:
|
||||||
|
type: array
|
||||||
|
title: Output Names
|
||||||
|
description: Set of names of derivation outputs to depend on
|
||||||
|
items:
|
||||||
|
"$ref": "#/$defs/outputName"
|
||||||
|
|
||||||
|
dynamicOutputs:
|
||||||
|
type: object
|
||||||
|
title: Dynamic Outputs
|
||||||
|
description: |
|
||||||
|
**Experimental feature**: [`dynamic-derivations`](@docroot@/development/experimental-features.md#xp-feature-dynamic-derivations)
|
||||||
|
|
||||||
|
This recursive data type allows for depending on outputs of outputs.
|
||||||
|
properties:
|
||||||
|
outputs:
|
||||||
|
"$ref": "#/$defs/outputNames"
|
||||||
|
dynamicOutputs:
|
||||||
|
"$ref": "#/$defs/dynamicOutputs"
|
||||||
1
doc/manual/source/protocols/json/schema/deriving-path-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/deriving-path-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../../../../../src/libstore-tests/data/derived-path
|
||||||
|
|
@ -0,0 +1,27 @@
|
||||||
|
"$schema": "http://json-schema.org/draft-04/schema"
|
||||||
|
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/deriving-path-v1.json"
|
||||||
|
title: Deriving Path
|
||||||
|
description: |
|
||||||
|
This schema describes the JSON representation of Nix's [Deriving Path](@docroot@/store/derivation/index.md#deriving-path).
|
||||||
|
oneOf:
|
||||||
|
- title: Constant
|
||||||
|
description: |
|
||||||
|
See [Constant](@docroot@/store/derivation/index.md#deriving-path-constant) deriving path.
|
||||||
|
$ref: "store-path-v1.yaml"
|
||||||
|
- title: Output
|
||||||
|
description: |
|
||||||
|
See [Output](@docroot@/store/derivation/index.md#deriving-path-output) deriving path.
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
drvPath:
|
||||||
|
"$ref": "#"
|
||||||
|
description: |
|
||||||
|
A deriving path to a [Derivation](@docroot@/store/derivation/index.md#store-derivation), whose output is being referred to.
|
||||||
|
output:
|
||||||
|
type: string
|
||||||
|
description: |
|
||||||
|
The name of an output produced by that derivation (e.g. "out", "doc", etc.).
|
||||||
|
required:
|
||||||
|
- drvPath
|
||||||
|
- output
|
||||||
|
additionalProperties: false
|
||||||
1
doc/manual/source/protocols/json/schema/hash-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/hash-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../../../../../src/libutil-tests/data/hash/
|
||||||
54
doc/manual/source/protocols/json/schema/hash-v1.yaml
Normal file
54
doc/manual/source/protocols/json/schema/hash-v1.yaml
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
"$schema": "http://json-schema.org/draft-04/schema"
|
||||||
|
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/hash-v1.json"
|
||||||
|
title: Hash
|
||||||
|
description: |
|
||||||
|
A cryptographic hash value used throughout Nix for content addressing and integrity verification.
|
||||||
|
|
||||||
|
This schema describes the JSON representation of Nix's `Hash` type.
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
algorithm:
|
||||||
|
"$ref": "#/$defs/algorithm"
|
||||||
|
format:
|
||||||
|
type: string
|
||||||
|
enum:
|
||||||
|
- base64
|
||||||
|
- nix32
|
||||||
|
- base16
|
||||||
|
- sri
|
||||||
|
title: Hash format
|
||||||
|
description: |
|
||||||
|
The encoding format of the hash value.
|
||||||
|
|
||||||
|
- `base64` uses standard Base64 encoding [RFC 4648, section 4](https://datatracker.ietf.org/doc/html/rfc4648#section-4)
|
||||||
|
- `nix32` is Nix-specific base-32 encoding
|
||||||
|
- `base16` is lowercase hexadecimal
|
||||||
|
- `sri` is the [Subresource Integrity format](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
|
||||||
|
hash:
|
||||||
|
type: string
|
||||||
|
title: Hash
|
||||||
|
description: |
|
||||||
|
The encoded hash value, itself.
|
||||||
|
|
||||||
|
It is specified in the format specified by the `format` field.
|
||||||
|
It must be the right length for the hash algorithm specified in the `algorithm` field, also.
|
||||||
|
The hash value does not include any algorithm prefix.
|
||||||
|
required:
|
||||||
|
- algorithm
|
||||||
|
- format
|
||||||
|
- hash
|
||||||
|
additionalProperties: false
|
||||||
|
"$defs":
|
||||||
|
algorithm:
|
||||||
|
type: string
|
||||||
|
enum:
|
||||||
|
- blake3
|
||||||
|
- md5
|
||||||
|
- sha1
|
||||||
|
- sha256
|
||||||
|
- sha512
|
||||||
|
title: Hash algorithm
|
||||||
|
description: |
|
||||||
|
The hash algorithm used to compute the hash value.
|
||||||
|
|
||||||
|
`blake3` is currently experimental and requires the [`blake-hashing`](@docroot@/development/experimental-features.md#xp-feature-blake3-hashes) experimental feature.
|
||||||
1
doc/manual/source/protocols/json/schema/nar-info-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/nar-info-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../../../../../src/libstore-tests/data/nar-info
|
||||||
1
doc/manual/source/protocols/json/schema/store-object-info-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/store-object-info-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../../../../../src/libstore-tests/data/path-info
|
||||||
|
|
@ -0,0 +1,235 @@
|
||||||
|
"$schema": "http://json-schema.org/draft-07/schema"
|
||||||
|
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-object-info-v1.json"
|
||||||
|
title: Store Object Info
|
||||||
|
description: |
|
||||||
|
Information about a [store object](@docroot@/store/store-object.md).
|
||||||
|
|
||||||
|
This schema describes the JSON representation of store object metadata as returned by commands like [`nix path-info --json`](@docroot@/command-ref/new-cli/nix3-path-info.md).
|
||||||
|
|
||||||
|
> **Warning**
|
||||||
|
>
|
||||||
|
> This JSON format is currently
|
||||||
|
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
|
||||||
|
> and subject to change.
|
||||||
|
|
||||||
|
### Field Categories
|
||||||
|
|
||||||
|
Store object information can come in a few different variations.
|
||||||
|
|
||||||
|
Firstly, "impure" fields, which contain non-intrinsic information about the store object, may or may not be included.
|
||||||
|
|
||||||
|
Second, binary cache stores have extra non-intrinsic infomation about the store objects they contain.
|
||||||
|
|
||||||
|
Thirdly, [`nix path-info --json --closure-size`](@docroot@/command-ref/new-cli/nix3-path-info.html#opt-closure-size) can compute some extra information about not just the single store object in question, but the store object and its [closure](@docroot@/glossary.md#gloss-closure).
|
||||||
|
|
||||||
|
The impure and NAR fields are grouped into separate variants below.
|
||||||
|
See their descriptions for additional information.
|
||||||
|
The closure fields however as just included as optional fields, to avoid a combinatorial explosion of variants.
|
||||||
|
|
||||||
|
oneOf:
|
||||||
|
- $ref: "#/$defs/base"
|
||||||
|
|
||||||
|
- $ref: "#/$defs/impure"
|
||||||
|
|
||||||
|
- $ref: "#/$defs/narInfo"
|
||||||
|
|
||||||
|
$defs:
|
||||||
|
base:
|
||||||
|
title: Store Object Info
|
||||||
|
description: |
|
||||||
|
Basic store object metadata containing only intrinsic properties.
|
||||||
|
This is the minimal set of fields that describe what a store object contains.
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- narHash
|
||||||
|
- narSize
|
||||||
|
- references
|
||||||
|
- ca
|
||||||
|
properties:
|
||||||
|
path:
|
||||||
|
type: string
|
||||||
|
title: Store Path
|
||||||
|
description: |
|
||||||
|
[Store path](@docroot@/store/store-path.md) to the given store object.
|
||||||
|
|
||||||
|
Note: This field may not be present in all contexts, such as when the path is used as the key and the the store object info the value in map.
|
||||||
|
|
||||||
|
narHash:
|
||||||
|
type: string
|
||||||
|
title: NAR Hash
|
||||||
|
description: |
|
||||||
|
Hash of the [file system object](@docroot@/store/file-system-object.md) part of the store object when serialized as a [Nix Archive](@docroot@/store/file-system-object/content-address.md#serial-nix-archive).
|
||||||
|
|
||||||
|
narSize:
|
||||||
|
type: integer
|
||||||
|
minimum: 0
|
||||||
|
title: NAR Size
|
||||||
|
description: |
|
||||||
|
Size of the [file system object](@docroot@/store/file-system-object.md) part of the store object when serialized as a [Nix Archive](@docroot@/store/file-system-object/content-address.md#serial-nix-archive).
|
||||||
|
|
||||||
|
references:
|
||||||
|
type: array
|
||||||
|
title: References
|
||||||
|
description: |
|
||||||
|
An array of [store paths](@docroot@/store/store-path.md), possibly including this one.
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
|
||||||
|
ca:
|
||||||
|
type: ["string", "null"]
|
||||||
|
title: Content Address
|
||||||
|
description: |
|
||||||
|
If the store object is [content-addressed](@docroot@/store/store-object/content-address.md),
|
||||||
|
this is the content address of this store object's file system object, used to compute its store path.
|
||||||
|
Otherwise (i.e. if it is [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object)), this is `null`.
|
||||||
|
additionalProperties: false
|
||||||
|
|
||||||
|
impure:
|
||||||
|
title: Store Object Info with Impure Fields
|
||||||
|
description: |
|
||||||
|
Store object metadata including impure fields that are not *intrinsic* properties.
|
||||||
|
In other words, the same store object in different stores could have different values for these impure fields.
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- narHash
|
||||||
|
- narSize
|
||||||
|
- references
|
||||||
|
- ca
|
||||||
|
# impure
|
||||||
|
- deriver
|
||||||
|
- registrationTime
|
||||||
|
- ultimate
|
||||||
|
- signatures
|
||||||
|
properties:
|
||||||
|
path: { $ref: "#/$defs/base/properties/path" }
|
||||||
|
narHash: { $ref: "#/$defs/base/properties/narHash" }
|
||||||
|
narSize: { $ref: "#/$defs/base/properties/narSize" }
|
||||||
|
references: { $ref: "#/$defs/base/properties/references" }
|
||||||
|
ca: { $ref: "#/$defs/base/properties/ca" }
|
||||||
|
deriver:
|
||||||
|
type: ["string", "null"]
|
||||||
|
title: Deriver
|
||||||
|
description: |
|
||||||
|
If known, the path to the [store derivation](@docroot@/glossary.md#gloss-store-derivation) from which this store object was produced.
|
||||||
|
Otherwise `null`.
|
||||||
|
|
||||||
|
> This is an "impure" field that may not be included in certain contexts.
|
||||||
|
|
||||||
|
registrationTime:
|
||||||
|
type: ["integer", "null"]
|
||||||
|
title: Registration Time
|
||||||
|
description: |
|
||||||
|
If known, when this derivation was added to the store (Unix timestamp).
|
||||||
|
Otherwise `null`.
|
||||||
|
|
||||||
|
> This is an "impure" field that may not be included in certain contexts.
|
||||||
|
|
||||||
|
ultimate:
|
||||||
|
type: boolean
|
||||||
|
title: Ultimate
|
||||||
|
description: |
|
||||||
|
Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere.
|
||||||
|
|
||||||
|
> This is an "impure" field that may not be included in certain contexts.
|
||||||
|
|
||||||
|
signatures:
|
||||||
|
type: array
|
||||||
|
title: Signatures
|
||||||
|
description: |
|
||||||
|
Signatures claiming that this store object is what it claims to be.
|
||||||
|
Not relevant for [content-addressed](@docroot@/store/store-object/content-address.md) store objects,
|
||||||
|
but useful for [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object) store objects.
|
||||||
|
|
||||||
|
> This is an "impure" field that may not be included in certain contexts.
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
|
||||||
|
# Computed closure fields
|
||||||
|
closureSize:
|
||||||
|
type: integer
|
||||||
|
minimum: 0
|
||||||
|
title: Closure Size
|
||||||
|
description: |
|
||||||
|
The total size of this store object and every other object in its [closure](@docroot@/glossary.md#gloss-closure).
|
||||||
|
|
||||||
|
> This field is not stored at all, but computed by traversing the other fields across all the store objects in a closure.
|
||||||
|
additionalProperties: false
|
||||||
|
|
||||||
|
narInfo:
|
||||||
|
title: Store Object Info with Impure fields and NAR Info
|
||||||
|
description: |
|
||||||
|
The store object info in the "binary cache" family of Nix store type contain extra information pertaining to *downloads* of the store object in question.
|
||||||
|
(This store info is called "NAR info", since the downloads take the form of [Nix Archives](@docroot@/store/file-system-object/content-address.md#serial-nix-archive, and the metadata is served in a file with a `.narinfo` extension.)
|
||||||
|
|
||||||
|
This download information, being specific to how the store object happens to be stored and transferred, is also considered to be non-intrinsic / impure.
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- narHash
|
||||||
|
- narSize
|
||||||
|
- references
|
||||||
|
- ca
|
||||||
|
# impure
|
||||||
|
- deriver
|
||||||
|
- registrationTime
|
||||||
|
- ultimate
|
||||||
|
- signatures
|
||||||
|
# nar
|
||||||
|
- url
|
||||||
|
- compression
|
||||||
|
- downloadHash
|
||||||
|
- downloadSize
|
||||||
|
properties:
|
||||||
|
path: { $ref: "#/$defs/base/properties/path" }
|
||||||
|
narHash: { $ref: "#/$defs/base/properties/narHash" }
|
||||||
|
narSize: { $ref: "#/$defs/base/properties/narSize" }
|
||||||
|
references: { $ref: "#/$defs/base/properties/references" }
|
||||||
|
ca: { $ref: "#/$defs/base/properties/ca" }
|
||||||
|
deriver: { $ref: "#/$defs/impure/properties/deriver" }
|
||||||
|
registrationTime: { $ref: "#/$defs/impure/properties/registrationTime" }
|
||||||
|
ultimate: { $ref: "#/$defs/impure/properties/ultimate" }
|
||||||
|
signatures: { $ref: "#/$defs/impure/properties/signatures" }
|
||||||
|
closureSize: { $ref: "#/$defs/impure/properties/closureSize" }
|
||||||
|
url:
|
||||||
|
type: string
|
||||||
|
title: URL
|
||||||
|
description: |
|
||||||
|
Where to download a compressed archive of the file system objects of this store object.
|
||||||
|
|
||||||
|
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||||
|
|
||||||
|
compression:
|
||||||
|
type: string
|
||||||
|
title: Compression
|
||||||
|
description: |
|
||||||
|
The compression format that the archive is in.
|
||||||
|
|
||||||
|
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||||
|
|
||||||
|
downloadHash:
|
||||||
|
type: string
|
||||||
|
title: Download Hash
|
||||||
|
description: |
|
||||||
|
A digest for the compressed archive itself, as opposed to the data contained within.
|
||||||
|
|
||||||
|
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||||
|
|
||||||
|
downloadSize:
|
||||||
|
type: integer
|
||||||
|
minimum: 0
|
||||||
|
title: Download Size
|
||||||
|
description: |
|
||||||
|
The size of the compressed archive itself.
|
||||||
|
|
||||||
|
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||||
|
|
||||||
|
closureDownloadSize:
|
||||||
|
type: integer
|
||||||
|
minimum: 0
|
||||||
|
title: Closure Download Size
|
||||||
|
description: |
|
||||||
|
The total size of the compressed archive itself for this object, and the compressed archive of every object in this object's [closure](@docroot@/glossary.md#gloss-closure).
|
||||||
|
|
||||||
|
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||||
|
|
||||||
|
> This field is not stored at all, but computed by traversing the other fields across all the store objects in a closure.
|
||||||
|
additionalProperties: false
|
||||||
1
doc/manual/source/protocols/json/schema/store-path-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/store-path-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../../../../../src/libstore-tests/data/store-path
|
||||||
32
doc/manual/source/protocols/json/schema/store-path-v1.yaml
Normal file
32
doc/manual/source/protocols/json/schema/store-path-v1.yaml
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
"$schema": "http://json-schema.org/draft-07/schema"
|
||||||
|
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-path-v1.json"
|
||||||
|
title: Store Path
|
||||||
|
description: |
|
||||||
|
A [store path](@docroot@/store/store-path.md) identifying a store object.
|
||||||
|
|
||||||
|
This schema describes the JSON representation of store paths as used in various Nix JSON APIs.
|
||||||
|
|
||||||
|
> **Warning**
|
||||||
|
>
|
||||||
|
> This JSON format is currently
|
||||||
|
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
|
||||||
|
> and subject to change.
|
||||||
|
|
||||||
|
## Format
|
||||||
|
|
||||||
|
Store paths in JSON are represented as strings containing just the hash and name portion, without the store directory prefix.
|
||||||
|
|
||||||
|
For example: `"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"`
|
||||||
|
|
||||||
|
(If the store dir is `/nix/store`, then this corresponds to the path `/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv`.)
|
||||||
|
|
||||||
|
## Structure
|
||||||
|
|
||||||
|
The format follows this pattern: `${digest}-${name}`
|
||||||
|
|
||||||
|
- **hash**: Digest rendered in a custom variant of [Base32](https://en.wikipedia.org/wiki/Base32) (20 arbitrary bytes become 32 ASCII characters)
|
||||||
|
- **name**: The package name and optional version/suffix information
|
||||||
|
|
||||||
|
type: string
|
||||||
|
pattern: "^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+$"
|
||||||
|
minLength: 34
|
||||||
|
|
@ -1,102 +1,45 @@
|
||||||
# Store object info JSON format
|
{{#include store-object-info-v1-fixed.md}}
|
||||||
|
|
||||||
> **Warning**
|
## Examples
|
||||||
>
|
|
||||||
> This JSON format is currently
|
|
||||||
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
|
|
||||||
> and subject to change.
|
|
||||||
|
|
||||||
Info about a [store object].
|
### Minimal store object (content-addressed)
|
||||||
|
|
||||||
* `path`:
|
```json
|
||||||
|
{{#include schema/store-object-info-v1/pure.json}}
|
||||||
|
```
|
||||||
|
|
||||||
[Store path][store path] to the given store object.
|
### Store object with impure fields
|
||||||
|
|
||||||
* `narHash`:
|
```json
|
||||||
|
{{#include schema/store-object-info-v1/impure.json}}
|
||||||
|
```
|
||||||
|
|
||||||
Hash of the [file system object] part of the store object when serialized as a [Nix Archive].
|
### Minimal store object (empty)
|
||||||
|
|
||||||
* `narSize`:
|
```json
|
||||||
|
{{#include schema/store-object-info-v1/empty_pure.json}}
|
||||||
|
```
|
||||||
|
|
||||||
Size of the [file system object] part of the store object when serialized as a [Nix Archive].
|
### Store object with all impure fields
|
||||||
|
|
||||||
* `references`:
|
```json
|
||||||
|
{{#include schema/store-object-info-v1/empty_impure.json}}
|
||||||
|
```
|
||||||
|
|
||||||
An array of [store paths][store path], possibly including this one.
|
### NAR info (minimal)
|
||||||
|
|
||||||
* `ca`:
|
```json
|
||||||
|
{{#include schema/nar-info-v1/pure.json}}
|
||||||
|
```
|
||||||
|
|
||||||
If the store object is [content-addressed],
|
### NAR info (with binary cache fields)
|
||||||
this is the content address of this store object's file system object, used to compute its store path.
|
|
||||||
Otherwise (i.e. if it is [input-addressed]), this is `null`.
|
|
||||||
|
|
||||||
[store path]: @docroot@/store/store-path.md
|
```json
|
||||||
[file system object]: @docroot@/store/file-system-object.md
|
{{#include schema/nar-info-v1/impure.json}}
|
||||||
[Nix Archive]: @docroot@/store/file-system-object/content-address.md#serial-nix-archive
|
```
|
||||||
|
|
||||||
## Impure fields
|
<!-- need to convert YAML to JSON first
|
||||||
|
## Raw Schema
|
||||||
|
|
||||||
These are not intrinsic properties of the store object.
|
[JSON Schema for Store Object Info v1](schema/store-object-info-v1.json)
|
||||||
In other words, the same store object residing in different stores could have different values for these properties.
|
-->
|
||||||
|
|
||||||
* `deriver`:
|
|
||||||
|
|
||||||
If known, the path to the [store derivation] from which this store object was produced.
|
|
||||||
Otherwise `null`.
|
|
||||||
|
|
||||||
[store derivation]: @docroot@/glossary.md#gloss-store-derivation
|
|
||||||
|
|
||||||
* `registrationTime` (optional):
|
|
||||||
|
|
||||||
If known, when this derivation was added to the store.
|
|
||||||
Otherwise `null`.
|
|
||||||
|
|
||||||
* `ultimate`:
|
|
||||||
|
|
||||||
Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere.
|
|
||||||
|
|
||||||
* `signatures`:
|
|
||||||
|
|
||||||
Signatures claiming that this store object is what it claims to be.
|
|
||||||
Not relevant for [content-addressed] store objects,
|
|
||||||
but useful for [input-addressed] store objects.
|
|
||||||
|
|
||||||
[content-addressed]: @docroot@/store/store-object/content-address.md
|
|
||||||
[input-addressed]: @docroot@/glossary.md#gloss-input-addressed-store-object
|
|
||||||
|
|
||||||
### `.narinfo` extra fields
|
|
||||||
|
|
||||||
This meta data is specific to the "binary cache" family of Nix store types.
|
|
||||||
This information is not intrinsic to the store object, but about how it is stored.
|
|
||||||
|
|
||||||
* `url`:
|
|
||||||
|
|
||||||
Where to download a compressed archive of the file system objects of this store object.
|
|
||||||
|
|
||||||
* `compression`:
|
|
||||||
|
|
||||||
The compression format that the archive is in.
|
|
||||||
|
|
||||||
* `fileHash`:
|
|
||||||
|
|
||||||
A digest for the compressed archive itself, as opposed to the data contained within.
|
|
||||||
|
|
||||||
* `fileSize`:
|
|
||||||
|
|
||||||
The size of the compressed archive itself.
|
|
||||||
|
|
||||||
## Computed closure fields
|
|
||||||
|
|
||||||
These fields are not stored at all, but computed by traversing the other fields across all the store objects in a [closure].
|
|
||||||
|
|
||||||
* `closureDownloadSize`:
|
|
||||||
|
|
||||||
The total size of the compressed archive itself for this object, and the compressed archive of every object in this object's [closure].
|
|
||||||
|
|
||||||
### `.narinfo` extra fields
|
|
||||||
|
|
||||||
* `closureSize`:
|
|
||||||
|
|
||||||
The total size of this store object and every other object in its [closure].
|
|
||||||
|
|
||||||
[closure]: @docroot@/glossary.md#gloss-closure
|
|
||||||
|
|
|
||||||
15
doc/manual/source/protocols/json/store-path.md
Normal file
15
doc/manual/source/protocols/json/store-path.md
Normal file
|
|
@ -0,0 +1,15 @@
|
||||||
|
{{#include store-path-v1-fixed.md}}
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
### Simple store path
|
||||||
|
|
||||||
|
```json
|
||||||
|
{{#include schema/store-path-v1/simple.json}}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- need to convert YAML to JSON first
|
||||||
|
## Raw Schema
|
||||||
|
|
||||||
|
[JSON Schema for Store Path v1](schema/store-path-v1.json)
|
||||||
|
-->
|
||||||
2
doc/manual/source/protocols/meson.build
Normal file
2
doc/manual/source/protocols/meson.build
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
# Process JSON schema documentation
|
||||||
|
subdir('json')
|
||||||
|
|
@ -4,7 +4,7 @@ This is the complete specification of the [Nix Archive] format.
|
||||||
The Nix Archive format closely follows the abstract specification of a [file system object] tree,
|
The Nix Archive format closely follows the abstract specification of a [file system object] tree,
|
||||||
because it is designed to serialize exactly that data structure.
|
because it is designed to serialize exactly that data structure.
|
||||||
|
|
||||||
[Nix Archive]: @docroot@/store/file-system-object/content-address.md#nix-archive
|
[Nix Archive]: @docroot@/store/file-system-object/content-address.md#serial-nix-archive
|
||||||
[file system object]: @docroot@/store/file-system-object.md
|
[file system object]: @docroot@/store/file-system-object.md
|
||||||
|
|
||||||
The format of this specification is close to [Extended Backus–Naur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), with the exception of the `str(..)` function / parameterized rule, which length-prefixes and pads strings.
|
The format of this specification is close to [Extended Backus–Naur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), with the exception of the `str(..)` function / parameterized rule, which length-prefixes and pads strings.
|
||||||
|
|
@ -41,3 +41,15 @@ The `str` function / parameterized rule is defined as follows:
|
||||||
- `int(n)` = the 64-bit little endian representation of the number `n`
|
- `int(n)` = the 64-bit little endian representation of the number `n`
|
||||||
|
|
||||||
- `pad(s)` = the byte sequence `s`, padded with 0s to a multiple of 8 byte
|
- `pad(s)` = the byte sequence `s`, padded with 0s to a multiple of 8 byte
|
||||||
|
|
||||||
|
## Kaitai Struct Specification
|
||||||
|
|
||||||
|
The Nix Archive (NAR) format is also formally described using [Kaitai Struct](https://kaitai.io/), an Interface Description Language (IDL) for defining binary data structures.
|
||||||
|
|
||||||
|
> Kaitai Struct provides a language-agnostic, machine-readable specification that can be compiled into parsers for various programming languages (e.g., C++, Python, Java, Rust).
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
{{#include nar.ksy}}
|
||||||
|
```
|
||||||
|
|
||||||
|
The source of the spec can be found [here](https://github.com/nixos/nix/blob/master/src/nix-manual/source/protocols/nix-archive/nar.ksy). Contributions and improvements to the spec are welcomed.
|
||||||
169
doc/manual/source/protocols/nix-archive/nar.ksy
Normal file
169
doc/manual/source/protocols/nix-archive/nar.ksy
Normal file
|
|
@ -0,0 +1,169 @@
|
||||||
|
meta:
|
||||||
|
id: nix_nar
|
||||||
|
title: Nix Archive (NAR)
|
||||||
|
file-extension: nar
|
||||||
|
endian: le
|
||||||
|
doc: |
|
||||||
|
Nix Archive (NAR) format. A simple, reproducible binary archive
|
||||||
|
format used by the Nix package manager to serialize file system objects.
|
||||||
|
doc-ref: 'https://nixos.org/manual/nix/stable/command-ref/nix-store.html#nar-format'
|
||||||
|
|
||||||
|
seq:
|
||||||
|
- id: magic
|
||||||
|
type: padded_str
|
||||||
|
doc: "Magic string, must be 'nix-archive-1'."
|
||||||
|
valid:
|
||||||
|
expr: _.body == 'nix-archive-1'
|
||||||
|
- id: root_node
|
||||||
|
type: node
|
||||||
|
doc: "The root of the archive, which is always a single node."
|
||||||
|
|
||||||
|
types:
|
||||||
|
padded_str:
|
||||||
|
doc: |
|
||||||
|
A string, prefixed with its length (u8le) and
|
||||||
|
padded with null bytes to the next 8-byte boundary.
|
||||||
|
seq:
|
||||||
|
- id: len_str
|
||||||
|
type: u8
|
||||||
|
- id: body
|
||||||
|
type: str
|
||||||
|
size: len_str
|
||||||
|
encoding: 'ASCII'
|
||||||
|
- id: padding
|
||||||
|
size: (8 - (len_str % 8)) % 8
|
||||||
|
|
||||||
|
node:
|
||||||
|
doc: "A single filesystem node (file, directory, or symlink)."
|
||||||
|
seq:
|
||||||
|
- id: open_paren
|
||||||
|
type: padded_str
|
||||||
|
doc: "Must be '(', a token starting the node definition."
|
||||||
|
valid:
|
||||||
|
expr: _.body == '('
|
||||||
|
- id: type_key
|
||||||
|
type: padded_str
|
||||||
|
doc: "Must be 'type'."
|
||||||
|
valid:
|
||||||
|
expr: _.body == 'type'
|
||||||
|
- id: type_val
|
||||||
|
type: padded_str
|
||||||
|
doc: "The type of the node: 'regular', 'directory', or 'symlink'."
|
||||||
|
- id: body
|
||||||
|
type:
|
||||||
|
switch-on: type_val.body
|
||||||
|
cases:
|
||||||
|
"'directory'": type_directory
|
||||||
|
"'regular'": type_regular
|
||||||
|
"'symlink'": type_symlink
|
||||||
|
- id: close_paren
|
||||||
|
type: padded_str
|
||||||
|
valid:
|
||||||
|
expr: _.body == ')'
|
||||||
|
if: "type_val.body != 'directory'"
|
||||||
|
doc: "Must be ')', a token ending the node definition."
|
||||||
|
|
||||||
|
type_directory:
|
||||||
|
doc: "A directory node, containing a list of entries. Entries must be ordered by their names."
|
||||||
|
seq:
|
||||||
|
- id: entries
|
||||||
|
type: dir_entry
|
||||||
|
repeat: until
|
||||||
|
repeat-until: _.kind.body == ')'
|
||||||
|
types:
|
||||||
|
dir_entry:
|
||||||
|
doc: "A single entry within a directory, or a terminator."
|
||||||
|
seq:
|
||||||
|
- id: kind
|
||||||
|
type: padded_str
|
||||||
|
valid:
|
||||||
|
expr: _.body == 'entry' or _.body == ')'
|
||||||
|
doc: "Must be 'entry' (for a child node) or ')' (for terminator)."
|
||||||
|
- id: open_paren
|
||||||
|
type: padded_str
|
||||||
|
valid:
|
||||||
|
expr: _.body == '('
|
||||||
|
if: 'kind.body == "entry"'
|
||||||
|
- id: name_key
|
||||||
|
type: padded_str
|
||||||
|
valid:
|
||||||
|
expr: _.body == 'name'
|
||||||
|
if: 'kind.body == "entry"'
|
||||||
|
- id: name
|
||||||
|
type: padded_str
|
||||||
|
if: 'kind.body == "entry"'
|
||||||
|
- id: node_key
|
||||||
|
type: padded_str
|
||||||
|
valid:
|
||||||
|
expr: _.body == 'node'
|
||||||
|
if: 'kind.body == "entry"'
|
||||||
|
- id: node
|
||||||
|
type: node
|
||||||
|
if: 'kind.body == "entry"'
|
||||||
|
doc: "The child node, present only if kind is 'entry'."
|
||||||
|
- id: close_paren
|
||||||
|
type: padded_str
|
||||||
|
valid:
|
||||||
|
expr: _.body == ')'
|
||||||
|
if: 'kind.body == "entry"'
|
||||||
|
instances:
|
||||||
|
is_terminator:
|
||||||
|
value: kind.body == ')'
|
||||||
|
|
||||||
|
type_regular:
|
||||||
|
doc: "A regular file node."
|
||||||
|
seq:
|
||||||
|
# Read attributes (like 'executable') until we hit 'contents'
|
||||||
|
- id: attributes
|
||||||
|
type: reg_attribute
|
||||||
|
repeat: until
|
||||||
|
repeat-until: _.key.body == "contents"
|
||||||
|
# After the 'contents' token, read the file data
|
||||||
|
- id: file_data
|
||||||
|
type: file_content
|
||||||
|
instances:
|
||||||
|
is_executable:
|
||||||
|
value: 'attributes[0].key.body == "executable"'
|
||||||
|
doc: "True if the file has the 'executable' attribute."
|
||||||
|
types:
|
||||||
|
reg_attribute:
|
||||||
|
doc: "An attribute of the file, e.g., 'executable' or 'contents'."
|
||||||
|
seq:
|
||||||
|
- id: key
|
||||||
|
type: padded_str
|
||||||
|
doc: "Attribute key, e.g., 'executable' or 'contents'."
|
||||||
|
valid:
|
||||||
|
expr: _.body == 'executable' or _.body == 'contents'
|
||||||
|
- id: value
|
||||||
|
type: padded_str
|
||||||
|
if: 'key.body == "executable"'
|
||||||
|
valid:
|
||||||
|
expr: _.body == ''
|
||||||
|
doc: "Must be '' if key is 'executable'."
|
||||||
|
file_content:
|
||||||
|
doc: "The raw data of the file, prefixed by length."
|
||||||
|
seq:
|
||||||
|
- id: len_contents
|
||||||
|
type: u8
|
||||||
|
# # This relies on the property of instances that they are lazily evaluated and cached.
|
||||||
|
- size: 0
|
||||||
|
if: nar_offset < 0
|
||||||
|
- id: contents
|
||||||
|
size: len_contents
|
||||||
|
- id: padding
|
||||||
|
size: (8 - (len_contents % 8)) % 8
|
||||||
|
instances:
|
||||||
|
nar_offset:
|
||||||
|
value: _io.pos
|
||||||
|
|
||||||
|
type_symlink:
|
||||||
|
doc: "A symbolic link node."
|
||||||
|
seq:
|
||||||
|
- id: target_key
|
||||||
|
type: padded_str
|
||||||
|
doc: "Must be 'target'."
|
||||||
|
valid:
|
||||||
|
expr: _.body == 'target'
|
||||||
|
- id: target_val
|
||||||
|
type: padded_str
|
||||||
|
doc: "The destination path of the symlink."
|
||||||
|
|
@ -13,7 +13,7 @@
|
||||||
|
|
||||||
- The `discard-references` feature has been stabilized.
|
- The `discard-references` feature has been stabilized.
|
||||||
This means that the
|
This means that the
|
||||||
[unsafeDiscardReferences](@docroot@/development/experimental-features.md#xp-feature-discard-references)
|
[unsafeDiscardReferences](@docroot@/language/advanced-attributes.md#adv-attr-unsafeDiscardReferences)
|
||||||
attribute is no longer guarded by an experimental flag and can be used
|
attribute is no longer guarded by an experimental flag and can be used
|
||||||
freely.
|
freely.
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -17,8 +17,8 @@
|
||||||
|
|
||||||
- `nix-shell` shebang lines now support single-quoted arguments.
|
- `nix-shell` shebang lines now support single-quoted arguments.
|
||||||
|
|
||||||
- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/development/experimental-features.md#xp-fetch-tree).
|
- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/development/experimental-features.md#xp-feature-fetch-tree).
|
||||||
This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/development/experimental-features.md#xp-fetch-tree).
|
This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/development/experimental-features.md#xp-feature-flakes).
|
||||||
|
|
||||||
- The interface for creating and updating lock files has been overhauled:
|
- The interface for creating and updating lock files has been overhauled:
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -14,7 +14,7 @@
|
||||||
|
|
||||||
- Modify `nix derivation {add,show}` JSON format [#9866](https://github.com/NixOS/nix/issues/9866) [#10722](https://github.com/NixOS/nix/pull/10722)
|
- Modify `nix derivation {add,show}` JSON format [#9866](https://github.com/NixOS/nix/issues/9866) [#10722](https://github.com/NixOS/nix/pull/10722)
|
||||||
|
|
||||||
The JSON format for derivations has been slightly revised to better conform to our [JSON guidelines](@docroot@/development/cli-guideline.md#returning-future-proof-json).
|
The JSON format for derivations has been slightly revised to better conform to our [JSON guidelines](@docroot@/development/json-guideline.md).
|
||||||
In particular, the hash algorithm and content addressing method of content-addressed derivation outputs are now separated into two fields `hashAlgo` and `method`,
|
In particular, the hash algorithm and content addressing method of content-addressed derivation outputs are now separated into two fields `hashAlgo` and `method`,
|
||||||
rather than one field with an arcane `:`-separated format.
|
rather than one field with an arcane `:`-separated format.
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -93,7 +93,7 @@
|
||||||
|
|
||||||
- Support unit prefixes in configuration settings [#10668](https://github.com/NixOS/nix/pull/10668)
|
- Support unit prefixes in configuration settings [#10668](https://github.com/NixOS/nix/pull/10668)
|
||||||
|
|
||||||
Configuration settings in Nix now support unit prefixes, allowing for more intuitive and readable configurations. For example, you can now specify [`--min-free 1G`](@docroot@/command-ref/opt-common.md#opt-min-free) to set the minimum free space to 1 gigabyte.
|
Configuration settings in Nix now support unit prefixes, allowing for more intuitive and readable configurations. For example, you can now specify [`--min-free 1G`](@docroot@/command-ref/conf-file.md#conf-min-free) to set the minimum free space to 1 gigabyte.
|
||||||
|
|
||||||
This enhancement was extracted from [#7851](https://github.com/NixOS/nix/pull/7851) and is also useful for PR [#10661](https://github.com/NixOS/nix/pull/10661).
|
This enhancement was extracted from [#7851](https://github.com/NixOS/nix/pull/7851) and is also useful for PR [#10661](https://github.com/NixOS/nix/pull/10661).
|
||||||
|
|
||||||
|
|
|
||||||
53
doc/manual/source/store/build-trace.md
Normal file
53
doc/manual/source/store/build-trace.md
Normal file
|
|
@ -0,0 +1,53 @@
|
||||||
|
# Build Trace
|
||||||
|
|
||||||
|
> **Warning**
|
||||||
|
>
|
||||||
|
> This entire concept is currently
|
||||||
|
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-ca-derivations)
|
||||||
|
> and subject to change.
|
||||||
|
|
||||||
|
The *build trace* is a [memoization table](https://en.wikipedia.org/wiki/Memoization) for builds.
|
||||||
|
It maps the inputs of builds to the outputs of builds.
|
||||||
|
Concretely, that means it maps [derivations][derivation] to maps of [output] names to [store objects][store object].
|
||||||
|
|
||||||
|
In general the derivations used as a key should be [*resolved*](./resolution.md).
|
||||||
|
A build trace with all-resolved-derivation keys is also called a *base build trace* for extra clarity.
|
||||||
|
If all the resolved inputs of a derivation are content-addressed, that means the inputs will be fully determined, leaving no ambiguity for what build was performed.
|
||||||
|
(Input-addressed inputs however are still ambiguous. They too should be locked down, but this is left as future work.)
|
||||||
|
|
||||||
|
Accordingly, to look up an unresolved derivation, one must first resolve it to get a resolved derivation.
|
||||||
|
Resolving itself involves looking up entries in the build trace, so this is a mutually recursive process that will end up inspecting possibly many entries.
|
||||||
|
|
||||||
|
Except for the issue with input-addressed paths called out above, base build traces are trivially *coherent* -- incoherence is not possible.
|
||||||
|
That means that the claims that each key-value base build trace entry makes are independent, and no mapping invalidates another mapping.
|
||||||
|
|
||||||
|
Whether the mappings are *true*, i.e. the faithful recording of actual builds performed, is another matter.
|
||||||
|
Coherence is about the multiple claims of the build trace being mutually consistent, not about whether the claims are individually true or false.
|
||||||
|
|
||||||
|
In general, there is no way to audit a build trace entry except for by performing the build again from scratch.
|
||||||
|
And even in that case, a different result doesn't mean the original entry was a "lie", because the derivation being built may be non-deterministic.
|
||||||
|
As such, the decision of whether to trust a counterparty's build trace is a fundamentally subjective policy choice.
|
||||||
|
Build trace entries are typically *signed* in order to enable arbitrary public-key-based trust polices.
|
||||||
|
|
||||||
|
## Derived build traces
|
||||||
|
|
||||||
|
Implementations that wish to memoize the above may also keep additional *derived* build trace entries that do map unresolved derivations.
|
||||||
|
But if they do so, they *must* also keep the underlying base entries with resolved derivation keys around.
|
||||||
|
Firstly, this ensures that the derived entries are merely cache, which could be recomputed from scratch.
|
||||||
|
Secondly, this ensures the coherence of the derived build trace.
|
||||||
|
|
||||||
|
Unlike with base build traces, incoherence with derived build traces is possible.
|
||||||
|
The key ingredient is that derivation resolution is only deterministic with respect to a fixed base build trace.
|
||||||
|
Without fixing the base build trace, it inherits the subjectivity of base build traces themselves.
|
||||||
|
|
||||||
|
Concretely, suppose there are three derivations \\(a\\), \\(b\\), and \\(c\\).
|
||||||
|
Let \\(a\\) be a resolved derivation, but let \\(b\\) and \\(c\\) be unresolved and both take as an input an output of \\(a\\).
|
||||||
|
Now suppose that derived entries are made for \\(b\\) and \\(c\\) based on two different entries of \\(a\\).
|
||||||
|
(This could happen if \\(a\\) is non-deterministic, \\(a\\) and \\(b\\) are built in one store, \\(a\\) and \\(c\\) are built in another store, and then a third store substitutes from both of the first two stores.)
|
||||||
|
|
||||||
|
If trusting the derived build trace entries for \\(b\\) and \\(c\\) requires that each one's underlying entry for \\(a\\) be also trusted, the two different mappings for \\(a\\) will be caught.
|
||||||
|
However, if \\(b\\) and \\(c\\)'s entries can be combined in isolation, there will be nothing to catch the contradiction in their hidden assumptions about \\(a\\)'s output.
|
||||||
|
|
||||||
|
[derivation]: ./derivation/index.md
|
||||||
|
[output]: ./derivation/outputs/index.md
|
||||||
|
[store object]: @docroot@/store/store-object.md
|
||||||
|
|
@ -8,7 +8,7 @@
|
||||||
|
|
||||||
- Once this is done, the derivation is *normalized*, replacing each input deriving path with its store path, which we now know from realising the input.
|
- Once this is done, the derivation is *normalized*, replacing each input deriving path with its store path, which we now know from realising the input.
|
||||||
|
|
||||||
## Builder Execution
|
## Builder Execution {#builder-execution}
|
||||||
|
|
||||||
The [`builder`](./derivation/index.md#builder) is executed as follows:
|
The [`builder`](./derivation/index.md#builder) is executed as follows:
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -102,7 +102,7 @@ But rather than somehow scanning all the other fields for inputs, Nix requires t
|
||||||
|
|
||||||
### System {#system}
|
### System {#system}
|
||||||
|
|
||||||
The system type on which the [`builder`](#attr-builder) executable is meant to be run.
|
The system type on which the [`builder`](#builder) executable is meant to be run.
|
||||||
|
|
||||||
A necessary condition for Nix to schedule a given derivation on some [Nix instance] is for the "system" of that derivation to match that instance's [`system` configuration option] or [`extra-platforms` configuration option].
|
A necessary condition for Nix to schedule a given derivation on some [Nix instance] is for the "system" of that derivation to match that instance's [`system` configuration option] or [`extra-platforms` configuration option].
|
||||||
|
|
||||||
|
|
@ -245,7 +245,7 @@ If those other derivations *also* abide by this common case (and likewise for tr
|
||||||
> note the ".drv"
|
> note the ".drv"
|
||||||
> ```
|
> ```
|
||||||
|
|
||||||
## Extending the model to be higher-order
|
## Extending the model to be higher-order {#dynamic}
|
||||||
|
|
||||||
**Experimental feature**: [`dynamic-derivations`](@docroot@/development/experimental-features.md#xp-feature-dynamic-derivations)
|
**Experimental feature**: [`dynamic-derivations`](@docroot@/development/experimental-features.md#xp-feature-dynamic-derivations)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -167,10 +167,10 @@ It is only in the potential for that check to fail that they are different.
|
||||||
>
|
>
|
||||||
> In a future world where floating content-addressing is also stable, we in principle no longer need separate [fixed](#fixed) content-addressing.
|
> In a future world where floating content-addressing is also stable, we in principle no longer need separate [fixed](#fixed) content-addressing.
|
||||||
> Instead, we could always use floating content-addressing, and separately assert the precise value content address of a given store object to be used as an input (of another derivation).
|
> Instead, we could always use floating content-addressing, and separately assert the precise value content address of a given store object to be used as an input (of another derivation).
|
||||||
> A stand-alone assertion object of this sort is not yet implemented, but its possible creation is tracked in [Issue #11955](https://github.com/NixOS/nix/issues/11955).
|
> A stand-alone assertion object of this sort is not yet implemented, but its possible creation is tracked in [issue #11955](https://github.com/NixOS/nix/issues/11955).
|
||||||
>
|
>
|
||||||
> In the current version of Nix, fixed outputs which fail their hash check are still registered as valid store objects, just not registered as outputs of the derivation which produced them.
|
> In the current version of Nix, fixed outputs which fail their hash check are still registered as valid store objects, just not registered as outputs of the derivation which produced them.
|
||||||
> This is an optimization that means if the wrong output hash is specified in a derivation, and then the derivation is recreated with the right output hash, derivation does not need to be rebuilt --- avoiding downloading potentially large amounts of data twice.
|
> This is an optimization that means if the wrong output hash is specified in a derivation, and then the derivation is recreated with the right output hash, derivation does not need to be rebuilt — avoiding downloading potentially large amounts of data twice.
|
||||||
> This optimisation prefigures the design above:
|
> This optimisation prefigures the design above:
|
||||||
> If the output hash assertion was removed outside the derivation itself, Nix could additionally not only register that outputted store object like today, but could also make note that derivation did in fact successfully download some data.
|
> If the output hash assertion was removed outside the derivation itself, Nix could additionally not only register that outputted store object like today, but could also make note that derivation did in fact successfully download some data.
|
||||||
For example, for the "fetch URL" example above, making such a note is tantamount to recording what data is available at the time of download at the given URL.
|
For example, for the "fetch URL" example above, making such a note is tantamount to recording what data is available at the time of download at the given URL.
|
||||||
|
|
|
||||||
|
|
@ -43,7 +43,7 @@ In particular, the specification decides:
|
||||||
|
|
||||||
- if the content is content-addressed, how is it content addressed
|
- if the content is content-addressed, how is it content addressed
|
||||||
|
|
||||||
- if the content is content-addressed, [what is its content address](./content-address.md#fixed-content-addressing) (and thus what is its [store path])
|
- if the content is content-addressed, [what is its content address](./content-address.md#fixed) (and thus what is its [store path])
|
||||||
|
|
||||||
## Types of derivations
|
## Types of derivations
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -6,26 +6,221 @@
|
||||||
That is to say, an input-addressed output's store path is a function not of the output itself, but of the derivation that produced it.
|
That is to say, an input-addressed output's store path is a function not of the output itself, but of the derivation that produced it.
|
||||||
Even if two store paths have the same contents, if they are produced in different ways, and one is input-addressed, then they will have different store paths, and thus guaranteed to not be the same store object.
|
Even if two store paths have the same contents, if they are produced in different ways, and one is input-addressed, then they will have different store paths, and thus guaranteed to not be the same store object.
|
||||||
|
|
||||||
<!---
|
## Modulo content addressed derivation outputs {#hash-quotient-drv}
|
||||||
|
|
||||||
### Modulo fixed-output derivations
|
A naive implementation of an output hash computation for input-addressed outputs would be to hash the derivation hash and output together.
|
||||||
|
This clearly has the uniqueness properties we want for input-addressed outputs, but suffers from an inefficiency.
|
||||||
|
Specifically, new builds would be required whenever a change is made to a fixed-output derivation, despite having provably no differences in the inputs to the new derivation compared to what it used to be.
|
||||||
|
Concretely, this would cause a "mass rebuild" whenever any fetching detail changes, including mirror lists, certificate authority certificates, etc.
|
||||||
|
|
||||||
**TODO hash derivation modulo.**
|
To solve this problem, we compute output hashes differently, so that certain output hashes become identical.
|
||||||
|
We call this concept quotient hashing, in reference to quotient types or sets.
|
||||||
|
|
||||||
So how do we compute the hash part of the output path of a derivation?
|
So how do we compute the hash part of the output paths of an input-addressed derivation?
|
||||||
This is done by the function `hashDrv`, shown in Figure 5.10.
|
This is done by the function `hashQuotientDerivation`, shown below.
|
||||||
It distinguishes between two cases.
|
|
||||||
If the derivation is a fixed-output derivation, then it computes a hash over just the `outputHash` attributes.
|
|
||||||
|
|
||||||
If the derivation is not a fixed-output derivation, we replace each element in the derivation’s inputDrvs with the result of a call to `hashDrv` for that element.
|
First, a word on inputs.
|
||||||
(The derivation at each store path in `inputDrvs` is converted from its on-disk ATerm representation back to a `StoreDrv` by the function `parseDrv`.) In essence, `hashDrv` partitions store derivations into equivalence classes, and for hashing purpose it replaces each store path in a derivation graph with its equivalence class.
|
`hashQuotientDerivation` is only defined on derivations whose [inputs](@docroot@/store/derivation/index.md#inputs) take the first-order form:
|
||||||
|
```typescript
|
||||||
|
type ConstantPath = {
|
||||||
|
path: StorePath;
|
||||||
|
};
|
||||||
|
|
||||||
The recursion in Figure 5.10 is inefficient:
|
type FirstOrderOutputPath = {
|
||||||
it will call itself once for each path by which a subderivation can be reached, i.e., `O(V k)` times for a derivation graph with `V` derivations and with out-degree of at most `k`.
|
drvPath: StorePath;
|
||||||
In the actual implementation, memoisation is used to reduce this to `O(V + E)` complexity for a graph with E edges.
|
output: OutputName;
|
||||||
|
};
|
||||||
|
|
||||||
-->
|
type FirstOrderDerivingPath = ConstantPath | FirstOrderOutputPath;
|
||||||
|
|
||||||
|
type Inputs = Set<FirstOrderDerivingPath>;
|
||||||
|
```
|
||||||
|
|
||||||
|
For the algorithm below, we adopt a derivation format in which the two types of (first order) derived paths are partitioned into two sets, as follows:
|
||||||
|
```typescript
|
||||||
|
type Derivation = {
|
||||||
|
// inputs: Set<FirstOrderDerivingPath>; // replaced
|
||||||
|
inputSrcs: Set<ConstantPath>; // new instead
|
||||||
|
inputDrvOutputs: Set<FirstOrderOutputPath>; // new instead
|
||||||
|
// ...other fields...
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
In the [currently-experimental][xp-feature-dynamic-derivations] higher-order case where outputs of outputs are allowed as [deriving paths][deriving-path] and thus derivation inputs, derivations using that generalization are not valid arguments to this function.
|
||||||
|
Those derivations must be (partially) [resolved](@docroot@/store/resolution.md) enough first, to the point where no such higher-order inputs remain.
|
||||||
|
Then, and only then, can input addresses be assigned.
|
||||||
|
|
||||||
|
```
|
||||||
|
function hashQuotientDerivation(drv) -> Hash:
|
||||||
|
assert(drv.outputs are input-addressed)
|
||||||
|
drv′ ← drv with {
|
||||||
|
inputDrvOutputs = ⋃(
|
||||||
|
assert(drvPath is store path)
|
||||||
|
case hashOutputsOrQuotientDerivation(readDrv(drvPath)) of
|
||||||
|
drvHash : Hash →
|
||||||
|
(drvHash.toBase16(), output)
|
||||||
|
outputHashes : Map[String, Hash] →
|
||||||
|
(outputHashes[output].toBase16(), "out")
|
||||||
|
| (drvPath, output) ∈ drv.inputDrvOutputs
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return hashSHA256(printDrv(drv′))
|
||||||
|
|
||||||
|
function hashOutputsOrQuotientDerivation(drv) -> Map[String, Hash] | Hash:
|
||||||
|
if drv.outputs are content-addressed:
|
||||||
|
return {
|
||||||
|
outputName ↦ hashSHA256(
|
||||||
|
"fixed:out:" + ca.printMethodAlgo() +
|
||||||
|
":" + ca.hash.toBase16() +
|
||||||
|
":" + ca.makeFixedOutputPath(drv.name, outputName))
|
||||||
|
| (outputName ↦ output) ∈ drv.outputs
|
||||||
|
, ca = output.contentAddress // or get from build trace if floating
|
||||||
|
}
|
||||||
|
else: // drv.outputs are input-addressed
|
||||||
|
return hashQuotientDerivation(drv)
|
||||||
|
```
|
||||||
|
|
||||||
|
### `hashQuotientDerivation`
|
||||||
|
|
||||||
|
We replace each element in the derivation's `inputDrvOutputs` using data from a call to `hashOutputsOrQuotientDerivation` on the `drvPath` of that element.
|
||||||
|
When `hashOutputsOrQuotientDerivation` returns a single drv hash (because the input derivation in question is input-addressing), we simply swap out the `drvPath` for that hash, and keep the same output name.
|
||||||
|
When `hashOutputsOrQuotientDerivation` returns a map of content addresses per-output, we look up the output in question, and pair it with the output name `out`.
|
||||||
|
|
||||||
|
The resulting pseudo-derivation (with hashes instead of store paths in `inputDrvs`) is then printed (in the ["ATerm" format](@docroot@/protocols/derivation-aterm.md)) and hashed, and this becomes the hash of the "quotient derivation".
|
||||||
|
|
||||||
|
When calculating output hashes, `hashQuotientDerivation` is called on an almost-complete input-addressing derivation, which is just missing its input-addressed output paths.
|
||||||
|
The derivation hash is then used to calculate output paths for each output.
|
||||||
|
<!-- TODO describe how this is done. -->
|
||||||
|
Those output paths can then be substituted into the almost-complete input-addressed derivation to complete it.
|
||||||
|
|
||||||
|
> **Note**
|
||||||
|
>
|
||||||
|
> There may be an unintentional deviation from specification currently implemented in the `(outputHashes[output].toBase16(), "out")` case.
|
||||||
|
> This is not fatal because the deviation would only apply for content-addressing derivations with more than one output, and that only occurs in the floating case, which is [experimental][xp-feature-ca-derivations].
|
||||||
|
> Once this bug is fixed, this note will be removed.
|
||||||
|
|
||||||
|
### `hashOutputsOrQuotientDerivation`
|
||||||
|
|
||||||
|
How does `hashOutputsOrQuotientDerivation` in turn work?
|
||||||
|
It consists of two main cases, based on whether the outputs of the derivation are to be input-addressed or content-addressed.
|
||||||
|
|
||||||
|
#### Input-addressed outputs case
|
||||||
|
|
||||||
|
In the input-addressed case, it just calls `hashQuotientDerivation`, and returns that derivation hash.
|
||||||
|
This makes `hashQuotientDerivation` and `hashOutputsOrQuotientDerivation` mutually-recursive.
|
||||||
|
|
||||||
|
> **Note**
|
||||||
|
>
|
||||||
|
> In this case, `hashQuotientDerivation` is being called on a *complete* input-addressing derivation that already has its output paths calculated.
|
||||||
|
> The `inputDrvs` substitution takes place anyways.
|
||||||
|
|
||||||
|
#### Content-addressed outputs case
|
||||||
|
|
||||||
|
If the outputs are [content-addressed](./content-address.md), then it computes a hash for each output derived from the content-address of that output.
|
||||||
|
|
||||||
|
> **Note**
|
||||||
|
>
|
||||||
|
> In the [fixed](./content-address.md#fixed) content-addressing case, the outputs' content addresses are statically specified in advance, so this always just works.
|
||||||
|
> (The fixed case is what the pseudo-code shows.)
|
||||||
|
>
|
||||||
|
> In the [floating](./content-address.md#floating) case, the content addresses are not specified in advance.
|
||||||
|
> This is what the "or get from [build trace](@docroot@/store/build-trace.md) if floating" comment refers to.
|
||||||
|
> In this case, the algorithm is *stuck* until the input in question is built, and we know what the actual contents of the output in question is.
|
||||||
|
>
|
||||||
|
> That is OK however, because there is no problem with delaying the assigning of input addresses (which, remember, is what `hashQuotientDerivation` is ultimately for) until all inputs are known.
|
||||||
|
|
||||||
|
### Performance
|
||||||
|
|
||||||
|
The recursion in the algorithm is potentially inefficient:
|
||||||
|
it could call itself once for each path by which a subderivation can be reached, i.e., `O(V^k)` times for a derivation graph with `V` derivations and with out-degree of at most `k`.
|
||||||
|
In the actual implementation, [memoisation](https://en.wikipedia.org/wiki/Memoization) is used to reduce this cost to be proportional to the total number of `inputDrvOutputs` encountered.
|
||||||
|
|
||||||
|
### Semantic properties
|
||||||
|
|
||||||
|
*See [this chapter's appendix](@docroot@/store/math-notation.md) on grammar and metavariable conventions.*
|
||||||
|
|
||||||
|
In essence, `hashQuotientDerivation` partitions input-addressing derivations into equivalence classes: every derivation in that equivalence class is mapped to the same derivation hash.
|
||||||
|
We can characterize this equivalence relation directly, by working bottom up.
|
||||||
|
|
||||||
|
We start by defining an equivalence relation on first-order output deriving paths that refer content-addressed derivation outputs. Two such paths are equivalent if they refer to the same store object:
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\AxiomC{$d\_1$ is content-addressing}
|
||||||
|
\\AxiomC{$d\_2$ is content-addressing}
|
||||||
|
\\AxiomC{$
|
||||||
|
{}^\*(\text{path}(d\_1), o\_1)
|
||||||
|
\=
|
||||||
|
{}^\*(\text{path}(d\_2), o\_2)
|
||||||
|
$}
|
||||||
|
\\TrinaryInfC{$(\text{path}(d\_1), o\_1) \\,\\sim_{\\mathrm{CA}}\\, (d\_2, o\_2)$}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
where \\({}^*(s, o)\\) denotes the store object that the output deriving path refers to.
|
||||||
|
|
||||||
|
We will also need the following construction to lift any equivalence relation on \\(X\\) to an equivalence relation on (finite) sets of \\(X\\) (in short, \\(\\mathcal{P}(X)\\)):
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\AxiomC{$\\forall a \\in A. \\exists b \\in B. a \\,\\sim\_X\\, b$}
|
||||||
|
\\AxiomC{$\\forall b \\in B. \\exists a \\in A. b \\,\\sim\_X\\, a$}
|
||||||
|
\\BinaryInfC{$A \\,\\sim_{\\mathcal{P}(X)}\\, B$}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
Now we can define the equivalence relation \\(\\sim_\\mathrm{IA}\\) on input-addressed derivation outputs. Two input-addressed outputs are equivalent if their derivations are equivalent (via the yet-to-be-defined \\(\\sim_{\\mathrm{IADrv}}\\) relation) and their output names are the same:
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\AxiomC{$d\_1$ is input-addressing}
|
||||||
|
\\AxiomC{$d\_2$ is input-addressing}
|
||||||
|
\\AxiomC{$d\_1 \\,\\sim_{\\mathrm{IADrv}}\\, d\_2$}
|
||||||
|
\\AxiomC{$o\_1 = o\_2$}
|
||||||
|
\\QuaternaryInfC{$(\text{path}(d\_1), o\_1) \\,\\sim_{\\mathrm{IA}}\\, (\text{path}(d\_2), o\_2)$}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
And now we can define \\(\\sim_{\\mathrm{IADrv}}\\).
|
||||||
|
Two input-addressed derivations are equivalent if their content-addressed inputs are equivalent, their input-addressed inputs are also equivalent, and they are otherwise equal:
|
||||||
|
|
||||||
|
<!-- cheating a bit with the semantics to get a good layout that fits on the page -->
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\alwaysNoLine
|
||||||
|
\\AxiomC{$
|
||||||
|
\\mathrm{caInputs}(d\_1)
|
||||||
|
\\,\\sim_{\\mathcal{P}(\\mathrm{CA})}\\,
|
||||||
|
\\mathrm{caInputs}(d\_2)
|
||||||
|
$}
|
||||||
|
\\AxiomC{$
|
||||||
|
\\mathrm{iaInputs}(d\_1)
|
||||||
|
\\,\\sim_{\\mathcal{P}(\\mathrm{IA})}\\,
|
||||||
|
\\mathrm{iaInputs}(d\_2)
|
||||||
|
$}
|
||||||
|
\\BinaryInfC{$
|
||||||
|
d\_1\left[\\mathrm{inputDrvOutputs} := \\{\\}\right]
|
||||||
|
\=
|
||||||
|
d\_2\left[\\mathrm{inputDrvOutputs} := \\{\\}\right]
|
||||||
|
$}
|
||||||
|
\\alwaysSingleLine
|
||||||
|
\\UnaryInfC{$d\_1 \\,\\sim_{\\mathrm{IADrv}}\\, d\_2$}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
where \\(\\mathrm{caInputs}(d)\\) returns the content-addressed inputs of \\(d\\) and \\(\\mathrm{iaInputs}(d)\\) returns the input-addressed inputs.
|
||||||
|
|
||||||
|
> **Note**
|
||||||
|
>
|
||||||
|
> An astute reader might notice that nowhere does `inputSrcs` enter into these definitions.
|
||||||
|
> That means that replacing an input derivation with its outputs directly added to `inputSrcs` always results in a derivation in a different equivalence class, despite the resulting input closure (as would be mounted in the store at build time) being the same.
|
||||||
|
> [Issue #9259](https://github.com/NixOS/nix/issues/9259) is about creating a coarser equivalence relation to address this.
|
||||||
|
>
|
||||||
|
> \\(\\sim_\mathrm{Drv}\\) from [derivation resolution](@docroot@/store/resolution.md) is such an equivalence relation.
|
||||||
|
> It is coarser than this one: any two derivations which are "'hash quotient derivation'-equivalent" (\\(\\sim_\mathrm{IADrv}\\)) are also "resolution-equivalent" (\\(\\sim_\mathrm{Drv}\\)).
|
||||||
|
> It also relates derivations whose `inputDrvOutputs` have been rewritten into `inputSrcs`.
|
||||||
|
|
||||||
|
[deriving-path]: @docroot@/store/derivation/index.md#deriving-path
|
||||||
|
[xp-feature-dynamic-derivations]: @docroot@/development/experimental-features.md#xp-feature-dynamic-derivations
|
||||||
[xp-feature-ca-derivations]: @docroot@/development/experimental-features.md#xp-feature-ca-derivations
|
[xp-feature-ca-derivations]: @docroot@/development/experimental-features.md#xp-feature-ca-derivations
|
||||||
[xp-feature-git-hashing]: @docroot@/development/experimental-features.md#xp-feature-git-hashing
|
|
||||||
[xp-feature-impure-derivations]: @docroot@/development/experimental-features.md#xp-feature-impure-derivations
|
|
||||||
|
|
|
||||||
|
|
@ -46,7 +46,7 @@ be many different serialisations.
|
||||||
For these reasons, Nix has its very own archive format—the Nix Archive (NAR) format,
|
For these reasons, Nix has its very own archive format—the Nix Archive (NAR) format,
|
||||||
which is carefully designed to avoid the problems described above.
|
which is carefully designed to avoid the problems described above.
|
||||||
|
|
||||||
The exact specification of the Nix Archive format is in [specified here](../../protocols/nix-archive.md).
|
The exact specification of the Nix Archive format is in [specified here](../../protocols/nix-archive/index.md).
|
||||||
|
|
||||||
## Content addressing File System Objects beyond a single serialisation pass
|
## Content addressing File System Objects beyond a single serialisation pass
|
||||||
|
|
||||||
|
|
|
||||||
16
doc/manual/source/store/math-notation.md
Normal file
16
doc/manual/source/store/math-notation.md
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
# Appendix: Math notation
|
||||||
|
|
||||||
|
A few times in this manual, formal "proof trees" are used for [natural deduction](https://en.wikipedia.org/wiki/Natural_deduction)-style definition of various [relations](https://en.wikipedia.org/wiki/Relation_(mathematics)).
|
||||||
|
|
||||||
|
The following grammar and assignment of metavariables to syntactic categories is used in these sections.
|
||||||
|
|
||||||
|
\\begin{align}
|
||||||
|
s, t &\in \text{store-path} \\\\
|
||||||
|
o &\in \text{output-name} \\\\
|
||||||
|
i, p &\in \text{deriving-path} \\\\
|
||||||
|
d &\in \text{derivation}
|
||||||
|
\\end{align}
|
||||||
|
|
||||||
|
\\begin{align}
|
||||||
|
\text{deriving-path} \quad p &::= s \mid (p, o)
|
||||||
|
\\end{align}
|
||||||
219
doc/manual/source/store/resolution.md
Normal file
219
doc/manual/source/store/resolution.md
Normal file
|
|
@ -0,0 +1,219 @@
|
||||||
|
# Derivation Resolution
|
||||||
|
|
||||||
|
*See [this chapter's appendix](@docroot@/store/math-notation.md) on grammar and metavariable conventions.*
|
||||||
|
|
||||||
|
To *resolve* a derivation is to replace its [inputs] with the simplest inputs — plain store paths — that denote the same store objects.
|
||||||
|
|
||||||
|
Derivations that only have store paths as inputs are likewise called *resolved derivations*.
|
||||||
|
(They are called that whether they are in fact the output of derivation resolution, or just made that way without non-store-path inputs to begin with.)
|
||||||
|
|
||||||
|
## Input Content Equivalence of Derivations
|
||||||
|
|
||||||
|
[Deriving paths][deriving-path] intentionally make it possible to refer to the same [store object] in multiple ways.
|
||||||
|
This is a consequence of content-addressing, since different derivations can produce the same outputs, and the same data can also be manually added to the store.
|
||||||
|
This is also a consequence even of input-addressing, as an output can be referred to by derivation and output name, or directly by its [computed](./derivation/outputs/input-address.md) store path.
|
||||||
|
Since dereferencing deriving paths is thus not injective, it induces an equivalence relation on deriving paths.
|
||||||
|
|
||||||
|
Let's call this equivalence relation \\(\\sim\\), where \\(p_1 \\sim p_2\\) means that deriving paths \\(p_1\\) and \\(p_2\\) refer to the same store object.
|
||||||
|
|
||||||
|
**Content Equivalence**: Two deriving paths are equivalent if they refer to the same store object:
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\AxiomC{${}^*p_1 = {}^*p_2$}
|
||||||
|
\\UnaryInfC{$p_1 \\,\\sim_\\mathrm{DP}\\, p_2$}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
where \\({}^\*p\\) denotes the store object that deriving path \\(p\\) refers to.
|
||||||
|
|
||||||
|
This also induces an equivalence relation on sets of deriving paths:
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\AxiomC{$\\{ {}^*p | p \\in P_1 \\} = \\{ {}^*p | p \\in P_2 \\}$}
|
||||||
|
\\UnaryInfC{$P_1 \\,\\sim_{\\mathcal{P}(\\mathrm{DP})}\\, P_2$}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
**Input Content Equivalence**: This, in turn, induces an equivalence relation on derivations: two derivations are equivalent if their inputs are equivalent, and they are otherwise equal:
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\AxiomC{$\\mathrm{inputs}(d_1) \\,\\sim_{\\mathcal{P}(\\mathrm{DP})}\\, \\mathrm{inputs}(d_2)$}
|
||||||
|
\\AxiomC{$
|
||||||
|
d\_1\left[\\mathrm{inputs} := \\{\\}\right]
|
||||||
|
\=
|
||||||
|
d\_2\left[\\mathrm{inputs} := \\{\\}\right]
|
||||||
|
$}
|
||||||
|
\\BinaryInfC{$d_1 \\,\\sim_\\mathrm{Drv}\\, d_2$}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
Derivation resolution always maps derivations to input-content-equivalent derivations.
|
||||||
|
|
||||||
|
## Resolution relation
|
||||||
|
|
||||||
|
Dereferencing a derived path — \\({}^\*p\\) above — was just introduced as a black box.
|
||||||
|
But actually it is a multi-step process of looking up build results in the [build trace] that itself depends on resolving the lookup keys.
|
||||||
|
Resolution is thus a recursive multi-step process that is worth diagramming formally.
|
||||||
|
|
||||||
|
We can do this with a small-step binary transition relation; let's call it \\(\rightsquigarrow\\).
|
||||||
|
We can then conclude dereferenced equality like this:
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\AxiomC{$p\_1 \\rightsquigarrow^* p$}
|
||||||
|
\\AxiomC{$p\_2 \\rightsquigarrow^* p$}
|
||||||
|
\\BinaryInfC{${}^*p\_1 = {}^*p\_2$}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
I.e. by showing that both original items resolve (over 0 or more small steps, hence the \\({}^*\\)) to the same exact item.
|
||||||
|
|
||||||
|
With this motivation, let's now formalize a [small-step](https://en.wikipedia.org/wiki/Operational_semantics#Small-step_semantics) system of reduction rules for resolution.
|
||||||
|
|
||||||
|
### Formal rules
|
||||||
|
|
||||||
|
### \\(\text{resolved}\\) unary relation
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\AxiomC{$s \in \text{store-path}$}
|
||||||
|
\\UnaryInfC{$s$ resolved}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\AxiomC{$\forall i \in \mathrm{inputs}(d). i \text{ resolved}$}
|
||||||
|
\\UnaryInfC{$d$ resolved}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
### \\(\rightsquigarrow\\) binary relation
|
||||||
|
|
||||||
|
> **Remark**
|
||||||
|
>
|
||||||
|
> Actually, to be completely formal we would need to keep track of the build trace we are choosing to resolve against.
|
||||||
|
>
|
||||||
|
> We could do that by making \\(\rightsquigarrow\\) a ternary relation, which would pass the build trace to itself until it finally uses it in that one rule.
|
||||||
|
> This would add clutter more than insight, so we didn't bother to write it.
|
||||||
|
>
|
||||||
|
> There are other options too, like saying the whole reduction rule system is parameterized on the build trace, essentially [currying](https://en.wikipedia.org/wiki/Currying) the ternary \\(\rightsquigarrow\\) into a function from build traces to the binary relation written above.
|
||||||
|
|
||||||
|
#### Core build trace lookup rule
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\AxiomC{$s \in \text{store-path}$}
|
||||||
|
\\AxiomC{${}^*s \in \text{derivation}$}
|
||||||
|
\\AxiomC{${}^*s$ resolved}
|
||||||
|
\\AxiomC{$\mathrm{build\text{-}trace}[s][o] = t$}
|
||||||
|
\\QuaternaryInfC{$(s, o) \rightsquigarrow t$}
|
||||||
|
\\RightLabel{\\scriptsize output path resolution}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
#### Inductive rules
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\AxiomC{$i \\rightsquigarrow i'$}
|
||||||
|
\\AxiomC{$i \\in \\mathrm{inputs}(d)$}
|
||||||
|
\\BinaryInfC{$d \\rightsquigarrow d[i \\mapsto i']$}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\AxiomC{$d \\rightsquigarrow d'$}
|
||||||
|
\\UnaryInfC{$(\\mathrm{path}(d), o) \\rightsquigarrow (\\mathrm{path}(d'), o)$}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
\\[
|
||||||
|
\\begin{prooftree}
|
||||||
|
\\AxiomC{$p \\rightsquigarrow p'$}
|
||||||
|
\\UnaryInfC{$(p, o) \\rightsquigarrow (p', o)$}
|
||||||
|
\\end{prooftree}
|
||||||
|
\\]
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
Like all well-behaved evaluation relations, partial resolution is [*confluent*](https://en.wikipedia.org/wiki/Confluence_(abstract_rewriting)).
|
||||||
|
Also, if we take the symmetric closure of \\(\\rightsquigarrow^\*\\), we end up with the equivalence relations of the previous section.
|
||||||
|
Resolution respects content equivalence for deriving paths, and input content equivalence for derivations.
|
||||||
|
|
||||||
|
> **Remark**
|
||||||
|
>
|
||||||
|
> We chose to define from scratch a "resolved" unary relation explicitly above.
|
||||||
|
> But it can also be defined as the normal forms of the \\(\\rightsquigarrow^\*\\) relation:
|
||||||
|
>
|
||||||
|
> \\[ a \text{ resolved} \Leftrightarrow \forall b. b \rightsquigarrow^* a \Rightarrow b = a\\]
|
||||||
|
>
|
||||||
|
> In prose, resolved terms are terms which \\(\\rightsquigarrow^\*\\) only relates on the left side to the same term on the right side; they are the terms which can be resolved no further.
|
||||||
|
|
||||||
|
## Partial versus Complete Resolution
|
||||||
|
|
||||||
|
Similar to evaluation, we can also speak of *partial* versus *complete* derivation resolution.
|
||||||
|
Partial derivation resolution is what we've actually formalized above with \\(\\rightsquigarrow^\*\\).
|
||||||
|
Complete resolution is resolution ending in a resolved term (deriving path or derivation).
|
||||||
|
(Which is a normal form of the relation, per the remark above.)
|
||||||
|
|
||||||
|
With partial resolution, a derivation is related to equivalent derivations with the same or simpler inputs, but not all those inputs will be plain store paths.
|
||||||
|
This is useful when the input refers to a floating content-addressed output we have not yet built — we don't know what (content-address) store path will be used for that derivation, so we are "stuck" trying to resolve the deriving path in question.
|
||||||
|
(In the above formalization, this happens when the build trace is missing the keys we wish to look up in it.)
|
||||||
|
|
||||||
|
Complete resolution is a *functional* relation, i.e. values on the left are uniquely related with values on the right.
|
||||||
|
It is not however, a *total* relation (in general, assuming arbitrary build traces).
|
||||||
|
This is discussed in the next section.
|
||||||
|
|
||||||
|
## Termination
|
||||||
|
|
||||||
|
For static derivations graphs, complete resolution is indeed total, because it always terminates for all inputs.
|
||||||
|
(A relation that is both total and functional is a function.)
|
||||||
|
|
||||||
|
For [dynamic][xp-feature-dynamic-derivations] derivation graphs, however, this is not the case — resolution is not guaranteed to terminate.
|
||||||
|
The issue isn't rewriting deriving paths themselves:
|
||||||
|
a single rewrite to normalize an output deriving path to a constant one always exists, and always proceeds in one step.
|
||||||
|
The issue is that dynamic derivations (i.e. those that are filled-in the graph by a previous resolution) may have more transitive dependencies than the original derivation.
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> Suppose we have this deriving path
|
||||||
|
> ```json
|
||||||
|
> {
|
||||||
|
> "drvPath": {
|
||||||
|
> "drvPath": "...-foo.drv",
|
||||||
|
> "output": "bar.drv"
|
||||||
|
> },
|
||||||
|
> "output": "baz"
|
||||||
|
> }
|
||||||
|
> ```
|
||||||
|
> and derivation `foo` is already resolved.
|
||||||
|
> When we resolve this deriving path, we'll end up with something like:
|
||||||
|
> ```json
|
||||||
|
> {
|
||||||
|
> "drvPath": "...-foo-bar.drv",
|
||||||
|
> "output": "baz"
|
||||||
|
> }
|
||||||
|
> ```
|
||||||
|
> So far this is just a single atomic rewrite, with no termination issues.
|
||||||
|
> But the derivation `foo-bar` may have its *own* dynamic derivation inputs.
|
||||||
|
> Resolution must resolve that derivation first before the above deriving path can finally be normalized to a plain `...-foo-bar-baz` store path.
|
||||||
|
|
||||||
|
The important thing to notice is that "build trace" *keys* must be resolved.
|
||||||
|
The *values* those keys are mapped to have no such constraints.
|
||||||
|
An arbitrary store object has no notion of being resolved or not.
|
||||||
|
But an arbitrary store object can be read back as a derivation (as will in fact be done in the case of dynamic derivations / nested output deriving paths).
|
||||||
|
And those derivations need *not* be resolved.
|
||||||
|
|
||||||
|
It is those dynamic non-resolved derivations which are the source of non-termination.
|
||||||
|
By the same token, they are also the reason why dynamic derivations offer greater expressive power.
|
||||||
|
|
||||||
|
[store object]: @docroot@/store/store-object.md
|
||||||
|
[inputs]: @docroot@/store/derivation/index.md#inputs
|
||||||
|
[build trace]: @docroot@/store/build-trace.md
|
||||||
|
[deriving-path]: @docroot@/store/derivation/index.md#deriving-path
|
||||||
|
[xp-feature-dynamic-derivations]: @docroot@/development/experimental-features.md#xp-feature-dynamic-derivations
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
# Content-Addressing Store Objects
|
# Content-Addressing Store Objects
|
||||||
|
|
||||||
Just [like][fso-ca] [File System Objects][File System Object],
|
Just [like][fso-ca] [File System Objects][File System Object],
|
||||||
[Store Objects][Store Object] can also be [content-addressed](@docroot@/glossary.md#gloss-content-addressed),
|
[Store Objects][Store Object] can also be [content-addressed](@docroot@/glossary.md#gloss-content-address),
|
||||||
unless they are [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object).
|
unless they are [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object).
|
||||||
|
|
||||||
For store objects, the content address we produce will take the form of a [Store Path] rather than regular hash.
|
For store objects, the content address we produce will take the form of a [Store Path] rather than regular hash.
|
||||||
|
|
@ -107,7 +107,7 @@ References (to other store objects and self-references alike) are supported so l
|
||||||
>
|
>
|
||||||
> This method is part of the [`git-hashing`][xp-feature-git-hashing] experimental feature.
|
> This method is part of the [`git-hashing`][xp-feature-git-hashing] experimental feature.
|
||||||
|
|
||||||
This uses the corresponding [Git](../file-system-object/content-address.md#serial-git) method of file system object content addressing.
|
This uses the corresponding [Git](../file-system-object/content-address.md#git) method of file system object content addressing.
|
||||||
|
|
||||||
References are not supported.
|
References are not supported.
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -6,7 +6,7 @@
|
||||||
>
|
>
|
||||||
> A rendered store path
|
> A rendered store path
|
||||||
|
|
||||||
Nix implements references to [store objects](./index.md#store-object) as *store paths*.
|
Nix implements references to [store objects](./store-object.md) as *store paths*.
|
||||||
|
|
||||||
Think of a store path as an [opaque], [unique identifier]:
|
Think of a store path as an [opaque], [unique identifier]:
|
||||||
The only way to obtain store path is by adding or building store objects.
|
The only way to obtain store path is by adding or building store objects.
|
||||||
|
|
|
||||||
15
doc/manual/theme/head.hbs
Normal file
15
doc/manual/theme/head.hbs
Normal file
|
|
@ -0,0 +1,15 @@
|
||||||
|
<script>
|
||||||
|
MathJax = {
|
||||||
|
loader: {load: ['[tex]/bussproofs']},
|
||||||
|
tex: {
|
||||||
|
packages: {'[+]': ['bussproofs']},
|
||||||
|
// Doesn't seem to work in mathjax 3
|
||||||
|
//formatError: function(jax, error) {
|
||||||
|
// console.log(`TeX error in "${jax.latex}": ${error.message}`);
|
||||||
|
// return jax.formatError(error);
|
||||||
|
//}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
<!-- Load a newer version of MathJax than mdbook does by default, and which in particular has working relative paths for the "bussproofs" extension. -->
|
||||||
|
<script async src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/3.0.1/es5/tex-mml-chtml.js"></script>
|
||||||
8
flake.lock
generated
8
flake.lock
generated
|
|
@ -63,16 +63,16 @@
|
||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1756178832,
|
"lastModified": 1761597516,
|
||||||
"narHash": "sha256-O2CIn7HjZwEGqBrwu9EU76zlmA5dbmna7jL1XUmAId8=",
|
"narHash": "sha256-wxX7u6D2rpkJLWkZ2E932SIvDJW8+ON/0Yy8+a5vsDU=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "d98ce345cdab58477ca61855540999c86577d19d",
|
"rev": "daf6dc47aa4b44791372d6139ab7b25269184d55",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"ref": "nixos-25.05-small",
|
"ref": "nixos-25.05",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
|
|
|
||||||
31
flake.nix
31
flake.nix
|
|
@ -1,7 +1,7 @@
|
||||||
{
|
{
|
||||||
description = "The purely functional package manager";
|
description = "The purely functional package manager";
|
||||||
|
|
||||||
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small";
|
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
|
||||||
|
|
||||||
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
||||||
inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446";
|
inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446";
|
||||||
|
|
@ -413,6 +413,14 @@
|
||||||
supportsCross = false;
|
supportsCross = false;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
"nix-json-schema-checks" = {
|
||||||
|
supportsCross = false;
|
||||||
|
};
|
||||||
|
|
||||||
|
"nix-kaitai-struct-checks" = {
|
||||||
|
supportsCross = false;
|
||||||
|
};
|
||||||
|
|
||||||
"nix-perl-bindings" = {
|
"nix-perl-bindings" = {
|
||||||
supportsCross = false;
|
supportsCross = false;
|
||||||
};
|
};
|
||||||
|
|
@ -467,6 +475,27 @@
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
|
apps = forAllSystems (
|
||||||
|
system:
|
||||||
|
let
|
||||||
|
pkgs = nixpkgsFor.${system}.native;
|
||||||
|
opener = if pkgs.stdenv.isDarwin then "open" else "xdg-open";
|
||||||
|
in
|
||||||
|
{
|
||||||
|
open-manual = {
|
||||||
|
type = "app";
|
||||||
|
program = "${pkgs.writeShellScript "open-nix-manual" ''
|
||||||
|
path="${self.packages.${system}.nix-manual.site}/index.html"
|
||||||
|
if ! ${opener} "$path"; then
|
||||||
|
echo "Failed to open manual with ${opener}. Manual is located at:"
|
||||||
|
echo "$path"
|
||||||
|
fi
|
||||||
|
''}";
|
||||||
|
meta.description = "Open the Nix manual in your browser";
|
||||||
|
};
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
devShells =
|
devShells =
|
||||||
let
|
let
|
||||||
makeShell = import ./packaging/dev-shell.nix { inherit lib devFlake; };
|
makeShell = import ./packaging/dev-shell.nix { inherit lib devFlake; };
|
||||||
|
|
|
||||||
|
|
@ -60,3 +60,4 @@ if get_option('unit-tests')
|
||||||
subproject('libflake-tests')
|
subproject('libflake-tests')
|
||||||
endif
|
endif
|
||||||
subproject('nix-functional-tests')
|
subproject('nix-functional-tests')
|
||||||
|
subproject('json-schema-checks')
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
# shellcheck disable=all
|
|
||||||
#compdef nix
|
#compdef nix
|
||||||
|
# shellcheck disable=all
|
||||||
|
|
||||||
function _nix() {
|
function _nix() {
|
||||||
local ifs_bk="$IFS"
|
local ifs_bk="$IFS"
|
||||||
|
|
|
||||||
|
|
@ -42,8 +42,8 @@ if cxx.get_id() == 'clang'
|
||||||
add_project_arguments('-fpch-instantiate-templates', language : 'cpp')
|
add_project_arguments('-fpch-instantiate-templates', language : 'cpp')
|
||||||
endif
|
endif
|
||||||
|
|
||||||
# Darwin ld doesn't like "X.Y.Zpre"
|
# Darwin ld doesn't like "X.Y.ZpreABCD+W"
|
||||||
nix_soversion = meson.project_version().split('pre')[0]
|
nix_soversion = meson.project_version().split('+')[0].split('pre')[0]
|
||||||
|
|
||||||
subdir('assert-fail')
|
subdir('assert-fail')
|
||||||
subdir('asan-options')
|
subdir('asan-options')
|
||||||
|
|
|
||||||
|
|
@ -438,6 +438,16 @@ in
|
||||||
*/
|
*/
|
||||||
nix-external-api-docs = callPackage ../src/external-api-docs/package.nix { version = fineVersion; };
|
nix-external-api-docs = callPackage ../src/external-api-docs/package.nix { version = fineVersion; };
|
||||||
|
|
||||||
|
/**
|
||||||
|
JSON schema validation checks
|
||||||
|
*/
|
||||||
|
nix-json-schema-checks = callPackage ../src/json-schema-checks/package.nix { };
|
||||||
|
|
||||||
|
/**
|
||||||
|
Kaitai struct schema validation checks
|
||||||
|
*/
|
||||||
|
nix-kaitai-struct-checks = callPackage ../src/kaitai-struct-checks/package.nix { };
|
||||||
|
|
||||||
nix-perl-bindings = callPackage ../src/perl/package.nix { };
|
nix-perl-bindings = callPackage ../src/perl/package.nix { };
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
||||||
|
|
@ -108,6 +108,7 @@ pkgs.nixComponents2.nix-util.overrideAttrs (
|
||||||
++ pkgs.nixComponents2.nix-internal-api-docs.nativeBuildInputs
|
++ pkgs.nixComponents2.nix-internal-api-docs.nativeBuildInputs
|
||||||
++ pkgs.nixComponents2.nix-external-api-docs.nativeBuildInputs
|
++ pkgs.nixComponents2.nix-external-api-docs.nativeBuildInputs
|
||||||
++ pkgs.nixComponents2.nix-functional-tests.externalNativeBuildInputs
|
++ pkgs.nixComponents2.nix-functional-tests.externalNativeBuildInputs
|
||||||
|
++ pkgs.nixComponents2.nix-json-schema-checks.externalNativeBuildInputs
|
||||||
++ lib.optional (
|
++ lib.optional (
|
||||||
!buildCanExecuteHost
|
!buildCanExecuteHost
|
||||||
# Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479
|
# Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479
|
||||||
|
|
|
||||||
|
|
@ -62,6 +62,8 @@ let
|
||||||
"nix-cmd"
|
"nix-cmd"
|
||||||
"nix-cli"
|
"nix-cli"
|
||||||
"nix-functional-tests"
|
"nix-functional-tests"
|
||||||
|
"nix-json-schema-checks"
|
||||||
|
"nix-kaitai-struct-checks"
|
||||||
]
|
]
|
||||||
++ lib.optionals enableBindings [
|
++ lib.optionals enableBindings [
|
||||||
"nix-perl-bindings"
|
"nix-perl-bindings"
|
||||||
|
|
|
||||||
|
|
@ -15,7 +15,7 @@ programmatically:
|
||||||
1. Embedding the evaluator
|
1. Embedding the evaluator
|
||||||
2. Writing language plug-ins
|
2. Writing language plug-ins
|
||||||
|
|
||||||
Embedding means you link the Nix C libraries in your program and use them from
|
Embedding means you link the Nix C API libraries in your program and use them from
|
||||||
there. Adding a plug-in means you make a library that gets loaded by the Nix
|
there. Adding a plug-in means you make a library that gets loaded by the Nix
|
||||||
language evaluator, specified through a configuration option.
|
language evaluator, specified through a configuration option.
|
||||||
|
|
||||||
|
|
|
||||||
1
src/json-schema-checks/.version
Symbolic link
1
src/json-schema-checks/.version
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../.version
|
||||||
1
src/json-schema-checks/content-address
Symbolic link
1
src/json-schema-checks/content-address
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../src/libstore-tests/data/content-address
|
||||||
1
src/json-schema-checks/derivation
Symbolic link
1
src/json-schema-checks/derivation
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../src/libstore-tests/data/derivation
|
||||||
1
src/json-schema-checks/deriving-path
Symbolic link
1
src/json-schema-checks/deriving-path
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../src/libstore-tests/data/derived-path
|
||||||
1
src/json-schema-checks/hash
Symbolic link
1
src/json-schema-checks/hash
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../src/libutil-tests/data/hash
|
||||||
204
src/json-schema-checks/meson.build
Normal file
204
src/json-schema-checks/meson.build
Normal file
|
|
@ -0,0 +1,204 @@
|
||||||
|
# Run with:
|
||||||
|
# meson test --suite json-schema
|
||||||
|
# Run with: (without shell / configure)
|
||||||
|
# nix build .#nix-json-schema-checks
|
||||||
|
|
||||||
|
project(
|
||||||
|
'nix-json-schema-checks',
|
||||||
|
version : files('.version'),
|
||||||
|
meson_version : '>= 1.1',
|
||||||
|
license : 'LGPL-2.1-or-later',
|
||||||
|
)
|
||||||
|
|
||||||
|
fs = import('fs')
|
||||||
|
|
||||||
|
# Note: The 'jsonschema' package provides the 'jv' command
|
||||||
|
jv = find_program('jv', required : true)
|
||||||
|
|
||||||
|
# The schema directory is a committed symlink to the actual schema location
|
||||||
|
schema_dir = meson.current_source_dir() / 'schema'
|
||||||
|
|
||||||
|
# Get all example files
|
||||||
|
schemas = [
|
||||||
|
{
|
||||||
|
'stem' : 'hash',
|
||||||
|
'schema' : schema_dir / 'hash-v1.yaml',
|
||||||
|
'files' : [
|
||||||
|
'sha256-base64.json',
|
||||||
|
'sha256-base16.json',
|
||||||
|
'sha256-nix32.json',
|
||||||
|
'blake3-base64.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'stem' : 'content-address',
|
||||||
|
'schema' : schema_dir / 'content-address-v1.yaml',
|
||||||
|
'files' : [
|
||||||
|
'text.json',
|
||||||
|
'nar.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'stem' : 'store-path',
|
||||||
|
'schema' : schema_dir / 'store-path-v1.yaml',
|
||||||
|
'files' : [
|
||||||
|
'simple.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'stem' : 'deriving-path',
|
||||||
|
'schema' : schema_dir / 'deriving-path-v1.yaml',
|
||||||
|
'files' : [
|
||||||
|
'single_opaque.json',
|
||||||
|
'single_built.json',
|
||||||
|
'single_built_built.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Derivation and Derivation output
|
||||||
|
schemas += [
|
||||||
|
# Match overall
|
||||||
|
{
|
||||||
|
'stem' : 'derivation',
|
||||||
|
'schema' : schema_dir / 'derivation-v3.yaml',
|
||||||
|
'files' : [
|
||||||
|
'dyn-dep-derivation.json',
|
||||||
|
'simple-derivation.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'stem' : 'derivation',
|
||||||
|
'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/overall',
|
||||||
|
'files' : [
|
||||||
|
'output-caFixedFlat.json',
|
||||||
|
'output-caFixedNAR.json',
|
||||||
|
'output-caFixedText.json',
|
||||||
|
'output-caFloating.json',
|
||||||
|
'output-deferred.json',
|
||||||
|
'output-impure.json',
|
||||||
|
'output-inputAddressed.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
# Match exact variant
|
||||||
|
{
|
||||||
|
'stem' : 'derivation',
|
||||||
|
'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/inputAddressed',
|
||||||
|
'files' : [
|
||||||
|
'output-inputAddressed.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'stem' : 'derivation',
|
||||||
|
'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/caFixed',
|
||||||
|
'files' : [
|
||||||
|
'output-caFixedFlat.json',
|
||||||
|
'output-caFixedNAR.json',
|
||||||
|
'output-caFixedText.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'stem' : 'derivation',
|
||||||
|
'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/caFloating',
|
||||||
|
'files' : [
|
||||||
|
'output-caFloating.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'stem' : 'derivation',
|
||||||
|
'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/deferred',
|
||||||
|
'files' : [
|
||||||
|
'output-deferred.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'stem' : 'derivation',
|
||||||
|
'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/impure',
|
||||||
|
'files' : [
|
||||||
|
'output-impure.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Store object info
|
||||||
|
schemas += [
|
||||||
|
# Match overall
|
||||||
|
{
|
||||||
|
'stem' : 'store-object-info',
|
||||||
|
'schema' : schema_dir / 'store-object-info-v1.yaml',
|
||||||
|
'files' : [
|
||||||
|
'pure.json',
|
||||||
|
'impure.json',
|
||||||
|
'empty_pure.json',
|
||||||
|
'empty_impure.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'stem' : 'nar-info',
|
||||||
|
'schema' : schema_dir / 'store-object-info-v1.yaml',
|
||||||
|
'files' : [
|
||||||
|
'pure.json',
|
||||||
|
'impure.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
# Match exact variant
|
||||||
|
{
|
||||||
|
'stem' : 'store-object-info',
|
||||||
|
'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/base',
|
||||||
|
'files' : [
|
||||||
|
'pure.json',
|
||||||
|
'empty_pure.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'stem' : 'store-object-info',
|
||||||
|
'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/impure',
|
||||||
|
'files' : [
|
||||||
|
'impure.json',
|
||||||
|
'empty_impure.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'stem' : 'nar-info',
|
||||||
|
'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/base',
|
||||||
|
'files' : [
|
||||||
|
'pure.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'stem' : 'nar-info',
|
||||||
|
'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/narInfo',
|
||||||
|
'files' : [
|
||||||
|
'impure.json',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Validate each example against the schema
|
||||||
|
foreach schema : schemas
|
||||||
|
stem = schema['stem']
|
||||||
|
schema_file = schema['schema']
|
||||||
|
if '#' not in schema_file
|
||||||
|
# Validate the schema itself against JSON Schema Draft 04
|
||||||
|
test(
|
||||||
|
stem + '-schema-valid',
|
||||||
|
jv,
|
||||||
|
args : [
|
||||||
|
'http://json-schema.org/draft-04/schema',
|
||||||
|
schema_file,
|
||||||
|
],
|
||||||
|
suite : 'json-schema',
|
||||||
|
)
|
||||||
|
endif
|
||||||
|
foreach example : schema['files']
|
||||||
|
test(
|
||||||
|
stem + '-example-' + fs.stem(example),
|
||||||
|
jv,
|
||||||
|
args : [
|
||||||
|
schema_file,
|
||||||
|
files(stem / example),
|
||||||
|
],
|
||||||
|
suite : 'json-schema',
|
||||||
|
)
|
||||||
|
endforeach
|
||||||
|
endforeach
|
||||||
1
src/json-schema-checks/nar-info
Symbolic link
1
src/json-schema-checks/nar-info
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../src/libstore-tests/data/nar-info
|
||||||
56
src/json-schema-checks/package.nix
Normal file
56
src/json-schema-checks/package.nix
Normal file
|
|
@ -0,0 +1,56 @@
|
||||||
|
# Run with: nix build .#nix-json-schema-checks
|
||||||
|
{
|
||||||
|
lib,
|
||||||
|
mkMesonDerivation,
|
||||||
|
|
||||||
|
meson,
|
||||||
|
ninja,
|
||||||
|
jsonschema,
|
||||||
|
|
||||||
|
# Configuration Options
|
||||||
|
|
||||||
|
version,
|
||||||
|
}:
|
||||||
|
|
||||||
|
mkMesonDerivation (finalAttrs: {
|
||||||
|
pname = "nix-json-schema-checks";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
workDir = ./.;
|
||||||
|
fileset = lib.fileset.unions [
|
||||||
|
../../.version
|
||||||
|
../../doc/manual/source/protocols/json/schema
|
||||||
|
../../src/libutil-tests/data/hash
|
||||||
|
../../src/libstore-tests/data/content-address
|
||||||
|
../../src/libstore-tests/data/store-path
|
||||||
|
../../src/libstore-tests/data/derivation
|
||||||
|
../../src/libstore-tests/data/derived-path
|
||||||
|
../../src/libstore-tests/data/path-info
|
||||||
|
../../src/libstore-tests/data/nar-info
|
||||||
|
./.
|
||||||
|
];
|
||||||
|
|
||||||
|
outputs = [ "out" ];
|
||||||
|
|
||||||
|
passthru.externalNativeBuildInputs = [
|
||||||
|
jsonschema
|
||||||
|
];
|
||||||
|
|
||||||
|
nativeBuildInputs = [
|
||||||
|
meson
|
||||||
|
ninja
|
||||||
|
]
|
||||||
|
++ finalAttrs.passthru.externalNativeBuildInputs;
|
||||||
|
|
||||||
|
doCheck = true;
|
||||||
|
|
||||||
|
mesonCheckFlags = [ "--print-errorlogs" ];
|
||||||
|
|
||||||
|
postInstall = ''
|
||||||
|
touch $out
|
||||||
|
'';
|
||||||
|
|
||||||
|
meta = {
|
||||||
|
platforms = lib.platforms.all;
|
||||||
|
};
|
||||||
|
})
|
||||||
1
src/json-schema-checks/schema
Symbolic link
1
src/json-schema-checks/schema
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../doc/manual/source/protocols/json/schema
|
||||||
1
src/json-schema-checks/store-object-info
Symbolic link
1
src/json-schema-checks/store-object-info
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../src/libstore-tests/data/path-info
|
||||||
1
src/json-schema-checks/store-path
Symbolic link
1
src/json-schema-checks/store-path
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../src/libstore-tests/data/store-path
|
||||||
1
src/kaitai-struct-checks/.version
Symbolic link
1
src/kaitai-struct-checks/.version
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../.version
|
||||||
77
src/kaitai-struct-checks/meson.build
Normal file
77
src/kaitai-struct-checks/meson.build
Normal file
|
|
@ -0,0 +1,77 @@
|
||||||
|
# Run with:
|
||||||
|
# meson test --suite kaitai-struct
|
||||||
|
# Run with: (without shell / configure)
|
||||||
|
# nix build .#nix-kaitai-struct-checks
|
||||||
|
|
||||||
|
project(
|
||||||
|
'nix-kaitai-struct-checks',
|
||||||
|
'cpp',
|
||||||
|
version : files('.version'),
|
||||||
|
default_options : [
|
||||||
|
'cpp_std=c++23',
|
||||||
|
# TODO(Qyriad): increase the warning level
|
||||||
|
'warning_level=1',
|
||||||
|
'errorlogs=true', # Please print logs for tests that fail
|
||||||
|
],
|
||||||
|
meson_version : '>= 1.1',
|
||||||
|
license : 'LGPL-2.1-or-later',
|
||||||
|
)
|
||||||
|
|
||||||
|
kaitai_runtime_dep = dependency('kaitai-struct-cpp-stl-runtime', required : true)
|
||||||
|
gtest_dep = dependency('gtest')
|
||||||
|
gtest_main_dep = dependency('gtest_main', required : true)
|
||||||
|
|
||||||
|
# Find the Kaitai Struct compiler
|
||||||
|
ksc = find_program('ksc', required : true)
|
||||||
|
|
||||||
|
kaitai_generated_srcs = custom_target(
|
||||||
|
'kaitai-generated-sources',
|
||||||
|
input : [ 'nar.ksy' ],
|
||||||
|
output : [ 'nix_nar.cpp', 'nix_nar.h' ],
|
||||||
|
command : [
|
||||||
|
ksc,
|
||||||
|
'@INPUT@',
|
||||||
|
'--target', 'cpp_stl',
|
||||||
|
'--outdir',
|
||||||
|
meson.current_build_dir(),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
nar_kaitai_lib = library(
|
||||||
|
'nix-nar-kaitai-lib',
|
||||||
|
kaitai_generated_srcs,
|
||||||
|
dependencies : [ kaitai_runtime_dep ],
|
||||||
|
install : true,
|
||||||
|
)
|
||||||
|
|
||||||
|
nar_kaitai_dep = declare_dependency(
|
||||||
|
link_with : nar_kaitai_lib,
|
||||||
|
sources : kaitai_generated_srcs[1],
|
||||||
|
)
|
||||||
|
|
||||||
|
# The nar directory is a committed symlink to the actual nars location
|
||||||
|
nars_dir = meson.current_source_dir() / 'nars'
|
||||||
|
|
||||||
|
# Get all example files
|
||||||
|
nars = [
|
||||||
|
'dot.nar',
|
||||||
|
]
|
||||||
|
|
||||||
|
test_deps = [
|
||||||
|
nar_kaitai_dep,
|
||||||
|
kaitai_runtime_dep,
|
||||||
|
gtest_main_dep,
|
||||||
|
]
|
||||||
|
|
||||||
|
this_exe = executable(
|
||||||
|
meson.project_name(),
|
||||||
|
'test-parse-nar.cc',
|
||||||
|
dependencies : test_deps,
|
||||||
|
)
|
||||||
|
|
||||||
|
test(
|
||||||
|
meson.project_name(),
|
||||||
|
this_exe,
|
||||||
|
env : [ 'NIX_NARS_DIR=' + nars_dir ],
|
||||||
|
protocol : 'gtest',
|
||||||
|
)
|
||||||
1
src/kaitai-struct-checks/nar.ksy
Symbolic link
1
src/kaitai-struct-checks/nar.ksy
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../doc/manual/source/protocols/nix-archive/nar.ksy
|
||||||
1
src/kaitai-struct-checks/nars
Symbolic link
1
src/kaitai-struct-checks/nars
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../libutil-tests/data/nars
|
||||||
1
src/kaitai-struct-checks/nix-meson-build-support
Symbolic link
1
src/kaitai-struct-checks/nix-meson-build-support
Symbolic link
|
|
@ -0,0 +1 @@
|
||||||
|
../../nix-meson-build-support
|
||||||
76
src/kaitai-struct-checks/package.nix
Normal file
76
src/kaitai-struct-checks/package.nix
Normal file
|
|
@ -0,0 +1,76 @@
|
||||||
|
# Run with: nix build .#nix-kaitai-struct-checks
|
||||||
|
# or: `nix develop .#nix-kaitai-struct-checks` to enter a dev shell
|
||||||
|
{
|
||||||
|
lib,
|
||||||
|
mkMesonDerivation,
|
||||||
|
gtest,
|
||||||
|
meson,
|
||||||
|
ninja,
|
||||||
|
pkg-config,
|
||||||
|
kaitai-struct-compiler,
|
||||||
|
fetchzip,
|
||||||
|
kaitai-struct-cpp-stl-runtime,
|
||||||
|
# Configuration Options
|
||||||
|
version,
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
inherit (lib) fileset;
|
||||||
|
in
|
||||||
|
mkMesonDerivation (finalAttrs: {
|
||||||
|
pname = "nix-kaitai-struct-checks";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
workDir = ./.;
|
||||||
|
fileset = lib.fileset.unions [
|
||||||
|
../../nix-meson-build-support
|
||||||
|
./nix-meson-build-support
|
||||||
|
./.version
|
||||||
|
../../.version
|
||||||
|
../../doc/manual/source/protocols/nix-archive/nar.ksy
|
||||||
|
./nars
|
||||||
|
../../src/libutil-tests/data
|
||||||
|
./meson.build
|
||||||
|
./nar.ksy
|
||||||
|
(fileset.fileFilter (file: file.hasExt "cc") ./.)
|
||||||
|
(fileset.fileFilter (file: file.hasExt "hh") ./.)
|
||||||
|
];
|
||||||
|
|
||||||
|
outputs = [ "out" ];
|
||||||
|
|
||||||
|
passthru.externalNativeBuildInputs = [
|
||||||
|
# This can go away when we bump up to 25.11
|
||||||
|
(kaitai-struct-compiler.overrideAttrs (finalAttrs: {
|
||||||
|
version = "0.11";
|
||||||
|
src = fetchzip {
|
||||||
|
url = "https://github.com/kaitai-io/kaitai_struct_compiler/releases/download/${version}/kaitai-struct-compiler-${version}.zip";
|
||||||
|
sha256 = "sha256-j9TEilijqgIiD0GbJfGKkU1FLio9aTopIi1v8QT1b+A=";
|
||||||
|
};
|
||||||
|
}))
|
||||||
|
];
|
||||||
|
|
||||||
|
passthru.externalBuildInputs = [
|
||||||
|
gtest
|
||||||
|
kaitai-struct-cpp-stl-runtime
|
||||||
|
];
|
||||||
|
|
||||||
|
buildInputs = finalAttrs.passthru.externalBuildInputs;
|
||||||
|
|
||||||
|
nativeBuildInputs = [
|
||||||
|
meson
|
||||||
|
ninja
|
||||||
|
pkg-config
|
||||||
|
]
|
||||||
|
++ finalAttrs.passthru.externalNativeBuildInputs;
|
||||||
|
|
||||||
|
doCheck = true;
|
||||||
|
|
||||||
|
mesonCheckFlags = [ "--print-errorlogs" ];
|
||||||
|
|
||||||
|
postInstall = ''
|
||||||
|
touch $out
|
||||||
|
'';
|
||||||
|
|
||||||
|
meta = {
|
||||||
|
platforms = lib.platforms.all;
|
||||||
|
};
|
||||||
|
})
|
||||||
48
src/kaitai-struct-checks/test-parse-nar.cc
Normal file
48
src/kaitai-struct-checks/test-parse-nar.cc
Normal file
|
|
@ -0,0 +1,48 @@
|
||||||
|
#include <gtest/gtest.h>
|
||||||
|
#include <filesystem>
|
||||||
|
#include <fstream>
|
||||||
|
#include <vector>
|
||||||
|
#include <string>
|
||||||
|
|
||||||
|
#include <kaitai/kaitaistream.h>
|
||||||
|
|
||||||
|
#include <fstream>
|
||||||
|
#include <string>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
#include "nix_nar.h"
|
||||||
|
|
||||||
|
static const std::vector<std::string> NarFiles = {
|
||||||
|
"empty.nar",
|
||||||
|
"dot.nar",
|
||||||
|
"dotdot.nar",
|
||||||
|
"executable-after-contents.nar",
|
||||||
|
"invalid-tag-instead-of-contents.nar",
|
||||||
|
"name-after-node.nar",
|
||||||
|
"nul-character.nar",
|
||||||
|
"slash.nar",
|
||||||
|
};
|
||||||
|
|
||||||
|
class NarParseTest : public ::testing::TestWithParam<std::string>
|
||||||
|
{};
|
||||||
|
|
||||||
|
TEST_P(NarParseTest, ParseSucceeds)
|
||||||
|
{
|
||||||
|
const auto nar_file = GetParam();
|
||||||
|
|
||||||
|
const char * nars_dir_env = std::getenv("NIX_NARS_DIR");
|
||||||
|
if (nars_dir_env == nullptr) {
|
||||||
|
FAIL() << "NIX_NARS_DIR environment variable not set.";
|
||||||
|
}
|
||||||
|
|
||||||
|
const std::filesystem::path nar_file_path = std::filesystem::path(nars_dir_env) / "dot.nar";
|
||||||
|
ASSERT_TRUE(std::filesystem::exists(nar_file_path)) << "Missing test file: " << nar_file_path;
|
||||||
|
|
||||||
|
std::ifstream ifs(nar_file_path, std::ifstream::binary);
|
||||||
|
ASSERT_TRUE(ifs.is_open()) << "Failed to open file: " << nar_file;
|
||||||
|
kaitai::kstream ks(&ifs);
|
||||||
|
nix_nar_t nar(&ks);
|
||||||
|
ASSERT_TRUE(nar.root_node() != nullptr) << "Failed to parse NAR file: " << nar_file;
|
||||||
|
}
|
||||||
|
|
||||||
|
INSTANTIATE_TEST_SUITE_P(AllNarFiles, NarParseTest, ::testing::ValuesIn(NarFiles));
|
||||||
|
|
@ -4,11 +4,14 @@
|
||||||
* @brief Bindings to the Nix language evaluator
|
* @brief Bindings to the Nix language evaluator
|
||||||
*
|
*
|
||||||
* See *[Embedding the Nix Evaluator](@ref nix_evaluator_example)* for an example.
|
* See *[Embedding the Nix Evaluator](@ref nix_evaluator_example)* for an example.
|
||||||
* @{
|
|
||||||
*/
|
*/
|
||||||
/** @file
|
/** @file
|
||||||
* @brief Main entry for the libexpr C bindings
|
* @brief Main entry for the libexpr C bindings
|
||||||
*/
|
*/
|
||||||
|
/** @defgroup libexpr_init Initialization
|
||||||
|
* @ingroup libexpr
|
||||||
|
* @{
|
||||||
|
*/
|
||||||
|
|
||||||
#include "nix_api_store.h"
|
#include "nix_api_store.h"
|
||||||
#include "nix_api_util.h"
|
#include "nix_api_util.h"
|
||||||
|
|
@ -45,7 +48,10 @@ typedef struct nix_eval_state_builder nix_eval_state_builder;
|
||||||
*/
|
*/
|
||||||
typedef struct EvalState EvalState; // nix::EvalState
|
typedef struct EvalState EvalState; // nix::EvalState
|
||||||
|
|
||||||
|
/** @} */
|
||||||
|
|
||||||
/** @brief A Nix language value, or thunk that may evaluate to a value.
|
/** @brief A Nix language value, or thunk that may evaluate to a value.
|
||||||
|
* @ingroup value
|
||||||
*
|
*
|
||||||
* Values are the primary objects manipulated in the Nix language.
|
* Values are the primary objects manipulated in the Nix language.
|
||||||
* They are considered to be immutable from a user's perspective, but the process of evaluating a value changes its
|
* They are considered to be immutable from a user's perspective, but the process of evaluating a value changes its
|
||||||
|
|
@ -56,7 +62,8 @@ typedef struct EvalState EvalState; // nix::EvalState
|
||||||
*
|
*
|
||||||
* The evaluator manages its own memory, but your use of the C API must follow the reference counting rules.
|
* The evaluator manages its own memory, but your use of the C API must follow the reference counting rules.
|
||||||
*
|
*
|
||||||
* @see value_manip
|
* @struct nix_value
|
||||||
|
* @see value_create, value_extract
|
||||||
* @see nix_value_incref, nix_value_decref
|
* @see nix_value_incref, nix_value_decref
|
||||||
*/
|
*/
|
||||||
typedef struct nix_value nix_value;
|
typedef struct nix_value nix_value;
|
||||||
|
|
@ -65,6 +72,7 @@ NIX_DEPRECATED("use nix_value instead") typedef nix_value Value;
|
||||||
// Function prototypes
|
// Function prototypes
|
||||||
/**
|
/**
|
||||||
* @brief Initialize the Nix language evaluator.
|
* @brief Initialize the Nix language evaluator.
|
||||||
|
* @ingroup libexpr_init
|
||||||
*
|
*
|
||||||
* This function must be called at least once,
|
* This function must be called at least once,
|
||||||
* at some point before constructing a EvalState for the first time.
|
* at some point before constructing a EvalState for the first time.
|
||||||
|
|
@ -77,6 +85,7 @@ nix_err nix_libexpr_init(nix_c_context * context);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Parses and evaluates a Nix expression from a string.
|
* @brief Parses and evaluates a Nix expression from a string.
|
||||||
|
* @ingroup value_create
|
||||||
*
|
*
|
||||||
* @param[out] context Optional, stores error information
|
* @param[out] context Optional, stores error information
|
||||||
* @param[in] state The state of the evaluation.
|
* @param[in] state The state of the evaluation.
|
||||||
|
|
@ -93,6 +102,7 @@ nix_err nix_expr_eval_from_string(
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Calls a Nix function with an argument.
|
* @brief Calls a Nix function with an argument.
|
||||||
|
* @ingroup value_create
|
||||||
*
|
*
|
||||||
* @param[out] context Optional, stores error information
|
* @param[out] context Optional, stores error information
|
||||||
* @param[in] state The state of the evaluation.
|
* @param[in] state The state of the evaluation.
|
||||||
|
|
@ -107,6 +117,7 @@ nix_err nix_value_call(nix_c_context * context, EvalState * state, nix_value * f
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Calls a Nix function with multiple arguments.
|
* @brief Calls a Nix function with multiple arguments.
|
||||||
|
* @ingroup value_create
|
||||||
*
|
*
|
||||||
* Technically these are functions that return functions. It is common for Nix
|
* Technically these are functions that return functions. It is common for Nix
|
||||||
* functions to be curried, so this function is useful for calling them.
|
* functions to be curried, so this function is useful for calling them.
|
||||||
|
|
@ -126,10 +137,12 @@ nix_err nix_value_call_multi(
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Calls a Nix function with multiple arguments.
|
* @brief Calls a Nix function with multiple arguments.
|
||||||
|
* @ingroup value_create
|
||||||
*
|
*
|
||||||
* Technically these are functions that return functions. It is common for Nix
|
* Technically these are functions that return functions. It is common for Nix
|
||||||
* functions to be curried, so this function is useful for calling them.
|
* functions to be curried, so this function is useful for calling them.
|
||||||
*
|
*
|
||||||
|
* @def NIX_VALUE_CALL
|
||||||
* @param[out] context Optional, stores error information
|
* @param[out] context Optional, stores error information
|
||||||
* @param[in] state The state of the evaluation.
|
* @param[in] state The state of the evaluation.
|
||||||
* @param[out] value The result of the function call.
|
* @param[out] value The result of the function call.
|
||||||
|
|
@ -147,6 +160,7 @@ nix_err nix_value_call_multi(
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Forces the evaluation of a Nix value.
|
* @brief Forces the evaluation of a Nix value.
|
||||||
|
* @ingroup value_create
|
||||||
*
|
*
|
||||||
* The Nix interpreter is lazy, and not-yet-evaluated values can be
|
* The Nix interpreter is lazy, and not-yet-evaluated values can be
|
||||||
* of type NIX_TYPE_THUNK instead of their actual value.
|
* of type NIX_TYPE_THUNK instead of their actual value.
|
||||||
|
|
@ -180,18 +194,20 @@ nix_err nix_value_force_deep(nix_c_context * context, EvalState * state, nix_val
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Create a new nix_eval_state_builder
|
* @brief Create a new nix_eval_state_builder
|
||||||
|
* @ingroup libexpr_init
|
||||||
*
|
*
|
||||||
* The settings are initialized to their default value.
|
* The settings are initialized to their default value.
|
||||||
* Values can be sourced elsewhere with nix_eval_state_builder_load.
|
* Values can be sourced elsewhere with nix_eval_state_builder_load.
|
||||||
*
|
*
|
||||||
* @param[out] context Optional, stores error information
|
* @param[out] context Optional, stores error information
|
||||||
* @param[in] store The Nix store to use.
|
* @param[in] store The Nix store to use.
|
||||||
* @return A new nix_eval_state_builder or NULL on failure.
|
* @return A new nix_eval_state_builder or NULL on failure. Call nix_eval_state_builder_free() when you're done.
|
||||||
*/
|
*/
|
||||||
nix_eval_state_builder * nix_eval_state_builder_new(nix_c_context * context, Store * store);
|
nix_eval_state_builder * nix_eval_state_builder_new(nix_c_context * context, Store * store);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Read settings from the ambient environment
|
* @brief Read settings from the ambient environment
|
||||||
|
* @ingroup libexpr_init
|
||||||
*
|
*
|
||||||
* Settings are sourced from environment variables and configuration files,
|
* Settings are sourced from environment variables and configuration files,
|
||||||
* as documented in the Nix manual.
|
* as documented in the Nix manual.
|
||||||
|
|
@ -204,6 +220,7 @@ nix_err nix_eval_state_builder_load(nix_c_context * context, nix_eval_state_buil
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Set the lookup path for `<...>` expressions
|
* @brief Set the lookup path for `<...>` expressions
|
||||||
|
* @ingroup libexpr_init
|
||||||
*
|
*
|
||||||
* @param[in] context Optional, stores error information
|
* @param[in] context Optional, stores error information
|
||||||
* @param[in] builder The builder to modify.
|
* @param[in] builder The builder to modify.
|
||||||
|
|
@ -214,18 +231,21 @@ nix_err nix_eval_state_builder_set_lookup_path(
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Create a new Nix language evaluator state
|
* @brief Create a new Nix language evaluator state
|
||||||
|
* @ingroup libexpr_init
|
||||||
*
|
*
|
||||||
* Remember to nix_eval_state_builder_free after building the state.
|
* The builder becomes unusable after this call. Remember to call nix_eval_state_builder_free()
|
||||||
|
* after building the state.
|
||||||
*
|
*
|
||||||
* @param[out] context Optional, stores error information
|
* @param[out] context Optional, stores error information
|
||||||
* @param[in] builder The builder to use and free
|
* @param[in] builder The builder to use and free
|
||||||
* @return A new Nix state or NULL on failure.
|
* @return A new Nix state or NULL on failure. Call nix_state_free() when you're done.
|
||||||
* @see nix_eval_state_builder_new, nix_eval_state_builder_free
|
* @see nix_eval_state_builder_new, nix_eval_state_builder_free
|
||||||
*/
|
*/
|
||||||
EvalState * nix_eval_state_build(nix_c_context * context, nix_eval_state_builder * builder);
|
EvalState * nix_eval_state_build(nix_c_context * context, nix_eval_state_builder * builder);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Free a nix_eval_state_builder
|
* @brief Free a nix_eval_state_builder
|
||||||
|
* @ingroup libexpr_init
|
||||||
*
|
*
|
||||||
* Does not fail.
|
* Does not fail.
|
||||||
*
|
*
|
||||||
|
|
@ -235,19 +255,21 @@ void nix_eval_state_builder_free(nix_eval_state_builder * builder);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Create a new Nix language evaluator state
|
* @brief Create a new Nix language evaluator state
|
||||||
|
* @ingroup libexpr_init
|
||||||
*
|
*
|
||||||
* For more control, use nix_eval_state_builder
|
* For more control, use nix_eval_state_builder
|
||||||
*
|
*
|
||||||
* @param[out] context Optional, stores error information
|
* @param[out] context Optional, stores error information
|
||||||
* @param[in] lookupPath Null-terminated array of strings corresponding to entries in NIX_PATH.
|
* @param[in] lookupPath Null-terminated array of strings corresponding to entries in NIX_PATH.
|
||||||
* @param[in] store The Nix store to use.
|
* @param[in] store The Nix store to use.
|
||||||
* @return A new Nix state or NULL on failure.
|
* @return A new Nix state or NULL on failure. Call nix_state_free() when you're done.
|
||||||
* @see nix_state_builder_new
|
* @see nix_state_builder_new
|
||||||
*/
|
*/
|
||||||
EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath, Store * store);
|
EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath, Store * store);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Frees a Nix state.
|
* @brief Frees a Nix state.
|
||||||
|
* @ingroup libexpr_init
|
||||||
*
|
*
|
||||||
* Does not fail.
|
* Does not fail.
|
||||||
*
|
*
|
||||||
|
|
@ -256,6 +278,7 @@ EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath,
|
||||||
void nix_state_free(EvalState * state);
|
void nix_state_free(EvalState * state);
|
||||||
|
|
||||||
/** @addtogroup GC
|
/** @addtogroup GC
|
||||||
|
* @ingroup libexpr
|
||||||
* @brief Reference counting and garbage collector operations
|
* @brief Reference counting and garbage collector operations
|
||||||
*
|
*
|
||||||
* The Nix language evaluator uses a garbage collector. To ease C interop, we implement
|
* The Nix language evaluator uses a garbage collector. To ease C interop, we implement
|
||||||
|
|
@ -286,6 +309,9 @@ nix_err nix_gc_incref(nix_c_context * context, const void * object);
|
||||||
/**
|
/**
|
||||||
* @brief Decrement the garbage collector reference counter for the given object
|
* @brief Decrement the garbage collector reference counter for the given object
|
||||||
*
|
*
|
||||||
|
* @deprecated We are phasing out the general nix_gc_decref() in favor of type-specified free functions, such as
|
||||||
|
* nix_value_decref().
|
||||||
|
*
|
||||||
* We also provide typed `nix_*_decref` functions, which are
|
* We also provide typed `nix_*_decref` functions, which are
|
||||||
* - safer to use
|
* - safer to use
|
||||||
* - easier to integrate when deriving bindings
|
* - easier to integrate when deriving bindings
|
||||||
|
|
@ -314,12 +340,11 @@ void nix_gc_now();
|
||||||
*/
|
*/
|
||||||
void nix_gc_register_finalizer(void * obj, void * cd, void (*finalizer)(void * obj, void * cd));
|
void nix_gc_register_finalizer(void * obj, void * cd, void (*finalizer)(void * obj, void * cd));
|
||||||
|
|
||||||
/** @} */
|
/** @} */ // doxygen group GC
|
||||||
|
|
||||||
// cffi end
|
// cffi end
|
||||||
#ifdef __cplusplus
|
#ifdef __cplusplus
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
/** @} */
|
|
||||||
|
|
||||||
#endif // NIX_API_EXPR_H
|
#endif // NIX_API_EXPR_H
|
||||||
|
|
|
||||||
|
|
@ -2,11 +2,12 @@
|
||||||
#define NIX_API_EXTERNAL_H
|
#define NIX_API_EXTERNAL_H
|
||||||
/** @ingroup libexpr
|
/** @ingroup libexpr
|
||||||
* @addtogroup Externals
|
* @addtogroup Externals
|
||||||
* @brief Deal with external values
|
* @brief Externals let Nix expressions work with foreign values that aren't part of the normal Nix value data model
|
||||||
* @{
|
* @{
|
||||||
*/
|
*/
|
||||||
/** @file
|
/** @file
|
||||||
* @brief libexpr C bindings dealing with external values
|
* @brief libexpr C bindings dealing with external values
|
||||||
|
* @see Externals
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#include "nix_api_expr.h"
|
#include "nix_api_expr.h"
|
||||||
|
|
@ -115,7 +116,7 @@ typedef struct NixCExternalValueDesc
|
||||||
* @brief Try to compare two external values
|
* @brief Try to compare two external values
|
||||||
*
|
*
|
||||||
* Optional, the default is always false.
|
* Optional, the default is always false.
|
||||||
* If the other object was not a Nix C external value, this comparison will
|
* If the other object was not a Nix C API external value, this comparison will
|
||||||
* also return false
|
* also return false
|
||||||
* @param[in] self the void* passed to nix_create_external_value
|
* @param[in] self the void* passed to nix_create_external_value
|
||||||
* @param[in] other the void* passed to the other object's
|
* @param[in] other the void* passed to the other object's
|
||||||
|
|
@ -168,7 +169,7 @@ typedef struct NixCExternalValueDesc
|
||||||
/**
|
/**
|
||||||
* @brief Create an external value, that can be given to nix_init_external
|
* @brief Create an external value, that can be given to nix_init_external
|
||||||
*
|
*
|
||||||
* Owned by the GC. Use nix_gc_decref when you're done with the pointer.
|
* Call nix_gc_decref() when you're done with the pointer.
|
||||||
*
|
*
|
||||||
* @param[out] context Optional, stores error information
|
* @param[out] context Optional, stores error information
|
||||||
* @param[in] desc a NixCExternalValueDesc, you should keep this alive as long
|
* @param[in] desc a NixCExternalValueDesc, you should keep this alive as long
|
||||||
|
|
@ -180,10 +181,11 @@ typedef struct NixCExternalValueDesc
|
||||||
ExternalValue * nix_create_external_value(nix_c_context * context, NixCExternalValueDesc * desc, void * v);
|
ExternalValue * nix_create_external_value(nix_c_context * context, NixCExternalValueDesc * desc, void * v);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Extract the pointer from a nix c external value.
|
* @brief Extract the pointer from a Nix C API external value.
|
||||||
* @param[out] context Optional, stores error information
|
* @param[out] context Optional, stores error information
|
||||||
* @param[in] b The external value
|
* @param[in] b The external value
|
||||||
* @returns The pointer, or null if the external value was not from nix c.
|
* @returns The pointer, valid while the external value is valid, or null if the external value was not from the Nix C
|
||||||
|
* API.
|
||||||
* @see nix_get_external
|
* @see nix_get_external
|
||||||
*/
|
*/
|
||||||
void * nix_get_external_value_content(nix_c_context * context, ExternalValue * b);
|
void * nix_get_external_value_content(nix_c_context * context, ExternalValue * b);
|
||||||
|
|
|
||||||
|
|
@ -235,7 +235,7 @@ nix_get_string(nix_c_context * context, const nix_value * value, nix_get_string_
|
||||||
try {
|
try {
|
||||||
auto & v = check_value_in(value);
|
auto & v = check_value_in(value);
|
||||||
assert(v.type() == nix::nString);
|
assert(v.type() == nix::nString);
|
||||||
call_nix_get_string_callback(v.c_str(), callback, user_data);
|
call_nix_get_string_callback(v.string_view(), callback, user_data);
|
||||||
}
|
}
|
||||||
NIXC_CATCH_ERRS
|
NIXC_CATCH_ERRS
|
||||||
}
|
}
|
||||||
|
|
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue