Mirror of https://github.com/NixOS/nix.git (synced 2025-11-25 03:39:36 +01:00)

Merge branch 'master' into flake_show_attr

Commit 653d701300: 466 changed files with 11446 additions and 3038 deletions
@@ -111,7 +111,13 @@ clearStore
 mv "$cacheDir/nar" "$cacheDir/nar2"
 
-nix-build --substituters "file://$cacheDir" --no-require-sigs dependencies.nix -o "$TEST_ROOT/result"
+nix-build --substituters "file://$cacheDir" --no-require-sigs dependencies.nix -o "$TEST_ROOT/result" 2>&1 | tee "$TEST_ROOT/log"
+
+# Verify that missing NARs produce warnings, not errors
+# The build should succeed despite the warnings
+grepQuiet "does not exist in binary cache" "$TEST_ROOT/log"
+# Ensure the message is not at error level by checking that the command succeeded
+[ -e "$TEST_ROOT/result" ]
 
 mv "$cacheDir/nar2" "$cacheDir/nar"
 
tests/functional/build-hook-list-paths.sh (new executable file)
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+set -x
+set -e
+
+[ -n "$OUT_PATHS" ]
+[ -n "$DRV_PATH" ]
+[ -n "$HOOK_DEST" ]
+
+for o in $OUT_PATHS; do
+    echo "$o" >> "$HOOK_DEST"
+done
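For context: OUT_PATHS and DRV_PATH are the standard environment variables Nix sets for post-build hooks, while HOOK_DEST is particular to this test. The post-build-hook hunk further down drives this script roughly like so:

    export HOOK_DEST=$TEST_ROOT/listing
    nix-build multiple-outputs.nix --check -A a.first --post-build-hook "$PWD/build-hook-list-paths.sh"
    grepQuiet a-first "$HOOK_DEST"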
@@ -184,6 +184,7 @@ test "$status" = 1
 if isDaemonNewer "2.29pre"; then
     <<<"$out" grepQuiet -E "error: Cannot build '.*-x4\\.drv'"
     <<<"$out" grepQuiet -E "Reason: 1 dependency failed."
+    <<<"$out" grepQuiet -E "Build failed due to failed dependency"
 else
     <<<"$out" grepQuiet -E "error: 1 dependencies of derivation '.*-x4\\.drv' failed to build"
 fi
@@ -71,7 +71,7 @@ function characterisationTestExit() {
     echo >&2 ''
     echo >&2 'You can rerun this test with:'
     echo >&2 ''
-    echo >&2 " _NIX_TEST_ACCEPT=1 meson test ${TEST_NAME}"
+    echo >&2 " _NIX_TEST_ACCEPT=1 meson test --suite ${TEST_SUITE_NAME} ${TEST_NAME}"
     echo >&2 ''
     echo >&2 'to regenerate the files containing the expected output,'
     echo >&2 'and then view the git diff to decide whether a change is'
@@ -1,4 +1,5 @@
 # shellcheck shell=bash
 
-TEST_ROOT=$(realpath "${TMPDIR:-/tmp}/nix-test")/${TEST_NAME:-default}
+TEST_SUBDIR="${TEST_SUITE_NAME:-default}/${TEST_NAME:-tests/functional/}"
+TEST_ROOT=$(realpath "${TMPDIR:-/tmp}/nix-test")/"$TEST_SUBDIR"
 export TEST_ROOT
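For illustration (suite and test names assumed, not taken from this diff): with TEST_SUITE_NAME=functional and TEST_NAME=build-hook the new scheme yields

    TEST_SUBDIR=functional/build-hook
    TEST_ROOT=/tmp/nix-test/functional/build-hook

so every suite/test pair gets its own scratch directory rather than keying on TEST_NAME alone.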
@@ -17,4 +17,6 @@ ln -s "$out" "$out"/self
 echo program > "$out"/program
 chmod +x "$out"/program
 
+echo '1 + 2' > "$out"/foo.nix
+
 echo FOO
@@ -66,10 +66,16 @@ derivation' {
       outputChecks = {
         out = {
           allowedReferences = [ foo ];
-          allowedRequisites = [ foo.dev ];
+          allowedRequisites = [
+            foo.dev
+            "bin"
+          ];
         };
         bin = {
-          disallowedReferences = [ bar ];
+          disallowedReferences = [
+            bar
+            "dev"
+          ];
           disallowedRequisites = [ bar.dev ];
         };
         dev = {
@@ -58,8 +58,14 @@ derivation' {
       impureEnvVars = [ "UNICORN" ];
       __darwinAllowLocalNetworking = true;
       allowedReferences = [ foo ];
-      allowedRequisites = [ foo.dev ];
-      disallowedReferences = [ bar ];
+      allowedRequisites = [
+        foo.dev
+        "bin"
+      ];
+      disallowedReferences = [
+        bar
+        "dev"
+      ];
       disallowedRequisites = [ bar.dev ];
       requiredSystemFeatures = [
         "rainbow"
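These checks now accept names of the derivation's own outputs ("bin", "dev") alongside store paths; the expected .drv files below show how that serializes, e.g. allowedRequisites becoming the space-separated string ".../foo-dev bin" in the plain-env case. One hedged way to see this on a real derivation (the $drvPath variable is assumed):

    nix derivation show "$drvPath" | jq -r '.[].env.allowedRequisites'
    # e.g. "/nix/store/...-foo-dev bin"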
@@ -1 +1 @@
-Derive([("bin","","r:sha256",""),("dev","","r:sha256",""),("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],["/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"],"my-system","/bin/bash",["-c","echo hello > $out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"refs2\":[\"/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g\"],\"disallowedRequisites\":[\"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"allowedRequisites\":[\"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z\"]}},\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/04f3da1kmbr67m3gzxikmsl4vjz5zf777sv6m14ahv22r65aac9m"),("dev","/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9")])
+Derive([("bin","","r:sha256",""),("dev","","r:sha256",""),("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],["/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"],"my-system","/bin/bash",["-c","echo hello > $out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"refs2\":[\"/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g\",\"dev\"],\"disallowedRequisites\":[\"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"allowedRequisites\":[\"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z\",\"bin\"]}},\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/04f3da1kmbr67m3gzxikmsl4vjz5zf777sv6m14ahv22r65aac9m"),("dev","/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9")])
@@ -1 +1 @@
-Derive([("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],["/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"),("allowedRequisites","/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z"),("builder","/bin/bash"),("disallowedReferences","/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g"),("disallowedRequisites","/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"),("exportReferencesGraph","refs1 /164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9 refs2 /nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"),("outputHashAlgo","sha256"),("outputHashMode","recursive"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")])
+Derive([("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],["/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"),("allowedRequisites","/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z bin"),("builder","/bin/bash"),("disallowedReferences","/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g dev"),("disallowedRequisites","/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"),("exportReferencesGraph","refs1 /164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9 refs2 /nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"),("outputHashAlgo","sha256"),("outputHashMode","recursive"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")])
@@ -1 +1 @@
-Derive([("bin","/nix/store/33qms3h55wlaspzba3brlzlrm8m2239g-advanced-attributes-structured-attrs-bin","",""),("dev","/nix/store/wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev","",""),("out","/nix/store/7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],["/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"],"my-system","/bin/bash",["-c","echo hello > $out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"refs2\":[\"/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar\"],\"disallowedRequisites\":[\"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"allowedRequisites\":[\"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev\"]}},\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/nix/store/33qms3h55wlaspzba3brlzlrm8m2239g-advanced-attributes-structured-attrs-bin"),("dev","/nix/store/wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev"),("out","/nix/store/7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs")])
+Derive([("bin","/nix/store/cnpasdljgkhnwaf78cf3qygcp4qbki1c-advanced-attributes-structured-attrs-bin","",""),("dev","/nix/store/ijq6mwpa9jbnpnl33qldfqihrr38kprx-advanced-attributes-structured-attrs-dev","",""),("out","/nix/store/h1vh648d3p088kdimy0r8ngpfx7c3nzw-advanced-attributes-structured-attrs","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],["/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"],"my-system","/bin/bash",["-c","echo hello > $out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"refs2\":[\"/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar\",\"dev\"],\"disallowedRequisites\":[\"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"allowedRequisites\":[\"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev\",\"bin\"]}},\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/nix/store/cnpasdljgkhnwaf78cf3qygcp4qbki1c-advanced-attributes-structured-attrs-bin"),("dev","/nix/store/ijq6mwpa9jbnpnl33qldfqihrr38kprx-advanced-attributes-structured-attrs-dev"),("out","/nix/store/h1vh648d3p088kdimy0r8ngpfx7c3nzw-advanced-attributes-structured-attrs")])
@@ -1 +1 @@
-Derive([("out","/nix/store/wyhpwd748pns4k7svh48wdrc8kvjk0ra-advanced-attributes","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],["/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"),("allowedRequisites","/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev"),("builder","/bin/bash"),("disallowedReferences","/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar"),("disallowedRequisites","/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"),("exportReferencesGraph","refs1 /nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo refs2 /nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/nix/store/wyhpwd748pns4k7svh48wdrc8kvjk0ra-advanced-attributes"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")])
+Derive([("out","/nix/store/ymqmybkq5j4nd1xplw6ccdpbjnfi017v-advanced-attributes","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],["/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"),("allowedRequisites","/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev bin"),("builder","/bin/bash"),("disallowedReferences","/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar dev"),("disallowedRequisites","/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"),("exportReferencesGraph","refs1 /nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo refs2 /nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/nix/store/ymqmybkq5j4nd1xplw6ccdpbjnfi017v-advanced-attributes"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")])
@@ -51,10 +51,12 @@ builtins.outputOf
       "$word": "hello, from $word!",
       "PATH": ${builtins.toJSON path}
     },
-    "inputDrvs": {
-      $inputDrvs
+    "inputs": {
+      "drvs": {
+        $inputDrvs
+      },
+      "srcs": []
     },
-    "inputSrcs": [],
     "name": "build-$word",
     "outputs": {
       "out": {
@@ -63,7 +65,7 @@ builtins.outputOf
       }
     },
     "system": "${system}",
-    "version": 3
+    "version": 4
   }
 EOF
 drvPath=$(echo "$json" | nix derivation add)
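The bump from "version": 3 to "version": 4 tracks the derivation JSON change visible in this hunk: the top-level inputDrvs and inputSrcs fields are folded into a single inputs object. Schematically (values elided):

    {
      "inputs": {
        "drvs": { ... },
        "srcs": []
      },
      "version": 4
    }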
@@ -99,6 +99,14 @@ clearStore
 
 [ -e "$caPath" ]
 
+# Test import-from-derivation on the result of fetchClosure.
+[[ $(nix eval -v --expr "
+  import \"\${builtins.fetchClosure {
+    fromStore = \"file://$cacheDir\";
+    fromPath = $caPath;
+  }}/foo.nix\"
+") = 3 ]]
+
 # Check that URL query parameters aren't allowed.
 clearStore
 narCache=$TEST_ROOT/nar-cache
@@ -14,7 +14,16 @@ nix-build fixed.nix -A bad --no-out-link && fail "should fail"
 # Building with the bad hash should produce the "good" output path as
 # a side-effect.
 [[ -e $path ]]
-nix path-info --json "$path" | grep fixed:md5:2qk15sxzzjlnpjk9brn7j8ppcd
+nix path-info --json "$path" | jq -e \
+  --arg hash "$(nix hash convert --to base64 "md5:8ddd8be4b179a529afa5f2ffae4b9858")" \
+  '.[].ca == {
+    method: "flat",
+    hash: {
+      algorithm: "md5",
+      format: "base64",
+      hash: $hash
+    },
+  }'
 
 echo 'testing good...'
 nix-build fixed.nix -A good --no-out-link
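The old assertion matched a flat "fixed:md5:<nix32>" string; the new one compares the structured ca object that nix path-info --json now emits. Roughly (hash value abridged):

    nix path-info --json "$path" | jq '.[].ca'
    # => { "method": "flat", "hash": { "algorithm": "md5", "format": "base64", "hash": "..." } }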
@@ -192,3 +192,24 @@ EOF
 # shellcheck disable=SC2015
 checkRes=$(nix flake check "$flakeDir" 2>&1 && fail "nix flake check should have failed" || true)
 echo "$checkRes" | grepQuiet -E "builder( for .*)? failed with exit code 1"
+
+# Test that attribute paths are shown in error messages
+cat > "$flakeDir"/flake.nix <<EOF
+{
+  outputs = { self }: with import ./config.nix; {
+    checks.${system}.failingCheck = mkDerivation {
+      name = "failing-check";
+      buildCommand = "echo 'This check fails'; exit 1";
+    };
+    checks.${system}.anotherFailingCheck = mkDerivation {
+      name = "another-failing-check";
+      buildCommand = "echo 'This also fails'; exit 1";
+    };
+  };
+}
+EOF
+
+# shellcheck disable=SC2015
+checkRes=$(nix flake check --keep-going "$flakeDir" 2>&1 && fail "nix flake check should have failed" || true)
+echo "$checkRes" | grepQuiet "checks.${system}.failingCheck"
+echo "$checkRes" | grepQuiet "checks.${system}.anotherFailingCheck"
@@ -12,6 +12,10 @@ cat > "$repo/flake.nix" <<EOF
 {
   outputs = { ... }: {
     x = 1;
+    y = assert false; 1;
+    z = builtins.readFile ./foo;
+    a = import ./foo;
+    b = import ./dir;
   };
 }
 EOF

@@ -21,3 +25,33 @@ expectStderr 1 nix eval "$repo#x" | grepQuiet "error: Path 'flake.nix' in the re
 git -C "$repo" add flake.nix
 
 [[ $(nix eval "$repo#x") = 1 ]]
+
+expectStderr 1 nix eval "$repo#y" | grepQuiet "at $repo/flake.nix:"
+
+git -C "$repo" commit -a -m foo
+
+expectStderr 1 nix eval "git+file://$repo?ref=master#y" | grepQuiet "at «git+file://$repo?ref=master&rev=.*»/flake.nix:"
+
+expectStderr 1 nix eval "$repo#z" | grepQuiet "error: Path 'foo' does not exist in Git repository \"$repo\"."
+expectStderr 1 nix eval "git+file://$repo?ref=master#z" | grepQuiet "error: '«git+file://$repo?ref=master&rev=.*»/foo' does not exist"
+expectStderr 1 nix eval "$repo#a" | grepQuiet "error: Path 'foo' does not exist in Git repository \"$repo\"."
+
+echo 123 > "$repo/foo"
+
+expectStderr 1 nix eval "$repo#z" | grepQuiet "error: Path 'foo' in the repository \"$repo\" is not tracked by Git."
+expectStderr 1 nix eval "$repo#a" | grepQuiet "error: Path 'foo' in the repository \"$repo\" is not tracked by Git."
+
+git -C "$repo" add "$repo/foo"
+
+[[ $(nix eval --raw "$repo#z") = 123 ]]
+
+expectStderr 1 nix eval "$repo#b" | grepQuiet "error: Path 'dir' does not exist in Git repository \"$repo\"."
+
+mkdir -p "$repo/dir"
+echo 456 > "$repo/dir/default.nix"
+
+expectStderr 1 nix eval "$repo#b" | grepQuiet "error: Path 'dir' in the repository \"$repo\" is not tracked by Git."
+
+git -C "$repo" add "$repo/dir/default.nix"
+
+[[ $(nix eval "$repo#b") = 456 ]]
@@ -47,9 +47,17 @@ try2 () {
     hashFromGit=$(git -C "$repo" rev-parse "HEAD:$hashPath")
     [[ "$hashFromGit" == "$expected" ]]
 
-    local caFromNix
-    caFromNix=$(nix path-info --json "$path" | jq -r ".[] | .ca")
-    [[ "fixed:git:$hashAlgo:$(nix hash convert --to nix32 "$hashAlgo:$hashFromGit")" = "$caFromNix" ]]
+    nix path-info --json "$path" | jq -e \
+        --arg algo "$hashAlgo" \
+        --arg hash "$(nix hash convert --to base64 "$hashAlgo:$hashFromGit")" \
+        '.[].ca == {
+          method: "git",
+          hash: {
+            algorithm: $algo,
+            format: "base64",
+            hash: $hash
+          },
+        }'
 }
 
 test0 () {
@@ -30,7 +30,7 @@ path1_stuff=$(echo "$json" | jq -r .[].outputs.stuff)
 [[ $(< "$path1"/n) = 0 ]]
 [[ $(< "$path1_stuff"/bla) = 0 ]]
 
-[[ $(nix path-info --json "$path1" | jq .[].ca) =~ fixed:r:sha256: ]]
+nix path-info --json "$path1" | jq -e '.[].ca | .method == "nar" and .hash.algorithm == "sha256"'
 
 path2=$(nix build -L --no-link --json --file ./impure-derivations.nix impure | jq -r .[].outputs.out)
 [[ $(< "$path2"/n) = 1 ]]
tests/functional/lang/eval-fail-empty-formals.err.exp (new file)
@@ -0,0 +1,12 @@
+error:
+       … from call site
+         at /pwd/lang/eval-fail-empty-formals.nix:1:1:
+            1| (foo@{ }: 1) { a = 3; }
+             | ^
+            2|
+
+       error: function 'anonymous lambda' called with unexpected argument 'a'
+       at /pwd/lang/eval-fail-empty-formals.nix:1:2:
+            1| (foo@{ }: 1) { a = 3; }
+             |  ^
+            2|

tests/functional/lang/eval-fail-empty-formals.nix (new file)
@@ -0,0 +1 @@
+(foo@{ }: 1) { a = 3; }
@@ -0,0 +1,18 @@
+error:
+       … while calling the 'seq' builtin
+         at /pwd/lang/eval-fail-genericClosure-deeply-nested-element.nix:25:1:
+           24| in
+           25| builtins.seq finiteVal (
+             | ^
+           26|   builtins.genericClosure {
+
+       … while calling the 'genericClosure' builtin
+         at /pwd/lang/eval-fail-genericClosure-deeply-nested-element.nix:26:3:
+           25| builtins.seq finiteVal (
+           26|   builtins.genericClosure {
+             |   ^
+           27|     startSet = [
+
+       … in genericClosure element { finite = { a0 = { a1 = { a2 = { a3 = { a4 = { a5 = { a6 = { a7 = { a8 = { ... }; }; }; }; }; }; }; }; }; }; «1 attribute elided» }
+
+       error: attribute 'key' missing

@@ -0,0 +1,35 @@
+let
+  finite = {
+    a0 = {
+      a1 = {
+        a2 = {
+          a3 = {
+            a4 = {
+              a5 = {
+                a6 = {
+                  a7 = {
+                    a8 = {
+                      a9 = "deep";
+                    };
+                  };
+                };
+              };
+            };
+          };
+        };
+      };
+    };
+  };
+  finiteVal = builtins.deepSeq finite finite;
+in
+builtins.seq finiteVal (
+  builtins.genericClosure {
+    startSet = [
+      {
+        infinite = import ./infinite-nesting.nix;
+        finite = finiteVal;
+      }
+    ];
+    operator = x: [ (import ./infinite-nesting.nix) ];
+  }
+)

@@ -0,0 +1,10 @@
+error:
+       … while calling the 'genericClosure' builtin
+         at /pwd/lang/eval-fail-genericClosure-element-missing-key.nix:1:1:
+            1| builtins.genericClosure {
+             | ^
+            2|   startSet = [ { nokey = 1; } ];
+
+       … in genericClosure element { nokey = 1; }
+
+       error: attribute 'key' missing

@@ -0,0 +1,4 @@
+builtins.genericClosure {
+  startSet = [ { nokey = 1; } ];
+  operator = x: [ ];
+}

@@ -0,0 +1,10 @@
+error:
+       … while calling the 'genericClosure' builtin
+         at /pwd/lang/eval-fail-genericClosure-element-not-attrset.nix:1:1:
+            1| builtins.genericClosure {
+             | ^
+            2|   startSet = [ "not an attrset" ];
+
+       … in genericClosure element "not an attrset"
+
+       error: expected a set but found a string: "not an attrset"

@@ -0,0 +1,4 @@
+builtins.genericClosure {
+  startSet = [ "not an attrset" ];
+  operator = x: [ ];
+}

@@ -0,0 +1,12 @@
+error:
+       … while calling the 'genericClosure' builtin
+         at /pwd/lang/eval-fail-genericClosure-keys-incompatible-types.nix:1:1:
+            1| builtins.genericClosure {
+             | ^
+            2|   startSet = [
+
+       … while comparing element { key = "string"; }
+
+       … with element { key = 1; }
+
+       error: cannot compare a string with an integer; values are "string" and 1

@@ -0,0 +1,7 @@
+builtins.genericClosure {
+  startSet = [
+    { key = 1; }
+    { key = "string"; }
+  ];
+  operator = x: [ ];
+}

@@ -0,0 +1,12 @@
+error:
+       … while calling the 'genericClosure' builtin
+         at /pwd/lang/eval-fail-genericClosure-keys-uncomparable.nix:1:1:
+            1| builtins.genericClosure {
+             | ^
+            2|   startSet = [
+
+       … while comparing element { key = { }; }
+
+       … with element { key = { }; }
+
+       error: cannot compare a set with a set; values of that type are incomparable (values are { } and { })

@@ -0,0 +1,7 @@
+builtins.genericClosure {
+  startSet = [
+    { key = { }; }
+    { key = { }; }
+  ];
+  operator = x: [ ];
+}

@@ -0,0 +1,10 @@
+error:
+       … while calling the 'genericClosure' builtin
+         at /pwd/lang/eval-fail-genericClosure-missing-operator.nix:1:1:
+            1| builtins.genericClosure {
+             | ^
+            2|   startSet = [ { key = 1; } ];
+
+       … in the attrset passed as argument to builtins.genericClosure
+
+       error: attribute 'operator' missing

@@ -0,0 +1,3 @@
+builtins.genericClosure {
+  startSet = [ { key = 1; } ];
+}

@@ -0,0 +1,10 @@
+error:
+       … while calling the 'genericClosure' builtin
+         at /pwd/lang/eval-fail-genericClosure-missing-startSet.nix:1:1:
+            1| builtins.genericClosure {
+             | ^
+            2|   operator = x: [ ];
+
+       … in the attrset passed as argument to builtins.genericClosure
+
+       error: attribute 'startSet' missing

@@ -0,0 +1,3 @@
+builtins.genericClosure {
+  operator = x: [ ];
+}

@@ -0,0 +1,10 @@
+error:
+       … while calling the 'genericClosure' builtin
+         at /pwd/lang/eval-fail-genericClosure-not-attrset.nix:1:1:
+            1| builtins.genericClosure "not an attrset"
+             | ^
+            2|
+
+       … while evaluating the first argument passed to builtins.genericClosure
+
+       error: expected a set but found a string: "not an attrset"

@@ -0,0 +1 @@
+builtins.genericClosure "not an attrset"

@@ -0,0 +1,10 @@
+error:
+       … while calling the 'genericClosure' builtin
+         at /pwd/lang/eval-fail-genericClosure-operator-not-function.nix:1:1:
+            1| builtins.genericClosure {
+             | ^
+            2|   startSet = [ { key = 1; } ];
+
+       … while evaluating the 'operator' attribute passed as argument to builtins.genericClosure
+
+       error: expected a function but found a string: "not a function"

@@ -0,0 +1,4 @@
+builtins.genericClosure {
+  startSet = [ { key = 1; } ];
+  operator = "not a function";
+}

@@ -0,0 +1,12 @@
+error:
+       … while calling the 'genericClosure' builtin
+         at /pwd/lang/eval-fail-genericClosure-operator-not-list.nix:1:1:
+            1| builtins.genericClosure {
+             | ^
+            2|   startSet = [ { key = 1; } ];
+
+       … while calling operator on genericClosure element { key = 1; }
+
+       … while evaluating the return value of the `operator` passed to builtins.genericClosure
+
+       error: expected a list but found a string: "not a list"

@@ -0,0 +1,4 @@
+builtins.genericClosure {
+  startSet = [ { key = 1; } ];
+  operator = x: "not a list";
+}

@@ -0,0 +1,10 @@
+error:
+       … while calling the 'genericClosure' builtin
+         at /pwd/lang/eval-fail-genericClosure-startSet-not-list.nix:1:1:
+            1| builtins.genericClosure {
+             | ^
+            2|   startSet = "not a list";
+
+       … while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure
+
+       error: expected a list but found a string: "not a list"

@@ -0,0 +1,4 @@
+builtins.genericClosure {
+  startSet = "not a list";
+  operator = x: [ ];
+}
tests/functional/lang/eval-okay-builtins-dirOf.exp (new file)
@@ -0,0 +1 @@
+{ pathDoesntExistNested1 = /totallydoesntexistreally; pathDoesntExistNested2 = /totallydoesntexistreally/subdir1; pathDoesntExistRoot = /; pathRoot = /; stringEmpty = "."; stringMultipleSeps = "a//"; stringNoSep = "."; stringRoot = "/"; stringRootA = "/"; stringRootSlash = "/"; stringRootSlashSlash = "//"; stringSingleDir = "a"; stringWithDot = "a/b/c/."; stringWithDotAndDotDot = "a/b/c/../."; stringWithDotAndDotDotSep2 = "a/b/c/.././"; stringWithDotDot = "a/b/c/.."; stringWithDotDotSep2 = "a/b/c/../"; stringWithDotSep2 = "a/b/c/./"; }

tests/functional/lang/eval-okay-builtins-dirOf.nix (new file)
@@ -0,0 +1,21 @@
+{
+  stringEmpty = dirOf "";
+  stringNoSep = dirOf "filename";
+  stringSingleDir = dirOf "a/b";
+  stringMultipleSeps = dirOf "a///b";
+  stringRoot = dirOf "/";
+  stringRootSlash = dirOf "//";
+  stringRootSlashSlash = dirOf "///";
+  stringRootA = dirOf "/a";
+  stringWithDot = dirOf "a/b/c/./d";
+  stringWithDotSep2 = dirOf "a/b/c/.//d";
+  stringWithDotDot = dirOf "a/b/c/../d";
+  stringWithDotDotSep2 = dirOf "a/b/c/..//d";
+  stringWithDotAndDotDot = dirOf "a/b/c/.././d";
+  stringWithDotAndDotDotSep2 = dirOf "a/b/c/.././/d";
+
+  pathRoot = dirOf /.;
+  pathDoesntExistRoot = dirOf /totallydoesntexistreally;
+  pathDoesntExistNested1 = dirOf /totallydoesntexistreally/subdir1;
+  pathDoesntExistNested2 = dirOf /totallydoesntexistreally/subdir1/subdir2;
+}

tests/functional/lang/infinite-nesting.nix (new file)
@@ -0,0 +1,4 @@
+let
+  mkInfinite = i: { "a${toString i}" = mkInfinite (i + 1); };
+in
+mkInfinite 0
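Two behaviors pinned down by the expected output: on strings dirOf is purely textual (dirOf "a///b" is "a//", and "a/b/c/../d" gives "a/b/c/.." with no normalization), while on paths it operates on the normalized path, so dirOf /totallydoesntexistreally is / even though the path does not exist:

    nix-instantiate --eval --expr 'dirOf "a///b"'                   # "a//"
    nix-instantiate --eval --expr 'dirOf /totallydoesntexistreally' # /

infinite-nesting.nix defines a lazily infinite attrset; the deeply-nested genericClosure test above uses it to check that error output elides deep values (the «1 attribute elided» marker) rather than recursing into them.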
@@ -263,7 +263,8 @@ foreach suite : suites
       'ASAN_OPTIONS' : asan_options,
       '_NIX_TEST_SOURCE_DIR' : meson.current_source_dir(),
       '_NIX_TEST_BUILD_DIR' : meson.current_build_dir(),
-      'TEST_NAME' : suite_name / name,
+      'TEST_SUITE_NAME' : suite_name,
+      'TEST_NAME' : name,
       'NIX_REMOTE' : '',
       'PS4' : '+(${BASH_SOURCE[0]-$0}:$LINENO) ',
     },
@@ -166,7 +166,7 @@ printf 4.0 > "$flake1Dir"/version
 printf Utrecht > "$flake1Dir"/who
 nix profile add "$flake1Dir"
 [[ $("$TEST_HOME"/.nix-profile/bin/hello) = "Hello Utrecht" ]]
-[[ $(nix path-info --json "$(realpath "$TEST_HOME"/.nix-profile/bin/hello)" | jq -r .[].ca) =~ fixed:r:sha256: ]]
+nix path-info --json "$(realpath "$TEST_HOME"/.nix-profile/bin/hello)" | jq -e '.[].ca | .method == "nar" and .hash.algorithm == "sha256"'
 
 # Override the outputs.
 nix profile remove simple flake1
@@ -2,7 +2,16 @@
   lib,
   stdenv,
   mkMesonDerivation,
-  buildPackages,
+
+  meson,
+  ninja,
+  pkg-config,
+
+  jq,
+  git,
+  mercurial,
+  unixtools,
+  util-linux,
 
   nix-store,
   nix-expr,

@@ -37,17 +46,20 @@ mkMesonDerivation (
     ./.
   ];
 
-  # Hack for sake of the dev shell. Need to "manually splice" since
-  # this isn't a specially-recognized list of dependencies.
-  passthru.externalNativeBuildInputs = [
-    buildPackages.meson
-    buildPackages.ninja
-    buildPackages.pkg-config
+  nativeBuildInputs = [
+    meson
+    ninja
+    pkg-config
 
-    buildPackages.jq
-    buildPackages.git
-    buildPackages.mercurial
-    buildPackages.unixtools.script
+    jq
+    git
+    mercurial
+    unixtools.script
+
+    # Explicitly splice the hostHost variant to fix LLVM tests. The nix-cli
+    # has to be in PATH, but must come from the host context where it's built
+    # with libc++.
+    (nix-cli.__spliced.hostHost or nix-cli)
   ]
   ++ lib.optionals stdenv.hostPlatform.isLinux [
     # For various sandboxing tests that needs a statically-linked shell,

@@ -56,14 +68,7 @@ mkMesonDerivation (
     # For Overlay FS tests need `mount`, `umount`, and `unshare`.
    # For `script` command (ensuring a TTY)
     # TODO use `unixtools` to be precise over which executables instead?
-    buildPackages.util-linux
-  ];
-
-  nativeBuildInputs = finalAttrs.passthru.externalNativeBuildInputs ++ [
-    # Explicitly splice the hostHost variant to fix LLVM tests. The nix-cli
-    # has to be in PATH, but must come from the host context where it's built
-    # with libc++.
-    (nix-cli.__spliced.hostHost or nix-cli)
+    util-linux
   ];
 
   buildInputs = [
@@ -17,8 +17,16 @@ diff --unified --color=always \
     jq --sort-keys 'map_values(.narHash)') \
   <(jq --sort-keys <<-EOF
 {
-  "$foo": "sha256-QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA=",
-  "$bar": "sha256-9fhYGu9fqxcQC2Kc81qh2RMo1QcLBUBo8U+pPn+jthQ=",
+  "$foo": {
+    "algorithm": "sha256",
+    "format": "base64",
+    "hash": "QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA="
+  },
+  "$bar": {
+    "algorithm": "sha256",
+    "format": "base64",
+    "hash": "9fhYGu9fqxcQC2Kc81qh2RMo1QcLBUBo8U+pPn+jthQ="
+  },
   "$baz": null
 }
 EOF
@@ -29,6 +29,18 @@ nix-build -o "$TEST_ROOT"/result dependencies.nix --post-build-hook "$pushToStore"
 export BUILD_HOOK_ONLY_OUT_PATHS=$([ ! "$NIX_TESTS_CA_BY_DEFAULT" ])
 nix-build -o "$TEST_ROOT"/result-mult multiple-outputs.nix -A a.first --post-build-hook "$pushToStore"
 
+if isDaemonNewer "2.33.0pre20251029"; then
+    # Regression test for issue #14287: `--check` should re-run post build
+    # hook, even though nothing is getting newly registered.
+    export HOOK_DEST=$TEST_ROOT/listing
+    # Needed so the hook will get the above environment variable.
+    restartDaemon
+    nix-build -o "$TEST_ROOT"/result-mult multiple-outputs.nix --check -A a.first --post-build-hook "$PWD/build-hook-list-paths.sh"
+    grepQuiet a-first "$HOOK_DEST"
+    grepQuiet a-second "$HOOK_DEST"
+    unset HOOK_DEST
+fi
+
 clearStore
 
 # Ensure that the remote store contains both the runtime and build-time
@@ -58,7 +58,7 @@ nix store verify -r "$outPath2" --sigs-needed 1 --trusted-public-keys "$pk1"
 # Build something content-addressed.
 outPathCA=$(IMPURE_VAR1=foo IMPURE_VAR2=bar nix-build ./fixed.nix -A good.0 --no-out-link)
 
-nix path-info --json "$outPathCA" | jq -e '.[] | .ca | startswith("fixed:md5:")'
+nix path-info --json "$outPathCA" | jq -e '.[].ca | .method == "flat" and .hash.algorithm == "md5"'
 
 # Content-addressed paths don't need signatures, so they verify
 # regardless of --sigs-needed.
@@ -1,7 +1,5 @@
 {
-  lib,
-  config,
   nixpkgs,
   ...
 }:
 

@@ -36,8 +34,10 @@ in
       pkgA
       pkgB
       pkgC
+      pkgs.coreutils
     ];
+    environment.systemPackages = [ pkgs.minio-client ];
     nix.nixPath = [ "nixpkgs=${pkgs.path}" ];
     nix.extraOptions = ''
       experimental-features = nix-command
       substituters =
@@ -147,7 +147,7 @@ in
         else:
             machine.fail(f"nix path-info {pkg}")
 
-    def setup_s3(populate_bucket=[], public=False):
+    def setup_s3(populate_bucket=[], public=False, versioned=False):
         """
         Decorator that creates/destroys a unique bucket for each test.
         Optionally pre-populates bucket with specified packages.
@@ -156,14 +156,17 @@ in
         Args:
             populate_bucket: List of packages to upload before test runs
             public: If True, make the bucket publicly accessible
+            versioned: If True, enable versioning on the bucket before populating
         """
         def decorator(test_func):
             def wrapper():
                 bucket = str(uuid.uuid4())
                 server.succeed(f"mc mb minio/{bucket}")
-                if public:
-                    server.succeed(f"mc anonymous set download minio/{bucket}")
                 try:
+                    if public:
+                        server.succeed(f"mc anonymous set download minio/{bucket}")
+                    if versioned:
+                        server.succeed(f"mc version enable minio/{bucket}")
                     if populate_bucket:
                         store_url = make_s3_url(bucket)
                         for pkg in populate_bucket:
@@ -597,6 +600,170 @@ in
 
         print(" ✓ File content verified correct (hash matches)")
 
+    @setup_s3(populate_bucket=[PKGS['A']], versioned=True)
+    def test_versioned_urls(bucket):
+        """Test that versionId parameter is accepted in S3 URLs"""
+        print("\n=== Testing Versioned URLs ===")
+
+        # Get the nix-cache-info file
+        cache_info_url = make_s3_url(bucket, path="/nix-cache-info")
+
+        # Fetch without versionId should work
+        client.succeed(
+            f"{ENV_WITH_CREDS} nix eval --impure --expr "
+            f"'builtins.fetchurl {{ name = \"cache-info\"; url = \"{cache_info_url}\"; }}'"
+        )
+        print(" ✓ Fetch without versionId works")
+
+        # List versions to get a version ID
+        # MinIO output format: [timestamp] size tier versionId versionNumber method filename
+        versions_output = server.succeed(f"mc ls --versions minio/{bucket}/nix-cache-info")
+
+        # Extract version ID from output (4th field after STANDARD)
+        import re
+        version_match = re.search(r'STANDARD\s+(\S+)\s+v\d+', versions_output)
+        if not version_match:
+            print(f"Debug: versions output: {versions_output}")
+            raise Exception("Could not extract version ID from MinIO output")
+
+        version_id = version_match.group(1)
+        print(f" ✓ Found version ID: {version_id}")
+
+        # Version ID should not be "null" since versioning was enabled before upload
+        if version_id == "null":
+            raise Exception("Version ID is 'null' - versioning may not be working correctly")
+
+        # Fetch with versionId parameter
+        versioned_url = f"{cache_info_url}&versionId={version_id}"
+        client.succeed(
+            f"{ENV_WITH_CREDS} nix eval --impure --expr "
+            f"'builtins.fetchurl {{ name = \"cache-info-versioned\"; url = \"{versioned_url}\"; }}'"
+        )
+        print(" ✓ Fetch with versionId parameter works")
+
+    @setup_s3()
+    def test_multipart_upload_basic(bucket):
+        """Test basic multipart upload with a large file"""
+        print("\n--- Test: Multipart Upload Basic ---")
+
+        large_file_size = 10 * 1024 * 1024
+        large_pkg = server.succeed(
+            "nix-store --add $(dd if=/dev/urandom of=/tmp/large-file bs=1M count=10 2>/dev/null && echo /tmp/large-file)"
+        ).strip()
+
+        chunk_size = 5 * 1024 * 1024
+        expected_parts = 3  # 10 MB raw becomes ~10.5 MB compressed (NAR + xz overhead)
+
+        store_url = make_s3_url(
+            bucket,
+            **{
+                "multipart-upload": "true",
+                "multipart-threshold": str(5 * 1024 * 1024),
+                "multipart-chunk-size": str(chunk_size),
+            }
+        )
+
+        print(f" Uploading {large_file_size} byte file (expect {expected_parts} parts)")
+        output = server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {large_pkg} --debug 2>&1")
+
+        if "using S3 multipart upload" not in output:
+            raise Exception("Expected multipart upload to be used")
+
+        expected_msg = f"{expected_parts} parts uploaded"
+        if expected_msg not in output:
+            print("Debug output:")
+            print(output)
+            raise Exception(f"Expected '{expected_msg}' in output")
+
+        print(f" ✓ Multipart upload used with {expected_parts} parts")
+
+        client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' {large_pkg} --no-check-sigs")
+        verify_packages_in_store(client, large_pkg, should_exist=True)
+
+        print(" ✓ Large file downloaded and verified")
+
+    @setup_s3()
+    def test_multipart_threshold(bucket):
+        """Test that files below threshold use regular upload"""
+        print("\n--- Test: Multipart Threshold Behavior ---")
+
+        store_url = make_s3_url(
+            bucket,
+            **{
+                "multipart-upload": "true",
+                "multipart-threshold": str(1024 * 1024 * 1024),
+            }
+        )
+
+        print(" Uploading small file with high threshold")
+        output = server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['A']} --debug 2>&1")
+
+        if "using S3 multipart upload" in output:
+            raise Exception("Should not use multipart for file below threshold")
+
+        if "using S3 regular upload" not in output:
+            raise Exception("Expected regular upload to be used")
+
+        print(" ✓ Regular upload used for file below threshold")
+
+        client.succeed(f"{ENV_WITH_CREDS} nix copy --no-check-sigs --from '{store_url}' {PKGS['A']}")
+        verify_packages_in_store(client, PKGS['A'], should_exist=True)
+
+        print(" ✓ Small file uploaded and verified")
+
+    @setup_s3()
+    def test_multipart_with_log_compression(bucket):
+        """Test multipart upload with compressed build logs"""
+        print("\n--- Test: Multipart Upload with Log Compression ---")
+
+        # Create a derivation that produces a large text log (12 MB of base64 output)
+        drv_path = server.succeed(
+            """
+            nix-instantiate --expr '
+              let pkgs = import <nixpkgs> {};
+              in derivation {
+                name = "large-log-builder";
+                builder = "/bin/sh";
+                args = ["-c" "$coreutils/bin/dd if=/dev/urandom bs=1M count=12 | $coreutils/bin/base64; echo success > $out"];
+                coreutils = pkgs.coreutils;
+                system = builtins.currentSystem;
+              }
+            '
+            """
+        ).strip()
+
+        print(" Building derivation to generate large log")
+        server.succeed(f"nix-store --realize {drv_path} &>/dev/null")
+
+        # Upload logs with compression and multipart
+        store_url = make_s3_url(
+            bucket,
+            **{
+                "multipart-upload": "true",
+                "multipart-threshold": str(5 * 1024 * 1024),
+                "multipart-chunk-size": str(5 * 1024 * 1024),
+                "log-compression": "xz",
+            }
+        )
+
+        print(" Uploading build log with compression and multipart")
+        output = server.succeed(
+            f"{ENV_WITH_CREDS} nix store copy-log --to '{store_url}' {drv_path} --debug 2>&1"
+        )
+
+        # Should use multipart for the compressed log
+        if "using S3 multipart upload" not in output or "log/" not in output:
+            print("Debug output:")
+            print(output)
+            raise Exception("Expected multipart upload to be used for compressed log")
+
+        if "parts uploaded" not in output:
+            print("Debug output:")
+            print(output)
+            raise Exception("Expected multipart completion message")
+
+        print(" ✓ Compressed log uploaded with multipart")
+
     # ============================================================================
     # Main Test Execution
     # ============================================================================
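Everything these tests toggle rides on query parameters of the s3: store URL assembled by the make_s3_url test helper. Schematically, from the CLI (BUCKET, the server endpoint, and VERSION are placeholders; 5242880 is 5 MiB):

    nix copy --to 's3://BUCKET?endpoint=http://server:9000&multipart-upload=true&multipart-threshold=5242880&multipart-chunk-size=5242880' "$storePath"
    nix eval --impure --expr 'builtins.fetchurl { name = "cache-info"; url = "s3://BUCKET/nix-cache-info?endpoint=http://server:9000&versionId=VERSION"; }'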
@@ -626,6 +793,10 @@ in
     test_compression_mixed()
     test_compression_disabled()
     test_nix_prefetch_url()
+    test_versioned_urls()
+    test_multipart_upload_basic()
+    test_multipart_threshold()
+    test_multipart_with_log_compression()
 
     print("\n" + "="*80)
     print("✓ All S3 Binary Cache Store Tests Passed!")