diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e7e103b63..dcf0814d8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,7 +29,32 @@ jobs: github_token: ${{ secrets.GITHUB_TOKEN }} - run: nix flake show --all-systems --json + pre-commit-checks: + name: pre-commit checks + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v5 + - uses: ./.github/actions/install-nix-action + with: + dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }} + extra_nix_config: experimental-features = nix-command flakes + github_token: ${{ secrets.GITHUB_TOKEN }} + - uses: DeterminateSystems/magic-nix-cache-action@main + - run: ./ci/gha/tests/pre-commit-checks + + basic-checks: + name: aggregate basic checks + if: ${{ always() }} + runs-on: ubuntu-24.04 + needs: [pre-commit-checks, eval] + steps: + - name: Exit with any errors + if: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') }} + run: | + exit 1 + tests: + needs: basic-checks strategy: fail-fast: false matrix: @@ -214,6 +239,7 @@ jobs: docker push $IMAGE_ID:master vm_tests: + needs: basic-checks runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v5 diff --git a/COPYING b/COPYING index 5ab7695ab..f6683e74e 100644 --- a/COPYING +++ b/COPYING @@ -1,8 +1,8 @@ - GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1, February 1999 + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 Copyright (C) 1991, 1999 Free Software Foundation, Inc. - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. @@ -10,7 +10,7 @@ as the successor of the GNU Library Public License, version 2, hence the version number 2.1.] - Preamble + Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public @@ -112,7 +112,7 @@ modification follow. Pay close attention to the difference between a former contains code derived from the library, whereas the latter must be combined with the library in order to run. - GNU LESSER GENERAL PUBLIC LICENSE + GNU LESSER GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License Agreement applies to any software library or other @@ -146,7 +146,7 @@ such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. - + 1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an @@ -432,7 +432,7 @@ decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. - NO WARRANTY + NO WARRANTY 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. @@ -455,7 +455,7 @@ FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 
- END OF TERMS AND CONDITIONS + END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Libraries @@ -484,8 +484,7 @@ convey the exclusion of warranty; and each file should have at least the Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + License along with this library; if not, see . Also add information on how to contact you by electronic and paper mail. @@ -496,9 +495,7 @@ necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the library `Frob' (a library for tweaking knobs) written by James Random Hacker. - , 1 April 1990 - Ty Coon, President of Vice + , 1 April 1990 + Moe Ghoul, President of Vice That's all there is to it! - - diff --git a/ci/gha/tests/default.nix b/ci/gha/tests/default.nix index 74d0b8c7e..b89d51c76 100644 --- a/ci/gha/tests/default.nix +++ b/ci/gha/tests/default.nix @@ -24,16 +24,7 @@ let enableSanitizersLayer = finalAttrs: prevAttrs: { mesonFlags = (prevAttrs.mesonFlags or [ ]) - ++ [ - # Run all tests with UBSAN enabled. Running both with ubsan and - # without doesn't seem to have much immediate benefit for doubling - # the GHA CI workaround. - # - # TODO: Work toward enabling "address,undefined" if it seems feasible. - # This would maybe require dropping Boost coroutines and ignoring intentional - # memory leaks with detect_leaks=0. - (lib.mesonOption "b_sanitize" "undefined") - ] + ++ [ (lib.mesonOption "b_sanitize" "address,undefined") ] ++ (lib.optionals stdenv.cc.isClang [ # https://www.github.com/mesonbuild/meson/issues/764 (lib.mesonBool "b_lundef" false) @@ -71,8 +62,12 @@ rec { nixComponentsInstrumented = nixComponents.overrideScope ( final: prev: { nix-store-tests = prev.nix-store-tests.override { withBenchmarks = true; }; + # Boehm is incompatible with ASAN. + nix-expr = prev.nix-expr.override { enableGC = !withSanitizers; }; mesonComponentOverrides = lib.composeManyExtensions componentOverrides; + # Unclear how to make Perl bindings work with a dynamically linked ASAN. + nix-perl-bindings = if withSanitizers then null else prev.nix-perl-bindings; } ); diff --git a/ci/gha/tests/pre-commit-checks b/ci/gha/tests/pre-commit-checks new file mode 100755 index 000000000..8c9f64d6c --- /dev/null +++ b/ci/gha/tests/pre-commit-checks @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +set -euo pipefail + +system=$(nix eval --raw --impure --expr builtins.currentSystem) + +echo "::group::Running pre-commit checks" + +if nix build ".#checks.$system.pre-commit" -L; then + echo "::endgroup::" + exit 0 +fi + +echo "::error ::Changes do not pass pre-commit checks" + +cat < **Note** diff --git a/doc/manual/source/development/debugging.md b/doc/manual/source/development/debugging.md index 98456841a..ccc6614b7 100644 --- a/doc/manual/source/development/debugging.md +++ b/doc/manual/source/development/debugging.md @@ -24,6 +24,19 @@ It is also possible to build without debugging for faster build: (The first line is needed because `fortify` hardening requires at least some optimization.) +## Building Nix with sanitizers + +Nix can be built with [Address](https://clang.llvm.org/docs/AddressSanitizer.html) and +[UB](https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html) sanitizers using LLVM +or GCC. This is useful when debugging memory corruption issues. 
+ +```console +[nix-shell]$ export mesonBuildType=debugoptimized +[nix-shell]$ appendToVar mesonFlags "-Dlibexpr:gc=disabled" # Disable Boehm +[nix-shell]$ appendToVar mesonFlags "-Dbindings=false" # Disable nix-perl +[nix-shell]$ appendToVar mesonFlags "-Db_sanitize=address,undefined" +``` + ## Debugging the Nix Binary Obtain your preferred debugger within the development shell: diff --git a/doc/manual/source/development/meson.build b/doc/manual/source/development/meson.build index 4831cf8f0..b3fb11023 100644 --- a/doc/manual/source/development/meson.build +++ b/doc/manual/source/development/meson.build @@ -7,5 +7,6 @@ experimental_feature_descriptions_md = custom_target( xp_features_json, ], capture : true, + env : nix_env_for_docs, output : 'experimental-feature-descriptions.md', ) diff --git a/doc/manual/source/language/builtins-prefix.md b/doc/manual/source/language/builtins-prefix.md index fb983bb7f..fff0f7cb5 100644 --- a/doc/manual/source/language/builtins-prefix.md +++ b/doc/manual/source/language/builtins-prefix.md @@ -5,12 +5,28 @@ All built-ins are available through the global [`builtins`](#builtins-builtins) Some built-ins are also exposed directly in the global scope: - - - [`derivation`](#builtins-derivation) -- [`import`](#builtins-import) +- `derivationStrict` - [`abort`](#builtins-abort) +- [`baseNameOf`](#builtins-baseNameOf) +- [`break`](#builtins-break) +- [`dirOf`](#builtins-dirOf) +- [`false`](#builtins-false) +- [`fetchGit`](#builtins-fetchGit) +- `fetchMercurial` +- [`fetchTarball`](#builtins-fetchTarball) +- [`fetchTree`](#builtins-fetchTree) +- [`fromTOML`](#builtins-fromTOML) +- [`import`](#builtins-import) +- [`isNull`](#builtins-isNull) +- [`map`](#builtins-map) +- [`null`](#builtins-null) +- [`placeholder`](#builtins-placeholder) +- [`removeAttrs`](#builtins-removeAttrs) +- `scopedImport` - [`throw`](#builtins-throw) +- [`toString`](#builtins-toString) +- [`true`](#builtins-true)
derivation attrs
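As an illustrative aside (not part of the patch itself): each name in the expanded global-scope list above resolves to the same function as its `builtins.*` counterpart. A minimal sketch in the Nix language:

```nix
# Minimal sketch: the globals behave identically to their `builtins` counterparts,
# so each pair below evaluates to the same value.
{
  base = baseNameOf ./foo/bar.nix;               # == builtins.baseNameOf ./foo/bar.nix
  strs = map toString [ 1 2 3 ];                 # == builtins.map builtins.toString [ 1 2 3 ]
  rest = removeAttrs { x = 1; y = 2; } [ "x" ];  # == builtins.removeAttrs { x = 1; y = 2; } [ "x" ]
}
```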
diff --git a/doc/manual/source/protocols/json/derivation.md b/doc/manual/source/protocols/json/derivation.md index 04881776a..566288962 100644 --- a/doc/manual/source/protocols/json/derivation.md +++ b/doc/manual/source/protocols/json/derivation.md @@ -14,6 +14,21 @@ is a JSON object with the following fields: The name of the derivation. This is used when calculating the store paths of the derivation's outputs. +* `version`: + Must be `3`. + This is a guard that allows us to continue evolving this format. + The choice of `3` is fairly arbitrary, but corresponds to this informal version history: + + - Version 0: A-Term format + + - Version 1: Original JSON format, with the ugly `"r:sha256"` inherited from the A-Term format. + + - Version 2: Separate `method` and `hashAlgo` fields in output specs + + - Version 3: Drop the store dir from store paths; include only the base name. + + Note that while this format is experimental, versioning is maintained on a best-effort basis and is not guaranteed to identify every change. + * `outputs`: Information about the output paths of the derivation. This is a JSON object with one member per output, where the key is the output name and the value is a JSON object with these fields: @@ -52,7 +67,6 @@ is a JSON object with the following fields: > ```json > "outputs": { > "out": { - > "path": "/nix/store/2543j7c6jn75blc3drf4g5vhb1rhdq29-source", > "method": "nar", > "hashAlgo": "sha256", > "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62" @@ -63,6 +77,15 @@ is a JSON object with the following fields: * `inputSrcs`: A list of store paths on which this derivation depends. + > **Example** + > + > ```json + > "inputSrcs": [ + > "47y241wqdhac3jm5l7nv0x4975mb1975-separate-debug-info.sh", + > "56d0w71pjj9bdr363ym3wj1zkwyqq97j-fix-pop-var-context-error.patch" + > ] + > ``` + * `inputDrvs`: A JSON object specifying the derivations on which this derivation depends, and what outputs of those derivations. 
@@ -70,8 +93,8 @@ is a JSON object with the following fields: > > ```json > "inputDrvs": { - > "/nix/store/6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"], - > "/nix/store/fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"] + > "6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"], + > "fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"] > } > ``` diff --git a/maintainers/flake-module.nix b/maintainers/flake-module.nix index 4815313dd..8c84d0517 100644 --- a/maintainers/flake-module.nix +++ b/maintainers/flake-module.nix @@ -106,63 +106,6 @@ enable = true; excludes = [ # We haven't linted these files yet - ''^config/install-sh$'' - ''^misc/bash/completion\.sh$'' - ''^misc/fish/completion\.fish$'' - ''^misc/zsh/completion\.zsh$'' - ''^scripts/create-darwin-volume\.sh$'' - ''^scripts/install-darwin-multi-user\.sh$'' - ''^scripts/install-multi-user\.sh$'' - ''^scripts/install-systemd-multi-user\.sh$'' - ''^src/nix/get-env\.sh$'' - ''^tests/functional/ca/build-dry\.sh$'' - ''^tests/functional/ca/build-with-garbage-path\.sh$'' - ''^tests/functional/ca/common\.sh$'' - ''^tests/functional/ca/concurrent-builds\.sh$'' - ''^tests/functional/ca/eval-store\.sh$'' - ''^tests/functional/ca/gc\.sh$'' - ''^tests/functional/ca/import-from-derivation\.sh$'' - ''^tests/functional/ca/new-build-cmd\.sh$'' - ''^tests/functional/ca/nix-shell\.sh$'' - ''^tests/functional/ca/post-hook\.sh$'' - ''^tests/functional/ca/recursive\.sh$'' - ''^tests/functional/ca/repl\.sh$'' - ''^tests/functional/ca/selfref-gc\.sh$'' - ''^tests/functional/ca/why-depends\.sh$'' - ''^tests/functional/characterisation-test-infra\.sh$'' - ''^tests/functional/common/vars-and-functions\.sh$'' - ''^tests/functional/completions\.sh$'' - ''^tests/functional/compute-levels\.sh$'' - ''^tests/functional/config\.sh$'' - ''^tests/functional/db-migration\.sh$'' - ''^tests/functional/debugger\.sh$'' - ''^tests/functional/dependencies\.builder0\.sh$'' - ''^tests/functional/dependencies\.sh$'' - ''^tests/functional/dump-db\.sh$'' - ''^tests/functional/dyn-drv/build-built-drv\.sh$'' - ''^tests/functional/dyn-drv/common\.sh$'' - ''^tests/functional/dyn-drv/dep-built-drv\.sh$'' - ''^tests/functional/dyn-drv/eval-outputOf\.sh$'' - ''^tests/functional/dyn-drv/old-daemon-error-hack\.sh$'' - ''^tests/functional/dyn-drv/recursive-mod-json\.sh$'' - ''^tests/functional/eval-store\.sh$'' - ''^tests/functional/export-graph\.sh$'' - ''^tests/functional/export\.sh$'' - ''^tests/functional/extra-sandbox-profile\.sh$'' - ''^tests/functional/fetchClosure\.sh$'' - ''^tests/functional/fetchGit\.sh$'' - ''^tests/functional/fetchGitRefs\.sh$'' - ''^tests/functional/fetchGitSubmodules\.sh$'' - ''^tests/functional/fetchGitVerification\.sh$'' - ''^tests/functional/fetchMercurial\.sh$'' - ''^tests/functional/fixed\.builder1\.sh$'' - ''^tests/functional/fixed\.builder2\.sh$'' - ''^tests/functional/fixed\.sh$'' - ''^tests/functional/flakes/absolute-paths\.sh$'' - ''^tests/functional/flakes/check\.sh$'' - ''^tests/functional/flakes/config\.sh$'' - ''^tests/functional/flakes/flakes\.sh$'' - ''^tests/functional/flakes/follow-paths\.sh$'' ''^tests/functional/flakes/prefetch\.sh$'' ''^tests/functional/flakes/run\.sh$'' ''^tests/functional/flakes/show\.sh$'' @@ -179,29 +122,6 @@ ''^tests/functional/install-darwin\.sh$'' ''^tests/functional/legacy-ssh-store\.sh$'' ''^tests/functional/linux-sandbox\.sh$'' - ''^tests/functional/local-overlay-store/add-lower-inner\.sh$'' - ''^tests/functional/local-overlay-store/add-lower\.sh$'' - 
''^tests/functional/local-overlay-store/bad-uris\.sh$'' - ''^tests/functional/local-overlay-store/build-inner\.sh$'' - ''^tests/functional/local-overlay-store/build\.sh$'' - ''^tests/functional/local-overlay-store/check-post-init-inner\.sh$'' - ''^tests/functional/local-overlay-store/check-post-init\.sh$'' - ''^tests/functional/local-overlay-store/common\.sh$'' - ''^tests/functional/local-overlay-store/delete-duplicate-inner\.sh$'' - ''^tests/functional/local-overlay-store/delete-duplicate\.sh$'' - ''^tests/functional/local-overlay-store/delete-refs-inner\.sh$'' - ''^tests/functional/local-overlay-store/delete-refs\.sh$'' - ''^tests/functional/local-overlay-store/gc-inner\.sh$'' - ''^tests/functional/local-overlay-store/gc\.sh$'' - ''^tests/functional/local-overlay-store/optimise-inner\.sh$'' - ''^tests/functional/local-overlay-store/optimise\.sh$'' - ''^tests/functional/local-overlay-store/redundant-add-inner\.sh$'' - ''^tests/functional/local-overlay-store/redundant-add\.sh$'' - ''^tests/functional/local-overlay-store/remount\.sh$'' - ''^tests/functional/local-overlay-store/stale-file-handle-inner\.sh$'' - ''^tests/functional/local-overlay-store/stale-file-handle\.sh$'' - ''^tests/functional/local-overlay-store/verify-inner\.sh$'' - ''^tests/functional/local-overlay-store/verify\.sh$'' ''^tests/functional/logging\.sh$'' ''^tests/functional/misc\.sh$'' ''^tests/functional/multiple-outputs\.sh$'' @@ -248,6 +168,23 @@ ''^tests/functional/user-envs\.builder\.sh$'' ''^tests/functional/user-envs\.sh$'' ''^tests/functional/why-depends\.sh$'' + + # Content-addressed test files that use recursive-*looking* sourcing + # (cd .. && source ), causing shellcheck to loop + # They're small wrapper scripts with not a lot going on + ''^tests/functional/ca/build-delete\.sh$'' + ''^tests/functional/ca/build-dry\.sh$'' + ''^tests/functional/ca/eval-store\.sh$'' + ''^tests/functional/ca/gc\.sh$'' + ''^tests/functional/ca/import-from-derivation\.sh$'' + ''^tests/functional/ca/multiple-outputs\.sh$'' + ''^tests/functional/ca/new-build-cmd\.sh$'' + ''^tests/functional/ca/nix-shell\.sh$'' + ''^tests/functional/ca/post-hook\.sh$'' + ''^tests/functional/ca/recursive\.sh$'' + ''^tests/functional/ca/repl\.sh$'' + ''^tests/functional/ca/selfref-gc\.sh$'' + ''^tests/functional/ca/why-depends\.sh$'' ]; }; }; diff --git a/meson.build b/meson.build index 5dcf98717..736756157 100644 --- a/meson.build +++ b/meson.build @@ -41,8 +41,10 @@ subproject('libexpr-c') subproject('libflake-c') subproject('libmain-c') +asan_enabled = 'address' in get_option('b_sanitize') + # Language Bindings -if get_option('bindings') and not meson.is_cross_build() +if get_option('bindings') and not meson.is_cross_build() and not asan_enabled subproject('perl') endif diff --git a/misc/bash/completion.sh b/misc/bash/completion.sh index c4ba96cd3..96f98d6c1 100644 --- a/misc/bash/completion.sh +++ b/misc/bash/completion.sh @@ -1,3 +1,4 @@ +# shellcheck shell=bash function _complete_nix { local -a words local cword cur diff --git a/misc/fish/completion.fish b/misc/fish/completion.fish index c6b8ef16a..b6584963b 100644 --- a/misc/fish/completion.fish +++ b/misc/fish/completion.fish @@ -1,3 +1,4 @@ +# shellcheck disable=all function _nix_complete # Get the current command up to a cursor. # - Behaves correctly even with pipes and nested in commands like env. 
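For orientation (a hedged sketch, not taken from this patch): the `excludes` entries edited above are anchored regular expressions matched against repository-relative paths by the shellcheck pre-commit hook. The hunk's truncated context hides the enclosing option path; the sketch below assumes the git-hooks.nix convention, and the exact attribute path in this repository may differ.

```nix
# Hypothetical sketch of how such an exclude list is typically wired into a
# git-hooks.nix / pre-commit-hooks flake module; option path is an assumption.
{
  pre-commit.settings.hooks.shellcheck = {
    enable = true;
    excludes = [
      # Anchored regexes, matched against repository-relative file paths.
      ''^tests/functional/ca/repl\.sh$''
    ];
  };
}
```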
diff --git a/misc/zsh/completion.zsh b/misc/zsh/completion.zsh index f9b3dca74..eb26a16cb 100644 --- a/misc/zsh/completion.zsh +++ b/misc/zsh/completion.zsh @@ -1,3 +1,4 @@ +# shellcheck disable=all #compdef nix function _nix() { diff --git a/nix-meson-build-support/asan-options/meson.build b/nix-meson-build-support/asan-options/meson.build new file mode 100644 index 000000000..17880b0ed --- /dev/null +++ b/nix-meson-build-support/asan-options/meson.build @@ -0,0 +1,12 @@ +asan_test_options_env = { + 'ASAN_OPTIONS' : 'abort_on_error=1:print_summary=1:detect_leaks=0', +} + +# Clang gets grumpy about missing libasan symbols if -shared-libasan is not +# passed when building shared libs, at least on Linux +if cxx.get_id() == 'clang' and ('address' in get_option('b_sanitize') or 'undefined' in get_option( + 'b_sanitize', +)) + add_project_link_arguments('-shared-libasan', language : 'cpp') +endif + diff --git a/nix-meson-build-support/common/meson.build b/nix-meson-build-support/common/meson.build index 5a29ff61d..8c4e98862 100644 --- a/nix-meson-build-support/common/meson.build +++ b/nix-meson-build-support/common/meson.build @@ -5,6 +5,15 @@ if not (host_machine.system() == 'windows' and cxx.get_id() == 'gcc') deps_private += dependency('threads') endif +if host_machine.system() == 'cygwin' + # -std=gnu on cygwin defines 'unix', which conflicts with the namespace + add_project_arguments( + '-D_POSIX_C_SOURCE=200809L', + '-D_GNU_SOURCE', + language : 'cpp', + ) +endif + add_project_arguments( '-Wdeprecated-copy', '-Werror=suggest-override', @@ -33,13 +42,5 @@ if cxx.get_id() == 'clang' add_project_arguments('-fpch-instantiate-templates', language : 'cpp') endif -# Clang gets grumpy about missing libasan symbols if -shared-libasan is not -# passed when building shared libs, at least on Linux -if cxx.get_id() == 'clang' and ('address' in get_option('b_sanitize') or 'undefined' in get_option( - 'b_sanitize', -)) - add_project_link_arguments('-shared-libasan', language : 'cpp') -endif - # Darwin ld doesn't like "X.Y.Zpre" -nix_soversion = meson.project_version().strip('pre') +nix_soversion = meson.project_version().split('pre')[0] diff --git a/packaging/components.nix b/packaging/components.nix index b5fad4043..2be4fa61d 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -164,6 +164,24 @@ let }; mesonLibraryLayer = finalAttrs: prevAttrs: { + preConfigure = + let + interpositionFlags = [ + "-fno-semantic-interposition" + "-Wl,-Bsymbolic-functions" + ]; + in + # NOTE: By default GCC disables interprocedural optimizations (in particular inlining) for + # position-independent code and thus shared libraries. + # Since LD_PRELOAD tricks aren't worth losing out on optimizations, we disable it for good. + # This is not the case for Clang, where inlining is done by default even without -fno-semantic-interposition. 
+ # https://reviews.llvm.org/D102453 + # https://fedoraproject.org/wiki/Changes/PythonNoSemanticInterpositionSpeedup + prevAttrs.preConfigure or "" + + lib.optionalString stdenv.cc.isGNU '' + export CFLAGS="''${CFLAGS:-} ${toString interpositionFlags}" + export CXXFLAGS="''${CXXFLAGS:-} ${toString interpositionFlags}" + ''; outputs = prevAttrs.outputs or [ "out" ] ++ [ "dev" ]; }; diff --git a/packaging/dev-shell.nix b/packaging/dev-shell.nix index 949f79752..ccfb9c4ae 100644 --- a/packaging/dev-shell.nix +++ b/packaging/dev-shell.nix @@ -118,6 +118,7 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( modular.pre-commit.settings.package (pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.installationScript) pkgs.buildPackages.nixfmt-rfc-style + pkgs.buildPackages.shellcheck pkgs.buildPackages.gdb ] ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) ( diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index 477eb1fd6..b013190f9 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -55,18 +55,22 @@ readonly NIX_INSTALLED_NIX="@nix@" readonly NIX_INSTALLED_CACERT="@cacert@" #readonly NIX_INSTALLED_NIX="/nix/store/j8dbv5w6jl34caywh2ygdy88knx1mdf7-nix-2.3.6" #readonly NIX_INSTALLED_CACERT="/nix/store/7dxhzymvy330i28ii676fl1pqwcahv2f-nss-cacert-3.49.2" -readonly EXTRACTED_NIX_PATH="$(dirname "$0")" +EXTRACTED_NIX_PATH="$(dirname "$0")" +readonly EXTRACTED_NIX_PATH # allow to override identity change command -readonly NIX_BECOME=${NIX_BECOME:-sudo} +NIX_BECOME=${NIX_BECOME:-sudo} +readonly NIX_BECOME -readonly ROOT_HOME=~root +ROOT_HOME=~root +readonly ROOT_HOME if [ -t 0 ] && [ -z "${NIX_INSTALLER_YES:-}" ]; then - readonly IS_HEADLESS='no' + IS_HEADLESS='no' else - readonly IS_HEADLESS='yes' + IS_HEADLESS='yes' fi +readonly IS_HEADLESS headless() { if [ "$IS_HEADLESS" = "yes" ]; then @@ -156,6 +160,7 @@ EOF } nix_user_for_core() { + # shellcheck disable=SC2059 printf "$NIX_BUILD_USER_NAME_TEMPLATE" "$1" } @@ -381,10 +386,12 @@ _sudo() { # Ensure that $TMPDIR exists if defined. if [[ -n "${TMPDIR:-}" ]] && [[ ! -d "${TMPDIR:-}" ]]; then + # shellcheck disable=SC2174 mkdir -m 0700 -p "${TMPDIR:-}" fi -readonly SCRATCH=$(mktemp -d) +SCRATCH=$(mktemp -d) +readonly SCRATCH finish_cleanup() { rm -rf "$SCRATCH" } @@ -677,7 +684,8 @@ create_directories() { # hiding behind || true, and the general state # should be one the user can repair once they # figure out where chown is... 
- local get_chr_own="$(PATH="$(getconf PATH 2>/dev/null)" command -vp chown)" + local get_chr_own + get_chr_own="$(PATH="$(getconf PATH 2>/dev/null)" command -vp chown)" if [[ -z "$get_chr_own" ]]; then get_chr_own="$(command -v chown)" fi @@ -915,9 +923,11 @@ configure_shell_profile() { fi if [ -e "$profile_target" ]; then - shell_source_lines \ - | _sudo "extend your $profile_target with nix-daemon settings" \ - tee -a "$profile_target" + { + shell_source_lines + cat "$profile_target" + } | _sudo "extend your $profile_target with nix-daemon settings" \ + tee "$profile_target" fi done @@ -1013,6 +1023,7 @@ main() { # Set profile targets after OS-specific scripts are loaded if command -v poly_configure_default_profile_targets > /dev/null 2>&1; then + # shellcheck disable=SC2207 PROFILE_TARGETS=($(poly_configure_default_profile_targets)) else PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshrc" "/etc/bash.bashrc" "/etc/zsh/zshrc") diff --git a/scripts/install-systemd-multi-user.sh b/scripts/install-systemd-multi-user.sh index dc373f4db..8abbb7af4 100755 --- a/scripts/install-systemd-multi-user.sh +++ b/scripts/install-systemd-multi-user.sh @@ -39,7 +39,7 @@ create_systemd_proxy_env() { vars="http_proxy https_proxy ftp_proxy all_proxy no_proxy HTTP_PROXY HTTPS_PROXY FTP_PROXY ALL_PROXY NO_PROXY" for v in $vars; do if [ "x${!v:-}" != "x" ]; then - echo "Environment=${v}=$(escape_systemd_env ${!v})" + echo "Environment=${v}=$(escape_systemd_env "${!v}")" fi done } diff --git a/src/libcmd/built-path.cc b/src/libcmd/built-path.cc index 80d97dc3e..4d76dd6da 100644 --- a/src/libcmd/built-path.cc +++ b/src/libcmd/built-path.cc @@ -83,12 +83,22 @@ nlohmann::json SingleBuiltPath::Built::toJSON(const StoreDirConfig & store) cons nlohmann::json SingleBuiltPath::toJSON(const StoreDirConfig & store) const { - return std::visit([&](const auto & buildable) { return buildable.toJSON(store); }, raw()); + return std::visit( + overloaded{ + [&](const SingleBuiltPath::Opaque & o) -> nlohmann::json { return store.printStorePath(o.path); }, + [&](const SingleBuiltPath::Built & b) { return b.toJSON(store); }, + }, + raw()); } nlohmann::json BuiltPath::toJSON(const StoreDirConfig & store) const { - return std::visit([&](const auto & buildable) { return buildable.toJSON(store); }, raw()); + return std::visit( + overloaded{ + [&](const BuiltPath::Opaque & o) -> nlohmann::json { return store.printStorePath(o.path); }, + [&](const BuiltPath::Built & b) { return b.toJSON(store); }, + }, + raw()); } RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const diff --git a/src/libcmd/meson.build b/src/libcmd/meson.build index f553afa0b..3833d7e0a 100644 --- a/src/libcmd/meson.build +++ b/src/libcmd/meson.build @@ -67,6 +67,7 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'built-path.cc', diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 01d786deb..38d06336b 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -760,7 +760,7 @@ void NixRepl::loadFlake(const std::string & flakeRefS) void NixRepl::initEnv() { - env = &state->allocEnv(envSize); + env = &state->mem.allocEnv(envSize); env->up = &state->baseEnv; displ = 0; staticEnv->vars.clear(); @@ -869,14 +869,8 @@ void NixRepl::addVarToScope(const Symbol name, Value & v) Expr * NixRepl::parseString(std::string s) { - return state->parseExprFromString(std::move(s), state->rootPath("."), staticEnv); -} - -void NixRepl::evalString(std::string s, 
Value & v) -{ - Expr * e; try { - e = parseString(s); + return state->parseExprFromString(std::move(s), state->rootPath("."), staticEnv); } catch (ParseError & e) { if (e.msg().find("unexpected end of file") != std::string::npos) // For parse errors on incomplete input, we continue waiting for the next line of @@ -885,6 +879,11 @@ void NixRepl::evalString(std::string s, Value & v) else throw; } +} + +void NixRepl::evalString(std::string s, Value & v) +{ + Expr * e = parseString(s); e->eval(*state, *env, v); state->forceValue(v, v.determinePos(noPos)); } diff --git a/src/libexpr-c/meson.build b/src/libexpr-c/meson.build index c47704ce4..03cee41a0 100644 --- a/src/libexpr-c/meson.build +++ b/src/libexpr-c/meson.build @@ -28,6 +28,7 @@ deps_public_maybe_subproject = [ subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'nix_api_expr.cc', diff --git a/src/libexpr-c/nix_api_expr.cc b/src/libexpr-c/nix_api_expr.cc index 46e08b5f7..db11dd40d 100644 --- a/src/libexpr-c/nix_api_expr.cc +++ b/src/libexpr-c/nix_api_expr.cc @@ -137,7 +137,7 @@ nix_eval_state_builder * nix_eval_state_builder_new(nix_c_context * context, Sto void nix_eval_state_builder_free(nix_eval_state_builder * builder) { - delete builder; + operator delete(builder, static_cast(alignof(nix_eval_state_builder))); } nix_err nix_eval_state_builder_load(nix_c_context * context, nix_eval_state_builder * builder) @@ -203,7 +203,7 @@ EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath_c void nix_state_free(EvalState * state) { - delete state; + operator delete(state, static_cast(alignof(EvalState))); } #if NIX_USE_BOEHMGC diff --git a/src/libexpr-c/nix_api_value.cc b/src/libexpr-c/nix_api_value.cc index 093daf2f8..3b8c7dd04 100644 --- a/src/libexpr-c/nix_api_value.cc +++ b/src/libexpr-c/nix_api_value.cc @@ -326,6 +326,10 @@ nix_value * nix_get_list_byidx(nix_c_context * context, const nix_value * value, try { auto & v = check_value_in(value); assert(v.type() == nix::nList); + if (ix >= v.listSize()) { + nix_set_err_msg(context, NIX_ERR_KEY, "list index out of bounds"); + return nullptr; + } auto * p = v.listView()[ix]; nix_gc_incref(nullptr, p); if (p != nullptr) @@ -335,6 +339,26 @@ nix_value * nix_get_list_byidx(nix_c_context * context, const nix_value * value, NIXC_CATCH_ERRS_NULL } +nix_value * +nix_get_list_byidx_lazy(nix_c_context * context, const nix_value * value, EvalState * state, unsigned int ix) +{ + if (context) + context->last_err_code = NIX_OK; + try { + auto & v = check_value_in(value); + assert(v.type() == nix::nList); + if (ix >= v.listSize()) { + nix_set_err_msg(context, NIX_ERR_KEY, "list index out of bounds"); + return nullptr; + } + auto * p = v.listView()[ix]; + nix_gc_incref(nullptr, p); + // Note: intentionally NOT calling forceValue() to keep the element lazy + return as_nix_value_ptr(p); + } + NIXC_CATCH_ERRS_NULL +} + nix_value * nix_get_attr_byname(nix_c_context * context, const nix_value * value, EvalState * state, const char * name) { if (context) @@ -355,6 +379,27 @@ nix_value * nix_get_attr_byname(nix_c_context * context, const nix_value * value NIXC_CATCH_ERRS_NULL } +nix_value * +nix_get_attr_byname_lazy(nix_c_context * context, const nix_value * value, EvalState * state, const char * name) +{ + if (context) + context->last_err_code = NIX_OK; + try { + auto & v = check_value_in(value); + assert(v.type() == nix::nAttrs); + nix::Symbol s = state->state.symbols.create(name); + auto 
attr = v.attrs()->get(s); + if (attr) { + nix_gc_incref(nullptr, attr->value); + // Note: intentionally NOT calling forceValue() to keep the attribute lazy + return as_nix_value_ptr(attr->value); + } + nix_set_err_msg(context, NIX_ERR_KEY, "missing attribute"); + return nullptr; + } + NIXC_CATCH_ERRS_NULL +} + bool nix_has_attr_byname(nix_c_context * context, const nix_value * value, EvalState * state, const char * name) { if (context) @@ -371,13 +416,28 @@ bool nix_has_attr_byname(nix_c_context * context, const nix_value * value, EvalS NIXC_CATCH_ERRS_RES(false); } -nix_value * nix_get_attr_byidx( - nix_c_context * context, const nix_value * value, EvalState * state, unsigned int i, const char ** name) +static void collapse_attrset_layer_chain_if_needed(nix::Value & v, EvalState * state) +{ + auto & attrs = *v.attrs(); + if (attrs.isLayered()) { + auto bindings = state->state.buildBindings(attrs.size()); + std::ranges::copy(attrs, std::back_inserter(bindings)); + v.mkAttrs(bindings); + } +} + +nix_value * +nix_get_attr_byidx(nix_c_context * context, nix_value * value, EvalState * state, unsigned int i, const char ** name) { if (context) context->last_err_code = NIX_OK; try { auto & v = check_value_in(value); + collapse_attrset_layer_chain_if_needed(v, state); + if (i >= v.attrs()->size()) { + nix_set_err_msg(context, NIX_ERR_KEY, "attribute index out of bounds"); + return nullptr; + } const nix::Attr & a = (*v.attrs())[i]; *name = state->state.symbols[a.name].c_str(); nix_gc_incref(nullptr, a.value); @@ -387,13 +447,38 @@ nix_value * nix_get_attr_byidx( NIXC_CATCH_ERRS_NULL } -const char * -nix_get_attr_name_byidx(nix_c_context * context, const nix_value * value, EvalState * state, unsigned int i) +nix_value * nix_get_attr_byidx_lazy( + nix_c_context * context, nix_value * value, EvalState * state, unsigned int i, const char ** name) { if (context) context->last_err_code = NIX_OK; try { auto & v = check_value_in(value); + collapse_attrset_layer_chain_if_needed(v, state); + if (i >= v.attrs()->size()) { + nix_set_err_msg(context, NIX_ERR_KEY, "attribute index out of bounds (Nix C API contract violation)"); + return nullptr; + } + const nix::Attr & a = (*v.attrs())[i]; + *name = state->state.symbols[a.name].c_str(); + nix_gc_incref(nullptr, a.value); + // Note: intentionally NOT calling forceValue() to keep the attribute lazy + return as_nix_value_ptr(a.value); + } + NIXC_CATCH_ERRS_NULL +} + +const char * nix_get_attr_name_byidx(nix_c_context * context, nix_value * value, EvalState * state, unsigned int i) +{ + if (context) + context->last_err_code = NIX_OK; + try { + auto & v = check_value_in(value); + collapse_attrset_layer_chain_if_needed(v, state); + if (i >= v.attrs()->size()) { + nix_set_err_msg(context, NIX_ERR_KEY, "attribute index out of bounds (Nix C API contract violation)"); + return nullptr; + } const nix::Attr & a = (*v.attrs())[i]; return state->state.symbols[a.name].c_str(); } @@ -594,7 +679,7 @@ nix_err nix_bindings_builder_insert(nix_c_context * context, BindingsBuilder * b context->last_err_code = NIX_OK; try { auto & v = check_value_not_null(value); - nix::Symbol s = bb->builder.state.get().symbols.create(name); + nix::Symbol s = bb->builder.symbols.get().create(name); bb->builder.insert(s, &v); } NIXC_CATCH_ERRS diff --git a/src/libexpr-c/nix_api_value.h b/src/libexpr-c/nix_api_value.h index 7cd6ad180..835eaec6e 100644 --- a/src/libexpr-c/nix_api_value.h +++ b/src/libexpr-c/nix_api_value.h @@ -265,10 +265,25 @@ ExternalValue * nix_get_external(nix_c_context * context, 
nix_value * value); */ nix_value * nix_get_list_byidx(nix_c_context * context, const nix_value * value, EvalState * state, unsigned int ix); -/** @brief Get an attr by name +/** @brief Get the ix'th element of a list without forcing evaluation of the element + * + * Returns the list element without forcing its evaluation, allowing access to lazy values. + * The list value itself must already be evaluated. * * Owned by the GC. Use nix_gc_decref when you're done with the pointer * @param[out] context Optional, stores error information + * @param[in] value Nix value to inspect (must be an evaluated list) + * @param[in] state nix evaluator state + * @param[in] ix list element to get + * @return value, NULL in case of errors + */ +nix_value * +nix_get_list_byidx_lazy(nix_c_context * context, const nix_value * value, EvalState * state, unsigned int ix); + +/** @brief Get an attr by name + * + * Use nix_gc_decref when you're done with the pointer + * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @param[in] state nix evaluator state * @param[in] name attribute name @@ -276,6 +291,21 @@ nix_value * nix_get_list_byidx(nix_c_context * context, const nix_value * value, */ nix_value * nix_get_attr_byname(nix_c_context * context, const nix_value * value, EvalState * state, const char * name); +/** @brief Get an attribute value by attribute name, without forcing evaluation of the attribute's value + * + * Returns the attribute value without forcing its evaluation, allowing access to lazy values. + * The attribute set value itself must already be evaluated. + * + * Use nix_gc_decref when you're done with the pointer + * @param[out] context Optional, stores error information + * @param[in] value Nix value to inspect (must be an evaluated attribute set) + * @param[in] state nix evaluator state + * @param[in] name attribute name + * @return value, NULL in case of errors + */ +nix_value * +nix_get_attr_byname_lazy(nix_c_context * context, const nix_value * value, EvalState * state, const char * name); + /** @brief Check if an attribute name exists on a value * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect @@ -285,11 +315,21 @@ nix_value * nix_get_attr_byname(nix_c_context * context, const nix_value * value */ bool nix_has_attr_byname(nix_c_context * context, const nix_value * value, EvalState * state, const char * name); -/** @brief Get an attribute by index in the sorted bindings +/** @brief Get an attribute by index * * Also gives you the name. * - * Owned by the GC. Use nix_gc_decref when you're done with the pointer + * Attributes are returned in an unspecified order which is NOT suitable for + * reproducible operations. In Nix's domain, reproducibility is paramount. The caller + * is responsible for sorting the attributes or storing them in an ordered map to + * ensure deterministic behavior in your application. + * + * @note When Nix does sort attributes, which it does for virtually all intermediate + * operations and outputs, it uses byte-wise lexicographic order (equivalent to + * lexicographic order by Unicode scalar value for valid UTF-8). We recommend + * applying this same ordering for consistency. 
+ * + * Use nix_gc_decref when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @param[in] state nix evaluator state @@ -297,12 +337,50 @@ bool nix_has_attr_byname(nix_c_context * context, const nix_value * value, EvalS * @param[out] name will store a pointer to the attribute name * @return value, NULL in case of errors */ -nix_value * nix_get_attr_byidx( - nix_c_context * context, const nix_value * value, EvalState * state, unsigned int i, const char ** name); +nix_value * +nix_get_attr_byidx(nix_c_context * context, nix_value * value, EvalState * state, unsigned int i, const char ** name); -/** @brief Get an attribute name by index in the sorted bindings +/** @brief Get an attribute by index, without forcing evaluation of the attribute's value * - * Useful when you want the name but want to avoid evaluation. + * Also gives you the name. + * + * Returns the attribute value without forcing its evaluation, allowing access to lazy values. + * The attribute set value itself must already have been evaluated. + * + * Attributes are returned in an unspecified order which is NOT suitable for + * reproducible operations. In Nix's domain, reproducibility is paramount. The caller + * is responsible for sorting the attributes or storing them in an ordered map to + * ensure deterministic behavior in your application. + * + * @note When Nix does sort attributes, which it does for virtually all intermediate + * operations and outputs, it uses byte-wise lexicographic order (equivalent to + * lexicographic order by Unicode scalar value for valid UTF-8). We recommend + * applying this same ordering for consistency. + * + * Use nix_gc_decref when you're done with the pointer + * @param[out] context Optional, stores error information + * @param[in] value Nix value to inspect (must be an evaluated attribute set) + * @param[in] state nix evaluator state + * @param[in] i attribute index + * @param[out] name will store a pointer to the attribute name + * @return value, NULL in case of errors + */ +nix_value * nix_get_attr_byidx_lazy( + nix_c_context * context, nix_value * value, EvalState * state, unsigned int i, const char ** name); + +/** @brief Get an attribute name by index + * + * Returns the attribute name without forcing evaluation of the attribute's value. + * + * Attributes are returned in an unspecified order which is NOT suitable for + * reproducible operations. In Nix's domain, reproducibility is paramount. The caller + * is responsible for sorting the attributes or storing them in an ordered map to + * ensure deterministic behavior in your application. + * + * @note When Nix does sort attributes, which it does for virtually all intermediate + * operations and outputs, it uses byte-wise lexicographic order (equivalent to + * lexicographic order by Unicode scalar value for valid UTF-8). We recommend + * applying this same ordering for consistency. 
* * Owned by the nix EvalState * @param[out] context Optional, stores error information @@ -311,8 +389,7 @@ nix_value * nix_get_attr_byidx( * @param[in] i attribute index * @return name, NULL in case of errors */ -const char * -nix_get_attr_name_byidx(nix_c_context * context, const nix_value * value, EvalState * state, unsigned int i); +const char * nix_get_attr_name_byidx(nix_c_context * context, nix_value * value, EvalState * state, unsigned int i); /**@}*/ /** @name Initializers diff --git a/src/libexpr-test-support/meson.build b/src/libexpr-test-support/meson.build index df28661b7..01a3f3bcb 100644 --- a/src/libexpr-test-support/meson.build +++ b/src/libexpr-test-support/meson.build @@ -31,6 +31,7 @@ rapidcheck = dependency('rapidcheck') deps_public += rapidcheck subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'tests/value/context.cc', diff --git a/src/libexpr-tests/main.cc b/src/libexpr-tests/main.cc index 61b40e834..88a9d6684 100644 --- a/src/libexpr-tests/main.cc +++ b/src/libexpr-tests/main.cc @@ -1,40 +1,15 @@ #include -#include -#include "nix/store/globals.hh" -#include "nix/util/logging.hh" + +#include "nix/store/tests/test-main.hh" +#include "nix/util/config-global.hh" using namespace nix; int main(int argc, char ** argv) { - if (argc > 1 && std::string_view(argv[1]) == "__build-remote") { - printError("test-build-remote: not supported in libexpr unit tests"); - return 1; - } - - // Disable build hook. We won't be testing remote builds in these unit tests. If we do, fix the above build hook. - settings.buildHook = {}; - -#ifdef __linux__ // should match the conditional around sandboxBuildDir declaration. - - // When building and testing nix within the host's Nix sandbox, our store dir will be located in the host's - // sandboxBuildDir, e.g.: Host - // storeDir = /nix/store - // sandboxBuildDir = /build - // This process - // storeDir = /build/foo/bar/store - // sandboxBuildDir = /build - // However, we have a rule that the store dir must not be inside the storeDir, so we need to pick a different - // sandboxBuildDir. 
- settings.sandboxBuildDir = "/test-build-dir-instead-of-usual-build-dir"; -#endif - -#ifdef __APPLE__ - // Avoid this error, when already running in a sandbox: - // sandbox-exec: sandbox_apply: Operation not permitted - settings.sandboxMode = smDisabled; - setEnv("_NIX_TEST_NO_SANDBOX", "1"); -#endif + auto res = testMainForBuidingPre(argc, argv); + if (res) + return res; // For pipe operator tests in trivial.cc experimentalFeatureSettings.set("experimental-features", "pipe-operators"); diff --git a/src/libexpr-tests/meson.build b/src/libexpr-tests/meson.build index c5dafe0de..7f7c08955 100644 --- a/src/libexpr-tests/meson.build +++ b/src/libexpr-tests/meson.build @@ -45,6 +45,7 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'derived-path.cc', @@ -82,7 +83,7 @@ this_exe = executable( test( meson.project_name(), this_exe, - env : { + env : asan_test_options_env + { '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', }, protocol : 'gtest', diff --git a/src/libexpr-tests/nix_api_expr.cc b/src/libexpr-tests/nix_api_expr.cc index 5e0868b6e..de508b4e4 100644 --- a/src/libexpr-tests/nix_api_expr.cc +++ b/src/libexpr-tests/nix_api_expr.cc @@ -423,6 +423,55 @@ TEST_F(nix_api_expr_test, nix_expr_primop_bad_return_thunk) ASSERT_THAT(nix_err_msg(nullptr, ctx, nullptr), testing::HasSubstr("badReturnThunk")); } +static void primop_with_nix_err_key( + void * user_data, nix_c_context * context, EvalState * state, nix_value ** args, nix_value * ret) +{ + nix_set_err_msg(context, NIX_ERR_KEY, "Test error from primop"); +} + +TEST_F(nix_api_expr_test, nix_expr_primop_nix_err_key_conversion) +{ + // Test that NIX_ERR_KEY from a custom primop gets converted to a generic EvalError + // + // RATIONALE: NIX_ERR_KEY must not be propagated from custom primops because it would + // create semantic confusion. NIX_ERR_KEY indicates missing keys/indices in C API functions + // (like nix_get_attr_byname, nix_get_list_byidx). If custom primops could return NIX_ERR_KEY, + // an evaluation error would be indistinguishable from an actual missing attribute. + // + // For example, if nix_get_attr_byname returned NIX_ERR_KEY when the attribute is present + // but the value evaluation fails, callers expecting NIX_ERR_KEY to mean "missing attribute" + // would incorrectly handle evaluation failures as missing attributes. In places where + // missing attributes are tolerated (like optional attributes), this would cause the + // program to continue after swallowing the error, leading to silent failures. 
+ PrimOp * primop = nix_alloc_primop( + ctx, primop_with_nix_err_key, 1, "testErrorPrimop", nullptr, "a test primop that sets NIX_ERR_KEY", nullptr); + assert_ctx_ok(); + nix_value * primopValue = nix_alloc_value(ctx, state); + assert_ctx_ok(); + nix_init_primop(ctx, primopValue, primop); + assert_ctx_ok(); + + nix_value * arg = nix_alloc_value(ctx, state); + assert_ctx_ok(); + nix_init_int(ctx, arg, 42); + assert_ctx_ok(); + + nix_value * result = nix_alloc_value(ctx, state); + assert_ctx_ok(); + nix_value_call(ctx, state, primopValue, arg, result); + + // Verify that NIX_ERR_KEY gets converted to NIX_ERR_NIX_ERROR (generic evaluation error) + ASSERT_EQ(nix_err_code(ctx), NIX_ERR_NIX_ERROR); + ASSERT_THAT(nix_err_msg(nullptr, ctx, nullptr), testing::HasSubstr("Error from custom function")); + ASSERT_THAT(nix_err_msg(nullptr, ctx, nullptr), testing::HasSubstr("Test error from primop")); + ASSERT_THAT(nix_err_msg(nullptr, ctx, nullptr), testing::HasSubstr("testErrorPrimop")); + + // Clean up + nix_gc_decref(ctx, primopValue); + nix_gc_decref(ctx, arg); + nix_gc_decref(ctx, result); +} + TEST_F(nix_api_expr_test, nix_value_call_multi_no_args) { nix_value * n = nix_alloc_value(ctx, state); @@ -437,4 +486,31 @@ TEST_F(nix_api_expr_test, nix_value_call_multi_no_args) assert_ctx_ok(); ASSERT_EQ(3, rInt); } + +TEST_F(nix_api_expr_test, nix_expr_attrset_update) +{ + nix_expr_eval_from_string(ctx, state, "{ a = 0; b = 2; } // { a = 1; b = 3; } // { a = 2; }", ".", value); + assert_ctx_ok(); + + ASSERT_EQ(nix_get_attrs_size(ctx, value), 2); + assert_ctx_ok(); + std::array, 2> values; + for (unsigned int i = 0; i < 2; ++i) { + const char * name; + values[i].second = nix_get_attr_byidx(ctx, value, state, i, &name); + assert_ctx_ok(); + values[i].first = name; + } + std::sort(values.begin(), values.end(), [](const auto & lhs, const auto & rhs) { return lhs.first < rhs.first; }); + + nix_value * a = values[0].second; + ASSERT_EQ("a", values[0].first); + ASSERT_EQ(nix_get_int(ctx, a), 2); + assert_ctx_ok(); + nix_value * b = values[1].second; + ASSERT_EQ("b", values[1].first); + ASSERT_EQ(nix_get_int(ctx, b), 3); + assert_ctx_ok(); +} + } // namespace nixC diff --git a/src/libexpr-tests/nix_api_value.cc b/src/libexpr-tests/nix_api_value.cc index af95224de..830637f3e 100644 --- a/src/libexpr-tests/nix_api_value.cc +++ b/src/libexpr-tests/nix_api_value.cc @@ -162,6 +162,114 @@ TEST_F(nix_api_expr_test, nix_build_and_init_list) nix_gc_decref(ctx, intValue); } +TEST_F(nix_api_expr_test, nix_get_list_byidx_large_indices) +{ + // Create a small list to test extremely large out-of-bounds access + ListBuilder * builder = nix_make_list_builder(ctx, state, 2); + nix_value * intValue = nix_alloc_value(ctx, state); + nix_init_int(ctx, intValue, 42); + nix_list_builder_insert(ctx, builder, 0, intValue); + nix_list_builder_insert(ctx, builder, 1, intValue); + nix_make_list(ctx, builder, value); + nix_list_builder_free(builder); + + // Test extremely large indices that would definitely crash without bounds checking + ASSERT_EQ(nullptr, nix_get_list_byidx(ctx, value, state, 1000000)); + ASSERT_EQ(NIX_ERR_KEY, nix_err_code(ctx)); + ASSERT_EQ(nullptr, nix_get_list_byidx(ctx, value, state, UINT_MAX / 2)); + ASSERT_EQ(NIX_ERR_KEY, nix_err_code(ctx)); + ASSERT_EQ(nullptr, nix_get_list_byidx(ctx, value, state, UINT_MAX / 2 + 1000000)); + ASSERT_EQ(NIX_ERR_KEY, nix_err_code(ctx)); + + // Clean up + nix_gc_decref(ctx, intValue); +} + +TEST_F(nix_api_expr_test, nix_get_list_byidx_lazy) +{ + // Create a list with a throwing lazy 
element, an already-evaluated int, and a lazy function call + + // 1. Throwing lazy element - create a function application thunk that will throw when forced + nix_value * throwingFn = nix_alloc_value(ctx, state); + nix_value * throwingValue = nix_alloc_value(ctx, state); + + nix_expr_eval_from_string( + ctx, + state, + R"( + _: throw "This should not be evaluated by the lazy accessor" + )", + "", + throwingFn); + assert_ctx_ok(); + + nix_init_apply(ctx, throwingValue, throwingFn, throwingFn); + assert_ctx_ok(); + + // 2. Already evaluated int (not lazy) + nix_value * intValue = nix_alloc_value(ctx, state); + nix_init_int(ctx, intValue, 42); + assert_ctx_ok(); + + // 3. Lazy function application that would compute increment 5 = 6 + nix_value * lazyApply = nix_alloc_value(ctx, state); + nix_value * incrementFn = nix_alloc_value(ctx, state); + nix_value * argFive = nix_alloc_value(ctx, state); + + nix_expr_eval_from_string(ctx, state, "x: x + 1", "", incrementFn); + assert_ctx_ok(); + nix_init_int(ctx, argFive, 5); + + // Create a lazy application: (x: x + 1) 5 + nix_init_apply(ctx, lazyApply, incrementFn, argFive); + assert_ctx_ok(); + + ListBuilder * builder = nix_make_list_builder(ctx, state, 3); + nix_list_builder_insert(ctx, builder, 0, throwingValue); + nix_list_builder_insert(ctx, builder, 1, intValue); + nix_list_builder_insert(ctx, builder, 2, lazyApply); + nix_make_list(ctx, builder, value); + nix_list_builder_free(builder); + + // Test 1: Lazy accessor should return the throwing element without forcing evaluation + nix_value * lazyThrowingElement = nix_get_list_byidx_lazy(ctx, value, state, 0); + assert_ctx_ok(); + ASSERT_NE(nullptr, lazyThrowingElement); + + // Verify the element is still lazy by checking that forcing it throws + nix_value_force(ctx, state, lazyThrowingElement); + assert_ctx_err(); + ASSERT_THAT( + nix_err_msg(nullptr, ctx, nullptr), testing::HasSubstr("This should not be evaluated by the lazy accessor")); + + // Test 2: Lazy accessor should return the already-evaluated int + nix_value * intElement = nix_get_list_byidx_lazy(ctx, value, state, 1); + assert_ctx_ok(); + ASSERT_NE(nullptr, intElement); + ASSERT_EQ(42, nix_get_int(ctx, intElement)); + + // Test 3: Lazy accessor should return the lazy function application without forcing + nix_value * lazyFunctionElement = nix_get_list_byidx_lazy(ctx, value, state, 2); + assert_ctx_ok(); + ASSERT_NE(nullptr, lazyFunctionElement); + + // Force the lazy function application - should compute 5 + 1 = 6 + nix_value_force(ctx, state, lazyFunctionElement); + assert_ctx_ok(); + ASSERT_EQ(6, nix_get_int(ctx, lazyFunctionElement)); + + // Clean up + nix_gc_decref(ctx, throwingFn); + nix_gc_decref(ctx, throwingValue); + nix_gc_decref(ctx, intValue); + nix_gc_decref(ctx, lazyApply); + nix_gc_decref(ctx, incrementFn); + nix_gc_decref(ctx, argFive); + nix_gc_decref(ctx, lazyThrowingElement); + nix_gc_decref(ctx, intElement); + nix_gc_decref(ctx, lazyFunctionElement); +} + TEST_F(nix_api_expr_test, nix_build_and_init_attr_invalid) { ASSERT_EQ(nullptr, nix_get_attr_byname(ctx, nullptr, state, 0)); @@ -244,6 +352,225 @@ TEST_F(nix_api_expr_test, nix_build_and_init_attr) free(out_name); } +TEST_F(nix_api_expr_test, nix_get_attr_byidx_large_indices) +{ + // Create a small attribute set to test extremely large out-of-bounds access + const char ** out_name = (const char **) malloc(sizeof(char *)); + BindingsBuilder * builder = nix_make_bindings_builder(ctx, state, 2); + nix_value * intValue = nix_alloc_value(ctx, state); + 
nix_init_int(ctx, intValue, 42); + nix_bindings_builder_insert(ctx, builder, "test", intValue); + nix_make_attrs(ctx, value, builder); + nix_bindings_builder_free(builder); + + // Test extremely large indices that would definitely crash without bounds checking + ASSERT_EQ(nullptr, nix_get_attr_byidx(ctx, value, state, 1000000, out_name)); + ASSERT_EQ(NIX_ERR_KEY, nix_err_code(ctx)); + ASSERT_EQ(nullptr, nix_get_attr_byidx(ctx, value, state, UINT_MAX / 2, out_name)); + ASSERT_EQ(NIX_ERR_KEY, nix_err_code(ctx)); + ASSERT_EQ(nullptr, nix_get_attr_byidx(ctx, value, state, UINT_MAX / 2 + 1000000, out_name)); + ASSERT_EQ(NIX_ERR_KEY, nix_err_code(ctx)); + + // Test nix_get_attr_name_byidx with large indices too + ASSERT_EQ(nullptr, nix_get_attr_name_byidx(ctx, value, state, 1000000)); + ASSERT_EQ(NIX_ERR_KEY, nix_err_code(ctx)); + ASSERT_EQ(nullptr, nix_get_attr_name_byidx(ctx, value, state, UINT_MAX / 2)); + ASSERT_EQ(NIX_ERR_KEY, nix_err_code(ctx)); + ASSERT_EQ(nullptr, nix_get_attr_name_byidx(ctx, value, state, UINT_MAX / 2 + 1000000)); + ASSERT_EQ(NIX_ERR_KEY, nix_err_code(ctx)); + + // Clean up + nix_gc_decref(ctx, intValue); + free(out_name); +} + +TEST_F(nix_api_expr_test, nix_get_attr_byname_lazy) +{ + // Create an attribute set with a throwing lazy attribute, an already-evaluated int, and a lazy function call + + // 1. Throwing lazy element - create a function application thunk that will throw when forced + nix_value * throwingFn = nix_alloc_value(ctx, state); + nix_value * throwingValue = nix_alloc_value(ctx, state); + + nix_expr_eval_from_string( + ctx, + state, + R"( + _: throw "This should not be evaluated by the lazy accessor" + )", + "", + throwingFn); + assert_ctx_ok(); + + nix_init_apply(ctx, throwingValue, throwingFn, throwingFn); + assert_ctx_ok(); + + // 2. Already evaluated int (not lazy) + nix_value * intValue = nix_alloc_value(ctx, state); + nix_init_int(ctx, intValue, 42); + assert_ctx_ok(); + + // 3. 
Lazy function application that would compute increment 7 = 8 + nix_value * lazyApply = nix_alloc_value(ctx, state); + nix_value * incrementFn = nix_alloc_value(ctx, state); + nix_value * argSeven = nix_alloc_value(ctx, state); + + nix_expr_eval_from_string(ctx, state, "x: x + 1", "", incrementFn); + assert_ctx_ok(); + nix_init_int(ctx, argSeven, 7); + + // Create a lazy application: (x: x + 1) 7 + nix_init_apply(ctx, lazyApply, incrementFn, argSeven); + assert_ctx_ok(); + + BindingsBuilder * builder = nix_make_bindings_builder(ctx, state, 3); + nix_bindings_builder_insert(ctx, builder, "throwing", throwingValue); + nix_bindings_builder_insert(ctx, builder, "normal", intValue); + nix_bindings_builder_insert(ctx, builder, "lazy", lazyApply); + nix_make_attrs(ctx, value, builder); + nix_bindings_builder_free(builder); + + // Test 1: Lazy accessor should return the throwing attribute without forcing evaluation + nix_value * lazyThrowingAttr = nix_get_attr_byname_lazy(ctx, value, state, "throwing"); + assert_ctx_ok(); + ASSERT_NE(nullptr, lazyThrowingAttr); + + // Verify the attribute is still lazy by checking that forcing it throws + nix_value_force(ctx, state, lazyThrowingAttr); + assert_ctx_err(); + ASSERT_THAT( + nix_err_msg(nullptr, ctx, nullptr), testing::HasSubstr("This should not be evaluated by the lazy accessor")); + + // Test 2: Lazy accessor should return the already-evaluated int + nix_value * intAttr = nix_get_attr_byname_lazy(ctx, value, state, "normal"); + assert_ctx_ok(); + ASSERT_NE(nullptr, intAttr); + ASSERT_EQ(42, nix_get_int(ctx, intAttr)); + + // Test 3: Lazy accessor should return the lazy function application without forcing + nix_value * lazyFunctionAttr = nix_get_attr_byname_lazy(ctx, value, state, "lazy"); + assert_ctx_ok(); + ASSERT_NE(nullptr, lazyFunctionAttr); + + // Force the lazy function application - should compute 7 + 1 = 8 + nix_value_force(ctx, state, lazyFunctionAttr); + assert_ctx_ok(); + ASSERT_EQ(8, nix_get_int(ctx, lazyFunctionAttr)); + + // Test 4: Missing attribute should return NULL with NIX_ERR_KEY + nix_value * missingAttr = nix_get_attr_byname_lazy(ctx, value, state, "nonexistent"); + ASSERT_EQ(nullptr, missingAttr); + ASSERT_EQ(NIX_ERR_KEY, nix_err_code(ctx)); + + // Clean up + nix_gc_decref(ctx, throwingFn); + nix_gc_decref(ctx, throwingValue); + nix_gc_decref(ctx, intValue); + nix_gc_decref(ctx, lazyApply); + nix_gc_decref(ctx, incrementFn); + nix_gc_decref(ctx, argSeven); + nix_gc_decref(ctx, lazyThrowingAttr); + nix_gc_decref(ctx, intAttr); + nix_gc_decref(ctx, lazyFunctionAttr); +} + +TEST_F(nix_api_expr_test, nix_get_attr_byidx_lazy) +{ + // Create an attribute set with a throwing lazy attribute, an already-evaluated int, and a lazy function call + + // 1. Throwing lazy element - create a function application thunk that will throw when forced + nix_value * throwingFn = nix_alloc_value(ctx, state); + nix_value * throwingValue = nix_alloc_value(ctx, state); + + nix_expr_eval_from_string( + ctx, + state, + R"( + _: throw "This should not be evaluated by the lazy accessor" + )", + "", + throwingFn); + assert_ctx_ok(); + + nix_init_apply(ctx, throwingValue, throwingFn, throwingFn); + assert_ctx_ok(); + + // 2. Already evaluated int (not lazy) + nix_value * intValue = nix_alloc_value(ctx, state); + nix_init_int(ctx, intValue, 99); + assert_ctx_ok(); + + // 3. 
Lazy function application that would compute increment 10 = 11 + nix_value * lazyApply = nix_alloc_value(ctx, state); + nix_value * incrementFn = nix_alloc_value(ctx, state); + nix_value * argTen = nix_alloc_value(ctx, state); + + nix_expr_eval_from_string(ctx, state, "x: x + 1", "", incrementFn); + assert_ctx_ok(); + nix_init_int(ctx, argTen, 10); + + // Create a lazy application: (x: x + 1) 10 + nix_init_apply(ctx, lazyApply, incrementFn, argTen); + assert_ctx_ok(); + + BindingsBuilder * builder = nix_make_bindings_builder(ctx, state, 3); + nix_bindings_builder_insert(ctx, builder, "a_throwing", throwingValue); + nix_bindings_builder_insert(ctx, builder, "b_normal", intValue); + nix_bindings_builder_insert(ctx, builder, "c_lazy", lazyApply); + nix_make_attrs(ctx, value, builder); + nix_bindings_builder_free(builder); + + // Proper usage: first get the size and gather all attributes into a map + unsigned int attrCount = nix_get_attrs_size(ctx, value); + assert_ctx_ok(); + ASSERT_EQ(3u, attrCount); + + // Gather all attributes into a map (proper contract usage) + std::map attrMap; + const char * name; + + for (unsigned int i = 0; i < attrCount; i++) { + nix_value * attr = nix_get_attr_byidx_lazy(ctx, value, state, i, &name); + assert_ctx_ok(); + ASSERT_NE(nullptr, attr); + attrMap[std::string(name)] = attr; + } + + // Now test the gathered attributes + ASSERT_EQ(3u, attrMap.size()); + ASSERT_TRUE(attrMap.count("a_throwing")); + ASSERT_TRUE(attrMap.count("b_normal")); + ASSERT_TRUE(attrMap.count("c_lazy")); + + // Test 1: Throwing attribute should be lazy + nix_value * throwingAttr = attrMap["a_throwing"]; + nix_value_force(ctx, state, throwingAttr); + assert_ctx_err(); + ASSERT_THAT( + nix_err_msg(nullptr, ctx, nullptr), testing::HasSubstr("This should not be evaluated by the lazy accessor")); + + // Test 2: Normal attribute should be already evaluated + nix_value * normalAttr = attrMap["b_normal"]; + ASSERT_EQ(99, nix_get_int(ctx, normalAttr)); + + // Test 3: Lazy function should compute when forced + nix_value * lazyAttr = attrMap["c_lazy"]; + nix_value_force(ctx, state, lazyAttr); + assert_ctx_ok(); + ASSERT_EQ(11, nix_get_int(ctx, lazyAttr)); + + // Clean up + nix_gc_decref(ctx, throwingFn); + nix_gc_decref(ctx, throwingValue); + nix_gc_decref(ctx, intValue); + nix_gc_decref(ctx, lazyApply); + nix_gc_decref(ctx, incrementFn); + nix_gc_decref(ctx, argTen); + for (auto & pair : attrMap) { + nix_gc_decref(ctx, pair.second); + } +} + TEST_F(nix_api_expr_test, nix_value_init) { // Setup diff --git a/src/libexpr-tests/package.nix b/src/libexpr-tests/package.nix index 51d52e935..c36aa2dc7 100644 --- a/src/libexpr-tests/package.nix +++ b/src/libexpr-tests/package.nix @@ -62,6 +62,7 @@ mkMesonExecutable (finalAttrs: { mkdir -p "$HOME" '' + '' + export ASAN_OPTIONS=abort_on_error=1:print_summary=1:detect_leaks=0 export _NIX_TEST_UNIT_DATA=${resolvePath ./data} ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} touch $out diff --git a/src/libexpr-tests/primops.cc b/src/libexpr-tests/primops.cc index aa4ef5e21..74d676844 100644 --- a/src/libexpr-tests/primops.cc +++ b/src/libexpr-tests/primops.cc @@ -642,7 +642,7 @@ class ToStringPrimOpTest : public PrimOpTest, TEST_P(ToStringPrimOpTest, toString) { - const auto [input, output] = GetParam(); + const auto & [input, output] = GetParam(); auto v = eval(input); ASSERT_THAT(v, IsStringEq(output)); } @@ -798,7 +798,7 @@ class CompareVersionsPrimOpTest : public PrimOpTest, TEST_P(CompareVersionsPrimOpTest, 
compareVersions) { - auto [expression, expectation] = GetParam(); + const auto & [expression, expectation] = GetParam(); auto v = eval(expression); ASSERT_THAT(v, IsIntEq(expectation)); } @@ -834,7 +834,7 @@ class ParseDrvNamePrimOpTest TEST_P(ParseDrvNamePrimOpTest, parseDrvName) { - auto [input, expectedName, expectedVersion] = GetParam(); + const auto & [input, expectedName, expectedVersion] = GetParam(); const auto expr = fmt("builtins.parseDrvName \"%1%\"", input); auto v = eval(expr); ASSERT_THAT(v, IsAttrsOfSize(2)); diff --git a/src/libexpr/attr-set.cc b/src/libexpr/attr-set.cc index 88474c36f..92b67f6ad 100644 --- a/src/libexpr/attr-set.cc +++ b/src/libexpr/attr-set.cc @@ -10,32 +10,32 @@ Bindings Bindings::emptyBindings; /* Allocate a new array of attributes for an attribute set with a specific capacity. The space is implicitly reserved after the Bindings structure. */ -Bindings * EvalState::allocBindings(size_t capacity) +Bindings * EvalMemory::allocBindings(size_t capacity) { if (capacity == 0) return &Bindings::emptyBindings; - if (capacity > std::numeric_limits::max()) + if (capacity > std::numeric_limits::max()) throw Error("attribute set of size %d is too big", capacity); - nrAttrsets++; - nrAttrsInAttrsets += capacity; + stats.nrAttrsets++; + stats.nrAttrsInAttrsets += capacity; return new (allocBytes(sizeof(Bindings) + sizeof(Attr) * capacity)) Bindings(); } Value & BindingsBuilder::alloc(Symbol name, PosIdx pos) { - auto value = state.get().allocValue(); + auto value = mem.get().allocValue(); bindings->push_back(Attr(name, value, pos)); return *value; } Value & BindingsBuilder::alloc(std::string_view name, PosIdx pos) { - return alloc(state.get().symbols.create(name), pos); + return alloc(symbols.get().create(name), pos); } void Bindings::sort() { - std::sort(attrs, attrs + size_); + std::sort(attrs, attrs + numAttrs); } Value & Value::mkAttrs(BindingsBuilder & bindings) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index ed7231b1e..2df373520 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -17,6 +17,7 @@ #include "nix/expr/print.hh" #include "nix/fetchers/filtering-source-accessor.hh" #include "nix/util/memory-source-accessor.hh" +#include "nix/util/mounted-source-accessor.hh" #include "nix/expr/gc-small-vector.hh" #include "nix/util/url.hh" #include "nix/fetchers/fetch-to-store.hh" @@ -193,6 +194,15 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env) static constexpr size_t BASE_ENV_SIZE = 128; +EvalMemory::EvalMemory() +#if NIX_USE_BOEHMGC + : valueAllocCache(std::allocate_shared(traceable_allocator(), nullptr)) + , env1AllocCache(std::allocate_shared(traceable_allocator(), nullptr)) +#endif +{ + assertGCInitialized(); +} + EvalState::EvalState( const LookupPath & lookupPathFromArguments, ref store, @@ -225,22 +235,25 @@ EvalState::EvalState( */ {CanonPath(store->storeDir), store->getFSAccessor(settings.pureEval)}, })) - , rootFS(({ - /* In pure eval mode, we provide a filesystem that only - contains the Nix store. + , rootFS([&] { + auto accessor = [&]() -> decltype(rootFS) { + /* In pure eval mode, we provide a filesystem that only + contains the Nix store. */ + if (settings.pureEval) + return storeFS; - If we have a chroot store and pure eval is not enabled, - use a union accessor to make the chroot store available - at its logical location while still having the - underlying directory available. This is necessary for - instance if we're evaluating a file from the physical - /nix/store while using a chroot store. 
*/ - auto accessor = getFSSourceAccessor(); + /* If we have a chroot store and pure eval is not enabled, + use a union accessor to make the chroot store available + at its logical location while still having the underlying + directory available. This is necessary for instance if + we're evaluating a file from the physical /nix/store + while using a chroot store. */ + auto realStoreDir = dirOf(store->toRealPath(StorePath::dummy)); + if (store->storeDir != realStoreDir) + return makeUnionSourceAccessor({getFSSourceAccessor(), storeFS}); - auto realStoreDir = dirOf(store->toRealPath(StorePath::dummy)); - if (settings.pureEval || store->storeDir != realStoreDir) { - accessor = settings.pureEval ? storeFS : makeUnionSourceAccessor({accessor, storeFS}); - } + return getFSSourceAccessor(); + }(); /* Apply access control if needed. */ if (settings.restrictEval || settings.pureEval) @@ -251,8 +264,8 @@ EvalState::EvalState( throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation); }); - accessor; - })) + return accessor; + }()) , corepkgsFS(make_ref()) , internalFS(make_ref()) , derivationInternal{corepkgsFS->addFile( @@ -270,12 +283,10 @@ EvalState::EvalState( , fileEvalCache(make_ref()) , regexCache(makeRegexCache()) #if NIX_USE_BOEHMGC - , valueAllocCache(std::allocate_shared(traceable_allocator(), nullptr)) - , env1AllocCache(std::allocate_shared(traceable_allocator(), nullptr)) - , baseEnvP(std::allocate_shared(traceable_allocator(), &allocEnv(BASE_ENV_SIZE))) + , baseEnvP(std::allocate_shared(traceable_allocator(), &mem.allocEnv(BASE_ENV_SIZE))) , baseEnv(**baseEnvP) #else - , baseEnv(allocEnv(BASE_ENV_SIZE)) + , baseEnv(mem.allocEnv(BASE_ENV_SIZE)) #endif , staticBaseEnv{std::make_shared(nullptr, nullptr)} { @@ -284,9 +295,8 @@ EvalState::EvalState( countCalls = getEnv("NIX_COUNT_CALLS").value_or("0") != "0"; - assertGCInitialized(); - static_assert(sizeof(Env) <= 16, "environment must be <= 16 bytes"); + static_assert(sizeof(Counter) == 64, "counters must be 64 bytes"); /* Construct the Nix expression search path. */ assert(lookupPath.elements.empty()); @@ -333,7 +343,7 @@ EvalState::EvalState( EvalState::~EvalState() {} -void EvalState::allowPath(const Path & path) +void EvalState::allowPathLegacy(const Path & path) { if (auto rootFS2 = rootFS.dynamic_pointer_cast()) rootFS2->allowPrefix(CanonPath(path)); @@ -880,11 +890,10 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) } } -ListBuilder::ListBuilder(EvalState & state, size_t size) +ListBuilder::ListBuilder(size_t size) : size(size) , elems(size <= 2 ? inlineElems : (Value **) allocBytes(size * sizeof(Value *))) { - state.nrListElems += size; } Value * EvalState::getBool(bool b) @@ -892,7 +901,7 @@ Value * EvalState::getBool(bool b) return b ? &Value::vTrue : &Value::vFalse; } -unsigned long nrThunks = 0; +static Counter nrThunks; static inline void mkThunk(Value & v, Env & env, Expr * expr) { @@ -983,10 +992,6 @@ void EvalState::mkSingleDerivedPathString(const SingleDerivedPath & p, Value & v }); } -/* Create a thunk for the delayed computation of the given expression - in the given environment. But if the expression is a variable, - then look it up right away. This significantly reduces the number - of thunks allocated. 
*/ Value * Expr::maybeThunk(EvalState & state, Env & env) { Value * v = state.allocValue(); @@ -1035,9 +1040,10 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env) * from a thunk, ensuring that every file is parsed/evaluated only * once (via the thunk stored in `EvalState::fileEvalCache`). */ -struct ExprParseFile : Expr +struct ExprParseFile : Expr, gc { - SourcePath & path; + // FIXME: make this a reference (see below). + SourcePath path; bool mustBeTrivial; ExprParseFile(SourcePath & path, bool mustBeTrivial) @@ -1088,14 +1094,18 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial) } Value * vExpr; - ExprParseFile expr{*resolvedPath, mustBeTrivial}; + // FIXME: put ExprParseFile on the stack instead of the heap once + // https://github.com/NixOS/nix/pull/13930 is merged. That will ensure + // the post-condition that `expr` is unreachable after + // `forceValue()` returns. + auto expr = new ExprParseFile{*resolvedPath, mustBeTrivial}; fileEvalCache->try_emplace_and_cvisit( *resolvedPath, nullptr, [&](auto & i) { vExpr = allocValue(); - vExpr->mkThunk(&baseEnv, &expr); + vExpr->mkThunk(&baseEnv, expr); i.second = vExpr; }, [&](auto & i) { vExpr = i.second; }); @@ -1177,7 +1187,7 @@ void ExprPath::eval(EvalState & state, Env & env, Value & v) Env * ExprAttrs::buildInheritFromEnv(EvalState & state, Env & up) { - Env & inheritEnv = state.allocEnv(inheritFromExprs->size()); + Env & inheritEnv = state.mem.allocEnv(inheritFromExprs->size()); inheritEnv.up = &up; Displacement displ = 0; @@ -1196,7 +1206,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) if (recursive) { /* Create a new environment that contains the attributes in this `rec'. */ - Env & env2(state.allocEnv(attrs.size())); + Env & env2(state.mem.allocEnv(attrs.size())); env2.up = &env; dynamicEnv = &env2; Env * inheritEnv = inheritFromExprs ? buildInheritFromEnv(state, env2) : nullptr; @@ -1288,7 +1298,7 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) { /* Create a new environment that contains the attributes in this `let'. */ - Env & env2(state.allocEnv(attrs->attrs.size())); + Env & env2(state.mem.allocEnv(attrs->attrs.size())); env2.up = &env; Env * inheritEnv = attrs->inheritFromExprs ? attrs->buildInheritFromEnv(state, env2) : nullptr; @@ -1494,7 +1504,7 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, ExprLambda & lambda(*vCur.lambda().fun); auto size = (!lambda.arg ? 0 : 1) + (lambda.hasFormals() ? 
lambda.formals->formals.size() : 0); - Env & env2(allocEnv(size)); + Env & env2(mem.allocEnv(size)); env2.up = vCur.lambda().env; Displacement displ = 0; @@ -1783,7 +1793,7 @@ https://nix.dev/manual/nix/stable/language/syntax.html#functions.)", void ExprWith::eval(EvalState & state, Env & env, Value & v) { - Env & env2(state.allocEnv(1)); + Env & env2(state.mem.allocEnv(1)); env2.up = &env; env2.values[0] = attrs->maybeThunk(state, env); @@ -1865,51 +1875,113 @@ void ExprOpImpl::eval(EvalState & state, Env & env, Value & v) || state.evalBool(env, e2, pos, "in the right operand of the IMPL (->) operator")); } -void ExprOpUpdate::eval(EvalState & state, Env & env, Value & v) +void ExprOpUpdate::eval(EvalState & state, Value & v, Value & v1, Value & v2) { - Value v1, v2; - state.evalAttrs(env, e1, v1, pos, "in the left operand of the update (//) operator"); - state.evalAttrs(env, e2, v2, pos, "in the right operand of the update (//) operator"); - state.nrOpUpdates++; - if (v1.attrs()->size() == 0) { + const Bindings & bindings1 = *v1.attrs(); + if (bindings1.empty()) { v = v2; return; } - if (v2.attrs()->size() == 0) { + + const Bindings & bindings2 = *v2.attrs(); + if (bindings2.empty()) { v = v1; return; } - auto attrs = state.buildBindings(v1.attrs()->size() + v2.attrs()->size()); + /* Simple heuristic for determining whether attrs2 should be "layered" on top of + attrs1 instead of copying to a new Bindings. */ + bool shouldLayer = [&]() -> bool { + if (bindings1.isLayerListFull()) + return false; + + if (bindings2.size() > state.settings.bindingsUpdateLayerRhsSizeThreshold) + return false; + + return true; + }(); + + if (shouldLayer) { + auto attrs = state.buildBindings(bindings2.size()); + attrs.layerOnTopOf(bindings1); + + std::ranges::copy(bindings2, std::back_inserter(attrs)); + v.mkAttrs(attrs.alreadySorted()); + + state.nrOpUpdateValuesCopied += bindings2.size(); + return; + } + + auto attrs = state.buildBindings(bindings1.size() + bindings2.size()); /* Merge the sets, preferring values from the second set. Make sure to keep the resulting vector in sorted order. */ - auto i = v1.attrs()->begin(); - auto j = v2.attrs()->begin(); + auto i = bindings1.begin(); + auto j = bindings2.begin(); - while (i != v1.attrs()->end() && j != v2.attrs()->end()) { + while (i != bindings1.end() && j != bindings2.end()) { if (i->name == j->name) { attrs.insert(*j); ++i; ++j; - } else if (i->name < j->name) - attrs.insert(*i++); - else - attrs.insert(*j++); + } else if (i->name < j->name) { + attrs.insert(*i); + ++i; + } else { + attrs.insert(*j); + ++j; + } } - while (i != v1.attrs()->end()) - attrs.insert(*i++); - while (j != v2.attrs()->end()) - attrs.insert(*j++); + while (i != bindings1.end()) { + attrs.insert(*i); + ++i; + } + + while (j != bindings2.end()) { + attrs.insert(*j); + ++j; + } v.mkAttrs(attrs.alreadySorted()); state.nrOpUpdateValuesCopied += v.attrs()->size(); } +void ExprOpUpdate::eval(EvalState & state, Env & env, Value & v) +{ + UpdateQueue q; + evalForUpdate(state, env, q); + + v.mkAttrs(&Bindings::emptyBindings); + for (auto & rhs : std::views::reverse(q)) { + /* Remember that queue is sorted rightmost attrset first. 
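The queue-based rewrite of `//` above is easiest to see on a small example. The following standalone sketch is illustrative only: `std::map` and `std::vector` stand in for `Bindings` and the real `UpdateQueue`, and the layering fast path is ignored. It shows the two phases — gather operands rightmost-first, then fold the queue in reverse so the rightmost operand wins.

#include <cassert>
#include <map>
#include <string>
#include <vector>

// Fully evaluated attribute sets are modelled as plain maps here.
using MiniAttrs = std::map<std::string, int>;

// ExprOpUpdate::evalForUpdate pushes operands rightmost-first; the queue is
// modelled as a vector in that same order.
using MiniUpdateQueue = std::vector<MiniAttrs>;

// ExprOpUpdate::eval then walks the queue in reverse (leftmost operand first),
// merging each operand on top of the accumulated result, so operands further
// to the right override earlier ones.
static MiniAttrs applyUpdates(const MiniUpdateQueue & q)
{
    MiniAttrs result;
    for (auto it = q.rbegin(); it != q.rend(); ++it)
        for (auto & [name, value] : *it)
            result[name] = value;
    return result;
}

int main()
{
    // a // b  with  a = { x = 1; y = 2; }  and  b = { y = 3; z = 4; }
    MiniUpdateQueue q;
    q.push_back({{"y", 3}, {"z", 4}});   // b, the rightmost operand, is queued first
    q.push_back({{"x", 1}, {"y", 2}});   // a
    auto r = applyUpdates(q);
    assert(r.at("x") == 1 && r.at("y") == 3 && r.at("z") == 4);
}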
*/ + eval(state, /*v=*/v, /*v1=*/v, /*v2=*/rhs); + } +} + +void Expr::evalForUpdate(EvalState & state, Env & env, UpdateQueue & q, std::string_view errorCtx) +{ + Value v; + state.evalAttrs(env, this, v, getPos(), errorCtx); + q.push_back(v); +} + +void ExprOpUpdate::evalForUpdate(EvalState & state, Env & env, UpdateQueue & q) +{ + /* Output rightmost attrset first to the merge queue as the one + with the most priority. */ + e2->evalForUpdate(state, env, q, "in the right operand of the update (//) operator"); + e1->evalForUpdate(state, env, q, "in the left operand of the update (//) operator"); +} + +void ExprOpUpdate::evalForUpdate(EvalState & state, Env & env, UpdateQueue & q, std::string_view errorCtx) +{ + evalForUpdate(state, env, q); +} + void ExprOpConcatLists::eval(EvalState & state, Env & env, Value & v) { Value v1; @@ -2828,11 +2900,11 @@ bool EvalState::fullGC() #endif } +bool Counter::enabled = getEnv("NIX_SHOW_STATS").value_or("0") != "0"; + void EvalState::maybePrintStats() { - bool showStats = getEnv("NIX_SHOW_STATS").value_or("0") != "0"; - - if (showStats) { + if (Counter::enabled) { // Make the final heap size more deterministic. #if NIX_USE_BOEHMGC if (!fullGC()) { @@ -2848,10 +2920,12 @@ void EvalState::printStatistics() std::chrono::microseconds cpuTimeDuration = getCpuUserTime(); float cpuTime = std::chrono::duration_cast>(cpuTimeDuration).count(); - uint64_t bEnvs = nrEnvs * sizeof(Env) + nrValuesInEnvs * sizeof(Value *); - uint64_t bLists = nrListElems * sizeof(Value *); - uint64_t bValues = nrValues * sizeof(Value); - uint64_t bAttrsets = nrAttrsets * sizeof(Bindings) + nrAttrsInAttrsets * sizeof(Attr); + auto & memstats = mem.getStats(); + + uint64_t bEnvs = memstats.nrEnvs * sizeof(Env) + memstats.nrValuesInEnvs * sizeof(Value *); + uint64_t bLists = memstats.nrListElems * sizeof(Value *); + uint64_t bValues = memstats.nrValues * sizeof(Value); + uint64_t bAttrsets = memstats.nrAttrsets * sizeof(Bindings) + memstats.nrAttrsInAttrsets * sizeof(Attr); #if NIX_USE_BOEHMGC GC_word heapSize, totalBytes; @@ -2877,18 +2951,18 @@ void EvalState::printStatistics() #endif }; topObj["envs"] = { - {"number", nrEnvs}, - {"elements", nrValuesInEnvs}, + {"number", memstats.nrEnvs.load()}, + {"elements", memstats.nrValuesInEnvs.load()}, {"bytes", bEnvs}, }; - topObj["nrExprs"] = Expr::nrExprs; + topObj["nrExprs"] = Expr::nrExprs.load(); topObj["list"] = { - {"elements", nrListElems}, + {"elements", memstats.nrListElems.load()}, {"bytes", bLists}, - {"concats", nrListConcats}, + {"concats", nrListConcats.load()}, }; topObj["values"] = { - {"number", nrValues}, + {"number", memstats.nrValues.load()}, {"bytes", bValues}, }; topObj["symbols"] = { @@ -2896,9 +2970,9 @@ void EvalState::printStatistics() {"bytes", symbols.totalSize()}, }; topObj["sets"] = { - {"number", nrAttrsets}, + {"number", memstats.nrAttrsets.load()}, {"bytes", bAttrsets}, - {"elements", nrAttrsInAttrsets}, + {"elements", memstats.nrAttrsInAttrsets.load()}, }; topObj["sizes"] = { {"Env", sizeof(Env)}, @@ -2906,13 +2980,13 @@ void EvalState::printStatistics() {"Bindings", sizeof(Bindings)}, {"Attr", sizeof(Attr)}, }; - topObj["nrOpUpdates"] = nrOpUpdates; - topObj["nrOpUpdateValuesCopied"] = nrOpUpdateValuesCopied; - topObj["nrThunks"] = nrThunks; - topObj["nrAvoided"] = nrAvoided; - topObj["nrLookups"] = nrLookups; - topObj["nrPrimOpCalls"] = nrPrimOpCalls; - topObj["nrFunctionCalls"] = nrFunctionCalls; + topObj["nrOpUpdates"] = nrOpUpdates.load(); + topObj["nrOpUpdateValuesCopied"] = 
nrOpUpdateValuesCopied.load(); + topObj["nrThunks"] = nrThunks.load(); + topObj["nrAvoided"] = nrAvoided.load(); + topObj["nrLookups"] = nrLookups.load(); + topObj["nrPrimOpCalls"] = nrPrimOpCalls.load(); + topObj["nrFunctionCalls"] = nrFunctionCalls.load(); #if NIX_USE_BOEHMGC topObj["gc"] = { {"heapSize", heapSize}, @@ -3113,7 +3187,7 @@ std::optional EvalState::resolveLookupPathPath(const LookupPath::Pat /* Allow access to paths in the search path. */ if (initAccessControl) { - allowPath(path.path.abs()); + allowPathLegacy(path.path.abs()); if (store->isInStore(path.path.abs())) { try { allowClosure(store->toStorePath(path.path.abs()).first); @@ -3143,7 +3217,8 @@ Expr * EvalState::parse( docComments = &it->second; } - auto result = parseExprFromBuf(text, length, origin, basePath, symbols, settings, positions, *docComments, rootFS); + auto result = parseExprFromBuf( + text, length, origin, basePath, mem.exprs.alloc, symbols, settings, positions, *docComments, rootFS); result->bindVars(*this, staticEnv); diff --git a/src/libexpr/include/nix/expr/attr-set.hh b/src/libexpr/include/nix/expr/attr-set.hh index 8b8edddf4..46eecd9bd 100644 --- a/src/libexpr/include/nix/expr/attr-set.hh +++ b/src/libexpr/include/nix/expr/attr-set.hh @@ -4,13 +4,16 @@ #include "nix/expr/nixexpr.hh" #include "nix/expr/symbol-table.hh" +#include + #include #include -#include +#include +#include namespace nix { -class EvalState; +class EvalMemory; struct Value; /** @@ -48,11 +51,18 @@ static_assert( * by its size and its capacity, the capacity being the number of Attr * elements allocated after this structure, while the size corresponds to * the number of elements already inserted in this structure. + * + * Bindings can be efficiently `//`-composed into an intrusive linked list of "layers" + * that saves on copies and allocations. Each lookup (@see Bindings::get) traverses + * this linked list until a matching attribute is found (thus overlays earlier in + * the list take precedence). For iteration over the whole Bindings, an on-the-fly + * k-way merge is performed by Bindings::iterator class. */ class Bindings { public: - typedef uint32_t size_t; + using size_type = uint32_t; + PosIdx pos; /** @@ -62,7 +72,32 @@ public: static Bindings emptyBindings; private: - size_t size_ = 0; + /** + * Number of attributes in the attrs FAM (Flexible Array Member). + */ + size_type numAttrs = 0; + + /** + * Number of attributes with unique names in the layer chain. + * + * This is the *real* user-facing size of bindings, whereas @ref numAttrs is + * an implementation detail of the data structure. + */ + size_type numAttrsInChain = 0; + + /** + * Length of the layers list. + */ + uint32_t numLayers = 1; + + /** + * Bindings that this attrset is "layered" on top of. + */ + const Bindings * baseLayer = nullptr; + + /** + * Flexible array member of attributes. + */ Attr attrs[0]; Bindings() = default; @@ -71,15 +106,22 @@ private: Bindings & operator=(const Bindings &) = delete; Bindings & operator=(Bindings &&) = delete; + friend class BindingsBuilder; + + /** + * Maximum length of the Bindings layer chains. 
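The doc comment above describes lookups walking the layer chain and whole-set iteration doing an on-the-fly k-way merge. As a rough standalone model of those two operations — sorted `std::vector`s stand in for the flexible-array `Attr` storage, and the names `MiniAttr`/`LayeredAttrs` are invented, not the real classes:

#include <algorithm>
#include <cassert>
#include <string>
#include <vector>

struct MiniAttr { std::string name; int value; };

// One layer: a name-sorted array of attributes plus a link to the layer below,
// loosely modelling the numAttrs/baseLayer fields above.
struct LayeredAttrs
{
    std::vector<MiniAttr> attrs;           // kept sorted by name
    const LayeredAttrs * baseLayer = nullptr;

    // Lookup walks the chain top-down, binary-searching each layer, so layers
    // earlier in the chain shadow later ones (cf. Bindings::get).
    const MiniAttr * get(const std::string & name) const
    {
        for (auto l = this; l; l = l->baseLayer) {
            auto it = std::lower_bound(
                l->attrs.begin(), l->attrs.end(), name,
                [](const MiniAttr & a, const std::string & n) { return a.name < n; });
            if (it != l->attrs.end() && it->name == name)
                return &*it;
        }
        return nullptr;
    }

    // Whole-set iteration has to merge the layers and drop shadowed names; the
    // real Bindings::iterator does this lazily with a small priority queue,
    // here it is done eagerly for brevity.
    std::vector<MiniAttr> merged() const
    {
        std::vector<MiniAttr> out;
        for (auto l = this; l; l = l->baseLayer)
            for (auto & a : l->attrs)
                if (!std::any_of(out.begin(), out.end(),
                                 [&](const MiniAttr & b) { return b.name == a.name; }))
                    out.push_back(a);
        std::sort(out.begin(), out.end(),
                  [](const MiniAttr & a, const MiniAttr & b) { return a.name < b.name; });
        return out;
    }
};

int main()
{
    LayeredAttrs base{{{"x", 1}, {"y", 2}}};
    LayeredAttrs top{{{"y", 3}}, &base};   // result of layering { y = 3; } on top
    assert(top.get("y")->value == 3);      // the overlay wins
    assert(top.get("x")->value == 1);      // falls through to the base layer
    assert(top.merged().size() == 2);      // shadowed names are reported once
}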
+ */ + static constexpr unsigned maxLayers = 8; + public: - size_t size() const + size_type size() const { - return size_; + return numAttrsInChain; } bool empty() const { - return !size_; + return size() == 0; } class iterator @@ -94,77 +136,276 @@ public: friend class Bindings; private: - pointer ptr = nullptr; - - explicit iterator(pointer ptr) - : ptr(ptr) + struct BindingsCursor { + /** + * Attr that the cursor currently points to. + */ + pointer current; + + /** + * One past the end pointer to the contiguous buffer of Attrs. + */ + pointer end; + + /** + * Priority of the value. Lesser values have more priority (i.e. they override + * attributes that appear later in the linked list of Bindings). + */ + uint32_t priority; + + pointer operator->() const noexcept + { + return current; + } + + reference get() const noexcept + { + return *current; + } + + bool empty() const noexcept + { + return current == end; + } + + void increment() noexcept + { + ++current; + } + + void consume(Symbol name) noexcept + { + while (!empty() && current->name <= name) + ++current; + } + + GENERATE_CMP(BindingsCursor, me->current->name, me->priority) + }; + + using QueueStorageType = boost::container::static_vector; + + /** + * Comparator implementing the override priority / name ordering + * for BindingsCursor. + */ + static constexpr auto comp = std::greater(); + + /** + * A priority queue used to implement an on-the-fly k-way merge. + */ + QueueStorageType cursorHeap; + + /** + * The attribute the iterator currently points to. + */ + pointer current = nullptr; + + /** + * Whether iterating over a single attribute and not a merge chain. + */ + bool doMerge = true; + + void push(BindingsCursor cursor) noexcept + { + cursorHeap.push_back(cursor); + std::ranges::make_heap(cursorHeap, comp); + } + + [[nodiscard]] BindingsCursor pop() noexcept + { + std::ranges::pop_heap(cursorHeap, comp); + auto cursor = cursorHeap.back(); + cursorHeap.pop_back(); + return cursor; + } + + iterator & finished() noexcept + { + current = nullptr; + return *this; + } + + void next(BindingsCursor cursor) noexcept + { + current = &cursor.get(); + cursor.increment(); + + if (!cursor.empty()) + push(cursor); + } + + std::optional consumeAllUntilCurrentName() noexcept + { + auto cursor = pop(); + Symbol lastHandledName = current->name; + + while (cursor->name <= lastHandledName) { + cursor.consume(lastHandledName); + if (!cursor.empty()) + push(cursor); + + if (cursorHeap.empty()) + return std::nullopt; + + cursor = pop(); + } + + return cursor; + } + + explicit iterator(const Bindings & attrs) noexcept + : doMerge(attrs.baseLayer) + { + auto pushBindings = [this, priority = unsigned{0}](const Bindings & layer) mutable { + auto first = layer.attrs; + push( + BindingsCursor{ + .current = first, + .end = first + layer.numAttrs, + .priority = priority++, + }); + }; + + if (!doMerge) { + if (attrs.empty()) + return; + + current = attrs.attrs; + pushBindings(attrs); + + return; + } + + const Bindings * layer = &attrs; + while (layer) { + if (layer->numAttrs != 0) + pushBindings(*layer); + layer = layer->baseLayer; + } + + if (cursorHeap.empty()) + return; + + next(pop()); } public: iterator() = default; - reference operator*() const + reference operator*() const noexcept { - return *ptr; + return *current; } - const value_type * operator->() const + pointer operator->() const noexcept { - return ptr; + return current; } - iterator & operator++() + iterator & operator++() noexcept { - ++ptr; + if (!doMerge) { + ++current; + if (current == 
cursorHeap.front().end) + return finished(); + return *this; + } + + if (cursorHeap.empty()) + return finished(); + + auto cursor = consumeAllUntilCurrentName(); + if (!cursor) + return finished(); + + next(*cursor); return *this; } - iterator operator++(int) + iterator operator++(int) noexcept { - pointer tmp = ptr; + iterator tmp = *this; ++*this; - return iterator(tmp); + return tmp; } - bool operator==(const iterator & rhs) const = default; + bool operator==(const iterator & rhs) const noexcept + { + return current == rhs.current; + } }; using const_iterator = iterator; void push_back(const Attr & attr) { - attrs[size_++] = attr; + attrs[numAttrs++] = attr; + numAttrsInChain = numAttrs; } - const Attr * get(Symbol name) const + /** + * Get attribute by name or nullptr if no such attribute exists. + */ + const Attr * get(Symbol name) const noexcept { - Attr key(name, 0); - auto first = attrs; - auto last = attrs + size_; - const Attr * i = std::lower_bound(first, last, key); - if (i != last && i->name == name) - return i; + auto getInChunk = [key = Attr{name, nullptr}](const Bindings & chunk) -> const Attr * { + auto first = chunk.attrs; + auto last = first + chunk.numAttrs; + const Attr * i = std::lower_bound(first, last, key); + if (i != last && i->name == key.name) + return i; + return nullptr; + }; + + const Bindings * currentChunk = this; + while (currentChunk) { + const Attr * maybeAttr = getInChunk(*currentChunk); + if (maybeAttr) + return maybeAttr; + currentChunk = currentChunk->baseLayer; + } + return nullptr; } + /** + * Check if the layer chain is full. + */ + bool isLayerListFull() const noexcept + { + return numLayers == Bindings::maxLayers; + } + + /** + * Test if the length of the linked list of layers is greater than 1. + */ + bool isLayered() const noexcept + { + return numLayers > 1; + } + const_iterator begin() const { - return const_iterator(attrs); + return const_iterator(*this); } const_iterator end() const { - return const_iterator(attrs + size_); + return const_iterator(); } - Attr & operator[](size_t pos) + Attr & operator[](size_type pos) { + if (isLayered()) [[unlikely]] + unreachable(); return attrs[pos]; } - const Attr & operator[](size_t pos) const + const Attr & operator[](size_type pos) const { + if (isLayered()) [[unlikely]] + unreachable(); return attrs[pos]; } @@ -176,17 +417,16 @@ public: std::vector lexicographicOrder(const SymbolTable & symbols) const { std::vector res; - res.reserve(size_); - for (size_t n = 0; n < size_; n++) - res.emplace_back(&attrs[n]); - std::sort(res.begin(), res.end(), [&](const Attr * a, const Attr * b) { + res.reserve(size()); + std::ranges::transform(*this, std::back_inserter(res), [](const Attr & a) { return &a; }); + std::ranges::sort(res, [&](const Attr * a, const Attr * b) { std::string_view sa = symbols[a->name], sb = symbols[b->name]; return sa < sb; }); return res; } - friend class EvalState; + friend class EvalMemory; }; static_assert(std::forward_iterator); @@ -202,23 +442,38 @@ class BindingsBuilder final public: // needed by std::back_inserter using value_type = Attr; - using size_type = Bindings::size_t; + using size_type = Bindings::size_type; private: Bindings * bindings; - Bindings::size_t capacity_; + Bindings::size_type capacity_; - friend class EvalState; + friend class EvalMemory; - BindingsBuilder(EvalState & state, Bindings * bindings, size_type capacity) + BindingsBuilder(EvalMemory & mem, SymbolTable & symbols, Bindings * bindings, size_type capacity) : bindings(bindings) , capacity_(capacity) - , 
state(state) + , mem(mem) + , symbols(symbols) { } + bool hasBaseLayer() const noexcept + { + return bindings->baseLayer; + } + + void finishSizeIfNecessary() + { + if (hasBaseLayer()) + /* NOTE: Do not use std::ranges::distance, since Bindings is a sized + range, but we are calculating this size here. */ + bindings->numAttrsInChain = std::distance(bindings->begin(), bindings->end()); + } + public: - std::reference_wrapper state; + std::reference_wrapper mem; + std::reference_wrapper symbols; void insert(Symbol name, Value * value, PosIdx pos = noPos) { @@ -232,10 +487,26 @@ public: void push_back(const Attr & attr) { - assert(bindings->size() < capacity_); + assert(bindings->numAttrs < capacity_); bindings->push_back(attr); } + /** + * "Layer" the newly constructured Bindings on top of another attribute set. + * + * This effectively performs an attribute set merge, while giving preference + * to attributes from the newly constructed Bindings in case of duplicate attribute + * names. + * + * This operation amortizes the need to copy over all attributes and allows + * for efficient implementation of attribute set merges (ExprOpUpdate::eval). + */ + void layerOnTopOf(const Bindings & base) noexcept + { + bindings->baseLayer = &base; + bindings->numLayers = base.numLayers + 1; + } + Value & alloc(Symbol name, PosIdx pos = noPos); Value & alloc(std::string_view name, PosIdx pos = noPos); @@ -243,11 +514,13 @@ public: Bindings * finish() { bindings->sort(); + finishSizeIfNecessary(); return bindings; } Bindings * alreadySorted() { + finishSizeIfNecessary(); return bindings; } diff --git a/src/libexpr/include/nix/expr/counter.hh b/src/libexpr/include/nix/expr/counter.hh new file mode 100644 index 000000000..efbf23de3 --- /dev/null +++ b/src/libexpr/include/nix/expr/counter.hh @@ -0,0 +1,70 @@ +#pragma once + +#include +#include + +namespace nix { + +/** + * An atomic counter aligned on a cache line to prevent false sharing. + * The counter is only enabled when the `NIX_SHOW_STATS` environment + * variable is set. This is to prevent contention on these counters + * when multi-threaded evaluation is enabled. + */ +struct alignas(64) Counter +{ + using value_type = uint64_t; + + std::atomic inner{0}; + + static bool enabled; + + Counter() {} + + operator value_type() const noexcept + { + return inner; + } + + void operator=(value_type n) noexcept + { + inner = n; + } + + value_type load() const noexcept + { + return inner; + } + + value_type operator++() noexcept + { + return enabled ? ++inner : 0; + } + + value_type operator++(int) noexcept + { + return enabled ? inner++ : 0; + } + + value_type operator--() noexcept + { + return enabled ? --inner : 0; + } + + value_type operator--(int) noexcept + { + return enabled ? inner-- : 0; + } + + value_type operator+=(value_type n) noexcept + { + return enabled ? inner += n : 0; + } + + value_type operator-=(value_type n) noexcept + { + return enabled ? inner -= n : 0; + } +}; + +} // namespace nix diff --git a/src/libexpr/include/nix/expr/eval-inline.hh b/src/libexpr/include/nix/expr/eval-inline.hh index 749e51537..1320da914 100644 --- a/src/libexpr/include/nix/expr/eval-inline.hh +++ b/src/libexpr/include/nix/expr/eval-inline.hh @@ -26,7 +26,7 @@ inline void * allocBytes(size_t n) } [[gnu::always_inline]] -Value * EvalState::allocValue() +Value * EvalMemory::allocValue() { #if NIX_USE_BOEHMGC /* We use the boehm batch allocator to speed up allocations of Values (of which there are many). 
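The `Counter` type added in `counter.hh` above trades accuracy-when-disabled for cheap hot paths. A compressed standalone sketch of that gating — the `MiniCounter` name and the simplified environment check are mine, not from the patch:

#include <atomic>
#include <cstdint>
#include <cstdlib>
#include <iostream>

// Minimal stand-in for Counter: increments become no-ops unless statistics
// were requested, so hot evaluation paths pay almost nothing for the counters
// when NIX_SHOW_STATS is off.
struct alignas(64) MiniCounter
{
    std::atomic<uint64_t> inner{0};
    static bool enabled;

    uint64_t operator++() noexcept { return enabled ? ++inner : 0; }
    uint64_t load() const noexcept { return inner.load(); }
};

// The real flag is parsed as getEnv("NIX_SHOW_STATS").value_or("0") != "0";
// this simplified version only checks for presence of the variable.
bool MiniCounter::enabled = std::getenv("NIX_SHOW_STATS") != nullptr;

static MiniCounter nrThunks; // one cache-line-aligned counter per statistic

int main()
{
    for (int i = 0; i < 1000; ++i)
        ++nrThunks; // a single atomic increment when enabled, just a branch otherwise
    if (MiniCounter::enabled)
        std::cout << "nrThunks = " << nrThunks.load() << "\n";
}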
@@ -48,15 +48,15 @@ Value * EvalState::allocValue() void * p = allocBytes(sizeof(Value)); #endif - nrValues++; + stats.nrValues++; return (Value *) p; } [[gnu::always_inline]] -Env & EvalState::allocEnv(size_t size) +Env & EvalMemory::allocEnv(size_t size) { - nrEnvs++; - nrValuesInEnvs += size; + stats.nrEnvs++; + stats.nrValuesInEnvs += size; Env * env; diff --git a/src/libexpr/include/nix/expr/eval-settings.hh b/src/libexpr/include/nix/expr/eval-settings.hh index 4c9db0c73..250c2cddf 100644 --- a/src/libexpr/include/nix/expr/eval-settings.hh +++ b/src/libexpr/include/nix/expr/eval-settings.hh @@ -342,6 +342,25 @@ struct EvalSettings : Config This is useful for improving code readability and making path literals more explicit. )"}; + + Setting bindingsUpdateLayerRhsSizeThreshold{ + this, + sizeof(void *) == 4 ? 8192 : 16, + "eval-attrset-update-layer-rhs-threshold", + R"( + Tunes the maximum size of an attribute set that, when used + as the right operand of an [attribute set update expression](@docroot@/language/operators.md#update), + uses a more space-efficient linked-list representation of attribute sets. + + Setting this to larger values generally leads to fewer memory allocations, + but may lead to worse evaluation performance. + + A value of `0` disables this optimization completely. + + This is an advanced performance tuning option and typically should not be changed. + The default value is chosen to balance performance and memory usage. On 32-bit systems + where memory is scarce, the default is a large value to reduce the number of allocations. + )"}; }; /** diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index 8f7a0ec32..2601d8de8 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -16,6 +16,7 @@ #include "nix/expr/search-path.hh" #include "nix/expr/repl-exit-status.hh" #include "nix/util/ref.hh" +#include "nix/expr/counter.hh" // For `NIX_USE_BOEHMGC`, and if that's set, `GC_THREADS` #include "nix/expr/config.hh" @@ -48,6 +49,7 @@ class StorePath; struct SingleDerivedPath; enum RepairFlag : bool; struct MemorySourceAccessor; +struct MountedSourceAccessor; namespace eval_cache { class EvalCache; @@ -300,6 +302,68 @@ struct StaticEvalSymbols } }; +class EvalMemory +{ +#if NIX_USE_BOEHMGC + /** + * Allocation cache for GC'd Value objects. + */ + std::shared_ptr valueAllocCache; + + /** + * Allocation cache for size-1 Env objects.
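Tying this setting back to the evaluator: the heuristic it feeds is the `shouldLayer` lambda in `ExprOpUpdate::eval` earlier in this patch. A small restatement of that decision — the function name here is illustrative, not part of the patch:

#include <cassert>
#include <cstddef>

// Restates the shouldLayer heuristic from ExprOpUpdate::eval: layer the right
// operand on top of the left one only when the left operand's layer chain is
// not yet at maxLayers and the right operand is no bigger than
// eval-attrset-update-layer-rhs-threshold.
static bool shouldLayerRhs(bool lhsLayerListFull, std::size_t rhsSize, std::size_t threshold)
{
    if (lhsLayerListFull)
        return false;
    if (rhsSize > threshold)
        return false;
    return true;
}

int main()
{
    std::size_t threshold = sizeof(void *) == 4 ? 8192 : 16; // the default above
    assert(shouldLayerRhs(false, 4, threshold));  // small RHS: layered, LHS not copied
    assert(!shouldLayerRhs(false, 1000, 16));     // RHS above threshold: full copy/merge
    assert(!shouldLayerRhs(true, 4, threshold));  // chain already at maxLayers: full copy
    assert(!shouldLayerRhs(false, 1, 0));         // threshold 0 disables layering entirely
}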
+ */ + std::shared_ptr env1AllocCache; +#endif + +public: + struct Statistics + { + Counter nrEnvs; + Counter nrValuesInEnvs; + Counter nrValues; + Counter nrAttrsets; + Counter nrAttrsInAttrsets; + Counter nrListElems; + }; + + EvalMemory(); + + EvalMemory(const EvalMemory &) = delete; + EvalMemory(EvalMemory &&) = delete; + EvalMemory & operator=(const EvalMemory &) = delete; + EvalMemory & operator=(EvalMemory &&) = delete; + + inline Value * allocValue(); + inline Env & allocEnv(size_t size); + + Bindings * allocBindings(size_t capacity); + + BindingsBuilder buildBindings(SymbolTable & symbols, size_t capacity) + { + return BindingsBuilder(*this, symbols, allocBindings(capacity), capacity); + } + + ListBuilder buildList(size_t size) + { + stats.nrListElems += size; + return ListBuilder(size); + } + + const Statistics & getStats() const & + { + return stats; + } + + /** + * Storage for the AST nodes + */ + Exprs exprs; + +private: + Statistics stats; +}; + class EvalState : public std::enable_shared_from_this { public: @@ -310,6 +374,8 @@ public: SymbolTable symbols; PosTable positions; + EvalMemory mem; + /** * If set, force copying files to the Nix store even if they * already exist there. @@ -319,7 +385,7 @@ public: /** * The accessor corresponding to `store`. */ - const ref storeFS; + const ref storeFS; /** * The accessor for the root filesystem. @@ -439,18 +505,6 @@ private: */ std::shared_ptr regexCache; -#if NIX_USE_BOEHMGC - /** - * Allocation cache for GC'd Value objects. - */ - std::shared_ptr valueAllocCache; - - /** - * Allocation cache for size-1 Env objects. - */ - std::shared_ptr env1AllocCache; -#endif - public: EvalState( @@ -461,6 +515,15 @@ public: std::shared_ptr buildStore = nullptr); ~EvalState(); + /** + * A wrapper around EvalMemory::allocValue() to avoid code churn when it + * was introduced. + */ + inline Value * allocValue() + { + return mem.allocValue(); + } + LookupPath getLookupPath() { return lookupPath; @@ -488,8 +551,11 @@ public: /** * Allow access to a path. + * + * Only for restrict eval: pure eval just whitelist store paths, + * never arbitrary paths. */ - void allowPath(const Path & path); + void allowPathLegacy(const Path & path); /** * Allow access to a store path. Note that this gets remapped to @@ -829,22 +895,14 @@ public: */ void autoCallFunction(const Bindings & args, Value & fun, Value & res); - /** - * Allocation primitives. 
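Call sites keep going through the thin `EvalState` wrappers rather than touching `EvalMemory` directly. A sketch of what a typical consumer looks like after this change — `mkGreeting` is an invented helper, while `buildBindings`, `alloc`, and `mkAttrs` are the calls shown in this patch:

#include "nix/expr/eval.hh"

namespace nix {

// Builds { greeting = "hello"; count = 2; } through the EvalState wrappers,
// which now forward to EvalMemory::buildBindings(symbols, capacity).
static void mkGreeting(EvalState & state, Value & v)
{
    auto attrs = state.buildBindings(2);        // capacity for two attributes
    attrs.alloc("greeting").mkString("hello");  // alloc() hands back a fresh Value &
    attrs.alloc("count").mkInt(2);
    v.mkAttrs(attrs);                           // finish(): sort and install the Bindings
}

} // namespace nix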
- */ - inline Value * allocValue(); - inline Env & allocEnv(size_t size); - - Bindings * allocBindings(size_t capacity); - BindingsBuilder buildBindings(size_t capacity) { - return BindingsBuilder(*this, allocBindings(capacity), capacity); + return mem.buildBindings(symbols, capacity); } ListBuilder buildList(size_t size) { - return ListBuilder(*this, size); + return mem.buildList(size); } /** @@ -961,19 +1019,13 @@ private: */ std::string mkSingleDerivedPathStringRaw(const SingleDerivedPath & p); - unsigned long nrEnvs = 0; - unsigned long nrValuesInEnvs = 0; - unsigned long nrValues = 0; - unsigned long nrListElems = 0; - unsigned long nrLookups = 0; - unsigned long nrAttrsets = 0; - unsigned long nrAttrsInAttrsets = 0; - unsigned long nrAvoided = 0; - unsigned long nrOpUpdates = 0; - unsigned long nrOpUpdateValuesCopied = 0; - unsigned long nrListConcats = 0; - unsigned long nrPrimOpCalls = 0; - unsigned long nrFunctionCalls = 0; + Counter nrLookups; + Counter nrAvoided; + Counter nrOpUpdates; + Counter nrOpUpdateValuesCopied; + Counter nrListConcats; + Counter nrPrimOpCalls; + Counter nrFunctionCalls; bool countCalls; diff --git a/src/libexpr/include/nix/expr/gc-small-vector.hh b/src/libexpr/include/nix/expr/gc-small-vector.hh index fdd80b2c7..95c028e5a 100644 --- a/src/libexpr/include/nix/expr/gc-small-vector.hh +++ b/src/libexpr/include/nix/expr/gc-small-vector.hh @@ -26,4 +26,20 @@ using SmallValueVector = SmallVector; template using SmallTemporaryValueVector = SmallVector; +/** + * For functions where we do not expect deep recursion, we can use a sizable + * part of the stack as free allocation space. + * + * Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes. + */ +constexpr size_t nonRecursiveStackReservation = 128; + +/** + * Functions that may be applied to self-similar inputs, such as concatMap on a + * tree, should reserve a smaller part of the stack for allocation. + * + * Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes. + */ +constexpr size_t conservativeStackReservation = 16; + } // namespace nix diff --git a/src/libexpr/include/nix/expr/meson.build b/src/libexpr/include/nix/expr/meson.build index 04f8eaf71..44ff171c2 100644 --- a/src/libexpr/include/nix/expr/meson.build +++ b/src/libexpr/include/nix/expr/meson.build @@ -10,6 +10,7 @@ config_pub_h = configure_file( headers = [ config_pub_h ] + files( 'attr-path.hh', 'attr-set.hh', + 'counter.hh', 'eval-cache.hh', 'eval-error.hh', 'eval-gc.hh', diff --git a/src/libexpr/include/nix/expr/nixexpr.hh b/src/libexpr/include/nix/expr/nixexpr.hh index 414eb5116..747a8e4b2 100644 --- a/src/libexpr/include/nix/expr/nixexpr.hh +++ b/src/libexpr/include/nix/expr/nixexpr.hh @@ -3,11 +3,14 @@ #include #include +#include +#include "nix/expr/gc-small-vector.hh" #include "nix/expr/value.hh" #include "nix/expr/symbol-table.hh" #include "nix/expr/eval-error.hh" #include "nix/util/pos-idx.hh" +#include "nix/expr/counter.hh" namespace nix { @@ -80,6 +83,15 @@ typedef std::vector AttrPath; std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath); +using UpdateQueue = SmallTemporaryValueVector; + +class Exprs +{ + std::pmr::monotonic_buffer_resource buffer; +public: + std::pmr::polymorphic_allocator alloc{&buffer}; +}; + /* Abstract syntax of Nix expressions.
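The `Exprs` arena above hands the parser a `std::pmr::polymorphic_allocator`, and the reworked `ExprString` constructor further down copies its text into that arena so the string data lives exactly as long as the AST. A standalone sketch of that allocation pattern, using only the standard library:

#include <cassert>
#include <cstring>
#include <memory_resource>
#include <string_view>

// Copy a string_view into an arena as a NUL-terminated C string, the way the
// new ExprString(alloc, sv) constructor does. The whole buffer is released at
// once when the monotonic resource is destroyed; per-string deallocation is a no-op.
static const char * internString(std::pmr::polymorphic_allocator<char> & alloc, std::string_view sv)
{
    if (sv.empty())
        return "";
    char * s = alloc.allocate(sv.size() + 1);
    sv.copy(s, sv.size());
    s[sv.size()] = '\0';
    return s;
}

int main()
{
    std::pmr::monotonic_buffer_resource buffer;
    std::pmr::polymorphic_allocator<char> alloc{&buffer};
    const char * s = internString(alloc, "hello world");
    assert(std::strcmp(s, "hello world") == 0);
}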
*/ struct Expr @@ -89,7 +101,7 @@ struct Expr Symbol sub, lessThan, mul, div, or_, findFile, nixPath, body; }; - static unsigned long nrExprs; + static Counter nrExprs; Expr() { @@ -99,8 +111,25 @@ struct Expr virtual ~Expr() {}; virtual void show(const SymbolTable & symbols, std::ostream & str) const; virtual void bindVars(EvalState & es, const std::shared_ptr & env); + + /** Normal evaluation, implemented directly by all subclasses. */ virtual void eval(EvalState & state, Env & env, Value & v); + + /** + * Create a thunk for the delayed computation of the given expression + * in the given environment. But if the expression is a variable, + * then look it up right away. This significantly reduces the number + * of thunks allocated. + */ virtual Value * maybeThunk(EvalState & state, Env & env); + + /** + * Only called when performing an attrset update: `//` or similar. + * Instead of writing to a Value &, this function writes to an UpdateQueue. + * This allows the expression to perform multiple updates in a delayed manner, gathering up all the updates before + * applying them. + */ + virtual void evalForUpdate(EvalState & state, Env & env, UpdateQueue & q, std::string_view errorCtx); virtual void setName(Symbol name); virtual void setDocComment(DocComment docComment) {}; @@ -152,13 +181,28 @@ struct ExprFloat : Expr struct ExprString : Expr { - std::string s; Value v; - ExprString(std::string && s) - : s(std::move(s)) + /** + * This is only for strings already allocated in our polymorphic allocator, + * or that live at least that long (e.g. c++ string literals) + */ + ExprString(const char * s) { - v.mkStringNoCopy(this->s.data()); + v.mkStringNoCopy(s); + }; + + ExprString(std::pmr::polymorphic_allocator & alloc, std::string_view sv) + { + auto len = sv.length(); + if (len == 0) { + v.mkStringNoCopy(""); + return; + } + char * s = alloc.allocate(len + 1); + sv.copy(s, len); + s[len] = '\0'; + v.mkStringNoCopy(s); }; Value * maybeThunk(EvalState & state, Env & env) override; @@ -565,36 +609,39 @@ struct ExprOpNot : Expr COMMON_METHODS }; -#define MakeBinOp(name, s) \ - struct name : Expr \ - { \ - PosIdx pos; \ - Expr *e1, *e2; \ - name(Expr * e1, Expr * e2) \ - : e1(e1) \ - , e2(e2) {}; \ - name(const PosIdx & pos, Expr * e1, Expr * e2) \ - : pos(pos) \ - , e1(e1) \ - , e2(e2) {}; \ - void show(const SymbolTable & symbols, std::ostream & str) const override \ - { \ - str << "("; \ - e1->show(symbols, str); \ - str << " " s " "; \ - e2->show(symbols, str); \ - str << ")"; \ - } \ - void bindVars(EvalState & es, const std::shared_ptr & env) override \ - { \ - e1->bindVars(es, env); \ - e2->bindVars(es, env); \ - } \ - void eval(EvalState & state, Env & env, Value & v) override; \ - PosIdx getPos() const override \ - { \ - return pos; \ - } \ +#define MakeBinOpMembers(name, s) \ + PosIdx pos; \ + Expr *e1, *e2; \ + name(Expr * e1, Expr * e2) \ + : e1(e1) \ + , e2(e2){}; \ + name(const PosIdx & pos, Expr * e1, Expr * e2) \ + : pos(pos) \ + , e1(e1) \ + , e2(e2){}; \ + void show(const SymbolTable & symbols, std::ostream & str) const override \ + { \ + str << "("; \ + e1->show(symbols, str); \ + str << " " s " "; \ + e2->show(symbols, str); \ + str << ")"; \ + } \ + void bindVars(EvalState & es, const std::shared_ptr & env) override \ + { \ + e1->bindVars(es, env); \ + e2->bindVars(es, env); \ + } \ + void eval(EvalState & state, Env & env, Value & v) override; \ + PosIdx getPos() const override \ + { \ + return pos; \ + } + +#define MakeBinOp(name, s) \ + struct name : Expr \ + { \ + 
MakeBinOpMembers(name, s) \ } MakeBinOp(ExprOpEq, "=="); @@ -602,9 +649,20 @@ MakeBinOp(ExprOpNEq, "!="); MakeBinOp(ExprOpAnd, "&&"); MakeBinOp(ExprOpOr, "||"); MakeBinOp(ExprOpImpl, "->"); -MakeBinOp(ExprOpUpdate, "//"); MakeBinOp(ExprOpConcatLists, "++"); +struct ExprOpUpdate : Expr +{ +private: + /** Special case for merging of two attrsets. */ + void eval(EvalState & state, Value & v, Value & v1, Value & v2); + void evalForUpdate(EvalState & state, Env & env, UpdateQueue & q); + +public: + MakeBinOpMembers(ExprOpUpdate, "//"); + virtual void evalForUpdate(EvalState & state, Env & env, UpdateQueue & q, std::string_view errorCtx) override; +}; + struct ExprConcatStrings : Expr { PosIdx pos; diff --git a/src/libexpr/include/nix/expr/parser-state.hh b/src/libexpr/include/nix/expr/parser-state.hh index e689678de..55dce3047 100644 --- a/src/libexpr/include/nix/expr/parser-state.hh +++ b/src/libexpr/include/nix/expr/parser-state.hh @@ -24,7 +24,6 @@ struct StringToken } }; -// This type must be trivially copyable; see YYLTYPE_IS_TRIVIAL in parser.y. struct ParserLocation { int beginOffset; @@ -44,9 +43,6 @@ struct ParserLocation beginOffset = stashedBeginOffset; endOffset = stashedEndOffset; } - - /** Latest doc comment position, or 0. */ - int doc_comment_first_column, doc_comment_last_column; }; struct LexerState @@ -82,6 +78,7 @@ struct LexerState struct ParserState { const LexerState & lexerState; + std::pmr::polymorphic_allocator & alloc; SymbolTable & symbols; PosTable & positions; Expr * result; @@ -327,7 +324,7 @@ ParserState::stripIndentation(const PosIdx pos, std::vectoremplace_back(i->first, new ExprString(std::move(s2))); + es2->emplace_back(i->first, new ExprString(alloc, s2)); } }; for (; i != es.end(); ++i, --n) { diff --git a/src/libexpr/include/nix/expr/primops.hh b/src/libexpr/include/nix/expr/primops.hh index 885a53e9a..6407ba84e 100644 --- a/src/libexpr/include/nix/expr/primops.hh +++ b/src/libexpr/include/nix/expr/primops.hh @@ -8,22 +8,6 @@ namespace nix { -/** - * For functions where we do not expect deep recursion, we can use a sizable - * part of the stack a free allocation space. - * - * Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes. - */ -constexpr size_t nonRecursiveStackReservation = 128; - -/** - * Functions that maybe applied to self-similar inputs, such as concatMap on a - * tree, should reserve a smaller part of the stack for allocation. - * - * Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes. - */ -constexpr size_t conservativeStackReservation = 16; - struct RegisterPrimOp { typedef std::vector PrimOps; diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index e526fcde0..22d85dc99 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -155,7 +155,7 @@ class ListBuilder Value * inlineElems[2] = {nullptr, nullptr}; public: Value ** elems; - ListBuilder(EvalState & state, size_t size); + ListBuilder(size_t size); // NOTE: Can be noexcept because we are just copying integral values and // raw pointers. 
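With `ListBuilder` decoupled from `EvalState`, the element count is now recorded by `EvalMemory::buildList`, but call sites look the same. A sketch of typical usage — the `mkSmallList` helper is invented, while `buildList`, `allocValue`, `mkInt`, and `mkList` are existing APIs:

#include "nix/expr/eval.hh"

namespace nix {

// Builds the Nix list [ 1 2 3 ] through the EvalState::buildList wrapper,
// which now forwards to EvalMemory::buildList.
static void mkSmallList(EvalState & state, Value & v)
{
    auto list = state.buildList(3);
    for (size_t i = 0; i < 3; ++i) {
        auto elem = state.allocValue();
        elem->mkInt(int64_t(i + 1));
        list[i] = elem;     // the builder exposes the element slots directly
    }
    v.mkList(list);
}

} // namespace nix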
diff --git a/src/libexpr/lexer-helpers.cc b/src/libexpr/lexer-helpers.cc index 927e3cc73..59f6f6f70 100644 --- a/src/libexpr/lexer-helpers.cc +++ b/src/libexpr/lexer-helpers.cc @@ -1,11 +1,11 @@ #include "lexer-helpers.hh" -void nix::lexer::internal::initLoc(YYLTYPE * loc) +void nix::lexer::internal::initLoc(Parser::location_type * loc) { loc->beginOffset = loc->endOffset = 0; } -void nix::lexer::internal::adjustLoc(yyscan_t yyscanner, YYLTYPE * loc, const char * s, size_t len) +void nix::lexer::internal::adjustLoc(yyscan_t yyscanner, Parser::location_type * loc, const char * s, size_t len) { loc->stash(); diff --git a/src/libexpr/lexer-helpers.hh b/src/libexpr/lexer-helpers.hh index 49865f794..b60fb9e7d 100644 --- a/src/libexpr/lexer-helpers.hh +++ b/src/libexpr/lexer-helpers.hh @@ -2,16 +2,12 @@ #include -// including the generated headers twice leads to errors -#ifndef BISON_HEADER -# include "lexer-tab.hh" -# include "parser-tab.hh" -#endif +#include "parser-scanner-decls.hh" namespace nix::lexer::internal { -void initLoc(YYLTYPE * loc); +void initLoc(Parser::location_type * loc); -void adjustLoc(yyscan_t yyscanner, YYLTYPE * loc, const char * s, size_t len); +void adjustLoc(yyscan_t yyscanner, Parser::location_type * loc, const char * s, size_t len); } // namespace nix::lexer::internal diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index 1005f9f7e..f420fc13f 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -82,6 +82,10 @@ static void requireExperimentalFeature(const ExperimentalFeature & feature, cons } +using enum nix::Parser::token::token_kind_type; +using YYSTYPE = nix::Parser::value_type; +using YYLTYPE = nix::Parser::location_type; + // yacc generates code that uses unannotated fallthrough. #pragma GCC diagnostic ignored "-Wimplicit-fallthrough" diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index 55a36c1bd..d24e7fae3 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -53,7 +53,12 @@ deps_other += boost nlohmann_json = dependency('nlohmann_json', version : '>= 3.9') deps_public += nlohmann_json -bdw_gc = dependency('bdw-gc', required : get_option('gc')) +bdw_gc_required = get_option('gc').disable_if( + 'address' in get_option('b_sanitize'), + error_message : 'Building with Boehm GC and ASAN is not supported', +) + +bdw_gc = dependency('bdw-gc', required : bdw_gc_required) if bdw_gc.found() deps_public += bdw_gc foreach funcspec : [ @@ -64,6 +69,10 @@ if bdw_gc.found() define_value = cxx.has_function(funcspec).to_int() configdata_priv.set(define_name, define_value) endforeach + if host_machine.system() == 'cygwin' + # undefined reference to `__wrap__Znwm' + configdata_pub.set('GC_NO_INLINE_STD_NEW', 1) + endif endif # Used in public header. Affects ABI! 
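The unit-test packages in this patch export `ASAN_OPTIONS=abort_on_error=1:print_summary=1:detect_leaks=0`. The same defaults can also be compiled into a test binary through AddressSanitizer's documented `__asan_default_options` hook; a hypothetical sketch, which may or may not match what the `asan-options` build-support directory referenced in the following hunks provides:

// Hypothetical translation unit; the real nix-meson-build-support/asan-options
// contents are not shown in this patch and may differ.
extern "C" const char * __asan_default_options()
{
    // Same options as the ASAN_OPTIONS exports in the test derivations:
    // abort instead of exiting, print a summary, and skip leak detection.
    return "abort_on_error=1:print_summary=1:detect_leaks=0";
}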
configdata_pub.set('NIX_USE_BOEHMGC', bdw_gc.found().to_int()) @@ -88,6 +97,7 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') parser_tab = custom_target( input : 'parser.y', diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index c0a25d1d4..a2980af6b 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -11,7 +11,7 @@ namespace nix { -unsigned long Expr::nrExprs = 0; +Counter Expr::nrExprs; ExprBlackHole eBlackHole; @@ -40,7 +40,7 @@ void ExprFloat::show(const SymbolTable & symbols, std::ostream & str) const void ExprString::show(const SymbolTable & symbols, std::ostream & str) const { - printLiteralString(str, s); + printLiteralString(str, v.string_view()); } void ExprPath::show(const SymbolTable & symbols, std::ostream & str) const diff --git a/src/libexpr/parser-scanner-decls.hh b/src/libexpr/parser-scanner-decls.hh new file mode 100644 index 000000000..e4e061883 --- /dev/null +++ b/src/libexpr/parser-scanner-decls.hh @@ -0,0 +1,17 @@ +#pragma once + +#ifndef BISON_HEADER +# include "parser-tab.hh" +using YYSTYPE = nix::parser::BisonParser::value_type; +using YYLTYPE = nix::parser::BisonParser::location_type; +# include "lexer-tab.hh" // IWYU pragma: export +#endif + +namespace nix { + +class Parser : public parser::BisonParser +{ + using BisonParser::BisonParser; +}; + +} // namespace nix diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 89da001ef..7dabd6b56 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -1,5 +1,7 @@ +%skeleton "lalr1.cc" %define api.location.type { ::nix::ParserLocation } -%define api.pure +%define api.namespace { ::nix::parser } +%define api.parser.class { BisonParser } %locations %define parse.error verbose %defines @@ -26,19 +28,12 @@ #include "nix/expr/eval-settings.hh" #include "nix/expr/parser-state.hh" -// Bison seems to have difficulty growing the parser stack when using C++ with -// a custom location type. This undocumented macro tells Bison that our -// location type is "trivially copyable" in C++-ese, so it is safe to use the -// same memcpy macro it uses to grow the stack that it uses with its own -// default location type. Without this, we get "error: memory exhausted" when -// parsing some large Nix files. Our other options are to increase the initial -// stack size (200 by default) to be as large as we ever want to support (so -// that growing the stack is unnecessary), or redefine the stack-relocation -// macro ourselves (which is also undocumented). -#define YYLTYPE_IS_TRIVIAL 1 - -#define YY_DECL int yylex \ - (YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParserState * state) +#define YY_DECL \ + int yylex( \ + nix::Parser::value_type * yylval_param, \ + nix::Parser::location_type * yylloc_param, \ + yyscan_t yyscanner, \ + nix::ParserState * state) // For efficiency, we only track offsets; not line,column coordinates # define YYLLOC_DEFAULT(Current, Rhs, N) \ @@ -64,6 +59,7 @@ Expr * parseExprFromBuf( size_t length, Pos::Origin origin, const SourcePath & basePath, + std::pmr::polymorphic_allocator & alloc, SymbolTable & symbols, const EvalSettings & settings, PosTable & positions, @@ -78,24 +74,30 @@ Expr * parseExprFromBuf( %{ -#include "parser-tab.hh" -#include "lexer-tab.hh" +/* The parser is very performance sensitive and loses out on a lot + of performance even with basic stdlib assertions. Since those don't + affect ABI we can disable those just for this file. 
*/ +#if defined(_GLIBCXX_ASSERTIONS) && !defined(_GLIBCXX_DEBUG) +#undef _GLIBCXX_ASSERTIONS +#endif + +#include "parser-scanner-decls.hh" YY_DECL; using namespace nix; -#define CUR_POS state->at(yyloc) +#define CUR_POS state->at(yylhs.location) - -void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error) +void parser::BisonParser::error(const location_type &loc_, const std::string &error) { + auto loc = loc_; if (std::string_view(error).starts_with("syntax error, unexpected end of file")) { - loc->beginOffset = loc->endOffset; + loc.beginOffset = loc.endOffset; } throw ParseError({ .msg = HintFmt(error), - .pos = state->positions[state->at(*loc)] + .pos = state->positions[state->at(loc)] }); } @@ -134,6 +136,7 @@ static Expr * makeCall(PosIdx pos, Expr * fn, Expr * arg) { std::vector * attrNames; std::vector> * inheritAttrs; std::vector> * string_parts; + std::variant * to_be_string; std::vector>> * ind_string_parts; } @@ -148,7 +151,8 @@ static Expr * makeCall(PosIdx pos, Expr * fn, Expr * arg) { %type attrs %type string_parts_interpolated %type ind_string_parts -%type path_start string_parts string_attr +%type path_start +%type string_parts string_attr %type attr %token ID %token STR IND_STR @@ -182,7 +186,7 @@ start: expr { state->result = $1; // This parser does not use yynerrs; suppress the warning. - (void) yynerrs; + (void) yynerrs_; }; expr: expr_function; @@ -303,7 +307,13 @@ expr_simple } | INT_LIT { $$ = new ExprInt($1); } | FLOAT_LIT { $$ = new ExprFloat($1); } - | '"' string_parts '"' { $$ = $2; } + | '"' string_parts '"' { + std::visit(overloaded{ + [&](std::string_view str) { $$ = new ExprString(state->alloc, str); }, + [&](Expr * expr) { $$ = expr; }}, + *$2); + delete $2; + } | IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE { $$ = state->stripIndentation(CUR_POS, std::move(*$2)); delete $2; @@ -314,11 +324,11 @@ expr_simple $$ = new ExprConcatStrings(CUR_POS, false, $2); } | SPATH { - std::string path($1.p + 1, $1.l - 2); + std::string_view path($1.p + 1, $1.l - 2); $$ = new ExprCall(CUR_POS, new ExprVar(state->s.findFile), {new ExprVar(state->s.nixPath), - new ExprString(std::move(path))}); + new ExprString(state->alloc, path)}); } | URI { static bool noURLLiterals = experimentalFeatureSettings.isEnabled(Xp::NoUrlLiterals); @@ -327,7 +337,7 @@ expr_simple .msg = HintFmt("URL literals are disabled"), .pos = state->positions[CUR_POS] }); - $$ = new ExprString(std::string($1)); + $$ = new ExprString(state->alloc, $1); } | '(' expr ')' { $$ = $2; } /* Let expressions `let {..., body = ...}' are just desugared @@ -344,19 +354,19 @@ expr_simple ; string_parts - : STR { $$ = new ExprString(std::string($1)); } - | string_parts_interpolated { $$ = new ExprConcatStrings(CUR_POS, true, $1); } - | { $$ = new ExprString(""); } + : STR { $$ = new std::variant($1); } + | string_parts_interpolated { $$ = new std::variant(new ExprConcatStrings(CUR_POS, true, $1)); } + | { $$ = new std::variant(std::string_view()); } ; string_parts_interpolated : string_parts_interpolated STR - { $$ = $1; $1->emplace_back(state->at(@2), new ExprString(std::string($2))); } + { $$ = $1; $1->emplace_back(state->at(@2), new ExprString(state->alloc, $2)); } | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); } | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(state->at(@1), $2); } | STR DOLLAR_CURLY expr '}' { $$ = new std::vector>; - $$->emplace_back(state->at(@1), new ExprString(std::string($1))); + 
$$->emplace_back(state->at(@1), new ExprString(state->alloc, $1)); $$->emplace_back(state->at(@2), $3); } ; @@ -454,15 +464,16 @@ attrs : attrs attr { $$ = $1; $1->emplace_back(AttrName(state->symbols.create($2)), state->at(@2)); } | attrs string_attr { $$ = $1; - ExprString * str = dynamic_cast($2); - if (str) { - $$->emplace_back(AttrName(state->symbols.create(str->s)), state->at(@2)); - delete str; - } else - throw ParseError({ - .msg = HintFmt("dynamic attributes not allowed in inherit"), - .pos = state->positions[state->at(@2)] - }); + std::visit(overloaded { + [&](std::string_view str) { $$->emplace_back(AttrName(state->symbols.create(str)), state->at(@2)); }, + [&](Expr * expr) { + throw ParseError({ + .msg = HintFmt("dynamic attributes not allowed in inherit"), + .pos = state->positions[state->at(@2)] + }); + } + }, *$2); + delete $2; } | { $$ = new std::vector>; } ; @@ -471,22 +482,20 @@ attrpath : attrpath '.' attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($3))); } | attrpath '.' string_attr { $$ = $1; - ExprString * str = dynamic_cast($3); - if (str) { - $$->push_back(AttrName(state->symbols.create(str->s))); - delete str; - } else - $$->push_back(AttrName($3)); + std::visit(overloaded { + [&](std::string_view str) { $$->push_back(AttrName(state->symbols.create(str))); }, + [&](Expr * expr) { $$->push_back(AttrName(expr)); } + }, *$3); + delete $3; } | attr { $$ = new std::vector; $$->push_back(AttrName(state->symbols.create($1))); } | string_attr { $$ = new std::vector; - ExprString *str = dynamic_cast($1); - if (str) { - $$->push_back(AttrName(state->symbols.create(str->s))); - delete str; - } else - $$->push_back(AttrName($1)); + std::visit(overloaded { + [&](std::string_view str) { $$->push_back(AttrName(state->symbols.create(str))); }, + [&](Expr * expr) { $$->push_back(AttrName(expr)); } + }, *$1); + delete $1; } ; @@ -497,7 +506,7 @@ attr string_attr : '"' string_parts '"' { $$ = $2; } - | DOLLAR_CURLY expr '}' { $$ = $2; } + | DOLLAR_CURLY expr '}' { $$ = new std::variant($2); } ; expr_list @@ -537,6 +546,7 @@ Expr * parseExprFromBuf( size_t length, Pos::Origin origin, const SourcePath & basePath, + std::pmr::polymorphic_allocator & alloc, SymbolTable & symbols, const EvalSettings & settings, PosTable & positions, @@ -551,6 +561,7 @@ Expr * parseExprFromBuf( }; ParserState state { .lexerState = lexerState, + .alloc = alloc, .symbols = symbols, .positions = positions, .basePath = basePath, @@ -563,7 +574,8 @@ Expr * parseExprFromBuf( Finally _destroy([&] { yylex_destroy(scanner); }); yy_scan_buffer(text, length, scanner); - yyparse(scanner, &state); + Parser parser(scanner, &state); + parser.parse(); return state.result; } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index a046a2c28..a8ac8d159 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -262,7 +262,7 @@ static void scopedImport(EvalState & state, const PosIdx pos, SourcePath & path, { state.forceAttrs(*vScope, pos, "while evaluating the first argument passed to builtins.scopedImport"); - Env * env = &state.allocEnv(vScope->attrs()->size()); + Env * env = &state.mem.allocEnv(vScope->attrs()->size()); env->up = &state.baseEnv; auto staticEnv = std::make_shared(nullptr, state.staticBaseEnv, vScope->attrs()->size()); @@ -3161,7 +3161,7 @@ static void prim_listToAttrs(EvalState & state, const PosIdx pos, Value ** args, // Step 1. 
Sort the name-value attrsets in place using the memory we allocate for the result auto listView = args[0]->listView(); size_t listSize = listView.size(); - auto & bindings = *state.allocBindings(listSize); + auto & bindings = *state.mem.allocBindings(listSize); using ElemPtr = decltype(&bindings[0].value); for (const auto & [n, v2] : enumerate(listView)) { diff --git a/src/libfetchers-c/meson.build b/src/libfetchers-c/meson.build index db415d917..3761b0df2 100644 --- a/src/libfetchers-c/meson.build +++ b/src/libfetchers-c/meson.build @@ -32,6 +32,7 @@ add_project_arguments( ) subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'nix_api_fetchers.cc', diff --git a/src/libfetchers-tests/git.cc b/src/libfetchers-tests/git.cc index af987e260..4f0e0d974 100644 --- a/src/libfetchers-tests/git.cc +++ b/src/libfetchers-tests/git.cc @@ -1,5 +1,6 @@ #include "nix/store/store-open.hh" #include "nix/store/globals.hh" +#include "nix/store/dummy-store.hh" #include "nix/fetchers/fetch-settings.hh" #include "nix/fetchers/fetchers.hh" #include "nix/fetchers/git-utils.hh" @@ -179,10 +180,11 @@ TEST_F(GitTest, submodulePeriodSupport) // 6) Commit the addition in super commitAll(super.get(), "Add submodule with branch='.'"); - // TODO: Use dummy:// store with MemorySourceAccessor. - Path storeTmpDir = createTempDir(); - auto storeTmpDirAutoDelete = AutoDelete(storeTmpDir, true); - ref store = openStore(storeTmpDir); + auto store = [] { + auto cfg = make_ref(StoreReference::Params{}); + cfg->readOnly = false; + return cfg->openStore(); + }(); auto settings = fetchers::Settings{}; auto input = fetchers::Input::fromAttrs( diff --git a/src/libfetchers-tests/meson.build b/src/libfetchers-tests/meson.build index a18f64d79..858d7f3af 100644 --- a/src/libfetchers-tests/meson.build +++ b/src/libfetchers-tests/meson.build @@ -37,6 +37,7 @@ libgit2 = dependency('libgit2') deps_private += libgit2 subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'access-tokens.cc', @@ -63,7 +64,7 @@ this_exe = executable( test( meson.project_name(), this_exe, - env : { + env : asan_test_options_env + { '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', }, protocol : 'gtest', diff --git a/src/libfetchers-tests/package.nix b/src/libfetchers-tests/package.nix index 780618725..8e82430d7 100644 --- a/src/libfetchers-tests/package.nix +++ b/src/libfetchers-tests/package.nix @@ -61,6 +61,7 @@ mkMesonExecutable (finalAttrs: { buildInputs = [ writableTmpDirAsHomeHook ]; } '' + export ASAN_OPTIONS=abort_on_error=1:print_summary=1:detect_leaks=0 export _NIX_TEST_UNIT_DATA=${resolvePath ./data} ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} touch $out diff --git a/src/libfetchers-tests/public-key.cc b/src/libfetchers-tests/public-key.cc index 97a232447..2991223f6 100644 --- a/src/libfetchers-tests/public-key.cc +++ b/src/libfetchers-tests/public-key.cc @@ -1,14 +1,14 @@ #include #include "nix/fetchers/fetchers.hh" #include "nix/util/json-utils.hh" -#include -#include "nix/util/tests/characterization.hh" +#include "nix/util/tests/json-characterization.hh" namespace nix { using nlohmann::json; -class PublicKeyTest : public CharacterizationTest +class PublicKeyTest : public JsonCharacterizationTest, + public ::testing::WithParamInterface> { std::filesystem::path unitTestData = getUnitTestData() / "public-key"; @@ -19,30 +19,35 @@ public: } }; -#define TEST_JSON(FIXTURE, NAME, VAL) \ - 
TEST_F(FIXTURE, PublicKey_##NAME##_from_json) \ - { \ - readTest(#NAME ".json", [&](const auto & encoded_) { \ - fetchers::PublicKey expected{VAL}; \ - fetchers::PublicKey got = nlohmann::json::parse(encoded_); \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(FIXTURE, PublicKey_##NAME##_to_json) \ - { \ - writeTest( \ - #NAME ".json", \ - [&]() -> json { return nlohmann::json(fetchers::PublicKey{VAL}); }, \ - [](const auto & file) { return json::parse(readFile(file)); }, \ - [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ - } +TEST_P(PublicKeyTest, from_json) +{ + const auto & [name, expected] = GetParam(); + readJsonTest(name, expected); +} -TEST_JSON(PublicKeyTest, simple, (fetchers::PublicKey{.type = "ssh-rsa", .key = "ABCDE"})) +TEST_P(PublicKeyTest, to_json) +{ + const auto & [name, value] = GetParam(); + writeJsonTest(name, value); +} -TEST_JSON(PublicKeyTest, defaultType, fetchers::PublicKey{.key = "ABCDE"}) - -#undef TEST_JSON +INSTANTIATE_TEST_SUITE_P( + PublicKeyJSON, + PublicKeyTest, + ::testing::Values( + std::pair{ + "simple", + fetchers::PublicKey{ + .type = "ssh-rsa", + .key = "ABCDE", + }, + }, + std::pair{ + "defaultType", + fetchers::PublicKey{ + .key = "ABCDE", + }, + })); TEST_F(PublicKeyTest, PublicKey_noRoundTrip_from_json) { diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index 54013bf55..045aafdcb 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -3,8 +3,8 @@ #include "nix/util/source-path.hh" #include "nix/fetchers/fetch-to-store.hh" #include "nix/util/json-utils.hh" -#include "nix/fetchers/store-path-accessor.hh" #include "nix/fetchers/fetch-settings.hh" +#include "nix/fetchers/fetch-to-store.hh" #include @@ -332,10 +332,20 @@ std::pair, Input> Input::getAccessorUnchecked(ref sto debug("using substituted/cached input '%s' in '%s'", to_string(), store->printStorePath(storePath)); - auto accessor = makeStorePathAccessor(store, storePath); + // We just ensured the store object was there + auto accessor = ref{store->getFSAccessor(storePath)}; accessor->fingerprint = getFingerprint(store); + // Store a cache entry for the substituted tree so later fetches + // can reuse the existing nar instead of copying the unpacked + // input back into the store on every evaluation. 
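The upsert below keys the cache entry on the input name, the input's fingerprint, the content-address method (NAR), and the subpath "/". As a rough illustration of that key shape — a self-contained sketch with hypothetical `FetchCacheKey`/`FetchCache` types, not Nix's actual `fetchers::Cache` interface — consider:

```cpp
// Hypothetical stand-ins for the fetcher cache; the mapped value is a store path.
#include <compare>
#include <map>
#include <optional>
#include <string>

struct FetchCacheKey
{
    std::string name;        // input name, e.g. "source"
    std::string fingerprint; // locked input fingerprint (rev, narHash, ...)
    std::string method;      // content-address method, e.g. "nar"
    std::string subpath;     // subpath within the tree, "/" for the whole tree

    auto operator<=>(const FetchCacheKey &) const = default;
};

struct FetchCache
{
    std::map<FetchCacheKey, std::string> entries;

    void upsert(const FetchCacheKey & key, std::string storePath)
    {
        entries.insert_or_assign(key, std::move(storePath));
    }

    std::optional<std::string> lookup(const FetchCacheKey & key) const
    {
        if (auto it = entries.find(key); it != entries.end())
            return it->second;
        return std::nullopt;
    }
};
```

A later fetch-to-store of the same locked input then hits this entry and returns the already-substituted store path instead of copying the unpacked tree into the store again.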
+ if (accessor->fingerprint) { + ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive; + auto cacheKey = makeFetchToStoreCacheKey(getName(), *accessor->fingerprint, method, "/"); + settings->getCache()->upsert(cacheKey, *store, {}, storePath); + } + accessor->setPathDisplay("«" + to_string() + "»"); return {accessor, *this}; @@ -509,7 +519,7 @@ fetchers::PublicKey adl_serializer::from_json(const json & return res; } -void adl_serializer::to_json(json & json, fetchers::PublicKey p) +void adl_serializer::to_json(json & json, const fetchers::PublicKey & p) { json["type"] = p.type; json["key"] = p.key; diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index f750d907d..f6f5c30ee 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -15,6 +15,7 @@ #include "nix/fetchers/fetch-settings.hh" #include "nix/util/json-utils.hh" #include "nix/util/archive.hh" +#include "nix/util/mounted-source-accessor.hh" #include #include diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 723c075f2..3b723d7d8 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -398,8 +398,8 @@ struct GitHubInputScheme : GitArchiveInputScheme Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); - auto json = nlohmann::json::parse( - readFile(store->toRealPath(downloadFile(store, *input.settings, url, "source", headers).storePath))); + auto downloadResult = downloadFile(store, *input.settings, url, "source", headers); + auto json = nlohmann::json::parse(store->getFSAccessor(downloadResult.storePath)->readFile(CanonPath::root)); return RefInfo{ .rev = Hash::parseAny(std::string{json["sha"]}, HashAlgorithm::SHA1), @@ -472,8 +472,8 @@ struct GitLabInputScheme : GitArchiveInputScheme Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); - auto json = nlohmann::json::parse( - readFile(store->toRealPath(downloadFile(store, *input.settings, url, "source", headers).storePath))); + auto downloadResult = downloadFile(store, *input.settings, url, "source", headers); + auto json = nlohmann::json::parse(store->getFSAccessor(downloadResult.storePath)->readFile(CanonPath::root)); if (json.is_array() && json.size() >= 1 && json[0]["id"] != nullptr) { return RefInfo{.rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1)}; diff --git a/src/libfetchers/include/nix/fetchers/meson.build b/src/libfetchers/include/nix/fetchers/meson.build index fcd446a6d..a313b1e0b 100644 --- a/src/libfetchers/include/nix/fetchers/meson.build +++ b/src/libfetchers/include/nix/fetchers/meson.build @@ -11,6 +11,5 @@ headers = files( 'git-utils.hh', 'input-cache.hh', 'registry.hh', - 'store-path-accessor.hh', 'tarball.hh', ) diff --git a/src/libfetchers/include/nix/fetchers/store-path-accessor.hh b/src/libfetchers/include/nix/fetchers/store-path-accessor.hh deleted file mode 100644 index a107293f8..000000000 --- a/src/libfetchers/include/nix/fetchers/store-path-accessor.hh +++ /dev/null @@ -1,14 +0,0 @@ -#pragma once - -#include "nix/util/source-path.hh" - -namespace nix { - -class StorePath; -class Store; - -ref makeStorePathAccessor(ref store, const StorePath & storePath); - -SourcePath getUnfilteredRootPath(CanonPath path); - -} // namespace nix diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 641b3d6a8..bf460d9c6 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -6,7 +6,6 @@ #include "nix/util/tarfile.hh" #include "nix/store/store-api.hh" #include "nix/util/url-parts.hh" 
-#include "nix/fetchers/store-path-accessor.hh" #include "nix/fetchers/fetch-settings.hh" #include @@ -331,7 +330,8 @@ struct MercurialInputScheme : InputScheme auto storePath = fetchToStore(store, input); - auto accessor = makeStorePathAccessor(store, storePath); + // We just added it, it should be there. + auto accessor = ref{store->getFSAccessor(storePath)}; accessor->setPathDisplay("«" + input.to_string() + "»"); diff --git a/src/libfetchers/meson.build b/src/libfetchers/meson.build index 792a0fdbf..5b53a147b 100644 --- a/src/libfetchers/meson.build +++ b/src/libfetchers/meson.build @@ -32,6 +32,7 @@ libgit2 = dependency('libgit2', version : '>= 1.9') deps_private += libgit2 subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'attrs.cc', @@ -49,7 +50,6 @@ sources = files( 'mercurial.cc', 'path.cc', 'registry.cc', - 'store-path-accessor.cc', 'tarball.cc', ) diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index b66459fb9..3c4b9c06d 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -1,7 +1,6 @@ #include "nix/fetchers/fetchers.hh" #include "nix/store/store-api.hh" #include "nix/util/archive.hh" -#include "nix/fetchers/store-path-accessor.hh" #include "nix/fetchers/cache.hh" #include "nix/fetchers/fetch-to-store.hh" #include "nix/fetchers/fetch-settings.hh" @@ -153,7 +152,7 @@ struct PathInputScheme : InputScheme if (!input.getLastModified()) input.attrs.insert_or_assign("lastModified", uint64_t(mtime)); - return {makeStorePathAccessor(store, *storePath), std::move(input)}; + return {ref{store->getFSAccessor(*storePath)}, std::move(input)}; } std::optional getFingerprint(ref store, const Input & input) const override diff --git a/src/libfetchers/store-path-accessor.cc b/src/libfetchers/store-path-accessor.cc deleted file mode 100644 index 65160e311..000000000 --- a/src/libfetchers/store-path-accessor.cc +++ /dev/null @@ -1,11 +0,0 @@ -#include "nix/fetchers/store-path-accessor.hh" -#include "nix/store/store-api.hh" - -namespace nix { - -ref makeStorePathAccessor(ref store, const StorePath & storePath) -{ - return projectSubdirSourceAccessor(store->getFSAccessor(), storePath.to_string()); -} - -} // namespace nix diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index b55837c9e..31d5ab460 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -6,7 +6,6 @@ #include "nix/util/archive.hh" #include "nix/util/tarfile.hh" #include "nix/util/types.hh" -#include "nix/fetchers/store-path-accessor.hh" #include "nix/store/store-api.hh" #include "nix/fetchers/git-utils.hh" #include "nix/fetchers/fetch-settings.hh" @@ -354,7 +353,7 @@ struct FileInputScheme : CurlInputScheme auto narHash = store->queryPathInfo(file.storePath)->narHash; input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true)); - auto accessor = makeStorePathAccessor(store, file.storePath); + auto accessor = ref{store->getFSAccessor(file.storePath)}; accessor->setPathDisplay("«" + input.to_string() + "»"); diff --git a/src/libflake-c/meson.build b/src/libflake-c/meson.build index fddb39bdf..d0d45cfa8 100644 --- a/src/libflake-c/meson.build +++ b/src/libflake-c/meson.build @@ -32,6 +32,7 @@ deps_public_maybe_subproject = [ subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'nix_api_flake.cc', diff --git a/src/libflake-tests/meson.build b/src/libflake-tests/meson.build index 
59094abe8..41ae6cf3d 100644 --- a/src/libflake-tests/meson.build +++ b/src/libflake-tests/meson.build @@ -34,6 +34,7 @@ gtest = dependency('gtest', main : true) deps_private += gtest subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'flakeref.cc', @@ -58,7 +59,7 @@ this_exe = executable( test( meson.project_name(), this_exe, - env : { + env : asan_test_options_env + { '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', 'NIX_CONFIG' : 'extra-experimental-features = flakes', 'HOME' : meson.current_build_dir() / 'test-home', diff --git a/src/libflake-tests/package.nix b/src/libflake-tests/package.nix index 397ef4192..09812a57b 100644 --- a/src/libflake-tests/package.nix +++ b/src/libflake-tests/package.nix @@ -59,6 +59,7 @@ mkMesonExecutable (finalAttrs: { buildInputs = [ writableTmpDirAsHomeHook ]; } ('' + export ASAN_OPTIONS=abort_on_error=1:print_summary=1:detect_leaks=0 export _NIX_TEST_UNIT_DATA=${resolvePath ./data} export NIX_CONFIG="extra-experimental-features = flakes" ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} diff --git a/src/libflake/meson.build b/src/libflake/meson.build index 58916ecd9..3bd04fcf4 100644 --- a/src/libflake/meson.build +++ b/src/libflake/meson.build @@ -29,6 +29,7 @@ nlohmann_json = dependency('nlohmann_json', version : '>= 3.9') deps_public += nlohmann_json subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') subdir('nix-meson-build-support/generate-header') diff --git a/src/libmain-c/meson.build b/src/libmain-c/meson.build index 36332fdb7..2ac2b799b 100644 --- a/src/libmain-c/meson.build +++ b/src/libmain-c/meson.build @@ -28,6 +28,7 @@ deps_public_maybe_subproject = [ subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'nix_api_main.cc', diff --git a/src/libmain-c/nix_api_main.cc b/src/libmain-c/nix_api_main.cc index 2d4f588a8..0ee965dc8 100644 --- a/src/libmain-c/nix_api_main.cc +++ b/src/libmain-c/nix_api_main.cc @@ -4,6 +4,7 @@ #include "nix_api_util_internal.h" #include "nix/main/plugin.hh" +#include "nix/main/loggers.hh" extern "C" { @@ -17,4 +18,16 @@ nix_err nix_init_plugins(nix_c_context * context) NIXC_CATCH_ERRS } +nix_err nix_set_log_format(nix_c_context * context, const char * format) +{ + if (context) + context->last_err_code = NIX_OK; + if (format == nullptr) + return nix_set_err_msg(context, NIX_ERR_UNKNOWN, "Log format is null"); + try { + nix::setLogFormat(format); + } + NIXC_CATCH_ERRS +} + } // extern "C" diff --git a/src/libmain-c/nix_api_main.h b/src/libmain-c/nix_api_main.h index 3957b992f..3d5d12c15 100644 --- a/src/libmain-c/nix_api_main.h +++ b/src/libmain-c/nix_api_main.h @@ -30,6 +30,14 @@ extern "C" { */ nix_err nix_init_plugins(nix_c_context * context); +/** + * @brief Sets the log format + * + * @param[out] context Optional, stores error information + * @param[in] format The string name of the format. 
+ */ +nix_err nix_set_log_format(nix_c_context * context, const char * format); + // cffi end #ifdef __cplusplus } diff --git a/src/libmain/meson.build b/src/libmain/meson.build index 2ac59924e..21bfbea3e 100644 --- a/src/libmain/meson.build +++ b/src/libmain/meson.build @@ -53,6 +53,7 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'common-args.cc', diff --git a/src/libstore-c/meson.build b/src/libstore-c/meson.build index c6b6174c7..a92771efc 100644 --- a/src/libstore-c/meson.build +++ b/src/libstore-c/meson.build @@ -26,6 +26,7 @@ deps_public_maybe_subproject = [ subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'nix_api_store.cc', diff --git a/src/libstore-c/nix_api_store.cc b/src/libstore-c/nix_api_store.cc index 7ce63f5c2..c4c17f127 100644 --- a/src/libstore-c/nix_api_store.cc +++ b/src/libstore-c/nix_api_store.cc @@ -166,11 +166,44 @@ void nix_store_path_free(StorePath * sp) delete sp; } +void nix_derivation_free(nix_derivation * drv) +{ + delete drv; +} + StorePath * nix_store_path_clone(const StorePath * p) { return new StorePath{p->path}; } +nix_derivation * nix_derivation_from_json(nix_c_context * context, Store * store, const char * json) +{ + if (context) + context->last_err_code = NIX_OK; + try { + auto drv = static_cast(nlohmann::json::parse(json)); + + auto drvPath = nix::writeDerivation(*store->ptr, drv, nix::NoRepair, /* read only */ true); + + drv.checkInvariants(*store->ptr, drvPath); + + return new nix_derivation{drv}; + } + NIXC_CATCH_ERRS_NULL +} + +StorePath * nix_add_derivation(nix_c_context * context, Store * store, nix_derivation * derivation) +{ + if (context) + context->last_err_code = NIX_OK; + try { + auto ret = nix::writeDerivation(*store->ptr, derivation->drv, nix::NoRepair); + + return new StorePath{ret}; + } + NIXC_CATCH_ERRS_NULL +} + nix_err nix_store_copy_closure(nix_c_context * context, Store * srcStore, Store * dstStore, StorePath * path) { if (context) diff --git a/src/libstore-c/nix_api_store.h b/src/libstore-c/nix_api_store.h index 51bd1bc89..e76e376b4 100644 --- a/src/libstore-c/nix_api_store.h +++ b/src/libstore-c/nix_api_store.h @@ -23,6 +23,8 @@ extern "C" { typedef struct Store Store; /** @brief Nix store path */ typedef struct StorePath StorePath; +/** @brief Nix Derivation */ +typedef struct nix_derivation nix_derivation; /** * @brief Initializes the Nix store library @@ -207,6 +209,32 @@ nix_err nix_store_realise( nix_err nix_store_get_version(nix_c_context * context, Store * store, nix_get_string_callback callback, void * user_data); +/** + * @brief Create a `nix_derivation` from a JSON representation of that derivation. + * + * @param[out] context Optional, stores error information. + * @param[in] store nix store reference. + * @param[in] json JSON of the derivation as a string. + */ +nix_derivation * nix_derivation_from_json(nix_c_context * context, Store * store, const char * json); + +/** + * @brief Add the given `nix_derivation` to the given store + * + * @param[out] context Optional, stores error information. + * @param[in] store nix store reference. The derivation will be inserted here. + * @param[in] derivation nix_derivation to insert into the given store. + */ +StorePath * nix_add_derivation(nix_c_context * context, Store * store, nix_derivation * derivation); + +/** + * @brief Deallocate a `nix_derivation` + * + * Does not fail. 
+ * @param[in] drv the derivation to free + */ +void nix_derivation_free(nix_derivation * drv); + /** * @brief Copy the closure of `path` from `srcStore` to `dstStore`. * diff --git a/src/libstore-c/nix_api_store_internal.h b/src/libstore-c/nix_api_store_internal.h index cbe04b2c7..712d96488 100644 --- a/src/libstore-c/nix_api_store_internal.h +++ b/src/libstore-c/nix_api_store_internal.h @@ -1,6 +1,7 @@ #ifndef NIX_API_STORE_INTERNAL_H #define NIX_API_STORE_INTERNAL_H #include "nix/store/store-api.hh" +#include "nix/store/derivations.hh" extern "C" { @@ -14,6 +15,11 @@ struct StorePath nix::StorePath path; }; +struct nix_derivation +{ + nix::Derivation drv; +}; + } // extern "C" #endif diff --git a/src/libstore-test-support/include/nix/store/tests/meson.build b/src/libstore-test-support/include/nix/store/tests/meson.build index f79769d41..33524de38 100644 --- a/src/libstore-test-support/include/nix/store/tests/meson.build +++ b/src/libstore-test-support/include/nix/store/tests/meson.build @@ -9,4 +9,5 @@ headers = files( 'outputs-spec.hh', 'path.hh', 'protocol.hh', + 'test-main.hh', ) diff --git a/src/libstore-test-support/include/nix/store/tests/nix_api_store.hh b/src/libstore-test-support/include/nix/store/tests/nix_api_store.hh index 608aa63d6..7ecc5603b 100644 --- a/src/libstore-test-support/include/nix/store/tests/nix_api_store.hh +++ b/src/libstore-test-support/include/nix/store/tests/nix_api_store.hh @@ -12,33 +12,32 @@ #include namespace nixC { -class nix_api_store_test : public nix_api_util_context + +class nix_api_store_test_base : public nix_api_util_context { public: - nix_api_store_test() + nix_api_store_test_base() { nix_libstore_init(ctx); - init_local_store(); }; - ~nix_api_store_test() override + ~nix_api_store_test_base() override { - nix_store_free(store); - - for (auto & path : std::filesystem::recursive_directory_iterator(nixDir)) { - std::filesystem::permissions(path, std::filesystem::perms::owner_all); + if (exists(std::filesystem::path{nixDir})) { + for (auto & path : std::filesystem::recursive_directory_iterator(nixDir)) { + std::filesystem::permissions(path, std::filesystem::perms::owner_all); + } + std::filesystem::remove_all(nixDir); } - std::filesystem::remove_all(nixDir); } - Store * store; std::string nixDir; std::string nixStoreDir; std::string nixStateDir; std::string nixLogDir; protected: - void init_local_store() + Store * open_local_store() { #ifdef _WIN32 // no `mkdtemp` with MinGW @@ -66,11 +65,37 @@ protected: const char ** params[] = {p1, p2, p3, nullptr}; - store = nix_store_open(ctx, "local", params); + auto * store = nix_store_open(ctx, "local", params); if (!store) { std::string errMsg = nix_err_msg(nullptr, ctx, nullptr); - ASSERT_NE(store, nullptr) << "Could not open store: " << errMsg; + EXPECT_NE(store, nullptr) << "Could not open store: " << errMsg; + assert(store); }; + return store; } }; + +class nix_api_store_test : public nix_api_store_test_base +{ +public: + nix_api_store_test() + : nix_api_store_test_base{} + { + init_local_store(); + }; + + ~nix_api_store_test() override + { + nix_store_free(store); + } + + Store * store; + +protected: + void init_local_store() + { + store = open_local_store(); + } +}; + } // namespace nixC diff --git a/src/libstore-test-support/include/nix/store/tests/test-main.hh b/src/libstore-test-support/include/nix/store/tests/test-main.hh new file mode 100644 index 000000000..3a1897469 --- /dev/null +++ b/src/libstore-test-support/include/nix/store/tests/test-main.hh @@ -0,0 +1,13 @@ +#pragma once + +///@file 
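Taken together, the derivation C API declared in `nix_api_store.h` above (`nix_derivation_from_json`, `nix_add_derivation`, `nix_derivation_free`) can be exercised roughly as follows. This is a hedged sketch, not documentation from the patch: it assumes the default store is writable, reduces error handling to a single check, and leaves the derivation JSON (shaped like `self-contained.json` above) as a placeholder.

```cpp
#include <cstdio>

#include "nix_api_util.h"
#include "nix_api_store.h"

int main()
{
    nix_c_context * ctx = nix_c_context_create();
    nix_libstore_init(ctx);

    // nullptr URI: open the default store (assumption; any writable store works).
    Store * store = nix_store_open(ctx, nullptr, nullptr);
    if (!store) {
        fprintf(stderr, "could not open store: %s\n", nix_err_msg(nullptr, ctx, nullptr));
        nix_c_context_free(ctx);
        return 1;
    }

    const char * drvJson = "..."; // derivation JSON elided here

    // Parse the derivation (invariants are checked without writing it yet)...
    nix_derivation * drv = nix_derivation_from_json(ctx, store, drvJson);
    if (drv) {
        // ...then actually write the .drv file to the store.
        StorePath * drvPath = nix_add_derivation(ctx, store, drv);
        if (drvPath)
            nix_store_path_free(drvPath);
        nix_derivation_free(drv);
    }

    nix_store_free(store);
    nix_c_context_free(ctx);
    return 0;
}
```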
+ +namespace nix { + +/** + * Call this for a GTest test suite that will including performing Nix + * builds, before running tests. + */ +int testMainForBuidingPre(int argc, char ** argv); + +} // namespace nix diff --git a/src/libstore-test-support/meson.build b/src/libstore-test-support/meson.build index 5873680ea..e929ae2b4 100644 --- a/src/libstore-test-support/meson.build +++ b/src/libstore-test-support/meson.build @@ -29,11 +29,13 @@ rapidcheck = dependency('rapidcheck') deps_public += rapidcheck subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'derived-path.cc', 'outputs-spec.cc', 'path.cc', + 'test-main.cc', ) subdir('include/nix/store/tests') diff --git a/src/libstore-test-support/test-main.cc b/src/libstore-test-support/test-main.cc new file mode 100644 index 000000000..0b9072dc0 --- /dev/null +++ b/src/libstore-test-support/test-main.cc @@ -0,0 +1,47 @@ +#include + +#include "nix/store/globals.hh" +#include "nix/util/logging.hh" + +#include "nix/store/tests/test-main.hh" + +namespace nix { + +int testMainForBuidingPre(int argc, char ** argv) +{ + if (argc > 1 && std::string_view(argv[1]) == "__build-remote") { + printError("test-build-remote: not supported in libexpr unit tests"); + return EXIT_FAILURE; + } + + // Disable build hook. We won't be testing remote builds in these unit tests. If we do, fix the above build hook. + settings.buildHook = {}; + + // No substituters, unless a test specifically requests. + settings.substituters = {}; + +#ifdef __linux__ // should match the conditional around sandboxBuildDir declaration. + + // When building and testing nix within the host's Nix sandbox, our store dir will be located in the host's + // sandboxBuildDir, e.g.: Host + // storeDir = /nix/store + // sandboxBuildDir = /build + // This process + // storeDir = /build/foo/bar/store + // sandboxBuildDir = /build + // However, we have a rule that the store dir must not be inside the storeDir, so we need to pick a different + // sandboxBuildDir. 
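Going by the example paths in the comment just above, the rule being worked around is that the tests' store directory must not end up nested under the configured sandbox build directory; the assignment below therefore swaps in a dedicated directory. A minimal sketch of that nesting check, assuming a hypothetical `isUnder` helper (libstore's real check may differ):

```cpp
#include <cassert>
#include <string>
#include <string_view>

static bool isUnder(std::string_view path, std::string_view dir)
{
    // True if `path` is `dir` itself or lies lexically inside it.
    return path == dir || (path.size() > dir.size() && path.starts_with(dir) && path[dir.size()] == '/');
}

int main()
{
    std::string storeDir = "/build/foo/bar/store"; // the tests' store dir from the comment above

    assert(isUnder(storeDir, "/build"));  // nested under the host's sandboxBuildDir: the problematic case
    assert(!isUnder(storeDir, "/test-build-dir-instead-of-usual-build-dir")); // the override avoids the nesting

    return 0;
}
```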
+ settings.sandboxBuildDir = "/test-build-dir-instead-of-usual-build-dir"; +#endif + +#ifdef __APPLE__ + // Avoid this error, when already running in a sandbox: + // sandbox-exec: sandbox_apply: Operation not permitted + settings.sandboxMode = smDisabled; + setEnv("_NIX_TEST_NO_SANDBOX", "1"); +#endif + + return EXIT_SUCCESS; +} + +} // namespace nix diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json b/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json index bc67236b5..eb4bd4f3d 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json @@ -21,5 +21,6 @@ "method": "nar" } }, - "system": "my-system" + "system": "my-system", + "version": 3 } diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json index 183148b29..3a4a3079b 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json @@ -32,5 +32,6 @@ ], "system": "my-system" }, - "system": "my-system" + "system": "my-system", + "version": 3 } diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json index ec044d778..b10355af7 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json @@ -10,14 +10,14 @@ "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9" }, "inputDrvs": { - "/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { + "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { "dynamicOutputs": {}, "outputs": [ "dev", "out" ] }, - "/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { "dynamicOutputs": {}, "outputs": [ "dev", @@ -26,7 +26,7 @@ } }, "inputSrcs": [ - "/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" ], "name": "advanced-attributes-structured-attrs", "outputs": { @@ -100,5 +100,6 @@ ], "system": "my-system" }, - "system": "my-system" + "system": "my-system", + "version": 3 } diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes.json b/src/libstore-tests/data/derivation/ca/advanced-attributes.json index 0ac0a9c5c..d66882036 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes.json @@ -26,14 +26,14 @@ "system": "my-system" }, "inputDrvs": { - "/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { + "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { "dynamicOutputs": {}, "outputs": [ "dev", "out" ] }, - "/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { "dynamicOutputs": {}, "outputs": [ "dev", @@ -42,7 +42,7 @@ } }, "inputSrcs": [ - "/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" ], "name": "advanced-attributes", "outputs": { @@ -51,5 +51,6 @@ "method": "nar" } }, - "system": "my-system" + "system": "my-system", + "version": 3 } diff --git a/src/libstore-tests/data/derivation/ca/self-contained.json b/src/libstore-tests/data/derivation/ca/self-contained.json new 
file mode 100644 index 000000000..331beb7be --- /dev/null +++ b/src/libstore-tests/data/derivation/ca/self-contained.json @@ -0,0 +1,24 @@ +{ + "args": [ + "-c", + "echo $name foo > $out" + ], + "builder": "/bin/sh", + "env": { + "builder": "/bin/sh", + "name": "myname", + "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9", + "system": "x86_64-linux" + }, + "inputDrvs": {}, + "inputSrcs": [], + "name": "myname", + "outputs": { + "out": { + "hashAlgo": "sha256", + "method": "nar" + } + }, + "system": "x86_64-linux", + "version": 3 +} diff --git a/src/libstore-tests/data/derivation/dynDerivationDeps.drv b/src/libstore-tests/data/derivation/dyn-dep-derivation.drv similarity index 100% rename from src/libstore-tests/data/derivation/dynDerivationDeps.drv rename to src/libstore-tests/data/derivation/dyn-dep-derivation.drv diff --git a/src/libstore-tests/data/derivation/dynDerivationDeps.json b/src/libstore-tests/data/derivation/dyn-dep-derivation.json similarity index 78% rename from src/libstore-tests/data/derivation/dynDerivationDeps.json rename to src/libstore-tests/data/derivation/dyn-dep-derivation.json index 9dbeb1f15..1a9f54c53 100644 --- a/src/libstore-tests/data/derivation/dynDerivationDeps.json +++ b/src/libstore-tests/data/derivation/dyn-dep-derivation.json @@ -8,7 +8,7 @@ "BIG_BAD": "WOLF" }, "inputDrvs": { - "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { + "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { "dynamicOutputs": { "cat": { "dynamicOutputs": {}, @@ -30,9 +30,10 @@ } }, "inputSrcs": [ - "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" + "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" ], "name": "dyn-dep-derivation", "outputs": {}, - "system": "wasm-sel4" + "system": "wasm-sel4", + "version": 3 } diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json index d58e7d5b5..0fa543f21 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json @@ -15,8 +15,9 @@ "name": "advanced-attributes-defaults", "outputs": { "out": { - "path": "/nix/store/1qsc7svv43m4dw2prh6mvyf7cai5czji-advanced-attributes-defaults" + "path": "1qsc7svv43m4dw2prh6mvyf7cai5czji-advanced-attributes-defaults" } }, - "system": "my-system" + "system": "my-system", + "version": 3 } diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json index f5349e6c3..e02392ea1 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json @@ -13,10 +13,10 @@ "name": "advanced-attributes-structured-attrs-defaults", "outputs": { "dev": { - "path": "/nix/store/8bazivnbipbyi569623skw5zm91z6kc2-advanced-attributes-structured-attrs-defaults-dev" + "path": "8bazivnbipbyi569623skw5zm91z6kc2-advanced-attributes-structured-attrs-defaults-dev" }, "out": { - "path": "/nix/store/f8f8nvnx32bxvyxyx2ff7akbvwhwd9dw-advanced-attributes-structured-attrs-defaults" + "path": "f8f8nvnx32bxvyxyx2ff7akbvwhwd9dw-advanced-attributes-structured-attrs-defaults" } }, "structuredAttrs": { @@ -28,5 +28,6 @@ ], "system": "my-system" }, - "system": "my-system" + "system": "my-system", + "version": 3 } diff --git 
a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json index b8d566462..9230b06b6 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json @@ -10,14 +10,14 @@ "out": "/nix/store/7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs" }, "inputDrvs": { - "/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { + "afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { "dynamicOutputs": {}, "outputs": [ "dev", "out" ] }, - "/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { "dynamicOutputs": {}, "outputs": [ "dev", @@ -26,18 +26,18 @@ } }, "inputSrcs": [ - "/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" ], "name": "advanced-attributes-structured-attrs", "outputs": { "bin": { - "path": "/nix/store/33qms3h55wlaspzba3brlzlrm8m2239g-advanced-attributes-structured-attrs-bin" + "path": "33qms3h55wlaspzba3brlzlrm8m2239g-advanced-attributes-structured-attrs-bin" }, "dev": { - "path": "/nix/store/wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev" + "path": "wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev" }, "out": { - "path": "/nix/store/7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs" + "path": "7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs" } }, "structuredAttrs": { @@ -95,5 +95,6 @@ ], "system": "my-system" }, - "system": "my-system" + "system": "my-system", + "version": 3 } diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes.json b/src/libstore-tests/data/derivation/ia/advanced-attributes.json index 20ce5e1c2..ba5911c91 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes.json @@ -24,14 +24,14 @@ "system": "my-system" }, "inputDrvs": { - "/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { + "afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { "dynamicOutputs": {}, "outputs": [ "dev", "out" ] }, - "/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { "dynamicOutputs": {}, "outputs": [ "dev", @@ -40,13 +40,14 @@ } }, "inputSrcs": [ - "/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" ], "name": "advanced-attributes", "outputs": { "out": { - "path": "/nix/store/wyhpwd748pns4k7svh48wdrc8kvjk0ra-advanced-attributes" + "path": "wyhpwd748pns4k7svh48wdrc8kvjk0ra-advanced-attributes" } }, - "system": "my-system" + "system": "my-system", + "version": 3 } diff --git a/src/libstore-tests/data/derivation/output-caFixedFlat.json b/src/libstore-tests/data/derivation/output-caFixedFlat.json index 7001ea0a9..e6a0123f6 100644 --- a/src/libstore-tests/data/derivation/output-caFixedFlat.json +++ b/src/libstore-tests/data/derivation/output-caFixedFlat.json @@ -1,6 +1,5 @@ { "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f", "hashAlgo": "sha256", - "method": "flat", - "path": "/nix/store/rhcg9h16sqvlbpsa6dqm57sbr2al6nzg-drv-name-output-name" + "method": "flat" } diff --git a/src/libstore-tests/data/derivation/output-caFixedNAR.json b/src/libstore-tests/data/derivation/output-caFixedNAR.json index 54eb306e6..b57e065a9 100644 --- 
a/src/libstore-tests/data/derivation/output-caFixedNAR.json +++ b/src/libstore-tests/data/derivation/output-caFixedNAR.json @@ -1,6 +1,5 @@ { "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f", "hashAlgo": "sha256", - "method": "nar", - "path": "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-drv-name-output-name" + "method": "nar" } diff --git a/src/libstore-tests/data/derivation/output-caFixedText.json b/src/libstore-tests/data/derivation/output-caFixedText.json index e8a651860..84778509e 100644 --- a/src/libstore-tests/data/derivation/output-caFixedText.json +++ b/src/libstore-tests/data/derivation/output-caFixedText.json @@ -1,6 +1,5 @@ { "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f", "hashAlgo": "sha256", - "method": "text", - "path": "/nix/store/6s1zwabh956jvhv4w9xcdb5jiyanyxg1-drv-name-output-name" + "method": "text" } diff --git a/src/libstore-tests/data/derivation/output-inputAddressed.json b/src/libstore-tests/data/derivation/output-inputAddressed.json index 86c7f3a05..04491ffde 100644 --- a/src/libstore-tests/data/derivation/output-inputAddressed.json +++ b/src/libstore-tests/data/derivation/output-inputAddressed.json @@ -1,3 +1,3 @@ { - "path": "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-drv-name-output-name" + "path": "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-drv-name-output-name" } diff --git a/src/libstore-tests/data/derivation/simple.drv b/src/libstore-tests/data/derivation/simple-derivation.drv similarity index 100% rename from src/libstore-tests/data/derivation/simple.drv rename to src/libstore-tests/data/derivation/simple-derivation.drv diff --git a/src/libstore-tests/data/derivation/simple.json b/src/libstore-tests/data/derivation/simple-derivation.json similarity index 66% rename from src/libstore-tests/data/derivation/simple.json rename to src/libstore-tests/data/derivation/simple-derivation.json index 20d0f8933..41a049aef 100644 --- a/src/libstore-tests/data/derivation/simple.json +++ b/src/libstore-tests/data/derivation/simple-derivation.json @@ -8,7 +8,7 @@ "BIG_BAD": "WOLF" }, "inputDrvs": { - "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { + "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { "dynamicOutputs": {}, "outputs": [ "cat", @@ -17,9 +17,10 @@ } }, "inputSrcs": [ - "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" + "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" ], "name": "simple-derivation", "outputs": {}, - "system": "wasm-sel4" + "system": "wasm-sel4", + "version": 3 } diff --git a/src/libstore-tests/data/derived-path/multi_built_built.json b/src/libstore-tests/data/derived-path/multi_built_built.json new file mode 100644 index 000000000..561d04850 --- /dev/null +++ b/src/libstore-tests/data/derived-path/multi_built_built.json @@ -0,0 +1,10 @@ +{ + "drvPath": { + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv", + "output": "bar" + }, + "outputs": [ + "baz", + "quux" + ] +} diff --git a/src/libstore-tests/data/derived-path/multi_built_built_wildcard.json b/src/libstore-tests/data/derived-path/multi_built_built_wildcard.json new file mode 100644 index 000000000..da1f9d996 --- /dev/null +++ b/src/libstore-tests/data/derived-path/multi_built_built_wildcard.json @@ -0,0 +1,9 @@ +{ + "drvPath": { + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv", + "output": "bar" + }, + "outputs": [ + "*" + ] +} diff --git a/src/libstore-tests/data/derived-path/multi_opaque.json b/src/libstore-tests/data/derived-path/multi_opaque.json new file mode 100644 index 000000000..9bedb882b --- /dev/null +++ 
b/src/libstore-tests/data/derived-path/multi_opaque.json @@ -0,0 +1 @@ +"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv" diff --git a/src/libstore-tests/data/derived-path/mutli_built.json b/src/libstore-tests/data/derived-path/mutli_built.json new file mode 100644 index 000000000..d7bcff53d --- /dev/null +++ b/src/libstore-tests/data/derived-path/mutli_built.json @@ -0,0 +1,7 @@ +{ + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv", + "outputs": [ + "bar", + "baz" + ] +} diff --git a/src/libstore-tests/data/derived-path/single_built.json b/src/libstore-tests/data/derived-path/single_built.json new file mode 100644 index 000000000..64110a364 --- /dev/null +++ b/src/libstore-tests/data/derived-path/single_built.json @@ -0,0 +1,4 @@ +{ + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv", + "output": "bar" +} diff --git a/src/libstore-tests/data/derived-path/single_built_built.json b/src/libstore-tests/data/derived-path/single_built_built.json new file mode 100644 index 000000000..66faa668c --- /dev/null +++ b/src/libstore-tests/data/derived-path/single_built_built.json @@ -0,0 +1,7 @@ +{ + "drvPath": { + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv", + "output": "bar" + }, + "output": "baz" +} diff --git a/src/libstore-tests/data/derived-path/single_opaque.json b/src/libstore-tests/data/derived-path/single_opaque.json new file mode 100644 index 000000000..9bedb882b --- /dev/null +++ b/src/libstore-tests/data/derived-path/single_opaque.json @@ -0,0 +1 @@ +"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv" diff --git a/src/libstore-tests/data/outputs-spec/all.json b/src/libstore-tests/data/outputs-spec/all.json new file mode 100644 index 000000000..1449203e9 --- /dev/null +++ b/src/libstore-tests/data/outputs-spec/all.json @@ -0,0 +1,3 @@ +[ + "*" +] diff --git a/src/libstore-tests/data/outputs-spec/extended/all.json b/src/libstore-tests/data/outputs-spec/extended/all.json new file mode 100644 index 000000000..1449203e9 --- /dev/null +++ b/src/libstore-tests/data/outputs-spec/extended/all.json @@ -0,0 +1,3 @@ +[ + "*" +] diff --git a/src/libstore-tests/data/outputs-spec/extended/def.json b/src/libstore-tests/data/outputs-spec/extended/def.json new file mode 100644 index 000000000..19765bd50 --- /dev/null +++ b/src/libstore-tests/data/outputs-spec/extended/def.json @@ -0,0 +1 @@ +null diff --git a/src/libstore-tests/data/outputs-spec/extended/name.json b/src/libstore-tests/data/outputs-spec/extended/name.json new file mode 100644 index 000000000..0ede90fb4 --- /dev/null +++ b/src/libstore-tests/data/outputs-spec/extended/name.json @@ -0,0 +1,3 @@ +[ + "a" +] diff --git a/src/libstore-tests/data/outputs-spec/extended/names.json b/src/libstore-tests/data/outputs-spec/extended/names.json new file mode 100644 index 000000000..517c9d68e --- /dev/null +++ b/src/libstore-tests/data/outputs-spec/extended/names.json @@ -0,0 +1,4 @@ +[ + "a", + "b" +] diff --git a/src/libstore-tests/data/outputs-spec/name.json b/src/libstore-tests/data/outputs-spec/name.json new file mode 100644 index 000000000..0ede90fb4 --- /dev/null +++ b/src/libstore-tests/data/outputs-spec/name.json @@ -0,0 +1,3 @@ +[ + "a" +] diff --git a/src/libstore-tests/data/outputs-spec/names.json b/src/libstore-tests/data/outputs-spec/names.json new file mode 100644 index 000000000..517c9d68e --- /dev/null +++ b/src/libstore-tests/data/outputs-spec/names.json @@ -0,0 +1,4 @@ +[ + "a", + "b" +] diff --git a/src/libstore-tests/data/realisation/simple.json b/src/libstore-tests/data/realisation/simple.json new file mode 100644 index 
000000000..2ccb1e721 --- /dev/null +++ b/src/libstore-tests/data/realisation/simple.json @@ -0,0 +1,6 @@ +{ + "dependentRealisations": {}, + "id": "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad!foo", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv", + "signatures": [] +} diff --git a/src/libstore-tests/data/realisation/with-dependent-realisations.json b/src/libstore-tests/data/realisation/with-dependent-realisations.json new file mode 100644 index 000000000..a58e0d7fe --- /dev/null +++ b/src/libstore-tests/data/realisation/with-dependent-realisations.json @@ -0,0 +1,8 @@ +{ + "dependentRealisations": { + "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad!foo": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv" + }, + "id": "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad!foo", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv", + "signatures": [] +} diff --git a/src/libstore-tests/data/realisation/with-signature.json b/src/libstore-tests/data/realisation/with-signature.json new file mode 100644 index 000000000..a28848cb0 --- /dev/null +++ b/src/libstore-tests/data/realisation/with-signature.json @@ -0,0 +1,8 @@ +{ + "dependentRealisations": {}, + "id": "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad!foo", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv", + "signatures": [ + "asdfasdfasdf" + ] +} diff --git a/src/libstore-tests/data/store-path/simple.json b/src/libstore-tests/data/store-path/simple.json new file mode 100644 index 000000000..9bedb882b --- /dev/null +++ b/src/libstore-tests/data/store-path/simple.json @@ -0,0 +1 @@ +"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv" diff --git a/src/libstore-tests/derivation-advanced-attrs.cc b/src/libstore-tests/derivation-advanced-attrs.cc index 37b422421..9c13bf048 100644 --- a/src/libstore-tests/derivation-advanced-attrs.cc +++ b/src/libstore-tests/derivation-advanced-attrs.cc @@ -51,45 +51,44 @@ using BothFixtures = ::testing::TypesreadTest(NAME ".json", [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - /* Use DRV file instead of C++ literal as source of truth. */ \ - auto aterm = readFile(this->goldenMaster(NAME ".drv")); \ - auto expected = parseDerivation(*this->store, std::move(aterm), NAME, this->mockXpSettings); \ - Derivation got = Derivation::fromJSON(*this->store, encoded, this->mockXpSettings); \ - EXPECT_EQ(got, expected); \ - }); \ - } \ - \ - TYPED_TEST(DerivationAdvancedAttrsBothTest, Derivation_##STEM##_to_json) \ - { \ - this->writeTest( \ - NAME ".json", \ - [&]() -> json { \ - /* Use DRV file instead of C++ literal as source of truth. */ \ - auto aterm = readFile(this->goldenMaster(NAME ".drv")); \ - return parseDerivation(*this->store, std::move(aterm), NAME, this->mockXpSettings) \ - .toJSON(*this->store); \ - }, \ - [](const auto & file) { return json::parse(readFile(file)); }, \ - [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ - } \ - \ - TYPED_TEST(DerivationAdvancedAttrsBothTest, Derivation_##STEM##_from_aterm) \ - { \ - this->readTest(NAME ".drv", [&](auto encoded) { \ - /* Use JSON file instead of C++ literal as source of truth. 
*/ \ - auto json = json::parse(readFile(this->goldenMaster(NAME ".json"))); \ - auto expected = Derivation::fromJSON(*this->store, json, this->mockXpSettings); \ - auto got = parseDerivation(*this->store, std::move(encoded), NAME, this->mockXpSettings); \ - EXPECT_EQ(got.toJSON(*this->store), expected.toJSON(*this->store)); \ - EXPECT_EQ(got, expected); \ - }); \ - } \ - \ +#define TEST_ATERM_JSON(STEM, NAME) \ + TYPED_TEST(DerivationAdvancedAttrsBothTest, Derivation_##STEM##_from_json) \ + { \ + this->readTest(NAME ".json", [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + /* Use DRV file instead of C++ literal as source of truth. */ \ + auto aterm = readFile(this->goldenMaster(NAME ".drv")); \ + auto expected = parseDerivation(*this->store, std::move(aterm), NAME, this->mockXpSettings); \ + Derivation got = Derivation::fromJSON(encoded, this->mockXpSettings); \ + EXPECT_EQ(got, expected); \ + }); \ + } \ + \ + TYPED_TEST(DerivationAdvancedAttrsBothTest, Derivation_##STEM##_to_json) \ + { \ + this->writeTest( \ + NAME ".json", \ + [&]() -> json { \ + /* Use DRV file instead of C++ literal as source of truth. */ \ + auto aterm = readFile(this->goldenMaster(NAME ".drv")); \ + return parseDerivation(*this->store, std::move(aterm), NAME, this->mockXpSettings).toJSON(); \ + }, \ + [](const auto & file) { return json::parse(readFile(file)); }, \ + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ + } \ + \ + TYPED_TEST(DerivationAdvancedAttrsBothTest, Derivation_##STEM##_from_aterm) \ + { \ + this->readTest(NAME ".drv", [&](auto encoded) { \ + /* Use JSON file instead of C++ literal as source of truth. */ \ + auto json = json::parse(readFile(this->goldenMaster(NAME ".json"))); \ + auto expected = Derivation::fromJSON(json, this->mockXpSettings); \ + auto got = parseDerivation(*this->store, std::move(encoded), NAME, this->mockXpSettings); \ + EXPECT_EQ(got.toJSON(), expected.toJSON()); \ + EXPECT_EQ(got, expected); \ + }); \ + } \ + \ /* No corresponding write test, because we need to read the drv to write the json file */ TEST_ATERM_JSON(advancedAttributes, "advanced-attributes-defaults"); diff --git a/src/libstore-tests/derivation.cc b/src/libstore-tests/derivation.cc index 812e1d01b..65a5d011d 100644 --- a/src/libstore-tests/derivation.cc +++ b/src/libstore-tests/derivation.cc @@ -5,13 +5,13 @@ #include "nix/store/derivations.hh" #include "nix/store/tests/libstore.hh" -#include "nix/util/tests/characterization.hh" +#include "nix/util/tests/json-characterization.hh" namespace nix { using nlohmann::json; -class DerivationTest : public CharacterizationTest, public LibStoreTest +class DerivationTest : public virtual CharacterizationTest, public LibStoreTest { std::filesystem::path unitTestData = getUnitTestData() / "derivation"; @@ -66,146 +66,183 @@ TEST_F(DynDerivationTest, BadATerm_oldVersionDynDeps) FormatError); } -#define TEST_JSON(FIXTURE, NAME, VAL, DRV_NAME, OUTPUT_NAME) \ - TEST_F(FIXTURE, DerivationOutput_##NAME##_from_json) \ - { \ - readTest("output-" #NAME ".json", [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - DerivationOutput got = DerivationOutput::fromJSON(*store, DRV_NAME, OUTPUT_NAME, encoded, mockXpSettings); \ - DerivationOutput expected{VAL}; \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(FIXTURE, DerivationOutput_##NAME##_to_json) \ - { \ - writeTest( \ - "output-" #NAME ".json", \ - [&]() -> json { return DerivationOutput{(VAL)}.toJSON(*store, (DRV_NAME), 
(OUTPUT_NAME)); }, \ - [](const auto & file) { return json::parse(readFile(file)); }, \ - [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ +#define MAKE_OUTPUT_JSON_TEST_P(FIXTURE) \ + TEST_P(FIXTURE, from_json) \ + { \ + const auto & [name, expected] = GetParam(); \ + /* Don't use readJsonTest because we want to check experimental \ + features. */ \ + readTest(Path{"output-"} + name + ".json", [&](const auto & encoded_) { \ + json j = json::parse(encoded_); \ + DerivationOutput got = DerivationOutput::fromJSON(j, mockXpSettings); \ + ASSERT_EQ(got, expected); \ + }); \ + } \ + \ + TEST_P(FIXTURE, to_json) \ + { \ + const auto & [name, value] = GetParam(); \ + writeJsonTest("output-" + name, value); \ } -TEST_JSON( - DerivationTest, - inputAddressed, - (DerivationOutput::InputAddressed{ - .path = store->parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-drv-name-output-name"), - }), - "drv-name", - "output-name") +struct DerivationOutputJsonTest : DerivationTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; -TEST_JSON( - DerivationTest, - caFixedFlat, - (DerivationOutput::CAFixed{ - .ca = - { - .method = ContentAddressMethod::Raw::Flat, - .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), - }, - }), - "drv-name", - "output-name") +MAKE_OUTPUT_JSON_TEST_P(DerivationOutputJsonTest) -TEST_JSON( - DerivationTest, - caFixedNAR, - (DerivationOutput::CAFixed{ - .ca = - { +INSTANTIATE_TEST_SUITE_P( + DerivationOutputJSON, + DerivationOutputJsonTest, + ::testing::Values( + std::pair{ + "inputAddressed", + DerivationOutput{DerivationOutput::InputAddressed{ + .path = StorePath{"c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-drv-name-output-name"}, + }}, + }, + std::pair{ + "caFixedFlat", + DerivationOutput{DerivationOutput::CAFixed{ + .ca = + { + .method = ContentAddressMethod::Raw::Flat, + .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), + }, + }}, + }, + std::pair{ + "caFixedNAR", + DerivationOutput{DerivationOutput::CAFixed{ + .ca = + { + .method = ContentAddressMethod::Raw::NixArchive, + .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), + }, + }}, + }, + std::pair{ + "deferred", + DerivationOutput{DerivationOutput::Deferred{}}, + })); + +struct DynDerivationOutputJsonTest : DynDerivationTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; + +MAKE_OUTPUT_JSON_TEST_P(DynDerivationOutputJsonTest); + +INSTANTIATE_TEST_SUITE_P( + DynDerivationOutputJSON, + DynDerivationOutputJsonTest, + ::testing::Values( + std::pair{ + "caFixedText", + DerivationOutput{DerivationOutput::CAFixed{ + .ca = + { + .method = ContentAddressMethod::Raw::Text, + .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), + }, + }}, + })); + +struct CaDerivationOutputJsonTest : CaDerivationTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; + +MAKE_OUTPUT_JSON_TEST_P(CaDerivationOutputJsonTest); + +INSTANTIATE_TEST_SUITE_P( + CaDerivationOutputJSON, + CaDerivationOutputJsonTest, + ::testing::Values( + std::pair{ + "caFloating", + DerivationOutput{DerivationOutput::CAFloating{ .method = ContentAddressMethod::Raw::NixArchive, - .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), - }, - }), - "drv-name", - "output-name") + .hashAlgo = HashAlgorithm::SHA256, + }}, + })); -TEST_JSON( - DynDerivationTest, - caFixedText, - (DerivationOutput::CAFixed{ - .ca = - { - 
.method = ContentAddressMethod::Raw::Text, - .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), - }, - }), - "drv-name", - "output-name") +struct ImpureDerivationOutputJsonTest : ImpureDerivationTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; -TEST_JSON( - CaDerivationTest, - caFloating, - (DerivationOutput::CAFloating{ - .method = ContentAddressMethod::Raw::NixArchive, - .hashAlgo = HashAlgorithm::SHA256, - }), - "drv-name", - "output-name") +MAKE_OUTPUT_JSON_TEST_P(ImpureDerivationOutputJsonTest); -TEST_JSON(DerivationTest, deferred, DerivationOutput::Deferred{}, "drv-name", "output-name") +INSTANTIATE_TEST_SUITE_P( + ImpureDerivationOutputJSON, + ImpureDerivationOutputJsonTest, + ::testing::Values( + std::pair{ + "impure", + DerivationOutput{DerivationOutput::Impure{ + .method = ContentAddressMethod::Raw::NixArchive, + .hashAlgo = HashAlgorithm::SHA256, + }}, + })); -TEST_JSON( - ImpureDerivationTest, - impure, - (DerivationOutput::Impure{ - .method = ContentAddressMethod::Raw::NixArchive, - .hashAlgo = HashAlgorithm::SHA256, - }), - "drv-name", - "output-name") +#undef MAKE_OUTPUT_JSON_TEST_P -#undef TEST_JSON - -#define TEST_JSON(FIXTURE, NAME, VAL) \ - TEST_F(FIXTURE, Derivation_##NAME##_from_json) \ - { \ - readTest(#NAME ".json", [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - Derivation expected{VAL}; \ - Derivation got = Derivation::fromJSON(*store, encoded, mockXpSettings); \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(FIXTURE, Derivation_##NAME##_to_json) \ - { \ - writeTest( \ - #NAME ".json", \ - [&]() -> json { return Derivation{VAL}.toJSON(*store); }, \ - [](const auto & file) { return json::parse(readFile(file)); }, \ - [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ +#define MAKE_TEST_P(FIXTURE) \ + TEST_P(FIXTURE, from_json) \ + { \ + const auto & drv = GetParam(); \ + /* Don't use readJsonTest because we want to check experimental \ + features. 
*/ \ + readTest(drv.name + ".json", [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + Derivation got = Derivation::fromJSON(encoded, mockXpSettings); \ + ASSERT_EQ(got, drv); \ + }); \ + } \ + \ + TEST_P(FIXTURE, to_json) \ + { \ + const auto & drv = GetParam(); \ + writeJsonTest(drv.name, drv); \ + } \ + \ + TEST_P(FIXTURE, from_aterm) \ + { \ + const auto & drv = GetParam(); \ + readTest(drv.name + ".drv", [&](auto encoded) { \ + auto got = parseDerivation(*store, std::move(encoded), drv.name, mockXpSettings); \ + ASSERT_EQ(got.toJSON(), drv.toJSON()); \ + ASSERT_EQ(got, drv); \ + }); \ + } \ + \ + TEST_P(FIXTURE, to_aterm) \ + { \ + const auto & drv = GetParam(); \ + writeTest(drv.name + ".drv", [&]() -> std::string { return drv.unparse(*store, false); }); \ } -#define TEST_ATERM(FIXTURE, NAME, VAL, DRV_NAME) \ - TEST_F(FIXTURE, Derivation_##NAME##_from_aterm) \ - { \ - readTest(#NAME ".drv", [&](auto encoded) { \ - Derivation expected{VAL}; \ - auto got = parseDerivation(*store, std::move(encoded), DRV_NAME, mockXpSettings); \ - ASSERT_EQ(got.toJSON(*store), expected.toJSON(*store)); \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(FIXTURE, Derivation_##NAME##_to_aterm) \ - { \ - writeTest(#NAME ".drv", [&]() -> std::string { return (VAL).unparse(*store, false); }); \ - } +struct DerivationJsonAtermTest : DerivationTest, + JsonCharacterizationTest, + ::testing::WithParamInterface +{}; -Derivation makeSimpleDrv(const Store & store) +MAKE_TEST_P(DerivationJsonAtermTest); + +Derivation makeSimpleDrv() { Derivation drv; drv.name = "simple-derivation"; drv.inputSrcs = { - store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"), + StorePath("c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"), }; drv.inputDrvs = { .map = { { - store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv"), + StorePath("c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv"), { .value = { @@ -231,22 +268,27 @@ Derivation makeSimpleDrv(const Store & store) return drv; } -TEST_JSON(DerivationTest, simple, makeSimpleDrv(*store)) +INSTANTIATE_TEST_SUITE_P(DerivationJSONATerm, DerivationJsonAtermTest, ::testing::Values(makeSimpleDrv())); -TEST_ATERM(DerivationTest, simple, makeSimpleDrv(*store), "simple-derivation") +struct DynDerivationJsonAtermTest : DynDerivationTest, + JsonCharacterizationTest, + ::testing::WithParamInterface +{}; -Derivation makeDynDepDerivation(const Store & store) +MAKE_TEST_P(DynDerivationJsonAtermTest); + +Derivation makeDynDepDerivation() { Derivation drv; drv.name = "dyn-dep-derivation"; drv.inputSrcs = { - store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"), + StorePath{"c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"}, }; drv.inputDrvs = { .map = { { - store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv"), + StorePath{"c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv"}, DerivedPathMap::ChildNode{ .value = { @@ -293,11 +335,8 @@ Derivation makeDynDepDerivation(const Store & store) return drv; } -TEST_JSON(DynDerivationTest, dynDerivationDeps, makeDynDepDerivation(*store)) +INSTANTIATE_TEST_SUITE_P(DynDerivationJSONATerm, DynDerivationJsonAtermTest, ::testing::Values(makeDynDepDerivation())); -TEST_ATERM(DynDerivationTest, dynDerivationDeps, makeDynDepDerivation(*store), "dyn-dep-derivation") - -#undef TEST_JSON -#undef TEST_ATERM +#undef MAKE_TEST_P } // namespace nix diff --git a/src/libstore-tests/derived-path.cc b/src/libstore-tests/derived-path.cc index c7d2c5817..6e7648f25 100644 --- 
a/src/libstore-tests/derived-path.cc +++ b/src/libstore-tests/derived-path.cc @@ -3,13 +3,23 @@ #include #include +#include "nix/util/tests/characterization.hh" #include "nix/store/tests/derived-path.hh" #include "nix/store/tests/libstore.hh" namespace nix { -class DerivedPathTest : public LibStoreTest -{}; +class DerivedPathTest : public CharacterizationTest, public LibStoreTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "derived-path"; + +public: + + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } +}; /** * Round trip (string <-> data structure) test for @@ -107,4 +117,90 @@ RC_GTEST_FIXTURE_PROP(DerivedPathTest, prop_round_rip, (const DerivedPath & o)) #endif +/* ---------------------------------------------------------------------------- + * JSON + * --------------------------------------------------------------------------*/ + +using nlohmann::json; + +#define TEST_JSON(TYPE, NAME, VAL) \ + static const TYPE NAME = VAL; \ + \ + TEST_F(DerivedPathTest, NAME##_from_json) \ + { \ + readTest(#NAME ".json", [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + TYPE got = static_cast(encoded); \ + ASSERT_EQ(got, NAME); \ + }); \ + } \ + \ + TEST_F(DerivedPathTest, NAME##_to_json) \ + { \ + writeTest( \ + #NAME ".json", \ + [&]() -> json { return static_cast(NAME); }, \ + [](const auto & file) { return json::parse(readFile(file)); }, \ + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ + } + +TEST_JSON( + SingleDerivedPath, single_opaque, SingleDerivedPath::Opaque{StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}}); + +TEST_JSON( + SingleDerivedPath, + single_built, + (SingleDerivedPath::Built{ + .drvPath = make_ref(SingleDerivedPath::Opaque{ + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}}), + .output = "bar", + })); + +TEST_JSON( + SingleDerivedPath, + single_built_built, + (SingleDerivedPath::Built{ + .drvPath = make_ref(SingleDerivedPath::Built{ + .drvPath = make_ref(SingleDerivedPath::Opaque{ + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}}), + .output = "bar", + }), + .output = "baz", + })); + +TEST_JSON(DerivedPath, multi_opaque, DerivedPath::Opaque{StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}}); + +TEST_JSON( + DerivedPath, + mutli_built, + (DerivedPath::Built{ + .drvPath = make_ref(SingleDerivedPath::Opaque{ + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}}), + .outputs = OutputsSpec::Names{"bar", "baz"}, + })); + +TEST_JSON( + DerivedPath, + multi_built_built, + (DerivedPath::Built{ + .drvPath = make_ref(SingleDerivedPath::Built{ + .drvPath = make_ref(SingleDerivedPath::Opaque{ + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}}), + .output = "bar", + }), + .outputs = OutputsSpec::Names{"baz", "quux"}, + })); + +TEST_JSON( + DerivedPath, + multi_built_built_wildcard, + (DerivedPath::Built{ + .drvPath = make_ref(SingleDerivedPath::Built{ + .drvPath = make_ref(SingleDerivedPath::Opaque{ + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}}), + .output = "bar", + }), + .outputs = OutputsSpec::All{}, + })); + } // namespace nix diff --git a/src/libstore-tests/http-binary-cache-store.cc b/src/libstore-tests/http-binary-cache-store.cc index 0e3be4ced..4b3754a1f 100644 --- a/src/libstore-tests/http-binary-cache-store.cc +++ b/src/libstore-tests/http-binary-cache-store.cc @@ -18,4 +18,20 @@ TEST(HttpBinaryCacheStore, constructConfigNoTrailingSlash) EXPECT_EQ(config.cacheUri.to_string(), 
"https://foo.bar.baz/a/b"); } +TEST(HttpBinaryCacheStore, constructConfigWithParams) +{ + StoreConfig::Params params{{"compression", "xz"}}; + HttpBinaryCacheStoreConfig config{"https", "foo.bar.baz/a/b/", params}; + EXPECT_EQ(config.cacheUri.to_string(), "https://foo.bar.baz/a/b"); + EXPECT_EQ(config.getReference().params, params); +} + +TEST(HttpBinaryCacheStore, constructConfigWithParamsAndUrlWithParams) +{ + StoreConfig::Params params{{"compression", "xz"}}; + HttpBinaryCacheStoreConfig config{"https", "foo.bar.baz/a/b?some-param=some-value", params}; + EXPECT_EQ(config.cacheUri.to_string(), "https://foo.bar.baz/a/b?some-param=some-value"); + EXPECT_EQ(config.getReference().params, params); +} + } // namespace nix diff --git a/src/libstore-tests/main.cc b/src/libstore-tests/main.cc new file mode 100644 index 000000000..ffe981613 --- /dev/null +++ b/src/libstore-tests/main.cc @@ -0,0 +1,15 @@ +#include + +#include "nix/store/tests/test-main.hh" + +using namespace nix; + +int main(int argc, char ** argv) +{ + auto res = testMainForBuidingPre(argc, argv); + if (res) + return res; + + ::testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index 4c2840ab7..915c10a38 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -52,6 +52,7 @@ gtest = dependency('gmock') deps_private += gtest subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'common-protocol.cc', @@ -66,12 +67,14 @@ sources = files( 'local-overlay-store.cc', 'local-store.cc', 'machines.cc', + 'main.cc', 'nar-info-disk-cache.cc', 'nar-info.cc', 'nix_api_store.cc', 'outputs-spec.cc', 'path-info.cc', 'path.cc', + 'realisation.cc', 'references.cc', 's3-binary-cache-store.cc', 's3.cc', @@ -101,7 +104,7 @@ this_exe = executable( test( meson.project_name(), this_exe, - env : { + env : asan_test_options_env + { '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', 'HOME' : meson.current_build_dir() / 'test-home', 'NIX_REMOTE' : meson.current_build_dir() / 'test-home' / 'store', @@ -135,7 +138,7 @@ if get_option('benchmarks') benchmark( 'nix-store-benchmarks', benchmark_exe, - env : { + env : asan_test_options_env + { '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', }, ) diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index c14fb6d9f..dfd554ec1 100644 --- a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -1,7 +1,10 @@ +#include + #include "nix_api_util.h" #include "nix_api_store.h" #include "nix/store/tests/nix_api_store.hh" +#include "nix/store/globals.hh" #include "nix/util/tests/string_callback.hh" #include "nix/util/url.hh" @@ -197,4 +200,60 @@ TEST_F(nix_api_util_context, nix_store_real_path_binary_cache) ASSERT_STREQ(path_raw.c_str(), rp.c_str()); } +template +struct LambdaAdapter +{ + F fun; + + template + static inline auto call(LambdaAdapter * ths, Args... args) + { + return ths->fun(args...); + } + + template + static auto call_void(void * ths, Args... 
args) + { + return call(static_cast *>(ths), args...); + } +}; + +TEST_F(nix_api_store_test_base, build_from_json) +{ + // FIXME get rid of these + nix::experimentalFeatureSettings.set("extra-experimental-features", "ca-derivations"); + nix::settings.substituters = {}; + + auto * store = open_local_store(); + + std::filesystem::path unitTestData{getenv("_NIX_TEST_UNIT_DATA")}; + + std::ifstream t{unitTestData / "derivation/ca/self-contained.json"}; + std::stringstream buffer; + buffer << t.rdbuf(); + + auto * drv = nix_derivation_from_json(ctx, store, buffer.str().c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + auto * drvPath = nix_add_derivation(ctx, store, drv); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath) { + auto is_valid_path = nix_store_is_valid_path(ctx, store, outPath); + ASSERT_EQ(is_valid_path, true); + }}; + + auto ret = nix_store_realise( + ctx, store, drvPath, static_cast(&cb), decltype(cb)::call_void); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + + // Clean up + nix_store_path_free(drvPath); + nix_derivation_free(drv); + nix_store_free(store); +} + } // namespace nixC diff --git a/src/libstore-tests/outputs-spec.cc b/src/libstore-tests/outputs-spec.cc index b0b80e7c4..1fac222fc 100644 --- a/src/libstore-tests/outputs-spec.cc +++ b/src/libstore-tests/outputs-spec.cc @@ -1,18 +1,43 @@ -#include "nix/store/tests/outputs-spec.hh" - #include #include #include +#include "nix/store/tests/outputs-spec.hh" +#include "nix/util/tests/json-characterization.hh" + namespace nix { -TEST(OutputsSpec, no_empty_names) +class OutputsSpecTest : public virtual CharacterizationTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "outputs-spec"; + +public: + + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } +}; + +class ExtendedOutputsSpecTest : public virtual CharacterizationTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "outputs-spec" / "extended"; + +public: + + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } +}; + +TEST_F(OutputsSpecTest, no_empty_names) { ASSERT_DEATH(OutputsSpec::Names{StringSet{}}, ""); } #define TEST_DONT_PARSE(NAME, STR) \ - TEST(OutputsSpec, bad_##NAME) \ + TEST_F(OutputsSpecTest, bad_##NAME) \ { \ std::optional OutputsSpecOpt = OutputsSpec::parseOpt(STR); \ ASSERT_FALSE(OutputsSpecOpt); \ @@ -26,7 +51,7 @@ TEST_DONT_PARSE(star_second, "foo,*") #undef TEST_DONT_PARSE -TEST(OutputsSpec, all) +TEST_F(OutputsSpecTest, all) { std::string_view str = "*"; OutputsSpec expected = OutputsSpec::All{}; @@ -34,7 +59,7 @@ TEST(OutputsSpec, all) ASSERT_EQ(expected.to_string(), str); } -TEST(OutputsSpec, names_out) +TEST_F(OutputsSpecTest, names_out) { std::string_view str = "out"; OutputsSpec expected = OutputsSpec::Names{"out"}; @@ -42,7 +67,7 @@ TEST(OutputsSpec, names_out) ASSERT_EQ(expected.to_string(), str); } -TEST(OutputsSpec, names_underscore) +TEST_F(OutputsSpecTest, names_underscore) { std::string_view str = "a_b"; OutputsSpec expected = OutputsSpec::Names{"a_b"}; @@ -50,7 +75,7 @@ TEST(OutputsSpec, names_underscore) ASSERT_EQ(expected.to_string(), str); } -TEST(OutputsSpec, names_numeric) +TEST_F(OutputsSpecTest, names_numeric) { std::string_view str = "01"; OutputsSpec expected = OutputsSpec::Names{"01"}; @@ -58,7 +83,7 @@ TEST(OutputsSpec, names_numeric) ASSERT_EQ(expected.to_string(), str); } 
-TEST(OutputsSpec, names_out_bin) +TEST_F(OutputsSpecTest, names_out_bin) { OutputsSpec expected = OutputsSpec::Names{"out", "bin"}; ASSERT_EQ(OutputsSpec::parse("out,bin"), expected); @@ -68,32 +93,32 @@ TEST(OutputsSpec, names_out_bin) #define TEST_SUBSET(X, THIS, THAT) X((OutputsSpec{THIS}).isSubsetOf(THAT)); -TEST(OutputsSpec, subsets_all_all) +TEST_F(OutputsSpecTest, subsets_all_all) { TEST_SUBSET(ASSERT_TRUE, OutputsSpec::All{}, OutputsSpec::All{}); } -TEST(OutputsSpec, subsets_names_all) +TEST_F(OutputsSpecTest, subsets_names_all) { TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names{"a"}, OutputsSpec::All{}); } -TEST(OutputsSpec, subsets_names_names_eq) +TEST_F(OutputsSpecTest, subsets_names_names_eq) { TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names{"a"}, OutputsSpec::Names{"a"}); } -TEST(OutputsSpec, subsets_names_names_noneq) +TEST_F(OutputsSpecTest, subsets_names_names_noneq) { TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names{"a"}, (OutputsSpec::Names{"a", "b"})); } -TEST(OutputsSpec, not_subsets_all_names) +TEST_F(OutputsSpecTest, not_subsets_all_names) { TEST_SUBSET(ASSERT_FALSE, OutputsSpec::All{}, OutputsSpec::Names{"a"}); } -TEST(OutputsSpec, not_subsets_names_names) +TEST_F(OutputsSpecTest, not_subsets_names_names) { TEST_SUBSET(ASSERT_FALSE, (OutputsSpec::Names{"a", "b"}), (OutputsSpec::Names{"a"})); } @@ -102,22 +127,22 @@ TEST(OutputsSpec, not_subsets_names_names) #define TEST_UNION(RES, THIS, THAT) ASSERT_EQ(OutputsSpec{RES}, (OutputsSpec{THIS}).union_(THAT)); -TEST(OutputsSpec, union_all_all) +TEST_F(OutputsSpecTest, union_all_all) { TEST_UNION(OutputsSpec::All{}, OutputsSpec::All{}, OutputsSpec::All{}); } -TEST(OutputsSpec, union_all_names) +TEST_F(OutputsSpecTest, union_all_names) { TEST_UNION(OutputsSpec::All{}, OutputsSpec::All{}, OutputsSpec::Names{"a"}); } -TEST(OutputsSpec, union_names_all) +TEST_F(OutputsSpecTest, union_names_all) { TEST_UNION(OutputsSpec::All{}, OutputsSpec::Names{"a"}, OutputsSpec::All{}); } -TEST(OutputsSpec, union_names_names) +TEST_F(OutputsSpecTest, union_names_names) { TEST_UNION((OutputsSpec::Names{"a", "b"}), OutputsSpec::Names{"a"}, OutputsSpec::Names{"b"}); } @@ -125,7 +150,7 @@ TEST(OutputsSpec, union_names_names) #undef TEST_UNION #define TEST_DONT_PARSE(NAME, STR) \ - TEST(ExtendedOutputsSpec, bad_##NAME) \ + TEST_F(ExtendedOutputsSpecTest, bad_##NAME) \ { \ std::optional extendedOutputsSpecOpt = ExtendedOutputsSpec::parseOpt(STR); \ ASSERT_FALSE(extendedOutputsSpecOpt); \ @@ -140,7 +165,7 @@ TEST_DONT_PARSE(star_second, "^foo,*") #undef TEST_DONT_PARSE -TEST(ExtendedOutputsSpec, default) +TEST_F(ExtendedOutputsSpecTest, default) { std::string_view str = "foo"; auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str); @@ -150,7 +175,7 @@ TEST(ExtendedOutputsSpec, default) ASSERT_EQ(std::string{prefix} + expected.to_string(), str); } -TEST(ExtendedOutputsSpec, all) +TEST_F(ExtendedOutputsSpecTest, all) { std::string_view str = "foo^*"; auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str); @@ -160,7 +185,7 @@ TEST(ExtendedOutputsSpec, all) ASSERT_EQ(std::string{prefix} + expected.to_string(), str); } -TEST(ExtendedOutputsSpec, out) +TEST_F(ExtendedOutputsSpecTest, out) { std::string_view str = "foo^out"; auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str); @@ -170,7 +195,7 @@ TEST(ExtendedOutputsSpec, out) ASSERT_EQ(std::string{prefix} + expected.to_string(), str); } -TEST(ExtendedOutputsSpec, out_bin) +TEST_F(ExtendedOutputsSpecTest, out_bin) { auto [prefix, extendedOutputsSpec] = 
ExtendedOutputsSpec::parse("foo^out,bin"); ASSERT_EQ(prefix, "foo"); @@ -179,7 +204,7 @@ TEST(ExtendedOutputsSpec, out_bin) ASSERT_EQ(std::string{prefix} + expected.to_string(), "foo^bin,out"); } -TEST(ExtendedOutputsSpec, many_carrot) +TEST_F(ExtendedOutputsSpecTest, many_carrot) { auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse("foo^bar^out,bin"); ASSERT_EQ(prefix, "foo^bar"); @@ -188,28 +213,49 @@ TEST(ExtendedOutputsSpec, many_carrot) ASSERT_EQ(std::string{prefix} + expected.to_string(), "foo^bar^bin,out"); } -#define TEST_JSON(TYPE, NAME, STR, VAL) \ - \ - TEST(TYPE, NAME##_to_json) \ - { \ - using nlohmann::literals::operator"" _json; \ - ASSERT_EQ(STR##_json, ((nlohmann::json) TYPE{VAL})); \ - } \ - \ - TEST(TYPE, NAME##_from_json) \ - { \ - using nlohmann::literals::operator"" _json; \ - ASSERT_EQ(TYPE{VAL}, (STR##_json).get()); \ +#define MAKE_TEST_P(FIXTURE, TYPE) \ + TEST_P(FIXTURE, from_json) \ + { \ + const auto & [name, value] = GetParam(); \ + readJsonTest(name, value); \ + } \ + \ + TEST_P(FIXTURE, to_json) \ + { \ + const auto & [name, value] = GetParam(); \ + writeJsonTest(name, value); \ } -TEST_JSON(OutputsSpec, all, R"(["*"])", OutputsSpec::All{}) -TEST_JSON(OutputsSpec, name, R"(["a"])", OutputsSpec::Names{"a"}) -TEST_JSON(OutputsSpec, names, R"(["a","b"])", (OutputsSpec::Names{"a", "b"})) +struct OutputsSpecJsonTest : OutputsSpecTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; -TEST_JSON(ExtendedOutputsSpec, def, R"(null)", ExtendedOutputsSpec::Default{}) -TEST_JSON(ExtendedOutputsSpec, all, R"(["*"])", ExtendedOutputsSpec::Explicit{OutputsSpec::All{}}) -TEST_JSON(ExtendedOutputsSpec, name, R"(["a"])", ExtendedOutputsSpec::Explicit{OutputsSpec::Names{"a"}}) -TEST_JSON(ExtendedOutputsSpec, names, R"(["a","b"])", (ExtendedOutputsSpec::Explicit{OutputsSpec::Names{"a", "b"}})) +MAKE_TEST_P(OutputsSpecJsonTest, OutputsSpec); + +INSTANTIATE_TEST_SUITE_P( + OutputsSpecJSON, + OutputsSpecJsonTest, + ::testing::Values( + std::pair{"all", OutputsSpec{OutputsSpec::All{}}}, + std::pair{"name", OutputsSpec{OutputsSpec::Names{"a"}}}, + std::pair{"names", OutputsSpec{OutputsSpec::Names{"a", "b"}}})); + +struct ExtendedOutputsSpecJsonTest : ExtendedOutputsSpecTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; + +MAKE_TEST_P(ExtendedOutputsSpecJsonTest, ExtendedOutputsSpec); + +INSTANTIATE_TEST_SUITE_P( + ExtendedOutputsSpecJSON, + ExtendedOutputsSpecJsonTest, + ::testing::Values( + std::pair{"def", ExtendedOutputsSpec{ExtendedOutputsSpec::Default{}}}, + std::pair{"all", ExtendedOutputsSpec{ExtendedOutputsSpec::Explicit{OutputsSpec::All{}}}}, + std::pair{"name", ExtendedOutputsSpec{ExtendedOutputsSpec::Explicit{OutputsSpec::Names{"a"}}}}, + std::pair{"names", ExtendedOutputsSpec{ExtendedOutputsSpec::Explicit{OutputsSpec::Names{"a", "b"}}}})); #undef TEST_JSON diff --git a/src/libstore-tests/package.nix b/src/libstore-tests/package.nix index 90e6af519..d5255f4f9 100644 --- a/src/libstore-tests/package.nix +++ b/src/libstore-tests/package.nix @@ -83,6 +83,7 @@ mkMesonExecutable (finalAttrs: { } ( '' + export ASAN_OPTIONS=abort_on_error=1:print_summary=1:detect_leaks=0 export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} export NIX_REMOTE=$HOME/store ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} diff --git a/src/libstore-tests/path.cc b/src/libstore-tests/path.cc index 01d1ca792..eb860a34d 100644 --- a/src/libstore-tests/path.cc +++ b/src/libstore-tests/path.cc @@ -7,7 +7,7 @@ 
#include "nix/store/path-regex.hh" #include "nix/store/store-api.hh" -#include "nix/util/tests/hash.hh" +#include "nix/util/tests/json-characterization.hh" #include "nix/store/tests/libstore.hh" #include "nix/store/tests/path.hh" @@ -16,8 +16,17 @@ namespace nix { #define STORE_DIR "/nix/store/" #define HASH_PART "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q" -class StorePathTest : public LibStoreTest -{}; +class StorePathTest : public virtual CharacterizationTest, public LibStoreTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "store-path"; + +public: + + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } +}; static std::regex nameRegex{std::string{nameRegexStr}}; @@ -134,4 +143,36 @@ RC_GTEST_FIXTURE_PROP(StorePathTest, prop_check_regex_eq_parse, ()) #endif +/* ---------------------------------------------------------------------------- + * JSON + * --------------------------------------------------------------------------*/ + +using nlohmann::json; + +struct StorePathJsonTest : StorePathTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; + +TEST_P(StorePathJsonTest, from_json) +{ + auto & [name, expected] = GetParam(); + readJsonTest(name, expected); +} + +TEST_P(StorePathJsonTest, to_json) +{ + auto & [name, value] = GetParam(); + writeJsonTest(name, value); +} + +INSTANTIATE_TEST_SUITE_P( + StorePathJSON, + StorePathJsonTest, + ::testing::Values( + std::pair{ + "simple", + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}, + })); + } // namespace nix diff --git a/src/libstore-tests/realisation.cc b/src/libstore-tests/realisation.cc new file mode 100644 index 000000000..a5a5bee50 --- /dev/null +++ b/src/libstore-tests/realisation.cc @@ -0,0 +1,97 @@ +#include + +#include +#include +#include + +#include "nix/store/store-api.hh" + +#include "nix/util/tests/json-characterization.hh" +#include "nix/store/tests/libstore.hh" + +namespace nix { + +class RealisationTest : public JsonCharacterizationTest, public LibStoreTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "realisation"; + +public: + + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } +}; + +/* ---------------------------------------------------------------------------- + * JSON + * --------------------------------------------------------------------------*/ + +using nlohmann::json; + +struct RealisationJsonTest : RealisationTest, ::testing::WithParamInterface> +{}; + +TEST_P(RealisationJsonTest, from_json) +{ + const auto & [name, expected] = GetParam(); + readJsonTest(name, expected); +} + +TEST_P(RealisationJsonTest, to_json) +{ + const auto & [name, value] = GetParam(); + writeJsonTest(name, value); +} + +INSTANTIATE_TEST_SUITE_P( + RealisationJSON, + RealisationJsonTest, + ([] { + Realisation simple{ + + .id = + { + .drvHash = Hash::parseExplicitFormatUnprefixed( + "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", + HashAlgorithm::SHA256, + HashFormat::Base16), + .outputName = "foo", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}, + }; + return ::testing::Values( + std::pair{ + "simple", + simple, + }, + std::pair{ + "with-signature", + [&] { + auto r = simple; + // FIXME actually sign properly + r.signatures = {"asdfasdfasdf"}; + return r; + }()}, + std::pair{ + "with-dependent-realisations", + [&] { + auto r = simple; + r.dependentRealisations = {{ + { + .drvHash = Hash::parseExplicitFormatUnprefixed( 
+ "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", + HashAlgorithm::SHA256, + HashFormat::Base16), + .outputName = "foo", + }, + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}, + }}; + return r; + }(), + }); + } + + ())); + +} // namespace nix diff --git a/src/libstore-tests/s3.cc b/src/libstore-tests/s3.cc index 44a31ddc9..799e102fe 100644 --- a/src/libstore-tests/s3.cc +++ b/src/libstore-tests/s3.cc @@ -8,6 +8,10 @@ namespace nix { +// ============================================================================= +// ParsedS3URL Tests +// ============================================================================= + struct ParsedS3URLTestCase { std::string url; @@ -86,18 +90,41 @@ INSTANTIATE_TEST_SUITE_P( }), [](const ::testing::TestParamInfo & info) { return info.param.description; }); -TEST(InvalidParsedS3URLTest, parseS3URLErrors) +// Parameterized test for invalid S3 URLs +struct InvalidS3URLTestCase { - auto invalidBucketMatcher = ::testing::ThrowsMessage( - testing::HasSubstrIgnoreANSIMatcher("error: URI has a missing or invalid bucket name")); + std::string url; + std::string expectedErrorSubstring; + std::string description; +}; - /* Empty bucket (authority) */ - ASSERT_THAT([]() { ParsedS3URL::parse(parseURL("s3:///key")); }, invalidBucketMatcher); - /* Invalid bucket name */ - ASSERT_THAT([]() { ParsedS3URL::parse(parseURL("s3://127.0.0.1")); }, invalidBucketMatcher); +class InvalidParsedS3URLTest : public ::testing::WithParamInterface, public ::testing::Test +{}; + +TEST_P(InvalidParsedS3URLTest, parseS3URLErrors) +{ + const auto & testCase = GetParam(); + + ASSERT_THAT( + [&testCase]() { ParsedS3URL::parse(parseURL(testCase.url)); }, + ::testing::ThrowsMessage(testing::HasSubstrIgnoreANSIMatcher(testCase.expectedErrorSubstring))); } -// Parameterized test for s3ToHttpsUrl conversion +INSTANTIATE_TEST_SUITE_P( + InvalidUrls, + InvalidParsedS3URLTest, + ::testing::Values( + InvalidS3URLTestCase{"s3:///key", "error: URI has a missing or invalid bucket name", "empty_bucket"}, + InvalidS3URLTestCase{"s3://127.0.0.1", "error: URI has a missing or invalid bucket name", "ip_address_bucket"}, + InvalidS3URLTestCase{"s3://bucket with spaces/key", "is not a valid URL", "bucket_with_spaces"}, + InvalidS3URLTestCase{"s3://", "error: URI has a missing or invalid bucket name", "completely_empty"}, + InvalidS3URLTestCase{"s3://bucket", "error: URI has a missing or invalid key", "missing_key"}), + [](const ::testing::TestParamInfo & info) { return info.param.description; }); + +// ============================================================================= +// S3 URL to HTTPS Conversion Tests +// ============================================================================= + struct S3ToHttpsConversionTestCase { ParsedS3URL input; diff --git a/src/libstore-tests/uds-remote-store.cc b/src/libstore-tests/uds-remote-store.cc index 11e6b04a3..415dfc4ac 100644 --- a/src/libstore-tests/uds-remote-store.cc +++ b/src/libstore-tests/uds-remote-store.cc @@ -22,4 +22,24 @@ TEST(UDSRemoteStore, constructConfig_to_string) EXPECT_EQ(config.getReference().to_string(), "daemon"); } +TEST(UDSRemoteStore, constructConfigWithParams) +{ + StoreConfig::Params params{{"max-connections", "1"}}; + UDSRemoteStoreConfig config{"unix", "/tmp/socket", params}; + auto storeReference = config.getReference(); + EXPECT_EQ(storeReference.to_string(), "unix:///tmp/socket?max-connections=1"); + EXPECT_EQ(storeReference.render(/*withParams=*/false), "unix:///tmp/socket"); + 
EXPECT_EQ(storeReference.params, params); +} + +TEST(UDSRemoteStore, constructConfigWithParamsNoPath) +{ + StoreConfig::Params params{{"max-connections", "1"}}; + UDSRemoteStoreConfig config{"unix", "", params}; + auto storeReference = config.getReference(); + EXPECT_EQ(storeReference.to_string(), "daemon?max-connections=1"); + EXPECT_EQ(storeReference.render(/*withParams=*/false), "daemon"); + EXPECT_EQ(storeReference.params, params); +} + } // namespace nix diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index e08a1449b..badfb4b14 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -515,8 +515,14 @@ void BinaryCacheStore::queryRealisationUncached( if (!data) return (*callbackPtr)({}); - auto realisation = Realisation::fromJSON(nlohmann::json::parse(*data), outputInfoFilePath); - return (*callbackPtr)(std::make_shared(realisation)); + std::shared_ptr realisation; + try { + realisation = std::make_shared(nlohmann::json::parse(*data)); + } catch (Error & e) { + e.addTrace({}, "while parsing file '%s' as a realisation", outputInfoFilePath); + throw; + } + return (*callbackPtr)(std::move(realisation)); } catch (...) { callbackPtr->rethrow(); } @@ -530,12 +536,22 @@ void BinaryCacheStore::registerDrvOutput(const Realisation & info) if (diskCache) diskCache->upsertRealisation(config.getReference().render(/*FIXME withParams=*/false), info); auto filePath = realisationsPrefix + "/" + info.id.to_string() + ".doi"; - upsertFile(filePath, info.toJSON().dump(), "application/json"); + upsertFile(filePath, static_cast(info).dump(), "application/json"); +} + +ref BinaryCacheStore::getRemoteFSAccessor(bool requireValidPath) +{ + return make_ref(ref(shared_from_this()), requireValidPath, config.localNarCache); } ref BinaryCacheStore::getFSAccessor(bool requireValidPath) { - return make_ref(ref(shared_from_this()), requireValidPath, config.localNarCache); + return getRemoteFSAccessor(requireValidPath); +} + +std::shared_ptr BinaryCacheStore::getFSAccessor(const StorePath & storePath, bool requireValidPath) +{ + return getRemoteFSAccessor(requireValidPath)->accessObject(storePath); } void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSet & sigs) diff --git a/src/libstore/common-protocol.cc b/src/libstore/common-protocol.cc index d4f3efc9b..b069c9498 100644 --- a/src/libstore/common-protocol.cc +++ b/src/libstore/common-protocol.cc @@ -49,13 +49,18 @@ void CommonProto::Serialise::write( Realisation CommonProto::Serialise::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { std::string rawInput = readString(conn.from); - return Realisation::fromJSON(nlohmann::json::parse(rawInput), "remote-protocol"); + try { + return nlohmann::json::parse(rawInput); + } catch (Error & e) { + e.addTrace({}, "while parsing a realisation object in the remote protocol"); + throw; + } } void CommonProto::Serialise::write( const StoreDirConfig & store, CommonProto::WriteConn conn, const Realisation & realisation) { - conn.to << realisation.toJSON().dump(); + conn.to << static_cast(realisation).dump(); } DrvOutput CommonProto::Serialise::read(const StoreDirConfig & store, CommonProto::ReadConn conn) diff --git a/src/libstore/derivation-options.cc b/src/libstore/derivation-options.cc index 630159629..4cb9bf726 100644 --- a/src/libstore/derivation-options.cc +++ b/src/libstore/derivation-options.cc @@ -356,7 +356,7 @@ DerivationOptions adl_serializer::from_json(const json & json }; } -void adl_serializer::to_json(json 
& json, DerivationOptions o) +void adl_serializer::to_json(json & json, const DerivationOptions & o) { json["outputChecks"] = std::visit( overloaded{ @@ -398,7 +398,7 @@ DerivationOptions::OutputChecks adl_serializer: }; } -void adl_serializer::to_json(json & json, DerivationOptions::OutputChecks c) +void adl_serializer::to_json(json & json, const DerivationOptions::OutputChecks & c) { json["ignoreSelfRefs"] = c.ignoreSelfRefs; json["allowedReferences"] = c.allowedReferences; diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 84889ceac..6d7dbc99c 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -1257,15 +1257,18 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const const Hash impureOutputHash = hashString(HashAlgorithm::SHA256, "impure"); -nlohmann::json -DerivationOutput::toJSON(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const +nlohmann::json DerivationOutput::toJSON() const { nlohmann::json res = nlohmann::json::object(); std::visit( overloaded{ - [&](const DerivationOutput::InputAddressed & doi) { res["path"] = store.printStorePath(doi.path); }, + [&](const DerivationOutput::InputAddressed & doi) { res["path"] = doi.path; }, [&](const DerivationOutput::CAFixed & dof) { - res["path"] = store.printStorePath(dof.path(store, drvName, outputName)); + /* it would be nice to output the path for user convenience, but + this would require us to know the store dir. */ +#if 0 + res["path"] = dof.path(store, drvName, outputName); +#endif res["method"] = std::string{dof.ca.method.render()}; res["hashAlgo"] = printHashAlgo(dof.ca.hash.algo); res["hash"] = dof.ca.hash.to_string(HashFormat::Base16, false); @@ -1286,12 +1289,8 @@ DerivationOutput::toJSON(const StoreDirConfig & store, std::string_view drvName, return res; } -DerivationOutput DerivationOutput::fromJSON( - const StoreDirConfig & store, - std::string_view drvName, - OutputNameView outputName, - const nlohmann::json & _json, - const ExperimentalFeatureSettings & xpSettings) +DerivationOutput +DerivationOutput::fromJSON(const nlohmann::json & _json, const ExperimentalFeatureSettings & xpSettings) { std::set keys; auto & json = getObject(_json); @@ -1310,11 +1309,11 @@ DerivationOutput DerivationOutput::fromJSON( if (keys == (std::set{"path"})) { return DerivationOutput::InputAddressed{ - .path = store.parseStorePath(getString(valueAt(json, "path"))), + .path = valueAt(json, "path"), }; } - else if (keys == (std::set{"path", "method", "hashAlgo", "hash"})) { + else if (keys == (std::set{"method", "hashAlgo", "hash"})) { auto [method, hashAlgo] = methodAlgo(); auto dof = DerivationOutput::CAFixed{ .ca = @@ -1323,8 +1322,12 @@ DerivationOutput DerivationOutput::fromJSON( .hash = Hash::parseNonSRIUnprefixed(getString(valueAt(json, "hash")), hashAlgo), }, }; - if (dof.path(store, drvName, outputName) != store.parseStorePath(getString(valueAt(json, "path")))) + /* We no longer produce this (denormalized) field (for the + reasons described above), so we don't need to check it. 
*/ +#if 0 + if (dof.path(store, drvName, outputName) != static_cast(valueAt(json, "path"))) throw Error("Path doesn't match derivation output"); +#endif return dof; } @@ -1355,17 +1358,19 @@ DerivationOutput DerivationOutput::fromJSON( } } -nlohmann::json Derivation::toJSON(const StoreDirConfig & store) const +nlohmann::json Derivation::toJSON() const { nlohmann::json res = nlohmann::json::object(); res["name"] = name; + res["version"] = 3; + { nlohmann::json & outputsObj = res["outputs"]; outputsObj = nlohmann::json::object(); for (auto & [outputName, output] : outputs) { - outputsObj[outputName] = output.toJSON(store, name, outputName); + outputsObj[outputName] = output; } } @@ -1373,7 +1378,7 @@ nlohmann::json Derivation::toJSON(const StoreDirConfig & store) const auto & inputsList = res["inputSrcs"]; inputsList = nlohmann::json ::array(); for (auto & input : inputSrcs) - inputsList.emplace_back(store.printStorePath(input)); + inputsList.emplace_back(input); } { @@ -1393,7 +1398,7 @@ nlohmann::json Derivation::toJSON(const StoreDirConfig & store) const auto & inputDrvsObj = res["inputDrvs"]; inputDrvsObj = nlohmann::json::object(); for (auto & [inputDrv, inputNode] : inputDrvs.map) { - inputDrvsObj[store.printStorePath(inputDrv)] = doInput(inputNode); + inputDrvsObj[inputDrv.to_string()] = doInput(inputNode); } } } @@ -1409,8 +1414,7 @@ nlohmann::json Derivation::toJSON(const StoreDirConfig & store) const return res; } -Derivation Derivation::fromJSON( - const StoreDirConfig & store, const nlohmann::json & _json, const ExperimentalFeatureSettings & xpSettings) +Derivation Derivation::fromJSON(const nlohmann::json & _json, const ExperimentalFeatureSettings & xpSettings) { using nlohmann::detail::value_t; @@ -1420,11 +1424,13 @@ Derivation Derivation::fromJSON( res.name = getString(valueAt(json, "name")); + if (valueAt(json, "version") != 3) + throw Error("Only derivation format version 3 is currently supported."); + try { auto outputs = getObject(valueAt(json, "outputs")); for (auto & [outputName, output] : outputs) { - res.outputs.insert_or_assign( - outputName, DerivationOutput::fromJSON(store, res.name, outputName, output, xpSettings)); + res.outputs.insert_or_assign(outputName, DerivationOutput::fromJSON(output, xpSettings)); } } catch (Error & e) { e.addTrace({}, "while reading key 'outputs'"); @@ -1434,7 +1440,7 @@ Derivation Derivation::fromJSON( try { auto inputSrcs = getArray(valueAt(json, "inputSrcs")); for (auto & input : inputSrcs) - res.inputSrcs.insert(store.parseStorePath(static_cast(input))); + res.inputSrcs.insert(input); } catch (Error & e) { e.addTrace({}, "while reading key 'inputSrcs'"); throw; @@ -1455,7 +1461,7 @@ Derivation Derivation::fromJSON( }; auto drvs = getObject(valueAt(json, "inputDrvs")); for (auto & [inputDrvPath, inputOutputs] : drvs) - res.inputDrvs.map[store.parseStorePath(inputDrvPath)] = doInput(inputOutputs); + res.inputDrvs.map[StorePath{inputDrvPath}] = doInput(inputOutputs); } catch (Error & e) { e.addTrace({}, "while reading key 'inputDrvs'"); throw; @@ -1480,3 +1486,29 @@ Derivation Derivation::fromJSON( } } // namespace nix + +namespace nlohmann { + +using namespace nix; + +DerivationOutput adl_serializer::from_json(const json & json) +{ + return DerivationOutput::fromJSON(json); +} + +void adl_serializer::to_json(json & json, const DerivationOutput & c) +{ + json = c.toJSON(); +} + +Derivation adl_serializer::from_json(const json & json) +{ + return Derivation::fromJSON(json); +} + +void adl_serializer::to_json(json & json, const Derivation 
& c) +{ + json = c.toJSON(); +} + +} // namespace nlohmann diff --git a/src/libstore/derived-path.cc b/src/libstore/derived-path.cc index 1fee1ae75..2cf720b82 100644 --- a/src/libstore/derived-path.cc +++ b/src/libstore/derived-path.cc @@ -2,8 +2,7 @@ #include "nix/store/derivations.hh" #include "nix/store/store-api.hh" #include "nix/util/comparator.hh" - -#include +#include "nix/util/json-utils.hh" #include @@ -19,59 +18,6 @@ GENERATE_CMP_EXT(, std::strong_ordering, SingleDerivedPathBuilt, *me->drvPath, m GENERATE_EQUAL(, DerivedPathBuilt ::, DerivedPathBuilt, *me->drvPath, me->outputs); GENERATE_ONE_CMP(, bool, DerivedPathBuilt ::, <, DerivedPathBuilt, *me->drvPath, me->outputs); -nlohmann::json DerivedPath::Opaque::toJSON(const StoreDirConfig & store) const -{ - return store.printStorePath(path); -} - -nlohmann::json SingleDerivedPath::Built::toJSON(Store & store) const -{ - nlohmann::json res; - res["drvPath"] = drvPath->toJSON(store); - // Fallback for the input-addressed derivation case: We expect to always be - // able to print the output paths, so let’s do it - // FIXME try-resolve on drvPath - const auto outputMap = store.queryPartialDerivationOutputMap(resolveDerivedPath(store, *drvPath)); - res["output"] = output; - auto outputPathIter = outputMap.find(output); - if (outputPathIter == outputMap.end()) - res["outputPath"] = nullptr; - else if (std::optional p = outputPathIter->second) - res["outputPath"] = store.printStorePath(*p); - else - res["outputPath"] = nullptr; - return res; -} - -nlohmann::json DerivedPath::Built::toJSON(Store & store) const -{ - nlohmann::json res; - res["drvPath"] = drvPath->toJSON(store); - // Fallback for the input-addressed derivation case: We expect to always be - // able to print the output paths, so let’s do it - // FIXME try-resolve on drvPath - const auto outputMap = store.queryPartialDerivationOutputMap(resolveDerivedPath(store, *drvPath)); - for (const auto & [output, outputPathOpt] : outputMap) { - if (!outputs.contains(output)) - continue; - if (outputPathOpt) - res["outputs"][output] = store.printStorePath(*outputPathOpt); - else - res["outputs"][output] = nullptr; - } - return res; -} - -nlohmann::json SingleDerivedPath::toJSON(Store & store) const -{ - return std::visit([&](const auto & buildable) { return buildable.toJSON(store); }, raw()); -} - -nlohmann::json DerivedPath::toJSON(Store & store) const -{ - return std::visit([&](const auto & buildable) { return buildable.toJSON(store); }, raw()); -} - std::string DerivedPath::Opaque::to_string(const StoreDirConfig & store) const { return store.printStorePath(path); @@ -273,3 +219,77 @@ const StorePath & DerivedPath::getBaseStorePath() const } } // namespace nix + +namespace nlohmann { + +void adl_serializer::to_json(json & json, const SingleDerivedPath::Opaque & o) +{ + json = o.path; +} + +SingleDerivedPath::Opaque adl_serializer::from_json(const json & json) +{ + return SingleDerivedPath::Opaque{json}; +} + +void adl_serializer::to_json(json & json, const SingleDerivedPath::Built & sdpb) +{ + json = { + {"drvPath", *sdpb.drvPath}, + {"output", sdpb.output}, + }; +} + +void adl_serializer::to_json(json & json, const DerivedPath::Built & dbp) +{ + json = { + {"drvPath", *dbp.drvPath}, + {"outputs", dbp.outputs}, + }; +} + +SingleDerivedPath::Built adl_serializer::from_json(const json & json0) +{ + auto & json = getObject(json0); + return { + .drvPath = make_ref(static_cast(valueAt(json, "drvPath"))), + .output = getString(valueAt(json, "output")), + }; +} + +DerivedPath::Built 
adl_serializer::from_json(const json & json0) +{ + auto & json = getObject(json0); + return { + .drvPath = make_ref(static_cast(valueAt(json, "drvPath"))), + .outputs = adl_serializer::from_json(valueAt(json, "outputs")), + }; +} + +void adl_serializer::to_json(json & json, const SingleDerivedPath & sdp) +{ + std::visit([&](const auto & buildable) { json = buildable; }, sdp.raw()); +} + +void adl_serializer::to_json(json & json, const DerivedPath & sdp) +{ + std::visit([&](const auto & buildable) { json = buildable; }, sdp.raw()); +} + +SingleDerivedPath adl_serializer::from_json(const json & json) +{ + if (json.is_string()) + return static_cast(json); + else + return static_cast(json); +} + +DerivedPath adl_serializer::from_json(const json & json) +{ + if (json.is_string()) + return static_cast(json); + else + return static_cast(json); +} + +} // namespace nlohmann diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index d0e298968..4b485ca66 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -1,65 +1,156 @@ #include "nix/store/store-registration.hh" +#include "nix/util/archive.hh" #include "nix/util/callback.hh" +#include "nix/util/memory-source-accessor.hh" +#include "nix/store/dummy-store.hh" + +#include namespace nix { -struct DummyStoreConfig : public std::enable_shared_from_this, virtual StoreConfig +std::string DummyStoreConfig::doc() { - using StoreConfig::StoreConfig; - - DummyStoreConfig(std::string_view scheme, std::string_view authority, const Params & params) - : StoreConfig(params) - { - if (!authority.empty()) - throw UsageError("`%s` store URIs must not contain an authority part %s", scheme, authority); - } - - static const std::string name() - { - return "Dummy Store"; - } - - static std::string doc() - { - return + return #include "dummy-store.md" - ; + ; +} + +namespace { + +class WholeStoreViewAccessor : public SourceAccessor +{ + using BaseName = std::string; + + /** + * Map from store path basenames to corresponding accessors. + */ + boost::concurrent_flat_map> subdirs; + + /** + * Helper accessor for accessing just the CanonPath::root. + */ + MemorySourceAccessor rootPathAccessor; + + /** + * Helper empty accessor. 
+ */ + MemorySourceAccessor emptyAccessor; + + auto + callWithAccessorForPath(CanonPath path, std::invocable auto callback) + { + if (path.isRoot()) + return callback(rootPathAccessor, path); + + BaseName baseName(*path.begin()); + MemorySourceAccessor * res = nullptr; + + subdirs.cvisit(baseName, [&](const auto & kv) { + path = path.removePrefix(CanonPath{baseName}); + res = &*kv.second; + }); + + if (!res) + res = &emptyAccessor; + + return callback(*res, path); } - static StringSet uriSchemes() +public: + WholeStoreViewAccessor() { - return {"dummy"}; + MemorySink sink{rootPathAccessor}; + sink.createDirectory(CanonPath::root); } - ref openStore() const override; - - StoreReference getReference() const override + void addObject(std::string_view baseName, ref accessor) { - return { - .variant = - StoreReference::Specified{ - .scheme = *uriSchemes().begin(), - }, - }; + subdirs.emplace(baseName, std::move(accessor)); + } + + std::string readFile(const CanonPath & path) override + { + return callWithAccessorForPath( + path, [](SourceAccessor & accessor, const CanonPath & path) { return accessor.readFile(path); }); + } + + void readFile(const CanonPath & path, Sink & sink, std::function sizeCallback) override + { + return callWithAccessorForPath(path, [&](SourceAccessor & accessor, const CanonPath & path) { + return accessor.readFile(path, sink, sizeCallback); + }); + } + + bool pathExists(const CanonPath & path) override + { + return callWithAccessorForPath( + path, [](SourceAccessor & accessor, const CanonPath & path) { return accessor.pathExists(path); }); + } + + std::optional maybeLstat(const CanonPath & path) override + { + return callWithAccessorForPath( + path, [](SourceAccessor & accessor, const CanonPath & path) { return accessor.maybeLstat(path); }); + } + + DirEntries readDirectory(const CanonPath & path) override + { + return callWithAccessorForPath( + path, [](SourceAccessor & accessor, const CanonPath & path) { return accessor.readDirectory(path); }); + } + + std::string readLink(const CanonPath & path) override + { + return callWithAccessorForPath( + path, [](SourceAccessor & accessor, const CanonPath & path) { return accessor.readLink(path); } }; +} // namespace + struct DummyStore : virtual Store { using Config = DummyStoreConfig; ref config; + struct PathInfoAndContents + { + UnkeyedValidPathInfo info; + ref contents; + }; + + /** + * This map conceptually owns the file system objects for each + * store object. + */ + boost::concurrent_flat_map contents; + + /** + * This view conceptually just borrows the file system objects of + * each store object from `contents`, and combines them together + * into one store-wide source accessor. + * + * This is needed just in order to implement `Store::getFSAccessor`.
+ */ + ref wholeStoreView = make_ref(); + DummyStore(ref config) : Store{*config} , config(config) { + wholeStoreView->setPathDisplay(config->storeDir); } void queryPathInfoUncached( const StorePath & path, Callback> callback) noexcept override { - callback(nullptr); + bool visited = contents.cvisit(path, [&](const auto & kv) { + callback(std::make_shared(StorePath{kv.first}, kv.second.info)); + }); + + if (!visited) + callback(nullptr); } /** @@ -77,11 +168,32 @@ struct DummyStore : virtual Store void addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) override { - unsupported("addToStore"); + if (config->readOnly) + unsupported("addToStore"); + + if (repair) + throw Error("repairing is not supported for '%s' store", config->getHumanReadableURI()); + + if (checkSigs) + throw Error("checking signatures is not supported for '%s' store", config->getHumanReadableURI()); + + auto temp = make_ref(); + MemorySink tempSink{*temp}; + parseDump(tempSink, source); + auto path = info.path; + + auto accessor = make_ref(std::move(*temp)); + contents.insert( + {path, + PathInfoAndContents{ + std::move(info), + accessor, + }}); + wholeStoreView->addObject(path.to_string(), accessor); } - virtual StorePath addToStoreFromDump( - Source & dump, + StorePath addToStoreFromDump( + Source & source, std::string_view name, FileSerialisationMethod dumpMethod = FileSerialisationMethod::NixArchive, ContentAddressMethod hashMethod = FileIngestionMethod::NixArchive, @@ -89,12 +201,73 @@ struct DummyStore : virtual Store const StorePathSet & references = StorePathSet(), RepairFlag repair = NoRepair) override { - unsupported("addToStore"); + if (config->readOnly) + unsupported("addToStoreFromDump"); + + if (repair) + throw Error("repairing is not supported for '%s' store", config->getHumanReadableURI()); + + auto temp = make_ref(); + + { + MemorySink tempSink{*temp}; + + // TODO factor this out into `restorePath`, same todo on it. + switch (dumpMethod) { + case FileSerialisationMethod::NixArchive: + parseDump(tempSink, source); + break; + case FileSerialisationMethod::Flat: { + // Replace root dir with file so next part succeeds. 
+ temp->root = MemorySourceAccessor::File::Regular{}; + tempSink.createRegularFile(CanonPath::root, [&](auto & sink) { source.drainInto(sink); }); + break; + } + } + } + + auto hash = hashPath({temp, CanonPath::root}, hashMethod.getFileIngestionMethod(), hashAlgo).first; + auto narHash = hashPath({temp, CanonPath::root}, FileIngestionMethod::NixArchive, HashAlgorithm::SHA256); + + auto info = ValidPathInfo::makeFromCA( + *this, + name, + ContentAddressWithReferences::fromParts( + hashMethod, + std::move(hash), + { + .others = references, + // caller is not capable of creating a self-reference, because + // this is content-addressed without modulus + .self = false, + }), + std::move(narHash.first)); + + info.narSize = narHash.second.value(); + + auto path = info.path; + auto accessor = make_ref(std::move(*temp)); + contents.insert( + {path, + PathInfoAndContents{ + std::move(info), + accessor, + }}); + wholeStoreView->addObject(path.to_string(), accessor); + + return path; } void narFromPath(const StorePath & path, Sink & sink) override { - unsupported("narFromPath"); + bool visited = contents.cvisit(path, [&](const auto & kv) { + const auto & [info, accessor] = kv.second; + SourcePath sourcePath(accessor); + dumpPath(sourcePath, sink, FileSerialisationMethod::NixArchive); + }); + + if (!visited) + throw Error("path '%s' is not valid", printStorePath(path)); } void @@ -103,9 +276,16 @@ struct DummyStore : virtual Store callback(nullptr); } - virtual ref getFSAccessor(bool requireValidPath) override + std::shared_ptr getFSAccessor(const StorePath & path, bool requireValidPath) override { - return makeEmptySourceAccessor(); + std::shared_ptr res; + contents.cvisit(path, [&](const auto & kv) { res = kv.second.contents.get_ptr(); }); + return res; + } + + ref getFSAccessor(bool requireValidPath) override + { + return wholeStoreView; } }; diff --git a/src/libstore/dummy-store.md b/src/libstore/dummy-store.md index eb7b4ba0d..3ba96fecb 100644 --- a/src/libstore/dummy-store.md +++ b/src/libstore/dummy-store.md @@ -2,9 +2,11 @@ R"( **Store URL format**: `dummy://` -This store type represents a store that contains no store paths and -cannot be written to. It's useful when you want to use the Nix -evaluator when no actual Nix store exists, e.g. +This store type represents a store in memory. +Store objects can be read and written, but only so long as the store is open. +Once the store is closed, all data will be discarded. + +It's useful when you want to use the Nix evaluator when no actual Nix store exists, e.g. 
```console # nix eval --store dummy:// --expr '1 + 2' diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index fdbc670df..86c4e37a6 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -330,7 +330,7 @@ static void readProcLink(const std::filesystem::path & file, UncheckedRoots & ro throw; } if (buf.is_absolute()) - roots[buf].emplace(file.string()); + roots[buf.string()].emplace(file.string()); } static std::string quoteRegexChars(const std::string & raw) @@ -343,7 +343,7 @@ static std::string quoteRegexChars(const std::string & raw) static void readFileRoots(const std::filesystem::path & path, UncheckedRoots & roots) { try { - roots[readFile(path)].emplace(path); + roots[readFile(path)].emplace(path.string()); } catch (SysError & e) { if (e.errNo != ENOENT && e.errNo != EACCES) throw; diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 7737389a3..6922c0f69 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -39,7 +39,7 @@ StoreReference HttpBinaryCacheStoreConfig::getReference() const .scheme = cacheUri.scheme, .authority = cacheUri.renderAuthorityAndPath(), }, - .params = cacheUri.query, + .params = getQueryParams(), }; } diff --git a/src/libstore/include/nix/store/binary-cache-store.hh b/src/libstore/include/nix/store/binary-cache-store.hh index 908500b42..c316b1199 100644 --- a/src/libstore/include/nix/store/binary-cache-store.hh +++ b/src/libstore/include/nix/store/binary-cache-store.hh @@ -12,6 +12,7 @@ namespace nix { struct NarInfo; +class RemoteFSAccessor; struct BinaryCacheStoreConfig : virtual StoreConfig { @@ -136,6 +137,11 @@ private: CheckSigsFlag checkSigs, std::function mkInfo); + /** + * Same as `getFSAccessor`, but with a more precise return type. + */ + ref getRemoteFSAccessor(bool requireValidPath = true); + public: bool isValidPathUncached(const StorePath & path) override; @@ -175,6 +181,8 @@ public: ref getFSAccessor(bool requireValidPath = true) override; + std::shared_ptr getFSAccessor(const StorePath &, bool requireValidPath = true) override; + void addSignatures(const StorePath & storePath, const StringSet & sigs) override; std::optional getBuildLogExact(const StorePath & path) override; diff --git a/src/libstore/include/nix/store/derivations.hh b/src/libstore/include/nix/store/derivations.hh index 08bb7183f..0dfb80347 100644 --- a/src/libstore/include/nix/store/derivations.hh +++ b/src/libstore/include/nix/store/derivations.hh @@ -135,16 +135,12 @@ struct DerivationOutput std::optional path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const; - nlohmann::json toJSON(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const; + nlohmann::json toJSON() const; /** * @param xpSettings Stop-gap to avoid globals during unit tests.
*/ - static DerivationOutput fromJSON( - const StoreDirConfig & store, - std::string_view drvName, - OutputNameView outputName, - const nlohmann::json & json, - const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + static DerivationOutput + fromJSON(const nlohmann::json & json, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); }; typedef std::map DerivationOutputs; @@ -394,11 +390,9 @@ struct Derivation : BasicDerivation { } - nlohmann::json toJSON(const StoreDirConfig & store) const; - static Derivation fromJSON( - const StoreDirConfig & store, - const nlohmann::json & json, - const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + nlohmann::json toJSON() const; + static Derivation + fromJSON(const nlohmann::json & json, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); bool operator==(const Derivation &) const = default; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. @@ -542,3 +536,6 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva std::string hashPlaceholder(const OutputNameView outputName); } // namespace nix + +JSON_IMPL(nix::DerivationOutput) +JSON_IMPL(nix::Derivation) diff --git a/src/libstore/include/nix/store/derived-path.hh b/src/libstore/include/nix/store/derived-path.hh index bc89b012e..47b29b2d6 100644 --- a/src/libstore/include/nix/store/derived-path.hh +++ b/src/libstore/include/nix/store/derived-path.hh @@ -5,6 +5,7 @@ #include "nix/store/outputs-spec.hh" #include "nix/util/configuration.hh" #include "nix/util/ref.hh" +#include "nix/util/json-impls.hh" #include @@ -14,9 +15,6 @@ namespace nix { struct StoreDirConfig; -// TODO stop needing this, `toJSON` below should be pure -class Store; - /** * An opaque derived path. * @@ -30,7 +28,6 @@ struct DerivedPathOpaque std::string to_string(const StoreDirConfig & store) const; static DerivedPathOpaque parse(const StoreDirConfig & store, std::string_view); - nlohmann::json toJSON(const StoreDirConfig & store) const; bool operator==(const DerivedPathOpaque &) const = default; auto operator<=>(const DerivedPathOpaque &) const = default; @@ -80,7 +77,6 @@ struct SingleDerivedPathBuilt ref drvPath, OutputNameView outputs, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); - nlohmann::json toJSON(Store & store) const; bool operator==(const SingleDerivedPathBuilt &) const noexcept; std::strong_ordering operator<=>(const SingleDerivedPathBuilt &) const noexcept; @@ -153,7 +149,6 @@ struct SingleDerivedPath : _SingleDerivedPathRaw const StoreDirConfig & store, std::string_view, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); - nlohmann::json toJSON(Store & store) const; }; static inline ref makeConstantStorePathRef(StorePath drvPath) @@ -208,7 +203,6 @@ struct DerivedPathBuilt ref, std::string_view, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); - nlohmann::json toJSON(Store & store) const; bool operator==(const DerivedPathBuilt &) const noexcept; // TODO libc++ 16 (used by darwin) missing `std::set::operator <=>`, can't do yet. @@ -287,8 +281,6 @@ struct DerivedPath : _DerivedPathRaw * Convert a `SingleDerivedPath` to a `DerivedPath`. 
*/ static DerivedPath fromSingle(const SingleDerivedPath &); - - nlohmann::json toJSON(Store & store) const; }; typedef std::vector DerivedPaths; @@ -305,3 +297,9 @@ typedef std::vector DerivedPaths; void drvRequireExperiment( const SingleDerivedPath & drv, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); } // namespace nix + +JSON_IMPL(nix::SingleDerivedPath::Opaque) +JSON_IMPL(nix::SingleDerivedPath::Built) +JSON_IMPL(nix::SingleDerivedPath) +JSON_IMPL(nix::DerivedPath::Built) +JSON_IMPL(nix::DerivedPath) diff --git a/src/libstore/include/nix/store/dummy-store.hh b/src/libstore/include/nix/store/dummy-store.hh new file mode 100644 index 000000000..e93aad366 --- /dev/null +++ b/src/libstore/include/nix/store/dummy-store.hh @@ -0,0 +1,59 @@ +#pragma once +///@file + +#include "nix/store/store-api.hh" + +namespace nix { + +struct DummyStoreConfig : public std::enable_shared_from_this, virtual StoreConfig +{ + DummyStoreConfig(const Params & params) + : StoreConfig(params) + { + // Disable caching since this a temporary in-memory store. + pathInfoCacheSize = 0; + } + + DummyStoreConfig(std::string_view scheme, std::string_view authority, const Params & params) + : DummyStoreConfig(params) + { + if (!authority.empty()) + throw UsageError("`%s` store URIs must not contain an authority part %s", scheme, authority); + } + + Setting readOnly{ + this, + true, + "read-only", + R"( + Make any sort of write fail instead of succeeding. + No additional memory will be used, because no information needs to be stored. + )"}; + + static const std::string name() + { + return "Dummy Store"; + } + + static std::string doc(); + + static StringSet uriSchemes() + { + return {"dummy"}; + } + + ref openStore() const override; + + StoreReference getReference() const override + { + return { + .variant = + StoreReference::Specified{ + .scheme = *uriSchemes().begin(), + }, + .params = getQueryParams(), + }; + } +}; + +} // namespace nix diff --git a/src/libstore/include/nix/store/legacy-ssh-store.hh b/src/libstore/include/nix/store/legacy-ssh-store.hh index ac31506d0..75751e2d1 100644 --- a/src/libstore/include/nix/store/legacy-ssh-store.hh +++ b/src/libstore/include/nix/store/legacy-ssh-store.hh @@ -142,7 +142,12 @@ public: unsupported("ensurePath"); } - virtual ref getFSAccessor(bool requireValidPath) override + ref getFSAccessor(bool requireValidPath) override + { + unsupported("getFSAccessor"); + } + + std::shared_ptr getFSAccessor(const StorePath & path, bool requireValidPath) override { unsupported("getFSAccessor"); } diff --git a/src/libstore/include/nix/store/local-fs-store.hh b/src/libstore/include/nix/store/local-fs-store.hh index 84777f3d7..08f8e1656 100644 --- a/src/libstore/include/nix/store/local-fs-store.hh +++ b/src/libstore/include/nix/store/local-fs-store.hh @@ -9,6 +9,18 @@ namespace nix { struct LocalFSStoreConfig : virtual StoreConfig { +private: + static OptionalPathSetting makeRootDirSetting(LocalFSStoreConfig & self, std::optional defaultValue) + { + return { + &self, + std::move(defaultValue), + "root", + "Directory prefixed to all other paths.", + }; + } + +public: using StoreConfig::StoreConfig; /** @@ -20,7 +32,7 @@ struct LocalFSStoreConfig : virtual StoreConfig */ LocalFSStoreConfig(PathView path, const Params & params); - OptionalPathSetting rootDir{this, std::nullopt, "root", "Directory prefixed to all other paths."}; + OptionalPathSetting rootDir = makeRootDirSetting(*this, std::nullopt); private: @@ -68,6 +80,7 @@ struct LocalFSStore : virtual Store, 
virtual GcStore, virtual LogStore void narFromPath(const StorePath & path, Sink & sink) override; ref getFSAccessor(bool requireValidPath = true) override; + std::shared_ptr getFSAccessor(const StorePath & path, bool requireValidPath = true) override; /** * Creates symlink from the `gcRoot` to the `storePath` and diff --git a/src/libstore/include/nix/store/meson.build b/src/libstore/include/nix/store/meson.build index 60af5ff53..428ef00f3 100644 --- a/src/libstore/include/nix/store/meson.build +++ b/src/libstore/include/nix/store/meson.build @@ -34,6 +34,7 @@ headers = [ config_pub_h ] + files( 'derived-path-map.hh', 'derived-path.hh', 'downstream-placeholder.hh', + 'dummy-store.hh', 'export-import.hh', 'filetransfer.hh', 'gc-store.hh', diff --git a/src/libstore/include/nix/store/path.hh b/src/libstore/include/nix/store/path.hh index 784298daa..74ee0422b 100644 --- a/src/libstore/include/nix/store/path.hh +++ b/src/libstore/include/nix/store/path.hh @@ -4,6 +4,8 @@ #include #include "nix/util/types.hh" +#include "nix/util/json-impls.hh" +#include "nix/util/json-non-null.hh" namespace nix { @@ -87,6 +89,10 @@ typedef std::vector StorePaths; */ constexpr std::string_view drvExtension = ".drv"; +template<> +struct json_avoids_null : std::true_type +{}; + } // namespace nix namespace std { @@ -101,3 +107,14 @@ struct hash }; } // namespace std + +namespace nix { + +inline std::size_t hash_value(const StorePath & path) +{ + return std::hash{}(path); +} + +} // namespace nix + +JSON_IMPL(nix::StorePath) diff --git a/src/libstore/include/nix/store/realisation.hh b/src/libstore/include/nix/store/realisation.hh index 6eb3eecf3..3424a39c9 100644 --- a/src/libstore/include/nix/store/realisation.hh +++ b/src/libstore/include/nix/store/realisation.hh @@ -64,9 +64,6 @@ struct Realisation */ std::map dependentRealisations; - nlohmann::json toJSON() const; - static Realisation fromJSON(const nlohmann::json & json, const std::string & whence); - std::string fingerprint() const; void sign(const Signer &); bool checkSignature(const PublicKeys & publicKeys, const std::string & sig) const; @@ -169,3 +166,5 @@ public: }; } // namespace nix + +JSON_IMPL(nix::Realisation) diff --git a/src/libstore/include/nix/store/remote-fs-accessor.hh b/src/libstore/include/nix/store/remote-fs-accessor.hh index fa0555d9b..9e1999cc0 100644 --- a/src/libstore/include/nix/store/remote-fs-accessor.hh +++ b/src/libstore/include/nix/store/remote-fs-accessor.hh @@ -27,6 +27,11 @@ class RemoteFSAccessor : public SourceAccessor public: + /** + * @return nullptr if the store does not contain any object at that path. 
+ */ + std::shared_ptr accessObject(const StorePath & path); + RemoteFSAccessor( ref store, bool requireValidPath = true, const /* FIXME: use std::optional */ Path & cacheDir = ""); diff --git a/src/libstore/include/nix/store/remote-store.hh b/src/libstore/include/nix/store/remote-store.hh index 76591cf93..1aaf29d37 100644 --- a/src/libstore/include/nix/store/remote-store.hh +++ b/src/libstore/include/nix/store/remote-store.hh @@ -16,6 +16,7 @@ struct FdSink; struct FdSource; template class Pool; +class RemoteFSAccessor; struct RemoteStoreConfig : virtual StoreConfig { @@ -176,10 +177,18 @@ protected: virtual ref getFSAccessor(bool requireValidPath = true) override; + virtual std::shared_ptr + getFSAccessor(const StorePath & path, bool requireValidPath = true) override; + virtual void narFromPath(const StorePath & path, Sink & sink) override; private: + /** + * Same as the default implemenation of `RemoteStore::getFSAccessor`, but with a more preceise return type. + */ + ref getRemoteFSAccessor(bool requireValidPath = true); + std::atomic_bool failed{false}; void copyDrvsFromEvalStore(const std::vector & paths, std::shared_ptr evalStore); diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index 2519002b3..6d3f6b8d0 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -717,10 +717,20 @@ public: }; /** - * @return An object to access files in the Nix store. + * @return An object to access files in the Nix store, across all + * store objects. */ virtual ref getFSAccessor(bool requireValidPath = true) = 0; + /** + * @return An object to access files for a specific store object in + * the Nix store. + * + * @return nullptr if the store doesn't contain an object at the + * givine path. + */ + virtual std::shared_ptr getFSAccessor(const StorePath & path, bool requireValidPath = true) = 0; + /** * Repair the contents of the given path by redownloading it using * a substituter (if available). diff --git a/src/libstore/include/nix/store/uds-remote-store.hh b/src/libstore/include/nix/store/uds-remote-store.hh index 37c239796..fe6e486f4 100644 --- a/src/libstore/include/nix/store/uds-remote-store.hh +++ b/src/libstore/include/nix/store/uds-remote-store.hh @@ -61,6 +61,11 @@ struct UDSRemoteStore : virtual IndirectRootStore, virtual RemoteStore return LocalFSStore::getFSAccessor(requireValidPath); } + std::shared_ptr getFSAccessor(const StorePath & path, bool requireValidPath = true) override + { + return LocalFSStore::getFSAccessor(path, requireValidPath); + } + void narFromPath(const StorePath & path, Sink & sink) override { LocalFSStore::narFromPath(path, sink); diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index e0f07b91b..28069dcaf 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -20,13 +20,17 @@ Path LocalFSStoreConfig::getDefaultLogDir() LocalFSStoreConfig::LocalFSStoreConfig(PathView rootDir, const Params & params) : StoreConfig(params) - // Default `?root` from `rootDir` if non set - // FIXME don't duplicate description once we don't have root setting - , rootDir{ - this, - !rootDir.empty() && params.count("root") == 0 ? (std::optional{rootDir}) : std::nullopt, - "root", - "Directory prefixed to all other paths."} + /* Default `?root` from `rootDir` if non set + * NOTE: We would like to just do rootDir.set(...), which would take care of + * all normalization and error checking for us. 
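
The per-path `Store::getFSAccessor(const StorePath &, bool requireValidPath)` overload declared above returns `nullptr` rather than throwing when the store has no object at the given path, so callers are expected to check before dereferencing. A minimal sketch of that calling pattern (mirroring how `nix cat`/`nix ls` use it later in this patch); the function name, includes, and error wording are assumptions for illustration, not part of the change:

```cpp
#include <iostream>

#include "nix/store/store-api.hh"

// Sketch only: print a file from a store object via the new per-path accessor.
void printStoreFile(nix::Store & store, const nix::StorePath & storePath, const nix::CanonPath & rest)
{
    // Returns nullptr (instead of throwing) when `storePath` is not in the store.
    auto accessor = store.getFSAccessor(storePath, /*requireValidPath=*/true);
    if (!accessor)
        throw nix::Error("path '%s' is not a valid store path", store.printStorePath(storePath));

    // Paths handed to this accessor are relative to the store object itself,
    // not prefixed with the store object's name.
    std::cout << accessor->readFile(rest);
}
```
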
Unfortunately we cannot do + * that because of the complicated initialization order of other fields with + * the virtual class hierarchy of nix store configs, and the design of the + * settings system. As such, we have no choice but to redefine the field and + * manually repeat the same normalization logic. + */ + , rootDir{makeRootDirSetting( + *this, + !rootDir.empty() && params.count("root") == 0 ? std::optional{canonPath(rootDir)} : std::nullopt)} { } @@ -91,6 +95,23 @@ ref LocalFSStore::getFSAccessor(bool requireValidPath) ref(std::dynamic_pointer_cast(shared_from_this())), requireValidPath); } +std::shared_ptr LocalFSStore::getFSAccessor(const StorePath & path, bool requireValidPath) +{ + auto absPath = std::filesystem::path{config.realStoreDir.get()} / path.to_string(); + if (requireValidPath) { + /* Only return non-null if the store object is a fully-valid + member of the store. */ + if (!isValidPath(path)) + return nullptr; + } else { + /* Return non-null as long as the some file system data exists, + even if the store object is not fully registered. */ + if (!pathExists(absPath)) + return nullptr; + } + return std::make_shared(std::move(absPath)); +} + void LocalFSStore::narFromPath(const StorePath & path, Sink & sink) { if (!isValidPath(path)) diff --git a/src/libstore/meson.build b/src/libstore/meson.build index 556616181..e3004ebf5 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -265,6 +265,7 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'binary-cache-store.cc', diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index 69d8d2e14..11608a667 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -304,10 +304,15 @@ public: if (queryRealisation.isNull(0)) return {oInvalid, 0}; - auto realisation = std::make_shared( - Realisation::fromJSON(nlohmann::json::parse(queryRealisation.getStr(0)), "Local disk cache")); - - return {oValid, realisation}; + try { + return { + oValid, + std::make_shared(nlohmann::json::parse(queryRealisation.getStr(0))), + }; + } catch (Error & e) { + e.addTrace({}, "while parsing the local disk cache"); + throw; + } }); } @@ -349,7 +354,8 @@ public: auto & cache(getCache(*state, uri)); - state->insertRealisation.use()(cache.id)(realisation.id.to_string())(realisation.toJSON().dump())(time(0)) + state->insertRealisation + .use()(cache.id)(realisation.id.to_string())(static_cast(realisation).dump())(time(0)) .exec(); }); } diff --git a/src/libstore/outputs-spec.cc b/src/libstore/outputs-spec.cc index 7f73c7d35..aacc964cd 100644 --- a/src/libstore/outputs-spec.cc +++ b/src/libstore/outputs-spec.cc @@ -150,7 +150,7 @@ OutputsSpec adl_serializer::from_json(const json & json) return OutputsSpec::Names{std::move(names)}; } -void adl_serializer::to_json(json & json, OutputsSpec t) +void adl_serializer::to_json(json & json, const OutputsSpec & t) { std::visit( overloaded{ @@ -169,7 +169,7 @@ ExtendedOutputsSpec adl_serializer::from_json(const json & } } -void adl_serializer::to_json(json & json, ExtendedOutputsSpec t) +void adl_serializer::to_json(json & json, const ExtendedOutputsSpec & t) { std::visit( overloaded{ diff --git a/src/libstore/path.cc b/src/libstore/path.cc index 516b01571..fa430ce94 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -1,4 +1,7 @@ +#include + #include "nix/store/store-dir-config.hh" +#include "nix/util/json-utils.hh" namespace nix { 
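
Together with the `JSON_IMPL` declarations added above, these includes move `StorePath` (and likewise `Derivation`, the derived-path types, and `Realisation`) onto nlohmann's `adl_serializer`, so JSON conversions no longer take a `Store`/`StoreDirConfig` argument. A minimal round-trip sketch under that assumption:

```cpp
#include <nlohmann/json.hpp>

#include "nix/store/derivations.hh"

// Sketch only: store-free JSON conversion via adl_serializer.
nix::Derivation roundTrip(const nix::Derivation & drv)
{
    // to_json: no store argument needed any more.
    nlohmann::json j = drv;

    // from_json: JSON_IMPL declares a `static T from_json(const json &)` overload,
    // which also covers non-default-constructible types such as StorePath.
    return j.get<nix::Derivation>();
}
```
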
@@ -75,3 +78,19 @@ StorePath StorePath::random(std::string_view name) } } // namespace nix + +namespace nlohmann { + +using namespace nix; + +StorePath adl_serializer::from_json(const json & json) +{ + return StorePath{getString(json)}; +} + +void adl_serializer::to_json(json & json, const StorePath & storePath) +{ + json = storePath.to_string(); +} + +} // namespace nlohmann diff --git a/src/libstore/realisation.cc b/src/libstore/realisation.cc index 8c3baa73b..febd67bd2 100644 --- a/src/libstore/realisation.cc +++ b/src/libstore/realisation.cc @@ -2,6 +2,7 @@ #include "nix/store/store-api.hh" #include "nix/util/closure.hh" #include "nix/util/signature/local-keys.hh" +#include "nix/util/json-utils.hh" #include namespace nix { @@ -60,54 +61,9 @@ void Realisation::closure(Store & store, const std::set & startOutp }); } -nlohmann::json Realisation::toJSON() const -{ - auto jsonDependentRealisations = nlohmann::json::object(); - for (auto & [depId, depOutPath] : dependentRealisations) - jsonDependentRealisations.emplace(depId.to_string(), depOutPath.to_string()); - return nlohmann::json{ - {"id", id.to_string()}, - {"outPath", outPath.to_string()}, - {"signatures", signatures}, - {"dependentRealisations", jsonDependentRealisations}, - }; -} - -Realisation Realisation::fromJSON(const nlohmann::json & json, const std::string & whence) -{ - auto getOptionalField = [&](std::string fieldName) -> std::optional { - auto fieldIterator = json.find(fieldName); - if (fieldIterator == json.end()) - return std::nullopt; - return {*fieldIterator}; - }; - auto getField = [&](std::string fieldName) -> std::string { - if (auto field = getOptionalField(fieldName)) - return *field; - else - throw Error("Drv output info file '%1%' is corrupt, missing field %2%", whence, fieldName); - }; - - StringSet signatures; - if (auto signaturesIterator = json.find("signatures"); signaturesIterator != json.end()) - signatures.insert(signaturesIterator->begin(), signaturesIterator->end()); - - std::map dependentRealisations; - if (auto jsonDependencies = json.find("dependentRealisations"); jsonDependencies != json.end()) - for (auto & [jsonDepId, jsonDepOutPath] : jsonDependencies->get()) - dependentRealisations.insert({DrvOutput::parse(jsonDepId), StorePath(jsonDepOutPath)}); - - return Realisation{ - .id = DrvOutput::parse(getField("id")), - .outPath = StorePath(getField("outPath")), - .signatures = signatures, - .dependentRealisations = dependentRealisations, - }; -} - std::string Realisation::fingerprint() const { - auto serialized = toJSON(); + nlohmann::json serialized = *this; serialized.erase("signatures"); return serialized.dump(); } @@ -183,3 +139,43 @@ RealisedPath::Set RealisedPath::closure(Store & store) const } } // namespace nix + +namespace nlohmann { + +using namespace nix; + +Realisation adl_serializer::from_json(const json & json0) +{ + auto json = getObject(json0); + + StringSet signatures; + if (auto signaturesOpt = optionalValueAt(json, "signatures")) + signatures = *signaturesOpt; + + std::map dependentRealisations; + if (auto jsonDependencies = optionalValueAt(json, "dependentRealisations")) + for (auto & [jsonDepId, jsonDepOutPath] : getObject(*jsonDependencies)) + dependentRealisations.insert({DrvOutput::parse(jsonDepId), jsonDepOutPath}); + + return Realisation{ + .id = DrvOutput::parse(valueAt(json, "id")), + .outPath = valueAt(json, "outPath"), + .signatures = signatures, + .dependentRealisations = dependentRealisations, + }; +} + +void adl_serializer::to_json(json & json, const Realisation & r) 
+{ + auto jsonDependentRealisations = nlohmann::json::object(); + for (auto & [depId, depOutPath] : r.dependentRealisations) + jsonDependentRealisations.emplace(depId.to_string(), depOutPath); + json = { + {"id", r.id.to_string()}, + {"outPath", r.outPath}, + {"signatures", r.signatures}, + {"dependentRealisations", jsonDependentRealisations}, + }; +} + +} // namespace nlohmann diff --git a/src/libstore/remote-fs-accessor.cc b/src/libstore/remote-fs-accessor.cc index 12c810eca..e6715cbdf 100644 --- a/src/libstore/remote-fs-accessor.cc +++ b/src/libstore/remote-fs-accessor.cc @@ -51,15 +51,17 @@ ref RemoteFSAccessor::addToCache(std::string_view hashPart, std: std::pair, CanonPath> RemoteFSAccessor::fetch(const CanonPath & path) { - auto [storePath, restPath_] = store->toStorePath(store->storeDir + path.abs()); - auto restPath = CanonPath(restPath_); - + auto [storePath, restPath] = store->toStorePath(store->storeDir + path.abs()); if (requireValidPath && !store->isValidPath(storePath)) throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath)); + return {ref{accessObject(storePath)}, CanonPath{restPath}}; +} +std::shared_ptr RemoteFSAccessor::accessObject(const StorePath & storePath) +{ auto i = nars.find(std::string(storePath.hashPart())); if (i != nars.end()) - return {i->second, restPath}; + return i->second; std::string listing; Path cacheFile; @@ -90,7 +92,7 @@ std::pair, CanonPath> RemoteFSAccessor::fetch(const CanonPat }); nars.emplace(storePath.hashPart(), narAccessor); - return {narAccessor, restPath}; + return narAccessor; } catch (SystemError &) { } @@ -98,14 +100,14 @@ std::pair, CanonPath> RemoteFSAccessor::fetch(const CanonPat try { auto narAccessor = makeNarAccessor(nix::readFile(cacheFile)); nars.emplace(storePath.hashPart(), narAccessor); - return {narAccessor, restPath}; + return narAccessor; } catch (SystemError &) { } } StringSink sink; store->narFromPath(storePath, sink); - return {addToCache(storePath.hashPart(), std::move(sink.s)), restPath}; + return addToCache(storePath.hashPart(), std::move(sink.s)); } std::optional RemoteFSAccessor::maybeLstat(const CanonPath & path) diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index b918871fa..bb7425081 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -794,9 +794,19 @@ void RemoteStore::narFromPath(const StorePath & path, Sink & sink) conn->narFromPath(*this, &conn.daemonException, path, [&](Source & source) { copyNAR(conn->from, sink); }); } +ref RemoteStore::getRemoteFSAccessor(bool requireValidPath) +{ + return make_ref(ref(shared_from_this()), requireValidPath); +} + ref RemoteStore::getFSAccessor(bool requireValidPath) { - return make_ref(ref(shared_from_this())); + return getRemoteFSAccessor(requireValidPath); +} + +std::shared_ptr RemoteStore::getFSAccessor(const StorePath & path, bool requireValidPath) +{ + return getRemoteFSAccessor(requireValidPath)->accessObject(path); } void RemoteStore::ConnectionHandle::withFramedSink(std::function fun) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 4ad09aff2..b70f04be7 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -262,6 +262,7 @@ StoreReference S3BinaryCacheStoreConfig::getReference() const .scheme = *uriSchemes().begin(), .authority = bucketName, }, + .params = getQueryParams(), }; } diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index dafe14fea..a7e28017f 100644 --- 
a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -151,6 +151,11 @@ struct MountedSSHStore : virtual SSHStore, virtual LocalFSStore return LocalFSStore::getFSAccessor(requireValidPath); } + std::shared_ptr getFSAccessor(const StorePath & path, bool requireValidPath) override + { + return LocalFSStore::getFSAccessor(path, requireValidPath); + } + std::optional getBuildLogExact(const StorePath & path) override { return LocalFSStore::getBuildLogExact(path); diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 17748ec53..c26c7d826 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -58,12 +58,22 @@ std::pair StoreDirConfig::toStorePath(PathView path) const Path Store::followLinksToStore(std::string_view _path) const { Path path = absPath(std::string(_path)); + + // Limit symlink follows to prevent infinite loops + unsigned int followCount = 0; + const unsigned int maxFollow = 1024; + while (!isInStore(path)) { if (!std::filesystem::is_symlink(path)) break; + + if (++followCount >= maxFollow) + throw Error("too many symbolic links encountered while resolving '%s'", _path); + auto target = readLink(path); path = absPath(target, dirOf(path)); } + if (!isInStore(path)) throw BadStorePath("path '%1%' is not in the Nix store", path); return path; @@ -1120,10 +1130,9 @@ Derivation Store::derivationFromPath(const StorePath & drvPath) static Derivation readDerivationCommon(Store & store, const StorePath & drvPath, bool requireValidPath) { - auto accessor = store.getFSAccessor(requireValidPath); + auto accessor = store.getFSAccessor(drvPath, requireValidPath); try { - return parseDerivation( - store, accessor->readFile(CanonPath(drvPath.to_string())), Derivation::nameFromPath(drvPath)); + return parseDerivation(store, accessor->readFile(CanonPath::root), Derivation::nameFromPath(drvPath)); } catch (FormatError & e) { throw Error("error parsing derivation '%s': %s", store.printStorePath(drvPath), e.msg()); } diff --git a/src/libstore/uds-remote-store.cc b/src/libstore/uds-remote-store.cc index 9725fe8a0..6106a99ce 100644 --- a/src/libstore/uds-remote-store.cc +++ b/src/libstore/uds-remote-store.cc @@ -61,13 +61,17 @@ StoreReference UDSRemoteStoreConfig::getReference() const * to be more compatible with older versions of nix. Some tooling out there * tries hard to parse store references and it might not be able to handle "unix://". 
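
`Store::followLinksToStore` above now bounds symlink resolution at 1024 hops, so cycles fail with an error instead of spinning forever. The same guard pattern in isolation (an illustration only, not the Nix implementation; `std::filesystem` is used for brevity):

```cpp
#include <filesystem>
#include <stdexcept>

namespace fs = std::filesystem;

// Illustration: follow symlinks with an upper bound, so `ln -s a a` errors out.
fs::path followLinksBounded(fs::path p, unsigned maxFollow = 1024)
{
    unsigned followCount = 0;
    while (fs::is_symlink(p)) {
        if (++followCount >= maxFollow)
            throw std::runtime_error("too many symbolic links encountered while resolving '" + p.string() + "'");
        auto target = fs::read_symlink(p);
        // Relative targets are resolved against the directory containing the link.
        p = target.is_absolute() ? target : p.parent_path() / target;
    }
    return p;
}
```
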
*/ if (path == settings.nixDaemonSocketFile) - return {.variant = StoreReference::Daemon{}}; + return { + .variant = StoreReference::Daemon{}, + .params = getQueryParams(), + }; return { .variant = StoreReference::Specified{ .scheme = *uriSchemes().begin(), .authority = path, }, + .params = getQueryParams(), }; } diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 770bdad4d..04e8cb176 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -18,6 +18,7 @@ #include "nix/store/user-lock.hh" #include "nix/store/globals.hh" #include "nix/store/build/derivation-env-desugar.hh" +#include "nix/util/terminal.hh" #include @@ -808,8 +809,7 @@ std::optional DerivationBuilderImpl::startBuild() if (!builderOut) throw SysError("opening pseudoterminal master"); - // FIXME: not thread-safe, use ptsname_r - std::string slaveName = ptsname(builderOut.get()); + std::string slaveName = getPtsName(builderOut.get()); if (buildUser) { if (chmod(slaveName.c_str(), 0600)) @@ -923,7 +923,7 @@ void DerivationBuilderImpl::prepareSandbox() void DerivationBuilderImpl::openSlave() { - std::string slaveName = ptsname(builderOut.get()); + std::string slaveName = getPtsName(builderOut.get()); AutoCloseFD builderOut = open(slaveName.c_str(), O_RDWR | O_NOCTTY); if (!builderOut) @@ -1712,6 +1712,8 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() /* Path already exists because CA path produced by something else. No moving needed. */ assert(newInfo.ca); + /* Can delete our scratch copy now. */ + deletePath(actualPath); } else { auto destPath = store.toRealPath(finalDestPath); deletePath(destPath); diff --git a/src/libstore/unix/build/linux-derivation-builder.cc b/src/libstore/unix/build/linux-derivation-builder.cc index 35730644b..f6e910d08 100644 --- a/src/libstore/unix/build/linux-derivation-builder.cc +++ b/src/libstore/unix/build/linux-derivation-builder.cc @@ -3,6 +3,7 @@ # include "nix/store/personality.hh" # include "nix/util/cgroup.hh" # include "nix/util/linux-namespaces.hh" +# include "nix/util/logging.hh" # include "linux/fchmodat2-compat.hh" # include @@ -505,8 +506,16 @@ struct ChrootLinuxDerivationBuilder : ChrootDerivationBuilder, LinuxDerivationBu createDirs(chrootRootDir + "/dev/shm"); createDirs(chrootRootDir + "/dev/pts"); ss.push_back("/dev/full"); - if (systemFeatures.count("kvm") && pathExists("/dev/kvm")) - ss.push_back("/dev/kvm"); + if (systemFeatures.count("kvm")) { + if (pathExists("/dev/kvm")) { + ss.push_back("/dev/kvm"); + } else { + warn( + "KVM is enabled in system-features but /dev/kvm is not available. " + "QEMU builds may fall back to slow emulation. 
" + "Consider removing 'kvm' from system-features in nix.conf if KVM is not supported on this system."); + } + } ss.push_back("/dev/null"); ss.push_back("/dev/random"); ss.push_back("/dev/tty"); diff --git a/src/libutil-c/meson.build b/src/libutil-c/meson.build index 1806dbb6f..54fd53c74 100644 --- a/src/libutil-c/meson.build +++ b/src/libutil-c/meson.build @@ -32,6 +32,7 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'nix_api_util.cc', diff --git a/src/libutil-c/nix_api_util.cc b/src/libutil-c/nix_api_util.cc index a43e7103b..3903823aa 100644 --- a/src/libutil-c/nix_api_util.cc +++ b/src/libutil-c/nix_api_util.cc @@ -159,4 +159,16 @@ nix_err call_nix_get_string_callback(const std::string str, nix_get_string_callb return NIX_OK; } +nix_err nix_set_verbosity(nix_c_context * context, nix_verbosity level) +{ + if (context) + context->last_err_code = NIX_OK; + if (level > NIX_LVL_VOMIT || level < NIX_LVL_ERROR) + return nix_set_err_msg(context, NIX_ERR_UNKNOWN, "Invalid verbosity level"); + try { + nix::verbosity = static_cast(level); + } + NIXC_CATCH_ERRS +} + } // extern "C" diff --git a/src/libutil-c/nix_api_util.h b/src/libutil-c/nix_api_util.h index 5f42641d4..4d7f394fa 100644 --- a/src/libutil-c/nix_api_util.h +++ b/src/libutil-c/nix_api_util.h @@ -53,7 +53,7 @@ extern "C" { * - NIX_OK: No error occurred (0) * - NIX_ERR_UNKNOWN: An unknown error occurred (-1) * - NIX_ERR_OVERFLOW: An overflow error occurred (-2) - * - NIX_ERR_KEY: A key error occurred (-3) + * - NIX_ERR_KEY: A key/index access error occurred in C API functions (-3) * - NIX_ERR_NIX_ERROR: A generic Nix error occurred (-4) */ enum nix_err { @@ -83,10 +83,21 @@ enum nix_err { NIX_ERR_OVERFLOW = -2, /** - * @brief A key error occurred. + * @brief A key/index access error occurred in C API functions. * - * This error code is returned when a key error occurred during the function - * execution. + * This error code is returned when accessing a key, index, or identifier that + * does not exist in C API functions. Common scenarios include: + * - Setting keys that don't exist (nix_setting_get, nix_setting_set) + * - List indices that are out of bounds (nix_get_list_byidx*) + * - Attribute names that don't exist (nix_get_attr_byname*) + * - Attribute indices that are out of bounds (nix_get_attr_byidx*, nix_get_attr_name_byidx) + * + * This error typically indicates incorrect usage or assumptions about data structure + * contents, rather than internal Nix evaluation errors. + * + * @note This error code should ONLY be returned by C API functions themselves, + * not by underlying Nix evaluation. For example, evaluating `{}.foo` in Nix + * will throw a normal error (NIX_ERR_NIX_ERROR), not NIX_ERR_KEY. */ NIX_ERR_KEY = -3, @@ -102,6 +113,24 @@ enum nix_err { typedef enum nix_err nix_err; +/** + * @brief Verbosity level + * + * @note This should be kept in sync with the C++ implementation (nix::Verbosity) + */ +enum nix_verbosity { + NIX_LVL_ERROR = 0, + NIX_LVL_WARN, + NIX_LVL_NOTICE, + NIX_LVL_INFO, + NIX_LVL_TALKATIVE, + NIX_LVL_CHATTY, + NIX_LVL_DEBUG, + NIX_LVL_VOMIT, +}; + +typedef enum nix_verbosity nix_verbosity; + /** * @brief This object stores error state. * @struct nix_c_context @@ -316,6 +345,14 @@ nix_err nix_set_err_msg(nix_c_context * context, nix_err err, const char * msg); */ void nix_clear_err(nix_c_context * context); +/** + * @brief Sets the verbosity level + * + * @param[out] context Optional, additional error context. 
+ * @param[in] level Verbosity level + */ +nix_err nix_set_verbosity(nix_c_context * context, nix_verbosity level); + /** * @} */ diff --git a/src/libutil-test-support/include/nix/util/tests/json-characterization.hh b/src/libutil-test-support/include/nix/util/tests/json-characterization.hh new file mode 100644 index 000000000..5a38b8e2c --- /dev/null +++ b/src/libutil-test-support/include/nix/util/tests/json-characterization.hh @@ -0,0 +1,54 @@ +#pragma once +///@file + +#include +#include + +#include "nix/util/types.hh" +#include "nix/util/file-system.hh" + +#include "nix/util/tests/characterization.hh" + +namespace nix { + +/** + * Mixin class for writing characterization tests for `nlohmann::json` + * conversions for a given type. + */ +template +struct JsonCharacterizationTest : virtual CharacterizationTest +{ + /** + * Golden test for reading + * + * @param test hook that takes the contents of the file and does the + * actual work + */ + void readJsonTest(PathView testStem, const T & expected) + { + using namespace nlohmann; + readTest(Path{testStem} + ".json", [&](const auto & encodedRaw) { + auto encoded = json::parse(encodedRaw); + T decoded = adl_serializer::from_json(encoded); + ASSERT_EQ(decoded, expected); + }); + } + + /** + * Golden test for writing + * + * @param test hook that produces contents of the file and does the + * actual work + */ + void writeJsonTest(PathView testStem, const T & value) + { + using namespace nlohmann; + writeTest( + Path{testStem} + ".json", + [&]() -> json { return static_cast(value); }, + [](const auto & file) { return json::parse(readFile(file)); }, + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); + } +}; + +} // namespace nix diff --git a/src/libutil-test-support/include/nix/util/tests/meson.build b/src/libutil-test-support/include/nix/util/tests/meson.build index ab143757c..3be085892 100644 --- a/src/libutil-test-support/include/nix/util/tests/meson.build +++ b/src/libutil-test-support/include/nix/util/tests/meson.build @@ -7,6 +7,7 @@ headers = files( 'gmock-matchers.hh', 'gtest-with-params.hh', 'hash.hh', + 'json-characterization.hh', 'nix_api_util.hh', 'string_callback.hh', ) diff --git a/src/libutil-test-support/meson.build b/src/libutil-test-support/meson.build index 64231107e..1ca251ce8 100644 --- a/src/libutil-test-support/meson.build +++ b/src/libutil-test-support/meson.build @@ -27,6 +27,7 @@ rapidcheck = dependency('rapidcheck') deps_public += rapidcheck subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'hash.cc', diff --git a/src/libutil-tests/meson.build b/src/libutil-tests/meson.build index ff71d2215..2d28c8bb1 100644 --- a/src/libutil-tests/meson.build +++ b/src/libutil-tests/meson.build @@ -42,6 +42,7 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = files( 'args.cc', @@ -96,7 +97,7 @@ this_exe = executable( test( meson.project_name(), this_exe, - env : { + env : asan_test_options_env + { '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', }, protocol : 'gtest', diff --git a/src/libutil-tests/package.nix b/src/libutil-tests/package.nix index c06de6894..077d36a4d 100644 --- a/src/libutil-tests/package.nix +++ b/src/libutil-tests/package.nix @@ -61,6 +61,7 @@ mkMesonExecutable (finalAttrs: { mkdir -p "$HOME" '' + '' + export ASAN_OPTIONS=abort_on_error=1:print_summary=1:detect_leaks=0 export _NIX_TEST_UNIT_DATA=${./data} 
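
`nix_set_verbosity` and the `nix_verbosity` enum give C API consumers control over logging without touching C++ globals. A hedged usage sketch; the context helpers `nix_c_context_create`/`nix_c_context_free` and `nix_err_msg` are existing C API entry points, and their use here is illustrative:

```cpp
#include <cstdio>

#include "nix_api_util.h"

int main()
{
    nix_c_context * ctx = nix_c_context_create();

    // Raise the log level; out-of-range values are rejected rather than cast blindly.
    if (nix_set_verbosity(ctx, NIX_LVL_DEBUG) != NIX_OK)
        std::fprintf(stderr, "%s\n", nix_err_msg(nullptr, ctx, nullptr));

    nix_c_context_free(ctx);
    return 0;
}
```
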
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} touch $out diff --git a/src/libutil-tests/sort.cc b/src/libutil-tests/sort.cc index 8eee961c8..11d8e5938 100644 --- a/src/libutil-tests/sort.cc +++ b/src/libutil-tests/sort.cc @@ -102,14 +102,14 @@ struct RandomPeekSort : public ::testing::TestWithParam< void SetUp() override { - auto [maxSize, min, max, iterations] = GetParam(); + const auto & [maxSize, min, max, iterations] = GetParam(); urng_ = std::mt19937(GTEST_FLAG_GET(random_seed)); distribution_ = std::uniform_int_distribution(min, max); } auto regenerate() { - auto [maxSize, min, max, iterations] = GetParam(); + const auto & [maxSize, min, max, iterations] = GetParam(); std::size_t dataSize = std::uniform_int_distribution(0, maxSize)(urng_); data_.resize(dataSize); std::generate(data_.begin(), data_.end(), [&]() { return distribution_(urng_); }); @@ -118,7 +118,7 @@ struct RandomPeekSort : public ::testing::TestWithParam< TEST_P(RandomPeekSort, defaultComparator) { - auto [maxSize, min, max, iterations] = GetParam(); + const auto & [maxSize, min, max, iterations] = GetParam(); for (std::size_t i = 0; i < iterations; ++i) { regenerate(); @@ -132,7 +132,7 @@ TEST_P(RandomPeekSort, defaultComparator) TEST_P(RandomPeekSort, greater) { - auto [maxSize, min, max, iterations] = GetParam(); + const auto & [maxSize, min, max, iterations] = GetParam(); for (std::size_t i = 0; i < iterations; ++i) { regenerate(); @@ -146,7 +146,7 @@ TEST_P(RandomPeekSort, greater) TEST_P(RandomPeekSort, brokenComparator) { - auto [maxSize, min, max, iterations] = GetParam(); + const auto & [maxSize, min, max, iterations] = GetParam(); /* This is a pretty nice way of modeling a worst-case scenario for a broken comparator. If the sorting algorithm doesn't break in such case, then surely all deterministic @@ -170,7 +170,7 @@ TEST_P(RandomPeekSort, brokenComparator) TEST_P(RandomPeekSort, stability) { - auto [maxSize, min, max, iterations] = GetParam(); + const auto & [maxSize, min, max, iterations] = GetParam(); for (std::size_t i = 0; i < iterations; ++i) { regenerate(); diff --git a/src/libutil/include/nix/util/json-impls.hh b/src/libutil/include/nix/util/json-impls.hh index 8a6198313..751fc410f 100644 --- a/src/libutil/include/nix/util/json-impls.hh +++ b/src/libutil/include/nix/util/json-impls.hh @@ -4,13 +4,13 @@ #include // Following https://github.com/nlohmann/json#how-can-i-use-get-for-non-default-constructiblenon-copyable-types -#define JSON_IMPL(TYPE) \ - namespace nlohmann { \ - using namespace nix; \ - template<> \ - struct adl_serializer \ - { \ - static TYPE from_json(const json & json); \ - static void to_json(json & json, TYPE t); \ - }; \ +#define JSON_IMPL(TYPE) \ + namespace nlohmann { \ + using namespace nix; \ + template<> \ + struct adl_serializer \ + { \ + static TYPE from_json(const json & json); \ + static void to_json(json & json, const TYPE & t); \ + }; \ } diff --git a/src/libutil/include/nix/util/memory-source-accessor.hh b/src/libutil/include/nix/util/memory-source-accessor.hh index a04d1d347..eba282fe1 100644 --- a/src/libutil/include/nix/util/memory-source-accessor.hh +++ b/src/libutil/include/nix/util/memory-source-accessor.hh @@ -1,3 +1,6 @@ +#pragma once +///@file + #include "nix/util/source-path.hh" #include "nix/util/fs-sink.hh" #include "nix/util/variant-wrapper.hh" @@ -55,7 +58,7 @@ struct MemorySourceAccessor : virtual SourceAccessor Stat lstat() const; }; - File root{File::Directory{}}; + std::optional root; bool operator==(const 
MemorySourceAccessor &) const noexcept = default; diff --git a/src/libutil/include/nix/util/meson.build b/src/libutil/include/nix/util/meson.build index 07a4f1d11..dcfaa8e3f 100644 --- a/src/libutil/include/nix/util/meson.build +++ b/src/libutil/include/nix/util/meson.build @@ -47,6 +47,7 @@ headers = files( 'logging.hh', 'lru-cache.hh', 'memory-source-accessor.hh', + 'mounted-source-accessor.hh', 'muxable-pipe.hh', 'os-string.hh', 'pool.hh', diff --git a/src/libutil/include/nix/util/mounted-source-accessor.hh b/src/libutil/include/nix/util/mounted-source-accessor.hh new file mode 100644 index 000000000..518ae4f09 --- /dev/null +++ b/src/libutil/include/nix/util/mounted-source-accessor.hh @@ -0,0 +1,20 @@ +#pragma once + +#include "source-accessor.hh" + +namespace nix { + +struct MountedSourceAccessor : SourceAccessor +{ + virtual void mount(CanonPath mountPoint, ref accessor) = 0; + + /** + * Return the accessor mounted on `mountPoint`, or `nullptr` if + * there is no such mount point. + */ + virtual std::shared_ptr getMount(CanonPath mountPoint) = 0; +}; + +ref makeMountedSourceAccessor(std::map> mounts); + +} // namespace nix diff --git a/src/libutil/include/nix/util/source-accessor.hh b/src/libutil/include/nix/util/source-accessor.hh index aa937da48..7419ef392 100644 --- a/src/libutil/include/nix/util/source-accessor.hh +++ b/src/libutil/include/nix/util/source-accessor.hh @@ -214,8 +214,6 @@ ref getFSSourceAccessor(); */ ref makeFSSourceAccessor(std::filesystem::path root); -ref makeMountedSourceAccessor(std::map> mounts); - /** * Construct an accessor that presents a "union" view of a vector of * underlying accessors. Earlier accessors take precedence over later. diff --git a/src/libutil/include/nix/util/terminal.hh b/src/libutil/include/nix/util/terminal.hh index f19de268c..fa71e074e 100644 --- a/src/libutil/include/nix/util/terminal.hh +++ b/src/libutil/include/nix/util/terminal.hh @@ -36,4 +36,12 @@ void updateWindowSize(); */ std::pair getWindowSize(); +/** + * Get the slave name of a pseudoterminal in a thread-safe manner. + * + * @param fd The file descriptor of the pseudoterminal master + * @return The slave device name as a string + */ +std::string getPtsName(int fd); + } // namespace nix diff --git a/src/libutil/memory-source-accessor.cc b/src/libutil/memory-source-accessor.cc index 363f52a54..caff5b56a 100644 --- a/src/libutil/memory-source-accessor.cc +++ b/src/libutil/memory-source-accessor.cc @@ -4,7 +4,22 @@ namespace nix { MemorySourceAccessor::File * MemorySourceAccessor::open(const CanonPath & path, std::optional create) { - File * cur = &root; + bool hasRoot = root.has_value(); + + // Special handling of root directory. + if (path.isRoot() && !hasRoot) { + if (create) { + root = std::move(*create); + return &root.value(); + } + return nullptr; + } + + // Root does not exist. + if (!hasRoot) + return nullptr; + + File * cur = &root.value(); bool newF = false; @@ -112,6 +127,10 @@ std::string MemorySourceAccessor::readLink(const CanonPath & path) SourcePath MemorySourceAccessor::addFile(CanonPath path, std::string && contents) { + // Create root directory automatically if necessary as a convenience. 
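
The new `mounted-source-accessor.hh` interface above exposes `mount`/`getMount` on top of the existing `makeMountedSourceAccessor` factory, and the implementation now allows mounts to be added after construction. A sketch of composing accessors this way (the two accessor arguments are assumed to exist; a root mount is still required):

```cpp
#include "nix/util/mounted-source-accessor.hh"

void composeAccessors(nix::ref<nix::SourceAccessor> rootAccessor, nix::ref<nix::SourceAccessor> storeAccessor)
{
    using namespace nix;

    // The factory still requires a root filesystem mount.
    auto mounted = makeMountedSourceAccessor({
        {CanonPath::root, rootAccessor},
    });

    // Mounts can now be added dynamically and looked up again.
    mounted->mount(CanonPath("/nix/store"), storeAccessor);

    if (auto accessor = mounted->getMount(CanonPath("/nix/store"))) {
        // ... use `accessor`, e.g. accessor->readFile(...) ...
    }
}
```
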
+ if (!root && !path.isRoot()) + open(CanonPath::root, File::Directory{}); + auto * f = open(path, File{File::Regular{}}); if (!f) throw Error("file '%s' cannot be made because some parent file is not a directory", path); diff --git a/src/libutil/meson.build b/src/libutil/meson.build index f4b8dbb61..8c9e1f1eb 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -118,6 +118,7 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') sources = [ config_priv_h ] + files( 'archive.cc', diff --git a/src/libutil/mounted-source-accessor.cc b/src/libutil/mounted-source-accessor.cc index 4c32147f9..5c0ecc1ff 100644 --- a/src/libutil/mounted-source-accessor.cc +++ b/src/libutil/mounted-source-accessor.cc @@ -1,18 +1,22 @@ -#include "nix/util/source-accessor.hh" +#include "nix/util/mounted-source-accessor.hh" + +#include namespace nix { -struct MountedSourceAccessor : SourceAccessor +struct MountedSourceAccessorImpl : MountedSourceAccessor { - std::map> mounts; + boost::concurrent_flat_map> mounts; - MountedSourceAccessor(std::map> _mounts) - : mounts(std::move(_mounts)) + MountedSourceAccessorImpl(std::map> _mounts) { displayPrefix.clear(); // Currently we require a root filesystem. This could be relaxed. - assert(mounts.contains(CanonPath::root)); + assert(_mounts.contains(CanonPath::root)); + + for (auto & [path, accessor] : _mounts) + mount(path, accessor); // FIXME: return dummy parent directories automatically? } @@ -52,10 +56,9 @@ struct MountedSourceAccessor : SourceAccessor // Find the nearest parent of `path` that is a mount point. std::vector subpath; while (true) { - auto i = mounts.find(path); - if (i != mounts.end()) { + if (auto mount = getMount(path)) { std::reverse(subpath.begin(), subpath.end()); - return {i->second, CanonPath(subpath)}; + return {ref(mount), CanonPath(subpath)}; } assert(!path.isRoot()); @@ -69,11 +72,24 @@ struct MountedSourceAccessor : SourceAccessor auto [accessor, subpath] = resolve(path); return accessor->getPhysicalPath(subpath); } + + void mount(CanonPath mountPoint, ref accessor) override + { + mounts.emplace(std::move(mountPoint), std::move(accessor)); + } + + std::shared_ptr getMount(CanonPath mountPoint) override + { + if (auto res = getConcurrent(mounts, mountPoint)) + return *res; + else + return nullptr; + } }; -ref makeMountedSourceAccessor(std::map> mounts) +ref makeMountedSourceAccessor(std::map> mounts) { - return make_ref(std::move(mounts)); + return make_ref(std::move(mounts)); } } // namespace nix diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index c524f3e4f..fe3bcb1c1 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -114,6 +114,8 @@ std::optional PosixSourceAccessor::maybeLstat(const CanonP auto st = cachedLstat(path); if (!st) return std::nullopt; + // This makes the accessor thread-unsafe, but we only seem to use the actual value in a single threaded context in + // `src/libfetchers/path.cc`. mtime = std::max(mtime, st->st_mtime); return Stat{ .type = S_ISREG(st->st_mode) ? 
tRegular diff --git a/src/libutil/terminal.cc b/src/libutil/terminal.cc index b5765487c..656847487 100644 --- a/src/libutil/terminal.cc +++ b/src/libutil/terminal.cc @@ -1,6 +1,7 @@ #include "nix/util/terminal.hh" #include "nix/util/environment-variables.hh" #include "nix/util/sync.hh" +#include "nix/util/error.hh" #ifdef _WIN32 # include @@ -12,6 +13,8 @@ #endif #include #include +#include +#include // for ptsname and ptsname_r namespace { @@ -176,4 +179,29 @@ std::pair getWindowSize() return *windowSize.lock(); } +std::string getPtsName(int fd) +{ +#ifdef __APPLE__ + static std::mutex ptsnameMutex; + // macOS doesn't have ptsname_r, use mutex-protected ptsname + std::lock_guard lock(ptsnameMutex); + const char * name = ptsname(fd); + if (!name) { + throw SysError("getting pseudoterminal slave name"); + } + return name; +#else + // Use thread-safe ptsname_r on platforms that support it + // PTY names are typically short: + // - Linux: /dev/pts/N (where N is usually < 1000) + // - FreeBSD: /dev/pts/N + // 64 bytes is more than sufficient for any Unix PTY name + char buf[64]; + if (ptsname_r(fd, buf, sizeof(buf)) != 0) { + throw SysError("getting pseudoterminal slave name"); + } + return buf; +#endif +} + } // namespace nix diff --git a/src/nix/asan-options.cc b/src/nix/asan-options.cc new file mode 100644 index 000000000..256f34cbe --- /dev/null +++ b/src/nix/asan-options.cc @@ -0,0 +1,6 @@ +extern "C" [[gnu::retain]] const char * __asan_default_options() +{ + // We leak a bunch of memory knowingly on purpose. It's not worthwhile to + // diagnose that memory being leaked for now. + return "abort_on_error=1:print_summary=1:detect_leaks=0"; +} diff --git a/src/nix/build.cc b/src/nix/build.cc index eb47c3133..2d4f426a4 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -8,11 +8,79 @@ using namespace nix; +/* This serialization code is diferent from the canonical (single) + derived path serialization because: + + - It looks up output paths where possible + + - It includes the store dir in store paths + + We might want to replace it with the canonical format at some point, + but that would be a breaking change (to a still-experimental but + widely-used command, so that isn't being done at this time just yet. 
+ */ + +static nlohmann::json toJSON(Store & store, const SingleDerivedPath::Opaque & o) +{ + return store.printStorePath(o.path); +} + +static nlohmann::json toJSON(Store & store, const SingleDerivedPath & sdp); +static nlohmann::json toJSON(Store & store, const DerivedPath & dp); + +static nlohmann::json toJSON(Store & store, const SingleDerivedPath::Built & sdpb) +{ + nlohmann::json res; + res["drvPath"] = toJSON(store, *sdpb.drvPath); + // Fallback for the input-addressed derivation case: We expect to always be + // able to print the output paths, so let’s do it + // FIXME try-resolve on drvPath + const auto outputMap = store.queryPartialDerivationOutputMap(resolveDerivedPath(store, *sdpb.drvPath)); + res["output"] = sdpb.output; + auto outputPathIter = outputMap.find(sdpb.output); + if (outputPathIter == outputMap.end()) + res["outputPath"] = nullptr; + else if (std::optional p = outputPathIter->second) + res["outputPath"] = store.printStorePath(*p); + else + res["outputPath"] = nullptr; + return res; +} + +static nlohmann::json toJSON(Store & store, const DerivedPath::Built & dpb) +{ + nlohmann::json res; + res["drvPath"] = toJSON(store, *dpb.drvPath); + // Fallback for the input-addressed derivation case: We expect to always be + // able to print the output paths, so let’s do it + // FIXME try-resolve on drvPath + const auto outputMap = store.queryPartialDerivationOutputMap(resolveDerivedPath(store, *dpb.drvPath)); + for (const auto & [output, outputPathOpt] : outputMap) { + if (!dpb.outputs.contains(output)) + continue; + if (outputPathOpt) + res["outputs"][output] = store.printStorePath(*outputPathOpt); + else + res["outputs"][output] = nullptr; + } + return res; +} + +static nlohmann::json toJSON(Store & store, const SingleDerivedPath & sdp) +{ + return std::visit([&](const auto & buildable) { return toJSON(store, buildable); }, sdp.raw()); +} + +static nlohmann::json toJSON(Store & store, const DerivedPath & dp) +{ + return std::visit([&](const auto & buildable) { return toJSON(store, buildable); }, dp.raw()); +} + static nlohmann::json derivedPathsToJSON(const DerivedPaths & paths, Store & store) { auto res = nlohmann::json::array(); for (auto & t : paths) { - std::visit([&](const auto & t) { res.push_back(t.toJSON(store)); }, t.raw()); + res.push_back(toJSON(store, t)); } return res; } @@ -22,22 +90,18 @@ builtPathsWithResultToJSON(const std::vector & buildables, { auto res = nlohmann::json::array(); for (auto & b : buildables) { - std::visit( - [&](const auto & t) { - auto j = t.toJSON(store); - if (b.result) { - if (b.result->startTime) - j["startTime"] = b.result->startTime; - if (b.result->stopTime) - j["stopTime"] = b.result->stopTime; - if (b.result->cpuUser) - j["cpuUser"] = ((double) b.result->cpuUser->count()) / 1000000; - if (b.result->cpuSystem) - j["cpuSystem"] = ((double) b.result->cpuSystem->count()) / 1000000; - } - res.push_back(j); - }, - b.path.raw()); + auto j = b.path.toJSON(store); + if (b.result) { + if (b.result->startTime) + j["startTime"] = b.result->startTime; + if (b.result->stopTime) + j["stopTime"] = b.result->stopTime; + if (b.result->cpuUser) + j["cpuUser"] = ((double) b.result->cpuUser->count()) / 1000000; + if (b.result->cpuSystem) + j["cpuSystem"] = ((double) b.result->cpuSystem->count()) / 1000000; + } + res.push_back(j); } return res; } diff --git a/src/nix/cat.cc b/src/nix/cat.cc index 276e01f5d..145336723 100644 --- a/src/nix/cat.cc +++ b/src/nix/cat.cc @@ -41,7 +41,10 @@ struct CmdCatStore : StoreCommand, MixCat void run(ref store) 
override { auto [storePath, rest] = store->toStorePath(path); - cat(store->getFSAccessor(), CanonPath{storePath.to_string()} / CanonPath{rest}); + auto accessor = store->getFSAccessor(storePath); + if (!accessor) + throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath)); + cat(ref{std::move(accessor)}, CanonPath{rest}); } }; diff --git a/src/nix/derivation-add.cc b/src/nix/derivation-add.cc index 0f797bb20..2d13aba52 100644 --- a/src/nix/derivation-add.cc +++ b/src/nix/derivation-add.cc @@ -33,7 +33,7 @@ struct CmdAddDerivation : MixDryRun, StoreCommand { auto json = nlohmann::json::parse(drainFD(STDIN_FILENO)); - auto drv = Derivation::fromJSON(*store, json); + auto drv = Derivation::fromJSON(json); auto drvPath = writeDerivation(*store, drv, NoRepair, /* read only */ dryRun); diff --git a/src/nix/derivation-show.cc b/src/nix/derivation-show.cc index 1a61ccd5c..20e54bba7 100644 --- a/src/nix/derivation-show.cc +++ b/src/nix/derivation-show.cc @@ -58,7 +58,7 @@ struct CmdShowDerivation : InstallablesCommand, MixPrintJSON if (!drvPath.isDerivation()) continue; - jsonRoot[store->printStorePath(drvPath)] = store->readDerivation(drvPath).toJSON(*store); + jsonRoot[drvPath.to_string()] = store->readDerivation(drvPath).toJSON(); } printJSON(jsonRoot); } diff --git a/src/nix/env.cc b/src/nix/env.cc index c8fb5bee0..0a211399a 100644 --- a/src/nix/env.cc +++ b/src/nix/env.cc @@ -7,6 +7,7 @@ #include "nix/util/strings.hh" #include "nix/util/executable-path.hh" #include "nix/util/environment-variables.hh" +#include "nix/util/mounted-source-accessor.hh" using namespace nix; diff --git a/src/nix/get-env.sh b/src/nix/get-env.sh index 071edf9b9..39fa6f9ac 100644 --- a/src/nix/get-env.sh +++ b/src/nix/get-env.sh @@ -1,11 +1,14 @@ +# shellcheck shell=bash set -e +# shellcheck disable=SC1090 # Dynamic sourcing is intentional if [ -e "$NIX_ATTRS_SH_FILE" ]; then source "$NIX_ATTRS_SH_FILE"; fi export IN_NIX_SHELL=impure export dontAddDisableDepTrack=1 if [[ -n $stdenv ]]; then - source $stdenv/setup + # shellcheck disable=SC1091 # setup file is in nix store + source "$stdenv"/setup fi # Better to use compgen, but stdenv bash doesn't have it. @@ -17,10 +20,10 @@ __dumpEnv() { printf ' "bashFunctions": {\n' local __first=1 - while read __line; do + while read -r __line; do if ! [[ $__line =~ ^declare\ -f\ (.*) ]]; then continue; fi __fun_name="${BASH_REMATCH[1]}" - __fun_body="$(type $__fun_name)" + __fun_body="$(type "$__fun_name")" if [[ $__fun_body =~ \{(.*)\} ]]; then if [[ -z $__first ]]; then printf ',\n'; else __first=; fi __fun_body="${BASH_REMATCH[1]}" @@ -37,7 +40,7 @@ __dumpEnv() { printf ' "variables": {\n' local __first=1 - while read __line; do + while read -r __line; do if ! [[ $__line =~ ^declare\ (-[^ ])\ ([^=]*) ]]; then continue; fi local type="${BASH_REMATCH[1]}" local __var_name="${BASH_REMATCH[2]}" @@ -76,7 +79,9 @@ __dumpEnv() { elif [[ $type == -a ]]; then printf '"type": "array", "value": [' local __first2=1 + # shellcheck disable=SC1087 # Complex array manipulation, syntax is correct __var_name="$__var_name[@]" + # shellcheck disable=SC1087 # Complex array manipulation, syntax is correct for __i in "${!__var_name}"; do if [[ -z $__first2 ]]; then printf ', '; else __first2=; fi __escapeString "$__i" @@ -142,6 +147,7 @@ __dumpEnvToOutput() { # array with a format like `outname => /nix/store/hash-drvname-outname`. # Otherwise it is a space-separated list of output variable names. 
if [ -e "$NIX_ATTRS_SH_FILE" ]; then + # shellcheck disable=SC2154 # outputs is set by sourced file for __output in "${outputs[@]}"; do __dumpEnvToOutput "$__output" done diff --git a/src/nix/ls.cc b/src/nix/ls.cc index dcc46fa14..4952d5243 100644 --- a/src/nix/ls.cc +++ b/src/nix/ls.cc @@ -115,7 +115,10 @@ struct CmdLsStore : StoreCommand, MixLs void run(ref store) override { auto [storePath, rest] = store->toStorePath(path); - list(store->getFSAccessor(), CanonPath{storePath.to_string()} / CanonPath{rest}); + auto accessor = store->getFSAccessor(storePath); + if (!accessor) + throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath)); + list(ref{std::move(accessor)}, CanonPath{rest}); } }; diff --git a/src/nix/main.cc b/src/nix/main.cc index a6077f5e9..ed889a189 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -226,8 +226,8 @@ static void showHelp(std::vector subcommand, NixArgs & toplevel) auto mdName = subcommand.empty() ? "nix" : fmt("nix3-%s", concatStringsSep("-", subcommand)); - evalSettings.restrictEval = false; - evalSettings.pureEval = false; + evalSettings.restrictEval = true; + evalSettings.pureEval = true; EvalState state({}, openStore("dummy://"), fetchSettings, evalSettings); auto vGenerateManpage = state.allocValue(); diff --git a/src/nix/meson.build b/src/nix/meson.build index e989e8016..f67a2948f 100644 --- a/src/nix/meson.build +++ b/src/nix/meson.build @@ -56,11 +56,13 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') +subdir('nix-meson-build-support/asan-options') subdir('nix-meson-build-support/generate-header') nix_sources = [ config_priv_h ] + files( 'add-to-store.cc', 'app.cc', + 'asan-options.cc', 'build.cc', 'bundle.cc', 'cat.cc', diff --git a/src/nix/nix-store/nix-store.cc b/src/nix/nix-store/nix-store.cc index 5f85e06f0..f8078426c 100644 --- a/src/nix/nix-store/nix-store.cc +++ b/src/nix/nix-store/nix-store.cc @@ -603,7 +603,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) #endif if (!hashGiven) { HashResult hash = hashPath( - {store->getFSAccessor(false), CanonPath{info->path.to_string()}}, + {ref{store->getFSAccessor(info->path, false)}}, FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256); info->narHash = hash.hash; diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 68005171f..80177cf13 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -177,8 +177,8 @@ struct ProfileManifest else if (std::filesystem::exists(profile / "manifest.nix")) { // FIXME: needed because of pure mode; ugly. 
- state.allowPath(state.store->followLinksToStore(profile.string())); - state.allowPath(state.store->followLinksToStore((profile / "manifest.nix").string())); + state.allowPath(state.store->followLinksToStorePath(profile.string())); + state.allowPath(state.store->followLinksToStorePath((profile / "manifest.nix").string())); auto packageInfos = queryInstalled(state, state.store->followLinksToStore(profile.string())); diff --git a/src/nix/realisation.cc b/src/nix/realisation.cc index a0e400f54..8dd608d23 100644 --- a/src/nix/realisation.cc +++ b/src/nix/realisation.cc @@ -59,7 +59,7 @@ struct CmdRealisationInfo : BuiltPathsCommand, MixJSON for (auto & path : realisations) { nlohmann::json currentPath; if (auto realisation = std::get_if(&path.raw)) - currentPath = realisation->toJSON(); + currentPath = *realisation; else currentPath["opaquePath"] = store->printStorePath(path.path()); diff --git a/src/nix/why-depends.cc b/src/nix/why-depends.cc index 7869e33a7..473827a93 100644 --- a/src/nix/why-depends.cc +++ b/src/nix/why-depends.cc @@ -108,8 +108,6 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions auto dependencyPath = *optDependencyPath; auto dependencyPathHash = dependencyPath.hashPart(); - auto accessor = store->getFSAccessor(); - auto const inf = std::numeric_limits::max(); struct Node @@ -172,8 +170,6 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions {}; printNode = [&](Node & node, const std::string & firstPad, const std::string & tailPad) { - CanonPath pathS(node.path.to_string()); - assert(node.dist != inf); if (precise) { logger->cout( @@ -181,7 +177,7 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions firstPad, node.visited ? "\e[38;5;244m" : "", firstPad != "" ? "→ " : "", - pathS.abs()); + store->printStorePath(node.path)); } if (node.path == dependencyPath && !all && packagePath != dependencyPath) @@ -211,13 +207,13 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions contain the reference. */ std::map hits; - std::function visitPath; + auto accessor = store->getFSAccessor(node.path); - visitPath = [&](const CanonPath & p) { + auto visitPath = [&](this auto && recur, const CanonPath & p) -> void { auto st = accessor->maybeLstat(p); assert(st); - auto p2 = p == pathS ? "/" : p.abs().substr(pathS.abs().size() + 1); + auto p2 = p.isRoot() ? p.abs() : p.rel(); auto getColour = [&](const std::string & hash) { return hash == dependencyPathHash ? ANSI_GREEN : ANSI_BLUE; @@ -226,7 +222,7 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions if (st->type == SourceAccessor::Type::tDirectory) { auto names = accessor->readDirectory(p); for (auto & [name, type] : names) - visitPath(p / name); + recur(p / name); } else if (st->type == SourceAccessor::Type::tRegular) { @@ -264,7 +260,7 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions // FIXME: should use scanForReferences(). if (precise) - visitPath(pathS); + visitPath(CanonPath::root); for (auto & ref : refs) { std::string hash(ref.second->path.hashPart()); @@ -280,13 +276,12 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions } if (!precise) { - auto pathS = store->printStorePath(ref.second->path); logger->cout( "%s%s%s%s" ANSI_NORMAL, firstPad, ref.second->visited ? "\e[38;5;244m" : "", last ? 
treeLast : treeConn, - pathS); + store->printStorePath(ref.second->path)); node.visited = true; } diff --git a/src/perl/lib/Nix/Store.xs b/src/perl/lib/Nix/Store.xs index 7aa918ba0..93e9f0f95 100644 --- a/src/perl/lib/Nix/Store.xs +++ b/src/perl/lib/Nix/Store.xs @@ -168,7 +168,7 @@ StoreWrapper::queryRawRealisation(char * outputId) try { auto realisation = THIS->store->queryRealisation(DrvOutput::parse(outputId)); if (realisation) - XPUSHs(sv_2mortal(newSVpv(realisation->toJSON().dump().c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(static_cast(*realisation).dump().c_str(), 0))); else XPUSHs(sv_2mortal(newSVpv("", 0))); } catch (Error & e) { diff --git a/tests/functional/build-delete.sh b/tests/functional/build-delete.sh index 18841509d..66b14fd14 100755 --- a/tests/functional/build-delete.sh +++ b/tests/functional/build-delete.sh @@ -43,6 +43,10 @@ issue_6572_dependent_outputs() { nix-store --delete "$p" # Clean up for next test # Make sure that 'nix build' tracks input-outputs correctly when a single output is already present. + if [[ -n "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then + # Resolved derivations interfere with the deletion + nix-store --delete "${NIX_STORE_DIR}"/*.drv + fi nix-store --delete "$(jq -r <"$TEST_ROOT"/a.json .[0].outputs.second)" p=$(nix build -f multiple-outputs.nix use-a --no-link --print-out-paths) cmp "$p" < $out/file + ''; + }; + + # Same output, different drv + a-prime = mkDerivation { + name = "issue-13247-a"; + builder = builtins.toFile "builder.sh" '' + echo 'will make the same stuff as `a`, but different drv hash' + + mkdir $out + test -z $all + echo "output" > $out/file + ''; + }; + + # Multiple outputs in a derivation that depends on other derivations + f = + dep: + mkDerivation { + name = "use-a-more-outputs"; + outputs = [ + "first" + "second" + ]; + inherit dep; + builder = builtins.toFile "builder.sh" '' + ln -s $dep/file $first + ln -s $first $second + ''; + }; + + use-a-more-outputs = f a; + + use-a-prime-more-outputs = f a-prime; + +} diff --git a/tests/functional/ca/issue-13247.sh b/tests/functional/ca/issue-13247.sh new file mode 100755 index 000000000..686d90ced --- /dev/null +++ b/tests/functional/ca/issue-13247.sh @@ -0,0 +1,71 @@ +#!/usr/bin/env bash + +# https://github.com/NixOS/nix/issues/13247 + +export NIX_TESTS_CA_BY_DEFAULT=1 + +source common.sh + +clearStoreIfPossible + +set -x + +# Build derivation (both outputs) +nix build -f issue-13247.nix --json a a-prime use-a-more-outputs --no-link > "$TEST_ROOT"/a.json + +cache="file://$TEST_ROOT/cache" + +# Copy all outputs and realisations to cache +declare -a drvs +for d in "$NIX_STORE_DIR"/*-issue-13247-a.drv "$NIX_STORE_DIR"/*-use-a-more-outputs.drv; do + drvs+=("$d" "$d"^*) +done +nix copy --to "$cache" "${drvs[@]}" + +function delete () { + # Delete local copy + # shellcheck disable=SC2046 + nix-store --delete \ + $(jq -r <"$TEST_ROOT"/a.json '.[] | .drvPath, .outputs.[]') \ + "$NIX_STORE_DIR"/*-issue-13247-a.drv \ + "$NIX_STORE_DIR"/*-use-a-more-outputs.drv + + [[ ! -e "$(jq -r <"$TEST_ROOT"/a.json '.[0].outputs.out')" ]] + [[ ! -e "$(jq -r <"$TEST_ROOT"/a.json '.[1].outputs.out')" ]] + [[ ! -e "$(jq -r <"$TEST_ROOT"/a.json '.[2].outputs.first')" ]] + [[ ! 
-e "$(jq -r <"$TEST_ROOT"/a.json '.[2].outputs.second')" ]] +} + +delete + +buildViaSubstitute () { + nix build -f issue-13247.nix "$1" --no-link --max-jobs 0 --substituters "$cache" --no-require-sigs --offline --substitute +} + +# Substitute just the first output +buildViaSubstitute use-a-more-outputs^first + +# Should only fetch the output we asked for +[[ -d "$(jq -r <"$TEST_ROOT"/a.json '.[0].outputs.out')" ]] +[[ -f "$(jq -r <"$TEST_ROOT"/a.json '.[2].outputs.first')" ]] +[[ ! -e "$(jq -r <"$TEST_ROOT"/a.json '.[2].outputs.second')" ]] + +delete + +# Failure with 2.28 encountered in CI +requireDaemonNewerThan "2.29" + +# Substitute just the first output +# +# This derivation is the same after normalization, so we should get +# early cut-off, and thus a chance to download just the output we want +# rather than building more +buildViaSubstitute use-a-prime-more-outputs^first + +# Should only fetch the output we asked for +[[ -d "$(jq -r <"$TEST_ROOT"/a.json '.[0].outputs.out')" ]] +[[ -f "$(jq -r <"$TEST_ROOT"/a.json '.[2].outputs.first')" ]] + +# Output should *not* be here, this is the bug +[[ -e "$(jq -r <"$TEST_ROOT"/a.json '.[2].outputs.second')" ]] +skipTest "bug is not yet fixed" diff --git a/tests/functional/ca/meson.build b/tests/functional/ca/meson.build index ec34e9644..b1912fd86 100644 --- a/tests/functional/ca/meson.build +++ b/tests/functional/ca/meson.build @@ -9,6 +9,7 @@ suites += { 'deps' : [], 'tests' : [ 'build-cache.sh', + 'build-delete.sh', 'build-with-garbage-path.sh', 'build.sh', 'concurrent-builds.sh', @@ -18,6 +19,8 @@ suites += { 'eval-store.sh', 'gc.sh', 'import-from-derivation.sh', + 'issue-13247.sh', + 'multiple-outputs.sh', 'new-build-cmd.sh', 'nix-copy.sh', 'nix-run.sh', diff --git a/tests/functional/ca/multiple-outputs.sh b/tests/functional/ca/multiple-outputs.sh new file mode 100644 index 000000000..63b7d3197 --- /dev/null +++ b/tests/functional/ca/multiple-outputs.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +source common.sh + +export NIX_TESTS_CA_BY_DEFAULT=1 +cd .. +source ./multiple-outputs.sh diff --git a/tests/functional/characterisation-test-infra.sh b/tests/functional/characterisation-test-infra.sh index 279454550..fecae29e8 100755 --- a/tests/functional/characterisation-test-infra.sh +++ b/tests/functional/characterisation-test-infra.sh @@ -40,7 +40,7 @@ echo Bye! > "$TEST_ROOT/expected" diffAndAcceptInner test "$TEST_ROOT/got" "$TEST_ROOT/expected" (( "$badDiff" == 1 )) ) -[[ "$(echo Bye! )" == $(< "$TEST_ROOT/expected") ]] +[[ "Bye!" == $(< "$TEST_ROOT/expected") ]] # _NIX_TEST_ACCEPT=1 matches non-empty echo Hi! > "$TEST_ROOT/got" @@ -57,7 +57,7 @@ echo Bye! > "$TEST_ROOT/expected" _NIX_TEST_ACCEPT=1 diffAndAcceptInner test "$TEST_ROOT/got" "$TEST_ROOT/expected" (( "$badDiff" == 1 )) ) -[[ "$(echo Hi! )" == $(< "$TEST_ROOT/expected") ]] +[[ "Hi!" == $(< "$TEST_ROOT/expected") ]] # second time succeeds ( diffAndAcceptInner test "$TEST_ROOT/got" "$TEST_ROOT/expected" diff --git a/tests/functional/completions.sh b/tests/functional/completions.sh index 9164c5013..b521d35fb 100755 --- a/tests/functional/completions.sh +++ b/tests/functional/completions.sh @@ -53,7 +53,9 @@ cd .. 
## With multiple input flakes [[ "$(NIX_GET_COMPLETIONS=5 nix build ./foo ./bar --override-input '')" == $'normal\na\t\nb\t' ]] ## With tilde expansion +# shellcheck disable=SC2088 [[ "$(HOME=$PWD NIX_GET_COMPLETIONS=4 nix build '~/foo' --override-input '')" == $'normal\na\t' ]] +# shellcheck disable=SC2088 [[ "$(HOME=$PWD NIX_GET_COMPLETIONS=5 nix flake update --flake '~/foo' '')" == $'normal\na\t' ]] ## Out of order [[ "$(NIX_GET_COMPLETIONS=3 nix build --override-input '' '' ./foo)" == $'normal\na\t' ]] diff --git a/tests/functional/config.sh b/tests/functional/config.sh index 50858eaa4..c1d47454e 100755 --- a/tests/functional/config.sh +++ b/tests/functional/config.sh @@ -62,7 +62,7 @@ prev=$(nix config show | grep '^cores' | cut -d '=' -f 2 | xargs) export NIX_CONFIG="cores = 4242"$'\n'"experimental-features = nix-command flakes" exp_cores=$(nix config show | grep '^cores' | cut -d '=' -f 2 | xargs) exp_features=$(nix config show | grep '^experimental-features' | cut -d '=' -f 2 | xargs) -[[ $prev != $exp_cores ]] +[[ $prev != "$exp_cores" ]] [[ $exp_cores == "4242" ]] # flakes implies fetch-tree [[ $exp_features == "fetch-tree flakes nix-command" ]] @@ -70,7 +70,7 @@ exp_features=$(nix config show | grep '^experimental-features' | cut -d '=' -f 2 # Test that it's possible to retrieve a single setting's value val=$(nix config show | grep '^warn-dirty' | cut -d '=' -f 2 | xargs) val2=$(nix config show warn-dirty) -[[ $val == $val2 ]] +[[ $val == "$val2" ]] # Test unit prefixes. [[ $(nix config show --min-free 64K min-free) = 65536 ]] diff --git a/tests/functional/db-migration.sh b/tests/functional/db-migration.sh index 6feabb90d..bdbdd21fa 100755 --- a/tests/functional/db-migration.sh +++ b/tests/functional/db-migration.sh @@ -19,14 +19,15 @@ PATH_WITH_NEW_NIX="$PATH" export PATH="${NIX_DAEMON_PACKAGE}/bin:$PATH" clearStore nix-build simple.nix --no-out-link -nix-store --generate-binary-cache-key cache1.example.org $TEST_ROOT/sk1 $TEST_ROOT/pk1 +nix-store --generate-binary-cache-key cache1.example.org "$TEST_ROOT/sk1" "$TEST_ROOT/pk1" dependenciesOutPath=$(nix-build dependencies.nix --no-out-link --secret-key-files "$TEST_ROOT/sk1") fixedOutPath=$(IMPURE_VAR1=foo IMPURE_VAR2=bar nix-build fixed.nix -A good.0 --no-out-link) # Migrate to the new schema and ensure that everything's there export PATH="$PATH_WITH_NEW_NIX" -info=$(nix path-info --json $dependenciesOutPath) +info=$(nix path-info --json "$dependenciesOutPath") [[ $info =~ '"ultimate":true' ]] +# shellcheck disable=SC2076 [[ $info =~ 'cache1.example.org' ]] nix verify -r "$fixedOutPath" -nix verify -r "$dependenciesOutPath" --sigs-needed 1 --trusted-public-keys $(cat $TEST_ROOT/pk1) +nix verify -r "$dependenciesOutPath" --sigs-needed 1 --trusted-public-keys "$(cat "$TEST_ROOT/pk1")" diff --git a/tests/functional/dependencies.builder0.sh b/tests/functional/dependencies.builder0.sh index 9b11576e0..6fbe4a07a 100644 --- a/tests/functional/dependencies.builder0.sh +++ b/tests/functional/dependencies.builder0.sh @@ -1,16 +1,20 @@ +# shellcheck shell=bash +# shellcheck disable=SC2154 [ "${input1: -2}" = /. ] +# shellcheck disable=SC2154 [ "${input2: -2}" = /. ] -mkdir $out -echo $(cat $input1/foo)$(cat $input2/bar) > $out/foobar +# shellcheck disable=SC2154 +mkdir "$out" +echo "$(cat "$input1"/foo)$(cat "$input2"/bar)" > "$out"/foobar -ln -s $input2 $out/reference-to-input-2 +ln -s "$input2" "$out"/reference-to-input-2 # Self-reference. -ln -s $out $out/self +ln -s "$out" "$out"/self # Executable. 
-echo program > $out/program -chmod +x $out/program +echo program > "$out"/program +chmod +x "$out"/program echo FOO diff --git a/tests/functional/dependencies.sh b/tests/functional/dependencies.sh index 972bc5a9b..68c0d3f2e 100755 --- a/tests/functional/dependencies.sh +++ b/tests/functional/dependencies.sh @@ -11,22 +11,22 @@ echo "derivation is $drvPath" nix-store -q --tree "$drvPath" | grep '───.*builder-dependencies-input-1.sh' # Test Graphviz graph generation. -nix-store -q --graph "$drvPath" > $TEST_ROOT/graph +nix-store -q --graph "$drvPath" > "$TEST_ROOT"/graph if test -n "$dot"; then # Does it parse? - $dot < $TEST_ROOT/graph + $dot < "$TEST_ROOT"/graph fi # Test GraphML graph generation -nix-store -q --graphml "$drvPath" > $TEST_ROOT/graphml +nix-store -q --graphml "$drvPath" > "$TEST_ROOT"/graphml outPath=$(nix-store -rvv "$drvPath") || fail "build failed" # Test Graphviz graph generation. -nix-store -q --graph "$outPath" > $TEST_ROOT/graph +nix-store -q --graph "$outPath" > "$TEST_ROOT"/graph if test -n "$dot"; then # Does it parse? - $dot < $TEST_ROOT/graph + $dot < "$TEST_ROOT"/graph fi nix-store -q --tree "$outPath" | grep '───.*dependencies-input-2' @@ -53,7 +53,7 @@ input2OutPath=$(echo "$deps" | grep "dependencies-input-2") nix-store -q --referrers-closure "$input2OutPath" | grep "$outPath" # Check that the derivers are set properly. -test $(nix-store -q --deriver "$outPath") = "$drvPath" +test "$(nix-store -q --deriver "$outPath")" = "$drvPath" nix-store -q --deriver "$input2OutPath" | grepQuiet -- "-input-2.drv" # --valid-derivers returns the currently single valid .drv file diff --git a/tests/functional/dump-db.sh b/tests/functional/dump-db.sh index 14181b4b6..70d79e9fb 100755 --- a/tests/functional/dump-db.sh +++ b/tests/functional/dump-db.sh @@ -8,19 +8,18 @@ needLocalStore "--dump-db requires a local store" clearStore -path=$(nix-build dependencies.nix -o $TEST_ROOT/result) +nix-build dependencies.nix -o "$TEST_ROOT"/result +deps="$(nix-store -qR "$TEST_ROOT"/result)" -deps="$(nix-store -qR $TEST_ROOT/result)" +nix-store --dump-db > "$TEST_ROOT"/dump -nix-store --dump-db > $TEST_ROOT/dump +rm -rf "$NIX_STATE_DIR"/db -rm -rf $NIX_STATE_DIR/db +nix-store --load-db < "$TEST_ROOT"/dump -nix-store --load-db < $TEST_ROOT/dump - -deps2="$(nix-store -qR $TEST_ROOT/result)" +deps2="$(nix-store -qR "$TEST_ROOT"/result)" [ "$deps" = "$deps2" ]; -nix-store --dump-db > $TEST_ROOT/dump2 -cmp $TEST_ROOT/dump $TEST_ROOT/dump2 +nix-store --dump-db > "$TEST_ROOT"/dump2 +cmp "$TEST_ROOT"/dump "$TEST_ROOT"/dump2 diff --git a/tests/functional/dyn-drv/build-built-drv.sh b/tests/functional/dyn-drv/build-built-drv.sh index 49d61c6ce..78db41327 100644 --- a/tests/functional/dyn-drv/build-built-drv.sh +++ b/tests/functional/dyn-drv/build-built-drv.sh @@ -23,4 +23,4 @@ requireDaemonNewerThan "2.30pre20250515" out2=$(nix build "${drvDep}^out^out" --no-link) -test $out1 == $out2 +test "$out1" == "$out2" diff --git a/tests/functional/dyn-drv/common.sh b/tests/functional/dyn-drv/common.sh index 0d95881b6..ca24498d0 100644 --- a/tests/functional/dyn-drv/common.sh +++ b/tests/functional/dyn-drv/common.sh @@ -1,3 +1,4 @@ +# shellcheck shell=bash source ../common.sh # Need backend to support text-hashing too diff --git a/tests/functional/dyn-drv/dep-built-drv.sh b/tests/functional/dyn-drv/dep-built-drv.sh index e9a8b6b83..f5be23645 100644 --- a/tests/functional/dyn-drv/dep-built-drv.sh +++ b/tests/functional/dyn-drv/dep-built-drv.sh @@ -11,4 +11,4 @@ clearStore out2=$(nix-build 
./text-hashed-output.nix -A wrapper --no-out-link) -diff -r $out1 $out2 +diff -r "$out1" "$out2" diff --git a/tests/functional/dyn-drv/non-trivial.nix b/tests/functional/dyn-drv/non-trivial.nix index 5cfafbb62..3c24ac2ee 100644 --- a/tests/functional/dyn-drv/non-trivial.nix +++ b/tests/functional/dyn-drv/non-trivial.nix @@ -62,12 +62,15 @@ builtins.outputOf "hashAlgo": "sha256" } }, - "system": "${system}" + "system": "${system}", + "version": 3 } EOF - drvs[$word]="$(echo "$json" | nix derivation add)" + drvPath=$(echo "$json" | nix derivation add) + storeDir=$(dirname "$drvPath") + drvs[$word]="$(basename "$drvPath")" done - cp "''${drvs[e]}" $out + cp "''${storeDir}/''${drvs[e]}" $out ''; __contentAddressed = true; diff --git a/tests/functional/dyn-drv/old-daemon-error-hack.sh b/tests/functional/dyn-drv/old-daemon-error-hack.sh index 43b049973..02129bd73 100644 --- a/tests/functional/dyn-drv/old-daemon-error-hack.sh +++ b/tests/functional/dyn-drv/old-daemon-error-hack.sh @@ -1,3 +1,4 @@ +# shellcheck shell=bash # Purposely bypassing our usual common for this subgroup source ../common.sh diff --git a/tests/functional/dyn-drv/recursive-mod-json.sh b/tests/functional/dyn-drv/recursive-mod-json.sh index 0698b81bd..01e8f16e9 100644 --- a/tests/functional/dyn-drv/recursive-mod-json.sh +++ b/tests/functional/dyn-drv/recursive-mod-json.sh @@ -1,3 +1,4 @@ +# shellcheck shell=bash source common.sh # FIXME @@ -10,18 +11,18 @@ restartDaemon clearStore -rm -f $TEST_ROOT/result +rm -f "$TEST_ROOT"/result -EXTRA_PATH=$(dirname $(type -p nix)):$(dirname $(type -p jq)) +EXTRA_PATH=$(dirname "$(type -p nix)"):$(dirname "$(type -p jq)") export EXTRA_PATH # Will produce a drv metaDrv=$(nix-instantiate ./recursive-mod-json.nix) # computed "dynamic" derivation -drv=$(nix-store -r $metaDrv) +drv=$(nix-store -r "$metaDrv") # build that dyn drv -res=$(nix-store -r $drv) +res=$(nix-store -r "$drv") -grep 'I am alive!' $res/hello +grep 'I am alive!' "$res"/hello diff --git a/tests/functional/eval-store.sh b/tests/functional/eval-store.sh index 202e7b004..9f4b3b036 100755 --- a/tests/functional/eval-store.sh +++ b/tests/functional/eval-store.sh @@ -6,6 +6,7 @@ TODO_NixOS # Using `--eval-store` with the daemon will eventually copy everything # to the build store, invalidating most of the tests here +# shellcheck disable=SC1111 needLocalStore "“--eval-store” doesn't achieve much with the daemon" eval_store=$TEST_ROOT/eval-store @@ -15,7 +16,7 @@ rm -rf "$eval_store" nix build -f dependencies.nix --eval-store "$eval_store" -o "$TEST_ROOT/result" [[ -e $TEST_ROOT/result/foobar ]] -if [[ ! -n "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then +if [[ -z "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then # Resolved CA derivations are written to store for building # # TODO when we something more systematic @@ -23,32 +24,36 @@ if [[ ! -n "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then # between scratch storage for building and the final destination # store, we'll be able to make this unconditional again -- resolved # derivations should only appear in the scratch store. - (! ls $NIX_STORE_DIR/*.drv) + (! ls "$NIX_STORE_DIR"/*.drv) fi -ls $eval_store/nix/store/*.drv +ls "$eval_store"/nix/store/*.drv clearStore rm -rf "$eval_store" nix-instantiate dependencies.nix --eval-store "$eval_store" -(! ls $NIX_STORE_DIR/*.drv) -ls $eval_store/nix/store/*.drv +(! 
ls "$NIX_STORE_DIR"/*.drv) +ls "$eval_store"/nix/store/*.drv clearStore rm -rf "$eval_store" nix-build dependencies.nix --eval-store "$eval_store" -o "$TEST_ROOT/result" [[ -e $TEST_ROOT/result/foobar ]] -if [[ ! -n "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then +if [[ -z "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then # See above - (! ls $NIX_STORE_DIR/*.drv) + (! ls "$NIX_STORE_DIR"/*.drv) fi -ls $eval_store/nix/store/*.drv +ls "$eval_store"/nix/store/*.drv clearStore rm -rf "$eval_store" # Confirm that import-from-derivation builds on the build store [[ $(nix eval --eval-store "$eval_store?require-sigs=false" --impure --raw --file ./ifd.nix) = hi ]] -ls $NIX_STORE_DIR/*dependencies-top/foobar -(! ls $eval_store/nix/store/*dependencies-top/foobar) +ls "$NIX_STORE_DIR"/*dependencies-top/foobar +(! ls "$eval_store"/nix/store/*dependencies-top/foobar) + +# Can't write .drv by default +(! nix-instantiate dependencies.nix --eval-store "dummy://") +nix-instantiate dependencies.nix --eval-store "dummy://?read-only=false" diff --git a/tests/functional/export-graph.sh b/tests/functional/export-graph.sh index b507b6d3a..0490b580d 100755 --- a/tests/functional/export-graph.sh +++ b/tests/functional/export-graph.sh @@ -8,27 +8,29 @@ clearStore clearProfiles checkRef() { - nix-store -q --references $TEST_ROOT/result | grepQuiet "$1"'$' || fail "missing reference $1" + nix-store -q --references "$TEST_ROOT"/result | grepQuiet "$1"'$' || fail "missing reference $1" } # Test the export of the runtime dependency graph. -outPath=$(nix-build ./export-graph.nix -A 'foo."bar.runtimeGraph"' -o $TEST_ROOT/result) +outPath=$(nix-build ./export-graph.nix -A 'foo."bar.runtimeGraph"' -o "$TEST_ROOT"/result) -test $(nix-store -q --references $TEST_ROOT/result | wc -l) = 3 || fail "bad nr of references" +test "$(nix-store -q --references "$TEST_ROOT"/result | wc -l)" = 3 || fail "bad nr of references" checkRef input-2 -for i in $(cat $outPath); do checkRef $i; done +# shellcheck disable=SC2013 +for i in $(cat "$outPath"); do checkRef "$i"; done # Test the export of the build-time dependency graph. 
nix-store --gc # should force rebuild of input-1 -outPath=$(nix-build ./export-graph.nix -A 'foo."bar.buildGraph"' -o $TEST_ROOT/result) +outPath=$(nix-build ./export-graph.nix -A 'foo."bar.buildGraph"' -o "$TEST_ROOT"/result) checkRef input-1 checkRef input-1.drv checkRef input-2 checkRef input-2.drv -for i in $(cat $outPath); do checkRef $i; done +# shellcheck disable=SC2013 +for i in $(cat "$outPath"); do checkRef "$i"; done diff --git a/tests/functional/export.sh b/tests/functional/export.sh index 3e895a540..53bbdd9ac 100755 --- a/tests/functional/export.sh +++ b/tests/functional/export.sh @@ -8,11 +8,12 @@ clearStore outPath=$(nix-build dependencies.nix --no-out-link) -nix-store --export $outPath > $TEST_ROOT/exp +nix-store --export "$outPath" > "$TEST_ROOT"/exp -nix-store --export $(nix-store -qR $outPath) > $TEST_ROOT/exp_all +# shellcheck disable=SC2046 +nix-store --export $(nix-store -qR "$outPath") > "$TEST_ROOT"/exp_all -if nix-store --export $outPath >/dev/full ; then +if nix-store --export "$outPath" >/dev/full ; then echo "exporting to a bad file descriptor should fail" exit 1 fi @@ -20,7 +21,7 @@ fi clearStore -if nix-store --import < $TEST_ROOT/exp; then +if nix-store --import < "$TEST_ROOT"/exp; then echo "importing a non-closure should fail" exit 1 fi @@ -28,13 +29,14 @@ fi clearStore -nix-store --import < $TEST_ROOT/exp_all +nix-store --import < "$TEST_ROOT"/exp_all -nix-store --export $(nix-store -qR $outPath) > $TEST_ROOT/exp_all2 +# shellcheck disable=SC2046 +nix-store --export $(nix-store -qR "$outPath") > "$TEST_ROOT"/exp_all2 clearStore # Regression test: the derivers in exp_all2 are empty, which shouldn't # cause a failure. -nix-store --import < $TEST_ROOT/exp_all2 +nix-store --import < "$TEST_ROOT"/exp_all2 diff --git a/tests/functional/fetchClosure.sh b/tests/functional/fetchClosure.sh index 7ef635d36..9b79ab396 100755 --- a/tests/functional/fetchClosure.sh +++ b/tests/functional/fetchClosure.sh @@ -17,14 +17,14 @@ requireDaemonNewerThan "2.16.0pre20230524" # Initialize binary cache. nonCaPath=$(nix build --json --file ./dependencies.nix --no-link | jq -r .[].outputs.out) -caPath=$(nix store make-content-addressed --json $nonCaPath | jq -r '.rewrites | map(.) | .[]') -nix copy --to file://$cacheDir $nonCaPath +caPath=$(nix store make-content-addressed --json "$nonCaPath" | jq -r '.rewrites | map(.) | .[]') +nix copy --to file://"$cacheDir" "$nonCaPath" # Test basic fetchClosure rewriting from non-CA to CA. clearStore -[ ! -e $nonCaPath ] -[ ! -e $caPath ] +[ ! -e "$nonCaPath" ] +[ ! -e "$caPath" ] [[ $(nix eval -v --raw --expr " builtins.fetchClosure { @@ -32,10 +32,10 @@ clearStore fromPath = $nonCaPath; toPath = $caPath; } -") = $caPath ]] +") = "$caPath" ]] -[ ! -e $nonCaPath ] -[ -e $caPath ] +[ ! -e "$nonCaPath" ] +[ -e "$caPath" ] clearStore @@ -55,7 +55,7 @@ if [[ "$NIX_REMOTE" != "daemon" ]]; then # TODO: Should the closure be rejected, despite single user mode? # [ ! -e $nonCaPath ] - [ ! -e $caPath ] + [ ! -e "$caPath" ] # We can use non-CA paths when we ask explicitly. [[ $(nix eval --raw --no-require-sigs --expr " @@ -64,15 +64,15 @@ if [[ "$NIX_REMOTE" != "daemon" ]]; then fromPath = $nonCaPath; inputAddressed = true; } - ") = $nonCaPath ]] + ") = "$nonCaPath" ]] - [ -e $nonCaPath ] - [ ! -e $caPath ] + [ -e "$nonCaPath" ] + [ ! -e "$caPath" ] fi -[ ! -e $caPath ] +[ ! -e "$caPath" ] # 'toPath' set to empty string should fail but print the expected path. 
expectStderr 1 nix eval -v --json --expr " @@ -84,39 +84,41 @@ expectStderr 1 nix eval -v --json --expr " " | grep "error: rewriting.*$nonCaPath.*yielded.*$caPath" # If fromPath is CA, then toPath isn't needed. -nix copy --to file://$cacheDir $caPath +nix copy --to file://"$cacheDir" "$caPath" clearStore -[ ! -e $caPath ] +[ ! -e "$caPath" ] [[ $(nix eval -v --raw --expr " builtins.fetchClosure { fromStore = \"file://$cacheDir\"; fromPath = $caPath; } -") = $caPath ]] +") = "$caPath" ]] -[ -e $caPath ] +[ -e "$caPath" ] # Check that URL query parameters aren't allowed. clearStore narCache=$TEST_ROOT/nar-cache -rm -rf $narCache +rm -rf "$narCache" (! nix eval -v --raw --expr " builtins.fetchClosure { fromStore = \"file://$cacheDir?local-nar-cache=$narCache\"; fromPath = $caPath; } ") -(! [ -e $narCache ]) +# shellcheck disable=SC2235 +(! [ -e "$narCache" ]) # If toPath is specified but wrong, we check it (only) when the path is missing. clearStore -badPath=$(echo $caPath | sed -e 's!/store/................................-!/store/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-!') +# shellcheck disable=SC2001 +badPath=$(echo "$caPath" | sed -e 's!/store/................................-!/store/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-!') -[ ! -e $badPath ] +[ ! -e "$badPath" ] expectStderr 1 nix eval -v --raw --expr " builtins.fetchClosure { @@ -126,11 +128,11 @@ expectStderr 1 nix eval -v --raw --expr " } " | grep "error: rewriting.*$nonCaPath.*yielded.*$caPath.*while.*$badPath.*was expected" -[ ! -e $badPath ] +[ ! -e "$badPath" ] # We only check it when missing, as a performance optimization similar to what we do for fixed output derivations. So if it's already there, we don't check it. # It would be nice for this to fail, but checking it would be too(?) slow. -[ -e $caPath ] +[ -e "$caPath" ] [[ $(nix eval -v --raw --expr " builtins.fetchClosure { @@ -138,7 +140,7 @@ expectStderr 1 nix eval -v --raw --expr " fromPath = $badPath; toPath = $caPath; } -") = $caPath ]] +") = "$caPath" ]] # However, if the output address is unexpected, we can report it diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index e7c9c77a5..be8b5cb34 100755 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -12,25 +12,25 @@ repo=$TEST_ROOT/./git export _NIX_FORCE_HTTP=1 -rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix $TEST_ROOT/worktree $TEST_ROOT/minimal +rm -rf "$repo" "${repo}"-tmp "$TEST_HOME"/.cache/nix "$TEST_ROOT"/worktree "$TEST_ROOT"/minimal -git init $repo -git -C $repo config user.email "foobar@example.com" -git -C $repo config user.name "Foobar" +git init "$repo" +git -C "$repo" config user.email "foobar@example.com" +git -C "$repo" config user.name "Foobar" -echo utrecht > $repo/hello -touch $repo/.gitignore -git -C $repo add hello .gitignore -git -C $repo commit -m 'Bla1' -rev1=$(git -C $repo rev-parse HEAD) -git -C $repo tag -a tag1 -m tag1 +echo utrecht > "$repo"/hello +touch "$repo"/.gitignore +git -C "$repo" add hello .gitignore +git -C "$repo" commit -m 'Bla1' +rev1=$(git -C "$repo" rev-parse HEAD) +git -C "$repo" tag -a tag1 -m tag1 -echo world > $repo/hello -git -C $repo commit -m 'Bla2' -a -git -C $repo worktree add $TEST_ROOT/worktree -echo hello >> $TEST_ROOT/worktree/hello -rev2=$(git -C $repo rev-parse HEAD) -git -C $repo tag -a tag2 -m tag2 +echo world > "$repo"/hello +git -C "$repo" commit -m 'Bla2' -a +git -C "$repo" worktree add "$TEST_ROOT"/worktree +echo hello >> "$TEST_ROOT"/worktree/hello +rev2=$(git -C "$repo" rev-parse HEAD) +git -C "$repo" tag -a tag2 -m 
tag2 # Check whether fetching in read-only mode works. nix-instantiate --eval -E "builtins.readFile ((builtins.fetchGit file://$TEST_ROOT/worktree) + \"/hello\") == \"utrecht\\n\"" @@ -40,52 +40,52 @@ unset _NIX_FORCE_HTTP expectStderr 0 nix eval -vvvv --impure --raw --expr "(builtins.fetchGit file://$TEST_ROOT/worktree).outPath" | grepQuiet "copying '$TEST_ROOT/worktree/' to the store" path0=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$TEST_ROOT/worktree).outPath") path0_=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; url = file://$TEST_ROOT/worktree; }).outPath") -[[ $path0 = $path0_ ]] +[[ $path0 = "$path0_" ]] path0_=$(nix eval --impure --raw --expr "(builtins.fetchTree git+file://$TEST_ROOT/worktree).outPath") -[[ $path0 = $path0_ ]] +[[ $path0 = "$path0_" ]] export _NIX_FORCE_HTTP=1 -[[ $(tail -n 1 $path0/hello) = "hello" ]] +[[ $(tail -n 1 "$path0"/hello) = "hello" ]] # Nuke the cache -rm -rf $TEST_HOME/.cache/nix +rm -rf "$TEST_HOME"/.cache/nix # Fetch the default branch. path=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath") -[[ $(cat $path/hello) = world ]] +[[ $(cat "$path"/hello) = world ]] # Fetch again. This should be cached. # NOTE: This has to be done before the test case below which tries to pack-refs # the reason being that the lookup on the cache uses the ref-file `/refs/heads/master` # which does not exist after packing. -mv $repo ${repo}-tmp +mv "$repo" "${repo}"-tmp path2=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath") -[[ $path = $path2 ]] +[[ $path = "$path2" ]] [[ $(nix eval --impure --expr "(builtins.fetchGit file://$repo).revCount") = 2 ]] -[[ $(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).rev") = $rev2 ]] -[[ $(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).shortRev") = ${rev2:0:7} ]] +[[ $(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).rev") = "$rev2" ]] +[[ $(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).shortRev") = "${rev2:0:7}" ]] # Fetching with a explicit hash should succeed. 
path2=$(nix eval --refresh --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath") -[[ $path = $path2 ]] +[[ $path = "$path2" ]] path2=$(nix eval --refresh --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev1\"; }).outPath") -[[ $(cat $path2/hello) = utrecht ]] +[[ $(cat "$path2"/hello) = utrecht ]] -mv ${repo}-tmp $repo +mv "${repo}"-tmp "$repo" # Fetch when the cache has packed-refs # Regression test of #8822 -git -C $TEST_HOME/.cache/nix/gitv3/*/ pack-refs --all +git -C "$TEST_HOME"/.cache/nix/gitv3/*/ pack-refs --all path=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath") # Fetch a rev from another branch -git -C $repo checkout -b devtest -echo "different file" >> $TEST_ROOT/git/differentbranch -git -C $repo add differentbranch -git -C $repo commit -m 'Test2' -git -C $repo checkout master -devrev=$(git -C $repo rev-parse devtest) +git -C "$repo" checkout -b devtest +echo "different file" >> "$TEST_ROOT"/git/differentbranch +git -C "$repo" add differentbranch +git -C "$repo" commit -m 'Test2' +git -C "$repo" checkout master +devrev=$(git -C "$repo" rev-parse devtest) nix eval --raw --expr "builtins.fetchGit { url = file://$repo; rev = \"$devrev\"; }" [[ $(nix eval --raw --expr "builtins.readFile (builtins.fetchGit { url = file://$repo; rev = \"$devrev\"; allRefs = true; } + \"/differentbranch\")") = 'different file' ]] @@ -96,7 +96,7 @@ nix eval --raw --expr "builtins.fetchGit { url = file://$repo; rev = \"$devrev\" # Fetch using an explicit revision hash. path2=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath") -[[ $path = $path2 ]] +[[ $path = "$path2" ]] # In pure eval mode, fetchGit with a revision should succeed. [[ $(nix eval --raw --expr "builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev2\"; } + \"/hello\")") = world ]] @@ -106,23 +106,23 @@ expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "'f # Using a clean working tree should produce the same result. path2=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath") -[[ $path = $path2 ]] +[[ $path = "$path2" ]] # Using an unclean tree should yield the tracked but uncommitted changes. -mkdir $repo/dir1 $repo/dir2 -echo foo > $repo/dir1/foo -echo bar > $repo/bar -echo bar > $repo/dir2/bar -git -C $repo add dir1/foo -git -C $repo rm hello +mkdir "$repo"/dir1 "$repo"/dir2 +echo foo > "$repo"/dir1/foo +echo bar > "$repo"/bar +echo bar > "$repo"/dir2/bar +git -C "$repo" add dir1/foo +git -C "$repo" rm hello unset _NIX_FORCE_HTTP path2=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath") -[ ! -e $path2/hello ] -[ ! -e $path2/bar ] -[ ! -e $path2/dir2/bar ] -[ ! -e $path2/.git ] -[[ $(cat $path2/dir1/foo) = foo ]] +[ ! -e "$path2"/hello ] +[ ! -e "$path2"/bar ] +[ ! -e "$path2"/dir2/bar ] +[ ! -e "$path2"/.git ] +[[ $(cat "$path2"/dir1/foo) = foo ]] [[ $(nix eval --impure --raw --expr "(builtins.fetchGit $repo).rev") = 0000000000000000000000000000000000000000 ]] [[ $(nix eval --impure --raw --expr "(builtins.fetchGit $repo).dirtyRev") = "${rev2}-dirty" ]] @@ -130,16 +130,16 @@ path2=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath") # ... unless we're using an explicit ref or rev. 
path3=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"master\"; }).outPath") -[[ $path = $path3 ]] +[[ $path = "$path3" ]] path3=$(nix eval --raw --expr "(builtins.fetchGit { url = $repo; rev = \"$rev2\"; }).outPath") -[[ $path = $path3 ]] +[[ $path = "$path3" ]] # Committing should not affect the store path. -git -C $repo commit -m 'Bla3' -a +git -C "$repo" commit -m 'Bla3' -a path4=$(nix eval --impure --refresh --raw --expr "(builtins.fetchGit file://$repo).outPath") -[[ $path2 = $path4 ]] +[[ $path2 = "$path4" ]] [[ $(nix eval --impure --expr "builtins.hasAttr \"rev\" (builtins.fetchGit $repo)") == "true" ]] [[ $(nix eval --impure --expr "builtins.hasAttr \"dirtyRev\" (builtins.fetchGit $repo)") == "false" ]] @@ -148,7 +148,7 @@ path4=$(nix eval --impure --refresh --raw --expr "(builtins.fetchGit file://$rep expect 102 nix eval --raw --expr "(builtins.fetchGit { url = $repo; rev = \"$rev2\"; narHash = \"sha256-B5yIPHhEm0eysJKEsO7nqxprh9vcblFxpJG11gXJus1=\"; }).outPath" path5=$(nix eval --raw --expr "(builtins.fetchGit { url = $repo; rev = \"$rev2\"; narHash = \"sha256-Hr8g6AqANb3xqX28eu1XnjK/3ab8Gv6TJSnkb1LezG9=\"; }).outPath") -[[ $path = $path5 ]] +[[ $path = "$path5" ]] # Ensure that NAR hashes are checked. expectStderr 102 nix eval --raw --expr "(builtins.fetchGit { url = $repo; rev = \"$rev2\"; narHash = \"sha256-Hr8g6AqANb4xqX28eu1XnjK/3ab8Gv6TJSnkb1LezG9=\"; }).outPath" | grepQuiet "error: NAR hash mismatch" @@ -157,22 +157,22 @@ expectStderr 102 nix eval --raw --expr "(builtins.fetchGit { url = $repo; rev = expectStderr 0 nix eval --raw --expr "(builtins.fetchGit { url = $repo; ref = \"tag2\"; narHash = \"sha256-Hr8g6AqANb3xqX28eu1XnjK/3ab8Gv6TJSnkb1LezG9=\"; }).outPath" | grepQuiet "warning: Input .* is unlocked" # tarball-ttl should be ignored if we specify a rev -echo delft > $repo/hello -git -C $repo add hello -git -C $repo commit -m 'Bla4' -rev3=$(git -C $repo rev-parse HEAD) +echo delft > "$repo"/hello +git -C "$repo" add hello +git -C "$repo" commit -m 'Bla4' +rev3=$(git -C "$repo" rev-parse HEAD) nix eval --tarball-ttl 3600 --expr "builtins.fetchGit { url = $repo; rev = \"$rev3\"; }" >/dev/null # Update 'path' to reflect latest master path=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath") # Check behavior when non-master branch is used -git -C $repo checkout $rev2 -b dev -echo dev > $repo/hello +git -C "$repo" checkout "$rev2" -b dev +echo dev > "$repo"/hello # File URI uses dirty tree unless specified otherwise path2=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath") -[ $(cat $path2/hello) = dev ] +[ "$(cat "$path2"/hello)" = dev ] # Using local path with branch other than 'master' should work when clean or dirty path3=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath") @@ -181,53 +181,53 @@ path3=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath") [[ $(nix eval --impure --raw --expr "(builtins.fetchGit $repo).shortRev") = 0000000 ]] # Making a dirty tree clean again and fetching it should # record correct revision information. 
See: #4140 -echo world > $repo/hello -[[ $(nix eval --impure --raw --expr "(builtins.fetchGit $repo).rev") = $rev2 ]] +echo world > "$repo"/hello +[[ $(nix eval --impure --raw --expr "(builtins.fetchGit $repo).rev") = "$rev2" ]] # Committing shouldn't change store path, or switch to using 'master' -echo dev > $repo/hello -git -C $repo commit -m 'Bla5' -a +echo dev > "$repo"/hello +git -C "$repo" commit -m 'Bla5' -a path4=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath") -[[ $(cat $path4/hello) = dev ]] -[[ $path3 = $path4 ]] +[[ $(cat "$path4"/hello) = dev ]] +[[ $path3 = "$path4" ]] # Using remote path with branch other than 'master' should fetch the HEAD revision. # (--tarball-ttl 0 to prevent using the cached repo above) export _NIX_FORCE_HTTP=1 path4=$(nix eval --tarball-ttl 0 --impure --raw --expr "(builtins.fetchGit $repo).outPath") -[[ $(cat $path4/hello) = dev ]] -[[ $path3 = $path4 ]] +[[ $(cat "$path4"/hello) = dev ]] +[[ $path3 = "$path4" ]] unset _NIX_FORCE_HTTP # Confirm same as 'dev' branch path5=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath") -[[ $path3 = $path5 ]] +[[ $path3 = "$path5" ]] # Nuke the cache -rm -rf $TEST_HOME/.cache/nix +rm -rf "$TEST_HOME"/.cache/nix # Try again. This should work. path5=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath") -[[ $path3 = $path5 ]] +[[ $path3 = "$path5" ]] # Fetching from a repo with only a specific revision and no branches should # not fall back to copying files and record correct revision information. See: #5302 -mkdir $TEST_ROOT/minimal -git -C $TEST_ROOT/minimal init -git -C $TEST_ROOT/minimal fetch $repo $rev2 -git -C $TEST_ROOT/minimal checkout $rev2 -[[ $(nix eval --impure --raw --expr "(builtins.fetchGit { url = $TEST_ROOT/minimal; }).rev") = $rev2 ]] +mkdir "$TEST_ROOT"/minimal +git -C "$TEST_ROOT"/minimal init +git -C "$TEST_ROOT"/minimal fetch "$repo" "$rev2" +git -C "$TEST_ROOT"/minimal checkout "$rev2" +[[ $(nix eval --impure --raw --expr "(builtins.fetchGit { url = $TEST_ROOT/minimal; }).rev") = "$rev2" ]] # Explicit ref = "HEAD" should work, and produce the same outPath as without ref path7=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).outPath") path8=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; }).outPath") -[[ $path7 = $path8 ]] +[[ $path7 = "$path8" ]] # ref = "HEAD" should fetch the HEAD revision -rev4=$(git -C $repo rev-parse HEAD) +rev4=$(git -C "$repo" rev-parse HEAD) rev4_nix=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).rev") -[[ $rev4 = $rev4_nix ]] +[[ $rev4 = "$rev4_nix" ]] # The name argument should be handled path9=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; name = \"foo\"; }).outPath") @@ -236,33 +236,36 @@ path9=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$rep # Specifying a ref without a rev shouldn't pick a cached rev for a different ref export _NIX_FORCE_HTTP=1 rev_tag1_nix=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"refs/tags/tag1\"; }).rev") -rev_tag1=$(git -C $repo rev-parse refs/tags/tag1^{commit}) -[[ $rev_tag1_nix = $rev_tag1 ]] +# shellcheck disable=SC1083 +rev_tag1=$(git -C "$repo" rev-parse refs/tags/tag1^{commit}) +[[ $rev_tag1_nix = "$rev_tag1" ]] rev_tag2_nix=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = 
\"file://$repo\"; ref = \"refs/tags/tag2\"; }).rev") -rev_tag2=$(git -C $repo rev-parse refs/tags/tag2^{commit}) -[[ $rev_tag2_nix = $rev_tag2 ]] +# shellcheck disable=SC1083 +rev_tag2=$(git -C "$repo" rev-parse refs/tags/tag2^{commit}) +[[ $rev_tag2_nix = "$rev_tag2" ]] unset _NIX_FORCE_HTTP # Ensure .gitattributes is respected -touch $repo/not-exported-file -touch $repo/exported-wonky -echo "/not-exported-file export-ignore" >> $repo/.gitattributes -echo "/exported-wonky export-ignore=wonk" >> $repo/.gitattributes -git -C $repo add not-exported-file exported-wonky .gitattributes -git -C $repo commit -m 'Bla6' -rev5=$(git -C $repo rev-parse HEAD) +touch "$repo"/not-exported-file +touch "$repo"/exported-wonky +echo "/not-exported-file export-ignore" >> "$repo"/.gitattributes +echo "/exported-wonky export-ignore=wonk" >> "$repo"/.gitattributes +git -C "$repo" add not-exported-file exported-wonky .gitattributes +git -C "$repo" commit -m 'Bla6' +rev5=$(git -C "$repo" rev-parse HEAD) path12=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev5\"; }).outPath") [[ ! -e $path12/not-exported-file ]] [[ -e $path12/exported-wonky ]] # should fail if there is no repo -rm -rf $repo/.git -rm -rf $TEST_HOME/.cache/nix +rm -rf "$repo"/.git +rm -rf "$TEST_HOME"/.cache/nix (! nix eval --impure --raw --expr "(builtins.fetchGit \"file://$repo\").outPath") # should succeed for a repo without commits -git init $repo -git -C $repo add hello # need to add at least one file to cause the root of the repo to be visible +git init "$repo" +git -C "$repo" add hello # need to add at least one file to cause the root of the repo to be visible +# shellcheck disable=SC2034 path10=$(nix eval --impure --raw --expr "(builtins.fetchGit \"file://$repo\").outPath") # should succeed for a path with a space @@ -277,6 +280,7 @@ touch "$repo/.gitignore" git -C "$repo" add hello .gitignore git -C "$repo" commit -m 'Bla1' cd "$repo" +# shellcheck disable=SC2034 path11=$(nix eval --impure --raw --expr "(builtins.fetchGit ./.).outPath") # Test a workdir with no commits. diff --git a/tests/functional/fetchGitRefs.sh b/tests/functional/fetchGitRefs.sh index 258a65525..288b26591 100755 --- a/tests/functional/fetchGitRefs.sh +++ b/tests/functional/fetchGitRefs.sh @@ -38,16 +38,16 @@ path=$(nix eval --raw --impure --expr "(builtins.fetchGit { url = $repo; ref = \ # 10. They cannot contain a \. valid_ref() { - { set +x; printf >&2 '\n>>>>>>>>>> valid_ref %s\b <<<<<<<<<<\n' $(printf %s "$1" | sed -n -e l); set -x; } + { set +x; printf >&2 '\n>>>>>>>>>> valid_ref %s\b <<<<<<<<<<\n' "$(printf %s "$1" | sed -n -e l)"; set -x; } git check-ref-format --branch "$1" >/dev/null git -C "$repo" branch "$1" master >/dev/null path1=$(nix eval --raw --impure --expr "(builtins.fetchGit { url = $repo; ref = ''$1''; }).outPath") - [[ $path1 = $path ]] + [[ $path1 = "$path" ]] git -C "$repo" branch -D "$1" >/dev/null } invalid_ref() { - { set +x; printf >&2 '\n>>>>>>>>>> invalid_ref %s\b <<<<<<<<<<\n' $(printf %s "$1" | sed -n -e l); set -x; } + { set +x; printf >&2 '\n>>>>>>>>>> invalid_ref %s\b <<<<<<<<<<\n' "$(printf %s "$1" | sed -n -e l)"; set -x; } # special case for a sole @: # --branch @ will try to interpret @ as a branch reference and not fail. 
Thus we need --allow-onelevel if [ "$1" = "@" ]; then @@ -68,6 +68,7 @@ valid_ref 'heads/foo@bar' valid_ref "$(printf 'heads/fu\303\237')" valid_ref 'foo-bar-baz' valid_ref 'branch#' +# shellcheck disable=SC2016 valid_ref '$1' valid_ref 'foo.locke' diff --git a/tests/functional/fetchGitSubmodules.sh b/tests/functional/fetchGitSubmodules.sh index cd3b51674..2a25245be 100755 --- a/tests/functional/fetchGitSubmodules.sh +++ b/tests/functional/fetchGitSubmodules.sh @@ -11,7 +11,7 @@ clearStoreIfPossible rootRepo=$TEST_ROOT/gitSubmodulesRoot subRepo=$TEST_ROOT/gitSubmodulesSub -rm -rf ${rootRepo} ${subRepo} $TEST_HOME/.cache/nix +rm -rf "${rootRepo}" "${subRepo}" "$TEST_HOME"/.cache/nix # Submodules can't be fetched locally by default, which can cause # information leakage vulnerabilities, but for these tests our @@ -23,35 +23,35 @@ export XDG_CONFIG_HOME=$TEST_HOME/.config git config --global protocol.file.allow always initGitRepo() { - git init $1 - git -C $1 config user.email "foobar@example.com" - git -C $1 config user.name "Foobar" + git init "$1" + git -C "$1" config user.email "foobar@example.com" + git -C "$1" config user.name "Foobar" } addGitContent() { - echo "lorem ipsum" > $1/content - git -C $1 add content - git -C $1 commit -m "Initial commit" + echo "lorem ipsum" > "$1"/content + git -C "$1" add content + git -C "$1" commit -m "Initial commit" } -initGitRepo $subRepo -addGitContent $subRepo +initGitRepo "$subRepo" +addGitContent "$subRepo" -initGitRepo $rootRepo +initGitRepo "$rootRepo" -git -C $rootRepo submodule init -git -C $rootRepo submodule add $subRepo sub -git -C $rootRepo add sub -git -C $rootRepo commit -m "Add submodule" +git -C "$rootRepo" submodule init +git -C "$rootRepo" submodule add "$subRepo" sub +git -C "$rootRepo" add sub +git -C "$rootRepo" commit -m "Add submodule" -rev=$(git -C $rootRepo rev-parse HEAD) +rev=$(git -C "$rootRepo" rev-parse HEAD) r1=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; }).outPath") r2=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = false; }).outPath") r3=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }).outPath") -[[ $r1 == $r2 ]] -[[ $r2 != $r3 ]] +[[ $r1 == "$r2" ]] +[[ $r2 != "$r3" ]] r4=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; }).outPath") r5=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = false; }).outPath") @@ -59,11 +59,11 @@ r6=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; ref = \ r7=$(nix eval --raw --expr "(builtins.fetchGit { url = $rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = true; }).outPath") r8=$(nix eval --raw --expr "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submodules = true; }).outPath") -[[ $r1 == $r4 ]] -[[ $r4 == $r5 ]] -[[ $r3 == $r6 ]] -[[ $r6 == $r7 ]] -[[ $r7 == $r8 ]] +[[ $r1 == "$r4" ]] +[[ $r4 == "$r5" ]] +[[ $r3 == "$r6" ]] +[[ $r6 == "$r7" ]] +[[ $r7 == "$r8" ]] have_submodules=$(nix eval --expr "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; }).submodules") [[ $have_submodules == false ]] @@ -80,13 +80,13 @@ pathWithSubmodulesAgain=$(nix eval --raw --expr "(builtins.fetchGit { url = file pathWithSubmodulesAgainWithRef=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = true; }).outPath") # The resulting 
store path cannot be the same. -[[ $pathWithoutSubmodules != $pathWithSubmodules ]] +[[ $pathWithoutSubmodules != "$pathWithSubmodules" ]] # Checking out the same repo with submodules returns in the same store path. -[[ $pathWithSubmodules == $pathWithSubmodulesAgain ]] +[[ $pathWithSubmodules == "$pathWithSubmodulesAgain" ]] # Checking out the same repo with submodules returns in the same store path. -[[ $pathWithSubmodulesAgain == $pathWithSubmodulesAgainWithRef ]] +[[ $pathWithSubmodulesAgain == "$pathWithSubmodulesAgainWithRef" ]] # The submodules flag is actually honored. [[ ! -e $pathWithoutSubmodules/sub/content ]] @@ -98,14 +98,14 @@ pathWithSubmodulesAgainWithRef=$(nix eval --raw --expr "(builtins.fetchGit { url test "$(find "$pathWithSubmodules" -name .git)" = "" # Git repos without submodules can be fetched with submodules = true. -subRev=$(git -C $subRepo rev-parse HEAD) +subRev=$(git -C "$subRepo" rev-parse HEAD) noSubmoduleRepoBaseline=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$subRepo; rev = \"$subRev\"; }).outPath") noSubmoduleRepo=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$subRepo; rev = \"$subRev\"; submodules = true; }).outPath") -[[ $noSubmoduleRepoBaseline == $noSubmoduleRepo ]] +[[ $noSubmoduleRepoBaseline == "$noSubmoduleRepo" ]] # Test .gitmodules with entries that refer to non-existent objects or objects that are not submodules. -cat >> $rootRepo/.gitmodules <> "$rootRepo"/.gitmodules <> $rootRepo/.gitmodules < $rootRepo/file -git -C $rootRepo add file -git -C $rootRepo commit -a -m "Add bad submodules" +echo foo > "$rootRepo"/file +git -C "$rootRepo" add file +git -C "$rootRepo" commit -a -m "Add bad submodules" -rev=$(git -C $rootRepo rev-parse HEAD) +rev=$(git -C "$rootRepo" rev-parse HEAD) r=$(nix eval --raw --expr "builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }") @@ -126,44 +126,44 @@ r=$(nix eval --raw --expr "builtins.fetchGit { url = file://$rootRepo; rev = \"$ [[ ! -e $r/missing ]] # Test relative submodule URLs. -rm $TEST_HOME/.cache/nix/fetcher-cache* -rm -rf $rootRepo/.git $rootRepo/.gitmodules $rootRepo/sub -initGitRepo $rootRepo -git -C $rootRepo submodule add ../gitSubmodulesSub sub -git -C $rootRepo commit -m "Add submodule" -rev2=$(git -C $rootRepo rev-parse HEAD) +rm "$TEST_HOME"/.cache/nix/fetcher-cache* +rm -rf "$rootRepo"/.git "$rootRepo"/.gitmodules "$rootRepo"/sub +initGitRepo "$rootRepo" +git -C "$rootRepo" submodule add ../gitSubmodulesSub sub +git -C "$rootRepo" commit -m "Add submodule" +rev2=$(git -C "$rootRepo" rev-parse HEAD) pathWithRelative=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev2\"; submodules = true; }).outPath") -diff -r -x .gitmodules $pathWithSubmodules $pathWithRelative +diff -r -x .gitmodules "$pathWithSubmodules" "$pathWithRelative" # Test clones that have an upstream with relative submodule URLs. 
-rm $TEST_HOME/.cache/nix/fetcher-cache* +rm "$TEST_HOME"/.cache/nix/fetcher-cache* cloneRepo=$TEST_ROOT/a/b/gitSubmodulesClone # NB /a/b to make the relative path not work relative to $cloneRepo -git clone $rootRepo $cloneRepo +git clone "$rootRepo" "$cloneRepo" pathIndirect=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$cloneRepo; rev = \"$rev2\"; submodules = true; }).outPath") -[[ $pathIndirect = $pathWithRelative ]] +[[ $pathIndirect = "$pathWithRelative" ]] # Test submodule export-ignore interaction -git -C $rootRepo/sub config user.email "foobar@example.com" -git -C $rootRepo/sub config user.name "Foobar" +git -C "$rootRepo"/sub config user.email "foobar@example.com" +git -C "$rootRepo"/sub config user.name "Foobar" -echo "/exclude-from-root export-ignore" >> $rootRepo/.gitattributes +echo "/exclude-from-root export-ignore" >> "$rootRepo"/.gitattributes # TBD possible semantics for submodules + exportIgnore # echo "/sub/exclude-deep export-ignore" >> $rootRepo/.gitattributes -echo nope > $rootRepo/exclude-from-root -git -C $rootRepo add .gitattributes exclude-from-root -git -C $rootRepo commit -m "Add export-ignore" +echo nope > "$rootRepo"/exclude-from-root +git -C "$rootRepo" add .gitattributes exclude-from-root +git -C "$rootRepo" commit -m "Add export-ignore" -echo "/exclude-from-sub export-ignore" >> $rootRepo/sub/.gitattributes -echo nope > $rootRepo/sub/exclude-from-sub +echo "/exclude-from-sub export-ignore" >> "$rootRepo"/sub/.gitattributes +echo nope > "$rootRepo"/sub/exclude-from-sub # TBD possible semantics for submodules + exportIgnore # echo aye > $rootRepo/sub/exclude-from-root -git -C $rootRepo/sub add .gitattributes exclude-from-sub -git -C $rootRepo/sub commit -m "Add export-ignore (sub)" +git -C "$rootRepo"/sub add .gitattributes exclude-from-sub +git -C "$rootRepo"/sub commit -m "Add export-ignore (sub)" -git -C $rootRepo add sub -git -C $rootRepo commit -m "Update submodule" +git -C "$rootRepo" add sub +git -C "$rootRepo" commit -m "Update submodule" -git -C $rootRepo status +git -C "$rootRepo" status # # TBD: not supported yet, because semantics are undecided and current implementation leaks rules from the root to submodules # # exportIgnore can be used with submodules @@ -199,39 +199,40 @@ test_submodule_nested() { local repoB=$TEST_ROOT/submodule_nested/b local repoC=$TEST_ROOT/submodule_nested/c - rm -rf $repoA $repoB $repoC $TEST_HOME/.cache/nix + rm -rf "$repoA" "$repoB" "$repoC" "$TEST_HOME"/.cache/nix - initGitRepo $repoC - touch $repoC/inside-c - git -C $repoC add inside-c - addGitContent $repoC + initGitRepo "$repoC" + touch "$repoC"/inside-c + git -C "$repoC" add inside-c + addGitContent "$repoC" - initGitRepo $repoB - git -C $repoB submodule add $repoC c - git -C $repoB add c - addGitContent $repoB + initGitRepo "$repoB" + git -C "$repoB" submodule add "$repoC" c + git -C "$repoB" add c + addGitContent "$repoB" - initGitRepo $repoA - git -C $repoA submodule add $repoB b - git -C $repoA add b - addGitContent $repoA + initGitRepo "$repoA" + git -C "$repoA" submodule add "$repoB" b + git -C "$repoA" add b + addGitContent "$repoA" # Check non-worktree fetch - local rev=$(git -C $repoA rev-parse HEAD) + local rev + rev=$(git -C "$repoA" rev-parse HEAD) out=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repoA\"; rev = \"$rev\"; submodules = true; }).outPath") - test -e $out/b/c/inside-c - test -e $out/content - test -e $out/b/content - test -e $out/b/c/content + test -e "$out"/b/c/inside-c + test -e "$out"/content + 
test -e "$out"/b/content + test -e "$out"/b/c/content local nonWorktree=$out # Check worktree based fetch # TODO: make it work without git submodule update - git -C $repoA submodule update --init --recursive + git -C "$repoA" submodule update --init --recursive out=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repoA\"; submodules = true; }).outPath") - find $out - [[ $out == $nonWorktree ]] || { find $out; false; } + find "$out" + [[ $out == "$nonWorktree" ]] || { find "$out"; false; } } test_submodule_nested diff --git a/tests/functional/fetchGitVerification.sh b/tests/functional/fetchGitVerification.sh index 4012d8229..79c78d0c9 100755 --- a/tests/functional/fetchGitVerification.sh +++ b/tests/functional/fetchGitVerification.sh @@ -21,29 +21,29 @@ ssh-keygen -f "$keysDir/testkey2" -t rsa -P "" -C "test key 2" key2File="$keysDir/testkey2.pub" publicKey2=$(awk '{print $2}' "$key2File") -git init $repo -git -C $repo config user.email "foobar@example.com" -git -C $repo config user.name "Foobar" -git -C $repo config gpg.format ssh +git init "$repo" +git -C "$repo" config user.email "foobar@example.com" +git -C "$repo" config user.name "Foobar" +git -C "$repo" config gpg.format ssh -echo 'hello' > $repo/text -git -C $repo add text -git -C $repo -c "user.signingkey=$key1File" commit -S -m 'initial commit' +echo 'hello' > "$repo"/text +git -C "$repo" add text +git -C "$repo" -c "user.signingkey=$key1File" commit -S -m 'initial commit' out=$(nix eval --impure --raw --expr "builtins.fetchGit { url = \"file://$repo\"; keytype = \"ssh-rsa\"; publicKey = \"$publicKey2\"; }" 2>&1) || status=$? [[ $status == 1 ]] -[[ $out =~ 'No principal matched.' ]] +[[ $out == *'No principal matched.'* ]] [[ $(nix eval --impure --raw --expr "builtins.readFile (builtins.fetchGit { url = \"file://$repo\"; publicKey = \"$publicKey1\"; } + \"/text\")") = 'hello' ]] -echo 'hello world' > $repo/text +echo 'hello world' > "$repo"/text # Verification on a dirty repo should fail. out=$(nix eval --impure --raw --expr "builtins.fetchGit { url = \"file://$repo\"; keytype = \"ssh-rsa\"; publicKey = \"$publicKey2\"; }" 2>&1) || status=$? [[ $status == 1 ]] [[ $out =~ 'dirty' ]] -git -C $repo add text -git -C $repo -c "user.signingkey=$key2File" commit -S -m 'second commit' +git -C "$repo" add text +git -C "$repo" -c "user.signingkey=$key2File" commit -S -m 'second commit' [[ $(nix eval --impure --raw --expr "builtins.readFile (builtins.fetchGit { url = \"file://$repo\"; publicKeys = [{key = \"$publicKey1\";} {type = \"ssh-rsa\"; key = \"$publicKey2\";}]; } + \"/text\")") = 'hello world' ]] @@ -80,5 +80,6 @@ cat > "$flakeDir/flake.nix" <&1) || status=$? + [[ $status == 1 ]] -[[ $out =~ 'No principal matched.' ]] +[[ $out == *'No principal matched.'* ]] diff --git a/tests/functional/fetchMercurial.sh b/tests/functional/fetchMercurial.sh index 6de192865..6293fb76a 100755 --- a/tests/functional/fetchMercurial.sh +++ b/tests/functional/fetchMercurial.sh @@ -12,34 +12,35 @@ clearStore # See https://github.com/NixOS/nix/issues/6195 repo=$TEST_ROOT/./hg -rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix +rm -rf "$repo" "${repo}"-tmp "$TEST_HOME"/.cache/nix -hg init $repo -echo '[ui]' >> $repo/.hg/hgrc -echo 'username = Foobar ' >> $repo/.hg/hgrc +hg init "$repo" +{ + echo '[ui]' + echo 'username = Foobar ' + # Set ui.tweakdefaults to ensure HGPLAIN is being set. + echo 'tweakdefaults = True' +} >> "$repo"/.hg/hgrc -# Set ui.tweakdefaults to ensure HGPLAIN is being set. 
-echo 'tweakdefaults = True' >> $repo/.hg/hgrc +echo utrecht > "$repo"/hello +touch "$repo"/.hgignore +hg add --cwd "$repo" hello .hgignore +hg commit --cwd "$repo" -m 'Bla1' +rev1=$(hg log --cwd "$repo" -r tip --template '{node}') -echo utrecht > $repo/hello -touch $repo/.hgignore -hg add --cwd $repo hello .hgignore -hg commit --cwd $repo -m 'Bla1' -rev1=$(hg log --cwd $repo -r tip --template '{node}') - -echo world > $repo/hello -hg commit --cwd $repo -m 'Bla2' -rev2=$(hg log --cwd $repo -r tip --template '{node}') +echo world > "$repo"/hello +hg commit --cwd "$repo" -m 'Bla2' +rev2=$(hg log --cwd "$repo" -r tip --template '{node}') # Fetch an unclean branch. -echo unclean > $repo/hello +echo unclean > "$repo"/hello path=$(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).outPath") -[[ $(cat $path/hello) = unclean ]] -hg revert --cwd $repo --all +[[ $(cat "$path"/hello) = unclean ]] +hg revert --cwd "$repo" --all # Fetch the default branch. path=$(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).outPath") -[[ $(cat $path/hello) = world ]] +[[ $(cat "$path"/hello) = world ]] # In pure eval mode, fetchGit without a revision should fail. [[ $(nix eval --impure --raw --expr "(builtins.readFile (fetchMercurial file://$repo + \"/hello\"))") = world ]] @@ -47,64 +48,64 @@ path=$(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).ou # Fetch using an explicit revision hash. path2=$(nix eval --impure --raw --expr "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev2\"; }).outPath") -[[ $path = $path2 ]] +[[ $path = "$path2" ]] # In pure eval mode, fetchGit with a revision should succeed. [[ $(nix eval --raw --expr "builtins.readFile (fetchMercurial { url = file://$repo; rev = \"$rev2\"; } + \"/hello\")") = world ]] # Fetch again. This should be cached. -mv $repo ${repo}-tmp +mv "$repo" "${repo}"-tmp path2=$(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).outPath") -[[ $path = $path2 ]] +[[ $path = "$path2" ]] [[ $(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).branch") = default ]] [[ $(nix eval --impure --expr "(builtins.fetchMercurial file://$repo).revCount") = 1 ]] -[[ $(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).rev") = $rev2 ]] +[[ $(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).rev") = "$rev2" ]] # But with TTL 0, it should fail. (! nix eval --impure --refresh --expr "builtins.fetchMercurial file://$repo") # Fetching with a explicit hash should succeed. path2=$(nix eval --refresh --raw --expr "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev2\"; }).outPath") -[[ $path = $path2 ]] +[[ $path = "$path2" ]] path2=$(nix eval --refresh --raw --expr "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev1\"; }).outPath") -[[ $(cat $path2/hello) = utrecht ]] +[[ $(cat "$path2"/hello) = utrecht ]] -mv ${repo}-tmp $repo +mv "${repo}"-tmp "$repo" # Using a clean working tree should produce the same result. path2=$(nix eval --impure --raw --expr "(builtins.fetchMercurial $repo).outPath") -[[ $path = $path2 ]] +[[ $path = "$path2" ]] # Using an unclean tree should yield the tracked but uncommitted changes. 
-mkdir $repo/dir1 $repo/dir2 -echo foo > $repo/dir1/foo -echo bar > $repo/bar -echo bar > $repo/dir2/bar -hg add --cwd $repo dir1/foo -hg rm --cwd $repo hello +mkdir "$repo"/dir1 "$repo"/dir2 +echo foo > "$repo"/dir1/foo +echo bar > "$repo"/bar +echo bar > "$repo"/dir2/bar +hg add --cwd "$repo" dir1/foo +hg rm --cwd "$repo" hello path2=$(nix eval --impure --raw --expr "(builtins.fetchMercurial $repo).outPath") -[ ! -e $path2/hello ] -[ ! -e $path2/bar ] -[ ! -e $path2/dir2/bar ] -[ ! -e $path2/.hg ] -[[ $(cat $path2/dir1/foo) = foo ]] +[ ! -e "$path2"/hello ] +[ ! -e "$path2"/bar ] +[ ! -e "$path2"/dir2/bar ] +[ ! -e "$path2"/.hg ] +[[ $(cat "$path2"/dir1/foo) = foo ]] [[ $(nix eval --impure --raw --expr "(builtins.fetchMercurial $repo).rev") = 0000000000000000000000000000000000000000 ]] # ... unless we're using an explicit ref. path3=$(nix eval --impure --raw --expr "(builtins.fetchMercurial { url = $repo; rev = \"default\"; }).outPath") -[[ $path = $path3 ]] +[[ $path = "$path3" ]] # Committing should not affect the store path. -hg commit --cwd $repo -m 'Bla3' +hg commit --cwd "$repo" -m 'Bla3' path4=$(nix eval --impure --refresh --raw --expr "(builtins.fetchMercurial file://$repo).outPath") -[[ $path2 = $path4 ]] +[[ $path2 = "$path4" ]] -echo paris > $repo/hello +echo paris > "$repo"/hello # Passing a `name` argument should be reflected in the output path path5=$(nix eval -vvvvv --impure --refresh --raw --expr "(builtins.fetchMercurial { url = \"file://$repo\"; name = \"foo\"; } ).outPath") diff --git a/tests/functional/fixed.builder1.sh b/tests/functional/fixed.builder1.sh index c41bb2b9a..172f65e6b 100644 --- a/tests/functional/fixed.builder1.sh +++ b/tests/functional/fixed.builder1.sh @@ -1,3 +1,5 @@ +# shellcheck shell=bash if test "$IMPURE_VAR1" != "foo"; then exit 1; fi if test "$IMPURE_VAR2" != "bar"; then exit 1; fi -echo "Hello World!" > $out +# shellcheck disable=SC2154 +echo "Hello World!" > "$out" diff --git a/tests/functional/fixed.builder2.sh b/tests/functional/fixed.builder2.sh index 31ea1579a..9fbcf022e 100644 --- a/tests/functional/fixed.builder2.sh +++ b/tests/functional/fixed.builder2.sh @@ -1,6 +1,9 @@ -echo dummy: $dummy +# shellcheck shell=bash +# shellcheck disable=SC2154 +echo dummy: "$dummy" if test -n "$dummy"; then sleep 2; fi -mkdir $out -mkdir $out/bla -echo "Hello World!" > $out/foo -ln -s foo $out/bar +# shellcheck disable=SC2154 +mkdir "$out" +mkdir "$out"/bla +echo "Hello World!" > "$out"/foo +ln -s foo "$out"/bar diff --git a/tests/functional/fixed.sh b/tests/functional/fixed.sh index d98769e64..edf6f88d4 100755 --- a/tests/functional/fixed.sh +++ b/tests/functional/fixed.sh @@ -6,7 +6,7 @@ TODO_NixOS clearStore -path=$(nix-store -q $(nix-instantiate fixed.nix -A good.0)) +path=$(nix-store -q "$(nix-instantiate fixed.nix -A good.0)") echo 'testing bad...' nix-build fixed.nix -A bad --no-out-link && fail "should fail" @@ -14,7 +14,7 @@ nix-build fixed.nix -A bad --no-out-link && fail "should fail" # Building with the bad hash should produce the "good" output path as # a side-effect. [[ -e $path ]] -nix path-info --json $path | grep fixed:md5:2qk15sxzzjlnpjk9brn7j8ppcd +nix path-info --json "$path" | grep fixed:md5:2qk15sxzzjlnpjk9brn7j8ppcd echo 'testing good...' nix-build fixed.nix -A good --no-out-link @@ -37,7 +37,7 @@ fi # While we're at it, check attribute selection a bit more. echo 'testing attribute selection...' 
-test $(nix-instantiate fixed.nix -A good.1 | wc -l) = 1 +test "$(nix-instantiate fixed.nix -A good.1 | wc -l)" = 1 # Test parallel builds of derivations that produce the same output. # Only one should run at the same time. @@ -51,16 +51,16 @@ echo 'testing sameAsAdd...' out=$(nix-build fixed.nix -A sameAsAdd --no-out-link) # This is what fixed.builder2 produces... -rm -rf $TEST_ROOT/fixed -mkdir $TEST_ROOT/fixed -mkdir $TEST_ROOT/fixed/bla -echo "Hello World!" > $TEST_ROOT/fixed/foo -ln -s foo $TEST_ROOT/fixed/bar +rm -rf "$TEST_ROOT"/fixed +mkdir "$TEST_ROOT"/fixed +mkdir "$TEST_ROOT"/fixed/bla +echo "Hello World!" > "$TEST_ROOT"/fixed/foo +ln -s foo "$TEST_ROOT"/fixed/bar -out2=$(nix-store --add $TEST_ROOT/fixed) +out2=$(nix-store --add "$TEST_ROOT"/fixed) [ "$out" = "$out2" ] -out3=$(nix-store --add-fixed --recursive sha256 $TEST_ROOT/fixed) +out3=$(nix-store --add-fixed --recursive sha256 "$TEST_ROOT"/fixed) [ "$out" = "$out3" ] out4=$(nix-store --print-fixed-path --recursive sha256 "1ixr6yd3297ciyp9im522dfxpqbkhcw0pylkb2aab915278fqaik" fixed) diff --git a/tests/functional/flakes/absolute-paths.sh b/tests/functional/flakes/absolute-paths.sh index a355a7a1c..6565857cb 100755 --- a/tests/functional/flakes/absolute-paths.sh +++ b/tests/functional/flakes/absolute-paths.sh @@ -7,13 +7,13 @@ requireGit flake1Dir=$TEST_ROOT/flake1 flake2Dir=$TEST_ROOT/flake2 -createGitRepo $flake1Dir -cat > $flake1Dir/flake.nix < "$flake1Dir"/flake.nix < $flakeDir/flake.nix < "$flakeDir"/flake.nix < $flakeDir/flake.nix < $flakeDir/flake.nix < "$flakeDir"/flake.nix < $flakeDir/flake.nix < $flakeDir/flake.nix < "$flakeDir"/flake.nix < $flakeDir/flake.nix <&1 && fail "nix flake check --all-systems should have failed" || true) +# shellcheck disable=SC2015 +checkRes=$(nix flake check "$flakeDir" 2>&1 && fail "nix flake check --all-systems should have failed" || true) echo "$checkRes" | grepQuiet "error: overlay is not a function, but a set instead" -cat > $flakeDir/flake.nix < "$flakeDir"/flake.nix < $flakeDir/flake.nix < $flakeDir/flake.nix < "$flakeDir"/flake.nix < $flakeDir/flake.nix < $flakeDir/flake.nix < "$flakeDir"/flake.nix < $flakeDir/flake.nix < $flakeDir/flake.nix < "$flakeDir"/flake.nix < $flakeDir/flake.nix <&1 && fail "nix flake check --all-systems should have failed" || true) +# shellcheck disable=SC2015 +checkRes=$(nix flake check --all-systems --keep-going "$flakeDir" 2>&1 && fail "nix flake check --all-systems should have failed" || true) echo "$checkRes" | grepQuiet "packages.system-1.default" echo "$checkRes" | grepQuiet "packages.system-2.default" -cat > $flakeDir/flake.nix < "$flakeDir"/flake.nix < $flakeDir/flake.nix < $flakeDir/flake.nix < "$flakeDir"/flake.nix < $flakeDir/flake.nix <&1 && fail "nix flake check --all-systems should have failed" || true) +# shellcheck disable=SC2015 +checkRes=$(nix flake check --all-systems "$flakeDir" 2>&1 && fail "nix flake check --all-systems should have failed" || true) echo "$checkRes" | grepQuiet "unknown-attr" -cat > $flakeDir/flake.nix < "$flakeDir"/flake.nix < $flakeDir/flake.nix <&1 && fail "nix flake check --all-systems should have failed" || true) +# shellcheck disable=SC2015 +checkRes=$(nix flake check --all-systems "$flakeDir" 2>&1 && fail "nix flake check --all-systems should have failed" || true) echo "$checkRes" | grepQuiet "formatter.system-1" # Test whether `nix flake check` builds checks. 
-cat > $flakeDir/flake.nix < "$flakeDir"/flake.nix < $flakeDir/flake.nix < "$flakeDir"/flake.nix < $flakeDir/flake.nix < $flakeDir/flake.nix < "$flakeDir"/flake.nix <&1 && fail "nix flake check should have failed" || true) +# shellcheck disable=SC2015 +checkRes=$(nix flake check "$flakeDir" 2>&1 && fail "nix flake check should have failed" || true) echo "$checkRes" | grepQuiet -E "builder( for .*)? failed with exit code 1" diff --git a/tests/functional/flakes/config.sh b/tests/functional/flakes/config.sh index ab2d9f47c..87714b5db 100755 --- a/tests/functional/flakes/config.sh +++ b/tests/functional/flakes/config.sh @@ -2,9 +2,9 @@ source common.sh -cp ../simple.nix ../simple.builder.sh "${config_nix}" $TEST_HOME +cp ../simple.nix ../simple.builder.sh "${config_nix}" "$TEST_HOME" -cd $TEST_HOME +cd "$TEST_HOME" rm -f post-hook-ran cat < echoing-post-hook.sh @@ -37,6 +37,7 @@ if type -p script >/dev/null && script -q -c true /dev/null; then else echo "script is not available or not GNU-like, so we skip testing with an added tty" fi +# shellcheck disable=SC2235 (! [[ -f post-hook-ran ]]) TODO_NixOS clearStore diff --git a/tests/functional/flakes/flakes.sh b/tests/functional/flakes/flakes.sh index 7b5be112e..5b1da0f02 100755 --- a/tests/functional/flakes/flakes.sh +++ b/tests/functional/flakes/flakes.sh @@ -7,7 +7,7 @@ TODO_NixOS requireGit clearStore -rm -rf $TEST_HOME/.cache $TEST_HOME/.config +rm -rf "$TEST_HOME"/.cache "$TEST_HOME"/.config createFlake1 createFlake2 @@ -59,7 +59,7 @@ nix flake metadata flake1 nix flake metadata flake1 | grepQuiet 'Locked URL:.*flake1.*' # Test 'nix flake metadata' on a chroot store. -nix flake metadata --store $TEST_ROOT/chroot-store flake1 +nix flake metadata --store "$TEST_ROOT"/chroot-store flake1 # Test 'nix flake metadata' on a local flake. (cd "$flake1Dir" && nix flake metadata) | grepQuiet 'URL:.*flake1.*' @@ -75,17 +75,18 @@ hash1=$(echo "$json" | jq -r .revision) [[ -n $(echo "$json" | jq -r .fingerprint) ]] echo foo > "$flake1Dir/foo" -git -C "$flake1Dir" add $flake1Dir/foo +git -C "$flake1Dir" add "$flake1Dir"/foo [[ $(nix flake metadata flake1 --json --refresh | jq -r .dirtyRevision) == "$hash1-dirty" ]] [[ "$(nix flake metadata flake1 --json | jq -r .fingerprint)" != null ]] echo -n '# foo' >> "$flake1Dir/flake.nix" flake1OriginalCommit=$(git -C "$flake1Dir" rev-parse HEAD) git -C "$flake1Dir" commit -a -m 'Foo' +# shellcheck disable=SC2034 flake1NewCommit=$(git -C "$flake1Dir" rev-parse HEAD) hash2=$(nix flake metadata flake1 --json --refresh | jq -r .revision) [[ $(nix flake metadata flake1 --json --refresh | jq -r .dirtyRevision) == "null" ]] -[[ $hash1 != $hash2 ]] +[[ $hash1 != "$hash2" ]] # Test 'nix build' on a flake. nix build -o "$TEST_ROOT/result" flake1#foo @@ -204,8 +205,8 @@ git -C "$flake3Dir" add flake.nix git -C "$flake3Dir" commit -m 'Update flake.nix' # Check whether `nix build` works with an incomplete lockfile -nix build -o $TEST_ROOT/result "$flake3Dir#sth sth" -nix build -o $TEST_ROOT/result "$flake3Dir#sth%20sth" +nix build -o "$TEST_ROOT"/result "$flake3Dir#sth sth" +nix build -o "$TEST_ROOT"/result "$flake3Dir#sth%20sth" # Check whether it saved the lockfile [[ -n $(git -C "$flake3Dir" diff master) ]] @@ -249,7 +250,7 @@ nix flake lock "$flake3Dir" [[ -z $(git -C "$flake3Dir" diff master || echo failed) ]] nix flake update --flake "$flake3Dir" --override-flake flake2 nixpkgs -[[ ! 
-z $(git -C "$flake3Dir" diff master || echo failed) ]] +[[ -n $(git -C "$flake3Dir" diff master || echo failed) ]] # Testing the nix CLI nix registry add flake1 flake3 @@ -262,7 +263,7 @@ nix registry remove flake1 [[ $(nix registry list | wc -l) == 4 ]] # Test 'nix registry list' with a disabled global registry. -nix registry add user-flake1 git+file://$flake1Dir +nix registry add user-flake1 git+file://"$flake1Dir" nix registry add user-flake2 "git+file://$percentEncodedFlake2Dir" [[ $(nix --flake-registry "" registry list | wc -l) == 2 ]] nix --flake-registry "" registry list | grepQuietInverse '^global' # nothing in global registry @@ -273,9 +274,9 @@ nix registry remove user-flake2 [[ $(nix registry list | wc -l) == 4 ]] # Test 'nix flake clone'. -rm -rf $TEST_ROOT/flake1-v2 -nix flake clone flake1 --dest $TEST_ROOT/flake1-v2 -[ -e $TEST_ROOT/flake1-v2/flake.nix ] +rm -rf "$TEST_ROOT"/flake1-v2 +nix flake clone flake1 --dest "$TEST_ROOT"/flake1-v2 +[ -e "$TEST_ROOT"/flake1-v2/flake.nix ] # Test 'follows' inputs. cat > "$flake3Dir/flake.nix" < "$flake3Dir/flake.nix" < "$flake3Dir/flake.nix" < $badFlakeDir/flake.nix -nix store delete $(nix store add-path $badFlakeDir) +rm -rf "$badFlakeDir" +mkdir "$badFlakeDir" +echo INVALID > "$badFlakeDir"/flake.nix +nix store delete "$(nix store add-path "$badFlakeDir")" -[[ $(nix path-info $(nix store add-path $flake1Dir)) =~ flake1 ]] -[[ $(nix path-info path:$(nix store add-path $flake1Dir)) =~ simple ]] +[[ $(nix path-info "$(nix store add-path "$flake1Dir")") =~ flake1 ]] +[[ $(nix path-info path:"$(nix store add-path "$flake1Dir")") =~ simple ]] # Test fetching flakerefs in the legacy CLI. [[ $(nix-instantiate --eval flake:flake3 -A x) = 123 ]] @@ -423,15 +426,15 @@ nix store delete $(nix store add-path $badFlakeDir) [[ $(NIX_PATH=flake3=flake:flake3 nix-instantiate --eval '' -A x) = 123 ]] # Test alternate lockfile paths. -nix flake lock "$flake2Dir" --output-lock-file $TEST_ROOT/flake2.lock -cmp "$flake2Dir/flake.lock" $TEST_ROOT/flake2.lock >/dev/null # lockfiles should be identical, since we're referencing flake2's original one +nix flake lock "$flake2Dir" --output-lock-file "$TEST_ROOT"/flake2.lock +cmp "$flake2Dir/flake.lock" "$TEST_ROOT"/flake2.lock >/dev/null # lockfiles should be identical, since we're referencing flake2's original one -nix flake lock "$flake2Dir" --output-lock-file $TEST_ROOT/flake2-overridden.lock --override-input flake1 git+file://$flake1Dir?rev=$flake1OriginalCommit -expectStderr 1 cmp "$flake2Dir/flake.lock" $TEST_ROOT/flake2-overridden.lock -nix flake metadata "$flake2Dir" --reference-lock-file $TEST_ROOT/flake2-overridden.lock | grepQuiet $flake1OriginalCommit +nix flake lock "$flake2Dir" --output-lock-file "$TEST_ROOT"/flake2-overridden.lock --override-input flake1 git+file://"$flake1Dir"?rev="$flake1OriginalCommit" +expectStderr 1 cmp "$flake2Dir/flake.lock" "$TEST_ROOT"/flake2-overridden.lock +nix flake metadata "$flake2Dir" --reference-lock-file "$TEST_ROOT"/flake2-overridden.lock | grepQuiet "$flake1OriginalCommit" # reference-lock-file can only be used if allow-dirty is set. -expectStderr 1 nix flake metadata "$flake2Dir" --no-allow-dirty --reference-lock-file $TEST_ROOT/flake2-overridden.lock +expectStderr 1 nix flake metadata "$flake2Dir" --no-allow-dirty --reference-lock-file "$TEST_ROOT"/flake2-overridden.lock # After changing an input (flake2 from newFlake2Rev to prevFlake2Rev), we should have the transitive inputs locked by revision $prevFlake2Rev of flake2. 
prevFlake1Rev=$(nix flake metadata --json "$flake1Dir" | jq -r .revision) @@ -458,7 +461,7 @@ git -C "$flake3Dir" commit flake.nix -m 'bla' rm "$flake3Dir/flake.lock" nix flake lock "$flake3Dir" -[[ "$(nix flake metadata --json "$flake3Dir" | jq -r .locks.nodes.flake1.locked.rev)" = $newFlake1Rev ]] +[[ "$(nix flake metadata --json "$flake3Dir" | jq -r .locks.nodes.flake1.locked.rev)" = "$newFlake1Rev" ]] cat > "$flake3Dir/flake.nix" < "$flake3Dir/flake.nix" < $flakeFollowsA/flake.nix < "$flakeFollowsA"/flake.nix < $flakeFollowsA/flake.nix < $flakeFollowsB/flake.nix < "$flakeFollowsB"/flake.nix < $flakeFollowsB/flake.nix < $flakeFollowsC/flake.nix < "$flakeFollowsC"/flake.nix < $flakeFollowsC/flake.nix < $flakeFollowsD/flake.nix < "$flakeFollowsD"/flake.nix < $flakeFollowsD/flake.nix < $flakeFollowsE/flake.nix < "$flakeFollowsE"/flake.nix < $flakeFollowsE/flake.nix < $flakeFollowsA/flake.nix < "$flakeFollowsA"/flake.nix < $flakeFollowsA/flake.nix < $flakeFollowsA/flake.nix < "$flakeFollowsA"/flake.nix < $flakeFollowsA/flake.nix <&1 | grep '/flakeB.*is forbidden in pure evaluation mode' -expect 1 nix flake lock --impure $flakeFollowsA 2>&1 | grep '/flakeB.*does not exist' +expect 1 nix flake lock "$flakeFollowsA" 2>&1 | grep '/flakeB.*is forbidden in pure evaluation mode' +expect 1 nix flake lock --impure "$flakeFollowsA" 2>&1 | grep '/flakeB.*does not exist' # Test relative non-flake inputs. -cat > $flakeFollowsA/flake.nix < "$flakeFollowsA"/flake.nix < $flakeFollowsA/flake.nix < $flakeFollowsA/foo.nix +echo 123 > "$flakeFollowsA"/foo.nix -git -C $flakeFollowsA add flake.nix foo.nix +git -C "$flakeFollowsA" add flake.nix foo.nix -nix flake lock $flakeFollowsA +nix flake lock "$flakeFollowsA" -[[ $(nix eval --json $flakeFollowsA#e) = 123 ]] +[[ $(nix eval --json "$flakeFollowsA"#e) = 123 ]] # Non-existant follows should print a warning. -cat >$flakeFollowsA/flake.nix <"$flakeFollowsA"/flake.nix <$flakeFollowsA/flake.nix <&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid'" nix flake lock "$flakeFollowsA" 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid2'" @@ -269,7 +269,7 @@ flakeFollowCycle="$TEST_ROOT/follows/followCycle" # Test following path flakerefs. mkdir -p "$flakeFollowCycle" -cat > $flakeFollowCycle/flake.nix < "$flakeFollowCycle"/flake.nix < $flakeFollowCycle/flake.nix <&1 && fail "nix flake lock should have failed." || true) -echo $checkRes | grep -F "error: follow cycle detected: [baz -> foo -> bar -> baz]" +echo "$checkRes" | grep -F "error: follow cycle detected: [baz -> foo -> bar -> baz]" # Test transitive input url locking @@ -362,22 +363,22 @@ echo "$json" | jq .locks.nodes.C.original # Test deep overrides, e.g. `inputs.B.inputs.C.inputs.D.follows = ...`. 
-cat < $flakeFollowsD/flake.nix +cat < "$flakeFollowsD"/flake.nix { outputs = _: {}; } EOF -cat < $flakeFollowsC/flake.nix +cat < "$flakeFollowsC"/flake.nix { inputs.D.url = "path:nosuchflake"; outputs = _: {}; } EOF -cat < $flakeFollowsB/flake.nix +cat < "$flakeFollowsB"/flake.nix { inputs.C.url = "path:$flakeFollowsC"; outputs = _: {}; } EOF -cat < $flakeFollowsA/flake.nix +cat < "$flakeFollowsA"/flake.nix { inputs.B.url = "path:$flakeFollowsB"; inputs.D.url = "path:$flakeFollowsD"; @@ -386,26 +387,26 @@ cat < $flakeFollowsA/flake.nix } EOF -nix flake lock $flakeFollowsA +nix flake lock "$flakeFollowsA" -[[ $(jq -c .nodes.C.inputs.D $flakeFollowsA/flake.lock) = '["D"]' ]] +[[ $(jq -c .nodes.C.inputs.D "$flakeFollowsA"/flake.lock) = '["D"]' ]] # Test overlapping flake follows: B has D follow C/D, while A has B/C follow C -cat < $flakeFollowsC/flake.nix +cat < "$flakeFollowsC"/flake.nix { inputs.D.url = "path:$flakeFollowsD"; outputs = _: {}; } EOF -cat < $flakeFollowsB/flake.nix +cat < "$flakeFollowsB"/flake.nix { inputs.C.url = "path:nosuchflake"; inputs.D.follows = "C/D"; outputs = _: {}; } EOF -cat < $flakeFollowsA/flake.nix +cat < "$flakeFollowsA"/flake.nix { inputs.B.url = "path:$flakeFollowsB"; inputs.C.url = "path:$flakeFollowsC"; @@ -415,12 +416,12 @@ cat < $flakeFollowsA/flake.nix EOF # bug was not triggered without recreating the lockfile -nix flake lock $flakeFollowsA --recreate-lock-file +nix flake lock "$flakeFollowsA" --recreate-lock-file -[[ $(jq -c .nodes.B.inputs.D $flakeFollowsA/flake.lock) = '["B","C","D"]' ]] +[[ $(jq -c .nodes.B.inputs.D "$flakeFollowsA"/flake.lock) = '["B","C","D"]' ]] # Check that you can't have both a flakeref and a follows attribute on an input. -cat < $flakeFollowsB/flake.nix +cat < "$flakeFollowsB"/flake.nix { inputs.C.url = "path:nosuchflake"; inputs.D.url = "path:nosuchflake"; @@ -429,4 +430,4 @@ cat < $flakeFollowsB/flake.nix } EOF -expectStderr 1 nix flake lock $flakeFollowsA --recreate-lock-file | grepQuiet "flake input has both a flake reference and a follows attribute" +expectStderr 1 nix flake lock "$flakeFollowsA" --recreate-lock-file | grepQuiet "flake input has both a flake reference and a follows attribute" diff --git a/tests/functional/impure-derivations.sh b/tests/functional/impure-derivations.sh index 5dea220fe..9e483d376 100755 --- a/tests/functional/impure-derivations.sh +++ b/tests/functional/impure-derivations.sh @@ -50,8 +50,8 @@ path4=$(nix build -L --no-link --json --file ./impure-derivations.nix impureOnIm (! nix build -L --no-link --json --file ./impure-derivations.nix inputAddressed 2>&1) | grep 'depends on impure derivation' drvPath=$(nix eval --json --file ./impure-derivations.nix impure.drvPath | jq -r .) -[[ $(nix derivation show $drvPath | jq ".[\"$drvPath\"].outputs.out.impure") = true ]] -[[ $(nix derivation show $drvPath | jq ".[\"$drvPath\"].outputs.stuff.impure") = true ]] +[[ $(nix derivation show $drvPath | jq ".[\"$(basename "$drvPath")\"].outputs.out.impure") = true ]] +[[ $(nix derivation show $drvPath | jq ".[\"$(basename "$drvPath")\"].outputs.stuff.impure") = true ]] # Fixed-output derivations *can* depend on impure derivations. 
path5=$(nix build -L --no-link --json --file ./impure-derivations.nix contentAddressed | jq -r .[].outputs.out) diff --git a/tests/functional/lang/eval-fail-recursion.err.exp b/tests/functional/lang/eval-fail-recursion.err.exp index 8bfb4e12e..ee41ff46b 100644 --- a/tests/functional/lang/eval-fail-recursion.err.exp +++ b/tests/functional/lang/eval-fail-recursion.err.exp @@ -1,9 +1,9 @@ error: … in the right operand of the update (//) operator - at /pwd/lang/eval-fail-recursion.nix:2:11: + at /pwd/lang/eval-fail-recursion.nix:2:14: 1| let 2| a = { } // a; - | ^ + | ^ 3| in error: infinite recursion encountered diff --git a/tests/functional/local-overlay-store/add-lower.sh b/tests/functional/local-overlay-store/add-lower.sh index 33bf20ebd..87cdb4f59 100755 --- a/tests/functional/local-overlay-store/add-lower.sh +++ b/tests/functional/local-overlay-store/add-lower.sh @@ -1,3 +1,4 @@ +# shellcheck shell=bash source common.sh source ../common/init.sh diff --git a/tests/functional/local-overlay-store/bad-uris.sh b/tests/functional/local-overlay-store/bad-uris.sh index f0c6a151c..1b5b7fc54 100644 --- a/tests/functional/local-overlay-store/bad-uris.sh +++ b/tests/functional/local-overlay-store/bad-uris.sh @@ -1,3 +1,4 @@ +# shellcheck shell=bash source common.sh source ../common/init.sh @@ -5,7 +6,7 @@ requireEnvironment setupConfig setupStoreDirs -mkdir -p $TEST_ROOT/bad_test +mkdir -p "$TEST_ROOT"/bad_test badTestRoot=$TEST_ROOT/bad_test storeBadRoot="local-overlay://?root=$badTestRoot&lower-store=$storeA&upper-layer=$storeBTop" storeBadLower="local-overlay://?root=$storeBRoot&lower-store=$badTestRoot&upper-layer=$storeBTop" @@ -18,7 +19,8 @@ declare -a storesBad=( TODO_NixOS for i in "${storesBad[@]}"; do - echo $i + echo "$i" + # shellcheck disable=SC2119 execUnshare <&1 | grep 'contains illegal character' -expect 1 nix build -f multiple-outputs.nix invalid-output-name-2 2>&1 | grep 'contains illegal character' +# TODO inspect why this doesn't work with floating content-addressing +# derivations. +if [[ ! -n "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then + expect 1 nix build -f multiple-outputs.nix invalid-output-name-1 2>&1 | grep 'contains illegal character' + expect 1 nix build -f multiple-outputs.nix invalid-output-name-2 2>&1 | grep 'contains illegal character' +fi diff --git a/tests/functional/structured-attrs.sh b/tests/functional/structured-attrs.sh index 2bd9b4aaf..dfd5a1412 100755 --- a/tests/functional/structured-attrs.sh +++ b/tests/functional/structured-attrs.sh @@ -50,4 +50,4 @@ expectStderr 0 nix-instantiate --expr "$hackyExpr" --eval --strict | grepQuiet " # Check it works with the expected structured attrs hacky=$(nix-instantiate --expr "$hackyExpr") -nix derivation show "$hacky" | jq --exit-status '."'"$hacky"'".structuredAttrs | . == {"a": 1}' +nix derivation show "$hacky" | jq --exit-status '."'"$(basename "$hacky")"'".structuredAttrs | . == {"a": 1}' diff --git a/tests/functional/test-libstoreconsumer/main.cc b/tests/functional/test-libstoreconsumer/main.cc index a372886ea..d8db67a4d 100644 --- a/tests/functional/test-libstoreconsumer/main.cc +++ b/tests/functional/test-libstoreconsumer/main.cc @@ -5,6 +5,13 @@ using namespace nix; +extern "C" [[gnu::retain]] const char * __asan_default_options() +{ + // We leak a bunch of memory knowingly on purpose. It's not worthwhile to + // diagnose that memory being leaked for now. 
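+    //
+    // The ASan runtime queries __asan_default_options() at startup for its
+    // default flags: abort_on_error=1 calls abort() after an error report,
+    // print_summary=1 keeps the one-line error summary, and detect_leaks=0
+    // disables LeakSanitizer entirely for this helper binary.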
+ return "abort_on_error=1:print_summary=1:detect_leaks=0"; +} + int main(int argc, char ** argv) { try { diff --git a/tests/functional/test-libstoreconsumer/meson.build b/tests/functional/test-libstoreconsumer/meson.build index e5a1cc182..7f619d01b 100644 --- a/tests/functional/test-libstoreconsumer/meson.build +++ b/tests/functional/test-libstoreconsumer/meson.build @@ -1,3 +1,7 @@ +cxx = meson.get_compiler('cpp') + +subdir('nix-meson-build-support/asan-options') + libstoreconsumer_tester = executable( 'test-libstoreconsumer', 'main.cc', diff --git a/tests/functional/test-libstoreconsumer/nix-meson-build-support b/tests/functional/test-libstoreconsumer/nix-meson-build-support new file mode 120000 index 000000000..ac8a39762 --- /dev/null +++ b/tests/functional/test-libstoreconsumer/nix-meson-build-support @@ -0,0 +1 @@ +../../../nix-meson-build-support \ No newline at end of file diff --git a/tests/nixos/authorization.nix b/tests/nixos/authorization.nix index ee3be7504..6540e9fa3 100644 --- a/tests/nixos/authorization.nix +++ b/tests/nixos/authorization.nix @@ -84,7 +84,7 @@ su --login mallory -c ' nix-store --generate-binary-cache-key cache1.example.org sk1 pk1 (! nix store sign --key-file sk1 ${pathFour} 2>&1)' | tee diag 1>&2 - grep -F "cannot open connection to remote store 'unix://'" diag + grep -F "cannot open connection to remote store 'daemon'" diag """) machine.succeed(""" diff --git a/tests/nixos/github-flakes.nix b/tests/nixos/github-flakes.nix index 91fd6b062..d14cd9d0c 100644 --- a/tests/nixos/github-flakes.nix +++ b/tests/nixos/github-flakes.nix @@ -219,6 +219,7 @@ in client.succeed("nix registry pin nixpkgs") client.succeed("nix flake metadata nixpkgs --tarball-ttl 0 >&2") + client.succeed("nix eval nixpkgs#hello --eval-store dummy://?read-only=false >&2") # Test fetchTree on a github URL. hash = client.succeed(f"nix eval --no-trust-tarballs-from-git-forges --raw --expr '(fetchTree {info['url']}).narHash'")