diff --git a/.github/release-notes.sh b/.github/release-notes.sh index 9937c18cf..198361161 100755 --- a/.github/release-notes.sh +++ b/.github/release-notes.sh @@ -45,6 +45,7 @@ linkify_gh() { | trim_trailing_newlines \ | sed -e 's/^\* /\n* /' \ | linkify_gh + echo "" # final newline ) > "$scratch/changes.md" ( diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 8baa6127f..b195acd8f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,10 +1,16 @@ on: workflow_call: inputs: - os: + system: required: true type: string - system: + runner: + required: true + type: string + runner_for_virt: + required: true + type: string + runner_small: required: true type: string if: @@ -15,13 +21,21 @@ on: required: false default: true type: boolean + run_vm_tests: + required: false + default: false + type: boolean + run_regression_tests: + required: false + default: false + type: boolean jobs: build: if: ${{ inputs.if }} strategy: fail-fast: false - runs-on: ${{ inputs.os }} + runs-on: ${{ inputs.runner }} timeout-minutes: 60 steps: - uses: actions/checkout@v4 @@ -33,15 +47,172 @@ jobs: with: name: ${{ inputs.system }} path: ./tarball/*.xz + test: if: ${{ inputs.if && inputs.run_tests}} needs: build strategy: fail-fast: false - runs-on: ${{ inputs.os }} + runs-on: ${{ inputs.runner }} timeout-minutes: 60 steps: - uses: actions/checkout@v4 - uses: DeterminateSystems/determinate-nix-action@main - uses: DeterminateSystems/flakehub-cache-action@main - run: nix flake check -L --system ${{ inputs.system }} + + vm_tests_smoke: + if: inputs.run_vm_tests && github.event_name != 'merge_group' + needs: build + runs-on: ${{ inputs.runner_for_virt }} + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/determinate-nix-action@main + - uses: DeterminateSystems/flakehub-cache-action@main + - run: | + nix build -L \ + .#hydraJobs.tests.functional_user \ + .#hydraJobs.tests.githubFlakes \ + .#hydraJobs.tests.nix-docker \ + .#hydraJobs.tests.tarballFlakes \ + ; + + vm_tests_all: + if: inputs.run_vm_tests && github.event_name == 'merge_group' + needs: build + runs-on: ${{ inputs.runner_for_virt }} + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/determinate-nix-action@main + - uses: DeterminateSystems/flakehub-cache-action@main + - run: | + cmd() { + nix build -L --keep-going --timeout 600 \ + $(nix flake show --json \ + | jq -r ' + .hydraJobs.tests + | with_entries(select(.value.type == "derivation")) + | keys[] + | ".#hydraJobs.tests." + .') + } + + if ! cmd; then + echo "failed, retrying once ..." 
+ printf "\n\n\n\n\n\n\n\n" + cmd + fi + + flake_regressions: + if: | + (inputs.run_regression_tests && github.event_name == 'merge_group') + || ( + inputs.run_regression_tests + && github.event.pull_request.head.repo.full_name == 'DeterminateSystems/nix-src' + && ( + (github.event.action == 'labeled' && github.event.label.name == 'flake-regression-test') + || (github.event.action != 'labeled' && contains(github.event.pull_request.labels.*.name, 'flake-regression-test')) + ) + ) + needs: build + runs-on: ${{ inputs.runner }} + strategy: + matrix: + nix_config: + - "lazy-trees = true" + - "lazy-trees = false" + glob: + - "[0-d]*" + - "[e-l]*" + - "[m]*" + - "[n-r]*" + - "[s-z]*" + + steps: + - name: Checkout nix + uses: actions/checkout@v4 + - name: Checkout flake-regressions + uses: actions/checkout@v4 + with: + repository: DeterminateSystems/flake-regressions + path: flake-regressions + - name: Checkout flake-regressions-data + uses: actions/checkout@v4 + with: + repository: DeterminateSystems/flake-regressions-data + path: flake-regressions/tests + - uses: DeterminateSystems/determinate-nix-action@main + - uses: DeterminateSystems/flakehub-cache-action@main + - env: + PARALLEL: "-P 50%" + FLAKE_REGRESSION_GLOB: ${{ matrix.glob }} + NIX_CONFIG: ${{ matrix.nix_config }} + run: | + set -x + if [ ! -z "${NSC_CACHE_PATH:-}" ]; then + mkdir -p "${NSC_CACHE_PATH}/nix/xdg-cache" + export XDG_CACHE_HOME="${NSC_CACHE_PATH}/nix/xdg-cache" + fi + nix build -L --out-link ./new-nix + export PATH=$(pwd)/new-nix/bin:$PATH + + if ! flake-regressions/eval-all.sh; then + echo "Some failed, trying again" + printf "\n\n\n\n\n\n\n\n" + flake-regressions/eval-all.sh + fi + + manual: + if: github.event_name != 'merge_group' + needs: build + runs-on: ${{ inputs.runner_small }} + permissions: + id-token: "write" + contents: "read" + pull-requests: "write" + statuses: "write" + deployments: "write" + steps: + - name: Checkout nix + uses: actions/checkout@v4 + - uses: DeterminateSystems/determinate-nix-action@main + - uses: DeterminateSystems/flakehub-cache-action@main + - name: Build manual + run: nix build .#hydraJobs.manual + - uses: nwtgck/actions-netlify@v3.0 + with: + publish-dir: "./result/share/doc/nix/manual" + production-branch: detsys-main + github-token: ${{ secrets.GITHUB_TOKEN }} + deploy-message: "Deploy from GitHub Actions" + # NOTE(cole-h): We have a perpetual PR displaying our changes against upstream open, but + # its conversation is locked, so this PR comment can never be posted. 
+ # https://github.com/DeterminateSystems/nix-src/pull/4 + enable-pull-request-comment: ${{ github.event.pull_request.number != 4 }} + enable-commit-comment: true + enable-commit-status: true + overwrites-pull-request-comment: true + env: + NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} + NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} + + success: + needs: + - build + - test + - vm_tests_smoke + - vm_tests_all + - flake_regressions + - manual + if: ${{ always() }} + runs-on: ubuntu-latest + steps: + - run: "true" + - run: | + echo "A dependent in the build matrix failed:" + echo "$needs" + exit 1 + env: + needs: ${{ toJSON(needs) }} + if: | + contains(needs.*.result, 'failure') || + contains(needs.*.result, 'cancelled') diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2fcbf9360..c002d0b66 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,18 +4,26 @@ on: pull_request: push: branches: + # NOTE: make sure any branches here are also valid directory names, + # otherwise creating the directory and uploading to s3 will fail - detsys-main - main - master merge_group: + release: + types: + - published permissions: id-token: "write" contents: "read" + pull-requests: "write" + statuses: "write" + deployments: "write" jobs: eval: - runs-on: blacksmith-32vcpu-ubuntu-2204 + runs-on: UbuntuLatest32Cores128G steps: - uses: actions/checkout@v4 with: @@ -26,150 +34,108 @@ jobs: build_x86_64-linux: uses: ./.github/workflows/build.yml with: - os: blacksmith-32vcpu-ubuntu-2204 system: x86_64-linux + runner: namespace-profile-linuxamd32c64g-cache + runner_for_virt: UbuntuLatest32Cores128G + runner_small: ubuntu-latest + run_tests: true + run_vm_tests: true + run_regression_tests: true build_aarch64-linux: uses: ./.github/workflows/build.yml with: - if: ${{ github.event_name == 'merge_group' }} - os: blacksmith-32vcpu-ubuntu-2204-arm + if: ${{ github.event_name != 'pull_request' }} system: aarch64-linux + runner: UbuntuLatest32Cores128GArm + runner_for_virt: UbuntuLatest32Cores128GArm + runner_small: UbuntuLatest32Cores128GArm build_x86_64-darwin: uses: ./.github/workflows/build.yml with: - if: ${{ github.event_name == 'merge_group' }} - os: macos-latest-large + if: ${{ github.event_name != 'pull_request' }} system: x86_64-darwin + runner: macos-latest-large + runner_for_virt: macos-latest-large + runner_small: macos-latest-large build_aarch64-darwin: uses: ./.github/workflows/build.yml with: - os: namespace-profile-mac-m2-12c28g system: aarch64-darwin + runner: namespace-profile-mac-m2-12c28g + runner_for_virt: namespace-profile-mac-m2-12c28g + runner_small: macos-latest-xlarge - vm_tests_smoke: - if: github.event_name != 'merge_group' - needs: build_x86_64-linux - runs-on: blacksmith-32vcpu-ubuntu-2204 + success: + runs-on: ubuntu-latest + needs: + - eval + - build_x86_64-linux + - build_aarch64-linux + - build_x86_64-darwin + - build_aarch64-darwin + if: ${{ always() }} steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/determinate-nix-action@main - - uses: DeterminateSystems/flakehub-cache-action@main + - run: "true" - run: | - nix build -L \ - .#hydraJobs.tests.functional_user \ - .#hydraJobs.tests.githubFlakes \ - .#hydraJobs.tests.nix-docker \ - .#hydraJobs.tests.tarballFlakes \ - ; - - vm_tests_all: - if: github.event_name == 'merge_group' - needs: build_x86_64-linux - runs-on: blacksmith-32vcpu-ubuntu-2204 - steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/determinate-nix-action@main - - uses: 
DeterminateSystems/flakehub-cache-action@main - - run: | - nix build -L --keep-going \ - $(nix flake show --json \ - | jq -r ' - .hydraJobs.tests - | with_entries(select(.value.type == "derivation")) - | keys[] - | ".#hydraJobs.tests." + .') - - flake_regressions: - if: | - github.event_name == 'merge_group' - || ( - github.event.pull_request.head.repo.full_name == 'DeterminateSystems/nix-src' - && ( - (github.event.action == 'labeled' && github.event.label.name == 'flake-regression-test') - || (github.event.action != 'labeled' && contains(github.event.pull_request.labels.*.name, 'flake-regression-test')) - ) - ) - needs: build_x86_64-linux - runs-on: namespace-profile-x86-32cpu-64gb - steps: - - name: Checkout nix - uses: actions/checkout@v4 - - name: Checkout flake-regressions - uses: actions/checkout@v4 - with: - repository: DeterminateSystems/flake-regressions - path: flake-regressions - - name: Checkout flake-regressions-data - uses: actions/checkout@v4 - with: - repository: DeterminateSystems/flake-regressions-data - path: flake-regressions/tests - - uses: DeterminateSystems/determinate-nix-action@main - - uses: DeterminateSystems/flakehub-cache-action@main - - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH PARALLEL="-P 50%" flake-regressions/eval-all.sh - - flake_regressions_lazy: - if: | - github.event_name == 'merge_group' - || ( - github.event.pull_request.head.repo.full_name == 'DeterminateSystems/nix-src' - && ( - (github.event.action == 'labeled' && github.event.label.name == 'flake-regression-test') - || (github.event.action != 'labeled' && contains(github.event.pull_request.labels.*.name, 'flake-regression-test')) - ) - ) - needs: build_x86_64-linux - runs-on: namespace-profile-x86-32cpu-64gb - steps: - - name: Checkout nix - uses: actions/checkout@v4 - - name: Checkout flake-regressions - uses: actions/checkout@v4 - with: - repository: DeterminateSystems/flake-regressions - path: flake-regressions - - name: Checkout flake-regressions-data - uses: actions/checkout@v4 - with: - repository: DeterminateSystems/flake-regressions-data - path: flake-regressions/tests - - uses: DeterminateSystems/determinate-nix-action@main - - uses: DeterminateSystems/flakehub-cache-action@main - - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH PARALLEL="-P 50%" NIX_CONFIG="lazy-trees = true" flake-regressions/eval-all.sh - - manual: - if: github.event_name != 'merge_group' - needs: build_x86_64-linux - runs-on: blacksmith - permissions: - id-token: "write" - contents: "read" - pull-requests: "write" - statuses: "write" - deployments: "write" - steps: - - name: Checkout nix - uses: actions/checkout@v4 - - uses: DeterminateSystems/determinate-nix-action@main - - uses: DeterminateSystems/flakehub-cache-action@main - - name: Build manual - run: nix build .#hydraJobs.manual - - uses: nwtgck/actions-netlify@v3.0 - with: - publish-dir: "./result/share/doc/nix/manual" - production-branch: detsys-main - github-token: ${{ secrets.GITHUB_TOKEN }} - deploy-message: "Deploy from GitHub Actions" - # NOTE(cole-h): We have a perpetual PR displaying our changes against upstream open, but - # its conversation is locked, so this PR comment can never be posted. 
- # https://github.com/DeterminateSystems/nix-src/pull/4 - enable-pull-request-comment: ${{ github.event.pull_request.number != 4 }} - enable-commit-comment: true - enable-commit-status: true - overwrites-pull-request-comment: true + echo "A dependent in the build matrix failed:" + echo "$needs" + exit 1 env: - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} + needs: ${{ toJSON(needs) }} + if: | + contains(needs.*.result, 'failure') || + contains(needs.*.result, 'cancelled') + + - uses: actions/checkout@v4 + - uses: DeterminateSystems/determinate-nix-action@main + + - name: Create artifacts directory + run: mkdir -p ./artifacts + + - name: Fetch artifacts + uses: actions/download-artifact@v4 + with: + path: downloaded + - name: Move downloaded artifacts to artifacts directory + run: | + for dir in ./downloaded/*; do + arch="$(basename "$dir")" + mv "$dir"/*.xz ./artifacts/"${arch}" + done + + - name: Build fallback-paths.nix + if: ${{ github.event_name != 'pull_request' }} + run: | + nix build .#fallbackPathsNix --out-link fallback + cat fallback > ./artifacts/fallback-paths.nix + + - uses: DeterminateSystems/push-artifact-ids@main + with: + s3_upload_role: ${{ secrets.AWS_S3_UPLOAD_ROLE_ARN }} + bucket: ${{ secrets.AWS_S3_UPLOAD_BUCKET_NAME }} + directory: ./artifacts + ids_project_name: determinate-nix + ids_binary_prefix: determinate-nix + skip_acl: true + allowed_branches: '["detsys-main"]' + + publish: + needs: + - success + if: (!github.repository.fork && (github.ref == format('refs/heads/{0}', github.event.repository.default_branch) || startsWith(github.ref, 'refs/tags/'))) + environment: ${{ github.event_name == 'release' && 'production' || '' }} + runs-on: ubuntu-latest + permissions: + contents: read + id-token: write + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/determinate-nix-action@main + - uses: DeterminateSystems/flakehub-push@main + with: + rolling: ${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }} + visibility: "public" + tag: "${{ github.ref_name }}" diff --git a/.github/workflows/propose-release.yml b/.github/workflows/propose-release.yml index 82407abe7..ea01e4b7a 100644 --- a/.github/workflows/propose-release.yml +++ b/.github/workflows/propose-release.yml @@ -29,4 +29,4 @@ jobs: git commit -m "Set .version-determinate to ${{ inputs.version }}" || true ./.github/release-notes.sh git add doc - git commit -m "Generare release notes for ${{ inputs.version }}" || true + git commit -m "Generate release notes for ${{ inputs.version }}" || true diff --git a/.github/workflows/upload-release.yml b/.github/workflows/upload-release.yml deleted file mode 100644 index 9e173c34f..000000000 --- a/.github/workflows/upload-release.yml +++ /dev/null @@ -1,113 +0,0 @@ -name: Upload release - -concurrency: - group: upload-release - -on: - workflow_call: - push: - branches: - # NOTE: make sure any branches here are also valid directory names, - # otherwise creating the directory and uploading to s3 will fail - - "detsys-main" - pull_request: - types: - - opened - - reopened - - synchronize - - labeled - release: - types: - - published - -permissions: - id-token: "write" - contents: "read" - -jobs: - build-x86_64-linux: - uses: ./.github/workflows/build.yml - with: - os: blacksmith-32vcpu-ubuntu-2204 - system: x86_64-linux - run_tests: false - - build-aarch64-linux: - uses: ./.github/workflows/build.yml - with: - os: blacksmith-32vcpu-ubuntu-2204-arm - system: aarch64-linux - 
run_tests: false - - build-x86_64-darwin: - uses: ./.github/workflows/build.yml - with: - os: macos-latest-large - system: x86_64-darwin - run_tests: false - - build-aarch64-darwin: - uses: ./.github/workflows/build.yml - with: - os: macos-latest-xlarge - system: aarch64-darwin - run_tests: false - - release: - runs-on: ubuntu-latest - needs: - - build-x86_64-linux - - build-aarch64-linux - - build-x86_64-darwin - - build-aarch64-darwin - steps: - - name: Checkout - uses: actions/checkout@v4 - - uses: DeterminateSystems/determinate-nix-action@main - - - name: Create artifacts directory - run: mkdir -p ./artifacts - - - name: Fetch artifacts - uses: actions/download-artifact@v4 - with: - path: downloaded - - name: Move downloaded artifacts to artifacts directory - run: | - for dir in ./downloaded/*; do - arch="$(basename "$dir")" - mv "$dir"/*.xz ./artifacts/"${arch}" - done - - - name: Build fallback-paths.nix - run: | - nix build .#fallbackPathsNix --out-link fallback - cat fallback > ./artifacts/fallback-paths.nix - - - uses: DeterminateSystems/push-artifact-ids@main - with: - s3_upload_role: ${{ secrets.AWS_S3_UPLOAD_ROLE_ARN }} - bucket: ${{ secrets.AWS_S3_UPLOAD_BUCKET_NAME }} - directory: ./artifacts - ids_project_name: determinate-nix - ids_binary_prefix: determinate-nix - skip_acl: true - allowed_branches: '["detsys-main"]' - - publish: - needs: - - release - if: (!github.repository.fork && (github.ref == format('refs/heads/{0}', github.event.repository.default_branch) || startsWith(github.ref, 'refs/tags/'))) - environment: ${{ github.event_name == 'release' && 'production' || '' }} - runs-on: ubuntu-latest - permissions: - contents: read - id-token: write - steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/determinate-nix-action@main - - uses: DeterminateSystems/flakehub-push@main - with: - rolling: ${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }} - visibility: "public" - tag: "${{ github.ref_name }}" diff --git a/.version b/.version index f01356823..69886179f 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.29.0 +2.29.1 diff --git a/.version-determinate b/.version-determinate index b72762837..424e1794d 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -3.6.2 +3.6.8 diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 8efc01612..ebcb7b956 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -129,6 +129,10 @@ - [Contributing](development/contributing.md) - [Determinate Nix Release Notes](release-notes-determinate/index.md) - [Changes between Nix and Determinate Nix](release-notes-determinate/changes.md) + - [Release 3.6.8 (2025-06-25)](release-notes-determinate/rl-3.6.8.md) + - [Release 3.6.7 (2025-06-24)](release-notes-determinate/rl-3.6.7.md) + - [Release 3.6.6 (2025-06-17)](release-notes-determinate/rl-3.6.6.md) + - [Release 3.6.5 (2025-06-16)](release-notes-determinate/rl-3.6.5.md) - [Release 3.6.2 (2025-06-02)](release-notes-determinate/rl-3.6.2.md) - [Release 3.6.1 (2025-05-24)](release-notes-determinate/rl-3.6.1.md) - [Release 3.6.0 (2025-05-22)](release-notes-determinate/rl-3.6.0.md) diff --git a/doc/manual/source/release-notes-determinate/changes.md b/doc/manual/source/release-notes-determinate/changes.md index f4ea70747..3a38378e0 100644 --- a/doc/manual/source/release-notes-determinate/changes.md +++ b/doc/manual/source/release-notes-determinate/changes.md @@ -1,6 +1,6 @@ # Changes between Nix and Determinate Nix -This section 
lists the differences between upstream Nix 2.29 and Determinate Nix 3.6.2.
+This section lists the differences between upstream Nix 2.29 and Determinate Nix 3.6.8.
* In Determinate Nix, flakes are stable. You no longer need to enable the `flakes` experimental feature.
@@ -44,4 +44,45 @@ This section lists the differences between upstream Nix 2.29 and Determinate Nix
* nix profile: Replace ε and ∅ with descriptive English words by @grahamc in [DeterminateSystems/nix-src#81](https://github.com/DeterminateSystems/nix-src/pull/81)
-* Call out that `--keep-failed` with remote builders will keep the failed build directory on that builder by @cole-h in [DeterminateSystems/nix-src#85](https://github.com/DeterminateSystems/nix-src/pull/85)
\ No newline at end of file
+* Call out that `--keep-failed` with remote builders will keep the failed build directory on that builder by @cole-h in [DeterminateSystems/nix-src#85](https://github.com/DeterminateSystems/nix-src/pull/85)
+
+
+
+
+
+
+* When remote building with --keep-failed, only show "you can rerun" message if the derivation's platform is supported on this machine by @cole-h in [DeterminateSystems/nix-src#87](https://github.com/DeterminateSystems/nix-src/pull/87)
+
+* Indicate that sandbox-paths specifies a missing file in the corresponding error message. by @cole-h in [DeterminateSystems/nix-src#88](https://github.com/DeterminateSystems/nix-src/pull/88)
+
+* Use 'published' release type to avoid double uploads by @gustavderdrache in [DeterminateSystems/nix-src#90](https://github.com/DeterminateSystems/nix-src/pull/90)
+
+* Render lazy tree paths in messages without the /nix/store/hash... prefix in substituted source trees by @edolstra in [DeterminateSystems/nix-src#91](https://github.com/DeterminateSystems/nix-src/pull/91)
+
+* Use FlakeHub inputs by @lucperkins in [DeterminateSystems/nix-src#89](https://github.com/DeterminateSystems/nix-src/pull/89)
+
+* Proactively cache more flake inputs and fetches by @edolstra in [DeterminateSystems/nix-src#93](https://github.com/DeterminateSystems/nix-src/pull/93)
+
+* Fix: register extra builtins just once by @edolstra in [DeterminateSystems/nix-src#97](https://github.com/DeterminateSystems/nix-src/pull/97)
+
+* Fix: Make the S3 test more robust by @gustavderdrache in [DeterminateSystems/nix-src#101](https://github.com/DeterminateSystems/nix-src/pull/101)
+
+* Fix the link to `builders-use-substitutes` documentation for `builders` by @lucperkins in [DeterminateSystems/nix-src#102](https://github.com/DeterminateSystems/nix-src/pull/102)
+
+* Improve error messages that use the hypothetical future tense of "will" by @lucperkins in [DeterminateSystems/nix-src#92](https://github.com/DeterminateSystems/nix-src/pull/92)
+
+* Improve caching of inputs in dry-run mode by @edolstra in [DeterminateSystems/nix-src#98](https://github.com/DeterminateSystems/nix-src/pull/98)
+
+
+
+
+
+
+
+* Fix fetchToStore() caching with --impure, improve testing by @edolstra in [DeterminateSystems/nix-src#117](https://github.com/DeterminateSystems/nix-src/pull/117)
+
+* Add lazy-locks setting by @edolstra in [DeterminateSystems/nix-src#113](https://github.com/DeterminateSystems/nix-src/pull/113)
+
+* Sync 2.29.1 by @edolstra in [DeterminateSystems/nix-src#124](https://github.com/DeterminateSystems/nix-src/pull/124)
+
+* Release v3.6.7 by @github-actions in [DeterminateSystems/nix-src#126](https://github.com/DeterminateSystems/nix-src/pull/126)
diff --git a/doc/manual/source/release-notes-determinate/rl-3.6.5.md
b/doc/manual/source/release-notes-determinate/rl-3.6.5.md
new file mode 100644
index 000000000..8ef5be0fd
--- /dev/null
+++ b/doc/manual/source/release-notes-determinate/rl-3.6.5.md
@@ -0,0 +1,19 @@
+# Release 3.6.5 (2025-06-12)
+
+* Based on [upstream Nix 2.29.0](../release-notes/rl-2.29.md).
+
+## What's Changed
+* When remote building with --keep-failed, only show "you can rerun" message if the derivation's platform is supported on this machine by @cole-h in [DeterminateSystems/nix-src#87](https://github.com/DeterminateSystems/nix-src/pull/87)
+* Indicate that sandbox-paths specifies a missing file in the corresponding error message. by @cole-h in [DeterminateSystems/nix-src#88](https://github.com/DeterminateSystems/nix-src/pull/88)
+* Render lazy tree paths in messages without the /nix/store/hash... prefix in substituted source trees by @edolstra in [DeterminateSystems/nix-src#91](https://github.com/DeterminateSystems/nix-src/pull/91)
+* Use FlakeHub inputs by @lucperkins in [DeterminateSystems/nix-src#89](https://github.com/DeterminateSystems/nix-src/pull/89)
+* Proactively cache more flake inputs and fetches by @edolstra in [DeterminateSystems/nix-src#93](https://github.com/DeterminateSystems/nix-src/pull/93)
+* Fix: register extra builtins just once by @edolstra in [DeterminateSystems/nix-src#97](https://github.com/DeterminateSystems/nix-src/pull/97)
+* Fix the link to `builders-use-substitutes` documentation for `builders` by @lucperkins in [DeterminateSystems/nix-src#102](https://github.com/DeterminateSystems/nix-src/pull/102)
+* Improve error messages that use the hypothetical future tense of "will" by @lucperkins in [DeterminateSystems/nix-src#92](https://github.com/DeterminateSystems/nix-src/pull/92)
+* Make the `nix repl` test more stable by @edolstra in [DeterminateSystems/nix-src#103](https://github.com/DeterminateSystems/nix-src/pull/103)
+* Run nixpkgsLibTests against lazy trees by @edolstra in [DeterminateSystems/nix-src#100](https://github.com/DeterminateSystems/nix-src/pull/100)
+* Run the Nix test suite against lazy trees by @edolstra in [DeterminateSystems/nix-src#105](https://github.com/DeterminateSystems/nix-src/pull/105)
+* Improve caching of inputs by @edolstra in [DeterminateSystems/nix-src#98](https://github.com/DeterminateSystems/nix-src/pull/98), [DeterminateSystems/nix-src#110](https://github.com/DeterminateSystems/nix-src/pull/110), and [DeterminateSystems/nix-src#115](https://github.com/DeterminateSystems/nix-src/pull/115)
+
+**Full Changelog**: [v3.6.2...v3.6.5](https://github.com/DeterminateSystems/nix-src/compare/v3.6.2...v3.6.5)
diff --git a/doc/manual/source/release-notes-determinate/rl-3.6.6.md b/doc/manual/source/release-notes-determinate/rl-3.6.6.md
new file mode 100644
index 000000000..bf4e3690a
--- /dev/null
+++ b/doc/manual/source/release-notes-determinate/rl-3.6.6.md
@@ -0,0 +1,7 @@
+# Release 3.6.6 (2025-06-17)
+
+* Based on [upstream Nix 2.29.0](../release-notes/rl-2.29.md).
+
+## What's Changed
+
+* No-op release on the nix-src side, due to a regression on nix-darwin in determinate-nixd.
diff --git a/doc/manual/source/release-notes-determinate/rl-3.6.7.md b/doc/manual/source/release-notes-determinate/rl-3.6.7.md
new file mode 100644
index 000000000..197587f1b
--- /dev/null
+++ b/doc/manual/source/release-notes-determinate/rl-3.6.7.md
@@ -0,0 +1,17 @@
+# Release 3.6.7 (2025-06-24)
+
+* Based on [upstream Nix 2.29.1](../release-notes/rl-2.29.md).
+
+## What's Changed
+
+### Security fixes
+
+* Patched against GHSA-g948-229j-48j3
+
+### Lazy trees
+
+* Lazy trees now produce `flake.lock` files with NAR hashes unless `lazy-locks` is set to `true` by @edolstra in [DeterminateSystems/nix-src#113](https://github.com/DeterminateSystems/nix-src/pull/113)
+* Improved caching with lazy-trees when using --impure, with enhanced testing by @edolstra in [DeterminateSystems/nix-src#117](https://github.com/DeterminateSystems/nix-src/pull/117)
+
+
+**Full Changelog**: [v3.6.6...v3.6.7](https://github.com/DeterminateSystems/nix-src/compare/v3.6.6...v3.6.7)
diff --git a/doc/manual/source/release-notes-determinate/rl-3.6.8.md b/doc/manual/source/release-notes-determinate/rl-3.6.8.md
new file mode 100644
index 000000000..c4b4b96c9
--- /dev/null
+++ b/doc/manual/source/release-notes-determinate/rl-3.6.8.md
@@ -0,0 +1,12 @@
+# Release 3.6.8 (2025-06-25)
+
+* Based on [upstream Nix 2.29.1](../release-notes/rl-2.29.md).
+
+## What's Changed
+* Fix fetchToStore() caching with --impure, improve testing by @edolstra in [DeterminateSystems/nix-src#117](https://github.com/DeterminateSystems/nix-src/pull/117)
+* Add lazy-locks setting by @edolstra in [DeterminateSystems/nix-src#113](https://github.com/DeterminateSystems/nix-src/pull/113)
+* Sync 2.29.1 by @edolstra in [DeterminateSystems/nix-src#124](https://github.com/DeterminateSystems/nix-src/pull/124)
+* Release v3.6.7 by @github-actions in [DeterminateSystems/nix-src#126](https://github.com/DeterminateSystems/nix-src/pull/126)
+
+
+**Full Changelog**: [v3.6.6...v3.6.8](https://github.com/DeterminateSystems/nix-src/compare/v3.6.6...v3.6.8)
diff --git a/flake.nix b/flake.nix
index b5fa93e58..0207134cd 100644
--- a/flake.nix
+++ b/flake.nix
@@ -220,6 +220,11 @@ '';
repl-completion = nixpkgsFor.${system}.native.callPackage ./tests/repl-completion.nix { };
+ lazyTrees = nixpkgsFor.${system}.native.nixComponents2.nix-functional-tests.override {
+ pname = "nix-lazy-trees-tests";
+ lazyTrees = true;
+ };
+
/**
Checks for our packaging expressions.
This shouldn't build anything significant; just check that things
diff --git a/src/libexpr/include/nix/expr/eval-settings.hh b/src/libexpr/include/nix/expr/eval-settings.hh
index 782f5f9e1..9b7573b20 100644
--- a/src/libexpr/include/nix/expr/eval-settings.hh
+++ b/src/libexpr/include/nix/expr/eval-settings.hh
@@ -262,6 +262,19 @@ struct EvalSettings : Config
R"(
If set to true, flakes and trees fetched by [`builtins.fetchTree`](@docroot@/language/builtins.md#builtins-fetchTree) are only copied to the Nix store when they're used as a dependency of a derivation. This avoids copying (potentially large) source trees unnecessarily.
)"};
+
+ // FIXME: this setting should really be in libflake, but it's
+ // currently needed in mountInput().
+ Setting<bool> lazyLocks{
+ this,
+ false,
+ "lazy-locks",
+ R"(
+ If enabled, Nix only includes NAR hashes in lock file entries if they're necessary to lock the input (i.e. when there is no other attribute that allows the content to be verified, like a Git revision).
+ This is not backward compatible with older versions of Nix.
+ If disabled, lock file entries always contain a NAR hash.
+ )" + }; }; /** diff --git a/src/libexpr/paths.cc b/src/libexpr/paths.cc index 40c0a23b6..38ded067a 100644 --- a/src/libexpr/paths.cc +++ b/src/libexpr/paths.cc @@ -77,25 +77,30 @@ StorePath EvalState::mountInput( allowPath(storePath); // FIXME: should just whitelist the entire virtual store + std::optional _narHash; + + auto getNarHash = [&]() { + if (!_narHash) { + if (store->isValidPath(storePath)) + _narHash = store->queryPathInfo(storePath)->narHash; + else + _narHash = fetchToStore2(*store, accessor, FetchMode::DryRun, input.getName()).second; + } + return _narHash; + }; + storeFS->mount(CanonPath(store->printStorePath(storePath)), accessor); - if (requireLockable && (!settings.lazyTrees || !input.isLocked()) && !input.getNarHash()) { - // FIXME: use fetchToStore to make it cache this - auto narHash = accessor->hashPath(CanonPath::root); - input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true)); - } + if (requireLockable && (!settings.lazyTrees || !settings.lazyLocks || !input.isLocked()) && !input.getNarHash()) + input.attrs.insert_or_assign("narHash", getNarHash()->to_string(HashFormat::SRI, true)); - // FIXME: what to do with the NAR hash in lazy mode? - if (!settings.lazyTrees && originalInput.getNarHash()) { - auto expected = originalInput.computeStorePath(*store); - if (storePath != expected) - throw Error( - (unsigned int) 102, - "NAR hash mismatch in input '%s', expected '%s' but got '%s'", - originalInput.to_string(), - store->printStorePath(storePath), - store->printStorePath(expected)); - } + if (originalInput.getNarHash() && *getNarHash() != *originalInput.getNarHash()) + throw Error( + (unsigned int) 102, + "NAR hash mismatch in input '%s', expected '%s' but got '%s'", + originalInput.to_string(), + getNarHash()->to_string(HashFormat::SRI, true), + originalInput.getNarHash()->to_string(HashFormat::SRI, true)); return storePath; } diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc index 9a861a11d..e6b9430a2 100644 --- a/src/libfetchers/fetch-to-store.cc +++ b/src/libfetchers/fetch-to-store.cc @@ -3,19 +3,16 @@ namespace nix { -fetchers::Cache::Key makeFetchToStoreCacheKey( - const std::string &name, - const std::string &fingerprint, +fetchers::Cache::Key makeSourcePathToHashCacheKey( + const std::string & fingerprint, ContentAddressMethod method, - const std::string &path) + const std::string & path) { - return fetchers::Cache::Key{"fetchToStore", { - {"name", name}, + return fetchers::Cache::Key{"sourcePathToHash", { {"fingerprint", fingerprint}, {"method", std::string{method.render()}}, {"path", path} }}; - } StorePath fetchToStore( @@ -27,39 +24,81 @@ StorePath fetchToStore( PathFilter * filter, RepairFlag repair) { - // FIXME: add an optimisation for the case where the accessor is - // a `PosixSourceAccessor` pointing to a store path. 
+ return fetchToStore2(store, path, mode, name, method, filter, repair).first; +} +std::pair fetchToStore2( + Store & store, + const SourcePath & path, + FetchMode mode, + std::string_view name, + ContentAddressMethod method, + PathFilter * filter, + RepairFlag repair) +{ std::optional cacheKey; - std::optional fingerprint; - if (!filter && (fingerprint = path.accessor->getFingerprint(path.path))) { - cacheKey = makeFetchToStoreCacheKey(std::string{name}, *fingerprint, method, path.path.abs()); - if (auto res = fetchers::getCache()->lookupStorePath(*cacheKey, store, mode == FetchMode::DryRun)) { - debug("store path cache hit for '%s'", path); - return res->storePath; + auto [subpath, fingerprint] = + filter + ? std::pair>{path.path, std::nullopt} + : path.accessor->getFingerprint(path.path); + + if (fingerprint) { + cacheKey = makeSourcePathToHashCacheKey(*fingerprint, method, subpath.abs()); + if (auto res = fetchers::getCache()->lookup(*cacheKey)) { + auto hash = Hash::parseSRI(fetchers::getStrAttr(*res, "hash")); + auto storePath = store.makeFixedOutputPathFromCA(name, + ContentAddressWithReferences::fromParts(method, hash, {})); + if (mode == FetchMode::DryRun || store.isValidPath(storePath)) { + debug("source path '%s' cache hit in '%s' (hash '%s')", path, store.printStorePath(storePath), hash.to_string(HashFormat::SRI, true)); + return {storePath, hash}; + } + debug("source path '%s' not in store", path); } - } else - debug("source path '%s' is uncacheable (%d, %d)", path, filter, (bool) fingerprint); + } else { + static auto barf = getEnv("_NIX_TEST_BARF_ON_UNCACHEABLE").value_or("") == "1"; + if (barf) + throw Error("source path '%s' is uncacheable (filter=%d)", path, (bool) filter); + // FIXME: could still provide in-memory caching keyed on `SourcePath`. + debug("source path '%s' is uncacheable", path); + } Activity act(*logger, lvlChatty, actUnknown, fmt(mode == FetchMode::DryRun ? "hashing '%s'" : "copying '%s' to the store", path)); auto filter2 = filter ? *filter : defaultPathFilter; - auto storePath = + auto [storePath, hash] = mode == FetchMode::DryRun - ? store.computeStorePath( - name, path, method, HashAlgorithm::SHA256, {}, filter2).first - : store.addToStore( - name, path, method, HashAlgorithm::SHA256, {}, filter2, repair); - - debug(mode == FetchMode::DryRun ? "hashed '%s'" : "copied '%s' to '%s'", path, store.printStorePath(storePath)); + ? ({ + auto [storePath, hash] = store.computeStorePath( + name, path, method, HashAlgorithm::SHA256, {}, filter2); + debug("hashed '%s' to '%s' (hash '%s')", path, store.printStorePath(storePath), hash.to_string(HashFormat::SRI, true)); + std::make_pair(storePath, hash); + }) + : ({ + // FIXME: ideally addToStore() would return the hash + // right away (like computeStorePath()). + auto storePath = store.addToStore( + name, path, method, HashAlgorithm::SHA256, {}, filter2, repair); + auto info = store.queryPathInfo(storePath); + assert(info->references.empty()); + auto hash = + method == ContentAddressMethod::Raw::NixArchive + ? 
info->narHash + : ({ + if (!info->ca || info->ca->method != method) + throw Error("path '%s' lacks a CA field", store.printStorePath(storePath)); + info->ca->hash; + }); + debug("copied '%s' to '%s' (hash '%s')", path, store.printStorePath(storePath), hash.to_string(HashFormat::SRI, true)); + std::make_pair(storePath, hash); + }); if (cacheKey) - fetchers::getCache()->upsert(*cacheKey, store, {}, storePath); + fetchers::getCache()->upsert(*cacheKey, {{"hash", hash.to_string(HashFormat::SRI, true)}}); - return storePath; + return {storePath, hash}; } } diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index 6d73daa1a..d91f24b6a 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -338,8 +338,7 @@ std::pair, Input> Input::getAccessorUnchecked(ref sto auto accessor = make_ref(makeStorePathAccessor(store, storePath)); - if (auto fingerprint = getFingerprint(store)) - accessor->setFingerprint(*fingerprint); + accessor->fingerprint = getFingerprint(store); // FIXME: ideally we would use the `showPath()` of the // "real" accessor for this fetcher type. @@ -353,10 +352,8 @@ std::pair, Input> Input::getAccessorUnchecked(ref sto auto [accessor, result] = scheme->getAccessor(store, *this); - assert(!accessor->getFingerprint(CanonPath::root)); - - if (auto fingerprint = getFingerprint(store)) - accessor->setFingerprint(*fingerprint); + if (!accessor->fingerprint) + accessor->fingerprint = result.getFingerprint(store); return {accessor, std::move(result)}; } diff --git a/src/libfetchers/filtering-source-accessor.cc b/src/libfetchers/filtering-source-accessor.cc index 12e4a688b..c339cdbdb 100644 --- a/src/libfetchers/filtering-source-accessor.cc +++ b/src/libfetchers/filtering-source-accessor.cc @@ -58,16 +58,13 @@ std::string FilteringSourceAccessor::showPath(const CanonPath & path) return displayPrefix + next->showPath(prefix / path) + displaySuffix; } -std::optional FilteringSourceAccessor::getFingerprint(const CanonPath & path) +std::pair> FilteringSourceAccessor::getFingerprint(const CanonPath & path) { + if (fingerprint) + return {path, fingerprint}; return next->getFingerprint(prefix / path); } -void FilteringSourceAccessor::setFingerprint(std::string fingerprint) -{ - next->setFingerprint(std::move(fingerprint)); -} - void FilteringSourceAccessor::checkAccess(const CanonPath & path) { if (!isAllowed(path)) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 7730e0db4..4a00d4e34 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -860,7 +860,7 @@ struct GitInputScheme : InputScheme return makeFingerprint(*rev); else { auto repoInfo = getRepoInfo(input); - if (auto repoPath = repoInfo.getPath(); repoPath && repoInfo.workdirInfo.headRev && repoInfo.workdirInfo.submodules.empty()) { + if (auto repoPath = repoInfo.getPath(); repoPath && repoInfo.workdirInfo.submodules.empty()) { /* Calculate a fingerprint that takes into account the deleted and modified/added files. 
*/ HashSink hashSink{HashAlgorithm::SHA512}; @@ -873,7 +873,7 @@ struct GitInputScheme : InputScheme writeString("deleted:", hashSink); writeString(file.abs(), hashSink); } - return makeFingerprint(*repoInfo.workdirInfo.headRev) + return makeFingerprint(repoInfo.workdirInfo.headRev.value_or(nullRev)) + ";d=" + hashSink.finish().first.to_string(HashFormat::Base16, false); } return std::nullopt; @@ -882,7 +882,8 @@ struct GitInputScheme : InputScheme bool isLocked(const Input & input) const override { - return (bool) input.getRev(); + auto rev = input.getRev(); + return rev && rev != nullRev; } }; diff --git a/src/libfetchers/include/nix/fetchers/fetch-to-store.hh b/src/libfetchers/include/nix/fetchers/fetch-to-store.hh index 44c33c147..364d25375 100644 --- a/src/libfetchers/include/nix/fetchers/fetch-to-store.hh +++ b/src/libfetchers/include/nix/fetchers/fetch-to-store.hh @@ -23,7 +23,16 @@ StorePath fetchToStore( PathFilter * filter = nullptr, RepairFlag repair = NoRepair); -fetchers::Cache::Key makeFetchToStoreCacheKey( - const std::string & name, const std::string & fingerprint, ContentAddressMethod method, const std::string & path); +std::pair fetchToStore2( + Store & store, + const SourcePath & path, + FetchMode mode, + std::string_view name = "source", + ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive, + PathFilter * filter = nullptr, + RepairFlag repair = NoRepair); + +fetchers::Cache::Key +makeSourcePathToHashCacheKey(const std::string & fingerprint, ContentAddressMethod method, const std::string & path); } diff --git a/src/libfetchers/include/nix/fetchers/filtering-source-accessor.hh b/src/libfetchers/include/nix/fetchers/filtering-source-accessor.hh index 391cd371b..e0228ad9b 100644 --- a/src/libfetchers/include/nix/fetchers/filtering-source-accessor.hh +++ b/src/libfetchers/include/nix/fetchers/filtering-source-accessor.hh @@ -50,9 +50,7 @@ struct FilteringSourceAccessor : SourceAccessor std::string showPath(const CanonPath & path) override; - std::optional getFingerprint(const CanonPath & path) override; - - void setFingerprint(std::string fingerprint) override; + std::pair> getFingerprint(const CanonPath & path) override; /** * Call `makeNotAllowedError` to throw a `RestrictedPathError` diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index ff39cb02f..0de81ae43 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -144,37 +144,22 @@ struct PathInputScheme : InputScheme storePath = store->addToStoreFromDump(*src, "source"); } - // To avoid copying the path again to the /nix/store, we need to add a cache entry. - ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive; - auto fp = getFingerprint(store, input); - if (fp) { - auto cacheKey = makeFetchToStoreCacheKey(input.getName(), *fp, method, "/"); - fetchers::getCache()->upsert(cacheKey, *store, {}, *storePath); - } + auto accessor = makeStorePathAccessor(store, *storePath); + + // To prevent `fetchToStore()` copying the path again to Nix + // store, pre-create an entry in the fetcher cache. + auto info = store->queryPathInfo(*storePath); + accessor->fingerprint = fmt("path:%s", store->queryPathInfo(*storePath)->narHash.to_string(HashFormat::SRI, true)); + fetchers::getCache()->upsert( + makeSourcePathToHashCacheKey(*accessor->fingerprint, ContentAddressMethod::Raw::NixArchive, "/"), + {{"hash", info->narHash.to_string(HashFormat::SRI, true)}}); /* Trust the lastModified value supplied by the user, if any. It's not a "secure" attribute so we don't care. 
*/ if (!input.getLastModified()) input.attrs.insert_or_assign("lastModified", uint64_t(mtime)); - return {makeStorePathAccessor(store, *storePath), std::move(input)}; - } - - std::optional getFingerprint(ref store, const Input & input) const override - { - if (isRelative(input)) - return std::nullopt; - - /* If this path is in the Nix store, use the hash of the - store object and the subpath. */ - auto path = getAbsPath(input); - try { - auto [storePath, subPath] = store->toStorePath(path.string()); - auto info = store->queryPathInfo(storePath); - return fmt("path:%s:%s", info->narHash.to_string(HashFormat::Base16, false), subPath); - } catch (Error &) { - return std::nullopt; - } + return {accessor, std::move(input)}; } }; diff --git a/src/libflake-tests/meson.build b/src/libflake-tests/meson.build index 80c94bd77..b7a48b89e 100644 --- a/src/libflake-tests/meson.build +++ b/src/libflake-tests/meson.build @@ -59,6 +59,7 @@ test( this_exe, env : { '_NIX_TEST_UNIT_DATA': meson.current_source_dir() / 'data', + 'HOME': meson.current_build_dir() / 'test-home', }, protocol : 'gtest', ) diff --git a/src/libflake-tests/package.nix b/src/libflake-tests/package.nix index db507fc3a..8344d98d7 100644 --- a/src/libflake-tests/package.nix +++ b/src/libflake-tests/package.nix @@ -56,17 +56,13 @@ mkMesonExecutable (finalAttrs: { { meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; } - ( - lib.optionalString stdenv.hostPlatform.isWindows '' - export HOME="$PWD/home-dir" - mkdir -p "$HOME" - '' - + '' - export _NIX_TEST_UNIT_DATA=${resolvePath ./data} - ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} - touch $out - '' - ); + ('' + export _NIX_TEST_UNIT_DATA=${resolvePath ./data} + export HOME="$TMPDIR/home" + mkdir -p "$HOME" + ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} + touch $out + ''); }; }; diff --git a/src/libflake/flake.cc b/src/libflake/flake.cc index d9f042953..075708234 100644 --- a/src/libflake/flake.cc +++ b/src/libflake/flake.cc @@ -562,7 +562,7 @@ LockedFlake lockFlake( /* Get the input flake, resolve 'path:./...' flakerefs relative to the parent flake. */ - auto getInputFlake = [&](const FlakeRef & ref) + auto getInputFlake = [&](const FlakeRef & ref, const fetchers::UseRegistries useRegistries) { if (auto resolvedPath = resolveRelativePath()) { return readFlake(state, ref, ref, ref, *resolvedPath, inputAttrPath); @@ -653,7 +653,7 @@ LockedFlake lockFlake( } if (mustRefetch) { - auto inputFlake = getInputFlake(oldLock->lockedRef); + auto inputFlake = getInputFlake(oldLock->lockedRef, useRegistriesInputs); nodePaths.emplace(childNode, inputFlake.path.parent()); computeLocks(inputFlake.inputs, childNode, inputAttrPath, oldLock, followsPrefix, inputFlake.path, false); @@ -678,7 +678,8 @@ LockedFlake lockFlake( nuked the next time we update the lock file. That is, overrides are sticky unless you use --no-write-lock-file. */ - auto ref = (input2.ref && explicitCliOverrides.contains(inputAttrPath)) ? *input2.ref : *input.ref; + auto inputIsOverride = explicitCliOverrides.contains(inputAttrPath); + auto ref = (input2.ref && inputIsOverride) ? *input2.ref : *input.ref; /* Warn against the use of indirect flakerefs (but only at top-level since we don't want @@ -704,7 +705,7 @@ LockedFlake lockFlake( }; if (input.isFlake) { - auto inputFlake = getInputFlake(*input.ref); + auto inputFlake = getInputFlake(*input.ref, inputIsOverride ? 
fetchers::UseRegistries::All : useRegistriesInputs); auto childNode = make_ref( inputFlake.lockedRef, diff --git a/src/libmain/plugin.cc b/src/libmain/plugin.cc index f5eddabdd..760a096ad 100644 --- a/src/libmain/plugin.cc +++ b/src/libmain/plugin.cc @@ -60,7 +60,7 @@ struct PluginSettings : Config itself, they must be DSOs compatible with the instance of Nix running at the time (i.e. compiled against the same headers, not linked to any incompatible libraries). They should not be linked to - any Nix libraries directly, as those are already at load + any Nix libraries directly, as those are already available at load time. If an entry in the list is a directory, all files in the directory diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index 8a1ff40f0..8b9893b23 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -100,6 +100,7 @@ test( this_exe, env : { '_NIX_TEST_UNIT_DATA': meson.current_source_dir() / 'data', + 'HOME': meson.current_build_dir() / 'test-home', }, protocol : 'gtest', ) diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index 4eb95360a..b7495e0ab 100644 --- a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -28,10 +28,6 @@ TEST_F(nix_api_store_test, nix_store_get_uri) TEST_F(nix_api_util_context, nix_store_get_storedir_default) { - if (nix::getEnv("HOME").value_or("") == "/homeless-shelter") { - // skipping test in sandbox because nix_store_open tries to create /nix/var/nix/profiles - GTEST_SKIP(); - } nix_libstore_init(ctx); Store * store = nix_store_open(ctx, nullptr, nullptr); assert_ctx_ok(); @@ -136,10 +132,6 @@ TEST_F(nix_api_store_test, nix_store_real_path) TEST_F(nix_api_util_context, nix_store_real_path_relocated) { - if (nix::getEnv("HOME").value_or("") == "/homeless-shelter") { - // Can't open default store from within sandbox - GTEST_SKIP(); - } auto tmp = nix::createTempDir(); std::string storeRoot = tmp + "/store"; std::string stateDir = tmp + "/state"; @@ -179,13 +171,7 @@ TEST_F(nix_api_util_context, nix_store_real_path_relocated) TEST_F(nix_api_util_context, nix_store_real_path_binary_cache) { - if (nix::getEnv("HOME").value_or("") == "/homeless-shelter") { - // TODO: override NIX_CACHE_HOME? 
- // skipping test in sandbox because narinfo cache can't be written - GTEST_SKIP(); - } - - Store * store = nix_store_open(ctx, "https://cache.nixos.org", nullptr); + Store * store = nix_store_open(ctx, nix::fmt("file://%s/binary-cache", nix::createTempDir()).c_str(), nullptr); assert_ctx_ok(); ASSERT_NE(store, nullptr); diff --git a/src/libstore-tests/package.nix b/src/libstore-tests/package.nix index b39ee7fa7..1f3701c7f 100644 --- a/src/libstore-tests/package.nix +++ b/src/libstore-tests/package.nix @@ -73,17 +73,13 @@ mkMesonExecutable (finalAttrs: { { meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; } - ( - lib.optionalString stdenv.hostPlatform.isWindows '' - export HOME="$PWD/home-dir" - mkdir -p "$HOME" - '' - + '' - export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} - ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} - touch $out - '' - ); + ('' + export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} + export HOME="$TMPDIR/home" + mkdir -p "$HOME" + ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} + touch $out + ''); }; }; diff --git a/src/libstore/include/nix/store/filetransfer.hh b/src/libstore/include/nix/store/filetransfer.hh index 259af7640..745aeb29e 100644 --- a/src/libstore/include/nix/store/filetransfer.hh +++ b/src/libstore/include/nix/store/filetransfer.hh @@ -46,7 +46,7 @@ struct FileTransferSettings : Config )"}; Setting tries{this, 5, "download-attempts", - "The number of times Nix will attempt to download a file before giving up."}; + "The number of times Nix attempts to download a file before giving up."}; Setting downloadBufferSize{this, 64 * 1024 * 1024, "download-buffer-size", R"( diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 76fadba86..1ab3ed13a 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -247,7 +247,7 @@ LocalStore::LocalStore(ref config) else if (curSchema == 0) { /* new store */ curSchema = nixSchemaVersion; openDB(*state, true); - writeFile(schemaPath, fmt("%1%", curSchema), 0666, true); + writeFile(schemaPath, fmt("%1%", curSchema), 0666, FsSync::Yes); } else if (curSchema < nixSchemaVersion) { @@ -298,7 +298,7 @@ LocalStore::LocalStore(ref config) txn.commit(); } - writeFile(schemaPath, fmt("%1%", nixSchemaVersion), 0666, true); + writeFile(schemaPath, fmt("%1%", nixSchemaVersion), 0666, FsSync::Yes); lockFile(globalLock.get(), ltRead, true); } diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index e84e2db6e..43dfe1832 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -129,6 +129,11 @@ private: */ Path topTmpDir; + /** + * The file descriptor of the temporary directory. + */ + AutoCloseFD tmpDirFd; + /** * The path of the temporary directory in the sandbox. */ @@ -325,9 +330,24 @@ private: /** * Make a file owned by the builder. + * + * SAFETY: this function is prone to TOCTOU as it receives a path and not a descriptor. + * It's only safe to call in a child of a directory only visible to the owner. */ void chownToBuilder(const Path & path); + /** + * Make a file owned by the builder addressed by its file descriptor. + */ + void chownToBuilder(int fd, const Path & path); + + /** + * Create a file in `tmpDir` owned by the builder. + */ + void writeBuilderFile( + const std::string & name, + std::string_view contents); + /** * Run the builder's process. 
*/ @@ -900,7 +920,14 @@ void DerivationBuilderImpl::startBuilder() } else { tmpDir = topTmpDir; } - chownToBuilder(tmpDir); + + /* The TOCTOU between the previous mkdir call and this open call is unavoidable due to + POSIX semantics.*/ + tmpDirFd = AutoCloseFD{open(tmpDir.c_str(), O_RDONLY | O_NOFOLLOW | O_DIRECTORY)}; + if (!tmpDirFd) + throw SysError("failed to open the build temporary directory descriptor '%1%'", tmpDir); + + chownToBuilder(tmpDirFd.get(), tmpDir); for (auto & [outputName, status] : initialOutputs) { /* Set scratch path we'll actually use during the build. @@ -1485,9 +1512,7 @@ void DerivationBuilderImpl::initTmpDir() } else { auto hash = hashString(HashAlgorithm::SHA256, i.first); std::string fn = ".attr-" + hash.to_string(HashFormat::Nix32, false); - Path p = tmpDir + "/" + fn; - writeFile(p, rewriteStrings(i.second, inputRewrites)); - chownToBuilder(p); + writeBuilderFile(fn, rewriteStrings(i.second, inputRewrites)); env[i.first + "Path"] = tmpDirInSandbox + "/" + fn; } } @@ -1596,11 +1621,9 @@ void DerivationBuilderImpl::writeStructuredAttrs() auto jsonSh = StructuredAttrs::writeShell(json); - writeFile(tmpDir + "/.attrs.sh", rewriteStrings(jsonSh, inputRewrites)); - chownToBuilder(tmpDir + "/.attrs.sh"); + writeBuilderFile(".attrs.sh", rewriteStrings(jsonSh, inputRewrites)); env["NIX_ATTRS_SH_FILE"] = tmpDirInSandbox + "/.attrs.sh"; - writeFile(tmpDir + "/.attrs.json", rewriteStrings(json.dump(), inputRewrites)); - chownToBuilder(tmpDir + "/.attrs.json"); + writeBuilderFile(".attrs.json", rewriteStrings(json.dump(), inputRewrites)); env["NIX_ATTRS_JSON_FILE"] = tmpDirInSandbox + "/.attrs.json"; } } @@ -1854,6 +1877,24 @@ void setupSeccomp() #endif } +void DerivationBuilderImpl::chownToBuilder(int fd, const Path & path) +{ + if (!buildUser) return; + if (fchown(fd, buildUser->getUID(), buildUser->getGID()) == -1) + throw SysError("cannot change ownership of file '%1%'", path); +} + +void DerivationBuilderImpl::writeBuilderFile( + const std::string & name, + std::string_view contents) +{ + auto path = std::filesystem::path(tmpDir) / name; + AutoCloseFD fd{openat(tmpDirFd.get(), name.c_str(), O_WRONLY | O_TRUNC | O_CREAT | O_CLOEXEC | O_EXCL | O_NOFOLLOW, 0666)}; + if (!fd) + throw SysError("creating file %s", path); + writeFile(fd, path, contents); + chownToBuilder(fd.get(), path); +} void DerivationBuilderImpl::runChild() { @@ -3065,6 +3106,15 @@ void DerivationBuilderImpl::checkOutputs(const std::mapd_name; if (childName == "." 
|| childName == "..") continue; - _deletePath(dirfd(dir.get()), path + "/" + childName, bytesFreed, ex); + _deletePath(dirfd(dir.get()), path / childName, bytesFreed, ex); } if (errno) throw SysError("reading directory %1%", path); } @@ -497,14 +505,13 @@ static void _deletePath(Descriptor parentfd, const std::filesystem::path & path, static void _deletePath(const std::filesystem::path & path, uint64_t & bytesFreed) { - Path dir = dirOf(path.string()); - if (dir == "") - dir = "/"; + assert(path.is_absolute()); + assert(path.parent_path() != path); - AutoCloseFD dirfd = toDescriptor(open(dir.c_str(), O_RDONLY)); + AutoCloseFD dirfd = toDescriptor(open(path.parent_path().string().c_str(), O_RDONLY)); if (!dirfd) { if (errno == ENOENT) return; - throw SysError("opening directory '%1%'", path); + throw SysError("opening directory %s", path.parent_path()); } std::exception_ptr ex; diff --git a/src/libutil/include/nix/util/file-system.hh b/src/libutil/include/nix/util/file-system.hh index b8fa4cfa0..a9a6e43bf 100644 --- a/src/libutil/include/nix/util/file-system.hh +++ b/src/libutil/include/nix/util/file-system.hh @@ -175,21 +175,27 @@ std::string readFile(const Path & path); std::string readFile(const std::filesystem::path & path); void readFile(const Path & path, Sink & sink, bool memory_map = true); +enum struct FsSync { Yes, No }; + /** * Write a string to a file. */ -void writeFile(const Path & path, std::string_view s, mode_t mode = 0666, bool sync = false); -static inline void writeFile(const std::filesystem::path & path, std::string_view s, mode_t mode = 0666, bool sync = false) +void writeFile(const Path & path, std::string_view s, mode_t mode = 0666, FsSync sync = FsSync::No); + +static inline void writeFile(const std::filesystem::path & path, std::string_view s, mode_t mode = 0666, FsSync sync = FsSync::No) { return writeFile(path.string(), s, mode, sync); } -void writeFile(const Path & path, Source & source, mode_t mode = 0666, bool sync = false); -static inline void writeFile(const std::filesystem::path & path, Source & source, mode_t mode = 0666, bool sync = false) +void writeFile(const Path & path, Source & source, mode_t mode = 0666, FsSync sync = FsSync::No); + +static inline void writeFile(const std::filesystem::path & path, Source & source, mode_t mode = 0666, FsSync sync = FsSync::No) { return writeFile(path.string(), source, mode, sync); } +void writeFile(AutoCloseFD & fd, const Path & origPath, std::string_view s, mode_t mode = 0666, FsSync sync = FsSync::No); + /** * Flush a path's parent directory to disk. 
*/ diff --git a/src/libutil/include/nix/util/forwarding-source-accessor.hh b/src/libutil/include/nix/util/forwarding-source-accessor.hh index cfa5ff9b8..bdba2addc 100644 --- a/src/libutil/include/nix/util/forwarding-source-accessor.hh +++ b/src/libutil/include/nix/util/forwarding-source-accessor.hh @@ -52,16 +52,6 @@ struct ForwardingSourceAccessor : SourceAccessor { return next->getPhysicalPath(path); } - - std::optional getFingerprint(const CanonPath & path) override - { - return next->getFingerprint(path); - } - - void setFingerprint(std::string fingerprint) override - { - next->setFingerprint(std::move(fingerprint)); - } }; } diff --git a/src/libutil/include/nix/util/source-accessor.hh b/src/libutil/include/nix/util/source-accessor.hh index 560e1fda0..4084b3bdc 100644 --- a/src/libutil/include/nix/util/source-accessor.hh +++ b/src/libutil/include/nix/util/source-accessor.hh @@ -177,28 +177,32 @@ struct SourceAccessor : std::enable_shared_from_this SymlinkResolution mode = SymlinkResolution::Full); /** - * Return a string that uniquely represents the contents of this - * accessor. This is used for caching lookups (see - * `fetchToStore()`). - * - * Fingerprints are generally for the entire accessor, but this - * method takes a `path` argument to support accessors like - * `MountedSourceAccessor` that combine multiple underlying - * accessors. A fingerprint should only be returned if it uniquely - * represents everything under `path`. + * A string that uniquely represents the contents of this + * accessor. This is used for caching lookups (see `fetchToStore()`). */ - virtual std::optional getFingerprint(const CanonPath & path) - { - return _fingerprint; - } + std::optional fingerprint; - virtual void setFingerprint(std::string fingerprint) + /** + * Return the fingerprint for `path`. This is usually the + * fingerprint of the current accessor, but for composite + * accessors (like `MountedSourceAccessor`), we want to return the + * fingerprint of the "inner" accessor if the current one lacks a + * fingerprint. + * + * So this method is intended to return the most-outer accessor + * that has a fingerprint for `path`. It also returns the path that `path` + * corresponds to in that accessor. + * + * For example: in a `MountedSourceAccessor` that has + * `/nix/store/foo` mounted, + * `getFingerprint("/nix/store/foo/bar")` will return the path + * `/bar` and the fingerprint of the `/nix/store/foo` accessor. + */ + virtual std::pair> getFingerprint(const CanonPath & path) { - _fingerprint = std::move(fingerprint); + return {path, fingerprint}; } - std::optional _fingerprint; - /** * Return the maximum last-modified time of the files in this * tree, if available. diff --git a/src/libutil/memory-source-accessor.cc b/src/libutil/memory-source-accessor.cc index 7764ff946..5612c9454 100644 --- a/src/libutil/memory-source-accessor.cc +++ b/src/libutil/memory-source-accessor.cc @@ -187,6 +187,10 @@ void MemorySink::createSymlink(const CanonPath & path, const std::string & targe ref makeEmptySourceAccessor() { static auto empty = make_ref().cast(); + /* Don't forget to clear the display prefix, as the default constructed + SourceAccessor has the «unknown» prefix. Since this accessor is supposed + to mimic an empty root directory the prefix needs to be empty. 
+    */
+    empty->setPathDisplay("");
     return empty;
 }
diff --git a/src/libutil/mounted-source-accessor.cc b/src/libutil/mounted-source-accessor.cc
index 9292291c1..ed62fd2a3 100644
--- a/src/libutil/mounted-source-accessor.cc
+++ b/src/libutil/mounted-source-accessor.cc
@@ -91,12 +91,11 @@ struct MountedSourceAccessorImpl : MountedSourceAccessor
         return nullptr;
     }
 
-    std::optional<std::string> getFingerprint(const CanonPath & path) override
+    std::pair<CanonPath, std::optional<std::string>> getFingerprint(const CanonPath & path) override
     {
+        if (fingerprint)
+            return {path, fingerprint};
         auto [accessor, subpath] = resolve(path);
-        // FIXME: check that there are no mounts underneath the mount
-        // point of `accessor`, since that would invalidate the
-        // fingerprint. (However we don't have such at the moment.)
         return accessor->getFingerprint(subpath);
     }
 };
diff --git a/src/libutil/union-source-accessor.cc b/src/libutil/union-source-accessor.cc
index 9950f6049..69cf04c18 100644
--- a/src/libutil/union-source-accessor.cc
+++ b/src/libutil/union-source-accessor.cc
@@ -72,6 +72,18 @@ struct UnionSourceAccessor : SourceAccessor
         }
         return std::nullopt;
     }
+
+    std::pair<CanonPath, std::optional<std::string>> getFingerprint(const CanonPath & path) override
+    {
+        if (fingerprint)
+            return {path, fingerprint};
+        for (auto & accessor : accessors) {
+            auto [subpath, fingerprint] = accessor->getFingerprint(path);
+            if (fingerprint)
+                return {subpath, fingerprint};
+        }
+        return {path, std::nullopt};
+    }
 };
 
 ref<SourceAccessor> makeUnionSourceAccessor(std::vector<ref<SourceAccessor>> && accessors)
diff --git a/tests/functional/common/init.sh b/tests/functional/common/init.sh
index 6e9bffec5..7f28a09d7 100755
--- a/tests/functional/common/init.sh
+++ b/tests/functional/common/init.sh
@@ -54,6 +54,7 @@ flake-registry = $TEST_ROOT/registry.json
 show-trace = true
 include nix.conf.extra
 trusted-users = $(whoami)
+${_NIX_TEST_EXTRA_CONFIG:-}
 EOF
 cat > "$NIX_CONF_DIR"/nix.conf.extra < "$rootRepo"/submodule/sub.nix
-[[ $(nix eval --json "$flakeref#sub" ) = '"foo"' ]]
-[[ $(nix flake metadata --json "$flakeref" | jq -r .locked.rev) = null ]]
+[[ $(_NIX_TEST_BARF_ON_UNCACHEABLE='' nix eval --json "$flakeref#sub" ) = '"foo"' ]]
+[[ $(_NIX_TEST_BARF_ON_UNCACHEABLE='' nix flake metadata --json "$flakeref" | jq -r .locked.rev) = null ]]
 
 # Test that `nix flake metadata` parses `submodule` correctly.
 cat > "$rootRepo"/flake.nix <&1 | grepQuiet "source path.*cache hit"
+fi
+
 # Check that relative paths are allowed for git flakes.
 # This may change in the future once git submodule support is refined.
 # See: https://discourse.nixos.org/t/57783 and #9708.
@@ -161,7 +168,12 @@ expect 1 nix build -o "$TEST_ROOT/result" "$flake2Dir#bar" --no-update-lock-file
 nix build -o "$TEST_ROOT/result" "$flake2Dir#bar" --commit-lock-file
 [[ -e "$flake2Dir/flake.lock" ]]
 [[ -z $(git -C "$flake2Dir" diff main || echo failed) ]]
-[[ $(jq --indent 0 . < "$flake2Dir/flake.lock") =~ ^'{"nodes":{"flake1":{"locked":{"lastModified":'.*',"narHash":"sha256-'.*'","ref":"refs/heads/master","rev":"'.*'","revCount":2,"type":"git","url":"file:///'.*'"},"original":{"id":"flake1","type":"indirect"}},"root":{"inputs":{"flake1":"flake1"}}},"root":"root","version":7}'$ ]]
< "$flake2Dir/flake.lock") =~ ^'{"nodes":{"flake1":{"locked":{"lastModified":'[0-9]*',"narHash":"sha256-'.*'","ref":"refs/heads/master","rev":"'.*'","revCount":2,"type":"git","url":"file:///'.*'"},"original":{"id":"flake1","type":"indirect"}},"root":{"inputs":{"flake1":"flake1"}}},"root":"root","version":7}'$ ]] +if [[ $(nix config show lazy-trees) = true ]]; then + # Test that `lazy-locks` causes NAR hashes to be omitted from the lock file. + nix flake update --flake "$flake2Dir" --commit-lock-file --lazy-locks + [[ $(jq --indent 0 --compact-output . < "$flake2Dir/flake.lock") =~ ^'{"nodes":{"flake1":{"locked":{"lastModified":'[0-9]*',"ref":"refs/heads/master","rev":"'.*'","revCount":2,"type":"git","url":"file:///'.*'"},"original":{"id":"flake1","type":"indirect"}},"root":{"inputs":{"flake1":"flake1"}}},"root":"root","version":7}'$ ]] +fi # Rerunning the build should not change the lockfile. nix build -o "$TEST_ROOT/result" "$flake2Dir#bar" diff --git a/tests/functional/flakes/mercurial.sh b/tests/functional/flakes/mercurial.sh index b9045bf6b..b6c14fc26 100755 --- a/tests/functional/flakes/mercurial.sh +++ b/tests/functional/flakes/mercurial.sh @@ -27,9 +27,9 @@ nix build -o "$TEST_ROOT/result" "hg+file://$flake2Dir" (! nix flake metadata --json "hg+file://$flake2Dir" | jq -e -r .revision) -nix eval "hg+file://$flake2Dir"#expr +_NIX_TEST_BARF_ON_UNCACHEABLE='' nix eval "hg+file://$flake2Dir"#expr -nix eval "hg+file://$flake2Dir"#expr +_NIX_TEST_BARF_ON_UNCACHEABLE='' nix eval "hg+file://$flake2Dir"#expr (! nix eval "hg+file://$flake2Dir"#expr --no-allow-dirty) diff --git a/tests/functional/flakes/non-flake-inputs.sh b/tests/functional/flakes/non-flake-inputs.sh index f5e12cd01..7e55aca20 100644 --- a/tests/functional/flakes/non-flake-inputs.sh +++ b/tests/functional/flakes/non-flake-inputs.sh @@ -72,7 +72,7 @@ nix build -o "$TEST_ROOT/result" "$flake3Dir#sth" --commit-lock-file nix registry add --registry "$registry" flake3 "git+file://$flake3Dir" -nix build -o "$TEST_ROOT/result" flake3#fnord +_NIX_TEST_BARF_ON_UNCACHEABLE='' nix build -o "$TEST_ROOT/result" flake3#fnord [[ $(cat "$TEST_ROOT/result") = FNORD ]] # Check whether flake input fetching is lazy: flake3#sth does not @@ -82,11 +82,11 @@ clearStore mv "$flake2Dir" "$flake2Dir.tmp" mv "$nonFlakeDir" "$nonFlakeDir.tmp" nix build -o "$TEST_ROOT/result" flake3#sth -(! nix build -o "$TEST_ROOT/result" flake3#xyzzy) -(! nix build -o "$TEST_ROOT/result" flake3#fnord) +(! _NIX_TEST_BARF_ON_UNCACHEABLE='' nix build -o "$TEST_ROOT/result" flake3#xyzzy) +(! _NIX_TEST_BARF_ON_UNCACHEABLE='' nix build -o "$TEST_ROOT/result" flake3#fnord) mv "$flake2Dir.tmp" "$flake2Dir" mv "$nonFlakeDir.tmp" "$nonFlakeDir" -nix build -o "$TEST_ROOT/result" flake3#xyzzy flake3#fnord +_NIX_TEST_BARF_ON_UNCACHEABLE='' nix build -o "$TEST_ROOT/result" flake3#xyzzy flake3#fnord # Make branch "removeXyzzy" where flake3 doesn't have xyzzy anymore git -C "$flake3Dir" checkout -b removeXyzzy diff --git a/tests/functional/flakes/relative-paths-lockfile.sh b/tests/functional/flakes/relative-paths-lockfile.sh index d91aedd16..662c9329c 100644 --- a/tests/functional/flakes/relative-paths-lockfile.sh +++ b/tests/functional/flakes/relative-paths-lockfile.sh @@ -4,6 +4,8 @@ source ./common.sh requireGit +unset _NIX_TEST_BARF_ON_UNCACHEABLE + # Test a "vendored" subflake dependency. This is a relative path flake # which doesn't reference the root flake and has its own lock file. 
 #
diff --git a/tests/functional/flakes/relative-paths.sh b/tests/functional/flakes/relative-paths.sh
index 9d31da0ad..7a76bee1b 100644
--- a/tests/functional/flakes/relative-paths.sh
+++ b/tests/functional/flakes/relative-paths.sh
@@ -69,7 +69,7 @@ git -C "$rootFlake" add flake.nix sub2/flake.nix
 git -C "$rootFlake" add sub2/flake.lock
 
 [[ $(nix eval "$subflake2#y") = 15 ]]
-[[ $(jq --indent 0 . < "$subflake2/flake.lock") =~ ^'{"nodes":{"root":{"inputs":{"root":"root_2","sub1":"sub1"}},"root_2":{"inputs":{"sub0":"sub0"},"locked":{"path":"..","type":"path"},"original":{"path":"..","type":"path"},"parent":[]},"root_3":{"inputs":{"sub0":"sub0_2"},"locked":{"path":"../","type":"path"},"original":{"path":"../","type":"path"},"parent":["sub1"]},"sub0":{"locked":{"path":"sub0","type":"path"},"original":{"path":"sub0","type":"path"},"parent":["root"]},"sub0_2":{"locked":{"path":"sub0","type":"path"},"original":{"path":"sub0","type":"path"},"parent":["sub1","root"]},"sub1":{"inputs":{"root":"root_3"},"locked":{"path":"../sub1","type":"path"},"original":{"path":"../sub1","type":"path"},"parent":[]}},"root":"root","version":7}'$ ]]
+[[ $(jq --indent 0 --compact-output . < "$subflake2/flake.lock") =~ ^'{"nodes":{"root":{"inputs":{"root":"root_2","sub1":"sub1"}},"root_2":{"inputs":{"sub0":"sub0"},"locked":{"path":"..","type":"path"},"original":{"path":"..","type":"path"},"parent":[]},"root_3":{"inputs":{"sub0":"sub0_2"},"locked":{"path":"../","type":"path"},"original":{"path":"../","type":"path"},"parent":["sub1"]},"sub0":{"locked":{"path":"sub0","type":"path"},"original":{"path":"sub0","type":"path"},"parent":["root"]},"sub0_2":{"locked":{"path":"sub0","type":"path"},"original":{"path":"sub0","type":"path"},"parent":["sub1","root"]},"sub1":{"inputs":{"root":"root_3"},"locked":{"path":"../sub1","type":"path"},"original":{"path":"../sub1","type":"path"},"parent":[]}},"root":"root","version":7}'$ ]]
 
 # Make sure there are no content locks for relative path flakes.
 (! grep "$TEST_ROOT" "$subflake2/flake.lock")
diff --git a/tests/functional/package.nix b/tests/functional/package.nix
index 43f2f25a2..799026ebe 100644
--- a/tests/functional/package.nix
+++ b/tests/functional/package.nix
@@ -26,6 +26,9 @@
 
   # For running the functional tests against a different pre-built Nix.
   test-daemon ? null,
+
+  # Whether to run tests with lazy trees enabled.
+  lazyTrees ? false,
 }:
 
 let
@@ -95,6 +98,8 @@ mkMesonDerivation (
     mkdir $out
   '';
 
+  _NIX_TEST_EXTRA_CONFIG = lib.optionalString lazyTrees "lazy-trees = true";
+
   meta = {
     platforms = lib.platforms.unix;
   };
diff --git a/tests/functional/pure-eval.sh b/tests/functional/pure-eval.sh
index 250381099..45a65f9ab 100755
--- a/tests/functional/pure-eval.sh
+++ b/tests/functional/pure-eval.sh
@@ -34,3 +34,15 @@ rm -rf $TEST_ROOT/eval-out
 (! nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ "." = "bla"; }')
 
 (! nix eval --expr '~/foo')
+
+expectStderr 0 nix eval --expr "/some/absolute/path" \
+    | grepQuiet "/some/absolute/path"
+
+expectStderr 0 nix eval --expr "/some/absolute/path" --impure \
+    | grepQuiet "/some/absolute/path"
+
+expectStderr 0 nix eval --expr "some/relative/path" \
+    | grepQuiet "$PWD/some/relative/path"
+
+expectStderr 0 nix eval --expr "some/relative/path" --impure \
+    | grepQuiet "$PWD/some/relative/path"
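
Note for readers of the SourceAccessor changes above: the sketch below is not part of the patch. It only illustrates, under the declarations from src/libutil/include/nix/util/source-accessor.hh in this diff, how a caller such as a fetch-to-store cache might consume the new pair-returning getFingerprint(). makeSourceCacheKey is a hypothetical helper for illustration, not an existing Nix function.

    // Minimal sketch (assumes the SourceAccessor API from this patch);
    // makeSourceCacheKey is hypothetical, not an existing Nix function.
    #include "nix/util/source-accessor.hh"

    #include <optional>
    #include <string>

    using namespace nix;

    // Build a cache key like "<fingerprint>:<subpath>", or return std::nullopt
    // when no accessor in the (possibly composite) chain has a fingerprint
    // covering `path`.
    std::optional<std::string> makeSourceCacheKey(SourceAccessor & accessor, const CanonPath & path)
    {
        // For mounted/union accessors this resolves to the outermost accessor
        // that has a fingerprint for `path` and rewrites `path` relative to it.
        auto [subpath, fingerprint] = accessor.getFingerprint(path);
        if (!fingerprint)
            return std::nullopt; // uncacheable source
        return *fingerprint + ":" + subpath.abs();
    }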
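
A similar sketch for the FsSync change in file-system.hh: the enum replaces the old trailing `bool sync`, so call sites state their intent explicitly. The file paths below are placeholders, not files used by the tests.

    // Minimal sketch (assumes the writeFile()/FsSync declarations from this patch).
    #include "nix/util/file-system.hh"

    using namespace nix;

    void writeFileExamples()
    {
        // `Path` is std::string in Nix; naming the type avoids overload
        // ambiguity with the std::filesystem::path variant.
        Path unsynced = "/tmp/fs-sync-example-plain";
        Path durable = "/tmp/fs-sync-example-durable";

        // Default is FsSync::No, matching the old `sync = false`.
        writeFile(unsynced, "hello\n");

        // What used to be an easy-to-misread trailing `true`.
        writeFile(durable, "hello\n", 0666, FsSync::Yes);
    }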