diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 984f9a9ea..a5005f8a0 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -1,36 +1,54 @@
---
name: Bug report
-about: Create a report to help us improve
+about: Report unexpected or incorrect behaviour
title: ''
labels: bug
assignees: ''
---
-**Describe the bug**
+## Describe the bug
-A clear and concise description of what the bug is.
+
-**Steps To Reproduce**
+## Steps To Reproduce
-1. Go to '...'
-2. Click on '....'
-3. Scroll down to '....'
-4. See error
+
-A clear and concise description of what you expected to happen.
+## Expected behavior
-**`nix-env --version` output**
+
-**Additional context**
+## Metadata
-Add any other context about the problem here.
+
-**Priorities**
+## Additional context
+
+
+
+## Checklist
+
+
+
+- [ ] checked [latest Nix manual] \([source])
+- [ ] checked [open bug issues and pull requests] for possible duplicates
+
+[latest Nix manual]: https://nixos.org/manual/nix/unstable/
+[source]: https://github.com/NixOS/nix/tree/master/doc/manual/source
+[open bug issues and pull requests]: https://github.com/NixOS/nix/labels/bug
+
+---
Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index 42c658b52..c75a46951 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -1,24 +1,39 @@
---
name: Feature request
-about: Suggest an idea for this project
+about: Suggest a new feature
title: ''
labels: feature
assignees: ''
---
-**Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+## Is your feature request related to a problem?
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen.
+
-**Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
+## Proposed solution
-**Additional context**
-Add any other context or screenshots about the feature request here.
+
-**Priorities**
+## Alternative solutions
+
+
+
+## Additional context
+
+
+
+## Checklist
+
+
+
+- [ ] checked [latest Nix manual] \([source])
+- [ ] checked [open feature issues and pull requests] for possible duplicates
+
+[latest Nix manual]: https://nixos.org/manual/nix/unstable/
+[source]: https://github.com/NixOS/nix/tree/master/doc/manual/source
+[open feature issues and pull requests]: https://github.com/NixOS/nix/labels/feature
+
+---
Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).
diff --git a/.github/ISSUE_TEMPLATE/installer.md b/.github/ISSUE_TEMPLATE/installer.md
index 3768a49c9..ed5e1ce87 100644
--- a/.github/ISSUE_TEMPLATE/installer.md
+++ b/.github/ISSUE_TEMPLATE/installer.md
@@ -23,14 +23,25 @@ assignees: ''
Output
-```log
+
-
+```log
```
-## Priorities
+## Checklist
+
+
+
+- [ ] checked [latest Nix manual] \([source])
+- [ ] checked [open installer issues and pull requests] for possible duplicates
+
+[latest Nix manual]: https://nixos.org/manual/nix/unstable/
+[source]: https://github.com/NixOS/nix/tree/master/doc/manual/source
+[open installer issues and pull requests]: https://github.com/NixOS/nix/labels/installer
+
+---
Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).
diff --git a/.github/ISSUE_TEMPLATE/missing_documentation.md b/.github/ISSUE_TEMPLATE/missing_documentation.md
index cf663e28d..6c334b722 100644
--- a/.github/ISSUE_TEMPLATE/missing_documentation.md
+++ b/.github/ISSUE_TEMPLATE/missing_documentation.md
@@ -26,6 +26,6 @@ assignees: ''
[source]: https://github.com/NixOS/nix/tree/master/doc/manual/source
[open documentation issues and pull requests]: https://github.com/NixOS/nix/labels/documentation
-## Priorities
+---
Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 69da87db7..c6843d86f 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -17,10 +17,12 @@ so you understand the process and the expectations.
-->
-# Motivation
+## Motivation
+
-# Context
+## Context
+
@@ -29,7 +31,7 @@ so you understand the process and the expectations.
-# Priorities and Process
+---
Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc).
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 27f60574e..6169c0924 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -7,14 +7,28 @@ on:
permissions: read-all
jobs:
+ eval:
+ runs-on: ubuntu-24.04
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ - uses: cachix/install-nix-action@v30
+ - run: nix --experimental-features 'nix-command flakes' flake show --all-systems --json
tests:
- needs: [check_secrets]
strategy:
fail-fast: false
matrix:
- os: [ubuntu-latest, macos-latest]
- runs-on: ${{ matrix.os }}
+ include:
+ - scenario: on ubuntu
+ runs-on: ubuntu-24.04
+ os: linux
+ - scenario: on macos
+ runs-on: macos-14
+ os: darwin
+ name: tests ${{ matrix.scenario }}
+ runs-on: ${{ matrix.runs-on }}
timeout-minutes: 60
steps:
- uses: actions/checkout@v4
@@ -23,103 +37,54 @@ jobs:
- uses: cachix/install-nix-action@v30
with:
# The sandbox would otherwise be disabled by default on Darwin
- extra_nix_config: "sandbox = true"
- - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- - uses: cachix/cachix-action@v15
- if: needs.check_secrets.outputs.cachix == 'true'
+ extra_nix_config: |
+ sandbox = true
+ max-jobs = 1
+ - uses: DeterminateSystems/magic-nix-cache-action@main
+      # Since Ubuntu 23.10, unprivileged user namespaces are no longer allowed to map to the root user:
+ # https://ubuntu.com/blog/ubuntu-23-10-restricted-unprivileged-user-namespaces
+ - run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0
+ if: matrix.os == 'linux'
+ - run: scripts/build-checks
+ - run: scripts/prepare-installer-for-github-actions
+ - name: Upload installer tarball
+ uses: actions/upload-artifact@v4
with:
- name: '${{ env.CACHIX_NAME }}'
- signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
- authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- - if: matrix.os == 'ubuntu-latest'
- run: |
- free -h
- swapon --show
- swap=$(swapon --show --noheadings | head -n 1 | awk '{print $1}')
- echo "Found swap: $swap"
- sudo swapoff $swap
- # resize it (fallocate)
- sudo fallocate -l 10G $swap
- sudo mkswap $swap
- sudo swapon $swap
- free -h
- (
- while sleep 60; do
- free -h
- done
- ) &
- - run: nix --experimental-features 'nix-command flakes' flake check -L
- - run: nix --experimental-features 'nix-command flakes' flake show --all-systems --json
-
- # Steps to test CI automation in your own fork.
- # Cachix:
- # 1. Sign-up for https://www.cachix.org/
- # 2. Create a cache for $githubuser-nix-install-tests
- # 3. Create a cachix auth token and save it in https://github.com/$githubuser/nix/settings/secrets/actions in "Repository secrets" as CACHIX_AUTH_TOKEN
- # Dockerhub:
- # 1. Sign-up for https://hub.docker.com/
- # 2. Store your dockerhub username as DOCKERHUB_USERNAME in "Repository secrets" of your fork repository settings (https://github.com/$githubuser/nix/settings/secrets/actions)
- # 3. Create an access token in https://hub.docker.com/settings/security and store it as DOCKERHUB_TOKEN in "Repository secrets" of your fork
- check_secrets:
- permissions:
- contents: none
- name: Check Cachix and Docker secrets present for installer tests
- runs-on: ubuntu-latest
- outputs:
- cachix: ${{ steps.secret.outputs.cachix }}
- docker: ${{ steps.secret.outputs.docker }}
- steps:
- - name: Check for secrets
- id: secret
- env:
- _CACHIX_SECRETS: ${{ secrets.CACHIX_SIGNING_KEY }}${{ secrets.CACHIX_AUTH_TOKEN }}
- _DOCKER_SECRETS: ${{ secrets.DOCKERHUB_USERNAME }}${{ secrets.DOCKERHUB_TOKEN }}
- run: |
- echo "::set-output name=cachix::${{ env._CACHIX_SECRETS != '' }}"
- echo "::set-output name=docker::${{ env._DOCKER_SECRETS != '' }}"
-
- installer:
- needs: [tests, check_secrets]
- if: github.event_name == 'push' && needs.check_secrets.outputs.cachix == 'true'
- runs-on: ubuntu-latest
- outputs:
- installerURL: ${{ steps.prepare-installer.outputs.installerURL }}
- steps:
- - uses: actions/checkout@v4
- with:
- fetch-depth: 0
- - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- - uses: cachix/install-nix-action@v30
- with:
- install_url: https://releases.nixos.org/nix/nix-2.20.3/install
- - uses: cachix/cachix-action@v15
- with:
- name: '${{ env.CACHIX_NAME }}'
- signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
- authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- cachixArgs: '-v'
- - id: prepare-installer
- run: scripts/prepare-installer-for-github-actions
+ name: installer-${{matrix.os}}
+ path: out/*
installer_test:
- needs: [installer, check_secrets]
- if: github.event_name == 'push' && needs.check_secrets.outputs.cachix == 'true'
+ needs: [tests]
strategy:
fail-fast: false
matrix:
- os: [ubuntu-latest, macos-latest]
- runs-on: ${{ matrix.os }}
+ include:
+ - scenario: on ubuntu
+ runs-on: ubuntu-24.04
+ os: linux
+ - scenario: on macos
+ runs-on: macos-14
+ os: darwin
+ name: installer test ${{ matrix.scenario }}
+ runs-on: ${{ matrix.runs-on }}
steps:
- uses: actions/checkout@v4
- - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
+ - name: Download installer tarball
+ uses: actions/download-artifact@v4
+ with:
+ name: installer-${{matrix.os}}
+ path: out
+      - name: Serve installer
+ id: serving_installer
+ run: ./scripts/serve-installer-for-github-actions
- uses: cachix/install-nix-action@v30
with:
- install_url: '${{needs.installer.outputs.installerURL}}'
- install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
+ install_url: 'http://localhost:8126/install'
+ install_options: "--tarball-url-prefix http://localhost:8126/"
- run: sudo apt install fish zsh
- if: matrix.os == 'ubuntu-latest'
+ if: matrix.os == 'linux'
- run: brew install fish
- if: matrix.os == 'macos-latest'
+ if: matrix.os == 'darwin'
- run: exec bash -c "nix-instantiate -E 'builtins.currentTime' --eval"
- run: exec sh -c "nix-instantiate -E 'builtins.currentTime' --eval"
- run: exec zsh -c "nix-instantiate -E 'builtins.currentTime' --eval"
@@ -127,32 +92,50 @@ jobs:
- run: exec bash -c "nix-channel --add https://releases.nixos.org/nixos/unstable/nixos-23.05pre466020.60c1d71f2ba nixpkgs"
- run: exec bash -c "nix-channel --update && nix-env -iA nixpkgs.hello && hello"
+ # Steps to test CI automation in your own fork.
+ # 1. Sign-up for https://hub.docker.com/
+ # 2. Store your dockerhub username as DOCKERHUB_USERNAME in "Repository secrets" of your fork repository settings (https://github.com/$githubuser/nix/settings/secrets/actions)
+ # 3. Create an access token in https://hub.docker.com/settings/security and store it as DOCKERHUB_TOKEN in "Repository secrets" of your fork
+ check_secrets:
+ permissions:
+ contents: none
+    name: Check Docker secrets present for Docker image push
+ runs-on: ubuntu-24.04
+ outputs:
+ docker: ${{ steps.secret.outputs.docker }}
+ steps:
+ - name: Check for secrets
+ id: secret
+ env:
+ _DOCKER_SECRETS: ${{ secrets.DOCKERHUB_USERNAME }}${{ secrets.DOCKERHUB_TOKEN }}
+ run: |
+ echo "::set-output name=docker::${{ env._DOCKER_SECRETS != '' }}"
+
docker_push_image:
- needs: [check_secrets, tests]
+ needs: [tests, vm_tests, check_secrets]
permissions:
contents: read
packages: write
if: >-
+ needs.check_secrets.outputs.docker == 'true' &&
github.event_name == 'push' &&
- github.ref_name == 'master' &&
- needs.check_secrets.outputs.cachix == 'true' &&
- needs.check_secrets.outputs.docker == 'true'
- runs-on: ubuntu-latest
+ github.ref_name == 'master'
+ runs-on: ubuntu-24.04
steps:
+ - name: Check for secrets
+ id: secret
+ env:
+ _DOCKER_SECRETS: ${{ secrets.DOCKERHUB_USERNAME }}${{ secrets.DOCKERHUB_TOKEN }}
+ run: |
+ echo "::set-output name=docker::${{ env._DOCKER_SECRETS != '' }}"
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: cachix/install-nix-action@v30
with:
install_url: https://releases.nixos.org/nix/nix-2.20.3/install
- - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
+ - uses: DeterminateSystems/magic-nix-cache-action@main
- run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#nix.version | tr -d \")" >> $GITHUB_ENV
- - uses: cachix/cachix-action@v15
- if: needs.check_secrets.outputs.cachix == 'true'
- with:
- name: '${{ env.CACHIX_NAME }}'
- signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
- authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- run: nix --experimental-features 'nix-command flakes' build .#dockerImage -L
- run: docker load -i ./result/image.tar.gz
- run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:$NIX_VERSION
@@ -189,16 +172,22 @@ jobs:
docker push $IMAGE_ID:master
vm_tests:
- runs-on: ubuntu-22.04
+ runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- - run: nix build -L .#hydraJobs.tests.githubFlakes .#hydraJobs.tests.tarballFlakes .#hydraJobs.tests.functional_user
+ - run: |
+ nix build -L \
+ .#hydraJobs.tests.functional_user \
+ .#hydraJobs.tests.githubFlakes \
+ .#hydraJobs.tests.nix-docker \
+ .#hydraJobs.tests.tarballFlakes \
+ ;
flake_regressions:
needs: vm_tests
- runs-on: ubuntu-22.04
+ runs-on: ubuntu-24.04
steps:
- name: Checkout nix
uses: actions/checkout@v4
@@ -214,4 +203,4 @@ jobs:
path: flake-regressions/tests
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- - run: nix build --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh
+ - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh
diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml
index 34aa4e6bd..23a5d9e51 100644
--- a/.github/workflows/labels.yml
+++ b/.github/workflows/labels.yml
@@ -15,7 +15,7 @@ permissions:
jobs:
labels:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
if: github.repository_owner == 'NixOS'
steps:
- uses: actions/labeler@v5
diff --git a/.gitignore b/.gitignore
index de1183977..337a7c154 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,110 +1,12 @@
-Makefile.config
-perl/Makefile.config
-
-# /
-/aclocal.m4
-/autom4te.cache
-/precompiled-headers.h.gch
-/config.*
-/configure
-/stamp-h1
-/svn-revision
-/libtool
-/config/config.*
# Default meson build dir
/build
-# /doc/manual/
-/doc/manual/*.1
-/doc/manual/*.5
-/doc/manual/*.8
-/doc/manual/generated/*
-/doc/manual/nix.json
-/doc/manual/conf-file.json
-/doc/manual/language.json
-/doc/manual/xp-features.json
-/doc/manual/source/SUMMARY.md
-/doc/manual/source/SUMMARY-rl-next.md
-/doc/manual/source/store/types/*
-!/doc/manual/source/store/types/index.md.in
-/doc/manual/source/command-ref/new-cli
-/doc/manual/source/command-ref/conf-file.md
-/doc/manual/source/command-ref/experimental-features-shortlist.md
-/doc/manual/source/contributing/experimental-feature-descriptions.md
-/doc/manual/source/language/builtins.md
-/doc/manual/source/language/builtin-constants.md
-/doc/manual/source/release-notes/rl-next.md
-
-# /scripts/
-/scripts/nix-profile.sh
-/scripts/nix-profile-daemon.sh
-/scripts/nix-profile.fish
-/scripts/nix-profile-daemon.fish
-
-# /src/libexpr/
-/src/libexpr/lexer-tab.cc
-/src/libexpr/lexer-tab.hh
-/src/libexpr/parser-tab.cc
-/src/libexpr/parser-tab.hh
-/src/libexpr/parser-tab.output
-/src/libexpr/nix.tbl
-/src/libexpr/tests
-/src/libexpr-tests/libnixexpr-tests
-
-# /src/libfetchers
-/src/libfetchers-tests/libnixfetchers-tests
-
-# /src/libflake
-/src/libflake-tests/libnixflake-tests
-
-# /src/libstore/
-*.gen.*
-/src/libstore/tests
-/src/libstore-tests/libnixstore-tests
-
-# /src/libutil/
-/src/libutil/tests
-/src/libutil-tests/libnixutil-tests
-
-/src/nix/nix
-
-/src/nix/generated-doc
-
-# /src/nix-env/
-/src/nix-env/nix-env
-
-# /src/nix-instantiate/
-/src/nix-instantiate/nix-instantiate
-
-# /src/nix-store/
-/src/nix-store/nix-store
-
-/src/nix-prefetch-url/nix-prefetch-url
-
-/src/nix-collect-garbage/nix-collect-garbage
-
-# /src/nix-channel/
-/src/nix-channel/nix-channel
-
-# /src/nix-build/
-/src/nix-build/nix-build
-
-/src/nix-copy-closure/nix-copy-closure
-
-/src/error-demo/error-demo
-
-/src/build-remote/build-remote
-
# /tests/functional/
-/tests/functional/test-tmp
/tests/functional/common/subst-vars.sh
-/tests/functional/result*
/tests/functional/restricted-innocent
-/tests/functional/shell
-/tests/functional/shell.drv
-/tests/functional/repl-result-out
/tests/functional/debugger-test-out
/tests/functional/test-libstoreconsumer/test-libstoreconsumer
+/tests/functional/nix-shell
# /tests/functional/lang/
/tests/functional/lang/*.out
@@ -112,27 +14,9 @@ perl/Makefile.config
/tests/functional/lang/*.err
/tests/functional/lang/*.ast
-/perl/lib/Nix/Config.pm
-/perl/lib/Nix/Store.cc
-
-/misc/systemd/nix-daemon.service
-/misc/systemd/nix-daemon.socket
-/misc/systemd/nix-daemon.conf
-/misc/upstart/nix-daemon.conf
-
outputs/
-*.a
-*.o
-*.o.tmp
-*.so
-*.dylib
-*.dll
-*.exe
-*.dep
*~
-*.pc
-*.plist
# GNU Global
GPATH
@@ -147,8 +31,6 @@ GTAGS
compile_commands.json
*.compile_commands.json
-nix-rust/target
-
result
result-*
@@ -163,3 +45,5 @@ result-*
# Mac OS
.DS_Store
+
+flake-regressions
diff --git a/.mergify.yml b/.mergify.yml
index c297d3d5e..5d2bf8520 100644
--- a/.mergify.yml
+++ b/.mergify.yml
@@ -2,13 +2,11 @@ queue_rules:
- name: default
# all required tests need to go here
merge_conditions:
- - check-success=installer
- - check-success=installer_test (macos-latest)
- - check-success=installer_test (ubuntu-latest)
- - check-success=tests (macos-latest)
- - check-success=tests (ubuntu-latest)
+ - check-success=tests on macos
+ - check-success=tests on ubuntu
+ - check-success=installer test on macos
+ - check-success=installer test on ubuntu
- check-success=vm_tests
- merge_method: rebase
batch_size: 5
pull_request_rules:
@@ -90,3 +88,13 @@ pull_request_rules:
- "2.24-maintenance"
labels:
- merge-queue
+
+ - name: backport patches to 2.25
+ conditions:
+ - label=backport 2.25-maintenance
+ actions:
+ backport:
+ branches:
+ - "2.25-maintenance"
+ labels:
+ - merge-queue
diff --git a/build-utils-meson/threads/meson.build b/build-utils-meson/threads/meson.build
deleted file mode 100644
index 294160de1..000000000
--- a/build-utils-meson/threads/meson.build
+++ /dev/null
@@ -1,6 +0,0 @@
-# This is only conditional to work around
-# https://github.com/mesonbuild/meson/issues/13293. It should be
-# unconditional.
-if not (host_machine.system() == 'windows' and cxx.get_id() == 'gcc')
- deps_private += dependency('threads')
-endif
diff --git a/doc/manual/meson.build b/doc/manual/meson.build
index 3630e2dc8..c4cc9b893 100644
--- a/doc/manual/meson.build
+++ b/doc/manual/meson.build
@@ -199,6 +199,7 @@ nix3_manpages = [
'nix3-build',
'nix3-bundle',
'nix3-config',
+ 'nix3-config-check',
'nix3-config-show',
'nix3-copy',
'nix3-daemon',
@@ -206,8 +207,8 @@ nix3_manpages = [
'nix3-derivation',
'nix3-derivation-show',
'nix3-develop',
- #'nix3-doctor',
'nix3-edit',
+ 'nix3-env-shell',
'nix3-eval',
'nix3-flake-archive',
'nix3-flake-check',
@@ -224,6 +225,7 @@ nix3_manpages = [
'nix3-fmt',
'nix3-hash-file',
'nix3-hash',
+ 'nix3-hash-convert',
'nix3-hash-path',
'nix3-hash-to-base16',
'nix3-hash-to-base32',
@@ -238,6 +240,7 @@ nix3_manpages = [
'nix3-nar-cat',
'nix3-nar-dump-path',
'nix3-nar-ls',
+ 'nix3-nar-pack',
'nix3-nar',
'nix3-path-info',
'nix3-print-dev-env',
@@ -260,7 +263,7 @@ nix3_manpages = [
'nix3-repl',
'nix3-run',
'nix3-search',
- #'nix3-shell',
+ 'nix3-store-add',
'nix3-store-add-file',
'nix3-store-add-path',
'nix3-store-cat',
@@ -270,6 +273,7 @@ nix3_manpages = [
'nix3-store-diff-closures',
'nix3-store-dump-path',
'nix3-store-gc',
+ 'nix3-store-info',
'nix3-store-ls',
'nix3-store-make-content-addressed',
'nix3-store',
diff --git a/doc/manual/package.nix b/doc/manual/package.nix
index 2e6fcede3..f8133f2e1 100644
--- a/doc/manual/package.nix
+++ b/doc/manual/package.nix
@@ -3,7 +3,7 @@
, meson
, ninja
-, lowdown
+, lowdown-unsandboxed
, mdbook
, mdbook-linkcheck
, jq
@@ -42,7 +42,7 @@ mkMesonDerivation (finalAttrs: {
passthru.externalNativeBuildInputs = [
meson
ninja
- (lib.getBin lowdown)
+ (lib.getBin lowdown-unsandboxed)
mdbook
mdbook-linkcheck
jq
diff --git a/doc/manual/rl-next/ignore-local-registries.md b/doc/manual/rl-next/ignore-local-registries.md
new file mode 100644
index 000000000..8d5e333dd
--- /dev/null
+++ b/doc/manual/rl-next/ignore-local-registries.md
@@ -0,0 +1,22 @@
+---
+synopsis: "Flake lock file generation now ignores local registries"
+prs: [12019]
+---
+
+When resolving indirect flake references like `nixpkgs` in `flake.nix` files, Nix will no longer use the system and user flake registries. It will only use the global flake registry and overrides given on the command line via `--override-flake`.
+
+This avoids accidents where users have local registry overrides that map `nixpkgs` to a `path:` flake in the local file system, which then end up in committed lock files pushed to other users.
+
+In the future, we may remove the use of the registry during lock file generation altogether. It's better to explicitly specify the URL of a flake input. For example, instead of
+```nix
+{
+ outputs = { self, nixpkgs }: { ... };
+}
+```
+write
+```nix
+{
+ inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
+ outputs = { self, nixpkgs }: { ... };
+}
+```
diff --git a/doc/manual/rl-next/nix-copy-flags.md b/doc/manual/rl-next/nix-copy-flags.md
new file mode 100644
index 000000000..f5b2b9716
--- /dev/null
+++ b/doc/manual/rl-next/nix-copy-flags.md
@@ -0,0 +1,18 @@
+---
+synopsis: "`nix copy` supports `--profile` and `--out-link`"
+prs: [11657]
+---
+
+The `nix copy` command now has flags `--profile` and `--out-link`, similar to `nix build`. `--profile` makes a profile point to the
+top-level store path, while `--out-link` creates symlinks to the top-level store paths.
+
+For example, when updating the local NixOS system profile from a NixOS system closure on a remote machine, instead of
+```
+# nix copy --from ssh://server $path
+# nix build --profile /nix/var/nix/profiles/system $path
+```
+you can now do
+```
+# nix copy --from ssh://server --profile /nix/var/nix/profiles/system $path
+```
+The advantage is that this avoids a time window where *path* is not a garbage collector root, and so could be deleted by a concurrent `nix store gc` process.
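+
+As a further illustration (not taken from the PR text; the link name is arbitrary), `--out-link` should behave analogously to `nix build --out-link`:
+```
+# nix copy --from ssh://server --out-link ./system $path
+```
+leaving a `./system` symlink that points to the copied top-level store path.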
diff --git a/doc/manual/rl-next/nix-instantiate-raw.md b/doc/manual/rl-next/nix-instantiate-raw.md
new file mode 100644
index 000000000..fb4a72b88
--- /dev/null
+++ b/doc/manual/rl-next/nix-instantiate-raw.md
@@ -0,0 +1,8 @@
+---
+synopsis: "`nix-instantiate --eval` now supports `--raw`"
+prs: [12119]
+---
+
+The `nix-instantiate --eval` command now supports a `--raw` flag. When it is used,
+the evaluation result must be a string, which is printed verbatim, without
+quotation marks or escaping.
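+
+As a minimal sketch of the difference (the expression is an arbitrary example):
+```console
+$ nix-instantiate --eval -E '"Hello, " + "world"'
+"Hello, world"
+$ nix-instantiate --eval --raw -E '"Hello, " + "world"'
+Hello, world
+```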
diff --git a/doc/manual/rl-next/nix-sshopts-parsing.md b/doc/manual/rl-next/nix-sshopts-parsing.md
new file mode 100644
index 000000000..65fe6f562
--- /dev/null
+++ b/doc/manual/rl-next/nix-sshopts-parsing.md
@@ -0,0 +1,21 @@
+---
+synopsis: "Improved `NIX_SSHOPTS` parsing for better SSH option handling"
+issues: [5181]
+prs: [12020]
+---
+
+The parsing of the `NIX_SSHOPTS` environment variable has been improved to handle spaces and quotes correctly.
+Previously, incorrectly split SSH options could cause failures in CLIs like `nix-copy-closure`,
+especially when using complex ssh invocations such as `-o ProxyCommand="ssh -W %h:%p ..."`.
+
+This change introduces a `shellSplitString` function to ensure
+that `NIX_SSHOPTS` is parsed in a manner consistent with shell
+behavior, addressing common parsing errors.
+
+For example, the following now works as expected:
+
+```bash
+export NIX_SSHOPTS='-o ProxyCommand="ssh -W %h:%p ..."'
+```
+
+This update improves the reliability of SSH-related operations using `NIX_SSHOPTS` across Nix CLIs.
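+
+As a sketch of how this combines with a consumer of `NIX_SSHOPTS` such as `nix-copy-closure` (the host names and options below are placeholders):
+
+```bash
+export NIX_SSHOPTS='-o ProxyCommand="ssh -W %h:%p jump.example.org" -p 2222'
+nix-copy-closure --to alice@server.example.org "$(nix-build '<nixpkgs>' -A hello)"
+```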
diff --git a/doc/manual/rl-next/relative-path-flakes.md b/doc/manual/rl-next/relative-path-flakes.md
new file mode 100644
index 000000000..3616f3467
--- /dev/null
+++ b/doc/manual/rl-next/relative-path-flakes.md
@@ -0,0 +1,12 @@
+---
+synopsis: "Support for relative path inputs"
+prs: [10089]
+---
+
+Flakes can now refer to other flakes in the same repository using relative paths, e.g.
+```nix
+inputs.foo.url = "path:./foo";
+```
+uses the flake in the `foo` subdirectory of the referring flake. For more information, see the documentation on [the `path` flake input type](@docroot@/command-ref/new-cli/nix3-flake.md#path-fetcher).
+
+This feature required a change to the lock file format. Previous Nix versions will not be able to use lock files that contain locks for relative path inputs.
diff --git a/doc/manual/source/command-ref/nix-collect-garbage.md b/doc/manual/source/command-ref/nix-collect-garbage.md
index 2136d28e9..763179b8e 100644
--- a/doc/manual/source/command-ref/nix-collect-garbage.md
+++ b/doc/manual/source/command-ref/nix-collect-garbage.md
@@ -36,7 +36,7 @@ Instead, it looks in a few locations, and acts on all profiles it finds there:
>
> Not stable; subject to change
>
- > Do not rely on this functionality; it just exists for migration purposes and is may change in the future.
+ > Do not rely on this functionality; it just exists for migration purposes and may change in the future.
> These deprecated paths remain a private implementation detail of Nix.
`$NIX_STATE_DIR/profiles` and `$NIX_STATE_DIR/profiles/per-user`.
@@ -62,6 +62,15 @@ These options are for deleting old [profiles] prior to deleting unreachable [sto
This is the equivalent of invoking [`nix-env --delete-generations `](@docroot@/command-ref/nix-env/delete-generations.md#generations-time) on each found profile.
See the documentation of that command for additional information about the *period* argument.
+ - [`--max-freed`](#opt-max-freed) *bytes*
+
+
+
+ Keep deleting paths until at least *bytes* bytes have been deleted,
+ then stop. The argument *bytes* can be followed by the
+ multiplicative suffix `K`, `M`, `G` or `T`, denoting KiB, MiB, GiB
+ or TiB units.
+
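+    For instance, to stop once roughly one gibibyte has been freed (one possible invocation, based on the description above):
+
+    ```console
+    $ nix-collect-garbage --max-freed 1G
+    ```
+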
{{#include ./opt-common.md}}
{{#include ./env-common.md}}
diff --git a/doc/manual/source/command-ref/nix-copy-closure.md b/doc/manual/source/command-ref/nix-copy-closure.md
index 5d1307ca6..7a976fbdd 100644
--- a/doc/manual/source/command-ref/nix-copy-closure.md
+++ b/doc/manual/source/command-ref/nix-copy-closure.md
@@ -84,7 +84,7 @@ When using public key authentication, you can avoid typing the passphrase with `
> Copy GNU Hello from a remote machine using a known store path, and run it:
>
> ```shell-session
-> $ storePath="$(nix-instantiate --eval '<nixpkgs>' -I nixpkgs=channel:nixpkgs-unstable -A hello.outPath | tr -d '"')"
+> $ storePath="$(nix-instantiate --eval --raw '<nixpkgs>' -I nixpkgs=channel:nixpkgs-unstable -A hello.outPath)"
> $ nix-copy-closure --from alice@itchy.example.org "$storePath"
> $ "$storePath"/bin/hello
> Hello, world!
diff --git a/doc/manual/source/command-ref/nix-env/install.md b/doc/manual/source/command-ref/nix-env/install.md
index db9c312f6..01d7146be 100644
--- a/doc/manual/source/command-ref/nix-env/install.md
+++ b/doc/manual/source/command-ref/nix-env/install.md
@@ -11,6 +11,7 @@
[`--from-profile` *path*]
[`--preserve-installed` | `-P`]
[`--remove-all` | `-r`]
+ [`--priority` *priority*]
# Description
@@ -61,6 +62,10 @@ The arguments *args* map to store paths in a number of possible ways:
The derivations returned by those function calls are installed.
This allows derivations to be specified in an unambiguous way, which is necessary if there are multiple derivations with the same name.
+- If `--priority` *priority* is given, the priority of the derivations being installed is set to *priority*.
+  This can be used to override the priority the derivations would otherwise have.
+  It is particularly useful if *args* are [store paths], which don't carry any priority information.
+
- If *args* are [store paths] to [derivations](@docroot@/glossary.md#gloss-derivation), then those derivations are [realised], and the resulting output paths are installed.
- If *args* are [store paths] not to derivations, then these are [realised] and installed.
@@ -235,4 +240,3 @@ channel:
```console
$ nix-env --file https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz --install --attr firefox
```
-
diff --git a/doc/manual/source/command-ref/nix-instantiate.md b/doc/manual/source/command-ref/nix-instantiate.md
index 974ba32eb..0ae326538 100644
--- a/doc/manual/source/command-ref/nix-instantiate.md
+++ b/doc/manual/source/command-ref/nix-instantiate.md
@@ -5,7 +5,7 @@
# Synopsis
`nix-instantiate`
- [`--parse` | `--eval` [`--strict`] [`--json`] [`--xml`] ]
+ [`--parse` | `--eval` [`--strict`] [`--raw` | `--json` | `--xml`] ]
[`--read-write-mode`]
[`--arg` *name* *value*]
[{`--attr`| `-A`} *attrPath*]
@@ -102,6 +102,11 @@ standard input.
> This option can cause non-termination, because lazy data
> structures can be infinitely large.
+- `--raw`
+
+ When used with `--eval`, the evaluation result must be a string,
+  which is printed verbatim, without quoting, escaping, or a trailing newline.
+
- `--json`
When used with `--eval`, print the resulting value as an JSON
diff --git a/doc/manual/source/command-ref/nix-shell.md b/doc/manual/source/command-ref/nix-shell.md
index 69a711bd5..e95db9bea 100644
--- a/doc/manual/source/command-ref/nix-shell.md
+++ b/doc/manual/source/command-ref/nix-shell.md
@@ -88,7 +88,9 @@ All options not listed here are passed to `nix-store
cleared before the interactive shell is started, so you get an
environment that more closely corresponds to the “real” Nix build. A
few variables, in particular `HOME`, `USER` and `DISPLAY`, are
- retained.
+ retained. Note that the shell used to run commands is obtained from
+  [`NIX_BUILD_SHELL`](#env-NIX_BUILD_SHELL) / `<nixpkgs>` from
+  `NIX_PATH`, and is therefore not affected by `--pure`.
- `--packages` / `-p` *packages*…
@@ -112,11 +114,30 @@ All options not listed here are passed to `nix-store
# Environment variables
-- `NIX_BUILD_SHELL`
+- [`NIX_BUILD_SHELL`](#env-NIX_BUILD_SHELL)
- Shell used to start the interactive environment. Defaults to the
-  `bash` found in `<nixpkgs>`, falling back to the `bash` found in
- `PATH` if not found.
+ Shell used to start the interactive environment.
+  Defaults to the `bash` from `bashInteractive` found in `<nixpkgs>`, falling back to the `bash` found in `PATH` if not found.
+
+ > **Note**
+ >
+ > The shell obtained using this method may not necessarily be the same as any shells requested in *path*.
+
+
+
+  > **Example**
+ >
+ > Despite `--pure`, this invocation will not result in a fully reproducible shell environment:
+ >
+ > ```nix
+ > #!/usr/bin/env -S nix-shell --pure
+ > let
+ > pkgs = import (fetchTarball "https://github.com/NixOS/nixpkgs/archive/854fdc68881791812eddd33b2fed94b954979a8e.tar.gz") {};
+ > in
+ > pkgs.mkShell {
+  >   buildInputs = [ pkgs.bashInteractive ];
+ > }
+ > ```
{{#include ./env-common.md}}
diff --git a/doc/manual/source/command-ref/nix-store/add-fixed.md b/doc/manual/source/command-ref/nix-store/add-fixed.md
index bebf15026..2ea90a135 100644
--- a/doc/manual/source/command-ref/nix-store/add-fixed.md
+++ b/doc/manual/source/command-ref/nix-store/add-fixed.md
@@ -21,6 +21,9 @@ This operation has the following options:
Use recursive instead of flat hashing mode, used when adding
directories to the store.
+ *paths* that refer to symlinks are not dereferenced, but added to the store
+ as symlinks with the same target.
+
{{#include ./opt-common.md}}
{{#include ../opt-common.md}}
diff --git a/doc/manual/source/command-ref/nix-store/add.md b/doc/manual/source/command-ref/nix-store/add.md
index 87d504cd3..ab4740723 100644
--- a/doc/manual/source/command-ref/nix-store/add.md
+++ b/doc/manual/source/command-ref/nix-store/add.md
@@ -11,6 +11,9 @@
The operation `--add` adds the specified paths to the Nix store. It
prints the resulting paths in the Nix store on standard output.
+*paths* that refer to symlinks are not dereferenced, but added to the store
+as symlinks with the same target.
+
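+> **Example**
+>
+> A small sketch of this behaviour (the hash in the output is a placeholder):
+>
+> ```console
+> $ ln -s /run/current-system self
+> $ nix-store --add self
+> /nix/store/<hash>-self
+> ```
+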
{{#include ./opt-common.md}}
{{#include ../opt-common.md}}
diff --git a/doc/manual/source/development/documentation.md b/doc/manual/source/development/documentation.md
index 2e188f232..30cc8adc4 100644
--- a/doc/manual/source/development/documentation.md
+++ b/doc/manual/source/development/documentation.md
@@ -19,10 +19,11 @@ nix-build -E '(import ./.).packages.${builtins.currentSystem}.nix.doc'
or
```console
-nix build .#nix^doc
+nix build .#nix-manual
```
-and open `./result-doc/share/doc/nix/manual/index.html`.
+and open `./result/share/doc/nix/manual/index.html`.
+
To build the manual incrementally, [enter the development shell](./building.md) and run:
diff --git a/doc/manual/source/development/testing.md b/doc/manual/source/development/testing.md
index 30aa7d0d5..d582ce4b4 100644
--- a/doc/manual/source/development/testing.md
+++ b/doc/manual/source/development/testing.md
@@ -297,7 +297,7 @@ Creating a Cachix cache for your installer tests and adding its authorisation to
- `armv7l-linux`
- `x86_64-darwin`
-- The `installer_test` job (which runs on `ubuntu-latest` and `macos-latest`) will try to install Nix with the cached installer and run a trivial Nix command.
+- The `installer_test` job (which runs on `ubuntu-24.04` and `macos-14`) will try to install Nix with the cached installer and run a trivial Nix command.
### One-time setup
diff --git a/doc/manual/source/installation/installing-docker.md b/doc/manual/source/installation/installing-docker.md
index 6f77d6a57..9354c1a72 100644
--- a/doc/manual/source/installation/installing-docker.md
+++ b/doc/manual/source/installation/installing-docker.md
@@ -57,3 +57,21 @@ $ nix build ./\#hydraJobs.dockerImage.x86_64-linux
$ docker load -i ./result/image.tar.gz
$ docker run -ti nix:2.5pre20211105
```
+
+# Docker image with non-root Nix
+
+If you would like to run Nix in a container under a user other than `root`,
+you can build an image with a non-root single-user installation of Nix
+by specifying the `uid`, `gid`, `uname`, and `gname` arguments to `docker.nix`:
+
+```console
+$ nix build --file docker.nix \
+ --arg uid 1000 \
+ --arg gid 1000 \
+ --argstr uname user \
+ --argstr gname user \
+ --argstr name nix-user \
+ --out-link nix-user.tar.gz
+$ docker load -i nix-user.tar.gz
+$ docker run -ti nix-user
+```
diff --git a/docker.nix b/docker.nix
index bd16b71cd..e2e9da728 100644
--- a/docker.nix
+++ b/docker.nix
@@ -9,6 +9,10 @@
, maxLayers ? 100
, nixConf ? {}
, flake-registry ? null
+, uid ? 0
+, gid ? 0
+, uname ? "root"
+, gname ? "root"
}:
let
defaultPkgs = with pkgs; [
@@ -50,6 +54,15 @@ let
description = "Unprivileged account (don't use!)";
};
+ } // lib.optionalAttrs (uid != 0) {
+ "${uname}" = {
+ uid = uid;
+ shell = "${pkgs.bashInteractive}/bin/bash";
+ home = "/home/${uname}";
+ gid = gid;
+ groups = [ "${gname}" ];
+ description = "Nix user";
+ };
} // lib.listToAttrs (
map
(
@@ -70,6 +83,8 @@ let
root.gid = 0;
nixbld.gid = 30000;
nobody.gid = 65534;
+ } // lib.optionalAttrs (gid != 0) {
+ "${gname}".gid = gid;
};
userToPasswd = (
@@ -150,6 +165,8 @@ let
in
"${n} = ${vStr}") (defaultNixConf // nixConf))) + "\n";
+ userHome = if uid == 0 then "/root" else "/home/${uname}";
+
baseSystem =
let
nixpkgs = pkgs.path;
@@ -237,26 +254,26 @@ let
mkdir -p $out/etc/nix
cat $nixConfContentsPath > $out/etc/nix/nix.conf
- mkdir -p $out/root
- mkdir -p $out/nix/var/nix/profiles/per-user/root
+ mkdir -p $out${userHome}
+ mkdir -p $out/nix/var/nix/profiles/per-user/${uname}
ln -s ${profile} $out/nix/var/nix/profiles/default-1-link
- ln -s $out/nix/var/nix/profiles/default-1-link $out/nix/var/nix/profiles/default
- ln -s /nix/var/nix/profiles/default $out/root/.nix-profile
+ ln -s /nix/var/nix/profiles/default-1-link $out/nix/var/nix/profiles/default
+ ln -s /nix/var/nix/profiles/default $out${userHome}/.nix-profile
- ln -s ${channel} $out/nix/var/nix/profiles/per-user/root/channels-1-link
- ln -s $out/nix/var/nix/profiles/per-user/root/channels-1-link $out/nix/var/nix/profiles/per-user/root/channels
+ ln -s ${channel} $out/nix/var/nix/profiles/per-user/${uname}/channels-1-link
+ ln -s /nix/var/nix/profiles/per-user/${uname}/channels-1-link $out/nix/var/nix/profiles/per-user/${uname}/channels
- mkdir -p $out/root/.nix-defexpr
- ln -s $out/nix/var/nix/profiles/per-user/root/channels $out/root/.nix-defexpr/channels
- echo "${channelURL} ${channelName}" > $out/root/.nix-channels
+ mkdir -p $out${userHome}/.nix-defexpr
+ ln -s /nix/var/nix/profiles/per-user/${uname}/channels $out${userHome}/.nix-defexpr/channels
+ echo "${channelURL} ${channelName}" > $out${userHome}/.nix-channels
mkdir -p $out/bin $out/usr/bin
ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env
ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh
'' + (lib.optionalString (flake-registry-path != null) ''
- nixCacheDir="/root/.cache/nix"
+ nixCacheDir="${userHome}/.cache/nix"
mkdir -p $out$nixCacheDir
globalFlakeRegistryPath="$nixCacheDir/flake-registry.json"
ln -s ${flake-registry-path} $out$globalFlakeRegistryPath
@@ -268,7 +285,7 @@ let
in
pkgs.dockerTools.buildLayeredImageWithNixDb {
- inherit name tag maxLayers;
+ inherit name tag maxLayers uid gid uname gname;
contents = [ baseSystem ];
@@ -279,25 +296,28 @@ pkgs.dockerTools.buildLayeredImageWithNixDb {
fakeRootCommands = ''
chmod 1777 tmp
chmod 1777 var/tmp
+ chown -R ${toString uid}:${toString gid} .${userHome}
+ chown -R ${toString uid}:${toString gid} nix
'';
config = {
- Cmd = [ "/root/.nix-profile/bin/bash" ];
+ Cmd = [ "${userHome}/.nix-profile/bin/bash" ];
+ User = "${toString uid}:${toString gid}";
Env = [
- "USER=root"
+ "USER=${uname}"
"PATH=${lib.concatStringsSep ":" [
- "/root/.nix-profile/bin"
+ "${userHome}/.nix-profile/bin"
"/nix/var/nix/profiles/default/bin"
"/nix/var/nix/profiles/default/sbin"
]}"
"MANPATH=${lib.concatStringsSep ":" [
- "/root/.nix-profile/share/man"
+ "${userHome}/.nix-profile/share/man"
"/nix/var/nix/profiles/default/share/man"
]}"
"SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
"GIT_SSL_CAINFO=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
"NIX_SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
- "NIX_PATH=/nix/var/nix/profiles/per-user/root/channels:/root/.nix-defexpr/channels"
+ "NIX_PATH=/nix/var/nix/profiles/per-user/${uname}/channels:${userHome}/.nix-defexpr/channels"
];
};
diff --git a/flake.lock b/flake.lock
index b5d0b881c..ce484a67a 100644
--- a/flake.lock
+++ b/flake.lock
@@ -3,11 +3,11 @@
"flake-compat": {
"flake": false,
"locked": {
- "lastModified": 1696426674,
- "narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
+ "lastModified": 1733328505,
+ "narHash": "sha256-NeCCThCEP3eCl2l/+27kNNK7QrwZB1IJCrXfrbv5oqU=",
"owner": "edolstra",
"repo": "flake-compat",
- "rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
+ "rev": "ff81ac966bb2cae68946d5ed5fc4994f96d0ffec",
"type": "github"
},
"original": {
@@ -23,11 +23,11 @@
]
},
"locked": {
- "lastModified": 1719994518,
- "narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=",
+ "lastModified": 1733312601,
+ "narHash": "sha256-4pDvzqnegAfRkPwO3wmwBhVi/Sye1mzps0zHWYnP88c=",
"owner": "hercules-ci",
"repo": "flake-parts",
- "rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7",
+ "rev": "205b12d8b7cd4802fbcb8e8ef6a0f1408781a4f9",
"type": "github"
},
"original": {
@@ -48,11 +48,11 @@
]
},
"locked": {
- "lastModified": 1721042469,
- "narHash": "sha256-6FPUl7HVtvRHCCBQne7Ylp4p+dpP3P/OYuzjztZ4s70=",
+ "lastModified": 1734279981,
+ "narHash": "sha256-NdaCraHPp8iYMWzdXAt5Nv6sA3MUzlCiGiR586TCwo0=",
"owner": "cachix",
"repo": "git-hooks.nix",
- "rev": "f451c19376071a90d8c58ab1a953c6e9840527fd",
+ "rev": "aa9f40c906904ebd83da78e7f328cd8aeaeae785",
"type": "github"
},
"original": {
@@ -61,35 +61,18 @@
"type": "github"
}
},
- "libgit2": {
- "flake": false,
- "locked": {
- "lastModified": 1715853528,
- "narHash": "sha256-J2rCxTecyLbbDdsyBWn9w7r3pbKRMkI9E7RvRgAqBdY=",
- "owner": "libgit2",
- "repo": "libgit2",
- "rev": "36f7e21ad757a3dacc58cf7944329da6bc1d6e96",
- "type": "github"
- },
- "original": {
- "owner": "libgit2",
- "ref": "v1.8.1",
- "repo": "libgit2",
- "type": "github"
- }
- },
"nixpkgs": {
"locked": {
- "lastModified": 1723688146,
- "narHash": "sha256-sqLwJcHYeWLOeP/XoLwAtYjr01TISlkOfz+NG82pbdg=",
+ "lastModified": 1734359947,
+ "narHash": "sha256-1Noao/H+N8nFB4Beoy8fgwrcOQLVm9o4zKW1ODaqK9E=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "c3d4ac725177c030b1e289015989da2ad9d56af0",
+ "rev": "48d12d5e70ee91fe8481378e540433a7303dbf6a",
"type": "github"
},
"original": {
"owner": "NixOS",
- "ref": "nixos-24.05",
+ "ref": "release-24.11",
"repo": "nixpkgs",
"type": "github"
}
@@ -131,7 +114,6 @@
"flake-compat": "flake-compat",
"flake-parts": "flake-parts",
"git-hooks-nix": "git-hooks-nix",
- "libgit2": "libgit2",
"nixpkgs": "nixpkgs",
"nixpkgs-23-11": "nixpkgs-23-11",
"nixpkgs-regression": "nixpkgs-regression"
diff --git a/flake.nix b/flake.nix
index 06025e3b7..8edc2266f 100644
--- a/flake.nix
+++ b/flake.nix
@@ -1,11 +1,11 @@
{
description = "The purely functional package manager";
- inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.05";
+ inputs.nixpkgs.url = "github:NixOS/nixpkgs/release-24.11";
+
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446";
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
- inputs.libgit2 = { url = "github:libgit2/libgit2/v1.8.1"; flake = false; };
# dev tooling
inputs.flake-parts.url = "github:hercules-ci/flake-parts";
@@ -18,7 +18,7 @@
inputs.git-hooks-nix.inputs.flake-compat.follows = "";
inputs.git-hooks-nix.inputs.gitignore.follows = "";
- outputs = inputs@{ self, nixpkgs, nixpkgs-regression, libgit2, ... }:
+ outputs = inputs@{ self, nixpkgs, nixpkgs-regression, ... }:
let
@@ -36,7 +36,8 @@
"armv6l-unknown-linux-gnueabihf"
"armv7l-unknown-linux-gnueabihf"
"riscv64-unknown-linux-gnu"
- "x86_64-unknown-netbsd"
+ # Disabled because of https://github.com/NixOS/nixpkgs/issues/344423
+ # "x86_64-unknown-netbsd"
"x86_64-unknown-freebsd"
"x86_64-w64-mingw32"
];
@@ -106,6 +107,7 @@
in {
inherit stdenvs native;
static = native.pkgsStatic;
+ llvm = native.pkgsLLVM;
cross = forAllCrossSystems (crossSystem: make-pkgs crossSystem "stdenv");
});
@@ -124,18 +126,36 @@
# without "polluting" the top level "`pkgs`" attrset.
# This also has the benefit of providing us with a distinct set of packages
# we can iterate over.
- nixComponents = lib.makeScope final.nixDependencies.newScope (import ./packaging/components.nix {
- inherit (final) lib;
- inherit officialRelease;
- src = self;
- });
+ nixComponents =
+ lib.makeScopeWithSplicing'
+ {
+ inherit (final) splicePackages;
+ inherit (final.nixDependencies) newScope;
+ }
+ {
+ otherSplices = final.generateSplicesForMkScope "nixComponents";
+ f = import ./packaging/components.nix {
+ inherit (final) lib;
+ inherit officialRelease;
+ src = self;
+ };
+ };
# The dependencies are in their own scope, so that they don't have to be
# in Nixpkgs top level `pkgs` or `nixComponents`.
- nixDependencies = lib.makeScope final.newScope (import ./packaging/dependencies.nix {
- inherit inputs stdenv;
- pkgs = final;
- });
+ nixDependencies =
+ lib.makeScopeWithSplicing'
+ {
+ inherit (final) splicePackages;
+ inherit (final) newScope; # layered directly on pkgs, unlike nixComponents above
+ }
+ {
+ otherSplices = final.generateSplicesForMkScope "nixDependencies";
+ f = import ./packaging/dependencies.nix {
+ inherit inputs stdenv;
+ pkgs = final;
+ };
+ };
nix = final.nixComponents.nix-cli;
@@ -145,7 +165,6 @@
if prev.stdenv.hostPlatform.system == "i686-linux"
then (prev.pre-commit.override (o: { dotnet-sdk = ""; })).overridePythonAttrs (o: { doCheck = false; })
else prev.pre-commit;
-
};
in {
@@ -168,7 +187,7 @@
};
checks = forAllSystems (system: {
- binaryTarball = self.hydraJobs.binaryTarball.${system};
+ installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system};
installTests = self.hydraJobs.installTests.${system};
nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
rl-next =
@@ -183,11 +202,7 @@
# Some perl dependencies are broken on i686-linux.
# Since the support is only best-effort there, disable the perl
# bindings
-
- # Temporarily disabled because GitHub Actions OOM issues. Once
- # the old build system is gone and we are back to one build
- # system, we should reenable this.
- #perlBindings = self.hydraJobs.perlBindings.${system};
+ perlBindings = self.hydraJobs.perlBindings.${system};
}
# Add "passthru" tests
// flatMapAttrs ({
@@ -219,6 +234,8 @@
inherit (nixpkgsFor.${system}.native)
changelog-d;
default = self.packages.${system}.nix;
+ installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system};
+ binaryTarball = self.hydraJobs.binaryTarball.${system};
# TODO probably should be `nix-cli`
nix = self.packages.${system}.nix-everything;
nix-manual = nixpkgsFor.${system}.native.nixComponents.nix-manual;
@@ -266,6 +283,7 @@
# These attributes go right into `packages.`.
"${pkgName}" = nixpkgsFor.${system}.native.nixComponents.${pkgName};
"${pkgName}-static" = nixpkgsFor.${system}.static.nixComponents.${pkgName};
+ "${pkgName}-llvm" = nixpkgsFor.${system}.llvm.nixComponents.${pkgName};
}
// lib.optionalAttrs supportsCross (flatMapAttrs (lib.genAttrs crossSystems (_: { })) (crossSystem: {}: {
# These attributes go right into `packages.`.
@@ -305,6 +323,9 @@
prefixAttrs "static" (forAllStdenvs (stdenvName: makeShell {
pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsStatic;
})) //
+ prefixAttrs "llvm" (forAllStdenvs (stdenvName: makeShell {
+ pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsLLVM;
+ })) //
prefixAttrs "cross" (forAllCrossSystems (crossSystem: makeShell {
pkgs = nixpkgsFor.${system}.cross.${crossSystem};
}))
diff --git a/m4/gcc_bug_80431.m4 b/m4/gcc_bug_80431.m4
deleted file mode 100644
index cdc4ddb40..000000000
--- a/m4/gcc_bug_80431.m4
+++ /dev/null
@@ -1,66 +0,0 @@
-# Ensure that this bug is not present in the C++ toolchain we are using.
-#
-# URL for bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80431
-#
-# The test program is from that issue, with only a slight modification
-# to set an exit status instead of printing strings.
-AC_DEFUN([ENSURE_NO_GCC_BUG_80431],
-[
- AC_MSG_CHECKING([that GCC bug 80431 is fixed])
- AC_LANG_PUSH(C++)
- AC_RUN_IFELSE(
- [AC_LANG_PROGRAM(
- [[
- #include
-
- static bool a = true;
- static bool b = true;
-
- struct Options { };
-
- struct Option
- {
- Option(Options * options)
- {
- a = false;
- }
-
- ~Option()
- {
- b = false;
- }
- };
-
- struct MyOptions : Options { };
-
- struct MyOptions2 : virtual MyOptions
- {
- Option foo{this};
- };
- ]],
- [[
- {
- MyOptions2 opts;
- }
- return (a << 1) | b;
- ]])],
- [status_80431=0],
- [status_80431=$?],
- [status_80431=''])
- AC_LANG_POP(C++)
- AS_CASE([$status_80431],
- [''],[
- AC_MSG_RESULT(cannot check because cross compiling)
- AC_MSG_NOTICE(assume we are bug free)
- ],
- [0],[
- AC_MSG_RESULT(yes)
- ],
- [2],[
- AC_MSG_RESULT(no)
- AC_MSG_ERROR(Cannot build Nix with C++ compiler with this bug)
- ],
- [
- AC_MSG_RESULT(unexpected result $status_80431: not expected failure with bug, ignoring)
- ])
-])
diff --git a/maintainers/flake-module.nix b/maintainers/flake-module.nix
index fdb031302..fcf370b71 100644
--- a/maintainers/flake-module.nix
+++ b/maintainers/flake-module.nix
@@ -10,8 +10,31 @@
# https://flake.parts/options/git-hooks-nix#options
pre-commit.settings = {
hooks = {
+ # Conflicts are usually found by other checks, but not those in docs,
+ # and potentially other places.
+ check-merge-conflicts.enable = true;
+ # built-in check-merge-conflicts seems ineffective against those produced by mergify backports
+ check-merge-conflicts-2 = {
+ enable = true;
+ entry = "${pkgs.writeScript "check-merge-conflicts" ''
+ #!${pkgs.runtimeShell}
+ conflicts=false
+ for file in "$@"; do
+ if grep --with-filename --line-number -E '^>>>>>>> ' -- "$file"; then
+ conflicts=true
+ fi
+ done
+ if $conflicts; then
+ echo "ERROR: found merge/patch conflicts in files"
+ exit 1
+ fi
+ touch $out
+ ''}";
+ };
clang-format = {
enable = true;
+ # https://github.com/cachix/git-hooks.nix/pull/532
+ package = pkgs.llvmPackages_latest.clang-tools;
excludes = [
# We don't want to format test data
# ''tests/(?!nixos/).*\.nix''
@@ -354,6 +377,7 @@
''^src/libutil/util\.cc$''
''^src/libutil/util\.hh$''
''^src/libutil/variant-wrapper\.hh$''
+ ''^src/libutil/widecharwidth/widechar_width\.h$'' # vendored source
''^src/libutil/windows/file-descriptor\.cc$''
''^src/libutil/windows/file-path\.cc$''
''^src/libutil/windows/processes\.cc$''
@@ -496,7 +520,6 @@
''^scripts/create-darwin-volume\.sh$''
''^scripts/install-darwin-multi-user\.sh$''
''^scripts/install-multi-user\.sh$''
- ''^scripts/install-nix-from-closure\.sh$''
''^scripts/install-systemd-multi-user\.sh$''
''^src/nix/get-env\.sh$''
''^tests/functional/ca/build-dry\.sh$''
diff --git a/meson.build b/meson.build
index 8985b631e..49adf9832 100644
--- a/meson.build
+++ b/meson.build
@@ -34,6 +34,7 @@ endif
subproject('libutil-c')
subproject('libstore-c')
subproject('libexpr-c')
+subproject('libflake-c')
subproject('libmain-c')
# Language Bindings
diff --git a/misc/launchd/meson.build b/misc/launchd/meson.build
new file mode 100644
index 000000000..5168131d1
--- /dev/null
+++ b/misc/launchd/meson.build
@@ -0,0 +1,13 @@
+configure_file(
+ input : 'org.nixos.nix-daemon.plist.in',
+ output : 'org.nixos.nix-daemon.plist',
+ install : true,
+ install_dir : get_option('prefix') / 'Library/LaunchDaemons',
+ install_mode : 'rw-r--r--',
+ configuration : {
+ # TODO: unhardcode paths with something like:
+ # 'storedir' : store_dir,
+ # 'localstatedir' : localstatedir,
+ # 'bindir' : bindir,
+ },
+)
diff --git a/misc/meson.build b/misc/meson.build
index a6d1f944b..82f2b0c65 100644
--- a/misc/meson.build
+++ b/misc/meson.build
@@ -2,4 +2,10 @@ subdir('bash')
subdir('fish')
subdir('zsh')
-subdir('systemd')
+if host_machine.system() == 'linux'
+ subdir('systemd')
+endif
+
+if host_machine.system() == 'darwin'
+ subdir('launchd')
+endif
diff --git a/build-utils-meson/diagnostics/meson.build b/nix-meson-build-support/common/meson.build
similarity index 51%
rename from build-utils-meson/diagnostics/meson.build
rename to nix-meson-build-support/common/meson.build
index 30eedfc13..67b6658f5 100644
--- a/build-utils-meson/diagnostics/meson.build
+++ b/nix-meson-build-support/common/meson.build
@@ -1,3 +1,10 @@
+# This is only conditional to work around
+# https://github.com/mesonbuild/meson/issues/13293. It should be
+# unconditional.
+if not (host_machine.system() == 'windows' and cxx.get_id() == 'gcc')
+ deps_private += dependency('threads')
+endif
+
add_project_arguments(
'-Wdeprecated-copy',
'-Werror=suggest-override',
diff --git a/build-utils-meson/deps-lists/meson.build b/nix-meson-build-support/deps-lists/meson.build
similarity index 100%
rename from build-utils-meson/deps-lists/meson.build
rename to nix-meson-build-support/deps-lists/meson.build
diff --git a/build-utils-meson/export-all-symbols/meson.build b/nix-meson-build-support/export-all-symbols/meson.build
similarity index 100%
rename from build-utils-meson/export-all-symbols/meson.build
rename to nix-meson-build-support/export-all-symbols/meson.build
diff --git a/build-utils-meson/export/meson.build b/nix-meson-build-support/export/meson.build
similarity index 100%
rename from build-utils-meson/export/meson.build
rename to nix-meson-build-support/export/meson.build
diff --git a/build-utils-meson/generate-header/meson.build b/nix-meson-build-support/generate-header/meson.build
similarity index 100%
rename from build-utils-meson/generate-header/meson.build
rename to nix-meson-build-support/generate-header/meson.build
diff --git a/build-utils-meson/libatomic/meson.build b/nix-meson-build-support/libatomic/meson.build
similarity index 100%
rename from build-utils-meson/libatomic/meson.build
rename to nix-meson-build-support/libatomic/meson.build
diff --git a/build-utils-meson/subprojects/meson.build b/nix-meson-build-support/subprojects/meson.build
similarity index 100%
rename from build-utils-meson/subprojects/meson.build
rename to nix-meson-build-support/subprojects/meson.build
diff --git a/build-utils-meson/windows-version/meson.build b/nix-meson-build-support/windows-version/meson.build
similarity index 100%
rename from build-utils-meson/windows-version/meson.build
rename to nix-meson-build-support/windows-version/meson.build
diff --git a/packaging/components.nix b/packaging/components.nix
index c29e04ae9..e1f661be8 100644
--- a/packaging/components.nix
+++ b/packaging/components.nix
@@ -44,6 +44,7 @@ in
nix-expr-tests = callPackage ../src/libexpr-tests/package.nix { };
nix-flake = callPackage ../src/libflake/package.nix { };
+ nix-flake-c = callPackage ../src/libflake-c/package.nix { };
nix-flake-tests = callPackage ../src/libflake-tests/package.nix { };
nix-main = callPackage ../src/libmain/package.nix { };
diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix
index 13766f2c0..08d179b82 100644
--- a/packaging/dependencies.nix
+++ b/packaging/dependencies.nix
@@ -66,10 +66,28 @@ let
mesonLayer = finalAttrs: prevAttrs:
{
+ # NOTE:
+ # As of https://github.com/NixOS/nixpkgs/blob/8baf8241cea0c7b30e0b8ae73474cb3de83c1a30/pkgs/by-name/me/meson/setup-hook.sh#L26,
+ # `mesonBuildType` defaults to `plain` if not specified. We want our Nix-built binaries to be optimized by default.
+ # More on build types here: https://mesonbuild.com/Builtin-options.html#details-for-buildtype.
+ mesonBuildType = "release";
+ # NOTE:
+ # Users who are debugging Nix builds are expected to set the environment variable `mesonBuildType`, per the
+ # guidance in https://github.com/NixOS/nix/blob/8a3fc27f1b63a08ac983ee46435a56cf49ebaf4a/doc/manual/source/development/debugging.md?plain=1#L10.
+ # For this reason, we don't want to refer to `finalAttrs.mesonBuildType` here, but rather use the environment variable.
+ preConfigure = prevAttrs.preConfigure or "" + ''
+ case "$mesonBuildType" in
+ release|minsize) appendToVar mesonFlags "-Db_lto=true" ;;
+ *) appendToVar mesonFlags "-Db_lto=false" ;;
+ esac
+ '';
nativeBuildInputs = [
pkgs.buildPackages.meson
pkgs.buildPackages.ninja
] ++ prevAttrs.nativeBuildInputs or [];
+ mesonCheckFlags = prevAttrs.mesonCheckFlags or [] ++ [
+ "--print-errorlogs"
+ ];
};
mesonBuildLayer = finalAttrs: prevAttrs:
@@ -79,6 +97,12 @@ let
];
separateDebugInfo = !stdenv.hostPlatform.isStatic;
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
+ env = prevAttrs.env or {}
+ // lib.optionalAttrs
+ (stdenv.isLinux
+ && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")
+ && !(stdenv.hostPlatform.useLLVM or false))
+ { LDFLAGS = "-fuse-ld=gold"; };
};
mesonLibraryLayer = finalAttrs: prevAttrs:
@@ -111,14 +135,6 @@ scope: {
requiredSystemFeatures = [ ];
};
- libseccomp = pkgs.libseccomp.overrideAttrs (_: rec {
- version = "2.5.5";
- src = pkgs.fetchurl {
- url = "https://github.com/seccomp/libseccomp/releases/download/v${version}/libseccomp-${version}.tar.gz";
- hash = "sha256-JIosik2bmFiqa69ScSw0r+/PnJ6Ut23OAsHJqiX7M3U=";
- };
- });
-
boehmgc = pkgs.boehmgc.override {
enableLargeConfig = true;
};
@@ -137,8 +153,6 @@ scope: {
});
libgit2 = pkgs.libgit2.overrideAttrs (attrs: {
- src = inputs.libgit2;
- version = inputs.libgit2.lastModifiedDate;
cmakeFlags = attrs.cmakeFlags or []
++ [ "-DUSE_SSH=exec" ];
nativeBuildInputs = attrs.nativeBuildInputs or []
@@ -166,36 +180,6 @@ scope: {
];
});
- busybox-sandbox-shell = pkgs.busybox-sandbox-shell or (pkgs.busybox.override {
- useMusl = true;
- enableStatic = true;
- enableMinimal = true;
- extraConfig = ''
- CONFIG_FEATURE_FANCY_ECHO y
- CONFIG_FEATURE_SH_MATH y
- CONFIG_FEATURE_SH_MATH_64 y
-
- CONFIG_ASH y
- CONFIG_ASH_OPTIMIZE_FOR_SIZE y
-
- CONFIG_ASH_ALIAS y
- CONFIG_ASH_BASH_COMPAT y
- CONFIG_ASH_CMDCMD y
- CONFIG_ASH_ECHO y
- CONFIG_ASH_GETOPTS y
- CONFIG_ASH_INTERNAL_GLOB y
- CONFIG_ASH_JOB_CONTROL y
- CONFIG_ASH_PRINTF y
- CONFIG_ASH_TEST y
- '';
- });
-
- # TODO change in Nixpkgs, Windows works fine. First commit of
- # https://github.com/NixOS/nixpkgs/pull/322977 backported will fix.
- toml11 = pkgs.toml11.overrideAttrs (old: {
- meta.platforms = lib.platforms.all;
- });
-
inherit resolvePath filesetToSource;
mkMesonDerivation =
diff --git a/packaging/everything.nix b/packaging/everything.nix
index 6f6bbc6aa..0b04d2c6d 100644
--- a/packaging/everything.nix
+++ b/packaging/everything.nix
@@ -5,12 +5,10 @@
nix-util,
nix-util-c,
- nix-util-test-support,
nix-util-tests,
nix-store,
nix-store-c,
- nix-store-test-support,
nix-store-tests,
nix-fetchers,
@@ -18,10 +16,10 @@
nix-expr,
nix-expr-c,
- nix-expr-test-support,
nix-expr-tests,
nix-flake,
+ nix-flake-c,
nix-flake-tests,
nix-main,
@@ -38,45 +36,82 @@
nix-external-api-docs,
nix-perl-bindings,
+
+ testers,
+ runCommand,
}:
+let
+ dev = stdenv.mkDerivation (finalAttrs: {
+ name = "nix-${nix-cli.version}-dev";
+ pname = "nix";
+ version = nix-cli.version;
+ dontUnpack = true;
+ dontBuild = true;
+ libs = map lib.getDev [
+ nix-cmd
+ nix-expr
+ nix-expr-c
+ nix-fetchers
+ nix-flake
+ nix-flake-c
+ nix-main
+ nix-main-c
+ nix-store
+ nix-store-c
+ nix-util
+ nix-util-c
+ nix-perl-bindings
+ ];
+ installPhase = ''
+ mkdir -p $out/nix-support
+ echo $libs >> $out/nix-support/propagated-build-inputs
+ '';
+ passthru = {
+ tests = {
+ pkg-config =
+ testers.hasPkgConfigModules {
+ package = finalAttrs.finalPackage;
+ };
+ };
+
+ # If we were to fully emulate output selection here, we'd confuse the Nix CLIs,
+ # because they rely on `drvPath`.
+ dev = finalAttrs.finalPackage.out;
+
+ libs = throw "`nix.dev.libs` is not meant to be used; use `nix.libs` instead.";
+ };
+ meta = {
+ pkgConfigModules = [
+ "nix-cmd"
+ "nix-expr"
+ "nix-expr-c"
+ "nix-fetchers"
+ "nix-flake"
+ "nix-flake-c"
+ "nix-main"
+ "nix-main-c"
+ "nix-store"
+ "nix-store-c"
+ "nix-util"
+ "nix-util-c"
+ ];
+ };
+ });
+ devdoc = buildEnv {
+ name = "nix-${nix-cli.version}-devdoc";
+ paths = [
+ nix-internal-api-docs
+ nix-external-api-docs
+ ];
+ };
+
+in
(buildEnv {
name = "nix-${nix-cli.version}";
paths = [
- nix-util
- nix-util-c
- nix-util-test-support
- nix-util-tests
-
- nix-store
- nix-store-c
- nix-store-test-support
- nix-store-tests
-
- nix-fetchers
- nix-fetchers-tests
-
- nix-expr
- nix-expr-c
- nix-expr-test-support
- nix-expr-tests
-
- nix-flake
- nix-flake-tests
-
- nix-main
- nix-main-c
-
- nix-cmd
-
nix-cli
-
- nix-manual
- nix-internal-api-docs
- nix-external-api-docs
-
- ] ++ lib.optionals (stdenv.buildPlatform.canExecute stdenv.hostPlatform) [
- nix-perl-bindings
+ nix-manual.man
];
meta.mainProgram = "nix";
@@ -85,12 +120,25 @@
doInstallCheck = true;
checkInputs = [
- # Actually run the unit tests too
+ # Make sure the unit tests have passed
nix-util-tests.tests.run
nix-store-tests.tests.run
nix-expr-tests.tests.run
+ nix-fetchers-tests.tests.run
nix-flake-tests.tests.run
- ];
+
+ # Make sure the dev bundle works: force its pkg-config test to build.
+ # (checkInputs entries must be paths, hence the empty runCommand wrapper.)
+ (runCommand "check-pkg-config" { checked = dev.tests.pkg-config; } "mkdir $out")
+ ] ++
+ (if stdenv.buildPlatform.canExecute stdenv.hostPlatform
+ then [
+ # TODO: add perl.tests
+ nix-perl-bindings
+ ]
+ else [
+ nix-perl-bindings
+ ]);
installCheckInputs = [
nix-functional-tests
];
@@ -124,9 +172,31 @@
nix-expr
nix-expr-c
nix-flake
+ nix-flake-c
nix-main
nix-main-c
;
};
+
+ tests = prevAttrs.passthru.tests or {} // {
+ # TODO: create a proper fixpoint and:
+ # pkg-config =
+ # testers.hasPkgConfigModules {
+ # package = finalPackage;
+ # };
+ };
+
+ /**
+ A derivation referencing the `dev` outputs of the Nix libraries.
+ */
+ inherit dev;
+ inherit devdoc;
+ doc = nix-manual;
+ outputs = [ "out" "dev" "devdoc" "doc" ];
+ all = lib.attrValues (lib.genAttrs finalAttrs.passthru.outputs (outName: finalAttrs.finalPackage.${outName}));
+ };
+ meta = prevAttrs.meta // {
+ description = "The Nix package manager";
+ pkgConfigModules = dev.meta.pkgConfigModules;
};
})
diff --git a/packaging/hydra.nix b/packaging/hydra.nix
index 81406a249..5b1e47559 100644
--- a/packaging/hydra.nix
+++ b/packaging/hydra.nix
@@ -18,12 +18,8 @@ let
testNixVersions = pkgs: daemon:
pkgs.nixComponents.nix-functional-tests.override {
- pname =
- "nix-tests"
- + lib.optionalString
- (lib.versionAtLeast daemon.version "2.4pre20211005" &&
- lib.versionAtLeast pkgs.nix.version "2.4pre20211005")
- "-${pkgs.nix.version}-against-${daemon.version}";
+ pname = "nix-daemon-compat-tests";
+ version = "${pkgs.nix.version}-with-daemon-${daemon.version}";
test-daemon = daemon;
};
@@ -127,15 +123,10 @@ in
self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-unknown-linux-gnueabihf"
self.hydraJobs.binaryTarballCross."x86_64-linux"."riscv64-unknown-linux-gnu"
];
- installerScriptForGHA = installScriptFor [
- # Native
- self.hydraJobs.binaryTarball."x86_64-linux"
- self.hydraJobs.binaryTarball."aarch64-darwin"
- # Cross
- self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-unknown-linux-gnueabihf"
- self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-unknown-linux-gnueabihf"
- self.hydraJobs.binaryTarballCross."x86_64-linux"."riscv64-unknown-linux-gnu"
- ];
+
+ installerScriptForGHA = forAllSystems (system: nixpkgsFor.${system}.native.callPackage ../scripts/installer.nix {
+ tarballs = [ self.hydraJobs.binaryTarball.${system} ];
+ });
# docker image with Nix inside
dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage);
diff --git a/scripts/binary-tarball.nix b/scripts/binary-tarball.nix
index 104189b0c..9de90b7fb 100644
--- a/scripts/binary-tarball.nix
+++ b/scripts/binary-tarball.nix
@@ -23,7 +23,7 @@ in
runCommand "nix-binary-tarball-${version}" env ''
cp ${installerClosureInfo}/registration $TMPDIR/reginfo
cp ${./create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
- substitute ${./install-nix-from-closure.sh} $TMPDIR/install \
+ substitute ${./install-nix-from-tarball.sh} $TMPDIR/install \
--subst-var-by nix ${nix} \
--subst-var-by cacert ${cacert}
@@ -65,7 +65,7 @@ runCommand "nix-binary-tarball-${version}" env ''
fn=$out/$dir.tar.xz
mkdir -p $out/nix-support
echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
- tar cvfJ $fn \
+ tar cfJ $fn \
--owner=0 --group=0 --mode=u+rw,uga+r \
--mtime='1970-01-01' \
--absolute-names \
diff --git a/scripts/build-checks b/scripts/build-checks
new file mode 100755
index 000000000..e0ee70631
--- /dev/null
+++ b/scripts/build-checks
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+set -euo pipefail
+system=$(nix eval --raw --impure --expr builtins.currentSystem)
+nix eval --json ".#checks.$system" --apply builtins.attrNames | \
+ jq -r '.[]' | \
+ xargs -P0 -I '{}' sh -c "nix build -L .#checks.$system.{} || { echo 'FAILED: \033[0;31mnix build -L .#checks.$system.{}\\033[0m'; kill 0; }"
diff --git a/scripts/create-darwin-volume.sh b/scripts/create-darwin-volume.sh
index 103e1e391..7a61764d4 100755
--- a/scripts/create-darwin-volume.sh
+++ b/scripts/create-darwin-volume.sh
@@ -463,7 +463,7 @@ EOF
EDITOR="$SCRATCH/ex_cleanroom_wrapper" _sudo "to add nix to fstab" "$@" < "$SCRATCH/dscl.err"; do
+ local err=$?
+ if [[ $err -eq 140 ]] && grep -q "-14988 (eNotYetImplemented)" "$SCRATCH/dscl.err"; then
+ echo "dscl failed with eNotYetImplemented, retrying..."
+ sleep 1
+ continue
+ fi
+ cat "$SCRATCH/dscl.err"
+ return $err
+ done
+}
+
poly_user_hidden_get() {
dsclattr "/Users/$1" "IsHidden"
}
poly_user_hidden_set() {
- _sudo "in order to make $1 a hidden user" \
- /usr/bin/dscl . -create "/Users/$1" "IsHidden" "1"
+ dscl_create "in order to make $1 a hidden user" \
+ "/Users/$1" "IsHidden" "1"
}
poly_user_home_get() {
@@ -161,8 +176,8 @@ poly_user_home_get() {
poly_user_home_set() {
# This can trigger a permission prompt now:
# "Terminal" would like to administer your computer. Administration can include modifying passwords, networking, and system settings.
- _sudo "in order to give $1 a safe home directory" \
- /usr/bin/dscl . -create "/Users/$1" "NFSHomeDirectory" "$2"
+ dscl_create "in order to give $1 a safe home directory" \
+ "/Users/$1" "NFSHomeDirectory" "$2"
}
poly_user_note_get() {
@@ -170,8 +185,8 @@ poly_user_note_get() {
}
poly_user_note_set() {
- _sudo "in order to give $username a useful note" \
- /usr/bin/dscl . -create "/Users/$1" "RealName" "$2"
+ dscl_create "in order to give $1 a useful note" \
+ "/Users/$1" "RealName" "$2"
}
poly_user_shell_get() {
@@ -179,8 +194,8 @@ poly_user_shell_get() {
}
poly_user_shell_set() {
- _sudo "in order to give $1 a safe shell" \
- /usr/bin/dscl . -create "/Users/$1" "UserShell" "$2"
+ dscl_create "in order to give $1 a safe shell" \
+ "/Users/$1" "UserShell" "$2"
}
poly_user_in_group_check() {
diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index a487d459f..f051ccc46 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -56,6 +56,9 @@ readonly NIX_INSTALLED_CACERT="@cacert@"
#readonly NIX_INSTALLED_CACERT="/nix/store/7dxhzymvy330i28ii676fl1pqwcahv2f-nss-cacert-3.49.2"
readonly EXTRACTED_NIX_PATH="$(dirname "$0")"
+# allow overriding the identity change command
+readonly NIX_BECOME=${NIX_BECOME:-sudo}
+
readonly ROOT_HOME=~root
if [ -t 0 ] && [ -z "${NIX_INSTALLER_YES:-}" ]; then
@@ -123,7 +126,7 @@ uninstall_directions() {
cat < "$SCRATCH/.nix-channels"
_sudo "to set up the default system channel (part 1)" \
- install -m 0664 "$SCRATCH/.nix-channels" "$ROOT_HOME/.nix-channels"
+ install -m 0644 "$SCRATCH/.nix-channels" "$ROOT_HOME/.nix-channels"
fi
}
@@ -964,7 +969,7 @@ $NIX_EXTRA_CONF
build-users-group = $NIX_BUILD_GROUP_NAME
EOF
_sudo "to place the default nix daemon configuration (part 2)" \
- install -m 0664 "$SCRATCH/nix.conf" /etc/nix/nix.conf
+ install -m 0644 "$SCRATCH/nix.conf" /etc/nix/nix.conf
}
diff --git a/scripts/install-nix-from-closure.sh b/scripts/install-nix-from-tarball.sh
similarity index 95%
rename from scripts/install-nix-from-closure.sh
rename to scripts/install-nix-from-tarball.sh
index 794622530..8d127a9c5 100644
--- a/scripts/install-nix-from-closure.sh
+++ b/scripts/install-nix-from-tarball.sh
@@ -9,6 +9,8 @@ self="$(dirname "$0")"
nix="@nix@"
cacert="@cacert@"
+# allow overriding the identity change command
+readonly NIX_BECOME="${NIX_BECOME:-sudo}"
if ! [ -e "$self/.reginfo" ]; then
echo "$0: incomplete installer (.reginfo is missing)" >&2
@@ -48,15 +50,14 @@ case "$(uname -s)" in
INSTALL_MODE=no-daemon;;
esac
-# space-separated string
-ACTIONS=
+ACTION=
# handle the command line flags
while [ $# -gt 0 ]; do
case $1 in
--daemon)
INSTALL_MODE=daemon
- ACTIONS="${ACTIONS}install "
+ ACTION=install
;;
--no-daemon)
if [ "$(uname -s)" = "Darwin" ]; then
@@ -64,19 +65,14 @@ while [ $# -gt 0 ]; do
exit 1
fi
INSTALL_MODE=no-daemon
- # intentional tail space
- ACTIONS="${ACTIONS}install "
+ ACTION=install
;;
- # --uninstall)
- # # intentional tail space
- # ACTIONS="${ACTIONS}uninstall "
- # ;;
--yes)
export NIX_INSTALLER_YES=1;;
--no-channel-add)
export NIX_INSTALLER_NO_CHANNEL_ADD=1;;
--daemon-user-count)
- export NIX_USER_COUNT=$2
+ export NIX_USER_COUNT="$2"
shift;;
--no-modify-profile)
NIX_INSTALLER_NO_MODIFY_PROFILE=1;;
@@ -128,7 +124,7 @@ done
if [ "$INSTALL_MODE" = "daemon" ]; then
printf '\e[1;31mSwitching to the Multi-user Installer\e[0m\n'
- exec "$self/install-multi-user" $ACTIONS # let ACTIONS split
+ exec "$self/install-multi-user" $ACTION
exit 0
fi
@@ -140,8 +136,8 @@ echo "performing a single-user installation of Nix..." >&2
if ! [ -e "$dest" ]; then
cmd="mkdir -m 0755 $dest && chown $USER $dest"
- echo "directory $dest does not exist; creating it by running '$cmd' using sudo" >&2
- if ! sudo sh -c "$cmd"; then
+ echo "directory $dest does not exist; creating it by running '$cmd' using $NIX_BECOME" >&2
+ if ! $NIX_BECOME sh -c "$cmd"; then
echo "$0: please manually run '$cmd' as root to create $dest" >&2
exit 1
fi
diff --git a/scripts/prepare-installer-for-github-actions b/scripts/prepare-installer-for-github-actions
index 4b994a753..0fbecf25c 100755
--- a/scripts/prepare-installer-for-github-actions
+++ b/scripts/prepare-installer-for-github-actions
@@ -1,10 +1,11 @@
#!/usr/bin/env bash
-set -e
+set -euo pipefail
-script=$(nix-build -A outputs.hydraJobs.installerScriptForGHA --no-out-link)
-installerHash=$(echo "$script" | cut -b12-43 -)
+nix build -L ".#installerScriptForGHA" ".#binaryTarball"
-installerURL=https://$CACHIX_NAME.cachix.org/serve/$installerHash/install
-
-echo "::set-output name=installerURL::$installerURL"
+mkdir -p out
+cp ./result/install "out/install"
+name="$(basename "$(realpath ./result-1)")"
+# everything before the first dash
+cp -r ./result-1 "out/${name%%-*}"
diff --git a/scripts/sequoia-nixbld-user-migration.sh b/scripts/sequoia-nixbld-user-migration.sh
index 88e801706..58b5fea64 100755
--- a/scripts/sequoia-nixbld-user-migration.sh
+++ b/scripts/sequoia-nixbld-user-migration.sh
@@ -2,6 +2,9 @@
set -eo pipefail
+# stock path to avoid unexpected command versions
+PATH="$(/usr/bin/getconf PATH)"
+
((NEW_NIX_FIRST_BUILD_UID=351))
((TEMP_NIX_FIRST_BUILD_UID=31000))
diff --git a/scripts/serve-installer-for-github-actions b/scripts/serve-installer-for-github-actions
new file mode 100755
index 000000000..2efd2aa32
--- /dev/null
+++ b/scripts/serve-installer-for-github-actions
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+if [[ ! -d out ]]; then
+ echo "run prepare-installer-for-github-actions first"
+ exit 1
+fi
+cd out
+PORT=${PORT:-8126}
+nohup python -m http.server "$PORT" >/dev/null 2>&1 &
+pid=$!
+
+while ! curl -s "http://localhost:$PORT"; do
+ sleep 1
+ if ! kill -0 $pid; then
+ echo "Failed to start http server"
+ exit 1
+ fi
+done
+
+echo 'To install nix, run the following command:'
+echo "sh <(curl http://localhost:$PORT/install) --tarball-url-prefix http://localhost:$PORT"
diff --git a/src/external-api-docs/doxygen.cfg.in b/src/external-api-docs/doxygen.cfg.in
index 8e235dae5..3af2f5b81 100644
--- a/src/external-api-docs/doxygen.cfg.in
+++ b/src/external-api-docs/doxygen.cfg.in
@@ -40,6 +40,7 @@ GENERATE_LATEX = NO
INPUT = \
@src@/src/libutil-c \
@src@/src/libexpr-c \
+ @src@/src/libflake-c \
@src@/src/libstore-c \
@src@/src/external-api-docs/README.md
diff --git a/src/external-api-docs/package.nix b/src/external-api-docs/package.nix
index 0c592955a..57c5138cf 100644
--- a/src/external-api-docs/package.nix
+++ b/src/external-api-docs/package.nix
@@ -30,6 +30,7 @@ mkMesonDerivation (finalAttrs: {
# Source is not compiled, but still must be available for Doxygen
# to gather comments.
(cpp ../libexpr-c)
+ (cpp ../libflake-c)
(cpp ../libstore-c)
(cpp ../libutil-c)
];
diff --git a/src/libcmd/build-utils-meson b/src/libcmd/build-utils-meson
deleted file mode 120000
index 5fff21bab..000000000
--- a/src/libcmd/build-utils-meson
+++ /dev/null
@@ -1 +0,0 @@
-../../build-utils-meson
\ No newline at end of file
diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc
index 0be51d386..85864401c 100644
--- a/src/libcmd/command.cc
+++ b/src/libcmd/command.cc
@@ -179,30 +179,34 @@ BuiltPathsCommand::BuiltPathsCommand(bool recursive)
void BuiltPathsCommand::run(ref<Store> store, Installables && installables)
{
- BuiltPaths paths;
+ BuiltPaths rootPaths, allPaths;
+
if (all) {
if (installables.size())
throw UsageError("'--all' does not expect arguments");
// XXX: Only uses opaque paths, ignores all the realisations
for (auto & p : store->queryAllValidPaths())
- paths.emplace_back(BuiltPath::Opaque{p});
+ rootPaths.emplace_back(BuiltPath::Opaque{p});
+ allPaths = rootPaths;
} else {
- paths = Installable::toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables);
+ rootPaths = Installable::toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables);
+ allPaths = rootPaths;
+
if (recursive) {
// XXX: This only computes the store path closure, ignoring
// intermediate realisations
StorePathSet pathsRoots, pathsClosure;
- for (auto & root : paths) {
+ for (auto & root : rootPaths) {
auto rootFromThis = root.outPaths();
pathsRoots.insert(rootFromThis.begin(), rootFromThis.end());
}
store->computeFSClosure(pathsRoots, pathsClosure);
for (auto & path : pathsClosure)
- paths.emplace_back(BuiltPath::Opaque{path});
+ allPaths.emplace_back(BuiltPath::Opaque{path});
}
}
- run(store, std::move(paths));
+ run(store, std::move(allPaths), std::move(rootPaths));
}
StorePathsCommand::StorePathsCommand(bool recursive)
@@ -210,10 +214,10 @@ StorePathsCommand::StorePathsCommand(bool recursive)
{
}
-void StorePathsCommand::run(ref<Store> store, BuiltPaths && paths)
+void StorePathsCommand::run(ref<Store> store, BuiltPaths && allPaths, BuiltPaths && rootPaths)
{
StorePathSet storePaths;
- for (auto & builtPath : paths)
+ for (auto & builtPath : allPaths)
for (auto & p : builtPath.outPaths())
storePaths.insert(p);
@@ -245,7 +249,7 @@ void MixProfile::updateProfile(const StorePath & storePath)
{
if (!profile)
return;
- auto store = getStore().dynamic_pointer_cast<LocalFSStore>();
+ auto store = getDstStore().dynamic_pointer_cast<LocalFSStore>();
if (!store)
throw Error("'--profile' is not supported for this Nix store");
auto profile2 = absPath(*profile);
@@ -365,4 +369,31 @@ void MixEnvironment::setEnviron()
return;
}
+void createOutLinks(const std::filesystem::path & outLink, const BuiltPaths & buildables, LocalFSStore & store)
+{
+ for (const auto & [_i, buildable] : enumerate(buildables)) {
+ auto i = _i;
+ std::visit(
+ overloaded{
+ [&](const BuiltPath::Opaque & bo) {
+ auto symlink = outLink;
+ if (i)
+ symlink += fmt("-%d", i);
+ store.addPermRoot(bo.path, absPath(symlink.string()));
+ },
+ [&](const BuiltPath::Built & bfd) {
+ for (auto & output : bfd.outputs) {
+ auto symlink = outLink;
+ if (i)
+ symlink += fmt("-%d", i);
+ if (output.first != "out")
+ symlink += fmt("-%s", output.first);
+ store.addPermRoot(output.second, absPath(symlink.string()));
+ }
+ },
+ },
+ buildable.raw());
+ }
+}
+
}
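The new `createOutLinks` helper above centralizes the naming of `--out-link`-style GC-root symlinks. As a standalone illustration (no store involved, the `result` prefix and the output set are invented for the example), the following sketch reproduces the naming scheme of that loop: an index suffix for every buildable after the first, plus an output-name suffix for outputs other than `out`.

```cpp
#include <filesystem>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Illustration only: mimics the symlink names createOutLinks would derive for
// two buildables, the second of which has outputs "out" and "dev".
int main()
{
    std::filesystem::path outLink = "result";
    // (buildable index, output name) pairs, in the order the loop visits them
    std::vector<std::pair<std::size_t, std::string>> outputs{
        {0, "out"}, {1, "out"}, {1, "dev"}};
    for (const auto & [i, name] : outputs) {
        auto symlink = outLink;
        if (i)
            symlink += "-" + std::to_string(i);
        if (name != "out")
            symlink += "-" + name;
        std::cout << symlink.string() << "\n"; // result, result-1, result-1-dev
    }
}
```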
diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh
index 8da4327c2..23529848f 100644
--- a/src/libcmd/command.hh
+++ b/src/libcmd/command.hh
@@ -18,6 +18,7 @@ extern char ** savedArgv;
class EvalState;
struct Pos;
class Store;
+class LocalFSStore;
static constexpr Command::Category catHelp = -1;
static constexpr Command::Category catSecondary = 100;
@@ -46,7 +47,20 @@ struct StoreCommand : virtual Command
{
StoreCommand();
void run() override;
+
+ /**
+ * Return the default Nix store.
+ */
ref<Store> getStore();
+
+ /**
+ * Return the destination Nix store.
+ */
+ virtual ref<Store> getDstStore()
+ {
+ return getStore();
+ }
+
virtual ref<Store> createStore();
/**
* Main entry point, with a `Store` provided
@@ -69,7 +83,7 @@ struct CopyCommand : virtual StoreCommand
ref<Store> createStore() override;
- ref<Store> getDstStore();
+ ref<Store> getDstStore() override;
};
/**
@@ -239,7 +253,7 @@ public:
BuiltPathsCommand(bool recursive = false);
- virtual void run(ref<Store> store, BuiltPaths && paths) = 0;
+ virtual void run(ref<Store> store, BuiltPaths && allPaths, BuiltPaths && rootPaths) = 0;
void run(ref<Store> store, Installables && installables) override;
@@ -252,7 +266,7 @@ struct StorePathsCommand : public BuiltPathsCommand
virtual void run(ref<Store> store, StorePaths && storePaths) = 0;
- void run(ref<Store> store, BuiltPaths && paths) override;
+ void run(ref<Store> store, BuiltPaths && allPaths, BuiltPaths && rootPaths) override;
};
/**
@@ -354,4 +368,10 @@ std::string showVersions(const std::set<std::string> & versions);
void printClosureDiff(
ref<Store> store, const StorePath & beforePath, const StorePath & afterPath, std::string_view indent);
+/**
+ * Create symlinks prefixed by `outLink` to the store paths in
+ * `buildables`.
+ */
+void createOutLinks(const std::filesystem::path & outLink, const BuiltPaths & buildables, LocalFSStore & store);
+
}
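The new `getDstStore` hook is what lets mixins such as `MixProfile::updateProfile` (see command.cc above) act on the destination store instead of the source store. Below is a minimal sketch of a command overriding it; the command name, the `dstUri` member, and its `dummy://` default are illustrative only, and registration via `registerCommand` is omitted.

```cpp
#include "command.hh"
#include "store-api.hh"

using namespace nix;

// Hypothetical copy-like command: results should land in dstUri, so
// destination-sensitive helpers (e.g. profile updates) must use getDstStore().
struct CmdExampleCopy : virtual StoreCommand
{
    std::string dstUri = "dummy://"; // illustrative default

    ref<Store> getDstStore() override
    {
        // openStore() is the usual libstore entry point for opening a store
        // from a URI; getStore() keeps returning the source store.
        return openStore(dstUri);
    }

    std::string description() override
    {
        return "example command demonstrating getDstStore()";
    }

    void run(ref<Store> store) override
    {
        // `store` is the source; getDstStore() is where results should go.
    }
};
```

This mirrors what `CopyCommand` already does with its `getDstStore() override` in the hunk above.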
diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc
index ccbf957d9..de967e3fe 100644
--- a/src/libcmd/common-eval-args.cc
+++ b/src/libcmd/common-eval-args.cc
@@ -29,13 +29,13 @@ EvalSettings evalSettings {
{
{
"flake",
- [](ref<Store> store, std::string_view rest) {
+ [](EvalState & state, std::string_view rest) {
experimentalFeatureSettings.require(Xp::Flakes);
// FIXME `parseFlakeRef` should take a `std::string_view`.
auto flakeRef = parseFlakeRef(fetchSettings, std::string { rest }, {}, true, false);
debug("fetching flake search path element '%s''", rest);
- auto storePath = flakeRef.resolve(store).fetchTree(store).first;
- return store->toRealPath(storePath);
+ auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first;
+ return state.rootPath(state.store->toRealPath(storePath));
},
},
},
diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index 227bb64ed..ab3ab3104 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -450,7 +450,7 @@ ref<eval_cache::EvalCache> openEvalCache(
std::shared_ptr<flake::LockedFlake> lockedFlake)
{
auto fingerprint = evalSettings.useEvalCache && evalSettings.pureEval
- ? lockedFlake->getFingerprint(state.store)
+ ? lockedFlake->getFingerprint(state.store, state.fetchSettings)
: std::nullopt;
auto rootLoader = [&state, lockedFlake]()
{
@@ -858,7 +858,7 @@ std::vector<FlakeRef> RawInstallablesCommand::getFlakeRefsForCompletion()
applyDefaultInstallables(rawInstallables);
std::vector<FlakeRef> res;
res.reserve(rawInstallables.size());
- for (auto i : rawInstallables)
+ for (const auto & i : rawInstallables)
res.push_back(parseFlakeRefWithFragment(
fetchSettings,
expandTilde(i),
@@ -918,4 +918,12 @@ void BuiltPathsCommand::applyDefaultInstallables(std::vector<std::string> & rawI
rawInstallables.push_back(".");
}
+BuiltPaths toBuiltPaths(const std::vector<BuiltPathWithResult> & builtPathsWithResult)
+{
+ BuiltPaths res;
+ for (auto & i : builtPathsWithResult)
+ res.push_back(i.path);
+ return res;
+}
+
}
diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh
index 7604c1de7..7bc1c5944 100644
--- a/src/libcmd/installables.hh
+++ b/src/libcmd/installables.hh
@@ -86,6 +86,8 @@ struct BuiltPathWithResult
std::optional<BuildResult> result;
};
+BuiltPaths toBuiltPaths(const std::vector<BuiltPathWithResult> & builtPathsWithResult);
+
/**
* Shorthand, for less typing and helping us keep the choice of
* collection in sync.
diff --git a/src/libcmd/markdown.cc b/src/libcmd/markdown.cc
index 6a0d05d9f..4566e6ba6 100644
--- a/src/libcmd/markdown.cc
+++ b/src/libcmd/markdown.cc
@@ -16,13 +16,25 @@ static std::string doRenderMarkdownToTerminal(std::string_view markdown)
{
int windowWidth = getWindowSize().second;
- struct lowdown_opts opts
- {
- .type = LOWDOWN_TERM,
- .maxdepth = 20,
+#if HAVE_LOWDOWN_1_4
+ struct lowdown_opts_term opts_term {
.cols = (size_t) std::max(windowWidth - 5, 60),
.hmargin = 0,
.vmargin = 0,
+ };
+#endif
+ struct lowdown_opts opts
+ {
+ .type = LOWDOWN_TERM,
+#if HAVE_LOWDOWN_1_4
+ .term = opts_term,
+#endif
+ .maxdepth = 20,
+#if !HAVE_LOWDOWN_1_4
+ .cols = (size_t) std::max(windowWidth - 5, 60),
+ .hmargin = 0,
+ .vmargin = 0,
+#endif
.feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES,
.oflags = LOWDOWN_TERM_NOLINK,
};
diff --git a/src/libcmd/meson.build b/src/libcmd/meson.build
index c484cf998..4145f408a 100644
--- a/src/libcmd/meson.build
+++ b/src/libcmd/meson.build
@@ -4,8 +4,6 @@ project('nix-cmd', 'cpp',
'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level
'warning_level=1',
- 'debug=true',
- 'optimization=2',
'errorlogs=true', # Please print logs for tests that fail
],
meson_version : '>= 1.1',
@@ -14,7 +12,7 @@ project('nix-cmd', 'cpp',
cxx = meson.get_compiler('cpp')
-subdir('build-utils-meson/deps-lists')
+subdir('nix-meson-build-support/deps-lists')
configdata = configuration_data()
@@ -28,9 +26,7 @@ deps_public_maybe_subproject = [
dependency('nix-flake'),
dependency('nix-main'),
]
-subdir('build-utils-meson/subprojects')
-
-subdir('build-utils-meson/threads')
+subdir('nix-meson-build-support/subprojects')
nlohmann_json = dependency('nlohmann_json', version : '>= 3.9')
deps_public += nlohmann_json
@@ -38,6 +34,8 @@ deps_public += nlohmann_json
lowdown = dependency('lowdown', version : '>= 0.9.0', required : get_option('markdown'))
deps_private += lowdown
configdata.set('HAVE_LOWDOWN', lowdown.found().to_int())
+# The API changed slightly around terminal initialization.
+configdata.set('HAVE_LOWDOWN_1_4', lowdown.version().version_compare('>= 1.4.0').to_int())
readline_flavor = get_option('readline-flavor')
if readline_flavor == 'editline'
@@ -72,7 +70,7 @@ add_project_arguments(
language : 'cpp',
)
-subdir('build-utils-meson/diagnostics')
+subdir('nix-meson-build-support/common')
sources = files(
'built-path.cc',
@@ -127,4 +125,4 @@ install_headers(headers, subdir : 'nix', preserve_path : true)
libraries_private = []
-subdir('build-utils-meson/export')
+subdir('nix-meson-build-support/export')
diff --git a/src/libcmd/nix-meson-build-support b/src/libcmd/nix-meson-build-support
new file mode 120000
index 000000000..0b140f56b
--- /dev/null
+++ b/src/libcmd/nix-meson-build-support
@@ -0,0 +1 @@
+../../nix-meson-build-support
\ No newline at end of file
diff --git a/src/libcmd/package.nix b/src/libcmd/package.nix
index 244179ee4..5cafb4dc1 100644
--- a/src/libcmd/package.nix
+++ b/src/libcmd/package.nix
@@ -39,8 +39,8 @@ mkMesonLibrary (finalAttrs: {
workDir = ./.;
fileset = fileset.unions [
- ../../build-utils-meson
- ./build-utils-meson
+ ../../nix-meson-build-support
+ ./nix-meson-build-support
../../.version
./.version
./meson.build
@@ -76,10 +76,6 @@ mkMesonLibrary (finalAttrs: {
(lib.mesonOption "readline-flavor" readlineFlavor)
];
- env = lib.optionalAttrs (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) {
- LDFLAGS = "-fuse-ld=gold";
- };
-
meta = {
platforms = lib.platforms.unix ++ lib.platforms.windows;
};
diff --git a/src/libexpr-c/build-utils-meson b/src/libexpr-c/build-utils-meson
deleted file mode 120000
index 5fff21bab..000000000
--- a/src/libexpr-c/build-utils-meson
+++ /dev/null
@@ -1 +0,0 @@
-../../build-utils-meson
\ No newline at end of file
diff --git a/src/libexpr-c/meson.build b/src/libexpr-c/meson.build
index 4160f0d5a..9487132cf 100644
--- a/src/libexpr-c/meson.build
+++ b/src/libexpr-c/meson.build
@@ -4,8 +4,6 @@ project('nix-expr-c', 'cpp',
'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level
'warning_level=1',
- 'debug=true',
- 'optimization=2',
'errorlogs=true', # Please print logs for tests that fail
],
meson_version : '>= 1.1',
@@ -14,7 +12,7 @@ project('nix-expr-c', 'cpp',
cxx = meson.get_compiler('cpp')
-subdir('build-utils-meson/deps-lists')
+subdir('nix-meson-build-support/deps-lists')
configdata = configuration_data()
@@ -27,9 +25,7 @@ deps_public_maybe_subproject = [
dependency('nix-util-c'),
dependency('nix-store-c'),
]
-subdir('build-utils-meson/subprojects')
-
-subdir('build-utils-meson/threads')
+subdir('nix-meson-build-support/subprojects')
# TODO rename, because it will conflict with downstream projects
configdata.set_quoted('PACKAGE_VERSION', meson.project_version())
@@ -55,7 +51,7 @@ add_project_arguments(
language : 'cpp',
)
-subdir('build-utils-meson/diagnostics')
+subdir('nix-meson-build-support/common')
sources = files(
'nix_api_expr.cc',
@@ -74,8 +70,8 @@ headers = [config_h] + files(
# TODO move this header to libexpr, maybe don't use it in tests?
headers += files('nix_api_expr_internal.h')
-subdir('build-utils-meson/export-all-symbols')
-subdir('build-utils-meson/windows-version')
+subdir('nix-meson-build-support/export-all-symbols')
+subdir('nix-meson-build-support/windows-version')
this_library = library(
'nixexprc',
@@ -91,4 +87,4 @@ install_headers(headers, subdir : 'nix', preserve_path : true)
libraries_private = []
-subdir('build-utils-meson/export')
+subdir('nix-meson-build-support/export')
diff --git a/src/libexpr-c/nix-meson-build-support b/src/libexpr-c/nix-meson-build-support
new file mode 120000
index 000000000..0b140f56b
--- /dev/null
+++ b/src/libexpr-c/nix-meson-build-support
@@ -0,0 +1 @@
+../../nix-meson-build-support
\ No newline at end of file
diff --git a/src/libexpr-c/nix_api_expr.cc b/src/libexpr-c/nix_api_expr.cc
index 333e99460..a024248cd 100644
--- a/src/libexpr-c/nix_api_expr.cc
+++ b/src/libexpr-c/nix_api_expr.cc
@@ -6,6 +6,7 @@
#include "eval-gc.hh"
#include "globals.hh"
#include "eval-settings.hh"
+#include "ref.hh"
#include "nix_api_expr.h"
#include "nix_api_expr_internal.h"
@@ -18,6 +19,29 @@
# include
#endif
+/**
+ * @brief Allocate and initialize using self-reference
+ *
+ * This allows a brace initializer to reference the object being constructed.
+ *
+ * @warning Use with care, as the pointer points to an object that is not fully constructed yet.
+ *
+ * @tparam T Type to allocate
+ * @tparam F A function type for `init`, taking a T* and returning the initializer for T
+ * @param init Function that takes a T* and returns the initializer for T
+ * @return Pointer to allocated and initialized object
+ */
+template<typename T, typename F>
+static T * unsafe_new_with_self(F && init)
+{
+ // Allocate
+ void * p = ::operator new(
+ sizeof(T),
+ static_cast<std::align_val_t>(alignof(T)));
+ // Initialize with placement new
+ return new (p) T(init(static_cast<T *>(p)));
+}
+
nix_err nix_libexpr_init(nix_c_context * context)
{
if (context)
@@ -67,7 +91,7 @@ nix_err nix_value_call_multi(nix_c_context * context, EvalState * state, nix_val
if (context)
context->last_err_code = NIX_OK;
try {
- state->state.callFunction(fn->value, nargs, (nix::Value * *)args, value->value, nix::noPos);
+ state->state.callFunction(fn->value, {(nix::Value * *) args, nargs}, value->value, nix::noPos);
state->state.forceValue(value->value, nix::noPos);
}
NIXC_CATCH_ERRS
@@ -93,7 +117,42 @@ nix_err nix_value_force_deep(nix_c_context * context, EvalState * state, nix_val
NIXC_CATCH_ERRS
}
-EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath_c, Store * store)
+nix_eval_state_builder * nix_eval_state_builder_new(nix_c_context * context, Store * store)
+{
+ if (context)
+ context->last_err_code = NIX_OK;
+ try {
+ return unsafe_new_with_self([&](auto * self) {
+ return nix_eval_state_builder{
+ .store = nix::ref(store->ptr),
+ .settings = nix::EvalSettings{/* &bool */ self->readOnlyMode},
+ .fetchSettings = nix::fetchers::Settings{},
+ .readOnlyMode = true,
+ };
+ });
+ }
+ NIXC_CATCH_ERRS_NULL
+}
+
+void nix_eval_state_builder_free(nix_eval_state_builder * builder)
+{
+ delete builder;
+}
+
+nix_err nix_eval_state_builder_load(nix_c_context * context, nix_eval_state_builder * builder)
+{
+ if (context)
+ context->last_err_code = NIX_OK;
+ try {
+ // TODO: load in one go?
+ builder->settings.readOnlyMode = nix::settings.readOnlyMode;
+ loadConfFile(builder->settings);
+ loadConfFile(builder->fetchSettings);
+ }
+ NIXC_CATCH_ERRS
+}
+
+nix_err nix_eval_state_builder_set_lookup_path(nix_c_context * context, nix_eval_state_builder * builder, const char ** lookupPath_c)
{
if (context)
context->last_err_code = NIX_OK;
@@ -102,28 +161,47 @@ EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath_c
if (lookupPath_c != nullptr)
for (size_t i = 0; lookupPath_c[i] != nullptr; i++)
lookupPath.push_back(lookupPath_c[i]);
+ builder->lookupPath = nix::LookupPath::parse(lookupPath);
+ }
+ NIXC_CATCH_ERRS
+}
- void * p = ::operator new(
- sizeof(EvalState),
- static_cast<std::align_val_t>(alignof(EvalState)));
- auto * p2 = static_cast<EvalState *>(p);
- new (p) EvalState {
- .fetchSettings = nix::fetchers::Settings{},
- .settings = nix::EvalSettings{
- nix::settings.readOnlyMode,
- },
- .state = nix::EvalState(
- nix::LookupPath::parse(lookupPath),
- store->ptr,
- p2->fetchSettings,
- p2->settings),
- };
- loadConfFile(p2->settings);
- return p2;
+EvalState * nix_eval_state_build(nix_c_context * context, nix_eval_state_builder * builder)
+{
+ if (context)
+ context->last_err_code = NIX_OK;
+ try {
+ return unsafe_new_with_self([&](auto * self) {
+ return EvalState{
+ .fetchSettings = std::move(builder->fetchSettings),
+ .settings = std::move(builder->settings),
+ .state = nix::EvalState(
+ builder->lookupPath,
+ builder->store,
+ self->fetchSettings,
+ self->settings),
+ };
+ });
}
NIXC_CATCH_ERRS_NULL
}
+EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath_c, Store * store)
+{
+ auto builder = nix_eval_state_builder_new(context, store);
+ if (builder == nullptr)
+ return nullptr;
+
+ if (nix_eval_state_builder_load(context, builder) != NIX_OK)
+ return nullptr;
+
+ if (nix_eval_state_builder_set_lookup_path(context, builder, lookupPath_c)
+ != NIX_OK)
+ return nullptr;
+
+ return nix_eval_state_build(context, builder);
+}
+
void nix_state_free(EvalState * state)
{
delete state;
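The `unsafe_new_with_self` helper introduced at the top of this file lets a brace initializer refer to sibling members of the object being constructed, which is how `nix_eval_state_builder_new` and `nix_eval_state_build` wire `EvalSettings` up to fields of the surrounding struct. A self-contained sketch of the same pattern, using a made-up `Widget` type:

```cpp
#include <new>

// Same shape as the helper above: allocate raw, suitably aligned storage
// first, so `init` can already point into the not-yet-constructed T.
template<typename T, typename F>
static T * unsafe_new_with_self(F && init)
{
    void * p = ::operator new(sizeof(T), static_cast<std::align_val_t>(alignof(T)));
    return new (p) T(init(static_cast<T *>(p)));
}

struct Widget
{
    bool flag = true;
    bool * flagPtr; // meant to point at this very object's `flag`
};

int main()
{
    Widget * w = unsafe_new_with_self<Widget>([](Widget * self) {
        return Widget{.flag = true, .flagPtr = &self->flag};
    });
    bool ok = (w->flagPtr == &w->flag); // true: the self-reference survived
    w->~Widget();
    ::operator delete(w, static_cast<std::align_val_t>(alignof(Widget)));
    return ok ? 0 : 1;
}
```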
diff --git a/src/libexpr-c/nix_api_expr.h b/src/libexpr-c/nix_api_expr.h
index e680f5ff1..f8d181452 100644
--- a/src/libexpr-c/nix_api_expr.h
+++ b/src/libexpr-c/nix_api_expr.h
@@ -30,6 +30,11 @@ extern "C" {
// cffi start
// Type definitions
+/**
+ * @brief Builder for EvalState
+ */
+typedef struct nix_eval_state_builder nix_eval_state_builder;
+
/**
* @brief Represents a state of the Nix language evaluator.
*
@@ -174,12 +179,70 @@ nix_err nix_value_force(nix_c_context * context, EvalState * state, nix_value *
nix_err nix_value_force_deep(nix_c_context * context, EvalState * state, nix_value * value);
/**
- * @brief Create a new Nix language evaluator state.
+ * @brief Create a new nix_eval_state_builder
+ *
+ * The settings are initialized to their default value.
+ * Values can be sourced elsewhere with nix_eval_state_builder_load.
+ *
+ * @param[out] context Optional, stores error information
+ * @param[in] store The Nix store to use.
+ * @return A new nix_eval_state_builder or NULL on failure.
+ */
+nix_eval_state_builder * nix_eval_state_builder_new(nix_c_context * context, Store * store);
+
+/**
+ * @brief Read settings from the ambient environment
+ *
+ * Settings are sourced from environment variables and configuration files,
+ * as documented in the Nix manual.
+ *
+ * @param[out] context Optional, stores error information
+ * @param[out] builder The builder to modify.
+ * @return NIX_OK if successful, an error code otherwise.
+ */
+nix_err nix_eval_state_builder_load(nix_c_context * context, nix_eval_state_builder * builder);
+
+/**
+ * @brief Set the lookup path for `<...>` expressions
+ *
+ * @param[in] context Optional, stores error information
+ * @param[in] builder The builder to modify.
+ * @param[in] lookupPath Null-terminated array of strings corresponding to entries in NIX_PATH.
+ */
+nix_err nix_eval_state_builder_set_lookup_path(
+ nix_c_context * context, nix_eval_state_builder * builder, const char ** lookupPath);
+
+/**
+ * @brief Create a new Nix language evaluator state
+ *
+ * Remember to call nix_eval_state_builder_free after building the state.
+ *
+ * @param[out] context Optional, stores error information
+ * @param[in] builder The builder to use and free
+ * @return A new Nix state or NULL on failure.
+ * @see nix_eval_state_builder_new, nix_eval_state_builder_free
+ */
+EvalState * nix_eval_state_build(nix_c_context * context, nix_eval_state_builder * builder);
+
+/**
+ * @brief Free a nix_eval_state_builder
+ *
+ * Does not fail.
+ *
+ * @param[in] builder The builder to free.
+ */
+void nix_eval_state_builder_free(nix_eval_state_builder * builder);
+
+/**
+ * @brief Create a new Nix language evaluator state
+ *
+ * For more control, use nix_eval_state_builder
*
* @param[out] context Optional, stores error information
* @param[in] lookupPath Null-terminated array of strings corresponding to entries in NIX_PATH.
* @param[in] store The Nix store to use.
* @return A new Nix state or NULL on failure.
+ * @see nix_eval_state_builder_new
*/
EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath, Store * store);
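Taken together, the builder functions above split `nix_state_create` into explicit steps. A minimal sketch of the intended call sequence follows; it assumes `nix_c_context_create`, `nix_libexpr_init`, `nix_store_open` and the corresponding `*_free` functions from the existing C API headers, and the `nixpkgs=` entry is a placeholder.

```cpp
#include "nix_api_util.h"
#include "nix_api_store.h"
#include "nix_api_expr.h"

int main()
{
    nix_c_context * ctx = nix_c_context_create();
    if (nix_libexpr_init(ctx) != NIX_OK)
        return 1;

    // Assumption: passing NULL for the URI opens the store selected by the
    // ambient configuration; any valid store URI would do for this sketch.
    Store * store = nix_store_open(ctx, NULL, NULL);
    if (!store)
        return 1;

    const char * lookupPath[] = {"nixpkgs=/path/to/nixpkgs", NULL};

    nix_eval_state_builder * builder = nix_eval_state_builder_new(ctx, store);
    if (builder
        && nix_eval_state_builder_load(ctx, builder) == NIX_OK // read config + env
        && nix_eval_state_builder_set_lookup_path(ctx, builder, lookupPath) == NIX_OK) {
        EvalState * state = nix_eval_state_build(ctx, builder);
        if (state) {
            // ... evaluate expressions with `state` ...
            nix_state_free(state);
        }
    }
    nix_eval_state_builder_free(builder); // safe even if builder is NULL

    nix_store_free(store);
    nix_c_context_free(ctx);
    return 0;
}
```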
diff --git a/src/libexpr-c/nix_api_expr_internal.h b/src/libexpr-c/nix_api_expr_internal.h
index 12f24b6eb..f59664011 100644
--- a/src/libexpr-c/nix_api_expr_internal.h
+++ b/src/libexpr-c/nix_api_expr_internal.h
@@ -6,6 +6,17 @@
#include "eval-settings.hh"
#include "attr-set.hh"
#include "nix_api_value.h"
+#include "search-path.hh"
+
+struct nix_eval_state_builder
+{
+ nix::ref<nix::Store> store;
+ nix::EvalSettings settings;
+ nix::fetchers::Settings fetchSettings;
+ nix::LookupPath lookupPath;
+ // TODO: make an EvalSettings setting own this instead?
+ bool readOnlyMode;
+};
struct EvalState
{
diff --git a/src/libexpr-c/nix_api_value.h b/src/libexpr-c/nix_api_value.h
index 8a0813ebe..711b0adbc 100644
--- a/src/libexpr-c/nix_api_value.h
+++ b/src/libexpr-c/nix_api_value.h
@@ -213,7 +213,7 @@ nix_get_string(nix_c_context * context, const nix_value * value, nix_get_string_
/** @brief Get path as string
* @param[out] context Optional, stores error information
* @param[in] value Nix value to inspect
- * @return string
+ * @return string, if the type is NIX_TYPE_PATH
* @return NULL in case of error.
*/
const char * nix_get_path_string(nix_c_context * context, const nix_value * value);
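Since `nix_get_path_string` only yields a result for `NIX_TYPE_PATH` values, callers are expected to check the type first, as the new lookup-path test later in this diff does. A small hedged sketch of that guard:

```cpp
#include "nix_api_util.h"
#include "nix_api_value.h"

// Sketch: return the path string only for path values; for anything else,
// nix_get_path_string would report an error and return NULL anyway.
static const char * pathStringOrNull(nix_c_context * ctx, nix_value * value)
{
    if (nix_get_type(ctx, value) != NIX_TYPE_PATH)
        return NULL;
    return nix_get_path_string(ctx, value);
}
```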
diff --git a/src/libexpr-c/package.nix b/src/libexpr-c/package.nix
index df49a8bdc..5047f3e2e 100644
--- a/src/libexpr-c/package.nix
+++ b/src/libexpr-c/package.nix
@@ -1,5 +1,4 @@
{ lib
-, stdenv
, mkMesonLibrary
, nix-store-c
@@ -20,8 +19,8 @@ mkMesonLibrary (finalAttrs: {
workDir = ./.;
fileset = fileset.unions [
- ../../build-utils-meson
- ./build-utils-meson
+ ../../nix-meson-build-support
+ ./nix-meson-build-support
../../.version
./.version
./meson.build
@@ -47,10 +46,6 @@ mkMesonLibrary (finalAttrs: {
mesonFlags = [
];
- env = lib.optionalAttrs (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) {
- LDFLAGS = "-fuse-ld=gold";
- };
-
meta = {
platforms = lib.platforms.unix ++ lib.platforms.windows;
};
diff --git a/src/libexpr-test-support/build-utils-meson b/src/libexpr-test-support/build-utils-meson
deleted file mode 120000
index 5fff21bab..000000000
--- a/src/libexpr-test-support/build-utils-meson
+++ /dev/null
@@ -1 +0,0 @@
-../../build-utils-meson
\ No newline at end of file
diff --git a/src/libexpr-test-support/meson.build b/src/libexpr-test-support/meson.build
index b9e7f390d..56e814cd1 100644
--- a/src/libexpr-test-support/meson.build
+++ b/src/libexpr-test-support/meson.build
@@ -4,8 +4,6 @@ project('nix-expr-test-support', 'cpp',
'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level
'warning_level=1',
- 'debug=true',
- 'optimization=2',
'errorlogs=true', # Please print logs for tests that fail
],
meson_version : '>= 1.1',
@@ -14,7 +12,7 @@ project('nix-expr-test-support', 'cpp',
cxx = meson.get_compiler('cpp')
-subdir('build-utils-meson/deps-lists')
+subdir('nix-meson-build-support/deps-lists')
deps_private_maybe_subproject = [
]
@@ -24,10 +22,9 @@ deps_public_maybe_subproject = [
dependency('nix-store'),
dependency('nix-store-test-support'),
dependency('nix-expr'),
+ dependency('nix-expr-c'),
]
-subdir('build-utils-meson/subprojects')
-
-subdir('build-utils-meson/threads')
+subdir('nix-meson-build-support/subprojects')
rapidcheck = dependency('rapidcheck')
deps_public += rapidcheck
@@ -41,7 +38,7 @@ add_project_arguments(
language : 'cpp',
)
-subdir('build-utils-meson/diagnostics')
+subdir('nix-meson-build-support/common')
sources = files(
'tests/value/context.cc',
@@ -55,8 +52,8 @@ headers = files(
'tests/value/context.hh',
)
-subdir('build-utils-meson/export-all-symbols')
-subdir('build-utils-meson/windows-version')
+subdir('nix-meson-build-support/export-all-symbols')
+subdir('nix-meson-build-support/windows-version')
this_library = library(
'nix-expr-test-support',
@@ -74,4 +71,4 @@ install_headers(headers, subdir : 'nix', preserve_path : true)
libraries_private = []
-subdir('build-utils-meson/export')
+subdir('nix-meson-build-support/export')
diff --git a/src/libexpr-test-support/nix-meson-build-support b/src/libexpr-test-support/nix-meson-build-support
new file mode 120000
index 000000000..0b140f56b
--- /dev/null
+++ b/src/libexpr-test-support/nix-meson-build-support
@@ -0,0 +1 @@
+../../nix-meson-build-support
\ No newline at end of file
diff --git a/src/libexpr-test-support/package.nix b/src/libexpr-test-support/package.nix
index bcf6118e0..48118fa0c 100644
--- a/src/libexpr-test-support/package.nix
+++ b/src/libexpr-test-support/package.nix
@@ -1,9 +1,9 @@
{ lib
-, stdenv
, mkMesonLibrary
, nix-store-test-support
, nix-expr
+, nix-expr-c
, rapidcheck
@@ -22,8 +22,8 @@ mkMesonLibrary (finalAttrs: {
workDir = ./.;
fileset = fileset.unions [
- ../../build-utils-meson
- ./build-utils-meson
+ ../../nix-meson-build-support
+ ./nix-meson-build-support
../../.version
./.version
./meson.build
@@ -35,6 +35,7 @@ mkMesonLibrary (finalAttrs: {
propagatedBuildInputs = [
nix-store-test-support
nix-expr
+ nix-expr-c
rapidcheck
];
@@ -49,10 +50,6 @@ mkMesonLibrary (finalAttrs: {
mesonFlags = [
];
- env = lib.optionalAttrs (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) {
- LDFLAGS = "-fuse-ld=gold";
- };
-
meta = {
platforms = lib.platforms.unix ++ lib.platforms.windows;
};
diff --git a/src/libexpr-test-support/tests/libexpr.hh b/src/libexpr-test-support/tests/libexpr.hh
index 045607e87..095ea1d0e 100644
--- a/src/libexpr-test-support/tests/libexpr.hh
+++ b/src/libexpr-test-support/tests/libexpr.hh
@@ -40,6 +40,12 @@ namespace nix {
return v;
}
+ Value * maybeThunk(std::string input, bool forceValue = true) {
+ Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root));
+ assert(e);
+ return e->maybeThunk(state, state.baseEnv);
+ }
+
Symbol createSymbol(const char * value) {
return state.symbols.create(value);
}
diff --git a/src/libexpr-tests/build-utils-meson b/src/libexpr-tests/build-utils-meson
deleted file mode 120000
index 5fff21bab..000000000
--- a/src/libexpr-tests/build-utils-meson
+++ /dev/null
@@ -1 +0,0 @@
-../../build-utils-meson
\ No newline at end of file
diff --git a/src/libexpr-tests/error_traces.cc b/src/libexpr-tests/error_traces.cc
index be379a909..2aa13cf62 100644
--- a/src/libexpr-tests/error_traces.cc
+++ b/src/libexpr-tests/error_traces.cc
@@ -691,15 +691,15 @@ namespace nix {
ASSERT_TRACE2("elemAt \"foo\" (-1)",
TypeError,
HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)),
- HintFmt("while evaluating the first argument passed to builtins.elemAt"));
+ HintFmt("while evaluating the first argument passed to 'builtins.elemAt'"));
ASSERT_TRACE1("elemAt [] (-1)",
Error,
- HintFmt("list index %d is out of bounds", -1));
+ HintFmt("'builtins.elemAt' called with index %d on a list of size %d", -1, 0));
ASSERT_TRACE1("elemAt [\"foo\"] 3",
Error,
- HintFmt("list index %d is out of bounds", 3));
+ HintFmt("'builtins.elemAt' called with index %d on a list of size %d", 3, 1));
}
@@ -708,11 +708,11 @@ namespace nix {
ASSERT_TRACE2("head 1",
TypeError,
HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)),
- HintFmt("while evaluating the first argument passed to builtins.elemAt"));
+ HintFmt("while evaluating the first argument passed to 'builtins.head'"));
ASSERT_TRACE1("head []",
Error,
- HintFmt("list index %d is out of bounds", 0));
+ HintFmt("'builtins.head' called on an empty list"));
}
@@ -721,11 +721,11 @@ namespace nix {
ASSERT_TRACE2("tail 1",
TypeError,
HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)),
- HintFmt("while evaluating the first argument passed to builtins.tail"));
+ HintFmt("while evaluating the first argument passed to 'builtins.tail'"));
ASSERT_TRACE1("tail []",
Error,
- HintFmt("'tail' called on an empty list"));
+ HintFmt("'builtins.tail' called on an empty list"));
}
diff --git a/src/libexpr-tests/eval.cc b/src/libexpr-tests/eval.cc
index 93d3f658f..61f6be0db 100644
--- a/src/libexpr-tests/eval.cc
+++ b/src/libexpr-tests/eval.cc
@@ -138,4 +138,27 @@ TEST(nix_isAllowedURI, non_scheme_colon) {
ASSERT_FALSE(isAllowedURI("https://foo/bar:baz", allowed));
}
-} // namespace nix
\ No newline at end of file
+class EvalStateTest : public LibExprTest {};
+
+TEST_F(EvalStateTest, getBuiltins_ok) {
+ auto evaled = maybeThunk("builtins");
+ auto & builtins = state.getBuiltins();
+ ASSERT_TRUE(builtins.type() == nAttrs);
+ ASSERT_EQ(evaled, &builtins);
+}
+
+TEST_F(EvalStateTest, getBuiltin_ok) {
+ auto & builtin = state.getBuiltin("toString");
+ ASSERT_TRUE(builtin.type() == nFunction);
+ // FIXME
+ // auto evaled = maybeThunk("builtins.toString");
+ // ASSERT_EQ(evaled, &builtin);
+ auto & builtin2 = state.getBuiltin("true");
+ ASSERT_EQ(state.forceBool(builtin2, noPos, "in unit test"), true);
+}
+
+TEST_F(EvalStateTest, getBuiltin_fail) {
+ ASSERT_THROW(state.getBuiltin("nonexistent"), EvalError);
+}
+
+} // namespace nix
diff --git a/src/libexpr-tests/meson.build b/src/libexpr-tests/meson.build
index 5a5c9f1d4..667a0d7b7 100644
--- a/src/libexpr-tests/meson.build
+++ b/src/libexpr-tests/meson.build
@@ -4,8 +4,6 @@ project('nix-expr-tests', 'cpp',
'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level
'warning_level=1',
- 'debug=true',
- 'optimization=2',
'errorlogs=true', # Please print logs for tests that fail
],
meson_version : '>= 1.1',
@@ -14,7 +12,7 @@ project('nix-expr-tests', 'cpp',
cxx = meson.get_compiler('cpp')
-subdir('build-utils-meson/deps-lists')
+subdir('nix-meson-build-support/deps-lists')
deps_private_maybe_subproject = [
dependency('nix-expr'),
@@ -23,12 +21,10 @@ deps_private_maybe_subproject = [
]
deps_public_maybe_subproject = [
]
-subdir('build-utils-meson/subprojects')
+subdir('nix-meson-build-support/subprojects')
-subdir('build-utils-meson/threads')
-
-subdir('build-utils-meson/export-all-symbols')
-subdir('build-utils-meson/windows-version')
+subdir('nix-meson-build-support/export-all-symbols')
+subdir('nix-meson-build-support/windows-version')
rapidcheck = dependency('rapidcheck')
deps_private += rapidcheck
@@ -51,7 +47,7 @@ add_project_arguments(
language : 'cpp',
)
-subdir('build-utils-meson/diagnostics')
+subdir('nix-meson-build-support/common')
sources = files(
'derived-path.cc',
diff --git a/src/libexpr-tests/nix-meson-build-support b/src/libexpr-tests/nix-meson-build-support
new file mode 120000
index 000000000..0b140f56b
--- /dev/null
+++ b/src/libexpr-tests/nix-meson-build-support
@@ -0,0 +1 @@
+../../nix-meson-build-support
\ No newline at end of file
diff --git a/src/libexpr-tests/nix_api_expr.cc b/src/libexpr-tests/nix_api_expr.cc
index b37ac44b3..5ed78d2fc 100644
--- a/src/libexpr-tests/nix_api_expr.cc
+++ b/src/libexpr-tests/nix_api_expr.cc
@@ -7,12 +7,49 @@
#include "tests/nix_api_expr.hh"
#include "tests/string_callback.hh"
+#include "file-system.hh"
#include <gmock/gmock.h>
#include <gtest/gtest.h>
namespace nixC {
+TEST_F(nix_api_store_test, nix_eval_state_lookup_path)
+{
+ auto tmpDir = nix::createTempDir();
+ auto delTmpDir = std::make_unique<nix::AutoDelete>(tmpDir, true);
+ auto nixpkgs = tmpDir + "/pkgs";
+ auto nixos = tmpDir + "/cfg";
+ std::filesystem::create_directories(nixpkgs);
+ std::filesystem::create_directories(nixos);
+
+ std::string nixpkgsEntry = "nixpkgs=" + nixpkgs;
+ std::string nixosEntry = "nixos-config=" + nixos;
+ const char * lookupPath[] = {nixpkgsEntry.c_str(), nixosEntry.c_str(), nullptr};
+
+ auto builder = nix_eval_state_builder_new(ctx, store);
+ assert_ctx_ok();
+
+ ASSERT_EQ(NIX_OK, nix_eval_state_builder_set_lookup_path(ctx, builder, lookupPath));
+ assert_ctx_ok();
+
+ auto state = nix_eval_state_build(ctx, builder);
+ assert_ctx_ok();
+
+ nix_eval_state_builder_free(builder);
+
+ Value * value = nix_alloc_value(ctx, state);
+ nix_expr_eval_from_string(ctx, state, "builtins.seq ", ".", value);
+ assert_ctx_ok();
+
+ ASSERT_EQ(nix_get_type(ctx, value), NIX_TYPE_PATH);
+ assert_ctx_ok();
+
+ auto pathStr = nix_get_path_string(ctx, value);
+ assert_ctx_ok();
+ ASSERT_EQ(0, strcmp(pathStr, nixpkgs.c_str()));
+}
+
TEST_F(nix_api_expr_test, nix_expr_eval_from_string)
{
nix_expr_eval_from_string(nullptr, state, "builtins.nixVersion", ".", value);
diff --git a/src/libexpr-tests/package.nix b/src/libexpr-tests/package.nix
index 959d6b84e..a4a3bb0e7 100644
--- a/src/libexpr-tests/package.nix
+++ b/src/libexpr-tests/package.nix
@@ -27,8 +27,8 @@ mkMesonExecutable (finalAttrs: {
workDir = ./.;
fileset = fileset.unions [
- ../../build-utils-meson
- ./build-utils-meson
+ ../../nix-meson-build-support
+ ./nix-meson-build-support
../../.version
./.version
./meson.build
@@ -56,10 +56,6 @@ mkMesonExecutable (finalAttrs: {
mesonFlags = [
];
- env = lib.optionalAttrs (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) {
- LDFLAGS = "-fuse-ld=gold";
- };
-
passthru = {
tests = {
run = runCommand "${finalAttrs.pname}-run" {
diff --git a/src/libexpr-tests/trivial.cc b/src/libexpr-tests/trivial.cc
index e455a571b..d77b4d53b 100644
--- a/src/libexpr-tests/trivial.cc
+++ b/src/libexpr-tests/trivial.cc
@@ -177,6 +177,57 @@ namespace nix {
)
);
+// The following macros ultimately define 48 tests (16 variations on three
+// templates). Each template tests an expression that can be written in 2^4
+// different ways, by making four choices about whether to write a particular
+// attribute path segment as `x.y = ...;` (collapsed) or `x = { y = ...; };`
+// (expanded).
+//
+// The nestedAttrsetMergeXXXX tests check that the expression
+// `{ a.b.c = 1; a.b.d = 2; }` has the same value regardless of how it is
+// expanded. (That exact expression is exercised in test
+// nestedAttrsetMerge0000, because it is fully collapsed. The test
+// nestedAttrsetMerge1001 would instead examine
+// `{ a = { b.c = 1; }; a.b = { d = 2; }; }`.)
+//
+// The nestedAttrsetMergeDupXXXX tests check that the expression
+// `{ a.b.c = 1; a.b.c = 2; }` throws a duplicate attribute error, again
+// regardless of how it is expanded.
+//
+// The nestedAttrsetMergeLetXXXX tests check that the expression
+// `let a.b.c = 1; a.b.d = 2; in a` has the same value regardless of how it is
+// expanded.
+#define X_EXPAND_IF0(k, v) k "." v
+#define X_EXPAND_IF1(k, v) k " = { " v " };"
+#define X4(w, x, y, z) \
+ TEST_F(TrivialExpressionTest, nestedAttrsetMerge##w##x##y##z) { \
+ auto v = eval("{ a.b = { c = 1; d = 2; }; } == { " \
+ X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " \
+ X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " }"); \
+ ASSERT_THAT(v, IsTrue()); \
+ }; \
+ TEST_F(TrivialExpressionTest, nestedAttrsetMergeDup##w##x##y##z) { \
+ ASSERT_THROW(eval("{ " \
+ X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " \
+ X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "c = 2;")) " }"), Error); \
+ }; \
+ TEST_F(TrivialExpressionTest, nestedAttrsetMergeLet##w##x##y##z) { \
+ auto v = eval("{ b = { c = 1; d = 2; }; } == (let " \
+ X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " \
+ X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " in a)"); \
+ ASSERT_THAT(v, IsTrue()); \
+ };
+#define X3(...) X4(__VA_ARGS__, 0) X4(__VA_ARGS__, 1)
+#define X2(...) X3(__VA_ARGS__, 0) X3(__VA_ARGS__, 1)
+#define X1(...) X2(__VA_ARGS__, 0) X2(__VA_ARGS__, 1)
+ X1(0) X1(1)
+#undef X_EXPAND_IF0
+#undef X_EXPAND_IF1
+#undef X1
+#undef X2
+#undef X3
+#undef X4
+
TEST_F(TrivialExpressionTest, functor) {
auto v = eval("{ __functor = self: arg: self.v + arg; v = 10; } 5");
ASSERT_THAT(v, IsIntEq(15));
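For readers who prefer not to expand the X-macros mentally, this is approximately what the `1001` variant discussed in the comment above turns into after preprocessing (adjacent string literals are concatenated by the compiler); it is shown for illustration only, since the macro already generates it.

```cpp
// nestedAttrsetMerge1001: outer `a` of the first assignment expanded,
// inner `b` of the second assignment expanded.
TEST_F(TrivialExpressionTest, nestedAttrsetMerge1001) {
    auto v = eval("{ a.b = { c = 1; d = 2; }; } == { "
                  "a = { b.c = 1; }; "
                  "a.b = { d = 2; }; }");
    ASSERT_THAT(v, IsTrue());
};
```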
diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc
index 2f67260c5..822ec7620 100644
--- a/src/libexpr/attr-path.cc
+++ b/src/libexpr/attr-path.cc
@@ -129,7 +129,6 @@ std::pair<SourcePath, uint32_t> findPackageFilename(EvalState & state, Value & v
try {
auto colon = fn.rfind(':');
if (colon == std::string::npos) fail();
- std::string filename(fn, 0, colon);
auto lineno = std::stoi(std::string(fn, colon + 1, std::string::npos));
return {SourcePath{path.accessor, CanonPath(fn.substr(0, colon))}, lineno};
} catch (std::invalid_argument & e) {
diff --git a/src/libexpr/build-utils-meson b/src/libexpr/build-utils-meson
deleted file mode 120000
index 5fff21bab..000000000
--- a/src/libexpr/build-utils-meson
+++ /dev/null
@@ -1 +0,0 @@
-../../build-utils-meson
\ No newline at end of file
diff --git a/src/libexpr/call-flake.nix b/src/libexpr/call-flake.nix
index a008346e5..964ba2521 100644
--- a/src/libexpr/call-flake.nix
+++ b/src/libexpr/call-flake.nix
@@ -41,10 +41,17 @@ let
(key: node:
let
+ parentNode = allNodes.${getInputByPath lockFile.root node.parent};
+
sourceInfo =
if overrides ? ${key}
then
overrides.${key}.sourceInfo
+ else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/"
+ then
+ parentNode.sourceInfo // {
+ outPath = parentNode.outPath + ("/" + node.locked.path);
+ }
else
# FIXME: remove obsolete node.info.
# Note: lock file entries are always final.
diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh
index d5ce238b2..631c0f396 100644
--- a/src/libexpr/eval-inline.hh
+++ b/src/libexpr/eval-inline.hh
@@ -87,11 +87,15 @@ void EvalState::forceValue(Value & v, const PosIdx pos)
{
if (v.isThunk()) {
Env * env = v.payload.thunk.env;
+ assert(env || v.isBlackhole());
Expr * expr = v.payload.thunk.expr;
try {
v.mkBlackhole();
//checkInterrupt();
- expr->eval(*this, *env, v);
+ if (env) [[likely]]
+ expr->eval(*this, *env, v);
+ else
+ ExprBlackHole::throwInfiniteRecursionError(*this, v);
} catch (...) {
v.mkThunk(env, expr);
tryFixupBlackHolePos(v, pos);
diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh
index 115e3ee50..a8fcce539 100644
--- a/src/libexpr/eval-settings.hh
+++ b/src/libexpr/eval-settings.hh
@@ -3,10 +3,11 @@
#include "config.hh"
#include "ref.hh"
+#include "source-path.hh"
namespace nix {
-class Store;
+class EvalState;
struct EvalSettings : Config
{
@@ -18,11 +19,8 @@ struct EvalSettings : Config
*
* The return value is (a) whether the entry was valid, and, if so,
* what does it map to.
- *
- * @todo Return (`std::optional` of) `SourceAccssor` or something
- * more structured instead of mere `std::string`?
*/
- using LookupPathHook = std::optional<std::string>(ref<Store> store, std::string_view);
+ using LookupPathHook = std::optional<SourcePath>(EvalState & state, std::string_view);
/**
* Map from "scheme" to a `LookupPathHook`.
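With the new `LookupPathHook` signature, a hook receives the whole `EvalState` and answers with a `SourcePath` rather than a plain string; the updated `flake` hook in `src/libcmd/common-eval-args.cc` earlier in this diff is the real instance. A hypothetical sketch of a hook written against the new type (the scheme and path handling are invented for illustration):

```cpp
#include "eval.hh"

#include <optional>
#include <string>
#include <string_view>

using namespace nix;

// Hypothetical hook: only accepts absolute paths after the scheme and maps
// them onto the evaluator's root filesystem.
static std::optional<SourcePath> exampleLookupPathHook(EvalState & state, std::string_view rest)
{
    if (rest.empty() || rest[0] != '/')
        return std::nullopt;                  // entry not valid for this scheme
    return state.rootPath(std::string(rest)); // same call shape as the flake hook
}
```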
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index e21f70553..345c09e7e 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -347,6 +347,16 @@ void EvalState::allowPath(const StorePath & storePath)
rootFS2->allowPrefix(CanonPath(store->toRealPath(storePath)));
}
+void EvalState::allowClosure(const StorePath & storePath)
+{
+ if (!rootFS.dynamic_pointer_cast<AllowListSourceAccessor>()) return;
+
+ StorePathSet closure;
+ store->computeFSClosure(storePath, closure);
+ for (auto & p : closure)
+ allowPath(p);
+}
+
void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v)
{
allowPath(storePath);
@@ -396,7 +406,7 @@ void EvalState::checkURI(const std::string & uri)
/* If the URI is a path, then check it against allowedPaths as
well. */
- if (hasPrefix(uri, "/")) {
+ if (isAbsolute(uri)) {
if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListSourceAccessor>())
rootFS2->checkAccess(CanonPath(uri));
return;
@@ -448,7 +458,7 @@ void EvalState::addConstant(const std::string & name, Value * v, Constant info)
/* Install value the base environment. */
staticBaseEnv->vars.emplace_back(symbols.create(name), baseEnvDispl);
baseEnv.values[baseEnvDispl++] = v;
- baseEnv.values[0]->payload.attrs->push_back(Attr(symbols.create(name2), v));
+ getBuiltins().payload.attrs->push_back(Attr(symbols.create(name2), v));
}
}
@@ -516,16 +526,26 @@ Value * EvalState::addPrimOp(PrimOp && primOp)
else {
staticBaseEnv->vars.emplace_back(envName, baseEnvDispl);
baseEnv.values[baseEnvDispl++] = v;
- baseEnv.values[0]->payload.attrs->push_back(Attr(symbols.create(primOp.name), v));
+ getBuiltins().payload.attrs->push_back(Attr(symbols.create(primOp.name), v));
}
return v;
}
+Value & EvalState::getBuiltins()
+{
+ return *baseEnv.values[0];
+}
+
+
Value & EvalState::getBuiltin(const std::string & name)
{
- return *baseEnv.values[0]->attrs()->find(symbols.create(name))->value;
+ auto it = getBuiltins().attrs()->get(symbols.create(name));
+ if (it)
+ return *it->value;
+ else
+ error("builtin '%1%' not found", name).debugThrow();
}
@@ -588,14 +608,14 @@ std::optional<Doc> EvalState::getDoc(Value & v)
if (isFunctor(v)) {
try {
Value & functor = *v.attrs()->find(sFunctor)->value;
- Value * vp = &v;
+ Value * vp[] = {&v};
Value partiallyApplied;
// The first paramater is not user-provided, and may be
// handled by code that is opaque to the user, like lib.const = x: y: y;
// So preferably we show docs that are relevant to the
// "partially applied" function returned by e.g. `const`.
// We apply the first argument:
- callFunction(functor, 1, &vp, partiallyApplied, noPos);
+ callFunction(functor, vp, partiallyApplied, noPos);
auto _level = addCallDepth(noPos);
return getDoc(partiallyApplied);
}
@@ -1460,7 +1480,7 @@ void ExprLambda::eval(EvalState & state, Env & env, Value & v)
v.mkLambda(&env, this);
}
-void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos)
+void EvalState::callFunction(Value & fun, std::span<Value *> args, Value & vRes, const PosIdx pos)
{
auto _level = addCallDepth(pos);
@@ -1475,16 +1495,16 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
auto makeAppChain = [&]()
{
vRes = vCur;
- for (size_t i = 0; i < nrArgs; ++i) {
+ for (auto arg : args) {
auto fun2 = allocValue();
*fun2 = vRes;
- vRes.mkPrimOpApp(fun2, args[i]);
+ vRes.mkPrimOpApp(fun2, arg);
}
};
const Attr * functor;
- while (nrArgs > 0) {
+ while (args.size() > 0) {
if (vCur.isLambda()) {
@@ -1587,15 +1607,14 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
throw;
}
- nrArgs--;
- args += 1;
+ args = args.subspan(1);
}
else if (vCur.isPrimOp()) {
size_t argsLeft = vCur.primOp()->arity;
- if (nrArgs < argsLeft) {
+ if (args.size() < argsLeft) {
/* We don't have enough arguments, so create a tPrimOpApp chain. */
makeAppChain();
return;
@@ -1607,15 +1626,14 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
if (countCalls) primOpCalls[fn->name]++;
try {
- fn->fun(*this, vCur.determinePos(noPos), args, vCur);
+ fn->fun(*this, vCur.determinePos(noPos), args.data(), vCur);
} catch (Error & e) {
if (fn->addTrace)
addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name);
throw;
}
- nrArgs -= argsLeft;
- args += argsLeft;
+ args = args.subspan(argsLeft);
}
}
@@ -1631,7 +1649,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
auto arity = primOp->primOp()->arity;
auto argsLeft = arity - argsDone;
- if (nrArgs < argsLeft) {
+ if (args.size() < argsLeft) {
/* We still don't have enough arguments, so extend the tPrimOpApp chain. */
makeAppChain();
return;
@@ -1663,8 +1681,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
throw;
}
- nrArgs -= argsLeft;
- args += argsLeft;
+ args = args.subspan(argsLeft);
}
}
@@ -1675,13 +1692,12 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
Value * args2[] = {allocValue(), args[0]};
*args2[0] = vCur;
try {
- callFunction(*functor->value, 2, args2, vCur, functor->pos);
+ callFunction(*functor->value, args2, vCur, functor->pos);
} catch (Error & e) {
e.addTrace(positions[pos], "while calling a functor (an attribute set with a '__functor' attribute)");
throw;
}
- nrArgs--;
- args++;
+ args = args.subspan(1);
}
else
@@ -1724,7 +1740,7 @@ void ExprCall::eval(EvalState & state, Env & env, Value & v)
for (size_t i = 0; i < args.size(); ++i)
vArgs[i] = args[i]->maybeThunk(state, env);
- state.callFunction(vFun, args.size(), vArgs.data(), v, pos);
+ state.callFunction(vFun, vArgs, v, pos);
}
@@ -2046,9 +2062,12 @@ void ExprPos::eval(EvalState & state, Env & env, Value & v)
state.mkPos(v, pos);
}
-
-void ExprBlackHole::eval(EvalState & state, Env & env, Value & v)
+void ExprBlackHole::eval(EvalState & state, [[maybe_unused]] Env & env, Value & v)
{
+ throwInfiniteRecursionError(state, v);
+}
+
+[[gnu::noinline]] [[noreturn]] void ExprBlackHole::throwInfiniteRecursionError(EvalState & state, Value &v) {
state.error("infinite recursion encountered")
.atPos(v.determinePos(noPos))
.debugThrow();
@@ -3029,8 +3048,8 @@ SourcePath EvalState::findFile(const LookupPath & lookupPath, const std::string_
if (!rOpt) continue;
auto r = *rOpt;
- Path res = suffix == "" ? r : concatStrings(r, "/", suffix);
- if (pathExists(res)) return rootPath(CanonPath(canonPath(res)));
+ auto res = (r / CanonPath(suffix)).resolveSymlinks();
+ if (res.pathExists()) return res;
}
if (hasPrefix(path, "nix/"))
@@ -3045,13 +3064,13 @@ SourcePath EvalState::findFile(const LookupPath & lookupPath, const std::string_
}
-std::optional<std::string> EvalState::resolveLookupPathPath(const LookupPath::Path & value0, bool initAccessControl)
+std::optional<SourcePath> EvalState::resolveLookupPathPath(const LookupPath::Path & value0, bool initAccessControl)
{
auto & value = value0.s;
auto i = lookupPathResolved.find(value);
if (i != lookupPathResolved.end()) return i->second;
- auto finish = [&](std::string res) {
+ auto finish = [&](SourcePath res) {
debug("resolved search path element '%s' to '%s'", value, res);
lookupPathResolved.emplace(value, res);
return res;
@@ -3064,7 +3083,7 @@ std::optional<std::string> EvalState::resolveLookupPathPath(const LookupPath::Pa
fetchSettings,
EvalSettings::resolvePseudoUrl(value));
auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy);
- return finish(store->toRealPath(storePath));
+ return finish(rootPath(store->toRealPath(storePath)));
} catch (Error & e) {
logWarning({
.msg = HintFmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
@@ -3076,29 +3095,26 @@ std::optional<std::string> EvalState::resolveLookupPathPath(const LookupPath::Pa
auto scheme = value.substr(0, colPos);
auto rest = value.substr(colPos + 1);
if (auto * hook = get(settings.lookupPathHooks, scheme)) {
- auto res = (*hook)(store, rest);
+ auto res = (*hook)(*this, rest);
if (res)
return finish(std::move(*res));
}
}
{
- auto path = absPath(value);
+ auto path = rootPath(value);
/* Allow access to paths in the search path. */
if (initAccessControl) {
- allowPath(path);
- if (store->isInStore(path)) {
+ allowPath(path.path.abs());
+ if (store->isInStore(path.path.abs())) {
try {
- StorePathSet closure;
- store->computeFSClosure(store->toStorePath(path).first, closure);
- for (auto & p : closure)
- allowPath(p);
+ allowClosure(store->toStorePath(path.path.abs()).first);
} catch (InvalidPath &) { }
}
}
- if (pathExists(path))
+ if (path.pathExists())
return finish(std::move(path));
else {
logWarning({
@@ -3109,7 +3125,6 @@ std::optional<std::string> EvalState::resolveLookupPathPath(const LookupPath::Pa
debug("failed to resolve search path element '%s'", value);
return std::nullopt;
-
}
@@ -3170,5 +3185,18 @@ std::ostream & operator << (std::ostream & str, const ExternalValueBase & v) {
return v.print(str);
}
+void forceNoNullByte(std::string_view s, std::function<Pos()> pos)
+{
+ if (s.find('\0') != s.npos) {
+ using namespace std::string_view_literals;
+ auto str = replaceStrings(std::string(s), "\0"sv, "␀"sv);
+ Error error("input string '%s' cannot be represented as Nix string because it contains null bytes", str);
+ if (pos) {
+ error.atPos(pos());
+ }
+ throw error;
+ }
+}
+
}
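
The `callFunction` hunks above replace the raw `Value * * args` pointer plus a separate `nrArgs` counter with a `std::span<Value *>` that is narrowed via `subspan()` as arguments are consumed. A minimal, self-contained sketch of that pattern, using standard C++ only (the function and types are illustrative stand-ins, not the evaluator's API):

```cpp
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <span>
#include <vector>

// Consume arguments `arity` at a time, shrinking the span as we go.
// `args = args.subspan(take)` replaces the old `nrArgs -= take; args += take;`,
// so the pointer and the remaining count can never drift apart.
static void consumeArgs(std::span<int *> args, std::size_t arity)
{
    while (!args.empty()) {
        std::size_t take = std::min(arity, args.size());
        for (auto * arg : args.first(take))
            std::cout << "applying argument " << *arg << '\n';
        args = args.subspan(take);
    }
}

int main()
{
    int a = 1, b = 2, c = 3;
    std::vector<int *> argv{&a, &b, &c};
    consumeArgs(argv, 2); // two rounds: (1, 2) then (3)
}
```
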
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index a1882dded..84b7d823c 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -347,7 +347,7 @@ private:
LookupPath lookupPath;
- std::map<std::string, std::optional<std::string>> lookupPathResolved;
+ std::map<std::string, std::optional<SourcePath>> lookupPathResolved;
/**
* Cache used by prim_match().
@@ -400,6 +400,11 @@ public:
*/
void allowPath(const StorePath & storePath);
+ /**
+ * Allow access to the closure of a store path.
+ */
+ void allowClosure(const StorePath & storePath);
+
/**
* Allow access to a store path and return it as a string.
*/
@@ -452,9 +457,9 @@ public:
*
* If the specified search path element is a URI, download it.
*
- * If it is not found, return `std::nullopt`
+ * If it is not found, return `std::nullopt`.
*/
- std::optional<std::string> resolveLookupPathPath(
+ std::optional<SourcePath> resolveLookupPathPath(
const LookupPath::Path & elem,
bool initAccessControl = false);
@@ -623,8 +628,19 @@ private:
public:
+ /**
+ * Retrieve a specific builtin, equivalent to evaluating `builtins.${name}`.
+ * @param name The attribute name of the builtin to retrieve.
+ * @throws EvalError if the builtin does not exist.
+ */
Value & getBuiltin(const std::string & name);
+ /**
+ * Retrieve the `builtins` attrset, equivalent to evaluating the reference `builtins`.
+ * Always returns an attribute set value.
+ */
+ Value & getBuiltins();
+
struct Doc
{
Pos pos;
@@ -690,13 +706,12 @@ public:
bool isFunctor(Value & fun);
- // FIXME: use std::span
- void callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos);
+ void callFunction(Value & fun, std::span<Value *> args, Value & vRes, const PosIdx pos);
void callFunction(Value & fun, Value & arg, Value & vRes, const PosIdx pos)
{
Value * args[] = {&arg};
- callFunction(fun, 1, args, vRes, pos);
+ callFunction(fun, args, vRes, pos);
}
/**
@@ -809,7 +824,6 @@ public:
bool callPathFilter(
Value * filterFun,
const SourcePath & path,
- std::string_view pathArg,
PosIdx pos);
DocComment getDocCommentForPos(PosIdx pos);
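
The new `getBuiltins()` / `getBuiltin()` pair declared above backs the checked lookup introduced in `eval.cc`: instead of dereferencing the result of `find()` unconditionally, a missing builtin now produces an error. A rough stand-alone illustration of that lookup style, with a plain `std::map` and `std::runtime_error` as stand-ins for the real symbol table and `EvalError`:

```cpp
#include <map>
#include <stdexcept>
#include <string>

// Checked lookup: report the missing key instead of dereferencing end().
static const int & lookupBuiltin(const std::map<std::string, int> & builtins, const std::string & name)
{
    auto it = builtins.find(name);
    if (it == builtins.end())
        throw std::runtime_error("builtin '" + name + "' not found");
    return it->second;
}

int main()
{
    std::map<std::string, int> builtins{{"true", 1}};
    return lookupBuiltin(builtins, "true") == 1 ? 0 : 1;
}
```
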
diff --git a/src/libexpr/json-to-value.cc b/src/libexpr/json-to-value.cc
index 9ac56541a..17cab7ad5 100644
--- a/src/libexpr/json-to-value.cc
+++ b/src/libexpr/json-to-value.cc
@@ -50,6 +50,7 @@ class JSONSax : nlohmann::json_sax<json> {
public:
void key(string_t & name, EvalState & state)
{
+ forceNoNullByte(name);
attrs.insert_or_assign(state.symbols.create(name), &value(state));
}
};
@@ -122,6 +123,7 @@ public:
bool string(string_t & val) override
{
+ forceNoNullByte(val);
rs->value(state).mkString(val);
rs->add();
return true;
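
`forceNoNullByte` is now applied to every string entering the evaluator from JSON (above), TOML and the lexer. A minimal sketch of such a guard, assuming only the standard library; the name, message and deferred-position callback are illustrative rather than the exact Nix signature:

```cpp
#include <functional>
#include <iostream>
#include <stdexcept>
#include <string>
#include <string_view>

// Reject strings containing NUL; only invoke the (possibly expensive)
// position callback when an error is actually raised.
static void rejectNullBytes(std::string_view s, std::function<std::string()> describePos = nullptr)
{
    if (s.find('\0') != std::string_view::npos) {
        std::string msg = "input string cannot be represented as a Nix string because it contains null bytes";
        if (describePos)
            msg += " at " + describePos();
        throw std::runtime_error(msg);
    }
}

int main()
{
    rejectNullBytes("fine");
    try {
        rejectNullBytes(std::string_view("bad\0bad", 7), [] { return std::string("line 1"); });
    } catch (const std::runtime_error & e) {
        std::cerr << e.what() << '\n';
    }
}
```
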
diff --git a/src/libexpr/lexer-helpers.hh b/src/libexpr/lexer-helpers.hh
index caba6e18f..d40f7b874 100644
--- a/src/libexpr/lexer-helpers.hh
+++ b/src/libexpr/lexer-helpers.hh
@@ -1,5 +1,13 @@
#pragma once
+#include
+
+// including the generated headers twice leads to errors
+#ifndef BISON_HEADER
+# include "lexer-tab.hh"
+# include "parser-tab.hh"
+#endif
+
namespace nix::lexer::internal {
void initLoc(YYLTYPE * loc);
diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l
index a7e44cb72..067f86e01 100644
--- a/src/libexpr/lexer.l
+++ b/src/libexpr/lexer.l
@@ -41,16 +41,18 @@ namespace nix {
// we make use of the fact that the parser receives a private copy of the input
// string and can munge around in it.
-static StringToken unescapeStr(SymbolTable & symbols, char * s, size_t length)
+// getting the position is expensive and thus it is implemented lazily.
+static StringToken unescapeStr(char * const s, size_t length, std::function<Pos()> && pos)
{
- char * result = s;
+ bool noNullByte = true;
char * t = s;
- char c;
// the input string is terminated with *two* NULs, so we can safely take
// *one* character after the one being checked against.
- while ((c = *s++)) {
+ for (size_t i = 0; i < length; t++) {
+ char c = s[i++];
+ noNullByte &= c != '\0';
if (c == '\\') {
- c = *s++;
+ c = s[i++];
if (c == 'n') *t = '\n';
else if (c == 'r') *t = '\r';
else if (c == 't') *t = '\t';
@@ -59,12 +61,14 @@ static StringToken unescapeStr(SymbolTable & symbols, char * s, size_t length)
else if (c == '\r') {
/* Normalise CR and CR/LF into LF. */
*t = '\n';
- if (*s == '\n') s++; /* cr/lf */
+ if (s[i] == '\n') i++; /* cr/lf */
}
else *t = c;
- t++;
}
- return {result, size_t(t - result)};
+ if (!noNullByte) {
+ forceNoNullByte({s, size_t(t - s)}, std::move(pos));
+ }
+ return {s, size_t(t - s)};
}
static void requireExperimentalFeature(const ExperimentalFeature & feature, const Pos & pos)
@@ -175,7 +179,7 @@ or { return OR_KW; }
/* It is impossible to match strings ending with '$' with one
regex because trailing contexts are only valid at the end
of a rule. (A sane but undocumented limitation.) */
- yylval->str = unescapeStr(state->symbols, yytext, yyleng);
+ yylval->str = unescapeStr(yytext, yyleng, [&]() { return state->positions[CUR_POS]; });
return STR;
}
\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
@@ -191,6 +195,7 @@ or { return OR_KW; }
\'\'(\ *\n)? { PUSH_STATE(IND_STRING); return IND_STRING_OPEN; }
([^\$\']|\$[^\{\']|\'[^\'\$])+ {
yylval->str = {yytext, (size_t) yyleng, true};
+ forceNoNullByte(yylval->str, [&]() { return state->positions[CUR_POS]; });
return IND_STR;
}
\'\'\$ |
@@ -203,7 +208,7 @@ or { return OR_KW; }
return IND_STR;
}
\'\'\\{ANY} {
- yylval->str = unescapeStr(state->symbols, yytext + 2, yyleng - 2);
+ yylval->str = unescapeStr(yytext + 2, yyleng - 2, [&]() { return state->positions[CUR_POS]; });
return IND_STR;
}
\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build
index 4d8a38b43..b33aebc86 100644
--- a/src/libexpr/meson.build
+++ b/src/libexpr/meson.build
@@ -4,8 +4,6 @@ project('nix-expr', 'cpp',
'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level
'warning_level=1',
- 'debug=true',
- 'optimization=2',
'errorlogs=true', # Please print logs for tests that fail
],
meson_version : '>= 1.1',
@@ -14,7 +12,7 @@ project('nix-expr', 'cpp',
cxx = meson.get_compiler('cpp')
-subdir('build-utils-meson/deps-lists')
+subdir('nix-meson-build-support/deps-lists')
configdata = configuration_data()
@@ -25,9 +23,7 @@ deps_public_maybe_subproject = [
dependency('nix-store'),
dependency('nix-fetchers'),
]
-subdir('build-utils-meson/subprojects')
-
-subdir('build-utils-meson/threads')
+subdir('nix-meson-build-support/subprojects')
boost = dependency(
'boost',
@@ -79,7 +75,7 @@ add_project_arguments(
language : 'cpp',
)
-subdir('build-utils-meson/diagnostics')
+subdir('nix-meson-build-support/common')
parser_tab = custom_target(
input : 'parser.y',
@@ -123,7 +119,7 @@ lexer_tab = custom_target(
install_dir : get_option('includedir') / 'nix',
)
-subdir('build-utils-meson/generate-header')
+subdir('nix-meson-build-support/generate-header')
generated_headers = []
foreach header : [
@@ -207,4 +203,4 @@ install_headers(headers, subdir : 'nix', preserve_path : true)
libraries_private = []
-subdir('build-utils-meson/export')
+subdir('nix-meson-build-support/export')
diff --git a/src/libexpr/nix-meson-build-support b/src/libexpr/nix-meson-build-support
new file mode 120000
index 000000000..0b140f56b
--- /dev/null
+++ b/src/libexpr/nix-meson-build-support
@@ -0,0 +1 @@
+../../nix-meson-build-support
\ No newline at end of file
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index 948839bd9..a7ad580d2 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -168,7 +168,7 @@ struct ExprVar : Expr
the set stored in the environment that is `level` levels up
from the current one.*/
Level level;
- Displacement displ;
+ Displacement displ = 0;
ExprVar(Symbol name) : name(name) { };
ExprVar(const PosIdx & pos, Symbol name) : pos(pos), name(name) { };
@@ -242,7 +242,7 @@ struct ExprAttrs : Expr
Kind kind;
Expr * e;
PosIdx pos;
- Displacement displ; // displacement
+ Displacement displ = 0; // displacement
AttrDef(Expr * e, const PosIdx & pos, Kind kind = Kind::Plain)
: kind(kind), e(e), pos(pos) { };
AttrDef() { };
@@ -468,6 +468,7 @@ struct ExprBlackHole : Expr
void show(const SymbolTable & symbols, std::ostream & str) const override {}
void eval(EvalState & state, Env & env, Value & v) override;
void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override {}
+ [[noreturn]] static void throwInfiniteRecursionError(EvalState & state, Value & v);
};
extern ExprBlackHole eBlackHole;
diff --git a/src/libexpr/package.nix b/src/libexpr/package.nix
index d97e7f3a8..3d5b78e35 100644
--- a/src/libexpr/package.nix
+++ b/src/libexpr/package.nix
@@ -40,8 +40,8 @@ mkMesonLibrary (finalAttrs: {
workDir = ./.;
fileset = fileset.unions [
- ../../build-utils-meson
- ./build-utils-meson
+ ../../nix-meson-build-support
+ ./nix-meson-build-support
../../.version
./.version
./meson.build
@@ -96,8 +96,6 @@ mkMesonLibrary (finalAttrs: {
# https://github.com/NixOS/nixpkgs/issues/86131.
BOOST_INCLUDEDIR = "${lib.getDev boost}/include";
BOOST_LIBRARYDIR = "${lib.getLib boost}/lib";
- } // lib.optionalAttrs (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) {
- LDFLAGS = "-fuse-ld=gold";
};
meta = {
diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh
index 8ad0d9ad7..21a880e8e 100644
--- a/src/libexpr/parser-state.hh
+++ b/src/libexpr/parser-state.hh
@@ -88,6 +88,7 @@ struct ParserState
void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos);
void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos);
void addAttr(ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc);
+ void addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def);
Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {});
Expr * stripIndentation(const PosIdx pos,
std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es);
@@ -120,64 +121,29 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, const
// Checking attrPath validity.
// ===========================
for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) {
+ ExprAttrs * nested;
if (i->symbol) {
ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
if (j != attrs->attrs.end()) {
- if (j->second.kind != ExprAttrs::AttrDef::Kind::Inherited) {
- ExprAttrs * attrs2 = dynamic_cast<ExprAttrs *>(j->second.e);
- if (!attrs2) dupAttr(attrPath, pos, j->second.pos);
- attrs = attrs2;
- } else
+ nested = dynamic_cast<ExprAttrs *>(j->second.e);
+ if (!nested) {
+ attrPath.erase(i + 1, attrPath.end());
dupAttr(attrPath, pos, j->second.pos);
+ }
} else {
- ExprAttrs * nested = new ExprAttrs;
+ nested = new ExprAttrs;
attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos);
- attrs = nested;
}
} else {
- ExprAttrs *nested = new ExprAttrs;
+ nested = new ExprAttrs;
attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, nested, pos));
- attrs = nested;
}
+ attrs = nested;
}
// Expr insertion.
// ==========================
if (i->symbol) {
- ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
- if (j != attrs->attrs.end()) {
- // This attr path is already defined. However, if both
- // e and the expr pointed by the attr path are two attribute sets,
- // we want to merge them.
- // Otherwise, throw an error.
- auto ae = dynamic_cast<ExprAttrs *>(e);
- auto jAttrs = dynamic_cast<ExprAttrs *>(j->second.e);
- if (jAttrs && ae) {
- if (ae->inheritFromExprs && !jAttrs->inheritFromExprs)
- jAttrs->inheritFromExprs = std::make_unique<std::vector<Expr *>>();
- for (auto & ad : ae->attrs) {
- auto j2 = jAttrs->attrs.find(ad.first);
- if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error.
- dupAttr(ad.first, j2->second.pos, ad.second.pos);
- jAttrs->attrs.emplace(ad.first, ad.second);
- if (ad.second.kind == ExprAttrs::AttrDef::Kind::InheritedFrom) {
- auto & sel = dynamic_cast(*ad.second.e);
- auto & from = dynamic_cast(*sel.e);
- from.displ += jAttrs->inheritFromExprs->size();
- }
- }
- jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end());
- if (ae->inheritFromExprs) {
- jAttrs->inheritFromExprs->insert(jAttrs->inheritFromExprs->end(),
- ae->inheritFromExprs->begin(), ae->inheritFromExprs->end());
- }
- } else {
- dupAttr(attrPath, pos, j->second.pos);
- }
- } else {
- // This attr path is not defined. Let's create it.
- attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos));
- e->setName(i->symbol);
- }
+ addAttr(attrs, attrPath, i->symbol, ExprAttrs::AttrDef(e, pos));
} else {
attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos));
}
@@ -189,6 +155,60 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, const
}
}
+/**
+ * Precondition: attrPath is used for error messages and should already contain
+ * symbol as its last element.
+ */
+inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def)
+{
+ ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(symbol);
+ if (j != attrs->attrs.end()) {
+ // This attr path is already defined. However, if both
+ // e and the expr pointed by the attr path are two attribute sets,
+ // we want to merge them.
+ // Otherwise, throw an error.
+ auto ae = dynamic_cast<ExprAttrs *>(def.e);
+ auto jAttrs = dynamic_cast<ExprAttrs *>(j->second.e);
+
+ // N.B. In a world in which we are less bound by our past mistakes, we
+ // would also test that jAttrs and ae are not recursive. The effect of
+ // not doing so is that any `rec` marker on ae is discarded, and any
+ // `rec` marker on jAttrs will apply to the attributes in ae.
+ // See https://github.com/NixOS/nix/issues/9020.
+ if (jAttrs && ae) {
+ if (ae->inheritFromExprs && !jAttrs->inheritFromExprs)
+ jAttrs->inheritFromExprs = std::make_unique<std::vector<Expr *>>();
+ for (auto & ad : ae->attrs) {
+ if (ad.second.kind == ExprAttrs::AttrDef::Kind::InheritedFrom) {
+ auto & sel = dynamic_cast(*ad.second.e);
+ auto & from = dynamic_cast(*sel.e);
+ from.displ += jAttrs->inheritFromExprs->size();
+ }
+ attrPath.emplace_back(AttrName(ad.first));
+ addAttr(jAttrs, attrPath, ad.first, std::move(ad.second));
+ attrPath.pop_back();
+ }
+ ae->attrs.clear();
+ jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(),
+ std::make_move_iterator(ae->dynamicAttrs.begin()),
+ std::make_move_iterator(ae->dynamicAttrs.end()));
+ ae->dynamicAttrs.clear();
+ if (ae->inheritFromExprs) {
+ jAttrs->inheritFromExprs->insert(jAttrs->inheritFromExprs->end(),
+ std::make_move_iterator(ae->inheritFromExprs->begin()),
+ std::make_move_iterator(ae->inheritFromExprs->end()));
+ ae->inheritFromExprs = nullptr;
+ }
+ } else {
+ dupAttr(attrPath, def.pos, j->second.pos);
+ }
+ } else {
+ // This attr path is not defined. Let's create it.
+ attrs->attrs.emplace(symbol, def);
+ def.e->setName(symbol);
+ }
+}
+
inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Symbol arg)
{
std::sort(formals->formals.begin(), formals->formals.end(),
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index ea9090fc6..e6d9feaf2 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -66,14 +66,12 @@ StringMap EvalState::realiseContext(const NixStringContext & context, StorePathS
ensureValid(b.drvPath->getBaseStorePath());
},
[&](const NixStringContextElem::Opaque & o) {
- auto ctxS = store->printStorePath(o.path);
ensureValid(o.path);
if (maybePathsOut)
maybePathsOut->emplace(o.path);
},
[&](const NixStringContextElem::DrvDeep & d) {
/* Treat same as Opaque */
- auto ctxS = store->printStorePath(d.drvPath);
ensureValid(d.drvPath);
if (maybePathsOut)
maybePathsOut->emplace(d.drvPath);
@@ -121,11 +119,9 @@ StringMap EvalState::realiseContext(const NixStringContext & context, StorePathS
if (store != buildStore) copyClosure(*buildStore, *store, outputsToCopyAndAllow);
if (isIFD) {
- for (auto & outputPath : outputsToCopyAndAllow) {
- /* Add the output of this derivations to the allowed
- paths. */
- allowPath(outputPath);
- }
+ /* Allow access to the output closures of this derivation. */
+ for (auto & outputPath : outputsToCopyAndAllow)
+ allowClosure(outputPath);
}
return res;
@@ -724,7 +720,7 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value * * a
/* Call the `operator' function with `e' as argument. */
Value newElements;
- state.callFunction(*op->value, 1, &e, newElements, noPos);
+ state.callFunction(*op->value, {&e, 1}, newElements, noPos);
state.forceList(newElements, noPos, "while evaluating the return value of the `operator` passed to builtins.genericClosure");
/* Add the values returned by the operator to the work set. */
@@ -1102,7 +1098,7 @@ static RegisterPrimOp primop_warn({
.name = "__warn",
.args = {"e1", "e2"},
.doc = R"(
- Evaluate *e1*, which must be a string and print iton standard error as a warning.
+ Evaluate *e1*, which must be a string, and print it on standard error as a warning.
Then return *e2*.
This function is useful for non-critical situations where attention is advisable.
@@ -1603,7 +1599,8 @@ static RegisterPrimOp primop_placeholder({
*************************************************************/
-/* Convert the argument to a path. !!! obsolete? */
+/* Convert the argument to a path and then to a string (confusing,
+ eh?). !!! obsolete? */
static void prim_toPath(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
NixStringContext context;
@@ -2048,7 +2045,7 @@ static RegisterPrimOp primop_readFileType({
.args = {"p"},
.doc = R"(
Determine the directory entry type of a filesystem node, being
- one of "directory", "regular", "symlink", or "unknown".
+ one of `"directory"`, `"regular"`, `"symlink"`, or `"unknown"`.
)",
.fun = prim_readFileType,
});
@@ -2437,7 +2434,6 @@ static RegisterPrimOp primop_toFile({
bool EvalState::callPathFilter(
Value * filterFun,
const SourcePath & path,
- std::string_view pathArg,
PosIdx pos)
{
auto st = path.lstat();
@@ -2445,12 +2441,12 @@ bool EvalState::callPathFilter(
/* Call the filter function. The first argument is the path, the
second is a string indicating the type of the file. */
Value arg1;
- arg1.mkString(pathArg);
+ arg1.mkString(path.path.abs());
// assert that type is not "unknown"
Value * args []{&arg1, fileTypeToString(*this, st.type)};
Value res;
- callFunction(*filterFun, 2, args, res, pos);
+ callFunction(*filterFun, args, res, pos);
return forceBool(res, pos, "while evaluating the return value of the path filter function");
}
@@ -2488,7 +2484,7 @@ static void addPath(
if (filterFun)
filter = std::make_unique<PathFilter>([&](const Path & p) {
auto p2 = CanonPath(p);
- return state.callPathFilter(filterFun, {path.accessor, p2}, p2.abs(), pos);
+ return state.callPathFilter(filterFun, {path.accessor, p2}, pos);
});
std::optional<StorePath> expectedStorePath;
@@ -2614,13 +2610,13 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256);
else
state.error(
- "unsupported argument '%1%' to 'addPath'",
+ "unsupported argument '%1%' to 'builtins.path'",
state.symbols[attr.name]
).atPos(attr.pos).debugThrow();
}
if (!path)
state.error(
- "missing required 'path' attribute in the first argument to builtins.path"
+ "missing required 'path' attribute in the first argument to 'builtins.path'"
).atPos(pos).debugThrow();
if (name.empty())
name = path->baseName();
@@ -3261,23 +3257,19 @@ static RegisterPrimOp primop_isList({
.fun = prim_isList,
});
-static void elemAt(EvalState & state, const PosIdx pos, Value & list, int n, Value & v)
-{
- state.forceList(list, pos, "while evaluating the first argument passed to builtins.elemAt");
- if (n < 0 || (unsigned int) n >= list.listSize())
- state.error(
- "list index %1% is out of bounds",
- n
- ).atPos(pos).debugThrow();
- state.forceValue(*list.listElems()[n], pos);
- v = *list.listElems()[n];
-}
-
/* Return the n-1'th element of a list. */
static void prim_elemAt(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
- NixInt::Inner elem = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.elemAt").value;
- elemAt(state, pos, *args[0], elem, v);
+ NixInt::Inner n = state.forceInt(*args[1], pos, "while evaluating the second argument passed to 'builtins.elemAt'").value;
+ state.forceList(*args[0], pos, "while evaluating the first argument passed to 'builtins.elemAt'");
+ if (n < 0 || (unsigned int) n >= args[0]->listSize())
+ state.error(
+ "'builtins.elemAt' called with index %d on a list of size %d",
+ n,
+ args[0]->listSize()
+ ).atPos(pos).debugThrow();
+ state.forceValue(*args[0]->listElems()[n], pos);
+ v = *args[0]->listElems()[n];
}
static RegisterPrimOp primop_elemAt({
@@ -3293,7 +3285,13 @@ static RegisterPrimOp primop_elemAt({
/* Return the first element of a list. */
static void prim_head(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
- elemAt(state, pos, *args[0], 0, v);
+ state.forceList(*args[0], pos, "while evaluating the first argument passed to 'builtins.head'");
+ if (args[0]->listSize() == 0)
+ state.error(
+ "'builtins.head' called on an empty list"
+ ).atPos(pos).debugThrow();
+ state.forceValue(*args[0]->listElems()[0], pos);
+ v = *args[0]->listElems()[0];
}
static RegisterPrimOp primop_head({
@@ -3312,9 +3310,9 @@ static RegisterPrimOp primop_head({
don't want to use it! */
static void prim_tail(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
- state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.tail");
+ state.forceList(*args[0], pos, "while evaluating the first argument passed to 'builtins.tail'");
if (args[0]->listSize() == 0)
- state.error("'tail' called on an empty list").atPos(pos).debugThrow();
+ state.error("'builtins.tail' called on an empty list").atPos(pos).debugThrow();
auto list = state.buildList(args[0]->listSize() - 1);
for (const auto & [n, v] : enumerate(list))
@@ -3487,7 +3485,7 @@ static void prim_foldlStrict(EvalState & state, const PosIdx pos, Value * * args
for (auto [n, elem] : enumerate(args[2]->listItems())) {
Value * vs []{vCur, elem};
vCur = n == args[2]->listSize() - 1 ? &v : state.allocValue();
- state.callFunction(*args[0], 2, vs, *vCur, pos);
+ state.callFunction(*args[0], vs, *vCur, pos);
}
state.forceValue(v, pos);
} else {
@@ -3637,7 +3635,7 @@ static void prim_sort(EvalState & state, const PosIdx pos, Value * * args, Value
Value * vs[] = {a, b};
Value vBool;
- state.callFunction(*args[0], 2, vs, vBool, noPos);
+ state.callFunction(*args[0], vs, vBool, noPos);
return state.forceBool(vBool, pos, "while evaluating the return value of the sorting function passed to builtins.sort");
};
@@ -4061,7 +4059,7 @@ static RegisterPrimOp primop_toString({
});
/* `substring start len str' returns the substring of `str' starting
- at character position `min(start, stringLength str)' inclusive and
+ at byte position `min(start, stringLength str)' inclusive and
ending at `min(start + len, stringLength str)'. `start' must be
non-negative. */
static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, Value & v)
@@ -4100,7 +4098,7 @@ static RegisterPrimOp primop_substring({
.name = "__substring",
.args = {"start", "len", "s"},
.doc = R"(
- Return the substring of *s* from character position *start*
+ Return the substring of *s* from byte position *start*
(zero-based) up to but not including *start + len*. If *start* is
greater than the length of the string, an empty string is returned.
If *start + len* lies beyond the end of the string or *len* is `-1`,
@@ -4385,7 +4383,7 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v)
for (auto i = begin; i != end; ++i) {
assert(idx <= 2 * len + 1 - 3);
- auto match = *i;
+ const auto & match = *i;
// Add a string for non-matched characters.
list[idx++] = mkString(state, match.prefix());
@@ -4937,7 +4935,7 @@ void EvalState::createBaseEnv()
/* Now that we've added all primops, sort the `builtins' set,
because attribute lookups expect it to be sorted. */
- baseEnv.values[0]->payload.attrs->sort();
+ getBuiltins().payload.attrs->sort();
staticBaseEnv->sort();
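
The reworked `builtins.elemAt` / `builtins.head` above now report both the offending index and the list size. A small illustration of that error style on a plain vector (`elemAt` here is a hypothetical helper, not the primop itself):

```cpp
#include <cstddef>
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

// Bounds-checked indexing that names both the index and the list size.
static int elemAt(const std::vector<int> & list, long n)
{
    if (n < 0 || static_cast<std::size_t>(n) >= list.size())
        throw std::out_of_range(
            "'elemAt' called with index " + std::to_string(n) +
            " on a list of size " + std::to_string(list.size()));
    return list[static_cast<std::size_t>(n)];
}

int main()
{
    std::vector<int> xs{10, 20, 30};
    try {
        elemAt(xs, 5);
    } catch (const std::out_of_range & e) {
        std::cerr << e.what() << '\n';
    }
}
```
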
diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc
index 50850e187..135e57109 100644
--- a/src/libexpr/primops/context.cc
+++ b/src/libexpr/primops/context.cc
@@ -132,6 +132,8 @@ static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, V
},
[&](const NixStringContextElem::DrvDeep & c) -> NixStringContextElem::DrvDeep {
/* Reuse original item because we want this to be idempotent. */
+ /* FIXME: Suspicious move out of const. This is actually a copy, so the comment
+ above does not make much sense. */
return std::move(c);
},
}, context.begin()->raw) }),
diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc
index fc5bb3145..04b8d0595 100644
--- a/src/libexpr/primops/fetchClosure.cc
+++ b/src/libexpr/primops/fetchClosure.cc
@@ -40,7 +40,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor
});
}
- auto toPath = *toPathMaybe;
+ const auto & toPath = *toPathMaybe;
// check and return
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index c207da8ad..fe42b88f1 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -33,9 +33,8 @@ void emitTreeAttrs(
// FIXME: support arbitrary input attributes.
- auto narHash = input.getNarHash();
- assert(narHash);
- attrs.alloc("narHash").mkString(narHash->to_string(HashFormat::SRI, true));
+ if (auto narHash = input.getNarHash())
+ attrs.alloc("narHash").mkString(narHash->to_string(HashFormat::SRI, true));
if (input.getType() == "git")
attrs.alloc("submodules").mkBool(
@@ -183,7 +182,7 @@ static void fetchTree(
if (!state.settings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes))
input = lookupInRegistries(state.store, input).first;
- if (state.settings.pureEval && !input.isLocked()) {
+ if (state.settings.pureEval && !input.isConsideredLocked(state.fetchSettings)) {
auto fetcher = "fetchTree";
if (params.isFetchGit)
fetcher = "fetchGit";
diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc
index 264046711..404425054 100644
--- a/src/libexpr/primops/fromTOML.cc
+++ b/src/libexpr/primops/fromTOML.cc
@@ -28,8 +28,10 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V
auto attrs = state.buildBindings(size);
- for(auto & elem : table)
+ for(auto & elem : table) {
+ forceNoNullByte(elem.first);
visit(attrs.alloc(elem.first), elem.second);
+ }
v.mkAttrs(attrs);
}
@@ -54,7 +56,11 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V
v.mkFloat(toml::get(t));
break;;
case toml::value_t::string:
- v.mkString(toml::get<std::string>(t));
+ {
+ auto s = toml::get<std::string>(t);
+ forceNoNullByte(s);
+ v.mkString(s);
+ }
break;;
case toml::value_t::local_datetime:
case toml::value_t::offset_datetime:
@@ -66,7 +72,9 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V
attrs.alloc("_type").mkString("timestamp");
std::ostringstream s;
s << t;
- attrs.alloc("value").mkString(toView(s));
+ auto str = toView(s);
+ forceNoNullByte(str);
+ attrs.alloc("value").mkString(str);
v.mkAttrs(attrs);
} else {
throw std::runtime_error("Dates and times are not supported");
diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh
index 080ba26b8..9ad54e532 100644
--- a/src/libexpr/print-options.hh
+++ b/src/libexpr/print-options.hh
@@ -5,6 +5,7 @@
*/
#include
+#include
namespace nix {
diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc
index 8044fe347..5aa4fe4fd 100644
--- a/src/libexpr/value-to-json.cc
+++ b/src/libexpr/value-to-json.cc
@@ -108,7 +108,11 @@ json printValueAsJSON(EvalState & state, bool strict,
void printValueAsJSON(EvalState & state, bool strict,
Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore)
{
- str << printValueAsJSON(state, strict, v, pos, context, copyToStore);
+ try {
+ str << printValueAsJSON(state, strict, v, pos, context, copyToStore);
+ } catch (nlohmann::json::exception & e) {
+ throw JSONSerializationError("JSON serialization error: %s", e.what());
+ }
}
json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
diff --git a/src/libexpr/value-to-json.hh b/src/libexpr/value-to-json.hh
index 47ac90313..867c4e3a8 100644
--- a/src/libexpr/value-to-json.hh
+++ b/src/libexpr/value-to-json.hh
@@ -16,4 +16,7 @@ nlohmann::json printValueAsJSON(EvalState & state, bool strict,
void printValueAsJSON(EvalState & state, bool strict,
Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore = true);
+
+MakeError(JSONSerializationError, Error);
+
}
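
The `value-to-json` changes above translate low-level `nlohmann::json` exceptions (for example, invalid UTF-8 during `dump()`) into a dedicated `JSONSerializationError`. A sketch of that error-translation pattern; here `JSONSerializationError` is a plain `std::runtime_error` subclass standing in for the `MakeError`-generated class:

```cpp
#include <iostream>
#include <nlohmann/json.hpp>
#include <stdexcept>
#include <string>

struct JSONSerializationError : std::runtime_error
{
    using std::runtime_error::runtime_error;
};

// Rethrow library-level serialization failures as a domain error.
static std::string dumpOrThrow(const nlohmann::json & value)
{
    try {
        return value.dump();
    } catch (const nlohmann::json::exception & e) {
        throw JSONSerializationError(std::string("JSON serialization error: ") + e.what());
    }
}

int main()
{
    std::cout << dumpOrThrow(nlohmann::json{{"ok", 1}}) << '\n';
    try {
        dumpOrThrow(nlohmann::json("\xffinvalid utf-8"));
    } catch (const JSONSerializationError & e) {
        std::cerr << e.what() << '\n';
    }
}
```
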
diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh
index d98161488..8925693e3 100644
--- a/src/libexpr/value.hh
+++ b/src/libexpr/value.hh
@@ -510,4 +510,6 @@ typedef std::shared_ptr<Value *> RootValue;
RootValue allocRootValue(Value * v);
+void forceNoNullByte(std::string_view s, std::function<Pos()> = nullptr);
+
}
diff --git a/src/libfetchers-tests/build-utils-meson b/src/libfetchers-tests/build-utils-meson
deleted file mode 120000
index 5fff21bab..000000000
--- a/src/libfetchers-tests/build-utils-meson
+++ /dev/null
@@ -1 +0,0 @@
-../../build-utils-meson
\ No newline at end of file
diff --git a/src/libfetchers-tests/meson.build b/src/libfetchers-tests/meson.build
index d948dbad6..739435501 100644
--- a/src/libfetchers-tests/meson.build
+++ b/src/libfetchers-tests/meson.build
@@ -4,8 +4,6 @@ project('nix-fetchers-tests', 'cpp',
'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level
'warning_level=1',
- 'debug=true',
- 'optimization=2',
'errorlogs=true', # Please print logs for tests that fail
],
meson_version : '>= 1.1',
@@ -14,7 +12,7 @@ project('nix-fetchers-tests', 'cpp',
cxx = meson.get_compiler('cpp')
-subdir('build-utils-meson/deps-lists')
+subdir('nix-meson-build-support/deps-lists')
deps_private_maybe_subproject = [
dependency('nix-store-test-support'),
@@ -22,12 +20,10 @@ deps_private_maybe_subproject = [
]
deps_public_maybe_subproject = [
]
-subdir('build-utils-meson/subprojects')
+subdir('nix-meson-build-support/subprojects')
-subdir('build-utils-meson/threads')
-
-subdir('build-utils-meson/export-all-symbols')
-subdir('build-utils-meson/windows-version')
+subdir('nix-meson-build-support/export-all-symbols')
+subdir('nix-meson-build-support/windows-version')
rapidcheck = dependency('rapidcheck')
deps_private += rapidcheck
@@ -44,7 +40,7 @@ add_project_arguments(
language : 'cpp',
)
-subdir('build-utils-meson/diagnostics')
+subdir('nix-meson-build-support/common')
sources = files(
'public-key.cc',
diff --git a/src/libfetchers-tests/nix-meson-build-support b/src/libfetchers-tests/nix-meson-build-support
new file mode 120000
index 000000000..0b140f56b
--- /dev/null
+++ b/src/libfetchers-tests/nix-meson-build-support
@@ -0,0 +1 @@
+../../nix-meson-build-support
\ No newline at end of file
diff --git a/src/libfetchers-tests/package.nix b/src/libfetchers-tests/package.nix
index 7b2ba8f2c..5336672a2 100644
--- a/src/libfetchers-tests/package.nix
+++ b/src/libfetchers-tests/package.nix
@@ -26,8 +26,8 @@ mkMesonExecutable (finalAttrs: {
workDir = ./.;
fileset = fileset.unions [
- ../../build-utils-meson
- ./build-utils-meson
+ ../../nix-meson-build-support
+ ./nix-meson-build-support
../../.version
./.version
./meson.build
@@ -54,10 +54,6 @@ mkMesonExecutable (finalAttrs: {
mesonFlags = [
];
- env = lib.optionalAttrs (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) {
- LDFLAGS = "-fuse-ld=gold";
- };
-
passthru = {
tests = {
run = runCommand "${finalAttrs.pname}-run" {
diff --git a/src/libfetchers/build-utils-meson b/src/libfetchers/build-utils-meson
deleted file mode 120000
index 5fff21bab..000000000
--- a/src/libfetchers/build-utils-meson
+++ /dev/null
@@ -1 +0,0 @@
-../../build-utils-meson
\ No newline at end of file
diff --git a/src/libfetchers/cache.cc b/src/libfetchers/cache.cc
index b0b6cb887..6c2241f3a 100644
--- a/src/libfetchers/cache.cc
+++ b/src/libfetchers/cache.cc
@@ -36,7 +36,7 @@ struct CacheImpl : Cache
{
auto state(_state.lock());
- auto dbPath = getCacheDir() + "/fetcher-cache-v2.sqlite";
+ auto dbPath = getCacheDir() + "/fetcher-cache-v3.sqlite";
createDirs(dirOf(dbPath));
state->db = SQLite(dbPath);
diff --git a/src/libfetchers/fetch-settings.hh b/src/libfetchers/fetch-settings.hh
index f7cb34a02..2ad8aa327 100644
--- a/src/libfetchers/fetch-settings.hh
+++ b/src/libfetchers/fetch-settings.hh
@@ -70,6 +70,22 @@ struct Settings : public Config
Setting warnDirty{this, true, "warn-dirty",
"Whether to warn about dirty Git/Mercurial trees."};
+ Setting<bool> allowDirtyLocks{
+ this,
+ false,
+ "allow-dirty-locks",
+ R"(
+ Whether to allow dirty inputs (such as dirty Git workdirs)
+ to be locked via their NAR hash. This is generally bad
+ practice since Nix has no way to obtain such inputs if they
+ are subsequently modified. Therefore lock files with dirty
+ locks should generally only be used for local testing, and
+ should not be pushed to other users.
+ )",
+ {},
+ true,
+ Xp::Flakes};
+
Setting<bool> trustTarballsFromGitForges{
this, true, "trust-tarballs-from-git-forges",
R"(
diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc
index 65aa72a6c..fe347a59d 100644
--- a/src/libfetchers/fetch-to-store.cc
+++ b/src/libfetchers/fetch-to-store.cc
@@ -44,6 +44,8 @@ StorePath fetchToStore(
: store.addToStore(
name, path, method, HashAlgorithm::SHA256, {}, filter2, repair);
+ debug(mode == FetchMode::DryRun ? "hashed '%s'" : "copied '%s' to '%s'", path, store.printStorePath(storePath));
+
if (cacheKey && mode == FetchMode::Copy)
fetchers::getCache()->upsert(*cacheKey, store, {}, storePath);
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index e15a460d0..9459db087 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -4,6 +4,7 @@
#include "fetch-to-store.hh"
#include "json-utils.hh"
#include "store-path-accessor.hh"
+#include "fetch-settings.hh"
#include
@@ -66,7 +67,7 @@ Input Input::fromURL(
}
}
- throw Error("input '%s' is unsupported", url.url);
+ throw Error("input '%s' is unsupported", url);
}
Input Input::fromAttrs(const Settings & settings, Attrs && attrs)
@@ -113,7 +114,15 @@ Input Input::fromAttrs(const Settings & settings, Attrs && attrs)
std::optional<std::string> Input::getFingerprint(ref<Store> store) const
{
- return scheme ? scheme->getFingerprint(store, *this) : std::nullopt;
+ if (!scheme) return std::nullopt;
+
+ if (cachedFingerprint) return *cachedFingerprint;
+
+ auto fingerprint = scheme->getFingerprint(store, *this);
+
+ cachedFingerprint = fingerprint;
+
+ return fingerprint;
}
ParsedURL Input::toURL() const
@@ -146,11 +155,23 @@ bool Input::isLocked() const
return scheme && scheme->isLocked(*this);
}
+bool Input::isConsideredLocked(
+ const Settings & settings) const
+{
+ return isLocked() || (settings.allowDirtyLocks && getNarHash());
+}
+
bool Input::isFinal() const
{
return maybeGetBoolAttr(attrs, "__final").value_or(false);
}
+std::optional<std::string> Input::isRelative() const
+{
+ assert(scheme);
+ return scheme->isRelative(*this);
+}
+
Attrs Input::toAttrs() const
{
return attrs;
@@ -307,7 +328,7 @@ std::pair<ref<SourceAccessor>, Input> Input::getAccessorUnchecked(ref<Store> sto
auto accessor = makeStorePathAccessor(store, storePath);
- accessor->fingerprint = scheme->getFingerprint(store, *this);
+ accessor->fingerprint = getFingerprint(store);
return {accessor, *this};
} catch (Error & e) {
@@ -318,7 +339,7 @@ std::pair<ref<SourceAccessor>, Input> Input::getAccessorUnchecked(ref<Store> sto
auto [accessor, result] = scheme->getAccessor(store, *this);
assert(!accessor->fingerprint);
- accessor->fingerprint = scheme->getFingerprint(store, result);
+ accessor->fingerprint = result.getFingerprint(store);
return {accessor, std::move(result)};
}
@@ -337,7 +358,7 @@ void Input::clone(const Path & destDir) const
scheme->clone(*this, destDir);
}
-std::optional<Path> Input::getSourcePath() const
+std::optional<std::filesystem::path> Input::getSourcePath() const
{
assert(scheme);
return scheme->getSourcePath(*this);
@@ -440,7 +461,7 @@ Input InputScheme::applyOverrides(
return input;
}
-std::optional<Path> InputScheme::getSourcePath(const Input & input) const
+std::optional<std::filesystem::path> InputScheme::getSourcePath(const Input & input) const
{
return {};
}
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
index b28ec4568..644c267c1 100644
--- a/src/libfetchers/fetchers.hh
+++ b/src/libfetchers/fetchers.hh
@@ -42,9 +42,9 @@ struct Input
Attrs attrs;
/**
- * path of the parent of this input, used for relative path resolution
+ * Cached result of getFingerprint().
*/
- std::optional<Path> parent;
+ mutable std::optional<std::optional<std::string>> cachedFingerprint;
public:
/**
@@ -90,6 +90,21 @@ public:
*/
bool isLocked() const;
+ /**
+ * Return whether the input is either locked, or, if
+ * `allow-dirty-locks` is enabled, it has a NAR hash. In the
+ * latter case, we can verify the input but we may not be able to
+ * fetch it from anywhere.
+ */
+ bool isConsideredLocked(
+ const Settings & settings) const;
+
+ /**
+ * Only for relative path flakes, i.e. 'path:./foo', returns the
+ * relative path, i.e. './foo'.
+ */
+ std::optional<std::string> isRelative() const;
+
/**
* Return whether this is a "final" input, meaning that fetching
* it will not add, remove or change any attributes. (See
@@ -104,6 +119,11 @@ public:
bool operator ==(const Input & other) const noexcept;
+ bool operator <(const Input & other) const
+ {
+ return attrs < other.attrs;
+ }
+
bool contains(const Input & other) const;
/**
@@ -144,7 +164,7 @@ public:
void clone(const Path & destDir) const;
- std::optional<Path> getSourcePath() const;
+ std::optional<std::filesystem::path> getSourcePath() const;
/**
* Write a file to this input, for input types that support
@@ -227,7 +247,7 @@ struct InputScheme
virtual void clone(const Input & input, const Path & destDir) const;
- virtual std::optional<Path> getSourcePath(const Input & input) const;
+ virtual std::optional<std::filesystem::path> getSourcePath(const Input & input) const;
virtual void putFile(
const Input & input,
@@ -250,6 +270,9 @@ struct InputScheme
virtual bool isLocked(const Input & input) const
{ return false; }
+
+ virtual std::optional<std::string> isRelative(const Input & input) const
+ { return std::nullopt; }
};
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
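
`isConsideredLocked()` declared above combines two conditions: a properly locked input, or (when `allow-dirty-locks` is enabled) an input that at least carries a NAR hash. A hand-wavy stand-alone illustration of that rule; the struct and its fields are stand-ins, not the fetchers API:

```cpp
#include <optional>
#include <string>

struct FakeInput
{
    bool locked = false;
    std::optional<std::string> narHash;
};

// Locked, or dirty-but-verifiable when dirty locks are allowed.
static bool isConsideredLocked(const FakeInput & input, bool allowDirtyLocks)
{
    return input.locked || (allowDirtyLocks && input.narHash.has_value());
}

int main()
{
    FakeInput dirty{.locked = false, .narHash = "sha256-..."};
    return isConsideredLocked(dirty, /* allowDirtyLocks */ true) ? 0 : 1;
}
```
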
diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index 74e68fe12..b54416b10 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -5,6 +5,7 @@
#include "signals.hh"
#include "users.hh"
#include "fs-sink.hh"
+#include "sync.hh"
#include
#include
@@ -437,7 +438,12 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
{
if (!(statusFlags & GIT_STATUS_INDEX_DELETED) &&
!(statusFlags & GIT_STATUS_WT_DELETED))
+ {
info.files.insert(CanonPath(path));
+ if (statusFlags != GIT_STATUS_CURRENT)
+ info.dirtyFiles.insert(CanonPath(path));
+ } else
+ info.deletedFiles.insert(CanonPath(path));
if (statusFlags != GIT_STATUS_CURRENT)
info.isDirty = true;
return 0;
@@ -1262,4 +1268,17 @@ ref<GitRepo> getTarballCache()
return GitRepo::openRepo(repoDir, true, true);
}
+GitRepo::WorkdirInfo GitRepo::getCachedWorkdirInfo(const std::filesystem::path & path)
+{
+ static Sync<std::map<std::filesystem::path, WorkdirInfo>> _cache;
+ {
+ auto cache(_cache.lock());
+ auto i = cache->find(path);
+ if (i != cache->end()) return i->second;
+ }
+ auto workdirInfo = GitRepo::openRepo(path)->getWorkdirInfo();
+ _cache.lock()->emplace(path, workdirInfo);
+ return workdirInfo;
+}
+
}
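
`getCachedWorkdirInfo()` above memoizes workdir inspection per repository path behind the `Sync` wrapper. A simplified sketch of the same pattern with a plain mutex and a dummy loader (names and types are illustrative):

```cpp
#include <filesystem>
#include <map>
#include <mutex>

struct WorkdirInfo { bool isDirty = false; };

// Stand-in for the expensive "open repo and inspect worktree" step.
static WorkdirInfo loadWorkdirInfo(const std::filesystem::path &) { return {}; }

static WorkdirInfo getCachedWorkdirInfo(const std::filesystem::path & path)
{
    static std::mutex mutex;
    static std::map<std::filesystem::path, WorkdirInfo> cache;
    {
        std::lock_guard<std::mutex> lock(mutex);
        if (auto i = cache.find(path); i != cache.end())
            return i->second;
    }
    auto info = loadWorkdirInfo(path); // slow part, done outside the lock
    std::lock_guard<std::mutex> lock(mutex);
    return cache.emplace(path, info).first->second;
}

int main()
{
    return getCachedWorkdirInfo(".").isDirty ? 1 : 0;
}
```
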
diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh
index f45b5a504..ff115143f 100644
--- a/src/libfetchers/git-utils.hh
+++ b/src/libfetchers/git-utils.hh
@@ -59,12 +59,20 @@ struct GitRepo
modified or added, but excluding deleted files. */
std::set<CanonPath> files;
+ /* All modified or added files. */
+ std::set<CanonPath> dirtyFiles;
+
+ /* The deleted files. */
+ std::set<CanonPath> deletedFiles;
+
/* The submodules listed in .gitmodules of this workdir. */
std::vector<Submodule> submodules;
};
virtual WorkdirInfo getWorkdirInfo() = 0;
+ static WorkdirInfo getCachedWorkdirInfo(const std::filesystem::path & path);
+
/* Get the ref that HEAD points to. */
virtual std::optional<std::string> getWorkdirRef() = 0;
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 99d91919e..4523f49ca 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -15,6 +15,7 @@
#include "finally.hh"
#include "fetch-settings.hh"
#include "json-utils.hh"
+#include "archive.hh"
#include
#include
@@ -296,7 +297,7 @@ struct GitInputScheme : InputScheme
Strings args = {"clone"};
- args.push_back(repoInfo.url);
+ args.push_back(repoInfo.locationToArg());
if (auto ref = input.getRef()) {
args.push_back("--branch");
@@ -310,11 +311,9 @@ struct GitInputScheme : InputScheme
runProgram("git", true, args, {}, true);
}
- std::optional<Path> getSourcePath(const Input & input) const override
+ std::optional<std::filesystem::path> getSourcePath(const Input & input) const override
{
- auto repoInfo = getRepoInfo(input);
- if (repoInfo.isLocal) return repoInfo.url;
- return std::nullopt;
+ return getRepoInfo(input).getPath();
}
void putFile(
@@ -324,14 +323,15 @@ struct GitInputScheme : InputScheme
std::optional commitMsg) const override
{
auto repoInfo = getRepoInfo(input);
- if (!repoInfo.isLocal)
+ auto repoPath = repoInfo.getPath();
+ if (!repoPath)
throw Error("cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string());
- writeFile((CanonPath(repoInfo.url) / path).abs(), contents);
+ writeFile(*repoPath / path.rel(), contents);
auto result = runProgram(RunOptions {
.program = "git",
- .args = {"-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "check-ignore", "--quiet", std::string(path.rel())},
+ .args = {"-C", *repoPath, "--git-dir", repoInfo.gitDir, "check-ignore", "--quiet", std::string(path.rel())},
});
auto exitCode =
#ifndef WIN32 // TODO abstract over exit status handling on Windows
@@ -344,7 +344,7 @@ struct GitInputScheme : InputScheme
if (exitCode != 0) {
// The path is not `.gitignore`d, we can add the file.
runProgram("git", true,
- { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) });
+ { "-C", *repoPath, "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) });
if (commitMsg) {
@@ -352,7 +352,7 @@ struct GitInputScheme : InputScheme
logger->pause();
Finally restoreLogger([]() { logger->resume(); });
runProgram("git", true,
- { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-F", "-" },
+ { "-C", *repoPath, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-F", "-" },
*commitMsg);
}
}
@@ -360,24 +360,41 @@ struct GitInputScheme : InputScheme
struct RepoInfo
{
- /* Whether this is a local, non-bare repository. */
- bool isLocal = false;
+ /* Either the path of the repo (for local, non-bare repos), or
+ the URL (which is never a `file` URL). */
+ std::variant<std::filesystem::path, ParsedURL> location;
/* Working directory info: the complete list of files, and
whether the working directory is dirty compared to HEAD. */
GitRepo::WorkdirInfo workdirInfo;
- /* URL of the repo, or its path if isLocal. Never a `file` URL. */
- std::string url;
+ std::string locationToArg() const
+ {
+ return std::visit(
+ overloaded {
+ [&](const std::filesystem::path & path)
+ { return path.string(); },
+ [&](const ParsedURL & url)
+ { return url.to_string(); }
+ }, location);
+ }
+
+ std::optional<std::filesystem::path> getPath() const
+ {
+ if (auto path = std::get_if<std::filesystem::path>(&location))
+ return *path;
+ else
+ return std::nullopt;
+ }
void warnDirty(const Settings & settings) const
{
if (workdirInfo.isDirty) {
if (!settings.allowDirty)
- throw Error("Git tree '%s' is dirty", url);
+ throw Error("Git tree '%s' is dirty", locationToArg());
if (settings.warnDirty)
- warn("Git tree '%s' is dirty", url);
+ warn("Git tree '%s' is dirty", locationToArg());
}
}
@@ -424,13 +441,31 @@ struct GitInputScheme : InputScheme
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
auto url = parseURL(getStrAttr(input.attrs, "url"));
bool isBareRepository = url.scheme == "file" && !pathExists(url.path + "/.git");
- repoInfo.isLocal = url.scheme == "file" && !forceHttp && !isBareRepository;
- repoInfo.url = repoInfo.isLocal ? url.path : url.base;
+ //
+ // FIXME: here we turn a possibly relative path into an absolute path.
+ // This allows relative git flake inputs to be resolved against the
+ // **current working directory** (as in POSIX), which tends to work out
+ // ok in the context of flakes, but is the wrong behavior,
+ // as it should resolve against the flake.nix base directory instead.
+ //
+ // See: https://discourse.nixos.org/t/57783 and #9708
+ //
+ if (url.scheme == "file" && !forceHttp && !isBareRepository) {
+ if (!isAbsolute(url.path)) {
+ warn(
+ "Fetching Git repository '%s', which uses a path relative to the current directory. "
+ "This is not supported and will stop working in a future release. "
+ "See https://github.com/NixOS/nix/issues/12281 for details.",
+ url);
+ }
+ repoInfo.location = std::filesystem::absolute(url.path);
+ } else
+ repoInfo.location = url;
// If this is a local directory and no ref or revision is
// given, then allow the use of an unclean working tree.
- if (!input.getRef() && !input.getRev() && repoInfo.isLocal)
- repoInfo.workdirInfo = GitRepo::openRepo(repoInfo.url)->getWorkdirInfo();
+ if (auto repoPath = repoInfo.getPath(); !input.getRef() && !input.getRev() && repoPath)
+ repoInfo.workdirInfo = GitRepo::getCachedWorkdirInfo(*repoPath);
return repoInfo;
}
@@ -460,7 +495,7 @@ struct GitInputScheme : InputScheme
if (auto revCountAttrs = cache->lookup(key))
return getIntAttr(*revCountAttrs, "revCount");
- Activity act(*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.url));
+ Activity act(*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.locationToArg()));
auto revCount = GitRepo::openRepo(repoDir)->getRevCount(rev);
@@ -471,11 +506,15 @@ struct GitInputScheme : InputScheme
std::string getDefaultRef(const RepoInfo & repoInfo) const
{
- auto head = repoInfo.isLocal
- ? GitRepo::openRepo(repoInfo.url)->getWorkdirRef()
- : readHeadCached(repoInfo.url);
+ auto head = std::visit(
+ overloaded {
+ [&](const std::filesystem::path & path)
+ { return GitRepo::openRepo(path)->getWorkdirRef(); },
+ [&](const ParsedURL & url)
+ { return readHeadCached(url.to_string()); }
+ }, repoInfo.location);
if (!head) {
- warn("could not read HEAD ref from repo at '%s', using 'master'", repoInfo.url);
+ warn("could not read HEAD ref from repo at '%s', using 'master'", repoInfo.locationToArg());
return "master";
}
return *head;
@@ -514,20 +553,19 @@ struct GitInputScheme : InputScheme
auto origRev = input.getRev();
- std::string name = input.getName();
-
auto originalRef = input.getRef();
auto ref = originalRef ? *originalRef : getDefaultRef(repoInfo);
input.attrs.insert_or_assign("ref", ref);
Path repoDir;
- if (repoInfo.isLocal) {
- repoDir = repoInfo.url;
+ if (auto repoPath = repoInfo.getPath()) {
+ repoDir = *repoPath;
if (!input.getRev())
input.attrs.insert_or_assign("rev", GitRepo::openRepo(repoDir)->resolveRef(ref).gitRev());
} else {
- Path cacheDir = getCachePath(repoInfo.url, getShallowAttr(input));
+ auto repoUrl = std::get<ParsedURL>(repoInfo.location);
+ Path cacheDir = getCachePath(repoUrl.to_string(), getShallowAttr(input));
repoDir = cacheDir;
repoInfo.gitDir = ".";
@@ -537,7 +575,7 @@ struct GitInputScheme : InputScheme
auto repo = GitRepo::openRepo(cacheDir, true, true);
// We need to set the origin so resolving submodule URLs works
- repo->setRemote("origin", repoInfo.url);
+ repo->setRemote("origin", repoUrl.to_string());
Path localRefFile =
ref.compare(0, 5, "refs/") == 0
@@ -576,11 +614,11 @@ struct GitInputScheme : InputScheme
? ref
: "refs/heads/" + ref;
- repo->fetch(repoInfo.url, fmt("%s:%s", fetchRef, fetchRef), getShallowAttr(input));
+ repo->fetch(repoUrl.to_string(), fmt("%s:%s", fetchRef, fetchRef), getShallowAttr(input));
} catch (Error & e) {
if (!pathExists(localRefFile)) throw;
logError(e.info());
- warn("could not update local clone of Git repository '%s'; continuing with the most recent version", repoInfo.url);
+ warn("could not update local clone of Git repository '%s'; continuing with the most recent version", repoInfo.locationToArg());
}
try {
@@ -589,8 +627,8 @@ struct GitInputScheme : InputScheme
} catch (Error & e) {
warn("could not update mtime for file '%s': %s", localRefFile, e.info().msg);
}
- if (!originalRef && !storeCachedHead(repoInfo.url, ref))
- warn("could not update cached head '%s' for '%s'", ref, repoInfo.url);
+ if (!originalRef && !storeCachedHead(repoUrl.to_string(), ref))
+ warn("could not update cached head '%s' for '%s'", ref, repoInfo.locationToArg());
}
if (auto rev = input.getRev()) {
@@ -602,8 +640,7 @@ struct GitInputScheme : InputScheme
"allRefs = true;" ANSI_NORMAL " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".",
rev->gitRev(),
ref,
- repoInfo.url
- );
+ repoInfo.locationToArg());
} else
input.attrs.insert_or_assign("rev", repo->resolveRef(ref).gitRev());
@@ -615,7 +652,7 @@ struct GitInputScheme : InputScheme
auto isShallow = repo->isShallow();
if (isShallow && !getShallowAttr(input))
- throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified", repoInfo.url);
+ throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified", repoInfo.locationToArg());
// FIXME: check whether rev is an ancestor of ref?
@@ -630,7 +667,7 @@ struct GitInputScheme : InputScheme
infoAttrs.insert_or_assign("revCount",
getRevCount(repoInfo, repoDir, rev));
- printTalkative("using revision %s of repo '%s'", rev.gitRev(), repoInfo.url);
+ printTalkative("using revision %s of repo '%s'", rev.gitRev(), repoInfo.locationToArg());
verifyCommit(input, repo);
@@ -684,21 +721,23 @@ struct GitInputScheme : InputScheme
RepoInfo & repoInfo,
Input && input) const
{
+ auto repoPath = repoInfo.getPath().value();
+
if (getSubmodulesAttr(input))
/* Create mountpoints for the submodules. */
for (auto & submodule : repoInfo.workdirInfo.submodules)
repoInfo.workdirInfo.files.insert(submodule.path);
- auto repo = GitRepo::openRepo(repoInfo.url, false, false);
+ auto repo = GitRepo::openRepo(repoPath, false, false);
auto exportIgnore = getExportIgnoreAttr(input);
ref<SourceAccessor> accessor =
repo->getAccessor(repoInfo.workdirInfo,
exportIgnore,
- makeNotAllowedError(repoInfo.url));
+ makeNotAllowedError(repoInfo.locationToArg()));
- accessor->setPathDisplay(repoInfo.url);
+ accessor->setPathDisplay(repoInfo.locationToArg());
/* If the repo has submodules, return a mounted input accessor
consisting of the accessor for the top-level repo and the
@@ -707,10 +746,10 @@ struct GitInputScheme : InputScheme
std::map<CanonPath, nix::ref<SourceAccessor>> mounts;
for (auto & submodule : repoInfo.workdirInfo.submodules) {
- auto submodulePath = CanonPath(repoInfo.url) / submodule.path;
+ auto submodulePath = repoPath / submodule.path.rel();
fetchers::Attrs attrs;
attrs.insert_or_assign("type", "git");
- attrs.insert_or_assign("url", submodulePath.abs());
+ attrs.insert_or_assign("url", submodulePath.string());
attrs.insert_or_assign("exportIgnore", Explicit{ exportIgnore });
attrs.insert_or_assign("submodules", Explicit{ true });
// TODO: fall back to getAccessorFromCommit-like fetch when submodules aren't checked out
@@ -734,7 +773,7 @@ struct GitInputScheme : InputScheme
}
if (!repoInfo.workdirInfo.isDirty) {
- auto repo = GitRepo::openRepo(repoInfo.url);
+ auto repo = GitRepo::openRepo(repoPath);
if (auto ref = repo->getWorkdirRef())
input.attrs.insert_or_assign("ref", *ref);
@@ -744,7 +783,7 @@ struct GitInputScheme : InputScheme
input.attrs.insert_or_assign("rev", rev.gitRev());
input.attrs.insert_or_assign("revCount",
- rev == nullRev ? 0 : getRevCount(repoInfo, repoInfo.url, rev));
+ rev == nullRev ? 0 : getRevCount(repoInfo, repoPath, rev));
verifyCommit(input, repo);
} else {
@@ -763,7 +802,7 @@ struct GitInputScheme : InputScheme
input.attrs.insert_or_assign(
"lastModified",
repoInfo.workdirInfo.headRev
- ? getLastModified(repoInfo, repoInfo.url, *repoInfo.workdirInfo.headRev)
+ ? getLastModified(repoInfo, repoPath, *repoInfo.workdirInfo.headRev)
: 0);
return {accessor, std::move(input)};
@@ -786,7 +825,7 @@ struct GitInputScheme : InputScheme
}
auto [accessor, final] =
- input.getRef() || input.getRev() || !repoInfo.isLocal
+ input.getRef() || input.getRev() || !repoInfo.getPath()
? getAccessorFromCommit(store, repoInfo, std::move(input))
: getAccessorFromWorkdir(store, repoInfo, std::move(input));
@@ -795,10 +834,33 @@ struct GitInputScheme : InputScheme
std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const override
{
+ auto makeFingerprint = [&](const Hash & rev)
+ {
+ return rev.gitRev() + (getSubmodulesAttr(input) ? ";s" : "") + (getExportIgnoreAttr(input) ? ";e" : "");
+ };
+
if (auto rev = input.getRev())
- return rev->gitRev() + (getSubmodulesAttr(input) ? ";s" : "") + (getExportIgnoreAttr(input) ? ";e" : "");
- else
+ return makeFingerprint(*rev);
+ else {
+ auto repoInfo = getRepoInfo(input);
+ if (auto repoPath = repoInfo.getPath(); repoPath && repoInfo.workdirInfo.headRev && repoInfo.workdirInfo.submodules.empty()) {
+ /* Calculate a fingerprint that takes into account the
+ deleted and modified/added files. */
+ HashSink hashSink{HashAlgorithm::SHA512};
+ for (auto & file : repoInfo.workdirInfo.dirtyFiles) {
+ writeString("modified:", hashSink);
+ writeString(file.abs(), hashSink);
+ dumpPath(*repoPath / file.rel(), hashSink);
+ }
+ for (auto & file : repoInfo.workdirInfo.deletedFiles) {
+ writeString("deleted:", hashSink);
+ writeString(file.abs(), hashSink);
+ }
+ return makeFingerprint(*repoInfo.workdirInfo.headRev)
+ + ";d=" + hashSink.finish().first.to_string(HashFormat::Base16, false);
+ }
return std::nullopt;
+ }
}
bool isLocked(const Input & input) const override
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 308cff33a..185941988 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -50,7 +50,7 @@ struct GitArchiveInputScheme : InputScheme
else if (std::regex_match(path[2], refRegex))
ref = path[2];
else
- throw BadURL("in URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[2]);
+ throw BadURL("in URL '%s', '%s' is not a commit hash or branch/tag name", url, path[2]);
} else if (size > 3) {
std::string rs;
for (auto i = std::next(path.begin(), 2); i != path.end(); i++) {
@@ -63,34 +63,34 @@ struct GitArchiveInputScheme : InputScheme
if (std::regex_match(rs, refRegex)) {
ref = rs;
} else {
- throw BadURL("in URL '%s', '%s' is not a branch/tag name", url.url, rs);
+ throw BadURL("in URL '%s', '%s' is not a branch/tag name", url, rs);
}
} else if (size < 2)
- throw BadURL("URL '%s' is invalid", url.url);
+ throw BadURL("URL '%s' is invalid", url);
for (auto &[name, value] : url.query) {
if (name == "rev") {
if (rev)
- throw BadURL("URL '%s' contains multiple commit hashes", url.url);
+ throw BadURL("URL '%s' contains multiple commit hashes", url);
rev = Hash::parseAny(value, HashAlgorithm::SHA1);
}
else if (name == "ref") {
if (!std::regex_match(value, refRegex))
- throw BadURL("URL '%s' contains an invalid branch/tag name", url.url);
+ throw BadURL("URL '%s' contains an invalid branch/tag name", url);
if (ref)
- throw BadURL("URL '%s' contains multiple branch/tag names", url.url);
+ throw BadURL("URL '%s' contains multiple branch/tag names", url);
ref = value;
}
else if (name == "host") {
if (!std::regex_match(value, hostRegex))
- throw BadURL("URL '%s' contains an invalid instance host", url.url);
+ throw BadURL("URL '%s' contains an invalid instance host", url);
host_url = value;
}
// FIXME: barf on unsupported attributes
}
if (ref && rev)
- throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url.url, *ref, rev->gitRev());
+ throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url, *ref, rev->gitRev());
Input input{settings};
input.attrs.insert_or_assign("type", std::string { schemeName() });
diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc
index 2e5cd82c7..0e1b86711 100644
--- a/src/libfetchers/indirect.cc
+++ b/src/libfetchers/indirect.cc
@@ -26,16 +26,16 @@ struct IndirectInputScheme : InputScheme
else if (std::regex_match(path[1], refRegex))
ref = path[1];
else
- throw BadURL("in flake URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[1]);
+ throw BadURL("in flake URL '%s', '%s' is not a commit hash or branch/tag name", url, path[1]);
} else if (path.size() == 3) {
if (!std::regex_match(path[1], refRegex))
- throw BadURL("in flake URL '%s', '%s' is not a branch/tag name", url.url, path[1]);
+ throw BadURL("in flake URL '%s', '%s' is not a branch/tag name", url, path[1]);
ref = path[1];
if (!std::regex_match(path[2], revRegex))
- throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]);
+ throw BadURL("in flake URL '%s', '%s' is not a commit hash", url, path[2]);
rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
} else
- throw BadURL("GitHub URL '%s' is invalid", url.url);
+ throw BadURL("GitHub URL '%s' is invalid", url);
std::string id = path[0];
if (!std::regex_match(id, flakeRegex))
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index 2c987f79d..61cbca202 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -126,7 +126,7 @@ struct MercurialInputScheme : InputScheme
return res;
}
- std::optional<Path> getSourcePath(const Input & input) const override
+ std::optional<std::filesystem::path> getSourcePath(const Input & input) const override
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
if (url.scheme == "file" && !input.getRef() && !input.getRev())
@@ -161,7 +161,7 @@ struct MercurialInputScheme : InputScheme
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
bool isLocal = url.scheme == "file";
- return {isLocal, isLocal ? url.path : url.base};
+ return {isLocal, isLocal ? url.path : url.to_string()};
}
StorePath fetchToStore(ref<Store> store, Input & input) const
diff --git a/src/libfetchers/meson.build b/src/libfetchers/meson.build
index d4f202796..58afbb7d0 100644
--- a/src/libfetchers/meson.build
+++ b/src/libfetchers/meson.build
@@ -4,8 +4,6 @@ project('nix-fetchers', 'cpp',
'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level
'warning_level=1',
- 'debug=true',
- 'optimization=2',
'errorlogs=true', # Please print logs for tests that fail
],
meson_version : '>= 1.1',
@@ -14,7 +12,7 @@ project('nix-fetchers', 'cpp',
cxx = meson.get_compiler('cpp')
-subdir('build-utils-meson/deps-lists')
+subdir('nix-meson-build-support/deps-lists')
configdata = configuration_data()
@@ -24,9 +22,7 @@ deps_public_maybe_subproject = [
dependency('nix-util'),
dependency('nix-store'),
]
-subdir('build-utils-meson/subprojects')
-
-subdir('build-utils-meson/threads')
+subdir('nix-meson-build-support/subprojects')
nlohmann_json = dependency('nlohmann_json', version : '>= 3.9')
deps_public += nlohmann_json
@@ -43,7 +39,7 @@ add_project_arguments(
language : 'cpp',
)
-subdir('build-utils-meson/diagnostics')
+subdir('nix-meson-build-support/common')
sources = files(
'attrs.cc',
@@ -52,15 +48,15 @@ sources = files(
'fetch-to-store.cc',
'fetchers.cc',
'filtering-source-accessor.cc',
- 'git.cc',
'git-utils.cc',
+ 'git.cc',
'github.cc',
'indirect.cc',
'mercurial.cc',
'mounted-source-accessor.cc',
'path.cc',
- 'store-path-accessor.cc',
'registry.cc',
+ 'store-path-accessor.cc',
'tarball.cc',
)
@@ -71,10 +67,10 @@ headers = files(
'cache.hh',
'fetch-settings.hh',
'fetch-to-store.hh',
+ 'fetchers.hh',
'filtering-source-accessor.hh',
'git-utils.hh',
'mounted-source-accessor.hh',
- 'fetchers.hh',
'registry.hh',
'store-path-accessor.hh',
'tarball.hh',
@@ -92,4 +88,4 @@ install_headers(headers, subdir : 'nix', preserve_path : true)
libraries_private = []
-subdir('build-utils-meson/export')
+subdir('nix-meson-build-support/export')
diff --git a/src/libfetchers/nix-meson-build-support b/src/libfetchers/nix-meson-build-support
new file mode 120000
index 000000000..0b140f56b
--- /dev/null
+++ b/src/libfetchers/nix-meson-build-support
@@ -0,0 +1 @@
+../../nix-meson-build-support
\ No newline at end of file
diff --git a/src/libfetchers/package.nix b/src/libfetchers/package.nix
index 70973bdb2..d4ca18555 100644
--- a/src/libfetchers/package.nix
+++ b/src/libfetchers/package.nix
@@ -1,5 +1,4 @@
{ lib
-, stdenv
, mkMesonLibrary
, nix-util
@@ -22,8 +21,8 @@ mkMesonLibrary (finalAttrs: {
workDir = ./.;
fileset = fileset.unions [
- ../../build-utils-meson
- ./build-utils-meson
+ ../../nix-meson-build-support
+ ./nix-meson-build-support
../../.version
./.version
./meson.build
@@ -49,10 +48,6 @@ mkMesonLibrary (finalAttrs: {
echo ${version} > ../../.version
'';
- env = lib.optionalAttrs (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) {
- LDFLAGS = "-fuse-ld=gold";
- };
-
meta = {
platforms = lib.platforms.unix ++ lib.platforms.windows;
};
diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc
index 246b68c3a..9d1cce0f3 100644
--- a/src/libfetchers/path.cc
+++ b/src/libfetchers/path.cc
@@ -14,7 +14,7 @@ struct PathInputScheme : InputScheme
if (url.scheme != "path") return {};
if (url.authority && *url.authority != "")
- throw Error("path URL '%s' should not have an authority ('%s')", url.url, *url.authority);
+ throw Error("path URL '%s' should not have an authority ('%s')", url, *url.authority);
Input input{settings};
input.attrs.insert_or_assign("type", "path");
@@ -27,10 +27,10 @@ struct PathInputScheme : InputScheme
if (auto n = string2Int(value))
input.attrs.insert_or_assign(name, *n);
else
- throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
+ throw Error("path URL '%s' has invalid parameter '%s'", url, name);
}
else
- throw Error("path URL '%s' has unsupported parameter '%s'", url.to_string(), name);
+ throw Error("path URL '%s' has unsupported parameter '%s'", url, name);
return input;
}
@@ -80,9 +80,9 @@ struct PathInputScheme : InputScheme
};
}
- std::optional<Path> getSourcePath(const Input & input) const override
+ std::optional<std::filesystem::path> getSourcePath(const Input & input) const override
{
- return getStrAttr(input.attrs, "path");
+ return getAbsPath(input);
}
void putFile(
@@ -91,13 +91,13 @@ struct PathInputScheme : InputScheme
std::string_view contents,
std::optional<std::string> commitMsg) const override
{
- writeFile((CanonPath(getAbsPath(input)) / path).abs(), contents);
+ writeFile(getAbsPath(input) / path.rel(), contents);
}
- std::optional<std::string> isRelative(const Input & input) const
+ std::optional<std::string> isRelative(const Input & input) const override
{
auto path = getStrAttr(input.attrs, "path");
- if (hasPrefix(path, "/"))
+ if (isAbsolute(path))
return std::nullopt;
else
return path;
@@ -108,12 +108,12 @@ struct PathInputScheme : InputScheme
return (bool) input.getNarHash();
}
- CanonPath getAbsPath(const Input & input) const
+ std::filesystem::path getAbsPath(const Input & input) const
{
auto path = getStrAttr(input.attrs, "path");
- if (path[0] == '/')
- return CanonPath(path);
+ if (isAbsolute(path))
+ return canonPath(path);
throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
}
@@ -121,31 +121,14 @@ struct PathInputScheme : InputScheme
std::pair<ref<SourceAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
{
Input input(_input);
- std::string absPath;
auto path = getStrAttr(input.attrs, "path");
- if (path[0] != '/') {
- if (!input.parent)
- throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
+ auto absPath = getAbsPath(input);
- auto parent = canonPath(*input.parent);
-
- // the path isn't relative, prefix it
- absPath = nix::absPath(path, parent);
-
- // for security, ensure that if the parent is a store path, it's inside it
- if (store->isInStore(parent)) {
- auto storePath = store->printStorePath(store->toStorePath(parent).first);
- if (!isDirOrInDir(absPath, storePath))
- throw BadStorePath("relative path '%s' points outside of its parent's store path '%s'", path, storePath);
- }
- } else
- absPath = path;
-
- Activity act(*logger, lvlTalkative, actUnknown, fmt("copying '%s'", absPath));
+ Activity act(*logger, lvlTalkative, actUnknown, fmt("copying '%s' to the store", absPath));
// FIXME: check whether access to 'path' is allowed.
- auto storePath = store->maybeParseStorePath(absPath);
+ auto storePath = store->maybeParseStorePath(absPath.string());
if (storePath)
store->addTempRoot(*storePath);
@@ -154,7 +137,7 @@ struct PathInputScheme : InputScheme
if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath)) {
// FIXME: try to substitute storePath.
auto src = sinkToSource([&](Sink & sink) {
- mtime = dumpPathAndGetMtime(absPath, sink, defaultPathFilter);
+ mtime = dumpPathAndGetMtime(absPath.string(), sink, defaultPathFilter);
});
storePath = store->addToStoreFromDump(*src, "source");
}
@@ -176,7 +159,7 @@ struct PathInputScheme : InputScheme
store object and the subpath. */
auto path = getAbsPath(input);
try {
- auto [storePath, subPath] = store->toStorePath(path.abs());
+ auto [storePath, subPath] = store->toStorePath(path.string());
auto info = store->queryPathInfo(storePath);
return fmt("path:%s:%s", info->narHash.to_string(HashFormat::Base16, false), subPath);
} catch (Error &) {
diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc
index 7f7a09053..c18e12d23 100644
--- a/src/libfetchers/registry.cc
+++ b/src/libfetchers/registry.cc
@@ -94,12 +94,9 @@ void Registry::add(
void Registry::remove(const Input & input)
{
- // FIXME: use C++20 std::erase.
- for (auto i = entries.begin(); i != entries.end(); )
- if (i->from == input)
- i = entries.erase(i);
- else
- ++i;
+ entries.erase(
+ std::remove_if(entries.begin(), entries.end(), [&](const Entry & entry) { return entry.from == input; }),
+ entries.end());
}
static Path getSystemRegistryPath()
@@ -156,7 +153,7 @@ static std::shared_ptr<Registry> getGlobalRegistry(const Settings & settings, ref<Store> store)
return std::make_shared<Registry>(settings, Registry::Global); // empty registry
}
- if (!hasPrefix(path, "/")) {
+ if (!isAbsolute(path)) {
auto storePath = downloadFile(store, path, "flake-registry.json").storePath;
if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
store2->addPermRoot(storePath, getCacheDir() + "/flake-registry.json");
@@ -181,7 +178,8 @@ Registries getRegistries(const Settings & settings, ref<Store> store)
std::pair<Input, Attrs> lookupInRegistries(
ref<Store> store,
- const Input & _input)
+ const Input & _input,
+ const RegistryFilter & filter)
{
Attrs extraAttrs;
int n = 0;
@@ -193,6 +191,7 @@ std::pair<Input, Attrs> lookupInRegistries(
if (n > 100) throw Error("cycle detected in flake registry for '%s'", input.to_string());
for (auto & registry : getRegistries(*input.settings, store)) {
+ if (filter && !filter(registry->type)) continue;
// FIXME: O(n)
for (auto & entry : registry->entries) {
if (entry.exact) {
diff --git a/src/libfetchers/registry.hh b/src/libfetchers/registry.hh
index 0d68ac395..8f47e1590 100644
--- a/src/libfetchers/registry.hh
+++ b/src/libfetchers/registry.hh
@@ -65,8 +65,15 @@ void overrideRegistry(
const Input & to,
const Attrs & extraAttrs);
+using RegistryFilter = std::function<bool(Registry::RegistryType)>;
+
+/**
+ * Rewrite a flakeref using the registries. If `filter` is set, only
+ * use the registries for which the filter function returns true.
+ */
std::pair<Input, Attrs> lookupInRegistries(
ref<Store> store,
- const Input & input);
+ const Input & input,
+ const RegistryFilter & filter = {});
}
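
To illustrate the new hook, here is a minimal sketch (not part of the patch) of how a caller could restrict registry resolution using the `RegistryFilter` parameter added above. It mirrors the lambda introduced in `fetchOrSubstituteTree` in `src/libflake/flake/flake.cc` further down; the `resolveViaTrustedRegistries` wrapper and its variable names are illustrative only:

```cpp
#include "registry.hh"   // lookupInRegistries, RegistryFilter (this patch)
#include "store-api.hh"  // nix::Store, nix::ref

using namespace nix;

// Sketch: resolve an indirect flake input while consulting only the CLI
// flag registry and the global registry, skipping user/system registries.
std::pair<fetchers::Input, fetchers::Attrs> resolveViaTrustedRegistries(
    ref<Store> store, const fetchers::Input & input)
{
    return fetchers::lookupInRegistries(store, input,
        [](fetchers::Registry::RegistryType type) {
            return type == fetchers::Registry::Flag
                || type == fetchers::Registry::Global;
        });
}
```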
diff --git a/src/libflake-c/.version b/src/libflake-c/.version
new file mode 120000
index 000000000..b7badcd0c
--- /dev/null
+++ b/src/libflake-c/.version
@@ -0,0 +1 @@
+../../.version
\ No newline at end of file
diff --git a/src/libflake-c/meson.build b/src/libflake-c/meson.build
new file mode 100644
index 000000000..85d20644d
--- /dev/null
+++ b/src/libflake-c/meson.build
@@ -0,0 +1,91 @@
+project('nix-flake-c', 'cpp',
+ version : files('.version'),
+ default_options : [
+ 'cpp_std=c++2a',
+ # TODO(Qyriad): increase the warning level
+ 'warning_level=1',
+ 'errorlogs=true', # Please print logs for tests that fail
+ ],
+ meson_version : '>= 1.1',
+ license : 'LGPL-2.1-or-later',
+)
+
+cxx = meson.get_compiler('cpp')
+
+subdir('nix-meson-build-support/deps-lists')
+
+configdata = configuration_data()
+
+deps_private_maybe_subproject = [
+ dependency('nix-util'),
+ dependency('nix-store'),
+ dependency('nix-expr'),
+ dependency('nix-flake'),
+]
+deps_public_maybe_subproject = [
+ dependency('nix-util-c'),
+ dependency('nix-store-c'),
+ dependency('nix-expr-c'),
+]
+subdir('nix-meson-build-support/subprojects')
+
+# TODO rename, because it will conflict with downstream projects
+configdata.set_quoted('PACKAGE_VERSION', meson.project_version())
+
+config_h = configure_file(
+ configuration : configdata,
+ output : 'config-flake.h',
+)
+
+add_project_arguments(
+ # TODO(Qyriad): Yes this is how the autoconf+Make system did it.
+ # It would be nice for our headers to be idempotent instead.
+
+ # From C++ libraries, only for internals
+ '-include', 'config-util.hh',
+ '-include', 'config-store.hh',
+ '-include', 'config-expr.hh',
+ # not generated (yet?)
+ # '-include', 'config-flake.hh',
+
+ # From C libraries, for our public, installed headers too
+ '-include', 'config-util.h',
+ '-include', 'config-store.h',
+ '-include', 'config-expr.h',
+ '-include', 'config-flake.h',
+ language : 'cpp',
+)
+
+subdir('nix-meson-build-support/common')
+
+sources = files(
+ 'nix_api_flake.cc',
+)
+
+include_dirs = [include_directories('.')]
+
+headers = [config_h] + files(
+ 'nix_api_flake.h',
+)
+
+# TODO move this header to libexpr, maybe don't use it in tests?
+headers += files('nix_api_flake.h')
+
+subdir('nix-meson-build-support/export-all-symbols')
+subdir('nix-meson-build-support/windows-version')
+
+this_library = library(
+ 'nixflakec',
+ sources,
+ dependencies : deps_public + deps_private + deps_other,
+ include_directories : include_dirs,
+ link_args: linker_export_flags,
+ prelink : true, # For C++ static initializers
+ install : true,
+)
+
+install_headers(headers, subdir : 'nix', preserve_path : true)
+
+libraries_private = []
+
+subdir('nix-meson-build-support/export')
diff --git a/src/libflake-c/nix-meson-build-support b/src/libflake-c/nix-meson-build-support
new file mode 120000
index 000000000..0b140f56b
--- /dev/null
+++ b/src/libflake-c/nix-meson-build-support
@@ -0,0 +1 @@
+../../nix-meson-build-support
\ No newline at end of file
diff --git a/src/libflake-c/nix_api_flake.cc b/src/libflake-c/nix_api_flake.cc
new file mode 100644
index 000000000..17cf6572d
--- /dev/null
+++ b/src/libflake-c/nix_api_flake.cc
@@ -0,0 +1,32 @@
+#include "nix_api_flake.h"
+#include "nix_api_flake_internal.hh"
+#include "nix_api_util_internal.h"
+
+#include "flake/flake.hh"
+
+nix_flake_settings * nix_flake_settings_new(nix_c_context * context)
+{
+ try {
+ auto settings = nix::make_ref<nix::flake::Settings>();
+ return new nix_flake_settings{settings};
+ }
+ NIXC_CATCH_ERRS_NULL
+}
+
+void nix_flake_settings_free(nix_flake_settings * settings)
+{
+ delete settings;
+}
+
+nix_err nix_flake_init_global(nix_c_context * context, nix_flake_settings * settings)
+{
+ static std::shared_ptr<nix::flake::Settings> registeredSettings;
+ try {
+ if (registeredSettings)
+ throw nix::Error("nix_flake_init_global already initialized");
+
+ registeredSettings = settings->settings;
+ nix::flake::initLib(*registeredSettings);
+ }
+ NIXC_CATCH_ERRS
+}
diff --git a/src/libflake-c/nix_api_flake.h b/src/libflake-c/nix_api_flake.h
new file mode 100644
index 000000000..80051298d
--- /dev/null
+++ b/src/libflake-c/nix_api_flake.h
@@ -0,0 +1,46 @@
+#ifndef NIX_API_FLAKE_H
+#define NIX_API_FLAKE_H
+/** @defgroup libflake libflake
+ * @brief Bindings to the Nix Flakes library
+ *
+ * @{
+ */
+/** @file
+ * @brief Main entry for the libflake C bindings
+ */
+
+#include "nix_api_store.h"
+#include "nix_api_util.h"
+#include "nix_api_expr.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+// cffi start
+
+typedef struct nix_flake_settings nix_flake_settings;
+
+// Function prototypes
+/**
+ * Create a nix_flake_settings initialized with default values.
+ * @param[out] context Optional, stores error information
+ * @return A new nix_flake_settings or NULL on failure.
+ * @see nix_flake_settings_free
+ */
+nix_flake_settings * nix_flake_settings_new(nix_c_context * context);
+
+/**
+ * @brief Release the resources associated with a nix_flake_settings.
+ */
+void nix_flake_settings_free(nix_flake_settings * settings);
+
+/**
+ * @brief Register Flakes support process-wide.
+ */
+nix_err nix_flake_init_global(nix_c_context * context, nix_flake_settings * settings);
+
+#ifdef __cplusplus
+} // extern "C"
+#endif
+
+#endif
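
As a usage note, the following is a rough sketch (not part of the patch) of driving the new libflake C API from a client, modelled on the `nix_api_flake.cc` test added below; error handling is abbreviated, and the store/eval-state setup from the other C API headers is assumed rather than shown:

```cpp
#include "nix_api_util.h"
#include "nix_api_store.h"
#include "nix_api_expr.h"
#include "nix_api_flake.h"

int main()
{
    nix_c_context * ctx = nix_c_context_create();

    // Initialise the underlying libraries first, as the test below does.
    nix_libstore_init(ctx);
    nix_libexpr_init(ctx);

    nix_flake_settings * settings = nix_flake_settings_new(ctx);
    if (!settings)
        return 1; // ctx carries the error message

    // Registers flake support (e.g. builtins.getFlake) process-wide,
    // affecting eval states created afterwards.
    nix_flake_init_global(ctx, settings);

    // ... open a store, build an eval state, evaluate flake expressions ...

    nix_flake_settings_free(settings);
    nix_c_context_free(ctx);
    return 0;
}
```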
diff --git a/src/libflake-c/nix_api_flake_internal.hh b/src/libflake-c/nix_api_flake_internal.hh
new file mode 100644
index 000000000..4c154a342
--- /dev/null
+++ b/src/libflake-c/nix_api_flake_internal.hh
@@ -0,0 +1,9 @@
+#pragma once
+
+#include "ref.hh"
+#include "flake/settings.hh"
+
+struct nix_flake_settings
+{
+ nix::ref<nix::flake::Settings> settings;
+};
diff --git a/src/libflake-c/package.nix b/src/libflake-c/package.nix
new file mode 100644
index 000000000..dcd6c4966
--- /dev/null
+++ b/src/libflake-c/package.nix
@@ -0,0 +1,55 @@
+{ lib
+, mkMesonLibrary
+
+, nix-store-c
+, nix-expr-c
+, nix-flake
+
+# Configuration Options
+
+, version
+}:
+
+let
+ inherit (lib) fileset;
+in
+
+mkMesonLibrary (finalAttrs: {
+ pname = "nix-flake-c";
+ inherit version;
+
+ workDir = ./.;
+ fileset = fileset.unions [
+ ../../nix-meson-build-support
+ ./nix-meson-build-support
+ ../../.version
+ ./.version
+ ./meson.build
+ # ./meson.options
+ (fileset.fileFilter (file: file.hasExt "cc") ./.)
+ (fileset.fileFilter (file: file.hasExt "hh") ./.)
+ (fileset.fileFilter (file: file.hasExt "h") ./.)
+ ];
+
+ propagatedBuildInputs = [
+ nix-expr-c
+ nix-store-c
+ nix-flake
+ ];
+
+ preConfigure =
+ # "Inline" .version so it's not a symlink, and includes the suffix.
+ # Do the meson utils, without modification.
+ ''
+ chmod u+w ./.version
+ echo ${version} > ../../.version
+ '';
+
+ mesonFlags = [
+ ];
+
+ meta = {
+ platforms = lib.platforms.unix ++ lib.platforms.windows;
+ };
+
+})
diff --git a/src/libflake-tests/build-utils-meson b/src/libflake-tests/build-utils-meson
deleted file mode 120000
index 5fff21bab..000000000
--- a/src/libflake-tests/build-utils-meson
+++ /dev/null
@@ -1 +0,0 @@
-../../build-utils-meson
\ No newline at end of file
diff --git a/src/libflake-tests/flakeref.cc b/src/libflake-tests/flakeref.cc
index d704a26d3..2b1f5124b 100644
--- a/src/libflake-tests/flakeref.cc
+++ b/src/libflake-tests/flakeref.cc
@@ -7,18 +7,60 @@ namespace nix {
/* ----------- tests for flake/flakeref.hh --------------------------------------------------*/
- /* ----------------------------------------------------------------------------
- * to_string
- * --------------------------------------------------------------------------*/
+ TEST(parseFlakeRef, path) {
+ experimentalFeatureSettings.experimentalFeatures.get().insert(Xp::Flakes);
+
+ fetchers::Settings fetchSettings;
+
+ {
+ auto s = "/foo/bar";
+ auto flakeref = parseFlakeRef(fetchSettings, s);
+ ASSERT_EQ(flakeref.to_string(), "path:/foo/bar");
+ }
+
+ {
+ auto s = "/foo/bar?revCount=123&rev=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
+ auto flakeref = parseFlakeRef(fetchSettings, s);
+ ASSERT_EQ(flakeref.to_string(), "path:/foo/bar?rev=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa&revCount=123");
+ }
+
+ {
+ auto s = "/foo/bar?xyzzy=123";
+ EXPECT_THROW(
+ parseFlakeRef(fetchSettings, s),
+ Error);
+ }
+
+ {
+ auto s = "/foo/bar#bla";
+ EXPECT_THROW(
+ parseFlakeRef(fetchSettings, s),
+ Error);
+ }
+
+ {
+ auto s = "/foo/bar#bla";
+ auto [flakeref, fragment] = parseFlakeRefWithFragment(fetchSettings, s);
+ ASSERT_EQ(flakeref.to_string(), "path:/foo/bar");
+ ASSERT_EQ(fragment, "bla");
+ }
+
+ {
+ auto s = "/foo/bar?revCount=123#bla";
+ auto [flakeref, fragment] = parseFlakeRefWithFragment(fetchSettings, s);
+ ASSERT_EQ(flakeref.to_string(), "path:/foo/bar?revCount=123");
+ ASSERT_EQ(fragment, "bla");
+ }
+ }
TEST(to_string, doesntReencodeUrl) {
fetchers::Settings fetchSettings;
auto s = "http://localhost:8181/test/+3d.tar.gz";
auto flakeref = parseFlakeRef(fetchSettings, s);
- auto parsed = flakeref.to_string();
+ auto unparsed = flakeref.to_string();
auto expected = "http://localhost:8181/test/%2B3d.tar.gz";
- ASSERT_EQ(parsed, expected);
+ ASSERT_EQ(unparsed, expected);
}
}
diff --git a/src/libflake-tests/meson.build b/src/libflake-tests/meson.build
index 592a7493b..1c8765f21 100644
--- a/src/libflake-tests/meson.build
+++ b/src/libflake-tests/meson.build
@@ -4,8 +4,6 @@ project('nix-flake-tests', 'cpp',
'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level
'warning_level=1',
- 'debug=true',
- 'optimization=2',
'errorlogs=true', # Please print logs for tests that fail
],
meson_version : '>= 1.1',
@@ -14,20 +12,19 @@ project('nix-flake-tests', 'cpp',
cxx = meson.get_compiler('cpp')
-subdir('build-utils-meson/deps-lists')
+subdir('nix-meson-build-support/deps-lists')
deps_private_maybe_subproject = [
dependency('nix-expr-test-support'),
dependency('nix-flake'),
+ dependency('nix-flake-c'),
]
deps_public_maybe_subproject = [
]
-subdir('build-utils-meson/subprojects')
+subdir('nix-meson-build-support/subprojects')
-subdir('build-utils-meson/threads')
-
-subdir('build-utils-meson/export-all-symbols')
-subdir('build-utils-meson/windows-version')
+subdir('nix-meson-build-support/export-all-symbols')
+subdir('nix-meson-build-support/windows-version')
rapidcheck = dependency('rapidcheck')
deps_private += rapidcheck
@@ -44,10 +41,11 @@ add_project_arguments(
language : 'cpp',
)
-subdir('build-utils-meson/diagnostics')
+subdir('nix-meson-build-support/common')
sources = files(
'flakeref.cc',
+ 'nix_api_flake.cc',
'url-name.cc',
)
@@ -70,6 +68,7 @@ test(
this_exe,
env : {
'_NIX_TEST_UNIT_DATA': meson.current_source_dir() / 'data',
+ 'NIX_CONFIG': 'extra-experimental-features = flakes',
},
protocol : 'gtest',
)
diff --git a/src/libflake-tests/nix-meson-build-support b/src/libflake-tests/nix-meson-build-support
new file mode 120000
index 000000000..0b140f56b
--- /dev/null
+++ b/src/libflake-tests/nix-meson-build-support
@@ -0,0 +1 @@
+../../nix-meson-build-support
\ No newline at end of file
diff --git a/src/libflake-tests/nix_api_flake.cc b/src/libflake-tests/nix_api_flake.cc
new file mode 100644
index 000000000..21109d181
--- /dev/null
+++ b/src/libflake-tests/nix_api_flake.cc
@@ -0,0 +1,51 @@
+#include "nix_api_store.h"
+#include "nix_api_store_internal.h"
+#include "nix_api_util.h"
+#include "nix_api_util_internal.h"
+#include "nix_api_expr.h"
+#include "nix_api_value.h"
+#include "nix_api_flake.h"
+
+#include "tests/nix_api_expr.hh"
+#include "tests/string_callback.hh"
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+namespace nixC {
+
+TEST_F(nix_api_store_test, nix_api_init_global_getFlake_exists)
+{
+ nix_libstore_init(ctx);
+ assert_ctx_ok();
+ nix_libexpr_init(ctx);
+ assert_ctx_ok();
+
+ auto settings = nix_flake_settings_new(ctx);
+ assert_ctx_ok();
+ ASSERT_NE(nullptr, settings);
+
+ nix_flake_init_global(ctx, settings);
+ assert_ctx_ok();
+
+ nix_eval_state_builder * builder = nix_eval_state_builder_new(ctx, store);
+ ASSERT_NE(nullptr, builder);
+ assert_ctx_ok();
+
+ auto state = nix_eval_state_build(ctx, builder);
+ assert_ctx_ok();
+ ASSERT_NE(nullptr, state);
+
+ nix_eval_state_builder_free(builder);
+
+ auto value = nix_alloc_value(ctx, state);
+ assert_ctx_ok();
+ ASSERT_NE(nullptr, value);
+
+ nix_err err = nix_expr_eval_from_string(ctx, state, "builtins.getFlake", ".", value);
+ assert_ctx_ok();
+ ASSERT_EQ(NIX_OK, err);
+ ASSERT_EQ(NIX_TYPE_FUNCTION, nix_get_type(ctx, value));
+}
+
+} // namespace nixC
diff --git a/src/libflake-tests/package.nix b/src/libflake-tests/package.nix
index 67e716979..51b68ad58 100644
--- a/src/libflake-tests/package.nix
+++ b/src/libflake-tests/package.nix
@@ -4,6 +4,7 @@
, mkMesonExecutable
, nix-flake
+, nix-flake-c
, nix-expr-test-support
, rapidcheck
@@ -26,8 +27,8 @@ mkMesonExecutable (finalAttrs: {
workDir = ./.;
fileset = fileset.unions [
- ../../build-utils-meson
- ./build-utils-meson
+ ../../nix-meson-build-support
+ ./nix-meson-build-support
../../.version
./.version
./meson.build
@@ -38,6 +39,7 @@ mkMesonExecutable (finalAttrs: {
buildInputs = [
nix-flake
+ nix-flake-c
nix-expr-test-support
rapidcheck
gtest
@@ -54,10 +56,6 @@ mkMesonExecutable (finalAttrs: {
mesonFlags = [
];
- env = lib.optionalAttrs (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) {
- LDFLAGS = "-fuse-ld=gold";
- };
-
passthru = {
tests = {
run = runCommand "${finalAttrs.pname}-run" {
@@ -67,6 +65,7 @@ mkMesonExecutable (finalAttrs: {
mkdir -p "$HOME"
'' + ''
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
+ export NIX_CONFIG="extra-experimental-features = flakes"
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
touch $out
'');
diff --git a/src/libflake/build-utils-meson b/src/libflake/build-utils-meson
deleted file mode 120000
index 5fff21bab..000000000
--- a/src/libflake/build-utils-meson
+++ /dev/null
@@ -1 +0,0 @@
-../../build-utils-meson
\ No newline at end of file
diff --git a/src/libflake/flake/flake.cc b/src/libflake/flake/flake.cc
index edb76f861..c2145ab39 100644
--- a/src/libflake/flake/flake.cc
+++ b/src/libflake/flake/flake.cc
@@ -21,29 +21,29 @@ using namespace flake;
namespace flake {
-typedef std::pair<StorePath, FlakeRef> FetchedFlake;
-typedef std::vector<std::pair<FlakeRef, FetchedFlake>> FlakeCache;
+struct FetchedFlake
+{
+ FlakeRef lockedRef;
+ StorePath storePath;
+};
+
+typedef std::map<FlakeRef, FetchedFlake> FlakeCache;
static std::optional<FetchedFlake> lookupInFlakeCache(
const FlakeCache & flakeCache,
const FlakeRef & flakeRef)
{
- // FIXME: inefficient.
- for (auto & i : flakeCache) {
- if (flakeRef == i.first) {
- debug("mapping '%s' to previously seen input '%s' -> '%s",
- flakeRef, i.first, i.second.second);
- return i.second;
- }
- }
-
- return std::nullopt;
+ auto i = flakeCache.find(flakeRef);
+ if (i == flakeCache.end()) return std::nullopt;
+ debug("mapping '%s' to previously seen input '%s' -> '%s",
+ flakeRef, i->first, i->second.lockedRef);
+ return i->second;
}
static std::tuple<StorePath, FlakeRef, FlakeRef> fetchOrSubstituteTree(
EvalState & state,
const FlakeRef & originalRef,
- bool allowLookup,
+ bool useRegistries,
FlakeCache & flakeCache)
{
auto fetched = lookupInFlakeCache(flakeCache, originalRef);
@@ -51,32 +51,39 @@ static std::tuple<StorePath, FlakeRef, FlakeRef> fetchOrSubstituteTree(
if (!fetched) {
if (originalRef.input.isDirect()) {
- fetched.emplace(originalRef.fetchTree(state.store));
+ auto [storePath, lockedRef] = originalRef.fetchTree(state.store);
+ fetched.emplace(FetchedFlake{.lockedRef = lockedRef, .storePath = storePath});
} else {
- if (allowLookup) {
- resolvedRef = originalRef.resolve(state.store);
- auto fetchedResolved = lookupInFlakeCache(flakeCache, originalRef);
- if (!fetchedResolved) fetchedResolved.emplace(resolvedRef.fetchTree(state.store));
- flakeCache.push_back({resolvedRef, *fetchedResolved});
- fetched.emplace(*fetchedResolved);
+ if (useRegistries) {
+ resolvedRef = originalRef.resolve(
+ state.store,
+ [](fetchers::Registry::RegistryType type) {
+ /* Only use the global registry and CLI flags
+ to resolve indirect flakerefs. */
+ return type == fetchers::Registry::Flag || type == fetchers::Registry::Global;
+ });
+ fetched = lookupInFlakeCache(flakeCache, originalRef);
+ if (!fetched) {
+ auto [storePath, lockedRef] = resolvedRef.fetchTree(state.store);
+ fetched.emplace(FetchedFlake{.lockedRef = lockedRef, .storePath = storePath});
+ }
+ flakeCache.insert_or_assign(resolvedRef, *fetched);
}
else {
throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", originalRef);
}
}
- flakeCache.push_back({originalRef, *fetched});
+ flakeCache.insert_or_assign(originalRef, *fetched);
}
- auto [storePath, lockedRef] = *fetched;
-
debug("got tree '%s' from '%s'",
- state.store->printStorePath(storePath), lockedRef);
+ state.store->printStorePath(fetched->storePath), fetched->lockedRef);
- state.allowPath(storePath);
+ state.allowPath(fetched->storePath);
- assert(!originalRef.input.getNarHash() || storePath == originalRef.input.computeStorePath(*state.store));
+ assert(!originalRef.input.getNarHash() || fetched->storePath == originalRef.input.computeStorePath(*state.store));
- return {std::move(storePath), resolvedRef, lockedRef};
+ return {fetched->storePath, resolvedRef, fetched->lockedRef};
}
static void forceTrivialValue(EvalState & state, Value & value, const PosIdx pos)
@@ -95,12 +102,19 @@ static void expectType(EvalState & state, ValueType type,
}
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
- EvalState & state, Value * value, const PosIdx pos,
- const std::optional<Path> & baseDir, InputPath lockRootPath);
+ EvalState & state,
+ Value * value,
+ const PosIdx pos,
+ const InputPath & lockRootPath,
+ const SourcePath & flakeDir);
-static FlakeInput parseFlakeInput(EvalState & state,
- std::string_view inputName, Value * value, const PosIdx pos,
- const std::optional<Path> & baseDir, InputPath lockRootPath)
+static FlakeInput parseFlakeInput(
+ EvalState & state,
+ std::string_view inputName,
+ Value * value,
+ const PosIdx pos,
+ const InputPath & lockRootPath,
+ const SourcePath & flakeDir)
{
expectType(state, nAttrs, *value, pos);
@@ -117,14 +131,25 @@ static FlakeInput parseFlakeInput(EvalState & state,
for (auto & attr : *value->attrs()) {
try {
if (attr.name == sUrl) {
- expectType(state, nString, *attr.value, attr.pos);
- url = attr.value->string_view();
+ forceTrivialValue(state, *attr.value, pos);
+ if (attr.value->type() == nString)
+ url = attr.value->string_view();
+ else if (attr.value->type() == nPath) {
+ auto path = attr.value->path();
+ if (path.accessor != flakeDir.accessor)
+ throw Error("input path '%s' at %s must be in the same source tree as %s",
+ path, state.positions[attr.pos], flakeDir);
+ url = "path:" + flakeDir.path.makeRelative(path.path);
+ }
+ else
+ throw Error("expected a string or a path but got %s at %s",
+ showType(attr.value->type()), state.positions[attr.pos]);
attrs.emplace("url", *url);
} else if (attr.name == sFlake) {
expectType(state, nBool, *attr.value, attr.pos);
input.isFlake = attr.value->boolean();
} else if (attr.name == sInputs) {
- input.overrides = parseFlakeInputs(state, attr.value, attr.pos, baseDir, lockRootPath);
+ input.overrides = parseFlakeInputs(state, attr.value, attr.pos, lockRootPath, flakeDir);
} else if (attr.name == sFollows) {
expectType(state, nString, *attr.value, attr.pos);
auto follows(parseInputPath(attr.value->c_str()));
@@ -182,7 +207,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
if (!attrs.empty())
throw Error("unexpected flake input attribute '%s', at %s", attrs.begin()->first, state.positions[pos]);
if (url)
- input.ref = parseFlakeRef(state.fetchSettings, *url, baseDir, true, input.isFlake);
+ input.ref = parseFlakeRef(state.fetchSettings, *url, {}, true, input.isFlake, true);
}
if (!input.follows && !input.ref)
@@ -192,8 +217,11 @@ static FlakeInput parseFlakeInput(EvalState & state,
}
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
- EvalState & state, Value * value, const PosIdx pos,
- const std::optional<Path> & baseDir, InputPath lockRootPath)
+ EvalState & state,
+ Value * value,
+ const PosIdx pos,
+ const InputPath & lockRootPath,
+ const SourcePath & flakeDir)
{
std::map<FlakeId, FlakeInput> inputs;
@@ -205,8 +233,8 @@ static std::map parseFlakeInputs(
state.symbols[inputAttr.name],
inputAttr.value,
inputAttr.pos,
- baseDir,
- lockRootPath));
+ lockRootPath,
+ flakeDir));
}
return inputs;
@@ -220,7 +248,8 @@ static Flake readFlake(
const SourcePath & rootDir,
const InputPath & lockRootPath)
{
- auto flakePath = rootDir / CanonPath(resolvedRef.subdir) / "flake.nix";
+ auto flakeDir = rootDir / CanonPath(resolvedRef.subdir);
+ auto flakePath = flakeDir / "flake.nix";
// NOTE evalFile forces vInfo to be an attrset because mustBeTrivial is true.
Value vInfo;
@@ -241,7 +270,7 @@ static Flake readFlake(
auto sInputs = state.symbols.create("inputs");
if (auto inputs = vInfo.attrs()->get(sInputs))
- flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, flakePath.parent().path.abs(), lockRootPath); // FIXME
+ flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, lockRootPath, flakeDir);
auto sOutputs = state.symbols.create("outputs");
@@ -316,25 +345,20 @@ static Flake readFlake(
static Flake getFlake(
EvalState & state,
const FlakeRef & originalRef,
- bool allowLookup,
+ bool useRegistries,
FlakeCache & flakeCache,
- InputPath lockRootPath)
+ const InputPath & lockRootPath)
{
auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
- state, originalRef, allowLookup, flakeCache);
+ state, originalRef, useRegistries, flakeCache);
return readFlake(state, originalRef, resolvedRef, lockedRef, state.rootPath(state.store->toRealPath(storePath)), lockRootPath);
}
-Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup, FlakeCache & flakeCache)
-{
- return getFlake(state, originalRef, allowLookup, flakeCache, {});
-}
-
-Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup)
+Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool useRegistries)
{
FlakeCache flakeCache;
- return getFlake(state, originalRef, allowLookup, flakeCache);
+ return getFlake(state, originalRef, useRegistries, flakeCache, {});
}
static LockFile readLockFile(
@@ -360,7 +384,7 @@ LockedFlake lockFlake(
auto useRegistries = lockFlags.useRegistries.value_or(settings.useRegistries);
- auto flake = getFlake(state, topRef, useRegistries, flakeCache);
+ auto flake = getFlake(state, topRef, useRegistries, flakeCache, {});
if (lockFlags.applyNixConfig) {
flake.config.apply(settings);
@@ -379,13 +403,29 @@ LockedFlake lockFlake(
debug("old lock file: %s", oldLockFile);
- std::map<InputPath, FlakeInput> overrides;
+ struct OverrideTarget
+ {
+ FlakeInput input;
+ SourcePath sourcePath;
+ std::optional<InputPath> parentInputPath; // FIXME: rename to inputPathPrefix?
+ };
+
+ std::map<InputPath, OverrideTarget> overrides;
std::set<InputPath> explicitCliOverrides;
std::set<InputPath> overridesUsed, updatesUsed;
std::map<ref<Node>, SourcePath> nodePaths;
for (auto & i : lockFlags.inputOverrides) {
- overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
+ overrides.emplace(
+ i.first,
+ OverrideTarget {
+ .input = FlakeInput { .ref = i.second },
+ /* Note: any relative overrides
+ (e.g. `--override-input B/C "path:./foo/bar"`)
+ are interpreted relative to the top-level
+ flake. */
+ .sourcePath = flake.path,
+ });
explicitCliOverrides.insert(i.first);
}
@@ -398,8 +438,8 @@ LockedFlake lockFlake(
ref<Node> node,
const InputPath & inputPathPrefix,
std::shared_ptr<const Node> oldNode,
- const InputPath & lockRootPath,
- const Path & parentPath,
+ const InputPath & followsPrefix,
+ const SourcePath & sourcePath,
bool trustLock)>
computeLocks;
@@ -414,8 +454,13 @@ LockedFlake lockFlake(
/* The old node, if any, from which locks can be
copied. */
std::shared_ptr<const Node> oldNode,
- const InputPath & lockRootPath,
- const Path & parentPath,
+ /* The prefix relative to which 'follows' should be
+ interpreted. When a node is initially locked, it's
+ relative to the node's flake; when it's already locked,
+ it's relative to the root of the lock file. */
+ const InputPath & followsPrefix,
+ /* The source path of this node's flake. */
+ const SourcePath & sourcePath,
bool trustLock)
{
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
@@ -427,7 +472,12 @@ LockedFlake lockFlake(
auto inputPath(inputPathPrefix);
inputPath.push_back(id);
inputPath.push_back(idOverride);
- overrides.insert_or_assign(inputPath, inputOverride);
+ overrides.emplace(inputPath,
+ OverrideTarget {
+ .input = inputOverride,
+ .sourcePath = sourcePath,
+ .parentInputPath = inputPathPrefix
+ });
}
}
@@ -459,13 +509,18 @@ LockedFlake lockFlake(
auto i = overrides.find(inputPath);
bool hasOverride = i != overrides.end();
bool hasCliOverride = explicitCliOverrides.contains(inputPath);
- if (hasOverride) {
+ if (hasOverride)
overridesUsed.insert(inputPath);
- // Respect the “flakeness” of the input even if we
- // override it
- i->second.isFlake = input2.isFlake;
- }
- auto & input = hasOverride ? i->second : input2;
+ auto input = hasOverride ? i->second.input : input2;
+
+ /* Resolve relative 'path:' inputs relative to
+ the source path of the overrider. */
+ auto overridenSourcePath = hasOverride ? i->second.sourcePath : sourcePath;
+
+ /* Respect the "flakeness" of the input even if we
+ override it. */
+ if (hasOverride)
+ input.isFlake = input2.isFlake;
/* Resolve 'follows' later (since it may refer to an input
path we haven't processed yet. */
@@ -481,6 +536,33 @@ LockedFlake lockFlake(
assert(input.ref);
+ auto overridenParentPath =
+ input.ref->input.isRelative()
+ ? std::optional(hasOverride ? i->second.parentInputPath : inputPathPrefix)
+ : std::nullopt;
+
+ auto resolveRelativePath = [&]() -> std::optional<SourcePath>
+ {
+ if (auto relativePath = input.ref->input.isRelative()) {
+ return SourcePath {
+ overridenSourcePath.accessor,
+ CanonPath(*relativePath, overridenSourcePath.path.parent().value())
+ };
+ } else
+ return std::nullopt;
+ };
+
+ /* Get the input flake, resolve 'path:./...'
+ flakerefs relative to the parent flake. */
+ auto getInputFlake = [&]()
+ {
+ if (auto resolvedPath = resolveRelativePath()) {
+ return readFlake(state, *input.ref, *input.ref, *input.ref, *resolvedPath, inputPath);
+ } else {
+ return getFlake(state, *input.ref, useRegistries, flakeCache, inputPath);
+ }
+ };
+
/* Do we have an entry in the existing lock file?
And the input is not in updateInputs? */
std::shared_ptr<LockedNode> oldLock;
@@ -494,6 +576,7 @@ LockedFlake lockFlake(
if (oldLock
&& oldLock->originalRef == *input.ref
+ && oldLock->parentPath == overridenParentPath
&& !hasCliOverride)
{
debug("keeping existing input '%s'", inputPathS);
@@ -502,7 +585,10 @@ LockedFlake lockFlake(
didn't change and there is no override from a
higher level flake. */
auto childNode = make_ref<LockedNode>(
- oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake);
+ oldLock->lockedRef,
+ oldLock->originalRef,
+ oldLock->isFlake,
+ oldLock->parentPath);
node->inputs.insert_or_assign(id, childNode);
@@ -544,7 +630,7 @@ LockedFlake lockFlake(
break;
}
}
- auto absoluteFollows(lockRootPath);
+ auto absoluteFollows(followsPrefix);
absoluteFollows.insert(absoluteFollows.end(), follows->begin(), follows->end());
fakeInputs.emplace(i.first, FlakeInput {
.follows = absoluteFollows,
@@ -554,11 +640,12 @@ LockedFlake lockFlake(
}
if (mustRefetch) {
- auto inputFlake = getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath);
+ auto inputFlake = getInputFlake();
nodePaths.emplace(childNode, inputFlake.path.parent());
- computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, lockRootPath, parentPath, false);
+ computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, followsPrefix,
+ inputFlake.path, false);
} else {
- computeLocks(fakeInputs, childNode, inputPath, oldLock, lockRootPath, parentPath, true);
+ computeLocks(fakeInputs, childNode, inputPath, oldLock, followsPrefix, sourcePath, true);
}
} else {
@@ -566,7 +653,9 @@ LockedFlake lockFlake(
this input. */
debug("creating new input '%s'", inputPathS);
- if (!lockFlags.allowUnlocked && !input.ref->input.isLocked())
+ if (!lockFlags.allowUnlocked
+ && !input.ref->input.isLocked()
+ && !input.ref->input.isRelative())
throw Error("cannot update unlocked flake input '%s' in pure mode", inputPathS);
/* Note: in case of an --override-input, we use
@@ -579,17 +668,13 @@ LockedFlake lockFlake(
auto ref = (input2.ref && explicitCliOverrides.contains(inputPath)) ? *input2.ref : *input.ref;
if (input.isFlake) {
- Path localPath = parentPath;
- FlakeRef localRef = *input.ref;
+ auto inputFlake = getInputFlake();
- // If this input is a path, recurse it down.
- // This allows us to resolve path inputs relative to the current flake.
- if (localRef.input.getType() == "path")
- localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
-
- auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
-
- auto childNode = make_ref(inputFlake.lockedRef, ref);
+ auto childNode = make_ref<LockedNode>(
+ inputFlake.lockedRef,
+ ref,
+ true,
+ overridenParentPath);
node->inputs.insert_or_assign(id, childNode);
@@ -610,18 +695,27 @@ LockedFlake lockFlake(
oldLock
? std::dynamic_pointer_cast<const Node>(oldLock)
: readLockFile(state.fetchSettings, inputFlake.lockFilePath()).root.get_ptr(),
- oldLock ? lockRootPath : inputPath,
- localPath,
+ oldLock ? followsPrefix : inputPath,
+ inputFlake.path,
false);
}
else {
- auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
- state, *input.ref, useRegistries, flakeCache);
+ auto [path, lockedRef] = [&]() -> std::tuple<SourcePath, FlakeRef>
+ {
+ // Handle non-flake 'path:./...' inputs.
+ if (auto resolvedPath = resolveRelativePath()) {
+ return {*resolvedPath, *input.ref};
+ } else {
+ auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
+ state, *input.ref, useRegistries, flakeCache);
+ return {state.rootPath(state.store->toRealPath(storePath)), lockedRef};
+ }
+ }();
- auto childNode = make_ref<LockedNode>(lockedRef, ref, false);
+ auto childNode = make_ref<LockedNode>(lockedRef, ref, false, overridenParentPath);
- nodePaths.emplace(childNode, state.rootPath(state.store->toRealPath(storePath)));
+ nodePaths.emplace(childNode, path);
node->inputs.insert_or_assign(id, childNode);
}
@@ -634,9 +728,6 @@ LockedFlake lockFlake(
}
};
- // Bring in the current ref for relative path resolution if we have it
- auto parentPath = flake.path.parent().path.abs();
-
nodePaths.emplace(newLockFile.root, flake.path.parent());
computeLocks(
@@ -645,7 +736,7 @@ LockedFlake lockFlake(
{},
lockFlags.recreateLockFile ? nullptr : oldLockFile.root.get_ptr(),
{},
- parentPath,
+ flake.path,
false);
for (auto & i : lockFlags.inputOverrides)
@@ -671,7 +762,11 @@ LockedFlake lockFlake(
if (lockFlags.writeLockFile) {
if (sourcePath || lockFlags.outputLockFilePath) {
- if (auto unlockedInput = newLockFile.isUnlocked()) {
+ if (auto unlockedInput = newLockFile.isUnlocked(state.fetchSettings)) {
+ if (lockFlags.failOnUnlocked)
+ throw Error(
+ "Will not write lock file of flake '%s' because it has an unlocked input ('%s'). "
+ "Use '--allow-dirty-locks' to allow this anyway.", topRef, *unlockedInput);
if (state.fetchSettings.warnDirty)
warn("will not write lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
} else {
@@ -686,7 +781,7 @@ LockedFlake lockFlake(
writeFile(*lockFlags.outputLockFilePath, newLockFileS);
} else {
auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
- auto outputLockFilePath = *sourcePath + "/" + relPath;
+ auto outputLockFilePath = *sourcePath / relPath;
bool lockFileExists = pathExists(outputLockFilePath);
@@ -724,8 +819,7 @@ LockedFlake lockFlake(
repo, so we should re-read it. FIXME: we could
also just clear the 'rev' field... */
auto prevLockedRef = flake.lockedRef;
- FlakeCache dummyCache;
- flake = getFlake(state, topRef, useRegistries, dummyCache);
+ flake = getFlake(state, topRef, useRegistries);
if (lockFlags.commitLockFile &&
flake.lockedRef.input.getRev() &&
@@ -816,7 +910,7 @@ void callFlake(EvalState & state,
assert(vFetchFinalTree);
Value * args[] = {vLocks, &vOverrides, *vFetchFinalTree};
- state.callFunction(*vCallFlake, 3, args, vRes, noPos);
+ state.callFunction(*vCallFlake, args, vRes, noPos);
}
void initLib(const Settings & settings)
@@ -970,9 +1064,11 @@ static RegisterPrimOp r4({
}
-std::optional<Fingerprint> LockedFlake::getFingerprint(ref<Store> store) const
+std::optional<Fingerprint> LockedFlake::getFingerprint(
+ ref<Store> store,
+ const fetchers::Settings & fetchSettings) const
{
- if (lockFile.isUnlocked()) return std::nullopt;
+ if (lockFile.isUnlocked(fetchSettings)) return std::nullopt;
auto fingerprint = flake.lockedRef.input.getFingerprint(store);
if (!fingerprint) return std::nullopt;
diff --git a/src/libflake/flake/flake.hh b/src/libflake/flake/flake.hh
index cc2bea76e..9ab661fce 100644
--- a/src/libflake/flake/flake.hh
+++ b/src/libflake/flake/flake.hh
@@ -110,7 +110,7 @@ struct Flake
}
};
-Flake getFlake(EvalState & state, const FlakeRef & flakeRef, bool allowLookup);
+Flake getFlake(EvalState & state, const FlakeRef & flakeRef, bool useRegistries);
/**
* Fingerprint of a locked flake; used as a cache key.
@@ -129,7 +129,9 @@ struct LockedFlake
*/
std::map<ref<Node>, SourcePath> nodePaths;
- std::optional<Fingerprint> getFingerprint(ref<Store> store) const;
+ std::optional<Fingerprint> getFingerprint(
+ ref<Store> store,
+ const fetchers::Settings & fetchSettings) const;
};
struct LockFlags
@@ -156,6 +158,11 @@ struct LockFlags
*/
bool writeLockFile = true;
+ /**
+ * Throw an exception when the flake has an unlocked input.
+ */
+ bool failOnUnlocked = false;
+
/**
* Whether to use the registries to lookup indirect flake
* references like 'nixpkgs'.
diff --git a/src/libflake/flake/flakeref.cc b/src/libflake/flake/flakeref.cc
index 01fe747f9..720f771ab 100644
--- a/src/libflake/flake/flakeref.cc
+++ b/src/libflake/flake/flakeref.cc
@@ -3,7 +3,6 @@
#include "url.hh"
#include "url-parts.hh"
#include "fetchers.hh"
-#include "registry.hh"
namespace nix {
@@ -36,7 +35,9 @@ std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef)
return str;
}
-FlakeRef FlakeRef::resolve(ref<Store> store) const
+FlakeRef FlakeRef::resolve(
+ ref<Store> store,
+ const fetchers::RegistryFilter & filter) const
{
auto [input2, extraAttrs] = lookupInRegistries(store, input);
return FlakeRef(std::move(input2), fetchers::maybeGetStrAttr(extraAttrs, "dir").value_or(subdir));
@@ -47,9 +48,10 @@ FlakeRef parseFlakeRef(
const std::string & url,
const std::optional & baseDir,
bool allowMissing,
- bool isFlake)
+ bool isFlake,
+ bool preserveRelativePaths)
{
- auto [flakeRef, fragment] = parseFlakeRefWithFragment(fetchSettings, url, baseDir, allowMissing, isFlake);
+ auto [flakeRef, fragment] = parseFlakeRefWithFragment(fetchSettings, url, baseDir, allowMissing, isFlake, preserveRelativePaths);
if (fragment != "")
throw Error("unexpected fragment '%s' in flake reference '%s'", fragment, url);
return flakeRef;
@@ -67,35 +69,43 @@ std::optional<FlakeRef> maybeParseFlakeRef(
}
}
+static std::pair<FlakeRef, std::string> fromParsedURL(
+ const fetchers::Settings & fetchSettings,
+ ParsedURL && parsedURL,
+ bool isFlake)
+{
+ auto dir = getOr(parsedURL.query, "dir", "");
+ parsedURL.query.erase("dir");
+
+ std::string fragment;
+ std::swap(fragment, parsedURL.fragment);
+
+ return {FlakeRef(fetchers::Input::fromURL(fetchSettings, parsedURL, isFlake), dir), fragment};
+}
+
std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
const fetchers::Settings & fetchSettings,
const std::string & url,
const std::optional & baseDir,
bool allowMissing,
- bool isFlake)
+ bool isFlake,
+ bool preserveRelativePaths)
{
- std::string path = url;
- std::string fragment = "";
- std::map<std::string, std::string> query;
- auto pathEnd = url.find_first_of("#?");
- auto fragmentStart = pathEnd;
- if (pathEnd != std::string::npos && url[pathEnd] == '?') {
- fragmentStart = url.find("#");
- }
- if (pathEnd != std::string::npos) {
- path = url.substr(0, pathEnd);
- }
- if (fragmentStart != std::string::npos) {
- fragment = percentDecode(url.substr(fragmentStart+1));
- }
- if (pathEnd != std::string::npos && fragmentStart != std::string::npos && url[pathEnd] == '?') {
- query = decodeQuery(url.substr(pathEnd+1, fragmentStart-pathEnd-1));
- }
+ static std::regex pathFlakeRegex(
+ R"(([^?#]*)(\?([^#]*))?(#(.*))?)",
+ std::regex::ECMAScript);
+
+ std::smatch match;
+ auto succeeds = std::regex_match(url, match, pathFlakeRegex);
+ assert(succeeds);
+ auto path = match[1].str();
+ auto query = decodeQuery(match[3]);
+ auto fragment = percentDecode(match[5].str());
if (baseDir) {
/* Check if 'url' is a path (either absolute or relative
- to 'baseDir'). If so, search upward to the root of the
- repo (i.e. the directory containing .git). */
+ to 'baseDir'). If so, search upward to the root of the
+ repo (i.e. the directory containing .git). */
path = absPath(path, baseDir);
@@ -144,15 +154,12 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
while (flakeRoot != "/") {
if (pathExists(flakeRoot + "/.git")) {
- auto base = std::string("git+file://") + flakeRoot;
-
auto parsedURL = ParsedURL{
- .url = base, // FIXME
- .base = base,
.scheme = "git+file",
.authority = "",
.path = flakeRoot,
.query = query,
+ .fragment = fragment,
};
if (subdir != "") {
@@ -164,9 +171,7 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
if (pathExists(flakeRoot + "/.git/shallow"))
parsedURL.query.insert_or_assign("shallow", "1");
- return std::make_pair(
- FlakeRef(fetchers::Input::fromURL(fetchSettings, parsedURL), getOr(parsedURL.query, "dir", "")),
- fragment);
+ return fromParsedURL(fetchSettings, std::move(parsedURL), isFlake);
}
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
@@ -175,26 +180,27 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
}
} else {
- if (!hasPrefix(path, "/"))
+ if (!preserveRelativePaths && !isAbsolute(path))
throw BadURL("flake reference '%s' is not an absolute path", url);
- path = canonPath(path + "/" + getOr(query, "dir", ""));
}
- fetchers::Attrs attrs;
- attrs.insert_or_assign("type", "path");
- attrs.insert_or_assign("path", path);
+ return fromParsedURL(fetchSettings, {
+ .scheme = "path",
+ .authority = "",
+ .path = path,
+ .query = query,
+ .fragment = fragment
+ }, isFlake);
+}
- return std::make_pair(FlakeRef(fetchers::Input::fromAttrs(fetchSettings, std::move(attrs)), ""), fragment);
-};
-
-
-/* Check if 'url' is a flake ID. This is an abbreviated syntax for
- 'flake:<flake-id>?ref=<ref>&rev=<rev>'. */
+/**
+ * Check if `url` is a flake ID. This is an abbreviated syntax for
+ * `flake:<flake-id>?ref=<ref>&rev=<rev>`.
+ */
static std::optional<std::pair<FlakeRef, std::string>> parseFlakeIdRef(
const fetchers::Settings & fetchSettings,
const std::string & url,
- bool isFlake
-)
+ bool isFlake)
{
std::smatch match;
@@ -205,8 +211,6 @@ static std::optional<std::pair<FlakeRef, std::string>> parseFlakeIdRef(
if (std::regex_match(url, match, flakeRegex)) {
auto parsedURL = ParsedURL{
- .url = url,
- .base = "flake:" + match.str(1),
.scheme = "flake",
.authority = "",
.path = match[1],
std::optional<std::pair<FlakeRef, std::string>> parseURLFlakeRef(
const fetchers::Settings & fetchSettings,
const std::string & url,
const std::optional & baseDir,
- bool isFlake
-)
+ bool isFlake)
{
- ParsedURL parsedURL;
try {
- parsedURL = parseURL(url);
+ auto parsed = parseURL(url);
+ if (baseDir
+ && (parsed.scheme == "path" || parsed.scheme == "git+file")
+ && !isAbsolute(parsed.path))
+ parsed.path = absPath(parsed.path, *baseDir);
+ return fromParsedURL(fetchSettings, std::move(parsed), isFlake);
} catch (BadURL &) {
return std::nullopt;
}
-
- std::string fragment;
- std::swap(fragment, parsedURL.fragment);
-
- auto input = fetchers::Input::fromURL(fetchSettings, parsedURL, isFlake);
- input.parent = baseDir;
-
- return std::make_pair(
- FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")),
- fragment);
}
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
@@ -250,18 +247,17 @@ std::pair parseFlakeRefWithFragment(
const std::string & url,
const std::optional & baseDir,
bool allowMissing,
- bool isFlake)
+ bool isFlake,
+ bool preserveRelativePaths)
{
using namespace fetchers;
- std::smatch match;
-
if (auto res = parseFlakeIdRef(fetchSettings, url, isFlake)) {
return *res;
} else if (auto res = parseURLFlakeRef(fetchSettings, url, baseDir, isFlake)) {
return *res;
} else {
- return parsePathFlakeRefWithFragment(fetchSettings, url, baseDir, allowMissing, isFlake);
+ return parsePathFlakeRefWithFragment(fetchSettings, url, baseDir, allowMissing, isFlake, preserveRelativePaths);
}
}
diff --git a/src/libflake/flake/flakeref.hh b/src/libflake/flake/flakeref.hh
index 1064538a7..c9cf7952d 100644
--- a/src/libflake/flake/flakeref.hh
+++ b/src/libflake/flake/flakeref.hh
@@ -6,6 +6,7 @@
#include "types.hh"
#include "fetchers.hh"
#include "outputs-spec.hh"
+#include "registry.hh"
namespace nix {
@@ -48,6 +49,11 @@ struct FlakeRef
bool operator ==(const FlakeRef & other) const = default;
+ bool operator <(const FlakeRef & other) const
+ {
+ return std::tie(input, subdir) < std::tie(other.input, other.subdir);
+ }
+
FlakeRef(fetchers::Input && input, const Path & subdir)
: input(std::move(input)), subdir(subdir)
{ }
@@ -57,7 +63,9 @@ struct FlakeRef
fetchers::Attrs toAttrs() const;
- FlakeRef resolve(ref<Store> store) const;
+ FlakeRef resolve(
+ ref<Store> store,
+ const fetchers::RegistryFilter & filter = {}) const;
static FlakeRef fromAttrs(
const fetchers::Settings & fetchSettings,
@@ -76,7 +84,8 @@ FlakeRef parseFlakeRef(
const std::string & url,
const std::optional<Path> & baseDir = {},
bool allowMissing = false,
- bool isFlake = true);
+ bool isFlake = true,
+ bool preserveRelativePaths = false);
/**
* @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory)
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
const std::string & url,
const std::optional<Path>